   fTuneParameters(tuneParameters),
   // ...
   fOptimizationFitType(optimizationFitType),
   // ...
   std::string name = "OptimizeConfigParameters_";
   // ...
   Log() << kFATAL << " ERROR: Sorry, Regression is not yet implemented for automatic parameter optimization"
         << " --> exit" << Endl;
   // ...
   Log() << kINFO << "Automatic optimisation of tuning parameters in "
   // ...
   std::map<TString,TMVA::Interval*>::iterator it;
   // ...
   Log() << kINFO << it->first
         << " in range from: " << it->second->GetMin()
         << " to: " << it->second->GetMax()
         << " in : " << it->second->GetNbins() << " steps"
   // ...
   if(!GetMethod()->IsSilentFile()) GetMethod()->BaseDir()->cd();
   // ...
   y[i] = fFOMvsIter[i];
   // ...
   h->SetXTitle("#iteration "+fOptimizationFitType);
   h->SetYTitle(fFOMType);
   // ...
   gFOMvsIter->SetName((TString(GetMethod()->GetName())+"_FOMvsIter").Data());
   if(!GetMethod()->IsSilentFile()) gFOMvsIter->Write();
   if(!GetMethod()->IsSilentFile()) h->Write();
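   // Destructor fragment (above): per the class documentation it stores the FOM-vs-iteration
   // history as a frame histogram "h" and a TGraph "<MethodName>_FOMvsIter" in the method's
   // base directory before cleaning up; nothing is written when the output file is silent.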
   // ...
   if (fOptimizationFitType == "Scan" ) this->optimizeScan();
   else if (fOptimizationFitType == "FitGA" || fOptimizationFitType == "Minuit" ) this->optimizeFit();
   // ...
   Log() << kFATAL << "You have chosen as optimization type " << fOptimizationFitType
         << " that is not (yet) coded --> exit()" << Endl;
   // ...
   Log() << kINFO << "For " << GetMethod()->GetName() << " the optimized Parameters are: " << Endl;
   std::map<TString,Double_t>::iterator it;
   for(it=fTunedParameters.begin(); it!= fTunedParameters.end(); ++it){
      Log() << kINFO << it->first << " = " << it->second << Endl;
   // ...
   return fTunedParameters;
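   // A minimal usage sketch of driving this class directly (the parameter name is illustrative
   // only; a booked MethodBase* "method" is assumed):
   //
   //   std::map<TString,TMVA::Interval*> tuneParameters;
   //   tuneParameters["MaxDepth"] = new TMVA::Interval(2, 5, 4);          // 4 grid points
   //   TMVA::OptimizeConfigParameters optimizer(method, tuneParameters,
   //                                            "ROCIntegral", "FitGA");  // FOM and fit type
   //   std::map<TString,Double_t> tuned = optimizer.optimize();           // best settings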
   // ...
   std::vector < int > indices;
   for (UInt_t i=0; i< base.size(); i++){
      indices.push_back(val % base[i] );
      val = int( floor( float(val) / float(base[i]) ) );
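   // GetScanIndices() decomposes the flat combination index "val" in mixed-radix fashion:
   // indices[i] = val % base[i], then val /= base[i], where base[i] is (in the caller below)
   // the number of grid points of the i-th tuning parameter.  E.g. for base = {2, 3},
   // val = 5 gives indices = {1, 2}: the 2nd value of the first parameter, 3rd of the second.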
   // ...
   Double_t bestFOM=-1000000, currentFOM;
   // ...
   std::map<TString,Double_t> currentParameters;
   std::map<TString,TMVA::Interval*>::iterator it;
   // ...
   currentParameters.clear();
   fTunedParameters.clear();
   // ...
   for (it=fTuneParameters.begin(); it!=fTuneParameters.end(); ++it){
      currentParameters.insert(std::pair<TString,Double_t>(it->first,it->second->GetMin()));
      fTunedParameters.insert(std::pair<TString,Double_t>(it->first,it->second->GetMin()));
   // ...
   std::vector< std::vector <Double_t> > v;
   for (it=fTuneParameters.begin(); it!=fTuneParameters.end(); ++it){
      std::vector< Double_t > tmp;
      for (Int_t k=0; k<it->second->GetNbins(); k++){
         tmp.push_back(it->second->GetElement(k));
   // ...
   std::vector< int > Nindividual;
   for (UInt_t i=0; i<v.size(); i++) {
      // ...
      Nindividual.push_back(v[i].size());
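   // v[i] holds the allowed grid values of the i-th tuning parameter and Nindividual[i] their
   // count; Ntot, used in the loop below, is presumably the product of all Nindividual entries,
   // i.e. the total number of parameter combinations to scan.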
   // ...
   for (int i=0; i<Ntot; i++){
      // ...
      std::vector<int> indices = GetScanIndices(i, Nindividual );
      for (it=fTuneParameters.begin(), index=0; index< indices.size(); ++index, ++it){
         currentParameters[it->first] = v[index][indices[index]];
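      // Each flat index i in [0, Ntot) is mapped via GetScanIndices onto one point of the
      // Cartesian grid: indices[index] selects which of the allowed values v[index][...] the
      // "index"-th tuning parameter takes in this combination.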
      // ...
      Log() << kINFO << "--------------------------" << Endl;
      Log() << kINFO << "Settings being evaluated:" << Endl;
      for (std::map<TString,Double_t>::iterator it_print=currentParameters.begin();
           it_print!=currentParameters.end(); ++it_print){
         Log() << kINFO << " " << it_print->first << " = " << it_print->second << Endl;
      // ...
      GetMethod()->Reset();
      GetMethod()->SetTuneParameters(currentParameters);
      // ...
      if(!GetMethod()->IsSilentFile()) GetMethod()->BaseDir()->cd();
      if (i==0) GetMethod()->GetTransformationHandler().CalcTransformations(
                   GetMethod()->Data()->GetEventCollection());
      // ...
      GetMethod()->Train();
      // ...
      currentFOM = GetFOM();
      Log() << kINFO << "FOM was found : " << currentFOM << "; current best is " << bestFOM << Endl;
      // ...
      if (currentFOM > bestFOM) {
         bestFOM = currentFOM;
         for (std::map<TString,Double_t>::iterator iter=currentParameters.begin();
              iter != currentParameters.end(); ++iter){
            fTunedParameters[iter->first]=iter->second;
      // ...
   GetMethod()->Reset();
   GetMethod()->SetTuneParameters(fTunedParameters);
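   // End of the scan: each combination triggers a full retraining followed by a FOM evaluation,
   // the combination with the largest FOM is kept in fTunedParameters, and the method is finally
   // reset and reconfigured with it.  The fit-based alternative (optimizeFit) follows.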
   // ...
   std::vector<TMVA::Interval*> ranges;
   std::map<TString, TMVA::Interval*>::iterator it;
   std::vector<Double_t> pars;
   // ...
   for (it=fTuneParameters.begin(); it != fTuneParameters.end(); ++it){
      // ...
      pars.push_back( (it->second)->GetMean() );
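   // The fit starts each parameter at the mean of its interval; the order of "pars" has to stay
   // in step with the iteration order of fTuneParameters, because the fitted values are read
   // back in the same order further below (pars[jcount++]).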
   // ...
   GetMethod()->GetTransformationHandler().CalcTransformations(GetMethod()->Data()->GetEventCollection());
   // ...
   if ( fOptimizationFitType == "Minuit" ) {
      TString opt="FitStrategy=0:UseImprove=False:UseMinos=False:Tolerance=100";
      // ...
                                  "FitterMinuit_BDTOptimize",
      // ...
   }else if ( fOptimizationFitType == "FitGA" ) {
      TString opt="PopSize=20:Steps=30:Cycles=3:ConvCrit=0.01:SaveBestCycle=5";
      // ...
                                  "FitterGA_BDTOptimize",
      // ...
      Log() << kWARNING << " you did not specify a valid OptimizationFitType "
            << " will use the default (FitGA) " << Endl;
      TString opt="PopSize=20:Steps=30:Cycles=3:ConvCrit=0.01:SaveBestCycle=5";
      // ...
                                  "FitterGA_BDTOptimize",
   // ...
   for (UInt_t ipar=0; ipar<ranges.size(); ipar++) delete ranges[ipar];
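   // In the elided lines the configured fitter ("FitterMinuit_BDTOptimize" or
   // "FitterGA_BDTOptimize") is presumably constructed over "ranges" with the option string
   // "opt" and run; after the fit "pars" holds the best parameter values found, which are
   // copied back into fTunedParameters below.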
   // ...
   GetMethod()->Reset();
   // ...
   fTunedParameters.clear();
   // ...
   for (it=fTuneParameters.begin(); it!=fTuneParameters.end(); ++it){
      fTunedParameters.insert(std::pair<TString,Double_t>(it->first,pars[jcount++]));
   // ...
   GetMethod()->SetTuneParameters(fTunedParameters);
   // ...
   std::map< std::vector<Double_t> , Double_t>::const_iterator iter;
   iter = fAlreadyTrainedParCombination.find(pars);
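   // EstimatorFunction() is the estimator the fitters minimise.  Parameter combinations that
   // were already trained are looked up in fAlreadyTrainedParCombination and their cached value
   // (the negative FOM, since smaller is better for the fitter) is returned directly; only new
   // combinations go through the reconfigure / retrain / evaluate path below.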
   // ...
   if (iter != fAlreadyTrainedParCombination.end()) {
      // ...
      std::map<TString,Double_t> currentParameters;
      // ...
      std::map<TString, TMVA::Interval*>::iterator it;
      for (it=fTuneParameters.begin(); it!=fTuneParameters.end(); ++it){
         currentParameters[it->first] = pars[icount++];
      // ...
      GetMethod()->Reset();
      GetMethod()->SetTuneParameters(currentParameters);
      if(!GetMethod()->IsSilentFile()) GetMethod()->BaseDir()->cd();
      // ...
      GetMethod()->GetTransformationHandler().
         CalcTransformations(GetMethod()->Data()->GetEventCollection());
      // ...
      GetMethod()->Train();
      // ...
      fAlreadyTrainedParCombination.insert(std::make_pair(pars,-currentFOM));
   // ...
   Log() << kFATAL << " ERROR, " << percent << " in " << fFOMType
         << " is not a valid floating point number" << Endl;
   // ...
   if (fMethod->DoRegression()){
      std::cout << " ERROR: Sorry, Regression is not yet implemented for automatic parameter optimisation"
                << " --> exit" << std::endl;
   // ...
   if (fFOMType == "Separation") fom = GetSeparation();
   else if (fFOMType == "ROCIntegral") fom = GetROCIntegral();
   else if (fFOMType.BeginsWith("SigEffAtBkgEff0")) fom = GetSigEffAtBkgEff(parsePercent(fFOMType));
   else if (fFOMType.BeginsWith("BkgRejAtSigEff0")) fom = GetBkgRejAtSigEff(parsePercent(fFOMType));
   else if (fFOMType.BeginsWith("BkgEffAtSigEff0")) fom = GetBkgEffAtSigEff(parsePercent(fFOMType));
   // ...
   Log() << kFATAL << " ERROR, you've specified as Figure of Merit in the "
         << " parameter optimisation " << fFOMType << " which has not"
         << " been implemented yet!! ---> exit " << Endl;
   // ...
   fFOMvsIter.push_back(fom);
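   // The FOM is selected by the fFOMType string; for the "...AtBkgEff0..." / "...AtSigEff0..."
   // variants the trailing digits appear to encode the fixed efficiency (e.g. "SigEffAtBkgEff01"
   // presumably meaning the signal efficiency at 10% background efficiency), extracted by the
   // parsePercent helper above.  Each FOM value is also appended to fFOMvsIter, which feeds the
   // FOM-vs-iteration graph written by the destructor.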
   // ...
   if (fMvaSig) fMvaSig->Delete();
   if (fMvaBkg) fMvaBkg->Delete();
   if (fMvaSigFineBin) fMvaSigFineBin->Delete();
   if (fMvaBkgFineBin) fMvaBkgFineBin->Delete();
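   // GetMVADists() refills the private MVA-output histograms below: the coarse 100-bin pair
   // (fMvaSig / fMvaBkg) serves the spline-PDF based separation, while the very finely binned
   // pair (100000 bins) is used by the counting-based FOMs (ROC integral, efficiencies).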
   // ...
   fMvaSig        = new TH1D("fMvaSig","",100,-1.5,1.5);
   fMvaBkg        = new TH1D("fMvaBkg","",100,-1.5,1.5);
   fMvaSigFineBin = new TH1D("fMvaSigFineBin","",100000,-1.5,1.5);
   fMvaBkgFineBin = new TH1D("fMvaBkgFineBin","",100000,-1.5,1.5);
   // ...
   const std::vector< Event*> events=fMethod->Data()->GetEventCollection(Types::kTesting);
   // ...
   UInt_t signalClassNr = fMethod->DataInfo().GetClassInfo("Signal")->GetNumber();
   // ...
   for (UInt_t iev=0; iev < events.size() ; iev++){
      // ...
      if (events[iev]->GetClass() == signalClassNr) {
         fMvaSig->Fill(fMethod->GetMvaValue(events[iev]),events[iev]->GetWeight());
         fMvaSigFineBin->Fill(fMethod->GetMvaValue(events[iev]),events[iev]->GetWeight());
      // ...
         fMvaBkg->Fill(fMethod->GetMvaValue(events[iev]),events[iev]->GetWeight());
         fMvaBkgFineBin->Fill(fMethod->GetMvaValue(events[iev]),events[iev]->GetWeight());
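      // The test-sample events (Types::kTesting) are split by their true class and filled,
      // weighted, into the signal or background histograms using the current MVA response.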
   // ...
   std::cout << "Separation calculation via histograms (not PDFs) seems to give still strange results!! Don't do that, check!!" << std::endl;
   // ...
   for (UInt_t i=0; i<nsteps; i++){
   // ...
   if ( (fMvaSigFineBin->GetXaxis()->GetXmin() != fMvaBkgFineBin->GetXaxis()->GetXmin()) ||
        (fMvaSigFineBin->GetNbinsX() != fMvaBkgFineBin->GetNbinsX()) ){
      std::cout << " Error in OptimizeConfigParameters GetROCIntegral, unequal histograms for sig and bkg.." << std::endl;
   // ...
      Double_t *cumulator = fMvaBkgFineBin->GetIntegral();
      Int_t nbins = fMvaSigFineBin->GetNbinsX();
      // ...
      for (Int_t ibin=1; ibin<=nbins; ibin++){
         sigIntegral += fMvaSigFineBin->GetBinContent(ibin) * fMvaSigFineBin->GetBinWidth(ibin);
      // ...
      for (Int_t ibin=1; ibin <= nbins; ibin++){
         integral += (cumulator[ibin]) * fMvaSigFineBin->GetBinContent(ibin)/sigIntegral * fMvaSigFineBin->GetBinWidth(ibin) ;
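         // "cumulator" is the normalised cumulative background distribution, so the sum
         // approximates  integral = ∫ F_bkg(x) f_sig(x) dx,  the probability that a background
         // event has a smaller MVA value than a signal event, i.e. the usual area under the
         // ROC curve (dividing by sigIntegral normalises the signal histogram on the fly).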
   // ...
   if ( (fMvaSigFineBin->GetXaxis()->GetXmin() != fMvaBkgFineBin->GetXaxis()->GetXmin()) ||
        (fMvaSigFineBin->GetNbinsX() != fMvaBkgFineBin->GetNbinsX()) ){
      std::cout << " Error in OptimizeConfigParameters GetSigEffAt, unequal histograms for sig and bkg.." << std::endl;
   // ...
   Double_t *bkgCumulator = fMvaBkgFineBin->GetIntegral();
   Double_t *sigCumulator = fMvaSigFineBin->GetIntegral();
   // ...
   Int_t nbins=fMvaBkgFineBin->GetNbinsX();
   // ...
   while (bkgCumulator[nbins-ibin] > (1-bkgEff)) {
      sigEff = sigCumulator[nbins]-sigCumulator[nbins-ibin];
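   // GetSigEffAtBkgEff(): starting from the upper edge of the MVA distribution the cut is
   // lowered (ibin grows) as long as the background efficiency above the cut stays below the
   // requested bkgEff; the signal fraction above that cut is the returned signal efficiency.
   // GetBkgEffAtSigEff() and GetBkgRejAtSigEff() below scan the same way but fix the signal
   // efficiency and report the background efficiency or rejection at the resulting cut.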
   // ...
   if ( (fMvaSigFineBin->GetXaxis()->GetXmin() != fMvaBkgFineBin->GetXaxis()->GetXmin()) ||
        (fMvaSigFineBin->GetNbinsX() != fMvaBkgFineBin->GetNbinsX()) ){
      std::cout << " Error in OptimizeConfigParameters GetBkgEffAt, unequal histograms for sig and bkg.." << std::endl;
   // ...
   Double_t *bkgCumulator = fMvaBkgFineBin->GetIntegral();
   Double_t *sigCumulator = fMvaSigFineBin->GetIntegral();
   // ...
   Int_t nbins=fMvaBkgFineBin->GetNbinsX();
   // ...
   while ( sigCumulator[nbins]-sigCumulator[nbins-ibin] < sigEff) {
      bkgEff = bkgCumulator[nbins]-bkgCumulator[nbins-ibin];
   // ...
   if ( (fMvaSigFineBin->GetXaxis()->GetXmin() != fMvaBkgFineBin->GetXaxis()->GetXmin()) ||
        (fMvaSigFineBin->GetNbinsX() != fMvaBkgFineBin->GetNbinsX()) ){
      std::cout << " Error in OptimizeConfigParameters GetBkgRejAt, unequal histograms for sig and bkg.." << std::endl;
   // ...
   Double_t *bkgCumulator = fMvaBkgFineBin->GetIntegral();
   Double_t *sigCumulator = fMvaSigFineBin->GetIntegral();
   // ...
   Int_t nbins=fMvaBkgFineBin->GetNbinsX();
   // ...
   while ( sigCumulator[nbins]-sigCumulator[nbins-ibin] < sigEff) {
      bkgRej = bkgCumulator[nbins-ibin];