// Excerpts from TMVA's OptimizeConfigParameters implementation
// (OptimizeConfigParameters.cxx)

// constructor (excerpt): member initialization, logger name, and a summary
// printout of the requested tuning ranges
     fTuneParameters(tuneParameters),
     fOptimizationFitType(optimizationFitType),
   // ...
   std::string name = "OptimizeConfigParameters_";
   if (fMethod->DoRegression()){
      Log() << kFATAL << " ERROR: Sorry, Regression is not yet implemented for automatic parameter optimization"
            << " --> exit" << Endl;
   }

   Log() << kINFO << "Automatic optimisation of tuning parameters in "
         << GetMethod()->GetName() << Endl;
   std::map<TString,TMVA::Interval*>::iterator it;
   for (it=fTuneParameters.begin(); it!=fTuneParameters.end(); it++) {
      Log() << kINFO << it->first
            << " in range from: " << it->second->GetMin()
            << " to: " << it->second->GetMax()
            << " in : " << it->second->GetNbins() << " steps"
            << Endl;
   }
   Log() << kINFO << " using the options: " << fFOMType << " and " << fOptimizationFitType << Endl;
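//// [illustrative sketch, not part of OptimizeConfigParameters.cxx] ////
// How a caller could assemble the interval map this constructor receives.
// The parameter names ("MaxDepth", "NTrees") and their ranges are assumptions
// chosen for illustration; TMVA::Interval(min, max, nbins) defines each grid.
#include <map>
#include "TString.h"
#include "TMVA/Interval.h"

std::map<TString, TMVA::Interval*> MakeTuneParameters()
{
   std::map<TString, TMVA::Interval*> tuneParameters;
   tuneParameters["MaxDepth"] = new TMVA::Interval(2, 5, 4);     // grid: 2,3,4,5
   tuneParameters["NTrees"]   = new TMVA::Interval(50, 500, 10); // 10 grid points
   return tuneParameters;
}
//// [end sketch] ////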
// destructor (excerpt): track the value range of the FOM-vs-iteration graph
      if (ymin>y[i]) ymin=y[i];
      if (ymax<y[i]) ymax=y[i];
// optimize() (excerpt): dispatch to the chosen optimization strategy and
// report the resulting tuned parameters
   Log() << kFATAL << "You have chosen an optimization type " << fOptimizationFitType
         << " that is not (yet) coded --> exit()" << Endl;
   // ...
   std::map<TString,Double_t>::iterator it;
   for (it=fTunedParameters.begin(); it!=fTunedParameters.end(); it++) {
      Log() << kINFO << it->first << " = " << it->second << Endl;
   }
// GetScanIndices(): decompose a flat combination index into one index per
// tuning parameter (mixed-radix arithmetic over the per-parameter grid sizes)
   std::vector < int > indices;
   for (UInt_t i=0; i< base.size(); i++){
      indices.push_back(val % base[i] );
      val = int( floor( float(val) / float(base[i]) ) );
   }
   return indices;
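//// [illustrative sketch, not part of OptimizeConfigParameters.cxx] ////
// GetScanIndices turns one loop counter into a full grid walk: the grid sizes
// act as a mixed-radix base, so no nested loops are needed. A standalone demo
// of the same arithmetic (names are made up for the example):
#include <cstdio>
#include <vector>

std::vector<int> ScanIndices(int val, const std::vector<int>& base)
{
   std::vector<int> indices;
   for (std::size_t i = 0; i < base.size(); i++) {
      indices.push_back(val % base[i]); // index within parameter i's grid
      val /= base[i];                   // move on to the next "digit"
   }
   return indices;
}

int main()
{
   std::vector<int> base = {3, 2};      // 3 values for par A, 2 for par B
   for (int i = 0; i < 3 * 2; i++) {    // all 6 combinations from one counter
      std::vector<int> idx = ScanIndices(i, base);
      std::printf("i=%d -> (A=%d, B=%d)\n", i, idx[0], idx[1]);
   }
   return 0;
}
//// [end sketch] ////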
// optimizeScan() (excerpt): scan all combinations of the tuning-parameter
// grids, evaluate the figure of merit (FOM) for each, and keep the best
   Double_t      bestFOM=-1000000, currentFOM;

   std::map<TString,Double_t> currentParameters;
   std::map<TString,TMVA::Interval*>::iterator it;

   // initialize every parameter at the lower edge of its interval
   currentParameters.clear();
   for (it=fTuneParameters.begin(); it!=fTuneParameters.end(); it++){
      currentParameters.insert(std::pair<TString,Double_t>(it->first,it->second->GetMin()));
      fTunedParameters.insert(std::pair<TString,Double_t>(it->first,it->second->GetMin()));
   }

   // collect, per parameter, the grid values to scan
   std::vector< std::vector <Double_t> > v;
   for (it=fTuneParameters.begin(); it!=fTuneParameters.end(); it++){
      std::vector< Double_t > tmp;
      for (Int_t k=0; k<it->second->GetNbins(); k++){
         tmp.push_back(it->second->GetElement(k));
      }
      v.push_back(tmp);
   }

   // total number of combinations, and per-parameter grid sizes (the "base")
   Int_t Ntot = 1;
   std::vector< int > Nindividual;
   for (UInt_t i=0; i<v.size(); i++) {
      Ntot *= v[i].size();
      Nindividual.push_back(v[i].size());
   }

   for (int i=0; i<Ntot; i++){
      // translate the flat index into one grid value per parameter
      UInt_t index = 0;
      std::vector<int> indices = GetScanIndices(i, Nindividual);
      for (it=fTuneParameters.begin(), index=0; index< indices.size(); index++, it++){
         currentParameters[it->first] = v[index][indices[index]];
      }
      for (std::map<TString,Double_t>::iterator it_print=currentParameters.begin();
           it_print!=currentParameters.end(); it_print++){
         Log() << kINFO << "  " << it_print->first  << " = " << it_print->second << Endl;
      }
      // ... re-train the method with currentParameters, then compute the FOM
      Log() << kINFO << "FOM was found : " << currentFOM << "; current best is " << bestFOM << Endl;

      if (currentFOM > bestFOM) {
         bestFOM = currentFOM;
         for (std::map<TString,Double_t>::iterator iter=currentParameters.begin();
              iter != currentParameters.end(); iter++){
            fTunedParameters[iter->first] = iter->second;
         }
      }
   }
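//// [usage note: schematic, API details may differ across ROOT versions] ////
// The scan is normally reached through MethodBase::OptimizeTuningParameters,
// which builds the interval map and instantiates this class, e.g.:
//
//    std::map<TString,Double_t> tuned =
//       method->OptimizeTuningParameters("ROCIntegral", "Scan");
//
// where "ROCIntegral" selects the figure of merit and "Scan" this grid
// search, as opposed to "FitGA" or "Minuit", handled by optimizeFit() below.
//// [end note] ////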
// optimizeFit() (excerpt): instead of a grid scan, hand the parameter space
// to a fitter that searches it guided by the figure of merit
   std::vector<TMVA::Interval*> ranges;   // fit ranges, one per tuning parameter
   std::map<TString, TMVA::Interval*>::iterator it;
   std::vector<Double_t> pars;            // starting values, kept in iterator order
   for (it=fTuneParameters.begin(); it!=fTuneParameters.end(); it++){
      pars.push_back( (it->second)->GetMean() );
   }
   // ...
   FitterBase* fitter = NULL;
   if ( fOptimizationFitType == "Minuit" ) {
      // ... (Minuit option string)
      fitter = new MinuitFitter( *this,
                                 "FitterMinuit_BDTOptimize",
                                 ranges, opt );
   } else if ( fOptimizationFitType == "FitGA" ) {
      TString opt="PopSize=20:Steps=30:Cycles=3:ConvCrit=0.01:SaveBestCycle=5";
      fitter = new GeneticFitter( *this,
                                  "FitterGA_BDTOptimize",
                                  ranges, opt );
   } else {
      Log() << kWARNING << " you did not specify a valid OptimizationFitType,"
            << " will use the default (FitGA) " << Endl;
      TString opt="PopSize=20:Steps=30:Cycles=3:ConvCrit=0.01:SaveBestCycle=5";
      fitter = new GeneticFitter( *this,
                                  "FitterGA_BDTOptimize",
                                  ranges, opt );
   }
   // ... run the fit, then release the fit ranges
   for (UInt_t ipar=0; ipar<ranges.size(); ipar++) delete ranges[ipar];
   // copy the fitted values back, in the same iterator order as "pars"
   Int_t jcount=0;
   for (it=fTuneParameters.begin(); it!=fTuneParameters.end(); it++){
      fTunedParameters.insert(std::pair<TString,Double_t>(it->first,pars[jcount++]));
   }
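//// [illustrative sketch, not part of OptimizeConfigParameters.cxx] ////
// Both fitters receive *this because OptimizeConfigParameters implements the
// TMVA::IFitterTarget interface: while running, the fitter repeatedly calls
// back into EstimatorFunction() below to score candidate points. A minimal
// custom target could look like this (assumed names, for illustration only):
#include <vector>
#include "TMVA/IFitterTarget.h"

class ParabolaTarget : public TMVA::IFitterTarget {
public:
   // the fitter minimizes this estimator; minimum at pars[0] == 3
   Double_t EstimatorFunction(std::vector<Double_t>& pars) override
   {
      return (pars[0] - 3.) * (pars[0] - 3.);
   }
};
//// [end sketch] ////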
// EstimatorFunction() (excerpt): the fitter's objective function; previously
// evaluated parameter combinations are looked up instead of re-trained
   std::map< std::vector<Double_t> , Double_t>::const_iterator iter;
   // ... look up "pars" in fAlreadyTrainedParCombination first; if it is new:
      std::map<TString,Double_t> currentParameters;
      Int_t icount = 0;
      std::map<TString, TMVA::Interval*>::iterator it;
      for (it=fTuneParameters.begin(); it!=fTuneParameters.end(); it++){
         currentParameters[it->first] = pars[icount++];
      }
      // ... reset the method, set the new parameters, and re-train:
      GetMethod()->GetTransformationHandler().
         CalcTransformations(GetMethod()->Data()->GetEventCollection());
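//// [illustrative sketch, not part of OptimizeConfigParameters.cxx] ////
// The lookup above is memoization: training is expensive, so the score of an
// already-seen parameter vector is served from the cache instead of being
// recomputed. The same pattern in isolation (names made up for the example):
#include <map>
#include <vector>

double expensiveScore(const std::vector<double>& pars) // toy stand-in for train+FOM
{
   return -(pars[0] - 1.0) * (pars[0] - 1.0);
}

double cachedScore(const std::vector<double>& pars,
                   std::map<std::vector<double>, double>& cache)
{
   std::map<std::vector<double>, double>::const_iterator it = cache.find(pars);
   if (it != cache.end()) return it->second;   // seen before: skip re-training
   const double score = expensiveScore(pars);
   cache[pars] = score;
   return score;
}
//// [end sketch] ////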
// GetFOM() (excerpt): translate fFOMType into the corresponding getter
   if (fMethod->DoRegression()){
      std::cout << " ERROR: Sorry, Regression is not yet implemented for automatic parameter optimisation"
                << " --> exit" << std::endl;
   }
   // ...
      Log() << kFATAL << " ERROR, the Figure of Merit " << fFOMType
            << " requested for the parameter optimisation has not"
            << " been implemented yet ---> exit " << Endl;
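//// [note: reconstructed from the getters below; exact strings may differ] ////
// fFOMType plausibly dispatches along these lines before the kFATAL fallback:
//
//    if      (fFOMType == "Separation")        fom = GetSeparation();
//    else if (fFOMType == "ROCIntegral")       fom = GetROCIntegral();
//    else if (fFOMType == "SigEffAtBkgEff0.1") fom = GetSigEffAtBkgEff(0.1);
//    else if (fFOMType == "BkgRejAtSigEff0.5") fom = GetBkgRejAtSigEff(0.5);
//    else if (fFOMType == "BkgEffAtSigEff0.5") fom = GetBkgEffAtSigEff(0.5);
//// [end note] ////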
// GetMVADists() (excerpt): fill the signal/background MVA-output histograms
// from the event collection
   for (UInt_t iev=0; iev < events.size() ; iev++){
      // ...
      if (events[iev]->GetClass() == signalClassNr) {
         // ... fill the signal histograms; otherwise the background ones
      }
   }

// GetSeparation() (excerpt): warning attached to the histogram-based branch
      std::cout << "Separation calculation via histograms (not PDFs) still seems to give strange results! Don't do that; check!" << std::endl;
// GetROCIntegral() (excerpt)
      for (UInt_t i=0; i<nsteps; i++){
         // ... accumulate the integral while stepping the cut across the MVA range
      }
      // ... sanity check: signal and background histograms must share their binning
      std::cout << " Error in OptimizeConfigParameters GetROCIntegral, unequal histograms for sig and bkg.." << std::endl;
// GetSigEffAtBkgEff() (excerpt): signal efficiency at a given background efficiency
      std::cout << " Error in OptimizeConfigParameters GetSigEffAt, unequal histograms for sig and bkg.." << std::endl;
      // ...
      // lower the cut until the background efficiency reaches its target
      while (bkgCumulator[nbins-ibin] > (1-bkgEff)) {
         sigEff = sigCumulator[nbins]-sigCumulator[nbins-ibin];
         ibin++;
      }
// GetBkgEffAtSigEff() (excerpt): background efficiency at a given signal efficiency
      std::cout << " Error in OptimizeConfigParameters GetBkgEffAt, unequal histograms for sig and bkg.." << std::endl;
      // ...
      // lower the cut until the signal efficiency reaches its target
      while ( sigCumulator[nbins]-sigCumulator[nbins-ibin] < sigEff) {
         bkgEff = bkgCumulator[nbins]-bkgCumulator[nbins-ibin];
         ibin++;
      }
// GetBkgRejAtSigEff() (excerpt): background rejection at a given signal efficiency
      std::cout << " Error in OptimizeConfigParameters GetBkgRejAt, unequal histograms for sig and bkg.." << std::endl;
      // ...
      while ( sigCumulator[nbins]-sigCumulator[nbins-ibin] < sigEff) {
         bkgRej = bkgCumulator[nbins-ibin];
         ibin++;
      }
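//// [illustrative sketch, not part of OptimizeConfigParameters.cxx] ////
// The three getters above share one pattern: walk a cut down from the upper
// histogram edge over the cumulative distributions until the constrained
// efficiency crosses its target, then read off the other quantity. Standalone
// version for "background rejection at a given signal efficiency"; the inputs
// are normalized cumulatives as returned by TH1::GetIntegral():
#include <cstddef>
#include <vector>

double BkgRejAtSigEff(const std::vector<double>& sigCum,
                      const std::vector<double>& bkgCum, double sigEff)
{
   const std::size_t nbins = sigCum.size() - 1;
   double bkgRej = 1.0;   // cut at the top edge rejects all background
   std::size_t ibin = 1;
   // fraction of signal above the cut: sigCum[nbins] - sigCum[nbins - ibin]
   while (ibin <= nbins && sigCum[nbins] - sigCum[nbins - ibin] < sigEff) {
      bkgRej = bkgCum[nbins - ibin];   // background fraction below the cut
      ibin++;
   }
   return bkgRej;
}
//// [end sketch] ////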