ROOT 6.07/01 Reference Guide
TMVAClassificationApplication.C
/**********************************************************************************
 * Project   : TMVA - a Root-integrated toolkit for multivariate data analysis   *
 * Package   : TMVA                                                              *
 * Executable: TMVAClassificationApplication                                     *
 *                                                                               *
 * This macro provides a simple example on how to use the trained classifiers    *
 * within an analysis module                                                     *
 **********************************************************************************/
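//
// A possible way to run this macro (a sketch, assuming ROOT is set up and the
// weight files produced by the corresponding TMVAClassification training are
// available in ./weights/):
//
//    root -l TMVAClassificationApplication.C              (all enabled methods)
//    root -l 'TMVAClassificationApplication.C("BDT")'     (a single method)
//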

#include <cstdlib>
#include <vector>
#include <iostream>
#include <map>
#include <string>

#include "TFile.h"
#include "TTree.h"
#include "TString.h"
#include "TSystem.h"
#include "TROOT.h"
#include "TStopwatch.h"
#include "TH1F.h"   // needed for the output histograms (e.g. when compiling standalone)

#include "TMVA/Tools.h"
#include "TMVA/Reader.h"
#include "TMVA/MethodCuts.h"

using namespace TMVA;

void TMVAClassificationApplication( TString myMethodList = "" )
{

   //---------------------------------------------------------------

   // This loads the library
   TMVA::Tools::Instance();

   // Default MVA methods to be trained + tested
   std::map<std::string,int> Use;

   // --- Cut optimisation
   Use["Cuts"]            = 1;
   Use["CutsD"]           = 1;
   Use["CutsPCA"]         = 0;
   Use["CutsGA"]          = 0;
   Use["CutsSA"]          = 0;
   //
   // --- 1-dimensional likelihood ("naive Bayes estimator")
   Use["Likelihood"]      = 1;
   Use["LikelihoodD"]     = 0; // the "D" extension indicates decorrelated input variables (see option strings)
   Use["LikelihoodPCA"]   = 1; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
   Use["LikelihoodKDE"]   = 0;
   Use["LikelihoodMIX"]   = 0;
   //
   // --- Multidimensional likelihood and Nearest-Neighbour methods
   Use["PDERS"]           = 1;
   Use["PDERSD"]          = 0;
   Use["PDERSPCA"]        = 0;
   Use["PDEFoam"]         = 1;
   Use["PDEFoamBoost"]    = 0; // uses generalised MVA method boosting
   Use["KNN"]             = 1; // k-nearest neighbour method
   //
   // --- Linear Discriminant Analysis
   Use["LD"]              = 1; // Linear Discriminant identical to Fisher
   Use["Fisher"]          = 0;
   Use["FisherG"]         = 0;
   Use["BoostedFisher"]   = 0; // uses generalised MVA method boosting
   Use["HMatrix"]         = 0;
   //
   // --- Function Discriminant analysis
   Use["FDA_GA"]          = 1; // minimisation of user-defined function using Genetic Algorithm
   Use["FDA_SA"]          = 0;
   Use["FDA_MC"]          = 0;
   Use["FDA_MT"]          = 0;
   Use["FDA_GAMT"]        = 0;
   Use["FDA_MCMT"]        = 0;
   //
   // --- Neural Networks (all are feed-forward Multilayer Perceptrons)
   Use["MLP"]             = 0; // Recommended ANN
   Use["MLPBFGS"]         = 0; // Recommended ANN with optional training method
   Use["MLPBNN"]          = 1; // Recommended ANN with BFGS training method and Bayesian regulator
   Use["CFMlpANN"]        = 0; // Deprecated ANN from ALEPH
   Use["TMlpANN"]         = 0; // ROOT's own ANN
   //
   // --- Support Vector Machine
   Use["SVM"]             = 1;
   //
   // --- Boosted Decision Trees
   Use["BDT"]             = 1; // uses Adaptive Boost
   Use["BDTG"]            = 0; // uses Gradient Boost
   Use["BDTB"]            = 0; // uses Bagging
   Use["BDTD"]            = 0; // decorrelation + Adaptive Boost
   //
   // --- Friedman's RuleFit method, i.e., an optimised series of cuts ("rules")
   Use["RuleFit"]         = 1;
   // ---------------------------------------------------------------
   Use["Plugin"]          = 0;
   Use["Category"]        = 0;
   Use["SVM_Gauss"]       = 0;
   Use["SVM_Poly"]        = 0;
   Use["SVM_Lin"]         = 0;

   std::cout << std::endl;
   std::cout << "==> Start TMVAClassificationApplication" << std::endl;

   // Select methods (don't look at this code - not of interest)
   if (myMethodList != "") {
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;

      std::vector<TString> mlist = gTools().SplitString( myMethodList, ',' );
      for (UInt_t i=0; i<mlist.size(); i++) {
         std::string regMethod(mlist[i]);

         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod
                      << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
               std::cout << it->first << " ";
            }
            std::cout << std::endl;
            return;
         }
         Use[regMethod] = 1;
      }
   }

   // --------------------------------------------------------------------------------------------------

   // --- Create the Reader object

   TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );

   // Create a set of variables and declare them to the reader
   // - the variable names MUST correspond in name and type to those given in the weight file(s) used
   Float_t var1, var2;
   Float_t var3, var4;
   reader->AddVariable( "myvar1 := var1+var2", &var1 );
   reader->AddVariable( "myvar2 := var1-var2", &var2 );
   reader->AddVariable( "var3",                &var3 );
   reader->AddVariable( "var4",                &var4 );

   // Spectator variables declared in the training have to be added to the reader, too
   Float_t spec1, spec2;
   reader->AddSpectator( "spec1 := var1*2", &spec1 );
   reader->AddSpectator( "spec2 := var1*3", &spec2 );

   Float_t Category_cat1, Category_cat2, Category_cat3;
   if (Use["Category"]){
      // Add artificial spectators for distinguishing categories
      reader->AddSpectator( "Category_cat1 := var3<=0",             &Category_cat1 );
      reader->AddSpectator( "Category_cat2 := (var3>0)&&(var4<0)",  &Category_cat2 );
      reader->AddSpectator( "Category_cat3 := (var3>0)&&(var4>=0)", &Category_cat3 );
   }

   // --- Book the MVA methods

   TString dir    = "weights/";
   TString prefix = "TMVAClassification";

   // Book method(s)
   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      if (it->second) {
         TString methodName = TString(it->first) + TString(" method");
         TString weightfile = dir + prefix + TString("_") + TString(it->first) + TString(".weights.xml");
         reader->BookMVA( methodName, weightfile );
      }
   }

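   // Each booking above expands to a call such as
   //    reader->BookMVA( "BDT method", "weights/TMVAClassification_BDT.weights.xml" );
   // i.e. it reads the weight file written by the corresponding TMVAClassification
   // training. "weights/" and the "TMVAClassification" prefix are that tutorial's
   // defaults; adjust dir and prefix above if your training wrote elsewhere.
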
   // Book output histograms
   UInt_t nbin = 100;
   TH1F *histLk(0), *histLkD(0), *histLkPCA(0), *histLkKDE(0), *histLkMIX(0), *histPD(0), *histPDD(0);
   TH1F *histPDPCA(0), *histPDEFoam(0), *histPDEFoamErr(0), *histPDEFoamSig(0), *histKNN(0), *histHm(0);
   TH1F *histFi(0), *histFiG(0), *histFiB(0), *histLD(0), *histNn(0), *histNnbfgs(0), *histNnbnn(0);
   TH1F *histNnC(0), *histNnT(0), *histBdt(0), *histBdtG(0), *histBdtD(0), *histRf(0), *histSVMG(0);
   TH1F *histSVMP(0), *histSVML(0), *histFDAMT(0), *histFDAGA(0), *histCat(0), *histPBdt(0);

   if (Use["Likelihood"])    histLk     = new TH1F( "MVA_Likelihood",    "MVA_Likelihood",    nbin, -1, 1 );
   if (Use["LikelihoodD"])   histLkD    = new TH1F( "MVA_LikelihoodD",   "MVA_LikelihoodD",   nbin, -1, 0.9999 );
   if (Use["LikelihoodPCA"]) histLkPCA  = new TH1F( "MVA_LikelihoodPCA", "MVA_LikelihoodPCA", nbin, -1, 1 );
   if (Use["LikelihoodKDE"]) histLkKDE  = new TH1F( "MVA_LikelihoodKDE", "MVA_LikelihoodKDE", nbin, -0.00001, 0.99999 );
   if (Use["LikelihoodMIX"]) histLkMIX  = new TH1F( "MVA_LikelihoodMIX", "MVA_LikelihoodMIX", nbin, 0, 1 );
   if (Use["PDERS"])         histPD     = new TH1F( "MVA_PDERS",         "MVA_PDERS",         nbin, 0, 1 );
   if (Use["PDERSD"])        histPDD    = new TH1F( "MVA_PDERSD",        "MVA_PDERSD",        nbin, 0, 1 );
   if (Use["PDERSPCA"])      histPDPCA  = new TH1F( "MVA_PDERSPCA",      "MVA_PDERSPCA",      nbin, 0, 1 );
   if (Use["KNN"])           histKNN    = new TH1F( "MVA_KNN",           "MVA_KNN",           nbin, 0, 1 );
   if (Use["HMatrix"])       histHm     = new TH1F( "MVA_HMatrix",       "MVA_HMatrix",       nbin, -0.95, 1.55 );
   if (Use["Fisher"])        histFi     = new TH1F( "MVA_Fisher",        "MVA_Fisher",        nbin, -4, 4 );
   if (Use["FisherG"])       histFiG    = new TH1F( "MVA_FisherG",       "MVA_FisherG",       nbin, -1, 1 );
   if (Use["BoostedFisher"]) histFiB    = new TH1F( "MVA_BoostedFisher", "MVA_BoostedFisher", nbin, -2, 2 );
   if (Use["LD"])            histLD     = new TH1F( "MVA_LD",            "MVA_LD",            nbin, -2, 2 );
   if (Use["MLP"])           histNn     = new TH1F( "MVA_MLP",           "MVA_MLP",           nbin, -1.25, 1.5 );
   if (Use["MLPBFGS"])       histNnbfgs = new TH1F( "MVA_MLPBFGS",       "MVA_MLPBFGS",       nbin, -1.25, 1.5 );
   if (Use["MLPBNN"])        histNnbnn  = new TH1F( "MVA_MLPBNN",        "MVA_MLPBNN",        nbin, -1.25, 1.5 );
   if (Use["CFMlpANN"])      histNnC    = new TH1F( "MVA_CFMlpANN",      "MVA_CFMlpANN",      nbin, 0, 1 );
   if (Use["TMlpANN"])       histNnT    = new TH1F( "MVA_TMlpANN",       "MVA_TMlpANN",       nbin, -1.3, 1.3 );
   if (Use["BDT"])           histBdt    = new TH1F( "MVA_BDT",           "MVA_BDT",           nbin, -0.8, 0.8 );
   if (Use["BDTD"])          histBdtD   = new TH1F( "MVA_BDTD",          "MVA_BDTD",          nbin, -0.8, 0.8 );
   if (Use["BDTG"])          histBdtG   = new TH1F( "MVA_BDTG",          "MVA_BDTG",          nbin, -1.0, 1.0 );
   if (Use["RuleFit"])       histRf     = new TH1F( "MVA_RuleFit",       "MVA_RuleFit",       nbin, -2.0, 2.0 );
   if (Use["SVM_Gauss"])     histSVMG   = new TH1F( "MVA_SVM_Gauss",     "MVA_SVM_Gauss",     nbin, 0.0, 1.0 );
   if (Use["SVM_Poly"])      histSVMP   = new TH1F( "MVA_SVM_Poly",      "MVA_SVM_Poly",      nbin, 0.0, 1.0 );
   if (Use["SVM_Lin"])       histSVML   = new TH1F( "MVA_SVM_Lin",       "MVA_SVM_Lin",       nbin, 0.0, 1.0 );
   if (Use["FDA_MT"])        histFDAMT  = new TH1F( "MVA_FDA_MT",        "MVA_FDA_MT",        nbin, -2.0, 3.0 );
   if (Use["FDA_GA"])        histFDAGA  = new TH1F( "MVA_FDA_GA",        "MVA_FDA_GA",        nbin, -2.0, 3.0 );
   if (Use["Category"])      histCat    = new TH1F( "MVA_Category",      "MVA_Category",      nbin, -2., 2. );
   if (Use["Plugin"])        histPBdt   = new TH1F( "MVA_PBDT",          "MVA_BDT",           nbin, -0.8, 0.8 );

   // PDEFoam also returns a per-event error; fill it into a histogram, together with the significance
   if (Use["PDEFoam"]) {
      histPDEFoam    = new TH1F( "MVA_PDEFoam",    "MVA_PDEFoam",              nbin, 0, 1 );
      histPDEFoamErr = new TH1F( "MVA_PDEFoamErr", "MVA_PDEFoam error",        nbin, 0, 1 );
      histPDEFoamSig = new TH1F( "MVA_PDEFoamSig", "MVA_PDEFoam significance", nbin, 0, 10 );
   }

   // Book example histogram for probability (the other methods are done similarly)
   TH1F *probHistFi(0), *rarityHistFi(0);
   if (Use["Fisher"]) {
      probHistFi   = new TH1F( "MVA_Fisher_Proba",  "MVA_Fisher_Proba",  nbin, 0, 1 );
      rarityHistFi = new TH1F( "MVA_Fisher_Rarity", "MVA_Fisher_Rarity", nbin, 0, 1 );
   }

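   // The same pattern would apply to any other booked method, e.g. a hypothetical
   //    TH1F *probHistBdt = new TH1F( "MVA_BDT_Proba", "MVA_BDT_Proba", nbin, 0, 1 );
   // filled later with reader->GetProba( "BDT method" ). Note that GetProba and
   // GetRarity only return meaningful values if the method was trained with the
   // CreateMVAPdfs option, so that the signal and background MVA PDFs exist.
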
   // Prepare input tree (this must be replaced by your data source)
   // in this example, there is a toy tree with signal and one with background events
   // we'll later on use only the "signal" events for the test
   //
   TFile *input(0);
   TString fname = "./tmva_example.root";
   if (!gSystem->AccessPathName( fname ))
      input = TFile::Open( fname ); // check if file in local directory exists
   else
      input = TFile::Open( "http://root.cern.ch/files/tmva_class_example.root" ); // if not: download from ROOT server

   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVAClassificationApp : Using input file: " << input->GetName() << std::endl;

   // --- Event loop

   // Prepare the event tree
   // - here the variable names have to correspond to your tree
   // - you can use the same variables as above which is slightly faster,
   //   but of course you can use different ones and copy the values inside the event loop
   //
   std::cout << "--- Select signal sample" << std::endl;
   TTree* theTree = (TTree*)input->Get("TreeS");
   Float_t userVar1, userVar2;
   theTree->SetBranchAddress( "var1", &userVar1 );
   theTree->SetBranchAddress( "var2", &userVar2 );
   theTree->SetBranchAddress( "var3", &var3 );
   theTree->SetBranchAddress( "var4", &var4 );

   // Efficiency calculator for cut method
   Int_t    nSelCutsGA = 0;
   Double_t effS       = 0.7;

   std::vector<Float_t> vecVar(4); // vector for EvaluateMVA tests

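   // vecVar is not used below; it only illustrates that TMVA::Reader can also be
   // queried with an explicit vector of input values instead of the registered
   // variable pointers. A minimal sketch, kept commented out so the macro's
   // behaviour is unchanged ("BDT method" is just an example tag):
   //    vecVar[0] = var1; vecVar[1] = var2; vecVar[2] = var3; vecVar[3] = var4;
   //    Double_t mvaValue = reader->EvaluateMVA( vecVar, "BDT method" );
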
   std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
   TStopwatch sw;
   sw.Start();
   for (Long64_t ievt=0; ievt<theTree->GetEntries();ievt++) {

      if (ievt%1000 == 0) std::cout << "--- ... Processing event: " << ievt << std::endl;

      theTree->GetEntry(ievt);

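      // Recompute the derived input variables from the tree branches; the formulas
      // must match the expressions given to AddVariable above
      // ("myvar1 := var1+var2", "myvar2 := var1-var2")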
      var1 = userVar1 + userVar2;
      var2 = userVar1 - userVar2;

      // --- Return the MVA outputs and fill into histograms

      if (Use["CutsGA"]) {
         // Cuts is a special case: give the desired signal efficiency
         Bool_t passed = reader->EvaluateMVA( "CutsGA method", effS );
         if (passed) nSelCutsGA++;
      }

      if (Use["Likelihood"   ])   histLk     ->Fill( reader->EvaluateMVA( "Likelihood method"    ) );
      if (Use["LikelihoodD"  ])   histLkD    ->Fill( reader->EvaluateMVA( "LikelihoodD method"   ) );
      if (Use["LikelihoodPCA"])   histLkPCA  ->Fill( reader->EvaluateMVA( "LikelihoodPCA method" ) );
      if (Use["LikelihoodKDE"])   histLkKDE  ->Fill( reader->EvaluateMVA( "LikelihoodKDE method" ) );
      if (Use["LikelihoodMIX"])   histLkMIX  ->Fill( reader->EvaluateMVA( "LikelihoodMIX method" ) );
      if (Use["PDERS"        ])   histPD     ->Fill( reader->EvaluateMVA( "PDERS method"         ) );
      if (Use["PDERSD"       ])   histPDD    ->Fill( reader->EvaluateMVA( "PDERSD method"        ) );
      if (Use["PDERSPCA"     ])   histPDPCA  ->Fill( reader->EvaluateMVA( "PDERSPCA method"      ) );
      if (Use["KNN"          ])   histKNN    ->Fill( reader->EvaluateMVA( "KNN method"           ) );
      if (Use["HMatrix"      ])   histHm     ->Fill( reader->EvaluateMVA( "HMatrix method"       ) );
      if (Use["Fisher"       ])   histFi     ->Fill( reader->EvaluateMVA( "Fisher method"        ) );
      if (Use["FisherG"      ])   histFiG    ->Fill( reader->EvaluateMVA( "FisherG method"       ) );
      if (Use["BoostedFisher"])   histFiB    ->Fill( reader->EvaluateMVA( "BoostedFisher method" ) );
      if (Use["LD"           ])   histLD     ->Fill( reader->EvaluateMVA( "LD method"            ) );
      if (Use["MLP"          ])   histNn     ->Fill( reader->EvaluateMVA( "MLP method"           ) );
      if (Use["MLPBFGS"      ])   histNnbfgs ->Fill( reader->EvaluateMVA( "MLPBFGS method"       ) );
      if (Use["MLPBNN"       ])   histNnbnn  ->Fill( reader->EvaluateMVA( "MLPBNN method"        ) );
      if (Use["CFMlpANN"     ])   histNnC    ->Fill( reader->EvaluateMVA( "CFMlpANN method"      ) );
      if (Use["TMlpANN"      ])   histNnT    ->Fill( reader->EvaluateMVA( "TMlpANN method"       ) );
      if (Use["BDT"          ])   histBdt    ->Fill( reader->EvaluateMVA( "BDT method"           ) );
      if (Use["BDTD"         ])   histBdtD   ->Fill( reader->EvaluateMVA( "BDTD method"          ) );
      if (Use["BDTG"         ])   histBdtG   ->Fill( reader->EvaluateMVA( "BDTG method"          ) );
      if (Use["RuleFit"      ])   histRf     ->Fill( reader->EvaluateMVA( "RuleFit method"       ) );
      if (Use["SVM_Gauss"    ])   histSVMG   ->Fill( reader->EvaluateMVA( "SVM_Gauss method"     ) );
      if (Use["SVM_Poly"     ])   histSVMP   ->Fill( reader->EvaluateMVA( "SVM_Poly method"      ) );
      if (Use["SVM_Lin"      ])   histSVML   ->Fill( reader->EvaluateMVA( "SVM_Lin method"       ) );
      if (Use["FDA_MT"       ])   histFDAMT  ->Fill( reader->EvaluateMVA( "FDA_MT method"        ) );
      if (Use["FDA_GA"       ])   histFDAGA  ->Fill( reader->EvaluateMVA( "FDA_GA method"        ) );
      if (Use["Category"     ])   histCat    ->Fill( reader->EvaluateMVA( "Category method"      ) );
      if (Use["Plugin"       ])   histPBdt   ->Fill( reader->EvaluateMVA( "P_BDT method"         ) );

      // Retrieve also per-event error
      if (Use["PDEFoam"]) {
         Double_t val = reader->EvaluateMVA( "PDEFoam method" );
         Double_t err = reader->GetMVAError();
         histPDEFoam   ->Fill( val );
         histPDEFoamErr->Fill( err );
         if (err>1.e-50) histPDEFoamSig->Fill( val/err );
      }

      // Retrieve probability instead of MVA output
      if (Use["Fisher"]) {
         probHistFi  ->Fill( reader->GetProba ( "Fisher method" ) );
         rarityHistFi->Fill( reader->GetRarity( "Fisher method" ) );
      }
   }

   // Get elapsed time
   sw.Stop();
   std::cout << "--- End of event loop: "; sw.Print();

   // Get efficiency for cuts classifier
   if (Use["CutsGA"]) std::cout << "--- Efficiency for CutsGA method: " << double(nSelCutsGA)/theTree->GetEntries()
                                << " (for a required signal efficiency of " << effS << ")" << std::endl;

   if (Use["CutsGA"]) {

      // test: retrieve cuts for particular signal efficiency
      // CINT ignores dynamic_casts so we have to use a cuts-specific Reader function to access the pointer
      TMVA::MethodCuts* mcuts = reader->FindCutsMVA( "CutsGA method" ) ;

      if (mcuts) {
         std::vector<Double_t> cutsMin;
         std::vector<Double_t> cutsMax;
         mcuts->GetCuts( 0.7, cutsMin, cutsMax );
         std::cout << "--- -------------------------------------------------------------" << std::endl;
         std::cout << "--- Retrieve cut values for signal efficiency of 0.7 from Reader" << std::endl;
         for (UInt_t ivar=0; ivar<cutsMin.size(); ivar++) {
            std::cout << "... Cut: "
                      << cutsMin[ivar]
                      << " < \""
                      << mcuts->GetInputVar(ivar)
                      << "\" <= "
                      << cutsMax[ivar] << std::endl;
         }
         std::cout << "--- -------------------------------------------------------------" << std::endl;
      }
   }

   // --- Write histograms

   TFile *target  = new TFile( "TMVApp.root","RECREATE" );
   if (Use["Likelihood"   ])   histLk     ->Write();
   if (Use["LikelihoodD"  ])   histLkD    ->Write();
   if (Use["LikelihoodPCA"])   histLkPCA  ->Write();
   if (Use["LikelihoodKDE"])   histLkKDE  ->Write();
   if (Use["LikelihoodMIX"])   histLkMIX  ->Write();
   if (Use["PDERS"        ])   histPD     ->Write();
   if (Use["PDERSD"       ])   histPDD    ->Write();
   if (Use["PDERSPCA"     ])   histPDPCA  ->Write();
   if (Use["KNN"          ])   histKNN    ->Write();
   if (Use["HMatrix"      ])   histHm     ->Write();
   if (Use["Fisher"       ])   histFi     ->Write();
   if (Use["FisherG"      ])   histFiG    ->Write();
   if (Use["BoostedFisher"])   histFiB    ->Write();
   if (Use["LD"           ])   histLD     ->Write();
   if (Use["MLP"          ])   histNn     ->Write();
   if (Use["MLPBFGS"      ])   histNnbfgs ->Write();
   if (Use["MLPBNN"       ])   histNnbnn  ->Write();
   if (Use["CFMlpANN"     ])   histNnC    ->Write();
   if (Use["TMlpANN"      ])   histNnT    ->Write();
   if (Use["BDT"          ])   histBdt    ->Write();
   if (Use["BDTD"         ])   histBdtD   ->Write();
   if (Use["BDTG"         ])   histBdtG   ->Write();
   if (Use["RuleFit"      ])   histRf     ->Write();
   if (Use["SVM_Gauss"    ])   histSVMG   ->Write();
   if (Use["SVM_Poly"     ])   histSVMP   ->Write();
   if (Use["SVM_Lin"      ])   histSVML   ->Write();
   if (Use["FDA_MT"       ])   histFDAMT  ->Write();
   if (Use["FDA_GA"       ])   histFDAGA  ->Write();
   if (Use["Category"     ])   histCat    ->Write();
   if (Use["Plugin"       ])   histPBdt   ->Write();

   // Write also error and significance histos
   if (Use["PDEFoam"]) { histPDEFoam->Write(); histPDEFoamErr->Write(); histPDEFoamSig->Write(); }

   // Write also probability hists
   if (Use["Fisher"]) { if (probHistFi != 0) probHistFi->Write(); if (rarityHistFi != 0) rarityHistFi->Write(); }
   target->Close();

   std::cout << "--- Created root file: \"TMVApp.root\" containing the MVA output histograms" << std::endl;

   delete reader;

   std::cout << "==> TMVAClassificationApplication is done!" << std::endl << std::endl;
}

int main( int argc, char** argv )
{
   TString methodList;
   for (int i=1; i<argc; i++) {
      TString regMethod(argv[i]);
      if(regMethod=="-b" || regMethod=="--batch") continue;
      if (!methodList.IsNull()) methodList += TString(",");
      methodList += regMethod;
   }
   TMVAClassificationApplication(methodList);
   return 0;
}
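
// Thanks to the main() above, the macro can also be built as a standalone
// executable. One possible compilation command (a sketch; assumes root-config is
// in the PATH and that the TMVA library ships with your ROOT installation):
//
//    g++ -o TMVAClassificationApplication TMVAClassificationApplication.C \
//        $(root-config --cflags --libs) -lTMVA
//
// followed by e.g.  ./TMVAClassificationApplication BDT Likelihood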