==> Start TMVARegressionApplication
: Booking "BDTG method" of type "BDT" from datasetreg/weights/TMVARegression_BDTG.weights.xml.
: Reading weight file: datasetreg/weights/TMVARegression_BDTG.weights.xml
<HEADER> DataSetInfo : [Default] : Added class "Regression"
: Booked classifier "BDTG" of type: "BDT"
: Booking "DNN_CPU method" of type "DL" from datasetreg/weights/TMVARegression_DNN_CPU.weights.xml.
: Reading weight file: datasetreg/weights/TMVARegression_DNN_CPU.weights.xml
: Booked classifier "DNN_CPU" of type: "DL"
: Booking "KNN method" of type "KNN" from datasetreg/weights/TMVARegression_KNN.weights.xml.
: Reading weight file: datasetreg/weights/TMVARegression_KNN.weights.xml
: Creating kd-tree with 1000 events
: Computing scale factor for 1d distributions: (ifrac, bottom, top) = (80%, 10%, 90%)
<HEADER> ModulekNN : Optimizing tree for 2 variables with 1000 values
: <Fill> Class 1 has 1000 events
: Booked classifier "KNN" of type: "KNN"
: Booking "LD method" of type "LD" from datasetreg/weights/TMVARegression_LD.weights.xml.
: Reading weight file: datasetreg/weights/TMVARegression_LD.weights.xml
: Booked classifier "LD" of type: "LD"
: Booking "PDEFoam method" of type "PDEFoam" from datasetreg/weights/TMVARegression_PDEFoam.weights.xml.
: Reading weight file: datasetreg/weights/TMVARegression_PDEFoam.weights.xml
: Read foams from file: datasetreg/weights/TMVARegression_PDEFoam.weights_foams.root
: Booked classifier "PDEFoam" of type: "PDEFoam"
--- TMVARegressionApp : Using input file: ./files/tmva_reg_example.root
--- Select signal sample
: Rebuilding Dataset Default
--- End of event loop: Real time 0:00:02, CP time 2.900
--- Created root file: "TMVARegApp.root" containing the MVA output histograms
==> TMVARegressionApplication is done!
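The full macro that produces this output is listed below. Its core is the TMVA::Reader application pattern: declare the training variables (and spectators), book each method from its weight file, and call EvaluateRegression() for every event. A minimal sketch of that pattern, assuming the weight file from the training step exists under datasetreg/weights/ (variable and spectator names follow this example; adapt them to your own training):

#include <iostream>
#include "TMVA/Reader.h"

// Minimal TMVA::Reader sketch (names follow this example's training setup).
void ReaderSketch()
{
   Float_t var1 = 0.f, var2 = 0.f, spec1 = 0.f, spec2 = 0.f;

   TMVA::Reader reader("!Color:!Silent");
   reader.AddVariable("var1", &var1);               // must match the training variables in name and type
   reader.AddVariable("var2", &var2);
   reader.AddSpectator("spec1:=var1*2", &spec1);    // spectators declared in training must be declared here, too
   reader.AddSpectator("spec2:=var1*3", &spec2);

   reader.BookMVA("BDTG method", "datasetreg/weights/TMVARegression_BDTG.weights.xml");

   var1 = 10.f; var2 = 20.f;                        // in a real application, fill these inside the event loop
   Float_t out = reader.EvaluateRegression("BDTG method")[0];
   std::cout << "BDTG regression output: " << out << std::endl;
}

The complete macro follows.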
#include <cstdlib>
#include <vector>
#include <iostream>
#include <map>
#include <string>

#include "TFile.h"
#include "TTree.h"
#include "TString.h"
#include "TSystem.h"
#include "TStopwatch.h"
#include "TH1F.h"

#include "TMVA/Tools.h"
#include "TMVA/Reader.h"

using namespace TMVA;

void TMVARegressionApplication( TString myMethodList = "" )
{
   // This loads the TMVA library
   TMVA::Tools::Instance();
   // Default MVA methods to be applied
   std::map<std::string,int> Use;

   // Multidimensional likelihood and nearest-neighbour methods
   Use["PDERS"]    = 0;
   Use["PDEFoam"]  = 1;
   Use["KNN"]      = 1;

   // Linear discriminant analysis
   Use["LD"]       = 1;

   // Function discriminant analysis
   Use["FDA_GA"]   = 0;
   Use["FDA_MC"]   = 0;
   Use["FDA_MT"]   = 0;
   Use["FDA_GAMT"] = 0;

   // Neural networks
   Use["MLP"]      = 0;
#ifdef R__HAS_TMVAGPU
   Use["DNN_GPU"]  = 1;
   Use["DNN_CPU"]  = 0;
#else
   Use["DNN_GPU"]  = 0;
#ifdef R__HAS_TMVACPU
   Use["DNN_CPU"]  = 1;
#else
   Use["DNN_CPU"]  = 0;
#endif
#endif

   // Support vector machine
   Use["SVM"]      = 0;

   // Boosted decision trees
   Use["BDT"]      = 0;
   Use["BDTG"]     = 1;
   std::cout << std::endl;
   std::cout << "==> Start TMVARegressionApplication" << std::endl;

   if (myMethodList != "") {
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;

      std::vector<TString> mlist = gTools().SplitString( myMethodList, ',' );
      for (UInt_t i=0; i<mlist.size(); i++) {
         std::string regMethod(mlist[i]);

         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) std::cout << it->first << " ";
            std::cout << std::endl;
            return;
         }
         Use[regMethod] = 1;
      }
   }
   // --- Create the Reader object

   TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );

   // Declare the input variables: names and types must match those
   // used when the weight files were produced during training
   Float_t var1, var2;
   reader->AddVariable( "var1", &var1 );
   reader->AddVariable( "var2", &var2 );

   // Spectator variables declared during training have to be added to the reader, too
   Float_t spec1, spec2;
   reader->AddSpectator( "spec1:=var1*2", &spec1 );
   reader->AddSpectator( "spec2:=var1*3", &spec2 );

   // --- Book the MVA methods from their weight files
   TString dir    = "datasetreg/weights/";
   TString prefix = "TMVARegression";

   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      if (it->second) {
         TString methodName = it->first + " method";
         TString weightfile = dir + prefix + "_" + TString(it->first) + ".weights.xml";
         reader->BookMVA( methodName, weightfile );
      }
   }
   // Book output histograms, one per active method
   TH1* hists[100];
   Int_t nhists = -1;
   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      TH1* h = new TH1F( it->first.c_str(), TString(it->first) + " method", 100, -100, 600 );
      if (it->second) hists[++nhists] = h;
   }
   nhists++;
   // Prepare the input file: use the local example file if it exists,
   // otherwise read it from the ROOT web server
   TFile *input(0);
   TString fname = "./tmva_reg_example.root";
   if (!gSystem->AccessPathName( fname )) {
      input = TFile::Open( fname );
   }
   else {
      TFile::SetCacheFileDir(".");
      input = TFile::Open( "http://root.cern/files/tmva_reg_example.root", "CACHEREAD" );
   }
   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVARegressionApp : Using input file: " << input->GetName() << std::endl;

   // Prepare the event tree: the branch names have to correspond to the
   // variables declared to the reader above
   TTree* theTree = (TTree*)input->Get("TreeR");
   std::cout << "--- Select signal sample" << std::endl;
   theTree->SetBranchAddress( "var1", &var1 );
   theTree->SetBranchAddress( "var2", &var2 );

   std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
   TStopwatch sw;
   sw.Start();
   for (Long64_t ievt=0; ievt<theTree->GetEntries(); ievt++) {

      if (ievt%1000 == 0) {
         std::cout << "--- ... Processing event: " << ievt << std::endl;
      }

      theTree->GetEntry(ievt);

      // Retrieve the regression outputs and fill the histograms;
      // EvaluateRegression() returns a vector, one entry per regression target
      for (Int_t ih=0; ih<nhists; ih++) {
         TString title = hists[ih]->GetTitle();
         Float_t val = (reader->EvaluateRegression( title ))[0];
         hists[ih]->Fill( val );
      }
   }
   sw.Stop();
   std::cout << "--- End of event loop: "; sw.Print();

   // Write the histograms to the output file
   TFile *target = new TFile( "TMVARegApp.root", "RECREATE" );
   for (Int_t ih=0; ih<nhists; ih++) hists[ih]->Write();
   target->Close();

   std::cout << "--- Created root file: \"" << target->GetName()
             << "\" containing the MVA output histograms" << std::endl;

   delete reader;

   std::cout << "==> TMVARegressionApplication is done!" << std::endl << std::endl;
}
int main( int argc, char** argv )
{
   // Build the comma-separated method list from the command-line arguments
   TString methodList;
   for (int i=1; i<argc; i++) {
      TString regMethod(argv[i]);
      if (regMethod=="-b" || regMethod=="--batch") continue;
      if (!methodList.IsNull()) methodList += TString(",");
      methodList += regMethod;
   }
   TMVARegressionApplication(methodList);
   return 0;
}
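To reproduce the output shown at the top, run the macro with ROOT after the training step has produced the weight files, e.g. root -l -b -q TMVARegressionApplication.C, or for a subset of methods root -l -b -q 'TMVARegressionApplication.C("BDTG,KNN")'. The regression outputs end up as one histogram per method in TMVARegApp.root; below is a short, hypothetical helper for inspecting them (the histogram names are the method names booked above):

#include <iostream>
#include "TFile.h"
#include "TH1.h"

// Hypothetical helper: print the mean regression output stored by the macro above.
void InspectRegApp(const char *method = "BDTG")
{
   TFile *f = TFile::Open("TMVARegApp.root");
   if (!f || f->IsZombie()) { std::cout << "ERROR: could not open TMVARegApp.root" << std::endl; return; }

   TH1 *h = nullptr;
   f->GetObject(method, h);          // histograms are booked under the method name, e.g. "BDTG"
   if (h) std::cout << method << ": mean regression output = " << h->GetMean() << std::endl;
   else   std::cout << "No histogram named \"" << method << "\" in the file" << std::endl;

   f->Close();
}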