#ifndef ROOT_TMVA_MethodBDT
#define ROOT_TMVA_MethodBDT

#ifndef ROOT_TMVA_MethodBase
#ifndef ROOT_TMVA_DecisionTree
#ifndef ROOT_TMVA_Event

inline const std::vector<TMVA::DecisionTree*>& GetForest() const;

const TString& className ) const;

std::map< const TMVA::Event*, std::vector<double> > fResiduals;
 
void Train(void)
BDT training. 
 
void PreProcessNegativeEventWeights()
o.k. 
 
void GetBaggedSubSample(std::vector< const TMVA::Event *> &)
fills fEventSample with fBaggedSampleFraction*NEvents random training events 
 
std::vector< Bool_t > fIsLowSigCut
 
Double_t RegBoost(std::vector< const TMVA::Event *> &, DecisionTree *dt)
a special boosting only for Regression ... 
 
void DeclareCompatibilityOptions()
options that are used ONLY for the READER to ensure backward compatibility 
 
std::map< const TMVA::Event *, LossFunctionEventInfo > fLossFunctionEventInfo
 
Bool_t fPairNegWeightsGlobal
 
void SetUseNvars(Int_t n)
 
const Ranking * CreateRanking()
Compute ranking of input variables. 
 
std::vector< Bool_t > fIsHighSigCut
 
void DeclareOptions()
define the options (their key words) that can be set in the option string know options: nTrees number...
 
std::vector< Double_t > fVariableImportance
 
void DeterminePreselectionCuts(const std::vector< const TMVA::Event *> &eventSample)
find useful preselection cuts that will be applied before any Decision Tree training. 
 
void MakeClassInstantiateNode(DecisionTreeNode *n, std::ostream &fout, const TString &className) const
recursively descends a tree and writes the node instance to the output stream 
 
Double_t fMinLinCorrForFisher
 
std::vector< const TMVA::Event * > fEventSample
 
Double_t Bagging()
call it boot-strapping, re-sampling or whatever you like, in the end it is nothing else but applying ...
 
Double_t AdaBoost(std::vector< const TMVA::Event *> &, DecisionTree *dt)
the AdaBoost implementation. 
 
void ProcessOptions()
the option string is decoded, for available options see "DeclareOptions" 
 
void GetHelpMessage() const
Get help message text. 
 
std::vector< Bool_t > fIsHighBkgCut
 
void SetShrinkage(Double_t s)
 
Double_t AdaCost(std::vector< const TMVA::Event *> &, DecisionTree *dt)
the AdaCost boosting algorithm takes a simple cost Matrix (currently fixed for all events...
 
void MakeClassSpecific(std::ostream &, const TString &) const
make ROOT-independent C++ class for classifier response (classifier-specific implementation) ...
 
const std::vector< const TMVA::Event * > & GetTrainingEvents() const
 
TString fRegressionLossFunctionBDTGS
 
Double_t GetMvaValue(Double_t *err=0, Double_t *errUpper=0)
 
std::vector< Double_t > fHighBkgCut
 
Double_t GetGradBoostMVA(const TMVA::Event *e, UInt_t nTrees)
returns MVA value: -1 for background, 1 for signal 
 
Double_t fBaggedSampleFraction
 
Bool_t fInverseBoostNegWeights
 
Double_t GradBoostRegression(std::vector< const TMVA::Event *> &, DecisionTree *dt)
Implementation of M_TreeBoost using any loss function as described by Friedman 1999. 
 
virtual void SetTuneParameters(std::map< TString, Double_t > tuneParameters)
set the tuning parameters according to the argument 
 
#define ClassDef(name, id)
 
const std::vector< TMVA::DecisionTree * > & GetForest() const
 
void MakeClassSpecificHeader(std::ostream &, const TString &) const
specific class header 
 
Double_t fSigToBkgFraction
 
virtual Bool_t HasAnalysisType(Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets)
BDT can handle classification with multiple classes and regression with one regression-target. 
 
void Reset(void)
reset the method, as if it had just been instantiated (forget all training etc.) 
 
void SetMinNodeSize(Double_t sizeInPercent)
 
Double_t AdaBoostR2(std::vector< const TMVA::Event *> &, DecisionTree *dt)
adaption of the AdaBoost to regression problems (see H.Drucker 1997) 
 
std::vector< Double_t > fHighSigCut
 
const std::vector< Float_t > & GetMulticlassValues()
get the multiclass MVA response for the BDT classifier 
 
Bool_t fNoNegWeightsInTraining
 
const std::vector< Float_t > & GetRegressionValues()
get the regression value generated by the BDTs 
 
void InitEventSample()
initialize the event sample (i.e. reset the boost-weights... etc) 
 
std::vector< Bool_t > fIsLowBkgCut
 
void WriteMonitoringHistosToFile(void) const
Here we could write some histograms created during the processing to the output file. 
 
std::vector< Double_t > fLowBkgCut
 
Double_t fNodePurityLimit
 
void SetBaggedSampleFraction(Double_t f)
 
void BoostMonitor(Int_t iTree)
fills the ROCIntegral vs iTree from the testSample for the monitoring plots during the training ...
 
Bool_t fTrainWithNegWeights
 
Bool_t fSkipNormalization
 
virtual ~MethodBDT(void)
destructor Note: fEventSample and ValidationSample are already deleted at the end of TRAIN When they ...
 
void SetNodePurityLimit(Double_t l)
 
Double_t PrivateGetMvaValue(const TMVA::Event *ev, Double_t *err=0, Double_t *errUpper=0, UInt_t useNTrees=0)
Return the MVA value (range [-1;1]) that classifies the event according to the majority vote from the...
 
Double_t GradBoost(std::vector< const TMVA::Event *> &, DecisionTree *dt, UInt_t cls=0)
Calculate the desired response value for each region. 
 
SeparationBase * fSepType
 
void Init(void)
common initialisation with defaults for the BDT-Method 
 
void ReadWeightsFromXML(void *parent)
reads the BDT from the xml file 
 
Double_t TestTreeQuality(DecisionTree *dt)
test the tree quality.. in terms of Misclassification 
 
DecisionTree::EPruneMethod fPruneMethod
 
void ReadWeightsFromStream(std::istream &istr)
read the weights (BDT coefficients) 
 
Double_t ApplyPreselectionCuts(const Event *ev)
apply the preselection cuts before even bothering about any Decision Trees in the GetMVA ...
 
void UpdateTargets(std::vector< const TMVA::Event *> &, UInt_t cls=0)
Calculate residuals for all events. 
 
void SetMaxDepth(Int_t d)
 
void AddWeightsXMLTo(void *parent) const
write weights to XML 
 
void SetAdaBoostBeta(Double_t b)
 
std::vector< const TMVA::Event * > * fTrainSample
 
you should not use this method at all Int_t Int_t Double_t Double_t Double_t e
 
Double_t Boost(std::vector< const TMVA::Event *> &, DecisionTree *dt, UInt_t cls=0)
apply the boosting algorithm (the algorithm is selected via the "option" given in the constructor...
 
LossFunctionBDT * fRegressionLossFunctionBDTG
 
virtual std::map< TString, Double_t > OptimizeTuningParameters(TString fomType="ROCIntegral", TString fitType="FitGA")
call the Optimizer with the set of parameters and ranges that are meant to be tuned. 
 
TString fNegWeightTreatment
 
Abstract ClassifierFactory template that handles arbitrary types. 
 
you should not use this method at all Int_t Int_t Double_t Double_t Double_t Int_t Double_t Double_t Double_t Double_t b
 
std::vector< const TMVA::Event * > fValidationSample
 
std::vector< DecisionTree * > fForest
 
std::vector< Double_t > GetVariableImportance()
Return the relative variable importance, normalized to all variables together having the importance 1...
 
Double_t fFValidationEvents
 
const std::vector< double > & GetBoostWeights() const
 
std::vector< Double_t > fLowSigCut
 
void UpdateTargetsRegression(std::vector< const TMVA::Event *> &, Bool_t first=kFALSE)
Calculate current residuals for all events and update targets for next iteration. ...
 
A TTree object has a header with a name and a title. 
 
std::map< const TMVA::Event *, std::vector< double > > fResiduals
 
static const Int_t fgDebugLevel
 
virtual void ReadWeightsFromStream(std::istream &)=0
 
std::vector< const TMVA::Event * > fSubSample
 
void InitGradBoost(std::vector< const TMVA::Event *> &)
initialize targets for first tree 
 
std::vector< double > fBoostWeights
 
MethodBDT(const TString &jobName, const TString &methodTitle, DataSetInfo &theData, const TString &theOption="")
the standard constructor for the "boosted decision trees"