#ifndef ROOT_TMVA_MethodBDT
#define ROOT_TMVA_MethodBDT
inline const std::vector<TMVA::DecisionTree*>& GetForest() const;
const TString& className ) const;
std::map< const TMVA::Event*, std::vector<double> > fResiduals;
void Train(void)
BDT training.
void PreProcessNegativeEventWeights()
Pre-process events carrying negative weights before training.
void GetBaggedSubSample(std::vector< const TMVA::Event *> &)
Fills fEventSample with fBaggedSampleFraction*NEvents random training events.
std::vector< Bool_t > fIsLowSigCut
Double_t RegBoost(std::vector< const TMVA::Event *> &, DecisionTree *dt)
A special boosting only for Regression (not implemented).
void DeclareCompatibilityOptions()
Options that are used ONLY for the READER to ensure backward compatibility.
std::map< const TMVA::Event *, LossFunctionEventInfo > fLossFunctionEventInfo
Bool_t fPairNegWeightsGlobal
void SetUseNvars(Int_t n)
const Ranking * CreateRanking()
Compute ranking of input variables.
std::vector< Bool_t > fIsHighSigCut
void DeclareOptions()
Define the options (their key words).
std::vector< Double_t > fVariableImportance
void DeterminePreselectionCuts(const std::vector< const TMVA::Event *> &eventSample)
Find useful preselection cuts that will be applied before the Decision Tree training.
void MakeClassInstantiateNode(DecisionTreeNode *n, std::ostream &fout, const TString &className) const
Recursively descends a tree and writes the node instance to the output stream.
Double_t fMinLinCorrForFisher
Virtual base class for all MVA methods.
std::vector< const TMVA::Event * > fEventSample
Double_t Bagging()
Call it boot-strapping, re-sampling or whatever you like, in the end it is nothing else but applying ...
Ranking for variables in method (implementation)
Double_t AdaBoost(std::vector< const TMVA::Event *> &, DecisionTree *dt)
The AdaBoost implementation.
void ProcessOptions()
The option string is decoded, for available options see "DeclareOptions".
void GetHelpMessage() const
Get help message text.
std::vector< Bool_t > fIsHighBkgCut
void SetShrinkage(Double_t s)
Double_t AdaCost(std::vector< const TMVA::Event *> &, DecisionTree *dt)
The AdaCost boosting algorithm takes a simple cost Matrix (currently fixed for all events...
void MakeClassSpecific(std::ostream &, const TString &) const
Make ROOT-independent C++ class for classifier response (classifier-specific implementation).
const std::vector< const TMVA::Event * > & GetTrainingEvents() const
TString fRegressionLossFunctionBDTGS
Double_t GetMvaValue(Double_t *err=0, Double_t *errUpper=0)
std::vector< Double_t > fHighBkgCut
Double_t GetGradBoostMVA(const TMVA::Event *e, UInt_t nTrees)
Returns MVA value: -1 for background, 1 for signal.
Double_t fBaggedSampleFraction
Bool_t fInverseBoostNegWeights
Double_t GradBoostRegression(std::vector< const TMVA::Event *> &, DecisionTree *dt)
Implementation of M_TreeBoost using any loss function as described by Friedman 1999.
virtual void SetTuneParameters(std::map< TString, Double_t > tuneParameters)
Set the tuning parameters according to the argument.
#define ClassDef(name, id)
const std::vector< TMVA::DecisionTree * > & GetForest() const
void MakeClassSpecificHeader(std::ostream &, const TString &) const
Specific class header.
Double_t fSigToBkgFraction
virtual Bool_t HasAnalysisType(Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets)
BDT can handle classification with multiple classes and regression with one regression-target.
void Reset(void)
Reset the method, as if it had just been instantiated (forget all training etc.). ...
void SetMinNodeSize(Double_t sizeInPercent)
Double_t AdaBoostR2(std::vector< const TMVA::Event *> &, DecisionTree *dt)
Adaptation of the AdaBoost to regression problems (see H.Drucker 1997).
std::vector< Double_t > fHighSigCut
Class that contains all the data information.
const std::vector< Float_t > & GetMulticlassValues()
Get the multiclass MVA response for the BDT classifier.
Bool_t fNoNegWeightsInTraining
const std::vector< Float_t > & GetRegressionValues()
Get the regression value generated by the BDTs.
void InitEventSample()
Initialize the event sample (i.e. reset the boost-weights... etc).
std::vector< Bool_t > fIsLowBkgCut
void WriteMonitoringHistosToFile(void) const
Here we could write some histograms created during the processing to the output file.
std::vector< Double_t > fLowBkgCut
Double_t fNodePurityLimit
void SetBaggedSampleFraction(Double_t f)
void BoostMonitor(Int_t iTree)
Fills the ROCIntegral vs Itree from the testSample for the monitoring plots during the training ...
Bool_t fTrainWithNegWeights
Bool_t fSkipNormalization
virtual ~MethodBDT(void)
Destructor.
void SetNodePurityLimit(Double_t l)
Double_t PrivateGetMvaValue(const TMVA::Event *ev, Double_t *err=0, Double_t *errUpper=0, UInt_t useNTrees=0)
Return the MVA value (range [-1;1]) that classifies the event according to the majority vote from the...
Implementation of a Decision Tree.
Double_t GradBoost(std::vector< const TMVA::Event *> &, DecisionTree *dt, UInt_t cls=0)
Calculate the desired response value for each region.
SeparationBase * fSepType
void Init(void)
Common initialisation with defaults for the BDT-Method.
void ReadWeightsFromXML(void *parent)
Reads the BDT from the xml file.
An interface to calculate the "SeparationGain" for different separation criteria used in various trai...
Double_t TestTreeQuality(DecisionTree *dt)
Test the tree quality in terms of misclassification.
DecisionTree::EPruneMethod fPruneMethod
void ReadWeightsFromStream(std::istream &istr)
Read the weights (BDT coefficients).
Double_t ApplyPreselectionCuts(const Event *ev)
Apply the preselection cuts before even bothering about any Decision Trees in the GetMVA ...
void UpdateTargets(std::vector< const TMVA::Event *> &, UInt_t cls=0)
Calculate residual for all events.
void SetMaxDepth(Int_t d)
void AddWeightsXMLTo(void *parent) const
Write weights to XML.
void SetAdaBoostBeta(Double_t b)
std::vector< const TMVA::Event * > * fTrainSample
you should not use this method at all Int_t Int_t Double_t Double_t Double_t e
Double_t Boost(std::vector< const TMVA::Event *> &, DecisionTree *dt, UInt_t cls=0)
Apply the boosting algorithm (the algorithm is selected via the "option" given in the constructor...
LossFunctionBDT * fRegressionLossFunctionBDTG
virtual std::map< TString, Double_t > OptimizeTuningParameters(TString fomType="ROCIntegral", TString fitType="FitGA")
Call the Optimizer with the set of parameters and ranges that are meant to be tuned.
TString fNegWeightTreatment
Abstract ClassifierFactory template that handles arbitrary types.
you should not use this method at all Int_t Int_t Double_t Double_t Double_t Int_t Double_t Double_t Double_t Double_t b
std::vector< const TMVA::Event * > fValidationSample
std::vector< DecisionTree * > fForest
std::vector< Double_t > GetVariableImportance()
Return the relative variable importance, normalized to all variables together having the importance 1...
Double_t fFValidationEvents
const std::vector< double > & GetBoostWeights() const
std::vector< Double_t > fLowSigCut
void UpdateTargetsRegression(std::vector< const TMVA::Event *> &, Bool_t first=kFALSE)
Calculate current residuals for all events and update targets for next iteration. ...
A TTree object has a header with a name and a title.
std::map< const TMVA::Event *, std::vector< double > > fResiduals
static const Int_t fgDebugLevel
virtual void ReadWeightsFromStream(std::istream &)=0
std::vector< const TMVA::Event * > fSubSample
Analysis of Boosted Decision Trees.
void InitGradBoost(std::vector< const TMVA::Event *> &)
Initialize targets for first tree.
std::vector< double > fBoostWeights
MethodBDT(const TString &jobName, const TString &methodTitle, DataSetInfo &theData, const TString &theOption="")
The standard constructor for the "boosted decision trees".