vector<Double_t> | fEventWeights | original weights of the events - follows fTrainingEvents |
vector<const TMVA::DecisionTree*> | fForest | the input forest of decision trees |
TMVA::MsgLogger* | fLogger | message logger |
const TMVA::MethodBase* | fMethodBase | pointer to the method base which initialized this RuleFit instance |
const TMVA::MethodRuleFit* | fMethodRuleFit | pointer to the method which initialized this RuleFit instance |
Double_t | fNEveEffTrain | reweighted number of events = sum(wi) |
UInt_t | fNTreeSample | number of events in sub sample = frac*neve |
TMVA::RuleEnsemble | fRuleEnsemble | the ensemble of rules |
TMVA::RuleFitParams | fRuleFitParams | fit rule parameters |
vector<const TMVA::Event*> | fTrainingEvents | all training events |
vector<const TMVA::Event*> | fTrainingEventsRndm | idem, but randomly shuffled |
Bool_t | fVisHistsUseImp | if true, use importance as weight; else coef in vis hists |
static const Int_t | randSEED | set to 1 for debugging purposes or to zero for random seeds |
initialize the parameters of the RuleFit method and make rules
calculate the sum of weights
set the current message type to that of mlog for this class and all other subtools
build the decision tree using fNTreeSample events from fTrainingEventsRndm
Boost the events. The algorithm below is the so-called AdaBoost; see MethodBDT for details. It is more or less a copy of MethodBDT::AdaBoost().
draw a random subsample of the training events without replacement
normalize rule importance hists: if all weights are positive, the scale will be 1/maxweight; if the minimum weight < 0, then the scale will be 1/max(maxweight, abs(minweight))
fill rule correlation between vx and vy, weighted with either the importance or the coefficient
help routine to MakeVisHists() - fills for all variables
help routine to MakeVisHists() - fills for all correlation plots
get first and second variables from title
this will create histograms intended rather for debugging or for the curious user
{ std::random_shuffle(fTrainingEventsRndm.begin(),fTrainingEventsRndm.end()); }
set minimum importance allowed
{ fRuleEnsemble.SetImportanceCut(minimp); }
set minimum rule distance - see RuleEnsemble
{ fRuleEnsemble.SetRuleMinDist(d); }
make visualization histograms
{ fRuleFitParams.SetGDNPathSteps(n); }
{ return static_cast< const Event *>(fTrainingEvents[i]); }
/// Return the i-th training event of subsample isub.
/// NOTE(review): fSubsampleEvents is declared elsewhere — presumably it holds per-subsample
/// start indices into fTrainingEvents, since `[i]` applies pointer arithmetic to the
/// `const Event*` fetched from fTrainingEvents; confirm fTrainingEvents elements point into
/// a contiguous Event array, otherwise `+ i` on the pointer is undefined behavior.
const Event* GetTrainingEvent(UInt_t i, UInt_t isub) const { return &(fTrainingEvents[fSubsampleEvents[isub]])[i]; }
{ return fTrainingEvents; }