Node for the Decision Tree
The node specifies ONE variable out of the given set of selection variables
that is used to split the sample which "arrives" at the node, into a left
(background-enhanced) and a right (signal-enhanced) sample.
virtual void | ReadAttributes(void* node, UInt_t tmva_Version_Code = TMVA_VERSION_CODE) |
virtual void | ReadContent(stringstream& s) |
virtual Bool_t | ReadDataRecord(istream& is, UInt_t tmva_Version_Code = TMVA_VERSION_CODE) |
Bool_t | fCutType | true: if event variable > cutValue ==> signal , false otherwise |
Float_t | fCutValue | cut value applied on this node to discriminate bkg against sig |
UInt_t | TMVA::Node::fDepth | depth of the node within the tree (seen from root node) |
vector<Double_t> | fFisherCoeff | the fisher coeff (offset at the last element) |
Bool_t | fIsTerminalNode | ! flag to set node as terminal (i.e., without deleting its descendants) |
TMVA::Node* | TMVA::Node::fLeft | pointers to the two "daughter" nodes |
Int_t | fNodeType | Type of node: -1 == Bkg-leaf, 1 == Signal-leaf, 0 == internal |
TMVA::Node* | TMVA::Node::fParent | the previous (parent) node |
TMVA::BinaryTree* | TMVA::Node::fParentTree | pointer to the parent tree to which the Node belongs |
char | TMVA::Node::fPos | position, i.e. it is a left (l) or right (r) daughter |
Float_t | fPurity | the node purity |
Float_t | fRMS | response RMS of the regression node |
Float_t | fResponse | response value in case of regression |
TMVA::Node* | TMVA::Node::fRight | pointers to the two "daughter" nodes |
Short_t | fSelector | index of variable used in node selection (decision tree) |
TMVA::DTNodeTrainingInfo* | fTrainInfo | |
static TMVA::MsgLogger* | fgLogger | static because there is a huge number of nodes... |
copy constructor of a node. It will result in an explicit copy of the node and recursively all its daughters
test event if it descends the tree at this node to the right
test event if it descends the tree at this node to the left
return the S/(S+B) (purity) for the node REM: even if nodes with purity 0.01 are very PURE background nodes, they still get a small value of the purity.
recursively print the node and its daughters (--> print the 'tree')
Read the data block
clear the nodes (their S/N, Nevents etc), just keep the structure of the tree
temporary stored node values (number of events, etc.) that originate not from the training but from the validation data (used in pruning)
return the minimum of variable ivar from the training sample that pass/end up in this node
return the maximum of variable ivar from the training sample that pass/end up in this node
set the minimum of variable ivar from the training sample that pass/end up in this node
set the maximum of variable ivar from the training sample that pass/end up in this node
adding attributes to tree node (well, was used in BinarySearchTree, and somehow I guess someone programmed it such that we need this in this tree too, although we don't..)
reading attributes from tree node (well, was used in BinarySearchTree, and somehow I guess someone programmed it such that we need this in this tree too, although we don't..)
set index of variable used for discrimination at this node
{ fSelector = i; }
return index of variable used for discrimination at this node
{ return fSelector; }
set node type: 1 signal node, -1 bkg leaf, 0 intermediate Node
{ fNodeType = t;}
return node type: 1 signal node, -1 bkg leaf, 0 intermediate Node
{ return fNodeType; }
set the sum of the signal weights in the node
{ fTrainInfo->fNSigEvents = s; }
set the sum of the backgr weights in the node
{ fTrainInfo->fNBkgEvents = b; }
set the number of events that entered the node (during training)
{ fTrainInfo->fNEvents =nev ; }
set the sum of the unweighted signal events in the node
{ fTrainInfo->fNSigEvents_unweighted = s; }
set the sum of the unweighted backgr events in the node
{ fTrainInfo->fNBkgEvents_unweighted = b; }
set the number of unweighted events that entered the node (during training)
{ fTrainInfo->fNEvents_unweighted =nev ; }
increment the sum of the signal weights in the node
{ fTrainInfo->fNSigEvents += s; }
increment the sum of the backgr weights in the node
{ fTrainInfo->fNBkgEvents += b; }
increment the number of events that entered the node (during training)
{ fTrainInfo->fNEvents +=nev ; }
increment the sum of the signal weights in the node
{ fTrainInfo->fNSigEvents_unweighted += 1; }
increment the sum of the backgr weights in the node
{ fTrainInfo->fNBkgEvents_unweighted += 1; }
increment the number of events that entered the node (during training)
{ fTrainInfo->fNEvents_unweighted +=1 ; }
return the sum of the signal weights in the node
{ return fTrainInfo->fNSigEvents; }
return the sum of the backgr weights in the node
{ return fTrainInfo->fNBkgEvents; }
return the number of events that entered the node (during training)
{ return fTrainInfo->fNEvents; }
return the sum of unweighted signal weights in the node
{ return fTrainInfo->fNSigEvents_unweighted; }
return the sum of unweighted backgr weights in the node
{ return fTrainInfo->fNBkgEvents_unweighted; }
return the number of unweighted events that entered the node (during training)
{ return fTrainInfo->fNEvents_unweighted; }
set the chosen index, a measure of "purity" (separation between S and B) AT this node
{ fTrainInfo->fSeparationIndex =sep ; }
return the separation index AT this node
{ return fTrainInfo->fSeparationIndex; }
set the separation, or information gained BY this nodes selection
{ fTrainInfo->fSeparationGain =sep ; }
return the gain in separation obtained by this nodes selection
{ return fTrainInfo->fSeparationGain; }
get pointers to children/mother in the tree: return a pointer to the left/right daughter or the parent node
{ return dynamic_cast<DecisionTreeNode*>(fLeft); }
set pointer to the left/right daughter and parent node
{ fLeft = dynamic_cast<DecisionTreeNode*>(l);}
the node resubstitution estimate, R(t), for Cost Complexity pruning
{ fTrainInfo->fNodeR = r; }
the resubstitution estimate, R(T_t), of the tree rooted at this node
{ fTrainInfo->fSubTreeR = r; }
the critical point: alpha = (R(t) - R(T_t)) / (|~T_t| - 1)
{ fTrainInfo->fAlpha = alpha; }
the minimum alpha in the tree rooted at this node
{ fTrainInfo->fG = g; }
number of terminal nodes in the subtree rooted here
{ fTrainInfo->fNTerminal = n; }
number of background/signal events from the pruning validation sample
{ fTrainInfo->fNB = b; }