// ROOT logo
// @(#)root/tmva $Id: DecisionTreeNode.h 29195 2009-06-24 10:39:49Z brun $    
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss 

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis       *
 * Package: TMVA                                                                  *
 * Class  : DecisionTreeNode                                                      *
 * Web    : http://tmva.sourceforge.net                                           *
 *                                                                                *
 * Description:                                                                   *
 *      Node for the Decision Tree                                                *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland              *
 *      Helge Voss      <Helge.Voss@cern.ch>     - MPI-K Heidelberg, Germany      *
 *      Kai Voss        <Kai.Voss@cern.ch>       - U. of Victoria, Canada         *
 *                                                                                *
 * Copyright (c) 2005:                                                            *
 *      CERN, Switzerland                                                         * 
 *      U. of Victoria, Canada                                                    * 
 *      MPI-K Heidelberg, Germany                                                 * 
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (http://tmva.sourceforge.net/LICENSE)                                          *
 **********************************************************************************/

#ifndef ROOT_TMVA_DecisionTreeNode
#define ROOT_TMVA_DecisionTreeNode

//////////////////////////////////////////////////////////////////////////
//                                                                      //
// DecisionTreeNode                                                     //
//                                                                      //
// Node for the Decision Tree                                           //
//                                                                      //
//////////////////////////////////////////////////////////////////////////

#ifndef ROOT_TMVA_Node
#include "TMVA/Node.h"
#endif

#include <vector>
#include <map>
namespace TMVA {
  
   class Event;
   class MsgLogger;

   class DecisionTreeNode: public Node {
    
   public:
    
      // constructor of an essentially "empty" node floating in space
      DecisionTreeNode ();
      // constructor of a daughter node as a daughter of 'p'
      DecisionTreeNode (Node* p, char pos); 
    
      // copy constructor 
      DecisionTreeNode (const DecisionTreeNode &n, DecisionTreeNode* parent = NULL); 
    
      virtual ~DecisionTreeNode(){}

      // factory method: create a new node of this (derived) type
      virtual Node* CreateNode() const { return new DecisionTreeNode(); }
    
      // test event if it descends the tree at this node to the right  
      virtual Bool_t GoesRight( const Event & ) const;
    
      // test event if it descends the tree at this node to the left 
      virtual Bool_t GoesLeft ( const Event & ) const;
    
      // set index of variable used for discrimination at this node
      void SetSelector( Short_t i) { fSelector = i; }
      // return index of variable used for discrimination at this node 
      Short_t GetSelector() const { return fSelector; }
    
    
      // set the cut value applied at this node 
      void  SetCutValue ( Float_t c ) { fCutValue  = c; }
      // return the cut value applied at this node
      Float_t GetCutValue ( void ) const { return fCutValue;  }
    
      // set true: if event variable > cutValue ==> signal , false otherwise
      void SetCutType( Bool_t t   ) { fCutType = t; }
      // return kTRUE: Cuts select signal, kFALSE: Cuts select bkg
      Bool_t GetCutType( void ) const { return fCutType; }
    
      // set node type: 1 signal node, -1 bkg leaf, 0 intermediate node
      void  SetNodeType( Int_t t ) { fNodeType = t;} 
      // return node type: 1 signal node, -1 bkg leaf, 0 intermediate node 
      Int_t GetNodeType( void ) const { return fNodeType; }
    
      // return S/(S+B) (purity) at this node (from training)
      Float_t GetPurity( void ) const ;

      // set the response of the node (for regression)
      void SetResponse( Float_t r ) { fResponse = r;}

      // return the response of the node (for regression)
      Float_t GetResponse( void ) const { return fResponse;}

      // set the RMS of the response of the node (for regression)
      void SetRMS( Float_t r ) { fRMS = r;}

      // return the RMS of the response of the node (for regression)
      Float_t GetRMS( void ) const { return fRMS;}

      // set the sum of the signal weights in the node
      void SetNSigEvents( Float_t s ) { fNSigEvents = s; }
    
      // set the sum of the backgr weights in the node
      void SetNBkgEvents( Float_t b ) { fNBkgEvents = b; }
    
      // set the number of events that entered the node (during training)
      void SetNEvents( Float_t nev ){ fNEvents =nev ; }
    
      // set the sum of the unweighted signal events in the node
      void SetNSigEvents_unweighted( Float_t s ) { fNSigEvents_unweighted = s; }
    
      // set the sum of the unweighted backgr events in the node
      void SetNBkgEvents_unweighted( Float_t b ) { fNBkgEvents_unweighted = b; }
    
      // set the number of unweighted events that entered the node (during training)
      void SetNEvents_unweighted( Float_t nev ){ fNEvents_unweighted =nev ; }
    
      // increment the sum of the signal weights in the node
      void IncrementNSigEvents( Float_t s ) { fNSigEvents += s; }
    
      // increment the sum of the backgr weights in the node
      void IncrementNBkgEvents( Float_t b ) { fNBkgEvents += b; }
    
      // increment the number of events that entered the node (during training)
      void IncrementNEvents( Float_t nev ){ fNEvents +=nev ; }
    
      // increment the count of unweighted signal events in the node
      void IncrementNSigEvents_unweighted( ) { fNSigEvents_unweighted += 1; }
    
      // increment the count of unweighted backgr events in the node
      void IncrementNBkgEvents_unweighted( ) { fNBkgEvents_unweighted += 1; }
    
      // increment the number of unweighted events that entered the node (during training)
      void IncrementNEvents_unweighted( ){ fNEvents_unweighted +=1 ; }
    
      // return the sum of the signal weights in the node
      Float_t GetNSigEvents( void ) const  { return fNSigEvents; }
    
      // return the sum of the backgr weights in the node
      Float_t GetNBkgEvents( void ) const  { return fNBkgEvents; }
    
      // return the number of events that entered the node (during training)
      Float_t GetNEvents( void ) const  { return fNEvents; }
    
      // return the count of unweighted signal events in the node
      Float_t GetNSigEvents_unweighted( void ) const  { return fNSigEvents_unweighted; }
    
      // return the count of unweighted backgr events in the node
      Float_t GetNBkgEvents_unweighted( void ) const  { return fNBkgEvents_unweighted; }
    
      // return the number of unweighted events that entered the node (during training)
      Float_t GetNEvents_unweighted( void ) const  { return fNEvents_unweighted; }
    
    
      // set the chosen index, measure of "purity" (separation between S and B) AT this node
      void SetSeparationIndex( Float_t sep ){ fSeparationIndex =sep ; }
      // return the separation index AT this node
      Float_t GetSeparationIndex( void ) const  { return fSeparationIndex; }
    
      // set the separation, or information gained BY this node's selection
      void SetSeparationGain( Float_t sep ){ fSeparationGain =sep ; }
      // return the gain in separation obtained by this node's selection
      Float_t GetSeparationGain( void ) const  { return fSeparationGain; }
    
      // printout of the node
      virtual void Print( ostream& os ) const;
    
      // recursively print the node and its daughters (--> print the 'tree')
      virtual void PrintRec( ostream&  os ) const;

      // write the node's attributes to an XML node / stream (ROOT I/O helpers)
      virtual void AddAttributesToNode(void* node) const;
      virtual void AddContentToNode(std::stringstream& s) const;

      // recursively clear the nodes content (S/N etc, but not the cut criteria) 
      void ClearNodeAndAllDaughters();

      // get pointers to children, mother in the tree
      inline DecisionTreeNode* GetLeftDaughter( ) { return dynamic_cast<DecisionTreeNode*>(GetLeft()); }
      inline DecisionTreeNode* GetRightDaughter( ) { return dynamic_cast<DecisionTreeNode*>(GetRight()); }
      inline DecisionTreeNode* GetMother( ) { return dynamic_cast<DecisionTreeNode*>(GetParent()); }
      inline const DecisionTreeNode* GetLeftDaughter( ) const { return dynamic_cast<DecisionTreeNode*>(GetLeft()); }
      inline const DecisionTreeNode* GetRightDaughter( ) const { return dynamic_cast<DecisionTreeNode*>(GetRight()); }
      inline const DecisionTreeNode* GetMother( ) const { return dynamic_cast<DecisionTreeNode*>(GetParent()); }

      // bit-coded left/right sequence needed to reach this node from the root
      ULong_t GetSequence() const {return fSequence;}
    
      void SetSequence(ULong_t s) {fSequence=s;}
    
      // the node resubstitution estimate, R(t), for Cost Complexity pruning
      inline void SetNodeR( Double_t r ) { fNodeR = r;    }
      inline Double_t GetNodeR( ) const  { return fNodeR; }

      // the resubstitution estimate, R(T_t), of the tree rooted at this node
      inline void SetSubTreeR( Double_t r ) { fSubTreeR = r;    }
      inline Double_t GetSubTreeR( ) const  { return fSubTreeR; }

      //                             R(t) - R(T_t)
      // the critical point alpha =  -------------
      //                              |~T_t| - 1
      inline void SetAlpha( Double_t alpha ) { fAlpha = alpha; }
      inline Double_t GetAlpha( ) const      { return fAlpha;  }
    
      // the minimum alpha in the tree rooted at this node
      inline void SetAlphaMinSubtree( Double_t g ) { fG = g;    }
      inline Double_t GetAlphaMinSubtree( ) const  { return fG; }

      // number of terminal nodes in the subtree rooted here
      inline void SetNTerminal( Int_t n ) { fNTerminal = n;    }
      inline Int_t GetNTerminal( ) const  { return fNTerminal; }

      // number of background/signal events from the pruning validation sample
      inline void SetNBValidation( Double_t b ) { fNB = b; }
      inline void SetNSValidation( Double_t s ) { fNS = s; }
      inline Double_t GetNBValidation( ) const  { return fNB; }
      inline Double_t GetNSValidation( ) const  { return fNS; }

      // running sums of weight*target (and its square) for regression variance
      inline void SetSumTarget(Float_t t)  {fSumTarget = t; }
      inline void SetSumTarget2(Float_t t2){fSumTarget2 = t2; }

      inline void AddToSumTarget(Float_t t)  {fSumTarget += t; }
      inline void AddToSumTarget2(Float_t t2){fSumTarget2 += t2; }

      inline Float_t GetSumTarget()  const {return fSumTarget; }
      inline Float_t GetSumTarget2() const {return fSumTarget2; }

    
      // reset the pruning validation data
      void ResetValidationData( );

      // flag indicates whether this node is terminal
      inline Bool_t IsTerminal() const            { return fIsTerminalNode; }
      inline void SetTerminal( Bool_t s = kTRUE ) { fIsTerminalNode = s;    }
      void PrintPrune( ostream& os ) const ;
      void PrintRecPrune( ostream& os ) const;

      // cost-complexity value used during pruning
      void     SetCC(Double_t cc) {fCC = cc;}
      Double_t GetCC() const {return fCC;}

      // per-variable min/max of the training sample seen at this node
      Float_t GetSampleMin(UInt_t ivar) const;
      Float_t GetSampleMax(UInt_t ivar) const;
      void     SetSampleMin(UInt_t ivar, Float_t xmin);
      void     SetSampleMax(UInt_t ivar, Float_t xmax);

   private:

      // (de)serialisation helpers used by the tree I/O machinery
      virtual void ReadAttributes(void* node);
      virtual Bool_t ReadDataRecord( istream& is );
      virtual void ReadContent(std::stringstream& s);

      Double_t fNodeR;           // node resubstitution estimate, R(t)
      Double_t fSubTreeR;        // R(T) = Sum(R(t) : t in ~T)
      Double_t fAlpha;           // critical alpha for this node
      Double_t fG;               // minimum alpha in subtree rooted at this node
      Int_t    fNTerminal;       // number of terminal nodes in subtree rooted at this node
      Double_t fNB;              // sum of weights of background events from the pruning sample in this node
      Double_t fNS;              // ditto for the signal events

      Float_t  fSumTarget;       // sum of weight*target  used for the calculation of the variance (regression)
      Float_t  fSumTarget2;      // sum of weight*target^2 used for the calculation of the variance (regression)
    

      Float_t  fCutValue;        // cut value applied on this node to discriminate bkg against sig
      Bool_t   fCutType;         // true: if event variable > cutValue ==> signal , false otherwise
      Short_t  fSelector;        // index of variable used in node selection (decision tree) 
    
      Float_t  fNSigEvents;      // sum of weights of signal event in the node
      Float_t  fNBkgEvents;      // sum of weights of backgr event in the node
      Float_t  fNEvents;         // number of events that entered the node (during training)
    
      Float_t  fNSigEvents_unweighted;      // sum of signal event in the node
      Float_t  fNBkgEvents_unweighted;      // sum of backgr event in the node
      Float_t  fNEvents_unweighted;         // number of events that entered the node (during training)
    
      Float_t  fSeparationIndex; // measure of "purity" (separation between S and B) AT this node
      Float_t  fSeparationGain;  // measure of "purity", separation, or information gained BY this node's selection
      Float_t  fResponse;        // response value in case of regression
      Float_t  fRMS;             // response RMS of the regression node 
      Int_t    fNodeType;        // Type of node: -1 == Bkg-leaf, 1 == Signal-leaf, 0 = internal 
    
      ULong_t  fSequence;        // bit coded left right sequence to reach the node

      Bool_t   fIsTerminalNode;    //! flag to set node as terminal (i.e., without deleting its descendants)

      Double_t fCC;              // debug variable for cost complexity pruning .. temporary

      std::vector< Float_t >  fSampleMin; // the minima for each ivar of the sample on the node during training
      std::vector< Float_t >  fSampleMax; // the maxima for each ivar of the sample on the node during training



      static MsgLogger* fgLogger;    // static because there is a huge number of nodes...
    
      ClassDef(DecisionTreeNode,0) // Node for the Decision Tree 
    
   };
} // namespace TMVA

#endif 