// @(#)root/tmva $Id$
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss, Eckhard von Toerne

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis       *
 * Package: TMVA                                                                  *
 * Class  : TMVA::DecisionTreeNode                                                *
 * Web    : http://tmva.sourceforge.net                                           *
 *                                                                                *
 * Description:                                                                   *
 *      Implementation of a Decision Tree Node                                    *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland              *
 *      Helge Voss      <Helge.Voss@cern.ch>     - MPI-K Heidelberg, Germany      *
 *      Kai Voss        <Kai.Voss@cern.ch>       - U. of Victoria, Canada         *
 *      Eckhard von Toerne <evt@physik.uni-bonn.de>  - U. of Bonn, Germany        *
 *                                                                                *
 * Copyright (c) 2009:                                                            *
 *      CERN, Switzerland                                                         *
 *      U. of Victoria, Canada                                                    *
 *      MPI-K Heidelberg, Germany                                                 *
 *      U. of Bonn, Germany                                                       *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (http://tmva.sourceforge.net/LICENSE)                                          *
 **********************************************************************************/

//_______________________________________________________________________
//
// Node for the Decision Tree
//
// The node specifies ONE variable out of the given set of selection variables
// that is used to split the sample that "arrives" at the node into a left
// (background-enhanced) and a right (signal-enhanced) sample.
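//
// Illustrative sketch (not part of the class itself): when a trained tree is
// evaluated, an event descends node by node via GoesRight()/GoesLeft() until
// a terminal node is reached. Assuming a hypothetical pointer 'rootNode' to
// the tree's root node, an 'event' of type TMVA::Event, and the TMVA
// convention that intermediate nodes carry node type 0, the descent looks
// roughly like this:
//
//    const TMVA::DecisionTreeNode* node = rootNode;
//    while (node->GetNodeType() == 0) {
//       node = (TMVA::DecisionTreeNode*) (node->GoesRight(event) ? node->GetRight()
//                                                                : node->GetLeft());
//    }
//    // node->GetPurity() (classification) or node->GetResponse() (regression)
//    // then gives the tree's answer for this event.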
//_______________________________________________________________________

#include <algorithm>
#include <exception>
#include <iomanip>
#include <limits>

#include "TMVA/MsgLogger.h"
#include "TMVA/DecisionTreeNode.h"
#include "TMVA/Tools.h"
#include "TMVA/Event.h"

using std::string;

ClassImp(TMVA::DecisionTreeNode)

// when set, newly constructed nodes allocate a DTNodeTrainingInfo block
bool     TMVA::DecisionTreeNode::fgIsTraining = false;
// TMVA version code of the weight file currently being read (set in ReadDataRecord)
UInt_t   TMVA::DecisionTreeNode::fgTmva_Version_Code = 0;
//_______________________________________________________________________
TMVA::DecisionTreeNode::DecisionTreeNode()
   : TMVA::Node(),
     fCutValue(0),
     fCutType ( kTRUE ),
     fSelector ( -1 ),
     fResponse(-99 ),
     fRMS(0),
     fNodeType (-99 ),
     fPurity (-99),
     fIsTerminalNode( kFALSE )
{
   // constructor of an essentially "empty" node floating in space
   if (DecisionTreeNode::fgIsTraining){
      fTrainInfo = new DTNodeTrainingInfo();
      //std::cout << "Node constructor with TrainingINFO"<<std::endl;
   }
   else {
      //std::cout << "**Node constructor WITHOUT TrainingINFO"<<std::endl;
      fTrainInfo = 0;
   }
}

//_______________________________________________________________________
TMVA::DecisionTreeNode::DecisionTreeNode(TMVA::Node* p, char pos)
   : TMVA::Node(p, pos),
     fCutValue( 0 ),
     fCutType ( kTRUE ),
     fSelector( -1 ),
     fResponse(-99 ),
     fRMS(0),
     fNodeType( -99 ),
     fPurity (-99),
     fIsTerminalNode( kFALSE )
{
   // constructor of a node created as daughter of node 'p'
   if (DecisionTreeNode::fgIsTraining){
      fTrainInfo = new DTNodeTrainingInfo();
      //std::cout << "Node constructor with TrainingINFO"<<std::endl;
   }
   else {
      //std::cout << "**Node constructor WITHOUT TrainingINFO"<<std::endl;
      fTrainInfo = 0;
   }
}

//_______________________________________________________________________
TMVA::DecisionTreeNode::DecisionTreeNode(const TMVA::DecisionTreeNode &n,
                                         DecisionTreeNode* parent)
   : TMVA::Node(n),
     fCutValue( n.fCutValue ),
     fCutType ( n.fCutType ),
     fSelector( n.fSelector ),
     fResponse( n.fResponse ),
     fRMS     ( n.fRMS),
     fNodeType( n.fNodeType ),
     fPurity  ( n.fPurity),
     fIsTerminalNode( n.fIsTerminalNode )
{
   // copy constructor of a node. It will result in an explicit copy of the
   // node and recursively of all its daughters
   this->SetParent( parent );
   if (n.GetLeft() == 0 ) this->SetLeft(NULL);
   else this->SetLeft( new DecisionTreeNode( *((DecisionTreeNode*)(n.GetLeft())),this));

   if (n.GetRight() == 0 ) this->SetRight(NULL);
   else this->SetRight( new DecisionTreeNode( *((DecisionTreeNode*)(n.GetRight())),this));

   if (DecisionTreeNode::fgIsTraining){
      fTrainInfo = new DTNodeTrainingInfo(*(n.fTrainInfo));
      //std::cout << "Node constructor with TrainingINFO"<<std::endl;
   }
   else {
      //std::cout << "**Node constructor WITHOUT TrainingINFO"<<std::endl;
      fTrainInfo = 0;
   }
}

//_______________________________________________________________________
TMVA::DecisionTreeNode::~DecisionTreeNode(){
   // destructor
   delete fTrainInfo;
}


//_______________________________________________________________________
Bool_t TMVA::DecisionTreeNode::GoesRight(const TMVA::Event & e) const
{
   // test whether the event descends the tree at this node to the right
   Bool_t result;
   // first check whether the Fisher criterion or an ordinary cut is used:
   if (GetNFisherCoeff() == 0){

      result = (e.GetValue(this->GetSelector()) >= this->GetCutValue() );

   }else{
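      // Fisher criterion: the node cuts on the value of the linear Fisher
      // discriminant
      //    fisher = c_N + sum_{i=0}^{N-1} c_i * x_i ,
      // where the last stored coefficient c_N is the offset, the c_i are the
      // Fisher coefficients for the N input variables, and x_i are the
      // event's input variable values.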

      Double_t fisher = this->GetFisherCoeff(fFisherCoeff.size()-1); // the offset
      for (UInt_t ivar=0; ivar<fFisherCoeff.size()-1; ivar++)
         fisher += this->GetFisherCoeff(ivar)*(e.GetValue(ivar));

      result = fisher > this->GetCutValue();
   }

   if (fCutType == kTRUE) return result; // the cut selects signal
   else return !result;
}

//_______________________________________________________________________
Bool_t TMVA::DecisionTreeNode::GoesLeft(const TMVA::Event & e) const
{
   // test whether the event descends the tree at this node to the left
   if (!this->GoesRight(e)) return kTRUE;
   else return kFALSE;
}


//_______________________________________________________________________
void TMVA::DecisionTreeNode::SetPurity( void )
{
   // calculate and set the S/(S+B) (purity) of the node
   // NOTE: the purity is defined as the signal fraction, hence even very pure
   //       background nodes (e.g. purity 0.01) still get a small, non-zero value.

   if ( ( this->GetNSigEvents() + this->GetNBkgEvents() ) > 0 ) {
      fPurity = this->GetNSigEvents() / ( this->GetNSigEvents() + this->GetNBkgEvents());
   }
   else {
      Log() << kINFO << "Zero events in purity calculation, return purity=0.5" << Endl;
      this->Print(Log());
      fPurity = 0.5;
   }
   return;
}

// print a node
//_______________________________________________________________________
void TMVA::DecisionTreeNode::Print(std::ostream& os) const
{
   //print the node
   os << "< ***  "  << std::endl;
   os << " d: "     << this->GetDepth()
      << std::setprecision(6)
      << "NCoef: "  << this->GetNFisherCoeff();
   for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++) { os << "fC"<<i<<": " << this->GetFisherCoeff(i);}
   os << " ivar: "  << this->GetSelector()
      << " cut: "   << this->GetCutValue()
      << " cType: " << this->GetCutType()
      << " s: "     << this->GetNSigEvents()
      << " b: "     << this->GetNBkgEvents()
      << " nEv: "   << this->GetNEvents()
      << " suw: "   << this->GetNSigEvents_unweighted()
      << " buw: "   << this->GetNBkgEvents_unweighted()
      << " nEvuw: " << this->GetNEvents_unweighted()
      << " sepI: "  << this->GetSeparationIndex()
      << " sepG: "  << this->GetSeparationGain()
      << " nType: " << this->GetNodeType()
      << std::endl;

   os << "My address is " << long(this) << ", ";
   if (this->GetParent() != NULL) os << " parent at addr: "         << long(this->GetParent()) ;
   if (this->GetLeft()   != NULL) os << " left daughter at addr: "  << long(this->GetLeft());
   if (this->GetRight()  != NULL) os << " right daughter at addr: " << long(this->GetRight()) ;

   os << " **** > " << std::endl;
}

//_______________________________________________________________________
void TMVA::DecisionTreeNode::PrintRec(std::ostream& os) const
{
   //recursively print the node and its daughters (--> print the 'tree')

   os << this->GetDepth()
      << std::setprecision(6)
      << " "         << this->GetPos()
      << "NCoef: "   << this->GetNFisherCoeff();
   for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++) {os << "fC"<<i<<": " << this->GetFisherCoeff(i);}
   os << " ivar: "   << this->GetSelector()
      << " cut: "    << this->GetCutValue()
      << " cType: "  << this->GetCutType()
      << " s: "      << this->GetNSigEvents()
      << " b: "      << this->GetNBkgEvents()
      << " nEv: "    << this->GetNEvents()
      << " suw: "    << this->GetNSigEvents_unweighted()
      << " buw: "    << this->GetNBkgEvents_unweighted()
      << " nEvuw: "  << this->GetNEvents_unweighted()
      << " sepI: "   << this->GetSeparationIndex()
      << " sepG: "   << this->GetSeparationGain()
      << " res: "    << this->GetResponse()
      << " rms: "    << this->GetRMS()
      << " nType: "  << this->GetNodeType();
   if (this->GetCC() > 10000000000000.) os << " CC: " << 100000. << std::endl;
   else os << " CC: "  << this->GetCC() << std::endl;

   if (this->GetLeft()  != NULL) this->GetLeft() ->PrintRec(os);
   if (this->GetRight() != NULL) this->GetRight()->PrintRec(os);
}

//_______________________________________________________________________
Bool_t TMVA::DecisionTreeNode::ReadDataRecord( std::istream& is, UInt_t tmva_Version_Code )
{
   // Read the data block
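   // The text record consumed below starts with the node depth and the
   // position character, followed by whitespace-separated <tag> <value> pairs
   // (the tag strings themselves are read into 'tmp' and ignored) in this
   // order: sequence number, ivar, cut value, cut type, nsig, nbkg, nEv,
   // their unweighted counterparts, separation index, separation gain,
   // [response,] node type [, CC]; response and CC are only present in
   // weight files written with TMVA >= 4.0.0.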
   fgTmva_Version_Code=tmva_Version_Code;
   string tmp;

   Float_t cutVal, cutType, nsig, nbkg, nEv, nsig_unweighted, nbkg_unweighted, nEv_unweighted;
   Float_t separationIndex, separationGain, response(-99), cc(0);
   Int_t   depth, ivar, nodeType;
   ULong_t lseq;
   char pos;

   is >> depth;                                         // 2
   if ( depth==-1 ) { return kFALSE; }
   //   if ( depth==-1 ) { delete this; return kFALSE; }
   is >> pos ;                                          // r
   this->SetDepth(depth);
   this->SetPos(pos);

   if (tmva_Version_Code < TMVA_VERSION(4,0,0)) {
      is >> tmp >> lseq
         >> tmp >> ivar
         >> tmp >> cutVal
         >> tmp >> cutType
         >> tmp >> nsig
         >> tmp >> nbkg
         >> tmp >> nEv
         >> tmp >> nsig_unweighted
         >> tmp >> nbkg_unweighted
         >> tmp >> nEv_unweighted
         >> tmp >> separationIndex
         >> tmp >> separationGain
         >> tmp >> nodeType;
   } else {
      is >> tmp >> lseq
         >> tmp >> ivar
         >> tmp >> cutVal
         >> tmp >> cutType
         >> tmp >> nsig
         >> tmp >> nbkg
         >> tmp >> nEv
         >> tmp >> nsig_unweighted
         >> tmp >> nbkg_unweighted
         >> tmp >> nEv_unweighted
         >> tmp >> separationIndex
         >> tmp >> separationGain
         >> tmp >> response
         >> tmp >> nodeType
         >> tmp >> cc;
   }

   this->SetSelector((UInt_t)ivar);
   this->SetCutValue(cutVal);
   this->SetCutType(cutType);
   this->SetNodeType(nodeType);
   if (fTrainInfo){
      this->SetNSigEvents(nsig);
      this->SetNBkgEvents(nbkg);
      this->SetNEvents(nEv);
      this->SetNSigEvents_unweighted(nsig_unweighted);
      this->SetNBkgEvents_unweighted(nbkg_unweighted);
      this->SetNEvents_unweighted(nEv_unweighted);
      this->SetSeparationIndex(separationIndex);
      this->SetSeparationGain(separationGain);
      this->SetPurity();
      //      this->SetResponse(response); old .txt weightfiles don't know regression yet
      this->SetCC(cc);
   }

   return kTRUE;
}

//_______________________________________________________________________
void TMVA::DecisionTreeNode::ClearNodeAndAllDaughters()
{
   // clear the node and recursively all its daughters (S/N, Nevents etc.); keep only the structure of the tree
   SetNSigEvents(0);
   SetNBkgEvents(0);
   SetNEvents(0);
   SetNSigEvents_unweighted(0);
   SetNBkgEvents_unweighted(0);
   SetNEvents_unweighted(0);
   SetSeparationIndex(-1);
   SetSeparationGain(-1);
   SetPurity();

   if (this->GetLeft()  != NULL) ((DecisionTreeNode*)(this->GetLeft()))->ClearNodeAndAllDaughters();
   if (this->GetRight() != NULL) ((DecisionTreeNode*)(this->GetRight()))->ClearNodeAndAllDaughters();
}

//_______________________________________________________________________
void TMVA::DecisionTreeNode::ResetValidationData( ) {
   // reset the temporarily stored node values (number of events, etc.) that
   // originate not from the training but from the validation data (used in pruning)
   SetNBValidation( 0.0 );
   SetNSValidation( 0.0 );
   SetSumTarget( 0 );
   SetSumTarget2( 0 );

   if(GetLeft() != NULL && GetRight() != NULL) {
      GetLeft()->ResetValidationData();
      GetRight()->ResetValidationData();
   }
}

//_______________________________________________________________________
void TMVA::DecisionTreeNode::PrintPrune( std::ostream& os ) const {
   // printout of the pruning-relevant quantities of the node

   os << "----------------------" << std::endl
      << "|~T_t| " << GetNTerminal() << std::endl
      << "R(t): " << GetNodeR() << std::endl
      << "R(T_t): " << GetSubTreeR() << std::endl
      << "g(t): " << GetAlpha() << std::endl
      << "G(t): "  << GetAlphaMinSubtree() << std::endl;
}

//_______________________________________________________________________
void TMVA::DecisionTreeNode::PrintRecPrune( std::ostream& os ) const {
   // recursive printout of the node and its daughters

   this->PrintPrune(os);
   if(this->GetLeft() != NULL && this->GetRight() != NULL) {
      ((DecisionTreeNode*)this->GetLeft())->PrintRecPrune(os);
      ((DecisionTreeNode*)this->GetRight())->PrintRecPrune(os);
   }
}

//_______________________________________________________________________
void TMVA::DecisionTreeNode::SetCC(Double_t cc)
{
   if (fTrainInfo) fTrainInfo->fCC = cc;
   else Log() << kFATAL << "call to SetCC without trainingInfo" << Endl;
}

//_______________________________________________________________________
Float_t TMVA::DecisionTreeNode::GetSampleMin(UInt_t ivar) const {
   // return the minimum of variable ivar of the training events that end up in this node
   if (fTrainInfo && ivar < fTrainInfo->fSampleMin.size()) return fTrainInfo->fSampleMin[ivar];
   else Log() << kFATAL << "You asked for the minimum of the event sample in this node for variable "
              << ivar << ", which is out of range" << Endl;
   return -9999;
}

//_______________________________________________________________________
Float_t TMVA::DecisionTreeNode::GetSampleMax(UInt_t ivar) const {
   // return the maximum of variable ivar of the training events that end up in this node
   if (fTrainInfo && ivar < fTrainInfo->fSampleMax.size()) return fTrainInfo->fSampleMax[ivar];
   else Log() << kFATAL << "You asked for the maximum of the event sample in this node for variable "
              << ivar << ", which is out of range" << Endl;
   return 9999;
}

//_______________________________________________________________________
void TMVA::DecisionTreeNode::SetSampleMin(UInt_t ivar, Float_t xmin){
   // set the minimum of variable ivar of the training events that end up in this node
   if ( fTrainInfo) {
      if ( ivar >= fTrainInfo->fSampleMin.size()) fTrainInfo->fSampleMin.resize(ivar+1);
      fTrainInfo->fSampleMin[ivar]=xmin;
   }
}

//_______________________________________________________________________
void TMVA::DecisionTreeNode::SetSampleMax(UInt_t ivar, Float_t xmax){
   // set the maximum of variable ivar of the training events that end up in this node
   if( ! fTrainInfo ) return;
   if ( ivar >= fTrainInfo->fSampleMax.size() )
      fTrainInfo->fSampleMax.resize(ivar+1);
   fTrainInfo->fSampleMax[ivar]=xmax;
}

//_______________________________________________________________________
void TMVA::DecisionTreeNode::ReadAttributes(void* node, UInt_t /* tmva_Version_Code */  )
{
   // read the node attributes from the xml node
   Float_t tempNSigEvents, tempNBkgEvents;

   Int_t nCoef;
   if (gTools().HasAttr(node, "NCoef")){
      gTools().ReadAttr(node, "NCoef",  nCoef                  );
      this->SetNFisherCoeff(nCoef);
      Double_t tmp;
      for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++) {
         gTools().ReadAttr(node, Form("fC%d",i),  tmp          );
         this->SetFisherCoeff(i,tmp);
      }
   }else{
      this->SetNFisherCoeff(0);
   }
   gTools().ReadAttr(node, "IVar",  fSelector               );
   gTools().ReadAttr(node, "Cut",   fCutValue               );
   gTools().ReadAttr(node, "cType", fCutType                );
   if (gTools().HasAttr(node,"res")) gTools().ReadAttr(node, "res",   fResponse);
   if (gTools().HasAttr(node,"rms")) gTools().ReadAttr(node, "rms",   fRMS);
   //   else {
   if( gTools().HasAttr(node, "purity") ) {
      gTools().ReadAttr(node, "purity",fPurity );
   } else {
      gTools().ReadAttr(node, "nS",    tempNSigEvents             );
      gTools().ReadAttr(node, "nB",    tempNBkgEvents             );
      fPurity = tempNSigEvents / (tempNSigEvents + tempNBkgEvents);
   }
   //   }
   gTools().ReadAttr(node, "nType", fNodeType               );
}


//_______________________________________________________________________
void TMVA::DecisionTreeNode::AddAttributesToNode(void* node) const
{
   // add the node attributes to the xml node
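   // For illustration, the attribute set written for one node looks like
   // (values here are hypothetical):
   //   NCoef="0" IVar="2" Cut="1.25" cType="1" res="-99" rms="0" purity="0.8" nType="0"
   // plus one attribute fC<i> per Fisher coefficient when NCoef > 0.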
   gTools().AddAttr(node, "NCoef", GetNFisherCoeff());
   for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++)
      gTools().AddAttr(node, Form("fC%d",i),  this->GetFisherCoeff(i));

   gTools().AddAttr(node, "IVar",  GetSelector());
   gTools().AddAttr(node, "Cut",   GetCutValue());
   gTools().AddAttr(node, "cType", GetCutType());

   //UInt_t analysisType = (dynamic_cast<const TMVA::DecisionTree*>(GetParentTree()) )->GetAnalysisType();
   //   if ( analysisType == TMVA::Types:: kRegression) {
   gTools().AddAttr(node, "res",   GetResponse());
   gTools().AddAttr(node, "rms",   GetRMS());
   //} else if ( analysisType == TMVA::Types::kClassification) {
   gTools().AddAttr(node, "purity",GetPurity());
   //}
   gTools().AddAttr(node, "nType", GetNodeType());
}

//_______________________________________________________________________
void  TMVA::DecisionTreeNode::SetFisherCoeff(Int_t ivar, Double_t coeff)
{
   // set the Fisher coefficient for variable ivar (resize the coefficient vector if needed)
   if ((Int_t) fFisherCoeff.size()<ivar+1) fFisherCoeff.resize(ivar+1) ;
   fFisherCoeff[ivar]=coeff;
}

//_______________________________________________________________________
void TMVA::DecisionTreeNode::AddContentToNode( std::stringstream& /*s*/ ) const
{
   // adding attributes to tree node  (well, was used in BinarySearchTree,
   // and somehow I guess someone programmed it such that we need this in
   // this tree too, although we don't..)
}

//_______________________________________________________________________
void TMVA::DecisionTreeNode::ReadContent( std::stringstream& /*s*/ )
{
   // reading attributes from tree node  (well, was used in BinarySearchTree,
   // and somehow I guess someone programmed it such that we need this in
   // this tree too, although we don't..)
}
//_______________________________________________________________________
TMVA::MsgLogger& TMVA::DecisionTreeNode::Log() {
  TTHREAD_TLS_DECL_ARG(MsgLogger,logger,"DecisionTreeNode");    // static because there is a huge number of nodes...
  return logger;
}