// @(#)root/tmva $Id: DecisionTree.cxx 31458 2009-11-30 13:58:20Z stelzer $
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis       *
 * Package: TMVA                                                                  *
 * Class  : TMVA::DecisionTree                                                    *
 * Web    : http://tmva.sourceforge.net                                           *
 *                                                                                *
 * Description:                                                                   *
 *      Implementation of a Decision Tree                                         *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland              *
 *      Helge Voss      <Helge.Voss@cern.ch>     - MPI-K Heidelberg, Germany      *
 *      Kai Voss        <Kai.Voss@cern.ch>       - U. of Victoria, Canada         *
 *                                                                                *
 * Copyright (c) 2005:                                                            *
 *      CERN, Switzerland                                                         *
 *      U. of Victoria, Canada                                                    *
 *      MPI-K Heidelberg, Germany                                                 *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (http://tmva.sourceforge.net/license.txt)                                      *
 *                                                                                *
 **********************************************************************************/

//_______________________________________________________________________
//
// Implementation of a Decision Tree
//
// In a decision tree successive decision nodes are used to categorize the
// events out of the sample as either signal or background. Each node
// uses only a single discriminating variable to decide if the event is
// signal-like ("goes right") or background-like ("goes left"). This
// forms a tree-like structure with "baskets" at the end (leaf nodes),
// and an event is classified as either signal or background according to
// whether the basket where it ends up has been classified as signal or
// background during the training. Training of a decision tree is the
// process of defining the "cut criteria" for each node. The training
// starts with the root node. Here one takes the full training event
// sample and selects the variable and corresponding cut value that give
// the best separation between signal and background at this stage. Using
// this cut criterion, the sample is then divided into two subsamples, a
// signal-like (right) and a background-like (left) sample. Two new nodes
// are then created, one for each of the two subsamples, and they are
// constructed using the same mechanism as described for the root
// node. The division is stopped once a node has reached either a
// minimum number of events or a minimum or maximum signal purity. These
// leaf nodes are then called "signal" or "background" depending on
// whether they contain more signal or background events from the
// training sample.
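//
// Example (a sketch, not part of the class): typical standalone use for
// classification, assuming the default constructor arguments declared in
// DecisionTree.h and an event sample prepared by the caller:
//
//    std::vector<TMVA::Event*> trainSample;           // filled by the caller
//    TMVA::DecisionTree tree( new TMVA::GiniIndex(),  // separation criterion
//                             20,                     // minSize: min. events per node
//                             20 );                   // nCuts: grid points in the cut scan
//    tree.BuildTree( trainSample );
//    Double_t result = tree.CheckEvent( *trainSample[0], kTRUE ); // +1 or -1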
//_______________________________________________________________________

#include <iostream>
#include <algorithm>
#include <vector>
#include <limits>
#include <fstream>

#include "TRandom3.h"
#include "TMath.h"

#include "TMVA/MsgLogger.h"
#include "TMVA/DecisionTree.h"
#include "TMVA/DecisionTreeNode.h"
#include "TMVA/BinarySearchTree.h"

#include "TMVA/Tools.h"

#include "TMVA/GiniIndex.h"
#include "TMVA/CrossEntropy.h"
#include "TMVA/MisClassificationError.h"
#include "TMVA/SdivSqrtSplusB.h"
#include "TMVA/Event.h"
#include "TMVA/BDTEventWrapper.h"
#include "TMVA/IPruneTool.h"
#include "TMVA/CostComplexityPruneTool.h"
#include "TMVA/ExpectedErrorPruneTool.h"

const Int_t TMVA::DecisionTree::fgRandomSeed = 0; // set nonzero for debugging and zero for random seeds

using std::vector;

ClassImp(TMVA::DecisionTree)

//_______________________________________________________________________
TMVA::DecisionTree::DecisionTree(): 
   BinaryTree(),
   fNvars      (0),
   fNCuts      (-1),
   fSepType    (NULL),
   fMinSize    (0),
   fPruneMethod(kCostComplexityPruning),
   fNodePurityLimit(0.5),
   fRandomisedTree (kFALSE),
   fUseNvars   (0),
   fMyTrandom  (NULL),
   fNNodesMax(999999),
   fMaxDepth(999999),
   fTreeID(0)
{
   // default constructor using the GiniIndex as separation criterion,
   // no restrictions on the minimum number of events in a leaf node or on the
   // separation gain in the node splitting
   
   fLogger->SetSource( "DecisionTree" );
}

//_______________________________________________________________________
TMVA::DecisionTree::DecisionTree( TMVA::SeparationBase *sepType,Int_t minSize, Int_t nCuts, 
                                  Bool_t randomisedTree, Int_t useNvars, UInt_t nNodesMax,
                                  UInt_t nMaxDepth, Int_t iSeed, Float_t purityLimit, Int_t treeID ):
   BinaryTree(),
   fNvars          (0),
   fNCuts          (nCuts),
   fSepType        (sepType),
   fMinSize        (minSize),
   fPruneMethod    (kCostComplexityPruning),
   fNodePurityLimit(purityLimit),
   fRandomisedTree (randomisedTree),
   fUseNvars       (useNvars),
   fMyTrandom      (new TRandom3(iSeed)),
   fNNodesMax      (nNodesMax),
   fMaxDepth       (nMaxDepth),
   fTreeID         (treeID)
{
   // constructor specifying the separation type, the minimum number of
   // events in a node that is still subjected to further splitting, and the
   // number of bins in the grid used for the cut scan in the node
   // splitting.
   
   fLogger->SetSource( "DecisionTree" );

   if (sepType == NULL) { // it is interpreted as a regression tree, where
                          // currently the separation type (simple least square)
                          // cannot be chosen freely
      fAnalysisType = Types::kRegression;
      fRegType = new RegressionVariance();
      if ( nCuts <=0 ) {
         fNCuts = 200;
         Log() << kWARNING << " You had chosen the training mode using optimal cuts\n"
               << " (by setting the option NCuts < 0) rather than a grid. As this\n"
               << " does not exist yet for regression trees, NCuts is set to " << fNCuts
               << " and the grid is used instead." << Endl;
      }
   } else {
      fAnalysisType = Types::kClassification;
   }
}

//_______________________________________________________________________
TMVA::DecisionTree::DecisionTree( const DecisionTree &d ):
   BinaryTree(),
   fNvars      (d.fNvars),
   fNCuts      (d.fNCuts),
   fSepType    (d.fSepType),
   fMinSize    (d.fMinSize),
   fPruneMethod(d.fPruneMethod),
   fNodePurityLimit(d.fNodePurityLimit),
   fRandomisedTree (d.fRandomisedTree),
   fUseNvars       (d.fUseNvars),
   fMyTrandom      (new TRandom3(fgRandomSeed)),  // well, that means it's not an identical copy. But I only ever intend to really copy trees that are "outgrown" already. 
   fNNodesMax  (d.fNNodesMax),
   fMaxDepth   (d.fMaxDepth),
   fTreeID     (d.fTreeID),
   fAnalysisType(d.fAnalysisType)
{
   // copy constructor that creates a true copy, i.e. a completely independent tree;
   // the node copy will recursively copy all the nodes
   this->SetRoot( new DecisionTreeNode ( *((DecisionTreeNode*)(d.GetRoot())) ) );
   this->SetParentTreeInNodes();
   fNNodes = d.fNNodes;
   fLogger->SetSource( "DecisionTree" );
}


//_______________________________________________________________________
TMVA::DecisionTree::~DecisionTree()
{
   // destructor

   // destruction of the tree nodes is done in the "base class" BinaryTree
  
   if (fMyTrandom) delete fMyTrandom;
}

//_______________________________________________________________________
void TMVA::DecisionTree::SetParentTreeInNodes( DecisionTreeNode *n )
{
   // descend the tree and set the parent-tree pointer in each node; the
   // maximum depth reached in the tree is recorded at the same time.
  
   if (n == NULL) { //default, start at the tree top, then descend recursively
      n = (DecisionTreeNode*) this->GetRoot();
      if (n == NULL) {
         Log() << kFATAL << "SetParentTreeNodes: started with undefined ROOT node" <<Endl;
         return ;
      }
   } 
  
   if ((this->GetLeftDaughter(n) == NULL) && (this->GetRightDaughter(n) != NULL) ) {
      Log() << kFATAL << " Node with only one daughter?? Something went wrong" << Endl;
      return;
   }  else if ((this->GetLeftDaughter(n) != NULL) && (this->GetRightDaughter(n) == NULL) ) {
      Log() << kFATAL << " Node with only one daughter?? Something went wrong" << Endl;
      return;
   } 
   else { 
      if (this->GetLeftDaughter(n) != NULL) {
         this->SetParentTreeInNodes( this->GetLeftDaughter(n) );
      }
      if (this->GetRightDaughter(n) != NULL) {
         this->SetParentTreeInNodes( this->GetRightDaughter(n) );
      }
   }
   n->SetParentTree(this);
   if (n->GetDepth() > this->GetTotalTreeDepth()) this->SetTotalTreeDepth(n->GetDepth());
   return;
}

//_______________________________________________________________________
UInt_t TMVA::DecisionTree::BuildTree( const vector<TMVA::Event*> & eventSample,
                                      TMVA::DecisionTreeNode *node ) 
{
   // building the decision tree by recursively calling the splitting of 
   // one (root-) node into two daughter nodes (returns the number of nodes)

   Bool_t IsRootNode=kFALSE;
   if (node==NULL) {
      IsRootNode = kTRUE;
      //start with the root node
      node = new TMVA::DecisionTreeNode();
      fNNodes = 1;   
      this->SetRoot(node);
      // have to use "s" for start as "r" for "root" would be the same as "r" for "right"
      this->GetRoot()->SetPos('s');
      this->GetRoot()->SetDepth(0);
      this->GetRoot()->SetParentTree(this);
   }
  
   UInt_t nevents = eventSample.size();

   if (nevents > 0 ) {
      fNvars = eventSample[0]->GetNVariables();
      fVariableImportance.resize(fNvars);
   }
   else Log() << kFATAL << "<BuildTree> event sample size == 0 " << Endl;
  
   Float_t s=0, b=0;
   Float_t suw=0, buw=0;
   Float_t target=0, target2=0;
   const UInt_t cNvars = fNvars;
   Float_t *xmin = new Float_t[cNvars];
   Float_t *xmax = new Float_t[cNvars];
   for (UInt_t iev=0; iev<eventSample.size(); iev++) {
      const TMVA::Event* evt = eventSample[iev];
      const Float_t weight = evt->GetWeight();
      if (evt->IsSignal()) {
         s += weight;
         suw += 1;
      } 
      else {
         b += weight;
         buw += 1;
      }
      if ( DoRegression() ) {
         const Float_t tgt = evt->GetTarget(0);
         target +=weight*tgt;
         target2+=weight*tgt*tgt;
      }

      for (UInt_t ivar=0; ivar<fNvars; ivar++) {
         const Float_t val = evt->GetValue(ivar);
         if (iev==0) xmin[ivar]=xmax[ivar]=val;
         if (val < xmin[ivar]) xmin[ivar]=val; 
         if (val > xmax[ivar]) xmax[ivar]=val; 
      }
   }
  
   if (s+b < 0) {
      Log() << kWARNING << " One of the Decision Tree nodes has a negative total number of signal or background events. "
            << "(Nsig="<<s<<" Nbkg="<<b<<"). Probably you use a Monte Carlo with negative weights. That should in principle "
            << "be fine as long as on average you end up with something positive. For this you have to make sure that the "
            << "minimum number of (unweighted) events demanded for a tree node (currently you use: nEventsMin="<<fMinSize
            << ", you can set this via the BDT option string when booking the classifier) is large enough to allow for "
            << "reasonable averaging!!!" << Endl
            << " If this does not help ... maybe you want to try the option NoNegWeightsInTraining, which ignores events "
            << "with negative weight in the training." << Endl;
      double nBkg=0.;
      for (UInt_t i=0; i<eventSample.size(); i++) {
         if (!(eventSample[i]->IsSignal())) {
            nBkg += eventSample[i]->GetWeight();
            std::cout << "Event "<< i<< " has (original) weight: " <<  eventSample[i]->GetWeight()/eventSample[i]->GetBoostWeight() 
                      << " boostWeight: " << eventSample[i]->GetBoostWeight() << std::endl;
         }
      }
      std::cout << " that gives in total: " << nBkg<<std::endl;
   } 
  
   node->SetNSigEvents(s);
   node->SetNBkgEvents(b);
   node->SetNSigEvents_unweighted(suw);
   node->SetNBkgEvents_unweighted(buw);
   if (node == this->GetRoot()) {
      node->SetNEvents(s+b);
      node->SetNEvents_unweighted(suw+buw);
   }
   for (UInt_t ivar=0; ivar<fNvars; ivar++) {
      node->SetSampleMin(ivar,xmin[ivar]);
      node->SetSampleMax(ivar,xmax[ivar]);
   }
   delete[] xmin;
   delete[] xmax;
  
   // The minimum number of events is demanded for both daughter nodes. Hence, if the
   // number of events in the parent node is not at least twice fMinSize, there is no
   // need even to attempt a split (e.g. with fMinSize = 10 a node needs at least
   // 20 events before a split is tried).

   //      std::cout << "NNodes="<<fNNodes << " Max="<<fNNodesMax<<std::endl;
   if (eventSample.size() >= 2*fMinSize && fNNodes < fNNodesMax && node->GetDepth() < fMaxDepth) {
      Float_t separationGain;
      if (fNCuts > 0)
         separationGain = this->TrainNodeFast(eventSample, node);
      else
         separationGain = this->TrainNodeFull(eventSample, node);
      
      if (separationGain < std::numeric_limits<double>::epsilon()) { // we could not gain anything, e.g. all events are in one bin, 
         // no cut can actually do anything to improve the node
         // hence, naturally, the current node is a leaf node
         if (DoRegression()) {
            node->SetSeparationIndex(fRegType->GetSeparationIndex(s+b,target,target2));
            node->SetResponse(target/(s+b));
            node->SetRMS(sqrt(target2/(s+b) - target/(s+b)*target/(s+b)));
         }
         else {
            node->SetSeparationIndex(fSepType->GetSeparationIndex(s,b));
         }
         if (node->GetPurity() > fNodePurityLimit) node->SetNodeType(1);
         else node->SetNodeType(-1);
         if (node->GetDepth() > this->GetTotalTreeDepth()) this->SetTotalTreeDepth(node->GetDepth());

      } else {

         vector<TMVA::Event*> leftSample; leftSample.reserve(nevents);
         vector<TMVA::Event*> rightSample; rightSample.reserve(nevents);

         Float_t nRight=0, nLeft=0;

         for (UInt_t ie=0; ie< nevents ; ie++) {
            if (node->GoesRight(*eventSample[ie])) {
               rightSample.push_back(eventSample[ie]);
               nRight += eventSample[ie]->GetWeight();
            }
            else {
               leftSample.push_back(eventSample[ie]);
               nLeft += eventSample[ie]->GetWeight();
            }
         }

      
         // sanity check
         if (leftSample.size() == 0 || rightSample.size() == 0) {
            Log() << kFATAL << "<TrainNode> all events went to the same branch" << Endl
                  << "---                       Hence new node == old node ... check" << Endl
                  << "---                         left:" << leftSample.size()
                  << " right:" << rightSample.size() << Endl
                  << "--- this should never happen, please write a bug report to Helge.Voss@cern.ch"
                  << Endl;
         }
      
         // continue building daughter nodes for the left and the right eventsample
         TMVA::DecisionTreeNode *rightNode = new TMVA::DecisionTreeNode(node,'r');
         fNNodes++;
         //         rightNode->SetPos('r');
         //         rightNode->SetDepth( node->GetDepth() + 1 );
         rightNode->SetNEvents(nRight);
         rightNode->SetNEvents_unweighted(rightSample.size());
      
         TMVA::DecisionTreeNode *leftNode = new TMVA::DecisionTreeNode(node,'l');
         fNNodes++;
         //         leftNode->SetPos('l');
         //         leftNode->SetDepth( node->GetDepth() + 1 );
         leftNode->SetNEvents(nLeft);
         leftNode->SetNEvents_unweighted(leftSample.size());
      
         node->SetNodeType(0);
         node->SetLeft(leftNode);
         node->SetRight(rightNode);

         this->BuildTree(rightSample, rightNode);
         this->BuildTree(leftSample,  leftNode );
      }
   } 
   else{ // it is a leaf node
      if (DoRegression()) {
         node->SetSeparationIndex(fRegType->GetSeparationIndex(s+b,target,target2));
         node->SetResponse(target/(s+b));
         node->SetRMS(sqrt(target2/(s+b) - target/(s+b)*target/(s+b)));
      }
      else {
         node->SetSeparationIndex(fSepType->GetSeparationIndex(s,b));
      }

      if (node->GetPurity() > fNodePurityLimit) node->SetNodeType(1);
      else node->SetNodeType(-1);

      if (node->GetDepth() > this->GetTotalTreeDepth()) this->SetTotalTreeDepth(node->GetDepth());
   }
  
   //   if (IsRootNode) this->CleanTree();
   return fNNodes;
}

//_______________________________________________________________________
void TMVA::DecisionTree::FillTree( vector<TMVA::Event*> & eventSample )
  
{
   // fill the existing decision tree structure by passing the events
   // through from the top node and seeing where they end up
   for (UInt_t i=0; i<eventSample.size(); i++) {
      this->FillEvent(*(eventSample[i]),NULL);
   }
}

//_______________________________________________________________________
void TMVA::DecisionTree::FillEvent( TMVA::Event & event,  
                                    TMVA::DecisionTreeNode *node )
{
   // pass a single event through the existing decision tree structure,
   // starting from the top node, and record where it ends up
  
   if (node == NULL) { // that's the start, take the Root node
      node = (TMVA::DecisionTreeNode*)this->GetRoot();
   }
  
   node->IncrementNEvents( event.GetWeight() );
   node->IncrementNEvents_unweighted( );
  
   if (event.IsSignal()) {
      node->IncrementNSigEvents( event.GetWeight() );
      node->IncrementNSigEvents_unweighted( );
   } 
   else {
      node->IncrementNBkgEvents( event.GetWeight() );
      node->IncrementNBkgEvents_unweighted( );
   }
   node->SetSeparationIndex(fSepType->GetSeparationIndex(node->GetNSigEvents(),
                                                         node->GetNBkgEvents()));
  
   if (node->GetNodeType() == 0) { //intermediate node --> go down
      if (node->GoesRight(event))
         this->FillEvent(event,(TMVA::DecisionTreeNode*)(node->GetRight())) ;
      else
         this->FillEvent(event,(TMVA::DecisionTreeNode*)(node->GetLeft())) ;
   }
  
  
}

//_______________________________________________________________________
void TMVA::DecisionTree::ClearTree()
{
   // clear the tree nodes (their S/N, Nevents etc), just keep the structure of the tree
  
   if (this->GetRoot()!=NULL) 
      ((DecisionTreeNode*)(this->GetRoot()))->ClearNodeAndAllDaughters();
  
}

//_______________________________________________________________________
UInt_t TMVA::DecisionTree::CleanTree( DecisionTreeNode *node )
{
   // remove those last splits that result in two leaf nodes that
   // are both of the same type (i.e. both signal or both background);
   // this is of course only a reasonable thing to do when you use
   // "YesOrNo" leaves, while it might lose something if you use the
   // purity information in the nodes.
   // --> hence it is not called automatically in the tree building


   if (node==NULL) {
      node = (DecisionTreeNode *)this->GetRoot();
   }
  
   DecisionTreeNode *l = (DecisionTreeNode*)node->GetLeft();
   DecisionTreeNode *r = (DecisionTreeNode*)node->GetRight();
   if (node->GetNodeType() == 0) {
      this->CleanTree(l);
      this->CleanTree(r);
      if (l->GetNodeType() * r->GetNodeType() > 0) {
         this->PruneNode(node);
      }
   }
   
   // update the number of nodes after the cleaning
   return this->CountNodes();
   
}

//_______________________________________________________________________
Double_t TMVA::DecisionTree::PruneTree( vector<Event*>* validationSample )
{
   // prune (get rid of internal nodes) the decision tree to avoid overtraining;
   // several different pruning methods can be applied, as selected by the
   // variable "fPruneMethod".
  
   //   std::ofstream logfile("dt_pruning.log");
  
   IPruneTool* tool(NULL);
   PruningInfo* info(NULL);

   if (fPruneMethod == kNoPruning) return 0.0;

   if      (fPruneMethod == kExpectedErrorPruning)
      //      tool = new ExpectedErrorPruneTool(logfile);
      tool = new ExpectedErrorPruneTool();
   else if (fPruneMethod == kCostComplexityPruning)
      tool = new CostComplexityPruneTool();
   else {
      Log() << kFATAL << "Selected pruning method not yet implemented" << Endl;
   }
   if(!tool) return 0.0;

   tool->SetPruneStrength(GetPruneStrength());
   if(tool->IsAutomatic()) {
      if (validationSample == NULL || validationSample->size() == 0)
         Log() << kFATAL << "Cannot automate the pruning algorithm without an "
               << "independent validation sample!" << Endl;
   }

   info = tool->CalculatePruningInfo(this,validationSample);
   if(!info) {
      delete tool;
      Log() << kFATAL << "Error pruning tree! Check prune.log for more information." 
            << Endl;
   }
   Double_t pruneStrength = info->PruneStrength;

   //   Log() << kDEBUG << "Optimal prune strength (alpha): " << pruneStrength
   //           << " has quality index " << info->QualityIndex << Endl;
   

   for (UInt_t i = 0; i < info->PruneSequence.size(); ++i) {
      //      std::cout << "bla : Pruning everything below node " << std::endl;
      //      info->PruneSequence[i]->Print(std::cout);
      
      PruneNode(info->PruneSequence[i]);
   }
   // update the number of nodes after the pruning
   this->CountNodes();

   delete tool;
   delete info;

   //   this->CleanTree();

   return pruneStrength;
}
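
// Example (sketch): how a caller might prune an overgrown tree using an
// independent validation sample. SetPruneMethod and SetPruneStrength are
// assumed to be the setters declared in DecisionTree.h; a negative prune
// strength is assumed to select the automatic optimisation of the strength.
//
//    tree.SetPruneMethod( TMVA::DecisionTree::kCostComplexityPruning );
//    tree.SetPruneStrength( -1.0 );
//    Double_t alpha = tree.PruneTree( &validationEvents );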


//_______________________________________________________________________
void TMVA::DecisionTree::ApplyValidationSample( const EventList* validationSample ) const
{
   // run the validation sample through the (pruned) tree and fill in the
   // variables NSValidation and NBValidation in each node (i.e. how many of the
   // signal and background events from the validation sample end up there).
   // This is then used later when asking for the "tree quality" ..
   ((DecisionTreeNode*)GetRoot())->ResetValidationData();
   for (UInt_t ievt=0; ievt < validationSample->size(); ievt++) {
      CheckEventWithPrunedTree(*(*validationSample)[ievt]);
   }
}

//_______________________________________________________________________
Double_t TMVA::DecisionTree::TestPrunedTreeQuality( const DecisionTreeNode* n, Int_t mode ) const
{
   // return the misclassification rate of a pruned tree.
   // a "pruned tree" may have the variable "IsTerminal" set at an arbitrary
   // node, so this tree-quality testing stops there, and hence tests
   // the pruned tree (while the full tree is still in place for normal/later use)
   
   if (n == NULL) { // default, start at the tree top, then descend recursively
      n = (DecisionTreeNode*) this->GetRoot();
      if (n == NULL) {
         Log() << kFATAL << "TestPrunedTreeQuality: started with undefined ROOT node" <<Endl;
         return 0;
      }
   } 

   if( n->GetLeftDaughter() != NULL && n->GetRightDaughter() != NULL && !n->IsTerminal() ) {
      return (TestPrunedTreeQuality( n->GetLeftDaughter(), mode ) +
              TestPrunedTreeQuality( n->GetRightDaughter(), mode ));
   }
   else { // terminal leaf (in a pruned subtree of T_max at least)
      if (DoRegression()) {
         Float_t sumw = n->GetNSValidation() + n->GetNBValidation();
         return n->GetSumTarget2() - 2*n->GetSumTarget()*n->GetResponse() + sumw*n->GetResponse()*n->GetResponse();
      } 
      else {
         if (mode == 0) {
            if (n->GetPurity() > this->GetNodePurityLimit()) // this is a signal leaf, according to the training
               return n->GetNBValidation();
            else
               return n->GetNSValidation();
         }
         else if ( mode == 1 ) {
            // calculate the weighted error using the pruning validation sample
            return (n->GetPurity() * n->GetNBValidation() + (1.0 - n->GetPurity()) * n->GetNSValidation());
         }
         else {
            throw std::string("Unknown ValidationQualityMode");
         }
      }
   }
}

//_______________________________________________________________________
void TMVA::DecisionTree::CheckEventWithPrunedTree( const Event& e ) const
{
   // pass a single validation event through a pruned decision tree;
   // on the way down the tree, fill in all the "intermediate" information
   // that would normally be there from training.

   DecisionTreeNode* current = (DecisionTreeNode*) this->GetRoot();
   if (current == NULL) {
      Log() << kFATAL << "CheckEventWithPrunedTree: started with undefined ROOT node" <<Endl;
   }

   while(current != NULL) {
      if(e.IsSignal())
         current->SetNSValidation(current->GetNSValidation() + e.GetWeight());
      else
         current->SetNBValidation(current->GetNBValidation() + e.GetWeight());

      if (e.GetNTargets() > 0) {
         current->AddToSumTarget(e.GetWeight()*e.GetTarget(0));
         current->AddToSumTarget2(e.GetWeight()*e.GetTarget(0)*e.GetTarget(0));
      }

      if (current->GetRight() == NULL || current->GetLeft() == NULL) {
         current = NULL;
      }
      else {
         if (current->GoesRight(e))
            current = (TMVA::DecisionTreeNode*)current->GetRight();
         else
            current = (TMVA::DecisionTreeNode*)current->GetLeft();
      }
   }
}

//_______________________________________________________________________
Float_t TMVA::DecisionTree::GetSumWeights( const EventList* validationSample ) const
{
   // calculate the normalization factor for a pruning validation sample
   Float_t sumWeights = 0.0;
   for( EventList::const_iterator it = validationSample->begin();
        it != validationSample->end(); ++it ) {
      sumWeights += (*it)->GetWeight();
   }
   return sumWeights;
}



//_______________________________________________________________________
UInt_t TMVA::DecisionTree::CountLeafNodes( TMVA::DecisionTreeNode *n )
{
   // return the number of terminal nodes in the sub-tree below Node n
  
   if (n == NULL) { // default, start at the tree top, then descend recursively
      n = (DecisionTreeNode*) this->GetRoot();
      if (n == NULL) {
         Log() << kFATAL << "CountLeafNodes: started with undefined ROOT node" <<Endl;
         return 0;
      }
   } 
  
   UInt_t countLeafs=0;
  
   if ((this->GetLeftDaughter(n) == NULL) && (this->GetRightDaughter(n) == NULL) ) {
      countLeafs += 1;
   } 
   else { 
      if (this->GetLeftDaughter(n) != NULL) {
         countLeafs += this->CountLeafNodes( this->GetLeftDaughter(n) );
      }
      if (this->GetRightDaughter(n) != NULL) {
         countLeafs += this->CountLeafNodes( this->GetRightDaughter(n) );
      }
   }
   return countLeafs;
}

//_______________________________________________________________________
void TMVA::DecisionTree::DescendTree( DecisionTreeNode* n )
{
   // descend a tree to find all its leaf nodes
  
   if (n == NULL) { // default, start at the tree top, then descend recursively
      n = (DecisionTreeNode*) this->GetRoot();
      if (n == NULL) {
         Log() << kFATAL << "DescendTree: started with undefined ROOT node" <<Endl;
         return ;
      }
   } 
  
   if ((this->GetLeftDaughter(n) == NULL) && (this->GetRightDaughter(n) == NULL) ) {
      // do nothing
   } 
   else if ((this->GetLeftDaughter(n) == NULL) && (this->GetRightDaughter(n) != NULL) ) {
      Log() << kFATAL << " Node with only one daughter?? Something went wrong" << Endl;
      return;
   }  
   else if ((this->GetLeftDaughter(n) != NULL) && (this->GetRightDaughter(n) == NULL) ) {
      Log() << kFATAL << " Node with only one daughter?? Something went wrong" << Endl;
      return;
   } 
   else { 
      if (this->GetLeftDaughter(n) != NULL) {
         this->DescendTree( this->GetLeftDaughter(n) );
      }
      if (this->GetRightDaughter(n) != NULL) {
         this->DescendTree( this->GetRightDaughter(n) );
      }
   }
}

//_______________________________________________________________________
void TMVA::DecisionTree::PruneNode( DecisionTreeNode* node )
{
   // prune away the subtree below the node 
   DecisionTreeNode *l = (DecisionTreeNode*)node->GetLeft();
   DecisionTreeNode *r = (DecisionTreeNode*)node->GetRight();
  
   node->SetRight(NULL);
   node->SetLeft(NULL);
   node->SetSelector(-1);
   node->SetSeparationGain(-1);
   if (node->GetPurity() > fNodePurityLimit) node->SetNodeType(1);
   else node->SetNodeType(-1);
   this->DeleteNode(l);
   this->DeleteNode(r);
   // update the stored number of nodes in the Tree
   this->CountNodes();
  
}

//_______________________________________________________________________
void TMVA::DecisionTree::PruneNodeInPlace( DecisionTreeNode* node ) {
   // prune a node temporarily (without actually deleting its descendants),
   // which allows testing the pruned tree quality for many different
   // pruning stages without "touching" the tree.

   if(node == NULL) return;
   node->SetNTerminal(1);
   node->SetSubTreeR( node->GetNodeR() );
   node->SetAlpha( std::numeric_limits<double>::infinity( ) );
   node->SetAlphaMinSubtree( std::numeric_limits<double>::infinity( ) );
   node->SetTerminal(kTRUE); // set the node to be terminal without deleting its descendants FIXME not needed
}

//_______________________________________________________________________
TMVA::DecisionTreeNode* TMVA::DecisionTree::GetLeftDaughter( DecisionTreeNode* n)
{
   // return the left daughter of the current node "n"
   return (DecisionTreeNode*) n->GetLeft();
}

//_______________________________________________________________________
TMVA::DecisionTreeNode* TMVA::DecisionTree::GetRightDaughter( DecisionTreeNode *n)
{
   // return the right daughter of the current node "n"
   return (DecisionTreeNode*) n->GetRight();
}

//_______________________________________________________________________
TMVA::DecisionTreeNode* TMVA::DecisionTree::GetNode( ULong_t sequence, UInt_t depth )
{
   // retrieve node from the tree. Its position (up to a maximal tree depth of 64)
   // is coded as a sequence of left-right moves starting from the root, stored as
   // 0-1 bit patterns in the "long-integer" sequence (i.e. 0:left ; 1:right)
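   //
   // Example (sketch): sequence = 5 (binary 101) with depth = 3 walks
   // root -> right (bit 0 = 1) -> left (bit 1 = 0) -> right (bit 2 = 1)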
  
   DecisionTreeNode* current = (DecisionTreeNode*) this->GetRoot();
  
   for (UInt_t i =0;  i < depth; i++) {
      ULong_t tmp = ULong_t(1) << i;
      if ( tmp & sequence) current = this->GetRightDaughter(current);
      else current = this->GetLeftDaughter(current);
   }
  
   return current;
}


//_______________________________________________________________________
Float_t TMVA::DecisionTree::TrainNodeFast( const vector<TMVA::Event*> & eventSample,
                                           TMVA::DecisionTreeNode *node )
{
   // Decide how to split a node using one of the variables that gives
   // the best separation of signal/background. In order to do this, for each
   // variable a scan over a grid of fNCuts different cut values is
   // performed, and the resulting separation gains are compared.
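   //
   // Example (sketch): with fNCuts = 2 the scan uses nBins = fNCuts+1 = 3
   // equal-width bins between the sample min and max of each variable, i.e.
   // candidate cut values at xmin + (xmax-xmin)/3 and xmin + 2*(xmax-xmin)/3.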

   Float_t separationGain = -1, sepTmp;
   Float_t cutValue=-999;
   Int_t mxVar=-1, cutIndex=0;
   Bool_t cutType=kTRUE;
   Float_t  nTotS, nTotB;
   Int_t     nTotS_unWeighted, nTotB_unWeighted; 
   UInt_t nevents = eventSample.size();

  
   const UInt_t nBins = fNCuts+1;
   const UInt_t cNvars = fNvars;

   Float_t** nSelS = new Float_t* [cNvars];
   Float_t** nSelB = new Float_t* [cNvars];
   Float_t** nSelS_unWeighted = new Float_t* [cNvars];
   Float_t** nSelB_unWeighted = new Float_t* [cNvars];
   Float_t** target = new Float_t* [cNvars];
   Float_t** target2 = new Float_t* [cNvars];
   Float_t** cutValues = new Float_t* [cNvars];

   for (UInt_t i=0; i<cNvars; i++) {
      nSelS[i] = new Float_t [nBins];
      nSelB[i] = new Float_t [nBins];
      nSelS_unWeighted[i] = new Float_t [nBins];
      nSelB_unWeighted[i] = new Float_t [nBins];
      target[i] = new Float_t [nBins];
      target2[i] = new Float_t [nBins];
      cutValues[i] = new Float_t [nBins];
   }

   Float_t *xmin = new Float_t[cNvars]; 
   Float_t *xmax = new Float_t[cNvars];

   Bool_t *useVariable = new Bool_t[fNvars];   // for performance reasons instead of vector<Bool_t> useVariable(fNvars);

   for (UInt_t ivar=0; ivar < fNvars; ivar++) {
      useVariable[ivar]=kFALSE;
      xmin[ivar]=node->GetSampleMin(ivar);
      xmax[ivar]=node->GetSampleMax(ivar);
      
      for (UInt_t ibin=0; ibin<nBins; ibin++) {
         nSelS[ivar][ibin]=0;
         nSelB[ivar][ibin]=0;
         nSelS_unWeighted[ivar][ibin]=0;
         nSelB_unWeighted[ivar][ibin]=0;
         target[ivar][ibin]=0;
         target2[ivar][ibin]=0;
         cutValues[ivar][ibin]=0;
      }
   }

   if (fRandomisedTree) { // choose for each node splitting a random subset of variables to choose from
      if (fUseNvars==0) { // no number specified ... choose something which hopefully works well
         if (fNvars < 12) fUseNvars = TMath::Max(2,Int_t( Float_t(fNvars) / 2.5 ));
         else if (fNvars < 40) fUseNvars = Int_t( Float_t(fNvars) / 5 );
         else fUseNvars = Int_t( Float_t(fNvars) / 10 );
      }
      Int_t nSelectedVars = 0;
      while (nSelectedVars < fUseNvars) {
         Double_t randVar = fMyTrandom->Rndm()*fNvars;
         useVariable[Int_t (randVar)] = kTRUE;
         nSelectedVars = 0;
         for (UInt_t ivar=0; ivar < fNvars; ivar++) {
            if (useVariable[ivar] == kTRUE) nSelectedVars++;
         }
      }
   } 
   else {
      for (UInt_t ivar=0; ivar < fNvars; ivar++) useVariable[ivar] = kTRUE;
   }

   for (UInt_t ivar=0; ivar < fNvars; ivar++) {
      if ( useVariable[ivar] ) {
         
         //set the grid for the cut scan on the variables like this:
         // 
         //  |       |        |         |         |   ...      |        |  
         // xmin                                                       xmax
         //
         // cut      0        1         2         3   ...     fNCuts-1 (counting from zero)
         // bin  0       1         2         3       .....      nBins-1=fNCuts (counting from zero)
         // --> nBins = fNCuts+1
         // (NOTE: the cuts at xmin or xmax would just give the whole sample and
         //  hence can safely be omitted)
         
         Float_t istepSize =( xmax[ivar] - xmin[ivar] ) / Float_t(nBins);
         for (Int_t icut=0; icut<fNCuts; icut++) {
            cutValues[ivar][icut]=xmin[ivar]+(Float_t(icut+1))*istepSize;
         }
      }
   }
  
   nTotS=0; nTotB=0;
   nTotS_unWeighted=0; nTotB_unWeighted=0;   
   for (UInt_t iev=0; iev<nevents; iev++) {
      Bool_t eventType = eventSample[iev]->IsSignal();
      Float_t eventWeight =  eventSample[iev]->GetWeight(); 
      if (eventType) {
         nTotS+=eventWeight;
         nTotS_unWeighted++;
      }
      else {
         nTotB+=eventWeight;
         nTotB_unWeighted++;
      }
      
      Int_t iBin=-1;
      for (UInt_t ivar=0; ivar < fNvars; ivar++) {
         // now scan through the cuts for each variable and find which one gives
         // the best separationGain at the current stage.
         if ( useVariable[ivar] ) {
            Float_t eventData = eventSample[iev]->GetValue(ivar); 
            // "maximum" is nbins-1 (the "-1" because we start counting from 0 !!
            iBin = TMath::Min(Int_t(nBins-1),TMath::Max(0,int (nBins*(eventData-xmin[ivar])/(xmax[ivar]-xmin[ivar]) ) ));
            if (eventType) {
               nSelS[ivar][iBin]+=eventWeight;
               nSelS_unWeighted[ivar][iBin]++;
            } 
            else {
               nSelB[ivar][iBin]+=eventWeight;
               nSelB_unWeighted[ivar][iBin]++;
            }
            if (DoRegression()) {
               target[ivar][iBin] +=eventWeight*eventSample[iev]->GetTarget(0);
               target2[ivar][iBin]+=eventWeight*eventSample[iev]->GetTarget(0)*eventSample[iev]->GetTarget(0);
            }
         }
      }
   }
   // now turn the "histogram" into a cumulative distribution
   for (UInt_t ivar=0; ivar < fNvars; ivar++) {
      if (useVariable[ivar]) {
         for (UInt_t ibin=1; ibin < nBins; ibin++) {
            nSelS[ivar][ibin]+=nSelS[ivar][ibin-1];
            nSelS_unWeighted[ivar][ibin]+=nSelS_unWeighted[ivar][ibin-1];
            nSelB[ivar][ibin]+=nSelB[ivar][ibin-1];
            nSelB_unWeighted[ivar][ibin]+=nSelB_unWeighted[ivar][ibin-1];
            if (DoRegression()) {
               target[ivar][ibin] +=target[ivar][ibin-1] ;
               target2[ivar][ibin]+=target2[ivar][ibin-1];
            }
         }
         if (nSelS_unWeighted[ivar][nBins-1] +nSelB_unWeighted[ivar][nBins-1] != eventSample.size()) {
            Log() << kFATAL << "Helge, you have a bug ....nSelS_unw..+nSelB_unw..= "
                  << nSelS_unWeighted[ivar][nBins-1] +nSelB_unWeighted[ivar][nBins-1] 
                  << " while eventsample size = " << eventSample.size()
                  << Endl;
         }
         double lastBins=nSelS[ivar][nBins-1] +nSelB[ivar][nBins-1];
         double totalSum=nTotS+nTotB;
         if (TMath::Abs(lastBins-totalSum)/totalSum>0.01) {
            Log() << kFATAL << "Helge, you have another bug ....nSelS+nSelB= "
                  << lastBins
                  << " while total number of events = " << totalSum
                  << Endl;
         }
      }
   }
   
   // now select the optimal cuts for each variable and find which one gives
   // the best separationGain at the current stage
   for (UInt_t ivar=0; ivar < fNvars; ivar++) {
      if (useVariable[ivar]) {
         for (UInt_t iBin=0; iBin<nBins-1; iBin++) { // the last bin contains "all events" -->skip
            // the separationGain is defined via the various indices (Gini, CrossEntropy, etc.)
            // calculated by the "SamplePurities" from the branches that would go to the
            // left or the right from this node if "these" cuts were used in the Node:
            // hereby: nSelS and nSelB would go to the right branch
            //        (nTotS - nSelS) + (nTotB - nSelB)  would go to the left branch;

            // only allow splits where both daughter nodes match the specified minimum number.
            // for this use the "unweighted" events, as you are interested in statistically
            // significant splits, which is determined by the actual number of entries
            // for a node, rather than the sum of event weights.

            Double_t sl = nSelS_unWeighted[ivar][iBin];
            Double_t bl = nSelB_unWeighted[ivar][iBin];
            Double_t s  = nTotS_unWeighted;
            Double_t b  = nTotB_unWeighted;
            Double_t sr = s-sl;
            Double_t br = b-bl;
            if ( (sl+bl)>=fMinSize && (sr+br)>=fMinSize ) {

               if (DoRegression()) {
                  sepTmp = fRegType->GetSeparationGain(nSelS[ivar][iBin]+nSelB[ivar][iBin], 
                                                       target[ivar][iBin],target2[ivar][iBin],
                                                       nTotS+nTotB,
                                                       target[ivar][nBins-1],target2[ivar][nBins-1]);
               } else {
                  sepTmp = fSepType->GetSeparationGain(nSelS[ivar][iBin], nSelB[ivar][iBin], nTotS, nTotB);
               }
               if (separationGain < sepTmp) {
                  separationGain = sepTmp;
                  mxVar = ivar;
                  cutIndex = iBin;
                  if (cutIndex >= fNCuts) Log()<<kFATAL<<"ibin for cut out of range: " << iBin << Endl;
               }
            }
         }
      }
   }
   
   if (DoRegression()) {
      node->SetSeparationIndex(fRegType->GetSeparationIndex(nTotS+nTotB,target[0][nBins-1],target2[0][nBins-1]));
      node->SetResponse(target[0][nBins-1]/(nTotS+nTotB));
      node->SetRMS(sqrt(target2[0][nBins-1]/(nTotS+nTotB) - target[0][nBins-1]/(nTotS+nTotB)*target[0][nBins-1]/(nTotS+nTotB)));
   }
   else {
      node->SetSeparationIndex(fSepType->GetSeparationIndex(nTotS,nTotB));
   }
   if (mxVar >= 0) { 
      if (nSelS[mxVar][cutIndex]/nTotS > nSelB[mxVar][cutIndex]/nTotB) cutType=kTRUE;
      else cutType=kFALSE;
      cutValue = cutValues[mxVar][cutIndex];
    
      node->SetSelector((UInt_t)mxVar);
      node->SetCutValue(cutValue);
      node->SetCutType(cutType);
      node->SetSeparationGain(separationGain);

      fVariableImportance[mxVar] += separationGain*separationGain * (nTotS+nTotB) * (nTotS+nTotB) ;
   }
   else {
      separationGain = 0;
   }
  

   for (UInt_t i=0; i<cNvars; i++) {
      delete [] nSelS[i];
      delete [] nSelB[i];
      delete [] nSelS_unWeighted[i];
      delete [] nSelB_unWeighted[i];
      delete [] target[i];
      delete [] target2[i];
      delete [] cutValues[i];
   }
   delete [] nSelS;
   delete [] nSelB;
   delete [] nSelS_unWeighted;
   delete [] nSelB_unWeighted;
   delete [] target;
   delete [] target2;
   delete [] cutValues;

   delete [] xmin;
   delete [] xmax;

   delete [] useVariable;

   return separationGain;
}

//_______________________________________________________________________
Float_t TMVA::DecisionTree::TrainNodeFull( const vector<TMVA::Event*> & eventSample,
                                           TMVA::DecisionTreeNode *node )
{
  
   // train a node by finding the single optimal cut for a single variable
   // that best separates signal and background (maximizes the separation gain)
  
   Float_t nTotS = 0.0, nTotB = 0.0;
   Int_t nTotS_unWeighted = 0, nTotB_unWeighted = 0;  
  
   vector<TMVA::BDTEventWrapper> bdtEventSample;
  
   // List of optimal cuts, separation gains, and cut types (removed background or signal) - one for each variable
   vector<Float_t> lCutValue( fNvars, 0.0 );
   vector<Float_t> lSepGain( fNvars, -1.0e6 );
   vector<Char_t> lCutType( fNvars ); // <----- bool is stored (for performance reasons, no vector<bool> is used)
   lCutType.assign( fNvars, Char_t(kFALSE) );
  
   // Initialize (un)weighted counters for signal & background
   // Construct a list of event wrappers that point to the original data
   for( vector<TMVA::Event*>::const_iterator it = eventSample.begin(); it != eventSample.end(); ++it ) {
      if( (*it)->IsSignal() ) { // signal or background event
         nTotS += (*it)->GetWeight();
         ++nTotS_unWeighted;
      }
      else {
         nTotB += (*it)->GetWeight();
         ++nTotB_unWeighted;
      }
      bdtEventSample.push_back(TMVA::BDTEventWrapper(*it));
   }
  
   vector<Char_t> useVariable(fNvars); // <----- bool is stored (for performance reasons, no vector<bool> is used)
   useVariable.assign( fNvars, Char_t(kFALSE) );
   if (fRandomisedTree) { // choose for each node splitting a random subset of variables to choose from
      if (fUseNvars ==0 ) { // no number specified ... choose something which hopefully works well
         if (fNvars < 12) fUseNvars = TMath::Max(2,Int_t( Float_t(fNvars) / 2.5 ));
         else if (fNvars < 40) fUseNvars = Int_t( Float_t(fNvars) / 5 );
         else fUseNvars = Int_t( Float_t(fNvars) / 10 );
      }
      Int_t nSelectedVars = 0;
      while (nSelectedVars < fUseNvars) {
         Double_t randVar = fMyTrandom->Rndm()*fNvars;
         useVariable[Int_t (randVar)] = Char_t(kTRUE);
         nSelectedVars = 0;
         for (UInt_t ivar=0; ivar < fNvars; ivar++) {
            if(useVariable[ivar] == Char_t(kTRUE)) nSelectedVars++;
         }
      }
   } 
   else {
      for (UInt_t ivar=0; ivar < fNvars; ivar++) useVariable[ivar] = Char_t(kTRUE);
   }
  
   for( UInt_t ivar = 0; ivar < fNvars; ivar++ ) { // loop over all discriminating variables
      if(!useVariable[ivar]) continue; // only optimize with selected variables
      TMVA::BDTEventWrapper::SetVarIndex(ivar); // select the variable to sort by
      std::sort( bdtEventSample.begin(),bdtEventSample.end() ); // sort the event data 
    
      Float_t bkgWeightCtr = 0.0, sigWeightCtr = 0.0;
      vector<TMVA::BDTEventWrapper>::iterator it = bdtEventSample.begin(), it_end = bdtEventSample.end();
      for( ; it != it_end; ++it ) {
         if( (**it)->IsSignal() ) // specify signal or background event
            sigWeightCtr += (**it)->GetWeight();
         else 
            bkgWeightCtr += (**it)->GetWeight(); 
         // Store the accumulated signal (background) weights
         it->SetCumulativeWeight(false,bkgWeightCtr); 
         it->SetCumulativeWeight(true,sigWeightCtr);
      }
    
      const Float_t fPMin = 1.0e-6;
      Bool_t cutType = kFALSE;
      Long64_t index = 0;
      Float_t separationGain = -1.0, sepTmp = 0.0, cutValue = 0.0, dVal = 0.0, norm = 0.0;
      // Locate the optimal cut for this (ivar-th) variable
      for( it = bdtEventSample.begin(); it != it_end; ++it ) {
         if( index == 0 ) { ++index; continue; }
         if( *(*it) == NULL ) {
            Log() << kFATAL << "In TrainNodeFull(): have a null event! Where index=" 
                  << index << ", and parent node=" << node->GetParent() << Endl;
            break;
         }
         dVal = bdtEventSample[index].GetVal() - bdtEventSample[index-1].GetVal();
         norm = TMath::Abs(bdtEventSample[index].GetVal() + bdtEventSample[index-1].GetVal());
         // Only allow splits where both daughter nodes have the specified minimum number of events.
         // Splits are only sensible when the data are ordered (e.g. don't split inside a sequence of 0's)
         if( index >= fMinSize && (nTotS_unWeighted + nTotB_unWeighted) - index >= fMinSize && TMath::Abs(dVal/(0.5*norm + 1)) > fPMin ) {
            sepTmp = fSepType->GetSeparationGain( it->GetCumulativeWeight(true), it->GetCumulativeWeight(false), sigWeightCtr, bkgWeightCtr );
            if( sepTmp > separationGain ) {
               separationGain = sepTmp;
               cutValue = it->GetVal() - 0.5*dVal; 
               Float_t nSelS = it->GetCumulativeWeight(true);
               Float_t nSelB = it->GetCumulativeWeight(false);
               // Indicate whether this cut is improving the node purity by removing background (enhancing signal)
               // or by removing signal (enhancing background)
               if( nSelS/sigWeightCtr > nSelB/bkgWeightCtr ) cutType = kTRUE; 
               else cutType = kFALSE; 
            }
         }
         ++index;
      }
      lCutType[ivar] = Char_t(cutType);
      lCutValue[ivar] = cutValue;
      lSepGain[ivar] = separationGain;
   }
  
   Float_t separationGain = -1.0;
   Int_t iVarIndex = -1;
   for( UInt_t ivar = 0; ivar < fNvars; ivar++ ) {
      if( lSepGain[ivar] > separationGain ) {
         iVarIndex = ivar;
         separationGain = lSepGain[ivar];
      }
   }
  
   if(iVarIndex >= 0) {
      node->SetSelector(iVarIndex);
      node->SetCutValue(lCutValue[iVarIndex]);
      node->SetSeparationGain(lSepGain[iVarIndex]);
      node->SetCutType(lCutType[iVarIndex]);
    
      fVariableImportance[iVarIndex] += separationGain*separationGain * (nTotS+nTotB) * (nTotS+nTotB);
   }
   else {
      separationGain = 0.0;
   }
  
   return separationGain;
}

//___________________________________________________________________________________
TMVA::DecisionTreeNode* TMVA::DecisionTree::GetEventNode(const TMVA::Event & e) const
{
   // get the pointer to the leaf node where a particular event ends up
   // (used in gradient boosting)

   TMVA::DecisionTreeNode *current = (TMVA::DecisionTreeNode*)this->GetRoot();
   while(current->GetNodeType() == 0) { // intermediate node in a tree
      current = (current->GoesRight(e)) ?
         (TMVA::DecisionTreeNode*)current->GetRight() :
         (TMVA::DecisionTreeNode*)current->GetLeft();
   }
   return current;
}

//_______________________________________________________________________
Double_t TMVA::DecisionTree::CheckEvent( const TMVA::Event & e, Bool_t UseYesNoLeaf ) const
{
   // the event e is put into the decision tree (starting at the root node)
   // and the output is the NodeType (signal or background) of the final node (basket)
   // in which the given event ends up, i.e. the result of the classification of
   // the event by this decision tree.
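   //
   // Example (sketch): with UseYesNoLeaf = kTRUE the return value is the leaf
   // type, +1 (signal) or -1 (background); with kFALSE it is the leaf purity,
   // a number in [0,1]. For regression trees the leaf response is returned.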
  
   TMVA::DecisionTreeNode *current = (TMVA::DecisionTreeNode*)this->GetRoot();
   if (!current)
      Log() << kFATAL << "CheckEvent: started with undefined ROOT node" <<Endl;

   while (current->GetNodeType() == 0) { // intermediate node in a (pruned) tree
      current = (current->GoesRight(e)) ? 
         (TMVA::DecisionTreeNode*)current->GetRight() :
         (TMVA::DecisionTreeNode*)current->GetLeft();
      if (!current) {
         Log() << kFATAL << "DT::CheckEvent: inconsistent tree structure" <<Endl;
      }

   }
  
   if ( DoRegression() ){
      return current->GetResponse();
   } 
   else {
      if (UseYesNoLeaf) return Float_t ( current->GetNodeType() );
      else              return current->GetPurity();
   }
}

//_______________________________________________________________________
Float_t  TMVA::DecisionTree::SamplePurity( vector<TMVA::Event*> eventSample )
{
   // calculates the purity S/(S+B) of a given event sample
  
   Float_t sumsig=0, sumbkg=0, sumtot=0;
   for (UInt_t ievt=0; ievt<eventSample.size(); ievt++) {
      if (!(eventSample[ievt]->IsSignal())) sumbkg+=eventSample[ievt]->GetWeight();
      if ((eventSample[ievt]->IsSignal())) sumsig+=eventSample[ievt]->GetWeight();
      sumtot+=eventSample[ievt]->GetWeight();
   }
   // sanity check
   if (sumtot!= (sumsig+sumbkg)){
      Log() << kFATAL << "<SamplePurity> sumtot != sumsig+sumbkg"
            << sumtot << " " << sumsig << " " << sumbkg << Endl;
   }
   if (sumtot>0) return sumsig/(sumsig + sumbkg);
   else return -1;
}

//_______________________________________________________________________
vector< Double_t >  TMVA::DecisionTree::GetVariableImportance()
{
   // Return the relative variable importance, normalized such that the
   // importances of all variables together add up to one. The importance is
   // evaluated as the total separation gain that this variable achieved in
   // the decision tree (weighted by the number of events)
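   //
   // In formula form (a sketch of what is accumulated during training): for
   // variable i,
   //    I_i = sum over all nodes that split on i of ( separationGain^2 * (nS+nB)^2 )
   // which is then normalised below such that sum_i I_i = 1.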
  
   vector<Double_t> relativeImportance(fNvars);
   Float_t  sum=0;
   for (UInt_t i=0; i< fNvars; i++) {
      sum += fVariableImportance[i];
      relativeImportance[i] = fVariableImportance[i];
   } 
  
   for (UInt_t i=0; i< fNvars; i++) {
      if (sum > std::numeric_limits<double>::epsilon())
         relativeImportance[i] /= sum;
      else 
         relativeImportance[i] = 0;
   } 
   return relativeImportance;
}

//_______________________________________________________________________
Double_t  TMVA::DecisionTree::GetVariableImportance( UInt_t ivar )
{
   // returns the relative importance of variable ivar
  
   vector<Double_t> relativeImportance = this->GetVariableImportance();
   if (ivar < fNvars) return relativeImportance[ivar];
   else {
      Log() << kFATAL << "<GetVariableImportance>" << Endl
            << "---                     ivar = " << ivar << " is out of range " << Endl;
   }
  
   return -1;
}

 DecisionTree.cxx:389
 DecisionTree.cxx:390
 DecisionTree.cxx:391
 DecisionTree.cxx:392
 DecisionTree.cxx:393
 DecisionTree.cxx:394
 DecisionTree.cxx:395
 DecisionTree.cxx:396
 DecisionTree.cxx:397
 DecisionTree.cxx:398
 DecisionTree.cxx:399
 DecisionTree.cxx:400
 DecisionTree.cxx:401
 DecisionTree.cxx:402
 DecisionTree.cxx:403
 DecisionTree.cxx:404
 DecisionTree.cxx:405
 DecisionTree.cxx:406
 DecisionTree.cxx:407
 DecisionTree.cxx:408
 DecisionTree.cxx:409
 DecisionTree.cxx:410
 DecisionTree.cxx:411
 DecisionTree.cxx:412
 DecisionTree.cxx:413
 DecisionTree.cxx:414
 DecisionTree.cxx:415
 DecisionTree.cxx:416
 DecisionTree.cxx:417
 DecisionTree.cxx:418
 DecisionTree.cxx:419
 DecisionTree.cxx:420
 DecisionTree.cxx:421
 DecisionTree.cxx:422
 DecisionTree.cxx:423
 DecisionTree.cxx:424
 DecisionTree.cxx:425
 DecisionTree.cxx:426
 DecisionTree.cxx:427
 DecisionTree.cxx:428
 DecisionTree.cxx:429
 DecisionTree.cxx:430
 DecisionTree.cxx:431
 DecisionTree.cxx:432
 DecisionTree.cxx:433
 DecisionTree.cxx:434
 DecisionTree.cxx:435
 DecisionTree.cxx:436
 DecisionTree.cxx:437
 DecisionTree.cxx:438
 DecisionTree.cxx:439
 DecisionTree.cxx:440
 DecisionTree.cxx:441
 DecisionTree.cxx:442
 DecisionTree.cxx:443
 DecisionTree.cxx:444
 DecisionTree.cxx:445
 DecisionTree.cxx:446
 DecisionTree.cxx:447
 DecisionTree.cxx:448
 DecisionTree.cxx:449
 DecisionTree.cxx:450
 DecisionTree.cxx:451
 DecisionTree.cxx:452
 DecisionTree.cxx:453
 DecisionTree.cxx:454
 DecisionTree.cxx:455
 DecisionTree.cxx:456
 DecisionTree.cxx:457
 DecisionTree.cxx:458
 DecisionTree.cxx:459
 DecisionTree.cxx:460
 DecisionTree.cxx:461
 DecisionTree.cxx:462
 DecisionTree.cxx:463
 DecisionTree.cxx:464
 DecisionTree.cxx:465
 DecisionTree.cxx:466
 DecisionTree.cxx:467
 DecisionTree.cxx:468
 DecisionTree.cxx:469
 DecisionTree.cxx:470
 DecisionTree.cxx:471
 DecisionTree.cxx:472
 DecisionTree.cxx:473
 DecisionTree.cxx:474
 DecisionTree.cxx:475
 DecisionTree.cxx:476
 DecisionTree.cxx:477
 DecisionTree.cxx:478
 DecisionTree.cxx:479
 DecisionTree.cxx:480
 DecisionTree.cxx:481
 DecisionTree.cxx:482
 DecisionTree.cxx:483
 DecisionTree.cxx:484
 DecisionTree.cxx:485
 DecisionTree.cxx:486
 DecisionTree.cxx:487
 DecisionTree.cxx:488
 DecisionTree.cxx:489
 DecisionTree.cxx:490
 DecisionTree.cxx:491
 DecisionTree.cxx:492
 DecisionTree.cxx:493
 DecisionTree.cxx:494
 DecisionTree.cxx:495
 DecisionTree.cxx:496
 DecisionTree.cxx:497
 DecisionTree.cxx:498
 DecisionTree.cxx:499
 DecisionTree.cxx:500
 DecisionTree.cxx:501
 DecisionTree.cxx:502
 DecisionTree.cxx:503
 DecisionTree.cxx:504
 DecisionTree.cxx:505
 DecisionTree.cxx:506
 DecisionTree.cxx:507
 DecisionTree.cxx:508
 DecisionTree.cxx:509
 DecisionTree.cxx:510
 DecisionTree.cxx:511
 DecisionTree.cxx:512
 DecisionTree.cxx:513
 DecisionTree.cxx:514
 DecisionTree.cxx:515
 DecisionTree.cxx:516
 DecisionTree.cxx:517
 DecisionTree.cxx:518
 DecisionTree.cxx:519
 DecisionTree.cxx:520
 DecisionTree.cxx:521
 DecisionTree.cxx:522
 DecisionTree.cxx:523
 DecisionTree.cxx:524
 DecisionTree.cxx:525
 DecisionTree.cxx:526
 DecisionTree.cxx:527
 DecisionTree.cxx:528
 DecisionTree.cxx:529
 DecisionTree.cxx:530
 DecisionTree.cxx:531
 DecisionTree.cxx:532
 DecisionTree.cxx:533
 DecisionTree.cxx:534
 DecisionTree.cxx:535
 DecisionTree.cxx:536
 DecisionTree.cxx:537
 DecisionTree.cxx:538
 DecisionTree.cxx:539
 DecisionTree.cxx:540
 DecisionTree.cxx:541
 DecisionTree.cxx:542
 DecisionTree.cxx:543
 DecisionTree.cxx:544
 DecisionTree.cxx:545
 DecisionTree.cxx:546
 DecisionTree.cxx:547
 DecisionTree.cxx:548
 DecisionTree.cxx:549
 DecisionTree.cxx:550
 DecisionTree.cxx:551
 DecisionTree.cxx:552
 DecisionTree.cxx:553
 DecisionTree.cxx:554
 DecisionTree.cxx:555
 DecisionTree.cxx:556
 DecisionTree.cxx:557
 DecisionTree.cxx:558
 DecisionTree.cxx:559
 DecisionTree.cxx:560
 DecisionTree.cxx:561
 DecisionTree.cxx:562
 DecisionTree.cxx:563
 DecisionTree.cxx:564
 DecisionTree.cxx:565
 DecisionTree.cxx:566
 DecisionTree.cxx:567
 DecisionTree.cxx:568
 DecisionTree.cxx:569
 DecisionTree.cxx:570
 DecisionTree.cxx:571
 DecisionTree.cxx:572
 DecisionTree.cxx:573
 DecisionTree.cxx:574
 DecisionTree.cxx:575
 DecisionTree.cxx:576
 DecisionTree.cxx:577
 DecisionTree.cxx:578
 DecisionTree.cxx:579
 DecisionTree.cxx:580
 DecisionTree.cxx:581
 DecisionTree.cxx:582
 DecisionTree.cxx:583
 DecisionTree.cxx:584
 DecisionTree.cxx:585
 DecisionTree.cxx:586
 DecisionTree.cxx:587
 DecisionTree.cxx:588
 DecisionTree.cxx:589
 DecisionTree.cxx:590
 DecisionTree.cxx:591
 DecisionTree.cxx:592
 DecisionTree.cxx:593
 DecisionTree.cxx:594
 DecisionTree.cxx:595
 DecisionTree.cxx:596
 DecisionTree.cxx:597
 DecisionTree.cxx:598
 DecisionTree.cxx:599
 DecisionTree.cxx:600
 DecisionTree.cxx:601
 DecisionTree.cxx:602
 DecisionTree.cxx:603
 DecisionTree.cxx:604
 DecisionTree.cxx:605
 DecisionTree.cxx:606
 DecisionTree.cxx:607
 DecisionTree.cxx:608
 DecisionTree.cxx:609
 DecisionTree.cxx:610
 DecisionTree.cxx:611
 DecisionTree.cxx:612
 DecisionTree.cxx:613
 DecisionTree.cxx:614
 DecisionTree.cxx:615
 DecisionTree.cxx:616
 DecisionTree.cxx:617
 DecisionTree.cxx:618
 DecisionTree.cxx:619
 DecisionTree.cxx:620
 DecisionTree.cxx:621
 DecisionTree.cxx:622
 DecisionTree.cxx:623
 DecisionTree.cxx:624
 DecisionTree.cxx:625
 DecisionTree.cxx:626
 DecisionTree.cxx:627
 DecisionTree.cxx:628
 DecisionTree.cxx:629
 DecisionTree.cxx:630
 DecisionTree.cxx:631
 DecisionTree.cxx:632
 DecisionTree.cxx:633
 DecisionTree.cxx:634
 DecisionTree.cxx:635
 DecisionTree.cxx:636
 DecisionTree.cxx:637
 DecisionTree.cxx:638
 DecisionTree.cxx:639
 DecisionTree.cxx:640
 DecisionTree.cxx:641
 DecisionTree.cxx:642
 DecisionTree.cxx:643
 DecisionTree.cxx:644
 DecisionTree.cxx:645
 DecisionTree.cxx:646
 DecisionTree.cxx:647
 DecisionTree.cxx:648
 DecisionTree.cxx:649
 DecisionTree.cxx:650
 DecisionTree.cxx:651
 DecisionTree.cxx:652
 DecisionTree.cxx:653
 DecisionTree.cxx:654
 DecisionTree.cxx:655
 DecisionTree.cxx:656
 DecisionTree.cxx:657
 DecisionTree.cxx:658
 DecisionTree.cxx:659
 DecisionTree.cxx:660
 DecisionTree.cxx:661
 DecisionTree.cxx:662
 DecisionTree.cxx:663
 DecisionTree.cxx:664
 DecisionTree.cxx:665
 DecisionTree.cxx:666
 DecisionTree.cxx:667
 DecisionTree.cxx:668
 DecisionTree.cxx:669
 DecisionTree.cxx:670
 DecisionTree.cxx:671
 DecisionTree.cxx:672
 DecisionTree.cxx:673
 DecisionTree.cxx:674
 DecisionTree.cxx:675
 DecisionTree.cxx:676
 DecisionTree.cxx:677
 DecisionTree.cxx:678
 DecisionTree.cxx:679
 DecisionTree.cxx:680
 DecisionTree.cxx:681
 DecisionTree.cxx:682
 DecisionTree.cxx:683
 DecisionTree.cxx:684
 DecisionTree.cxx:685
 DecisionTree.cxx:686
 DecisionTree.cxx:687
 DecisionTree.cxx:688
 DecisionTree.cxx:689
 DecisionTree.cxx:690
 DecisionTree.cxx:691
 DecisionTree.cxx:692
 DecisionTree.cxx:693
 DecisionTree.cxx:694
 DecisionTree.cxx:695
 DecisionTree.cxx:696
 DecisionTree.cxx:697
 DecisionTree.cxx:698
 DecisionTree.cxx:699
 DecisionTree.cxx:700
 DecisionTree.cxx:701
 DecisionTree.cxx:702
 DecisionTree.cxx:703
 DecisionTree.cxx:704
 DecisionTree.cxx:705
 DecisionTree.cxx:706
 DecisionTree.cxx:707
 DecisionTree.cxx:708
 DecisionTree.cxx:709
 DecisionTree.cxx:710
 DecisionTree.cxx:711
 DecisionTree.cxx:712
 DecisionTree.cxx:713
 DecisionTree.cxx:714
 DecisionTree.cxx:715
 DecisionTree.cxx:716
 DecisionTree.cxx:717
 DecisionTree.cxx:718
 DecisionTree.cxx:719
 DecisionTree.cxx:720
 DecisionTree.cxx:721
 DecisionTree.cxx:722
 DecisionTree.cxx:723
 DecisionTree.cxx:724
 DecisionTree.cxx:725
 DecisionTree.cxx:726
 DecisionTree.cxx:727
 DecisionTree.cxx:728
 DecisionTree.cxx:729
 DecisionTree.cxx:730
 DecisionTree.cxx:731
 DecisionTree.cxx:732
 DecisionTree.cxx:733
 DecisionTree.cxx:734
 DecisionTree.cxx:735
 DecisionTree.cxx:736
 DecisionTree.cxx:737
 DecisionTree.cxx:738
 DecisionTree.cxx:739
 DecisionTree.cxx:740
 DecisionTree.cxx:741
 DecisionTree.cxx:742
 DecisionTree.cxx:743
 DecisionTree.cxx:744
 DecisionTree.cxx:745
 DecisionTree.cxx:746
 DecisionTree.cxx:747
 DecisionTree.cxx:748
 DecisionTree.cxx:749
 DecisionTree.cxx:750
 DecisionTree.cxx:751
 DecisionTree.cxx:752
 DecisionTree.cxx:753
 DecisionTree.cxx:754
 DecisionTree.cxx:755
 DecisionTree.cxx:756
 DecisionTree.cxx:757
 DecisionTree.cxx:758
 DecisionTree.cxx:759
 DecisionTree.cxx:760
 DecisionTree.cxx:761
 DecisionTree.cxx:762
 DecisionTree.cxx:763
 DecisionTree.cxx:764
 DecisionTree.cxx:765
 DecisionTree.cxx:766
 DecisionTree.cxx:767
 DecisionTree.cxx:768
 DecisionTree.cxx:769
 DecisionTree.cxx:770
 DecisionTree.cxx:771
 DecisionTree.cxx:772
 DecisionTree.cxx:773
 DecisionTree.cxx:774
 DecisionTree.cxx:775
 DecisionTree.cxx:776
 DecisionTree.cxx:777
 DecisionTree.cxx:778
 DecisionTree.cxx:779
 DecisionTree.cxx:780
 DecisionTree.cxx:781
 DecisionTree.cxx:782
 DecisionTree.cxx:783
 DecisionTree.cxx:784
 DecisionTree.cxx:785
 DecisionTree.cxx:786
 DecisionTree.cxx:787
 DecisionTree.cxx:788
 DecisionTree.cxx:789
 DecisionTree.cxx:790
 DecisionTree.cxx:791
 DecisionTree.cxx:792
 DecisionTree.cxx:793
 DecisionTree.cxx:794
 DecisionTree.cxx:795
 DecisionTree.cxx:796
 DecisionTree.cxx:797
 DecisionTree.cxx:798
 DecisionTree.cxx:799
 DecisionTree.cxx:800
 DecisionTree.cxx:801
 DecisionTree.cxx:802
 DecisionTree.cxx:803
 DecisionTree.cxx:804
 DecisionTree.cxx:805
 DecisionTree.cxx:806
 DecisionTree.cxx:807
 DecisionTree.cxx:808
 DecisionTree.cxx:809
 DecisionTree.cxx:810
 DecisionTree.cxx:811
 DecisionTree.cxx:812
 DecisionTree.cxx:813
 DecisionTree.cxx:814
 DecisionTree.cxx:815
 DecisionTree.cxx:816
 DecisionTree.cxx:817
 DecisionTree.cxx:818
 DecisionTree.cxx:819
 DecisionTree.cxx:820
 DecisionTree.cxx:821
 DecisionTree.cxx:822
 DecisionTree.cxx:823
 DecisionTree.cxx:824
 DecisionTree.cxx:825
 DecisionTree.cxx:826
 DecisionTree.cxx:827
 DecisionTree.cxx:828
 DecisionTree.cxx:829
 DecisionTree.cxx:830
 DecisionTree.cxx:831
 DecisionTree.cxx:832
 DecisionTree.cxx:833
 DecisionTree.cxx:834
 DecisionTree.cxx:835
 DecisionTree.cxx:836
 DecisionTree.cxx:837
 DecisionTree.cxx:838
 DecisionTree.cxx:839
 DecisionTree.cxx:840
 DecisionTree.cxx:841
 DecisionTree.cxx:842
 DecisionTree.cxx:843
 DecisionTree.cxx:844
 DecisionTree.cxx:845
 DecisionTree.cxx:846
 DecisionTree.cxx:847
 DecisionTree.cxx:848
 DecisionTree.cxx:849
 DecisionTree.cxx:850
 DecisionTree.cxx:851
 DecisionTree.cxx:852
 DecisionTree.cxx:853
 DecisionTree.cxx:854
 DecisionTree.cxx:855
 DecisionTree.cxx:856
 DecisionTree.cxx:857
 DecisionTree.cxx:858
 DecisionTree.cxx:859
 DecisionTree.cxx:860
 DecisionTree.cxx:861
 DecisionTree.cxx:862
 DecisionTree.cxx:863
 DecisionTree.cxx:864
 DecisionTree.cxx:865
 DecisionTree.cxx:866
 DecisionTree.cxx:867
 DecisionTree.cxx:868
 DecisionTree.cxx:869
 DecisionTree.cxx:870
 DecisionTree.cxx:871
 DecisionTree.cxx:872
 DecisionTree.cxx:873
 DecisionTree.cxx:874
 DecisionTree.cxx:875
 DecisionTree.cxx:876
 DecisionTree.cxx:877
 DecisionTree.cxx:878
 DecisionTree.cxx:879
 DecisionTree.cxx:880
 DecisionTree.cxx:881
 DecisionTree.cxx:882
 DecisionTree.cxx:883
 DecisionTree.cxx:884
 DecisionTree.cxx:885
 DecisionTree.cxx:886
 DecisionTree.cxx:887
 DecisionTree.cxx:888
 DecisionTree.cxx:889
 DecisionTree.cxx:890
 DecisionTree.cxx:891
 DecisionTree.cxx:892
 DecisionTree.cxx:893
 DecisionTree.cxx:894
 DecisionTree.cxx:895
 DecisionTree.cxx:896
 DecisionTree.cxx:897
 DecisionTree.cxx:898
 DecisionTree.cxx:899
 DecisionTree.cxx:900
 DecisionTree.cxx:901
 DecisionTree.cxx:902
 DecisionTree.cxx:903
 DecisionTree.cxx:904
 DecisionTree.cxx:905
 DecisionTree.cxx:906
 DecisionTree.cxx:907
 DecisionTree.cxx:908
 DecisionTree.cxx:909
 DecisionTree.cxx:910
 DecisionTree.cxx:911
 DecisionTree.cxx:912
 DecisionTree.cxx:913
 DecisionTree.cxx:914
 DecisionTree.cxx:915
 DecisionTree.cxx:916
 DecisionTree.cxx:917
 DecisionTree.cxx:918
 DecisionTree.cxx:919
 DecisionTree.cxx:920
 DecisionTree.cxx:921
 DecisionTree.cxx:922
 DecisionTree.cxx:923
 DecisionTree.cxx:924
 DecisionTree.cxx:925
 DecisionTree.cxx:926
 DecisionTree.cxx:927
 DecisionTree.cxx:928
 DecisionTree.cxx:929
 DecisionTree.cxx:930
 DecisionTree.cxx:931
 DecisionTree.cxx:932
 DecisionTree.cxx:933
 DecisionTree.cxx:934
 DecisionTree.cxx:935
 DecisionTree.cxx:936
 DecisionTree.cxx:937
 DecisionTree.cxx:938
 DecisionTree.cxx:939
 DecisionTree.cxx:940
 DecisionTree.cxx:941
 DecisionTree.cxx:942
 DecisionTree.cxx:943
 DecisionTree.cxx:944
 DecisionTree.cxx:945
 DecisionTree.cxx:946
 DecisionTree.cxx:947
 DecisionTree.cxx:948
 DecisionTree.cxx:949
 DecisionTree.cxx:950
 DecisionTree.cxx:951
 DecisionTree.cxx:952
 DecisionTree.cxx:953
 DecisionTree.cxx:954
 DecisionTree.cxx:955
 DecisionTree.cxx:956
 DecisionTree.cxx:957
 DecisionTree.cxx:958
 DecisionTree.cxx:959
 DecisionTree.cxx:960
 DecisionTree.cxx:961
 DecisionTree.cxx:962
 DecisionTree.cxx:963
 DecisionTree.cxx:964
 DecisionTree.cxx:965
 DecisionTree.cxx:966
 DecisionTree.cxx:967
 DecisionTree.cxx:968
 DecisionTree.cxx:969
 DecisionTree.cxx:970
 DecisionTree.cxx:971
 DecisionTree.cxx:972
 DecisionTree.cxx:973
 DecisionTree.cxx:974
 DecisionTree.cxx:975
 DecisionTree.cxx:976
 DecisionTree.cxx:977
 DecisionTree.cxx:978
 DecisionTree.cxx:979
 DecisionTree.cxx:980
 DecisionTree.cxx:981
 DecisionTree.cxx:982
 DecisionTree.cxx:983
 DecisionTree.cxx:984
 DecisionTree.cxx:985
 DecisionTree.cxx:986
 DecisionTree.cxx:987
 DecisionTree.cxx:988
 DecisionTree.cxx:989
 DecisionTree.cxx:990
 DecisionTree.cxx:991
 DecisionTree.cxx:992
 DecisionTree.cxx:993
 DecisionTree.cxx:994
 DecisionTree.cxx:995
 DecisionTree.cxx:996
 DecisionTree.cxx:997
 DecisionTree.cxx:998
 DecisionTree.cxx:999
 DecisionTree.cxx:1000
 DecisionTree.cxx:1001
 DecisionTree.cxx:1002
 DecisionTree.cxx:1003
 DecisionTree.cxx:1004
 DecisionTree.cxx:1005
 DecisionTree.cxx:1006
 DecisionTree.cxx:1007
 DecisionTree.cxx:1008
 DecisionTree.cxx:1009
 DecisionTree.cxx:1010
 DecisionTree.cxx:1011
 DecisionTree.cxx:1012
 DecisionTree.cxx:1013
 DecisionTree.cxx:1014
 DecisionTree.cxx:1015
 DecisionTree.cxx:1016
 DecisionTree.cxx:1017
 DecisionTree.cxx:1018
 DecisionTree.cxx:1019
 DecisionTree.cxx:1020
 DecisionTree.cxx:1021
 DecisionTree.cxx:1022
 DecisionTree.cxx:1023
 DecisionTree.cxx:1024
 DecisionTree.cxx:1025
 DecisionTree.cxx:1026
 DecisionTree.cxx:1027
 DecisionTree.cxx:1028
 DecisionTree.cxx:1029
 DecisionTree.cxx:1030
 DecisionTree.cxx:1031
 DecisionTree.cxx:1032
 DecisionTree.cxx:1033
 DecisionTree.cxx:1034
 DecisionTree.cxx:1035
 DecisionTree.cxx:1036
 DecisionTree.cxx:1037
 DecisionTree.cxx:1038
 DecisionTree.cxx:1039
 DecisionTree.cxx:1040
 DecisionTree.cxx:1041
 DecisionTree.cxx:1042
 DecisionTree.cxx:1043
 DecisionTree.cxx:1044
 DecisionTree.cxx:1045
 DecisionTree.cxx:1046
 DecisionTree.cxx:1047
 DecisionTree.cxx:1048
 DecisionTree.cxx:1049
 DecisionTree.cxx:1050
 DecisionTree.cxx:1051
 DecisionTree.cxx:1052
 DecisionTree.cxx:1053
 DecisionTree.cxx:1054
 DecisionTree.cxx:1055
 DecisionTree.cxx:1056
 DecisionTree.cxx:1057
 DecisionTree.cxx:1058
 DecisionTree.cxx:1059
 DecisionTree.cxx:1060
 DecisionTree.cxx:1061
 DecisionTree.cxx:1062
 DecisionTree.cxx:1063
 DecisionTree.cxx:1064
 DecisionTree.cxx:1065
 DecisionTree.cxx:1066
 DecisionTree.cxx:1067
 DecisionTree.cxx:1068
 DecisionTree.cxx:1069
 DecisionTree.cxx:1070
 DecisionTree.cxx:1071
 DecisionTree.cxx:1072
 DecisionTree.cxx:1073
 DecisionTree.cxx:1074
 DecisionTree.cxx:1075
 DecisionTree.cxx:1076
 DecisionTree.cxx:1077
 DecisionTree.cxx:1078
 DecisionTree.cxx:1079
 DecisionTree.cxx:1080
 DecisionTree.cxx:1081
 DecisionTree.cxx:1082
 DecisionTree.cxx:1083
 DecisionTree.cxx:1084
 DecisionTree.cxx:1085
 DecisionTree.cxx:1086
 DecisionTree.cxx:1087
 DecisionTree.cxx:1088
 DecisionTree.cxx:1089
 DecisionTree.cxx:1090
 DecisionTree.cxx:1091
 DecisionTree.cxx:1092
 DecisionTree.cxx:1093
 DecisionTree.cxx:1094
 DecisionTree.cxx:1095
 DecisionTree.cxx:1096
 DecisionTree.cxx:1097
 DecisionTree.cxx:1098
 DecisionTree.cxx:1099
 DecisionTree.cxx:1100
 DecisionTree.cxx:1101
 DecisionTree.cxx:1102
 DecisionTree.cxx:1103
 DecisionTree.cxx:1104
 DecisionTree.cxx:1105
 DecisionTree.cxx:1106
 DecisionTree.cxx:1107
 DecisionTree.cxx:1108
 DecisionTree.cxx:1109
 DecisionTree.cxx:1110
 DecisionTree.cxx:1111
 DecisionTree.cxx:1112
 DecisionTree.cxx:1113
 DecisionTree.cxx:1114
 DecisionTree.cxx:1115
 DecisionTree.cxx:1116
 DecisionTree.cxx:1117
 DecisionTree.cxx:1118
 DecisionTree.cxx:1119
 DecisionTree.cxx:1120
 DecisionTree.cxx:1121
 DecisionTree.cxx:1122
 DecisionTree.cxx:1123
 DecisionTree.cxx:1124
 DecisionTree.cxx:1125
 DecisionTree.cxx:1126
 DecisionTree.cxx:1127
 DecisionTree.cxx:1128
 DecisionTree.cxx:1129
 DecisionTree.cxx:1130
 DecisionTree.cxx:1131
 DecisionTree.cxx:1132
 DecisionTree.cxx:1133
 DecisionTree.cxx:1134
 DecisionTree.cxx:1135
 DecisionTree.cxx:1136
 DecisionTree.cxx:1137
 DecisionTree.cxx:1138
 DecisionTree.cxx:1139
 DecisionTree.cxx:1140
 DecisionTree.cxx:1141
 DecisionTree.cxx:1142
 DecisionTree.cxx:1143
 DecisionTree.cxx:1144
 DecisionTree.cxx:1145
 DecisionTree.cxx:1146
 DecisionTree.cxx:1147
 DecisionTree.cxx:1148
 DecisionTree.cxx:1149
 DecisionTree.cxx:1150
 DecisionTree.cxx:1151
 DecisionTree.cxx:1152
 DecisionTree.cxx:1153
 DecisionTree.cxx:1154
 DecisionTree.cxx:1155
 DecisionTree.cxx:1156
 DecisionTree.cxx:1157
 DecisionTree.cxx:1158
 DecisionTree.cxx:1159
 DecisionTree.cxx:1160
 DecisionTree.cxx:1161
 DecisionTree.cxx:1162
 DecisionTree.cxx:1163
 DecisionTree.cxx:1164
 DecisionTree.cxx:1165
 DecisionTree.cxx:1166
 DecisionTree.cxx:1167
 DecisionTree.cxx:1168
 DecisionTree.cxx:1169
 DecisionTree.cxx:1170
 DecisionTree.cxx:1171
 DecisionTree.cxx:1172
 DecisionTree.cxx:1173
 DecisionTree.cxx:1174
 DecisionTree.cxx:1175
 DecisionTree.cxx:1176
 DecisionTree.cxx:1177
 DecisionTree.cxx:1178
 DecisionTree.cxx:1179
 DecisionTree.cxx:1180
 DecisionTree.cxx:1181
 DecisionTree.cxx:1182
 DecisionTree.cxx:1183
 DecisionTree.cxx:1184
 DecisionTree.cxx:1185
 DecisionTree.cxx:1186
 DecisionTree.cxx:1187
 DecisionTree.cxx:1188
 DecisionTree.cxx:1189
 DecisionTree.cxx:1190
 DecisionTree.cxx:1191
 DecisionTree.cxx:1192
 DecisionTree.cxx:1193
 DecisionTree.cxx:1194
 DecisionTree.cxx:1195
 DecisionTree.cxx:1196
 DecisionTree.cxx:1197
 DecisionTree.cxx:1198
 DecisionTree.cxx:1199
 DecisionTree.cxx:1200
 DecisionTree.cxx:1201
 DecisionTree.cxx:1202
 DecisionTree.cxx:1203
 DecisionTree.cxx:1204
 DecisionTree.cxx:1205
 DecisionTree.cxx:1206
 DecisionTree.cxx:1207
 DecisionTree.cxx:1208
 DecisionTree.cxx:1209
 DecisionTree.cxx:1210
 DecisionTree.cxx:1211
 DecisionTree.cxx:1212
 DecisionTree.cxx:1213
 DecisionTree.cxx:1214
 DecisionTree.cxx:1215
 DecisionTree.cxx:1216
 DecisionTree.cxx:1217
 DecisionTree.cxx:1218
 DecisionTree.cxx:1219
 DecisionTree.cxx:1220
 DecisionTree.cxx:1221
 DecisionTree.cxx:1222
 DecisionTree.cxx:1223
 DecisionTree.cxx:1224
 DecisionTree.cxx:1225
 DecisionTree.cxx:1226
 DecisionTree.cxx:1227
 DecisionTree.cxx:1228
 DecisionTree.cxx:1229
 DecisionTree.cxx:1230
 DecisionTree.cxx:1231
 DecisionTree.cxx:1232
 DecisionTree.cxx:1233
 DecisionTree.cxx:1234
 DecisionTree.cxx:1235
 DecisionTree.cxx:1236
 DecisionTree.cxx:1237
 DecisionTree.cxx:1238
 DecisionTree.cxx:1239
 DecisionTree.cxx:1240
 DecisionTree.cxx:1241
 DecisionTree.cxx:1242
 DecisionTree.cxx:1243
 DecisionTree.cxx:1244
 DecisionTree.cxx:1245
 DecisionTree.cxx:1246
 DecisionTree.cxx:1247
 DecisionTree.cxx:1248
 DecisionTree.cxx:1249
 DecisionTree.cxx:1250
 DecisionTree.cxx:1251
 DecisionTree.cxx:1252
 DecisionTree.cxx:1253
 DecisionTree.cxx:1254
 DecisionTree.cxx:1255
 DecisionTree.cxx:1256
 DecisionTree.cxx:1257
 DecisionTree.cxx:1258
 DecisionTree.cxx:1259
 DecisionTree.cxx:1260
 DecisionTree.cxx:1261
 DecisionTree.cxx:1262
 DecisionTree.cxx:1263
 DecisionTree.cxx:1264
 DecisionTree.cxx:1265
 DecisionTree.cxx:1266
 DecisionTree.cxx:1267
 DecisionTree.cxx:1268
 DecisionTree.cxx:1269
 DecisionTree.cxx:1270
 DecisionTree.cxx:1271
 DecisionTree.cxx:1272
 DecisionTree.cxx:1273
 DecisionTree.cxx:1274
 DecisionTree.cxx:1275
 DecisionTree.cxx:1276
 DecisionTree.cxx:1277
 DecisionTree.cxx:1278
 DecisionTree.cxx:1279
 DecisionTree.cxx:1280
 DecisionTree.cxx:1281
 DecisionTree.cxx:1282
 DecisionTree.cxx:1283
 DecisionTree.cxx:1284
 DecisionTree.cxx:1285
 DecisionTree.cxx:1286
 DecisionTree.cxx:1287
 DecisionTree.cxx:1288
 DecisionTree.cxx:1289
 DecisionTree.cxx:1290
 DecisionTree.cxx:1291
 DecisionTree.cxx:1292
 DecisionTree.cxx:1293
 DecisionTree.cxx:1294
 DecisionTree.cxx:1295
 DecisionTree.cxx:1296
 DecisionTree.cxx:1297
 DecisionTree.cxx:1298
 DecisionTree.cxx:1299
 DecisionTree.cxx:1300
 DecisionTree.cxx:1301
 DecisionTree.cxx:1302
 DecisionTree.cxx:1303
 DecisionTree.cxx:1304