// @(#)root/tmva $Id$
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss, Eckhard von Toerne, Jan Therhaag

/**********************************************************************************
 * Project: TMVA - a ROOT-integrated toolkit for multivariate data analysis       *
 * Package: TMVA                                                                  *
 * Class  : TMVA::DecisionTree                                                    *
 * Web    : http://tmva.sourceforge.net                                           *
 *                                                                                *
 * Description:                                                                   *
 *      Implementation of a Decision Tree                                         *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland              *
 *      Helge Voss      <Helge.Voss@cern.ch>     - MPI-K Heidelberg, Germany      *
 *      Kai Voss        <Kai.Voss@cern.ch>       - U. of Victoria, Canada         *
 *      Eckhard v. Toerne <evt@uni-bonn.de>      - U. of Bonn, Germany             *
 *      Jan Therhaag    <Jan.Therhaag@cern.ch>   - U. of Bonn, Germany             *
 *                                                                                *
 * Copyright (c) 2005-2011:                                                       *
 *      CERN, Switzerland                                                         *
 *      U. of Victoria, Canada                                                    *
 *      MPI-K Heidelberg, Germany                                                 *
 *      U. of Bonn, Germany                                                       *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (http://tmva.sourceforge.net/license.txt)                                      *
 *                                                                                *
 **********************************************************************************/

//_______________________________________________________________________
//
// Implementation of a Decision Tree
//
// In a decision tree successive decision nodes are used to categorize the
// events out of the sample as either signal or background. Each node
// uses only a single discriminating variable to decide if the event is
// signal-like ("goes right") or background-like ("goes left"). This
// forms a tree-like structure with "baskets" at the end (leaf nodes),
// and an event is classified as either signal or background according to
// whether the basket where it ends up has been classified as signal or
// background during the training. Training of a decision tree is the
// process of defining the "cut criteria" for each node. The training
// starts with the root node, where one takes the full training event
// sample and selects the variable and corresponding cut value that give
// the best separation between signal and background at this stage. Using
// this cut criterion, the sample is then divided into two subsamples, a
// signal-like (right) and a background-like (left) sample. Two new nodes
// are then created, one for each of the two subsamples, and they are
// constructed using the same mechanism as described for the root
// node. The division is stopped once a certain node has reached either a
// minimum number of events, or a minimum or maximum signal purity. These
// leaf nodes are then called "signal" or "background" depending on
// whether they contain more signal or background events, respectively,
// from the training sample.
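//
// A minimal usage sketch (illustrative only; it assumes a prepared vector of
// training events "eventSample" and a DataSetInfo object "dsi", as they are
// set up by the TMVA framework, e.g. inside MethodBDT):
//
//    TMVA::DecisionTree dt( new TMVA::GiniIndex(), // separation criterion
//                           2.5,                   // MinNodeSize, in % of the training sample
//                           20,                    // NCuts: grid points scanned per variable
//                           &dsi,                  // dataset info (variable types etc.)
//                           0 );                   // index of the signal class
//    dt.BuildTree( eventSample );                  // grow the tree recursively
//    Double_t response = dt.CheckEvent( ev );      // response for a single event "ev"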
//_______________________________________________________________________

#include <iostream>
#include <algorithm>
#include <vector>
#include <limits>
#include <fstream>
#include <cassert>

#include "TRandom3.h"
#include "TMath.h"
#include "TMatrix.h"

#include "TMVA/MsgLogger.h"
#include "TMVA/DecisionTree.h"
#include "TMVA/DecisionTreeNode.h"
#include "TMVA/BinarySearchTree.h"

#include "TMVA/Tools.h"

#include "TMVA/GiniIndex.h"
#include "TMVA/CrossEntropy.h"
#include "TMVA/MisClassificationError.h"
#include "TMVA/SdivSqrtSplusB.h"
#include "TMVA/Event.h"
#include "TMVA/BDTEventWrapper.h"
#include "TMVA/IPruneTool.h"
#include "TMVA/CostComplexityPruneTool.h"
#include "TMVA/ExpectedErrorPruneTool.h"

const Int_t TMVA::DecisionTree::fgRandomSeed = 0; // set nonzero for debugging and zero for random seeds

using std::vector;

ClassImp(TMVA::DecisionTree)

//_______________________________________________________________________
TMVA::DecisionTree::DecisionTree():
BinaryTree(),
   fNvars          (0),
   fNCuts          (-1),
   fUseFisherCuts  (kFALSE),
   fMinLinCorrForFisher (1),
   fUseExclusiveVars (kTRUE),
   fSepType        (NULL),
   fRegType        (NULL),
   fMinSize        (0),
   fMinNodeSize    (1),
   fMinSepGain (0),
   fUseSearchTree(kFALSE),
   fPruneStrength(0),
   fPruneMethod    (kNoPruning),
   fNNodesBeforePruning(0),
   fNodePurityLimit(0.5),
   fRandomisedTree (kFALSE),
   fUseNvars       (0),
   fUsePoissonNvars(kFALSE),
   fMyTrandom (NULL), 
   fMaxDepth       (999999),
   fSigClass       (0),
   fTreeID         (0),
   fAnalysisType   (Types::kClassification),
   fDataSetInfo    (NULL)
{
   // default constructor using the GiniIndex as separation criterion,
   // no restrictions on the minimum number of events in a leaf node or on
   // the separation gain in the node splitting
}

//_______________________________________________________________________
TMVA::DecisionTree::DecisionTree( TMVA::SeparationBase *sepType, Float_t minSize, Int_t nCuts, DataSetInfo* dataInfo, UInt_t cls,
                                  Bool_t randomisedTree, Int_t useNvars, Bool_t usePoissonNvars,
                                  UInt_t nMaxDepth, Int_t iSeed, Float_t purityLimit, Int_t treeID):
   BinaryTree(),
   fNvars          (0),
   fNCuts          (nCuts),
   fUseFisherCuts  (kFALSE),
   fMinLinCorrForFisher (1),
   fUseExclusiveVars (kTRUE),
   fSepType        (sepType),
   fRegType        (NULL),
   fMinSize        (0),
   fMinNodeSize    (minSize),
   fMinSepGain     (0),
   fUseSearchTree  (kFALSE),
   fPruneStrength  (0),
   fPruneMethod    (kNoPruning),
   fNNodesBeforePruning(0),
   fNodePurityLimit(purityLimit),
   fRandomisedTree (randomisedTree),
   fUseNvars       (useNvars),
   fUsePoissonNvars(usePoissonNvars),
   fMyTrandom      (new TRandom3(iSeed)),
   fMaxDepth       (nMaxDepth),
   fSigClass       (cls),
   fTreeID         (treeID),
   fAnalysisType   (Types::kClassification),
   fDataSetInfo    (dataInfo)
{
   // constructor specifying the separation type, the minimum number of
   // events in a node that is still subjected to further splitting, and the
   // number of bins in the grid used in applying the cut for the node
   // splitting.
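   //
   // (sketch: passing sepType == NULL, e.g.
   //     TMVA::DecisionTree rt( NULL, 2.5, 20, &dsi, 0 );
   //  turns the tree into a regression tree that uses RegressionVariance as
   //  its separation criterion, as implemented just below)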

   if (sepType == NULL) { // it is interpreted as a regression tree, where
                          // currently the separation type (simple least squares)
                          // cannot be chosen freely
      fAnalysisType = Types::kRegression;
      fRegType = new RegressionVariance();
      if ( nCuts <=0 ) {
         fNCuts = 200;
         Log() << kWARNING << " You had chosen the training mode using optimal cuts (by setting\n"
               << " the option NCuts < 0). As this mode does not exist yet for regression trees,\n"
               << " NCuts is set to " << fNCuts << " and the grid is used instead."
               << Endl;
      }
   } else {
      fAnalysisType = Types::kClassification;
   }
}

//_______________________________________________________________________
TMVA::DecisionTree::DecisionTree( const DecisionTree &d ):
   BinaryTree(),
   fNvars      (d.fNvars),
   fNCuts      (d.fNCuts),
   fUseFisherCuts  (d.fUseFisherCuts),
   fMinLinCorrForFisher (d.fMinLinCorrForFisher),
   fUseExclusiveVars (d.fUseExclusiveVars),
   fSepType    (d.fSepType),
   fRegType    (d.fRegType),
   fMinSize    (d.fMinSize),
   fMinNodeSize(d.fMinNodeSize),
   fMinSepGain (d.fMinSepGain),
   fUseSearchTree  (d.fUseSearchTree),
   fPruneStrength  (d.fPruneStrength),
   fPruneMethod    (d.fPruneMethod),
   fNodePurityLimit(d.fNodePurityLimit),
   fRandomisedTree (d.fRandomisedTree),
   fUseNvars       (d.fUseNvars),
   fUsePoissonNvars(d.fUsePoissonNvars),
   fMyTrandom      (new TRandom3(fgRandomSeed)),  // well, that means it's not an identical copy. But I only ever intend to really copy trees that are "outgrown" already. 
   fMaxDepth   (d.fMaxDepth),
   fSigClass   (d.fSigClass),
   fTreeID     (d.fTreeID),
   fAnalysisType(d.fAnalysisType),
   fDataSetInfo    (d.fDataSetInfo)
{
   // copy constructor that creates a true copy, i.e. a completely independent tree
   // the node copy will recursively copy all the nodes
   this->SetRoot( new TMVA::DecisionTreeNode ( *((DecisionTreeNode*)(d.GetRoot())) ) );
   this->SetParentTreeInNodes();
   fNNodes = d.fNNodes;
}


//_______________________________________________________________________
TMVA::DecisionTree::~DecisionTree()
{
   // destructor

   // destruction of the tree nodes done in the "base class" BinaryTree

   if (fMyTrandom) delete fMyTrandom;
   if (fRegType) delete fRegType;
}

//_______________________________________________________________________
void TMVA::DecisionTree::SetParentTreeInNodes( Node *n )
{
   // descend the tree and set the parent-tree pointer in all its nodes; record
   // the maximum depth reached in the tree at the same time.

   if (n == NULL) { //default, start at the tree top, then descend recursively
      n = this->GetRoot();
      if (n == NULL) {
         Log() << kFATAL << "SetParentTreeNodes: started with undefined ROOT node" <<Endl;
         return ;
      }
   }

   if ((this->GetLeftDaughter(n) == NULL) != (this->GetRightDaughter(n) == NULL)) {
      Log() << kFATAL << " Node with only one daughter?? Something went wrong" << Endl;
      return;
   }
   else {
      if (this->GetLeftDaughter(n) != NULL) {
         this->SetParentTreeInNodes( this->GetLeftDaughter(n) );
      }
      if (this->GetRightDaughter(n) != NULL) {
         this->SetParentTreeInNodes( this->GetRightDaughter(n) );
      }
   }
   n->SetParentTree(this);
   if (n->GetDepth() > this->GetTotalTreeDepth()) this->SetTotalTreeDepth(n->GetDepth());
   return;
}

//_______________________________________________________________________
TMVA::DecisionTree* TMVA::DecisionTree::CreateFromXML(void* node, UInt_t tmva_Version_Code ) {
   // re-create a new tree (decision tree or search tree) from XML
   std::string type("");
   gTools().ReadAttr(node,"type", type);
   DecisionTree* dt = new DecisionTree();

   dt->ReadXML( node, tmva_Version_Code );
   return dt;
}


//_______________________________________________________________________
UInt_t TMVA::DecisionTree::BuildTree( const std::vector<const TMVA::Event*> & eventSample,
                                      TMVA::DecisionTreeNode *node)
{
   // building the decision tree by recursively calling the splitting of
   // one (root-) node into two daughter nodes (returns the number of nodes)

   if (node==NULL) {
      //start with the root node
      node = new TMVA::DecisionTreeNode();
      fNNodes = 1;
      this->SetRoot(node);
      // have to use "s" for start as "r" for "root" would be the same as "r" for "right"
      this->GetRoot()->SetPos('s');
      this->GetRoot()->SetDepth(0);
      this->GetRoot()->SetParentTree(this);
      fMinSize = fMinNodeSize/100. * eventSample.size();
      if (GetTreeID()==0){
         Log() << kINFO << "The minimal node size MinNodeSize=" << fMinNodeSize << " fMinNodeSize="<<fMinNodeSize<< "% is translated to an actual number of events = "<< fMinSize<< " for the training sample size of " << eventSample.size() << Endl;
         Log() << kINFO << "Note: This number will be taken as absolute minimum in the node, " << Endl;
         Log() << kINFO << "      in terms of 'weighted events' and unweighted ones !! " << Endl;
      }
   }

   UInt_t nevents = eventSample.size();

   if (nevents > 0 ) {
      if (fNvars==0) fNvars = eventSample[0]->GetNVariables(); // should have been set before, but ... well..
      fVariableImportance.resize(fNvars);
   }
   else Log() << kFATAL << "<BuildTree> event sample size == 0 " << Endl;

   Double_t s=0, b=0;
   Double_t suw=0, buw=0;
   Double_t sub=0, bub=0; // unboosted!
   Double_t target=0, target2=0;
   Float_t *xmin = new Float_t[fNvars];
   Float_t *xmax = new Float_t[fNvars];
   for (UInt_t ivar=0; ivar<fNvars; ivar++) {
      xmin[ivar]=xmax[ivar]=0;
   }
   for (UInt_t iev=0; iev<eventSample.size(); iev++) {
      const TMVA::Event* evt = eventSample[iev];
      const Double_t weight = evt->GetWeight();
      const Double_t orgWeight = evt->GetOriginalWeight(); // unboosted!
      if (evt->GetClass() == fSigClass) {
         s += weight;
         suw += 1;
         sub += orgWeight; 
      }
      else {
         b += weight;
         buw += 1;
         bub += orgWeight;
      }
      if ( DoRegression() ) {
         const Double_t tgt = evt->GetTarget(0);
         target +=weight*tgt;
         target2+=weight*tgt*tgt;
      }

      for (UInt_t ivar=0; ivar<fNvars; ivar++) {
         const Double_t val = evt->GetValue(ivar);
         if (iev==0) xmin[ivar]=xmax[ivar]=val;
         if (val < xmin[ivar]) xmin[ivar]=val;
         if (val > xmax[ivar]) xmax[ivar]=val;
      }
   }


   if (s+b < 0) {
      Log() << kWARNING << " One of the Decision Tree nodes has negative total number of signal or background events. "
            << "(Nsig="<<s<<" Nbkg="<<b<<" Probaby you use a Monte Carlo with negative weights. That should in principle "
            << "be fine as long as on average you end up with something positive. For this you have to make sure that the "
            << "minimul number of (unweighted) events demanded for a tree node (currently you use: MinNodeSize="<<fMinNodeSize
            << "% of training events, you can set this via the BDT option string when booking the classifier) is large enough "
            << "to allow for reasonable averaging!!!" << Endl
            << " If this does not help.. maybe you want to try the option: NoNegWeightsInTraining which ignores events "
            << "with negative weight in the training." << Endl;
      double nBkg=0.;
      for (UInt_t i=0; i<eventSample.size(); i++) {
         if (eventSample[i]->GetClass() != fSigClass) {
            nBkg += eventSample[i]->GetWeight();
            Log() << kDEBUG << "Event "<< i<< " has (original) weight: " <<  eventSample[i]->GetWeight()/eventSample[i]->GetBoostWeight() 
                  << " boostWeight: " << eventSample[i]->GetBoostWeight() << Endl;
         }
      }
      Log() << kDEBUG << " that gives in total: " << nBkg<<Endl;
   }

   node->SetNSigEvents(s);
   node->SetNBkgEvents(b);
   node->SetNSigEvents_unweighted(suw);
   node->SetNBkgEvents_unweighted(buw);
   node->SetNSigEvents_unboosted(sub);
   node->SetNBkgEvents_unboosted(bub);
   node->SetPurity();
   if (node == this->GetRoot()) {
      node->SetNEvents(s+b);
      node->SetNEvents_unweighted(suw+buw);
      node->SetNEvents_unboosted(sub+bub);
   }
   for (UInt_t ivar=0; ivar<fNvars; ivar++) {
      node->SetSampleMin(ivar,xmin[ivar]);
      node->SetSampleMax(ivar,xmax[ivar]);
   }
   delete[] xmin;
   delete[] xmax;

   // I now demand the minimum number of events for both daughter nodes. Hence if the number
   // of events in the parent node is not at least two times as big, I don't even need to try
   // splitting

   // ask here for the actual "events" independent of their weight AND for the weighted events
   // to exceed the minimum requested number of events per daughter node
   // (NOTE: make sure that the eventSample at the ROOT node has sum_of_weights == sample.size() !)
   if ((eventSample.size() >= 2*fMinSize  && s+b >= 2*fMinSize) && node->GetDepth() < fMaxDepth 
       && ( ( s!=0 && b !=0 && !DoRegression()) || ( (s+b)!=0 && DoRegression()) ) ) {
      Double_t separationGain;
      if (fNCuts > 0){
         separationGain = this->TrainNodeFast(eventSample, node);
      } else {
         separationGain = this->TrainNodeFull(eventSample, node);
      }
      if (separationGain < std::numeric_limits<double>::epsilon()) { // we could not gain anything, e.g. all events are in one bin,
         // no cut can actually do anything to improve the node
         // hence, naturally, the current node is a leaf node
         if (DoRegression()) {
            node->SetSeparationIndex(fRegType->GetSeparationIndex(s+b,target,target2));
            node->SetResponse(target/(s+b));
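            // RMS of the regression target from the weighted moments: Var(t) = <t^2> - <t>^2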
            if( (target2/(s+b) - target/(s+b)*target/(s+b)) < std::numeric_limits<double>::epsilon() ){
               node->SetRMS(0);
            }else{
               node->SetRMS(TMath::Sqrt(target2/(s+b) - target/(s+b)*target/(s+b)));
            }
         }
         else {
            node->SetSeparationIndex(fSepType->GetSeparationIndex(s,b));
           
            if (node->GetPurity() > fNodePurityLimit) node->SetNodeType(1);
            else node->SetNodeType(-1);
         }
         if (node->GetDepth() > this->GetTotalTreeDepth()) this->SetTotalTreeDepth(node->GetDepth());
         
      } else {

         std::vector<const TMVA::Event*> leftSample; leftSample.reserve(nevents);
         std::vector<const TMVA::Event*> rightSample; rightSample.reserve(nevents);

         Double_t nRight=0, nLeft=0;
         Double_t nRightUnBoosted=0, nLeftUnBoosted=0;

         for (UInt_t ie=0; ie< nevents ; ie++) {
            if (node->GoesRight(*eventSample[ie])) {
               rightSample.push_back(eventSample[ie]);
               nRight += eventSample[ie]->GetWeight();
               nRightUnBoosted += eventSample[ie]->GetOriginalWeight();
            }
            else {
               leftSample.push_back(eventSample[ie]);
               nLeft += eventSample[ie]->GetWeight();
               nLeftUnBoosted += eventSample[ie]->GetOriginalWeight();
            }
         }
         // std::cout << " left:" << leftSample.size()
         //           << " right:" << rightSample.size() 
         //           << " total:" << leftSample.size()+rightSample.size()
         //           << std::endl
         //           << " while the separation is thought to be " << separationGain
         //           <<   std::endl;;

         // sanity check
         if (leftSample.empty() || rightSample.empty()) {
            Log() << kERROR << "<TrainNode> all events went to the same branch" << Endl
                  << "---                       Hence new node == old node ... check" << Endl
                  << "---                         left:" << leftSample.size()
                  << " right:" << rightSample.size() << Endl
                  << " while the separation is thought to be " << separationGain
                  << kFATAL << "--- this should never happen, please write a bug report to Helge.Voss@cern.ch" << Endl;
         }

         // continue building daughter nodes for the left and the right eventsample
         TMVA::DecisionTreeNode *rightNode = new TMVA::DecisionTreeNode(node,'r');
         fNNodes++;
         rightNode->SetNEvents(nRight);
         rightNode->SetNEvents_unboosted(nRightUnBoosted);
         rightNode->SetNEvents_unweighted(rightSample.size());

         TMVA::DecisionTreeNode *leftNode = new TMVA::DecisionTreeNode(node,'l');

         fNNodes++;
         leftNode->SetNEvents(nLeft);
         leftNode->SetNEvents_unboosted(nLeftUnBoosted);
         leftNode->SetNEvents_unweighted(leftSample.size());

         node->SetNodeType(0);
         node->SetLeft(leftNode);
         node->SetRight(rightNode);

         this->BuildTree(rightSample, rightNode);
         this->BuildTree(leftSample,  leftNode );

      }
   }
   else{ // it is a leaf node
      if (DoRegression()) {
         node->SetSeparationIndex(fRegType->GetSeparationIndex(s+b,target,target2));
         node->SetResponse(target/(s+b));
         if( (target2/(s+b) - target/(s+b)*target/(s+b)) < std::numeric_limits<double>::epsilon() ) {
            node->SetRMS(0);
         }else{
            node->SetRMS(TMath::Sqrt(target2/(s+b) - target/(s+b)*target/(s+b)));
         }
      }
      else {
         node->SetSeparationIndex(fSepType->GetSeparationIndex(s,b));
         if   (node->GetPurity() > fNodePurityLimit) node->SetNodeType(1);
         else node->SetNodeType(-1);
         // loop through the event sample ending up in this node and check for events with
         // negative weight; those "cannot" be boosted normally. Hence, if one of those is
         // misclassified, randomly find as many events with positive weights in this node
         // as needed to match the same absolute sum of weight, and mark them as
         // "not to be boosted" in order to make up for not boosting the negative-weight event
      }
      
      
      if (node->GetDepth() > this->GetTotalTreeDepth()) this->SetTotalTreeDepth(node->GetDepth());
   }
   
   //   if (IsRootNode) this->CleanTree();
   return fNNodes;
}

//_______________________________________________________________________
void TMVA::DecisionTree::FillTree( const std::vector<TMVA::Event*> & eventSample )
  
{
   // fill the existing decision tree structure by passing the events in at the
   // top node and seeing where they happen to end up
   for (UInt_t i=0; i<eventSample.size(); i++) {
      this->FillEvent(*(eventSample[i]),NULL);
   }
}

//_______________________________________________________________________
void TMVA::DecisionTree::FillEvent( const TMVA::Event & event,  
                                    TMVA::DecisionTreeNode *node )
{
   // descend a single event through the existing decision tree structure from
   // the top node and update the node counts on the way down
  
   if (node == NULL) { // that's the start, take the Root node
      node = this->GetRoot();
   }
  
   node->IncrementNEvents( event.GetWeight() );
   node->IncrementNEvents_unweighted( );
  
   if (event.GetClass() == fSigClass) {
      node->IncrementNSigEvents( event.GetWeight() );
      node->IncrementNSigEvents_unweighted( );
   } 
   else {
      node->IncrementNBkgEvents( event.GetWeight() );
      node->IncrementNBkgEvents_unweighted( );
   }
   node->SetSeparationIndex(fSepType->GetSeparationIndex(node->GetNSigEvents(),
                                                         node->GetNBkgEvents()));
  
   if (node->GetNodeType() == 0) { //intermediate node --> go down
      if (node->GoesRight(event))
         this->FillEvent(event,dynamic_cast<TMVA::DecisionTreeNode*>(node->GetRight())) ;
      else
         this->FillEvent(event,dynamic_cast<TMVA::DecisionTreeNode*>(node->GetLeft())) ;
   }
  
  
}

//_______________________________________________________________________
void TMVA::DecisionTree::ClearTree()
{
   // clear the tree nodes (their S/N, Nevents etc.), but keep the structure of the tree
  
   if (this->GetRoot()!=NULL) this->GetRoot()->ClearNodeAndAllDaughters();
  
}

//_______________________________________________________________________
UInt_t TMVA::DecisionTree::CleanTree( DecisionTreeNode *node )
{
   // remove those last splits that result in two leaf nodes that
   // are both of the same type (i.e. both signal or both background);
   // this of course is only a reasonable thing to do when you use
   // "YesOrNo" leaves, while it might lose something if you use the
   // purity information in the nodes.
   // --> hence I don't call it automatically in the tree building

   if (node==NULL) {
      node = this->GetRoot();
   }

   DecisionTreeNode *l = node->GetLeft();
   DecisionTreeNode *r = node->GetRight();

   if (node->GetNodeType() == 0) {
      this->CleanTree(l);
      this->CleanTree(r);
      if (l->GetNodeType() * r->GetNodeType() > 0) {

         this->PruneNode(node);
      }
   }
   // update the number of nodes after the cleaning
   return this->CountNodes();
   
}

//_______________________________________________________________________
Double_t TMVA::DecisionTree::PruneTree( const EventConstList* validationSample )
{
   // prune (get rid of internal nodes of) the decision tree to avoid overtraining;
   // several different pruning methods can be applied, as selected by the
   // variable "fPruneMethod".
  
   IPruneTool* tool(NULL);
   PruningInfo* info(NULL);

   if( fPruneMethod == kNoPruning ) return 0.0;

   if      (fPruneMethod == kExpectedErrorPruning)
      tool = new ExpectedErrorPruneTool();
   else if (fPruneMethod == kCostComplexityPruning) 
   {
      tool = new CostComplexityPruneTool();
   }
   else {
      Log() << kFATAL << "Selected pruning method not yet implemented "
            << Endl;
   }

   if(!tool) return 0.0;

   tool->SetPruneStrength(GetPruneStrength());
   if(tool->IsAutomatic()) {
      if(validationSample == NULL){ 
         Log() << kFATAL << "Cannot automate the pruning algorithm without an "
               << "independent validation sample!" << Endl;
      }else if(validationSample->size() == 0) {
         Log() << kFATAL << "Cannot automate the pruning algorithm with "
               << "independent validation sample of ZERO events!" << Endl;
      }
   }

   info = tool->CalculatePruningInfo(this,validationSample);
   Double_t pruneStrength=0;
   if(!info) {
      Log() << kFATAL << "Error pruning tree! Check prune.log for more information." 
            << Endl;
   } else {
      pruneStrength = info->PruneStrength;
      
      //   Log() << kDEBUG << "Optimal prune strength (alpha): " << pruneStrength
      //           << " has quality index " << info->QualityIndex << Endl;
      
      
      for (UInt_t i = 0; i < info->PruneSequence.size(); ++i) {
         
         PruneNode(info->PruneSequence[i]);
      }
      // update the number of nodes after the pruning
      this->CountNodes();
   }
   
   delete tool;
   delete info;
   
   return pruneStrength;
}


//_______________________________________________________________________
void TMVA::DecisionTree::ApplyValidationSample( const EventConstList* validationSample ) const
{
   // run the validation sample through the (pruned) tree and fill in the
   // node variables NSValidation and NBValidation (i.e. how many of the signal
   // and background events from the validation sample end up in each node).
   // This is then later used when asking for the "tree quality".
   GetRoot()->ResetValidationData();
   for (UInt_t ievt=0; ievt < validationSample->size(); ievt++) {
      CheckEventWithPrunedTree((*validationSample)[ievt]);
   }
}

//_______________________________________________________________________
Double_t TMVA::DecisionTree::TestPrunedTreeQuality( const DecisionTreeNode* n, Int_t mode ) const
{
   // return the misclassification rate of a pruned tree;
   // a "pruned tree" may have the flag "IsTerminal" set at an arbitrary
   // node, hence this tree-quality testing will stop there and thus tests
   // the pruned tree (while the full tree is still in place for normal/later use)
   
   if (n == NULL) { // default, start at the tree top, then descend recursively
      n = this->GetRoot();
      if (n == NULL) {
         Log() << kFATAL << "TestPrunedTreeQuality: started with undefined ROOT node" <<Endl;
         return 0;
      }
   } 

   if( n->GetLeft() != NULL && n->GetRight() != NULL && !n->IsTerminal() ) {
      return (TestPrunedTreeQuality( n->GetLeft(), mode ) +
              TestPrunedTreeQuality( n->GetRight(), mode ));
   }
   else { // terminal leaf (in a pruned subtree of T_max at least)
      if (DoRegression()) {
         Double_t sumw = n->GetNSValidation() + n->GetNBValidation();
         return n->GetSumTarget2() - 2*n->GetSumTarget()*n->GetResponse() + sumw*n->GetResponse()*n->GetResponse();
      } 
      else {
         if (mode == 0) {
            if (n->GetPurity() > this->GetNodePurityLimit()) // this is a signal leaf, according to the training
               return n->GetNBValidation();
            else
               return n->GetNSValidation();
         }
         else if ( mode == 1 ) {
            // calculate the weighted error using the pruning validation sample
            return (n->GetPurity() * n->GetNBValidation() + (1.0 - n->GetPurity()) * n->GetNSValidation());
         }
         else {
            throw std::string("Unknown ValidationQualityMode");
         }
      }
   }
}

//_______________________________________________________________________
void TMVA::DecisionTree::CheckEventWithPrunedTree( const Event* e ) const
{
   // pass a single validation event through a pruned decision tree;
   // on the way down the tree, fill in all the "intermediate" information
   // that would normally be there from training.

   DecisionTreeNode* current =  this->GetRoot();
   if (current == NULL) {
      Log() << kFATAL << "CheckEventWithPrunedTree: started with undefined ROOT node" <<Endl;
   }

   while(current != NULL) {
      if(e->GetClass() == fSigClass)
         current->SetNSValidation(current->GetNSValidation() + e->GetWeight());
      else
         current->SetNBValidation(current->GetNBValidation() + e->GetWeight());

      if (e->GetNTargets() > 0) {
         current->AddToSumTarget(e->GetWeight()*e->GetTarget(0));
         current->AddToSumTarget2(e->GetWeight()*e->GetTarget(0)*e->GetTarget(0));
      }

      if (current->GetRight() == NULL || current->GetLeft() == NULL) {
         current = NULL;
      }
      else {
         if (current->GoesRight(*e))
            current = (TMVA::DecisionTreeNode*)current->GetRight();
         else
            current = (TMVA::DecisionTreeNode*)current->GetLeft();
      }
   }
}

//_______________________________________________________________________
Double_t TMVA::DecisionTree::GetSumWeights( const EventConstList* validationSample ) const
{
   // calculate the normalization factor for a pruning validation sample
   Double_t sumWeights = 0.0;
   for( EventConstList::const_iterator it = validationSample->begin();
        it != validationSample->end(); ++it ) {
      sumWeights += (*it)->GetWeight();
   }
   return sumWeights;
}



//_______________________________________________________________________
UInt_t TMVA::DecisionTree::CountLeafNodes( TMVA::Node *n )
{
   // return the number of terminal nodes in the sub-tree below Node n
  
   if (n == NULL) { // default, start at the tree top, then descend recursively
      n =  this->GetRoot();
      if (n == NULL) {
         Log() << kFATAL << "CountLeafNodes: started with undefined ROOT node" <<Endl;
         return 0;
      }
   } 
  
   UInt_t countLeafs=0;
  
   if ((this->GetLeftDaughter(n) == NULL) && (this->GetRightDaughter(n) == NULL) ) {
      countLeafs += 1;
   } 
   else { 
      if (this->GetLeftDaughter(n) != NULL) {
         countLeafs += this->CountLeafNodes( this->GetLeftDaughter(n) );
      }
      if (this->GetRightDaughter(n) != NULL) {
         countLeafs += this->CountLeafNodes( this->GetRightDaughter(n) );
      }
   }
   return countLeafs;
}

//_______________________________________________________________________
void TMVA::DecisionTree::DescendTree( Node* n )
{
   // descend a tree to find all its leaf nodes
  
   if (n == NULL) { // default, start at the tree top, then descend recursively
      n =  this->GetRoot();
      if (n == NULL) {
         Log() << kFATAL << "DescendTree: started with undefined ROOT node" <<Endl;
         return ;
      }
   } 
  
   if ((this->GetLeftDaughter(n) == NULL) && (this->GetRightDaughter(n) == NULL) ) {
      // do nothing
   } 
   else if ((this->GetLeftDaughter(n) == NULL) != (this->GetRightDaughter(n) == NULL)) {
      Log() << kFATAL << " Node with only one daughter?? Something went wrong" << Endl;
      return;
   }
   else { 
      if (this->GetLeftDaughter(n) != NULL) {
         this->DescendTree( this->GetLeftDaughter(n) );
      }
      if (this->GetRightDaughter(n) != NULL) {
         this->DescendTree( this->GetRightDaughter(n) );
      }
   }
}

//_______________________________________________________________________
void TMVA::DecisionTree::PruneNode( DecisionTreeNode* node )
{
   // prune away the subtree below the node 
   DecisionTreeNode *l = node->GetLeft();
   DecisionTreeNode *r = node->GetRight();

   node->SetRight(NULL);
   node->SetLeft(NULL);
   node->SetSelector(-1);
   node->SetSeparationGain(-1);
   if (node->GetPurity() > fNodePurityLimit) node->SetNodeType(1);
   else node->SetNodeType(-1);
   this->DeleteNode(l);
   this->DeleteNode(r);
   // update the stored number of nodes in the Tree
   this->CountNodes();
  
}

//_______________________________________________________________________
void TMVA::DecisionTree::PruneNodeInPlace( DecisionTreeNode* node ) {
   // prune a node temporarily (without actually deleting its descendants),
   // which allows testing the pruned tree quality for many different
   // pruning stages without "touching" the tree.

   if(node == NULL) return;
   node->SetNTerminal(1);
   node->SetSubTreeR( node->GetNodeR() );
   node->SetAlpha( std::numeric_limits<double>::infinity( ) );
   node->SetAlphaMinSubtree( std::numeric_limits<double>::infinity( ) );
   node->SetTerminal(kTRUE); // set the node to be terminal without deleting its descendants FIXME not needed
}

//_______________________________________________________________________
TMVA::Node* TMVA::DecisionTree::GetNode( ULong_t sequence, UInt_t depth )
{
   // retrieve a node from the tree. Its position (up to a maximal tree depth of 64)
   // is coded as a sequence of left-right moves starting from the root, stored as
   // 0-1 bit patterns in the "long integer" sequence (0:left ; 1:right).
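   //
   // example (illustrative): sequence = 6 (binary 110) with depth = 3 walks
   //   bit 0 = 0 --> left,  bit 1 = 1 --> right,  bit 2 = 1 --> right,
   // i.e. the left daughter of the root, then twice the right daughter.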
  
   Node* current = this->GetRoot();
  
   for (UInt_t i =0;  i < depth; i++) {
      ULong_t tmp = ULong_t(1) << i; // 64-bit shift: the sequence may address depths beyond 31
      if ( tmp & sequence) current = this->GetRightDaughter(current);
      else current = this->GetLeftDaughter(current);
   }
  
   return current;
}


//_______________________________________________________________________
void TMVA::DecisionTree::GetRandomisedVariables(Bool_t *useVariable, UInt_t *mapVariable, UInt_t &useNvars){
   // select a random subset of the variables to be considered for the next node split
   for (UInt_t ivar=0; ivar<fNvars; ivar++) useVariable[ivar]=kFALSE;
   if (fUseNvars==0) { // no number specified ... choose s.th. which hopefully works well 
      // watch out, should never happen as it is initialised automatically in MethodBDT already!!!
      fUseNvars        =  UInt_t(TMath::Sqrt(fNvars)+0.6);
   }
   if (fUsePoissonNvars) useNvars=TMath::Min(fNvars,TMath::Max(UInt_t(1),(UInt_t) fMyTrandom->Poisson(fUseNvars)));
   else useNvars = fUseNvars;

   UInt_t nSelectedVars = 0;
   while (nSelectedVars < useNvars) {
      Double_t bla = fMyTrandom->Rndm()*fNvars;
      useVariable[Int_t (bla)] = kTRUE;
      nSelectedVars = 0;
      for (UInt_t ivar=0; ivar < fNvars; ivar++) {
         if (useVariable[ivar] == kTRUE) { 
            mapVariable[nSelectedVars] = ivar;
            nSelectedVars++;
         }
      }
   }
   if (nSelectedVars != useNvars) { std::cout << "Bug in TrainNode - GetRandomisedVariables()... sorry" << std::endl; std::exit(1);}
}

//_______________________________________________________________________
Double_t TMVA::DecisionTree::TrainNodeFast( const EventConstList & eventSample,
                                            TMVA::DecisionTreeNode *node )
{
   // Decide how to split a node using one of the variables that gives
   // the best separation of signal/background. In order to do this, for each
   // variable a scan of the different cut values on a grid (of size fNCuts) is
   // performed and the resulting separation gains are compared.
   // In addition to the individual variables, one can also ask for a Fisher
   // discriminant to be built out of (some of) the variables and used as a
   // possible multivariate split.
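   //
   // schematic outline of the procedure below: (1) optionally pick a random
   // subset of the variables (randomised trees), (2) optionally add the Fisher
   // discriminant as an extra "variable", (3) histogram the events of each
   // variable into fNCuts+1 bins, (4) turn the histograms into cumulative sums,
   // so that every grid point yields the would-be left/right sums in O(1), and
   // (5) keep the cut with the largest separation gain.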

   Double_t  separationGainTotal = -1, sepTmp;
   Double_t *separationGain    = new Double_t[fNvars+1];
   Int_t    *cutIndex          = new Int_t[fNvars+1];

   for (UInt_t ivar=0; ivar <= fNvars; ivar++) {
      separationGain[ivar]=-1;
      cutIndex[ivar]=-1;
   }
   Int_t     mxVar = -1;
   Bool_t    cutType = kTRUE;
   Double_t  nTotS, nTotB;
   Int_t     nTotS_unWeighted, nTotB_unWeighted; 
   UInt_t nevents = eventSample.size();


   // the +1 comes from the fact that I treat later on the Fisher output as an 
   // additional possible variable.
   Bool_t *useVariable = new Bool_t[fNvars+1];   // for performance reasons instead of std::vector<Bool_t> useVariable(fNvars);
   UInt_t *mapVariable = new UInt_t[fNvars+1];    // map the subset of variables used in randomised trees to the original variable number (used in the Event() ) 

   std::vector<Double_t> fisherCoeff;
 
   if (fRandomisedTree) { // choose for each node splitting a random subset of variables to choose from
      UInt_t tmp=fUseNvars;
      GetRandomisedVariables(useVariable,mapVariable,tmp);
   } 
   else {
      for (UInt_t ivar=0; ivar < fNvars; ivar++) {
         useVariable[ivar] = kTRUE;
         mapVariable[ivar] = ivar;
      }
   }
   useVariable[fNvars] = kFALSE; //by default fisher is not used..

   Bool_t fisherOK = kFALSE; // flag to show that the fisher discriminant could be calculated correctly or not;
   if (fUseFisherCuts) {
      useVariable[fNvars] = kTRUE; // that's where I store the "fisher MVA"

      //use for the Fisher discriminant ONLY those variables that show
      //some reasonable linear correlation in either Signal or Background
      Bool_t *useVarInFisher = new Bool_t[fNvars];   // for performance reasons instead of std::vector<Bool_t> useVariable(fNvars);
      UInt_t *mapVarInFisher = new UInt_t[fNvars];   // map the subset of variables used in randomised trees to the original variable number (used in the Event() ) 
      for (UInt_t ivar=0; ivar < fNvars; ivar++) {
         useVarInFisher[ivar] = kFALSE;
         mapVarInFisher[ivar] = ivar;
      }
      
      std::vector<TMatrixDSym*>* covMatrices;
      covMatrices = gTools().CalcCovarianceMatrices( eventSample, 2 ); // currently for 2 classes only
      if (!covMatrices){
         Log() << kWARNING << " in TrainNodeFast, the covariance Matrices needed for the Fisher-Cuts returned error --> revert to just normal cuts for this node" << Endl;
         fisherOK = kFALSE;
      }else{
         TMatrixD *ss = new TMatrixD(*(covMatrices->at(0)));
         TMatrixD *bb = new TMatrixD(*(covMatrices->at(1)));
         const TMatrixD *s = gTools().GetCorrelationMatrix(ss);
         const TMatrixD *b = gTools().GetCorrelationMatrix(bb);
         
         for (UInt_t ivar=0; ivar < fNvars; ivar++) {
            for (UInt_t jvar=ivar+1; jvar < fNvars; jvar++) {
               if (  ( TMath::Abs( (*s)(ivar, jvar)) > fMinLinCorrForFisher) ||
                     ( TMath::Abs( (*b)(ivar, jvar)) > fMinLinCorrForFisher) ){
                  useVarInFisher[ivar] = kTRUE;
                  useVarInFisher[jvar] = kTRUE;
               }
            }
         }
         // now as you know which variables you want to use, count and map them:
         // such that you can use an array/matrix filled only with THOSE variables
         // that you used
         UInt_t nFisherVars = 0;
         for (UInt_t ivar=0; ivar < fNvars; ivar++) {
            // now ... pick those variables that are used in the Fisher and are also
            // part of the "allowed" variables (in case of randomised trees)
            if (useVarInFisher[ivar] && useVariable[ivar]) {
               mapVarInFisher[nFisherVars++]=ivar;
               // now exclude the variables used in the Fisher cuts, and don't
               // use them anymore in the individual variable scan
               if (fUseExclusiveVars) useVariable[ivar] = kFALSE;
            }
         }
         
         
         fisherCoeff = this->GetFisherCoefficients(eventSample, nFisherVars, mapVarInFisher);
         fisherOK = kTRUE;
      }
      delete [] useVarInFisher;
      delete [] mapVarInFisher;

   }


   UInt_t cNvars = fNvars;
   if (fUseFisherCuts && fisherOK) cNvars++;  // use the Fisher output simply as an additional variable

   UInt_t* nBins = new UInt_t [cNvars];

   Double_t** nSelS = new Double_t* [cNvars];
   Double_t** nSelB = new Double_t* [cNvars];
   Double_t** nSelS_unWeighted = new Double_t* [cNvars];
   Double_t** nSelB_unWeighted = new Double_t* [cNvars];
   Double_t** target = new Double_t* [cNvars];
   Double_t** target2 = new Double_t* [cNvars];
   Double_t** cutValues = new Double_t* [cNvars];

   for (UInt_t ivar=0; ivar<cNvars; ivar++) {
      nBins[ivar] = fNCuts+1;
      if (ivar < fNvars) {
         if (fDataSetInfo->GetVariableInfo(ivar).GetVarType() == 'I') {
            nBins[ivar] = node->GetSampleMax(ivar) - node->GetSampleMin(ivar) + 1; 
         }
      }
     
      nSelS[ivar] = new Double_t [nBins[ivar]];
      nSelB[ivar] = new Double_t [nBins[ivar]];
      nSelS_unWeighted[ivar] = new Double_t [nBins[ivar]];
      nSelB_unWeighted[ivar] = new Double_t [nBins[ivar]];
      target[ivar] = new Double_t [nBins[ivar]];
      target2[ivar] = new Double_t [nBins[ivar]];
      cutValues[ivar] = new Double_t [nBins[ivar]];
   
   }

   Double_t *xmin = new Double_t[cNvars]; 
   Double_t *xmax = new Double_t[cNvars];

   for (UInt_t ivar=0; ivar < cNvars; ivar++) {
      if (ivar < fNvars){
         xmin[ivar]=node->GetSampleMin(ivar);
         xmax[ivar]=node->GetSampleMax(ivar);
         if (xmax[ivar]-xmin[ivar] < std::numeric_limits<double>::epsilon() ) {
            //  std::cout << " variable " << ivar << " has no proper range in (xmax[ivar]-xmin[ivar] = " << xmax[ivar]-xmin[ivar] << std::endl;
            //  std::cout << " will set useVariable[ivar]=false"<<std::endl;
            useVariable[ivar]=kFALSE;
         }
         
      } else { // the fisher variable
         xmin[ivar]=999;
         xmax[ivar]=-999;
         // too bad, for the moment I don't know how to do this without looping
         // once to get the "min max" and then AGAIN to fill the histogram
         for (UInt_t iev=0; iev<nevents; iev++) {
            // returns the Fisher value (no fixed range)
            Double_t result = fisherCoeff[fNvars]; // the fisher constant offset
            for (UInt_t jvar=0; jvar<fNvars; jvar++)
               result += fisherCoeff[jvar]*(eventSample[iev])->GetValue(jvar);
            if (result > xmax[ivar]) xmax[ivar]=result;
            if (result < xmin[ivar]) xmin[ivar]=result;
         }
      }
      for (UInt_t ibin=0; ibin<nBins[ivar]; ibin++) {
         nSelS[ivar][ibin]=0;
         nSelB[ivar][ibin]=0;
         nSelS_unWeighted[ivar][ibin]=0;
         nSelB_unWeighted[ivar][ibin]=0;
         target[ivar][ibin]=0;
         target2[ivar][ibin]=0;
         cutValues[ivar][ibin]=0;
      }
   }

   // fill the cut values for the scan:
   for (UInt_t ivar=0; ivar < cNvars; ivar++) {

      if ( useVariable[ivar] ) {
         
         //set the grid for the cut scan on the variables like this:
         // 
         //  |       |        |         |         |   ...      |        |  
         // xmin                                                       xmax
         //
         // cut      0        1         2         3   ...     fNCuts-1 (counting from zero)
         // bin  0       1         2         3       .....      nBins-1=fNCuts (counting from zero)
         // --> nBins = fNCuts+1
         // (NOTE: the cuts at xmin or xmax would just give the whole sample and
         //  hence can safely be omitted)
         
         Double_t istepSize =( xmax[ivar] - xmin[ivar] ) / Double_t(nBins[ivar]);
         if (ivar < fNvars) {
            if (fDataSetInfo->GetVariableInfo(ivar).GetVarType() == 'I') istepSize = 1;
         }

         // std::cout << "ivar="<<ivar
         //           <<" min="<<xmin[ivar]  
         //           << " max="<<xmax[ivar] 
         //           << " widht=" << istepSize 
         //           << " nBins["<<ivar<<"]="<<nBins[ivar]<<std::endl;
         for (UInt_t icut=0; icut<nBins[ivar]-1; icut++) {
            cutValues[ivar][icut]=xmin[ivar]+(Double_t(icut+1))*istepSize;
            //            std::cout << " cutValues["<<ivar<<"]["<<icut<<"]=" <<  cutValues[ivar][icut] << std::endl;
         }
      }
   }
  
   nTotS=0; nTotB=0;
   nTotS_unWeighted=0; nTotB_unWeighted=0;   
   for (UInt_t iev=0; iev<nevents; iev++) {

      Double_t eventWeight =  eventSample[iev]->GetWeight(); 
      if (eventSample[iev]->GetClass() == fSigClass) {
         nTotS+=eventWeight;
         nTotS_unWeighted++;
      }
      else {
         nTotB+=eventWeight;
         nTotB_unWeighted++;
      }
      
      Int_t iBin=-1;
      for (UInt_t ivar=0; ivar < cNvars; ivar++) {
         // now scan through the cuts for each variable and find which one gives
         // the best separationGain at the current stage.
         if ( useVariable[ivar] ) {
            Double_t eventData;
            if (ivar < fNvars) eventData = eventSample[iev]->GetValue(ivar); 
            else { // the fisher variable
               eventData = fisherCoeff[fNvars];
               for (UInt_t jvar=0; jvar<fNvars; jvar++)
                  eventData += fisherCoeff[jvar]*(eventSample[iev])->GetValue(jvar);
               
            }
            // "maximum" is nbins-1 (the "-1" because we start counting from 0 !!
            iBin = TMath::Min(Int_t(nBins[ivar]-1),TMath::Max(0,int (nBins[ivar]*(eventData-xmin[ivar])/(xmax[ivar]-xmin[ivar]) ) ));
            if (eventSample[iev]->GetClass() == fSigClass) {
               nSelS[ivar][iBin]+=eventWeight;
               nSelS_unWeighted[ivar][iBin]++;
            } 
            else {
               nSelB[ivar][iBin]+=eventWeight;
               nSelB_unWeighted[ivar][iBin]++;
            }
            if (DoRegression()) {
               target[ivar][iBin] +=eventWeight*eventSample[iev]->GetTarget(0);
               target2[ivar][iBin]+=eventWeight*eventSample[iev]->GetTarget(0)*eventSample[iev]->GetTarget(0);
            }
         }
      }
   }   
   // now turn the "histogram" into a cumulative distribution
   for (UInt_t ivar=0; ivar < cNvars; ivar++) {
      if (useVariable[ivar]) {
         for (UInt_t ibin=1; ibin < nBins[ivar]; ibin++) {
            nSelS[ivar][ibin]+=nSelS[ivar][ibin-1];
            nSelS_unWeighted[ivar][ibin]+=nSelS_unWeighted[ivar][ibin-1];
            nSelB[ivar][ibin]+=nSelB[ivar][ibin-1];
            nSelB_unWeighted[ivar][ibin]+=nSelB_unWeighted[ivar][ibin-1];
            if (DoRegression()) {
               target[ivar][ibin] +=target[ivar][ibin-1] ;
               target2[ivar][ibin]+=target2[ivar][ibin-1];
            }
         }
         if (nSelS_unWeighted[ivar][nBins[ivar]-1] +nSelB_unWeighted[ivar][nBins[ivar]-1] != eventSample.size()) {
            Log() << kFATAL << "Helge, you have a bug ....nSelS_unw..+nSelB_unw..= "
                  << nSelS_unWeighted[ivar][nBins[ivar]-1] +nSelB_unWeighted[ivar][nBins[ivar]-1] 
                  << " while eventsample size = " << eventSample.size()
                  << Endl;
         }
         double lastBins=nSelS[ivar][nBins[ivar]-1] +nSelB[ivar][nBins[ivar]-1];
         double totalSum=nTotS+nTotB;
         if (TMath::Abs(lastBins-totalSum)/totalSum>0.01) {
            Log() << kFATAL << "Helge, you have another bug ....nSelS+nSelB= "
                  << lastBins
                  << " while total number of events = " << totalSum
                  << Endl;
         }
      }
   }
   // now select the optimal cut for each variable and find which one gives
   // the best separationGain at the current stage
   for (UInt_t ivar=0; ivar < cNvars; ivar++) {
      if (useVariable[ivar]) {
         for (UInt_t iBin=0; iBin<nBins[ivar]-1; iBin++) { // the last bin contains "all events" -->skip
            // the separationGain is defined via the various indices (Gini, CrossEntropy, etc.)
            // calculated from the sample purities of the branches that would go to the
            // left or the right from this node if "these" cuts were used in the node:
            // hereby: nSelS and nSelB would go to the right branch;
            //         (nTotS - nSelS) and (nTotB - nSelB) would go to the left branch

            // only allow splits where both daughter nodes match the specified minimum number
            // of events; for this use the "unweighted" events, as you are interested in
            // statistically significant splits, which is determined by the actual number of
            // entries in a node, rather than the sum of event weights.
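            // (schematically, with purity p = s/(s+b) and e.g. the Gini index ~ p*(1-p),
            //  the gain of a candidate cut is index(parent) - fL*index(left) - fR*index(right),
            //  where fL and fR are the event-weight fractions going to each daughter;
            //  the exact definitions live in the SeparationBase implementations)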

            Double_t sl = nSelS_unWeighted[ivar][iBin];
            Double_t bl = nSelB_unWeighted[ivar][iBin];
            Double_t s  = nTotS_unWeighted;
            Double_t b  = nTotB_unWeighted;
            Double_t slW = nSelS[ivar][iBin];
            Double_t blW = nSelB[ivar][iBin];
            Double_t sW  = nTotS;
            Double_t bW  = nTotB;
            Double_t sr = s-sl;
            Double_t br = b-bl;
            Double_t srW = sW-slW;
            Double_t brW = bW-blW;
            //            std::cout << "sl="<<sl << " bl="<<bl<<" fMinSize="<<fMinSize << "sr="<<sr << " br="<<br  <<std::endl;
            if ( ((sl+bl)>=fMinSize && (sr+br)>=fMinSize)
                 && ((slW+blW)>=fMinSize && (srW+brW)>=fMinSize) 
                 ) {

               if (DoRegression()) {
                  sepTmp = fRegType->GetSeparationGain(nSelS[ivar][iBin]+nSelB[ivar][iBin], 
                                                       target[ivar][iBin],target2[ivar][iBin],
                                                       nTotS+nTotB,
                                                       target[ivar][nBins[ivar]-1],target2[ivar][nBins[ivar]-1]);
               } else {
                  sepTmp = fSepType->GetSeparationGain(nSelS[ivar][iBin], nSelB[ivar][iBin], nTotS, nTotB);
               }
               if (separationGain[ivar] < sepTmp) {
                  separationGain[ivar] = sepTmp;  
                  cutIndex[ivar]       = iBin;
               }
            }
         }
      }
   }


   //now you have found the best separation cut for each variable, now compare the variables
   for (UInt_t ivar=0; ivar < cNvars; ivar++) {
      if (useVariable[ivar] ) {
         if (separationGainTotal < separationGain[ivar]) {
            separationGainTotal = separationGain[ivar];
            mxVar = ivar;
         }
      }
   }

   if (mxVar >= 0) {    
      if (DoRegression()) {
         node->SetSeparationIndex(fRegType->GetSeparationIndex(nTotS+nTotB,target[0][nBins[mxVar]-1],target2[0][nBins[mxVar]-1]));
         node->SetResponse(target[0][nBins[mxVar]-1]/(nTotS+nTotB));
         if ( (target2[0][nBins[mxVar]-1]/(nTotS+nTotB) - target[0][nBins[mxVar]-1]/(nTotS+nTotB)*target[0][nBins[mxVar]-1]/(nTotS+nTotB)) < std::numeric_limits<double>::epsilon() ) {
            node->SetRMS(0);
         }else{ 
            node->SetRMS(TMath::Sqrt(target2[0][nBins[mxVar]-1]/(nTotS+nTotB) - target[0][nBins[mxVar]-1]/(nTotS+nTotB)*target[0][nBins[mxVar]-1]/(nTotS+nTotB)));
         }
      }
      else {
         node->SetSeparationIndex(fSepType->GetSeparationIndex(nTotS,nTotB));
         if (nSelS[mxVar][cutIndex[mxVar]]/nTotS > nSelB[mxVar][cutIndex[mxVar]]/nTotB) cutType=kTRUE;
         else cutType=kFALSE;
      }
      node->SetSelector((UInt_t)mxVar);
      node->SetCutValue(cutValues[mxVar][cutIndex[mxVar]]);
      node->SetCutType(cutType);
      node->SetSeparationGain(separationGainTotal);
      if (mxVar < (Int_t) fNvars){ // the fisher cut is actually not used in this node, hence don't need to store fisher components
         node->SetNFisherCoeff(0);
         fVariableImportance[mxVar] += separationGainTotal*separationGainTotal * (nTotS+nTotB) * (nTotS+nTotB) ;
         //for (UInt_t ivar=0; ivar<fNvars; ivar++) fVariableImportance[ivar] += separationGain[ivar]*separationGain[ivar] * (nTotS+nTotB) * (nTotS+nTotB) ;
      }else{
         // allocate Fisher coefficients (use fNvars and set the unused ones to zero; on
         // average this might even need less storage space than also storing the mapping
         // used otherwise, and it can always be changed relatively easily)
         node->SetNFisherCoeff(fNvars+1);     
         for (UInt_t ivar=0; ivar<=fNvars; ivar++) {
            node->SetFisherCoeff(ivar,fisherCoeff[ivar]);
            // take the Fisher-coefficient-weighted gain as the variable importance; don't fill the offset coefficient though :)
            if (ivar<fNvars){
               fVariableImportance[ivar] += fisherCoeff[ivar]*fisherCoeff[ivar]*separationGainTotal*separationGainTotal * (nTotS+nTotB) * (nTotS+nTotB) ;
            }
         }
      } 
   }
   else {
      separationGainTotal = 0;
   }

   for (UInt_t i=0; i<cNvars; i++) {
      delete [] nSelS[i];
      delete [] nSelB[i];
      delete [] nSelS_unWeighted[i];
      delete [] nSelB_unWeighted[i];
      delete [] target[i];
      delete [] target2[i];
      delete [] cutValues[i];
   }
   delete [] nSelS;
   delete [] nSelB;
   delete [] nSelS_unWeighted;
   delete [] nSelB_unWeighted;
   delete [] target;
   delete [] target2;
   delete [] cutValues;

   delete [] xmin;
   delete [] xmax;

   delete [] useVariable;
   delete [] mapVariable;

   delete [] separationGain;
   delete [] cutIndex;

   delete [] nBins; 

   return separationGainTotal;

}



//_______________________________________________________________________
std::vector<Double_t>  TMVA::DecisionTree::GetFisherCoefficients(const EventConstList &eventSample, UInt_t nFisherVars, UInt_t *mapVarInFisher){ 
   // calculate the Fisher coefficients for the event sample and the variables used
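   //
   // (schematically this is the classic Fisher / linear-discriminant solution:
   //  the coefficient vector is w ~ W^{-1} * (mu_S - mu_B), where W is the
   //  "within-class" covariance matrix and mu_S, mu_B are the class means of
   //  the selected variables, plus a constant offset that centres the
   //  discriminant between the two classes)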

   std::vector<Double_t> fisherCoeff(fNvars+1);

   // initialization of global matrices and vectors:
   // average value of each variable for S, B, and S+B
   TMatrixD* meanMatx = new TMatrixD( nFisherVars, 3 );
   
   // the covariance 'within class' and 'between class' matrices
   TMatrixD* betw = new TMatrixD( nFisherVars, nFisherVars );
   TMatrixD* with = new TMatrixD( nFisherVars, nFisherVars );
   TMatrixD* cov  = new TMatrixD( nFisherVars, nFisherVars );

   //
   // compute mean values of variables in each sample, and the overall means
   //

   // initialize internal sum-of-weights variables
   Double_t sumOfWeightsS = 0;
   Double_t sumOfWeightsB = 0;
   
   
   // init vectors
   Double_t* sumS = new Double_t[nFisherVars];
   Double_t* sumB = new Double_t[nFisherVars];
   for (UInt_t ivar=0; ivar<nFisherVars; ivar++) { sumS[ivar] = sumB[ivar] = 0; }   

   UInt_t nevents = eventSample.size();   
   // compute sample means
   for (UInt_t ievt=0; ievt<nevents; ievt++) {
      
      // read the training event
      const Event * ev = eventSample[ievt];
      
      // sum of weights
      Double_t weight = ev->GetWeight();
      if (ev->GetClass() == fSigClass) sumOfWeightsS += weight;
      else                             sumOfWeightsB += weight;

      Double_t* sum = ev->GetClass() == fSigClass ? sumS : sumB;
      for (UInt_t ivar=0; ivar<nFisherVars; ivar++) {
         sum[ivar] += ev->GetValue( mapVarInFisher[ivar] )*weight;
      }
   }
   for (UInt_t ivar=0; ivar<nFisherVars; ivar++) {   
      (*meanMatx)( ivar, 2 ) = sumS[ivar];
      (*meanMatx)( ivar, 0 ) = sumS[ivar]/sumOfWeightsS;
      
      (*meanMatx)( ivar, 2 ) += sumB[ivar];
      (*meanMatx)( ivar, 1 ) = sumB[ivar]/sumOfWeightsB;
      
      // signal + background
      (*meanMatx)( ivar, 2 ) /= (sumOfWeightsS + sumOfWeightsB);
   }  
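   // meanMatx layout after the loop above (columns): 0 = per-variable signal
   // mean, 1 = background mean, 2 = weighted overall mean. For example, one
   // variable with <x>_S = 1.0 (W_S = 2) and <x>_B = 4.0 (W_B = 2) gives an
   // overall mean of (2*1.0 + 2*4.0)/4 = 2.5.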

   delete [] sumS;
   delete [] sumB;

   // the matrix of covariance 'within class' reflects the dispersion of the
   // events relative to the center of gravity of their own class

   // sanity check: both classes must carry a positive sum of weights
   assert( sumOfWeightsS > 0 && sumOfWeightsB > 0 );

   // product matrices (x-<x>)(y-<y>) where x, y are variables

   const Int_t nFisherVars2 = nFisherVars*nFisherVars;
   Double_t *sum2Sig  = new Double_t[nFisherVars2];
   Double_t *sum2Bgd  = new Double_t[nFisherVars2];
   Double_t *xval    = new Double_t[nFisherVars2];
   memset(sum2Sig,0,nFisherVars2*sizeof(Double_t));
   memset(sum2Bgd,0,nFisherVars2*sizeof(Double_t));
   
   // 'within class' covariance
   for (UInt_t ievt=0; ievt<nevents; ievt++) {

      // read the training event
      const Event* ev = eventSample.at(ievt);

      Double_t weight = ev->GetWeight(); // may ignore events with negative weights

      for (UInt_t x=0; x<nFisherVars; x++) {
         xval[x] = ev->GetValue( mapVarInFisher[x] );
      }
      Int_t k=0;
      for (UInt_t x=0; x<nFisherVars; x++) {
         for (UInt_t y=0; y<nFisherVars; y++) {            
            if ( ev->GetClass() == fSigClass ) sum2Sig[k] += ( (xval[x] - (*meanMatx)(x, 0))*(xval[y] - (*meanMatx)(y, 0)) )*weight;
            else                               sum2Bgd[k] += ( (xval[x] - (*meanMatx)(x, 1))*(xval[y] - (*meanMatx)(y, 1)) )*weight;
            k++;
         }
      }
   }
   Int_t k=0;
   for (UInt_t x=0; x<nFisherVars; x++) {
      for (UInt_t y=0; y<nFisherVars; y++) {
         (*with)(x, y) = sum2Sig[k]/sumOfWeightsS + sum2Bgd[k]/sumOfWeightsB;
         k++;
      }
   }
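   // in formula form: with(x,y) = sum_S w (x-<x>_S)(y-<y>_S) / W_S
   //                            + sum_B w (x-<x>_B)(y-<y>_B) / W_B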

   delete [] sum2Sig;
   delete [] sum2Bgd;
   delete [] xval;


   // the matrix of covariance 'between class' reflects the dispersion of the
   // events of a class relative to the global center of gravity of all the
   // classes, and hence the separation between the classes


   Double_t prodSig, prodBgd;

   for (UInt_t x=0; x<nFisherVars; x++) {
      for (UInt_t y=0; y<nFisherVars; y++) {

         prodSig = ( ((*meanMatx)(x, 0) - (*meanMatx)(x, 2))*
                     ((*meanMatx)(y, 0) - (*meanMatx)(y, 2)) );
         prodBgd = ( ((*meanMatx)(x, 1) - (*meanMatx)(x, 2))*
                     ((*meanMatx)(y, 1) - (*meanMatx)(y, 2)) );

         (*betw)(x, y) = (sumOfWeightsS*prodSig + sumOfWeightsB*prodBgd) / (sumOfWeightsS + sumOfWeightsB);
      }
   }
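   // in formula form:
   // betw(x,y) = [ W_S (<x>_S-<x>)(<y>_S-<y>) + W_B (<x>_B-<x>)(<y>_B-<y>) ] / (W_S+W_B)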



   // compute full covariance matrix from sum of within and between matrices
   for (UInt_t x=0; x<nFisherVars; x++) 
      for (UInt_t y=0; y<nFisherVars; y++) 
         (*cov)(x, y) = (*with)(x, y) + (*betw)(x, y);
        
   // Fisher = Sum { [coeff]*[variables] }
   //
   // let Xs be the array of the mean values of variables for signal evts
   // let Xb be the array of the mean values of variables for backgd evts
   // let InvWith be the inverse matrix of the 'within class' covariance matrix
   //
   // then the array of Fisher coefficients is 
   // [coeff] =TMath::Sqrt(fNsig*fNbgd)/fNevt*transpose{Xs-Xb}*InvWith
   TMatrixD* theMat = with; // Fisher's original
   //   TMatrixD* theMat = cov; // Mahalanobis
      
   TMatrixD invCov( *theMat );
   if ( TMath::Abs(invCov.Determinant()) < 10E-24 ) {
      Log() << kWARNING << "FisherCoeff matrix is almost singular with determinant="
            << TMath::Abs(invCov.Determinant()) 
            << " did you use variables that are linear combinations of each other or highly correlated?" 
            << Endl;
   }
   if ( TMath::Abs(invCov.Determinant()) < 10E-120 ) {
      Log() << kFATAL << "FisherCoeff matrix is singular with determinant="
            << TMath::Abs(invCov.Determinant())  
            << " did you use the variables that are linear combinations?" 
            << Endl;
   }

   invCov.Invert();
   
   // apply rescaling factor
   Double_t xfact = TMath::Sqrt( sumOfWeightsS*sumOfWeightsB ) / (sumOfWeightsS + sumOfWeightsB);

   // compute difference of mean values
   std::vector<Double_t> diffMeans( nFisherVars );

   for (UInt_t ivar=0; ivar<=fNvars; ivar++) fisherCoeff[ivar] = 0;
   for (UInt_t ivar=0; ivar<nFisherVars; ivar++) {
      for (UInt_t jvar=0; jvar<nFisherVars; jvar++) {
         Double_t d = (*meanMatx)(jvar, 0) - (*meanMatx)(jvar, 1);
         fisherCoeff[mapVarInFisher[ivar]] += invCov(ivar, jvar)*d;
      }    
    
      // rescale
      fisherCoeff[mapVarInFisher[ivar]] *= xfact;
   }

   // offset correction
   Double_t f0 = 0.0;
   for (UInt_t ivar=0; ivar<nFisherVars; ivar++){ 
      f0 += fisherCoeff[mapVarInFisher[ivar]]*((*meanMatx)(ivar, 0) + (*meanMatx)(ivar, 1));
   }
   f0 /= -2.0;  

   fisherCoeff[fNvars] = f0;  // since variables are counted from zero, the Fisher offset is stored at the end (index fNvars)
   
   return fisherCoeff;
}
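// Illustrative sketch (names hypothetical, not part of this file): given the
// coefficients returned above, the linear Fisher discriminant that a node
// cuts on would be evaluated as
//
//    Double_t FisherValue(const std::vector<Double_t>& coeff,
//                         const TMVA::Event& ev, UInt_t nVars) {
//       Double_t f = coeff[nVars];              // offset term f0 (stored last)
//       for (UInt_t i = 0; i < nVars; i++)
//          f += coeff[i] * ev.GetValue(i);      // linear term per variable
//       return f;
//    }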

//_______________________________________________________________________
Double_t TMVA::DecisionTree::TrainNodeFull( const EventConstList & eventSample,
                                            TMVA::DecisionTreeNode *node )
{
  
   // train a node by finding, among all variables, the single optimal cut
   // that best separates signal and background (i.e. maximizes the separation gain)
  
   Double_t nTotS = 0.0, nTotB = 0.0;
   Int_t nTotS_unWeighted = 0, nTotB_unWeighted = 0;  
  
   std::vector<TMVA::BDTEventWrapper> bdtEventSample;
  
   // List of optimal cuts, separation gains, and cut types (removed background or signal) - one for each variable
   std::vector<Double_t> lCutValue( fNvars, 0.0 );
   std::vector<Double_t> lSepGain( fNvars, -1.0e6 );
   std::vector<Char_t> lCutType( fNvars ); // Char_t is used instead of Bool_t (std::vector<bool> is avoided for performance reasons)
   lCutType.assign( fNvars, Char_t(kFALSE) );
  
   // Initialize (un)weighted counters for signal & background
   // Construct a list of event wrappers that point to the original data
   for( std::vector<const TMVA::Event*>::const_iterator it = eventSample.begin(); it != eventSample.end(); ++it ) {
      if((*it)->GetClass() == fSigClass) { // signal or background event
         nTotS += (*it)->GetWeight();
         ++nTotS_unWeighted;
      }
      else {
         nTotB += (*it)->GetWeight();
         ++nTotB_unWeighted;
      }
      bdtEventSample.push_back(TMVA::BDTEventWrapper(*it));
   }
  
   std::vector<Char_t> useVariable(fNvars); // Char_t is used instead of Bool_t (std::vector<bool> is avoided for performance reasons)
   useVariable.assign( fNvars, Char_t(kFALSE) );
   if (fRandomisedTree) { // for each node splitting, choose a random subset of variables to consider
      if (fUseNvars == 0) { // no number specified ... choose something that hopefully works well
         // should never happen, as fUseNvars is already initialised automatically in MethodBDT
         fUseNvars        =  UInt_t(TMath::Sqrt(fNvars)+0.6);
      }
      }
      Int_t nSelectedVars = 0;
      while (nSelectedVars < fUseNvars) {
         Double_t randVal = fMyTrandom->Rndm()*fNvars;
         useVariable[Int_t (randVal)] = Char_t(kTRUE);
         nSelectedVars = 0;
         for (UInt_t ivar=0; ivar < fNvars; ivar++) {
            if(useVariable[ivar] == Char_t(kTRUE)) nSelectedVars++;
         }
      }
   } 
   else {
      for (UInt_t ivar=0; ivar < fNvars; ivar++) useVariable[ivar] = Char_t(kTRUE);
   }
  
   for( UInt_t ivar = 0; ivar < fNvars; ivar++ ) { // loop over all discriminating variables
      if(!useVariable[ivar]) continue; // only optimize with the selected variables
      TMVA::BDTEventWrapper::SetVarIndex(ivar); // select the variable to sort by
      std::sort( bdtEventSample.begin(),bdtEventSample.end() ); // sort the event data 
    
      Double_t bkgWeightCtr = 0.0, sigWeightCtr = 0.0;
      std::vector<TMVA::BDTEventWrapper>::iterator it = bdtEventSample.begin(), it_end = bdtEventSample.end();
      for( ; it != it_end; ++it ) {
         if((**it)->GetClass() == fSigClass ) // specify signal or background event
            sigWeightCtr += (**it)->GetWeight();
         else 
            bkgWeightCtr += (**it)->GetWeight(); 
         // Store the accumulated signal (background) weights
         it->SetCumulativeWeight(false,bkgWeightCtr); 
         it->SetCumulativeWeight(true,sigWeightCtr);
      }
    
      const Double_t fPMin = 1.0e-6;
      Bool_t cutType = kFALSE;
      Long64_t index = 0;
      Double_t separationGain = -1.0, sepTmp = 0.0, cutValue = 0.0, dVal = 0.0, norm = 0.0;
      // Locate the optimal cut for this (ivar-th) variable
      for( it = bdtEventSample.begin(); it != it_end; ++it ) {
         if( index == 0 ) { ++index; continue; }
         if( *(*it) == NULL ) {
            Log() << kFATAL << "In TrainNodeFull(): have a null event! Where index=" 
                  << index << ", and parent node=" << node->GetParent() << Endl;
            break;
         }
         dVal = bdtEventSample[index].GetVal() - bdtEventSample[index-1].GetVal();
         norm = TMath::Abs(bdtEventSample[index].GetVal() + bdtEventSample[index-1].GetVal());
         // Only allow splits where both daughter nodes have the specified minimum number of events
         // Splits are only sensible when the data are ordered (e.g. don't split inside a sequence of equal values)
         if( index >= fMinSize && (nTotS_unWeighted + nTotB_unWeighted) - index >= fMinSize && TMath::Abs(dVal/(0.5*norm + 1)) > fPMin ) {
            sepTmp = fSepType->GetSeparationGain( it->GetCumulativeWeight(true), it->GetCumulativeWeight(false), sigWeightCtr, bkgWeightCtr );
            if( sepTmp > separationGain ) {
               separationGain = sepTmp;
               cutValue = it->GetVal() - 0.5*dVal; 
               Double_t nSelS = it->GetCumulativeWeight(true);
               Double_t nSelB = it->GetCumulativeWeight(false);
               // Indicate whether this cut is improving the node purity by removing background (enhancing signal)
               // or by removing signal (enhancing background)
               if( nSelS/sigWeightCtr > nSelB/bkgWeightCtr ) cutType = kTRUE; 
               else cutType = kFALSE; 
            }
         }
         ++index;
      }
      lCutType[ivar] = Char_t(cutType);
      lCutValue[ivar] = cutValue;
      lSepGain[ivar] = separationGain;
   }
  
   Double_t separationGain = -1.0;
   Int_t iVarIndex = -1;
   for( UInt_t ivar = 0; ivar < fNvars; ivar++ ) {
      if( lSepGain[ivar] > separationGain ) {
         iVarIndex = ivar;
         separationGain = lSepGain[ivar];
      }
   }
  
   if(iVarIndex >= 0) {
      node->SetSelector(iVarIndex);
      node->SetCutValue(lCutValue[iVarIndex]);
      node->SetSeparationGain(lSepGain[iVarIndex]);
      node->SetCutType(lCutType[iVarIndex]);
      fVariableImportance[iVarIndex] += separationGain*separationGain * (nTotS+nTotB) * (nTotS+nTotB);
   }
   else {
      separationGain = 0.0;
   }
  
   return separationGain;
}
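// Sketch of the cumulative-weight scan used in TrainNodeFull (illustrative
// only; isSignal/weight/cumS/cumB are hypothetical names): after sorting the
// events by one variable, the candidate cut between positions k-1 and k sends
// the first k events to one daughter, whose summed signal/background weights
// are available in O(1) from the cumulative counters. All N-1 candidate cuts
// are thus scanned in O(N) per variable, on top of the O(N log N) sort:
//
//    std::sort(sample.begin(), sample.end());   // by the current variable
//    Double_t s = 0, b = 0;
//    for (auto& w : sample) {                   // one linear pass
//       (isSignal(w) ? s : b) += weight(w);
//       w.cumS = s;                             // cumulative signal weight
//       w.cumB = b;                             // cumulative background weight
//    }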

//___________________________________________________________________________________
TMVA::DecisionTreeNode* TMVA::DecisionTree::GetEventNode(const TMVA::Event & e) const
{
   // get the pointer to the leaf node in which a particular event ends up
   // (used in gradient boosting)

   TMVA::DecisionTreeNode *current = (TMVA::DecisionTreeNode*)this->GetRoot();
   while(current->GetNodeType() == 0) { // intermediate node in a tree
      current = (current->GoesRight(e)) ?
         (TMVA::DecisionTreeNode*)current->GetRight() :
         (TMVA::DecisionTreeNode*)current->GetLeft();
   }
   return current;
}
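// Usage sketch (assuming a trained 'tree' and an event pointer 'ev';
// illustrative only): in gradient boosting the leaf reached by an event is
// looked up to read or update its response, e.g.
//
//    TMVA::DecisionTreeNode* leaf = tree.GetEventNode(*ev);
//    Double_t response = leaf->GetResponse();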

//_______________________________________________________________________
Double_t TMVA::DecisionTree::CheckEvent( const TMVA::Event * e, Bool_t UseYesNoLeaf ) const
{
   // the event e is passed through the decision tree (starting at the root node)
   // and the output is the NodeType (signal or background) of the final node (basket)
   // in which the given event ends up, i.e. the result of the classification of
   // the event by this decision tree
  
   TMVA::DecisionTreeNode *current = this->GetRoot();
   if (!current){
      Log() << kFATAL << "CheckEvent: started with undefined root node" <<Endl;
      return 0; // keeps Coverity happy, as it doesn't know that kFATAL causes an exit
   }

   while (current->GetNodeType() == 0) { // intermediate node in a (pruned) tree
      current = (current->GoesRight(*e)) ? 
         current->GetRight() :
         current->GetLeft();
      if (!current) {
         Log() << kFATAL << "DT::CheckEvent: inconsistent tree structure" <<Endl;
      }

   }
  
   if ( DoRegression() ){
      return current->GetResponse();
   } 
   else {
      if (UseYesNoLeaf) return Double_t ( current->GetNodeType() );
      else              return current->GetPurity();
   }
}
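// Illustrative call (assuming a trained 'tree' and an event pointer 'ev'):
//
//    Double_t purity = tree.CheckEvent(ev, kFALSE); // leaf purity in [0,1]
//    Double_t label  = tree.CheckEvent(ev, kTRUE);  // leaf node type (signal/background)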

//_______________________________________________________________________
Double_t  TMVA::DecisionTree::SamplePurity( std::vector<TMVA::Event*> eventSample )
{
   // calculates the purity S/(S+B) of a given event sample
  
   Double_t sumsig=0, sumbkg=0, sumtot=0;
   for (UInt_t ievt=0; ievt<eventSample.size(); ievt++) {
      if (eventSample[ievt]->GetClass() != fSigClass) sumbkg+=eventSample[ievt]->GetWeight();
      else sumsig+=eventSample[ievt]->GetWeight();
      sumtot+=eventSample[ievt]->GetWeight();
   }
   // sanity check
   if (sumtot!= (sumsig+sumbkg)){
      Log() << kFATAL << "<SamplePurity> sumtot != sumsig+sumbkg"
            << sumtot << " " << sumsig << " " << sumbkg << Endl;
   }
   if (sumtot>0) return sumsig/(sumsig + sumbkg);
   else return -1;
}
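// Worked example (illustrative): for weighted sums sumsig = 3 and sumbkg = 1
// the purity returned above is 3/(3+1) = 0.75; an empty sample returns -1.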

//_______________________________________________________________________
std::vector<Double_t>  TMVA::DecisionTree::GetVariableImportance()
{
   // Return the relative variable importance, normalized such that all
   // variables together have importance 1. The importance is evaluated
   // as the total separation gain that each variable yielded in the
   // decision tree (weighted by the number of events)
  
   std::vector<Double_t> relativeImportance(fNvars);
   Double_t  sum=0;
   for (UInt_t i=0; i< fNvars; i++) {
      sum += fVariableImportance[i];
      relativeImportance[i] = fVariableImportance[i];
   } 
  
   for (UInt_t i=0; i< fNvars; i++) {
      if (sum > std::numeric_limits<double>::epsilon())
         relativeImportance[i] /= sum;
      else 
         relativeImportance[i] = 0;
   } 
   return relativeImportance;
}
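// Example (illustrative): raw accumulated importances {2, 6, 0} normalise to
// {0.25, 0.75, 0}, so the returned values sum to one (or are all zero when no
// separation gain was accumulated).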

//_______________________________________________________________________
Double_t  TMVA::DecisionTree::GetVariableImportance( UInt_t ivar )
{
   // returns the relative importance of variable ivar
  
   std::vector<Double_t> relativeImportance = this->GetVariableImportance();
   if (ivar < fNvars) return relativeImportance[ivar];
   else {
      Log() << kFATAL << "<GetVariableImportance>" << Endl
            << "---                     ivar = " << ivar << " is out of range " << Endl;
   }
  
   return -1;
}
