// @(#)root/tmva $Id: MethodDT.cxx 41909 2011-11-11 14:46:39Z pcanal $
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis       *
 * Package: TMVA                                                                  *
 * Class  : MethodDT (DT = Decision Trees)                                         *
 * Web    : http://tmva.sourceforge.net                                           *
 *                                                                                *
 * Description:                                                                   *
 *      Analysis of Boosted Decision Trees                                        *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland              *
 *      Helge Voss      <Helge.Voss@cern.ch>     - MPI-K Heidelberg, Germany      *
 *      Or Cohen        <orcohenor@gmail.com>    - Weizmann Inst., Israel         *
 *                                                                                *
 * Copyright (c) 2005:                                                            *
 *      CERN, Switzerland                                                         *
 *      MPI-K Heidelberg, Germany                                                 *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (http://tmva.sourceforge.net/LICENSE)                                          *
 **********************************************************************************/

//_______________________________________________________________________
//
// Analysis of Boosted Decision Trees
//
// Boosted decision trees have been successfully used in High Energy
// Physics analysis for example by the MiniBooNE experiment
// (Yang-Roe-Zhu, physics/0508045). In Boosted Decision Trees, the
// selection is done on a majority vote on the result of several decision
// trees, which are all derived from the same training sample by
// supplying different event weights during the training.
//
// Decision trees:
//
// successive decision nodes are used to categorize the
// events out of the sample as either signal or background. Each node
// uses only a single discriminating variable to decide if the event is
// signal-like ("goes right") or background-like ("goes left"). This
// forms a tree-like structure with "baskets" at the end (leaf nodes),
// and an event is classified as either signal or background according to
// whether the basket where it ends up has been classified signal or
// background during the training. Training of a decision tree is the
// process of defining the "cut criteria" for each node. The training
// starts with the root node. Here one takes the full training event
// sample and selects the variable and corresponding cut value that gives
// the best separation between signal and background at this stage. Using
// this cut criterion, the sample is then divided into two subsamples, a
// signal-like (right) and a background-like (left) sample. Two new nodes
// are then created for each of the two sub-samples and they are
// constructed using the same mechanism as described for the root
// node. The division is stopped once a certain node has reached either a
// minimum number of events, or a minimum or maximum signal purity. These
// leaf nodes are then called "signal" or "background" depending on whether
// they contain more signal or more background events from the training sample.
//
// Boosting:
//
// the idea behind boosting is that signal events from the training
// sample that end up in a background node (and vice versa) are given a
// larger weight than events that are in the correct leaf node. This
// results in a re-weighted training event sample, with which a new
// decision tree can be developed. The boosting can be applied several
// times (typically 100-500 times) and one ends up with a set of decision
// trees (a forest).
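//
// As a concrete illustration (assuming the common AdaBoost scheme, which
// the description above does not fix explicitly): if a tree misclassifies
// a fraction err of the weighted training events, the weights of the
// misclassified events are multiplied by
//
//    boostWeight = (1 - err) / err
//
// and all weights are then renormalised so that their sum stays constant.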
//
// Bagging:
//
// In this particular variant of the Boosted Decision Trees the boosting
// is not done on the basis of previous training results, but by a simple
// stochastic re-sampling of the initial training event sample.
//
// Analysis:
//
// applying an individual decision tree to a test event results in a
// classification of the event as either signal or background. For the
// boosted decision tree selection, an event is successively subjected to
// the whole set of decision trees and depending on how often it is
// classified as signal, a "likelihood" estimator is constructed for the
// event being signal or background. The value of this estimator is the
// one which is then used to select the events from an event sample, and
// the cut value on this estimator defines the efficiency and purity of
// the selection.
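//
// Usage:
//
// a minimal booking sketch, assuming the standard TMVA Factory workflow
// of this release (outputFile is an open TFile; the option values shown
// are illustrative only):
//
//    TMVA::Factory* factory = new TMVA::Factory( "TMVAClassification", outputFile );
//    factory->BookMethod( TMVA::Types::kDT, "DT",
//                         "SeparationType=GiniIndex:nCuts=20:"
//                         "PruneMethod=CostComplexity:PruneStrength=-1" );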
//
//_______________________________________________________________________

#include <algorithm>
#include "Riostream.h"
#include "TRandom3.h"
#include "TMath.h"
#include "TObjString.h"

#include "TMVA/ClassifierFactory.h"
#include "TMVA/MethodDT.h"
#include "TMVA/Tools.h"
#include "TMVA/Timer.h"
#include "TMVA/Ranking.h"
#include "TMVA/SdivSqrtSplusB.h"
#include "TMVA/BinarySearchTree.h"
#include "TMVA/SeparationBase.h"
#include "TMVA/GiniIndex.h"
#include "TMVA/CrossEntropy.h"
#include "TMVA/MisClassificationError.h"
#include "TMVA/MethodBoost.h"
#include "TMVA/CCPruner.h"

using std::vector;

REGISTER_METHOD(DT)

ClassImp(TMVA::MethodDT)

//_______________________________________________________________________
TMVA::MethodDT::MethodDT( const TString& jobName,
                          const TString& methodTitle,
                          DataSetInfo& theData,
                          const TString& theOption,
                          TDirectory* theTargetDir ) :
   TMVA::MethodBase( jobName, Types::kDT, methodTitle, theData, theOption, theTargetDir )
   , fTree(0)
   , fNodeMinEvents(0)
   , fNCuts(0)
   , fUseYesNoLeaf(kFALSE)
   , fNodePurityLimit(0)
   , fNNodesMax(0)
   , fMaxDepth(0)
   , fErrorFraction(0)
   , fPruneStrength(0)
   , fPruneMethod(DecisionTree::kNoPruning)
   , fAutomatic(kFALSE)
   , fRandomisedTrees(kFALSE)
   , fUseNvars(0)
   , fPruneBeforeBoost(kFALSE)
   , fDeltaPruneStrength(0)
{
   // the standard constructor for an ordinary "decision tree"
}

//_______________________________________________________________________
TMVA::MethodDT::MethodDT( DataSetInfo& dsi,
                          const TString& theWeightFile,
                          TDirectory* theTargetDir ) :
   TMVA::MethodBase( Types::kDT, dsi, theWeightFile, theTargetDir )
   , fTree(0)
   , fNodeMinEvents(0)
   , fNCuts(0)
   , fUseYesNoLeaf(kFALSE)
   , fNodePurityLimit(0)
   , fNNodesMax(0)
   , fMaxDepth(0)
   , fErrorFraction(0)
   , fPruneStrength(0)
   , fPruneMethod(DecisionTree::kNoPruning)
   , fAutomatic(kFALSE)
   , fRandomisedTrees(kFALSE)
   , fUseNvars(0)
   , fPruneBeforeBoost(kFALSE)
   , fDeltaPruneStrength(0)
{
   // constructor from a weight file (used by the Reader)
}

//_______________________________________________________________________
Bool_t TMVA::MethodDT::HasAnalysisType( Types::EAnalysisType type, UInt_t numberClasses, UInt_t /*numberTargets*/ )
{
   // DT can handle classification with 2 classes only
   if( type == Types::kClassification && numberClasses == 2 ) return kTRUE;
   return kFALSE;
}


//_______________________________________________________________________
void TMVA::MethodDT::DeclareOptions()
{
   // define the options (their key words) that can be set in the option string
   // UseRandomisedTrees  choose at each node splitting a random set of variables 
   // UseNvars         use UseNvars variables in randomised trees
   // SeparationType   the separation criterion applied in the node splitting
   //                  known: GiniIndex
   //                         MisClassificationError
   //                         CrossEntropy
   //                         SDivSqrtSPlusB
   // nEventsMin:      the minimum number of events in a node (leaf criteria, stop splitting)
   // nCuts:           the number of steps in the optimisation of the cut for a node (if < 0, then
   //                  step size is determined by the events)
   // UseYesNoLeaf     decide if the classification is done simply by the node type, or the S/B
   //                  (from the training) in the leaf node
   // NodePurityLimit  the minimum purity to classify a node as a signal node (used in pruning and boosting to determine
   //                  misclassification error rate)
   // PruneMethod      The Pruning method: 
   //                  known: NoPruning  // switch off pruning completely
   //                         ExpectedError
   //                         CostComplexity 
   // PruneStrength    a parameter to adjust the amount of pruning. Should be large enough that overtraining is avoided
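   //
   // an illustrative option string combining these settings (the values are
   // examples, not tuned recommendations):
   //
   //    "SeparationType=GiniIndex:nCuts=20:nEventsMin=400:PruneMethod=CostComplexity:PruneStrength=-1"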

   DeclareOptionRef(fRandomisedTrees,"UseRandomisedTrees","Choose at each node splitting a random set of variables and *bagging*");
   DeclareOptionRef(fUseNvars,"UseNvars","Number of variables used if randomised Tree option is chosen");
   DeclareOptionRef(fUseYesNoLeaf=kTRUE, "UseYesNoLeaf", 
                    "Use Sig or Bkg node type or the ratio S/B as classification in the leaf node");
   DeclareOptionRef(fNodePurityLimit=0.5, "NodePurityLimit", "In boosting/pruning, nodes with purity > NodePurityLimit are signal; background otherwise.");
   DeclareOptionRef(fPruneBeforeBoost=kFALSE, "PruneBeforeBoost", 
                    "Whether to perform the prune process right after the training or after the boosting");
   DeclareOptionRef(fSepTypeS="GiniIndex", "SeparationType", "Separation criterion for node splitting");
   AddPreDefVal(TString("MisClassificationError"));
   AddPreDefVal(TString("GiniIndex"));
   AddPreDefVal(TString("CrossEntropy"));
   AddPreDefVal(TString("SDivSqrtSPlusB"));
   DeclareOptionRef(fNodeMinEvents, "nEventsMin", "Minimum number of events in a leaf node (default: max(20, N_train/(Nvar^2)/10) ) ");
   DeclareOptionRef(fNCuts, "nCuts", "Number of steps during node cut optimisation");
   DeclareOptionRef(fPruneStrength, "PruneStrength", "Pruning strength (negative value == automatic adjustment)");
   DeclareOptionRef(fPruneMethodS, "PruneMethod", "Pruning method: NoPruning (switched off), ExpectedError or CostComplexity");
   
   AddPreDefVal(TString("NoPruning"));
   AddPreDefVal(TString("ExpectedError"));
   AddPreDefVal(TString("CostComplexity"));

   DeclareOptionRef(fNNodesMax=100000,"NNodesMax","Max number of nodes in tree");
   if (DoRegression()) {
      DeclareOptionRef(fMaxDepth=50,"MaxDepth","Max depth of the decision tree allowed");
   }else{
      DeclareOptionRef(fMaxDepth=3,"MaxDepth","Max depth of the decision tree allowed");
   }
}

//_______________________________________________________________________
void TMVA::MethodDT::ProcessOptions() 
{
   // the option string is decoded, for available options see "DeclareOptions"
   fSepTypeS.ToLower();
   if      (fSepTypeS == "misclassificationerror") fSepType = new MisClassificationError();
   else if (fSepTypeS == "giniindex")              fSepType = new GiniIndex();
   else if (fSepTypeS == "crossentropy")           fSepType = new CrossEntropy();
   else if (fSepTypeS == "sdivsqrtsplusb")         fSepType = new SdivSqrtSplusB();
   else {
      Log() << kINFO << GetOptions() << Endl;
      Log() << kFATAL << "<ProcessOptions> unknown Separation Index option called" << Endl;
   }     

   //   std::cout << "fSeptypes " << fSepTypeS << "  fseptype " << fSepType << std::endl;

   fPruneMethodS.ToLower();
   if      (fPruneMethodS == "expectederror" )   fPruneMethod = DecisionTree::kExpectedErrorPruning;
   else if (fPruneMethodS == "costcomplexity" )  fPruneMethod = DecisionTree::kCostComplexityPruning;
   else if (fPruneMethodS == "nopruning" )       fPruneMethod = DecisionTree::kNoPruning;
   else {
      Log() << kINFO << GetOptions() << Endl;
      Log() << kFATAL << "<ProcessOptions> unknown PruneMethod option called" << Endl;
   }

   if (fPruneStrength < 0) fAutomatic = kTRUE;
   else fAutomatic = kFALSE;
   if (fAutomatic && fPruneMethod != DecisionTree::kCostComplexityPruning){
      Log() << kFATAL 
            <<  "Sorry, automatic pruning strength determination is not yet implemented for ExpectedErrorPruning" << Endl;
   }


   if (this->Data()->HasNegativeEventWeights()){
      Log() << kINFO << " You are using a Monte Carlo sample that also contains negative weights. "
              << "That should in principle be fine, as long as on average you end up with "
              << "something positive. For this you have to make sure that the minimal number "
              << "of (unweighted) events demanded for a tree node (currently you use: nEventsMin="
              << fNodeMinEvents << ", you can set this via the option string when booking the "
              << "classifier) is large enough to allow for reasonable averaging!!! "
              << "If this does not help, maybe you want to try the option NoNegWeightsInTraining, "
              << "which ignores events with negative weights in the training. " << Endl
              << Endl << "Note: you'll get a WARNING message during the training if that should ever happen" << Endl;
   }
   
   if (fRandomisedTrees){
      Log() << kINFO << " Randomised trees should use *bagging* as *boost* method. Did you set this in the *MethodBoost* ? . Here I can enforce only the *no pruning*" << Endl;
      fPruneMethod = DecisionTree::kNoPruning;
      //      fBoostType   = "Bagging";
   }

}

//_______________________________________________________________________
void TMVA::MethodDT::Init( void )
{
   // common initialisation with defaults for the DT-Method
   fNodeMinEvents  = TMath::Max( 20, int( Data()->GetNTrainingEvents() / (10*GetNvar()*GetNvar())) );
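   // e.g. with 50000 training events and 5 input variables this default
   // evaluates to max( 20, 50000/(10*5*5) ) = 200 (numbers purely illustrative)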
   fNCuts          = 20; 
   fPruneMethod    = DecisionTree::kNoPruning;
   fPruneStrength  = 5;     // a negative value would mean automatic determination of the prune strength using a validation sample
   fDeltaPruneStrength=0.1;
   fRandomisedTrees= kFALSE;
   fUseNvars       = GetNvar();

   // reference cut value to distinguish signal-like from background-like events
   SetSignalReferenceCut( 0 );
   if (fAnalysisType == Types::kClassification || fAnalysisType == Types::kMulticlass ) {
      fMaxDepth        = 3;
   }else {
      fMaxDepth = 50;
   }
}

//_______________________________________________________________________
TMVA::MethodDT::~MethodDT( void )
{
   //destructor
   delete fTree;
}

//_______________________________________________________________________
void TMVA::MethodDT::Train( void )
{
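   // builds the (single) decision tree from the training event sample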
   TMVA::DecisionTreeNode::fgIsTraining=true;
   fTree = new DecisionTree( fSepType, fNodeMinEvents, fNCuts, 0, 
                             fRandomisedTrees, fUseNvars, fNNodesMax, fMaxDepth,0 );
   if (fRandomisedTrees) Log()<<kWARNING<<" randomised trees do not work properly yet in this framework,"
                                << " as there is currently no way to give each tree a new random seed; they"
                                << " would all be the same, and that is not good " << Endl;
   fTree->SetAnalysisType( GetAnalysisType() );

   fTree->BuildTree(GetEventCollection(Types::kTraining));
   TMVA::DecisionTreeNode::fgIsTraining=false;
}

//_______________________________________________________________________
Bool_t TMVA::MethodDT::MonitorBoost( MethodBoost* booster )
{
   Int_t methodIndex = booster->GetMethodIndex();
   if (booster->GetBoostStage() == Types::kBoostProcBegin)
      {
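         // monitoring histograms (by index): 0 = nodes before pruning,
         // 1 = nodes after pruning, 2 = prune strength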
         booster->AddMonitoringHist(new TH1I("NodesBeforePruning","nodes before pruning",booster->GetBoostNum(),0,booster->GetBoostNum()));
         booster->AddMonitoringHist(new TH1I("NodesAfterPruning","nodes after pruning",booster->GetBoostNum(),0,booster->GetBoostNum()));
         booster->AddMonitoringHist(new TH1D("PruneStrength","prune strength",booster->GetBoostNum(),0,booster->GetBoostNum()));
      }

   if (booster->GetBoostStage() == Types::kBeforeTraining)
      {
         if (methodIndex == 0)
            {
               booster->GetMonitoringHist(2)->SetXTitle("#tree");
               booster->GetMonitoringHist(2)->SetYTitle("PruneStrength");
               // divide the data set for pruning when the prune strength is determined automatically
               if (fAutomatic)
                  {
                     Data()->DivideTrainingSet(2);
                     Data()->MoveTrainingBlock(1,Types::kValidation,kTRUE);
                  }
            }
      }
   else if (booster->GetBoostStage() == Types::kBeforeBoosting)
      booster->GetMonitoringHist(0)->SetBinContent(booster->GetBoostNum()+1,fTree->GetNNodes());

   if (booster->GetBoostStage() == ((fPruneBeforeBoost)?Types::kBeforeBoosting:Types::kBoostValidation)
       && !(fPruneMethod == DecisionTree::kNoPruning)) {
      
      if (methodIndex==0 && fPruneBeforeBoost == kFALSE)
         Log() << kINFO << "Pruning "<< booster->GetBoostNum() << " Decision Trees ... patience please" << Endl;
         
      // read the prune strength found for the previous tree
      if (fAutomatic && methodIndex > 0) {
         MethodDT* mdt = dynamic_cast<MethodDT*>(booster->GetPreviousMethod());
         if(mdt)
            fPruneStrength = mdt->GetPruneStrength();
      }

      booster->GetMonitoringHist(0)->SetBinContent(methodIndex+1,fTree->GetNNodes());
      booster->GetMonitoringHist(2)->SetBinContent(methodIndex+1,PruneTree(methodIndex));
      booster->GetMonitoringHist(1)->SetBinContent(methodIndex+1,fTree->GetNNodes());
   } // end of pruning block (otherwise no pruning is performed)
   else if (booster->GetBoostStage() != Types::kBoostProcEnd)
      return kFALSE;

   // finish the pruning process and print a summary
   if (booster->GetBoostStage() == Types::kBoostProcEnd)
      {
         if (fPruneMethod == DecisionTree::kNoPruning) {
            Log() << kINFO << "<Train> average number of nodes (w/o pruning) : "
                    <<  booster->GetMonitoringHist(0)->GetMean() << Endl;
         }
         else
            {
               Log() << kINFO << "<Train> average number of nodes before/after pruning : " 
                       << booster->GetMonitoringHist(0)->GetMean() << " / " 
                       << booster->GetMonitoringHist(1)->GetMean()
                       << Endl;
            }
      }

   return kTRUE;
}


//_______________________________________________________________________
Double_t TMVA::MethodDT::PruneTree(const Int_t /* methodIndex */ )
{
   if (fAutomatic && fPruneMethod == DecisionTree::kCostComplexityPruning) { // automatic cost complexity pruning
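      // the CCPruner determines the optimal prune sequence (and the
      // corresponding prune strength), and the selected nodes are pruned away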
      CCPruner* pruneTool = new CCPruner(fTree, this->Data() , fSepType);
      pruneTool->Optimize();
      std::vector<DecisionTreeNode*> nodes = pruneTool->GetOptimalPruneSequence();
      fPruneStrength = pruneTool->GetOptimalPruneStrength();
      for(UInt_t i = 0; i < nodes.size(); i++) 
         fTree->PruneNode(nodes[i]);
      delete pruneTool;
   } 
   else if (fAutomatic &&  fPruneMethod != DecisionTree::kCostComplexityPruning){
      /*

      Double_t alpha = 0;
      Double_t delta = fDeltaPruneStrength;
      
      DecisionTree*  dcopy;
      vector<Double_t> q;
      multimap<Double_t,Double_t> quality;
      Int_t nnodes=fTree->GetNNodes();

      // find the maximum prune strength that still leaves some nodes
      Bool_t forceStop = kFALSE;
      Int_t troubleCount=0, previousNnodes=nnodes;


      nnodes=fTree->GetNNodes();
      while (nnodes > 3 && !forceStop) {
         dcopy = new DecisionTree(*fTree);
         dcopy->SetPruneStrength(alpha+=delta);
         dcopy->PruneTree();
         q.push_back(TestTreeQuality(dcopy));
         quality.insert(pair<const Double_t,Double_t>(q.back(),alpha));
         nnodes=dcopy->GetNNodes();
         if (previousNnodes == nnodes) troubleCount++;
         else { 
            troubleCount=0; // reset counter
            if (nnodes < previousNnodes / 2 ) fDeltaPruneStrength /= 2.;
         }
         previousNnodes = nnodes;
         if (troubleCount > 20) {
            if (methodIndex == 0 && fPruneStrength <=0) {//maybe you need larger stepsize ??
               fDeltaPruneStrength *= 5;
               Log() << kINFO << "<PruneTree> trouble determining optimal prune strength"
                       << " for Tree " << methodIndex
                       << " --> first try to increase the step size"
                       << " currently Prunestrenght= " << alpha 
                       << " stepsize " << fDeltaPruneStrength << " " << Endl;
               troubleCount = 0;   // try again
               fPruneStrength = 1; // if it was for the first time.. 
            } else if (methodIndex == 0 && fPruneStrength <=2) {//maybe you need much larger stepsize ??
               fDeltaPruneStrength *= 5;
               Log() << kINFO << "<PruneTree> trouble determining optimal prune strength"
                       << " for Tree " << methodIndex
                       << " -->  try to increase the step size even more.. "
                       << " if that still didn't work, TRY IT BY HAND"  
                       << " currently Prunestrenght= " << alpha 
                       << " stepsize " << fDeltaPruneStrength << " " << Endl;
               troubleCount = 0;   // try again
               fPruneStrength = 3; // if it was for the first time.. 
            } else {
               forceStop=kTRUE;
               Log() << kINFO << "<PruneTree> trouble determining optimal prune strength"
                       << " for Tree " << methodIndex << " at tested prune strength: " << alpha << " --> abort forced, use same strength as for previous tree:"
                       << fPruneStrength << Endl;
            }
         }
      if (fgDebugLevel==1) Log() << kINFO << "Pruned with ("<<alpha
                                      << ") give quality: " << q.back()
                                      << " and #nodes: " << nnodes  
                                      << Endl;
         delete dcopy;
      }
      if (!forceStop) {
         multimap<Double_t,Double_t>::reverse_iterator it=quality.rend();
         it++;
         fPruneStrength = it->second;
         // adjust the step size for the next tree.. think that 20 steps are sort of
         // fine enough.. could become a tunable option later..
         fDeltaPruneStrength *= Double_t(q.size())/20.;
      }

      fTree->SetPruneStrength(fPruneStrength);
      fTree->PruneTree();
      */
   } 
   else {
      fTree->SetPruneStrength(fPruneStrength);
      fTree->PruneTree();
   }
   return fPruneStrength;
}

//_______________________________________________________________________
Double_t TMVA::MethodDT::TestTreeQuality( DecisionTree *dt )
{
   Data()->SetCurrentType(Types::kValidation);
   // test the tree quality in terms of misclassification rate on the validation sample
   Double_t SumCorrect=0,SumWrong=0;
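   // an event counts as correctly classified if the tree response (the leaf
   // purity) lies on the same side of the purity limit as its true class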
   for (Long64_t ievt=0; ievt<Data()->GetNEvents(); ievt++)
      {
         Event * ev = Data()->GetEvent(ievt);
         if ((dt->CheckEvent(*ev) > dt->GetNodePurityLimit() ) == DataInfo().IsSignal(ev)) SumCorrect+=ev->GetWeight();
         else SumWrong+=ev->GetWeight();
      }
   Data()->SetCurrentType(Types::kTraining);
   return  SumCorrect / (SumCorrect + SumWrong);
}

//_______________________________________________________________________
void TMVA::MethodDT::AddWeightsXMLTo( void* parent ) const 
{
   fTree->AddXMLTo(parent);
   //Log() << kFATAL << "Please implement writing of weights as XML" << Endl;
}

//_______________________________________________________________________
void TMVA::MethodDT::ReadWeightsFromXML( void* wghtnode)
{
   if(fTree)
      delete fTree;
   fTree = new DecisionTree();
   fTree->ReadXML(wghtnode,GetTrainingTMVAVersionCode());
}

//_______________________________________________________________________
void  TMVA::MethodDT::ReadWeightsFromStream( istream& istr )
{
   delete fTree;
   fTree = new DecisionTree();
   fTree->Read(istr);
}

//_______________________________________________________________________
Double_t TMVA::MethodDT::GetMvaValue( Double_t* err, Double_t* errUpper )
{
   // returns the MVA value: the leaf purity, or the node type (+1 signal, -1 background) if UseYesNoLeaf is set

   // cannot determine error
   NoErrorCalc(err, errUpper);

   return fTree->CheckEvent(*GetEvent(),fUseYesNoLeaf);
}

//_______________________________________________________________________
void TMVA::MethodDT::GetHelpMessage() const
{
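   // no help message available (yet)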

}
//_______________________________________________________________________
const TMVA::Ranking* TMVA::MethodDT::CreateRanking()
{
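   // no variable ranking is implemented for this method; return a null pointer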
   return 0;
}