ROOT 6.12/07 Reference Guide
CostComplexityPruneTool.cxx
/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis      *
 * Package: TMVA                                                                  *
 * Class  : TMVA::DecisionTree                                                    *
 * Web    : http://tmva.sourceforge.net                                          *
 *                                                                                *
 * Description:                                                                   *
 *      Implementation of a Decision Tree                                         *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland              *
 *      Helge Voss      <Helge.Voss@cern.ch>     - MPI-K Heidelberg, Germany      *
 *      Kai Voss        <Kai.Voss@cern.ch>       - U. of Victoria, Canada         *
 *      Doug Schouten   <dschoute@sfu.ca>        - Simon Fraser U., Canada        *
 *                                                                                *
 * Copyright (c) 2005:                                                            *
 *      CERN, Switzerland                                                         *
 *      U. of Victoria, Canada                                                    *
 *      MPI-K Heidelberg, Germany                                                 *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (http://mva.sourceforge.net/license.txt)                                       *
 *                                                                                *
 **********************************************************************************/

/*! \class TMVA::CostComplexityPruneTool
\ingroup TMVA
A class to prune a decision tree using the Cost Complexity method
(see "Classification and Regression Trees" by Leo Breiman et al.).

### Some definitions:

 - \f$ T_{max} \f$ - the initial, usually highly overtrained tree, that is to be pruned back
 - \f$ R(T) \f$ - quality index (Gini, misclassification rate, or other) of a tree \f$ T \f$
 - \f$ \sim T \f$ - set of terminal nodes in \f$ T \f$
 - \f$ T' \f$ - the pruned subtree of \f$ T_{max} \f$ that has the best quality index \f$ R(T') \f$
 - \f$ \alpha \f$ - the prune strength parameter in Cost Complexity pruning \f$ (R_{\alpha}(T) = R(T) + \alpha \cdot |\sim T|) \f$

There are two running modes in CCPruner: (i) one may select a prune strength and prune back
the tree \f$ T_{max} \f$ until the criterion
\f[
 \alpha < \frac{R(t) - R(T_t)}{|\sim T_t| - 1}
\f]

holds for all nodes \f$ t \f$ in \f$ T \f$ (here \f$ T_t \f$ denotes the subtree rooted at node \f$ t \f$),
or (ii) the algorithm finds the sequence of critical points
\f$ \alpha_k < \alpha_{k+1} < ... < \alpha_K \f$ such that \f$ T_K = root(T_{max}) \f$ and then selects the optimally-pruned
subtree, defined to be the subtree with the best quality index for the validation sample.
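
A minimal usage sketch (illustrative only; the tool is normally driven by the
DecisionTree pruning code rather than called by hand). The names `dt` (a pointer to
a trained DecisionTree) and `pruneEvents` (an IPruneTool::EventSample holding the
validation events) are assumed to exist in the surrounding code:

\code{.cpp}
TMVA::CostComplexityPruneTool pruneTool;  // default: reuse the quality index stored in the nodes
TMVA::PruningInfo* info =
   pruneTool.CalculatePruningInfo( dt, &pruneEvents, kTRUE ); // automatic prune-strength search
if (info != NULL) {
   // info->PruneStrength and info->QualityIndex describe the chosen prune point;
   // info->PruneSequence lists the nodes whose branches are to be pruned away,
   // which the caller then removes from the tree.
   delete info;
}
\endcode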
*/

#include "TMVA/CostComplexityPruneTool.h"

#include "TMVA/MsgLogger.h"
#include "TMVA/SeparationBase.h"
#include "TMVA/DecisionTree.h"

#include "RtypesCore.h"

#include <fstream>
#include <limits>
#include <math.h>

using namespace TMVA;

////////////////////////////////////////////////////////////////////////////////
/// the constructor for the cost complexity pruning

TMVA::CostComplexityPruneTool::CostComplexityPruneTool( SeparationBase* qualityIndex ) :
   IPruneTool(),
   fLogger(new MsgLogger("CostComplexityPruneTool") )
{
   fOptimalK = -1;

   // !! changed from Doug's code: use the QualityIndex already stored in the nodes
   // when no "new" QualityIndex calculator is given. This way the Regression case
   // is covered easily. For Regression, the pruning uses the same separation index
   // as in the tree building, hence it does not need to be re-calculated
   // (which would need more information than simply "s" and "b").

   fQualityIndexTool = qualityIndex;

   //fLogger->SetMinType( kDEBUG );
   fLogger->SetMinType( kWARNING );
}

////////////////////////////////////////////////////////////////////////////////
/// the destructor for the cost complexity pruning

TMVA::CostComplexityPruneTool::~CostComplexityPruneTool( ) {
   if(fQualityIndexTool != NULL) delete fQualityIndexTool;
}

////////////////////////////////////////////////////////////////////////////////
/// the routine that basically "steers" the pruning process: it calls the
/// calculation of the pruning sequence, evaluates the tree quality, and the like.
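///
/// The steps below are: (1) in automatic mode, run the pruning validation sample
/// through the unpruned tree and record its quality; (2) fill the cost-complexity
/// "meta data" for every node (InitTreePruningMetaData); (3) compute the pruning
/// sequence (Optimize); and (4) package the chosen prune point into the returned
/// PruningInfo object.
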
TMVA::PruningInfo*
TMVA::CostComplexityPruneTool::CalculatePruningInfo( DecisionTree* dt,
                                                     const IPruneTool::EventSample* validationSample,
                                                     Bool_t isAutomatic )
{
   if( isAutomatic ) SetAutomatic();

   if( dt == NULL || (IsAutomatic() && validationSample == NULL) ) {
      // must have a valid decision tree to prune, and if the prune strength
      // is to be chosen automatically, must have a test sample from
      // which to calculate the quality of the pruned tree(s)
      return NULL;
   }

   Double_t Q = -1.0;
   Double_t W = 1.0;

   if(IsAutomatic()) {
      // run the pruning validation sample through the unpruned tree
      dt->ApplyValidationSample(validationSample);
      W = dt->GetSumWeights(validationSample); // get the sum of weights in the pruning validation sample
      // calculate the quality of the tree in the unpruned case
      Q = dt->TestPrunedTreeQuality();

      Log() << kDEBUG << "Node purity limit is: " << dt->GetNodePurityLimit() << Endl;
      Log() << kDEBUG << "Sum of weights in pruning validation sample: " << W << Endl;
      Log() << kDEBUG << "Quality of tree prior to any pruning is " << Q/W << Endl;
   }

   // store the cost complexity metadata for the decision tree at each node
   try {
      InitTreePruningMetaData( (DecisionTreeNode*)dt->GetRoot() );
   }
   catch(std::string error) {
      Log() << kERROR << "Couldn't initialize the tree meta data because of error ("
            << error << ")" << Endl;
      return NULL;
   }

   Log() << kDEBUG << "Automatic cost complexity pruning is " << (IsAutomatic()?"on":"off") << "." << Endl;

   try {
      Optimize( dt, W ); // run the cost complexity pruning algorithm
   }
   catch(std::string error) {
      Log() << kERROR << "Error optimizing pruning sequence ("
            << error << ")" << Endl;
      return NULL;
   }

   Log() << kDEBUG << "Index of pruning sequence to stop at: " << fOptimalK << Endl;

   PruningInfo* info = new PruningInfo();

   if(fOptimalK < 0) {
      // no pruning necessary, or a sequence could not be computed
      info->PruneStrength = 0;
      info->QualityIndex = Q/W;
      info->PruneSequence.clear();
      Log() << kINFO << "No proper pruning could be calculated. Tree "
            << dt->GetTreeID() << " will not be pruned. Do not worry if this"
            << " happens for a few trees." << Endl;
      return info;
   }
   info->QualityIndex = fQualityIndexList[fOptimalK];
   Log() << kDEBUG << " prune until k=" << fOptimalK << " with alpha=" << fPruneStrengthList[fOptimalK] << Endl;
   for( Int_t i = 0; i < fOptimalK; i++ ){
      info->PruneSequence.push_back(fPruneSequence[i]);
   }
   if( IsAutomatic() ){
      info->PruneStrength = fPruneStrengthList[fOptimalK];
   }
   else {
      info->PruneStrength = fPruneStrength;
   }

   return info;
}

////////////////////////////////////////////////////////////////////////////////
/// initialise the pruning "meta data" for each node: the cost complexity, the
/// critical alpha, the minimal alpha in the subtree below it, etc.

void TMVA::CostComplexityPruneTool::InitTreePruningMetaData( DecisionTreeNode* n ) {
   if( n == NULL ) return;

   Double_t s = n->GetNSigEvents();
   Double_t b = n->GetNBkgEvents();
   // set R(t) = N_events*Gini(t) or MisclassificationError(t), etc.
   if( fQualityIndexTool != NULL ) n->SetNodeR( (s+b)*fQualityIndexTool->GetSeparationIndex(s,b) );
   else                            n->SetNodeR( (s+b)*n->GetSeparationIndex() );

   if(n->GetLeft() != NULL && n->GetRight() != NULL) { // n is an interior (non-leaf) node
      n->SetTerminal(kFALSE);
      // traverse the tree: initialise the children first
      InitTreePruningMetaData(n->GetLeft());
      InitTreePruningMetaData(n->GetRight());
      // set |~T_t|
      n->SetNTerminal( n->GetLeft()->GetNTerminal() +
                       n->GetRight()->GetNTerminal());
      // set R(T) = sum[n' in ~T]{ R(n') }
      n->SetSubTreeR( (n->GetLeft()->GetSubTreeR() +
                       n->GetRight()->GetSubTreeR()));
      // set alpha_c, the alpha value at which it becomes advantageous to prune at node n
      n->SetAlpha( ((n->GetNodeR() - n->GetSubTreeR()) /
                    (n->GetNTerminal() - 1)));

      // G(t) = min( alpha_c, G(l(n)), G(r(n)) )
      // the minimum alpha in the subtree rooted at this node
      n->SetAlphaMinSubtree( std::min(n->GetAlpha(), std::min(n->GetLeft()->GetAlphaMinSubtree(),
                                                              n->GetRight()->GetAlphaMinSubtree())));
      n->SetCC(n->GetAlpha());

   } else { // n is a terminal node
      n->SetNTerminal( 1 ); n->SetTerminal( );
      if( fQualityIndexTool != NULL ) n->SetSubTreeR( (s+b)*fQualityIndexTool->GetSeparationIndex(s,b) );
      else                            n->SetSubTreeR( (s+b)*n->GetSeparationIndex() );
      n->SetAlpha(std::numeric_limits<double>::infinity( ));
      n->SetAlphaMinSubtree(std::numeric_limits<double>::infinity( ));
      n->SetCC(n->GetAlpha());
   }

   // DecisionTreeNode* R = (DecisionTreeNode*)mdt->GetRoot();
   // Double_t x = R->GetAlphaMinSubtree();
   // Log() << "alphaMin(Root) = " << x << Endl;
}


////////////////////////////////////////////////////////////////////////////////
/// After the critical \f$ \alpha \f$ values (at which the corresponding nodes would
/// be pruned away) have been established in InitTreePruningMetaData, we now need, for
///
/// automatic pruning:
/// find the value of \f$ \alpha \f$ for which the test sample gives minimal error,
/// on the tree with all nodes pruned that have \f$ \alpha_{critical} < \alpha \f$;
///
/// fixed-parameter pruning:
/// stop at a fixed position in the pruning sequence, given by the prune strength
/// as a relative position in the sequence of pruned trees.
///
void TMVA::CostComplexityPruneTool::Optimize( DecisionTree* dt, Double_t weights ) {
   Int_t k = 1;
   Double_t alpha = -1.0e10;
   Double_t epsilon = std::numeric_limits<double>::epsilon();

   fQualityIndexList.clear();
   fPruneSequence.clear();
   fPruneStrengthList.clear();

   DecisionTreeNode* R = (DecisionTreeNode*)dt->GetRoot(); // the root node of the tree

   Double_t qmin = 0.0;
   if(IsAutomatic()){
      // initialize the tree quality (at this stage it is still the quality of the yet-unpruned tree)
      qmin = dt->TestPrunedTreeQuality()/weights;
   }

   // Now prune the tree in steps until it is gone. At each pruning step, the pruning
   // takes place at the node that is regarded as the "weakest link".
   // For automatic pruning, at each step we calculate the current quality of the
   // tree, and in the end we prune at the minimum of the tree quality.
   // For the fixed-parameter pruning, the cut is simply set at a relative position
   // in the sequence, according to the "length" of the sequence of pruned trees:
   // 100: at the end (pruned until the root node would be the next pruning candidate),
   //  50: in the middle of the sequence,
   // etc...
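   // For example (illustrative numbers only): with fPruneStrength = 50 and a pruning
   // sequence of 8 trees, the fixed-parameter branch below selects
   // fOptimalK = int(50/100.0 * 8) = 4, i.e. it stops half-way through the sequence.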
   while(R->GetNTerminal() > 1) { // prune upwards to the root node

      // initialize alpha
      alpha = TMath::Max(R->GetAlphaMinSubtree(), alpha);

      if( R->GetAlphaMinSubtree() >= R->GetAlpha() ) {
         Log() << kDEBUG << "\nCaught trying to prune the root node!" << Endl;
         break;
      }

      DecisionTreeNode* t = R;

      // descend to the weakest link
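      // (at each step, follow the child whose subtree carries the minimal critical
      // alpha; the comparison against the left child's minimum, within epsilon,
      // decides the direction)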
      while(t->GetAlphaMinSubtree() < t->GetAlpha()) {
         // std::cout << t->GetAlphaMinSubtree() << " " << t->GetAlpha() << " "
         //           << t->GetAlphaMinSubtree() - t->GetAlpha() << " t==R?" << int(t == R) << std::endl;
         // while( (t->GetAlphaMinSubtree() - t->GetAlpha()) < epsilon) {
         // if(TMath::Abs(t->GetAlphaMinSubtree() - t->GetLeft()->GetAlphaMinSubtree())/TMath::Abs(t->GetAlphaMinSubtree()) < epsilon) {
         if(TMath::Abs(t->GetAlphaMinSubtree() - t->GetLeft()->GetAlphaMinSubtree()) < epsilon) {
            t = t->GetLeft();
         } else {
            t = t->GetRight();
         }
      }

      if( t == R ) {
         Log() << kDEBUG << "\nCaught trying to prune the root node!" << Endl;
         break;
      }

      DecisionTreeNode* n = t;

      // Log() << kDEBUG << "alpha[" << k << "]: " << alpha << Endl;
      // Log() << kDEBUG << "===========================" << Endl
      //       << "Pruning branch listed below the node" << Endl;
      // t->Print( Log() );
      // Log() << kDEBUG << "===========================" << Endl;
      // t->PrintRecPrune( Log() );

      dt->PruneNodeInPlace(t); // prune the branch rooted at node t

      while(t != R) { // go back up the (pruned) tree and recalculate R(T), alpha_c
         t = t->GetParent();
         t->SetNTerminal(t->GetLeft()->GetNTerminal() + t->GetRight()->GetNTerminal());
         t->SetSubTreeR(t->GetLeft()->GetSubTreeR() + t->GetRight()->GetSubTreeR());
         t->SetAlpha((t->GetNodeR() - t->GetSubTreeR())/(t->GetNTerminal() - 1));
         t->SetAlphaMinSubtree(std::min(t->GetAlpha(), std::min(t->GetLeft()->GetAlphaMinSubtree(),
                                                                t->GetRight()->GetAlphaMinSubtree())));
         t->SetCC(t->GetAlpha());
      }
      k += 1;

      Log() << kDEBUG << "after this pruning step I would have " << R->GetNTerminal() << " remaining terminal nodes " << Endl;

      if(IsAutomatic()) {
         Double_t q = dt->TestPrunedTreeQuality()/weights;
         fQualityIndexList.push_back(q);
      }
      else {
         fQualityIndexList.push_back(1.0);
      }
      fPruneSequence.push_back(n);
      fPruneStrengthList.push_back(alpha);
   }

   if(fPruneSequence.empty()) {
      fOptimalK = -1;
      return;
   }

   if(IsAutomatic()) {
      k = -1;
      for(UInt_t i = 0; i < fQualityIndexList.size(); i++) {
         if(fQualityIndexList[i] < qmin) {
            qmin = fQualityIndexList[i];
            k = i;
         }
      }
      fOptimalK = k;
   }
   else {
      // regularize the prune strength relative to this tree
      fOptimalK = int(fPruneStrength/100.0 * fPruneSequence.size() );
      Log() << kDEBUG << "SequenceSize=" << fPruneSequence.size()
            << " fOptimalK " << fOptimalK << Endl;
   }

   Log() << kDEBUG << "\n************ Summary for Tree " << dt->GetTreeID() << " *******" << Endl
         << "Number of trees in the sequence: " << fPruneSequence.size() << Endl;

   Log() << kDEBUG << "Pruning strength parameters: [";
   for(UInt_t i = 0; i < fPruneStrengthList.size()-1; i++)
      Log() << kDEBUG << fPruneStrengthList[i] << ", ";
   Log() << kDEBUG << fPruneStrengthList[fPruneStrengthList.size()-1] << "]" << Endl;

   Log() << kDEBUG << "Misclassification rates: [";
   for(UInt_t i = 0; i < fQualityIndexList.size()-1; i++)
      Log() << kDEBUG << fQualityIndexList[i] << ", ";
   Log() << kDEBUG << fQualityIndexList[fQualityIndexList.size()-1] << "]" << Endl;

   Log() << kDEBUG << "Prune index: " << fOptimalK+1 << Endl;
}