Logo ROOT  
Reference Guide
 
Loading...
Searching...
No Matches
MethodPDEFoam.cxx
Go to the documentation of this file.
1// @(#)root/tmva $Id$
2// Author: Tancredi Carli, Dominik Dannheim, Alexander Voigt
3
4/**********************************************************************************
5 * Project: TMVA - a Root-integrated toolkit for multivariate Data analysis *
6 * Package: TMVA *
7 * Class : MethodPDEFoam *
8 * *
9 * *
10 * Description: *
11 * Implementation (see header for description) *
12 * *
13 * Authors (alphabetical): *
14 * Tancredi Carli - CERN, Switzerland *
15 * Dominik Dannheim - CERN, Switzerland *
16 * Alexander Voigt - TU Dresden, Germany *
17 * Peter Speckmayer - CERN, Switzerland *
18 * *
19 * Original author of the TFoam implementation: *
20 * S. Jadach - Institute of Nuclear Physics, Cracow, Poland *
21 * *
22 * Copyright (c) 2008, 2010: *
23 * CERN, Switzerland *
24 * MPI-K Heidelberg, Germany *
25 * *
26 * Redistribution and use in source and binary forms, with or without *
27 * modification, are permitted according to the terms listed in LICENSE *
28 * (see tmva/doc/LICENSE) *
29 **********************************************************************************/
30
31/*! \class TMVA::MethodPDEFoam
32\ingroup TMVA
33
34The PDEFoam method is an extension of the PDERS method, which
35divides the multi-dimensional phase space in a finite number of
36hyper-rectangles (cells) of constant event density. This "foam" of
37cells is filled with averaged probability-density information
38sampled from a training event sample.
39
40For a given number of cells, the binning algorithm adjusts the size
41and position of the cells inside the multidimensional phase space
42based on a binary-split algorithm, minimizing the variance of the
43event density in the cell.
44The binned event density information of the final foam is stored in
45binary trees, allowing for a fast and memory-efficient
46classification of events.
47
48The implementation of PDEFoam is based on the Monte-Carlo
49integration package TFoam included in the analysis package ROOT.
50*/
51
#include "TMVA/MethodPDEFoam.h"

#include "TMVA/Config.h"
#include "TMVA/Configurable.h"
#include "TMVA/CrossEntropy.h"
#include "TMVA/DataSet.h"
#include "TMVA/DataSetInfo.h"
#include "TMVA/Event.h"
#include "TMVA/GiniIndex.h"
#include "TMVA/IMethod.h"
#include "TMVA/MethodBase.h"
#include "TMVA/MsgLogger.h"
#include "TMVA/Ranking.h"
#include "TMVA/SdivSqrtSplusB.h"
#include "TMVA/SeparationBase.h"
#include "TMVA/Tools.h"
#include "TMVA/Types.h"
#include "TMVA/VariableInfo.h"

#include "TFile.h"
#include "TH1F.h"
#include "TMath.h"

#include <cfloat>   // FLT_MAX
#include <vector>
77
78REGISTER_METHOD(PDEFoam)
79
80
81////////////////////////////////////////////////////////////////////////////////
82/// init PDEFoam objects
83
85 const TString& methodTitle,
88 MethodBase( jobName, Types::kPDEFoam, methodTitle, dsi, theOption)
89 , fSigBgSeparated(kFALSE)
90 , fFrac(0.001)
91 , fDiscrErrCut(-1.0)
92 , fVolFrac(1.0/15.0)
93 , fnCells(999)
94 , fnActiveCells(500)
95 , fnSampl(2000)
96 , fnBin(5)
97 , fEvPerBin(10000)
98 , fCompress(kTRUE)
99 , fMultiTargetRegression(kFALSE)
100 , fNmin(100)
101 , fCutNmin(kTRUE)
102 , fMaxDepth(0)
103 , fKernelStr("None")
104 , fKernel(kNone)
105 , fKernelEstimator(NULL)
106 , fTargetSelectionStr("Mean")
107 , fTargetSelection(kMean)
108 , fFillFoamWithOrigWeights(kFALSE)
109 , fUseYesNoCell(kFALSE)
110 , fDTLogic("None")
111 , fDTSeparation(kFoam)
112 , fPeekMax(kTRUE)
113 , fXmin()
114 , fXmax()
115 , fFoam()
116{
117}
118
119////////////////////////////////////////////////////////////////////////////////
120/// constructor from weight file
121
123 const TString& theWeightFile) :
124 MethodBase( Types::kPDEFoam, dsi, theWeightFile)
125 , fSigBgSeparated(kFALSE)
126 , fFrac(0.001)
127 , fDiscrErrCut(-1.0)
128 , fVolFrac(1.0/15.0)
129 , fnCells(999)
130 , fnActiveCells(500)
131 , fnSampl(2000)
132 , fnBin(5)
133 , fEvPerBin(10000)
134 , fCompress(kTRUE)
135 , fMultiTargetRegression(kFALSE)
136 , fNmin(100)
137 , fCutNmin(kTRUE)
138 , fMaxDepth(0)
139 , fKernelStr("None")
140 , fKernel(kNone)
141 , fKernelEstimator(NULL)
142 , fTargetSelectionStr("Mean")
143 , fTargetSelection(kMean)
144 , fFillFoamWithOrigWeights(kFALSE)
145 , fUseYesNoCell(kFALSE)
146 , fDTLogic("None")
147 , fDTSeparation(kFoam)
148 , fPeekMax(kTRUE)
149 , fXmin()
150 , fXmax()
151 , fFoam()
152{
153}
154
155////////////////////////////////////////////////////////////////////////////////
156/// PDEFoam can handle classification with multiple classes and regression
157/// with one or more regression-targets
158
166
167////////////////////////////////////////////////////////////////////////////////
168/// default initialization called by all constructors
169
171{
172 // init PDEFoam options
173 fSigBgSeparated = kFALSE; // default: unified foam
174 fFrac = 0.001; // fraction of outlier events
175 fDiscrErrCut = -1.; // cut on discriminator error
176 fVolFrac = 1./15.; // range searching box size
177 fnActiveCells = 500; // number of active cells to create
178 fnCells = fnActiveCells*2-1; // total number of cells
179 fnSampl = 2000; // number of sampling points in cell
180 fnBin = 5; // number of bins in edge histogram
181 fEvPerBin = 10000; // number of events per bin
182 fNmin = 100; // minimum number of events in cell
183 fMaxDepth = 0; // cell tree depth (default: unlimited)
184 fFillFoamWithOrigWeights = kFALSE; // fill orig. weights into foam
185 fUseYesNoCell = kFALSE; // return -1 or 1 for bg or signal events
186 fDTLogic = "None"; // decision tree algorithmus
187 fDTSeparation = kFoam; // separation type
188
189 fKernel = kNone; // default: use no kernel
190 fKernelEstimator= NULL; // kernel estimator used during evaluation
191 fTargetSelection= kMean; // default: use mean for target selection (only multi target regression!)
192
193 fCompress = kTRUE; // compress ROOT output file
194 fMultiTargetRegression = kFALSE; // multi-target regression
195
196 DeleteFoams();
197
198 if (fUseYesNoCell)
199 SetSignalReferenceCut( 0.0 ); // MVA output in [-1, 1]
200 else
201 SetSignalReferenceCut( 0.5 ); // MVA output in [0, 1]
202}
203
204////////////////////////////////////////////////////////////////////////////////
205/// Declare MethodPDEFoam options
206
208{
209 DeclareOptionRef( fSigBgSeparated = kFALSE, "SigBgSeparate", "Separate foams for signal and background" );
210 DeclareOptionRef( fFrac = 0.001, "TailCut", "Fraction of outlier events that are excluded from the foam in each dimension" );
211 DeclareOptionRef( fVolFrac = 1./15., "VolFrac", "Size of sampling box, used for density calculation during foam build-up (maximum value: 1.0 is equivalent to volume of entire foam)");
212 DeclareOptionRef( fnActiveCells = 500, "nActiveCells", "Maximum number of active cells to be created by the foam");
213 DeclareOptionRef( fnSampl = 2000, "nSampl", "Number of generated MC events per cell");
214 DeclareOptionRef( fnBin = 5, "nBin", "Number of bins in edge histograms");
215 DeclareOptionRef( fCompress = kTRUE, "Compress", "Compress foam output file");
216 DeclareOptionRef( fMultiTargetRegression = kFALSE, "MultiTargetRegression", "Do regression with multiple targets");
217 DeclareOptionRef( fNmin = 100, "Nmin", "Number of events in cell required to split cell");
218 DeclareOptionRef( fMaxDepth = 0, "MaxDepth", "Maximum depth of cell tree (0=unlimited)");
219 DeclareOptionRef( fFillFoamWithOrigWeights = kFALSE, "FillFoamWithOrigWeights", "Fill foam with original or boost weights");
220 DeclareOptionRef( fUseYesNoCell = kFALSE, "UseYesNoCell", "Return -1 or 1 for bkg or signal like events");
221 DeclareOptionRef( fDTLogic = "None", "DTLogic", "Use decision tree algorithm to split cells");
222 AddPreDefVal(TString("None"));
223 AddPreDefVal(TString("GiniIndex"));
224 AddPreDefVal(TString("MisClassificationError"));
225 AddPreDefVal(TString("CrossEntropy"));
226 AddPreDefVal(TString("GiniIndexWithLaplace"));
227 AddPreDefVal(TString("SdivSqrtSplusB"));
228
229 DeclareOptionRef( fKernelStr = "None", "Kernel", "Kernel type used");
230 AddPreDefVal(TString("None"));
231 AddPreDefVal(TString("Gauss"));
232 AddPreDefVal(TString("LinNeighbors"));
233 DeclareOptionRef( fTargetSelectionStr = "Mean", "TargetSelection", "Target selection method");
234 AddPreDefVal(TString("Mean"));
235 AddPreDefVal(TString("Mpv"));
236}
237
238
239////////////////////////////////////////////////////////////////////////////////
240/// options that are used ONLY for the READER to ensure backward compatibility
241
244 DeclareOptionRef(fCutNmin = kTRUE, "CutNmin", "Requirement for minimal number of events in cell");
245 DeclareOptionRef(fPeekMax = kTRUE, "PeekMax", "Peek cell with max. loss for the next split");
246}
247
248////////////////////////////////////////////////////////////////////////////////
249/// process user options
250
252{
253 if (!(fFrac>=0. && fFrac<=1.)) {
254 Log() << kWARNING << "TailCut not in [0.,1] ==> using 0.001 instead" << Endl;
255 fFrac = 0.001;
256 }
257
258 if (fnActiveCells < 1) {
259 Log() << kWARNING << "invalid number of active cells specified: "
260 << fnActiveCells << "; setting nActiveCells=2" << Endl;
261 fnActiveCells = 2;
262 }
263 fnCells = fnActiveCells*2-1;
264
265 // DT logic is only applicable if a single foam is trained
266 if (fSigBgSeparated && fDTLogic != "None") {
267 Log() << kFATAL << "Decision tree logic works only for a single foam (SigBgSeparate=F)" << Endl;
268 }
269
270 // set separation to use
271 if (fDTLogic == "None")
272 fDTSeparation = kFoam;
273 else if (fDTLogic == "GiniIndex")
274 fDTSeparation = kGiniIndex;
275 else if (fDTLogic == "MisClassificationError")
276 fDTSeparation = kMisClassificationError;
277 else if (fDTLogic == "CrossEntropy")
278 fDTSeparation = kCrossEntropy;
279 else if (fDTLogic == "GiniIndexWithLaplace")
280 fDTSeparation = kGiniIndexWithLaplace;
281 else if (fDTLogic == "SdivSqrtSplusB")
282 fDTSeparation = kSdivSqrtSplusB;
283 else {
284 Log() << kWARNING << "Unknown separation type: " << fDTLogic
285 << ", setting to None" << Endl;
286 fDTLogic = "None";
287 fDTSeparation = kFoam;
288 }
289
290 if (fKernelStr == "None" ) fKernel = kNone;
291 else if (fKernelStr == "Gauss" ) fKernel = kGaus;
292 else if (fKernelStr == "LinNeighbors") fKernel = kLinN;
293
294 if (fTargetSelectionStr == "Mean" ) fTargetSelection = kMean;
295 else fTargetSelection = kMpv;
296 // sanity check: number of targets > 1 and MultiTargetRegression=F
297 // makes no sense --> set MultiTargetRegression=T
298 if (DoRegression() && Data()->GetNTargets() > 1 && !fMultiTargetRegression) {
299 Log() << kWARNING << "Warning: number of targets > 1"
300 << " and MultiTargetRegression=F was set, this makes no sense!"
301 << " --> I'm setting MultiTargetRegression=T" << Endl;
302 fMultiTargetRegression = kTRUE;
303 }
304}
305
306////////////////////////////////////////////////////////////////////////////////
307/// destructor
308
310{
311 DeleteFoams();
312
313 if (fKernelEstimator != NULL)
314 delete fKernelEstimator;
315}
316
317////////////////////////////////////////////////////////////////////////////////
318/// Determine foam range [fXmin, fXmax] for all dimensions, such
319/// that a fraction of 'fFrac' events lie outside the foam.
320
322{
323 fXmin.clear();
324 fXmax.clear();
325 UInt_t kDim = GetNvar(); // == Data()->GetNVariables();
326 UInt_t tDim = Data()->GetNTargets();
327 UInt_t vDim = Data()->GetNVariables();
328 if (fMultiTargetRegression)
329 kDim += tDim;
330
331 Float_t *xmin = new Float_t[kDim];
332 Float_t *xmax = new Float_t[kDim];
333
334 // set default values
335 for (UInt_t dim=0; dim<kDim; dim++) {
336 xmin[dim] = FLT_MAX;
337 xmax[dim] = FLT_MIN;
338 }
339
340 Log() << kDEBUG << "Number of training events: " << Data()->GetNTrainingEvents() << Endl;
341 Int_t nevoutside = (Int_t)((Data()->GetNTrainingEvents())*(fFrac)); // number of events that are outside the range
342 Int_t rangehistbins = 10000; // number of bins in histos
343
344 // loop over all testing signal and BG events and clac minimal and
345 // maximal value of every variable
346 for (Long64_t i=0; i<(GetNEvents()); i++) { // events loop
347 const Event* ev = GetEvent(i);
348 for (UInt_t dim=0; dim<kDim; dim++) { // variables loop
349 Float_t val;
350 if (fMultiTargetRegression) {
351 if (dim < vDim)
352 val = ev->GetValue(dim);
353 else
354 val = ev->GetTarget(dim-vDim);
355 }
356 else
357 val = ev->GetValue(dim);
358
359 if (val<xmin[dim])
360 xmin[dim] = val;
361 if (val>xmax[dim])
362 xmax[dim] = val;
363 }
364 }
365
366 // Create and fill histograms for each dimension (with same events
367 // as before), to determine range based on number of events outside
368 // the range
369 TH1F **range_h = new TH1F*[kDim];
370 for (UInt_t dim=0; dim<kDim; dim++) {
371 range_h[dim] = new TH1F(TString::Format("range%i", dim), "range", rangehistbins, xmin[dim], xmax[dim]);
372 }
373
374 // fill all testing events into histos
375 for (Long64_t i=0; i<GetNEvents(); i++) {
376 const Event* ev = GetEvent(i);
377 for (UInt_t dim=0; dim<kDim; dim++) {
378 if (fMultiTargetRegression) {
379 if (dim < vDim)
380 range_h[dim]->Fill(ev->GetValue(dim));
381 else
382 range_h[dim]->Fill(ev->GetTarget(dim-vDim));
383 }
384 else
385 range_h[dim]->Fill(ev->GetValue(dim));
386 }
387 }
388
389 // calc Xmin, Xmax from Histos
390 for (UInt_t dim=0; dim<kDim; dim++) {
391 for (Int_t i=1; i<(rangehistbins+1); i++) { // loop over bins
392 if (range_h[dim]->Integral(0, i) > nevoutside) { // calc left limit (integral over bins 0..i = nevoutside)
393 xmin[dim]=range_h[dim]->GetBinLowEdge(i);
394 break;
395 }
396 }
397 for (Int_t i=rangehistbins; i>0; i--) { // calc right limit (integral over bins i..max = nevoutside)
398 if (range_h[dim]->Integral(i, (rangehistbins+1)) > nevoutside) {
399 xmax[dim]=range_h[dim]->GetBinLowEdge(i+1);
400 break;
401 }
402 }
403 }
404 // now xmin[] and xmax[] contain upper/lower limits for every dimension
405
406 // copy xmin[], xmax[] values to the class variable
407 fXmin.clear();
408 fXmax.clear();
409 for (UInt_t dim=0; dim<kDim; dim++) {
410 fXmin.push_back(xmin[dim]);
411 fXmax.push_back(xmax[dim]);
412 }
413
414
415 delete[] xmin;
416 delete[] xmax;
417
418 // delete histos
419 for (UInt_t dim=0; dim<kDim; dim++)
420 delete range_h[dim];
421 delete[] range_h;
422
423 return;
424}
425
426////////////////////////////////////////////////////////////////////////////////
427/// Train PDE-Foam depending on the set options
428
430{
431 Log() << kVERBOSE << "Calculate Xmin and Xmax for every dimension" << Endl;
432 CalcXminXmax();
433
434 // delete foams
435 DeleteFoams();
436
437 // start training
438 if (DoRegression()) {
439 if (fMultiTargetRegression)
440 TrainMultiTargetRegression();
441 else
442 TrainMonoTargetRegression();
443 }
444 else {
445 if (DoMulticlass())
446 TrainMultiClassification();
447 else {
448 if (DataInfo().GetNormalization() != "EQUALNUMEVENTS" ) {
449 Log() << kHEADER << "NormMode=" << DataInfo().GetNormalization()
450 << " chosen. Note that only NormMode=EqualNumEvents"
451 << " ensures that Discriminant values correspond to"
452 << " signal probabilities." << Endl;
453 }
454
455 Log() << kDEBUG << "N_sig for training events: " << Data()->GetNEvtSigTrain() << Endl;
456 Log() << kDEBUG << "N_bg for training events: " << Data()->GetNEvtBkgdTrain() << Endl;
457 Log() << kDEBUG << "User normalization: " << DataInfo().GetNormalization().Data() << Endl;
458
459 if (fSigBgSeparated)
460 TrainSeparatedClassification();
461 else
462 TrainUnifiedClassification();
463 }
464 }
465
466 // delete the binary search tree in order to save memory
467 for(UInt_t i=0; i<fFoam.size(); i++) {
468 if(fFoam.at(i))
469 fFoam.at(i)->DeleteBinarySearchTree();
470 }
471 ExitFromTraining();
472}
473
474////////////////////////////////////////////////////////////////////////////////
475/// Creation of 2 separated foams: one for signal events, one for
476/// background events. At the end the foam cells of fFoam[0] will
477/// contain the average number of signal events and fFoam[1] will
478/// contain the average number of background events.
479
481{
483 foamcaption[0] = "SignalFoam";
484 foamcaption[1] = "BgFoam";
485
486 for(int i=0; i<2; i++) {
487 // create 2 PDEFoams
488 fFoam.push_back( InitFoam(foamcaption[i], kSeparate) );
489
490 Log() << kVERBOSE << "Filling binary search tree of " << foamcaption[i]
491 << " with events" << Endl;
492 // insert event to BinarySearchTree
493 for (Long64_t k=0; k<GetNEvents(); ++k) {
494 const Event* ev = GetEvent(k);
495 if ((i==0 && DataInfo().IsSignal(ev)) || (i==1 && !DataInfo().IsSignal(ev)))
496 if (!(IgnoreEventsWithNegWeightsInTraining() && ev->GetWeight()<=0))
497 fFoam.back()->FillBinarySearchTree(ev);
498 }
499
500 Log() << kINFO << "Build up " << foamcaption[i] << Endl;
501 fFoam.back()->Create(); // build foam
502
503 Log() << kVERBOSE << "Filling foam cells with events" << Endl;
504 // loop over all events -> fill foam cells
505 for (Long64_t k=0; k<GetNEvents(); ++k) {
506 const Event* ev = GetEvent(k);
507 Float_t weight = fFillFoamWithOrigWeights ? ev->GetOriginalWeight() : ev->GetWeight();
508 if ((i==0 && DataInfo().IsSignal(ev)) || (i==1 && !DataInfo().IsSignal(ev)))
509 if (!(IgnoreEventsWithNegWeightsInTraining() && ev->GetWeight()<=0))
510 fFoam.back()->FillFoamCells(ev, weight);
511 }
512 }
513}
514
515////////////////////////////////////////////////////////////////////////////////
516/// Create only one unified foam (fFoam[0]) whose cells contain the
517/// average discriminator (N_sig)/(N_sig + N_bg)
518
520{
521 fFoam.push_back( InitFoam("DiscrFoam", kDiscr, fSignalClass) );
522
523 Log() << kVERBOSE << "Filling binary search tree of discriminator foam with events" << Endl;
524 // insert event to BinarySearchTree
525 for (Long64_t k=0; k<GetNEvents(); ++k) {
526 const Event* ev = GetEvent(k);
527 if (!(IgnoreEventsWithNegWeightsInTraining() && ev->GetWeight()<=0))
528 fFoam.back()->FillBinarySearchTree(ev);
529 }
530
531 Log() << kINFO << "Build up discriminator foam" << Endl;
532 fFoam.back()->Create(); // build foam
533
534 Log() << kVERBOSE << "Filling foam cells with events" << Endl;
535 // loop over all training events -> fill foam cells with N_sig and N_Bg
536 for (Long64_t k=0; k<GetNEvents(); ++k) {
537 const Event* ev = GetEvent(k);
538 Float_t weight = fFillFoamWithOrigWeights ? ev->GetOriginalWeight() : ev->GetWeight();
539 if (!(IgnoreEventsWithNegWeightsInTraining() && ev->GetWeight()<=0))
540 fFoam.back()->FillFoamCells(ev, weight);
541 }
542
543 Log() << kVERBOSE << "Calculate cell discriminator"<< Endl;
544 // calc discriminator (and it's error) for each cell
545 fFoam.back()->Finalize();
546}
547
548////////////////////////////////////////////////////////////////////////////////
549/// Create one unified foam (see TrainUnifiedClassification()) for
550/// each class, where the cells of foam i (fFoam[i]) contain the
551/// average fraction of events of class i, i.e.
552///
553/// D = number events of class i / total number of events
554
556{
557 for (UInt_t iClass=0; iClass<DataInfo().GetNClasses(); ++iClass) {
558
559 fFoam.push_back( InitFoam(TString::Format("MultiClassFoam%u",iClass), kMultiClass, iClass) );
560
561 Log() << kVERBOSE << "Filling binary search tree of multiclass foam "
562 << iClass << " with events" << Endl;
563 // insert event to BinarySearchTree
564 for (Long64_t k=0; k<GetNEvents(); ++k) {
565 const Event* ev = GetEvent(k);
566 if (!(IgnoreEventsWithNegWeightsInTraining() && ev->GetWeight()<=0))
567 fFoam.back()->FillBinarySearchTree(ev);
568 }
569
570 Log() << kINFO << "Build up multiclass foam " << iClass << Endl;
571 fFoam.back()->Create(); // build foam
572
573 Log() << kVERBOSE << "Filling foam cells with events" << Endl;
574 // loop over all training events and fill foam cells with signal
575 // and background events
576 for (Long64_t k=0; k<GetNEvents(); ++k) {
577 const Event* ev = GetEvent(k);
578 Float_t weight = fFillFoamWithOrigWeights ? ev->GetOriginalWeight() : ev->GetWeight();
579 if (!(IgnoreEventsWithNegWeightsInTraining() && ev->GetWeight()<=0))
580 fFoam.back()->FillFoamCells(ev, weight);
581 }
582
583 Log() << kVERBOSE << "Calculate cell discriminator"<< Endl;
584 // calc discriminator (and it's error) for each cell
585 fFoam.back()->Finalize();
586 }
587}
588
589////////////////////////////////////////////////////////////////////////////////
590/// Training one (mono target regression) foam, whose cells contain
591/// the average 0th target. The dimension of the foam = number of
592/// non-targets (= number of variables).
593
595{
596 if (Data()->GetNTargets() != 1) {
597 Log() << kFATAL << "Can't do mono-target regression with "
598 << Data()->GetNTargets() << " targets!" << Endl;
599 }
600
601 Log() << kDEBUG << "MethodPDEFoam: number of Targets: " << Data()->GetNTargets() << Endl;
602
603 fFoam.push_back( InitFoam("MonoTargetRegressionFoam", kMonoTarget) );
604
605 Log() << kVERBOSE << "Filling binary search tree with events" << Endl;
606 // insert event to BinarySearchTree
607 for (Long64_t k=0; k<GetNEvents(); ++k) {
608 const Event* ev = GetEvent(k);
609 if (!(IgnoreEventsWithNegWeightsInTraining() && ev->GetWeight()<=0))
610 fFoam.back()->FillBinarySearchTree(ev);
611 }
612
613 Log() << kINFO << "Build mono target regression foam" << Endl;
614 fFoam.back()->Create(); // build foam
615
616 Log() << kVERBOSE << "Filling foam cells with events" << Endl;
617 // loop over all events -> fill foam cells with target
618 for (Long64_t k=0; k<GetNEvents(); ++k) {
619 const Event* ev = GetEvent(k);
620 Float_t weight = fFillFoamWithOrigWeights ? ev->GetOriginalWeight() : ev->GetWeight();
621 if (!(IgnoreEventsWithNegWeightsInTraining() && ev->GetWeight()<=0))
622 fFoam.back()->FillFoamCells(ev, weight);
623 }
624
625 Log() << kVERBOSE << "Calculate average cell targets"<< Endl;
626 // calc weight (and it's error) for each cell
627 fFoam.back()->Finalize();
628}
629
630////////////////////////////////////////////////////////////////////////////////
631/// Training one (multi target regression) foam, whose cells contain
632/// the average event density. The dimension of the foam = number
633/// of non-targets + number of targets.
634
636{
637 Log() << kDEBUG << "Number of variables: " << Data()->GetNVariables() << Endl;
638 Log() << kDEBUG << "Number of Targets: " << Data()->GetNTargets() << Endl;
639 Log() << kDEBUG << "Dimension of foam: " << Data()->GetNVariables()+Data()->GetNTargets() << Endl;
640 if (fKernel==kLinN)
641 Log() << kFATAL << "LinNeighbors kernel currently not supported"
642 << " for multi target regression" << Endl;
643
644 fFoam.push_back( InitFoam("MultiTargetRegressionFoam", kMultiTarget) );
645
646 Log() << kVERBOSE << "Filling binary search tree of multi target regression foam with events"
647 << Endl;
648 // insert event to BinarySearchTree
649 for (Long64_t k=0; k<GetNEvents(); ++k) {
650 Event *ev = new Event(*GetEvent(k));
651 // since in multi-target regression targets are handled like
652 // variables --> remove targets and add them to the event variabels
653 std::vector<Float_t> targets(ev->GetTargets());
654 const UInt_t nVariables = ev->GetValues().size();
655 for (UInt_t i = 0; i < targets.size(); ++i)
656 ev->SetVal(i+nVariables, targets.at(i));
657 ev->GetTargets().clear();
658 if (!(IgnoreEventsWithNegWeightsInTraining() && ev->GetWeight()<=0))
659 fFoam.back()->FillBinarySearchTree(ev);
660 // since the binary search tree copies the event, one can delete
661 // it
662 delete ev;
663 }
664
665 Log() << kINFO << "Build multi target regression foam" << Endl;
666 fFoam.back()->Create(); // build foam
667
668 Log() << kVERBOSE << "Filling foam cells with events" << Endl;
669 // loop over all events -> fill foam cells with number of events
670 for (Long64_t k=0; k<GetNEvents(); ++k) {
671 Event *ev = new Event(*GetEvent(k));
672 // since in multi-target regression targets are handled like
673 // variables --> remove targets and add them to the event variabels
674 std::vector<Float_t> targets = ev->GetTargets();
675 const UInt_t nVariables = ev->GetValues().size();
676 Float_t weight = fFillFoamWithOrigWeights ? ev->GetOriginalWeight() : ev->GetWeight();
677 for (UInt_t i = 0; i < targets.size(); ++i)
678 ev->SetVal(i+nVariables, targets.at(i));
679 ev->GetTargets().clear();
680 if (!(IgnoreEventsWithNegWeightsInTraining() && ev->GetWeight()<=0))
681 fFoam.back()->FillFoamCells(ev, weight);
682 // since the PDEFoam copies the event, one can delete it
683 delete ev;
684 }
685}
686
687////////////////////////////////////////////////////////////////////////////////
688/// Return Mva-Value.
689///
690/// In case of `fSigBgSeparated==false` (one unified PDEFoam was
691/// trained) the function returns the content of the cell, which
692/// corresponds to the current TMVA::Event, i.e. D =
693/// N_sig/(N_bg+N_sig).
694///
695/// In case of `fSigBgSeparated==true` (two separate PDEFoams were
696/// trained) the function returns
697///
698/// D = Density_sig/(Density_sig+Density_bg)
699///
700/// where 'Density_sig' is the content of the cell in the signal
701/// PDEFoam (fFoam[0]) and 'Density_bg' is the content of the cell
702/// in the background PDEFoam (fFoam[1]).
703///
704/// In both cases the error on the discriminant is stored in 'err'
705/// and 'errUpper'. (Of course err and errUpper must be non-zero
706/// and point to valid address to make this work.)
707
709{
710 const Event* ev = GetEvent();
711 Double_t discr = 0.;
712
713 if (fSigBgSeparated) {
714 std::vector<Float_t> xvec = ev->GetValues();
715
716 Double_t density_sig = 0.; // calc signal event density
717 Double_t density_bg = 0.; // calc background event density
718 density_sig = fFoam.at(0)->GetCellValue(xvec, kValueDensity, fKernelEstimator);
719 density_bg = fFoam.at(1)->GetCellValue(xvec, kValueDensity, fKernelEstimator);
720
721 // calc discriminator (normed!)
722 if ( (density_sig+density_bg) > 0 )
724 else
725 discr = 0.5; // assume 50% signal probability, if no events found (bad assumption, but can be overruled by cut on error)
726 }
727 else { // Signal and Bg not separated
728 // get discriminator direct from the foam
729 discr = fFoam.at(0)->GetCellValue(ev->GetValues(), kValue, fKernelEstimator);
730 }
731
732 // calculate the error
733 if (err || errUpper) {
734 const Double_t discr_error = CalculateMVAError();
735 if (err != 0) *err = discr_error;
736 if (errUpper != 0) *errUpper = discr_error;
737 }
738
739 if (fUseYesNoCell)
740 return (discr < 0.5 ? -1 : 1);
741 else
742 return discr;
743}
744
745////////////////////////////////////////////////////////////////////////////////
746/// Calculate the error on the Mva value
747///
748/// If `fSigBgSeparated == true` the error is calculated from the
749/// number of events in the signal and background PDEFoam cells.
750///
751/// If `fSigBgSeparated == false`, the error is taken directly from
752/// the PDEFoam cell.
753
755{
756 const Event* ev = GetEvent(); // current event
757 Double_t mvaError = 0.0; // the error on the Mva value
758
759 if (fSigBgSeparated) {
760 const std::vector<Float_t>& xvec = ev->GetValues();
761
762 const Double_t neventsB = fFoam.at(1)->GetCellValue(xvec, kValue, fKernelEstimator);
763 const Double_t neventsS = fFoam.at(0)->GetCellValue(xvec, kValue, fKernelEstimator);
764 const Double_t scaleB = 1.;
765 // estimation of statistical error on counted signal/background events
766 const Double_t errorS = neventsS == 0 ? 1.0 : TMath::Sqrt(neventsS);
767 const Double_t errorB = neventsB == 0 ? 1.0 : TMath::Sqrt(neventsB);
768
769 if ((neventsS > 1e-10) || (neventsB > 1e-10)) {
770 // eq. (5) in paper T.Carli, B.Koblitz 2002
772 Sqr(scaleB * neventsS / Sqr(neventsS + scaleB * neventsB) * errorB));
773 } else {
774 mvaError = 1.0;
775 }
776 } else { // Signal and Bg not separated
777 // get discriminator error direct from the foam
778 mvaError = fFoam.at(0)->GetCellValue(ev->GetValues(), kValueError, fKernelEstimator);
779 }
780
781 return mvaError;
782}
783
784////////////////////////////////////////////////////////////////////////////////
785/// Get the multiclass MVA response for the PDEFoam classifier. The
786/// returned MVA values are normalized, i.e. their sum equals 1.
787
788const std::vector<Float_t>& TMVA::MethodPDEFoam::GetMulticlassValues()
789{
790 const TMVA::Event *ev = GetEvent();
791 std::vector<Float_t> xvec = ev->GetValues();
792
793 if (fMulticlassReturnVal == NULL)
794 fMulticlassReturnVal = new std::vector<Float_t>();
795 fMulticlassReturnVal->clear();
796 fMulticlassReturnVal->reserve(DataInfo().GetNClasses());
797
798 std::vector<Float_t> temp; // temp class. values
799 UInt_t nClasses = DataInfo().GetNClasses();
800 temp.reserve(nClasses);
801 for (UInt_t iClass = 0; iClass < nClasses; ++iClass) {
802 temp.push_back(fFoam.at(iClass)->GetCellValue(xvec, kValue, fKernelEstimator));
803 }
804
805 for (UInt_t iClass = 0; iClass < nClasses; ++iClass) {
806 Float_t norm = 0.0; // normalization
807 for (UInt_t j = 0; j < nClasses; ++j) {
808 if (iClass != j)
809 norm += exp(temp[j] - temp[iClass]);
810 }
811 fMulticlassReturnVal->push_back(1.0 / (1.0 + norm));
812 }
813
814 return *fMulticlassReturnVal;
815}
816
817////////////////////////////////////////////////////////////////////////////////
818/// Compute ranking of input variables from the number of cuts made
819/// in each PDEFoam dimension. The PDEFoam dimension (the variable)
820/// for which the most cuts were done is ranked highest.
821
823{
824 // create the ranking object
825 fRanking = new Ranking(GetName(), "Variable Importance");
826 std::vector<Float_t> importance(GetNvar(), 0);
827
828 // determine variable importances
829 for (UInt_t ifoam = 0; ifoam < fFoam.size(); ++ifoam) {
830 // get the number of cuts made in every dimension of foam
831 PDEFoamCell *root_cell = fFoam.at(ifoam)->GetRootCell();
832 std::vector<UInt_t> nCuts(fFoam.at(ifoam)->GetTotDim(), 0);
833 GetNCuts(root_cell, nCuts);
834
835 // fill the importance vector (ignoring the target dimensions in
836 // case of a multi-target regression foam)
837 UInt_t sumOfCuts = 0;
838 std::vector<Float_t> tmp_importance;
839 for (UInt_t ivar = 0; ivar < GetNvar(); ++ivar) {
840 sumOfCuts += nCuts.at(ivar);
841 tmp_importance.push_back( nCuts.at(ivar) );
842 }
843 // normalization of the variable importances of this foam: the
844 // sum of all variable importances equals 1 for this foam
845 for (UInt_t ivar = 0; ivar < GetNvar(); ++ivar) {
846 if (sumOfCuts > 0)
848 else
849 tmp_importance.at(ivar) = 0;
850 }
851 // the overall variable importance is the average over all foams
852 for (UInt_t ivar = 0; ivar < GetNvar(); ++ivar) {
853 importance.at(ivar) += tmp_importance.at(ivar) / fFoam.size();
854 }
855 }
856
857 // fill ranking vector
858 for (UInt_t ivar = 0; ivar < GetNvar(); ++ivar) {
859 fRanking->AddRank(Rank(GetInputLabel(ivar), importance.at(ivar)));
860 }
861
862 return fRanking;
863}
864
865////////////////////////////////////////////////////////////////////////////////
866/// Fill in 'nCuts' the number of cuts made in every foam dimension,
867/// starting at the root cell 'cell'.
868///
869/// Parameters:
870///
871/// - cell - root cell to start the counting from
872///
873/// - nCuts - the number of cuts are saved in this vector
874
876{
877 if (cell == NULL || cell->GetStat() == 1) // cell is active
878 return;
879
880 nCuts.at(cell->GetBest())++;
881
882 if (cell->GetDau0() != NULL)
883 GetNCuts(cell->GetDau0(), nCuts);
884 if (cell->GetDau1() != NULL)
885 GetNCuts(cell->GetDau1(), nCuts);
886}
887
888////////////////////////////////////////////////////////////////////////////////
889/// Set Xmin, Xmax for every dimension in the given pdefoam object
890
892{
893 if (!pdefoam){
894 Log() << kFATAL << "Null pointer given!" << Endl;
895 return;
896 }
897
898 UInt_t num_vars = GetNvar();
899 if (fMultiTargetRegression)
900 num_vars += Data()->GetNTargets();
901
902 for (UInt_t idim=0; idim<num_vars; idim++) { // set upper/ lower limit in foam
903 Log()<< kDEBUG << "foam: SetXmin[dim="<<idim<<"]: " << fXmin.at(idim) << Endl;
904 Log()<< kDEBUG << "foam: SetXmax[dim="<<idim<<"]: " << fXmax.at(idim) << Endl;
905 pdefoam->SetXmin(idim, fXmin.at(idim));
906 pdefoam->SetXmax(idim, fXmax.at(idim));
907 }
908}
909
910////////////////////////////////////////////////////////////////////////////////
911/// Create a new PDEFoam, set the PDEFoam options (nCells, nBin,
912/// Xmin, Xmax, etc.) and initialize the PDEFoam by calling
913/// pdefoam->Initialize().
914///
915/// Parameters:
916///
917/// - foamcaption - name of PDEFoam object
918///
919/// - ft - type of PDEFoam
920///
921/// Candidates are:
922/// - kSeparate - creates TMVA::PDEFoamEvent
923/// - kDiscr - creates TMVA::PDEFoamDiscriminant
924/// - kMonoTarget - creates TMVA::PDEFoamTarget
925/// - kMultiTarget - creates TMVA::MultiTarget
926/// - kMultiClass - creates TMVA::PDEFoamDiscriminant
927///
928/// If 'fDTSeparation != kFoam' then a TMVA::PDEFoamDecisionTree
929/// is created (the separation type depends on fDTSeparation).
930///
931/// - cls - marked event class (optional, default value = 0)
932
934{
935 // number of foam dimensions
936 Int_t dim = 1;
937 if (ft == kMultiTarget)
938 // dimension of foam = number of targets + non-targets
939 dim = Data()->GetNTargets() + Data()->GetNVariables();
940 else
941 dim = GetNvar();
942
943 // calculate range-searching box
944 std::vector<Double_t> box;
945 for (Int_t idim = 0; idim < dim; ++idim) {
946 box.push_back((fXmax.at(idim) - fXmin.at(idim))* fVolFrac);
947 }
948
949 // create PDEFoam and PDEFoamDensityBase
952 if (fDTSeparation == kFoam) {
953 // use PDEFoam algorithm
954 switch (ft) {
955 case kSeparate:
958 break;
959 case kMultiTarget:
960 pdefoam = new PDEFoamMultiTarget(foamcaption, fTargetSelection);
962 break;
963 case kDiscr:
964 case kMultiClass:
967 break;
968 case kMonoTarget:
971 break;
972 default:
973 Log() << kFATAL << "Unknown PDEFoam type!" << Endl;
974 break;
975 }
976 } else {
977 // create a decision tree like PDEFoam
978
979 // create separation type class, which is owned by
980 // PDEFoamDecisionTree (i.e. PDEFoamDecisionTree will delete it)
982 switch (fDTSeparation) {
983 case kGiniIndex:
984 sepType = new GiniIndex();
985 break;
986 case kMisClassificationError:
988 break;
989 case kCrossEntropy:
990 sepType = new CrossEntropy();
991 break;
992 case kGiniIndexWithLaplace:
994 break;
995 case kSdivSqrtSplusB:
996 sepType = new SdivSqrtSplusB();
997 break;
998 default:
999 Log() << kFATAL << "Separation type " << fDTSeparation
1000 << " currently not supported" << Endl;
1001 break;
1002 }
1003 switch (ft) {
1004 case kDiscr:
1005 case kMultiClass:
1008 break;
1009 default:
1010 Log() << kFATAL << "Decision tree cell split algorithm is only"
1011 << " available for (multi) classification with a single"
1012 << " PDE-Foam (SigBgSeparate=F)" << Endl;
1013 break;
1014 }
1015 }
1016
1017 if (pdefoam) pdefoam->SetDensity(density);
1018 else Log() << kFATAL << "PDEFoam pointer not set, exiting.." << Endl;
1019
1020 // create pdefoam kernel
1021 fKernelEstimator = CreatePDEFoamKernel();
1022
1023 // set fLogger attributes
1024 pdefoam->Log().SetMinType(this->Log().GetMinType());
1025
1026 // set PDEFoam parameters
1027 pdefoam->SetDim( dim);
1028 pdefoam->SetnCells( fnCells); // optional
1029 pdefoam->SetnSampl( fnSampl); // optional
1030 pdefoam->SetnBin( fnBin); // optional
1031 pdefoam->SetEvPerBin( fEvPerBin); // optional
1032
1033 // cuts
1034 pdefoam->SetNmin(fNmin);
1035 pdefoam->SetMaxDepth(fMaxDepth); // maximum cell tree depth
1036
1037 // Init PDEFoam
1038 pdefoam->Initialize();
1039
1040 // Set Xmin, Xmax
1041 SetXminXmax(pdefoam);
1042
1043 return pdefoam;
1044}
1045
1046////////////////////////////////////////////////////////////////////////////////
1047/// Return regression values for both multi- and mono-target regression
1048
1049const std::vector<Float_t>& TMVA::MethodPDEFoam::GetRegressionValues()
1050{
1051 if (fRegressionReturnVal == 0) fRegressionReturnVal = new std::vector<Float_t>();
1052 fRegressionReturnVal->clear();
1053 fRegressionReturnVal->reserve(Data()->GetNTargets());
1054
1055 const Event* ev = GetEvent();
1056 std::vector<Float_t> vals = ev->GetValues(); // get array of event variables (non-targets)
1057
1058 if (vals.empty()) {
1059 Log() << kWARNING << "<GetRegressionValues> value vector is empty. " << Endl;
1060 }
1061
1062 if (fMultiTargetRegression) {
1063 // create std::map from event variables
1064 std::map<Int_t, Float_t> xvec;
1065 for (UInt_t i=0; i<vals.size(); ++i)
1066 xvec.insert(std::pair<Int_t, Float_t>(i, vals.at(i)));
1067 // get the targets
1068 std::vector<Float_t> targets = fFoam.at(0)->GetCellValue( xvec, kValue );
1069
1070 // sanity check
1071 if (targets.size() != Data()->GetNTargets())
1072 Log() << kFATAL << "Something wrong with multi-target regression foam: "
1073 << "number of targets does not match the DataSet()" << Endl;
1074 for(UInt_t i=0; i<targets.size(); i++)
1075 fRegressionReturnVal->push_back(targets.at(i));
1076 }
1077 else {
1078 fRegressionReturnVal->push_back(fFoam.at(0)->GetCellValue(vals, kValue, fKernelEstimator));
1079 }
1080
1081 // apply inverse transformation to regression values
1082 Event * evT = new Event(*ev);
1083 for (UInt_t itgt = 0; itgt < Data()->GetNTargets(); itgt++) {
1084 evT->SetTarget(itgt, fRegressionReturnVal->at(itgt) );
1085 }
1086 const Event* evT2 = GetTransformationHandler().InverseTransform( evT );
1087 fRegressionReturnVal->clear();
1088 for (UInt_t itgt = 0; itgt < Data()->GetNTargets(); itgt++) {
1089 fRegressionReturnVal->push_back( evT2->GetTarget(itgt) );
1090 }
1091
1092 delete evT;
1093
1094 return (*fRegressionReturnVal);
1095}
1096
1097////////////////////////////////////////////////////////////////////////////////
1098/// create a pdefoam kernel estimator, depending on the current
1099/// value of fKernel
1100
1102{
1103 switch (fKernel) {
1104 case kNone:
1105 return new PDEFoamKernelTrivial();
1106 case kLinN:
1107 return new PDEFoamKernelLinN();
1108 case kGaus:
1109 return new PDEFoamKernelGauss(fVolFrac/2.0);
1110 default:
1111 Log() << kFATAL << "Kernel: " << fKernel << " not supported!" << Endl;
1112 return NULL;
1113 }
1114 return NULL;
1115}
1116
1117////////////////////////////////////////////////////////////////////////////////
1118/// Deletes all trained foams
1119
1121{
1122 for (UInt_t i=0; i<fFoam.size(); i++)
1123 if (fFoam.at(i)) delete fFoam.at(i);
1124 fFoam.clear();
1125}
1126
1127////////////////////////////////////////////////////////////////////////////////
1128/// reset MethodPDEFoam:
1129///
1130/// - delete all PDEFoams
1131/// - delete the kernel estimator
1132
1134{
1135 DeleteFoams();
1136
1137 if (fKernelEstimator != NULL) {
1138 delete fKernelEstimator;
1139 fKernelEstimator = NULL;
1140 }
1141}
1142
1143////////////////////////////////////////////////////////////////////////////////
1144
1147
1148////////////////////////////////////////////////////////////////////////////////
1149/// create XML output of PDEFoam method variables
1150
1152{
1153 void* wght = gTools().AddChild(parent, "Weights");
1154 gTools().AddAttr( wght, "SigBgSeparated", fSigBgSeparated );
1155 gTools().AddAttr( wght, "Frac", fFrac );
1156 gTools().AddAttr( wght, "DiscrErrCut", fDiscrErrCut );
1157 gTools().AddAttr( wght, "VolFrac", fVolFrac );
1158 gTools().AddAttr( wght, "nCells", fnCells );
1159 gTools().AddAttr( wght, "nSampl", fnSampl );
1160 gTools().AddAttr( wght, "nBin", fnBin );
1161 gTools().AddAttr( wght, "EvPerBin", fEvPerBin );
1162 gTools().AddAttr( wght, "Compress", fCompress );
1163 gTools().AddAttr( wght, "DoRegression", DoRegression() );
1164 gTools().AddAttr( wght, "CutNmin", fNmin>0 );
1165 gTools().AddAttr( wght, "Nmin", fNmin );
1166 gTools().AddAttr( wght, "CutRMSmin", false );
1167 gTools().AddAttr( wght, "RMSmin", 0.0 );
1168 gTools().AddAttr( wght, "Kernel", KernelToUInt(fKernel) );
1169 gTools().AddAttr( wght, "TargetSelection", TargetSelectionToUInt(fTargetSelection) );
1170 gTools().AddAttr( wght, "FillFoamWithOrigWeights", fFillFoamWithOrigWeights );
1171 gTools().AddAttr( wght, "UseYesNoCell", fUseYesNoCell );
1172
1173 // save foam borders Xmin[i], Xmax[i]
1174 void *xmin_wrap;
1175 for (UInt_t i=0; i<fXmin.size(); i++){
1176 xmin_wrap = gTools().AddChild( wght, "Xmin" );
1177 gTools().AddAttr( xmin_wrap, "Index", i );
1178 gTools().AddAttr( xmin_wrap, "Value", fXmin.at(i) );
1179 }
1180 void *xmax_wrap;
1181 for (UInt_t i=0; i<fXmax.size(); i++){
1182 xmax_wrap = gTools().AddChild( wght, "Xmax" );
1183 gTools().AddAttr( xmax_wrap, "Index", i );
1184 gTools().AddAttr( xmax_wrap, "Value", fXmax.at(i) );
1185 }
1186
1187 // write foams to xml file
1188 WriteFoamsToFile();
1189}
1190
1191////////////////////////////////////////////////////////////////////////////////
1192/// Write PDEFoams to file
1193
1195{
1196 // fill variable names into foam
1197 FillVariableNamesToFoam();
1198
1199 TString rfname( GetWeightFileName() );
1200
1201 // replace in case of txt weight file
1202 rfname.ReplaceAll( TString(".") + gConfig().GetIONames().fWeightFileExtension + ".txt", ".xml" );
1203
1204 // add foam indicator to distinguish from main weight file
1205 rfname.ReplaceAll( ".xml", "_foams.root" );
1206
1207 TFile *rootFile = 0;
1208 if (fCompress) rootFile = new TFile(rfname, "RECREATE", "foamfile", 9);
1209 else rootFile = new TFile(rfname, "RECREATE");
1210
1211 // write the foams
1212 for (UInt_t i=0; i<fFoam.size(); ++i) {
1213 Log() << "writing foam " << fFoam.at(i)->GetFoamName().Data()
1214 << " to file" << Endl;
1215 fFoam.at(i)->Write(fFoam.at(i)->GetFoamName().Data());
1216 }
1217
1218 rootFile->Close();
1219 Log() << kINFO << "Foams written to file: "
1220 << gTools().Color("lightblue") << rfname << gTools().Color("reset") << Endl;
1221}
1222
1223////////////////////////////////////////////////////////////////////////////////
1224/// read options and internal parameters
1225
1227{
1228 istr >> fSigBgSeparated; // Separate Sig and Bg, or not
1229 istr >> fFrac; // Fraction used for calc of Xmin, Xmax
1230 istr >> fDiscrErrCut; // cut on discriminant error
1231 istr >> fVolFrac; // volume fraction (used for density calculation during buildup)
1232 istr >> fnCells; // Number of Cells (500)
1233 istr >> fnSampl; // Number of MC events per cell in build-up (1000)
1234 istr >> fnBin; // Number of bins in build-up (100)
1235 istr >> fEvPerBin; // Maximum events (equiv.) per bin in build-up (1000)
1236 istr >> fCompress; // compress output file
1237
1238 Bool_t regr;
1239 istr >> regr; // regression foam
1240 SetAnalysisType( (regr ? Types::kRegression : Types::kClassification ) );
1241
1242 Bool_t CutNmin, CutRMSmin; // dummy for backwards compatible.
1243 Float_t RMSmin; // dummy for backwards compatible.
1244 istr >> CutNmin; // cut on minimal number of events in cell
1245 istr >> fNmin;
1246 istr >> CutRMSmin; // cut on minimal RMS in cell
1247 istr >> RMSmin;
1248
1249 UInt_t ker = 0;
1250 istr >> ker; // used kernel for GetMvaValue()
1251 fKernel = UIntToKernel(ker);
1252
1253 UInt_t ts = 0;
1254 istr >> ts; // used method for target selection
1255 fTargetSelection = UIntToTargetSelection(ts);
1256
1257 istr >> fFillFoamWithOrigWeights; // fill foam with original event weights
1258 istr >> fUseYesNoCell; // return -1 or 1 for bg or signal event
1259
1260 // clear old range and prepare new range
1261 fXmin.clear();
1262 fXmax.clear();
1263 UInt_t kDim = GetNvar();
1264 if (fMultiTargetRegression)
1265 kDim += Data()->GetNTargets();
1266 fXmin.assign(kDim, 0);
1267 fXmax.assign(kDim, 0);
1268
1269 // read range
1270 for (UInt_t i=0; i<kDim; i++)
1271 istr >> fXmin.at(i);
1272 for (UInt_t i=0; i<kDim; i++)
1273 istr >> fXmax.at(i);
1274
1275 // read pure foams from file
1276 ReadFoamsFromFile();
1277}
1278
1279////////////////////////////////////////////////////////////////////////////////
1280/// read PDEFoam variables from xml weight file
1281
1283{
1284 gTools().ReadAttr( wghtnode, "SigBgSeparated", fSigBgSeparated );
1285 gTools().ReadAttr( wghtnode, "Frac", fFrac );
1286 gTools().ReadAttr( wghtnode, "DiscrErrCut", fDiscrErrCut );
1287 gTools().ReadAttr( wghtnode, "VolFrac", fVolFrac );
1288 gTools().ReadAttr( wghtnode, "nCells", fnCells );
1289 gTools().ReadAttr( wghtnode, "nSampl", fnSampl );
1290 gTools().ReadAttr( wghtnode, "nBin", fnBin );
1291 gTools().ReadAttr( wghtnode, "EvPerBin", fEvPerBin );
1292 gTools().ReadAttr( wghtnode, "Compress", fCompress );
1293 Bool_t regr; // dummy for backwards compatible.
1294 gTools().ReadAttr( wghtnode, "DoRegression", regr );
1295 Bool_t CutNmin; // dummy for backwards compatible.
1296 gTools().ReadAttr( wghtnode, "CutNmin", CutNmin );
1297 gTools().ReadAttr( wghtnode, "Nmin", fNmin );
1298 Bool_t CutRMSmin; // dummy for backwards compatible.
1299 Float_t RMSmin; // dummy for backwards compatible.
1300 gTools().ReadAttr( wghtnode, "CutRMSmin", CutRMSmin );
1301 gTools().ReadAttr( wghtnode, "RMSmin", RMSmin );
1302 UInt_t ker = 0;
1303 gTools().ReadAttr( wghtnode, "Kernel", ker );
1304 fKernel = UIntToKernel(ker);
1305 UInt_t ts = 0;
1306 gTools().ReadAttr( wghtnode, "TargetSelection", ts );
1307 fTargetSelection = UIntToTargetSelection(ts);
1308 if (gTools().HasAttr(wghtnode, "FillFoamWithOrigWeights"))
1309 gTools().ReadAttr( wghtnode, "FillFoamWithOrigWeights", fFillFoamWithOrigWeights );
1310 if (gTools().HasAttr(wghtnode, "UseYesNoCell"))
1311 gTools().ReadAttr( wghtnode, "UseYesNoCell", fUseYesNoCell );
1312
1313 // clear old range [Xmin, Xmax] and prepare new range for reading
1314 fXmin.clear();
1315 fXmax.clear();
1316 UInt_t kDim = GetNvar();
1317 if (fMultiTargetRegression)
1318 kDim += Data()->GetNTargets();
1319 fXmin.assign(kDim, 0);
1320 fXmax.assign(kDim, 0);
1321
1322 // read foam range
1323 void *xmin_wrap = gTools().GetChild( wghtnode );
1324 for (UInt_t counter=0; counter<kDim; counter++) {
1325 UInt_t i=0;
1326 gTools().ReadAttr( xmin_wrap , "Index", i );
1327 if (i>=kDim)
1328 Log() << kFATAL << "dimension index out of range:" << i << Endl;
1329 gTools().ReadAttr( xmin_wrap , "Value", fXmin.at(i) );
1331 }
1332
1333 void *xmax_wrap = xmin_wrap;
1334 for (UInt_t counter=0; counter<kDim; counter++) {
1335 UInt_t i=0;
1336 gTools().ReadAttr( xmax_wrap , "Index", i );
1337 if (i>=kDim)
1338 Log() << kFATAL << "dimension index out of range:" << i << Endl;
1339 gTools().ReadAttr( xmax_wrap , "Value", fXmax.at(i) );
1341 }
1342
1343 // if foams exist, delete them
1344 DeleteFoams();
1345
1346 // read pure foams from file
1347 ReadFoamsFromFile();
1348
1349 // recreate the pdefoam kernel estimator
1350 if (fKernelEstimator != NULL)
1351 delete fKernelEstimator;
1352 fKernelEstimator = CreatePDEFoamKernel();
1353}
1354
1355////////////////////////////////////////////////////////////////////////////////
1356/// Reads a foam with name 'foamname' from file, and returns a clone
1357/// of the foam. The given ROOT file must be open. (The ROOT file
1358/// will not be closed in this function.)
1359///
1360/// Parameters:
1361///
1362/// - file - an open ROOT file
1363///
1364/// - foamname - name of foam to load from the file
1365///
1366/// Returns:
1367///
1368/// If a foam with name 'foamname' exists in the file, then it is
1369/// read from the file, cloned and returned. If a foam with name
1370/// 'foamname' does not exist in the file or the clone operation
1371/// does not succeed, then NULL is returned.
1372
1374{
1375 if (file == NULL) {
1376 Log() << kWARNING << "<ReadClonedFoamFromFile>: NULL pointer given" << Endl;
1377 return NULL;
1378 }
1379
1380 // try to load the foam from the file
1381 PDEFoam *foam = (PDEFoam*) file->Get(foamname);
1382 if (foam == NULL) {
1383 return NULL;
1384 }
1385 // try to clone the foam
1386 foam = (PDEFoam*) foam->Clone();
1387 if (foam == NULL) {
1388 Log() << kWARNING << "<ReadClonedFoamFromFile>: " << foamname
1389 << " could not be cloned!" << Endl;
1390 return NULL;
1391 }
1392
1393 return foam;
1394}
1395
1396////////////////////////////////////////////////////////////////////////////////
1397/// read foams from file
1398
1400{
1401 TString rfname( GetWeightFileName() );
1402
1403 // replace in case of txt weight file
1404 rfname.ReplaceAll( TString(".") + gConfig().GetIONames().fWeightFileExtension + ".txt", ".xml" );
1405
1406 // add foam indicator to distinguish from main weight file
1407 rfname.ReplaceAll( ".xml", "_foams.root" );
1408
1409 Log() << kINFO << "Read foams from file: " << gTools().Color("lightblue")
1410 << rfname << gTools().Color("reset") << Endl;
1411 TFile *rootFile = new TFile( rfname, "READ" );
1412 if (rootFile->IsZombie()) Log() << kFATAL << "Cannot open file \"" << rfname << "\"" << Endl;
1413
1414 // read foams from file
1415 if (DoRegression()) {
1416 if (fMultiTargetRegression)
1417 fFoam.push_back(ReadClonedFoamFromFile(rootFile, "MultiTargetRegressionFoam"));
1418 else
1419 fFoam.push_back(ReadClonedFoamFromFile(rootFile, "MonoTargetRegressionFoam"));
1420 } else {
1421 if (fSigBgSeparated) {
1422 fFoam.push_back(ReadClonedFoamFromFile(rootFile, "SignalFoam"));
1423 fFoam.push_back(ReadClonedFoamFromFile(rootFile, "BgFoam"));
1424 } else {
1425 // try to load discriminator foam
1426 PDEFoam *foam = ReadClonedFoamFromFile(rootFile, "DiscrFoam");
1427 if (foam != NULL)
1428 fFoam.push_back(foam);
1429 else {
1430 // load multiclass foams
1431 for (UInt_t iClass=0; iClass<DataInfo().GetNClasses(); ++iClass) {
1432 fFoam.push_back(ReadClonedFoamFromFile(rootFile, TString::Format("MultiClassFoam%u",iClass)));
1433 }
1434 }
1435 }
1436 }
1437
1438 // Close the root file. Note, that the foams are still present in
1439 // memory!
1440 rootFile->Close();
1441 delete rootFile;
1442
1443 for (UInt_t i=0; i<fFoam.size(); ++i) {
1444 if (!fFoam.at(0))
1445 Log() << kFATAL << "Could not load foam!" << Endl;
1446 }
1447}
1448
1449////////////////////////////////////////////////////////////////////////////////
1450/// convert UInt_t to EKernel (used for reading weight files)
1451
1453{
1454 switch(iker) {
1455 case 0: return kNone;
1456 case 1: return kGaus;
1457 case 2: return kLinN;
1458 default:
1459 Log() << kWARNING << "<UIntToKernel>: unknown kernel number: " << iker << Endl;
1460 return kNone;
1461 }
1462 return kNone;
1463}
1464
1465////////////////////////////////////////////////////////////////////////////////
1466/// convert UInt_t to ETargetSelection (used for reading weight files)
1467
1469{
1470 switch(its) {
1471 case 0: return kMean;
1472 case 1: return kMpv;
1473 default:
1474 Log() << kWARNING << "<UIntToTargetSelection>: unknown method TargetSelection: " << its << Endl;
1475 return kMean;
1476 }
1477 return kMean;
1478}
1479
1480////////////////////////////////////////////////////////////////////////////////
1481/// store the variable names in all foams
1482
1484{
1485 for (UInt_t ifoam=0; ifoam<fFoam.size(); ifoam++) {
1486 for (Int_t idim=0; idim<fFoam.at(ifoam)->GetTotDim(); idim++) {
1487 if(fMultiTargetRegression && (UInt_t)idim>=DataInfo().GetNVariables())
1488 fFoam.at(ifoam)->AddVariableName(DataInfo().GetTargetInfo(idim-DataInfo().GetNVariables()).GetExpression().Data());
1489 else
1490 fFoam.at(ifoam)->AddVariableName(DataInfo().GetVariableInfo(idim).GetExpression().Data());
1491 }
1492 }
1493}
1494
1495////////////////////////////////////////////////////////////////////////////////
1496/// write PDEFoam-specific classifier response
1497/// NOT IMPLEMENTED YET!
1498
1499void TMVA::MethodPDEFoam::MakeClassSpecific( std::ostream& /*fout*/, const TString& /*className*/ ) const
1500{
1501}
1502
1503////////////////////////////////////////////////////////////////////////////////
1504/// provide help message
1505
1507{
1508 Log() << Endl;
1509 Log() << gTools().Color("bold") << "--- Short description:" << gTools().Color("reset") << Endl;
1510 Log() << Endl;
1511 Log() << "PDE-Foam is a variation of the PDE-RS method using a self-adapting" << Endl;
1512 Log() << "binning method to divide the multi-dimensional variable space into a" << Endl;
1513 Log() << "finite number of hyper-rectangles (cells). The binning algorithm " << Endl;
1514 Log() << "adjusts the size and position of a predefined number of cells such" << Endl;
1515 Log() << "that the variance of the signal and background densities inside the " << Endl;
1516 Log() << "cells reaches a minimum" << Endl;
1517 Log() << Endl;
1518 Log() << gTools().Color("bold") << "--- Use of booking options:" << gTools().Color("reset") << Endl;
1519 Log() << Endl;
1520 Log() << "The PDEFoam classifier supports two different algorithms: " << Endl;
1521 Log() << Endl;
1522 Log() << " (1) Create one foam, which stores the signal over background" << Endl;
1523 Log() << " probability density. During foam buildup the variance of the" << Endl;
1524 Log() << " discriminant inside the cells is minimised." << Endl;
1525 Log() << Endl;
1526 Log() << " Booking option: SigBgSeparated=F" << Endl;
1527 Log() << Endl;
1528 Log() << " (2) Create two separate foams, one for the signal events and one for" << Endl;
1529 Log() << " background events. During foam buildup the variance of the" << Endl;
1530 Log() << " event density inside the cells is minimised separately for" << Endl;
1531 Log() << " signal and background." << Endl;
1532 Log() << Endl;
1533 Log() << " Booking option: SigBgSeparated=T" << Endl;
1534 Log() << Endl;
1535 Log() << "The following options can be set (the listed values are found to be a" << Endl;
1536 Log() << "good starting point for most applications):" << Endl;
1537 Log() << Endl;
1538 Log() << " SigBgSeparate False Separate Signal and Background" << Endl;
1539 Log() << " TailCut 0.001 Fraction of outlier events that excluded" << Endl;
1540 Log() << " from the foam in each dimension " << Endl;
1541 Log() << " VolFrac 0.0666 Volume fraction (used for density calculation" << Endl;
1542 Log() << " during foam build-up) " << Endl;
1543 Log() << " nActiveCells 500 Maximal number of active cells in final foam " << Endl;
1544 Log() << " nSampl 2000 Number of MC events per cell in foam build-up " << Endl;
1545 Log() << " nBin 5 Number of bins used in foam build-up " << Endl;
1546 Log() << " Nmin 100 Number of events in cell required to split cell" << Endl;
1547 Log() << " Kernel None Kernel type used (possible values are: None," << Endl;
1548 Log() << " Gauss)" << Endl;
1549 Log() << " Compress True Compress foam output file " << Endl;
1550 Log() << Endl;
1551 Log() << " Additional regression options:" << Endl;
1552 Log() << Endl;
1553 Log() << "MultiTargetRegression False Do regression with multiple targets " << Endl;
1554 Log() << " TargetSelection Mean Target selection method (possible values are: " << Endl;
1555 Log() << " Mean, Mpv)" << Endl;
1556 Log() << Endl;
1557 Log() << gTools().Color("bold") << "--- Performance optimisation:" << gTools().Color("reset") << Endl;
1558 Log() << Endl;
1559 Log() << "The performance of the two implementations was found to be similar for" << Endl;
1560 Log() << "most examples studied. For the same number of cells per foam, the two-" << Endl;
1561 Log() << "foam option approximately doubles the amount of computer memory needed" << Endl;
1562 Log() << "during classification. For special cases where the event-density" << Endl;
1563 Log() << "distribution of signal and background events is very different, the" << Endl;
1564 Log() << "two-foam option was found to perform significantly better than the" << Endl;
1565 Log() << "option with only one foam." << Endl;
1566 Log() << Endl;
1567 Log() << "In order to gain better classification performance we recommend to set" << Endl;
1568 Log() << "the parameter \"nActiveCells\" to a high value." << Endl;
1569 Log() << Endl;
1570 Log() << "The parameter \"VolFrac\" specifies the size of the sampling volume" << Endl;
1571 Log() << "during foam buildup and should be tuned in order to achieve optimal" << Endl;
1572 Log() << "performance. A larger box leads to a reduced statistical uncertainty" << Endl;
1573 Log() << "for small training samples and to smoother sampling. A smaller box on" << Endl;
1574 Log() << "the other hand increases the sensitivity to statistical fluctuations" << Endl;
1575 Log() << "in the training samples, but for sufficiently large training samples" << Endl;
1576 Log() << "it will result in a more precise local estimate of the sampled" << Endl;
1577 Log() << "density. In general, higher dimensional problems require larger box" << Endl;
1578 Log() << "sizes, due to the reduced average number of events per box volume. The" << Endl;
1579 Log() << "default value of 0.0666 was optimised for an example with 5" << Endl;
1580 Log() << "observables and training samples of the order of 50000 signal and" << Endl;
1581 Log() << "background events each." << Endl;
1582 Log() << Endl;
1583 Log() << "Furthermore kernel weighting can be activated, which will lead to an" << Endl;
1584 Log() << "additional performance improvement. Note that Gauss weighting will" << Endl;
1585 Log() << "significantly increase the response time of the method. LinNeighbors" << Endl;
1586 Log() << "weighting performs a linear interpolation with direct neighbor cells" << Endl;
1587 Log() << "for each dimension and is much faster than Gauss weighting." << Endl;
1588 Log() << Endl;
1589 Log() << "The classification results were found to be rather insensitive to the" << Endl;
1590 Log() << "values of the parameters \"nSamples\" and \"nBin\"." << Endl;
1591}
#define REGISTER_METHOD(CLASS)
for example
const Handle_t kNone
Definition GuiTypes.h:88
#define e(i)
Definition RSha256.hxx:103
int Int_t
Signed integer 4 bytes (int)
Definition RtypesCore.h:59
float Float_t
Float 4 bytes (float)
Definition RtypesCore.h:71
constexpr Bool_t kFALSE
Definition RtypesCore.h:108
constexpr Bool_t kTRUE
Definition RtypesCore.h:107
ROOT::Detail::TRangeCast< T, true > TRangeDynCast
TRangeDynCast is an adapter class that allows the typed iteration through a TCollection.
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t Float_t Float_t Float_t Int_t Int_t UInt_t UInt_t Rectangle_t Int_t Int_t Window_t TString Int_t GCValues_t GetPrimarySelectionOwner GetDisplay GetScreen GetColormap GetNativeEvent const char const char dpyName wid window const char font_name cursor keysym reg const char only_if_exist regb h Point_t winding char text const char depth char const char Int_t count const char ColorStruct_t color const char Pixmap_t Pixmap_t PictureAttributes_t attr const char char ret_data h unsigned char height h Atom_t Int_t ULong_t ULong_t unsigned char prop_list Atom_t Atom_t Atom_t Time_t type
float xmin
float xmax
TObject * Get(const char *namecycle) override
Return pointer to object identified by namecycle.
A ROOT file is an on-disk file, usually with extension .root, that stores objects in a file-system-li...
Definition TFile.h:131
1-D histogram with a float per channel (see TH1 documentation)
Definition TH1.h:879
Implementation of the CrossEntropy as separation criterion.
Class that contains all the data information.
Definition DataSetInfo.h:62
Implementation of the GiniIndex With Laplace correction as separation criterion.
Implementation of the GiniIndex as separation criterion.
Definition GiniIndex.h:63
Virtual base Class for all MVA method.
Definition MethodBase.h:111
virtual void DeclareCompatibilityOptions()
options that are used ONLY for the READER to ensure backward compatibility they are hence without any...
The PDEFoam method is an extension of the PDERS method, which divides the multi-dimensional phase spa...
Double_t CalculateMVAError()
Calculate the error on the Mva value.
void DeclareCompatibilityOptions() override
options that are used ONLY for the READER to ensure backward compatibility
void TrainMultiClassification()
Create one unified foam (see TrainUnifiedClassification()) for each class, where the cells of foam i ...
void TrainMultiTargetRegression(void)
Training one (multi target regression) foam, whose cells contain the average event density.
void Train(void) override
Train PDE-Foam depending on the set options.
void DeleteFoams()
Deletes all trained foams.
virtual ~MethodPDEFoam(void)
destructor
void Init(void) override
default initialization called by all constructors
PDEFoam * InitFoam(TString, EFoamType, UInt_t cls=0)
Create a new PDEFoam, set the PDEFoam options (nCells, nBin, Xmin, Xmax, etc.) and initialize the PDE...
Bool_t HasAnalysisType(Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets) override
PDEFoam can handle classification with multiple classes and regression with one or more regression-ta...
void GetHelpMessage() const override
provide help message
void FillVariableNamesToFoam() const
store the variable names in all foams
void TrainMonoTargetRegression(void)
Training one (mono target regression) foam, whose cells contain the average 0th target.
void TrainUnifiedClassification(void)
Create only one unified foam (fFoam[0]) whose cells contain the average discriminator (N_sig)/(N_sig ...
void ReadFoamsFromFile()
read foams from file
void ProcessOptions() override
process user options
EKernel UIntToKernel(UInt_t iker)
convert UInt_t to EKernel (used for reading weight files)
PDEFoamKernelBase * CreatePDEFoamKernel()
create a pdefoam kernel estimator, depending on the current value of fKernel
void ReadWeightsFromStream(std::istream &i) override
read options and internal parameters
const std::vector< Float_t > & GetMulticlassValues() override
Get the multiclass MVA response for the PDEFoam classifier.
void Reset() override
reset MethodPDEFoam:
void MakeClassSpecific(std::ostream &, const TString &) const override
write PDEFoam-specific classifier response NOT IMPLEMENTED YET!
void AddWeightsXMLTo(void *parent) const override
create XML output of PDEFoam method variables
void CalcXminXmax()
Determine foam range [fXmin, fXmax] for all dimensions, such that a fraction of 'fFrac' events lie ou...
void GetNCuts(PDEFoamCell *cell, std::vector< UInt_t > &nCuts)
Fill in 'nCuts' the number of cuts made in every foam dimension, starting at the root cell 'cell'.
PDEFoam * ReadClonedFoamFromFile(TFile *, const TString &)
Reads a foam with name 'foamname' from file, and returns a clone of the foam.
const std::vector< Float_t > & GetRegressionValues() override
Return regression values for both multi- and mono-target regression.
MethodPDEFoam(const TString &jobName, const TString &methodTitle, DataSetInfo &dsi, const TString &theOption="PDEFoam")
init PDEFoam objects
ETargetSelection UIntToTargetSelection(UInt_t its)
convert UInt_t to ETargetSelection (used for reading weight files)
void DeclareOptions() override
Declare MethodPDEFoam options.
void ReadWeightsFromXML(void *wghtnode) override
read PDEFoam variables from xml weight file
void TrainSeparatedClassification(void)
Creation of 2 separated foams: one for signal events, one for background events.
void SetXminXmax(TMVA::PDEFoam *)
Set Xmin, Xmax for every dimension in the given pdefoam object.
void WriteFoamsToFile() const
Write PDEFoams to file.
Double_t GetMvaValue(Double_t *err=nullptr, Double_t *errUpper=nullptr) override
Return Mva-Value.
const Ranking * CreateRanking() override
Compute ranking of input variables from the number of cuts made in each PDEFoam dimension.
Implementation of the MisClassificationError as separation criterion.
This is a concrete implementation of PDEFoam.
This PDEFoam variant acts like a decision tree and stores in every cell the discriminant.
This is an abstract class, which provides an interface for a PDEFoam density estimator.
This is a concrete implementation of PDEFoam.
This PDEFoam variant stores in every cell the discriminant.
This is a concrete implementation of PDEFoam.
This PDEFoam variant stores in every cell the sum of event weights and the sum of the squared event weights.
This class is the abstract kernel interface for PDEFoam.
This PDEFoam kernel estimates a cell value for a given event by weighting all cell values with a gaussian function.
This PDEFoam kernel estimates a cell value for a given event by weighting with cell values of the nearest neighbor cells.
This class is a trivial PDEFoam kernel estimator.
This PDEFoam variant is used to estimate multiple targets by creating an event density foam (PDEFoamEvent).
This is a concrete implementation of PDEFoam.
This PDEFoam variant stores in every cell the average target fTarget (see the Constructor) as well as the statistical error on the target fTarget.
Implementation of PDEFoam.
Definition PDEFoam.h:79
Ranking for variables in method (implementation)
Definition Ranking.h:48
Implementation of the SdivSqrtSplusB as separation criterion.
An interface to calculate the "SeparationGain" for different separation criteria used in various training algorithms.
const TString & Color(const TString &)
human readable color strings
Definition Tools.cxx:828
void ReadAttr(void *node, const char *, T &value)
read attribute from xml
Definition Tools.h:329
void * GetChild(void *parent, const char *childname=nullptr)
get child node
Definition Tools.cxx:1150
void AddAttr(void *node, const char *, const T &value, Int_t precision=16)
add attribute to xml
Definition Tools.h:347
void * AddChild(void *parent, const char *childname, const char *content=nullptr, bool isRootNode=false)
add child node
Definition Tools.cxx:1124
void * GetNextChild(void *prevchild, const char *childname=nullptr)
XML helpers.
Definition Tools.cxx:1162
Singleton class for Global types used by TMVA.
Definition Types.h:71
@ kMulticlass
Definition Types.h:129
@ kClassification
Definition Types.h:127
@ kRegression
Definition Types.h:128
virtual Int_t Write(const char *name=nullptr, Int_t option=0, Int_t bufsize=0)
Write this object to the current directory.
Definition TObject.cxx:964
Basic string class.
Definition TString.h:138
static TString Format(const char *fmt,...)
Static method which formats a string using a printf style format descriptor and return a TString.
Definition TString.cxx:2384
void box(Int_t pat, Double_t x1, Double_t y1, Double_t x2, Double_t y2)
Definition fillpatterns.C:1
create variable transformations
Config & gConfig()
Tools & gTools()
MsgLogger & Endl(MsgLogger &ml)
Definition MsgLogger.h:148
Double_t Sqrt(Double_t x)
Returns the square root of x.
Definition TMath.h:673