Logo ROOT  
Reference Guide
 
Loading...
Searching...
No Matches
RuleFit.cxx
Go to the documentation of this file.
1// @(#)root/tmva $Id$
2// Author: Andreas Hoecker, Joerg Stelzer, Fredrik Tegenfeldt, Helge Voss
3
4/**********************************************************************************
5 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis *
6 * Package: TMVA *
7 * Class : RuleFit *
8 * *
9 * *
10 * Description: *
11 * A class describing a 'rule' *
12 * Each internal node of a tree defines a rule from all the parental nodes. *
13 * A rule with 0 or 1 nodes in the list is a root rule -> corresponds to a0. *
14 * Input: a decision tree (in the constructor) *
15 * its coefficient *
16 * *
17 * *
18 * Authors (alphabetical): *
19 * Fredrik Tegenfeldt <Fredrik.Tegenfeldt@cern.ch> - Iowa State U., USA *
20 * *
21 * Copyright (c) 2005: *
22 * CERN, Switzerland *
23 * Iowa State U. *
24 * MPI-K Heidelberg, Germany *
25 * *
26 * Redistribution and use in source and binary forms, with or without *
27 * modification, are permitted according to the terms listed in LICENSE *
28 * (see tmva/doc/LICENSE) *
29 **********************************************************************************/
30
31/*! \class TMVA::RuleFit
32\ingroup TMVA
33A class implementing various fits of rule ensembles
34*/
35#include "TMVA/RuleFit.h"
36
37#include "TMVA/DataSet.h"
38#include "TMVA/DecisionTree.h"
39#include "TMVA/Event.h"
40#include "TMVA/Factory.h" // for root base dir
41#include "TMVA/GiniIndex.h"
42#include "TMVA/MethodBase.h"
43#include "TMVA/MethodRuleFit.h"
44#include "TMVA/MsgLogger.h"
45#include "TMVA/Timer.h"
46#include "TMVA/Tools.h"
47#include "TMVA/Types.h"
48#include "TMVA/SeparationBase.h"
49
50#include "TDirectory.h"
51#include "TH2F.h"
52#include "TKey.h"
53#include "TRandom3.h"
54#include "TROOT.h" // for gROOT
55
56#include <algorithm>
57#include <random>
58
60
61////////////////////////////////////////////////////////////////////////////////
62/// constructor
63
65 : fVisHistsUseImp( kTRUE )
66 , fLogger(new MsgLogger("RuleFit"))
67{
68 Initialize(rfbase);
69 fRNGEngine.seed(randSEED);
70}
71
72////////////////////////////////////////////////////////////////////////////////
73/// default constructor
74
76 : fNTreeSample(0)
77 , fNEveEffTrain(0)
78 , fMethodRuleFit(0)
79 , fMethodBase(0)
80 , fVisHistsUseImp(kTRUE)
81 , fLogger(new MsgLogger("RuleFit"))
82{
83 fRNGEngine.seed(randSEED);
84}
85
86////////////////////////////////////////////////////////////////////////////////
87/// destructor
88
90{
91 delete fLogger;
92}
93
94////////////////////////////////////////////////////////////////////////////////
95/// init effective number of events (using event weights)
96
98{
99 UInt_t neve = fTrainingEvents.size();
100 if (neve==0) return;
101 //
102 fNEveEffTrain = CalcWeightSum( &fTrainingEvents );
103 //
104}
105
106////////////////////////////////////////////////////////////////////////////////
107/// initialize pointers
108
110{
111 this->SetMethodBase(rfbase);
112 fRuleEnsemble.Initialize( this );
113 fRuleFitParams.SetRuleFit( this );
114}
115
116////////////////////////////////////////////////////////////////////////////////
117/// initialize the parameters of the RuleFit method and make rules
118
120{
121 InitPtrs(rfbase);
122
123 if (fMethodRuleFit){
124 fMethodRuleFit->Data()->SetCurrentType(Types::kTraining);
125 UInt_t nevents = fMethodRuleFit->Data()->GetNTrainingEvents();
126 std::vector<const TMVA::Event*> tmp;
127 for (Long64_t ievt=0; ievt<nevents; ievt++) {
128 const Event *event = fMethodRuleFit->GetEvent(ievt);
129 tmp.push_back(event);
130 }
131 SetTrainingEvents( tmp );
132 }
133 // SetTrainingEvents( fMethodRuleFit->GetTrainingEvents() );
134
135 InitNEveEff();
136
137 MakeForest();
138
139 // Make the model - Rule + Linear (if fDoLinear is true)
140 fRuleEnsemble.MakeModel();
141
142 // init rulefit params
143 fRuleFitParams.Init();
144
145}
146
147////////////////////////////////////////////////////////////////////////////////
148/// set MethodBase
149
151{
152 fMethodBase = rfbase;
153 fMethodRuleFit = dynamic_cast<const MethodRuleFit *>(rfbase);
154}
155
156////////////////////////////////////////////////////////////////////////////////
157/// copy method
158
159void TMVA::RuleFit::Copy( const RuleFit& other )
160{
161 if(this != &other) {
162 fMethodRuleFit = other.GetMethodRuleFit();
163 fMethodBase = other.GetMethodBase();
164 fTrainingEvents = other.GetTrainingEvents();
165 // fSubsampleEvents = other.GetSubsampleEvents();
166
167 fForest = other.GetForest();
168 fRuleEnsemble = other.GetRuleEnsemble();
169 }
170}
171
172////////////////////////////////////////////////////////////////////////////////
173/// calculate the sum of weights
174
175Double_t TMVA::RuleFit::CalcWeightSum( const std::vector<const Event *> *events, UInt_t neve )
176{
177 if (events==0) return 0.0;
178 if (neve==0) neve=events->size();
179 //
180 Double_t sumw=0;
181 for (UInt_t ie=0; ie<neve; ie++) {
182 sumw += ((*events)[ie])->GetWeight();
183 }
184 return sumw;
185}
186
187////////////////////////////////////////////////////////////////////////////////
188/// set the current message type to that of mlog for this class and all other subtools
189
191{
192 fLogger->SetMinType(t);
193 fRuleEnsemble.SetMsgType(t);
194 fRuleFitParams.SetMsgType(t);
195}
196
197////////////////////////////////////////////////////////////////////////////////
198/// build the decision tree using fNTreeSample events from fTrainingEventsRndm
199
201{
202 if (dt==0) return;
203 if (fMethodRuleFit==0) {
204 Log() << kFATAL << "RuleFit::BuildTree() - Attempting to build a tree NOT from a MethodRuleFit" << Endl;
205 }
206 std::vector<const Event *> evevec;
207 for (UInt_t ie=0; ie<fNTreeSample; ie++) {
208 evevec.push_back(fTrainingEventsRndm[ie]);
209 }
210 dt->BuildTree(evevec);
211 if (fMethodRuleFit->GetPruneMethod() != DecisionTree::kNoPruning) {
212 dt->SetPruneMethod(fMethodRuleFit->GetPruneMethod());
213 dt->SetPruneStrength(fMethodRuleFit->GetPruneStrength());
214 dt->PruneTree();
215 }
216}
217
218////////////////////////////////////////////////////////////////////////////////
219/// make a forest of decisiontrees
220
222{
223 if (fMethodRuleFit==0) {
224 Log() << kFATAL << "RuleFit::BuildTree() - Attempting to build a tree NOT from a MethodRuleFit" << Endl;
225 }
226 Log() << kDEBUG << "Creating a forest with " << fMethodRuleFit->GetNTrees() << " decision trees" << Endl;
227 Log() << kDEBUG << "Each tree is built using a random subsample with " << fNTreeSample << " events" << Endl;
228 //
229 Timer timer( fMethodRuleFit->GetNTrees(), "RuleFit" );
230
231 //
232 TRandom3 rndGen;
233 //
234 // First save all event weights.
235 // Weights are modified by the boosting.
236 // Those weights we do not want for the later fitting.
237 //
238 Bool_t useBoost = fMethodRuleFit->UseBoost(); // (AdaBoost (True) or RandomForest/Tree (False)
239
240 if (useBoost) SaveEventWeights();
241
242 for (Int_t i=0; i<fMethodRuleFit->GetNTrees(); i++) {
243 // timer.DrawProgressBar(i);
244 if (!useBoost) ReshuffleEvents();
245
246 DecisionTree *dt=nullptr;
247 Bool_t tryAgain=kTRUE;
248 Int_t ntries=0;
249 const Int_t ntriesMax=10;
250 Double_t frnd = 0.;
251 while (tryAgain) {
252 frnd = 100*rndGen.Uniform( fMethodRuleFit->GetMinFracNEve(), 0.5*fMethodRuleFit->GetMaxFracNEve() );
253 Int_t iclass = 0; // event class being treated as signal during training
254 Bool_t useRandomisedTree = !useBoost;
255 dt = new DecisionTree( fMethodRuleFit->GetSeparationBase(), frnd, fMethodRuleFit->GetNCuts(), &(fMethodRuleFit->DataInfo()), iclass, useRandomisedTree);
256 dt->SetNVars(fMethodBase->GetNvar());
257
258 BuildTree(dt); // reads fNTreeSample events from fTrainingEventsRndm
259 if (dt->GetNNodes()<3) {
260 delete dt;
261 dt=0;
262 }
263 ntries++;
264 tryAgain = ((dt==0) && (ntries<ntriesMax));
265 }
266 if (dt) {
267 fForest.push_back(dt);
268 if (useBoost) Boost(dt);
269
270 } else {
271
272 Log() << kWARNING << "------------------------------------------------------------------" << Endl;
273 Log() << kWARNING << " Failed growing a tree even after " << ntriesMax << " trials" << Endl;
274 Log() << kWARNING << " Possible solutions: " << Endl;
275 Log() << kWARNING << " 1. increase the number of training events" << Endl;
276 Log() << kWARNING << " 2. set a lower min fraction cut (fEventsMin)" << Endl;
277 Log() << kWARNING << " 3. maybe also decrease the max fraction cut (fEventsMax)" << Endl;
278 Log() << kWARNING << " If the above warning occurs rarely only, it can be ignored" << Endl;
279 Log() << kWARNING << "------------------------------------------------------------------" << Endl;
280 }
281
282 Log() << kDEBUG << "Built tree with minimum cut at N = " << frnd <<"% events"
283 << " => N(nodes) = " << fForest.back()->GetNNodes()
284 << " ; n(tries) = " << ntries
285 << Endl;
286 }
287
288 // Now restore event weights
289 if (useBoost) RestoreEventWeights();
290
291 // print statistics on the forest created
292 ForestStatistics();
293}
294
295////////////////////////////////////////////////////////////////////////////////
296/// save event weights - must be done before making the forest
297
299{
300 fEventWeights.clear();
301 for (std::vector<const Event*>::iterator e=fTrainingEvents.begin(); e!=fTrainingEvents.end(); ++e) {
302 Double_t w = (*e)->GetBoostWeight();
303 fEventWeights.push_back(w);
304 }
305}
306
307////////////////////////////////////////////////////////////////////////////////
308/// save event weights - must be done before making the forest
309
311{
312 UInt_t ie=0;
313 if (fEventWeights.size() != fTrainingEvents.size()) {
314 Log() << kERROR << "RuleFit::RestoreEventWeights() called without having called SaveEventWeights() before!" << Endl;
315 return;
316 }
317 for (std::vector<const Event*>::iterator e=fTrainingEvents.begin(); e!=fTrainingEvents.end(); ++e) {
318 (*e)->SetBoostWeight(fEventWeights[ie]);
319 ie++;
320 }
321}
322
323////////////////////////////////////////////////////////////////////////////////
324/// Boost the events. The algorithm below is the called AdaBoost.
325/// See MethodBDT for details.
326/// Actually, this is a more or less copy of MethodBDT::AdaBoost().
327
329{
330 Double_t sumw=0; // sum of initial weights - all events
331 Double_t sumwfalse=0; // idem, only misclassified events
332 //
333 std::vector<Char_t> correctSelected; // <--- boolean stored
334 //
335 for (std::vector<const Event*>::iterator e=fTrainingEvents.begin(); e!=fTrainingEvents.end(); ++e) {
336 Bool_t isSignalType = (dt->CheckEvent(*e,kTRUE) > 0.5 );
337 Double_t w = (*e)->GetWeight();
338 sumw += w;
339 //
340 if (isSignalType == fMethodBase->DataInfo().IsSignal(*e)) { // correctly classified
341 correctSelected.push_back(kTRUE);
342 }
343 else { // misclassified
344 sumwfalse+= w;
345 correctSelected.push_back(kFALSE);
346 }
347 }
348 // misclassification error
349 Double_t err = sumwfalse/sumw;
350 // calculate boost weight for misclassified events
351 // use for now the exponent = 1.0
352 // one could have w = ((1-err)/err)^beta
353 Double_t boostWeight = (err>0 ? (1.0-err)/err : 1000.0);
354 Double_t newSumw=0.0;
355 UInt_t ie=0;
356 // set new weight to misclassified events
357 for (std::vector<const Event*>::iterator e=fTrainingEvents.begin(); e!=fTrainingEvents.end(); ++e) {
358 if (!correctSelected[ie])
359 (*e)->SetBoostWeight( (*e)->GetBoostWeight() * boostWeight);
360 newSumw+=(*e)->GetWeight();
361 ie++;
362 }
363 // reweight all events
364 Double_t scale = sumw/newSumw;
365 for (std::vector<const Event*>::iterator e=fTrainingEvents.begin(); e!=fTrainingEvents.end(); ++e) {
366 (*e)->SetBoostWeight( (*e)->GetBoostWeight() * scale);
367 }
368 Log() << kDEBUG << "boostWeight = " << boostWeight << " scale = " << scale << Endl;
369}
370
371////////////////////////////////////////////////////////////////////////////////
372/// summary of statistics of all trees
373/// - end-nodes: average and spread
374
376{
377 UInt_t ntrees = fForest.size();
378 if (ntrees==0) return;
379 const DecisionTree *tree;
380 Double_t sumn2 = 0;
381 Double_t sumn = 0;
382 Double_t nd;
383 for (UInt_t i=0; i<ntrees; i++) {
384 tree = fForest[i];
385 nd = Double_t(tree->GetNNodes());
386 sumn += nd;
387 sumn2 += nd*nd;
388 }
389 Double_t sig = TMath::Sqrt( gTools().ComputeVariance( sumn2, sumn, ntrees ));
390 Log() << kVERBOSE << "Nodes in trees: average & std dev = " << sumn/ntrees << " , " << sig << Endl;
391}
392
393////////////////////////////////////////////////////////////////////////////////
394///
395/// Fit the coefficients for the rule ensemble
396///
397
399{
400 Log() << kVERBOSE << "Fitting rule/linear terms" << Endl;
401 fRuleFitParams.MakeGDPath();
402}
403
404////////////////////////////////////////////////////////////////////////////////
405/// calculates the importance of each rule
406
408{
409 Log() << kVERBOSE << "Calculating importance" << Endl;
410 fRuleEnsemble.CalcImportance();
411 fRuleEnsemble.CleanupRules();
412 fRuleEnsemble.CleanupLinear();
413 fRuleEnsemble.CalcVarImportance();
414 Log() << kVERBOSE << "Filling rule statistics" << Endl;
415 fRuleEnsemble.RuleResponseStats();
416}
417
418////////////////////////////////////////////////////////////////////////////////
419/// evaluate single event
420
422{
423 return fRuleEnsemble.EvalEvent( e );
424}
425
426////////////////////////////////////////////////////////////////////////////////
427/// set the training events randomly
428
429void TMVA::RuleFit::SetTrainingEvents( const std::vector<const Event *>& el )
430{
431 if (fMethodRuleFit==0) Log() << kFATAL << "RuleFit::SetTrainingEvents - MethodRuleFit not initialized" << Endl;
432 UInt_t neve = el.size();
433 if (neve==0) Log() << kWARNING << "An empty sample of training events was given" << Endl;
434
435 // copy vector
436 fTrainingEvents.clear();
437 fTrainingEventsRndm.clear();
438 for (UInt_t i=0; i<neve; i++) {
439 fTrainingEvents.push_back(static_cast< const Event *>(el[i]));
440 fTrainingEventsRndm.push_back(static_cast< const Event *>(el[i]));
441 }
442
443 // Re-shuffle the vector, ie, recreate it in a random order
444 std::shuffle(fTrainingEventsRndm.begin(), fTrainingEventsRndm.end(), fRNGEngine);
445
446 // fraction events per tree
447 fNTreeSample = static_cast<UInt_t>(neve*fMethodRuleFit->GetTreeEveFrac());
448 Log() << kDEBUG << "Number of events per tree : " << fNTreeSample
449 << " ( N(events) = " << neve << " )"
450 << " randomly drawn without replacement" << Endl;
451}
452
453////////////////////////////////////////////////////////////////////////////////
454/// draw a random subsample of the training events without replacement
455
456void TMVA::RuleFit::GetRndmSampleEvents(std::vector< const Event * > & evevec, UInt_t nevents)
457{
458 ReshuffleEvents();
459 if ((nevents<fTrainingEventsRndm.size()) && (nevents>0)) {
460 evevec.resize(nevents);
461 for (UInt_t ie=0; ie<nevents; ie++) {
462 evevec[ie] = fTrainingEventsRndm[ie];
463 }
464 }
465 else {
466 Log() << kWARNING << "GetRndmSampleEvents() : requested sub sample size larger than total size (BUG!).";
467 }
468}
469////////////////////////////////////////////////////////////////////////////////
470/// normalize rule importance hists
471///
472/// if all weights are positive, the scale will be 1/maxweight
473/// if minimum weight < 0, then the scale will be 1/max(maxweight,abs(minweight))
474
475void TMVA::RuleFit::NormVisHists(std::vector<TH2F *> & hlist)
476{
477 if (hlist.empty()) return;
478 //
479 Double_t wmin=0;
480 Double_t wmax=0;
481 Double_t w,wm;
482 Double_t awmin;
483 Double_t scale;
484 for (UInt_t i=0; i<hlist.size(); i++) {
485 TH2F *hs = hlist[i];
486 w = hs->GetMaximum();
487 wm = hs->GetMinimum();
488 if (i==0) {
489 wmin=wm;
490 wmax=w;
491 }
492 else {
493 if (w>wmax) wmax=w;
494 if (wm<wmin) wmin=wm;
495 }
496 }
497 awmin = TMath::Abs(wmin);
498 Double_t usemin,usemax;
499 if (awmin>wmax) {
500 scale = 1.0/awmin;
501 usemin = -1.0;
502 usemax = scale*wmax;
503 }
504 else {
505 scale = 1.0/wmax;
506 usemin = scale*wmin;
507 usemax = 1.0;
508 }
509
510 //
511 for (UInt_t i=0; i<hlist.size(); i++) {
512 TH2F *hs = hlist[i];
513 hs->Scale(scale);
514 hs->SetMinimum(usemin);
515 hs->SetMaximum(usemax);
516 }
517}
518
519////////////////////////////////////////////////////////////////////////////////
520/// Fill cut
521
void TMVA::RuleFit::FillCut(TH2F* h2, const Rule *rule, Int_t vind)
{
   // Fill histogram h2 along X with the rule's weight over the cut range
   // of variable vind; edge bins are filled with the fraction of the bin
   // covered by the cut.
   if (rule==0) return;
   if (h2==0) return;
   //
   // cut range [rmin,rmax] of this rule for variable vind;
   // dormin/dormax flag whether the lower/upper edge is actually cut on
   Double_t rmin, rmax;
   Bool_t dormin,dormax;
   Bool_t ruleHasVar = rule->GetRuleCut()->GetCutRange(vind,rmin,rmax,dormin,dormax);
   if (!ruleHasVar) return;
   //
   Int_t firstbin = h2->GetBin(1,1,1);
   if(firstbin<0) firstbin=0;
   Int_t lastbin = h2->GetBin(h2->GetNbinsX(),1,1);
   // bins covered by the cut; an open edge defaults to the histogram limit
   Int_t binmin=(dormin ? h2->FindBin(rmin,0.5):firstbin);
   Int_t binmax=(dormax ? h2->FindBin(rmax,0.5):lastbin);
   Int_t fbin;
   // NOTE(review): the edge-fraction math below assumes uniform X bin
   // widths (a single xbinw is used for all bins) — confirm for the
   // histograms passed in here
   Double_t xbinw = h2->GetXaxis()->GetBinWidth(firstbin);
   Double_t fbmin = h2->GetXaxis()->GetBinLowEdge(binmin-firstbin+1);
   Double_t lbmax = h2->GetXaxis()->GetBinLowEdge(binmax-firstbin+1)+xbinw;
   // fraction of the first/last bin that lies inside the cut range
   Double_t fbfrac = (dormin ? ((fbmin+xbinw-rmin)/xbinw):1.0);
   Double_t lbfrac = (dormax ? ((rmax-lbmax+xbinw)/xbinw):1.0);
   Double_t f;
   Double_t xc;
   Double_t val;

   for (Int_t bin = binmin; bin<binmax+1; bin++) {
      fbin = bin-firstbin+1;
      if (bin==binmin) {
         f = fbfrac;   // partial coverage of the first bin
      }
      else if (bin==binmax) {
         f = lbfrac;   // partial coverage of the last bin
      }
      else {
         f = 1.0;      // interior bins are fully covered
      }
      xc = h2->GetXaxis()->GetBinCenter(fbin);
      //
      // fill weight: rule importance, or coefficient * support
      if (fVisHistsUseImp) {
         val = rule->GetImportance();
      }
      else {
         val = rule->GetCoefficient()*rule->GetSupport();
      }
      h2->Fill(xc,0.5,val*f);
   }
}
569
570////////////////////////////////////////////////////////////////////////////////
571/// fill lin
572
574{
575 if (h2==0) return;
576 if (!fRuleEnsemble.DoLinear()) return;
577 //
578 Int_t firstbin = 1;
579 Int_t lastbin = h2->GetNbinsX();
580 Double_t xc;
581 Double_t val;
582 if (fVisHistsUseImp) {
583 val = fRuleEnsemble.GetLinImportance(vind);
584 }
585 else {
586 val = fRuleEnsemble.GetLinCoefficients(vind);
587 }
588 for (Int_t bin = firstbin; bin<lastbin+1; bin++) {
589 xc = h2->GetXaxis()->GetBinCenter(bin);
590 h2->Fill(xc,0.5,val);
591 }
592}
593
594////////////////////////////////////////////////////////////////////////////////
595/// fill rule correlation between vx and vy, weighted with either the importance or the coefficient
596
597void TMVA::RuleFit::FillCorr(TH2F* h2,const Rule *rule,Int_t vx, Int_t vy)
598{
599 if (rule==0) return;
600 if (h2==0) return;
601 Double_t val;
602 if (fVisHistsUseImp) {
603 val = rule->GetImportance();
604 }
605 else {
606 val = rule->GetCoefficient()*rule->GetSupport();
607 }
608 //
609 Double_t rxmin, rxmax, rymin, rymax;
610 Bool_t dorxmin, dorxmax, dorymin, dorymax;
611 //
612 // Get range in rule for X and Y
613 //
614 Bool_t ruleHasVarX = rule->GetRuleCut()->GetCutRange(vx,rxmin,rxmax,dorxmin,dorxmax);
615 Bool_t ruleHasVarY = rule->GetRuleCut()->GetCutRange(vy,rymin,rymax,dorymin,dorymax);
616 if (!(ruleHasVarX || ruleHasVarY)) return;
617 // min max of varX and varY in hist
618 Double_t vxmin = (dorxmin ? rxmin:h2->GetXaxis()->GetXmin());
619 Double_t vxmax = (dorxmax ? rxmax:h2->GetXaxis()->GetXmax());
620 Double_t vymin = (dorymin ? rymin:h2->GetYaxis()->GetXmin());
621 Double_t vymax = (dorymax ? rymax:h2->GetYaxis()->GetXmax());
622 // min max bin in X and Y
623 Int_t binxmin = h2->GetXaxis()->FindBin(vxmin);
624 Int_t binxmax = h2->GetXaxis()->FindBin(vxmax);
625 Int_t binymin = h2->GetYaxis()->FindBin(vymin);
626 Int_t binymax = h2->GetYaxis()->FindBin(vymax);
627 // bin widths
628 Double_t xbinw = h2->GetXaxis()->GetBinWidth(binxmin);
629 Double_t ybinw = h2->GetYaxis()->GetBinWidth(binxmin);
630 Double_t xbinmin = h2->GetXaxis()->GetBinLowEdge(binxmin);
631 Double_t xbinmax = h2->GetXaxis()->GetBinLowEdge(binxmax)+xbinw;
632 Double_t ybinmin = h2->GetYaxis()->GetBinLowEdge(binymin);
633 Double_t ybinmax = h2->GetYaxis()->GetBinLowEdge(binymax)+ybinw;
634 // fraction of edges
635 Double_t fxbinmin = (dorxmin ? ((xbinmin+xbinw-vxmin)/xbinw):1.0);
636 Double_t fxbinmax = (dorxmax ? ((vxmax-xbinmax+xbinw)/xbinw):1.0);
637 Double_t fybinmin = (dorymin ? ((ybinmin+ybinw-vymin)/ybinw):1.0);
638 Double_t fybinmax = (dorymax ? ((vymax-ybinmax+ybinw)/ybinw):1.0);
639 //
640 Double_t fx,fy;
641 Double_t xc,yc;
642 // fill histo
643 for (Int_t binx = binxmin; binx<binxmax+1; binx++) {
644 if (binx==binxmin) {
645 fx = fxbinmin;
646 }
647 else if (binx==binxmax) {
648 fx = fxbinmax;
649 }
650 else {
651 fx = 1.0;
652 }
653 xc = h2->GetXaxis()->GetBinCenter(binx);
654 for (Int_t biny = binymin; biny<binymax+1; biny++) {
655 if (biny==binymin) {
656 fy = fybinmin;
657 }
658 else if (biny==binymax) {
659 fy = fybinmax;
660 }
661 else {
662 fy = 1.0;
663 }
664 yc = h2->GetYaxis()->GetBinCenter(biny);
665 h2->Fill(xc,yc,val*fx*fy);
666 }
667 }
668}
669
670////////////////////////////////////////////////////////////////////////////////
671/// help routine to MakeVisHists() - fills for all variables
672
673void TMVA::RuleFit::FillVisHistCut(const Rule* rule, std::vector<TH2F *> & hlist)
674{
675 Int_t nhists = hlist.size();
676 Int_t nvar = fMethodBase->GetNvar();
677 if (nhists!=nvar) Log() << kFATAL << "BUG TRAP: number of hists is not equal the number of variables!" << Endl;
678 //
679 std::vector<Int_t> vindex;
680 TString hstr;
681 // not a nice way to do a check...
682 for (Int_t ih=0; ih<nhists; ih++) {
683 hstr = hlist[ih]->GetTitle();
684 for (Int_t iv=0; iv<nvar; iv++) {
685 if (fMethodBase->GetInputTitle(iv) == hstr)
686 vindex.push_back(iv);
687 }
688 }
689 //
690 for (Int_t iv=0; iv<nvar; iv++) {
691 if (rule) {
692 if (rule->ContainsVariable(vindex[iv])) {
693 FillCut(hlist[iv],rule,vindex[iv]);
694 }
695 }
696 else {
697 FillLin(hlist[iv],vindex[iv]);
698 }
699 }
700}
701////////////////////////////////////////////////////////////////////////////////
702/// help routine to MakeVisHists() - fills for all correlation plots
703
void TMVA::RuleFit::FillVisHistCorr(const Rule * rule, std::vector<TH2F *> & hlist)
{
   // Fill all correlation histograms whose variable pair involves a
   // variable used by the given rule.
   if (rule==0) return;
   // skip rules below the importance cut - they would only add noise
   Double_t ruleimp = rule->GetImportance();
   if (!(ruleimp>0)) return;
   if (ruleimp<fRuleEnsemble.GetImportanceCut()) return;
   //
   Int_t nhists = hlist.size();
   Int_t nvar = fMethodBase->GetNvar();
   // expected number of histograms: one per unordered variable pair
   Int_t ncorr = (nvar*(nvar+1)/2)-nvar;
   if (nhists!=ncorr) Log() << kERROR << "BUG TRAP: number of corr hists is not correct! ncorr = "
                            << ncorr << " nvar = " << nvar << " nhists = " << nhists << Endl;
   //
   // decode the two variable indices from each histogram name
   std::vector< std::pair<Int_t,Int_t> > vindex;
   TString hstr, var1, var2;
   Int_t iv1=0,iv2=0;
   // not a nice way to do a check...
   for (Int_t ih=0; ih<nhists; ih++) {
      hstr = hlist[ih]->GetName();
      if (GetCorrVars( hstr, var1, var2 )) {
         iv1 = fMethodBase->DataInfo().FindVarIndex( var1 );
         iv2 = fMethodBase->DataInfo().FindVarIndex( var2 );
         vindex.push_back( std::pair<Int_t,Int_t>(iv2,iv1) ); // pair X, Y
      }
      else {
         Log() << kERROR << "BUG TRAP: should not be here - failed getting var1 and var2" << Endl;
      }
   }
   //
   // fill each histogram whose pair involves a variable used by the rule
   for (Int_t ih=0; ih<nhists; ih++) {
      if ( (rule->ContainsVariable(vindex[ih].first)) ||
           (rule->ContainsVariable(vindex[ih].second)) ) {
         FillCorr(hlist[ih],rule,vindex[ih].first,vindex[ih].second);
      }
   }
}
740////////////////////////////////////////////////////////////////////////////////
741/// get first and second variables from title
742
744{
745 var1="";
746 var2="";
747 if(!title.BeginsWith("scat_")) return kFALSE;
748
749 TString titleCopy = title(5,title.Length());
750 if(titleCopy.Index("_RF2D")>=0) titleCopy.Remove(titleCopy.Index("_RF2D"));
751
752 Int_t splitPos = titleCopy.Index("_vs_");
753 if(splitPos>=0) { // there is a _vs_ in the string
754 var1 = titleCopy(0,splitPos);
755 var2 = titleCopy(splitPos+4, titleCopy.Length());
756 return kTRUE;
757 }
758 else {
759 var1 = titleCopy;
760 return kFALSE;
761 }
762}
763////////////////////////////////////////////////////////////////////////////////
764/// this will create histograms visualizing the rule ensemble
765
767{
768 const TString directories[5] = { "InputVariables_Id",
769 "InputVariables_Deco",
770 "InputVariables_PCA",
771 "InputVariables_Gauss",
772 "InputVariables_Gauss_Deco" };
773
774 const TString corrDirName = "CorrelationPlots";
775
776 TDirectory* rootDir = fMethodBase->GetFile();
777 TDirectory* varDir = 0;
778 TDirectory* corrDir = 0;
779
780 TDirectory* methodDir = fMethodBase->BaseDir();
781 TString varDirName;
782 //
783 Bool_t done=(rootDir==0);
784 Int_t type=0;
785 if (done) {
786 Log() << kWARNING << "No basedir - BUG??" << Endl;
787 return;
788 }
789 while (!done) {
790 varDir = (TDirectory*)rootDir->Get( directories[type] );
791 type++;
792 done = ((varDir!=0) || (type>4));
793 }
794 if (varDir==0) {
795 Log() << kWARNING << "No input variable directory found - BUG?" << Endl;
796 return;
797 }
798 corrDir = (TDirectory*)varDir->Get( corrDirName );
799 if (corrDir==0) {
800 Log() << kWARNING << "No correlation directory found" << Endl;
801 Log() << kWARNING << "Check for other warnings related to correlation histograms" << Endl;
802 return;
803 }
804 if (methodDir==0) {
805 Log() << kWARNING << "No rulefit method directory found - BUG?" << Endl;
806 return;
807 }
808
809 varDirName = varDir->GetName();
810 varDir->cd();
811 //
812 // get correlation plot directory
813 corrDir = (TDirectory *)varDir->Get(corrDirName);
814 if (corrDir==0) {
815 Log() << kWARNING << "No correlation directory found : " << corrDirName << Endl;
816 return;
817 }
818
819 // how many plots are in the var directory?
820 Int_t noPlots = ((varDir->GetListOfKeys())->GetEntries()) / 2;
821 Log() << kDEBUG << "Got number of plots = " << noPlots << Endl;
822
823 // loop over all objects in directory
824 std::vector<TH2F *> h1Vector;
825 std::vector<TH2F *> h2CorrVector;
826 TIter next(varDir->GetListOfKeys());
827 TKey *key;
828 while ((key = (TKey*)next())) {
829 // make sure, that we only look at histograms
830 TClass *cl = gROOT->GetClass(key->GetClassName());
831 if (!cl->InheritsFrom(TH1F::Class())) continue;
832 TH1F *sig = (TH1F*)key->ReadObj();
833 TString hname= sig->GetName();
834 Log() << kDEBUG << "Got histogram : " << hname << Endl;
835
836 // check for all signal histograms
837 if (hname.Contains("__S")){ // found a new signal plot
838 TString htitle = sig->GetTitle();
839 htitle.ReplaceAll("signal","");
840 TString newname = hname;
841 newname.ReplaceAll("__Signal","__RF");
842 newname.ReplaceAll("__S","__RF");
843
844 methodDir->cd();
845 TH2F *newhist = new TH2F(newname,htitle,sig->GetNbinsX(),sig->GetXaxis()->GetXmin(),sig->GetXaxis()->GetXmax(),
846 1,sig->GetYaxis()->GetXmin(),sig->GetYaxis()->GetXmax());
847 varDir->cd();
848 h1Vector.push_back( newhist );
849 }
850 }
851 //
852 corrDir->cd();
853 TString var1,var2;
854 TIter nextCorr(corrDir->GetListOfKeys());
855 while ((key = (TKey*)nextCorr())) {
856 // make sure, that we only look at histograms
857 TClass *cl = gROOT->GetClass(key->GetClassName());
858 if (!cl->InheritsFrom(TH2F::Class())) continue;
859 TH2F *sig = (TH2F*)key->ReadObj();
860 TString hname= sig->GetName();
861
862 // check for all signal histograms
863 if ((hname.Contains("scat_")) && (hname.Contains("_Signal"))) {
864 Log() << kDEBUG << "Got histogram (2D) : " << hname << Endl;
865 TString htitle = sig->GetTitle();
866 htitle.ReplaceAll("(Signal)","");
867 TString newname = hname;
868 newname.ReplaceAll("_Signal","_RF2D");
869
870 methodDir->cd();
871 const Int_t rebin=2;
872 TH2F *newhist = new TH2F(newname,htitle,
873 sig->GetNbinsX()/rebin,sig->GetXaxis()->GetXmin(),sig->GetXaxis()->GetXmax(),
874 sig->GetNbinsY()/rebin,sig->GetYaxis()->GetXmin(),sig->GetYaxis()->GetXmax());
875 if (GetCorrVars( newname, var1, var2 )) {
876 Int_t iv1 = fMethodBase->DataInfo().FindVarIndex(var1);
877 Int_t iv2 = fMethodBase->DataInfo().FindVarIndex(var2);
878 if (iv1<0) {
879 sig->GetYaxis()->SetTitle(var1);
880 }
881 else {
882 sig->GetYaxis()->SetTitle(fMethodBase->GetInputTitle(iv1));
883 }
884 if (iv2<0) {
885 sig->GetXaxis()->SetTitle(var2);
886 }
887 else {
888 sig->GetXaxis()->SetTitle(fMethodBase->GetInputTitle(iv2));
889 }
890 }
891 corrDir->cd();
892 h2CorrVector.push_back( newhist );
893 }
894 }
895
896 varDir->cd();
897 // fill rules
898 UInt_t nrules = fRuleEnsemble.GetNRules();
899 const Rule *rule;
900 for (UInt_t i=0; i<nrules; i++) {
901 rule = fRuleEnsemble.GetRulesConst(i);
902 FillVisHistCut(rule, h1Vector);
903 }
904 // fill linear terms and normalise hists
905 FillVisHistCut(0, h1Vector);
906 NormVisHists(h1Vector);
907
908 //
909 corrDir->cd();
910 // fill rules
911 for (UInt_t i=0; i<nrules; i++) {
912 rule = fRuleEnsemble.GetRulesConst(i);
913 FillVisHistCorr(rule, h2CorrVector);
914 }
915 NormVisHists(h2CorrVector);
916
917 // write histograms to file
918 methodDir->cd();
919 for (UInt_t i=0; i<h1Vector.size(); i++) h1Vector[i]->Write();
920 for (UInt_t i=0; i<h2CorrVector.size(); i++) h2CorrVector[i]->Write();
921}
922
923////////////////////////////////////////////////////////////////////////////////
924/// this will create a histograms intended rather for debugging or for the curious user
925
927{
928 TDirectory* methodDir = fMethodBase->BaseDir();
929 if (methodDir==0) {
930 Log() << kWARNING << "<MakeDebugHists> No rulefit method directory found - bug?" << Endl;
931 return;
932 }
933 //
934 methodDir->cd();
935 std::vector<Double_t> distances;
936 std::vector<Double_t> fncuts;
937 std::vector<Double_t> fnvars;
938 const Rule *ruleA;
939 const Rule *ruleB;
940 Double_t dABmin=1000000.0;
941 Double_t dABmax=-1.0;
942 UInt_t nrules = fRuleEnsemble.GetNRules();
943 for (UInt_t i=0; i<nrules; i++) {
944 ruleA = fRuleEnsemble.GetRulesConst(i);
945 for (UInt_t j=i+1; j<nrules; j++) {
946 ruleB = fRuleEnsemble.GetRulesConst(j);
947 Double_t dAB = ruleA->RuleDist( *ruleB, kTRUE );
948 if (dAB>-0.5) {
949 UInt_t nc = ruleA->GetNcuts();
950 UInt_t nv = ruleA->GetNumVarsUsed();
951 distances.push_back(dAB);
952 fncuts.push_back(static_cast<Double_t>(nc));
953 fnvars.push_back(static_cast<Double_t>(nv));
954 if (dAB<dABmin) dABmin=dAB;
955 if (dAB>dABmax) dABmax=dAB;
956 }
957 }
958 }
959 //
960 TH1F *histDist = new TH1F("RuleDist","Rule distances",100,dABmin,dABmax);
961 TTree *distNtuple = new TTree("RuleDistNtuple","RuleDist ntuple");
962 Double_t ntDist;
963 Double_t ntNcuts;
964 Double_t ntNvars;
965 distNtuple->Branch("dist", &ntDist, "dist/D");
966 distNtuple->Branch("ncuts",&ntNcuts, "ncuts/D");
967 distNtuple->Branch("nvars",&ntNvars, "nvars/D");
968 //
969 for (UInt_t i=0; i<distances.size(); i++) {
970 histDist->Fill(distances[i]);
971 ntDist = distances[i];
972 ntNcuts = fncuts[i];
973 ntNvars = fnvars[i];
974 distNtuple->Fill();
975 }
976 distNtuple->Write();
977}
#define f(i)
Definition RSha256.hxx:104
#define e(i)
Definition RSha256.hxx:103
double fy() const
constexpr Bool_t kFALSE
Definition RtypesCore.h:101
double Double_t
Definition RtypesCore.h:59
long long Long64_t
Definition RtypesCore.h:80
constexpr Bool_t kTRUE
Definition RtypesCore.h:100
#define ClassImp(name)
Definition Rtypes.h:377
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t wmin
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t Float_t Float_t Float_t Int_t Int_t UInt_t UInt_t Rectangle_t Int_t Int_t Window_t TString Int_t GCValues_t GetPrimarySelectionOwner GetDisplay GetScreen GetColormap GetNativeEvent const char const char dpyName wid window const char font_name cursor keysym reg const char only_if_exist regb h Point_t winding char text const char depth char const char Int_t count const char ColorStruct_t color const char Pixmap_t Pixmap_t PictureAttributes_t attr const char char ret_data h unsigned char height h Atom_t Int_t ULong_t ULong_t unsigned char prop_list Atom_t Atom_t Atom_t Time_t type
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t wmax
#define gROOT
Definition TROOT.h:406
Double_t GetXmax() const
Definition TAxis.h:140
Double_t GetXmin() const
Definition TAxis.h:139
TClass instances represent classes, structs and namespaces in the ROOT type system.
Definition TClass.h:81
Bool_t InheritsFrom(const char *cl) const override
Return kTRUE if this class inherits from a class with name "classname".
Definition TClass.cxx:4874
static TClass * GetClass(const char *name, Bool_t load=kTRUE, Bool_t silent=kFALSE)
Static method returning pointer to TClass of the specified class name.
Definition TClass.cxx:2968
Describe directory structure in memory.
Definition TDirectory.h:45
virtual TObject * Get(const char *namecycle)
Return pointer to object identified by namecycle.
virtual TFile * GetFile() const
Definition TDirectory.h:220
virtual Bool_t cd()
Change current directory to "this" directory.
virtual TList * GetListOfKeys() const
Definition TDirectory.h:223
1-D histogram with a float per channel (see TH1 documentation)
Definition TH1.h:621
static TClass * Class()
virtual Int_t GetNbinsY() const
Definition TH1.h:298
TAxis * GetXaxis()
Definition TH1.h:324
virtual Double_t GetMaximum(Double_t maxval=FLT_MAX) const
Return maximum value smaller than maxval of bins in the range, unless the value has been overridden b...
Definition TH1.cxx:8509
virtual Int_t GetNbinsX() const
Definition TH1.h:297
virtual void SetMaximum(Double_t maximum=-1111)
Definition TH1.h:403
virtual Int_t Fill(Double_t x)
Increment bin with abscissa X by 1.
Definition TH1.cxx:3340
TAxis * GetYaxis()
Definition TH1.h:325
virtual void SetMinimum(Double_t minimum=-1111)
Definition TH1.h:404
virtual void Scale(Double_t c1=1, Option_t *option="")
Multiply this histogram by a constant c1.
Definition TH1.cxx:6568
virtual Double_t GetMinimum(Double_t minval=-FLT_MAX) const
Return minimum value larger than minval of bins in the range, unless the value has been overridden by...
Definition TH1.cxx:8599
2-D histogram with a float per channel (see TH1 documentation)
Definition TH2.h:295
static TClass * Class()
Book space in a file, create I/O buffers, to fill them, (un)compress them.
Definition TKey.h:28
virtual const char * GetClassName() const
Definition TKey.h:75
virtual TObject * ReadObj()
To read a TObject* from the file.
Definition TKey.cxx:758
UInt_t GetNNodes() const
Definition BinaryTree.h:86
Implementation of a Decision Tree.
UInt_t BuildTree(const EventConstList &eventSample, DecisionTreeNode *node=nullptr)
building the decision tree by recursively calling the splitting of one (root-) node into two daughter...
void SetPruneMethod(EPruneMethod m=kCostComplexityPruning)
void SetPruneStrength(Double_t p)
Double_t CheckEvent(const TMVA::Event *, Bool_t UseYesNoLeaf=kFALSE) const
the event e is put into the decision tree (starting at the root node) and the output is NodeType (sig...
void SetNVars(Int_t n)
Double_t PruneTree(const EventConstList *validationSample=nullptr)
prune (get rid of internal nodes) the Decision tree to avoid overtraining several different pruning m...
Virtual base Class for all MVA method.
Definition MethodBase.h:111
J Friedman's RuleFit method.
ostringstream derivative to redirect and format output
Definition MsgLogger.h:57
Bool_t GetCutRange(Int_t sel, Double_t &rmin, Double_t &rmax, Bool_t &dormin, Bool_t &dormax) const
get cut range for a given selector
Definition RuleCut.cxx:176
A class implementing various fits of rule ensembles.
Definition RuleFit.h:46
void GetRndmSampleEvents(std::vector< const TMVA::Event * > &evevec, UInt_t nevents)
draw a random subsample of the training events without replacement
Definition RuleFit.cxx:456
Double_t EvalEvent(const Event &e)
evaluate single event
Definition RuleFit.cxx:421
void SetMethodBase(const MethodBase *rfbase)
set MethodBase
Definition RuleFit.cxx:150
void InitPtrs(const TMVA::MethodBase *rfbase)
initialize pointers
Definition RuleFit.cxx:109
void Boost(TMVA::DecisionTree *dt)
Boost the events.
Definition RuleFit.cxx:328
void ForestStatistics()
summary of statistics of all trees
Definition RuleFit.cxx:375
static const Int_t randSEED
Definition RuleFit.h:176
void CalcImportance()
calculates the importance of each rule
Definition RuleFit.cxx:407
void SetMsgType(EMsgType t)
set the current message type to that of mlog for this class and all other subtools
Definition RuleFit.cxx:190
void Initialize(const TMVA::MethodBase *rfbase)
initialize the parameters of the RuleFit method and make rules
Definition RuleFit.cxx:119
virtual ~RuleFit(void)
destructor
Definition RuleFit.cxx:89
void FillVisHistCorr(const Rule *rule, std::vector< TH2F * > &hlist)
help routine to MakeVisHists() - fills for all correlation plots
Definition RuleFit.cxx:704
std::default_random_engine fRNGEngine
Definition RuleFit.h:177
void InitNEveEff()
init effective number of events (using event weights)
Definition RuleFit.cxx:97
void SaveEventWeights()
save event weights - must be done before making the forest
Definition RuleFit.cxx:298
void FillCut(TH2F *h2, const TMVA::Rule *rule, Int_t vind)
Fill cut.
Definition RuleFit.cxx:522
void FillLin(TH2F *h2, Int_t vind)
fill lin
Definition RuleFit.cxx:573
Bool_t GetCorrVars(TString &title, TString &var1, TString &var2)
get first and second variables from title
Definition RuleFit.cxx:743
void MakeForest()
make a forest of decisiontrees
Definition RuleFit.cxx:221
const std::vector< const TMVA::DecisionTree * > & GetForest() const
Definition RuleFit.h:144
void FitCoefficients()
Fit the coefficients for the rule ensemble.
Definition RuleFit.cxx:398
const MethodBase * GetMethodBase() const
Definition RuleFit.h:150
void FillCorr(TH2F *h2, const TMVA::Rule *rule, Int_t v1, Int_t v2)
fill rule correlation between vx and vy, weighted with either the importance or the coefficient
Definition RuleFit.cxx:597
void NormVisHists(std::vector< TH2F * > &hlist)
normalize rule importance hists
Definition RuleFit.cxx:475
void RestoreEventWeights()
save event weights - must be done before making the forest
Definition RuleFit.cxx:310
void MakeVisHists()
this will create histograms visualizing the rule ensemble
Definition RuleFit.cxx:766
void FillVisHistCut(const Rule *rule, std::vector< TH2F * > &hlist)
help routine to MakeVisHists() - fills for all variables
Definition RuleFit.cxx:673
void BuildTree(TMVA::DecisionTree *dt)
build the decision tree using fNTreeSample events from fTrainingEventsRndm
Definition RuleFit.cxx:200
const std::vector< const TMVA::Event * > & GetTrainingEvents() const
Definition RuleFit.h:138
const MethodRuleFit * GetMethodRuleFit() const
Definition RuleFit.h:149
void SetTrainingEvents(const std::vector< const TMVA::Event * > &el)
set the training events randomly
Definition RuleFit.cxx:429
void Copy(const RuleFit &other)
copy method
Definition RuleFit.cxx:159
const RuleEnsemble & GetRuleEnsemble() const
Definition RuleFit.h:145
Double_t CalcWeightSum(const std::vector< const TMVA::Event * > *events, UInt_t neve=0)
calculate the sum of weights
Definition RuleFit.cxx:175
RuleFit(void)
default constructor
Definition RuleFit.cxx:75
void MakeDebugHists()
this will create a histograms intended rather for debugging or for the curious user
Definition RuleFit.cxx:926
Implementation of a rule.
Definition Rule.h:50
Double_t GetSupport() const
Definition Rule.h:142
UInt_t GetNcuts() const
Definition Rule.h:133
UInt_t GetNumVarsUsed() const
Definition Rule.h:130
const RuleCut * GetRuleCut() const
Definition Rule.h:139
Double_t GetCoefficient() const
Definition Rule.h:141
Double_t GetImportance() const
Definition Rule.h:145
Double_t RuleDist(const Rule &other, Bool_t useCutValue) const
Returns:
Definition Rule.cxx:192
Bool_t ContainsVariable(UInt_t iv) const
check if variable in node
Definition Rule.cxx:139
Timing information for training and evaluation of MVA methods.
Definition Timer.h:58
@ kTraining
Definition Types.h:143
virtual void SetTitle(const char *title="")
Set the title of the TNamed.
Definition TNamed.cxx:164
const char * GetName() const override
Returns name of object.
Definition TNamed.h:47
const char * GetTitle() const override
Returns title of object.
Definition TNamed.h:48
Random number generator class based on M.
Definition TRandom3.h:27
virtual Double_t Uniform(Double_t x1=1)
Returns a uniform deviate on the interval (0, x1).
Definition TRandom.cxx:682
Basic string class.
Definition TString.h:139
Ssiz_t Length() const
Definition TString.h:417
TString & ReplaceAll(const TString &s1, const TString &s2)
Definition TString.h:704
Bool_t BeginsWith(const char *s, ECaseCompare cmp=kExact) const
Definition TString.h:623
TString & Remove(Ssiz_t pos)
Definition TString.h:685
Bool_t Contains(const char *pat, ECaseCompare cmp=kExact) const
Definition TString.h:632
Ssiz_t Index(const char *pat, Ssiz_t i=0, ECaseCompare cmp=kExact) const
Definition TString.h:651
A TTree represents a columnar dataset.
Definition TTree.h:79
virtual Int_t Fill()
Fill all branches.
Definition TTree.cxx:4603
TBranch * Branch(const char *name, T *obj, Int_t bufsize=32000, Int_t splitlevel=99)
Add a new branch, and infer the data type from the type of obj being passed.
Definition TTree.h:353
Int_t Write(const char *name=nullptr, Int_t option=0, Int_t bufsize=0) override
Write this object to the current directory.
Definition TTree.cxx:9743
Tools & gTools()
MsgLogger & Endl(MsgLogger &ml)
Definition MsgLogger.h:148
Double_t Sqrt(Double_t x)
Returns the square root of x.
Definition TMath.h:662
Short_t Abs(Short_t d)
Returns the absolute value of parameter Short_t d.
Definition TMathBase.h:123