MethodHMatrix.cxx
// @(#)root/tmva $Id$
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate Data analysis      *
 * Package: TMVA                                                                  *
 * Class  : TMVA::MethodHMatrix                                                   *
 * Web    : http://tmva.sourceforge.net                                           *
 *                                                                                *
 * Description:                                                                   *
 *      Implementation (see header file for description)                         *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Andreas Hoecker  <Andreas.Hocker@cern.ch>   - CERN, Switzerland           *
 *      Peter Speckmayer <Peter.Speckmayer@cern.ch> - CERN, Switzerland           *
 *      Helge Voss       <Helge.Voss@cern.ch>       - MPI-K Heidelberg, Germany   *
 *      Kai Voss         <Kai.Voss@cern.ch>         - U. of Victoria, Canada      *
 *                                                                                *
 * Copyright (c) 2005:                                                            *
 *      CERN, Switzerland                                                         *
 *      U. of Victoria, Canada                                                    *
 *      MPI-K Heidelberg, Germany                                                 *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (http://tmva.sourceforge.net/LICENSE)                                          *
 **********************************************************************************/

#include "TMVA/MethodHMatrix.h"

#include "TMVA/DataSet.h"
#include "TMVA/DataSetInfo.h"
#include "TMVA/IMethod.h"
#include "TMVA/MethodBase.h"
#include "TMVA/MsgLogger.h"
#include "TMVA/Tools.h"
#include "TMVA/Types.h"

#include "Riostream.h"
#include "TMatrix.h"
#include "TVectorT.h"
#include "TList.h"

#include <algorithm>

REGISTER_METHOD(HMatrix)

ClassImp(TMVA::MethodHMatrix);

/*! \class TMVA::MethodHMatrix
\ingroup TMVA

  H-Matrix method, which is implemented as a simple comparison of
  chi-squared estimators for signal and background, taking into
  account the linear correlations between the input variables.

  This MVA approach is used by the \f$ D\emptyset \f$ collaboration (FNAL) for the
  purpose of electron identification (see, e.g.,
  [hep-ex/9507007](http://arxiv.org/abs/hep-ex/9507007)).
  As implemented in TMVA, it usually performs no better than the
  Fisher-Mahalanobis discriminant, and it has only been added for the
  sake of completeness.
  Two \f$ \chi^2 \f$ estimators are computed for an event, one under the signal
  and one under the background hypothesis, using the means and covariance
  matrices estimated from the training sample:

\f[
\chi^2_\eta(i) = \left(x(i) - \bar{x}_\eta\right)^T C_\eta^{-1} \left(x(i) - \bar{x}_\eta\right), \quad \eta = S,B
\f]

  TMVA then uses as normalised analyser for event \f$ i \f$ the ratio:

\f[
\frac{\chi^2_B(i) - \chi^2_S(i)}{\chi^2_S(i) + \chi^2_B(i)}
\f]
*/
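
// A typical booking of this method through the TMVA Factory might look as sketched
// below (illustrative only: "factory" and "dataloader" denote an already existing
// TMVA::Factory* and TMVA::DataLoader*; the option string contains only MethodBase
// options, since MethodHMatrix defines none of its own):
//
//    factory->BookMethod( dataloader, TMVA::Types::kHMatrix, "HMatrix",
//                         "!H:!V:VarTransform=None" );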

////////////////////////////////////////////////////////////////////////////////
/// standard constructor for the H-Matrix method

TMVA::MethodHMatrix::MethodHMatrix( const TString& jobName,
                                    const TString& methodTitle,
                                    DataSetInfo& theData,
                                    const TString& theOption )
   : TMVA::MethodBase( jobName, Types::kHMatrix, methodTitle, theData, theOption )
   , fInvHMatrixS(0)
   , fInvHMatrixB(0)
   , fVecMeanS(0)
   , fVecMeanB(0)
{
}

////////////////////////////////////////////////////////////////////////////////
/// constructor from weight file

TMVA::MethodHMatrix::MethodHMatrix( DataSetInfo& theData,
                                    const TString& theWeightFile )
   : TMVA::MethodBase( Types::kHMatrix, theData, theWeightFile )
   , fInvHMatrixS(0)
   , fInvHMatrixB(0)
   , fVecMeanS(0)
   , fVecMeanB(0)
{
}

////////////////////////////////////////////////////////////////////////////////
/// default initialization called by all constructors

void TMVA::MethodHMatrix::Init( void )
{
   //SetNormalised( kFALSE ); obsolete!

   fInvHMatrixS = new TMatrixD( GetNvar(), GetNvar() );
   fInvHMatrixB = new TMatrixD( GetNvar(), GetNvar() );
   fVecMeanS    = new TVectorD( GetNvar() );
   fVecMeanB    = new TVectorD( GetNvar() );

   // the minimum requirement to declare an event signal-like
   SetSignalReferenceCut( 0.0 );
}

////////////////////////////////////////////////////////////////////////////////
/// destructor

TMVA::MethodHMatrix::~MethodHMatrix( void )
{
   if (NULL != fInvHMatrixS) delete fInvHMatrixS;
   if (NULL != fInvHMatrixB) delete fInvHMatrixB;
   if (NULL != fVecMeanS   ) delete fVecMeanS;
   if (NULL != fVecMeanB   ) delete fVecMeanB;
}

////////////////////////////////////////////////////////////////////////////////
/// H-Matrix is a plain two-class classifier: it can handle classification with
/// two classes only (no multiclass classification, no regression)

Bool_t TMVA::MethodHMatrix::HasAnalysisType( Types::EAnalysisType type, UInt_t numberClasses, UInt_t /*numberTargets*/ )
{
   if (type == Types::kClassification && numberClasses == 2) return kTRUE;
   return kFALSE;
}


////////////////////////////////////////////////////////////////////////////////
/// MethodHMatrix options: none (apart from those implemented in MethodBase)

void TMVA::MethodHMatrix::DeclareOptions()
{
}

////////////////////////////////////////////////////////////////////////////////
/// process user options

void TMVA::MethodHMatrix::ProcessOptions()
{
}

////////////////////////////////////////////////////////////////////////////////
/// computes H-matrices for signal and background samples

void TMVA::MethodHMatrix::Train( void )
{
   // covariance matrices for signal and background
   ComputeCovariance( kTRUE,  fInvHMatrixS );
   ComputeCovariance( kFALSE, fInvHMatrixB );

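   // note: the determinant checks below guard against (nearly) singular covariance
   // matrices; this typically happens when one input variable is an (almost) exact
   // linear combination of the others, e.g. when var1, var2 and var1+var2 are all
   // used as inputs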
   // sanity checks
   if (TMath::Abs(fInvHMatrixS->Determinant()) < 10E-24) {
      Log() << kWARNING << "<Train> H-matrix S is almost singular with determinant= "
            << TMath::Abs(fInvHMatrixS->Determinant())
            << " did you use the variables that are linear combinations or highly correlated ???"
            << Endl;
   }
   if (TMath::Abs(fInvHMatrixB->Determinant()) < 10E-24) {
      Log() << kWARNING << "<Train> H-matrix B is almost singular with determinant= "
            << TMath::Abs(fInvHMatrixB->Determinant())
            << " did you use the variables that are linear combinations or highly correlated ???"
            << Endl;
   }

   if (TMath::Abs(fInvHMatrixS->Determinant()) < 10E-120) {
      Log() << kFATAL << "<Train> H-matrix S is singular with determinant= "
            << TMath::Abs(fInvHMatrixS->Determinant())
            << " did you use the variables that are linear combinations ???"
            << Endl;
   }
   if (TMath::Abs(fInvHMatrixB->Determinant()) < 10E-120) {
      Log() << kFATAL << "<Train> H-matrix B is singular with determinant= "
            << TMath::Abs(fInvHMatrixB->Determinant())
            << " did you use the variables that are linear combinations ???"
            << Endl;
   }

   // invert matrix
   fInvHMatrixS->Invert();
   fInvHMatrixB->Invert();
   ExitFromTraining();
}

////////////////////////////////////////////////////////////////////////////////
/// compute covariance matrix

void TMVA::MethodHMatrix::ComputeCovariance( Bool_t isSignal, TMatrixD* mat )
{
   Data()->SetCurrentType(Types::kTraining);

   const UInt_t nvar = DataInfo().GetNVariables();
   UInt_t ivar, jvar;

   // init matrices
   TVectorD vec(nvar);        vec  *= 0;
   TMatrixD mat2(nvar, nvar); mat2 *= 0;

   // initialize internal sum-of-weights variables
   Double_t sumOfWeights = 0;
   Double_t *xval = new Double_t[nvar];

   // perform event loop
   for (Int_t i=0, iEnd=Data()->GetNEvents(); i<iEnd; ++i) {

      // retrieve the original (not transformed) event
      const Event* origEvt = Data()->GetEvent(i);
      Double_t weight = origEvt->GetWeight();

      // skip events with negative weights if they are to be ignored in training
      if (IgnoreEventsWithNegWeightsInTraining() && weight <= 0) continue;

      // only use events of the requested class (signal or background)
      if (DataInfo().IsSignal(origEvt) != isSignal) continue;

      // transform the event
      GetTransformationHandler().SetTransformationReferenceClass( origEvt->GetClass() );
      const Event* ev = GetTransformationHandler().Transform( origEvt );

      // event is of good type
      sumOfWeights += weight;

      // cache the transformed input values
      for (ivar=0; ivar<nvar; ivar++) xval[ivar] = ev->GetValue(ivar);

      // accumulate weighted first and second moments
      for (ivar=0; ivar<nvar; ivar++) {

         vec(ivar)        += xval[ivar]*weight;
         mat2(ivar, ivar) += (xval[ivar]*xval[ivar])*weight;

         for (jvar=ivar+1; jvar<nvar; jvar++) {
            mat2(ivar, jvar) += (xval[ivar]*xval[jvar])*weight;
            mat2(jvar, ivar)  = mat2(ivar, jvar); // symmetric matrix
         }
      }
   }

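   // the event loop above accumulated weighted sums: vec(ivar) = sum_k w_k * x_ivar(k)
   // and mat2(ivar,jvar) = sum_k w_k * x_ivar(k) * x_jvar(k); dividing by the summed
   // weights gives the weighted means and second moments, from which the covariance
   // follows as C(i,j) = <x_i x_j> - <x_i><x_j>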
   // variance-covariance
   for (ivar=0; ivar<nvar; ivar++) {

      if (isSignal) (*fVecMeanS)(ivar) = vec(ivar)/sumOfWeights;
      else          (*fVecMeanB)(ivar) = vec(ivar)/sumOfWeights;

      for (jvar=0; jvar<nvar; jvar++) {
         (*mat)(ivar, jvar) = mat2(ivar, jvar)/sumOfWeights - vec(ivar)*vec(jvar)/(sumOfWeights*sumOfWeights);
      }
   }

   delete [] xval;
}

////////////////////////////////////////////////////////////////////////////////
/// returns the H-matrix signal estimator

Double_t TMVA::MethodHMatrix::GetMvaValue( Double_t* err, Double_t* errUpper )
{
   Double_t s = GetChi2( Types::kSignal     );
   Double_t b = GetChi2( Types::kBackground );

   if (s+b < 0) Log() << kFATAL << "big trouble: s+b: " << s+b << Endl;

   // cannot determine error
   NoErrorCalc(err, errUpper);

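   // normalised H-matrix estimator: tends towards +1 for signal-like events (small
   // chi2 under the signal hypothesis) and towards -1 for background-like events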
   return (b - s)/(s + b);
}

////////////////////////////////////////////////////////////////////////////////
/// compute chi2-estimator for event according to type (signal/background)

Double_t TMVA::MethodHMatrix::GetChi2( Types::ESBType type )
{
   // get original (not transformed) event
   const Event* origEvt = fTmpEvent ? fTmpEvent : Data()->GetEvent();

   // loop over variables
   UInt_t ivar(0), jvar(0), nvar(GetNvar());
   std::vector<Double_t> val( nvar );

   // transform the event according to the given type (signal/background)
   if (type == Types::kSignal)
      GetTransformationHandler().SetTransformationReferenceClass( fSignalClass );
   else
      GetTransformationHandler().SetTransformationReferenceClass( fBackgroundClass );

   const Event* ev = GetTransformationHandler().Transform( origEvt );

   for (ivar=0; ivar<nvar; ivar++) val[ivar] = ev->GetValue( ivar );

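   // evaluate the quadratic form chi2 = (x - mean)^T * H * (x - mean), where H is the
   // inverse covariance matrix of the class requested via "type"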
   Double_t chi2 = 0;
   for (ivar=0; ivar<nvar; ivar++) {
      for (jvar=0; jvar<nvar; jvar++) {
         if (type == Types::kSignal)
            chi2 += ( (val[ivar] - (*fVecMeanS)(ivar))*(val[jvar] - (*fVecMeanS)(jvar))
                      * (*fInvHMatrixS)(ivar,jvar) );
         else
            chi2 += ( (val[ivar] - (*fVecMeanB)(ivar))*(val[jvar] - (*fVecMeanB)(jvar))
                      * (*fInvHMatrixB)(ivar,jvar) );
      }
   }

   // sanity check
   if (chi2 < 0) Log() << kFATAL << "<GetChi2> negative chi2: " << chi2 << Endl;

   return chi2;
}

////////////////////////////////////////////////////////////////////////////////
/// create XML description for HMatrix classification

void TMVA::MethodHMatrix::AddWeightsXMLTo( void* parent ) const
{
   void* wght = gTools().AddChild(parent, "Weights");
   gTools().WriteTVectorDToXML( wght, "VecMeanS", fVecMeanS    );
   gTools().WriteTVectorDToXML( wght, "VecMeanB", fVecMeanB    );
   gTools().WriteTMatrixDToXML( wght, "InvHMatS", fInvHMatrixS );
   gTools().WriteTMatrixDToXML( wght, "InvHMatB", fInvHMatrixB );
}

////////////////////////////////////////////////////////////////////////////////
/// read weights from XML file

void TMVA::MethodHMatrix::ReadWeightsFromXML( void* wghtnode )
{
   void* descnode = gTools().GetChild(wghtnode);
   gTools().ReadTVectorDFromXML( descnode, "VecMeanS", fVecMeanS    );
   descnode = gTools().GetNextChild(descnode);
   gTools().ReadTVectorDFromXML( descnode, "VecMeanB", fVecMeanB    );
   descnode = gTools().GetNextChild(descnode);
   gTools().ReadTMatrixDFromXML( descnode, "InvHMatS", fInvHMatrixS );
   descnode = gTools().GetNextChild(descnode);
   gTools().ReadTMatrixDFromXML( descnode, "InvHMatB", fInvHMatrixB );
}

////////////////////////////////////////////////////////////////////////////////
/// read variable names and min/max
/// NOTE: the latter values are mandatory for the normalisation
/// in the reader application!

void TMVA::MethodHMatrix::ReadWeightsFromStream( std::istream& istr )
{
   UInt_t ivar, jvar;
   TString var, dummy;
   istr >> dummy;
   //this->SetMethodName(dummy);

   // mean vectors
   for (ivar=0; ivar<GetNvar(); ivar++)
      istr >> (*fVecMeanS)(ivar) >> (*fVecMeanB)(ivar);

   // inverse covariance matrix (signal)
   for (ivar=0; ivar<GetNvar(); ivar++)
      for (jvar=0; jvar<GetNvar(); jvar++)
         istr >> (*fInvHMatrixS)(ivar,jvar);

   // inverse covariance matrix (background)
   for (ivar=0; ivar<GetNvar(); ivar++)
      for (jvar=0; jvar<GetNvar(); jvar++)
         istr >> (*fInvHMatrixB)(ivar,jvar);
}

////////////////////////////////////////////////////////////////////////////////
/// write H-Matrix-specific classifier response

void TMVA::MethodHMatrix::MakeClassSpecific( std::ostream& fout, const TString& className ) const
{
   fout << "   // arrays of input evt vs. variable " << std::endl;
   fout << "   double fInvHMatrixS[" << GetNvar() << "][" << GetNvar() << "]; // inverse H-matrix (signal)" << std::endl;
   fout << "   double fInvHMatrixB[" << GetNvar() << "][" << GetNvar() << "]; // inverse H-matrix (background)" << std::endl;
   fout << "   double fVecMeanS[" << GetNvar() << "]; // vector of mean values (signal)" << std::endl;
   fout << "   double fVecMeanB[" << GetNvar() << "]; // vector of mean values (background)" << std::endl;
   fout << "   " << std::endl;
   fout << "   double GetChi2( const std::vector<double>& inputValues, int type ) const;" << std::endl;
   fout << "};" << std::endl;
   fout << "   " << std::endl;
   fout << "void " << className << "::Initialize() " << std::endl;
   fout << "{" << std::endl;
   fout << "   // init vectors with mean values" << std::endl;
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      fout << "   fVecMeanS[" << ivar << "] = " << (*fVecMeanS)(ivar) << ";" << std::endl;
      fout << "   fVecMeanB[" << ivar << "] = " << (*fVecMeanB)(ivar) << ";" << std::endl;
   }
   fout << "   " << std::endl;
   fout << "   // init H-matrices" << std::endl;
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      for (UInt_t jvar=0; jvar<GetNvar(); jvar++) {
         fout << "   fInvHMatrixS[" << ivar << "][" << jvar << "] = "
              << (*fInvHMatrixS)(ivar,jvar) << ";" << std::endl;
         fout << "   fInvHMatrixB[" << ivar << "][" << jvar << "] = "
              << (*fInvHMatrixB)(ivar,jvar) << ";" << std::endl;
      }
   }
   fout << "}" << std::endl;
   fout << "   " << std::endl;
   fout << "inline double " << className << "::GetMvaValue__( const std::vector<double>& inputValues ) const" << std::endl;
   fout << "{" << std::endl;
   fout << "   // returns the H-matrix signal estimator" << std::endl;
   fout << "   std::vector<double> inputValuesSig = inputValues;" << std::endl;
   fout << "   std::vector<double> inputValuesBgd = inputValues;" << std::endl;
   if (GetTransformationHandler().GetTransformationList().GetSize() != 0) {

      UInt_t signalClass     = DataInfo().GetClassInfo("Signal")->GetNumber();
      UInt_t backgroundClass = DataInfo().GetClassInfo("Background")->GetNumber();

      fout << "   Transform(inputValuesSig," << signalClass << ");" << std::endl;
      fout << "   Transform(inputValuesBgd," << backgroundClass << ");" << std::endl;
   }

   // fout << "   for(uint i=0; i<GetNvar(); ++i) std::cout << inputValuesSig.at(i) << \" \" << inputValuesBgd.at(i) << std::endl; " << std::endl;

   fout << "   double s = GetChi2( inputValuesSig, " << Types::kSignal << " );" << std::endl;
   fout << "   double b = GetChi2( inputValuesBgd, " << Types::kBackground << " );" << std::endl;

   // fout << "   std::cout << s << \" \" << b << std::endl; " << std::endl;

   fout << "   " << std::endl;
   fout << "   if (s+b <= 0) std::cout << \"Problem in class " << className << "::GetMvaValue__: s+b = \"" << std::endl;
   fout << "                           << s+b << \" <= 0 \" << std::endl;" << std::endl;
   fout << "   " << std::endl;
   fout << "   return (b - s)/(s + b);" << std::endl;
   fout << "}" << std::endl;
   fout << "   " << std::endl;
   fout << "inline double " << className << "::GetChi2( const std::vector<double>& inputValues, int type ) const" << std::endl;
   fout << "{" << std::endl;
   fout << "   // compute chi2-estimator for event according to type (signal/background)" << std::endl;
   fout << "   " << std::endl;
   fout << "   size_t ivar,jvar;" << std::endl;
   fout << "   double chi2 = 0;" << std::endl;
   fout << "   for (ivar=0; ivar<GetNvar(); ivar++) {" << std::endl;
   fout << "      for (jvar=0; jvar<GetNvar(); jvar++) {" << std::endl;
   fout << "         if (type == " << Types::kSignal << ") " << std::endl;
   fout << "            chi2 += ( (inputValues[ivar] - fVecMeanS[ivar])*(inputValues[jvar] - fVecMeanS[jvar])" << std::endl;
   fout << "                      * fInvHMatrixS[ivar][jvar] );" << std::endl;
   fout << "         else" << std::endl;
   fout << "            chi2 += ( (inputValues[ivar] - fVecMeanB[ivar])*(inputValues[jvar] - fVecMeanB[jvar])" << std::endl;
   fout << "                      * fInvHMatrixB[ivar][jvar] );" << std::endl;
   fout << "      }" << std::endl;
   fout << "   } // loop over variables " << std::endl;
   fout << "   " << std::endl;
   fout << "   // sanity check" << std::endl;
   fout << "   if (chi2 < 0) std::cout << \"Problem in class " << className << "::GetChi2: chi2 = \"" << std::endl;
   fout << "                           << chi2 << \" < 0 \" << std::endl;" << std::endl;
   fout << "   " << std::endl;
   fout << "   return chi2;" << std::endl;
   fout << "}" << std::endl;
   fout << "   " << std::endl;
   fout << "// Clean up" << std::endl;
   fout << "inline void " << className << "::Clear() " << std::endl;
   fout << "{" << std::endl;
   fout << "   // nothing to clear" << std::endl;
   fout << "}" << std::endl;
}

////////////////////////////////////////////////////////////////////////////////
/// get help message text
///
/// typical length of text line:
/// "|--------------------------------------------------------------|"

void TMVA::MethodHMatrix::GetHelpMessage() const
{
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Short description:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "The H-Matrix classifier discriminates one class (signal) of a feature" << Endl;
   Log() << "vector from another (background). The correlated elements of the" << Endl;
   Log() << "vector are assumed to be Gaussian distributed, and the inverse of" << Endl;
   Log() << "the covariance matrix is the H-Matrix. A multivariate chi-squared" << Endl;
   Log() << "estimator is built that exploits differences in the mean values of" << Endl;
   Log() << "the vector elements between the two classes for the purpose of" << Endl;
   Log() << "discrimination." << Endl;
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Performance optimisation:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "The TMVA implementation of the H-Matrix classifier has been shown" << Endl;
   Log() << "to underperform in comparison with the corresponding Fisher discriminant," << Endl;
   Log() << "when using similar assumptions and complexity. Its use is therefore" << Endl;
   Log() << "deprecated. Only in cases where the background model is strongly" << Endl;
   Log() << "non-Gaussian may the H-Matrix perform better than Fisher. In such" << Endl;
   Log() << "cases the user is advised to employ non-linear classifiers." << Endl;
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Performance tuning via configuration options:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "None" << Endl;
}