// @(#)root/tmva $Id$
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss, Eckhard von Toerne
/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis      *
 * Package: TMVA                                                                  *
 * Class  : MethodTMlpANN                                                         *
 * Web    : http://tmva.sourceforge.net                                           *
 *                                                                                *
 * Description:                                                                   *
 *      Implementation (see header for description)                               *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland              *
 *      Helge Voss      <Helge.Voss@cern.ch>     - MPI-K Heidelberg, Germany      *
 *      Kai Voss        <Kai.Voss@cern.ch>       - U. of Victoria, Canada         *
 *                                                                                *
 * Copyright (c) 2005:                                                            *
 *      CERN, Switzerland                                                         *
 *      U. of Victoria, Canada                                                    *
 *      MPI-K Heidelberg, Germany                                                 *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without            *
 * modification, are permitted according to the terms listed in LICENSE          *
 * (http://tmva.sourceforge.net/LICENSE)                                         *
 **********************************************************************************/

/*! \class TMVA::MethodTMlpANN
\ingroup TMVA

This is the TMVA TMultiLayerPerceptron interface class. It provides an
interface for training and testing ROOT's internal MLP class
(TMultiLayerPerceptron) within the TMVA framework.

Available learning methods:

 - Stochastic
 - Batch
 - SteepestDescent
 - RibierePolak
 - FletcherReeves
 - BFGS

See the TMultiLayerPerceptron class description
for details on this ANN.
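
A typical booking via the TMVA Factory might look as follows (a minimal
sketch; the option values are purely illustrative, all option names are
declared in DeclareOptions below):

~~~ {.cpp}
// assumes an existing TMVA::Factory* factory and TMVA::DataLoader* dataloader
factory->BookMethod( dataloader, TMVA::Types::kTMlpANN, "TMlpANN",
                     "NCycles=200:HiddenLayers=N,N-1:LearningMethod=BFGS:ValidationFraction=0.3" );
~~~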
*/

#include "TMVA/MethodTMlpANN.h"

#include "TMVA/Config.h"
#include "TMVA/Configurable.h"
#include "TMVA/DataSet.h"
#include "TMVA/DataSetInfo.h"
#include "TMVA/IMethod.h"
#include "TMVA/MethodBase.h"
#include "TMVA/MsgLogger.h"
#include "TMVA/Types.h"
#include "TMVA/VariableInfo.h"

#include "TMVA/ClassifierFactory.h"
#include "TMVA/Tools.h"

#include "Riostream.h"
#include "TLeaf.h"
#include "TEventList.h"
#include "TObjString.h"
#include "TROOT.h"
#include "TMultiLayerPerceptron.h"

#include <cstdlib>
#include <iostream>
#include <fstream>


using std::atoi;

// some additional TMlpANN options
const Bool_t EnforceNormalization__ = kTRUE;
#if ROOT_VERSION_CODE > ROOT_VERSION(5,13,06)
//const TMultiLayerPerceptron::ELearningMethod LearningMethod__ = TMultiLayerPerceptron::kStochastic;
//const TMultiLayerPerceptron::ELearningMethod LearningMethod__ = TMultiLayerPerceptron::kBatch;
#else
//const TMultiLayerPerceptron::LearningMethod LearningMethod__ = TMultiLayerPerceptron::kStochastic;
#endif

REGISTER_METHOD(TMlpANN)

ClassImp(TMVA::MethodTMlpANN);

////////////////////////////////////////////////////////////////////////////////
/// standard constructor

TMVA::MethodTMlpANN::MethodTMlpANN( const TString& jobName,
                                    const TString& methodTitle,
                                    DataSetInfo& theData,
                                    const TString& theOption) :
   TMVA::MethodBase( jobName, Types::kTMlpANN, methodTitle, theData, theOption),
   fMLP(0),
   fLocalTrainingTree(0),
   fNcycles(100),
   fValidationFraction(0.5),
   fLearningMethod( "" )
{
}

////////////////////////////////////////////////////////////////////////////////
/// constructor from weight file

TMVA::MethodTMlpANN::MethodTMlpANN( DataSetInfo& theData,
                                    const TString& theWeightFile) :
   TMVA::MethodBase( Types::kTMlpANN, theData, theWeightFile),
   fMLP(0),
   fLocalTrainingTree(0),
   fNcycles(100),
   fValidationFraction(0.5),
   fLearningMethod( "" )
{
}

////////////////////////////////////////////////////////////////////////////////
/// TMlpANN can handle classification with 2 classes

Bool_t TMVA::MethodTMlpANN::HasAnalysisType( Types::EAnalysisType type,
                                             UInt_t numberClasses,
                                             UInt_t /*numberTargets*/ )
{
   if (type == Types::kClassification && numberClasses == 2) return kTRUE;
   return kFALSE;
}

////////////////////////////////////////////////////////////////////////////////
/// default initialisations

void TMVA::MethodTMlpANN::Init( void )
{
}

////////////////////////////////////////////////////////////////////////////////
/// destructor

TMVA::MethodTMlpANN::~MethodTMlpANN( void )
{
   if (fMLP) delete fMLP;
}

////////////////////////////////////////////////////////////////////////////////
/// translates options from option string into TMlpANN language
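///
/// For example (illustrative values): with 6 input variables, a layer
/// specification "N,N-1" yields fHiddenLayer = ":6:5:", and the build
/// options string assembled below becomes "@var1,...,@var6:6:5:type",
/// where the leading '@' requests normalisation of the input variables.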

void TMVA::MethodTMlpANN::CreateMLPOptions( TString layerSpec )
{
   fHiddenLayer = ":";

   while (layerSpec.Length()>0) {
      TString sToAdd = "";
      if (layerSpec.First(',')<0) {
         sToAdd    = layerSpec;
         layerSpec = "";
      }
      else {
         sToAdd    = layerSpec(0,layerSpec.First(','));
         layerSpec = layerSpec(layerSpec.First(',')+1,layerSpec.Length());
      }
      int nNodes = 0;
      if (sToAdd.BeginsWith("N")) { sToAdd.Remove(0,1); nNodes = GetNvar(); }
      nNodes += atoi(sToAdd);
      fHiddenLayer = Form( "%s%i:", (const char*)fHiddenLayer, nNodes );
   }

   // set input vars
   std::vector<TString>::iterator itrVar    = (*fInputVars).begin();
   std::vector<TString>::iterator itrVarEnd = (*fInputVars).end();
   fMLPBuildOptions = "";
   for (; itrVar != itrVarEnd; ++itrVar) {
      if (EnforceNormalization__) fMLPBuildOptions += "@"; // enforce normalisation of input variables
      TString myVar = *itrVar;
      fMLPBuildOptions += myVar;
      fMLPBuildOptions += ",";
   }
   fMLPBuildOptions.Chop(); // remove last ","

   // prepare final options for MLP kernel
   fMLPBuildOptions += fHiddenLayer;
   fMLPBuildOptions += "type";

   Log() << kINFO << "Use " << fNcycles << " training cycles" << Endl;
   Log() << kINFO << "Use configuration (nodes per hidden layer): " << fHiddenLayer << Endl;
}

////////////////////////////////////////////////////////////////////////////////
/// define the options (their key words) that can be set in the option string
///
/// known options:
///
/// - NCycles <integer>     Number of training cycles (too many cycles could overtrain the network)
/// - HiddenLayers <string> Layout of the hidden layers (nodes per layer)
///   * specifications for each hidden layer are separated by commas
///   * for each layer the number of nodes can be either absolute (simply a number)
///     or relative to the number of input nodes to the neural net (N)
///   * there is always a single node in the output layer
///
/// example: a net with 6 input nodes and "HiddenLayers=N-1,N-2" has 6,5,4,1 nodes in
/// layers 1,2,3,4, respectively

void TMVA::MethodTMlpANN::DeclareOptions()
{
   DeclareOptionRef( fNcycles   = 200,     "NCycles",      "Number of training cycles" );
   DeclareOptionRef( fLayerSpec = "N,N-1", "HiddenLayers", "Specification of hidden layer architecture (N stands for number of variables; any integers may also be used)" );

   DeclareOptionRef( fValidationFraction = 0.5, "ValidationFraction",
                     "Fraction of events in training tree used for cross validation" );

   DeclareOptionRef( fLearningMethod = "Stochastic", "LearningMethod", "Learning method" );
   AddPreDefVal( TString("Stochastic") );
   AddPreDefVal( TString("Batch") );
   AddPreDefVal( TString("SteepestDescent") );
   AddPreDefVal( TString("RibierePolak") );
   AddPreDefVal( TString("FletcherReeves") );
   AddPreDefVal( TString("BFGS") );
}

////////////////////////////////////////////////////////////////////////////////
/// builds the neural network as specified by the user

void TMVA::MethodTMlpANN::ProcessOptions()
{
   CreateMLPOptions(fLayerSpec);

   if (IgnoreEventsWithNegWeightsInTraining()) {
      Log() << kFATAL << "Mechanism to ignore events with negative weights in training not available for method: "
            << GetMethodTypeName()
            << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string."
            << Endl;
   }
}

////////////////////////////////////////////////////////////////////////////////
/// calculate the value of the neural net for the current event

Double_t TMVA::MethodTMlpANN::GetMvaValue( Double_t* err, Double_t* errUpper )
{
   const Event* ev = GetEvent();
   TTHREAD_TLS_DECL_ARG(Double_t*, d, new Double_t[Data()->GetNVariables()]);

   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      d[ivar] = (Double_t)ev->GetValue(ivar);
   }
   Double_t mvaVal = fMLP->Evaluate(0,d);

   // cannot determine error
   NoErrorCalc(err, errUpper);

   return mvaVal;
}

////////////////////////////////////////////////////////////////////////////////
/// performs TMlpANN training
/// available learning methods:
///
///  - TMultiLayerPerceptron::kStochastic
///  - TMultiLayerPerceptron::kBatch
///  - TMultiLayerPerceptron::kSteepestDescent
///  - TMultiLayerPerceptron::kRibierePolak
///  - TMultiLayerPerceptron::kFletcherReeves
///  - TMultiLayerPerceptron::kBFGS
///
/// TMultiLayerPerceptron wants the training and test trees at once,
/// so the training and testing trees from the MVA factory are first merged:
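///
/// For illustration (hypothetical numbers): with 1000 signal and 1000
/// background training events and ValidationFraction=0.5, the entry-list
/// selections built below read
/// trainList = "Entry$<0.5*1000 || (Entry$>1000 && Entry$<1500)" and
/// testList  = "!(...)", i.e. the complement, used for internal validation.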

void TMVA::MethodTMlpANN::Train( void )
{
   Int_t type;
   Float_t weight;
   const Long_t basketsize = 128000;
   Float_t* vArr = new Float_t[GetNvar()];

   TTree *localTrainingTree = new TTree( "TMLPtrain", "Local training tree for TMlpANN" );
   localTrainingTree->Branch( "type",   &type,   "type/I",   basketsize );
   localTrainingTree->Branch( "weight", &weight, "weight/F", basketsize );

   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      const char* myVar = GetInternalVarName(ivar).Data();
      localTrainingTree->Branch( myVar, &vArr[ivar], Form("Var%02i/F", ivar), basketsize );
   }

   for (UInt_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {
      const Event *ev = GetEvent(ievt);
      for (UInt_t i=0; i<GetNvar(); i++) {
         vArr[i] = ev->GetValue( i );
      }
      type   = DataInfo().IsSignal( ev ) ? 1 : 0;
      weight = ev->GetWeight();
      localTrainingTree->Fill();
   }

   // These are the event lists for the mlp train method:
   // the first events in the tree are used for training,
   // the rest for internal testing (cross validation)...
   // NOTE: the training events are ordered: first part is signal, second part background
   TString trainList = "Entry$<";
   trainList += 1.0-fValidationFraction;
   trainList += "*";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " || (Entry$>";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " && Entry$<";
   trainList += (Int_t)(Data()->GetNEvtSigTrain() + (1.0 - fValidationFraction)*Data()->GetNEvtBkgdTrain());
   trainList += ")";
   TString testList = TString("!(") + trainList + ")";

   // print the requirements
   Log() << kHEADER << "Requirement for training events: \"" << trainList << "\"" << Endl;
   Log() << kINFO << "Requirement for validation events: \"" << testList << "\"" << Endl;

   // localTrainingTree->Print();

   // create NN
   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(),
                                     localTrainingTree,
                                     trainList,
                                     testList );
   fMLP->SetEventWeight( "weight" );

   // set learning method
#if ROOT_VERSION_CODE > ROOT_VERSION(5,13,06)
   TMultiLayerPerceptron::ELearningMethod learningMethod = TMultiLayerPerceptron::kStochastic;
#else
   TMultiLayerPerceptron::LearningMethod  learningMethod = TMultiLayerPerceptron::kStochastic;
#endif

   fLearningMethod.ToLower();
   if      (fLearningMethod == "stochastic"      ) learningMethod = TMultiLayerPerceptron::kStochastic;
   else if (fLearningMethod == "batch"           ) learningMethod = TMultiLayerPerceptron::kBatch;
   else if (fLearningMethod == "steepestdescent" ) learningMethod = TMultiLayerPerceptron::kSteepestDescent;
   else if (fLearningMethod == "ribierepolak"    ) learningMethod = TMultiLayerPerceptron::kRibierePolak;
   else if (fLearningMethod == "fletcherreeves"  ) learningMethod = TMultiLayerPerceptron::kFletcherReeves;
   else if (fLearningMethod == "bfgs"            ) learningMethod = TMultiLayerPerceptron::kBFGS;
   else {
      Log() << kFATAL << "Unknown Learning Method: \"" << fLearningMethod << "\"" << Endl;
   }
   fMLP->SetLearningMethod( learningMethod );

   // train NN
   fMLP->Train(fNcycles, "" ); //"text,update=50" );

   // clean up the local training tree; the weights are written out later
   // via AddWeightsXMLTo, and the trained fMLP is kept for evaluation
   delete localTrainingTree;
   delete [] vArr;
}

////////////////////////////////////////////////////////////////////////////////
/// write weights to xml file
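///
/// The resulting layout is roughly (a sketch, numeric payload abbreviated):
/// a "Weights" node containing an "Architecture" child with the BuildOptions
/// attribute, followed by one child per section of the TMultiLayerPerceptron
/// weight dump ("input", "output", "neurons", "synapses"), each carrying
/// that section's numbers as a raw text line.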

void TMVA::MethodTMlpANN::AddWeightsXMLTo( void* parent ) const
{
   // first the architecture
   void* wght = gTools().AddChild(parent, "Weights");
   void* arch = gTools().AddChild( wght, "Architecture" );
   gTools().AddAttr( arch, "BuildOptions", fMLPBuildOptions.Data() );

   // dump weights first in temporary txt file, read from there into xml
   const TString tmpfile=GetWeightFileDir()+"/TMlp.nn.weights.temp";
   fMLP->DumpWeights( tmpfile.Data() );
   std::ifstream inf( tmpfile.Data() );
   char temp[256];
   TString data("");
   void *ch=NULL;
   while (inf.getline(temp,256)) {
      TString dummy(temp);
      if (dummy.BeginsWith('#')) { // a '#' line starts a new section
         if (ch!=0) gTools().AddRawLine( ch, data.Data() );
         dummy = dummy.Strip(TString::kLeading, '#');
         dummy = dummy(0,dummy.First(' '));
         ch = gTools().AddChild(wght, dummy);
         data.Resize(0);
         continue;
      }
      data += (dummy + " ");
   }
   if (ch != 0) gTools().AddRawLine( ch, data.Data() );

   inf.close();
}

////////////////////////////////////////////////////////////////////////////////
/// rebuild temporary textfile from xml weightfile and load this
/// file into MLP
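///
/// The temporary file follows the TMultiLayerPerceptron::DumpWeights format,
/// roughly (a sketch, inferred from the writing code below):
/// ~~~
/// #input normalization
/// <two numbers per input variable>
/// #output normalization
/// <two numbers>
/// #neurons weights
/// <one number per line>
/// #synapses weights
/// <one number per line>
/// ~~~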

void TMVA::MethodTMlpANN::ReadWeightsFromXML( void* wghtnode )
{
   void* ch = gTools().GetChild(wghtnode);
   gTools().ReadAttr( ch, "BuildOptions", fMLPBuildOptions );

   ch = gTools().GetNextChild(ch);
   const TString fname = GetWeightFileDir()+"/TMlp.nn.weights.temp";
   std::ofstream fout( fname.Data() );
   double temp1=0,temp2=0;
   while (ch) {
      const char* nodecontent = gTools().GetContent(ch);
      std::stringstream content(nodecontent);
      if (strcmp(gTools().GetName(ch),"input")==0) {
         fout << "#input normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"output")==0) {
         fout << "#output normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"neurons")==0) {
         fout << "#neurons weights" << std::endl;
         while (content >> temp1) {
            fout << temp1 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"synapses")==0) {
         fout << "#synapses weights";
         while (content >> temp1) {
            fout << std::endl << temp1;
         }
      }
      ch = gTools().GetNextChild(ch);
   }
   fout.close();

   // Here we create a dummy tree necessary to create a minimal NN
   // to be used for testing, evaluation and application
   TTHREAD_TLS_DECL_ARG(Double_t*, d, new Double_t[Data()->GetNVariables()]);
   TTHREAD_TLS(Int_t) type;

   gROOT->cd();
   TTree* dummyTree = new TTree("dummy","Empty dummy tree", 1);
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetInternalName();
      dummyTree->Branch(Form("%s",vn.Data()), d+ivar, Form("%s/D",vn.Data()));
   }
   dummyTree->Branch("type", &type, "type/I");

   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
   fMLP->LoadWeights( fname );
}

////////////////////////////////////////////////////////////////////////////////
/// read weights from stream
/// since the MLP cannot read from the stream, we
/// 1st: write the weights to a temporary file

void TMVA::MethodTMlpANN::ReadWeightsFromStream( std::istream& istr )
{
   std::ofstream fout( "./TMlp.nn.weights.temp" );
   fout << istr.rdbuf();
   fout.close();
   // 2nd: load the weights from the temporary file into the MLP
   // the MLP is already built
   Log() << kINFO << "Load TMLP weights into " << fMLP << Endl;

   Double_t* d = new Double_t[Data()->GetNVariables()];
   Int_t type;
   gROOT->cd();
   TTree* dummyTree = new TTree("dummy","Empty dummy tree", 1);
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetLabel();
      dummyTree->Branch(Form("%s",vn.Data()), d+ivar, Form("%s/D",vn.Data()));
   }
   dummyTree->Branch("type", &type, "type/I");

   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );

   fMLP->LoadWeights( "./TMlp.nn.weights.temp" );
   // the temporary file could be removed at this point
   delete [] d;
}

////////////////////////////////////////////////////////////////////////////////
/// create reader class for classifier -> overrides base class function
/// create specific class for TMultiLayerPerceptron

void TMVA::MethodTMlpANN::MakeClass( const TString& theClassFileName ) const
{
   // the default file name consists of weight-file directory, job name and method name
   TString classFileName = "";
   if (theClassFileName == "")
      classFileName = GetWeightFileDir() + "/" + GetJobName() + "_" + GetMethodName() + ".class";
   else
      classFileName = theClassFileName;

   classFileName.ReplaceAll(".class","");
   Log() << kINFO << "Creating specific (TMultiLayerPerceptron) standalone response class: " << classFileName << Endl;
   fMLP->Export( classFileName.Data() );
}

////////////////////////////////////////////////////////////////////////////////
/// write specific classifier response
/// nothing to do here - all taken care of by TMultiLayerPerceptron

void TMVA::MethodTMlpANN::MakeClassSpecific( std::ostream& /*fout*/, const TString& /*className*/ ) const
{
}

////////////////////////////////////////////////////////////////////////////////
/// get help message text
///
/// typical length of text line:
/// "|--------------------------------------------------------------|"

void TMVA::MethodTMlpANN::GetHelpMessage() const
{
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Short description:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "This feed-forward multilayer perceptron neural network is the " << Endl;
   Log() << "standard implementation distributed with ROOT (class TMultiLayerPerceptron)." << Endl;
   Log() << Endl;
   Log() << "Detailed information is available here:" << Endl;
   if (gConfig().WriteOptionsReference()) {
      Log() << "<a href=\"http://root.cern.ch/root/html/TMultiLayerPerceptron.html\">";
      Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html</a>" << Endl;
   }
   else Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html" << Endl;
   Log() << Endl;
}