MethodTMlpANN.cxx
// @(#)root/tmva $Id$
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss, Eckhard von Toerne
/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis      *
 * Package: TMVA                                                                  *
 * Class  : MethodTMlpANN                                                         *
 * Web    : http://tmva.sourceforge.net                                           *
 *                                                                                *
 * Description:                                                                   *
 *      Implementation (see header for description)                               *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland              *
 *      Helge Voss      <Helge.Voss@cern.ch>     - MPI-K Heidelberg, Germany      *
 *      Kai Voss        <Kai.Voss@cern.ch>       - U. of Victoria, Canada         *
 *                                                                                *
 * Copyright (c) 2005:                                                            *
 *      CERN, Switzerland                                                         *
 *      U. of Victoria, Canada                                                    *
 *      MPI-K Heidelberg, Germany                                                 *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (http://tmva.sourceforge.net/LICENSE)                                          *
 **********************************************************************************/

////////////////////////////////////////////////////////////////////////////////

/* Begin_Html

   This is the TMVA TMultiLayerPerceptron interface class. It provides
   training and testing of the ROOT-internal MLP class within the TMVA
   framework.<br>

   Available learning methods:<br>
   <ul>
   <li>Stochastic      </li>
   <li>Batch           </li>
   <li>SteepestDescent </li>
   <li>RibierePolak    </li>
   <li>FletcherReeves  </li>
   <li>BFGS            </li>
   </ul>
End_Html */
//
// See the TMultiLayerPerceptron class description
// for details on this ANN.
//
//_______________________________________________________________________

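// Illustrative booking sketch (not part of the original source; tree,
// variable, and file names are hypothetical). In ROOT 6.08 this method is
// booked through the Factory/DataLoader interface roughly as follows:
//
//    TMVA::DataLoader loader( "dataset" );
//    loader.AddVariable( "var1", 'F' );
//    loader.AddVariable( "var2", 'F' );
//    loader.AddSignalTree( sigTree );
//    loader.AddBackgroundTree( bkgTree );
//    TMVA::Factory factory( "TMVAClassification", outputFile, "AnalysisType=Classification" );
//    factory.BookMethod( &loader, TMVA::Types::kTMlpANN, "TMlpANN",
//                        "NCycles=200:HiddenLayers=N,N-1:ValidationFraction=0.5:LearningMethod=Stochastic" );
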
#include "TMVA/MethodTMlpANN.h"

#include "TMVA/Config.h"
#include "TMVA/Configurable.h"
#include "TMVA/DataSet.h"
#include "TMVA/DataSetInfo.h"
#include "TMVA/IMethod.h"
#include "TMVA/MethodBase.h"
#include "TMVA/MsgLogger.h"
#include "TMVA/Types.h"
#include "TMVA/VariableInfo.h"

#include "TMVA/ClassifierFactory.h"
#ifndef ROOT_TMVA_Tools
#include "TMVA/Tools.h"
#endif

#include "Riostream.h"
#include "TLeaf.h"
#include "TEventList.h"
#include "TObjString.h"
#include "TROOT.h"
#include "TMultiLayerPerceptron.h"

#include <cstdlib>
#include <iostream>
#include <fstream>


using std::atoi;

// some additional TMlpANN options
const Bool_t EnforceNormalization__=kTRUE;
#if ROOT_VERSION_CODE > ROOT_VERSION(5,13,06)
//const TMultiLayerPerceptron::ELearningMethod LearningMethod__= TMultiLayerPerceptron::kStochastic;
// const TMultiLayerPerceptron::ELearningMethod LearningMethod__= TMultiLayerPerceptron::kBatch;
#else
//const TMultiLayerPerceptron::LearningMethod LearningMethod__= TMultiLayerPerceptron::kStochastic;
#endif

REGISTER_METHOD(TMlpANN)

ClassImp(TMVA::MethodTMlpANN)

////////////////////////////////////////////////////////////////////////////////
/// standard constructor

TMVA::MethodTMlpANN::MethodTMlpANN( const TString& jobName,
                                    const TString& methodTitle,
                                    DataSetInfo& theData,
                                    const TString& theOption) :
   TMVA::MethodBase( jobName, Types::kTMlpANN, methodTitle, theData, theOption),
   fMLP(0),
   fLocalTrainingTree(0),
   fNcycles(100),
   fValidationFraction(0.5),
   fLearningMethod( "" )
{
}

////////////////////////////////////////////////////////////////////////////////
/// constructor from weight file

TMVA::MethodTMlpANN::MethodTMlpANN( DataSetInfo& theData,
                                    const TString& theWeightFile) :
   TMVA::MethodBase( Types::kTMlpANN, theData, theWeightFile),
   fMLP(0),
   fLocalTrainingTree(0),
   fNcycles(100),
   fValidationFraction(0.5),
   fLearningMethod( "" )
{
}

////////////////////////////////////////////////////////////////////////////////
/// TMlpANN can handle classification with 2 classes

Bool_t TMVA::MethodTMlpANN::HasAnalysisType( Types::EAnalysisType type,
                                             UInt_t numberClasses,
                                             UInt_t /*numberTargets*/ )
{
   if (type == Types::kClassification && numberClasses == 2) return kTRUE;
   return kFALSE;
}


////////////////////////////////////////////////////////////////////////////////
/// default initialisations

void TMVA::MethodTMlpANN::Init( void )
{
}

////////////////////////////////////////////////////////////////////////////////
/// destructor

TMVA::MethodTMlpANN::~MethodTMlpANN( void )
{
   if (fMLP) delete fMLP;
}

////////////////////////////////////////////////////////////////////////////////
/// translates options from option string into TMlpANN language

void TMVA::MethodTMlpANN::CreateMLPOptions( TString layerSpec )
{
   fHiddenLayer = ":";

   while (layerSpec.Length()>0) {
      TString sToAdd="";
      if (layerSpec.First(',')<0) {
         sToAdd    = layerSpec;
         layerSpec = "";
      }
      else {
         sToAdd    = layerSpec(0,layerSpec.First(','));
         layerSpec = layerSpec(layerSpec.First(',')+1,layerSpec.Length());
      }
      int nNodes = 0;
      if (sToAdd.BeginsWith("N")) { sToAdd.Remove(0,1); nNodes = GetNvar(); }
      nNodes += atoi(sToAdd);
      fHiddenLayer = Form( "%s%i:", (const char*)fHiddenLayer, nNodes );
   }

   // set input variables
   std::vector<TString>::iterator itrVar    = (*fInputVars).begin();
   std::vector<TString>::iterator itrVarEnd = (*fInputVars).end();
   fMLPBuildOptions = "";
   for (; itrVar != itrVarEnd; itrVar++) {
      if (EnforceNormalization__) fMLPBuildOptions += "@"; // "@" prefix requests input normalisation in TMultiLayerPerceptron
      TString myVar = *itrVar;
      fMLPBuildOptions += myVar;
      fMLPBuildOptions += ",";
   }
   fMLPBuildOptions.Chop(); // remove last ","

   // prepare final options for the MLP kernel
   fMLPBuildOptions += fHiddenLayer;
   fMLPBuildOptions += "type";

   Log() << kINFO << "Use " << fNcycles << " training cycles" << Endl;
   Log() << kINFO << "Use configuration (nodes per hidden layer): " << fHiddenLayer << Endl;
}
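
// Worked example for CreateMLPOptions (illustrative): with four input
// variables var1..var4 and layerSpec = "N,N-1", the loop above produces
//    fHiddenLayer     = ":4:3:"
// and, with EnforceNormalization__ set to kTRUE as above, the final build
// string is
//    fMLPBuildOptions = "@var1,@var2,@var3,@var4:4:3:type"
// i.e. a TMultiLayerPerceptron structure string with normalised inputs
// ("@" prefix), hidden layers of 4 and 3 nodes, and the "type" branch of
// the training tree as output neuron.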

////////////////////////////////////////////////////////////////////////////////
/// define the options (their key words) that can be set in the option string
/// known options:
/// NCycles      <integer>  Number of training cycles (too many cycles could overtrain the network)
/// HiddenLayers <string>   Layout of the hidden layers (nodes per layer)
///    * specifications for each hidden layer are separated by commas
///    * for each layer the number of nodes can be either absolute (simply a number)
///      or relative to the number of input nodes to the neural net (N)
///    * there is always a single node in the output layer
/// example: a net with 6 input nodes and "HiddenLayers=N-1,N-2" has 6,5,4,1 nodes in
/// layers 1,2,3,4, respectively

void TMVA::MethodTMlpANN::DeclareOptions()
{
   DeclareOptionRef( fNcycles   = 200,     "NCycles",      "Number of training cycles" );
   DeclareOptionRef( fLayerSpec = "N,N-1", "HiddenLayers", "Specification of hidden layer architecture (N stands for number of variables; any integers may also be used)" );

   DeclareOptionRef( fValidationFraction = 0.5, "ValidationFraction",
                     "Fraction of events in training tree used for cross validation" );

   DeclareOptionRef( fLearningMethod = "Stochastic", "LearningMethod", "Learning method" );
   AddPreDefVal( TString("Stochastic") );
   AddPreDefVal( TString("Batch") );
   AddPreDefVal( TString("SteepestDescent") );
   AddPreDefVal( TString("RibierePolak") );
   AddPreDefVal( TString("FletcherReeves") );
   AddPreDefVal( TString("BFGS") );
}
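
// Example option string (illustrative) combining the options declared above:
//
//    "NCycles=300:HiddenLayers=N,N-1:ValidationFraction=0.3:LearningMethod=BFGS"
//
// With 6 input variables this requests hidden layers of 6 and 5 nodes, uses
// 70% of the training tree for the actual weight updates (30% for internal
// validation), and minimises with the BFGS method.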

////////////////////////////////////////////////////////////////////////////////
/// builds the neural network as specified by the user

void TMVA::MethodTMlpANN::ProcessOptions()
{
   CreateMLPOptions(fLayerSpec);

   if (IgnoreEventsWithNegWeightsInTraining()) {
      Log() << kFATAL << "Mechanism to ignore events with negative weights in training not available for method "
            << GetMethodTypeName()
            << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string."
            << Endl;
   }
}

////////////////////////////////////////////////////////////////////////////////
/// calculate the value of the neural net for the current event

Double_t TMVA::MethodTMlpANN::GetMvaValue( Double_t* err, Double_t* errUpper )
{
   const Event* ev = GetEvent();
   TTHREAD_TLS_DECL_ARG(Double_t*, d, new Double_t[Data()->GetNVariables()]);

   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      d[ivar] = (Double_t)ev->GetValue(ivar);
   }
   Double_t mvaVal = fMLP->Evaluate(0,d);

   // cannot determine error
   NoErrorCalc(err, errUpper);

   return mvaVal;
}
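
// Illustrative application sketch (not part of the original source; variable
// and weight-file names are hypothetical). After training, this method is
// typically evaluated per event through TMVA::Reader, which ends up calling
// GetMvaValue() above:
//
//    TMVA::Reader reader;
//    Float_t var1, var2;
//    reader.AddVariable( "var1", &var1 );
//    reader.AddVariable( "var2", &var2 );
//    reader.BookMVA( "TMlpANN", "dataset/weights/TMVAClassification_TMlpANN.weights.xml" );
//    // ... fill var1, var2 for the current event, then:
//    Double_t mvaValue = reader.EvaluateMVA( "TMlpANN" );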

////////////////////////////////////////////////////////////////////////////////
/// performs TMlpANN training
/// available learning methods:
///
///       TMultiLayerPerceptron::kStochastic
///       TMultiLayerPerceptron::kBatch
///       TMultiLayerPerceptron::kSteepestDescent
///       TMultiLayerPerceptron::kRibierePolak
///       TMultiLayerPerceptron::kFletcherReeves
///       TMultiLayerPerceptron::kBFGS
///
/// TMultiLayerPerceptron wants test and training tree at once
/// so merge the training and testing trees from the MVA factory first:

void TMVA::MethodTMlpANN::Train( void )
{
   Int_t type;
   Float_t weight;
   const Long_t basketsize = 128000;
   Float_t* vArr = new Float_t[GetNvar()];

   TTree *localTrainingTree = new TTree( "TMLPtrain", "Local training tree for TMlpANN" );
   localTrainingTree->Branch( "type",   &type,   "type/I",   basketsize );
   localTrainingTree->Branch( "weight", &weight, "weight/F", basketsize );

   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      const char* myVar = GetInternalVarName(ivar).Data();
      localTrainingTree->Branch( myVar, &vArr[ivar], Form("Var%02i/F", ivar), basketsize );
   }

   for (UInt_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {
      const Event *ev = GetEvent(ievt);
      for (UInt_t i=0; i<GetNvar(); i++) {
         vArr[i] = ev->GetValue( i );
      }
      type   = DataInfo().IsSignal( ev ) ? 1 : 0;
      weight = ev->GetWeight();
      localTrainingTree->Fill();
   }

   // These are the event lists for the mlp train method
   // first events in the tree are for training
   // the rest for internal testing (cross validation)...
   // NOTE: the training events are ordered: first part is signal, second part background
   TString trainList = "Entry$<";
   trainList += 1.0-fValidationFraction;
   trainList += "*";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " || (Entry$>";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " && Entry$<";
   trainList += (Int_t)(Data()->GetNEvtSigTrain() + (1.0 - fValidationFraction)*Data()->GetNEvtBkgdTrain());
   trainList += ")";
   TString testList  = TString("!(") + trainList + ")";

   // print the requirements
   Log() << kHEADER << "Requirement for training events: \"" << trainList << "\"" << Endl;
   Log() << kINFO << "Requirement for validation events: \"" << testList << "\"" << Endl;

   // localTrainingTree->Print();

   // create NN
   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(),
                                     localTrainingTree,
                                     trainList,
                                     testList );
   fMLP->SetEventWeight( "weight" );

   // set learning method
#if ROOT_VERSION_CODE > ROOT_VERSION(5,13,06)
   TMultiLayerPerceptron::ELearningMethod learningMethod = TMultiLayerPerceptron::kStochastic;
#else
   TMultiLayerPerceptron::LearningMethod  learningMethod = TMultiLayerPerceptron::kStochastic;
#endif

   fLearningMethod.ToLower();
   if      (fLearningMethod == "stochastic"      ) learningMethod = TMultiLayerPerceptron::kStochastic;
   else if (fLearningMethod == "batch"           ) learningMethod = TMultiLayerPerceptron::kBatch;
   else if (fLearningMethod == "steepestdescent" ) learningMethod = TMultiLayerPerceptron::kSteepestDescent;
   else if (fLearningMethod == "ribierepolak"    ) learningMethod = TMultiLayerPerceptron::kRibierePolak;
   else if (fLearningMethod == "fletcherreeves"  ) learningMethod = TMultiLayerPerceptron::kFletcherReeves;
   else if (fLearningMethod == "bfgs"            ) learningMethod = TMultiLayerPerceptron::kBFGS;
   else {
      Log() << kFATAL << "Unknown Learning Method: \"" << fLearningMethod << "\"" << Endl;
   }
   fMLP->SetLearningMethod( learningMethod );

   // train NN
   fMLP->Train(fNcycles, "" ); //"text,update=50" );

   // write weights to file;
   // this is not nice, but fMLP gets deleted at the end of Train()
   delete localTrainingTree;
   delete [] vArr;
}
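
// Worked example for the event lists built above (illustrative): with 1000
// signal and 1000 background training events and ValidationFraction=0.5, the
// selection strings become
//    trainList = "Entry$<0.5*1000 || (Entry$>1000 && Entry$<1500)"
//    testList  = "!(Entry$<0.5*1000 || (Entry$>1000 && Entry$<1500))"
// so the first half of the signal block and (roughly) the first half of the
// background block are used for weight updates, the remainder for the
// internal cross-validation of TMultiLayerPerceptron.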


////////////////////////////////////////////////////////////////////////////////
/// write weights to xml file

void TMVA::MethodTMlpANN::AddWeightsXMLTo( void* parent ) const
{
   // first the architecture
   void *wght = gTools().AddChild(parent, "Weights");
   void* arch = gTools().AddChild( wght, "Architecture" );
   gTools().AddAttr( arch, "BuildOptions", fMLPBuildOptions.Data() );

   // dump weights first in temporary txt file, read from there into xml
   const TString tmpfile=GetWeightFileDir()+"/TMlp.nn.weights.temp";
   fMLP->DumpWeights( tmpfile.Data() );
   std::ifstream inf( tmpfile.Data() );
   char temp[256];
   TString data("");
   void *ch=NULL;
   while (inf.getline(temp,256)) {
      TString dummy(temp);
      //std::cout << dummy << std::endl; // remove annoying debug printout with std::cout
      if (dummy.BeginsWith('#')) {   // each '#' header starts a new XML child node
         if (ch!=0) gTools().AddRawLine( ch, data.Data() );
         dummy = dummy.Strip(TString::kLeading, '#');
         dummy = dummy(0,dummy.First(' '));
         ch = gTools().AddChild(wght, dummy);
         data.Resize(0);
         continue;
      }
      data += (dummy + " ");
   }
   if (ch != 0) gTools().AddRawLine( ch, data.Data() );

   inf.close();
}
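
// For reference, a sketch (assumed layout) of the temporary text file written
// by TMultiLayerPerceptron::DumpWeights and parsed above; each '#' header
// becomes an XML child node ("input", "output", "neurons", "synapses"):
//
//    #input normalization
//    <mean> <sigma>            (pairs, one per input neuron)
//    #output normalization
//    <mean> <sigma>
//    #neurons weights
//    <weight>                  (one per line)
//    #synapses weights
//    <weight>                  (one per line)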

////////////////////////////////////////////////////////////////////////////////
/// rebuild temporary text file from xml weight file and load this
/// file into MLP

void TMVA::MethodTMlpANN::ReadWeightsFromXML( void* wghtnode )
{
   void* ch = gTools().GetChild(wghtnode);
   gTools().ReadAttr( ch, "BuildOptions", fMLPBuildOptions );

   ch = gTools().GetNextChild(ch);
   const TString fname = GetWeightFileDir()+"/TMlp.nn.weights.temp";
   std::ofstream fout( fname.Data() );
   double temp1=0,temp2=0;
   while (ch) {
      const char* nodecontent = gTools().GetContent(ch);
      std::stringstream content(nodecontent);
      if (strcmp(gTools().GetName(ch),"input")==0) {
         fout << "#input normalization" << std::endl;
         while ((content >> temp1) &&(content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"output")==0) {
         fout << "#output normalization" << std::endl;
         while ((content >> temp1) &&(content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"neurons")==0) {
         fout << "#neurons weights" << std::endl;
         while (content >> temp1) {
            fout << temp1 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"synapses")==0) {
         fout << "#synapses weights" ;
         while (content >> temp1) {
            fout << std::endl << temp1 ;
         }
      }
      ch = gTools().GetNextChild(ch);
   }
   fout.close();

   // Here we create a dummy tree necessary to create a minimal NN
   // to be used for testing, evaluation and application
   TTHREAD_TLS_DECL_ARG(Double_t*, d, new Double_t[Data()->GetNVariables()]);
   TTHREAD_TLS(Int_t) type;

   gROOT->cd();
   TTree * dummyTree = new TTree("dummy","Empty dummy tree", 1);
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetInternalName();
      dummyTree->Branch(Form("%s",vn.Data()), d+ivar, Form("%s/D",vn.Data()));
   }
   dummyTree->Branch("type", &type, "type/I");

   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
   fMLP->LoadWeights( fname );
}
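
// The resulting XML layout written by AddWeightsXMLTo and unpacked here
// (sketch; the BuildOptions value is an example):
//
//    <Weights>
//      <Architecture BuildOptions="@var1,@var2:3:type"/>
//      <input>    ... mean/sigma pairs ... </input>
//      <output>   ... mean/sigma pairs ... </output>
//      <neurons>  ... node weights ...     </neurons>
//      <synapses> ... synapse weights ...  </synapses>
//    </Weights>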

////////////////////////////////////////////////////////////////////////////////
/// read weights from stream
/// since the MLP cannot read from the stream, we
/// 1st: write the weights to temporary file

void TMVA::MethodTMlpANN::ReadWeightsFromStream( std::istream& istr )
{
   std::ofstream fout( "./TMlp.nn.weights.temp" );
   fout << istr.rdbuf();
   fout.close();
   // 2nd: load the weights from the temporary file into the MLP
   // the MLP is already built
   Log() << kINFO << "Load TMLP weights into " << fMLP << Endl;

   Double_t* d = new Double_t[Data()->GetNVariables()] ;
   Int_t type;
   gROOT->cd();
   TTree * dummyTree = new TTree("dummy","Empty dummy tree", 1);
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetLabel();
      dummyTree->Branch(Form("%s",vn.Data()), d+ivar, Form("%s/D",vn.Data()));
   }
   dummyTree->Branch("type", &type, "type/I");

   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );

   fMLP->LoadWeights( "./TMlp.nn.weights.temp" );
   // here we can delete the temporary file
   // how?
   delete [] d;
}

////////////////////////////////////////////////////////////////////////////////
/// create reader class for classifier -> overwrites base class function
/// create specific class for TMultiLayerPerceptron

void TMVA::MethodTMlpANN::MakeClass( const TString& theClassFileName ) const
{
   // the default file name is built from the weight-file directory, job name and method name
   TString classFileName = "";
   if (theClassFileName == "")
      classFileName = GetWeightFileDir() + "/" + GetJobName() + "_" + GetMethodName() + ".class";
   else
      classFileName = theClassFileName;

   classFileName.ReplaceAll(".class","");
   Log() << kINFO << "Creating specific (TMultiLayerPerceptron) standalone response class: " << classFileName << Endl;
   fMLP->Export( classFileName.Data() );
}
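
// Illustrative sketch (hypothetical class name): TMultiLayerPerceptron::Export
// generates standalone C++ sources (<name>.h/.cxx) that evaluate the trained
// net without any ROOT dependence, roughly:
//
//    #include "MyJob_TMlpANN.h"                  // produced by Export()
//    MyJob_TMlpANN net;
//    double out = net.Value( 0, x0, x1 );        // output neuron 0, inputs x0, x1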

////////////////////////////////////////////////////////////////////////////////
/// write specific classifier response
/// nothing to do here - all taken care of by TMultiLayerPerceptron

void TMVA::MethodTMlpANN::MakeClassSpecific( std::ostream& /*fout*/, const TString& /*className*/ ) const
{
}

////////////////////////////////////////////////////////////////////////////////
/// get help message text
///
/// typical length of text line:
///         "|--------------------------------------------------------------|"

void TMVA::MethodTMlpANN::GetHelpMessage() const
{
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Short description:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "This feed-forward multilayer perceptron neural network is the " << Endl;
   Log() << "standard implementation distributed with ROOT (class TMultiLayerPerceptron)." << Endl;
   Log() << Endl;
   Log() << "Detailed information is available here:" << Endl;
   if (gConfig().WriteOptionsReference()) {
      Log() << "<a href=\"http://root.cern.ch/root/html/TMultiLayerPerceptron.html\">";
      Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html</a>" << Endl;
   }
   else Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html" << Endl;
   Log() << Endl;
}