MethodTMlpANN.cxx
// @(#)root/tmva $Id$
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss, Eckhard von Toerne
/**********************************************************************************
 * Project: TMVA - a ROOT-integrated toolkit for multivariate data analysis      *
 * Package: TMVA                                                                  *
 * Class  : MethodTMlpANN                                                         *
 * Web    : http://tmva.sourceforge.net                                           *
 *                                                                                *
 * Description:                                                                   *
 *      Implementation (see header for description)                               *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland              *
 *      Helge Voss      <Helge.Voss@cern.ch>     - MPI-K Heidelberg, Germany      *
 *      Kai Voss        <Kai.Voss@cern.ch>       - U. of Victoria, Canada         *
 *                                                                                *
 * Copyright (c) 2005:                                                            *
 *      CERN, Switzerland                                                         *
 *      U. of Victoria, Canada                                                    *
 *      MPI-K Heidelberg, Germany                                                 *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (http://tmva.sourceforge.net/LICENSE)                                          *
 **********************************************************************************/

////////////////////////////////////////////////////////////////////////////////

/* Begin_Html

   This is the TMVA TMultiLayerPerceptron interface class. It provides the
   training and testing of the ROOT-internal MLP class within the TMVA
   framework.<br>

   Available learning methods:<br>
   <ul>
   <li>Stochastic      </li>
   <li>Batch           </li>
   <li>SteepestDescent </li>
   <li>RibierePolak    </li>
   <li>FletcherReeves  </li>
   <li>BFGS            </li>
   </ul>
End_Html */
//
// See the TMultiLayerPerceptron class description
// for details on this ANN.
//
//_______________________________________________________________________
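
// Example (illustrative sketch, not part of this file): this method is
// normally booked through the TMVA Factory; the option names are those
// declared in DeclareOptions() below, and "factory" is assumed to be a
// configured TMVA::Factory instance.
//
//    factory->BookMethod( TMVA::Types::kTMlpANN, "TMlpANN",
//                         "NCycles=200:HiddenLayers=N,N-1:LearningMethod=BFGS:ValidationFraction=0.3" );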

#include "TMVA/MethodTMlpANN.h"

#include <cstdlib>
#include <iostream>
#include <fstream>

#include "Riostream.h"
#include "TLeaf.h"
#include "TEventList.h"
#include "TObjString.h"
#include "TROOT.h"
#include "TMultiLayerPerceptron.h"

#include "TMVA/Config.h"
#include "TMVA/DataSet.h"
#include "TMVA/DataSetInfo.h"
#include "TMVA/MethodBase.h"
#include "TMVA/MsgLogger.h"
#include "TMVA/Types.h"
#include "TMVA/VariableInfo.h"

#include "TMVA/ClassifierFactory.h"
#ifndef ROOT_TMVA_Tools
#include "TMVA/Tools.h"
#endif

using std::atoi;

// some additional TMlpANN options
const Bool_t EnforceNormalization__=kTRUE;
#if ROOT_VERSION_CODE > ROOT_VERSION(5,13,06)
//const TMultiLayerPerceptron::ELearningMethod LearningMethod__ = TMultiLayerPerceptron::kStochastic;
// const TMultiLayerPerceptron::ELearningMethod LearningMethod__ = TMultiLayerPerceptron::kBatch;
#else
//const TMultiLayerPerceptron::LearningMethod LearningMethod__ = TMultiLayerPerceptron::kStochastic;
#endif

REGISTER_METHOD(TMlpANN)

ClassImp(TMVA::MethodTMlpANN)

////////////////////////////////////////////////////////////////////////////////
/// standard constructor

TMVA::MethodTMlpANN::MethodTMlpANN( const TString& jobName,
                                    const TString& methodTitle,
                                    DataSetInfo& theData,
                                    const TString& theOption,
                                    TDirectory* theTargetDir) :
   TMVA::MethodBase( jobName, Types::kTMlpANN, methodTitle, theData, theOption, theTargetDir ),
   fMLP(0),
   fLocalTrainingTree(0),
   fNcycles(100),
   fValidationFraction(0.5),
   fLearningMethod( "" )
{
}

////////////////////////////////////////////////////////////////////////////////
/// constructor from weight file

TMVA::MethodTMlpANN::MethodTMlpANN( DataSetInfo& theData,
                                    const TString& theWeightFile,
                                    TDirectory* theTargetDir ) :
   TMVA::MethodBase( Types::kTMlpANN, theData, theWeightFile, theTargetDir ),
   fMLP(0),
   fLocalTrainingTree(0),
   fNcycles(100),
   fValidationFraction(0.5),
   fLearningMethod( "" )
{
}

////////////////////////////////////////////////////////////////////////////////
/// TMlpANN can handle classification with 2 classes

Bool_t TMVA::MethodTMlpANN::HasAnalysisType( Types::EAnalysisType type, UInt_t numberClasses,
                                             UInt_t /*numberTargets*/ )
{
   if (type == Types::kClassification && numberClasses == 2) return kTRUE;
   return kFALSE;
}


////////////////////////////////////////////////////////////////////////////////
/// default initialisations

void TMVA::MethodTMlpANN::Init( void )
{
}

////////////////////////////////////////////////////////////////////////////////
/// destructor

TMVA::MethodTMlpANN::~MethodTMlpANN( void )
{
   if (fMLP) delete fMLP;
}

////////////////////////////////////////////////////////////////////////////////
/// translates options from option string into TMlpANN language

void TMVA::MethodTMlpANN::CreateMLPOptions( TString layerSpec )
{
   fHiddenLayer = ":";

   while (layerSpec.Length()>0) {
      TString sToAdd="";
      if (layerSpec.First(',')<0) {
         sToAdd    = layerSpec;
         layerSpec = "";
      }
      else {
         sToAdd    = layerSpec(0,layerSpec.First(','));
         layerSpec = layerSpec(layerSpec.First(',')+1,layerSpec.Length());
      }
      int nNodes = 0;
      if (sToAdd.BeginsWith("N")) { sToAdd.Remove(0,1); nNodes = GetNvar(); }
      nNodes += atoi(sToAdd);
      fHiddenLayer = Form( "%s%i:", (const char*)fHiddenLayer, nNodes );
   }

   // set input vars
   std::vector<TString>::iterator itrVar    = (*fInputVars).begin();
   std::vector<TString>::iterator itrVarEnd = (*fInputVars).end();
   fMLPBuildOptions = "";
   for (; itrVar != itrVarEnd; itrVar++) {
      if (EnforceNormalization__) fMLPBuildOptions += "@";
      TString myVar = *itrVar;
      fMLPBuildOptions += myVar;
      fMLPBuildOptions += ",";
   }
   fMLPBuildOptions.Chop(); // remove last ","

   // prepare final options for MLP kernel
   fMLPBuildOptions += fHiddenLayer;
   fMLPBuildOptions += "type";

   Log() << kINFO << "Use " << fNcycles << " training cycles" << Endl;
   Log() << kINFO << "Use configuration (nodes per hidden layer): " << fHiddenLayer << Endl;
}
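
// Worked example (illustration only): with two input variables "var1" and
// "var2" (GetNvar()==2) and layerSpec=="N,N-1", the loop above yields
// fHiddenLayer==":2:1:" and hence
//
//    fMLPBuildOptions == "@var1,@var2:2:1:type"
//
// where "@" requests input normalization (EnforceNormalization__) and
// "type" names the target branch of the local training tree filled in Train().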

////////////////////////////////////////////////////////////////////////////////
/// define the options (their key words) that can be set in the option string
/// known options:
/// NCycles      <integer>  Number of training cycles (too many cycles could overtrain the network)
/// HiddenLayers <string>   Layout of the hidden layers (nodes per layer)
///   * specifications for each hidden layer are separated by commas
///   * for each layer the number of nodes can be either absolute (simply a number)
///     or relative to the number of input nodes to the neural net (N)
///   * there is always a single node in the output layer
/// example: a net with 6 input nodes and "HiddenLayers=N-1,N-2" has 6,5,4,1 nodes in
/// layers 1,2,3,4, respectively (see also the booking sketch after this method)

void TMVA::MethodTMlpANN::DeclareOptions()
{
   DeclareOptionRef( fNcycles   = 200,     "NCycles",      "Number of training cycles" );
   DeclareOptionRef( fLayerSpec = "N,N-1", "HiddenLayers", "Specification of hidden layer architecture (N stands for number of variables; any integers may also be used)" );

   DeclareOptionRef( fValidationFraction = 0.5, "ValidationFraction",
                     "Fraction of events in training tree used for cross validation" );

   DeclareOptionRef( fLearningMethod = "Stochastic", "LearningMethod", "Learning method" );
   AddPreDefVal( TString("Stochastic") );
   AddPreDefVal( TString("Batch") );
   AddPreDefVal( TString("SteepestDescent") );
   AddPreDefVal( TString("RibierePolak") );
   AddPreDefVal( TString("FletcherReeves") );
   AddPreDefVal( TString("BFGS") );
}
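
// Example (sketch): a booking string exercising all options declared above
// might read
//
//    "NCycles=600:HiddenLayers=N+2,N:LearningMethod=Stochastic:ValidationFraction=0.3"
//
// where "N+2" is resolved by CreateMLPOptions() to GetNvar() plus atoi("+2").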

////////////////////////////////////////////////////////////////////////////////
/// builds the neural network as specified by the user

void TMVA::MethodTMlpANN::ProcessOptions()
{
   CreateMLPOptions(fLayerSpec);

   if (IgnoreEventsWithNegWeightsInTraining()) {
      Log() << kFATAL << "Mechanism to ignore events with negative weights in training not available for method: "
            << GetMethodTypeName()
            << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string."
            << Endl;
   }
}

////////////////////////////////////////////////////////////////////////////////
/// calculate the value of the neural net for the current event

Double_t TMVA::MethodTMlpANN::GetMvaValue( Double_t* err, Double_t* errUpper )
{
   const Event* ev = GetEvent();
   TTHREAD_TLS_DECL_ARG(Double_t*, d, new Double_t[Data()->GetNVariables()]);

   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      d[ivar] = (Double_t)ev->GetValue(ivar);
   }
   Double_t mvaVal = fMLP->Evaluate(0,d);

   // cannot determine error
   NoErrorCalc(err, errUpper);

   return mvaVal;
}
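
// Example (illustrative, assumes an existing weight file): in the application
// phase this value is typically obtained through the TMVA::Reader interface:
//
//    TMVA::Reader reader( "!Color:!Silent" );
//    reader.AddVariable( "var1", &var1 );   // Float_t var1, rebound per event
//    reader.AddVariable( "var2", &var2 );
//    reader.BookMVA( "TMlpANN", "weights/TMVAClassification_TMlpANN.weights.xml" );
//    Double_t mva = reader.EvaluateMVA( "TMlpANN" );
//
// The weight-file path shown is a typical default and may differ per job.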

////////////////////////////////////////////////////////////////////////////////
/// performs TMlpANN training
/// available learning methods:
///
///    TMultiLayerPerceptron::kStochastic
///    TMultiLayerPerceptron::kBatch
///    TMultiLayerPerceptron::kSteepestDescent
///    TMultiLayerPerceptron::kRibierePolak
///    TMultiLayerPerceptron::kFletcherReeves
///    TMultiLayerPerceptron::kBFGS
///
/// TMultiLayerPerceptron wants test and training tree at once
/// so merge the training and testing trees from the MVA factory first:

void TMVA::MethodTMlpANN::Train( void )
{
   Int_t type;
   Float_t weight;
   const Long_t basketsize = 128000;
   Float_t* vArr = new Float_t[GetNvar()];

   TTree *localTrainingTree = new TTree( "TMLPtrain", "Local training tree for TMlpANN" );
   localTrainingTree->Branch( "type",   &type,   "type/I",   basketsize );
   localTrainingTree->Branch( "weight", &weight, "weight/F", basketsize );

   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      const char* myVar = GetInternalVarName(ivar).Data();
      localTrainingTree->Branch( myVar, &vArr[ivar], Form("Var%02i/F", ivar), basketsize );
   }

   for (UInt_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {
      const Event *ev = GetEvent(ievt);
      for (UInt_t i=0; i<GetNvar(); i++) {
         vArr[i] = ev->GetValue( i );
      }
      type   = DataInfo().IsSignal( ev ) ? 1 : 0;
      weight = ev->GetWeight();
      localTrainingTree->Fill();
   }

   // These are the event lists for the mlp train method
   // first events in the tree are for training
   // the rest for internal testing (cross validation)...
   // NOTE: the training events are ordered: first part is signal, second part background
   TString trainList = "Entry$<";
   trainList += 1.0-fValidationFraction;
   trainList += "*";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " || (Entry$>";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " && Entry$<";
   trainList += (Int_t)(Data()->GetNEvtSigTrain() + (1.0 - fValidationFraction)*Data()->GetNEvtBkgdTrain());
   trainList += ")";
   TString testList  = TString("!(") + trainList + ")";

   // print the requirements
   Log() << kINFO << "Requirement for training events:   \"" << trainList << "\"" << Endl;
   Log() << kINFO << "Requirement for validation events: \"" << testList << "\"" << Endl;
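
   // Worked example (illustration only): with 1000 signal and 1000 background
   // training events and fValidationFraction==0.5, the strings built above are
   //
   //    trainList: "Entry$<0.5*1000 || (Entry$>1000 && Entry$<1500)"
   //    testList : "!(Entry$<0.5*1000 || (Entry$>1000 && Entry$<1500))"
   //
   // i.e. the first half of the signal block and the first half of the
   // background block train the net; the remainder is used for validation.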

   // localTrainingTree->Print();

   // create NN
   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(),
                                     localTrainingTree,
                                     trainList,
                                     testList );
   fMLP->SetEventWeight( "weight" );

   // set learning method
#if ROOT_VERSION_CODE > ROOT_VERSION(5,13,06)
   TMultiLayerPerceptron::ELearningMethod learningMethod = TMultiLayerPerceptron::kStochastic;
#else
   TMultiLayerPerceptron::LearningMethod  learningMethod = TMultiLayerPerceptron::kStochastic;
#endif

   fLearningMethod.ToLower();
   if      (fLearningMethod == "stochastic"      ) learningMethod = TMultiLayerPerceptron::kStochastic;
   else if (fLearningMethod == "batch"           ) learningMethod = TMultiLayerPerceptron::kBatch;
   else if (fLearningMethod == "steepestdescent" ) learningMethod = TMultiLayerPerceptron::kSteepestDescent;
   else if (fLearningMethod == "ribierepolak"    ) learningMethod = TMultiLayerPerceptron::kRibierePolak;
   else if (fLearningMethod == "fletcherreeves"  ) learningMethod = TMultiLayerPerceptron::kFletcherReeves;
   else if (fLearningMethod == "bfgs"            ) learningMethod = TMultiLayerPerceptron::kBFGS;
   else {
      Log() << kFATAL << "Unknown Learning Method: \"" << fLearningMethod << "\"" << Endl;
   }
   fMLP->SetLearningMethod( learningMethod );

   // train NN
   fMLP->Train(fNcycles, "text,update=50" );

   // write weights to File;
   // this is not nice, but fMLP gets deleted at the end of Train()
   delete localTrainingTree;
   delete [] vArr;
}


////////////////////////////////////////////////////////////////////////////////
/// write weights to xml file

void TMVA::MethodTMlpANN::AddWeightsXMLTo( void* parent ) const
{
   // first the architecture
   void *wght = gTools().AddChild(parent, "Weights");
   void* arch = gTools().AddChild( wght, "Architecture" );
   gTools().AddAttr( arch, "BuildOptions", fMLPBuildOptions.Data() );

   // dump weights first in temporary txt file, read from there into xml
   fMLP->DumpWeights( "weights/TMlp.nn.weights.temp" );
   std::ifstream inf( "weights/TMlp.nn.weights.temp" );
   char temp[256];
   TString data("");
   void *ch=NULL;
   while (inf.getline(temp,256)) {
      TString dummy(temp);
      //std::cout << dummy << std::endl; // remove annoying debug printout with std::cout
      if (dummy.BeginsWith("#")) {
         if (ch!=0) gTools().AddRawLine( ch, data.Data() );
         dummy = dummy.Strip(TString::kLeading, '#');
         dummy = dummy(0,dummy.First(' '));
         ch = gTools().AddChild(wght, dummy);
         data.Resize(0);
         continue;
      }
      data += (dummy + " ");
   }
   if (ch != 0) gTools().AddRawLine( ch, data.Data() );

   inf.close();
}
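
// Example of the temporary text format written by DumpWeights() and parsed
// above (the numeric values are purely illustrative):
//
//    #input normalization
//    0.5 1.2
//    #output normalization
//    0.5 1.2
//    #neurons weights
//    0.873
//    #synapses weights
//    -0.155
//
// Each "#" header starts a new XML child of <Weights> (named after the first
// word following "#"), whose raw text holds the subsequent numbers.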

////////////////////////////////////////////////////////////////////////////////
/// rebuild temporary textfile from xml weightfile and load this
/// file into MLP

void TMVA::MethodTMlpANN::ReadWeightsFromXML( void* wghtnode )
{
   void* ch = gTools().GetChild(wghtnode);
   gTools().ReadAttr( ch, "BuildOptions", fMLPBuildOptions );

   ch = gTools().GetNextChild(ch);
   const char* fname = "weights/TMlp.nn.weights.temp";
   std::ofstream fout( fname );
   double temp1=0,temp2=0;
   while (ch) {
      const char* nodecontent = gTools().GetContent(ch);
      std::stringstream content(nodecontent);
      if (strcmp(gTools().GetName(ch),"input")==0) {
         fout << "#input normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"output")==0) {
         fout << "#output normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"neurons")==0) {
         fout << "#neurons weights" << std::endl;
         while (content >> temp1) {
            fout << temp1 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"synapses")==0) {
         fout << "#synapses weights" ;
         while (content >> temp1) {
            fout << std::endl << temp1 ;
         }
      }
      ch = gTools().GetNextChild(ch);
   }
   fout.close();

   // Here we create a dummy tree necessary to create a minimal NN
   // to be used for testing, evaluation and application
   TTHREAD_TLS_DECL_ARG(Double_t*, d, new Double_t[Data()->GetNVariables()]);
   TTHREAD_TLS(Int_t) type;

   gROOT->cd();
   TTree * dummyTree = new TTree("dummy","Empty dummy tree", 1);
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetInternalName();
      dummyTree->Branch(Form("%s",vn.Data()), d+ivar, Form("%s/D",vn.Data()));
   }
   dummyTree->Branch("type", &type, "type/I");

   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
   fMLP->LoadWeights( fname );
}

////////////////////////////////////////////////////////////////////////////////
/// read weights from stream
/// since the MLP can not read from the stream, we
/// 1st: write the weights to temporary file

void TMVA::MethodTMlpANN::ReadWeightsFromStream( std::istream& istr )
{
   std::ofstream fout( "./TMlp.nn.weights.temp" );
   fout << istr.rdbuf();
   fout.close();
   // 2nd: load the weights from the temporary file into the MLP
   // the MLP is already built
   Log() << kINFO << "Load TMLP weights into " << fMLP << Endl;

   Double_t* d = new Double_t[Data()->GetNVariables()] ;
   Int_t type;
   gROOT->cd();
   TTree * dummyTree = new TTree("dummy","Empty dummy tree", 1);
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetLabel();
      dummyTree->Branch(Form("%s",vn.Data()), d+ivar, Form("%s/D",vn.Data()));
   }
   dummyTree->Branch("type", &type, "type/I");

   if (fMLP != 0) { delete fMLP; fMLP = 0; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );

   fMLP->LoadWeights( "./TMlp.nn.weights.temp" );
   // here we can delete the temporary file
   // how?
   delete [] d;
}

////////////////////////////////////////////////////////////////////////////////
/// create reader class for classifier -> overrides base class function
/// create specific class for TMultiLayerPerceptron

void TMVA::MethodTMlpANN::MakeClass( const TString& theClassFileName ) const
{
   // the default consists of
   TString classFileName = "";
   if (theClassFileName == "")
      classFileName = GetWeightFileDir() + "/" + GetJobName() + "_" + GetMethodName() + ".class";
   else
      classFileName = theClassFileName;

   classFileName.ReplaceAll(".class","");
   Log() << kINFO << "Creating specific (TMultiLayerPerceptron) standalone response class: " << classFileName << Endl;
   fMLP->Export( classFileName.Data() );
}
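
// Example (sketch): if classFileName resolves to "weights/MyJob_TMlpANN.class",
// the call above becomes fMLP->Export("weights/MyJob_TMlpANN"); by default
// TMultiLayerPerceptron then writes standalone C++ sources (a .h/.cxx pair
// under that name) implementing the network response.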

////////////////////////////////////////////////////////////////////////////////
/// write specific classifier response
/// nothing to do here - all taken care of by TMultiLayerPerceptron

void TMVA::MethodTMlpANN::MakeClassSpecific( std::ostream& /*fout*/, const TString& /*className*/ ) const
{
}

////////////////////////////////////////////////////////////////////////////////
/// get help message text
///
/// typical length of text line:
///   "|--------------------------------------------------------------|"

void TMVA::MethodTMlpANN::GetHelpMessage() const
{
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Short description:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "This feed-forward multilayer perceptron neural network is the " << Endl;
   Log() << "standard implementation distributed with ROOT (class TMultiLayerPerceptron)." << Endl;
   Log() << Endl;
   Log() << "Detailed information is available here:" << Endl;
   if (gConfig().WriteOptionsReference()) {
      Log() << "<a href=\"http://root.cern.ch/root/html/TMultiLayerPerceptron.html\">";
      Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html</a>" << Endl;
   }
   else Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html" << Endl;
   Log() << Endl;
}