Logo ROOT  
Reference Guide
MethodTMlpANN.cxx
Go to the documentation of this file.
1 // @(#)root/tmva $Id$
2 // Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss, Eckhard von Toerne
3 /**********************************************************************************
4  * Project: TMVA - a Root-integrated toolkit for multivariate data analysis *
5  * Package: TMVA *
6  * Class : MethodTMlpANN *
7  * Web : http://tmva.sourceforge.net *
8  * *
9  * Description: *
10  * Implementation (see header for description) *
11  * *
12  * Authors (alphabetical): *
13  * Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland *
14  * Helge Voss <Helge.Voss@cern.ch> - MPI-K Heidelberg, Germany *
15  * Kai Voss <Kai.Voss@cern.ch> - U. of Victoria, Canada *
16  * *
17  * Copyright (c) 2005: *
18  * CERN, Switzerland *
19  * U. of Victoria, Canada *
20  * MPI-K Heidelberg, Germany *
21  * *
22  * Redistribution and use in source and binary forms, with or without *
23  * modification, are permitted according to the terms listed in LICENSE *
24  * (http://tmva.sourceforge.net/LICENSE) *
25  **********************************************************************************/
26 
27 /*! \class TMVA::MethodTMlpANN
28 \ingroup TMVA
29 
30 This is the TMVA TMultiLayerPerceptron interface class. It provides the
31 training and testing the ROOT internal MLP class in the TMVA framework.
32 
33 Available learning methods:<br>
34 
35  - Stochastic
36  - Batch
37  - SteepestDescent
38  - RibierePolak
39  - FletcherReeves
40  - BFGS
41 
42 See the TMultiLayerPerceptron class description
43 for details on this ANN.
44 */
45 
#include "TMVA/MethodTMlpANN.h"

#include "TMVA/ClassifierFactory.h"
#include "TMVA/Config.h"
#include "TMVA/Configurable.h"
#include "TMVA/DataSet.h"
#include "TMVA/DataSetInfo.h"
#include "TMVA/IMethod.h"
#include "TMVA/MethodBase.h"
#include "TMVA/MsgLogger.h"
#include "TMVA/Tools.h"
#include "TMVA/Types.h"
#include "TMVA/VariableInfo.h"

#include "TEventList.h"
#include "TLeaf.h"
#include "TMultiLayerPerceptron.h"
#include "TROOT.h"
#include "ThreadLocalStorage.h"

#include <cstdlib>
#include <fstream>
#include <iostream>
#include <string>
70 
71 
72 using std::atoi;
73 
74 // some additional TMlpANN options
76 
77 REGISTER_METHOD(TMlpANN)
78 
80 
81 ////////////////////////////////////////////////////////////////////////////////
82 /// standard constructor
83 
85  const TString& methodTitle,
86  DataSetInfo& theData,
87  const TString& theOption) :
88  TMVA::MethodBase( jobName, Types::kTMlpANN, methodTitle, theData, theOption),
89  fMLP(0),
90  fLocalTrainingTree(0),
91  fNcycles(100),
92  fValidationFraction(0.5),
93  fLearningMethod( "" )
94 {
95 }
96 
97 ////////////////////////////////////////////////////////////////////////////////
98 /// constructor from weight file
99 
101  const TString& theWeightFile) :
102  TMVA::MethodBase( Types::kTMlpANN, theData, theWeightFile),
103  fMLP(0),
104  fLocalTrainingTree(0),
105  fNcycles(100),
106  fValidationFraction(0.5),
107  fLearningMethod( "" )
108 {
109 }
110 
111 ////////////////////////////////////////////////////////////////////////////////
112 /// TMlpANN can handle classification with 2 classes
113 
115  UInt_t /*numberTargets*/ )
116 {
117  if (type == Types::kClassification && numberClasses == 2) return kTRUE;
118  return kFALSE;
119 }
120 
121 
122 ////////////////////////////////////////////////////////////////////////////////
123 /// default initialisations
124 
126 {
127 }
128 
129 ////////////////////////////////////////////////////////////////////////////////
130 /// destructor
131 
133 {
134  if (fMLP) delete fMLP;
135 }
136 
137 ////////////////////////////////////////////////////////////////////////////////
138 /// translates options from option string into TMlpANN language
139 
141 {
142  fHiddenLayer = ":";
143 
144  while (layerSpec.Length()>0) {
145  TString sToAdd="";
146  if (layerSpec.First(',')<0) {
147  sToAdd = layerSpec;
148  layerSpec = "";
149  }
150  else {
151  sToAdd = layerSpec(0,layerSpec.First(','));
152  layerSpec = layerSpec(layerSpec.First(',')+1,layerSpec.Length());
153  }
154  int nNodes = 0;
155  if (sToAdd.BeginsWith("N")) { sToAdd.Remove(0,1); nNodes = GetNvar(); }
156  nNodes += atoi(sToAdd);
157  fHiddenLayer = Form( "%s%i:", (const char*)fHiddenLayer, nNodes );
158  }
159 
160  // set input vars
161  std::vector<TString>::iterator itrVar = (*fInputVars).begin();
162  std::vector<TString>::iterator itrVarEnd = (*fInputVars).end();
163  fMLPBuildOptions = "";
164  for (; itrVar != itrVarEnd; ++itrVar) {
165  if (EnforceNormalization__) fMLPBuildOptions += "@";
166  TString myVar = *itrVar; ;
167  fMLPBuildOptions += myVar;
168  fMLPBuildOptions += ",";
169  }
170  fMLPBuildOptions.Chop(); // remove last ","
171 
172  // prepare final options for MLP kernel
173  fMLPBuildOptions += fHiddenLayer;
174  fMLPBuildOptions += "type";
175 
176  Log() << kINFO << "Use " << fNcycles << " training cycles" << Endl;
177  Log() << kINFO << "Use configuration (nodes per hidden layer): " << fHiddenLayer << Endl;
178 }
179 
180 ////////////////////////////////////////////////////////////////////////////////
181 /// define the options (their key words) that can be set in the option string
182 ///
183 /// know options:
184 ///
185 /// - NCycles <integer> Number of training cycles (too many cycles could overtrain the network)
186 /// - HiddenLayers <string> Layout of the hidden layers (nodes per layer)
187 /// * specifications for each hidden layer are separated by comma
188 /// * for each layer the number of nodes can be either absolut (simply a number)
189 /// or relative to the number of input nodes to the neural net (N)
190 /// * there is always a single node in the output layer
191 ///
192 /// example: a net with 6 input nodes and "Hiddenlayers=N-1,N-2" has 6,5,4,1 nodes in the
193 /// layers 1,2,3,4, respectively
194 
196 {
197  DeclareOptionRef( fNcycles = 200, "NCycles", "Number of training cycles" );
198  DeclareOptionRef( fLayerSpec = "N,N-1", "HiddenLayers", "Specification of hidden layer architecture (N stands for number of variables; any integers may also be used)" );
199 
200  DeclareOptionRef( fValidationFraction = 0.5, "ValidationFraction",
201  "Fraction of events in training tree used for cross validation" );
202 
203  DeclareOptionRef( fLearningMethod = "Stochastic", "LearningMethod", "Learning method" );
204  AddPreDefVal( TString("Stochastic") );
205  AddPreDefVal( TString("Batch") );
206  AddPreDefVal( TString("SteepestDescent") );
207  AddPreDefVal( TString("RibierePolak") );
208  AddPreDefVal( TString("FletcherReeves") );
209  AddPreDefVal( TString("BFGS") );
210 }
211 
212 ////////////////////////////////////////////////////////////////////////////////
213 /// builds the neural network as specified by the user
214 
216 {
217  CreateMLPOptions(fLayerSpec);
218 
219  if (IgnoreEventsWithNegWeightsInTraining()) {
220  Log() << kFATAL << "Mechanism to ignore events with negative weights in training not available for method"
221  << GetMethodTypeName()
222  << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string."
223  << Endl;
224  }
225 }
226 
227 ////////////////////////////////////////////////////////////////////////////////
228 /// calculate the value of the neural net for the current event
229 
231 {
232  const Event* ev = GetEvent();
233  TTHREAD_TLS_DECL_ARG(Double_t*, d, new Double_t[Data()->GetNVariables()]);
234 
235  for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
236  d[ivar] = (Double_t)ev->GetValue(ivar);
237  }
238  Double_t mvaVal = fMLP->Evaluate(0,d);
239 
240  // cannot determine error
241  NoErrorCalc(err, errUpper);
242 
243  return mvaVal;
244 }
245 
246 ////////////////////////////////////////////////////////////////////////////////
247 /// performs TMlpANN training
248 /// available learning methods:
249 ///
250 /// - TMultiLayerPerceptron::kStochastic
251 /// - TMultiLayerPerceptron::kBatch
252 /// - TMultiLayerPerceptron::kSteepestDescent
253 /// - TMultiLayerPerceptron::kRibierePolak
254 /// - TMultiLayerPerceptron::kFletcherReeves
255 /// - TMultiLayerPerceptron::kBFGS
256 ///
257 /// TMultiLayerPerceptron wants test and training tree at once
258 /// so merge the training and testing trees from the MVA factory first:
259 
261 {
262  Int_t type;
263  Float_t weight;
264  const Long_t basketsize = 128000;
265  Float_t* vArr = new Float_t[GetNvar()];
266 
267  TTree *localTrainingTree = new TTree( "TMLPtrain", "Local training tree for TMlpANN" );
268  localTrainingTree->Branch( "type", &type, "type/I", basketsize );
269  localTrainingTree->Branch( "weight", &weight, "weight/F", basketsize );
270 
271  for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
272  const char* myVar = GetInternalVarName(ivar).Data();
273  localTrainingTree->Branch( myVar, &vArr[ivar], Form("Var%02i/F", ivar), basketsize );
274  }
275 
276  for (UInt_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {
277  const Event *ev = GetEvent(ievt);
278  for (UInt_t i=0; i<GetNvar(); i++) {
279  vArr[i] = ev->GetValue( i );
280  }
281  type = DataInfo().IsSignal( ev ) ? 1 : 0;
282  weight = ev->GetWeight();
283  localTrainingTree->Fill();
284  }
285 
286  // These are the event lists for the mlp train method
287  // first events in the tree are for training
288  // the rest for internal testing (cross validation)...
289  // NOTE: the training events are ordered: first part is signal, second part background
290  TString trainList = "Entry$<";
291  trainList += 1.0-fValidationFraction;
292  trainList += "*";
293  trainList += (Int_t)Data()->GetNEvtSigTrain();
294  trainList += " || (Entry$>";
295  trainList += (Int_t)Data()->GetNEvtSigTrain();
296  trainList += " && Entry$<";
297  trainList += (Int_t)(Data()->GetNEvtSigTrain() + (1.0 - fValidationFraction)*Data()->GetNEvtBkgdTrain());
298  trainList += ")";
299  TString testList = TString("!(") + trainList + ")";
300 
301  // print the requirements
302  Log() << kHEADER << "Requirement for training events: \"" << trainList << "\"" << Endl;
303  Log() << kINFO << "Requirement for validation events: \"" << testList << "\"" << Endl;
304 
305  // localTrainingTree->Print();
306 
307  // create NN
308  if (fMLP != 0) { delete fMLP; fMLP = 0; }
309  fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(),
310  localTrainingTree,
311  trainList,
312  testList );
313  fMLP->SetEventWeight( "weight" );
314 
315  // set learning method
317 
318  fLearningMethod.ToLower();
319  if (fLearningMethod == "stochastic" ) learningMethod = TMultiLayerPerceptron::kStochastic;
320  else if (fLearningMethod == "batch" ) learningMethod = TMultiLayerPerceptron::kBatch;
321  else if (fLearningMethod == "steepestdescent" ) learningMethod = TMultiLayerPerceptron::kSteepestDescent;
322  else if (fLearningMethod == "ribierepolak" ) learningMethod = TMultiLayerPerceptron::kRibierePolak;
323  else if (fLearningMethod == "fletcherreeves" ) learningMethod = TMultiLayerPerceptron::kFletcherReeves;
324  else if (fLearningMethod == "bfgs" ) learningMethod = TMultiLayerPerceptron::kBFGS;
325  else {
326  Log() << kFATAL << "Unknown Learning Method: \"" << fLearningMethod << "\"" << Endl;
327  }
328  fMLP->SetLearningMethod( learningMethod );
329 
330  // train NN
331  fMLP->Train(fNcycles, "" ); //"text,update=50" );
332 
333  // write weights to File;
334  // this is not nice, but fMLP gets deleted at the end of Train()
335  delete localTrainingTree;
336  delete [] vArr;
337 }
338 
339 ////////////////////////////////////////////////////////////////////////////////
340 /// write weights to xml file
341 
342 void TMVA::MethodTMlpANN::AddWeightsXMLTo( void* parent ) const
343 {
344  // first the architecture
345  void *wght = gTools().AddChild(parent, "Weights");
346  void* arch = gTools().AddChild( wght, "Architecture" );
347  gTools().AddAttr( arch, "BuildOptions", fMLPBuildOptions.Data() );
348 
349  // dump weights first in temporary txt file, read from there into xml
350  const TString tmpfile=GetWeightFileDir()+"/TMlp.nn.weights.temp";
351  fMLP->DumpWeights( tmpfile.Data() );
352  std::ifstream inf( tmpfile.Data() );
353  char temp[256];
354  TString data("");
355  void *ch=NULL;
356  while (inf.getline(temp,256)) {
357  TString dummy(temp);
358  //std::cout << dummy << std::endl; // remove annoying debug printout with std::cout
359  if (dummy.BeginsWith('#')) {
360  if (ch!=0) gTools().AddRawLine( ch, data.Data() );
361  dummy = dummy.Strip(TString::kLeading, '#');
362  dummy = dummy(0,dummy.First(' '));
363  ch = gTools().AddChild(wght, dummy);
364  data.Resize(0);
365  continue;
366  }
367  data += (dummy + " ");
368  }
369  if (ch != 0) gTools().AddRawLine( ch, data.Data() );
370 
371  inf.close();
372 }
373 
374 ////////////////////////////////////////////////////////////////////////////////
375 /// rebuild temporary textfile from xml weightfile and load this
376 /// file into MLP
377 
379 {
380  void* ch = gTools().GetChild(wghtnode);
381  gTools().ReadAttr( ch, "BuildOptions", fMLPBuildOptions );
382 
383  ch = gTools().GetNextChild(ch);
384  const TString fname = GetWeightFileDir()+"/TMlp.nn.weights.temp";
385  std::ofstream fout( fname.Data() );
386  double temp1=0,temp2=0;
387  while (ch) {
388  const char* nodecontent = gTools().GetContent(ch);
389  std::stringstream content(nodecontent);
390  if (strcmp(gTools().GetName(ch),"input")==0) {
391  fout << "#input normalization" << std::endl;
392  while ((content >> temp1) &&(content >> temp2)) {
393  fout << temp1 << " " << temp2 << std::endl;
394  }
395  }
396  if (strcmp(gTools().GetName(ch),"output")==0) {
397  fout << "#output normalization" << std::endl;
398  while ((content >> temp1) &&(content >> temp2)) {
399  fout << temp1 << " " << temp2 << std::endl;
400  }
401  }
402  if (strcmp(gTools().GetName(ch),"neurons")==0) {
403  fout << "#neurons weights" << std::endl;
404  while (content >> temp1) {
405  fout << temp1 << std::endl;
406  }
407  }
408  if (strcmp(gTools().GetName(ch),"synapses")==0) {
409  fout << "#synapses weights" ;
410  while (content >> temp1) {
411  fout << std::endl << temp1 ;
412  }
413  }
414  ch = gTools().GetNextChild(ch);
415  }
416  fout.close();;
417 
418  // Here we create a dummy tree necessary to create a minimal NN
419  // to be used for testing, evaluation and application
420  TTHREAD_TLS_DECL_ARG(Double_t*, d, new Double_t[Data()->GetNVariables()]);
421  TTHREAD_TLS(Int_t) type;
422 
423  gROOT->cd();
424  TTree * dummyTree = new TTree("dummy","Empty dummy tree", 1);
425  for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
426  TString vn = DataInfo().GetVariableInfo(ivar).GetInternalName();
427  dummyTree->Branch(Form("%s",vn.Data()), d+ivar, Form("%s/D",vn.Data()));
428  }
429  dummyTree->Branch("type", &type, "type/I");
430 
431  if (fMLP != 0) { delete fMLP; fMLP = 0; }
432  fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
433  fMLP->LoadWeights( fname );
434 }
435 
436 ////////////////////////////////////////////////////////////////////////////////
437 /// read weights from stream
438 /// since the MLP can not read from the stream, we
439 /// 1st: write the weights to temporary file
440 
442 {
443  std::ofstream fout( "./TMlp.nn.weights.temp" );
444  fout << istr.rdbuf();
445  fout.close();
446  // 2nd: load the weights from the temporary file into the MLP
447  // the MLP is already build
448  Log() << kINFO << "Load TMLP weights into " << fMLP << Endl;
449 
450  Double_t* d = new Double_t[Data()->GetNVariables()] ;
451  Int_t type;
452  gROOT->cd();
453  TTree * dummyTree = new TTree("dummy","Empty dummy tree", 1);
454  for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
455  TString vn = DataInfo().GetVariableInfo(ivar).GetLabel();
456  dummyTree->Branch(Form("%s",vn.Data()), d+ivar, Form("%s/D",vn.Data()));
457  }
458  dummyTree->Branch("type", &type, "type/I");
459 
460  if (fMLP != 0) { delete fMLP; fMLP = 0; }
461  fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
462 
463  fMLP->LoadWeights( "./TMlp.nn.weights.temp" );
464  // here we can delete the temporary file
465  // how?
466  delete [] d;
467 }
468 
469 ////////////////////////////////////////////////////////////////////////////////
470 /// create reader class for classifier -> overwrites base class function
471 /// create specific class for TMultiLayerPerceptron
472 
473 void TMVA::MethodTMlpANN::MakeClass( const TString& theClassFileName ) const
474 {
475  // the default consists of
476  TString classFileName = "";
477  if (theClassFileName == "")
478  classFileName = GetWeightFileDir() + "/" + GetJobName() + "_" + GetMethodName() + ".class";
479  else
480  classFileName = theClassFileName;
481 
482  classFileName.ReplaceAll(".class","");
483  Log() << kINFO << "Creating specific (TMultiLayerPerceptron) standalone response class: " << classFileName << Endl;
484  fMLP->Export( classFileName.Data() );
485 }
486 
487 ////////////////////////////////////////////////////////////////////////////////
488 /// write specific classifier response
489 /// nothing to do here - all taken care of by TMultiLayerPerceptron
490 
void TMVA::MethodTMlpANN::MakeClassSpecific( std::ostream& /*fout*/, const TString& /*className*/ ) const
{
   // intentionally empty: the standalone response class is generated in full
   // by TMultiLayerPerceptron::Export (see MakeClass), so there is nothing
   // for this base-class code-generation hook to write
}
494 
495 ////////////////////////////////////////////////////////////////////////////////
496 /// get help message text
497 ///
498 /// typical length of text line:
499 /// "|--------------------------------------------------------------|"
500 
502 {
503  Log() << Endl;
504  Log() << gTools().Color("bold") << "--- Short description:" << gTools().Color("reset") << Endl;
505  Log() << Endl;
506  Log() << "This feed-forward multilayer perceptron neural network is the " << Endl;
507  Log() << "standard implementation distributed with ROOT (class TMultiLayerPerceptron)." << Endl;
508  Log() << Endl;
509  Log() << "Detailed information is available here:" << Endl;
510  if (gConfig().WriteOptionsReference()) {
511  Log() << "<a href=\"http://root.cern.ch/root/html/TMultiLayerPerceptron.html\">";
512  Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html</a>" << Endl;
513  }
514  else Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html" << Endl;
515  Log() << Endl;
516 }
kTRUE
const Bool_t kTRUE
Definition: RtypesCore.h:91
TMVA::Tools::GetChild
void * GetChild(void *parent, const char *childname=0)
get child node
Definition: Tools.cxx:1162
TMVA::MethodTMlpANN::HasAnalysisType
virtual Bool_t HasAnalysisType(Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets)
TMlpANN can handle classification with 2 classes.
Definition: MethodTMlpANN.cxx:114
TMultiLayerPerceptron
This class describes a neural network.
Definition: TMultiLayerPerceptron.h:26
TMVA::Tools::GetContent
const char * GetContent(void *node)
XML helpers.
Definition: Tools.cxx:1186
TString::Strip
TSubString Strip(EStripType s=kTrailing, char c=' ') const
Return a substring of self stripped at beginning and/or end.
Definition: TString.cxx:1106
TString::Data
const char * Data() const
Definition: TString.h:369
TMVA::MethodTMlpANN::DeclareOptions
void DeclareOptions()
define the options (their key words) that can be set in the option string
Definition: MethodTMlpANN.cxx:195
DataSetInfo.h
ClassImp
#define ClassImp(name)
Definition: Rtypes.h:364
Form
char * Form(const char *fmt,...)
TMVA::MethodTMlpANN::ReadWeightsFromXML
void ReadWeightsFromXML(void *wghtnode)
rebuild temporary textfile from xml weightfile and load this file into MLP
Definition: MethodTMlpANN.cxx:378
TMultiLayerPerceptron::kStochastic
@ kStochastic
Definition: TMultiLayerPerceptron.h:30
IMethod.h
TMath::Log
Double_t Log(Double_t x)
Definition: TMath.h:760
TTree
A TTree represents a columnar dataset.
Definition: TTree.h:79
TMVA::Tools::AddChild
void * AddChild(void *parent, const char *childname, const char *content=0, bool isRootNode=false)
add child node
Definition: Tools.cxx:1136
Float_t
float Float_t
Definition: RtypesCore.h:57
VariableInfo.h
Int_t
int Int_t
Definition: RtypesCore.h:45
TMultiLayerPerceptron::kBatch
@ kBatch
Definition: TMultiLayerPerceptron.h:30
TMVA::MethodTMlpANN::GetHelpMessage
void GetHelpMessage() const
get help message text
Definition: MethodTMlpANN.cxx:501
TString::Length
Ssiz_t Length() const
Definition: TString.h:410
MethodBase.h
TTree::Fill
virtual Int_t Fill()
Fill all branches.
Definition: TTree.cxx:4536
TString
Basic string class.
Definition: TString.h:136
Bool_t
bool Bool_t
Definition: RtypesCore.h:63
REGISTER_METHOD
#define REGISTER_METHOD(CLASS)
for example
Definition: ClassifierFactory.h:124
TMultiLayerPerceptron::kFletcherReeves
@ kFletcherReeves
Definition: TMultiLayerPerceptron.h:31
bool
TString::ReplaceAll
TString & ReplaceAll(const TString &s1, const TString &s2)
Definition: TString.h:692
TMVA::MethodTMlpANN::MethodTMlpANN
MethodTMlpANN(const TString &jobName, const TString &methodTitle, DataSetInfo &theData, const TString &theOption="3000:N-1:N-2")
standard constructor
Definition: MethodTMlpANN.cxx:84
TROOT.h
TMVA::MethodTMlpANN::MakeClass
void MakeClass(const TString &classFileName=TString("")) const
create reader class for classifier -> overwrites base class function create specific class for TMulti...
Definition: MethodTMlpANN.cxx:473
TMVA::Tools::AddAttr
void AddAttr(void *node, const char *, const T &value, Int_t precision=16)
add attribute to xml
Definition: Tools.h:353
TMVA::MethodTMlpANN::Init
void Init(void)
default initialisations
Definition: MethodTMlpANN.cxx:125
TMVA::Event::GetValue
Float_t GetValue(UInt_t ivar) const
return value of i'th variable
Definition: Event.cxx:236
TMVA::DataSetInfo
Class that contains all the data information.
Definition: DataSetInfo.h:62
TMultiLayerPerceptron::ELearningMethod
ELearningMethod
Definition: TMultiLayerPerceptron.h:30
TString::Resize
void Resize(Ssiz_t n)
Resize the string. Truncate or add blanks as necessary.
Definition: TString.cxx:1095
MsgLogger.h
TMVA::MethodTMlpANN::GetMvaValue
Double_t GetMvaValue(Double_t *err=0, Double_t *errUpper=0)
calculate the value of the neural net for the current event
Definition: MethodTMlpANN.cxx:230
TLeaf.h
EnforceNormalization__
const Bool_t EnforceNormalization__
Definition: MethodTMlpANN.cxx:75
TString::Chop
TString & Chop()
Definition: TString.h:679
TMVA::Types::EAnalysisType
EAnalysisType
Definition: Types.h:128
TTree::Branch
TBranch * Branch(const char *name, T *obj, Int_t bufsize=32000, Int_t splitlevel=99)
Add a new branch, and infer the data type from the type of obj being passed.
Definition: TTree.h:349
TString::Remove
TString & Remove(Ssiz_t pos)
Definition: TString.h:673
TMVA::gConfig
Config & gConfig()
kFALSE
const Bool_t kFALSE
Definition: RtypesCore.h:92
Long_t
long Long_t
Definition: RtypesCore.h:54
TMVA::Tools::ReadAttr
void ReadAttr(void *node, const char *, T &value)
read attribute from xml
Definition: Tools.h:335
TString::First
Ssiz_t First(char c) const
Find first occurrence of a character c.
Definition: TString.cxx:499
TMVA::Types::kClassification
@ kClassification
Definition: Types.h:129
TString::BeginsWith
Bool_t BeginsWith(const char *s, ECaseCompare cmp=kExact) const
Definition: TString.h:615
TMVA::MethodTMlpANN::MakeClassSpecific
void MakeClassSpecific(std::ostream &, const TString &) const
write specific classifier response nothing to do here - all taken care of by TMultiLayerPerceptron
Definition: MethodTMlpANN.cxx:491
TMultiLayerPerceptron::kBFGS
@ kBFGS
Definition: TMultiLayerPerceptron.h:31
TMVA::MethodTMlpANN::ProcessOptions
void ProcessOptions()
builds the neural network as specified by the user
Definition: MethodTMlpANN.cxx:215
TMVA::MethodBase
Virtual base Class for all MVA method.
Definition: MethodBase.h:111
TMVA::Types
Singleton class for Global types used by TMVA.
Definition: Types.h:73
Types.h
Configurable.h
TMVA::Endl
MsgLogger & Endl(MsgLogger &ml)
Definition: MsgLogger.h:158
Config.h
TMVA::MethodTMlpANN::ReadWeightsFromStream
virtual void ReadWeightsFromStream(std::istream &)=0
unsigned int
TMVA::TMVAGlob::GetMethodName
void GetMethodName(TString &name, TKey *mkey)
Definition: tmvaglob.cxx:342
TMVA::Tools::Color
const TString & Color(const TString &)
human readable color strings
Definition: Tools.cxx:840
TMVA::MethodTMlpANN::CreateMLPOptions
void CreateMLPOptions(TString)
translates options from option string into TMlpANN language
Definition: MethodTMlpANN.cxx:140
TMVA::MethodTMlpANN::~MethodTMlpANN
virtual ~MethodTMlpANN(void)
destructor
Definition: MethodTMlpANN.cxx:132
Double_t
double Double_t
Definition: RtypesCore.h:59
TMultiLayerPerceptron.h
TMultiLayerPerceptron::kSteepestDescent
@ kSteepestDescent
Definition: TMultiLayerPerceptron.h:30
TMVA::Event::GetWeight
Double_t GetWeight() const
return the event weight - depending on whether the flag IgnoreNegWeightsInTraining is or not.
Definition: Event.cxx:381
TMVA::Tools::GetNextChild
void * GetNextChild(void *prevchild, const char *childname=0)
XML helpers.
Definition: Tools.cxx:1174
TMVA::Event
Definition: Event.h:51
TMVA::MethodTMlpANN
This is the TMVA TMultiLayerPerceptron interface class.
Definition: MethodTMlpANN.h:49
TMVA::MethodTMlpANN::AddWeightsXMLTo
void AddWeightsXMLTo(void *parent) const
write weights to xml file
Definition: MethodTMlpANN.cxx:342
TMultiLayerPerceptron::kRibierePolak
@ kRibierePolak
Definition: TMultiLayerPerceptron.h:31
d
#define d(i)
Definition: RSha256.hxx:102
TMVA::MethodTMlpANN::Train
void Train(void)
performs TMlpANN training available learning methods:
Definition: MethodTMlpANN.cxx:260
TString::kLeading
@ kLeading
Definition: TString.h:267
Tools.h
ClassifierFactory.h
type
int type
Definition: TGX11.cxx:121
TMVA::gTools
Tools & gTools()
DataSet.h
TMVA::Tools::AddRawLine
Bool_t AddRawLine(void *node, const char *raw)
XML helpers.
Definition: Tools.cxx:1202
ThreadLocalStorage.h
TMVA
create variable transformations
Definition: GeneticMinimizer.h:22
gROOT
#define gROOT
Definition: TROOT.h:406
int
MethodTMlpANN.h
TEventList.h