#ifndef ROOT_TMVA_Tools
#include "TMVA/Tools.h"
#endif

// some additional, version-dependent TMlpANN settings (the guarded block
// is elided here)
#if ROOT_VERSION_CODE > ROOT_VERSION(5,13,06)
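
//_______________________________________________________________________
// Standard constructor; the full argument list is completed here from the
// constructor declaration (method title, option string, target directory)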
TMVA::MethodTMlpANN::MethodTMlpANN( const TString& jobName,
                                    const TString& methodTitle,
                                    DataSetInfo& theData,
                                    const TString& theOption,
                                    TDirectory* theTargetDir ) :
   TMVA::MethodBase( jobName, Types::kTMlpANN, methodTitle, theData, theOption, theTargetDir ),
   fLocalTrainingTree(0),
   fValidationFraction(0.5),
   fLearningMethod( "" )
{
   // standard constructor
}
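
//_______________________________________________________________________
// Constructor used when reconstructing the method from a stored weight file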
TMVA::MethodTMlpANN::MethodTMlpANN( DataSetInfo& theData,
                                    const TString& theWeightFile,
                                    TDirectory* theTargetDir ) :
   TMVA::MethodBase( Types::kTMlpANN, theData, theWeightFile, theTargetDir ),
   fLocalTrainingTree(0),
   fValidationFraction(0.5),
   fLearningMethod( "" )
{
   // constructor from weight file
}
   if (fMLP) delete fMLP;
}
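
//_______________________________________________________________________
// CreateMLPOptions: translates the TMVA option string into the
// TMultiLayerPerceptron layout language; first decode the hidden-layer
// specification (e.g. "N,N-1", where N stands for the number of input
// variables)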
   while (layerSpec.Length()>0) {
      TString sToAdd = "";
      if (layerSpec.First(',')<0) {
         sToAdd    = layerSpec;
         layerSpec = "";
      }
      else {
         sToAdd    = layerSpec(0,layerSpec.First(','));
         layerSpec = layerSpec(layerSpec.First(',')+1,layerSpec.Length());
      }
      Int_t nNodes = 0;
      if (sToAdd.BeginsWith("N") || sToAdd.BeginsWith("n")) { sToAdd.Remove(0,1); nNodes = GetNvar(); }
      nNodes += atoi(sToAdd);
      fHiddenLayer = Form( "%s%i:", (const char*)fHiddenLayer, nNodes );
   }
   std::vector<TString>::iterator itrVar    = (*fInputVars).begin();
   std::vector<TString>::iterator itrVarEnd = (*fInputVars).end();
   fMLPBuildOptions = "";
   for (; itrVar != itrVarEnd; itrVar++) {
      if (EnforceNormalization__) fMLPBuildOptions += "@"; // "@" requests input normalization
      TString myVar = *itrVar;
      fMLPBuildOptions += myVar;
      fMLPBuildOptions += ",";
   }
   fMLPBuildOptions.Chop(); // remove the trailing ","

   fMLPBuildOptions += fHiddenLayer;
   fMLPBuildOptions += "type"; // the output node is the branch named "type"
   Log() << kINFO << "Use " << fNcycles << " training cycles" << Endl;
   Log() << kINFO << "Use configuration (nodes per hidden layer): " << fHiddenLayer << Endl;
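
// For example, with four input variables and HiddenLayers="N,N-1" the
// resulting build string would read roughly
// "var1,var2,var3,var4:4:3:type" (the variable names are illustrative).

//_______________________________________________________________________
// DeclareOptions: define the option keywords that can be set in the
// booking string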
   DeclareOptionRef( fNcycles   = 200,     "NCycles",      "Number of training cycles" );
   DeclareOptionRef( fLayerSpec = "N,N-1", "HiddenLayers", "Specification of hidden layer architecture (N stands for number of variables; any integers may also be used)" );

   DeclareOptionRef( fValidationFraction = 0.5, "ValidationFraction",
                     "Fraction of events in training tree used for cross validation" );

   DeclareOptionRef( fLearningMethod = "Stochastic", "LearningMethod", "Learning method" );
   AddPreDefVal( TString("Stochastic") );
   AddPreDefVal( TString("Batch") );
   AddPreDefVal( TString("SteepestDescent") );
   AddPreDefVal( TString("RibierePolak") );
   AddPreDefVal( TString("FletcherReeves") );
   AddPreDefVal( TString("BFGS") );
   CreateMLPOptions(fLayerSpec);
   if (IgnoreEventsWithNegWeightsInTraining()) {
      Log() << kFATAL << "Mechanism to ignore events with negative weights in training not available for method: "
            << GetMethodTypeName()
            << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string."
            << Endl;
   }
   const Event* ev = GetEvent();
   std::vector<Double_t> d( GetNvar() ); // input buffer (reconstructed from context)
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      d[ivar] = (Double_t)ev->GetValue( ivar );
   }
   Double_t mvaVal = fMLP->Evaluate( 0, &d[0] ); // reconstructed from context
   NoErrorCalc(err, errUpper); // no error estimate available
   return mvaVal;
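
//_______________________________________________________________________
// Train: performs the TMlpANN training by handing the events to
// TMultiLayerPerceptron; the sample is first copied into a local tree
// in the format the MLP expects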
   const Long_t basketsize = 128000;
   Float_t* vArr = new Float_t[GetNvar()]; // local buffers (reconstructed from context)
   Int_t    type;
   Float_t  weight;

   TTree *localTrainingTree = new TTree( "TMLPtrain", "Local training tree for TMlpANN" );
   localTrainingTree->Branch( "type",   &type,   "type/I",   basketsize );
   localTrainingTree->Branch( "weight", &weight, "weight/F", basketsize );

   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      const char* myVar = GetInternalVarName(ivar).Data();
      localTrainingTree->Branch( myVar, &vArr[ivar], Form("Var%02i/F", ivar), basketsize );
   }

   for (UInt_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {
      const Event *ev = GetEvent(ievt);
      for (UInt_t i=0; i<GetNvar(); i++) {
         vArr[i] = ev->GetValue( i );
      }
      type   = DataInfo().IsSignal( ev ) ? 1 : 0;
      weight = ev->GetWeight();
      localTrainingTree->Fill();
   }
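
   // Build the selection strings that split the local tree into training
   // and validation subsets; signal events come first in the tree, so the
   // first (1-ValidationFraction) entries of each class are used for training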
   TString trainList = "Entry$<"; // opening of the cut string (reconstructed from context)
   trainList += 1.0-fValidationFraction;
   trainList += "*";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " || (Entry$>";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " && Entry$<";
   trainList += (Int_t)(Data()->GetNEvtSigTrain() + (1.0 - fValidationFraction)*Data()->GetNEvtBkgdTrain());
   trainList += ")";
   // (testList, selecting the complementary events, is built analogously; elided)
   Log() << kINFO << "Requirement for training events: \"" << trainList << "\"" << Endl;
   Log() << kINFO << "Requirement for validation events: \"" << testList << "\"" << Endl;
   if (fMLP != 0) { delete fMLP; fMLP = 0; }
#if ROOT_VERSION_CODE > ROOT_VERSION(5,13,06)
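   // Construct the MLP on the local training tree, using per-event weights;
   // a sketch reconstructed from context (the pre-5.13 branch is elided):
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(),
                                     localTrainingTree,
                                     trainList, testList );
   fMLP->SetEventWeight( "weight" );
#endif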
   fLearningMethod.ToLower();
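   // Map the lower-cased option value onto the TMultiLayerPerceptron
   // learning-method enum; a sketch consistent with the predefined values
   // declared in DeclareOptions():
   TMultiLayerPerceptron::ELearningMethod learningMethod = TMultiLayerPerceptron::kStochastic;
   if      (fLearningMethod == "stochastic"     ) learningMethod = TMultiLayerPerceptron::kStochastic;
   else if (fLearningMethod == "batch"          ) learningMethod = TMultiLayerPerceptron::kBatch;
   else if (fLearningMethod == "steepestdescent") learningMethod = TMultiLayerPerceptron::kSteepestDescent;
   else if (fLearningMethod == "ribierepolak"   ) learningMethod = TMultiLayerPerceptron::kRibierePolak;
   else if (fLearningMethod == "fletcherreeves" ) learningMethod = TMultiLayerPerceptron::kFletcherReeves;
   else if (fLearningMethod == "bfgs"           ) learningMethod = TMultiLayerPerceptron::kBFGS;
   else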
      Log() << kFATAL << "Unknown Learning Method: \"" << fLearningMethod << "\"" << Endl;

   fMLP->SetLearningMethod( learningMethod );
   fMLP->Train( fNcycles, "text,update=50" );
   delete localTrainingTree;
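
//_______________________________________________________________________
// AddWeightsXMLTo: write the network weights to the XML weight file; the
// architecture string is stored as an attribute, then the weights dumped
// by TMultiLayerPerceptron are read back line by line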
   gTools().AddAttr( arch, "BuildOptions", fMLPBuildOptions.Data() );
   fMLP->DumpWeights( "weights/TMlp.nn.weights.temp" );
   std::ifstream inf( "weights/TMlp.nn.weights.temp" );
   char    temp[256];
   TString data(""); // line buffers (reconstructed from context)
   while (inf.getline(temp,256)) {
      TString dummy(temp);
      // ("#..." section headers open a new XML child node; elided here)
      data += (dummy + " ");
   }
   const char* fname = "weights/TMlp.nn.weights.temp";
   std::ofstream fout( fname );
   double temp1 = 0, temp2 = 0;
   const char* nodecontent = gTools().GetContent(ch); // reconstructed from context
   std::stringstream content(nodecontent);
   if (strcmp(gTools().GetName(ch),"input")==0) {
      fout << "#input normalization" << std::endl;
      while ((content >> temp1) && (content >> temp2)) {
         fout << temp1 << " " << temp2 << std::endl;
      }
   }
   if (strcmp(gTools().GetName(ch),"output")==0) {
      fout << "#output normalization" << std::endl;
      while ((content >> temp1) && (content >> temp2)) {
         fout << temp1 << " " << temp2 << std::endl;
      }
   }
   if (strcmp(gTools().GetName(ch),"neurons")==0) {
      fout << "#neurons weights" << std::endl;
      while (content >> temp1) {
         fout << temp1 << std::endl;
      }
   }
   if (strcmp(gTools().GetName(ch),"synapses")==0) {
      fout << "#synapses weights";
      while (content >> temp1) {
         fout << std::endl << temp1;
      }
   }
   TTree * dummyTree = new TTree( "dummy", "Empty dummy tree", 1 );
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetInternalName();
      // (one dummy branch per input variable is attached here; elided)
   }

   if (fMLP != 0) { delete fMLP; fMLP = 0; }
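   // Recreate the network from the stored architecture and load the
   // temporary weight file back into it; a sketch, assuming the build
   // options were restored into fMLPBuildOptions from the XML attribute:
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
   fMLP->LoadWeights( fname );

//_______________________________________________________________________
// ReadWeightsFromStream: the MLP cannot read from a stream directly, so
// the weights are first written to a temporary file, which is then loaded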
   std::ofstream fout( "./TMlp.nn.weights.temp" );
   fout << istr.rdbuf();
   Log() << kINFO << "Load TMLP weights into " << fMLP << Endl;
   Int_t type; // buffer for the "type" branch (reconstructed from context)
   TTree * dummyTree = new TTree( "dummy", "Empty dummy tree", 1 );
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetLabel();
      // (one dummy branch per input variable is attached here; elided)
   }
   dummyTree->Branch( "type", &type, "type/I" );

   if (fMLP != 0) { delete fMLP; fMLP = 0; }
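   // As in the XML path, recreate the network and load the temporary file;
   // a sketch mirroring ReadWeightsFromXML above:
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
   fMLP->LoadWeights( "./TMlp.nn.weights.temp" );

//_______________________________________________________________________
// MakeClass: create a standalone reader class for the classifier;
// overwrites the base-class function, since TMultiLayerPerceptron brings
// its own Export facility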
   TString classFileName = "";
   if (theClassFileName == "")
      classFileName = GetWeightFileDir() + "/" + GetJobName() + "_" + GetMethodName() + ".class";
   else
      classFileName = theClassFileName;

   classFileName.ReplaceAll(".class","");
   Log() << kINFO << "Creating specific (TMultiLayerPerceptron) standalone response class: " << classFileName << Endl;
   fMLP->Export( classFileName.Data() );
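
//_______________________________________________________________________
// GetHelpMessage: get help message text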
   Log() << "This feed-forward multilayer perceptron neural network is the " << Endl;
   Log() << "standard implementation distributed with ROOT (class TMultiLayerPerceptron)." << Endl;
   Log() << "Detailed information is available here:" << Endl;
   if (gConfig().WriteOptionsReference()) {
      Log() << "<a href=\"http://root.cern.ch/root/html/TMultiLayerPerceptron.html\">";
      Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html</a>" << Endl;
   }
   else Log() << "http://root.cern.ch/root/html/TMultiLayerPerceptron.html" << Endl;
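
//_______________________________________________________________________
// Example usage (a sketch, not part of this file): booking the method via
// the TMVA Factory with the options declared above; "factory" and the
// input data are assumed to be set up elsewhere:
//
//    factory->BookMethod( TMVA::Types::kTMlpANN, "TMlpANN",
//                         "NCycles=200:HiddenLayers=N,N-1:LearningMethod=BFGS:ValidationFraction=0.3" );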