// member initialisation in the two constructors (standard constructor and
// constructor from weight file)
fLocalTrainingTree(0),
fValidationFraction(0.5),

fLocalTrainingTree(0),
fValidationFraction(0.5),
fLearningMethod( "" )
 
 
// destructor
if (fMLP) delete fMLP;
 
 
// TMVA::MethodTMlpANN::CreateMLPOptions -- translates the option string into
// the layout string understood by TMultiLayerPerceptron
std::vector<TString>::iterator itrVar    = (*fInputVars).begin();
std::vector<TString>::iterator itrVarEnd = (*fInputVars).end();
fMLPBuildOptions = "";
for (; itrVar != itrVarEnd; ++itrVar) {
   if (EnforceNormalization__) fMLPBuildOptions += "@"; // "@" requests input normalisation
   TString myVar = *itrVar;
   fMLPBuildOptions += myVar;
   fMLPBuildOptions += ",";
}
fMLPBuildOptions.Chop(); // remove the trailing ","

// append the hidden-layer specification and the output-neuron type
fMLPBuildOptions += fHiddenLayer;
fMLPBuildOptions += "type";

Log() << kINFO << "Use " << fNcycles << " training cycles" << Endl;
Log() << kINFO << "Use configuration (nodes per hidden layer): " << fHiddenLayer << Endl;
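// A self-contained sketch of the same assembly, independent of the class
// members above (function name and inputs are illustrative, not TMVA code):
#include "TString.h"
#include <vector>

TString BuildLayout(const std::vector<TString>& vars, const TString& hidden)
{
   TString opts;
   for (const TString& v : vars) {
      opts += "@";     // normalise this input
      opts += v;
      opts += ",";
   }
   opts.Chop();        // drop the trailing ","
   opts += hidden;     // e.g. ":4:3:" -- colons included, as in fHiddenLayer
   opts += "type";     // output-neuron type placeholder
   return opts;        // e.g. "@var1,@var2:4:3:type"
}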
 
 
DeclareOptionRef( fNcycles    = 200,       "NCycles",      "Number of training cycles" );
DeclareOptionRef( fLayerSpec  = "N,N-1",   "HiddenLayers", "Specification of hidden layer architecture (N stands for number of variables; any integers may also be used)" );

DeclareOptionRef( fValidationFraction = 0.5, "ValidationFraction",
                  "Fraction of events in training tree used for cross validation" );

DeclareOptionRef( fLearningMethod = "Stochastic", "LearningMethod", "Learning method" );
AddPreDefVal( TString("Stochastic") );
AddPreDefVal( TString("Batch") );
AddPreDefVal( TString("SteepestDescent") );
AddPreDefVal( TString("RibierePolak") );
AddPreDefVal( TString("FletcherReeves") );
AddPreDefVal( TString("BFGS") );
 
 
// TMVA::MethodTMlpANN::ProcessOptions -- builds the neural network as
// specified by the user
CreateMLPOptions(fLayerSpec);

if (IgnoreEventsWithNegWeightsInTraining()) {
   Log() << kFATAL << "Mechanism to ignore events with negative weights in training not available for method: "
         << GetMethodTypeName()
         << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string." << Endl;
}
 
// TMVA::MethodTMlpANN::Train -- copy the input events into a local training
// tree, then define the train/validation split.
// Inside the loop over training events (ev = GetEvent(ievt)):
for (UInt_t i=0; i<GetNvar(); i++) {
   vArr[i] = ev->GetValue( i );
}
type   = DataInfo().IsSignal( ev ) ? 1 : 0;
weight = ev->GetWeight();

// build the training selection string (the preceding lines start it off with
// "Entry$<" etc.); the last term is:
trainList += (Int_t)(Data()->GetNEvtSigTrain() + (1.0 - fValidationFraction)*Data()->GetNEvtBkgdTrain());
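// Sketch of the surrounding bookkeeping (reconstructed in spirit, not
// verbatim): the events land in a local tree with one branch per input
// variable plus "type" and "weight", and the split is a selection on the
// special "Entry$" (entry index) tree variable.
#include "TTree.h"
#include "TString.h"

TTree* MakeLocalTree(Int_t& type, Float_t& weight, Float_t* vArr, Int_t nvar)
{
   TTree* t = new TTree( "TMLPtrain", "Local training tree for TMlpANN" );
   t->Branch( "type",   &type,   "type/I" );
   t->Branch( "weight", &weight, "weight/F" );
   for (Int_t i = 0; i < nvar; i++)
      t->Branch( Form("var%d", i), &vArr[i], Form("var%d/F", i) ); // illustrative names
   return t;
}
// With 1000 signal entries followed by 1000 background entries and
// fValidationFraction = 0.5, trainList selects the first half of each block,
// e.g. "Entry$<500 || (Entry$>1000 && Entry$<1500)"; the validation list is
// its negation.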
 
Log() << kHEADER << "Requirement for training   events: \"" << trainList << "\"" << Endl;
Log() << kINFO << "Requirement for validation events: \"" << testList << "\"" << Endl;

// (re)build the network on the local training tree
if (fMLP) { delete fMLP; fMLP = nullptr; }
fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), localTrainingTree, trainList, testList );
fMLP->SetEventWeight( "weight" );

// set the learning method (matched case-insensitively against the predefined values)
TMultiLayerPerceptron::ELearningMethod learningMethod = TMultiLayerPerceptron::kStochastic;
fLearningMethod.ToLower();
// ... if/else chain mapping fLearningMethod onto the ELearningMethod values;
//     an unrecognised string is fatal:
Log() << kFATAL << "Unknown Learning Method: \"" << fLearningMethod << "\"" << Endl;
fMLP->SetLearningMethod( learningMethod );

// train the network
fMLP->Train(fNcycles, "" );
 
 
// TMVA::MethodTMlpANN::AddWeightsXMLTo -- dump the weights to a temporary
// text file first, then parse that file into the xml weight node
fMLP->DumpWeights( tmpfile.Data() );

// ... std::ifstream inf(tmpfile); char temp[256]; TString data(""); then:
while (inf.getline(temp,256)) {
   TString dummy(temp);
   if (dummy.BeginsWith('#')) {                    // a '#' line opens a new section
      dummy = dummy.Strip(TString::kLeading, '#'); // drop the '#'
      dummy = dummy(0,dummy.First(' '));           // keep the key word, e.g. "neurons"
      // ... open a new xml child node named after the key word ...
      continue;
   }
   data += (dummy + " ");                          // accumulate the section's numbers
}
 
 
// TMVA::MethodTMlpANN::ReadWeightsFromXML -- rebuild the temporary text file
// from the xml weight file and load it into the MLP
const TString fname = GetWeightFileDir()+"/TMlp.nn.weights.temp";
// ... for each xml child node, write the matching section header and payload:
fout << "#input normalization" << std::endl;
// ...
fout << "#output normalization" << std::endl;
// ...
fout << "#neurons weights" << std::endl;
// ...
fout << "#synapses weights" ;
// ... then rebuild the network on a dummy tree that declares the input
// variables; inside the loop over input variables:
TString vn = DataInfo().GetVariableInfo(ivar).GetInternalName();
// ...
if (fMLP) { delete fMLP; fMLP = nullptr; }
// ...
fMLP->LoadWeights( fname );
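// The round trip used by the xml I/O above, reduced to its essentials
// (sketch; both networks must share the same structure string):
#include "TMultiLayerPerceptron.h"

void CopyWeights(TMultiLayerPerceptron& from, TMultiLayerPerceptron& to)
{
   from.DumpWeights( "TMlp.nn.weights.temp" ); // network -> text file
   to.LoadWeights( "TMlp.nn.weights.temp" );   // text file -> network
}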
 
 
// TMVA::MethodTMlpANN::ReadWeightsFromStream(std::istream& istr) -- the MLP
// cannot read from a stream, so the weights are first spooled to a temporary file
std::ofstream fout( "./TMlp.nn.weights.temp" );
// ...
Log() << kINFO << "Load TMLP weights into " << fMLP << Endl;
// ...
if (fMLP) { delete fMLP; fMLP = nullptr; }
// ...
fMLP->LoadWeights( "./TMlp.nn.weights.temp" );
 
 
// TMVA::MethodTMlpANN::MakeClass -- create a standalone response class;
// overwrites the base-class version because TMultiLayerPerceptron can export
// its own response code. If no file name was given:
classFileName = GetWeightFileDir() + "/" + GetJobName() + "_" + GetMethodName() + ".class";

Log() << kINFO << "Creating specific (TMultiLayerPerceptron) standalone response class: " << classFileName << Endl;
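// The code generation itself is delegated to ROOT (sketch; the file stem
// "MyNNResponse" is illustrative):
#include "TMultiLayerPerceptron.h"

void ExportResponse(TMultiLayerPerceptron& mlp)
{
   // generates ROOT-independent source implementing the network response;
   // C++, Python and FORTRAN are the supported target languages
   mlp.Export( "MyNNResponse", "C++" );
}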
 
 
Log() << "This feed-forward multilayer perceptron neural network is the " << Endl;
Log() << "standard implementation distributed with ROOT (class TMultiLayerPerceptron)." << Endl;

Log() << "Detailed information is available here:" << Endl;
if (gConfig().WriteOptionsReference()) {
   Log() << "<a href=\"https://root.cern/doc/master/classTMultiLayerPerceptron.html\">";
   Log() << "https://root.cern/doc/master/classTMultiLayerPerceptron.html</a>" << Endl;
}
else Log() << "https://root.cern/doc/master/classTMultiLayerPerceptron.html" << Endl;
 
 
This is the TMVA TMultiLayerPerceptron interface class (TMVA::MethodTMlpANN). Member summary:

MethodTMlpANN(const TString &jobName, const TString &methodTitle, DataSetInfo &theData, const TString &theOption="3000:N-1:N-2")
standard constructor

virtual ~MethodTMlpANN(void)
destructor

void Init(void)
default initialisations

virtual Bool_t HasAnalysisType(Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets)
TMlpANN can handle classification with two classes

void DeclareOptions()
define the options (their key words) that can be set in the option string

void ProcessOptions()
builds the neural network as specified by the user

void CreateMLPOptions(TString)
translates options from the option string into TMlpANN language

void Train(void)
performs TMlpANN training; the available learning methods are Stochastic, Batch, SteepestDescent, RibierePolak, FletcherReeves and BFGS

Double_t GetMvaValue(Double_t *err=nullptr, Double_t *errUpper=nullptr)
calculate the value of the neural net for the current event

void AddWeightsXMLTo(void *parent) const
write weights to the xml file

void ReadWeightsFromXML(void *wghtnode)
rebuild the temporary text file from the xml weight file and load this file into the MLP

void ReadWeightsFromStream(std::istream &istr)
read weights from a stream; since the MLP cannot read from a stream, the weights are first written to a temporary file

void MakeClass(const TString &classFileName=TString("")) const
create a reader class for the classifier; overwrites the base-class function to create a specific class for TMultiLayerPerceptron

void MakeClassSpecific(std::ostream &, const TString &) const
write the specific classifier response; nothing to do here, all taken care of by TMultiLayerPerceptron

void GetHelpMessage() const
get help message text
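// Applying a trained TMlpANN method with TMVA::Reader (sketch; variable names
// and the weight-file path follow the standard TMVA tutorial layout and are
// assumptions):
#include "TMVA/Reader.h"
#include <iostream>

void ApplyTMlpANN()
{
   TMVA::Reader reader( "!Color:!Silent" );
   Float_t var1 = 0, var2 = 0;
   reader.AddVariable( "var1", &var1 );
   reader.AddVariable( "var2", &var2 );
   reader.BookMVA( "TMlpANN", "dataset/weights/TMVAClassification_TMlpANN.weights.xml" );

   var1 = 0.5;  var2 = -1.2;                       // event values (illustrative)
   Double_t mva = reader.EvaluateMVA( "TMlpANN" ); // network response for this event
   std::cout << "TMlpANN response: " << mva << std::endl;
}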