TMultiLayerPerceptron.h
// @(#)root/mlp:$Id$
// Author: Christophe.Delaere@cern.ch   20/07/03

/*************************************************************************
 * Copyright (C) 1995-2003, Rene Brun and Fons Rademakers.               *
 * All rights reserved.                                                  *
 *                                                                       *
 * For the licensing terms see $ROOTSYS/LICENSE.                         *
 * For the list of contributors see $ROOTSYS/README/CREDITS.             *
 *************************************************************************/

#ifndef ROOT_TMultiLayerPerceptron
#define ROOT_TMultiLayerPerceptron

#include "TObject.h"
#include "TString.h"
#include "TObjArray.h"
#include "TMatrixD.h"
#include "TNeuron.h"

class TTree;
class TEventList;
class TTreeFormula;
class TTreeFormulaManager;

class TMultiLayerPerceptron : public TObject {
 friend class TMLPAnalyzer;

 public:
   enum ELearningMethod { kStochastic, kBatch, kSteepestDescent,
                          kRibierePolak, kFletcherReeves, kBFGS };
   enum EDataSet { kTraining, kTest };
   TMultiLayerPerceptron();
   TMultiLayerPerceptron(const char* layout, TTree* data = 0,
                         const char* training = "Entry$%2==0",
                         const char* test = "",
                         TNeuron::ENeuronType type = TNeuron::kSigmoid,
                         const char* extF = "", const char* extD = "");
   TMultiLayerPerceptron(const char* layout,
                         const char* weight, TTree* data = 0,
                         const char* training = "Entry$%2==0",
                         const char* test = "",
                         TNeuron::ENeuronType type = TNeuron::kSigmoid,
                         const char* extF = "", const char* extD = "");
   TMultiLayerPerceptron(const char* layout, TTree* data,
                         TEventList* training,
                         TEventList* test,
                         TNeuron::ENeuronType type = TNeuron::kSigmoid,
                         const char* extF = "", const char* extD = "");
   TMultiLayerPerceptron(const char* layout,
                         const char* weight, TTree* data,
                         TEventList* training,
                         TEventList* test,
                         TNeuron::ENeuronType type = TNeuron::kSigmoid,
                         const char* extF = "", const char* extD = "");
   virtual ~TMultiLayerPerceptron();
   void SetData(TTree*);
   void SetTrainingDataSet(TEventList* train);
   void SetTestDataSet(TEventList* test);
   void SetTrainingDataSet(const char* train);
   void SetTestDataSet(const char* test);
   void SetLearningMethod(TMultiLayerPerceptron::ELearningMethod method);
   void SetEventWeight(const char*);
   void Train(Int_t nEpoch, Option_t* option = "text", Double_t minE=0);
   Double_t Result(Int_t event, Int_t index = 0) const;
   Double_t GetError(Int_t event) const;
   Double_t GetError(TMultiLayerPerceptron::EDataSet set) const;
   void ComputeDEDw() const;
   void Randomize() const;
   void SetEta(Double_t eta);
   void SetEpsilon(Double_t eps);
   void SetDelta(Double_t delta);
   void SetEtaDecay(Double_t ed);
   void SetTau(Double_t tau);
   void SetReset(Int_t reset);
   inline Double_t GetEta() const { return fEta; }
   inline Double_t GetEpsilon() const { return fEpsilon; }
   inline Double_t GetDelta() const { return fDelta; }
   inline Double_t GetEtaDecay() const { return fEtaDecay; }
   TMultiLayerPerceptron::ELearningMethod GetLearningMethod() const { return fLearningMethod; }
   inline Double_t GetTau() const { return fTau; }
   inline Int_t GetReset() const { return fReset; }
   inline TString GetStructure() const { return fStructure; }
   inline TNeuron::ENeuronType GetType() const { return fType; }
   void DrawResult(Int_t index = 0, Option_t* option = "test") const;
   Bool_t DumpWeights(Option_t* filename = "-") const;
   Bool_t LoadWeights(Option_t* filename = "");
   Double_t Evaluate(Int_t index, Double_t* params) const;
   void Export(Option_t* filename = "NNfunction", Option_t* language = "C++") const;
   virtual void Draw(Option_t *option="");

 protected:
   void AttachData();
   void BuildNetwork();
   void GetEntry(Int_t) const;
   // it's a choice not to force learning function being const, even if possible
   void MLP_Stochastic(Double_t*);
   void MLP_Batch(Double_t*);
   Bool_t LineSearch(Double_t*, Double_t*);
   void SteepestDir(Double_t*);
   void ConjugateGradientsDir(Double_t*, Double_t);
   void SetGammaDelta(TMatrixD&, TMatrixD&, Double_t*);
   bool GetBFGSH(TMatrixD&, TMatrixD&, TMatrixD&);
   void BFGSDir(TMatrixD&, Double_t*);
   void MLP_Line(Double_t*, Double_t*, Double_t);
   Double_t DerivDir(Double_t*);
   Double_t GetSumSquareError() const;
   Double_t GetCrossEntropyBinary() const;
   Double_t GetCrossEntropy() const;

 private:
   TMultiLayerPerceptron(const TMultiLayerPerceptron&);
   TMultiLayerPerceptron& operator=(const TMultiLayerPerceptron&);
   void ExpandStructure();
   void BuildFirstLayer(TString&);
   void BuildHiddenLayers(TString&);
   void BuildOneHiddenLayer(const TString& sNumNodes, Int_t& layer,
                            Int_t& prevStart, Int_t& prevStop,
                            Bool_t lastLayer);
   void BuildLastLayer(TString&, Int_t);
   void Shuffle(Int_t*, Int_t) const;

   TTree* fData;                     ///<! pointer to the tree used as datasource
   Int_t fCurrentTree;               ///<! index of the current tree in a chain
   Double_t fCurrentTreeWeight;      ///<! weight of the current tree in a chain
   TObjArray fNetwork;               ///< Collection of all the neurons in the network
   TObjArray fFirstLayer;            ///< Collection of the input neurons; subset of fNetwork
   TObjArray fLastLayer;             ///< Collection of the output neurons; subset of fNetwork
   TObjArray fSynapses;              ///< Collection of all the synapses in the network
   TString fStructure;               ///< String containing the network structure
   TString fWeight;                  ///< String containing the event weight
   TNeuron::ENeuronType fType;       ///< Type of hidden neurons
   TNeuron::ENeuronType fOutType;    ///< Type of output neurons
   TString fextF;                    ///< String containing the function name
   TString fextD;                    ///< String containing the derivative name
   TEventList *fTraining;            ///<! EventList defining the events in the training dataset
   TEventList *fTest;                ///<! EventList defining the events in the test dataset
   ELearningMethod fLearningMethod;  ///<! The Learning Method
   TTreeFormula* fEventWeight;       ///<! formula representing the event weight
   TTreeFormulaManager* fManager;    ///<! TTreeFormulaManager for the weight and neurons
   Double_t fEta;                    ///<! Eta - used in stochastic minimisation - Default=0.1
   Double_t fEpsilon;                ///<! Epsilon - used in stochastic minimisation - Default=0.
   Double_t fDelta;                  ///<! Delta - used in stochastic minimisation - Default=0.
   Double_t fEtaDecay;               ///<! EtaDecay - Eta *= EtaDecay at each epoch - Default=1.
   Double_t fTau;                    ///<! Tau - used in line search - Default=3.
   Double_t fLastAlpha;              ///<! internal parameter used in line search
   Int_t fReset;                     ///<! number of epochs between two resets of the search direction to the steepest descent - Default=50
   Bool_t fTrainingOwner;            ///<! internal flag whether one has to delete fTraining or not
   Bool_t fTestOwner;                ///<! internal flag whether one has to delete fTest or not

   ClassDef(TMultiLayerPerceptron, 4) // a Neural Network
};

#endif
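Taken together, the constructors and the Train/Evaluate methods above form the typical workflow for this class. The following is a minimal, hedged sketch of that workflow; the tree name, the branch names (x, y, type), the chosen layout string and the training options are illustrative assumptions, not values prescribed by this header.

// Hedged usage sketch: assumes a TTree* "simu" with numeric branches
// "x", "y" (inputs) and "type" (desired output); all names are illustrative.
#include <iostream>
#include "TTree.h"
#include "TMultiLayerPerceptron.h"

void mlpSketch(TTree *simu)
{
   // Layout string: input variables separated by commas, layers by colons,
   // the last field being the output ("x,y:5:3:type" = 2-5-3-1 network).
   TMultiLayerPerceptron mlp("x,y:5:3:type", simu,
                             "Entry$%2==0",   // training set: even entries
                             "Entry$%2==1");  // test set: odd entries
   mlp.SetLearningMethod(TMultiLayerPerceptron::kBFGS);
   mlp.Train(100, "text");                    // 100 epochs with text feedback

   Double_t params[2] = { 0.5, -1.2 };        // one point in input space
   std::cout << "NN output: " << mlp.Evaluate(0, params) << std::endl;
}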
This class describes a neural network.
TTreeFormula * fEventWeight
! formula representing the event weight
void BuildOneHiddenLayer(const TString &sNumNodes, Int_t &layer, Int_t &prevStart, Int_t &prevStop, Bool_t lastLayer)
Builds a hidden layer, updates the number of layers.
void SteepestDir(Double_t *)
Sets the search direction to steepest descent.
void BuildNetwork()
Instantiates the network from the description.
TObjArray fNetwork
Collection of all the neurons in the network.
Double_t Evaluate(Int_t index, Double_t *params) const
Returns the Neural Net output for a given set of input parameters; the number of parameters must equal the number of input neurons.
TEventList * fTest
! EventList defining the events in the test dataset
bool GetBFGSH(TMatrixD &, TMatrixD &, TMatrixD &)
Computes the hessian matrix using the BFGS update algorithm.
void BuildHiddenLayers(TString &)
Builds hidden layers.
void BuildFirstLayer(TString &)
Instantiates the input neurons. Inputs are normalised and the type is set to kOff (simple forward projection of the formula value).
void SetTau(Double_t tau)
Sets Tau - used in line search (look at the constructor for the complete description of learning methods and parameters).
TMultiLayerPerceptron()
Default constructor.
Double_t GetSumSquareError() const
Error on the output for a given event.
void ConjugateGradientsDir(Double_t *, Double_t)
Sets the search direction to the conjugate gradient direction; beta is given by the Fletcher-Reeves or Polak-Ribiere formula, depending on the learning method.
Double_t fTau
! Tau - used in line search - Default=3.
TTree * fData
! pointer to the tree used as datasource
Double_t Result(Int_t event, Int_t index=0) const
Computes the output for a given event.
void SetGammaDelta(TMatrixD &, TMatrixD &, Double_t *)
Sets the gamma and delta vectors. Gamma is computed here, so ComputeDEDw cannot have been called before.
TEventList * fTraining
! EventList defining the events in the training dataset
TString fStructure
String containing the network structure.
Int_t fReset
! number of epochs between two resets of the search direction to the steepest descent - Default=50
Bool_t LoadWeights(Option_t *filename="")
Loads the weights from a text file conforming to the format defined by DumpWeights.
void MLP_Batch(Double_t *)
One step for the batch method.
TNeuron::ENeuronType fOutType
Type of output neurons.
Double_t fCurrentTreeWeight
! weight of the current tree in a chain
ELearningMethod fLearningMethod
! The Learning Method
Double_t fLastAlpha
! internal parameter used in line search
Int_t fCurrentTree
! index of the current tree in a chain
Double_t GetEtaDecay() const
void Export(Option_t *filename="NNfunction", Option_t *language="C++") const
Exports the NN as a function for any non-ROOT-dependent code. Supported languages are C++, FORTRAN and Python (a short usage sketch is given at the end of this page).
Double_t fEpsilon
! Epsilon - used in stochastic minimisation - Default=0.
void Train(Int_t nEpoch, Option_t *option="text", Double_t minE=0)
Train the network.
TNeuron::ENeuronType GetType() const
void BFGSDir(TMatrixD &, Double_t *)
Computes the direction for the BFGS algorithm as the product between the Hessian estimate (bfgsh) and the direction.
void SetTestDataSet(TEventList *test)
Sets the Test dataset.
Bool_t fTrainingOwner
! internal flag whether one has to delete fTraining or not
void SetLearningMethod(TMultiLayerPerceptron::ELearningMethod method)
Sets the learning method.
void SetTrainingDataSet(TEventList *train)
Sets the Training dataset.
TMultiLayerPerceptron & operator=(const TMultiLayerPerceptron &)
void BuildLastLayer(TString &, Int_t)
Builds the output layer. Neurons are linear combinations of the inputs by default.
Double_t fDelta
! Delta - used in stochastic minimisation - Default=0.
TTreeFormulaManager * fManager
! TTreeFormulaManager for the weight and neurons
void Randomize() const
Randomize the weights.
Bool_t LineSearch(Double_t *, Double_t *)
Search along the line defined by direction.
virtual void Draw(Option_t *option="")
Draws the network structure.
void ExpandStructure()
Expand the structure of the first layer.
Double_t fEta
! Eta - used in stochastic minimisation - Default=0.1
Double_t GetError(Int_t event) const
Error on the output for a given event.
TMultiLayerPerceptron::ELearningMethod GetLearningMethod() const
Double_t fEtaDecay
! EtaDecay - Eta *= EtaDecay at each epoch - Default=1.
void SetEtaDecay(Double_t ed)
Sets EtaDecay - Eta *= EtaDecay at each epoch (look at the constructor for the complete description of learning methods and parameters).
void AttachData()
Connects the TTree to Neurons in input and output layers.
TMultiLayerPerceptron(const TMultiLayerPerceptron &)
void SetData(TTree *)
Set the data source.
void SetEventWeight(const char *)
Set the event weight.
Bool_t DumpWeights(Option_t *filename="-") const
Dumps the weights to a text file.
TString fWeight
String containing the event weight.
void SetDelta(Double_t delta)
Sets Delta - used in stochastic minimisation (look at the constructor for the complete description of learning methods and parameters).
Double_t GetCrossEntropy() const
Cross entropy error for a softmax output neuron, for a given event.
void SetReset(Int_t reset)
Sets number of epochs between two resets of the search direction to the steepest descent.
Bool_t fTestOwner
! internal flag whether one has to delete fTest or not
void Shuffle(Int_t *, Int_t) const
Shuffles the Int_t index[n] array given as input.
virtual ~TMultiLayerPerceptron()
Destructor.
Double_t DerivDir(Double_t *)
Scalar product between the gradient and the direction, i.e. the derivative along the direction.
void MLP_Stochastic(Double_t *)
One step for the stochastic method; buffer should contain the previous dw vector and will be updated.
TObjArray fSynapses
Collection of all the synapses in the network.
void MLP_Line(Double_t *, Double_t *, Double_t)
Sets the weights to a point along a line: weights are set to [origin + (dist * dir)].
TNeuron::ENeuronType fType
Type of hidden neurons.
TObjArray fLastLayer
Collection of the output neurons; subset of fNetwork.
TString fextD
String containing the derivative name.
void ComputeDEDw() const
Computes DEDw = the sum over all training events of dedw for each weight, normalized by the number of events.
Double_t GetCrossEntropyBinary() const
Cross entropy error for sigmoid output neurons, for a given event.
void DrawResult(Int_t index=0, Option_t *option="test") const
Draws the neural net output. It produces a histogram with the output for the two datasets.
void SetEta(Double_t eta)
Sets Eta - used in stochastic minimisation (look at the constructor for the complete description of learning methods and parameters).
TObjArray fFirstLayer
Collection of the input neurons; subset of fNetwork.
void GetEntry(Int_t) const
Load an entry into the network.
void SetEpsilon(Double_t eps)
Sets Epsilon - used in stochastic minimisation (look at the constructor for the complete description of learning methods and parameters).
TString fextF
String containing the function name.
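To illustrate the persistence-related entries above (DumpWeights, LoadWeights and Export), the following is a short, hedged sketch for an already trained network; the weight file name and the export base name are placeholders chosen for illustration.

// Hedged persistence sketch: "mlp" is assumed to be a trained network;
// "weights.txt" and "NNfunction" are placeholder names.
#include "TMultiLayerPerceptron.h"

void persistSketch(TMultiLayerPerceptron &mlp)
{
   mlp.DumpWeights("weights.txt");   // write the current weights to a text file
   mlp.LoadWeights("weights.txt");   // read weights back in the same format
   mlp.Export("NNfunction", "C++");  // generate standalone C++ code that
                                     // evaluates the network without ROOT
}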