#ifndef ROOT_TMVA_MethodMLP
#define ROOT_TMVA_MethodMLP

#define MethodMLP_UseMinuit__
#undef MethodMLP_UseMinuit__

void Train() override;

void Init() override;

#ifdef MethodMLP_UseMinuit__
Base class for all TMVA methods using artificial neural networks.
Multilayer Perceptron class built on top of MethodANNBase.
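A usage sketch (the file name, dataset name, and option values below are illustrative, not prescriptive) showing how this method is typically booked through the TMVA Factory:

   #include "TFile.h"
   #include "TMVA/Factory.h"
   #include "TMVA/DataLoader.h"
   #include "TMVA/Types.h"

   void train_mlp_sketch() {
      auto *output  = TFile::Open("TMVA_MLP.root", "RECREATE");
      auto *factory = new TMVA::Factory("TMVAClassification", output, "!V:AnalysisType=Classification");
      auto *loader  = new TMVA::DataLoader("dataset");
      // ... declare input variables and add signal/background trees to 'loader' here ...
      factory->BookMethod(loader, TMVA::Types::kMLP, "MLP",
         "H:!V:NeuronType=tanh:NCycles=600:HiddenLayers=N+5:TestRate=5:"
         "TrainingMethod=BP:BPMode=sequential");
      factory->TrainAllMethods();
      factory->TestAllMethods();
      factory->EvaluateAllMethods();
      output->Close();
   }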
Int_t fResetStep
reset interval (how often the Hessian matrix is cleared)
bool fCalculateErrors
compute the inverse Hessian matrix at the end of training
std::vector< std::pair< Float_t, Float_t > > * fDeviationsFromTargets
pairs of (deviation from target, event weight)
Double_t fTau
line search variable
Int_t fGA_SC_rate
GA settings: SC_rate.
void GetHelpMessage() const override
get help message text
Float_t fWeightRange
suppress outliers for the estimator calculation
Float_t fSamplingWeight
changing factor for event weights when sampling is turned on
Int_t fBatchSize
batch size, only matters if in batch learning mode
void BackPropagationMinimize(Int_t nEpochs)
minimize estimator / train network with back propagation algorithm
Double_t GetMSEErr(const Event *ev, UInt_t index=0)
compute the mean-squared-error contribution of one event
void MakeClassSpecific(std::ostream &, const TString &) const override
write specific classifier response
void DeclareOptions() override
define the options (their key words) that can be set in the option string
void AdjustSynapseWeights()
just adjust the synapse weights (should be called in batch mode)
std::vector< Double_t > fPriorDev
prior deviations used by the Bayesian regulators
TString fBpModeS
backprop learning mode option string (sequential or batch)
void SteepestDir(TMatrixD &Dir)
set Dir to the steepest-descent direction (negative gradient)
void TrainOneEpoch()
train network over a single epoch/cycle of events
TString fTrainMethodS
training method option string (BP, BFGS, or GA)
Int_t fGA_nsteps
GA settings: number of steps.
Bool_t GetHessian(TMatrixD &Hessian, TMatrixD &Gamma, TMatrixD &Delta)
update the approximate (inverse) Hessian from Gamma and Delta (BFGS update)
Double_t ComputeEstimator(std::vector< Double_t > &parameters)
this function is called by GeneticANN for GA optimization
static const Bool_t fgPRINT_BATCH
debug flags
void InitializeLearningRates()
initialize learning rates of synapses, used only by back propagation
Int_t fGA_preCalc
GA settings: number of pre-calc steps.
void CalculateNeuronDeltas()
have each neuron calculate its delta by back propagation
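In the standard textbook form (assuming activation function f), the back-propagated delta of a hidden neuron j is delta_j = f'(net_j) * sum_k w_jk * delta_k, where k runs over the neurons of the following layer; for an output neuron, delta = f'(net) * (output - desired).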
Int_t fTestRate
test for overtraining performed at each #-th epoch
Double_t fDecayRate
decay rate for above learning rate
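A minimal sketch of what such a per-epoch decay amounts to (the exact update rule is an assumption here):

   double learnRate = 0.02, decayRate = 0.01;
   for (int epoch = 0; epoch < 600; ++epoch)
      learnRate *= (1.0 - decayRate);   // geometric decay: ever smaller steps late in training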
ETrainingMethod fTrainingMethod
method of training (BP, BFGS, or GA)
Double_t EstimatorFunction(std::vector< Double_t > &parameters) override
interface to the estimator
EBPTrainingMode fBPMode
backprop learning mode (sequential or batch)
Double_t DerivDir(TMatrixD &Dir)
directional derivative of the estimator along Dir
Double_t fGA_SC_factor
GA settings: SC_factor.
Double_t GetCEErr(const Event *ev, UInt_t index=0)
compute the cross-entropy error contribution of one event
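A hedged sketch of the two per-event error measures named by GetMSEErr and GetCEErr above (textbook definitions; TMVA's exact weighting and normalization conventions may differ):

   #include <cmath>

   // squared-error contribution of one (output, target) pair
   double MseErrSketch(double output, double target) {
      const double d = output - target;
      return d * d;
   }

   // binary cross-entropy; 'output' is assumed to lie strictly in (0,1)
   double CeErrSketch(double output, double target) {
      return -(target * std::log(output) + (1.0 - target) * std::log(1.0 - output));
   }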
virtual ~MethodMLP()
destructor; nothing to be done
Int_t fGA_SC_steps
GA settings: SC_steps.
void SetDir(TMatrixD &Hessian, TMatrixD &Dir)
set the search direction Dir from the approximate (inverse) Hessian and the gradient
void Shuffle(Int_t *index, Int_t n)
shuffle the array of event indices in place; used to randomize the training order in sequential mode
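A self-contained sketch of the kind of in-place index shuffle this performs (Fisher-Yates; the use of std::mt19937 is an assumption, not TMVA's actual generator):

   #include <random>
   #include <utility>

   void ShuffleSketch(int *index, int n) {
      std::mt19937 rng(42);                               // fixed seed, for reproducibility
      for (int i = n - 1; i > 0; --i) {
         std::uniform_int_distribution<int> pick(0, i);   // j uniform in [0, i]
         std::swap(index[i], index[pick(rng)]);
      }
   }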
Bool_t fSamplingTraining
The training sample is sampled.
void SimulateEvent(const Event *ev)
void SetDirWeights(std::vector< Double_t > &Origin, TMatrixD &Dir, Double_t alpha)
set the synapse weights to Origin + alpha * Dir (step along the search direction)
void SetGammaDelta(TMatrixD &Gamma, TMatrixD &Delta, std::vector< Double_t > &Buffer)
compute Gamma (change of the gradients) and Delta (change of the weights) for the BFGS update
Double_t fLearnRate
learning rate for synapse weight adjustments
void GetApproxInvHessian(TMatrixD &InvHessian, bool regulate=true)
rank-1 approximation of the inverse Hessian, neglecting second derivatives
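Schematically (the exact normalization is an assumption): InvHessian ≈ (sum over training events of w_ev * g g^T + epsilon * I)^{-1}, where g is the gradient of the network output with respect to the synapse weights and the epsilon * I term is the regularization added when regulate is true.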
void BFGSMinimize(Int_t nEpochs)
train network with BFGS algorithm
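For reference, the textbook BFGS update of the inverse-Hessian estimate H that SetGammaDelta and GetHessian feed into, with delta = w_new - w_old (weight change) and gamma = g_new - g_old (gradient change):

   H' = H + (1 + gamma^T H gamma / (delta^T gamma)) * (delta delta^T) / (delta^T gamma)
          - (delta gamma^T H + H gamma delta^T) / (delta^T gamma)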
void UpdateSynapses()
update synapse error fields and adjust the weights (if in sequential mode)
static const Int_t fgPRINT_ESTIMATOR_INC
debug flags
Float_t fSamplingFraction
fraction of events sampled for training
void TrainOneEvent(Int_t ievt)
train network over a single event; this uses the new event model
Double_t GetDesiredOutput(const Event *ev)
get the desired output of this event
void GeneticMinimize()
create genetics class similar to GeneticCut; give it a vector of parameter ranges (parameters = weights)...
Bool_t fSamplingTesting
The testing sample is sampled.
Double_t fLastAlpha
line search variable
void Init() override
default initializations
Float_t fSamplingEpoch
fraction of epochs where sampling is used
Bool_t HasAnalysisType(Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets) override
MLP can handle classification with 2 classes and regression with one regression-target.
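A sketch consistent with that statement (not the verbatim implementation, which may accept further analysis types):

   #include "TMVA/Types.h"

   Bool_t HasAnalysisTypeSketch(TMVA::Types::EAnalysisType type, UInt_t nClasses, UInt_t nTargets) {
      if (type == TMVA::Types::kClassification && nClasses == 2) return kTRUE;
      if (type == TMVA::Types::kRegression && nTargets == 1) return kTRUE;
      return kFALSE;
   }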
void DecaySynapseWeights(Bool_t lateEpoch)
decay synapse weights; in the last 10 epochs the learning rate is lowered even further to find a good minimum
void TrainOneEventFast(Int_t ievt, Float_t *&branchVar, Int_t &type)
fast per-event training
Bool_t fEpochMon
create and fill epoch-wise monitoring histograms (makes the output file big!)
void UpdateNetwork(Double_t desired, Double_t eventWeight=1.0)
update the network based on how closely the output matched the desired output
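An illustrative delta-rule update for a single linear neuron, showing how the desired output and the event weight enter (a simplified stand-in for the actual multi-layer update):

   #include <cstddef>
   #include <vector>

   void UpdateSketch(std::vector<double> &w, const std::vector<double> &x,
                     double desired, double output, double learnRate,
                     double eventWeight = 1.0) {
      const double step = learnRate * eventWeight * (desired - output);
      for (std::size_t i = 0; i < w.size(); ++i)
         w[i] += step * x[i];   // move each weight along its input coordinate
   }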
MethodMLP(const TString &jobName, const TString &methodTitle, DataSetInfo &theData, const TString &theOption)
standard constructor
void UpdateRegulators()
update the Bayesian regulators
Bool_t LineSearch(TMatrixD &Dir, std::vector< Double_t > &Buffer, Double_t *dError=nullptr)
perform a line search along the direction Dir
Double_t GetMvaValue(Double_t *err=nullptr, Double_t *errUpper=nullptr) override
get the mva value generated by the NN
Double_t CalculateEstimator(Types::ETreeType treeType=Types::kTraining, Int_t iEpoch=-1)
calculate the estimator that training is attempting to minimize
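Schematically (assuming the usual weighted-mean convention): E = sum_i w_i * err(ev_i) / sum_i w_i, with err the per-event error (mean squared error or cross-entropy) and w_i the event weight; fWeightRange < 1 presumably excludes the worst-deviating fraction of events from this sum to suppress outliers.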
void ProcessOptions() override
process user options
static const Bool_t fgPRINT_SEQ
debug flags