#ifndef ROOT_TMVA_MethodMLP
#define ROOT_TMVA_MethodMLP
#include <vector>
#include "TString.h"
#include "TTree.h"
#include "TObjArray.h"
#include "TRandom3.h"
#include "TH1F.h"
#ifndef ROOT_TMVA_IFitterTarget
#include "TMVA/IFitterTarget.h"
#endif
#ifndef ROOT_TMVA_MethodBase
#include "TMVA/MethodBase.h"
#endif
#ifndef ROOT_TMVA_MethodANNBase
#include "TMVA/MethodANNBase.h"
#endif
#ifndef ROOT_TMVA_TNeuron
#include "TMVA/TNeuron.h"
#endif
#ifndef ROOT_TMVA_TActivation
#include "TMVA/TActivation.h"
#endif
// Minuit-based weight fitting is a debugging alternative only; the #undef
// immediately reverts the #define, so it is disabled by default
#define MethodMLP_UseMinuit__
#undef  MethodMLP_UseMinuit__
namespace TMVA {
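   // Multilayer Perceptron (feed-forward artificial neural network), built on
   // MethodANNBase. The network can be trained either by back-propagation (BP)
   // or by a genetic algorithm (GA); it implements IFitterTarget so that the
   // genetic fitter can evaluate the network error estimator directly.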
   class MethodMLP : public MethodANNBase, public IFitterTarget {
   public:
      
      // standard constructor: sets up the method from the data set and the option string
      MethodMLP( TString jobName,
                 TString methodTitle,
                 DataSet& theData,
                 TString theOption,
                 TDirectory* theTargetDir = 0 );

      // constructor used when the network is read back from a weight file
      MethodMLP( DataSet& theData,
                 TString theWeightFile,
                 TDirectory* theTargetDir = 0 );

      virtual ~MethodMLP();

      // train the network for the configured number of cycles (epochs)
      void Train() { Train( NumCycles() ); }

      // evaluate the error estimator for a given set of synapse weights;
      // EstimatorFunction is the IFitterTarget interface used by the genetic fitter
      Double_t ComputeEstimator ( std::vector<Double_t>& parameters );
      Double_t EstimatorFunction( std::vector<Double_t>& parameters );

      // training algorithm (back-propagation or genetic algorithm)
      enum ETrainingMethod { kBP=0, kGA };
      // back-propagation learning mode (event-by-event or batch weight updates)
      enum EBPTrainingMode { kSequential=0, kBatch };
   protected:
      
      // write classifier-specific response code for the standalone C++ class
      virtual void MakeClassSpecific( std::ostream&, const TString& ) const;

      // get help message text
      void GetHelpMessage() const;
   private:
      
      // option handling
      virtual void DeclareOptions();
      virtual void ProcessOptions();

      // general helper functions
      void     Train( Int_t nEpochs );
      void     InitMLP();
      void     InitializeLearningRates();

      // error estimator, evaluated on the training or test sample
      Double_t CalculateEstimator( Types::ETreeType treeType = Types::kTraining );

      // back-propagation training
      void     BackPropagationMinimize( Int_t nEpochs );
      void     TrainOneEpoch();
      void     Shuffle( Int_t* index, Int_t n );
      void     DecaySynapseWeights( Bool_t lateEpoch );
      void     TrainOneEvent( Int_t ievt );
      Double_t GetDesiredOutput();
      void     UpdateNetwork( Double_t desired, Double_t eventWeight = 1.0 );
      void     CalculateNeuronDeltas();
      void     UpdateSynapses();
      void     AdjustSynapseWeights();

      // faster per-event training variant working directly on the branch variables
      void     TrainOneEventFast( Int_t ievt, Float_t*& branchVar, Int_t& type );

      // training via the genetic algorithm
      void     GeneticMinimize();
#ifdef MethodMLP_UseMinuit__
      
      // weight minimization with Minuit (debugging alternative to BP/GA);
      // IFCN is the static wrapper handed to Minuit, which forwards to FCN via GetThisPtr()
      void MinuitMinimize();
      static MethodMLP* GetThisPtr() { return fgThis; }
      static void IFCN( Int_t& npars, Double_t* grad, Double_t& f, Double_t* fitPars, Int_t ifl );
      void FCN( Int_t& npars, Double_t* grad, Double_t& f, Double_t* fitPars, Int_t ifl );
#endif
      
      // general training options
      ETrainingMethod fTrainingMethod; // training algorithm: back-propagation (kBP) or genetic algorithm (kGA)
      TString         fTrainMethodS;   // training method option string

      // back-propagation options
      Double_t        fLearnRate;      // learning rate for the synapse weight adjustments
      Double_t        fDecayRate;      // decay rate applied to the learning rate
      EBPTrainingMode fBPMode;         // back-propagation learning mode: sequential or batch
      TString         fBpModeS;        // back-propagation mode option string
      Int_t           fBatchSize;      // batch size (only relevant in batch learning mode)
      Int_t           fTestRate;       // overtraining test performed every fTestRate epochs

      // genetic algorithm options
      Int_t           fGA_nsteps;      // number of GA steps
      Int_t           fGA_preCalc;     // number of GA pre-calculations
      Int_t           fGA_SC_steps;    // GA spread-control: number of steps
      Int_t           fGA_SC_rate;     // GA spread-control: rate
      Double_t        fGA_SC_factor;   // GA spread-control: factor
#ifdef MethodMLP_UseMinuit__
      
      Int_t             fNumberOfWeights; // total number of synapse weights fitted by Minuit
      static MethodMLP* fgThis;           // pointer to this object, needed by the static Minuit FCN
#endif
      
      // debugging flags
      static const Int_t  fgPRINT_ESTIMATOR_INC = 10;     // print the estimator every this many epochs
      static const Bool_t fgPRINT_SEQ           = kFALSE; // print debug output in sequential mode
      static const Bool_t fgPRINT_BATCH         = kFALSE; // print debug output in batch mode

      ClassDef(MethodMLP,0) // Multilayer Perceptron implementation
   };
} 
#endif
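
// Usage sketch (not part of this header): the MLP method is normally booked
// through the TMVA Factory; the option values below are illustrative only.
//
//    TFile* outputFile = TFile::Open( "TMVA.root", "RECREATE" );
//    TMVA::Factory factory( "TMVAnalysis", outputFile );
//    // ... register input trees and variables with the factory ...
//    factory.BookMethod( TMVA::Types::kMLP, "MLP",
//                        "NCycles=200:HiddenLayers=N+1:LearningRate=0.02:TrainingMethod=BP" );
//    factory.TrainAllMethods();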