library: libTMVA
#include "MethodMLP.h"

TMVA::MethodMLP


class description - header file - source file
viewCVS header - viewCVS source

class TMVA::MethodMLP: public TMVA::MethodANNBase

Inheritance Inherited Members Includes Libraries
Class Charts

Function Members (Methods)

Display options:
Show inherited
Show non-public
public:
virtual~MethodMLP()
voidTObject::AbstractMethod(const char* method) const
virtual voidTObject::AppendPad(Option_t* option = "")
virtual voidTObject::Browse(TBrowser* b)
static TClass*Class()
virtual const char*TObject::ClassName() const
virtual voidTObject::Clear(Option_t* = "")
virtual TObject*TObject::Clone(const char* newname = "") const
virtual Int_tTObject::Compare(const TObject* obj) const
Double_tComputeEstimator(const vector<Double_t>& parameters)
virtual voidTObject::Copy(TObject& object) const
virtual const TMVA::Ranking*TMVA::MethodANNBase::CreateRanking()
TMVA::DataSet&TMVA::MethodBase::Data() const
Bool_tTMVA::MethodANNBase::Debug() const
virtual voidTObject::Delete(Option_t* option = "")
virtual Int_tTObject::DistancetoPrimitive(Int_t px, Int_t py)
virtual voidTObject::Draw(Option_t* option = "")
virtual voidTObject::DrawClass() const
virtual TObject*TObject::DrawClone(Option_t* option = "") const
virtual voidTObject::Dump() const
virtual voidTObject::Error(const char* method, const char* msgfmt) const
virtual voidTObject::Execute(const char* method, const char* params, Int_t* error = 0)
virtual voidTObject::Execute(TMethod* method, TObjArray* params, Int_t* error = 0)
virtual voidTObject::ExecuteEvent(Int_t event, Int_t px, Int_t py)
virtual voidTObject::Fatal(const char* method, const char* msgfmt) const
virtual TObject*TObject::FindObject(const char* name) const
virtual TObject*TObject::FindObject(const TObject* obj) const
virtual Option_t*TObject::GetDrawOption() const
static Long_tTObject::GetDtorOnly()
Double_tTMVA::MethodBase::GetEffForRoot(Double_t)
virtual Double_tTMVA::MethodBase::GetEfficiency(TString, TTree*)
Double_tTMVA::MethodBase::GetEventVal(Int_t ivar) const
Double_tTMVA::MethodBase::GetEventValNormalized(Int_t ivar) const
Double_tTMVA::MethodBase::GetEventWeight() const
virtual const char*TObject::GetIconName() const
const TString&TMVA::MethodBase::GetInputExp(int i) const
const TString&TMVA::MethodBase::GetInputVar(int i) const
virtual const TString&TMVA::MethodBase::GetJobName() const
virtual const TString&TMVA::MethodBase::GetMethodName() const
virtual const TString&TMVA::MethodBase::GetMethodTitle() const
virtual const TMVA::Types::EMVATMVA::MethodBase::GetMethodType() const
virtual Double_tTMVA::MethodBase::GetmuTransform(TTree*)
virtual Double_tTMVA::MethodANNBase::GetMvaValue()
virtual const char*TMVA::MethodBase::GetName() const
Int_tTMVA::MethodBase::GetNvar() const
virtual char*TObject::GetObjectInfo(Int_t px, Int_t py) const
static Bool_tTObject::GetObjectStat()
virtual Double_tTMVA::MethodBase::GetOptimalSignificance(Double_t SignalEvents, Double_t BackgroundEvents, Double_t& optimal_significance_value) const
virtual Option_t*TObject::GetOption() const
TStringTMVA::MethodBase::GetOptions() const
virtual TMVA::Types::EPreprocessingMethodTMVA::MethodBase::GetPreprocessingMethod() const
virtual Double_tTMVA::MethodBase::GetSeparation()
virtual Double_tTMVA::MethodBase::GetSignificance()
TTree*TMVA::MethodBase::GetTestTree() const
static TMVA::MethodBase*TMVA::MethodBase::GetThisBase()
virtual const char*TObject::GetTitle() const
virtual Double_tTMVA::MethodBase::GetTrainingEfficiency(TString)
TTree*TMVA::MethodBase::GetTrainingTree() const
virtual UInt_tTObject::GetUniqueID() const
virtual TStringTMVA::MethodBase::GetWeightFileDir() const
virtual TStringTMVA::MethodBase::GetWeightFileExtension() const
TStringTMVA::MethodBase::GetWeightFileName() const
TMVA::MethodBase::EWeightFileTypeTMVA::MethodBase::GetWeightFileType() const
Double_tTMVA::MethodBase::GetXmax(Int_t ivar, TMVA::Types::EPreprocessingMethod corr = Types::kNone) const
Double_tTMVA::MethodBase::GetXmax(const TString& var, TMVA::Types::EPreprocessingMethod corr = Types::kNone) const
Double_tTMVA::MethodBase::GetXmin(Int_t ivar, TMVA::Types::EPreprocessingMethod corr = Types::kNone) const
Double_tTMVA::MethodBase::GetXmin(const TString& var, TMVA::Types::EPreprocessingMethod corr = Types::kNone) const
virtual Bool_tTObject::HandleTimer(TTimer* timer)
virtual ULong_tTObject::Hash() const
Bool_tTMVA::MethodBase::HasTrainingTree() const
static Double_tTMVA::MethodBase::IGetEffForRoot(Double_t)
virtual voidTObject::Info(const char* method, const char* msgfmt) const
virtual Bool_tTObject::InheritsFrom(const char* classname) const
virtual Bool_tTObject::InheritsFrom(const TClass* cl) const
voidTMVA::MethodANNBase::InitANNBase()
virtual voidTObject::Inspect() const
voidTObject::InvertBit(UInt_t f)
virtual TClass*IsA() const
virtual Bool_tTObject::IsEqual(const TObject* obj) const
virtual Bool_tTObject::IsFolder() const
virtual Bool_tTMVA::MethodBase::IsOK() const
Bool_tTObject::IsOnHeap() const
virtual Bool_tTMVA::MethodBase::IsSignalLike()
virtual Bool_tTObject::IsSortable() const
Bool_tTObject::IsZombie() const
virtual voidTObject::ls(Option_t* option = "") const
voidTObject::MayNotUse(const char* method) const
TMVA::MethodMLPMethodMLP(TMVA::DataSet& theData, TString theWeightFile, TDirectory* theTargetDir = 0)
TMVA::MethodMLPMethodMLP(TString jobName, TString methodTitle, TMVA::DataSet& theData, TString theOption, TDirectory* theTargetDir = 0)
Double_tTMVA::MethodBase::Norm(Int_t ivar, Double_t x) const
Double_tTMVA::MethodBase::Norm(TString var, Double_t x) const
virtual Bool_tTObject::Notify()
static voidTObject::operator delete(void* ptr)
static voidTObject::operator delete(void* ptr, void* vp)
static voidTObject::operator delete[](void* ptr)
static voidTObject::operator delete[](void* ptr, void* vp)
void*TObject::operator new(size_t sz)
void*TObject::operator new(size_t sz, void* vp)
void*TObject::operator new[](size_t sz)
void*TObject::operator new[](size_t sz, void* vp)
TMVA::IMethod&TMVA::IMethod::operator=(const TMVA::IMethod&)
virtual voidTObject::Paint(Option_t* option = "")
virtual voidTObject::Pop()
virtual voidTMVA::MethodBase::PrepareEvaluationTree(TTree* theTestTree)
virtual voidTObject::Print(Option_t* option = "") const
virtual voidTMVA::MethodANNBase::PrintNetwork()
virtual Int_tTObject::Read(const char* name)
virtual voidTMVA::MethodBase::ReadStateFromFile()
virtual voidTMVA::MethodBase::ReadStateFromStream(istream& i)
virtual Bool_tTMVA::MethodBase::ReadTestEvent(UInt_t ievt, TMVA::Types::ESBType type = Types::kMaxSBType)
Bool_tTMVA::MethodBase::ReadTrainingEvent(UInt_t ievt, TMVA::Types::ESBType type = Types::kMaxSBType)
virtual voidTMVA::MethodANNBase::ReadWeightsFromStream(istream& istr)
virtual voidTObject::RecursiveRemove(TObject* obj)
voidTObject::ResetBit(UInt_t f)
virtual voidTObject::SaveAs(const char* filename = "", Option_t* option = "") const
virtual voidTObject::SavePrimitive(ostream& out, Option_t* option = "")
voidTMVA::MethodANNBase::SetActivation(TMVA::TActivation* activation)
voidTObject::SetBit(UInt_t f)
voidTObject::SetBit(UInt_t f, Bool_t set)
virtual voidTObject::SetDrawOption(Option_t* option = "")
static voidTObject::SetDtorOnly(void* obj)
virtual voidTMVA::MethodBase::SetJobName(TString jobName)
voidTMVA::MethodBase::SetMethodName(TString methodName)
voidTMVA::MethodBase::SetMethodTitle(TString methodTitle)
voidTMVA::MethodBase::SetMethodType(TMVA::Types::EMVA methodType)
voidTMVA::MethodANNBase::SetNeuronInputCalculator(TMVA::TNeuronInput* inputCalculator)
voidTMVA::MethodBase::SetNvar(Int_t n)
static voidTObject::SetObjectStat(Bool_t stat)
voidTMVA::MethodBase::SetPreprocessingMethod(TMVA::Types::EPreprocessingMethod m)
virtual voidTObject::SetUniqueID(UInt_t uid)
voidTMVA::MethodBase::SetVerbose(Bool_t v = kTRUE)
virtual voidTMVA::MethodBase::SetWeightFileDir(TString fileDir)
virtual voidTMVA::MethodBase::SetWeightFileExtension(TString fileExtension)
voidTMVA::MethodBase::SetWeightFileName(TString)
voidTMVA::MethodBase::SetWeightFileType(TMVA::MethodBase::EWeightFileType w)
voidTMVA::MethodBase::SetXmax(Int_t ivar, Double_t x, TMVA::Types::EPreprocessingMethod corr = Types::kNone)
voidTMVA::MethodBase::SetXmax(const TString& var, Double_t x, TMVA::Types::EPreprocessingMethod corr = Types::kNone)
voidTMVA::MethodBase::SetXmin(Int_t ivar, Double_t x, TMVA::Types::EPreprocessingMethod corr = Types::kNone)
voidTMVA::MethodBase::SetXmin(const TString& var, Double_t x, TMVA::Types::EPreprocessingMethod corr = Types::kNone)
virtual voidShowMembers(TMemberInspector& insp, char* parent)
virtual voidStreamer(TBuffer& b)
voidStreamerNVirtual(TBuffer& b)
virtual voidTObject::SysError(const char* method, const char* msgfmt) const
virtual voidTMVA::MethodBase::Test(TTree* theTestTree = 0)
Bool_tTObject::TestBit(UInt_t f) const
Int_tTObject::TestBits(UInt_t f) const
virtual voidTMVA::MethodBase::TestInit(TTree* theTestTree = 0)
virtual voidTrain()
voidTMVA::MethodBase::TrainMethod()
virtual voidTObject::UseCurrentStyle()
Bool_tTMVA::MethodBase::Verbose() const
virtual voidTObject::Warning(const char* method, const char* msgfmt) const
virtual Int_tTObject::Write(const char* name = "0", Int_t option = 0, Int_t bufsize = 0)
virtual Int_tTObject::Write(const char* name = "0", Int_t option = 0, Int_t bufsize = 0) const
virtual voidTMVA::MethodBase::WriteEvaluationHistosToFile(TDirectory* targetDir)
virtual voidTMVA::MethodANNBase::WriteMonitoringHistosToFile() const
voidTMVA::MethodBase::WriteStateToFile() const
virtual voidTMVA::MethodBase::WriteStateToStream(ostream& o) const
virtual voidTMVA::MethodANNBase::WriteWeightsToStream(ostream& o) const
protected:
TDirectory*TMVA::MethodBase::BaseDir() const
virtual voidTMVA::MethodANNBase::BuildNetwork(vector<Int_t>* layout, vector<Double_t>* weights = NULL)
Bool_tTMVA::MethodBase::CheckSanity(TTree* theTree = 0)
virtual voidTObject::DoError(int level, const char* location, const char* fmt, va_list va) const
voidTMVA::MethodBase::EnableLooseOptions(Bool_t b = kTRUE)
voidTMVA::MethodANNBase::ForceNetworkCalculations()
voidTMVA::MethodANNBase::ForceNetworkInputs(Int_t ignoreIndex = -1)
TMVA::MethodBase::ECutOrientationTMVA::MethodBase::GetCutOrientation() const
TMVA::TNeuron*TMVA::MethodANNBase::GetInputNeuron(Int_t index)
Double_tTMVA::MethodANNBase::GetNetworkOutput()
TMVA::TNeuron*TMVA::MethodANNBase::GetOutputNeuron()
TMVA::Types::ESBTypeTMVA::MethodBase::GetPreprocessingType() const
Double_tTMVA::MethodBase::GetSignalReferenceCut() const
const TString&TMVA::MethodBase::GetTestvarName() const
const TString&TMVA::MethodBase::GetTestvarPrefix() const
const TList&TMVA::MethodBase::ListOfOptions() const
TDirectory*TMVA::MethodBase::LocalTDir() const
voidTObject::MakeZombie()
Bool_tTMVA::MethodANNBase::Normalize()
Int_tTMVA::MethodANNBase::NumCycles()
vector<Int_t>*TMVA::MethodANNBase::ParseLayoutString(TString layerSpec)
voidTMVA::MethodBase::ParseOptions(Bool_t verbose = kTRUE)
voidTMVA::MethodANNBase::PrintMessage(TString message, Bool_t force = kFALSE) const
voidTMVA::MethodBase::PrintOptions() const
voidTMVA::MethodBase::ReadOptionsFromStream(istream& istr)
voidTMVA::MethodBase::ResetThisBase()
voidTMVA::MethodBase::SetPreprocessingType(TMVA::Types::ESBType t)
voidTMVA::MethodBase::SetSignalReferenceCut(Double_t cut)
voidTMVA::MethodBase::SetTestvarName()
voidTMVA::MethodBase::SetTestvarName(TString v)
voidTMVA::MethodBase::SetTestvarPrefix(TString prefix)
voidTMVA::MethodBase::Statistics(TMVA::Types::ETreeType treeType, const TString& theVarName, Double_t&, Double_t&, Double_t&, Double_t&, Double_t&, Double_t&, Bool_t norm = kFALSE)
voidTMVA::MethodANNBase::WaitForKeyboard()
voidTMVA::MethodBase::WriteOptionsToStream(ostream& o) const
private:
voidAdjustSynapseWeights()
voidBackPropagationMinimize(Int_t nEpochs)
Double_tCalculateEstimator(TMVA::Types::ETreeType treeType = Types::kTraining)
voidCalculateNeuronDeltas()
voidDecaySynapseWeights(Bool_t lateEpoch)
virtual voidDeclareOptions()
voidGeneticMinimize()
Double_tGetDesiredOutput()
voidInitializeLearningRates()
voidInitMLP()
virtual voidProcessOptions()
voidShuffle(Int_t* index, Int_t n)
voidTrain(Int_t nEpochs)
voidTrainOneEpoch()
voidTrainOneEvent(Int_t ievt)
voidTrainOneEventFast(Int_t ievt, Float_t*& branchVar, Int_t& type)
voidUpdateNetwork(Double_t desired, Double_t eventWeight = 1.0)
voidUpdateSynapses()

Data Members

public:
enum ETrainingMethod { kBP
kGA
};
enum EBPTrainingMode { kSequential
kBatch
};
enum TMVA::MethodBase::EWeightFileType { kROOT
kTEXT
};
enum TMVA::MethodBase::ECutOrientation { kNegative
kPositive
};
enum TObject::EStatusBits { kCanDelete
kMustCleanup
kObjInCanvas
kIsReferenced
kHasUUID
kCannotPick
kNoContextMenu
kInvalidObject
};
enum TObject::[unnamed] { kIsOnHeap
kNotDeleted
kZombie
kBitMask
kSingleKey
kOverwrite
kWriteDelete
};
protected:
TObjArray*TMVA::MethodANNBase::fNetworkTObjArray of TObjArrays representing network
TObjArray*TMVA::MethodANNBase::fSynapsesarray of pointers to synapses, no structural data
TMVA::TActivation*TMVA::MethodANNBase::fActivationactivation function to be used for hidden layers
TMVA::TActivation*TMVA::MethodANNBase::fIdentityactivation for input and output layers
TRandom3*TMVA::MethodANNBase::frgenrandom number generator for various uses
TMVA::TNeuronInput*TMVA::MethodANNBase::fInputCalculatorinput calculator for all neurons
TH1F*TMVA::MethodANNBase::fEstimatorHistTrainmonitors convergence of training sample
TH1F*TMVA::MethodANNBase::fEstimatorHistTestmonitors convergence of independent test sample
TMVA::Ranking*TMVA::MethodBase::fRankingranking
vector<TString>*TMVA::MethodBase::fInputVarsvector of input variables used in MVA
Bool_tTMVA::MethodBase::fIsOKstatus of sanity checks
TH1*TMVA::MethodBase::fHistS_plotbinMVA plots used for graphics representation (signal)
TH1*TMVA::MethodBase::fHistB_plotbinMVA plots used for graphics representation (background)
TH1*TMVA::MethodBase::fHistS_highbinMVA plots used for efficiency calculations (signal)
TH1*TMVA::MethodBase::fHistB_highbinMVA plots used for efficiency calculations (background)
TH1*TMVA::MethodBase::fEffSefficiency plot (signal)
TH1*TMVA::MethodBase::fEffBefficiency plot (background)
TH1*TMVA::MethodBase::fEffBvsSbackground efficiency versus signal efficiency
TH1*TMVA::MethodBase::fRejBvsSbackground rejection (=1-eff.) versus signal efficiency
TH1*TMVA::MethodBase::fHistBhatSworking histograms needed for mu-transform (signal)
TH1*TMVA::MethodBase::fHistBhatBworking histograms needed for mu-transform (background)
TH1*TMVA::MethodBase::fHistMuSmu-transform (signal)
TH1*TMVA::MethodBase::fHistMuBmu-transform (background)
TH1*TMVA::MethodBase::fTrainEffSTraining efficiency plot (signal)
TH1*TMVA::MethodBase::fTrainEffBTraining efficiency plot (background)
TH1*TMVA::MethodBase::fTrainEffBvsSTraining background efficiency versus signal efficiency
TH1*TMVA::MethodBase::fTrainRejBvsSTraining background rejection (=1-eff.) versus signal efficiency
Double_tTMVA::MethodBase::fX
Double_tTMVA::MethodBase::fMode
TGraph*TMVA::MethodBase::fGraphSgraphs used for splines for efficiency (signal)
TGraph*TMVA::MethodBase::fGraphBgraphs used for splines for efficiency (background)
TGraph*TMVA::MethodBase::fGrapheffBvsSgraphs used for splines for signal eff. versus background eff.
TMVA::PDF*TMVA::MethodBase::fSplSPDFs of MVA distribution (signal)
TMVA::PDF*TMVA::MethodBase::fSplBPDFs of MVA distribution (background)
TSpline*TMVA::MethodBase::fSpleffBvsSsplines for signal eff. versus background eff.
TGraph*TMVA::MethodBase::fGraphTrainSgraphs used for splines for training efficiency (signal)
TGraph*TMVA::MethodBase::fGraphTrainBgraphs used for splines for training efficiency (background)
TGraph*TMVA::MethodBase::fGraphTrainEffBvsSgraphs used for splines for training signal eff. versus background eff.
TMVA::PDF*TMVA::MethodBase::fSplTrainSPDFs of training MVA distribution (signal)
TMVA::PDF*TMVA::MethodBase::fSplTrainBPDFs of training MVA distribution (background)
TSpline*TMVA::MethodBase::fSplTrainEffBvsSsplines for training signal eff. versus background eff.
Int_tTMVA::MethodBase::fNbinsnumber of bins in representative histograms
Int_tTMVA::MethodBase::fNbinsHnumber of bins in evaluation histograms
TMVA::MethodBase::ECutOrientationTMVA::MethodBase::fCutOrientation+1 if Sig>Bkg, -1 otherwise
TMVA::TSpline1*TMVA::MethodBase::fSplRefShelper splines for RootFinder (signal)
TMVA::TSpline1*TMVA::MethodBase::fSplRefBhelper splines for RootFinder (background)
TMVA::TSpline1*TMVA::MethodBase::fSplTrainRefShelper splines for RootFinder (signal)
TMVA::TSpline1*TMVA::MethodBase::fSplTrainRefBhelper splines for RootFinder (background)
TMVA::OptionBase*TMVA::MethodBase::fLastDeclaredOptionlast declared option
TListTMVA::MethodBase::fListOfOptionsoption list
TMVA::MsgLoggerTMVA::MethodBase::fLoggermessage logger
private:
TMVA::MethodMLP::ETrainingMethodfTrainingMethodmethod of training, BP or GA
TStringfTrainMethodStraining method option param
Double_tfLearnRatelearning rate for synapse weight adjustments
Double_tfDecayRatedecay rate for above learning rate
TMVA::MethodMLP::EBPTrainingModefBPModebackprop learning mode (sequential or batch)
TStringfBpModeSbackprop learning mode option string (sequential or batch)
Int_tfBatchSizebatch size, only matters if in batch learning mode
Int_tfTestRatetest for overtraining performed at each #th epochs
Int_tfGA_nstepsGA settings: number of steps
Int_tfGA_preCalcGA settings: number of pre-calc steps
Int_tfGA_SC_stepsGA settings: SC_steps
Int_tfGA_SC_offstepsGA settings: SC_offsteps
Double_tfGA_SC_factorGA settings: SC_factor
static const Int_tfgPRINT_ESTIMATOR_INCdebug flags
static const Bool_tfgPRINT_SEQdebug flags
static const Bool_tfgPRINT_BATCHdebug flags

Class Description

                                                                      
 Multilayer Perceptron class built off of MethodANNBase  
_______________________________________________________________________
MethodMLP( TString jobName, TString methodTitle, DataSet& theData, TString theOption, TDirectory* theTargetDir )
 standard constructor
MethodMLP( DataSet& theData, TString theWeightFile, TDirectory* theTargetDir )
 construct from a weight file -- most work is done by MethodANNBase constructor
~MethodMLP()
 destructor
 nothing to be done
void InitMLP()
 default initializations
void DeclareOptions()
 define the options (their key words) that can be set in the option string 
 known options:
 TrainingMethod  <string>     Training method
    available values are:         BP   Back-Propagation <default>
                                  GA   Genetic Algorithm (takes a LONG time)

 LearningRate    <float>      NN learning rate parameter
 DecayRate       <float>      Decay rate for learning parameter
 TestRate        <int>        Test for overtraining performed at each #th epochs

 BPMode          <string>     Back-propagation learning mode
    available values are:         sequential <default>
                                  batch

 BatchSize       <int>        Batch size: number of events/batch, only set if in Batch Mode, -1 for BatchSize=number_of_events
void ProcessOptions()
 process user options
void InitializeLearningRates()
 initialize learning rates of synapses, used only by backpropagation
Double_t CalculateEstimator( TMVA::Types::ETreeType treeType )
 calculate the estimator that training is attempting to minimize
void Train(Int_t nEpochs)
 train the network
void BackPropagationMinimize(Int_t nEpochs)
 minimize estimator / train network with backpropagation algorithm
void TrainOneEpoch()
 train network over a single epoch/cycle of events
void Shuffle(Int_t* index, Int_t n)
 Input:
   index: the array to shuffle
   n: the size of the array
 Output:
   index: the shuffled indexes
 This method is used for sequential training
void DecaySynapseWeights(Bool_t lateEpoch)
 decay synapse weights
 in last 10 epochs, lower learning rate even more to find a good minimum
void TrainOneEventFast(Int_t ievt, Float_t*& branchVar, Int_t& type)
 fast per-event training
void TrainOneEvent(Int_t ievt)
 train network over a single event
 this uses the new event model
Double_t GetDesiredOutput()
 get the desired output of this event
void UpdateNetwork(Double_t desired, Double_t eventWeight)
 update the network based on how closely
 the output matched the desired output
void CalculateNeuronDeltas()
 have each neuron calculate its delta by backpropagation
void GeneticMinimize()
 create genetics class similar to GeneticCut
 give it vector of parameter ranges (parameters = weights)
 link fitness function of this class to ComputeEstimator
 instantiate GA (see MethodCuts)
 run it
 then this should exist for GA, Minuit and random sampling
Double_t ComputeEstimator(const vector<Double_t>& parameters)
 this function is called by GeneticANN for GA optimization
void UpdateSynapses()
 update synapse error fields and adjust the weights (if in sequential mode)
void AdjustSynapseWeights()
 just adjust the synapse weights (should be called in batch mode)
void Train()
{ Train(NumCycles()); }

Author: Andreas Hoecker, Matt Jachowski
Last update: root/tmva $Id: MethodMLP.cxx,v 1.6 2006/11/20 15:35:28 brun Exp $
Copyright (c) 2005: *


ROOT page - Class index - Class Hierarchy - Top of the page

This page has been automatically generated. If you have any comments or suggestions about the page layout send a mail to ROOT support, or contact the developers with any questions or problems regarding ROOT.