library: libTMVA
#include "MethodANNBase.h"

TMVA::MethodANNBase



class TMVA::MethodANNBase: public TMVA::MethodBase


Function Members (Methods)

    This is an abstract class, constructors will not be documented.
    Look at the header to check for available constructors.

public:
virtual ~MethodANNBase()
void TObject::AbstractMethod(const char* method) const
virtual void TObject::AppendPad(Option_t* option = "")
virtual void TObject::Browse(TBrowser* b)
static TClass* Class()
virtual const char* TObject::ClassName() const
virtual void TObject::Clear(Option_t* = "")
virtual TObject* TObject::Clone(const char* newname = "") const
virtual Int_t TObject::Compare(const TObject* obj) const
virtual void TObject::Copy(TObject& object) const
virtual const TMVA::Ranking* CreateRanking()
TMVA::DataSet& TMVA::MethodBase::Data() const
Bool_t Debug() const
virtual void DeclareOptions()
virtual void TObject::Delete(Option_t* option = "")
virtual Int_t TObject::DistancetoPrimitive(Int_t px, Int_t py)
virtual void TObject::Draw(Option_t* option = "")
virtual void TObject::DrawClass() const
virtual TObject* TObject::DrawClone(Option_t* option = "") const
virtual void TObject::Dump() const
virtual void TObject::Error(const char* method, const char* msgfmt) const
virtual void TObject::Execute(const char* method, const char* params, Int_t* error = 0)
virtual void TObject::Execute(TMethod* method, TObjArray* params, Int_t* error = 0)
virtual void TObject::ExecuteEvent(Int_t event, Int_t px, Int_t py)
virtual void TObject::Fatal(const char* method, const char* msgfmt) const
virtual TObject* TObject::FindObject(const char* name) const
virtual TObject* TObject::FindObject(const TObject* obj) const
virtual Option_t* TObject::GetDrawOption() const
static Long_t TObject::GetDtorOnly()
Double_t TMVA::MethodBase::GetEffForRoot(Double_t)
virtual Double_t TMVA::MethodBase::GetEfficiency(TString, TTree*)
Double_t TMVA::MethodBase::GetEventVal(Int_t ivar) const
Double_t TMVA::MethodBase::GetEventValNormalized(Int_t ivar) const
Double_t TMVA::MethodBase::GetEventWeight() const
virtual const char* TObject::GetIconName() const
const TString& TMVA::MethodBase::GetInputExp(int i) const
const TString& TMVA::MethodBase::GetInputVar(int i) const
virtual const TString& TMVA::MethodBase::GetJobName() const
virtual const TString& TMVA::MethodBase::GetMethodName() const
virtual const TString& TMVA::MethodBase::GetMethodTitle() const
virtual const TMVA::Types::EMVA TMVA::MethodBase::GetMethodType() const
virtual Double_t TMVA::MethodBase::GetmuTransform(TTree*)
virtual Double_t GetMvaValue()
virtual const char* TMVA::MethodBase::GetName() const
Int_t TMVA::MethodBase::GetNvar() const
virtual char* TObject::GetObjectInfo(Int_t px, Int_t py) const
static Bool_t TObject::GetObjectStat()
virtual Double_t TMVA::MethodBase::GetOptimalSignificance(Double_t SignalEvents, Double_t BackgroundEvents, Double_t& optimal_significance_value) const
virtual Option_t* TObject::GetOption() const
TString TMVA::MethodBase::GetOptions() const
virtual TMVA::Types::EPreprocessingMethod TMVA::MethodBase::GetPreprocessingMethod() const
virtual Double_t TMVA::MethodBase::GetSeparation()
virtual Double_t TMVA::MethodBase::GetSignificance()
TTree* TMVA::MethodBase::GetTestTree() const
static TMVA::MethodBase* TMVA::MethodBase::GetThisBase()
virtual const char* TObject::GetTitle() const
virtual Double_t TMVA::MethodBase::GetTrainingEfficiency(TString)
TTree* TMVA::MethodBase::GetTrainingTree() const
virtual UInt_t TObject::GetUniqueID() const
virtual TString TMVA::MethodBase::GetWeightFileDir() const
virtual TString TMVA::MethodBase::GetWeightFileExtension() const
TString TMVA::MethodBase::GetWeightFileName() const
TMVA::MethodBase::EWeightFileType TMVA::MethodBase::GetWeightFileType() const
Double_t TMVA::MethodBase::GetXmax(Int_t ivar, TMVA::Types::EPreprocessingMethod corr = Types::kNone) const
Double_t TMVA::MethodBase::GetXmax(const TString& var, TMVA::Types::EPreprocessingMethod corr = Types::kNone) const
Double_t TMVA::MethodBase::GetXmin(Int_t ivar, TMVA::Types::EPreprocessingMethod corr = Types::kNone) const
Double_t TMVA::MethodBase::GetXmin(const TString& var, TMVA::Types::EPreprocessingMethod corr = Types::kNone) const
virtual Bool_t TObject::HandleTimer(TTimer* timer)
virtual ULong_t TObject::Hash() const
Bool_t TMVA::MethodBase::HasTrainingTree() const
static Double_t TMVA::MethodBase::IGetEffForRoot(Double_t)
virtual void TObject::Info(const char* method, const char* msgfmt) const
virtual Bool_t TObject::InheritsFrom(const char* classname) const
virtual Bool_t TObject::InheritsFrom(const TClass* cl) const
void InitANNBase()
virtual void TObject::Inspect() const
void TObject::InvertBit(UInt_t f)
virtual TClass* IsA() const
virtual Bool_t TObject::IsEqual(const TObject* obj) const
virtual Bool_t TObject::IsFolder() const
virtual Bool_t TMVA::MethodBase::IsOK() const
Bool_t TObject::IsOnHeap() const
virtual Bool_t TMVA::MethodBase::IsSignalLike()
virtual Bool_t TObject::IsSortable() const
Bool_t TObject::IsZombie() const
virtual void TObject::ls(Option_t* option = "") const
void TObject::MayNotUse(const char* method) const
Double_t TMVA::MethodBase::Norm(Int_t ivar, Double_t x) const
Double_t TMVA::MethodBase::Norm(TString var, Double_t x) const
virtual Bool_t TObject::Notify()
static void TObject::operator delete(void* ptr)
static void TObject::operator delete(void* ptr, void* vp)
static void TObject::operator delete[](void* ptr)
static void TObject::operator delete[](void* ptr, void* vp)
void* TObject::operator new(size_t sz)
void* TObject::operator new(size_t sz, void* vp)
void* TObject::operator new[](size_t sz)
void* TObject::operator new[](size_t sz, void* vp)
TMVA::IMethod& TMVA::IMethod::operator=(const TMVA::IMethod&)
virtual void TObject::Paint(Option_t* option = "")
virtual void TObject::Pop()
virtual void TMVA::MethodBase::PrepareEvaluationTree(TTree* theTestTree)
virtual void TObject::Print(Option_t* option = "") const
virtual void PrintNetwork()
virtual void ProcessOptions()
virtual Int_t TObject::Read(const char* name)
virtual void TMVA::MethodBase::ReadStateFromFile()
virtual void TMVA::MethodBase::ReadStateFromStream(istream& i)
virtual Bool_t TMVA::MethodBase::ReadTestEvent(UInt_t ievt, TMVA::Types::ESBType type = Types::kMaxSBType)
Bool_t TMVA::MethodBase::ReadTrainingEvent(UInt_t ievt, TMVA::Types::ESBType type = Types::kMaxSBType)
virtual void ReadWeightsFromStream(istream& istr)
virtual void TObject::RecursiveRemove(TObject* obj)
void TObject::ResetBit(UInt_t f)
virtual void TObject::SaveAs(const char* filename = "", Option_t* option = "") const
virtual void TObject::SavePrimitive(ostream& out, Option_t* option = "")
void SetActivation(TMVA::TActivation* activation)
void TObject::SetBit(UInt_t f)
void TObject::SetBit(UInt_t f, Bool_t set)
virtual void TObject::SetDrawOption(Option_t* option = "")
static void TObject::SetDtorOnly(void* obj)
virtual void TMVA::MethodBase::SetJobName(TString jobName)
void TMVA::MethodBase::SetMethodName(TString methodName)
void TMVA::MethodBase::SetMethodTitle(TString methodTitle)
void TMVA::MethodBase::SetMethodType(TMVA::Types::EMVA methodType)
void SetNeuronInputCalculator(TMVA::TNeuronInput* inputCalculator)
void TMVA::MethodBase::SetNvar(Int_t n)
static void TObject::SetObjectStat(Bool_t stat)
void TMVA::MethodBase::SetPreprocessingMethod(TMVA::Types::EPreprocessingMethod m)
virtual void TObject::SetUniqueID(UInt_t uid)
void TMVA::MethodBase::SetVerbose(Bool_t v = kTRUE)
virtual void TMVA::MethodBase::SetWeightFileDir(TString fileDir)
virtual void TMVA::MethodBase::SetWeightFileExtension(TString fileExtension)
void TMVA::MethodBase::SetWeightFileName(TString)
void TMVA::MethodBase::SetWeightFileType(TMVA::MethodBase::EWeightFileType w)
void TMVA::MethodBase::SetXmax(Int_t ivar, Double_t x, TMVA::Types::EPreprocessingMethod corr = Types::kNone)
void TMVA::MethodBase::SetXmax(const TString& var, Double_t x, TMVA::Types::EPreprocessingMethod corr = Types::kNone)
void TMVA::MethodBase::SetXmin(Int_t ivar, Double_t x, TMVA::Types::EPreprocessingMethod corr = Types::kNone)
void TMVA::MethodBase::SetXmin(const TString& var, Double_t x, TMVA::Types::EPreprocessingMethod corr = Types::kNone)
virtual void ShowMembers(TMemberInspector& insp, char* parent)
virtual void Streamer(TBuffer& b)
void StreamerNVirtual(TBuffer& b)
virtual void TObject::SysError(const char* method, const char* msgfmt) const
virtual void TMVA::MethodBase::Test(TTree* theTestTree = 0)
Bool_t TObject::TestBit(UInt_t f) const
Int_t TObject::TestBits(UInt_t f) const
virtual void TMVA::MethodBase::TestInit(TTree* theTestTree = 0)
virtual void Train()
void TMVA::MethodBase::TrainMethod()
virtual void TObject::UseCurrentStyle()
Bool_t TMVA::MethodBase::Verbose() const
virtual void TObject::Warning(const char* method, const char* msgfmt) const
virtual Int_t TObject::Write(const char* name = "0", Int_t option = 0, Int_t bufsize = 0)
virtual Int_t TObject::Write(const char* name = "0", Int_t option = 0, Int_t bufsize = 0) const
virtual void TMVA::MethodBase::WriteEvaluationHistosToFile(TDirectory* targetDir)
virtual void WriteMonitoringHistosToFile() const
void TMVA::MethodBase::WriteStateToFile() const
virtual void TMVA::MethodBase::WriteStateToStream(ostream& o) const
virtual void WriteWeightsToStream(ostream& o) const
protected:
TDirectory* TMVA::MethodBase::BaseDir() const
virtual void BuildNetwork(vector<Int_t>* layout, vector<Double_t>* weights = NULL)
Bool_t TMVA::MethodBase::CheckSanity(TTree* theTree = 0)
virtual void TObject::DoError(int level, const char* location, const char* fmt, va_list va) const
void TMVA::MethodBase::EnableLooseOptions(Bool_t b = kTRUE)
void ForceNetworkCalculations()
void ForceNetworkInputs(Int_t ignoreIndex = -1)
TMVA::MethodBase::ECutOrientation TMVA::MethodBase::GetCutOrientation() const
TMVA::TNeuron* GetInputNeuron(Int_t index)
Double_t GetNetworkOutput()
TMVA::TNeuron* GetOutputNeuron()
TMVA::Types::ESBType TMVA::MethodBase::GetPreprocessingType() const
Double_t TMVA::MethodBase::GetSignalReferenceCut() const
const TString& TMVA::MethodBase::GetTestvarName() const
const TString& TMVA::MethodBase::GetTestvarPrefix() const
const TList& TMVA::MethodBase::ListOfOptions() const
TDirectory* TMVA::MethodBase::LocalTDir() const
void TObject::MakeZombie()
Bool_t Normalize()
Int_t NumCycles()
vector<Int_t>* ParseLayoutString(TString layerSpec)
void TMVA::MethodBase::ParseOptions(Bool_t verbose = kTRUE)
void PrintMessage(TString message, Bool_t force = kFALSE) const
void TMVA::MethodBase::PrintOptions() const
void TMVA::MethodBase::ReadOptionsFromStream(istream& istr)
void TMVA::MethodBase::ResetThisBase()
void TMVA::MethodBase::SetPreprocessingType(TMVA::Types::ESBType t)
void TMVA::MethodBase::SetSignalReferenceCut(Double_t cut)
void TMVA::MethodBase::SetTestvarName()
void TMVA::MethodBase::SetTestvarName(TString v)
void TMVA::MethodBase::SetTestvarPrefix(TString prefix)
void TMVA::MethodBase::Statistics(TMVA::Types::ETreeType treeType, const TString& theVarName, Double_t&, Double_t&, Double_t&, Double_t&, Double_t&, Double_t&, Bool_t norm = kFALSE)
void WaitForKeyboard()
void TMVA::MethodBase::WriteOptionsToStream(ostream& o) const
private:
void AddPreLinks(TMVA::TNeuron* neuron, TObjArray* prevLayer)
void BuildLayer(Int_t numNeurons, TObjArray* curLayer, TObjArray* prevLayer, Int_t layerIndex, Int_t numLayers)
void BuildLayers(vector<Int_t>* layout)
void DeleteNetwork()
void DeleteNetworkLayer(TObjArray*& layer)
void ForceWeights(vector<Double_t>* weights)
void InitWeights()
void PrintLayer(TObjArray* layer)
void PrintNeuron(TMVA::TNeuron* neuron)

Data Members

public:
enum TMVA::MethodBase::EWeightFileType { kROOT, kTEXT };
enum TMVA::MethodBase::ECutOrientation { kNegative, kPositive };
enum TObject::EStatusBits { kCanDelete, kMustCleanup, kObjInCanvas, kIsReferenced,
                            kHasUUID, kCannotPick, kNoContextMenu, kInvalidObject };
enum TObject::[unnamed] { kIsOnHeap, kNotDeleted, kZombie, kBitMask,
                          kSingleKey, kOverwrite, kWriteDelete };
protected:
TObjArray* fNetwork   TObjArray of TObjArrays representing network
TObjArray* fSynapses   array of pointers to synapses, no structural data
TMVA::TActivation* fActivation   activation function to be used for hidden layers
TMVA::TActivation* fIdentity   activation for input and output layers
TRandom3* frgen   random number generator for various uses
TMVA::TNeuronInput* fInputCalculator   input calculator for all neurons
TH1F* fEstimatorHistTrain   monitors convergence of training sample
TH1F* fEstimatorHistTest   monitors convergence of independent test sample
TMVA::Ranking* TMVA::MethodBase::fRanking   ranking
vector<TString>* TMVA::MethodBase::fInputVars   vector of input variables used in MVA
Bool_t TMVA::MethodBase::fIsOK   status of sanity checks
TH1* TMVA::MethodBase::fHistS_plotbin   MVA plots used for graphics representation (signal)
TH1* TMVA::MethodBase::fHistB_plotbin   MVA plots used for graphics representation (background)
TH1* TMVA::MethodBase::fHistS_highbin   MVA plots used for efficiency calculations (signal)
TH1* TMVA::MethodBase::fHistB_highbin   MVA plots used for efficiency calculations (background)
TH1* TMVA::MethodBase::fEffS   efficiency plot (signal)
TH1* TMVA::MethodBase::fEffB   efficiency plot (background)
TH1* TMVA::MethodBase::fEffBvsS   background efficiency versus signal efficiency
TH1* TMVA::MethodBase::fRejBvsS   background rejection (=1-eff.) versus signal efficiency
TH1* TMVA::MethodBase::fHistBhatS   working histograms needed for mu-transform (signal)
TH1* TMVA::MethodBase::fHistBhatB   working histograms needed for mu-transform (background)
TH1* TMVA::MethodBase::fHistMuS   mu-transform (signal)
TH1* TMVA::MethodBase::fHistMuB   mu-transform (background)
TH1* TMVA::MethodBase::fTrainEffS   Training efficiency plot (signal)
TH1* TMVA::MethodBase::fTrainEffB   Training efficiency plot (background)
TH1* TMVA::MethodBase::fTrainEffBvsS   Training background efficiency versus signal efficiency
TH1* TMVA::MethodBase::fTrainRejBvsS   Training background rejection (=1-eff.) versus signal efficiency
Double_t TMVA::MethodBase::fX
Double_t TMVA::MethodBase::fMode
TGraph* TMVA::MethodBase::fGraphS   graphs used for splines for efficiency (signal)
TGraph* TMVA::MethodBase::fGraphB   graphs used for splines for efficiency (background)
TGraph* TMVA::MethodBase::fGrapheffBvsS   graphs used for splines for signal eff. versus background eff.
TMVA::PDF* TMVA::MethodBase::fSplS   PDFs of MVA distribution (signal)
TMVA::PDF* TMVA::MethodBase::fSplB   PDFs of MVA distribution (background)
TSpline* TMVA::MethodBase::fSpleffBvsS   splines for signal eff. versus background eff.
TGraph* TMVA::MethodBase::fGraphTrainS   graphs used for splines for training efficiency (signal)
TGraph* TMVA::MethodBase::fGraphTrainB   graphs used for splines for training efficiency (background)
TGraph* TMVA::MethodBase::fGraphTrainEffBvsS   graphs used for splines for training signal eff. versus background eff.
TMVA::PDF* TMVA::MethodBase::fSplTrainS   PDFs of training MVA distribution (signal)
TMVA::PDF* TMVA::MethodBase::fSplTrainB   PDFs of training MVA distribution (background)
TSpline* TMVA::MethodBase::fSplTrainEffBvsS   splines for training signal eff. versus background eff.
Int_t TMVA::MethodBase::fNbins   number of bins in representative histograms
Int_t TMVA::MethodBase::fNbinsH   number of bins in evaluation histograms
TMVA::MethodBase::ECutOrientation TMVA::MethodBase::fCutOrientation   +1 if Sig>Bkg, -1 otherwise
TMVA::TSpline1* TMVA::MethodBase::fSplRefS   helper splines for RootFinder (signal)
TMVA::TSpline1* TMVA::MethodBase::fSplRefB   helper splines for RootFinder (background)
TMVA::TSpline1* TMVA::MethodBase::fSplTrainRefS   helper splines for RootFinder (signal)
TMVA::TSpline1* TMVA::MethodBase::fSplTrainRefB   helper splines for RootFinder (background)
TMVA::OptionBase* TMVA::MethodBase::fLastDeclaredOption   last declared option
TList TMVA::MethodBase::fListOfOptions   option list
TMVA::MsgLogger TMVA::MethodBase::fLogger   message logger
private:
Int_t fNcycles   number of epochs to train
Bool_t fNormalize   flag for input data normalization
TString fNeuronType   name of neuron activation function class
TString fNeuronInputType   name of neuron input calculator class
TObjArray* fInputLayer   cache this for fast access
TMVA::TNeuron* fOutputNeuron   cache this for fast access
TString fLayerSpec   layout specification option
static const Bool_t fgDEBUG   debug flag
static const Bool_t fgFIXED_SEED   fix rand generator seed

Class Description

                                                                      
 Base class for all TMVA methods using artificial neural networks      
                                                                      
_______________________________________________________________________
void DeclareOptions()
 define the options (their keywords) that can be set in the option string;
 here the options valid for all ANN-based MVA methods are declared.
 known options: NCycles=xx              : the number of training cycles
                Normalize=kTRUE,kFALSE  : whether normalised input variables should be used
                HiddenLayers="N-1,N-2"  : the specification of the hidden layers
                NeuronType=sigmoid,tanh,radial,linear : the type of activation function
                                                        used at the neuron
void ProcessOptions()
 decode the options in the option string
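 As a usage illustration (not part of this class's interface): the options above are
 normally passed as a single colon-separated string when booking an ANN-based method
 (e.g. MLP) through the TMVA Factory. File, job and tree names below are placeholders,
 and the Factory interface may differ slightly between TMVA versions.

   #include "TFile.h"
   #include "TMVA/Factory.h"
   #include "TMVA/Types.h"

   void book_ann_example()
   {
      TFile* outputFile = TFile::Open( "TMVA_ann.root", "RECREATE" );
      TMVA::Factory factory( "TMVAnalysis", outputFile, "" );

      // ... signal/background trees and input variables would be registered here ...

      // NCycles      : number of training cycles (epochs)
      // Normalize    : normalise the input variables before training
      // HiddenLayers : "N-1,N-2" = two hidden layers with N-1 and N-2 neurons
      // NeuronType   : activation function used in the hidden layers
      factory.BookMethod( TMVA::Types::kMLP, "MLP",
                          "NCycles=200:Normalize=kTRUE:HiddenLayers=N-1,N-2:NeuronType=tanh" );

      outputFile->Close();
   }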
vector<Int_t>* ParseLayoutString(TString layerSpec)
 parse layout specification string and return a vector, each entry
 containing the number of neurons to go in each successive layer
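 A standalone sketch (not the actual TMVA code) of how such a layout string could be
 decoded, assuming comma-separated entries in which "N" stands for the number of input
 variables; ParseLayoutSketch and its arguments are illustrative names only:

   #include <cstdlib>
   #include <iostream>
   #include <sstream>
   #include <string>
   #include <vector>

   std::vector<int> ParseLayoutSketch( const std::string& layerSpec, int nInputVars )
   {
      std::vector<int> layout;
      std::stringstream spec( layerSpec );
      std::string token;
      while ( std::getline( spec, token, ',' ) ) {            // entries are comma-separated
         if ( !token.empty() && token[0] == 'N' ) {           // "N", "N-1", "N+2", ...
            int offset = ( token.size() > 1 ) ? std::atoi( token.c_str() + 1 ) : 0;
            layout.push_back( nInputVars + offset );
         }
         else {
            layout.push_back( std::atoi( token.c_str() ) );   // plain number of neurons
         }
      }
      return layout;
   }

   int main()
   {
      for ( int n : ParseLayoutSketch( "N-1,N-2", 10 ) ) std::cout << n << " ";  // prints "9 8"
      std::cout << std::endl;
      return 0;
   }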
void InitANNBase()
 initialize ANNBase object
~MethodANNBase()
 destructor
void DeleteNetwork()
 delete/clear network
void DeleteNetworkLayer(TObjArray*& layer)
 delete a network layer
void BuildNetwork(vector<Int_t>* layout, vector<Double_t>* weights)
 build network given a layout (number of neurons in each layer)
 and optional weights array
void BuildLayers(vector<Int_t>* layout)
 build the network layers
void BuildLayer(Int_t numNeurons, TObjArray* curLayer, TObjArray* prevLayer, Int_t layerIndex, Int_t numLayers)
 build a single layer with neurons and synapses connecting this
 layer to the previous layer
void AddPreLinks(TNeuron* neuron, TObjArray* prevLayer)
 add synapses connecting a neuron to its preceding layer
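 Taken together, BuildLayers/BuildLayer/AddPreLinks describe a fully connected
 feed-forward topology. A schematic, self-contained sketch of that construction,
 using plain C++ structures instead of the TMVA TNeuron/TSynapse classes:

   #include <cstddef>
   #include <vector>

   struct Neuron;

   struct Synapse {
      Neuron* pre;      // neuron of the previous layer feeding this synapse
      double  weight;   // synaptic weight
   };

   struct Neuron {
      std::vector<Synapse> preLinks;   // links to the preceding layer
   };

   typedef std::vector<Neuron> Layer;
   typedef std::vector<Layer>  Network;

   // analogue of AddPreLinks: connect one neuron to every neuron of the previous layer
   void AddPreLinksSketch( Neuron& neuron, Layer& prevLayer )
   {
      for ( std::size_t i = 0; i < prevLayer.size(); ++i ) {
         Synapse s = { &prevLayer[i], 0.0 };
         neuron.preLinks.push_back( s );
      }
   }

   // analogue of BuildLayers/BuildLayer: one layer per entry of the layout vector
   Network BuildNetworkSketch( const std::vector<int>& layout )
   {
      Network net;
      net.reserve( layout.size() );      // keep pointers into earlier layers valid
      for ( std::size_t i = 0; i < layout.size(); ++i ) {
         Layer layer( layout[i] );
         if ( i > 0 )
            for ( std::size_t j = 0; j < layer.size(); ++j )
               AddPreLinksSketch( layer[j], net.back() );
         net.push_back( layer );
      }
      return net;
   }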
void InitWeights()
 initialize the synapse weights randomly
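 A minimal sketch of such an initialisation, assuming uniformly distributed random
 weights drawn with the TRandom3 generator (frgen in the member list above); the
 actual range and seeding policy are defined in the source:

   #include "TRandom3.h"
   #include <cstddef>
   #include <vector>

   void InitWeightsSketch( std::vector<double>& weights, UInt_t seed = 0 )
   {
      TRandom3 rng( seed );   // a fixed, non-zero seed gives reproducible weights
      for ( std::size_t i = 0; i < weights.size(); ++i )
         weights[i] = rng.Uniform( -1.0, 1.0 );
   }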
void ForceWeights(vector<Double_t>* weights)
 force the synapse weights
void ForceNetworkInputs(Int_t ignoreIndex)
 force the input value of each input neuron
void ForceNetworkCalculations()
 calculate input values to each neuron
void PrintMessage(TString message, Bool_t force)
 print messages; printing can be turned off by setting the verbose and debug flags appropriately
void WaitForKeyboard()
 wait for keyboard input, for debugging
void PrintNetwork()
 print network representation, for debugging
void PrintLayer(TObjArray* layer)
 print a single layer, for debugging
void PrintNeuron(TNeuron* neuron)
 print a neuron, for debugging
Double_t GetMvaValue()
 get the mva value generated by the NN
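 Conceptually this amounts to forcing the event's variable values onto the input
 neurons, propagating forward layer by layer, and reading the single output neuron.
 A schematic feed-forward pass over plain weight matrices (not the TMVA implementation;
 in particular, the input and output layers of the real network use the identity
 activation fIdentity, while this sketch applies tanh everywhere):

   #include <cmath>
   #include <cstddef>
   #include <vector>

   double FeedForwardSketch( const std::vector<double>& inputs,
                             const std::vector< std::vector< std::vector<double> > >& weights )
   {
      // weights[l][j][i] = weight of the synapse from neuron i of layer l
      //                    to neuron j of layer l+1
      std::vector<double> act = inputs;                          // "ForceNetworkInputs"
      for ( std::size_t l = 0; l < weights.size(); ++l ) {       // "ForceNetworkCalculations"
         std::vector<double> next( weights[l].size(), 0.0 );
         for ( std::size_t j = 0; j < weights[l].size(); ++j ) {
            double sum = 0.0;
            for ( std::size_t i = 0; i < act.size(); ++i ) sum += weights[l][j][i] * act[i];
            next[j] = std::tanh( sum );                          // hidden-layer activation
         }
         act = next;
      }
      return act.front();                                        // "GetNetworkOutput"
   }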
void WriteWeightsToStream( ostream & o)
 write the weights to the stream
void ReadWeightsFromStream( istream & istr)
 destroy/clear the network then read it back in from the weights file
const TMVA::Ranking* CreateRanking()
 compute ranking of input variables by summing function of weights
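 The ranking function itself is defined in the source; purely for illustration, one
 simple choice is to rank each input variable by the summed squared weights of the
 synapses leaving its input neuron (RankInputsSketch below is a hypothetical helper):

   #include <algorithm>
   #include <cstddef>
   #include <numeric>
   #include <vector>

   // firstLayerWeights[j][i] = weight from input neuron i to hidden neuron j
   std::vector<std::size_t> RankInputsSketch( const std::vector< std::vector<double> >& firstLayerWeights )
   {
      const std::size_t nVar = firstLayerWeights.empty() ? 0 : firstLayerWeights.front().size();
      std::vector<double> importance( nVar, 0.0 );
      for ( std::size_t j = 0; j < firstLayerWeights.size(); ++j )
         for ( std::size_t i = 0; i < nVar; ++i )
            importance[i] += firstLayerWeights[j][i] * firstLayerWeights[j][i];

      // indices of the input variables, sorted from most to least important
      std::vector<std::size_t> order( nVar );
      std::iota( order.begin(), order.end(), 0 );
      std::sort( order.begin(), order.end(),
                 [&importance]( std::size_t a, std::size_t b ) { return importance[a] > importance[b]; } );
      return order;
   }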
void WriteMonitoringHistosToFile()
 write histograms to file
void SetActivation(TActivation* activation)
 setters for subclasses
void SetNeuronInputCalculator(TNeuronInput* inputCalculator)
void Train()
 this will have to be overridden by every subclass
Bool_t Debug()
{ return fgDEBUG; }
Double_t GetNetworkOutput()
{ return GetOutputNeuron()->GetActivationValue(); }
Int_t NumCycles()
 accessors
{ return fNcycles; }
Bool_t Normalize()
{ return fNormalize; }
TNeuron* GetInputNeuron(Int_t index)
{ return (TNeuron*)fInputLayer->At(index); }
TNeuron* GetOutputNeuron()
{ return fOutputNeuron; }

Author: Andreas Hoecker, Matt Jachowski
Last update: root/tmva $Id: MethodANNBase.cxx,v 1.10 2006/11/20 15:35:28 brun Exp $
Copyright (c) 2005: *

