
class TMVA::MethodANNBase: public TMVA::MethodBase


 Base class for all TMVA methods using artificial neural networks


Function Members (Methods)

 
    This is an abstract class, constructors will not be documented.
    Look at the header to check for available constructors.

public:
virtual ~MethodANNBase()
void TObject::AbstractMethod(const char* method) const
void TMVA::Configurable::AddOptionsXMLTo(void* parent) const
void TMVA::MethodBase::AddOutput(TMVA::Types::ETreeType type, TMVA::Types::EAnalysisType analysisType)
virtual void AddWeightsXMLTo(void* parent) const
virtual void TObject::AppendPad(Option_t* option = "")
TDirectory* TMVA::MethodBase::BaseDir() const
virtual void TObject::Browse(TBrowser* b)
void TMVA::Configurable::CheckForUnusedOptions() const
virtual void TMVA::MethodBase::CheckSetup()
static TClass* Class()
virtual const char* TObject::ClassName() const
virtual void TObject::Clear(Option_t* = "")
virtual TObject* TObject::Clone(const char* newname = "") const
virtual Int_t TObject::Compare(const TObject* obj) const
TMVA::Configurable TMVA::Configurable::Configurable(const TString& theOption = "")
virtual void TObject::Copy(TObject& object) const
virtual const TMVA::Ranking* CreateRanking()
TMVA::DataSet* TMVA::MethodBase::Data() const
TMVA::DataSetInfo& TMVA::MethodBase::DataInfo() const
Bool_t Debug() const
virtual void TMVA::MethodBase::DeclareCompatibilityOptions()
virtual void DeclareOptions()
virtual void TObject::Delete(Option_t* option = "")
void TMVA::MethodBase::DisableWriting(Bool_t setter)
virtual Int_t TObject::DistancetoPrimitive(Int_t px, Int_t py)
Bool_t TMVA::MethodBase::DoMulticlass() const
Bool_t TMVA::MethodBase::DoRegression() const
virtual void TObject::Draw(Option_t* option = "")
virtual void TObject::DrawClass() const
virtual TObject* TObject::DrawClone(Option_t* option = "") const
virtual void TObject::Dump() const
virtual void TObject::Error(const char* method, const char* msgfmt) const
virtual void TObject::Execute(const char* method, const char* params, Int_t* error = 0)
virtual void TObject::Execute(TMethod* method, TObjArray* params, Int_t* error = 0)
virtual void TObject::ExecuteEvent(Int_t event, Int_t px, Int_t py)
virtual void TObject::Fatal(const char* method, const char* msgfmt) const
virtual TObject* TObject::FindObject(const char* name) const
virtual TObject* TObject::FindObject(const TObject* obj) const
TMVA::Types::EAnalysisType TMVA::MethodBase::GetAnalysisType() const
const char* TMVA::Configurable::GetConfigDescription() const
const char* TMVA::Configurable::GetConfigName() const
virtual Option_t* TObject::GetDrawOption() const
static Long_t TObject::GetDtorOnly()
virtual Double_t TMVA::MethodBase::GetEfficiency(const TString&, TMVA::Types::ETreeType, Double_t& err)
const TMVA::Event* TMVA::MethodBase::GetEvent() const
const TMVA::Event* TMVA::MethodBase::GetEvent(const TMVA::Event* ev) const
const TMVA::Event* TMVA::MethodBase::GetEvent(Long64_t ievt) const
const TMVA::Event* TMVA::MethodBase::GetEvent(Long64_t ievt, TMVA::Types::ETreeType type) const
const vector<TMVA::Event*>& TMVA::MethodBase::GetEventCollection(TMVA::Types::ETreeType type)
virtual const char* TObject::GetIconName() const
const TString& TMVA::MethodBase::GetInputLabel(Int_t i) const
const TString& TMVA::MethodBase::GetInputTitle(Int_t i) const
const TString& TMVA::MethodBase::GetInputVar(Int_t i) const
const TString& TMVA::MethodBase::GetJobName() const
virtual Double_t TMVA::MethodBase::GetMaximumSignificance(Double_t SignalEvents, Double_t BackgroundEvents, Double_t& optimal_significance_value) const
Double_t TMVA::MethodBase::GetMean(Int_t ivar) const
const TString& TMVA::MethodBase::GetMethodName() const
TMVA::Types::EMVA TMVA::MethodBase::GetMethodType() const
TString TMVA::MethodBase::GetMethodTypeName() const
virtual vector<Float_t> TMVA::MethodBase::GetMulticlassEfficiency(vector<std::vector<Float_t> >& purity)
virtual vector<Float_t> TMVA::MethodBase::GetMulticlassTrainingEfficiency(vector<std::vector<Float_t> >& purity)
virtual const vector<Float_t>& GetMulticlassValues()
virtual Double_t GetMvaValue(Double_t* err = 0, Double_t* errUpper = 0)
virtual const char* TMVA::MethodBase::GetName() const
UInt_t TMVA::MethodBase::GetNEvents() const
UInt_t TMVA::MethodBase::GetNTargets() const
UInt_t TMVA::MethodBase::GetNvar() const
UInt_t TMVA::MethodBase::GetNVariables() const
virtual char* TObject::GetObjectInfo(Int_t px, Int_t py) const
static Bool_t TObject::GetObjectStat()
virtual Option_t* TObject::GetOption() const
const TString& TMVA::Configurable::GetOptions() const
virtual Double_t TMVA::MethodBase::GetProba(Double_t mvaVal, Double_t ap_sig)
const TString TMVA::MethodBase::GetProbaName() const
virtual Double_t TMVA::MethodBase::GetRarity(Double_t mvaVal, TMVA::Types::ESBType reftype = Types::kBackground) const
virtual void TMVA::MethodBase::GetRegressionDeviation(UInt_t tgtNum, TMVA::Types::ETreeType type, Double_t& stddev, Double_t& stddev90Percent) const
virtual const vector<Float_t>& GetRegressionValues()
Double_t TMVA::MethodBase::GetRMS(Int_t ivar) const
virtual Double_t TMVA::MethodBase::GetROCIntegral(TMVA::PDF* pdfS = 0, TMVA::PDF* pdfB = 0) const
virtual Double_t TMVA::MethodBase::GetSeparation(TH1*, TH1*) const
virtual Double_t TMVA::MethodBase::GetSeparation(TMVA::PDF* pdfS = 0, TMVA::PDF* pdfB = 0) const
Double_t TMVA::MethodBase::GetSignalReferenceCut() const
virtual Double_t TMVA::MethodBase::GetSignificance() const
const TMVA::Event* TMVA::MethodBase::GetTestingEvent(Long64_t ievt) const
Double_t TMVA::MethodBase::GetTestTime() const
const TString& TMVA::MethodBase::GetTestvarName() const
virtual const char* TObject::GetTitle() const
virtual Double_t TMVA::MethodBase::GetTrainingEfficiency(const TString&)
const TMVA::Event* TMVA::MethodBase::GetTrainingEvent(Long64_t ievt) const
UInt_t TMVA::MethodBase::GetTrainingROOTVersionCode() const
TString TMVA::MethodBase::GetTrainingROOTVersionString() const
UInt_t TMVA::MethodBase::GetTrainingTMVAVersionCode() const
TString TMVA::MethodBase::GetTrainingTMVAVersionString() const
Double_t TMVA::MethodBase::GetTrainTime() const
TMVA::TransformationHandler& TMVA::MethodBase::GetTransformationHandler()
const TMVA::TransformationHandler& TMVA::MethodBase::GetTransformationHandler() const
virtual UInt_t TObject::GetUniqueID() const
TString TMVA::MethodBase::GetWeightFileName() const
Double_t TMVA::MethodBase::GetXmax(Int_t ivar) const
Double_t TMVA::MethodBase::GetXmin(Int_t ivar) const
virtual Bool_t TObject::HandleTimer(TTimer* timer)
virtual Bool_t TMVA::IMethod::HasAnalysisType(TMVA::Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets)
virtual ULong_t TObject::Hash() const
Bool_t TMVA::MethodBase::HasMVAPdfs() const
virtual void TObject::Info(const char* method, const char* msgfmt) const
virtual Bool_t TObject::InheritsFrom(const char* classname) const
virtual Bool_t TObject::InheritsFrom(const TClass* cl) const
virtual void TMVA::MethodBase::Init()
void InitANNBase()
virtual void TObject::Inspect() const
void TObject::InvertBit(UInt_t f)
virtual TClass* IsA() const
virtual Bool_t TObject::IsEqual(const TObject* obj) const
virtual Bool_t TObject::IsFolder() const
Bool_t TObject::IsOnHeap() const
virtual Bool_t TMVA::MethodBase::IsSignalLike()
virtual Bool_t TObject::IsSortable() const
Bool_t TObject::IsZombie() const
virtual void TObject::ls(Option_t* option = "") const
virtual void TMVA::MethodBase::MakeClass(const TString& classFileName = TString("")) const
void TObject::MayNotUse(const char* method) const
TDirectory* TMVA::MethodBase::MethodBaseDir() const
virtual Bool_t TMVA::MethodBase::MonitorBoost(TMVA::MethodBoost*)
virtual Bool_t TObject::Notify()
static void TObject::operator delete(void* ptr)
static void TObject::operator delete(void* ptr, void* vp)
static void TObject::operator delete[](void* ptr)
static void TObject::operator delete[](void* ptr, void* vp)
void* TObject::operator new(size_t sz)
void* TObject::operator new(size_t sz, void* vp)
void* TObject::operator new[](size_t sz)
void* TObject::operator new[](size_t sz, void* vp)
TMVA::IMethod& TMVA::IMethod::operator=(const TMVA::IMethod&)
virtual map<TString,Double_t> TMVA::MethodBase::OptimizeTuningParameters(TString fomType = "ROCIntegral", TString fitType = "FitGA")
virtual void TObject::Paint(Option_t* option = "")
virtual void TMVA::Configurable::ParseOptions()
virtual void TObject::Pop()
virtual void TObject::Print(Option_t* option = "") const
virtual void TMVA::MethodBase::PrintHelpMessage() const
virtual void PrintNetwork() const
void TMVA::Configurable::PrintOptions() const
virtual void ProcessOptions()
void TMVA::MethodBase::ProcessSetup()
virtual Int_t TObject::Read(const char* name)
void TMVA::Configurable::ReadOptionsFromStream(istream& istr)
void TMVA::Configurable::ReadOptionsFromXML(void* node)
void TMVA::MethodBase::ReadStateFromFile()
void TMVA::MethodBase::ReadStateFromStream(istream& tf)
void TMVA::MethodBase::ReadStateFromStream(TFile& rf)
void TMVA::MethodBase::ReadStateFromXMLString(const char* xmlstr)
virtual void ReadWeightsFromStream(istream& istr)
virtual void ReadWeightsFromXML(void* wghtnode)
virtual void TObject::RecursiveRemove(TObject* obj)
virtual void TMVA::MethodBase::Reset()
void TObject::ResetBit(UInt_t f)
virtual void TObject::SaveAs(const char* filename = "", Option_t* option = "") const
virtual void TObject::SavePrimitive(basic_ostream<char,char_traits<char> >& out, Option_t* option = "")
void SetActivation(TMVA::TActivation* activation)
virtual void TMVA::MethodBase::SetAnalysisType(TMVA::Types::EAnalysisType type)
void TMVA::MethodBase::SetBaseDir(TDirectory* methodDir)
void TObject::SetBit(UInt_t f)
void TObject::SetBit(UInt_t f, Bool_t set)
void TMVA::Configurable::SetConfigDescription(const char* d)
void TMVA::Configurable::SetConfigName(const char* n)
virtual void TMVA::MethodBase::SetCurrentEvent(Long64_t ievt) const
virtual void TObject::SetDrawOption(Option_t* option = "")
static void TObject::SetDtorOnly(void* obj)
void TMVA::MethodBase::SetMethodBaseDir(TDirectory* methodDir)
void TMVA::MethodBase::SetMethodDir(TDirectory* methodDir)
void TMVA::Configurable::SetMsgType(TMVA::EMsgType t)
void SetNeuronInputCalculator(TMVA::TNeuronInput* inputCalculator)
static void TObject::SetObjectStat(Bool_t stat)
void TMVA::Configurable::SetOptions(const TString& s)
void TMVA::MethodBase::SetSignalReferenceCut(Double_t cut)
void TMVA::MethodBase::SetTestTime(Double_t testTime)
void TMVA::MethodBase::SetTestvarName(const TString& v = "")
void TMVA::MethodBase::SetTrainTime(Double_t trainTime)
virtual void TMVA::MethodBase::SetTuneParameters(map<TString,Double_t> tuneParameters)
virtual void TObject::SetUniqueID(UInt_t uid)
void TMVA::MethodBase::SetupMethod()
virtual void ShowMembers(TMemberInspector& insp)
virtual void Streamer(TBuffer& b)
void StreamerNVirtual(TBuffer& b)
virtual void TObject::SysError(const char* method, const char* msgfmt) const
Bool_t TObject::TestBit(UInt_t f) const
Int_t TObject::TestBits(UInt_t f) const
virtual void TMVA::MethodBase::TestClassification()
virtual void TMVA::MethodBase::TestMulticlass()
virtual void TMVA::MethodBase::TestRegression(Double_t& bias, Double_t& biasT, Double_t& dev, Double_t& devT, Double_t& rms, Double_t& rmsT, Double_t& mInf, Double_t& mInfT, Double_t& corr, TMVA::Types::ETreeType type)
virtual void Train()
void TMVA::MethodBase::TrainMethod()
virtual void TObject::UseCurrentStyle()
virtual void TObject::Warning(const char* method, const char* msgfmt) const
virtual Int_t TObject::Write(const char* name = 0, Int_t option = 0, Int_t bufsize = 0)
virtual Int_t TObject::Write(const char* name = 0, Int_t option = 0, Int_t bufsize = 0) const
virtual void TMVA::MethodBase::WriteEvaluationHistosToFile(TMVA::Types::ETreeType treetype)
virtual void WriteMonitoringHistosToFile() const
void TMVA::Configurable::WriteOptionsToStream(ostream& o, const TString& prefix) const
void TMVA::MethodBase::WriteStateToFile() const
protected:
virtual void BuildNetwork(vector<Int_t>* layout, vector<Double_t>* weights = NULL, Bool_t fromFile = kFALSE)
void CreateWeightMonitoringHists(const TString& bulkname, vector<TH1*>* hv = 0) const
virtual void TObject::DoError(int level, const char* location, const char* fmt, va_list va) const
void TMVA::Configurable::EnableLooseOptions(Bool_t b = kTRUE)
void ForceNetworkCalculations()
void ForceNetworkInputs(const TMVA::Event* ev, Int_t ignoreIndex = -1)
virtual void TMVA::IMethod::GetHelpMessage() const
TMVA::TNeuron* GetInputNeuron(Int_t index)
const TString& TMVA::MethodBase::GetInternalVarName(Int_t ivar) const
Double_t GetNetworkOutput()
const TString& TMVA::MethodBase::GetOriginalVarName(Int_t ivar) const
TMVA::TNeuron* GetOutputNeuron(Int_t index = 0)
const TString& TMVA::Configurable::GetReferenceFile() const
static TMVA::MethodBase* TMVA::MethodBase::GetThisBase()
Float_t TMVA::MethodBase::GetTWeight(const TMVA::Event* ev) const
const TString& TMVA::MethodBase::GetWeightFileDir() const
Bool_t TMVA::MethodBase::HasTrainingTree() const
Bool_t TMVA::MethodBase::Help() const
Bool_t TMVA::MethodBase::IgnoreEventsWithNegWeightsInTraining() const
Bool_t TMVA::MethodBase::IsConstructedFromWeightFile() const
Bool_t TMVA::MethodBase::IsNormalised() const
TMVA::MsgLogger& TMVA::Configurable::Log() const
Bool_t TMVA::Configurable::LooseOptionCheckingEnabled() const
virtual void MakeClassSpecific(ostream&, const TString&) const
virtual void TMVA::MethodBase::MakeClassSpecificHeader(ostream&, const TString& = "") const
void TObject::MakeZombie()
void TMVA::MethodBase::NoErrorCalc(Double_t *const err, Double_t *const errUpper)
Int_t NumCycles()
vector<Int_t>* ParseLayoutString(TString layerSpec)
void PrintMessage(TString message, Bool_t force = kFALSE) const
void TMVA::Configurable::ResetSetFlag()
void TMVA::MethodBase::SetNormalised(Bool_t norm)
void TMVA::MethodBase::SetWeightFileDir(TString fileDir)
void TMVA::MethodBase::SetWeightFileName(TString)
void TMVA::MethodBase::Statistics(TMVA::Types::ETreeType treeType, const TString& theVarName, Double_t&, Double_t&, Double_t&, Double_t&, Double_t&, Double_t&)
Bool_t TMVA::MethodBase::TxtWeightsOnly() const
Bool_t TMVA::MethodBase::Verbose() const
void WaitForKeyboard()
void TMVA::Configurable::WriteOptionsReferenceToFile()
private:
void AddPreLinks(TMVA::TNeuron* neuron, TObjArray* prevLayer)
void BuildLayer(Int_t numNeurons, TObjArray* curLayer, TObjArray* prevLayer, Int_t layerIndex, Int_t numLayers, Bool_t from_file = false)
void BuildLayers(vector<Int_t>* layout, Bool_t from_file = false)
void DeleteNetwork()
void DeleteNetworkLayer(TObjArray*& layer)
void ForceWeights(vector<Double_t>* weights)
void InitWeights()
void PrintLayer(TObjArray* layer) const
void PrintNeuron(TMVA::TNeuron* neuron) const

Data Members

public:
enum EEstimator { kMSE, kCE };
enum TMVA::MethodBase::EWeightFileType { kROOT, kTEXT };
enum TObject::EStatusBits { kCanDelete, kMustCleanup, kObjInCanvas, kIsReferenced,
                            kHasUUID, kCannotPick, kNoContextMenu, kInvalidObject };
enum TObject::[unnamed] { kIsOnHeap, kNotDeleted, kZombie, kBitMask,
                          kSingleKey, kOverwrite, kWriteDelete };
public:
Bool_t TMVA::MethodBase::fSetupCompleted  flag: is method setup completed
const TMVA::Event* TMVA::MethodBase::fTmpEvent  ! temporary event when testing on a different DataSet than the own one
protected:
TMVA::TActivation* fActivation  activation function to be used for hidden layers
TMVA::Types::EAnalysisType TMVA::MethodBase::fAnalysisType  method mode: true --> regression, false --> classification
UInt_t TMVA::MethodBase::fBackgroundClass  index of the Background class
vector<TH1*> fEpochMonHistB  epoch monitoring histograms for background
vector<TH1*> fEpochMonHistS  epoch monitoring histograms for signal
vector<TH1*> fEpochMonHistW  epoch monitoring histograms for weights
TMVA::MethodANNBase::EEstimator fEstimator  estimator type (kMSE or kCE)
TH1F* fEstimatorHistTest  monitors convergence of independent test sample
TH1F* fEstimatorHistTrain  monitors convergence of training sample
TString fEstimatorS  estimator option string
TMVA::TActivation* fIdentity  activation for input and output layers
TMVA::TNeuronInput* fInputCalculator  input calculator for all neurons
vector<TString>* TMVA::MethodBase::fInputVars  vector of input variables used in MVA
TMatrixD fInvHessian  approximate inverse Hessian (used with the regulators)
vector<Float_t>* TMVA::MethodBase::fMulticlassReturnVal  holds the return values for the multiclass classification
Int_t TMVA::MethodBase::fNbins  number of bins in representative histograms
Int_t TMVA::MethodBase::fNbinsH  number of bins in evaluation histograms
TObjArray* fNetwork  TObjArray of TObjArrays representing the network
TMVA::TActivation* fOutput  activation function to be used for the output layer, depending on the estimator
Int_t fRandomSeed  random seed for initial synapse weights
TMVA::Ranking* TMVA::MethodBase::fRanking  pointer to ranking object (created by derived classifiers)
vector<Float_t>* TMVA::MethodBase::fRegressionReturnVal  holds the return values for the regression
vector<Int_t> fRegulatorIdx  index to the different priors, one per synapse
vector<Double_t> fRegulators  the priors used as regulators
UInt_t TMVA::MethodBase::fSignalClass  index of the Signal class
TObjArray* fSynapses  array of pointers to synapses, no structural data
bool fUseRegulator  whether to use Bayesian regularisation of the weights
TRandom3* frgen  random number generator for various uses
private:
TObjArray* fInputLayer  cache of the input layer for fast access
TString fLayerSpec  layout specification option
Int_t fNcycles  number of epochs to train
TString fNeuronInputType  name of neuron input calculator class
TString fNeuronType  name of neuron activation function class
vector<TNeuron*> fOutputNeurons  cache of the output neurons for fast access
static const Bool_t fgDEBUG  debug flag

Function documentation

void DeclareOptions()
 define the options (their key words) that can be set in the option string;
 here the options valid for all ANN-based MVA methods are declared.
 known options: NCycles=xx              : the number of training cycles
                Normalize=kTRUE,kFALSE  : whether the input variables should be normalised
                HiddenLayers="N-1,N-2"  : the specification of the hidden layers
                NeuronType=sigmoid,tanh,radial,linear : the type of activation function
                                                        used at the neurons
 A booking example using these options is sketched below.
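
 For orientation, a minimal booking sketch passing these options to the MLP (an
 ANN method derived from this class). It assumes the ROOT 5-era TMVA::Factory
 interface without a DataLoader; the file name, tree pointers and variable
 names are placeholders:

    #include "TFile.h"
    #include "TTree.h"
    #include "TMVA/Factory.h"
    #include "TMVA/Types.h"

    void book_ann(TTree* sigTree, TTree* bkgTree)
    {
       TFile* out = TFile::Open("TMVA_ANN.root", "RECREATE");
       TMVA::Factory factory("TMVAClassification", out, "AnalysisType=Classification");

       factory.AddVariable("var1", 'F');
       factory.AddVariable("var2", 'F');
       factory.AddSignalTree(sigTree);
       factory.AddBackgroundTree(bkgTree);
       factory.PrepareTrainingAndTestTree("", "SplitMode=Random:NormMode=NumEvents");

       // the ANN-base options from DeclareOptions(), passed as one option string
       factory.BookMethod(TMVA::Types::kMLP, "MLP",
                          "NCycles=600:HiddenLayers=N,N-1:NeuronType=tanh:Normalize=kTRUE");

       factory.TrainAllMethods();
       factory.TestAllMethods();
       factory.EvaluateAllMethods();
       out->Close();
    }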

void ProcessOptions()
 do nothing specific at this moment
vector<Int_t>* ParseLayoutString(TString layerSpec)
 parse layout specification string and return a vector, each entry
 containing the number of neurons to go in each successive layer
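
 As an illustration of the layout convention, the following stand-alone sketch
 (a simplified reimplementation, not the actual TMVA code) expands a hidden-layer
 spec such as "N,N-1", where "N" stands for the number of input variables:

    #include <vector>
    #include "TString.h"
    #include "TObjArray.h"
    #include "TObjString.h"

    std::vector<Int_t> ExpandLayout(const TString& spec, Int_t nVars)
    {
       std::vector<Int_t> sizes;
       TObjArray* fields = spec.Tokenize(",");
       for (Int_t i = 0; i < fields->GetEntriesFast(); ++i) {
          TString f = ((TObjString*)fields->At(i))->GetString();
          Int_t n;
          if (f.BeginsWith("N")) {
             TString offset = f(1, f.Length() - 1);        // "", "+2", "-1", ...
             n = nVars + (offset.IsNull() ? 0 : offset.Atoi());
          } else {
             n = f.Atoi();                                  // plain integers are absolute sizes
          }
          sizes.push_back(n);
       }
       delete fields;
       return sizes;
    }
    // ExpandLayout("N,N-1", 4) yields {4, 3}; MethodANNBase then prepends the
    // input layer and appends the output layer to form the full layout vector.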
void InitANNBase()
 initialize ANNBase object
~MethodANNBase()
 destructor
void DeleteNetwork()
 delete/clear network
void DeleteNetworkLayer(TObjArray*& layer)
 delete a network layer
void BuildNetwork(vector<Int_t>* layout, vector<Double_t>* weights = NULL, Bool_t fromFile = kFALSE)
 build network given a layout (number of neurons in each layer)
 and optional weights array
void BuildLayers(vector<Int_t>* layout, Bool_t from_file = false)
 build the network layers
void BuildLayer(Int_t numNeurons, TObjArray* curLayer, TObjArray* prevLayer, Int_t layerIndex, Int_t numLayers, Bool_t from_file = false)
 build a single layer with neurons and synapses connecting this
 layer to the previous layer
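
 The result is the fNetwork data member documented above: a TObjArray of layers,
 each itself a TObjArray of TMVA::TNeuron objects. A sketch of traversing such a
 structure, assuming only that documented container layout:

    #include <cstdio>
    #include "TObjArray.h"
    #include "TMVA/TNeuron.h"

    void CountNeurons(const TObjArray* network)
    {
       for (Int_t l = 0; l < network->GetEntriesFast(); ++l) {
          TObjArray* layer = (TObjArray*)network->At(l);
          std::printf("layer %d: %d neurons\n", l, layer->GetEntriesFast());
          for (Int_t n = 0; n < layer->GetEntriesFast(); ++n) {
             TMVA::TNeuron* neuron = (TMVA::TNeuron*)layer->At(n);
             (void)neuron;   // each neuron owns the synapses linking it to the previous layer
          }
       }
    }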
void AddPreLinks(TMVA::TNeuron* neuron, TObjArray* prevLayer)
 add synapses connecting a neuron to its preceding layer
void InitWeights()
 initialize the synapse weights randomly
void ForceWeights(vector<Double_t>* weights)
 force the synapse weights
void ForceNetworkInputs(const TMVA::Event* ev, Int_t ignoreIndex = -1)
 force the value of each input neuron to the corresponding variable of the
 given event (a single input can be skipped via ignoreIndex)
void ForceNetworkCalculations()
 calculate input values to each neuron
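
 Conceptually, these two steps perform one forward pass: copy the event values
 into the input neurons, then let every subsequent layer compute its activations
 from the previous one. A plain-C++ sketch of that idea (not the TNeuron/TSynapse
 code itself, and assuming sigmoid activations):

    #include <cmath>
    #include <vector>

    // weights[l][j][i]: synapse from neuron i of layer l to neuron j of layer l+1
    std::vector<double> Forward(const std::vector<std::vector<std::vector<double> > >& weights,
                                std::vector<double> act)      // act = forced input values
    {
       for (size_t l = 0; l < weights.size(); ++l) {
          std::vector<double> next(weights[l].size());
          for (size_t j = 0; j < weights[l].size(); ++j) {
             double sum = 0.0;
             for (size_t i = 0; i < act.size(); ++i)
                sum += weights[l][j][i] * act[i];             // weighted sum of inputs
             next[j] = 1.0 / (1.0 + std::exp(-sum));          // neuron activation
          }
          act = next;
       }
       return act;                                            // output-layer activations
    }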
void PrintMessage(TString message, Bool_t force = kFALSE) const
 print messages, turn off printing by setting verbose and debug flag appropriately
void WaitForKeyboard()
 wait for keyboard input, for debugging
void PrintNetwork() const
 print network representation, for debugging
void PrintLayer(TObjArray* layer) const
 print a single layer, for debugging
void PrintNeuron(TMVA::TNeuron* neuron) const
 print a neuron, for debugging
Double_t GetMvaValue(Double_t* err = 0, Double_t* errUpper = 0)
 get the mva value generated by the NN
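
 In an application, GetMvaValue() is normally invoked indirectly through
 TMVA::Reader; the sketch below assumes a weight file produced by a previous
 training and placeholder variable names (EvaluateRegression/EvaluateMulticlass
 work analogously for the two methods documented next):

    #include "TMVA/Reader.h"

    Float_t var1, var2;                      // linked to the reader by address
    TMVA::Reader reader("!Color:!Silent");

    void evaluate_example()
    {
       reader.AddVariable("var1", &var1);
       reader.AddVariable("var2", &var2);
       reader.BookMVA("MLP", "weights/TMVAClassification_MLP.weights.xml");

       var1 = 0.3f;  var2 = -1.2f;           // values of the event under test
       Double_t mva = reader.EvaluateMVA("MLP");   // classifier output
       (void)mva;
    }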
const std::vector<Float_t> & GetRegressionValues()
 get the regression value generated by the NN
const std::vector<Float_t> & GetMulticlassValues()
 get the multiclass classification values generated by the NN
void AddWeightsXMLTo(void* parent) const
 create XML description of ANN classifier
void ReadWeightsFromXML(void* wghtnode)
 read MLP from xml weight file
void ReadWeightsFromStream(istream& istr)
 destroy/clear the network then read it back in from the weights file
const TMVA::Ranking* CreateRanking()
 compute ranking of input variables by summing function of weights
void CreateWeightMonitoringHists(const TString& bulkname, vector<TH1*>* hv = 0) const
void WriteMonitoringHistosToFile() const
 write histograms to file
void MakeClassSpecific(ostream& , const TString& ) const
 write specific classifier response
Bool_t Debug() const
 return the static debug flag
void SetActivation(TMVA::TActivation* activation)
 setters for subclasses
void SetNeuronInputCalculator(TMVA::TNeuronInput* inputCalculator)
void Train()
 this will have to be overridden by every subclass
Double_t GetNetworkOutput()
Int_t NumCycles()
 accessors
{ return fNcycles; }
TNeuron* GetInputNeuron(Int_t index)
{ return (TNeuron*)fInputLayer->At(index); }
TNeuron* GetOutputNeuron(Int_t index = 0)
{ return fOutputNeurons.at(index); }