43 #if __cplusplus > 199711L
80 TMVA::MethodANNBase::MethodANNBase( const
TString& jobName,
81 Types::EMVA methodType,
86 : TMVA::MethodBase( jobName, methodType, methodTitle, theData, theOption, theTargetDir )
103 : TMVA::
MethodBase( methodType, theData, theWeightFile, theTargetDir )
125 DeclareOptionRef( fNcycles = 500,
"NCycles",
"Number of training cycles" );
126 DeclareOptionRef( fLayerSpec =
"N,N-1",
"HiddenLayers",
"Specification of hidden layer architecture" );
127 DeclareOptionRef( fNeuronType =
"sigmoid",
"NeuronType",
"Neuron activation function type" );
128 DeclareOptionRef( fRandomSeed = 1,
"RandomSeed",
"Random seed for initial synapse weights (0 means unique seed for each run; default value '1')");
130 DeclareOptionRef(fEstimatorS=
"MSE",
"EstimatorType",
131 "MSE (Mean Square Estimator) for Gaussian Likelihood or CE(Cross-Entropy) for Bernoulli Likelihood" );
138 Int_t nTypes = names->size();
139 for (
Int_t i = 0; i < nTypes; i++)
140 AddPreDefVal(names->at(i));
143 DeclareOptionRef(fNeuronInputType=
"sum",
"NeuronInputType",
"Neuron input function type");
146 nTypes = names->size();
147 for (
Int_t i = 0; i < nTypes; i++) AddPreDefVal(names->at(i));
157 if ( DoRegression() || DoMulticlass()) fEstimatorS =
"MSE";
158 else fEstimatorS =
"CE" ;
159 if (fEstimatorS ==
"MSE" ) fEstimator = kMSE;
160 else if (fEstimatorS ==
"CE") fEstimator = kCE;
161 std::vector<Int_t>* layout = ParseLayoutString(fLayerSpec);
162 BuildNetwork(layout);
172 std::vector<Int_t>* layout =
new std::vector<Int_t>();
173 layout->push_back((
Int_t)GetNvar());
174 while(layerSpec.
Length()>0) {
176 if (layerSpec.
First(
',')<0) {
181 sToAdd = layerSpec(0,layerSpec.
First(
','));
182 layerSpec = layerSpec(layerSpec.First(
',')+1,layerSpec.Length());
186 nNodes += atoi(sToAdd);
187 layout->push_back(nNodes);
190 layout->push_back( DataInfo().GetNTargets() );
191 else if( DoMulticlass() )
192 layout->push_back( DataInfo().GetNClasses() );
194 layout->push_back(1);
197 for( std::vector<Int_t>::iterator it = layout->begin(); it != layout->end(); it++ ){
214 fInputCalculator =
NULL;
216 fEstimatorHistTrain =
NULL;
217 fEstimatorHistTest =
NULL;
220 fEpochMonHistS.clear();
221 fEpochMonHistB.clear();
222 fEpochMonHistW.clear();
226 fOutputNeurons.clear();
246 if (fNetwork !=
NULL) {
249 for (
Int_t i = 0; i < numLayers; i++) {
251 DeleteNetworkLayer(layer);
256 if (frgen !=
NULL)
delete frgen;
257 if (fActivation !=
NULL)
delete fActivation;
258 if (fOutput !=
NULL)
delete fOutput;
259 if (fIdentity !=
NULL)
delete fIdentity;
260 if (fInputCalculator !=
NULL)
delete fInputCalculator;
261 if (fSynapses !=
NULL)
delete fSynapses;
268 fInputCalculator =
NULL;
279 for (
Int_t i = 0; i < numNeurons; i++) {
293 if (fEstimatorS ==
"MSE") fEstimator = kMSE;
294 else if (fEstimatorS ==
"CE") fEstimator = kCE;
295 else Log()<<
kWARNING<<
"fEstimator="<<fEstimator<<
"\tfEstimatorS="<<fEstimatorS<<
Endl;
296 if (fEstimator!=kMSE && fEstimator!=kCE)
Log()<<
kWARNING<<
"Estimator type unspecified \t"<<
Endl;
313 fRegulatorIdx.clear();
315 BuildLayers( layout, fromFile );
318 fInputLayer = (
TObjArray*)fNetwork->At(0);
320 fOutputNeurons.clear();
322 fOutputNeurons.push_back( (
TNeuron*)outputLayer->
At(i) );
325 if (weights ==
NULL) InitWeights();
326 else ForceWeights(weights);
340 Int_t numLayers = layout->size();
342 for (
Int_t i = 0; i < numLayers; i++) {
344 BuildLayer(layout->at(i), curLayer, prevLayer, i, numLayers, fromFile);
345 prevLayer = curLayer;
346 fNetwork->
Add(curLayer);
350 for (
Int_t i = 0; i < numLayers; i++) {
353 if (i!=0 && i!=numLayers-1) fRegulators.push_back(0.);
354 for (
Int_t j = 0; j < numNeurons; j++) {
355 if (i==0) fRegulators.push_back(0.);
358 for (
Int_t k = 0; k < numSynapses; k++) {
360 fSynapses->Add(synapse);
361 fRegulatorIdx.push_back(fRegulators.size()-1);
376 for (
Int_t j = 0; j < numNeurons; j++) {
377 if (fromFile && (layerIndex != numLayers-1) && (j==numNeurons-1)){
382 curLayer->
Add(neuron);
389 if (layerIndex == 0) {
395 if (layerIndex == numLayers-1) {
401 AddPreLinks(neuron, prevLayer);
404 curLayer->
Add(neuron);
410 if (layerIndex != numLayers-1) {
415 curLayer->
Add(neuron);
429 for (
Int_t i = 0; i < numNeurons; i++) {
444 PrintMessage(
"Initializing weights");
447 Int_t numSynapses = fSynapses->GetEntriesFast();
449 for (
Int_t i = 0; i < numSynapses; i++) {
450 synapse = (
TSynapse*)fSynapses->At(i);
451 synapse->
SetWeight(4.0*frgen->Rndm() - 2.0);
460 PrintMessage(
"Forcing weights");
462 Int_t numSynapses = fSynapses->GetEntriesFast();
464 for (
Int_t i = 0; i < numSynapses; i++) {
465 synapse = (
TSynapse*)fSynapses->At(i);
480 for (
UInt_t j = 0; j < GetNvar(); j++) {
484 neuron = GetInputNeuron(j);
496 Int_t numLayers = fNetwork->GetEntriesFast();
499 for (
Int_t i = 0; i < numLayers; i++) {
503 for (
Int_t j = 0; j < numNeurons; j++) {
526 Log() <<
kINFO <<
"***Type anything to continue (q to quit): ";
527 std::getline(std::cin, dummy);
528 if (dummy ==
"q" || dummy ==
"Q") {
529 PrintMessage(
"quit" );
540 if (!
Debug())
return;
543 PrintMessage(
"Printing network " );
544 Log() <<
kINFO <<
"-------------------------------------------------------------------" <<
Endl;
549 for (
Int_t i = 0; i < numLayers; i++) {
554 Log() <<
kINFO <<
"Layer #" << i <<
" (" << numNeurons <<
" neurons):" <<
Endl;
555 PrintLayer( curLayer );
567 for (
Int_t j = 0; j < numNeurons; j++) {
571 PrintNeuron( neuron );
581 <<
"\t\tValue:\t" << neuron->
GetValue()
584 Log() <<
kINFO <<
"\t\tActivationEquation:\t";
601 const Event * ev = GetEvent();
603 for (
UInt_t i = 0; i < GetNvar(); i++) {
607 ForceNetworkCalculations();
614 NoErrorCalc(err, errUpper);
628 const Event * ev = GetEvent();
630 for (
UInt_t i = 0; i < GetNvar(); i++) {
634 ForceNetworkCalculations();
639 if (fRegressionReturnVal ==
NULL) fRegressionReturnVal =
new std::vector<Float_t>();
640 fRegressionReturnVal->clear();
644 for (
UInt_t itgt = 0; itgt < ntgts; itgt++) {
648 const Event* evT2 = GetTransformationHandler().InverseTransform( evT );
649 for (
UInt_t itgt = 0; itgt < ntgts; itgt++) {
650 fRegressionReturnVal->push_back( evT2->
GetTarget(itgt) );
655 return *fRegressionReturnVal;
675 const Event * ev = GetEvent();
677 for (
UInt_t i = 0; i < GetNvar(); i++) {
681 ForceNetworkCalculations();
685 if (fMulticlassReturnVal ==
NULL) fMulticlassReturnVal =
new std::vector<Float_t>();
686 fMulticlassReturnVal->clear();
687 std::vector<Float_t> temp;
689 UInt_t nClasses = DataInfo().GetNClasses();
690 for (
UInt_t icls = 0; icls < nClasses; icls++) {
691 temp.push_back(GetOutputNeuron( icls )->GetActivationValue() );
694 for(
UInt_t iClass=0; iClass<nClasses; iClass++){
696 for(
UInt_t j=0;j<nClasses;j++){
698 norm+=
exp(temp[j]-temp[iClass]);
700 (*fMulticlassReturnVal).push_back(1.0/(1.0+norm));
705 return *fMulticlassReturnVal;
714 Int_t numLayers = fNetwork->GetEntriesFast();
719 for (
Int_t i = 0; i < numLayers; i++) {
725 for (
Int_t j = 0; j < numNeurons; j++) {
730 if(numSynapses==0)
continue;
731 std::stringstream s(
"");
733 for (
Int_t k = 0; k < numSynapses; k++) {
735 s << std::scientific << synapse->
GetWeight() <<
" ";
742 if( fInvHessian.GetNcols()>0 ){
746 Int_t nElements = fInvHessian.GetNoElements();
747 Int_t nRows = fInvHessian.GetNrows();
748 Int_t nCols = fInvHessian.GetNcols();
755 fInvHessian.GetMatrix2Array( elements );
764 std::stringstream s(
"");
766 for(
Int_t col = 0; col < nCols; ++col ){
767 s << std::scientific << (*(elements+index)) <<
" ";
784 std::vector<Int_t>* layout =
new std::vector<Int_t>();
786 void* xmlLayout =
NULL;
789 xmlLayout = wghtnode;
793 layout->resize( nLayers );
801 layout->at(index) = nNeurons;
805 BuildNetwork( layout,
NULL, fromFile );
808 if (GetTrainingTMVAVersionCode() <
TMVA_VERSION(4,2,1) && fActivation->GetExpression().Contains(
"tanh")){
830 std::stringstream s(content);
831 for (
UInt_t iSyn = 0; iSyn<nSyn; iSyn++) {
848 void* xmlInvHessian =
NULL;
854 fUseRegulator =
kTRUE;
864 fInvHessian.ResizeTo( nRows, nCols );
869 Log() <<
kFATAL <<
"you tried to read a hessian matrix with " << nElements <<
" elements, --> too large, guess s.th. went wrong reading from the weight file" <<
Endl;
872 elements =
new Double_t[nElements+10];
885 std::stringstream s(content);
886 for (
Int_t iCol = 0; iCol<nCols; iCol++) {
887 s >> (*(elements+index));
894 fInvHessian.SetMatrixArray( elements );
911 std::vector<Double_t>* weights =
new std::vector<Double_t>();
913 while (istr>> dummy >> weight) weights->push_back(weight);
915 ForceWeights(weights);
927 fRanking =
new Ranking( GetName(),
"Importance" );
934 for (
UInt_t ivar = 0; ivar < GetNvar(); ivar++) {
936 neuron = GetInputNeuron(ivar);
939 varName = GetInputVar(ivar);
944 meanS, meanB, rmsS, rmsB, xmin, xmax );
948 if (avgVal<meanrms) avgVal = meanrms;
949 if (IsNormalised()) avgVal = 0.5*(1 +
gTools().
NormVariable( avgVal, GetXmin( ivar ), GetXmax( ivar )));
951 for (
Int_t j = 0; j < numSynapses; j++) {
956 importance *= avgVal * avgVal;
958 fRanking->AddRank(
Rank( varName, importance ) );
967 std::vector<TH1*>* hv )
const
970 Int_t numLayers = fNetwork->GetEntriesFast();
972 for (
Int_t i = 0; i < numLayers-1; i++) {
980 hist =
new TH2F(name +
"", name +
"",
981 numNeurons1, 0, numNeurons1, numNeurons2, 0, numNeurons2);
983 for (
Int_t j = 0; j < numNeurons1; j++) {
988 for (
Int_t k = 0; k < numSynapses; k++) {
991 hist->SetBinContent(j+1, k+1, synapse->
GetWeight());
996 if (hv) hv->push_back( hist );
1009 PrintMessage(
Form(
"Write special histos to file: %s", BaseDir()->GetPath()),
kTRUE);
1011 if (fEstimatorHistTrain) fEstimatorHistTrain->Write();
1012 if (fEstimatorHistTest ) fEstimatorHistTest ->Write();
1015 CreateWeightMonitoringHists(
"weights_hist" );
1018 #if __cplusplus > 199711L
1019 static std::atomic<int> epochMonitoringDirectoryNumber{0};
1021 static int epochMonitoringDirectoryNumber = 0;
1023 int epochVal = epochMonitoringDirectoryNumber++;
1026 epochdir = BaseDir()->
mkdir(
"EpochMonitoring" );
1028 epochdir = BaseDir()->
mkdir(
Form(
"EpochMonitoring_%4d",epochVal) );
1031 for (std::vector<TH1*>::const_iterator it = fEpochMonHistS.begin(); it != fEpochMonHistS.end(); it++) {
1035 for (std::vector<TH1*>::const_iterator it = fEpochMonHistB.begin(); it != fEpochMonHistB.end(); it++) {
1039 for (std::vector<TH1*>::const_iterator it = fEpochMonHistW.begin(); it != fEpochMonHistW.end(); it++) {
1051 Int_t numLayers = fNetwork->GetEntries();
1054 fout <<
" double ActivationFnc(double x) const;" << std::endl;
1055 fout <<
" double OutputActivationFnc(double x) const;" << std::endl;
1057 fout <<
" int fLayers;" << std::endl;
1058 fout <<
" int fLayerSize["<<numLayers<<
"];" << std::endl;
1059 int numNodesFrom = -1;
1060 for (
Int_t lIdx = 0; lIdx < numLayers; lIdx++) {
1061 int numNodesTo = ((
TObjArray*)fNetwork->At(lIdx))->GetEntries();
1062 if (numNodesFrom<0) { numNodesFrom=numNodesTo;
continue; }
1063 fout <<
" double fWeightMatrix" << lIdx-1 <<
"to" << lIdx <<
"[" << numNodesTo <<
"][" << numNodesFrom <<
"];";
1064 fout <<
" // weight matrix from layer " << lIdx-1 <<
" to " << lIdx << std::endl;
1065 numNodesFrom = numNodesTo;
1068 fout <<
" double * fWeights["<<numLayers<<
"];" << std::endl;
1069 fout <<
"};" << std::endl;
1073 fout <<
"inline void " << className <<
"::Initialize()" << std::endl;
1074 fout <<
"{" << std::endl;
1075 fout <<
" // build network structure" << std::endl;
1076 fout <<
" fLayers = " << numLayers <<
";" << std::endl;
1077 for (
Int_t lIdx = 0; lIdx < numLayers; lIdx++) {
1080 fout <<
" fLayerSize[" << lIdx <<
"] = " << numNodes <<
"; fWeights["<<lIdx<<
"] = new double["<<numNodes<<
"]; " << std::endl;
1083 for (
Int_t i = 0; i < numLayers-1; i++) {
1084 fout <<
" // weight matrix from layer " << i <<
" to " << i+1 << std::endl;
1087 for (
Int_t j = 0; j < numNeurons; j++) {
1090 for (
Int_t k = 0; k < numSynapses; k++) {
1092 fout <<
" fWeightMatrix" << i <<
"to" << i+1 <<
"[" << k <<
"][" << j <<
"] = " << synapse->
GetWeight() <<
";" << std::endl;
1097 fout <<
"}" << std::endl;
1101 fout <<
"inline double " << className <<
"::GetMvaValue__( const std::vector<double>& inputValues ) const" << std::endl;
1102 fout <<
"{" << std::endl;
1103 fout <<
" if (inputValues.size() != (unsigned int)fLayerSize[0]-1) {" << std::endl;
1104 fout <<
" std::cout << \"Input vector needs to be of size \" << fLayerSize[0]-1 << std::endl;" << std::endl;
1105 fout <<
" return 0;" << std::endl;
1106 fout <<
" }" << std::endl;
1108 fout <<
" for (int l=0; l<fLayers; l++)" << std::endl;
1109 fout <<
" for (int i=0; i<fLayerSize[l]; i++) fWeights[l][i]=0;" << std::endl;
1111 fout <<
" for (int l=0; l<fLayers-1; l++)" << std::endl;
1112 fout <<
" fWeights[l][fLayerSize[l]-1]=1;" << std::endl;
1114 fout <<
" for (int i=0; i<fLayerSize[0]-1; i++)" << std::endl;
1115 fout <<
" fWeights[0][i]=inputValues[i];" << std::endl;
1117 for (
Int_t i = 0; i < numLayers-1; i++) {
1118 fout <<
" // layer " << i <<
" to " << i+1 << std::endl;
1119 if (i+1 == numLayers-1) {
1120 fout <<
" for (int o=0; o<fLayerSize[" << i+1 <<
"]; o++) {" << std::endl;
1123 fout <<
" for (int o=0; o<fLayerSize[" << i+1 <<
"]-1; o++) {" << std::endl;
1125 fout <<
" for (int i=0; i<fLayerSize[" << i <<
"]; i++) {" << std::endl;
1126 fout <<
" double inputVal = fWeightMatrix" << i <<
"to" << i+1 <<
"[o][i] * fWeights[" << i <<
"][i];" << std::endl;
1128 if ( fNeuronInputType ==
"sum") {
1129 fout <<
" fWeights[" << i+1 <<
"][o] += inputVal;" << std::endl;
1131 else if ( fNeuronInputType ==
"sqsum") {
1132 fout <<
" fWeights[" << i+1 <<
"][o] += inputVal*inputVal;" << std::endl;
1135 fout <<
" fWeights[" << i+1 <<
"][o] += fabs(inputVal);" << std::endl;
1137 fout <<
" }" << std::endl;
1138 if (i+1 != numLayers-1)
1139 fout <<
" fWeights[" << i+1 <<
"][o] = ActivationFnc(fWeights[" << i+1 <<
"][o]);" << std::endl;
1140 else fout <<
" fWeights[" << i+1 <<
"][o] = OutputActivationFnc(fWeights[" << i+1 <<
"][o]);" << std::endl;
1141 fout <<
" }" << std::endl;
1144 fout <<
" return fWeights[" << numLayers-1 <<
"][0];" << std::endl;
1145 fout <<
"}" << std::endl;
1148 TString fncName = className+
"::ActivationFnc";
1149 fActivation->MakeFunction(fout, fncName);
1150 fncName = className+
"::OutputActivationFnc";
1151 fOutput->MakeFunction(fout, fncName);
1153 fout <<
" " << std::endl;
1154 fout <<
"// Clean up" << std::endl;
1155 fout <<
"inline void " << className <<
"::Clear() " << std::endl;
1156 fout <<
"{" << std::endl;
1157 fout <<
" // clean up the arrays" << std::endl;
1158 fout <<
" for (int lIdx = 0; lIdx < "<<numLayers<<
"; lIdx++) {" << std::endl;
1159 fout <<
" delete[] fWeights[lIdx];" << std::endl;
1160 fout <<
" }" << std::endl;
1161 fout <<
"}" << std::endl;
void WaitForKeyboard()
wait for keyboard input, for debugging
Double_t GetDelta() const
virtual Double_t GetMvaValue(Double_t *err=0, Double_t *errUpper=0)
get the mva value generated by the NN
void BuildLayer(Int_t numNeurons, TObjArray *curLayer, TObjArray *prevLayer, Int_t layerIndex, Int_t numLayers, Bool_t from_file=false)
build a single layer with neurons and synapses connecting this layer to the previous layer ...
void AddWeightsXMLTo(void *parent) const
create XML description of ANN classifier
Random number generator class based on M.
MsgLogger & Endl(MsgLogger &ml)
void ForceNetworkCalculations()
calculate input values to each neuron
void ForceValue(Double_t value)
force the value, typically for input and bias neurons
void DeleteNetwork()
delete/clear network
void AddPreLinks(TNeuron *neuron, TObjArray *prevLayer)
add synapses connecting a neuron to its preceding layer
Double_t GetValue() const
const Ranking * CreateRanking()
compute ranking of input variables by summing function of weights
void SetPostNeuron(TNeuron *post)
virtual void ReadWeightsFromStream(std::istream &istr)
destroy/clear the network then read it back in from the weights file
XMLNodePointer_t GetNext(XMLNodePointer_t xmlnode, Bool_t realnode=kTRUE)
return next to xmlnode node if realnode==kTRUE, any special nodes in between will be skipped ...
virtual TDirectory * mkdir(const char *name, const char *title="")
Create a sub-directory and return a pointer to the created directory.
void SetTarget(UInt_t itgt, Float_t value)
set the target value (dimension itgt) to value
void PrintPostLinks() const
Int_t GetEntriesFast() const
virtual void DeclareOptions()
define the options (their keywords) that can be set in the option string; here, the options valid for ...
void SetActivationEqn(TActivation *activation)
set activation equation
Bool_t BeginsWith(const char *s, ECaseCompare cmp=kExact) const
const char * GetNodeContent(XMLNodePointer_t xmlnode)
get contents (if any) of xml node
void ForceWeights(std::vector< Double_t > *weights)
force the synapse weights
const char * Data() const
void PrintLayer(TObjArray *layer) const
print a single layer, for debugging
void PrintMessage(TString message, Bool_t force=kFALSE) const
print messages, turn off printing by setting verbose and debug flag appropriately ...
virtual void ProcessOptions()
do nothing specific at this moment
virtual void BuildNetwork(std::vector< Int_t > *layout, std::vector< Double_t > *weights=NULL, Bool_t fromFile=kFALSE)
build network given a layout (number of neurons in each layer) and optional weights array ...
TActivation * CreateActivation(EActivationType type) const
ClassImp(TMVA::MethodANNBase) TMVA
standard constructor Note: Right now it is an option to choose the neuron input function, but only the input function "sum" leads to weight convergence – otherwise the weights go to nan and lead to an ABORT.
void ReadWeightsFromXML(void *wghtnode)
read MLP from xml weight file
Double_t GetActivationValue() const
Int_t NumPostLinks() const
Bool_t AddRawLine(XMLNodePointer_t parent, const char *line)
Add a raw line into the xml file; the line should have correct xml syntax so that it can later be decoded by the xml parser.
virtual void WriteMonitoringHistosToFile() const
write histograms to file
2-D histogram with a float per channel (see TH1 documentation)}
Bool_t Debug() const
who the hell makes such strange Debug flags that even use "global pointers"..
void PrintActivationEqn()
print activation equation, for debugging
char * Form(const char *fmt,...)
void PrintPreLinks() const
void CreateWeightMonitoringHists(const TString &bulkname, std::vector< TH1 * > *hv=0) const
void Debug(Int_t level, const char *va_(fmt),...)
void InitWeights()
initialize the synapse weights randomly
TString & Remove(Ssiz_t pos)
#define TMVA_VERSION(a, b, c)
std::vector< Int_t > * ParseLayoutString(TString layerSpec)
parse layout specification string and return a vector, each entry containing the number of neurons to...
XMLAttrPointer_t NewAttr(XMLNodePointer_t xmlnode, XMLNsPointer_t, const char *name, const char *value)
creates new attribute for xmlnode, namespaces are not supported for attributes
virtual const std::vector< Float_t > & GetMulticlassValues()
get the multiclass classification values generated by the NN
Describe directory structure in memory.
void ForceNetworkInputs(const Event *ev, Int_t ignoreIndex=-1)
force the input values of the input neurons, setting the value for each input neuron ...
static RooMathCoreReg dummy
Float_t GetValue(UInt_t ivar) const
return value of i'th variable
void CalculateValue()
calculate neuron input
std::vector< TString > * GetAllActivationNames() const
Int_t GetEntries() const
Return the number of objects in array (i.e.
RooCmdArg Verbose(Bool_t flag=kTRUE)
virtual ~MethodANNBase()
destructor
static Vc_ALWAYS_INLINE int_v max(const int_v &x, const int_v &y)
void SetPreNeuron(TNeuron *pre)
virtual void PrintNetwork() const
print network representation, for debugging
void SetWeight(Double_t weight)
set synapse weight
Float_t GetTarget(UInt_t itgt) const
void CalculateActivationValue()
calculate neuron activation/output
void AddPostLink(TSynapse *post)
add synapse as a post-link to this neuron
MethodANNBase(const TString &jobName, Types::EMVA methodType, const TString &methodTitle, DataSetInfo &theData, const TString &theOption, TDirectory *theTargetDir)
virtual Bool_t cd(const char *path=0)
Change current directory to "this" directory.
XMLNodePointer_t GetChild(XMLNodePointer_t xmlnode, Bool_t realnode=kTRUE)
returns first child of xml node
XMLNodePointer_t NewChild(XMLNodePointer_t parent, XMLNsPointer_t ns, const char *name, const char *content=0)
create new child element for parent node
virtual const std::vector< Float_t > & GetRegressionValues()
get the regression value generated by the NN
TSynapse * PostLinkAt(Int_t index) const
Int_t NumPreLinks() const
TObject * At(Int_t idx) const
void AddPreLink(TSynapse *pre)
add synapse as a pre-link to this neuron
void DeletePreLinks()
delete all pre-links
void DeleteNetworkLayer(TObjArray *&layer)
delete a network layer
void BuildLayers(std::vector< Int_t > *layout, Bool_t from_file=false)
build the network layers
double norm(double *x, double *p)
virtual void MakeClassSpecific(std::ostream &, const TString &) const
write specific classifier response
void SetInputCalculator(TNeuronInput *calculator)
set input calculator
Ssiz_t First(char c) const
Find first occurrence of a character c.
void PrintNeuron(TNeuron *neuron) const
print a neuron, for debugging
void InitANNBase()
initialize ANNBase object