////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::MethodANNBase() -- standard constructor (excerpt):
/// forwards to the MethodBase base class.

TMVA::MethodANNBase::MethodANNBase( const TString& jobName, Types::EMVA methodType,
                                    const TString& methodTitle, DataSetInfo& theData,
                                    const TString& theOption )
   : TMVA::MethodBase( jobName, methodType, methodTitle, theData, theOption )
   // ...
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::DeclareOptions() (excerpt): define the options (their
/// key words) that can be set in the option string.

   DeclareOptionRef( fNcycles    = 500,       "NCycles",      "Number of training cycles" );
   DeclareOptionRef( fLayerSpec  = "N,N-1",   "HiddenLayers", "Specification of hidden layer architecture" );
   DeclareOptionRef( fNeuronType = "sigmoid", "NeuronType",   "Neuron activation function type" );
   DeclareOptionRef( fRandomSeed = 1,         "RandomSeed",
                     "Random seed for initial synapse weights (0 means unique seed for each run; default value '1')" );

   DeclareOptionRef( fEstimatorS = "MSE",     "EstimatorType",
                     "MSE (Mean Square Estimator) for Gaussian Likelihood or CE (Cross-Entropy) for Bernoulli Likelihood" );
   // ...
   Int_t nTypes = names->size();                 // names: known activation function types
   for (Int_t i = 0; i < nTypes; i++)
      AddPreDefVal(names->at(i));
   // ...
   DeclareOptionRef( fNeuronInputType = "sum", "NeuronInputType", "Neuron input function type" );
   // ...
   nTypes = names->size();                       // names: known neuron input function types
   for (Int_t i = 0; i < nTypes; i++) AddPreDefVal(names->at(i));
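// Usage sketch (illustrative, not part of this file): these options are
// consumed from the booking string passed to the TMVA Factory when an
// MLP-type method is booked. The factory/dataloader objects and the option
// values below are assumptions for demonstration only.
//
//    factory->BookMethod( dataloader, TMVA::Types::kMLP, "MLP",
//                         "NCycles=600:HiddenLayers=N,N-1:NeuronType=sigmoid:"
//                         "EstimatorType=CE:RandomSeed=1" );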
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::ProcessOptions() (excerpt): pick the default
/// estimator per analysis type, then build the network.

   if (DoRegression() || DoMulticlass()) fEstimatorS = "MSE";
   else                                  fEstimatorS = "CE";
   if      (fEstimatorS == "MSE") fEstimator = kMSE;
   else if (fEstimatorS == "CE" ) fEstimator = kCE;

   std::vector<Int_t>* layout = ParseLayoutString(fLayerSpec);
   BuildNetwork(layout);
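// For reference, the two estimator types selected above correspond to these
// per-event error terms (a minimal sketch under the usual definitions, not
// code from this file; 'target' and 'output' are illustrative names):

#include "TMath.h"

inline Double_t EstimatorMSE(Double_t target, Double_t output)
{
   // mean-square error term: Gaussian likelihood
   return (target - output) * (target - output);
}

inline Double_t EstimatorCE(Double_t target, Double_t output)
{
   // cross-entropy term: Bernoulli likelihood; assumes output lies in (0,1)
   return -(target * TMath::Log(output) + (1.0 - target) * TMath::Log(1.0 - output));
}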
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::ParseLayoutString() (excerpt): parse the layout
/// specification string and return a vector, each entry containing the number
/// of neurons in that layer.

   std::vector<Int_t>* layout = new std::vector<Int_t>();
   layout->push_back((Int_t)GetNvar());              // input layer
   while (layerSpec.Length() > 0) {
      // ...
      if (layerSpec.First(',') < 0) {
         // last token: no comma left
         // ...
      }
      else {
         sToAdd    = layerSpec(0, layerSpec.First(','));
         layerSpec = layerSpec(layerSpec.First(',')+1, layerSpec.Length());
      }
      // ...
      nNodes += atoi(sToAdd);
      layout->push_back(nNodes);
   }
   // output layer size depends on the analysis type
   if      (DoRegression())  layout->push_back( DataInfo().GetNTargets() );
   else if (DoMulticlass())  layout->push_back( DataInfo().GetNClasses() );
   else                      layout->push_back(1);   // two-class discriminant
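// Worked example: with four input variables, the default spec "N,N-1" expands
// to hidden layers of 4 and 3 nodes, so the classification layout becomes
// {4, 4, 3, 1}. A standalone sketch of the parsing rule (the member function
// above operates on TString; this reimplementation is illustrative):

#include <cstdlib>
#include <sstream>
#include <string>
#include <vector>

std::vector<int> ParseLayoutSketch(const std::string& spec, int nvar)
{
   std::vector<int> layout;
   layout.push_back(nvar);                   // input layer
   std::stringstream ss(spec);
   std::string tok;
   while (std::getline(ss, tok, ',')) {
      int nNodes = 0;
      if (!tok.empty() && tok[0] == 'N') {   // "N" = number of input variables
         nNodes += nvar;
         tok.erase(0, 1);                    // keep a trailing offset like "-1"
      }
      if (!tok.empty()) nNodes += std::atoi(tok.c_str());
      layout.push_back(nNodes);
   }
   layout.push_back(1);                      // single output (classification)
   return layout;                            // "N,N-1", nvar=4 -> {4, 4, 3, 1}
}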
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::InitANNBase() (excerpt): initialize the ANNBase
/// object.

   fInputCalculator = NULL;
   // ...
   fEstimatorHistTrain = NULL;
   fEstimatorHistTest  = NULL;
   // ...
   fEpochMonHistS.clear();
   fEpochMonHistB.clear();
   fEpochMonHistW.clear();
   // ...
   fOutputNeurons.clear();
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::DeleteNetwork() (excerpt): delete/clear the network.

   if (fNetwork != NULL) {
      // ...
      Int_t numLayers = fNetwork->GetEntriesFast();
      for (Int_t i = 0; i < numLayers; i++) {
         // ...
         DeleteNetworkLayer(layer);
      }
      // ...
   }

   if (frgen            != NULL) delete frgen;
   if (fActivation      != NULL) delete fActivation;
   if (fOutput          != NULL) delete fOutput;
   if (fIdentity        != NULL) delete fIdentity;
   if (fInputCalculator != NULL) delete fInputCalculator;
   if (fSynapses        != NULL) delete fSynapses;
   // ...
   fInputCalculator = NULL;
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::DeleteNetworkLayer() (excerpt): delete a network
/// layer.

   for (Int_t i = 0; i < numNeurons; i++) {
      // ...
   }
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::BuildNetwork() (excerpt): build the network given a
/// layout (number of neurons in each layer) and an optional weights array.

   if      (fEstimatorS == "MSE") fEstimator = kMSE;
   else if (fEstimatorS == "CE")  fEstimator = kCE;
   else Log() << kWARNING << "fEstimator=" << fEstimator << "\tfEstimatorS=" << fEstimatorS << Endl;
   if (fEstimator != kMSE && fEstimator != kCE)
      Log() << kWARNING << "Estimator type unspecified \t" << Endl;

   Log() << kHEADER << "Building Network. " << Endl;
   // ...
   fRegulatorIdx.clear();
   // ...
   BuildLayers( layout, fromFile );

   // cache pointers to the input layer and the output neurons
   fInputLayer = (TObjArray*)fNetwork->At(0);
   // ...
   fOutputNeurons.clear();
   for (Int_t i = 0; i < outputLayer->GetEntries(); i++)
      fOutputNeurons.push_back( (TNeuron*)outputLayer->At(i) );

   if (weights == NULL) InitWeights();
   else                 ForceWeights(weights);
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::BuildLayers() (excerpt): build the network layers.

   Int_t numLayers = layout->size();
   for (Int_t i = 0; i < numLayers; i++) {
      // ...
      BuildLayer(layout->at(i), curLayer, prevLayer, i, numLayers, fromFile);
      prevLayer = curLayer;
      fNetwork->Add(curLayer);
   }

   // book one weight regulator per input neuron and one per hidden layer;
   // each synapse records the index of the regulator it belongs to
   for (Int_t i = 0; i < numLayers; i++) {
      // ...
      if (i != 0 && i != numLayers-1) fRegulators.push_back(0.);
      for (Int_t j = 0; j < numNeurons; j++) {
         if (i == 0) fRegulators.push_back(0.);
         // ...
         for (Int_t k = 0; k < numSynapses; k++) {
            // ...
            fSynapses->Add(synapse);
            fRegulatorIdx.push_back(fRegulators.size()-1);
         }
      }
   }
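// Size check for the structure built above: every neuron is fully connected
// to the previous layer, and each non-output layer carries one extra bias
// neuron, so a layout {n0, n1, ..., nk} yields sum over i of (n(i-1)+1)*n(i)
// synapses. A sketch of that count (counting convention assumed from the
// BuildLayer logic below):

#include <cstddef>
#include <vector>

inline int CountSynapsesSketch(const std::vector<int>& layout)
{
   int nSyn = 0;
   for (std::size_t i = 1; i < layout.size(); i++)
      nSyn += (layout[i-1] + 1) * layout[i];   // +1 for the bias neuron
   return nSyn;                                // {4,4,3,1}: 5*4 + 5*3 + 4*1 = 39
}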
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::BuildLayer() (excerpt): build a single layer with
/// neurons and with synapses connecting it to the previous layer.

   for (Int_t j = 0; j < numNeurons; j++) {
      if (fromFile && (layerIndex != numLayers-1) && (j == numNeurons-1)) {
         // reading from file: the last neuron of a non-output layer is the bias neuron
         // ...
         curLayer->Add(neuron);
         // ...
      }
      else {
         // ...
         if (layerIndex == 0) {
            // input layer
            // ...
         }
         else {
            if (layerIndex == numLayers-1) {
               // output layer
               // ...
            }
            // ...
            AddPreLinks(neuron, prevLayer);
            // ...
            curLayer->Add(neuron);
            // ...
         }
      }
   }

   // add a bias neuron (to all layers except the output layer)
   if (layerIndex != numLayers-1) {
      // ...
      curLayer->Add(neuron);
   }
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::AddPreLinks() (excerpt): add synapses connecting a
/// neuron to its preceding layer.

   for (Int_t i = 0; i < numNeurons; i++) {
      // ...
   }
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::InitWeights() (excerpt): initialize the synapse
/// weights randomly.

   PrintMessage("Initializing weights");

   Int_t numSynapses = fSynapses->GetEntriesFast();
   for (Int_t i = 0; i < numSynapses; i++) {
      synapse = (TSynapse*)fSynapses->At(i);
      synapse->SetWeight(4.0*frgen->Rndm() - 2.0);
   }
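// The loop above draws every initial weight uniformly from (-2, 2); frgen is
// ROOT's TRandom3 Mersenne-Twister generator. A standalone equivalent (the
// seed value in the usage line is illustrative):

#include "TRandom3.h"

inline Double_t RandomInitialWeight(TRandom3& rng)
{
   return 4.0 * rng.Rndm() - 2.0;   // Rndm() is uniform in (0,1)
}

// usage: TRandom3 rng(1); Double_t w = RandomInitialWeight(rng);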
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::ForceWeights() (excerpt): force the synapse weights.

   PrintMessage("Forcing weights");

   Int_t numSynapses = fSynapses->GetEntriesFast();
   for (Int_t i = 0; i < numSynapses; i++) {
      synapse = (TSynapse*)fSynapses->At(i);
      // ...
   }
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::ForceNetworkInputs() (excerpt): force the value for
/// each input neuron.

   for (UInt_t j = 0; j < GetNvar(); j++) {
      // ...
      neuron = GetInputNeuron(j);
      // ...
   }
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::ForceNetworkCalculations() (excerpt): calculate the
/// input values to each neuron, layer by layer.

   Int_t numLayers = fNetwork->GetEntriesFast();
   for (Int_t i = 0; i < numLayers; i++) {
      // ...
      for (Int_t j = 0; j < numNeurons; j++) {
         // ...
      }
   }
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::PrintMessage() (excerpt): print messages; printing is
/// controlled by the verbose and debug flags.

   if (Verbose() || Debug() || force) Log() << kINFO << message << Endl;
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::WaitForKeyboard() (excerpt): wait for keyboard input,
/// for debugging.

   Log() << kINFO << "***Type anything to continue (q to quit): ";
   std::getline(std::cin, dummy);
   if (dummy == "q" || dummy == "Q") {
      PrintMessage( "quit" );
      // ...
   }
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::PrintNetwork() (excerpt): print the network
/// representation, for debugging.

   if (!Debug()) return;

   Log() << kINFO << Endl;
   PrintMessage( "Printing network " );
   Log() << kINFO << "-------------------------------------------------------------------" << Endl;
   // ...
   Int_t numLayers = fNetwork->GetEntriesFast();
   for (Int_t i = 0; i < numLayers; i++) {
      // ...
      Log() << kINFO << "Layer #" << i << " (" << numNeurons << " neurons):" << Endl;
      PrintLayer( curLayer );
   }
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::PrintLayer() (excerpt): print a single layer, for
/// debugging.

   for (Int_t j = 0; j < numNeurons; j++) {
      // ...
      Log() << kINFO << "\tNeuron #" << j << " (LinksIn: " << neuron->NumPreLinks()
            /* ... */ << Endl;
      PrintNeuron( neuron );
   }
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::PrintNeuron() (excerpt): print a neuron, for
/// debugging.

   Log() << kINFO // ...
         << "\t\tValue:\t" << neuron->GetValue()
         // ...
         << Endl;
   Log() << kINFO << "\t\tActivationEquation:\t";
   // ...
   Log() << kINFO << "\t\tLinksIn:" << Endl;
   // ...
   Log() << kINFO << "\t\tLinksOut:" << Endl;
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::GetMvaValue() (excerpt): get the MVA value generated
/// by the NN.

   const Event* ev = GetEvent();
   for (UInt_t i = 0; i < GetNvar(); i++) {
      // ...
   }
   ForceNetworkCalculations();
   // ...
   NoErrorCalc(err, errUpper);
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::GetRegressionValues() (excerpt): get the regression
/// values generated by the NN; the raw outputs are transformed back to the
/// original target range.

   const Event* ev = GetEvent();
   for (UInt_t i = 0; i < GetNvar(); i++) {
      // ...
   }
   ForceNetworkCalculations();
   // ...
   if (fRegressionReturnVal == NULL) fRegressionReturnVal = new std::vector<Float_t>();
   fRegressionReturnVal->clear();
   // ...
   for (UInt_t itgt = 0; itgt < ntgts; itgt++) {
      // ...
   }
   const Event* evT2 = GetTransformationHandler().InverseTransform( evT );
   for (UInt_t itgt = 0; itgt < ntgts; itgt++) {
      fRegressionReturnVal->push_back( evT2->GetTarget(itgt) );
   }
   // ...
   return *fRegressionReturnVal;
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::GetMulticlassValues() (excerpt): get the multiclass
/// classification values generated by the NN; the raw outputs are normalised
/// with a softmax.

   const Event* ev = GetEvent();
   for (UInt_t i = 0; i < GetNvar(); i++) {
      // ...
   }
   ForceNetworkCalculations();
   // ...
   if (fMulticlassReturnVal == NULL) fMulticlassReturnVal = new std::vector<Float_t>();
   fMulticlassReturnVal->clear();
   std::vector<Float_t> temp;

   UInt_t nClasses = DataInfo().GetNClasses();
   for (UInt_t icls = 0; icls < nClasses; icls++) {
      temp.push_back( GetOutputNeuron( icls )->GetActivationValue() );
   }

   for (UInt_t iClass = 0; iClass < nClasses; iClass++) {
      Double_t norm = 0.0;
      for (UInt_t j = 0; j < nClasses; j++) {
         if (iClass != j) norm += exp(temp[j] - temp[iClass]);
      }
      (*fMulticlassReturnVal).push_back(1.0/(1.0+norm));
   }
   // ...
   return *fMulticlassReturnVal;
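// The normalisation loop above is an algebraically equivalent form of the
// softmax: 1/(1 + sum_{j != i} exp(t_j - t_i)) = exp(t_i) / sum_j exp(t_j),
// and subtracting t_i in the exponent avoids overflow for large outputs.
// A quick numerical cross-check (a sketch, not part of this file):

#include <cassert>
#include <cmath>
#include <vector>

inline double SoftmaxCheck(const std::vector<double>& t, unsigned i)
{
   double norm = 0.0;
   for (unsigned j = 0; j < t.size(); j++)
      if (j != i) norm += std::exp(t[j] - t[i]);
   const double stable = 1.0 / (1.0 + norm);    // form used above

   double sum = 0.0;
   for (unsigned j = 0; j < t.size(); j++) sum += std::exp(t[j]);
   const double naive = std::exp(t[i]) / sum;   // textbook softmax

   assert(std::fabs(stable - naive) < 1e-12);   // equal up to rounding
   return stable;
}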
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::AddWeightsXMLTo() (excerpt): create the XML
/// description of the ANN classifier; one whitespace-separated weight string
/// per neuron.

   Int_t numLayers = fNetwork->GetEntriesFast();
   // ...
   for (Int_t i = 0; i < numLayers; i++) {
      // ...
      for (Int_t j = 0; j < numNeurons; j++) {
         // ...
         if (numSynapses == 0) continue;
         std::stringstream s("");
         // ...
         for (Int_t k = 0; k < numSynapses; k++) {
            // ...
            s << std::scientific << synapse->GetWeight() << " ";
         }
         // ...
      }
   }
   // if an inverse Hessian exists (regulator mode), write it out as well
   if (fInvHessian.GetNcols() > 0) {
      // ...
      Int_t nElements = fInvHessian.GetNoElements();
      Int_t nRows     = fInvHessian.GetNrows();
      Int_t nCols     = fInvHessian.GetNcols();
      // ...
      fInvHessian.GetMatrix2Array( elements );
      // ...
      for (Int_t row = 0; row < nRows; ++row) {
         // ...
         std::stringstream s("");
         // ...
         for (Int_t col = 0; col < nCols; ++col) {
            s << std::scientific << (*(elements+index)) << " ";
            // ...
         }
         // ...
      }
   }
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::ReadWeightsFromXML() (excerpt): read the MLP from an
/// XML weight file.

   std::vector<Int_t>* layout = new std::vector<Int_t>();

   void* xmlLayout = NULL;
   // ...
   xmlLayout = wghtnode;
   // ...
   layout->resize( nLayers );
   // ...
   layout->at(index) = nNeurons;
   // ...
   BuildNetwork( layout, NULL, fromFile );

   // compatibility fix for tanh networks written by TMVA versions before 4.2.1
   if (GetTrainingTMVAVersionCode() < TMVA_VERSION(4,2,1) &&
       fActivation->GetExpression().Contains("tanh")) {
      // ...
   }
   // ...
   std::stringstream s(content);
   for (UInt_t iSyn = 0; iSyn < nSyn; iSyn++) {
      // ...
   }
   // read the inverse Hessian (only present when the regulator was used)
   void* xmlInvHessian = NULL;
   // ...
   fUseRegulator = kTRUE;
   // ...
   fInvHessian.ResizeTo( nRows, nCols );
   // ...
   if (nElements > std::numeric_limits<int>::max()-100) {
      Log() << kFATAL << "you tried to read a hessian matrix with " << nElements
            << " elements, --> too large, guess s.th. went wrong reading from the weight file"
            << Endl;
   }
   elements = new Double_t[nElements+10];
   // ...
   std::stringstream s(content);
   for (Int_t iCol = 0; iCol < nCols; iCol++) {
      s >> (*(elements+index));
      // ...
   }
   // ...
   fInvHessian.SetMatrixArray( elements );
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::ReadWeightsFromStream() (excerpt): destroy/clear the
/// network, then read it back in from the weights file.

   std::vector<Double_t>* weights = new std::vector<Double_t>();
   // ...
   while (istr >> dummy >> weight) weights->push_back(weight);
   // ...
   ForceWeights(weights);
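// The stream consumed above is a plain sequence of "<label> <weight>" pairs,
// one synapse per entry. A self-contained sketch of the same read loop
// (labels and values are illustrative):

#include <sstream>
#include <string>
#include <vector>

inline std::vector<double> ReadWeightsSketch()
{
   std::istringstream istr("w0 0.125\nw1 -1.75\nw2 0.5\n");
   std::vector<double> weights;
   std::string dummy;
   double weight;
   while (istr >> dummy >> weight) weights.push_back(weight);
   return weights;   // {0.125, -1.75, 0.5}
}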
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::CreateRanking() (excerpt): compute the ranking of the
/// input variables by summing a function of the first-layer weights.

   fRanking = new Ranking( GetName(), "Importance" );
   // ...
   for (UInt_t ivar = 0; ivar < GetNvar(); ivar++) {
      // ...
      neuron  = GetInputNeuron(ivar);
      // ...
      varName = GetInputVar(ivar);
      // ...
      Statistics( Types::kTraining, varName,
                  meanS, meanB, rmsS, rmsB, xmin, xmax );
      // ...
      if (avgVal < meanrms) avgVal = meanrms;
      if (IsNormalised())
         avgVal = 0.5*(1 + gTools().NormVariable( avgVal, GetXmin( ivar ), GetXmax( ivar )));

      for (Int_t j = 0; j < numSynapses; j++) {
         // ...
      }
      importance *= avgVal * avgVal;

      fRanking->AddRank( Rank( varName, importance ) );
   }
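// In sketch form, the importance assigned to each input variable above is the
// sum of squared first-layer weights attached to its input neuron, scaled by
// the squared average variable value (names here are assumptions):

#include <cstddef>
#include <vector>

inline double ImportanceSketch(const std::vector<double>& firstLayerWeights,
                               double avgVal)
{
   double importance = 0.0;
   for (std::size_t j = 0; j < firstLayerWeights.size(); j++)
      importance += firstLayerWeights[j] * firstLayerWeights[j];
   return importance * avgVal * avgVal;   // cf. 'importance *= avgVal * avgVal'
}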
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::CreateWeightMonitoringHists() (excerpt): book a 2-D
/// histogram of synapse weights for each pair of adjacent layers.

void TMVA::MethodANNBase::CreateWeightMonitoringHists( const TString& bulkname,
                                                       std::vector<TH1*>* hv ) const
{
   // ...
   Int_t numLayers = fNetwork->GetEntriesFast();
   for (Int_t i = 0; i < numLayers-1; i++) {
      // ...
      hist = new TH2F( /* name, title, */
                       numNeurons1, 0, numNeurons1, numNeurons2, 0, numNeurons2 );
      for (Int_t j = 0; j < numNeurons1; j++) {
         // ...
         for (Int_t k = 0; k < numSynapses; k++) {
            // ...
         }
      }
      // ...
      if (hv) hv->push_back( hist );
   }
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::WriteMonitoringHistosToFile() (excerpt): write the
/// monitoring histograms to file.

   PrintMessage( TString::Format("Write special histos to file: %s",
                                 BaseDir()->GetPath()).Data(), kTRUE );

   if (fEstimatorHistTrain) fEstimatorHistTrain->Write();
   if (fEstimatorHistTest ) fEstimatorHistTest ->Write();
   // ...
   CreateWeightMonitoringHists( "weights_hist" );

   static std::atomic<int> epochMonitoringDirectoryNumber{0};
   int epochVal = epochMonitoringDirectoryNumber++;
   // ...
   epochdir = BaseDir()->mkdir( "EpochMonitoring" );
   // ...
   for (std::vector<TH1*>::const_iterator it = fEpochMonHistS.begin(); it != fEpochMonHistS.end(); ++it) {
      // ...
   }
   for (std::vector<TH1*>::const_iterator it = fEpochMonHistB.begin(); it != fEpochMonHistB.end(); ++it) {
      // ...
   }
   for (std::vector<TH1*>::const_iterator it = fEpochMonHistW.begin(); it != fEpochMonHistW.end(); ++it) {
      // ...
   }
////////////////////////////////////////////////////////////////////////////////
/// TMVA::MethodANNBase::MakeClassSpecific() (excerpt): write the
/// classifier-specific response as standalone C++ code.

   Int_t numLayers = fNetwork->GetEntries();
   // ...
   fout << "   double ActivationFnc(double x) const;" << std::endl;
   fout << "   double OutputActivationFnc(double x) const;" << std::endl;
   // ...
   int numNodesFrom = -1;
   for (Int_t lIdx = 0; lIdx < numLayers; lIdx++) {
      int numNodesTo = ((TObjArray*)fNetwork->At(lIdx))->GetEntries();
      if (numNodesFrom < 0) { numNodesFrom = numNodesTo; continue; }
      fout << "   double fWeightMatrix" << lIdx-1 << "to" << lIdx
           << "[" << numNodesTo << "][" << numNodesFrom << "];";
      fout << "   // weight matrix from layer " << lIdx-1 << " to " << lIdx << std::endl;
      numNodesFrom = numNodesTo;
   }
   // ...
   fout << "};" << std::endl;
   // generate the Initialize() method that fills the weight matrices
   fout << "inline void " << className << "::Initialize()" << std::endl;
   fout << "{" << std::endl;
   fout << "   // build network structure" << std::endl;

   for (Int_t i = 0; i < numLayers-1; i++) {
      fout << "   // weight matrix from layer " << i << " to " << i+1 << std::endl;
      // ...
      for (Int_t j = 0; j < numNeurons; j++) {
         // ...
         for (Int_t k = 0; k < numSynapses; k++) {
            // ...
            fout << "   fWeightMatrix" << i << "to" << i+1 << "[" << k << "][" << j
                 << "] = " << synapse->GetWeight() << ";" << std::endl;
         }
      }
   }

   fout << "}" << std::endl;
   // generate the GetMvaValue__() method that evaluates the network
   fout << "inline double " << className
        << "::GetMvaValue__( const std::vector<double>& inputValues ) const" << std::endl;
   fout << "{" << std::endl;
   fout << "   if (inputValues.size() != (unsigned int)"
        << ((TObjArray *)fNetwork->At(0))->GetEntries() - 1 << ") {" << std::endl;
   fout << "      std::cout << \"Input vector needs to be of size \" << "
        << ((TObjArray *)fNetwork->At(0))->GetEntries() - 1 << " << std::endl;" << std::endl;
   fout << "      return 0;" << std::endl;
   fout << "   }" << std::endl;
   // ...
   for (Int_t lIdx = 1; lIdx < numLayers; lIdx++) {
      // ...
      fout << "   std::array<double, " << numNodes << "> fWeights" << lIdx << " {{}};" << std::endl;
   }
   // set the bias entries to 1 (all layers but the output layer carry a bias node)
   for (Int_t lIdx = 1; lIdx < numLayers - 1; lIdx++) {
      fout << "   fWeights" << lIdx << ".back() = 1.;" << std::endl;
   }
   // ...
   for (Int_t i = 0; i < numLayers - 1; i++) {
      fout << "   // layer " << i << " to " << i + 1 << std::endl;

      // the output layer has no bias node, hence the different loop bounds
      if (i + 1 == numLayers - 1) {
         fout << "   for (int o=0; o<" << ((TObjArray *)fNetwork->At(i + 1))->GetEntries()
              << "; o++) {" << std::endl;
      }
      else {
         fout << "   for (int o=0; o<" << ((TObjArray *)fNetwork->At(i + 1))->GetEntries() - 1
              << "; o++) {" << std::endl;
      }

      if (i == 0) {
         // the first layer reads from inputValues
         fout << "      std::array<double, " << ((TObjArray *)fNetwork->At(i))->GetEntries()
              << "> buffer; // no need to initialise" << std::endl;
         fout << "      for (int i = 0; i<" << ((TObjArray *)fNetwork->At(i))->GetEntries()
              << " - 1; i++) {" << std::endl;
         fout << "         buffer[i] = fWeightMatrix" << i << "to" << i + 1
              << "[o][i] * inputValues[i];" << std::endl;
         fout << "      } // loop over i" << std::endl;
         fout << "      buffer.back() = fWeightMatrix" << i << "to" << i + 1 << "[o]["
              << ((TObjArray *)fNetwork->At(i))->GetEntries() - 1 << "];" << std::endl;
      }
      else {
         // deeper layers read from the previous layer's fWeights array
         fout << "      std::array<double, " << ((TObjArray *)fNetwork->At(i))->GetEntries()
              << "> buffer; // no need to initialise" << std::endl;
         fout << "      for (int i=0; i<" << ((TObjArray *)fNetwork->At(i))->GetEntries()
              << "; i++) {" << std::endl;
         fout << "         buffer[i] = fWeightMatrix" << i << "to" << i + 1
              << "[o][i] * fWeights" << i << "[i];" << std::endl;
         fout << "      } // loop over i" << std::endl;
      }

      fout << "      for (int i=0; i<" << ((TObjArray *)fNetwork->At(i))->GetEntries()
           << "; i++) {" << std::endl;
      if (fNeuronInputType == "sum") {
         fout << "         fWeights" << i + 1 << "[o] += buffer[i];" << std::endl;
      }
      else if (fNeuronInputType == "sqsum") {
         fout << "         fWeights" << i + 1 << "[o] += buffer[i]*buffer[i];" << std::endl;
      }
      else { // fNeuronInputType == "abssum"
         fout << "         fWeights" << i + 1 << "[o] += fabs(buffer[i]);" << std::endl;
      }
      fout << "      } // loop over i" << std::endl;
      fout << "   } // loop over o" << std::endl;

      // apply the activation function (output activation on the last layer)
      if (i + 1 == numLayers - 1) {
         fout << "   for (int o=0; o<" << ((TObjArray *)fNetwork->At(i + 1))->GetEntries()
              << "; o++) {" << std::endl;
      }
      else {
         fout << "   for (int o=0; o<" << ((TObjArray *)fNetwork->At(i + 1))->GetEntries() - 1
              << "; o++) {" << std::endl;
      }
      if (i+1 != numLayers-1)
         fout << "      fWeights" << i + 1 << "[o] = ActivationFnc(fWeights" << i + 1
              << "[o]);" << std::endl;
      else
         fout << "      fWeights" << i + 1 << "[o] = OutputActivationFnc(fWeights" << i + 1
              << "[o]);" << std::endl;
      fout << "   } // loop over o" << std::endl;
   }
   // ...
   fout << "   return fWeights" << numLayers - 1 << "[0];" << std::endl;
   fout << "}" << std::endl;
   // emit the activation function implementations
   TString fncName = className+"::ActivationFnc";
   fActivation->MakeFunction(fout, fncName);
   fncName = className+"::OutputActivationFnc";
   fOutput->MakeFunction(fout, fncName);
   // ...
   fout << "// Clean up" << std::endl;
   fout << "inline void " << className << "::Clear()" << std::endl;
   fout << "{" << std::endl;
   fout << "}" << std::endl;