Excerpts from ROOT's TMultiLayerPerceptron implementation (TMultiLayerPerceptron.cxx); the leading numbers are the original source line numbers.

 321                                             const char* extF, const char* extD)

 380                                             const char * weight, TTree * data,

 384                                             const char* extF, const char* extD)

 446                                             const char * training,

 449                                             const char* extF, const char* extD)

 466   if(testcut=="") testcut = Form("!(%s)",training);

 476      data->Draw(Form(">>fTestList_%lu",(ULong_t)this),(const char *)testcut,"goff");

 480      Warning("TMultiLayerPerceptron::TMultiLayerPerceptron","Data not set. Cannot define datasets");

 518                                             const char * weight, TTree * data,
 519                                             const char * training,

 522                                             const char* extF, const char* extD)

 539   if(testcut=="") testcut = Form("!(%s)",training);

 549      data->Draw(Form(">>fTestList_%lu",(ULong_t)this),(const char *)testcut,"goff");

 553      Warning("TMultiLayerPerceptron::TMultiLayerPerceptron","Data not set. Cannot define datasets");

 580      std::cerr << "Error: data already defined." << std::endl;

 641      Warning("TMultiLayerPerceptron::TMultiLayerPerceptron","Data not set. Cannot define datasets");

 660      Warning("TMultiLayerPerceptron::TMultiLayerPerceptron","Data not set. Cannot define datasets");
 
 777   Bool_t minE_Train = false;

 783   Int_t displayStepping = 1;

 787      displayStepping = atoi(out.Data() + 7);

 797   TGraph *train_residual_plot = 0;
 798   TGraph *test_residual_plot = 0;

 800      Error("Train","Training/Test samples still not defined. Cannot train the neural network");

 803   Info("Train","Using %d train and %d test entries.",

 807      std::cout << "Training the Neural Network" << std::endl;

 811         canvas = new TCanvas("NNtraining", "Neural Net training");

 814         if(!canvas) canvas = new TCanvas("NNtraining", "Neural Net training");

 816      train_residual_plot = new TGraph(nEpoch);
 817      test_residual_plot  = new TGraph(nEpoch);

 821      residual_plot->Add(train_residual_plot);
 822      residual_plot->Add(test_residual_plot);
 823      residual_plot->Draw("LA");

 835   for (i = 0; i < els; i++)

 838   TMatrixD bfgsh(matrix_size, matrix_size);

 844   for (Int_t iepoch = 0; (iepoch < nEpoch) && (!minE_Train || training_E>minE) && (!minE_Test || test_E>minE) ; iepoch++) {

 873               for (i = 0; i < els; i++)
 874                  onorm += dir[i] * dir[i];

 882                  prod -= dir[idx++] * neuron->GetDEDw();

 888                  prod -= dir[idx++] * synapse->GetDEDw();

 905               for (i = 0; i < els; i++)
 906                  onorm += dir[i] * dir[i];

 947                  Error("TMultiLayerPerceptron::Train()","Line search fail");

 957         Error("TMultiLayerPerceptron::Train()","Stop.");

 966      if ((verbosity % 2) && ((!(iepoch % displayStepping)) || (iepoch == nEpoch - 1))) {
 967         std::cout << "Epoch: " << iepoch
 968              << " learn=" << training_E
 969              << " test=" << test_E

 973         train_residual_plot->SetPoint(iepoch, iepoch,training_E);
 974         test_residual_plot->SetPoint(iepoch, iepoch,test_E);

 978            for (i = 1; i < nEpoch; i++) {
 979               train_residual_plot->SetPoint(i, i, trp);
 980               test_residual_plot->SetPoint(i, i, tep);

 983         if ((!(iepoch % displayStepping)) || (iepoch == nEpoch - 1)) {

 999      std::cout << "Training done." << std::endl;
1000   if (verbosity / 2) {

1003                       "Training sample", "L");

1005                       "Test sample", "L");
 
1033   if (nEntries == 0) return 0.0;

1064      for (i = 0; i < nEvents; i++) {

1069      for (i = 0; i < nEvents; i++) {

1086   return (error / 2.);

1099      if (target < DBL_EPSILON) {

1105      if ((1 - target) < DBL_EPSILON) {

1130      if (target > DBL_EPSILON) {

1162      for (i = 0; i < nEvents; i++) {

1189      for (i = 0; i < nEvents; i++) {
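
Line 1086 fixes the convention E = (1/2) * sum_i (output_i - target_i)^2 for GetSumSquareError, while the DBL_EPSILON guards (lines 1099-1130) keep the cross-entropy variants from evaluating log(0). A standalone sketch of the same convention, not the class method itself:

   #include <vector>
   #include <cstddef>

   double sumSquareError(const std::vector<double>& out,
                         const std::vector<double>& target) {
      double error = 0.;
      for (std::size_t i = 0; i < out.size(); ++i) {
         const double d = out[i] - target[i];
         error += d * d;
      }
      return error / 2.;   // same 1/2 factor as line 1086
   }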
 
1251   Bool_t normalize = false;

1255   Int_t maxop, maxpar, maxconst;

1317   for (i = 0; i<nneurons; i++) {

1321      if(f.GetMultiplicity()==1 && f.GetNdata()>1) {
1322         Warning("TMultiLayerPerceptron::ExpandStructure()","Variable size arrays cannot be used to build implicitly an input layer. The index 0 will be assumed.");

1329      else if(f.GetNdata()>1) {
1330         for(Int_t j=0; j<f.GetNdata(); j++) {
1331            if(i||j) newInput += ",";

1339      if(i) newInput += ",";

1345   fStructure = newInput + ":" + hiddenAndOutput;
 
1362           hidden(hidden.Last(':') + 1,

1364   if (input.Length() == 0) {
1365      Error("BuildNetwork()","malformed structure. No input layer.");

1368   if (output.Length() == 0) {
1369      Error("BuildNetwork()","malformed structure. No output layer.");

1388   for (i = 0; i<nneurons; i++) {

1404   Int_t prevStart = 0;

1410      end = hidden.Index(":", beg + 1);

1427      Error("BuildOneHiddenLayer",
1428            "The specification '%s' for hidden layer %d must contain only numbers!",
1429            sNumNodes.Data(), layer - 1);

1432      for (Int_t i = 0; i < num; i++) {
1433         name.Form("HiddenL%d:N%d",layer,i);

1436         for (Int_t j = prevStart; j < prevStop; j++) {

1445         for (Int_t i = prevStop; i < nEntries; i++) {

1447            for (Int_t j = prevStop; j < nEntries; j++)

1452      prevStart = prevStop;

1475   Int_t prevStart = prevStop - prev;

1481   for (i = 0; i<nneurons; i++) {

1488      for (j = prevStart; j < prevStop; j++) {

1497   for (i = prevStop; i < nEntries; i++) {

1499      for (j = prevStop; j < nEntries; j++)
 
1520      Error("DrawResult()","no such output.");

1525      new TCanvas("NNresult", "Neural Net output");

1532      setname = Form("train%d",index);

1535      setname = Form("test%d",index);

1537   if ((!fData) || (!events)) {
1538      Error("DrawResult()","no dataset.");

1543      TString title = "Neural Net Output control. ";

1545      setname = "MLP_" + setname + "_comp";

1548         hist = new TH2D(setname.Data(), title.Data(), 50, -1, 1, 50, -1, 1);

1551      for (i = 0; i < nEvents; i++) {

1558      TString title = "Neural Net Output. ";

1560      setname = "MLP_" + setname;

1563         hist = new TH1D(setname, title, 50, 1, -1);

1566      for (i = 0; i < nEvents; i++)

1574            hist = new TH1D(setname, title, 50, 1, -1);

1576         nEvents = events->GetN();
1577         for (i = 0; i < nEvents; i++)
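
DrawResult books either the 1-D output histogram or, with the "comp" option from the class documentation, the 2-D output-vs-target comparison of line 1548. Continuing the sketch:

   mlp.DrawResult(0, "test");        // output distribution on the test set
   mlp.DrawResult(0, "train,comp");  // 2-D comparison on the training set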
 
1593      Error("TMultiLayerPerceptron::DumpWeights()","Invalid file name");

1601   *output << "#input normalization" << std::endl;

1609   *output << "#output normalization" << std::endl;

1616   *output << "#neurons weights" << std::endl;

1623   *output << "#synapses weights" << std::endl;

1628      ((std::ofstream *) output)->close();

1643      Error("TMultiLayerPerceptron::LoadWeights()","Invalid file name");

1646   char *buff = new char[100];
1647   std::ifstream input(filen.Data());

1649   input.getline(buff, 100);

1657   input.getline(buff, 100);

1659   input.getline(buff, 100);

1666   input.getline(buff, 100);

1668   input.getline(buff, 100);

1676   input.getline(buff, 100);

1678   input.getline(buff, 100);
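
DumpWeights writes the normalization blocks and weights under the "#..." section headers above, and LoadWeights reads them back with the matching getline calls. Continuing the sketch:

   mlp.DumpWeights("weights.txt");   // the default argument "-" prints to stdout
   mlp.LoadWeights("weights.txt");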
 
1725      Warning("TMultiLayerPerceptron::Export","Request to export a network using an external function");

1728      TString basefilename = filename;

1732      TString classname = basefilename;

1737      std::ofstream headerfile(header);
1738      std::ofstream sourcefile(source);
1739      headerfile << "#ifndef " << basefilename << "_h" << std::endl;
1740      headerfile << "#define " << basefilename << "_h" << std::endl << std::endl;
1741      headerfile << "class " << classname << " { " << std::endl;
1742      headerfile << "public:" << std::endl;
1743      headerfile << "   " << classname << "() {}" << std::endl;
1744      headerfile << "   ~" << classname << "() {}" << std::endl;
1745      sourcefile << "#include \"" << header << "\"" << std::endl;
1746      sourcefile << "#include <cmath>" << std::endl << std::endl;
1747      headerfile << "   double Value(int index";
1748      sourcefile << "double " << classname << "::Value(int index";

1750         headerfile << ",double in" << i;
1751         sourcefile << ",double in" << i;

1753      headerfile << ");" << std::endl;
1754      sourcefile << ") {" << std::endl;

1756         sourcefile << "   input" << i << " = (in" << i << " - "

1760      sourcefile << "   switch(index) {" << std::endl;

1765         sourcefile << "     case " << idx++ << ":" << std::endl
1766                    << "         return neuron" << neuron << "();" << std::endl;
1767      sourcefile << "     default:" << std::endl
1768                 << "         return 0.;" << std::endl << "   }"

1770      sourcefile << "}" << std::endl << std::endl;
1771      headerfile << "   double Value(int index, double* input);" << std::endl;
1772      sourcefile << "double " << classname << "::Value(int index, double* input) {" << std::endl;

1774         sourcefile << "   input" << i << " = (input[" << i << "] - "

1778      sourcefile << "   switch(index) {" << std::endl;

1783         sourcefile << "     case " << idx++ << ":" << std::endl
1784                    << "         return neuron" << neuron << "();" << std::endl;
1785      sourcefile << "     default:" << std::endl
1786                 << "         return 0.;" << std::endl << "   }"

1788      sourcefile << "}" << std::endl << std::endl;
1789      headerfile << "private:" << std::endl;

1791         headerfile << "   double input" << i << ";" << std::endl;

1796         if (!neuron->GetPre(0)) {
1797            headerfile << "   double neuron" << neuron << "();" << std::endl;
1798            sourcefile << "double " << classname << "::neuron" << neuron
1799                       << "() {" << std::endl;
1800            sourcefile << "   return input" << idx++ << ";" << std::endl;
1801            sourcefile << "}" << std::endl << std::endl;

1803            headerfile << "   double input" << neuron << "();" << std::endl;
1804            sourcefile << "double " << classname << "::input" << neuron
1805                       << "() {" << std::endl;
1806            sourcefile << "   double input = " << neuron->GetWeight()
1807                       << ";" << std::endl;

1810            while ((syn = neuron->GetPre(n++))) {
1811               sourcefile << "   input += synapse" << syn << "();" << std::endl;

1813            sourcefile << "   return input;" << std::endl;
1814            sourcefile << "}" << std::endl << std::endl;

1816            headerfile << "   double neuron" << neuron << "();" << std::endl;
1817            sourcefile << "double " << classname << "::neuron" << neuron << "() {" << std::endl;
1818            sourcefile << "   double input = input" << neuron << "();" << std::endl;

1822                     sourcefile << "   return ((input < -709. ? 0. : (1/(1+exp(-input)))) * ";

1827                     sourcefile << "   return (input * ";

1832                     sourcefile << "   return (tanh(input) * ";

1837                     sourcefile << "   return (exp(-input*input) * ";

1842                     sourcefile << "   return (exp(input) / (";

1845                     sourcefile << "exp(input" << side << "())";

1847                        sourcefile << " + exp(input" << side << "())";
1848                     sourcefile << ") * ";

1853                     sourcefile << "   return (0.0 * ";

1858            sourcefile << "}" << std::endl << std::endl;

1865         headerfile << "   double synapse" << synapse << "();" << std::endl;
1866         sourcefile << "double " << classname << "::synapse"
1867                    << synapse << "() {" << std::endl;
1868         sourcefile << "   return (neuron" << synapse->GetPre()
1869                    << "()*" << synapse->GetWeight() << ");" << std::endl;
1870         sourcefile << "}" << std::endl << std::endl;

1873      headerfile << "};" << std::endl << std::endl;
1874      headerfile << "#endif // " << basefilename << "_h" << std::endl << std::endl;

1877      std::cout << header << " and " << source << " created." << std::endl;
 
1879   else if(lg == "FORTRAN") {
1880      TString implicit = "      implicit double precision (a-h,n-z)\n";
1881      std::ofstream sigmoid("sigmoid.f");
1882      sigmoid         << "      double precision FUNCTION SIGMOID(X)"        << std::endl

1884                << "      IF(X.GT.37.) THEN"                        << std::endl
1885                    << "         SIGMOID = 1."                        << std::endl
1886                << "      ELSE IF(X.LT.-709.) THEN"                << std::endl
1887                    << "         SIGMOID = 0."                        << std::endl
1888                    << "      ELSE"                                        << std::endl
1889                    << "         SIGMOID = 1./(1.+EXP(-X))"                << std::endl
1890                    << "      ENDIF"                                << std::endl
1891                    << "      END"                                        << std::endl;

1895      std::ofstream sourcefile(source);

1898      sourcefile << "      double precision function " << filename
1899                 << "(x, index)" << std::endl;
1900      sourcefile << implicit;
1901      sourcefile << "      double precision x(" <<

1905      sourcefile << "C --- Last Layer" << std::endl;

1909      TString ifelseif = "      if (index.eq.";

1911         sourcefile << ifelseif.Data() << idx++ << ") then" << std::endl

1913                    << "=neuron" << neuron << "(x);" << std::endl;
1914         ifelseif = "      else if (index.eq.";

1916      sourcefile << "      else" << std::endl
1917                 << "          " << filename << "=0.d0" << std::endl
1918                 << "      endif" << std::endl;
1919      sourcefile << "      end" << std::endl;

1922      sourcefile << "C --- First and Hidden layers" << std::endl;

1927         sourcefile << "      double precision function neuron"
1928                    << neuron << "(x)" << std::endl

1930         sourcefile << "      double precision x("

1932         if (!neuron->GetPre(0)) {
1933            sourcefile << "      neuron" << neuron
1934             << " = (x(" << idx+1 << ") - "

1938             << "d0" << std::endl;

1941            sourcefile << "      neuron" << neuron
1942                       << " = " << neuron->GetWeight() << "d0" << std::endl;

1945            while ((syn = neuron->GetPre(n++)))
1946               sourcefile << "      neuron" << neuron
1947                              << " = neuron" << neuron
1948                          << " + synapse" << syn << "(x)" << std::endl;

1952                     sourcefile << "      neuron" << neuron
1953                                << "= (sigmoid(neuron" << neuron << ")*";

1962                     sourcefile << "      neuron" << neuron
1963                                << "= (tanh(neuron" << neuron << ")*";

1968                     sourcefile << "      neuron" << neuron
1969                                << "= (exp(-neuron" << neuron << "*neuron"

1977                     sourcefile << "      div = exp(neuron" << side << "())" << std::endl;

1979                        sourcefile << "      div = div + exp(neuron" << side << "())" << std::endl;
1980                     sourcefile << "      neuron"  << neuron ;
1981                     sourcefile << "= (exp(neuron" << neuron << ") / div * ";

1986                     sourcefile << "   neuron " << neuron << "= 0.";

1992         sourcefile << "      end" << std::endl;

1997      sourcefile << "C --- Synapses" << std::endl;

2001         sourcefile << "      double precision function " << "synapse"
2002                    << synapse << "(x)\n" << implicit;
2003         sourcefile << "      double precision x("

2005         sourcefile << "      synapse" << synapse
2006                    << "=neuron" << synapse->GetPre()
2007                    << "(x)*" << synapse->GetWeight() << "d0" << std::endl;
2008         sourcefile << "      end" << std::endl << std::endl;

2012      std::cout << source << " created." << std::endl;
 
2014   else if(lg == "PYTHON") {

2018      std::ofstream pythonfile(pyfile);
2019      pythonfile << "from math import exp" << std::endl << std::endl;
2020      pythonfile << "from math import tanh" << std::endl << std::endl;
2021      pythonfile << "class " << classname << ":" << std::endl;
2022      pythonfile << "\tdef value(self,index";

2024         pythonfile << ",in" << i;

2026      pythonfile << "):" << std::endl;

2028         pythonfile << "\t\tself.input" << i << " = (in" << i << " - "

2035         pythonfile << "\t\tif index==" << idx++
2036                    << ": return self.neuron" << neuron << "();" << std::endl;
2037      pythonfile << "\t\treturn 0." << std::endl;

2042         pythonfile << "\tdef neuron" << neuron << "(self):" << std::endl;

2044            pythonfile << "\t\treturn self.input" << idx++ << std::endl;

2046            pythonfile << "\t\tinput = " << neuron->GetWeight() << std::endl;

2049            while ((syn = neuron->GetPre(n++)))
2050               pythonfile << "\t\tinput = input + self.synapse"
2051                          << syn << "()" << std::endl;

2055                     pythonfile << "\t\tif input<-709. : return " << neuron->GetNormalisation()[1] << std::endl;
2056                     pythonfile << "\t\treturn ((1/(1+exp(-input)))*";

2061                     pythonfile << "\t\treturn (input*";

2066                     pythonfile << "\t\treturn (tanh(input)*";

2071                     pythonfile << "\t\treturn (exp(-input*input)*";

2076                     pythonfile << "\t\treturn (exp(input) / (";

2079                     pythonfile << "exp(self.neuron" << side << "())";

2081                        pythonfile << " + exp(self.neuron" << side << "())";
2082                     pythonfile << ") * ";

2087                     pythonfile << "\t\treturn 0.";

2098         pythonfile << "\tdef synapse" << synapse << "(self):" << std::endl;
2099         pythonfile << "\t\treturn (self.neuron" << synapse->GetPre()
2100                    << "()*" << synapse->GetWeight() << ")" << std::endl;

2104      std::cout << pyfile << " created." << std::endl;
 
2126   for (Int_t i = 0; i < n; i++) {

2129      index[j] = index[i];

2144   for (i = 0; i < nEvents; i++)

2150   for (i = 0; i < nEvents; i++) {

2241      dir[idx++] = -neuron->GetDEDw();

2245      dir[idx++] = -synapse->GetDEDw();

2284   MLP_Line(origin, direction, alpha2);

2290      for (icount = 0; icount < 100; icount++) {

2292         MLP_Line(origin, direction, alpha3);

2309      for (icount = 0; icount < 100; icount++) {

2311         MLP_Line(origin, direction, alpha2);

2329                (err3 - err1) / ((err3 - err2) / (alpha3 - alpha2)
2330                - (err2 - err1) / (alpha2 - alpha1)));

2339      buffer[idx] = neuron->GetWeight() - origin[idx];

2345      buffer[idx] = synapse->GetWeight() - origin[idx];

2430   for (Int_t i = 0; i < els; i++)
2431      delta[i].Assign(buffer[i]);

2486      dedw[idx++][0] = neuron->GetDEDw();

2491      dedw[idx++][0] = synapse->GetDEDw();

2494   for (Int_t i = 0; i < els; i++)
2495      dir[i] = -direction[i][0];
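
These excerpts are the shared minimization machinery: steepest-descent directions built from the accumulated DEDw (lines 2241-2245), the parabolic interpolation of the line search (lines 2329-2330), and the gamma/delta and BFGS direction updates. Which path runs is chosen before training; continuing the sketch:

   // ELearningMethod also offers kStochastic, kBatch, kSteepestDescent,
   // kRibierePolak and kFletcherReeves.
   mlp.SetLearningMethod(TMultiLayerPerceptron::kBFGS);
   mlp.SetReset(50);  // epochs between resets to the steepest descent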
 
2507 #define NeuronSize 2.5

2510   Float_t xStep = 1./(nLayers+1.);

2512   for(layer=0; layer< nLayers-1; layer++) {

2524            Int_t num = atoi(TString(hidden(beg, end - beg)).Data());

2527            end = hidden.Index(":", beg + 1);
2528            if(layer==cnt) nNeurons_this = num;

2532         if(layer==cnt) nNeurons_this = num;

2535      if(layer==nLayers-2) {

2537         nNeurons_next = output.CountChar(',')+1;

2545            Int_t num = atoi(TString(hidden(beg, end - beg)).Data());

2548            end = hidden.Index(":", beg + 1);
2549            if(layer+1==cnt) nNeurons_next = num;

2553         if(layer+1==cnt) nNeurons_next = num;

2555      Float_t yStep_this = 1./(nNeurons_this+1.);
2556      Float_t yStep_next = 1./(nNeurons_next+1.);

2561         maxWeight = maxWeight < theSynapse->GetWeight() ? theSynapse->GetWeight() : maxWeight;

2564      for(Int_t neuron1=0; neuron1<nNeurons_this; neuron1++) {
2565         for(Int_t neuron2=0; neuron2<nNeurons_next; neuron2++) {
2566            TLine* synapse = new TLine(xStep*(layer+1),yStep_this*(neuron1+1),xStep*(layer+2),yStep_next*(neuron2+1));

2569            if (!theSynapse) continue;

2578   for(layer=0; layer< nLayers; layer++) {

2584      else if(layer==nLayers-1) {

2586         nNeurons = output.CountChar(',')+1;

2594            Int_t num = atoi(TString(hidden(beg, end - beg)).Data());

2597            end = hidden.Index(":", beg + 1);
2598            if(layer==cnt) nNeurons = num;

2602         if(layer==cnt) nNeurons = num;

2604      Float_t yStep = 1./(nNeurons+1.);
2605      for(Int_t neuron=0; neuron<nNeurons; neuron++) {

2607         m->SetMarkerColor(4);

2615   Float_t yStep = 1./(nrItems+1);
2616   for (Int_t item = 0; item < nrItems; item++) {

2618      TText* label = new TText(0.5*xStep,yStep*(item+1),brName.Data());

2624   yStep=1./(numOutNodes+1);
2625   for (Int_t outnode=0; outnode<numOutNodes; outnode++) {

2627      if (neuron && neuron->GetName()) {
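
Draw() lays the layers out left to right, with line widths proportional to the synapse weights and TText labels for the input branches and output nodes. After training, the network can be drawn and evaluated directly; continuing the sketch:

   mlp.Draw();                          // network structure, per the code above
   Double_t params[2] = { 0.5, -0.2 };  // one value per input neuron
   std::cout << "NN output: " << mlp.Evaluate(0, params) << std::endl;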
 