ROOT 6.12/07
Reference Guide
TMVAClassification_FDA_GA.class.C
// Class: ReadFDA_GA
// Automatically generated by MethodBase::MakeClass
//

/* configuration options =====================================================

#GEN -*-*-*-*-*-*-*-*-*-*-*- general info -*-*-*-*-*-*-*-*-*-*-*-

Method          : FDA::FDA_GA
TMVA Release    : 4.2.1 [262657]
ROOT Release    : 6.12/07 [396295]
Creator         : sftnight
Date            : Sat Sep 29 23:25:14 2018
Host            : Linux ec-ubuntu-14-04-x86-64-2 3.13.0-157-generic #207-Ubuntu SMP Mon Aug 20 16:44:59 UTC 2018 x86_64 x86_64 x86_64 GNU/Linux
Dir             : /mnt/build/workspace/root-makedoc-v612/rootspi/rdoc/src/v6-12-00-patches/documentation/doxygen
Training events : 2000
Analysis type   : [Classification]


#OPT -*-*-*-*-*-*-*-*-*-*-*-*- options -*-*-*-*-*-*-*-*-*-*-*-*-

# Set by User:
V: "False" [Verbose output (short form of "VerbosityLevel" below - overrides the latter one)]
H: "True" [Print method-specific help message]
Formula: "(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3" [The discrimination formula]
ParRanges: "(-1,1);(-10,10);(-10,10);(-10,10);(-10,10)" [Parameter ranges]
FitMethod: "GA" [Optimisation Method]
# Default:
VerbosityLevel: "Default" [Verbosity level]
VarTransform: "None" [List of variable transformations performed before training, e.g., "D_Background,P_Signal,G,N_AllClasses" for: "Decorrelation, PCA-transformation, Gaussianisation, Normalisation, each for the given class of events ('AllClasses' denotes all events of all classes, if no class indication is given, 'All' is assumed)"]
CreateMVAPdfs: "False" [Create PDFs for classifier outputs (signal and background)]
IgnoreNegWeightsInTraining: "False" [Events with negative weights are ignored in the training (but are included for testing and performance evaluation)]
Converger: "None" [FitMethod uses Converger to improve result]
##


#VAR -*-*-*-*-*-*-*-*-*-*-*-* variables *-*-*-*-*-*-*-*-*-*-*-*-

NVar 4
var1+var2 myvar1 myvar1 myvar1 'F' [-8.14423561096,7.26972866058]
var1-var2 myvar2 myvar2 Expression 2 'F' [-3.96643972397,4.0258936882]
var3 var3 var3 Variable 3 units 'F' [-5.03730010986,4.27845287323]
var4 var4 var4 Variable 4 units 'F' [-5.95050764084,4.64035463333]
NSpec 2
var1*2 spec1 spec1 Spectator 1 units 'F' [-9.91655540466,8.67800140381]
var1*3 spec2 spec2 Spectator 2 units 'F' [-14.874833107,13.0170021057]


============================================================================ */
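The options in the "Set by User" block above are the ones passed to TMVA when the FDA_GA method was booked for training. The following is a minimal, hypothetical sketch of such a training macro: the input file and tree names and the Factory/DataLoader setup are assumptions, while the variable definitions and the FDA_GA option string are taken from the configuration block above.

// hypothetical training macro sketch (not part of the generated file)
#include "TFile.h"
#include "TTree.h"
#include "TMVA/Factory.h"
#include "TMVA/DataLoader.h"
#include "TMVA/Types.h"

void TrainFDA_GA()
{
   TFile* input  = TFile::Open("tmva_example.root");            // hypothetical input file
   TFile* output = TFile::Open("TMVA_FDA_GA.root", "RECREATE");

   TMVA::Factory factory("TMVAClassification", output,
                         "!V:!Silent:AnalysisType=Classification");
   TMVA::DataLoader loader("dataset");

   // variables and spectators as listed in the #VAR block above
   loader.AddVariable("myvar1 := var1+var2", 'F');
   loader.AddVariable("myvar2 := var1-var2", "Expression 2", "", 'F');
   loader.AddVariable("var3", "Variable 3", "units", 'F');
   loader.AddVariable("var4", "Variable 4", "units", 'F');
   loader.AddSpectator("spec1 := var1*2", "Spectator 1", "units");
   loader.AddSpectator("spec2 := var1*3", "Spectator 2", "units");

   loader.AddSignalTree    ((TTree*)input->Get("TreeS"), 1.0);  // hypothetical tree names
   loader.AddBackgroundTree((TTree*)input->Get("TreeB"), 1.0);
   loader.PrepareTrainingAndTestTree("", "SplitMode=Random:NormMode=NumEvents:!V");

   // option string matching the "Set by User" block above
   factory.BookMethod(&loader, TMVA::Types::kFDA, "FDA_GA",
                      "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:"
                      "ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA");

   factory.TrainAllMethods();
   factory.TestAllMethods();
   factory.EvaluateAllMethods();
   output->Close();
}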

#include <array>
#include <vector>
#include <cmath>
#include <string>
#include <iostream>

#ifndef IClassifierReader__def
#define IClassifierReader__def

class IClassifierReader {

 public:

   // constructor
   IClassifierReader() : fStatusIsClean( true ) {}
   virtual ~IClassifierReader() {}

   // return classifier response
   virtual double GetMvaValue( const std::vector<double>& inputValues ) const = 0;

   // returns classifier status
   bool IsStatusClean() const { return fStatusIsClean; }

 protected:

   bool fStatusIsClean;
};

#endif

class ReadFDA_GA : public IClassifierReader {

 public:

   // constructor
   ReadFDA_GA( std::vector<std::string>& theInputVars )
      : IClassifierReader(),
        fClassName( "ReadFDA_GA" ),
        fNvars( 4 ),
        fIsNormalised( false )
   {
      // the training input variables
      const char* inputVars[] = { "var1+var2", "var1-var2", "var3", "var4" };

      // sanity checks
      if (theInputVars.size() <= 0) {
         std::cout << "Problem in class \"" << fClassName << "\": empty input vector" << std::endl;
         fStatusIsClean = false;
      }

      if (theInputVars.size() != fNvars) {
         std::cout << "Problem in class \"" << fClassName << "\": mismatch in number of input values: "
                   << theInputVars.size() << " != " << fNvars << std::endl;
         fStatusIsClean = false;
      }

      // validate input variables
      for (size_t ivar = 0; ivar < theInputVars.size(); ivar++) {
         if (theInputVars[ivar] != inputVars[ivar]) {
            std::cout << "Problem in class \"" << fClassName << "\": mismatch in input variable names" << std::endl
                      << " for variable [" << ivar << "]: " << theInputVars[ivar].c_str() << " != " << inputVars[ivar] << std::endl;
            fStatusIsClean = false;
         }
      }

      // initialize min and max vectors (for normalisation)
      fVmin[0] = 0;
      fVmax[0] = 0;
      fVmin[1] = 0;
      fVmax[1] = 0;
      fVmin[2] = 0;
      fVmax[2] = 0;
      fVmin[3] = 0;
      fVmax[3] = 0;

      // initialize input variable types
      fType[0] = 'F';
      fType[1] = 'F';
      fType[2] = 'F';
      fType[3] = 'F';

      // initialize constants
      Initialize();

   }

   // destructor
   virtual ~ReadFDA_GA() {
      Clear(); // method-specific
   }

   // the classifier response
   // "inputValues" is a vector of input values in the same order as the
   // variables given to the constructor
   double GetMvaValue( const std::vector<double>& inputValues ) const;

 private:

   // method-specific destructor
   void Clear();

   // common member variables
   const char* fClassName;

   const size_t fNvars;
   size_t GetNvar()           const { return fNvars; }
   char   GetType( int ivar ) const { return fType[ivar]; }

   // normalisation of input variables
   const bool fIsNormalised;
   bool IsNormalised() const { return fIsNormalised; }
   double fVmin[4];
   double fVmax[4];
   double NormVariable( double x, double xmin, double xmax ) const {
      // normalise to output range: [-1, 1]
      return 2*(x - xmin)/(xmax - xmin) - 1.0;
   }

   // type of input variable: 'F' or 'I'
   char   fType[4];

   // initialize internal variables
   void Initialize();
   double GetMvaValue__( const std::vector<double>& inputValues ) const;

   // private members (method specific)
   double fParameter[5];
};

inline void ReadFDA_GA::Initialize()
{
   fParameter[0] = 0.361717166844755;
   fParameter[1] = 0;
   fParameter[2] = 0;
   fParameter[3] = 0.111212226929868;
   fParameter[4] = 0;
}

inline double ReadFDA_GA::GetMvaValue__( const std::vector<double>& inputValues ) const
{
   // interpret the formula
   double retval = fParameter[0]+fParameter[1]*inputValues[0]+fParameter[2]*inputValues[1]+fParameter[3]*inputValues[2]+fParameter[4]*inputValues[3];

   return retval;
}
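With the parameters assigned in Initialize(), fParameter[1], fParameter[2] and fParameter[4] are zero, so the trained discriminant reduces to retval = 0.361717 + 0.111212*inputValues[2]; of the four inputs, only var3 contributes to the response. For an illustrative, made-up event inputValues = {0.5, -1.2, 2.0, 0.3}, the returned value is 0.361717 + 0.111212*2.0 ≈ 0.584.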

// Clean up
inline void ReadFDA_GA::Clear()
{
   // nothing to clear
}

inline double ReadFDA_GA::GetMvaValue( const std::vector<double>& inputValues ) const
{
   // classifier response value
   double retval = 0;

   // classifier response, sanity check first
   if (!IsStatusClean()) {
      std::cout << "Problem in class \"" << fClassName << "\": cannot return classifier response"
                << " because status is dirty" << std::endl;
      retval = 0;
   }
   else {
      if (IsNormalised()) {
         // normalise variables
         std::vector<double> iV;
         iV.reserve(inputValues.size());
         int ivar = 0;
         for (std::vector<double>::const_iterator varIt = inputValues.begin();
              varIt != inputValues.end(); varIt++, ivar++) {
            iV.push_back(NormVariable( *varIt, fVmin[ivar], fVmax[ivar] ));
         }
         retval = GetMvaValue__( iV );
      }
      else {
         retval = GetMvaValue__( inputValues );
      }
   }

   return retval;
}
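For completeness, here is a minimal sketch of how a standalone reader class like this is typically applied to evaluate events; the #include path and the event values are assumptions for illustration, while the constructor argument and the GetMvaValue() call follow the interface defined above.

// hypothetical application sketch (not part of the generated file)
#include <iostream>
#include <string>
#include <vector>

#include "TMVAClassification_FDA_GA.class.C"   // assumed local copy of this file

int main()
{
   // variable names must match the training variables, in the same order
   std::vector<std::string> inputVars = { "var1+var2", "var1-var2", "var3", "var4" };
   ReadFDA_GA reader( inputVars );

   if (!reader.IsStatusClean()) return 1;

   // one hypothetical event: myvar1, myvar2, var3, var4
   std::vector<double> event = { 0.5, -1.2, 2.0, 0.3 };
   std::cout << "FDA_GA response: " << reader.GetMvaValue( event ) << std::endl;   // ~0.584 with the parameters above

   return 0;
}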