113    fVRangeMode(kAdaptive),
   114    fKernelEstimator(
kBox),
   127    fInitializedVolumeEle(0),
   140                                 const TString& theWeightFile) :
   143    fVRangeMode(kAdaptive),
   144    fKernelEstimator(
kBox),
   157    fInitializedVolumeEle(0),
   292       Log() << 
kFATAL << 
"Mechanism to ignore events with negative weights in training not yet available for method: "   294             << 
" --> please remove \"IgnoreNegWeightsInTraining\" option from booking string."   331    Log() << 
kVERBOSE << 
"interpreted option string: vRangeMethod: '"   338       Log() << 
kVERBOSE << 
"nEventsMin/Max, maxVIterations, initialScale: "   353                              << 
"please remove the option from the configuration string, or "   354                              << 
"use \"!Normalise\""   506          Log() << 
kVERBOSE << 
"delta of var[" << (*fInputVars)[ivar]
   508                << 
"\t  |  comp with |max - min|: " << (
GetXmax( ivar ) - 
GetXmin( ivar ))
   518          Log() << 
kFATAL << 
"<SetVolumeElement> unknown range-set mode: "   521       (*fShift)[ivar] = 0.5; 
   549                                    std::vector<const BinarySearchTreeNode*>& events,
   560 #ifdef  TMVA_MethodPDERS__countByHand__Debug__   565    Int_t iS = 0, iB = 0;
   570       for (
Int_t ivar=0; ivar<nvar; ivar++) {
   572          inV = (x > (*volume->Lower)[ivar] && x <= (*volume->Upper)[ivar]);
   588       std::vector<Double_t> *lb = 
new std::vector<Double_t>( 
GetNvar() );
   590       std::vector<Double_t> *ub = 
new std::vector<Double_t>( *lb );
   592          (*lb)[ivar] -= (*fDelta)[ivar]*(1.0 - (*fShift)[ivar]);
   593          (*ub)[ivar] += (*fDelta)[ivar]*(*fShift)[ivar];
   613          RootFinder rootFinder( 
this, 0.01, 50, 200, 10 );
   637          if (i_ > 50) 
Log() << 
kWARNING << 
"warning in event: " << e
   638                             << 
": adaptive volume pre-adjustment reached "   639                             << 
">50 iterations in while loop (" << i_ << 
")" << 
Endl;
   647          Float_t nEventsBest = nEventsN;
   650             if (nEventsN < fNEventsMin || nEventsN > 
fNEventsMax) {
   658                if (nEventsN > 1 && nEventsN - nEventsO != 0)
   659                   if (scaleN - scaleO != 0)
   660                      scale += (scaleN - scaleO)/(nEventsN - nEventsO)*(nEventsE - nEventsN);
   671                    (nEventsN >= 
fNEventsMin || nEventsBest < nEventsN)) {
   672                   nEventsBest = nEventsN;
   683          nEventsN = nEventsBest;
   685          if (nEventsN < fNEventsMin-1 || nEventsN > 
fNEventsMax+1)
   687                   << 
": adaptive volume adjustment reached "   688                   << 
"max. #iterations (" << fMaxVIterations << 
")"   727                   << 
": kNN volume adjustment reached "   737          dim_normalization [ivar] = 1.0 / ((*v.
fUpper)[ivar] - (*v.
fLower)[ivar]);
   740       std::vector<const BinarySearchTreeNode*> tempVector;    
   743          std::vector<Double_t> *distances = 
new std::vector<Double_t>( kNNcount );
   750          std::vector<Double_t>::iterator wsk = distances->begin();
   752          std::nth_element( distances->begin(), wsk, distances->end() );
   759             if (dist <= (*distances)[fkNNMin-1])
   760                tempVector.push_back( events[j] );
   765       delete[] dim_normalization;
   780    std::vector<const BinarySearchTreeNode*> events;
   785    std::vector<Double_t> *lb = 
new std::vector<Double_t>( 
GetNvar() );
   788    std::vector<Double_t> *ub = 
new std::vector<Double_t>( *lb );
   790       (*lb)[ivar] -= (*fDelta)[ivar]*(1.0 - (*fShift)[ivar]);
   791       (*ub)[ivar] += (*fDelta)[ivar]*(*fShift)[ivar];
   809    std::vector<const BinarySearchTreeNode*> events;
   814    std::vector<Double_t> *lb = 
new std::vector<Double_t>( 
GetNvar() );
   817    std::vector<Double_t> *ub = 
new std::vector<Double_t>( *lb );
   819       (*lb)[ivar] -= (*fDelta)[ivar]*(1.0 - (*fShift)[ivar]);
   820       (*ub)[ivar] += (*fDelta)[ivar]*(*fShift)[ivar];
   835                                              std::vector<const BinarySearchTreeNode*>& events, 
Volume& 
v )
   839       dim_normalization [ivar] = 2 / ((*v.
fUpper)[ivar] - (*v.
fLower)[ivar]);
   845    for (std::vector<const BinarySearchTreeNode*>::iterator iev = events.begin(); iev != events.end(); iev++) {
   862    delete[] dim_normalization;
   864    if (pdfSumS < 1e-20 && pdfSumB < 1e-20) return 0.5;
   865    if (pdfSumB < 1e-20) return 1.0;
   866    if (pdfSumS < 1e-20) return 0.0;
   869    return 1.0/(r + 1.0);   
   876                                          std::vector<const BinarySearchTreeNode*>& events, 
Volume& 
v,
   877                                          std::vector<Float_t>* pdfSum )
   881       dim_normalization [ivar] = 2 / ((*v.
fUpper)[ivar] - (*v.
fLower)[ivar]);
   889       pdfSum->push_back( 0 );
   892    for (std::vector<const BinarySearchTreeNode*>::iterator iev = events.begin(); iev != events.end(); iev++) {
   902          pdfSum->at(ivar) += 
ApplyKernelFunction (normalized_distance) * (*iev)->GetWeight() * (*iev)->GetTargets()[ivar];
   907    delete[] dim_normalization;
   913       pdfSum->at(ivar) /= pdfDiv;
   930       return (1 - normalized_distance);
   941       return NormSinc (side_crossings * normalized_distance);
   980    if (ret != 0.0) return ret*pdf;
  1041    if (x < 10e-10 && x > -10e-10) {
  1062    if (x < 10e-10 && x > -10e-10) {
  1084    Float_t d = countS + c*countB; d *= d;
  1086    if (d < 1e-10) return 1;
  1089    Float_t err = f*countB*countB*sumW2S + f*countS*countS*sumW2B;
  1091    if (err < 1e-10) return 1;
  1105       Log() << 
kFATAL << 
"Signal and background binary search tree not available" << 
Endl;
  1117       Log() << 
kFATAL << 
"Could not create BinarySearchTree from XML" << 
Endl;
  1119       Log() << 
kFATAL << 
"Could not create BinarySearchTree from XML" << 
Endl;
  1148    fBinaryTree->CalcStatistics();
  1150    fBinaryTree->CountNodes();
  1199    fout << 
"   // not implemented for class: \"" << className << 
"\"" << std::endl;
  1200    fout << 
"};" << std::endl;
  1214    Log() << 
"PDERS is a generalization of the projective likelihood classifier " << 
Endl;
  1215    Log() << 
"to N dimensions, where N is the number of input variables used." << 
Endl;
  1216    Log() << 
"In its adaptive form it is mostly equivalent to k-Nearest-Neighbor" << 
Endl;
  1217    Log() << 
"(k-NN) methods. If the multidimensional PDF for signal and background" << 
Endl;
  1218    Log() << 
"were known, this classifier would exploit the full information" << 
Endl;
  1219    Log() << 
"contained in the input variables, and would hence be optimal. In " << 
Endl;
  1220    Log() << 
"practice however, huge training samples are necessary to sufficiently " << 
Endl;
  1221    Log() << 
"populate the multidimensional phase space. " << 
Endl;
  1223    Log() << 
"The simplest implementation of PDERS counts the number of signal" << 
Endl;
  1224    Log() << 
"and background events in the vicinity of a test event, and returns" << 
Endl;
  1225    Log() << 
"a weight according to the majority species of the neighboring events." << 
Endl;
  1226    Log() << 
"A more involved version of PDERS (selected by the option \"KernelEstimator\")" << 
Endl;
  1227    Log() << 
"uses Kernel estimation methods to approximate the shape of the PDF." << 
Endl;
  1231    Log() << 
"PDERS can be very powerful in case of strongly non-linear problems, " << 
Endl;
  1232    Log() << 
"e.g., distinct islands of signal and background regions. Because of " << 
Endl;
  1233    Log() << 
"the exponential growth of the phase space, it is important to restrict" << 
Endl;
  1234    Log() << 
"the number of input variables (dimension) to the strictly necessary." << 
Endl;
  1236    Log() << 
"Note that PDERS is a slowly responding classifier. Moreover, the necessity" << 
Endl;
  1237    Log() << 
"to store the entire binary tree in memory, to avoid accessing virtual " << 
Endl;
  1238    Log() << 
"memory, limits the number of training events that can effectively be " << 
Endl;
  1239    Log() << 
"used to model the multidimensional PDF." << 
Endl;
  1243    Log() << 
"If the PDERS response is found too slow when using the adaptive volume " << 
Endl;
  1244    Log() << 
"size (option \"VolumeRangeMode=Adaptive\"), it might be found beneficial" << 
Endl;
  1245    Log() << 
"to reduce the number of events required in the volume, and/or to enlarge" << 
Endl;
  1246    Log() << 
"the allowed range (\"NeventsMin/Max\"). PDERS is relatively insensitive" << 
Endl;
  1247    Log() << 
"to the width (\"GaussSigma\") of the Gaussian kernel (if used)." << 
Endl;
 Int_t SearchVolumeWithMaxLimit(TMVA::Volume *, std::vector< const TMVA::BinarySearchTreeNode *> *events=0, Int_t=-1)
 
std::vector< Double_t > * fLower
 
virtual void * AddXMLTo(void *parent) const
add attributes to XML 
 
void WriteWeightsToStream(TFile &rf) const
write training sample (TTree) to file 
 
void UpdateThis()
update static this pointer 
 
double dist(Rotation3D const &r1, Rotation3D const &r2)
 
virtual ~MethodPDERS(void)
destructor 
 
MsgLogger & Endl(MsgLogger &ml)
 
Double_t GetNormalizedDistance(const TMVA::Event &base_event, const BinarySearchTreeNode &sample_event, Double_t *dim_normalization)
We use Euclidean metric here. Might not be best or most efficient. 
 
const Bool_t MethodPDERS_UseFindRoot
 
std::vector< Float_t > * fShift
 
void GetHelpMessage() const
get help message text 
 
BinarySearchTree * fBinaryTree
 
Double_t NormSinc(Double_t x)
NormSinc. 
 
OptionBase * DeclareOptionRef(T &ref, const TString &name, const TString &desc="")
 
A ROOT file is a suite of consecutive data records (TKey instances) with a well defined format...
 
void CreateBinarySearchTree(Types::ETreeType type)
create binary search trees for signal and background 
 
Double_t Fill(const std::vector< TMVA::Event *> &events, const std::vector< Int_t > &theVars, Int_t theType=-1)
create the search tree from the event collection using ONLY the variables specified in "theVars" ...
 
std::vector< Double_t > * fUpper
 
void AddWeightsXMLTo(void *parent) const
write weights to xml file 
 
TransformationHandler & GetTransformationHandler(Bool_t takeReroutedIfAvailable=true)
 
void ScaleInterval(Double_t f)
 
void RKernelEstimate(const Event &, std::vector< const BinarySearchTreeNode *> &, Volume &, std::vector< Float_t > *pdfSum)
normalization factors so we can work with radius 1 hyperspheres 
 
Double_t GetMvaValue(Double_t *err=0, Double_t *errUpper=0)
init the size of a volume element using a defined fraction of the volume containing the entire events...
 
Double_t Gamma(Double_t z)
Computation of gamma(z) for all z. 
 
Double_t GetSumOfWeights(void) const
return the sum of event (node) weights 
 
LongDouble_t Power(LongDouble_t x, LongDouble_t y)
 
void GetSample(const Event &e, std::vector< const BinarySearchTreeNode *> &events, Volume *volume)
 
void NormalizeTree()
Normalisation of tree. 
 
void Init(void)
default initialisation routine called by all constructors 
 
static Double_t IGetVolumeContentForRoot(Double_t)
Interface to RootFinder. 
 
void ReadWeightsFromStream(std::istream &istr)
read weight info from file 
 
const Event * GetEvent() const
 
Double_t GetXmin(Int_t ivar) const
 
Float_t GetError(Float_t countS, Float_t countB, Float_t sumW2S, Float_t sumW2B) const
statistical error estimate for RS estimator 
 
Bool_t DoRegression() const
 
Long64_t GetNTrainingEvents() const
 
void CalcStatistics(TMVA::Node *n=0)
calculate basic statistics (mean, rms for each variable) 
 
const Event * GetTrainingEvent(Long64_t ievt) const
 
UInt_t GetNTargets() const
accessor to the number of targets 
 
Double_t Root(Double_t refValue)
Root finding using Brent's algorithm; taken from CERNLIB function RZERO. 
 
void CalcAverages()
compute also average RMS values required for adaptive Gaussian 
 
Double_t CRScalc(const Event &)
 
Double_t GetXmax(Int_t ivar) const
 
static BinarySearchTree * CreateFromXML(void *node, UInt_t tmva_Version_Code=TMVA_VERSION_CODE)
re-create a new tree (decision tree or search tree) from XML 
 
void DeclareOptions()
define the options (their key words) that can be set in the option string know options: VolumeRangeMo...
 
Float_t GetTarget(UInt_t itgt) const
 
void SetVolumeElement(void)
defines volume dimensions 
 
Bool_t fInitializedVolumeEle
 
BinarySearchTree * GetBinaryTree(void) const
 
void SetTarget(UInt_t itgt, Float_t value)
set the target value (dimension itgt) to value 
 
void SetNormalize(Bool_t norm)
 
Double_t Gaus(Double_t x, Double_t mean=0, Double_t sigma=1, Bool_t norm=kFALSE)
Calculate a gaussian function with mean and sigma. 
 
Float_t GetValue(UInt_t ivar) const
return value of i'th variable 
 
void ReadWeightsFromXML(void *wghtnode)
 
static MethodPDERS * ThisPDERS(void)
static pointer to this object 
 
Bool_t IgnoreEventsWithNegWeightsInTraining() const
 
void Train(void)
this is a dummy training: the preparation work to do is the construction of the binary tree as a poin...
 
const std::vector< TMVA::Event * > & GetEventCollection(Types::ETreeType type)
returns the event collection (i.e. 
 
std::vector< Float_t > * fDelta
 
Float_t RMS(Types::ESBType sb, UInt_t var)
 
enum TMVA::MethodPDERS::EKernelEstimator fKernelEstimator
 
Bool_t IsNormalised() const
 
you should not use this method at all Int_t Int_t Double_t Double_t Double_t e
 
void AddPreDefVal(const T &)
 
virtual Bool_t HasAnalysisType(Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets)
PDERS can handle classification with 2 classes and regression with one or more regression-targets. 
 
Double_t CKernelEstimate(const Event &, std::vector< const BinarySearchTreeNode *> &, Volume &)
normalization factors so we can work with radius 1 hyperspheres 
 
std::vector< Float_t > fAverageRMS
 
UInt_t CountNodes(Node *n=NULL)
return the number of nodes in the tree. (makes a new count --> takes time) 
 
const std::vector< Float_t > & GetEventV() const
 
const std::vector< Float_t > & GetRegressionValues()
 
#define REGISTER_METHOD(CLASS)
for example 
 
Abstract ClassifierFactory template that handles arbitrary types. 
 
TString GetMethodTypeName() const
 
enum TMVA::MethodPDERS::EVolumeRangeMode fVRangeMode
 
void ProcessOptions()
process the options specified by the user 
 
std::vector< Float_t > * fRegressionReturnVal
 
Double_t GetVolumeContentForRoot(Double_t)
count number of events in rescaled volume 
 
MethodPDERS(const TString &jobName, const TString &methodTitle, DataSetInfo &theData, const TString &theOption)
standard constructor for the PDERS method 
 
Double_t Sqrt(Double_t x)
 
Double_t SearchVolume(Volume *, std::vector< const TMVA::BinarySearchTreeNode *> *events=0)
search the whole tree and add up all weights of events that lie within the given volume ...
 
static MethodPDERS *& GetMethodPDERSThreadLocal()
 
Double_t ApplyKernelFunction(Double_t normalized_distance)
from the normalized euclidean distance calculate the distance for a certain kernel ...
 
Double_t LanczosFilter(Int_t level, Double_t x)
Lanczos Filter. 
 
void RRScalc(const Event &, std::vector< Float_t > *count)
 
Double_t KernelNormalization(Double_t pdf)
Calculating the normalization factor only once (might need a reset at some point). ...
 
void MakeClassSpecific(std::ostream &, const TString &) const
write specific classifier response 
 
void NoErrorCalc(Double_t *const err, Double_t *const errUpper)
 
void SetSignalReferenceCut(Double_t cut)