, fMultiTargetRegression(kFALSE)
, fKernelEstimator(NULL)
, fTargetSelectionStr("Mean")
, fTargetSelection(kMean)
, fFillFoamWithOrigWeights(kFALSE)
, fUseYesNoCell(kFALSE)
, fDTSeparation(kFoam)
const TString& theWeightFile) :
, fMultiTargetRegression(kFALSE)
, fKernelEstimator(NULL)
, fTargetSelectionStr("Mean")
, fTargetSelection(kMean)
, fFillFoamWithOrigWeights(kFALSE)
, fDTSeparation(kFoam)
DeclareOptionRef( fFrac = 0.001, "TailCut",
                  "Fraction of outlier events that are excluded from the foam in each dimension" );
DeclareOptionRef( fVolFrac = 1./15., "VolFrac",
                  "Size of sampling box, used for density calculation during foam build-up (maximum value: 1.0 is equivalent to volume of entire foam)" );
Log() << kWARNING << "TailCut not in [0.,1] ==> using 0.001 instead" << Endl;

Log() << kWARNING << "invalid number of active cells specified: "

Log() << kFATAL << "Decision tree logic works only for a single foam (SigBgSeparate=F)" << Endl;
else if (fDTLogic == "MisClassificationError")
else if (fDTLogic == "CrossEntropy")
else if (fDTLogic == "GiniIndexWithLaplace")
else if (fDTLogic == "SdivSqrtSplusB")

      << ", setting to None" << Endl;
Log() << kWARNING << "Warning: number of targets > 1"
      << " and MultiTargetRegression=F was set, this makes no sense!"
      << " --> I'm setting MultiTargetRegression=T" << Endl;
for (UInt_t dim=0; dim<kDim; dim++) {

Int_t rangehistbins = 10000;

for (UInt_t dim=0; dim<kDim; dim++) {

for (UInt_t dim=0; dim<kDim; dim++) {
   range_h[dim] = new TH1F(Form("range%i", dim), "range", rangehistbins, xmin[dim], xmax[dim]);

for (UInt_t dim=0; dim<kDim; dim++) {

for (UInt_t dim=0; dim<kDim; dim++) {
   for (Int_t i=1; i<(rangehistbins+1); i++) {
      if (range_h[dim]->Integral(0, i) > nevoutside) {

   for (Int_t i=rangehistbins; i>0; i--) {
      if (range_h[dim]->Integral(i, (rangehistbins+1)) > nevoutside) {

for (UInt_t dim=0; dim<kDim; dim++) {
   fXmin.push_back(xmin[dim]);
   fXmax.push_back(xmax[dim]);

for (UInt_t dim=0; dim<kDim; dim++)

Log() << kVERBOSE << "Calculate Xmin and Xmax for every dimension" << Endl;
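// The fragments above implement the TailCut range finding: per dimension,
// fill a fine-binned histogram of the variable and walk inward from both
// ends until a fraction fFrac of the events lies outside on each side.
// A minimal stand-alone equivalent using a sorted copy of the values
// (hypothetical helper, not part of MethodPDEFoam; assumes v.size() > 2*nOut):
#include <algorithm>
#include <utility>
#include <vector>

std::pair<float, float> TrimmedRange(std::vector<float> v, float frac)
{
   std::sort(v.begin(), v.end());
   const std::size_t nOut = static_cast<std::size_t>(frac * v.size());
   return { v[nOut], v[v.size() - 1 - nOut] };   // cut nOut events per side
}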
if (DataInfo().GetNormalization() != "EQUALNUMEVENTS") {
      << " chosen. Note that only NormMode=EqualNumEvents"
      << " ensures that Discriminant values correspond to"
      << " signal probabilities." << Endl;
fFoam.at(i)->DeleteBinarySearchTree();
foamcaption[0] = "SignalFoam";
foamcaption[1] = "BgFoam";

for (int i=0; i<2; i++) {

   Log() << kVERBOSE << "Filling binary search tree of " << foamcaption[i]
         << " with events" << Endl;

   if ((i==0 && DataInfo().IsSignal(ev)) || (i==1 && !DataInfo().IsSignal(ev)))
      fFoam.back()->FillBinarySearchTree(ev);

   Log() << kINFO << "Build up " << foamcaption[i] << Endl;
   fFoam.back()->Create();

   if ((i==0 && DataInfo().IsSignal(ev)) || (i==1 && !DataInfo().IsSignal(ev)))
      fFoam.back()->FillFoamCells(ev, weight);
Log() << kVERBOSE << "Filling binary search tree of discriminator foam with events" << Endl;

fFoam.back()->FillBinarySearchTree(ev);

Log() << kINFO << "Build up discriminator foam" << Endl;
fFoam.back()->Create();

fFoam.back()->FillFoamCells(ev, weight);

fFoam.back()->Finalize();
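// All Train*() variants follow the same foam life cycle, which the two
// excerpts above already show in full (a schematic summary, with the
// event-loop bookkeeping omitted):
//
//   foam->FillBinarySearchTree(ev);   // 1) register training events
//   foam->Create();                   // 2) grow the binary cell tree
//   foam->FillFoamCells(ev, weight);  // 3) fill cells with event weights
//   foam->Finalize();                 // 4) compute the stored cell values
//
// The variants differ only in which events (signal/background, per class,
// with or without target dimensions) go into which foam.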
Log() << kVERBOSE << "Filling binary search tree of multiclass foam "
      << iClass << " with events" << Endl;

fFoam.back()->FillBinarySearchTree(ev);

Log() << kINFO << "Build up multiclass foam " << iClass << Endl;
fFoam.back()->Create();

fFoam.back()->FillFoamCells(ev, weight);

fFoam.back()->Finalize();
Log() << kFATAL << "Can't do mono-target regression with "

fFoam.back()->FillBinarySearchTree(ev);

Log() << kINFO << "Build mono target regression foam" << Endl;
fFoam.back()->Create();

fFoam.back()->FillFoamCells(ev, weight);

fFoam.back()->Finalize();
Log() << kFATAL << "LinNeighbors kernel currently not supported"
      << " for multi target regression" << Endl;

Log() << kVERBOSE << "Filling binary search tree of multi target regression foam with events" << Endl;

std::vector<Float_t> targets(ev->GetTargets());

for (UInt_t i = 0; i < targets.size(); ++i)
   ev->SetVal(i+nVariables, targets.at(i));

fFoam.back()->FillBinarySearchTree(ev);

Log() << kINFO << "Build multi target regression foam" << Endl;
fFoam.back()->Create();

std::vector<Float_t> targets = ev->GetTargets();

for (UInt_t i = 0; i < targets.size(); ++i)
   ev->SetVal(i+nVariables, targets.at(i));

fFoam.back()->FillFoamCells(ev, weight);
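// In MultiTargetRegression mode the foam is built in (nVariables + nTargets)
// dimensions: as the SetVal(i + nVariables, ...) calls above show, each event
// vector is extended with its targets before it enters the foam,
//
//   x = (var_0, ..., var_{n-1}, tgt_0, ..., tgt_{m-1}),
//
// so the trained foam stores the event density over variables and targets
// jointly.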
std::vector<Float_t> xvec = ev->GetValues();

if ( (density_sig+density_bg) > 0 )
   discr = density_sig/(density_sig+density_bg);

if (err || errUpper) {
   if (err != 0) *err = discr_error;
   if (errUpper != 0) *errUpper = discr_error;

return (discr < 0.5 ? -1 : 1);
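// The discriminant evaluated above is D = n_s / (n_s + n_b), the estimated
// signal probability at the event position; with UseYesNoCell=T the method
// returns -1 (background-like) for D < 0.5 and +1 (signal-like) otherwise.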
const std::vector<Float_t>& xvec = ev->GetValues();

if ((neventsS > 1e-10) || (neventsB > 1e-10)) {

   mvaError = TMath::Sqrt(Sqr(scaleB * neventsB / Sqr(neventsS + scaleB * neventsB) * errorS) +
                          Sqr(scaleB * neventsS / Sqr(neventsS + scaleB * neventsB) * errorB));
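// Error propagation behind the expression above: with D = n_s / (n_s + c*n_b),
// where c = scaleB and sigma_s = errorS, sigma_b = errorB are the cell-value
// errors returned by the two foams,
//
//   sigma_D = sqrt( ( c*n_b / (n_s + c*n_b)^2 * sigma_s )^2
//                 + ( c*n_s / (n_s + c*n_b)^2 * sigma_b )^2 ),
//
// i.e. standard Gaussian propagation of the partial derivatives dD/dn_s and
// dD/dn_b.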
std::vector<Float_t> xvec = ev->GetValues();

std::vector<Float_t> temp;
temp.reserve(nClasses);
for (UInt_t iClass = 0; iClass < nClasses; ++iClass) {

for (UInt_t iClass = 0; iClass < nClasses; ++iClass) {
   for (UInt_t j = 0; j < nClasses; ++j) {
      norm += exp(temp[j] - temp[iClass]);
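// The double loop computes P_i = 1 / sum_j exp(t_j - t_i), which equals
// exp(t_i) / sum_j exp(t_j): a softmax written so that only differences of
// cell values are exponentiated, avoiding overflow. A stand-alone sketch of
// the same normalisation (hypothetical helper, not part of this class):
#include <cmath>
#include <vector>

std::vector<float> SoftmaxNorm(const std::vector<float>& t)
{
   std::vector<float> p(t.size());
   for (std::size_t i = 0; i < t.size(); ++i) {
      double norm = 0.;
      for (std::size_t j = 0; j < t.size(); ++j)
         norm += std::exp(t[j] - t[i]);   // == exp(t_j) / exp(t_i)
      p[i] = static_cast<float>(1. / norm);
   }
   return p;
}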
std::vector<Float_t> importance(GetNvar(), 0);

for (UInt_t ifoam = 0; ifoam < fFoam.size(); ++ifoam) {

   std::vector<UInt_t> nCuts(fFoam.at(ifoam)->GetTotDim(), 0);

   std::vector<Float_t> tmp_importance;

   sumOfCuts += nCuts.at(ivar);
   tmp_importance.push_back( nCuts.at(ivar) );

   tmp_importance.at(ivar) /= sumOfCuts;

   tmp_importance.at(ivar) = 0;

   importance.at(ivar) += tmp_importance.at(ivar) / fFoam.size();
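// The ranking above weights each variable by the fraction of cell splits made
// along its axis, averaged over all foams. A sketch of the cut counting done
// by GetNCuts() in this file: walk the binary cell tree and record the split
// dimension of every non-leaf cell. GetDau0()/GetDau1() are the real daughter
// accessors; GetBest() as the name of the split-dimension getter is an
// assumption here:
#include <vector>
#include "TMVA/PDEFoamCell.h"

void CountCuts(TMVA::PDEFoamCell* cell, std::vector<UInt_t>& nCuts)
{
   if (cell == nullptr || cell->GetDau0() == nullptr) return;  // leaf: no split
   ++nCuts.at(cell->GetBest());         // split dimension (accessor name assumed)
   CountCuts(cell->GetDau0(), nCuts);   // recurse into both daughter cells
   CountCuts(cell->GetDau1(), nCuts);
}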
for (UInt_t idim=0; idim<num_vars; idim++) {

std::vector<Double_t> box;
for (Int_t idim = 0; idim < dim; ++idim) {
      << " currently not supported" << Endl;

Log() << kFATAL << "Decision tree cell split algorithm is only"
      << " available for (multi) classification with a single"
      << " PDE-Foam (SigBgSeparate=F)" << Endl;

else Log() << kFATAL << "PDEFoam pointer not set, exiting.." << Endl;
std::vector<Float_t> vals = ev->GetValues();

Log() << kWARNING << "<GetRegressionValues> value vector is empty. " << Endl;

std::map<Int_t, Float_t> xvec;
for (UInt_t i=0; i<vals.size(); ++i)
   xvec.insert(std::pair<Int_t, Float_t>(i, vals.at(i)));

std::vector<Float_t> targets = fFoam.at(0)->GetCellValue( xvec, kValue );

Log() << kFATAL << "Something wrong with multi-target regression foam: "
      << "number of targets does not match the DataSet()" << Endl;

for (UInt_t i=0; i<targets.size(); i++)
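// Application-side sketch (generic TMVA::Reader workflow, not specific to
// this file; the weight-file path and variable names are placeholders):
#include <vector>
#include "TMVA/Reader.h"

std::vector<Float_t> EvaluatePDEFoamRegression(Float_t v1, Float_t v2)
{
   static TMVA::Reader reader("!Color:!Silent");
   static Float_t var1 = 0, var2 = 0;
   static bool booked = false;
   if (!booked) {
      reader.AddVariable("var1", &var1);   // must match the training variables
      reader.AddVariable("var2", &var2);
      reader.BookMVA("PDEFoam", "dataset/weights/TMVARegression_PDEFoam.weights.xml");
      booked = true;
   }
   var1 = v1; var2 = v2;
   return reader.EvaluateRegression("PDEFoam");   // one entry per target
}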
TFile *rootFile = 0;
if (fCompress) rootFile = new TFile(rfname, "RECREATE", "foamfile", 9);
else           rootFile = new TFile(rfname, "RECREATE");

Log() << "writing foam " << fFoam.at(i)->GetFoamName().Data()
      << " to file" << Endl;
fFoam.at(i)->Write(fFoam.at(i)->GetFoamName().Data());

Log() << kINFO << "Foams written to file: "

Bool_t CutNmin, CutRMSmin;
fXmin.assign(kDim, 0);
fXmax.assign(kDim, 0);

for (UInt_t i=0; i<kDim; i++)
   istr >> fXmin.at(i);
for (UInt_t i=0; i<kDim; i++)
   istr >> fXmax.at(i);
if (gTools().HasAttr(wghtnode, "FillFoamWithOrigWeights"))

if (gTools().HasAttr(wghtnode, "UseYesNoCell"))

fXmin.assign(kDim, 0);
fXmax.assign(kDim, 0);

for (UInt_t counter=0; counter<kDim; counter++) {
   Log() << kFATAL << "dimension index out of range:" << i << Endl;

void *xmax_wrap = xmin_wrap;
for (UInt_t counter=0; counter<kDim; counter++) {
   Log() << kFATAL << "dimension index out of range:" << i << Endl;
Log() << kWARNING << "<ReadClonedFoamFromFile>: NULL pointer given" << Endl;

Log() << kWARNING << "<ReadClonedFoamFromFile>: " << foamname
      << " could not be cloned!" << Endl;

TFile *rootFile = new TFile( rfname, "READ" );

fFoam.push_back(foam);
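// What ReadClonedFoamFromFile() guards against, shown in isolation: an object
// retrieved from a TFile is owned by that file, so it must be cloned before
// the file is closed (the file name is a placeholder; "SignalFoam" is one of
// the foam names written above):
#include "TFile.h"
#include "TMVA/PDEFoam.h"

TMVA::PDEFoam* LoadFoam(const char* fname, const char* foamname)
{
   TFile file(fname, "READ");
   TMVA::PDEFoam* in   = dynamic_cast<TMVA::PDEFoam*>(file.Get(foamname));
   TMVA::PDEFoam* foam = in ? dynamic_cast<TMVA::PDEFoam*>(in->Clone()) : nullptr;
   file.Close();   // 'foam' remains valid; the in-file object does not
   return foam;
}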
case 0: return kNone;
case 1: return kGaus;
case 2: return kLinN;

Log() << kWARNING << "<UIntToKernel>: unknown kernel number: " << iker << Endl;

case 0: return kMean;
case 1: return kMpv;

Log() << kWARNING << "<UIntToTargetSelection>: unknown method TargetSelection: " << its << Endl;
for (UInt_t ifoam=0; ifoam<fFoam.size(); ifoam++) {
   for (Int_t idim=0; idim<fFoam.at(ifoam)->GetTotDim(); idim++) {
      fFoam.at(ifoam)->AddVariableName(DataInfo().GetVariableInfo(idim).GetExpression().Data());
Log() << "PDE-Foam is a variation of the PDE-RS method using a self-adapting" << Endl;
Log() << "binning method to divide the multi-dimensional variable space into a" << Endl;
Log() << "finite number of hyper-rectangles (cells). The binning algorithm" << Endl;
Log() << "adjusts the size and position of a predefined number of cells such" << Endl;
Log() << "that the variance of the signal and background densities inside the" << Endl;
Log() << "cells reaches a minimum." << Endl;

Log() << "The PDEFoam classifier supports two different algorithms: " << Endl;

Log() << " (1) Create one foam, which stores the signal over background" << Endl;
Log() << " probability density. During foam buildup the variance of the" << Endl;
Log() << " discriminant inside the cells is minimised." << Endl;

Log() << " Booking option: SigBgSeparated=F" << Endl;

Log() << " (2) Create two separate foams, one for the signal events and one for" << Endl;
Log() << " background events. During foam buildup the variance of the" << Endl;
Log() << " event density inside the cells is minimised separately for" << Endl;
Log() << " signal and background." << Endl;

Log() << " Booking option: SigBgSeparated=T" << Endl;

Log() << "The following options can be set (the listed values are found to be a" << Endl;
Log() << "good starting point for most applications):" << Endl;

Log() << " SigBgSeparate False Separate Signal and Background" << Endl;
Log() << " TailCut 0.001 Fraction of outlier events that are excluded" << Endl;
Log() << " from the foam in each dimension" << Endl;
Log() << " VolFrac 0.0666 Volume fraction (used for density calculation" << Endl;
Log() << " during foam build-up)" << Endl;
Log() << " nActiveCells 500 Maximal number of active cells in final foam" << Endl;
Log() << " nSampl 2000 Number of MC events per cell in foam build-up" << Endl;
Log() << " nBin 5 Number of bins used in foam build-up" << Endl;
Log() << " Nmin 100 Number of events in cell required to split cell" << Endl;
Log() << " Kernel None Kernel type used (possible values are: None," << Endl;

Log() << " Compress True Compress foam output file" << Endl;

Log() << " Additional regression options:" << Endl;

Log() << "MultiTargetRegression False Do regression with multiple targets" << Endl;
Log() << " TargetSelection Mean Target selection method (possible values are:" << Endl;
Log() << " Mean, Mpv)" << Endl;

Log() << "The performance of the two implementations was found to be similar for" << Endl;
Log() << "most examples studied. For the same number of cells per foam, the two-" << Endl;
Log() << "foam option approximately doubles the amount of computer memory needed" << Endl;
Log() << "during classification. For special cases where the event-density" << Endl;
Log() << "distribution of signal and background events is very different, the" << Endl;
Log() << "two-foam option was found to perform significantly better than the" << Endl;
Log() << "option with only one foam." << Endl;

Log() << "In order to gain better classification performance we recommend setting" << Endl;
Log() << "the parameter \"nActiveCells\" to a high value." << Endl;

Log() << "The parameter \"VolFrac\" specifies the size of the sampling volume" << Endl;
Log() << "during foam buildup and should be tuned in order to achieve optimal" << Endl;
Log() << "performance. A larger box leads to a reduced statistical uncertainty" << Endl;
Log() << "for small training samples and to smoother sampling. A smaller box on" << Endl;
Log() << "the other hand increases the sensitivity to statistical fluctuations" << Endl;
Log() << "in the training samples, but for sufficiently large training samples" << Endl;
Log() << "it will result in a more precise local estimate of the sampled" << Endl;
Log() << "density. In general, higher dimensional problems require larger box" << Endl;
Log() << "sizes, due to the reduced average number of events per box volume. The" << Endl;
Log() << "default value of 0.0666 was optimised for an example with 5" << Endl;
Log() << "observables and training samples of the order of 50000 signal and" << Endl;
Log() << "background events each." << Endl;

Log() << "Furthermore, kernel weighting can be activated, which will lead to an" << Endl;
Log() << "additional performance improvement. Note that Gauss weighting will" << Endl;
Log() << "significantly increase the response time of the method. LinNeighbors" << Endl;
Log() << "weighting performs a linear interpolation with direct neighbor cells" << Endl;
Log() << "for each dimension and is much faster than Gauss weighting." << Endl;

Log() << "The classification results were found to be rather insensitive to the" << Endl;
Log() << "values of the parameters \"nSamples\" and \"nBin\"." << Endl;