if (a<b) { Int_t tmp = a; a=b; b=tmp; }
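
// DataSetFactory constructor: verbosity defaults to "Info", scaling of requested
// event numbers by the preselection efficiency is off, and a dedicated message
// logger is created.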
fVerboseLevel(TString("Info")),
fScaleWithPreselEff(0),
fLogger( new MsgLogger("DataSetFactory", kINFO) )
std::vector<TTreeFormula*>::const_iterator formIt;
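
// BuildDynamicDataSet: create a DataSet holding a single Event whose variable
// slots are pointers to externally updated memory, so the same Event can be
// re-evaluated as the external variables change.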
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName())
      << "Build DataSet consisting of one Event with dynamically changing variables" << Endl;

std::vector<Float_t*>* evdyn = new std::vector<Float_t*>(0);
if (varinfos.empty())
   Log() << kFATAL << Form("Dataset[%s] : ",dsi.GetName())
         << "Dynamic data set cannot be built, since no variable information is present. Apparently no variables have been set. This should not happen, please contact the TMVA authors." << Endl;
std::vector<VariableInfo>::iterator it = varinfos.begin(), itEnd = varinfos.end();
for (; it!=itEnd; ++it) {
   Float_t* external = (Float_t*)(*it).GetExternalLink();
   if (external == 0)
      Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName())
            << "The link to the external variable is NULL while I am trying to build a dynamic data set. In this case fTmpEvent from MethodBase HAS TO BE USED in the method to get useful values in variables." << Endl;
   else evdyn->push_back (external);
}
it = spectatorinfos.begin();
for (; it!=spectatorinfos.end(); ++it) evdyn->push_back( (Float_t*)(*it).GetExternalLink() );

TMVA::Event* ev = new Event((const std::vector<Float_t*>*&)evdyn, varinfos.size());
std::vector<Event*>* newEventVector = new std::vector<Event*>;
newEventVector->push_back(ev);

ds->SetCurrentEvent( 0 );

delete newEventVector;
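
// CreateDataSet: steer the creation of a new dataset — register the input
// classes, parse the splitting options, build the per-class event vectors, and
// mix the events into training and test samples.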
std::vector<TString>* classList = dataInput.GetClassList();
for (std::vector<TString>::iterator it = classList->begin(); it < classList->end(); ++it) {
   dsi.AddClass(*it);
}

InitOptions( dsi, eventCounts, normMode, splitSeed, splitMode, mixMode );

DataSet* ds = MixEvents( dsi, eventsmap, eventCounts,
                         splitMode, mixMode, normMode, splitSeed );

if (showCollectedOutput) {
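
// CheckTTreeFormula: sanity-check a compiled TTreeFormula; warn about array
// entries that are not filled for every event, and record whether the expression
// contains TTree-draw "$" constructs (hasDollar).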
Bool_t TMVA::DataSetFactory::CheckTTreeFormula( TTreeFormula* ttf,
                                                const TString& expression,
                                                Bool_t& hasDollar )

   Log() << kFATAL << "Expression " << expression.Data()
         << " could not be resolved to a valid formula. " << Endl;

   Log() << kWARNING << "Expression: " << expression.Data()
         << " does not provide data for this event. "
         << "This event is not taken into account. --> please check if you use as a variable "
         << "an entry of an array which is not filled for some events "
         << "(e.g. arr[4] when arr has only 3 elements)." << Endl;
   Log() << kWARNING << "If you want to take the event into account you can do something like: "
         << "\"Alt$(arr[4],0)\" where in cases where arr doesn't have a 4th element, "
         << " 0 is taken as an alternative." << Endl;
if (expression.Contains("$"))
   hasDollar = kTRUE;

for (int i = 0, iEnd = ttf->GetNcodes(); i < iEnd; ++i)
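
// ChangeToNewTree: when the input TChain switches to a new tree, rebuild the
// TTreeFormula objects for variables, targets, spectators, cuts and weights,
// then re-enable only the branches that are actually used.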
TTree *tr = tinfo.GetTree()->GetTree();

tr->SetBranchStatus("*",1);
tr->ResetBranchAddresses();

Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "transform input variables" << Endl;
std::vector<TTreeFormula*>::const_iterator formIt, formItEnd;

Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "transform regression targets" << Endl;

Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "transform spectator variables" << Endl;

for (formIt = fCutFormulas.begin(), formItEnd = fCutFormulas.end(); formIt!=formItEnd; ++formIt)
   if (*formIt) delete *formIt;

const TString tmpCutExp(tmpCut.GetTitle());

Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "transform weights" << Endl;

ttf = new TTreeFormula( "FormulaWeight", tmpWeight, tr );

tr->SetBranchStatus("*",0);
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "enable branches: input variables" << Endl;

Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "enable branches: targets" << Endl;

Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "enable branches: spectators" << Endl;

Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "enable branches: cuts" << Endl;

Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "enable branches: weights" << Endl;

Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "tree initialized" << Endl;
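
// CalcMinMax: one pass over all events to record the minimum and maximum of
// every input variable, regression target and spectator, stored back into the
// DataSetInfo.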
for (UInt_t ivar=0; ivar<nvar ; ivar++) { min[ivar]   = FLT_MAX; max[ivar]   = -FLT_MAX; }
for (UInt_t ivar=0; ivar<ntgts; ivar++) { tgmin[ivar] = FLT_MAX; tgmax[ivar] = -FLT_MAX; }
for (UInt_t ivar=0; ivar<nvis ; ivar++) { vmin[ivar]  = FLT_MAX; vmax[ivar]  = -FLT_MAX; }

for (Int_t i=0; i<ds->GetNEvents(); i++) {
   const Event * ev = ds->GetEvent(i);

   for (UInt_t ivar=0; ivar<nvar; ivar++) {
      Double_t v = ev->GetValue(ivar);
      if (v<min[ivar]) min[ivar] = v;
      if (v>max[ivar]) max[ivar] = v;
   }
   for (UInt_t itgt=0; itgt<ntgts; itgt++) {
      Double_t v = ev->GetTarget(itgt);
      if (v<tgmin[itgt]) tgmin[itgt] = v;
      if (v>tgmax[itgt]) tgmax[itgt] = v;
   }
   for (UInt_t ivis=0; ivis<nvis; ivis++) {
      Double_t v = ev->GetSpectator(ivis);
      if (v<vmin[ivis]) vmin[ivis] = v;
      if (v>vmax[ivis]) vmax[ivis] = v;
   }
}

for (UInt_t ivar=0; ivar<nvar; ivar++) {
   // ...
   if( TMath::Abs(max[ivar]-min[ivar]) <= FLT_MIN ) {
      // ...
   }
}
for (UInt_t ivar=0; ivar<ntgts; ivar++) {
   // ...
   if( TMath::Abs(tgmax[ivar]-tgmin[ivar]) <= FLT_MIN ) {
      // ...
   }
}
for (UInt_t ivar=0; ivar<nvis; ivar++) {
   // ...
}
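
// CalcCorrelationMatrix: normalise the covariance matrix C into the correlation
// matrix, rho(i,j) = C(i,j) / sqrt(C(i,i)*C(j,j)), guarding against zero variances.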
for (ivar=0; ivar<nvar; ivar++) {
   for (jvar=0; jvar<nvar; jvar++) {
      if (ivar != jvar) {
         Double_t d = (*mat)(ivar, ivar)*(*mat)(jvar, jvar);
         if (d > 0) (*mat)(ivar, jvar) /= sqrt(d);
         else {
            Log() << kWARNING << Form("Dataset[%s] : ", DataSetInfo().GetName())
                  << "<GetCorrelationMatrix> Zero variances for variables "
                  << "(" << ivar << ", " << jvar << ") = " << d << Endl;
            (*mat)(ivar, jvar) = 0;
         }
      }
   }
}

for (ivar=0; ivar<nvar; ivar++) (*mat)(ivar, ivar) = 1.0;
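
// CalcCovarianceMatrix: accumulate weighted first and second moments in a single
// event loop, then form C(i,j) = <x_i x_j> - <x_i><x_j> from the weighted sums.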
UInt_t ivar = 0, jvar = 0;

for (ivar=0; ivar<nvar; ivar++) {
   vec(ivar) = 0;
   for (jvar=0; jvar<nvar; jvar++) mat2(ivar, jvar) = 0;
}

Double_t ic = 0;
for (Int_t i=0; i<ds->GetNEvents(); i++) {

   const Event * ev = ds->GetEvent(i);
   if (ev->GetClass() != classNumber) continue;

   Double_t weight = ev->GetWeight();
   ic += weight;

   for (ivar=0; ivar<nvar; ivar++) {
      Double_t xi = ev->GetValue(ivar);
      vec(ivar) += xi*weight;
      mat2(ivar, ivar) += (xi*xi*weight);

      for (jvar=ivar+1; jvar<nvar; jvar++) {
         Double_t xj = ev->GetValue(jvar);
         mat2(ivar, jvar) += (xi*xj*weight);
      }
   }
}

for (ivar=0; ivar<nvar; ivar++)
   for (jvar=ivar+1; jvar<nvar; jvar++)
      mat2(jvar, ivar) = mat2(ivar, jvar); // symmetric matrix

for (ivar=0; ivar<nvar; ivar++) {
   for (jvar=0; jvar<nvar; jvar++) {
      (*mat)(ivar, jvar) = mat2(ivar, jvar)/ic - vec(ivar)*vec(jvar)/(ic*ic);
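
// InitOptions: declare and parse the dataset-preparation options. A typical
// option string passed to PrepareTrainingAndTestTree, for a class named Signal,
// might look like:
//    "nTrain_Signal=1000:nTest_Signal=500:SplitMode=Random:NormMode=NumEvents:!V"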
splitSpecs.SetConfigDescription( "Configuration options given in the \"PrepareForTrainingAndTesting\" call; these options define the creation of the data sets used for training and expert validation by TMVA" );

splitMode = "Random";
splitSpecs.DeclareOptionRef( splitMode, "SplitMode",
                             "Method of picking training and testing events (default: random)" );
splitSpecs.AddPreDefVal(TString("Random"));
splitSpecs.AddPreDefVal(TString("Alternate"));
splitSpecs.AddPreDefVal(TString("Block"));

mixMode = "SameAsSplitMode";
splitSpecs.DeclareOptionRef( mixMode, "MixMode",
                             "Method of mixing events of different classes into one dataset (default: SameAsSplitMode)" );
splitSpecs.AddPreDefVal(TString("SameAsSplitMode"));
splitSpecs.AddPreDefVal(TString("Random"));
splitSpecs.AddPreDefVal(TString("Alternate"));
splitSpecs.AddPreDefVal(TString("Block"));

splitSpecs.DeclareOptionRef( splitSeed, "SplitSeed",
                             "Seed for random event shuffling" );

normMode = "EqualNumEvents";
splitSpecs.DeclareOptionRef( normMode, "NormMode",
                             "Overall renormalisation of event-by-event weights used in the training (NumEvents: average weight of 1 per event, independently for signal and background; EqualNumEvents: average weight of 1 per event for signal, and sum of weights for background equal to sum of weights for signal)" );
splitSpecs.AddPreDefVal(TString("None"));
splitSpecs.AddPreDefVal(TString("NumEvents"));
splitSpecs.AddPreDefVal(TString("EqualNumEvents"));

splitSpecs.DeclareOptionRef( fScaleWithPreselEff=kFALSE, "ScaleWithPreselEff",
                             "Scale the number of requested events by the eff. of the preselection cuts (or not)" );

for (UInt_t cl = 0; cl < dsi.GetNClasses(); cl++) {
   TString clName = dsi.GetClassInfo(cl)->GetName();
   TString titleTrain = TString().Format("Number of training events of class %s (default: 0 = all)",clName.Data()).Data();
   TString titleTest  = TString().Format("Number of test events of class %s (default: 0 = all)",clName.Data()).Data();
   TString titleSplit = TString().Format("Split in training and test events of class %s (default: 0 = deactivated)",clName.Data()).Data();

   splitSpecs.DeclareOptionRef( nEventRequests.at(cl).nTrainingEventsRequested, TString("nTrain_")+clName, titleTrain );
   splitSpecs.DeclareOptionRef( nEventRequests.at(cl).nTestingEventsRequested , TString("nTest_")+clName , titleTest  );
   splitSpecs.DeclareOptionRef( nEventRequests.at(cl).TrainTestSplitRequested , TString("TrainTestSplit_")+clName , titleSplit );
}

splitSpecs.DeclareOptionRef( fVerbose, "V", "Verbosity (default: true)" );

splitSpecs.DeclareOptionRef( fVerboseLevel=TString("Info"), "VerboseLevel", "VerboseLevel (Debug/Verbose/Info)" );
splitSpecs.AddPreDefVal(TString("Debug"));
splitSpecs.AddPreDefVal(TString("Verbose"));
splitSpecs.AddPreDefVal(TString("Info"));

splitSpecs.DeclareOptionRef( fCorrelations, "Correlations", "Boolean to show correlation output (Default: true)" );

splitSpecs.ParseOptions();
splitSpecs.CheckForUnusedOptions();
splitMode.ToUpper(); mixMode.ToUpper(); normMode.ToUpper();

Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
      << "\tSplitmode is: \"" << splitMode << "\" the mixmode is: \"" << mixMode << "\"" << Endl;
if (mixMode=="SAMEASSPLITMODE") mixMode = splitMode;
else if (mixMode!=splitMode)
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << "DataSet splitmode=" << splitMode
         << " differs from mixmode=" << mixMode << Endl;
for (size_t i=0; i<nclasses; i++) {
   eventCounts[i].varAvLength = new Float_t[nvars];
   for (UInt_t ivar=0; ivar<nvars; ivar++)
      eventCounts[i].varAvLength[ivar] = 0;
}

std::map<TString, int> nanInfWarnings;
std::map<TString, int> nanInfErrors;

for (UInt_t cl=0; cl<nclasses; cl++) {

   EventStats& classEventCounts = eventCounts[cl];

   TString currentFileName("");

   std::vector<Float_t> vars(nvars);
   std::vector<Float_t> tgts(ntgts);
   std::vector<Float_t> vis(nvis);

   Bool_t isChain = (TString("TChain") == currentInfo.GetTree()->ClassName());
   currentInfo.GetTree()->LoadTree(0);
   for (Long64_t evtIdx = 0; evtIdx < nEvts; evtIdx++) {
      currentInfo.GetTree()->LoadTree(evtIdx);

      if (currentInfo.GetTree()->GetTree()->GetDirectory()->GetFile()->GetName() != currentFileName) {
         currentFileName = currentInfo.GetTree()->GetTree()->GetDirectory()->GetFile()->GetName();
         // ...
      }

      currentInfo.GetTree()->GetEntry(evtIdx);
      Int_t sizeOfArrays = 1;
      Int_t prevArrExpr = 0;
      for (UInt_t ivar=0; ivar<nvars; ivar++) {

         if (ndata == 1) continue;

         varIsArray[ivar] = kTRUE;
         if (sizeOfArrays == 1) {
            sizeOfArrays = ndata;
            prevArrExpr = ivar;
         }
         else if (sizeOfArrays!=ndata) {
            Log() << kERROR << Form("Dataset[%s] : ",dsi.GetName())
                  << "ERROR while preparing training and testing trees:" << Endl;
            Log() << Form("Dataset[%s] : ",dsi.GetName())
                  << " multiple array-type expressions of different length were encountered" << Endl;
            Log() << Form("Dataset[%s] : ",dsi.GetName())
                  << " location of error: event " << evtIdx
                  << " in tree " << currentInfo.GetTree()->GetName()
                  << " of file " << currentInfo.GetTree()->GetCurrentFile()->GetName() << Endl;
            Log() << Form("Dataset[%s] : ",dsi.GetName()) << ndata << " entries, while" << Endl;
            Log() << kFATAL << Form("Dataset[%s] : ",dsi.GetName()) << "Need to abort" << Endl;
         }
      }
      for (Int_t idata = 0; idata<sizeOfArrays; idata++) {

         auto checkNanInf = [&](std::map<TString, int> &msgMap, Float_t value,
                                const char *what, const char *formulaTitle) {
            if (TMath::IsNaN(value)) {
               contains_NaN_or_inf = kTRUE;
               ++msgMap[TString::Format("Dataset[%s] : %s expression resolves to indeterminate value (NaN): %s",
                                        dsi.GetName(), what, formulaTitle)];
            }
            else if (!TMath::Finite(value)) {
               contains_NaN_or_inf = kTRUE;
               ++msgMap[TString::Format("Dataset[%s] : %s expression resolves to infinite value (+inf or -inf): %s",
                                        dsi.GetName(), what, formulaTitle)];
            }
         };
         checkNanInf(nanInfErrors, cutVal, "Cut", formula->GetTitle());

         auto &nanMessages = cutVal < 0.5 ? nanInfWarnings : nanInfErrors;

         for (UInt_t ivar=0; ivar<nvars; ivar++) {
            // ...
            vars[ivar] = (ndata == 1 ?
                          formula->EvalInstance(0) :
                          formula->EvalInstance(idata));
            checkNanInf(nanMessages, vars[ivar], "Input", formula->GetTitle());
         }

         for (UInt_t itrgt=0; itrgt<ntgts; itrgt++) {
            // ...
            tgts[itrgt] = (ndata == 1 ?
                           formula->EvalInstance(0) :
                           formula->EvalInstance(idata));
            checkNanInf(nanMessages, tgts[itrgt], "Target", formula->GetTitle());
         }

         for (UInt_t itVis=0; itVis<nvis; itVis++) {
            // ...
            vis[itVis] = (ndata == 1 ?
                          formula->EvalInstance(0) :
                          formula->EvalInstance(idata));
            checkNanInf(nanMessages, vis[itVis], "Spectator", formula->GetTitle());
         }

         weight *= (ndata == 1 ?
                    formula->EvalInstance(0) :
                    formula->EvalInstance(idata));
         checkNanInf(nanMessages, weight, "Weight", formula->GetTitle());

         if (cutVal<0.5) continue;
         if (contains_NaN_or_inf) {
            Log() << kWARNING << Form("Dataset[%s] : ",dsi.GetName()) << "NaN or +-inf in Event " << evtIdx << Endl;
            if (sizeOfArrays>1) Log() << kWARNING << Form("Dataset[%s] : ",dsi.GetName()) << " rejected" << Endl;
         }

         event_v.push_back(new Event(vars, tgts, vis, cl, weight));

   currentInfo.GetTree()->ResetBranchAddresses();
if (!nanInfWarnings.empty()) {
   Log() << kWARNING << "Found events with NaN and/or +-inf values" << Endl;
   for (const auto &warning : nanInfWarnings) {
      auto &log = Log() << kWARNING << warning.first;
      if (warning.second > 1) log << " (" << warning.second << " times)";
      log << Endl;
   }
   Log() << kWARNING << "These NaN and/or +-infs were all removed by the specified cut, continuing." << Endl;
}

if (!nanInfErrors.empty()) {
   Log() << kWARNING << "Found events with NaN and/or +-inf values (not removed by cut)" << Endl;
   for (const auto &error : nanInfErrors) {
      auto &log = Log() << kWARNING << error.first;
      if (error.second > 1) log << " (" << error.second << " times)";
      log << Endl;
   }
   Log() << kFATAL << "How am I supposed to train a NaN or +-inf?!" << Endl;
}
Log() << kHEADER << Form("[%s] : ",dsi.GetName()) << "Number of events in input trees" << Endl;
Log() << kDEBUG << "(after possible flattening of arrays):" << Endl;

      << " -- number of events : " << std::setw(5) << eventCounts[cl].nEvBeforeCut
      << " / sum of weights: " << std::setw(5) << eventCounts[cl].nWeEvBeforeCut << Endl;

      << " tree -- total number of entries: "

      << "\tPreselection: (will affect number of requested training and testing events)" << Endl;

      << "\tPreselection: (will NOT affect number of requested training and testing events)" << Endl;

      << " -- number of events passed: " << std::setw(5) << eventCounts[cl].nEvAfterCut
      << " / sum of weights: " << std::setw(5) << eventCounts[cl].nWeEvAfterCut << Endl;

      << " -- efficiency : " << std::setw(6) << eventCounts[cl].nWeEvAfterCut/eventCounts[cl].nWeEvBeforeCut << Endl;

      << " No preselection cuts applied on event classes" << Endl;
DataSet* TMVA::DataSetFactory::MixEvents( DataSetInfo& dsi,
                                          EventVectorOfClassesOfTreeType& eventsmap,
                                          EvtStatsPerClass& eventCounts,
                                          const TString& splitMode,
                                          const TString& mixMode,
                                          const TString& normMode,
                                          UInt_t splitSeed )

if (splitMode.Contains("RANDOM")) {

   if (!unspecifiedEvents.empty()) {
      Log() << kDEBUG << "randomly shuffling "
            << unspecifiedEvents.size()
            << " events of class " << cls
            << " which are not yet associated to testing or training" << Endl;
      std::shuffle(unspecifiedEvents.begin(), unspecifiedEvents.end(), rndm);
   }
}
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "SPLITTING ========" << Endl;

Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "---- class " << cls << Endl;
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName())
      << "check number of training/testing events, requested and available number of events and for class " << cls << Endl;

Int_t availableTraining  = eventVectorTraining.size();
Int_t availableTesting   = eventVectorTesting.size();
Int_t availableUndefined = eventVectorUndefined.size();

presel_scale = eventCounts[cls].cutScaling();
if (presel_scale < 1)
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << " you have opted for scaling the number of requested training/testing events\n to be scaled by the preselection efficiency" << Endl;

if (eventCounts[cls].cutScaling() < 1)
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << " you have opted for interpreting the requested number of training/testing events\n to be the number of events AFTER your preselection cuts" << Endl;
if (eventCounts[cls].TrainTestSplitRequested < 1.0 && eventCounts[cls].TrainTestSplitRequested > 0.0) {
   eventCounts[cls].nTrainingEventsRequested = Int_t(eventCounts[cls].TrainTestSplitRequested*(availableTraining+availableTesting+availableUndefined));
   eventCounts[cls].nTestingEventsRequested  = Int_t(0);
}
else if (eventCounts[cls].TrainTestSplitRequested != 0.0)
   Log() << kFATAL << Form("The option TrainTestSplit_<class> has to be in range (0, 1] but is set to %f.",eventCounts[cls].TrainTestSplitRequested) << Endl;

Int_t requestedTraining = Int_t(eventCounts[cls].nTrainingEventsRequested * presel_scale);
Int_t requestedTesting  = Int_t(eventCounts[cls].nTestingEventsRequested  * presel_scale);
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "events in training trees    : " << availableTraining  << Endl;
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "events in testing trees     : " << availableTesting   << Endl;
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "events in unspecified trees : " << availableUndefined << Endl;
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "requested for training      : " << requestedTraining  << Endl;

Log() << " ( " << eventCounts[cls].nTrainingEventsRequested
      << " * " << presel_scale << " preselection efficiency)" << Endl;

Log() << kDEBUG << "requested for testing       : " << requestedTesting;

Log() << " ( " << eventCounts[cls].nTestingEventsRequested
      << " * " << presel_scale << " preselection efficiency)" << Endl;
Int_t useForTesting(0), useForTraining(0);
Int_t allAvailable(availableUndefined + availableTraining + availableTesting);

if ((requestedTraining == 0) && (requestedTesting == 0)) {

   if (availableUndefined >= TMath::Abs(availableTraining - availableTesting)) {
      useForTraining = useForTesting = allAvailable/2;
   }
   else {
      useForTraining = availableTraining;
      useForTesting  = availableTesting;
      if (availableTraining < availableTesting)
         useForTraining += availableUndefined;
      else
         useForTesting += availableUndefined;
   }
   requestedTraining = useForTraining;
   requestedTesting  = useForTesting;
}
else if (requestedTesting == 0) {
   useForTraining = TMath::Max(requestedTraining,availableTraining);
   if (allAvailable < useForTraining) {
      Log() << kFATAL << Form("Dataset[%s] : ",dsi.GetName())
            << "More events requested for training (" << requestedTraining
            << ") than available (" << allAvailable << ")!" << Endl;
   }
   useForTesting    = allAvailable - useForTraining;
   requestedTesting = useForTesting;
}
else if (requestedTraining == 0) {
   useForTesting = TMath::Max(requestedTesting,availableTesting);
   if (allAvailable < useForTesting) {
      Log() << kFATAL << Form("Dataset[%s] : ",dsi.GetName())
            << "More events requested for testing (" << requestedTesting
            << ") than available (" << allAvailable << ")!" << Endl;
   }
   useForTraining    = allAvailable - useForTesting;
   requestedTraining = useForTraining;
}
else {
   Int_t stillNeedForTraining = TMath::Max(requestedTraining-availableTraining,0);
   Int_t stillNeedForTesting  = TMath::Max(requestedTesting-availableTesting,0);

   int NFree = availableUndefined - stillNeedForTraining - stillNeedForTesting;
   if (NFree < 0) NFree = 0;
   useForTraining = TMath::Max(requestedTraining,availableTraining) + NFree/2;
   useForTesting  = allAvailable - useForTraining;
}
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "determined event sample size to select training sample from=" << useForTraining << Endl;
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "determined event sample size to select test sample from=" << useForTesting << Endl;
if (splitMode == "ALTERNATE") {
   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "split 'ALTERNATE'" << Endl;
   Int_t nTraining = availableTraining;
   Int_t nTesting  = availableTesting;
   for (EventVector::iterator it = eventVectorUndefined.begin(), itEnd = eventVectorUndefined.end(); it != itEnd; ) {
      // ...
      if (nTraining <= requestedTraining) {
         eventVectorTraining.insert( eventVectorTraining.end(), (*it) );
         // ...
      }
      // ...
         eventVectorTesting.insert( eventVectorTesting.end(), (*it) );
      // ...
   }
}
else {
   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "split '" << splitMode << "'" << Endl;
   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "availableundefined : " << availableUndefined << Endl;
   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "useForTraining     : " << useForTraining << Endl;
   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "useForTesting      : " << useForTesting << Endl;
   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "availableTraining  : " << availableTraining << Endl;
   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "availableTesting   : " << availableTesting << Endl;

   if (availableUndefined<(useForTraining-availableTraining) ||
       availableUndefined<(useForTesting -availableTesting ) ||
       availableUndefined<(useForTraining+useForTesting-availableTraining-availableTesting)) {
      Log() << kFATAL << Form("Dataset[%s] : ",dsi.GetName()) << "More events requested than available!" << Endl;
   }

   if (useForTraining>availableTraining) {
      eventVectorTraining.insert( eventVectorTraining.end(), eventVectorUndefined.begin(), eventVectorUndefined.begin() + useForTraining - availableTraining );
      eventVectorUndefined.erase( eventVectorUndefined.begin(), eventVectorUndefined.begin() + useForTraining - availableTraining );
   }
   if (useForTesting>availableTesting) {
      eventVectorTesting.insert( eventVectorTesting.end(), eventVectorUndefined.begin(), eventVectorUndefined.begin() + useForTesting - availableTesting );
   }
   eventVectorUndefined.clear();
if (splitMode.Contains("RANDOM")) {
   UInt_t sizeTraining = eventVectorTraining.size();
   if (sizeTraining > UInt_t(requestedTraining)) {
      std::vector<UInt_t> indicesTraining( sizeTraining );

      std::shuffle(indicesTraining.begin(), indicesTraining.end(), rndm);

      indicesTraining.erase( indicesTraining.begin()+sizeTraining-UInt_t(requestedTraining), indicesTraining.end() );

      for (std::vector<UInt_t>::iterator it = indicesTraining.begin(), itEnd = indicesTraining.end(); it != itEnd; ++it) {
         delete eventVectorTraining.at( (*it) );
         eventVectorTraining.at( (*it) ) = NULL;
      }

      eventVectorTraining.erase( std::remove( eventVectorTraining.begin(), eventVectorTraining.end(), (void*)NULL ), eventVectorTraining.end() );
   }

   UInt_t sizeTesting = eventVectorTesting.size();
   if (sizeTesting > UInt_t(requestedTesting)) {
      std::vector<UInt_t> indicesTesting( sizeTesting );

      std::shuffle(indicesTesting.begin(), indicesTesting.end(), rndm);

      indicesTesting.erase( indicesTesting.begin()+sizeTesting-UInt_t(requestedTesting), indicesTesting.end() );

      for (std::vector<UInt_t>::iterator it = indicesTesting.begin(), itEnd = indicesTesting.end(); it != itEnd; ++it) {
         delete eventVectorTesting.at( (*it) );
         eventVectorTesting.at( (*it) ) = NULL;
      }

      eventVectorTesting.erase( std::remove( eventVectorTesting.begin(), eventVectorTesting.end(), (void*)NULL ), eventVectorTesting.end() );
   }
}
else {
   if (eventVectorTraining.size() < UInt_t(requestedTraining))
      Log() << kWARNING << Form("Dataset[%s] : ",dsi.GetName())
            << "DataSetFactory/requested number of training samples larger than size of eventVectorTraining.\n"
            << "There is probably an issue. Please contact the TMVA developers." << Endl;
   std::for_each( eventVectorTraining.begin()+requestedTraining, eventVectorTraining.end(), DeleteFunctor<Event>() );
   eventVectorTraining.erase(eventVectorTraining.begin()+requestedTraining, eventVectorTraining.end());

   if (eventVectorTesting.size() < UInt_t(requestedTesting))
      Log() << kWARNING << Form("Dataset[%s] : ",dsi.GetName())
            << "DataSetFactory/requested number of testing samples larger than size of eventVectorTesting.\n"
            << "There is probably an issue. Please contact the TMVA developers." << Endl;
   std::for_each( eventVectorTesting.begin()+requestedTesting, eventVectorTesting.end(), DeleteFunctor<Event>() );
   eventVectorTesting.erase(eventVectorTesting.begin()+requestedTesting, eventVectorTesting.end());
}
Int_t trainingSize = 0;
Int_t testingSize  = 0;

trainingEventVector->reserve( trainingSize );
testingEventVector->reserve( testingSize );

Log() << kDEBUG << " MIXING ============= " << Endl;
if (mixMode == "ALTERNATE") {

   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << "Training sample: You are trying to mix events in alternate mode although the classes have different event numbers. This works but the alternation stops at the last event of the smaller class." << Endl;

   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << "Testing sample: You are trying to mix events in alternate mode although the classes have different event numbers. This works but the alternation stops at the last event of the smaller class." << Endl;

   typedef EventVector::iterator EvtVecIt;
   EvtVecIt itEvent, itEventEnd;

   Log() << kDEBUG << "insert class 0 into training and test vector" << Endl;

   testingEventVector->insert( testingEventVector->end(),
                               tmpEventVector[Types::kTesting].at(0).begin(),
                               tmpEventVector[Types::kTesting].at(0).end() );

   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "insert class " << cls << Endl;
   itTarget = trainingEventVector->begin() - 1;

   for (itEvent = tmpEventVector[Types::kTraining].at(cls).begin(),
        itEventEnd = tmpEventVector[Types::kTraining].at(cls).end(); itEvent != itEventEnd; ++itEvent) {

      if ((trainingEventVector->end() - itTarget) < Int_t(cls+1)) {
         itTarget = trainingEventVector->end();
         trainingEventVector->insert( itTarget, itEvent, itEventEnd );
         break;
      }
      else {
         itTarget += cls+1;
         trainingEventVector->insert( itTarget, (*itEvent) );
      }
   }

   itTarget = testingEventVector->begin() - 1;

   for (itEvent = tmpEventVector[Types::kTesting].at(cls).begin(),
        itEventEnd = tmpEventVector[Types::kTesting].at(cls).end(); itEvent != itEventEnd; ++itEvent) {

      if ((testingEventVector->end() - itTarget) < Int_t(cls+1)) {
         itTarget = testingEventVector->end();
         testingEventVector->insert( itTarget, itEvent, itEventEnd );
         break;
      }
      else {
         itTarget += cls+1;
         testingEventVector->insert( itTarget, (*itEvent) );
      }
   }
}
else {
   trainingEventVector->insert( trainingEventVector->end(),
                                tmpEventVector[Types::kTraining].at(cls).begin(),
                                tmpEventVector[Types::kTraining].at(cls).end() );
   testingEventVector->insert ( testingEventVector->end(),
                                tmpEventVector[Types::kTesting].at(cls).begin(),
                                tmpEventVector[Types::kTesting].at(cls).end() );
}
if (mixMode == "RANDOM") {
   Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "shuffling events" << Endl;

   std::shuffle(trainingEventVector->begin(), trainingEventVector->end(), rndm);
   std::shuffle(testingEventVector->begin(),  testingEventVector->end(),  rndm);
}

Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "trainingEventVector " << trainingEventVector->size() << Endl;
Log() << kDEBUG << Form("Dataset[%s] : ",dsi.GetName()) << "testingEventVector  " << testingEventVector->size() << Endl;
Log() << kFATAL << "Dataset " << std::string(dsi.GetName())
      << " does not have any training events; I'd better stop here and let you fix that one first " << Endl;

Log() << kERROR << "Dataset " << std::string(dsi.GetName())
      << " does not have any testing events; guess that will cause problems later, but for now I continue " << Endl;

delete trainingEventVector;
delete testingEventVector;
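
// RenormEvents: rescale the TRAINING event weights according to NormMode. With
// "NumEvents" each class is scaled to an average weight of one per event; with
// "EqualNumEvents" every class is scaled to the effective size of class 0, i.e.
// renormFactor[cls] = N_events(reference class) / Sum_i w_i(cls).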
                                        const TString& normMode )

Int_t trainingSize = 0;
Int_t testingSize  = 0;

Double_t trainingSumSignalWeights = 0;
Double_t trainingSumBackgrWeights = 0;
Double_t testingSumSignalWeights  = 0;
Double_t testingSumBackgrWeights  = 0;

trainingSizePerClass.at(cls) = tmpEventVector[Types::kTraining].at(cls).size();
testingSizePerClass.at(cls)  = tmpEventVector[Types::kTesting].at(cls).size();

trainingSize += trainingSizePerClass.at(cls);
testingSize  += testingSizePerClass.at(cls);
trainingSumWeightsPerClass.at(cls) =
   std::accumulate( tmpEventVector[Types::kTraining].at(cls).begin(),
                    tmpEventVector[Types::kTraining].at(cls).end(),
                    Double_t(0), [](Double_t w, const TMVA::Event* E) { return w + E->GetOriginalWeight(); } );

testingSumWeightsPerClass.at(cls) =
   std::accumulate( tmpEventVector[Types::kTesting].at(cls).begin(),
                    tmpEventVector[Types::kTesting].at(cls).end(),
                    Double_t(0), [](Double_t w, const TMVA::Event* E) { return w + E->GetOriginalWeight(); } );

if (cls == dsi.GetSignalClassIndex()) {
   trainingSumSignalWeights += trainingSumWeightsPerClass.at(cls);
   testingSumSignalWeights  += testingSumWeightsPerClass.at(cls);
}
else {
   trainingSumBackgrWeights += trainingSumWeightsPerClass.at(cls);
   testingSumBackgrWeights  += testingSumWeightsPerClass.at(cls);
}
if (normMode == "NONE") {
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << "No weight renormalisation applied: use original global and event weights" << Endl;
}
else if (normMode == "NUMEVENTS") {
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << "\tWeight renormalisation mode: \"NumEvents\": renormalises all event classes " << Endl;
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << " such that the effective (weighted) number of events in each class equals the respective " << Endl;
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << " number of events (entries) that you demanded in PrepareTrainingAndTestTree(\"\",\"nTrain_Signal=.. )" << Endl;
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << " ... i.e. such that Sum[i=1..N_j]{w_i} = N_j, j=0,1,2..." << Endl;
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << " ... (note that N_j is the sum of TRAINING events (nTrain_j...with j=Signal,Background.." << Endl;
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << " ..... Testing events are not renormalised nor included in the renormalisation factor! )" << Endl;

   renormFactor.at(cls) = ((Float_t)trainingSizePerClass.at(cls)) / (trainingSumWeightsPerClass.at(cls));
}
else if (normMode == "EQUALNUMEVENTS") {
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << "Weight renormalisation mode: \"EqualNumEvents\": renormalises all event classes ..." << Endl;
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << " such that the effective (weighted) number of events in each class is the same " << Endl;
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << " (and equals the number of events (entries) given for class=0 )" << Endl;
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << "... i.e. such that Sum[i=1..N_j]{w_i} = N_classA, j=classA, classB, ..." << Endl;
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << "... (note that N_j is the sum of TRAINING events" << Endl;
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName())
         << " ..... Testing events are not renormalised nor included in the renormalisation factor!)" << Endl;

   UInt_t referenceClass = 0;
   renormFactor.at(cls) = Float_t(trainingSizePerClass.at(referenceClass)) / (trainingSumWeightsPerClass.at(cls));
}
else {
   Log() << kFATAL << Form("Dataset[%s] : ",dsi.GetName())
         << "<PrepareForTrainingAndTesting> Unknown NormMode: " << normMode << Endl;
}
      << "--> Rescale " << setiosflags(ios::left) << std::setw(maxL)

for (EventVector::iterator it = tmpEventVector[Types::kTraining].at(cls).begin(),
     itEnd = tmpEventVector[Types::kTraining].at(cls).end(); it != itEnd; ++it) {
   (*it)->SetWeight ((*it)->GetWeight() * renormFactor.at(cls));
}
      << "Number of training and testing events" << Endl;
Log() << kDEBUG << "\tafter rescaling:" << Endl;
      << "---------------------------------------------------------------------------" << Endl;

trainingSumSignalWeights = 0;
trainingSumBackgrWeights = 0;
testingSumSignalWeights  = 0;
testingSumBackgrWeights  = 0;
trainingSumWeightsPerClass.at(cls) =
   std::accumulate( tmpEventVector[Types::kTraining].at(cls).begin(),
                    tmpEventVector[Types::kTraining].at(cls).end(),
                    Double_t(0), [](Double_t w, const TMVA::Event* E) { return w + E->GetWeight(); } );

testingSumWeightsPerClass.at(cls) =
   std::accumulate( tmpEventVector[Types::kTesting].at(cls).begin(),
                    tmpEventVector[Types::kTesting].at(cls).end(),
                    Double_t(0), [](Double_t w, const TMVA::Event* E) { return w + E->GetWeight(); } );

if (cls == dsi.GetSignalClassIndex()) {
   trainingSumSignalWeights += trainingSumWeightsPerClass.at(cls);
   testingSumSignalWeights  += testingSumWeightsPerClass.at(cls);
}
else {
   trainingSumBackgrWeights += trainingSumWeightsPerClass.at(cls);
   testingSumBackgrWeights  += testingSumWeightsPerClass.at(cls);
}
      << setiosflags(ios::left) << std::setw(maxL)
      << "training events            : " << trainingSizePerClass.at(cls) << Endl;
Log() << kDEBUG << "\t(sum of weights: " << trainingSumWeightsPerClass.at(cls) << ")"
      << " - requested were " << eventCounts[cls].nTrainingEventsRequested << " events" << Endl;
      << setiosflags(ios::left) << std::setw(maxL)
      << "testing events             : " << testingSizePerClass.at(cls) << Endl;
Log() << kDEBUG << "\t(sum of weights: " << testingSumWeightsPerClass.at(cls) << ")"
      << " - requested were " << eventCounts[cls].nTestingEventsRequested << " events" << Endl;
      << setiosflags(ios::left) << std::setw(maxL)
      << "training and testing events: " << (trainingSizePerClass.at(cls)+testingSizePerClass.at(cls)) << Endl;
Log() << kDEBUG << "\t(sum of weights: "
      << (trainingSumWeightsPerClass.at(cls)+testingSumWeightsPerClass.at(cls)) << ")" << Endl;
if (eventCounts[cls].nEvAfterCut < eventCounts[cls].nEvBeforeCut) {
   Log() << kINFO << Form("Dataset[%s] : ",dsi.GetName()) << setiosflags(ios::left) << std::setw(maxL)
         << "due to the preselection a scaling factor has been applied to the numbers of requested events: "
         << eventCounts[cls].cutScaling() << Endl;
}