// ROOT logo
// @(#)root/tmva $Id: MethodRuleFit.cxx 29195 2009-06-24 10:39:49Z brun $
// Author: Fredrik Tegenfeldt

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis       *
 * Package: TMVA                                                                  *
 * Class  : MethodRuleFit                                                         *
 * Web    : http://tmva.sourceforge.net                                           *
 *                                                                                *
 * Description:                                                                   *
 *      Implementation (see header file for description)                          *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Fredrik Tegenfeldt <Fredrik.Tegenfeldt@cern.ch>  - Iowa State U., USA     *
 *                                                                                *
 * Copyright (c) 2005:                                                            *
 *      CERN, Switzerland                                                         * 
 *      Iowa State U.                                                             *
 *      MPI-K Heidelberg, Germany                                                 * 
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (http://tmva.sourceforge.net/LICENSE)                                          *
 **********************************************************************************/

//_______________________________________________________________________
//
// J Friedman's RuleFit method
//_______________________________________________________________________

#include <algorithm>
#include <list>
#include <memory>
#include <random>

#include "Riostream.h"
#include "TRandom3.h"
#include "TMath.h"
#include "TMatrix.h"
#include "TDirectory.h"

#include "TMVA/ClassifierFactory.h"
#include "TMVA/GiniIndex.h"
#include "TMVA/CrossEntropy.h"
#include "TMVA/SdivSqrtSplusB.h"
#include "TMVA/SeparationBase.h"
#include "TMVA/MisClassificationError.h"
#include "TMVA/MethodRuleFit.h"
#include "TMVA/RuleFitAPI.h"
#include "TMVA/Tools.h"
#include "TMVA/Timer.h"
#include "TMVA/Ranking.h"
#include "TMVA/Config.h"
#include "TMVA/MsgLogger.h"

REGISTER_METHOD(RuleFit)

ClassImp(TMVA::MethodRuleFit)
 
//_______________________________________________________________________
TMVA::MethodRuleFit::MethodRuleFit( const TString& jobName,
                                    const TString& methodTitle,
                                    DataSetInfo& theData, 
                                    const TString& theOption,
                                    TDirectory* theTargetDir ) :
   MethodBase( jobName, Types::kRuleFit, methodTitle, theData, theOption, theTargetDir )
{
   // standard constructor used when booking the method for training;
   // all initialization is delegated to the MethodBase constructor,
   // option handling follows later via DeclareOptions()/ProcessOptions()
}

//_______________________________________________________________________
TMVA::MethodRuleFit::MethodRuleFit( DataSetInfo& theData,
                                    const TString& theWeightFile,
                                    TDirectory* theTargetDir ) :
   MethodBase( Types::kRuleFit, theData, theWeightFile, theTargetDir )
{
   // constructor used when instantiating the method from an existing
   // weight file (application phase); delegates to MethodBase
}

//_______________________________________________________________________
TMVA::MethodRuleFit::~MethodRuleFit( void )
{
   // destructor: this class owns the copied training events and the
   // decision-tree forest, so release both here
   while (!fEventSample.empty()) {
      delete fEventSample.back();
      fEventSample.pop_back();
   }
   while (!fForest.empty()) {
      delete fForest.back();
      fForest.pop_back();
   }
}

//_______________________________________________________________________
Bool_t TMVA::MethodRuleFit::HasAnalysisType( Types::EAnalysisType type, UInt_t numberClasses, UInt_t /*numberTargets*/ )
{
   // RuleFit supports binary (2-class) classification only;
   // regression and multiclass are not handled
   return (type == Types::kClassification && numberClasses == 2) ? kTRUE : kFALSE;
}

//_______________________________________________________________________
void TMVA::MethodRuleFit::DeclareOptions() 
{
   // define the options (their key words) that can be set in the option string 
   // known options.
   //---------
   // general
   //---------
   // RuleFitModule  <string>     
   //    available values are:    RFTMVA      - use TMVA implementation
   //                             RFFriedman  - use Friedmans original implementation
   //----------------------
   // Path search (fitting)
   //----------------------
   // GDTau          <float>      gradient-directed path: fit threshold, default
   // GDTauPrec      <float>      gradient-directed path: precision of estimated tau
   // GDStep         <float>      gradient-directed path: step size       
   // GDNSteps       <float>      gradient-directed path: number of steps 
   // GDErrScale     <float>      stop scan when error>scale*errmin       
   //-----------------
   // Tree generation
   //-----------------
   // fEventsMin     <float>      minimum fraction of events in a splittable node
   // fEventsMax     <float>      maximum fraction of events in a splittable node
   // nTrees         <float>      number of trees in forest.
   // ForestType     <string>
   //    available values are:    Random    - create forest using random subsample
   //                             AdaBoost  - create forest with boosted events
   //
   //-----------------
   // Model creation
   //-----------------
   // RuleMinDist    <float>      min distance allowed between rules
   // MinImp         <float>      minimum rule importance accepted        
   // Model          <string>     model to be used
   //    available values are:    ModRuleLinear <default>
   //                             ModRule
   //                             ModLinear
   //
   //-----------------
   // Friedmans module
   //-----------------
   // RFWorkDir      <string>     directory where Friedmans module (rf_go.exe) is installed
   // RFNrules       <int>        maximum number of rules allowed
   // RFNendnodes    <int>        average number of end nodes in the forest of trees
   //
   // gradient-directed path options
   DeclareOptionRef(fGDTau=-1,             "GDTau",          "Gradient-directed (GD) path: default fit cut-off");
   DeclareOptionRef(fGDTauPrec=0.01,       "GDTauPrec",      "GD path: precision of tau");
   DeclareOptionRef(fGDPathStep=0.01,      "GDStep",         "GD path: step size");
   DeclareOptionRef(fGDNPathSteps=10000,   "GDNSteps",       "GD path: number of steps");
   DeclareOptionRef(fGDErrScale=1.1,       "GDErrScale",     "Stop scan when error > scale*errmin");
   // fLinQuantile has no default here; it is preset in Init()
   DeclareOptionRef(fLinQuantile,           "LinQuantile",  "Quantile of linear terms (removes outliers)");
   DeclareOptionRef(fGDPathEveFrac=0.5,    "GDPathEveFrac",  "Fraction of events used for the path search");
   DeclareOptionRef(fGDValidEveFrac=0.5,   "GDValidEveFrac", "Fraction of events used for the validation");
   // tree options
   DeclareOptionRef(fMinFracNEve=0.1,      "fEventsMin",     "Minimum fraction of events in a splittable node");
   DeclareOptionRef(fMaxFracNEve=0.9,      "fEventsMax",     "Maximum fraction of events in a splittable node");
   DeclareOptionRef(fNTrees=20,            "nTrees",         "Number of trees in forest.");
   
   DeclareOptionRef(fForestTypeS="AdaBoost",  "ForestType",   "Method to use for forest generation");
   AddPreDefVal(TString("AdaBoost"));
   AddPreDefVal(TString("Random"));
   // rule cleanup options
   DeclareOptionRef(fRuleMinDist=0.001,    "RuleMinDist",    "Minimum distance between rules");
   DeclareOptionRef(fMinimp=0.01,          "MinImp",         "Minimum rule importance accepted");
   // rule model option
   DeclareOptionRef(fModelTypeS="ModRuleLinear", "Model",    "Model to be used");
   AddPreDefVal(TString("ModRule"));
   AddPreDefVal(TString("ModRuleLinear"));
   AddPreDefVal(TString("ModLinear"));
   DeclareOptionRef(fRuleFitModuleS="RFTMVA",  "RuleFitModule","Which RuleFit module to use");
   AddPreDefVal(TString("RFTMVA"));
   AddPreDefVal(TString("RFFriedman"));

   // options for Friedman's external rf_go.exe module
   DeclareOptionRef(fRFWorkDir="./rulefit", "RFWorkDir",    "Friedman\'s RuleFit module (RFF): working dir");
   DeclareOptionRef(fRFNrules=2000,         "RFNrules",     "RFF: Maximum number of rules");
   DeclareOptionRef(fRFNendnodes=4,         "RFNendnodes",  "RFF: Average number of end nodes");
}

//_______________________________________________________________________
void TMVA::MethodRuleFit::ProcessOptions() 
{
   // process the options specified by the user   

   // negative event weights are not supported by this method
   if (IgnoreEventsWithNegWeightsInTraining()) {
      Log() << kFATAL << "Mechanism to ignore events with negative weights in training not yet available for method: "
            << GetMethodTypeName() 
            << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string."
            << Endl;
   }

   // choose implementation: native TMVA or Friedman's external rf_go.exe.
   // NOTE(review): the last two branches are identical (both set kTRUE);
   // the AddPreDefVal list restricts the value so the final else is
   // unreachable in practice
   fRuleFitModuleS.ToLower();
   if      (fRuleFitModuleS == "rftmva")     fUseRuleFitJF = kFALSE;
   else if (fRuleFitModuleS == "rffriedman") fUseRuleFitJF = kTRUE;
   else                                      fUseRuleFitJF = kTRUE;

   // separation criterion used for node splitting in tree growth;
   // anything unrecognized falls back to S/sqrt(S+B)
   fSepTypeS.ToLower();
   if      (fSepTypeS == "misclassificationerror") fSepType = new MisClassificationError();
   else if (fSepTypeS == "giniindex")              fSepType = new GiniIndex();
   else if (fSepTypeS == "crossentropy")           fSepType = new CrossEntropy();
   else                                            fSepType = new SdivSqrtSplusB();

   // model composition: rules only, linear terms only, or both (default)
   fModelTypeS.ToLower();
   if      (fModelTypeS == "modlinear" ) fRuleFit.SetModelLinear();
   else if (fModelTypeS == "modrule" )   fRuleFit.SetModelRules();
   else                                  fRuleFit.SetModelFull();

   // tree pruning method; default is no pruning
   fPruneMethodS.ToLower();
   if      (fPruneMethodS == "expectederror" )   fPruneMethod  = DecisionTree::kExpectedErrorPruning;
   else if (fPruneMethodS == "costcomplexity" )  fPruneMethod  = DecisionTree::kCostComplexityPruning;
   else                                          fPruneMethod  = DecisionTree::kNoPruning;

   // forest generation strategy (boosting is the default fallback)
   fForestTypeS.ToLower();
   if      (fForestTypeS == "random" )   fUseBoost = kFALSE;
   else if (fForestTypeS == "adaboost" ) fUseBoost = kTRUE;
   else                                  fUseBoost = kTRUE;
   //
   // if creating the forest by boosting the events
   // the full training sample is used per tree
   // -> only true for the TMVA version of RuleFit.
   if (fUseBoost && (!fUseRuleFitJF)) fTreeEveFrac = 1.0;

   // check event fraction for tree generation
   // if <0 set to automatic number: min(0.5, (100 + 6*sqrt(N))/N)
   // NOTE(review): 'min' and 'sqrt' are unqualified; they resolve via
   // headers pulled in by Riostream.h
   if (fTreeEveFrac<=0) {
      Int_t nevents = Data()->GetNTrainingEvents();
      Double_t n = static_cast<Double_t>(nevents);
      fTreeEveFrac = min( 0.5, (100.0 +6.0*sqrt(n))/n);
   }
   // verify ranges of options; out-of-range values are handled by
   // VerifyRange (last argument, where given, is the replacement default)
   VerifyRange(Log(), "nTrees",        fNTrees,0,100000,20);
   VerifyRange(Log(), "MinImp",        fMinimp,0.0,1.0,0.0);
   VerifyRange(Log(), "GDTauPrec",     fGDTauPrec,1e-5,5e-1);
   VerifyRange(Log(), "GDTauMin",      fGDTauMin,0.0,1.0);
   VerifyRange(Log(), "GDTauMax",      fGDTauMax,fGDTauMin,1.0);
   VerifyRange(Log(), "GDPathStep",    fGDPathStep,0.0,100.0,0.01);
   VerifyRange(Log(), "GDErrScale",    fGDErrScale,1.0,100.0,1.1);
   VerifyRange(Log(), "GDPathEveFrac", fGDPathEveFrac,0.01,0.9,0.5);
   VerifyRange(Log(), "GDValidEveFrac",fGDValidEveFrac,0.01,1.0-fGDPathEveFrac,1.0-fGDPathEveFrac);
   VerifyRange(Log(), "fEventsMin",    fMinFracNEve,0.0,1.0);
   VerifyRange(Log(), "fEventsMax",    fMaxFracNEve,fMinFracNEve,1.0);

   // propagate the validated options to the RuleFit machinery
   fRuleFit.GetRuleEnsemblePtr()->SetLinQuantile(fLinQuantile);
   fRuleFit.GetRuleFitParamsPtr()->SetGDTauRange(fGDTauMin,fGDTauMax);
   fRuleFit.GetRuleFitParamsPtr()->SetGDTau(fGDTau);
   fRuleFit.GetRuleFitParamsPtr()->SetGDTauPrec(fGDTauPrec);
   fRuleFit.GetRuleFitParamsPtr()->SetGDTauScan(fGDTauScan);
   fRuleFit.GetRuleFitParamsPtr()->SetGDPathStep(fGDPathStep);
   fRuleFit.GetRuleFitParamsPtr()->SetGDNPathSteps(fGDNPathSteps);
   fRuleFit.GetRuleFitParamsPtr()->SetGDErrScale(fGDErrScale);
   fRuleFit.SetImportanceCut(fMinimp);
   fRuleFit.SetRuleMinDist(fRuleMinDist);


   // check if Friedmans module is used.
   // print a message concerning the options.
   if (fUseRuleFitJF) {
      Log() << kINFO << "" << Endl;
      Log() << kINFO << "--------------------------------------" <<Endl;
      Log() << kINFO << "Friedmans RuleFit module is selected." << Endl;
      Log() << kINFO << "Only the following options are used:" << Endl;
      Log() << kINFO <<  Endl;
      Log() << kINFO << gTools().Color("bold") << "   Model"        << gTools().Color("reset") << Endl;
      Log() << kINFO << gTools().Color("bold") << "   RFWorkDir"    << gTools().Color("reset") << Endl;
      Log() << kINFO << gTools().Color("bold") << "   RFNrules"     << gTools().Color("reset") << Endl;
      Log() << kINFO << gTools().Color("bold") << "   RFNendnodes"  << gTools().Color("reset") << Endl;
      Log() << kINFO << gTools().Color("bold") << "   GDNPathSteps" << gTools().Color("reset") << Endl;
      Log() << kINFO << gTools().Color("bold") << "   GDPathStep"   << gTools().Color("reset") << Endl;
      Log() << kINFO << gTools().Color("bold") << "   GDErrScale"   << gTools().Color("reset") << Endl;
      Log() << kINFO << "--------------------------------------" <<Endl;
      Log() << kINFO << Endl;
   }

   // Select what weight to use in the 'importance' rule visualisation plots.
   // Note that if UseCoefficientsVisHists() is selected, the following weight is used:
   //    w = rule coefficient * rule support
   // The support is a positive number which is 0 if no events are accepted by the rule.
   // Normally the importance gives more useful information.
   //
   //fRuleFit.UseCoefficientsVisHists();
   fRuleFit.UseImportanceVisHists();

   fRuleFit.SetMsgType( Log().GetMinType() );

   // copy the training events into the local sample (only if a training
   // tree exists, i.e. in the training phase)
   if (HasTrainingTree()) InitEventSample();

   InitMonitorNtuple();

}

//_______________________________________________________________________
void TMVA::MethodRuleFit::InitMonitorNtuple()
{
   // initialize the monitoring ntuple: one entry per rule will be filled
   // during training (see TrainTMVARuleFit); written to the method's
   // base directory
   BaseDir()->cd();
   fMonitorNtuple= new TTree("MonitorNtuple_RuleFit","RuleFit variables");
   // per-rule quality measures
   fMonitorNtuple->Branch("importance",&fNTImportance,"importance/D");
   fMonitorNtuple->Branch("support",&fNTSupport,"support/D");
   fMonitorNtuple->Branch("coefficient",&fNTCoefficient,"coefficient/D");
   // rule topology: number of cuts and variables, signal/background type
   fMonitorNtuple->Branch("ncuts",&fNTNcuts,"ncuts/I");
   fMonitorNtuple->Branch("nvars",&fNTNvars,"nvars/I");
   fMonitorNtuple->Branch("type",&fNTType,"type/I");
   // rule response probabilities (tag, ss/sb/bs/bb combinations)
   fMonitorNtuple->Branch("ptag",&fNTPtag,"ptag/D");
   fMonitorNtuple->Branch("pss",&fNTPss,"pss/D");
   fMonitorNtuple->Branch("psb",&fNTPsb,"psb/D");
   fMonitorNtuple->Branch("pbs",&fNTPbs,"pbs/D");
   fMonitorNtuple->Branch("pbb",&fNTPbb,"pbb/D");
   fMonitorNtuple->Branch("soversb",&fNTSSB,"soversb/D");
}

//_______________________________________________________________________
void TMVA::MethodRuleFit::Init()
{
   // default initialization

   // the minimum requirement to declare an event signal-like
   SetSignalReferenceCut( 0.0 );

   // set variables that used to be options
   // any modifications are then made in ProcessOptions()
   fLinQuantile   = 0.025;       // Quantile of linear terms (remove outliers)
   fTreeEveFrac   = -1.0;        // Fraction of events used to train each tree (<0 => auto, see ProcessOptions)
   fNCuts         = 20;          // Number of steps during node cut optimisation
   fSepTypeS      = "GiniIndex"; // Separation criterion for node splitting; see BDT
   fPruneMethodS  = "NONE";      // Pruning method; see BDT
   fPruneStrength = 3.5;         // Pruning strength; see BDT
   fGDTauMin      = 0.0;         // Gradient-directed path: min fit threshold (tau)
   fGDTauMax      = 1.0;         // Gradient-directed path: max fit threshold (tau)
   fGDTauScan     = 1000;        // Gradient-directed path: number of points scanning for best tau

}

//_______________________________________________________________________
void TMVA::MethodRuleFit::InitEventSample( void )
{
   // write all Events from the Tree into a vector of Events, that are
   // more easily manipulated.
   // This method should never be called without existing trainingTree, as it
   // the vector of events from the ROOT training tree
   if (Data()->GetNEvents()==0) Log() << kFATAL << "<Init> Data().TrainingTree() is zero pointer" << Endl;

   Int_t nevents = Data()->GetNEvents();
   for (Int_t ievt=0; ievt<nevents; ievt++){
      const Event * ev = GetEvent(ievt);
      fEventSample.push_back( new Event(*ev));
   }
   if (fTreeEveFrac<=0) {
      Double_t n = static_cast<Double_t>(nevents);
      fTreeEveFrac = min( 0.5, (100.0 +6.0*sqrt(n))/n);
   }
   if (fTreeEveFrac>1.0) fTreeEveFrac=1.0;
   //
   std::random_shuffle(fEventSample.begin(), fEventSample.end());
   //
   Log() << kDEBUG << "Set sub-sample fraction to " << fTreeEveFrac << Endl;
}

//_______________________________________________________________________
void TMVA::MethodRuleFit::Train( void )
{
   // training of rules

   // fill the STL Vector with the event sample
   this->InitEventSample();

   if (fUseRuleFitJF) {
      TrainJFRuleFit();
   } 
   else {
      TrainTMVARuleFit();
   }
   fRuleFit.GetRuleEnsemblePtr()->ClearRuleMap();
}

//_______________________________________________________________________
void TMVA::MethodRuleFit::TrainTMVARuleFit( void )
{
   // training of rules using TMVA implementation:
   // initialize the RuleFit object (builds forest and rule ensemble),
   // fit the rule coefficients, compute importances, and fill the
   // per-rule monitoring ntuple

   // "Normalise" is incompatible with RuleFit (fixed typo in message: optoin -> option)
   if (IsNormalised()) Log() << kFATAL << "\"Normalise\" option cannot be used with RuleFit; " 
                               << "please remove the option from the configuration string, or "
                               << "use \"!Normalise\""
                               << Endl;

   // timer
   Timer timer( 1, GetName() );

   // test tree nmin cut -> for debug purposes
   // the routine will generate trees with stopping cut on N(eve) given by
   // a fraction between [20,N(eve)-1].
   // 
   //   MakeForestRnd();
   //   exit(1);
   //

   // Init RuleFit object and create rule ensemble
   // + make forest & rules
   fRuleFit.Initialize( this );

   // Make forest of decision trees
   //   if (fRuleFit.GetRuleEnsemble().DoRules()) fRuleFit.MakeForest();

   // Fit the rules
   Log() << kDEBUG << "Fitting rule coefficients ..." << Endl;
   fRuleFit.FitCoefficients();

   // Calculate importance
   Log() << kDEBUG << "Computing rule and variable importance" << Endl;
   fRuleFit.CalcImportance();

   // Output results and fill monitor ntuple
   fRuleFit.GetRuleEnsemblePtr()->Print();
   //
   Log() << kDEBUG << "Filling rule ntuple" << Endl;
   UInt_t nrules = fRuleFit.GetRuleEnsemble().GetRulesConst().size();
   for (UInt_t i=0; i<nrules; i++ ) {
      const Rule *rule = fRuleFit.GetRuleEnsemble().GetRulesConst(i);
      fNTImportance   = rule->GetRelImportance();
      fNTSupport      = rule->GetSupport();
      fNTCoefficient  = rule->GetCoefficient();
      fNTType         = (rule->IsSignalRule() ? 1:-1 );
      fNTNvars        = rule->GetRuleCut()->GetNvars();
      fNTNcuts        = rule->GetRuleCut()->GetNcuts();
      fNTPtag         = fRuleFit.GetRuleEnsemble().GetRulePTag(i); // should be identical with support
      fNTPss          = fRuleFit.GetRuleEnsemble().GetRulePSS(i);
      fNTPsb          = fRuleFit.GetRuleEnsemble().GetRulePSB(i);
      fNTPbs          = fRuleFit.GetRuleEnsemble().GetRulePBS(i);
      fNTPbb          = fRuleFit.GetRuleEnsemble().GetRulePBB(i);
      fNTSSB          = rule->GetSSB();
      fMonitorNtuple->Fill();
   }
   Log() << kDEBUG << "Training done" << Endl;

   fRuleFit.MakeVisHists();

   fRuleFit.MakeDebugHists();
}

//_______________________________________________________________________
void TMVA::MethodRuleFit::TrainJFRuleFit( void )
{
   // training of rules using Jerome Friedmans implementation

   fRuleFit.InitPtrs( this );
   fRuleFit.SetTrainingEvents( GetTrainingEvents() );

   RuleFitAPI *rfAPI = new RuleFitAPI( this, &fRuleFit, Log().GetMinType() );

   rfAPI->WelcomeMessage();

   // timer
   Timer timer( 1, GetName() );

   Log() << kINFO << "Training ..." << Endl;
   rfAPI->TrainRuleFit();

   Log() << kDEBUG << "reading model summary from rf_go.exe output" << Endl;
   rfAPI->ReadModelSum();

   //   fRuleFit.GetRuleEnsemblePtr()->MakeRuleMap();

   Log() << kDEBUG << "calculating rule and variable importance" << Endl;
   fRuleFit.CalcImportance();

   // Output results and fill monitor ntuple
   fRuleFit.GetRuleEnsemblePtr()->Print();
   //
   fRuleFit.MakeVisHists();

   delete rfAPI;

   Log() << kDEBUG << "done training" << Endl;
}

//_______________________________________________________________________
const TMVA::Ranking* TMVA::MethodRuleFit::CreateRanking() 
{
   // build and return the ranking of input variables, ordered by the
   // variable importance computed by the rule ensemble

   fRanking = new Ranking( GetName(), "Importance" );

   const UInt_t nvars = GetNvar();
   for (UInt_t iv = 0; iv < nvars; ++iv) {
      fRanking->AddRank( Rank( GetInputLabel(iv),
                               fRuleFit.GetRuleEnsemble().GetVarImportance(iv) ) );
   }

   return fRanking;
}

//_______________________________________________________________________
void TMVA::MethodRuleFit::WriteWeightsToStream( ostream & o ) const
{  
   // write the rules to an ostream (raw text format, legacy weight files)
   fRuleFit.GetRuleEnsemble().PrintRaw( o );
}

//_______________________________________________________________________
void TMVA::MethodRuleFit::AddWeightsXMLTo( void* parent ) const 
{
   // add the rules to the given XML node (modern weight-file format)
   fRuleFit.GetRuleEnsemble().AddXMLTo( parent );
}

//_______________________________________________________________________
void TMVA::MethodRuleFit::ReadWeightsFromStream( istream & istr )
{
   // read rules from an istream (raw text format, legacy weight files)

   fRuleFit.GetRuleEnsemblePtr()->ReadRaw( istr );
}

//_______________________________________________________________________
void TMVA::MethodRuleFit::ReadWeightsFromXML( void* wghtnode )
{
   // read rules from the given XML node (modern weight-file format)
   fRuleFit.GetRuleEnsemblePtr()->ReadFromXML( wghtnode );
}

//_______________________________________________________________________
Double_t TMVA::MethodRuleFit::GetMvaValue( Double_t* err )
{
   // return the RuleFit score for the current event

   // no error estimate can be determined for this method;
   // signal that with -1 when the caller asked for one
   if (err) *err = -1;

   return fRuleFit.EvalEvent( *GetEvent() );
}

//_______________________________________________________________________
void  TMVA::MethodRuleFit::WriteMonitoringHistosToFile( void ) const
{
   // write special monitoring histograms to file (here: the per-rule
   // ntuple filled during training), into the method's base directory
   BaseDir()->cd();
   Log() << kINFO << "Write monitoring ntuple to file: " << BaseDir()->GetPath() << Endl;
   fMonitorNtuple->Write();
}

//_______________________________________________________________________
void TMVA::MethodRuleFit::MakeClassSpecific( std::ostream& fout, const TString& className ) const
{
   // write specific classifier response: emits standalone C++ code that
   // evaluates the trained model (offset + rule terms + linear terms).
   // NOTE: the closing "};" below terminates the class declaration that
   // the caller has already started writing.
   fout << "   // not implemented for class: \"" << className << "\"" << std::endl;
   fout << "};" << std::endl;
   fout << "void   " << className << "::Initialize(){}" << std::endl;
   fout << "void   " << className << "::Clear(){}" << std::endl;
   fout << "double " << className << "::GetMvaValue__( const std::vector<double>& inputValues ) const {" << std::endl;
   // start from the ensemble offset, then add rule and linear contributions
   fout << "   double rval=" << setprecision(10) << fRuleFit.GetRuleEnsemble().GetOffset() << ";" << std::endl;
   MakeClassRuleCuts(fout);
   MakeClassLinear(fout);
   fout << "   return rval;" << std::endl;
   fout << "}" << std::endl;

}

//_______________________________________________________________________
void TMVA::MethodRuleFit::MakeClassRuleCuts( std::ostream& fout ) const
{
   // print out the rule cuts as generated C++: one "if (cuts) rval+=coef;"
   // line per rule, ordered by decreasing relative importance
   if (!fRuleFit.GetRuleEnsemble().DoRules()) {
      fout << "   //" << std::endl;
      fout << "   // ==> MODEL CONTAINS NO RULES <==" << std::endl;
      fout << "   //" << std::endl;
      return;
   }
   const RuleEnsemble *rens = &(fRuleFit.GetRuleEnsemble());
   const std::vector< Rule* > *rules = &(rens->GetRulesConst());
   const RuleCut *ruleCut;
   //
   // sort rule indices by relative importance (ascending), then iterate
   // in reverse so the most important rule is printed first
   std::list< std::pair<Double_t,Int_t> > sortedRules;
   for (UInt_t ir=0; ir<rules->size(); ir++) {
      sortedRules.push_back( std::pair<Double_t,Int_t>( (*rules)[ir]->GetImportance()/rens->GetImportanceRef(),ir ) );
   }
   sortedRules.sort();
   //
   fout << "   //" << std::endl;
   fout << "   // here follows all rules ordered in importance (most important first)" << std::endl;
   fout << "   // at the end of each line, the relative importance of the rule is given" << std::endl;
   fout << "   //" << std::endl;
   //
   for ( std::list< std::pair<double,int> >::reverse_iterator itpair = sortedRules.rbegin();
         itpair != sortedRules.rend(); itpair++ ) {
      UInt_t ir     = itpair->second;
      Double_t impr = itpair->first;
      ruleCut = (*rules)[ir]->GetRuleCut();
      // rules below the importance cut are kept but commented out
      if (impr<rens->GetImportanceCut()) fout << "   //" << std::endl;
      fout << "   if (" << std::flush;
      for (UInt_t ic=0; ic<ruleCut->GetNvars(); ic++) {
         // NOTE(review): the selector (variable index) is held in a Double_t;
         // with default float formatting an integral value prints without
         // a decimal point, so "inputValues[3]" comes out as expected
         Double_t sel    = ruleCut->GetSelector(ic);
         Double_t valmin = ruleCut->GetCutMin(ic);
         Double_t valmax = ruleCut->GetCutMax(ic);
         Bool_t   domin  = ruleCut->GetCutDoMin(ic);
         Bool_t   domax  = ruleCut->GetCutDoMax(ic);
         //
         // chain the per-variable conditions with &&; each side of the
         // window (min/max) is only emitted if that cut is active
         if (ic>0) fout << "&&" << std::flush;
         if (domin) {
            fout << "(" << setprecision(10) << valmin << std::flush;
            fout << "<inputValues[" << sel << "])" << std::flush;
         }
         if (domax) {
            if (domin) fout << "&&" << std::flush;
            fout << "(inputValues[" << sel << "]" << std::flush;
            fout << "<" << setprecision(10) << valmax << ")" <<std::flush;
         }
      }
      fout << ") rval+=" << setprecision(10) << (*rules)[ir]->GetCoefficient() << ";" << std::flush;
      fout << "   // importance = " << Form("%3.3f",impr) << std::endl;
   }
}

//_______________________________________________________________________
void TMVA::MethodRuleFit::MakeClassLinear( std::ostream& fout ) const
{
   // print out the linear terms as generated C++: one
   // "rval += coef * min(DP, max(x, DM));" line per accepted linear term,
   // i.e. the input is clamped to the [DM, DP] range before weighting
   if (!fRuleFit.GetRuleEnsemble().DoLinear()) {
      fout << "   //" << std::endl;
      fout << "   // ==> MODEL CONTAINS NO LINEAR TERMS <==" << std::endl;
      fout << "   //" << std::endl;
      return;
   }
   fout << "   //" << std::endl;
   fout << "   // here follows all linear terms" << std::endl;
   fout << "   // at the end of each line, the relative importance of the term is given" << std::endl;
   fout << "   //" << std::endl;
   const RuleEnsemble *rens = &(fRuleFit.GetRuleEnsemble());
   UInt_t nlin = rens->GetNLinear();
   for (UInt_t il=0; il<nlin; il++) {
      if (rens->IsLinTermOK(il)) {
         Double_t norm = rens->GetLinNorm(il);
         Double_t imp  = rens->GetLinImportance(il)/rens->GetImportanceRef();
         // the double() casts make the std::min/std::max overloads
         // unambiguous in the generated code
         fout << "   rval+="
   //           << setprecision(10) << rens->GetLinCoefficients(il)*norm << "*std::min(" << setprecision(10) << rens->GetLinDP(il)
   //           << ", std::max( inputValues[" << il << "]," << setprecision(10) << rens->GetLinDM(il) << "));"
              << setprecision(10) << rens->GetLinCoefficients(il)*norm 
              << "*std::min( double(" << setprecision(10) << rens->GetLinDP(il)
              << "), std::max( double(inputValues[" << il << "]), double(" << setprecision(10) << rens->GetLinDM(il) << ")));"
              << std::flush;
         fout << "   // importance = " << Form("%3.3f",imp) << std::endl;
      }
   }
}

//_______________________________________________________________________
void TMVA::MethodRuleFit::GetHelpMessage() const
{
   // Print the method's help message to the logger.
   //
   // Fixes several typos in the user-facing text ("Recomended",
   // "Similarily", "optained", "usethe") and a stray mid-sentence
   // period after "GDStep".
   //
   // typical length of text line:
   //         "|--------------------------------------------------------------|"
   // color/markup strings are empty when writing the options reference
   TString col    = gConfig().WriteOptionsReference() ? "" : gTools().Color("bold");
   TString colres = gConfig().WriteOptionsReference() ? "" : gTools().Color("reset");
   TString brk    = gConfig().WriteOptionsReference() ? "<br>" : "";

   Log() << Endl;
   Log() << col << "--- Short description:" << colres << Endl;
   Log() << Endl;
   Log() << "This method uses a collection of so called rules to create a" << Endl;
   Log() << "discriminating scoring function. Each rule consists of a series" << Endl;
   Log() << "of cuts in parameter space. The ensemble of rules are created" << Endl;
   Log() << "from a forest of decision trees, trained using the training data." << Endl;
   Log() << "Each node (apart from the root) corresponds to one rule." << Endl;
   Log() << "The scoring function is then obtained by linearly combining" << Endl;
   Log() << "the rules. A fitting procedure is applied to find the optimum" << Endl;
   Log() << "set of coefficients. The goal is to find a model with few rules" << Endl;
   Log() << "but with a strong discriminating power." << Endl;
   Log() << Endl;
   Log() << col << "--- Performance optimisation:" << colres << Endl;
   Log() << Endl;
   Log() << "There are two important considerations to make when optimising:" << Endl;
   Log() << Endl;
   Log() << "  1. Topology of the decision tree forest" << brk << Endl;
   Log() << "  2. Fitting of the coefficients" << Endl;
   Log() << Endl;
   Log() << "The maximum complexity of the rules is defined by the size of" << Endl;
   Log() << "the trees. Large trees will yield many complex rules and capture" << Endl;
   Log() << "higher order correlations. On the other hand, small trees will" << Endl;
   Log() << "lead to a smaller ensemble with simple rules, only capable of" << Endl;
   Log() << "modeling simple structures." << Endl;
   Log() << "Several parameters exist for controlling the complexity of the" << Endl;
   Log() << "rule ensemble." << Endl;
   Log() << Endl;
   Log() << "The fitting procedure searches for a minimum using a gradient" << Endl;
   Log() << "directed path. Apart from step size and number of steps, the" << Endl;
   Log() << "evolution of the path is defined by a cut-off parameter, tau." << Endl;
   Log() << "This parameter is unknown and depends on the training data." << Endl;
   Log() << "A large value will tend to give large weights to a few rules." << Endl;
   Log() << "Similarly, a small value will lead to a large set of rules" << Endl;
   Log() << "with similar weights." << Endl;
   Log() << Endl;
   Log() << "A final point is the model used; rules and/or linear terms." << Endl;
   Log() << "For a given training sample, the result may improve by adding" << Endl;
   Log() << "linear terms. If best performance is obtained using only linear" << Endl;
   Log() << "terms, it is very likely that the Fisher discriminant would be" << Endl;
   Log() << "a better choice. Ideally the fitting procedure should be able to" << Endl;
   Log() << "make this choice by giving appropriate weights for either terms." << Endl;
   Log() << Endl;
   Log() << col << "--- Performance tuning via configuration options:" << colres << Endl;
   Log() << Endl;
   Log() << "I.  TUNING OF RULE ENSEMBLE:" << Endl;
   Log() << Endl;
   Log() << "   " << col << "ForestType  " << colres
           << ": Recommended is to use the default \"AdaBoost\"." << brk << Endl;
   Log() << "   " << col << "nTrees      " << colres
           << ": More trees leads to more rules but also slow" << Endl;
   Log() << "                 performance. With too few trees the risk is" << Endl;
   Log() << "                 that the rule ensemble becomes too simple." << brk << Endl;
   Log() << "   " << col << "fEventsMin  " << colres << brk << Endl;
   Log() << "   " << col << "fEventsMax  " << colres
           << ": With a lower min, more large trees will be generated" << Endl;
   Log() << "                 leading to more complex rules." << Endl;
   Log() << "                 With a higher max, more small trees will be" << Endl;
   Log() << "                 generated leading to more simple rules." << Endl;
   Log() << "                 By changing this range, the average complexity" << Endl;
   Log() << "                 of the rule ensemble can be controlled." << brk << Endl;
   Log() << "   " << col << "RuleMinDist " << colres
           << ": By increasing the minimum distance between" << Endl;
   Log() << "                 rules, fewer and more diverse rules will remain." << Endl;
   Log() << "                 Initially it is a good idea to keep this small" << Endl;
   Log() << "                 or zero and let the fitting do the selection of" << Endl;
   Log() << "                 rules. In order to reduce the ensemble size," << Endl;
   Log() << "                 the value can then be increased." << Endl;
   Log() << Endl;
   //         "|--------------------------------------------------------------|"
   Log() << "II. TUNING OF THE FITTING:" << Endl;
   Log() << Endl;
   Log() << "   " << col << "GDPathEveFrac " << colres
           << ": fraction of events in path evaluation" << Endl;
   Log() << "                 Increasing this fraction will improve the path" << Endl;
   Log() << "                 finding. However, a too high value will give few" << Endl;
   Log() << "                 unique events available for error estimation." << Endl;
   Log() << "                 It is recommended to use the default = 0.5." << brk << Endl;
   Log() << "   " << col << "GDTau         " << colres
           << ": cutoff parameter tau" << Endl;
   Log() << "                 By default this value is set to -1.0." << Endl;
   //         "|----------------|---------------------------------------------|"
   Log() << "                 This means that the cut off parameter is" << Endl;
   Log() << "                 automatically estimated. In most cases" << Endl;
   Log() << "                 this should be fine. However, you may want" << Endl;
   Log() << "                 to fix this value if you already know it" << Endl;
   Log() << "                 and want to reduce on training time." << brk << Endl;
   Log() << "   " << col << "GDTauPrec     " << colres
           << ": precision of estimated tau" << Endl;
   Log() << "                 Increase this precision to find a more" << Endl;
   Log() << "                 optimum cut-off parameter." << brk << Endl;
   Log() << "   " << col << "GDNStep       " << colres
           << ": number of steps in path search" << Endl;
   Log() << "                 If the number of steps is too small, then" << Endl;
   Log() << "                 the program will give a warning message." << Endl;
   Log() << Endl;
   Log() << "III. WARNING MESSAGES" << Endl;
   Log() << Endl;
   Log() << col << "Risk(i+1)>=Risk(i) in path" << colres << brk << Endl;
   Log() << col << "Chaotic behaviour of risk evolution." << colres << Endl;
   //         "|----------------|---------------------------------------------|"
   Log() << "                 The error rate was still decreasing at the end" << Endl;
   Log() << "                 By construction the Risk should always decrease." << Endl;
   Log() << "                 However, if the training sample is too small or" << Endl;
   Log() << "                 the model is overtrained, such warnings can" << Endl;
   Log() << "                 occur." << Endl;
   Log() << "                 The warnings can safely be ignored if only a" << Endl;
   Log() << "                 few (<3) occur. If more warnings are generated," << Endl;
   Log() << "                 the fitting fails." << Endl;
   Log() << "                 A remedy may be to increase the value" << brk << Endl;
   Log() << "                 "
           << col << "GDValidEveFrac" << colres
           << " to 1.0 (or a larger value)." << brk << Endl;
   Log() << "                 In addition, if "
           << col << "GDPathEveFrac" << colres
           << " is too high" << Endl;
   Log() << "                 the same warnings may occur since the events" << Endl;
   Log() << "                 used for error estimation are also used for" << Endl;
   Log() << "                 path estimation." << Endl;
   Log() << "                 Another possibility is to modify the model - " << Endl;
   Log() << "                 See above on tuning the rule ensemble." << Endl;
   Log() << Endl;
   Log() << col << "The error rate was still decreasing at the end of the path"
           << colres << Endl;
   Log() << "                 Too few steps in path! Increase "
           << col << "GDNSteps" <<  colres << "." << Endl;
   Log() << Endl;
   Log() << col << "Reached minimum early in the search" << colres << Endl;

   Log() << "                 Minimum was found early in the fitting. This" << Endl;
   Log() << "                 may indicate that the used step size "
           << col << "GDStep" <<  colres << Endl;
   Log() << "                 was too large. Reduce it and rerun." << Endl;
   Log() << "                 If the results still are not OK, modify the" << Endl;
   Log() << "                 model either by modifying the rule ensemble" << Endl;
   Log() << "                 or add/remove linear terms." << Endl;
}
 MethodRuleFit.cxx:1
 MethodRuleFit.cxx:2
 MethodRuleFit.cxx:3
 MethodRuleFit.cxx:4
 MethodRuleFit.cxx:5
 MethodRuleFit.cxx:6
 MethodRuleFit.cxx:7
 MethodRuleFit.cxx:8
 MethodRuleFit.cxx:9
 MethodRuleFit.cxx:10
 MethodRuleFit.cxx:11
 MethodRuleFit.cxx:12
 MethodRuleFit.cxx:13
 MethodRuleFit.cxx:14
 MethodRuleFit.cxx:15
 MethodRuleFit.cxx:16
 MethodRuleFit.cxx:17
 MethodRuleFit.cxx:18
 MethodRuleFit.cxx:19
 MethodRuleFit.cxx:20
 MethodRuleFit.cxx:21
 MethodRuleFit.cxx:22
 MethodRuleFit.cxx:23
 MethodRuleFit.cxx:24
 MethodRuleFit.cxx:25
 MethodRuleFit.cxx:26
 MethodRuleFit.cxx:27
 MethodRuleFit.cxx:28
 MethodRuleFit.cxx:29
 MethodRuleFit.cxx:30
 MethodRuleFit.cxx:31
 MethodRuleFit.cxx:32
 MethodRuleFit.cxx:33
 MethodRuleFit.cxx:34
 MethodRuleFit.cxx:35
 MethodRuleFit.cxx:36
 MethodRuleFit.cxx:37
 MethodRuleFit.cxx:38
 MethodRuleFit.cxx:39
 MethodRuleFit.cxx:40
 MethodRuleFit.cxx:41
 MethodRuleFit.cxx:42
 MethodRuleFit.cxx:43
 MethodRuleFit.cxx:44
 MethodRuleFit.cxx:45
 MethodRuleFit.cxx:46
 MethodRuleFit.cxx:47
 MethodRuleFit.cxx:48
 MethodRuleFit.cxx:49
 MethodRuleFit.cxx:50
 MethodRuleFit.cxx:51
 MethodRuleFit.cxx:52
 MethodRuleFit.cxx:53
 MethodRuleFit.cxx:54
 MethodRuleFit.cxx:55
 MethodRuleFit.cxx:56
 MethodRuleFit.cxx:57
 MethodRuleFit.cxx:58
 MethodRuleFit.cxx:59
 MethodRuleFit.cxx:60
 MethodRuleFit.cxx:61
 MethodRuleFit.cxx:62
 MethodRuleFit.cxx:63
 MethodRuleFit.cxx:64
 MethodRuleFit.cxx:65
 MethodRuleFit.cxx:66
 MethodRuleFit.cxx:67
 MethodRuleFit.cxx:68
 MethodRuleFit.cxx:69
 MethodRuleFit.cxx:70
 MethodRuleFit.cxx:71
 MethodRuleFit.cxx:72
 MethodRuleFit.cxx:73
 MethodRuleFit.cxx:74
 MethodRuleFit.cxx:75
 MethodRuleFit.cxx:76
 MethodRuleFit.cxx:77
 MethodRuleFit.cxx:78
 MethodRuleFit.cxx:79
 MethodRuleFit.cxx:80
 MethodRuleFit.cxx:81
 MethodRuleFit.cxx:82
 MethodRuleFit.cxx:83
 MethodRuleFit.cxx:84
 MethodRuleFit.cxx:85
 MethodRuleFit.cxx:86
 MethodRuleFit.cxx:87
 MethodRuleFit.cxx:88
 MethodRuleFit.cxx:89
 MethodRuleFit.cxx:90
 MethodRuleFit.cxx:91
 MethodRuleFit.cxx:92
 MethodRuleFit.cxx:93
 MethodRuleFit.cxx:94
 MethodRuleFit.cxx:95
 MethodRuleFit.cxx:96
 MethodRuleFit.cxx:97
 MethodRuleFit.cxx:98
 MethodRuleFit.cxx:99
 MethodRuleFit.cxx:100
 MethodRuleFit.cxx:101
 MethodRuleFit.cxx:102
 MethodRuleFit.cxx:103
 MethodRuleFit.cxx:104
 MethodRuleFit.cxx:105
 MethodRuleFit.cxx:106
 MethodRuleFit.cxx:107
 MethodRuleFit.cxx:108
 MethodRuleFit.cxx:109
 MethodRuleFit.cxx:110
 MethodRuleFit.cxx:111
 MethodRuleFit.cxx:112
 MethodRuleFit.cxx:113
 MethodRuleFit.cxx:114
 MethodRuleFit.cxx:115
 MethodRuleFit.cxx:116
 MethodRuleFit.cxx:117
 MethodRuleFit.cxx:118
 MethodRuleFit.cxx:119
 MethodRuleFit.cxx:120
 MethodRuleFit.cxx:121
 MethodRuleFit.cxx:122
 MethodRuleFit.cxx:123
 MethodRuleFit.cxx:124
 MethodRuleFit.cxx:125
 MethodRuleFit.cxx:126
 MethodRuleFit.cxx:127
 MethodRuleFit.cxx:128
 MethodRuleFit.cxx:129
 MethodRuleFit.cxx:130
 MethodRuleFit.cxx:131
 MethodRuleFit.cxx:132
 MethodRuleFit.cxx:133
 MethodRuleFit.cxx:134
 MethodRuleFit.cxx:135
 MethodRuleFit.cxx:136
 MethodRuleFit.cxx:137
 MethodRuleFit.cxx:138
 MethodRuleFit.cxx:139
 MethodRuleFit.cxx:140
 MethodRuleFit.cxx:141
 MethodRuleFit.cxx:142
 MethodRuleFit.cxx:143
 MethodRuleFit.cxx:144
 MethodRuleFit.cxx:145
 MethodRuleFit.cxx:146
 MethodRuleFit.cxx:147
 MethodRuleFit.cxx:148
 MethodRuleFit.cxx:149
 MethodRuleFit.cxx:150
 MethodRuleFit.cxx:151
 MethodRuleFit.cxx:152
 MethodRuleFit.cxx:153
 MethodRuleFit.cxx:154
 MethodRuleFit.cxx:155
 MethodRuleFit.cxx:156
 MethodRuleFit.cxx:157
 MethodRuleFit.cxx:158
 MethodRuleFit.cxx:159
 MethodRuleFit.cxx:160
 MethodRuleFit.cxx:161
 MethodRuleFit.cxx:162
 MethodRuleFit.cxx:163
 MethodRuleFit.cxx:164
 MethodRuleFit.cxx:165
 MethodRuleFit.cxx:166
 MethodRuleFit.cxx:167
 MethodRuleFit.cxx:168
 MethodRuleFit.cxx:169
 MethodRuleFit.cxx:170
 MethodRuleFit.cxx:171
 MethodRuleFit.cxx:172
 MethodRuleFit.cxx:173
 MethodRuleFit.cxx:174
 MethodRuleFit.cxx:175
 MethodRuleFit.cxx:176
 MethodRuleFit.cxx:177
 MethodRuleFit.cxx:178
 MethodRuleFit.cxx:179
 MethodRuleFit.cxx:180
 MethodRuleFit.cxx:181
 MethodRuleFit.cxx:182
 MethodRuleFit.cxx:183
 MethodRuleFit.cxx:184
 MethodRuleFit.cxx:185
 MethodRuleFit.cxx:186
 MethodRuleFit.cxx:187
 MethodRuleFit.cxx:188
 MethodRuleFit.cxx:189
 MethodRuleFit.cxx:190
 MethodRuleFit.cxx:191
 MethodRuleFit.cxx:192
 MethodRuleFit.cxx:193
 MethodRuleFit.cxx:194
 MethodRuleFit.cxx:195
 MethodRuleFit.cxx:196
 MethodRuleFit.cxx:197
 MethodRuleFit.cxx:198
 MethodRuleFit.cxx:199
 MethodRuleFit.cxx:200
 MethodRuleFit.cxx:201
 MethodRuleFit.cxx:202
 MethodRuleFit.cxx:203
 MethodRuleFit.cxx:204
 MethodRuleFit.cxx:205
 MethodRuleFit.cxx:206
 MethodRuleFit.cxx:207
 MethodRuleFit.cxx:208
 MethodRuleFit.cxx:209
 MethodRuleFit.cxx:210
 MethodRuleFit.cxx:211
 MethodRuleFit.cxx:212
 MethodRuleFit.cxx:213
 MethodRuleFit.cxx:214
 MethodRuleFit.cxx:215
 MethodRuleFit.cxx:216
 MethodRuleFit.cxx:217
 MethodRuleFit.cxx:218
 MethodRuleFit.cxx:219
 MethodRuleFit.cxx:220
 MethodRuleFit.cxx:221
 MethodRuleFit.cxx:222
 MethodRuleFit.cxx:223
 MethodRuleFit.cxx:224
 MethodRuleFit.cxx:225
 MethodRuleFit.cxx:226
 MethodRuleFit.cxx:227
 MethodRuleFit.cxx:228
 MethodRuleFit.cxx:229
 MethodRuleFit.cxx:230
 MethodRuleFit.cxx:231
 MethodRuleFit.cxx:232
 MethodRuleFit.cxx:233
 MethodRuleFit.cxx:234
 MethodRuleFit.cxx:235
 MethodRuleFit.cxx:236
 MethodRuleFit.cxx:237
 MethodRuleFit.cxx:238
 MethodRuleFit.cxx:239
 MethodRuleFit.cxx:240
 MethodRuleFit.cxx:241
 MethodRuleFit.cxx:242
 MethodRuleFit.cxx:243
 MethodRuleFit.cxx:244
 MethodRuleFit.cxx:245
 MethodRuleFit.cxx:246
 MethodRuleFit.cxx:247
 MethodRuleFit.cxx:248
 MethodRuleFit.cxx:249
 MethodRuleFit.cxx:250
 MethodRuleFit.cxx:251
 MethodRuleFit.cxx:252
 MethodRuleFit.cxx:253
 MethodRuleFit.cxx:254
 MethodRuleFit.cxx:255
 MethodRuleFit.cxx:256
 MethodRuleFit.cxx:257
 MethodRuleFit.cxx:258
 MethodRuleFit.cxx:259
 MethodRuleFit.cxx:260
 MethodRuleFit.cxx:261
 MethodRuleFit.cxx:262
 MethodRuleFit.cxx:263
 MethodRuleFit.cxx:264
 MethodRuleFit.cxx:265
 MethodRuleFit.cxx:266
 MethodRuleFit.cxx:267
 MethodRuleFit.cxx:268
 MethodRuleFit.cxx:269
 MethodRuleFit.cxx:270
 MethodRuleFit.cxx:271
 MethodRuleFit.cxx:272
 MethodRuleFit.cxx:273
 MethodRuleFit.cxx:274
 MethodRuleFit.cxx:275
 MethodRuleFit.cxx:276
 MethodRuleFit.cxx:277
 MethodRuleFit.cxx:278
 MethodRuleFit.cxx:279
 MethodRuleFit.cxx:280
 MethodRuleFit.cxx:281
 MethodRuleFit.cxx:282
 MethodRuleFit.cxx:283
 MethodRuleFit.cxx:284
 MethodRuleFit.cxx:285
 MethodRuleFit.cxx:286
 MethodRuleFit.cxx:287
 MethodRuleFit.cxx:288
 MethodRuleFit.cxx:289
 MethodRuleFit.cxx:290
 MethodRuleFit.cxx:291
 MethodRuleFit.cxx:292
 MethodRuleFit.cxx:293
 MethodRuleFit.cxx:294
 MethodRuleFit.cxx:295
 MethodRuleFit.cxx:296
 MethodRuleFit.cxx:297
 MethodRuleFit.cxx:298
 MethodRuleFit.cxx:299
 MethodRuleFit.cxx:300
 MethodRuleFit.cxx:301
 MethodRuleFit.cxx:302
 MethodRuleFit.cxx:303
 MethodRuleFit.cxx:304
 MethodRuleFit.cxx:305
 MethodRuleFit.cxx:306
 MethodRuleFit.cxx:307
 MethodRuleFit.cxx:308
 MethodRuleFit.cxx:309
 MethodRuleFit.cxx:310
 MethodRuleFit.cxx:311
 MethodRuleFit.cxx:312
 MethodRuleFit.cxx:313
 MethodRuleFit.cxx:314
 MethodRuleFit.cxx:315
 MethodRuleFit.cxx:316
 MethodRuleFit.cxx:317
 MethodRuleFit.cxx:318
 MethodRuleFit.cxx:319
 MethodRuleFit.cxx:320
 MethodRuleFit.cxx:321
 MethodRuleFit.cxx:322
 MethodRuleFit.cxx:323
 MethodRuleFit.cxx:324
 MethodRuleFit.cxx:325
 MethodRuleFit.cxx:326
 MethodRuleFit.cxx:327
 MethodRuleFit.cxx:328
 MethodRuleFit.cxx:329
 MethodRuleFit.cxx:330
 MethodRuleFit.cxx:331
 MethodRuleFit.cxx:332
 MethodRuleFit.cxx:333
 MethodRuleFit.cxx:334
 MethodRuleFit.cxx:335
 MethodRuleFit.cxx:336
 MethodRuleFit.cxx:337
 MethodRuleFit.cxx:338
 MethodRuleFit.cxx:339
 MethodRuleFit.cxx:340
 MethodRuleFit.cxx:341
 MethodRuleFit.cxx:342
 MethodRuleFit.cxx:343
 MethodRuleFit.cxx:344
 MethodRuleFit.cxx:345
 MethodRuleFit.cxx:346
 MethodRuleFit.cxx:347
 MethodRuleFit.cxx:348
 MethodRuleFit.cxx:349
 MethodRuleFit.cxx:350
 MethodRuleFit.cxx:351
 MethodRuleFit.cxx:352
 MethodRuleFit.cxx:353
 MethodRuleFit.cxx:354
 MethodRuleFit.cxx:355
 MethodRuleFit.cxx:356
 MethodRuleFit.cxx:357
 MethodRuleFit.cxx:358
 MethodRuleFit.cxx:359
 MethodRuleFit.cxx:360
 MethodRuleFit.cxx:361
 MethodRuleFit.cxx:362
 MethodRuleFit.cxx:363
 MethodRuleFit.cxx:364
 MethodRuleFit.cxx:365
 MethodRuleFit.cxx:366
 MethodRuleFit.cxx:367
 MethodRuleFit.cxx:368
 MethodRuleFit.cxx:369
 MethodRuleFit.cxx:370
 MethodRuleFit.cxx:371
 MethodRuleFit.cxx:372
 MethodRuleFit.cxx:373
 MethodRuleFit.cxx:374
 MethodRuleFit.cxx:375
 MethodRuleFit.cxx:376
 MethodRuleFit.cxx:377
 MethodRuleFit.cxx:378
 MethodRuleFit.cxx:379
 MethodRuleFit.cxx:380
 MethodRuleFit.cxx:381
 MethodRuleFit.cxx:382
 MethodRuleFit.cxx:383
 MethodRuleFit.cxx:384
 MethodRuleFit.cxx:385
 MethodRuleFit.cxx:386
 MethodRuleFit.cxx:387
 MethodRuleFit.cxx:388
 MethodRuleFit.cxx:389
 MethodRuleFit.cxx:390
 MethodRuleFit.cxx:391
 MethodRuleFit.cxx:392
 MethodRuleFit.cxx:393
 MethodRuleFit.cxx:394
 MethodRuleFit.cxx:395
 MethodRuleFit.cxx:396
 MethodRuleFit.cxx:397
 MethodRuleFit.cxx:398
 MethodRuleFit.cxx:399
 MethodRuleFit.cxx:400
 MethodRuleFit.cxx:401
 MethodRuleFit.cxx:402
 MethodRuleFit.cxx:403
 MethodRuleFit.cxx:404
 MethodRuleFit.cxx:405
 MethodRuleFit.cxx:406
 MethodRuleFit.cxx:407
 MethodRuleFit.cxx:408
 MethodRuleFit.cxx:409
 MethodRuleFit.cxx:410
 MethodRuleFit.cxx:411
 MethodRuleFit.cxx:412
 MethodRuleFit.cxx:413
 MethodRuleFit.cxx:414
 MethodRuleFit.cxx:415
 MethodRuleFit.cxx:416
 MethodRuleFit.cxx:417
 MethodRuleFit.cxx:418
 MethodRuleFit.cxx:419
 MethodRuleFit.cxx:420
 MethodRuleFit.cxx:421
 MethodRuleFit.cxx:422
 MethodRuleFit.cxx:423
 MethodRuleFit.cxx:424
 MethodRuleFit.cxx:425
 MethodRuleFit.cxx:426
 MethodRuleFit.cxx:427
 MethodRuleFit.cxx:428
 MethodRuleFit.cxx:429
 MethodRuleFit.cxx:430
 MethodRuleFit.cxx:431
 MethodRuleFit.cxx:432
 MethodRuleFit.cxx:433
 MethodRuleFit.cxx:434
 MethodRuleFit.cxx:435
 MethodRuleFit.cxx:436
 MethodRuleFit.cxx:437
 MethodRuleFit.cxx:438
 MethodRuleFit.cxx:439
 MethodRuleFit.cxx:440
 MethodRuleFit.cxx:441
 MethodRuleFit.cxx:442
 MethodRuleFit.cxx:443
 MethodRuleFit.cxx:444
 MethodRuleFit.cxx:445
 MethodRuleFit.cxx:446
 MethodRuleFit.cxx:447
 MethodRuleFit.cxx:448
 MethodRuleFit.cxx:449
 MethodRuleFit.cxx:450
 MethodRuleFit.cxx:451
 MethodRuleFit.cxx:452
 MethodRuleFit.cxx:453
 MethodRuleFit.cxx:454
 MethodRuleFit.cxx:455
 MethodRuleFit.cxx:456
 MethodRuleFit.cxx:457
 MethodRuleFit.cxx:458
 MethodRuleFit.cxx:459
 MethodRuleFit.cxx:460
 MethodRuleFit.cxx:461
 MethodRuleFit.cxx:462
 MethodRuleFit.cxx:463
 MethodRuleFit.cxx:464
 MethodRuleFit.cxx:465
 MethodRuleFit.cxx:466
 MethodRuleFit.cxx:467
 MethodRuleFit.cxx:468
 MethodRuleFit.cxx:469
 MethodRuleFit.cxx:470
 MethodRuleFit.cxx:471
 MethodRuleFit.cxx:472
 MethodRuleFit.cxx:473
 MethodRuleFit.cxx:474
 MethodRuleFit.cxx:475
 MethodRuleFit.cxx:476
 MethodRuleFit.cxx:477
 MethodRuleFit.cxx:478
 MethodRuleFit.cxx:479
 MethodRuleFit.cxx:480
 MethodRuleFit.cxx:481
 MethodRuleFit.cxx:482
 MethodRuleFit.cxx:483
 MethodRuleFit.cxx:484
 MethodRuleFit.cxx:485
 MethodRuleFit.cxx:486
 MethodRuleFit.cxx:487
 MethodRuleFit.cxx:488
 MethodRuleFit.cxx:489
 MethodRuleFit.cxx:490
 MethodRuleFit.cxx:491
 MethodRuleFit.cxx:492
 MethodRuleFit.cxx:493
 MethodRuleFit.cxx:494
 MethodRuleFit.cxx:495
 MethodRuleFit.cxx:496
 MethodRuleFit.cxx:497
 MethodRuleFit.cxx:498
 MethodRuleFit.cxx:499
 MethodRuleFit.cxx:500
 MethodRuleFit.cxx:501
 MethodRuleFit.cxx:502
 MethodRuleFit.cxx:503
 MethodRuleFit.cxx:504
 MethodRuleFit.cxx:505
 MethodRuleFit.cxx:506
 MethodRuleFit.cxx:507
 MethodRuleFit.cxx:508
 MethodRuleFit.cxx:509
 MethodRuleFit.cxx:510
 MethodRuleFit.cxx:511
 MethodRuleFit.cxx:512
 MethodRuleFit.cxx:513
 MethodRuleFit.cxx:514
 MethodRuleFit.cxx:515
 MethodRuleFit.cxx:516
 MethodRuleFit.cxx:517
 MethodRuleFit.cxx:518
 MethodRuleFit.cxx:519
 MethodRuleFit.cxx:520
 MethodRuleFit.cxx:521
 MethodRuleFit.cxx:522
 MethodRuleFit.cxx:523
 MethodRuleFit.cxx:524
 MethodRuleFit.cxx:525
 MethodRuleFit.cxx:526
 MethodRuleFit.cxx:527
 MethodRuleFit.cxx:528
 MethodRuleFit.cxx:529
 MethodRuleFit.cxx:530
 MethodRuleFit.cxx:531
 MethodRuleFit.cxx:532
 MethodRuleFit.cxx:533
 MethodRuleFit.cxx:534
 MethodRuleFit.cxx:535
 MethodRuleFit.cxx:536
 MethodRuleFit.cxx:537
 MethodRuleFit.cxx:538
 MethodRuleFit.cxx:539
 MethodRuleFit.cxx:540
 MethodRuleFit.cxx:541
 MethodRuleFit.cxx:542
 MethodRuleFit.cxx:543
 MethodRuleFit.cxx:544
 MethodRuleFit.cxx:545
 MethodRuleFit.cxx:546
 MethodRuleFit.cxx:547
 MethodRuleFit.cxx:548
 MethodRuleFit.cxx:549
 MethodRuleFit.cxx:550
 MethodRuleFit.cxx:551
 MethodRuleFit.cxx:552
 MethodRuleFit.cxx:553
 MethodRuleFit.cxx:554
 MethodRuleFit.cxx:555
 MethodRuleFit.cxx:556
 MethodRuleFit.cxx:557
 MethodRuleFit.cxx:558
 MethodRuleFit.cxx:559
 MethodRuleFit.cxx:560
 MethodRuleFit.cxx:561
 MethodRuleFit.cxx:562
 MethodRuleFit.cxx:563
 MethodRuleFit.cxx:564
 MethodRuleFit.cxx:565
 MethodRuleFit.cxx:566
 MethodRuleFit.cxx:567
 MethodRuleFit.cxx:568
 MethodRuleFit.cxx:569
 MethodRuleFit.cxx:570
 MethodRuleFit.cxx:571
 MethodRuleFit.cxx:572
 MethodRuleFit.cxx:573
 MethodRuleFit.cxx:574
 MethodRuleFit.cxx:575
 MethodRuleFit.cxx:576
 MethodRuleFit.cxx:577
 MethodRuleFit.cxx:578
 MethodRuleFit.cxx:579
 MethodRuleFit.cxx:580
 MethodRuleFit.cxx:581
 MethodRuleFit.cxx:582
 MethodRuleFit.cxx:583
 MethodRuleFit.cxx:584
 MethodRuleFit.cxx:585
 MethodRuleFit.cxx:586
 MethodRuleFit.cxx:587
 MethodRuleFit.cxx:588
 MethodRuleFit.cxx:589
 MethodRuleFit.cxx:590
 MethodRuleFit.cxx:591
 MethodRuleFit.cxx:592
 MethodRuleFit.cxx:593
 MethodRuleFit.cxx:594
 MethodRuleFit.cxx:595
 MethodRuleFit.cxx:596
 MethodRuleFit.cxx:597
 MethodRuleFit.cxx:598
 MethodRuleFit.cxx:599
 MethodRuleFit.cxx:600
 MethodRuleFit.cxx:601
 MethodRuleFit.cxx:602
 MethodRuleFit.cxx:603
 MethodRuleFit.cxx:604
 MethodRuleFit.cxx:605
 MethodRuleFit.cxx:606
 MethodRuleFit.cxx:607
 MethodRuleFit.cxx:608
 MethodRuleFit.cxx:609
 MethodRuleFit.cxx:610
 MethodRuleFit.cxx:611
 MethodRuleFit.cxx:612
 MethodRuleFit.cxx:613
 MethodRuleFit.cxx:614
 MethodRuleFit.cxx:615
 MethodRuleFit.cxx:616
 MethodRuleFit.cxx:617
 MethodRuleFit.cxx:618
 MethodRuleFit.cxx:619
 MethodRuleFit.cxx:620
 MethodRuleFit.cxx:621
 MethodRuleFit.cxx:622
 MethodRuleFit.cxx:623
 MethodRuleFit.cxx:624
 MethodRuleFit.cxx:625
 MethodRuleFit.cxx:626
 MethodRuleFit.cxx:627
 MethodRuleFit.cxx:628
 MethodRuleFit.cxx:629
 MethodRuleFit.cxx:630
 MethodRuleFit.cxx:631
 MethodRuleFit.cxx:632
 MethodRuleFit.cxx:633
 MethodRuleFit.cxx:634
 MethodRuleFit.cxx:635
 MethodRuleFit.cxx:636
 MethodRuleFit.cxx:637
 MethodRuleFit.cxx:638
 MethodRuleFit.cxx:639
 MethodRuleFit.cxx:640
 MethodRuleFit.cxx:641
 MethodRuleFit.cxx:642
 MethodRuleFit.cxx:643
 MethodRuleFit.cxx:644
 MethodRuleFit.cxx:645
 MethodRuleFit.cxx:646
 MethodRuleFit.cxx:647
 MethodRuleFit.cxx:648
 MethodRuleFit.cxx:649
 MethodRuleFit.cxx:650
 MethodRuleFit.cxx:651
 MethodRuleFit.cxx:652
 MethodRuleFit.cxx:653
 MethodRuleFit.cxx:654
 MethodRuleFit.cxx:655
 MethodRuleFit.cxx:656
 MethodRuleFit.cxx:657
 MethodRuleFit.cxx:658
 MethodRuleFit.cxx:659
 MethodRuleFit.cxx:660
 MethodRuleFit.cxx:661
 MethodRuleFit.cxx:662
 MethodRuleFit.cxx:663
 MethodRuleFit.cxx:664
 MethodRuleFit.cxx:665
 MethodRuleFit.cxx:666
 MethodRuleFit.cxx:667
 MethodRuleFit.cxx:668
 MethodRuleFit.cxx:669
 MethodRuleFit.cxx:670
 MethodRuleFit.cxx:671
 MethodRuleFit.cxx:672
 MethodRuleFit.cxx:673
 MethodRuleFit.cxx:674
 MethodRuleFit.cxx:675
 MethodRuleFit.cxx:676
 MethodRuleFit.cxx:677
 MethodRuleFit.cxx:678
 MethodRuleFit.cxx:679
 MethodRuleFit.cxx:680
 MethodRuleFit.cxx:681
 MethodRuleFit.cxx:682
 MethodRuleFit.cxx:683
 MethodRuleFit.cxx:684
 MethodRuleFit.cxx:685
 MethodRuleFit.cxx:686
 MethodRuleFit.cxx:687
 MethodRuleFit.cxx:688
 MethodRuleFit.cxx:689
 MethodRuleFit.cxx:690
 MethodRuleFit.cxx:691
 MethodRuleFit.cxx:692
 MethodRuleFit.cxx:693
 MethodRuleFit.cxx:694
 MethodRuleFit.cxx:695
 MethodRuleFit.cxx:696
 MethodRuleFit.cxx:697
 MethodRuleFit.cxx:698
 MethodRuleFit.cxx:699
 MethodRuleFit.cxx:700
 MethodRuleFit.cxx:701
 MethodRuleFit.cxx:702
 MethodRuleFit.cxx:703
 MethodRuleFit.cxx:704
 MethodRuleFit.cxx:705
 MethodRuleFit.cxx:706
 MethodRuleFit.cxx:707
 MethodRuleFit.cxx:708
 MethodRuleFit.cxx:709
 MethodRuleFit.cxx:710
 MethodRuleFit.cxx:711
 MethodRuleFit.cxx:712
 MethodRuleFit.cxx:713
 MethodRuleFit.cxx:714
 MethodRuleFit.cxx:715
 MethodRuleFit.cxx:716
 MethodRuleFit.cxx:717
 MethodRuleFit.cxx:718
 MethodRuleFit.cxx:719
 MethodRuleFit.cxx:720
 MethodRuleFit.cxx:721
 MethodRuleFit.cxx:722
 MethodRuleFit.cxx:723
 MethodRuleFit.cxx:724
 MethodRuleFit.cxx:725
 MethodRuleFit.cxx:726
 MethodRuleFit.cxx:727
 MethodRuleFit.cxx:728
 MethodRuleFit.cxx:729
 MethodRuleFit.cxx:730
 MethodRuleFit.cxx:731
 MethodRuleFit.cxx:732
 MethodRuleFit.cxx:733
 MethodRuleFit.cxx:734
 MethodRuleFit.cxx:735
 MethodRuleFit.cxx:736
 MethodRuleFit.cxx:737
 MethodRuleFit.cxx:738
 MethodRuleFit.cxx:739
 MethodRuleFit.cxx:740
 MethodRuleFit.cxx:741
 MethodRuleFit.cxx:742
 MethodRuleFit.cxx:743
 MethodRuleFit.cxx:744
 MethodRuleFit.cxx:745
 MethodRuleFit.cxx:746
 MethodRuleFit.cxx:747
 MethodRuleFit.cxx:748
 MethodRuleFit.cxx:749
 MethodRuleFit.cxx:750
 MethodRuleFit.cxx:751
 MethodRuleFit.cxx:752
 MethodRuleFit.cxx:753
 MethodRuleFit.cxx:754
 MethodRuleFit.cxx:755
 MethodRuleFit.cxx:756
 MethodRuleFit.cxx:757
 MethodRuleFit.cxx:758
 MethodRuleFit.cxx:759
 MethodRuleFit.cxx:760
 MethodRuleFit.cxx:761
 MethodRuleFit.cxx:762
 MethodRuleFit.cxx:763
 MethodRuleFit.cxx:764
 MethodRuleFit.cxx:765
 MethodRuleFit.cxx:766
 MethodRuleFit.cxx:767
 MethodRuleFit.cxx:768
 MethodRuleFit.cxx:769
 MethodRuleFit.cxx:770
 MethodRuleFit.cxx:771
 MethodRuleFit.cxx:772
 MethodRuleFit.cxx:773
 MethodRuleFit.cxx:774
 MethodRuleFit.cxx:775
 MethodRuleFit.cxx:776
 MethodRuleFit.cxx:777
 MethodRuleFit.cxx:778
 MethodRuleFit.cxx:779
 MethodRuleFit.cxx:780
 MethodRuleFit.cxx:781
 MethodRuleFit.cxx:782
 MethodRuleFit.cxx:783
 MethodRuleFit.cxx:784