// ROOT logo
// @(#)Root/tmva $Id: Factory.cxx 29246 2009-06-26 16:50:00Z brun $   
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss 

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis       *
 * Package: TMVA                                                                  *
 * Class  : Factory                                                               *
 * Web    : http://tmva.sourceforge.net                                           *
 *                                                                                *
 * Description:                                                                   *
 *      Implementation (see header for description)                               *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland              *
 *      Joerg Stelzer   <stelzer@cern.ch>        - DESY, Germany                  *
 *      Peter Speckmayer <peter.speckmayer@cern.ch> - CERN, Switzerland           *
 *      Helge Voss      <Helge.Voss@cern.ch>     - MPI-K Heidelberg, Germany      *
 *      Kai Voss        <Kai.Voss@cern.ch>       - U. of Victoria, Canada         *
 *                                                                                *
 * Copyright (c) 2005:                                                            *
 *      CERN, Switzerland                                                         * 
 *      U. of Victoria, Canada                                                    * 
 *      MPI-K Heidelberg, Germany                                                 * 
 *      LAPP, Annecy, France                                                      *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (http://tmva.sourceforge.net/LICENSE)                                          *
 **********************************************************************************/

//_______________________________________________________________________
//                                                                      
// This is the main MVA steering class: it creates all MVA methods,     
// and guides them through the training, testing and evaluation         
// phases
//_______________________________________________________________________


#include "TROOT.h"
#include "TFile.h"
#include "TTree.h"
#include "TLeaf.h"
#include "TEventList.h"
#include "TH1.h"
#include "TH2.h"
#include "TText.h"
#include "TStyle.h"
#include "TMatrixF.h"
#include "TMatrixDSym.h"
#include "TPaletteAxis.h"
#include "TPrincipal.h"
#include "TMath.h"

#include "TMVA/Factory.h"
#include "TMVA/ClassifierFactory.h"
#include "TMVA/Config.h"
#include "TMVA/Tools.h"
#include "TMVA/Ranking.h"
#include "TMVA/DataSet.h"
#include "TMVA/IMethod.h"
#include "TMVA/MethodBase.h"
#include "TMVA/DataInputHandler.h"
#include "TMVA/DataSetManager.h"
#include "TMVA/DataSetInfo.h"
#include "TMVA/MethodBoost.h"

#include "TMVA/VariableIdentityTransform.h"
#include "TMVA/VariableDecorrTransform.h"
#include "TMVA/VariablePCATransform.h"
#include "TMVA/VariableGaussTransform.h"
#include "TMVA/VariableNormalizeTransform.h"

#include "TMVA/ResultsClassification.h"
#include "TMVA/ResultsRegression.h"

// minimum sample sizes required for a method to be trained/tested
const Int_t  MinNoTrainingEvents = 10;
const Int_t  MinNoTestEvents     = 1;
// static pointer to the output ROOT file, set in the Factory constructor
TFile* TMVA::Factory::fgTargetFile = 0;

ClassImp(TMVA::Factory)

#define RECREATE_METHODS kTRUE
#define READXML          kTRUE

//_______________________________________________________________________
TMVA::Factory::Factory( TString jobName, TFile* theTargetFile, TString theOption )
  : Configurable          ( theOption ),
    fDataInputHandler     ( new DataInputHandler ),
    fTransformations      ( "" ),
    fVerbose              ( kFALSE ),
    fJobName              ( jobName ),
    fDataAssignType       ( kAssignEvents )
{  
   // standard constructor
   //   jobname       : this name will appear in all weight file names produced by the MVAs
   //   theTargetFile : output ROOT file; the test tree and all evaluation plots 
   //                   will be stored here
   //   theOption     : option string; currently: "V" for verbose

   // remember output file in static member so all parts of the factory can write to it
   fgTargetFile = theTargetFile;

   // the DataSetManager singleton is bound to the input handler owned by this factory
   DataSetManager::CreateInstance(*fDataInputHandler);

   // render silent
   if (gTools().CheckForSilentOption( GetOptions() )) Log().InhibitOutput(); // make sure is silent if wanted to
   

   // init configurable
   SetConfigDescription( "Configuration options for Factory running" );
   SetConfigName( GetName() );

   // histograms are not automatically associated with the current
   // directory and hence don't go out of scope when closing the file
   // TH1::AddDirectory(kFALSE);
   Bool_t silent          = kFALSE;
   Bool_t color           = !gROOT->IsBatch();
   Bool_t drawProgressBar = kTRUE;
   // declare the options recognised in the factory option string
   DeclareOptionRef( fVerbose, "V", "Verbose flag" );
   DeclareOptionRef( color,    "Color", "Flag for coloured screen output (default: True, if in batch mode: False)" );
   DeclareOptionRef( fTransformations, "Transformations", "List of transformations to test; formatting example: \"Transformations=I;D;P;G,D\", for identity, decorrelation, PCA, and Gaussianisation followed by decorrelation transformations" );
   DeclareOptionRef( silent,   "Silent", "Batch mode: boolean silent flag inhibiting any output from TMVA after the creation of the factory class object (default: False)" );
   DeclareOptionRef( drawProgressBar,   
                     "DrawProgressBar", "Draw progress bar to display training, testing and evaluation schedule (default: True)" );

   ParseOptions();
   CheckForUnusedOptions();

   if (Verbose()) Log().SetMinType( kVERBOSE );

   // propagate parsed flags to the global TMVA configuration
   gConfig().SetUseColor( color );
   gConfig().SetSilent( silent );
   gConfig().SetDrawProgressBar( drawProgressBar );
   
   Greetings();
}

//_______________________________________________________________________
void TMVA::Factory::Greetings() 
{
   // Print the TMVA welcome banner: ROOT version line, the TMVA logo
   // welcome message and the TMVA version line.
   gTools().ROOTVersionMessage( Log() );
   gTools().TMVAWelcomeMessage( Log(), gTools().kLogoWelcomeMsg );
   gTools().TMVAVersionMessage( Log() );
   Log() << Endl;
}

//_______________________________________________________________________
TMVA::Factory::~Factory( void )
{
   // destructor: release default transformations, booked methods,
   // the data input handler, and the TMVA singletons
   std::vector<TMVA::VariableTransformBase*>::iterator trfIt = fDefaultTrfs.begin();
   for (;trfIt != fDefaultTrfs.end(); trfIt++) delete (*trfIt);

   this->DeleteAllMethods();
   delete fDataInputHandler;

   // destroy singletons
   DataSetManager::DestroyInstance();
   // problem with call of REGISTER_METHOD macro ...
   //   ClassifierFactory::DestroyInstance();
   //   Types::DestroyInstance();
   Tools::DestroyInstance();
   Config::DestroyInstance();
}

//_______________________________________________________________________
void TMVA::Factory::DeleteAllMethods( void )
{
   // delete methods
   MVector::iterator itrMethod = fMethods.begin();
   for (; itrMethod != fMethods.end(); itrMethod++) {
      Log() << kDEBUG << "Delete method: " << (*itrMethod)->GetName() << Endl;    
      delete (*itrMethod);
   }
   fMethods.clear();
}

//_______________________________________________________________________
void TMVA::Factory::SetVerbose( Bool_t v ) 
{
   // switch factory-level verbose output on or off
   fVerbose = v; 
}


//_______________________________________________________________________
TMVA::DataSetInfo& TMVA::Factory::AddDataSet( DataSetInfo &dsi )
{
   // register an externally created DataSetInfo with the DataSetManager
   return DataSetManager::Instance().AddDataSetInfo(dsi);
}

//_______________________________________________________________________
TMVA::DataSetInfo& TMVA::Factory::AddDataSet( const TString& dsiName )
{
   // return the dataset info registered under the given name; create and
   // register a fresh one if it does not exist yet
   DataSetInfo* existing = DataSetManager::Instance().GetDataSetInfo(dsiName);
   if (existing != 0) return *existing;

   DataSetInfo* created = new DataSetInfo(dsiName);
   return DataSetManager::Instance().AddDataSetInfo(*created);
}



// ________________________________________________
// the next functions are to assign events directly 

//_______________________________________________________________________
TTree* TMVA::Factory::CreateEventAssignTrees( const TString& name )
{
   // create the data assignment tree (for event-wise data assignment by user):
   // one branch per input variable, plus the class index ("type") and the
   // event weight
   TTree * assignTree = new TTree( name, name );
   assignTree->Branch( "type",   &fATreeType,   "ATreeType/I" );
   // bug fix: the weight is floating point, so the leaf type must be /F --
   // the previous /I leaf type truncated non-integer event weights
   assignTree->Branch( "weight", &fATreeWeight, "ATreeWeight/F" );
   std::vector<VariableInfo>& vars = DefaultDataSetInfo().GetVariableInfos();
   // lazily allocate the shared branch buffer, one float per input variable
   if (!fATreeEvent) fATreeEvent = new Float_t[vars.size()];
   for (UInt_t ivar=0; ivar<vars.size(); ivar++) {
      TString vname = vars[ivar].GetExpression();
      assignTree->Branch( vname, &(fATreeEvent[ivar]), vname + "/F" );
   }
   return assignTree;
}

//_______________________________________________________________________
void TMVA::Factory::AddSignalTrainingEvent( const std::vector<Double_t>& event, Double_t weight ) 
{
   // add a single event to the signal training sample
   AddEvent( "Signal", Types::kTraining, event, weight );
}

//_______________________________________________________________________
void TMVA::Factory::AddSignalTestEvent( const std::vector<Double_t>& event, Double_t weight ) 
{
   // add a single event to the signal test sample
   // bug fix: this must fill the test sample (was Types::kTraining,
   // a copy-paste error that silently put test events into training)
   AddEvent( "Signal", Types::kTesting, event, weight );
}

//_______________________________________________________________________
void TMVA::Factory::AddBackgroundTrainingEvent( const std::vector<Double_t>& event, Double_t weight ) 
{
   // add a single event to the background training sample
   // bug fix: this must fill the training sample (was Types::kTesting,
   // a copy-paste error that silently put training events into testing)
   AddEvent( "Background", Types::kTraining, event, weight );
}

//_______________________________________________________________________
void TMVA::Factory::AddBackgroundTestEvent( const std::vector<Double_t>& event, Double_t weight ) 
{
   // add a single event to the background test sample
   AddEvent( "Background", Types::kTesting, event, weight );
}

//_______________________________________________________________________
void TMVA::Factory::AddTrainingEvent( const TString& className, const std::vector<Double_t>& event, Double_t weight ) 
{
   // add a single training event for the given class
   AddEvent( className, Types::kTraining, event, weight );
}

//_______________________________________________________________________
void TMVA::Factory::AddTestEvent( const TString& className, const std::vector<Double_t>& event, Double_t weight ) 
{
   // add a single test event for the given class
   // bug fix: this must fill the test sample (was Types::kTraining,
   // a copy-paste error that silently put test events into training)
   AddEvent( className, Types::kTesting, event, weight );
}

//_______________________________________________________________________
void TMVA::Factory::AddEvent( const TString& className, Types::ETreeType tt,
                                   const std::vector<Double_t>& event, Double_t weight ) 
{
   // add a single event to the training or test assignment tree of the
   // given class; the class (and its assignment trees) are created on
   // first use
   ClassInfo* theClass = DefaultDataSetInfo().AddClass(className); // returns class (creates it if necessary)
   UInt_t clIndex = theClass->GetNumber();
   
   // grow the per-class tree vectors if a new class index appeared
   if (clIndex>=fTrainAssignTree.size()) {
      fTrainAssignTree.resize(clIndex+1, 0);
      fTestAssignTree.resize(clIndex+1, 0);
   }

   if (fTrainAssignTree[clIndex]==0) { // does not exist yet
      fTrainAssignTree[clIndex] = CreateEventAssignTrees( Form("TrainAssignTree_%s", className.Data()) );
      fTestAssignTree[clIndex]  = CreateEventAssignTrees( Form("TestAssignTree_%s",  className.Data()) );
   }
   
   // copy event data into the shared branch buffers, then fill the proper tree
   fATreeType   = clIndex;
   fATreeWeight = weight;
   // NOTE(review): assumes event.size() does not exceed the number of declared
   // input variables (the size of the fATreeEvent buffer) -- verify at call sites
   for (UInt_t ivar=0; ivar<event.size(); ivar++) fATreeEvent[ivar] = event[ivar];

   if(tt==Types::kTraining) fTrainAssignTree[clIndex]->Fill();
   else                     fTestAssignTree[clIndex]->Fill();

}

//_______________________________________________________________________
Bool_t TMVA::Factory::UserAssignEvents(UInt_t clIndex) 
{
   // returns kTRUE if events were assigned event-by-event for the class
   // with the given index (i.e., an assignment tree exists for it)
   return fTrainAssignTree[clIndex]!=0;
}

//_______________________________________________________________________
void TMVA::Factory::SetInputTreesFromEventAssignTrees()
{
   // assign event-wise local trees to data set
   UInt_t size = fTrainAssignTree.size();
   for(UInt_t i=0; i<size; i++) {
      if(!UserAssignEvents(i)) continue;
      const TString& className = DefaultDataSetInfo().GetClassInfo(i)->GetName();
      SetWeightExpression( "weight", className );
      AddTree(fTrainAssignTree[i], className, 1.0, TCut(""), Types::kTraining );
      AddTree(fTestAssignTree[i], className, 1.0, TCut(""), Types::kTesting );
   }
}



//_______________________________________________________________________
void TMVA::Factory::AddTree( TTree* tree, const TString& className, Double_t weight, 
                             const TCut& cut, const TString& treetype )
{
   // add an input tree for the given class, with the tree type given as a
   // string ("Training", "Test", or a string containing both)
   TString typeStr = treetype;
   typeStr.ToLower();

   Types::ETreeType tt;
   if (typeStr.Contains( "train" ) && typeStr.Contains( "test" )) {
      tt = Types::kMaxTreeType; // tree serves both training and testing
   }
   else if (typeStr.Contains( "train" )) {
      tt = Types::kTraining;
   }
   else if (typeStr.Contains( "test" )) {
      tt = Types::kTesting;
   }
   else {
      tt = Types::kMaxTreeType;
      Log() << kFATAL << "<AddTree> cannot interpret tree type: \"" << treetype 
              << "\" should be \"Training\" or \"Test\" or \"Training and Testing\"" << Endl;
   }
   AddTree(tree, className, weight, cut, tt );
}

//_______________________________________________________________________
void TMVA::Factory::AddTree( TTree* tree, const TString& className, Double_t weight, 
                             const TCut& cut, Types::ETreeType tt )
{
   // register an input tree for the given class with the data input
   // handler; the class is created in the default dataset if not yet known
   DefaultDataSetInfo().AddClass( className );
   DataInput().AddTree(tree, className, weight, cut, tt );
}

//_______________________________________________________________________
void TMVA::Factory::AddSignalTree( TTree* signal, Double_t weight, Types::ETreeType treetype )
{
   // add a tree containing signal events, with the given global weight
   AddTree( signal, "Signal", weight, TCut(""), treetype );
}

//_______________________________________________________________________
void TMVA::Factory::AddSignalTree( TString datFileS, Double_t weight, Types::ETreeType treetype )
{
   // add a signal tree whose events are read from an ASCII text file

   TTree* signalTree = new TTree( "TreeS", "Tree (S)" );
   signalTree->ReadFile( datFileS );

   Log() << kINFO << "Create TTree objects from ASCII input files ... \n- Signal file    : \""
         << datFileS << Endl;

   AddTree( signalTree, "Signal", weight, TCut(""), treetype );
}

//_______________________________________________________________________
void TMVA::Factory::AddSignalTree( TTree* signal, Double_t weight, const TString& treetype )
{
   // add a signal tree, tree type given as a string ("Training"/"Test"/both)
   AddTree( signal, "Signal", weight, TCut(""), treetype );
}

//_______________________________________________________________________
void TMVA::Factory::AddBackgroundTree( TTree* signal, Double_t weight, Types::ETreeType treetype )
{
   // number of signal events (used to compute significance)
   AddTree( signal, "Background", weight, TCut(""), treetype );
}
//_______________________________________________________________________
void TMVA::Factory::AddBackgroundTree( TString datFileB, Double_t weight, Types::ETreeType treetype )
{
   // add a background tree whose events are read from an ASCII text file

   TTree* bkgTree = new TTree( "TreeB", "Tree (B)" );
   bkgTree->ReadFile( datFileB );

   Log() << kINFO << "Create TTree objects from ASCII input files ... \n- Background file    : \""
         << datFileB << Endl;

   AddTree( bkgTree, "Background", weight, TCut(""), treetype );
}

//_______________________________________________________________________
void TMVA::Factory::AddBackgroundTree( TTree* signal, Double_t weight, const TString& treetype )
{
   AddTree( signal, "Background", weight, TCut(""), treetype );
}

//_______________________________________________________________________
void TMVA::Factory::SetSignalTree( TTree* tree, Double_t weight )
{
   // set the signal tree (convenience wrapper around AddTree)
   AddTree( tree, "Signal", weight );
}

//_______________________________________________________________________
void TMVA::Factory::SetBackgroundTree( TTree* tree, Double_t weight )
{
   // set the background tree (convenience wrapper around AddTree)
   AddTree( tree, "Background", weight );
}

//_______________________________________________________________________
void TMVA::Factory::SetTree( TTree* tree, const TString& className, Double_t weight )
{
   // set an input tree for the given class, used for both training and testing
   AddTree( tree, className, weight, TCut(""), Types::kMaxTreeType );
}

//_______________________________________________________________________
void  TMVA::Factory::SetInputTrees( TTree* signal, TTree* background, 
                                    Double_t signalWeight, Double_t backgroundWeight )
{
   // define the input trees for signal and background; no cuts are applied
   AddTree( signal,     "Signal",     signalWeight,     TCut(""), Types::kMaxTreeType );
   AddTree( background, "Background", backgroundWeight, TCut(""), Types::kMaxTreeType );
}

//_______________________________________________________________________
void TMVA::Factory::SetInputTrees( const TString& datFileS, const TString& datFileB, 
                                   Double_t signalWeight, Double_t backgroundWeight )
{
   // define signal and background input from ASCII data files; no cuts applied
   DataInput().AddTree( datFileS, "Signal", signalWeight );
   DataInput().AddTree( datFileB, "Background", backgroundWeight );
}

//_______________________________________________________________________
void TMVA::Factory::SetInputTrees( TTree* inputTree, const TCut& SigCut, const TCut& BgCut )
{
   // define the input trees for signal and background from single input tree,
   // containing both signal and background events distinguished by the type 
   // identifiers: SigCut and BgCut
   AddTree( inputTree, "Signal",     1.0, SigCut, Types::kMaxTreeType );
   AddTree( inputTree, "Background", 1.0, BgCut , Types::kMaxTreeType );
}

//_______________________________________________________________________
void TMVA::Factory::AddVariable( const TString& expression, const TString& title, const TString& unit, 
                                 char type, Double_t min, Double_t max )
{
   // user inserts discriminating variable in data set info,
   // with explicit title, unit, type ('F'/'I') and range
   DefaultDataSetInfo().AddVariable( expression, title, unit, min, max, type ); 
}

//_______________________________________________________________________
void TMVA::Factory::AddVariable( const TString& expression, char type,
                                 Double_t min, Double_t max )
{
   // user inserts discriminating variable in data set info
   // (short form: no title or unit)
   DefaultDataSetInfo().AddVariable( expression, "", "", min, max, type ); 
}

//_______________________________________________________________________
void TMVA::Factory::AddTarget( const TString& expression, const TString& title, const TString& unit, 
                               Double_t min, Double_t max )
{
   // user inserts a regression target in data set info
   DefaultDataSetInfo().AddTarget( expression, title, unit, min, max ); 
}

//_______________________________________________________________________
void TMVA::Factory::AddSpectator( const TString& expression, const TString& title, const TString& unit, 
                                Double_t min, Double_t max )
{
   // user inserts a spectator variable (carried along but not used in
   // the MVA training) in data set info
   DefaultDataSetInfo().AddSpectator( expression, title, unit, min, max ); 
}

//_______________________________________________________________________
TMVA::DataSetInfo& TMVA::Factory::DefaultDataSetInfo() 
{ 
   // return the dataset info named "Default", creating it on first use
   return AddDataSet( "Default" );
}

//_______________________________________________________________________
void TMVA::Factory::SetInputVariables( std::vector<TString>* theVariables ) 
{ 
   // register every expression in the list as a discriminating input variable
   std::vector<TString>::iterator it = theVariables->begin();
   while (it != theVariables->end()) {
      AddVariable( *it );
      ++it;
   }
}

//_______________________________________________________________________
void TMVA::Factory::SetSignalWeightExpression( const TString& variable)  
{ 
   // set the per-event weight expression for the signal class
   DefaultDataSetInfo().SetWeightExpression(variable, "Signal"); 
}

//_______________________________________________________________________
void TMVA::Factory::SetBackgroundWeightExpression( const TString& variable) 
{
   // set the per-event weight expression for the background class
   DefaultDataSetInfo().SetWeightExpression(variable, "Background");
}

//_______________________________________________________________________
void TMVA::Factory::SetWeightExpression( const TString& variable, const TString& className )  
{
   // set the per-event weight expression; an empty class name applies it
   // to both signal and background
   if (className != "") {
      DefaultDataSetInfo().SetWeightExpression( variable, className );
      return;
   }
   SetSignalWeightExpression(variable);
   SetBackgroundWeightExpression(variable);
}

//_______________________________________________________________________
void TMVA::Factory::SetCut( const TString& cut, const TString& className ) {
   // set the preselection cut (given as a string) for the given class
   SetCut( TCut(cut), className );
}

//_______________________________________________________________________
void TMVA::Factory::SetCut( const TCut& cut, const TString& className ) 
{
   // set the preselection cut for the given class
   DefaultDataSetInfo().SetCut( cut, className );
}

//_______________________________________________________________________
void TMVA::Factory::AddCut( const TString& cut, const TString& className ) 
{
   // add a preselection cut (given as a string) for the given class
   AddCut( TCut(cut), className );
}

//_______________________________________________________________________
void TMVA::Factory::AddCut( const TCut& cut, const TString& className ) 
{
   // add a preselection cut for the given class
   DefaultDataSetInfo().AddCut( cut, className );
}

//_______________________________________________________________________
void TMVA::Factory::PrepareTrainingAndTestTree( const TCut& cut, 
                                                Int_t NsigTrain, Int_t NbkgTrain, Int_t NsigTest, Int_t NbkgTest,
                                                const TString& otherOpt )
{
   // prepare the training and test trees with explicit event counts per
   // class and sample; further split options may be given via otherOpt

   // if event-wise data assignment was used, register those trees first
   SetInputTreesFromEventAssignTrees();

   AddCut( cut  );

   DefaultDataSetInfo().SetSplitOptions( Form("nTrain_Signal=%i:nTrain_Background=%i:nTest_Signal=%i:nTest_Background=%i:%s", 
                                              NsigTrain, NbkgTrain, NsigTest, NbkgTest, otherOpt.Data()) );
}

//_______________________________________________________________________
void TMVA::Factory::PrepareTrainingAndTestTree( const TCut& cut, Int_t Ntrain, Int_t Ntest )
{
   // prepare the training and test trees 
   // kept for backward compatibility; Ntrain/Ntest are applied to signal
   // and background alike, with random splitting and equal training samples
   SetInputTreesFromEventAssignTrees();

   AddCut( cut  );

   DefaultDataSetInfo().SetSplitOptions( Form("nTrain_Signal=%i:nTrain_Background=%i:nTest_Signal=%i:nTest_Background=%i:SplitMode=Random:EqualTrainSample:!V", 
                                              Ntrain, Ntrain, Ntest, Ntest) );
}

//_______________________________________________________________________
void TMVA::Factory::PrepareTrainingAndTestTree( const TCut& cut, const TString& opt )
{ 
   // prepare the training and test trees 
   // -> same cut for signal and background; split options given as string
   SetInputTreesFromEventAssignTrees();

   // print the known classes (informational output)
   DefaultDataSetInfo().PrintClasses();
   AddCut( cut );
   DefaultDataSetInfo().SetSplitOptions( opt );
}




//_______________________________________________________________________
void TMVA::Factory::PrepareTrainingAndTestTree( TCut sigcut, TCut bkgcut, const TString& splitOpt )
{ 
   // prepare the training and test trees with separate preselection cuts
   // for signal and background

   // if event-wise data assignment, add local trees to dataset first
   SetInputTreesFromEventAssignTrees();

   Log() << kINFO << "Preparing trees for training and testing..." << Endl;
   AddCut( sigcut, "Signal"  );
   AddCut( bkgcut, "Background" );

   DefaultDataSetInfo().SetSplitOptions( splitOpt );
}

//_______________________________________________________________________
TMVA::MethodBase* TMVA::Factory::BookMethod( TString theMethodName, TString methodTitle, TString theOption ) 
{
   // Book a classifier or regression method
   //   theMethodName : method type name, as registered in the ClassifierFactory
   //   methodTitle   : unique user-chosen title for this method instance
   //   theOption     : configuration option string passed to the method

   // booking via name; the names are translated into enums and the 
   // corresponding overloaded BookMethod is called
   if (GetMethod( methodTitle ) != 0) {
      Log() << kFATAL << "Booking failed since method with title <"
              << methodTitle <<"> already exists"
              << Endl;
   }

   Log() << kINFO << "Booking method: " << methodTitle << Endl;

   // interpret option string with respect to a request for boosting (i.e., BostNum > 0)
   // a throw-away Configurable is used only to extract "Boost_num"
   Int_t    boostNum = 0;
   TMVA::Configurable* conf = new TMVA::Configurable( theOption );
   conf->DeclareOptionRef( boostNum = 0, "Boost_num",
                           "Number of times the classifier will be boosted" );
   conf->ParseOptions();
   delete conf;

   // initialize methods   
   IMethod* im;
   if (!boostNum) {
      // ordinary (non-boosted) method: create it directly via the factory
      im = ClassifierFactory::Instance().Create( std::string(theMethodName), 
                                                 fJobName,
                                                 methodTitle,
                                                 DefaultDataSetInfo(),
                                                 theOption );
   }
   else {
      // boosted classifier, requires a specific definition, making it transparent for the user
      Log() << "Boost Number is " << boostNum << " > 0: train boosted classifier" << Endl; 
      im = ClassifierFactory::Instance().Create( std::string("Boost"), 
                                                 fJobName,
                                                 methodTitle,
                                                 DefaultDataSetInfo(),
                                                 theOption );
      // NOTE(review): assumes the factory returned a MethodBoost instance;
      // the dynamic_cast result is dereferenced without a null check
      (dynamic_cast<MethodBoost*>(im))->SetBoostedMethodName( theMethodName );
   }

   MethodBase *method = (dynamic_cast<MethodBase*>(im));

   // standard method setup sequence: configure, parse options, finalize
   method->SetupMethod();
   method->ParseOptions();
   method->ProcessSetup(); 

   // check-for-unused-options is performed; may be overridden by derived classes
   method->CheckSetup();   

   fMethods.push_back( method );

   return method;
}

//_______________________________________________________________________
TMVA::MethodBase* TMVA::Factory::BookMethod( Types::EMVA theMethod, TString methodTitle, TString theOption ) 
{
   // books MVA method; the option configuration string is custom for each MVA
   // the TString field "theNameAppendix" serves to define (and distringuish) 
   // several instances of a given MVA, eg, when one wants to compare the 
   // performance of various configurations
   return BookMethod( Types::Instance().GetMethodName( theMethod ), methodTitle, theOption );
}

//_______________________________________________________________________
TMVA::IMethod* TMVA::Factory::GetMethod( const TString &methodTitle ) const
{
   // returns pointer to the booked MVA whose method name matches the given
   // title, or 0 if no such method has been booked
   for (MVector::const_iterator it = fMethods.begin(); it != fMethods.end(); ++it) {
      MethodBase* mva = dynamic_cast<MethodBase*>(*it);
      if (mva->GetMethodName() == methodTitle) return mva;
   }
   return 0;
}

//_______________________________________________________________________
void TMVA::Factory::WriteDataInformation() 
{
   // put correlations of input data and a few (default + user
   // selected) transformations into the root file

   RootBaseDir()->cd();

   DefaultDataSetInfo().GetDataSet(); // builds dataset (including calculation of correlation matrix)

   // correlation matrix of the default DS: write one histogram each for
   // signal, background and (if present) regression
   const TMatrixD* m(0);
   const TH2* h(0);
   m = DefaultDataSetInfo().CorrelationMatrix( "Signal" );
   h = DefaultDataSetInfo().CreateCorrelationMatrixHist(m, "CorrelationMatrixS", "Correlation Matrix (signal)");
   if (h!=0) { 
      h->Write();
      delete h;
   }

   m = DefaultDataSetInfo().CorrelationMatrix( "Background" );
   h = DefaultDataSetInfo().CreateCorrelationMatrixHist(m, "CorrelationMatrixB", "Correlation Matrix (background)");
   if (h!=0) { 
      h->Write();
      delete h;
   }

   m = DefaultDataSetInfo().CorrelationMatrix( "Regression" );
   h = DefaultDataSetInfo().CreateCorrelationMatrixHist(m, "CorrelationMatrix", "Correlation Matrix");
   if (h!=0) { 
      h->Write();
      delete h;
   }
   
   // some default transformations to evaluate
   // NOTE: all transformations are destroyed after this test
   TString processTrfs = ""; //"I;N;D;P;G,D;"

   // plus some user defined transformations
   processTrfs = fTransformations;

   // remove any trace of identity transform - if given (avoid to apply it twice)
   processTrfs.ReplaceAll(" ","");
   processTrfs.ReplaceAll("I;","");
   processTrfs.ReplaceAll(";I","");
   processTrfs.ReplaceAll("I","");

   // and re-add identity transform at beginning
   if (processTrfs.Length() > 0) processTrfs = TString("I;") + processTrfs;
   else                          processTrfs = TString("I");

   std::vector<TMVA::TransformationHandler*> trfs;
   TransformationHandler* identityTrHandler = 0;

   // the option string is ';'-separated chains, each chain a ','-separated
   // list of single-letter transformation codes (I, D, P, G, N), optionally
   // suffixed with "_<classname>" to restrict the transformation to a class
   std::vector<TString> trfsDef = gTools().SplitString(processTrfs,';');
   std::vector<TString>::iterator trfsDefIt = trfsDef.begin();
   for (; trfsDefIt!=trfsDef.end(); trfsDefIt++) {
      trfs.push_back(new TMVA::TransformationHandler(DefaultDataSetInfo(), "Factory"));
      std::vector<TString> trfDef = gTools().SplitString(*trfsDefIt,',');

      std::vector<TString>::iterator trfDefIt = trfDef.begin();

      for (; trfDefIt!=trfDef.end(); trfDefIt++) {
         TString trfS = (*trfDefIt);
         
         TList* trClsList = gTools().ParseFormatLine( trfS, "_" ); // split entry to get trf-name and class-name
         TListIter trClsIt(trClsList);

         const TString& trName = ((TObjString*)trClsList->At(0))->GetString();
         TString trCls = "AllClasses";
         ClassInfo *ci = NULL;
         Int_t idxCls = -1;   // -1 means: apply to all classes
         if (trClsList->GetEntries() > 1) {
            trCls  = ((TObjString*)trClsList->At(1))->GetString();
            if (trCls == "AllClasses") {
               // do nothing, since all necessary parameters are already set
            }
            else {
               ci = DefaultDataSetInfo().GetClassInfo( trCls );
               if (ci == NULL) {
                  Log() << kFATAL << "Class " << trCls << " not known for variable transformation " << trName << ", please check." << Endl;
               }
               else {
                  idxCls = ci->GetNumber();
               }
            }
         }
         delete trClsList;

         // map the single-letter code to the corresponding transformation
         if (trName=='I') {
            trfs.back()->AddTransformation( new VariableIdentityTransform ( DefaultDataSetInfo() ), idxCls );
            identityTrHandler = trfs.back();
         } 
         else if (trName=='D') {
            trfs.back()->AddTransformation( new VariableDecorrTransform   ( DefaultDataSetInfo() ), idxCls );
         } 
         else if (trName=='P') {
            trfs.back()->AddTransformation( new VariablePCATransform      ( DefaultDataSetInfo() ), idxCls );
         } 
         else if (trName=='G') {
            trfs.back()->AddTransformation( new VariableGaussTransform    ( DefaultDataSetInfo() ), idxCls );
         } 
         else if (trName=='N') {
            trfs.back()->AddTransformation( new VariableNormalizeTransform( DefaultDataSetInfo() ), idxCls );
         } 
         else {
            Log() << kINFO << "The transformation " << *trfsDefIt << " definition is not valid, the \n"
                    << "transformation " << trName << " is not known!" << Endl;
         }
      }
   }

   const std::vector<Event*>& inputEvents = DefaultDataSetInfo().GetDataSet()->GetEventCollection();

   // apply all transformations
   std::vector<TMVA::TransformationHandler*>::iterator trfIt = trfs.begin();

   for (;trfIt != trfs.end(); trfIt++) {
      // setting a Root dir causes the variables distributions to be saved to the root file
      (*trfIt)->SetRootDir(RootBaseDir());
      (*trfIt)->CalcTransformations(inputEvents);      
   }
   // the identity transformation provides the plain-variable ranking
   if(identityTrHandler) identityTrHandler->PrintVariableRanking();

   // clean up
   for (trfIt = trfs.begin(); trfIt != trfs.end(); trfIt++) delete *trfIt;
}

//_______________________________________________________________________
void TMVA::Factory::TrainAllMethods( TString what ) 
{     
   // iterates through all booked methods and calls training
   //
   // "what" selects the analysis type: the (case-insensitive) string
   // "regression" requests regression training, anything else defaults
   // to classification
   
   what.ToLower();
   Types::EAnalysisType analysisType = ( what.CompareTo("regression")==0 ? Types::kRegression : Types::kClassification );

   // iterates over all MVAs that have been booked, and calls their training methods

   // first print some information about the default dataset
   WriteDataInformation();

   // here the training starts
   Log() << kINFO << "Train all methods for " 
         << (analysisType == Types::kRegression ? "Regression" : "Classification") << " ..." << Endl;

   // don't do anything if no method booked
   if (fMethods.size() == 0) {
      Log() << kINFO << "...nothing found to train" << Endl;
      return;
   }
   
   MVector::iterator itrMethod;

   // iterate over methods and train
   // NOTE(review): the dynamic_cast below is used without a null check —
   // this assumes every booked method derives from MethodBase; confirm
   // against the booking code
   for (itrMethod = fMethods.begin(); itrMethod != fMethods.end(); ) {

      MethodBase* mva = dynamic_cast<MethodBase*>(*itrMethod);
      // methods that cannot handle the requested analysis type (wrong
      // number of classes/targets) are removed from the method list;
      // erase() returns the next valid iterator, hence the manual
      // increment at the bottom of the loop
      if (!mva->HasAnalysisType( analysisType, 
                                 DefaultDataSetInfo().GetNClasses(), DefaultDataSetInfo().GetNTargets() )) {
         Log() << kWARNING << "Method " << mva->GetMethodTypeName() << " is not capable of handling " ;
         if (analysisType == Types::kRegression) {
            Log() << "regression with " << DefaultDataSetInfo().GetNTargets() << " targets." << Endl;
         }
         else {
            Log() << "classification with " << DefaultDataSetInfo().GetNClasses() << " classes." << Endl;
         }
         itrMethod = fMethods.erase( itrMethod );
         continue;
      }
      mva->SetAnalysisType( analysisType );
      //      mva->Init();
      // train only if the training sample holds enough events; otherwise
      // the method stays booked but untrained
      if (mva->Data()->GetNTrainingEvents() >= MinNoTrainingEvents) {
         Log() << kINFO << "Train method: " << mva->GetMethodName() << " for " 
                 << (analysisType == Types::kRegression ? "Regression" : "Classification") << Endl;
         mva->TrainMethod();
         Log() << kINFO << "Training finished" << Endl;
      }
      else {
         Log() << kWARNING << "Method " << mva->GetMethodName() 
                 << " not trained (training tree has less entries ["
                 << mva->Data()->GetNTrainingEvents() 
                 << "] than required [" << MinNoTrainingEvents << "]" << Endl; 
      }
      itrMethod++;
   }

   if (analysisType != Types::kRegression) {

      // variable ranking 
      Log() << Endl;
      Log() << kINFO << "Begin ranking of input variables..." << Endl;
      for (itrMethod = fMethods.begin(); itrMethod != fMethods.end(); itrMethod++) {
         MethodBase* mva = dynamic_cast<MethodBase*>(*itrMethod);
         if (mva->Data()->GetNTrainingEvents() >= MinNoTrainingEvents) {
            
            // create and print ranking
            const Ranking* ranking = (*itrMethod)->CreateRanking();
            if (ranking != 0) ranking->Print();
            else Log() << kINFO << "No variable ranking supplied by classifier: " 
                         << dynamic_cast<MethodBase*>(*itrMethod)->GetMethodName() << Endl;
         }
      }
   }

   // delete all methods and recreate them from weight file - this ensures that the application 
   // of the methods (in TMVAClassificationApplication) is consistent with the results obtained
   // in the testing
   Log() << Endl;
   if (RECREATE_METHODS) {

      Log() << kINFO << "=== Destroy and recreate all methods via weight files for testing ===" << Endl << Endl;;
      // iterate through all booked methods
      for (UInt_t i=0; i<fMethods.size(); i++) {

         MethodBase* m = dynamic_cast<MethodBase*>(fMethods[i]);

         // remember type and weight file before destroying the method,
         // so it can be reconstructed from its persisted state
         TMVA::Types::EMVA methodType = m->GetMethodType();
         TString           weightfile = m->GetWeightFileName();

         // decide if .txt or .xml file should be read:
         if (READXML) weightfile.ReplaceAll(".txt",".xml");

         DataSetInfo& dataSetInfo = m->DataInfo();
         TString      testvarName = m->GetTestvarName();
         delete m; //itrMethod[i];

         // recreate
         // NOTE(review): no null check on the factory result — assumes the
         // method type is always registered with ClassifierFactory
         m = dynamic_cast<MethodBase*>( ClassifierFactory::Instance()
                                                       .Create( std::string(Types::Instance().GetMethodName(methodType)), 
                                                                dataSetInfo, weightfile ) );
        
         m->SetupMethod();
         m->ReadStateFromFile();
         m->SetTestvarName(testvarName);

         // replace trained method by newly created one (from weight file) in methods vector
         fMethods[i] = m;
      }

   }
}

//_______________________________________________________________________
void TMVA::Factory::TestAllMethods()
{
   Log() << kINFO << "Test all methods..." << Endl;

   // don't do anything if no method booked
   if (fMethods.size() == 0) {
      Log() << kINFO << "...nothing found to test" << Endl;
      return;
   }

   // iterates over all MVAs that have been booked, and calls their testing methods
   // iterate over methods and test
   MVector::iterator itrMethod    = fMethods.begin();
   MVector::iterator itrMethodEnd = fMethods.end();
   for (; itrMethod != itrMethodEnd; itrMethod++) {
      MethodBase* mva = dynamic_cast<MethodBase*>(*itrMethod);
      Types::EAnalysisType analysisType = mva->GetAnalysisType();
      Log() << kINFO << "Test method: " << mva->GetMethodName() << " for " 
              << (analysisType == Types::kRegression ? "Regression" : "Classification") << " performance" << Endl;
      mva->AddOutput( Types::kTesting, analysisType );
   }
}

//_______________________________________________________________________
void TMVA::Factory::MakeClass( const TString& methodTitle ) const
{
   // Print predefined help message of classifier
   // iterate over methods and test
   if (methodTitle != "") {
      IMethod* method = GetMethod( methodTitle );
      if (method) method->MakeClass();
      else {
         Log() << kWARNING << "<MakeClass> Could not find classifier \"" << methodTitle 
                 << "\" in list" << Endl;
      }
   }
   else {

      // no classifier specified, print all hepl messages
      MVector::const_iterator itrMethod    = fMethods.begin();
      MVector::const_iterator itrMethodEnd = fMethods.end();
      for (; itrMethod != itrMethodEnd; itrMethod++) {
         MethodBase* method = dynamic_cast<MethodBase*>(*itrMethod);
         Log() << kINFO << "Make response class for classifier: " << method->GetMethodName() << Endl;
         method->MakeClass();
      }
   }
}

//_______________________________________________________________________
void TMVA::Factory::PrintHelpMessage( const TString& methodTitle ) const
{
   // Print predefined help message of classifier
   // iterate over methods and test
   if (methodTitle != "") {
      IMethod* method = GetMethod( methodTitle );
      if (method) method->PrintHelpMessage();
      else {
         Log() << kWARNING << "<PrintHelpMessage> Could not find classifier \"" << methodTitle 
                 << "\" in list" << Endl;
      }
   }
   else {

      // no classifier specified, print all hepl messages
      MVector::const_iterator itrMethod    = fMethods.begin();
      MVector::const_iterator itrMethodEnd = fMethods.end();
      for (; itrMethod != itrMethodEnd; itrMethod++) {
         MethodBase* method = dynamic_cast<MethodBase*>(*itrMethod);
         Log() << kINFO << "Print help message for classifier: " << method->GetMethodName() << Endl;
         method->PrintHelpMessage();
      }
   }
}

//_______________________________________________________________________
void TMVA::Factory::EvaluateAllVariables( TString options )
{
   // books a "Variable" pseudo-method for every MVA input variable so the
   // variables themselves can be evaluated like classifiers; an option
   // string containing "V" requests verbose mode for each booking
   Log() << kINFO << "Evaluating all variables..." << Endl;

   for (UInt_t ivar = 0; ivar < DefaultDataSetInfo().GetNVariables(); ivar++) {
      TString label = DefaultDataSetInfo().GetVariableInfo(ivar).GetLabel();
      if (options.Contains("V")) label += ":V";
      this->BookMethod( "Variable", label );
   }
}

//_______________________________________________________________________
void TMVA::Factory::EvaluateAllMethods( void )
{
   // iterates over all MVAs that have been booked, and calls their evaluation methods
   Log() << kINFO << "Evaluate all methods..." << Endl;

   // don't do anything if no method booked
   if (fMethods.size() == 0) {
      Log() << kINFO << "...nothing found to evaluate" << Endl;
      return;
   }

   // -----------------------------------------------------------------------
   // First part of evaluation process
   // --> compute efficiencies, and other separation estimators
   // -----------------------------------------------------------------------

   // although equal, we now want to separate the output for the variables
   // and the real methods
   Int_t isel;                  // will be 0 for a Method; 1 for a Variable
   Int_t nmeth_used[2] = {0,0}; // 0 Method; 1 Variable

   // classification results: index [0] holds real methods, [1] holds
   // "Variable" pseudo-methods; the inner vectors run parallel to mname
   std::vector<std::vector<TString> >  mname(2);
   std::vector<std::vector<Double_t> > sig(2), sep(2), roc(2);
   std::vector<std::vector<Double_t> > eff01(2), eff10(2), eff30(2), effArea(2);
   std::vector<std::vector<Double_t> > eff01err(2), eff10err(2), eff30err(2);
   std::vector<std::vector<Double_t> > trainEff01(2), trainEff10(2), trainEff30(2);

   std::vector<std::vector<Double_t> > biastrain(1);  // "bias" of the regression on the training data
   std::vector<std::vector<Double_t> > biastest(1);   // "bias" of the regression on test data 
   std::vector<std::vector<Double_t> > devtrain(1);   // "dev" of the regression on the training data
   std::vector<std::vector<Double_t> > devtest(1);    // "dev" of the regression on test data 
   std::vector<std::vector<Double_t> > rmstrain(1);   // "rms" of the regression on the training data
   std::vector<std::vector<Double_t> > rmstest(1);    // "rms" of the regression on test data 
   std::vector<std::vector<Double_t> > minftrain(1);  // "minf" of the regression on the training data
   std::vector<std::vector<Double_t> > minftest(1);   // "minf" of the regression on test data 
   std::vector<std::vector<Double_t> > rhotrain(1);   // correlation of the regression on the training data
   std::vector<std::vector<Double_t> > rhotest(1);    // correlation of the regression on test data 

   // same as above but for 'truncated' quantities (computed for events within 2sigma of RMS)
   std::vector<std::vector<Double_t> > biastrainT(1); 
   std::vector<std::vector<Double_t> > biastestT(1);    
   std::vector<std::vector<Double_t> > devtrainT(1);  
   std::vector<std::vector<Double_t> > devtestT(1);    
   std::vector<std::vector<Double_t> > rmstrainT(1);  
   std::vector<std::vector<Double_t> > rmstestT(1);    
   std::vector<std::vector<Double_t> > minftrainT(1);
   std::vector<std::vector<Double_t> > minftestT(1); 

   // following vector contains all methods - with the exception of Cuts, which are special
   MVector methodsNoCuts; 

   Bool_t doRegression = kFALSE;

   // iterate over methods and evaluate
   // NOTE(review): dynamic_cast below is used without a null check —
   // assumes every booked method derives from MethodBase
   MVector::iterator itrMethod    = fMethods.begin();
   MVector::iterator itrMethodEnd = fMethods.end();
   for (; itrMethod != itrMethodEnd; itrMethod++) {
      MethodBase* theMethod = dynamic_cast<MethodBase*>(*itrMethod);
      if (theMethod->GetMethodType() != Types::kCuts) methodsNoCuts.push_back( *itrMethod );

      if (theMethod->DoRegression()) {
         // regression branch: collect deviation/bias/RMS/mutual-information
         // figures for the test and the training sample
         doRegression = kTRUE;

         Log() << kINFO << "Evaluate regression method: " << theMethod->GetMethodName() << Endl;         
         Double_t bias, dev, rms, mInf;
         Double_t biasT, devT, rmsT, mInfT;
         Double_t rho;

         theMethod->TestRegression( bias, biasT, dev, devT, rms, rmsT, mInf, mInfT, rho, TMVA::Types::kTesting  );
         biastest[0]  .push_back( bias );
         devtest[0]   .push_back( dev );
         rmstest[0]   .push_back( rms );
         minftest[0]  .push_back( mInf );
         rhotest[0]   .push_back( rho );
         biastestT[0] .push_back( biasT );
         devtestT[0]  .push_back( devT );
         rmstestT[0]  .push_back( rmsT );
         minftestT[0] .push_back( mInfT );

         theMethod->TestRegression( bias, biasT, dev, devT, rms, rmsT, mInf, mInfT, rho, TMVA::Types::kTraining  );
         biastrain[0] .push_back( bias );
         devtrain[0]  .push_back( dev );
         rmstrain[0]  .push_back( rms );
         minftrain[0] .push_back( mInf );
         rhotrain[0]  .push_back( rho );
         biastrainT[0].push_back( biasT );
         devtrainT[0] .push_back( devT );
         rmstrainT[0] .push_back( rmsT );
         minftrainT[0].push_back( mInfT );

         mname[0].push_back( theMethod->GetMethodName() );
         nmeth_used[0]++;

         Log() << kINFO << "Write Evaluation Histos to file" << Endl;
         theMethod->WriteEvaluationHistosToFile();
      }
      else {
         // classification branch: run the test and collect the standard
         // performance estimators
         Log() << kINFO << "Evaluate classifier: " << theMethod->GetMethodName() << Endl;
         isel = (theMethod->GetMethodTypeName().Contains("Variable")) ? 1 : 0;
      
         // perform the evaluation
         theMethod->TestClassification();
         
         // evaluate the classifier
         mname[isel].push_back( theMethod->GetMethodName() );
         sig[isel].push_back  ( theMethod->GetSignificance() );
         sep[isel].push_back  ( theMethod->GetSeparation() );
         roc[isel].push_back  ( theMethod->GetROCIntegral() );

         Double_t err;
         eff01[isel].push_back( theMethod->GetEfficiency("Efficiency:0.01", Types::kTesting, err) );
         eff01err[isel].push_back( err );
         eff10[isel].push_back( theMethod->GetEfficiency("Efficiency:0.10", Types::kTesting, err) );
         eff10err[isel].push_back( err );
         eff30[isel].push_back( theMethod->GetEfficiency("Efficiency:0.30", Types::kTesting, err) );
         eff30err[isel].push_back( err );
         effArea[isel].push_back( theMethod->GetEfficiency("",              Types::kTesting, err)  ); // computes the area (average)

         trainEff01[isel].push_back( theMethod->GetTrainingEfficiency("Efficiency:0.01") ); // the first pass takes longer
         trainEff10[isel].push_back( theMethod->GetTrainingEfficiency("Efficiency:0.10") );
         trainEff30[isel].push_back( theMethod->GetTrainingEfficiency("Efficiency:0.30") );

         nmeth_used[isel]++;

         Log() << kINFO << "Write Evaluation Histos to file" << Endl;
         theMethod->WriteEvaluationHistosToFile();
      }
   }
   if (doRegression) {

      // rank the regression results by the test-sample deviation; all
      // parallel vectors are permuted consistently by UsefulSortAscending
      std::vector<TString> vtemps = mname[0];
      std::vector< std::vector<Double_t> > vtmp;
      vtmp.push_back( devtest[0]   );  // this is the vector that is ranked
      vtmp.push_back( devtrain[0]  );
      vtmp.push_back( biastest[0]  );
      vtmp.push_back( biastrain[0] );
      vtmp.push_back( rmstest[0]   );
      vtmp.push_back( rmstrain[0]  );
      vtmp.push_back( minftest[0]  );
      vtmp.push_back( minftrain[0] );
      vtmp.push_back( rhotest[0]   );
      vtmp.push_back( rhotrain[0]  );
      vtmp.push_back( devtestT[0]  );  // this is the vector that is ranked
      vtmp.push_back( devtrainT[0] );
      vtmp.push_back( biastestT[0] );
      vtmp.push_back( biastrainT[0]);
      vtmp.push_back( rmstestT[0]  );
      vtmp.push_back( rmstrainT[0] );
      vtmp.push_back( minftestT[0] );
      vtmp.push_back( minftrainT[0]);
      gTools().UsefulSortAscending( vtmp, &vtemps );
      mname[0]      = vtemps;
      devtest[0]    = vtmp[0];
      devtrain[0]   = vtmp[1];
      biastest[0]   = vtmp[2];
      biastrain[0]  = vtmp[3];
      rmstest[0]    = vtmp[4];
      rmstrain[0]   = vtmp[5];
      minftest[0]   = vtmp[6];
      minftrain[0]  = vtmp[7];
      rhotest[0]    = vtmp[8];
      rhotrain[0]   = vtmp[9];
      devtestT[0]   = vtmp[10];
      devtrainT[0]  = vtmp[11];
      biastestT[0]  = vtmp[12];
      biastrainT[0] = vtmp[13];
      rmstestT[0]   = vtmp[14];
      rmstrainT[0]  = vtmp[15];
      minftestT[0]  = vtmp[16];
      minftrainT[0] = vtmp[17];
   }
   // now sort the variables according to the best 'eff at Beff=0.10'
   else {
      for (Int_t k=0; k<2; k++) {
         std::vector< std::vector<Double_t> > vtemp;
         vtemp.push_back( effArea[k] );  // this is the vector that is ranked
         vtemp.push_back( eff10[k] );
         vtemp.push_back( eff01[k] );
         vtemp.push_back( eff30[k] );
         vtemp.push_back( eff10err[k] ); 
         vtemp.push_back( eff01err[k] );
         vtemp.push_back( eff30err[k] );
         vtemp.push_back( trainEff10[k] );
         vtemp.push_back( trainEff01[k] );
         vtemp.push_back( trainEff30[k] );
         vtemp.push_back( sig[k] );
         vtemp.push_back( sep[k] );
         vtemp.push_back( roc[k] );
         std::vector<TString> vtemps = mname[k];
         gTools().UsefulSortDescending( vtemp, &vtemps );
         effArea[k]    = vtemp[0];
         eff10[k]      = vtemp[1];
         eff01[k]      = vtemp[2];
         eff30[k]      = vtemp[3];
         eff10err[k]   = vtemp[4];
         eff01err[k]   = vtemp[5];
         eff30err[k]   = vtemp[6];
         trainEff10[k] = vtemp[7];
         trainEff01[k] = vtemp[8];
         trainEff30[k] = vtemp[9];
         sig[k]        = vtemp[10];
         sep[k]        = vtemp[11];
         roc[k]        = vtemp[12];
         mname[k]      = vtemps;
      }
   }

   // -----------------------------------------------------------------------
   // Second part of evaluation process
   // --> compute correlations among MVAs
   // --> compute correlations between input variables and MVA (determines importance)
   // --> count overlaps
   // -----------------------------------------------------------------------
   
   const Int_t nmeth = methodsNoCuts.size();
   const Int_t nvar  = DefaultDataSetInfo().GetNVariables();
   if (!doRegression) {

      if (nmeth > 0) {

         // needed for correlations
         Double_t *dvec = new Double_t[nmeth+nvar];
         std::vector<Double_t> rvec;

         // for correlations
         TPrincipal* tpSig = new TPrincipal( nmeth+nvar, "" );   
         TPrincipal* tpBkg = new TPrincipal( nmeth+nvar, "" );   

         // set required tree branch references
         Int_t ivar = 0;
         std::vector<TString>* theVars = new std::vector<TString>;
         std::vector<ResultsClassification*> mvaRes;
         for (itrMethod = methodsNoCuts.begin(); itrMethod != methodsNoCuts.end(); itrMethod++, ivar++) {
            MethodBase* m = dynamic_cast<MethodBase*>(*itrMethod);
            theVars->push_back( m->GetTestvarName() );
            rvec.push_back( m->GetSignalReferenceCut() );
            theVars->back().ReplaceAll( "MVA_", "" );
            // results objects are owned by the DataSet — not deleted here
            mvaRes.push_back( dynamic_cast<ResultsClassification*>( m->Data()->GetResults( m->GetMethodName(), 
                                                                                           Types::kTesting, 
                                                                                           Types::kMaxAnalysisType) ) );
         }

         // for overlap study
         TMatrixD* overlapS = new TMatrixD( nmeth, nmeth );
         TMatrixD* overlapB = new TMatrixD( nmeth, nmeth );
         (*overlapS) *= 0; // init...
         (*overlapB) *= 0; // init...
      
         // loop over test tree      
         DataSet* defDs = DefaultDataSetInfo().GetDataSet();
         defDs->SetCurrentType(Types::kTesting);
         for (Int_t ievt=0; ievt<defDs->GetNEvents(); ievt++) {
            Event* ev = defDs->GetEvent(ievt);

            // for correlations
            TMatrixD* theMat = 0;
            for (Int_t im=0; im<nmeth; im++) {
               // check for NaN value
               Double_t retval = (Double_t)(*mvaRes[im])[ievt];               
               if (TMath::IsNaN(retval)) {
                  Log() << kWARNING << "Found NaN return value in event: " << ievt 
                        << " for method \"" << methodsNoCuts[im]->GetName() << "\"" << Endl;
                  dvec[im] = 0;
               }
               else dvec[im] = retval;
            }
            // append the input-variable values behind the method outputs
            for (Int_t iv=0; iv<nvar;  iv++) dvec[iv+nmeth]  = (Double_t)ev->GetVal(iv);
            if (DefaultDataSetInfo().IsSignal(ev)) { tpSig->AddRow( dvec ); theMat = overlapS; }
            else                                   { tpBkg->AddRow( dvec ); theMat = overlapB; }

            // count overlaps
            // two methods "overlap" on an event when both place it on the
            // same side of their respective signal reference cuts
            for (Int_t im=0; im<nmeth; im++) {
               for (Int_t jm=im; jm<nmeth; jm++) {
                  if ((dvec[im] - rvec[im])*(dvec[jm] - rvec[jm]) > 0) { 
                     (*theMat)(im,jm)++; 
                     if (im != jm) (*theMat)(jm,im)++;
                  }
               }
            }
         }

         // renormalise overlap matrix
         (*overlapS) *= (1.0/defDs->GetNEvtSigTest());  // init...
         (*overlapB) *= (1.0/defDs->GetNEvtBkgdTest()); // init...

         tpSig->MakePrincipals();
         tpBkg->MakePrincipals();

         // covariance matrices are owned by the TPrincipal objects
         const TMatrixD* covMatS = tpSig->GetCovarianceMatrix();
         const TMatrixD* covMatB = tpBkg->GetCovarianceMatrix();
   
         const TMatrixD* corrMatS = gTools().GetCorrelationMatrix( covMatS );
         const TMatrixD* corrMatB = gTools().GetCorrelationMatrix( covMatB );

         // print correlation matrices
         if (corrMatS != 0 && corrMatB != 0) {

            // extract MVA matrix
            TMatrixD mvaMatS(nmeth,nmeth);
            TMatrixD mvaMatB(nmeth,nmeth);
            for (Int_t im=0; im<nmeth; im++) {
               for (Int_t jm=0; jm<nmeth; jm++) {
                  mvaMatS(im,jm) = (*corrMatS)(im,jm);
                  mvaMatB(im,jm) = (*corrMatB)(im,jm);
               }
            }
         
            // extract variables - to MVA matrix
            std::vector<TString> theInputVars;
            TMatrixD varmvaMatS(nvar,nmeth);
            TMatrixD varmvaMatB(nvar,nmeth);
            for (Int_t iv=0; iv<nvar; iv++) {
               theInputVars.push_back( DefaultDataSetInfo().GetVariableInfo( iv ).GetLabel() );
               for (Int_t jm=0; jm<nmeth; jm++) {
                  varmvaMatS(iv,jm) = (*corrMatS)(nmeth+iv,jm);
                  varmvaMatB(iv,jm) = (*corrMatB)(nmeth+iv,jm);
               }
            }

            if (nmeth > 1) {
               Log() << kINFO << Endl;
               Log() << kINFO << "Inter-MVA correlation matrix (signal):" << Endl;
               gTools().FormattedOutput( mvaMatS, *theVars, Log() );
               Log() << kINFO << Endl;

               Log() << kINFO << "Inter-MVA correlation matrix (background):" << Endl;
               gTools().FormattedOutput( mvaMatB, *theVars, Log() );
               Log() << kINFO << Endl;   
            }

            Log() << kINFO << "Correlations between input variables and MVA response (signal):" << Endl;
            gTools().FormattedOutput( varmvaMatS, theInputVars, *theVars, Log() );
            Log() << kINFO << Endl;

            Log() << kINFO << "Correlations between input variables and MVA response (background):" << Endl;
            gTools().FormattedOutput( varmvaMatB, theInputVars, *theVars, Log() );
            Log() << kINFO << Endl;
         }
         // NOTE(review): this warning is tagged <TestAllMethods> although we
         // are in EvaluateAllMethods — the label looks copy-pasted; confirm
         // before changing the user-visible message
         else Log() << kWARNING << "<TestAllMethods> cannot compute correlation matrices" << Endl;

         // print overlap matrices
         Log() << kINFO << "The following \"overlap\" matrices contain the fraction of events for which " << Endl;
         Log() << kINFO << "the MVAs 'i' and 'j' have returned conform answers about \"signal-likeness\"" << Endl;
         Log() << kINFO << "An event is signal-like, if its MVA output exceeds the following value:" << Endl;
         gTools().FormattedOutput( rvec, *theVars, "Method" , "Cut value", Log() );
         Log() << kINFO << "which correspond to the working point: eff(signal) = 1 - eff(background)" << Endl;

         // give notice that cut method has been excluded from this test
         if (nmeth != (Int_t)fMethods.size()) 
            Log() << kINFO << "Note: no correlations and overlap with cut method are provided at present" << Endl;

         if (nmeth > 1) {
            Log() << kINFO << Endl;
            Log() << kINFO << "Inter-MVA overlap matrix (signal):" << Endl;
            gTools().FormattedOutput( *overlapS, *theVars, Log() );
            Log() << kINFO << Endl;
      
            Log() << kINFO << "Inter-MVA overlap matrix (background):" << Endl;
            gTools().FormattedOutput( *overlapB, *theVars, Log() );
         }

         // cleanup
         delete tpSig;
         delete tpBkg;
         delete corrMatS;
         delete corrMatB;
         delete theVars;
         delete overlapS;
         delete overlapB;
         delete [] dvec;
      }
   }

   // -----------------------------------------------------------------------
   // Third part of evaluation process
   // --> output
   // ----------------------------------------------------------------------- 

   if (doRegression) {

      Log() << kINFO << Endl;
      TString hLine = "-------------------------------------------------------------------------";
      Log() << kINFO << "Evaluation results ranked by smallest RMS on test sample:" << Endl;
      Log() << kINFO << "(\"Bias\" quotes the mean deviation of the regression from true target." << Endl;
      Log() << kINFO << " \"MutInf\" is the \"Mutual Information\" between regression and target." << Endl;
      Log() << kINFO << " Indicated by \"_T\" are the corresponding \"truncated\" quantities ob-" << Endl;
      Log() << kINFO << " tained when removing events deviating more than 2sigma from average.)" << Endl;
      Log() << kINFO << hLine << Endl;
      Log() << kINFO << "MVA Method:        <Bias>   <Bias_T>    RMS    RMS_T  |  MutInf MutInf_T" << Endl;
      Log() << kINFO << hLine << Endl;

      for (Int_t i=0; i<nmeth_used[0]; i++) {
         Log() << kINFO << Form("%-15s:%#9.3g%#9.3g%#9.3g%#9.3g  |  %#5.3f  %#5.3f",
                                (const char*)mname[0][i], 
                                biastest[0][i], biastestT[0][i], 
                                rmstest[0][i], rmstestT[0][i], 
                                minftest[0][i], minftestT[0][i] )
               << Endl;
      }
      Log() << kINFO << hLine << Endl;
      Log() << kINFO << Endl;
      Log() << kINFO << "Evaluation results ranked by smallest RMS on training sample:" << Endl;
      Log() << kINFO << "(overtraining check)" << Endl;
      Log() << kINFO << hLine << Endl;
      Log() << kINFO << "MVA Method:        <Bias>   <Bias_T>    RMS    RMS_T  |  MutInf MutInf_T" << Endl;
      Log() << kINFO << hLine << Endl;

      for (Int_t i=0; i<nmeth_used[0]; i++) {
         Log() << kINFO << Form("%-15s:%#9.3g%#9.3g%#9.3g%#9.3g  |  %#5.3f  %#5.3f",
                                (const char*)mname[0][i], 
                                biastrain[0][i], biastrainT[0][i], 
                                rmstrain[0][i], rmstrainT[0][i], 
                                minftrain[0][i], minftrainT[0][i] )
               << Endl;
      }
      Log() << kINFO << hLine << Endl;
      Log() << kINFO << Endl;
   }
   else {
      // classification summary tables: k==0 are real methods, k==1 are
      // input variables evaluated as classifiers
      Log() << Endl;
      TString hLine = "--------------------------------------------------------------------------------";
      Log() << kINFO << "Evaluation results ranked by best signal efficiency and purity (area)" << Endl;
      Log() << kINFO << hLine << Endl;
      Log() << kINFO << "MVA              Signal efficiency at bkg eff.(error):       | Sepa-    Signifi- "   << Endl;
      Log() << kINFO << "Method:          @B=0.01    @B=0.10    @B=0.30    ROC-integ. | ration:  cance:   "   << Endl;
      Log() << kINFO << hLine << Endl;
      for (Int_t k=0; k<2; k++) {
         if (k == 1 && nmeth_used[k] > 0) {
            Log() << kINFO << hLine << Endl;
            Log() << kINFO << "Input Variables: " << Endl << hLine << Endl;
         }
         for (Int_t i=0; i<nmeth_used[k]; i++) {
            if (k == 1) mname[k][i].ReplaceAll( "Variable_", "" );
            if (sep[k][i] < 0 || sig[k][i] < 0) {
               // cannot compute separation/significance -> no MVA (usually for Cuts)
               Log() << kINFO << Form("%-15s: %#1.3f(%#02i)  %#1.3f(%#02i)  %#1.3f(%#02i)    %#1.3f    | --       --",
                                      (const char*)mname[k][i], 
                                      eff01[k][i], Int_t(1000*eff01err[k][i]), 
                                      eff10[k][i], Int_t(1000*eff10err[k][i]), 
                                      eff30[k][i], Int_t(1000*eff30err[k][i]), 
                                      effArea[k][i]) << Endl;
            }
            else {
               Log() << kINFO << Form("%-15s: %#1.3f(%#02i)  %#1.3f(%#02i)  %#1.3f(%#02i)    %#1.3f    | %#1.3f    %#1.3f",
                                      (const char*)mname[k][i], 
                                      eff01[k][i], Int_t(1000*eff01err[k][i]), 
                                      eff10[k][i], Int_t(1000*eff10err[k][i]), 
                                      eff30[k][i], Int_t(1000*eff30err[k][i]), 
                                      effArea[k][i], 
                                      sep[k][i], sig[k][i]) << Endl;
            }
         }
      }
      Log() << kINFO << hLine << Endl;
      Log() << kINFO << Endl;
      Log() << kINFO << "Testing efficiency compared to training efficiency (overtraining check)" << Endl;
      Log() << kINFO << hLine << Endl;
      Log() << kINFO << "MVA              Signal efficiency: from test sample (from training sample) "   << Endl;
      Log() << kINFO << "Method:          @B=0.01             @B=0.10            @B=0.30   "   << Endl;
      Log() << kINFO << hLine << Endl;
      for (Int_t k=0; k<2; k++) {
         if (k == 1 && nmeth_used[k] > 0) {
            Log() << kINFO << hLine << Endl;
            Log() << kINFO << "Input Variables: " << Endl << hLine << Endl;
         }
         for (Int_t i=0; i<nmeth_used[k]; i++) {
            if (k == 1) mname[k][i].ReplaceAll( "Variable_", "" );
            Log() << kINFO << Form("%-15s: %#1.3f (%#1.3f)       %#1.3f (%#1.3f)      %#1.3f (%#1.3f)",
                                   (const char*)mname[k][i], 
                                   eff01[k][i],trainEff01[k][i], 
                                   eff10[k][i],trainEff10[k][i],
                                   eff30[k][i],trainEff30[k][i]) << Endl;
         }
      }
      Log() << kINFO << hLine << Endl;
      Log() << kINFO << Endl; 
   }

   // write test tree
   RootBaseDir()->cd();
   DefaultDataSetInfo().GetDataSet()->GetTree(Types::kTesting)->Write( "", TObject::kOverwrite );
}

 Factory.cxx:1
 Factory.cxx:2
 Factory.cxx:3
 Factory.cxx:4
 Factory.cxx:5
 Factory.cxx:6
 Factory.cxx:7
 Factory.cxx:8
 Factory.cxx:9
 Factory.cxx:10
 Factory.cxx:11
 Factory.cxx:12
 Factory.cxx:13
 Factory.cxx:14
 Factory.cxx:15
 Factory.cxx:16
 Factory.cxx:17
 Factory.cxx:18
 Factory.cxx:19
 Factory.cxx:20
 Factory.cxx:21
 Factory.cxx:22
 Factory.cxx:23
 Factory.cxx:24
 Factory.cxx:25
 Factory.cxx:26
 Factory.cxx:27
 Factory.cxx:28
 Factory.cxx:29
 Factory.cxx:30
 Factory.cxx:31
 Factory.cxx:32
 Factory.cxx:33
 Factory.cxx:34
 Factory.cxx:35
 Factory.cxx:36
 Factory.cxx:37
 Factory.cxx:38
 Factory.cxx:39
 Factory.cxx:40
 Factory.cxx:41
 Factory.cxx:42
 Factory.cxx:43
 Factory.cxx:44
 Factory.cxx:45
 Factory.cxx:46
 Factory.cxx:47
 Factory.cxx:48
 Factory.cxx:49
 Factory.cxx:50
 Factory.cxx:51
 Factory.cxx:52
 Factory.cxx:53
 Factory.cxx:54
 Factory.cxx:55
 Factory.cxx:56
 Factory.cxx:57
 Factory.cxx:58
 Factory.cxx:59
 Factory.cxx:60
 Factory.cxx:61
 Factory.cxx:62
 Factory.cxx:63
 Factory.cxx:64
 Factory.cxx:65
 Factory.cxx:66
 Factory.cxx:67
 Factory.cxx:68
 Factory.cxx:69
 Factory.cxx:70
 Factory.cxx:71
 Factory.cxx:72
 Factory.cxx:73
 Factory.cxx:74
 Factory.cxx:75
 Factory.cxx:76
 Factory.cxx:77
 Factory.cxx:78
 Factory.cxx:79
 Factory.cxx:80
 Factory.cxx:81
 Factory.cxx:82
 Factory.cxx:83
 Factory.cxx:84
 Factory.cxx:85
 Factory.cxx:86
 Factory.cxx:87
 Factory.cxx:88
 Factory.cxx:89
 Factory.cxx:90
 Factory.cxx:91
 Factory.cxx:92
 Factory.cxx:93
 Factory.cxx:94
 Factory.cxx:95
 Factory.cxx:96
 Factory.cxx:97
 Factory.cxx:98
 Factory.cxx:99
 Factory.cxx:100
 Factory.cxx:101
 Factory.cxx:102
 Factory.cxx:103
 Factory.cxx:104
 Factory.cxx:105
 Factory.cxx:106
 Factory.cxx:107
 Factory.cxx:108
 Factory.cxx:109
 Factory.cxx:110
 Factory.cxx:111
 Factory.cxx:112
 Factory.cxx:113
 Factory.cxx:114
 Factory.cxx:115
 Factory.cxx:116
 Factory.cxx:117
 Factory.cxx:118
 Factory.cxx:119
 Factory.cxx:120
 Factory.cxx:121
 Factory.cxx:122
 Factory.cxx:123
 Factory.cxx:124
 Factory.cxx:125
 Factory.cxx:126
 Factory.cxx:127
 Factory.cxx:128
 Factory.cxx:129
 Factory.cxx:130
 Factory.cxx:131
 Factory.cxx:132
 Factory.cxx:133
 Factory.cxx:134
 Factory.cxx:135
 Factory.cxx:136
 Factory.cxx:137
 Factory.cxx:138
 Factory.cxx:139
 Factory.cxx:140
 Factory.cxx:141
 Factory.cxx:142
 Factory.cxx:143
 Factory.cxx:144
 Factory.cxx:145
 Factory.cxx:146
 Factory.cxx:147
 Factory.cxx:148
 Factory.cxx:149
 Factory.cxx:150
 Factory.cxx:151
 Factory.cxx:152
 Factory.cxx:153
 Factory.cxx:154
 Factory.cxx:155
 Factory.cxx:156
 Factory.cxx:157
 Factory.cxx:158
 Factory.cxx:159
 Factory.cxx:160
 Factory.cxx:161
 Factory.cxx:162
 Factory.cxx:163
 Factory.cxx:164
 Factory.cxx:165
 Factory.cxx:166
 Factory.cxx:167
 Factory.cxx:168
 Factory.cxx:169
 Factory.cxx:170
 Factory.cxx:171
 Factory.cxx:172
 Factory.cxx:173
 Factory.cxx:174
 Factory.cxx:175
 Factory.cxx:176
 Factory.cxx:177
 Factory.cxx:178
 Factory.cxx:179
 Factory.cxx:180
 Factory.cxx:181
 Factory.cxx:182
 Factory.cxx:183
 Factory.cxx:184
 Factory.cxx:185
 Factory.cxx:186
 Factory.cxx:187
 Factory.cxx:188
 Factory.cxx:189
 Factory.cxx:190
 Factory.cxx:191
 Factory.cxx:192
 Factory.cxx:193
 Factory.cxx:194
 Factory.cxx:195
 Factory.cxx:196
 Factory.cxx:197
 Factory.cxx:198
 Factory.cxx:199
 Factory.cxx:200
 Factory.cxx:201
 Factory.cxx:202
 Factory.cxx:203
 Factory.cxx:204
 Factory.cxx:205
 Factory.cxx:206
 Factory.cxx:207
 Factory.cxx:208
 Factory.cxx:209
 Factory.cxx:210
 Factory.cxx:211
 Factory.cxx:212
 Factory.cxx:213
 Factory.cxx:214
 Factory.cxx:215
 Factory.cxx:216
 Factory.cxx:217
 Factory.cxx:218
 Factory.cxx:219
 Factory.cxx:220
 Factory.cxx:221
 Factory.cxx:222
 Factory.cxx:223
 Factory.cxx:224
 Factory.cxx:225
 Factory.cxx:226
 Factory.cxx:227
 Factory.cxx:228
 Factory.cxx:229
 Factory.cxx:230
 Factory.cxx:231
 Factory.cxx:232
 Factory.cxx:233
 Factory.cxx:234
 Factory.cxx:235
 Factory.cxx:236
 Factory.cxx:237
 Factory.cxx:238
 Factory.cxx:239
 Factory.cxx:240
 Factory.cxx:241
 Factory.cxx:242
 Factory.cxx:243
 Factory.cxx:244
 Factory.cxx:245
 Factory.cxx:246
 Factory.cxx:247
 Factory.cxx:248
 Factory.cxx:249
 Factory.cxx:250
 Factory.cxx:251
 Factory.cxx:252
 Factory.cxx:253
 Factory.cxx:254
 Factory.cxx:255
 Factory.cxx:256
 Factory.cxx:257
 Factory.cxx:258
 Factory.cxx:259
 Factory.cxx:260
 Factory.cxx:261
 Factory.cxx:262
 Factory.cxx:263
 Factory.cxx:264
 Factory.cxx:265
 Factory.cxx:266
 Factory.cxx:267
 Factory.cxx:268
 Factory.cxx:269
 Factory.cxx:270
 Factory.cxx:271
 Factory.cxx:272
 Factory.cxx:273
 Factory.cxx:274
 Factory.cxx:275
 Factory.cxx:276
 Factory.cxx:277
 Factory.cxx:278
 Factory.cxx:279
 Factory.cxx:280
 Factory.cxx:281
 Factory.cxx:282
 Factory.cxx:283
 Factory.cxx:284
 Factory.cxx:285
 Factory.cxx:286
 Factory.cxx:287
 Factory.cxx:288
 Factory.cxx:289
 Factory.cxx:290
 Factory.cxx:291
 Factory.cxx:292
 Factory.cxx:293
 Factory.cxx:294
 Factory.cxx:295
 Factory.cxx:296
 Factory.cxx:297
 Factory.cxx:298
 Factory.cxx:299
 Factory.cxx:300
 Factory.cxx:301
 Factory.cxx:302
 Factory.cxx:303
 Factory.cxx:304
 Factory.cxx:305
 Factory.cxx:306
 Factory.cxx:307
 Factory.cxx:308
 Factory.cxx:309
 Factory.cxx:310
 Factory.cxx:311
 Factory.cxx:312
 Factory.cxx:313
 Factory.cxx:314
 Factory.cxx:315
 Factory.cxx:316
 Factory.cxx:317
 Factory.cxx:318
 Factory.cxx:319
 Factory.cxx:320
 Factory.cxx:321
 Factory.cxx:322
 Factory.cxx:323
 Factory.cxx:324
 Factory.cxx:325
 Factory.cxx:326
 Factory.cxx:327
 Factory.cxx:328
 Factory.cxx:329
 Factory.cxx:330
 Factory.cxx:331
 Factory.cxx:332
 Factory.cxx:333
 Factory.cxx:334
 Factory.cxx:335
 Factory.cxx:336
 Factory.cxx:337
 Factory.cxx:338
 Factory.cxx:339
 Factory.cxx:340
 Factory.cxx:341
 Factory.cxx:342
 Factory.cxx:343
 Factory.cxx:344
 Factory.cxx:345
 Factory.cxx:346
 Factory.cxx:347
 Factory.cxx:348
 Factory.cxx:349
 Factory.cxx:350
 Factory.cxx:351
 Factory.cxx:352
 Factory.cxx:353
 Factory.cxx:354
 Factory.cxx:355
 Factory.cxx:356
 Factory.cxx:357
 Factory.cxx:358
 Factory.cxx:359
 Factory.cxx:360
 Factory.cxx:361
 Factory.cxx:362
 Factory.cxx:363
 Factory.cxx:364
 Factory.cxx:365
 Factory.cxx:366
 Factory.cxx:367
 Factory.cxx:368
 Factory.cxx:369
 Factory.cxx:370
 Factory.cxx:371
 Factory.cxx:372
 Factory.cxx:373
 Factory.cxx:374
 Factory.cxx:375
 Factory.cxx:376
 Factory.cxx:377
 Factory.cxx:378
 Factory.cxx:379
 Factory.cxx:380
 Factory.cxx:381
 Factory.cxx:382
 Factory.cxx:383
 Factory.cxx:384
 Factory.cxx:385
 Factory.cxx:386
 Factory.cxx:387
 Factory.cxx:388
 Factory.cxx:389
 Factory.cxx:390
 Factory.cxx:391
 Factory.cxx:392
 Factory.cxx:393
 Factory.cxx:394
 Factory.cxx:395
 Factory.cxx:396
 Factory.cxx:397
 Factory.cxx:398
 Factory.cxx:399
 Factory.cxx:400
 Factory.cxx:401
 Factory.cxx:402
 Factory.cxx:403
 Factory.cxx:404
 Factory.cxx:405
 Factory.cxx:406
 Factory.cxx:407
 Factory.cxx:408
 Factory.cxx:409
 Factory.cxx:410
 Factory.cxx:411
 Factory.cxx:412
 Factory.cxx:413
 Factory.cxx:414
 Factory.cxx:415
 Factory.cxx:416
 Factory.cxx:417
 Factory.cxx:418
 Factory.cxx:419
 Factory.cxx:420
 Factory.cxx:421
 Factory.cxx:422
 Factory.cxx:423
 Factory.cxx:424
 Factory.cxx:425
 Factory.cxx:426
 Factory.cxx:427
 Factory.cxx:428
 Factory.cxx:429
 Factory.cxx:430
 Factory.cxx:431
 Factory.cxx:432
 Factory.cxx:433
 Factory.cxx:434
 Factory.cxx:435
 Factory.cxx:436
 Factory.cxx:437
 Factory.cxx:438
 Factory.cxx:439
 Factory.cxx:440
 Factory.cxx:441
 Factory.cxx:442
 Factory.cxx:443
 Factory.cxx:444
 Factory.cxx:445
 Factory.cxx:446
 Factory.cxx:447
 Factory.cxx:448
 Factory.cxx:449
 Factory.cxx:450
 Factory.cxx:451
 Factory.cxx:452
 Factory.cxx:453
 Factory.cxx:454
 Factory.cxx:455
 Factory.cxx:456
 Factory.cxx:457
 Factory.cxx:458
 Factory.cxx:459
 Factory.cxx:460
 Factory.cxx:461
 Factory.cxx:462
 Factory.cxx:463
 Factory.cxx:464
 Factory.cxx:465
 Factory.cxx:466
 Factory.cxx:467
 Factory.cxx:468
 Factory.cxx:469
 Factory.cxx:470
 Factory.cxx:471
 Factory.cxx:472
 Factory.cxx:473
 Factory.cxx:474
 Factory.cxx:475
 Factory.cxx:476
 Factory.cxx:477
 Factory.cxx:478
 Factory.cxx:479
 Factory.cxx:480
 Factory.cxx:481
 Factory.cxx:482
 Factory.cxx:483
 Factory.cxx:484
 Factory.cxx:485
 Factory.cxx:486
 Factory.cxx:487
 Factory.cxx:488
 Factory.cxx:489
 Factory.cxx:490
 Factory.cxx:491
 Factory.cxx:492
 Factory.cxx:493
 Factory.cxx:494
 Factory.cxx:495
 Factory.cxx:496
 Factory.cxx:497
 Factory.cxx:498
 Factory.cxx:499
 Factory.cxx:500
 Factory.cxx:501
 Factory.cxx:502
 Factory.cxx:503
 Factory.cxx:504
 Factory.cxx:505
 Factory.cxx:506
 Factory.cxx:507
 Factory.cxx:508
 Factory.cxx:509
 Factory.cxx:510
 Factory.cxx:511
 Factory.cxx:512
 Factory.cxx:513
 Factory.cxx:514
 Factory.cxx:515
 Factory.cxx:516
 Factory.cxx:517
 Factory.cxx:518
 Factory.cxx:519
 Factory.cxx:520
 Factory.cxx:521
 Factory.cxx:522
 Factory.cxx:523
 Factory.cxx:524
 Factory.cxx:525
 Factory.cxx:526
 Factory.cxx:527
 Factory.cxx:528
 Factory.cxx:529
 Factory.cxx:530
 Factory.cxx:531
 Factory.cxx:532
 Factory.cxx:533
 Factory.cxx:534
 Factory.cxx:535
 Factory.cxx:536
 Factory.cxx:537
 Factory.cxx:538
 Factory.cxx:539
 Factory.cxx:540
 Factory.cxx:541
 Factory.cxx:542
 Factory.cxx:543
 Factory.cxx:544
 Factory.cxx:545
 Factory.cxx:546
 Factory.cxx:547
 Factory.cxx:548
 Factory.cxx:549
 Factory.cxx:550
 Factory.cxx:551
 Factory.cxx:552
 Factory.cxx:553
 Factory.cxx:554
 Factory.cxx:555
 Factory.cxx:556
 Factory.cxx:557
 Factory.cxx:558
 Factory.cxx:559
 Factory.cxx:560
 Factory.cxx:561
 Factory.cxx:562
 Factory.cxx:563
 Factory.cxx:564
 Factory.cxx:565
 Factory.cxx:566
 Factory.cxx:567
 Factory.cxx:568
 Factory.cxx:569
 Factory.cxx:570
 Factory.cxx:571
 Factory.cxx:572
 Factory.cxx:573
 Factory.cxx:574
 Factory.cxx:575
 Factory.cxx:576
 Factory.cxx:577
 Factory.cxx:578
 Factory.cxx:579
 Factory.cxx:580
 Factory.cxx:581
 Factory.cxx:582
 Factory.cxx:583
 Factory.cxx:584
 Factory.cxx:585
 Factory.cxx:586
 Factory.cxx:587
 Factory.cxx:588
 Factory.cxx:589
 Factory.cxx:590
 Factory.cxx:591
 Factory.cxx:592
 Factory.cxx:593
 Factory.cxx:594
 Factory.cxx:595
 Factory.cxx:596
 Factory.cxx:597
 Factory.cxx:598
 Factory.cxx:599
 Factory.cxx:600
 Factory.cxx:601
 Factory.cxx:602
 Factory.cxx:603
 Factory.cxx:604
 Factory.cxx:605
 Factory.cxx:606
 Factory.cxx:607
 Factory.cxx:608
 Factory.cxx:609
 Factory.cxx:610
 Factory.cxx:611
 Factory.cxx:612
 Factory.cxx:613
 Factory.cxx:614
 Factory.cxx:615
 Factory.cxx:616
 Factory.cxx:617
 Factory.cxx:618
 Factory.cxx:619
 Factory.cxx:620
 Factory.cxx:621
 Factory.cxx:622
 Factory.cxx:623
 Factory.cxx:624
 Factory.cxx:625
 Factory.cxx:626
 Factory.cxx:627
 Factory.cxx:628
 Factory.cxx:629
 Factory.cxx:630
 Factory.cxx:631
 Factory.cxx:632
 Factory.cxx:633
 Factory.cxx:634
 Factory.cxx:635
 Factory.cxx:636
 Factory.cxx:637
 Factory.cxx:638
 Factory.cxx:639
 Factory.cxx:640
 Factory.cxx:641
 Factory.cxx:642
 Factory.cxx:643
 Factory.cxx:644
 Factory.cxx:645
 Factory.cxx:646
 Factory.cxx:647
 Factory.cxx:648
 Factory.cxx:649
 Factory.cxx:650
 Factory.cxx:651
 Factory.cxx:652
 Factory.cxx:653
 Factory.cxx:654
 Factory.cxx:655
 Factory.cxx:656
 Factory.cxx:657
 Factory.cxx:658
 Factory.cxx:659
 Factory.cxx:660
 Factory.cxx:661
 Factory.cxx:662
 Factory.cxx:663
 Factory.cxx:664
 Factory.cxx:665
 Factory.cxx:666
 Factory.cxx:667
 Factory.cxx:668
 Factory.cxx:669
 Factory.cxx:670
 Factory.cxx:671
 Factory.cxx:672
 Factory.cxx:673
 Factory.cxx:674
 Factory.cxx:675
 Factory.cxx:676
 Factory.cxx:677
 Factory.cxx:678
 Factory.cxx:679
 Factory.cxx:680
 Factory.cxx:681
 Factory.cxx:682
 Factory.cxx:683
 Factory.cxx:684
 Factory.cxx:685
 Factory.cxx:686
 Factory.cxx:687
 Factory.cxx:688
 Factory.cxx:689
 Factory.cxx:690
 Factory.cxx:691
 Factory.cxx:692
 Factory.cxx:693
 Factory.cxx:694
 Factory.cxx:695
 Factory.cxx:696
 Factory.cxx:697
 Factory.cxx:698
 Factory.cxx:699
 Factory.cxx:700
 Factory.cxx:701
 Factory.cxx:702
 Factory.cxx:703
 Factory.cxx:704
 Factory.cxx:705
 Factory.cxx:706
 Factory.cxx:707
 Factory.cxx:708
 Factory.cxx:709
 Factory.cxx:710
 Factory.cxx:711
 Factory.cxx:712
 Factory.cxx:713
 Factory.cxx:714
 Factory.cxx:715
 Factory.cxx:716
 Factory.cxx:717
 Factory.cxx:718
 Factory.cxx:719
 Factory.cxx:720
 Factory.cxx:721
 Factory.cxx:722
 Factory.cxx:723
 Factory.cxx:724
 Factory.cxx:725
 Factory.cxx:726
 Factory.cxx:727
 Factory.cxx:728
 Factory.cxx:729
 Factory.cxx:730
 Factory.cxx:731
 Factory.cxx:732
 Factory.cxx:733
 Factory.cxx:734
 Factory.cxx:735
 Factory.cxx:736
 Factory.cxx:737
 Factory.cxx:738
 Factory.cxx:739
 Factory.cxx:740
 Factory.cxx:741
 Factory.cxx:742
 Factory.cxx:743
 Factory.cxx:744
 Factory.cxx:745
 Factory.cxx:746
 Factory.cxx:747
 Factory.cxx:748
 Factory.cxx:749
 Factory.cxx:750
 Factory.cxx:751
 Factory.cxx:752
 Factory.cxx:753
 Factory.cxx:754
 Factory.cxx:755
 Factory.cxx:756
 Factory.cxx:757
 Factory.cxx:758
 Factory.cxx:759
 Factory.cxx:760
 Factory.cxx:761
 Factory.cxx:762
 Factory.cxx:763
 Factory.cxx:764
 Factory.cxx:765
 Factory.cxx:766
 Factory.cxx:767
 Factory.cxx:768
 Factory.cxx:769
 Factory.cxx:770
 Factory.cxx:771
 Factory.cxx:772
 Factory.cxx:773
 Factory.cxx:774
 Factory.cxx:775
 Factory.cxx:776
 Factory.cxx:777
 Factory.cxx:778
 Factory.cxx:779
 Factory.cxx:780
 Factory.cxx:781
 Factory.cxx:782
 Factory.cxx:783
 Factory.cxx:784
 Factory.cxx:785
 Factory.cxx:786
 Factory.cxx:787
 Factory.cxx:788
 Factory.cxx:789
 Factory.cxx:790
 Factory.cxx:791
 Factory.cxx:792
 Factory.cxx:793
 Factory.cxx:794
 Factory.cxx:795
 Factory.cxx:796
 Factory.cxx:797
 Factory.cxx:798
 Factory.cxx:799
 Factory.cxx:800
 Factory.cxx:801
 Factory.cxx:802
 Factory.cxx:803
 Factory.cxx:804
 Factory.cxx:805
 Factory.cxx:806
 Factory.cxx:807
 Factory.cxx:808
 Factory.cxx:809
 Factory.cxx:810
 Factory.cxx:811
 Factory.cxx:812
 Factory.cxx:813
 Factory.cxx:814
 Factory.cxx:815
 Factory.cxx:816
 Factory.cxx:817
 Factory.cxx:818
 Factory.cxx:819
 Factory.cxx:820
 Factory.cxx:821
 Factory.cxx:822
 Factory.cxx:823
 Factory.cxx:824
 Factory.cxx:825
 Factory.cxx:826
 Factory.cxx:827
 Factory.cxx:828
 Factory.cxx:829
 Factory.cxx:830
 Factory.cxx:831
 Factory.cxx:832
 Factory.cxx:833
 Factory.cxx:834
 Factory.cxx:835
 Factory.cxx:836
 Factory.cxx:837
 Factory.cxx:838
 Factory.cxx:839
 Factory.cxx:840
 Factory.cxx:841
 Factory.cxx:842
 Factory.cxx:843
 Factory.cxx:844
 Factory.cxx:845
 Factory.cxx:846
 Factory.cxx:847
 Factory.cxx:848
 Factory.cxx:849
 Factory.cxx:850
 Factory.cxx:851
 Factory.cxx:852
 Factory.cxx:853
 Factory.cxx:854
 Factory.cxx:855
 Factory.cxx:856
 Factory.cxx:857
 Factory.cxx:858
 Factory.cxx:859
 Factory.cxx:860
 Factory.cxx:861
 Factory.cxx:862
 Factory.cxx:863
 Factory.cxx:864
 Factory.cxx:865
 Factory.cxx:866
 Factory.cxx:867
 Factory.cxx:868
 Factory.cxx:869
 Factory.cxx:870
 Factory.cxx:871
 Factory.cxx:872
 Factory.cxx:873
 Factory.cxx:874
 Factory.cxx:875
 Factory.cxx:876
 Factory.cxx:877
 Factory.cxx:878
 Factory.cxx:879
 Factory.cxx:880
 Factory.cxx:881
 Factory.cxx:882
 Factory.cxx:883
 Factory.cxx:884
 Factory.cxx:885
 Factory.cxx:886
 Factory.cxx:887
 Factory.cxx:888
 Factory.cxx:889
 Factory.cxx:890
 Factory.cxx:891
 Factory.cxx:892
 Factory.cxx:893
 Factory.cxx:894
 Factory.cxx:895
 Factory.cxx:896
 Factory.cxx:897
 Factory.cxx:898
 Factory.cxx:899
 Factory.cxx:900
 Factory.cxx:901
 Factory.cxx:902
 Factory.cxx:903
 Factory.cxx:904
 Factory.cxx:905
 Factory.cxx:906
 Factory.cxx:907
 Factory.cxx:908
 Factory.cxx:909
 Factory.cxx:910
 Factory.cxx:911
 Factory.cxx:912
 Factory.cxx:913
 Factory.cxx:914
 Factory.cxx:915
 Factory.cxx:916
 Factory.cxx:917
 Factory.cxx:918
 Factory.cxx:919
 Factory.cxx:920
 Factory.cxx:921
 Factory.cxx:922
 Factory.cxx:923
 Factory.cxx:924
 Factory.cxx:925
 Factory.cxx:926
 Factory.cxx:927
 Factory.cxx:928
 Factory.cxx:929
 Factory.cxx:930
 Factory.cxx:931
 Factory.cxx:932
 Factory.cxx:933
 Factory.cxx:934
 Factory.cxx:935
 Factory.cxx:936
 Factory.cxx:937
 Factory.cxx:938
 Factory.cxx:939
 Factory.cxx:940
 Factory.cxx:941
 Factory.cxx:942
 Factory.cxx:943
 Factory.cxx:944
 Factory.cxx:945
 Factory.cxx:946
 Factory.cxx:947
 Factory.cxx:948
 Factory.cxx:949
 Factory.cxx:950
 Factory.cxx:951
 Factory.cxx:952
 Factory.cxx:953
 Factory.cxx:954
 Factory.cxx:955
 Factory.cxx:956
 Factory.cxx:957
 Factory.cxx:958
 Factory.cxx:959
 Factory.cxx:960
 Factory.cxx:961
 Factory.cxx:962
 Factory.cxx:963
 Factory.cxx:964
 Factory.cxx:965
 Factory.cxx:966
 Factory.cxx:967
 Factory.cxx:968
 Factory.cxx:969
 Factory.cxx:970
 Factory.cxx:971
 Factory.cxx:972
 Factory.cxx:973
 Factory.cxx:974
 Factory.cxx:975
 Factory.cxx:976
 Factory.cxx:977
 Factory.cxx:978
 Factory.cxx:979
 Factory.cxx:980
 Factory.cxx:981
 Factory.cxx:982
 Factory.cxx:983
 Factory.cxx:984
 Factory.cxx:985
 Factory.cxx:986
 Factory.cxx:987
 Factory.cxx:988
 Factory.cxx:989
 Factory.cxx:990
 Factory.cxx:991
 Factory.cxx:992
 Factory.cxx:993
 Factory.cxx:994
 Factory.cxx:995
 Factory.cxx:996
 Factory.cxx:997
 Factory.cxx:998
 Factory.cxx:999
 Factory.cxx:1000
 Factory.cxx:1001
 Factory.cxx:1002
 Factory.cxx:1003
 Factory.cxx:1004
 Factory.cxx:1005
 Factory.cxx:1006
 Factory.cxx:1007
 Factory.cxx:1008
 Factory.cxx:1009
 Factory.cxx:1010
 Factory.cxx:1011
 Factory.cxx:1012
 Factory.cxx:1013
 Factory.cxx:1014
 Factory.cxx:1015
 Factory.cxx:1016
 Factory.cxx:1017
 Factory.cxx:1018
 Factory.cxx:1019
 Factory.cxx:1020
 Factory.cxx:1021
 Factory.cxx:1022
 Factory.cxx:1023
 Factory.cxx:1024
 Factory.cxx:1025
 Factory.cxx:1026
 Factory.cxx:1027
 Factory.cxx:1028
 Factory.cxx:1029
 Factory.cxx:1030
 Factory.cxx:1031
 Factory.cxx:1032
 Factory.cxx:1033
 Factory.cxx:1034
 Factory.cxx:1035
 Factory.cxx:1036
 Factory.cxx:1037
 Factory.cxx:1038
 Factory.cxx:1039
 Factory.cxx:1040
 Factory.cxx:1041
 Factory.cxx:1042
 Factory.cxx:1043
 Factory.cxx:1044
 Factory.cxx:1045
 Factory.cxx:1046
 Factory.cxx:1047
 Factory.cxx:1048
 Factory.cxx:1049
 Factory.cxx:1050
 Factory.cxx:1051
 Factory.cxx:1052
 Factory.cxx:1053
 Factory.cxx:1054
 Factory.cxx:1055
 Factory.cxx:1056
 Factory.cxx:1057
 Factory.cxx:1058
 Factory.cxx:1059
 Factory.cxx:1060
 Factory.cxx:1061
 Factory.cxx:1062
 Factory.cxx:1063
 Factory.cxx:1064
 Factory.cxx:1065
 Factory.cxx:1066
 Factory.cxx:1067
 Factory.cxx:1068
 Factory.cxx:1069
 Factory.cxx:1070
 Factory.cxx:1071
 Factory.cxx:1072
 Factory.cxx:1073
 Factory.cxx:1074
 Factory.cxx:1075
 Factory.cxx:1076
 Factory.cxx:1077
 Factory.cxx:1078
 Factory.cxx:1079
 Factory.cxx:1080
 Factory.cxx:1081
 Factory.cxx:1082
 Factory.cxx:1083
 Factory.cxx:1084
 Factory.cxx:1085
 Factory.cxx:1086
 Factory.cxx:1087
 Factory.cxx:1088
 Factory.cxx:1089
 Factory.cxx:1090
 Factory.cxx:1091
 Factory.cxx:1092
 Factory.cxx:1093
 Factory.cxx:1094
 Factory.cxx:1095
 Factory.cxx:1096
 Factory.cxx:1097
 Factory.cxx:1098
 Factory.cxx:1099
 Factory.cxx:1100
 Factory.cxx:1101
 Factory.cxx:1102
 Factory.cxx:1103
 Factory.cxx:1104
 Factory.cxx:1105
 Factory.cxx:1106
 Factory.cxx:1107
 Factory.cxx:1108
 Factory.cxx:1109
 Factory.cxx:1110
 Factory.cxx:1111
 Factory.cxx:1112
 Factory.cxx:1113
 Factory.cxx:1114
 Factory.cxx:1115
 Factory.cxx:1116
 Factory.cxx:1117
 Factory.cxx:1118
 Factory.cxx:1119
 Factory.cxx:1120
 Factory.cxx:1121
 Factory.cxx:1122
 Factory.cxx:1123
 Factory.cxx:1124
 Factory.cxx:1125
 Factory.cxx:1126
 Factory.cxx:1127
 Factory.cxx:1128
 Factory.cxx:1129
 Factory.cxx:1130
 Factory.cxx:1131
 Factory.cxx:1132
 Factory.cxx:1133
 Factory.cxx:1134
 Factory.cxx:1135
 Factory.cxx:1136
 Factory.cxx:1137
 Factory.cxx:1138
 Factory.cxx:1139
 Factory.cxx:1140
 Factory.cxx:1141
 Factory.cxx:1142
 Factory.cxx:1143
 Factory.cxx:1144
 Factory.cxx:1145
 Factory.cxx:1146
 Factory.cxx:1147
 Factory.cxx:1148
 Factory.cxx:1149
 Factory.cxx:1150
 Factory.cxx:1151
 Factory.cxx:1152
 Factory.cxx:1153
 Factory.cxx:1154
 Factory.cxx:1155
 Factory.cxx:1156
 Factory.cxx:1157
 Factory.cxx:1158
 Factory.cxx:1159
 Factory.cxx:1160
 Factory.cxx:1161
 Factory.cxx:1162
 Factory.cxx:1163
 Factory.cxx:1164
 Factory.cxx:1165
 Factory.cxx:1166
 Factory.cxx:1167
 Factory.cxx:1168
 Factory.cxx:1169
 Factory.cxx:1170
 Factory.cxx:1171
 Factory.cxx:1172
 Factory.cxx:1173
 Factory.cxx:1174
 Factory.cxx:1175
 Factory.cxx:1176
 Factory.cxx:1177
 Factory.cxx:1178
 Factory.cxx:1179
 Factory.cxx:1180
 Factory.cxx:1181
 Factory.cxx:1182
 Factory.cxx:1183
 Factory.cxx:1184
 Factory.cxx:1185
 Factory.cxx:1186
 Factory.cxx:1187
 Factory.cxx:1188
 Factory.cxx:1189
 Factory.cxx:1190
 Factory.cxx:1191
 Factory.cxx:1192
 Factory.cxx:1193
 Factory.cxx:1194
 Factory.cxx:1195
 Factory.cxx:1196
 Factory.cxx:1197
 Factory.cxx:1198
 Factory.cxx:1199
 Factory.cxx:1200
 Factory.cxx:1201
 Factory.cxx:1202
 Factory.cxx:1203
 Factory.cxx:1204
 Factory.cxx:1205
 Factory.cxx:1206
 Factory.cxx:1207
 Factory.cxx:1208
 Factory.cxx:1209
 Factory.cxx:1210
 Factory.cxx:1211
 Factory.cxx:1212
 Factory.cxx:1213
 Factory.cxx:1214
 Factory.cxx:1215
 Factory.cxx:1216
 Factory.cxx:1217
 Factory.cxx:1218
 Factory.cxx:1219
 Factory.cxx:1220
 Factory.cxx:1221
 Factory.cxx:1222
 Factory.cxx:1223
 Factory.cxx:1224
 Factory.cxx:1225
 Factory.cxx:1226
 Factory.cxx:1227
 Factory.cxx:1228
 Factory.cxx:1229
 Factory.cxx:1230
 Factory.cxx:1231
 Factory.cxx:1232
 Factory.cxx:1233
 Factory.cxx:1234
 Factory.cxx:1235
 Factory.cxx:1236
 Factory.cxx:1237
 Factory.cxx:1238
 Factory.cxx:1239
 Factory.cxx:1240
 Factory.cxx:1241
 Factory.cxx:1242
 Factory.cxx:1243
 Factory.cxx:1244
 Factory.cxx:1245
 Factory.cxx:1246
 Factory.cxx:1247
 Factory.cxx:1248
 Factory.cxx:1249
 Factory.cxx:1250
 Factory.cxx:1251
 Factory.cxx:1252
 Factory.cxx:1253
 Factory.cxx:1254
 Factory.cxx:1255
 Factory.cxx:1256
 Factory.cxx:1257
 Factory.cxx:1258
 Factory.cxx:1259
 Factory.cxx:1260
 Factory.cxx:1261
 Factory.cxx:1262
 Factory.cxx:1263
 Factory.cxx:1264
 Factory.cxx:1265
 Factory.cxx:1266
 Factory.cxx:1267
 Factory.cxx:1268
 Factory.cxx:1269
 Factory.cxx:1270
 Factory.cxx:1271
 Factory.cxx:1272
 Factory.cxx:1273
 Factory.cxx:1274
 Factory.cxx:1275
 Factory.cxx:1276
 Factory.cxx:1277
 Factory.cxx:1278
 Factory.cxx:1279
 Factory.cxx:1280
 Factory.cxx:1281
 Factory.cxx:1282
 Factory.cxx:1283
 Factory.cxx:1284
 Factory.cxx:1285
 Factory.cxx:1286
 Factory.cxx:1287
 Factory.cxx:1288
 Factory.cxx:1289
 Factory.cxx:1290
 Factory.cxx:1291
 Factory.cxx:1292
 Factory.cxx:1293
 Factory.cxx:1294
 Factory.cxx:1295
 Factory.cxx:1296
 Factory.cxx:1297
 Factory.cxx:1298
 Factory.cxx:1299
 Factory.cxx:1300
 Factory.cxx:1301
 Factory.cxx:1302
 Factory.cxx:1303
 Factory.cxx:1304
 Factory.cxx:1305
 Factory.cxx:1306
 Factory.cxx:1307
 Factory.cxx:1308
 Factory.cxx:1309
 Factory.cxx:1310
 Factory.cxx:1311
 Factory.cxx:1312
 Factory.cxx:1313
 Factory.cxx:1314
 Factory.cxx:1315
 Factory.cxx:1316
 Factory.cxx:1317
 Factory.cxx:1318
 Factory.cxx:1319
 Factory.cxx:1320
 Factory.cxx:1321
 Factory.cxx:1322
 Factory.cxx:1323
 Factory.cxx:1324
 Factory.cxx:1325
 Factory.cxx:1326
 Factory.cxx:1327
 Factory.cxx:1328
 Factory.cxx:1329
 Factory.cxx:1330
 Factory.cxx:1331
 Factory.cxx:1332
 Factory.cxx:1333
 Factory.cxx:1334
 Factory.cxx:1335
 Factory.cxx:1336
 Factory.cxx:1337
 Factory.cxx:1338
 Factory.cxx:1339
 Factory.cxx:1340
 Factory.cxx:1341
 Factory.cxx:1342
 Factory.cxx:1343
 Factory.cxx:1344
 Factory.cxx:1345
 Factory.cxx:1346
 Factory.cxx:1347
 Factory.cxx:1348
 Factory.cxx:1349
 Factory.cxx:1350
 Factory.cxx:1351
 Factory.cxx:1352
 Factory.cxx:1353
 Factory.cxx:1354
 Factory.cxx:1355
 Factory.cxx:1356
 Factory.cxx:1357
 Factory.cxx:1358
 Factory.cxx:1359
 Factory.cxx:1360
 Factory.cxx:1361
 Factory.cxx:1362
 Factory.cxx:1363
 Factory.cxx:1364
 Factory.cxx:1365
 Factory.cxx:1366
 Factory.cxx:1367
 Factory.cxx:1368
 Factory.cxx:1369
 Factory.cxx:1370
 Factory.cxx:1371
 Factory.cxx:1372
 Factory.cxx:1373
 Factory.cxx:1374
 Factory.cxx:1375
 Factory.cxx:1376
 Factory.cxx:1377
 Factory.cxx:1378
 Factory.cxx:1379
 Factory.cxx:1380
 Factory.cxx:1381
 Factory.cxx:1382
 Factory.cxx:1383
 Factory.cxx:1384
 Factory.cxx:1385
 Factory.cxx:1386
 Factory.cxx:1387
 Factory.cxx:1388
 Factory.cxx:1389
 Factory.cxx:1390
 Factory.cxx:1391
 Factory.cxx:1392
 Factory.cxx:1393
 Factory.cxx:1394
 Factory.cxx:1395
 Factory.cxx:1396
 Factory.cxx:1397
 Factory.cxx:1398
 Factory.cxx:1399
 Factory.cxx:1400
 Factory.cxx:1401
 Factory.cxx:1402
 Factory.cxx:1403
 Factory.cxx:1404
 Factory.cxx:1405
 Factory.cxx:1406
 Factory.cxx:1407
 Factory.cxx:1408
 Factory.cxx:1409
 Factory.cxx:1410
 Factory.cxx:1411
 Factory.cxx:1412
 Factory.cxx:1413
 Factory.cxx:1414
 Factory.cxx:1415
 Factory.cxx:1416
 Factory.cxx:1417
 Factory.cxx:1418
 Factory.cxx:1419
 Factory.cxx:1420
 Factory.cxx:1421
 Factory.cxx:1422
 Factory.cxx:1423
 Factory.cxx:1424
 Factory.cxx:1425
 Factory.cxx:1426
 Factory.cxx:1427
 Factory.cxx:1428
 Factory.cxx:1429
 Factory.cxx:1430
 Factory.cxx:1431
 Factory.cxx:1432
 Factory.cxx:1433
 Factory.cxx:1434
 Factory.cxx:1435
 Factory.cxx:1436
 Factory.cxx:1437
 Factory.cxx:1438
 Factory.cxx:1439
 Factory.cxx:1440
 Factory.cxx:1441
 Factory.cxx:1442
 Factory.cxx:1443
 Factory.cxx:1444
 Factory.cxx:1445
 Factory.cxx:1446
 Factory.cxx:1447
 Factory.cxx:1448
 Factory.cxx:1449
 Factory.cxx:1450
 Factory.cxx:1451
 Factory.cxx:1452
 Factory.cxx:1453
 Factory.cxx:1454
 Factory.cxx:1455
 Factory.cxx:1456
 Factory.cxx:1457
 Factory.cxx:1458
 Factory.cxx:1459
 Factory.cxx:1460
 Factory.cxx:1461
 Factory.cxx:1462
 Factory.cxx:1463
 Factory.cxx:1464
 Factory.cxx:1465
 Factory.cxx:1466
 Factory.cxx:1467
 Factory.cxx:1468
 Factory.cxx:1469
 Factory.cxx:1470
 Factory.cxx:1471
 Factory.cxx:1472
 Factory.cxx:1473
 Factory.cxx:1474
 Factory.cxx:1475
 Factory.cxx:1476
 Factory.cxx:1477
 Factory.cxx:1478
 Factory.cxx:1479
 Factory.cxx:1480
 Factory.cxx:1481
 Factory.cxx:1482
 Factory.cxx:1483
 Factory.cxx:1484
 Factory.cxx:1485
 Factory.cxx:1486
 Factory.cxx:1487
 Factory.cxx:1488
 Factory.cxx:1489
 Factory.cxx:1490