// @(#)root/tmva $Id$
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss, Eckhard von Toerne, Jan Therhaag

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate Data analysis       *
 * Package: TMVA                                                                  *
 * Class  : TMVA::MethodLikelihood                                                *
 * Web    : http://tmva.sourceforge.net                                           *
 *                                                                                *
 * Description:                                                                   *
 *      Implementation (see header for description)                               *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland              *
 *      Helge Voss      <Helge.Voss@cern.ch>     - MPI-K Heidelberg, Germany      *
 *      Kai Voss        <Kai.Voss@cern.ch>       - U. of Victoria, Canada         *
 *      Jan Therhaag       <Jan.Therhaag@cern.ch>     - U of Bonn, Germany        *
 *      Eckhard v. Toerne  <evt@uni-bonn.de>     - U. of Bonn, Germany            *
 *                                                                                *
 * Copyright (c) 2005-2011:                                                       *
 *      CERN, Switzerland                                                         *
 *      U. of Victoria, Canada                                                    *
 *      MPI-K Heidelberg, Germany                                                 *
 *      U. of Bonn, Germany                                                       *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (http://tmva.sourceforge.net/LICENSE)                                          *
 **********************************************************************************/

//_______________________________________________________________________
/* Begin_Html

  Likelihood analysis ("non-parametric approach")

  <p>
  Also implemented is a "diagonalized likelihood approach",
  which improves on the uncorrelated likelihood ansatz by linearly
  transforming the input variables into a diagonal space, using the
  square-root of the covariance matrix.

  <p>
  The method of maximum likelihood is the most straightforward, and
  certainly among the most elegant multivariate analyser approaches.
  We define the likelihood ratio, <i>R<sub>L</sub></i>, for event
  <i>i</i>, by:<br>
  <center>
  <img vspace=6 src="gif/tmva_likratio.gif" align="bottom" >
  </center>
  Here the signal and background likelihoods, <i>L<sub>S</sub></i>,
  <i>L<sub>B</sub></i>, are products of the corresponding probability
  densities, <i>p<sub>S</sub></i>, <i>p<sub>B</sub></i>, of the
  <i>N</i><sub>var</sub> discriminating variables used in the MVA: <br>
  <center>
  <img vspace=6 src="gif/tmva_lik.gif" align="bottom" >
  </center>
  and accordingly for L<sub>B</sub>.
  In practice, TMVA uses polynomial splines to estimate the probability
  density functions (PDF) obtained from the distributions of the
  training variables.<br>
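
  <p>
  For reference, a plain-text rendering of the two formulas shown in the
  images above (following the standard TMVA definitions, given here only
  for readability):
  <pre>
     R_L(i)  =  L_S(i) / ( L_S(i) + L_B(i) )
     L_S(i)  =  prod_{k=1..N_var}  p_{S,k}( x_k(i) )     (and accordingly for L_B)
  </pre>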

  <p>
  Note that in TMVA the output of the likelihood ratio is transformed
  by<br>
  <center>
  <img vspace=6 src="gif/tmva_likratio_trans.gif" align="bottom"/>
  </center>
  to avoid the occurrence of heavy peaks at <i>R<sub>L</sub></i>=0,1.
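
  <p>
  In plain text, this transformation is an inverse Fermi (sigmoid) function,
  matching the implementation in TransformLikelihoodOutput below, which uses
  &tau; = 15:
  <pre>
     R_L'  =  - (1/&tau;) * ln( 1/R_L - 1 )
  </pre>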

  <b>Decorrelated (or "diagonalized") Likelihood</b>

  <p>
  The biggest drawback of the Likelihood approach is that it assumes
  that the discriminating variables are uncorrelated. If that were the case,
  it can be proven that the discrimination obtained by the above likelihood
  ratio is optimal, i.e., no other method can beat it. However, in most
  practical applications of MVAs correlations are present.

  <p>
  Linear correlations, measured from the training sample, can be taken
  into account in a straightforward manner through the square-root
  of the covariance matrix. The square-root of a matrix
  <i>C</i> is the matrix <i>C&prime;</i> that multiplied with itself
  yields <i>C</i>: <i>C</i>=<i>C&prime;C&prime;</i>. We compute the
  square-root matrix (SQM) by means of diagonalising (<i>D</i>) the
  covariance matrix: <br>
  <center>
  <img vspace=6 src="gif/tmva_sqm.gif" align="bottom" >
  </center>
  and the linear transformation of the linearly correlated into the
  uncorrelated variables space is then given by multiplying the measured
  variable tuple by the inverse of the SQM. Note that these transformations
  are performed for both signal and background separately, since the
  correlation pattern is not the same in the two samples.
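
  <p>
  Schematically, with <i>S</i> the matrix of eigenvectors and <i>D</i> the
  diagonal matrix of eigenvalues of <i>C</i> (a plain-text rendering of the
  image above, shown only as an illustration):
  <pre>
     C = S D S^T,     C' = S sqrt(D) S^T,     x_decorr = C'^{-1} x
  </pre>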

  <p>
  The above diagonalisation is complete for linearly correlated,
  Gaussian distributed variables only. In real-world examples this
  is not often the case, so that only little additional information
  may be recovered by the diagonalisation procedure. In these cases,
  non-linear methods must be applied.
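
  <p>
  A minimal booking sketch (the variable names and the option string are
  purely illustrative; see the TMVA Users Guide for the complete list of
  Likelihood and PDF options):
  <pre>
     TFile* outputFile = TFile::Open( "TMVA.root", "RECREATE" );
     TMVA::Factory factory( "TMVAClassification", outputFile, "AnalysisType=Classification" );
     factory.AddVariable( "var1", 'F' );
     factory.AddVariable( "var2", 'F' );
     // ... register the signal/background trees and prepare the training here ...
     factory.BookMethod( TMVA::Types::kLikelihood, "Likelihood",
                         "TransformOutput:PDFInterpol=Spline2:NAvEvtPerBin=50" );
     factory.TrainAllMethods();
  </pre>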

End_Html */
//_______________________________________________________________________

#include <iomanip>
#include <vector>
#include <cstdlib>
#include <cfloat>

#include "TMatrixD.h"
#include "TVector.h"
#include "TMath.h"
#include "TObjString.h"
#include "TFile.h"
#include "TKey.h"
#include "TH1.h"
#include "TClass.h"
#include "Riostream.h"

#include "TMVA/ClassifierFactory.h"
#include "TMVA/MethodLikelihood.h"
#include "TMVA/Tools.h"
#include "TMVA/Ranking.h"

REGISTER_METHOD(Likelihood)

ClassImp(TMVA::MethodLikelihood)

//_______________________________________________________________________
TMVA::MethodLikelihood::MethodLikelihood( const TString& jobName,
                                          const TString& methodTitle,
                                          DataSetInfo& theData,
                                          const TString& theOption,
                                          TDirectory* theTargetDir ) :
   TMVA::MethodBase( jobName, Types::kLikelihood, methodTitle, theData, theOption, theTargetDir ),
   fEpsilon       ( 1.e3 * DBL_MIN ),
   fTransformLikelihoodOutput( kFALSE ),
   fDropVariable  ( 0 ),
   fHistSig       ( 0 ),
   fHistBgd       ( 0 ),
   fHistSig_smooth( 0 ),
   fHistBgd_smooth( 0 ),
   fDefaultPDFLik ( 0 ),
   fPDFSig        ( 0 ),
   fPDFBgd        ( 0 ),
   fNsmooth       ( 2 ),
   fNsmoothVarS   ( 0 ),
   fNsmoothVarB   ( 0 ),
   fAverageEvtPerBin( 0 ),
   fAverageEvtPerBinVarS (0), 
   fAverageEvtPerBinVarB (0),
   fKDEfineFactor ( 0 ),
   fInterpolateString(0)
{
   // standard constructor
}

//_______________________________________________________________________
TMVA::MethodLikelihood::MethodLikelihood( DataSetInfo& theData,
                                          const TString& theWeightFile,
                                          TDirectory* theTargetDir ) :
   TMVA::MethodBase( Types::kLikelihood, theData, theWeightFile, theTargetDir ),
   fEpsilon       ( 1.e3 * DBL_MIN ),
   fTransformLikelihoodOutput( kFALSE ),
   fDropVariable  ( 0 ),
   fHistSig       ( 0 ),
   fHistBgd       ( 0 ),
   fHistSig_smooth( 0 ),
   fHistBgd_smooth( 0 ),
   fDefaultPDFLik ( 0 ),
   fPDFSig        ( 0 ),
   fPDFBgd        ( 0 ),
   fNsmooth       ( 2 ),
   fNsmoothVarS   ( 0 ),
   fNsmoothVarB   ( 0 ),
   fAverageEvtPerBin( 0 ),
   fAverageEvtPerBinVarS (0), 
   fAverageEvtPerBinVarB (0),
   fKDEfineFactor ( 0 ),
   fInterpolateString(0)
{
   // construct likelihood references from file
}

//_______________________________________________________________________
TMVA::MethodLikelihood::~MethodLikelihood( void )
{
   // destructor
   if (NULL != fDefaultPDFLik)  delete fDefaultPDFLik;
   if (NULL != fHistSig)        delete fHistSig;
   if (NULL != fHistBgd)        delete fHistBgd;
   if (NULL != fHistSig_smooth) delete fHistSig_smooth;
   if (NULL != fHistBgd_smooth) delete fHistBgd_smooth;
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      if ((*fPDFSig)[ivar] !=0) delete (*fPDFSig)[ivar];
      if ((*fPDFBgd)[ivar] !=0) delete (*fPDFBgd)[ivar];
   }
   if (NULL != fPDFSig)         delete fPDFSig;
   if (NULL != fPDFBgd)         delete fPDFBgd;
}

//_______________________________________________________________________
Bool_t TMVA::MethodLikelihood::HasAnalysisType( Types::EAnalysisType type,
                                                UInt_t numberClasses, UInt_t /*numberTargets*/ )
{
   // the Likelihood method handles classification with two classes
   if (type == Types::kClassification && numberClasses == 2) return kTRUE;
   return kFALSE;
}

//_______________________________________________________________________
void TMVA::MethodLikelihood::Init( void )
{
   // default initialisation called by all constructors

   // no ranking test
   fDropVariable   = -1;
   fHistSig        = new std::vector<TH1*>      ( GetNvar(), (TH1*)0 );
   fHistBgd        = new std::vector<TH1*>      ( GetNvar(), (TH1*)0 );
   fHistSig_smooth = new std::vector<TH1*>      ( GetNvar(), (TH1*)0 );
   fHistBgd_smooth = new std::vector<TH1*>      ( GetNvar(), (TH1*)0 );
   fPDFSig         = new std::vector<TMVA::PDF*>( GetNvar(), (TMVA::PDF*)0 );
   fPDFBgd         = new std::vector<TMVA::PDF*>( GetNvar(), (TMVA::PDF*)0 );
}

//_______________________________________________________________________
void TMVA::MethodLikelihood::DeclareOptions()
{
   // define the options (their key words) that can be set in the option string
   // TransformOutput   <bool>   transform (often strongly peaked) likelihood output through sigmoid inversion

   DeclareOptionRef( fTransformLikelihoodOutput = kFALSE, "TransformOutput",
                     "Transform likelihood output by inverse sigmoid function" );

   // initialize

   // reading every PDF's definition and passing the option string to the next one to be read and marked
   TString updatedOptions = GetOptions();
   fDefaultPDFLik = new PDF( TString(GetName()) + " PDF", updatedOptions );
   fDefaultPDFLik->DeclareOptions();
   fDefaultPDFLik->ParseOptions();
   updatedOptions = fDefaultPDFLik->GetOptions();
   for (UInt_t ivar = 0; ivar< DataInfo().GetNVariables(); ivar++) {
      (*fPDFSig)[ivar] = new PDF( Form("%s PDF Sig[%d]", GetName(), ivar), updatedOptions,
                                  Form("Sig[%d]",ivar), fDefaultPDFLik );
      (*fPDFSig)[ivar]->DeclareOptions();
      (*fPDFSig)[ivar]->ParseOptions();
      updatedOptions = (*fPDFSig)[ivar]->GetOptions();
      (*fPDFBgd)[ivar] = new PDF( Form("%s PDF Bkg[%d]", GetName(), ivar), updatedOptions,
                                  Form("Bkg[%d]",ivar), fDefaultPDFLik );
      (*fPDFBgd)[ivar]->DeclareOptions();
      (*fPDFBgd)[ivar]->ParseOptions();
      updatedOptions = (*fPDFBgd)[ivar]->GetOptions();
   }

   // the final marked option string is written back to the original likelihood
   SetOptions( updatedOptions );
}


void TMVA::MethodLikelihood::DeclareCompatibilityOptions()
{
   // options that are used ONLY for the READER to ensure backward compatibility

   MethodBase::DeclareCompatibilityOptions();
   DeclareOptionRef( fNsmooth = 1, "NSmooth",
                     "Number of smoothing iterations for the input histograms");
   DeclareOptionRef( fAverageEvtPerBin = 50, "NAvEvtPerBin",
                     "Average number of events per PDF bin");
   DeclareOptionRef( fKDEfineFactor =1. , "KDEFineFactor",
                     "Fine-tuning factor for Adaptive KDE: Factor to multiply the width of the kernel");
   DeclareOptionRef( fBorderMethodString = "None", "KDEborder",
                     "Border effects treatment (1=no treatment, 2=kernel renormalization, 3=sample mirroring)" );
   DeclareOptionRef( fKDEiterString = "Nonadaptive", "KDEiter",
                     "Number of iterations (1=non-adaptive, 2=adaptive)" );
   DeclareOptionRef( fKDEtypeString = "Gauss", "KDEtype",
                     "KDE kernel type (1=Gauss)" );
   fAverageEvtPerBinVarS = new Int_t[GetNvar()];
   fAverageEvtPerBinVarB = new Int_t[GetNvar()];
   fNsmoothVarS = new Int_t[GetNvar()];
   fNsmoothVarB = new Int_t[GetNvar()];
   fInterpolateString = new TString[GetNvar()];
   for(UInt_t i=0; i<GetNvar(); ++i) {
      fAverageEvtPerBinVarS[i] = fAverageEvtPerBinVarB[i] = 0;
      fNsmoothVarS[i] = fNsmoothVarB[i] = 0;
      fInterpolateString[i] = "";
   }
   DeclareOptionRef( fAverageEvtPerBinVarS, GetNvar(), "NAvEvtPerBinSig",
                     "Average num of events per PDF bin and variable (signal)");
   DeclareOptionRef( fAverageEvtPerBinVarB, GetNvar(), "NAvEvtPerBinBkg",
                     "Average num of events per PDF bin and variable (background)");
   DeclareOptionRef(fNsmoothVarS, GetNvar(), "NSmoothSig",
                    "Number of smoothing iterations for the input histograms");
   DeclareOptionRef(fNsmoothVarB, GetNvar(), "NSmoothBkg",
                    "Number of smoothing iterations for the input histograms");
   DeclareOptionRef(fInterpolateString, GetNvar(), "PDFInterpol", "Method of interpolating reference histograms (e.g. Spline2 or KDE)");
}




//_______________________________________________________________________
void TMVA::MethodLikelihood::ProcessOptions()
{

   // process user options
   // reference cut value to distinguish signal-like from background-like events
   SetSignalReferenceCut( TransformLikelihoodOutput( 0.5, 0.5 ) );

   fDefaultPDFLik->ProcessOptions();
   for (UInt_t ivar = 0; ivar< DataInfo().GetNVariables(); ivar++) {
      (*fPDFBgd)[ivar]->ProcessOptions();
      (*fPDFSig)[ivar]->ProcessOptions();
   }
}

//_______________________________________________________________________
void TMVA::MethodLikelihood::Train( void )
{
   // create reference distributions (PDFs) from signal and background events:
   // fill histograms and smooth them; if decorrelation is required, compute
   // corresponding square-root matrices
   // the reference histograms require the correct boundaries. Since in Likelihood classification
   // the transformations are applied using both classes, also the corresponding boundaries
   // need to take this into account
   UInt_t nvar=GetNvar();
   std::vector<Double_t> xmin(nvar), xmax(nvar);
   for (UInt_t ivar=0; ivar<nvar; ivar++) {xmin[ivar]=1e30; xmax[ivar]=-1e30;}

   UInt_t nevents=Data()->GetNEvents();
   for (UInt_t ievt=0; ievt<nevents; ievt++) {
      // use the transformation corresponding to the event's true class
      const Event* origEv = Data()->GetEvent(ievt);
      if (IgnoreEventsWithNegWeightsInTraining() && origEv->GetWeight()<=0) continue;
      // loop over classes
      for (int cls=0;cls<2;cls++){
         GetTransformationHandler().SetTransformationReferenceClass(cls);
         const Event* ev = GetTransformationHandler().Transform( origEv );
         for (UInt_t ivar=0; ivar<nvar; ivar++) {
            Float_t value  = ev->GetValue(ivar);
            if (value < xmin[ivar]) xmin[ivar] = value;
            if (value > xmax[ivar]) xmax[ivar] = value;
         }
      }
   }

   // create reference histograms
   // (KDE smoothing requires very finely binned reference histograms)
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      TString var = (*fInputVars)[ivar];

      // the reference histograms require the correct boundaries. Since in Likelihood classification
      // the transformations are applied using both classes, also the corresponding boundaries
      // need to take this into account

      // special treatment for discrete variables
      if (DataInfo().GetVariableInfo(ivar).GetVarType() == 'I') {
         // special treatment for integer variables
         Int_t ixmin = TMath::Nint( xmin[ivar] );
         xmax[ivar]=xmax[ivar]+1; // make sure that all entries are included in histogram
         Int_t ixmax = TMath::Nint( xmax[ivar] );
         Int_t nbins = ixmax - ixmin;

         (*fHistSig)[ivar] = new TH1F( var + "_sig", var + " signal training",     nbins, ixmin, ixmax );
         (*fHistBgd)[ivar] = new TH1F( var + "_bgd", var + " background training", nbins, ixmin, ixmax );
      } else {

         UInt_t minNEvt = TMath::Min(Data()->GetNEvtSigTrain(),Data()->GetNEvtBkgdTrain());
         Int_t nbinsS = (*fPDFSig)[ivar]->GetHistNBins( minNEvt );
         Int_t nbinsB = (*fPDFBgd)[ivar]->GetHistNBins( minNEvt );

         (*fHistSig)[ivar] = new TH1F( Form("%s_sig",var.Data()),
                                       Form("%s signal training",var.Data()), nbinsS, xmin[ivar], xmax[ivar] );
         (*fHistBgd)[ivar] = new TH1F( Form("%s_bgd",var.Data()),
                                       Form("%s background training",var.Data()), nbinsB, xmin[ivar], xmax[ivar] );
      }
   }

   // ----- fill the reference histograms
   Log() << kINFO << "Filling reference histograms" << Endl;

   // event loop
   for (Int_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {

      // use the transformation corresponding to the event's true class
      const Event* origEv = Data()->GetEvent(ievt);
      if (IgnoreEventsWithNegWeightsInTraining() && origEv->GetWeight()<=0) continue;
      GetTransformationHandler().SetTransformationReferenceClass( origEv->GetClass() );
      const Event* ev = GetTransformationHandler().Transform( origEv );

      // the event weight
      Float_t weight = ev->GetWeight();

      // fill variable vector
      for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
         Double_t value  = ev->GetValue(ivar);
         // verify limits
         if (value >= xmax[ivar]) value = xmax[ivar] - 1.0e-10;
         else if (value < xmin[ivar]) value = xmin[ivar] + 1.0e-10;
         // inserting check if there are events in overflow or underflow
         if (value >=(*fHistSig)[ivar]->GetXaxis()->GetXmax() ||
             value <(*fHistSig)[ivar]->GetXaxis()->GetXmin()){
            Log()<<kWARNING
                 <<"error in filling likelihood reference histograms var="
                 <<(*fInputVars)[ivar]
                 << ", xmin="<<(*fHistSig)[ivar]->GetXaxis()->GetXmin()
                 << ", value="<<value
                 << ", xmax="<<(*fHistSig)[ivar]->GetXaxis()->GetXmax()
                 << Endl;
         }
         if (DataInfo().IsSignal(ev)) (*fHistSig)[ivar]->Fill( value, weight );
         else                (*fHistBgd)[ivar]->Fill( value, weight );
      }
   }

   // building the pdfs
   Log() << kINFO << "Building PDF out of reference histograms" << Endl;
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {

      // the PDF is built from (binned) reference histograms
      // in case of KDE, this has a large number of bins, which makes it quasi-unbinned
      (*fPDFSig)[ivar]->BuildPDF( (*fHistSig)[ivar] );
      (*fPDFBgd)[ivar]->BuildPDF( (*fHistBgd)[ivar] );

      (*fPDFSig)[ivar]->ValidatePDF( (*fHistSig)[ivar] );
      (*fPDFBgd)[ivar]->ValidatePDF( (*fHistBgd)[ivar] );

      // saving the smoothed histograms
      if ((*fPDFSig)[ivar]->GetSmoothedHist() != 0) (*fHistSig_smooth)[ivar] = (*fPDFSig)[ivar]->GetSmoothedHist();
      if ((*fPDFBgd)[ivar]->GetSmoothedHist() != 0) (*fHistBgd_smooth)[ivar] = (*fPDFBgd)[ivar]->GetSmoothedHist();
   }
}

//_______________________________________________________________________
Double_t TMVA::MethodLikelihood::GetMvaValue( Double_t* err, Double_t* errUpper )
{
   // returns the likelihood estimator for signal
   UInt_t ivar;

   // cannot determine error
   NoErrorCalc(err, errUpper);

   // retrieve variables, and transform, if required
   TVector vs( GetNvar() );
   TVector vb( GetNvar() );

   // need to distinguish signal and background in case of variable transformation
   // signal first

   GetTransformationHandler().SetTransformationReferenceClass( fSignalClass );
   // temporary: JS  --> FIX
   //GetTransformationHandler().SetTransformationReferenceClass( 0 );
   const Event* ev = GetEvent();
   for (ivar=0; ivar<GetNvar(); ivar++) vs(ivar) = ev->GetValue(ivar);

   GetTransformationHandler().SetTransformationReferenceClass( fBackgroundClass );
   // temporary: JS  --> FIX
   //GetTransformationHandler().SetTransformationReferenceClass( 1 );
   ev = GetEvent();
   for (ivar=0; ivar<GetNvar(); ivar++) vb(ivar) = ev->GetValue(ivar);

   // compute the likelihood (signal)
   Double_t ps(1), pb(1), p(0);
   for (ivar=0; ivar<GetNvar(); ivar++) {

      // drop one variable (this is ONLY used for internal variable ranking !)
      if ((Int_t)ivar == fDropVariable) continue;

      Double_t x[2] = { vs(ivar), vb(ivar) };

      for (UInt_t itype=0; itype < 2; itype++) {

         // verify limits
         if      (x[itype] >= (*fPDFSig)[ivar]->GetXmax()) x[itype] = (*fPDFSig)[ivar]->GetXmax() - 1.0e-10;
         else if (x[itype] <  (*fPDFSig)[ivar]->GetXmin()) x[itype] = (*fPDFSig)[ivar]->GetXmin();

         // find corresponding histogram from cached indices
         PDF* pdf = (itype == 0) ? (*fPDFSig)[ivar] : (*fPDFBgd)[ivar];
         if (pdf == 0) Log() << kFATAL << "<GetMvaValue> Reference histograms don't exist" << Endl;
         TH1* hist = pdf->GetPDFHist();

         // interpolate linearly between adjacent bins
         // this is not useful for discrete variables
         Int_t bin = hist->FindBin(x[itype]);

         // **** POTENTIAL BUG: PERFORMANCE IS WORSE WHEN USING TRUE TYPE ***
         // ==> commented out at present
         if ((*fPDFSig)[ivar]->GetInterpolMethod() == TMVA::PDF::kSpline0 ||
             DataInfo().GetVariableInfo(ivar).GetVarType() == 'N') {
            p = TMath::Max( hist->GetBinContent(bin), fEpsilon );
         } else { // splined PDF
            Int_t nextbin = bin;
            if ((x[itype] > hist->GetBinCenter(bin) && bin != hist->GetNbinsX()) || bin == 1)
               nextbin++;
            else
               nextbin--;


            Double_t dx   = hist->GetBinCenter(bin)  - hist->GetBinCenter(nextbin);
            Double_t dy   = hist->GetBinContent(bin) - hist->GetBinContent(nextbin);
            Double_t like = hist->GetBinContent(bin) + (x[itype] - hist->GetBinCenter(bin)) * dy/dx;

            p = TMath::Max( like, fEpsilon );
         }

         if (itype == 0) ps *= p;
         else            pb *= p;
      }
   }

   // the likelihood ratio (transform it ?)
   return TransformLikelihoodOutput( ps, pb );
}

//_______________________________________________________________________
Double_t TMVA::MethodLikelihood::TransformLikelihoodOutput( Double_t ps, Double_t pb ) const
{
   // returns transformed or non-transformed output
   if (ps < fEpsilon) ps = fEpsilon;
   if (pb < fEpsilon) pb = fEpsilon;
   Double_t r = ps/(ps + pb);
   if (r >= 1.0) r = 1. - 1.e-15;

   if (fTransformLikelihoodOutput) {
      // inverse Fermi function

      // sanity check
      if      (r <= 0.0) r = fEpsilon;
      else if (r >= 1.0) r = 1. - 1.e-15;

      Double_t tau = 15.0;
      r = - TMath::Log(1.0/r - 1.0)/tau;
   }

   return r;
}

//______________________________________________________________________
void TMVA::MethodLikelihood::WriteOptionsToStream( std::ostream& o, const TString& prefix ) const
{
   // write options to stream
   Configurable::WriteOptionsToStream( o, prefix);

   // writing the options defined for the different pdfs
   if (fDefaultPDFLik != 0) {
      o << prefix << std::endl << prefix << "#Default Likelihood PDF Options:" << std::endl << prefix << std::endl;
      fDefaultPDFLik->WriteOptionsToStream( o, prefix );
   }
   for (UInt_t ivar = 0; ivar < fPDFSig->size(); ivar++) {
      if ((*fPDFSig)[ivar] != 0) {
         o << prefix << std::endl << prefix << Form("#Signal[%d] Likelihood PDF Options:",ivar) << std::endl << prefix << std::endl;
         (*fPDFSig)[ivar]->WriteOptionsToStream( o, prefix );
      }
      if ((*fPDFBgd)[ivar] != 0) {
         o << prefix << std::endl << prefix << "#Background[%d] Likelihood PDF Options:" << std::endl << prefix << std::endl;
         (*fPDFBgd)[ivar]->WriteOptionsToStream( o, prefix );
      }
   }
}

//_______________________________________________________________________
void TMVA::MethodLikelihood::AddWeightsXMLTo( void* parent ) const
{
   // write weights to XML
   void* wght = gTools().AddChild(parent, "Weights");
   gTools().AddAttr(wght, "NVariables", GetNvar());
   gTools().AddAttr(wght, "NClasses", 2);
   void* pdfwrap;
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      if ( (*fPDFSig)[ivar]==0 || (*fPDFBgd)[ivar]==0 )
         Log() << kFATAL << "Reference histograms for variable " << ivar
               << " don't exist, can't write it to weight file" << Endl;
      pdfwrap = gTools().AddChild(wght, "PDFDescriptor");
      gTools().AddAttr(pdfwrap, "VarIndex", ivar);
      gTools().AddAttr(pdfwrap, "ClassIndex", 0);
      (*fPDFSig)[ivar]->AddXMLTo(pdfwrap);
      pdfwrap = gTools().AddChild(wght, "PDFDescriptor");
      gTools().AddAttr(pdfwrap, "VarIndex", ivar);
      gTools().AddAttr(pdfwrap, "ClassIndex", 1);
      (*fPDFBgd)[ivar]->AddXMLTo(pdfwrap);
   }
}

//_______________________________________________________________________
const TMVA::Ranking* TMVA::MethodLikelihood::CreateRanking()
{
   // computes ranking of input variables

   // create the ranking object
   if (fRanking) delete fRanking;
   fRanking = new Ranking( GetName(), "Delta Separation" );

   Double_t sepRef = -1, sep = -1;
   for (Int_t ivar=-1; ivar<(Int_t)GetNvar(); ivar++) {

      // this variable should not be used
      fDropVariable = ivar;

      TString nameS = Form( "rS_%i", ivar+1 );
      TString nameB = Form( "rB_%i", ivar+1 );
      TH1* rS = new TH1F( nameS, nameS, 80, 0, 1 );
      TH1* rB = new TH1F( nameB, nameB, 80, 0, 1 );

      // the event loop
      for (Int_t ievt=0; ievt<Data()->GetNTrainingEvents(); ievt++) {

         const Event* origEv = Data()->GetEvent(ievt);
         GetTransformationHandler().SetTransformationReferenceClass( origEv->GetClass() );
         const Event* ev = GetTransformationHandler().Transform(Data()->GetEvent(ievt));

         Double_t lk = this->GetMvaValue();
         Double_t w  = ev->GetWeight();
         if (DataInfo().IsSignal(ev)) rS->Fill( lk, w );
         else                rB->Fill( lk, w );
      }

      // compute separation
      sep = TMVA::gTools().GetSeparation( rS, rB );
      if (ivar == -1) sepRef = sep;
      sep = sepRef - sep;

      // don't need these histograms anymore
      delete rS;
      delete rB;

      if (ivar >= 0) fRanking->AddRank( Rank( DataInfo().GetVariableInfo(ivar).GetInternalName(), sep ) );
   }

   fDropVariable = -1;

   return fRanking;
}

//_______________________________________________________________________
void  TMVA::MethodLikelihood::WriteWeightsToStream( TFile& ) const
{
   // write reference PDFs to ROOT file
   TString pname = "PDF_";
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++){
      (*fPDFSig)[ivar]->Write( pname + GetInputVar( ivar ) + "_S" );
      (*fPDFBgd)[ivar]->Write( pname + GetInputVar( ivar ) + "_B" );
   }
}
//_______________________________________________________________________
void  TMVA::MethodLikelihood::ReadWeightsFromXML(void* wghtnode)
{
   // read weights from XML
   TString pname = "PDF_";
   Bool_t addDirStatus = TH1::AddDirectoryStatus();
   TH1::AddDirectory(0); // this avoids the binding of the hists in TMVA::PDF to the current ROOT file
   UInt_t nvars=0;
   gTools().ReadAttr(wghtnode, "NVariables",nvars);
   void* descnode = gTools().GetChild(wghtnode);
   for (UInt_t ivar=0; ivar<nvars; ivar++){
      void* pdfnode = gTools().GetChild(descnode);
      Log() << kINFO << "Reading signal and background PDF for variable: " << GetInputVar( ivar ) << Endl;
      if ((*fPDFSig)[ivar] !=0) delete (*fPDFSig)[ivar];
      if ((*fPDFBgd)[ivar] !=0) delete (*fPDFBgd)[ivar];
      (*fPDFSig)[ivar] = new PDF( GetInputVar( ivar ) + " PDF Sig" );
      (*fPDFBgd)[ivar] = new PDF( GetInputVar( ivar ) + " PDF Bkg" );
      (*fPDFSig)[ivar]->SetReadingVersion( GetTrainingTMVAVersionCode() );
      (*fPDFBgd)[ivar]->SetReadingVersion( GetTrainingTMVAVersionCode() );
      (*(*fPDFSig)[ivar]).ReadXML(pdfnode);
      descnode = gTools().GetNextChild(descnode);
      pdfnode  = gTools().GetChild(descnode);
      (*(*fPDFBgd)[ivar]).ReadXML(pdfnode);
      descnode = gTools().GetNextChild(descnode);
   }
   TH1::AddDirectory(addDirStatus);
}
//_______________________________________________________________________
void  TMVA::MethodLikelihood::ReadWeightsFromStream( std::istream & istr )
{
   // read the reference PDFs from the weight stream
   TString pname = "PDF_";
   Bool_t addDirStatus = TH1::AddDirectoryStatus();
   TH1::AddDirectory(0); // this avoids the binding of the hists in TMVA::PDF to the current ROOT file
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++){
      Log() << kINFO << "Reading signal and background PDF for variable: " << GetInputVar( ivar ) << Endl;
      if ((*fPDFSig)[ivar] !=0) delete (*fPDFSig)[ivar];
      if ((*fPDFBgd)[ivar] !=0) delete (*fPDFBgd)[ivar];
      (*fPDFSig)[ivar] = new PDF(GetInputVar( ivar ) + " PDF Sig" );
      (*fPDFBgd)[ivar] = new PDF(GetInputVar( ivar ) + " PDF Bkg");
      (*fPDFSig)[ivar]->SetReadingVersion( GetTrainingTMVAVersionCode() );
      (*fPDFBgd)[ivar]->SetReadingVersion( GetTrainingTMVAVersionCode() );
      istr >> *(*fPDFSig)[ivar];
      istr >> *(*fPDFBgd)[ivar];
   }
   TH1::AddDirectory(addDirStatus);
}

//_______________________________________________________________________
void  TMVA::MethodLikelihood::ReadWeightsFromStream( TFile& rf )
{
   // read reference PDF from ROOT file
   TString pname = "PDF_";
   Bool_t addDirStatus = TH1::AddDirectoryStatus();
   TH1::AddDirectory(0); // this avoids the binding of the hists in TMVA::PDF to the current ROOT file
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++){
      (*fPDFSig)[ivar] = (TMVA::PDF*)rf.Get( Form( "PDF_%s_S", GetInputVar( ivar ).Data() ) );
      (*fPDFBgd)[ivar] = (TMVA::PDF*)rf.Get( Form( "PDF_%s_B", GetInputVar( ivar ).Data() ) );
   }
   TH1::AddDirectory(addDirStatus);
}

//_______________________________________________________________________
void  TMVA::MethodLikelihood::WriteMonitoringHistosToFile( void ) const
{
   // write histograms and PDFs to file for monitoring purposes

   Log() << kINFO << "Write monitoring histograms to file: " << BaseDir()->GetPath() << Endl;
   BaseDir()->cd();

   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      (*fHistSig)[ivar]->Write();
      (*fHistBgd)[ivar]->Write();
      if ((*fHistSig_smooth)[ivar] != 0) (*fHistSig_smooth)[ivar]->Write();
      if ((*fHistBgd_smooth)[ivar] != 0) (*fHistBgd_smooth)[ivar]->Write();
      (*fPDFSig)[ivar]->GetPDFHist()->Write();
      (*fPDFBgd)[ivar]->GetPDFHist()->Write();

      if ((*fPDFSig)[ivar]->GetNSmoothHist() != 0) (*fPDFSig)[ivar]->GetNSmoothHist()->Write();
      if ((*fPDFBgd)[ivar]->GetNSmoothHist() != 0) (*fPDFBgd)[ivar]->GetNSmoothHist()->Write();

      // add special plots to check the smoothing in the GetVal method
      Float_t xmin=((*fPDFSig)[ivar]->GetPDFHist()->GetXaxis())->GetXmin();
      Float_t xmax=((*fPDFSig)[ivar]->GetPDFHist()->GetXaxis())->GetXmax();
      TH1F* mm = new TH1F( (*fInputVars)[ivar]+"_additional_check",
                           (*fInputVars)[ivar]+"_additional_check", 15000, xmin, xmax );
      Double_t intBin = (xmax-xmin)/15000;
      for (Int_t bin=0; bin < 15000; bin++) {
         Double_t x = (bin + 0.5)*intBin + xmin;
         mm->SetBinContent(bin+1 ,(*fPDFSig)[ivar]->GetVal(x));
      }
      mm->Write();

      // ---------- create cloned low-binned histogram for comparison in macros (mainly necessary for KDE)
      TH1* h[2] = { (*fHistSig)[ivar], (*fHistBgd)[ivar] };
      for (UInt_t i=0; i<2; i++) {
         TH1* hclone = (TH1F*)h[i]->Clone( TString(h[i]->GetName()) + "_nice" );
         hclone->SetName ( TString(h[i]->GetName()) + "_nice" );
         hclone->SetTitle( TString(h[i]->GetTitle()) + "" );
         if (hclone->GetNbinsX() > 100) {
            Int_t resFactor = 5;
            hclone->Rebin( resFactor );
            hclone->Scale( 1.0/resFactor );
         }
         hclone->Write();
      }
      // ----------
   }
}

//_______________________________________________________________________
void TMVA::MethodLikelihood::MakeClassSpecificHeader( std::ostream& fout, const TString& ) const
{
   // write specific header of the classifier (mostly include files)
   fout << "#include <math.h>" << std::endl;
   fout << "#include <cstdlib>" << std::endl;
}

//_______________________________________________________________________
void TMVA::MethodLikelihood::MakeClassSpecific( std::ostream& fout, const TString& className ) const
{
   // write specific classifier response
   Int_t dp = fout.precision();
   fout << "   double       fEpsilon;" << std::endl;

   Int_t * nbin = new Int_t[GetNvar()];

   Int_t nbinMax=-1;
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      nbin[ivar]=(*fPDFSig)[ivar]->GetPDFHist()->GetNbinsX();
      if (nbin[ivar] > nbinMax) nbinMax=nbin[ivar];
   }

   fout << "   static float fRefS[][" << nbinMax << "]; "
        << "// signal reference vector [nvars][max_nbins]" << std::endl;
   fout << "   static float fRefB[][" << nbinMax << "]; "
        << "// backgr reference vector [nvars][max_nbins]" << std::endl << std::endl;
   fout << "// if a variable has its PDF encoded as a spline0 --> treat it like an Integer valued one" <<std::endl;
   fout << "   bool    fHasDiscretPDF[" << GetNvar() <<"]; "<< std::endl;
   fout << "   int    fNbin[" << GetNvar() << "]; "
        << "// number of bins (discrete variables may have less bins)" << std::endl;
   fout << "   double    fHistMin[" << GetNvar() << "]; " << std::endl;
   fout << "   double    fHistMax[" << GetNvar() << "]; " << std::endl;

   fout << "   double TransformLikelihoodOutput( double, double ) const;" << std::endl;
   fout << "};" << std::endl;
   fout << "" << std::endl;
   fout << "inline void " << className << "::Initialize() " << std::endl;
   fout << "{" << std::endl;
   fout << "   fEpsilon = " << fEpsilon << ";" << std::endl;
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      fout << "   fNbin[" << ivar << "] = " << (*fPDFSig)[ivar]->GetPDFHist()->GetNbinsX() << ";" << std::endl;
      fout << "   fHistMin[" << ivar << "] = " << (*fPDFSig)[ivar]->GetPDFHist()->GetXaxis()->GetXmin() << ";" << std::endl;
      fout << "   fHistMax[" << ivar << "] = " << (*fPDFSig)[ivar]->GetPDFHist()->GetXaxis()->GetXmax() << ";" << std::endl;
      // sanity check (for previous code lines)
      if ((((*fPDFSig)[ivar]->GetPDFHist()->GetNbinsX() != nbin[ivar] ||
            (*fPDFBgd)[ivar]->GetPDFHist()->GetNbinsX() != nbin[ivar])
           ) ||
          (*fPDFSig)[ivar]->GetPDFHist()->GetNbinsX() != (*fPDFBgd)[ivar]->GetPDFHist()->GetNbinsX()) {
         Log() << kFATAL << "<MakeClassSpecific> Mismatch in binning of variable "
               << "\"" << GetOriginalVarName(ivar) << "\" of type: \'" << DataInfo().GetVariableInfo(ivar).GetVarType()
               << "\' : "
               << "nxS = " << (*fPDFSig)[ivar]->GetPDFHist()->GetNbinsX() << ", "
               << "nxB = " << (*fPDFBgd)[ivar]->GetPDFHist()->GetNbinsX()
               << " while we expect " << nbin[ivar]
               << Endl;
      }
   }
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++){
      if ((*fPDFSig)[ivar]->GetInterpolMethod() == TMVA::PDF::kSpline0)
         fout << "   fHasDiscretPDF[" << ivar <<"] = true;  " << std::endl;
      else
         fout << "   fHasDiscretPDF[" << ivar <<"] = false; " << std::endl;
   }

   fout << "}" << std::endl << std::endl;

   fout << "inline double " << className
        << "::GetMvaValue__( const std::vector<double>& inputValues ) const" << std::endl;
   fout << "{" << std::endl;
   fout << "   double ps(1), pb(1);" << std::endl;
   fout << "   std::vector<double> inputValuesSig = inputValues;" << std::endl;
   fout << "   std::vector<double> inputValuesBgd = inputValues;" << std::endl;
   if (GetTransformationHandler().GetTransformationList().GetSize() != 0) {
      fout << "   Transform(inputValuesSig,0);" << std::endl;
      fout << "   Transform(inputValuesBgd,1);" << std::endl;
   }
   fout << "   for (size_t ivar = 0; ivar < GetNvar(); ivar++) {" << std::endl;
   fout << std::endl;
   fout << "      // dummy at present... will be used for variable transforms" << std::endl;
   fout << "      double x[2] = { inputValuesSig[ivar], inputValuesBgd[ivar] };" << std::endl;
   fout << std::endl;
   fout << "      for (int itype=0; itype < 2; itype++) {" << std::endl;
   fout << std::endl;
   fout << "         // interpolate linearly between adjacent bins" << std::endl;
   fout << "         // this is not useful for discrete variables (or forced Spline0)" << std::endl;
   fout << "         int bin = int((x[itype] - fHistMin[ivar])/(fHistMax[ivar] - fHistMin[ivar])*fNbin[ivar]) + 0;" << std::endl;
   fout << std::endl;
   fout << "         // since the test data sample is in general different from the training sample" << std::endl;
   fout << "         // it can happen that the min/max of the training sample are trespassed --> correct this" << std::endl;
   fout << "         if      (bin < 0) {" << std::endl;
   fout << "            bin = 0;" << std::endl;
   fout << "            x[itype] = fHistMin[ivar];" << std::endl;
   fout << "         }" << std::endl;
   fout << "         else if (bin >= fNbin[ivar]) {" << std::endl;
   fout << "            bin = fNbin[ivar]-1;" << std::endl;
   fout << "            x[itype] = fHistMax[ivar];" << std::endl;
   fout << "         }" << std::endl;
   fout << std::endl;
   fout << "         // find corresponding histogram from cached indices" << std::endl;
   fout << "         float ref = (itype == 0) ? fRefS[ivar][bin] : fRefB[ivar][bin];" << std::endl;
   fout << std::endl;
   fout << "         // sanity check" << std::endl;
   fout << "         if (ref < 0) {" << std::endl;
   fout << "            std::cout << \"Fatal error in " << className
        << ": bin entry < 0 ==> abort\" << std::endl;" << std::endl;
   fout << "            std::exit(1);" << std::endl;
   fout << "         }" << std::endl;
   fout << std::endl;
   fout << "         double p = ref;" << std::endl;
   fout << std::endl;
   fout << "         if (GetType(ivar) != 'I' && !fHasDiscretPDF[ivar]) {" << std::endl;
   fout << "            float bincenter = (bin + 0.5)/fNbin[ivar]*(fHistMax[ivar] - fHistMin[ivar]) + fHistMin[ivar];" << std::endl;
   fout << "            int nextbin = bin;" << std::endl;
   fout << "            if ((x[itype] > bincenter && bin != fNbin[ivar]-1) || bin == 0) " << std::endl;
   fout << "               nextbin++;" << std::endl;
   fout << "            else" << std::endl;
   fout << "               nextbin--;  " << std::endl;
   fout << std::endl;
   fout << "            double refnext      = (itype == 0) ? fRefS[ivar][nextbin] : fRefB[ivar][nextbin];" << std::endl;
   fout << "            float nextbincenter = (nextbin + 0.5)/fNbin[ivar]*(fHistMax[ivar] - fHistMin[ivar]) + fHistMin[ivar];" << std::endl;
   fout << std::endl;
   fout << "            double dx = bincenter - nextbincenter;" << std::endl;
   fout << "            double dy = ref - refnext;" << std::endl;
   fout << "            p += (x[itype] - bincenter) * dy/dx;" << std::endl;
   fout << "         }" << std::endl;
   fout << std::endl;
   fout << "         if (p < fEpsilon) p = fEpsilon; // avoid zero response" << std::endl;
   fout << std::endl;
   fout << "         if (itype == 0) ps *= p;" << std::endl;
   fout << "         else            pb *= p;" << std::endl;
   fout << "      }            " << std::endl;
   fout << "   }     " << std::endl;
   fout << std::endl;
   fout << "   // the likelihood ratio (transform it ?)" << std::endl;
   fout << "   return TransformLikelihoodOutput( ps, pb );   " << std::endl;
   fout << "}" << std::endl << std::endl;

   fout << "inline double " << className << "::TransformLikelihoodOutput( double ps, double pb ) const" << std::endl;
   fout << "{" << std::endl;
   fout << "   // returns transformed or non-transformed output" << std::endl;
   fout << "   if (ps < fEpsilon) ps = fEpsilon;" << std::endl;
   fout << "   if (pb < fEpsilon) pb = fEpsilon;" << std::endl;
   fout << "   double r = ps/(ps + pb);" << std::endl;
   fout << "   if (r >= 1.0) r = 1. - 1.e-15;" << std::endl;
   fout << std::endl;
   fout << "   if (" << (fTransformLikelihoodOutput ? "true" : "false") << ") {" << std::endl;
   fout << "      // inverse Fermi function" << std::endl;
   fout << std::endl;
   fout << "      // sanity check" << std::endl;
   fout << "      if      (r <= 0.0) r = fEpsilon;" << std::endl;
   fout << "      else if (r >= 1.0) r = 1. - 1.e-15;" << std::endl;
   fout << std::endl;
   fout << "      double tau = 15.0;" << std::endl;
   fout << "      r = - log(1.0/r - 1.0)/tau;" << std::endl;
   fout << "   }" << std::endl;
   fout << std::endl;
   fout << "   return r;" << std::endl;
   fout << "}" << std::endl;
   fout << std::endl;

   fout << "// Clean up" << std::endl;
   fout << "inline void " << className << "::Clear() " << std::endl;
   fout << "{" << std::endl;
   fout << "   // nothing to clear" << std::endl;
   fout << "}" << std::endl << std::endl;

   fout << "// signal map" << std::endl;
   fout << "float " << className << "::fRefS[][" << nbinMax << "] = " << std::endl;
   fout << "{ " << std::endl;
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      fout << "   { ";
      for (Int_t ibin=1; ibin<=nbinMax; ibin++) {
         if (ibin-1 < nbin[ivar])
            fout << (*fPDFSig)[ivar]->GetPDFHist()->GetBinContent(ibin);
         else
            fout << -1;

         if (ibin < nbinMax) fout << ", ";
      }
      fout << "   }, " << std::endl;
   }
   fout << "}; " << std::endl;
   fout << std::endl;

   fout << "// background map" << std::endl;
   fout << "float " << className << "::fRefB[][" << nbinMax << "] = " << std::endl;
   fout << "{ " << std::endl;
   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      fout << "   { ";
      fout << std::setprecision(8);
      for (Int_t ibin=1; ibin<=nbinMax; ibin++) {
         if (ibin-1 < nbin[ivar])
            fout << (*fPDFBgd)[ivar]->GetPDFHist()->GetBinContent(ibin);
         else
            fout << -1;

         if (ibin < nbinMax) fout << ", ";
      }
      fout << "   }, " << std::endl;
   }
   fout << "}; " << std::endl;
   fout << std::endl;
   fout << std::setprecision(dp);

   delete[] nbin;
}

//_______________________________________________________________________
void TMVA::MethodLikelihood::GetHelpMessage() const
{
   // get help message text
   //
   // typical length of text line:
   //         "|--------------------------------------------------------------|"
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Short description:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "The maximum-likelihood classifier models the data with probability " << Endl;
   Log() << "density functions (PDF) reproducing the signal and background" << Endl;
   Log() << "distributions of the input variables. Correlations among the " << Endl;
   Log() << "variables are ignored." << Endl;
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Performance optimisation:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "Required for good performance are decorrelated input variables" << Endl;
   Log() << "(PCA transformation via the option \"VarTransform=Decorrelate\"" << Endl;
   Log() << "may be tried). Irreducible non-linear correlations may be reduced" << Endl;
   Log() << "by precombining strongly correlated input variables, or by simply" << Endl;
   Log() << "removing one of the variables." << Endl;
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Performance tuning via configuration options:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "High fidelity PDF estimates are mandatory, i.e., sufficient training " << Endl;
   Log() << "statistics is required to populate the tails of the distributions" << Endl;
   Log() << "It would be a surprise if the default Spline or KDE kernel parameters" << Endl;
   Log() << "provide a satisfying fit to the data. The user is advised to properly" << Endl;
   Log() << "tune the events per bin and smooth options in the spline cases" << Endl;
   Log() << "individually per variable. If the KDE kernel is used, the adaptive" << Endl;
   Log() << "Gaussian kernel may lead to artefacts, so please always also try" << Endl;
   Log() << "the non-adaptive one." << Endl;
   Log() << "" << Endl;
   Log() << "All tuning parameters must be adjusted individually for each input" << Endl;
   Log() << "variable!" << Endl;
}

 MethodLikelihood.cxx:593
 MethodLikelihood.cxx:594
 MethodLikelihood.cxx:595
 MethodLikelihood.cxx:596
 MethodLikelihood.cxx:597
 MethodLikelihood.cxx:598
 MethodLikelihood.cxx:599
 MethodLikelihood.cxx:600
 MethodLikelihood.cxx:601
 MethodLikelihood.cxx:602
 MethodLikelihood.cxx:603
 MethodLikelihood.cxx:604
 MethodLikelihood.cxx:605
 MethodLikelihood.cxx:606
 MethodLikelihood.cxx:607
 MethodLikelihood.cxx:608
 MethodLikelihood.cxx:609
 MethodLikelihood.cxx:610
 MethodLikelihood.cxx:611
 MethodLikelihood.cxx:612
 MethodLikelihood.cxx:613
 MethodLikelihood.cxx:614
 MethodLikelihood.cxx:615
 MethodLikelihood.cxx:616
 MethodLikelihood.cxx:617
 MethodLikelihood.cxx:618
 MethodLikelihood.cxx:619
 MethodLikelihood.cxx:620
 MethodLikelihood.cxx:621
 MethodLikelihood.cxx:622
 MethodLikelihood.cxx:623
 MethodLikelihood.cxx:624
 MethodLikelihood.cxx:625
 MethodLikelihood.cxx:626
 MethodLikelihood.cxx:627
 MethodLikelihood.cxx:628
 MethodLikelihood.cxx:629
 MethodLikelihood.cxx:630
 MethodLikelihood.cxx:631
 MethodLikelihood.cxx:632
 MethodLikelihood.cxx:633
 MethodLikelihood.cxx:634
 MethodLikelihood.cxx:635
 MethodLikelihood.cxx:636
 MethodLikelihood.cxx:637
 MethodLikelihood.cxx:638
 MethodLikelihood.cxx:639
 MethodLikelihood.cxx:640
 MethodLikelihood.cxx:641
 MethodLikelihood.cxx:642
 MethodLikelihood.cxx:643
 MethodLikelihood.cxx:644
 MethodLikelihood.cxx:645
 MethodLikelihood.cxx:646
 MethodLikelihood.cxx:647
 MethodLikelihood.cxx:648
 MethodLikelihood.cxx:649
 MethodLikelihood.cxx:650
 MethodLikelihood.cxx:651
 MethodLikelihood.cxx:652
 MethodLikelihood.cxx:653
 MethodLikelihood.cxx:654
 MethodLikelihood.cxx:655
 MethodLikelihood.cxx:656
 MethodLikelihood.cxx:657
 MethodLikelihood.cxx:658
 MethodLikelihood.cxx:659
 MethodLikelihood.cxx:660
 MethodLikelihood.cxx:661
 MethodLikelihood.cxx:662
 MethodLikelihood.cxx:663
 MethodLikelihood.cxx:664
 MethodLikelihood.cxx:665
 MethodLikelihood.cxx:666
 MethodLikelihood.cxx:667
 MethodLikelihood.cxx:668
 MethodLikelihood.cxx:669
 MethodLikelihood.cxx:670
 MethodLikelihood.cxx:671
 MethodLikelihood.cxx:672
 MethodLikelihood.cxx:673
 MethodLikelihood.cxx:674
 MethodLikelihood.cxx:675
 MethodLikelihood.cxx:676
 MethodLikelihood.cxx:677
 MethodLikelihood.cxx:678
 MethodLikelihood.cxx:679
 MethodLikelihood.cxx:680
 MethodLikelihood.cxx:681
 MethodLikelihood.cxx:682
 MethodLikelihood.cxx:683
 MethodLikelihood.cxx:684
 MethodLikelihood.cxx:685
 MethodLikelihood.cxx:686
 MethodLikelihood.cxx:687
 MethodLikelihood.cxx:688
 MethodLikelihood.cxx:689
 MethodLikelihood.cxx:690
 MethodLikelihood.cxx:691
 MethodLikelihood.cxx:692
 MethodLikelihood.cxx:693
 MethodLikelihood.cxx:694
 MethodLikelihood.cxx:695
 MethodLikelihood.cxx:696
 MethodLikelihood.cxx:697
 MethodLikelihood.cxx:698
 MethodLikelihood.cxx:699
 MethodLikelihood.cxx:700
 MethodLikelihood.cxx:701
 MethodLikelihood.cxx:702
 MethodLikelihood.cxx:703
 MethodLikelihood.cxx:704
 MethodLikelihood.cxx:705
 MethodLikelihood.cxx:706
 MethodLikelihood.cxx:707
 MethodLikelihood.cxx:708
 MethodLikelihood.cxx:709
 MethodLikelihood.cxx:710
 MethodLikelihood.cxx:711
 MethodLikelihood.cxx:712
 MethodLikelihood.cxx:713
 MethodLikelihood.cxx:714
 MethodLikelihood.cxx:715
 MethodLikelihood.cxx:716
 MethodLikelihood.cxx:717
 MethodLikelihood.cxx:718
 MethodLikelihood.cxx:719
 MethodLikelihood.cxx:720
 MethodLikelihood.cxx:721
 MethodLikelihood.cxx:722
 MethodLikelihood.cxx:723
 MethodLikelihood.cxx:724
 MethodLikelihood.cxx:725
 MethodLikelihood.cxx:726
 MethodLikelihood.cxx:727
 MethodLikelihood.cxx:728
 MethodLikelihood.cxx:729
 MethodLikelihood.cxx:730
 MethodLikelihood.cxx:731
 MethodLikelihood.cxx:732
 MethodLikelihood.cxx:733
 MethodLikelihood.cxx:734
 MethodLikelihood.cxx:735
 MethodLikelihood.cxx:736
 MethodLikelihood.cxx:737
 MethodLikelihood.cxx:738
 MethodLikelihood.cxx:739
 MethodLikelihood.cxx:740
 MethodLikelihood.cxx:741
 MethodLikelihood.cxx:742
 MethodLikelihood.cxx:743
 MethodLikelihood.cxx:744
 MethodLikelihood.cxx:745
 MethodLikelihood.cxx:746
 MethodLikelihood.cxx:747
 MethodLikelihood.cxx:748
 MethodLikelihood.cxx:749
 MethodLikelihood.cxx:750
 MethodLikelihood.cxx:751
 MethodLikelihood.cxx:752
 MethodLikelihood.cxx:753
 MethodLikelihood.cxx:754
 MethodLikelihood.cxx:755
 MethodLikelihood.cxx:756
 MethodLikelihood.cxx:757
 MethodLikelihood.cxx:758
 MethodLikelihood.cxx:759
 MethodLikelihood.cxx:760
 MethodLikelihood.cxx:761
 MethodLikelihood.cxx:762
 MethodLikelihood.cxx:763
 MethodLikelihood.cxx:764
 MethodLikelihood.cxx:765
 MethodLikelihood.cxx:766
 MethodLikelihood.cxx:767
 MethodLikelihood.cxx:768
 MethodLikelihood.cxx:769
 MethodLikelihood.cxx:770
 MethodLikelihood.cxx:771
 MethodLikelihood.cxx:772
 MethodLikelihood.cxx:773
 MethodLikelihood.cxx:774
 MethodLikelihood.cxx:775
 MethodLikelihood.cxx:776
 MethodLikelihood.cxx:777
 MethodLikelihood.cxx:778
 MethodLikelihood.cxx:779
 MethodLikelihood.cxx:780
 MethodLikelihood.cxx:781
 MethodLikelihood.cxx:782
 MethodLikelihood.cxx:783
 MethodLikelihood.cxx:784
 MethodLikelihood.cxx:785
 MethodLikelihood.cxx:786
 MethodLikelihood.cxx:787
 MethodLikelihood.cxx:788
 MethodLikelihood.cxx:789
 MethodLikelihood.cxx:790
 MethodLikelihood.cxx:791
 MethodLikelihood.cxx:792
 MethodLikelihood.cxx:793
 MethodLikelihood.cxx:794
 MethodLikelihood.cxx:795
 MethodLikelihood.cxx:796
 MethodLikelihood.cxx:797
 MethodLikelihood.cxx:798
 MethodLikelihood.cxx:799
 MethodLikelihood.cxx:800
 MethodLikelihood.cxx:801
 MethodLikelihood.cxx:802
 MethodLikelihood.cxx:803
 MethodLikelihood.cxx:804
 MethodLikelihood.cxx:805
 MethodLikelihood.cxx:806
 MethodLikelihood.cxx:807
 MethodLikelihood.cxx:808
 MethodLikelihood.cxx:809
 MethodLikelihood.cxx:810
 MethodLikelihood.cxx:811
 MethodLikelihood.cxx:812
 MethodLikelihood.cxx:813
 MethodLikelihood.cxx:814
 MethodLikelihood.cxx:815
 MethodLikelihood.cxx:816
 MethodLikelihood.cxx:817
 MethodLikelihood.cxx:818
 MethodLikelihood.cxx:819
 MethodLikelihood.cxx:820
 MethodLikelihood.cxx:821
 MethodLikelihood.cxx:822
 MethodLikelihood.cxx:823
 MethodLikelihood.cxx:824
 MethodLikelihood.cxx:825
 MethodLikelihood.cxx:826
 MethodLikelihood.cxx:827
 MethodLikelihood.cxx:828
 MethodLikelihood.cxx:829
 MethodLikelihood.cxx:830
 MethodLikelihood.cxx:831
 MethodLikelihood.cxx:832
 MethodLikelihood.cxx:833
 MethodLikelihood.cxx:834
 MethodLikelihood.cxx:835
 MethodLikelihood.cxx:836
 MethodLikelihood.cxx:837
 MethodLikelihood.cxx:838
 MethodLikelihood.cxx:839
 MethodLikelihood.cxx:840
 MethodLikelihood.cxx:841
 MethodLikelihood.cxx:842
 MethodLikelihood.cxx:843
 MethodLikelihood.cxx:844
 MethodLikelihood.cxx:845
 MethodLikelihood.cxx:846
 MethodLikelihood.cxx:847
 MethodLikelihood.cxx:848
 MethodLikelihood.cxx:849
 MethodLikelihood.cxx:850
 MethodLikelihood.cxx:851
 MethodLikelihood.cxx:852
 MethodLikelihood.cxx:853
 MethodLikelihood.cxx:854
 MethodLikelihood.cxx:855
 MethodLikelihood.cxx:856
 MethodLikelihood.cxx:857
 MethodLikelihood.cxx:858
 MethodLikelihood.cxx:859
 MethodLikelihood.cxx:860
 MethodLikelihood.cxx:861
 MethodLikelihood.cxx:862
 MethodLikelihood.cxx:863
 MethodLikelihood.cxx:864
 MethodLikelihood.cxx:865
 MethodLikelihood.cxx:866
 MethodLikelihood.cxx:867
 MethodLikelihood.cxx:868
 MethodLikelihood.cxx:869
 MethodLikelihood.cxx:870
 MethodLikelihood.cxx:871
 MethodLikelihood.cxx:872
 MethodLikelihood.cxx:873
 MethodLikelihood.cxx:874
 MethodLikelihood.cxx:875
 MethodLikelihood.cxx:876
 MethodLikelihood.cxx:877
 MethodLikelihood.cxx:878
 MethodLikelihood.cxx:879
 MethodLikelihood.cxx:880
 MethodLikelihood.cxx:881
 MethodLikelihood.cxx:882
 MethodLikelihood.cxx:883
 MethodLikelihood.cxx:884
 MethodLikelihood.cxx:885
 MethodLikelihood.cxx:886
 MethodLikelihood.cxx:887
 MethodLikelihood.cxx:888
 MethodLikelihood.cxx:889
 MethodLikelihood.cxx:890
 MethodLikelihood.cxx:891
 MethodLikelihood.cxx:892
 MethodLikelihood.cxx:893
 MethodLikelihood.cxx:894
 MethodLikelihood.cxx:895
 MethodLikelihood.cxx:896
 MethodLikelihood.cxx:897
 MethodLikelihood.cxx:898
 MethodLikelihood.cxx:899
 MethodLikelihood.cxx:900
 MethodLikelihood.cxx:901
 MethodLikelihood.cxx:902
 MethodLikelihood.cxx:903
 MethodLikelihood.cxx:904
 MethodLikelihood.cxx:905
 MethodLikelihood.cxx:906
 MethodLikelihood.cxx:907
 MethodLikelihood.cxx:908
 MethodLikelihood.cxx:909
 MethodLikelihood.cxx:910
 MethodLikelihood.cxx:911
 MethodLikelihood.cxx:912
 MethodLikelihood.cxx:913
 MethodLikelihood.cxx:914
 MethodLikelihood.cxx:915
 MethodLikelihood.cxx:916
 MethodLikelihood.cxx:917
 MethodLikelihood.cxx:918
 MethodLikelihood.cxx:919
 MethodLikelihood.cxx:920
 MethodLikelihood.cxx:921
 MethodLikelihood.cxx:922
 MethodLikelihood.cxx:923
 MethodLikelihood.cxx:924
 MethodLikelihood.cxx:925
 MethodLikelihood.cxx:926
 MethodLikelihood.cxx:927
 MethodLikelihood.cxx:928
 MethodLikelihood.cxx:929
 MethodLikelihood.cxx:930
 MethodLikelihood.cxx:931
 MethodLikelihood.cxx:932
 MethodLikelihood.cxx:933
 MethodLikelihood.cxx:934
 MethodLikelihood.cxx:935
 MethodLikelihood.cxx:936
 MethodLikelihood.cxx:937
 MethodLikelihood.cxx:938
 MethodLikelihood.cxx:939
 MethodLikelihood.cxx:940
 MethodLikelihood.cxx:941
 MethodLikelihood.cxx:942
 MethodLikelihood.cxx:943
 MethodLikelihood.cxx:944
 MethodLikelihood.cxx:945
 MethodLikelihood.cxx:946
 MethodLikelihood.cxx:947
 MethodLikelihood.cxx:948
 MethodLikelihood.cxx:949
 MethodLikelihood.cxx:950
 MethodLikelihood.cxx:951
 MethodLikelihood.cxx:952
 MethodLikelihood.cxx:953
 MethodLikelihood.cxx:954
 MethodLikelihood.cxx:955
 MethodLikelihood.cxx:956
 MethodLikelihood.cxx:957
 MethodLikelihood.cxx:958
 MethodLikelihood.cxx:959
 MethodLikelihood.cxx:960
 MethodLikelihood.cxx:961
 MethodLikelihood.cxx:962
 MethodLikelihood.cxx:963
 MethodLikelihood.cxx:964
 MethodLikelihood.cxx:965
 MethodLikelihood.cxx:966
 MethodLikelihood.cxx:967
 MethodLikelihood.cxx:968
 MethodLikelihood.cxx:969
 MethodLikelihood.cxx:970
 MethodLikelihood.cxx:971
 MethodLikelihood.cxx:972
 MethodLikelihood.cxx:973
 MethodLikelihood.cxx:974
 MethodLikelihood.cxx:975
 MethodLikelihood.cxx:976
 MethodLikelihood.cxx:977
 MethodLikelihood.cxx:978
 MethodLikelihood.cxx:979
 MethodLikelihood.cxx:980
 MethodLikelihood.cxx:981
 MethodLikelihood.cxx:982
 MethodLikelihood.cxx:983
 MethodLikelihood.cxx:984
 MethodLikelihood.cxx:985
 MethodLikelihood.cxx:986
 MethodLikelihood.cxx:987
 MethodLikelihood.cxx:988
 MethodLikelihood.cxx:989
 MethodLikelihood.cxx:990
 MethodLikelihood.cxx:991
 MethodLikelihood.cxx:992
 MethodLikelihood.cxx:993
 MethodLikelihood.cxx:994
 MethodLikelihood.cxx:995
 MethodLikelihood.cxx:996
 MethodLikelihood.cxx:997
 MethodLikelihood.cxx:998
 MethodLikelihood.cxx:999
 MethodLikelihood.cxx:1000
 MethodLikelihood.cxx:1001
 MethodLikelihood.cxx:1002
 MethodLikelihood.cxx:1003