// @(#)root/mlp:$Id$
// Author: Christophe.Delaere@cern.ch   25/04/04

/*************************************************************************
 * Copyright (C) 1995-2003, Rene Brun and Fons Rademakers.               *
 * All rights reserved.                                                  *
 *                                                                       *
 * For the licensing terms see $ROOTSYS/LICENSE.                         *
 * For the list of contributors see $ROOTSYS/README/CREDITS.             *
 *************************************************************************/

///////////////////////////////////////////////////////////////////////////
//
// TMLPAnalyzer
//
// This utility class contains a set of tests usefull when developing
// a neural network.
// It allows you to check for unneeded variables, and to control
// the network structure.
//
///////////////////////////////////////////////////////////////////////////

#include "TROOT.h"
#include "TSynapse.h"
#include "TNeuron.h"
#include "TMultiLayerPerceptron.h"
#include "TMLPAnalyzer.h"
#include "TTree.h"
#include "TTreeFormula.h"
#include "TEventList.h"
#include "TH1D.h"
#include "TProfile.h"
#include "THStack.h"
#include "TLegend.h"
#include "TPad.h"
#include "TCanvas.h"
#include "TGaxis.h"
#include "TRegexp.h"
#include "TMath.h"
#include "Riostream.h"
#include <stdlib.h>

ClassImp(TMLPAnalyzer)

//______________________________________________________________________________
TMLPAnalyzer::~TMLPAnalyzer()
{
   // Destructor: release the two analysis trees owned by this object.
   delete fIOTree;
   delete fAnalysisTree;
}

//______________________________________________________________________________
Int_t TMLPAnalyzer::GetLayers()
{
   // Returns the number of layers.

   // Layers are separated by ':' in the structure string,
   // so the layer count is one more than the number of separators.
   const TString structure = fNetwork->GetStructure();
   return 1 + structure.CountChar(':');
}

//______________________________________________________________________________
Int_t TMLPAnalyzer::GetNeurons(Int_t layer)
{
   // Returns the number of neurons in given layer.
   // The layer argument is 1-based: layer 1 is the input layer and
   // layer GetLayers() is the output layer.
   // Returns -1 if the requested layer cannot be found.

   if(layer==1) {
      // Input layer: the part of the structure string before the first ':'
      // is a comma-separated list of input formulas.
      TString fStructure = fNetwork->GetStructure();
      TString input      = TString(fStructure(0, fStructure.First(':')));
      return input.CountChar(',')+1;
   }
   else if(layer==GetLayers()) {
      // Output layer: the part after the last ':' is a comma-separated
      // list of outputs.
      TString fStructure = fNetwork->GetStructure();
      TString output = TString(fStructure(fStructure.Last(':') + 1,
                               fStructure.Length() - fStructure.Last(':')));
      return output.CountChar(',')+1;
   }
   else {
      // Hidden layer: the substring between the first and last ':' holds
      // the neuron counts of the hidden layers, themselves separated by ':'.
      Int_t cnt=1;
      TString fStructure = fNetwork->GetStructure();
      TString hidden = TString(fStructure(fStructure.First(':') + 1,
                               fStructure.Last(':') - fStructure.First(':') - 1));
      Int_t beg = 0;
      Int_t end = hidden.Index(":", beg + 1);
      Int_t num = 0;
      // Walk the ':'-separated counts until the requested layer is reached.
      while (end != -1) {
         num = atoi(TString(hidden(beg, end - beg)).Data());
         cnt++;
         beg = end + 1;
         end = hidden.Index(":", beg + 1);
         if(layer==cnt) return num;
      }
      // Last (or only) hidden layer: no trailing ':' after it.
      num = atoi(TString(hidden(beg, hidden.Length() - beg)).Data());
      cnt++;
      if(layer==cnt) return num;
   }
   return -1;
}

//______________________________________________________________________________
TString TMLPAnalyzer::GetNeuronFormula(Int_t idx)
{
   // Returns the formula used as input for neuron (idx) in
   // the first layer.
   // The input section of the structure string (before the first ':') is a
   // comma-separated list of formulas; a leading '@' is stripped from the
   // returned formula. If idx is beyond the last comma, the final formula
   // in the list is returned.

   TString fStructure = fNetwork->GetStructure();
   TString input      = TString(fStructure(0, fStructure.First(':')));
   Int_t beg = 0;
   Int_t end = input.Index(",", beg + 1);
   TString brName;
   Int_t cnt = 0;
   // Scan the comma-separated entries until the idx-th one is found.
   while (end != -1) {
      brName = TString(input(beg, end - beg));
      if (brName[0]=='@')
         brName = brName(1,brName.Length()-1);
      beg = end + 1;
      end = input.Index(",", beg + 1);
      if(cnt==idx) return brName;
      cnt++;
   }
   // Last entry of the list (no trailing comma).
   brName = TString(input(beg, input.Length() - beg));
   if (brName[0]=='@')
      brName = brName(1,brName.Length()-1);
   return brName;
}

//______________________________________________________________________________
const char* TMLPAnalyzer::GetInputNeuronTitle(Int_t in)
{
   // Returns the name of any neuron from the input layer
   TNeuron* n = (TNeuron*) fNetwork->fFirstLayer[in];
   if (!n) return "NO SUCH NEURON";
   return n->GetName();
}

//______________________________________________________________________________
const char* TMLPAnalyzer::GetOutputNeuronTitle(Int_t out)
{
   // Returns the name of any neuron from the output layer
   TNeuron* n = (TNeuron*) fNetwork->fLastLayer[out];
   if (!n) return "NO SUCH NEURON";
   return n->GetName();
}

//______________________________________________________________________________
void TMLPAnalyzer::CheckNetwork()
{
   // Gives some information about the network in the terminal.

   TString fStructure = fNetwork->GetStructure();
   std::cout << "Network with structure: " << fStructure.Data() << std::endl;
   std::cout << "inputs with low values in the differences plot may not be needed" << std::endl;
   // Checks if some input variable is not needed
   char var[64], sel[64];
   for (Int_t i = 0; i < GetNeurons(1); i++) {
      snprintf(var,64,"diff>>tmp%d",i);
      snprintf(sel,64,"inNeuron==%d",i);
      fAnalysisTree->Draw(var, sel, "goff");
      TH1F* tmp = (TH1F*)gDirectory->Get(Form("tmp%d",i));
      if (!tmp) continue;
      std::cout << GetInputNeuronTitle(i)
           << " -> " << tmp->GetMean()
           << " +/- " << tmp->GetRMS() << std::endl;
   }
}

//______________________________________________________________________________
void TMLPAnalyzer::GatherInformations()
{
   // Collect information about what is usefull in the network.
   // This method has to be called first when analyzing a network.
   // Fills the two analysis trees:
   //  - fAnalysisTree ("result"): one entry per (test event, input neuron)
   //    with the impact ("diff") of a small variation of that input;
   //  - fIOTree ("MLP_iotree"): one entry per test event with the input
   //    values, the network outputs and the true values.

   // Size of the probing shift, in units of each input's RMS.
   Double_t shift = 0.1;
   TTree* data = fNetwork->fData;
   TEventList* test = fNetwork->fTest;
   Int_t nEvents = test->GetN();
   Int_t nn = GetNeurons(1);
   Double_t* params = new Double_t[nn];
   Double_t* rms    = new Double_t[nn];
   TTreeFormula** formulas = new TTreeFormula*[nn];
   Int_t* index = new Int_t[nn];
   TString formula;
   // Matches a trailing "{N}" selecting instance N of a multi-valued formula.
   TRegexp re("{[0-9]+}$");
   Ssiz_t len = formula.Length();
   Ssiz_t pos = -1;
   Int_t i(0), j(0), k(0), l(0);
   // Build one TTreeFormula per input neuron and record each input's RMS.
   for(i=0; i<nn; i++){
      formula = GetNeuronFormula(i);
      pos = re.Index(formula,&len);
      if(pos==-1 || len<3) {
         // Plain formula: evaluate instance 0.
         formulas[i] = new TTreeFormula(Form("NF%lu",(ULong_t)this),formula,data);
         index[i] = 0;
      }
      else {
         // Strip the "{N}" suffix and remember N as the instance to evaluate.
         TString newformula(formula,pos);
         TString val = formula(pos+1,len-2);
         formulas[i] = new TTreeFormula(Form("NF%lu",(ULong_t)this),newformula,data);
         formula = newformula;
         index[i] = val.Atoi();
      }
      // The local histogram registers itself in the current directory, so
      // the ">>tmpb" draw below fills it; its RMS scales the input shift.
      TH1D tmp("tmpb", "tmpb", 1, -FLT_MAX, FLT_MAX);
      data->Draw(Form("%s>>tmpb",formula.Data()),"","goff");
      rms[i]  = tmp.GetRMS();
   }
   Int_t inNeuron = 0;
   Double_t diff = 0.;
   if(fAnalysisTree) delete fAnalysisTree;
   fAnalysisTree = new TTree("result","analysis");
   fAnalysisTree->SetDirectory(0);
   fAnalysisTree->Branch("inNeuron",&inNeuron,"inNeuron/I");
   fAnalysisTree->Branch("diff",&diff,"diff/D");
   Int_t numOutNodes=GetNeurons(GetLayers());
   Double_t *outVal=new Double_t[numOutNodes];
   Double_t *trueVal=new Double_t[numOutNodes];

   delete fIOTree;
   fIOTree=new TTree("MLP_iotree","MLP_iotree");
   fIOTree->SetDirectory(0);
   // Build the leaf lists ("In0/D:In1/D:..." etc.); the trailing ':' is
   // removed before the branch is created.
   TString leaflist;
   for (i=0; i<nn; i++)
      leaflist+=Form("In%d/D:",i);
   leaflist.Remove(leaflist.Length()-1);
   fIOTree->Branch("In", params, leaflist);

   leaflist="";
   for (i=0; i<numOutNodes; i++)
      leaflist+=Form("Out%d/D:",i);
   leaflist.Remove(leaflist.Length()-1);
   fIOTree->Branch("Out", outVal, leaflist);

   leaflist="";
   for (i=0; i<numOutNodes; i++)
      leaflist+=Form("True%d/D:",i);
   leaflist.Remove(leaflist.Length()-1);
   fIOTree->Branch("True", trueVal, leaflist);
   Double_t v1 = 0.;
   Double_t v2 = 0.;
   // Loop on the events in the test sample
   for(j=0; j< nEvents; j++) {
      fNetwork->GetEntry(test->GetEntry(j));
      // Loop on the neurons to evaluate
      for(k=0; k<GetNeurons(1); k++) {
         params[k] = formulas[k]->EvalInstance(index[k]);
      }
      for(k=0; k<GetNeurons(GetLayers()); k++) {
         outVal[k] = fNetwork->Evaluate(k,params);
         trueVal[k] = ((TNeuron*)fNetwork->fLastLayer[k])->GetBranch();
      }
      fIOTree->Fill();

      // Loop on the input neurons
      for (i = 0; i < GetNeurons(1); i++) {
         inNeuron = i;
         diff = 0;
         // Loop on the neurons in the output layer
         for(l=0; l<GetNeurons(GetLayers()); l++){
            // Shift input i up and down by shift*rms and accumulate the
            // squared change of the network response.
            params[i] += shift*rms[i];
            v1 = fNetwork->Evaluate(l,params);
            params[i] -= 2*shift*rms[i];
            v2 = fNetwork->Evaluate(l,params);
            diff += (v1-v2)*(v1-v2);
            // reset to original value
            params[i] += shift*rms[i];
         }
         diff = TMath::Sqrt(diff);
         fAnalysisTree->Fill();
      }
   }
   delete[] params;
   delete[] rms;
   delete[] outVal;
   delete[] trueVal;
   delete[] index;
   for(i=0; i<GetNeurons(1); i++) delete formulas[i]; delete [] formulas;
   // Branch addresses point at the just-deleted buffers: detach them.
   fAnalysisTree->ResetBranchAddresses();
   fIOTree->ResetBranchAddresses();
}

//______________________________________________________________________________
void TMLPAnalyzer::DrawDInput(Int_t i)
{
   // Draws the distribution (on the test sample) of the
   // impact on the network output of a small variation of
   // the ith input.

   TString cut;
   cut.Form("inNeuron==%d", i);
   fAnalysisTree->Draw("diff", cut);
}

//______________________________________________________________________________
void TMLPAnalyzer::DrawDInputs()
{
   // Draws the distribution (on the test sample) of the
   // impact on the network output of a small variation of
   // each input.
   // DrawDInputs() draws something that approximates the distribution of the
   // derivative of the NN w.r.t. each input. That quantity is recognized as
   // one of the measures to determine key quantities in the network.
   //
   // What is done is to vary one input around its nominal value and to see
   // how the NN changes. This is done for each entry in the sample and produces
   // a distribution.
   //
   // What you can learn from that is:
   // - is variable a really useful, or is my network insensitive to it ?
   // - is there any risk of big systematic ? Is the network extremely sensitive
   //   to small variations of any of my inputs ?
   //
   // As you might understand, this is to be considered with care and can serve
   // as input for an "educated guess" when optimizing the network.

   THStack* stack  = new THStack("differences","differences (impact of variables on ANN)");
   TLegend* legend = new TLegend(0.75,0.75,0.95,0.95);
   TH1F* tmp = 0;
   char var[64], sel[64];
   for(Int_t i = 0; i < GetNeurons(1); i++) {
      snprintf(var,64, "diff>>tmp%d", i);
      snprintf(sel,64, "inNeuron==%d", i);
      fAnalysisTree->Draw(var, sel, "goff");
      tmp = (TH1F*)gDirectory->Get(Form("tmp%d",i));
      // The histogram may be absent if the draw failed (e.g. empty analysis
      // tree); skip it instead of dereferencing a null pointer, consistent
      // with the guard in CheckNetwork().
      if (!tmp) continue;
      tmp->SetDirectory(0);
      tmp->SetLineColor(i+1);
      stack->Add(tmp);
      legend->AddEntry(tmp,GetInputNeuronTitle(i),"l");
   }
   stack->Draw("nostack");
   legend->Draw();
   gPad->SetLogy();
}

//______________________________________________________________________________
void TMLPAnalyzer::DrawNetwork(Int_t neuron, const char* signal, const char* bg)
{
   // Draws the distribution of the neural network (using ith neuron).
   // Two distributions are drawn, for events passing respectively the "signal"
   // and "background" cuts. Only the test sample is used.

   TTree* data = fNetwork->fData;
   TEventList* test = fNetwork->fTest;
   // Remember the currently active event list so it can be restored at the end.
   TEventList* current = data->GetEventList();
   data->SetEventList(test);
   THStack* stack = new THStack("__NNout_TMLPA",Form("Neural net output (neuron %d)",neuron));
   TH1F *bgh  = new TH1F("__bgh_TMLPA", "NN output", 50, -0.5, 1.5);
   TH1F *sigh = new TH1F("__sigh_TMLPA", "NN output", 50, -0.5, 1.5);
   bgh->SetDirectory(0);
   sigh->SetDirectory(0);
   Int_t nEvents = 0;
   Int_t j=0;
   // build event lists for signal and background
   // NOTE(review): the ">>name" draws below are expected to fill these lists
   // by name lookup — confirm they target these instances and not fresh ones.
   TEventList* signal_list = new TEventList("__tmpSig_MLPA");
   TEventList* bg_list     = new TEventList("__tmpBkg_MLPA");
   data->Draw(">>__tmpSig_MLPA",signal,"goff");
   data->Draw(">>__tmpBkg_MLPA",bg,"goff");

   // fill the background
   nEvents = bg_list->GetN();
   for(j=0; j< nEvents; j++) {
      bgh->Fill(fNetwork->Result(bg_list->GetEntry(j),neuron));
   }
   // fill the signal
   nEvents = signal_list->GetN();
   for(j=0; j< nEvents; j++) {
      sigh->Fill(fNetwork->Result(signal_list->GetEntry(j),neuron));
   }
   // draws the result
   bgh->SetLineColor(kBlue);
   bgh->SetFillStyle(3008);
   bgh->SetFillColor(kBlue);
   sigh->SetLineColor(kRed);
   sigh->SetFillStyle(3003);
   sigh->SetFillColor(kRed);
   bgh->SetStats(0);
   sigh->SetStats(0);
   stack->Add(bgh);
   stack->Add(sigh);
   TLegend *legend = new TLegend(.75, .80, .95, .95);
   legend->AddEntry(bgh, "Background");
   legend->AddEntry(sigh,"Signal");
   stack->Draw("nostack");
   legend->Draw();
   // restore the default event list
   data->SetEventList(current);
   delete signal_list;
   delete bg_list;
}

//______________________________________________________________________________
TProfile* TMLPAnalyzer::DrawTruthDeviation(Int_t outnode /*=0*/,
                                           Option_t *option /*=""*/)
{
   // Create a profile of the difference of the MLP output minus the
   // true value for a given output node outnode, vs the true value for
   // outnode, for all test data events. This method is mainly useful
   // when doing regression analysis with the MLP (i.e. not classification,
   // but continuous truth values).
   // The resulting TProfile histogram is returned.
   // It is not drawn if option "goff" is specified.
   // Options are passed to TProfile::Draw

   // Make sure the I/O tree with outputs and truth values exists.
   if (!fIOTree) GatherInformations();
   TString histName;
   histName.Form("MLP_truthdev_%d", outnode);
   TString expr;
   expr.Form("Out.Out%d-True.True%d:True.True%d>>",
             outnode, outnode, outnode);
   // Pipe the draw into a profile with 20 bins.
   fIOTree->Draw(expr + histName + "(20)", "", "goff prof");
   TProfile* prof = (TProfile*)gDirectory->Get(histName);
   prof->SetDirectory(0);
   const char* nodeTitle = GetOutputNeuronTitle(outnode);
   if (nodeTitle) {
      prof->SetTitle(Form("#Delta(output - truth) vs. truth for %s",
                          nodeTitle));
      prof->GetXaxis()->SetTitle(nodeTitle);
      prof->GetYaxis()->SetTitle(Form("#Delta(output - truth) for %s", nodeTitle));
   }
   if (!strstr(option,"goff"))
      prof->Draw();
   return prof;
}

//______________________________________________________________________________
THStack* TMLPAnalyzer::DrawTruthDeviations(Option_t *option /*=""*/)
{
   // Creates TProfiles of the difference of the MLP output minus the
   // true value vs the true value, one for each output, filled with the
   // test data events. This method is mainly useful when doing regression
   // analysis with the MLP (i.e. not classification, but continuous truth
   // values).
   // The returned THStack contains all the TProfiles. It is drawn unless
   // the option "goff" is specified.
   // Options are passed to TProfile::Draw.
   THStack *deviations = new THStack("MLP_TruthDeviation",
                                     "Deviation of MLP output from truth");

   // A non-null legend signals that we will actually draw at the end.
   TLegend *legend = 0;
   if (!option || !strstr(option,"goff"))
      legend = new TLegend(.4,.85,.95,.95,"#Delta(output - truth) vs. truth for:");

   const char* xTitle = 0;

   // One profile per output neuron, each added to the stack and legend.
   const Int_t nOut = GetNeurons(GetLayers());
   for (Int_t node = 0; node < nOut; node++) {
      TProfile* prof = DrawTruthDeviation(node, "goff");
      prof->SetLineColor(1 + node);
      deviations->Add(prof, option);
      if (legend) legend->AddEntry(prof, GetOutputNeuronTitle(node));
      if (node == 0) {
         // All profiles share the X title; keep the first one's.
         xTitle = prof->GetXaxis()->GetTitle();
      }
   }

   if (legend) {
      deviations->Draw("nostack");
      legend->Draw();
      // The stack's axes only exist after drawing.
      deviations->GetXaxis()->SetTitle(xTitle);
      deviations->GetYaxis()->SetTitle("#Delta(output - truth)");
   }

   return deviations;
}

//______________________________________________________________________________
TProfile* TMLPAnalyzer::DrawTruthDeviationInOut(Int_t innode,
                                                Int_t outnode /*=0*/,
                                                Option_t *option /*=""*/)
{
   // Creates a profile of the difference of the MLP output outnode minus
   // the true value of outnode vs the input value innode, for all test
   // data events.
   // The resulting TProfile histogram is returned.
   // It is not drawn if option "goff" is specified.
   // Options are passed to TProfile::Draw

   // Make sure the I/O tree with inputs, outputs and truth values exists.
   if (!fIOTree) GatherInformations();
   TString histName;
   histName.Form("MLP_truthdev_i%d_o%d", innode, outnode);
   TString expr;
   expr.Form("Out.Out%d-True.True%d:In.In%d>>",
             outnode, outnode, innode);
   // Pipe the draw into a profile with 50 bins.
   fIOTree->Draw(expr + histName + "(50)", "", "goff prof");
   TProfile* prof = (TProfile*)gROOT->FindObject(histName);
   prof->SetDirectory(0);
   const char* inTitle  = GetInputNeuronTitle(innode);
   const char* outTitle = GetOutputNeuronTitle(outnode);
   prof->SetTitle(Form("#Delta(output - truth) of %s vs. input %s",
                       outTitle, inTitle));
   prof->GetXaxis()->SetTitle(Form("%s", inTitle));
   prof->GetYaxis()->SetTitle(Form("#Delta(output - truth) for %s",
                                   outTitle));
   if (!strstr(option,"goff"))
      prof->Draw(option);
   return prof;
}

//______________________________________________________________________________
THStack* TMLPAnalyzer::DrawTruthDeviationInsOut(Int_t outnode /*=0*/,
                                                Option_t *option /*=""*/)
{
   // Creates a profile of the difference of the MLP output outnode minus the
   // true value of outnode vs the input value, stacked for all inputs, for
   // all test data events.
   // The returned THStack contains all the TProfiles. It is drawn unless
   // the option "goff" is specified.
   // Options are passed to TProfile::Draw.
   const char* outTitle = GetOutputNeuronTitle(outnode);
   TString stackName;
   stackName.Form("MLP_TruthDeviationIO_%d", outnode);
   THStack *deviations = new THStack(stackName,
                                     Form("Deviation of MLP output %s from truth",
                                          outTitle));

   // A non-null legend signals that we will actually draw at the end.
   TLegend *legend = 0;
   if (!option || !strstr(option,"goff"))
      legend = new TLegend(.4,.75,.95,.95,
                           Form("#Delta(output - truth) of %s vs. input for:",
                                outTitle));

   // One profile per input neuron, each added to the stack and legend.
   const Int_t nIn = GetNeurons(1);
   for (Int_t node = 0; node < nIn; node++) {
      TProfile* prof = DrawTruthDeviationInOut(node, outnode, "goff");
      prof->SetLineColor(1 + node);
      deviations->Add(prof, option);
      if (legend) legend->AddEntry(prof, prof->GetXaxis()->GetTitle());
   }

   if (legend) {
      deviations->Draw("nostack");
      legend->Draw();
      // The stack's axes only exist after drawing.
      deviations->GetXaxis()->SetTitle("Input value");
      deviations->GetYaxis()->SetTitle(Form("#Delta(output - truth) for %s",
                                 outTitle));
   }

   return deviations;
}