// @(#)root/tmva $Id: MethodKNN.cxx 31458 2009-11-30 13:58:20Z stelzer $
// Author: Rustem Ospanov 

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis       *
 * Package: TMVA                                                                  *
 * Class  : MethodKNN                                                             *
 * Web    : http://tmva.sourceforge.net                                           *
 *                                                                                *
 * Description:                                                                   *
 *      Implementation of the k-nearest neighbor (kNN) method                    *
 *                                                                                *
 * Author:                                                                        *
 *      Rustem Ospanov <rustem@fnal.gov> - U. of Texas at Austin, USA             *
 *                                                                                *
 * Copyright (c) 2007:                                                            *
 *      CERN, Switzerland                                                         * 
 *      MPI-K Heidelberg, Germany                                                 * 
 *      U. of Texas at Austin, USA                                                *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (http://tmva.sourceforge.net/LICENSE)                                          *
 **********************************************************************************/

//////////////////////////////////////////////////////////////////////////
//                                                                      //
// MethodKNN                                                            //
//                                                                      //
// Analysis of the k-nearest neighbor algorithm                         //
//                                                                      //
//////////////////////////////////////////////////////////////////////////
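
//
// A minimal usage sketch (the factory pointer and the option values are
// illustrative, not fixed by this class):
//
//    factory->BookMethod( TMVA::Types::kKNN, "KNN",
//                         "nkNN=20:ScaleFrac=0.8:!UseKernel:UseWeight" );
//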

// C/C++
#include <cmath>
#include <string>
#include <cstdlib>

// ROOT
#include "TFile.h"
#include "TMath.h"
#include "TTree.h"

// TMVA
#include "TMVA/ClassifierFactory.h"
#include "TMVA/MethodKNN.h"
#include "TMVA/Ranking.h"
#include "TMVA/Tools.h"

REGISTER_METHOD(KNN)

ClassImp(TMVA::MethodKNN)

//_______________________________________________________________________
TMVA::MethodKNN::MethodKNN( const TString& jobName,
                            const TString& methodTitle,
                            DataSetInfo& theData, 
                            const TString& theOption,
                            TDirectory* theTargetDir ) 
   : TMVA::MethodBase(jobName, Types::kKNN, methodTitle, theData, theOption, theTargetDir),
     fModule(0)
{
   // standard constructor
}

//_______________________________________________________________________
TMVA::MethodKNN::MethodKNN( DataSetInfo& theData, 
                            const TString& theWeightFile,  
                            TDirectory* theTargetDir ) 
   : TMVA::MethodBase( Types::kKNN, theData, theWeightFile, theTargetDir),
     fModule(0)
{
   // constructor from weight file
}

//_______________________________________________________________________
TMVA::MethodKNN::~MethodKNN()
{
   // destructor
   if (fModule) delete fModule;
}

//_______________________________________________________________________
void TMVA::MethodKNN::DeclareOptions() 
{
   // MethodKNN options
 
   // fnkNN         = 20;      // number of k-nearest neighbors
   // fBalanceDepth = 6;       // number of binary tree levels used for tree balancing
   // fScaleFrac    = 0.8;     // fraction of events used to compute variable width
   // fSigmaFact    = 1.0;     // scale factor for Gaussian sigma
   // fKernel       = "Gaus";  // use polynomial (1-x^3)^3 or Gaussian kernel
   // fTrim         = false;   // use equal number of signal and background events
   // fUseKernel    = false;   // use polynomial kernel weight function
   // fUseWeight    = true;    // count events using weights
   // fUseLDA       = false;   // use local linear discriminant analysis

   DeclareOptionRef(fnkNN         = 20,     "nkNN",         "Number of k-nearest neighbors");
   DeclareOptionRef(fBalanceDepth = 6,      "BalanceDepth", "Binary tree balance depth");
   DeclareOptionRef(fScaleFrac    = 0.80,   "ScaleFrac",    "Fraction of events used to compute variable width");
   DeclareOptionRef(fSigmaFact    = 1.0,    "SigmaFact",    "Scale factor for sigma in Gaussian kernel");
   DeclareOptionRef(fKernel       = "Gaus", "Kernel",       "Use polynomial (=Poln) or Gaussian (=Gaus) kernel");
   DeclareOptionRef(fTrim         = kFALSE, "Trim",         "Use equal number of signal and background events");
   DeclareOptionRef(fUseKernel    = kFALSE, "UseKernel",    "Use polynomial kernel weight");
   DeclareOptionRef(fUseWeight    = kTRUE,  "UseWeight",    "Use weight to count kNN events");
   DeclareOptionRef(fUseLDA       = kFALSE, "UseLDA",       "Use local linear discriminant - experimental feature");
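
   // An illustrative combined option string, as parsed by the declarations
   // above (a sketch, not a default):
   //    "nkNN=40:ScaleFrac=0.8:UseKernel:Kernel=Gaus:!Trim"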
}

//_______________________________________________________________________
void TMVA::MethodKNN::DeclareCompatibilityOptions() {
   MethodBase::DeclareCompatibilityOptions();
   DeclareOptionRef(fTreeOptDepth = 6, "TreeOptDepth", "Binary tree optimisation depth");
}

//_______________________________________________________________________
void TMVA::MethodKNN::ProcessOptions() 
{
   // process the options specified by the user
   if (!(fnkNN > 0)) {      
      fnkNN = 10;
      Log() << kWARNING << "kNN must be a positive integer: set kNN = " << fnkNN << Endl;
   }
   if (fScaleFrac < 0.0) {
      fScaleFrac = 0.0;
      Log() << kWARNING << "ScaleFrac cannot be negative: set ScaleFrac = " << fScaleFrac << Endl;
   }
   if (fScaleFrac > 1.0) {
      fScaleFrac = 1.0;
      Log() << kWARNING << "ScaleFrac cannot exceed 1: set ScaleFrac = " << fScaleFrac << Endl;
   }
   if (!(fBalanceDepth > 0)) {
      fBalanceDepth = 6;
      Log() << kWARNING << "Optimize must be a positive integer: set Optimize = " << fBalanceDepth << Endl;      
   }

   Log() << kVERBOSE
         << "kNN options:\n"
         << "  kNN = " << fnkNN << "\n"
         << "  UseKernel = " << fUseKernel << "\n"
         << "  SigmaFact = " << fSigmaFact << "\n"
         << "  ScaleFrac = " << fScaleFrac << "\n"
         << "  Kernel = " << fKernel << "\n"
         << "  Trim = " << fTrim << "\n"
         << "  Optimize = " << fBalanceDepth << Endl;
}

//_______________________________________________________________________
Bool_t TMVA::MethodKNN::HasAnalysisType( Types::EAnalysisType type, UInt_t numberClasses, UInt_t /*numberTargets*/ )
{
   // kNN can handle classification with two classes and regression with one regression-target
   if (type == Types::kClassification && numberClasses == 2) return kTRUE;
   if (type == Types::kRegression) return kTRUE;
   return kFALSE;
}

//_______________________________________________________________________
void TMVA::MethodKNN::Init() 
{
   // Initialization

   // if fScaleFrac <= 0.0, do not scale input variables
   // if fScaleFrac >= 1.0, use all event coordinates to scale input variables
   
   fModule = new kNN::ModulekNN();
   fSumOfWeightsS = 0;
   fSumOfWeightsB = 0;
}

//_______________________________________________________________________
void TMVA::MethodKNN::MakeKNN() 
{
   // create kNN
   if (!fModule) {
      Log() << kFATAL << "ModulekNN is not created" << Endl;
   }

   fModule->Clear();

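   // assemble the option string for ModulekNN::Fill: "metric" requests
   // scaling of the input variables and "trim" requests equal numbers of
   // signal and background events; the flags are plainly concatenated here
   // and (presumably) matched by substring search inside the module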
   std::string option;
   if (fScaleFrac > 0.0) {
      option += "metric";
   }
   if (fTrim) {
      option += "trim";
   }

   Log() << kINFO << "Creating kd-tree with " << fEvent.size() << " events" << Endl;

   for (kNN::EventVec::const_iterator event = fEvent.begin(); event != fEvent.end(); ++event) {
      fModule->Add(*event);
   }

   // create binary tree
   fModule->Fill(static_cast<UInt_t>(fBalanceDepth),
                 static_cast<UInt_t>(100.0*fScaleFrac),
                 option);
}

//_______________________________________________________________________
void TMVA::MethodKNN::Train()
{
   // kNN training
   Log() << kINFO << "<Train> start..." << Endl;

   if (IsNormalised()) {
      Log() << kINFO << "Input events are normalized - setting ScaleFrac to 0" << Endl;
      fScaleFrac = 0.0;
   }
   
   if (!fEvent.empty()) {
      Log() << kINFO << "Erasing " << fEvent.size() << " previously stored events" << Endl;
      fEvent.clear();
   }
   if (GetNVariables() < 1)
      Log() << kFATAL << "MethodKNN::Train() - mismatched or wrong number of event variables" << Endl;
 

   Log() << kINFO << "Reading " << GetNEvents() << " events" << Endl;

   for (UInt_t ievt = 0; ievt < GetNEvents(); ++ievt) {
      // read the training event
      const Event*   evt_   = GetEvent(ievt);
      Double_t       weight = evt_->GetWeight();

      // skip events with non-positive weights if negative weights
      // are to be ignored in training
      if (IgnoreEventsWithNegWeightsInTraining() && weight <= 0) continue;

      kNN::VarVec vvec(GetNVariables(), 0.0);      
      for (UInt_t ivar = 0; ivar < evt_->GetNVariables(); ++ivar) vvec[ivar] = evt_->GetValue(ivar);
      
      Short_t event_type = 0;

      if (evt_->IsSignal()) { // signal type = 1
         fSumOfWeightsS += weight;
         event_type = 1;
      }
      else { // background type = 2
         fSumOfWeightsB += weight;
         event_type = 2;
      }

      //
      // Create event and add classification variables, weight, type and regression variables
      // 
      kNN::Event event_knn(vvec, weight, event_type);
      event_knn.SetTargets(evt_->GetTargets());
      fEvent.push_back(event_knn);
      
   }
   Log() << kINFO
         << "Sum of signal event weights: " << fSumOfWeightsS << Endl
         << "Sum of background event weights: " << fSumOfWeightsB << Endl;

   // create kd-tree (binary tree) structure
   MakeKNN();
}

//_______________________________________________________________________
Double_t TMVA::MethodKNN::GetMvaValue( Double_t* err )
{
   // Compute classifier response

   // cannot determine error
   if (err != 0) *err = -1;

   //
   // Define local variables
   //
   const Event *ev = GetEvent();
   const Int_t nvar = GetNVariables();
   const Double_t weight = ev->GetWeight();
   const UInt_t knn = static_cast<UInt_t>(fnkNN);

   kNN::VarVec vvec(static_cast<UInt_t>(nvar), 0.0);
   
   for (Int_t ivar = 0; ivar < nvar; ++ivar) {
      vvec[ivar] = ev->GetValue(ivar);
   }   

   // search for fnkNN+2 nearest neighbors, pad with two 
   // events to avoid Monte-Carlo events with zero distance
   // most of CPU time is spent in this recursive function
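   // (the type value 3 simply tags the query event as neither
   // signal = 1 nor background = 2)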
   const kNN::Event event_knn(vvec, weight, 3);
   fModule->Find(event_knn, knn + 2);

   const kNN::List &rlist = fModule->GetkNNList();
   if (rlist.size() != knn + 2) {
      Log() << kFATAL << "kNN result list has unexpected size: " << rlist.size()
            << " instead of " << knn + 2 << Endl;
      return -100.0;
   }
   
   if (fUseLDA) return MethodKNN::getLDAValue(rlist, event_knn);

   //
   // Set flags for kernel option=Gaus, Poln
   //
   Bool_t use_gaus = false, use_poln = false;
   if (fUseKernel) {
      if      (fKernel == "Gaus") use_gaus = true;
      else if (fKernel == "Poln") use_poln = true;
   }

   //
   // Compute radius for polynomial kernel
   //
   Double_t kradius = -1.0;
   if (use_poln) {
      kradius = MethodKNN::getKernelRadius(rlist);

      if (!(kradius > 0.0)) {
         Log() << kFATAL << "kNN radius is not positive" << Endl;
         return -100.0; 
      }
      
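      // store the inverse of the largest neighbor distance so that the
      // kernel argument TMath::Sqrt(lit->second)*kradius falls into [0,1]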
      kradius = 1.0/TMath::Sqrt(kradius);
   }
   
   //
   // Compute RMS of variable differences for Gaussian sigma
   //
   std::vector<Double_t> rms_vec;
   if (use_gaus) {
      rms_vec = TMVA::MethodKNN::getRMS(rlist, event_knn);

      if (rms_vec.empty() || rms_vec.size() != event_knn.GetNVar()) {
         Log() << kFATAL << "Failed to compute RMS vector" << Endl;
         return -100.0; 
      }            
   }

   UInt_t count_all = 0;
   Double_t weight_all = 0, weight_sig = 0, weight_bac = 0;

   for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit) {

      // get reference to current node to make code more readable
      const kNN::Node<kNN::Event> &node = *(lit->first);
      
      // Warn about Monte-Carlo event with zero distance
      // this happens when this query event is also in learning sample
      if (lit->second < 0.0) {
         Log() << kFATAL << "A neighbor has negative distance to query event" << Endl;
      }
      else if (!(lit->second > 0.0)) {
         Log() << kVERBOSE << "A neighbor has zero distance to query event" << Endl;
      }
      
      // get event weight and scale weight by kernel function
      Double_t evweight = node.GetWeight();
      if      (use_gaus) evweight *= MethodKNN::GausKernel(event_knn, node.GetEvent(), rms_vec);
      else if (use_poln) evweight *= MethodKNN::PolnKernel(TMath::Sqrt(lit->second)*kradius);
      
      if (fUseWeight) weight_all += evweight;
      else          ++weight_all;

      if (node.GetEvent().GetType() == 1) { // signal type = 1
         if (fUseWeight) weight_sig += evweight;
         else          ++weight_sig;
      }
      else if (node.GetEvent().GetType() == 2) { // background type = 2
         if (fUseWeight) weight_bac += evweight;
         else          ++weight_bac;
      }
      else {
         Log() << kFATAL << "Unknown type for training event" << Endl;
      }
      
      // use only fnkNN events
      ++count_all;

      if (count_all >= knn) {
         break;
      }      
   }

   // check that the number of counted neighbors is positive
   if (!(count_all > 0)) {
      Log() << kFATAL << "Size of kNN result list is not positive" << Endl;
      return -100.0;
   }
   
   // check whether fewer than k neighbors were counted
   if (count_all < knn) {
      Log() << kDEBUG << "count_all is smaller than k: " << count_all << " < " << knn << Endl;
   }
   
   // Check that total weight is positive
   if (!(weight_all > 0.0)) {
      Log() << kFATAL << "kNN result total weight is not positive" << Endl;
      return -100.0;
   }
   
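   // the classifier response is the (kernel-weighted) fraction of signal
   // weight among the k nearest neighbors: D = w_sig / (w_sig + w_bac)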
   return weight_sig/weight_all;
}

//_______________________________________________________________________
const std::vector< Float_t >& TMVA::MethodKNN::GetRegressionValues()
{
   //
   // Return the vector of weighted averages over the target values of the
   // k-nearest neighbors. Use an own copy of the regression vector rather
   // than a bare pointer to a vector.
   //
   if( fRegressionReturnVal == 0 )
      fRegressionReturnVal = new std::vector<Float_t>;
   else 
      fRegressionReturnVal->clear();

   //
   // Define local variables
   //
   const Event *evt = GetEvent();
   const Int_t nvar = GetNVariables();
   const UInt_t knn = static_cast<UInt_t>(fnkNN);
   std::vector<float> reg_vec;

   kNN::VarVec vvec(static_cast<UInt_t>(nvar), 0.0);
   
   for (Int_t ivar = 0; ivar < nvar; ++ivar) {
      vvec[ivar] = evt->GetValue(ivar);
   }   

   // search for fnkNN+2 nearest neighbors, pad with two 
   // events to avoid Monte-Carlo events with zero distance
   // most of CPU time is spent in this recursive function
   const kNN::Event event_knn(vvec, evt->GetWeight(), 3);
   fModule->Find(event_knn, knn + 2);

   const kNN::List &rlist = fModule->GetkNNList();
   if (rlist.size() != knn + 2) {
      Log() << kFATAL << "kNN result list has unexpected size: " << rlist.size()
            << " instead of " << knn + 2 << Endl;
      return *fRegressionReturnVal;
   }

   // compute the weighted average of the neighbors' target vectors
   Double_t weight_all = 0;
   UInt_t count_all = 0;

   for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit) {

      // get reference to current node to make code more readable
      const kNN::Node<kNN::Event> &node = *(lit->first);
      const kNN::VarVec &tvec = node.GetEvent().GetTargets();
      const Double_t weight = node.GetEvent().GetWeight();

      if (reg_vec.empty()) {
         reg_vec = kNN::VarVec(tvec.size(), 0.0);
      }
      
      for(UInt_t ivar = 0; ivar < tvec.size(); ++ivar) {
         if (fUseWeight) reg_vec[ivar] += tvec[ivar]*weight;
         else            reg_vec[ivar] += tvec[ivar];
      }

      if (fUseWeight) weight_all += weight;
      else          ++weight_all;

      // use only fnkNN events
      ++count_all;

      if (count_all == knn) {
         break;
      }
   }

   // check that the total weight sum is positive
   if (!(weight_all > 0.0)) {
      Log() << kFATAL << "Total weight sum is not positive: " << weight_all << Endl;
      return *fRegressionReturnVal;
   }

   for (UInt_t ivar = 0; ivar < reg_vec.size(); ++ivar) {
      reg_vec[ivar] /= weight_all;
   }

   // copy result
   fRegressionReturnVal->insert(fRegressionReturnVal->begin(), reg_vec.begin(), reg_vec.end());

   return *fRegressionReturnVal;
}

//_______________________________________________________________________
const TMVA::Ranking* TMVA::MethodKNN::CreateRanking() 
{
   // no ranking available
   return 0;
}

//_______________________________________________________________________
void TMVA::MethodKNN::AddWeightsXMLTo( void* parent ) const {
   // write weights to XML
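   //
   // Layout sketch of the node written below:
   //    <Weights NEvents="..." NVar="..." NTgt="...">
   //       <Event Type="..." Weight="...">var0 var1 ... tgt0 tgt1 ...</Event>
   //       ...
   //    </Weights>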

   void* wght = gTools().AddChild(parent, "Weights");
   gTools().AddAttr(wght,"NEvents",fEvent.size());
   if (fEvent.size()>0) gTools().AddAttr(wght,"NVar",fEvent.begin()->GetNVar());
   if (fEvent.size()>0) gTools().AddAttr(wght,"NTgt",fEvent.begin()->GetNTgt());

   for (kNN::EventVec::const_iterator event = fEvent.begin(); event != fEvent.end(); ++event) {

      std::stringstream s("");
      s.precision( 16 );
      for (UInt_t ivar = 0; ivar < event->GetNVar(); ++ivar) {
         if (ivar>0) s << " ";
         s << std::scientific << event->GetVar(ivar);
      }

      for (UInt_t itgt = 0; itgt < event->GetNTgt(); ++itgt) {
         s << " " << std::scientific << event->GetTgt(itgt);
      }

      void* evt = gTools().AddChild(wght, "Event", s.str().c_str());
      gTools().AddAttr(evt,"Type", event->GetType());
      gTools().AddAttr(evt,"Weight", event->GetWeight());
   }
}

//_______________________________________________________________________
void TMVA::MethodKNN::ReadWeightsFromXML( void* wghtnode ) {

   void* ch = gTools().GetChild(wghtnode); // first event
   UInt_t nvar = 0, ntgt = 0;
   gTools().ReadAttr( wghtnode, "NVar", nvar );
   gTools().ReadAttr( wghtnode, "NTgt", ntgt );


   Short_t evtType(0);
   Double_t evtWeight(0);

   while (ch) {
      // build event
      kNN::VarVec vvec(nvar, 0);
      kNN::VarVec tvec(ntgt, 0);

      gTools().ReadAttr( ch, "Type",   evtType   );
      gTools().ReadAttr( ch, "Weight", evtWeight );
      std::stringstream s( gTools().GetContent(ch) );
      
      for(UInt_t ivar=0; ivar<nvar; ivar++)
         s >> vvec[ivar];

      for(UInt_t itgt=0; itgt<ntgt; itgt++)
         s >> tvec[itgt];

      ch = gTools().GetNextChild(ch);

      kNN::Event event_knn(vvec, evtWeight, evtType, tvec);
      fEvent.push_back(event_knn);
   }

   // create kd-tree (binary tree) structure
   MakeKNN();
}

//_______________________________________________________________________
void TMVA::MethodKNN::ReadWeightsFromStream(istream& is)
{
   // read the weights
   Log() << kINFO << "Starting ReadWeightsFromStream(istream& is) function..." << Endl;

   if (!fEvent.empty()) {
      Log() << kINFO << "Erasing " << fEvent.size() << " previously stored events" << Endl;
      fEvent.clear();
   }

   UInt_t nvar = 0;

   while (!is.eof()) {
      std::string line;
      std::getline(is, line);
      
      if (line.empty() || line.find("#") != std::string::npos) {
         continue;
      }
      
      UInt_t count = 0;
      std::string::size_type pos=0;
      while( (pos=line.find(',',pos)) != std::string::npos ) { count++; pos++; }

      if (nvar == 0) {
         nvar = count - 2;
      }
      if (count < 3 || nvar != count - 2) {
         Log() << kFATAL << "Missing comma delimeter(s)" << Endl;
      }

      Int_t ievent = -1, type = -1;
      Double_t weight = -1.0;
      
      kNN::VarVec vvec(nvar, 0.0);
      
      UInt_t vcount = 0;
      std::string::size_type prev = 0;
      
      for (std::string::size_type ipos = 0; ipos < line.size(); ++ipos) {
         if (line[ipos] != ',' && ipos + 1 != line.size()) {
            continue;
         }
         
         if (!(ipos > prev)) {
            Log() << kFATAL << "Wrong substring limits" << Endl;
         }
         
         std::string vstring = line.substr(prev, ipos - prev);
         if (ipos + 1 == line.size()) {
            vstring = line.substr(prev, ipos - prev + 1);
         }
         
         if (vstring.empty()) {
            Log() << kFATAL << "Failed to parse string" << Endl;
         }
         
         if (vcount == 0) {
            ievent = std::atoi(vstring.c_str());
         }
         else if (vcount == 1) {
            type = std::atoi(vstring.c_str());
         }
         else if (vcount == 2) {
            weight = std::atof(vstring.c_str());
         }
         else if (vcount - 3 < vvec.size()) {
            vvec[vcount - 3] = std::atof(vstring.c_str());
         }
         else {
            Log() << kFATAL << "Wrong variable count" << Endl;
         }
         
         prev = ipos + 1;
         ++vcount;
      }
      
      fEvent.push_back(kNN::Event(vvec, weight, type));
   }
   
   Log() << kINFO << "Read " << fEvent.size() << " events from text file" << Endl;   

   // create kd-tree (binary tree) structure
   MakeKNN();
}

//-------------------------------------------------------------------------------------------
void TMVA::MethodKNN::WriteWeightsToStream(TFile &rf) const
{ 
   // save weights to ROOT file
   Log() << kINFO << "Starting WriteWeightsToStream(TFile &rf) function..." << Endl;
   
   if (fEvent.empty()) {
      Log() << kWARNING << "MethodKNN contains no events " << Endl;
      return;
   }

   kNN::Event *event = new kNN::Event();
   TTree *tree = new TTree("knn", "event tree");
   tree->SetDirectory(0);
   tree->Branch("event", "TMVA::kNN::Event", &event);

   Double_t size = 0.0;
   for (kNN::EventVec::const_iterator it = fEvent.begin(); it != fEvent.end(); ++it) {
      (*event) = (*it);
      size += tree->Fill();
   }

   // !!! hard coded tree name !!!
   rf.WriteTObject(tree, "knn", "Overwrite");

   // scale to MegaBytes
   size /= 1048576.0;

   Log() << kINFO << "Wrote " << size << "MB and "  << fEvent.size() 
         << " events to ROOT file" << Endl;
   
   delete tree;
   delete event; 
}

//-------------------------------------------------------------------------------------------
void TMVA::MethodKNN::ReadWeightsFromStream(TFile &rf)
{ 
   // read weights from ROOT file
   Log() << kINFO << "Starting ReadWeightsFromStream(TFile &rf) function..." << Endl;

   if (!fEvent.empty()) {
      Log() << kINFO << "Erasing " << fEvent.size() << " previously stored events" << Endl;
      fEvent.clear();
   }

   // !!! hard coded tree name !!!
   TTree *tree = dynamic_cast<TTree *>(rf.Get("knn"));
   if (!tree) {
      Log() << kFATAL << "Failed to find knn tree" << Endl;
      return;
   }

   kNN::Event *event = new kNN::Event();
   tree->SetBranchAddress("event", &event);

   const Int_t nevent = tree->GetEntries();

   Double_t size = 0.0;
   for (Int_t i = 0; i < nevent; ++i) {
      size += tree->GetEntry(i);
      fEvent.push_back(*event);
   }

   // scale to MegaBytes
   size /= 1048576.0;

   Log() << kINFO << "Read " << size << "MB and "  << fEvent.size() 
         << " events from ROOT file" << Endl;

   delete event;

   // create kd-tree (binary tree) structure
   MakeKNN();
}

//_______________________________________________________________________
void TMVA::MethodKNN::MakeClassSpecific( std::ostream& fout, const TString& className ) const
{
   // write specific classifier response
   fout << "   // not implemented for class: \"" << className << "\"" << std::endl;
   fout << "};" << std::endl;
}

//_______________________________________________________________________
void TMVA::MethodKNN::GetHelpMessage() const
{
   // get help message text
   //
   // typical length of text line: 
   //         "|--------------------------------------------------------------|"
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Short description:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "The k-nearest neighbor (k-NN) algorithm is a multi-dimensional classification" << Endl
         << "and regression algorithm. Similarly to other TMVA algorithms, k-NN uses a set of" << Endl
         << "training events for which a classification category/regression target is known. " << Endl
         << "The k-NN method compares a test event to all training events using a distance " << Endl
         << "function, which is an Euclidean distance in a space defined by the input variables. "<< Endl
         << "The k-NN method, as implemented in TMVA, uses a kd-tree algorithm to perform a" << Endl
         << "quick search for the k events with shortest distance to the test event. The method" << Endl
         << "returns a fraction of signal events among the k neighbors. It is recommended" << Endl
         << "that a histogram which stores the k-NN decision variable is binned with k+1 bins" << Endl
         << "between 0 and 1." << Endl;

   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Performance tuning via configuration options: " 
         << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "The k-NN method estimates a density of signal and background events in a "<< Endl
         << "neighborhood around the test event. The method assumes that the density of the " << Endl
         << "signal and background events is uniform and constant within the neighborhood. " << Endl
         << "k is an adjustable parameter and it determines an average size of the " << Endl
         << "neighborhood. Small k values (less than 10) are sensitive to statistical " << Endl
         << "fluctuations and large (greater than 100) values might not sufficiently capture  " << Endl
         << "local differences between events in the training set. The speed of the k-NN" << Endl
         << "method also increases with larger values of k. " << Endl;   
   Log() << Endl;
   Log() << "The k-NN method assigns equal weight to all input variables. Different scales " << Endl
         << "among the input variables is compensated using ScaleFrac parameter: the input " << Endl
         << "variables are scaled so that the widths for central ScaleFrac*100% events are " << Endl
         << "equal among all the input variables." << Endl;

   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Additional configuration options: " 
         << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "The method inclues an option to use a Gaussian kernel to smooth out the k-NN" << Endl
         << "response. The kernel re-weights events using a distance to the test event." << Endl;
}

//_______________________________________________________________________
Double_t TMVA::MethodKNN::PolnKernel(const Double_t value) const
{
   // polynomial kernel: (1 - |x|^3)^3 for |x| < 1, and 0 otherwise
   const Double_t avalue = TMath::Abs(value);

   if (!(avalue < 1.0)) {
      return 0.0;
   }

   const Double_t prod = 1.0 - avalue * avalue * avalue;

   return (prod * prod * prod);
}

//_______________________________________________________________________
Double_t TMVA::MethodKNN::GausKernel(const kNN::Event &event_knn,
                                     const kNN::Event &event, const std::vector<Double_t> &svec) const
{
   // Gaussian kernel
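   // returns the unnormalized value exp( -sum_i (x_i - y_i)^2 / (2*sigma_i^2) );
   // the normalization is omitted because it cancels in the ratio of weights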

   if (event_knn.GetNVar() != event.GetNVar() || event_knn.GetNVar() != svec.size()) {
      Log() << kFATAL << "Mismatched vectors in Gaussian kernel function" << Endl;
      return 0.0;
   }

   //
   // compute exponent
   //
   double sum_exp = 0.0;

   for(unsigned int ivar = 0; ivar < event_knn.GetNVar(); ++ivar) {

      const Double_t diff_ = event.GetVar(ivar) - event_knn.GetVar(ivar);
      const Double_t sigm_ = svec[ivar];
      if (!(sigm_ > 0.0)) {
         Log() << kFATAL << "Bad sigma value = " << sigm_ << Endl;
         return 0.0;
      }

      sum_exp += diff_*diff_/(2.0*sigm_*sigm_);
   }

   //
   // Return unnormalized(!) Gaussian function, because normalization
   // cancels for the ratio of weights.
   //

   return std::exp(-sum_exp);
}

//_______________________________________________________________________
Double_t TMVA::MethodKNN::getKernelRadius(const kNN::List &rlist) const
{
   //
   // Get the polynomial kernel radius: the largest (squared) distance
   // among the k nearest neighbors
   //
   Double_t kradius = -1.0;
   UInt_t kcount = 0;
   const UInt_t knn = static_cast<UInt_t>(fnkNN);

   for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit)
      {
         if (!(lit->second > 0.0)) continue;         
      
         if (kradius < lit->second || kradius < 0.0) kradius = lit->second;
      
         ++kcount;
         if (kcount >= knn) break;
      }
   
   return kradius;
}

//_______________________________________________________________________
const std::vector<Double_t> TMVA::MethodKNN::getRMS(const kNN::List &rlist, const kNN::Event &event_knn) const
{
   //
   // Compute the RMS of the variable differences between the query event
   // and its k nearest neighbors; scaled by |fSigmaFact|, this serves as
   // the per-variable sigma of the Gaussian kernel
   //
   std::vector<Double_t> rvec;
   UInt_t kcount = 0;
   const UInt_t knn = static_cast<UInt_t>(fnkNN);

   for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit)
      {
         if (!(lit->second > 0.0)) continue;         
      
         const kNN::Node<kNN::Event> *node_ = lit->first;
         const kNN::Event &event_ = node_->GetEvent();
      
         if (rvec.empty()) {
            rvec.insert(rvec.end(), event_.GetNVar(), 0.0);
         }
         else if (rvec.size() != event_.GetNVar()) {
            Log() << kFATAL << "Wrong number of variables, should never happen!" << Endl;
            rvec.clear();
            return rvec;
         }

         for(unsigned int ivar = 0; ivar < event_.GetNVar(); ++ivar) {
            const Double_t diff_ = event_.GetVar(ivar) - event_knn.GetVar(ivar);
            rvec[ivar] += diff_*diff_;
         }

         ++kcount;
         if (kcount >= knn) break;
      }

   if (kcount < 1) {
      Log() << kFATAL << "Bad event kcount = " << kcount << Endl;
      rvec.clear();
      return rvec;
   }

   for(unsigned int ivar = 0; ivar < rvec.size(); ++ivar) {
      if (!(rvec[ivar] > 0.0)) {
         Log() << kFATAL << "Bad RMS value = " << rvec[ivar] << Endl;
         rvec.clear();
         return rvec;
      }

      rvec[ivar] = std::abs(fSigmaFact)*std::sqrt(rvec[ivar]/kcount);
   }   
   
   return rvec;
}

//_______________________________________________________________________
Double_t TMVA::MethodKNN::getLDAValue(const kNN::List &rlist, const kNN::Event &event_knn)
{
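   // Split the k nearest neighbors into signal and background samples,
   // fit a local linear discriminant to them, and return the resulting
   // signal probability for the query event
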
   LDAEvents sig_vec, bac_vec;

   for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit) {
       
      // get reference to current node to make code more readable
      const kNN::Node<kNN::Event> &node = *(lit->first);
      const kNN::VarVec &tvec = node.GetEvent().GetVars();

      if (node.GetEvent().GetType() == 1) { // signal type = 1
         sig_vec.push_back(tvec);
      }
      else if (node.GetEvent().GetType() == 2) { // background type = 2
         bac_vec.push_back(tvec);
      }
      else {
         Log() << kFATAL << "Unknown type for training event" << Endl;
      }       
   }

   fLDA.Initialize(sig_vec, bac_vec);
    
   return fLDA.GetProb(event_knn.GetVars(), 1);
}