// @(#)root/tmva $Id: MethodKNN.h 31458 2009-11-30 13:58:20Z stelzer $
// Author: Rustem Ospanov

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis       *
 * Package: TMVA                                                                  *
 * Class  : MethodKNN                                                             *
 * Web    : http://tmva.sourceforge.net                                           *
 *                                                                                *
 * Description:                                                                   *
 *      Analysis of k-nearest neighbor                                            *
 *                                                                                *
 * Author:                                                                        *
 *      Rustem Ospanov <rustem@fnal.gov> - U. of Texas at Austin, USA             *
 *                                                                                *
 * Copyright (c) 2007:                                                            *
 *      CERN, Switzerland                                                         * 
 *      MPI-K Heidelberg, Germany                                                 * 
 *      U. of Texas at Austin, USA                                                *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (http://tmva.sourceforge.net/LICENSE)                                          *
 **********************************************************************************/

#ifndef ROOT_TMVA_MethodKNN
#define ROOT_TMVA_MethodKNN

//////////////////////////////////////////////////////////////////////////
//                                                                      //
// MethodKNN                                                            //
//                                                                      //
// Analysis of k-nearest neighbor                                       //
//                                                                      //
//////////////////////////////////////////////////////////////////////////
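// Example usage (an illustrative sketch, not a prescription from this
// header): the option names mirror the data members declared below
// (nkNN, ScaleFrac, SigmaFact, Kernel, UseKernel, UseWeight, Trim);
// the option values, the weight-file path, and the "factory"/"reader"
// pointers (a configured TMVA::Factory and TMVA::Reader) are assumed
// placeholders.
//
//    // book the kNN method for training
//    factory->BookMethod( TMVA::Types::kKNN, "KNN",
//                         "nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:"
//                         "UseKernel=F:UseWeight=T:!Trim" );
//
//    // apply the trained classifier to new events
//    reader->BookMVA( "KNN", "weights/TMVAClassification_KNN.weights.xml" );
//    Double_t mvaValue = reader->EvaluateMVA( "KNN" );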

#include <vector>
#include <map>

// Local
#ifndef ROOT_TMVA_MethodBase
#include "TMVA/MethodBase.h"
#endif
#ifndef ROOT_TMVA_ModulekNN
#include "TMVA/ModulekNN.h"
#endif

// SVD and linear discriminant code
#ifndef ROOT_TMVA_LDA
#include "TMVA/LDA.h"
#endif

namespace TMVA
{   
   namespace kNN
   {
      class ModulekNN;
   }

   class MethodKNN : public MethodBase
   {
   public:

      MethodKNN(const TString& jobName, 
                const TString& methodTitle, 
                DataSetInfo& theData,
                const TString& theOption = "KNN",
                TDirectory* theTargetDir = NULL);

      MethodKNN(DataSetInfo& theData, 
                const TString& theWeightFile,  
                TDirectory* theTargetDir = NULL);
      
      virtual ~MethodKNN( void );
    
      virtual Bool_t HasAnalysisType( Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets );

      void Train( void );

      Double_t GetMvaValue( Double_t* err = 0 );
      const std::vector<Float_t>& GetRegressionValues();

      using MethodBase::ReadWeightsFromStream;

      void WriteWeightsToStream(TFile& rf) const;
      void AddWeightsXMLTo( void* parent ) const;
      void ReadWeightsFromXML( void* wghtnode );

      void ReadWeightsFromStream(std::istream& istr);
      void ReadWeightsFromStream(TFile &rf);

      const Ranking* CreateRanking();

   protected:

      // make ROOT-independent C++ class for classifier response (classifier-specific implementation)
      void MakeClassSpecific( std::ostream&, const TString& ) const;

      // get help message text
      void GetHelpMessage() const;

   private:

      // the option handling methods
      void DeclareOptions();
      void ProcessOptions();
      void DeclareCompatibilityOptions();

      // default initialisation called by all constructors
      void Init( void );

      // create kd-tree (binary tree) structure
      void MakeKNN( void );

      // polynomial and Gaussian kernel weight functions
      Double_t PolnKernel(Double_t value) const;
      Double_t GausKernel(const kNN::Event &event_knn, const kNN::Event &event, const std::vector<Double_t> &svec) const;

      // kernel radius determined from the list of nearest neighbours
      Double_t getKernelRadius(const kNN::List &rlist) const;

      // per-variable spread (RMS) of the nearest neighbours around the query event
      const std::vector<Double_t> getRMS(const kNN::List &rlist, const kNN::Event &event_knn) const;

      // MVA response from a local linear discriminant analysis of the nearest neighbours
      double getLDAValue(const kNN::List &rlist, const kNN::Event &event_knn);

   private:

      // number of events (sumOfWeights)
      Double_t fSumOfWeightsS;        // sum-of-weights for signal training events
      Double_t fSumOfWeightsB;        // sum-of-weights for background training events      

      kNN::ModulekNN *fModule;        //! module where all work is done

      Int_t fnkNN;            // number of k-nearest neighbors 
      Int_t fBalanceDepth;    // number of binary tree levels used for balancing tree

      Float_t fScaleFrac;     // fraction of events used to compute variable width
      Float_t fSigmaFact;     // scale factor for Gaussian sigma in Gaus. kernel

      TString fKernel;        // kernel type for smoothing: "Gaus" or "Poln"

      Bool_t fTrim;           // set equal number of signal and background events
      Bool_t fUseKernel;      // use polynomial kernel weight function
      Bool_t fUseWeight;      // use weights to count kNN
      Bool_t fUseLDA;         // use local linear discriminant analysis to compute MVA

      kNN::EventVec fEvent;   //! (untouched) events used for learning

      LDA fLDA;               //! experimental feature for local kNN analysis

      // for backward compatibility
      Int_t fTreeOptDepth;    // number of binary tree levels used for optimization

      ClassDef(MethodKNN,0) // k Nearest Neighbour classifier
   };

} // namespace TMVA

#endif // ROOT_TMVA_MethodKNN