library: libMLP #include "TMLPAnalyzer.h"
TMLPAnalyzer
class description - header file - source file - inheritance tree (.pdf)
protected:
const char* GetInputNeuronTitle(Int_t in)
Int_t GetLayers()
TString GetNeuronFormula(Int_t idx)
Int_t GetNeurons(Int_t layer)
const char* GetOutputNeuronTitle(Int_t out)
public:
TMLPAnalyzer(TMultiLayerPerceptron& net)
TMLPAnalyzer(TMultiLayerPerceptron* net)
TMLPAnalyzer(const TMLPAnalyzer&)
virtual ~TMLPAnalyzer()
void CheckNetwork()
static TClass* Class()
void DrawDInput(Int_t i)
void DrawDInputs()
void DrawNetwork(Int_t neuron, const char* signal, const char* bg)
TProfile* DrawTruthDeviation(Int_t outnode = 0, Option_t* option = "")
TProfile* DrawTruthDeviationInOut(Int_t innode, Int_t outnode = 0, Option_t* option = "")
THStack* DrawTruthDeviationInsOut(Int_t outnode = 0, Option_t* option = "")
THStack* DrawTruthDeviations(Option_t* option = "")
void GatherInformations()
TTree* GetIOTree() const
virtual TClass* IsA() const
TMLPAnalyzer& operator=(const TMLPAnalyzer&)
virtual void ShowMembers(TMemberInspector& insp, char* parent)
virtual void Streamer(TBuffer& b)
void StreamerNVirtual(TBuffer& b)
private:
TMultiLayerPerceptron* fNetwork
TTree* fAnalysisTree
TTree* fIOTree
TMLPAnalyzer
This utility class contains a set of tests useful when developing
a neural network.
It allows you to check for unneeded variables, and to control
the network structure.
void CheckNetwork()
Gives some information about the network in the terminal.
void GatherInformations()
Collects information about what is useful in the network.
This method has to be called first when analyzing a network.
Fills the two analysis trees.
void DrawDInput(Int_t i)
Draws the distribution (on the test sample) of the
impact on the network output of a small variation of
the ith input.
void DrawDInputs()
Draws the distribution (on the test sample) of the
impact on the network output of a small variation of
each input.
void DrawNetwork(Int_t neuron, const char* signal, const char* bg)
Draws the distribution of the neural network output (using the ith neuron).
Two distributions are drawn, for events passing respectively the "signal"
and "background" cuts. Only the test sample is used.
THStack* DrawTruthDeviations(Option_t *option /*=""*/)
Creates TProfiles of the difference of the MLP output minus the
true value vs the true value, one for each output, filled with the
test data events. This method is mainly useful when doing regression
analysis with the MLP (i.e. not classification, but continuous truth
values).
The returned THStack contains all the TProfiles. It is drawn unless
the option "goff" is specified.
Options are passed to TProfile::Draw.
TProfile* DrawTruthDeviationInOut(Int_t innode, Int_t outnode /*=0*/, Option_t *option /*=""*/)
Creates a profile of the difference of the MLP output outnode minus
the true value of outnode vs the input value innode, for all test
data events.
The resulting TProfile histogram is returned.
It is not drawn if option "goff" is specified.
Options are passed to TProfile::Draw
Author: Christophe.Delaere@cern.ch 25/04/04
Last update: root/mlp:$Name: $:$Id: TMLPAnalyzer.cxx,v 1.16 2006/05/26 15:13:02 rdm Exp $
Copyright (C) 1995-2003, Rene Brun and Fons Rademakers. *
ROOT page - Class index - Class Hierarchy - Top of the page
This page has been automatically generated. If you have any comments or suggestions about the page layout send a mail to ROOT support, or contact the developers with any questions or problems regarding ROOT.