
class TMLPAnalyzer: public TObject


 TMLPAnalyzer

 This utility class contains a set of tests useful when developing
 a neural network.
 It allows you to check for unneeded input variables and to inspect
 the network structure.
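
 A minimal usage sketch (the tree pointer "data", the branch names in the
 layout string and the training/test selections are illustrative assumptions,
 not part of this class):

   // Train a network on an existing TTree* data with branches x1, x2, x3, type.
   TMultiLayerPerceptron mlp("x1,x2,x3:5:type", data,
                             "Entry$%2==0", "Entry$%2==1");
   mlp.Train(100, "text, update=10");

   // Attach the analyzer to the trained network and collect its statistics.
   TMLPAnalyzer ana(&mlp);
   ana.GatherInformations();   // has to be called first when analyzing the network
   ana.CheckNetwork();         // prints a summary of the network in the terminal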


Function Members (Methods)

public:
TMLPAnalyzer(TMultiLayerPerceptron& net)
TMLPAnalyzer(TMultiLayerPerceptron* net)
TMLPAnalyzer(const TMLPAnalyzer&)
virtual ~TMLPAnalyzer()
void TObject::AbstractMethod(const char* method) const
virtual void TObject::AppendPad(Option_t* option = "")
virtual void TObject::Browse(TBrowser* b)
void CheckNetwork()
static TClass* Class()
virtual const char* TObject::ClassName() const
virtual void TObject::Clear(Option_t* = "")
virtual TObject* TObject::Clone(const char* newname = "") const
virtual Int_t TObject::Compare(const TObject* obj) const
virtual void TObject::Copy(TObject& object) const
virtual void TObject::Delete(Option_t* option = "")
virtual Int_t TObject::DistancetoPrimitive(Int_t px, Int_t py)
virtual void TObject::Draw(Option_t* option = "")
virtual void TObject::DrawClass() const
virtual TObject* TObject::DrawClone(Option_t* option = "") const
void DrawDInput(Int_t i)
void DrawDInputs()
void DrawNetwork(Int_t neuron, const char* signal, const char* bg)
TProfile* DrawTruthDeviation(Int_t outnode = 0, Option_t* option = "")
TProfile* DrawTruthDeviationInOut(Int_t innode, Int_t outnode = 0, Option_t* option = "")
THStack* DrawTruthDeviationInsOut(Int_t outnode = 0, Option_t* option = "")
THStack* DrawTruthDeviations(Option_t* option = "")
virtual void TObject::Dump() const
virtual void TObject::Error(const char* method, const char* msgfmt) const
virtual void TObject::Execute(const char* method, const char* params, Int_t* error = 0)
virtual void TObject::Execute(TMethod* method, TObjArray* params, Int_t* error = 0)
virtual void TObject::ExecuteEvent(Int_t event, Int_t px, Int_t py)
virtual void TObject::Fatal(const char* method, const char* msgfmt) const
virtual TObject* TObject::FindObject(const char* name) const
virtual TObject* TObject::FindObject(const TObject* obj) const
void GatherInformations()
virtual Option_t* TObject::GetDrawOption() const
static Long_t TObject::GetDtorOnly()
virtual const char* TObject::GetIconName() const
TTree* GetIOTree() const
virtual const char* TObject::GetName() const
virtual char* TObject::GetObjectInfo(Int_t px, Int_t py) const
static Bool_t TObject::GetObjectStat()
virtual Option_t* TObject::GetOption() const
virtual const char* TObject::GetTitle() const
virtual UInt_t TObject::GetUniqueID() const
virtual Bool_t TObject::HandleTimer(TTimer* timer)
virtual ULong_t TObject::Hash() const
virtual void TObject::Info(const char* method, const char* msgfmt) const
virtual Bool_t TObject::InheritsFrom(const char* classname) const
virtual Bool_t TObject::InheritsFrom(const TClass* cl) const
virtual void TObject::Inspect() const
void TObject::InvertBit(UInt_t f)
virtual TClass* IsA() const
virtual Bool_t TObject::IsEqual(const TObject* obj) const
virtual Bool_t TObject::IsFolder() const
Bool_t TObject::IsOnHeap() const
virtual Bool_t TObject::IsSortable() const
Bool_t TObject::IsZombie() const
virtual void TObject::ls(Option_t* option = "") const
void TObject::MayNotUse(const char* method) const
virtual Bool_t TObject::Notify()
static void TObject::operator delete(void* ptr)
static void TObject::operator delete(void* ptr, void* vp)
static void TObject::operator delete[](void* ptr)
static void TObject::operator delete[](void* ptr, void* vp)
void* TObject::operator new(size_t sz)
void* TObject::operator new(size_t sz, void* vp)
void* TObject::operator new[](size_t sz)
void* TObject::operator new[](size_t sz, void* vp)
TMLPAnalyzer& operator=(const TMLPAnalyzer&)
virtual void TObject::Paint(Option_t* option = "")
virtual void TObject::Pop()
virtual void TObject::Print(Option_t* option = "") const
virtual Int_t TObject::Read(const char* name)
virtual void TObject::RecursiveRemove(TObject* obj)
void TObject::ResetBit(UInt_t f)
virtual void TObject::SaveAs(const char* filename = "", Option_t* option = "") const
virtual void TObject::SavePrimitive(basic_ostream<char,char_traits<char> >& out, Option_t* option = "")
void TObject::SetBit(UInt_t f)
void TObject::SetBit(UInt_t f, Bool_t set)
virtual void TObject::SetDrawOption(Option_t* option = "")
static void TObject::SetDtorOnly(void* obj)
static void TObject::SetObjectStat(Bool_t stat)
virtual void TObject::SetUniqueID(UInt_t uid)
virtual void ShowMembers(TMemberInspector& insp, char* parent)
virtual void Streamer(TBuffer& b)
void StreamerNVirtual(TBuffer& b)
virtual void TObject::SysError(const char* method, const char* msgfmt) const
Bool_t TObject::TestBit(UInt_t f) const
Int_t TObject::TestBits(UInt_t f) const
virtual void TObject::UseCurrentStyle()
virtual void TObject::Warning(const char* method, const char* msgfmt) const
virtual Int_t TObject::Write(const char* name = 0, Int_t option = 0, Int_t bufsize = 0)
virtual Int_t TObject::Write(const char* name = 0, Int_t option = 0, Int_t bufsize = 0) const
protected:
virtual void TObject::DoError(int level, const char* location, const char* fmt, va_list va) const
const char* GetInputNeuronTitle(Int_t in)
Int_t GetLayers()
TString GetNeuronFormula(Int_t idx)
Int_t GetNeurons(Int_t layer)
const char* GetOutputNeuronTitle(Int_t out)
void TObject::MakeZombie()

Data Members

private:
TTree* fAnalysisTree
TTree* fIOTree
TMultiLayerPerceptron* fNetwork


Function documentation

~TMLPAnalyzer()
 Destructor
Int_t GetLayers()
 Returns the number of layers.
Int_t GetNeurons(Int_t layer)
 Returns the number of neurons in given layer.
TString GetNeuronFormula(Int_t idx)
 Returns the formula used as input for neuron (idx) in
 the first layer.
const char* GetInputNeuronTitle(Int_t in)
 Returns the name of any neuron from the input layer
const char* GetOutputNeuronTitle(Int_t out)
 Returns the name of any neuron from the output layer
void CheckNetwork()
 Prints some information about the network to the terminal.
void GatherInformations()
 Collects information about what is useful in the network.
 This method has to be called first when analyzing a network.
 It fills the two analysis trees.
void DrawDInput(Int_t i)
 Draws the distribution (on the test sample) of the
 impact on the network output of a small variation of
 the i-th input.
void DrawDInputs()
 Draws the distribution (on the test sample) of the
 impact on the network output of a small variation of
 each input.
 DrawDInputs() draws something that approximates the distribution of the
 derivative of the NN output w.r.t. each input. That quantity is one of the
 standard measures used to identify the most important inputs of the network.

 What is done is to vary one input around its nominal value and to see
 how the NN output changes. This is done for each entry in the sample and
 produces a distribution.

 What you can learn from that is:
 - is a given input really useful, or is the network insensitive to it?
 - is there a risk of a large systematic effect? Is the network extremely
   sensitive to small variations of any of the inputs?

 As you might understand, this is to be considered with care and can serve
 as input for an "educated guess" when optimizing the network.
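
 For example, with an analyzer ana already set up on a trained network (as in
 the sketch near the top of this page):

   ana.DrawDInputs();   // distributions of the sensitivity to each input, stacked
   ana.DrawDInput(2);   // the same quantity for the third input only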
void DrawNetwork(Int_t neuron, const char* signal, const char* bg)
 Draws the distribution of the neural network output for the given output neuron.
 Two distributions are drawn, for events passing respectively the "signal"
 and "background" cuts. Only the test sample is used.
TProfile* DrawTruthDeviation(Int_t outnode = 0, Option_t* option = "")
 Create a profile of the difference of the MLP output minus the
 true value for a given output node outnode, vs the true value for
 outnode, for all test data events. This method is mainly useful
 when doing regression analysis with the MLP (i.e. not classification,
 but continuous truth values).
 The resulting TProfile histogram is returned.
 It is not drawn if option "goff" is specified.
 Options are passed to TProfile::Draw
THStack* DrawTruthDeviations(Option_t* option = "")
 Creates TProfiles of the difference of the MLP output minus the
 true value vs the true value, one for each output, filled with the
 test data events. This method is mainly useful when doing regression
 analysis with the MLP (i.e. not classification, but continuous truth
 values).
 The returned THStack contains all the TProfiles. It is drawn unless
 the option "goff" is specified.
 Options are passed to TProfile::Draw.
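
 A sketch for a regression network: "goff" suppresses the drawing so the
 returned profile can be styled before being drawn manually:

   TProfile* dev = ana.DrawTruthDeviation(0, "goff");   // deviation profile for output 0
   dev->SetLineColor(kRed);
   dev->Draw();

   THStack* devs = ana.DrawTruthDeviations();   // one profile per output, returned in a stack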
TProfile* DrawTruthDeviationInOut(Int_t innode, Int_t outnode = 0, Option_t* option = "")
 Creates a profile of the difference of the MLP output outnode minus
 the true value of outnode vs the input value innode, for all test
 data events.
 The resulting TProfile histogram is returned.
 It is not drawn if option "goff" is specified.
 Options are passed to TProfile::Draw
THStack* DrawTruthDeviationInsOut(Int_t outnode = 0, Option_t* option = "")
 Creates a profile of the difference of the MLP output outnode minus the
 true value of outnode vs the input value, stacked for all inputs, for
 all test data events.
 The returned THStack contains all the TProfiles. It is drawn unless
 the option "goff" is specified.
 Options are passed to TProfile::Draw.
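
 To study how the deviation depends on the inputs rather than on the truth
 value (the node indices below are arbitrary examples):

   ana.DrawTruthDeviationInOut(1, 0);   // deviation of output 0 vs. input 1
   ana.DrawTruthDeviationInsOut(0);     // deviation of output 0 vs. each input, stacked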
TMLPAnalyzer(TMultiLayerPerceptron& net)
{}
TMLPAnalyzer(TMultiLayerPerceptron* net)
{}
TTree* GetIOTree() const
{ return fIOTree;}
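
 The I/O tree filled by GatherInformations() can also be used directly for
 custom plots; the branch names In0, Out0 and True0 below are assumptions
 about its content, not a documented interface:

   TTree* io = ana.GetIOTree();
   io->Draw("Out0-True0:In0", "", "prof");   // custom deviation profile vs. the first input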