#include "TString.h"
#include <vector>
#include "TTree.h"
#include "Riostream.h"
#include "TRandom3.h"
#include "TH2F.h"
#ifndef ROOT_TMVA_MethodBase
#include "TMVA/MethodBase.h"
#endif
#ifndef ROOT_TMVA_MethodANNBase
#include "TMVA/MethodANNBase.h"
#endif
#ifndef ROOT_TMVA_TNeuron
#include "TMVA/TNeuron.h"
#endif
#ifndef ROOT_TMVA_TSynapse
#include "TMVA/TSynapse.h"
#endif
#ifndef ROOT_TMVA_TActivationChooser
#include "TMVA/TActivationChooser.h"
#endif
#ifndef ROOT_TMVA_Types
#include "TMVA/Types.h"
#endif
#ifndef ROOT_TMVA_Tools
#include "TMVA/Tools.h"
#endif
#ifndef ROOT_TMVA_TNeuronInputChooser
#include "TMVA/TNeuronInputChooser.h"
#endif
#ifndef ROOT_TMVA_Ranking
#include "TMVA/Ranking.h"
#endif
using std::vector;
// ROOT dictionary boilerplate: registers TMVA::MethodANNBase with ROOT's
// class infrastructure (RTTI, I/O, browsing).
ClassImp(TMVA::MethodANNBase)
;
// standard constructor: forwards to MethodBase, then initialises the common
// ANN data members and registers the configurable options
TMVA::MethodANNBase::MethodANNBase( TString jobName, TString methodTitle, DataSet& theData,
TString theOption, TDirectory* theTargetDir )
: TMVA::MethodBase( jobName, methodTitle, theData, theOption, theTargetDir )
{
InitANNBase();
DeclareOptions();
}
// constructor variant taking a weight file (presumably used when re-creating
// a trained method from stored weights — confirm against MethodBase)
TMVA::MethodANNBase::MethodANNBase( DataSet & theData,
TString theWeightFile, TDirectory* theTargetDir )
: TMVA::MethodBase( theData, theWeightFile, theTargetDir )
{
InitANNBase();
DeclareOptions();
}
void TMVA::MethodANNBase::DeclareOptions()
{
DeclareOptionRef(fNcycles=3000,"NCycles","Number of training cycles");
DeclareOptionRef(fNormalize=kTRUE, "Normalize", "Normalize input variables");
DeclareOptionRef(fLayerSpec="N-1,N-2","HiddenLayers","Specification of the hidden layers");
DeclareOptionRef(fNeuronType="sigmoid",
"NeuronType","Neuron activation function type");
TActivationChooser aChooser;
vector<TString>* names = aChooser.GetAllActivationNames();
Int_t nTypes = names->size();
for (Int_t i = 0; i < nTypes; i++)
AddPreDefVal(names->at(i));
delete names;
DeclareOptionRef(fNeuronInputType="sum",
"NeuronInputType","Neuron input function type");
TNeuronInputChooser iChooser;
names = iChooser.GetAllNeuronInputNames();
nTypes = names->size();
for (Int_t i = 0; i < nTypes; i++)
AddPreDefVal(names->at(i));
delete names;
}
void TMVA::MethodANNBase::ProcessOptions()
{
MethodBase::ProcessOptions();
vector<Int_t>* layout = ParseLayoutString(fLayerSpec);
BuildNetwork(layout);
}
vector<Int_t>* TMVA::MethodANNBase::ParseLayoutString(TString layerSpec)
{
   // translate the "HiddenLayers" option string (e.g. "N-1,N-2") into the
   // full network layout: [ #input-variables, hidden layer sizes..., 1 ]
   // The caller owns the returned vector.
   vector<Int_t>* layout = new vector<Int_t>();
   layout->push_back(GetNvar());   // input layer: one neuron per variable

   // consume the comma-separated specification token by token
   while (layerSpec.Length() > 0) {
      TString token = "";
      Ssiz_t comma = layerSpec.First(',');
      if (comma < 0) {
         token = layerSpec;
         layerSpec = "";
      }
      else {
         token = layerSpec(0, comma);
         layerSpec = layerSpec(comma+1, layerSpec.Length());
      }
      // a leading 'N'/'n' stands for the number of input variables,
      // the remainder (possibly negative) is added to it
      int nNodes = 0;
      if (token.BeginsWith("n") || token.BeginsWith("N")) {
         token.Remove(0,1);
         nNodes = GetNvar();
      }
      nNodes += atoi(token);
      layout->push_back(nNodes);
   }

   layout->push_back(1);   // output layer: a single neuron
   return layout;
}
void TMVA::MethodANNBase::InitANNBase()
{
   // put all data members into a well-defined default state
   fNetwork            = NULL;
   frgen               = NULL;
   fActivation         = NULL;
   fIdentity           = NULL;
   fInputCalculator    = NULL;
   fSynapses           = NULL;
   fEstimatorHistTrain = NULL;
   fEstimatorHistTest  = NULL;
   fInputLayer         = NULL;
   fOutputNeuron       = NULL;

   // seed 1 gives reproducible random weights; TRandom3(0) seeds from
   // a non-deterministic source
   frgen = new TRandom3( fgFIXED_SEED ? 1 : 0 );

   fSynapses = new TObjArray();
}
// destructor: release the network structure and all helper objects
TMVA::MethodANNBase::~MethodANNBase()
{
DeleteNetwork();
}
void TMVA::MethodANNBase::DeleteNetwork()
{
if (fNetwork != NULL) {
TObjArray *layer;
Int_t numLayers = fNetwork->GetEntriesFast();
for (Int_t i = 0; i < numLayers; i++) {
layer = (TObjArray*)fNetwork->At(i);
DeleteNetworkLayer(layer);
}
delete fNetwork;
}
if (frgen != NULL) delete frgen;
if (fActivation != NULL) delete fActivation;
if (fIdentity != NULL) delete fIdentity;
if (fInputCalculator != NULL) delete fInputCalculator;
if (fSynapses != NULL) delete fSynapses;
fNetwork = NULL;
frgen = NULL;
fActivation = NULL;
fIdentity = NULL;
fInputCalculator = NULL;
fSynapses = NULL;
}
void TMVA::MethodANNBase::DeleteNetworkLayer(TObjArray*& layer)
{
   // delete one layer: first the incoming synapses of every neuron,
   // then the neurons themselves, finally the layer container
   Int_t nNeurons = layer->GetEntriesFast();
   for (Int_t i = 0; i < nNeurons; i++) {
      TNeuron* neuron = (TNeuron*)layer->At(i);
      neuron->DeletePreLinks();
      delete neuron;
   }
   delete layer;
}
void TMVA::MethodANNBase::BuildNetwork(vector<Int_t>* layout, vector<Double_t>* weights)
{
   // construct the network according to the requested layout and set its
   // synapse weights (random if no weight vector is supplied)
   fLogger << kINFO << "Building Network" << Endl;

   // start from a clean slate
   DeleteNetwork();
   InitANNBase();

   // instantiate the activation and neuron-input functions chosen via options
   TActivationChooser aChooser;
   fActivation = aChooser.CreateActivation(fNeuronType);
   fIdentity   = aChooser.CreateActivation("linear");
   TNeuronInputChooser iChooser;
   fInputCalculator = iChooser.CreateNeuronInput(fNeuronInputType);

   fNetwork = new TObjArray();
   BuildLayers(layout);

   // cache the frequently used entry points of the network
   fInputLayer = (TObjArray*)fNetwork->At(0);
   TObjArray* lastLayer = (TObjArray*)fNetwork->At(fNetwork->GetEntriesFast()-1);
   fOutputNeuron = (TNeuron*)lastLayer->At(0);

   if (weights == NULL) InitWeights();
   else                 ForceWeights(weights);
}
void TMVA::MethodANNBase::BuildLayers(vector<Int_t>* layout)
{
   // create the layers one by one, linking each one to its predecessor,
   // then collect all synapses into the flat fSynapses array
   TObjArray* prevLayer = NULL;
   Int_t nLayers = layout->size();

   for (Int_t i = 0; i < nLayers; i++) {
      TObjArray* curLayer = new TObjArray();
      BuildLayer(layout->at(i), curLayer, prevLayer, i, nLayers);
      prevLayer = curLayer;
      fNetwork->Add(curLayer);
   }

   // gather the outgoing synapses of every neuron in one flat list
   for (Int_t i = 0; i < nLayers; i++) {
      TObjArray* layer = (TObjArray*)fNetwork->At(i);
      Int_t nNeurons = layer->GetEntriesFast();
      for (Int_t j = 0; j < nNeurons; j++) {
         TNeuron* neuron = (TNeuron*)layer->At(j);
         Int_t nSynapses = neuron->NumPostLinks();
         for (Int_t k = 0; k < nSynapses; k++) {
            fSynapses->Add(neuron->PostLinkAt(k));
         }
      }
   }
}
void TMVA::MethodANNBase::BuildLayer(Int_t numNeurons, TObjArray* curLayer,
                                     TObjArray* prevLayer, Int_t layerIndex,
                                     Int_t numLayers)
{
   // build one layer: create its neurons, hook them up to the previous
   // layer, and append a bias neuron to every layer but the output one
   const Bool_t isInputLayer  = (layerIndex == 0);
   const Bool_t isOutputLayer = (layerIndex == numLayers-1);

   for (Int_t j = 0; j < numNeurons; j++) {
      TNeuron* neuron = new TNeuron();
      neuron->SetInputCalculator(fInputCalculator);

      if (isInputLayer) {
         // input neurons pass their value through unchanged
         neuron->SetActivationEqn(fIdentity);
         neuron->SetInputNeuron();
      }
      else {
         if (isOutputLayer) {
            neuron->SetOutputNeuron();
            neuron->SetActivationEqn(fIdentity);
         }
         else {
            neuron->SetActivationEqn(fActivation);
         }
         // fully connect to the previous layer
         AddPreLinks(neuron, prevLayer);
      }
      curLayer->Add(neuron);
   }

   // bias neuron: constant value 1.0, identity activation
   if (!isOutputLayer) {
      TNeuron* bias = new TNeuron();
      bias->SetActivationEqn(fIdentity);
      bias->SetBiasNeuron();
      bias->ForceValue(1.0);
      curLayer->Add(bias);
   }
}
void TMVA::MethodANNBase::AddPreLinks(TNeuron* neuron, TObjArray* prevLayer)
{
   // fully connect the given neuron to every neuron of the previous layer,
   // registering each new synapse on both ends of the link
   Int_t nPrev = prevLayer->GetEntriesFast();
   for (Int_t i = 0; i < nPrev; i++) {
      TNeuron* preNeuron = (TNeuron*)prevLayer->At(i);
      TSynapse* synapse = new TSynapse();
      synapse->SetPreNeuron(preNeuron);
      synapse->SetPostNeuron(neuron);
      preNeuron->AddPostLink(synapse);
      neuron->AddPreLink(synapse);
   }
}
void TMVA::MethodANNBase::InitWeights()
{
PrintMessage("initializing weights");
Int_t numSynapses = fSynapses->GetEntriesFast();
TSynapse* synapse;
for (Int_t i = 0; i < numSynapses; i++) {
synapse = (TSynapse*)fSynapses->At(i);
synapse->SetWeight(4.0*frgen->Rndm() - 2.0);
}
}
void TMVA::MethodANNBase::ForceWeights(vector<Double_t>* weights)
{
   // overwrite every synapse weight with the corresponding entry of the
   // supplied vector (order must match the fSynapses array)
   PrintMessage("forcing weights");
   Int_t nSynapses = fSynapses->GetEntriesFast();
   for (Int_t i = 0; i < nSynapses; i++) {
      TSynapse* synapse = (TSynapse*)fSynapses->At(i);
      synapse->SetWeight(weights->at(i));
   }
}
void TMVA::MethodANNBase::ForceNetworkInputs(Int_t ignoreIndex)
{
   // load the current event's variable values (optionally normalized) into
   // the input neurons; the variable at ignoreIndex is forced to zero
   for (Int_t j = 0; j < GetNvar(); j++) {
      Double_t value;
      if (j == ignoreIndex) value = 0;
      else value = fNormalize ? GetEventValNormalized(j) : Data().Event().GetVal(j);
      TNeuron* inputNeuron = GetInputNeuron(j);
      inputNeuron->ForceValue(value);
   }
}
void TMVA::MethodANNBase::ForceNetworkCalculations()
{
TObjArray* curLayer;
TNeuron* neuron;
Int_t numLayers = fNetwork->GetEntriesFast();
Int_t numNeurons;
for (Int_t i = 0; i < numLayers; i++) {
curLayer = (TObjArray*)fNetwork->At(i);
numNeurons = curLayer->GetEntriesFast();
for (Int_t j = 0; j < numNeurons; j++) {
neuron = (TNeuron*) curLayer->At(j);
neuron->CalculateValue();
neuron->CalculateActivationValue();
}
}
}
void TMVA::MethodANNBase::PrintMessage(TString message, Bool_t force) const
{
   // emit an info message through the logger; suppressed unless the method
   // runs in verbose/debug mode or the message is forced
   if (force || Verbose() || Debug()) fLogger << kINFO << message << Endl;
}
// interactive debugging aid: block until the user types a line on stdin;
// entering 'q' or 'Q' destroys this method object and terminates the program
void TMVA::MethodANNBase::WaitForKeyboard()
{
string dummy;
fLogger << kINFO << "***Type anything to continue (q to quit): ";
getline(cin, dummy);
if (dummy == "q" || dummy == "Q") {
PrintMessage( "quit" );
delete this; // NOTE(review): self-deletion — assumes the object lives on the heap
exit(0);
}
}
void TMVA::MethodANNBase::PrintNetwork()
{
if (!Debug()) return;
fLogger << Endl;
PrintMessage( "printing network " );
fLogger << kINFO << "-------------------------------------------------------------------" << Endl;
TObjArray* curLayer;
Int_t numLayers = fNetwork->GetEntriesFast();
for (Int_t i = 0; i < numLayers; i++) {
curLayer = (TObjArray*)fNetwork->At(i);
Int_t numNeurons = curLayer->GetEntriesFast();
fLogger << kINFO << "Layer #" << i << " (" << numNeurons << " neurons):" << Endl;
PrintLayer( curLayer );
}
}
void TMVA::MethodANNBase::PrintLayer(TObjArray* layer)
{
   // debugging aid: dump every neuron of one layer to the logger
   Int_t nNeurons = layer->GetEntriesFast();
   for (Int_t j = 0; j < nNeurons; j++) {
      TNeuron* neuron = (TNeuron*) layer->At(j);
      fLogger << kINFO << "\tNeuron #" << j << " (LinksIn: " << neuron->NumPreLinks()
              << " , LinksOut: " << neuron->NumPostLinks() << ")" << Endl;
      PrintNeuron( neuron );
   }
}
// debugging aid: dump the state of a single neuron (value, activation, delta,
// activation equation, and its incoming/outgoing synapses) to the logger
void TMVA::MethodANNBase::PrintNeuron(TNeuron* neuron)
{
fLogger << kINFO
<< "\t\tValue:\t" << neuron->GetValue()
<< "\t\tActivation: " << neuron->GetActivationValue()
<< "\t\tDelta: " << neuron->GetDelta() << Endl;
fLogger << kINFO << "\t\tActivationEquation:\t";
neuron->PrintActivationEqn();
fLogger << kINFO << "\t\tLinksIn:" << Endl;
neuron->PrintPreLinks();
fLogger << kINFO << "\t\tLinksOut:" << Endl;
neuron->PrintPostLinks();
}
Double_t TMVA::MethodANNBase::GetMvaValue()
{
   // evaluate the network for the current event and return the activation
   // value of the single output neuron

   // feed the input layer with the event's (optionally normalized) values
   TObjArray* inputLayer = (TObjArray*)fNetwork->At(0);
   for (Int_t i = 0; i < GetNvar(); i++) {
      Double_t x = fNormalize ? GetEventValNormalized(i) : Data().Event().GetVal(i);
      TNeuron* inputNeuron = (TNeuron*)inputLayer->At(i);
      inputNeuron->ForceValue(x);
   }

   // propagate through all layers
   ForceNetworkCalculations();

   // read out the single neuron of the last layer
   TObjArray* outputLayer = (TObjArray*)fNetwork->At( fNetwork->GetEntriesFast()-1 );
   TNeuron* outputNeuron = (TNeuron*)outputLayer->At(0);
   return outputNeuron->GetActivationValue();
}
void TMVA::MethodANNBase::WriteWeightsToStream( ostream & o) const
{
   // write all synapse weights to the stream in human-readable form, one
   // line per synapse: "(layerI,neuronJ)-(layerI+1,neuronK): weight"
   o << "Weights" << endl;
   Int_t nLayers = fNetwork->GetEntriesFast();
   for (Int_t i = 0; i < nLayers; i++) {
      TObjArray* layer = (TObjArray*)fNetwork->At(i);
      Int_t nNeurons = layer->GetEntriesFast();
      for (Int_t j = 0; j < nNeurons; j++) {
         TNeuron* neuron = (TNeuron*)layer->At(j);
         Int_t nSynapses = neuron->NumPostLinks();
         for (Int_t k = 0; k < nSynapses; k++) {
            TSynapse* synapse = neuron->PostLinkAt(k);
            o << "(layer" << i << ",neuron" << j << ")-(layer"
              << i+1 << ",neuron" << k << "): "
              << synapse->GetWeight() << endl;
         }
      }
   }
}
void TMVA::MethodANNBase::ReadWeightsFromStream( istream & istr)
{
   // read the synapse weights back from a stream written by
   // WriteWeightsToStream and force them onto the network
   TString dummy;
   Double_t weight;
   // stack-allocated vector replaces the former naked new/delete pair,
   // which leaked if ForceWeights threw
   vector<Double_t> weights;
   istr >> dummy;   // skip the "Weights" header token
   // each record is "(layerI,neuronJ)-(layerI+1,neuronK):" followed by a value
   while (istr >> dummy >> weight) weights.push_back(weight);
   ForceWeights(&weights);
}
const TMVA::Ranking* TMVA::MethodANNBase::CreateRanking()
{
   // rank the input variables by the sum over the input neuron's outgoing
   // synapses of (weight * average variable value)^2
   fRanking = new Ranking( GetName(), "Sum of weights-squared" );

   for (Int_t i = 0; i < GetNvar(); i++) {
      TNeuron* neuron = GetInputNeuron(i);
      Int_t numSynapses = neuron->NumPostLinks();
      Double_t importance = 0;
      TString varName = GetInputVar(i);

      // average of the signal and background means of this variable over
      // the training sample
      Double_t meanS, meanB, rmsS, rmsB, xmin, xmax;
      Statistics( TMVA::Types::kTraining, varName,
                  meanS, meanB, rmsS, rmsB, xmin, xmax );
      Double_t avgVal = (meanS + meanB) / 2.0;
      if (Normalize()) avgVal = 0.5*(1 + Norm(i, avgVal));

      for (Int_t j = 0; j < numSynapses; j++) {
         TSynapse* synapse = neuron->PostLinkAt(j);
         importance += (synapse->GetWeight() * avgVal)*(synapse->GetWeight() * avgVal);
      }

      // pass the Rank by value: AddRank takes a copy, so the original
      // "AddRank( *new Rank(...) )" leaked one Rank per variable
      fRanking->AddRank( Rank( varName, importance ) );
   }
   return fRanking;
}
void TMVA::MethodANNBase::WriteMonitoringHistosToFile() const
{
PrintMessage(Form("write special histos to file: %s", BaseDir()->GetPath()), kTRUE);
BaseDir()->mkdir(GetName()+GetMethodName())->cd();
TH2F* hist;
Int_t numLayers = fNetwork->GetEntriesFast();
BaseDir()->cd();
if (fEstimatorHistTrain) fEstimatorHistTrain->Write();
if (fEstimatorHistTest ) fEstimatorHistTest ->Write();
for (Int_t i = 0; i < numLayers-1; i++) {
TObjArray* layer1 = (TObjArray*)fNetwork->At(i);
TObjArray* layer2 = (TObjArray*)fNetwork->At(i+1);
Int_t numNeurons1 = layer1->GetEntriesFast();
Int_t numNeurons2 = layer2->GetEntriesFast();
TString name = Form("weights_hist%i%i", i, i+1);
hist = new TH2F(name + "", name + "",
numNeurons1, 0, numNeurons1, numNeurons2, 0, numNeurons2);
for (Int_t j = 0; j < numNeurons1; j++) {
TNeuron* neuron = (TNeuron*)layer1->At(j);
Int_t numSynapses = neuron->NumPostLinks();
for (Int_t k = 0; k < numSynapses; k++) {
TSynapse* synapse = neuron->PostLinkAt(k);
hist->SetBinContent(j+1, k+1, synapse->GetWeight());
}
}
hist->Write();
delete hist;
}
}
// ROOT page - Class index - Class Hierarchy - Top of the page
// This page has been automatically generated. If you have any comments or suggestions about the page layout send a mail to ROOT support, or contact the developers with any questions or problems regarding ROOT.