MethodTMlpANN.cxx

// @(#)root/tmva $Id$
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss, Eckhard von Toerne
/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis      *
 * Package: TMVA                                                                  *
 * Class  : MethodTMlpANN                                                         *
 *                                                                                *
 * Description:                                                                   *
 *      Implementation (see header for description)                               *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland              *
 *      Helge Voss      <Helge.Voss@cern.ch>     - MPI-K Heidelberg, Germany      *
 *      Kai Voss        <Kai.Voss@cern.ch>       - U. of Victoria, Canada         *
 *                                                                                *
 * Copyright (c) 2005:                                                            *
 *      CERN, Switzerland                                                         *
 *      U. of Victoria, Canada                                                    *
 *      MPI-K Heidelberg, Germany                                                 *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (see tmva/doc/LICENSE)                                                         *
 **********************************************************************************/

/*! \class TMVA::MethodTMlpANN
\ingroup TMVA

This is the TMVA TMultiLayerPerceptron interface class. It provides training
and testing of ROOT's internal MLP class within the TMVA framework. A minimal
booking example is sketched below.

Available learning methods:<br>

 - Stochastic
 - Batch
 - SteepestDescent
 - RibierePolak
 - FletcherReeves
 - BFGS

See the TMultiLayerPerceptron class description
for details on this ANN.
*/
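
// Minimal booking sketch (illustrative only; assumes the standard
// TMVA::Factory / TMVA::DataLoader API, with hypothetical "factory" and
// "dataloader" objects set up elsewhere):
//
//    factory->BookMethod( dataloader, TMVA::Types::kTMlpANN, "TMlpANN",
//       "NCycles=200:HiddenLayers=N,N-1:LearningMethod=BFGS:ValidationFraction=0.3" );
//
// The option string is parsed by DeclareOptions() and ProcessOptions() below.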

#include "TMVA/MethodTMlpANN.h"

#include "TMVA/Config.h"
#include "TMVA/Configurable.h"
#include "TMVA/DataSet.h"
#include "TMVA/DataSetInfo.h"
#include "TMVA/IMethod.h"
#include "TMVA/MethodBase.h"
#include "TMVA/MsgLogger.h"
#include "TMVA/Types.h"
#include "TMVA/VariableInfo.h"

#include "TMVA/ClassifierFactory.h"
#include "TMVA/Tools.h"

#include "TLeaf.h"
#include "TEventList.h"
#include "TROOT.h"
#include "TMultiLayerPerceptron.h"
#include "ThreadLocalStorage.h"

#include <cstdlib>
#include <cstring>
#include <iostream>
#include <fstream>
#include <sstream>

using std::atoi;

// some additional TMlpANN options
const Bool_t EnforceNormalization__=kTRUE;

REGISTER_METHOD(TMlpANN)

ClassImp(TMVA::MethodTMlpANN);

////////////////////////////////////////////////////////////////////////////////
/// standard constructor

TMVA::MethodTMlpANN::MethodTMlpANN( const TString& jobName,
                                    const TString& methodTitle,
                                    DataSetInfo& theData,
                                    const TString& theOption ) :
   TMVA::MethodBase( jobName, Types::kTMlpANN, methodTitle, theData, theOption ),
   fMLP(0),
   fLocalTrainingTree(0),
   fNcycles(100),
   fValidationFraction(0.5),
   fLearningMethod( "" )
{
}

////////////////////////////////////////////////////////////////////////////////
/// constructor from weight file

TMVA::MethodTMlpANN::MethodTMlpANN( DataSetInfo& theData,
                                    const TString& theWeightFile ) :
   TMVA::MethodBase( Types::kTMlpANN, theData, theWeightFile ),
   fMLP(0),
   fLocalTrainingTree(0),
   fNcycles(100),
   fValidationFraction(0.5),
   fLearningMethod( "" )
{
}

////////////////////////////////////////////////////////////////////////////////
/// TMlpANN can handle classification with 2 classes

Bool_t TMVA::MethodTMlpANN::HasAnalysisType( Types::EAnalysisType type,
                                             UInt_t numberClasses, UInt_t /*numberTargets*/ )
{
   if (type == Types::kClassification && numberClasses == 2) return kTRUE;
   return kFALSE;
}

////////////////////////////////////////////////////////////////////////////////
/// default initialisations

void TMVA::MethodTMlpANN::Init( void )
{
}

////////////////////////////////////////////////////////////////////////////////
/// destructor

TMVA::MethodTMlpANN::~MethodTMlpANN( void )
{
   if (fMLP) delete fMLP;
}

////////////////////////////////////////////////////////////////////////////////
/// translates options from option string into TMlpANN language

void TMVA::MethodTMlpANN::CreateMLPOptions( TString layerSpec )
{
   fHiddenLayer = ":";

   while (layerSpec.Length()>0) {
      TString sToAdd="";
      if (layerSpec.First(',')<0) {
         sToAdd    = layerSpec;
         layerSpec = "";
      }
      else {
         sToAdd    = layerSpec(0,layerSpec.First(','));
         layerSpec = layerSpec(layerSpec.First(',')+1,layerSpec.Length());
      }
      int nNodes = 0;
      if (sToAdd.BeginsWith("N")) { sToAdd.Remove(0,1); nNodes = GetNvar(); }
      nNodes += atoi(sToAdd);
      fHiddenLayer = TString::Format( "%s%i:", (const char*)fHiddenLayer, nNodes );
   }

   // set input vars
   std::vector<TString>::iterator itrVar    = (*fInputVars).begin();
   std::vector<TString>::iterator itrVarEnd = (*fInputVars).end();
   fMLPBuildOptions = "";
   for (; itrVar != itrVarEnd; ++itrVar) {
      if (EnforceNormalization__) fMLPBuildOptions += "@";
      TString myVar = *itrVar;
      fMLPBuildOptions += myVar;
      fMLPBuildOptions += ",";
   }
   fMLPBuildOptions.Chop(); // remove last ","

   // prepare final options for MLP kernel
   fMLPBuildOptions += fHiddenLayer;
   fMLPBuildOptions += "type";

   Log() << kINFO << "Use " << fNcycles << " training cycles" << Endl;
   Log() << kINFO << "Use configuration (nodes per hidden layer): " << fHiddenLayer << Endl;
}
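
// Worked example (illustrative): for four input variables var1..var4 and
// layerSpec = "N,N-1", the loop above produces fHiddenLayer = ":4:3:", so
//
//    fMLPBuildOptions = "@var1,@var2,@var3,@var4:4:3:type"
//
// i.e. the TMultiLayerPerceptron network-structure string: normalized ("@")
// inputs, two hidden layers with 4 and 3 nodes, and the output neuron
// reading the target branch "type".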

////////////////////////////////////////////////////////////////////////////////
/// define the options (their key words) that can be set in the option string
///
/// known options:
///
///  - NCycles `<integer>`      Number of training cycles (too many cycles could overtrain the network)
///  - HiddenLayers `<string>`  Layout of the hidden layers (nodes per layer)
///    * specifications for each hidden layer are separated by commas
///    * for each layer the number of nodes can be either absolute (simply a number)
///      or relative to the number of input nodes to the neural net (N)
///    * there is always a single node in the output layer
///
/// example: a net with 6 input nodes and "HiddenLayers=N-1,N-2" has 6,5,4,1 nodes in
/// layers 1,2,3,4, respectively

void TMVA::MethodTMlpANN::DeclareOptions()
{
   DeclareOptionRef( fNcycles   = 200,     "NCycles",      "Number of training cycles" );
   DeclareOptionRef( fLayerSpec = "N,N-1", "HiddenLayers", "Specification of hidden layer architecture (N stands for number of variables; any integers may also be used)" );

   DeclareOptionRef( fValidationFraction = 0.5, "ValidationFraction",
                     "Fraction of events in training tree used for cross validation" );

   DeclareOptionRef( fLearningMethod = "Stochastic", "LearningMethod", "Learning method" );
   AddPreDefVal( TString("Stochastic") );
   AddPreDefVal( TString("Batch") );
   AddPreDefVal( TString("SteepestDescent") );
   AddPreDefVal( TString("RibierePolak") );
   AddPreDefVal( TString("FletcherReeves") );
   AddPreDefVal( TString("BFGS") );
}

////////////////////////////////////////////////////////////////////////////////
/// builds the neural network as specified by the user

void TMVA::MethodTMlpANN::ProcessOptions()
{
   CreateMLPOptions(fLayerSpec);

   if (IgnoreEventsWithNegWeightsInTraining()) {
      Log() << kFATAL << "Mechanism to ignore events with negative weights in training not available for method: "
            << GetMethodTypeName()
            << " --> please remove \"IgnoreNegWeightsInTraining\" option from booking string."
            << Endl;
   }
}

////////////////////////////////////////////////////////////////////////////////
/// calculate the value of the neural net for the current event

Double_t TMVA::MethodTMlpANN::GetMvaValue( Double_t* err, Double_t* errUpper )
{
   const Event* ev = GetEvent();
   TTHREAD_TLS_DECL_ARG(Double_t*, d, new Double_t[Data()->GetNVariables()]);

   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      d[ivar] = (Double_t)ev->GetValue(ivar);
   }
   Double_t mvaVal = fMLP->Evaluate(0,d);

   // cannot determine error
   NoErrorCalc(err, errUpper);

   return mvaVal;
}
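
// Application sketch (illustrative; assumes the standard TMVA::Reader API,
// with hypothetical variable names and weight-file path):
//
//    TMVA::Reader reader;
//    Float_t var1 = 0, var2 = 0;
//    reader.AddVariable( "var1", &var1 );
//    reader.AddVariable( "var2", &var2 );
//    reader.BookMVA( "TMlpANN", "weights/MyJob_TMlpANN.weights.xml" );
//    var1 = 0.5; var2 = -1.2;  // fill from the current event
//    Double_t mva = reader.EvaluateMVA( "TMlpANN" );  // ends up in GetMvaValue()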

////////////////////////////////////////////////////////////////////////////////
/// performs TMlpANN training
/// available learning methods:
///
///  - TMultiLayerPerceptron::kStochastic
///  - TMultiLayerPerceptron::kBatch
///  - TMultiLayerPerceptron::kSteepestDescent
///  - TMultiLayerPerceptron::kRibierePolak
///  - TMultiLayerPerceptron::kFletcherReeves
///  - TMultiLayerPerceptron::kBFGS
///
/// TMultiLayerPerceptron wants test and training tree at once
/// so merge the training and testing trees from the MVA factory first:

void TMVA::MethodTMlpANN::Train( void )
{
   Int_t type;
   Float_t weight;
   const Long_t basketsize = 128000;
   Float_t* vArr = new Float_t[GetNvar()];

   TTree *localTrainingTree = new TTree( "TMLPtrain", "Local training tree for TMlpANN" );
   localTrainingTree->Branch( "type",   &type,   "type/I",   basketsize );
   localTrainingTree->Branch( "weight", &weight, "weight/F", basketsize );

   for (UInt_t ivar=0; ivar<GetNvar(); ivar++) {
      TString myVar = GetInternalVarName(ivar);
      TString myTyp = TString::Format("Var%02i/F", ivar);
      localTrainingTree->Branch( myVar.Data(), &vArr[ivar], myTyp.Data(), basketsize );
   }

   for (UInt_t ievt=0; ievt<Data()->GetNEvents(); ievt++) {
      const Event *ev = GetEvent(ievt);
      for (UInt_t i=0; i<GetNvar(); i++) {
         vArr[i] = ev->GetValue( i );
      }
      type   = DataInfo().IsSignal( ev ) ? 1 : 0;
      weight = ev->GetWeight();
      localTrainingTree->Fill();
   }

   // These are the event lists for the mlp train method:
   // the first events in the tree are for training,
   // the rest for internal testing (cross validation)...
   // NOTE: the training events are ordered: first part is signal, second part background
   TString trainList = "Entry$<";
   trainList += 1.0-fValidationFraction;
   trainList += "*";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " || (Entry$>";
   trainList += (Int_t)Data()->GetNEvtSigTrain();
   trainList += " && Entry$<";
   trainList += (Int_t)(Data()->GetNEvtSigTrain() + (1.0 - fValidationFraction)*Data()->GetNEvtBkgdTrain());
   trainList += ")";
   TString testList = TString("!(") + trainList + ")";

   // print the requirements
   Log() << kHEADER << "Requirement for training events: \"" << trainList << "\"" << Endl;
   Log() << kINFO << "Requirement for validation events: \"" << testList << "\"" << Endl;

   // create NN
   if (fMLP) { delete fMLP; fMLP = nullptr; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(),
                                     localTrainingTree,
                                     trainList,
                                     testList );
   fMLP->SetEventWeight( "weight" );

   // set learning method
   TMultiLayerPerceptron::ELearningMethod learningMethod = TMultiLayerPerceptron::kStochastic;

   fLearningMethod.ToLower();
   if      (fLearningMethod == "stochastic"      ) learningMethod = TMultiLayerPerceptron::kStochastic;
   else if (fLearningMethod == "batch"           ) learningMethod = TMultiLayerPerceptron::kBatch;
   else if (fLearningMethod == "steepestdescent" ) learningMethod = TMultiLayerPerceptron::kSteepestDescent;
   else if (fLearningMethod == "ribierepolak"    ) learningMethod = TMultiLayerPerceptron::kRibierePolak;
   else if (fLearningMethod == "fletcherreeves"  ) learningMethod = TMultiLayerPerceptron::kFletcherReeves;
   else if (fLearningMethod == "bfgs"            ) learningMethod = TMultiLayerPerceptron::kBFGS;
   else {
      Log() << kFATAL << "Unknown Learning Method: \"" << fLearningMethod << "\"" << Endl;
   }
   fMLP->SetLearningMethod( learningMethod );

   // train NN
   fMLP->Train(fNcycles, "" ); //"text,update=50" );

   // write weights to File;
   // this is not nice, but fMLP gets deleted at the end of Train()
   delete localTrainingTree;
   delete [] vArr;
}
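
// Worked example (illustrative): with fValidationFraction = 0.5, 1000 signal
// and 1000 background training events, the selections built above read
//
//    trainList = "Entry$<0.5*1000 || (Entry$>1000 && Entry$<1500)"
//    testList  = "!(Entry$<0.5*1000 || (Entry$>1000 && Entry$<1500))"
//
// i.e. the first half of the signal block and the first half of the
// background block train the net, the remainder is used for validation.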

////////////////////////////////////////////////////////////////////////////////
/// write weights to xml file

void TMVA::MethodTMlpANN::AddWeightsXMLTo( void* parent ) const
{
   // first the architecture
   void* wght = gTools().AddChild(parent, "Weights");
   void* arch = gTools().AddChild( wght, "Architecture" );
   gTools().AddAttr( arch, "BuildOptions", fMLPBuildOptions.Data() );

   // dump weights first in temporary txt file, read from there into xml
   const TString tmpfile=GetWeightFileDir()+"/TMlp.nn.weights.temp";
   fMLP->DumpWeights( tmpfile.Data() );
   std::ifstream inf( tmpfile.Data() );
   char temp[256];
   TString data("");
   void *ch = nullptr;
   while (inf.getline(temp,256)) {
      TString dummy(temp);
      if (dummy.BeginsWith("#")) { // section headers in the dump start with '#'
         if (ch) gTools().AddRawLine( ch, data.Data() );
         dummy = dummy.Strip(TString::kLeading, '#');
         dummy = dummy(0,dummy.First(' '));
         ch = gTools().AddChild(wght, dummy);
         data.Resize(0);
         continue;
      }
      data += (dummy + " ");
   }
   if (ch) gTools().AddRawLine( ch, data.Data() );

   inf.close();
}
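
// For orientation (illustrative): TMultiLayerPerceptron::DumpWeights() writes
// a plain-text file with sections introduced by '#' header lines, e.g.
//
//    #input normalization
//    <mean> <rms>
//    ...
//    #neurons weights
//    <w>
//    ...
//    #synapses weights
//    <w>
//    ...
//
// The loop above turns each section into a child node of <Weights> (named
// "input", "neurons", "synapses", ...) holding the numbers as raw text;
// ReadWeightsFromXML() below inverts exactly this mapping.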

////////////////////////////////////////////////////////////////////////////////
/// rebuild temporary textfile from xml weightfile and load this
/// file into MLP

void TMVA::MethodTMlpANN::ReadWeightsFromXML( void* wghtnode )
{
   void* ch = gTools().GetChild(wghtnode);
   gTools().ReadAttr( ch, "BuildOptions", fMLPBuildOptions );

   ch = gTools().GetNextChild(ch);
   const TString fname = GetWeightFileDir()+"/TMlp.nn.weights.temp";
   std::ofstream fout( fname.Data() );
   double temp1=0,temp2=0;
   while (ch) {
      const char* nodecontent = gTools().GetContent(ch);
      std::stringstream content(nodecontent);
      if (strcmp(gTools().GetName(ch),"input")==0) {
         fout << "#input normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"output")==0) {
         fout << "#output normalization" << std::endl;
         while ((content >> temp1) && (content >> temp2)) {
            fout << temp1 << " " << temp2 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"neurons")==0) {
         fout << "#neurons weights" << std::endl;
         while (content >> temp1) {
            fout << temp1 << std::endl;
         }
      }
      if (strcmp(gTools().GetName(ch),"synapses")==0) {
         fout << "#synapses weights";
         while (content >> temp1) {
            fout << std::endl << temp1;
         }
      }
      ch = gTools().GetNextChild(ch);
   }
   fout.close();

   // Here we create a dummy tree necessary to create a minimal NN
   // to be used for testing, evaluation and application
   TTHREAD_TLS_DECL_ARG(Double_t*, d, new Double_t[Data()->GetNVariables()]);
   TTHREAD_TLS(Int_t) type;

   gROOT->cd();
   TTree* dummyTree = new TTree("dummy","Empty dummy tree", 1);
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetInternalName();
      TString vt = TString::Format("%s/D", vn.Data());
      dummyTree->Branch(vn.Data(), d+ivar, vt.Data());
   }
   dummyTree->Branch("type", &type, "type/I");

   if (fMLP) { delete fMLP; fMLP = nullptr; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );
   fMLP->LoadWeights( fname );
}

////////////////////////////////////////////////////////////////////////////////
/// read weights from stream
/// since the MLP can not read from the stream, we
/// 1st: write the weights to temporary file

void TMVA::MethodTMlpANN::ReadWeightsFromStream( std::istream& istr )
{
   std::ofstream fout( "./TMlp.nn.weights.temp" );
   fout << istr.rdbuf();
   fout.close();
   // 2nd: load the weights from the temporary file into the MLP
   // the MLP is already built
   Log() << kINFO << "Load TMLP weights into " << fMLP << Endl;

   Double_t *d = new Double_t[Data()->GetNVariables()];
   Int_t type;
   gROOT->cd();
   TTree* dummyTree = new TTree("dummy","Empty dummy tree", 1);
   for (UInt_t ivar = 0; ivar<Data()->GetNVariables(); ivar++) {
      TString vn = DataInfo().GetVariableInfo(ivar).GetLabel();
      TString vt = TString::Format("%s/D", vn.Data());
      dummyTree->Branch(vn.Data(), d+ivar, vt.Data());
   }
   dummyTree->Branch("type", &type, "type/I");

   if (fMLP) { delete fMLP; fMLP = nullptr; }
   fMLP = new TMultiLayerPerceptron( fMLPBuildOptions.Data(), dummyTree );

   fMLP->LoadWeights( "./TMlp.nn.weights.temp" );
   // the temporary file could be removed here
   delete [] d;
}

////////////////////////////////////////////////////////////////////////////////
/// create reader class for classifier -> overwrites base class function
/// create specific class for TMultiLayerPerceptron

void TMVA::MethodTMlpANN::MakeClass( const TString& theClassFileName ) const
{
   // the default consists of
   TString classFileName = "";
   if (theClassFileName == "")
      classFileName = GetWeightFileDir() + "/" + GetJobName() + "_" + GetMethodName() + ".class";
   else
      classFileName = theClassFileName;

   classFileName.ReplaceAll(".class","");
   Log() << kINFO << "Creating specific (TMultiLayerPerceptron) standalone response class: " << classFileName << Endl;
   fMLP->Export( classFileName.Data() );
}
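
// Usage note (illustrative): TMultiLayerPerceptron::Export() generates
// standalone source files named after its argument, so with a hypothetical
// job name "MyJob" the call above yields MyJob_TMlpANN.cxx and
// MyJob_TMlpANN.h in the weight-file directory; these can be compiled
// independently of TMVA to evaluate the trained network.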

////////////////////////////////////////////////////////////////////////////////
/// write specific classifier response
/// nothing to do here - all taken care of by TMultiLayerPerceptron

void TMVA::MethodTMlpANN::MakeClassSpecific( std::ostream& /*fout*/, const TString& /*className*/ ) const
{
}

////////////////////////////////////////////////////////////////////////////////
/// get help message text
///
/// typical length of text line:
/// "|--------------------------------------------------------------|"

void TMVA::MethodTMlpANN::GetHelpMessage() const
{
   Log() << Endl;
   Log() << gTools().Color("bold") << "--- Short description:" << gTools().Color("reset") << Endl;
   Log() << Endl;
   Log() << "This feed-forward multilayer perceptron neural network is the " << Endl;
   Log() << "standard implementation distributed with ROOT (class TMultiLayerPerceptron)." << Endl;
   Log() << Endl;
   Log() << "Detailed information is available here:" << Endl;
   if (gConfig().WriteOptionsReference()) {
      Log() << "<a href=\"https://root.cern/doc/master/classTMultiLayerPerceptron.html\">";
      Log() << "https://root.cern/doc/master/classTMultiLayerPerceptron.html</a>" << Endl;
   }
   else Log() << "https://root.cern/doc/master/classTMultiLayerPerceptron.html" << Endl;
   Log() << Endl;
}