ROOT Reference Guide
DavidonErrorUpdator.cxx
// @(#)root/minuit2:$Id$
// Authors: M. Winkler, F. James, L. Moneta, A. Zsenei   2003-2005

/**********************************************************************
 *                                                                    *
 * Copyright (c) 2005 LCG ROOT Math team,  CERN/PH-SFT                *
 *                                                                    *
 **********************************************************************/

#include "Minuit2/DavidonErrorUpdator.h"
#include "Minuit2/MinimumState.h"
#include "Minuit2/LaSum.h"
#include "Minuit2/LaProd.h"
#include "Minuit2/MnPrint.h"

namespace ROOT {

namespace Minuit2 {

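// Forward declarations of free-standing linear-algebra helpers used in the update below.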
double inner_product(const LAVector &, const LAVector &);
double similarity(const LAVector &, const LASymMatrix &);
double sum_of_elements(const LASymMatrix &);

MinimumError
DavidonErrorUpdator::Update(const MinimumState &s0, const MinimumParameters &p1, const FunctionGradient &g1) const
{

   // update of the covariance matrix (Davidon formula, see Tutorial, par. 4.8, page 26)
   // in case of delgam > gvg (PHI > 1) use the rank-one formula
   // see par. 4.10, page 30
   // ( Tutorial: https://seal.web.cern.ch/seal/documents/minuit/mntutorial.pdf )
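   //
   // In the notation used below (dx = parameter step, dg = gradient change,
   // V0 = current error matrix, delgam = dx^T dg, gvg = dg^T V0 dg) the
   // update computed here is
   //
   //    V1 = V0 + dx dx^T / delgam - (V0 dg)(V0 dg)^T / gvg
   //
   // plus, when delgam > gvg (PHI > 1), the additional rank-one term
   //
   //    gvg * (dx/delgam - V0 dg/gvg)(dx/delgam - V0 dg/gvg)^T
   //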

   MnPrint print("DavidonErrorUpdator");

   const MnAlgebraicSymMatrix &v0 = s0.Error().InvHessian();
   MnAlgebraicVector dx = p1.Vec() - s0.Vec();
   MnAlgebraicVector dg = g1.Vec() - s0.Gradient().Vec();

   double delgam = inner_product(dx, dg);
   double gvg = similarity(dg, v0);
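   // delgam = dx^T dg : change of the gradient projected onto the step
   //                    (positive if the function is convex along the step)
   // gvg    = dg^T V0 dg : the same quantity as estimated from the current error matrix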

   print.Debug("\ndx", dx, "\ndg", dg, "\ndelgam", delgam, "gvg", gvg);

   if (delgam == 0) {
      print.Warn("delgam = 0 : cannot update - return same matrix (details in info log)");
      print.Info("Explanation:\n"
                 "   The distance from the minimum cannot be estimated, since at two\n"
                 "   different points s0 and p1, the function gradient projected onto\n"
                 "   the difference of s0 and p1 is zero, where:\n"
                 "   * s0: ", s0.Vec(), "\n"
                 "   * p1: ", p1.Vec(), "\n"
                 "   * gradient at s0: ", s0.Gradient().Vec(), "\n"
                 "   * gradient at p1: ", g1.Vec(), "\n"
                 "   To understand whether this hints to an issue in the minimized function,\n"
                 "   the minimized function can be plotted along points between s0 and p1 to\n"
                 "   look for unexpected behavior.");
      return s0.Error();
   }

   if (delgam < 0) {
      print.Warn("delgam < 0 : first derivatives increasing along search line (details in info log)");
      print.Info("Explanation:\n"
                 "   The distance from the minimum cannot be estimated, since the minimized\n"
                 "   function seems not to be strictly convex in the space probed by the fit.\n"
                 "   That is expected if the starting parameters are e.g. close to a local maximum\n"
                 "   of the minimized function. If this function is expected to be fully convex\n"
                 "   in the probed range or Minuit is already close to the function minimum, this\n"
                 "   may hint to numerical or analytical issues with the minimized function.\n"
                 "   This was found by projecting the difference of gradients at two points, s0 and p1,\n"
                 "   onto the direction given by the difference of s0 and p1, where:\n"
                 "   * s0: ", s0.Vec(), "\n"
                 "   * p1: ", p1.Vec(), "\n"
                 "   * gradient at s0: ", s0.Gradient().Vec(), "\n"
                 "   * gradient at p1: ", g1.Vec(), "\n"
                 "   To understand whether this hints to an issue in the minimized function,\n"
                 "   the minimized function can be plotted along points between s0 and p1 to\n"
                 "   look for unexpected behavior.");
   }

   if (gvg <= 0) {
      // since v0 is pos. def., gvg can only be 0 if dg = 0 - we should never end up here
      print.Warn("gvg <= 0 : cannot update - return same matrix");
      return s0.Error();
   }

   MnAlgebraicVector vg = v0 * dg;

   MnAlgebraicSymMatrix vUpd = Outer_product(dx) / delgam - Outer_product(vg) / gvg;

   if (delgam > gvg) {
      // use the rank-1 formula
      vUpd += gvg * Outer_product(MnAlgebraicVector(dx / delgam - vg / gvg));
   }

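   // Add the correction to the previous matrix and update the estimate of the
   // relative change of the covariance (Dcovar): the ratio of the summed
   // elements of the correction to those of the updated matrix, averaged with
   // the previous value.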
   double sum_upd = sum_of_elements(vUpd);
   vUpd += v0;

   double dcov = 0.5 * (s0.Error().Dcovar() + sum_upd / sum_of_elements(vUpd));

   return MinimumError(vUpd, dcov);
}

/*
MinimumError DavidonErrorUpdator::Update(const MinimumState& s0,
                                         const MinimumParameters& p1,
                                         const FunctionGradient& g1) const {

   const MnAlgebraicSymMatrix& v0 = s0.Error().InvHessian();
   MnAlgebraicVector dx = p1.Vec() - s0.Vec();
   MnAlgebraicVector dg = g1.Vec() - s0.Gradient().Vec();

   double delgam = inner_product(dx, dg);
   double gvg = similarity(dg, v0);

//   std::cout<<"delgam= "<<delgam<<" gvg= "<<gvg<<std::endl;
   MnAlgebraicVector vg = v0*dg;
//   MnAlgebraicSymMatrix vUpd(v0.Nrow());

//   MnAlgebraicSymMatrix dd = ( 1./delgam )*outer_product(dx);
//   dd *= ( 1./delgam );
//   MnAlgebraicSymMatrix VggV = ( 1./gvg )*outer_product(vg);
//   VggV *= ( 1./gvg );
//   vUpd = dd - VggV;
//   MnAlgebraicSymMatrix vUpd = ( 1./delgam )*outer_product(dx) - ( 1./gvg )*outer_product(vg);
   MnAlgebraicSymMatrix vUpd = Outer_product(dx)/delgam - Outer_product(vg)/gvg;

   if(delgam > gvg) {
//      dx *= ( 1./delgam );
//      vg *= ( 1./gvg );
//      MnAlgebraicVector flnu = dx - vg;
//      MnAlgebraicSymMatrix tmp = Outer_product(flnu);
//      tmp *= gvg;
//      vUpd = vUpd + tmp;
      vUpd += gvg*outer_product(dx/delgam - vg/gvg);
   }

//
//   MnAlgebraicSymMatrix dd = Outer_product(dx);
//   dd *= ( 1./delgam );
//   MnAlgebraicSymMatrix VggV = Outer_product(vg);
//   VggV *= ( 1./gvg );
//   vUpd = dd - VggV;
//
//
//   double phi = delgam/(delgam - gvg);

//   MnAlgebraicSymMatrix vUpd(v0.Nrow());
//   if(phi < 0) {
//      // rank-2 Update
//      MnAlgebraicSymMatrix dd = Outer_product(dx);
//      dd *= ( 1./delgam );
//      MnAlgebraicSymMatrix VggV = Outer_product(vg);
//      VggV *= ( 1./gvg );
//      vUpd = dd - VggV;
//   }
//   if(phi > 1) {
//      // rank-1 Update
//      MnAlgebraicVector tmp = dx - vg;
//      vUpd = Outer_product(tmp);
//      vUpd *= ( 1./(delgam - gvg) );
//   }
//

//
//   if(delgam > gvg) {
//      // rank-1 Update
//      MnAlgebraicVector tmp = dx - vg;
//      vUpd = Outer_product(tmp);
//      vUpd *= ( 1./(delgam - gvg) );
//   } else {
//      // rank-2 Update
//      MnAlgebraicSymMatrix dd = Outer_product(dx);
//      dd *= ( 1./delgam );
//      MnAlgebraicSymMatrix VggV = Outer_product(vg);
//      VggV *= ( 1./gvg );
//      vUpd = dd - VggV;
//   }
//

   double sum_upd = sum_of_elements(vUpd);
   vUpd += v0;

//   MnAlgebraicSymMatrix V1 = v0 + vUpd;

   double dcov =
      0.5*(s0.Error().Dcovar() + sum_upd/sum_of_elements(vUpd));

   return MinimumError(vUpd, dcov);
}
*/

} // namespace Minuit2

} // namespace ROOT