DavidonErrorUpdator.cxx
// @(#)root/minuit2:$Id$
// Authors: M. Winkler, F. James, L. Moneta, A. Zsenei   2003-2005

/**********************************************************************
 *                                                                    *
 * Copyright (c) 2005 LCG ROOT Math team,  CERN/PH-SFT                *
 *                                                                    *
 **********************************************************************/

#include "Minuit2/DavidonErrorUpdator.h"
#include "Minuit2/MinimumState.h"
#include "Minuit2/LaSum.h"
#include "Minuit2/LaProd.h"
#include "Minuit2/MnPrint.h"
namespace ROOT {

namespace Minuit2 {

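// Linear-algebra helpers implemented elsewhere in Minuit2: inner_product(a, b)
// returns the scalar product a.b, similarity(g, V) the quadratic form g.V.g;
// sum_of_elements is used below only to gauge the relative size of the update.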
double inner_product(const LAVector &, const LAVector &);
double similarity(const LAVector &, const LASymMatrix &);
double sum_of_elements(const LASymMatrix &);

MinimumError
DavidonErrorUpdator::Update(const MinimumState &s0, const MinimumParameters &p1, const FunctionGradient &g1) const
{

   // update of the covariance matrix (Davidon formula, see Tutorial, par. 4.8, page 26);
   // in case of delgam > gvg (PHI > 1) the dual (BFGS) formula is used instead
   // (cf. the rank-one formula discussed in par. 4.10, page 30)
   // ( Tutorial: https://seal.web.cern.ch/seal/documents/minuit/mntutorial.pdf )
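   // In the notation below: delgam = dx.dg and gvg = dg.V0.dg, so the rank-2
   // (Davidon/DFP) update of the inverse Hessian is
   //    V1 = V0 + dx dx^T / delgam - (V0 dg)(V0 dg)^T / gvg,
   // while the dual (BFGS) variant adds gvg * u u^T with u = dx/delgam - V0 dg/gvg.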

   MnPrint print("DavidonErrorUpdator");

   const MnAlgebraicSymMatrix &v0 = s0.Error().InvHessian();
   MnAlgebraicVector dx = p1.Vec() - s0.Vec();
   MnAlgebraicVector dg = g1.Vec() - s0.Gradient().Vec();

   double delgam = inner_product(dx, dg);
   double gvg = similarity(dg, v0);

   print.Debug("\ndx", dx, "\ndg", dg, "\ndelgam", delgam, "gvg", gvg);

   if (delgam == 0) {
      print.Warn("delgam = 0 : cannot update - return same matrix (details in info log)");
      print.Info("Explanation:\n"
                 "   The distance from the minimum cannot be estimated, since at two\n"
                 "   different points s0 and p1, the function gradient projected onto\n"
                 "   the difference of s0 and p1 is zero, where:\n"
                 "   * s0: ", s0.Vec(), "\n"
                 "   * p1: ", p1.Vec(), "\n"
                 "   * gradient at s0: ", s0.Gradient().Vec(), "\n"
                 "   * gradient at p1: ", g1.Vec(), "\n"
                 "   To understand whether this hints to an issue in the minimized function,\n"
                 "   the minimized function can be plotted along points between s0 and p1 to\n"
                 "   look for unexpected behavior.");
      return s0.Error();
   }

   if (delgam < 0) {
      print.Warn("delgam < 0 : first derivatives increasing along search line (details in info log)");
      print.Info("Explanation:\n"
                 "   The distance from the minimum cannot be estimated, since the minimized\n"
                 "   function seems not to be strictly convex in the space probed by the fit.\n"
                 "   That is expected if the starting parameters are e.g. close to a local maximum\n"
                 "   of the minimized function. If this function is expected to be fully convex\n"
                 "   in the probed range or Minuit is already close to the function minimum, this\n"
                 "   may hint to numerical or analytical issues with the minimized function.\n"
                 "   This was found by projecting the difference of gradients at two points, s0 and p1,\n"
                 "   onto the direction given by the difference of s0 and p1, where:\n"
                 "   * s0: ", s0.Vec(), "\n"
                 "   * p1: ", p1.Vec(), "\n"
                 "   * gradient at s0: ", s0.Gradient().Vec(), "\n"
                 "   * gradient at p1: ", g1.Vec(), "\n"
                 "   To understand whether this hints to an issue in the minimized function,\n"
                 "   the minimized function can be plotted along points between s0 and p1 to\n"
                 "   look for unexpected behavior.");
   }

   if (gvg <= 0) {
      // since v0 is positive definite, gvg can only be 0 if dg = 0 - we should never end up here
      print.Warn("gvg <= 0 : cannot update - return same matrix");
      return s0.Error();
   }

   MnAlgebraicVector vg = v0 * dg;

   // use rank-2 formula (Davidon)
   MnAlgebraicSymMatrix vUpd = Outer_product(dx) / delgam - Outer_product(vg) / gvg;

   if (delgam > gvg) {
      // use dual formula (BFGS)
      vUpd += gvg * Outer_product(MnAlgebraicVector(dx / delgam - vg / gvg));
      print.Debug("delgam > gvg : use dual (BFGS) formula");
   }
   else {
      print.Debug("delgam <= gvg : use rank-2 Davidon formula");
   }

   double sum_upd = sum_of_elements(vUpd);
   vUpd += v0;

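   // rough estimate of the relative change of the covariance matrix: the size of
   // this update relative to the updated matrix, averaged with the previous Dcovar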
   double dcov = 0.5 * (s0.Error().Dcovar() + sum_upd / sum_of_elements(vUpd));

   return MinimumError(vUpd, dcov);
}
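
// The block below is an earlier implementation of the same update, kept
// commented out in the source.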

/*
MinimumError DavidonErrorUpdator::Update(const MinimumState& s0,
                                         const MinimumParameters& p1,
                                         const FunctionGradient& g1) const {

  const MnAlgebraicSymMatrix& v0 = s0.Error().InvHessian();
  MnAlgebraicVector dx = p1.Vec() - s0.Vec();
  MnAlgebraicVector dg = g1.Vec() - s0.Gradient().Vec();

  double delgam = inner_product(dx, dg);
  double gvg = similarity(dg, v0);

//   std::cout<<"delgam= "<<delgam<<" gvg= "<<gvg<<std::endl;
  MnAlgebraicVector vg = v0*dg;
//   MnAlgebraicSymMatrix vUpd(v0.Nrow());

//   MnAlgebraicSymMatrix dd = ( 1./delgam )*outer_product(dx);
//   dd *= ( 1./delgam );
//   MnAlgebraicSymMatrix VggV = ( 1./gvg )*outer_product(vg);
//   VggV *= ( 1./gvg );
//   vUpd = dd - VggV;
//   MnAlgebraicSymMatrix vUpd = ( 1./delgam )*outer_product(dx) - ( 1./gvg )*outer_product(vg);
  MnAlgebraicSymMatrix vUpd = Outer_product(dx)/delgam - Outer_product(vg)/gvg;

  if(delgam > gvg) {
//     dx *= ( 1./delgam );
//     vg *= ( 1./gvg );
//     MnAlgebraicVector flnu = dx - vg;
//     MnAlgebraicSymMatrix tmp = Outer_product(flnu);
//     tmp *= gvg;
//     vUpd = vUpd + tmp;
    vUpd += gvg*outer_product(dx/delgam - vg/gvg);
  }

//
//   MnAlgebraicSymMatrix dd = Outer_product(dx);
//   dd *= ( 1./delgam );
//   MnAlgebraicSymMatrix VggV = Outer_product(vg);
//   VggV *= ( 1./gvg );
//   vUpd = dd - VggV;
//
//
//   double phi = delgam/(delgam - gvg);

//   MnAlgebraicSymMatrix vUpd(v0.Nrow());
//   if(phi < 0) {
//     // rank-2 Update
//     MnAlgebraicSymMatrix dd = Outer_product(dx);
//     dd *= ( 1./delgam );
//     MnAlgebraicSymMatrix VggV = Outer_product(vg);
//     VggV *= ( 1./gvg );
//     vUpd = dd - VggV;
//   }
//   if(phi > 1) {
//     // rank-1 Update
//     MnAlgebraicVector tmp = dx - vg;
//     vUpd = Outer_product(tmp);
//     vUpd *= ( 1./(delgam - gvg) );
//   }
//

//
//   if(delgam > gvg) {
//     // rank-1 Update
//     MnAlgebraicVector tmp = dx - vg;
//     vUpd = Outer_product(tmp);
//     vUpd *= ( 1./(delgam - gvg) );
//   } else {
//     // rank-2 Update
//     MnAlgebraicSymMatrix dd = Outer_product(dx);
//     dd *= ( 1./delgam );
//     MnAlgebraicSymMatrix VggV = Outer_product(vg);
//     VggV *= ( 1./gvg );
//     vUpd = dd - VggV;
//   }
//

  double sum_upd = sum_of_elements(vUpd);
  vUpd += v0;

//   MnAlgebraicSymMatrix V1 = v0 + vUpd;

  double dcov =
    0.5*(s0.Error().Dcovar() + sum_upd/sum_of_elements(vUpd));

  return MinimumError(vUpd, dcov);
}
*/

} // namespace Minuit2

} // namespace ROOT