ROOT Reference Guide
MnSeedGenerator.cxx
Go to the documentation of this file.
1// @(#)root/minuit2:$Id$
2// Authors: M. Winkler, F. James, L. Moneta, A. Zsenei 2003-2005
3
4/**********************************************************************
5 * *
6 * Copyright (c) 2005 LCG ROOT Math team, CERN/PH-SFT *
7 * *
8 **********************************************************************/
9
11#include "Minuit2/MinimumSeed.h"
12#include "Minuit2/MnFcn.h"
19#include "Minuit2/MnMatrix.h"
26#include "Minuit2/MnStrategy.h"
27#include "Minuit2/MnHesse.h"
33#include "Minuit2/MnPrint.h"
34
35#include <cmath>
36
37namespace ROOT {
38
39namespace Minuit2 {
40
// Builds the MinimumSeed (starting point, gradient, and first error-matrix
// estimate) for a minimization, using a generic GradientCalculator `gc`
// (typically a numerical one — see the Info message below).
//
// NOTE(review): this listing is a Doxygen extraction; the number fused to the
// start of each line is the original source line number, and original lines
// 41, 56, 63 and 82 are missing from the capture — presumably the
// "MinimumSeed MnSeedGenerator::" part of the signature and the declarations
// of `x` (parameter vector), `mat` (symmetric error matrix) and `ng2ls`
// (NegativeG2LineSearch). TODO: confirm against the original MnSeedGenerator.cxx.
42operator()(const MnFcn &fcn, const GradientCalculator &gc, const MnUserParameterState &st, const MnStrategy &stra) const
43{
44
45 MnPrint print("MnSeedGenerator");
46
47 // find seed (initial minimization point) using the calculated gradient
48 const unsigned int n = st.VariableParameters();
49 const MnMachinePrecision &prec = st.Precision();
50
51 print.Info("Computing seed using NumericalGradient calculator");
52
53 print.Debug(n, "free parameters, FCN pointer", &fcn);
54
55 // initial starting values
// Copy the internal (transformed) parameter values into the starting point,
// then evaluate the FCN once there.
57 for (unsigned int i = 0; i < n; i++)
58 x(i) = st.IntParameters()[i];
59 double fcnmin = fcn(x);
60
61 MinimumParameters pa(x, fcnmin);
62 FunctionGradient dgrad = gc(pa);
// dcovar measures how crude the error estimate is: 0 when an existing
// covariance is copied in, 1 for the diagonal 1/G2 guess built below.
64 double dcovar = 1.;
65 if (st.HasCovariance()) {
// Reuse the user state's covariance (upper triangle; mat is symmetric).
66 for (unsigned int i = 0; i < n; i++)
67 for (unsigned int j = i; j < n; j++)
68 mat(i, j) = st.IntCovariance()(i, j);
69 dcovar = 0.;
70 } else {
// Diagonal estimate 1/G2(i); when G2 is within precision of zero fall
// back to +/-1/Eps2() so the matrix entries stay finite.
71 for (unsigned int i = 0; i < n; i++)
72 mat(i, i) = (std::fabs(dgrad.G2()(i)) > prec.Eps2() ? 1. / dgrad.G2()(i) :
73 (dgrad.G2()(i) >= 0) ? 1./prec.Eps2() : -1./prec.Eps2());
74 }
75 MinimumError err(mat, dcovar);
76
// Estimated distance to minimum for the initial state.
77 double edm = VariableMetricEDMEstimator().Estimate(dgrad, err);
78 MinimumState state(pa, err, dgrad, edm, fcn.NumOfCalls());
79
80 print.Info("Initial state:", MnPrint::Oneline(state));
81
// If any second-derivative component is negative, run a line search to move
// to a point with positive G2 before seeding (ng2ls declared on a line lost
// in the extraction — see header note).
83 if (ng2ls.HasNegativeG2(dgrad, prec)) {
84 print.Debug("Negative G2 Found", "\n point:", x, "\n grad :", dgrad.Grad(), "\n g2 :", dgrad.G2())</doc_update_check>;
85
86 state = ng2ls(fcn, state, gc, prec);
87
88 print.Info("Negative G2 found - new state:", state);
89 }
90
// Highest strategy without a user covariance: pay for a full second-derivative
// (Hesse) calculation to get a better seed.
91 if (stra.Strategy() == 2 && !st.HasCovariance()) {
92 // calculate full 2nd derivative
93
94 print.Debug("calling MnHesse");
95
96 MinimumState tmp = MnHesse(stra)(fcn, state, st.Trafo());
97
98 print.Info("run Hesse - Initial seeding state:", tmp);
99
100 return MinimumSeed(tmp, st.Trafo());
101 }
102
103 print.Info("Initial state ",state);
104
105 return MinimumSeed(state, st.Trafo());
106}
107
// Second operator() overload: builds the seed when an analytical (external)
// gradient calculator is available; falls back to the numerical overload when
// the calculator cannot provide second derivatives.
//
// NOTE(review): Doxygen extraction dropped original lines 108, 130, 139, 144,
// 151 and 198 — presumably the first half of the signature
// ("MinimumSeed MnSeedGenerator::operator()(const MnFcn &fcn, const
// AnalyticalGradientCalculator &gc,") and the declarations of `x`, `mat`,
// `hmat`, `g2` and `ng2ls`. TODO: confirm against the original file.
109 const MnUserParameterState &st, const MnStrategy &stra) const
110{
111 MnPrint print("MnSeedGenerator");
112
113 // check gradient (slow: will require more function evaluations)
114 //if (gc.CheckGradient()) {
115 // //CheckGradient(st,trado,stra,grd)
116 //}
117
// No G2 available from this calculator: delegate to the numerical-gradient
// overload above.
118 if (!gc.CanComputeG2()) {
119 Numerical2PGradientCalculator ngc(fcn, st.Trafo(), stra);
120 return this->operator()(fcn, ngc, st, stra);
121 }
122
123 print.Info("Computing seed using analytical (external) gradients");
124
125 // find seed (initial point for minimization) using analytical gradient
126 unsigned int n = st.VariableParameters();
127 const MnMachinePrecision &prec = st.Precision();
128
129 // initial starting values
// Fill the starting point from the internal parameters and evaluate the FCN.
131 for (unsigned int i = 0; i < n; i++)
132 x(i) = st.IntParameters()[i];
133 double fcnmin = fcn(x);
134 MinimumParameters pa(x, fcnmin);
135
136 // compute function gradient
137 FunctionGradient grad = gc(pa);
138 double dcovar = 0;
140 // if we can compute Hessian compute it and use it
141 bool computedHessian = false;
142 if (!grad.HasG2()) {
// The calculator gave no G2, so a full Hessian must be computable.
143 assert(gc.CanComputeHessian());
145 bool ret = gc.Hessian(pa, hmat);
146 if (!ret) {
147 print.Error("Cannot compute G2 and Hessian");
// NOTE(review): assert(true) is a no-op — it never fires, so a failed
// Hessian computation is only logged here. Likely intended assert(ret)
// or assert(false); confirm intent before changing.
148 assert(true);
149 }
150 // update gradient using G2 from Hessian calculation
// Take the Hessian diagonal as the G2 vector and rebuild the gradient with it.
152 for (unsigned int i = 0; i < n; i++)
153 g2(i) = hmat(i,i);
154 grad = FunctionGradient(grad.Grad(),g2);
155
156 print.Debug("Computed analytical G2",g2);
157
158 // when Hessian has been computed invert to get covariance
159 // we prefer not using full Hessian in strategy 1 since we need to be sure that
160 // is pos-defined. Uncomment following line if want to have seed with the full Hessian
161 //computedHessian = true;
// Dead branch while the line above stays commented out: computedHessian is
// always false here.
162 if (computedHessian) {
163 mat = MinimumError::InvertMatrix(hmat);
164 print.Info("Use full Hessian as seed");
165 print.Debug("computed Hessian",hmat);
166 print.Debug("computed Error matrix (H^-1)",mat);
167 }
168 }
169 // do this only when we have not computed the Hessian or always ?
170 if (!computedHessian) {
171 // check if minimum state has covariance - if not use computed G2
172 if (st.HasCovariance()) {
173 print.Info("Using existing covariance matrix");
// Copy the user state's covariance (upper triangle; mat is symmetric);
// dcovar = 0 marks it as an exact estimate.
174 for (unsigned int i = 0; i < n; i++)
175 for (unsigned int j = i; j < n; j++)
176 mat(i, j) = st.IntCovariance()(i, j);
177 dcovar = 0.;
178 } else {
// Diagonal 1/G2 estimate with the same +/-1/Eps2() guard as the
// numerical overload; dcovar = 1 marks it as a crude estimate.
179 for (unsigned int i = 0; i < n; i++) {
180 // should not use a cut-off here like 1./prec.Eps()
181 mat(i, i) = (std::fabs(grad.G2()(i)) > prec.Eps2() ? 1. / grad.G2()(i)
182 : (grad.G2()(i) >= 0) ? 1. / prec.Eps2()
183 : -1. / prec.Eps2());
184 }
185 dcovar = 1.;
186 }
187 } else {
188 print.Info("Computing seed using full Hessian");
189 }
190
191 MinimumError err(mat, dcovar);
192 double edm = VariableMetricEDMEstimator().Estimate(grad, err);
193
194 if (!grad.HasG2()) {
195 print.Error("Cannot compute seed because G2 is not computed");
196 }
197 MinimumState state(pa, err, grad, edm, fcn.NumOfCalls());
// Negative-G2 recovery, as in the numerical overload (ng2ls declared on a
// line lost in the extraction — see header note).
199 if (ng2ls.HasNegativeG2(grad, prec)) {
200 // do a negative line search - can use current gradient calculator
201 //Numerical2PGradientCalculator ngc(fcn, st.Trafo(), stra);
202 state = ng2ls(fcn, state, gc, prec);
203 }
204
205 // compute Hessian above will not have posdef check as it is done if we call MnHesse
206 if (stra.Strategy() == 2 && !st.HasCovariance() && !computedHessian) {
207 // can calculate full 2nd derivative
208 MinimumState tmpState = MnHesse(stra)(fcn, state, st.Trafo());
209 print.Info("Initial seeding state ",tmpState);
210 return MinimumSeed(tmpState, st.Trafo());
211 }
212
213 print.Info("Initial seeding state ",state);
214
215 return MinimumSeed(state, st.Trafo());
216}
// Dead code: the whole CheckGradient function is compiled out by #if 0.
// NOTE(review): if ever re-enabled it will not compile as written — `fcn`,
// `print` and `n` are not declared in this scope, and the final `return good`
// is missing its semicolon.
217#if 0
// Verifies a user-supplied gradient against one refined by the
// HessianGradientCalculator; returns false (and asserts) on large discrepancy.
218bool CheckGradient(MinimumState & st, MnUserTransformation & trafo, MnStrategy & stra)
219{
220
221 const MinimumParameters & pa = st.Parameters();
222 const FunctionGradient & grd = st.FunctionGradient();
223
224 // I think one should use Numerical2PGradientCalculator
225 // since step sizes and G2 of initial gradient are wrong
226 InitialGradientCalculator igc(fcn, trafo, stra);
227 FunctionGradient tmp = igc(pa);
228 // should also use G2 from grd (in case AnalyticalGradient can compute Hessian ?)
229 FunctionGradient dgrad(grd.Grad(), tmp.G2(), tmp.Gstep());
230
231 // do check computing gradient with HessianGradientCalculator which refines the gradient given an initial one
232 bool good = true;
233 HessianGradientCalculator hgc(fcn, trafo, MnStrategy(2));
234 std::pair<FunctionGradient, MnAlgebraicVector> hgrd = hgc.DeltaGradient(pa, dgrad);
// Flag any component whose user gradient differs from the refined one by more
// than the returned per-component tolerance (hgrd.second).
235 for (unsigned int i = 0; i < n; i++) {
236 if (std::fabs(hgrd.first.Grad()(i) - grd.Grad()(i)) > hgrd.second(i)) {
237 int externalParameterIndex = trafo.ExtOfInt(i);
238 const char *parameter_name = trafo.Name(externalParameterIndex);
239 print.Warn("Gradient discrepancy of external Parameter too large:"
240 "parameter_name =",
241 parameter_name, "externalParameterIndex =", externalParameterIndex, "internal =", i);
242 good = false;
243 }
244 }
245 if (!good) {
246 print.Error("Minuit does not accept user specified Gradient. To force acceptance, override 'virtual bool "
247 "CheckGradient() const' of FCNGradientBase.h in the derived class.");
248
249 assert(good);
250 }
// NOTE(review): missing ';' here in the original.
251 return good
252}
253#endif
254
255} // namespace Minuit2
256
257} // namespace ROOT
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void gc
const MnAlgebraicVector & Gstep() const
const MnAlgebraicVector & Grad() const
const MnAlgebraicVector & G2() const
interface class for gradient calculators
HessianGradientCalculator: class to calculate Gradient for Hessian.
Class to calculate an initial estimate of the gradient.
Class describing a symmetric matrix of size n.
Definition: LASymMatrix.h:45
MinimumError keeps the inv.
Definition: MinimumError.h:28
static MnAlgebraicSymMatrix InvertMatrix(const MnAlgebraicSymMatrix &matrix, int &ifail)
Definition: MinimumError.h:58
MinimumState keeps the information (position, Gradient, 2nd deriv, etc) after one minimization step (...
Definition: MinimumState.h:27
const MinimumParameters & Parameters() const
Definition: MinimumState.h:57
Wrapper class to FCNBase interface used internally by Minuit.
Definition: MnFcn.h:30
unsigned int NumOfCalls() const
Definition: MnFcn.h:39
API class for calculating the numerical covariance matrix (== 2x Inverse Hessian == 2x Inverse 2nd de...
Definition: MnHesse.h:40
Sets the relative floating point (double) arithmetic precision.
double Eps2() const
eps2 returns 2*sqrt(eps)
void Debug(const Ts &... args)
Definition: MnPrint.h:147
void Error(const Ts &... args)
Definition: MnPrint.h:129
void Info(const Ts &... args)
Definition: MnPrint.h:141
MinimumSeed operator()(const MnFcn &, const GradientCalculator &, const MnUserParameterState &, const MnStrategy &) const override
API class for defining three levels of strategies: low (0), medium (1), high (>=2); acts on: Migrad (...
Definition: MnStrategy.h:27
unsigned int Strategy() const
Definition: MnStrategy.h:38
class which holds the external user and/or internal Minuit representation of the parameters and error...
const MnMachinePrecision & Precision() const
const std::vector< double > & IntParameters() const
const MnUserTransformation & Trafo() const
const MnUserCovariance & IntCovariance() const
class dealing with the transformation between user specified parameters (external) and internal param...
unsigned int ExtOfInt(unsigned int internal) const
const char * Name(unsigned int) const
In case that one of the components of the second derivative g2 calculated by the numerical Gradient c...
bool HasNegativeG2(const FunctionGradient &, const MnMachinePrecision &) const
class performing the numerical gradient calculation
double Estimate(const FunctionGradient &, const MinimumError &) const
Double_t x[n]
Definition: legend1.C:17
const Int_t n
Definition: legend1.C:16
VecExpr< UnaryOp< Fabs< T >, VecExpr< A, T, D >, T >, T, D > fabs(const VecExpr< A, T, D > &rhs)
This file contains a specialised ROOT message handler to test for diagnostic in unit tests.