MnSeedGenerator.cxx
// @(#)root/minuit2:$Id$
// Authors: M. Winkler, F. James, L. Moneta, A. Zsenei   2003-2005

/**********************************************************************
 *                                                                    *
 * Copyright (c) 2005 LCG ROOT Math team,  CERN/PH-SFT                *
 *                                                                    *
 **********************************************************************/

#include "Minuit2/MnSeedGenerator.h"
#include "Minuit2/MinimumSeed.h"
#include "Minuit2/MnFcn.h"
#include "Minuit2/GradientCalculator.h"
#include "Minuit2/AnalyticalGradientCalculator.h"
#include "Minuit2/Numerical2PGradientCalculator.h"
#include "Minuit2/MinimumParameters.h"
#include "Minuit2/FunctionGradient.h"
#include "Minuit2/MinimumError.h"
#include "Minuit2/MinimumState.h"
#include "Minuit2/MnMatrix.h"
#include "Minuit2/MnMachinePrecision.h"
#include "Minuit2/MnUserParameterState.h"
#include "Minuit2/MnStrategy.h"
#include "Minuit2/MnHesse.h"
#include "Minuit2/VariableMetricEDMEstimator.h"
#include "Minuit2/NegativeG2LineSearch.h"
#include "Minuit2/MnPrint.h"

#include <cmath>

namespace ROOT {

namespace Minuit2 {

MinimumSeed MnSeedGenerator::operator()(const MnFcn &fcn, const GradientCalculator &gc, const MnUserParameterState &st,
                                        const MnStrategy &stra) const
{
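   // Build the seed (initial minimization state) from the numerically computed gradient:
   // evaluate FCN and gradient at the starting point, form an approximate error matrix,
   // estimate the EDM and, for strategy 2, refine the seed with a full Hesse calculation.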

   MnPrint print("MnSeedGenerator");

   // find seed (initial minimization point) using the calculated gradient
   const unsigned int n = st.VariableParameters();
   const MnMachinePrecision &prec = st.Precision();

   print.Info("Computing seed using NumericalGradient calculator");

   print.Debug(n, "free parameters, FCN pointer", &fcn);

   // initial starting values
   MnAlgebraicVector x(n);
   for (unsigned int i = 0; i < n; i++)
      x(i) = st.IntParameters()[i];
   double fcnmin = fcn(x);

   MinimumParameters pa(x, fcnmin);
   FunctionGradient dgrad = gc(pa);
   MnAlgebraicSymMatrix mat(n);
   double dcovar = 1.;
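   // seed the error matrix: reuse the existing (user) covariance when available,
   // otherwise start from a diagonal approximation built from the second derivatives;
   // dcovar records how approximate the matrix is (0 = existing covariance, 1 = rough guess)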
   if (st.HasCovariance()) {
      for (unsigned int i = 0; i < n; i++)
         for (unsigned int j = i; j < n; j++)
            mat(i, j) = st.IntCovariance()(i, j);
      dcovar = 0.;
   } else {
      for (unsigned int i = 0; i < n; i++)
         // if G2 is small it is better to use an arbitrary value (e.g. 1)
         mat(i, i) = std::fabs(dgrad.G2()(i)) > prec.Eps() ? 1. / dgrad.G2()(i) : 1.0;
   }
   MinimumError err(mat, dcovar);

   double edm = VariableMetricEDMEstimator().Estimate(dgrad, err);
   MinimumState state(pa, err, dgrad, edm, fcn.NumOfCalls());

   print.Info("Initial state:", MnPrint::Oneline(state));

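   // if some second derivatives (G2) are negative, the diagonal seed matrix would not be
   // positive definite: run a line search in those parameters until all G2 become positive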
   NegativeG2LineSearch ng2ls;
   if (ng2ls.HasNegativeG2(dgrad, prec)) {
      print.Debug("Negative G2 Found", "\n point:", x, "\n grad :", dgrad.Grad(), "\n g2 :", dgrad.G2());

      state = ng2ls(fcn, state, gc, prec);

      print.Info("Negative G2 found - new state:", state);
   }

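   // at strategy 2, when no covariance was supplied, pay the extra function calls
   // to compute the full second-derivative matrix with MnHesse before seeding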
   if (stra.Strategy() == 2 && !st.HasCovariance()) {
      // calculate full 2nd derivative

      print.Debug("calling MnHesse");

      MinimumState tmp = MnHesse(stra)(fcn, state, st.Trafo());

      print.Info("run Hesse - Initial seeding state:", tmp);

      return MinimumSeed(tmp, st.Trafo());
   }

   print.Info("Initial state ", state);

   return MinimumSeed(state, st.Trafo());
}

MinimumSeed MnSeedGenerator::operator()(const MnFcn &fcn, const AnalyticalGradientCalculator &gc,
                                        const MnUserParameterState &st, const MnStrategy &stra) const
{
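   // Build the seed using an analytical (external) gradient calculator.
   // If the calculator can also provide G2 or the full Hessian they are used directly;
   // otherwise the work is delegated to the numerical-gradient overload above.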
   MnPrint print("MnSeedGenerator");

   // check gradient (slow: will require more function evaluations)
   //if (gc.CheckGradient()) {
   //   //CheckGradient(st,trafo,stra,grd)
   //}

   if (!gc.CanComputeG2()) {
      print.Info("Using analytical (external) gradient calculator but cannot compute G2 - using numerical gradient for G2");
      Numerical2PGradientCalculator ngc(fcn, st.Trafo(), stra);
      return this->operator()(fcn, ngc, st, stra);
   }

   if (gc.CanComputeHessian())
      print.Info("Computing seed using analytical (external) gradients and Hessian calculator");
   else
      print.Info("Computing seed using analytical (external) gradients and G2 calculator");

   // find seed (initial point for minimization) using analytical gradient
   unsigned int n = st.VariableParameters();
   const MnMachinePrecision &prec = st.Precision();

   // initial starting values
   MnAlgebraicVector x(n);
   for (unsigned int i = 0; i < n; i++)
      x(i) = st.IntParameters()[i];
   double fcnmin = fcn(x);
   MinimumParameters pa(x, fcnmin);

   // compute function gradient
   FunctionGradient grad = gc(pa);
   double dcovar = 0;
   MnAlgebraicSymMatrix mat(n);
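   // prefer second derivatives from the external calculator: if the gradient does not
   // carry G2, compute the full Hessian and take its diagonal as G2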
   // if we can compute the Hessian, compute it and use it
   bool computedHessian = false;
   if (!grad.HasG2()) {
      assert(gc.CanComputeHessian());
      MnAlgebraicSymMatrix hmat(n);
      bool ret = gc.Hessian(pa, hmat);
      if (!ret) {
         print.Error("Cannot compute G2 and Hessian");
         assert(false);
      }
      // update gradient using G2 from the Hessian calculation
      MnAlgebraicVector g2(n);
      for (unsigned int i = 0; i < n; i++)
         g2(i) = hmat(i, i);
      grad = FunctionGradient(grad.Grad(), g2);

      print.Debug("Computed analytical G2", g2);

      // when the Hessian has been computed, invert it to get the covariance;
      // we prefer not to use the full Hessian in strategy 1 since we need to be sure that
      // it is pos-defined. Uncomment the following line to seed with the full Hessian
      //computedHessian = true;
      if (computedHessian) {
         int ifail = 0;
         mat = MinimumError::InvertMatrix(hmat, ifail);
         print.Info("Use full Hessian as seed");
         print.Debug("computed Hessian", hmat);
         print.Debug("computed Error matrix (H^-1)", mat);
      }
   }
   // do this only when we have not computed the Hessian, or always?
   if (!computedHessian) {
      // check if the minimum state has a covariance - if not, use the computed G2
      // maybe this should be an option; sometimes it is not good to re-use the existing covariance
      if (st.HasCovariance()) {
         print.Info("Using existing covariance matrix");
         for (unsigned int i = 0; i < n; i++)
            for (unsigned int j = i; j < n; j++)
               mat(i, j) = st.IntCovariance()(i, j);
         dcovar = 0.;
      } else {
         for (unsigned int i = 0; i < n; i++) {
            // if G2 is very small, it is better to use an arbitrary value (e.g. 1.)
            mat(i, i) = std::fabs(grad.G2()(i)) > prec.Eps() ? 1. / grad.G2()(i) : 1.0;
         }
         dcovar = 1.;
      }
   } else {
      print.Info("Computing seed using full Hessian");
   }

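   // assemble the seed state: approximate error matrix, EDM estimate from the gradient
   // and covariance, and the number of FCN calls used so far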
   MinimumError err(mat, dcovar);
   double edm = VariableMetricEDMEstimator().Estimate(grad, err);

   if (!grad.HasG2()) {
      print.Error("Cannot compute seed because G2 is not computed");
   }
   MinimumState state(pa, err, grad, edm, fcn.NumOfCalls());
   NegativeG2LineSearch ng2ls;
   if (ng2ls.HasNegativeG2(grad, prec)) {
      // do a negative line search - can use the current gradient calculator
      //Numerical2PGradientCalculator ngc(fcn, st.Trafo(), stra);
      state = ng2ls(fcn, state, gc, prec);
   }

   // the Hessian computed above does not get the pos-def check that is applied when calling MnHesse
   if (stra.Strategy() == 2 && !st.HasCovariance() && !computedHessian) {
      // calculate the full 2nd derivative
      MinimumState tmpState = MnHesse(stra)(fcn, state, st.Trafo());
      print.Info("Compute full Hessian: Initial seeding state is ", tmpState);
      return MinimumSeed(tmpState, st.Trafo());
   }

   print.Info("Initial seeding state ", state);

   return MinimumSeed(state, st.Trafo());
}

#if 0
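// Disabled legacy check of a user-provided gradient: compare it against a numerically
// refined gradient and reject it when the discrepancy exceeds the estimated uncertainty.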
bool CheckGradient(MinimumState & st, MnUserTransformation & trafo, MnStrategy & stra)
{

   const MinimumParameters & pa = st.Parameters();
   const FunctionGradient & grd = st.FunctionGradient();

   // I think one should use Numerical2PGradientCalculator
   // since the step sizes and G2 of the initial gradient are wrong
   FunctionGradient tmp = igc(pa);
   // should also use G2 from grd (in case AnalyticalGradient can compute the Hessian?)
   FunctionGradient dgrad(grd.Grad(), tmp.G2(), tmp.Gstep());

   // check by computing the gradient with HessianGradientCalculator, which refines the gradient given an initial one
   bool good = true;
   std::pair<FunctionGradient, MnAlgebraicVector> hgrd = hgc.DeltaGradient(pa, dgrad);
   for (unsigned int i = 0; i < n; i++) {
      if (std::fabs(hgrd.first.Grad()(i) - grd.Grad()(i)) > hgrd.second(i)) {
         int externalParameterIndex = trafo.ExtOfInt(i);
         const char *parameter_name = trafo.Name(externalParameterIndex);
         print.Warn("Gradient discrepancy of external Parameter too large:"
                    "parameter_name =",
                    parameter_name, "externalParameterIndex =", externalParameterIndex, "internal =", i);
         good = false;
      }
   }
   if (!good) {
      print.Error("Minuit does not accept user specified Gradient. To force acceptance, override 'virtual bool "
                  "CheckGradient() const' of FCNGradientBase.h in the derived class.");

      assert(good);
   }
   return good;
}
#endif

} // namespace Minuit2

} // namespace ROOT