Logo ROOT  
Reference Guide
MnSeedGenerator.cxx
Go to the documentation of this file.
1// @(#)root/minuit2:$Id$
2// Authors: M. Winkler, F. James, L. Moneta, A. Zsenei 2003-2005
3
4/**********************************************************************
5 * *
6 * Copyright (c) 2005 LCG ROOT Math team, CERN/PH-SFT *
7 * *
8 **********************************************************************/
9
#include "Minuit2/MnSeedGenerator.h"
#include "Minuit2/MinimumSeed.h"
#include "Minuit2/MinimumState.h"
#include "Minuit2/MinimumParameters.h"
#include "Minuit2/MinimumError.h"
#include "Minuit2/FunctionGradient.h"
#include "Minuit2/MnFcn.h"
#include "Minuit2/GradientCalculator.h"
#include "Minuit2/AnalyticalGradientCalculator.h"
#include "Minuit2/InitialGradientCalculator.h"
#include "Minuit2/Numerical2PGradientCalculator.h"
#include "Minuit2/HessianGradientCalculator.h"
#include "Minuit2/MnMatrix.h"
#include "Minuit2/MnMachinePrecision.h"
#include "Minuit2/MnUserParameterState.h"
#include "Minuit2/MnStrategy.h"
#include "Minuit2/MnHesse.h"
#include "Minuit2/NegativeG2LineSearch.h"
#include "Minuit2/VariableMetricEDMEstimator.h"
#include "Minuit2/MnPrint.h"

#include <cassert>
#include <cmath>
36
37namespace ROOT {
38
39namespace Minuit2 {
40
42operator()(const MnFcn &fcn, const GradientCalculator &gc, const MnUserParameterState &st, const MnStrategy &stra) const
43{
44
45 MnPrint print("MnSeedGenerator");
46
47 // find seed (initial minimization point) using the calculated gradient
48 const unsigned int n = st.VariableParameters();
49 const MnMachinePrecision &prec = st.Precision();
50
51 print.Debug(n, "free parameters, FCN pointer", &fcn);
52
53 // initial starting values
55 for (unsigned int i = 0; i < n; i++)
56 x(i) = st.IntParameters()[i];
57 double fcnmin = fcn(x);
58
59 MinimumParameters pa(x, fcnmin);
60 FunctionGradient dgrad = gc(pa);
62 double dcovar = 1.;
63 if (st.HasCovariance()) {
64 for (unsigned int i = 0; i < n; i++)
65 for (unsigned int j = i; j < n; j++)
66 mat(i, j) = st.IntCovariance()(i, j);
67 dcovar = 0.;
68 } else {
69 for (unsigned int i = 0; i < n; i++)
70 mat(i, i) = (std::fabs(dgrad.G2()(i)) > prec.Eps2() ? 1. / dgrad.G2()(i) : 1.);
71 }
72 MinimumError err(mat, dcovar);
73
74 double edm = VariableMetricEDMEstimator().Estimate(dgrad, err);
75 MinimumState state(pa, err, dgrad, edm, fcn.NumOfCalls());
76
77 print.Info("Initial state:", MnPrint::Oneline(state));
78
80 if (ng2ls.HasNegativeG2(dgrad, prec)) {
81 print.Debug("Negative G2 Found", "\n point:", x, "\n grad :", dgrad.Grad(), "\n g2 :", dgrad.G2());
82
83 state = ng2ls(fcn, state, gc, prec);
84
85 print.Info("Negative G2 found - new state:", state);
86 }
87
88 if (stra.Strategy() == 2 && !st.HasCovariance()) {
89 // calculate full 2nd derivative
90
91 print.Debug("calling MnHesse");
92
93 MinimumState tmp = MnHesse(stra)(fcn, state, st.Trafo());
94
95 print.Info("run Hesse - new state:", tmp);
96
97 return MinimumSeed(tmp, st.Trafo());
98 }
99
100 return MinimumSeed(state, st.Trafo());
101}
102
104 const MnUserParameterState &st, const MnStrategy &stra) const
105{
106 MnPrint print("MnSeedGenerator");
107
108 // find seed (initial point for minimization) using analytical gradient
109 unsigned int n = st.VariableParameters();
110 const MnMachinePrecision &prec = st.Precision();
111
112 // initial starting values
114 for (unsigned int i = 0; i < n; i++)
115 x(i) = st.IntParameters()[i];
116 double fcnmin = fcn(x);
117 MinimumParameters pa(x, fcnmin);
118
119 InitialGradientCalculator igc(fcn, st.Trafo(), stra);
120 FunctionGradient tmp = igc(pa);
121 FunctionGradient grd = gc(pa);
122 FunctionGradient dgrad(grd.Grad(), tmp.G2(), tmp.Gstep());
123
124 if (gc.CheckGradient()) {
125 bool good = true;
126 HessianGradientCalculator hgc(fcn, st.Trafo(), MnStrategy(2));
127 std::pair<FunctionGradient, MnAlgebraicVector> hgrd = hgc.DeltaGradient(pa, dgrad);
128 for (unsigned int i = 0; i < n; i++) {
129 if (std::fabs(hgrd.first.Grad()(i) - grd.Grad()(i)) > hgrd.second(i)) {
130 int externalParameterIndex = st.Trafo().ExtOfInt(i);
131 const char *parameter_name = st.Trafo().Name(externalParameterIndex);
132 print.Warn("Gradient discrepancy of external Parameter too large:"
133 "parameter_name =",
134 parameter_name, "externalParameterIndex =", externalParameterIndex, "internal =", i);
135 good = false;
136 }
137 }
138 if (!good) {
139 print.Error("Minuit does not accept user specified Gradient. To force acceptance, override 'virtual bool "
140 "CheckGradient() const' of FCNGradientBase.h in the derived class.");
141
142 assert(good);
143 }
144 }
145
147 double dcovar = 1.;
148 if (st.HasCovariance()) {
149 for (unsigned int i = 0; i < n; i++)
150 for (unsigned int j = i; j < n; j++)
151 mat(i, j) = st.IntCovariance()(i, j);
152 dcovar = 0.;
153 } else {
154 for (unsigned int i = 0; i < n; i++)
155 mat(i, i) = (std::fabs(dgrad.G2()(i)) > prec.Eps2() ? 1. / dgrad.G2()(i) : 1.);
156 }
157 MinimumError err(mat, dcovar);
158 double edm = VariableMetricEDMEstimator().Estimate(dgrad, err);
159 MinimumState state(pa, err, dgrad, edm, fcn.NumOfCalls());
160
162 if (ng2ls.HasNegativeG2(dgrad, prec)) {
163 Numerical2PGradientCalculator ngc(fcn, st.Trafo(), stra);
164 state = ng2ls(fcn, state, ngc, prec);
165 }
166
167 if (stra.Strategy() == 2 && !st.HasCovariance()) {
168 // calculate full 2nd derivative
169 MinimumState tmpState = MnHesse(stra)(fcn, state, st.Trafo());
170 return MinimumSeed(tmpState, st.Trafo());
171 }
172
173 return MinimumSeed(state, st.Trafo());
174}
175
176} // namespace Minuit2
177
178} // namespace ROOT
const MnAlgebraicVector & Gstep() const
const MnAlgebraicVector & Grad() const
const MnAlgebraicVector & G2() const
interface class for gradient calculators
HessianGradientCalculator: class to calculate Gradient for Hessian.
std::pair< FunctionGradient, MnAlgebraicVector > DeltaGradient(const MinimumParameters &, const FunctionGradient &) const
Class to calculate an initial estimate of the gradient.
Class describing a symmetric matrix of size n.
Definition: LASymMatrix.h:45
MinimumError keeps the inv.
Definition: MinimumError.h:28
MinimumState keeps the information (position, Gradient, 2nd deriv, etc) after one minimization step (...
Definition: MinimumState.h:27
Wrapper class to FCNBase interface used internally by Minuit.
Definition: MnFcn.h:30
unsigned int NumOfCalls() const
Definition: MnFcn.h:39
API class for calculating the numerical covariance matrix (== 2x Inverse Hessian == 2x Inverse 2nd de...
Definition: MnHesse.h:39
Sets the relative floating point (double) arithmetic precision.
double Eps2() const
eps2 returns 2*sqrt(eps)
void Debug(const Ts &... args)
Definition: MnPrint.h:138
void Error(const Ts &... args)
Definition: MnPrint.h:120
void Info(const Ts &... args)
Definition: MnPrint.h:132
void Warn(const Ts &... args)
Definition: MnPrint.h:126
virtual MinimumSeed operator()(const MnFcn &, const GradientCalculator &, const MnUserParameterState &, const MnStrategy &) const
API class for defining three levels of strategies: low (0), medium (1), high (>=2); acts on: Migrad (...
Definition: MnStrategy.h:27
unsigned int Strategy() const
Definition: MnStrategy.h:38
class which holds the external user and/or internal Minuit representation of the parameters and error...
const MnMachinePrecision & Precision() const
const std::vector< double > & IntParameters() const
const MnUserTransformation & Trafo() const
const MnUserCovariance & IntCovariance() const
unsigned int ExtOfInt(unsigned int internal) const
const char * Name(unsigned int) const
In case that one of the components of the second derivative g2 calculated by the numerical Gradient c...
bool HasNegativeG2(const FunctionGradient &, const MnMachinePrecision &) const
class performing the numerical gradient calculation
double Estimate(const FunctionGradient &, const MinimumError &) const
Double_t x[n]
Definition: legend1.C:17
const Int_t n
Definition: legend1.C:16
VecExpr< UnaryOp< Fabs< T >, VecExpr< A, T, D >, T >, T, D > fabs(const VecExpr< A, T, D > &rhs)
tbb::task_arena is an alias of tbb::interface7::task_arena, which doesn't allow to forward declare tb...