RooEvaluatorWrapper::RooEvaluatorWrapper(/* top node, dataset, and evaluator configuration */)
   : RooAbsReal{"RooEvaluatorWrapper", "RooEvaluatorWrapper"},
     // ...
     _paramSet("paramSet", "Set of parameters", this)
     // ...
{
   // ...
   setData(*data, false);
   // ...
   _paramSet.add(_evaluator->getParameters());
   // Columns fed from the dataset are observables, not free parameters:
   // ...
   _paramSet.remove(*_paramSet.find(item.first->GetName()));
}
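// Hedged usage sketch: in user code this wrapper is normally created behind the
// scenes by RooAbsPdf::createNLL() when the vectorizing evaluator backend is
// selected; the object and variable names below are illustrative only.
//
//   RooRealVar x("x", "x", 0.0, -10.0, 10.0);
//   RooRealVar mean("mean", "mean", 0.0, -10.0, 10.0);
//   RooRealVar sigma("sigma", "sigma", 1.0, 0.1, 10.0);
//   RooGaussian pdf("pdf", "pdf", x, mean, sigma);
//   std::unique_ptr<RooDataSet> data{pdf.generate(RooArgSet(x), 1000)};
//   std::unique_ptr<RooAbsReal> nll{pdf.createNLL(*data, RooFit::EvalBackend::Cpu())};
//   double nllVal = nll->getVal(); // dispatches to the wrapped evaluator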
RooEvaluatorWrapper::RooEvaluatorWrapper(const RooEvaluatorWrapper &other /* ... */)
   : // ...
     _evaluator{other._evaluator},
     _topNode("topNode", this, other._topNode),
     // ...
     _paramSet("paramSet", "Set of parameters", this),
     _rangeName{other._rangeName},
     // ...
     _takeGlobalObservablesFromData{other._takeGlobalObservablesFromData}
{
   // ...
   _paramSet.add(other._paramSet);
}
RooEvaluatorWrapper::~RooEvaluatorWrapper() = default;
bool RooEvaluatorWrapper::getParameters(/* observables, outputSet, ... */) const
{
   // ...
   outputSet.add(_evaluator->getParameters());
   // ...
   outputSet.remove(*observables, false, true);
   // ...
   if (_data->getGlobalObservables() && _data->getGlobalObservables()->find(item.first->GetName())) {
      // ...
   }
   // ...
   if (_takeGlobalObservablesFromData && _data->getGlobalObservables()) {
      outputSet.replace(*_data->getGlobalObservables());
   }
   // ...
}
bool RooEvaluatorWrapper::setData(RooAbsData &data, bool /*cloneData*/)
{
   constexpr auto errMsg = "Error in RooAbsReal::setData(): only resetting with same-structured data is supported.";
   // ...
   // Release the old buffers by swapping with an empty stack:
   std::stack<std::vector<double>>{}.swap(_vectorBuffers);
   bool skipZeroWeights = !_pdf || !_pdf->getAttribute("BinnedLikelihoodActive");
   _dataSpans = RooFit::BatchModeDataHelpers::getDataSpans(*_data, _rangeName,
                                                           dynamic_cast<RooSimultaneous const *>(_pdf),
                                                           /* ... */);
   // ...
   throw std::runtime_error(errMsg);
   // ...
   const char *name = item.first->GetName();
   _evaluator->setInput(name, item.second, false);
   if (_paramSet.find(name)) {
      // ...
      throw std::runtime_error(errMsg);
   }
   // ...
}
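// Hedged usage sketch: because only same-structured resets are supported, an
// existing likelihood can be re-pointed at a new dataset with identical columns
// instead of being rebuilt (names are illustrative):
//
//   std::unique_ptr<RooAbsReal> nll{pdf.createNLL(dataA, RooFit::EvalBackend::Cpu())};
//   nll->setData(dataB, /*cloneData=*/false); // dataB must mirror dataA's structure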
void gradient(double *out) const
{
   // ...
   // Zero the output buffer before the gradient is accumulated into it:
   std::fill(out, out + _params.size(), 0.0);
   // ...
}
std::vector<std::string> const &collectedFunctions() { return _collectedFunctions; }
std::map<RooFit::Detail::DataKey, std::span<const double>> /* ... */;

// Pointer types of the compiled function and of its Clad-generated gradient;
// the three input buffers hold parameters, observables, and auxiliary constants:
using Func = double (*)(double *, double const *, double const *);
using Grad = void (*)(double *, double const *, double const *, double *);

// ...
std::size_t size = 0; // member of the per-observable bookkeeping struct (see _obsInfos below)
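// Sketch of the assumed calling convention behind the Func/Grad typedefs, based
// on the generated debug macro further below; buffer names are illustrative:
//
//   Func func = /* function pointer fetched from the interpreter */;
//   Grad grad = /* pointer to the Clad-generated <funcName>_grad_0 */;
//   double value = func(params.data(), observables.data(), auxConstants.data());
//   std::vector<double> out(params.size());
//   grad(params.data(), observables.data(), auxConstants.data(), out.data());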
std::vector<double> _observables;
std::map<RooFit::Detail::DataKey, ObsInfo> _obsInfos;
std::vector<double> _xlArr;
std::vector<std::string> _collectedFunctions;
void replaceAll(std::string &str, const std::string &from, const std::string &to)
{
   std::size_t start_pos = 0;
   while ((start_pos = str.find(from, start_pos)) != std::string::npos) {
      str.replace(start_pos, from.length(), to);
      start_pos += to.length(); // step past the replacement to avoid rescanning it
   }
}
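// Minimal usage example for the helper above (inputs are illustrative):
//
//   std::string code = "params[0] + params[0]";
//   replaceAll(code, "params[0]", "mu"); // -> "mu + mu"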
auto found = spans.find(key);
return found != spans.end() ? found->second.size() : -1;
ctx.addResult(param, "params[" + std::to_string(idx) + "]");
// ...
// Scalar observables (size == 1) map to a fixed slot in the observables buffer:
if (item.second.size == 1) {
   ctx.addResult(obsName, "obs[" + std::to_string(item.second.idx) + "]");
}
auto print = [](std::string const &msg) { oocoutI(nullptr, Fitting) << msg << std::endl; };
// ...
_xlArr = ctx.xlArr();
_collectedFunctions = ctx.collectedFunctions();
std::map<RooFit::Detail::DataKey, std::span<const double>>
// ... (function name and parameter list elided)
{
   std::map<RooFit::Detail::DataKey, std::span<const double>> spans;
   // ...
   std::size_t n = item.second.size();
   // ...
   // Flatten the column into the internal observables buffer:
   _observables.reserve(_observables.size() + n);
   for (std::size_t i = 0; i < n; ++i) {
      _observables.push_back(item.second[i]);
   }
   // ...
}
void RooFuncWrapper::createGradient()
{
   // ...
   gInterpreter->Declare("#include <Math/CladDerivator.h>\n");
   // Build the request that asks Clad to differentiate the generated function
   // with respect to its "params" argument:
   // ...
      "   clad::gradient(" << _funcName << ", \"params\");\n"
   // ...
   auto print = [](std::string const &msg) { oocoutI(nullptr, Fitting) << msg << std::endl; };
   // ...
   errorMsg << "Function could not be differentiated. See above for details.";
   throw std::runtime_error(errorMsg.str().c_str());
   // ...
   // Fetch the generated gradient as a typed function pointer from the interpreter:
   std::stringstream ss;
   ss << "static_cast<void (*)(double *, double const *, double const *, double *)>(" << gradName << ");";
   // ...
   errorMsg << "Function could not be differentiated since ROOT was built without Clad support.";
   throw std::runtime_error(errorMsg.str().c_str());
}
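// A sketch of the differentiation request that createGradient() effectively
// declares to the interpreter (the exact wrapper text is an assumption pieced
// together from the fragments above; "myFunc" is a placeholder):
//
//   #include <Math/CladDerivator.h>
//   #pragma clad ON
//   void myFunc_req() {
//      clad::gradient(myFunc, "params");
//   }
//   #pragma clad OFF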
void RooFuncWrapper::updateGradientVarBuffer() const
{
   // The return below sits in the lambda that copies the current parameter
   // values into the flat buffer; category parameters contribute their index:
   // ...
   return obj->isCategory() ? static_cast<RooAbsCategory *>(obj)->getCurrentIndex()
                            : static_cast<RooAbsReal *>(obj)->getVal();
   // ...
}
void RooFuncWrapper::writeDebugMacro(std::string const &filename) const
{
   // ...
   for (std::string const &name : _collectedFunctions) {
      // ...
      std::unique_ptr<TInterpreterValue> v = gInterpreter->MakeInterpreterValue();
      // ...
      std::string s = v->ToString();
      // Drop the first two lines of the interpreter's printout:
      for (int i = 0; i < 2; ++i) {
         s = s.erase(0, s.find("\n") + 1);
      }
      // ...
   }
   // ...
   outFile << R"(//auto-generated test macro
#include <RooFit/Detail/MathFuncs.h>
#include <Math/CladDerivator.h>

#pragma cling optimize(2)
// ...
void gradient_request() {
// ...
)";
   // ...
   // Serialize each buffer as a brace-initialized std::vector:
   std::stringstream decl;
   decl << "std::vector<double> " << name << " = {";
   for (std::size_t i = 0; i < vec.size(); ++i) {
      // ...
      if (i < vec.size() - 1)
         // ...
   }
   // ...
   outFile << "// clang-format off\n" << std::endl;
   // ...
   outFile << "// clang-format on\n" << std::endl;
   // The generated macro ends with a cross-check of the Clad gradient against
   // numerical differentiation:
   outFile << R"(
// To run as a ROOT macro
// ...
   std::vector<double> gradientVec(parametersVec.size());
   // ...
   auto func = [&](std::span<double> params) {
      return )"
           << _funcName << R"((params.data(), observablesVec.data(), auxConstantsVec.data());
   };
   auto grad = [&](std::span<double> params, std::span<double> out) {
      return )"
           << _funcName << R"(_grad_0(parametersVec.data(), observablesVec.data(), auxConstantsVec.data(),
   // ...
   grad(parametersVec, gradientVec);

   auto numDiff = [&](int i) {
      const double eps = 1e-6;
      std::vector<double> p{parametersVec};
      p[i] = parametersVec[i] - eps;
      double funcValDown = func(p);
      p[i] = parametersVec[i] + eps;
      double funcValUp = func(p);
      return (funcValUp - funcValDown) / (2 * eps);
   };

   for (std::size_t i = 0; i < parametersVec.size(); ++i) {
      std::cout << i << ":" << std::endl;
      std::cout << " numr : " << numDiff(i) << std::endl;
      std::cout << " clad : " << gradientVec[i] << std::endl;
   }
// ...
)";
}
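// Note on the check above: the symmetric difference quotient
//   f'(x) ~ (f(x + eps) - f(x - eps)) / (2 * eps)
// has O(eps^2) truncation error, so the "numr" and "clad" columns should agree
// to several significant digits when the parameters are well conditioned.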
double RooEvaluatorWrapper::evaluate() const
{
   // ... (offset mode selection; the fallback branch of the ternary is:)
   //     : RooFit::EvalContext::OffsetMode::WithOffset);
   return _evaluator->run()[0];
}
void RooEvaluatorWrapper::createFuncWrapper()
{
   // ...
   this->getParameters(_data ? _data->get() : nullptr, paramSet, false);
   // ...
}
void RooEvaluatorWrapper::generateGradient() { /* ... */ }

void RooEvaluatorWrapper::setUseGeneratedFunctionCode(bool flag) { /* ... */ }

void RooEvaluatorWrapper::gradient(double *out) const { /* ... */ }

bool RooEvaluatorWrapper::hasGradient() const { /* ... */ }

void RooEvaluatorWrapper::writeDebugMacro(std::string const &filename) const { /* ... */ }
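// Hedged usage sketch: after switching a fit to the generated-code path, the
// debug macro can be dumped and inspected (method names match the definitions
// above; the cast and file name are illustrative):
//
//   auto *wrapper = dynamic_cast<RooEvaluatorWrapper *>(nll.get());
//   wrapper->setUseGeneratedFunctionCode(true);
//   wrapper->generateGradient();
//   wrapper->writeDebugMacro("nllDebug"); // then load the macro in ROOT to compare gradients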