Logo ROOT  
Reference Guide
 
Loading...
Searching...
No Matches
RooEvaluatorWrapper.cxx
Go to the documentation of this file.
1/// \cond ROOFIT_INTERNAL
2
3/*
4 * Project: RooFit
5 * Authors:
6 * Jonas Rembser, CERN 2023
7 *
8 * Copyright (c) 2023, CERN
9 *
10 * Redistribution and use in source and binary forms,
11 * with or without modification, are permitted according to the terms
12 * listed in LICENSE (http://roofit.sourceforge.net/license.txt)
13 */
14
15/**
16\internal
17\file RooEvaluatorWrapper.cxx
18\class RooEvaluatorWrapper
19\ingroup Roofitcore
20
21Wraps a RooFit::Evaluator that evaluates a RooAbsReal back into a RooAbsReal.
22**/
23
24#include "RooEvaluatorWrapper.h"
25
26#include <RooAbsData.h>
27#include <RooAbsPdf.h>
28#include <RooMsgService.h>
29#include <RooRealVar.h>
30#include <RooSimultaneous.h>
31
33#include "RooFitImplHelpers.h"
34
35#include <TInterpreter.h>
36
37#include <fstream>
38
39namespace RooFit::Experimental {
40
41RooEvaluatorWrapper::RooEvaluatorWrapper(RooAbsReal &topNode, RooAbsData *data, bool useGPU,
42 std::string const &rangeName, RooAbsPdf const *pdf,
44 : RooAbsReal{"RooEvaluatorWrapper", "RooEvaluatorWrapper"},
45 _evaluator{std::make_unique<RooFit::Evaluator>(topNode, useGPU)},
46 _topNode("topNode", "top node", this, topNode, false, false),
47 _data{data},
48 _paramSet("paramSet", "Set of parameters", this),
49 _rangeName{rangeName},
50 _pdf{pdf},
51 _takeGlobalObservablesFromData{takeGlobalObservablesFromData}
52{
53 if (data) {
54 setData(*data, false);
55 }
56 _paramSet.add(_evaluator->getParameters());
57 for (auto const &item : _dataSpans) {
58 _paramSet.remove(*_paramSet.find(item.first->GetName()));
59 }
60}
61
62RooEvaluatorWrapper::RooEvaluatorWrapper(const RooEvaluatorWrapper &other, const char *name)
64 _evaluator{other._evaluator},
65 _topNode("topNode", this, other._topNode),
66 _data{other._data},
67 _paramSet("paramSet", "Set of parameters", this),
68 _rangeName{other._rangeName},
69 _pdf{other._pdf},
70 _takeGlobalObservablesFromData{other._takeGlobalObservablesFromData},
72{
73 _paramSet.add(other._paramSet);
74}
75
// Out-of-line defaulted destructor: presumably required because members hold
// smart pointers to types that are only forward-declared in the header (e.g.
// the RooFuncWrapper defined below) — TODO confirm against the header.
RooEvaluatorWrapper::~RooEvaluatorWrapper() = default;
77
78bool RooEvaluatorWrapper::getParameters(const RooArgSet *observables, RooArgSet &outputSet,
79 bool stripDisconnected) const
80{
81 outputSet.add(_evaluator->getParameters());
82 if (observables) {
83 outputSet.remove(*observables, /*silent*/ false, /*matchByNameOnly*/ true);
84 }
85 // Exclude the data variables from the parameters which are not global observables
86 for (auto const &item : _dataSpans) {
87 if (_data->getGlobalObservables() && _data->getGlobalObservables()->find(item.first->GetName())) {
88 continue;
89 }
90 RooAbsArg *found = outputSet.find(item.first->GetName());
91 if (found) {
92 outputSet.remove(*found);
93 }
94 }
95 // If we take the global observables as data, we have to return these as
96 // parameters instead of the parameters in the model. Otherwise, the
97 // constant parameters in the fit result that are global observables will
98 // not have the right values.
99 if (_takeGlobalObservablesFromData && _data->getGlobalObservables()) {
100 outputSet.replace(*_data->getGlobalObservables());
101 }
102
103 // The disconnected parameters are stripped away in
104 // RooAbsArg::getParametersHook(), that is only called in the original
105 // RooAbsArg::getParameters() implementation. So he have to call it to
106 // identify disconnected parameters to remove.
107 if (stripDisconnected) {
109 _topNode->getParameters(observables, paramsStripped, true);
111 for (RooAbsArg *param : outputSet) {
112 if (!paramsStripped.find(param->GetName())) {
113 toRemove.add(*param);
114 }
115 }
116 outputSet.remove(toRemove, /*silent*/ false, /*matchByNameOnly*/ true);
117 }
118
119 return false;
120}
121
122/// @brief A wrapper class to store a C++ function of type 'double (*)(double*, double*)'.
123/// The parameters can be accessed as params[<relative position of param in paramSet>] in the function body.
124/// The observables can be accessed as obs[i + j], where i represents the observable position and j
125/// represents the data entry.
126class RooFuncWrapper {
127public:
129
130 bool hasGradient() const { return _hasGradient; }
131 bool hasHessian() const { return _hasHessian; }
132 void gradient(double *out) const
133 {
135 std::fill(out, out + _params.size(), 0.0);
136 _grad(_varBuffer.data(), _observables.data(), _xlArr.data(), out);
137 }
138 void hessian(double *out) const
139 {
141 std::fill(out, out + _params.size() * _params.size(), 0.0);
142 _hessian(_varBuffer.data(), _observables.data(), _xlArr.data(), out);
143 }
144
145 void createGradient();
146 void createHessian();
147
148 void writeDebugMacro(std::string const &) const;
149
150 std::vector<std::string> const &collectedFunctions() { return _collectedFunctions; }
151
152 double evaluate() const
153 {
155 return _func(_varBuffer.data(), _observables.data(), _xlArr.data());
156 }
157
158 void loadData(RooAbsData const &data, RooSimultaneous const *simPdf);
159
160private:
161 void updateGradientVarBuffer() const;
162
164
165 using Func = double (*)(double *, double const *, double const *);
166 using Grad = void (*)(double *, double const *, double const *, double *);
167 using Hessian = void (*)(double *, double const *, double const *, double *);
168
169 RooArgList _params;
170 std::string _funcName;
171 Func _func;
172 Grad _grad;
173 Hessian _hessian;
174 bool _hasGradient = false;
175 bool _hasHessian = false;
176 mutable std::vector<double> _varBuffer;
177 std::vector<double> _observables;
178 std::unordered_map<RooFit::Detail::DataKey, std::size_t> _obsInfos;
179 std::vector<double> _xlArr;
180 std::vector<std::string> _collectedFunctions;
181};
182
183namespace {
184
/// Replace every occurrence of `from` in `str` with `to`, in place.
/// Matches introduced by a replacement are not re-scanned, so substitutions
/// such as 'x' -> 'yx' terminate.
void replaceAll(std::string &str, const std::string &from, const std::string &to)
{
   if (from.empty()) {
      return;
   }
   for (std::size_t pos = str.find(from); pos != std::string::npos; pos = str.find(from, pos)) {
      str.replace(pos, from.length(), to);
      // Skip past the inserted text in case `to` contains `from`.
      pos += to.length();
   }
}
195
197{
200
201 std::unordered_set<RooFit::Detail::DataKey> dependsOnData;
202 for (RooAbsArg *arg : dataObs) {
203 dependsOnData.insert(arg);
204 }
205
206 for (RooAbsArg *arg : serverSet) {
207 if (arg->getAttribute("__obs__")) {
208 dependsOnData.insert(arg);
209 }
210 for (RooAbsArg *server : arg->servers()) {
211 if (server->isValueServer(*arg)) {
212 if (dependsOnData.find(server) != dependsOnData.end() && !arg->isReducerNode()) {
213 dependsOnData.insert(arg);
214 break;
215 }
216 }
217 }
218 }
219
220 return dependsOnData;
221}
222
223} // namespace
224
225RooFuncWrapper::RooFuncWrapper(RooAbsReal &obj, const RooAbsData *data, RooSimultaneous const *simPdf,
226 RooArgSet const &paramSet)
227{
228 // Load the observables from the dataset
229 if (data) {
231 }
232
233 // Define the parameters
234 for (auto *param : paramSet) {
235 if (_obsInfos.find(param) == _obsInfos.end()) {
236 _params.add(*param);
237 }
238 }
239 _varBuffer.resize(_params.size());
240
241 // Figure out which part of the computation graph depends on data
242 std::unordered_set<RooFit::Detail::DataKey> dependsOnData;
243 if (data) {
244 dependsOnData = getDependsOnData(obj, *data->get());
245 }
246
247 // Set up the code generation context
249
250 // First update the result variable of params in the compute graph to in[<position>].
251 int idx = 0;
252 for (RooAbsArg *param : _params) {
253 ctx.addResult(param, "params[" + std::to_string(idx) + "]");
254 idx++;
255 }
256
257 for (auto const &item : _obsInfos) {
258 const char *obsName = item.first->GetName();
259 ctx.addResult(obsName, "obs");
260 ctx.addVecObs(obsName, item.second);
261 }
262
263 // Declare the function and create its derivative.
264 auto print = [](std::string const &msg) { oocoutI(nullptr, Fitting) << msg << std::endl; };
265 ROOT::Math::Util::TimingScope timingScope(print, "Function JIT time:");
266 _funcName = ctx.buildFunction(obj, dependsOnData);
267
268 // Make sure the codegen implementations are known to the interpreter
269 gInterpreter->Declare("#include <RooFit/CodegenImpl.h>\n");
270
271 if (!gInterpreter->Declare(ctx.collectedCode().c_str())) {
272 std::stringstream errorMsg;
273 std::string debugFileName = "_codegen_" + _funcName + ".cxx";
274 errorMsg << "Function " << _funcName << " could not be compiled. See above for details. Full code dumped to file "
275 << debugFileName << " for debugging";
276 {
277 std::ofstream outFile;
278 outFile.open(debugFileName.c_str());
279 outFile << ctx.collectedCode();
280 }
281 oocoutE(nullptr, InputArguments) << errorMsg.str() << std::endl;
282 throw std::runtime_error(errorMsg.str().c_str());
283 }
284
285 _func = reinterpret_cast<Func>(gInterpreter->ProcessLine((_funcName + ";").c_str()));
286
287 _xlArr = ctx.xlArr();
288 _collectedFunctions = ctx.collectedFunctions();
289}
290
291void RooFuncWrapper::loadData(RooAbsData const &data, RooSimultaneous const *simPdf)
292{
293 // Extract observables
294 std::stack<std::vector<double>> vectorBuffers; // for data loading
295 auto spans = RooFit::BatchModeDataHelpers::getDataSpans(data, "", simPdf, true, false, vectorBuffers);
296
297 _observables.clear();
298 // The first elements contain the sizes of the packed observable arrays
299 std::size_t total = 0;
300 _observables.reserve(2 * spans.size());
301 std::size_t idx = 0;
302 for (auto const &item : spans) {
303 _obsInfos.emplace(item.first, idx);
304 _observables.push_back(total + 2 * spans.size());
305 _observables.push_back(item.second.size());
306 total += item.second.size();
307 idx += 1;
308 }
309 idx = 0;
310 for (auto const &item : spans) {
311 std::size_t n = item.second.size();
312 _observables.reserve(_observables.size() + n);
313 for (std::size_t i = 0; i < n; ++i) {
314 _observables.push_back(item.second[i]);
315 }
316 idx += n;
317 }
318}
319
/// Ask Clad (via the interpreter) to differentiate the JIT-compiled function
/// with respect to the parameter array, and cache the resulting gradient
/// function pointer. Throws std::runtime_error on failure or when ROOT was
/// built without Clad.
void RooFuncWrapper::createGradient()
{
#ifdef ROOFIT_CLAD
   // Naming convention of the Clad-generated overload and of the helper that
   // requests its generation.
   std::string gradName = _funcName + "_grad_0";
   std::string requestName = _funcName + "_req";

   // Calculate gradient
   gInterpreter->Declare("#include <Math/CladDerivator.h>\n");
   // clang-format is disabled because it would reflow the embedded code string.
   // clang-format off
   std::stringstream requestFuncStrm;
   requestFuncStrm << "#pragma clad ON\n"
                      "void " << requestName << "() {\n"
                      " clad::gradient(" << _funcName << ", \"params\");\n"
                      "}\n"
                      "#pragma clad OFF";
   // clang-format on
   auto print = [](std::string const &msg) { oocoutI(nullptr, Fitting) << msg << std::endl; };

   bool cladSuccess = false;
   {
      ROOT::Math::Util::TimingScope timingScope(print, "Gradient generation time:");
      // Note: Declare() returns true on success, so despite its name
      // `cladSuccess` is true on FAILURE.
      cladSuccess = !gInterpreter->Declare(requestFuncStrm.str().c_str());
   }
   if (cladSuccess) {
      std::stringstream errorMsg;
      errorMsg << "Function could not be differentiated. See above for details.";
      oocoutE(nullptr, InputArguments) << errorMsg.str() << std::endl;
      throw std::runtime_error(errorMsg.str().c_str());
   }

   // Clad provides different overloads for the gradient, and we need to
   // resolve to the one that we want. Without the static_cast, getting the
   // function pointer would be ambiguous.
   std::stringstream ss;
   ROOT::Math::Util::TimingScope timingScope(print, "Gradient IR to machine code time:");
   ss << "static_cast<void (*)(double *, double const *, double const *, double *)>(" << gradName << ");";
   _grad = reinterpret_cast<Grad>(gInterpreter->ProcessLine(ss.str().c_str()));
   _hasGradient = true;
#else
   // ROOT was built without Clad: report and fail loudly instead of silently
   // leaving the wrapper without derivatives.
   _hasGradient = false;
   std::stringstream errorMsg;
   errorMsg << "Function could not be differentiated since ROOT was built without Clad support.";
   oocoutE(nullptr, InputArguments) << errorMsg.str() << std::endl;
   throw std::runtime_error(errorMsg.str().c_str());
#endif
}
367
/// Ask Clad (via the interpreter) to generate the Hessian of the
/// JIT-compiled function with respect to all parameters, and cache the
/// resulting function pointer. Throws std::runtime_error on failure or when
/// ROOT was built without Clad.
void RooFuncWrapper::createHessian()
{
#ifdef ROOFIT_CLAD
   // Naming convention of the Clad-generated overload and of the helper that
   // requests its generation.
   std::string hessianName = _funcName + "_hessian_0";
   std::string requestName = _funcName + "_hessian_req";

   // Calculate Hessian
   gInterpreter->Declare("#include <Math/CladDerivator.h>\n");
   // clang-format is disabled because it would reflow the embedded code string.
   // clang-format off
   std::stringstream requestFuncStrm;
   // Differentiate with respect to the whole parameter array; Clad's range
   // syntax "params[0:N-1]" is needed for more than one parameter.
   std::string paramsStr =
      _params.size() == 1 ? "\"params[0]\"" : ("\"params[0:" + std::to_string(_params.size() - 1) + "]\"");
   requestFuncStrm << "#pragma clad ON\n"
                      "void " << requestName << "() {\n"
                      " clad::hessian(" << _funcName << ", " << paramsStr << ");\n"
                      "}\n"
                      "#pragma clad OFF";
   // clang-format on
   auto print = [](std::string const &msg) { oocoutI(nullptr, Fitting) << msg << std::endl; };

   bool cladSuccess = false;
   {
      ROOT::Math::Util::TimingScope timingScope(print, "Hessian generation time:");
      // Note: Declare() returns true on success, so despite its name
      // `cladSuccess` is true on FAILURE.
      cladSuccess = !gInterpreter->Declare(requestFuncStrm.str().c_str());
   }
   if (cladSuccess) {
      std::stringstream errorMsg;
      errorMsg << "Function could not be differentiated. See above for details.";
      oocoutE(nullptr, InputArguments) << errorMsg.str() << std::endl;
      throw std::runtime_error(errorMsg.str().c_str());
   }

   // Clad provides different overloads for the Hessian, and we need to
   // resolve to the one that we want. Without the static_cast, getting the
   // function pointer would be ambiguous.
   std::stringstream ss;
   ROOT::Math::Util::TimingScope timingScope(print, "Hessian IR to machine code time:");
   ss << "static_cast<void (*)(double *, double const *, double const *, double *)>(" << hessianName << ");";
   _hessian = reinterpret_cast<Hessian>(gInterpreter->ProcessLine(ss.str().c_str()));
   _hasHessian = true;
#else
   // ROOT was built without Clad: report and fail loudly instead of silently
   // leaving the wrapper without derivatives.
   _hasHessian = false;
   std::stringstream errorMsg;
   errorMsg << "Function could not be differentiated since ROOT was built without Clad support.";
   oocoutE(nullptr, InputArguments) << errorMsg.str() << std::endl;
   throw std::runtime_error(errorMsg.str().c_str());
#endif
}
417
418void RooFuncWrapper::updateGradientVarBuffer() const
419{
420 std::transform(_params.begin(), _params.end(), _varBuffer.begin(), [](RooAbsArg *obj) {
421 return obj->isCategory() ? static_cast<RooAbsCategory *>(obj)->getCurrentIndex()
422 : static_cast<RooAbsReal *>(obj)->getVal();
423 });
424}
425
/// @brief Dumps a macro "filename.C" that can be used to test and debug the generated code and gradient.
/// The macro embeds the generated functions, the current parameter values,
/// the packed observables, and the auxiliary constants, plus a driver that
/// compares the Clad gradient (and optionally Hessian) against numerical
/// central differences.
void RooFuncWrapper::writeDebugMacro(std::string const &filename) const
{
   std::stringstream allCode;
   std::set<std::string> seenFunctions;

   // Remove duplicated declared functions
   for (std::string const &name : _collectedFunctions) {
      if (seenFunctions.count(name) > 0) {
         continue;
      }
      seenFunctions.insert(name);
      // Retrieve the function's source text from the interpreter; the first
      // two lines of the interpreter value printout are stripped off.
      std::unique_ptr<TInterpreterValue> v = gInterpreter->MakeInterpreterValue();
      gInterpreter->Evaluate(name.c_str(), *v);
      std::string s = v->ToString();
      for (int i = 0; i < 2; ++i) {
         s = s.erase(0, s.find("\n") + 1);
      }
      allCode << s << std::endl;
   }

   std::ofstream outFile;
   std::string paramsStr =
      _params.size() == 1 ? "\"params[0]\"" : ("\"params[0:" + std::to_string(_params.size() - 1) + "]\"");
   outFile.open(filename + ".C");
   outFile << R"(//auto-generated test macro
#include <RooFit/Detail/MathFuncs.h>
#include <Math/CladDerivator.h>

//#define DO_HESSIAN

)" << allCode.str()
           << R"(
#pragma clad ON
void gradient_request() {
 clad::gradient()"
           << _funcName << R"(, "params");
#ifdef DO_HESSIAN
 clad::hessian()"
           << _funcName << ", " << paramsStr << R"();
#endif
}
#pragma clad OFF
)";

   // NOTE(review): one line (original line 471) is missing from this listing
   // here — presumably updateGradientVarBuffer(), which would refresh
   // _varBuffer before it is dumped below. Confirm against upstream.

   // Serialize a std::vector<double> definition, 10 values per line, mapping
   // "inf"/"nan" textual output back to compilable C++ expressions.
   auto writeVector = [&](std::string const &name, std::span<const double> vec) {
      std::stringstream decl;
      decl << "std::vector<double> " << name << " = {";
      for (std::size_t i = 0; i < vec.size(); ++i) {
         if (i % 10 == 0)
            decl << "\n ";
         decl << vec[i];
         if (i < vec.size() - 1)
            decl << ", ";
      }
      decl << "\n};\n";

      std::string declStr = decl.str();

      replaceAll(declStr, "inf", "std::numeric_limits<double>::infinity()");
      replaceAll(declStr, "nan", "NAN");

      outFile << declStr;
   };

   outFile << "// clang-format off\n" << std::endl;
   writeVector("parametersVec", _varBuffer);
   outFile << std::endl;
   writeVector("observablesVec", _observables);
   outFile << std::endl;
   writeVector("auxConstantsVec", _xlArr);
   outFile << std::endl;
   outFile << "// clang-format on\n" << std::endl;

   outFile << R"(
// To run as a ROOT macro
void )" << filename
           << R"(()
{
   const std::size_t n = parametersVec.size();

   std::vector<double> gradientVec(n);

   auto func = [&](std::span<double> params) {
      return )"
           << _funcName << R"((params.data(), observablesVec.data(), auxConstantsVec.data());
   };
   auto grad = [&](std::span<double> params, std::span<double> out) {
      return )"
           << _funcName << R"(_grad_0(parametersVec.data(), observablesVec.data(), auxConstantsVec.data(),
                                      out.data());
   };

   grad(parametersVec, gradientVec);

   auto numDiff = [&](int i) {
      const double eps = 1e-6;
      std::vector<double> p{parametersVec};
      p[i] = parametersVec[i] - eps;
      double funcValDown = func(p);
      p[i] = parametersVec[i] + eps;
      double funcValUp = func(p);
      return (funcValUp - funcValDown) / (2 * eps);
   };

   for (std::size_t i = 0; i < parametersVec.size(); ++i) {
      std::cout << i << ":" << std::endl;
      std::cout << " numr : " << numDiff(i) << std::endl;
      std::cout << " clad : " << gradientVec[i] << std::endl;
   }

#ifdef DO_HESSIAN
   std::cout << "\n";

   auto hess = [&](std::span<double> params, std::span<double> out) {
      return )"
           << _funcName << R"(_hessian_0(params.data(), observablesVec.data(), auxConstantsVec.data(), out.data());
   };

   std::vector<double> hessianVec(n * n);
   hess(parametersVec, hessianVec);

   // ---------- Numerical Hessian ----------
   // Uses central differences:
   // diag: (f(x+ei)-2f(x)+f(x-ei))/eps^2
   // offdiag: (f(++ ) - f(+-) - f(-+) + f(--)) / (4 eps^2)
   auto numHess = [&](std::size_t i, std::size_t j) {
      const double eps = 1e-5; // often needs to be a bit larger than grad eps
      std::vector<double> p(parametersVec.begin(), parametersVec.end());

      if (i == j) {
         const double f0 = func(p);

         p[i] = parametersVec[i] + eps;
         const double fUp = func(p);

         p[i] = parametersVec[i] - eps;
         const double fDown = func(p);

         return (fUp - 2.0 * f0 + fDown) / (eps * eps);
      } else {
         // f(x_i + eps, x_j + eps)
         p[i] = parametersVec[i] + eps;
         p[j] = parametersVec[j] + eps;
         const double fPP = func(p);

         // f(x_i + eps, x_j - eps)
         p[i] = parametersVec[i] + eps;
         p[j] = parametersVec[j] - eps;
         const double fPM = func(p);

         // f(x_i - eps, x_j + eps)
         p[i] = parametersVec[i] - eps;
         p[j] = parametersVec[j] + eps;
         const double fMP = func(p);

         // f(x_i - eps, x_j - eps)
         p[i] = parametersVec[i] - eps;
         p[j] = parametersVec[j] - eps;
         const double fMM = func(p);

         return (fPP - fPM - fMP + fMM) / (4.0 * eps * eps);
      }
   };

   // Compute full numerical Hessian
   std::vector<double> numHessianVec(n * n);
   for (std::size_t i = 0; i < n; ++i) {
      for (std::size_t j = 0; j < n; ++j) {
         numHessianVec[i + n * j] = numHess(i, j); // keep same layout as your print
      }
   }

   // ---------- Compare & print ----------
   std::cout << "Hessian comparison (clad vs numeric vs diff):\n\n";

   for (std::size_t i = 0; i < n; ++i) {
      for (std::size_t j = 0; j < n; ++j) {
         const std::size_t idx = i + n * j; // same indexing you used
         const double cladH = hessianVec[idx];
         const double numH = numHessianVec[idx];
         const double diff = cladH - numH;

         std::cout << "[" << i << "," << j << "] "
                   << "clad=" << cladH << " num=" << numH << " diff=" << diff << "\n";
      }
   }

   std::cout << "\nRaw Clad Hessian matrix:\n";
   for (std::size_t i = 0; i < n; ++i) {
      for (std::size_t j = 0; j < n; ++j) {
         std::cout << hessianVec[i + n * j] << " ";
      }
      std::cout << "\n";
   }

   std::cout << "\nRaw Numerical Hessian matrix:\n";
   for (std::size_t i = 0; i < n; ++i) {
      for (std::size_t j = 0; j < n; ++j) {
         std::cout << numHessianVec[i + n * j] << " ";
      }
      std::cout << "\n";
   }
#endif
}
)";
}
635
/// Evaluate the wrapped computation graph and return its (scalar) result.
double RooEvaluatorWrapper::evaluate() const
{
   // NOTE(review): this listing is missing a line here (original line 638) —
   // presumably a guard such as "if (_useGeneratedFunctionCode)" so that the
   // generated-code path is only taken on request. As shown, everything below
   // the first return would be unreachable. Confirm against upstream.
   return _funcWrapper->evaluate();

   if (!_evaluator)
      return 0.0;

   // Honor RooAbsReal's offsetting setting for likelihood evaluation.
   _evaluator->setOffsetMode(hideOffset() ? RooFit::EvalContext::OffsetMode::WithoutOffset
                                          : RooFit::EvalContext::OffsetMode::WithOffset);

   return _evaluator->run()[0];
}
649
650bool RooEvaluatorWrapper::setData(RooAbsData &data, bool /*cloneData*/)
651{
652 // To make things easier for RooFit, we only support resetting with
653 // datasets that have the same structure, e.g. the same columns and global
654 // observables. This is anyway the usecase: resetting same-structured data
655 // when iterating over toys.
656 constexpr auto errMsg = "Error in RooAbsReal::setData(): only resetting with same-structured data is supported.";
657
658 _data = &data;
659 bool isInitializing = _paramSet.empty();
660 const std::size_t oldSize = _dataSpans.size();
661
662 std::stack<std::vector<double>>{}.swap(_vectorBuffers);
663 bool skipZeroWeights = !_pdf || !_pdf->getAttribute("BinnedLikelihoodActive");
664 auto simPdf = dynamic_cast<RooSimultaneous const *>(_pdf);
665 _dataSpans = RooFit::BatchModeDataHelpers::getDataSpans(*_data, _rangeName, simPdf, skipZeroWeights,
666 _takeGlobalObservablesFromData, _vectorBuffers);
667 if (!isInitializing && _dataSpans.size() != oldSize) {
668 coutE(DataHandling) << errMsg << std::endl;
669 throw std::runtime_error(errMsg);
670 }
671 for (auto const &item : _dataSpans) {
672 const char *name = item.first->GetName();
673 _evaluator->setInput(name, item.second, false);
674 if (_paramSet.find(name)) {
675 coutE(DataHandling) << errMsg << std::endl;
676 throw std::runtime_error(errMsg);
677 }
678 }
679 if (_funcWrapper) {
680 _funcWrapper->loadData(*_data, simPdf);
681 }
682 return true;
683}
684
685void RooEvaluatorWrapper::createFuncWrapper()
686{
687 // Get the parameters.
689 this->getParameters(_data ? _data->get() : nullptr, paramSet, /*sripDisconnectedParams=*/false);
690
692 std::make_unique<RooFuncWrapper>(*_topNode, _data, dynamic_cast<RooSimultaneous const *>(_pdf), paramSet);
693}
694
695void RooEvaluatorWrapper::generateGradient()
696{
697 if (!_funcWrapper)
699 if (!_funcWrapper->hasGradient())
700 _funcWrapper->createGradient();
701}
702
703void RooEvaluatorWrapper::generateHessian()
704{
705 if (!_funcWrapper)
707 if (!_funcWrapper->hasHessian())
708 _funcWrapper->createHessian();
709}
710
void RooEvaluatorWrapper::setUseGeneratedFunctionCode(bool flag)
{
   // NOTE(review): the body of this function (original lines 713-715) is
   // missing from this listing — presumably it toggles the generated-code
   // evaluation path used by evaluate() and creates the RooFuncWrapper on
   // demand. Confirm against the upstream source.
}
717
/// Evaluate the gradient of the wrapped function into `out`.
/// Precondition: generateGradient() must have been called successfully, so
/// that _funcWrapper exists; `out` must hold at least one double per
/// parameter (the wrapper zero-fills and writes _params.size() entries).
void RooEvaluatorWrapper::gradient(double *out) const
{
   _funcWrapper->gradient(out);
}
722
/// Evaluate the Hessian of the wrapped function into `out`.
/// Precondition: generateHessian() must have been called successfully, so
/// that _funcWrapper exists; `out` must hold at least nParams*nParams doubles.
void RooEvaluatorWrapper::hessian(double *out) const
{
   _funcWrapper->hessian(out);
}
727
728bool RooEvaluatorWrapper::hasGradient() const
729{
730 return _funcWrapper && _funcWrapper->hasGradient();
731}
732
733bool RooEvaluatorWrapper::hasHessian() const
734{
735 return _funcWrapper && _funcWrapper->hasHessian();
736}
737
738void RooEvaluatorWrapper::writeDebugMacro(std::string const &filename) const
739{
740 if (_funcWrapper)
741 return _funcWrapper->writeDebugMacro(filename);
742}
743
744} // namespace RooFit::Experimental
745
746/// \endcond
#define oocoutE(o, a)
#define oocoutI(o, a)
#define coutE(a)
ROOT::Detail::TRangeCast< T, true > TRangeDynCast
TRangeDynCast is an adapter class that allows the typed iteration through a TCollection.
static unsigned int total
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void data
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t Float_t Float_t Float_t Int_t Int_t UInt_t UInt_t Rectangle_t Int_t Int_t Window_t TString Int_t GCValues_t GetPrimarySelectionOwner GetDisplay GetScreen GetColormap GetNativeEvent const char const char dpyName wid window const char font_name cursor keysym reg const char only_if_exist regb h Point_t winding char text const char depth char const char Int_t count const char ColorStruct_t color const char filename
char name[80]
Definition TGX11.cxx:110
#define gInterpreter
const_iterator begin() const
const_iterator end() const
Common abstract base class for objects that represent a value and a "shape" in RooFit.
Definition RooAbsArg.h:76
Abstract base class for binned and unbinned datasets.
Definition RooAbsData.h:56
Abstract interface for all probability density functions.
Definition RooAbsPdf.h:32
Abstract base class for objects that represent a real value and implements functionality common to al...
Definition RooAbsReal.h:63
RooArgList is a container object that can hold multiple RooAbsArg objects.
Definition RooArgList.h:22
RooArgSet is a container object that can hold multiple RooAbsArg objects.
Definition RooArgSet.h:24
A class to maintain the context for squashing of RooFit models into code.
void addResult(RooAbsArg const *key, std::string const &value)
A function to save an expression that includes/depends on the result of the input node.
void addVecObs(const char *key, int idx)
Since the squashed code represents all observables as a single flattened array, it is important to ke...
std::string buildFunction(RooAbsArg const &arg, std::unordered_set< RooFit::Detail::DataKey > const &dependsOnData={})
Assemble and return the final code with the return expression and global statements.
std::vector< std::string > const & collectedFunctions()
std::vector< double > const & xlArr()
Facilitates simultaneous fitting of multiple PDFs to subsets of a given dataset.
const Int_t n
Definition legend1.C:16
void replaceAll(std::string &inOut, std::string_view what, std::string_view with)
The namespace RooFit contains mostly switches that change the behaviour of functions of PDFs (or othe...
Definition CodegenImpl.h:71
void getSortedComputationGraph(RooAbsArg const &func, RooArgSet &out)
void evaluate(typename Architecture_t::Tensor_t &A, EActivationFunction f)
Apply the given activation function to each value in the given tensor A.
Definition Functions.h:98