ROOT Reference Guide
MemPoolForRooSets.h
Go to the documentation of this file.
1// @(#)root/roofit:$Id$
2// Author: Stephan Hageboeck, CERN, 10/2018
3/*************************************************************************
4 * Copyright (C) 1995-2018, Rene Brun and Fons Rademakers. *
5 * All rights reserved. *
6 * *
7 * For the licensing terms see $ROOTSYS/LICENSE. *
8 * For the list of contributors see $ROOTSYS/README/CREDITS. *
9 *************************************************************************/
10
11/** Memory pool for RooArgSet and RooDataSet.
12 * \class MemPoolForRooSets
13 * \ingroup roofitcore
14 * RooArgSet and RooDataSet were using a mempool that guarantees that allocating,
15 * de-allocating and re-allocating a set does not yield the same pointer. Since
16 * both were using the same logic, the functionality has been put in this class.
17 * This class solves RooFit's static destruction order problems by intentionally leaking
18 * arenas of the mempool that still contain live objects at the end of the program.
19 *
20 * When the set types are compared based on a unique ID instead of their pointer,
21 * one can go back to normal memory management, and this class becomes obsolete.
22 */
23
24#ifndef ROOFIT_ROOFITCORE_SRC_MEMPOOLFORROOSETS_H_
25#define ROOFIT_ROOFITCORE_SRC_MEMPOOLFORROOSETS_H_
26
#include "TStorage.h"

#include <algorithm>
#include <array>
#include <bitset>
#include <cassert>
#include <cstddef>
#include <iostream>
#include <new>
#include <vector>
33
34template <class RooSet_t, std::size_t POOLSIZE>
36
37 struct Arena {
39 : ownedMemory{static_cast<RooSet_t *>(TStorage::ObjectAlloc(2 * POOLSIZE * sizeof(RooSet_t)))},
41 memEnd{memBegin + 2 * POOLSIZE},
42 cycle{{}}
43 {}
44
45 Arena(const Arena &) = delete;
46 Arena(Arena && other)
47 : ownedMemory{other.ownedMemory},
48 memBegin{other.memBegin}, nextItem{other.nextItem}, memEnd{other.memEnd},
49 refCount{other.refCount},
50 totCount{other.totCount},
51 assigned{other.assigned},
52 cycle{{}}
53 {
54 // Needed for unique ownership
55 other.ownedMemory = nullptr;
56 other.refCount = 0;
57 other.totCount = 0;
58 other.assigned = 0;
59 }
60
61 Arena & operator=(const Arena &) = delete;
62 Arena & operator=(Arena && other)
63 {
64 ownedMemory = other.ownedMemory;
65 memBegin = other.memBegin;
66 nextItem = other.nextItem;
67 memEnd = other.memEnd;
68 refCount = other.refCount;
69 totCount = other.totCount;
70 assigned = other.assigned;
71
72 other.ownedMemory = nullptr;
73 other.refCount = 0;
74 other.totCount = 0;
75 other.assigned = 0;
76
77 return *this;
78 }
79
80 // If there is any user left, the arena shouldn't be deleted.
81 // If this happens, nevertheless, one has an order of destruction problem.
83 {
84 if (!ownedMemory) return;
85
86 if (refCount != 0) {
87 std::cerr << __FILE__ << ":" << __LINE__ << "Deleting arena " << ownedMemory << " with use count " << refCount
88 << std::endl;
89 assert(false);
90 }
91
92 ::operator delete(ownedMemory);
93 }
94
95
96 bool inPool(const RooSet_t * const ptr) const {
97 return memBegin <= ptr && ptr < memEnd;
98 }
99
100 bool inPool(const void * const ptr) const
101 {
102 return inPool(static_cast<const RooSet_t * const>(ptr));
103 }
104
105 bool hasSpace() const {
106 return totCount < POOLSIZE * sizeof(RooSet_t) && refCount < POOLSIZE;
107 }
108 bool empty() const { return refCount == 0; }
109
110 void tryFree(bool freeNonFull) {
111 if (ownedMemory && empty() && (!hasSpace() || freeNonFull) ) {
112 ::operator delete(ownedMemory);
113 ownedMemory = nullptr;
114 }
115 }
116
117 void * tryAllocate()
118 {
119 if (!hasSpace()) return nullptr;
120
121 for(std::size_t i = 0; i < POOLSIZE; ++i) {
122 if (nextItem == memEnd) {
123 nextItem = ownedMemory;
124 }
125 std::size_t index = (static_cast<RooSet_t *>(nextItem) - memBegin) / 2;
126 nextItem += 2;
127 if(!assigned[index]) {
128 if (cycle[index] == sizeof(RooSet_t)) {
129 continue;
130 }
131 ++refCount;
132 ++totCount;
133 assigned[index] = true;
134 auto ptr = reinterpret_cast<RooSet_t*>(reinterpret_cast<char*>(ownedMemory + 2 * index) + cycle[index]);
135 cycle[index]++;
136 return ptr;
137 }
138 }
139
140 return nullptr;
141 }
142
143 bool tryDeallocate(void * ptr)
144 {
145 if (inPool(ptr)) {
146 --refCount;
147 tryFree(false);
148 const std::size_t index = ( (reinterpret_cast<const char *>(ptr) - reinterpret_cast<const char *>(memBegin)) / 2) / sizeof(RooSet_t);
149#ifndef NDEBUG
150 if (assigned[index] == false) {
151 std::cerr << "Double delete of " << ptr << " at index " << index << " in Arena with refCount " << refCount
152 << ".\n\tArena: |" << memBegin << "\t" << ptr << "\t" << memEnd << "|" << std::endl;
153 throw;
154 }
155#endif
156 assigned[index] = false;
157 return true;
158 } else
159 return false;
160 }
161
162 bool memoryOverlaps(const Arena& other) const {
163 //Need the reinterpret_cast to correctly check for non-overlap on the last byte of the last element
164 return inPool(other.memBegin) || inPool(reinterpret_cast<const char*>(other.memEnd)-1);
165 }
166
167 RooSet_t * ownedMemory;
168 const RooSet_t * memBegin;
169 RooSet_t * nextItem;
170 const RooSet_t * memEnd;
171 std::size_t refCount = 0;
172 std::size_t totCount = 0;
173
174 std::bitset<POOLSIZE> assigned = {};
175 std::array<int, POOLSIZE> cycle = {{}};
176 };
177
178
179 public:
180 /// Create empty mem pool.
181 MemPoolForRooSets() : fArenas{} {}
182
187
188 /// Destructor. Should not be called when RooArgSets or RooDataSets are still alive.
190 {
191 if (!empty()) {
192#ifndef _MSC_VER
193 std::cerr << __PRETTY_FUNCTION__;
194#endif
195 std::cerr << " The mem pool being deleted is not empty. This will lead to crashes."
196 << std::endl;
197 assert(false);
198 }
199 }
200
201 /// Allocate memory for the templated set type. Fails if bytes != sizeof(RooSet_t).
202 void * allocate(std::size_t bytes)
203 {
204 if (bytes != sizeof(RooSet_t))
205 throw std::bad_alloc();
206
207 if (fArenas.empty()) {
208 newArena();
209 }
210
211 void * ptr = fArenas.back().tryAllocate();
212
213 if (ptr == nullptr) {
214 newArena();
215 prune();
216 ptr = fArenas.back().tryAllocate();
217 }
218
219 assert(ptr != nullptr);
220
221 return ptr;
222 }
223
224
225
226 /// Deallocate memory for the templated set type if in pool.
227 /// \return True if element was in pool.
228 bool deallocate(void * ptr)
229 {
230 bool deallocSuccess = false;
231
232 if (std::any_of(fArenas.begin(), fArenas.end(),
233 [ptr](Arena& arena){return arena.tryDeallocate(ptr);})) {
234 deallocSuccess = true;
235 }
236
237 if (fTeardownMode) {
238 // Try pruning after each dealloc because we are tearing down
239 prune();
240 }
241
242 return deallocSuccess;
243 }
244
245
246
247 ////////////////////////////////////////////////////////////////////////////////
248 /// Free memory in arenas that don't have space and no users.
249 /// In fTeardownMode, it will also delete the arena that still has space.
250 ///
251 void prune()
252 {
253 for (auto & arena : fArenas) {
254 arena.tryFree(fTeardownMode);
255 }
256
257 if (fTeardownMode) {
258 fArenas.erase(
259 std::remove_if(fArenas.begin(), fArenas.end(), [](Arena& ar){return ar.ownedMemory == nullptr;}),
260 fArenas.end());
261 }
262 }
263
264
265
266 /// Test if pool is empty.
267 bool empty() const
268 {
269 return std::all_of(fArenas.begin(), fArenas.end(), [](const Arena & ar) { return ar.empty(); });
270 }
271
272
273
274 /// Set pool to teardown mode (at program end).
275 /// Will prune all empty arenas. Non-empty arenas will survive until all contained elements
276 /// are deleted. They may therefore leak if not all elements are destructed.
277 void teardown()
278 {
279 fTeardownMode = true;
280
281 prune();
282 }
283
284
285 private:
286
287 ////////////////////////////////////////////////////////////////////////////////////
288 /// RooFit relies on unique pointers for RooArgSets. Here, memory
289 /// has to be allocated until a completely new chunk of memory is encountered.
290 /// As soon as RooXXXSets can be identified with a unique ID, this becomes obsolete.
291 void newArena() {
292 std::vector<Arena> failedAllocs;
293 while (true) {
294 Arena ar;
295 if (std::none_of(fArenas.begin(), fArenas.end(),
296 [&ar](Arena& other){return ar.memoryOverlaps(other);})) {
297 fArenas.emplace_back(std::move(ar));
298 break;
299 }
300 else {
301 failedAllocs.push_back(std::move(ar));
302 }
303 }
304 }
305
306
307
308 std::vector<Arena> fArenas;
309 bool fTeardownMode{false};
310};
311
312#endif /* ROOFIT_ROOFITCORE_SRC_MEMPOOLFORROOSETS_H_ */
Memory pool for RooArgSet and RooDataSet.
~MemPoolForRooSets()
Destructor. Should not be called when RooArgSets or RooDataSets are still alive.
void prune()
Free memory in arenas that don't have space and no users.
MemPoolForRooSets(const MemPoolForRooSets &)=delete
void newArena()
RooFit relies on unique pointers for RooArgSets.
MemPoolForRooSets & operator=(MemPoolForRooSets &&)=delete
MemPoolForRooSets()
Create empty mem pool.
bool deallocate(void *ptr)
Deallocate memory for the templated set type if in pool.
MemPoolForRooSets & operator=(const MemPoolForRooSets &)=delete
MemPoolForRooSets(MemPoolForRooSets &&)=delete
void teardown()
Set pool to teardown mode (at program end).
std::vector< Arena > fArenas
void * allocate(std::size_t bytes)
Allocate memory for the templated set type. Fails if bytes != sizeof(RooSet_t).
bool empty() const
Test if pool is empty.
Storage manager.
Definition: TStorage.h:33
void tryFree(bool freeNonFull)
Arena & operator=(const Arena &)=delete
bool inPool(const void *const ptr) const
bool memoryOverlaps(const Arena &other) const
Arena & operator=(Arena &&other)
std::array< int, POOLSIZE > cycle
bool inPool(const RooSet_t *const ptr) const
Arena(const Arena &)=delete