RFieldSequenceContainer.cxx
/// \file RFieldSequenceContainer.cxx
/// \ingroup NTuple ROOT7
/// \author Jonas Hahnfeld <jonas.hahnfeld@cern.ch>
/// \date 2024-11-19
/// \warning This is part of the ROOT 7 prototype! It will change without notice. It might trigger earthquakes. Feedback
/// is welcome!

#include <ROOT/RField.hxx>
#include <ROOT/RFieldBase.hxx>

#include <cstdlib> // for malloc, free
#include <memory>
#include <new> // hardware_destructive_interference_size

ROOT::Experimental::RArrayField::RArrayField(std::string_view fieldName, std::unique_ptr<RFieldBase> itemField,
                                             std::size_t arrayLength)
   : ROOT::Experimental::RFieldBase(fieldName,
                                    "std::array<" + itemField->GetTypeName() + "," + std::to_string(arrayLength) + ">",
                                    ENTupleStructure::kLeaf, false /* isSimple */, arrayLength),
     fItemSize(itemField->GetValueSize()),
     fArrayLength(arrayLength)
{
   fTraits |= itemField->GetTraits() & ~kTraitMappable;
   Attach(std::move(itemField));
}

std::unique_ptr<ROOT::Experimental::RFieldBase>
ROOT::Experimental::RArrayField::CloneImpl(std::string_view newName) const
{
   auto newItemField = fSubFields[0]->Clone(fSubFields[0]->GetFieldName());
   return std::make_unique<RArrayField>(newName, std::move(newItemField), fArrayLength);
}

std::size_t ROOT::Experimental::RArrayField::AppendImpl(const void *from)
{
   std::size_t nbytes = 0;
   if (fSubFields[0]->IsSimple()) {
      GetPrincipalColumnOf(*fSubFields[0])->AppendV(from, fArrayLength);
      nbytes += fArrayLength * GetPrincipalColumnOf(*fSubFields[0])->GetElement()->GetPackedSize();
   } else {
      auto arrayPtr = static_cast<const unsigned char *>(from);
      for (unsigned i = 0; i < fArrayLength; ++i) {
         nbytes += CallAppendOn(*fSubFields[0], arrayPtr + (i * fItemSize));
      }
   }
   return nbytes;
}

void ROOT::Experimental::RArrayField::ReadGlobalImpl(NTupleSize_t globalIndex, void *to)
{
   if (fSubFields[0]->IsSimple()) {
      GetPrincipalColumnOf(*fSubFields[0])->ReadV(globalIndex * fArrayLength, fArrayLength, to);
   } else {
      auto arrayPtr = static_cast<unsigned char *>(to);
      for (unsigned i = 0; i < fArrayLength; ++i) {
         CallReadOn(*fSubFields[0], globalIndex * fArrayLength + i, arrayPtr + (i * fItemSize));
      }
   }
}

void ROOT::Experimental::RArrayField::ReadInClusterImpl(RNTupleLocalIndex localIndex, void *to)
{
   if (fSubFields[0]->IsSimple()) {
      GetPrincipalColumnOf(*fSubFields[0])
         ->ReadV(RNTupleLocalIndex(localIndex.GetClusterId(), localIndex.GetIndexInCluster() * fArrayLength),
                 fArrayLength, to);
   } else {
      auto arrayPtr = static_cast<unsigned char *>(to);
      for (unsigned i = 0; i < fArrayLength; ++i) {
         CallReadOn(*fSubFields[0],
                    RNTupleLocalIndex(localIndex.GetClusterId(), localIndex.GetIndexInCluster() * fArrayLength + i),
                    arrayPtr + (i * fItemSize));
      }
   }
}

void ROOT::Experimental::RArrayField::ConstructValue(void *where) const
{
   if (fSubFields[0]->GetTraits() & kTraitTriviallyConstructible)
      return;

   auto arrayPtr = reinterpret_cast<unsigned char *>(where);
   for (unsigned i = 0; i < fArrayLength; ++i) {
      CallConstructValueOn(*fSubFields[0], arrayPtr + (i * fItemSize));
   }
}

void ROOT::Experimental::RArrayField::RArrayDeleter::operator()(void *objPtr, bool dtorOnly)
{
   if (fItemDeleter) {
      for (unsigned i = 0; i < fArrayLength; ++i) {
         fItemDeleter->operator()(reinterpret_cast<unsigned char *>(objPtr) + i * fItemSize, true /* dtorOnly */);
      }
   }
   RDeleter::operator()(objPtr, dtorOnly);
}

std::unique_ptr<ROOT::Experimental::RFieldBase::RDeleter> ROOT::Experimental::RArrayField::GetDeleter() const
{
   if (!(fSubFields[0]->GetTraits() & kTraitTriviallyDestructible))
      return std::make_unique<RArrayDeleter>(fItemSize, fArrayLength, GetDeleterOf(*fSubFields[0]));
   return std::make_unique<RDeleter>();
}

std::vector<ROOT::Experimental::RFieldBase::RValue>
ROOT::Experimental::RArrayField::SplitValue(const RValue &value) const
{
   auto arrayPtr = value.GetPtr<unsigned char>().get();
   std::vector<RValue> result;
   result.reserve(fArrayLength);
   for (unsigned i = 0; i < fArrayLength; ++i) {
      result.emplace_back(
         fSubFields[0]->BindValue(std::shared_ptr<void>(value.GetPtr<void>(), arrayPtr + (i * fItemSize))));
   }
   return result;
}

void ROOT::Experimental::RArrayField::AcceptVisitor(Detail::RFieldVisitor &visitor) const
{
   visitor.VisitArrayField(*this);
}

//------------------------------------------------------------------------------

namespace {

/// Retrieve the addresses of the data members of a generic RVec from a pointer to the beginning of the RVec object.
/// Returns pointers to fBegin, fSize and fCapacity in a std::tuple.
std::tuple<void **, std::int32_t *, std::int32_t *> GetRVecDataMembers(void *rvecPtr)
{
   void **begin = reinterpret_cast<void **>(rvecPtr);
   // int32_t fSize is the second data member (after 1 void*)
   std::int32_t *size = reinterpret_cast<std::int32_t *>(begin + 1);
   R__ASSERT(*size >= 0);
   // int32_t fCapacity is the third data member (1 int32_t after fSize)
   std::int32_t *capacity = size + 1;
   R__ASSERT(*capacity >= -1);
   return {begin, size, capacity};
}

std::tuple<const void *const *, const std::int32_t *, const std::int32_t *> GetRVecDataMembers(const void *rvecPtr)
{
   return {GetRVecDataMembers(const_cast<void *>(rvecPtr))};
}

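// For reference: the two helpers above rely on the RVec layout described in EvalRVecValueSize() below,
//    [ void *fBegin | std::int32_t fSize | std::int32_t fCapacity | (padding) | inline buffer ]
// where fCapacity == -1 marks an RVec that does not own its buffer (e.g. adopted memory).
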
std::size_t EvalRVecValueSize(std::size_t alignOfT, std::size_t sizeOfT, std::size_t alignOfRVecT)
{
   // the size of an RVec<T> is the size of its 4 data-members + optional padding:
   //
   // data members:
   // - void *fBegin
   // - int32_t fSize
   // - int32_t fCapacity
   // - the char[] inline storage, which is aligned like T
   //
   // padding might be present:
   // - between fCapacity and the char[] buffer aligned like T
   // - after the char[] buffer

   constexpr auto dataMemberSz = sizeof(void *) + 2 * sizeof(std::int32_t);

   // mimic the logic of RVecInlineStorageSize, but at runtime
   const auto inlineStorageSz = [&] {
#ifdef R__HAS_HARDWARE_INTERFERENCE_SIZE
      // hardware_destructive_interference_size is a C++17 feature but many compilers do not implement it yet
      constexpr unsigned cacheLineSize = std::hardware_destructive_interference_size;
#else
      constexpr unsigned cacheLineSize = 64u;
#endif
      const unsigned elementsPerCacheLine = (cacheLineSize - dataMemberSz) / sizeOfT;
      constexpr unsigned maxInlineByteSize = 1024;
      const unsigned nElements =
         elementsPerCacheLine >= 8 ? elementsPerCacheLine : (sizeOfT * 8 > maxInlineByteSize ? 0 : 8);
      return nElements * sizeOfT;
   }();

   // compute padding between first 3 datamembers and inline buffer
   // (there should be no padding between the first 3 data members)
   auto paddingMiddle = dataMemberSz % alignOfT;
   if (paddingMiddle != 0)
      paddingMiddle = alignOfT - paddingMiddle;

   // padding at the end of the object
   auto paddingEnd = (dataMemberSz + paddingMiddle + inlineStorageSz) % alignOfRVecT;
   if (paddingEnd != 0)
      paddingEnd = alignOfRVecT - paddingEnd;

   return dataMemberSz + paddingMiddle + inlineStorageSz + paddingEnd;
}

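// Worked example for the sizing logic above (a sketch assuming 8-byte pointers and the 64-byte cache-line fallback):
// for RVec<float>, dataMemberSz = 8 + 2 * 4 = 16 and elementsPerCacheLine = (64 - 16) / 4 = 12 >= 8, so the inline
// storage holds 12 floats (48 bytes); both paddings are 0 and EvalRVecValueSize(4, 4, 8) returns 64.
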
std::size_t EvalRVecAlignment(std::size_t alignOfSubField)
{
   // the alignment of an RVec<T> is the largest among the alignments of its data members
   // (including the inline buffer which has the same alignment as the RVec::value_type)
   return std::max({alignof(void *), alignof(std::int32_t), alignOfSubField});
}

void DestroyRVecWithChecks(std::size_t alignOfT, void **beginPtr, char *begin, std::int32_t *capacityPtr)
{
   // figure out if we are in the small state, i.e. begin == &inlineBuffer
   // there might be padding between fCapacity and the inline buffer, so we compute it here
   constexpr auto dataMemberSz = sizeof(void *) + 2 * sizeof(std::int32_t);
   auto paddingMiddle = dataMemberSz % alignOfT;
   if (paddingMiddle != 0)
      paddingMiddle = alignOfT - paddingMiddle;
   const bool isSmall = (begin == (reinterpret_cast<char *>(beginPtr) + dataMemberSz + paddingMiddle));

   const bool owns = (*capacityPtr != -1);
   if (!isSmall && owns)
      free(begin);
}

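// Note on the checks above: free() is only called for a heap buffer that the RVec owns, i.e. neither when the
// elements live in the inline (small-buffer) storage right after the data members nor when the buffer was adopted
// from the outside (fCapacity == -1).
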
} // anonymous namespace

ROOT::Experimental::RRVecField::RRVecField(std::string_view fieldName, std::unique_ptr<RFieldBase> itemField)
   : ROOT::Experimental::RFieldBase(fieldName, "ROOT::VecOps::RVec<" + itemField->GetTypeName() + ">",
                                    ENTupleStructure::kCollection, false /* isSimple */),
     fItemSize(itemField->GetValueSize()),
     fNWritten(0)
{
   if (!(itemField->GetTraits() & kTraitTriviallyDestructible))
      fItemDeleter = GetDeleterOf(*itemField);
   Attach(std::move(itemField));
   fValueSize = EvalRVecValueSize(fSubFields[0]->GetAlignment(), fSubFields[0]->GetValueSize(), GetAlignment());
}

std::unique_ptr<ROOT::Experimental::RFieldBase>
ROOT::Experimental::RRVecField::CloneImpl(std::string_view newName) const
{
   auto newItemField = fSubFields[0]->Clone(fSubFields[0]->GetFieldName());
   return std::make_unique<RRVecField>(newName, std::move(newItemField));
}

std::size_t ROOT::Experimental::RRVecField::AppendImpl(const void *from)
{
   auto [beginPtr, sizePtr, _] = GetRVecDataMembers(from);

   std::size_t nbytes = 0;
   if (fSubFields[0]->IsSimple() && *sizePtr) {
      GetPrincipalColumnOf(*fSubFields[0])->AppendV(*beginPtr, *sizePtr);
      nbytes += *sizePtr * GetPrincipalColumnOf(*fSubFields[0])->GetElement()->GetPackedSize();
   } else {
      auto begin = reinterpret_cast<const char *>(*beginPtr); // for pointer arithmetic
      for (std::int32_t i = 0; i < *sizePtr; ++i) {
         nbytes += CallAppendOn(*fSubFields[0], begin + i * fItemSize);
      }
   }

   fNWritten += *sizePtr;
   fPrincipalColumn->Append(&fNWritten);
   return nbytes + fPrincipalColumn->GetElement()->GetPackedSize();
}

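// Note: fNWritten is the running total of items written by this field, so the principal (offset) column stores the
// cumulative end offset of each collection; readers recover the individual sizes as differences of consecutive
// offsets (see ReadBulkImpl below).
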
void ROOT::Experimental::RRVecField::ReadGlobalImpl(NTupleSize_t globalIndex, void *to)
{
   // TODO as a performance optimization, we could assign values to elements of the inline buffer:
   // if size < inline buffer size: we save one allocation here and usage of the RVec skips a pointer indirection

   auto [beginPtr, sizePtr, capacityPtr] = GetRVecDataMembers(to);

   // Read collection info for this entry
   NTupleSize_t nItems;
   RNTupleLocalIndex collectionStart;
   fPrincipalColumn->GetCollectionInfo(globalIndex, &collectionStart, &nItems);
   char *begin = reinterpret_cast<char *>(*beginPtr); // for pointer arithmetic
   const std::size_t oldSize = *sizePtr;

   // See "semantics of reading non-trivial objects" in RNTuple's Architecture.md for details
   // on the element construction/destruction.
   const bool owns = (*capacityPtr != -1);
   const bool needsConstruct = !(fSubFields[0]->GetTraits() & kTraitTriviallyConstructible);
   const bool needsDestruct = owns && fItemDeleter;

   // Destroy excess elements, if any
   if (needsDestruct) {
      for (std::size_t i = nItems; i < oldSize; ++i) {
         fItemDeleter->operator()(begin + (i * fItemSize), true /* dtorOnly */);
      }
   }

   // Resize RVec (capacity and size)
   if (std::int32_t(nItems) > *capacityPtr) { // must reallocate
      // Destroy old elements: useless work for trivial types, but in case the element type's constructor
      // allocates memory we need to release it here to avoid memleaks (e.g. if this is an RVec<RVec<int>>)
      if (needsDestruct) {
         for (std::size_t i = 0u; i < oldSize; ++i) {
            fItemDeleter->operator()(begin + (i * fItemSize), true /* dtorOnly */);
         }
      }

      // TODO Increment capacity by a factor rather than just enough to fit the elements.
      if (owns) {
         // *beginPtr points to the array of item values (allocated in an earlier call by the following malloc())
         free(*beginPtr);
      }
      // We trust that malloc returns a buffer with large enough alignment.
      // This might not be the case if T in RVec<T> is over-aligned.
      *beginPtr = malloc(nItems * fItemSize);
      R__ASSERT(*beginPtr != nullptr);
      begin = reinterpret_cast<char *>(*beginPtr);
      *capacityPtr = nItems;

      // Placement new for elements that were already there before the resize
      if (needsConstruct) {
         for (std::size_t i = 0u; i < oldSize; ++i)
            CallConstructValueOn(*fSubFields[0], begin + (i * fItemSize));
      }
   }
   *sizePtr = nItems;

   // Placement new for new elements, if any
   if (needsConstruct) {
      for (std::size_t i = oldSize; i < nItems; ++i)
         CallConstructValueOn(*fSubFields[0], begin + (i * fItemSize));
   }

   if (fSubFields[0]->IsSimple() && nItems) {
      GetPrincipalColumnOf(*fSubFields[0])->ReadV(collectionStart, nItems, begin);
      return;
   }

   // Read the new values into the collection elements
   for (std::size_t i = 0; i < nItems; ++i) {
      CallReadOn(*fSubFields[0], collectionStart + i, begin + (i * fItemSize));
   }
}

std::size_t ROOT::Experimental::RRVecField::ReadBulkImpl(const RBulkSpec &bulkSpec)
{
   if (!fSubFields[0]->IsSimple())
      return RFieldBase::ReadBulkImpl(bulkSpec);

   if (bulkSpec.fAuxData->empty()) {
      /// Initialize auxiliary memory: the first sizeof(size_t) bytes store the value size of the item field.
      /// The following bytes store the item values, consecutively.
      bulkSpec.fAuxData->resize(sizeof(std::size_t));
      *reinterpret_cast<std::size_t *>(bulkSpec.fAuxData->data()) = fSubFields[0]->GetValueSize();
   }
   const auto itemValueSize = *reinterpret_cast<std::size_t *>(bulkSpec.fAuxData->data());
   unsigned char *itemValueArray = bulkSpec.fAuxData->data() + sizeof(std::size_t);
   auto [beginPtr, sizePtr, capacityPtr] = GetRVecDataMembers(bulkSpec.fValues);

   // Get size of the first RVec of the bulk
   RNTupleLocalIndex firstItemIndex;
   NTupleSize_t collectionSize;
   this->GetCollectionInfo(bulkSpec.fFirstIndex, &firstItemIndex, &collectionSize);
   *beginPtr = itemValueArray;
   *sizePtr = collectionSize;
   *capacityPtr = -1;

   // Set the size of the remaining RVecs of the bulk, going page by page through the RNTuple offset column.
   // We optimistically assume that bulkSpec.fAuxData is already large enough to hold all the item values in the
   // given range. If not, we'll fix up the pointers afterwards.
   auto lastOffset = firstItemIndex.GetIndexInCluster() + collectionSize;
   NTupleSize_t nRemainingValues = bulkSpec.fCount - 1;
   std::size_t nValues = 1;
   std::size_t nItems = collectionSize;
   while (nRemainingValues > 0) {
      NTupleSize_t nElementsUntilPageEnd;
      const auto offsets =
         fPrincipalColumn->MapV<Internal::RColumnIndex>(bulkSpec.fFirstIndex + nValues, nElementsUntilPageEnd);
      const std::size_t nBatch = std::min(nRemainingValues, nElementsUntilPageEnd);
      for (std::size_t i = 0; i < nBatch; ++i) {
         const auto size = offsets[i] - lastOffset;
         std::tie(beginPtr, sizePtr, capacityPtr) =
            GetRVecDataMembers(reinterpret_cast<unsigned char *>(bulkSpec.fValues) + (nValues + i) * fValueSize);
         *beginPtr = itemValueArray + nItems * itemValueSize;
         *sizePtr = size;
         *capacityPtr = -1;

         nItems += size;
         lastOffset = offsets[i];
      }
      nRemainingValues -= nBatch;
      nValues += nBatch;
   }

   bulkSpec.fAuxData->resize(sizeof(std::size_t) + nItems * itemValueSize);
   // If the vector got reallocated, we need to fix up the RVecs' begin pointers.
   const auto delta = itemValueArray - (bulkSpec.fAuxData->data() + sizeof(std::size_t));
   if (delta != 0) {
      auto beginPtrAsUChar = reinterpret_cast<unsigned char *>(bulkSpec.fValues);
      for (std::size_t i = 0; i < bulkSpec.fCount; ++i) {
         *reinterpret_cast<unsigned char **>(beginPtrAsUChar) -= delta;
         beginPtrAsUChar += fValueSize;
      }
   }

   GetPrincipalColumnOf(*fSubFields[0])->ReadV(firstItemIndex, nItems, itemValueArray - delta);
   return RBulkSpec::kAllSet;
}

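// Sketch of the bulk layout produced above: bulkSpec.fAuxData stores the item value size in its first sizeof(size_t)
// bytes, followed by all item values of the bulk back to back; every RVec in bulkSpec.fValues is left non-owning
// (fCapacity == -1) with its begin pointer aimed into that shared buffer.
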
void ROOT::Experimental::RRVecField::ConstructValue(void *where) const
{
   // initialize data members fBegin, fSize, fCapacity
   // currently the inline buffer is left uninitialized
   void **beginPtr = new (where)(void *)(nullptr);
   std::int32_t *sizePtr = new (reinterpret_cast<void *>(beginPtr + 1)) std::int32_t(0);
   new (sizePtr + 1) std::int32_t(-1);
}

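// Note: the value constructed above is an empty, non-owning RVec (fBegin == nullptr, fSize == 0, fCapacity == -1),
// so the first ReadGlobalImpl() on it takes the reallocation branch and malloc()s the item buffer.
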
void ROOT::Experimental::RRVecField::RRVecDeleter::operator()(void *objPtr, bool dtorOnly)
{
   auto [beginPtr, sizePtr, capacityPtr] = GetRVecDataMembers(objPtr);

   char *begin = reinterpret_cast<char *>(*beginPtr); // for pointer arithmetic
   if (fItemDeleter) {
      for (std::int32_t i = 0; i < *sizePtr; ++i) {
         fItemDeleter->operator()(begin + i * fItemSize, true /* dtorOnly */);
      }
   }

   DestroyRVecWithChecks(fItemAlignment, beginPtr, begin, capacityPtr);
   RDeleter::operator()(objPtr, dtorOnly);
}

std::unique_ptr<ROOT::Experimental::RFieldBase::RDeleter> ROOT::Experimental::RRVecField::GetDeleter() const
{
   if (fItemDeleter)
      return std::make_unique<RRVecDeleter>(fSubFields[0]->GetAlignment(), fItemSize, GetDeleterOf(*fSubFields[0]));
   return std::make_unique<RRVecDeleter>(fSubFields[0]->GetAlignment());
}

std::vector<ROOT::Experimental::RFieldBase::RValue>
ROOT::Experimental::RRVecField::SplitValue(const RValue &value) const
{
   auto [beginPtr, sizePtr, _] = GetRVecDataMembers(value.GetPtr<void>().get());

   std::vector<RValue> result;
   char *begin = reinterpret_cast<char *>(*beginPtr); // for pointer arithmetic
   result.reserve(*sizePtr);
   for (std::int32_t i = 0; i < *sizePtr; ++i) {
      result.emplace_back(fSubFields[0]->BindValue(std::shared_ptr<void>(value.GetPtr<void>(), begin + i * fItemSize)));
   }
   return result;
}

std::size_t ROOT::Experimental::RRVecField::GetValueSize() const
{
   return fValueSize;
}

std::size_t ROOT::Experimental::RRVecField::GetAlignment() const
{
   return EvalRVecAlignment(fSubFields[0]->GetAlignment());
}

void ROOT::Experimental::RRVecField::AcceptVisitor(Detail::RFieldVisitor &visitor) const
{
   visitor.VisitRVecField(*this);
}

//------------------------------------------------------------------------------

ROOT::Experimental::RVectorField::RVectorField(std::string_view fieldName, std::unique_ptr<RFieldBase> itemField,
                                               bool isUntyped)
   : ROOT::Experimental::RFieldBase(fieldName, isUntyped ? "" : "std::vector<" + itemField->GetTypeName() + ">",
                                    ENTupleStructure::kCollection, false /* isSimple */),
     fItemSize(itemField->GetValueSize()),
     fNWritten(0)
{
   if (!(itemField->GetTraits() & kTraitTriviallyDestructible))
      fItemDeleter = GetDeleterOf(*itemField);
   Attach(std::move(itemField));
}

ROOT::Experimental::RVectorField::RVectorField(std::string_view fieldName, std::unique_ptr<RFieldBase> itemField)
   : RVectorField(fieldName, std::move(itemField), false /* isUntyped */)
{
}

std::unique_ptr<ROOT::Experimental::RVectorField>
ROOT::Experimental::RVectorField::CreateUntyped(std::string_view fieldName, std::unique_ptr<RFieldBase> itemField)
{
   return std::unique_ptr<ROOT::Experimental::RVectorField>(new RVectorField(fieldName, std::move(itemField), true));
}

std::unique_ptr<ROOT::Experimental::RFieldBase>
ROOT::Experimental::RVectorField::CloneImpl(std::string_view newName) const
{
   auto newItemField = fSubFields[0]->Clone(fSubFields[0]->GetFieldName());
   return std::unique_ptr<ROOT::Experimental::RVectorField>(
      new RVectorField(newName, std::move(newItemField), GetTypeName().empty()));
}

std::size_t ROOT::Experimental::RVectorField::AppendImpl(const void *from)
{
   auto typedValue = static_cast<const std::vector<char> *>(from);
   // The order is important here: Profiling showed that the integer division is on the critical path. By moving the
   // computation of count before R__ASSERT, the compiler can use the result of a single instruction (on x86) also for
   // the modulo operation. Otherwise, it must perform the division twice because R__ASSERT expands to an external call
   // of Fatal() in case of failure, which could have side effects that the compiler cannot analyze.
   auto count = typedValue->size() / fItemSize;
   R__ASSERT((typedValue->size() % fItemSize) == 0);
   std::size_t nbytes = 0;

   if (fSubFields[0]->IsSimple() && count) {
      GetPrincipalColumnOf(*fSubFields[0])->AppendV(typedValue->data(), count);
      nbytes += count * GetPrincipalColumnOf(*fSubFields[0])->GetElement()->GetPackedSize();
   } else {
      for (unsigned i = 0; i < count; ++i) {
         nbytes += CallAppendOn(*fSubFields[0], typedValue->data() + (i * fItemSize));
      }
   }

   fNWritten += count;
   fPrincipalColumn->Append(&fNWritten);
   return nbytes + fPrincipalColumn->GetElement()->GetPackedSize();
}

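// Note: RVectorField treats the object as untyped storage (std::vector<char>) and advances in steps of fItemSize,
// which keeps the field usable through CreateUntyped() for item types that are only known at run time.
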
void ROOT::Experimental::RVectorField::ReadGlobalImpl(NTupleSize_t globalIndex, void *to)
{
   auto typedValue = static_cast<std::vector<char> *>(to);

   NTupleSize_t nItems;
   RNTupleLocalIndex collectionStart;
   fPrincipalColumn->GetCollectionInfo(globalIndex, &collectionStart, &nItems);

   if (fSubFields[0]->IsSimple()) {
      typedValue->resize(nItems * fItemSize);
      if (nItems)
         GetPrincipalColumnOf(*fSubFields[0])->ReadV(collectionStart, nItems, typedValue->data());
      return;
   }

   // See "semantics of reading non-trivial objects" in RNTuple's Architecture.md
   R__ASSERT(fItemSize > 0);
   const auto oldNItems = typedValue->size() / fItemSize;
   const bool canRealloc = oldNItems < nItems;
   bool allDeallocated = false;
   if (fItemDeleter) {
      allDeallocated = canRealloc;
      for (std::size_t i = allDeallocated ? 0 : nItems; i < oldNItems; ++i) {
         fItemDeleter->operator()(typedValue->data() + (i * fItemSize), true /* dtorOnly */);
      }
   }
   typedValue->resize(nItems * fItemSize);
   if (!(fSubFields[0]->GetTraits() & kTraitTriviallyConstructible)) {
      for (std::size_t i = allDeallocated ? 0 : oldNItems; i < nItems; ++i) {
         CallConstructValueOn(*fSubFields[0], typedValue->data() + (i * fItemSize));
      }
   }

   for (std::size_t i = 0; i < nItems; ++i) {
      CallReadOn(*fSubFields[0], collectionStart + i, typedValue->data() + (i * fItemSize));
   }
}

void ROOT::Experimental::RVectorField::RVectorDeleter::operator()(void *objPtr, bool dtorOnly)
{
   auto vecPtr = static_cast<std::vector<char> *>(objPtr);
   if (fItemDeleter) {
      R__ASSERT(fItemSize > 0);
      R__ASSERT((vecPtr->size() % fItemSize) == 0);
      auto nItems = vecPtr->size() / fItemSize;
      for (std::size_t i = 0; i < nItems; ++i) {
         fItemDeleter->operator()(vecPtr->data() + (i * fItemSize), true /* dtorOnly */);
      }
   }
   std::destroy_at(vecPtr);
   RDeleter::operator()(objPtr, dtorOnly);
}

std::unique_ptr<ROOT::Experimental::RFieldBase::RDeleter> ROOT::Experimental::RVectorField::GetDeleter() const
{
   if (fItemDeleter)
      return std::make_unique<RVectorDeleter>(fItemSize, GetDeleterOf(*fSubFields[0]));
   return std::make_unique<RVectorDeleter>();
}

std::vector<ROOT::Experimental::RFieldBase::RValue>
ROOT::Experimental::RVectorField::SplitValue(const RValue &value) const
{
   auto vec = value.GetPtr<std::vector<char>>();
   R__ASSERT(fItemSize > 0);
   R__ASSERT((vec->size() % fItemSize) == 0);
   auto nItems = vec->size() / fItemSize;
   std::vector<RValue> result;
   result.reserve(nItems);
   for (unsigned i = 0; i < nItems; ++i) {
      result.emplace_back(
         fSubFields[0]->BindValue(std::shared_ptr<void>(value.GetPtr<void>(), vec->data() + (i * fItemSize))));
   }
   return result;
}

void ROOT::Experimental::RVectorField::AcceptVisitor(Detail::RFieldVisitor &visitor) const
{
   visitor.VisitVectorField(*this);
}

//------------------------------------------------------------------------------

ROOT::Experimental::RField<std::vector<bool>>::RField(std::string_view name)
   : ROOT::Experimental::RFieldBase(name, "std::vector<bool>", ENTupleStructure::kCollection, false /* isSimple */)
{
   Attach(std::make_unique<RField<bool>>("_0"));
}

std::size_t ROOT::Experimental::RField<std::vector<bool>>::AppendImpl(const void *from)
{
   auto typedValue = static_cast<const std::vector<bool> *>(from);
   auto count = typedValue->size();
   for (unsigned i = 0; i < count; ++i) {
      bool bval = (*typedValue)[i];
      CallAppendOn(*fSubFields[0], &bval);
   }
   fNWritten += count;
   fPrincipalColumn->Append(&fNWritten);
   return count + fPrincipalColumn->GetElement()->GetPackedSize();
}

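// Note: std::vector<bool> is the bit-packed specialization and offers no contiguous bool storage, so the loop above
// unpacks every element into a temporary bool before appending it to the item subfield.
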
void ROOT::Experimental::RField<std::vector<bool>>::ReadGlobalImpl(NTupleSize_t globalIndex, void *to)
{
   auto typedValue = static_cast<std::vector<bool> *>(to);

   NTupleSize_t nItems;
   RNTupleLocalIndex collectionStart;
   fPrincipalColumn->GetCollectionInfo(globalIndex, &collectionStart, &nItems);

   typedValue->resize(nItems);
   for (unsigned i = 0; i < nItems; ++i) {
      bool bval;
      CallReadOn(*fSubFields[0], collectionStart + i, &bval);
      (*typedValue)[i] = bval;
   }
}

const ROOT::Experimental::RFieldBase::RColumnRepresentations &
ROOT::Experimental::RField<std::vector<bool>>::GetColumnRepresentations() const
{
   static RColumnRepresentations representations({{ENTupleColumnType::kSplitIndex64},
                                                  {ENTupleColumnType::kIndex64},
                                                  {ENTupleColumnType::kSplitIndex32},
                                                  {ENTupleColumnType::kIndex32}},
                                                 {});
   return representations;
}

void ROOT::Experimental::RField<std::vector<bool>>::GenerateColumns()
{
   GenerateColumnsImpl<Internal::RColumnIndex>();
}

void ROOT::Experimental::RField<std::vector<bool>>::GenerateColumns(const RNTupleDescriptor &desc)
{
   GenerateColumnsImpl<Internal::RColumnIndex>(desc);
}

std::vector<ROOT::Experimental::RFieldBase::RValue>
ROOT::Experimental::RField<std::vector<bool>>::SplitValue(const RValue &value) const
{
   const auto &typedValue = value.GetRef<std::vector<bool>>();
   auto count = typedValue.size();
   std::vector<RValue> result;
   result.reserve(count);
   for (unsigned i = 0; i < count; ++i) {
      if (typedValue[i])
         result.emplace_back(fSubFields[0]->BindValue(std::shared_ptr<bool>(new bool(true))));
      else
         result.emplace_back(fSubFields[0]->BindValue(std::shared_ptr<bool>(new bool(false))));
   }
   return result;
}

void ROOT::Experimental::RField<std::vector<bool>>::AcceptVisitor(Detail::RFieldVisitor &visitor) const
{
   visitor.VisitVectorBoolField(*this);
}

//------------------------------------------------------------------------------

ROOT::Experimental::RArrayAsRVecField::RArrayAsRVecField(std::string_view fieldName,
                                                          std::unique_ptr<ROOT::Experimental::RFieldBase> itemField,
                                                          std::size_t arrayLength)
   : ROOT::Experimental::RFieldBase(fieldName, "ROOT::VecOps::RVec<" + itemField->GetTypeName() + ">",
                                    ENTupleStructure::kCollection, false /* isSimple */),
     fItemSize(itemField->GetValueSize()),
     fArrayLength(arrayLength)
{
   Attach(std::move(itemField));
   fValueSize = EvalRVecValueSize(fSubFields[0]->GetAlignment(), fSubFields[0]->GetValueSize(), GetAlignment());
   if (!(fSubFields[0]->GetTraits() & kTraitTriviallyDestructible))
      fItemDeleter = GetDeleterOf(*fSubFields[0]);
}

std::unique_ptr<ROOT::Experimental::RFieldBase>
ROOT::Experimental::RArrayAsRVecField::CloneImpl(std::string_view newName) const
{
   auto newItemField = fSubFields[0]->Clone(fSubFields[0]->GetFieldName());
   return std::make_unique<RArrayAsRVecField>(newName, std::move(newItemField), fArrayLength);
}

void ROOT::Experimental::RArrayAsRVecField::ConstructValue(void *where) const
{
   // initialize data members fBegin, fSize, fCapacity
   void **beginPtr = new (where)(void *)(nullptr);
   std::int32_t *sizePtr = new (reinterpret_cast<void *>(beginPtr + 1)) std::int32_t(0);
   std::int32_t *capacityPtr = new (sizePtr + 1) std::int32_t(0);

   // Create the RVec with the known fixed size, do it once here instead of
   // every time the value is read in `Read*Impl` functions
   char *begin = reinterpret_cast<char *>(*beginPtr); // for pointer arithmetic

   // Early return if the RVec has already been allocated.
   if (*sizePtr == std::int32_t(fArrayLength))
      return;

   // Need to allocate the RVec if it is the first time the value is being created.
   // See "semantics of reading non-trivial objects" in RNTuple's Architecture.md for details
   // on the element construction.
   const bool owns = (*capacityPtr != -1); // RVec is adopting the memory
   const bool needsConstruct = !(fSubFields[0]->GetTraits() & kTraitTriviallyConstructible);
   const bool needsDestruct = owns && fItemDeleter;

   // Destroy old elements: useless work for trivial types, but in case the element type's constructor
   // allocates memory we need to release it here to avoid memleaks (e.g. if this is an RVec<RVec<int>>)
   if (needsDestruct) {
      for (std::int32_t i = 0; i < *sizePtr; ++i) {
         fItemDeleter->operator()(begin + (i * fItemSize), true /* dtorOnly */);
      }
   }

   // TODO: Isn't the RVec always owning in this case?
   if (owns) {
      // *beginPtr points to the array of item values (allocated in an earlier call by the following malloc())
      free(*beginPtr);
   }

   *beginPtr = malloc(fArrayLength * fItemSize);
   R__ASSERT(*beginPtr != nullptr);
   // Re-assign begin pointer after allocation
   begin = reinterpret_cast<char *>(*beginPtr);
   // Size and capacity are equal since the field data type is std::array
   *sizePtr = fArrayLength;
   *capacityPtr = fArrayLength;

   // Placement new for the array elements
   if (needsConstruct) {
      for (std::size_t i = 0; i < fArrayLength; ++i)
         CallConstructValueOn(*fSubFields[0], begin + (i * fItemSize));
   }
}

std::unique_ptr<ROOT::Experimental::RFieldBase::RDeleter> ROOT::Experimental::RArrayAsRVecField::GetDeleter() const
{
   if (fItemDeleter) {
      return std::make_unique<RRVecField::RRVecDeleter>(fSubFields[0]->GetAlignment(), fItemSize,
                                                        GetDeleterOf(*fSubFields[0]));
   }
   return std::make_unique<RRVecField::RRVecDeleter>(fSubFields[0]->GetAlignment());
}

void ROOT::Experimental::RArrayAsRVecField::ReadGlobalImpl(NTupleSize_t globalIndex, void *to)
{
   auto [beginPtr, _, __] = GetRVecDataMembers(to);
   auto rvecBeginPtr = reinterpret_cast<char *>(*beginPtr); // for pointer arithmetic

   if (fSubFields[0]->IsSimple()) {
      GetPrincipalColumnOf(*fSubFields[0])->ReadV(globalIndex * fArrayLength, fArrayLength, rvecBeginPtr);
      return;
   }

   // Read the new values into the collection elements
   for (std::size_t i = 0; i < fArrayLength; ++i) {
      CallReadOn(*fSubFields[0], globalIndex * fArrayLength + i, rvecBeginPtr + (i * fItemSize));
   }
}

void ROOT::Experimental::RArrayAsRVecField::ReadInClusterImpl(RNTupleLocalIndex localIndex, void *to)
{
   auto [beginPtr, _, __] = GetRVecDataMembers(to);
   auto rvecBeginPtr = reinterpret_cast<char *>(*beginPtr); // for pointer arithmetic

   const auto &clusterId = localIndex.GetClusterId();
   const auto &indexInCluster = localIndex.GetIndexInCluster();

   if (fSubFields[0]->IsSimple()) {
      GetPrincipalColumnOf(*fSubFields[0])
         ->ReadV(RNTupleLocalIndex(clusterId, indexInCluster * fArrayLength), fArrayLength, rvecBeginPtr);
      return;
   }

   // Read the new values into the collection elements
   for (std::size_t i = 0; i < fArrayLength; ++i) {
      CallReadOn(*fSubFields[0], RNTupleLocalIndex(clusterId, indexInCluster * fArrayLength + i),
                 rvecBeginPtr + (i * fItemSize));
   }
}

std::size_t ROOT::Experimental::RArrayAsRVecField::GetAlignment() const
{
   return EvalRVecAlignment(fSubFields[0]->GetAlignment());
}

std::vector<ROOT::Experimental::RFieldBase::RValue>
ROOT::Experimental::RArrayAsRVecField::SplitValue(const RFieldBase::RValue &value) const
{
   auto arrayPtr = value.GetPtr<unsigned char>().get();
   std::vector<ROOT::Experimental::RFieldBase::RValue> result;
   result.reserve(fArrayLength);
   for (unsigned i = 0; i < fArrayLength; ++i) {
      result.emplace_back(
         fSubFields[0]->BindValue(std::shared_ptr<void>(value.GetPtr<void>(), arrayPtr + (i * fItemSize))));
   }
   return result;
}