#ifndef FLATBUFFERS_FLATBUFFER_BUILDER_H_
#define FLATBUFFERS_FLATBUFFER_BUILDER_H_

#include <algorithm>
#include <cstdint>
#include <functional>
#include <initializer_list>
#include <type_traits>

#include "flatbuffers/allocator.h"
#include "flatbuffers/array.h"
#include "flatbuffers/base.h"
#include "flatbuffers/buffer.h"
#include "flatbuffers/buffer_ref.h"
#include "flatbuffers/default_allocator.h"
#include "flatbuffers/detached_buffer.h"
#include "flatbuffers/stl_emulation.h"
#include "flatbuffers/string.h"
#include "flatbuffers/struct.h"
#include "flatbuffers/table.h"
#include "flatbuffers/vector.h"
#include "flatbuffers/vector_downward.h"
#include "flatbuffers/verifier.h"

namespace flatbuffers {

// Converts a Field ID to a virtual table offset.
inline voffset_t FieldIndexToOffset(voffset_t field_id) {
  // Should correspond to what EndTable() below builds up.
  const voffset_t fixed_fields =
      2 * sizeof(voffset_t);  // Vtable size and object size.
  size_t offset = fixed_fields + field_id * sizeof(voffset_t);
  FLATBUFFERS_ASSERT(offset < std::numeric_limits<voffset_t>::max());
  return static_cast<voffset_t>(offset);
}
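
// Worked example (a sketch, not part of the original header): with the usual
// 2-byte voffset_t, the two fixed vtable fields occupy offsets 0 and 2, so
// FieldIndexToOffset(0) == 4, FieldIndexToOffset(1) == 6, and so on.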

template<typename T, typename Alloc = std::allocator<T>>
const T *data(const std::vector<T, Alloc> &v) {
  // Eventually the returned pointer gets passed down to memcpy, so
  // we need it to be non-null to avoid undefined behavior.
  static uint8_t t;
  return v.empty() ? reinterpret_cast<const T *>(&t) : &v.front();
}
template<typename T, typename Alloc = std::allocator<T>>
T *data(std::vector<T, Alloc> &v) {
  // Eventually the returned pointer gets passed down to memcpy, so
  // we need it to be non-null to avoid undefined behavior.
  static uint8_t t;
  return v.empty() ? reinterpret_cast<T *>(&t) : &v.front();
}

// FlatBufferBuilder: helper class holding the state needed to create a
// FlatBuffer. To use it, construct it, populate it with your data, and then
// call Finish() on it.
template<bool Is64Aware = false> class FlatBufferBuilderImpl {
 public:
  // This switches the size type of the builder, based on whether it is
  // 64-bit aware (uoffset64_t) or not (uoffset_t).
  typedef
      typename std::conditional<Is64Aware, uoffset64_t, uoffset_t>::type SizeT;

  // Default constructor. `initial_size` is the initial size of the buffer in
  // bytes (it grows as needed); `allocator` is used if given, otherwise the
  // DefaultAllocator, and is owned by the builder if `own_allocator` is true;
  // `buffer_minalign` forces the buffer to a minimum alignment on
  // reallocation.
  explicit FlatBufferBuilderImpl(
      size_t initial_size = 1024, Allocator *allocator = nullptr,
      bool own_allocator = false,
      size_t buffer_minalign = AlignOf<largest_scalar_t>())
      : buf_(initial_size, allocator, own_allocator, buffer_minalign,
             static_cast<SizeT>(Is64Aware ? FLATBUFFERS_MAX_64_BUFFER_SIZE
                                          : FLATBUFFERS_MAX_BUFFER_SIZE)),
        num_field_loc(0),
        max_voffset_(0),
        length_of_64_bit_region_(0),
        nested(false),
        finished(false),
        minalign_(1),
        force_defaults_(false),
        dedup_vtables_(true),
        string_pool(nullptr) {
    EndianCheck();
  }

  // Move constructor: default-construct, then swap with `other`.
  FlatBufferBuilderImpl(FlatBufferBuilderImpl &&other) noexcept
      : buf_(1024, nullptr, false, AlignOf<largest_scalar_t>(),
             static_cast<SizeT>(Is64Aware ? FLATBUFFERS_MAX_64_BUFFER_SIZE
                                          : FLATBUFFERS_MAX_BUFFER_SIZE)),
        num_field_loc(0),
        max_voffset_(0),
        length_of_64_bit_region_(0),
        nested(false),
        finished(false),
        minalign_(1),
        force_defaults_(false),
        dedup_vtables_(true),
        string_pool(nullptr) {
    EndianCheck();
    // Default construct and swap idiom.
    Swap(other);
  }

  // Move assignment: move-construct a temporary and swap.
  FlatBufferBuilderImpl &operator=(FlatBufferBuilderImpl &&other) noexcept {
    FlatBufferBuilderImpl temp(std::move(other));
    Swap(temp);
    return *this;
  }

  void Swap(FlatBufferBuilderImpl &other) {
    using std::swap;
    buf_.swap(other.buf_);
    swap(num_field_loc, other.num_field_loc);
    swap(max_voffset_, other.max_voffset_);
    swap(length_of_64_bit_region_, other.length_of_64_bit_region_);
    swap(nested, other.nested);
    swap(finished, other.finished);
    swap(minalign_, other.minalign_);
    swap(force_defaults_, other.force_defaults_);
    swap(dedup_vtables_, other.dedup_vtables_);
    swap(string_pool, other.string_pool);
  }

  ~FlatBufferBuilderImpl() {
    if (string_pool) delete string_pool;
  }

  // Reset all the state in this builder so it can be reused to construct
  // another buffer.
  void Reset() {
    Clear();       // clear builder state
    buf_.reset();  // deallocate buffer
  }

  // Reset the builder state and clear the existing buffer for reuse (the
  // allocation itself is kept, unlike Reset()).
  void Clear() {
    ClearOffsets();
    buf_.clear();
    nested = false;
    finished = false;
    minalign_ = 1;
    length_of_64_bit_region_ = 0;
    if (string_pool) string_pool->clear();
  }

  // The current size of the serialized buffer, counting from the end.
  SizeT GetSize() const { return buf_.size(); }

  // The current size of the serialized buffer relative to the end of the
  // 32-bit region. Only enabled for the 64-bit builder, as only that builder
  // tracks the 32/64-bit boundary.
  template<bool is_64 = Is64Aware>
  typename std::enable_if<is_64, uoffset_t>::type GetSizeRelative32BitRegion()
      const {
    // The size of the 32-bit region is the full size of the buffer minus the
    // size of the 64-bit region.
    return static_cast<uoffset_t>(GetSize() - length_of_64_bit_region_);
  }

  template<bool is_64 = Is64Aware>
  typename std::enable_if<!is_64, uoffset_t>::type GetSizeRelative32BitRegion()
      const {
    // A 32-bit builder has no 64-bit region, so the size is just the size.
    return static_cast<uoffset_t>(GetSize());
  }

  // Get the serialized buffer (after you call `Finish()`).
  uint8_t *GetBufferPointer() const {
    Finished();
    return buf_.data();
  }

  // Get the serialized buffer (after you call `Finish()`) as a span.
  flatbuffers::span<uint8_t> GetBufferSpan() const {
    Finished();
    return flatbuffers::span<uint8_t>(buf_.data(), buf_.size());
  }

  // Get a pointer to an unfinished buffer.
  uint8_t *GetCurrentBufferPointer() const { return buf_.data(); }

  // Get the released DetachedBuffer; the builder is cleared afterwards.
  DetachedBuffer Release() {
    Finished();
    DetachedBuffer buffer = buf_.release();
    Clear();
    return buffer;
  }
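
  // A minimal usage sketch (illustrative only): transferring ownership of the
  // finished buffer out of the builder.
  //
  //   flatbuffers::FlatBufferBuilder fbb;
  //   // ... build and Finish() a root object ...
  //   flatbuffers::DetachedBuffer buf = fbb.Release();
  //   // buf.data() / buf.size() remain valid after `fbb` is reused or
  //   // destroyed.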

  // Get the released raw pointer to the serialized buffer. `size` receives
  // the size of the memory block containing the FlatBuffer, and `offset` the
  // offset from the returned pointer at which the finished FlatBuffer starts.
  uint8_t *ReleaseRaw(size_t &size, size_t &offset) {
    Finished();
    uint8_t *raw = buf_.release_raw(size, offset);
    Clear();
    return raw;
  }

  // The minimum alignment this buffer needs to be accessed properly. Only
  // known once all elements have been written (after Finish()). Useful if you
  // need to embed a FlatBuffer in some other buffer such that it can later be
  // read in place.
  size_t GetBufferMinAlignment() const {
    Finished();
    return minalign_;
  }

  void Finished() const {
    // If this assert hits, you are attempting to access a buffer that hasn't
    // been finished yet. Be sure to call FlatBufferBuilder::Finish with your
    // root table. If you really need to access an unfinished buffer, use
    // GetCurrentBufferPointer instead.
    FLATBUFFERS_ASSERT(finished);
  }

  // In order to save space, fields that are set to their default value don't
  // get serialized into the buffer. Pass `true` to always serialize values,
  // even when they equal the default.
  void ForceDefaults(bool fd) { force_defaults_ = fd; }

  // By default vtables are deduped in order to save space.
  void DedupVtables(bool dedup) { dedup_vtables_ = dedup; }

  void Pad(size_t num_bytes) { buf_.fill(num_bytes); }

  void TrackMinAlign(size_t elem_size) {
    if (elem_size > minalign_) minalign_ = elem_size;
  }

  void Align(size_t elem_size) {
    TrackMinAlign(elem_size);
    buf_.fill(PaddingBytes(buf_.size(), elem_size));
  }

  void PushFlatBuffer(const uint8_t *bytes, size_t size) {
    PushBytes(bytes, size);
    finished = true;
  }

  void PushBytes(const uint8_t *bytes, size_t size) { buf_.push(bytes, size); }

  void PopBytes(size_t amount) { buf_.pop(amount); }

  template<typename T> void AssertScalarT() {
    // The code assumes power-of-2 sizes and endian-swap-ability.
    static_assert(flatbuffers::is_scalar<T>::value, "T must be a scalar type");
  }

  // Write a single aligned scalar to the buffer.
  template<typename T, typename ReturnT = uoffset_t>
  ReturnT PushElement(T element) {
    AssertScalarT<T>();
    Align(sizeof(T));
    buf_.push_small(EndianScalar(element));
    return CalculateOffset<ReturnT>();
  }

  template<typename T, template<typename> class OffsetT = Offset>
  uoffset_t PushElement(OffsetT<T> off) {
    // Special case for offsets: see ReferTo below.
    return PushElement(ReferTo(off.o));
  }

  // When writing fields, we track where they are, so we can create correct
  // vtables later.
  void TrackField(voffset_t field, uoffset_t off) {
    FieldLoc fl = { off, field };
    buf_.scratch_push_small(fl);
    num_field_loc++;
    if (field > max_voffset_) { max_voffset_ = field; }
  }

  // Like PushElement, but additionally tracks the field this represents.
  template<typename T> void AddElement(voffset_t field, T e, T def) {
    // We don't serialize values equal to the default.
    if (IsTheSameAs(e, def) && !force_defaults_) return;
    TrackField(field, PushElement(e));
  }

  template<typename T> void AddElement(voffset_t field, T e) {
    TrackField(field, PushElement(e));
  }

  template<typename T> void AddOffset(voffset_t field, Offset<T> off) {
    if (off.IsNull()) return;  // Don't store.
    AddElement(field, ReferTo(off.o), static_cast<uoffset_t>(0));
  }

  template<typename T> void AddOffset(voffset_t field, Offset64<T> off) {
    if (off.IsNull()) return;  // Don't store.
    AddElement(field, ReferTo(off.o), static_cast<uoffset64_t>(0));
  }

  template<typename T> void AddStruct(voffset_t field, const T *structptr) {
    if (!structptr) return;  // Default, don't store.
    Align(AlignOf<T>());
    buf_.push_small(*structptr);
    TrackField(field, CalculateOffset<uoffset_t>());
  }

  void AddStructOffset(voffset_t field, uoffset_t off) {
    TrackField(field, off);
  }

  // Offsets initially are relative to the end of the buffer (downwards).
  // This function converts them to be relative to the current location in the
  // buffer (when stored here), pointing upwards.
  uoffset_t ReferTo(uoffset_t off) {
    // Align to ensure GetSizeRelative32BitRegion() below is correct.
    Align(sizeof(uoffset_t));
    // The offset must refer to something already in the buffer.
    return ReferTo(off, GetSizeRelative32BitRegion());
  }

  uoffset64_t ReferTo(uoffset64_t off) {
    // Align to ensure GetSize() below is correct.
    Align(sizeof(uoffset64_t));
    return ReferTo(off, GetSize());
  }

  template<typename T, typename T2> T ReferTo(const T off, const T2 size) {
    FLATBUFFERS_ASSERT(off && off <= size);
    return size - off + static_cast<T>(sizeof(T));
  }

  template<typename T> T ReferTo(const T off, const T size) {
    FLATBUFFERS_ASSERT(off && off <= size);
    return size - off + static_cast<T>(sizeof(T));
  }

  void NotNested() {
    // If you hit this, you're trying to construct a Table/Vector/String
    // during the construction of its parent table (between the MyTableBuilder
    // and table.Finish()). Move the creation of these sub-objects to before
    // the MyTableBuilder to not get this assert. Ignoring this assert may
    // appear to work in simple cases, but storing objects in-line may cause
    // vtable offsets to not fit anymore, and also leads to vtable duplication.
    FLATBUFFERS_ASSERT(!nested);
    // If you hit this, fields were added outside the scope of a table.
    FLATBUFFERS_ASSERT(!num_field_loc);
  }

  // From generated code (or from the parser), we call StartTable/EndTable
  // with a sequence of AddElement calls in between.
  uoffset_t StartTable() {
    NotNested();
    nested = true;
    return GetSizeRelative32BitRegion();
  }

  // This finishes one serialized object by generating the vtable if it's a
  // table, comparing it against existing vtables, and writing the resulting
  // vtable offset.
  uoffset_t EndTable(uoffset_t start) {
    // If you get this assert, a corresponding StartTable wasn't called.
    FLATBUFFERS_ASSERT(nested);
    // Write the vtable offset, which is the start of any buffer.
    const uoffset_t vtable_offset_loc =
        static_cast<uoffset_t>(PushElement<soffset_t>(0));
    // Write a vtable, which consists entirely of voffset_t elements.
    // It starts with the number of offsets, followed by a type id, followed
    // by the offsets themselves. In reverse:
    // Include space for the last offset and ensure empty tables have a
    // minimum size.
    max_voffset_ =
        (std::max)(static_cast<voffset_t>(max_voffset_ + sizeof(voffset_t)),
                   FieldIndexToOffset(0));
    buf_.fill_big(max_voffset_);
    const uoffset_t table_object_size = vtable_offset_loc - start;
    // Vtables use 16-bit offsets.
    FLATBUFFERS_ASSERT(table_object_size < 0x10000);
    WriteScalar<voffset_t>(buf_.data() + sizeof(voffset_t),
                           static_cast<voffset_t>(table_object_size));
    WriteScalar<voffset_t>(buf_.data(), max_voffset_);
    // Write the offsets into the table.
    for (auto it = buf_.scratch_end() - num_field_loc * sizeof(FieldLoc);
         it < buf_.scratch_end(); it += sizeof(FieldLoc)) {
      auto field_location = reinterpret_cast<FieldLoc *>(it);
      const voffset_t pos =
          static_cast<voffset_t>(vtable_offset_loc - field_location->off);
      // If this asserts, it means you've set a field twice.
      FLATBUFFERS_ASSERT(
          !ReadScalar<voffset_t>(buf_.data() + field_location->id));
      WriteScalar<voffset_t>(buf_.data() + field_location->id, pos);
    }
    ClearOffsets();
    auto vt1 = reinterpret_cast<voffset_t *>(buf_.data());
    auto vt1_size = ReadScalar<voffset_t>(vt1);
    auto vt_use = GetSizeRelative32BitRegion();
    // See if we already have generated a vtable with this exact same layout
    // before; if so, don't add it again.
    if (dedup_vtables_) {
      for (auto it = buf_.scratch_data(); it < buf_.scratch_end();
           it += sizeof(uoffset_t)) {
        auto vt_offset_ptr = reinterpret_cast<uoffset_t *>(it);
        auto vt2 = reinterpret_cast<voffset_t *>(buf_.data_at(*vt_offset_ptr));
        auto vt2_size = ReadScalar<voffset_t>(vt2);
        if (vt1_size != vt2_size || 0 != memcmp(vt2, vt1, vt1_size)) continue;
        vt_use = *vt_offset_ptr;
        buf_.pop(GetSizeRelative32BitRegion() - vtable_offset_loc);
        break;
      }
    }
    // If this is a new vtable, remember it.
    if (vt_use == GetSizeRelative32BitRegion()) {
      buf_.scratch_push_small(vt_use);
    }
    // Fill the vtable offset we created above.
    // The offset points from the beginning of the object to where the vtable
    // is stored. The offset's default direction is downward in memory for
    // future format flexibility (storing all vtables at the start of the
    // file).
    WriteScalar(buf_.data_at(vtable_offset_loc + length_of_64_bit_region_),
                static_cast<soffset_t>(vt_use) -
                    static_cast<soffset_t>(vtable_offset_loc));
    nested = false;
    return vtable_offset_loc;
  }

  FLATBUFFERS_ATTRIBUTE([[deprecated("call the version above instead")]])
  uoffset_t EndTable(uoffset_t start, voffset_t /*numfields*/) {
    return EndTable(start);
  }

  // This checks a required field has been set in a given table that has just
  // been constructed.
  template<typename T> void Required(Offset<T> table, voffset_t field) {
    auto table_ptr = reinterpret_cast<const Table *>(buf_.data_at(table.o));
    bool ok = table_ptr->GetOptionalFieldOffset(field) != 0;
    // If this fails, the caller will show what field needs to be set.
    FLATBUFFERS_ASSERT(ok);
    (void)ok;
  }

  uoffset_t StartStruct(size_t alignment) {
    Align(alignment);
    return GetSizeRelative32BitRegion();
  }

  uoffset_t EndStruct() { return GetSizeRelative32BitRegion(); }

  void ClearOffsets() {
    buf_.scratch_pop(num_field_loc * sizeof(FieldLoc));
    num_field_loc = 0;
    max_voffset_ = 0;
  }

  // Aligns such that when "len" bytes are written, an object can be written
  // after it (forward in the buffer) with "alignment" without padding.
  void PreAlign(size_t len, size_t alignment) {
    if (len == 0) return;
    TrackMinAlign(alignment);
    buf_.fill(PaddingBytes(GetSize() + len, alignment));
  }

  // Aligns such that when "len" bytes are written, an object of type `AlignT`
  // can be written after it (forward in the buffer) without padding.
  template<typename AlignT> void PreAlign(size_t len) {
    AssertScalarT<AlignT>();
    PreAlign(len, AlignOf<AlignT>());
  }

  // Store a string in the buffer, which can contain any binary data.
  // `str` points to the data; `len` is the number of bytes to store.
  // Returns the offset in the buffer where the string starts.
  template<template<typename> class OffsetT = Offset>
  OffsetT<String> CreateString(const char *str, size_t len) {
    CreateStringImpl(str, len);
    return OffsetT<String>(
        CalculateOffset<typename OffsetT<String>::offset_type>());
  }

  // Store a NUL-terminated C-string in the buffer.
  template<template<typename> class OffsetT = Offset>
  OffsetT<String> CreateString(const char *str) {
    return CreateString<OffsetT>(str, strlen(str));
  }

  // Store a NUL-terminated C-string in the buffer (non-const overload).
  template<template<typename> class OffsetT = Offset>
  OffsetT<String> CreateString(char *str) {
    return CreateString<OffsetT>(str, strlen(str));
  }

  // Store a std::string in the buffer, which can contain any binary data.
  template<template<typename> class OffsetT = Offset>
  OffsetT<String> CreateString(const std::string &str) {
    return CreateString<OffsetT>(str.c_str(), str.length());
  }

#ifdef FLATBUFFERS_HAS_STRING_VIEW
  // Store a string_view in the buffer, which can contain any binary data.
  template<template<typename> class OffsetT = Offset>
  OffsetT<String> CreateString(flatbuffers::string_view str) {
    return CreateString<OffsetT>(str.data(), str.size());
  }
#endif  // FLATBUFFERS_HAS_STRING_VIEW

  // Store an already-serialized `String` in this buffer.
  template<template<typename> class OffsetT = Offset>
  OffsetT<String> CreateString(const String *str) {
    return str ? CreateString<OffsetT>(str->c_str(), str->size()) : 0;
  }

  // Store a std::string-like type (anything with data() and length()) in the
  // buffer.
  template<template<typename> class OffsetT = Offset,
           // No need to explicitly declare the T type; let the compiler
           // deduce it.
           int &...ExplicitArgumentBarrier, typename T>
  OffsetT<String> CreateString(const T &str) {
    return CreateString<OffsetT>(str.data(), str.length());
  }
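
  // A minimal usage sketch (illustrative only):
  //
  //   flatbuffers::FlatBufferBuilder fbb;
  //   auto s1 = fbb.CreateString("hello");            // from a C-string
  //   auto s2 = fbb.CreateString(std::string("hi"));  // from a std::string
  //   // s1 and s2 are Offset<String> values usable as table fields.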

  // Store a string in the buffer, which can contain any binary data.
  // If a string with these exact contents has already been serialized before,
  // simply returns the offset of the existing string instead. This uses a map
  // stored on the heap, but only stores the numerical offsets.
  Offset<String> CreateSharedString(const char *str, size_t len) {
    FLATBUFFERS_ASSERT(FLATBUFFERS_GENERAL_HEAP_ALLOC_OK);
    if (!string_pool) {
      string_pool = new StringOffsetMap(StringOffsetCompare(buf_));
    }
    const size_t size_before_string = buf_.size();
    // Must first serialize the string, since the set holds offsets into the
    // buffer.
    const Offset<String> off = CreateString<Offset>(str, len);
    auto it = string_pool->find(off);
    // If it exists we reuse the existing serialized data.
    if (it != string_pool->end()) {
      // We can remove the string we just serialized.
      buf_.pop(buf_.size() - size_before_string);
      return *it;
    }
    // Record this string for future use.
    string_pool->insert(off);
    return off;
  }

#ifdef FLATBUFFERS_HAS_STRING_VIEW
  // Store a string in the buffer; if these exact contents were already
  // serialized, return the offset of the existing string instead.
  Offset<String> CreateSharedString(const flatbuffers::string_view str) {
    return CreateSharedString(str.data(), str.size());
  }
#else
  // Store a NUL-terminated C-string in the buffer; if these exact contents
  // were already serialized, return the offset of the existing string
  // instead.
  Offset<String> CreateSharedString(const char *str) {
    return CreateSharedString(str, strlen(str));
  }

  // Store a std::string in the buffer; if these exact contents were already
  // serialized, return the offset of the existing string instead.
  Offset<String> CreateSharedString(const std::string &str) {
    return CreateSharedString(str.c_str(), str.length());
  }
#endif

  // Store an already-serialized `String` in this buffer, deduplicated like
  // the overloads above.
  Offset<String> CreateSharedString(const String *str) {
    return str ? CreateSharedString(str->c_str(), str->size()) : 0;
  }

  template<typename LenT = uoffset_t, typename ReturnT = uoffset_t>
  ReturnT EndVector(size_t len) {
    FLATBUFFERS_ASSERT(nested);  // Hit if no corresponding StartVector.
    nested = false;
    return PushElement<LenT, ReturnT>(static_cast<LenT>(len));
  }

  template<template<typename> class OffsetT = Offset, typename LenT = uint32_t>
  void StartVector(size_t len, size_t elemsize, size_t alignment) {
    NotNested();
    nested = true;
    // Align to the length type of the vector, so that the length field can be
    // added without padding.
    PreAlign<LenT>(len * elemsize);
    PreAlign(len * elemsize, alignment);  // Just in case elemsize > uoffset_t.
  }

  template<typename T, template<typename> class OffsetT = Offset,
           typename LenT = uint32_t>
  void StartVector(size_t len) {
    return StartVector<OffsetT, LenT>(len, sizeof(T), AlignOf<T>());
  }

  // Call this right before StartVector/CreateVector if you want to force the
  // alignment to be something different than what the element size would
  // normally dictate. This is useful when storing a nested_flatbuffer in a
  // vector of bytes, or when storing SIMD floats, etc.
  void ForceVectorAlignment(const size_t len, const size_t elemsize,
                            const size_t alignment) {
    if (len == 0) return;
    FLATBUFFERS_ASSERT(VerifyAlignmentRequirements(alignment));
    PreAlign(len * elemsize, alignment);
  }

  template<bool is_64 = Is64Aware>
  typename std::enable_if<is_64, void>::type ForceVectorAlignment64(
      const size_t len, const size_t elemsize, const size_t alignment) {
    // If you hit this assert, you are attempting to force alignment of a
    // 64-bit vector after already serializing 32-bit data: all 64-bit data
    // must be added before any 32-bit data.
    FLATBUFFERS_ASSERT(GetSize() == length_of_64_bit_region_);

    // Call through to the common implementation.
    ForceVectorAlignment(len, elemsize, alignment);

    // Update the 64-bit region.
    length_of_64_bit_region_ = GetSize();
  }

  // Similar to ForceVectorAlignment but for String fields.
  void ForceStringAlignment(size_t len, size_t alignment) {
    if (len == 0) return;
    FLATBUFFERS_ASSERT(VerifyAlignmentRequirements(alignment));
    PreAlign((len + 1) * sizeof(char), alignment);
  }

  // Serialize an array into a FlatBuffer `vector`. `v` points to `len`
  // elements of scalar type `T`. Returns a typed offset into the serialized
  // data indicating where the vector is stored.
  template<typename T, template<typename...> class OffsetT = Offset,
           template<typename...> class VectorT = Vector>
  OffsetT<VectorT<T>> CreateVector(const T *v, size_t len) {
    // The type of the length field in the vector.
    typedef typename VectorT<T>::size_type LenT;
    typedef typename OffsetT<VectorT<T>>::offset_type offset_type;
    // If this assert hits, you're specifying a template argument that is
    // causing the wrong overload to be selected; remove it.
    AssertScalarT<T>();
    StartVector<T, OffsetT, LenT>(len);
    if (len > 0) {
#if FLATBUFFERS_LITTLEENDIAN
      PushBytes(reinterpret_cast<const uint8_t *>(v), len * sizeof(T));
#else
      if (sizeof(T) == 1) {
        PushBytes(reinterpret_cast<const uint8_t *>(v), len);
      } else {
        for (auto i = len; i > 0;) { PushElement(v[--i]); }
      }
#endif
    }
    return OffsetT<VectorT<T>>(EndVector<LenT, offset_type>(len));
  }
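
  // A minimal usage sketch (illustrative only):
  //
  //   flatbuffers::FlatBufferBuilder fbb;
  //   std::vector<int32_t> values = { 1, 2, 3 };
  //   auto vec_offset = fbb.CreateVector(values);  // Offset<Vector<int32_t>>
  //   // vec_offset can then be stored in a table field via generated code.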

  // Serialize an array-like object (anything with data() and size()) into a
  // FlatBuffer `vector`.
  template<typename T, class C>
  Offset<Vector<T>> CreateVector(const C &array) {
    return CreateVector(array.data(), array.size());
  }

  // Serialize an initializer list into a FlatBuffer `vector`.
  template<typename T>
  Offset<Vector<T>> CreateVector(std::initializer_list<T> v) {
    return CreateVector(v.begin(), v.size());
  }

  template<typename T>
  Offset<Vector<Offset<T>>> CreateVector(const Offset<T> *v, size_t len) {
    StartVector<Offset<T>>(len);
    for (auto i = len; i > 0;) { PushElement(v[--i]); }
    return Offset<Vector<Offset<T>>>(EndVector(len));
  }

  // Serialize a `std::vector` into a FlatBuffer `vector`.
  template<typename T, typename Alloc = std::allocator<T>>
  Offset<Vector<T>> CreateVector(const std::vector<T, Alloc> &v) {
    return CreateVector(data(v), v.size());
  }

  // Serialize a `std::vector` into a 64-bit addressed FlatBuffer `vector`.
  template<template<typename...> class VectorT = Vector64,
           int &...ExplicitArgumentBarrier, typename T>
  Offset64<VectorT<T>> CreateVector64(const std::vector<T> &v) {
    return CreateVector<T, Offset64, VectorT>(data(v), v.size());
  }

  // vector<bool> may be implemented using a bit-set, so we can't access it as
  // an array. Instead, read elements manually.
  Offset<Vector<uint8_t>> CreateVector(const std::vector<bool> &v) {
    StartVector<uint8_t>(v.size());
    for (auto i = v.size(); i > 0;) {
      PushElement(static_cast<uint8_t>(v[--i]));
    }
    return Offset<Vector<uint8_t>>(EndVector(v.size()));
  }

  // Serialize values returned by a function into a FlatBuffer `vector`.
  // This is a convenience function that takes care of iteration for you; it
  // uses a heap-allocated vector to store the intermediate results.
  // `f` is called with each index 0..vector_size-1.
  template<typename T>
  Offset<Vector<T>> CreateVector(size_t vector_size,
                                 const std::function<T(size_t i)> &f) {
    FLATBUFFERS_ASSERT(FLATBUFFERS_GENERAL_HEAP_ALLOC_OK);
    std::vector<T> elems(vector_size);
    for (size_t i = 0; i < vector_size; i++) elems[i] = f(i);
    return CreateVector(elems);
  }

  // Same as above, but `f` also receives a user-supplied `state` pointer on
  // every call.
  template<typename T, typename F, typename S>
  Offset<Vector<T>> CreateVector(size_t vector_size, F f, S *state) {
    FLATBUFFERS_ASSERT(FLATBUFFERS_GENERAL_HEAP_ALLOC_OK);
    std::vector<T> elems(vector_size);
    for (size_t i = 0; i < vector_size; i++) elems[i] = f(i, state);
    return CreateVector(elems);
  }

  // Serialize a `std::vector<StringType>` into a FlatBuffer `vector`, where
  // StringType is any type accepted by the CreateString() overloads.
  // This is a convenience function for a common case.
  template<typename StringType = std::string,
           typename Alloc = std::allocator<StringType>>
  Offset<Vector<Offset<String>>> CreateVectorOfStrings(
      const std::vector<StringType, Alloc> &v) {
    return CreateVectorOfStrings(v.cbegin(), v.cend());
  }

  // Serialize a collection of strings, given by an iterator range, into a
  // FlatBuffer `vector`.
  template<class It>
  Offset<Vector<Offset<String>>> CreateVectorOfStrings(It begin, It end) {
    auto distance = std::distance(begin, end);
    FLATBUFFERS_ASSERT(distance >= 0);
    auto size = static_cast<size_t>(distance);
    auto scratch_buffer_usage = size * sizeof(Offset<String>);
    // If there is not enough space to store the offsets, there definitely
    // won't be enough space to store all the strings. Ensuring space for the
    // scratch region up front is fine: if it fails, it would have failed
    // later anyway.
    buf_.ensure_space(scratch_buffer_usage);
    for (auto it = begin; it != end; ++it) {
      buf_.scratch_push_small(CreateString(*it));
    }
    StartVector<Offset<String>>(size);
    for (size_t i = 1; i <= size; i++) {
      // Note we re-evaluate the buf location each iteration to account for
      // any underlying buffer resizing that may occur.
      PushElement(*reinterpret_cast<Offset<String> *>(
          buf_.scratch_end() - i * sizeof(Offset<String>)));
    }
    buf_.scratch_pop(scratch_buffer_usage);
    return Offset<Vector<Offset<String>>>(EndVector(size));
  }
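
  // A minimal usage sketch (illustrative only):
  //
  //   std::vector<std::string> names = { "alice", "bob" };
  //   auto names_offset = fbb.CreateVectorOfStrings(names);
  //   // names_offset is an Offset<Vector<Offset<String>>>.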

  // Serialize an array of structs into a FlatBuffer `vector`.
  template<typename T, template<typename...> class OffsetT = Offset,
           template<typename...> class VectorT = Vector>
  OffsetT<VectorT<const T *>> CreateVectorOfStructs(const T *v, size_t len) {
    // The type of the length field in the vector.
    typedef typename VectorT<T>::size_type LenT;
    typedef typename OffsetT<VectorT<const T *>>::offset_type offset_type;

    StartVector<OffsetT, LenT>(len, sizeof(T), AlignOf<T>());
    if (len > 0) {
      PushBytes(reinterpret_cast<const uint8_t *>(v), sizeof(T) * len);
    }
    return OffsetT<VectorT<const T *>>(EndVector<LenT, offset_type>(len));
  }

  // Serialize structs into a FlatBuffer `vector`, with the elements written
  // in place by `filler`, which is called once per element with the index and
  // a pointer to the element's storage.
  template<typename T>
  Offset<Vector<const T *>> CreateVectorOfStructs(
      size_t vector_size, const std::function<void(size_t i, T *)> &filler) {
    T *structs = StartVectorOfStructs<T>(vector_size);
    for (size_t i = 0; i < vector_size; i++) {
      filler(i, structs);
      structs++;
    }
    return EndVectorOfStructs<T>(vector_size);
  }

  // Same as above, but `f` also receives a user-supplied `state` pointer on
  // every call.
  template<typename T, typename F, typename S>
  Offset<Vector<const T *>> CreateVectorOfStructs(size_t vector_size, F f,
                                                  S *state) {
    T *structs = StartVectorOfStructs<T>(vector_size);
    for (size_t i = 0; i < vector_size; i++) {
      f(i, structs, state);
      structs++;
    }
    return EndVectorOfStructs<T>(vector_size);
  }

  // Serialize a `std::vector` of structs into a FlatBuffer `vector`.
  template<typename T, template<typename...> class OffsetT = Offset,
           template<typename...> class VectorT = Vector,
           typename Alloc = std::allocator<T>>
  OffsetT<VectorT<const T *>> CreateVectorOfStructs(
      const std::vector<T, Alloc> &v) {
    return CreateVectorOfStructs<T, OffsetT, VectorT>(data(v), v.size());
  }

  // Serialize a `std::vector` of structs into a 64-bit addressed FlatBuffer
  // `vector`.
  template<template<typename...> class VectorT = Vector64,
           int &...ExplicitArgumentBarrier, typename T>
  Offset64<VectorT<const T *>> CreateVectorOfStructs64(
      const std::vector<T> &v) {
    return CreateVectorOfStructs<T, Offset64, VectorT>(data(v), v.size());
  }

  // Serialize an array of native structs into a FlatBuffer `vector`, using
  // `pack_func` to convert each native struct of type `S` into the FlatBuffer
  // struct type `T`.
  template<typename T, typename S>
  Offset<Vector<const T *>> CreateVectorOfNativeStructs(
      const S *v, size_t len, T (*const pack_func)(const S &)) {
    FLATBUFFERS_ASSERT(pack_func);
    auto structs = StartVectorOfStructs<T>(len);
    for (size_t i = 0; i < len; i++) { structs[i] = pack_func(v[i]); }
    return EndVectorOfStructs<T>(len);
  }

  // Serialize an array of native structs into a FlatBuffer `vector`, using a
  // free function `Pack(const S &)` (normally provided by generated code) to
  // do the conversion.
  template<typename T, typename S>
  Offset<Vector<const T *>> CreateVectorOfNativeStructs(const S *v,
                                                        size_t len) {
    extern T Pack(const S &);
    return CreateVectorOfNativeStructs(v, len, Pack);
  }

  // Serialize a `std::vector` of native structs into a FlatBuffer `vector`,
  // using `pack_func` to convert each element.
  template<typename T, typename S, typename Alloc = std::allocator<T>>
  Offset<Vector<const T *>> CreateVectorOfNativeStructs(
      const std::vector<S, Alloc> &v, T (*const pack_func)(const S &)) {
    return CreateVectorOfNativeStructs<T, S>(data(v), v.size(), pack_func);
  }

  // Serialize a `std::vector` of native structs into a FlatBuffer `vector`.
  template<typename T, typename S, typename Alloc = std::allocator<S>>
  Offset<Vector<const T *>> CreateVectorOfNativeStructs(
      const std::vector<S, Alloc> &v) {
    return CreateVectorOfNativeStructs<T, S>(data(v), v.size());
  }

  // Comparator for sorting structs by their key field.
  template<typename T> struct StructKeyComparator {
    bool operator()(const T &a, const T &b) const {
      return a.KeyCompareLessThan(&b);
    }
  };

  // Serialize a `std::vector` of structs into a FlatBuffer `vector` in sorted
  // order. Note: the input vector is sorted in place.
  template<typename T, typename Alloc = std::allocator<T>>
  Offset<Vector<const T *>> CreateVectorOfSortedStructs(
      std::vector<T, Alloc> *v) {
    return CreateVectorOfSortedStructs(data(*v), v->size());
  }

  // Serialize a `std::vector` of native structs into a FlatBuffer `vector` in
  // sorted order, converting each element with a generated `Pack` function.
  template<typename T, typename S, typename Alloc = std::allocator<T>>
  Offset<Vector<const T *>> CreateVectorOfSortedNativeStructs(
      std::vector<S, Alloc> *v) {
    return CreateVectorOfSortedNativeStructs<T, S>(data(*v), v->size());
  }

  // Serialize an array of structs into a FlatBuffer `vector` in sorted order.
  // Note: the input array is sorted in place.
  template<typename T>
  Offset<Vector<const T *>> CreateVectorOfSortedStructs(T *v, size_t len) {
    std::stable_sort(v, v + len, StructKeyComparator<T>());
    return CreateVectorOfStructs(v, len);
  }

  // Serialize an array of native structs into a FlatBuffer `vector` in sorted
  // order, using a free `Pack(const S &)` function (normally provided by
  // generated code) to convert each element.
  template<typename T, typename S>
  Offset<Vector<const T *>> CreateVectorOfSortedNativeStructs(S *v,
                                                              size_t len) {
    extern T Pack(const S &);
    auto structs = StartVectorOfStructs<T>(len);
    for (size_t i = 0; i < len; i++) { structs[i] = Pack(v[i]); }
    std::stable_sort(structs, structs + len, StructKeyComparator<T>());
    return EndVectorOfStructs<T>(len);
  }

  // Comparator for sorting tables (given by offsets into the buffer) by their
  // key field.
  template<typename T> struct TableKeyComparator {
    explicit TableKeyComparator(vector_downward<SizeT> &buf) : buf_(buf) {}
    TableKeyComparator(const TableKeyComparator &other) : buf_(other.buf_) {}
    bool operator()(const Offset<T> &a, const Offset<T> &b) const {
      auto table_a = reinterpret_cast<T *>(buf_.data_at(a.o));
      auto table_b = reinterpret_cast<T *>(buf_.data_at(b.o));
      return table_a->KeyCompareLessThan(table_b);
    }
    vector_downward<SizeT> &buf_;

   private:
    FLATBUFFERS_DELETE_FUNC(
        TableKeyComparator &operator=(const TableKeyComparator &other));
  };

  // Serialize an array of `table` offsets as a `vector` in the buffer, in
  // sorted order by the tables' key field. Note: the input array is sorted in
  // place.
  template<typename T>
  Offset<Vector<Offset<T>>> CreateVectorOfSortedTables(Offset<T> *v,
                                                       size_t len) {
    std::stable_sort(v, v + len, TableKeyComparator<T>(buf_));
    return CreateVector(v, len);
  }

  // Serialize a `std::vector` of `table` offsets as a `vector` in the buffer,
  // in sorted order by the tables' key field.
  template<typename T, typename Alloc = std::allocator<T>>
  Offset<Vector<Offset<T>>> CreateVectorOfSortedTables(
      std::vector<Offset<T>, Alloc> *v) {
    return CreateVectorOfSortedTables(data(*v), v->size());
  }

  // Specialized version of `CreateVector` for non-copying use cases.
  // Reserves space for `len` elements of `elemsize` bytes and returns a
  // pointer to that space in `*buf`; the caller writes the element data
  // later via that pointer.
  uoffset_t CreateUninitializedVector(size_t len, size_t elemsize,
                                      size_t alignment, uint8_t **buf) {
    NotNested();
    StartVector(len, elemsize, alignment);
    buf_.make_space(len * elemsize);
    const uoffset_t vec_start = GetSizeRelative32BitRegion();
    auto vec_end = EndVector(len);
    *buf = buf_.data_at(vec_start);
    return vec_end;
  }

  FLATBUFFERS_ATTRIBUTE([[deprecated("call the version above instead")]])
  uoffset_t CreateUninitializedVector(size_t len, size_t elemsize,
                                      uint8_t **buf) {
    return CreateUninitializedVector(len, elemsize, elemsize, buf);
  }

  // Specialized version of `CreateVector` for non-copying use cases.
  // Reserves space for `len` elements of type `T` and returns a typed pointer
  // to that space in `*buf`, to be filled in by the caller.
  template<typename T>
  Offset<Vector<T>> CreateUninitializedVector(size_t len, T **buf) {
    AssertScalarT<T>();
    return CreateUninitializedVector(len, sizeof(T), AlignOf<T>(),
                                     reinterpret_cast<uint8_t **>(buf));
  }

  template<typename T>
  Offset<Vector<const T *>> CreateUninitializedVectorOfStructs(size_t len,
                                                               T **buf) {
    return CreateUninitializedVector(len, sizeof(T), AlignOf<T>(),
                                     reinterpret_cast<uint8_t **>(buf));
  }
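
  // A minimal sketch of the non-copying path (illustrative only; `Vec3` is a
  // hypothetical generated struct type, not part of this header):
  //
  //   Vec3 *points = nullptr;
  //   auto vec = fbb.CreateUninitializedVectorOfStructs<Vec3>(3, &points);
  //   points[0] = Vec3(1.0f, 2.0f, 3.0f);  // fill the reserved space in place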

  // Write a vector whose elements are converted from `U` to the scalar type
  // `T` as they are written.
  template<typename T, typename U>
  Offset<Vector<T>> CreateVectorScalarCast(const U *v, size_t len) {
    AssertScalarT<T>();
    AssertScalarT<U>();
    StartVector<T>(len);
    for (auto i = len; i > 0;) { PushElement(static_cast<T>(v[--i])); }
    return Offset<Vector<T>>(EndVector(len));
  }

  // Write a struct by itself, typically to be part of a union.
  template<typename T> Offset<const T *> CreateStruct(const T &structobj) {
    NotNested();
    Align(AlignOf<T>());
    buf_.push_small(structobj);
    return Offset<const T *>(
        CalculateOffset<typename Offset<const T *>::offset_type>());
  }

  // Finish serializing a buffer by writing the root offset. If a
  // `file_identifier` is given, the buffer will be prefixed with a standard
  // FlatBuffers file header.
  template<typename T>
  void Finish(Offset<T> root, const char *file_identifier = nullptr) {
    Finish(root.o, file_identifier, false);
  }

  // Finish a buffer with a 32-bit size field prefixed (the size of the buffer
  // following the size field). These buffers are NOT compatible with standard
  // buffers created by Finish, i.e. you can't call GetRoot on them; you have
  // to use GetSizePrefixedRoot instead. These kinds of buffers are useful for
  // creating a stream of FlatBuffers.
  template<typename T>
  void FinishSizePrefixed(Offset<T> root,
                          const char *file_identifier = nullptr) {
    Finish(root.o, file_identifier, true);
  }

  void SwapBufAllocator(FlatBufferBuilderImpl &other) {
    buf_.swap_allocator(other.buf_);
  }

  // The length of a FlatBuffer file header.
  static const size_t kFileIdentifierLength =
      ::flatbuffers::kFileIdentifierLength;

 protected:
  // You shouldn't really be copying instances of this class.
  FlatBufferBuilderImpl(const FlatBufferBuilderImpl &);
  FlatBufferBuilderImpl &operator=(const FlatBufferBuilderImpl &);

  void Finish(uoffset_t root, const char *file_identifier, bool size_prefix) {
    // A buffer can only be finished once. To reuse a builder, call Clear().
    FLATBUFFERS_ASSERT(!finished);

    NotNested();
    buf_.clear_scratch();

    const size_t prefix_size = size_prefix ? sizeof(SizeT) : 0;
    // Make sure we track the alignment of the size prefix.
    TrackMinAlign(prefix_size);

    const size_t root_offset_size = sizeof(uoffset_t);
    const size_t file_id_size = file_identifier ? kFileIdentifierLength : 0;

    // This will cause the whole buffer to be aligned.
    PreAlign(prefix_size + root_offset_size + file_id_size, minalign_);

    if (file_identifier) {
      FLATBUFFERS_ASSERT(strlen(file_identifier) == kFileIdentifierLength);
      PushBytes(reinterpret_cast<const uint8_t *>(file_identifier),
                kFileIdentifierLength);
    }
    PushElement(ReferTo(root));  // Location of root.
    if (size_prefix) { PushElement(GetSize()); }
    finished = true;
  }

  struct FieldLoc {
    uoffset_t off;
    voffset_t id;
  };

  vector_downward<SizeT> buf_;

  // Accumulating offsets of table members while it is being built.
  // We store these in the scratch pad of buf_, after the vtable offsets.
  uoffset_t num_field_loc;
  // Track how much of the vtable is in use, so we can output the most compact
  // possible vtable.
  voffset_t max_voffset_;

  // The size (in bytes) of the 64-bit region of the buffer. Anything
  // addressed via Offset64/Vector64 is serialized first and therefore ends up
  // in the tail region of the finished buffer; tracking its length lets
  // offsets be computed relative to the 32-bit region (see
  // GetSizeRelative32BitRegion).
  size_t length_of_64_bit_region_;

  // Ensure objects are not nested.
  bool nested;

  // Ensure the buffer is finished before it is being accessed.
  bool finished;

  size_t minalign_;

  bool force_defaults_;  // Serialize values equal to their defaults anyway.

  bool dedup_vtables_;

  // Compares strings stored in the buffer by their serialized contents; used
  // by CreateSharedString for deduplication.
  struct StringOffsetCompare {
    explicit StringOffsetCompare(const vector_downward<SizeT> &buf)
        : buf_(&buf) {}
    bool operator()(const Offset<String> &a, const Offset<String> &b) const {
      auto stra = reinterpret_cast<const String *>(buf_->data_at(a.o));
      auto strb = reinterpret_cast<const String *>(buf_->data_at(b.o));
      return StringLessThan(stra->data(), stra->size(), strb->data(),
                            strb->size());
    }
    const vector_downward<SizeT> *buf_;
  };

  // For use with CreateSharedString; allocated on first use only.
  typedef std::set<Offset<String>, StringOffsetCompare> StringOffsetMap;
  StringOffsetMap *string_pool;

 private:
  void CanAddOffset64() {
    // If you hit this assertion, you are attempting to add a 64-bit offset to
    // a 32-bit-only builder. Use FlatBufferBuilder64 to serialize 64-bit
    // offsets.
    static_assert(Is64Aware, "cannot add 64-bit offset to a 32-bit builder");

    // If you hit this assertion, you are attempting to add a 64-bit offset
    // item after already serializing a 32-bit item. All 64-bit offsets have
    // to be added to the tail of the buffer before any 32-bit items can be
    // added.
    FLATBUFFERS_ASSERT(GetSize() == length_of_64_bit_region_);
  }

  // Store a string in the buffer, which can contain any binary data.
  void CreateStringImpl(const char *str, size_t len) {
    NotNested();
    PreAlign<uoffset_t>(len + 1);  // Always 0-terminated.
    buf_.fill(1);
    PushBytes(reinterpret_cast<const uint8_t *>(str), len);
    PushElement(static_cast<uoffset_t>(len));
  }

  // Allocates space for a vector of structures.
  // Must be completed with EndVectorOfStructs().
  template<typename T, template<typename> class OffsetT = Offset>
  T *StartVectorOfStructs(size_t vector_size) {
    StartVector<OffsetT>(vector_size, sizeof(T), AlignOf<T>());
    return reinterpret_cast<T *>(buf_.make_space(vector_size * sizeof(T)));
  }

  // Ends the vector of structures in the FlatBuffer.
  // The vector must have been started with StartVectorOfStructs().
  template<typename T, template<typename> class OffsetT = Offset>
  OffsetT<Vector<const T *>> EndVectorOfStructs(size_t vector_size) {
    return OffsetT<Vector<const T *>>(
        EndVector<typename Vector<const T *>::size_type,
                  typename OffsetT<Vector<const T *>>::offset_type>(
            vector_size));
  }

  template<typename T>
  typename std::enable_if<std::is_same<T, uoffset_t>::value, T>::type
  CalculateOffset() {
    // Default to the end of the 32-bit region. This may or may not be the end
    // of the buffer, depending on whether any 64-bit offsets have been added.
    return GetSizeRelative32BitRegion();
  }

  // Specialization to handle the 64-bit CalculateOffset, which is relative to
  // the end of the buffer.
  template<typename T>
  typename std::enable_if<std::is_same<T, uoffset64_t>::value, T>::type
  CalculateOffset() {
    // This should never be compiled in when not using a 64-bit builder.
    static_assert(Is64Aware, "invalid 64-bit offset in 32-bit builder");

    // Store how big the 64-bit region of the buffer is, so we can determine
    // where the 32/64-bit boundary is.
    length_of_64_bit_region_ = GetSize();

    return length_of_64_bit_region_;
  }
};

// The standard 32-bit builder and the 64-bit-aware builder.
using FlatBufferBuilder = FlatBufferBuilderImpl<false>;
using FlatBufferBuilder64 = FlatBufferBuilderImpl<true>;
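
// A minimal end-to-end usage sketch (illustrative only; `Monster` and
// `CreateMonster` stand in for types a schema compiler would generate, they
// are not part of this header):
//
//   flatbuffers::FlatBufferBuilder fbb;
//   auto name = fbb.CreateString("Orc");
//   auto orc = CreateMonster(fbb, name, /*hp=*/80);
//   fbb.Finish(orc);
//   // The finished buffer is at fbb.GetBufferPointer(), fbb.GetSize().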

// Specialization of CreateString for the 64-bit builder: strings created via
// an Offset64 must land in the 64-bit region of the buffer.
template<>
template<>
inline Offset64<String> FlatBufferBuilder64::CreateString(const char *str,
                                                          size_t len) {
  CanAddOffset64();
  CreateStringImpl(str, len);
  return Offset64<String>(
      CalculateOffset<typename Offset64<String>::offset_type>());
}

// Used to distinguish from real Offsets.
template<typename T = void> struct EmptyOffset {};

// Specializations of StartVector for the 64-bit builder: vectors addressed by
// an Offset64 must be placed in the 64-bit region of the buffer.
template<>
template<>
inline void FlatBufferBuilder64::StartVector<Offset64, uint32_t>(
    size_t len, size_t elemsize, size_t alignment) {
  CanAddOffset64();
  StartVector<EmptyOffset, uint32_t>(len, elemsize, alignment);
}

template<>
template<>
inline void FlatBufferBuilder64::StartVector<Offset64, uint64_t>(
    size_t len, size_t elemsize, size_t alignment) {
  CanAddOffset64();
  StartVector<EmptyOffset, uint64_t>(len, elemsize, alignment);
}

// Helpers to get a typed pointer to objects that are currently being built.
// Warning: creating new objects will lead to reallocations and invalidate
// the returned pointer!
template<typename T>
T *GetMutableTemporaryPointer(FlatBufferBuilder &fbb, Offset<T> offset) {
  return reinterpret_cast<T *>(fbb.GetCurrentBufferPointer() + fbb.GetSize() -
                               offset.o);
}

template<typename T>
const T *GetTemporaryPointer(const FlatBufferBuilder &fbb, Offset<T> offset) {
  return reinterpret_cast<const T *>(fbb.GetCurrentBufferPointer() +
                                     fbb.GetSize() - offset.o);
}

}  // namespace flatbuffers

#endif  // FLATBUFFERS_FLATBUFFER_BUILDER_H_