File indexing completed on 2025-02-21 10:13:00
0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015 #ifndef RAPIDJSON_ALLOCATORS_H_
0016 #define RAPIDJSON_ALLOCATORS_H_
0017
0018 #include "rapidjson.h"
0019 #include "internal/meta.h"
0020
0021 #include <memory>
0022 #include <limits>
0023
0024 #if RAPIDJSON_HAS_CXX11
0025 #include <type_traits>
0026 #endif
0027
0028 RAPIDJSON_NAMESPACE_BEGIN
0029
0030
0031
0032
0033
0034
0035
0036
0037
0038
0039
0040
0041
0042
0043
0044
0045
0046
0047
0048
0049
0050
0051
0052
0053
0054
0055
0056
0057
0058
0059
0060
0061
0062
0063
0064
0065
0066
0067
0068
0069
0070
0071 #ifndef RAPIDJSON_ALLOCATOR_DEFAULT_CHUNK_CAPACITY
0072 #define RAPIDJSON_ALLOCATOR_DEFAULT_CHUNK_CAPACITY (64 * 1024)
0073 #endif
0074
0075
0076
0077
0078
0079
0080
0081
0082
0083 class CrtAllocator {
0084 public:
0085 static const bool kNeedFree = true;
0086 void* Malloc(size_t size) {
0087 if (size)
0088 return RAPIDJSON_MALLOC(size);
0089 else
0090 return NULL;
0091 }
0092 void* Realloc(void* originalPtr, size_t originalSize, size_t newSize) {
0093 (void)originalSize;
0094 if (newSize == 0) {
0095 RAPIDJSON_FREE(originalPtr);
0096 return NULL;
0097 }
0098 return RAPIDJSON_REALLOC(originalPtr, newSize);
0099 }
0100 static void Free(void *ptr) RAPIDJSON_NOEXCEPT { RAPIDJSON_FREE(ptr); }
0101
0102 bool operator==(const CrtAllocator&) const RAPIDJSON_NOEXCEPT {
0103 return true;
0104 }
0105 bool operator!=(const CrtAllocator&) const RAPIDJSON_NOEXCEPT {
0106 return false;
0107 }
0108 };
0109
0110
0111
0112
0113
0114
0115
0116
0117
0118
0119
0120
0121
0122
0123
0124
0125
0126
0127
0128
//! Memory pool allocator (RapidJSON allocator concept).
/*! Carves small allocations out of larger chunks obtained from a
    BaseAllocator. Individual allocations can never be freed
    (Free() is a no-op, kNeedFree == false); memory is reclaimed chunk-wise
    by Clear() or when the last allocator sharing the pool is destroyed.
    Copies of this allocator share one reference-counted pool
    (kRefCounted == true), so copying is cheap and all copies allocate from
    the same chunks.
    \tparam BaseAllocator allocator used to obtain the chunks themselves
            (default: CrtAllocator).
*/
template <typename BaseAllocator = CrtAllocator>
class MemoryPoolAllocator {
    //! Header prepended to every chunk; chunks form a singly linked list
    //! with the newest chunk at the head.
    struct ChunkHeader {
        size_t capacity;    //!< Usable payload bytes in the chunk (header excluded).
        size_t size;        //!< Payload bytes already handed out from this chunk.
        ChunkHeader *next;  //!< Next (older) chunk, or 0 for the initial chunk.
    };

    //! Pool state shared (and reference-counted) by all copies of the allocator.
    struct SharedData {
        ChunkHeader *chunkHead;          //!< Head of the chunk list; Malloc() serves from this chunk.
        BaseAllocator* ownBaseAllocator; //!< BaseAllocator created by the pool itself (deleted on teardown), or 0.
        size_t refcount;                 //!< Number of MemoryPoolAllocator objects sharing this state.
        bool ownBuffer;                  //!< True if the SharedData block was allocated by us (vs. user-supplied buffer).
    };

    // Aligned sizes so the chunk header / payload that follow stay aligned.
    static const size_t SIZEOF_SHARED_DATA = RAPIDJSON_ALIGN(sizeof(SharedData));
    static const size_t SIZEOF_CHUNK_HEADER = RAPIDJSON_ALIGN(sizeof(ChunkHeader));

    //! The initial chunk header lives immediately after SharedData in the same block.
    static inline ChunkHeader *GetChunkHead(SharedData *shared)
    {
        return reinterpret_cast<ChunkHeader*>(reinterpret_cast<uint8_t*>(shared) + SIZEOF_SHARED_DATA);
    }
    //! Start of the payload area of the current (head) chunk.
    static inline uint8_t *GetChunkBuffer(SharedData *shared)
    {
        return reinterpret_cast<uint8_t*>(shared->chunkHead) + SIZEOF_CHUNK_HEADER;
    }

    static const size_t kDefaultChunkCapacity = RAPIDJSON_ALLOCATOR_DEFAULT_CHUNK_CAPACITY; //!< Default chunk capacity in bytes.

public:
    static const bool kNeedFree = false;  //!< Tell users that no need to call Free() with this allocator. (concept Allocator)
    static const bool kRefCounted = true; //!< Tell users that this allocator is reference counted on copy

    //! Constructor with chunkSize.
    /*! \param chunkSize The size of memory chunk. The default is kDefaultChunkCapacity.
        \param baseAllocator The allocator for allocating memory chunks.
               If 0, a BaseAllocator is created here and deleted on teardown.
    */
    explicit
    MemoryPoolAllocator(size_t chunkSize = kDefaultChunkCapacity, BaseAllocator* baseAllocator = 0) :
        chunk_capacity_(chunkSize),
        baseAllocator_(baseAllocator ? baseAllocator : RAPIDJSON_NEW(BaseAllocator)()),
        shared_(static_cast<SharedData*>(baseAllocator_ ? baseAllocator_->Malloc(SIZEOF_SHARED_DATA + SIZEOF_CHUNK_HEADER) : 0))
    {
        RAPIDJSON_ASSERT(baseAllocator_ != 0);
        RAPIDJSON_ASSERT(shared_ != 0);
        if (baseAllocator) {
            shared_->ownBaseAllocator = 0;               // caller keeps ownership
        }
        else {
            shared_->ownBaseAllocator = baseAllocator_;  // created above; deleted in ~MemoryPoolAllocator
        }
        // The embedded head chunk has capacity 0, so the first Malloc() is
        // guaranteed to go through AddChunk().
        shared_->chunkHead = GetChunkHead(shared_);
        shared_->chunkHead->capacity = 0;
        shared_->chunkHead->size = 0;
        shared_->chunkHead->next = 0;
        shared_->ownBuffer = true;
        shared_->refcount = 1;
    }

    //! Constructor with a user-supplied buffer.
    /*! The buffer hosts the shared state, the first chunk header and the first
        chunk's payload; it is never freed by this allocator (ownBuffer == false),
        so it must outlive the pool.
        \param buffer User supplied buffer.
        \param size Size of the buffer in bytes; must be at least
               SIZEOF_SHARED_DATA + SIZEOF_CHUNK_HEADER after pointer alignment.
        \param chunkSize The size of additional memory chunks.
        \param baseAllocator The allocator for additional chunks (may be 0;
               one is then created lazily in AddChunk() when needed).
    */
    MemoryPoolAllocator(void *buffer, size_t size, size_t chunkSize = kDefaultChunkCapacity, BaseAllocator* baseAllocator = 0) :
        chunk_capacity_(chunkSize),
        baseAllocator_(baseAllocator),
        shared_(static_cast<SharedData*>(AlignBuffer(buffer, size)))
    {
        RAPIDJSON_ASSERT(size >= SIZEOF_SHARED_DATA + SIZEOF_CHUNK_HEADER);
        shared_->chunkHead = GetChunkHead(shared_);
        shared_->chunkHead->capacity = size - SIZEOF_SHARED_DATA - SIZEOF_CHUNK_HEADER;
        shared_->chunkHead->size = 0;
        shared_->chunkHead->next = 0;
        shared_->ownBaseAllocator = 0;
        shared_->ownBuffer = false;
        shared_->refcount = 1;
    }

    //! Copy constructor: shares the pool and bumps its reference count.
    MemoryPoolAllocator(const MemoryPoolAllocator& rhs) RAPIDJSON_NOEXCEPT :
        chunk_capacity_(rhs.chunk_capacity_),
        baseAllocator_(rhs.baseAllocator_),
        shared_(rhs.shared_)
    {
        RAPIDJSON_NOEXCEPT_ASSERT(shared_->refcount > 0);
        ++shared_->refcount;
    }
    //! Copy assignment: release our pool, then share rhs's.
    /*! rhs's refcount is incremented *before* this object is torn down, so
        self-assignment cannot destroy the pool. */
    MemoryPoolAllocator& operator=(const MemoryPoolAllocator& rhs) RAPIDJSON_NOEXCEPT
    {
        RAPIDJSON_NOEXCEPT_ASSERT(rhs.shared_->refcount > 0);
        ++rhs.shared_->refcount;
        this->~MemoryPoolAllocator();
        baseAllocator_ = rhs.baseAllocator_;
        chunk_capacity_ = rhs.chunk_capacity_;
        shared_ = rhs.shared_;
        return *this;
    }

#if RAPIDJSON_HAS_CXX11_RVALUE_REFS
    //! Move constructor: steals the pool; rhs is left detached (shared_ == 0).
    MemoryPoolAllocator(MemoryPoolAllocator&& rhs) RAPIDJSON_NOEXCEPT :
        chunk_capacity_(rhs.chunk_capacity_),
        baseAllocator_(rhs.baseAllocator_),
        shared_(rhs.shared_)
    {
        RAPIDJSON_NOEXCEPT_ASSERT(rhs.shared_->refcount > 0);
        rhs.shared_ = 0;
    }
    //! Move assignment: release our pool, steal rhs's; rhs is left detached.
    MemoryPoolAllocator& operator=(MemoryPoolAllocator&& rhs) RAPIDJSON_NOEXCEPT
    {
        RAPIDJSON_NOEXCEPT_ASSERT(rhs.shared_->refcount > 0);
        this->~MemoryPoolAllocator();
        baseAllocator_ = rhs.baseAllocator_;
        chunk_capacity_ = rhs.chunk_capacity_;
        shared_ = rhs.shared_;
        rhs.shared_ = 0;
        return *this;
    }
#endif

    //! Destructor.
    /*! Only the last owner (refcount reaches 0) frees the chunks, the shared
        block (if owned) and any BaseAllocator the pool created itself. */
    ~MemoryPoolAllocator() RAPIDJSON_NOEXCEPT {
        if (!shared_) {
            // moved-from object: nothing to release
            return;
        }
        if (shared_->refcount > 1) {
            --shared_->refcount;
            return;
        }
        Clear();
        // Read ownBaseAllocator before shared_ is freed below.
        BaseAllocator *a = shared_->ownBaseAllocator;
        if (shared_->ownBuffer) {
            baseAllocator_->Free(shared_);
        }
        RAPIDJSON_DELETE(a);
    }

    //! Deallocates all memory chunks, excluding the first/user one (its size is reset to 0).
    void Clear() RAPIDJSON_NOEXCEPT {
        RAPIDJSON_NOEXCEPT_ASSERT(shared_->refcount > 0);
        for (;;) {
            ChunkHeader* c = shared_->chunkHead;
            if (!c->next) {
                break;  // the initial chunk (next == 0) is kept
            }
            shared_->chunkHead = c->next;
            baseAllocator_->Free(c);
        }
        shared_->chunkHead->size = 0;
    }

    //! Computes the total capacity of allocated memory chunks.
    /*! \return total capacity in bytes. O(number of chunks). */
    size_t Capacity() const RAPIDJSON_NOEXCEPT {
        RAPIDJSON_NOEXCEPT_ASSERT(shared_->refcount > 0);
        size_t capacity = 0;
        for (ChunkHeader* c = shared_->chunkHead; c != 0; c = c->next)
            capacity += c->capacity;
        return capacity;
    }

    //! Computes the memory blocks allocated.
    /*! \return total used bytes across all chunks. O(number of chunks). */
    size_t Size() const RAPIDJSON_NOEXCEPT {
        RAPIDJSON_NOEXCEPT_ASSERT(shared_->refcount > 0);
        size_t size = 0;
        for (ChunkHeader* c = shared_->chunkHead; c != 0; c = c->next)
            size += c->size;
        return size;
    }

    //! Whether the allocator is shared.
    /*! \return true if more than one allocator object references this pool. */
    bool Shared() const RAPIDJSON_NOEXCEPT {
        RAPIDJSON_NOEXCEPT_ASSERT(shared_->refcount > 0);
        return shared_->refcount > 1;
    }

    //! Allocates a memory block. (concept Allocator)
    /*! Bump-allocates from the head chunk, adding a new chunk when it does
        not fit. Returns NULL for size == 0 or on chunk-allocation failure.
        The returned size is rounded up by RAPIDJSON_ALIGN. */
    void* Malloc(size_t size) {
        RAPIDJSON_NOEXCEPT_ASSERT(shared_->refcount > 0);
        if (!size)
            return NULL;

        size = RAPIDJSON_ALIGN(size);
        // NOTE(review): `size + chunkHead->size` could wrap for pathological
        // sizes near SIZE_MAX — callers are expected to pass sane sizes.
        if (RAPIDJSON_UNLIKELY(shared_->chunkHead->size + size > shared_->chunkHead->capacity))
            if (!AddChunk(chunk_capacity_ > size ? chunk_capacity_ : size))
                return NULL;

        void *buffer = GetChunkBuffer(shared_) + shared_->chunkHead->size;
        shared_->chunkHead->size += size;
        return buffer;
    }

    //! Resizes a memory block. (concept Allocator)
    /*! Shrinking is free (the block is returned unchanged). Growing succeeds
        in place only when originalPtr is the most recent allocation of the
        head chunk; otherwise a new block is allocated and the data copied —
        the old block stays in the pool (it cannot be freed individually). */
    void* Realloc(void* originalPtr, size_t originalSize, size_t newSize) {
        if (originalPtr == 0)
            return Malloc(newSize);

        RAPIDJSON_NOEXCEPT_ASSERT(shared_->refcount > 0);
        if (newSize == 0)
            return NULL;

        originalSize = RAPIDJSON_ALIGN(originalSize);
        newSize = RAPIDJSON_ALIGN(newSize);

        // do not shrink if new size is smaller than original
        if (originalSize >= newSize)
            return originalPtr;

        // simply expand it if it is the last allocation and there is sufficient space
        if (originalPtr == GetChunkBuffer(shared_) + shared_->chunkHead->size - originalSize) {
            size_t increment = static_cast<size_t>(newSize - originalSize);
            if (shared_->chunkHead->size + increment <= shared_->chunkHead->capacity) {
                shared_->chunkHead->size += increment;
                return originalPtr;
            }
        }

        // Realloc process: allocate and copy memory, do not free original buffer.
        if (void* newBuffer = Malloc(newSize)) {
            if (originalSize)
                std::memcpy(newBuffer, originalPtr, originalSize);
            return newBuffer;
        }
        else
            return NULL;
    }

    //! Frees a memory block. (concept Allocator) — intentionally a no-op.
    static void Free(void *ptr) RAPIDJSON_NOEXCEPT { (void)ptr; }

    //! Compare (equality) with another MemoryPoolAllocator: equal iff they share the same pool.
    bool operator==(const MemoryPoolAllocator& rhs) const RAPIDJSON_NOEXCEPT {
        RAPIDJSON_NOEXCEPT_ASSERT(shared_->refcount > 0);
        RAPIDJSON_NOEXCEPT_ASSERT(rhs.shared_->refcount > 0);
        return shared_ == rhs.shared_;
    }

    //! Compare (inequality) with another MemoryPoolAllocator.
    bool operator!=(const MemoryPoolAllocator& rhs) const RAPIDJSON_NOEXCEPT {
        return !operator==(rhs);
    }

private:
    //! Creates a new chunk and pushes it at the head of the list.
    /*! \param capacity Capacity of the chunk in bytes.
        \return true on success, false if BaseAllocator::Malloc failed.
        Lazily creates (and records ownership of) a BaseAllocator when none
        was supplied to the buffer constructor. */
    bool AddChunk(size_t capacity) {
        if (!baseAllocator_)
            shared_->ownBaseAllocator = baseAllocator_ = RAPIDJSON_NEW(BaseAllocator)();
        if (ChunkHeader* chunk = static_cast<ChunkHeader*>(baseAllocator_->Malloc(SIZEOF_CHUNK_HEADER + capacity))) {
            chunk->capacity = capacity;
            chunk->size = 0;
            chunk->next = shared_->chunkHead;
            shared_->chunkHead = chunk;
            return true;
        }
        else
            return false;
    }

    //! Rounds buf up to pointer-size alignment, shrinking size accordingly.
    /*! Asserts that the buffer still covers the requested size after alignment. */
    static inline void* AlignBuffer(void* buf, size_t &size)
    {
        RAPIDJSON_NOEXCEPT_ASSERT(buf != 0);
        const uintptr_t mask = sizeof(void*) - 1;
        const uintptr_t ubuf = reinterpret_cast<uintptr_t>(buf);
        if (RAPIDJSON_UNLIKELY(ubuf & mask)) {
            const uintptr_t abuf = (ubuf + mask) & ~mask;
            RAPIDJSON_ASSERT(size >= abuf - ubuf);
            buf = reinterpret_cast<void*>(abuf);
            size -= abuf - ubuf;
        }
        return buf;
    }

    size_t chunk_capacity_;        //!< Minimum capacity of a newly added chunk.
    BaseAllocator* baseAllocator_; //!< Base allocator for chunk memory (may alias shared_->ownBaseAllocator).
    SharedData *shared_;           //!< Shared pool state; 0 only after being moved from.
};
0422
0423 namespace internal {
0424 template<typename, typename = void>
0425 struct IsRefCounted :
0426 public FalseType
0427 { };
0428 template<typename T>
0429 struct IsRefCounted<T, typename internal::EnableIfCond<T::kRefCounted>::Type> :
0430 public TrueType
0431 { };
0432 }
0433
0434 template<typename T, typename A>
0435 inline T* Realloc(A& a, T* old_p, size_t old_n, size_t new_n)
0436 {
0437 RAPIDJSON_NOEXCEPT_ASSERT(old_n <= (std::numeric_limits<size_t>::max)() / sizeof(T) && new_n <= (std::numeric_limits<size_t>::max)() / sizeof(T));
0438 return static_cast<T*>(a.Realloc(old_p, old_n * sizeof(T), new_n * sizeof(T)));
0439 }
0440
0441 template<typename T, typename A>
0442 inline T *Malloc(A& a, size_t n = 1)
0443 {
0444 return Realloc<T, A>(a, NULL, 0, n);
0445 }
0446
0447 template<typename T, typename A>
0448 inline void Free(A& a, T *p, size_t n = 1)
0449 {
0450 static_cast<void>(Realloc<T, A>(a, p, n, 0));
0451 }
0452
0453 #ifdef __GNUC__
0454 RAPIDJSON_DIAG_PUSH
0455 RAPIDJSON_DIAG_OFF(effc++)
0456 #endif
0457
//! Standard-library-compatible allocator adapter.
/*! Wraps a rapidjson-style BaseAllocator (default CrtAllocator) behind the
    std::allocator interface so it can be used with STL containers, while
    also exposing the rapidjson allocator concept (Malloc/Realloc/Free).
    Allocation is delegated to RAPIDJSON_NAMESPACE::Malloc/Free helpers,
    which forward to baseAllocator_.
    \tparam T value type of the allocator.
    \tparam BaseAllocator underlying rapidjson allocator.
*/
template <typename T, typename BaseAllocator = CrtAllocator>
class StdAllocator :
    public std::allocator<T>
{
    typedef std::allocator<T> allocator_type;
#if RAPIDJSON_HAS_CXX11
    // In C++11 mode, go through allocator_traits for the nested types
    // and for construct/destroy/max_size.
    typedef std::allocator_traits<allocator_type> traits_type;
#else
    typedef allocator_type traits_type;
#endif

public:
    typedef BaseAllocator BaseAllocatorType;

    //! Default constructor: default-constructed BaseAllocator.
    StdAllocator() RAPIDJSON_NOEXCEPT :
        allocator_type(),
        baseAllocator_()
    { }

    //! Copy constructor: copies the underlying BaseAllocator.
    StdAllocator(const StdAllocator& rhs) RAPIDJSON_NOEXCEPT :
        allocator_type(rhs),
        baseAllocator_(rhs.baseAllocator_)
    { }

    //! Converting constructor from an allocator of another value type
    //! (required for container rebinding).
    template<typename U>
    StdAllocator(const StdAllocator<U, BaseAllocator>& rhs) RAPIDJSON_NOEXCEPT :
        allocator_type(rhs),
        baseAllocator_(rhs.baseAllocator_)
    { }

#if RAPIDJSON_HAS_CXX11_RVALUE_REFS
    //! Move constructor: moves the underlying BaseAllocator.
    StdAllocator(StdAllocator&& rhs) RAPIDJSON_NOEXCEPT :
        allocator_type(std::move(rhs)),
        baseAllocator_(std::move(rhs.baseAllocator_))
    { }
#endif
#if RAPIDJSON_HAS_CXX11
    // Containers may move/swap the allocator along with their storage.
    using propagate_on_container_move_assignment = std::true_type;
    using propagate_on_container_swap = std::true_type;
#endif

    //! Construct around an existing BaseAllocator (copied into this object).
    /* implicit */
    StdAllocator(const BaseAllocator& baseAllocator) RAPIDJSON_NOEXCEPT :
        allocator_type(),
        baseAllocator_(baseAllocator)
    { }

    ~StdAllocator() RAPIDJSON_NOEXCEPT
    { }

    //! Rebind to another value type, keeping the same BaseAllocator.
    template<typename U>
    struct rebind {
        typedef StdAllocator<U, BaseAllocator> other;
    };

    typedef typename traits_type::size_type size_type;
    typedef typename traits_type::difference_type difference_type;

    typedef typename traits_type::value_type value_type;
    typedef typename traits_type::pointer pointer;
    typedef typename traits_type::const_pointer const_pointer;

#if RAPIDJSON_HAS_CXX11

    typedef typename std::add_lvalue_reference<value_type>::type &reference;
    typedef typename std::add_lvalue_reference<typename std::add_const<value_type>::type>::type &const_reference;

    //! Address-of that works even if value_type overloads operator&.
    pointer address(reference r) const RAPIDJSON_NOEXCEPT
    {
        return std::addressof(r);
    }
    const_pointer address(const_reference r) const RAPIDJSON_NOEXCEPT
    {
        return std::addressof(r);
    }

    //! Largest number of elements a single allocate() may request.
    size_type max_size() const RAPIDJSON_NOEXCEPT
    {
        return traits_type::max_size(*this);
    }

    //! In-place construct with forwarded arguments (via allocator_traits).
    template <typename ...Args>
    void construct(pointer p, Args&&... args)
    {
        traits_type::construct(*this, p, std::forward<Args>(args)...);
    }
    void destroy(pointer p)
    {
        traits_type::destroy(*this, p);
    }

#else // !RAPIDJSON_HAS_CXX11

    // Pre-C++11: forward to std::allocator's own members.
    typedef typename allocator_type::reference reference;
    typedef typename allocator_type::const_reference const_reference;

    pointer address(reference r) const RAPIDJSON_NOEXCEPT
    {
        return allocator_type::address(r);
    }
    const_pointer address(const_reference r) const RAPIDJSON_NOEXCEPT
    {
        return allocator_type::address(r);
    }

    size_type max_size() const RAPIDJSON_NOEXCEPT
    {
        return allocator_type::max_size();
    }

    void construct(pointer p, const_reference r)
    {
        allocator_type::construct(p, r);
    }
    void destroy(pointer p)
    {
        allocator_type::destroy(p);
    }

#endif // !RAPIDJSON_HAS_CXX11

    //! Allocate storage for n objects of type U via the BaseAllocator.
    template <typename U>
    U* allocate(size_type n = 1, const void* = 0)
    {
        return RAPIDJSON_NAMESPACE::Malloc<U>(baseAllocator_, n);
    }
    //! Release storage previously obtained from allocate<U>().
    template <typename U>
    void deallocate(U* p, size_type n = 1)
    {
        RAPIDJSON_NAMESPACE::Free<U>(baseAllocator_, p, n);
    }

    //! Standard allocator interface: allocate/deallocate for value_type.
    pointer allocate(size_type n = 1, const void* = 0)
    {
        return allocate<value_type>(n);
    }
    void deallocate(pointer p, size_type n = 1)
    {
        deallocate<value_type>(p, n);
    }

#if RAPIDJSON_HAS_CXX11
    // A stateless BaseAllocator makes all StdAllocator instances equal.
    using is_always_equal = std::is_empty<BaseAllocator>;
#endif

    //! Equality is delegated to the underlying BaseAllocator.
    template<typename U>
    bool operator==(const StdAllocator<U, BaseAllocator>& rhs) const RAPIDJSON_NOEXCEPT
    {
        return baseAllocator_ == rhs.baseAllocator_;
    }
    template<typename U>
    bool operator!=(const StdAllocator<U, BaseAllocator>& rhs) const RAPIDJSON_NOEXCEPT
    {
        return !operator==(rhs);
    }

    //! rapidjson Allocator concept: forwarded to the BaseAllocator.
    static const bool kNeedFree = BaseAllocator::kNeedFree;
    static const bool kRefCounted = internal::IsRefCounted<BaseAllocator>::Value;
    void* Malloc(size_t size)
    {
        return baseAllocator_.Malloc(size);
    }
    void* Realloc(void* originalPtr, size_t originalSize, size_t newSize)
    {
        return baseAllocator_.Realloc(originalPtr, originalSize, newSize);
    }
    static void Free(void *ptr) RAPIDJSON_NOEXCEPT
    {
        BaseAllocator::Free(ptr);
    }

private:
    // Rebound instantiations need access to each other's baseAllocator_.
    template <typename, typename>
    friend class StdAllocator;

    BaseAllocator baseAllocator_; //!< Underlying rapidjson allocator (by value).
};
0636
0637 #if !RAPIDJSON_HAS_CXX17
//! Specialization of StdAllocator for void (pre-C++17 only; see the
//! surrounding RAPIDJSON_HAS_CXX17 guard).
/*! Containers may rebind through an allocator<void>; this specialization
    carries only the typedefs and the BaseAllocator instance needed for
    rebinding — no allocate/deallocate members.
*/
template <typename BaseAllocator>
class StdAllocator<void, BaseAllocator> :
    public std::allocator<void>
{
    typedef std::allocator<void> allocator_type;

public:
    typedef BaseAllocator BaseAllocatorType;

    //! Default constructor: default-constructed BaseAllocator.
    StdAllocator() RAPIDJSON_NOEXCEPT :
        allocator_type(),
        baseAllocator_()
    { }

    //! Copy constructor: copies the underlying BaseAllocator.
    StdAllocator(const StdAllocator& rhs) RAPIDJSON_NOEXCEPT :
        allocator_type(rhs),
        baseAllocator_(rhs.baseAllocator_)
    { }

    //! Converting constructor from an allocator of another value type.
    template<typename U>
    StdAllocator(const StdAllocator<U, BaseAllocator>& rhs) RAPIDJSON_NOEXCEPT :
        allocator_type(rhs),
        baseAllocator_(rhs.baseAllocator_)
    { }

    //! Construct around an existing BaseAllocator (copied).
    /* implicit */
    StdAllocator(const BaseAllocator& baseAllocator) RAPIDJSON_NOEXCEPT :
        allocator_type(),
        baseAllocator_(baseAllocator)
    { }

    ~StdAllocator() RAPIDJSON_NOEXCEPT
    { }

    //! Rebind to another value type, keeping the same BaseAllocator.
    template<typename U>
    struct rebind {
        typedef StdAllocator<U, BaseAllocator> other;
    };

    typedef typename allocator_type::value_type value_type;

private:
    // Rebound instantiations need access to each other's baseAllocator_.
    template <typename, typename>
    friend class StdAllocator;

    BaseAllocator baseAllocator_; //!< Underlying rapidjson allocator (by value).
};
0685 #endif
0686
0687 #ifdef __GNUC__
0688 RAPIDJSON_DIAG_POP
0689 #endif
0690
0691 RAPIDJSON_NAMESPACE_END
0692
0693 #endif