// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
//
// This file defines the internal class SerialArena.
#ifndef GOOGLE_PROTOBUF_SERIAL_ARENA_H__
#define GOOGLE_PROTOBUF_SERIAL_ARENA_H__

#include <algorithm>
#include <atomic>
#include <cstddef>
#include <cstdint>
#include <string>
#include <vector>

#include "absl/base/attributes.h"
#include "absl/base/optimization.h"
#include "absl/base/prefetch.h"
#include "absl/log/absl_check.h"
#include "absl/numeric/bits.h"
#include "google/protobuf/arena_align.h"
#include "google/protobuf/arena_cleanup.h"
#include "google/protobuf/port.h"
#include "google/protobuf/string_block.h"

// Must be included last.
#include "google/protobuf/port_def.inc"

namespace google {
namespace protobuf {
namespace internal {

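// Arena blocks are variable-length, malloc-ed buffers. This struct is the
// common header placed at the start of every block; the memory handed out for
// allocations follows the header.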
struct ArenaBlock {
  // For the sentry block: a zero-sized placeholder that is never allocated
  // from (see IsSentry()).
  constexpr ArenaBlock() : next(nullptr), size(0) {}

  // For regular blocks, which must be larger than the header itself.
  ArenaBlock(ArenaBlock* next, size_t size) : next(next), size(size) {
    ABSL_DCHECK_GT(size, sizeof(ArenaBlock));
  }

  char* Pointer(size_t n) {
    ABSL_DCHECK_LE(n, size);
    return reinterpret_cast<char*>(this) + n;
  }
  char* Limit() { return Pointer(size & static_cast<size_t>(-8)); }

  bool IsSentry() const { return size == 0; }

  ArenaBlock* const next;
  const size_t size;
};

enum class AllocationClient { kDefault, kArray };

class ThreadSafeArena;

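// Tag type used to select the constructor overload for the first SerialArena
// owned by a ThreadSafeArena.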
struct FirstSerialArena {
  explicit FirstSerialArena() = default;
};

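// A simple bump-pointer arena allocator backed by a linked list of memory
// blocks. Allocation calls must be serialized by the caller, so a SerialArena
// cannot be used as a general-purpose allocator in a multi-threaded program;
// it serves as the building block for ThreadSafeArena, which provides the
// thread-safe allocator. Besides bump allocation it maintains a cleanup list
// of registered destructors and a free list of blocks returned by repeated
// fields, and it delegates new block acquisition to its parent
// ThreadSafeArena.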
class PROTOBUF_EXPORT SerialArena {
 public:
  static constexpr size_t kBlockHeaderSize =
      ArenaAlignDefault::Ceil(sizeof(ArenaBlock));

  void CleanupList() { cleanup_list_.Cleanup(*this); }
  uint64_t SpaceAllocated() const {
    return space_allocated_.load(std::memory_order_relaxed);
  }
  uint64_t SpaceUsed() const;

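  // Looks up the size-class free list populated by ReturnArrayMemory() (see
  // the comments on `cached_blocks_` below) and pops a cached block of at
  // least `size` bytes if one is available. Returns nullptr otherwise.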
  PROTOBUF_ALWAYS_INLINE void* TryAllocateFromCachedBlock(size_t size) {
    if (PROTOBUF_PREDICT_FALSE(size < 16)) return nullptr;
    // Round up to the next size class so that the returned block is at least
    // `size` bytes: sizes in (2^(index+3), 2^(index+4)] map to `index`.
    const size_t index = absl::bit_width(size - 1) - 4;

    if (PROTOBUF_PREDICT_FALSE(index >= cached_block_length_)) return nullptr;
    auto& cached_head = cached_blocks_[index];
    if (cached_head == nullptr) return nullptr;

    void* ret = cached_head;
    PROTOBUF_UNPOISON_MEMORY_REGION(ret, size);
    cached_head = cached_head->next;
    return ret;
  }

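  // Allocates `n` bytes with default (8-byte) alignment. For the kArray
  // client the cached block free list is consulted first; the default client
  // skips that lookup, since typical non-array allocation sizes rarely match
  // a cached block.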
  template <AllocationClient alloc_client = AllocationClient::kDefault>
  void* AllocateAligned(size_t n) {
    ABSL_DCHECK(internal::ArenaAlignDefault::IsAligned(n));
    ABSL_DCHECK_GE(limit_, ptr());

    if (alloc_client == AllocationClient::kArray) {
      if (void* res = TryAllocateFromCachedBlock(n)) {
        return res;
      }
    }

    void* ptr;
    if (PROTOBUF_PREDICT_TRUE(MaybeAllocateAligned(n, &ptr))) {
      return ptr;
    }
    return AllocateAlignedFallback(n);
  }

 private:
  static inline PROTOBUF_ALWAYS_INLINE constexpr size_t AlignUpTo(size_t n,
                                                                  size_t a) {
    // For alignments larger than the default we over-allocate by padding with
    // `a - 8` extra bytes, so that a default-aligned pointer inside the
    // region can always be bumped up to an `a`-aligned address.
    return a <= 8 ? ArenaAlignDefault::Ceil(n) : ArenaAlignAs(a).Padded(n);
  }

  static inline PROTOBUF_ALWAYS_INLINE void* AlignTo(void* p, size_t a) {
    return (a <= ArenaAlignDefault::align)
               ? ArenaAlignDefault::CeilDefaultAligned(p)
               : ArenaAlignAs(a).CeilDefaultAligned(p);
  }

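  // Accepts a block that a Repeated*Field no longer needs and places it on
  // the size-class free list (see `cached_blocks_`), so that later array
  // allocations can reuse it instead of consuming fresh arena space.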
  void ReturnArrayMemory(void* p, size_t size) {
    // Only 32-bit platforms need the explicit minimum-size check; on 64-bit
    // platforms the repeated field implementation guarantees at least 16
    // bytes.
    if (sizeof(void*) < 8) {
      if (PROTOBUF_PREDICT_FALSE(size < 16)) return;
    } else {
      PROTOBUF_ASSUME(size >= 16);
    }

    // Round down to the next smaller size class in case `size` does not match
    // the usual growth pattern (e.g. after a Reserve() call), so the cached
    // block is never smaller than what its index promises.
    const size_t index = absl::bit_width(size) - 5;

    if (PROTOBUF_PREDICT_FALSE(index >= cached_block_length_)) {
      // The block does not fit on the current free list array, so turn the
      // block itself into the new, larger array of list heads: it is
      // guaranteed to be bigger than the existing array and large enough to
      // hold all existing list heads.
      CachedBlock** new_list = static_cast<CachedBlock**>(p);
      size_t new_size = size / sizeof(CachedBlock*);

      std::copy(cached_blocks_, cached_blocks_ + cached_block_length_,
                new_list);

      // Unpoison before filling, in case another sanitizer client has
      // poisoned this memory.
      PROTOBUF_UNPOISON_MEMORY_REGION(
          new_list + cached_block_length_,
          (new_size - cached_block_length_) * sizeof(CachedBlock*));

      std::fill(new_list + cached_block_length_, new_list + new_size, nullptr);

      cached_blocks_ = new_list;

      // Cap the length so it fits in uint8_t; 64 size classes are plenty.
      cached_block_length_ =
          static_cast<uint8_t>(std::min(size_t{64}, new_size));

      return;
    }

    auto& cached_head = cached_blocks_[index];
    auto* new_node = static_cast<CachedBlock*>(p);
    new_node->next = cached_head;
    cached_head = new_node;
    PROTOBUF_POISON_MEMORY_REGION(p, size);
  }

 public:
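  // Attempts to allocate `n` default-aligned bytes from the space remaining
  // in the current block. On success stores the pointer in `*out` and returns
  // true; returns false (without allocating) if the block is exhausted.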
  bool MaybeAllocateAligned(size_t n, void** out) {
    ABSL_DCHECK(internal::ArenaAlignDefault::IsAligned(n));
    ABSL_DCHECK_GE(limit_, ptr());
    char* ret = ptr();
    // `ret + n` may point past the end of the block and `ret` may be nullptr;
    // both are undefined behavior on pointers, so compare as uintptr_t
    // instead.
    if (PROTOBUF_PREDICT_FALSE(reinterpret_cast<uintptr_t>(ret) + n >
                               reinterpret_cast<uintptr_t>(limit_))) {
      return false;
    }
    PROTOBUF_UNPOISON_MEMORY_REGION(ret, n);
    *out = ret;
    char* next = ret + n;
    set_ptr(next);
    MaybePrefetchData(next);
    return true;
  }

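  // If the active string block has room, returns uninitialized storage for
  // one std::string object; otherwise returns nullptr. The caller is expected
  // to construct the string in place.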
  PROTOBUF_ALWAYS_INLINE void* MaybeAllocateStringWithCleanup() {
    void* p;
    return MaybeAllocateString(p) ? p : nullptr;
  }

  PROTOBUF_ALWAYS_INLINE
  void* AllocateAlignedWithCleanup(size_t n, size_t align,
                                   void (*destructor)(void*)) {
    n = ArenaAlignDefault::Ceil(n);
    char* ret = ArenaAlignAs(align).CeilDefaultAligned(ptr());
    // See MaybeAllocateAligned() for why the bounds check uses uintptr_t.
    if (PROTOBUF_PREDICT_FALSE(reinterpret_cast<uintptr_t>(ret) + n >
                               reinterpret_cast<uintptr_t>(limit_))) {
      return AllocateAlignedWithCleanupFallback(n, align, destructor);
    }
    PROTOBUF_UNPOISON_MEMORY_REGION(ret, n);
    char* next = ret + n;
    set_ptr(next);
    AddCleanup(ret, destructor);
    ABSL_DCHECK_GE(limit_, ptr());
    MaybePrefetchData(next);
    return ret;
  }

  // Registers `destructor` to be invoked on `elem` when the arena cleans up.
  PROTOBUF_ALWAYS_INLINE
  void AddCleanup(void* elem, void (*destructor)(void*)) {
    cleanup_list_.Add(elem, destructor, *this);
    MaybePrefetchCleanup();
  }

  ABSL_ATTRIBUTE_RETURNS_NONNULL void* AllocateFromStringBlock();

  std::vector<void*> PeekCleanupListForTesting();

 private:
  friend class ThreadSafeArena;
  friend class cleanup::ChunkList;

  // Node in the per-size-class free lists; see `cached_blocks_`.
  struct CachedBlock {
    // Simple linked list.
    CachedBlock* next;
  };

  static constexpr ptrdiff_t kPrefetchDataDegree = ABSL_CACHELINE_SIZE * 16;
  static constexpr ptrdiff_t kPrefetchCleanupDegree = ABSL_CACHELINE_SIZE * 6;

  // Constructors are private: instances are created via New().
  inline SerialArena(ArenaBlock* b, ThreadSafeArena& parent);

  // Constructors for the first SerialArena of a ThreadSafeArena.
  inline explicit SerialArena(ThreadSafeArena& parent);
  inline SerialArena(FirstSerialArena, ArenaBlock* b, ThreadSafeArena& parent);

  bool MaybeAllocateString(void*& p);
  ABSL_ATTRIBUTE_RETURNS_NONNULL void* AllocateFromStringBlockFallback();

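  // Prefetches up to `prefetch_degree` bytes ahead of `next` (bounded by
  // `limit`), starting from the previous prefetch position, and returns the
  // new prefetch position. Shared by the data and cleanup prefetch helpers
  // below.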
  PROTOBUF_ALWAYS_INLINE
  static const char* MaybePrefetchImpl(const ptrdiff_t prefetch_degree,
                                       const char* next, const char* limit,
                                       const char* prefetch_ptr) {
    if (PROTOBUF_PREDICT_TRUE(prefetch_ptr - next > prefetch_degree))
      return prefetch_ptr;
    if (PROTOBUF_PREDICT_TRUE(prefetch_ptr < limit)) {
      prefetch_ptr = std::max(next, prefetch_ptr);
      ABSL_DCHECK(prefetch_ptr != nullptr);
      const char* end = std::min(limit, prefetch_ptr + prefetch_degree);
      for (; prefetch_ptr < end; prefetch_ptr += ABSL_CACHELINE_SIZE) {
        absl::PrefetchToLocalCacheForWrite(prefetch_ptr);
      }
    }
    return prefetch_ptr;
  }
  PROTOBUF_ALWAYS_INLINE
  void MaybePrefetchData(const char* next) {
    ABSL_DCHECK(static_cast<const void*>(prefetch_ptr_) == nullptr ||
                static_cast<const void*>(prefetch_ptr_) >= head());
    prefetch_ptr_ =
        MaybePrefetchImpl(kPrefetchDataDegree, next, limit_, prefetch_ptr_);
  }
  PROTOBUF_ALWAYS_INLINE
  void MaybePrefetchCleanup() {
    ABSL_DCHECK(static_cast<const void*>(cleanup_list_.prefetch_ptr_) ==
                    nullptr ||
                static_cast<const void*>(cleanup_list_.prefetch_ptr_) >=
                    cleanup_list_.head_);
    cleanup_list_.prefetch_ptr_ = MaybePrefetchImpl(
        kPrefetchCleanupDegree, reinterpret_cast<char*>(cleanup_list_.next_),
        reinterpret_cast<char*>(cleanup_list_.limit_),
        cleanup_list_.prefetch_ptr_);
  }

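  // Creates a new SerialArena inside `mem`, using the remaining space of
  // `mem` for future allocations. The parent ThreadSafeArena must outlive the
  // SerialArena. Free() releases the arena and returns the memory originally
  // passed to New().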
  static SerialArena* New(SizedPtr mem, ThreadSafeArena& parent);

  template <typename Deallocator>
  SizedPtr Free(Deallocator deallocator);

  size_t FreeStringBlocks() {
    // On the active block, skip the unused instances when destroying strings.
    size_t unused_bytes = string_block_unused_.load(std::memory_order_relaxed);
    if (StringBlock* sb = string_block_.load(std::memory_order_relaxed)) {
      return FreeStringBlocks(sb, unused_bytes);
    }
    return 0;
  }
  static size_t FreeStringBlocks(StringBlock* string_block, size_t unused);

  // Adds `space_used` to space_used_ in relaxed atomic order.
  void AddSpaceUsed(size_t space_used) {
    space_used_.store(space_used_.load(std::memory_order_relaxed) + space_used,
                      std::memory_order_relaxed);
  }

  // Adds `space_allocated` to space_allocated_ in relaxed atomic order.
  void AddSpaceAllocated(size_t space_allocated) {
    space_allocated_.store(
        space_allocated_.load(std::memory_order_relaxed) + space_allocated,
        std::memory_order_relaxed);
  }

  // Helper getters/setters performing relaxed operations on the atomics.
  ArenaBlock* head() { return head_.load(std::memory_order_relaxed); }
  const ArenaBlock* head() const {
    return head_.load(std::memory_order_relaxed);
  }

  char* ptr() { return ptr_.load(std::memory_order_relaxed); }
  const char* ptr() const { return ptr_.load(std::memory_order_relaxed); }
  void set_ptr(char* ptr) { return ptr_.store(ptr, std::memory_order_relaxed); }
  PROTOBUF_ALWAYS_INLINE void set_range(char* ptr, char* limit) {
    set_ptr(ptr);
    prefetch_ptr_ = ptr;
    limit_ = limit;
  }

  void* AllocateAlignedFallback(size_t n);
  void* AllocateAlignedWithCleanupFallback(size_t n, size_t align,
                                           void (*destructor)(void*));
  void AddCleanupFallback(void* elem, void (*destructor)(void*));
  inline void AllocateNewBlock(size_t n);
  inline void Init(ArenaBlock* b, size_t offset);

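  // The bump-allocation pointer for the current (head) block. Always kept
  // 8-byte aligned and pointing inside the block at head_.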
  std::atomic<char*> ptr_{nullptr};

  // End of the usable space in the head block; allocation past this point
  // falls back to acquiring a new block.
  char* limit_ = nullptr;

  // Current data prefetch position: bytes from ptr_ up to but not including
  // prefetch_ptr_ have already been prefetched.
  const char* prefetch_ptr_ = nullptr;

  // Chunked list of (element, destructor) pairs to run at arena destruction.
  cleanup::ChunkList cleanup_list_;

  // The active block used to allocate std::string objects.
  std::atomic<StringBlock*> string_block_{nullptr};

  // Number of unused bytes remaining in string_block_. String allocations are
  // carved out from this offset downwards; 0 means the block is exhausted (or
  // there is no block).
  std::atomic<size_t> string_block_unused_{0};

  // Head of the linked list of blocks.
  std::atomic<ArenaBlock*> head_{nullptr};
  std::atomic<size_t> space_used_{0};  // Tracked for metrics.
  std::atomic<size_t> space_allocated_{0};
  ThreadSafeArena& parent_;

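  // Per-size-class free lists of blocks returned by Repeated*Field via
  // ReturnArrayMemory(). `cached_blocks_[i]` heads the list for the i-th
  // power-of-two size class, and `cached_block_length_` is the number of
  // usable entries. The array itself is grown lazily inside
  // ReturnArrayMemory().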
  uint8_t cached_block_length_ = 0;
  CachedBlock** cached_blocks_ = nullptr;
};

inline PROTOBUF_ALWAYS_INLINE bool SerialArena::MaybeAllocateString(void*& p) {
  // Check whether the current string block still has unused instances.
  size_t unused_bytes = string_block_unused_.load(std::memory_order_relaxed);
  if (PROTOBUF_PREDICT_TRUE(unused_bytes != 0)) {
    unused_bytes -= sizeof(std::string);
    string_block_unused_.store(unused_bytes, std::memory_order_relaxed);
    p = string_block_.load(std::memory_order_relaxed)->AtOffset(unused_bytes);
    return true;
  }
  return false;
}

ABSL_ATTRIBUTE_RETURNS_NONNULL inline PROTOBUF_ALWAYS_INLINE void*
SerialArena::AllocateFromStringBlock() {
  void* p;
  if (ABSL_PREDICT_TRUE(MaybeAllocateString(p))) return p;
  return AllocateFromStringBlockFallback();
}

}  // namespace internal
}  // namespace protobuf
}  // namespace google

#include "google/protobuf/port_undef.inc"

#endif  // GOOGLE_PROTOBUF_SERIAL_ARENA_H__