File indexing completed on 2025-02-22 10:42:25
0001
0002
0003
0004
0005 #ifndef INCLUDE_CPPGC_INTERNAL_MEMBER_STORAGE_H_
0006 #define INCLUDE_CPPGC_INTERNAL_MEMBER_STORAGE_H_
0007
0008 #include <atomic>
0009 #include <cstddef>
0010 #include <type_traits>
0011
0012 #include "cppgc/internal/api-constants.h"
0013 #include "cppgc/internal/logging.h"
0014 #include "cppgc/sentinel-pointer.h"
0015 #include "v8config.h" // NOLINT(build/include_directory)
0016
0017 namespace cppgc {
0018 namespace internal {
0019
// Tells the write-barrier implementation which flavor of slot it is
// operating on: a 32-bit compressed slot or a full-width pointer slot.
enum class WriteBarrierSlotType {
  kCompressed = 0,
  kUncompressed = 1,
};
0024
0025 #if defined(CPPGC_POINTER_COMPRESSION)
0026
0027 #if defined(__clang__)
0028
0029
0030 #define CPPGC_CONST __attribute__((const))
0031 #define CPPGC_REQUIRE_CONSTANT_INIT \
0032 __attribute__((require_constant_initialization))
0033 #else
0034 #define CPPGC_CONST
0035 #define CPPGC_REQUIRE_CONSTANT_INIT
0036 #endif
0037
// Process-global holder of the cage base address used for pointer
// compression. The base is written only by the GC-internal
// CageBaseGlobalUpdater (friend below) and read on every compressed
// pointer access, so the accessors are inline and marked CPPGC_CONST
// (__attribute__((const)) on clang) to let the compiler CSE repeated
// calls within a function.
class V8_EXPORT CageBaseGlobal final {
 public:
  // Returns the raw stored base word (including the low-bit marker, see
  // kLowerHalfWordMask). Callers mask as needed.
  V8_INLINE CPPGC_CONST static uintptr_t Get() {
    CPPGC_DCHECK(IsBaseConsistent());
    return g_base_.base;
  }

  // True iff a real base has been installed: the bits above the
  // reservation-alignment mask are non-zero.
  V8_INLINE CPPGC_CONST static bool IsSet() {
    CPPGC_DCHECK(IsBaseConsistent());
    return (g_base_.base & ~kLowerHalfWordMask) != 0;
  }

 private:
  // The cage base is aligned to the heap reservation, so the low bits are
  // unused for addressing; IsBaseConsistent() requires them to read as
  // all-ones, which serves as a corruption/consistency marker.
  static constexpr uintptr_t kLowerHalfWordMask =
      (api_constants::kCagedHeapReservationAlignment - 1);

  // The base word is padded out to a full cache line (presumably to keep
  // this hot read-mostly global on its own line — NOTE(review): confirm
  // intent). Must have constant (non-dynamic) initialization, enforced on
  // clang by CPPGC_REQUIRE_CONSTANT_INIT.
  static union alignas(api_constants::kCachelineSize) Base {
    uintptr_t base;
    char cache_line[api_constants::kCachelineSize];
  } g_base_ CPPGC_REQUIRE_CONSTANT_INIT;

  // Fully-static utility class; never instantiated.
  CageBaseGlobal() = delete;

  // Checks that the low bits of the stored base still carry the all-ones
  // marker, which must hold both before and after the base is installed.
  V8_INLINE static bool IsBaseConsistent() {
    return kLowerHalfWordMask == (g_base_.base & kLowerHalfWordMask);
  }

  // Only the GC-internal updater may mutate g_base_.
  friend class CageBaseGlobalUpdater;
};
0068
0069 #undef CPPGC_REQUIRE_CONSTANT_INIT
0070 #undef CPPGC_CONST
0071
// Compressed (32-bit) member storage for Member<T> under
// CPPGC_POINTER_COMPRESSION. A full pointer is compressed by dropping the
// cage base (and, outside the 2GB-cage configuration, shifting right by
// kPointerCompressionShift); decompression sign-extends and ORs the base
// back in via a mask (see Decompress()).
class V8_TRIVIAL_ABI CompressedPointer final {
 public:
  using IntegralType = uint32_t;
  static constexpr auto kWriteBarrierSlotType =
      WriteBarrierSlotType::kCompressed;

  // Null by default; 0 is the compressed representation of nullptr.
  V8_INLINE CompressedPointer() : value_(0u) {}
  V8_INLINE explicit CompressedPointer(const void* ptr)
      : value_(Compress(ptr)) {}
  V8_INLINE explicit CompressedPointer(std::nullptr_t) : value_(0u) {}
  V8_INLINE explicit CompressedPointer(SentinelPointer)
      : value_(kCompressedSentinel) {}

  V8_INLINE const void* Load() const { return Decompress(value_); }
  // Relaxed atomic load of the 32-bit payload; decompression itself needs
  // no synchronization as it only reads the (stable) cage base.
  V8_INLINE const void* LoadAtomic() const {
    return Decompress(
        reinterpret_cast<const std::atomic<IntegralType>&>(value_).load(
            std::memory_order_relaxed));
  }

  V8_INLINE void Store(const void* ptr) { value_ = Compress(ptr); }
  // Relaxed atomic store of the compressed payload.
  V8_INLINE void StoreAtomic(const void* value) {
    reinterpret_cast<std::atomic<IntegralType>&>(value_).store(
        Compress(value), std::memory_order_relaxed);
  }

  V8_INLINE void Clear() { value_ = 0u; }
  V8_INLINE bool IsCleared() const { return !value_; }

  V8_INLINE bool IsSentinel() const { return value_ == kCompressedSentinel; }

  V8_INLINE uint32_t GetAsInteger() const { return value_; }

  // Comparisons operate on the compressed representation directly; since
  // compression is order-preserving within a cage, this matches comparing
  // the decompressed pointers.
  V8_INLINE friend bool operator==(CompressedPointer a, CompressedPointer b) {
    return a.value_ == b.value_;
  }
  V8_INLINE friend bool operator!=(CompressedPointer a, CompressedPointer b) {
    return a.value_ != b.value_;
  }
  V8_INLINE friend bool operator<(CompressedPointer a, CompressedPointer b) {
    return a.value_ < b.value_;
  }
  V8_INLINE friend bool operator<=(CompressedPointer a, CompressedPointer b) {
    return a.value_ <= b.value_;
  }
  V8_INLINE friend bool operator>(CompressedPointer a, CompressedPointer b) {
    return a.value_ > b.value_;
  }
  V8_INLINE friend bool operator>=(CompressedPointer a, CompressedPointer b) {
    return a.value_ >= b.value_;
  }

  // Compresses a full pointer into 32 bits. Null and the sentinel map to
  // 0 and kCompressedSentinel respectively; any other pointer must lie in
  // the current cage and be aligned to 1 << kPointerCompressionShift.
  static V8_INLINE IntegralType Compress(const void* ptr) {
    static_assert(SentinelPointer::kSentinelValue ==
                      1 << api_constants::kPointerCompressionShift,
                  "The compression scheme relies on the sentinel encoded as 1 "
                  "<< kPointerCompressionShift");
    static constexpr size_t kGigaCageMask =
        ~(api_constants::kCagedHeapReservationAlignment - 1);
    static constexpr size_t kPointerCompressionShiftMask =
        (1 << api_constants::kPointerCompressionShift) - 1;

    CPPGC_DCHECK(CageBaseGlobal::IsSet());
    const uintptr_t base = CageBaseGlobal::Get();
    // Non-null, non-sentinel pointers must share the cage base.
    CPPGC_DCHECK(!ptr || ptr == kSentinelPointer ||
                 (base & kGigaCageMask) ==
                     (reinterpret_cast<uintptr_t>(ptr) & kGigaCageMask));
    // The shifted-out low bits must be zero, or compression would be lossy.
    CPPGC_DCHECK(
        (reinterpret_cast<uintptr_t>(ptr) & kPointerCompressionShiftMask) == 0);

#if defined(CPPGC_2GB_CAGE)
    // 2GB cage: the offset already fits in 32 bits; just truncate.
    auto compressed =
        static_cast<IntegralType>(reinterpret_cast<uintptr_t>(ptr));
#else
    const auto uptr = reinterpret_cast<uintptr_t>(ptr);
    // Shift out the always-zero alignment bits to fit the offset in 32 bits.
    auto compressed = static_cast<IntegralType>(
        uptr >> api_constants::kPointerCompressionShift);
#endif

    // Any value other than null/sentinel must have its top bit set, so that
    // sign extension in Decompress() reconstructs the upper half correctly.
    CPPGC_DCHECK((!compressed || compressed == kCompressedSentinel) ||
                 (compressed & (1 << 31)));
    return compressed;
  }

  // Decompresses a 32-bit payload back into a full pointer within the
  // current cage.
  static V8_INLINE void* Decompress(IntegralType ptr) {
    CPPGC_DCHECK(CageBaseGlobal::IsSet());
    const uintptr_t base = CageBaseGlobal::Get();
    // Sign-extend the payload (via int32_t) and shift it back into place.
    // For non-null values the top bit is set (see DCHECK in Compress()), so
    // the upper half of `mask` becomes all-ones and `mask & base` yields the
    // cage base's high bits combined with the payload's low bits. A zero
    // payload produces a zero mask, so null decompresses to null.
#if defined(CPPGC_2GB_CAGE)
    const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(ptr));
#else
    const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(ptr))
                          << api_constants::kPointerCompressionShift;
#endif
    return reinterpret_cast<void*>(mask & base);
  }

 private:
  // Compressed form of kSentinelPointer; with the shift applied this is 1,
  // distinct from both null (0) and any real (top-bit-set) pointer.
#if defined(CPPGC_2GB_CAGE)
  static constexpr IntegralType kCompressedSentinel =
      SentinelPointer::kSentinelValue;
#else
  static constexpr IntegralType kCompressedSentinel =
      SentinelPointer::kSentinelValue >>
      api_constants::kPointerCompressionShift;
#endif

  // The single 32-bit compressed payload.
  IntegralType value_;
};
0188
0189 #endif
0190
0191 class V8_TRIVIAL_ABI RawPointer final {
0192 public:
0193 using IntegralType = uintptr_t;
0194 static constexpr auto kWriteBarrierSlotType =
0195 WriteBarrierSlotType::kUncompressed;
0196
0197 V8_INLINE RawPointer() : ptr_(nullptr) {}
0198 V8_INLINE explicit RawPointer(const void* ptr) : ptr_(ptr) {}
0199
0200 V8_INLINE const void* Load() const { return ptr_; }
0201 V8_INLINE const void* LoadAtomic() const {
0202 return reinterpret_cast<const std::atomic<const void*>&>(ptr_).load(
0203 std::memory_order_relaxed);
0204 }
0205
0206 V8_INLINE void Store(const void* ptr) { ptr_ = ptr; }
0207 V8_INLINE void StoreAtomic(const void* ptr) {
0208 reinterpret_cast<std::atomic<const void*>&>(ptr_).store(
0209 ptr, std::memory_order_relaxed);
0210 }
0211
0212 V8_INLINE void Clear() { ptr_ = nullptr; }
0213 V8_INLINE bool IsCleared() const { return !ptr_; }
0214
0215 V8_INLINE bool IsSentinel() const { return ptr_ == kSentinelPointer; }
0216
0217 V8_INLINE uintptr_t GetAsInteger() const {
0218 return reinterpret_cast<uintptr_t>(ptr_);
0219 }
0220
0221 V8_INLINE friend bool operator==(RawPointer a, RawPointer b) {
0222 return a.ptr_ == b.ptr_;
0223 }
0224 V8_INLINE friend bool operator!=(RawPointer a, RawPointer b) {
0225 return a.ptr_ != b.ptr_;
0226 }
0227 V8_INLINE friend bool operator<(RawPointer a, RawPointer b) {
0228 return a.ptr_ < b.ptr_;
0229 }
0230 V8_INLINE friend bool operator<=(RawPointer a, RawPointer b) {
0231 return a.ptr_ <= b.ptr_;
0232 }
0233 V8_INLINE friend bool operator>(RawPointer a, RawPointer b) {
0234 return a.ptr_ > b.ptr_;
0235 }
0236 V8_INLINE friend bool operator>=(RawPointer a, RawPointer b) {
0237 return a.ptr_ >= b.ptr_;
0238 }
0239
0240 private:
0241
0242
0243
0244 const void* ptr_;
0245 };
0246
// Storage flavor Member<T> uses by default: compressed 32-bit slots when
// pointer compression is enabled, full-width pointers otherwise.
#if defined(CPPGC_POINTER_COMPRESSION)
using DefaultMemberStorage = CompressedPointer;
#else
using DefaultMemberStorage = RawPointer;
#endif
0252
0253 }
0254 }
0255
0256 #endif