0001 // Copyright 2018 the V8 project authors. All rights reserved.
0002 // Use of this source code is governed by a BSD-style license that can be
0003 // found in the LICENSE file.
0004 
0005 #ifndef INCLUDE_V8_INTERNAL_H_
0006 #define INCLUDE_V8_INTERNAL_H_
0007 
0008 #include <stddef.h>
0009 #include <stdint.h>
0010 #include <string.h>
0011 
0012 #include <atomic>
0013 #include <iterator>
0014 #include <memory>
0015 #include <type_traits>
0016 
0017 #include "v8config.h"  // NOLINT(build/include_directory)
0018 
0019 namespace v8 {
0020 
0021 class Array;
0022 class Context;
0023 class Data;
0024 class Isolate;
0025 
0026 namespace internal {
0027 
0028 class Heap;
0029 class Isolate;
0030 
0031 typedef uintptr_t Address;
0032 static constexpr Address kNullAddress = 0;
0033 
0034 constexpr int KB = 1024;
0035 constexpr int MB = KB * 1024;
0036 constexpr int GB = MB * 1024;
0037 #ifdef V8_TARGET_ARCH_X64
0038 constexpr size_t TB = size_t{GB} * 1024;
0039 #endif
0040 
0041 /**
0042  * Configuration of tagging scheme.
0043  */
0044 const int kApiSystemPointerSize = sizeof(void*);
0045 const int kApiDoubleSize = sizeof(double);
0046 const int kApiInt32Size = sizeof(int32_t);
0047 const int kApiInt64Size = sizeof(int64_t);
0048 const int kApiSizetSize = sizeof(size_t);
0049 
0050 // Tag information for HeapObject.
0051 const int kHeapObjectTag = 1;
0052 const int kWeakHeapObjectTag = 3;
0053 const int kHeapObjectTagSize = 2;
0054 const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;
0055 const intptr_t kHeapObjectReferenceTagMask = 1 << (kHeapObjectTagSize - 1);
0056 
0057 // Tag information for forwarding pointers stored in object headers.
0058 // 0b00 at the lowest 2 bits in the header indicates that the map word is a
0059 // forwarding pointer.
0060 const int kForwardingTag = 0;
0061 const int kForwardingTagSize = 2;
0062 const intptr_t kForwardingTagMask = (1 << kForwardingTagSize) - 1;
0063 
0064 // Tag information for Smi.
0065 const int kSmiTag = 0;
0066 const int kSmiTagSize = 1;
0067 const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;
0068 
0069 template <size_t tagged_ptr_size>
0070 struct SmiTagging;
0071 
0072 constexpr intptr_t kIntptrAllBitsSet = intptr_t{-1};
0073 constexpr uintptr_t kUintptrAllBitsSet =
0074     static_cast<uintptr_t>(kIntptrAllBitsSet);
0075 
0076 // Smi constants for systems where tagged pointer is a 32-bit value.
0077 template <>
0078 struct SmiTagging<4> {
0079   enum { kSmiShiftSize = 0, kSmiValueSize = 31 };
0080 
0081   static constexpr intptr_t kSmiMinValue =
0082       static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
0083   static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);
0084 
0085   V8_INLINE static constexpr int SmiToInt(Address value) {
0086     int shift_bits = kSmiTagSize + kSmiShiftSize;
0087     // Truncate and shift down (requires >> to be sign extending).
0088     return static_cast<int32_t>(static_cast<uint32_t>(value)) >> shift_bits;
0089   }
0090   V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
0091     // Is value in range [kSmiMinValue, kSmiMaxValue].
0092     // Use unsigned operations in order to avoid undefined behaviour in case of
0093     // signed integer overflow.
0094     return (static_cast<uintptr_t>(value) -
0095             static_cast<uintptr_t>(kSmiMinValue)) <=
0096            (static_cast<uintptr_t>(kSmiMaxValue) -
0097             static_cast<uintptr_t>(kSmiMinValue));
0098   }
0099 };
0100 
0101 // Smi constants for systems where tagged pointer is a 64-bit value.
0102 template <>
0103 struct SmiTagging<8> {
0104   enum { kSmiShiftSize = 31, kSmiValueSize = 32 };
0105 
0106   static constexpr intptr_t kSmiMinValue =
0107       static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
0108   static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);
0109 
0110   V8_INLINE static constexpr int SmiToInt(Address value) {
0111     int shift_bits = kSmiTagSize + kSmiShiftSize;
0112     // Shift down and throw away top 32 bits.
0113     return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits);
0114   }
0115   V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
0116     // To be representable as a long smi, the value must be a 32-bit integer.
0117     return (value == static_cast<int32_t>(value));
0118   }
0119 };
0120 
0121 #ifdef V8_COMPRESS_POINTERS
0122 // See v8:7703 or src/common/ptr-compr-inl.h for details about pointer
0123 // compression.
0124 constexpr size_t kPtrComprCageReservationSize = size_t{1} << 32;
0125 constexpr size_t kPtrComprCageBaseAlignment = size_t{1} << 32;
0126 
0127 static_assert(
0128     kApiSystemPointerSize == kApiInt64Size,
0129     "Pointer compression can be enabled only for 64-bit architectures");
0130 const int kApiTaggedSize = kApiInt32Size;
0131 #else
0132 const int kApiTaggedSize = kApiSystemPointerSize;
0133 #endif
0134 
0135 constexpr bool PointerCompressionIsEnabled() {
0136   return kApiTaggedSize != kApiSystemPointerSize;
0137 }
0138 
0139 #ifdef V8_31BIT_SMIS_ON_64BIT_ARCH
0140 using PlatformSmiTagging = SmiTagging<kApiInt32Size>;
0141 #else
0142 using PlatformSmiTagging = SmiTagging<kApiTaggedSize>;
0143 #endif
0144 
0145 // TODO(ishell): Consider adding kSmiShiftBits = kSmiShiftSize + kSmiTagSize
0146 // since it's used much more often than the individual constants.
0147 const int kSmiShiftSize = PlatformSmiTagging::kSmiShiftSize;
0148 const int kSmiValueSize = PlatformSmiTagging::kSmiValueSize;
0149 const int kSmiMinValue = static_cast<int>(PlatformSmiTagging::kSmiMinValue);
0150 const int kSmiMaxValue = static_cast<int>(PlatformSmiTagging::kSmiMaxValue);
0151 constexpr bool SmiValuesAre31Bits() { return kSmiValueSize == 31; }
0152 constexpr bool SmiValuesAre32Bits() { return kSmiValueSize == 32; }
0153 constexpr bool Is64() { return kApiSystemPointerSize == sizeof(int64_t); }
0154 
0155 V8_INLINE static constexpr Address IntToSmi(int value) {
0156   return (static_cast<Address>(value) << (kSmiTagSize + kSmiShiftSize)) |
0157          kSmiTag;
0158 }
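// Worked example (a consistency-check sketch, not part of the V8 API): a small
// integer round-trips through IntToSmi and PlatformSmiTagging::SmiToInt, and
// the encoded word carries kSmiTag in its low bit. Only constants and
// functions defined above are used here.
namespace smi_encoding_example {
constexpr Address kEncodedFortyTwo = IntToSmi(42);
static_assert((kEncodedFortyTwo & kSmiTagMask) == static_cast<Address>(kSmiTag),
              "encoded Smis carry kSmiTag in their low bits");
static_assert(PlatformSmiTagging::SmiToInt(kEncodedFortyTwo) == 42,
              "SmiToInt inverts IntToSmi for in-range values");
static_assert(PlatformSmiTagging::IsValidSmi(kSmiMaxValue) &&
                  !PlatformSmiTagging::IsValidSmi(
                      static_cast<intptr_t>(kSmiMaxValue) + 1),
              "kSmiMaxValue is the largest encodable value");
}  // namespace smi_encoding_example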
0159 
0160 /*
0161  * Sandbox related types, constants, and functions.
0162  */
0163 constexpr bool SandboxIsEnabled() {
0164 #ifdef V8_ENABLE_SANDBOX
0165   return true;
0166 #else
0167   return false;
0168 #endif
0169 }
0170 
0171 // SandboxedPointers are guaranteed to point into the sandbox. This is achieved
0172 // for example by storing them as offsets rather than as raw pointers.
0173 using SandboxedPointer_t = Address;
0174 
0175 #ifdef V8_ENABLE_SANDBOX
0176 
0177 // Size of the sandbox, excluding the guard regions surrounding it.
0178 #if defined(V8_TARGET_OS_ANDROID)
0179 // On Android, most 64-bit devices seem to be configured with only 39 bits of
0180 // virtual address space for userspace. As such, limit the sandbox to 128GB (a
0181 // quarter of the total available address space).
0182 constexpr size_t kSandboxSizeLog2 = 37;  // 128 GB
0183 #elif defined(V8_TARGET_ARCH_LOONG64)
0184 // Some Linux distros on LoongArch64 are configured with only 40 bits of
0185 // virtual address space for userspace. Limit the sandbox to 256GB here.
0186 constexpr size_t kSandboxSizeLog2 = 38;  // 256 GB
0187 #else
0188 // Everywhere else use a 1TB sandbox.
0189 constexpr size_t kSandboxSizeLog2 = 40;  // 1 TB
0190 #endif  // V8_TARGET_OS_ANDROID
0191 constexpr size_t kSandboxSize = 1ULL << kSandboxSizeLog2;
0192 
0193 // Required alignment of the sandbox. For simplicity, we require the
0194 // size of the guard regions to be a multiple of this, so that this specifies
0195 // the alignment of the sandbox including and excluding surrounding guard
0196 // regions. The alignment requirement is due to the pointer compression cage
0197 // being located at the start of the sandbox.
0198 constexpr size_t kSandboxAlignment = kPtrComprCageBaseAlignment;
0199 
0200 // Sandboxed pointers are stored inside the heap as offset from the sandbox
0201 // base shifted to the left. This way, it is guaranteed that the offset is
0202 // smaller than the sandbox size after shifting it to the right again. This
0203 // constant specifies the shift amount.
0204 constexpr uint64_t kSandboxedPointerShift = 64 - kSandboxSizeLog2;
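// A minimal sketch of the encoding described above (these helpers are
// hypothetical and only illustrate the shift; V8's actual accessors also add
// the sandbox base): storing the offset shifted left by kSandboxedPointerShift
// means that whatever 64-bit value an attacker writes, shifting it back right
// yields an offset strictly smaller than kSandboxSize.
constexpr SandboxedPointer_t ExampleEncodeSandboxedOffset(Address offset) {
  return offset << kSandboxedPointerShift;
}
constexpr Address ExampleDecodeSandboxedOffset(SandboxedPointer_t stored) {
  return stored >> kSandboxedPointerShift;
}
static_assert(ExampleDecodeSandboxedOffset(~SandboxedPointer_t{0}) <
                  kSandboxSize,
              "any stored bit pattern decodes to an in-sandbox offset");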
0205 
0206 // Size of the guard regions surrounding the sandbox. This assumes a worst-case
0207 // scenario of a 32-bit unsigned index used to access an array of 64-bit
0208 // values.
0209 constexpr size_t kSandboxGuardRegionSize = 32ULL * GB;
0210 
0211 static_assert((kSandboxGuardRegionSize % kSandboxAlignment) == 0,
0212               "The size of the guard regions around the sandbox must be a "
0213               "multiple of its required alignment.");
0214 
0215 // On OSes where reserving the entire address space backing the sandbox is too
0216 // expensive, notably Windows pre 8.1, we create a partially reserved sandbox
0217 // that doesn't actually reserve most of the memory. Such a sandbox lacks the
0218 // desired security properties, as unrelated memory allocations could end up
0219 // inside of it, but it still ensures that objects that should be located
0220 // inside the sandbox are allocated within kSandboxSize bytes from the start
0221 // of the sandbox. The minimum size of the region that is actually reserved
0222 // for such a sandbox is specified by this constant and should be big enough
0223 // to contain the pointer compression cage as well as the ArrayBuffer
0224 // partition.
0225 constexpr size_t kSandboxMinimumReservationSize = 8ULL * GB;
0226 
0227 static_assert(kSandboxMinimumReservationSize > kPtrComprCageReservationSize,
0228               "The minimum reservation size for a sandbox must be larger than "
0229               "the pointer compression cage contained within it.");
0230 
0231 // The maximum buffer size allowed inside the sandbox. This is mostly dependent
0232 // on the size of the guard regions around the sandbox: an attacker must not be
0233 // able to construct a buffer that appears larger than the guard regions and
0234 // thereby "reach out of" the sandbox.
0235 constexpr size_t kMaxSafeBufferSizeForSandbox = 32ULL * GB - 1;
0236 static_assert(kMaxSafeBufferSizeForSandbox <= kSandboxGuardRegionSize,
0237               "The maximum allowed buffer size must not be larger than the "
0238               "sandbox's guard regions");
0239 
0240 constexpr size_t kBoundedSizeShift = 29;
0241 static_assert(1ULL << (64 - kBoundedSizeShift) ==
0242                   kMaxSafeBufferSizeForSandbox + 1,
0243               "The maximum size of a BoundedSize must be synchronized with the "
0244               "kMaxSafeBufferSizeForSandbox");
0245 
0246 #endif  // V8_ENABLE_SANDBOX
0247 
0248 #ifdef V8_COMPRESS_POINTERS
0249 
0250 #ifdef V8_TARGET_OS_ANDROID
0251 // The size of the virtual memory reservation for an external pointer table.
0252 // This determines the maximum number of entries in a table. Using a maximum
0253 // size allows omitting bounds checks on table accesses if the indices are
0254 // guaranteed (e.g. through shifting) to be below the maximum index. This
0255 // value must be a power of two.
0256 constexpr size_t kExternalPointerTableReservationSize = 512 * MB;
0257 
0258 // The external pointer table indices stored in HeapObjects as external
0259 // pointers are shifted to the left by this amount to guarantee that they are
0260 // smaller than the maximum table size.
0261 constexpr uint32_t kExternalPointerIndexShift = 6;
0262 #else
0263 constexpr size_t kExternalPointerTableReservationSize = 1024 * MB;
0264 constexpr uint32_t kExternalPointerIndexShift = 5;
0265 #endif  // V8_TARGET_OS_ANDROID
0266 
0267 // The maximum number of entries in an external pointer table.
0268 constexpr int kExternalPointerTableEntrySize = 8;
0269 constexpr int kExternalPointerTableEntrySizeLog2 = 3;
0270 constexpr size_t kMaxExternalPointers =
0271     kExternalPointerTableReservationSize / kExternalPointerTableEntrySize;
0272 static_assert((1 << (32 - kExternalPointerIndexShift)) == kMaxExternalPointers,
0273               "kExternalPointerTableReservationSize and "
0274               "kExternalPointerIndexShift don't match");
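// Worked example (a sketch using only the constants above): even the largest
// possible 32-bit handle value yields an in-bounds table index after the
// shift, which is why accesses through handles need no bounds check.
static_assert((~uint32_t{0} >> kExternalPointerIndexShift) <
                  kMaxExternalPointers,
              "shifted handles can never index past the end of the table");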
0275 
0276 #else  // !V8_COMPRESS_POINTERS
0277 
0278 // Needed for the V8.SandboxedExternalPointersCount histogram.
0279 constexpr size_t kMaxExternalPointers = 0;
0280 
0281 #endif  // V8_COMPRESS_POINTERS
0282 
0283 // An ExternalPointerHandle represents an (opaque) reference to an external
0284 // pointer that can be stored inside the sandbox. An ExternalPointerHandle has
0285 // meaning only in combination with an (active) Isolate as it references an
0286 // external pointer stored in the currently active Isolate's
0287 // ExternalPointerTable. Internally, an ExternalPointerHandle is simply an
0288 // index into an ExternalPointerTable that is shifted to the left to guarantee
0289 // that it is smaller than the size of the table.
0290 using ExternalPointerHandle = uint32_t;
0291 
0292 // ExternalPointers point to objects located outside the sandbox. When the V8
0293 // sandbox is enabled, these are stored on the heap as ExternalPointerHandles,
0294 // otherwise they are simply raw pointers.
0295 #ifdef V8_ENABLE_SANDBOX
0296 using ExternalPointer_t = ExternalPointerHandle;
0297 #else
0298 using ExternalPointer_t = Address;
0299 #endif
0300 
0301 constexpr ExternalPointer_t kNullExternalPointer = 0;
0302 constexpr ExternalPointerHandle kNullExternalPointerHandle = 0;
0303 
0304 //
0305 // External Pointers.
0306 //
0307 // When the sandbox is enabled, external pointers are stored in an external
0308 // pointer table and are referenced from HeapObjects through an index (a
0309 // "handle"). When stored in the table, the pointers are tagged with per-type
0310 // tags to prevent type confusion attacks between different external objects.
0311 // Besides type information bits, these tags also contain the GC marking bit
0312 // which indicates whether the pointer table entry is currently alive. When a
0313 // pointer is written into the table, the tag is ORed into the top bits. When
0314 // that pointer is later loaded from the table, it is ANDed with the inverse of
0315 // the expected tag. If the expected and actual type differ, this will leave
0316 // some of the top bits of the pointer set, rendering the pointer inaccessible.
0317 // The AND operation also removes the GC marking bit from the pointer.
0318 //
0319 // The tags are constructed such that UNTAG(TAG(0, T1), T2) != 0 for any two
0320 // (distinct) tags T1 and T2. In practice, this is achieved by generating tags
0321 // that all have the same number of zeroes and ones but different bit patterns.
0322 // With N type tag bits, this allows for (N choose N/2) possible type tags.
0323 // Besides the type tag bits, the tags also have the GC marking bit set so that
0324 // the marking bit is automatically set when a pointer is written into the
0325 // external pointer table (in which case it is clearly alive) and is cleared
0326 // when the pointer is loaded. The exception to this is the free entry tag,
0327 // which doesn't have the mark bit set, as the entry is not alive. This
0328 // construction allows performing the type check and removing GC marking bits
0329 // from the pointer in one efficient operation (bitwise AND). The number of
0330 // available bits is limited in the following way: on x64, bits [47, 64) are
0331 // generally available for tagging (userspace has 47 address bits available).
0332 // On Arm64, userspace typically has a 40 or 48 bit address space. However, due
0333 // to top-byte ignore (TBI) and memory tagging (MTE), the top byte is unusable
0334 // for type checks as type-check failures would go unnoticed or collide with
0335 // MTE bits. Some bits of the top byte can, however, still be used for the GC
0336 // marking bit. The bits available for the type tags are therefore limited to
0337 // [48, 56), i.e. (8 choose 4) = 70 different types.
0338 // The following options exist to increase the number of possible types:
0339 // - Using multiple ExternalPointerTables since tags can safely be reused
0340 //   across different tables
0341 // - Using "extended" type checks, where additional type information is stored
0342 //   either in an adjacent pointer table entry or at the pointed-to location
0343 // - Using a different tagging scheme, for example based on XOR which would
0344 //   allow for 2**8 different tags but require a separate operation to remove
0345 //   the marking bit
0346 //
0347 // The external pointer sandboxing mechanism ensures that every access to an
0348 // external pointer field will result in a valid pointer of the expected type
0349 // even in the presence of an attacker able to corrupt memory inside the
0350 // sandbox. However, if any data related to the external object is stored
0351 // inside the sandbox it may still be corrupted and so must be validated before
0352 // use or moved into the external object. Further, an attacker will always be
0353 // able to substitute different external pointers of the same type for each
0354 // other. Therefore, code using external pointers must be written in a
0355 // "substitution-safe" way, i.e. it must always be possible to substitute
0356 // external pointers of the same type without causing memory corruption outside
0357 // of the sandbox. Generally this is achieved by referencing any group of
0358 // related external objects through a single external pointer.
0359 //
0360 // Currently we use bit 62 for the marking bit which should always be unused as
0361 // it's part of the non-canonical address range. When Arm's top-byte ignore
0362 // (TBI) is enabled, this bit will be part of the ignored byte, and we assume
0363 // that the Embedder is not using this byte (really only this one bit) for any
0364 // other purpose. This bit also does not collide with the memory tagging
0365 // extension (MTE) which would use bits [56, 60).
0366 //
0367 // External pointer tables are also available even when the sandbox is off but
0368 // pointer compression is on. In that case, the mechanism can be used to ease
0369 // alignment requirements as it turns unaligned 64-bit raw pointers into
0370 // aligned 32-bit indices. To "opt-in" to the external pointer table mechanism
0371 // for this purpose, instead of using the ExternalPointer accessors one needs to
0372 // use ExternalPointerHandles directly and use them to access the pointers in an
0373 // ExternalPointerTable.
0374 constexpr uint64_t kExternalPointerMarkBit = 1ULL << 62;
0375 constexpr uint64_t kExternalPointerTagMask = 0x40ff000000000000;
0376 constexpr uint64_t kExternalPointerTagMaskWithoutMarkBit = 0xff000000000000;
0377 constexpr uint64_t kExternalPointerTagShift = 48;
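// Consistency-check sketch for the layout described above: the full tag mask
// is exactly the type-tag bits [48, 56) plus the marking bit 62, and the
// marking bit does not overlap the type-tag bits or the MTE range [56, 60).
static_assert(kExternalPointerTagMask ==
                  (kExternalPointerTagMaskWithoutMarkBit |
                   kExternalPointerMarkBit),
              "tag mask must cover exactly the type bits and the mark bit");
static_assert((kExternalPointerTagMaskWithoutMarkBit >>
               kExternalPointerTagShift) == 0xff,
              "type tags occupy the 8 bits at kExternalPointerTagShift");
static_assert((kExternalPointerMarkBit & (0xfULL << 56)) == 0,
              "the mark bit must not collide with MTE bits [56, 60)");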
0378 
0379 // All possible 8-bit type tags.
0380 // These are sorted so that tags can be grouped together and it can efficiently
0381 // be checked if a tag belongs to a given group. See for example the
0382 // IsSharedExternalPointerType routine.
0383 constexpr uint64_t kAllExternalPointerTypeTags[] = {
0384     0b00001111, 0b00010111, 0b00011011, 0b00011101, 0b00011110, 0b00100111,
0385     0b00101011, 0b00101101, 0b00101110, 0b00110011, 0b00110101, 0b00110110,
0386     0b00111001, 0b00111010, 0b00111100, 0b01000111, 0b01001011, 0b01001101,
0387     0b01001110, 0b01010011, 0b01010101, 0b01010110, 0b01011001, 0b01011010,
0388     0b01011100, 0b01100011, 0b01100101, 0b01100110, 0b01101001, 0b01101010,
0389     0b01101100, 0b01110001, 0b01110010, 0b01110100, 0b01111000, 0b10000111,
0390     0b10001011, 0b10001101, 0b10001110, 0b10010011, 0b10010101, 0b10010110,
0391     0b10011001, 0b10011010, 0b10011100, 0b10100011, 0b10100101, 0b10100110,
0392     0b10101001, 0b10101010, 0b10101100, 0b10110001, 0b10110010, 0b10110100,
0393     0b10111000, 0b11000011, 0b11000101, 0b11000110, 0b11001001, 0b11001010,
0394     0b11001100, 0b11010001, 0b11010010, 0b11010100, 0b11011000, 0b11100001,
0395     0b11100010, 0b11100100, 0b11101000, 0b11110000};
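// Sanity-check sketch for the claim above that 8 type-tag bits with an equal
// number of zeroes and ones allow (8 choose 4) = 70 distinct tags.
static_assert(sizeof(kAllExternalPointerTypeTags) /
                      sizeof(kAllExternalPointerTypeTags[0]) ==
                  70,
              "there are (8 choose 4) = 70 possible type tags");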
0396 
0397 #define TAG(i)                                                    \
0398   ((kAllExternalPointerTypeTags[i] << kExternalPointerTagShift) | \
0399    kExternalPointerMarkBit)
0400 
0401 // clang-format off
0402 
0403 // When adding new tags, please ensure that the code using these tags is
0404 // "substitution-safe", i.e. still operates safely if external pointers of the
0405 // same type are swapped by an attacker. See comment above for more details.
0406 
0407 // Shared external pointers are owned by the shared Isolate and stored in the
0408 // shared external pointer table associated with that Isolate, where they can
0409 // be accessed from multiple threads at the same time. The objects referenced
0410 // in this way must therefore always be thread-safe.
0411 #define SHARED_EXTERNAL_POINTER_TAGS(V)                 \
0412   V(kFirstSharedTag,                            TAG(0)) \
0413   V(kWaiterQueueNodeTag,                        TAG(0)) \
0414   V(kExternalStringResourceTag,                 TAG(1)) \
0415   V(kExternalStringResourceDataTag,             TAG(2)) \
0416   V(kLastSharedTag,                             TAG(2))
0417 
0418 // External pointers using these tags are kept in a per-Isolate external
0419 // pointer table and can only be accessed when this Isolate is active.
0420 #define PER_ISOLATE_EXTERNAL_POINTER_TAGS(V)             \
0421   V(kForeignForeignAddressTag,                  TAG(10)) \
0422   V(kNativeContextMicrotaskQueueTag,            TAG(11)) \
0423   V(kEmbedderDataSlotPayloadTag,                TAG(12)) \
0424 /* This tag essentially stands for a `void*` pointer in the V8 API, and */ \
0425 /* it is the Embedder's responsibility to ensure type safety (against */   \
0426 /* substitution) and lifetime validity of these objects. */                \
0427   V(kExternalObjectValueTag,                    TAG(13)) \
0428   V(kFunctionTemplateInfoCallbackTag,           TAG(14)) \
0429   V(kAccessorInfoGetterTag,                     TAG(15)) \
0430   V(kAccessorInfoSetterTag,                     TAG(16)) \
0431   V(kWasmInternalFunctionCallTargetTag,         TAG(17)) \
0432   V(kWasmTypeInfoNativeTypeTag,                 TAG(18)) \
0433   V(kWasmExportedFunctionDataSignatureTag,      TAG(19)) \
0434   V(kWasmContinuationJmpbufTag,                 TAG(20)) \
0435   V(kWasmIndirectFunctionTargetTag,             TAG(21)) \
0436   V(kArrayBufferExtensionTag,                   TAG(22))
0437 
0438 // All external pointer tags.
0439 #define ALL_EXTERNAL_POINTER_TAGS(V) \
0440   SHARED_EXTERNAL_POINTER_TAGS(V)    \
0441   PER_ISOLATE_EXTERNAL_POINTER_TAGS(V)
0442 
0443 #define EXTERNAL_POINTER_TAG_ENUM(Name, Tag) Name = Tag,
0444 #define MAKE_TAG(HasMarkBit, TypeTag)                             \
0445   ((static_cast<uint64_t>(TypeTag) << kExternalPointerTagShift) | \
0446   (HasMarkBit ? kExternalPointerMarkBit : 0))
0447 enum ExternalPointerTag : uint64_t {
0448   // Empty tag value. Mostly used as placeholder.
0449   kExternalPointerNullTag =            MAKE_TAG(1, 0b00000000),
0450   // External pointer tag that will match any external pointer. Use with care!
0451   kAnyExternalPointerTag =             MAKE_TAG(1, 0b11111111),
0452   // The free entry tag has all type bits set so every type check with a
0453   // different type fails. It also doesn't have the mark bit set as free
0454   // entries are (by definition) not alive.
0455   kExternalPointerFreeEntryTag =       MAKE_TAG(0, 0b11111111),
0456   // Evacuation entries are used during external pointer table compaction.
0457   kExternalPointerEvacuationEntryTag = MAKE_TAG(1, 0b11100111),
0458 
0459   ALL_EXTERNAL_POINTER_TAGS(EXTERNAL_POINTER_TAG_ENUM)
0460 };
0461 
0462 #undef MAKE_TAG
0463 #undef TAG
0464 #undef EXTERNAL_POINTER_TAG_ENUM
0465 
0466 // clang-format on
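// Worked example (a sketch of the tagging scheme described further above, not
// a V8 API): ANDing an entry with the inverse of its own tag strips the type
// bits and the mark bit, while using the tag of a different type leaves some
// top bits set, so the mismatching load cannot produce a usable pointer.
static_assert(((kNullAddress | kExternalStringResourceTag) &
               ~kExternalStringResourceTag) == 0,
              "a matching tag check recovers the original (null) pointer");
static_assert(((kNullAddress | kExternalStringResourceTag) &
               ~kExternalStringResourceDataTag) != 0,
              "a mismatching tag check leaves stray top bits set");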
0467 
0468 // True if the external pointer must be accessed from the shared isolate's
0469 // external pointer table.
0470 V8_INLINE static constexpr bool IsSharedExternalPointerType(
0471     ExternalPointerTag tag) {
0472   return tag >= kFirstSharedTag && tag <= kLastSharedTag;
0473 }
0474 
0475 // True if the external pointer may live in a read-only object, in which case
0476 // the table entry will be in the shared read-only segment of the external
0477 // pointer table.
0478 V8_INLINE static constexpr bool IsMaybeReadOnlyExternalPointerType(
0479     ExternalPointerTag tag) {
0480   return tag == kAccessorInfoGetterTag || tag == kAccessorInfoSetterTag ||
0481          tag == kFunctionTemplateInfoCallbackTag;
0482 }
0483 
0484 // Sanity checks.
0485 #define CHECK_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
0486   static_assert(IsSharedExternalPointerType(Tag));
0487 #define CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
0488   static_assert(!IsSharedExternalPointerType(Tag));
0489 
0490 SHARED_EXTERNAL_POINTER_TAGS(CHECK_SHARED_EXTERNAL_POINTER_TAGS)
0491 PER_ISOLATE_EXTERNAL_POINTER_TAGS(CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS)
0492 
0493 #undef CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS
0494 #undef CHECK_SHARED_EXTERNAL_POINTER_TAGS
0495 
0496 #undef SHARED_EXTERNAL_POINTER_TAGS
0497 #undef EXTERNAL_POINTER_TAGS
0498 
0499 //
0500 // Indirect Pointers.
0501 //
0502 // When the sandbox is enabled, indirect pointers are used to reference
0503 // HeapObjects that live outside of the sandbox (but are still managed by V8's
0504 // garbage collector). When object A references an object B through an indirect
0505 // pointer, object A will contain an IndirectPointerHandle, i.e. a shifted
0506 // 32-bit index, which identifies an entry in a pointer table (either the
0507 // trusted pointer table for TrustedObjects, or the code pointer table if it is
0508 // a Code object). This table entry then contains the actual pointer to object
0509 // B. Further, object B owns this pointer table entry, and it is responsible
0510 // for updating the "self-pointer" in the entry when it is relocated in memory.
0511 // This way, in contrast to "normal" pointers, indirect pointers never need to
0512 // be tracked by the GC (i.e. there is no remembered set for them).
0513 // These pointers do not exist when the sandbox is disabled.
0514 
0515 // An IndirectPointerHandle represents a 32-bit index into a pointer table.
0516 using IndirectPointerHandle = uint32_t;
0517 
0518 // A null handle always references an entry that contains nullptr.
0519 constexpr IndirectPointerHandle kNullIndirectPointerHandle = 0;
0520 
0521 // When the sandbox is enabled, indirect pointers are used to implement:
0522 // - TrustedPointers: an indirect pointer using the trusted pointer table (TPT)
0523 //   and referencing a TrustedObject in one of the trusted heap spaces.
0524 // - CodePointers: an indirect pointer using the code pointer table (CPT) and
0525 //   referencing a Code object together with its instruction stream.
0526 
0527 //
0528 // Trusted Pointers.
0529 //
0530 // A pointer to a TrustedObject.
0531 // When the sandbox is enabled, these are indirect pointers using the trusted
0532 // pointer table (TPT). They are used to reference trusted objects (located in
0533 // one of V8's trusted heap spaces, outside of the sandbox) from inside the
0534 // sandbox in a memory-safe way. When the sandbox is disabled, these are
0535 // regular tagged pointers.
0536 using TrustedPointerHandle = IndirectPointerHandle;
0537 
0538 // The size of the virtual memory reservation for the trusted pointer table.
0539 // As with the external pointer table, a maximum table size in combination with
0540 // shifted indices allows omitting bounds checks.
0541 constexpr size_t kTrustedPointerTableReservationSize = 64 * MB;
0542 
0543 // The trusted pointer handles are stored shifted to the left by this amount
0544 // to guarantee that they are smaller than the maximum table size.
0545 constexpr uint32_t kTrustedPointerHandleShift = 9;
0546 
0547 // A null handle always references an entry that contains nullptr.
0548 constexpr TrustedPointerHandle kNullTrustedPointerHandle =
0549     kNullIndirectPointerHandle;
0550 
0551 // The maximum number of entries in a trusted pointer table.
0552 constexpr int kTrustedPointerTableEntrySize = 8;
0553 constexpr int kTrustedPointerTableEntrySizeLog2 = 3;
0554 constexpr size_t kMaxTrustedPointers =
0555     kTrustedPointerTableReservationSize / kTrustedPointerTableEntrySize;
0556 static_assert((1 << (32 - kTrustedPointerHandleShift)) == kMaxTrustedPointers,
0557               "kTrustedPointerTableReservationSize and "
0558               "kTrustedPointerHandleShift don't match");
0559 
0560 //
0561 // Code Pointers.
0562 //
0563 // A pointer to a Code object.
0564 // Essentially a specialized version of a trusted pointer that (when the
0565 // sandbox is enabled) uses the code pointer table (CPT) instead of the TPT.
0566 // Each entry in the CPT contains both a pointer to a Code object as well as a
0567 // pointer to the Code's entrypoint. This allows calling/jumping into Code with
0568 // one fewer memory access (compared to the case where the entrypoint pointer
0569 // first needs to be loaded from the Code object). As such, a CodePointerHandle
0570 // can be used both to obtain the referenced Code object and to directly load
0571 // its entrypoint.
0572 //
0573 // When the sandbox is disabled, these are regular tagged pointers.
0574 using CodePointerHandle = IndirectPointerHandle;
0575 
0576 // The size of the virtual memory reservation for the code pointer table.
0577 // As with the other tables, a maximum table size in combination with shifted
0578 // indices allows omitting bounds checks.
0579 constexpr size_t kCodePointerTableReservationSize = 16 * MB;
0580 
0581 // Code pointer handles are shifted by a different amount than indirect pointer
0582 // handles as the tables have a different maximum size.
0583 constexpr uint32_t kCodePointerHandleShift = 12;
0584 
0585 // A null handle always references an entry that contains nullptr.
0586 constexpr CodePointerHandle kNullCodePointerHandle = kNullIndirectPointerHandle;
0587 
0588 // It can sometimes be necessary to distinguish a code pointer handle from a
0589 // trusted pointer handle. A typical example would be a union trusted pointer
0590 // field that can refer to both Code objects and other trusted objects. To
0591 // support these use-cases, we use a simple marking scheme where some of the
0592 // low bits of a code pointer handle are set, while they will be unset on a
0593 // trusted pointer handle. This way, the correct table to resolve the handle
0594 // can be determined even in the absence of a type tag.
0595 constexpr uint32_t kCodePointerHandleMarker = 0x1;
0596 static_assert(kCodePointerHandleShift > 0);
0597 static_assert(kTrustedPointerHandleShift > 0);
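// A minimal sketch of the discrimination scheme described above (hypothetical
// helper, not part of V8's API): because both handle shifts are larger than
// zero, the marker bit is never set on a trusted pointer handle, so testing it
// identifies code pointer handles without any type tag.
constexpr bool ExampleIsCodePointerHandle(IndirectPointerHandle handle) {
  return (handle & kCodePointerHandleMarker) != 0;
}
static_assert(!ExampleIsCodePointerHandle(kNullTrustedPointerHandle),
              "trusted pointer handles never carry the marker");
static_assert(ExampleIsCodePointerHandle(
                  (uint32_t{1} << kCodePointerHandleShift) |
                  kCodePointerHandleMarker),
              "code pointer handles are built with the marker set");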
0598 
0599 // The maximum number of entries in a code pointer table.
0600 constexpr int kCodePointerTableEntrySize = 16;
0601 constexpr int kCodePointerTableEntrySizeLog2 = 4;
0602 constexpr size_t kMaxCodePointers =
0603     kCodePointerTableReservationSize / kCodePointerTableEntrySize;
0604 static_assert(
0605     (1 << (32 - kCodePointerHandleShift)) == kMaxCodePointers,
0606     "kCodePointerTableReservationSize and kCodePointerHandleShift don't match");
0607 
0608 constexpr int kCodePointerTableEntryEntrypointOffset = 0;
0609 constexpr int kCodePointerTableEntryCodeObjectOffset = 8;
0610 
0611 // Constants that can be used to mark places that should be modified once
0612 // certain types of objects are moved out of the sandbox and into trusted space.
0613 constexpr bool kRuntimeGeneratedCodeObjectsLiveInTrustedSpace = true;
0614 constexpr bool kBuiltinCodeObjectsLiveInTrustedSpace = false;
0615 constexpr bool kAllCodeObjectsLiveInTrustedSpace =
0616     kRuntimeGeneratedCodeObjectsLiveInTrustedSpace &&
0617     kBuiltinCodeObjectsLiveInTrustedSpace;
0618 
0619 // {obj} must be the raw tagged pointer representation of a HeapObject
0620 // that's guaranteed to never be in ReadOnlySpace.
0621 V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);
0622 
0623 // Returns whether we need to throw when an error occurs. This infers the
0624 // mode based on the current context and the closure. This returns true if the
0625 // language mode is strict.
0626 V8_EXPORT bool ShouldThrowOnError(internal::Isolate* isolate);
0627 /**
0628  * This class exports constants and functionality from within v8 that
0629  * is necessary to implement inline functions in the v8 api.  Don't
0630  * depend on functions and constants defined here.
0631  */
0632 class Internals {
0633 #ifdef V8_MAP_PACKING
0634   V8_INLINE static constexpr Address UnpackMapWord(Address mapword) {
0635     // TODO(wenyuzhao): Clear header metadata.
0636     return mapword ^ kMapWordXorMask;
0637   }
0638 #endif
0639 
0640  public:
0641   // These values match non-compiler-dependent values defined within
0642   // the implementation of v8.
0643   static const int kHeapObjectMapOffset = 0;
0644   static const int kMapInstanceTypeOffset = 1 * kApiTaggedSize + kApiInt32Size;
0645   static const int kStringResourceOffset =
0646       1 * kApiTaggedSize + 2 * kApiInt32Size;
0647 
0648   static const int kOddballKindOffset = 4 * kApiTaggedSize + kApiDoubleSize;
0649   static const int kJSObjectHeaderSize = 3 * kApiTaggedSize;
0650   static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
0651   static const int kEmbedderDataArrayHeaderSize = 2 * kApiTaggedSize;
0652   static const int kEmbedderDataSlotSize = kApiSystemPointerSize;
0653 #ifdef V8_ENABLE_SANDBOX
0654   static const int kEmbedderDataSlotExternalPointerOffset = kApiTaggedSize;
0655 #else
0656   static const int kEmbedderDataSlotExternalPointerOffset = 0;
0657 #endif
0658   static const int kNativeContextEmbedderDataOffset = 6 * kApiTaggedSize;
0659   static const int kStringRepresentationAndEncodingMask = 0x0f;
0660   static const int kStringEncodingMask = 0x8;
0661   static const int kExternalTwoByteRepresentationTag = 0x02;
0662   static const int kExternalOneByteRepresentationTag = 0x0a;
0663 
0664   static const uint32_t kNumIsolateDataSlots = 4;
0665   static const int kStackGuardSize = 8 * kApiSystemPointerSize;
0666   static const int kNumberOfBooleanFlags = 6;
0667   static const int kErrorMessageParamSize = 1;
0668   static const int kTablesAlignmentPaddingSize = 1;
0669   static const int kBuiltinTier0EntryTableSize = 7 * kApiSystemPointerSize;
0670   static const int kBuiltinTier0TableSize = 7 * kApiSystemPointerSize;
0671   static const int kLinearAllocationAreaSize = 3 * kApiSystemPointerSize;
0672   static const int kThreadLocalTopSize = 30 * kApiSystemPointerSize;
0673   static const int kHandleScopeDataSize =
0674       2 * kApiSystemPointerSize + 2 * kApiInt32Size;
0675 
0676   // ExternalPointerTable and TrustedPointerTable layout guarantees.
0677   static const int kExternalPointerTableBasePointerOffset = 0;
0678   static const int kExternalPointerTableSize = 2 * kApiSystemPointerSize;
0679   static const int kTrustedPointerTableSize = 2 * kApiSystemPointerSize;
0680   static const int kTrustedPointerTableBasePointerOffset = 0;
0681 
0682   // IsolateData layout guarantees.
0683   static const int kIsolateCageBaseOffset = 0;
0684   static const int kIsolateStackGuardOffset =
0685       kIsolateCageBaseOffset + kApiSystemPointerSize;
0686   static const int kVariousBooleanFlagsOffset =
0687       kIsolateStackGuardOffset + kStackGuardSize;
0688   static const int kErrorMessageParamOffset =
0689       kVariousBooleanFlagsOffset + kNumberOfBooleanFlags;
0690   static const int kBuiltinTier0EntryTableOffset = kErrorMessageParamOffset +
0691                                                    kErrorMessageParamSize +
0692                                                    kTablesAlignmentPaddingSize;
0693   static const int kBuiltinTier0TableOffset =
0694       kBuiltinTier0EntryTableOffset + kBuiltinTier0EntryTableSize;
0695   static const int kNewAllocationInfoOffset =
0696       kBuiltinTier0TableOffset + kBuiltinTier0TableSize;
0697   static const int kOldAllocationInfoOffset =
0698       kNewAllocationInfoOffset + kLinearAllocationAreaSize;
0699 
0700   static const int kFastCCallAlignmentPaddingSize =
0701       kApiSystemPointerSize == 8 ? 0 : kApiSystemPointerSize;
0702   static const int kIsolateFastCCallCallerFpOffset =
0703       kOldAllocationInfoOffset + kLinearAllocationAreaSize +
0704       kFastCCallAlignmentPaddingSize;
0705   static const int kIsolateFastCCallCallerPcOffset =
0706       kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
0707   static const int kIsolateFastApiCallTargetOffset =
0708       kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
0709   static const int kIsolateLongTaskStatsCounterOffset =
0710       kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
0711   static const int kIsolateThreadLocalTopOffset =
0712       kIsolateLongTaskStatsCounterOffset + kApiSizetSize;
0713   static const int kIsolateHandleScopeDataOffset =
0714       kIsolateThreadLocalTopOffset + kThreadLocalTopSize;
0715   static const int kIsolateEmbedderDataOffset =
0716       kIsolateHandleScopeDataOffset + kHandleScopeDataSize;
0717 #ifdef V8_COMPRESS_POINTERS
0718   static const int kIsolateExternalPointerTableOffset =
0719       kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
0720   static const int kIsolateSharedExternalPointerTableAddressOffset =
0721       kIsolateExternalPointerTableOffset + kExternalPointerTableSize;
0722 #ifdef V8_ENABLE_SANDBOX
0723   static const int kIsolateTrustedCageBaseOffset =
0724       kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize;
0725   static const int kIsolateTrustedPointerTableOffset =
0726       kIsolateTrustedCageBaseOffset + kApiSystemPointerSize;
0727   static const int kIsolateApiCallbackThunkArgumentOffset =
0728       kIsolateTrustedPointerTableOffset + kTrustedPointerTableSize;
0729 #else
0730   static const int kIsolateApiCallbackThunkArgumentOffset =
0731       kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize;
0732 #endif  // V8_ENABLE_SANDBOX
0733 #else
0734   static const int kIsolateApiCallbackThunkArgumentOffset =
0735       kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
0736 #endif  // V8_COMPRESS_POINTERS
0737   static const int kContinuationPreservedEmbedderDataOffset =
0738       kIsolateApiCallbackThunkArgumentOffset + kApiSystemPointerSize;
0739 
0740   static const int kWasm64OOBOffsetAlignmentPaddingSize = 0;
0741   static const int kWasm64OOBOffsetOffset =
0742       kContinuationPreservedEmbedderDataOffset + kApiSystemPointerSize +
0743       kWasm64OOBOffsetAlignmentPaddingSize;
0744   static const int kIsolateRootsOffset =
0745       kWasm64OOBOffsetOffset + sizeof(int64_t);
0746 
0747 #if V8_STATIC_ROOTS_BOOL
0748 
0749 // These constants are copied from static-roots.h and guarded by static asserts.
0750 #define EXPORTED_STATIC_ROOTS_PTR_LIST(V) \
0751   V(UndefinedValue, 0x69)                 \
0752   V(NullValue, 0x85)                      \
0753   V(TrueValue, 0xc9)                      \
0754   V(FalseValue, 0xad)                     \
0755   V(EmptyString, 0xa1)                    \
0756   V(TheHoleValue, 0x719)
0757 
0758   using Tagged_t = uint32_t;
0759   struct StaticReadOnlyRoot {
0760 #define DEF_ROOT(name, value) static constexpr Tagged_t k##name = value;
0761     EXPORTED_STATIC_ROOTS_PTR_LIST(DEF_ROOT)
0762 #undef DEF_ROOT
0763 
0764     static constexpr Tagged_t kFirstStringMap = 0xe5;
0765     static constexpr Tagged_t kLastStringMap = 0x47d;
0766 
0767 #define PLUSONE(...) +1
0768     static constexpr size_t kNumberOfExportedStaticRoots =
0769         2 + EXPORTED_STATIC_ROOTS_PTR_LIST(PLUSONE);
0770 #undef PLUSONE
0771   };
0772 
0773 #endif  // V8_STATIC_ROOTS_BOOL
0774 
0775   static const int kUndefinedValueRootIndex = 4;
0776   static const int kTheHoleValueRootIndex = 5;
0777   static const int kNullValueRootIndex = 6;
0778   static const int kTrueValueRootIndex = 7;
0779   static const int kFalseValueRootIndex = 8;
0780   static const int kEmptyStringRootIndex = 9;
0781 
0782   static const int kNodeClassIdOffset = 1 * kApiSystemPointerSize;
0783   static const int kNodeFlagsOffset = 1 * kApiSystemPointerSize + 3;
0784   static const int kNodeStateMask = 0x3;
0785   static const int kNodeStateIsWeakValue = 2;
0786 
0787   static const int kFirstNonstringType = 0x80;
0788   static const int kOddballType = 0x83;
0789   static const int kForeignType = 0xcc;
0790   static const int kJSSpecialApiObjectType = 0x410;
0791   static const int kJSObjectType = 0x421;
0792   static const int kFirstJSApiObjectType = 0x422;
0793   static const int kLastJSApiObjectType = 0x80A;
0794   // Defines a range [kFirstEmbedderJSApiObjectType, kJSApiObjectTypesCount]
0795   // of JSApiObject instance type values that an embedder can use.
0796   static const int kFirstEmbedderJSApiObjectType = 0;
0797   static const int kLastEmbedderJSApiObjectType =
0798       kLastJSApiObjectType - kFirstJSApiObjectType;
0799 
0800   static const int kUndefinedOddballKind = 4;
0801   static const int kNullOddballKind = 3;
0802 
0803   // Constants used by PropertyCallbackInfo to check if we should throw when an
0804   // error occurs.
0805   static const int kThrowOnError = 0;
0806   static const int kDontThrow = 1;
0807   static const int kInferShouldThrowMode = 2;
0808 
0809   // Soft limit for AdjustAmountOfExternalAllocatedMemory. Trigger an
0810   // incremental GC once the external memory reaches this limit.
0811   static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;
0812 
0813 #ifdef V8_MAP_PACKING
0814   static const uintptr_t kMapWordMetadataMask = 0xffffULL << 48;
0815   // The lowest two bits of mapwords are always `0b10`
0816   static const uintptr_t kMapWordSignature = 0b10;
0817   // XORing a (non-compressed) map with this mask ensures that the two
0818   // low-order bits are 0b10. The 0 at the end makes this look like a Smi,
0819   // although real Smis have all lower 32 bits unset. We only rely on these
0820   // values passing as Smis in very few places.
0821   static const int kMapWordXorMask = 0b11;
0822 #endif
0823 
0824   V8_EXPORT static void CheckInitializedImpl(v8::Isolate* isolate);
0825   V8_INLINE static void CheckInitialized(v8::Isolate* isolate) {
0826 #ifdef V8_ENABLE_CHECKS
0827     CheckInitializedImpl(isolate);
0828 #endif
0829   }
0830 
0831   V8_INLINE static constexpr bool HasHeapObjectTag(Address value) {
0832     return (value & kHeapObjectTagMask) == static_cast<Address>(kHeapObjectTag);
0833   }
0834 
0835   V8_INLINE static constexpr int SmiValue(Address value) {
0836     return PlatformSmiTagging::SmiToInt(value);
0837   }
0838 
0839   V8_INLINE static constexpr Address IntToSmi(int value) {
0840     return internal::IntToSmi(value);
0841   }
0842 
0843   V8_INLINE static constexpr bool IsValidSmi(intptr_t value) {
0844     return PlatformSmiTagging::IsValidSmi(value);
0845   }
0846 
0847 #if V8_STATIC_ROOTS_BOOL
0848   V8_INLINE static bool is_identical(Address obj, Tagged_t constant) {
0849     return static_cast<Tagged_t>(obj) == constant;
0850   }
0851 
0852   V8_INLINE static bool CheckInstanceMapRange(Address obj, Tagged_t first_map,
0853                                               Tagged_t last_map) {
0854     auto map = ReadRawField<Tagged_t>(obj, kHeapObjectMapOffset);
0855 #ifdef V8_MAP_PACKING
0856     map = UnpackMapWord(map);
0857 #endif
0858     return map >= first_map && map <= last_map;
0859   }
0860 #endif
0861 
0862   V8_INLINE static int GetInstanceType(Address obj) {
0863     Address map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
0864 #ifdef V8_MAP_PACKING
0865     map = UnpackMapWord(map);
0866 #endif
0867     return ReadRawField<uint16_t>(map, kMapInstanceTypeOffset);
0868   }
0869 
0870   V8_INLINE static Address LoadMap(Address obj) {
0871     if (!HasHeapObjectTag(obj)) return kNullAddress;
0872     Address map = ReadTaggedPointerField(obj, kHeapObjectMapOffset);
0873 #ifdef V8_MAP_PACKING
0874     map = UnpackMapWord(map);
0875 #endif
0876     return map;
0877   }
0878 
0879   V8_INLINE static int GetOddballKind(Address obj) {
0880     return SmiValue(ReadTaggedSignedField(obj, kOddballKindOffset));
0881   }
0882 
0883   V8_INLINE static bool IsExternalTwoByteString(int instance_type) {
0884     int representation = (instance_type & kStringRepresentationAndEncodingMask);
0885     return representation == kExternalTwoByteRepresentationTag;
0886   }
0887 
0888   V8_INLINE static constexpr bool CanHaveInternalField(int instance_type) {
0889     static_assert(kJSObjectType + 1 == kFirstJSApiObjectType);
0890     static_assert(kJSObjectType < kLastJSApiObjectType);
0891     static_assert(kFirstJSApiObjectType < kLastJSApiObjectType);
0892     // Check for IsJSObject() || IsJSSpecialApiObject() || IsJSApiObject()
0893     return instance_type == kJSSpecialApiObjectType ||
0894            // inlined version of base::IsInRange
0895            (static_cast<unsigned>(static_cast<unsigned>(instance_type) -
0896                                   static_cast<unsigned>(kJSObjectType)) <=
0897             static_cast<unsigned>(kLastJSApiObjectType - kJSObjectType));
0898   }
0899 
0900   V8_INLINE static uint8_t GetNodeFlag(Address* obj, int shift) {
0901     uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
0902     return *addr & static_cast<uint8_t>(1U << shift);
0903   }
0904 
0905   V8_INLINE static void UpdateNodeFlag(Address* obj, bool value, int shift) {
0906     uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
0907     uint8_t mask = static_cast<uint8_t>(1U << shift);
0908     *addr = static_cast<uint8_t>((*addr & ~mask) | (value << shift));
0909   }
0910 
0911   V8_INLINE static uint8_t GetNodeState(Address* obj) {
0912     uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
0913     return *addr & kNodeStateMask;
0914   }
0915 
0916   V8_INLINE static void UpdateNodeState(Address* obj, uint8_t value) {
0917     uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
0918     *addr = static_cast<uint8_t>((*addr & ~kNodeStateMask) | value);
0919   }
0920 
0921   V8_INLINE static void SetEmbedderData(v8::Isolate* isolate, uint32_t slot,
0922                                         void* data) {
0923     Address addr = reinterpret_cast<Address>(isolate) +
0924                    kIsolateEmbedderDataOffset + slot * kApiSystemPointerSize;
0925     *reinterpret_cast<void**>(addr) = data;
0926   }
0927 
0928   V8_INLINE static void* GetEmbedderData(const v8::Isolate* isolate,
0929                                          uint32_t slot) {
0930     Address addr = reinterpret_cast<Address>(isolate) +
0931                    kIsolateEmbedderDataOffset + slot * kApiSystemPointerSize;
0932     return *reinterpret_cast<void* const*>(addr);
0933   }
0934 
0935   V8_INLINE static void IncrementLongTasksStatsCounter(v8::Isolate* isolate) {
0936     Address addr =
0937         reinterpret_cast<Address>(isolate) + kIsolateLongTaskStatsCounterOffset;
0938     ++(*reinterpret_cast<size_t*>(addr));
0939   }
0940 
0941   V8_INLINE static Address* GetRootSlot(v8::Isolate* isolate, int index) {
0942     Address addr = reinterpret_cast<Address>(isolate) + kIsolateRootsOffset +
0943                    index * kApiSystemPointerSize;
0944     return reinterpret_cast<Address*>(addr);
0945   }
0946 
0947   V8_INLINE static Address GetRoot(v8::Isolate* isolate, int index) {
0948 #if V8_STATIC_ROOTS_BOOL
0949     Address base = *reinterpret_cast<Address*>(
0950         reinterpret_cast<uintptr_t>(isolate) + kIsolateCageBaseOffset);
0951     switch (index) {
0952 #define DECOMPRESS_ROOT(name, ...) \
0953   case k##name##RootIndex:         \
0954     return base + StaticReadOnlyRoot::k##name;
0955       EXPORTED_STATIC_ROOTS_PTR_LIST(DECOMPRESS_ROOT)
0956 #undef DECOMPRESS_ROOT
0957 #undef EXPORTED_STATIC_ROOTS_PTR_LIST
0958       default:
0959         break;
0960     }
0961 #endif  // V8_STATIC_ROOTS_BOOL
0962     return *GetRootSlot(isolate, index);
0963   }
0964 
0965 #ifdef V8_ENABLE_SANDBOX
0966   V8_INLINE static Address* GetExternalPointerTableBase(v8::Isolate* isolate) {
0967     Address addr = reinterpret_cast<Address>(isolate) +
0968                    kIsolateExternalPointerTableOffset +
0969                    kExternalPointerTableBasePointerOffset;
0970     return *reinterpret_cast<Address**>(addr);
0971   }
0972 
0973   V8_INLINE static Address* GetSharedExternalPointerTableBase(
0974       v8::Isolate* isolate) {
0975     Address addr = reinterpret_cast<Address>(isolate) +
0976                    kIsolateSharedExternalPointerTableAddressOffset;
0977     addr = *reinterpret_cast<Address*>(addr);
0978     addr += kExternalPointerTableBasePointerOffset;
0979     return *reinterpret_cast<Address**>(addr);
0980   }
0981 #endif
0982 
0983   template <typename T>
0984   V8_INLINE static T ReadRawField(Address heap_object_ptr, int offset) {
0985     Address addr = heap_object_ptr + offset - kHeapObjectTag;
0986 #ifdef V8_COMPRESS_POINTERS
0987     if (sizeof(T) > kApiTaggedSize) {
0988       // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
0989       // fields (external pointers, doubles and BigInt data) are only
0990       // kTaggedSize aligned so we have to use unaligned pointer friendly way of
0991       // accessing them in order to avoid undefined behavior in C++ code.
0992       T r;
0993       memcpy(&r, reinterpret_cast<void*>(addr), sizeof(T));
0994       return r;
0995     }
0996 #endif
0997     return *reinterpret_cast<const T*>(addr);
0998   }
0999 
1000   V8_INLINE static Address ReadTaggedPointerField(Address heap_object_ptr,
1001                                                   int offset) {
1002 #ifdef V8_COMPRESS_POINTERS
1003     uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
1004     Address base = GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
1005     return base + static_cast<Address>(static_cast<uintptr_t>(value));
1006 #else
1007     return ReadRawField<Address>(heap_object_ptr, offset);
1008 #endif
1009   }
1010 
1011   V8_INLINE static Address ReadTaggedSignedField(Address heap_object_ptr,
1012                                                  int offset) {
1013 #ifdef V8_COMPRESS_POINTERS
1014     uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
1015     return static_cast<Address>(static_cast<uintptr_t>(value));
1016 #else
1017     return ReadRawField<Address>(heap_object_ptr, offset);
1018 #endif
1019   }
1020 
1021   V8_INLINE static v8::Isolate* GetIsolateForSandbox(Address obj) {
1022 #ifdef V8_ENABLE_SANDBOX
1023     return reinterpret_cast<v8::Isolate*>(
1024         internal::IsolateFromNeverReadOnlySpaceObject(obj));
1025 #else
1026     // Not used in non-sandbox mode.
1027     return nullptr;
1028 #endif
1029   }
1030 
1031   template <ExternalPointerTag tag>
1032   V8_INLINE static Address ReadExternalPointerField(v8::Isolate* isolate,
1033                                                     Address heap_object_ptr,
1034                                                     int offset) {
1035 #ifdef V8_ENABLE_SANDBOX
1036     static_assert(tag != kExternalPointerNullTag);
1037     // See src/sandbox/external-pointer-table-inl.h. Logic duplicated here so
1038     // it can be inlined and doesn't require an additional call.
1039     Address* table = IsSharedExternalPointerType(tag)
1040                          ? GetSharedExternalPointerTableBase(isolate)
1041                          : GetExternalPointerTableBase(isolate);
1042     internal::ExternalPointerHandle handle =
1043         ReadRawField<ExternalPointerHandle>(heap_object_ptr, offset);
1044     uint32_t index = handle >> kExternalPointerIndexShift;
1045     std::atomic<Address>* ptr =
1046         reinterpret_cast<std::atomic<Address>*>(&table[index]);
1047     Address entry = std::atomic_load_explicit(ptr, std::memory_order_relaxed);
1048     return entry & ~tag;
1049 #else
1050     return ReadRawField<Address>(heap_object_ptr, offset);
1051 #endif  // V8_ENABLE_SANDBOX
1052   }
1053 
1054 #ifdef V8_COMPRESS_POINTERS
1055   V8_INLINE static Address GetPtrComprCageBaseFromOnHeapAddress(Address addr) {
1056     return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
1057   }
1058 
1059   V8_INLINE static uint32_t CompressTagged(Address value) {
1060     return static_cast<uint32_t>(value);
1061   }
1062 
1063   V8_INLINE static Address DecompressTaggedField(Address heap_object_ptr,
1064                                                  uint32_t value) {
1065     Address base = GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
1066     return base + static_cast<Address>(static_cast<uintptr_t>(value));
1067   }
1068 
1069 #endif  // V8_COMPRESS_POINTERS
1070 };
1071 
1072 // Only perform cast check for types derived from v8::Data since
1073 // other types do not implement the Cast method.
1074 template <bool PerformCheck>
1075 struct CastCheck {
1076   template <class T>
1077   static void Perform(T* data);
1078 };
1079 
1080 template <>
1081 template <class T>
1082 void CastCheck<true>::Perform(T* data) {
1083   T::Cast(data);
1084 }
1085 
1086 template <>
1087 template <class T>
1088 void CastCheck<false>::Perform(T* data) {}
1089 
1090 template <class T>
1091 V8_INLINE void PerformCastCheck(T* data) {
1092   CastCheck<std::is_base_of<Data, T>::value &&
1093             !std::is_same<Data, std::remove_cv_t<T>>::value>::Perform(data);
1094 }
1095 
1096 // A base class for backing stores, which is needed due to vagaries of
1097 // how static casts work with std::shared_ptr.
1098 class BackingStoreBase {};
1099 
1100 // The maximum value in enum GarbageCollectionReason, defined in heap.h.
1101 // This is needed for histograms sampling garbage collection reasons.
1102 constexpr int kGarbageCollectionReasonMaxValue = 27;
1103 
1104 // Base class for the address block allocator compatible with standard
1105 // containers, which registers its allocated range as strong roots.
1106 class V8_EXPORT StrongRootAllocatorBase {
1107  public:
1108   Heap* heap() const { return heap_; }
1109 
1110   bool operator==(const StrongRootAllocatorBase& other) const {
1111     return heap_ == other.heap_;
1112   }
1113   bool operator!=(const StrongRootAllocatorBase& other) const {
1114     return heap_ != other.heap_;
1115   }
1116 
1117  protected:
1118   explicit StrongRootAllocatorBase(Heap* heap) : heap_(heap) {}
1119   explicit StrongRootAllocatorBase(v8::Isolate* isolate);
1120 
1121   // Allocate/deallocate a range of n elements of type internal::Address.
1122   Address* allocate_impl(size_t n);
1123   void deallocate_impl(Address* p, size_t n) noexcept;
1124 
1125  private:
1126   Heap* heap_;
1127 };
1128 
1129 // The general version of this template behaves just as std::allocator, with
1130 // the exception that the constructor takes the isolate as parameter. Only
1131 // specialized versions, e.g., internal::StrongRootAllocator<internal::Address>
1132 // and internal::StrongRootAllocator<v8::Local<T>> register the allocated range
1133 // as strong roots.
1134 template <typename T>
1135 class StrongRootAllocator : public StrongRootAllocatorBase,
1136                             private std::allocator<T> {
1137  public:
1138   using value_type = T;
1139 
1140   explicit StrongRootAllocator(Heap* heap) : StrongRootAllocatorBase(heap) {}
1141   explicit StrongRootAllocator(v8::Isolate* isolate)
1142       : StrongRootAllocatorBase(isolate) {}
1143   template <typename U>
1144   StrongRootAllocator(const StrongRootAllocator<U>& other) noexcept
1145       : StrongRootAllocatorBase(other) {}
1146 
1147   using std::allocator<T>::allocate;
1148   using std::allocator<T>::deallocate;
1149 };
1150 
1151 // A class of iterators that wrap some different iterator type.
1152 // If specified, ElementType is the type of element accessed by the wrapper
1153 // iterator; in this case, the actual reference and pointer types of Iterator
1154 // must be convertible to ElementType& and ElementType*, respectively.
1155 template <typename Iterator, typename ElementType = void>
1156 class WrappedIterator {
1157  public:
1158   static_assert(
1159       !std::is_void_v<ElementType> ||
1160       (std::is_convertible_v<typename std::iterator_traits<Iterator>::pointer,
1161                              ElementType*> &&
1162        std::is_convertible_v<typename std::iterator_traits<Iterator>::reference,
1163                              ElementType&>));
1164 
1165   using iterator_category =
1166       typename std::iterator_traits<Iterator>::iterator_category;
1167   using difference_type =
1168       typename std::iterator_traits<Iterator>::difference_type;
1169   using value_type =
1170       std::conditional_t<std::is_void_v<ElementType>,
1171                          typename std::iterator_traits<Iterator>::value_type,
1172                          ElementType>;
1173   using pointer =
1174       std::conditional_t<std::is_void_v<ElementType>,
1175                          typename std::iterator_traits<Iterator>::pointer,
1176                          ElementType*>;
1177   using reference =
1178       std::conditional_t<std::is_void_v<ElementType>,
1179                          typename std::iterator_traits<Iterator>::reference,
1180                          ElementType&>;
1181 
1182   constexpr WrappedIterator() noexcept : it_() {}
1183   constexpr explicit WrappedIterator(Iterator it) noexcept : it_(it) {}
1184 
1185   template <typename OtherIterator, typename OtherElementType,
1186             std::enable_if_t<std::is_convertible_v<OtherIterator, Iterator>,
1187                              bool> = true>
1188   constexpr WrappedIterator(
1189       const WrappedIterator<OtherIterator, OtherElementType>& it) noexcept
1190       : it_(it.base()) {}
1191 
1192   constexpr reference operator*() const noexcept { return *it_; }
1193   constexpr pointer operator->() const noexcept { return it_.operator->(); }
1194 
1195   constexpr WrappedIterator& operator++() noexcept {
1196     ++it_;
1197     return *this;
1198   }
1199   constexpr WrappedIterator operator++(int) noexcept {
1200     WrappedIterator result(*this);
1201     ++(*this);
1202     return result;
1203   }
1204 
1205   constexpr WrappedIterator& operator--() noexcept {
1206     --it_;
1207     return *this;
1208   }
1209   constexpr WrappedIterator operator--(int) noexcept {
1210     WrappedIterator result(*this);
1211     --(*this);
1212     return result;
1213   }
1214   constexpr WrappedIterator operator+(difference_type n) const noexcept {
1215     WrappedIterator result(*this);
1216     result += n;
1217     return result;
1218   }
1219   constexpr WrappedIterator& operator+=(difference_type n) noexcept {
1220     it_ += n;
1221     return *this;
1222   }
1223   constexpr WrappedIterator operator-(difference_type n) const noexcept {
1224     return *this + (-n);
1225   }
1226   constexpr WrappedIterator& operator-=(difference_type n) noexcept {
1227     *this += -n;
1228     return *this;
1229   }
1230   constexpr reference operator[](difference_type n) const noexcept {
1231     return it_[n];
1232   }
1233 
1234   constexpr Iterator base() const noexcept { return it_; }
1235 
1236  private:
1237   template <typename OtherIterator, typename OtherElementType>
1238   friend class WrappedIterator;
1239 
1240  private:
1241   Iterator it_;
1242 };
1243 
1244 template <typename Iterator, typename ElementType, typename OtherIterator,
1245           typename OtherElementType>
1246 constexpr bool operator==(
1247     const WrappedIterator<Iterator, ElementType>& x,
1248     const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
1249   return x.base() == y.base();
1250 }
1251 
1252 template <typename Iterator, typename ElementType, typename OtherIterator,
1253           typename OtherElementType>
1254 constexpr bool operator<(
1255     const WrappedIterator<Iterator, ElementType>& x,
1256     const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
1257   return x.base() < y.base();
1258 }
1259 
1260 template <typename Iterator, typename ElementType, typename OtherIterator,
1261           typename OtherElementType>
1262 constexpr bool operator!=(
1263     const WrappedIterator<Iterator, ElementType>& x,
1264     const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
1265   return !(x == y);
1266 }
1267 
1268 template <typename Iterator, typename ElementType, typename OtherIterator,
1269           typename OtherElementType>
1270 constexpr bool operator>(
1271     const WrappedIterator<Iterator, ElementType>& x,
1272     const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
1273   return y < x;
1274 }
1275 
1276 template <typename Iterator, typename ElementType, typename OtherIterator,
1277           typename OtherElementType>
1278 constexpr bool operator>=(
1279     const WrappedIterator<Iterator, ElementType>& x,
1280     const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
1281   return !(x < y);
1282 }
1283 
1284 template <typename Iterator, typename ElementType, typename OtherIterator,
1285           typename OtherElementType>
1286 constexpr bool operator<=(
1287     const WrappedIterator<Iterator, ElementType>& x,
1288     const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept {
1289   return !(y < x);
1290 }
1291 
1292 template <typename Iterator, typename ElementType, typename OtherIterator,
1293           typename OtherElementType>
1294 constexpr auto operator-(
1295     const WrappedIterator<Iterator, ElementType>& x,
1296     const WrappedIterator<OtherIterator, OtherElementType>& y) noexcept
1297     -> decltype(x.base() - y.base()) {
1298   return x.base() - y.base();
1299 }
1300 
1301 template <typename Iterator, typename ElementType>
1302 constexpr WrappedIterator<Iterator, ElementType> operator+(
1303     typename WrappedIterator<Iterator, ElementType>::difference_type n,
1304     WrappedIterator<Iterator, ElementType> x) noexcept {
1305   x += n;
1306   return x;
1307 }
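// Editor's note: WrappedIterator forwards every operation to the underlying
// iterator, optionally re-typing the element via ElementType; the free
// operators above make comparisons and distances work across wrappers of
// convertible iterator types. A minimal sketch (illustrative only):
//
//   #include <vector>
//
//   int WrappedIteratorSketch() {
//     std::vector<int> values = {1, 2, 3};
//     using Wrapped =
//         v8::internal::WrappedIterator<std::vector<int>::iterator>;
//     Wrapped first(values.begin());
//     Wrapped last(values.end());
//     int sum = 0;
//     for (Wrapped it = first; it != last; ++it) sum += *it;
//     return sum + static_cast<int>(last - first);  // sum == 6, distance == 3
//   }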
1308 
1309 // Helper functions about values contained in handles.
1310 // A value is either an indirect pointer or a direct pointer, depending on
1311 // whether direct local support is enabled.
1312 class ValueHelper final {
1313  public:
1314 #ifdef V8_ENABLE_DIRECT_LOCAL
1315   static constexpr Address kTaggedNullAddress = 1;
1316   static constexpr Address kEmpty = kTaggedNullAddress;
1317 #else
1318   static constexpr Address kEmpty = kNullAddress;
1319 #endif  // V8_ENABLE_DIRECT_LOCAL
1320 
1321   template <typename T>
1322   V8_INLINE static bool IsEmpty(T* value) {
1323     return reinterpret_cast<Address>(value) == kEmpty;
1324   }
1325 
1326   // Returns a handle's "value" for all kinds of abstract handles. For Local,
1327   // it is equivalent to `*handle`. The variadic parameters support handle
1328   // types with extra type parameters, like `Persistent<T, M>`.
1329   template <template <typename T, typename... Ms> typename H, typename T,
1330             typename... Ms>
1331   V8_INLINE static T* HandleAsValue(const H<T, Ms...>& handle) {
1332     return handle.template value<T>();
1333   }
1334 
1335 #ifdef V8_ENABLE_DIRECT_LOCAL
1336 
1337   template <typename T>
1338   V8_INLINE static Address ValueAsAddress(const T* value) {
1339     return reinterpret_cast<Address>(value);
1340   }
1341 
1342   template <typename T, bool check_null = true, typename S>
1343   V8_INLINE static T* SlotAsValue(S* slot) {
1344     if (check_null && slot == nullptr) {
1345       return reinterpret_cast<T*>(kTaggedNullAddress);
1346     }
1347     return *reinterpret_cast<T**>(slot);
1348   }
1349 
1350 #else  // !V8_ENABLE_DIRECT_LOCAL
1351 
1352   template <typename T>
1353   V8_INLINE static Address ValueAsAddress(const T* value) {
1354     return *reinterpret_cast<const Address*>(value);
1355   }
1356 
1357   template <typename T, bool check_null = true, typename S>
1358   V8_INLINE static T* SlotAsValue(S* slot) {
1359     return reinterpret_cast<T*>(slot);
1360   }
1361 
1362 #endif  // V8_ENABLE_DIRECT_LOCAL
1363 };
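// Editor's note: ValueHelper hides the difference between the two handle
// representations. With V8_ENABLE_DIRECT_LOCAL, a handle's internal value is
// the tagged pointer itself and "empty" is the tagged null address (1);
// without it, the value is the address of a slot that holds the pointer and
// "empty" is a null slot. A conceptual sketch (illustrative only):
//
//   template <typename T>
//   bool IsEmptyInternalValue(T* value) {
//     // Compares against ValueHelper::kEmpty for whichever representation is
//     // in effect in this build.
//     return v8::internal::ValueHelper::IsEmpty(value);
//   }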
1364 
1365 /**
1366  * Helper functions about handles.
1367  */
1368 class HandleHelper final {
1369  public:
1370   /**
1371    * Checks whether two handles are equal.
1372    * Two handles are equal iff they are both empty, or they are both non-empty
1373    * and refer to the same object.
1374    *
1375    * If both handles refer to JS objects, this is the same as strict equality.
1376    * For primitives, such as numbers or strings, a `false` return value does
1377    * not imply that the values differ in the JavaScript sense.
1378    * Use `Value::StrictEquals()` to compare primitives for equality.
1379    */
1380   template <typename T1, typename T2>
1381   V8_INLINE static bool EqualHandles(const T1& lhs, const T2& rhs) {
1382     if (lhs.IsEmpty()) return rhs.IsEmpty();
1383     if (rhs.IsEmpty()) return false;
1384     return lhs.ptr() == rhs.ptr();
1385   }
1386 
1387   static V8_EXPORT bool IsOnStack(const void* ptr);
1388   static V8_EXPORT void VerifyOnStack(const void* ptr);
1389   static V8_EXPORT void VerifyOnMainThread();
1390 };
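// Editor's note: EqualHandles compares handle identity, not JavaScript value
// equality. A sketch of the distinction in terms of the public API
// (illustrative only; Local's operator== is identity-based, while
// Value::StrictEquals compares values):
//
//   void HandleEqualitySketch(v8::Local<v8::String> a,
//                             v8::Local<v8::String> b) {
//     bool same_object = (a == b);           // handle identity
//     bool same_value = a->StrictEquals(b);  // JavaScript ===
//     // Two distinct string objects with identical contents yield
//     // same_object == false but same_value == true.
//   }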
1391 
1392 V8_EXPORT void VerifyHandleIsNonEmpty(bool is_empty);
1393 
1394 }  // namespace internal
1395 }  // namespace v8
1396 
1397 #endif  // INCLUDE_V8_INTERNAL_H_