// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_
#define INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_

#include <cstddef>
#include <cstdint>

#include "cppgc/heap-handle.h"
#include "cppgc/heap-state.h"
#include "cppgc/internal/api-constants.h"
#include "cppgc/internal/atomic-entry-flag.h"
#include "cppgc/internal/base-page-handle.h"
#include "cppgc/internal/member-storage.h"
#include "cppgc/platform.h"
#include "cppgc/sentinel-pointer.h"
#include "cppgc/trace-trait.h"
#include "v8config.h"  // NOLINT(build/include_directory)

#if defined(CPPGC_CAGED_HEAP)
#include "cppgc/internal/caged-heap-local-data.h"
#include "cppgc/internal/caged-heap.h"
#endif

namespace cppgc {

class HeapHandle;

namespace internal {

#if defined(CPPGC_CAGED_HEAP)
class WriteBarrierTypeForCagedHeapPolicy;
#else   // !CPPGC_CAGED_HEAP
class WriteBarrierTypeForNonCagedHeapPolicy;
#endif  // !CPPGC_CAGED_HEAP

class V8_EXPORT WriteBarrier final {
 public:
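  // Barrier kind computed for a given write: `kMarking` requests a marking
  // barrier (incremental/concurrent marking is in progress), `kGenerational`
  // requests a generational barrier (only when CPPGC_YOUNG_GENERATION is
  // enabled), and `kNone` means no barrier work is required.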
  enum class Type : uint8_t {
    kNone,
    kMarking,
    kGenerational,
  };

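  // Kind of slot the generational barrier is invoked for: a precise slot, a
  // precise slot holding an uncompressed pointer, or an imprecise slot where
  // only the containing source object is known.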
  enum class GenerationalBarrierType : uint8_t {
    kPreciseSlot,
    kPreciseUncompressedSlot,
    kImpreciseSlot,
  };

  struct Params {
    HeapHandle* heap = nullptr;
#if V8_ENABLE_CHECKS
    Type type = Type::kNone;
#endif  // V8_ENABLE_CHECKS
#if defined(CPPGC_CAGED_HEAP)
    uintptr_t slot_offset = 0;
    uintptr_t value_offset = 0;
#endif  // CPPGC_CAGED_HEAP
  };

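  // Whether the written value is available when the barrier type is computed.
  // With `kNoValuePresent` the heap is obtained via the `HeapHandleCallback`
  // instead of being derived from the value.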
  enum class ValueMode {
    kValuePresent,
    kNoValuePresent,
  };

  // Returns the required write barrier for a given `slot` and `value`.
  static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value,
                                            Params& params);
  // Returns the required write barrier for a given `slot` and `value`.
  template <typename MemberStorage>
  static V8_INLINE Type GetWriteBarrierType(const void* slot, MemberStorage,
                                            Params& params);
  // Returns the required write barrier for a given `slot`.
  template <typename HeapHandleCallback>
  static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params,
                                            HeapHandleCallback callback);
  // Returns the required write barrier for a given `value`.
  static V8_INLINE Type GetWriteBarrierType(const void* value, Params& params);
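  //
  // Rough usage sketch (hypothetical caller; the actual callers live in the
  // Member/Persistent write paths): query the barrier type once, then dispatch
  // to the matching barrier with the filled-in `params`:
  //
  //   WriteBarrier::Params params;
  //   switch (WriteBarrier::GetWriteBarrierType(slot, value, params)) {
  //     case WriteBarrier::Type::kMarking:
  //       WriteBarrier::DijkstraMarkingBarrier(params, value);
  //       break;
  //     case WriteBarrier::Type::kGenerational:
  //       WriteBarrier::GenerationalBarrier<
  //           WriteBarrier::GenerationalBarrierType::kPreciseSlot>(params,
  //                                                                slot);
  //       break;
  //     case WriteBarrier::Type::kNone:
  //       break;
  //   }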

#ifdef CPPGC_SLIM_WRITE_BARRIER
  // A write barrier that combines `GenerationalBarrier()` and
  // `DijkstraMarkingBarrier()`. We only pass a single parameter here to clobber
  // as few registers as possible.
  template <WriteBarrierSlotType>
  static V8_NOINLINE void V8_PRESERVE_MOST
  CombinedWriteBarrierSlow(const void* slot);
#endif  // CPPGC_SLIM_WRITE_BARRIER

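  // Dijkstra-style barriers mark the value being written so that incremental
  // or concurrent marking cannot miss it; the Steele-style barrier instead
  // re-visits the given object so that it is traced again. The Range variant
  // applies the marking barrier to a whole array of elements.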
  static V8_INLINE void DijkstraMarkingBarrier(const Params& params,
                                               const void* object);
  static V8_INLINE void DijkstraMarkingBarrierRange(
      const Params& params, const void* first_element, size_t element_size,
      size_t number_of_elements, TraceCallback trace_callback);
  static V8_INLINE void SteeleMarkingBarrier(const Params& params,
                                             const void* object);
#if defined(CPPGC_YOUNG_GENERATION)
  template <GenerationalBarrierType>
  static V8_INLINE void GenerationalBarrier(const Params& params,
                                            const void* slot);
#else  // !CPPGC_YOUNG_GENERATION
  template <GenerationalBarrierType>
  static V8_INLINE void GenerationalBarrier(const Params& params,
                                            const void* slot) {}
#endif  // CPPGC_YOUNG_GENERATION

#if V8_ENABLE_CHECKS
  static void CheckParams(Type expected_type, const Params& params);
#else   // !V8_ENABLE_CHECKS
  static void CheckParams(Type expected_type, const Params& params) {}
#endif  // !V8_ENABLE_CHECKS

  // The FlagUpdater class allows cppgc-internal code to update
  // |write_barrier_enabled_|.
  class FlagUpdater;
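  // Conservative fast check: a `false` result guarantees that no heap
  // currently needs a write barrier (see `AtomicEntryFlag::MightBeEntered()`).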
  static bool IsEnabled() { return write_barrier_enabled_.MightBeEntered(); }

 private:
  WriteBarrier() = delete;

#if defined(CPPGC_CAGED_HEAP)
  using WriteBarrierTypePolicy = WriteBarrierTypeForCagedHeapPolicy;
#else   // !CPPGC_CAGED_HEAP
  using WriteBarrierTypePolicy = WriteBarrierTypeForNonCagedHeapPolicy;
#endif  // !CPPGC_CAGED_HEAP

  static void DijkstraMarkingBarrierSlow(const void* value);
  static void DijkstraMarkingBarrierSlowWithSentinelCheck(const void* value);
  static void DijkstraMarkingBarrierRangeSlow(HeapHandle& heap_handle,
                                              const void* first_element,
                                              size_t element_size,
                                              size_t number_of_elements,
                                              TraceCallback trace_callback);
  static void SteeleMarkingBarrierSlow(const void* value);
  static void SteeleMarkingBarrierSlowWithSentinelCheck(const void* value);

#if defined(CPPGC_YOUNG_GENERATION)
  static CagedHeapLocalData& GetLocalData(HeapHandle&);
  static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
                                      const AgeTable& age_table,
                                      const void* slot, uintptr_t value_offset,
                                      HeapHandle* heap_handle);
  static void GenerationalBarrierForUncompressedSlotSlow(
      const CagedHeapLocalData& local_data, const AgeTable& age_table,
      const void* slot, uintptr_t value_offset, HeapHandle* heap_handle);
  static void GenerationalBarrierForSourceObjectSlow(
      const CagedHeapLocalData& local_data, const void* object,
      HeapHandle* heap_handle);
#endif  // CPPGC_YOUNG_GENERATION

  static AtomicEntryFlag write_barrier_enabled_;
};

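// Records the computed barrier type in `params` (only when checks are enabled)
// and returns it, so the policies below can set and return in one expression.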
template <WriteBarrier::Type type>
V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params& params) {
  if constexpr (type == WriteBarrier::Type::kNone)
    return WriteBarrier::Type::kNone;
#if V8_ENABLE_CHECKS
  params.type = type;
#endif  // V8_ENABLE_CHECKS
  return type;
}

#if defined(CPPGC_CAGED_HEAP)
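// Barrier-type computation when the caged heap is enabled: pointers are first
// tested against the cage, and for the generational barrier the slot/value
// offsets within the cage are stored in `params`.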
class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
 public:
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback,
            typename MemberStorage>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, MemberStorage value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return GetNoSlot(value, params, callback);
  }

 private:
  WriteBarrierTypeForCagedHeapPolicy() = delete;

  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type GetNoSlot(const void* value,
                                                WriteBarrier::Params& params,
                                                HeapHandleCallback) {
    const bool within_cage = CagedHeapBase::IsWithinCage(value);
    if (!within_cage) return WriteBarrier::Type::kNone;

    // We know that |value| points either within the normal page or to the
    // beginning of a large page, so extract the page header by bitmasking.
    BasePageHandle* page =
        BasePageHandle::FromPayload(const_cast<void*>(value));

    HeapHandle& heap_handle = page->heap_handle();
    if (V8_UNLIKELY(heap_handle.is_incremental_marking_in_progress())) {
      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
    }

    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
  }

  template <WriteBarrier::ValueMode value_mode>
  struct ValueModeDispatch;
};

template <>
struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kValuePresent> {
  template <typename HeapHandleCallback, typename MemberStorage>
  static V8_INLINE WriteBarrier::Type Get(const void* slot,
                                          MemberStorage storage,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback) {
    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);

    return BarrierEnabledGet(slot, storage.Load(), params);
  }

  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback) {
    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);

    return BarrierEnabledGet(slot, value, params);
  }

 private:
  static V8_INLINE WriteBarrier::Type BarrierEnabledGet(
      const void* slot, const void* value, WriteBarrier::Params& params) {
    const bool within_cage = CagedHeapBase::AreWithinCage(slot, value);
    if (!within_cage) return WriteBarrier::Type::kNone;

    // We know that |value| points either within the normal page or to the
    // beginning of a large page, so extract the page header by bitmasking.
    BasePageHandle* page =
        BasePageHandle::FromPayload(const_cast<void*>(value));

    HeapHandle& heap_handle = page->heap_handle();
    if (V8_LIKELY(!heap_handle.is_incremental_marking_in_progress())) {
#if defined(CPPGC_YOUNG_GENERATION)
      if (!heap_handle.is_young_generation_enabled())
        return WriteBarrier::Type::kNone;
      params.heap = &heap_handle;
      params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
      params.value_offset = CagedHeapBase::OffsetFromAddress(value);
      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
#else   // !CPPGC_YOUNG_GENERATION
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
#endif  // !CPPGC_YOUNG_GENERATION
    }

    // Use marking barrier.
    params.heap = &heap_handle;
    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
  }
};

template <>
struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kNoValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    if (V8_LIKELY(!WriteBarrier::IsEnabled()))
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);

    HeapHandle& handle = callback();
#if defined(CPPGC_YOUNG_GENERATION)
    if (V8_LIKELY(!handle.is_incremental_marking_in_progress())) {
      if (!handle.is_young_generation_enabled()) {
        return WriteBarrier::Type::kNone;
      }
      params.heap = &handle;
      // Bail out if the slot is outside the cage, e.g. on the stack.
      if (V8_UNLIKELY(!CagedHeapBase::IsWithinCage(slot))) {
        return SetAndReturnType<WriteBarrier::Type::kNone>(params);
      }
      params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
      return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
    }
#else   // !defined(CPPGC_YOUNG_GENERATION)
    if (V8_UNLIKELY(!handle.is_incremental_marking_in_progress())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
#endif  // !defined(CPPGC_YOUNG_GENERATION)
    params.heap = &handle;
    return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
  }
};

#endif  // CPPGC_CAGED_HEAP

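// Barrier-type computation without a caged heap. Only the marking barrier is
// dispatched on this path; there is no generational barrier support here.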
class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
 public:
  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
  }

  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* slot, RawPointer value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    return ValueModeDispatch<value_mode>::Get(slot, value.Load(), params,
                                              callback);
  }

  template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void* value,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    // The slot will never be used in `Get()` below.
    return Get<WriteBarrier::ValueMode::kValuePresent>(nullptr, value, params,
                                                       callback);
  }

 private:
  template <WriteBarrier::ValueMode value_mode>
  struct ValueModeDispatch;

  WriteBarrierTypeForNonCagedHeapPolicy() = delete;
};

template <>
struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void*, const void* object,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    // The following check covers nullptr as well as sentinel pointer.
    if (object <= static_cast<void*>(kSentinelPointer)) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
    if (V8_LIKELY(!WriteBarrier::IsEnabled())) {
      return SetAndReturnType<WriteBarrier::Type::kNone>(params);
    }
    // We know that |object| is within the normal page or in the beginning of a
    // large page, so extract the page header by bitmasking.
    BasePageHandle* page =
        BasePageHandle::FromPayload(const_cast<void*>(object));

    HeapHandle& heap_handle = page->heap_handle();
    if (V8_LIKELY(heap_handle.is_incremental_marking_in_progress())) {
      return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
    }
    return SetAndReturnType<WriteBarrier::Type::kNone>(params);
  }
};

template <>
struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
    WriteBarrier::ValueMode::kNoValuePresent> {
  template <typename HeapHandleCallback>
  static V8_INLINE WriteBarrier::Type Get(const void*, const void*,
                                          WriteBarrier::Params& params,
                                          HeapHandleCallback callback) {
    if (V8_UNLIKELY(WriteBarrier::IsEnabled())) {
      HeapHandle& handle = callback();
      if (V8_LIKELY(handle.is_incremental_marking_in_progress())) {
        params.heap = &handle;
        return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
      }
    }
    return WriteBarrier::Type::kNone;
  }
};

// static
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* slot, const void* value, WriteBarrier::Params& params) {
  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
                                                               params, []() {});
}

// static
template <typename MemberStorage>
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* slot, MemberStorage value, WriteBarrier::Params& params) {
  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
                                                               params, []() {});
}

// static
template <typename HeapHandleCallback>
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* slot, WriteBarrier::Params& params,
    HeapHandleCallback callback) {
  return WriteBarrierTypePolicy::Get<ValueMode::kNoValuePresent>(
      slot, nullptr, params, callback);
}

// static
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
    const void* value, WriteBarrier::Params& params) {
  return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(value, params,
                                                               []() {});
}

// static
void WriteBarrier::DijkstraMarkingBarrier(const Params& params,
                                          const void* object) {
  CheckParams(Type::kMarking, params);
#if defined(CPPGC_CAGED_HEAP)
  // Caged heap already filters out sentinels.
  DijkstraMarkingBarrierSlow(object);
#else   // !CPPGC_CAGED_HEAP
  DijkstraMarkingBarrierSlowWithSentinelCheck(object);
#endif  // !CPPGC_CAGED_HEAP
}

// static
void WriteBarrier::DijkstraMarkingBarrierRange(const Params& params,
                                               const void* first_element,
                                               size_t element_size,
                                               size_t number_of_elements,
                                               TraceCallback trace_callback) {
  CheckParams(Type::kMarking, params);
  DijkstraMarkingBarrierRangeSlow(*params.heap, first_element, element_size,
                                  number_of_elements, trace_callback);
}

// static
void WriteBarrier::SteeleMarkingBarrier(const Params& params,
                                        const void* object) {
  CheckParams(Type::kMarking, params);
#if defined(CPPGC_CAGED_HEAP)
  // Caged heap already filters out sentinels.
  SteeleMarkingBarrierSlow(object);
#else   // !CPPGC_CAGED_HEAP
  SteeleMarkingBarrierSlowWithSentinelCheck(object);
#endif  // !CPPGC_CAGED_HEAP
}

#if defined(CPPGC_YOUNG_GENERATION)

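// The generational barrier records old-to-young pointers so that a subsequent
// minor GC can find them; the age of the source slot is looked up in the age
// table before taking a slow path.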
// static
template <WriteBarrier::GenerationalBarrierType type>
void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
  CheckParams(Type::kGenerational, params);

  const CagedHeapLocalData& local_data = CagedHeapLocalData::Get();
  const AgeTable& age_table = local_data.age_table;

  // Bail out if the slot (precise or imprecise) is in the young generation.
  if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
    return;

  // Dispatch between different types of barriers.
  // TODO(chromium:1029379): Consider reloading local_data in the slow path to
  // reduce register pressure.
  if constexpr (type == GenerationalBarrierType::kPreciseSlot) {
    GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset,
                            params.heap);
  } else if constexpr (type ==
                       GenerationalBarrierType::kPreciseUncompressedSlot) {
    GenerationalBarrierForUncompressedSlotSlow(
        local_data, age_table, slot, params.value_offset, params.heap);
  } else {
    GenerationalBarrierForSourceObjectSlow(local_data, slot, params.heap);
  }
}

#endif  // CPPGC_YOUNG_GENERATION

}  // namespace internal
}  // namespace cppgc

#endif  // INCLUDE_CPPGC_INTERNAL_WRITE_BARRIER_H_