#if defined(CPPGC_CAGED_HEAP)
#include "include/cppgc/internal/caged-heap-local-data.h"
#endif  // defined(CPPGC_CAGED_HEAP)
// static
void WriteBarrier::DijkstraMarkingBarrierSlow(const void* value) {
  const BasePage* page = BasePage::FromPayload(value);
  const auto& heap = page->heap();

  // GetWriteBarrierType() checks the marking state; no barrier should run
  // from the atomic pause.
  DCHECK(heap.marker());
  DCHECK(!heap.in_atomic_pause());
  DCHECK(heap.is_incremental_marking_in_progress());

  // Mark the target object; bail out if it was already marked.
  auto& header =
      const_cast<HeapObjectHeader&>(page->ObjectHeaderFromInnerAddress(value));
  if (!header.TryMarkAtomic()) return;

  ProcessMarkValue<MarkerBase::WriteBarrierType::kDijkstra>(
      header, heap.marker(), value);
}
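// A minimal sketch of the invariant the Dijkstra barrier preserves, with
// illustrative names rather than cppgc's API: on `parent.field = child`, the
// barrier shades the written *value*, so a black (fully scanned) object can
// never end up pointing at a white (unscanned) one during incremental marking.
//
//   // Illustrative only; `Object`, `marked`, and `worklist` are assumed.
//   void DijkstraWriteBarrier(Object** slot, Object* child) {
//     *slot = child;                         // the store itself
//     if (marking_active && child && !child->marked) {
//       child->marked = true;                // cf. TryMarkAtomic() above
//       worklist.push(child);                // trace it later
//     }
//   }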
// static
void WriteBarrier::DijkstraMarkingBarrierRangeSlow(
    HeapHandle& heap_handle, const void* first_element, size_t element_size,
    size_t number_of_elements, TraceCallback trace_callback) {
  auto& heap_base = HeapBase::From(heap_handle);

  // GetWriteBarrierType() checks the marking state; no barrier should run
  // from the atomic pause.
  DCHECK(heap_base.marker());
  DCHECK(!heap_base.in_atomic_pause());

  cppgc::subtle::DisallowGarbageCollectionScope disallow_gc_scope(heap_base);
  // Trace each element of the array through the marker's visitor.
  const char* array = static_cast<const char*>(first_element);
  while (number_of_elements-- > 0) {
    trace_callback(&heap_base.marker()->Visitor(), array);
    array += element_size;
  }
}
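// The range variant exists for arrays of inline (non-Member) values: there is
// no single object header to mark, so every element is handed to its
// TraceCallback instead. A hedged usage sketch, assuming a hypothetical
// inlined `Point` type and element count:
//
//   // Illustrative only; `points` and `count` are assumptions.
//   WriteBarrier::DijkstraMarkingBarrierRangeSlow(
//       heap_handle, points, sizeof(Point), count,
//       TraceTrait<Point>::Trace);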
// static
void WriteBarrier::SteeleMarkingBarrierSlow(const void* value) {
  const BasePage* page = BasePage::FromPayload(value);
  const auto& heap = page->heap();

  // GetWriteBarrierType() checks the marking state; no barrier should run
  // from the atomic pause.
  DCHECK(heap.marker());
  DCHECK(!heap.in_atomic_pause());
  DCHECK(heap.is_incremental_marking_in_progress());

  // Re-process only objects that are already marked.
  auto& header =
      const_cast<HeapObjectHeader&>(page->ObjectHeaderFromInnerAddress(value));
  if (!header.IsMarked<AccessMode::kAtomic>()) return;

  ProcessMarkValue<MarkerBase::WriteBarrierType::kSteele>(
      header, heap.marker(), value);
}
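// Unlike the Dijkstra flavor, the Steele barrier re-processes a value only if
// it is *already marked*: the marker may have scanned it before this store,
// so its outgoing edges have to be visited again. Conceptually:
//
//   // Illustrative only.
//   void SteeleWriteBarrier(Object* child) {
//     if (marking_active && child->marked)
//       worklist.push(child);  // re-trace; it may now reach white objects
//   }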
#if defined(CPPGC_YOUNG_GENERATION)

// static
void WriteBarrier::GenerationalBarrierSlow(
    const CagedHeapLocalData& local_data, const AgeTable& age_table,
    const void* slot, uintptr_t value_offset, HeapHandle* heap_handle) {
  DCHECK(slot);
  DCHECK(heap_handle);
  DCHECK_GT(api_constants::kCagedHeapMaxReservationSize, value_offset);

  auto& heap = HeapBase::From(*heap_handle);
  // A write during the atomic pause (e.g. from a pre-finalizer) may reach
  // this slow path; there is nothing to record in that case.
  if (heap.in_atomic_pause()) return;

  // An old value needs no recording; only edges that may point to a young
  // object matter for the minor GC.
  if (value_offset > 0 && age_table.GetAge(value_offset) == AgeTable::Age::kOld)
    return;

  // Record the slot in the remembered set.
  heap.remembered_set().AddSlot((const_cast<void*>(slot)));
}
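// The generational barrier records old-to-young edges so that a minor GC can
// use them as roots instead of scanning the entire old generation. A minimal
// sketch of the idea (illustrative names, not the cppgc types above):
//
//   // Illustrative only.
//   void GenerationalBarrier(void** slot, void* value) {
//     if (IsInOldSpace(slot) && IsInYoungSpace(value))
//       remembered_set.insert(slot);  // cf. AddSlot() above
//   }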
// static
void WriteBarrier::GenerationalBarrierForUncompressedSlotSlow(
    const CagedHeapLocalData& local_data, const AgeTable& age_table,
    const void* slot, uintptr_t value_offset, HeapHandle* heap_handle) {
  DCHECK(slot);
  DCHECK(heap_handle);
  DCHECK_GT(api_constants::kCagedHeapMaxReservationSize, value_offset);

  auto& heap = HeapBase::From(*heap_handle);
  if (heap.in_atomic_pause()) return;

  if (value_offset > 0 && age_table.GetAge(value_offset) == AgeTable::Age::kOld)
    return;

  // Record the slot as holding an uncompressed (full-width) pointer.
  heap.remembered_set().AddUncompressedSlot((const_cast<void*>(slot)));
}
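// Compressed and uncompressed slots are kept apart in the remembered set
// because a minor GC has to know the width of a recorded slot before it can
// decode, and later update, the pointer stored in it.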
// static
void WriteBarrier::GenerationalBarrierForSourceObjectSlow(
    const CagedHeapLocalData& local_data, const void* inner_pointer,
    HeapHandle* heap_handle) {
  DCHECK(inner_pointer);
  DCHECK(heap_handle);

  auto& heap = HeapBase::From(*heap_handle);

  auto& object_header =
      BasePage::FromInnerAddress(&heap, inner_pointer)
          ->ObjectHeaderFromInnerAddress<AccessMode::kAtomic>(inner_pointer);

  // Record the whole source object; its slots are found by retracing it.
  heap.remembered_set().AddSourceObject(
      const_cast<HeapObjectHeader&>(object_header));
}

#endif  // defined(CPPGC_YOUNG_GENERATION)
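// Recording a whole source object, rather than an individual slot, covers
// cases where a precise slot location is not available (e.g. objects still
// under construction): the minor GC simply retraces the object to discover
// any pointers into the young generation.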
#if V8_ENABLE_CHECKS

// static
void WriteBarrier::CheckParams(Type expected_type, const Params& params) {
  CHECK_EQ(expected_type, params.type);
}

#endif  // V8_ENABLE_CHECKS
#if defined(CPPGC_YOUNG_GENERATION)

// static
YoungGenerationEnabler& YoungGenerationEnabler::Instance() {
  static v8::base::LeakyObject<YoungGenerationEnabler> instance;
  return *instance.get();
}

void YoungGenerationEnabler::Enable() {
  auto& instance = Instance();
  v8::base::MutexGuard _(&instance.mutex_);
  if (++instance.is_enabled_ == 1) {
    // Enter the flag so that the check in the write barrier will always
    // trigger when young generation is enabled.
    WriteBarrier::FlagUpdater::Enter();
  }
}

void YoungGenerationEnabler::Disable() {
  auto& instance = Instance();
  v8::base::MutexGuard _(&instance.mutex_);
  DCHECK_LT(0, instance.is_enabled_);
  if (--instance.is_enabled_ == 0) {
    WriteBarrier::FlagUpdater::Exit();
  }
}

bool YoungGenerationEnabler::IsEnabled() {
  auto& instance = Instance();
  v8::base::MutexGuard _(&instance.mutex_);
  return instance.is_enabled_;
}

#endif  // defined(CPPGC_YOUNG_GENERATION)
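// Enable()/Disable() implement a mutex-guarded reference count: the global
// write-barrier flag is flipped only on the 0 -> 1 and 1 -> 0 transitions.
// The same pattern in isolation, as a hedged sketch:
//
//   // Illustrative only; `flag_` stands in for WriteBarrier::FlagUpdater.
//   void Enable() {
//     std::lock_guard<std::mutex> guard(mutex_);
//     if (++refcount_ == 1) flag_.store(true, std::memory_order_relaxed);
//   }
//   void Disable() {
//     std::lock_guard<std::mutex> guard(mutex_);
//     if (--refcount_ == 0) flag_.store(false, std::memory_order_relaxed);
//   }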
#ifdef CPPGC_SLIM_WRITE_BARRIER

// static
template <WriteBarrierSlotType SlotType>
void WriteBarrier::CombinedWriteBarrierSlow(const void* slot) {
  DCHECK_NOT_NULL(slot);

  // Re-read the just-written value from the slot, decompressing if needed.
  const void* value = nullptr;
#if defined(CPPGC_POINTER_COMPRESSION)
  if constexpr (SlotType == WriteBarrierSlotType::kCompressed) {
    value = CompressedPointer::Decompress(
        *static_cast<const CompressedPointer::IntegralType*>(slot));
  } else {
    value = *reinterpret_cast<const void* const*>(slot);
  }
#else
  static_assert(SlotType == WriteBarrierSlotType::kUncompressed);
  value = *reinterpret_cast<const void* const*>(slot);
#endif
  WriteBarrier::Params params;
  const WriteBarrier::Type type =
      WriteBarrier::GetWriteBarrierType(slot, value, params);
  switch (type) {
    case WriteBarrier::Type::kGenerational:
      WriteBarrier::GenerationalBarrier<
          WriteBarrier::GenerationalBarrierType::kPreciseSlot>(params, slot);
      break;
    case WriteBarrier::Type::kMarking:
      WriteBarrier::DijkstraMarkingBarrier(params, value);
      break;
    case WriteBarrier::Type::kNone:
      // The inline fast path is approximate; the exact check above may still
      // conclude that no barrier is needed.
      break;
  }
}

// Instantiate the slow path for both slot types.
template V8_EXPORT_PRIVATE void WriteBarrier::CombinedWriteBarrierSlow<
    WriteBarrierSlotType::kUncompressed>(const void* slot);
#if defined(CPPGC_POINTER_COMPRESSION)
template V8_EXPORT_PRIVATE void WriteBarrier::CombinedWriteBarrierSlow<
    WriteBarrierSlotType::kCompressed>(const void* slot);
#endif  // defined(CPPGC_POINTER_COMPRESSION)

#endif  // CPPGC_SLIM_WRITE_BARRIER
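// With CPPGC_SLIM_WRITE_BARRIER, the inline fast path emits only a single
// flag check and defers everything else to this out-of-line slow path, which
// re-reads the slot, classifies the barrier exactly, and dispatches. A hedged
// sketch of the caller side:
//
//   // Illustrative only.
//   if (V8_UNLIKELY(WriteBarrier::IsEnabled()))
//     WriteBarrier::CombinedWriteBarrierSlow<SlotType>(slot);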