v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
write-barrier.cc
Go to the documentation of this file.
1// Copyright 2020 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
13#include "src/heap/cppgc/heap.h"
16
17#if defined(CPPGC_CAGED_HEAP)
19#endif
20
21namespace cppgc {
22namespace internal {
23
24// static
// NOTE(review): the extracted page dropped the definition that belongs on doc
// line 25/26 — per the cross-reference trailer this is the static
// `AtomicEntryFlag write_barrier_enabled_` member. Confirm against the
// original write-barrier.cc.
26
27// static
// Fragment of DijkstraMarkingBarrierSlowWithSentinelCheck(const void* value):
// the opening signature line (doc 28) and the forwarding call into the slow
// path (doc 32) were lost in extraction. What remains shows the bailout:
// null and sentinel pointers never need a marking barrier.
29    const void* value) {
30  if (!value || value == kSentinelPointer) return;
31
33}
34
35// static
// Slow path of the Dijkstra-style marking barrier: marks the object that
// contains `value` so it is not missed by concurrent/incremental marking.
// NOTE(review): the signature line (doc 36) was lost in extraction; per the
// cross-reference trailer it is DijkstraMarkingBarrierSlow(const void* value).
37  const BasePage* page = BasePage::FromPayload(value);
38  const auto& heap = page->heap();
39
40  // GetWriteBarrierType() checks marking state.
41  DCHECK(heap.marker());
42  // No write barriers should be executed from atomic pause marking.
43  DCHECK(!heap.in_atomic_pause());
44  DCHECK(heap.is_incremental_marking_in_progress());
45
  // const_cast is needed because the page lookup is const; the marker mutates
  // the header's mark bits.
46  auto& header =
47      const_cast<HeapObjectHeader&>(page->ObjectHeaderFromInnerAddress(value));
48  heap.marker()->WriteBarrierForObject<MarkerBase::WriteBarrierType::kDijkstra>(
49      header);
50}
51
52// static
// Slow path of the range write barrier: invokes `trace_callback` on each of
// `number_of_elements` elements of `element_size` bytes starting at
// `first_element`, feeding them to the marker's visitor.
// NOTE(review): the opening line (doc 53,
// "void WriteBarrier::DijkstraMarkingBarrierRangeSlow(") was lost in
// extraction; the full signature appears in the cross-reference trailer.
54    HeapHandle& heap_handle, const void* first_element, size_t element_size,
55    size_t number_of_elements, TraceCallback trace_callback) {
56  auto& heap_base = HeapBase::From(heap_handle);
57
58  // GetWriteBarrierType() checks marking state.
59  DCHECK(heap_base.marker());
60  // No write barriers should be executed from atomic pause marking.
61  DCHECK(!heap_base.in_atomic_pause());
62
  // Keep GC disallowed while iterating with raw interior pointers; a
  // collection here could move/free the elements under the callback.
63  cppgc::subtle::DisallowGarbageCollectionScope disallow_gc_scope(heap_base);
64  const char* array = static_cast<const char*>(first_element);
65  while (number_of_elements-- > 0) {
66    trace_callback(&heap_base.marker()->Visitor(), array);
67    array += element_size;
68  }
69}
70
71// static
// Fragment of SteeleMarkingBarrierSlowWithSentinelCheck(const void* value):
// the opening signature line (doc 72) and the forwarding call into
// SteeleMarkingBarrierSlow (doc 76) were lost in extraction. The visible body
// filters out null and sentinel pointers, which need no barrier.
73    const void* value) {
74  if (!value || value == kSentinelPointer) return;
75
77}
78
79// static
// Slow path of the Steele-style marking barrier; identical structure to
// DijkstraMarkingBarrierSlow above but dispatches with WriteBarrierType::kSteele.
// NOTE(review): the signature line (doc 80) was lost in extraction; per the
// cross-reference trailer it is SteeleMarkingBarrierSlow(const void* value).
81  const BasePage* page = BasePage::FromPayload(value);
82  const auto& heap = page->heap();
83
84  // GetWriteBarrierType() checks marking state.
85  DCHECK(heap.marker());
86  // No write barriers should be executed from atomic pause marking.
87  DCHECK(!heap.in_atomic_pause());
88  DCHECK(heap.is_incremental_marking_in_progress());
89
  // const_cast: header lookup is const but marking mutates the header.
90  auto& header =
91      const_cast<HeapObjectHeader&>(page->ObjectHeaderFromInnerAddress(value));
92  heap.marker()->WriteBarrierForObject<MarkerBase::WriteBarrierType::kSteele>(
93      header);
94}
95
96#if defined(CPPGC_YOUNG_GENERATION)
97// static
98void WriteBarrier::GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
99 const AgeTable& age_table,
100 const void* slot,
101 uintptr_t value_offset,
102 HeapHandle* heap_handle) {
103 DCHECK(slot);
104 DCHECK(heap_handle);
105 DCHECK_GT(api_constants::kCagedHeapMaxReservationSize, value_offset);
106 // A write during atomic pause (e.g. pre-finalizer) may trigger the slow path
107 // of the barrier. This is a result of the order of bailouts where not marking
108 // results in applying the generational barrier.
109 auto& heap = HeapBase::From(*heap_handle);
110 if (heap.in_atomic_pause()) return;
111
112 if (value_offset > 0 && age_table.GetAge(value_offset) == AgeTable::Age::kOld)
113 return;
114
115 // Record slot.
116 heap.remembered_set().AddSlot((const_cast<void*>(slot)));
117}
118
119// static
120void WriteBarrier::GenerationalBarrierForUncompressedSlotSlow(
121 const CagedHeapLocalData& local_data, const AgeTable& age_table,
122 const void* slot, uintptr_t value_offset, HeapHandle* heap_handle) {
123 DCHECK(slot);
124 DCHECK(heap_handle);
125 DCHECK_GT(api_constants::kCagedHeapMaxReservationSize, value_offset);
126 // A write during atomic pause (e.g. pre-finalizer) may trigger the slow path
127 // of the barrier. This is a result of the order of bailouts where not marking
128 // results in applying the generational barrier.
129 auto& heap = HeapBase::From(*heap_handle);
130 if (heap.in_atomic_pause()) return;
131
132 if (value_offset > 0 && age_table.GetAge(value_offset) == AgeTable::Age::kOld)
133 return;
134
135 // Record slot.
136 heap.remembered_set().AddUncompressedSlot((const_cast<void*>(slot)));
137}
138
139// static
140void WriteBarrier::GenerationalBarrierForSourceObjectSlow(
141    const CagedHeapLocalData& local_data, const void* inner_pointer,
142    HeapHandle* heap_handle) {
143  DCHECK(inner_pointer);
144  DCHECK(heap_handle);
145
146  auto& heap = HeapBase::From(*heap_handle);
147
148  auto& object_header =
149      BasePage::FromInnerAddress(&heap, inner_pointer)
// NOTE(review): the extracted page dropped doc line 150 here — presumably
// "->ObjectHeaderFromInnerAddress(inner_pointer);" (a linked call) — confirm
// against the original write-barrier.cc.
151
152  // Record the source object.
153  heap.remembered_set().AddSourceObject(
154      const_cast<HeapObjectHeader&>(object_header));
155}
156#endif // CPPGC_YOUNG_GENERATION
157
158#if V8_ENABLE_CHECKS
159// static
160void WriteBarrier::CheckParams(Type expected_type, const Params& params) {
161 CHECK_EQ(expected_type, params.type);
162}
163#endif // V8_ENABLE_CHECKS
164
165#if defined(CPPGC_YOUNG_GENERATION)
166
167// static
168YoungGenerationEnabler& YoungGenerationEnabler::Instance() {
// Returns the process-wide singleton enabler.
// NOTE(review): doc line 169 — the declaration of the function-local static
// `instance` that is dereferenced below — was lost in extraction; confirm
// against the original file (likely a leaky static holder).
170  return *instance.get();
171}
172
// Ref-counted enable: multiple heaps may enable young generation; the global
// write-barrier flag is entered only on the 0 -> 1 transition.
173void YoungGenerationEnabler::Enable() {
174  auto& instance = Instance();
175  v8::base::MutexGuard _(&instance.mutex_);
176  if (++instance.is_enabled_ == 1) {
177    // Enter the flag so that the check in the write barrier will always trigger
178    // when young generation is enabled.
// NOTE(review): doc line 179 — the call entering the write-barrier flag
// (cross-refs show a static `write_barrier_enabled_` AtomicEntryFlag) — was
// lost in extraction.
180  }
181}
182
// Ref-counted disable: the global write-barrier flag is exited only on the
// 1 -> 0 transition. DCHECK guards against unbalanced Disable() calls.
183void YoungGenerationEnabler::Disable() {
184  auto& instance = Instance();
185  v8::base::MutexGuard _(&instance.mutex_);
186  DCHECK_LT(0, instance.is_enabled_);
187  if (--instance.is_enabled_ == 0) {
// NOTE(review): doc line 188 — the matching flag-exit call paired with the
// enter in Enable() — was lost in extraction.
189  }
190}
191
192bool YoungGenerationEnabler::IsEnabled() {
193 auto& instance = Instance();
194 v8::base::MutexGuard _(&instance.mutex_);
195 return instance.is_enabled_;
196}
197
198#endif // defined(CPPGC_YOUNG_GENERATION)
199
200#ifdef CPPGC_SLIM_WRITE_BARRIER
201
202// static
203template <WriteBarrierSlotType SlotType>
// Combined slow path used by the slim write barrier: loads the pointee from
// `slot` (decompressing if the slot is compressed), recomputes the exact
// barrier type, and dispatches to the matching barrier.
204void WriteBarrier::CombinedWriteBarrierSlow(const void* slot) {
205  DCHECK_NOT_NULL(slot);
206
207  const void* value = nullptr;
208#if defined(CPPGC_POINTER_COMPRESSION)
// NOTE(review): doc line 209 — presumably
// "if constexpr (SlotType == WriteBarrierSlotType::kCompressed) {" — was lost
// in extraction.
210    value = CompressedPointer::Decompress(
211        *static_cast<const CompressedPointer::IntegralType*>(slot));
212  } else {
213    value = *reinterpret_cast<const void* const*>(slot);
214  }
215#else
// NOTE(review): doc line 216 (likely a static_assert that SlotType is
// kUncompressed without pointer compression) was lost in extraction.
217  value = *reinterpret_cast<const void* const*>(slot);
218#endif
219
220  WriteBarrier::Params params;
221  const WriteBarrier::Type type =
222      WriteBarrier::GetWriteBarrierType(slot, value, params);
223  switch (type) {
// NOTE(review): the case labels and barrier calls (doc lines 224-231) were
// lost in extraction — per the cross-refs these dispatch to
// DijkstraMarkingBarrier / GenerationalBarrier, with a final bailout case.
227      break;
230      break;
232      // The fast checks are approximate and may trigger spuriously if any heap
233      // has marking in progress. `GetWriteBarrierType()` above is exact which
234      // is the reason we could still observe a bailout here.
235      break;
236  }
237}
238
// Explicit instantiations for both slot encodings so out-of-line callers can
// link against the template defined above.
239template V8_EXPORT_PRIVATE void WriteBarrier::CombinedWriteBarrierSlow<
240    WriteBarrierSlotType::kUncompressed>(const void* slot);
241#if defined(CPPGC_POINTER_COMPRESSION)
242template V8_EXPORT_PRIVATE void WriteBarrier::CombinedWriteBarrierSlow<
243    WriteBarrierSlotType::kCompressed>(const void* slot);
244#endif  // defined(CPPGC_POINTER_COMPRESSION)
245
246#endif // CPPGC_SLIM_WRITE_BARRIER
247
248} // namespace internal
249} // namespace cppgc
HeapObjectHeader & ObjectHeaderFromInnerAddress(void *address) const
Definition heap-page.h:339
static BasePage * FromPayload(void *)
Definition heap-page.h:314
static BasePage * FromInnerAddress(const HeapBase *, void *)
Definition heap-page.cc:40
static HeapBase & From(cppgc::HeapHandle &heap_handle)
Definition heap-base.h:88
static void DijkstraMarkingBarrierSlow(const void *value)
static void DijkstraMarkingBarrierSlowWithSentinelCheck(const void *value)
static AtomicEntryFlag write_barrier_enabled_
static void CheckParams(Type expected_type, const Params &params)
static void DijkstraMarkingBarrierRangeSlow(HeapHandle &heap_handle, const void *first_element, size_t element_size, size_t number_of_elements, TraceCallback trace_callback)
static V8_INLINE Type GetWriteBarrierType(const void *slot, const void *value, Params &params)
static V8_INLINE void GenerationalBarrier(const Params &params, const void *slot)
static void SteeleMarkingBarrierSlow(const void *value)
static void SteeleMarkingBarrierSlowWithSentinelCheck(const void *value)
static V8_INLINE void DijkstraMarkingBarrier(const Params &params, const void *object)
#define _
constexpr internal::SentinelPointer kSentinelPointer
void(*)(Visitor *visitor, const void *object) TraceCallback
Definition trace-trait.h:38
#define DCHECK_NOT_NULL(val)
Definition logging.h:492
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define DCHECK_GT(v1, v2)
Definition logging.h:487
#define V8_EXPORT_PRIVATE
Definition macros.h:460