// One MarkingBarrier exists per LocalHeap; it caches the owning heap's
// collectors, marking state, and shared-heap configuration.
MarkingBarrier::MarkingBarrier(LocalHeap* local_heap)
    : heap_(local_heap->heap()),
      major_collector_(heap_->mark_compact_collector()),
      minor_collector_(heap_->minor_mark_sweep_collector()),
      incremental_marking_(heap_->incremental_marking()),
      marking_state_(isolate()),
      is_main_thread_barrier_(local_heap->is_main_thread()),
      uses_shared_heap_(isolate()->has_shared_space()),
      is_shared_space_isolate_(isolate()->is_shared_space_isolate()) {}
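// Sandbox-only portion of the barrier. The asserts below record the
// assumption that neither the code pointer table nor the trusted pointer
// table is ever compacted.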
#ifdef V8_ENABLE_SANDBOX
static_assert(!CodePointerTable::kSupportsCompaction);
static_assert(!TrustedPointerTable::kSupportsCompaction);
void MarkingBarrier::Write(Tagged<DescriptorArray> descriptor_array,
                           int number_of_own_descriptors) {
  // Only major GC uses custom liveness for descriptor arrays.
  if (is_minor() || IsStrongDescriptorArray(descriptor_array)) {
    MarkValueLocal(descriptor_array);
    return;
  }
  // ... (elided: gc_epoch and worklist are chosen for the local or shared heap)
  if (const auto target_worklist =
          MarkingHelper::ShouldMarkObject(heap_, descriptor_array)) {
    // ...
  }
  if (v8_flags.black_allocated_pages) {
    // ...
  }
  // TryUpdateIndicesToMark() acts as a barrier that publishes the slot values
  // corresponding to number_of_own_descriptors.
  if (DescriptorArrayMarkingState::TryUpdateIndicesToMark(
          gc_epoch, descriptor_array, number_of_own_descriptors)) {
    worklist->Push(descriptor_array);
  }
}
  typed_slots->Insert(info.slot_type, info.offset);
}
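// The typed slots recorded above are buffered per page in typed_slots_map_
// and only merged into the owning page's remembered set via MergeTyped()
// when the barrier publishes (see the std::move(typed_slots) hand-off below).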
// Sets the generation page flags on every page of |space| for the given
// marking mode.
template <typename Space>
void SetGenerationPageFlags(Space* space, MarkingMode marking_mode) {
  if constexpr (std::is_same_v<Space, OldSpace> ||
                std::is_same_v<Space, SharedSpace> ||
                std::is_same_v<Space, TrustedSpace> ||
                std::is_same_v<Space, CodeSpace>) {
    for (auto* p : *space) {
      p->SetOldGenerationPageFlags(marking_mode);
    }
  } else if constexpr (std::is_same_v<Space, OldLargeObjectSpace> ||
                       std::is_same_v<Space, SharedLargeObjectSpace> ||
                       std::is_same_v<Space, TrustedLargeObjectSpace> ||
                       std::is_same_v<Space, CodeLargeObjectSpace>) {
    for (auto* p : *space) {
      DCHECK(p->Chunk()->IsLargePage());
      p->SetOldGenerationPageFlags(marking_mode);
    }
  } else if constexpr (std::is_same_v<Space, NewSpace>) {
    for (auto* p : *space) {
      p->SetYoungGenerationPageFlags(marking_mode);
    }
  } else {
    static_assert(std::is_same_v<Space, NewLargeObjectSpace>);
    for (auto* p : *space) {
      DCHECK(p->Chunk()->IsLargePage());
      p->SetYoungGenerationPageFlags(marking_mode);
    }
  }
}
template <typename Space>
void ActivateSpace(Space* space, MarkingMode marking_mode) {
  SetGenerationPageFlags(space, marking_mode);
}

template <typename Space>
void DeactivateSpace(Space* space) {
  SetGenerationPageFlags(space, MarkingMode::kNoMarking);
}
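// Minimal usage sketch (the real call sites are ActivateSpaces() and
// DeactivateSpaces() right below): switching a space into major marking and
// back simply rewrites its page flags.
//   ActivateSpace(heap->old_space(), MarkingMode::kMajorMarking);
//   DeactivateSpace(heap->old_space());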
void ActivateSpaces(Heap* heap, MarkingMode marking_mode) {
  ActivateSpace(heap->old_space(), marking_mode);
  ActivateSpace(heap->lo_space(), marking_mode);
  if (heap->new_space()) {
    ActivateSpace(heap->new_space(), marking_mode);
  }
  ActivateSpace(heap->new_lo_space(), marking_mode);

  {
    RwxMemoryWriteScope scope("For writing flags.");
    ActivateSpace(heap->code_space(), marking_mode);
    ActivateSpace(heap->code_lo_space(), marking_mode);
  }

  if (heap->shared_space()) {
    ActivateSpace(heap->shared_space(), marking_mode);
  }
  if (heap->shared_lo_space()) {
    ActivateSpace(heap->shared_lo_space(), marking_mode);
  }

  ActivateSpace(heap->trusted_space(), marking_mode);
  ActivateSpace(heap->trusted_lo_space(), marking_mode);
}
void DeactivateSpaces(Heap* heap) {
  DeactivateSpace(heap->old_space());
  DeactivateSpace(heap->lo_space());
  if (heap->new_space()) {
    DeactivateSpace(heap->new_space());
  }
  DeactivateSpace(heap->new_lo_space());

  {
    RwxMemoryWriteScope scope("For writing flags.");
    DeactivateSpace(heap->code_space());
    DeactivateSpace(heap->code_lo_space());
  }

  if (heap->shared_space()) {
    DeactivateSpace(heap->shared_space());
  }
  if (heap->shared_lo_space()) {
    DeactivateSpace(heap->shared_lo_space());
  }

  DeactivateSpace(heap->trusted_space());
  DeactivateSpace(heap->trusted_lo_space());
}
void MarkingBarrier::ActivateAll(Heap* heap, bool is_compacting) {
  ActivateSpaces(heap, MarkingMode::kMajorMarking);
  heap->safepoint()->IterateLocalHeaps([is_compacting](LocalHeap* local_heap) {
    local_heap->marking_barrier()->Activate(is_compacting,
                                            MarkingMode::kMajorMarking);
  });
  if (heap->isolate()->is_shared_space_isolate()) {
    heap->isolate()->shared_space_isolate()->global_safepoint()
        ->IterateClientIsolates([](Isolate* client) {
          client->heap()->SetIsMarkingFlag(true);
          // ...
        });
  }
}

void MarkingBarrier::ActivateYoung(Heap* heap) {
  ActivateSpaces(heap, MarkingMode::kMinorMarking);
  heap->safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
    local_heap->marking_barrier()->Activate(false, MarkingMode::kMinorMarking);
  });
}

void MarkingBarrier::DeactivateAll(Heap* heap) {
  DeactivateSpaces(heap);
  heap->safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
    local_heap->marking_barrier()->Deactivate();
  });
  if (heap->isolate()->is_shared_space_isolate()) {
    heap->isolate()->shared_space_isolate()->global_safepoint()
        ->IterateClientIsolates([](Isolate* client) {
          // A client may still be marking its own heap incrementally.
          const bool is_marking =
              client->heap()->incremental_marking()->IsMarking();
          client->heap()->SetIsMarkingFlag(is_marking);
          // ...
        });
  }
}

void MarkingBarrier::DeactivateYoung(Heap* heap) {
  DeactivateSpaces(heap);
  heap->safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
    local_heap->marking_barrier()->Deactivate();
  });
}

void MarkingBarrier::PublishAll(Heap* heap) {
  heap->safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
    local_heap->marking_barrier()->PublishIfNeeded();
  });
  if (heap->isolate()->is_shared_space_isolate()) {
    heap->isolate()->shared_space_isolate()->global_safepoint()
        ->IterateClientIsolates([](Isolate* client) {
          client->heap()->safepoint()->IterateLocalHeaps(
              [](LocalHeap* local_heap) {
                local_heap->marking_barrier()->PublishSharedIfNeeded();
              });
        });
  }
}

void MarkingBarrier::PublishYoung(Heap* heap) {
  heap->safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
    local_heap->marking_barrier()->PublishIfNeeded();
  });
}
  // In MarkingBarrier::PublishIfNeeded(), buffered typed slots are merged
  // into the owning page's remembered set:
  std::unique_ptr<TypedSlots>& typed_slots = it.second;
  RememberedSet<OLD_TO_OLD>::MergeTyped(memory_chunk, std::move(typed_slots));
void MarkingBarrier::AssertSharedMarkingIsActivated() const {
  DCHECK(shared_heap_worklists_.has_value());
}