v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
minor-mark-sweep.cc
1// Copyright 2023 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/heap/minor-mark-sweep.h"
6
7#include <algorithm>
8#include <atomic>
9#include <memory>
10#include <unordered_set>
11#include <vector>
12
13#include "src/base/logging.h"
16#include "src/common/globals.h"
18#include "src/flags/flags.h"
25#include "src/heap/gc-tracer.h"
27#include "src/heap/heap.h"
39#include "src/heap/new-spaces.h"
45#include "src/heap/safepoint.h"
46#include "src/heap/slot-set.h"
47#include "src/heap/sweeper.h"
50#include "src/init/v8.h"
52#include "src/objects/objects.h"
57#include "src/utils/utils-inl.h"
58
59namespace v8 {
60namespace internal {
61
62// ==================================================================
63// Verifiers
64// ==================================================================
65
66#ifdef VERIFY_HEAP
67namespace {
68
69class YoungGenerationMarkingVerifier : public MarkingVerifierBase {
70 public:
71 explicit YoungGenerationMarkingVerifier(Heap* heap)
72 : MarkingVerifierBase(heap),
73 marking_state_(heap->non_atomic_marking_state()) {}
74
75 const MarkingBitmap* bitmap(const MutablePageMetadata* chunk) override {
76 return chunk->marking_bitmap();
77 }
78
79 bool IsMarked(Tagged<HeapObject> object) override {
80 return marking_state_->IsMarked(object);
81 }
82
83 void Run() override {
84 // VerifyRoots will also visit the conservative stack and consider
85 // objects reachable from it, including old objects. This is fine since this
86 // verifier will only check that young objects are marked.
87 VerifyRoots();
88 if (v8_flags.sticky_mark_bits) {
89 VerifyMarking(heap_->sticky_space());
90 } else {
91 VerifyMarking(heap_->new_space());
92 }
93 }
94
95 GarbageCollector collector() const override {
96 return GarbageCollector::MINOR_MARK_SWEEPER;
97 }
98
99 protected:
100 void VerifyMap(Tagged<Map> map) override { VerifyHeapObjectImpl(map); }
101
102 void VerifyPointers(ObjectSlot start, ObjectSlot end) override {
103 VerifyPointersImpl(start, end);
104 }
105
106 void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) override {
107 VerifyPointersImpl(start, end);
108 }
109 void VerifyCodePointer(InstructionStreamSlot slot) override {
110 // Code slots never appear in new space because
111 // Code objects, the only objects that can contain code pointers, are
112 // always allocated in the old space.
113 UNREACHABLE();
114 }
115
116 void VisitCodeTarget(Tagged<InstructionStream> host,
117 RelocInfo* rinfo) override {
118 Tagged<InstructionStream> target =
119 InstructionStream::FromTargetAddress(rinfo->target_address());
120 VerifyHeapObjectImpl(target);
121 }
122 void VisitEmbeddedPointer(Tagged<InstructionStream> host,
123 RelocInfo* rinfo) override {
124 VerifyHeapObjectImpl(rinfo->target_object(cage_base()));
125 }
126 void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) override {
127 VerifyPointersImpl(start, end);
128 }
129
130 private:
131 V8_INLINE void VerifyHeapObjectImpl(Tagged<HeapObject> heap_object) {
132 CHECK_IMPLIES(HeapLayout::InYoungGeneration(heap_object),
133 IsMarked(heap_object));
134 }
135
136 template <typename TSlot>
137 V8_INLINE void VerifyPointersImpl(TSlot start, TSlot end) {
138 PtrComprCageBase cage_base =
139 GetPtrComprCageBaseFromOnHeapAddress(start.address());
140 for (TSlot slot = start; slot < end; ++slot) {
141 typename TSlot::TObject object = slot.load(cage_base);
142#ifdef V8_ENABLE_DIRECT_HANDLE
143 if (object.ptr() == kTaggedNullAddress) continue;
144#endif
145 Tagged<HeapObject> heap_object;
146 // Minor MS treats weak references as strong.
147 if (object.GetHeapObject(&heap_object)) {
148 VerifyHeapObjectImpl(heap_object);
149 }
150 }
151 }
152
153 NonAtomicMarkingState* const marking_state_;
154};
155
156} // namespace
157#endif // VERIFY_HEAP
158
159// =============================================================================
160// MinorMarkSweepCollector
161// =============================================================================
162
163namespace {
164int EstimateMaxNumberOfRemeberedSets(Heap* heap) {
165 // old space, lo space, trusted space and trusted lo space can have a maximum
166 // of two remembered sets (OLD_TO_NEW and OLD_TO_NEW_BACKGROUND).
167 // Code space and code lo space can have typed OLD_TO_NEW in addition.
168 return 2 * (heap->old_space()->CountTotalPages() +
169 heap->lo_space()->PageCount() +
170 heap->trusted_space()->CountTotalPages() +
171 heap->trusted_lo_space()->PageCount()) +
172 3 * (heap->code_space()->CountTotalPages() +
173 heap->code_lo_space()->PageCount());
174}
175} // namespace
176
177// static
178std::vector<YoungGenerationRememberedSetsMarkingWorklist::MarkingItem>
180 std::vector<MarkingItem> items;
181 int max_remembered_set_count = EstimateMaxNumberOfRemeberedSets(heap);
182 items.reserve(max_remembered_set_count);
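 // Pull the OLD_TO_NEW and OLD_TO_NEW_BACKGROUND slot sets off every
 // old-generation page and wrap them into marking work items.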
184 heap, [&items](MutablePageMetadata* chunk) {
185 SlotSet* slot_set = chunk->ExtractSlotSet<OLD_TO_NEW>();
186 SlotSet* background_slot_set =
187 chunk->ExtractSlotSet<OLD_TO_NEW_BACKGROUND>();
188 if (slot_set || background_slot_set) {
189 items.emplace_back(chunk, MarkingItem::SlotsType::kRegularSlots,
190 slot_set, background_slot_set);
191 }
192 if (TypedSlotSet* typed_slot_set =
195 items.emplace_back(chunk, MarkingItem::SlotsType::kTypedSlots,
196 typed_slot_set);
197 }
198 });
199 DCHECK_LE(items.size(), max_remembered_set_count);
200 return items;
201}
202
220
223 DCHECK(IsAcquired());
224 if (slots_type_ == SlotsType::kRegularSlots) {
225 if (slot_set_) SlotSet::Delete(slot_set_);
226 if (background_slot_set_) SlotSet::Delete(background_slot_set_);
227 } else {
228 DCHECK_EQ(slots_type_, SlotsType::kTypedSlots);
229 DCHECK_NULL(background_slot_set_);
230 if (typed_slot_set_)
231 RememberedSet<OLD_TO_NEW>::DeleteTyped(std::move(*typed_slot_set_));
232 }
233}
234
237 if (slots_type_ == SlotsType::kRegularSlots) {
238 if (slot_set_) SlotSet::Delete(slot_set_);
239 if (background_slot_set_) SlotSet::Delete(background_slot_set_);
240
241 } else {
242 DCHECK_EQ(slots_type_, SlotsType::kTypedSlots);
243 DCHECK_NULL(background_slot_set_);
244 if (typed_slot_set_) delete typed_slot_set_;
245 }
246}
247
255
259 if (v8_flags.sticky_mark_bits) {
260 item.DeleteRememberedSets();
261 } else {
262 item.MergeAndDeleteRememberedSets();
263 }
264 }
265}
266
269 item.DeleteSetsOnTearDown();
270 }
272 remaining_remembered_sets_marking_items_.store(0, std::memory_order_relaxed);
273}
274
276 MinorMarkSweepCollector* collector)
277 : main_marking_visitor_(collector->main_marking_visitor()) {}
278
280 default;
281
282// static
284
286 : heap_(heap),
287 marking_state_(heap_->marking_state()),
288 non_atomic_marking_state_(heap_->non_atomic_marking_state()),
289 sweeper_(heap_->sweeper()) {}
290
292 auto* cpp_heap = CppHeap::From(heap_->cpp_heap_);
293 if (!cpp_heap) return;
294
295 TRACE_GC(heap_->tracer(), GCTracer::Scope::MINOR_MS_MARK_EMBEDDER_TRACING);
297 cpp_heap->AdvanceMarking(v8::base::TimeDelta::Max(), SIZE_MAX);
298}
299
301
306 main_marking_visitor_->PublishWorklists();
308 // Marking barriers of LocalHeaps will be published in their destructors.
309 marking_worklists_->Clear();
310 ephemeron_table_list_->Clear();
311 }
312}
313
315 if (v8_flags.concurrent_minor_ms_marking || v8_flags.parallel_marking) {
321 }
325 if (auto* cpp_heap = CppHeap::From(heap_->cpp_heap_)) {
326 cpp_heap->FinishConcurrentMarkingIfNeeded();
327 }
328}
329
330#ifdef DEBUG
331template <typename Space>
332static bool ExternalPointerRememberedSetsEmpty(Space* space) {
333 for (auto it = space->begin(); it != space->end();) {
334 PageMetadata* p = *(it++);
336 return false;
337 }
338 }
339 return true;
340}
341#endif
342
343void MinorMarkSweepCollector::StartMarking(bool force_use_background_threads) {
344#if defined(VERIFY_HEAP) && !V8_ENABLE_STICKY_MARK_BITS_BOOL
345 if (v8_flags.verify_heap) {
346 for (PageMetadata* page : *heap_->new_space()) {
347 CHECK(page->marking_bitmap()->IsClean());
348 }
349 }
350#endif // defined(VERIFY_HEAP) && !V8_ENABLE_STICKY_MARK_BITS_BOOL
351
352 // The state for background threads is saved here and maintained for the whole
353 // GC cycle. Both CppHeap and the regular V8 heap will refer to this flag.
355 // Once we have decided to start concurrent marking, we always need to use
356 // background threads, because Minor MS doesn't perform incremental
357 // marking. ShouldUseBackgroundThreads() on worker isolates can be updated
358 // concurrently from the main thread outside a task, so we shouldn't invoke it
359 // here again as it could return a different result.
361 force_use_background_threads || heap_->ShouldUseBackgroundThreads();
362
363 auto* cpp_heap = CppHeap::From(heap_->cpp_heap_);
364 // CppHeap's marker must be initialized before the V8 marker to allow
365 // exchanging of worklists.
366 if (cpp_heap && cpp_heap->generational_gc_supported()) {
367 TRACE_GC(heap_->tracer(), GCTracer::Scope::MINOR_MS_MARK_EMBEDDER_PROLOGUE);
368 cpp_heap->InitializeMarking(CppHeap::CollectionType::kMinor);
369 }
371 ephemeron_table_list_ = std::make_unique<EphemeronRememberedSet::TableList>();
373 marking_worklists_ = std::make_unique<MarkingWorklists>();
376 if (v8_flags.sticky_mark_bits) {
377 DCHECK(ExternalPointerRememberedSetsEmpty(heap_->sticky_space()));
378 } else {
379 DCHECK(ExternalPointerRememberedSetsEmpty(
381 }
383 std::make_unique<PretenuringHandler::PretenuringFeedbackMap>(
385 main_marking_visitor_ = std::make_unique<YoungGenerationMainMarkingVisitor>(
389 std::make_unique<YoungGenerationRememberedSetsMarkingWorklist>(heap_);
390 if (cpp_heap && cpp_heap->generational_gc_supported()) {
391 TRACE_GC(heap_->tracer(), GCTracer::Scope::MINOR_MS_MARK_EMBEDDER_PROLOGUE);
392 // StartTracing immediately starts marking which requires V8 worklists to
393 // be set up.
394 cpp_heap->StartMarking();
395 }
396}
397
399 TRACE_GC(heap_->tracer(), GCTracer::Scope::MINOR_MS_FINISH);
400
402
403 {
404 TRACE_GC(heap_->tracer(), GCTracer::Scope::MINOR_MS_FINISH_ENSURE_CAPACITY);
406 }
407
408 if (!v8_flags.sticky_mark_bits) {
410 }
411}
412
414 DCHECK(!heap_->mark_compact_collector()->in_use());
417 DCHECK(!sweeper()->AreMinorSweeperTasksRunning());
418 if (v8_flags.sticky_mark_bits) {
419 DCHECK(sweeper()->IsSweepingDoneForSpace(OLD_SPACE));
420 } else {
421 DCHECK(sweeper()->IsSweepingDoneForSpace(NEW_SPACE));
422 }
423
425
426 is_in_atomic_pause_.store(true, std::memory_order_relaxed);
427
429 if (auto* cpp_heap = CppHeap::From(heap_->cpp_heap_)) {
430 cpp_heap->ProcessCrossThreadWeakness();
431 }
433#ifdef VERIFY_HEAP
434 if (v8_flags.verify_heap) {
435 TRACE_GC(heap_->tracer(), GCTracer::Scope::MINOR_MS_MARK_VERIFY);
436 YoungGenerationMarkingVerifier verifier(heap_);
437 verifier.Run();
438 }
439#endif // VERIFY_HEAP
440
441 if (auto* cpp_heap = CppHeap::From(heap_->cpp_heap_)) {
442 cpp_heap->FinishMarkingAndProcessWeakness();
443 }
444
445 Sweep();
446 Finish();
447
448 auto* isolate = heap_->isolate();
450 isolate->traced_handles()->UpdateListOfYoungNodes();
451
452 isolate->stack_guard()->ClearGC();
453 gc_finalization_requested_.store(false, std::memory_order_relaxed);
454 is_in_atomic_pause_.store(false, std::memory_order_relaxed);
455}
456
457namespace {
458
459class YoungStringForwardingTableCleaner final
461 public:
462 explicit YoungStringForwardingTableCleaner(Heap* heap)
464
465 // For Minor MS we don't mark forwarded objects, because they are always
466 // in the old generation (and thus considered live).
467 // We only need to delete non-live young objects.
468 void ProcessYoungObjects() {
469 DCHECK(v8_flags.always_use_string_forwarding_table);
470 StringForwardingTable* forwarding_table =
471 isolate_->string_forwarding_table();
472 forwarding_table->IterateElements(
473 [&](StringForwardingTable::Record* record) {
474 ClearNonLiveYoungObjects(record);
475 });
476 }
477
478 private:
479 void ClearNonLiveYoungObjects(StringForwardingTable::Record* record) {
480 Tagged<Object> original = record->OriginalStringObject(isolate_);
481 if (!IsHeapObject(original)) {
483 return;
484 }
485 Tagged<String> original_string = Cast<String>(original);
486 if (!HeapLayout::InYoungGeneration(original_string)) return;
487 if (!marking_state_->IsMarked(original_string)) {
488 DisposeExternalResource(record);
489 record->set_original_string(StringForwardingTable::deleted_element());
490 }
491 }
492};
493
494bool IsUnmarkedObjectInYoungGeneration(Heap* heap, FullObjectSlot p) {
495 if (v8_flags.sticky_mark_bits) {
497 }
498 DCHECK_IMPLIES(HeapLayout::InYoungGeneration(*p), Heap::InToPage(*p));
500 !heap->non_atomic_marking_state()->IsMarked(Cast<HeapObject>(*p));
501}
502
503} // namespace
504
506 TRACE_GC(heap_->tracer(), GCTracer::Scope::MINOR_MS_CLEAR);
507
508 if (V8_UNLIKELY(v8_flags.always_use_string_forwarding_table)) {
510 GCTracer::Scope::MINOR_MS_CLEAR_STRING_FORWARDING_TABLE);
511 // Clear non-live objects in the string forwarding table.
512 YoungStringForwardingTableCleaner forwarding_table_cleaner(heap_);
513 forwarding_table_cleaner.ProcessYoungObjects();
514 }
515
516 Heap::ExternalStringTable& external_string_table =
518 if (external_string_table.HasYoung()) {
519 TRACE_GC(heap_->tracer(), GCTracer::Scope::MINOR_MS_CLEAR_STRING_TABLE);
520 // Internalized strings are always stored in old space, so there is no
521 // need to clean them here.
524 external_visitor(heap_);
525 external_string_table.IterateYoung(&external_visitor);
526 external_string_table.CleanUpYoung();
527 }
528
529 Isolate* isolate = heap_->isolate();
530 if (isolate->global_handles()->HasYoung() ||
531 isolate->traced_handles()->HasYoung()) {
533 GCTracer::Scope::MINOR_MS_CLEAR_WEAK_GLOBAL_HANDLES);
534 isolate->global_handles()->ProcessWeakYoungObjects(
535 nullptr, &IsUnmarkedObjectInYoungGeneration);
536 if (auto* cpp_heap = CppHeap::From(heap_->cpp_heap_);
537 cpp_heap && cpp_heap->generational_gc_supported()) {
538 isolate->traced_handles()->ResetYoungDeadNodes(
539 &IsUnmarkedObjectInYoungGeneration);
540 } else {
541 isolate->traced_handles()->ProcessWeakYoungObjects(
542 nullptr, &IsUnmarkedObjectInYoungGeneration);
543 }
544 }
545
546 // Clear ephemeron entries from EphemeronHashTables in the young generation
547 // whenever the entry has a dead young generation key.
548 //
549 // Worklist is collected during marking.
550 {
552 EphemeronRememberedSet::TableList::Local local_ephemeron_table_list(
555 while (local_ephemeron_table_list.Pop(&table)) {
556 for (InternalIndex i : table->IterateEntries()) {
557 // Keys in EphemeronHashTables must be heap objects.
558 HeapObjectSlot key_slot(
559 table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i)));
563 table->RemoveEntry(i);
564 }
565 }
566 }
567 }
568 ephemeron_table_list_.reset();
569
570 // Clear ephemeron entries from EphemeronHashTables in the old generation
571 // whenever the entry has a dead young generation key.
572 //
573 // Does not need to be iterated as roots but is maintained in the GC to avoid
574 // treating keys as strong. The set is populated from the write barrier and
575 // the sweeper during promoted pages iteration.
576 auto* table_map = heap_->ephemeron_remembered_set()->tables();
577 for (auto it = table_map->begin(); it != table_map->end();) {
578 Tagged<EphemeronHashTable> table = it->first;
579 auto& indices = it->second;
580 for (auto iti = indices.begin(); iti != indices.end();) {
581 // Keys in EphemeronHashTables must be heap objects.
582 HeapObjectSlot key_slot(table->RawFieldOfElementAt(
583 EphemeronHashTable::EntryToIndex(InternalIndex(*iti))));
585 // There may be old generation entries left in the remembered set as
586 // MinorMS only promotes pages after clearing non-live references.
588 iti = indices.erase(iti);
590 table->RemoveEntry(InternalIndex(*iti));
591 iti = indices.erase(iti);
592 } else {
593 ++iti;
594 }
595 }
596
597 if (indices.empty()) {
598 it = table_map->erase(it);
599 } else {
600 ++it;
601 }
602 }
603}
604
605namespace {
606void VisitObjectWithEmbedderFields(Isolate* isolate, Tagged<JSObject> js_object,
607 MarkingWorklists::Local& worklist) {
608 DCHECK(js_object->MayHaveEmbedderFields());
610 // Not every object that can have embedder fields is actually a JSApiWrapper.
611 if (!IsJSApiWrapperObject(js_object)) {
612 return;
613 }
614
615 // Wrapper using cpp_heap_wrappable field.
616 void* wrappable =
617 JSApiWrapper(js_object).GetCppHeapWrappable(isolate, kAnyCppHeapPointer);
618 if (wrappable) {
619 worklist.cpp_marking_state()->MarkAndPush(wrappable);
620 }
621}
622} // namespace
623
625 YoungGenerationRootMarkingVisitor& root_visitor) {
626 TRACE_GC(heap_->tracer(), GCTracer::Scope::MINOR_MS_MARK_TRACED_HANDLES);
627 if (auto* cpp_heap = CppHeap::From(heap_->cpp_heap_);
628 cpp_heap && cpp_heap->generational_gc_supported()) {
629 // Visit the Oilpan-to-V8 remembered set.
631 &root_visitor);
632 // Visit the V8-to-Oilpan remembered set.
633 cpp_heap->VisitCrossHeapRememberedSetIfNeeded([this](Tagged<JSObject> obj) {
634 VisitObjectWithEmbedderFields(heap_->isolate(), obj,
636 });
637 } else {
638 // Otherwise, visit all young roots.
639 heap_->isolate()->traced_handles()->IterateYoungRoots(&root_visitor);
640 }
641}
642
645 bool was_marked_incrementally) {
646 Isolate* isolate = heap_->isolate();
647
648 // Seed the root set.
649 {
650 TRACE_GC(heap_->tracer(), GCTracer::Scope::MINOR_MS_MARK_SEED);
651 isolate->traced_handles()->ComputeWeaknessForYoungObjects();
652 // MinorMS treats all weak roots except for global handles as strong.
653 // That is why we don't set skip_weak = true here and instead visit
654 // global handles separately.
656 &root_visitor,
661 isolate->global_handles()->IterateYoungStrongAndDependentRoots(
662 &root_visitor);
663 MarkRootsFromTracedHandles(root_visitor);
664 }
665}
666
667namespace {
668class MinorMSConservativeStackVisitor
669 : public ConservativeStackVisitorBase<MinorMSConservativeStackVisitor> {
670 public:
671 MinorMSConservativeStackVisitor(
672 Isolate* isolate, YoungGenerationRootMarkingVisitor& root_visitor)
673 : ConservativeStackVisitorBase(isolate, &root_visitor) {}
674
675 private:
676 static constexpr bool kOnlyVisitMainV8Cage [[maybe_unused]] = true;
677
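 // With sticky mark bits, any page not flagged CONTAINS_ONLY_OLD may hold
 // young objects and is visited; otherwise only to-space pages are visited.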
678 static bool FilterPage(const MemoryChunk* chunk) {
679 return v8_flags.sticky_mark_bits
680 ? !chunk->IsFlagSet(MemoryChunk::CONTAINS_ONLY_OLD)
681 : chunk->IsToPage();
682 }
683 static bool FilterLargeObject(Tagged<HeapObject>, MapWord) { return true; }
684 static bool FilterNormalObject(Tagged<HeapObject>, MapWord, MarkingBitmap*) {
685 return true;
686 }
687 static void HandleObjectFound(Tagged<HeapObject>, size_t, MarkingBitmap*) {}
688
689 friend class ConservativeStackVisitorBase<MinorMSConservativeStackVisitor>;
690};
691} // namespace
692
694 YoungGenerationRootMarkingVisitor& root_visitor) {
695 if (!heap_->IsGCWithStack()) return;
696 TRACE_GC(heap_->tracer(), GCTracer::Scope::CONSERVATIVE_STACK_SCANNING);
697
698 MinorMSConservativeStackVisitor stack_visitor(heap_->isolate(), root_visitor);
699
700 heap_->IterateConservativeStackRoots(&stack_visitor);
701}
702
704 TRACE_GC(heap_->tracer(), GCTracer::Scope::MINOR_MS_MARK);
705
706 const bool was_marked_incrementally =
708 if (!was_marked_incrementally) {
709 StartMarking(false);
710 } else {
711 auto* incremental_marking = heap_->incremental_marking();
713 heap_->tracer(), GCTracer::Scope::MINOR_MS_MARK_FINISH_INCREMENTAL,
714 incremental_marking->current_trace_id(), TRACE_EVENT_FLAG_FLOW_IN);
715 DCHECK(incremental_marking->IsMinorMarking());
716 DCHECK(v8_flags.concurrent_minor_ms_marking);
717 incremental_marking->Stop();
719 }
720
723
724 YoungGenerationRootMarkingVisitor root_visitor(this);
725
726 MarkRoots(root_visitor, was_marked_incrementally);
727
728 // CppGC starts parallel marking tasks that will trace TracedReferences.
729 if (auto* cpp_heap = CppHeap::From(heap_->cpp_heap())) {
730 cpp_heap->EnterFinalPause(heap_->embedder_stack_state_);
731 }
732
733 {
734 // Mark the transitive closure in parallel.
736 GCTracer::Scope::MINOR_MS_MARK_CLOSURE_PARALLEL,
737 "UseBackgroundThreads", UseBackgroundThreadsInCycle());
738 if (v8_flags.parallel_marking) {
741 }
744 }
745
746 {
748 GCTracer::Scope::MINOR_MS_MARK_CONSERVATIVE_STACK);
749 MarkRootsFromConservativeStack(root_visitor);
750 }
751
752 {
753 TRACE_GC(heap_->tracer(), GCTracer::Scope::MINOR_MS_MARK_CLOSURE);
754 if (auto* cpp_heap = CppHeap::From(heap_->cpp_heap())) {
755 cpp_heap->EnterProcessGlobalAtomicPause();
756 }
758 }
759 CHECK(local_marking_worklists()->IsEmpty());
760
761 if (was_marked_incrementally) {
762 // Disable the marking barrier after concurrent/parallel marking has
763 // finished as it will reset page flags.
764 Sweeper::PauseMajorSweepingScope pause_sweeping_scope(heap_->sweeper());
766 }
767
768 main_marking_visitor_.reset();
769 marking_worklists_.reset();
771
772 PretenuringHandler* pretenuring_handler = heap_->pretenuring_handler();
773 pretenuring_handler->MergeAllocationSitePretenuringFeedback(
775 pretenuring_feedback_.reset();
776
777 if (v8_flags.minor_ms_trace_fragmentation) {
779 }
780}
781
783 PtrComprCageBase cage_base(heap_->isolate());
786 auto* marking_worklists_local = local_marking_worklists();
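 // Alternate between draining the marking worklists and processing
 // remembered-set items until both are exhausted and CppHeap marking is done.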
787 do {
788 marking_worklists_local->MergeOnHold();
789
791
792 Tagged<HeapObject> heap_object;
793 while (marking_worklists_local->Pop(&heap_object)) {
794 DCHECK(!IsFreeSpaceOrFiller(heap_object, cage_base));
795 DCHECK(IsHeapObject(heap_object));
796 DCHECK(heap_->Contains(heap_object));
797 DCHECK(!marking_state_->IsUnmarked(heap_object));
798 // Maps won't change in the atomic pause, so the map can be read without
799 // atomics.
800 Tagged<Map> map = Cast<Map>(*heap_object->map_slot());
801 const auto visited_size = main_marking_visitor_->Visit(map, heap_object);
802 if (visited_size) {
803 main_marking_visitor_->IncrementLiveBytesCached(
805 ALIGN_TO_ALLOCATION_ALIGNMENT(visited_size));
806 }
807 }
808 } while (remembered_sets.ProcessNextItem(main_marking_visitor_.get()) ||
809 !IsCppHeapMarkingFinished(heap_, marking_worklists_local));
810}
811
813 PagedSpaceBase* new_space =
814 v8_flags.sticky_mark_bits ? heap_->sticky_space()
815 : static_cast<PagedSpaceBase*>(
816 heap_->paged_new_space()->paged_space());
817 PtrComprCageBase cage_base(heap_->isolate());
818 const std::array<size_t, 4> free_size_class_limits = {0, 1024, 2048, 4096};
819 size_t free_bytes_of_class[free_size_class_limits.size()] = {0};
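 // The size classes are cumulative: a free block is counted in every class
 // whose limit it reaches, so free_bytes_of_class[0] holds the total free
 // bytes.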
820 size_t live_bytes = 0;
821 size_t allocatable_bytes = 0;
822 for (PageMetadata* p : *new_space) {
823 Address free_start = p->area_start();
824 for (auto [object, size] : LiveObjectRange(p)) {
825 Address free_end = object.address();
826 if (free_end != free_start) {
827 size_t free_bytes = free_end - free_start;
828 int free_bytes_index = 0;
829 for (auto free_size_class_limit : free_size_class_limits) {
830 if (free_bytes >= free_size_class_limit) {
831 free_bytes_of_class[free_bytes_index] += free_bytes;
832 }
833 free_bytes_index++;
834 }
835 }
836 live_bytes += size;
837 free_start = free_end + size;
838 }
839 const Address top = heap_->NewSpaceTop();
840 size_t area_end = p->Contains(top) ? top : p->area_end();
841 if (free_start != area_end) {
842 size_t free_bytes = area_end - free_start;
843 int free_bytes_index = 0;
844 for (auto free_size_class_limit : free_size_class_limits) {
845 if (free_bytes >= free_size_class_limit) {
846 free_bytes_of_class[free_bytes_index] += free_bytes;
847 }
848 free_bytes_index++;
849 }
850 }
851 allocatable_bytes += area_end - p->area_start();
852 CHECK_EQ(allocatable_bytes, live_bytes + free_bytes_of_class[0]);
853 }
855 "Minor Mark-Sweep Fragmentation: allocatable_bytes=%zu "
856 "live_bytes=%zu "
857 "free_bytes=%zu free_bytes_1K=%zu free_bytes_2K=%zu "
858 "free_bytes_4K=%zu\n",
859 allocatable_bytes, live_bytes, free_bytes_of_class[0],
860 free_bytes_of_class[1], free_bytes_of_class[2],
861 free_bytes_of_class[3]);
862}
863
864namespace {
865
866// NewSpacePages with more live bytes than this threshold qualify for fast
867// evacuation.
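// minor_ms_page_promotion_threshold is interpreted as a percentage of a
// page's allocatable area.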
868intptr_t NewSpacePageEvacuationThreshold() {
869 return v8_flags.minor_ms_page_promotion_threshold *
871}
872
873bool ShouldMovePage(PageMetadata* p, intptr_t live_bytes,
874 intptr_t wasted_bytes) {
875 DCHECK(v8_flags.page_promotion);
876 DCHECK(!v8_flags.sticky_mark_bits);
877 Heap* heap = p->heap();
878 DCHECK(!p->Chunk()->NeverEvacuate());
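 // Promote the page wholesale if it is nearly full (live plus wasted bytes
 // above the threshold) or no LAB was allocated on it this cycle, provided
 // new space considers it a promotion candidate and the old generation can
 // still grow by its live bytes.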
879 const bool should_move_page =
880 ((live_bytes + wasted_bytes) > NewSpacePageEvacuationThreshold() ||
881 (p->AllocatedLabSize() == 0)) &&
882 (heap->new_space()->IsPromotionCandidate(p)) &&
883 heap->CanExpandOldGeneration(live_bytes);
884 if (v8_flags.trace_page_promotions) {
886 heap->isolate(),
887 "[Page Promotion] %p: collector=mms, should move: %d"
888 ", live bytes = %zu, wasted bytes = %zu, promotion threshold = %zu"
889 ", allocated labs size = %zu\n",
890 p, should_move_page, live_bytes, wasted_bytes,
891 NewSpacePageEvacuationThreshold(), p->AllocatedLabSize());
892 }
893 if (!should_move_page &&
894 (p->AgeInNewSpace() == v8_flags.minor_ms_max_page_age)) {
895 // Don't allocate on old pages so that recently allocated objects on the
896 // page get a chance to die young. The page will be force promoted on the
897 // next GC because `AllocatedLabSize` will be 0.
898 p->Chunk()->SetFlagNonExecutable(MemoryChunk::NEVER_ALLOCATE_ON_PAGE);
899 }
900 return should_move_page;
901}
902
903} // namespace
904
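// EvacuateExternalPointerReferences: moves the external-pointer-table entries
// of survivors on a promoted page from the young table space to the old table
// space, clearing their mark bits.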
907#ifdef V8_COMPRESS_POINTERS
908 using BasicSlotSet = ::heap::base::BasicSlotSet<kTaggedSize>;
909 BasicSlotSet* slots = p->slot_set<SURVIVOR_TO_EXTERNAL_POINTER>();
910 if (!slots) return;
911 ExternalPointerTable& table = heap_->isolate()->external_pointer_table();
912 ExternalPointerTable::Space* young = heap_->young_external_pointer_space();
913 ExternalPointerTable::Space* old = heap_->old_external_pointer_space();
914 auto callback = [&table, young, old](Address handle_location) {
915 ExternalPointerHandle handle =
916 *reinterpret_cast<ExternalPointerHandle*>(handle_location);
917 table.Evacuate(young, old, handle, handle_location,
918 ExternalPointerTable::EvacuateMarkMode::kClearMark);
919 return KEEP_SLOT;
920 };
921 auto slot_count = slots->Iterate<BasicSlotSet::AccessMode::NON_ATOMIC>(
923 BasicSlotSet::EmptyBucketMode::FREE_EMPTY_BUCKETS);
924 DCHECK(slot_count);
925 USE(slot_count);
926 // SURVIVOR_TO_EXTERNAL_POINTER remembered set will be freed later by the
927 // sweeper.
928#endif
929}
930
932 TRACE_GC(heap_->tracer(), GCTracer::Scope::MINOR_MS_SWEEP_NEW);
934 paged_space->ClearAllocatorState();
935
936 int will_be_swept = 0;
937 bool has_promoted_pages = false;
938
940
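 // Each new-space page is either released or swept while empty, promoted
 // wholesale to the old space, or queued for the minor sweeper.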
941 for (auto it = paged_space->begin(); it != paged_space->end();) {
942 PageMetadata* p = *(it++);
943 DCHECK(p->SweepingDone());
944
945 intptr_t live_bytes_on_page = p->live_bytes();
946 if (live_bytes_on_page == 0) {
947 if (paged_space->ShouldReleaseEmptyPage()) {
948 paged_space->ReleasePage(p);
949 } else {
951 }
952 continue;
953 }
954
955 if (ShouldMovePage(p, live_bytes_on_page, p->wasted_memory())) {
957 // free list categories will be relinked by the sweeper after sweeping is
958 // done.
961 has_promoted_pages = true;
963 } else {
964 // Page is not promoted. Sweep it instead.
966 will_be_swept++;
967 }
968 }
969
970#ifdef V8_COMPRESS_POINTERS
971 // Now that we have evacuated any external pointers, rebuild EPT free-lists
972 // for the new space.
973 heap_->isolate()->external_pointer_table().SweepAndCompact(
974 heap_->young_external_pointer_space(), heap_->isolate()->counters());
975#endif
976
977 if (v8_flags.gc_verbose) {
979 "sweeping: space=%s initialized_for_sweeping=%d",
980 ToString(paged_space->identity()), will_be_swept);
981 }
982
983 return has_promoted_pages;
984}
985
987 TRACE_GC(heap_->tracer(), GCTracer::Scope::MINOR_MS_SWEEP_NEW);
988 PagedSpaceBase* paged_space = heap_->sticky_space();
989 paged_space->ClearAllocatorState();
990
991 int will_be_swept = 0;
992
993 for (auto it = paged_space->begin(); it != paged_space->end();) {
994 PageMetadata* p = *(it++);
995 DCHECK(p->SweepingDone());
996
997 intptr_t live_bytes_on_page = p->live_bytes();
998 if (live_bytes_on_page == 0) {
999 // Don't release empty pages with sticky bits, since there may be other
1000 // live old objects not accounted for in the current live bytes.
1002 continue;
1003 }
1004
1005 // TODO(333906585): Fix the promotion counter.
1006 sweeper()->AddPage(OLD_SPACE, p);
1007 will_be_swept++;
1008 }
1009
1010 static_cast<StickySpace*>(paged_space)
1011 ->set_old_objects_size(paged_space->Size());
1012
1013#ifdef V8_COMPRESS_POINTERS
1014 // Now that we have evacuated any external pointers, rebuild EPT free-lists
1015 // for the new space.
1016 heap_->isolate()->external_pointer_table().SweepAndCompact(
1017 heap_->young_external_pointer_space(), heap_->isolate()->counters());
1018#endif
1019
1020 if (v8_flags.gc_verbose) {
1022 "sweeping: space=%s initialized_for_sweeping=%d",
1023 ToString(paged_space->identity()), will_be_swept);
1024 }
1025}
1026
1028 TRACE_GC(heap_->tracer(), GCTracer::Scope::MINOR_MS_SWEEP_NEW_LO);
1029 NewLargeObjectSpace* new_lo_space = heap_->new_lo_space();
1030 DCHECK_NOT_NULL(new_lo_space);
1032
1033 bool has_promoted_pages = false;
1034
1035 OldLargeObjectSpace* old_lo_space = heap_->lo_space();
1036
1037 for (auto it = new_lo_space->begin(); it != new_lo_space->end();) {
1038 LargePageMetadata* current = *it;
1039 MemoryChunk* chunk = current->Chunk();
1040 it++;
1041
1042 Tagged<HeapObject> object = current->GetObject();
1043 if (!non_atomic_marking_state_->IsMarked(object)) {
1044 // Object is dead and page can be released.
1045 new_lo_space->RemovePage(current);
1047 current);
1048 continue;
1049 }
1052 current->marking_progress_tracker().ResetIfEnabled();
1054 old_lo_space->PromoteNewLargeObject(current);
1055 has_promoted_pages = true;
1056 sweeper()->AddPromotedPage(current);
1057 }
1058 new_lo_space->set_objects_size(0);
1059
1060 return has_promoted_pages;
1061}
1062
1064 DCHECK(!sweeper()->AreMinorSweeperTasksRunning());
1066
1068 heap_->tracer(), GCTracer::Scope::MINOR_MS_SWEEP,
1069 sweeper_->GetTraceIdForFlowEvent(GCTracer::Scope::MINOR_MS_SWEEP),
1071
1072 bool has_promoted_pages = false;
1073 if (v8_flags.sticky_mark_bits) {
1075 } else {
1076 has_promoted_pages = StartSweepNewSpace();
1077 }
1078 if (SweepNewLargeSpace()) has_promoted_pages = true;
1079
1080 if (v8_flags.verify_heap && has_promoted_pages) {
1081 // Update the external string table in preparation for heap verification.
1082 // Otherwise, updating the table will happen during the next full GC.
1084 GCTracer::Scope::MINOR_MS_SWEEP_UPDATE_STRING_TABLE);
1086 FullObjectSlot p) {
1087 DCHECK(
1088 !Cast<HeapObject>(*p)->map_word(kRelaxedLoad).IsForwardingAddress());
1089 return Cast<String>(*p);
1090 });
1091 }
1092
1094
1095#ifdef DEBUG
1096 VerifyRememberedSetsAfterEvacuation(heap_,
1098 heap_->VerifyCountersBeforeConcurrentSweeping(
1100#endif
1101
1103 DCHECK_EQ(0, heap_->new_lo_space()->Size());
1104 const bool empty_new_space =
1105 v8_flags.sticky_mark_bits
1107 : heap_->new_space()->Size() == 0;
1112}
1113
1115 if (is_in_atomic_pause()) return;
1116 DCHECK(v8_flags.concurrent_minor_ms_marking);
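 // Only the first finalization request actually interrupts the main thread;
 // later requests see the flag already set and return early.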
1117 if (gc_finalization_requested_.exchange(true, std::memory_order_relaxed))
1118 return;
1119 heap_->isolate()->stack_guard()->RequestGC();
1120}
1121} // namespace internal
1122} // namespace v8