#include <unordered_set>
class YoungGenerationMarkingVerifier : public MarkingVerifierBase {
  explicit YoungGenerationMarkingVerifier(Heap* heap)
      : MarkingVerifierBase(heap),
        /* ... */ {}

  const MarkingBitmap* bitmap(const MutablePageMetadata* chunk) override {
    return chunk->marking_bitmap();
  }

  bool IsMarked(Tagged<HeapObject> object) override {
    // ...
  }

  void Run() override {
    // ...
    VerifyMarking(heap_->sticky_space());
    // ...
    VerifyMarking(heap_->new_space());
    // ...
  }

  GarbageCollector collector() const override {
    return GarbageCollector::MINOR_MARK_SWEEPER;
  }

  void VerifyMap(Tagged<Map> map) override { VerifyHeapObjectImpl(map); }

  void VerifyPointers(ObjectSlot start, ObjectSlot end) override {
    // ...
  }

  void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) override {
    // ...
  }

  void VerifyCodePointer(InstructionStreamSlot slot) override {
    // ...
  }

  void VisitCodeTarget(Tagged<InstructionStream> host,
                       RelocInfo* rinfo) override {
    Tagged<InstructionStream> target =
        InstructionStream::FromTargetAddress(rinfo->target_address());
    VerifyHeapObjectImpl(target);
  }

  void VisitEmbeddedPointer(Tagged<InstructionStream> host,
                            RelocInfo* rinfo) override {
    VerifyHeapObjectImpl(rinfo->target_object(cage_base()));
  }

  void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) override {
    // ...
  }

  V8_INLINE void VerifyHeapObjectImpl(Tagged<HeapObject> heap_object) {
    // Every reachable young-generation object must have been marked.
    CHECK_IMPLIES(/* heap_object is in the young generation */,
                  IsMarked(heap_object));
  }

  template <typename TSlot>
  V8_INLINE void VerifyPointersImpl(TSlot start, TSlot end) {
    PtrComprCageBase cage_base =
        GetPtrComprCageBaseFromOnHeapAddress(start.address());
    for (TSlot slot = start; slot < end; ++slot) {
      typename TSlot::TObject object = slot.load(cage_base);
#ifdef V8_ENABLE_DIRECT_HANDLE
      if (object.ptr() == kTaggedNullAddress) continue;
#endif  // V8_ENABLE_DIRECT_HANDLE
      Tagged<HeapObject> heap_object;
      if (object.GetHeapObject(&heap_object)) {
        VerifyHeapObjectImpl(heap_object);
      }
    }
  }
};
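// Illustration only (not part of the original file): a minimal sketch of how
// the verifier above is typically driven under VERIFY_HEAP.
// VerifyYoungMarkingSketch is a hypothetical helper; Run() is the
// MarkingVerifierBase entry point used above.
static void VerifyYoungMarkingSketch(Heap* heap) {
  YoungGenerationMarkingVerifier verifier(heap);
  verifier.Run();  // Walks roots and the young generation, CHECKing mark bits.
}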
int EstimateMaxNumberOfRemeberedSets(Heap* heap) {
  // ...
  return 2 * (heap->old_space()->CountTotalPages() +
              heap->lo_space()->PageCount() +
              heap->trusted_space()->CountTotalPages() +
              heap->trusted_lo_space()->PageCount()) +
         3 * (heap->code_space()->CountTotalPages() +
              heap->code_lo_space()->PageCount());
}
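// Illustration only (not part of the original file): the estimate above scales
// with page counts; the factors 2 and 3 presumably correspond to the number of
// remembered-set kinds tracked per regular page versus per code page. A worked
// example with hypothetical page counts:
static int EstimateExampleForHypotheticalHeap() {
  const int old_pages = 100, lo_pages = 4, trusted_pages = 8,
            trusted_lo_pages = 1, code_pages = 16, code_lo_pages = 2;
  return 2 * (old_pages + lo_pages + trusted_pages + trusted_lo_pages) +
         3 * (code_pages + code_lo_pages);  // == 2 * 113 + 3 * 18 == 280
}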
// static
std::vector<YoungGenerationRememberedSetsMarkingWorklist::MarkingItem>
YoungGenerationRememberedSetsMarkingWorklist::CollectItems(Heap* heap) {
  std::vector<MarkingItem> items;
  int max_remembered_set_count = EstimateMaxNumberOfRemeberedSets(heap);
  items.reserve(max_remembered_set_count);
  // For each page that has remembered sets pointing into the young generation:
  // ...
    if (slot_set || background_slot_set) {
      items.emplace_back(/* ... */ slot_set, background_slot_set);
    }
  // ...
  DCHECK_LE(items.size(), max_remembered_set_count);
  return items;
}
// MarkingItem: merging or deleting its remembered sets depends on the slot
// kind it holds:
  if (slots_type_ == SlotsType::kRegularSlots) {
    // ...
  } else {
    DCHECK_EQ(slots_type_, SlotsType::kTypedSlots);
    // ...
  }

  if (slots_type_ == SlotsType::kRegularSlots) {
    // ...
  } else {
    DCHECK_EQ(slots_type_, SlotsType::kTypedSlots);
    // ...
    if (typed_slot_set_) delete typed_slot_set_;
  }

// Worklist teardown:
  // ...
      item.DeleteRememberedSets();
  // ...
      item.MergeAndDeleteRememberedSets();
  // ...
    item.DeleteSetsOnTearDown();
YoungGenerationRootMarkingVisitor::YoungGenerationRootMarkingVisitor(
    MinorMarkSweepCollector* collector)
    : main_marking_visitor_(collector->main_marking_visitor()) {}

MinorMarkSweepCollector::MinorMarkSweepCollector(Heap* heap)
    : /* ... */
      non_atomic_marking_state_(heap_->non_atomic_marking_state()),
      sweeper_(heap_->sweeper()) {}

// ...
  if (!cpp_heap) return;
// ...
  cpp_heap->FinishConcurrentMarkingIfNeeded();
template <typename Space>
static bool ExternalPointerRememberedSetsEmpty(Space* space) {
  for (auto it = space->begin(); it != space->end();) {
    // ...
  }
  // ...
}

// ...
#if defined(VERIFY_HEAP) && !V8_ENABLE_STICKY_MARK_BITS_BOOL
  // ...
  CHECK(page->marking_bitmap()->IsClean());
  // ...
#endif
// Starting minor marking:
  if (cpp_heap && cpp_heap->generational_gc_supported()) {
    // ...
    cpp_heap->InitializeMarking(CppHeap::CollectionType::kMinor);
  }
  // ...
  DCHECK(ExternalPointerRememberedSetsEmpty(
      /* ... */));
  // ...
  pretenuring_feedback_ =
      std::make_unique<PretenuringHandler::PretenuringFeedbackMap>(
          /* ... */);
  // ...
  remembered_sets_marking_handler_ =
      std::make_unique<YoungGenerationRememberedSetsMarkingWorklist>(heap_);
  if (cpp_heap && cpp_heap->generational_gc_supported()) {
    // ...
    cpp_heap->StartMarking();
  }

// Finalizing marking:
  // ...
  cpp_heap->ProcessCrossThreadWeakness();
  // ...
  YoungGenerationMarkingVerifier verifier(heap_);
  // ...
  cpp_heap->FinishMarkingAndProcessWeakness();
  // ...
  isolate->traced_handles()->UpdateListOfYoungNodes();
  // ...
  isolate->stack_guard()->ClearGC();
class YoungStringForwardingTableCleaner final
    /* : base class elided */ {
 public:
  explicit YoungStringForwardingTableCleaner(Heap* heap)
      /* : ... */ {}

  // Clears dead young-generation entries from the string forwarding table.
  void ProcessYoungObjects() {
    // ...
    StringForwardingTable* forwarding_table =
        isolate_->string_forwarding_table();
    forwarding_table->IterateElements(
        [&](StringForwardingTable::Record* record) {
          ClearNonLiveYoungObjects(record);
        });
  }

 private:
  void ClearNonLiveYoungObjects(StringForwardingTable::Record* record) {
    // ...
    DisposeExternalResource(record);
    // ...
  }
};
bool IsUnmarkedObjectInYoungGeneration(Heap* heap, FullObjectSlot p) {
  // ...
}

// Clearing non-live references:
  TRACE_GC(heap_->tracer(),
           GCTracer::Scope::MINOR_MS_CLEAR_STRING_FORWARDING_TABLE);
  // ...
  YoungStringForwardingTableCleaner forwarding_table_cleaner(heap_);
  forwarding_table_cleaner.ProcessYoungObjects();
  // ...
  if (external_string_table.HasYoung()) {
    // ...
    /* ... */ external_visitor(heap_);
    // ...
  }
  // ...
  if (isolate->global_handles()->HasYoung() ||
      isolate->traced_handles()->HasYoung()) {
    TRACE_GC(heap_->tracer(),
             GCTracer::Scope::MINOR_MS_CLEAR_WEAK_GLOBAL_HANDLES);
    isolate->global_handles()->ProcessWeakYoungObjects(
        nullptr, &IsUnmarkedObjectInYoungGeneration);
    // ...
    isolate->traced_handles()->ResetYoungDeadNodes(
        &IsUnmarkedObjectInYoungGeneration);
    // ...
    isolate->traced_handles()->ProcessWeakYoungObjects(
        nullptr, &IsUnmarkedObjectInYoungGeneration);
  }
  // Clear dead entries from the ephemeron hash tables recorded during marking:
  while (local_ephemeron_table_list.Pop(&table)) {
    // For each entry i of the table:
    // ...
        table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i)));
    // ...
        table->RemoveEntry(i);
    // ...
  }

  // Prune the ephemeron remembered set; entries whose index set becomes empty
  // are dropped from the map:
  for (auto it = table_map->begin(); it != table_map->end();) {
    // ...
    auto& indices = it->second;
    for (auto iti = indices.begin(); iti != indices.end();) {
      // ...
        iti = indices.erase(iti);
      // ...
        iti = indices.erase(iti);
      // ...
    }
    if (indices.empty()) {
      it = table_map->erase(it);
    } else {
      ++it;
    }
  }
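// Illustration only (not part of the original file): the loops above use the
// standard erase-while-iterating idiom -- erase() returns the iterator to the
// next element, and a map entry is dropped once its index set becomes empty.
// A self-contained sketch (assumes <unordered_map> and <unordered_set>):
static void PruneEmptyEntriesSketch(
    std::unordered_map<int, std::unordered_set<int>>& table_map) {
  for (auto it = table_map.begin(); it != table_map.end();) {
    auto& indices = it->second;
    for (auto iti = indices.begin(); iti != indices.end();) {
      if (*iti % 2 != 0) {
        iti = indices.erase(iti);  // erase() hands back the next iterator.
      } else {
        ++iti;
      }
    }
    if (indices.empty()) {
      it = table_map.erase(it);  // Same idiom one level up, on the map itself.
    } else {
      ++it;
    }
  }
}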
  // ...
  DCHECK(js_object->MayHaveEmbedderFields());
  // ...
  cpp_heap->VisitCrossHeapRememberedSetIfNeeded([this](Tagged<JSObject> obj) {
    // ...
  });

void MinorMarkSweepCollector::MarkRoots(
    YoungGenerationRootMarkingVisitor& root_visitor,
    bool was_marked_incrementally) {
  // ...
  isolate->traced_handles()->ComputeWeaknessForYoungObjects();
  // ...
  isolate->global_handles()->IterateYoungStrongAndDependentRoots(
      /* ... */);
  // ...
}
class MinorMSConservativeStackVisitor
    : public ConservativeStackVisitorBase<MinorMSConservativeStackVisitor> {
 public:
  MinorMSConservativeStackVisitor(
      /* ... */)
      /* ... */

  // ...
  static bool FilterPage(const MemoryChunk* chunk) {
    // ...
  }
  // ...
  friend class ConservativeStackVisitorBase<MinorMSConservativeStackVisitor>;
};

// Conservative stack scanning:
  MinorMSConservativeStackVisitor stack_visitor(heap_->isolate(),
                                                root_visitor);
// Marking phase:
  const bool was_marked_incrementally =
      /* ... */;
  if (!was_marked_incrementally) {
    // ...
  } else {
    TRACE_GC_WITH_FLOW(
        heap_->tracer(), GCTracer::Scope::MINOR_MS_MARK_FINISH_INCREMENTAL,
        /* ... */);
    DCHECK(incremental_marking->IsMinorMarking());
    // ...
    incremental_marking->Stop();
  }
  // ...
  MarkRoots(root_visitor, was_marked_incrementally);
  // ...
  TRACE_GC_WITH_FLOW(heap_->tracer(),
                     GCTracer::Scope::MINOR_MS_MARK_CLOSURE_PARALLEL,
                     /* ... */);
  // ...
  TRACE_GC(heap_->tracer(),
           GCTracer::Scope::MINOR_MS_MARK_CONSERVATIVE_STACK);
  // ...
  cpp_heap->EnterProcessGlobalAtomicPause();
  // ...
  if (was_marked_incrementally) {
    // ...
  }
  // ...
  if (v8_flags.minor_ms_trace_fragmentation) {
    // ...
  }

// Draining the marking worklist:
  // ...
  marking_worklists_local->MergeOnHold();
  // ...
  while (marking_worklists_local->Pop(&heap_object)) {
    DCHECK(!IsFreeSpaceOrFiller(heap_object, cage_base));
    // ...
  }
void MinorMarkSweepCollector::TraceFragmentation() {
  // ...
  const std::array<size_t, 4> free_size_class_limits = {0, 1024, 2048, 4096};
  size_t free_bytes_of_class[free_size_class_limits.size()] = {0};
  size_t live_bytes = 0;
  size_t allocatable_bytes = 0;
  // For each new-space page p, walk its live objects (iteration elided); each
  // object's size accumulates into live_bytes, and every gap between objects
  // is bucketed into the cumulative free-size classes below.
    Address free_start = p->area_start();
    // ...
      Address free_end = object.address();
      if (free_end != free_start) {
        size_t free_bytes = free_end - free_start;
        int free_bytes_index = 0;
        for (auto free_size_class_limit : free_size_class_limits) {
          if (free_bytes >= free_size_class_limit) {
            free_bytes_of_class[free_bytes_index] += free_bytes;
          }
          free_bytes_index++;
        }
      }
      // ...
      free_start = free_end + size;
    // ...
    size_t area_end = p->Contains(top) ? top : p->area_end();
    if (free_start != area_end) {
      size_t free_bytes = area_end - free_start;
      int free_bytes_index = 0;
      for (auto free_size_class_limit : free_size_class_limits) {
        if (free_bytes >= free_size_class_limit) {
          free_bytes_of_class[free_bytes_index] += free_bytes;
        }
        free_bytes_index++;
      }
    }
    allocatable_bytes += area_end - p->area_start();
    CHECK_EQ(allocatable_bytes, live_bytes + free_bytes_of_class[0]);
  // ...
  PrintIsolate(heap_->isolate(),
               "Minor Mark-Sweep Fragmentation: allocatable_bytes=%zu "
               "live_bytes=%zu "
               "free_bytes=%zu free_bytes_1K=%zu free_bytes_2K=%zu "
               "free_bytes_4K=%zu\n",
               allocatable_bytes, live_bytes, free_bytes_of_class[0],
               free_bytes_of_class[1], free_bytes_of_class[2],
               free_bytes_of_class[3]);
}
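// Illustration only (not part of the original file): the size classes above
// are cumulative -- free_bytes_of_class[0] accumulates every free gap, while
// free_bytes_of_class[i] only accumulates gaps of at least
// free_size_class_limits[i] bytes, which is why class 0 equals the total free
// space checked by the CHECK_EQ above. A small sketch with hypothetical gaps:
static std::array<size_t, 4> BucketFreeGapsSketch() {
  const std::array<size_t, 4> limits = {0, 1024, 2048, 4096};
  std::array<size_t, 4> free_bytes_of_class = {0, 0, 0, 0};
  const size_t gaps[] = {512, 1500, 5000};  // Hypothetical free gaps in bytes.
  for (size_t gap : gaps) {
    size_t index = 0;
    for (size_t limit : limits) {
      if (gap >= limit) free_bytes_of_class[index] += gap;
      ++index;
    }
  }
  return free_bytes_of_class;  // {7012, 6500, 5000, 5000}
}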
intptr_t NewSpacePageEvacuationThreshold() {
  return v8_flags.minor_ms_page_promotion_threshold *
         /* ... */;
}

bool ShouldMovePage(PageMetadata* p, intptr_t live_bytes,
                    intptr_t wasted_bytes) {
  // ...
  Heap* heap = p->heap();
  DCHECK(!p->Chunk()->NeverEvacuate());
  const bool should_move_page =
      ((live_bytes + wasted_bytes) > NewSpacePageEvacuationThreshold() ||
       (p->AllocatedLabSize() == 0)) &&
      (heap->new_space()->IsPromotionCandidate(p)) &&
      heap->CanExpandOldGeneration(live_bytes);
  if (v8_flags.trace_page_promotions) {
    PrintIsolate(
        heap->isolate(),
        "[Page Promotion] %p: collector=mms, should move: %d"
        ", live bytes = %zu, wasted bytes = %zu, promotion threshold = %zu"
        ", allocated labs size = %zu\n",
        p, should_move_page, live_bytes, wasted_bytes,
        NewSpacePageEvacuationThreshold(), p->AllocatedLabSize());
  }
  if (!should_move_page &&
      (p->AgeInNewSpace() == v8_flags.minor_ms_max_page_age)) {
    // ...
  }
  return should_move_page;
}
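// Illustration only (not part of the original file): the decision above,
// restated as a standalone predicate. kHypotheticalThreshold stands in for
// NewSpacePageEvacuationThreshold(), whose exact computation is elided above;
// all parameters are stand-ins for the page queries used in ShouldMovePage.
static bool ShouldMovePageSketch(intptr_t live_bytes, intptr_t wasted_bytes,
                                 size_t allocated_lab_size,
                                 bool is_promotion_candidate,
                                 bool can_expand_old_generation) {
  constexpr intptr_t kHypotheticalThreshold = 180 * 1024;  // Hypothetical.
  return ((live_bytes + wasted_bytes) > kHypotheticalThreshold ||
          allocated_lab_size == 0) &&
         is_promotion_candidate && can_expand_old_generation;
}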
void MinorMarkSweepCollector::EvacuateExternalPointerReferences(
    MutablePageMetadata* p) {
#ifdef V8_COMPRESS_POINTERS
  // ...
  ExternalPointerTable& table = heap_->isolate()->external_pointer_table();
  ExternalPointerTable::Space* young = heap_->young_external_pointer_space();
  ExternalPointerTable::Space* old = heap_->old_external_pointer_space();
  // Surviving entries are moved from the young to the old external pointer
  // table space.
  // ...
      table.Evacuate(young, old, handle, handle_location,
                     ExternalPointerTable::EvacuateMarkMode::kClearMark);
  // ...
  auto slot_count = slots->Iterate<BasicSlotSet::AccessMode::NON_ATOMIC>(
      /* ... */,
      BasicSlotSet::EmptyBucketMode::FREE_EMPTY_BUCKETS);
  // ...
#endif  // V8_COMPRESS_POINTERS
}
bool MinorMarkSweepCollector::StartSweepNewSpace() {
  // ...
  int will_be_swept = 0;
  bool has_promoted_pages = false;
  // ...
  for (auto it = paged_space->begin(); it != paged_space->end();) {
    // ...
    intptr_t live_bytes_on_page = p->live_bytes();
    if (live_bytes_on_page == 0) {
      // ...
    }
    // ...
    if (ShouldMovePage(p, live_bytes_on_page, p->wasted_memory())) {
      // ...
      has_promoted_pages = true;
    }
    // ...
  }
  // ...
#ifdef V8_COMPRESS_POINTERS
  // ...
  heap_->isolate()->external_pointer_table().SweepAndCompact(
      /* ... */);
#endif  // V8_COMPRESS_POINTERS
  // ...
      "sweeping: space=%s initialized_for_sweeping=%d",
      ToString(paged_space->identity()), will_be_swept);
  // ...
  return has_promoted_pages;
}
void MinorMarkSweepCollector::StartSweepNewSpaceWithStickyBits() {
  // ...
  int will_be_swept = 0;
  // ...
  for (auto it = paged_space->begin(); it != paged_space->end();) {
    // ...
    intptr_t live_bytes_on_page = p->live_bytes();
    if (live_bytes_on_page == 0) {
      // ...
    }
    // ...
  }
  // ...
      ->set_old_objects_size(paged_space->Size());
  // ...
#ifdef V8_COMPRESS_POINTERS
  // ...
  heap_->isolate()->external_pointer_table().SweepAndCompact(
      /* ... */);
#endif  // V8_COMPRESS_POINTERS
  // ...
      "sweeping: space=%s initialized_for_sweeping=%d",
      ToString(paged_space->identity()), will_be_swept);
  // ...
}
bool MinorMarkSweepCollector::SweepNewLargeSpace() {
  // ...
  bool has_promoted_pages = false;
  // ...
  for (auto it = new_lo_space->begin(); it != new_lo_space->end();) {
    // ...
    current->marking_progress_tracker().ResetIfEnabled();
    // ...
    has_promoted_pages = true;
    // ...
  }
  // ...
  return has_promoted_pages;
}
// Sweeping:
  bool has_promoted_pages = false;
  // ...
  if (v8_flags.verify_heap && has_promoted_pages) {
    // ...
  }
  // ...
  TRACE_GC(heap_->tracer(),
           GCTracer::Scope::MINOR_MS_SWEEP_UPDATE_STRING_TABLE);
  // ...
  VerifyRememberedSetsAfterEvacuation(heap_,
                                      /* ... */);
  // ...
  heap_->VerifyCountersBeforeConcurrentSweeping(
      /* ... */);
  // ...
  const bool empty_new_space =
      /* ... */;