v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
mark-sweep-utilities.cc
// Copyright 2023 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/mark-sweep-utilities.h"

#include "src/heap/new-spaces.h"

namespace v8 {
namespace internal {

// The following has to hold in order for {MarkingState::MarkBitFrom} to not
// produce invalid {kImpossibleBitPattern} in the marking bitmap by overlapping.
static_assert(Heap::kMinObjectSizeInTaggedWords >= 2);

#ifdef VERIFY_HEAP
MarkingVerifierBase::MarkingVerifierBase(Heap* heap)
    : ObjectVisitorWithCageBases(heap), heap_(heap) {}

void MarkingVerifierBase::VisitMapPointer(Tagged<HeapObject> object) {
  VerifyMap(object->map(cage_base()));
}

void MarkingVerifierBase::VerifyRoots() {
  heap_->IterateRootsIncludingClients(this,
                                      base::EnumSet<SkipRoot>{SkipRoot::kWeak});
}

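// Walks the live objects on a page in address order and checks that each one
// is marked, that consecutive objects do not overlap, and that the mark bits
// covering an object are either all set (a black area created by black
// allocation) or all clear after the first bit (a regular black object).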
void MarkingVerifierBase::VerifyMarkingOnPage(const PageMetadata* page,
                                              Address start, Address end) {
  Address next_object_must_be_here_or_later = start;

  for (auto [object, size] : LiveObjectRange(page)) {
    Address current = object.address();
    if (current < start) continue;
    if (current >= end) break;
    CHECK(IsMarked(object));
    CHECK(current >= next_object_must_be_here_or_later);
    VisitObject(heap_->isolate(), object, this);
    next_object_must_be_here_or_later = current + size;
    // The object is either part of a black area created by black allocation
    // or a regular black object.
    CHECK(bitmap(page)->AllBitsSetInRange(
              MarkingBitmap::AddressToIndex(current),
              MarkingBitmap::LimitAddressToIndex(
                  next_object_must_be_here_or_later)) ||
          bitmap(page)->AllBitsClearInRange(
              MarkingBitmap::AddressToIndex(current) + 1,
              MarkingBitmap::LimitAddressToIndex(
                  next_object_must_be_here_or_later)));
    current = next_object_must_be_here_or_later;
  }
}

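// With minor mark-sweep (v8_flags.minor_ms) the new space is a paged space
// underneath, so verification is delegated to the paged-space overload.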
void MarkingVerifierBase::VerifyMarking(NewSpace* space) {
  if (!space) return;

  if (v8_flags.minor_ms) {
    VerifyMarking(PagedNewSpace::From(space)->paged_space());
    return;
  }

  for (PageMetadata* page : *space) {
    VerifyMarkingOnPage(page, page->area_start(), page->area_end());
  }
}

void MarkingVerifierBase::VerifyMarking(PagedSpaceBase* space) {
  for (PageMetadata* p : *space) {
    VerifyMarkingOnPage(p, p->area_start(), p->area_end());
  }
}

void MarkingVerifierBase::VerifyMarking(LargeObjectSpace* lo_space) {
  if (!lo_space) return;
  LargeObjectSpaceObjectIterator it(lo_space);
  for (Tagged<HeapObject> obj = it.Next(); !obj.is_null(); obj = it.Next()) {
    if (IsMarked(obj)) {
      VisitObject(heap_->isolate(), obj, this);
    }
  }
}
#endif  // VERIFY_HEAP

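// Sweeps a root range of the external string table: entries whose strings are
// dead are finalized (releasing their external resource) and overwritten with
// the hole value. In kYoungOnly mode, strings outside the young generation
// are skipped.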
template <ExternalStringTableCleaningMode mode>
void ExternalStringTableCleanerVisitor<mode>::VisitRootPointers(
    Root root, const char* description, FullObjectSlot start,
    FullObjectSlot end) {
  // Visit all HeapObject pointers in [start, end).
  DCHECK_EQ(static_cast<int>(root),
            static_cast<int>(Root::kExternalStringsTable));
  NonAtomicMarkingState* marking_state = heap_->non_atomic_marking_state();
  Tagged<Object> the_hole = ReadOnlyRoots(heap_).the_hole_value();
  for (FullObjectSlot p = start; p < end; ++p) {
    Tagged<Object> o = *p;
    if (!IsHeapObject(o)) continue;
    Tagged<HeapObject> heap_object = Cast<HeapObject>(o);
    // MinorMS doesn't update the young strings set and so it may contain
    // strings that are already in old space.
    if (MarkingHelper::IsMarkedOrAlwaysLive(heap_, marking_state, heap_object))
      continue;
    if ((mode == ExternalStringTableCleaningMode::kYoungOnly) &&
        !HeapLayout::InYoungGeneration(heap_object))
      continue;
    if (IsExternalString(o)) {
      heap_->FinalizeExternalString(Cast<String>(o));
    } else {
      // The original external string may have been internalized.
      DCHECK(IsThinString(o));
    }
    // Set the entry to the_hole_value (as deleted).
    p.store(the_hole);
  }
}

StringForwardingTableCleanerBase::StringForwardingTableCleanerBase(Heap* heap)
    : isolate_(heap->isolate()),
      marking_state_(heap->non_atomic_marking_state()) {}

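// Disposes the external resource referenced by a forwarding-table record at
// most once: several records may point at the same resource, so addresses of
// already-disposed resources are tracked in disposed_resources_.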
void StringForwardingTableCleanerBase::DisposeExternalResource(
    StringForwardingTable::Record* record) {
  Address resource = record->ExternalResourceAddress();
  if (resource != kNullAddress && disposed_resources_.count(resource) == 0) {
    record->DisposeExternalResource();
    disposed_resources_.insert(resource);
  }
}

bool IsCppHeapMarkingFinished(
    Heap* heap, MarkingWorklists::Local* local_marking_worklists) {
  const auto* cpp_heap = CppHeap::From(heap->cpp_heap());
  if (!cpp_heap) return true;

  return cpp_heap->IsMarkingDone() && local_marking_worklists->IsWrapperEmpty();
}

#if DEBUG
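// Checks that evacuation left the remembered sets in a consistent state:
// old-to-old and trusted-to-trusted sets must be gone, old-to-new sets must
// be gone after a full GC that emptied new space, and pages in new or shared
// spaces must hold no old-to-shared slots.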
void VerifyRememberedSetsAfterEvacuation(Heap* heap,
                                         GarbageCollector garbage_collector) {
  // Old-to-old slot sets must be empty after evacuation.
  bool new_space_is_empty =
      !heap->new_space() || heap->new_space()->Size() == 0;
  DCHECK_IMPLIES(garbage_collector == GarbageCollector::MARK_COMPACTOR,
                 new_space_is_empty);

  MemoryChunkIterator chunk_iterator(heap);

  while (chunk_iterator.HasNext()) {
    MutablePageMetadata* chunk = chunk_iterator.Next();

    // Old-to-old slot sets must be empty after evacuation.
    DCHECK_NULL((chunk->slot_set<OLD_TO_OLD, AccessMode::ATOMIC>()));
    DCHECK_NULL((chunk->slot_set<TRUSTED_TO_TRUSTED, AccessMode::ATOMIC>()));
    DCHECK_NULL((chunk->typed_slot_set<OLD_TO_OLD, AccessMode::ATOMIC>()));
    DCHECK_NULL(
        (chunk->typed_slot_set<TRUSTED_TO_TRUSTED, AccessMode::ATOMIC>()));

    if (new_space_is_empty &&
        (garbage_collector == GarbageCollector::MARK_COMPACTOR)) {
      // Old-to-new slot sets must be empty after evacuation.
      DCHECK_NULL((chunk->slot_set<OLD_TO_NEW, AccessMode::ATOMIC>()));
      DCHECK_NULL((chunk->typed_slot_set<OLD_TO_NEW, AccessMode::ATOMIC>()));
      DCHECK_NULL(
          (chunk->slot_set<OLD_TO_NEW_BACKGROUND, AccessMode::ATOMIC>()));
      DCHECK_NULL(
          (chunk->typed_slot_set<OLD_TO_NEW_BACKGROUND, AccessMode::ATOMIC>()));
    }

    // Old-to-shared slots may survive GC but there should never be any slots
    // in new or shared spaces.
    AllocationSpace id = chunk->owner_identity();
    if (IsAnySharedSpace(id) || IsAnyNewSpace(id)) {
      DCHECK_NULL((chunk->slot_set<OLD_TO_SHARED, AccessMode::ATOMIC>()));
      DCHECK_NULL((chunk->typed_slot_set<OLD_TO_SHARED, AccessMode::ATOMIC>()));
      DCHECK_NULL(
          (chunk->slot_set<TRUSTED_TO_SHARED_TRUSTED, AccessMode::ATOMIC>()));
    }

    // No support for trusted-to-shared-trusted typed slots.
    DCHECK_NULL((chunk->typed_slot_set<TRUSTED_TO_SHARED_TRUSTED>()));
  }

  if (v8_flags.sticky_mark_bits) {
    OldGenerationMemoryChunkIterator::ForAll(
        heap, [](MutablePageMetadata* chunk) {
          DCHECK(!chunk->ContainsSlots<OLD_TO_NEW>());
          DCHECK(!chunk->ContainsSlots<OLD_TO_NEW_BACKGROUND>());
        });
  }
}
#endif  // DEBUG

}  // namespace internal
}  // namespace v8