v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
remembered-set.cc
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if defined(CPPGC_YOUNG_GENERATION)

#include "src/heap/cppgc/remembered-set.h"

#include <algorithm>

#include "include/cppgc/member.h"
#include "include/cppgc/visitor.h"
#include "src/heap/base/basic-slot-set.h"
#include "src/heap/cppgc/heap-base.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/marking-state.h"

namespace cppgc {
namespace internal {

namespace {

enum class SlotType { kCompressed, kUncompressed };

void EraseFromSet(std::set<void*>& set, void* begin, void* end) {
  // TODO(1029379): The 2 binary walks can be optimized with a custom algorithm.
  auto from = set.lower_bound(begin), to = set.lower_bound(end);
  set.erase(from, to);
}

// TODO(1029379): Make the implementation functions private functions of
// OldToNewRememberedSet to avoid parameter passing.
void InvalidateCompressedRememberedSlots(
    const HeapBase& heap, void* begin, void* end,
    std::set<void*>& remembered_slots_for_verification) {
  DCHECK_LT(begin, end);

  BasePage* page = BasePage::FromInnerAddress(&heap, begin);
  DCHECK_NOT_NULL(page);
  // The input range must reside within the same page.
  DCHECK_EQ(page, BasePage::FromInnerAddress(
                      &heap, reinterpret_cast<void*>(
                                 reinterpret_cast<uintptr_t>(end) - 1)));

  auto* slot_set = page->slot_set();
  if (!slot_set) return;

  const size_t buckets_size = SlotSet::BucketsForSize(page->AllocatedSize());

  const uintptr_t page_start = reinterpret_cast<uintptr_t>(page);
  const uintptr_t ubegin = reinterpret_cast<uintptr_t>(begin);
  const uintptr_t uend = reinterpret_cast<uintptr_t>(end);

  slot_set->RemoveRange(ubegin - page_start, uend - page_start, buckets_size,
                        SlotSet::EmptyBucketMode::FREE_EMPTY_BUCKETS);
#if DEBUG
  EraseFromSet(remembered_slots_for_verification, begin, end);
#endif  // DEBUG
}

void InvalidateUncompressedRememberedSlots(
    std::set<void*>& slots, void* begin, void* end,
    std::set<void*>& remembered_slots_for_verification) {
  EraseFromSet(slots, begin, end);
#if DEBUG
  EraseFromSet(remembered_slots_for_verification, begin, end);
#endif  // DEBUG
#if defined(ENABLE_SLOW_DCHECKS)
  // Check that no remembered slots are referring to the freed area.
  DCHECK(std::none_of(slots.begin(), slots.end(), [begin, end](void* slot) {
    void* value = nullptr;
    value = *reinterpret_cast<void**>(slot);
    return begin <= value && value < end;
  }));
#endif  // defined(ENABLE_SLOW_DCHECKS)
}

// Visit remembered set that was recorded in the generational barrier.
template <SlotType slot_type>
void VisitSlot(const HeapBase& heap, const BasePage& page, Address slot,
               MutatorMarkingState& marking_state,
               const std::set<void*>& slots_for_verification) {
#if defined(DEBUG)
  DCHECK_EQ(BasePage::FromInnerAddress(&heap, slot), &page);
  DCHECK_NE(slots_for_verification.end(), slots_for_verification.find(slot));
#endif  // defined(DEBUG)

  // Slot must always point to a valid, not freed object.
  auto& slot_header = page.ObjectHeaderFromInnerAddress(slot);
  // The age checking in the generational barrier is imprecise, since a card
  // may have mixed young/old objects. Check here precisely if the object is
  // old.
  if (slot_header.IsYoung()) return;

#if defined(CPPGC_POINTER_COMPRESSION)
  void* value = nullptr;
  if constexpr (slot_type == SlotType::kCompressed) {
    value = CompressedPointer::Decompress(*reinterpret_cast<uint32_t*>(slot));
  } else {
    value = *reinterpret_cast<void**>(slot);
  }
#else   // !defined(CPPGC_POINTER_COMPRESSION)
  void* value = *reinterpret_cast<void**>(slot);
#endif  // !defined(CPPGC_POINTER_COMPRESSION)

  // Slot could be updated to nullptr or kSentinelPointer by the mutator.
  if (value == kSentinelPointer || value == nullptr) return;

#if defined(DEBUG)
  // Check that the slot can not point to a freed object.
  HeapObjectHeader& header =
      BasePage::FromPayload(value)->ObjectHeaderFromInnerAddress(value);
  DCHECK(!header.IsFree());
#endif  // defined(DEBUG)

  marking_state.DynamicallyMarkAddress(static_cast<Address>(value));
}

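// Heap visitor that iterates the per-page SlotSets recorded by the
// generational barrier and marks the objects that the remembered compressed
// slots point to.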
class CompressedSlotVisitor : HeapVisitor<CompressedSlotVisitor> {
  friend class HeapVisitor<CompressedSlotVisitor>;

 public:
  CompressedSlotVisitor(HeapBase& heap, MutatorMarkingState& marking_state,
                        const std::set<void*>& slots_for_verification)
      : heap_(heap),
        marking_state_(marking_state),
        remembered_slots_for_verification_(slots_for_verification) {}

  size_t Run() {
    Traverse(heap_.raw_heap());
    return objects_visited_;
  }

 private:
  heap::base::SlotCallbackResult VisitCompressedSlot(Address slot) {
    DCHECK(current_page_);
    VisitSlot<SlotType::kCompressed>(heap_, *current_page_, slot,
                                     marking_state_,
                                     remembered_slots_for_verification_);
    ++objects_visited_;
    return heap::base::KEEP_SLOT;
  }

  void VisitSlotSet(SlotSet* slot_set) {
    DCHECK(current_page_);

    if (!slot_set) return;

    const uintptr_t page_start = reinterpret_cast<uintptr_t>(current_page_);
    const size_t buckets_size =
        SlotSet::BucketsForSize(current_page_->AllocatedSize());

    slot_set->Iterate(
        page_start, 0, buckets_size,
        [this](SlotSet::Address slot) {
          return VisitCompressedSlot(reinterpret_cast<Address>(slot));
        },
        SlotSet::EmptyBucketMode::FREE_EMPTY_BUCKETS);
  }

  bool VisitNormalPage(NormalPage& page) {
    current_page_ = &page;
    VisitSlotSet(page.slot_set());
    return true;
  }

  bool VisitLargePage(LargePage& page) {
    current_page_ = &page;
    VisitSlotSet(page.slot_set());
    return true;
  }

  HeapBase& heap_;
  MutatorMarkingState& marking_state_;
  BasePage* current_page_ = nullptr;

  const std::set<void*>& remembered_slots_for_verification_;
  size_t objects_visited_ = 0u;
};

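// Heap visitor that drops the per-page SlotSets; used by
// OldToNewRememberedSet::Reset().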
class SlotRemover : HeapVisitor<SlotRemover> {
  friend class HeapVisitor<SlotRemover>;

 public:
  explicit SlotRemover(HeapBase& heap) : heap_(heap) {}

  void Run() { Traverse(heap_.raw_heap()); }

 private:
  bool VisitNormalPage(NormalPage& page) {
    page.ResetSlotSet();
    return true;
  }

  bool VisitLargePage(LargePage& page) {
    page.ResetSlotSet();
    return true;
  }

  HeapBase& heap_;
};

// Visit remembered set that was recorded in the generational barrier.
void VisitRememberedSlots(
    HeapBase& heap, MutatorMarkingState& mutator_marking_state,
    const std::set<void*>& remembered_uncompressed_slots,
    const std::set<void*>& remembered_slots_for_verification) {
  size_t objects_visited = 0;
  {
    CompressedSlotVisitor slot_visitor(heap, mutator_marking_state,
                                       remembered_slots_for_verification);
    objects_visited += slot_visitor.Run();
  }
  for (void* uncompressed_slot : remembered_uncompressed_slots) {
    auto* page = BasePage::FromInnerAddress(&heap, uncompressed_slot);
    DCHECK(page);
    VisitSlot<SlotType::kUncompressed>(
        heap, *page, static_cast<Address>(uncompressed_slot),
        mutator_marking_state, remembered_slots_for_verification);
    ++objects_visited;
  }
  DCHECK_EQ(remembered_slots_for_verification.size(), objects_visited);
  USE(objects_visited);
}

// Visits source objects that were recorded in the generational barrier for
// slots.
void VisitRememberedSourceObjects(
    const std::set<HeapObjectHeader*>& remembered_source_objects,
    Visitor& visitor) {
  for (HeapObjectHeader* source_hoh : remembered_source_objects) {
    DCHECK(source_hoh);
    // The age checking in the generational barrier is imprecise, since a card
    // may have mixed young/old objects. Check here precisely if the object is
    // old.
    if (source_hoh->IsYoung()) continue;

    const TraceCallback trace_callback =
        GlobalGCInfoTable::GCInfoFromIndex(source_hoh->GetGCInfoIndex()).trace;

    // Process eagerly to avoid reaccounting.
    trace_callback(&visitor, source_hoh->ObjectStart());
  }
}

// Revisit in-construction objects from previous GCs. We must do this to make
// sure that we don't miss any initializing pointer writes if a previous GC
// happened while an object was in construction.
void RevisitInConstructionObjects(
    std::set<HeapObjectHeader*>& remembered_in_construction_objects,
    Visitor& visitor, ConservativeTracingVisitor& conservative_visitor) {
  for (HeapObjectHeader* hoh : remembered_in_construction_objects) {
    DCHECK(hoh);
    // The object must have been marked in the previous GC.
    DCHECK(hoh->IsMarked());

    if (hoh->template IsInConstruction<AccessMode::kNonAtomic>()) {
      conservative_visitor.TraceConservatively(*hoh);
    } else {
      // If the object is fully constructed, trace precisely.
      const TraceCallback trace_callback =
          GlobalGCInfoTable::GCInfoFromIndex(hoh->GetGCInfoIndex()).trace;
      trace_callback(&visitor, hoh->ObjectStart());
    }
  }
}

}  // namespace

void OldToNewRememberedSet::AddSlot(void* slot) {
  DCHECK(heap_.generational_gc_supported());

  BasePage* source_page = BasePage::FromInnerAddress(&heap_, slot);
  DCHECK(source_page);

  auto& slot_set = source_page->GetOrAllocateSlotSet();

  const uintptr_t slot_offset = reinterpret_cast<uintptr_t>(slot) -
                                reinterpret_cast<uintptr_t>(source_page);

  slot_set.Insert<SlotSet::AccessMode::NON_ATOMIC>(
      static_cast<size_t>(slot_offset));

#if defined(DEBUG)
  remembered_slots_for_verification_.insert(slot);
#endif  // defined(DEBUG)
}

void OldToNewRememberedSet::AddUncompressedSlot(void* uncompressed_slot) {
  DCHECK(heap_.generational_gc_supported());
  remembered_uncompressed_slots_.insert(uncompressed_slot);
#if defined(DEBUG)
  remembered_slots_for_verification_.insert(uncompressed_slot);
#endif  // defined(DEBUG)
}

void OldToNewRememberedSet::AddSourceObject(HeapObjectHeader& hoh) {
  DCHECK(heap_.generational_gc_supported());
  remembered_source_objects_.insert(&hoh);
}

void OldToNewRememberedSet::AddWeakCallback(WeakCallbackItem item) {
  DCHECK(heap_.generational_gc_supported());
  // TODO(1029379): WeakCallbacks are also executed for weak collections.
  // Consider splitting weak callbacks into custom weak callbacks and ones for
  // collections.
  remembered_weak_callbacks_.insert(item);
}

void OldToNewRememberedSet::AddInConstructionObjectToBeRetraced(
    HeapObjectHeader& hoh) {
  DCHECK(heap_.generational_gc_supported());
  remembered_in_construction_objects_.current.insert(&hoh);
}

void OldToNewRememberedSet::InvalidateRememberedSlotsInRange(void* begin,
                                                             void* end) {
  DCHECK(heap_.generational_gc_supported());
  InvalidateCompressedRememberedSlots(heap_, begin, end,
                                      remembered_slots_for_verification_);
  InvalidateUncompressedRememberedSlots(remembered_uncompressed_slots_, begin,
                                        end,
                                        remembered_slots_for_verification_);
}

void OldToNewRememberedSet::InvalidateRememberedSourceObject(
    HeapObjectHeader& header) {
  DCHECK(heap_.generational_gc_supported());
  remembered_source_objects_.erase(&header);
}

void OldToNewRememberedSet::Visit(
    Visitor& visitor, ConservativeTracingVisitor& conservative_visitor,
    MutatorMarkingState& marking_state) {
  DCHECK(heap_.generational_gc_supported());
  VisitRememberedSlots(heap_, marking_state, remembered_uncompressed_slots_,
                       remembered_slots_for_verification_);
  VisitRememberedSourceObjects(remembered_source_objects_, visitor);
  RevisitInConstructionObjects(remembered_in_construction_objects_.previous,
                               visitor, conservative_visitor);
}

void OldToNewRememberedSet::ExecuteCustomCallbacks(LivenessBroker broker) {
  DCHECK(heap_.generational_gc_supported());
  for (const auto& callback : remembered_weak_callbacks_) {
    callback.callback(broker, callback.parameter);
  }
}

void OldToNewRememberedSet::ReleaseCustomCallbacks() {
  DCHECK(heap_.generational_gc_supported());
  remembered_weak_callbacks_.clear();
}

void OldToNewRememberedSet::Reset() {
  DCHECK(heap_.generational_gc_supported());
  SlotRemover slot_remover(heap_);
  slot_remover.Run();
  remembered_uncompressed_slots_.clear();
  remembered_source_objects_.clear();
#if DEBUG
  remembered_slots_for_verification_.clear();
#endif  // DEBUG
  remembered_in_construction_objects_.Reset();
  // Custom weak callbacks are alive across GCs.
}

bool OldToNewRememberedSet::IsEmpty() const {
  // TODO(1029379): Add visitor to check if empty.
  return remembered_uncompressed_slots_.empty() &&
         remembered_source_objects_.empty() &&
         remembered_weak_callbacks_.empty();
}

void OldToNewRememberedSet::RememberedInConstructionObjects::Reset() {
  // Make sure to keep the still-in-construction objects in the remembered set,
  // as otherwise the marker won't be able to observe them again: they are
  // already marked from the previous GC.
  std::copy_if(previous.begin(), previous.end(),
               std::inserter(current, current.begin()),
               [](const HeapObjectHeader* h) {
                 return h->template IsInConstruction<AccessMode::kNonAtomic>();
               });
  previous = std::move(current);
  current.clear();
}

}  // namespace internal
}  // namespace cppgc

#endif  // defined(CPPGC_YOUNG_GENERATION)
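For context on the mechanism implemented above: a generational (old-to-new) write barrier records slots in old objects that are updated to point at young objects, so that a subsequent minor GC can treat those recorded slots as roots instead of scanning the whole old generation. The stand-alone sketch below illustrates only that idea; the SimpleRememberedSet, Object, and WriteBarrier names are hypothetical and heavily simplified, not part of the cppgc API, which in addition stores compressed slots in per-page SlotSet buckets and invalidates slots when the underlying memory is freed (see InvalidateRememberedSlotsInRange above).

// Minimal illustrative sketch of an old-to-new remembered set (hypothetical
// names; not the cppgc API).
#include <cstdio>
#include <set>

struct Object {
  bool is_young = true;
  Object* field = nullptr;
};

class SimpleRememberedSet {
 public:
  // Generational write barrier: remember the slot only for old->young stores.
  void WriteBarrier(const Object& source, Object*& slot, Object* value) {
    slot = value;
    if (!source.is_young && value && value->is_young) {
      slots_.insert(static_cast<void*>(&slot));
    }
  }

  // Minor-GC marking: treat every remembered slot as a root into the young
  // generation.
  template <typename MarkFn>
  void Visit(MarkFn mark) const {
    for (void* raw_slot : slots_) {
      Object* target = *static_cast<Object**>(raw_slot);
      if (target && target->is_young) mark(*target);
    }
  }

  void Reset() { slots_.clear(); }

 private:
  std::set<void*> slots_;  // plain slot addresses, analogous to
                           // remembered_uncompressed_slots_ above
};

int main() {
  Object old_object;
  old_object.is_young = false;
  Object young_object;

  SimpleRememberedSet remembered_set;
  // An old object starts referencing a young object: the barrier records the
  // slot's address.
  remembered_set.WriteBarrier(old_object, old_object.field, &young_object);

  // During a minor GC, the recorded slot keeps young_object alive.
  remembered_set.Visit([](Object& o) {
    std::printf("marked young object at %p\n", static_cast<void*>(&o));
  });
  remembered_set.Reset();
  return 0;
}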