5#if defined(CPPGC_YOUNG_GENERATION)
// Erases from `set` every pointer lying in the half-open range [begin, end).
// NOTE(review): fragmented extraction — the statement that consumes
// `from`/`to` (presumably set.erase(from, to)) is elided from this view.
27void EraseFromSet(std::set<void*>& set,
void* begin,
void*
end) {
// lower_bound(begin)/lower_bound(end) bound the sub-range of tracked
// pointers that falls inside [begin, end) — end stays exclusive.
29 auto from = set.lower_bound(begin), to = set.lower_bound(
end);
// Removes every compressed remembered slot inside [begin, end), e.g. when
// the object backing that range is freed or shrunk, and keeps the debug
// verification set in sync.
// NOTE(review): fragmented extraction — the declaration of `page`
// (presumably BasePage::FromInnerAddress over the last byte of the range)
// is only partially visible below.
35void InvalidateCompressedRememberedSlots(
36 const HeapBase&
heap,
void* begin,
void*
end,
37 std::set<void*>& remembered_slots_for_verification) {
// `end` is exclusive, hence the -1 to land on the last inner byte.
44 &
heap,
reinterpret_cast<void*
>(
45 reinterpret_cast<uintptr_t
>(
end) - 1)));
47 auto* slot_set = page->slot_set();
// No slot set allocated for this page => nothing was remembered there.
48 if (!slot_set)
return;
50 const size_t buckets_size = SlotSet::BucketsForSize(page->AllocatedSize());
// The SlotSet stores page-relative offsets, so translate the absolute
// [begin, end) addresses before removing the range.
52 const uintptr_t page_start =
reinterpret_cast<uintptr_t
>(
page);
53 const uintptr_t ubegin =
reinterpret_cast<uintptr_t
>(
begin);
54 const uintptr_t uend =
reinterpret_cast<uintptr_t
>(
end);
56 slot_set->RemoveRange(ubegin - page_start, uend - page_start, buckets_size,
57 SlotSet::EmptyBucketMode::FREE_EMPTY_BUCKETS);
// Mirror the removal in the debug-only verification shadow set.
59 EraseFromSet(remembered_slots_for_verification, begin,
end);
// Removes every uncompressed remembered slot inside [begin, end) from both
// the real tracking set and the debug verification shadow set.
63void InvalidateUncompressedRememberedSlots(
64 std::set<void*>& slots,
void* begin,
void*
end,
65 std::set<void*>& remembered_slots_for_verification) {
66 EraseFromSet(slots, begin,
end);
68 EraseFromSet(remembered_slots_for_verification, begin,
end);
70#if defined(ENABLE_SLOW_DCHECKS)
// Slow debug check: no surviving slot may still point into the
// invalidated range.
// NOTE(review): fragmented extraction — the lambda/DCHECK closing tokens
// are elided from this view.
72 DCHECK(std::none_of(slots.begin(), slots.end(), [begin,
end](
void* slot) {
73 void* value = nullptr;
74 value = *reinterpret_cast<void**>(slot);
75 return begin <= value && value < end;
// Visits a single remembered slot: reads the pointer it holds (decompressing
// when slot_type == kCompressed under CPPGC_POINTER_COMPRESSION) and marks
// the referenced object. Slots residing in young objects are skipped.
81template <SlotType slot_type>
82void VisitSlot(
const HeapBase&
heap,
const BasePage& page,
Address slot,
83 MutatorMarkingState& marking_state,
84 const std::set<void*>& slots_for_verification) {
// The slot must have been recorded in the verification shadow set.
87 DCHECK_NE(slots_for_verification.end(), slots_for_verification.find(slot));
91 auto& slot_header = page.ObjectHeaderFromInnerAddress(slot);
// Slots inside young objects are covered by regular young-gen tracing.
95 if (slot_header.IsYoung())
return;
97#if defined(CPPGC_POINTER_COMPRESSION)
98 void* value =
nullptr;
// Compressed slots hold 32-bit values that must be decompressed first;
// uncompressed slots are read as plain pointers.
99 if constexpr (slot_type == SlotType::kCompressed) {
100 value = CompressedPointer::Decompress(*
reinterpret_cast<uint32_t*
>(slot));
102 value = *
reinterpret_cast<void**
>(slot);
105 void* value = *
reinterpret_cast<void**
>(slot);
// NOTE(review): fragmented extraction — the null/sentinel checks and the
// header lookup between the read and the marking call are elided here.
113 HeapObjectHeader& header =
118 marking_state.DynamicallyMarkAddress(
static_cast<Address>(value));
// Heap visitor that walks every page's SlotSet and visits each recorded
// compressed remembered slot via VisitCompressedSlot.
// NOTE(review): fragmented extraction — the constructor initializer list,
// the Run() signature, VisitCompressedSlot itself, and several closing
// braces are elided from this view.
121class CompressedSlotVisitor : HeapVisitor<CompressedSlotVisitor> {
122 friend class HeapVisitor<CompressedSlotVisitor>;
125 CompressedSlotVisitor(HeapBase&
heap, MutatorMarkingState& marking_state,
126 const std::set<void*>& slots_for_verification)
129 remembered_slots_for_verification_(slots_for_verification) {}
// Traverses the whole raw heap and reports how many objects were visited.
132 Traverse(
heap_.raw_heap());
133 return objects_visited_;
141 remembered_slots_for_verification_);
// Iterates every slot recorded for the current page's slot set.
146 void VisitSlotSet(SlotSet* slot_set) {
149 if (!slot_set)
return;
// Slot offsets in the SlotSet are relative to the page start.
151 const uintptr_t page_start =
reinterpret_cast<uintptr_t
>(
current_page_);
152 const size_t buckets_size =
156 page_start, 0, buckets_size,
157 [
this](SlotSet::Address slot) {
158 return VisitCompressedSlot(
reinterpret_cast<Address>(slot));
160 SlotSet::EmptyBucketMode::FREE_EMPTY_BUCKETS);
// Normal and large pages are handled identically: visit their slot set.
163 bool VisitNormalPage(NormalPage& page) {
165 VisitSlotSet(page.slot_set());
169 bool VisitLargePage(LargePage& page) {
171 VisitSlotSet(page.slot_set());
// Debug shadow set used by VisitSlot's verification DCHECK.
179 const std::set<void*>& remembered_slots_for_verification_;
180 size_t objects_visited_ = 0u;
// Heap visitor used by Reset() to walk all pages and drop their slot sets.
// NOTE(review): fragmented extraction — the constructor and the bodies of
// the two page-visit overloads are elided from this view.
183class SlotRemover : HeapVisitor<SlotRemover> {
184 friend class HeapVisitor<SlotRemover>;
189 void Run() { Traverse(
heap_.raw_heap()); }
192 bool VisitNormalPage(NormalPage& page) {
197 bool VisitLargePage(LargePage& page) {
// Visits all remembered slots: compressed ones via a full heap walk
// (CompressedSlotVisitor) and uncompressed ones via the explicit set.
206void VisitRememberedSlots(
207 HeapBase&
heap, MutatorMarkingState& mutator_marking_state,
208 const std::set<void*>& remembered_uncompressed_slots,
209 const std::set<void*>& remembered_slots_for_verification) {
210 size_t objects_visited = 0;
212 CompressedSlotVisitor slot_visitor(
heap, mutator_marking_state,
213 remembered_slots_for_verification);
214 objects_visited += slot_visitor.Run();
// NOTE(review): fragmented extraction — the per-slot lookup that produces
// `page` (and any objects_visited increment) is elided from this view.
216 for (
void* uncompressed_slot : remembered_uncompressed_slots) {
219 VisitSlot<SlotType::kUncompressed>(
220 heap, *page,
static_cast<Address>(uncompressed_slot),
221 mutator_marking_state, remembered_slots_for_verification);
// Debug check: every slot in the verification set was visited once.
224 DCHECK_EQ(remembered_slots_for_verification.size(), objects_visited);
225 USE(objects_visited);
// Re-traces remembered old source objects; young ones are skipped since
// they are traced by the regular young-generation marking anyway.
// NOTE(review): fragmented extraction — the retrieval of `trace_callback`
// (and the `visitor` parameter declaration) is elided from this view.
230void VisitRememberedSourceObjects(
231 const std::set<HeapObjectHeader*>& remembered_source_objects,
233 for (HeapObjectHeader* source_hoh : remembered_source_objects) {
238 if (source_hoh->IsYoung())
continue;
244 trace_callback(&visitor, source_hoh->ObjectStart());
// Re-visits objects that were in construction during a previous cycle:
// objects still in construction are scanned conservatively; completed
// objects are traced via their regular trace callback.
251void RevisitInConstructionObjects(
252 std::set<HeapObjectHeader*>& remembered_in_construction_objects,
253 Visitor& visitor, ConservativeTracingVisitor& conservative_visitor) {
254 for (HeapObjectHeader* hoh : remembered_in_construction_objects) {
259 if (hoh->template IsInConstruction<AccessMode::kNonAtomic>()) {
260 conservative_visitor.TraceConservatively(*hoh);
// NOTE(review): fragmented extraction — the `trace_callback` lookup for
// the completed-object branch is elided from this view.
265 trace_callback(&visitor, hoh->ObjectStart());
// Records a compressed old-to-new slot in the SlotSet of its source page.
// NOTE(review): fragmented extraction — the derivation of `source_page`
// from `slot` is elided from this view.
272void OldToNewRememberedSet::AddSlot(
void* slot) {
278 auto& slot_set = source_page->GetOrAllocateSlotSet();
// The SlotSet stores page-relative offsets, not absolute addresses.
280 const uintptr_t slot_offset =
reinterpret_cast<uintptr_t
>(slot) -
281 reinterpret_cast<uintptr_t
>(source_page);
283 slot_set.Insert<SlotSet::AccessMode::NON_ATOMIC>(
284 static_cast<size_t>(slot_offset));
// Shadow copy used to verify slot-set contents in debug builds.
287 remembered_slots_for_verification_.insert(slot);
// Records a full-width (uncompressed) slot; these are tracked in an
// explicit set rather than in the per-page SlotSet.
291void OldToNewRememberedSet::AddUncompressedSlot(
void* uncompressed_slot) {
293 remembered_uncompressed_slots_.insert(uncompressed_slot);
// Shadow copy for debug-time verification.
295 remembered_slots_for_verification_.insert(uncompressed_slot);
// Remembers an old object that must be re-traced during the next young GC.
299void OldToNewRememberedSet::AddSourceObject(HeapObjectHeader& hoh) {
301 remembered_source_objects_.insert(&hoh);
// Remembers a custom weak callback to be executed by
// ExecuteCustomCallbacks on the next young GC.
304void OldToNewRememberedSet::AddWeakCallback(WeakCallbackItem item) {
309 remembered_weak_callbacks_.insert(item);
// Remembers an object currently in construction so it can be re-traced in
// a later cycle (see RevisitInConstructionObjects).
312void OldToNewRememberedSet::AddInConstructionObjectToBeRetraced(
313 HeapObjectHeader& hoh) {
315 remembered_in_construction_objects_.current.insert(&hoh);
// Invalidates all remembered slots — compressed and uncompressed — that lie
// inside [begin, end), e.g. when the backing storage is freed.
// NOTE(review): fragmented extraction — the `end` parameter declaration and
// one argument line of the second call are elided from this view.
318void OldToNewRememberedSet::InvalidateRememberedSlotsInRange(
void* begin,
321 InvalidateCompressedRememberedSlots(
heap_, begin,
end,
322 remembered_slots_for_verification_);
323 InvalidateUncompressedRememberedSlots(remembered_uncompressed_slots_, begin,
325 remembered_slots_for_verification_);
// Stops tracking `header` as a re-trace source object.
328void OldToNewRememberedSet::InvalidateRememberedSourceObject(
329 HeapObjectHeader& header) {
331 remembered_source_objects_.erase(&header);
// Replays all remembered old-to-new references for young-generation
// marking: slots, source objects, then previously-in-construction objects.
334void OldToNewRememberedSet::Visit(
335 Visitor& visitor, ConservativeTracingVisitor& conservative_visitor,
336 MutatorMarkingState& marking_state) {
338 VisitRememberedSlots(
heap_, marking_state, remembered_uncompressed_slots_,
339 remembered_slots_for_verification_);
340 VisitRememberedSourceObjects(remembered_source_objects_, visitor);
// Objects recorded as in-construction in a *previous* cycle are re-traced.
341 RevisitInConstructionObjects(remembered_in_construction_objects_.previous,
342 visitor, conservative_visitor);
// Runs every remembered custom weak callback with the given broker.
// NOTE(review): fragmented extraction — the loop body invoking each
// callback is elided from this view.
345void OldToNewRememberedSet::ExecuteCustomCallbacks(LivenessBroker
broker) {
347 for (
const auto&
callback : remembered_weak_callbacks_) {
// Discards the remembered weak callbacks (after they have been executed).
352void OldToNewRememberedSet::ReleaseCustomCallbacks() {
354 remembered_weak_callbacks_.clear();
// Clears the entire remembered set: per-page slot sets (via SlotRemover)
// plus all explicit tracking sets kept on this object.
357void OldToNewRememberedSet::Reset() {
359 SlotRemover slot_remover(
heap_);
361 remembered_uncompressed_slots_.clear();
362 remembered_source_objects_.clear();
364 remembered_slots_for_verification_.clear();
// Resets the in-construction bookkeeping — see
// RememberedInConstructionObjects::Reset for the filtering it performs.
366 remembered_in_construction_objects_.Reset();
// Returns true when no remembered state is tracked in the visible sets.
// NOTE(review): only uncompressed slots, source objects, and weak callbacks
// are checked in this fragment; confirm against the full file whether
// additional conditions (e.g. the verification set) belong here.
370bool OldToNewRememberedSet::IsEmpty()
const {
372 return remembered_uncompressed_slots_.empty() &&
373 remembered_source_objects_.empty() &&
374 remembered_weak_callbacks_.empty();
// Rebuilds the in-construction bookkeeping, keeping (via the predicate
// below) only headers that are still in construction.
// NOTE(review): fragmented extraction — the algorithm call feeding this
// inserter (presumably std::copy_if over the other set) and the
// previous/current shuffle are elided from this view.
377void OldToNewRememberedSet::RememberedInConstructionObjects::Reset() {
381 std::inserter(current, current.begin()),
382 [](
const HeapObjectHeader* h) {
383 return h->template IsInConstruction<AccessMode::kNonAtomic>();
HeapObjectHeader & ObjectHeaderFromInnerAddress(void *address) const
static BasePage * FromPayload(void *)
static BasePage * FromInnerAddress(const HeapBase *, void *)
static const GCInfo & GCInfoFromIndex(GCInfoIndex index)
bool generational_gc_supported() const
NormalPage * current_page_
NonAtomicMarkingState * marking_state_
constexpr internal::SentinelPointer kSentinelPointer
void(*)(Visitor *visitor, const void *object) TraceCallback
Node::Uses::const_iterator begin(const Node::Uses &uses)
#define DCHECK_NOT_NULL(val)
#define DCHECK_NE(v1, v2)
#define DCHECK(condition)
#define DCHECK_LT(v1, v2)
#define DCHECK_EQ(v1, v2)