#include <unordered_map>
#include <unordered_set>

static constexpr size_t kFreeListSizeThreshold = 512 * kKB;
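
// MovableReferences records, for each movable object, the slot that points to
// it. When compaction later moves an object, Relocate() patches that slot to
// the object's new address, and RelocateInteriorReferences() fixes up
// recorded slots that themselves live inside the moved payload.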
class MovableReferences final {
 public:
  explicit MovableReferences(HeapBase& heap);

  // Adds a slot for compaction; slots contained in dead objects are filtered.
  void AddOrFilter(MovableReference*);

  // Called when an object moved from |from| to |to|; updates the slot
  // recorded for it and any affected interior slots.
  void Relocate(Address from, Address to, size_t size_including_header);

  // Fixes up interior slots recorded inside the moved range [from, from + size).
  void RelocateInteriorReferences(Address from, Address to, size_t size);

  void UpdateCallbacks();

 private:
  // ...

  // Bookkeeping used to notify move listeners: objects already moved, and a
  // map from interior slots back to the object containing them.
  std::unordered_set<const void*> moved_objects_;
  std::unordered_map<MovableReference*, MovableReference>
      interior_slot_to_object_;
};
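
// Members not captured in this excerpt but used by the definitions below: a
// map from each movable object to the slot recorded for it (looked up via
// |reference_it| / |it|), and a map of interior slots to their post-move
// locations (walked via |interior_it|).
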
void MovableReferences::AddOrFilter(MovableReference* slot) {
  const BasePage* slot_page = BasePage::FromInnerAddress(&heap_, slot);
  // ...

  const void* value = *slot;
  // ...

  const HeapObjectHeader& slot_header =
      slot_page->ObjectHeaderFromInnerAddress(slot);
  // Filter the slot if the object containing it is dead.
  if (!slot_header.IsMarked()) return;

  const BasePage* value_page = BasePage::FromInnerAddress(&heap_, value);
  // ...

  // Objects on large pages and objects in non-compactable spaces are never
  // moved, so such values need no recording.
  if (value_page->is_large() || !value_page->space().is_compactable()) return;

  const HeapObjectHeader& value_header =
      value_page->ObjectHeaderFromInnerAddress(value);
  CHECK(value_header.IsMarked());

  // A slot may already be recorded for |value| (the lookup yielding
  // |reference_it| is elided); if so, it must be the same slot.
  CHECK_EQ(slot, reference_it->second);
  // ...
  // Slots on non-compactable pages never move themselves and need no
  // interior-slot bookkeeping.
  if (V8_LIKELY(!slot_page->space().is_compactable())) return;
  // ...
  interior_slot_to_object_.emplace(slot, slot_header.ObjectStart());
}
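
// Relocate() runs once per moved object. The captured lines below record the
// move for listener notification, invoke the registered move listeners, fix
// up interior references for the moved range, and then look up the slot
// recorded for the old address so it can be updated (partially elided).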
void MovableReferences::Relocate(Address from, Address to,
                                 size_t size_including_header) {
  // ...
  moved_objects_.insert(from);
  heap_.CallMoveListeners(from - sizeof(HeapObjectHeader),
                          to - sizeof(HeapObjectHeader),
                          size_including_header);
  // ...

  const HeapObjectHeader& header = HeapObjectHeader::FromObject(to);
  const size_t size = header.ObjectSize();
  RelocateInteriorReferences(from, to, size);
  // ...

  // |it| (lookup elided) holds the slot recorded for the object at |from|;
  // |interior_it| (lookup elided) checks whether that slot is itself interior
  // to a movable object.
  MovableReference* slot = it->second;
  // ...
  MovableReference* slot_location =
      reinterpret_cast<MovableReference*>(interior_it->second);
  if (!slot_location) {
    interior_it->second = to;
    // The object containing |slot| must not have been moved yet.
    auto reverse_it = interior_slot_to_object_.find(slot);
    DCHECK_NE(interior_slot_to_object_.end(), reverse_it);
    DCHECK_EQ(moved_objects_.end(), moved_objects_.find(reverse_it->second));
  } else {
    slot = slot_location;
  }
  // ...
}
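
// RelocateInteriorReferences() walks the recorded interior slots that fall
// inside the moved range and notes where each of them will live after the
// move (the same offset, rebased from |from| to |to|). Slot contents that
// point back into the moved range itself are rebased immediately, as shown in
// the sketch after the function.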
void MovableReferences::RelocateInteriorReferences(Address from, Address to,
                                                   size_t size) {
  // The lookup that initializes |interior_it| with the first recorded
  // interior slot at or after |from| is elided; only its cast argument
  // survives:
  //   reinterpret_cast<MovableReference*>(from)
  // ...

  // For each recorded slot whose offset lies within the moved object (the
  // enclosing loop header is elided):
  size_t offset = reinterpret_cast<Address>(interior_it->first) - from;
  if (!interior_it->second) {
    // |reference| (declaration elided) is the slot's post-move address.
    interior_it->second = reference;

    // Contents pointing back into the moved range [from, from + size) are
    // interior pointers into the same object; rebase them right away.
    Address& reference_contents = *reinterpret_cast<Address*>(reference);
    if (reference_contents > from && reference_contents < (from + size)) {
      reference_contents = reference_contents - from + to;
    }
  }
  // ...
  // Loop tail: advance |offset| to the next recorded slot.
  offset = reinterpret_cast<Address>(interior_it->first) - from;
}
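
// Standalone sketch of the rebasing rule above; this helper is illustrative
// only and not part of the original file (name and signature are made up).
namespace {
// Rebases |*slot| if it points into the region [from, from + size) that was
// moved to |to|: the offset within the object is kept, the base is swapped.
inline void RebaseIfPointsIntoMovedRegionSketch(char** slot, char* from,
                                                char* to, size_t size) {
  if (*slot > from && *slot < from + size) {
    *slot = *slot - from + to;
  }
}
}  // namespace

// CompactionState tracks the compaction frontier of one space: the target
// page and the number of bytes already used on it. Live objects are copied to
// the frontier, the unused tail of each finished target page goes back on the
// space's free list, and target pages that were never needed are released.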
class CompactionState final {
  using Pages = std::vector<NormalPage*>;

 public:
  CompactionState(NormalPageSpace* space, MovableReferences& movable_references);

  void AddPage(NormalPage* page) {
    // ...
  }

  void RelocateObject(const NormalPage* page, const Address header,
                      size_t size) {
    // ... (|compact_frontier| is the next free address on the current target page)
    // If the object no longer fits there, return the page's tail to the space
    // and advance to the next target page:
    ReturnCurrentPageToSpace();
    // ...

    if (V8_LIKELY(compact_frontier != header)) {
      // memmove() when source and destination may overlap, memcpy() otherwise
      // (the selecting branch is elided).
      memmove(compact_frontier, header, size);
      // ...
      memcpy(compact_frontier, header, size);
      // The recorded references are then updated to the new payload at
      //   compact_frontier + sizeof(HeapObjectHeader),
      // (surrounding call elided).
    }
    current_page_->object_start_bitmap().SetBit(compact_frontier);
    // ...
  }

  void FinishCompactingSpace() {
    // ...
    ReturnCurrentPageToSpace();
    // ...
    // |page| (loop elided) iterates target pages compaction never needed.
    NormalPage::Destroy(page);
  }

  void FinishCompactingPage(NormalPage* page) {
#if DEBUG || defined(V8_USE_MEMORY_SANITIZER) || \
    defined(V8_USE_ADDRESS_SANITIZER)
    // Zap the payload in debug and sanitizer builds to catch stale accesses.
    ZapMemory(page->PayloadStart(), page->PayloadSize());
#endif
    page->object_start_bitmap().MarkAsFullyPopulated();
  }

  void ReturnCurrentPageToSpace() {
    // ...
    // The unused tail of the current target page goes back on the free list.
    space_->free_list().Add({free_start, freed_size});
    // ...
  }

  // Data members elided.
};
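
// CompactPage() walks one page's payload linearly: free-list entries and
// unmarked (dead) objects are skipped, and every marked object is relocated
// to the current compaction frontier via CompactionState::RelocateObject().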
void CompactPage(NormalPage* page, CompactionState& compaction_state,
                 /* ... */) {
  compaction_state.AddPage(page);

  page->object_start_bitmap().Clear();

  for (Address header_address = page->PayloadStart();
       header_address < page->PayloadEnd();) {
    HeapObjectHeader* header =
        reinterpret_cast<HeapObjectHeader*>(header_address);
    size_t size = header->AllocatedSize();

    if (header->IsFree()) {
      // Skip free-list entries.
      header_address += size;
      continue;
    }

    if (!header->IsMarked()) {
      // Dead object: finalized here (elided) and zapped in checked builds.
#if DEBUG || defined(V8_USE_MEMORY_SANITIZER) || \
    defined(V8_USE_ADDRESS_SANITIZER)
      // ...
#endif
      header_address += size;
      continue;
    }
    // Live object: relocate it to the compaction frontier (mark-bit handling
    // under CPPGC_YOUNG_GENERATION is elided).
#if defined(CPPGC_YOUNG_GENERATION)
    // ...
#endif
    compaction_state.RelocateObject(page, header_address, size);
    header_address += size;
  }

  compaction_state.FinishCompactingPage(page);
}
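
// CompactSpace() drives compaction for one space: it clears the space's free
// list, detaches all of its pages, runs the per-page compaction over them,
// and finally hands the compacted pages (plus remaining free space) back to
// the space.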
void CompactSpace(NormalPageSpace* space, MovableReferences& movable_references,
                  /* ... */) {
#ifdef V8_USE_ADDRESS_SANITIZER
  // Poison unmarked objects so stray accesses during compaction are reported.
  UnmarkedObjectsPoisoner().Traverse(*space);
#endif  // V8_USE_ADDRESS_SANITIZER

  DCHECK(space->is_compactable());

  space->free_list().Clear();
  // ...

  Pages pages = space->RemoveAllPages();
  if (pages.empty()) return;

  CompactionState compaction_state(space, movable_references);
  for (BasePage* page : pages) {
    page->ResetMarkedBytes();
    // Per-page compaction (presumably CompactPage()) is elided here.
  }

  compaction_state.FinishCompactingSpace();
}
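
// UpdateHeapResidency() sums the free-list sizes of the given compactable
// spaces; the result is compared against kFreeListSizeThreshold when deciding
// whether to compact.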
size_t UpdateHeapResidency(const std::vector<NormalPageSpace*>& spaces) {
  return std::accumulate(spaces.cbegin(), spaces.cend(), 0u,
                         [](size_t acc, const NormalPageSpace* space) {
                           DCHECK(space->is_compactable());
                           if (!space->size()) return acc;
                           return acc + space->free_list().Size();
                         });
}
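
// The next fragment iterates the raw heap and skips non-compactable spaces;
// judging by the surviving lines, it belongs to the code that gathers the set
// of compactable spaces, presumably in the Compactor's constructor.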
  for (auto& space : heap_) {
    if (!space->is_compactable()) continue;
    // ...
  }
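
// The condition and return statement below appear to come from the check that
// decides whether to compact at all: compaction is not attempted for an
// atomic GC whose stack may contain heap pointers (conservatively scanned
// objects cannot be moved), and is otherwise chosen once the accumulated
// free-list size exceeds kFreeListSizeThreshold.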
  if (/* ... */ ||
      (marking_type == GCConfig::MarkingType::kAtomic &&
       stack_state == StackState::kMayContainHeapPointers)) {
    // ...
  }
  // ...
  return free_list_size > kFreeListSizeThreshold;
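
// The final fragment is the compaction entry point invoked during the atomic
// pause: bail out to regular sweeping if compaction is not enabled, drain the
// worklist of movable slots collected during marking into MovableReferences,
// compact the spaces, and tell the caller that sweeping can skip them.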
  if (!is_enabled_) return CompactableSpaceHandling::kSweep;

  // A stats scope for the atomic compaction phase is entered here; only the
  // trailing argument of its constructor survives:
  //   ..., StatsCollector::kAtomicCompact);

  MovableReferences movable_references(*heap_.heap());

  // Drain the worklist of movable slots into MovableReferences.
  CompactionWorklists::MovableReferencesWorklist::Local local(
      /* ... */);
  // ...
  while (local.Pop(&slot)) {
    movable_references.AddOrFilter(slot);
  }
  // ...

  // |space| and |sticky_bits| come from elided surrounding code; each
  // compactable space is processed in turn.
  CompactSpace(space, movable_references, sticky_bits);
  // ...

  return CompactableSpaceHandling::kIgnore;
}