constexpr TaskPriority kBackgroundBoostedPriority = TaskPriority::kUserBlocking;
constexpr TaskPriority kBackgroundRegularPriority = TaskPriority::kUserVisible;
constexpr TaskPriority kForegroundRegularPriority = TaskPriority::kUserBlocking;
constexpr TaskPriority kForegroundLowPriority = TaskPriority::kUserVisible;
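// Bounds the time spent in a single sweeping step: Check() reports whether
// the deadline (end_) has passed, sampling the clock only every kInterval
// calls so that the check stays cheap in tight loops.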
class DeadlineChecker final {
  // ...
};
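// Determines how much work the mutator thread performs in a sweeping step:
// everything (finalization plus sweeping), or only finalization of pages that
// were already swept concurrently.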
enum class MutatorThreadSweepingMode {
  kAll,
  kOnlyFinalizers,
};

constexpr const char* ToString(MutatorThreadSweepingMode sweeping_mode) {
  switch (sweeping_mode) {
    case MutatorThreadSweepingMode::kAll:
      return "all";
    case MutatorThreadSweepingMode::kOnlyFinalizers:
      return "only-finalizers";
  }
}
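// Debug-only visitor: recomputes object starts from the heap object headers
// and verifies them against the per-page object-start bitmap maintained
// during allocation and sweeping.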
class ObjectStartBitmapVerifier final
    : private HeapVisitor<ObjectStartBitmapVerifier> {
  friend class HeapVisitor<ObjectStartBitmapVerifier>;
  void Verify(RawHeap& heap) { /* ... */ }
  void Verify(NormalPage& page) { /* ... */ }

  bool VisitNormalPage(NormalPage& page) {
    // ...
    bitmap_ = &page.object_start_bitmap();
    // ...
  }

  bool VisitHeapObjectHeader(HeapObjectHeader& header) {
    if (header.IsLargeObject()) return true;
    // ...
    auto* raw_header = reinterpret_cast<ConstAddress>(&header);
    // ...
  }

  PlatformAwareObjectStartBitmap* bitmap_ = nullptr;
};
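// Strategy interface used when returning swept memory to a free list:
// DiscardingFreeHandler additionally releases fully-unused system pages back
// to the OS, RegularFreeHandler only links blocks into the free list.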
class FreeHandlerBase {
 public:
  virtual ~FreeHandlerBase() = default;
  virtual void FreeFreeList(
      std::vector<FreeList::Block>& unfinalized_free_list) = 0;
};
class DiscardingFreeHandler : public FreeHandlerBase {
 public:
  DiscardingFreeHandler(PageAllocator& page_allocator, FreeList& free_list,
                        BasePage& page)
      : page_allocator_(page_allocator), free_list_(free_list), page_(page) {}
  void Free(FreeList::Block block) {
    const auto unused_range = free_list_.AddReturningUnusedBounds(block);
    const uintptr_t aligned_begin_unused =
        RoundUp(reinterpret_cast<uintptr_t>(unused_range.first),
                page_allocator_.CommitPageSize());
    const uintptr_t aligned_end_unused =
        RoundDown(reinterpret_cast<uintptr_t>(unused_range.second),
                  page_allocator_.CommitPageSize());
    if (aligned_begin_unused < aligned_end_unused) {
      const size_t discarded_size = aligned_end_unused - aligned_begin_unused;
      page_allocator_.DiscardSystemPages(
          reinterpret_cast<void*>(aligned_begin_unused),
          aligned_end_unused - aligned_begin_unused);
      page_.IncrementDiscardedMemory(discarded_size);
      page_.space().raw_heap()->heap()->stats_collector()
          ->IncrementDiscardedMemory(discarded_size);
    }
  }
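  // Example (assuming a 4 KiB commit page size): an unused range
  // [0x10100, 0x13f00) rounds to [0x11000, 0x13000), so the two fully-unused
  // system pages (8 KiB) are discarded while the partial pages at either end
  // stay committed.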
  void FreeFreeList(
      std::vector<FreeList::Block>& unfinalized_free_list) final {
    for (auto entry : unfinalized_free_list) {
      Free(std::move(entry));
    }
  }
  // ...
};
class RegularFreeHandler : public FreeHandlerBase {
 public:
  RegularFreeHandler(PageAllocator& page_allocator, FreeList& free_list,
                     BasePage& page)
      : free_list_(free_list) {}

  void Free(FreeList::Block block) { free_list_.Add(std::move(block)); }

  void FreeFreeList(
      std::vector<FreeList::Block>& unfinalized_free_list) final {
    for (auto entry : unfinalized_free_list) {
      Free(std::move(entry));
    }
  }
  // ...
};
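// Minimal stack safe for hand-off between the concurrent sweeper and the
// mutator thread. is_empty_ is a relaxed atomic so IsEmpty() can be polled
// without acquiring the lock that protects vector_ (locking elided below).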
template <typename T>
class ThreadSafeStack {
 public:
  ThreadSafeStack() = default;

  void Push(T t) {
    // ... (lock elided)
    vector_.push_back(std::move(t));
    is_empty_.store(false, std::memory_order_relaxed);
  }

  std::optional<T> Pop() {
    // ... (lock elided)
    if (vector_.empty()) {
      is_empty_.store(true, std::memory_order_relaxed);
      return std::nullopt;
    }
    T top = std::move(vector_.back());
    vector_.pop_back();
    return std::move(top);
  }

  template <typename It>
  void Insert(It begin, It end) {
    // ... (lock elided)
    vector_.insert(vector_.end(), begin, end);
    is_empty_.store(false, std::memory_order_relaxed);
  }

  bool IsEmpty() const { return is_empty_.load(std::memory_order_relaxed); }

 private:
  std::vector<T> vector_;
  // ...
  std::atomic<bool> is_empty_{true};
};
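// Per-space sweeping state shared between the mutator and the concurrent
// sweeper: unswept_pages feeds sweeping; swept_unfinalized_pages collects
// pages swept concurrently whose finalizers must still run on the mutator
// thread.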
struct SweepingState {
  struct SweptPageState {
    BasePage* page = nullptr;
#if defined(CPPGC_CAGED_HEAP)
    // With the caged heap, the unfinalized objects are threaded through the
    // object headers themselves to save space.
    HeapObjectHeader* unfinalized_objects_head = nullptr;
#else
    std::vector<HeapObjectHeader*> unfinalized_objects;
#endif
    FreeList cached_free_list;
    std::vector<FreeList::Block> unfinalized_free_list;
    bool is_empty = false;
    size_t largest_new_free_list_entry = 0;
  };

  ThreadSafeStack<BasePage*> unswept_pages;
  ThreadSafeStack<SweptPageState> swept_unfinalized_pages;
};

using SpaceStates = std::vector<SweepingState>;
void StickyUnmark(HeapObjectHeader* header, StickyBits sticky_bits) {
#if defined(CPPGC_YOUNG_GENERATION)
  // With young generation, mark bits are "sticky" and only reset when sticky
  // bits are disabled for this cycle.
  if (sticky_bits == StickyBits::kDisabled) {
    header->Unmark<AccessMode::kAtomic>();
  }
#else
  header->Unmark<AccessMode::kAtomic>();
#endif
}
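// Finalization builders, parameterized over a FreeHandler. The inlined
// variant finalizes objects immediately while sweeping on the mutator
// thread; the deferred variant records unfinalized objects and free-list
// entries so that the mutator thread can finalize them later (used by the
// concurrent sweeper).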
class InlinedFinalizationBuilderBase {
 public:
  struct ResultType {
    bool is_empty = false;
    size_t largest_new_free_list_entry = 0;
  };

 protected:
  ResultType result_;
};
template <typename FreeHandler>
class InlinedFinalizationBuilder final : public InlinedFinalizationBuilderBase,
                                         public FreeHandler {
 public:
  InlinedFinalizationBuilder(BasePage& page, PageAllocator& page_allocator)
      : FreeHandler(page_allocator,
                    NormalPageSpace::From(page.space()).free_list(), page) {}

  void AddFinalizer(HeapObjectHeader* header, size_t size) {
    header->Finalize();
    SetMemoryInaccessible(header, size);
  }

  void AddFreeListEntry(Address start, size_t size) {
    FreeHandler::Free({start, size});
    result_.largest_new_free_list_entry =
        std::max(result_.largest_new_free_list_entry, size);
  }

  ResultType&& GetResult(bool is_empty) {
    result_.is_empty = is_empty;
    return std::move(result_);
  }
};
template <typename FreeHandler>
class DeferredFinalizationBuilder final : public FreeHandler {
 public:
  using ResultType = SweepingState::SweptPageState;

  DeferredFinalizationBuilder(BasePage& page, PageAllocator& page_allocator)
      : FreeHandler(page_allocator,
                    NormalPageSpace::From(page.space()).free_list(), page) {
    result_.page = &page;
  }

  void AddFinalizer(HeapObjectHeader* header, size_t size) {
    if (header->IsFinalizable()) {
#if defined(CPPGC_CAGED_HEAP)
      if (!current_unfinalized_) {
        // ...
        result_.unfinalized_objects_head = header;
      } else {
        // ... (link header into the intrusive unfinalized list)
      }
#else
      result_.unfinalized_objects.push_back({header});
#endif
      found_finalizer_ = true;
    } else {
      SetMemoryInaccessible(header, size);
    }
  }

  void AddFreeListEntry(Address start, size_t size) {
    if (found_finalizer_) {
      result_.unfinalized_free_list.push_back({start, size});
    } else {
      FreeHandler::Free({start, size});
    }
    result_.largest_new_free_list_entry =
        std::max(result_.largest_new_free_list_entry, size);
    found_finalizer_ = false;
  }

  ResultType&& GetResult(bool is_empty) {
    result_.is_empty = is_empty;
    return std::move(result_);
  }

 private:
  ResultType result_;
  HeapObjectHeader* current_unfinalized_ = nullptr;
  bool found_finalizer_ = false;
};
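// Sweeps a single normal page: walks the payload linearly, coalescing free
// entries and unmarked objects into gaps. Each finished gap becomes a
// free-list entry via the builder; marked objects survive, are
// (sticky-)unmarked, and close the current gap.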
template <typename FinalizationBuilder>
typename FinalizationBuilder::ResultType SweepNormalPage(
    NormalPage* page, PageAllocator& page_allocator, StickyBits sticky_bits) {
  FinalizationBuilder builder(*page, page_allocator);

  PlatformAwareObjectStartBitmap& bitmap = page->object_start_bitmap();

  size_t live_bytes = 0;

  Address start_of_gap = page->PayloadStart();

  const auto clear_bit_if_coalesced_entry = [&bitmap,
                                             &start_of_gap](Address address) {
    if (address != start_of_gap) {
      // ... (clear the object-start bit for entries merged into the gap)
    }
  };

  for (Address begin = page->PayloadStart(), end = page->PayloadEnd();
       begin != end;) {
    HeapObjectHeader* header = reinterpret_cast<HeapObjectHeader*>(begin);
    const size_t size = header->AllocatedSize();
    // Free list entries are merged into the current gap.
    if (header->IsFree<kAtomicAccess>()) {
      // ...
      clear_bit_if_coalesced_entry(begin);
      begin += size;
      continue;
    }
    // Unmarked (dead) objects are finalized and merged into the gap.
    if (!header->IsMarked<kAtomicAccess>()) {
      builder.AddFinalizer(header, size);
      clear_bit_if_coalesced_entry(begin);
      begin += size;
      continue;
    }
    // The object is alive: close the current gap.
    const Address header_address = reinterpret_cast<Address>(header);
    if (start_of_gap != header_address) {
      const size_t new_free_list_entry_size =
          static_cast<size_t>(header_address - start_of_gap);
      builder.AddFreeListEntry(start_of_gap, new_free_list_entry_size);
      // ...
    }
    StickyUnmark(header, sticky_bits);
    begin += size;
    start_of_gap = begin;
    live_bytes += size;
  }

  const bool is_empty = live_bytes == 0;
  // ...
  if (!is_empty && start_of_gap != page->PayloadEnd()) {
    builder.AddFreeListEntry(
        start_of_gap, static_cast<size_t>(page->PayloadEnd() - start_of_gap));
    // ...
  }
  page->SetAllocatedBytesAtLastGC(live_bytes);
  // ...
  return builder.GetResult(is_empty);
}
constexpr BaseSpace* kSweepWithoutSpaceAssignment = nullptr;
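// Whether a fully-empty page is destroyed (and counted as an unused destroyed
// normal page) or handed back to its space for immediate reuse.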
enum class EmptyPageHandling {
  kDestroy,
  kReturn,
};
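// Finalizes pages that were swept concurrently: runs pending finalizers,
// merges cached and unfinalized free-list entries into the space's free
// list, and destroys or returns empty pages depending on EmptyPageHandling.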
class SweepFinalizer final {
 public:
  SweepFinalizer(cppgc::Platform* platform, StatsCollector* stats_collector,
                 BaseSpace* space, size_t* unused_destroyed_normal_pages,
                 FreeMemoryHandling free_memory_handling,
                 EmptyPageHandling empty_page_handling_type)
      : /* ... */ {}

  void Finalize(SpaceStates& states) {
    for (SweepingState& state : states) {
      Finalize(state);
    }
  }

  void Finalize(SweepingState& state) {
    while (auto page_state = state.swept_unfinalized_pages.Pop()) {
      FinalizePage(&*page_state);
    }
  }
  // Finalizes pages until a free-list block of at least `size` bytes becomes
  // available or the deadline passes. Returns true on success.
  bool FinalizeWithDeadlineAndSize(StatsCollector::ScopeId scope_id,
                                   SweepingState& state,
                                   v8::base::TimeTicks deadline, size_t size) {
    if (state.swept_unfinalized_pages.IsEmpty()) {
      return false;
    }
    // ...
    DeadlineChecker deadline_check(deadline);
    while (auto page_state = state.swept_unfinalized_pages.Pop()) {
      FinalizePage(&*page_state);
      if (size <= largest_consecutive_block_) {
        return true;
      }
      if (deadline_check.Check()) {
        return false;
      }
    }
    return false;
  }
  // Finalizes pages until the deadline passes; returns true if all pending
  // pages were finalized.
  bool FinalizeWithDeadline(StatsCollector::ScopeId scope_id,
                            SweepingState& state,
                            v8::base::TimeTicks deadline) {
    if (state.swept_unfinalized_pages.IsEmpty()) {
      return true;
    }
    // ...
    DeadlineChecker deadline_check(deadline);
    while (auto page_state = state.swept_unfinalized_pages.Pop()) {
      FinalizePage(&*page_state);
      if (deadline_check.Check()) {
        return false;
      }
    }
    return true;
  }
  void FinalizePage(SweepingState::SweptPageState* page_state) {
    // ...
    BasePage* page = page_state->page;
    // ...
    const auto finalize_header = [](HeapObjectHeader* header) {
      const size_t size = header->AllocatedSize();
      header->Finalize();
      SetMemoryInaccessible(header, size);
    };
#if defined(CPPGC_CAGED_HEAP)
#if defined(CPPGC_POINTER_COMPRESSION)
    const uint64_t cage_base = CageBaseGlobal::Get();
#else
    const uint64_t cage_base = CagedHeapBase::GetBase();
#endif
    HeapObjectHeader* next_unfinalized = nullptr;

    for (auto* unfinalized_header = page_state->unfinalized_objects_head;
         unfinalized_header; unfinalized_header = next_unfinalized) {
      next_unfinalized = unfinalized_header->GetNextUnfinalized(cage_base);
      finalize_header(unfinalized_header);
    }
#else
    for (HeapObjectHeader* unfinalized_header :
         page_state->unfinalized_objects) {
      finalize_header(unfinalized_header);
    }
#endif

    // Handle empty pages.
    if (page_state->is_empty) {
      DCHECK_IMPLIES(/* ... */,
                     empty_page_handling_ == EmptyPageHandling::kDestroy);
      if (empty_page_handling_ == EmptyPageHandling::kDestroy) {
        if (!page->is_large()) {
          (*unused_destroyed_normal_pages_)++;
        }
        // ... (destroy the page)
      }
      // kReturn: reuse the empty page; its whole payload becomes a single
      // free-list entry.
      // ...
      auto* normal_page = NormalPage::From(page);
      normal_page->ChangeOwner(*space_);

      page_state->cached_free_list.Clear();
      page_state->cached_free_list.Add(
          {normal_page->PayloadStart(), normal_page->PayloadSize()});

      page_state->unfinalized_free_list.clear();
      page_state->largest_new_free_list_entry = normal_page->PayloadSize();
      // ...
    }

    DCHECK(!page->is_large());
    // ...
    FreeList& space_freelist = NormalPageSpace::From(page->space()).free_list();
    space_freelist.Append(std::move(page_state->cached_free_list));
    // ...
    if (!page_state->unfinalized_free_list.empty()) {
      std::unique_ptr<FreeHandlerBase> handler =
          (free_memory_handling_ == FreeMemoryHandling::kDiscardWherePossible)
              ? std::unique_ptr<FreeHandlerBase>(new DiscardingFreeHandler(
                    *platform_->GetPageAllocator(), space_freelist, *page))
              : std::unique_ptr<FreeHandlerBase>(new RegularFreeHandler(
                    *platform_->GetPageAllocator(), space_freelist, *page));
      handler->FreeFreeList(page_state->unfinalized_free_list);
    }
    largest_consecutive_block_ = std::max(
        page_state->largest_new_free_list_entry, largest_consecutive_block_);
    // ...
    ObjectStartBitmapVerifier().Verify(static_cast<NormalPage&>(*page));
    // ...
    page->space().AddPage(page);
  }

 private:
  cppgc::Platform* platform_;
  StatsCollector* stats_collector_;
  BaseSpace* space_;
  size_t* unused_destroyed_normal_pages_;
  const FreeMemoryHandling free_memory_handling_;
  const EmptyPageHandling empty_page_handling_;
  size_t largest_consecutive_block_ = 0;
};
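// Sweeps pages on the mutator thread via HeapVisitor. It first finalizes
// concurrently swept pages (so their memory becomes reusable), then sweeps
// remaining pages with the inlined finalization builders.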
class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> {
  friend class HeapVisitor<MutatorThreadSweeper>;

 public:
  MutatorThreadSweeper(/* ... */
                       StatsCollector* stats_collector, BaseSpace* space,
                       size_t* unused_destroyed_normal_pages,
                       FreeMemoryHandling free_memory_handling,
                       EmptyPageHandling empty_page_handling)
      : /* ... */ {}
  static void SweepLiveLargePage(LargePage& page, StickyBits sticky_bits) {
    HeapObjectHeader* header = page.ObjectHeader();
    CHECK(header->IsMarked());
    StickyUnmark(header, sticky_bits);
    // ...
    page.ResetMarkedBytes();
    // ...
    page.space().AddPage(&page);
  }
  void Sweep(SpaceStates& states) {
    for (SweepingState& state : states) {
      Sweep(state);
    }
  }

  void Sweep(SweepingState& state) {
    while (auto page = state.unswept_pages.Pop()) {
      SweepPage(**page);
    }
  }

  void SweepPage(BasePage& page) { Traverse(page); }
  bool FinalizeAndSweepWithDeadline(StatsCollector::ScopeId scope_id,
                                    SweepingState& state,
                                    v8::base::TimeTicks deadline,
                                    MutatorThreadSweepingMode sweeping_mode) {
    // First, prioritize finalization of pages that were swept concurrently.
    SweepFinalizer finalizer(/* ... */);
    if (!finalizer.FinalizeWithDeadline(scope_id, state, deadline)) {
      return false;
    }
    // ...
    if (sweeping_mode != MutatorThreadSweepingMode::kOnlyFinalizers) {
      // ...
      if (!SweepSpaceWithDeadline(&state, deadline)) {
        return false;
      }
    }
    return true;
  }
  bool SweepWithDeadlineAndSize(StatsCollector::ScopeId scope_id,
                                SweepingState& state,
                                v8::base::TimeTicks deadline, size_t size) {
    if (state.unswept_pages.IsEmpty()) {
      return false;
    }
    // ...
    DeadlineChecker deadline_check(deadline);
    while (auto page = state.unswept_pages.Pop()) {
      SweepPage(**page);
      if (size <= largest_consecutive_block_) {
        return true;
      }
      if (deadline_check.Check()) {
        return false;
      }
    }
    return false;
  }
  bool SweepSpaceWithDeadline(SweepingState* state,
                              v8::base::TimeTicks deadline) {
    DeadlineChecker deadline_check(deadline);
    while (auto page = state->unswept_pages.Pop()) {
      SweepPage(**page);
      if (deadline_check.Check()) {
        return false;
      }
    }
    return true;
  }
  bool VisitNormalPage(NormalPage& page) {
    if (free_memory_handling_ == FreeMemoryHandling::kDiscardWherePossible) {
      page.ResetDiscardedMemory();
    }
    const auto result =
        (free_memory_handling_ == FreeMemoryHandling::kDiscardWherePossible)
            ? SweepNormalPage<
                  InlinedFinalizationBuilder<DiscardingFreeHandler>>(
                  &page, *platform_->GetPageAllocator(), sticky_bits_)
            : SweepNormalPage<InlinedFinalizationBuilder<RegularFreeHandler>>(
                  &page, *platform_->GetPageAllocator(), sticky_bits_);
    if (result.is_empty /* ... */) {
      NormalPage::Destroy(&page);
      (*unused_destroyed_normal_pages_)++;
    } else {
      // ...
      DCHECK_IMPLIES(!result.is_empty, space_ == &page.space());
      page.ChangeOwner(*space_);
      // ...
      target_space.free_list().Add({page.PayloadStart(), page.PayloadSize()});
      // ...
    }
    ObjectStartBitmapVerifier().Verify(page);
    largest_consecutive_block_ = std::max(
        result.is_empty ? page.PayloadSize()
                        : result.largest_new_free_list_entry,
        largest_consecutive_block_);
    return true;
  }
  bool VisitLargePage(LargePage& page) {
    HeapObjectHeader* header = page.ObjectHeader();
    CHECK(!header->IsMarked());
    // ... (finalize the object and destroy or return the page)
  }

  // ...
};
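// Background job that sweeps pages concurrently with the mutator. Objects
// that need finalization cannot be finalized here, so pages are swept with
// DeferredFinalizationBuilder and queued on swept_unfinalized_pages for the
// mutator thread.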
class ConcurrentSweepTask final : public cppgc::JobTask,
                                  private HeapVisitor<ConcurrentSweepTask> {
  friend class HeapVisitor<ConcurrentSweepTask>;

 public:
  ConcurrentSweepTask(Platform* platform, HeapBase& heap,
                      SpaceStates* space_states,
                      SweepingState* empty_normal_pages,
                      SweepingState* empty_large_pages,
                      /* ... */)
      : /* ... */ {}
  void Run(cppgc::JobDelegate* delegate) final {
    StatsCollector::EnabledConcurrentScope stats_scope(
        heap_.stats_collector(), StatsCollector::kConcurrentSweep);
    // ...
    for (SweepingState& state : *space_states_) {
      if (!SweepStateOrYield(delegate, state)) return;
    }
    // ... (empty_normal_pages_ / empty_large_pages_ are processed similarly)
    is_completed_.store(true, std::memory_order_relaxed);
  }

  size_t GetMaxConcurrency(size_t /* active_worker_count */) const final {
    return is_completed_.load(std::memory_order_relaxed) ? 0 : 1;
  }
  // Returns true when the state was fully swept, false when the job yielded.
  bool SweepStateOrYield(cppgc::JobDelegate* delegate, SweepingState& state) {
    current_sweeping_state_ = &state;
    while (auto page = state.unswept_pages.Pop()) {
      Traverse(**page);
      if (delegate->ShouldYield()) {
        current_sweeping_state_ = nullptr;
        return false;
      }
    }
    current_sweeping_state_ = nullptr;
    return true;
  }
  bool VisitNormalPage(NormalPage& page) {
    if (free_memory_handling_ == FreeMemoryHandling::kDiscardWherePossible) {
      page.ResetDiscardedMemory();
    }
    SweepingState::SweptPageState sweep_result =
        (free_memory_handling_ == FreeMemoryHandling::kDiscardWherePossible)
            ? SweepNormalPage<
                  DeferredFinalizationBuilder<DiscardingFreeHandler>>(
                  &page, *platform_->GetPageAllocator(), sticky_bits_)
            : SweepNormalPage<DeferredFinalizationBuilder<RegularFreeHandler>>(
                  &page, *platform_->GetPageAllocator(), sticky_bits_);
    current_sweeping_state_->swept_unfinalized_pages.Push(
        std::move(sweep_result));
    return true;
  }
  bool VisitLargePage(LargePage& page) {
    HeapObjectHeader* header = page.ObjectHeader();
    CHECK(!header->IsMarked());
    // ...
#if defined(CPPGC_CAGED_HEAP)
    HeapObjectHeader* const unfinalized_objects =
        header->IsFinalizable() ? page.ObjectHeader() : nullptr;
#else
    std::vector<HeapObjectHeader*> unfinalized_objects;
    if (header->IsFinalizable()) {
      unfinalized_objects.push_back(page.ObjectHeader());
    }
#endif
    // ... (queue the page as swept-unfinalized)
    return true;
  }

  // ...
};
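// Runs at the start of sweeping, before tasks are scheduled: clears free
// lists and linear allocation buffers, poisons unmarked objects under ASAN,
// removes all pages from their spaces, and partitions them so that empty
// pages are swept first. Live large pages are swept immediately.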
class PrepareForSweepVisitor final
    : protected HeapVisitor<PrepareForSweepVisitor> {
  friend class HeapVisitor<PrepareForSweepVisitor>;
  using CompactableSpaceHandling = SweepingConfig::CompactableSpaceHandling;

 public:
  PrepareForSweepVisitor(HeapBase* heap, SpaceStates* space_states,
                         SweepingState* empty_normal_pages,
                         SweepingState* empty_large_pages,
                         CompactableSpaceHandling compactable_space_handling)
      : /* ... */ {}
  void Run(RawHeap& raw_heap) {
    // ...
    Traverse(raw_heap);
  }
  bool VisitNormalPageSpace(NormalPageSpace& space) {
    if ((compactable_space_handling_ == CompactableSpaceHandling::kIgnore) &&
        space.is_compactable()) {
      return true;
    }
    // ...
    CHECK(!space.linear_allocation_buffer().size());
    space.free_list().Clear();
#ifdef V8_USE_ADDRESS_SANITIZER
    UnmarkedObjectsPoisoner().Traverse(space);
#endif
    // ...
    BaseSpace::Pages space_pages = space.RemoveAllPages();
    // Sort pages by live (marked) bytes so that empty pages come first.
    std::sort(space_pages.begin(), space_pages.end(),
              [](const BasePage* a, const BasePage* b) {
                return a->marked_bytes() < b->marked_bytes();
              });
    auto first_non_empty_page = std::find_if(
        space_pages.begin(), space_pages.end(),
        [](const BasePage* page) { return page->marked_bytes() != 0; });
    empty_normal_pages_->unswept_pages.Insert(space_pages.begin(),
                                              first_non_empty_page);
    (*space_states_)[space.index()].unswept_pages.Insert(first_non_empty_page,
                                                         space_pages.end());
    return true;
  }
  bool VisitLargePageSpace(LargePageSpace& space) {
#ifdef V8_USE_ADDRESS_SANITIZER
    UnmarkedObjectsPoisoner().Traverse(space);
#endif
    // ...
    BaseSpace::Pages space_pages = space.RemoveAllPages();
    for (BasePage* page : space_pages) {
      // ...
      const auto* header = LargePage::From(page)->ObjectHeader();
      // ...
      if (page->marked_bytes() != 0) {
        // Live large pages are swept (unmarked) right away and re-added to
        // their space.
        MutatorThreadSweeper::SweepLiveLargePage(*LargePage::From(page),
                                                 heap_->sticky_bits());
      } else {
        // ... (queue the empty large page)
      }
    }
    return true;
  }

 private:
  HeapBase* const heap_;
  SpaceStates* const space_states_;
  SweepingState* const empty_normal_pages_;
  SweepingState* const empty_large_pages_;
  const CompactableSpaceHandling compactable_space_handling_;
};
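// Sweeper::SweeperImpl drives the sweeping state machine: it starts a cycle,
// schedules incremental foreground tasks at two priorities plus an optional
// concurrent background job, serves allocation-triggered sweeps, and
// finalizes and finishes the cycle.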
class Sweeper::SweeperImpl final {
 public:
  SweeperImpl(HeapBase& heap)
      : /* ... */ page_pool_(heap.page_backend()->page_pool()) /* , ... */ {}

  void Start(SweepingConfig config) {
    StatsCollector::EnabledScope stats_scope(stats_collector_,
                                             StatsCollector::kAtomicSweep);
    is_in_progress_ = true;
    if (!foreground_task_runner_) {
      // ...
      foreground_task_runner_ = /* ... */;
      low_priority_foreground_task_runner_ = /* ... */;
      // ...
    }
    // Incremental sweeping requires non-nestable tasks on both runners.
    const auto supports_non_nestable_tasks =
        [](const std::shared_ptr<TaskRunner>& runner) {
          return runner && runner->NonNestableTasksEnabled() &&
                 runner->NonNestableDelayedTasksEnabled();
        };
    if (!supports_non_nestable_tasks(foreground_task_runner_) ||
        !supports_non_nestable_tasks(low_priority_foreground_task_runner_)) {
      foreground_task_runner_.reset();
      low_priority_foreground_task_runner_.reset();
    }
    // ...
    ObjectStartBitmapVerifier().Verify(heap_);
    // ...
    if (!CanDiscardMemory()) {
      config_.free_memory_handling = FreeMemoryHandling::kDoNotDiscard;
    }
    if (config_.free_memory_handling ==
        FreeMemoryHandling::kDiscardWherePossible) {
      // Reset the discarded counter at the start of the cycle.
      heap_.heap()->stats_collector()->ResetDiscardedMemory();
    }
    // ...
    if (config.sweeping_type >= SweepingConfig::SweepingType::kIncremental) {
      ScheduleLowPriorityIncrementalSweeping();
      ScheduleIncrementalSweeping(kDelayWhileLowPrioritySweepingMakesProgress);
    }
    if (config.sweeping_type >=
        SweepingConfig::SweepingType::kIncrementalAndConcurrent) {
      ScheduleConcurrentSweeping();
    }
  }
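  // Foreground sweeping entry points. The regular-priority task backs off
  // (via a delay) while the low-priority task is making progress, so sweeping
  // runs in otherwise-idle time when possible.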
  void SweepForTask(v8::base::TimeDelta max_duration) {
    // ...
    if (low_priority_task_ran_) {
      // The low-priority task made progress; defer the regular-priority task.
      ScheduleIncrementalSweeping(kDelayWhileLowPrioritySweepingMakesProgress);
      return;
    }
    // ...
    switch (
        SweepInForegroundTaskImpl(max_duration, StatsCollector::kSweepInTask)) {
      case SweepResult::kFullyDone:
        return;
      case SweepResult::kInProgress:
        ScheduleIncrementalSweeping(kDelayForRegularPrioritySweeping);
        return;
      case SweepResult::kMainThreadDoneConcurrentInProgress:
        // ...
        ScheduleIncrementalSweeping(kDelayWhileConcurrentSweepingMakesProgress);
        return;
    }
  }
  void SweepForLowPriorityTask(v8::base::TimeDelta max_duration) {
    low_priority_task_ran_ = true;
    switch (SweepInForegroundTaskImpl(
        max_duration, StatsCollector::kSweepInLowPriorityTask)) {
      case SweepResult::kFullyDone:
        return;
      case SweepResult::kInProgress:
        // ...
        ScheduleLowPriorityIncrementalSweeping();
        return;
      case SweepResult::kMainThreadDoneConcurrentInProgress:
        ScheduleLowPriorityIncrementalSweeping(
            kDelayWhileLowPrioritySweepingMakesProgress);
        return;
    }
  }
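  // Allocation-triggered sweeping: when allocation fails, sweep just enough
  // of the target space (empty pages first, then finalization of
  // concurrently swept pages, then further sweeping) to produce a free block
  // of at least `size` bytes.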
  bool SweepForLargeAllocation(BaseSpace* space, size_t size,
                               v8::base::TimeDelta max_duration) {
    DCHECK(space->is_large());
    // ...
    SweepingState& space_state = space_states_[space->index()];
    // Live large pages were already swept eagerly; all remaining work for
    // large objects sits in the empty-pages state.
    DCHECK(space_state.unswept_pages.IsEmpty());
    DCHECK(space_state.swept_unfinalized_pages.IsEmpty());
    const auto deadline = v8::base::TimeTicks::Now() + max_duration;
    // ...
    SweepFinalizer finalizer(/* ... */ config_.free_memory_handling,
                             EmptyPageHandling::kDestroy);
    if (finalizer.FinalizeWithDeadlineAndSize(/* ... */ deadline, size)) {
      return true;
    }
    // ...
    MutatorThreadSweeper sweeper(/* ... */ config_.free_memory_handling,
                                 EmptyPageHandling::kDestroy);
    if (sweeper.SweepWithDeadlineAndSize(StatsCollector::kSweepEmptyPages,
                                         /* ... */ deadline, size)) {
      return true;
    }
    return false;
  }
  bool SweepForNormalAllocation(BaseSpace* space, size_t size,
                                v8::base::TimeDelta max_duration) {
    DCHECK(!space->is_large());
    // ...
    SweepingState& space_state = space_states_[space->index()];
    if (/* ... */ space_state.swept_unfinalized_pages.IsEmpty() &&
        space_state.unswept_pages.IsEmpty()) {
      return false;
    }
    const auto deadline = v8::base::TimeTicks::Now() + max_duration;
    // Prefer empty pages: finalize them first, then sweep them.
    SweepFinalizer finalizer(/* ... */ config_.free_memory_handling,
                             EmptyPageHandling::kReturn);
    if (finalizer.FinalizeWithDeadlineAndSize(/* ... */ deadline, size)) {
      return true;
    }
    // ...
    MutatorThreadSweeper sweeper(/* ... */ config_.free_memory_handling,
                                 EmptyPageHandling::kReturn);
    if (sweeper.SweepWithDeadlineAndSize(StatsCollector::kSweepEmptyPages,
                                         /* ... */ deadline, size)) {
      return true;
    }
    // Process the space's own pages: finalize concurrently swept pages, then
    // sweep the remaining unswept pages.
    if (finalizer.FinalizeWithDeadlineAndSize(
            StatsCollector::kSweepFinalizeSweptPages, space_state, deadline,
            size)) {
      return true;
    }
    if (sweeper.SweepWithDeadlineAndSize(StatsCollector::kSweepPages,
                                         space_state, deadline, size)) {
      return true;
    }
    return false;
  }
  bool SweepForAllocationIfRunning(BaseSpace* space, size_t size,
                                   v8::base::TimeDelta max_duration) {
    if (!is_in_progress_) {
      return false;
    }
    // Bail out if sweeping is already running on the mutator thread
    // (re-entrant call).
    if (is_sweeping_on_mutator_thread_) {
      return false;
    }
    // ...
    return space->is_large()
               ? SweepForLargeAllocation(space, size, max_duration)
               : SweepForNormalAllocation(space, size, max_duration);
  }
  bool FinishIfRunning() {
    if (!is_in_progress_) {
      return false;
    }
    // ...
    if (is_sweeping_on_mutator_thread_) {
      return false;
    }
    {
      std::optional<StatsCollector::EnabledScope> stats_scope;
      if (config_.sweeping_type != SweepingConfig::SweepingType::kAtomic) {
        stats_scope.emplace(stats_collector_,
                            StatsCollector::kIncrementalSweep);
      }
      StatsCollector::EnabledScope finish_stats_scope(
          stats_collector_, StatsCollector::kSweepFinish);
      // Boost the concurrent sweeper so pending background work finishes
      // quickly.
      if (concurrent_sweeper_handle_ && concurrent_sweeper_handle_->IsValid() &&
          concurrent_sweeper_handle_->UpdatePriorityEnabled()) {
        concurrent_sweeper_handle_->UpdatePriority(kBackgroundBoostedPriority);
      }
      // ...
    }
    // ...
    return true;
  }

  bool IsConcurrentSweepingDone() const {
    return !concurrent_sweeper_handle_ ||
           !concurrent_sweeper_handle_->IsValid() ||
           !concurrent_sweeper_handle_->IsActive();
  }
  void FinishIfOutOfWork() {
    if (!is_in_progress_ || is_sweeping_on_mutator_thread_) {
      return;
    }
    // Only finish through this path once concurrent sweeping is done.
    if (!concurrent_sweeper_handle_ || !concurrent_sweeper_handle_->IsValid() ||
        concurrent_sweeper_handle_->IsActive()) {
      return;
    }
    // ...
    DCHECK(std::all_of(/* ... */,
                       [](const SweepingState& state) {
                         return state.unswept_pages.IsEmpty();
                       }));
    // Check whether pages still need finalization on the mutator thread.
    if (std::any_of(/* ... */,
                    [](const SweepingState& state) {
                      return !state.swept_unfinalized_pages.IsEmpty();
                    })) {
      // ...
    }
    // ...
  }
  void Finish() {
    // ...
    SweepFinalizer finalizer(/* ... */
                             EmptyPageHandling::kDestroy);
    // ...
    MutatorThreadSweeper sweeper(/* ... */
                                 EmptyPageHandling::kDestroy);
    // ...
    SynchronizeAndFinalizeConcurrentAndIncrementalSweeping();
    // ...
    is_in_progress_ = false;
    notify_done_pending_ = true;
  }
  void NotifyDone() {
    DCHECK(!is_in_progress_);
    DCHECK(notify_done_pending_);
    notify_done_pending_ = false;
    // ...
    if (config_.free_memory_handling ==
        FreeMemoryHandling::kDiscardWherePossible) {
      heap_.heap()->page_backend()->ReleasePooledPages();
    }
  }

  void WaitForConcurrentSweepingForTesting() {
    if (concurrent_sweeper_handle_) concurrent_sweeper_handle_->Join();
  }
  bool IsSweepingOnMutatorThread() const {
    return is_sweeping_on_mutator_thread_;
  }
  bool PerformSweepOnMutatorThread(v8::base::TimeDelta max_duration,
                                   StatsCollector::ScopeId internal_scope_id,
                                   MutatorThreadSweepingMode sweeping_mode) {
    if (!is_in_progress_) return true;
    // ...
    const auto deadline = v8::base::TimeTicks::Now() + max_duration;
    MutatorThreadSweeper sweeper(/* ... */
                                 config_.free_memory_handling,
                                 EmptyPageHandling::kDestroy);
    // ...
    if (!sweeper.FinalizeAndSweepWithDeadline(
            /* ... */ deadline, sweeping_mode)) {
      return false;
    }
    // ... (per-space states)
    if (!sweeper.FinalizeAndSweepWithDeadline(
            StatsCollector::kSweepFinalizeSweptPages, state, deadline,
            sweeping_mode)) {
      return false;
    }
    if (!sweeper.FinalizeAndSweepWithDeadline(
            /* ... */ deadline, sweeping_mode)) {
      return false;
    }
    if (sweeping_mode != MutatorThreadSweepingMode::kAll) {
      // ...
    }
    // ...
  }
  void AddMutatorThreadSweepingObserver(
      Sweeper::SweepingOnMutatorThreadObserver* observer) {
    DCHECK_EQ(mutator_thread_sweeping_observers_.end(),
              std::find(mutator_thread_sweeping_observers_.begin(),
                        mutator_thread_sweeping_observers_.end(), observer));
    mutator_thread_sweeping_observers_.push_back(observer);
  }

  void RemoveMutatorThreadSweepingObserver(
      Sweeper::SweepingOnMutatorThreadObserver* observer) {
    const auto it =
        std::find(mutator_thread_sweeping_observers_.begin(),
                  mutator_thread_sweeping_observers_.end(), observer);
    DCHECK_NE(mutator_thread_sweeping_observers_.end(), it);
    mutator_thread_sweeping_observers_.erase(it);
  }
 private:
  // RAII scope that flags mutator-thread sweeping and notifies observers.
  class MutatorThreadSweepingScope final {
   public:
    MutatorThreadSweepingScope(SweeperImpl& sweeper)
        : sweeper_(sweeper) {
      DCHECK(!sweeper_.is_sweeping_on_mutator_thread_);
      sweeper_.is_sweeping_on_mutator_thread_ = true;
      for (auto* observer : sweeper_.mutator_thread_sweeping_observers_) {
        observer->Start();
      }
    }
    ~MutatorThreadSweepingScope() {
      sweeper_.is_sweeping_on_mutator_thread_ = false;
      for (auto* observer : sweeper_.mutator_thread_sweeping_observers_) {
        observer->End();
      }
    }

    MutatorThreadSweepingScope(const MutatorThreadSweepingScope&) = delete;
    MutatorThreadSweepingScope& operator=(const MutatorThreadSweepingScope&) =
        delete;

   private:
    SweeperImpl& sweeper_;
  };
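  // Cancelable foreground task that performs one bounded sweeping step and
  // reschedules itself (directly or with a delay) until sweeping completes.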
  class IncrementalSweepTask final : public cppgc::Task {
    // Upper bound on the time spent in a single task invocation.
    static constexpr auto kMaxSweepDuration =
        v8::base::TimeDelta::FromMilliseconds(/* ... */);

   public:
    IncrementalSweepTask(SweeperImpl& sweeper, cppgc::TaskPriority priority)
        : sweeper_(sweeper), priority_(priority) {}

    static Handle Post(SweeperImpl& sweeper,
                       const std::shared_ptr<cppgc::TaskRunner>& runner,
                       cppgc::TaskPriority priority,
                       std::optional<v8::base::TimeDelta> delay = {}) {
      auto task = std::make_unique<IncrementalSweepTask>(sweeper, priority);
      auto handle = task->handle_;
      if (delay.has_value()) {
        runner->PostNonNestableDelayedTask(std::move(task),
                                           delay->InSecondsF());
      } else {
        runner->PostNonNestableTask(std::move(task));
      }
      return handle;
    }

   private:
    void Run() override {
      // ...
      switch (priority_) {
        case kForegroundRegularPriority:
          sweeper_.SweepForTask(kMaxSweepDuration);
          return;
        case kForegroundLowPriority:
          sweeper_.SweepForLowPriorityTask(kMaxSweepDuration);
          return;
      }
    }

    SweeperImpl& sweeper_;
    // ... (cancellation handle_ elided)
    cppgc::TaskPriority priority_;
  };
  enum class SweepResult {
    kFullyDone,
    kInProgress,
    kMainThreadDoneConcurrentInProgress,
  };

  static constexpr double kMaxHeapPercentageForNoSweeping = 50;

  static constexpr auto kDelayWhileLowPrioritySweepingMakesProgress =
      v8::base::TimeDelta::FromMilliseconds(/* ... */);
  static constexpr auto kDelayWhileConcurrentSweepingMakesProgress =
      v8::base::TimeDelta::FromMilliseconds(/* ... */);
  static constexpr auto kDelayForRegularPrioritySweeping =
      v8::base::TimeDelta::FromMilliseconds(/* ... */);
  SweepResult SweepInForegroundTaskImpl(v8::base::TimeDelta max_duration,
                                        StatsCollector::ScopeId scope) {
    // Sweep on the mutator thread, restricted to finalization only while the
    // concurrent sweeper is still running.
    bool concurrent_sweep_complete = IsConcurrentSweepingDone();
    // ...
    bool main_thread_sweep_complete = PerformSweepOnMutatorThread(
        max_duration, scope,
        concurrent_sweep_complete ? MutatorThreadSweepingMode::kAll
                                  : MutatorThreadSweepingMode::kOnlyFinalizers);
    if (main_thread_sweep_complete && !concurrent_sweep_complete &&
        IsConcurrentSweepingDone()) {
      // The concurrent sweeper finished in the meantime; rerun mutator-thread
      // sweeping to pick up the remaining work.
      // ...
      concurrent_sweep_complete = true;
      main_thread_sweep_complete = PerformSweepOnMutatorThread(
          max_duration, scope, MutatorThreadSweepingMode::kAll);
    }
    if (main_thread_sweep_complete) {
      if (!concurrent_sweep_complete) {
        return SweepResult::kMainThreadDoneConcurrentInProgress;
      }
      CHECK(!is_in_progress_);
      return SweepResult::kFullyDone;
    }
    return SweepResult::kInProgress;
  }
  void ScheduleIncrementalSweeping(
      std::optional<v8::base::TimeDelta> delay = {}) {
    DCHECK_GE(config_.sweeping_type,
              SweepingConfig::SweepingType::kIncremental);
    // ...
    if (!foreground_task_runner_) {
      return;
    }
    // ...
    low_priority_task_ran_ = false;
    incremental_sweeper_handle_.CancelIfNonEmpty();
    incremental_sweeper_handle_ = IncrementalSweepTask::Post(
        *this, foreground_task_runner_, kForegroundRegularPriority, delay);
  }
  void ScheduleLowPriorityIncrementalSweeping(
      std::optional<v8::base::TimeDelta> delay = {}) {
    DCHECK_GE(config_.sweeping_type,
              SweepingConfig::SweepingType::kIncremental);
    // ...
    if (!low_priority_foreground_task_runner_) {
      return;
    }
    // ...
    incremental_sweeper_low_priority_handle_.CancelIfNonEmpty();
    incremental_sweeper_low_priority_handle_ =
        IncrementalSweepTask::Post(*this, low_priority_foreground_task_runner_,
                                   kForegroundLowPriority, delay);
  }
  void ScheduleConcurrentSweeping() {
    DCHECK_GE(config_.sweeping_type,
              SweepingConfig::SweepingType::kIncrementalAndConcurrent);
    // ...
    concurrent_sweeper_handle_ = platform_->PostJob(
        kBackgroundRegularPriority,
        std::make_unique<ConcurrentSweepTask>(/* ... */));
  }
  void CancelAllSweepingTasks() {
    if (incremental_sweeper_handle_) {
      incremental_sweeper_handle_.Cancel();
    }
    if (incremental_sweeper_low_priority_handle_) {
      incremental_sweeper_low_priority_handle_.Cancel();
    }
    if (concurrent_sweeper_handle_ && concurrent_sweeper_handle_->IsValid()) {
      concurrent_sweeper_handle_->Cancel();
    }
  }
  void SynchronizeAndFinalizeConcurrentAndIncrementalSweeping() {
    // ...
    CancelAllSweepingTasks();
    // ...
    DCHECK(std::all_of(/* ... */,
                       [](const SweepingState& state) {
                         return state.unswept_pages.IsEmpty();
                       }));
    // ...
    SweepFinalizer finalizer(/* ... */
                             EmptyPageHandling::kDestroy);
    // ...
  }
  std::vector<Sweeper::SweepingOnMutatorThreadObserver*>
      mutator_thread_sweeping_observers_;
  // ...
  bool low_priority_task_ran_ = false;
  // ...
  bool is_in_progress_ = false;
  bool notify_done_pending_ = false;
  // ...
  bool is_sweeping_on_mutator_thread_ = false;
};
// ...
void Sweeper::WaitForConcurrentSweepingForTesting() {
  impl_->WaitForConcurrentSweepingForTesting();
}

bool Sweeper::SweepForAllocationIfRunning(BaseSpace* space, size_t size,
                                          v8::base::TimeDelta max_duration) {
  return impl_->SweepForAllocationIfRunning(space, size, max_duration);
}

bool Sweeper::IsSweepingOnMutatorThread() const {
  return impl_->IsSweepingOnMutatorThread();
}

bool Sweeper::IsSweepingInProgress() const {
  return impl_->IsSweepingInProgress();
}

bool Sweeper::PerformSweepOnMutatorThread(v8::base::TimeDelta max_duration,
                                          StatsCollector::ScopeId scope_id) {
  return impl_->PerformSweepOnMutatorThread(max_duration, scope_id,
                                            MutatorThreadSweepingMode::kAll);
}

Sweeper::SweepingOnMutatorThreadObserver::SweepingOnMutatorThreadObserver(
    Sweeper& sweeper)
    : sweeper_(sweeper) {
  sweeper_.impl_->AddMutatorThreadSweepingObserver(this);
}

Sweeper::SweepingOnMutatorThreadObserver::~SweepingOnMutatorThreadObserver() {
  sweeper_.impl_->RemoveMutatorThreadSweepingObserver(this);
}