#include <unordered_map>
#include <unordered_set>
#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
#if V8_ENABLE_WEBASSEMBLY
void Heap::SetConstructStubCreateDeoptPCOffset(int pc_offset) {
void Heap::SetConstructStubInvokeDeoptPCOffset(int pc_offset) {
void Heap::SetDeoptPCOffsetAfterAdaptShadowStack(int pc_offset) {
         (pc_offset == deopt_pc_offset_after_adapt_shadow_stack().value()));
void Heap::SetInterpreterEntryReturnPCOffset(int pc_offset) {
  set_serialized_objects(objects);
  set_serialized_global_proxy_sizes(sizes);
  set_basic_block_profiling_data(*list);
      external_string_table_(this),
      allocation_type_for_in_place_internalizable_strings_(
      non_atomic_marking_state_(isolate_),
      pretenuring_handler_(this) {
Heap::~Heap() = default;
size_t Heap::MaxReserved() const {
  return static_cast<size_t>(
size_t Heap::YoungGenerationSizeFromOldGenerationSize(size_t old_generation) {
  if (v8_flags.minor_ms && !is_low_memory) {
    semi_space = old_generation / ratio;
size_t Heap::HeapSizeFromPhysicalMemory(uint64_t physical_memory) {
  uint64_t old_generation = physical_memory /
      std::min(old_generation,
      static_cast<size_t>(old_generation));
  return static_cast<size_t>(old_generation) + young_generation;
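// HeapSizeFromPhysicalMemory presumably derives an old-generation budget from
// a fraction of physical memory (the divisor is elided above), clamps it via
// std::min, sizes the young generation from that old-generation value, and
// returns the sum of the two as the overall heap size.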
void Heap::GenerationSizesFromHeapSize(size_t heap_size,
                                       size_t* young_generation_size,
                                       size_t* old_generation_size) {
  *young_generation_size = 0;
  *old_generation_size = 0;
  size_t lower = 0, upper = heap_size;
  while (lower + 1 < upper) {
    size_t old_generation = lower + (upper - lower) / 2;
    size_t young_generation =
    if (old_generation + young_generation <= heap_size) {
      *young_generation_size = young_generation;
      *old_generation_size = old_generation;
      lower = old_generation;
      upper = old_generation;
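// This is a binary search over the old-generation size: each probe derives
// the matching young-generation size and keeps the largest split whose
// combined size still fits into heap_size; the elided branch above shrinks
// the upper bound when the probe overshoots.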
size_t Heap::MinYoungGenerationSize() {
size_t Heap::MinOldGenerationSize() {
  size_t paged_space_count =
size_t Heap::AllocatorLimitOnMaxOldGenerationSize() {
#ifdef V8_COMPRESS_POINTERS
  return kPtrComprCageReservationSize -
  return std::numeric_limits<size_t>::max();
size_t Heap::MaxOldGenerationSize(uint64_t physical_memory) {
#ifdef V8_HOST_ARCH_64_BIT
  if ((physical_memory / GB) >= 15) {
int NumberOfSemiSpaces() { return v8_flags.minor_ms ? 1 : 2; }
size_t Heap::YoungGenerationSizeFromSemiSpaceSize(size_t semi_space_size) {
  return semi_space_size *
size_t Heap::SemiSpaceSizeFromYoungGenerationSize(
    size_t young_generation_size) {
  return young_generation_size /
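// The two conversions above are inverses of each other; the elided factor is
// presumably the per-new-space multiplier (cf. NumberOfSemiSpaces(): one
// semi-space with minor mark-sweep, two with the Scavenger), so
// young_generation = semi_space * factor and semi_space = young_generation / factor.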
size_t Heap::Capacity() {
size_t Heap::OldGenerationCapacity() const {
       space = spaces.Next()) {
    total += space->Capacity();
size_t Heap::CommittedOldGenerationMemory() {
       space = spaces.Next()) {
    total += space->CommittedMemory();
size_t Heap::CommittedMemory() {
  return new_space_committed + new_lo_space_committed +
size_t Heap::CommittedPhysicalMemory() {
    total += it.Next()->CommittedPhysicalMemory();
size_t Heap::CommittedMemoryExecutable() {
void Heap::UpdateMaximumCommitted() {
size_t Heap::Available() {
    total += it.Next()->Available();
bool Heap::CanExpandOldGeneration(size_t size) const {
bool Heap::IsOldGenerationExpansionAllowed(
bool Heap::CanPromoteYoungAndExpandOldGeneration(size_t size) const {
  size_t new_space_capacity =
                                 new_lo_space_capacity);
  size_t new_space_capacity =
bool Heap::HasBeenSetUp() const {
bool Heap::ShouldUseBackgroundThreads() const {
  return !v8_flags.single_threaded_gc_in_background ||
         !isolate()->EfficiencyModeEnabled();
bool Heap::ShouldUseIncrementalMarking() const {
  if (v8_flags.single_threaded_gc_in_background &&
      isolate()->EfficiencyModeEnabled()) {
    return v8_flags.incremental_marking_for_gc_in_background;
bool Heap::ShouldOptimizeForBattery() const {
  return v8_flags.optimize_gc_for_battery ||
         isolate()->BatterySaverModeEnabled();
                                         const char** reason) const {
    *reason = "Concurrent MinorMS needs finalization";
    isolate_->counters()->gc_compactor_caused_by_request()->Increment();
    *reason = "GC in old space requested";
    *reason = "GC in old space forced by flags";
    *reason = "Incremental marking forced finalization";
        ->gc_compactor_caused_by_oldspace_exhaustion()
    *reason = "scavenge might not succeed";
  gc_state_.store(state, std::memory_order_relaxed);
bool Heap::IsGCWithMainThreadStack() const {
bool Heap::IsGCWithStack() const {
  if (!v8_flags.minor_ms_shortcut_strings) return false;
  if (isolate()->has_shared_space() &&
      !isolate()->is_shared_space_isolate() &&
          ->shared_space_isolate()
          ->shared_space_isolate()
void Heap::PrintShortHeapStatistics() {
  if (!v8_flags.trace_gc_verbose) return;
               "Memory allocator, used: %6zu KB"
               ", available: %7zu KB\n",
               "Read-only space, used: %6zu KB"
               ", available: %7zu KB"
               ", committed: %6zu KB\n",
               "New space, used: %6zu KB"
               ", available:%c %7zu KB"
               ", committed: %6zu KB\n",
               "New large object space, used: %6zu KB"
               ", available: %7zu KB"
               ", committed: %6zu KB\n",
               "Old space, used: %6zu KB"
               ", available:%c %7zu KB"
               ", committed: %6zu KB\n",
               "Code space, used: %6zu KB"
               ", available:%c %7zu KB"
               ", committed: %6zu KB\n",
               "Large object space, used: %6zu KB"
               ", available: %7zu KB"
               ", committed: %6zu KB\n",
               "Code large object space, used: %6zu KB"
               ", available: %7zu KB"
               ", committed: %6zu KB\n",
               "Trusted space, used: %6zu KB"
               ", available:%c %7zu KB"
               ", committed: %6zu KB\n",
               "Trusted large object space, used: %6zu KB"
               ", available: %7zu KB"
               ", committed: %6zu KB\n",
               "All spaces, used: %6zu KB"
               ", available:%c %7zu KB"
               ", committed: %6zu KB\n",
               "External memory reported: %7" PRId64 " KB\n",
               "Backing store memory: %7" PRIu64 " KB\n",
               "Total time spent in GC: %7.1f ms\n",
               "(*) Sweeping is still in progress, making available sizes "
void Heap::PrintFreeListsStats() {
  if (v8_flags.trace_gc_freelists_verbose) {
        "Freelists statistics per Page: "
        "[category: length || total free bytes]\n");
  std::vector<int> categories_lengths(
      old_space()->free_list()->number_of_categories(), 0);
  std::vector<size_t> categories_sums(
      old_space()->free_list()->number_of_categories(), 0);
  unsigned int pageCnt = 0;
    std::ostringstream out_str;
    if (v8_flags.trace_gc_freelists_verbose) {
      out_str << "Page " << std::setw(4) << pageCnt;
         cat <= old_space()->free_list()->last_category(); cat++) {
      if (v8_flags.trace_gc_freelists_verbose) {
        out_str << "[" << cat << ": " << std::setw(4) << length << " || "
                << std::setw(6) << sum << " ]"
                << (cat == old_space()->free_list()->last_category() ? "\n"
      categories_lengths[cat] += length;
      categories_sums[cat] += sum;
    if (v8_flags.trace_gc_freelists_verbose) {
      "%d pages. Free space: %.1f MB (waste: %.2f). "
      "Usage: %.1f/%.1f (MB) -> %.2f%%.\n",
      "FreeLists global statistics: "
      "[category: length || total free KB]\n");
  std::ostringstream out_str;
       cat <= old_space()->free_list()->last_category(); cat++) {
    out_str << "[" << cat << ": " << categories_lengths[cat] << " || "
            << std::fixed << std::setprecision(2)
            << static_cast<double>(categories_sums[cat]) / KB << " KB]"
            << (cat == old_space()->free_list()->last_category() ? "\n" : ", ");
void Heap::DumpJSONHeapStatistics(std::stringstream& stream) {
#define DICT(s) "{" << s << "}"
#define LIST(s) "[" << s << "]"
#define QUOTE(s) "\"" << s << "\""
#define MEMBER(s) QUOTE(s) << ":"
  auto SpaceStatistics = [this](int space_index) {
                                           &space_stats, space_index);
    std::stringstream stream;
      MEMBER("time_ms") << isolate()->time_millis_since_init() << ","
      MEMBER("total_heap_size") << stats.total_heap_size() << ","
      MEMBER("total_heap_size_executable")
          << stats.total_heap_size_executable() << ","
      MEMBER("total_physical_size") << stats.total_physical_size() << ","
      MEMBER("total_available_size") << stats.total_available_size() << ","
      MEMBER("used_heap_size") << stats.used_heap_size() << ","
      MEMBER("heap_size_limit") << stats.heap_size_limit() << ","
      MEMBER("malloced_memory") << stats.malloced_memory() << ","
      MEMBER("external_memory") << stats.external_memory() << ","
      MEMBER("peak_malloced_memory") << stats.peak_malloced_memory() << ","
void Heap::ReportStatisticsAfterGC() {
      (v8_flags.trace_allocation_stack_interval > 0);
  } else if (v8_flags.fuzzer_gc_analysis) {
  } else if (v8_flags.trace_allocation_stack_interval > 0) {
  } else if (v8_flags.fuzzer_gc_analysis) {
  Address object_address = object.address();
      static_cast<uint32_t>(memory_chunk->Offset(object_address)) |
  const uint16_t c1 = static_cast<uint16_t>(value);
  const uint16_t c2 = static_cast<uint16_t>(value >> 16);
  PrintF("\n### Allocations = %zu, hash = 0x%08x\n",
void Heap::AddHeapObjectAllocationTracker(
  isolate_->UpdateLogObjectRelocation();
void Heap::RemoveHeapObjectAllocationTracker(
  isolate_->UpdateLogObjectRelocation();
void Heap::IncrementDeferredCounts(
void Heap::GarbageCollectionPrologue(
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  DCHECK(!AllowGarbageCollection::IsAllowed());
void Heap::GarbageCollectionPrologueInSafepoint() {
size_t Heap::NewSpaceAllocationCounter() const {
  DCHECK(!allocator()->new_space_allocator()->IsLabValid());
size_t Heap::SizeOfObjects() {
    total += it.Next()->SizeOfObjects();
size_t Heap::TotalGlobalHandlesSize() {
  return isolate_->global_handles()->TotalSize() +
         isolate_->traced_handles()->total_size_bytes();
size_t Heap::UsedGlobalHandlesSize() {
  return isolate_->global_handles()->UsedSize() +
         isolate_->traced_handles()->used_size_bytes();
void Heap::AddAllocationObserversToAllSpaces(
  DCHECK(observer && new_space_observer);
  allocator()->AddAllocationObserver(observer, new_space_observer);
void Heap::RemoveAllocationObserversFromAllSpaces(
  DCHECK(observer && new_space_observer);
  allocator()->RemoveAllocationObserver(observer, new_space_observer);
void Heap::PublishMainThreadPendingAllocations() {
  allocator()->PublishPendingAllocations();
void Heap::DeoptMarkedAllocationSites() {
    if (site->deopt_dependent_code()) {
      site->set_deopt_dependent_code(false);
  switch (collector) {
      isolate()->is_shared_space_isolate()) {
    isolate()->global_safepoint()->IterateClientIsolates([](Isolate* client) {
#define UPDATE_COUNTERS_FOR_SPACE(space)                \
  isolate_->counters()->space##_bytes_available()->Set( \
      static_cast<int>(space()->Available()));          \
  isolate_->counters()->space##_bytes_committed()->Set( \
      static_cast<int>(space()->CommittedMemory()));    \
  isolate_->counters()->space##_bytes_used()->Set(      \
      static_cast<int>(space()->SizeOfObjects()));
#define UPDATE_FRAGMENTATION_FOR_SPACE(space)                          \
  if (space()->CommittedMemory() > 0) {                                \
    isolate_->counters()->external_fragmentation_##space()->AddSample( \
        static_cast<int>(100 - (space()->SizeOfObjects() * 100.0) /    \
                                   space()->CommittedMemory()));       \
#define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space) \
  UPDATE_COUNTERS_FOR_SPACE(space)                         \
  UPDATE_FRAGMENTATION_FOR_SPACE(space)
#undef UPDATE_COUNTERS_FOR_SPACE
#undef UPDATE_FRAGMENTATION_FOR_SPACE
#undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE
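// UPDATE_FRAGMENTATION_FOR_SPACE samples external fragmentation as
// 100 - used * 100 / committed; e.g. 75 KB of objects on 100 KB of committed
// memory yields a sample of 25 (percent). The macro's closing brace sits on an
// elided line above.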
  if (v8_flags.print_handles) PrintHandles();
                  std::memory_order_relaxed);
#if V8_ENABLE_WEBASSEMBLY
  isolate_->stack_pool().ReleaseFinishedStacks();
    isolate_->counters()->alive_after_last_gc()->Set(
    isolate_->counters()->external_fragmentation_total()->AddSample(
    isolate_->counters()->heap_sample_total_committed()->AddSample(
    isolate_->counters()->heap_sample_total_used()->AddSample(
    isolate_->counters()->heap_sample_code_space_committed()->AddSample(
    isolate_->counters()->heap_sample_maximum_committed()->AddSample(
  if (v8_flags.code_stats) ReportCodeStatistics("After GC");
void Heap::HandleGCRequest() {
size_t MinorMSConcurrentMarkingTrigger(Heap* heap) {
  size_t young_capacity = 0;
    young_capacity = heap->sticky_space()->Capacity() -
                     heap->sticky_space()->old_objects_size();
    young_capacity = heap->new_space()->TotalCapacity();
  return young_capacity * v8_flags.minor_ms_concurrent_marking_trigger / 100;
void Heap::StartMinorMSIncrementalMarkingIfNeeded() {
  size_t usable_capacity = 0;
  size_t new_space_size = 0;
  if ((usable_capacity >=
       v8_flags.minor_ms_min_new_space_capacity_for_concurrent_marking_mb *
      (new_space_size >= MinorMSConcurrentMarkingTrigger(this)) &&
  if (*slot_a != *slot_b) {
    return *slot_a - *slot_b;
  if (objects->empty()) return;
  sort(objects->begin(), objects->end(),
         intptr_t c = CompareWords(size, a, b);
         if (c != 0) return c < 0;
  std::vector<std::pair<int, Tagged<HeapObject>>> duplicates;
  for (size_t i = 1; i < objects->size(); i++) {
    if (CompareWords(size, current, (*objects)[i]) == 0) {
      duplicates.push_back(std::make_pair(count - 1, current));
    current = (*objects)[i];
    duplicates.push_back(std::make_pair(count - 1, current));
  int threshold = v8_flags.trace_duplicate_threshold_kb * KB;
  sort(duplicates.begin(), duplicates.end());
  for (auto it = duplicates.rbegin(); it != duplicates.rend(); ++it) {
    int duplicate_bytes = it->first * size;
    if (duplicate_bytes < threshold) break;
    PrintF("%d duplicates of size %d each (%dKB)\n", it->first, size,
           duplicate_bytes / KB);
    PrintF("Sample object: ");
    PrintF("============================\n");
  static constexpr int kMaxNumberOfAttempts = 7;
  static constexpr int kMinNumberOfAttempts = 2;
  const auto num_roots = [this]() {
    size_t js_roots = 0;
    js_roots += isolate()->global_handles()->handles_count();
    js_roots += isolate()->eternal_handles()->handles_count();
    size_t cpp_roots = 0;
      cpp_roots += cpp_heap->GetStrongPersistentRegion().NodesInUse();
          cpp_heap->GetStrongCrossThreadPersistentRegion().NodesInUse();
    return js_roots + cpp_roots;
  RCS_SCOPE(isolate(), RuntimeCallCounterId::kGC_Custom_AllAvailableGarbage);
  isolate()->ClearSerializerData();
  isolate()->compilation_cache()->Clear();
  for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
    const size_t roots_before = num_roots();
    if ((roots_before == num_roots()) &&
        ((attempt + 1) >= kMinNumberOfAttempts)) {
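// This "all available garbage" loop repeats collections (the GC call itself is
// elided above) until the number of JS and C++ roots stops changing between
// attempts, doing at least kMinNumberOfAttempts (2) and at most
// kMaxNumberOfAttempts (7) iterations.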
  if (v8_flags.trace_duplicate_threshold_kb) {
    std::map<int, std::vector<Tagged<HeapObject>>> objects_by_size;
         space = spaces.Next()) {
        objects_by_size[obj->Size()].push_back(obj);
        objects_by_size[obj->Size()].push_back(obj);
    for (auto it = objects_by_size.rbegin(); it != objects_by_size.rend();
      ReportDuplicates(it->first, &it->second);
void Heap::PreciseCollectAllGarbage(GCFlags gc_flags,
void Heap::HandleExternalMemoryInterrupt() {
    TRACE_EVENT2("devtools.timeline,v8", "V8.ExternalMemoryPressure",
                 "external_memory_mb", static_cast<int>((current) / MB),
                 "external_memory_hard_limit_mb",
        kGCCallbackFlagsForExternalMemory));
  if (v8_flags.external_memory_accounted_in_global_limit) {
        kGCCallbackFlagsForExternalMemory);
  if (current <= soft_limit) {
  TRACE_EVENT2("devtools.timeline,v8", "V8.ExternalMemoryPressure",
               "external_memory_mb", static_cast<int>((current) / MB),
               "external_memory_soft_limit_mb",
               static_cast<int>((soft_limit) / MB));
      kGCCallbackFlagsForExternalMemory);
      kGCCallbackFlagsForExternalMemory);
uint64_t Heap::external_memory_limit_for_interrupt() {
uint64_t Heap::external_memory_soft_limit() {
                     const char* event_name,
                     const char* event_type)
    : heap_(heap), event_name_(event_name) {
                                       heap_->SizeOfObjects());
template <typename Callback>
  DCHECK(!AllowJavascriptExecution::IsAllowed(isolate));
                           StackState::kMayContainHeapPointers);
size_t GlobalMemorySizeFromV8Size(size_t v8_size) {
  const size_t kGlobalMemoryToV8Ratio = 2;
  return std::min(static_cast<uint64_t>(std::numeric_limits<size_t>::max()),
                  static_cast<uint64_t>(v8_size) * kGlobalMemoryToV8Ratio);
void Heap::SetOldGenerationAndGlobalMaximumSize(
    size_t max_old_generation_size) {
                                 std::memory_order_relaxed);
void Heap::SetOldGenerationAndGlobalAllocationLimit(
    size_t new_old_generation_allocation_limit,
    size_t new_global_allocation_limit) {
  CHECK_GE(new_global_allocation_limit, new_old_generation_allocation_limit);
#if defined(V8_USE_PERFETTO)
                    perfetto::ThreadTrack::Current()),
                new_old_generation_allocation_limit);
                    perfetto::ThreadTrack::Current()),
                new_global_allocation_limit);
                                        std::memory_order_relaxed);
                                std::memory_order_relaxed);
void Heap::ResetOldGenerationAndGlobalAllocationLimit() {
  DisallowJavascriptExecution no_js(isolate());
  DCHECK(AllowGarbageCollection::IsAllowed());
         !isolate()->InFastCCall());
  const char* collector_reason = nullptr;
  InvokeExternalCallbacks(isolate(), [this, gc_callback_flags, gc_type]() {
    isolate()->global_handles()->InvokeSecondPassPhantomCallbacks();
                 GCTracer::Scope::HEAP_EXTERNAL_PROLOGUE);
  stack().SetMarkerIfNeededAndCallback([this, collector, gc_reason,
                                        collector_reason, gc_callback_flags]() {
    size_t committed_memory_before =
    DevToolsTraceEventScope devtools_trace_event_scope(
    std::optional<TimedHistogramScope> histogram_timer_scope;
    std::optional<OptionalTimedHistogramScope> histogram_timer_priority_scope;
      histogram_timer_scope.emplace(record_gc_phases_info.type_timer(),
      histogram_timer_priority_scope.emplace(
        v8_flags.track_detached_contexts) {
      isolate()->CheckDetachedContextsAfterGC();
  InvokeExternalCallbacks(isolate(), [this, gc_callback_flags, gc_type]() {
                 GCTracer::Scope::HEAP_EXTERNAL_EPILOGUE);
    isolate()->global_handles()->PostGarbageCollectionProcessing(
  if ((gc_callback_flags &
  if (v8_flags.heap_snapshot_on_gc > 0 &&
    if (v8_flags.heap_snapshot_on_oom) {
    const auto runner = heap->GetForegroundTaskRunner();
    if (runner->IdleTasksEnabled()) {
      runner->PostIdleTask(
          std::make_unique<IdleTaskOnContextDispose>(heap->isolate()));
    const base::TimeDelta time_to_run = base::TimeTicks::Now() - creation_time_;
        (deadline_in_seconds * 1000) - heap->MonotonicallyIncreasingTimeInMs());
    const bool time_to_run_exceeded = time_to_run > kMaxTimeToRun;
          "[context-disposal/idle task] time-to-run: %fms (max delay: %fms), "
          "idle time: %fms%s\n",
          time_to_run_exceeded ? ", not starting any action" : "");
    if (time_to_run_exceeded) {
    TryRunMinorGC(idle_time);
      base::TimeDelta::FromMillisecondsD(16);
    static constexpr size_t kMinYounGenSize = 1 * MB;
    const std::optional<double> young_gen_gc_speed =
        heap->tracer()->YoungGenerationSpeedInBytesPerMillisecond(
            YoungGenerationSpeedMode::kUpToAndIncludingAtomicPause);
    if (!young_gen_gc_speed) {
    const size_t young_gen_bytes = heap->YoungGenerationSizeOfObjects();
        base::TimeDelta::FromMillisecondsD(young_gen_bytes /
                                           *young_gen_gc_speed);
    const bool run_young_gen_gc =
        young_gen_estimate < idle_time && young_gen_bytes > kMinYounGenSize;
          "[context-disposal/idle task] young generation size: %zuKB (min: "
          "%zuKB), GC speed: %fKB/ms, estimated time: %fms%s\n",
          young_gen_bytes / KB, kMinYounGenSize / KB, *young_gen_gc_speed / KB,
          run_young_gen_gc ? ", performing young gen GC"
                           : ", not starting young gen GC");
    if (run_young_gen_gc) {
                     GarbageCollectionReason::kIdleContextDisposal);
int Heap::NotifyContextDisposed(bool has_dependent_context) {
  if (!has_dependent_context) {
  } else if (v8_flags.idle_gc_on_context_disposal &&
    isolate()->raw_native_context()->set_retained_maps(
void Heap::StartIncrementalMarking(GCFlags gc_flags,
         !isolate()->InFastCCall());
  std::optional<SafepointScope> safepoint_scope;
    VerifyCountersAfterSweeping();
  std::vector<Isolate*> paused_clients =
  if (isolate()->is_shared_space_isolate()) {
    for (Isolate* client : paused_clients) {
      client->heap()->concurrent_marking()->Resume();
    DCHECK(paused_clients.empty());
void CompleteArrayBufferSweeping(Heap* heap) {
  auto* array_buffer_sweeper = heap->array_buffer_sweeper();
    auto* tracer = heap->tracer();
        scope_id = GCTracer::Scope::MINOR_MS_COMPLETE_SWEEP_ARRAY_BUFFERS;
        scope_id = GCTracer::Scope::SCAVENGER_COMPLETE_SWEEP_ARRAY_BUFFERS;
        scope_id = GCTracer::Scope::MC_COMPLETE_SWEEP_ARRAY_BUFFERS;
void Heap::CompleteSweepingFull() {
void Heap::StartIncrementalMarkingOnInterrupt() {
void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
    case IncrementalMarkingLimit::kHardLimit:
          isolate()->stack_guard()->RequestStartIncrementalMarking();
          job->ScheduleTask();
    case IncrementalMarkingLimit::kSoftLimit:
    case IncrementalMarkingLimit::kFallbackForEmbedderLimit:
    case IncrementalMarkingLimit::kNoLimit:
  DCHECK(dst_slot < dst_end);
  DCHECK(src_slot < src_slot + len);
  if (dst_slot < src_slot) {
    while (dst < atomic_dst_end) {
    while (dst >= atomic_dst_begin) {
template <typename TSlot>
  const TSlot dst_end(dst_slot + len);
  DCHECK(dst_end <= src_slot || (src_slot + len) <= dst_slot);
  while (dst < atomic_dst_end) {
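// The first (move) variant above tolerates overlapping source and destination
// ranges by copying forwards when dst < src and backwards otherwise; the
// second (copy) variant asserts the ranges are disjoint and always copies
// forwards.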
bool Heap::CollectionRequested() {
void Heap::CollectGarbageForBackground(LocalHeap* local_heap) {
void Heap::CheckCollectionRequested() {
void Heap::UpdateSurvivalStatistics(int start_new_space_size) {
  if (start_new_space_size == 0) return;
      static_cast<double>(start_new_space_size) * 100);
      static_cast<double>(start_new_space_size) * 100);
  switch (collector) {
      return GCTracer::Scope::ScopeId::MARK_COMPACTOR;
      return GCTracer::Scope::ScopeId::MINOR_MARK_SWEEPER;
      return GCTracer::Scope::ScopeId::SCAVENGER;
void ClearStubCaches(Isolate* isolate) {
  isolate->load_stub_cache()->Clear();
  isolate->store_stub_cache()->Clear();
  isolate->define_own_stub_cache()->Clear();
  if (isolate->is_shared_space_isolate()) {
    isolate->global_safepoint()->IterateClientIsolates([](Isolate* client) {
      client->load_stub_cache()->Clear();
      client->store_stub_cache()->Clear();
      client->define_own_stub_cache()->Clear();
                                   const char* collector_reason) {
  std::optional<SafepointScope> safepoint_scope;
  if ((!Heap::IsYoungGenerationCollector(collector) || v8_flags.minor_ms) &&
  DCHECK(tracer()->IsConsistentWithCollector(collector));
  std::vector<Isolate*> paused_clients =
  const size_t start_young_generation_size =
    isolate_->counters()->objs_since_last_young()->Set(0);
  isolate_->eternal_handles()->PostGarbageCollectionProcessing();
  if (isolate_->is_shared_space_isolate()) {
    isolate()->global_safepoint()->IterateClientIsolates([](Isolate* client) {
    isolate_->global_handles()->InvokeFirstPassWeakCallbacks();
    TRACE_GC(tracer(), GCTracer::Scope::HEAP_EMBEDDER_TRACING_EPILOGUE);
void Heap::PerformHeapVerification() {
  if (isolate()->is_shared_space_isolate()) {
    isolate()->global_safepoint()->IterateClientIsolates([](Isolate* client) {
std::vector<Isolate*> Heap::PauseConcurrentThreadsInClients(
  std::vector<Isolate*> paused_clients;
  if (isolate()->is_shared_space_isolate()) {
    isolate()->global_safepoint()->IterateClientIsolates(
        [collector, &paused_clients](Isolate* client) {
          paused_clients.push_back(client);
  return paused_clients;
void Heap::ResumeConcurrentThreadsInClients(
    std::vector<Isolate*> paused_clients) {
  if (isolate()->is_shared_space_isolate()) {
    for (Isolate* client : paused_clients) {
      client->heap()->concurrent_marking()->Resume();
    DCHECK(paused_clients.empty());
bool Heap::CollectGarbageFromAnyThread(LocalHeap* local_heap,
  const bool performed_gc =
  return performed_gc;
void Heap::CompleteSweepingYoung() {
#if defined(CPPGC_YOUNG_GENERATION)
      iheap->FinishSweepingIfRunning();
  heap->tracer()->RecordGCSizeCounters();
  std::optional<double> v8_gc_speed =
      heap->tracer()->OldGenerationSpeedInBytesPerMillisecond();
  double v8_mutator_speed =
      heap->tracer()->OldGenerationAllocationThroughputInBytesPerMillisecond();
      heap, heap->max_old_generation_size(), v8_gc_speed, v8_mutator_speed,
  std::optional<double> embedder_gc_speed =
      heap->tracer()->EmbedderSpeedInBytesPerMillisecond();
  double embedder_speed =
      heap->tracer()->EmbedderAllocationThroughputInBytesPerMillisecond();
  double embedder_growing_factor =
      (embedder_gc_speed.has_value() && embedder_speed > 0)
                heap, heap->max_global_memory_size_, embedder_gc_speed,
                embedder_speed, mode)
  size_t new_space_capacity = heap->NewSpaceTargetCapacity();
  size_t new_old_generation_allocation_limit =
          heap, heap->OldGenerationConsumedBytesAtLastGC(),
          heap->OldGenerationConsumedBytesAtLastGC() * v8_growing_factor,
          heap->min_old_generation_size_, heap->max_old_generation_size(),
          new_space_capacity, mode);
  double global_growing_factor =
      std::max(v8_growing_factor, embedder_growing_factor);
  double external_growing_factor = std::min(
  size_t new_global_allocation_limit =
          heap, heap->GlobalConsumedBytesAtLastGC(),
          (heap->OldGenerationConsumedBytesAtLastGC() +
           heap->embedder_size_at_last_gc_) *
                  global_growing_factor +
              (v8_flags.external_memory_accounted_in_global_limit
                   ? heap->external_memory_.low_since_mark_compact() *
                         external_growing_factor
          heap->min_global_memory_size_, heap->max_global_memory_size_,
          new_space_capacity, mode);
  return {new_old_generation_allocation_limit, new_global_allocation_limit};
  size_t new_old_generation_allocation_limit =
      new_limits.old_generation_allocation_limit;
  size_t new_global_allocation_limit = new_limits.global_allocation_limit;
    mb_->RecomputeLimits(new_limits.global_allocation_limit -
                             new_limits.old_generation_allocation_limit,
        new_limits.old_generation_allocation_limit,
        new_limits.global_allocation_limit);
        tracer()->AverageMarkCompactMutatorUtilization());
    new_old_generation_allocation_limit = std::min(
    new_global_allocation_limit =
      new_old_generation_allocation_limit, new_global_allocation_limit);
void Heap::RecomputeLimitsAfterLoadingIfNeeded() {
  size_t new_old_generation_allocation_limit =
      new_limits.old_generation_allocation_limit;
  size_t new_global_allocation_limit = new_limits.global_allocation_limit;
  new_old_generation_allocation_limit = std::max(
  new_global_allocation_limit =
      new_global_allocation_limit);
void Heap::MarkCompact() {
  if (v8_flags.allocation_site_pretenuring) {
void Heap::MinorMarkSweep() {
void Heap::MarkCompactEpilogue() {
  isolate_->counters()->objs_since_last_full()->Set(0);
void Heap::MarkCompactPrologue() {
  isolate_->descriptor_lookup_cache()->Clear();
void Heap::Scavenge() {
  for (size_t i = 0; i < young_strings_.size(); ++i) {
    if (young_strings_[i] == string) return true;
  for (size_t i = 0; i < old_strings_.size(); ++i) {
    if (old_strings_[i] == string) return true;
                                                     size_t new_payload) {
  DCHECK(IsExternalString(string));
  if (old_payload > new_payload) {
    page->DecrementExternalBackingStoreBytes(
    page->IncrementExternalBackingStoreBytes(
  if (!IsExternalString(string, cage_base)) {
    DCHECK(IsThinString(string, cage_base));
    heap->FinalizeExternalString(string);
  if (IsThinString(new_string, cage_base)) {
  } else if (IsExternalString(new_string, cage_base)) {
  return IsExternalString(new_string, cage_base) ? new_string
  std::set<Tagged<String>> visited_map;
  std::map<MutablePageMetadata*, size_t> size_map;
  for (size_t i = 0; i < young_strings_.size(); ++i) {
    DCHECK(IsExternalString(obj));
    visited_map.insert(obj);
  for (std::map<MutablePageMetadata*, size_t>::iterator it = size_map.begin();
       it != size_map.end(); it++)
    DCHECK_EQ(it->first->ExternalBackingStoreBytes(type), it->second);
  std::set<Tagged<String>> visited_map;
  std::map<MutablePageMetadata*, size_t> size_map;
  for (size_t i = 0; i < old_strings_.size(); ++i) {
    DCHECK(IsExternalString(obj));
    visited_map.insert(obj);
  for (std::map<MutablePageMetadata*, size_t>::iterator it = size_map.begin();
       it != size_map.end(); it++)
    DCHECK_EQ(it->first->ExternalBackingStoreBytes(type), it->second);
    Heap::ExternalStringTableUpdaterCallback updater_func) {
  if (young_strings_.empty()) return;
    if (target.is_null()) continue;
    DCHECK(IsExternalString(target));
      old_strings_.push_back(target);
  young_strings_.resize(last - start);
  old_strings_.reserve(old_strings_.size() + young_strings_.size());
  std::move(std::begin(young_strings_), std::end(young_strings_),
            std::back_inserter(old_strings_));
  young_strings_.clear();
  if (!young_strings_.empty()) {
        Root::kExternalStringsTable, nullptr,
  if (!old_strings_.empty()) {
        Root::kExternalStringsTable, nullptr,
void Heap::UpdateYoungReferencesInExternalStringTable(
    Heap::ExternalStringTableUpdaterCallback updater_func) {
  if (!old_strings_.empty()) {
      p.store(updater_func(heap_, p));
  UpdateYoungReferences(updater_func);
void Heap::UpdateReferencesInExternalStringTable(
                                allocation_site_obj));
  if (IsUndefined(head, isolate())) {
void Heap::ForeachAllocationSite(
  while (IsAllocationSite(current)) {
    while (IsAllocationSite(current_nested)) {
      visitor(nested_site);
      current_nested = nested_site->nested_site();
    current = site->weak_next();
  bool marked = false;
        if (site->GetAllocationType() == allocation) {
          site->ResetPretenureDecision();
          site->set_deopt_dependent_code(true);
  if (marked) isolate_->stack_guard()->RequestDeoptMarkedAllocationSites();
void Heap::EvaluateOldSpaceLocalPretenuring(
    uint64_t size_of_objects_before_gc) {
  double old_generation_survival_rate =
      (static_cast<double>(size_of_objects_after_gc) * 100) /
      static_cast<double>(size_of_objects_before_gc);
        "Deopt all allocation sites dependent code due to low survival "
        "rate in the old generation %f\n",
        old_generation_survival_rate);
#ifdef V8_COMPRESS_POINTERS
  switch (alignment) {
size_t Heap::GetCodeRangeReservedAreaSize() {
  const int filler_size = allocation_size - object_size;
  const int pre_filler = GetFillToAlign(object.address(), alignment);
  const int post_filler = filler_size - pre_filler;
                           object.address() + object_size, post_filler));
void* Heap::AllocateExternalBackingStore(
    const std::function<void*(size_t)>& allocate, size_t byte_length) {
  size_t max = isolate()->array_buffer_allocator()->MaxAllocationSize();
  if (byte_length > max) {
    size_t new_space_backing_store_bytes =
        new_space_backing_store_bytes >= byte_length) {
  void* result = allocate(byte_length);
  for (int i = 0; i < 2; i++) {
    result = allocate(byte_length);
  return allocate(byte_length);
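// Backing-store allocation is retried under memory pressure: after the first
// failed allocate() the loop makes up to two more attempts (the GC calls
// between retries are elided above), and the trailing allocate(byte_length)
// is a last-resort attempt after the most aggressive collection.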
void Heap::ShrinkOldGenerationAllocationLimitIfNotConfigured() {
      tracer()->SurvivalEventsRecorded()) {
    const size_t minimum_growing_step =
    size_t new_old_generation_allocation_limit =
                 static_cast<size_t>(
                     (tracer()->AverageSurvivalRatio() / 100)));
    new_old_generation_allocation_limit = std::min(
    size_t new_global_allocation_limit = std::max(
                (tracer()->AverageSurvivalRatio() / 100)));
    new_global_allocation_limit =
        new_old_generation_allocation_limit, new_global_allocation_limit);
void Heap::FlushNumberStringCache() {
  int len = number_string_cache()->length();
  for (int i = 0; i < len; i++) {
  int size = free_space.Size();
  if (size == 0) return;
                  roots.unchecked_one_pointer_filler_map());
                  roots.unchecked_two_pointer_filler_map());
                ->map(heap->isolate())));
  if (chunk->InReadOnlySpace()) return;
  if (!v8_flags.sticky_mark_bits && chunk->InYoungGeneration()) return;
  MutablePageMetadata* mutable_page =
  BaseSpace* space = mutable_page->owner();
  space->heap()->VerifySlotRangeHasNoRecordedSlots(start, end);
void Heap::CreateFillerObjectAt(Address addr, int size,
  if (size == 0) return;
                          VerifyNoSlotsRecorded::kYes);
                          VerifyNoSlotsRecorded::kYes);
void Heap::CreateFillerObjectAtRaw(
  size_t size = free_space.Size();
  if (size == 0) return;
  CreateFillerObjectAtImpl(free_space, this, clear_memory_mode);
  } else if (verify_no_slots_recorded == VerifyNoSlotsRecorded::kYes) {
    VerifyNoNeedToClearSlots(addr, addr + size);
  if (isolate()->concurrent_recompilation_enabled() &&
      isolate()->optimizing_compile_dispatcher()->HasJobs()) {
#ifdef ENABLE_SLOW_DCHECKS
class LeftTrimmerVerifierRootVisitor : public RootVisitor {
      : to_check_(to_check) {}
  LeftTrimmerVerifierRootVisitor(const LeftTrimmerVerifierRootVisitor&) =
  LeftTrimmerVerifierRootVisitor& operator=(
      const LeftTrimmerVerifierRootVisitor&) = delete;
  void VisitRootPointers(Root root, const char* description,
                         FullObjectSlot start, FullObjectSlot end) override {
    for (FullObjectSlot p = start; p < end; ++p) {
  void VisitRootPointers(Root root, const char* description,
    DCHECK(root == Root::kStringTable ||
           root == Root::kSharedStructTypeRegistry);
  if (IsByteArray(object) || IsFixedDoubleArray(object)) return false;
                       int size_in_bytes) {
    tracker->MoveEvent(source.address(), target.address(), size_in_bytes);
  if (IsSharedFunctionInfo(target, isolate_)) {
  } else if (IsNativeContext(target, isolate_)) {
    if (isolate_->current_embedder_state() != nullptr) {
      isolate_->current_embedder_state()->OnMoveEvent(source.address(),
        NativeContextMoveEvent(source.address(), target.address()));
  } else if (IsMap(target, isolate_)) {
                                         int elements_to_trim) {
  if (elements_to_trim == 0) {
  CHECK(!object.is_null());
  DCHECK(IsFixedArray(object) || IsFixedDoubleArray(object));
  const int bytes_to_trim = elements_to_trim * element_size;
  const int len = object->length();
  DCHECK(elements_to_trim <= len);
  Address old_start = object.address();
  Address new_start = old_start + bytes_to_trim;
                          VerifyNoSlotsRecorded::kYes);
  if (isolate()->log_object_relocation()) {
    OnMoveEvent(object, new_object, new_object->Size());
#ifdef ENABLE_SLOW_DCHECKS
  if (v8_flags.enable_slow_asserts) {
    std::optional<IsolateSafepointScope> safepoint_scope;
      safepoint_scope.emplace(this);
    LeftTrimmerVerifierRootVisitor root_visitor(object);
template <typename Array>
  DCHECK_EQ(old_capacity, object->capacity());
  if constexpr (Array::kElementsAreMaybeObject) {
  const int bytes_to_trim = (old_capacity - new_capacity) * Array::kElementSize;
  const int old_size = Array::SizeFor(old_capacity);
  DCHECK_EQ(object->AllocatedSize(), old_size);
  Address old_end = object.address() + old_size;
  Address new_end = old_end - bytes_to_trim;
  const bool clear_slots = MayContainRecordedSlots(object);
        object, old_size, old_size - bytes_to_trim,
  if (!v8_flags.black_allocated_pages) {
  } else if (clear_slots) {
    tracker->UpdateObjectSizeEvent(object.address(),
                                   Array::SizeFor(new_capacity));
#define DEF_RIGHT_TRIM(T)                                     \
  template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE) void     \
  Heap::RightTrimArray<T>(Tagged<T> object, int new_capacity, \
#undef DEF_RIGHT_TRIM
void Heap::MakeHeapIterable() {
void Heap::MakeLinearAllocationAreasIterable() {
  allocator()->MakeLinearAllocationAreasIterable();
  if (isolate()->is_shared_space_isolate()) {
    isolate()->global_safepoint()->IterateClientIsolates([](Isolate* client) {
void Heap::FreeLinearAllocationAreas() {
  if (isolate()->is_shared_space_isolate()) {
    isolate()->global_safepoint()->IterateClientIsolates(
void Heap::FreeMainThreadLinearAllocationAreas() {
  allocator()->FreeLinearAllocationAreas();
void Heap::MarkSharedLinearAllocationAreasBlack() {
    allocator()->MarkSharedLinearAllocationAreasBlack();
void Heap::UnmarkSharedLinearAllocationAreas() {
    allocator()->UnmarkSharedLinearAllocationAreas();
void Heap::FreeSharedLinearAllocationAreasAndResetFreeLists() {
    allocator()->FreeSharedLinearAllocationAreasAndResetFreeLists();
void Heap::Unmark() {
  auto unmark_space = [](auto& space) {
    for (auto* page : space) {
      page->marking_bitmap()->template Clear<AccessMode::NON_ATOMIC>();
      page->Chunk()->SetMajorGCInProgress();
      page->SetLiveBytes(0);
  if (isolate()->is_shared_space_isolate()) {
void Heap::DeactivateMajorGCInProgressFlag() {
  auto deactivate_space = [](auto& space) {
    for (auto* metadata : space) {
      metadata->Chunk()->ResetMajorGCInProgress();
  if (isolate()->is_shared_space_isolate()) {
double ComputeMutatorUtilizationImpl(double mutator_speed,
                                     std::optional<double> gc_speed) {
  constexpr double kMinMutatorUtilization = 0.0;
  constexpr double kConservativeGcSpeedInBytesPerMillisecond = 200000;
  if (mutator_speed == 0) return kMinMutatorUtilization;
  if (!gc_speed) gc_speed = kConservativeGcSpeedInBytesPerMillisecond;
  return *gc_speed / (mutator_speed + *gc_speed);
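// Utilization = gc_speed / (mutator_speed + gc_speed), i.e. the fraction of
// combined throughput not spent keeping up with allocation. For example,
// gc_speed = 200000 B/ms and mutator_speed = 1000 B/ms give
// 200000 / 201000 ~= 0.995, which is above the 0.993 "low allocation rate"
// thresholds used below.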
double Heap::ComputeMutatorUtilization(const char* tag, double mutator_speed,
                                       std::optional<double> gc_speed) {
  double result = ComputeMutatorUtilizationImpl(mutator_speed, gc_speed);
  if (v8_flags.trace_mutator_utilization) {
    isolate()->PrintWithTimestamp(
        "%s mutator utilization = %.3f ("
        "mutator_speed=%.f, gc_speed=%.f)\n",
        tag, result, mutator_speed, gc_speed.value_or(0));
bool Heap::HasLowYoungGenerationAllocationRate() {
      tracer()->NewSpaceAllocationThroughputInBytesPerMillisecond(),
      tracer()->YoungGenerationSpeedInBytesPerMillisecond(
  constexpr double kHighMutatorUtilization = 0.993;
  return mu > kHighMutatorUtilization;
bool Heap::HasLowOldGenerationAllocationRate() {
      tracer()->OldGenerationAllocationThroughputInBytesPerMillisecond(),
      tracer()->OldGenerationSpeedInBytesPerMillisecond());
  const double kHighMutatorUtilization = 0.993;
  return mu > kHighMutatorUtilization;
bool Heap::HasLowEmbedderAllocationRate() {
      "Embedder", tracer()->EmbedderAllocationThroughputInBytesPerMillisecond(),
      tracer()->EmbedderSpeedInBytesPerMillisecond());
  const double kHighMutatorUtilization = 0.993;
  return mu > kHighMutatorUtilization;
bool Heap::HasLowAllocationRate() {
bool Heap::IsIneffectiveMarkCompact(size_t old_generation_size,
                                    double mutator_utilization) {
  const double kHighHeapPercentage = 0.8;
  const double kLowMutatorUtilization = 0.4;
  return old_generation_size >=
         mutator_utilization < kLowMutatorUtilization;
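// A mark-compact is treated as ineffective when the old generation sits at or
// above 80% of its limit (the exact comparand is on an elided line above,
// presumably the maximum old-generation size) while mutator utilization has
// dropped below 0.4, i.e. most time goes into GC without freeing much memory.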
static constexpr int kMaxConsecutiveIneffectiveMarkCompacts = 4;
void Heap::CheckIneffectiveMarkCompact(size_t old_generation_size,
                                       double mutator_utilization) {
  if (!v8_flags.detect_ineffective_gcs_near_heap_limit) return;
      kMaxConsecutiveIneffectiveMarkCompacts) {
    if (v8_flags.heap_snapshot_on_oom) {
bool Heap::HasHighFragmentation() {
  if (committed < used) return false;
  constexpr size_t kSlack = 16 * MB;
  return committed - used > used + kSlack;
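// Fragmentation is "high" when more than half of committed memory (plus a
// 16 MB slack) is not backing live objects. E.g. committed = 100 MB and
// used = 40 MB gives 60 MB of overhead, which exceeds 40 MB + 16 MB = 56 MB,
// so the predicate returns true.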
bool Heap::ShouldOptimizeForMemoryUsage() {
  heap_->ActivateMemoryReducerIfNeededOnMainThread();
void Heap::ActivateMemoryReducerIfNeeded() {
void Heap::ActivateMemoryReducerIfNeededOnMainThread() {
      isolate()->is_backgrounded()) {
Heap::ResizeNewSpaceMode Heap::ShouldResizeNewSpace() {
    return (v8_flags.predictable) ? ResizeNewSpaceMode::kNone
                                  : ResizeNewSpaceMode::kShrink;
  static const size_t kLowAllocationThroughput = 1000;
  const double allocation_throughput =
      tracer_->AllocationThroughputInBytesPerMillisecond();
  const bool should_shrink = !v8_flags.predictable &&
                             (allocation_throughput != 0) &&
                             (allocation_throughput < kLowAllocationThroughput);
  const bool should_grow =
  if (should_grow == should_shrink) return ResizeNewSpaceMode::kNone;
  return should_grow ? ResizeNewSpaceMode::kGrow : ResizeNewSpaceMode::kShrink;
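// The new space is a shrink candidate only when allocation throughput is
// known (non-zero) and below 1000 bytes/ms; if the grow and shrink signals
// agree (both set or both clear), the size is left unchanged.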
size_t ComputeReducedNewSpaceSize(NewSpace* new_space) {
  size_t new_capacity =
  size_t rounded_new_capacity =
  return std::min(new_space->TotalCapacity(), rounded_new_capacity);
void Heap::StartResizeNewSpace() {
    size_t reduced_capacity = ComputeReducedNewSpaceSize(new_space());
void Heap::ResizeNewSpace() {
    case ResizeNewSpaceMode::kShrink:
    case ResizeNewSpaceMode::kGrow:
    case ResizeNewSpaceMode::kNone:
void Heap::ExpandNewSpaceSize() {
  const size_t suggested_capacity =
      static_cast<size_t>(v8_flags.semi_space_growth_factor) *
  const size_t chosen_capacity =
void Heap::ReduceNewSpaceSize() {
  const size_t reduced_capacity = ComputeReducedNewSpaceSize(new_space());
size_t Heap::NewSpaceSize() {
size_t Heap::NewSpaceCapacity() const {
size_t Heap::NewSpaceTargetCapacity() const {
void Heap::FinalizeIncrementalMarkingAtomically(
void Heap::FinalizeIncrementalMarkingAtomicallyIfRunning(
void Heap::InvokeIncrementalMarkingPrologueCallbacks() {
           GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE);
void Heap::InvokeIncrementalMarkingEpilogueCallbacks() {
           GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE);
#ifdef V8_ENABLE_SANDBOX
class ExternalPointerSlotInvalidator
    : public HeapVisitor<ExternalPointerSlotInvalidator> {
  explicit ExternalPointerSlotInvalidator(Isolate* isolate)
  void VisitPointers(Tagged<HeapObject> host, ObjectSlot start,
                     ObjectSlot end) override {}
  void VisitPointers(Tagged<HeapObject> host, MaybeObjectSlot start,
                     MaybeObjectSlot end) override {}
  void VisitInstructionStreamPointer(Tagged<Code> host,
                                     InstructionStreamSlot slot) override {}
  void VisitMapPointer(Tagged<HeapObject> host) override {}
  void VisitExternalPointer(Tagged<HeapObject> host,
                            ExternalPointerSlot slot) override {
    ExternalPointerTable::Space* space =
        IsolateForSandbox(isolate_).GetExternalPointerTableSpaceFor(
            slot.tag_range(), host.address());
    space->NotifyExternalPointerFieldInvalidated(slot.address(),
    num_invalidated_slots++;
  int Visit(Tagged<HeapObject> target) {
    num_invalidated_slots = 0;
    HeapVisitor::Visit(target);
    return num_invalidated_slots;
  Tagged<HeapObject> target_;
  int num_invalidated_slots = 0;
void Heap::NotifyObjectLayoutChange(
  const bool may_contain_recorded_slots = MayContainRecordedSlots(object);
    const Address clear_range_end = object.address() + new_size;
    pending_layout_change_object_address = object.address();
          chunk, clear_range_start, clear_range_end,
          SlotSet::EmptyBucketMode::KEEP_EMPTY_BUCKETS);
    if (may_contain_recorded_slots) {
          chunk, clear_range_start, clear_range_end,
          SlotSet::EmptyBucketMode::KEEP_EMPTY_BUCKETS);
          chunk, clear_range_start, clear_range_end,
          SlotSet::EmptyBucketMode::KEEP_EMPTY_BUCKETS);
          chunk, clear_range_start, clear_range_end,
          SlotSet::EmptyBucketMode::KEEP_EMPTY_BUCKETS);
  if (invalidate_external_pointer_slots ==
    DCHECK(IsString(object));
#ifdef V8_ENABLE_SANDBOX
      ExternalPointerSlotInvalidator slot_invalidator(isolate());
      int num_invalidated_slots = slot_invalidator.Visit(object);
      USE(num_invalidated_slots);
    HeapVerifier::SetPendingLayoutChangeObject(this, object);
  if (pending_layout_change_object_address != kNullAddress) {
    DCHECK_EQ(pending_layout_change_object_address, object.address());
  if (new_size == old_size) return;
  const bool is_main_thread = LocalHeap::Current() == nullptr;
  const auto verify_no_slots_recorded =
      is_main_thread ? VerifyNoSlotsRecorded::kYes : VerifyNoSlotsRecorded::kNo;
  const Address filler = object.address() + new_size;
  const int filler_size = old_size - new_size;
      clear_memory_mode, clear_recorded_slots, verify_no_slots_recorded);
double Heap::MonotonicallyIncreasingTimeInMs() const {
void Heap::VerifyNewSpaceTop() {
  if (!new_space()) return;
  allocator()->new_space_allocator()->Verify();
void Heap::CheckMemoryPressure() {
    TRACE_EVENT0("devtools.timeline,v8", "V8.CheckMemoryPressure");
    TRACE_EVENT0("devtools.timeline,v8", "V8.CheckMemoryPressure");
void Heap::CollectGarbageOnMemoryPressure() {
  const int kGarbageThresholdInBytes = 8 * MB;
  const double kGarbageThresholdAsFractionOfTotalMemory = 0.1;
  const double kMaxMemoryPressurePauseMs = 100;
  int64_t potential_garbage =
  if (potential_garbage >= kGarbageThresholdInBytes &&
      potential_garbage >=
    if (end - start < kMaxMemoryPressurePauseMs / 2) {
                                      bool is_isolate_locked) {
  TRACE_EVENT1("devtools.timeline,v8", "V8.MemoryPressureNotification", "level",
               static_cast<int>(level));
    if (is_isolate_locked) {
      isolate()->stack_guard()->RequestGC();
          std::make_unique<MemoryPressureInterruptTask>(this));
void Heap::EagerlyFreeExternalMemoryAndWasmCode() {
#if V8_ENABLE_WEBASSEMBLY
    isolate_->counters()->wasm_flushed_liftoff_code_size_bytes()->AddSample(
        static_cast<int>(code_size));
    isolate_->counters()->wasm_flushed_liftoff_metadata_size_bytes()->AddSample(
        static_cast<int>(metadata_size));
  CompleteArrayBufferSweeping(this);
  const size_t kMaxCallbacks = 100;
  near_heap_limit_callbacks_.push_back(std::make_pair(callback, data));
                                             size_t heap_limit) {
void Heap::AutomaticallyRestoreInitialHeapLimit(double threshold_percent) {
bool Heap::InvokeNearHeapLimitCallback() {
    TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_NEAR_HEAP_LIMIT);
bool Heap::MeasureMemory(std::unique_ptr<v8::MeasureMemoryDelegate> delegate,
  std::vector<Handle<NativeContext>> to_measure;
  for (auto& current : contexts) {
    if (delegate->ShouldMeasure(v8::Utils::ToLocal(current))) {
      to_measure.push_back(current);
std::unique_ptr<v8::MeasureMemoryDelegate>
Heap::CreateDefaultMeasureMemoryDelegate(
  return i::MemoryMeasurement::DefaultDelegate(
void Heap::CollectCodeStatistics() {
  if (!HasBeenSetUp()) return;
  isolate()->PrintStack(stdout);
void Heap::ReportCodeStatistics(const char* title) {
  PrintF("###### Code Stats (%s) ######\n", title);
  CollectCodeStatistics();
  CodeStatistics::ReportCodeStatistics(isolate());
  if (isolate()->OwnsStringTables()) return false;
  if (IsExternalString(value)) return false;
  if (IsInternalizedString(value)) return true;
void Heap::VerifyCountersAfterSweeping() {
  for (PagedSpace* space = spaces.Next(); space != nullptr;
       space = spaces.Next()) {
    space->VerifyCountersAfterSweeping(this);
void Heap::VerifyCountersBeforeConcurrentSweeping(GarbageCollector collector) {
  if (v8_flags.minor_ms && new_space()) {
    PagedSpaceBase* space = paged_new_space()->paged_space();
    space->VerifyCountersBeforeConcurrentSweeping();
  PagedSpaceIterator spaces(this);
  for (PagedSpace* space = spaces.Next(); space != nullptr;
       space = spaces.Next()) {
    space->RefillFreeList();
    space->VerifyCountersBeforeConcurrentSweeping();
void Heap::VerifyCommittedPhysicalMemory() {
  PagedSpaceIterator spaces(this);
  for (PagedSpace* space = spaces.Next(); space != nullptr;
       space = spaces.Next()) {
    space->VerifyCommittedPhysicalMemory();
                       Root::kWeakRoots, nullptr,
      isolate()->OwnsStringTables()) {
    isolate()->string_table()->IterateElements(v);
    v->Synchronize(VisitorSynchronization::kStringTable);
  v->Synchronize(VisitorSynchronization::kExternalStringsTable);
      isolate()->is_shared_space_isolate() &&
      isolate()->shared_struct_type_registry()) {
    isolate()->shared_struct_type_registry()->IterateElements(isolate(), v);
    v->Synchronize(VisitorSynchronization::kSharedStructTypeRegistry);
  v->Synchronize(VisitorSynchronization::kSmiRootList);
    if (IsLeftTrimmed(p)) {
      visitor_->VisitRootPointer(root, description, p);
    if (!current->map_word(cage_base(), kRelaxedLoad).IsForwardingAddress() &&
        IsFreeSpaceOrFiller(current, cage_base())) {
             !current->map_word(cage_base(), kRelaxedLoad).IsForwardingAddress() &&
             IsFreeSpaceOrFiller(current, cage_base())) {
        if (current->map(cage_base()) ==
        } else if (current->map(cage_base()) ==
          next += current->Size();
           current->map_word(cage_base(), kRelaxedLoad).IsForwardingAddress() ||
           IsFixedArrayBase(current, cage_base()));
#if V8_COMPRESS_POINTERS
      cage_base_(heap->isolate())
  v->Synchronize(VisitorSynchronization::kStrongRootList);
  isolate_->bootstrapper()->Iterate(v);
  v->Synchronize(VisitorSynchronization::kBootstrapper);
  v->Synchronize(VisitorSynchronization::kRelocatable);
    isolate_->compilation_cache()->Iterate(v);
    v->Synchronize(VisitorSynchronization::kCompilationCache);
  const bool skip_iterate_builtins =
  if (!skip_iterate_builtins) {
    v->Synchronize(VisitorSynchronization::kBuiltins);
    isolate_->thread_manager()->Iterate(v);
    v->Synchronize(VisitorSynchronization::kThreadManager);
    isolate_->traced_handles()->Iterate(v);
      isolate_->global_handles()->IterateStrongRoots(v);
      isolate_->global_handles()->IterateAllRoots(v);
    v->Synchronize(VisitorSynchronization::kGlobalHandles);
    v->Synchronize(VisitorSynchronization::kStackRoots);
      isolate_->handle_scope_implementer()->Iterate(&left_trim_visitor);
    v->Synchronize(VisitorSynchronization::kHandleScope);
      isolate_->eternal_handles()->IterateYoungRoots(v);
      isolate_->eternal_handles()->IterateAllRoots(v);
    v->Synchronize(VisitorSynchronization::kEternalHandles);
        isolate_->default_microtask_queue();
    if (default_microtask_queue) {
    v->Synchronize(VisitorSynchronization::kMicroTasks);
         current = current->next) {
    v->Synchronize(VisitorSynchronization::kStrongRoots);
    v->Synchronize(VisitorSynchronization::kStartupObjectCache);
  if (isolate_->OwnsStringTables()) {
    v->Synchronize(VisitorSynchronization::kSharedHeapObjectCache);
  IterateRoots(v, options, IterateRootsMode::kMainIsolate);
  if (isolate()->is_shared_space_isolate()) {
    isolate()->global_safepoint()->IterateClientIsolates(
        [v = &client_root_visitor, options](Isolate* client) {
                               IterateRootsMode::kClientIsolate);
  isolate_->global_handles()->IterateWeakRoots(v);
  isolate_->traced_handles()->Iterate(v);
    v->VisitRootPointer(Root::kBuiltins, name, builtins->builtin_slot(builtin));
                        builtins->builtin_tier0_slot(builtin));
#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
  Isolate* main_isolate = roots_mode == IterateRootsMode::kClientIsolate
void Heap::IterateConservativeStackRoots(
    stack().IteratePointersUntilMarker(stack_visitor);
  stack().IterateBackgroundStacks(stack_visitor);
  isolate()->handle_scope_implementer()->Iterate(visitor);
size_t Heap::DefaultMinSemiSpaceSize() {
  static constexpr size_t kMinSemiSpaceSize =
  static constexpr size_t kMinSemiSpaceSize = 512 * KB * kPointerMultiplier;
  static_assert(kMinSemiSpaceSize % (1 << kPageSizeBits) == 0);
  return kMinSemiSpaceSize;
size_t Heap::DefaultMaxSemiSpaceSize() {
  static constexpr size_t kMaxSemiSpaceCapacityBaseUnit =
  static constexpr size_t kMaxSemiSpaceCapacityBaseUnit =
      MB * kPointerMultiplier;
  static_assert(kMaxSemiSpaceCapacityBaseUnit % (1 << kPageSizeBits) == 0);
  size_t max_semi_space_size =
           : v8_flags.scavenger_max_new_space_capacity_mb) *
      kMaxSemiSpaceCapacityBaseUnit;
  return max_semi_space_size;
size_t Heap::OldGenerationToSemiSpaceRatio() {
  static size_t kMaxOldGenSizeToMaxYoungGenSizeRatio =
      (v8_flags.scavenger_max_new_space_capacity_mb * MB);
size_t Heap::OldGenerationToSemiSpaceRatioLowMemory() {
  static constexpr size_t kOldGenerationToSemiSpaceRatioLowMemory =
  return kOldGenerationToSemiSpaceRatioLowMemory / (v8_flags.minor_ms ? 2 : 1);
  if (v8_flags.max_semi_space_size > 0) {
        static_cast<size_t>(v8_flags.max_semi_space_size) * MB;
  } else if (v8_flags.max_heap_size > 0) {
    size_t max_heap_size = static_cast<size_t>(v8_flags.max_heap_size) * MB;
    size_t young_generation_size, old_generation_size;
    if (v8_flags.max_old_space_size > 0) {
      old_generation_size =
          static_cast<size_t>(v8_flags.max_old_space_size) * MB;
      young_generation_size = max_heap_size > old_generation_size
                                  ? max_heap_size - old_generation_size
                                  &old_generation_size);
  if (v8_flags.max_old_space_size > 0) {
    max_old_generation_size =
        static_cast<size_t>(v8_flags.max_old_space_size) * MB;
  } else if (v8_flags.max_heap_size > 0) {
    size_t max_heap_size = static_cast<size_t>(v8_flags.max_heap_size) * MB;
    size_t young_generation_size =
    max_old_generation_size = max_heap_size > young_generation_size
                                  ? max_heap_size - young_generation_size
  max_old_generation_size =
  max_old_generation_size = std::min(max_old_generation_size,
  max_old_generation_size =
  if (v8_flags.min_semi_space_size > 0) {
        static_cast<size_t>(v8_flags.min_semi_space_size) * MB;
  if (v8_flags.initial_heap_size > 0) {
    size_t young_generation, old_generation;
    Heap::GenerationSizesFromHeapSize(
        static_cast<size_t>(v8_flags.initial_heap_size) * MB,
        &young_generation, &old_generation);
  if (v8_flags.lazy_new_space_shrinking) {
  std::optional<size_t> initial_old_generation_size =
      [&]() -> std::optional<size_t> {
    if (v8_flags.initial_old_space_size > 0) {
      return static_cast<size_t>(v8_flags.initial_old_space_size) * MB;
    if (v8_flags.initial_heap_size > 0) {
      size_t initial_heap_size =
          static_cast<size_t>(v8_flags.initial_heap_size) * MB;
      size_t young_generation_size =
      return initial_heap_size > young_generation_size
                 ? initial_heap_size - young_generation_size
    return std::nullopt;
  if (initial_old_generation_size.has_value()) {
    (JSArray::kHeaderSize +
5100void Heap::AddToRingBuffer(
const char*
string) {
5105 if (first_part < strlen(
string)) {
5107 size_t second_part = strlen(
string) - first_part;
5113void Heap::GetFromRingBuffer(
char* buffer) {
5122void Heap::ConfigureHeapDefault() {
5134 stats->old_space_size =
old_space_->SizeOfObjects();
5136 stats->code_space_size =
code_space_->SizeOfObjects();
5138 stats->map_space_size = 0;
5139 stats->map_space_capacity = 0;
5142 isolate_->global_handles()->RecordStats(stats);
5144 stats->memory_allocator_capacity =
5148 stats->malloced_memory =
isolate_->allocator()->GetCurrentMemoryUsage();
5149 stats->malloced_peak_memory =
isolate_->allocator()->GetMaxMemoryUsage();
5153size_t Heap::OldGenerationSizeOfObjects() const {
5173size_t Heap::OldGenerationWastedBytes() const {
5177 space = spaces.Next()) {
5178 total += space->Waste();
5183size_t Heap::OldGenerationConsumedBytes() const {
5187size_t Heap::YoungGenerationSizeOfObjects() const {
5198size_t Heap::YoungGenerationWastedBytes() const {
5204size_t Heap::YoungGenerationConsumedBytes() const {
5221size_t Heap::EmbedderSizeOfObjects() const {
5225size_t Heap::GlobalSizeOfObjects() const {
5233size_t Heap::GlobalConsumedBytes() const {
5237size_t Heap::OldGenerationConsumedBytesAtLastGC() const {
5241size_t Heap::GlobalConsumedBytesAtLastGC() const {
5243 (v8_flags.external_memory_accounted_in_global_limit
5248uint64_t Heap::AllocatedExternalMemorySinceMarkCompact() const {
5252bool Heap::AllocationLimitOvershotByLargeMargin() const {
5255 constexpr size_t kMarginForSmallHeaps = 32u * MB;
5258 if (!v8_flags.external_memory_accounted_in_global_limit) {
5271 const size_t global_overshoot =
5272 global_limit < global_size ? global_size - global_limit : 0;
5276 if (v8_overshoot == 0 && global_overshoot == 0) {
5282 const size_t v8_margin = std::min(
5283 std::max(old_generation_allocation_limit() / 2, kMarginForSmallHeaps),
5285 const size_t global_margin =
5286 std::min(std::max(global_limit / 2, kMarginForSmallHeaps),
5289 return v8_overshoot >= v8_margin || global_overshoot >= global_margin;
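The margin test above treats the allocation limit as overshot "by a large margin" only when the overshoot exceeds half of the current limit, with a floor for small heaps. A hedged standalone version of that check; the upper cap parameter here stands in for a bound that is elided in this listing:

// Illustrative sketch only; not part of heap.cc.
#include <algorithm>
#include <cstddef>

namespace example {

constexpr size_t kMB = 1024 * 1024;
constexpr size_t kMarginForSmallHeaps = 32u * kMB;

inline bool OvershotByLargeMargin(size_t size_now, size_t limit,
                                  size_t margin_cap) {
  const size_t overshoot = limit < size_now ? size_now - limit : 0;
  if (overshoot == 0) return false;
  // Margin: half the limit, but at least the small-heap floor and at most an
  // assumed caller-supplied cap.
  const size_t margin =
      std::min(std::max(limit / 2, kMarginForSmallHeaps), margin_cap);
  return overshoot >= margin;
}

}  // namespace example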
5292bool Heap::ShouldOptimizeForLoadTime() const {
5304bool Heap::ShouldExpandOldGenerationOnSlowAllocation(LocalHeap* local_heap,
5350bool Heap::ShouldExpandYoungGenerationOnSlowAllocation(size_t allocation_size) {
5375bool Heap::IsNewSpaceAllowedToGrowAboveTargetCapacity() const {
5380Heap::HeapGrowingMode Heap::CurrentHeapGrowingMode() {
5382 return Heap::HeapGrowingMode::kMinimal;
5386 return Heap::HeapGrowingMode::kConservative;
5390 return Heap::HeapGrowingMode::kSlow;
5393 return Heap::HeapGrowingMode::kDefault;
5396size_t Heap::GlobalMemoryAvailable() {
5400 if (global_size < global_limit) {
5401 return global_limit - global_size;
5409double PercentToLimit(size_t size_at_gc, size_t size_now, size_t limit) {
5410 if (size_now < size_at_gc) {
5413 if (size_now > limit) {
5416 const size_t current_bytes = size_now - size_at_gc;
5417 const size_t total_bytes = limit - size_at_gc;
5419 return static_cast<double>(current_bytes) * 100 / total_bytes;
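PercentToLimit() above converts heap growth since the last GC into a percentage of the distance to the current allocation limit. The same arithmetic as a self-contained helper; the 0/100 early returns and the zero-distance guard are assumptions for lines elided in this listing:

// Illustrative sketch only; not part of heap.cc.
#include <cstddef>

namespace example {

inline double PercentToLimit(size_t size_at_gc, size_t size_now,
                             size_t limit) {
  if (size_now < size_at_gc) return 0.0;  // heap shrank since the last GC
  if (size_now > limit) return 100.0;     // already past the limit
  const size_t current_bytes = size_now - size_at_gc;
  const size_t total_bytes = limit - size_at_gc;
  if (total_bytes == 0) return 100.0;     // degenerate limit (assumption)
  return static_cast<double>(current_bytes) * 100 / total_bytes;
}

}  // namespace example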
5424double Heap::PercentToOldGenerationLimit() const {
5430double Heap::PercentToGlobalMemoryLimit() const {
5442Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() {
5447 return IncrementalMarkingLimit::kNoLimit;
5449 if (v8_flags.stress_incremental_marking) {
5450 return IncrementalMarkingLimit::kHardLimit;
5454 return IncrementalMarkingLimit::kNoLimit;
5459 return IncrementalMarkingLimit::kHardLimit;
5463 int current_percent = static_cast<int>(
5465 if (current_percent > 0) {
5466 if (v8_flags.trace_stress_marking) {
5467 isolate()->PrintWithTimestamp(
5468 "[IncrementalMarking] %d%% of the memory limit reached\n",
5473 if (current_percent < 100) {
5474 double max_marking_limit_reached =
5476 while (current_percent > max_marking_limit_reached) {
5478 max_marking_limit_reached, current_percent,
5479 std::memory_order_relaxed);
5483 return IncrementalMarkingLimit::kHardLimit;
5488 if (v8_flags.incremental_marking_soft_trigger > 0 ||
5489 v8_flags.incremental_marking_hard_trigger > 0) {
5490 int current_percent = static_cast<int>(
5492 if (current_percent > v8_flags.incremental_marking_hard_trigger &&
5493 v8_flags.incremental_marking_hard_trigger > 0) {
5494 return IncrementalMarkingLimit::kHardLimit;
5496 if (current_percent > v8_flags.incremental_marking_soft_trigger &&
5497 v8_flags.incremental_marking_soft_trigger > 0) {
5498 return IncrementalMarkingLimit::kSoftLimit;
5500 return IncrementalMarkingLimit::kNoLimit;
5515 return IncrementalMarkingLimit::kFallbackForEmbedderLimit;
5517 return IncrementalMarkingLimit::kNoLimit;
5520 return IncrementalMarkingLimit::kHardLimit;
5523 return IncrementalMarkingLimit::kNoLimit;
5525 if (old_generation_space_available == 0) {
5526 return IncrementalMarkingLimit::kHardLimit;
5528 if (global_memory_available == 0) {
5529 return IncrementalMarkingLimit::kHardLimit;
5531 return IncrementalMarkingLimit::kSoftLimit;
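When --incremental_marking_soft_trigger or --incremental_marking_hard_trigger is set, the branch above maps the current percentage of the allocation limit onto a soft or hard incremental-marking limit. A standalone sketch of that mapping (enum and parameter names are assumptions, not V8's):

// Illustrative sketch only; not part of heap.cc.
namespace example {

enum class MarkingLimit { kNoLimit, kSoftLimit, kHardLimit };

inline MarkingLimit LimitFromTriggers(int current_percent, int soft_trigger,
                                      int hard_trigger) {
  // A trigger of zero means "not configured" in this sketch.
  if (hard_trigger > 0 && current_percent > hard_trigger) {
    return MarkingLimit::kHardLimit;
  }
  if (soft_trigger > 0 && current_percent > soft_trigger) {
    return MarkingLimit::kSoftLimit;
  }
  return MarkingLimit::kNoLimit;
}

}  // namespace example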
5534bool Heap::ShouldStressCompaction() const {
5540void Heap::DisableInlineAllocation() {
5555#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
5573 const size_t requested_size =
5578#ifdef V8_COMPRESS_POINTERS
5586 requested_size, false)) {
5588 isolate_, "Failed to reserve virtual memory for CodeRange");
5593 NewEvent("CodeRange",
5594 reinterpret_cast<void*>(code_range_->reservation()->address()),
5599 code_page_allocator = code_range_->page_allocator();
5601 code_page_allocator = isolate_->page_allocator();
5605#ifdef V8_ENABLE_SANDBOX
5606 trusted_page_allocator =
5607 TrustedRange::GetProcessWideTrustedRange()->page_allocator();
5609 trusted_page_allocator = isolate_->page_allocator();
5682 const int kNumIterations = 2000;
5684 const int kMediumObjectSize = 8 * KB;
5685 const int kLargeObjectSize =
5689 for (int i = 0; i < kNumIterations; i++) {
5691 if (heap->gc_state() == Heap::TEAR_DOWN) return;
5696 if (!result.IsFailure()) {
5697 heap->CreateFillerObjectAtBackground(
5701 heap->CollectGarbageFromAnyThread(&local_heap);
5707 if (!result.IsFailure()) {
5708 heap->CreateFillerObjectAtBackground(
5710 kMediumObjectSize));
5712 heap->CollectGarbageFromAnyThread(&local_heap);
5718 if (!result.IsFailure()) {
5719 heap->CreateFillerObjectAtBackground(
5723 heap->CollectGarbageFromAnyThread(&local_heap);
5733 auto task = std::make_unique<StressConcurrentAllocationTask>(isolate);
5734 const double kDelayInSeconds = 0.1;
5750 if (v8_flags.stress_concurrent_allocation) {
5819 if (isolate()->is_shared_space_isolate()) {
5834 std::make_unique<SharedTrustedLargeObjectSpace>(this);
5839 if (isolate()->has_shared_space()) {
5849 old_allocation_info);
5863 std::make_unique<Heap::AllocationTrackerForDebugging>(this);
5880 allocator()->new_space_allocator()->AddAllocationObserver(
5889void Heap::InitializeHashSeed() {
5891 uint64_t new_hash_seed;
5893 int64_t rnd = isolate()->random_number_generator()->NextInt64();
5894 new_hash_seed = static_cast<uint64_t>(rnd);
5896 new_hash_seed = static_cast<uint64_t>(v8_flags.hash_seed);
5899 MemCopy(hash_seed->begin(), reinterpret_cast<uint8_t*>(&new_hash_seed),
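InitializeHashSeed() above uses a random 64-bit value unless --hash-seed is non-zero, then copies the raw seed bytes into the seed field. A rough standalone equivalent that uses the standard library instead of V8's random number generator and MemCopy (names are assumptions):

// Illustrative sketch only; not part of heap.cc.
#include <cstdint>
#include <cstring>
#include <random>

namespace example {

inline uint64_t PickHashSeed(uint64_t hash_seed_flag) {
  // A non-zero flag value wins; otherwise draw a random 64-bit seed.
  if (hash_seed_flag != 0) return hash_seed_flag;
  std::random_device rd;
  std::mt19937_64 rng(rd());
  return rng();
}

inline void StoreHashSeed(uint64_t seed, uint8_t* destination) {
  // Copy the raw bytes, as the original does with MemCopy.
  memcpy(destination, reinterpret_cast<const uint8_t*>(&seed), sizeof(seed));
}

}  // namespace example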
5903std::shared_ptr<v8::TaskRunner> Heap::GetForegroundTaskRunner(
5910void Heap::InitializeOncePerProcess() {
5911#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
5912 HeapAllocator::InitializeOncePerProcess();
5920void Heap::PrintMaxMarkingLimitReached() {
5921 PrintF("\n### Maximum marking limit reached = %.02lf\n",
5925void Heap::PrintMaxNewSpaceSizeReached() {
5926 PrintF("\n### Maximum new space size reached = %.02lf\n",
5930int Heap::NextStressMarkingLimit() {
5934void Heap::WeakenDescriptorArrays(
5943 std::move(strong_descriptor_arrays));
5951 for (auto it = strong_descriptor_arrays.begin();
5952 it != strong_descriptor_arrays.end(); ++it) {
5954 DCHECK(IsStrongDescriptorArray(array));
5955 array->set_map_safe_transition_no_write_barrier(isolate(),
5956 descriptor_array_map);
5961void Heap::NotifyDeserializationComplete() {
5972 DCHECK(p->Chunk()->NeverEvacuate());
5977 if (v8_flags.stress_concurrent_allocation) {
5993void Heap::NotifyBootstrapComplete() {
6001void Heap::NotifyOldGenerationExpansion(
6011 isolate()->AddCodeMemoryChunk(chunk_metadata);
6016 const size_t kMemoryReducerActivationThreshold = 1 * MB;
6020 kMemoryReducerActivationThreshold &&
6021 (notification_origin ==
6022 OldGenerationExpansionNotificationOrigin::kFromSameHeap) &&
6023 v8_flags.memory_reducer_for_small_heaps) {
6045std::optional<StackState> Heap::overridden_stack_state() const {
6050void Heap::SetStackStart() {
6054 stack().SetStackStart();
6062const ::heap::base::Stack& Heap::stack() const {
6067void Heap::StartTearDown() {
6103void Heap::TearDownWithSharedHeap() {
6122void Heap::TearDown() {
6161 allocator()->new_space_allocator()->RemoveAllocationObserver(
6215 next = current->next;
6229 heap->isolate()->root(RootIndex::kFreeSpaceMap);
6230 CHECK(!heap->deserialization_complete() ||
6231 object.map_slot().contains_map_value(free_space_map.ptr()));
6237 GCType gc_type, void* data) {
6248 GCType gc_type, void* data) {
6262 if (array->length() == 0) {
6265 int new_length = array->CountLiveWeakReferences();
6272 handle(ReadOnlyRoots(heap).empty_weak_array_list(), heap->isolate()),
6278 for (int i = 0; i < array->length(); i++) {
6280 if (element.IsCleared()) continue;
6281 new_array->Set(copy_to++, element);
6283 new_array->set_length(copy_to);
6289void Heap::CompactWeakArrayLists() {
6291 std::vector<Handle<PrototypeInfo>> prototype_infos;
6295 o = iterator.Next()) {
6296 if (IsPrototypeInfo(*o)) {
6298 if (IsWeakArrayList(prototype_info->prototype_users())) {
6299 prototype_infos.emplace_back(handle(prototype_info, isolate()));
6304 for (auto& prototype_info : prototype_infos) {
6312 prototype_info->set_prototype_users(new_array);
6319 set_script_list(*scripts);
6327 if (array->length() + new_maps_size > array->capacity()) {
6330 int cur_length = array->length();
6333 if (*array != context->retained_maps()) {
6334 context->set_retained_maps(*array);
6343 if (map->is_in_retained_map_list()) {
6347 raw_array->Set(cur_length, MakeWeak(*map));
6348 raw_array->Set(cur_length + 1,
6351 raw_array->set_length(cur_length);
6353 map->set_is_in_retained_map_list(true);
6359 int length = retained_maps->length();
6373 retained_maps->Set(new_length, maybe_object);
6380 retained_maps->Set(i, undefined);
6385void Heap::FatalProcessOutOfMemory(const char* location) {
6393 void VisitRootPointers(Root root, const char* description,
6396 PrintF(" handle %p to %p\n", p.ToVoidPtr(),
6397 reinterpret_cast<void*>((*p).ptr()));
6401void Heap::PrintHandles() {
6403 PrintHandleVisitor v;
6404 isolate_->handle_scope_implementer()->Iterate(&v);
6424void Heap::CheckHandleCount() {
6426 isolate_->handle_scope_implementer()->Iterate(&v);
6431 size_t slot_offset) {
6441#ifndef V8_DISABLE_WRITE_BARRIERS
6451#ifndef V8_DISABLE_WRITE_BARRIERS
6454#if !V8_ENABLE_STICKY_MARK_BITS_BOOL
6465 if (!page->SweepingDone()) {
6481 if (space) return space;
6502 DCHECK(!IsFreeSpaceOrFiller(object));
6506 if (reachable_.count(chunk) == 0) return true;
6507 return reachable_[chunk]->count(object) == 0;
6518 reachable_[chunk] = std::make_unique<BucketType>();
6583 template <typename TSlot>
6586 for (TSlot p = start; p < end; ++p) {
6587 typename TSlot::TObject object = p.load(cage_base());
6588#ifdef V8_ENABLE_DIRECT_HANDLE
6592 if (object.GetHeapObject(&heap_object)) {
6619 std::unordered_map<MemoryChunkMetadata*, std::unique_ptr<BucketType>,
6642 space_iterator_(heap_) {
6644 switch (filtering) {
6646 filter_ = std::make_unique<UnreachableObjectsFilter>(heap_);
6672 if (!obj.is_null()) return obj;
6677 if (!obj.is_null()) return obj;
6693 if (IsTheHole(o, isolate)) {
6698 if (IsThinString(o)) continue;
6699 DCHECK(IsExternalString(o));
6713 for (size_t i = 0; i < old_strings_.size(); ++i) {
6715 if (IsTheHole(o, isolate)) {
6720 if (IsThinString(o)) continue;
6721 DCHECK(IsExternalString(o));
6723 old_strings_[last++] = o;
6725 old_strings_.resize(last);
6732 for (size_t i = 0; i < young_strings_.size(); ++i) {
6735 if (IsThinString(o)) continue;
6738 young_strings_.clear();
6739 for (size_t i = 0; i < old_strings_.size(); ++i) {
6742 if (IsThinString(o)) continue;
6745 old_strings_.clear();
6748void Heap::RememberUnmappedPage(Address page, bool compacted) {
6760size_t Heap::YoungArrayBufferBytes() {
6764uint64_t Heap::UpdateExternalMemory(int64_t delta) {
6767 if (amount < low_since_mark_compact) {
6773size_t Heap::OldArrayBufferBytes() {
6789 entry->prev = nullptr;
6817 if (prev) prev->next = next;
6818 if (next) next->prev = prev;
6829 set_builtins_constants_table(cache);
6833 set_detached_contexts(detached_contexts);
6836bool Heap::HasDirtyJSFinalizationRegistries() {
6840void Heap::PostFinalizationRegistryCleanupTaskIfNeeded() {
6846 auto task = std::make_unique<FinalizationRegistryCleanupTask>(this);
6851void Heap::EnqueueDirtyJSFinalizationRegistry(
6855 gc_notify_updated_slot) {
6859 DCHECK(IsUndefined(finalization_registry->next_dirty(), isolate()));
6860 DCHECK(!finalization_registry->scheduled_for_cleanup());
6861 finalization_registry->set_scheduled_for_cleanup(true);
6870 tail->set_next_dirty(finalization_registry);
6871 gc_notify_updated_slot(
6872 tail, tail->RawField(JSFinalizationRegistry::kNextDirtyOffset),
6873 finalization_registry);
6881Heap::DequeueDirtyJSFinalizationRegistry() {
6888 head->set_next_dirty(ReadOnlyRoots(this).undefined_value());
6898void Heap::RemoveDirtyFinalizationRegistriesOnContext(
6905 while (!IsUndefined(current, isolate)) {
6908 if (finalization_registry->native_context() == context) {
6909 if (IsUndefined(prev, isolate)) {
6911 finalization_registry->next_dirty());
6914 finalization_registry->next_dirty());
6916 finalization_registry->set_scheduled_for_cleanup(false);
6917 current = finalization_registry->next_dirty();
6918 finalization_registry->set_next_dirty(
6922 current = finalization_registry->next_dirty();
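The finalization-registry bookkeeping above threads a singly linked dirty list through the registries themselves (next_dirty): enqueue at the tail, dequeue at the head, and unlink entries belonging to a dying native context. A plain-struct sketch of the same list discipline (all names here are assumptions, not V8's):

// Illustrative sketch only; not part of heap.cc.
namespace example {

struct Registry {
  int context_id = 0;
  bool scheduled_for_cleanup = false;
  Registry* next_dirty = nullptr;
};

struct DirtyList {
  Registry* head = nullptr;
  Registry* tail = nullptr;

  void Enqueue(Registry* r) {
    r->scheduled_for_cleanup = true;
    if (tail) tail->next_dirty = r; else head = r;
    tail = r;
  }

  Registry* Dequeue() {
    Registry* r = head;
    if (!r) return nullptr;
    head = r->next_dirty;
    if (!head) tail = nullptr;
    r->next_dirty = nullptr;
    return r;
  }

  // Drop every registry that belongs to `context_id`, relinking around it.
  void RemoveForContext(int context_id) {
    Registry* prev = nullptr;
    Registry* current = head;
    while (current) {
      Registry* next = current->next_dirty;
      if (current->context_id == context_id) {
        if (prev) prev->next_dirty = next; else head = next;
        if (current == tail) tail = prev;
        current->scheduled_for_cleanup = false;
        current->next_dirty = nullptr;
      } else {
        prev = current;
      }
      current = next;
    }
  }
};

}  // namespace example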
6929 DCHECK(IsUndefined(weak_refs_keep_during_job()) ||
6930 IsOrderedHashSet(weak_refs_keep_during_job()));
6932 if (IsUndefined(weak_refs_keep_during_job(), isolate())) {
6933 table = isolate()->factory()->NewOrderedHashSet();
6940 if (!maybe_table.ToHandle(&table)) {
6942 "Fatal JavaScript error: Too many distinct WeakRef objects "
6943 "created or dereferenced during single event loop turn.");
6945 set_weak_refs_keep_during_job(*table);
6948void Heap::ClearKeptObjects() {
6952size_t Heap::NumberOfTrackedHeapObjectTypes() {
6956size_t Heap::ObjectCountAtLastGC(size_t index) {
6962size_t Heap::ObjectSizeAtLastGC(size_t index) {
6968bool Heap::GetObjectTypeName(size_t index, const char** object_type,
6969 const char** object_sub_type) {
6972 switch (static_cast<int>(index)) {
6973#define COMPARE_AND_RETURN_NAME(name) \
6975 *object_type = #name; \
6976 *object_sub_type = ""; \
6979#undef COMPARE_AND_RETURN_NAME
6981#define COMPARE_AND_RETURN_NAME(name) \
6982 case ObjectStats::FIRST_VIRTUAL_TYPE + \
6983 static_cast<int>(ObjectStats::VirtualInstanceType::name): \
6984 *object_type = #name; \
6985 *object_sub_type = ""; \
6988#undef COMPARE_AND_RETURN_NAME
6993size_t Heap::NumberOfNativeContexts() {
6996 while (!IsUndefined(context, isolate())) {
7004std::vector<Handle<NativeContext>> Heap::FindAllNativeContexts() {
7005 std::vector<Handle<NativeContext>> result;
7007 while (!IsUndefined(context, isolate())) {
7015std::vector<Tagged<WeakArrayList>> Heap::FindAllRetainedMaps() {
7016 std::vector<Tagged<WeakArrayList>> result;
7018 while (!IsUndefined(context, isolate())) {
7026size_t Heap::NumberOfDetachedContexts() {
7028 return detached_contexts()->length() / 2;
7057 return dst == CODE_SPACE && type == INSTRUCTION_STREAM_TYPE;
7076size_t Heap::EmbedderAllocationCounter() const {
7080void Heap::CreateObjectStats() {
7096 return map_word.ToMap();
7108bool Heap::GcSafeInstructionStreamContains(
7113 Builtin builtin_lookup_result =
7118 instruction_stream->code(kAcquireLoad)->builtin_id()));
7124 return start <= addr && addr < end;
7127std::optional<Tagged<InstructionStream>>
7128Heap::GcSafeTryFindInstructionStreamForInnerPointer(Address inner_pointer) {
7129 std::optional<Address> start =
7131 if (start.has_value()) {
7138std::optional<Tagged<GcSafeCode>> Heap::GcSafeTryFindCodeForInnerPointer(
7146 std::optional<Tagged<InstructionStream>> maybe_istream =
7148 if (!maybe_istream) return {};
7158 std::optional<Tagged<GcSafeCode>> maybe_code =
7161 CHECK(maybe_code.has_value());
7165std::optional<Tagged<Code>> Heap::TryFindCodeForInnerPointerForPrinting(
7169 i::OffHeapInstructionStream::PcIsOffHeap(isolate(), inner_pointer)) {
7170 std::optional<Tagged<GcSafeCode>> maybe_code =
7172 if (maybe_code.has_value()) {
7173 return maybe_code.value()->UnsafeCastToCode();
7180void Heap::IncrementObjectCounters() {
7181 isolate_->counters()->objs_since_last_full()->Increment();
7182 isolate_->counters()->objs_since_last_young()->Increment();
7186bool Heap::IsStressingScavenge() {
7190void Heap::SetIsMarkingFlag(bool value) {
7194uint8_t* Heap::IsMarkingFlagAddress() {
7195 return &isolate()->isolate_data()->is_marking_flag_;
7198void Heap::SetIsMinorMarkingFlag(bool value) {
7199 isolate()->isolate_data()->is_minor_marking_flag_ = value;
7202uint8_t* Heap::IsMinorMarkingFlagAddress() {
7203 return &isolate()->isolate_data()->is_minor_marking_flag_;
7246 heap()->UnregisterStrongRoots(*header);
7251#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
7252void Heap::set_allocation_timeout(int allocation_timeout) {
7253 heap_allocator_->SetAllocationTimeout(allocation_timeout);
7257void Heap::FinishSweepingIfOutOfWork() {
7259 sweeper()->UsingMajorSweeperTasks() &&
7260 !sweeper()->AreMajorSweeperTasksRunning()) {
7265 !sweeper()->HasUnsweptPagesForMajorSweeping());
7276 CompleteArrayBufferSweeping(this);
7285 if (was_major_sweeping_in_progress) {
7289 GCTracer::Scope::MC_COMPLETE_SWEEPING),
7299 } else if (v8_flags.sticky_mark_bits) {
7303 DCHECK(was_minor_sweeping_in_progress);
7308 was_minor_sweeping_in_progress) {
7310 tracer(), GCTracer::Scope::MINOR_MS_COMPLETE_SWEEPING,
7313 GCTracer::Scope::MINOR_MS_COMPLETE_SWEEPING),
7322 EvacuationVerifier verifier(this);
7328 if (mode == SweepingForcedFinalizationMode::kUnifiedHeap && cpp_heap()) {
7336 mode == SweepingForcedFinalizationMode::kUnifiedHeap || !cpp_heap(),
7337 !tracer()->IsSweepingInProgress());
7339 if (v8_flags.external_memory_accounted_in_global_limit) {
7343 new_limits.old_generation_allocation_limit,
7344 new_limits.global_allocation_limit);
7349void Heap::EnsureQuarantinedPagesSweepingCompleted() {
7356void Heap::EnsureYoungSweepingCompleted() {
7357 CompleteArrayBufferSweeping(this);
7367 GCTracer::Scope::MINOR_MS_COMPLETE_SWEEPING),
7376void Heap::NotifyLoadingStarted() {
7383void Heap::NotifyLoadingEnded() {
7393int Heap::NextScriptId() {
7409 last_id_before_cas = last_id;
7412 } while (last_id != last_id_before_cas);
7414 return new_id.value();
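NextScriptId() above loops until a compare-and-swap on the last script id succeeds, so concurrent callers never hand out the same id. A standalone sketch of that retry loop using std::atomic (the plain int id type, memory order, and lack of wrap-around handling are assumptions):

// Illustrative sketch only; not part of heap.cc.
#include <atomic>

namespace example {

inline int NextScriptId(std::atomic<int>& last_script_id) {
  int last_id = last_script_id.load(std::memory_order_relaxed);
  int new_id;
  do {
    new_id = last_id + 1;
    // compare_exchange_weak reloads last_id with the current value on
    // failure, so the loop simply retries with the freshly observed id.
  } while (!last_script_id.compare_exchange_weak(last_id, new_id,
                                                 std::memory_order_relaxed));
  return new_id;
}

}  // namespace example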
7417int Heap::NextDebuggingId() {
7418 int last_id = last_debugging_id().value();
7419 if (last_id == DebugInfo::DebuggingIdBits::kMax) {
7427int Heap::NextStackTraceId() {
7428 int last_id = last_stack_trace_id().value();
7440 old_stack_state_(heap_->embedder_stack_state_),
7441 old_origin_(heap->embedder_stack_state_origin_) {
7446 heap_->embedder_stack_state_ = stack_state;
7447 heap_->embedder_stack_state_origin_ = origin;
7464#if V8_HEAP_USE_PTHREAD_JIT_WRITE_PROTECT || \
7465 V8_HEAP_USE_PKU_JIT_WRITE_PROTECT || V8_HEAP_USE_BECORE_JIT_WRITE_PROTECT
7471 : rwx_write_scope_("Write access for zapping.") {
7472#if !defined(DEBUG) && !defined(VERIFY_HEAP) && !defined(USE_SIMULATOR)
7479 : rwx_write_scope_("Write access for zapping.") {
7480#if !defined(DEBUG) && !defined(VERIFY_HEAP) && !defined(USE_SIMULATOR)
7496#if !defined(DEBUG) && !defined(VERIFY_HEAP) && !defined(USE_SIMULATOR)
7503#if !defined(DEBUG) && !defined(VERIFY_HEAP) && !defined(USE_SIMULATOR)
#define DISALLOW_GARBAGE_COLLECTION(name)
constexpr int kPageSizeBits
bool generational_gc_supported() const
static void EnforcePredictableOrder()
size_t physical_space_size()
size_t space_available_size()
static Isolate * TryGetCurrent()
void(*)(Isolate *isolate, GCType type, GCCallbackFlags flags, void *data) GCCallbackWithData
size_t max_old_generation_size_in_bytes() const
size_t code_range_size_in_bytes() const
size_t initial_young_generation_size_in_bytes() const
size_t max_young_generation_size_in_bytes() const
size_t initial_old_generation_size_in_bytes() const
void PostTask(std::unique_ptr< Task > task, const SourceLocation &location=SourceLocation::Current())
void PostNonNestableTask(std::unique_ptr< Task > task, const SourceLocation &location=SourceLocation::Current())
static const int kNoScriptId
static int GetLastError()
T * insert(T *pos, const T &value)
static constexpr int64_t kMillisecondsPerSecond
double InMillisecondsF() const
constexpr T * begin() const
constexpr T * end() const
ActivateMemoryReducerTask & operator=(const ActivateMemoryReducerTask &)=delete
ActivateMemoryReducerTask(const ActivateMemoryReducerTask &)=delete
~ActivateMemoryReducerTask() override=default
ActivateMemoryReducerTask(Heap *heap)
void RunInternal() override
bool sweeping_in_progress() const
size_t YoungBytes() const
uint64_t GetTraceIdForFlowEvent(GCTracer::Scope::ScopeId scope_id) const
AllocationSpace identity() const
virtual size_t CommittedMemory() const
static constexpr Builtin kFirst
static constexpr bool AllBuiltinsAreIsolateIndependent()
static constexpr bool IsBuiltinId(Builtin builtin)
static constexpr Builtin kLast
static constexpr bool kCodeObjectsAreInROSpace
static constexpr Builtin kLastTier0
static V8_EXPORT_PRIVATE const char * name(Builtin builtin)
CheckHandleCountVisitor()
~CheckHandleCountVisitor() override
void VisitRootPointers(Root root, const char *description, FullObjectSlot start, FullObjectSlot end) override
void VisitRootPointer(Root root, const char *description, FullObjectSlot p) override
void ClearLeftTrimmedOrForward(Root root, const char *description, FullObjectSlot p)
void VisitRunningCode(FullObjectSlot code_slot, FullObjectSlot istream_or_smi_zero_slot) override
bool IsLeftTrimmed(FullObjectSlot p)
void VisitRootPointers(Root root, const char *description, FullObjectSlot start, FullObjectSlot end) override
ClearStaleLeftTrimmedPointerVisitor(Heap *heap, RootVisitor *visitor)
~CodePageMemoryModificationScopeForDebugging()
CodePageMemoryModificationScopeForDebugging(Heap *heap, VirtualMemory *reservation, base::AddressRegion region)
static size_t GetWritableReservedAreaSize()
static void ResetCodeAndMetadataStatistics(Isolate *isolate)
static void CollectCodeStatistics(PagedSpace *space, Isolate *isolate)
~CppClassNamesAsHeapObjectNameScope()
CppClassNamesAsHeapObjectNameScope(v8::CppHeap *heap)
void StartDetachingIsolate()
static CppHeap * From(v8::CppHeap *heap)
size_t allocated_size() const
void FinishSweepingIfOutOfWork()
void FinishAtomicSweepingIfRunning()
void AttachIsolate(Isolate *isolate)
void FinishSweepingIfRunning()
static const int kNoDebuggingId
static void DeoptimizeMarkedCode(Isolate *isolate)
@ kAllocationSiteTenuringChangedGroup
static bool MarkCodeForDeoptimization(Isolate *isolate, Tagged< ObjectT > object, DependencyGroups groups)
std::optional< EmbedderStackStateOrigin > old_origin_
EmbedderStackStateScope(Heap *heap, EmbedderStackStateOrigin origin, StackState stack_state)
const StackState old_stack_state_
~EmbedderStackStateScope()
Isolate * isolate() const
static void SetSize(const WritableFreeSpace &writable_free_space, int size, RelaxedStoreTag)
Tagged< Object > load() const
Tagged< Object > Relaxed_Load() const
void store(Tagged< Object > value) const
Tagged< Object > Relaxed_CompareAndSwap(Tagged< Object > old, Tagged< Object > target) const
GCCallbacksScope(Heap *heap)
bool CheckReenter() const
void Remove(CallbackType callback, void *data)
void Invoke(GCType gc_type, GCCallbackFlags gc_callback_flags) const
void Add(CallbackType callback, v8::Isolate *isolate, GCType gc_type, void *data)
TimedHistogram * type_timer() const
TimedHistogram * type_priority_timer() const
const char * trace_event_name() const
void AddSurvivalRatio(double survival_ratio)
void StopInSafepoint(base::TimeTicks time)
void RecordGCSizeCounters() const
void UpdateCurrentEvent(GarbageCollectionReason gc_reason, const char *collector_reason)
void StopObservablePause(GarbageCollector collector, base::TimeTicks time)
void StartInSafepoint(base::TimeTicks time)
void StopFullCycleIfFinished()
void StartObservablePause(base::TimeTicks time)
void RecordGCPhasesHistograms(RecordGCPhasesInfo::Mode mode)
void NotifyYoungSweepingCompletedAndStopCycleIfFinished()
void ResetSurvivalEvents()
void StartCycle(GarbageCollector collector, GarbageCollectionReason gc_reason, const char *collector_reason, MarkingType marking)
void StopYoungCycleIfFinished()
void NotifyFullSweepingCompletedAndStopCycleIfFinished()
GarbageCollector GetCurrentCollector() const
static const int kCheckHandleThreshold
void SetReadOnlySpace(ReadOnlySpace *)
static void GCEpiloguePrintHeapLayout(v8::Isolate *isolate, v8::GCType gc_type, v8::GCCallbackFlags flags, void *data)
static void GCProloguePrintHeapLayout(v8::Isolate *isolate, v8::GCType gc_type, v8::GCCallbackFlags flags, void *data)
static V8_INLINE bool InYoungGeneration(Tagged< Object > object)
static V8_INLINE bool InAnySharedSpace(Tagged< HeapObject > object)
std::unique_ptr< ObjectIterator > object_iterator_
Tagged< HeapObject > NextObject()
SpaceIterator space_iterator_
std::unique_ptr< HeapObjectsFilter > filter_
HeapObjectIterator(Heap *heap, HeapObjectsFiltering filtering=kNoFiltering)
Tagged< HeapObject > Next()
static Tagged< HeapObject > FromAddress(Address address)
static void SetFillerMap(const WritableFreeSpace &writable_page, Tagged< Map > value)
virtual bool SkipObject(Tagged< HeapObject > object)=0
virtual ~HeapObjectsFilter()=default
void WriteSnapshotToDiskAfterGC(HeapSnapshotMode snapshot_mode=HeapSnapshotMode::kRegular)
void ObjectMoveEvent(Address from, Address to, int size, bool is_native_object)
static const int kStartMarker
static const int kEndMarker
static V8_INLINE Heap * GetOwnerHeap(Tagged< HeapObject > object)
static V8_INLINE void VerifyHeapIfEnabled(Heap *heap)
void UpdateAllocationsHash(uint32_t value)
std::atomic< size_t > allocations_count_
void UpdateObjectSizeEvent(Address, int) final
AllocationTrackerForDebugging(Heap *heap)
void PrintAllocationsHash()
void MoveEvent(Address source, Address target, int size) final
void UpdateAllocationsHash(Tagged< HeapObject > object)
~AllocationTrackerForDebugging() final
void AllocationEvent(Address addr, int size) final
RunningStringHasher raw_allocations_hash_
uint64_t UpdateAmount(int64_t delta)
void UpdateLowSinceMarkCompact(uint64_t amount)
void UpdateLimitForInterrupt(uint64_t amount)
uint64_t limit_for_interrupt() const
uint64_t low_since_mark_compact() const
uint64_t AllocatedSinceMarkCompact() const
uint64_t soft_limit() const
bool Contains(Tagged< String > string)
void UpdateReferences(Heap::ExternalStringTableUpdaterCallback updater_func)
void IterateYoung(RootVisitor *v)
void UpdateYoungReferences(Heap::ExternalStringTableUpdaterCallback updater_func)
std::vector< TaggedBase > old_strings_
std::vector< TaggedBase > young_strings_
void IterateAll(RootVisitor *v)
V8_EXPORT_PRIVATE size_t MaxReserved() const
GarbageCollector SelectGarbageCollector(AllocationSpace space, GarbageCollectionReason gc_reason, const char **reason) const
std::vector< Isolate * > PauseConcurrentThreadsInClients(GarbageCollector collector)
size_t embedder_size_at_last_gc_
ResizeNewSpaceMode resize_new_space_mode_
ExternalStringTable external_string_table_
static constexpr double kLoadTimeNotLoading
std::atomic< size_t > global_allocation_limit_
V8_EXPORT_PRIVATE void FreeLinearAllocationAreas()
Address remembered_unmapped_pages_[kRememberedUnmappedPages]
void CompactRetainedMaps(Tagged< WeakArrayList > retained_maps)
static V8_EXPORT_PRIVATE size_t AllocatorLimitOnMaxOldGenerationSize()
SharedLargeObjectSpace * shared_lo_space() const
static const int kHeapLimitMultiplier
int nodes_copied_in_new_space_
size_t old_generation_size_at_last_gc_
void CompleteSweepingYoung()
std::unique_ptr< ObjectStats > live_object_stats_
bool HasDirtyJSFinalizationRegistries()
std::unique_ptr< MinorMarkSweepCollector > minor_mark_sweep_collector_
V8_EXPORT_PRIVATE void FinalizeIncrementalMarkingAtomically(GarbageCollectionReason gc_reason)
size_t MaxOldGenerationSize()
void ConfigureHeap(const v8::ResourceConstraints &constraints, v8::CppHeap *cpp_heap)
V8_EXPORT_PRIVATE bool CanExpandOldGeneration(size_t size) const
V8_INLINE uint64_t external_memory() const
bool update_allocation_limits_after_loading_
OldGenerationExpansionNotificationOrigin
std::unique_ptr< MemoryReducer > memory_reducer_
void PerformHeapVerification()
NewSpace * new_space() const
GCFlags current_gc_flags_
void GarbageCollectionPrologue(GarbageCollectionReason gc_reason, const v8::GCCallbackFlags gc_callback_flags)
void ClearRecordedSlotRange(Address start, Address end)
SharedTrustedSpace * shared_trusted_space_
V8_EXPORT_PRIVATE void StartIncrementalMarkingIfAllocationLimitIsReached(LocalHeap *local_heap, GCFlags gc_flags, GCCallbackFlags gc_callback_flags=GCCallbackFlags::kNoGCCallbackFlags)
std::unique_ptr< ArrayBufferSweeper > array_buffer_sweeper_
bool IsTearingDown() const
void set_native_contexts_list(Tagged< Object > object)
void set_allocation_sites_list(Tagged< UnionOf< Smi, Undefined, AllocationSiteWithWeakNext > > object)
void PrintMaxMarkingLimitReached()
static bool IsYoungGenerationCollector(GarbageCollector collector)
bool HasLowEmbedderAllocationRate()
size_t min_old_generation_size_
std::unique_ptr< CppHeap > owning_cpp_heap_
ExternalMemoryAccounting external_memory_
bool AllocationLimitOvershotByLargeMargin() const
NewLargeObjectSpace * new_lo_space_
HeapState gc_state() const
std::unique_ptr< MemoryMeasurement > memory_measurement_
void CollectGarbageOnMemoryPressure()
SharedSpace * shared_space() const
SharedTrustedSpace * shared_trusted_allocation_space_
size_t maximum_committed_
double PercentToOldGenerationLimit() const
SharedLargeObjectSpace * shared_lo_space_
V8_EXPORT_PRIVATE size_t YoungGenerationSizeOfObjects() const
void SetOldGenerationAndGlobalAllocationLimit(size_t new_old_generation_allocation_limit, size_t new_global_allocation_limit)
PagedSpace * shared_allocation_space_
bool sweeping_in_progress() const
void MarkCompactPrologue()
StrongRootsEntry * strong_roots_head_
OldLargeObjectSpace * lo_space() const
std::vector< std::pair< v8::NearHeapLimitCallback, void * > > near_heap_limit_callbacks_
V8_EXPORT_PRIVATE void RemoveHeapObjectAllocationTracker(HeapObjectAllocationTracker *tracker)
NewLargeObjectSpace * new_lo_space() const
bool use_new_space() const
void AddGCEpilogueCallback(v8::Isolate::GCCallbackWithData callback, GCType gc_type_filter, void *data)
size_t initial_old_generation_size_
static const int kRememberedUnmappedPages
void RecomputeLimitsAfterLoadingIfNeeded()
void CreateFillerObjectAtRaw(const WritableFreeSpace &free_space, ClearFreedMemoryMode clear_memory_mode, ClearRecordedSlots clear_slots_mode, VerifyNoSlotsRecorded verify_no_slots_recorded)
char trace_ring_buffer_[kTraceRingBufferSize]
size_t previous_new_space_surviving_object_size_
MemoryReducer * memory_reducer()
ResizeNewSpaceMode ShouldResizeNewSpace()
void GarbageCollectionEpilogueInSafepoint(GarbageCollector collector)
static constexpr int kRetainMapEntrySize
MarkCompactCollector * mark_compact_collector()
std::atomic< v8::MemoryPressureLevel > memory_pressure_level_
void CheckIneffectiveMarkCompact(size_t old_generation_size, double mutator_utilization)
V8_EXPORT_PRIVATE void CollectAllAvailableGarbage(GarbageCollectionReason gc_reason)
std::unique_ptr< ObjectStats > dead_object_stats_
std::unique_ptr< CollectionBarrier > collection_barrier_
size_t max_global_memory_size_
static GarbageCollector YoungGenerationCollector()
std::atomic< size_t > max_old_generation_size_
size_t old_generation_capacity_after_bootstrap_
void ConfigureHeapDefault()
V8_EXPORT_PRIVATE void SetGCState(HeapState state)
CodeLargeObjectSpace * code_lo_space_
void AddGCPrologueCallback(v8::Isolate::GCCallbackWithData callback, GCType gc_type_filter, void *data)
LocalHeap * main_thread_local_heap()
void CheckCollectionRequested()
size_t old_generation_allocation_limit() const
V8_EXPORT_PRIVATE size_t YoungGenerationWastedBytes() const
void OnMoveEvent(Tagged< HeapObject > source, Tagged< HeapObject > target, int size_in_bytes)
Tagged< Object > dirty_js_finalization_registries_list()
size_t max_old_generation_size() const
void IterateStackRoots(RootVisitor *v)
double new_space_surviving_rate_
size_t GlobalMemoryAvailable()
V8_EXPORT_PRIVATE void CreateFillerObjectAt(Address addr, int size, ClearFreedMemoryMode clear_memory_mode=ClearFreedMemoryMode::kDontClearFreedMemory)
std::atomic< HeapState > gc_state_
TrustedSpace * trusted_space_
bool delay_sweeper_tasks_for_testing_
Tagged< Map > GcSafeMapOfHeapObject(Tagged< HeapObject > object)
static const int kMaxLoadTimeMs
void CheckMemoryPressure()
IncrementalMarking * incremental_marking() const
V8_INLINE RootsTable & roots_table()
SharedTrustedLargeObjectSpace * shared_trusted_lo_space_
bool InvokeNearHeapLimitCallback()
void RememberUnmappedPage(Address page, bool compacted)
void ReportStatisticsAfterGC()
void ProcessNativeContexts(WeakObjectRetainer *retainer)
base::Mutex strong_roots_mutex_
size_t max_semi_space_size_
void SetGetExternallyAllocatedMemoryInBytesCallback(GetExternallyAllocatedMemoryInBytesCallback callback)
std::unique_ptr< Sweeper > sweeper_
bool ShouldStressCompaction() const
std::unique_ptr< CodeRange > code_range_
OldSpace * old_space() const
GCCallbacks gc_epilogue_callbacks_
ArrayBufferSweeper * array_buffer_sweeper()
ConcurrentMarking * concurrent_marking() const
void ReduceNewSpaceSize()
void MarkCompactEpilogue()
static bool InFromPage(Tagged< Object > object)
bool IsGCWithMainThreadStack() const
V8_EXPORT_PRIVATE size_t OldGenerationSizeOfObjects() const
V8_EXPORT_PRIVATE void StartIncrementalMarking(GCFlags gc_flags, GarbageCollectionReason gc_reason, GCCallbackFlags gc_callback_flags=GCCallbackFlags::kNoGCCallbackFlags, GarbageCollector collector=GarbageCollector::MARK_COMPACTOR)
std::unique_ptr< AllocationTrackerForDebugging > allocation_tracker_for_debugging_
size_t old_generation_wasted_at_last_gc_
std::unique_ptr< ConcurrentMarking > concurrent_marking_
ReadOnlySpace * read_only_space_
static V8_EXPORT_PRIVATE size_t SemiSpaceSizeFromYoungGenerationSize(size_t young_generation_size)
V8_EXPORT_PRIVATE void CollectAllGarbage(GCFlags gc_flags, GarbageCollectionReason gc_reason, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
V8_EXPORT_PRIVATE Tagged< HeapObject > PrecedeWithFillerBackground(Tagged< HeapObject > object, int filler_size)
void RemoveDirtyFinalizationRegistriesOnContext(Tagged< NativeContext > context)
OldLargeObjectSpace * shared_lo_allocation_space_
void GetFromRingBuffer(char *buffer)
V8_EXPORT_PRIVATE size_t EmbedderSizeOfObjects() const
static const size_t kMaxInitialOldGenerationSize
std::unique_ptr< MemoryBalancer > mb_
bool IsIneffectiveMarkCompact(size_t old_generation_size, double mutator_utilization)
V8_EXPORT_PRIVATE void MakeHeapIterable()
void RecomputeLimits(GarbageCollector collector, base::TimeTicks time)
static constexpr size_t kPhysicalMemoryToOldGenerationRatio
HeapProfiler * heap_profiler() const
std::unique_ptr< HeapProfiler > heap_profiler_
bool HasLowYoungGenerationAllocationRate()
std::unique_ptr< IncrementalMarking > incremental_marking_
V8_EXPORT_PRIVATE size_t OldGenerationWastedBytes() const
bool always_allocate() const
std::unique_ptr< GCTracer > tracer_
void IterateWeakRoots(RootVisitor *v, base::EnumSet< SkipRoot > options)
MinorMarkSweepCollector * minor_mark_sweep_collector()
TrustedSpace * trusted_space() const
V8_EXPORT_PRIVATE bool CollectGarbageFromAnyThread(LocalHeap *local_heap, GarbageCollectionReason gc_reason=GarbageCollectionReason::kBackgroundAllocationFailure)
void EagerlyFreeExternalMemoryAndWasmCode()
bool is_current_gc_forced_
MinorGCJob * minor_gc_job()
V8_EXPORT_PRIVATE bool CanPromoteYoungAndExpandOldGeneration(size_t size) const
StickySpace * sticky_space() const
V8_EXPORT_PRIVATE double MonotonicallyIncreasingTimeInMs() const
bool major_sweeping_in_progress() const
int max_regular_code_object_size_
std::vector< HeapObjectAllocationTracker * > allocation_trackers_
bool CollectionRequested()
V8_EXPORT_PRIVATE size_t YoungGenerationConsumedBytes() const
base::SmallVector< v8::Isolate::UseCounterFeature, 8 > deferred_counters_
void FinishSweepingIfOutOfWork()
HeapGrowingMode CurrentHeapGrowingMode()
uint64_t external_memory_hard_limit()
static V8_EXPORT_PRIVATE size_t YoungGenerationSizeFromSemiSpaceSize(size_t semi_space_size)
static V8_EXPORT_PRIVATE size_t YoungGenerationSizeFromOldGenerationSize(size_t old_generation_size)
static constexpr size_t kOldGenerationLowMemory
void MakeLinearAllocationAreasIterable()
size_t NewSpaceTargetCapacity() const
void set_dirty_js_finalization_registries_list_tail(Tagged< Object > object)
size_t initial_max_old_generation_size_
void IterateRoots(RootVisitor *v, base::EnumSet< SkipRoot > options, IterateRootsMode roots_mode=IterateRootsMode::kMainIsolate)
bool is_finalization_registry_cleanup_task_posted_
size_t OldGenerationSpaceAvailable()
bool HasBeenSetUp() const
StrongRootsEntry * RegisterStrongRoots(const char *label, FullObjectSlot start, FullObjectSlot end)
std::unique_ptr< ScavengerCollector > scavenger_collector_
int nodes_died_in_new_space_
std::unique_ptr< MarkCompactCollector > mark_compact_collector_
bool InOldSpace(Tagged< Object > object)
MemoryAllocator * memory_allocator()
void ExpandNewSpaceSize()
void GarbageCollectionPrologueInSafepoint()
void RestoreHeapLimit(size_t heap_limit)
void IterateConservativeStackRoots(RootVisitor *root_visitor, IterateRootsMode roots_mode=IterateRootsMode::kMainIsolate)
V8_EXPORT_PRIVATE size_t OldGenerationConsumedBytes() const
static const int kTraceRingBufferSize
V8_EXPORT_PRIVATE size_t GlobalWastedBytes() const
V8_EXPORT_PRIVATE void EnsureSweepingCompleted(SweepingForcedFinalizationMode mode)
V8_EXPORT_PRIVATE void CompleteSweepingFull()
V8_EXPORT_PRIVATE void DisableInlineAllocation()
Tagged< GcSafeCode > GcSafeFindCodeForInnerPointer(Address inner_pointer)
void CreateFillerObjectAtBackground(const WritableFreeSpace &free_space)
IsolateSafepoint * safepoint()
static size_t DefaultMinSemiSpaceSize()
static V8_EXPORT_PRIVATE size_t DefaultMaxSemiSpaceSize()
uintptr_t mmap_region_base_
GCCallbackFlags current_gc_callback_flags_
V8_EXPORT_PRIVATE size_t OldGenerationConsumedBytesAtLastGC() const
bool inline_allocation_enabled_
size_t MaximumCommittedMemory()
void IterateBuiltins(RootVisitor *v)
GarbageCollector current_or_last_garbage_collector_
SharedSpace * shared_space_
CodeLargeObjectSpace * code_lo_space() const
std::atomic< size_t > old_generation_allocation_limit_
int remembered_unmapped_pages_index_
EmbedderRootsHandler * embedder_roots_handler_
V8_EXPORT_PRIVATE bool ShouldOptimizeForLoadTime() const
void AttachCppHeap(v8::CppHeap *cpp_heap)
TrustedLargeObjectSpace * trusted_lo_space() const
void ProcessAllocationSites(WeakObjectRetainer *retainer)
static constexpr size_t kNewLargeObjectSpaceToSemiSpaceRatio
GCFlags GCFlagsForIncrementalMarking()
bool is_current_gc_for_heap_profiler_
Tagged< GcSafeCode > GcSafeGetCodeFromInstructionStream(Tagged< HeapObject > instruction_stream, Address inner_pointer)
std::unique_ptr< MemoryAllocator > memory_allocator_
std::shared_ptr< v8::TaskRunner > task_runner_
StackState embedder_stack_state_
Tagged< UnionOf< Smi, Undefined, AllocationSiteWithWeakNext > > allocation_sites_list()
SemiSpaceNewSpace * semi_space_new_space() const
base::TimeDelta total_gc_time_ms_
bool CanMoveObjectStart(Tagged< HeapObject > object)
bool initial_size_overwritten_
void FlushNumberStringCache()
bool need_to_remove_stress_concurrent_allocation_observer_
IncrementalMarkingLimit IncrementalMarkingLimitReached()
void GarbageCollectionEpilogue(GarbageCollector collector)
int NextStressMarkingLimit()
V8_EXPORT_PRIVATE void EnableInlineAllocation()
CodeSpace * code_space() const
size_t global_allocation_limit() const
std::optional< EmbedderStackStateOrigin > embedder_stack_state_origin_
void set_dirty_js_finalization_registries_list(Tagged< Object > object)
MarkingState * marking_state()
bool using_initial_limit() const
void UpdateMaximumCommitted()
LocalHeap * main_thread_local_heap_
static size_t OldGenerationToSemiSpaceRatio()
TrustedLargeObjectSpace * trusted_lo_space_
V8_EXPORT_PRIVATE size_t GlobalConsumedBytesAtLastGC() const
size_t CommittedOldGenerationMemory()
std::optional< Tagged< InstructionStream > > GcSafeTryFindInstructionStreamForInnerPointer(Address inner_pointer)
size_t min_global_memory_size_
int stress_marking_percentage_
std::optional< Tagged< GcSafeCode > > GcSafeTryFindCodeForInnerPointer(Address inner_pointer)
void EnsureYoungSweepingCompleted()
bool IsStressingScavenge()
bool HighMemoryPressure()
uint64_t backing_store_bytes() const
std::unique_ptr< IsolateSafepoint > safepoint_
V8_EXPORT_PRIVATE void FatalProcessOutOfMemory(const char *location)
Tagged< Object > native_contexts_list() const
SharedTrustedLargeObjectSpace * shared_trusted_lo_allocation_space_
bool ShouldUseBackgroundThreads() const
size_t survived_since_last_expansion_
size_t initial_semispace_size_
std::unique_ptr< AllocationObserver > stress_concurrent_allocation_observer_
size_t min_semi_space_size_
V8_EXPORT_PRIVATE bool InSpaceSlow(Address addr, AllocationSpace space) const
V8_EXPORT_PRIVATE size_t GlobalSizeOfObjects() const
std::unique_ptr< MinorGCJob > minor_gc_job_
void ProcessDirtyJSFinalizationRegistries(WeakObjectRetainer *retainer)
V8_EXPORT_PRIVATE size_t GlobalConsumedBytes() const
size_t new_space_surviving_object_size_
void AddAllocationObserversToAllSpaces(AllocationObserver *observer, AllocationObserver *new_space_observer)
PagedNewSpace * paged_new_space() const
V8_EXPORT_PRIVATE void AddHeapObjectAllocationTracker(HeapObjectAllocationTracker *tracker)
int consecutive_ineffective_mark_compacts_
OldLargeObjectSpace * lo_space_
static const int kPointerMultiplier
void PrintMaxNewSpaceSizeReached()
V8_EXPORT_PRIVATE uint64_t AllocatedExternalMemorySinceMarkCompact() const
V8_EXPORT_PRIVATE void CollectGarbage(AllocationSpace space, GarbageCollectionReason gc_reason, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
static V8_EXPORT_PRIVATE size_t MinOldGenerationSize()
std::vector< Handle< NativeContext > > FindAllNativeContexts()
V8_EXPORT_PRIVATE bool ShouldOptimizeForMemoryUsage()
static const int kOldSurvivalRateLowThreshold
static V8_EXPORT_PRIVATE int GetFillToAlign(Address address, AllocationAlignment alignment)
V8_EXPORT_PRIVATE::heap::base::Stack & stack()
bool HasLowOldGenerationAllocationRate()
size_t new_space_allocation_counter_
size_t NewSpaceCapacity() const
double PercentToGlobalMemoryLimit() const
size_t initial_max_old_generation_size_threshold_
bool deserialization_complete_
bool GcSafeInstructionStreamContains(Tagged< InstructionStream > instruction_stream, Address addr)
void UpdateOldGenerationAllocationCounter()
void ShrinkOldGenerationAllocationLimitIfNotConfigured()
void set_using_initial_limit(bool value)
StressScavengeObserver * stress_scavenge_observer_
void ForeachAllocationSite(Tagged< Object > list, const std::function< void(Tagged< AllocationSite >)> &visitor)
PretenuringHandler pretenuring_handler_
void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc)
v8::CppHeap * cpp_heap() const
static LimitsCompuatationResult ComputeNewAllocationLimits(Heap *heap)
size_t old_generation_allocation_counter_at_last_gc_
SharedTrustedSpace * shared_trusted_space() const
double ComputeMutatorUtilization(const char *tag, double mutator_speed, std::optional< double > gc_speed)
bool IsGCWithStack() const
void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags, GCTracer::Scope::ScopeId scope_id)
Isolate * isolate() const
HeapAllocator * heap_allocator_
void RemoveAllocationObserversFromAllSpaces(AllocationObserver *observer, AllocationObserver *new_space_observer)
static V8_EXPORT_PRIVATE void GenerationSizesFromHeapSize(size_t heap_size, size_t *young_generation_size, size_t *old_generation_size)
void ResumeConcurrentThreadsInClients(std::vector< Isolate * > paused_clients)
void ResetOldGenerationAndGlobalAllocationLimit()
void PerformGarbageCollection(GarbageCollector collector, GarbageCollectionReason gc_reason, const char *collector_reason)
void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags, GCTracer::Scope::ScopeId scope_id)
std::atomic< double > max_marking_limit_reached_
size_t promoted_objects_size_
void SetOldGenerationAndGlobalMaximumSize(size_t max_old_generation_size)
void EnsureQuarantinedPagesSweepingCompleted()
void NotifyObjectSizeChange(Tagged< HeapObject >, int old_size, int new_size, ClearRecordedSlots clear_recorded_slots)
SweepingForcedFinalizationMode
GCCallbacks gc_prologue_callbacks_
std::atomic< double > load_start_time_ms_
GetExternallyAllocatedMemoryInBytesCallback external_memory_callback_
V8_EXPORT_PRIVATE void SetStackStart()
bool is_full_gc_during_loading_
bool minor_sweeping_in_progress() const
static size_t OldGenerationToSemiSpaceRatioLowMemory()
V8_EXPORT_PRIVATE size_t OldGenerationCapacity() const
void UpdateSurvivalStatistics(int start_new_space_size)
Tagged< Object > dirty_js_finalization_registries_list_tail()
bool deserialization_complete() const
V8_EXPORT_PRIVATE void FinalizeIncrementalMarkingAtomicallyIfRunning(GarbageCollectionReason gc_reason)
V8_EXPORT_PRIVATE size_t NewSpaceAllocationCounter() const
bool ShouldReduceMemory() const
bool force_gc_on_next_allocation_
static const uintptr_t kMmapRegionMask
V8_EXPORT_PRIVATE void FreeMainThreadLinearAllocationAreas()
V8_EXPORT_PRIVATE size_t SizeOfObjects()
static V8_EXPORT_PRIVATE bool IsLargeObject(Tagged< HeapObject > object)
void ResetAllAllocationSitesDependentCode(AllocationType allocation)
void RunInternal(double deadline_in_seconds) override
void TryRunMinorGC(const base::TimeDelta idle_time)
IdleTaskOnContextDispose(Isolate *isolate)
static void TryPostJob(Heap *heap)
void AdvanceAndFinalizeIfNecessary()
void Start(GarbageCollector garbage_collector, GarbageCollectionReason gc_reason)
static Tagged< InstructionStream > FromTargetAddress(Address address)
V8_EXPORT_PRIVATE void AssertMainThreadIsOnlyThread()
void IterateLocalHeaps(Callback callback)
void PrintStack(StringStream *accumulator, PrintStackMode mode=kPrintStackVerbose)
Isolate * shared_space_isolate() const
static constexpr size_t kMaxByteLength
static const int kInitialMaxFastElementArray
static void PrototypeRegistryCompactionCallback(Tagged< HeapObject > value, int old_index, int new_index)
bool Contains(Tagged< HeapObject > obj) const
size_t Size() const override
size_t Available() const override
bool ContainsSlow(Address addr) const
size_t SizeOfObjects() const override
ThreadState SetCollectionRequested()
ThreadState ClearCollectionRequested()
constexpr bool IsRunning() const
constexpr bool IsParked() const
V8_WARN_UNUSED_RESULT AllocationResult AllocateRaw(int size_in_bytes, AllocationType allocation, AllocationOrigin origin=AllocationOrigin::kRuntime, AllocationAlignment alignment=kTaggedAligned)
bool is_main_thread() const
void InvokeGCEpilogueCallbacksInSafepoint(GCCallbacksInSafepoint::GCType gc_type)
void MarkSharedLinearAllocationAreasBlack()
::heap::base::Stack stack_
void UnmarkSharedLinearAllocationsArea()
bool IsRetryOfFailedAllocation() const
bool is_main_thread_for(Heap *heap) const
HeapAllocator heap_allocator_
MarkingBarrier * marking_barrier()
void SetUpMainThread(LinearAllocationArea &new_allocation_info, LinearAllocationArea &old_allocation_info)
void FreeLinearAllocationAreas()
void MakeLinearAllocationAreasIterable()
void FreeSharedLinearAllocationAreasAndResetFreeLists()
Tagged< Map > ToMap() const
bool IsForwardingAddress() const
static MapWord FromMap(const Tagged< Map > map)
Tagged< HeapObject > ToForwardingAddress(Tagged< HeapObject > map_word_host)
void RecordStrongDescriptorArraysForWeakening(GlobalHandleVector< DescriptorArray > strong_descriptor_arrays)
void PublishSharedIfNeeded()
static V8_INLINE constexpr MarkBitIndex LimitAddressToIndex(Address address)
static V8_INLINE constexpr MarkBitIndex AddressToIndex(Address address)
V8_WARN_UNUSED_RESULT V8_INLINE bool ToHandle(Handle< S > *out) const
V8_EXPORT_PRIVATE void TearDown()
V8_EXPORT_PRIVATE size_t GetPooledChunksCount()
static void InitializeOncePerProcess()
size_t SizeExecutable() const
static constexpr int MaxRegularCodeObjectSize()
static constexpr size_t ObjectStartOffsetInDataPage()
V8_INLINE MemoryChunkMetadata * Metadata()
static V8_INLINE MemoryChunk * FromAddress(Address addr)
bool NeverEvacuate() const
size_t Offset(Address addr) const
V8_INLINE bool InYoungGeneration() const
static V8_INLINE MemoryChunk * FromHeapObject(Tagged< HeapObject > object)
V8_INLINE bool InReadOnlySpace() const
static double GrowingFactor(Heap *heap, size_t max_heap_size, std::optional< double > gc_speed, double mutator_speed, Heap::HeapGrowingMode growing_mode)
static size_t MinimumAllocationLimitGrowingStep(Heap::HeapGrowingMode growing_mode)
static size_t BoundAllocationLimit(Heap *heap, size_t current_size, uint64_t limit, size_t min_size, size_t max_size, size_t new_space_capacity, Heap::HeapGrowingMode growing_mode)
MemoryPressureInterruptTask(const MemoryPressureInterruptTask &)=delete
~MemoryPressureInterruptTask() override=default
MemoryPressureInterruptTask & operator=(const MemoryPressureInterruptTask &)=delete
MemoryPressureInterruptTask(Heap *heap)
void RunInternal() override
void NotifyPossibleGarbage()
void CancelTaskIfScheduled()
static MutablePageMetadata * cast(MemoryChunkMetadata *metadata)
static const int kPageSize
AllocationSpace owner_identity() const
static void MoveExternalBackingStoreBytes(ExternalBackingStoreType type, MutablePageMetadata *from, MutablePageMetadata *to, size_t amount)
bool SweepingDone() const
static V8_INLINE MutablePageMetadata * FromHeapObject(Tagged< HeapObject > o)
void SetCapacity(size_t capacity)
size_t Available() const override
size_t ExternalBackingStoreOverallBytes() const
virtual size_t MinimumCapacity() const =0
virtual bool ContainsSlow(Address a) const =0
virtual size_t Capacity() const =0
virtual void Grow(size_t new_capacity)=0
bool Contains(Tagged< Object > o) const
virtual size_t AllocatedSinceLastGC() const =0
virtual size_t MaximumCapacity() const =0
virtual size_t TotalCapacity() const =0
static V8_INLINE void Lock(Tagged< HeapObject > heap_object)
static V8_INLINE void Unlock(Tagged< HeapObject > heap_object)
static constexpr int OBJECT_STATS_COUNT
PtrComprCageBase code_cage_base() const
PtrComprCageBase cage_base() const
static Builtin TryLookupCode(Isolate *isolate, Address address)
static HandleType< OrderedHashSet >::MaybeType Add(Isolate *isolate, HandleType< OrderedHashSet > table, DirectHandle< Object > value)
static PageMetadata * cast(MemoryChunkMetadata *metadata)
static V8_INLINE PageMetadata * FromHeapObject(Tagged< HeapObject > o)
static V8_INLINE PageMetadata * FromAddress(Address addr)
void ReleaseOnTearDown(Isolate *isolate)
PagedSpaceForNewSpace * paged_space()
bool StartShrinking(size_t new_target_capacity)
bool Contains(Address a) const
size_t Available() const override
bool ContainsSlow(Address addr) const
size_t Size() const override
virtual void RefillFreeList()
size_t UsableCapacity() const
void ProcessPretenuringFeedback(size_t new_space_capacity_before_gc)
void RemoveAllocationSitePretenuringFeedback(Tagged< AllocationSite > site)
static Tagged< WeakArrayList > Compact(DirectHandle< WeakArrayList > array, Heap *heap, CompactionCallback callback, AllocationType allocation=AllocationType::kYoung)
static V8_EXPORT_PRIVATE bool Contains(Address address)
ReadOnlySpace * read_only_space() const
void Iterate(RootVisitor *visitor)
virtual V8_EXPORT_PRIVATE void TearDown(MemoryAllocator *memory_allocator)
size_t Size() const override
bool ContainsSlow(Address addr) const
static void Clear(Heap *heap)
static void Clear(Tagged< FixedArray > cache)
static void Iterate(Isolate *isolate, RootVisitor *v)
static void PostGarbageCollectionProcessing(Isolate *isolate)
static void Insert(MutablePageMetadata *page, size_t slot_offset)
static void RemoveRange(MutablePageMetadata *chunk, Address start, Address end, SlotSet::EmptyBucketMode mode)
static void CheckNoneInRange(MutablePageMetadata *page, Address start, Address end)
virtual void VisitRootPointers(Root root, const char *description, FullObjectSlot start, FullObjectSlot end)=0
virtual void VisitRunningCode(FullObjectSlot code_slot, FullObjectSlot istream_or_smi_zero_slot)
virtual void Synchronize(VisitorSynchronization::SyncTag tag)
virtual void VisitRootPointer(Root root, const char *description, FullObjectSlot p)
V8_INLINE void AddCharacter(uint16_t c)
V8_INLINE uint32_t Finalize()
void Shrink(size_t new_capacity)
size_t QuarantinedPageCount() const
size_t CurrentCapacitySafe() const
static void IterateStartupObjectCache(Isolate *isolate, RootVisitor *visitor)
static void IterateSharedHeapObjectCache(Isolate *isolate, RootVisitor *visitor)
static constexpr Tagged< Smi > FromInt(int value)
static constexpr Tagged< Smi > zero()
static constexpr int kMaxValue
virtual std::unique_ptr< ObjectIterator > GetObjectIterator(Heap *heap)=0
size_t old_objects_size() const
size_t young_objects_size() const
void Step(int bytes_allocated, Address, size_t) override
StressConcurrentAllocationObserver(Heap *heap)
static void Schedule(Isolate *isolate)
void RunInternal() override
StressConcurrentAllocationTask(Isolate *isolate)
double MaxNewSpaceSizeReached() const
bool HasRequestedGC() const
StrongRootAllocatorBase(Heap *heap)
Address * allocate_impl(size_t n)
void deallocate_impl(Address *p, size_t n) noexcept
void EnsureMajorCompleted()
void ContributeAndWaitForPromotedPagesIteration()
void EnsurePageIsSwept(PageMetadata *page)
void EnsureMinorCompleted()
static constexpr int SizeFor(int capacity)
V8_INLINE constexpr StorageType ptr() const
constexpr bool IsCleared() const
bool GetHeapObject(Tagged< HeapObject > *result) const
constexpr bool IsWeak() const
V8_INLINE constexpr bool is_null() const
static std::optional< Address > StartOfJitAllocationAt(Address inner_pointer)
V8_INLINE void MarkPointersImpl(TSlot start, TSlot end)
void VisitRootPointers(Root root, const char *description, FullObjectSlot start, FullObjectSlot end) override
UnreachableObjectsFilter * filter_
void VisitMapPointer(Tagged< HeapObject > object) override
void VisitPointers(Tagged< HeapObject > host, MaybeObjectSlot start, MaybeObjectSlot end) final
void VisitRootPointers(Root root, const char *description, OffHeapObjectSlot start, OffHeapObjectSlot end) override
void VisitCodeTarget(Tagged< InstructionStream > host, RelocInfo *rinfo) final
std::vector< Tagged< HeapObject > > marking_stack_
void VisitPointers(Tagged< HeapObject > host, ObjectSlot start, ObjectSlot end) override
void VisitInstructionStreamPointer(Tagged< Code > host, InstructionStreamSlot slot) override
V8_INLINE void MarkHeapObject(Tagged< HeapObject > heap_object)
void VisitEmbeddedPointer(Tagged< InstructionStream > host, RelocInfo *rinfo) final
MarkingVisitor(UnreachableObjectsFilter *filter)
~UnreachableObjectsFilter() override=default
bool SkipObject(Tagged< HeapObject > object) override
std::unordered_map< MemoryChunkMetadata *, std::unique_ptr< BucketType >, base::hash< MemoryChunkMetadata * > > reachable_
void MarkReachableObjects()
std::unordered_set< Tagged< HeapObject >, Object::Hasher > BucketType
bool MarkAsReachable(Tagged< HeapObject > object)
UnreachableObjectsFilter(Heap *heap)
static V8_EXPORT_PRIVATE v8::Platform * GetCurrentPlatform()
static V8_EXPORT_PRIVATE void FatalProcessOutOfMemory(Isolate *isolate, const char *location, const OOMDetails &details=kNoOOMDetails)
static V8_EXPORT_PRIVATE const OOMDetails kHeapOOM
static Handle< WeakArrayList > EnsureSpace(Isolate *isolate, Handle< WeakArrayList > array, int length, AllocationType allocation=AllocationType::kYoung)
virtual Tagged< Object > RetainAs(Tagged< Object > object)=0
void ClearTagged(size_t count) const
static V8_INLINE WritableFreeSpace ForNonExecutableMemory(base::Address addr, size_t size)
base::Address Address() const
V8_INLINE WritableFreeSpace FreeRange(Address addr, size_t size)
static void ForRange(Heap *heap, Tagged< HeapObject > object, TSlot start, TSlot end)
std::pair< size_t, size_t > FlushLiftoffCode()
#define PROFILE(the_isolate, Call)
#define V8_COMPRESS_POINTERS_8GB_BOOL
#define ALIGN_TO_ALLOCATION_ALIGNMENT(value)
#define V8_ENABLE_SANDBOX_BOOL
DeclarationScope * scope_
std::unique_ptr< SafepointScope > safepoint_scope_
MicrotaskQueue * microtask_queue
DisallowGarbageCollection no_gc_
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in allocation gc speed threshold for starting incremental marking via a task in percent of available threshold for starting incremental marking immediately in percent of available Use a single schedule for determining a marking schedule between JS and C objects schedules the minor GC task with kUserVisible priority max worker number of concurrent for NumberOfWorkerThreads start background threads that allocate memory concurrent_array_buffer_sweeping use parallel threads to clear weak refs in the atomic pause trace progress of the incremental marking trace object counts and memory usage * MB
#define TRACE_GC_EPOCH(tracer, scope_id, thread_kind)
#define TRACE_GC_EPOCH_WITH_FLOW(tracer, scope_id, thread_kind, bind_id, flow_flags)
#define TRACE_GC(tracer, scope_id)
#define DEF_RIGHT_TRIM(T)
#define COMPARE_AND_RETURN_NAME(name)
#define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space)
#define UPDATE_COUNTERS_FOR_SPACE(space)
ZoneVector< RpoNumber > & result
ZoneStack< RpoNumber > & stack
LiftoffAssembler::CacheState state
#define LOG(isolate, Call)
#define LOG_CODE_EVENT(isolate, Call)
NonAtomicMarkingState * marking_state_
V8_BASE_EXPORT constexpr uint64_t RoundUpToPowerOfTwo64(uint64_t value)
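A sketch of the classic bit-smearing way to round up to a power of two, matching the RoundUpToPowerOfTwo64 declaration above (assumption: an input of 0 maps to 0 and inputs above 2^63 are not passed in):

#include <cstdint>

constexpr uint64_t RoundUpToPowerOfTwo64(uint64_t value) {
  value--;                 // handle exact powers of two
  value |= value >> 1;     // smear the highest set bit downwards
  value |= value >> 2;
  value |= value >> 4;
  value |= value >> 8;
  value |= value >> 16;
  value |= value >> 32;
  return value + 1;
}

static_assert(RoundUpToPowerOfTwo64(3) == 4);
static_assert(RoundUpToPowerOfTwo64(64) == 64);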
constexpr Vector< T > VectorOf(T *start, size_t size)
void * Malloc(size_t size)
WasmEngine * GetWasmEngine()
V8_INLINE IndirectHandle< T > handle(Tagged< T > object, Isolate *isolate)
static constexpr FreeListCategoryType kFirstCategory
@ kFinalizeMarkingViaStackGuard
@ kBackgroundAllocationFailure
@ kFinalizeConcurrentMinorMS
@ kExternalMemoryPressure
constexpr int kSpaceTagSize
constexpr const char * ToString(DeoptimizeKind kind)
constexpr int kTaggedSize
constexpr Address kTaggedNullAddress
template Tagged< Object > VisitWeakList< AllocationSiteWithWeakNext >(Heap *heap, Tagged< Object > list, WeakObjectRetainer *retainer)
constexpr int kMaxRegularHeapObjectSize
static void ReturnNull(const v8::FunctionCallbackInfo< v8::Value > &info)
SlotTraits::TObjectSlot ObjectSlot
void PrintF(const char *format,...)
int32_t FreeListCategoryType
Tagged(T object) -> Tagged< T >
V8_INLINE constexpr bool IsSmi(TaggedImpl< kRefType, StorageType > obj)
void MemsetTagged(Tagged_t *start, Tagged< MaybeObject > value, size_t counter)
void VisitObject(Isolate *isolate, Tagged< HeapObject > object, ObjectVisitor *visitor)
void * GetRandomMmapAddr()
constexpr bool IsAnyCodeSpace(AllocationSpace space)
void Print(Tagged< Object > obj)
Handle< To > UncheckedCast(Handle< From > value)
base::Flags< GCFlag, uint8_t > GCFlags
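GCFlags is a base::Flags instantiation, i.e. a type-safe bitset over an enum. A simplified stand-in for that pattern; the enumerator names here are illustrative, not V8's definitive set:

#include <cstdint>

enum class GCFlag : uint8_t {
  kNoFlags = 0,
  kReduceMemoryFootprint = 1 << 0,
  kForced = 1 << 1,
};

class GCFlags {
 public:
  constexpr GCFlags(GCFlag flag = GCFlag::kNoFlags)
      : bits_(static_cast<uint8_t>(flag)) {}
  constexpr GCFlags operator|(GCFlags other) const {
    return GCFlags(static_cast<uint8_t>(bits_ | other.bits_));
  }
  constexpr bool contains(GCFlag flag) const {
    return (bits_ & static_cast<uint8_t>(flag)) != 0;
  }

 private:
  constexpr explicit GCFlags(uint8_t bits) : bits_(bits) {}
  uint8_t bits_;
};

static_assert(
    (GCFlags(GCFlag::kReduceMemoryFootprint) | GCFlag::kForced)
        .contains(GCFlag::kForced));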
constexpr int kSystemPointerSize
constexpr intptr_t kObjectAlignment8GbHeap
V8_EXPORT_PRIVATE void MemMove(void *dest, const void *src, size_t size)
template Tagged< Object > VisitWeakList< JSFinalizationRegistry >(Heap *heap, Tagged< Object > list, WeakObjectRetainer *retainer)
typename detail::FlattenUnionHelper< Union<>, Ts... >::type UnionOf
Tagged< MaybeWeak< T > > MakeWeak(Tagged< T > value)
constexpr uint32_t kClearedFreeMemoryValue
@ SHARED_TRUSTED_LO_SPACE
@ FIRST_GROWABLE_PAGED_SPACE
@ LAST_GROWABLE_PAGED_SPACE
V8_INLINE constexpr bool IsHeapObject(TaggedImpl< kRefType, StorageType > obj)
V8_EXPORT_PRIVATE FlagValues v8_flags
InvalidateExternalPointerSlots
SlotTraits::TOffHeapObjectSlot OffHeapObjectSlot
constexpr size_t kMaximalCodeRangeSize
static constexpr Address kNullAddress
static constexpr GlobalSafepointForSharedSpaceIsolateTag kGlobalSafepointForSharedSpaceIsolate
constexpr int kDoubleSize
void PrintIsolate(void *isolate, const char *format,...)
constexpr intptr_t kDoubleAlignmentMask
void MemCopy(void *dest, const void *src, size_t size)
template Tagged< Object > VisitWeakList< Context >(Heap *heap, Tagged< Object > list, WeakObjectRetainer *retainer)
constexpr intptr_t kDoubleAlignment
static GCType GetGCTypeFromGarbageCollector(GarbageCollector collector)
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
static constexpr ReleaseStoreTag kReleaseStore
@ kGCCallbackScheduleIdleGarbageCollection
@ kGCCallbackFlagCollectAllExternalMemory
@ kGCCallbackFlagSynchronousPhantomCallbackProcessing
@ kGCCallbackFlagCollectAllAvailableGarbage
static constexpr RelaxedLoadTag kRelaxedLoad
static constexpr RelaxedStoreTag kRelaxedStore
@ kGCTypeMarkSweepCompact
@ kGCTypeIncrementalMarking
size_t(*)(void *data, size_t current_heap_limit, size_t initial_heap_limit) NearHeapLimitCallback
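This is the near-heap-limit callback type. A hedged usage example via the public embedder API as I understand it (v8::Isolate::AddNearHeapLimitCallback); the callback returns the heap limit to use from now on, so returning a larger value raises it:

#include <cstddef>

#include "v8.h"  // assumed include; newer trees split this into v8-isolate.h

size_t OnNearHeapLimit(void* /*data*/, size_t current_heap_limit,
                       size_t /*initial_heap_limit*/) {
  // Grant 50% extra headroom so the embedder has time to shed load or take a
  // heap snapshot before running out of memory.
  return current_heap_limit + current_heap_limit / 2;
}

void InstallNearHeapLimitCallback(v8::Isolate* isolate) {
  isolate->AddNearHeapLimitCallback(OnNearHeapLimit, /*data=*/nullptr);
}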
static constexpr AcquireLoadTag kAcquireLoad
#define RELAXED_WRITE_FIELD(p, offset, value)
#define VIRTUAL_INSTANCE_TYPE_LIST(V)
#define INSTANCE_TYPE_LIST(V)
#define DCHECK_LE(v1, v2)
#define CHECK_GE(lhs, rhs)
#define CHECK_IMPLIES(lhs, rhs)
#define CHECK_GT(lhs, rhs)
#define CHECK_LT(lhs, rhs)
#define CHECK_LE(lhs, rhs)
#define DCHECK_NOT_NULL(val)
#define CHECK_NOT_NULL(val)
#define DCHECK_IMPLIES(v1, v2)
#define DCHECK_NE(v1, v2)
#define CHECK_NE(lhs, rhs)
#define DCHECK_GE(v1, v2)
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
#define DCHECK_LT(v1, v2)
#define DCHECK_EQ(v1, v2)
#define DCHECK_GT(v1, v2)
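The CHECK_* macros verify in all builds, while the DCHECK_* variants are compiled out of release builds. A hypothetical stand-in (suffixed _SKETCH to avoid clashing with the real macros) showing that general shape:

#include <cstdio>
#include <cstdlib>

#define CHECK_SKETCH(condition)                               \
  do {                                                        \
    if (!(condition)) {                                       \
      std::fprintf(stderr, "Check failed: %s\n", #condition); \
      std::abort();                                           \
    }                                                         \
  } while (false)

#ifdef DEBUG
#define DCHECK_SKETCH(condition) CHECK_SKETCH(condition)
#else
#define DCHECK_SKETCH(condition) ((void)0)  // no-op in release builds
#endif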
constexpr T RoundUp(T x, intptr_t m)
constexpr T RoundDown(T x, intptr_t m)
#define V8_EXPORT_PRIVATE
constexpr bool IsAligned(T value, U alignment)
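A minimal sketch of the alignment helpers named above, assuming power-of-two alignments (the usual constraint for such helpers) and using intptr_t instead of the templated types:

#include <cstdint>

constexpr intptr_t RoundDown(intptr_t x, intptr_t m) {
  return x & -m;  // clears the low bits; valid only for power-of-two m
}
constexpr intptr_t RoundUp(intptr_t x, intptr_t m) {
  return RoundDown(x + m - 1, m);
}
constexpr bool IsAligned(intptr_t value, intptr_t alignment) {
  return (value & (alignment - 1)) == 0;
}

static_assert(RoundDown(13, 8) == 8);
static_assert(RoundUp(13, 8) == 16);
static_assert(IsAligned(RoundUp(13, 8), 8));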
#define RIGHT_TRIMMABLE_ARRAY_LIST(V)
static constexpr size_t kMaxSize
static constexpr size_t kMinSize
static constexpr double kConservativeGrowingFactor
static constexpr char kName[]
static bool is_gc_stats_enabled()
static constexpr char kName[]
EphemeronRememberedSet * ephemeron_remembered_set_
#define OFFSET_OF_DATA_START(Type)
#define TRACE_EVENT_BEGIN2(category_group, name, arg1_name, arg1_val, arg2_name, arg2_val)
#define TRACE_EVENT0(category_group, name)
#define TRACE_EVENT2(category_group, name, arg1_name, arg1_val, arg2_name, arg2_val)
#define TRACE_EVENT_END1(category_group, name, arg1_name, arg1_val)
#define TRACE_DISABLED_BY_DEFAULT(name)
#define TRACE_EVENT1(category_group, name, arg1_name, arg1_val)
#define TRACE_EVENT_FLAG_FLOW_OUT
#define TRACE_EVENT_FLAG_FLOW_IN
#define V8_STATIC_ROOTS_BOOL
#define V8_LIKELY(condition)
#define V8_UNLIKELY(condition)