                                       const char* event_type);

    static constexpr size_t kExternalAllocationLimitForInterrupt = 128 * KB;

    uint64_t total() const { return total_.load(std::memory_order_relaxed); }

    uint64_t limit_for_interrupt() const {
      return limit_for_interrupt_.load(std::memory_order_relaxed);
    }

    uint64_t low_since_mark_compact() const {
      return low_since_mark_compact_.load(std::memory_order_relaxed);
    }

      const uint64_t amount_before =
          total_.fetch_add(delta, std::memory_order_relaxed);
      CHECK_GE(static_cast<int64_t>(amount_before), -delta);
      return amount_before + delta;

      set_limit_for_interrupt(amount + kExternalAllocationLimitForInterrupt);

      set_low_since_mark_compact(amount);
      UpdateLimitForInterrupt(amount);

      uint64_t total_bytes = total();
      uint64_t low_since_mark_compact_bytes = low_since_mark_compact();

      if (total_bytes <= low_since_mark_compact_bytes) {
        return 0;
      }
      return total_bytes - low_since_mark_compact_bytes;
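
    // The subtraction above saturates at zero: if external memory was freed
    // and total() dropped below the watermark, the result is 0 rather than a
    // huge wrapped-around uint64_t. Worked example with made-up numbers:
    //
    //   total() == 96 MB, low_since_mark_compact() == 64 MB  ->  32 MB
    //   total() == 48 MB, low_since_mark_compact() == 64 MB  ->  0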

      total_.store(value, std::memory_order_relaxed);

      limit_for_interrupt_.store(value, std::memory_order_relaxed);

      low_since_mark_compact_.store(value, std::memory_order_relaxed);

    std::atomic<uint64_t> total_{0};

    std::atomic<uint64_t> limit_for_interrupt_{
        kExternalAllocationLimitForInterrupt};

    std::atomic<uint64_t> low_since_mark_compact_{0};
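
    // A note on the members above: total_ counts all external (off-heap)
    // bytes reported to V8, low_since_mark_compact_ is the low watermark
    // since the last full GC, and limit_for_interrupt_ is re-armed to the
    // watermark plus kExternalAllocationLimitForInterrupt (128 KB) whenever
    // the watermark moves. A hedged sketch of the resulting control flow
    // (hypothetical driver code, not part of this header):
    //
    //   // mutator: account an external allocation of `delta` bytes, then:
    //   // if (accounting.total() > accounting.limit_for_interrupt()) {
    //   //   // request an interrupt; the main thread eventually runs
    //   //   // Heap::HandleExternalMemoryInterrupt(), and after the next
    //   //   // mark-compact the watermark and the limit are re-armed.
    //   // }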

  static const int kPointerMultiplier = 1;
  static const int kHeapLimitMultiplier = 1;

  static const size_t kMaxInitialOldGenerationSize =
      256 * MB * kHeapLimitMultiplier;

  static constexpr size_t kPhysicalMemoryToOldGenerationRatio = 4;
  static constexpr size_t kOldGenerationLowMemory =
      128 * MB * kHeapLimitMultiplier;
  static constexpr size_t kNewLargeObjectSpaceToSemiSpaceRatio = 1;

  static const int kTraceRingBufferSize = 512;
  static const int kStacktraceBufferSize = 512;

  static const int kMinObjectSizeInTaggedWords = 2;
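
  // kMinObjectSizeInTaggedWords = 2 puts a floor of two tagged words under
  // every heap object. Hedged arithmetic, assuming the usual V8 word sizes:
  // with pointer compression a tagged word is 4 bytes, so the minimum object
  // is 8 bytes; with full 8-byte tagged words it is 16 bytes.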

  static size_t DefaultMinSemiSpaceSize();
  static size_t OldGenerationToSemiSpaceRatio();
  static size_t OldGenerationToSemiSpaceRatioLowMemory();

  static size_t GetCodeRangeReservedAreaSize();

                                   const char* location);

  static inline void CopyBlock(Address dst, Address src, int byte_size);

  void NotifyDeserializationComplete();

  void WeakenDescriptorArrays(

  void NotifyBootstrapComplete();

  void NotifyOldGenerationExpansion(
      OldGenerationExpansionNotificationOrigin =
          OldGenerationExpansionNotificationOrigin::kFromSameHeap);

  inline Address* NewSpaceAllocationTopAddress();
  inline Address* NewSpaceAllocationLimitAddress();
  inline Address* OldSpaceAllocationTopAddress();
  inline Address* OldSpaceAllocationLimitAddress();

  size_t NewSpaceSize();
  size_t NewSpaceCapacity() const;
  size_t NewSpaceTargetCapacity() const;

  template <typename TSlot>
      TSlot dst_slot, TSlot src_slot, int len,

#define RIGHT_TRIMMABLE_ARRAY_LIST(V) \
  V(FixedDoubleArray)

  template <typename Array>
  void RightTrimArray(Tagged<Array> object, int new_capacity,
                      int old_capacity);
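
  // Right-trimming shrinks an array in place by freeing its tail instead of
  // copying into a smaller allocation; the freed tail becomes filler so the
  // heap stays iterable. A hedged usage sketch (hypothetical capacities;
  // `array` is assumed to be a live Tagged<FixedDoubleArray>):
  //
  //   // Keep the first 60 elements of a 100-element array:
  //   heap->RightTrimArray(array, /*new_capacity=*/60, /*old_capacity=*/100);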

    native_contexts_list_.store(object.ptr(), std::memory_order_release);

        native_contexts_list_.load(std::memory_order_acquire));
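
  // The release store above pairs with the acquire load below: a thread that
  // observes the published list head also observes all writes that
  // initialized the context before it was published. A minimal sketch of the
  // pattern (not V8 code; assumes <atomic> and <cstdint>):
  //
  //   std::atomic<uintptr_t> head{0};
  //
  //   // writer:
  //   context->Init();                                     // initialize first
  //   head.store(context_ptr, std::memory_order_release);  // then publish
  //
  //   // reader:
  //   uintptr_t h = head.load(std::memory_order_acquire);
  //   if (h != 0) { /* all writes from Init() are visible here */ }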

    allocation_sites_list_ = object;

    return allocation_sites_list_;

    dirty_js_finalization_registries_list_ = object;

    return dirty_js_finalization_registries_list_;

    dirty_js_finalization_registries_list_tail_ = object;

    return dirty_js_finalization_registries_list_tail_;

    return reinterpret_cast<Address>(&allocation_sites_list_);

  void ForeachAllocationSite(

  void CheckHandleCount();

  void PrintShortHeapStatistics();

  void PrintFreeListsStats();

  void DumpJSONHeapStatistics(std::stringstream& stream);

    return gc_state_.load(std::memory_order_relaxed);

    return state != NOT_IN_GC && state != TEAR_DOWN;

    return ignore_local_gc_requests_depth_ > 0;

    return pause_allocation_observers_depth_ == 0;

  bool IsGCWithMainThreadStack() const;

  bool IsGCWithStack() const;

  void CollectGarbageForBackground(LocalHeap* local_heap);

  void CreateReadOnlyApiObjects();
  void CreateMutableApiObjects();

  void CheckMemoryPressure();

                           double threshold_percent);

  void VerifyNewSpaceTop();

  bool MeasureMemory(std::unique_ptr<v8::MeasureMemoryDelegate> delegate,

  std::unique_ptr<v8::MeasureMemoryDelegate> CreateDefaultMeasureMemoryDelegate(

  void IncrementDeferredCounts(

  int NextDebuggingId();
  int NextStackTraceId();
  inline uint32_t GetNextTemplateSerialNumber();

  void RememberUnmappedPage(Address page, bool compacted);

    return external_memory_.low_since_mark_compact() +
           max_old_generation_size() / 2;
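
  // A hedged reading of this heuristic: the external-memory soft limit is the
  // low watermark since the last mark-compact plus half of the old-generation
  // maximum. Example with made-up sizes: low_since_mark_compact() == 1 GB and
  // max_old_generation_size() == 2 GB yield a limit of 1 GB + 1 GB == 2 GB.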

  V8_INLINE uint64_t external_memory() const;

  uint64_t UpdateExternalMemory(int64_t delta);

    return backing_store_bytes_.load(std::memory_order_relaxed);

  void CompactWeakArrayLists();

  bool HasLowAllocationRate();
  bool HasHighFragmentation();

  void ActivateMemoryReducerIfNeeded();

    return memory_pressure_level_.load(std::memory_order_relaxed) !=
           MemoryPressureLevel::kNone;

  bool CollectionRequested();

  void CheckCollectionRequested();

    size_t min_limit = SizeOfObjects() + SizeOfObjects() / 4;
    SetOldGenerationAndGlobalMaximumSize(
        std::min(max_old_generation_size(), std::max(heap_limit, min_limit)));
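
  // The new maximum is clamped from both sides: never above the current
  // maximum, never below live objects plus 25% headroom. Worked example with
  // hypothetical sizes: SizeOfObjects() == 400 MB gives min_limit == 500 MB;
  // with max_old_generation_size() == 2 GB, a requested heap_limit of 300 MB
  // is raised to 500 MB, while a requested 3 GB is capped at 2 GB.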

  void ConfigureHeapDefault();

  void SetUp(LocalHeap* main_thread_local_heap);

  void InitializeHashSeed();

  static void InitializeOncePerProcess();

  bool CreateReadOnlyHeapObjects();
  bool CreateMutableHeapObjects();

  void CreateObjectStats();

  void StartTearDown();

  void TearDownWithSharedHeap();

  bool HasBeenSetUp() const;

    return shared_trusted_space_;

    return trusted_lo_space_;

    return shared_trusted_lo_space_;

    return shared_allocation_space_;

    return shared_lo_allocation_space_;

    return shared_trusted_allocation_space_;

    return shared_trusted_lo_allocation_space_;

  inline PagedSpace* paged_space(int idx) const;

#ifdef V8_COMPRESS_POINTERS
  ExternalPointerTable::Space* young_external_pointer_space() {
    return &young_external_pointer_space_;
  }
  ExternalPointerTable::Space* old_external_pointer_space() {
    return &old_external_pointer_space_;
  }
  ExternalPointerTable::Space* read_only_external_pointer_space() {
    return &read_only_external_pointer_space_;
  }
  CppHeapPointerTable::Space* cpp_heap_pointer_space() {
    return &cpp_heap_pointer_space_;
  }
#endif  // V8_COMPRESS_POINTERS

#ifdef V8_ENABLE_SANDBOX
  TrustedPointerTable::Space* trusted_pointer_space() {
    return &trusted_pointer_space_;
  }
  CodePointerTable::Space* code_pointer_space() { return &code_pointer_space_; }
#endif  // V8_ENABLE_SANDBOX

#ifdef V8_ENABLE_LEAPTIERING
  JSDispatchTable::Space* js_dispatch_table_space() {
    return &js_dispatch_table_space_;
  }
#endif  // V8_ENABLE_LEAPTIERING

    return memory_allocator_.get();

  inline bool IsMainThread() const;

    return mark_compact_collector_.get();

    return minor_mark_sweep_collector_.get();

    return array_buffer_sweeper_.get();

#ifdef V8_COMPRESS_POINTERS
    return code_range_.get();

  inline Address code_range_base();

#define ROOT_ACCESSOR(type, name, CamelName) inline Tagged<type> name();
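
// ROOT_ACCESSOR is the usual X-macro pattern: a roots list macro applies it
// once per root, stamping out one inline getter each. A hedged sketch of the
// expansion, using a made-up roots list (the real list lives elsewhere):
//
//   #define HYPOTHETICAL_ROOT_LIST(V) \
//     V(Map, meta_map, MetaMap)       \
//     V(Hole, the_hole_value, TheHoleValue)
//   HYPOTHETICAL_ROOT_LIST(ROOT_ACCESSOR)
//   // ...expands to:
//   //   inline Tagged<Map> meta_map();
//   //   inline Tagged<Hole> the_hole_value();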

  V8_INLINE void SetFunctionsMarkedForManualOptimization(

#if V8_ENABLE_WEBASSEMBLY
  V8_INLINE void SetWasmCanonicalRttsAndJSToWasmWrappers(
#endif  // V8_ENABLE_WEBASSEMBLY

  void EnqueueDirtyJSFinalizationRegistry(
      gc_notify_updated_slot);

  DequeueDirtyJSFinalizationRegistry();

  void RemoveDirtyFinalizationRegistriesOnContext(

  bool HasDirtyJSFinalizationRegistries();

  void PostFinalizationRegistryCleanupTaskIfNeeded();

    is_finalization_registry_cleanup_task_posted_ = posted;

    return is_finalization_registry_cleanup_task_posted_;

  void ClearKeptObjects();

  void HandleExternalMemoryInterrupt();

    external_memory_callback_ = callback;

  void HandleGCRequest();

      IterateRootsMode roots_mode = IterateRootsMode::kMainIsolate);

  void IterateConservativeStackRoots(
      IterateRootsMode roots_mode = IterateRootsMode::kMainIsolate);

  void IterateRootsForPrecisePinning(RootVisitor* visitor);

  uint8_t* IsMarkingFlagAddress();
  uint8_t* IsMinorMarkingFlagAddress();

                   size_t slot_offset);

  void CompleteSweepingYoung();

    return incremental_marking_.get();

    return concurrent_marking_.get();

  void NotifyObjectLayoutChange(

  void SetConstructStubCreateDeoptPCOffset(int pc_offset);
  void SetConstructStubInvokeDeoptPCOffset(int pc_offset);
  void SetDeoptPCOffsetAfterAdaptShadowStack(int pc_offset);
  void SetInterpreterEntryReturnPCOffset(int pc_offset);

  void DeoptMarkedAllocationSites();

  std::optional<StackState> overridden_stack_state() const;

  V8_EXPORT_PRIVATE ::heap::base::Stack& stack();

                     size_t new_payload);

  static Tagged<String> UpdateYoungReferenceInExternalStringTableEntry(

  size_t NumberOfTrackedHeapObjectTypes();

  size_t ObjectCountAtLastGC(size_t index);
  size_t ObjectSizeAtLastGC(size_t index);

  bool GetObjectTypeName(size_t index, const char** object_type,
                         const char** object_sub_type);

  size_t NumberOfNativeContexts();

  size_t NumberOfDetachedContexts();

  void CollectCodeStatistics();

                          uint64_t physical_memory);

      size_t heap_size, size_t* young_generation_size,
      size_t* old_generation_size);

      size_t old_generation_size);

      size_t semi_space_size);

      size_t young_generation_size);

                          uint64_t physical_memory);

  size_t CommittedMemory();

  size_t CommittedOldGenerationMemory();

  size_t CommittedMemoryExecutable();

  size_t CommittedPhysicalMemory();

  void UpdateMaximumCommitted();

  void UpdateSurvivalStatistics(int start_new_space_size);

    promoted_objects_size_ += object_size;

    new_space_surviving_object_size_ += object_size;

    return new_space_surviving_object_size_;

    return promoted_objects_size_ + new_space_surviving_object_size_;

    nodes_died_in_new_space_ += count;

    survived_since_last_expansion_ += survived;

    new_space_allocation_counter_ = new_value;

    old_generation_allocation_counter_at_last_gc_ =
        OldGenerationAllocationCounter();

    return old_generation_allocation_counter_at_last_gc_ +
           PromotedSinceLastGC();

  size_t EmbedderAllocationCounter() const;

    old_generation_allocation_counter_at_last_gc_ = new_value;

    return current_or_last_garbage_collector_;

    return is_current_gc_forced_ || is_current_gc_for_heap_profiler_;

  bool AllocationLimitOvershotByLargeMargin() const;

                            GCType gc_type_filter, void* data);

                            GCType gc_type_filter, void* data);

      const std::function<void*(size_t)>& allocate, size_t byte_length);

  void AddAllocationObserversToAllSpaces(

  void RemoveAllocationObserversFromAllSpaces(

    return !allocation_trackers_.empty();

  std::optional<Tagged<GcSafeCode>> GcSafeTryFindCodeForInnerPointer(

  std::optional<Tagged<InstructionStream>>
  GcSafeTryFindInstructionStreamForInnerPointer(Address inner_pointer);

  std::optional<Tagged<Code>> TryFindCodeForInnerPointerForPrinting(

  bool GcSafeInstructionStreamContains(

    return sweeper_->sweeping_in_progress_for_space(space);

    return sweeper_->minor_sweeping_in_progress();

    return sweeper_->major_sweeping_in_progress();

  void FinishSweepingIfOutOfWork();

      SweepingForcedFinalizationMode mode);

  void EnsureYoungSweepingCompleted();
  void EnsureQuarantinedPagesSweepingCompleted();

#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
#endif  // V8_ENABLE_ALLOCATION_TIMEOUT

  void VerifyCountersAfterSweeping();

  void VerifyCommittedPhysicalMemory();

  void PrintHandles();

  void ReportCodeStatistics(const char* title);

#if V8_TARGET_ARCH_X64
    uintptr_t offset = reinterpret_cast<uintptr_t>(result) & kMmapRegionMask;
    result = reinterpret_cast<void*>(mmap_region_base_ + offset);
#endif  // V8_TARGET_ARCH_X64
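
  // The masking above re-randomizes an address within a single 4 GB region:
  // kMmapRegionMask (0xFFFFFFFF, see the data members below) keeps the low
  // 32 bits of the candidate address, which are then rebased onto
  // mmap_region_base_. Worked example with hypothetical addresses:
  //
  //   result == 0x7f12'3456'7000  ->  offset == 0x3456'7000
  //   mmap_region_base_ == 0x7f00'0000'0000
  //   final result == 0x7f00'3456'7000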

  inline int MaxNumberToStringCacheSize() const;

    return &non_atomic_marking_state_;

  std::shared_ptr<v8::TaskRunner> GetForegroundTaskRunner(

  bool ShouldUseBackgroundThreads() const;
  bool ShouldUseIncrementalMarking() const;

    return new_space() || v8_flags.sticky_mark_bits;

  bool IsNewSpaceAllowedToGrowAboveTargetCapacity() const;

  class AllocationTrackerForDebugging;

    void PromoteYoung();

    void CleanUpYoung();

    void UpdateYoungReferences(
        Heap::ExternalStringTableUpdaterCallback updater_func);
    void UpdateReferences(
        Heap::ExternalStringTableUpdaterCallback updater_func);

    bool HasYoung() const { return !young_strings_.empty(); }

  static const int kInitialEvalCacheSize = 64;
  static const int kInitialNumberStringCacheSize = 256;

  static const int kRememberedUnmappedPages = 128;

  static const int kYoungSurvivalRateHighThreshold = 90;
  static const int kYoungSurvivalRateAllowedDeviation = 15;
  static const int kOldSurvivalRateLowThreshold = 10;

  static const int kMaxMarkCompactsInIdleRound = 7;

#define ROOT_ACCESSOR(type, name, CamelName) \
  inline void set_##name(Tagged<type> value);

                              const char** reason) const;

  void MakeLinearAllocationAreasIterable();

  void MarkSharedLinearAllocationAreasBlack();
  void UnmarkSharedLinearAllocationAreas();

  void FreeSharedLinearAllocationAreasAndResetFreeLists();

                      const char* collector_reason);

  void PerformHeapVerification();
  std::vector<Isolate*> PauseConcurrentThreadsInClients(
  void ResumeConcurrentThreadsInClients(std::vector<Isolate*> paused_clients);

  bool CreateEarlyReadOnlyMapsAndObjects();
  bool CreateImportantReadOnlyObjects();
  bool CreateLateReadOnlyNonJSReceiverMaps();
  bool CreateLateReadOnlyJSReceiverMaps();
  bool CreateReadOnlyObjects();

  void CreateInternalAccessorInfoObjects();
  void CreateInitialMutableObjects();

      VerifyNoSlotsRecorded verify_no_slots_recorded);

  void ResetAllAllocationSitesDependentCode(AllocationType allocation);

  void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);

  void ReportStatisticsAfterGC();

  void FlushNumberStringCache();

  void ActivateMemoryReducerIfNeededOnMainThread();

  void ShrinkOldGenerationAllocationLimitIfNotConfigured();

  double ComputeMutatorUtilization(const char* tag, double mutator_speed,
                                   std::optional<double> gc_speed);
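
  // Mutator utilization is the fraction of wall-clock time spent running the
  // mutator rather than the GC. A hedged derivation from the two throughputs
  // (bytes/ms), costing one byte at 1/speed ms in each phase:
  //
  //   mutator_time = 1 / mutator_speed
  //   gc_time      = 1 / gc_speed
  //   utilization  = mutator_time / (mutator_time + gc_time)
  //                = gc_speed / (mutator_speed + gc_speed)
  //
  // E.g. mutator_speed == 1'000'000 and gc_speed == 250'000 bytes/ms gives
  // 250'000 / 1'250'000 == 0.2, i.e. 20% of the time goes to the mutator.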

  bool HasLowYoungGenerationAllocationRate();
  bool HasLowOldGenerationAllocationRate();
  bool HasLowEmbedderAllocationRate();

  ResizeNewSpaceMode ShouldResizeNewSpace();

  void StartResizeNewSpace();
  void ResizeNewSpace();
  void ExpandNewSpaceSize();
  void ReduceNewSpaceSize();

  void PrintMaxMarkingLimitReached();
  void PrintMaxNewSpaceSizeReached();

  int NextStressMarkingLimit();

  void AddToRingBuffer(const char* string);
  void GetFromRingBuffer(char* buffer);
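
  // The ring buffer behind these two helpers (trace_ring_buffer_,
  // ring_buffer_end_, ring_buffer_full_; see the data members below) keeps
  // only the most recent kTraceRingBufferSize characters of trace output. A
  // minimal wraparound sketch of an append step (a hypothetical
  // implementation, not necessarily what heap.cc does):
  //
  //   size_t end = ring_buffer_end_;
  //   for (const char* p = string; *p != '\0'; ++p) {
  //     trace_ring_buffer_[end] = *p;
  //     end = (end + 1) % kTraceRingBufferSize;
  //     if (end == 0) ring_buffer_full_ = true;  // wrapped at least once
  //   }
  //   ring_buffer_end_ = end;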

  static constexpr int kRetainMapEntrySize = 2;

  void CollectGarbageOnMemoryPressure();

  void EagerlyFreeExternalMemoryAndWasmCode();

  bool InvokeNearHeapLimitCallback();

  void InvokeIncrementalMarkingPrologueCallbacks();
  void InvokeIncrementalMarkingEpilogueCallbacks();

  void GarbageCollectionPrologueInSafepoint();

  void MinorMarkSweep();

  void MarkCompactPrologue();
  void MarkCompactEpilogue();

  void UpdateYoungReferencesInExternalStringTable(

  void UpdateReferencesInExternalStringTable(

    uint64_t bytes = OldGenerationConsumedBytes();
    if (!v8_flags.external_memory_accounted_in_global_limit) {
      bytes += AllocatedExternalMemorySinceMarkCompact();
    }
    if (old_generation_allocation_limit() <= bytes) return 0;
    return old_generation_allocation_limit() - static_cast<size_t>(bytes);
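
  // Headroom example with made-up numbers: a limit of 512 MB and 480 MB of
  // consumed bytes (plus, when external memory is not folded into the global
  // limit, external bytes allocated since the last mark-compact) leave 32 MB;
  // once consumption reaches the limit the result saturates at 0 instead of
  // underflowing the unsigned subtraction.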

  bool IsIneffectiveMarkCompact(size_t old_generation_size,
                                double mutator_utilization);
  void CheckIneffectiveMarkCompact(size_t old_generation_size,
                                   double mutator_utilization);

  static const int kMaxLoadTimeMs = 7000;

  void NotifyLoadingStarted();
  void NotifyLoadingEnded();

    return old_generation_allocation_limit_.load(std::memory_order_relaxed);

    return global_allocation_limit_.load(std::memory_order_relaxed);

    return using_initial_limit_.load(std::memory_order_relaxed);

    using_initial_limit_.store(value, std::memory_order_relaxed);

    return max_old_generation_size_.load(std::memory_order_relaxed);

  void SetOldGenerationAndGlobalMaximumSize(size_t max_old_generation_size);

  void SetOldGenerationAndGlobalAllocationLimit(
      size_t new_old_generation_allocation_limit,
      size_t new_global_allocation_limit);

  void ResetOldGenerationAndGlobalAllocationLimit();

  bool ShouldExpandOldGenerationOnSlowAllocation(LocalHeap* local_heap,

  bool ShouldExpandYoungGenerationOnSlowAllocation(size_t allocation_size);

  HeapGrowingMode CurrentHeapGrowingMode();

  double PercentToOldGenerationLimit() const;
  double PercentToGlobalMemoryLimit() const;

    kFallbackForEmbedderLimit

  IncrementalMarkingLimit IncrementalMarkingLimitReached();

  bool ShouldStressCompaction() const;

  size_t GlobalMemoryAvailable();

  void RecomputeLimitsAfterLoadingIfNeeded();

      int inobject_properties = 0);

  template <AllocationRetryMode mode>

  AllocatePartialMap(InstanceType instance_type, int instance_size);

    force_gc_on_next_allocation_ = true;

  std::vector<Handle<NativeContext>> FindAllNativeContexts();
  std::vector<Tagged<WeakArrayList>> FindAllRetainedMaps();

    return allocation_type_for_in_place_internalizable_strings_;

  bool IsStressingScavenge();

  void SetIsMarkingFlag(bool value);
  void SetIsMinorMarkingFlag(bool value);

    size_t old_generation_size = OldGenerationSizeOfObjects();
    return old_generation_size > old_generation_size_at_last_gc_
               ? old_generation_size - old_generation_size_at_last_gc_
               : 0;
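
  // Same saturating pattern as elsewhere in this header: if the old
  // generation shrank since the last GC, the growth is reported as 0 rather
  // than as a wrapped-around size_t.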

  size_t code_range_size_ = 0;
  size_t max_semi_space_size_ = 0;
  size_t min_semi_space_size_ = 0;
  size_t initial_semispace_size_ = 0;

  size_t min_old_generation_size_ = 0;

  std::atomic<size_t> max_old_generation_size_{0};

  size_t min_global_memory_size_ = 0;
  size_t max_global_memory_size_ = 0;

  size_t initial_max_old_generation_size_ = 0;
  size_t initial_max_old_generation_size_threshold_ = 0;
  size_t initial_old_generation_size_ = 0;

  std::atomic<bool> using_initial_limit_ = true;

  bool initial_size_overwritten_ = false;

  size_t maximum_committed_ = 0;
  size_t old_generation_capacity_after_bootstrap_ = 0;

  std::atomic<uint64_t> backing_store_bytes_{0};

  size_t survived_since_last_expansion_ = 0;

  std::atomic<size_t> always_allocate_scope_count_{0};

  std::vector<std::pair<v8::NearHeapLimitCallback, void*>>

  int contexts_disposed_ = 0;

#ifdef V8_COMPRESS_POINTERS
  ExternalPointerTable::Space young_external_pointer_space_;
  ExternalPointerTable::Space old_external_pointer_space_;

  ExternalPointerTable::Space read_only_external_pointer_space_;

  CppHeapPointerTable::Space cpp_heap_pointer_space_;
#endif  // V8_COMPRESS_POINTERS

#ifdef V8_ENABLE_SANDBOX
  TrustedPointerTable::Space trusted_pointer_space_;

  CodePointerTable::Space code_pointer_space_;
#endif  // V8_ENABLE_SANDBOX

#ifdef V8_ENABLE_LEAPTIERING
  JSDispatchTable::Space js_dispatch_table_space_;
#endif  // V8_ENABLE_LEAPTIERING

  std::atomic<HeapState> gc_state_{NOT_IN_GC};

  int stress_marking_percentage_ = 0;

  std::atomic<double> max_marking_limit_reached_ = 0.0;

  unsigned int ms_count_ = 0;

  unsigned int gc_count_ = 0;

  int consecutive_ineffective_mark_compacts_ = 0;

  static const uintptr_t kMmapRegionMask = 0xFFFFFFFFu;
  uintptr_t mmap_region_base_ = 0;

  int remembered_unmapped_pages_index_ = 0;
  Address remembered_unmapped_pages_[kRememberedUnmappedPages];

  std::atomic<size_t> old_generation_allocation_limit_{0};
  std::atomic<size_t> global_allocation_limit_{0};

  size_t promoted_objects_size_ = 0;
  double promotion_ratio_ = 0.0;
  double promotion_rate_ = 0.0;
  size_t new_space_surviving_object_size_ = 0;
  size_t previous_new_space_surviving_object_size_ = 0;
  double new_space_surviving_rate_ = 0.0;
  int nodes_died_in_new_space_ = 0;
  int nodes_copied_in_new_space_ = 0;
  int nodes_promoted_ = 0;

  double last_gc_time_ = 0.0;

  std::unique_ptr<AllocationTrackerForDebugging>

#ifdef V8_COMPRESS_POINTERS
#endif  // V8_COMPRESS_POINTERS

  StackState embedder_stack_state_ = StackState::kMayContainHeapPointers;

  bool need_to_remove_stress_concurrent_allocation_observer_ = false;

  size_t new_space_allocation_counter_ = 0;

  size_t old_generation_allocation_counter_at_last_gc_ = 0;

  size_t old_generation_size_at_last_gc_{0};

  size_t old_generation_wasted_at_last_gc_{0};

  size_t embedder_size_at_last_gc_ = 0;

  char trace_ring_buffer_[kTraceRingBufferSize];

  bool ring_buffer_full_ = false;
  size_t ring_buffer_end_ = 0;

  bool configured_ = false;

  bool is_current_gc_forced_ = false;
  bool is_current_gc_for_heap_profiler_ = false;

  int ignore_local_gc_requests_depth_ = 0;

  int gc_callbacks_depth_ = 0;

  bool deserialization_complete_ = false;

  int max_regular_code_object_size_ = 0;

  bool inline_allocation_enabled_ = true;

  int pause_allocation_observers_depth_ = 0;

  bool force_oom_ = false;
  bool force_gc_on_next_allocation_ = false;
  bool delay_sweeper_tasks_for_testing_ = false;

  bool is_finalization_registry_cleanup_task_posted_ = false;

  std::unique_ptr<MemoryBalancer> mb_;

  static constexpr double kLoadTimeNotLoading = -1.0;

  std::atomic<double> load_start_time_ms_{kLoadTimeNotLoading};

  bool update_allocation_limits_after_loading_ = false;

  bool is_full_gc_during_loading_ = false;

  friend class ArrayBufferCollector;

  template <typename ConcreteVisitor>

  friend class MinorGCTaskObserver;

  friend class MinorMSIncrementalMarkingTaskObserver;

  friend class heap::TestMemoryAllocatorScope;

  template <typename IsolateT>

  friend class heap::HeapTester;

  friend class HeapInternalsBase;