v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
heap.h
1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_HEAP_HEAP_H_
6#define V8_HEAP_HEAP_H_
7
8#include <atomic>
9#include <cmath>
10#include <memory>
11#include <optional>
12#include <unordered_map>
13#include <unordered_set>
14#include <vector>
15
16// Clients of this interface shouldn't depend on lots of heap internals.
17// Do not include anything from src/heap here!
20#include "include/v8-internal.h"
21#include "include/v8-isolate.h"
23#include "src/base/enum-set.h"
30#include "src/common/globals.h"
38#include "src/heap/sweeper.h"
45#include "src/objects/objects.h"
46#include "src/objects/smi.h"
48#include "src/roots/roots.h"
54#include "testing/gtest/include/gtest/gtest_prod.h" // nogncheck
55
56namespace cppgc::internal {
57enum class HeapObjectNameForUnnamedObject : uint8_t;
58class ClassNameAsHeapObjectNameScope;
59} // namespace cppgc::internal
60
61namespace heap::base {
62class Stack;
63class StackVisitor;
64} // namespace heap::base
65
66namespace v8 {
67
68namespace debug {
69using OutOfMemoryCallback = void (*)(void* data);
70} // namespace debug
71
72namespace internal {
73
74namespace heap {
75class HeapTester;
76class TestMemoryAllocatorScope;
77} // namespace heap
78
79class ArrayBufferCollector;
80class ArrayBufferSweeper;
81class BackingStore;
82class MemoryChunkMetadata;
83class Boolean;
84class CodeLargeObjectSpace;
85class CodeRange;
86class CollectionBarrier;
87class ConcurrentMarking;
88class CppHeap;
89class EphemeronRememberedSet;
90class GCTracer;
91class IncrementalMarking;
92class IsolateSafepoint;
93class HeapObjectAllocationTracker;
94class HeapObjectsFilter;
95class HeapProfiler;
96class HeapStats;
97class Isolate;
98class JSArrayBuffer;
99class JSFinalizationRegistry;
100class JSPromise;
101class LinearAllocationArea;
102class LocalHeap;
103class MemoryAllocator;
104class MemoryBalancer;
105class MutablePageMetadata;
106class MemoryMeasurement;
107class MemoryReducer;
108class MinorMarkSweepCollector;
109class NativeContext;
110class NopRwxMemoryWriteScope;
111class ObjectIterator;
112class ObjectStats;
113class PageMetadata;
114class PagedSpace;
115class PagedNewSpace;
116class ReadOnlyHeap;
117class RootVisitor;
118class RwxMemoryWriteScope;
119class SafepointScope;
120class Scavenger;
121class ScavengerCollector;
122class SemiSpaceNewSpace;
123class SharedLargeObjectSpace;
124class SharedReadOnlySpace;
125class SharedSpace;
126class SharedTrustedLargeObjectSpace;
127class SharedTrustedSpace;
128class Space;
129class StickySpace;
130class StressScavengeObserver;
131class TimedHistogram;
132class TrustedLargeObjectSpace;
133class TrustedRange;
134class TrustedSpace;
135class WeakObjectRetainer;
136
138
140
142
144
157
162
163class StrongRootsEntry final {
164 explicit StrongRootsEntry(const char* label) : label(label) {}
165
166 // Label that identifies the roots in tooling.
167 const char* label;
168 FullObjectSlot start;
169 FullObjectSlot end;
170 StrongRootsEntry* prev;
171 StrongRootsEntry* next;
172
173 friend class Heap;
174};
175
176// An alias for std::unordered_map<Tagged<HeapObject>, T> which also
177// sets proper Hash and KeyEqual functions.
178template <typename T>
179using UnorderedHeapObjectMap =
180    std::unordered_map<Tagged<HeapObject>, T, Object::Hasher,
181                       Object::KeyEqualSafe>;
182
183enum class GCFlag : uint8_t {
184 kNoFlags = 0,
185 kReduceMemoryFootprint = 1 << 0,
186 // GCs that are forced, either through testing configurations (requiring
187 // --expose-gc) or through DevTools (using LowMemoryNotification).
188 kForced = 1 << 1,
189 kLastResort = 1 << 2,
190};
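// Illustrative sketch (not part of this header): the flags above are bit
// values that combine into the GCFlags set accepted by the collection entry
// points declared further below. Assumes an initialized Heap* `heap`:
//
//   heap->CollectAllGarbage(GCFlag::kReduceMemoryFootprint,
//                           GarbageCollectionReason::kTesting);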
191
194
195class Heap final {
196 public:
197 enum class HeapGrowingMode { kSlow, kConservative, kMinimal, kDefault };
198
206
207 // Emits GC events for DevTools timeline.
208 class V8_NODISCARD DevToolsTraceEventScope {
209 public:
210 DevToolsTraceEventScope(Heap* heap, const char* event_name,
211                         const char* event_type);
212 ~DevToolsTraceEventScope();
213
214 private:
215 Heap* heap_;
216 const char* event_name_;
217 };
218
219 class ExternalMemoryAccounting {
220 public:
221 static constexpr size_t kExternalAllocationLimitForInterrupt = 128 * KB;
222
223 uint64_t total() const { return total_.load(std::memory_order_relaxed); }
224 uint64_t limit_for_interrupt() const {
225 return limit_for_interrupt_.load(std::memory_order_relaxed);
226 }
227 uint64_t soft_limit() const {
228 return low_since_mark_compact() + kExternalAllocationSoftLimit;
229 }
230 uint64_t low_since_mark_compact() const {
231 return low_since_mark_compact_.load(std::memory_order_relaxed);
232 }
233
234 uint64_t UpdateAmount(int64_t delta) {
235 const uint64_t amount_before =
236 total_.fetch_add(delta, std::memory_order_relaxed);
237 CHECK_GE(static_cast<int64_t>(amount_before), -delta);
238 return amount_before + delta;
239 }
240
241 void UpdateLimitForInterrupt(uint64_t amount) {
242 set_limit_for_interrupt(amount + kExternalAllocationLimitForInterrupt);
243 }
244
245 void UpdateLowSinceMarkCompact(uint64_t amount) {
246 set_low_since_mark_compact(amount);
247 UpdateLimitForInterrupt(amount);
248 }
249
250 uint64_t AllocatedSinceMarkCompact() const {
251 uint64_t total_bytes = total();
252 uint64_t low_since_mark_compact_bytes = low_since_mark_compact();
253
254 if (total_bytes <= low_since_mark_compact_bytes) {
255 return 0;
256 }
257 return total_bytes - low_since_mark_compact_bytes;
258 }
259
260 private:
261 void set_total(uint64_t value) {
262 total_.store(value, std::memory_order_relaxed);
263 }
264
265 void set_limit_for_interrupt(uint64_t value) {
266 limit_for_interrupt_.store(value, std::memory_order_relaxed);
267 }
268
269 void set_low_since_mark_compact(uint64_t value) {
270 low_since_mark_compact_.store(value, std::memory_order_relaxed);
271 }
272
273 // The amount of external memory registered through the API.
274 std::atomic<uint64_t> total_{0};
275
276 // The limit when to trigger memory pressure from the API.
277 std::atomic<uint64_t> limit_for_interrupt_{
278 kExternalAllocationLimitForInterrupt};
279
280 // Caches the amount of external memory registered at the last MC.
281 std::atomic<uint64_t> low_since_mark_compact_{0};
282 };
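// Illustrative sketch (not part of this header): the accounting above is a
// relaxed-atomic total plus a low watermark that is reset at each
// mark-compact; an interrupt is requested once total() exceeds
// low_since_mark_compact() + kExternalAllocationLimitForInterrupt (128 KB).
// Through the public Heap wrappers declared further below:
//
//   heap->UpdateExternalMemory(+1 * MB);  // embedder registered 1 MB
//   uint64_t growth = heap->AllocatedExternalMemorySinceMarkCompact();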
283
284 // Support for context snapshots. After calling this we have a linear
285 // space to write objects in each space.
286 struct Chunk {
287 uint32_t size;
288 Address start;
289 Address end;
290 };
291 using Reservation = std::vector<Chunk>;
292
293#if V8_OS_ANDROID
294 // Don't apply pointer multiplier on Android since it has no swap space and
295 // should instead adapt its heap size based on available physical memory.
296 static const int kPointerMultiplier = 1;
297 static const int kHeapLimitMultiplier = 1;
298#else
299 static const int kPointerMultiplier = kTaggedSize / 4;
300 // The heap limit needs to be computed based on the system pointer size
301 // because we want a pointer-compressed heap to have a larger limit than
302 // an ordinary 32-bit heap, which is constrained by its 2GB virtual address space.
303 static const int kHeapLimitMultiplier = kSystemPointerSize / 4;
304#endif
305
306 static const size_t kMaxInitialOldGenerationSize =
307 256 * MB * kHeapLimitMultiplier;
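// Worked example (illustrative): on a 64-bit build with pointer compression,
// kTaggedSize == 4 gives kPointerMultiplier == 1 while kSystemPointerSize == 8
// gives kHeapLimitMultiplier == 2, so kMaxInitialOldGenerationSize above is
// 256 MB * 2 == 512 MB; a 32-bit build gets 1 for both, i.e. 256 MB.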
308
309 // These constants control heap configuration based on the physical memory.
310 static constexpr size_t kPhysicalMemoryToOldGenerationRatio = 4;
311 static constexpr size_t kOldGenerationLowMemory =
312 128 * MB * kHeapLimitMultiplier;
313 static constexpr size_t kNewLargeObjectSpaceToSemiSpaceRatio = 1;
314
315 static const int kTraceRingBufferSize = 512;
316 static const int kStacktraceBufferSize = 512;
317
318 // The minimum size of a HeapObject on the heap.
319 static const int kMinObjectSizeInTaggedWords = 2;
320
321 static size_t DefaultMinSemiSpaceSize();
322 V8_EXPORT_PRIVATE static size_t DefaultMaxSemiSpaceSize();
323 // Young generation size is the same for compressed heaps and 32-bit heaps.
324 static size_t OldGenerationToSemiSpaceRatio();
325 static size_t OldGenerationToSemiSpaceRatioLowMemory();
326
327 // Calculates the maximum amount of filler that could be required by the
328 // given alignment.
329 V8_EXPORT_PRIVATE static int GetMaximumFillToAlign(
330 AllocationAlignment alignment);
331 // Calculates the actual amount of filler required for a given address at the
332 // given alignment.
333 V8_EXPORT_PRIVATE static int GetFillToAlign(Address address,
334 AllocationAlignment alignment);
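// Illustrative sketch (not part of this header; assumes an Address `addr`):
// for kDoubleAligned allocations on a pointer-compressed heap the required
// filler is either 0 or kTaggedSize bytes.
//
//   int max_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
//   int fill = Heap::GetFillToAlign(addr, kDoubleAligned);
//   DCHECK_LE(fill, max_fill);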
335
336 // Returns the size of the initial area of a code-range, which is marked
337 // writable and reserved to contain unwind information.
338 static size_t GetCodeRangeReservedAreaSize();
339
340 [[noreturn]] V8_EXPORT_PRIVATE void FatalProcessOutOfMemory(
341 const char* location);
342
343 // Checks whether the space is valid.
344 static bool IsValidAllocationSpace(AllocationSpace space);
345
346 static inline bool IsYoungGenerationCollector(GarbageCollector collector) {
347 return collector == GarbageCollector::SCAVENGER ||
348        collector == GarbageCollector::MINOR_MARK_SWEEPER;
349 }
350
351 V8_EXPORT_PRIVATE static bool IsFreeSpaceValid(FreeSpace object);
352
357
358 // Copy block of memory from src to dst. Size of block should be aligned
359 // by pointer size.
360 static inline void CopyBlock(Address dst, Address src, int byte_size);
361
365
366 HeapProfiler* heap_profiler() const { return heap_profiler_.get(); }
367
368 // Notifies the heap that it is ok to start marking or other activities that
369 // should not happen during deserialization.
370 void NotifyDeserializationComplete();
371
372 // Weakens StrongDescriptorArray objects into regular DescriptorArray objects.
373 //
374 // Thread-safe.
375 void WeakenDescriptorArrays(
376 GlobalHandleVector<DescriptorArray> strong_descriptor_arrays);
377
378 void NotifyBootstrapComplete();
379
380 enum class OldGenerationExpansionNotificationOrigin {
381 // Specifies that the notification is coming from the client heap.
382 kFromClientHeap,
383 // Specifies that the notification is done within the same heap.
384 kFromSameHeap,
385 };
386
387 void NotifyOldGenerationExpansion(
388 LocalHeap* local_heap, AllocationSpace space, MutablePageMetadata* chunk,
389 OldGenerationExpansionNotificationOrigin =
390 OldGenerationExpansionNotificationOrigin::kFromSameHeap);
391
392 inline Address* NewSpaceAllocationTopAddress();
393 inline Address* NewSpaceAllocationLimitAddress();
394 inline Address* OldSpaceAllocationTopAddress();
395 inline Address* OldSpaceAllocationLimitAddress();
396
397 size_t NewSpaceSize();
398 size_t NewSpaceCapacity() const;
399 size_t NewSpaceTargetCapacity() const;
400
401 // Move len non-weak tagged elements from src_slot to dst_slot of dst_object.
402 // The source and destination memory ranges can overlap.
403 V8_EXPORT_PRIVATE void MoveRange(Tagged<HeapObject> dst_object,
404 ObjectSlot dst_slot, ObjectSlot src_slot,
405 int len, WriteBarrierMode mode);
406
407 // Copy len non-weak tagged elements from src_slot to dst_slot of dst_object.
408 // The source and destination memory ranges must not overlap.
409 template <typename TSlot>
410 V8_EXPORT_PRIVATE void CopyRange(Tagged<HeapObject> dst_object,
411                                  TSlot dst_slot, TSlot src_slot, int len,
412 WriteBarrierMode mode);
413
414 // Initialize a filler object to keep the ability to iterate over the heap
415 // when introducing gaps within pages. This method will verify that no slots
416 // are recorded in this free memory.
417 V8_EXPORT_PRIVATE void CreateFillerObjectAt(
418 Address addr, int size,
419 ClearFreedMemoryMode clear_memory_mode =
420         ClearFreedMemoryMode::kDontClearFreedMemory);
421
422 // Initialize a filler object at a specific address. Unlike
423 // `CreateFillerObjectAt` this method will not perform slot verification since
424 // this would race on background threads.
425 void CreateFillerObjectAtBackground(const WritableFreeSpace& free_space);
426
427 bool CanMoveObjectStart(Tagged<HeapObject> object);
428
429 bool IsImmovable(Tagged<HeapObject> object);
430
431 V8_EXPORT_PRIVATE static bool IsLargeObject(Tagged<HeapObject> object);
432
433 // Trim the given array from the left. Note that this relocates the object
434 // start and hence is only valid if there is only a single reference to it.
435 V8_EXPORT_PRIVATE Tagged<FixedArrayBase> LeftTrimFixedArray(
436 Tagged<FixedArrayBase> obj, int elements_to_trim);
437
438#define RIGHT_TRIMMABLE_ARRAY_LIST(V) \
439 V(ArrayList) \
440 V(ByteArray) \
441 V(FixedArray) \
442 V(FixedDoubleArray) \
443 V(TransitionArray) \
444 V(WeakFixedArray)
445
446 // Trim the given array from the right.
447 template <typename Array>
448 void RightTrimArray(Tagged<Array> object, int new_capacity, int old_capacity);
449
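// Illustrative sketch (not part of this header): shrinking a FixedArray.
// Left-trimming relocates the object start, so `array` must be the only
// reference to it; right-trimming shortens the object in place.
//
//   array = heap->LeftTrimFixedArray(array, /*elements_to_trim=*/2);
//   heap->RightTrimArray(array, /*new_capacity=*/8, /*old_capacity=*/14);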
450 // Converts the given boolean condition to JavaScript boolean value.
451 inline Tagged<Boolean> ToBoolean(bool condition);
452
453 // Notify the heap that a context has been disposed. A false
454 // `has_dependent_context` means a top-level context (no dependent contexts) was disposed.
455 V8_EXPORT_PRIVATE int NotifyContextDisposed(bool has_dependent_context);
456
457 void set_native_contexts_list(Tagged<Object> object) {
458 native_contexts_list_.store(object.ptr(), std::memory_order_release);
459 }
460
461 Tagged<Object> native_contexts_list() const {
462 return Tagged<Object>(
463     native_contexts_list_.load(std::memory_order_acquire));
464 }
465
466
467 void set_allocation_sites_list(Tagged<Object> object) {
468 allocation_sites_list_ = object;
469 }
470
471 Tagged<Object> allocation_sites_list() {
472 return allocation_sites_list_;
473 }
474
475 void set_dirty_js_finalization_registries_list(Tagged<Object> object) {
476 dirty_js_finalization_registries_list_ = object;
477 }
478 Tagged<Object> dirty_js_finalization_registries_list() {
479 return dirty_js_finalization_registries_list_;
480 }
481 void set_dirty_js_finalization_registries_list_tail(Tagged<Object> object) {
482 dirty_js_finalization_registries_list_tail_ = object;
483 }
484 Tagged<Object> dirty_js_finalization_registries_list_tail() {
485 return dirty_js_finalization_registries_list_tail_;
486 }
487
488 // Used in CreateAllocationSiteStub and the (de)serializer.
489 Address allocation_sites_list_address() {
490 return reinterpret_cast<Address>(&allocation_sites_list_);
491 }
492
493 // Traverse all the allocation_sites [nested_site and weak_next] in the list
494 // and, for each, call the visitor.
495 void ForeachAllocationSite(
496 Tagged<Object> list,
497 const std::function<void(Tagged<AllocationSite>)>& visitor);
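// Illustrative sketch (not part of this header): counting all allocation
// sites reachable from the global list.
//
//   size_t count = 0;
//   heap->ForeachAllocationSite(
//       heap->allocation_sites_list(),
//       [&count](Tagged<AllocationSite> site) { ++count; });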
498
499 // Number of mark-sweeps.
500 int ms_count() const { return ms_count_; }
501
502 // Checks whether the given object is allowed to be migrated from its
503 // current space into the given destination space. Used for debugging.
504 bool AllowedToBeMigrated(Tagged<Map> map, Tagged<HeapObject> object,
505 AllocationSpace dest);
506
507 void CheckHandleCount();
508
509 // Print short heap statistics.
510 void PrintShortHeapStatistics();
511
512 // Print statistics of freelists of old_space:
513 // with v8_flags.trace_gc_freelists: summary of each FreeListCategory.
514 // with v8_flags.trace_gc_freelists_verbose: also prints the statistics of
515 // each FreeListCategory of each page.
516 void PrintFreeListsStats();
517
518 // Dump heap statistics in JSON format.
519 void DumpJSONHeapStatistics(std::stringstream& stream);
520
521 inline HeapState gc_state() const {
522 return gc_state_.load(std::memory_order_relaxed);
523 }
524 V8_EXPORT_PRIVATE void SetGCState(HeapState state);
525 bool IsTearingDown() const { return gc_state() == TEAR_DOWN; }
526 bool IsInGC() const {
527 // Load the state only once and store it in a local variable. Otherwise
528 // multiple loads could return different states on background threads.
529 HeapState state = gc_state();
530 return state != NOT_IN_GC && state != TEAR_DOWN;
531 }
532 bool force_oom() const { return force_oom_; }
533
535 return ignore_local_gc_requests_depth_ > 0;
536 }
537
539 return pause_allocation_observers_depth_ == 0;
540 }
541
542 bool IsGCWithMainThreadStack() const;
543
544 // This method is only safe to use in a safepoint.
545 bool IsGCWithStack() const;
546
547 bool CanShortcutStringsDuringGC(GarbageCollector collector) const;
548
549 // Performs GC after background allocation failure.
550 void CollectGarbageForBackground(LocalHeap* local_heap);
551
552 //
553 // Support for the API.
554 //
555
556 void CreateReadOnlyApiObjects();
557 void CreateMutableApiObjects();
558
559 V8_EXPORT_PRIVATE void MemoryPressureNotification(
560 v8::MemoryPressureLevel level, bool is_isolate_locked);
561 void CheckMemoryPressure();
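// Illustrative sketch (not part of this header): an embedder signalling
// critical memory pressure. `is_isolate_locked` tells the heap whether it
// may collect immediately or has to schedule an interrupt.
//
//   heap->MemoryPressureNotification(v8::MemoryPressureLevel::kCritical,
//                                    /*is_isolate_locked=*/true);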
562
563 V8_EXPORT_PRIVATE void AddNearHeapLimitCallback(v8::NearHeapLimitCallback,
564 void* data);
565 V8_EXPORT_PRIVATE void RemoveNearHeapLimitCallback(
566 v8::NearHeapLimitCallback callback, size_t heap_limit);
567 V8_EXPORT_PRIVATE void AutomaticallyRestoreInitialHeapLimit(
568 double threshold_percent);
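// Illustrative sketch (not part of this header): a near-heap-limit callback
// that grants 50% more headroom; the returned value becomes the new limit
// (see v8::NearHeapLimitCallback in include/v8-callbacks.h).
//
//   size_t OnNearHeapLimit(void* data, size_t current_heap_limit,
//                          size_t initial_heap_limit) {
//     return current_heap_limit + current_heap_limit / 2;
//   }
//   heap->AddNearHeapLimitCallback(OnNearHeapLimit, /*data=*/nullptr);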
569
570 V8_EXPORT_PRIVATE void AppendArrayBufferExtension(
572 V8_EXPORT_PRIVATE void ResizeArrayBufferExtension(
573 ArrayBufferExtension* extension, int64_t delta);
574 void DetachArrayBufferExtension(ArrayBufferExtension* extension);
575
576 V8_EXPORT_PRIVATE void ExpandNewSpaceSizeForTesting();
577 V8_EXPORT_PRIVATE void ReduceNewSpaceSizeForTesting();
578
579 IsolateSafepoint* safepoint() { return safepoint_.get(); }
580
581 V8_EXPORT_PRIVATE double MonotonicallyIncreasingTimeInMs() const;
582
583#if DEBUG
584 void VerifyNewSpaceTop();
585#endif // DEBUG
586
587 void RecordStats(HeapStats* stats);
588
589 bool MeasureMemory(std::unique_ptr<v8::MeasureMemoryDelegate> delegate,
590                      v8::MeasureMemoryExecution execution);
591
592 std::unique_ptr<v8::MeasureMemoryDelegate> CreateDefaultMeasureMemoryDelegate(
595
596 void IncrementDeferredCounts(
598
599 int NextScriptId();
600 int NextDebuggingId();
601 int NextStackTraceId();
602 inline uint32_t GetNextTemplateSerialNumber();
603
604 void SetSerializedObjects(Tagged<HeapObject> objects);
605 void SetSerializedGlobalProxySizes(Tagged<FixedArray> sizes);
606
607 void SetBasicBlockProfilingData(DirectHandle<ArrayList> list);
608
610 // For post-mortem debugging.
610 void RememberUnmappedPage(Address page, bool compacted);
611
612 uint64_t external_memory_hard_limit() {
613 return external_memory_.low_since_mark_compact() +
614 max_old_generation_size() / 2;
615 }
616
617 V8_INLINE uint64_t external_memory() const;
618 V8_EXPORT_PRIVATE uint64_t external_memory_limit_for_interrupt();
619 V8_EXPORT_PRIVATE uint64_t external_memory_soft_limit();
620 uint64_t UpdateExternalMemory(int64_t delta);
621
622 V8_EXPORT_PRIVATE size_t YoungArrayBufferBytes();
623 V8_EXPORT_PRIVATE size_t OldArrayBufferBytes();
624
625 uint64_t backing_store_bytes() const {
626 return backing_store_bytes_.load(std::memory_order_relaxed);
627 }
628
629 void CompactWeakArrayLists();
630
631 V8_EXPORT_PRIVATE void AddRetainedMaps(DirectHandle<NativeContext> context,
632                                          GlobalHandleVector<Map> maps);
633
634 // This event is triggered after an object is moved to a new place.
635 void OnMoveEvent(Tagged<HeapObject> source, Tagged<HeapObject> target,
636 int size_in_bytes);
637
638 bool deserialization_complete() const { return deserialization_complete_; }
639
640 // We can only invoke Safepoint() on the main thread local heap after
641 // deserialization is complete. Before that, main_thread_local_heap_ might be
642 // null.
643 V8_INLINE bool CanSafepoint() const { return deserialization_complete(); }
644
645 bool HasLowAllocationRate();
646 bool HasHighFragmentation();
647
648 void ActivateMemoryReducerIfNeeded();
649
650 V8_EXPORT_PRIVATE bool ShouldOptimizeForMemoryUsage();
651
652 // Returns true when GC should optimize for battery.
653 V8_EXPORT_PRIVATE bool ShouldOptimizeForBattery() const;
654
655 bool HighMemoryPressure() {
656 return memory_pressure_level_.load(std::memory_order_relaxed) !=
657        v8::MemoryPressureLevel::kNone;
658 }
659
660 bool CollectionRequested();
661
662 void CheckCollectionRequested();
663
664 void RestoreHeapLimit(size_t heap_limit) {
665 // Do not set the limit lower than the live size + some slack.
666 size_t min_limit = SizeOfObjects() + SizeOfObjects() / 4;
667 SetOldGenerationAndGlobalMaximumSize(
668 std::min(max_old_generation_size(), std::max(heap_limit, min_limit)));
669 }
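// Worked example (illustrative): with SizeOfObjects() == 400 MB the slack
// makes min_limit == 500 MB, so a requested heap_limit of 256 MB is raised
// to 500 MB; requests above max_old_generation_size() are clamped to that
// maximum.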
670
671 // ===========================================================================
672 // Initialization. ===========================================================
673 // ===========================================================================
674
675 void ConfigureHeap(const v8::ResourceConstraints& constraints,
676 v8::CppHeap* cpp_heap);
677 void ConfigureHeapDefault();
678
679 // Prepares the heap, setting up for deserialization.
680 void SetUp(LocalHeap* main_thread_local_heap);
681
682 // Sets read-only heap and space.
683 void SetUpFromReadOnlyHeap(ReadOnlyHeap* ro_heap);
684
685 void ReplaceReadOnlySpace(SharedReadOnlySpace* shared_ro_space);
686
687 // Sets up the heap memory without creating any objects.
688 void SetUpSpaces(LinearAllocationArea& new_allocation_info,
689 LinearAllocationArea& old_allocation_info);
690
691 // Prepares the heap, setting up for deserialization.
692 void InitializeMainThreadLocalHeap(LocalHeap* main_thread_local_heap);
693
694 // (Re-)Initialize hash seed from flag or RNG.
695 void InitializeHashSeed();
696
697 // Invoked once for the process from V8::Initialize.
698 static void InitializeOncePerProcess();
699
700 // Bootstraps the object heap with the core set of objects required to run.
701 // Returns whether it succeeded.
702 bool CreateReadOnlyHeapObjects();
703 bool CreateMutableHeapObjects();
704
705 // Create ObjectStats if live_object_stats_ or dead_object_stats_ are nullptr.
706 void CreateObjectStats();
707
708 // Sets the TearDown state, so no new GC tasks get posted.
709 void StartTearDown();
710
711 // Destroys all data that might require the shared heap.
712 void TearDownWithSharedHeap();
713
714 // Destroys all memory allocated by the heap.
715 void TearDown();
716
717 // Returns whether SetUp has been called.
718 bool HasBeenSetUp() const;
719
720 // ===========================================================================
721 // Getters for spaces. =======================================================
722 // ===========================================================================
723
724 V8_INLINE Address NewSpaceTop();
725 V8_INLINE Address NewSpaceLimit();
726
727 NewSpace* new_space() const { return new_space_; }
728 inline PagedNewSpace* paged_new_space() const;
729 inline SemiSpaceNewSpace* semi_space_new_space() const;
730 OldSpace* old_space() const { return old_space_; }
731 inline StickySpace* sticky_space() const;
732 CodeSpace* code_space() const { return code_space_; }
733 SharedSpace* shared_space() const { return shared_space_; }
734 OldLargeObjectSpace* lo_space() const { return lo_space_; }
735 CodeLargeObjectSpace* code_lo_space() const { return code_lo_space_; }
736 SharedLargeObjectSpace* shared_lo_space() const { return shared_lo_space_; }
737 NewLargeObjectSpace* new_lo_space() const { return new_lo_space_; }
738 ReadOnlySpace* read_only_space() const { return read_only_space_; }
739 TrustedSpace* trusted_space() const { return trusted_space_; }
740 SharedTrustedSpace* shared_trusted_space() const {
741 return shared_trusted_space_;
742 }
743 TrustedLargeObjectSpace* trusted_lo_space() const {
744 return trusted_lo_space_;
745 }
746 SharedTrustedLargeObjectSpace* shared_trusted_lo_space() const {
747 return shared_trusted_lo_space_;
748 }
749
750 PagedSpace* shared_allocation_space() const {
751 return shared_allocation_space_;
752 }
753 OldLargeObjectSpace* shared_lo_allocation_space() const {
754 return shared_lo_allocation_space_;
755 }
756 SharedTrustedSpace* shared_trusted_allocation_space() const {
757 return shared_trusted_allocation_space_;
758 }
759 SharedTrustedLargeObjectSpace* shared_trusted_lo_allocation_space() const {
760 return shared_trusted_lo_allocation_space_;
761 }
762
763 inline PagedSpace* paged_space(int idx) const;
764 inline Space* space(int idx) const;
765
766#ifdef V8_COMPRESS_POINTERS
767 ExternalPointerTable::Space* young_external_pointer_space() {
768 return &young_external_pointer_space_;
769 }
770 ExternalPointerTable::Space* old_external_pointer_space() {
771 return &old_external_pointer_space_;
772 }
773 ExternalPointerTable::Space* read_only_external_pointer_space() {
774 return &read_only_external_pointer_space_;
775 }
776 CppHeapPointerTable::Space* cpp_heap_pointer_space() {
777 return &cpp_heap_pointer_space_;
778 }
779#endif // V8_COMPRESS_POINTERS
780
781#ifdef V8_ENABLE_SANDBOX
782 TrustedPointerTable::Space* trusted_pointer_space() {
783 return &trusted_pointer_space_;
784 }
785
786 CodePointerTable::Space* code_pointer_space() { return &code_pointer_space_; }
787
788#endif // V8_ENABLE_SANDBOX
789
790#ifdef V8_ENABLE_LEAPTIERING
791 JSDispatchTable::Space* js_dispatch_table_space() {
792 return &js_dispatch_table_space_;
793 }
794#endif // V8_ENABLE_LEAPTIERING
795
796 // ===========================================================================
797 // Getters to other components. ==============================================
798 // ===========================================================================
799
800 GCTracer* tracer() { return tracer_.get(); }
801 const GCTracer* tracer() const { return tracer_.get(); }
802
803 MemoryAllocator* memory_allocator() { return memory_allocator_.get(); }
804 const MemoryAllocator* memory_allocator() const {
805 return memory_allocator_.get();
806 }
807
808 inline Isolate* isolate() const;
809
810 // Check if we run on isolate's main thread.
811 inline bool IsMainThread() const;
812
813 MarkCompactCollector* mark_compact_collector() {
814 return mark_compact_collector_.get();
815 }
816
817 MinorMarkSweepCollector* minor_mark_sweep_collector() {
818 return minor_mark_sweep_collector_.get();
819 }
820
821 Sweeper* sweeper() { return sweeper_.get(); }
822
823 ArrayBufferSweeper* array_buffer_sweeper() {
824 return array_buffer_sweeper_.get();
825 }
826
827 // The potentially overreserved address space region reserved by the code
828 // range if it exists or empty region otherwise.
829 const base::AddressRegion& code_region();
830
831 CodeRange* code_range() {
832#ifdef V8_COMPRESS_POINTERS
833 return code_range_;
834#else
835 return code_range_.get();
836#endif
837 }
838
839 // The base of the code range if it exists or null address.
840 inline Address code_range_base();
841
842 LocalHeap* main_thread_local_heap() { return main_thread_local_heap_; }
843
844 Heap* AsHeap() { return this; }
845
846 // ===========================================================================
847 // Root set access. ==========================================================
848 // ===========================================================================
849
850 // Shortcut to the roots table stored in the Isolate.
851 V8_INLINE RootsTable& roots_table();
852
853// Heap root getters.
854#define ROOT_ACCESSOR(type, name, CamelName) inline Tagged<type> name();
855 MUTABLE_ROOT_LIST(ROOT_ACCESSOR)
856#undef ROOT_ACCESSOR
857
858 V8_INLINE Tagged<FixedArray> single_character_string_table();
859
860 V8_INLINE void SetRootMaterializedObjects(Tagged<FixedArray> objects);
861 V8_INLINE void SetRootScriptList(Tagged<Object> value);
863 V8_INLINE void SetMessageListeners(Tagged<ArrayList> value);
864 V8_INLINE void SetFunctionsMarkedForManualOptimization(
865 Tagged<Object> bytecode);
866
867#if V8_ENABLE_WEBASSEMBLY
868 V8_INLINE void SetWasmCanonicalRttsAndJSToWasmWrappers(
869 Tagged<WeakFixedArray> rtts, Tagged<WeakFixedArray> js_to_wasm_wrappers);
870#endif
871
872 StrongRootsEntry* RegisterStrongRoots(const char* label, FullObjectSlot start,
873                                       FullObjectSlot end);
874 void UnregisterStrongRoots(StrongRootsEntry* entry);
875 void UpdateStrongRoots(StrongRootsEntry* entry, FullObjectSlot start,
876                        FullObjectSlot end);
877
878 void SetBuiltinsConstantsTable(Tagged<FixedArray> cache);
879 void SetDetachedContexts(Tagged<WeakArrayList> detached_contexts);
880
881 void EnqueueDirtyJSFinalizationRegistry(
882 Tagged<JSFinalizationRegistry> finalization_registry,
883 std::function<void(Tagged<HeapObject> object, ObjectSlot slot,
884 Tagged<Object> target)>
885 gc_notify_updated_slot);
886
888 DequeueDirtyJSFinalizationRegistry();
889
890 // Called from Heap::NotifyContextDisposed to remove all
891 // FinalizationRegistries with {context} from the dirty list when the context
892 // e.g. navigates away or is detached. If the dirty list is empty afterwards,
893 // the cleanup task is aborted if needed.
894 void RemoveDirtyFinalizationRegistriesOnContext(
895 Tagged<NativeContext> context);
896
897 bool HasDirtyJSFinalizationRegistries();
898
899 void PostFinalizationRegistryCleanupTaskIfNeeded();
900
900
901 void set_is_finalization_registry_cleanup_task_posted(bool posted) {
902 is_finalization_registry_cleanup_task_posted_ = posted;
903 }
904
905 bool is_finalization_registry_cleanup_task_posted() {
906 return is_finalization_registry_cleanup_task_posted_;
907 }
908
909 V8_EXPORT_PRIVATE void KeepDuringJob(DirectHandle<HeapObject> target);
910 void ClearKeptObjects();
911
912 // ===========================================================================
913 // Inline allocation. ========================================================
914 // ===========================================================================
915
916 // Switch whether inline bump-pointer allocation should be used.
917 V8_EXPORT_PRIVATE void EnableInlineAllocation();
918 V8_EXPORT_PRIVATE void DisableInlineAllocation();
919
920 // ===========================================================================
921 // Methods triggering GCs. ===================================================
922 // ===========================================================================
923
924 // Performs garbage collection operation.
925 // Returns whether there is a chance that another major GC could
926 // collect more garbage.
927 V8_EXPORT_PRIVATE void CollectGarbage(
929 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
930
931 // Performs a full garbage collection.
932 V8_EXPORT_PRIVATE void CollectAllGarbage(
933 GCFlags gc_flags, GarbageCollectionReason gc_reason,
934 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
935
936 // Last hope garbage collection. Will try to free as much memory as possible
937 // with multiple rounds of garbage collection.
938 V8_EXPORT_PRIVATE void CollectAllAvailableGarbage(
939 GarbageCollectionReason gc_reason);
940
941 // Precise garbage collection that potentially finalizes already running
942 // incremental marking before performing an atomic garbage collection.
943 // Only use if absolutely necessary or in tests to avoid floating garbage!
944 V8_EXPORT_PRIVATE void PreciseCollectAllGarbage(
945 GCFlags gc_flags, GarbageCollectionReason gc_reason,
946 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
947
948 // Performs garbage collection operation for the shared heap.
949 V8_EXPORT_PRIVATE bool CollectGarbageShared(
950 LocalHeap* local_heap, GarbageCollectionReason gc_reason);
951
952 // Requests garbage collection from some other thread.
953 V8_EXPORT_PRIVATE bool CollectGarbageFromAnyThread(
954 LocalHeap* local_heap,
955 GarbageCollectionReason gc_reason =
957
958 // Reports an external memory pressure event; either performs a major GC or
959 // completes incremental marking in order to free external resources.
960 void HandleExternalMemoryInterrupt();
961
964
969
970 // Invoked when GC was requested via the stack guard.
971 void HandleGCRequest();
972
973 // ===========================================================================
974 // Iterators. ================================================================
975 // ===========================================================================
976
977 // In the case of shared GC, kMainIsolate is used for the main isolate and
978 // kClientIsolate for the (other) client isolates.
979 enum class IterateRootsMode { kMainIsolate, kClientIsolate };
980
981 // None of these methods iterate over the read-only roots. To do this use
982 // ReadOnlyRoots::Iterate. Read-only root iteration is not necessary for
983 // garbage collection and is usually only performed as part of
984 // (de)serialization or heap verification.
985
986 // Iterates over the strong roots and the weak roots.
987 void IterateRoots(
989 IterateRootsMode roots_mode = IterateRootsMode::kMainIsolate);
990 void IterateRootsIncludingClients(RootVisitor* v,
992
993 // Iterates over entries in the smi roots list. Only interesting to the
994 // serializer/deserializer, since GC does not care about smis.
995 void IterateSmiRoots(RootVisitor* v);
996 // Iterates over weak string tables.
997 void IterateWeakRoots(RootVisitor* v, base::EnumSet<SkipRoot> options);
998 void IterateWeakGlobalHandles(RootVisitor* v);
999 void IterateBuiltins(RootVisitor* v);
1000
1001 void IterateStackRoots(RootVisitor* v);
1002
1003 void IterateConservativeStackRoots(
1004 RootVisitor* root_visitor,
1005 IterateRootsMode roots_mode = IterateRootsMode::kMainIsolate);
1006 void IterateConservativeStackRoots(::heap::base::StackVisitor* stack_visitor);
1007
1008 void IterateRootsForPrecisePinning(RootVisitor* visitor);
1009
1010 // ===========================================================================
1011 // Remembered set API. =======================================================
1012 // ===========================================================================
1013
1014 // Used to query incremental marking status from generated code.
1015 uint8_t* IsMarkingFlagAddress();
1016 uint8_t* IsMinorMarkingFlagAddress();
1017
1018 void ClearRecordedSlotRange(Address start, Address end);
1019 static int InsertIntoRememberedSetFromCode(MutablePageMetadata* chunk,
1020 size_t slot_offset);
1021
1022#ifdef DEBUG
1023 void VerifySlotRangeHasNoRecordedSlots(Address start, Address end);
1024#endif
1025
1026 // ===========================================================================
1027 // Incremental marking API. ==================================================
1028 // ===========================================================================
1029
1030 GCFlags GCFlagsForIncrementalMarking() {
1031 return ShouldOptimizeForMemoryUsage() ? GCFlag::kReduceMemoryFootprint
1032                                       : GCFlag::kNoFlags;
1033 }
1034
1035 // Starts incremental marking assuming incremental marking is currently
1036 // stopped.
1037 V8_EXPORT_PRIVATE void StartIncrementalMarking(
1038 GCFlags gc_flags, GarbageCollectionReason gc_reason,
1041
1042 V8_EXPORT_PRIVATE void StartIncrementalMarkingOnInterrupt();
1043
1044 V8_EXPORT_PRIVATE void StartIncrementalMarkingIfAllocationLimitIsReached(
1045 LocalHeap* local_heap, GCFlags gc_flags,
1047
1048 // Synchronously finalizes incremental marking.
1049 V8_EXPORT_PRIVATE void FinalizeIncrementalMarkingAtomically(
1050 GarbageCollectionReason gc_reason);
1051
1052 // Synchronously finalizes incremental marking if it is currently running.
1053 V8_EXPORT_PRIVATE void FinalizeIncrementalMarkingAtomicallyIfRunning(
1054 GarbageCollectionReason gc_reason);
1055
1056 V8_EXPORT_PRIVATE void CompleteSweepingFull();
1057 void CompleteSweepingYoung();
1058
1059 // Ensures that sweeping is finished for that object's page.
1060 void EnsureSweepingCompletedForObject(Tagged<HeapObject> object);
1061
1062 IncrementalMarking* incremental_marking() const {
1063 return incremental_marking_.get();
1064 }
1065
1066 // ===========================================================================
1067 // Concurrent marking API. ===================================================
1068 // ===========================================================================
1069
1070 ConcurrentMarking* concurrent_marking() const {
1071 return concurrent_marking_.get();
1072 }
1073
1074 // The runtime uses this function to notify potentially unsafe object layout
1075 // changes that require special synchronization with the concurrent marker.
1076 // By default recorded slots in the object are invalidated. Pass
1077 // InvalidateRecordedSlots::kNo if this is not necessary or to perform this
1078 // manually.
1079 // If the object contains external pointer slots, then these need to be
1080 // invalidated as well if a GC marker may have observed them previously. To
1081 // do this, pass HasExternalPointerSlots::kYes.
1082 void NotifyObjectLayoutChange(
1083     Tagged<HeapObject> object, const DisallowGarbageCollection&,
1084     InvalidateRecordedSlots invalidate_recorded_slots,
1085 InvalidateExternalPointerSlots invalidate_external_pointer_slots,
1086 int new_size = 0);
1087 V8_EXPORT_PRIVATE static void NotifyObjectLayoutChangeDone(
1088 Tagged<HeapObject> object);
1089
1090 // The runtime uses this function to inform the GC of object size changes.
1091 // The GC will fill the freed-up area with a filler object and might clear
1092 // recorded slots in that area.
1093 void NotifyObjectSizeChange(Tagged<HeapObject>, int old_size, int new_size,
1094 ClearRecordedSlots clear_recorded_slots);
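// Illustrative sketch (not part of this header): shrinking an object in
// place; the GC puts a filler over the freed tail so the page stays
// iterable.
//
//   heap->NotifyObjectSizeChange(object, /*old_size=*/64, /*new_size=*/32,
//                                ClearRecordedSlots::kYes);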
1095
1096 // ===========================================================================
1097 // Deoptimization support API. ===============================================
1098 // ===========================================================================
1099
1100 // Setters for code offsets of well-known deoptimization targets.
1101 void SetConstructStubCreateDeoptPCOffset(int pc_offset);
1102 void SetConstructStubInvokeDeoptPCOffset(int pc_offset);
1103 void SetDeoptPCOffsetAfterAdaptShadowStack(int pc_offset);
1104 void SetInterpreterEntryReturnPCOffset(int pc_offset);
1105
1106 void DeoptMarkedAllocationSites();
1107
1108 // ===========================================================================
1109 // Unified heap (C++) support. ===============================================
1110 // ===========================================================================
1111
1112 v8::CppHeap* cpp_heap() const { return cpp_heap_; }
1113
1114 std::optional<StackState> overridden_stack_state() const;
1115
1116 // Set stack information from the stack of the current thread.
1117 V8_EXPORT_PRIVATE void SetStackStart();
1118
1119 // Stack information of the main thread.
1120 V8_EXPORT_PRIVATE ::heap::base::Stack& stack();
1121 V8_EXPORT_PRIVATE const ::heap::base::Stack& stack() const;
1122
1123 // ===========================================================================
1124 // Embedder roots optimizations. =============================================
1125 // ===========================================================================
1126
1128 void SetEmbedderRootsHandler(EmbedderRootsHandler* handler);
1129
1130 EmbedderRootsHandler* GetEmbedderRootsHandler() const;
1131
1132 // ===========================================================================
1133 // External string table API. ================================================
1134 // ===========================================================================
1135
1136 // Registers an external string.
1137 inline void RegisterExternalString(Tagged<String> string);
1138
1139 // Called when a string's resource is changed. The old and new payload sizes
1140 // are passed as arguments to the method.
1141 V8_EXPORT_PRIVATE void UpdateExternalString(Tagged<String> string,
1142 size_t old_payload,
1143 size_t new_payload);
1144
1145 // Finalizes an external string by deleting the associated external
1146 // data and clearing the resource pointer.
1147 inline void FinalizeExternalString(Tagged<String> string);
1148
1149 static Tagged<String> UpdateYoungReferenceInExternalStringTableEntry(
1150 Heap* heap, FullObjectSlot pointer);
1151
1152 // ===========================================================================
1153 // Methods checking/returning the space of a given object/address. ===========
1154 // ===========================================================================
1155
1156 // Returns whether the object resides in new space.
1157 static inline bool InFromPage(Tagged<Object> object);
1158 static inline bool InFromPage(Tagged<MaybeObject> object);
1159 static inline bool InFromPage(Tagged<HeapObject> heap_object);
1160 static inline bool InToPage(Tagged<Object> object);
1161 static inline bool InToPage(Tagged<MaybeObject> object);
1162 static inline bool InToPage(Tagged<HeapObject> heap_object);
1163
1164 // Returns whether the object resides in old space.
1165 inline bool InOldSpace(Tagged<Object> object);
1166
1167 // Checks whether an address/object is in the non-read-only heap (including
1168 // auxiliary area and unused area). Use IsValidHeapObject if checking both
1169 // heaps is required.
1170 V8_EXPORT_PRIVATE bool Contains(Tagged<HeapObject> value) const;
1171 // Same as above, but checks whether the object resides in any of the code
1172 // spaces.
1173 V8_EXPORT_PRIVATE bool ContainsCode(Tagged<HeapObject> value) const;
1174
1175 // Checks whether an address/object is in the non-read-only heap (including
1176 // auxiliary area and unused area). Use IsValidHeapObject if checking both
1177 // heaps is required.
1178 V8_EXPORT_PRIVATE bool SharedHeapContains(Tagged<HeapObject> value) const;
1179
1180 // Returns whether the object must be in the shared old space.
1181 V8_EXPORT_PRIVATE bool MustBeInSharedOldSpace(Tagged<HeapObject> value);
1182
1183 // Checks whether an address/object is in a space.
1184 // Currently used by tests, serialization and heap verification only.
1185 V8_EXPORT_PRIVATE bool InSpace(Tagged<HeapObject> value,
1186 AllocationSpace space) const;
1187
1188 // Slow methods that can be used for verification as they can also be used
1189 // with off-heap Addresses.
1190 V8_EXPORT_PRIVATE bool InSpaceSlow(Address addr, AllocationSpace space) const;
1191
1192 static inline Heap* FromWritableHeapObject(Tagged<HeapObject> obj);
1193
1194 // ===========================================================================
1195 // Object statistics tracking. ===============================================
1196 // ===========================================================================
1197
1198 // Returns the number of buckets used by object statistics tracking during a
1199 // major GC. Note that the following methods fail gracefully when the bounds
1200 // are exceeded.
1201 size_t NumberOfTrackedHeapObjectTypes();
1202
1203 // Returns object statistics about count and size at the last major GC.
1204 // Objects are being grouped into buckets that roughly resemble existing
1205 // instance types.
1206 size_t ObjectCountAtLastGC(size_t index);
1207 size_t ObjectSizeAtLastGC(size_t index);
1208
1209 // Retrieves names of buckets used by object statistics tracking.
1210 bool GetObjectTypeName(size_t index, const char** object_type,
1211 const char** object_sub_type);
1212
1213 // The total number of native context objects on the heap.
1214 size_t NumberOfNativeContexts();
1215 // The total number of native contexts that were detached but were not
1216 // garbage collected yet.
1217 size_t NumberOfDetachedContexts();
1218
1219 // ===========================================================================
1220 // Code statistics. ==========================================================
1221 // ===========================================================================
1222
1223
1224 // Collect code (Code and BytecodeArray objects) statistics.
1225 void CollectCodeStatistics();
1226
1227 // ===========================================================================
1228 // GC statistics. ============================================================
1229 // ===========================================================================
1230
1231 // Returns the maximum amount of memory reserved for the heap.
1232 V8_EXPORT_PRIVATE size_t MaxReserved() const;
1233 size_t MaxSemiSpaceSize() { return max_semi_space_size_; }
1234 size_t InitialSemiSpaceSize() { return initial_semispace_size_; }
1235 size_t MaxOldGenerationSize() { return max_old_generation_size(); }
1236
1237 // Limit on the max old generation size imposed by the underlying allocator.
1238 V8_EXPORT_PRIVATE static size_t AllocatorLimitOnMaxOldGenerationSize();
1239
1240 V8_EXPORT_PRIVATE static size_t HeapSizeFromPhysicalMemory(
1241 uint64_t physical_memory);
1242 V8_EXPORT_PRIVATE static void GenerationSizesFromHeapSize(
1243 size_t heap_size, size_t* young_generation_size,
1244 size_t* old_generation_size);
1245 V8_EXPORT_PRIVATE static size_t YoungGenerationSizeFromOldGenerationSize(
1246 size_t old_generation_size);
1247 V8_EXPORT_PRIVATE static size_t YoungGenerationSizeFromSemiSpaceSize(
1248 size_t semi_space_size);
1249 V8_EXPORT_PRIVATE static size_t SemiSpaceSizeFromYoungGenerationSize(
1250 size_t young_generation_size);
1251 V8_EXPORT_PRIVATE static size_t MinYoungGenerationSize();
1252 V8_EXPORT_PRIVATE static size_t MinOldGenerationSize();
1253 V8_EXPORT_PRIVATE static size_t MaxOldGenerationSize(
1254 uint64_t physical_memory);
1255
1256 // Returns the capacity of the heap in bytes w/o growing. Heap grows when
1257 // more spaces are needed until it reaches the limit.
1258 size_t Capacity();
1259
1260 // Returns the capacity of the old generation.
1261 V8_EXPORT_PRIVATE size_t OldGenerationCapacity() const;
1262
1263 base::Mutex* heap_expansion_mutex() { return &heap_expansion_mutex_; }
1264
1265 // Returns the amount of memory currently committed for the heap.
1266 size_t CommittedMemory();
1267
1268 // Returns the amount of memory currently committed for the old space.
1269 size_t CommittedOldGenerationMemory();
1270
1271 // Returns the amount of executable memory currently committed for the heap.
1272 size_t CommittedMemoryExecutable();
1273
1274 // Returns the amount of physical memory currently committed for the heap.
1275 size_t CommittedPhysicalMemory();
1276
1277 // Returns the maximum amount of memory ever committed for the heap.
1278 size_t MaximumCommittedMemory() { return maximum_committed_; }
1279
1280 // Updates the maximum committed memory for the heap. Should be called
1281 // whenever a space grows.
1282 void UpdateMaximumCommitted();
1283
1284 // Returns the available bytes in space w/o growing.
1285 // Heap doesn't guarantee that it can allocate an object that requires
1286 // all available bytes. Check MaxHeapObjectSize() instead.
1287 size_t Available();
1288
1289 // Returns size of all objects residing in the heap.
1290 V8_EXPORT_PRIVATE size_t SizeOfObjects();
1291
1292 // Returns size of all global handles in the heap.
1293 V8_EXPORT_PRIVATE size_t TotalGlobalHandlesSize();
1294
1295 // Returns size of all allocated/used global handles in the heap.
1296 V8_EXPORT_PRIVATE size_t UsedGlobalHandlesSize();
1297
1298 void UpdateSurvivalStatistics(int start_new_space_size);
1299
1300 inline void IncrementPromotedObjectsSize(size_t object_size) {
1301 promoted_objects_size_ += object_size;
1302 }
1303 inline size_t promoted_objects_size() { return promoted_objects_size_; }
1304
1305 inline void IncrementNewSpaceSurvivingObjectSize(size_t object_size) {
1306 new_space_surviving_object_size_ += object_size;
1307 }
1308 inline size_t new_space_surviving_object_size() {
1309 return new_space_surviving_object_size_;
1310 }
1311
1312 inline size_t SurvivedYoungObjectSize() {
1313 return promoted_objects_size_ + new_space_surviving_object_size_;
1314 }
1315
1316 inline void IncrementNodesDiedInNewSpace(int count) {
1317 nodes_died_in_new_space_ += count;
1318 }
1319
1320 inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; }
1321
1322 inline void IncrementNodesPromoted() { nodes_promoted_++; }
1323
1324 inline void IncrementYoungSurvivorsCounter(size_t survived) {
1325 survived_since_last_expansion_ += survived;
1326 }
1327
1328 V8_EXPORT_PRIVATE size_t NewSpaceAllocationCounter() const;
1329
1330 void set_new_space_allocation_counter(size_t new_value) {
1331 new_space_allocation_counter_ = new_value;
1332 }
1333
1334 void UpdateOldGenerationAllocationCounter() {
1335 old_generation_allocation_counter_at_last_gc_ =
1336 OldGenerationAllocationCounter();
1337 }
1338
1339 size_t OldGenerationAllocationCounter() {
1340 return old_generation_allocation_counter_at_last_gc_ +
1341 PromotedSinceLastGC();
1342 }
1343
1344 size_t EmbedderAllocationCounter() const;
1345
1346 // This should be used only for testing.
1347 void set_old_generation_allocation_counter_at_last_gc(size_t new_value) {
1348 old_generation_allocation_counter_at_last_gc_ = new_value;
1349 }
1350
1351 int gc_count() const { return gc_count_; }
1352
1353 bool is_current_gc_forced() const { return is_current_gc_forced_; }
1354
1355 GarbageCollector current_or_last_garbage_collector() const {
1356 return current_or_last_garbage_collector_;
1357 }
1358
1359 // Returns whether the currently in-progress GC should avoid increasing the
1360 // ages on any objects that live for a set number of collections.
1361 bool ShouldCurrentGCKeepAgesUnchanged() const {
1362 return is_current_gc_forced_ || is_current_gc_for_heap_profiler_;
1363 }
1364
1365 // Returns the size of objects residing in non-new spaces.
1366 // Excludes external memory held by those objects.
1367 V8_EXPORT_PRIVATE size_t OldGenerationSizeOfObjects() const;
1368
1369 // Returns the amount of wasted bytes in non-new spaces.
1370 V8_EXPORT_PRIVATE size_t OldGenerationWastedBytes() const;
1371
1372 // Returns the amount of bytes in non-new spaces not available for allocation,
1373 // including bytes allocated and wasted.
1374 V8_EXPORT_PRIVATE size_t OldGenerationConsumedBytes() const;
1375
1376 // Returns the size of objects residing in new spaces.
1377 // Excludes external memory held by those objects.
1378 V8_EXPORT_PRIVATE size_t YoungGenerationSizeOfObjects() const;
1379
1380 // Returns the amount of wasted bytes in new spaces.
1381 V8_EXPORT_PRIVATE size_t YoungGenerationWastedBytes() const;
1382
1383 // Returns the amount of bytes in new space not available for allocation,
1384 // including bytes allocated and wasted.
1385 V8_EXPORT_PRIVATE size_t YoungGenerationConsumedBytes() const;
1386
1387 // Returns the size of objects held by the EmbedderHeapTracer.
1388 V8_EXPORT_PRIVATE size_t EmbedderSizeOfObjects() const;
1389
1390 // Returns the global size of objects (embedder + V8 non-new spaces).
1391 V8_EXPORT_PRIVATE size_t GlobalSizeOfObjects() const;
1392
1393 // Returns the global amount of wasted bytes.
1394 V8_EXPORT_PRIVATE size_t GlobalWastedBytes() const;
1395
1396 // Returns the global amount of bytes not available for allocation, including
1397 // bytes allocated and wasted.
1398 V8_EXPORT_PRIVATE size_t GlobalConsumedBytes() const;
1399
1400 // Returns the size of objects in old generation after the last MarkCompact
1401 // GC.
1402 V8_EXPORT_PRIVATE size_t OldGenerationConsumedBytesAtLastGC() const;
1403
1404 // Returns the global amount of bytes after the last MarkCompact GC.
1405 V8_EXPORT_PRIVATE size_t GlobalConsumedBytesAtLastGC() const;
1406
1407 // We allow incremental marking to overshoot the V8 and global allocation
1408 // limit for performance reasons. If the overshoot is too large then we are
1409 // more eager to finalize incremental marking.
1410 bool AllocationLimitOvershotByLargeMargin() const;
1411
1412 // Return the maximum size objects can be before having to allocate them as
1413 // large objects. This takes into account allocating in the code space for
1414 // which the size of the allocatable space per V8 page may depend on the OS
1415 // page size at runtime. You may use kMaxRegularHeapObjectSize as a constant
1416 // instead if you know the allocation isn't in the code spaces.
1417 inline V8_EXPORT_PRIVATE int MaxRegularHeapObjectSize(
1418 AllocationType allocation);
1419
1420 // ===========================================================================
1421 // Prologue/epilogue callback methods.========================================
1422 // ===========================================================================
1423
1424 void AddGCPrologueCallback(v8::Isolate::GCCallbackWithData callback,
1425 GCType gc_type_filter, void* data);
1426 void RemoveGCPrologueCallback(v8::Isolate::GCCallbackWithData callback,
1427 void* data);
1428
1429 void AddGCEpilogueCallback(v8::Isolate::GCCallbackWithData callback,
1430 GCType gc_type_filter, void* data);
1431 void RemoveGCEpilogueCallback(v8::Isolate::GCCallbackWithData callback,
1432 void* data);
1433
1434 void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags,
1435 GCTracer::Scope::ScopeId scope_id);
1436 void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags,
1437 GCTracer::Scope::ScopeId scope_id);
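// Illustrative sketch (not part of this header): observing full GCs via a
// prologue callback with the v8::Isolate::GCCallbackWithData signature.
//
//   void OnGCPrologue(v8::Isolate* isolate, v8::GCType type,
//                     v8::GCCallbackFlags flags, void* data) {
//     // e.g. record a timestamp before the mark-compact starts
//   }
//   heap->AddGCPrologueCallback(OnGCPrologue, kGCTypeMarkSweepCompact,
//                               /*data=*/nullptr);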
1438
1439 // ===========================================================================
1440 // Allocation methods. =======================================================
1441 // ===========================================================================
1442
1443 // Creates a filler object and returns a heap object immediately after it.
1444 V8_EXPORT_PRIVATE Tagged<HeapObject> PrecedeWithFiller(
1445 Tagged<HeapObject> object, int filler_size);
1446
1447 // Creates a filler object and returns a heap object immediately after it.
1448 // Unlike `PrecedeWithFiller` this method will not perform slot verification
1449 // since this would race on background threads.
1450 V8_EXPORT_PRIVATE Tagged<HeapObject> PrecedeWithFillerBackground(
1451 Tagged<HeapObject> object, int filler_size);
1452
1453 // Creates a filler object if needed for alignment and returns a heap object
1454 // immediately after it. If any space is left after the returned object,
1455 // another filler object is created so the over-allocated memory is iterable.
1456 V8_WARN_UNUSED_RESULT Tagged<HeapObject> AlignWithFillerBackground(
1457 Tagged<HeapObject> object, int object_size, int allocation_size,
1458 AllocationAlignment alignment);
1459
1460 // Allocate an external backing store with the given allocation callback.
1461 // If the callback fails (indicated by a nullptr result) then this function
1462 // will re-try the allocation after performing GCs. This is useful for
1463 // external backing stores that may be retained by (unreachable) V8 objects
1464 // such as ArrayBuffers, ExternalStrings, etc.
1465 //
1466 // The function may also proactively trigger GCs even if the allocation
1467 // callback does not fail to keep the memory usage low.
1468 V8_EXPORT_PRIVATE void* AllocateExternalBackingStore(
1469 const std::function<void*(size_t)>& allocate, size_t byte_length);
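// Illustrative sketch (not part of this header; assumes a size_t
// `byte_length`): allocating an ArrayBuffer backing store through the
// retrying helper; the callback may run again after GCs until it returns
// non-null.
//
//   void* memory = heap->AllocateExternalBackingStore(
//       [](size_t length) { return calloc(length, 1); }, byte_length);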
1470
1471 // ===========================================================================
1472 // Allocation tracking. ======================================================
1473 // ===========================================================================
1474
1475 // Adds {new_space_observer} to new space and {observer} to any other space.
1476 void AddAllocationObserversToAllSpaces(
1477 AllocationObserver* observer, AllocationObserver* new_space_observer);
1478
1479 // Removes {new_space_observer} from new space and {observer} from any other
1480 // space.
1481 void RemoveAllocationObserversFromAllSpaces(
1482 AllocationObserver* observer, AllocationObserver* new_space_observer);
1483
1484 // Check if the given object was recently allocated and its fields may appear
1485 // as uninitialized to background threads.
1486 // This predicate may be invoked from a background thread.
1487 inline bool IsPendingAllocation(Tagged<HeapObject> object);
1488 inline bool IsPendingAllocation(Tagged<Object> object);
1489
1490 // Notifies that all previously allocated objects are properly initialized
1491 // and ensures that IsPendingAllocation returns false for them. This function
1492 // may be invoked only on the main thread.
1493 V8_EXPORT_PRIVATE void PublishMainThreadPendingAllocations();
1494
1495 // ===========================================================================
1496 // Heap object allocation tracking. ==========================================
1497 // ===========================================================================
1498
1499 V8_EXPORT_PRIVATE void AddHeapObjectAllocationTracker(
1500     HeapObjectAllocationTracker* tracker);
1501 V8_EXPORT_PRIVATE void RemoveHeapObjectAllocationTracker(
1502     HeapObjectAllocationTracker* tracker);
1503 bool has_heap_object_allocation_tracker() const {
1504 return !allocation_trackers_.empty();
1505 }
1506
1507 // ===========================================================================
1508 // Stack frame support. ======================================================
1509 // ===========================================================================
1510
1511 // Searches for a Code object by the given interior pointer.
1512 V8_EXPORT_PRIVATE Tagged<Code> FindCodeForInnerPointer(Address inner_pointer);
1513 // Use the GcSafe family of functions if called while GC is in progress.
1514 Tagged<GcSafeCode> GcSafeFindCodeForInnerPointer(Address inner_pointer);
1515 std::optional<Tagged<GcSafeCode>> GcSafeTryFindCodeForInnerPointer(
1516 Address inner_pointer);
1517 std::optional<Tagged<InstructionStream>>
1518 GcSafeTryFindInstructionStreamForInnerPointer(Address inner_pointer);
1519 // Only intended for use from the `jco` gdb macro.
1520 std::optional<Tagged<Code>> TryFindCodeForInnerPointerForPrinting(
1521 Address inner_pointer);
1522
1523 // Returns true if {addr} is contained within {instruction_stream} and false
1524 // otherwise. Mostly useful for debugging.
1525 bool GcSafeInstructionStreamContains(
1526 Tagged<InstructionStream> instruction_stream, Address addr);
1527
1528 // ===========================================================================
1529 // Sweeping. =================================================================
1530 // ===========================================================================
1531
1532 bool sweeping_in_progress() const { return sweeper_->sweeping_in_progress(); }
1533 bool sweeping_in_progress_for_space(AllocationSpace space) const {
1534 return sweeper_->sweeping_in_progress_for_space(space);
1535 }
1536 bool minor_sweeping_in_progress() const {
1537 return sweeper_->minor_sweeping_in_progress();
1538 }
1539 bool major_sweeping_in_progress() const {
1540 return sweeper_->major_sweeping_in_progress();
1541 }
1542
1543 void FinishSweepingIfOutOfWork();
1544
1545 enum class SweepingForcedFinalizationMode { kUnifiedHeap, kV8Only };
1546
1547 // Ensures that sweeping is finished.
1548 //
1549 // Note: Can only be called safely from main thread.
1550 V8_EXPORT_PRIVATE void EnsureSweepingCompleted(
1551 SweepingForcedFinalizationMode mode);
1552 void EnsureYoungSweepingCompleted();
1553 void EnsureQuarantinedPagesSweepingCompleted();
1554
1555 // =============================================================================
1556
1557#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
1558 void V8_EXPORT_PRIVATE set_allocation_timeout(int allocation_timeout);
1559#endif // V8_ENABLE_ALLOCATION_TIMEOUT
1560
1561#ifdef DEBUG
1562 void VerifyCountersAfterSweeping();
1563 void VerifyCountersBeforeConcurrentSweeping(GarbageCollector collector);
1564 void VerifyCommittedPhysicalMemory();
1565
1566 void Print();
1567 void PrintHandles();
1568
1569 // Report code statistics.
1570 void ReportCodeStatistics(const char* title);
1571#endif // DEBUG
1572 void* GetRandomMmapAddr() {
1573 void* result = v8::internal::GetRandomMmapAddr();
1574#if V8_TARGET_ARCH_X64
1575#if V8_OS_DARWIN
1576 // The Darwin kernel [as of macOS 10.12.5] does not clean up page
1577 // directory entries [PDE] created from mmap or mach_vm_allocate, even
1578 // after the region is destroyed. Using a virtual address space that is
1579 // too large causes a leak of about 1 wired [can never be paged out] page
1580 // per call to mmap(). The page is only reclaimed when the process is
1581 // killed. Confine the hint to a 32-bit section of the virtual address
1582 // space. See crbug.com/700928.
1583 uintptr_t offset = reinterpret_cast<uintptr_t>(result) & kMmapRegionMask;
1584 result = reinterpret_cast<void*>(mmap_region_base_ + offset);
1585#endif // V8_OS_DARWIN
1586#endif // V8_TARGET_ARCH_X64
1587 return result;
1588 }
1589
1590 // Calculates the number of entries for the full-sized number-to-string cache.
1591 inline int MaxNumberToStringCacheSize() const;
1592
1593 // Ensure that we have swept all spaces in such a way that we can iterate
1594 // over all objects.
1595 V8_EXPORT_PRIVATE void MakeHeapIterable();
1596
1597 V8_EXPORT_PRIVATE void Unmark();
1598 V8_EXPORT_PRIVATE void DeactivateMajorGCInProgressFlag();
1599
1600 // Free all LABs in the heap.
1601 V8_EXPORT_PRIVATE void FreeLinearAllocationAreas();
1602
1603 // Frees all LABs owned by the main thread.
1604 V8_EXPORT_PRIVATE void FreeMainThreadLinearAllocationAreas();
1605
1606 V8_EXPORT_PRIVATE bool CanPromoteYoungAndExpandOldGeneration(
1607 size_t size) const;
1608 V8_EXPORT_PRIVATE bool CanExpandOldGeneration(size_t size) const;
1609
1610 // Checks whether OldGenerationCapacity() can be expanded by `size` bytes
1611 // and still fit into `max_old_generation_size_`.
1612 V8_EXPORT_PRIVATE bool IsOldGenerationExpansionAllowed(
1613 size_t size, const base::MutexGuard& expansion_mutex_witness) const;
1614
1615 bool ShouldReduceMemory() const {
1616 return current_gc_flags_ & GCFlag::kReduceMemoryFootprint;
1617 }
1618
1619 bool IsLastResortGC() { return current_gc_flags_ & GCFlag::kLastResort; }
1620
1621 MarkingState* marking_state() { return &marking_state_; }
1622
1623 NonAtomicMarkingState* non_atomic_marking_state() {
1624 return &non_atomic_marking_state_;
1625 }
1626
1627 PretenuringHandler* pretenuring_handler() { return &pretenuring_handler_; }
1628
1629 bool IsInlineAllocationEnabled() const { return inline_allocation_enabled_; }
1630
1631 // Returns the amount of external memory registered since last global gc.
1632 V8_EXPORT_PRIVATE uint64_t AllocatedExternalMemorySinceMarkCompact() const;
1633
1634 std::shared_ptr<v8::TaskRunner> GetForegroundTaskRunner(
1635 TaskPriority priority = TaskPriority::kUserBlocking) const;
1636
1637 bool ShouldUseBackgroundThreads() const;
1638 bool ShouldUseIncrementalMarking() const;
1639
1640 HeapAllocator* allocator() { return heap_allocator_; }
1641 const HeapAllocator* allocator() const { return heap_allocator_; }
1642
1643 bool use_new_space() const {
1644 DCHECK_IMPLIES(new_space(), !v8_flags.sticky_mark_bits);
1645 return new_space() || v8_flags.sticky_mark_bits;
1646 }
1647
1648 bool IsNewSpaceAllowedToGrowAboveTargetCapacity() const;
1649
1650 private:
1651 class AllocationTrackerForDebugging;
1652
1653 void AttachCppHeap(v8::CppHeap* cpp_heap);
1654
1655 using ExternalStringTableUpdaterCallback =
1656 Tagged<String> (*)(Heap* heap, FullObjectSlot pointer);
1657
1658 // The external string table is where all external strings are registered.
1659 // We need to keep track of such strings in order to properly finalize
1660 // them.
1661 class ExternalStringTable {
1662 public:
1663 explicit ExternalStringTable(Heap* heap) : heap_(heap) {}
1664 ExternalStringTable(const ExternalStringTable&) = delete;
1665 ExternalStringTable& operator=(const ExternalStringTable&) = delete;
1666
1667 // Registers an external string.
1668 inline void AddString(Tagged<String> string);
1669 bool Contains(Tagged<String> string);
1670
1671 void IterateAll(RootVisitor* v);
1672 void IterateYoung(RootVisitor* v);
1673 void PromoteYoung();
1674
1675 // Restores internal invariant and gets rid of collected strings. Must be
1676 // called after each Iterate*() that modified the strings.
1677 void CleanUpAll();
1678 void CleanUpYoung();
1679
1680 // Finalize all registered external strings and clear tables.
1681 void TearDown();
1682
1683 void UpdateYoungReferences(
1684 Heap::ExternalStringTableUpdaterCallback updater_func);
1685 void UpdateReferences(
1686 Heap::ExternalStringTableUpdaterCallback updater_func);
1687
1688 bool HasYoung() const { return !young_strings_.empty(); }
1689
1690 private:
1691 void Verify();
1692 void VerifyYoung();
1693
1694 Heap* const heap_;
1695
1696 // To speed up scavenge collections, young strings are kept separate from
1697 // old strings.
1698 std::vector<TaggedBase> young_strings_;
1699 std::vector<TaggedBase> old_strings_;
1700 // Used to protect access with --shared-string-table.
1701 base::Mutex mutex_;
1702 };
1703
1704 static const int kInitialEvalCacheSize = 64;
1705 static const int kInitialNumberStringCacheSize = 256;
1706
1707 static const int kRememberedUnmappedPages = 128;
1708
1709 static const int kYoungSurvivalRateHighThreshold = 90;
1710 static const int kYoungSurvivalRateAllowedDeviation = 15;
1711 static const int kOldSurvivalRateLowThreshold = 10;
1712
1713 static const int kMaxMarkCompactsInIdleRound = 7;
1714
1715 Heap();
1716 ~Heap();
1717
1718 Heap(const Heap&) = delete;
1719 Heap& operator=(const Heap&) = delete;
1720
1721 static bool IsRegularObjectAllocation(AllocationType allocation) {
1722 return AllocationType::kYoung == allocation ||
1723 AllocationType::kOld == allocation;
1724 }
1725
1726#define ROOT_ACCESSOR(type, name, CamelName) \
1727 inline void set_##name(Tagged<type> value);
1728 ROOT_LIST(ROOT_ACCESSOR)
1729#undef ROOT_ACCESSOR
1730
1732
1733 // Checks whether a global GC is necessary
1734 GarbageCollector SelectGarbageCollector(AllocationSpace space,
1735 GarbageCollectionReason gc_reason,
1736 const char** reason) const;
1737
1738 // Make all LABs of all threads iterable.
1739 void MakeLinearAllocationAreasIterable();
1740
1741 // Enables/Disables black allocation in shared LABs when not using black
1742 // allocated pages.
1743 void MarkSharedLinearAllocationAreasBlack();
1744 void UnmarkSharedLinearAllocationAreas();
1745
1746 // Free shared LABs and reset freelists.
1747 void FreeSharedLinearAllocationAreasAndResetFreeLists();
1748
1749 // Performs garbage collection in a safepoint.
1750 void PerformGarbageCollection(GarbageCollector collector,
1751 GarbageCollectionReason gc_reason,
1752 const char* collector_reason);
1753
1754 void PerformHeapVerification();
1755 std::vector<Isolate*> PauseConcurrentThreadsInClients(
1756 GarbageCollector collector);
1757 void ResumeConcurrentThreadsInClients(std::vector<Isolate*> paused_clients);
1758
1759 // For static-roots builds, pads the object to the required size.
1760 void StaticRootsEnsureAllocatedSize(DirectHandle<HeapObject> obj,
1761 int required);
1762 bool CreateEarlyReadOnlyMapsAndObjects();
1763 bool CreateImportantReadOnlyObjects();
1764 bool CreateLateReadOnlyNonJSReceiverMaps();
1765 bool CreateLateReadOnlyJSReceiverMaps();
1766 bool CreateReadOnlyObjects();
1767
1768 void CreateInternalAccessorInfoObjects();
1769 void CreateInitialMutableObjects();
1770
1770
1771 enum class VerifyNoSlotsRecorded { kYes, kNo };
1772
1773 // Creates a filler object in the specified memory area. This method is the
1774 // internal method used by all CreateFillerObjectAtXXX-methods.
1775 void CreateFillerObjectAtRaw(const WritableFreeSpace& free_space,
1776 ClearFreedMemoryMode clear_memory_mode,
1777 ClearRecordedSlots clear_slots_mode,
1778 VerifyNoSlotsRecorded verify_no_slots_recorded);
1779
1780 // Deopts all code that contains allocation instructions, whether tenured or
1781 // not. Moreover, it clears the pretenuring allocation site statistics.
1782 void ResetAllAllocationSitesDependentCode(AllocationType allocation);
1783
1784 // Evaluates local pretenuring for the old space and calls
1785 // ResetAllTenuredAllocationSitesDependentCode if too many objects died in
1786 // the old space.
1787 void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
1788
1789 // Record statistics after garbage collection.
1790 void ReportStatisticsAfterGC();
1791
1792 // Flush the number to string cache.
1793 void FlushNumberStringCache();
1794
1795 void ActivateMemoryReducerIfNeededOnMainThread();
1796
1797 void ShrinkOldGenerationAllocationLimitIfNotConfigured();
1798
1799 double ComputeMutatorUtilization(const char* tag, double mutator_speed,
1800 std::optional<double> gc_speed);
1801 bool HasLowYoungGenerationAllocationRate();
1802 bool HasLowOldGenerationAllocationRate();
1803 bool HasLowEmbedderAllocationRate();
1804
1805 enum class ResizeNewSpaceMode { kShrink, kGrow, kNone };
1806 ResizeNewSpaceMode ShouldResizeNewSpace();
1807
1808 void StartResizeNewSpace();
1809 void ResizeNewSpace();
1810 void ExpandNewSpaceSize();
1811 void ReduceNewSpaceSize();
1812
1813 void PrintMaxMarkingLimitReached();
1814 void PrintMaxNewSpaceSizeReached();
1815
1816 int NextStressMarkingLimit();
1817
1818 void AddToRingBuffer(const char* string);
1819 void GetFromRingBuffer(char* buffer);
1820
1821 static constexpr int kRetainMapEntrySize = 2;
1822
1823 void CompactRetainedMaps(Tagged<WeakArrayList> retained_maps);
1824
1825 void CollectGarbageOnMemoryPressure();
1826
1827 void EagerlyFreeExternalMemoryAndWasmCode();
1828
1829 bool InvokeNearHeapLimitCallback();
1830
1831 void InvokeIncrementalMarkingPrologueCallbacks();
1832 void InvokeIncrementalMarkingEpilogueCallbacks();
1833
1834 // Casts a heap object to an InstructionStream, DCHECKs that the
1835 // inner_pointer is within the object, and returns the attached Code object.
1836 Tagged<GcSafeCode> GcSafeGetCodeFromInstructionStream(
1837 Tagged<HeapObject> instruction_stream, Address inner_pointer);
1838 // Returns the map of a HeapObject. Can be used during garbage collection,
1839 // i.e. it supports a forwarded map.
1840 Tagged<Map> GcSafeMapOfHeapObject(Tagged<HeapObject> object);
1841
1842 // ===========================================================================
1843 // Actual GC. ================================================================
1844 // ===========================================================================
1845
1846 // Code that should be run before and after each GC. Includes
1847 // some reporting/verification activities when compiled with DEBUG set.
1848 void GarbageCollectionPrologue(GarbageCollectionReason gc_reason,
1849 const v8::GCCallbackFlags gc_callback_flags);
1850 void GarbageCollectionPrologueInSafepoint();
1851 void GarbageCollectionEpilogue(GarbageCollector collector);
1852 void GarbageCollectionEpilogueInSafepoint(GarbageCollector collector);
1853
1854 // Performs a major collection in the whole heap.
1855 void MarkCompact();
1856 // Performs a minor collection of just the young generation.
1857 void MinorMarkSweep();
1858
1859 // Code to be run before and after mark-compact.
1860 void MarkCompactPrologue();
1861 void MarkCompactEpilogue();
1862
1863 // Performs a minor collection in new generation.
1864 void Scavenge();
1865
1866 void UpdateYoungReferencesInExternalStringTable(
1868
1869 void UpdateReferencesInExternalStringTable(
1871
1872 void ProcessAllWeakReferences(WeakObjectRetainer* retainer);
1873 void ProcessNativeContexts(WeakObjectRetainer* retainer);
1874 void ProcessAllocationSites(WeakObjectRetainer* retainer);
1875 void ProcessDirtyJSFinalizationRegistries(WeakObjectRetainer* retainer);
1876 void ProcessWeakListRoots(WeakObjectRetainer* retainer);
1877
1878 // ===========================================================================
1879 // GC statistics. ============================================================
1880 // ===========================================================================
1881
1882 size_t OldGenerationSpaceAvailable() {
1883 uint64_t bytes = OldGenerationConsumedBytes();
1884 if (!v8_flags.external_memory_accounted_in_global_limit) {
1885 // TODO(chromium:42203776): When not accounting external memory properly
1886 // in the global limit, just add allocated external bytes towards the
1887 // regular old gen bytes. This is historic behavior.
1888 bytes += AllocatedExternalMemorySinceMarkCompact();
1889 }
1890
1891 if (old_generation_allocation_limit() <= bytes) return 0;
1892 return old_generation_allocation_limit() - static_cast<size_t>(bytes);
1893 }
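// Worked example (editorial): with old_generation_allocation_limit() at
// 256 MB and OldGenerationConsumedBytes() at 200 MB (no external memory
// added in), the headroom returned is 56 MB. Once consumption reaches or
// exceeds the limit, the result is clamped to 0 instead of letting the
// unsigned subtraction underflow.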
1894
1895 void UpdateTotalGCTime(base::TimeDelta duration);
1896
1897 bool IsIneffectiveMarkCompact(size_t old_generation_size,
1898 double mutator_utilization);
1899 void CheckIneffectiveMarkCompact(size_t old_generation_size,
1900 double mutator_utilization);
1901
1902 inline void IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
1903 size_t amount);
1904
1905 inline void DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
1906 size_t amount);
1907
1908 // ===========================================================================
1909 // Growing strategy. =========================================================
1910 // ===========================================================================
1911
1912 MemoryReducer* memory_reducer() { return memory_reducer_.get(); }
1913
1914 // For some webpages NotifyLoadingEnded() is never called.
1915 // This constant limits the effect of load time on GC.
1916 // The value is arbitrary and chosen as the largest load time observed in
1917 // v8 browsing benchmarks.
1918 static const int kMaxLoadTimeMs = 7000;
1919
1920 V8_EXPORT_PRIVATE bool ShouldOptimizeForLoadTime() const;
1921 void NotifyLoadingStarted();
1922 void NotifyLoadingEnded();
1923
1924 size_t old_generation_allocation_limit() const {
1925 return old_generation_allocation_limit_.load(std::memory_order_relaxed);
1926 }
1927
1928 size_t global_allocation_limit() const {
1929 return global_allocation_limit_.load(std::memory_order_relaxed);
1930 }
1931
1932 bool using_initial_limit() const {
1933 return using_initial_limit_.load(std::memory_order_relaxed);
1934 }
1935
1936 void set_using_initial_limit(bool value) {
1937 using_initial_limit_.store(value, std::memory_order_relaxed);
1938 }
1939
1940 size_t max_old_generation_size() const {
1941 return max_old_generation_size_.load(std::memory_order_relaxed);
1942 }
1943
1944 size_t min_old_generation_size() const { return min_old_generation_size_; }
1945
1946 // Sets max_old_generation_size_ and computes the new global heap limit from
1947 // it.
1948 void SetOldGenerationAndGlobalMaximumSize(size_t max_old_generation_size);
1949
1950 // Sets allocation limits for both old generation and the global heap.
1951 void SetOldGenerationAndGlobalAllocationLimit(
1952 size_t new_old_generation_allocation_limit,
1953 size_t new_global_allocation_limit);
1954
1955 void ResetOldGenerationAndGlobalAllocationLimit();
1956
1957 bool always_allocate() const { return always_allocate_scope_count_ != 0; }
1958
1959 bool ShouldExpandOldGenerationOnSlowAllocation(LocalHeap* local_heap,
1960 AllocationOrigin origin);
1961 bool ShouldExpandYoungGenerationOnSlowAllocation(size_t allocation_size);
1962
1963 HeapGrowingMode CurrentHeapGrowingMode();
1964
1965 double PercentToOldGenerationLimit() const;
1966 double PercentToGlobalMemoryLimit() const;
1967 enum class IncrementalMarkingLimit {
1968 kNoLimit,
1969 kSoftLimit,
1970 kHardLimit,
1971 kFallbackForEmbedderLimit
1972 };
1973 IncrementalMarkingLimit IncrementalMarkingLimitReached();
1974
1975 bool ShouldStressCompaction() const;
1976
1977 size_t GlobalMemoryAvailable();
1978
1979 void RecomputeLimits(GarbageCollector collector, base::TimeTicks time);
1980 void RecomputeLimitsAfterLoadingIfNeeded();
1981 struct LimitsCompuatationResult {
1982 size_t old_generation_allocation_limit;
1983 size_t global_allocation_limit;
1984 };
1985 static LimitsCompuatationResult ComputeNewAllocationLimits(Heap* heap);
1986
1987 // ===========================================================================
1988 // GC Tasks. =================================================================
1989 // ===========================================================================
1990
1991 V8_EXPORT_PRIVATE void StartMinorMSIncrementalMarkingIfNeeded();
1992 bool MinorMSSizeTaskTriggerReached() const;
1993
1994 MinorGCJob* minor_gc_job() { return minor_gc_job_.get(); }
1995
1996 // ===========================================================================
1997 // Allocation methods. =======================================================
1998 // ===========================================================================
1999
2000 // Allocates a JS Map in the heap.
2001 V8_WARN_UNUSED_RESULT AllocationResult
2002 AllocateMap(AllocationType allocation_type, InstanceType instance_type,
2003 int instance_size,
2004 ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND,
2005 int inobject_properties = 0);
2006
2007 // Allocate an uninitialized object. The memory is non-executable if the
2008 // hardware and OS allow. This is the single choke-point for allocations
2009 // performed by the runtime and should not be bypassed (to extend this to
2010 // inlined allocations, use the Heap::DisableInlineAllocation() support).
2011 V8_WARN_UNUSED_RESULT inline AllocationResult
2012 AllocateRaw(int size_in_bytes, AllocationType allocation,
2013 AllocationOrigin origin = AllocationOrigin::kRuntime,
2014 AllocationAlignment alignment = kTaggedAligned);
2015
2016 // This method will try to allocate objects quickly (AllocationType::kYoung)
2017 // otherwise it falls back to a slower path indicated by the mode.
2018 enum AllocationRetryMode { kLightRetry, kRetryOrFail };
2019 template <AllocationRetryMode mode>
2020 V8_WARN_UNUSED_RESULT V8_INLINE Tagged<HeapObject> AllocateRawWith(
2021 int size, AllocationType allocation,
2022 AllocationOrigin origin = AllocationOrigin::kRuntime,
2023 AllocationAlignment alignment = kTaggedAligned);
2024
2025 // Call AllocateRawWith with kRetryOrFail. Matches the method in LocalHeap.
2026 V8_WARN_UNUSED_RESULT inline Address AllocateRawOrFail(
2027 int size, AllocationType allocation,
2028 AllocationOrigin origin = AllocationOrigin::kRuntime,
2029 AllocationAlignment alignment = kTaggedAligned);
2030
2031 // Allocates a heap object based on the map.
2032 V8_WARN_UNUSED_RESULT AllocationResult Allocate(DirectHandle<Map> map,
2033 AllocationType allocation);
2034
2035 // Allocates a partial map for bootstrapping.
2036 V8_WARN_UNUSED_RESULT AllocationResult
2037 AllocatePartialMap(InstanceType instance_type, int instance_size);
2038
2039 void FinalizePartialMap(Tagged<Map> map);
2040
2041 void set_force_oom(bool value) { force_oom_ = value; }
2042 void set_force_gc_on_next_allocation() {
2043 force_gc_on_next_allocation_ = true;
2044 }
2045
2046 // Helper for IsPendingAllocation.
2047 inline bool IsPendingAllocationInternal(Tagged<HeapObject> object);
2048
2049#ifdef DEBUG
2050 V8_EXPORT_PRIVATE void IncrementObjectCounters();
2051#endif // DEBUG
2052
2053 std::vector<Handle<NativeContext>> FindAllNativeContexts();
2054 std::vector<Tagged<WeakArrayList>> FindAllRetainedMaps();
2055 MemoryMeasurement* memory_measurement() { return memory_measurement_.get(); }
2056
2057 AllocationType allocation_type_for_in_place_internalizable_strings() const {
2058 return allocation_type_for_in_place_internalizable_strings_;
2059 }
2060
2061 bool IsStressingScavenge();
2062
2063 void SetIsMarkingFlag(bool value);
2064 void SetIsMinorMarkingFlag(bool value);
2065
2066 size_t PromotedSinceLastGC() {
2067 size_t old_generation_size = OldGenerationSizeOfObjects();
2068 return old_generation_size > old_generation_size_at_last_gc_
2069 ? old_generation_size - old_generation_size_at_last_gc_
2070 : 0;
2071 }
2072
2073 ExternalMemoryAccounting external_memory_;
2074
2075 // This can be calculated directly from a pointer to the heap; however, it is
2076 // more expedient to get at the isolate directly from within Heap methods.
2077 Isolate* isolate_ = nullptr;
2078
2079 HeapAllocator* heap_allocator_ = nullptr;
2080
2081 // These limits are initialized in Heap::ConfigureHeap based on the resource
2082 // constraints and flags.
2083 size_t code_range_size_ = 0;
2084 size_t max_semi_space_size_ = 0;
2085 size_t min_semi_space_size_ = 0;
2086 size_t initial_semispace_size_ = 0;
2087 // Full garbage collections can be skipped if the old generation size
2088 // is below this threshold.
2089 size_t min_old_generation_size_ = 0;
2090 // If the old generation size exceeds this limit, then V8 will
2091 // crash with out-of-memory error.
2092 std::atomic<size_t> max_old_generation_size_{0};
2093 // TODO(mlippautz): Clarify whether this should take some embedder
2094 // configurable limit into account.
2095 size_t min_global_memory_size_ = 0;
2096 size_t max_global_memory_size_ = 0;
2097
2098 size_t initial_max_old_generation_size_ = 0;
2099 size_t initial_max_old_generation_size_threshold_ = 0;
2100 size_t initial_old_generation_size_ = 0;
2101
2102 // Before the first full GC the old generation allocation limit is considered
2103 // to be *not* configured (unless initial limits were provided by the
2104 // embedder, see below). In this mode V8 starts with a very large old
2105 // generation allocation limit initially. Minor GCs may then shrink this
2106 // initial limit down until the first full GC computes a proper old generation
2107 // allocation limit in Heap::RecomputeLimits. The old generation allocation
2108 // limit is then considered to be configured for all subsequent GCs. After the
2109 // first full GC this field is only ever reset for top context disposals.
2110 std::atomic<bool> using_initial_limit_ = true;
2111
2112 // True if initial heap size was provided by the embedder.
2113 bool initial_size_overwritten_ = false;
2114
2115 size_t maximum_committed_ = 0;
2116 size_t old_generation_capacity_after_bootstrap_ = 0;
2117
2118 // Backing store bytes (array buffers and external strings).
2119 // Use uint64_t counter since the counter could overflow the 32-bit range
2120 // temporarily on 32-bit.
2121 std::atomic<uint64_t> backing_store_bytes_{0};
2122
2123 // For keeping track of how much data has survived
2124 // scavenge since last new space expansion.
2125 size_t survived_since_last_expansion_ = 0;
2126
2127 // This is not the depth of nested AlwaysAllocateScope's but rather a single
2128 // count, as scopes can be acquired from multiple tasks (read: threads).
2129 std::atomic<size_t> always_allocate_scope_count_{0};
2130
2131 // Stores the memory pressure level that is set by MemoryPressureNotification
2132 // and reset by a mark-compact garbage collection.
2133 std::atomic<v8::MemoryPressureLevel> memory_pressure_level_;
2134
2135 std::vector<std::pair<v8::NearHeapLimitCallback, void*>>
2136 near_heap_limit_callbacks_;
2137
2138 // For keeping track of context disposals.
2139 int contexts_disposed_ = 0;
2140
2141 // Spaces owned by this heap through space_.
2142 NewSpace* new_space_ = nullptr;
2143 OldSpace* old_space_ = nullptr;
2144 CodeSpace* code_space_ = nullptr;
2145 SharedSpace* shared_space_ = nullptr;
2146 OldLargeObjectSpace* lo_space_ = nullptr;
2147 CodeLargeObjectSpace* code_lo_space_ = nullptr;
2148 NewLargeObjectSpace* new_lo_space_ = nullptr;
2149 SharedLargeObjectSpace* shared_lo_space_ = nullptr;
2150 ReadOnlySpace* read_only_space_ = nullptr;
2151 TrustedSpace* trusted_space_ = nullptr;
2152 SharedTrustedSpace* shared_trusted_space_ = nullptr;
2153 TrustedLargeObjectSpace* trusted_lo_space_ = nullptr;
2154 SharedTrustedLargeObjectSpace* shared_trusted_lo_space_ = nullptr;
2155
2156 // Either pointer to owned shared spaces or pointer to unowned shared spaces
2157 // in another isolate.
2158 PagedSpace* shared_allocation_space_ = nullptr;
2159 OldLargeObjectSpace* shared_lo_allocation_space_ = nullptr;
2160 SharedTrustedSpace* shared_trusted_allocation_space_ = nullptr;
2161 SharedTrustedLargeObjectSpace* shared_trusted_lo_allocation_space_ = nullptr;
2162
2163 // Map from the space id to the space.
2164 std::unique_ptr<Space> space_[LAST_SPACE + 1];
2165
2166#ifdef V8_COMPRESS_POINTERS
2167 // The spaces in the ExternalPointerTable containing entries owned by objects
2168 // in this heap.
2169 ExternalPointerTable::Space young_external_pointer_space_;
2170 ExternalPointerTable::Space old_external_pointer_space_;
2171 // Likewise but for slots in host objects in ReadOnlySpace.
2172 ExternalPointerTable::Space read_only_external_pointer_space_;
2173 // Space in the ExternalPointerTable containing entries owned by objects in
2174 // this heap. The entries exclusively point to CppHeap objects.
2175 CppHeapPointerTable::Space cpp_heap_pointer_space_;
2176#endif // V8_COMPRESS_POINTERS
2177
2178#ifdef V8_ENABLE_SANDBOX
2179 // Likewise, but for the trusted pointer table.
2180 TrustedPointerTable::Space trusted_pointer_space_;
2181
2182 // The space in the process-wide code pointer table managed by this heap.
2183 CodePointerTable::Space code_pointer_space_;
2184#endif // V8_ENABLE_SANDBOX
2185
2186#ifdef V8_ENABLE_LEAPTIERING
2187 // The space in the process-wide JSDispatchTable managed by this heap.
2188 JSDispatchTable::Space js_dispatch_table_space_;
2189#endif // V8_ENABLE_LEAPTIERING
2190
2191 LocalHeap* main_thread_local_heap_ = nullptr;
2192
2193 std::atomic<HeapState> gc_state_{NOT_IN_GC};
2194
2195 // Starts marking when stress_marking_percentage_% of the marking start limit
2196 // is reached.
2197 int stress_marking_percentage_ = 0;
2198
2199 // Observer that can cause early scavenge start.
2200 StressScavengeObserver* stress_scavenge_observer_ = nullptr;
2201
2202 // The maximum percent of the marking limit reached without causing marking.
2203 // This is tracked when specifying --fuzzer-gc-analysis.
2204 std::atomic<double> max_marking_limit_reached_ = 0.0;
2205
2206 // How many mark-sweep collections happened.
2207 unsigned int ms_count_ = 0;
2208
2209 // How many gc happened.
2210 unsigned int gc_count_ = 0;
2211
2212 // The number of Mark-Compact garbage collections that are considered as
2213 // ineffective. See IsIneffectiveMarkCompact() predicate.
2214 int consecutive_ineffective_mark_compacts_ = 0;
2215
2216 static const uintptr_t kMmapRegionMask = 0xFFFFFFFFu;
2217 uintptr_t mmap_region_base_ = 0;
2218
2219 // For post-mortem debugging.
2220 int remembered_unmapped_pages_index_ = 0;
2221 Address remembered_unmapped_pages_[kRememberedUnmappedPages];
2222
2223 // Limit that triggers a global GC on the next (normally caused) GC. This
2224 // is checked when we have already decided to do a GC to help determine
2225 // which collector to invoke, before expanding a paged space in the old
2226 // generation and on every allocation in large object space.
2227 std::atomic<size_t> old_generation_allocation_limit_{0};
2228 std::atomic<size_t> global_allocation_limit_{0};
2229
2230 // Weak list heads, threaded through the objects.
2231 // List heads are initialized lazily and contain the undefined_value at start.
2232 // {native_contexts_list_} is an Address instead of an Object to allow the use
2233 // of atomic accessors.
2234 std::atomic<Address> native_contexts_list_;
2235 Tagged<UnionOf<Smi, Undefined, AllocationSiteWithWeakNext>>
2236 allocation_sites_list_ = Smi::zero();
2237 Tagged<Object> dirty_js_finalization_registries_list_ = Smi::zero();
2238 // Weak list tails.
2239 Tagged<Object> dirty_js_finalization_registries_list_tail_ = Smi::zero();
2240
2241 GCCallbacks gc_prologue_callbacks_;
2242 GCCallbacks gc_epilogue_callbacks_;
2243
2244 GetExternallyAllocatedMemoryInBytesCallback external_memory_callback_;
2245
2246 base::SmallVector<v8::Isolate::UseCounterFeature, 8> deferred_counters_;
2247
2248 size_t promoted_objects_size_ = 0;
2249 double promotion_ratio_ = 0.0;
2250 double promotion_rate_ = 0.0;
2251 size_t new_space_surviving_object_size_ = 0;
2252 size_t previous_new_space_surviving_object_size_ = 0;
2253 double new_space_surviving_rate_ = 0.0;
2254 int nodes_died_in_new_space_ = 0;
2255 int nodes_copied_in_new_space_ = 0;
2256 int nodes_promoted_ = 0;
2257
2258 // Total time spent in GC.
2259 base::TimeDelta total_gc_time_ms_;
2260
2261 // Last time a garbage collection happened.
2262 double last_gc_time_ = 0.0;
2263
2264 std::unique_ptr<GCTracer> tracer_;
2265 std::unique_ptr<Sweeper> sweeper_;
2266 std::unique_ptr<MarkCompactCollector> mark_compact_collector_;
2267 std::unique_ptr<MinorMarkSweepCollector> minor_mark_sweep_collector_;
2268 std::unique_ptr<ScavengerCollector> scavenger_collector_;
2269 std::unique_ptr<ArrayBufferSweeper> array_buffer_sweeper_;
2270
2271 std::unique_ptr<MemoryAllocator> memory_allocator_;
2272 std::unique_ptr<IncrementalMarking> incremental_marking_;
2273 std::unique_ptr<ConcurrentMarking> concurrent_marking_;
2274 std::unique_ptr<MemoryMeasurement> memory_measurement_;
2275 std::unique_ptr<MemoryReducer> memory_reducer_;
2276 std::unique_ptr<ObjectStats> live_object_stats_;
2277 std::unique_ptr<ObjectStats> dead_object_stats_;
2278 std::unique_ptr<MinorGCJob> minor_gc_job_;
2279 std::unique_ptr<AllocationObserver> stress_concurrent_allocation_observer_;
2280 std::unique_ptr<AllocationTrackerForDebugging>
2281 allocation_tracker_for_debugging_;
2282 std::unique_ptr<EphemeronRememberedSet> ephemeron_remembered_set_;
2283 std::unique_ptr<HeapProfiler> heap_profiler_;
2284
2285 std::shared_ptr<v8::TaskRunner> task_runner_;
2286
2287 // This object controls virtual space reserved for code on the V8 heap. This
2288 // is only valid for 64-bit architectures where kPlatformRequiresCodeRange.
2289 //
2290 // Owned by the isolate group when V8_COMPRESS_POINTERS, otherwise owned by
2291 // the heap.
2292#ifdef V8_COMPRESS_POINTERS
2293 CodeRange* code_range_ = nullptr;
2294#else
2295 std::unique_ptr<CodeRange> code_range_;
2296#endif
2297
2298 // V8 configuration where V8 owns the heap, which is either created or passed
2299 // in during Isolate initialization.
2300 std::unique_ptr<CppHeap> owning_cpp_heap_;
2301 // Deprecated API where the heap is owned by the embedder. This field is
2302 // always set, independent of which CppHeap configuration (owned, unowned) is
2303 // used. As soon as Isolate::AttachCppHeap() is removed, this field should
2304 // also be removed and we should exclusively rely on the owning version.
2305 v8::CppHeap* cpp_heap_ = nullptr;
2306 EmbedderRootsHandler* embedder_roots_handler_ =
2307 nullptr; // Owned by the embedder.
2308
2309 StackState embedder_stack_state_ = StackState::kMayContainHeapPointers;
2310 std::optional<EmbedderStackStateOrigin> embedder_stack_state_origin_;
2311
2312 StrongRootsEntry* strong_roots_head_ = nullptr;
2313 base::Mutex strong_roots_mutex_;
2314
2315 base::Mutex heap_expansion_mutex_;
2316
2317 bool need_to_remove_stress_concurrent_allocation_observer_ = false;
2318
2319 // This counter is increased before each GC and never reset.
2320 // To account for the bytes allocated since the last GC, use the
2321 // NewSpaceAllocationCounter() function.
2322 size_t new_space_allocation_counter_ = 0;
2323
2324 // This counter is increased before each GC and never reset. To
2325 // account for the bytes allocated since the last GC, use the
2326 // OldGenerationAllocationCounter() function.
2327 size_t old_generation_allocation_counter_at_last_gc_ = 0;
2328
2329 // The size of objects in old generation after the last MarkCompact GC.
2330 size_t old_generation_size_at_last_gc_{0};
2331
2332 // The wasted bytes in old generation after the last MarkCompact GC.
2333 size_t old_generation_wasted_at_last_gc_{0};
2334
2335 // The size of embedder memory after the last MarkCompact GC.
2336 size_t embedder_size_at_last_gc_ = 0;
2337
2338 char trace_ring_buffer_[kTraceRingBufferSize];
2339
2340 // If it's not full then the data is from 0 to ring_buffer_end_. If it's
2341 // full then the data is from ring_buffer_end_ to the end of the buffer and
2342 // from 0 to ring_buffer_end_.
2343 bool ring_buffer_full_ = false;
2344 size_t ring_buffer_end_ = 0;
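// Consequently, reading the buffer back in chronological order means: when
// ring_buffer_full_, first copy [ring_buffer_end_, kTraceRingBufferSize)
// and then [0, ring_buffer_end_); otherwise copy just [0, ring_buffer_end_).
// GetFromRingBuffer() (declared above) is presumably the reader that
// follows this layout.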
2345
2346 // Flag is set when the heap has been configured. The heap can be repeatedly
2347 // configured through the API until it is set up.
2348 bool configured_ = false;
2349
2350 // Currently set GC flags that are respected by all GC components.
2351 GCFlags current_gc_flags_ = GCFlag::kNoFlags;
2352 // Currently set GC callback flags that are used to pass information between
2353 // the embedder and V8's GC.
2354 GCCallbackFlags current_gc_callback_flags_ =
2355 GCCallbackFlags::kNoGCCallbackFlags;
2356
2357 std::unique_ptr<IsolateSafepoint> safepoint_;
2358
2359 bool is_current_gc_forced_ = false;
2360 bool is_current_gc_for_heap_profiler_ = false;
2361 GarbageCollector current_or_last_garbage_collector_ =
2362 GarbageCollector::SCAVENGER;
2363
2364 ExternalStringTable external_string_table_;
2365
2366 const AllocationType allocation_type_for_in_place_internalizable_strings_;
2367
2368 std::unique_ptr<CollectionBarrier> collection_barrier_;
2369
2370 int ignore_local_gc_requests_depth_ = 0;
2371
2372 int gc_callbacks_depth_ = 0;
2373
2374 bool deserialization_complete_ = false;
2375
2376 int max_regular_code_object_size_ = 0;
2377
2378 bool inline_allocation_enabled_ = true;
2379
2380 int pause_allocation_observers_depth_ = 0;
2381
2382 // Used for testing purposes.
2383 bool force_oom_ = false;
2384 bool force_gc_on_next_allocation_ = false;
2385 bool delay_sweeper_tasks_for_testing_ = false;
2386
2387 std::vector<HeapObjectAllocationTracker*> allocation_trackers_;
2388
2389 bool is_finalization_registry_cleanup_task_posted_ = false;
2390
2391 MarkingState marking_state_;
2392 NonAtomicMarkingState non_atomic_marking_state_;
2393
2394 PretenuringHandler pretenuring_handler_;
2395
2396 // This field is used only when not running with MinorMS.
2397 ResizeNewSpaceMode resize_new_space_mode_ = ResizeNewSpaceMode::kNone;
2398
2399 std::unique_ptr<MemoryBalancer> mb_;
2400
2401 // A sentinel meaning that the embedder isn't currently loading resources.
2402 static constexpr double kLoadTimeNotLoading = -1.0;
2403
2404 // Time that the embedder started loading resources, or kLoadTimeNotLoading.
2405 std::atomic<double> load_start_time_ms_{kLoadTimeNotLoading};
2406
2407 bool update_allocation_limits_after_loading_ = false;
2408 // Full GC may trigger during loading due to overshooting allocation limits.
2409 // In such cases we may want to update the limits again once loading is
2410 // actually finished.
2411 bool is_full_gc_during_loading_ = false;
2412
2413 // Classes in "heap" can be friends.
2416 friend class ArrayBufferCollector;
2418 friend class ConcurrentMarking;
2420 friend class CppHeap;
2423 friend class GCCallbacksScope;
2424 friend class GCTracer;
2425 friend class HeapAllocator;
2427 friend class HeapVerifier;
2431 friend class LargeObjectSpace;
2432 friend class LocalHeap;
2433 friend class MarkingBarrier;
2435 template <typename ConcreteVisitor>
2436 friend class MarkingVisitorBase;
2438 friend class MemoryBalancer;
2439 friend class MinorGCJob;
2440 friend class MinorGCTaskObserver;
2442 friend class MinorMSIncrementalMarkingTaskObserver;
2444 friend class NewSpace;
2446 friend class PageMetadata;
2449 friend class PagedSpaceBase;
2453 friend class ReadOnlyRoots;
2455 friend class Scavenger;
2458 friend class SemiSpaceNewSpace;
2461 friend class Space;
2463 friend class Sweeper;
2465 friend class heap::TestMemoryAllocatorScope;
2466
2467 // The allocator interface.
2468 friend class Factory;
2469 friend class LocalFactory;
2470 template <typename IsolateT>
2471 friend class Deserializer;
2472
2473 // The Isolate constructs us.
2474 friend class Isolate;
2475
2476 // Used in cctest.
2477 friend class heap::HeapTester;
2478 FRIEND_TEST(SpacesTest, InlineAllocationObserverCadence);
2479 FRIEND_TEST(SpacesTest, AllocationObserver);
2480 friend class HeapInternalsBase;
2481};
2482
2483#define DECL_RIGHT_TRIM(T) \
2484 extern template EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) void \
2485 Heap::RightTrimArray<T>(Tagged<T> object, int new_capacity, \
2486 int old_capacity);
2487 RIGHT_TRIMMABLE_ARRAY_LIST(DECL_RIGHT_TRIM)
2488#undef DECL_RIGHT_TRIM
2489
2490 class HeapStats {
2491 public:
2492 static const int kStartMarker = 0xDECADE00;
2493 static const int kEndMarker = 0xDECADE01;
2494
2495 intptr_t start_marker = 0; // 0
2496 size_t ro_space_size = 0; // 1
2497 size_t ro_space_capacity = 0; // 2
2498 size_t new_space_size = 0; // 3
2499 size_t new_space_capacity = 0; // 4
2500 size_t old_space_size = 0; // 5
2501 size_t old_space_capacity = 0; // 6
2502 size_t code_space_size = 0; // 7
2503 size_t code_space_capacity = 0; // 8
2504 size_t map_space_size = 0; // 9
2505 size_t map_space_capacity = 0; // 10
2506 size_t lo_space_size = 0; // 11
2507 size_t code_lo_space_size = 0; // 12
2508 size_t global_handle_count = 0; // 13
2509 size_t weak_global_handle_count = 0; // 14
2510 size_t pending_global_handle_count = 0; // 15
2511 size_t near_death_global_handle_count = 0; // 16
2512 size_t free_global_handle_count = 0; // 17
2513 size_t memory_allocator_size = 0; // 18
2514 size_t memory_allocator_capacity = 0; // 19
2515 size_t malloced_memory = 0; // 20
2516 size_t malloced_peak_memory = 0; // 21
2517 size_t objects_per_type = 0; // 22
2518 size_t size_per_type = 0; // 23
2519 int os_error = 0; // 24
2520 char last_few_messages[Heap::kTraceRingBufferSize + 1] = {0}; // 25
2521 intptr_t end_marker = 0; // 27
2522};
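// The 0xDECADE00/0xDECADE01 markers appear to bracket the struct so that a
// post-mortem reader of a crash dump can sanity-check that the HeapStats
// block it located is intact. A sketch of such a check (illustrative only;
// `LooksValid` is not a V8 function):
//
//   bool LooksValid(const HeapStats& stats) {
//     return stats.start_marker == HeapStats::kStartMarker &&
//            stats.end_marker == HeapStats::kEndMarker;
//   }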
2523
2524// Disables GC for all allocations. It should not be used
2525// outside heap, deserializer, and isolate bootstrap.
2526// Use AlwaysAllocateScopeForTesting in tests.
2527 class V8_NODISCARD AlwaysAllocateScope {
2528 public:
2529 inline ~AlwaysAllocateScope();
2530
2531 private:
2532 friend class AlwaysAllocateScopeForTesting;
2533 friend class Evacuator;
2534 friend class Heap;
2535 friend class HeapAllocator;
2536 friend class Isolate;
2537 // TODO(1445003): Remove this after investigating the crash.
2538 friend class GlobalBackingStoreRegistry;
2539
2540 explicit inline AlwaysAllocateScope(Heap* heap);
2541 Heap* heap_;
2542 };
2543
2544 class V8_NODISCARD GCCallbacksScope {
2545 public:
2546 explicit GCCallbacksScope(Heap* heap);
2547 ~GCCallbacksScope();
2548
2549 bool CheckReenter() const;
2550
2551 private:
2552 Heap* const heap_;
2553};
2554
2555 class V8_NODISCARD AlwaysAllocateScopeForTesting {
2556 public:
2557 explicit inline AlwaysAllocateScopeForTesting(Heap* heap);
2558
2559 private:
2560 AlwaysAllocateScope scope_;
2561 };
2562
2563 class V8_NODISCARD CodePageMemoryModificationScopeForDebugging {
2564 public:
2565 // When we zap newly allocated MemoryChunks, the chunk is not initialized yet
2566 // and we can't use the regular CodePageMemoryModificationScope since it will
2567 // access the page header. Hence, use the VirtualMemory for tracking instead.
2568 CodePageMemoryModificationScopeForDebugging(
2569 Heap* heap, VirtualMemory* reservation, base::AddressRegion region);
2570 explicit CodePageMemoryModificationScopeForDebugging(
2571 MemoryChunkMetadata* chunk);
2572 ~CodePageMemoryModificationScopeForDebugging();
2573
2574 private:
2575#if V8_HEAP_USE_PTHREAD_JIT_WRITE_PROTECT || \
2576 V8_HEAP_USE_PKU_JIT_WRITE_PROTECT || V8_HEAP_USE_BECORE_JIT_WRITE_PROTECT
2577 RwxMemoryWriteScope rwx_write_scope_;
2578#endif
2579};
2580
2581 class V8_NODISCARD IgnoreLocalGCRequests {
2582 public:
2583 explicit inline IgnoreLocalGCRequests(Heap* heap);
2584 inline ~IgnoreLocalGCRequests();
2585
2586 private:
2587 Heap* heap_;
2588 };
2589
2590// Space iterator for iterating over all the paged spaces of the heap: Map
2591// space, old space and code space. Returns each space in turn, and null when it
2592// is done.
2593 class V8_EXPORT_PRIVATE PagedSpaceIterator {
2594 public:
2595 explicit PagedSpaceIterator(const Heap* heap)
2596 : heap_(heap), counter_(FIRST_GROWABLE_PAGED_SPACE) {}
2597 PagedSpace* Next();
2598
2599 private:
2600 const Heap* const heap_;
2601 int counter_;
2602 };
2603
2604// A HeapObjectIterator provides iteration over the entire non-read-only heap.
2605 // It aggregates the specific iterators for the different spaces, as these
2606 // can each iterate over only one space.
2607//
2608// HeapObjectIterator ensures there is no allocation during its lifetime (using
2609// an embedded DisallowGarbageCollection instance).
2610//
2611// HeapObjectIterator can skip free list nodes (that is, de-allocated heap
2612// objects that still remain in the heap).
2613//
2614// See ReadOnlyHeapObjectIterator if you need to iterate over read-only space
2615// objects, or CombinedHeapObjectIterator if you need to iterate over both
2616// heaps.
2617 class V8_EXPORT_PRIVATE HeapObjectIterator {
2618 public:
2619 enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };
2620
2621 explicit HeapObjectIterator(Heap* heap,
2622 HeapObjectsFiltering filtering = kNoFiltering);
2623 // .. when already in a SafepointScope:
2624 HeapObjectIterator(Heap* heap, const SafepointScope& safepoint_scope,
2625 HeapObjectsFiltering filtering = kNoFiltering);
2626 ~HeapObjectIterator();
2627
2628 Tagged<HeapObject> Next();
2629
2630 private:
2631 HeapObjectIterator(Heap* heap, SafepointScope* safepoint_scope_or_nullptr,
2632 HeapObjectsFiltering filtering);
2633
2634 Tagged<HeapObject> NextObject();
2635
2637 DISALLOW_GARBAGE_COLLECTION(no_heap_allocation_)
2638
2639 // The safepoint scope pointer is null if a scope already existed when the
2640 // iterator was created (i.e. when using the constructor that passes a
2641 // safepoint_scope reference).
2642 std::unique_ptr<SafepointScope> safepoint_scope_; // nullable
2643 std::unique_ptr<HeapObjectsFilter> filter_;
2644 // Space iterator for iterating all the spaces.
2645 SpaceIterator space_iterator_;
2646 // Object iterator for the space currently being iterated.
2647 std::unique_ptr<ObjectIterator> object_iterator_;
2648 };
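// Usage sketch (assumes a valid Heap* `heap`; names are illustrative):
//
//   HeapObjectIterator it(heap);
//   for (Tagged<HeapObject> obj = it.Next(); !obj.is_null();
//        obj = it.Next()) {
//     // Inspect `obj`. No allocation can happen while `it` is alive.
//   }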
2649
2650// Abstract base class for checking whether a weak object should be retained.
2651 class WeakObjectRetainer {
2652 public:
2653 virtual ~WeakObjectRetainer() = default;
2654
2655 // Return whether this object should be retained. If nullptr is returned the
2656 // object has no references. Otherwise the address of the retained object
2657 // should be returned as in some GC situations the object has been moved.
2658 virtual Tagged<Object> RetainAs(Tagged<Object> object) = 0;
2659 };
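// Example retainer (an editorial sketch, not part of V8): retains every
// object it is asked about, at its current address.
//
//   class RetainAllRetainer final : public WeakObjectRetainer {
//    public:
//     Tagged<Object> RetainAs(Tagged<Object> object) override {
//       return object;  // Keep unconditionally; the GC treats this as
//                       // "retained at this address".
//     }
//   };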
2660
2661// -----------------------------------------------------------------------------
2662// Allows observation of heap object allocations.
2663 class HeapObjectAllocationTracker {
2664 public:
2665 virtual void AllocationEvent(Address addr, int size) = 0;
2666 virtual void MoveEvent(Address from, Address to, int size) {}
2667 virtual void UpdateObjectSizeEvent(Address addr, int size) {}
2668 virtual ~HeapObjectAllocationTracker() = default;
2669};
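// Example tracker (an editorial sketch): counts allocated bytes. A tracker
// like this would be registered via Heap::AddHeapObjectAllocationTracker()
// and removed with Heap::RemoveHeapObjectAllocationTracker(), declared
// earlier in this file.
//
//   class ByteCountingTracker final : public HeapObjectAllocationTracker {
//    public:
//     void AllocationEvent(Address addr, int size) override {
//       allocated_bytes_ += size;
//     }
//
//    private:
//     size_t allocated_bytes_ = 0;
//   };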
2670
2671template <typename T>
2672inline Tagged<T> ForwardingAddress(Tagged<T> heap_obj);
2673
2674// Specialized strong root allocator for blocks of Addresses, retained
2675// as strong references.
2676template <>
2677 class StrongRootAllocator<Address> : public StrongRootAllocatorBase {
2678 public:
2679 using value_type = Address;
2680
2681 template <typename HeapOrIsolateT>
2682 explicit StrongRootAllocator(HeapOrIsolateT* heap_or_isolate)
2683 : StrongRootAllocatorBase(heap_or_isolate) {}
2684 template <typename U>
2685 StrongRootAllocator(const StrongRootAllocator<U>& other) V8_NOEXCEPT
2686 : StrongRootAllocatorBase(other) {}
2687
2688 Address* allocate(size_t n) { return allocate_impl(n); }
2689 void deallocate(Address* p, size_t n) noexcept {
2690 return deallocate_impl(p, n);
2691 }
2692};
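// Usage sketch (assumes a valid Isolate* `isolate`; `some_tagged_object` is
// a placeholder): the vector's backing block of Addresses is treated as a
// block of strong roots for the container's lifetime.
//
//   StrongRootAllocator<Address> alloc(isolate);
//   std::vector<Address, StrongRootAllocator<Address>> addresses(alloc);
//   addresses.push_back(some_tagged_object.ptr());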
2693
2694 class V8_NODISCARD EmbedderStackStateScope final {
2695 public:
2696 EmbedderStackStateScope(Heap* heap, EmbedderStackStateOrigin origin,
2697 StackState stack_state);
2698 ~EmbedderStackStateScope();
2699
2700 private:
2701 Heap* const heap_;
2702 const StackState old_stack_state_;
2703 std::optional<EmbedderStackStateOrigin> old_origin_;
2704};
2705
2715
2716 class V8_NODISCARD CppClassNamesAsHeapObjectNameScope final {
2717 public:
2718 explicit CppClassNamesAsHeapObjectNameScope(v8::CppHeap* heap);
2719 ~CppClassNamesAsHeapObjectNameScope();
2720
2721 private:
2722 std::unique_ptr<cppgc::internal::ClassNameAsHeapObjectNameScope> scope_;
2723};
2724
2725// We cannot avoid stale handles to left-trimmed objects, but can only make
2726// sure all handles still needed are updated. Filter out a stale pointer
2727// and clear the slot to allow post processing of handles (needed because
2728// the sweeper might actually free the underlying page).
2729 class ClearStaleLeftTrimmedPointerVisitor final : public RootVisitor {
2730 public:
2731 ClearStaleLeftTrimmedPointerVisitor(Heap* heap, RootVisitor* visitor);
2732
2733 void VisitRootPointer(Root root, const char* description,
2734 FullObjectSlot p) override;
2735
2736 void VisitRootPointers(Root root, const char* description,
2737 FullObjectSlot start, FullObjectSlot end) override;
2738
2739 void VisitRunningCode(FullObjectSlot code_slot,
2740 FullObjectSlot istream_or_smi_zero_slot) override;
2741
2742 void Synchronize(VisitorSynchronization::SyncTag tag) override {
2743 visitor_->Synchronize(tag);
2744 }
2745
2746 // The pointer compression cage base value used for decompression of all
2747 // tagged values except references to InstructionStream objects.
2748 PtrComprCageBase cage_base() const {
2749#if V8_COMPRESS_POINTERS
2750 return cage_base_;
2751#else
2752 return PtrComprCageBase{};
2753#endif // V8_COMPRESS_POINTERS
2754 }
2755
2756 private:
2757 inline void ClearLeftTrimmedOrForward(Root root, const char* description,
2758 FullObjectSlot p);
2759 inline bool IsLeftTrimmed(FullObjectSlot p);
2760
2763
2764#if V8_COMPRESS_POINTERS
2765 const PtrComprCageBase cage_base_;
2766#endif // V8_COMPRESS_POINTERS
2767};
2768
2769} // namespace internal
2770} // namespace v8
2771
2772// Opt out from libc++ backing sanitization, since root iteration walks up to
2773// the capacity.
2774#ifdef _LIBCPP_HAS_ASAN_CONTAINER_ANNOTATIONS_FOR_ALL_ALLOCATORS
2775template <typename T>
2776struct ::std::__asan_annotate_container_with_allocator<
2777 v8::internal::StrongRootAllocator<T>> : ::std::false_type {};
2778#endif // _LIBCPP_HAS_ASAN_CONTAINER_ANNOTATIONS_FOR_ALL_ALLOCATORS
2779
2780#endif // V8_HEAP_HEAP_H_
Isolate * isolate_
#define DISALLOW_GARBAGE_COLLECTION(name)
#define DEFINE_OPERATORS_FOR_FLAGS(Type)
Definition flags.h:100
#define T
union v8::internal::@341::BuiltinMetadata::KindSpecificData data
size_t(*)() GetExternallyAllocatedMemoryInBytesCallback
void(*)(Isolate *isolate, GCType type, GCCallbackFlags flags, void *data) GCCallbackWithData
void Synchronize(VisitorSynchronization::SyncTag tag) override
Definition heap.h:2742
CodePageMemoryModificationScopeForDebugging(Heap *heap, VirtualMemory *reservation, base::AddressRegion region)
Definition heap.cc:7493
std::unique_ptr< cppgc::internal::ClassNameAsHeapObjectNameScope > scope_
Definition heap.h:2722
std::optional< EmbedderStackStateOrigin > old_origin_
Definition heap.h:2703
const StackState old_stack_state_
Definition heap.h:2702
virtual void UpdateObjectSizeEvent(Address addr, int size)
Definition heap.h:2667
virtual void AllocationEvent(Address addr, int size)=0
virtual void MoveEvent(Address from, Address to, int size)
Definition heap.h:2666
size_t code_lo_space_size
Definition heap.h:2507
static const int kStartMarker
Definition heap.h:2492
size_t weak_global_handle_count
Definition heap.h:2509
size_t near_death_global_handle_count
Definition heap.h:2511
char last_few_messages[Heap::kTraceRingBufferSize+1]
Definition heap.h:2520
size_t memory_allocator_capacity
Definition heap.h:2514
size_t free_global_handle_count
Definition heap.h:2512
size_t malloced_peak_memory
Definition heap.h:2516
static const int kEndMarker
Definition heap.h:2493
size_t new_space_capacity
Definition heap.h:2499
size_t global_handle_count
Definition heap.h:2508
size_t pending_global_handle_count
Definition heap.h:2510
size_t map_space_capacity
Definition heap.h:2505
size_t code_space_capacity
Definition heap.h:2503
size_t memory_allocator_size
Definition heap.h:2513
intptr_t start_marker
Definition heap.h:2495
size_t old_space_capacity
Definition heap.h:2501
void set_low_since_mark_compact(uint64_t value)
Definition heap.h:269
uint64_t UpdateAmount(int64_t delta)
Definition heap.h:234
void set_limit_for_interrupt(uint64_t value)
Definition heap.h:265
void UpdateLowSinceMarkCompact(uint64_t amount)
Definition heap.h:245
void UpdateLimitForInterrupt(uint64_t amount)
Definition heap.h:241
ExternalStringTable & operator=(const ExternalStringTable &)=delete
std::vector< TaggedBase > old_strings_
Definition heap.h:1699
std::vector< TaggedBase > young_strings_
Definition heap.h:1698
ExternalStringTable(const ExternalStringTable &)=delete
V8_INLINE void SetRootNoScriptSharedFunctionInfos(Tagged< Object > value)
ExternalStringTable external_string_table_
Definition heap.h:2364
size_t promoted_objects_size()
Definition heap.h:1303
SharedLargeObjectSpace * shared_lo_space() const
Definition heap.h:736
std::unique_ptr< ObjectStats > live_object_stats_
Definition heap.h:2276
std::unique_ptr< MinorMarkSweepCollector > minor_mark_sweep_collector_
Definition heap.h:2267
SharedTrustedLargeObjectSpace * shared_trusted_lo_space() const
Definition heap.h:746
static bool IsRegularObjectAllocation(AllocationType allocation)
Definition heap.h:1721
size_t MaxOldGenerationSize()
Definition heap.h:1235
OldGenerationExpansionNotificationOrigin
Definition heap.h:380
std::unique_ptr< MemoryReducer > memory_reducer_
Definition heap.h:2275
NewSpace * new_space() const
Definition heap.h:727
std::unique_ptr< ArrayBufferSweeper > array_buffer_sweeper_
Definition heap.h:2269
bool IsTearingDown() const
Definition heap.h:525
void set_native_contexts_list(Tagged< Object > object)
Definition heap.h:457
NonAtomicMarkingState non_atomic_marking_state_
Definition heap.h:2392
void set_allocation_sites_list(Tagged< UnionOf< Smi, Undefined, AllocationSiteWithWeakNext > > object)
Definition heap.h:466
static bool IsYoungGenerationCollector(GarbageCollector collector)
Definition heap.h:346
void SetNewSpaceAllocationCounterForTesting(size_t new_value)
Definition heap.h:1330
size_t OldGenerationAllocationCounter()
Definition heap.h:1339
std::unique_ptr< CppHeap > owning_cpp_heap_
Definition heap.h:2300
ExternalMemoryAccounting external_memory_
Definition heap.h:2073
HeapState gc_state() const
Definition heap.h:521
std::unique_ptr< MemoryMeasurement > memory_measurement_
Definition heap.h:2274
SharedSpace * shared_space() const
Definition heap.h:733
bool sweeping_in_progress() const
Definition heap.h:1532
OldLargeObjectSpace * lo_space() const
Definition heap.h:734
base::Mutex heap_expansion_mutex_
Definition heap.h:2315
std::vector< std::pair< v8::NearHeapLimitCallback, void * > > near_heap_limit_callbacks_
Definition heap.h:2136
NewLargeObjectSpace * new_lo_space() const
Definition heap.h:737
bool use_new_space() const
Definition heap.h:1643
base::Mutex * heap_expansion_mutex()
Definition heap.h:1263
v8::Isolate::GetExternallyAllocatedMemoryInBytesCallback GetExternallyAllocatedMemoryInBytesCallback
Definition heap.h:962
void IncrementNewSpaceSurvivingObjectSize(size_t object_size)
Definition heap.h:1305
MemoryReducer * memory_reducer()
Definition heap.h:1912
std::vector< Chunk > Reservation
Definition heap.h:291
FRIEND_TEST(SpacesTest, AllocationObserver)
void set_force_oom(bool value)
Definition heap.h:2041
MarkCompactCollector * mark_compact_collector()
Definition heap.h:813
std::atomic< v8::MemoryPressureLevel > memory_pressure_level_
Definition heap.h:2133
std::unique_ptr< ObjectStats > dead_object_stats_
Definition heap.h:2277
std::unique_ptr< CollectionBarrier > collection_barrier_
Definition heap.h:2368
static GarbageCollector YoungGenerationCollector()
Definition heap.h:353
std::unique_ptr< EphemeronRememberedSet > ephemeron_remembered_set_
Definition heap.h:2282
size_t min_old_generation_size() const
Definition heap.h:1944
std::atomic< Address > native_contexts_list_
Definition heap.h:2234
LocalHeap * main_thread_local_heap()
Definition heap.h:842
int ms_count() const
Definition heap.h:500
size_t old_generation_allocation_limit() const
Definition heap.h:1924
bool has_heap_object_allocation_tracker() const
Definition heap.h:1503
bool ShouldCurrentGCKeepAgesUnchanged() const
Definition heap.h:1361
size_t new_space_surviving_object_size()
Definition heap.h:1308
Tagged< Object > dirty_js_finalization_registries_list()
Definition heap.h:478
size_t max_old_generation_size() const
Definition heap.h:1940
IncrementalMarking * incremental_marking() const
Definition heap.h:1062
void IncrementPromotedObjectsSize(size_t object_size)
Definition heap.h:1300
CodeRange * code_range()
Definition heap.h:831
base::Mutex strong_roots_mutex_
Definition heap.h:2313
void SetGetExternallyAllocatedMemoryInBytesCallback(GetExternallyAllocatedMemoryInBytesCallback callback)
Definition heap.h:965
MemoryMeasurement * memory_measurement()
Definition heap.h:2055
std::unique_ptr< Sweeper > sweeper_
Definition heap.h:2265
GarbageCollector current_or_last_garbage_collector() const
Definition heap.h:1355
std::unique_ptr< CodeRange > code_range_
Definition heap.h:2295
OldSpace * old_space() const
Definition heap.h:730
void IncrementNodesDiedInNewSpace(int count)
Definition heap.h:1316
GCCallbacks gc_epilogue_callbacks_
Definition heap.h:2242
ArrayBufferSweeper * array_buffer_sweeper()
Definition heap.h:823
ConcurrentMarking * concurrent_marking() const
Definition heap.h:1070
size_t PromotedSinceLastGC()
Definition heap.h:2066
std::unique_ptr< AllocationTrackerForDebugging > allocation_tracker_for_debugging_
Definition heap.h:2281
std::unique_ptr< ConcurrentMarking > concurrent_marking_
Definition heap.h:2273
Heap * AsHeap()
Definition heap.h:844
void InitializeMainThreadLocalHeap(LocalHeap *main_thread_local_heap)
OldLargeObjectSpace * shared_lo_allocation_space() const
Definition heap.h:753
std::unique_ptr< MemoryBalancer > mb_
Definition heap.h:2399
HeapProfiler * heap_profiler() const
Definition heap.h:366
std::unique_ptr< HeapProfiler > heap_profiler_
Definition heap.h:2283
std::unique_ptr< IncrementalMarking > incremental_marking_
Definition heap.h:2272
bool ignore_local_gc_requests() const
Definition heap.h:534
void * GetRandomMmapAddr()
Definition heap.h:1572
bool always_allocate() const
Definition heap.h:1957
std::unique_ptr< GCTracer > tracer_
Definition heap.h:2264
MinorMarkSweepCollector * minor_mark_sweep_collector()
Definition heap.h:817
TrustedSpace * trusted_space() const
Definition heap.h:739
MinorGCJob * minor_gc_job()
Definition heap.h:1994
FRIEND_TEST(SpacesTest, InlineAllocationObserverCadence)
bool major_sweeping_in_progress() const
Definition heap.h:1539
std::vector< HeapObjectAllocationTracker * > allocation_trackers_
Definition heap.h:2387
void IncrementYoungSurvivorsCounter(size_t survived)
Definition heap.h:1324
base::SmallVector< v8::Isolate::UseCounterFeature, 8 > deferred_counters_
Definition heap.h:2246
uint64_t external_memory_hard_limit()
Definition heap.h:612
void set_dirty_js_finalization_registries_list_tail(Tagged< Object > object)
Definition heap.h:481
const HeapAllocator * allocator() const
Definition heap.h:1641
Heap & operator=(const Heap &)=delete
size_t OldGenerationSpaceAvailable()
Definition heap.h:1882
SharedTrustedSpace * shared_trusted_allocation_space() const
Definition heap.h:756
size_t MaxSemiSpaceSize()
Definition heap.h:1233
std::unique_ptr< ScavengerCollector > scavenger_collector_
Definition heap.h:2268
std::unique_ptr< MarkCompactCollector > mark_compact_collector_
Definition heap.h:2266
MemoryAllocator * memory_allocator()
Definition heap.h:803
AllocationType allocation_type_for_in_place_internalizable_strings() const
Definition heap.h:2057
void RestoreHeapLimit(size_t heap_limit)
Definition heap.h:664
void set_old_generation_allocation_counter_at_last_gc(size_t new_value)
Definition heap.h:1347
bool sweeping_in_progress_for_space(AllocationSpace space) const
Definition heap.h:1533
void set_is_finalization_registry_cleanup_task_posted(bool posted)
Definition heap.h:901
Sweeper * sweeper()
Definition heap.h:821
IsolateSafepoint * safepoint()
Definition heap.h:579
size_t MaximumCommittedMemory()
Definition heap.h:1278
int gc_count() const
Definition heap.h:1351
Address allocation_sites_list_address()
Definition heap.h:489
CodeLargeObjectSpace * code_lo_space() const
Definition heap.h:735
SharedTrustedLargeObjectSpace * shared_trusted_lo_allocation_space() const
Definition heap.h:759
Heap(const Heap &)=delete
ReadOnlySpace * read_only_space() const
Definition heap.h:738
V8_EXPORT_PRIVATE void CopyRange(Tagged< HeapObject > dst_object, TSlot dst_slot, TSlot src_slot, int len, WriteBarrierMode mode)
TrustedLargeObjectSpace * trusted_lo_space() const
Definition heap.h:743
GCFlags GCFlagsForIncrementalMarking()
Definition heap.h:1030
std::unique_ptr< MemoryAllocator > memory_allocator_
Definition heap.h:2271
std::shared_ptr< v8::TaskRunner > task_runner_
Definition heap.h:2285
Tagged< UnionOf< Smi, Undefined, AllocationSiteWithWeakNext > > allocation_sites_list()
Definition heap.h:471
base::TimeDelta total_gc_time_ms_
Definition heap.h:2259
size_t InitialSemiSpaceSize()
Definition heap.h:1234
CodeSpace * code_space() const
Definition heap.h:732
size_t global_allocation_limit() const
Definition heap.h:1928
std::optional< EmbedderStackStateOrigin > embedder_stack_state_origin_
Definition heap.h:2310
void set_dirty_js_finalization_registries_list(Tagged< Object > object)
Definition heap.h:475
MarkingState * marking_state()
Definition heap.h:1621
void set_force_gc_on_next_allocation()
Definition heap.h:2042
bool using_initial_limit() const
Definition heap.h:1932
const AllocationType allocation_type_for_in_place_internalizable_strings_
Definition heap.h:2366
void IncrementNodesCopiedInNewSpace()
Definition heap.h:1320
EphemeronRememberedSet * ephemeron_remembered_set()
Definition heap.h:362
bool force_oom() const
Definition heap.h:532
PagedSpace * shared_allocation_space() const
Definition heap.h:750
bool HighMemoryPressure()
Definition heap.h:655
uint64_t backing_store_bytes() const
Definition heap.h:625
std::unique_ptr< IsolateSafepoint > safepoint_
Definition heap.h:2357
MarkingState marking_state_
Definition heap.h:2391
Tagged< Object > native_contexts_list() const
Definition heap.h:461
const GCTracer * tracer() const
Definition heap.h:801
const MemoryAllocator * memory_allocator() const
Definition heap.h:804
std::unique_ptr< AllocationObserver > stress_concurrent_allocation_observer_
Definition heap.h:2279
bool IsLastResortGC()
Definition heap.h:1619
std::unique_ptr< MinorGCJob > minor_gc_job_
Definition heap.h:2278
bool is_current_gc_forced() const
Definition heap.h:1353
V8_INLINE bool CanSafepoint() const
Definition heap.h:643
NonAtomicMarkingState * non_atomic_marking_state()
Definition heap.h:1623
bool IsAllocationObserverActive() const
Definition heap.h:538
void UpdateOldGenerationAllocationCounter()
Definition heap.h:1334
void set_using_initial_limit(bool value)
Definition heap.h:1936
PretenuringHandler pretenuring_handler_
Definition heap.h:2394
void IncrementNodesPromoted()
Definition heap.h:1322
v8::CppHeap * cpp_heap() const
Definition heap.h:1112
bool is_finalization_registry_cleanup_task_posted()
Definition heap.h:905
SharedTrustedSpace * shared_trusted_space() const
Definition heap.h:740
GCTracer * tracer()
Definition heap.h:800
bool IsInlineAllocationEnabled() const
Definition heap.h:1629
bool IsInGC() const
Definition heap.h:526
bool MinorMSSizeTaskTriggerReached() const
size_t SurvivedYoungObjectSize()
Definition heap.h:1312
SweepingForcedFinalizationMode
Definition heap.h:1545
GCCallbacks gc_prologue_callbacks_
Definition heap.h:2241
HeapAllocator * allocator()
Definition heap.h:1640
PretenuringHandler * pretenuring_handler()
Definition heap.h:1627
GetExternallyAllocatedMemoryInBytesCallback external_memory_callback_
Definition heap.h:2244
bool minor_sweeping_in_progress() const
Definition heap.h:1536
V8_WARN_UNUSED_RESULT V8_INLINE Tagged< HeapObject > AllocateRawWith(int size, AllocationType allocation, AllocationOrigin origin=AllocationOrigin::kRuntime, AllocationAlignment alignment=kTaggedAligned)
Tagged< Object > dirty_js_finalization_registries_list_tail()
Definition heap.h:484
bool deserialization_complete() const
Definition heap.h:638
bool ShouldReduceMemory() const
Definition heap.h:1615
const Heap *const heap_
Definition heap.h:2600
PagedSpaceIterator(const Heap *heap)
Definition heap.h:2595
static constexpr Tagged< Smi > zero()
Definition smi.h:99
void deallocate(Address *p, size_t n) noexcept
Definition heap.h:2689
StrongRootAllocator(HeapOrIsolateT *heap_or_isolate)
Definition heap.h:2682
StrongRootAllocator(const StrongRootAllocator< U > &other) V8_NOEXCEPT
Definition heap.h:2685
StrongRootsEntry * next
Definition heap.h:171
StrongRootsEntry * prev
Definition heap.h:170
StrongRootsEntry(const char *label)
Definition heap.h:164
virtual ~WeakObjectRetainer()=default
virtual Tagged< Object > RetainAs(Tagged< Object > object)=0
NormalPageSpace * space_
Definition compactor.cc:324
int start
uint32_t count
int end
std::unique_ptr< SafepointScope > safepoint_scope_
std::unique_ptr< ObjectIterator > object_iterator_
Label label
#define ROOT_ACCESSOR(Type, name, CamelName)
refactor address components for immediate indexing make OptimizeMaglevOnNextCall optimize to turbofan instead of maglev filter for tracing turbofan compilation trace turbo cfg trace TurboFan s graph trimmer trace TurboFan s control equivalence trace TurboFan s register allocator trace stack load store counters for optimized code in run fuzzing &&concurrent_recompilation trace_turbo trace_turbo_scheduled trace_turbo_stack_accesses verify TurboFan machine graph of code stubs enable FixedArray bounds checks print TurboFan statistics of wasm compilations maximum cumulative size of bytecode considered for inlining scale factor of bytecode size used to calculate the inlining budget * KB
Isolate * isolate
int32_t offset
std::string extension
TNode< Object > callback
ZoneVector< RpoNumber > & result
ZoneStack< RpoNumber > & stack
int pc_offset
NonAtomicMarkingState * marking_state_
size_t priority
const std::function< bool(int)> filter_
EmbedderStackState
Definition common.h:15
STL namespace.
void(*)(void *data) OutOfMemoryCallback
Definition heap.h:69
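A callback matching this signature could look like the sketch below; the registration hook and the meaning of data depend on the embedder and are not shown.
// Sketch only: 'data' is the opaque pointer supplied at registration.
void OnOutOfMemory(void* data) {
  // Last-chance logging or state flushing before V8 terminates.
}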
ClearRecordedSlots
Definition heap.h:137
GarbageCollectionReason
Definition globals.h:1428
ClearFreedMemoryMode
Definition heap.h:143
constexpr int kTaggedSize
Definition globals.h:542
Tagged(T object) -> Tagged< T >
@ TERMINAL_FAST_ELEMENTS_KIND
void * GetRandomMmapAddr()
void Print(Tagged< Object > obj)
Definition objects.h:774
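Illustrative only: this free function is the debugger-facing object printer, typically invoked by hand from a debugger prompt.
// Sketch: dumps a textual representation of any Tagged<Object> in
// scope ('obj' here) to stdout.
Print(obj);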
constexpr int kSystemPointerSize
Definition globals.h:410
typename detail::FlattenUnionHelper< Union<>, Ts... >::type UnionOf
Definition union.h:123
@ FIRST_GROWABLE_PAGED_SPACE
Definition globals.h:1326
Tagged< T > ForwardingAddress(Tagged< T > heap_obj)
Definition heap-inl.h:48
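A hedged sketch of a typical call site while updating a weak list; treating a cleared result as a dead from-space object is an assumption about this helper's contract.
// Sketch only: if 'obj' was evacuated, the forwarded location comes
// back; a cleared result is assumed to mean the object did not survive.
Tagged<HeapObject> forwarded = ForwardingAddress(obj);
if (forwarded.is_null()) {
  // Drop the weak reference to the dead object.
}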
V8_EXPORT_PRIVATE FlagValues v8_flags
ExternalBackingStoreType
Definition globals.h:1605
InvalidateRecordedSlots
Definition heap.h:139
InvalidateExternalPointerSlots
Definition heap.h:141
EmbedderStackStateOrigin
Definition heap.h:158
constexpr size_t kExternalAllocationSoftLimit
Definition globals.h:670
std::unordered_map< Tagged< HeapObject >, T, Object::Hasher, Object::KeyEqualSafe > UnorderedHeapObjectMap
Definition heap.h:179
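An illustrative instantiation of the alias; the int value type and the key variable are placeholders.
// Sketch only: the baked-in Hash/KeyEqual policies make
// Tagged<HeapObject> usable directly as an unordered_map key.
UnorderedHeapObjectMap<int> visit_counts;
visit_counts[some_heap_object] = 1;  // 'some_heap_object' is hypothetical.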
GCCallbackFlags
@ kNoGCCallbackFlags
TaskPriority
Definition v8-platform.h:24
size_t(*)(void *data, size_t current_heap_limit, size_t initial_heap_limit) NearHeapLimitCallback
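A callback matching this signature might look like the sketch below; the 25% headroom is an arbitrary illustration, and Isolate::AddNearHeapLimitCallback is the public registration point.
// Sketch only: return the heap limit to use from now on; returning a
// larger value grants headroom instead of letting the OOM proceed.
size_t RaiseHeapLimit(void* data, size_t current_heap_limit,
                      size_t initial_heap_limit) {
  return current_heap_limit + current_heap_limit / 4;
}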
MemoryPressureLevel
Definition v8-isolate.h:175
MeasureMemoryExecution
MeasureMemoryMode
#define MUTABLE_ROOT_LIST(V)
Definition roots.h:483
#define ROOT_LIST(V)
Definition roots.h:488
#define V8_NOEXCEPT
#define CHECK_GE(lhs, rhs)
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define V8_EXPORT_PRIVATE
Definition macros.h:460
#define DECL_RIGHT_TRIM(T)
Definition heap.h:2483
#define RIGHT_TRIMMABLE_ARRAY_LIST(V)
Definition heap.h:438
EphemeronRememberedSet * ephemeron_remembered_set_
Definition sweeper.cc:572
RootVisitor * visitor_
Heap * heap_
#define V8_INLINE
Definition v8config.h:500
#define V8_WARN_UNUSED_RESULT
Definition v8config.h:671
#define V8_NODISCARD
Definition v8config.h:693