HeapAllocator::HeapAllocator(LocalHeap* local_heap)
    : local_heap_(local_heap), heap_(local_heap->heap()) {}
// Call sites in the retrying slow paths funnel back into the fast path:
return AllocateRaw(size, allocation, origin, alignment);  // light-retry slow path
return AllocateRaw(size, allocation, origin, alignment);  // retry-or-fail slow path
AllocationResult result =
    AllocateRaw(size_in_bytes, allocation, origin, alignment);  // RetryAllocateRaw
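These fall-back calls belong to the retrying slow paths declared below (AllocateRawWithLightRetrySlowPath, AllocateRawWithRetryOrFailSlowPath, RetryAllocateRaw). As a rough sketch of the light-retry shape only; the two-attempt bound and the callable names are illustrative assumptions, not V8's exact implementation:

// Generic shape of the "light retry" slow path: try once, then alternate
// garbage collection and retry a bounded number of times. The callables
// stand in for HeapAllocator's Allocate/RetryAllocate/CollectGarbage.
template <typename Allocate, typename Retry, typename Collect>
auto AllocateWithLightRetry(Allocate&& allocate, Retry&& retry,
                            Collect&& collect_garbage) {
  auto result = allocate();
  for (int attempt = 0; result.IsFailure() && attempt < 2; ++attempt) {
    collect_garbage();  // free memory, then retry the same request
    result = retry();
  }
  return result;  // may still be a failure; callers decide whether to crash
}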
void HeapAllocator::VerifyLinearAllocationAreas() const { /* ... */ }

void HeapAllocator::IncrementObjectCounters() { /* ... */ }
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT

// static
void HeapAllocator::InitializeOncePerProcess() {
  SetAllocationGcInterval(v8_flags.gc_interval);
}
void HeapAllocator::SetAllocationGcInterval(int allocation_gc_interval) {
  allocation_gc_interval_.store(allocation_gc_interval,
                                std::memory_order_relaxed);
}

// static
std::atomic<int> HeapAllocator::allocation_gc_interval_{-1};
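The interval lives in a process-wide std::atomic<int>, so any thread may read it without further synchronization; relaxed ordering suffices because the value guards no other memory. A minimal standalone sketch of the same pattern (names here are illustrative, not V8's):

#include <atomic>

// Process-wide configuration value, written once at startup and read
// from many threads. Relaxed ordering is enough: no other memory is
// published through this variable.
std::atomic<int> g_gc_interval{-1};  // -1 means "feature disabled"

void InitOncePerProcess(int flag_value) {
  g_gc_interval.store(flag_value, std::memory_order_relaxed);
}

int CurrentGcInterval() {
  return g_gc_interval.load(std::memory_order_relaxed);
}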
void HeapAllocator::SetAllocationTimeout(int allocation_timeout) {
  if (allocation_timeout > 0) {
    allocation_timeout_ = allocation_timeout;
  } else {
    // A non-positive value disables the timeout entirely.
    allocation_timeout_.reset();
  }
}
void HeapAllocator::UpdateAllocationTimeout() {
  if (v8_flags.random_gc_interval > 0) {
    // new_timeout is drawn from the isolate's fuzzer_rng()->NextInt()
    // (computation elided). Re-arm the timeout, but always allow at least a
    // few allocations after a collection so allocation sequences complete.
    constexpr int kFewAllocationsHeadroom = 6;
    int timeout = std::max(kFewAllocationsHeadroom, new_timeout);
    SetAllocationTimeout(timeout);
    DCHECK(allocation_timeout_.has_value());
    return;
  }

  int timeout = allocation_gc_interval_.load(std::memory_order_relaxed);
  SetAllocationTimeout(timeout);
}
bool HeapAllocator::ReachedAllocationTimeout() {
  DCHECK(allocation_timeout_.has_value());
  // ... early-outs (always_allocate, retry of a failed allocation) elided ...
  allocation_timeout_ = std::max(0, allocation_timeout_.value() - 1);
  return allocation_timeout_.value() <= 0;
}

#endif  // V8_ENABLE_ALLOCATION_TIMEOUT
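Taken together, this is a per-allocation countdown used for GC fuzzing: each allocation decrements the counter, and when it reaches zero a collection is forced and the counter is re-armed via UpdateAllocationTimeout(). A self-contained sketch of the countdown, assuming a simplified re-arm policy (the class name and Tick() are stand-ins, not V8 APIs):

#include <algorithm>
#include <optional>

class AllocationTimeoutSketch {
 public:
  // A non-positive value disables the countdown, mirroring
  // SetAllocationTimeout above.
  void SetTimeout(int n) {
    if (n > 0) timeout_ = n; else timeout_.reset();
  }

  // Call once per allocation. Returns true when a GC should be forced.
  bool Tick() {
    if (!timeout_.has_value()) return false;  // feature disabled
    timeout_ = std::max(0, timeout_.value() - 1);
    return timeout_.value() <= 0;
  }

 private:
  std::optional<int> timeout_;
};

// Usage: if (timeouts.Tick()) { /* collect garbage, then SetTimeout(...) */ }

The declarations this file references are listed below.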
V8_INLINE int NextInt() V8_WARN_UNUSED_RESULT
V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT AllocationResult AllocateRaw(LocalHeap *local_heap, int object_size)
void MarkLinearAllocationAreasBlack()
V8_WARN_UNUSED_RESULT auto AllocateRawWithLightRetrySlowPath(AllocateFunction &&Allocate, RetryFunction &&RetryAllocate, AllocationType allocation)
void MakeLinearAllocationAreasIterable()
std::optional< MainAllocator > shared_space_allocator_
void CollectAllAvailableGarbage(AllocationType allocation)
void Setup(LinearAllocationArea *new_allocation_info=nullptr, LinearAllocationArea *old_allocation_info=nullptr)
V8_WARN_UNUSED_RESULT AllocationResult RetryAllocateRaw(int size_in_bytes, AllocationType allocation, AllocationOrigin origin, AllocationAlignment alignment)
V8_WARN_UNUSED_RESULT AllocationResult AllocateRawLargeInternal(int size_in_bytes, AllocationType allocation, AllocationOrigin origin, AllocationAlignment alignment)
OldLargeObjectSpace * shared_lo_space_
void AddAllocationObserver(AllocationObserver *observer, AllocationObserver *new_space_observer)
V8_INLINE ReadOnlySpace * read_only_space() const
Space * spaces_[LAST_SPACE+1]
void PublishPendingAllocations()
std::optional< MainAllocator > trusted_space_allocator_
void CollectGarbage(AllocationType allocation)
void UnmarkSharedLinearAllocationAreas()
void FreeLinearAllocationAreas()
void PauseAllocationObservers()
void MarkSharedLinearAllocationAreasBlack()
ReadOnlySpace * read_only_space_
HeapAllocator(LocalHeap *)
V8_INLINE OldLargeObjectSpace * shared_lo_space() const
void UnmarkLinearAllocationsArea()
void RemoveAllocationObserver(AllocationObserver *observer, AllocationObserver *new_space_observer)
V8_INLINE CodeLargeObjectSpace * code_lo_space() const
V8_INLINE OldLargeObjectSpace * lo_space() const
V8_WARN_UNUSED_RESULT auto AllocateRawWithRetryOrFailSlowPath(AllocateFunction &&Allocate, RetryFunction &&RetryAllocate, AllocationType allocation)
V8_WARN_UNUSED_RESULT V8_INLINE AllocationResult AllocateRaw(int size_in_bytes, AllocationType allocation, AllocationOrigin origin=AllocationOrigin::kRuntime, AllocationAlignment alignment=kTaggedAligned)
V8_INLINE NewLargeObjectSpace * new_lo_space() const
V8_INLINE OldLargeObjectSpace * shared_trusted_lo_space() const
void FreeLinearAllocationAreasAndResetFreeLists()
void FreeSharedLinearAllocationAreasAndResetFreeLists()
bool ReachedAllocationTimeout()
void ResumeAllocationObservers()
std::optional< MainAllocator > old_space_allocator_
std::optional< MainAllocator > shared_trusted_space_allocator_
void SetReadOnlySpace(ReadOnlySpace *)
V8_INLINE OldLargeObjectSpace * trusted_lo_space() const
std::optional< MainAllocator > code_space_allocator_
SharedTrustedLargeObjectSpace * shared_trusted_lo_space_
std::optional< MainAllocator > new_space_allocator_
NewSpace * new_space() const
V8_EXPORT_PRIVATE void CollectAllAvailableGarbage(GarbageCollectionReason gc_reason)
LocalHeap * main_thread_local_heap()
OldSpace * old_space() const
OldLargeObjectSpace * shared_lo_allocation_space() const
bool always_allocate() const
TrustedSpace * trusted_space() const
V8_EXPORT_PRIVATE bool CollectGarbageFromAnyThread(LocalHeap *local_heap, GarbageCollectionReason gc_reason=GarbageCollectionReason::kBackgroundAllocationFailure)
StickySpace * sticky_space() const
V8_EXPORT_PRIVATE bool CollectGarbageShared(LocalHeap *local_heap, GarbageCollectionReason gc_reason)
SharedTrustedSpace * shared_trusted_allocation_space() const
SharedTrustedLargeObjectSpace * shared_trusted_lo_allocation_space() const
V8_EXPORT_PRIVATE int MaxRegularHeapObjectSize(AllocationType allocation)
CodeSpace * code_space() const
PagedSpace * shared_allocation_space() const
Space * space(int idx) const
V8_EXPORT_PRIVATE void CollectGarbage(AllocationSpace space, GarbageCollectionReason gc_reason, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
Isolate * isolate() const
bool has_shared_space() const
base::RandomNumberGenerator * fuzzer_rng()
void ResetPendingObject()
void RemoveAllocationObserver(AllocationObserver *observer)
void AddAllocationObserver(AllocationObserver *observer)
bool is_main_thread() const
void SetRetryOfFailedAllocation(bool value)
bool IsRetryOfFailedAllocation() const
constexpr bool IsSharedAllocationType(AllocationType kind)
V8_EXPORT_PRIVATE FlagValues v8_flags
#define DCHECK(condition)
#define DCHECK_GT(v1, v2)
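For orientation, the caller-side pattern for AllocationResult is check-and-extract. The mock below imitates the IsFailure()/To() shape seen in V8's API with illustrative types (uintptr_t stands in for a tagged heap pointer; treat the exact calling convention as an assumption):

#include <cstdint>
#include <optional>

// Minimal stand-in for V8's AllocationResult: either an address or failure.
struct AllocationResult {
  std::optional<uintptr_t> address;
  bool IsFailure() const { return !address.has_value(); }
  bool To(uintptr_t* out) const {
    if (IsFailure()) return false;
    *out = *address;
    return true;
  }
};

// Caller-side pattern: attempt the fast path, fall back on failure.
bool TryAllocate(AllocationResult (*allocate_raw)(int), int size) {
  uintptr_t object;
  if (allocate_raw(size).To(&object)) return true;
  // Fast path failed: real callers invoke the retrying slow paths above.
  return false;
}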