heap-allocator.cc

V8 is Google's open source high-performance JavaScript and WebAssembly engine, written in C++. This is the source listing of src/heap/heap-allocator.cc.
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/heap-allocator.h"

#include "src/base/logging.h"
#include "src/common/globals.h"
#include "src/execution/isolate.h"
#include "src/heap/heap-allocator-inl.h"
#include "src/heap/heap-inl.h"
#include "src/logging/counters.h"

namespace v8 {
namespace internal {

class Heap;

HeapAllocator::HeapAllocator(LocalHeap* local_heap)
    : local_heap_(local_heap), heap_(local_heap->heap()) {}

// NOTE: HeapAllocator::Setup() and HeapAllocator::SetReadOnlySpace() are
// elided from this listing.

AllocationResult HeapAllocator::AllocateRawLargeInternal(
    int size_in_bytes, AllocationType allocation, AllocationOrigin origin,
    AllocationAlignment alignment) {
  DCHECK_GT(size_in_bytes, heap_->MaxRegularHeapObjectSize(allocation));
  switch (allocation) {
    case AllocationType::kYoung:
      return new_lo_space()->AllocateRaw(local_heap_, size_in_bytes);
    case AllocationType::kOld:
      return lo_space()->AllocateRaw(local_heap_, size_in_bytes);
    case AllocationType::kCode:
      return code_lo_space()->AllocateRaw(local_heap_, size_in_bytes);
    case AllocationType::kSharedOld:
      return shared_lo_space()->AllocateRaw(local_heap_, size_in_bytes);
    case AllocationType::kTrusted:
      return trusted_lo_space()->AllocateRaw(local_heap_, size_in_bytes);
    case AllocationType::kSharedTrusted:
      return shared_trusted_lo_space()->AllocateRaw(local_heap_, size_in_bytes);
    case AllocationType::kMap:
    case AllocationType::kReadOnly:
    case AllocationType::kSharedMap:
      UNREACHABLE();
  }
}

namespace {

constexpr AllocationSpace AllocationTypeToGCSpace(AllocationType type) {
  switch (type) {
    case AllocationType::kYoung:
      return NEW_SPACE;
    case AllocationType::kOld:
    case AllocationType::kCode:
    case AllocationType::kMap:
    case AllocationType::kTrusted:
      // OLD_SPACE indicates full GC.
      return OLD_SPACE;
    case AllocationType::kReadOnly:
    case AllocationType::kSharedMap:
    case AllocationType::kSharedOld:
    case AllocationType::kSharedTrusted:
      UNREACHABLE();
  }
}

}  // namespace

AllocationResult HeapAllocator::AllocateRawWithLightRetrySlowPath(
    int size, AllocationType allocation, AllocationOrigin origin,
    AllocationAlignment alignment) {
  auto Allocate = [&](AllocationType allocation) {
    return AllocateRaw(size, allocation, origin, alignment);
  };
  auto RetryAllocate = [&](AllocationType allocation) {
    return RetryAllocateRaw(size, allocation, origin, alignment);
  };

  return AllocateRawWithLightRetrySlowPath(Allocate, RetryAllocate,
                                           allocation);
}

void HeapAllocator::CollectGarbage(AllocationType allocation) {
  if (IsSharedAllocationType(allocation)) {
    heap_->CollectGarbageShared(local_heap_,
                                GarbageCollectionReason::kAllocationFailure);
  } else if (local_heap_->is_main_thread()) {
    // On the main thread we can directly start the GC.
    AllocationSpace space_to_gc = AllocationTypeToGCSpace(allocation);
    heap_->CollectGarbage(space_to_gc,
                          GarbageCollectionReason::kAllocationFailure);
  } else {
    // Request GC from main thread.
    heap_->CollectGarbageFromAnyThread(local_heap_);
  }
}

AllocationResult HeapAllocator::AllocateRawWithRetryOrFailSlowPath(
    int size, AllocationType allocation, AllocationOrigin origin,
    AllocationAlignment alignment) {
  auto Allocate = [&](AllocationType allocation) {
    return AllocateRaw(size, allocation, origin, alignment);
  };
  auto RetryAllocate = [&](AllocationType allocation) {
    return RetryAllocateRaw(size, allocation, origin, alignment);
  };
  return AllocateRawWithRetryOrFailSlowPath(Allocate, RetryAllocate,
                                            allocation);
}

void HeapAllocator::CollectAllAvailableGarbage(AllocationType allocation) {
  if (IsSharedAllocationType(allocation)) {
    heap_->CollectGarbageShared(heap_->main_thread_local_heap(),
                                GarbageCollectionReason::kLastResort);
  } else if (local_heap_->is_main_thread()) {
    // On the main thread we can directly start the GC.
    heap_->CollectAllAvailableGarbage(GarbageCollectionReason::kLastResort);
  } else {
    // Request GC from main thread.
    heap_->CollectGarbageFromAnyThread(local_heap_);
  }
}

AllocationResult HeapAllocator::RetryAllocateRaw(
    int size_in_bytes, AllocationType allocation, AllocationOrigin origin,
    AllocationAlignment alignment) {
  // Initially flags on the LocalHeap are always disabled. They are only
  // active while this method is running.
  DCHECK(!local_heap_->IsRetryOfFailedAllocation());
  local_heap_->SetRetryOfFailedAllocation(true);
  AllocationResult result =
      AllocateRaw(size_in_bytes, allocation, origin, alignment);
  local_heap_->SetRetryOfFailedAllocation(false);
  return result;
}

void HeapAllocator::MakeLinearAllocationAreasIterable() {
  if (new_space_allocator_) {
    new_space_allocator_->MakeLinearAllocationAreaIterable();
  }
  old_space_allocator_->MakeLinearAllocationAreaIterable();
  trusted_space_allocator_->MakeLinearAllocationAreaIterable();
  code_space_allocator_->MakeLinearAllocationAreaIterable();

  if (shared_space_allocator_) {
    shared_space_allocator_->MakeLinearAllocationAreaIterable();
  }

  if (shared_trusted_space_allocator_) {
    shared_trusted_space_allocator_->MakeLinearAllocationAreaIterable();
  }
}

#if DEBUG
void HeapAllocator::VerifyLinearAllocationAreas() const {
  if (new_space_allocator_) {
    new_space_allocator_->Verify();
  }
  old_space_allocator_->Verify();
  trusted_space_allocator_->Verify();
  code_space_allocator_->Verify();

  if (shared_space_allocator_) {
    shared_space_allocator_->Verify();
  }

  if (shared_trusted_space_allocator_) {
    shared_trusted_space_allocator_->Verify();
  }
}
#endif  // DEBUG

void HeapAllocator::MarkLinearAllocationAreasBlack() {
  DCHECK(!v8_flags.black_allocated_pages);
  old_space_allocator_->MarkLinearAllocationAreaBlack();
  trusted_space_allocator_->MarkLinearAllocationAreaBlack();
  code_space_allocator_->MarkLinearAllocationAreaBlack();
}

void HeapAllocator::UnmarkLinearAllocationsArea() {
  DCHECK(!v8_flags.black_allocated_pages);
  old_space_allocator_->UnmarkLinearAllocationArea();
  trusted_space_allocator_->UnmarkLinearAllocationArea();
  code_space_allocator_->UnmarkLinearAllocationArea();
}

void HeapAllocator::MarkSharedLinearAllocationAreasBlack() {
  DCHECK(!v8_flags.black_allocated_pages);
  if (shared_space_allocator_) {
    shared_space_allocator_->MarkLinearAllocationAreaBlack();
  }
  if (shared_trusted_space_allocator_) {
    shared_trusted_space_allocator_->MarkLinearAllocationAreaBlack();
  }
}

void HeapAllocator::UnmarkSharedLinearAllocationAreas() {
  DCHECK(!v8_flags.black_allocated_pages);
  if (shared_space_allocator_) {
    shared_space_allocator_->UnmarkLinearAllocationArea();
  }
  if (shared_trusted_space_allocator_) {
    shared_trusted_space_allocator_->UnmarkLinearAllocationArea();
  }
}

void HeapAllocator::FreeLinearAllocationAreasAndResetFreeLists() {
  DCHECK(v8_flags.black_allocated_pages);
  old_space_allocator_->FreeLinearAllocationAreaAndResetFreeList();
  trusted_space_allocator_->FreeLinearAllocationAreaAndResetFreeList();
  code_space_allocator_->FreeLinearAllocationAreaAndResetFreeList();
}

void HeapAllocator::FreeSharedLinearAllocationAreasAndResetFreeLists() {
  DCHECK(v8_flags.black_allocated_pages);
  if (shared_space_allocator_) {
    shared_space_allocator_->FreeLinearAllocationAreaAndResetFreeList();
  }
  if (shared_trusted_space_allocator_) {
    shared_trusted_space_allocator_->FreeLinearAllocationAreaAndResetFreeList();
  }
}

void HeapAllocator::FreeLinearAllocationAreas() {
  if (new_space_allocator_) {
    new_space_allocator_->FreeLinearAllocationArea();
  }
  old_space_allocator_->FreeLinearAllocationArea();
  trusted_space_allocator_->FreeLinearAllocationArea();
  code_space_allocator_->FreeLinearAllocationArea();

  if (shared_space_allocator_) {
    shared_space_allocator_->FreeLinearAllocationArea();
  }

  if (shared_trusted_space_allocator_) {
    shared_trusted_space_allocator_->FreeLinearAllocationArea();
  }
}

void HeapAllocator::PublishPendingAllocations() {
  if (new_space_allocator_) {
    new_space_allocator_->MoveOriginalTopForward();
  }

  old_space_allocator_->MoveOriginalTopForward();
  trusted_space_allocator_->MoveOriginalTopForward();
  code_space_allocator_->MoveOriginalTopForward();

  lo_space()->ResetPendingObject();
  if (new_lo_space()) new_lo_space()->ResetPendingObject();
  code_lo_space()->ResetPendingObject();
  trusted_lo_space()->ResetPendingObject();
}

void HeapAllocator::AddAllocationObserver(
    AllocationObserver* observer, AllocationObserver* new_space_observer) {
  if (new_space_allocator_) {
    new_space_allocator_->AddAllocationObserver(new_space_observer);
  }
  if (new_lo_space()) {
    new_lo_space()->AddAllocationObserver(new_space_observer);
  }
  old_space_allocator_->AddAllocationObserver(observer);
  lo_space()->AddAllocationObserver(observer);
  trusted_space_allocator_->AddAllocationObserver(observer);
  trusted_lo_space()->AddAllocationObserver(observer);
  code_space_allocator_->AddAllocationObserver(observer);
  code_lo_space()->AddAllocationObserver(observer);
}

void HeapAllocator::RemoveAllocationObserver(
    AllocationObserver* observer, AllocationObserver* new_space_observer) {
  if (new_space_allocator_) {
    new_space_allocator_->RemoveAllocationObserver(new_space_observer);
  }
  if (new_lo_space()) {
    new_lo_space()->RemoveAllocationObserver(new_space_observer);
  }
  old_space_allocator_->RemoveAllocationObserver(observer);
  lo_space()->RemoveAllocationObserver(observer);
  trusted_space_allocator_->RemoveAllocationObserver(observer);
  trusted_lo_space()->RemoveAllocationObserver(observer);
  code_space_allocator_->RemoveAllocationObserver(observer);
  code_lo_space()->RemoveAllocationObserver(observer);
}

void HeapAllocator::PauseAllocationObservers() {
  if (new_space_allocator_) {
    new_space_allocator_->PauseAllocationObservers();
  }
  old_space_allocator_->PauseAllocationObservers();
  trusted_space_allocator_->PauseAllocationObservers();
  code_space_allocator_->PauseAllocationObservers();
}

void HeapAllocator::ResumeAllocationObservers() {
  if (new_space_allocator_) {
    new_space_allocator_->ResumeAllocationObservers();
  }
  old_space_allocator_->ResumeAllocationObservers();
  trusted_space_allocator_->ResumeAllocationObservers();
  code_space_allocator_->ResumeAllocationObservers();
}

#ifdef DEBUG

void HeapAllocator::IncrementObjectCounters() {
  heap_->isolate()->counters()->objs_since_last_full()->Increment();
  heap_->isolate()->counters()->objs_since_last_young()->Increment();
}

#endif  // DEBUG

#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
// static
void HeapAllocator::InitializeOncePerProcess() {
  SetAllocationGcInterval(v8_flags.gc_interval);
}

// static
void HeapAllocator::SetAllocationGcInterval(int allocation_gc_interval) {
  allocation_gc_interval_.store(allocation_gc_interval,
                                std::memory_order_relaxed);
}

// static
std::atomic<int> HeapAllocator::allocation_gc_interval_{-1};

void HeapAllocator::SetAllocationTimeout(int allocation_timeout) {
  if (allocation_timeout > 0) {
    allocation_timeout_ = allocation_timeout;
  } else {
    allocation_timeout_.reset();
  }
}

void HeapAllocator::UpdateAllocationTimeout() {
  if (v8_flags.random_gc_interval > 0) {
    const int new_timeout = heap_->isolate()->fuzzer_rng()->NextInt(
        v8_flags.random_gc_interval + 1);
    // Reset the allocation timeout, but make sure to allow at least a few
    // allocations after a collection. The reason for this is that we have a
    // lot of allocation sequences and we assume that a garbage collection
    // will allow the subsequent allocation attempts to go through.
    constexpr int kFewAllocationsHeadroom = 6;
    int timeout = std::max(kFewAllocationsHeadroom, new_timeout);
    SetAllocationTimeout(timeout);
    DCHECK(allocation_timeout_.has_value());
    return;
  }

  int timeout = allocation_gc_interval_.load(std::memory_order_relaxed);
  SetAllocationTimeout(timeout);
}

bool HeapAllocator::ReachedAllocationTimeout() {
  DCHECK(allocation_timeout_.has_value());

  if (heap_->always_allocate() || local_heap_->IsRetryOfFailedAllocation()) {
    return false;
  }

  allocation_timeout_ = std::max(0, allocation_timeout_.value() - 1);
  return allocation_timeout_.value() <= 0;
}

#endif  // V8_ENABLE_ALLOCATION_TIMEOUT

}  // namespace internal
}  // namespace v8
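
Editor's note: both slow paths above build an Allocate/RetryAllocate pair of lambdas and hand them to templated overloads of the same names declared in heap-allocator.h (visible among this file's cross-references). The sketch below illustrates the retry protocol those overloads plausibly implement; it is an assumption-labeled illustration, not the upstream implementation, and the free-function name and GCFunction parameter are invented here for clarity.

// Sketch only: the "light retry" protocol. Assumes the functors return
// AllocationResult (as the lambdas in this file do) and that CollectGarbage
// behaves like HeapAllocator::CollectGarbage above.
template <typename AllocateFunction, typename RetryFunction,
          typename GCFunction>
AllocationResult LightRetrySlowPathSketch(AllocateFunction&& Allocate,
                                          RetryFunction&& RetryAllocate,
                                          GCFunction&& CollectGarbage,
                                          AllocationType allocation) {
  AllocationResult result = Allocate(allocation);
  if (!result.IsFailure()) return result;
  // A bounded number of "collect, then retry" rounds: each GC is expected to
  // free enough memory in the target space for the retry to succeed.
  for (int i = 0; i < 2; ++i) {
    CollectGarbage(allocation);
    result = RetryAllocate(allocation);
    if (!result.IsFailure()) return result;
  }
  return result;  // Still a failure; the retry-or-fail variant escalates
                  // (last-resort GC, then fatal out-of-memory) instead.
}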
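
The V8_ENABLE_ALLOCATION_TIMEOUT block is a fuzzing aid: every allocation decrements a countdown, and when it reaches zero the allocator injects a garbage collection. A self-contained model of that countdown follows; the class and method names are hypothetical, for illustration only, and it deliberately returns false when no timeout is set rather than asserting as the real code does.

#include <algorithm>
#include <optional>

// Hypothetical stand-alone model of the countdown implemented by
// SetAllocationTimeout() / ReachedAllocationTimeout() above.
class AllocationTimeoutModel {
 public:
  // A non-positive timeout disables the countdown, mirroring
  // HeapAllocator::SetAllocationTimeout().
  void SetTimeout(int timeout) {
    if (timeout > 0) {
      timeout_ = timeout;
    } else {
      timeout_.reset();
    }
  }

  // Called once per allocation; returns true when a GC should be injected.
  bool ReachedTimeout() {
    if (!timeout_.has_value()) return false;
    timeout_ = std::max(0, timeout_.value() - 1);
    return timeout_.value() <= 0;
  }

 private:
  std::optional<int> timeout_;
};

// Usage: with a timeout of 3, the third allocation attempt triggers a GC.
//   AllocationTimeoutModel model;
//   model.SetTimeout(3);
//   model.ReachedTimeout();  // false (2 left)
//   model.ReachedTimeout();  // false (1 left)
//   model.ReachedTimeout();  // true  (0 left) -> force GC, then reset.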