v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
heap-allocator-inl.h
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_ALLOCATOR_INL_H_
#define V8_HEAP_HEAP_ALLOCATOR_INL_H_

#include "src/heap/heap-allocator.h"
// Include the non-inl header before the rest of the headers.

#include "src/base/logging.h"
#include "src/common/assert-scope.h"
#include "src/common/globals.h"
#include "src/heap/heap-inl.h"
#include "src/heap/local-heap.h"
#include "src/heap/main-allocator-inl.h"
#include "src/heap/new-spaces.h"
#include "src/heap/paged-spaces.h"
#include "src/heap/read-only-spaces.h"
#include "src/heap/zapping.h"
21
22namespace v8 {
23namespace internal {
24
26 return static_cast<PagedSpace*>(spaces_[CODE_SPACE]);
27}
28
32
36
40
42 return static_cast<NewSpace*>(spaces_[NEW_SPACE]);
43}
44
48
50 return static_cast<PagedSpace*>(spaces_[OLD_SPACE]);
51}
52
56
60
64
68
72
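
// Fast-path allocation, dispatched at compile time on AllocationType. Objects
// up to the large-object threshold are allocated through the per-space
// MainAllocators; larger ones go to AllocateRawLargeInternal(). The returned
// AllocationResult may be a failure; callers that must succeed use
// AllocateRawWith() further below.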
template <AllocationType type>
V8_WARN_UNUSED_RESULT V8_INLINE AllocationResult HeapAllocator::AllocateRaw(
    int size_in_bytes, AllocationOrigin origin, AllocationAlignment alignment) {
  DCHECK(!heap_->IsInGC());
  DCHECK(AllowHandleAllocation::IsAllowed());
  DCHECK(AllowHeapAllocation::IsAllowed());
  // We need to have entered the isolate before allocating.
  DCHECK_EQ(heap_->isolate(), Isolate::TryGetCurrent());
#if V8_ENABLE_WEBASSEMBLY
  if (!v8_flags.wasm_jitless) {
    trap_handler::AssertThreadNotInWasm();
  }
#endif
#if DEBUG
  local_heap_->VerifyCurrent();
#endif

  if (v8_flags.single_generation.value() && type == AllocationType::kYoung) {
    return AllocateRaw(size_in_bytes, AllocationType::kOld, origin, alignment);
  }

#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  if (V8_UNLIKELY(allocation_timeout_.has_value()) &&
      V8_UNLIKELY(--*allocation_timeout_ <= 0)) {
    return AllocationResult::Failure();
  }
#endif  // V8_ENABLE_ALLOCATION_TIMEOUT

#ifdef DEBUG
  IncrementObjectCounters();
#endif  // DEBUG

  if (heap_->CanSafepoint()) {
    local_heap_->Safepoint();
  }
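
  // Objects larger than MaxRegularHeapObjectSize() do not fit on a regular
  // page and are redirected to the corresponding large-object space.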
  const size_t large_object_threshold = heap_->MaxRegularHeapObjectSize(type);
  const bool large_object =
      static_cast<size_t>(size_in_bytes) > large_object_threshold;

  Tagged<HeapObject> object;
  AllocationResult allocation;

  if (V8_UNLIKELY(large_object)) {
    allocation =
        AllocateRawLargeInternal(size_in_bytes, type, origin, alignment);
  } else {
    switch (type) {
      case AllocationType::kYoung:
        allocation =
            new_space_allocator_->AllocateRaw(size_in_bytes, alignment, origin);
        break;
      case AllocationType::kMap:
      case AllocationType::kOld:
        allocation =
            old_space_allocator_->AllocateRaw(size_in_bytes, alignment, origin);
        DCHECK_IMPLIES(v8_flags.sticky_mark_bits && !allocation.IsFailure(),
                       heap_->marking_state()->IsMarked(allocation.ToObject()));
        break;
      case AllocationType::kCode: {
        DCHECK_EQ(alignment, AllocationAlignment::kTaggedAligned);
        DCHECK(AllowCodeAllocation::IsAllowed());
        allocation = code_space_allocator_->AllocateRaw(
            size_in_bytes, AllocationAlignment::kTaggedAligned, origin);
        break;
      }
      case AllocationType::kReadOnly:
        DCHECK(read_only_space()->writable());
        DCHECK_EQ(AllocationOrigin::kRuntime, origin);
        allocation = read_only_space()->AllocateRaw(size_in_bytes, alignment);
        break;
      case AllocationType::kSharedMap:
      case AllocationType::kSharedOld:
        allocation = shared_space_allocator_->AllocateRaw(size_in_bytes,
                                                          alignment, origin);
        break;
      case AllocationType::kTrusted:
        allocation = trusted_space_allocator_->AllocateRaw(size_in_bytes,
                                                           alignment, origin);
        break;
      case AllocationType::kSharedTrusted:
        allocation = shared_trusted_space_allocator_->AllocateRaw(
            size_in_bytes, alignment, origin);
        break;
    }
  }

  if (allocation.To(&object)) {
    if (heap::ShouldZapGarbage() && AllocationType::kCode == type) {
      heap::ZapCodeBlock(object.address(), size_in_bytes);
    }

    if (V8_UNLIKELY(!heap_->allocation_trackers_.empty())) {
      for (auto& tracker : heap_->allocation_trackers_) {
        tracker->AllocationEvent(object.address(), size_in_bytes);
      }
    }
  }

  return allocation;
}
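
// Runtime-dispatched variant: translates a dynamic AllocationType into a call
// to the statically dispatched template above.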
AllocationResult HeapAllocator::AllocateRaw(int size_in_bytes,
                                            AllocationType type,
                                            AllocationOrigin origin,
                                            AllocationAlignment alignment) {
  switch (type) {
    case AllocationType::kYoung:
      return AllocateRaw<AllocationType::kYoung>(size_in_bytes, origin,
                                                 alignment);
    case AllocationType::kOld:
      return AllocateRaw<AllocationType::kOld>(size_in_bytes, origin,
                                               alignment);
    case AllocationType::kCode:
      return AllocateRaw<AllocationType::kCode>(size_in_bytes, origin,
                                                alignment);
    case AllocationType::kMap:
      return AllocateRaw<AllocationType::kMap>(size_in_bytes, origin,
                                               alignment);
    case AllocationType::kReadOnly:
      return AllocateRaw<AllocationType::kReadOnly>(size_in_bytes, origin,
                                                    alignment);
    case AllocationType::kSharedMap:
      return AllocateRaw<AllocationType::kSharedMap>(size_in_bytes, origin,
                                                     alignment);
    case AllocationType::kSharedOld:
      return AllocateRaw<AllocationType::kSharedOld>(size_in_bytes, origin,
                                                     alignment);
    case AllocationType::kTrusted:
      return AllocateRaw<AllocationType::kTrusted>(size_in_bytes, origin,
                                                   alignment);
    case AllocationType::kSharedTrusted:
      return AllocateRaw<AllocationType::kSharedTrusted>(size_in_bytes, origin,
                                                         alignment);
  }
  UNREACHABLE();
}
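
// Allocation with retries. The fast path is tried first; on failure, garbage
// collections are run according to the retry mode: kLightRetry returns the
// null HeapObject() if allocation still fails, while kRetryOrFail terminates
// the process with a fatal out-of-memory error instead.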
template <HeapAllocator::AllocationRetryMode mode>
V8_WARN_UNUSED_RESULT V8_INLINE Tagged<HeapObject>
HeapAllocator::AllocateRawWith(int size, AllocationType allocation,
                               AllocationOrigin origin,
                               AllocationAlignment alignment) {
  AllocationResult result;
  Tagged<HeapObject> object;
  size = ALIGN_TO_ALLOCATION_ALIGNMENT(size);
  if (allocation == AllocationType::kYoung) {
    result = AllocateRaw<AllocationType::kYoung>(size, origin, alignment);
    if (result.To(&object)) {
      return object;
    }
  } else if (allocation == AllocationType::kOld) {
    result = AllocateRaw<AllocationType::kOld>(size, origin, alignment);
    if (result.To(&object)) {
      return object;
    }
  }
  switch (mode) {
    case kLightRetry:
      result = AllocateRawWithLightRetrySlowPath(size, allocation, origin,
                                                 alignment);
      break;
    case kRetryOrFail:
      result = AllocateRawWithRetryOrFailSlowPath(size, allocation, origin,
                                                  alignment);
      break;
  }
  if (result.To(&object)) {
    return object;
  }
  return HeapObject();
}
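
// Light-retry slow path: runs up to two garbage collections, retrying the
// allocation after each one, before returning failure to the caller.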
template <typename AllocateFunction, typename RetryFunction>
V8_WARN_UNUSED_RESULT auto HeapAllocator::AllocateRawWithLightRetrySlowPath(
    AllocateFunction&& Allocate, RetryFunction&& RetryAllocate,
    AllocationType allocation) {
  if (auto result = Allocate(allocation)) [[likely]] {
    return result;
  }

  // Two GCs before returning failure.
  CollectGarbage(allocation);
  if (auto result = RetryAllocate(allocation)) {
    return result;
  }
  CollectGarbage(allocation);
  return RetryAllocate(allocation);
}
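
// Last-resort slow path: after the light retries fail, collects all available
// garbage and retries once more; if that also fails, the process is killed
// with an out-of-memory fatal error.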
template <typename AllocateFunction, typename RetryFunction>
V8_WARN_UNUSED_RESULT auto HeapAllocator::AllocateRawWithRetryOrFailSlowPath(
    AllocateFunction&& Allocate, RetryFunction&& RetryAllocate,
    AllocationType allocation) {
  if (auto result = AllocateRawWithLightRetrySlowPath(Allocate, RetryAllocate,
                                                      allocation)) {
    return result;
  }

  CollectAllAvailableGarbage(allocation);
  if (auto result = RetryAllocate(allocation)) {
    return result;
  }

  V8::FatalProcessOutOfMemory(heap_->isolate(), "CALL_AND_RETRY_LAST",
                              V8::kHeapOOM);
}

template <typename Function>
V8_WARN_UNUSED_RESULT V8_INLINE auto HeapAllocator::CustomAllocateWithRetryOrFail(
    Function&& Allocate, AllocationType allocation) {
  return *AllocateRawWithRetryOrFailSlowPath(Allocate, Allocate, allocation);
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_HEAP_ALLOCATOR_INL_H_