v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
object-allocator.h
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_CPPGC_OBJECT_ALLOCATOR_H_
#define V8_HEAP_CPPGC_OBJECT_ALLOCATOR_H_

#include <optional>

#include "include/cppgc/allocation.h"
#include "include/cppgc/internal/gc-info.h"
#include "src/base/logging.h"
#include "src/heap/cppgc/free-list.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/heap-space.h"
#include "src/heap/cppgc/memory.h"
#include "src/heap/cppgc/raw-heap.h"

namespace cppgc {

namespace internal {
class ObjectAllocator;
class PreFinalizerHandler;
}  // namespace internal

class V8_EXPORT AllocationHandle {
 private:
  AllocationHandle() = default;
  friend class internal::ObjectAllocator;
};

namespace internal {

class StatsCollector;
class PageBackend;
class GarbageCollector;

class V8_EXPORT_PRIVATE ObjectAllocator final : public cppgc::AllocationHandle {
 public:
  using AlignVal = std::align_val_t;
  static constexpr size_t kSmallestSpaceSize = 32;

  ObjectAllocator(RawHeap&, PageBackend&, StatsCollector&, PreFinalizerHandler&,
                  FatalOutOfMemoryHandler&, GarbageCollector&);

  inline void* AllocateObject(size_t size, GCInfoIndex gcinfo);
  inline void* AllocateObject(size_t size, AlignVal alignment,
                              GCInfoIndex gcinfo);
  inline void* AllocateObject(size_t size, GCInfoIndex gcinfo,
                              CustomSpaceIndex space_index);
  inline void* AllocateObject(size_t size, AlignVal alignment,
                              GCInfoIndex gcinfo, CustomSpaceIndex space_index);

  void ResetLinearAllocationBuffers();
  void MarkAllPagesAsYoung();

#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  void UpdateAllocationTimeout();
  int get_allocation_timeout_for_testing() const {
    return *allocation_timeout_;
  }
#endif  // V8_ENABLE_ALLOCATION_TIMEOUT

 private:
  bool in_disallow_gc_scope() const;

  // Returns the initially tried SpaceType to allocate an object of |size|
  // bytes on. Returns the largest regular object size bucket for large
  // objects.
  inline static RawHeap::RegularSpaceType GetInitialSpaceIndexForSize(
      size_t size);

  inline void* AllocateObjectOnSpace(NormalPageSpace&, size_t, GCInfoIndex);
  inline void* AllocateObjectOnSpace(NormalPageSpace&, size_t, AlignVal,
                                     GCInfoIndex);
  inline void* OutOfLineAllocate(NormalPageSpace&, size_t, AlignVal,
                                 GCInfoIndex);

  // Called from the fast-path LAB allocation when the LAB capacity cannot fit
  // the allocation or a large object is requested. Uses an out parameter as
  // `V8_PRESERVE_MOST` cannot handle non-void return values.
  //
  // Prefer using `OutOfLineAllocate()`.
  void V8_PRESERVE_MOST OutOfLineAllocateGCSafePoint(NormalPageSpace&, size_t,
                                                     AlignVal, GCInfoIndex,
                                                     void**);
  // Raw allocation, does not emit a safepoint for conservative GC.
  void* OutOfLineAllocateImpl(NormalPageSpace&, size_t, AlignVal, GCInfoIndex);

  bool TryRefillLinearAllocationBuffer(NormalPageSpace&, size_t);
  bool TryRefillLinearAllocationBufferFromFreeList(NormalPageSpace&, size_t);
  bool TryExpandAndRefillLinearAllocationBuffer(NormalPageSpace&);

#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  void TriggerGCOnAllocationTimeoutIfNeeded();
#endif  // V8_ENABLE_ALLOCATION_TIMEOUT

  RawHeap& raw_heap_;
  PageBackend& page_backend_;
  StatsCollector& stats_collector_;
  PreFinalizerHandler& prefinalizer_handler_;
  FatalOutOfMemoryHandler& oom_handler_;
  GarbageCollector& garbage_collector_;
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  // Specifies how many allocations should be performed until triggering a
  // garbage collection.
  std::optional<int> allocation_timeout_;
#endif  // V8_ENABLE_ALLOCATION_TIMEOUT
};
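
// Illustrative note (not part of the original header): embedders never call
// ObjectAllocator directly. cppgc::MakeGarbageCollected() takes the
// cppgc::AllocationHandle returned by cppgc::Heap::GetAllocationHandle(), and
// the internal implementation downcasts that handle to ObjectAllocator to
// reach the AllocateObject() overloads above. A minimal sketch of the user
// side, assuming `heap` is a previously created cppgc::Heap:
//
//   class Node final : public cppgc::GarbageCollected<Node> {
//    public:
//     void Trace(cppgc::Visitor*) const {}
//   };
//
//   Node* node =
//       cppgc::MakeGarbageCollected<Node>(heap->GetAllocationHandle());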

void* ObjectAllocator::AllocateObject(size_t size, GCInfoIndex gcinfo) {
  DCHECK(!in_disallow_gc_scope());
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  TriggerGCOnAllocationTimeoutIfNeeded();
#endif  // V8_ENABLE_ALLOCATION_TIMEOUT
  const size_t allocation_size =
      RoundUp(size + sizeof(HeapObjectHeader), kAllocationGranularity);
  const RawHeap::RegularSpaceType type =
      GetInitialSpaceIndexForSize(allocation_size);
  return AllocateObjectOnSpace(NormalPageSpace::From(*raw_heap_.Space(type)),
                               allocation_size, gcinfo);
}
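
// Worked example (illustrative, not part of the original header): on a 64-bit
// build, kAllocationGranularity == sizeof(HeapObjectHeader) == 8 (see the
// static_asserts further below), so a request for a 20-byte object becomes
//   allocation_size = RoundUp(20 + 8, 8) = 32,
// i.e. the header is prepended and the sum is padded to the allocation
// granularity before a space bucket is chosen.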

void* ObjectAllocator::AllocateObject(size_t size, AlignVal alignment,
                                      GCInfoIndex gcinfo) {
  DCHECK(!in_disallow_gc_scope());
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  TriggerGCOnAllocationTimeoutIfNeeded();
#endif  // V8_ENABLE_ALLOCATION_TIMEOUT
  const size_t allocation_size =
      RoundUp(size + sizeof(HeapObjectHeader), kAllocationGranularity);
  const RawHeap::RegularSpaceType type =
      GetInitialSpaceIndexForSize(allocation_size);
  return AllocateObjectOnSpace(NormalPageSpace::From(*raw_heap_.Space(type)),
                               allocation_size, alignment, gcinfo);
}

void* ObjectAllocator::AllocateObject(size_t size, GCInfoIndex gcinfo,
                                      CustomSpaceIndex space_index) {
  DCHECK(!in_disallow_gc_scope());
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  TriggerGCOnAllocationTimeoutIfNeeded();
#endif  // V8_ENABLE_ALLOCATION_TIMEOUT
  const size_t allocation_size =
      RoundUp(size + sizeof(HeapObjectHeader), kAllocationGranularity);
  return AllocateObjectOnSpace(
      NormalPageSpace::From(*raw_heap_.CustomSpace(space_index)),
      allocation_size, gcinfo);
}

void* ObjectAllocator::AllocateObject(size_t size, AlignVal alignment,
                                      GCInfoIndex gcinfo,
                                      CustomSpaceIndex space_index) {
  DCHECK(!in_disallow_gc_scope());
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  TriggerGCOnAllocationTimeoutIfNeeded();
#endif  // V8_ENABLE_ALLOCATION_TIMEOUT
  const size_t allocation_size =
      RoundUp(size + sizeof(HeapObjectHeader), kAllocationGranularity);
  return AllocateObjectOnSpace(
      NormalPageSpace::From(*raw_heap_.CustomSpace(space_index)),
      allocation_size, alignment, gcinfo);
}

// static
RawHeap::RegularSpaceType ObjectAllocator::GetInitialSpaceIndexForSize(
    size_t size) {
  static_assert(kSmallestSpaceSize == 32,
                "should be half the next larger size");
  if (size < 64) {
    if (size < kSmallestSpaceSize) return RawHeap::RegularSpaceType::kNormal1;
    return RawHeap::RegularSpaceType::kNormal2;
  }
  if (size < 128) return RawHeap::RegularSpaceType::kNormal3;
  return RawHeap::RegularSpaceType::kNormal4;
}
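
// Illustrative summary (not part of the original header): the branches above
// map the (header-inclusive) allocation size to the regular space buckets as
//   [0, 32)    -> kNormal1
//   [32, 64)   -> kNormal2
//   [64, 128)  -> kNormal3
//   [128, ...) -> kNormal4
// Large objects also report kNormal4 here; the out-of-line allocation path is
// what actually places them on a large page.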

void* ObjectAllocator::OutOfLineAllocate(NormalPageSpace& space, size_t size,
                                         AlignVal alignment,
                                         GCInfoIndex gcinfo) {
  void* object;
  OutOfLineAllocateGCSafePoint(space, size, alignment, gcinfo, &object);
  return object;
}

void* ObjectAllocator::AllocateObjectOnSpace(NormalPageSpace& space,
                                             size_t size, AlignVal alignment,
                                             GCInfoIndex gcinfo) {
  // The APIs are set up to support general alignment. Since we want to keep
  // track of the actual usage, the alignment support currently only covers
  // double-word alignment (8 bytes on 32-bit and 16 bytes on 64-bit
  // architectures). This is enforced on the public API via static_asserts
  // against alignof(T).
  static_assert(2 * kAllocationGranularity ==
                api_constants::kMaxSupportedAlignment);
  static_assert(kAllocationGranularity == sizeof(HeapObjectHeader));
  static_assert(kAllocationGranularity ==
                api_constants::kAllocationGranularity);
  DCHECK_EQ(2 * sizeof(HeapObjectHeader), static_cast<size_t>(alignment));
  constexpr size_t kAlignment = 2 * kAllocationGranularity;
  constexpr size_t kAlignmentMask = kAlignment - 1;
  constexpr size_t kPaddingSize = kAlignment - sizeof(HeapObjectHeader);

  NormalPageSpace::LinearAllocationBuffer& current_lab =
      space.linear_allocation_buffer();
  const size_t current_lab_size = current_lab.size();
  // Case 1: The LAB fits the request and the LAB start is already properly
  // aligned.
  bool lab_allocation_will_succeed =
      current_lab_size >= size &&
      (reinterpret_cast<uintptr_t>(current_lab.start() +
                                   sizeof(HeapObjectHeader)) &
       kAlignmentMask) == 0;
  // Case 2: The LAB fits an extended request to manually align the second
  // allocation.
  if (!lab_allocation_will_succeed &&
      (current_lab_size >= (size + kPaddingSize))) {
    void* filler_memory = current_lab.Allocate(kPaddingSize);
    auto& filler = Filler::CreateAt(filler_memory, kPaddingSize);
    NormalPage::From(BasePage::FromPayload(&filler))
        ->object_start_bitmap()
        .SetBit<AccessMode::kAtomic>(reinterpret_cast<ConstAddress>(&filler));
    lab_allocation_will_succeed = true;
  }
  if (V8_UNLIKELY(!lab_allocation_will_succeed)) {
    return OutOfLineAllocate(space, size, alignment, gcinfo);
  }
  void* object = AllocateObjectOnSpace(space, size, gcinfo);
  DCHECK_NOT_NULL(object);
  DCHECK_EQ(0u, reinterpret_cast<uintptr_t>(object) & kAlignmentMask);
  return object;
}
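
// Worked example (illustrative, not part of the original header): on a 64-bit
// build, kAlignment = 16, kAlignmentMask = 15, and kPaddingSize = 16 - 8 = 8.
// With the LAB starting at 0x1000, the payload would start at 0x1000 + 8 =
// 0x1008, which is not 16-byte aligned, so Case 1 fails. Case 2 then bump
// allocates an 8-byte Filler at 0x1000, after which the header lands at 0x1008
// and the payload at 0x1010, a 16-byte boundary.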

void* ObjectAllocator::AllocateObjectOnSpace(NormalPageSpace& space,
                                             size_t size, GCInfoIndex gcinfo) {
  DCHECK_LT(0u, gcinfo);

  NormalPageSpace::LinearAllocationBuffer& current_lab =
      space.linear_allocation_buffer();
  if (V8_UNLIKELY(current_lab.size() < size)) {
    return OutOfLineAllocate(
        space, size, static_cast<AlignVal>(kAllocationGranularity), gcinfo);
  }

  void* raw = current_lab.Allocate(size);
#if !defined(V8_USE_MEMORY_SANITIZER) && !defined(V8_USE_ADDRESS_SANITIZER) && \
    DEBUG
  // For debug builds, unzap only the payload.
  SetMemoryAccessible(static_cast<char*>(raw) + sizeof(HeapObjectHeader),
                      size - sizeof(HeapObjectHeader));
#else
  SetMemoryAccessible(raw, size);
#endif
  auto* header = new (raw) HeapObjectHeader(size, gcinfo);

  // The marker needs to find the object start concurrently.
  NormalPage::From(BasePage::FromPayload(header))
      ->object_start_bitmap()
      .SetBit<AccessMode::kAtomic>(reinterpret_cast<ConstAddress>(header));

  return header->ObjectStart();
}

}  // namespace internal
}  // namespace cppgc

#endif  // V8_HEAP_CPPGC_OBJECT_ALLOCATOR_H_
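
Both AllocateObjectOnSpace() overloads above bump-allocate out of a linear
allocation buffer (LAB), which is what makes this header the allocation fast
path. The following standalone sketch (all names hypothetical; it mirrors only
the bump-pointer idea, with none of the spaces, headers, bitmaps, or
out-of-line refill machinery) compiles on its own and illustrates the
technique:

// lab_sketch.cc - minimal bump-pointer LAB, loosely analogous to
// NormalPageSpace::LinearAllocationBuffer. All names here are hypothetical.
#include <cstddef>
#include <cstdio>

class LinearAllocationBuffer {
 public:
  void Set(char* start, size_t size) {
    start_ = start;
    size_ = size;
  }
  size_t size() const { return size_; }
  char* start() const { return start_; }

  // Bump allocation: hand out the current start and advance it. The caller
  // must have checked size() beforehand, as the real fast path does.
  void* Allocate(size_t bytes) {
    char* result = start_;
    start_ += bytes;
    size_ -= bytes;
    return result;
  }

 private:
  char* start_ = nullptr;
  size_t size_ = 0;
};

int main() {
  alignas(16) static char backing[256];
  LinearAllocationBuffer lab;
  lab.Set(backing, sizeof(backing));

  // Two bump allocations; each is just a pointer increment, which is why LAB
  // allocation is so cheap compared with a free-list or page lookup.
  void* a = lab.Allocate(32);
  void* b = lab.Allocate(64);
  std::printf("a=%p b=%p remaining=%zu\n", a, b, lab.size());
  // In the real allocator, a HeapObjectHeader would be placed at `a` with
  // placement new and the object-start bitmap updated for the marker.
  return 0;
}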