v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
heap-inl.h
Go to the documentation of this file.
1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_HEAP_HEAP_INL_H_
6#define V8_HEAP_HEAP_INL_H_
7
8#include "src/heap/heap.h"
9// Include the non-inl header before the rest of the headers.
10
11#include <atomic>
12#include <optional>
13
14// Clients of this interface shouldn't depend on lots of heap internals.
15// Avoid including anything but `heap.h` from `src/heap` where possible.
33#include "src/heap/safepoint.h"
34#include "src/heap/spaces-inl.h"
41#include "src/utils/ostreams.h"
43
44namespace v8 {
45namespace internal {
46
47template <typename T>
49 MapWord map_word = Cast<HeapObject>(heap_obj)->map_word(kRelaxedLoad);
50
51 if (map_word.IsForwardingAddress()) {
52 return Cast<T>(map_word.ToForwardingAddress(heap_obj));
53 } else if (Heap::InFromPage(heap_obj)) {
54 DCHECK(!v8_flags.minor_ms);
55 return Tagged<T>();
56 } else {
57 return heap_obj;
58 }
59}
60
61Isolate* Heap::isolate() const { return Isolate::FromHeap(this); }
62
63bool Heap::IsMainThread() const {
64 return isolate()->thread_id() == ThreadId::Current();
65}
66
// Total external (off-heap) memory currently accounted against this heap.
uint64_t Heap::external_memory() const {
  return external_memory_.total();
}
68
69RootsTable& Heap::roots_table() { return isolate()->roots_table(); }
70
71#define ROOT_ACCESSOR(Type, name, CamelName) \
72 Tagged<Type> Heap::name() { \
73 return Cast<Type>(Tagged<Object>(roots_table()[RootIndex::k##CamelName])); \
74 }
76#undef ROOT_ACCESSOR
77
78Tagged<FixedArray> Heap::single_character_string_table() {
79 return Cast<FixedArray>(
80 Tagged<Object>(roots_table()[RootIndex::kSingleCharacterStringTable]));
81}
82
// Debug helper verifying that compressed read-only root pointers match the
// snapshot baked into static-roots.h.
#define STATIC_ROOTS_FAILED_MSG                                              \
  "Read-only heap layout changed. Run `tools/dev/gen-static-roots.py` to "   \
  "update static-roots.h."
#if V8_STATIC_ROOTS_BOOL
// Check all read only roots are allocated where we expect it. Skip `Exception`
// which changes during setup-heap-internal.
#define DCHECK_STATIC_ROOT(obj, name)                                        \
  if constexpr (RootsTable::IsReadOnly(RootIndex::k##name) &&                \
                RootIndex::k##name != RootIndex::kException) {               \
    DCHECK_WITH_MSG(V8HeapCompressionScheme::CompressObject(obj.ptr()) ==    \
                        StaticReadOnlyRootsPointerTable[static_cast<size_t>( \
                            RootIndex::k##name)],                            \
                    STATIC_ROOTS_FAILED_MSG);                                \
  }
#else
#define DCHECK_STATIC_ROOT(obj, name)
#endif
100
// Generates one setter per root. Immortal-immovable roots additionally assert
// immovability; static read-only roots assert their compressed address.
#define ROOT_ACCESSOR(type, name, CamelName)                                  \
  void Heap::set_##name(Tagged<type> value) {                                 \
    /* The deserializer makes use of the fact that these common roots are */  \
    /* never in new space and never on a page that is being compacted. */     \
    DCHECK_IMPLIES(deserialization_complete(),                                \
                   !RootsTable::IsImmortalImmovable(RootIndex::k##CamelName)); \
    if constexpr (RootsTable::IsImmortalImmovable(RootIndex::k##CamelName)) { \
      /* Cast via object to avoid compile errors when trying to cast a Smi */ \
      /* to HeapObject (these Smis will anyway be excluded by */              \
      /* RootsTable::IsImmortalImmovable but this isn't enough for the*/      \
      /* compiler, even with `if constexpr`)*/                                \
      DCHECK(IsImmovable(Cast<HeapObject>(Cast<Object>(value))));             \
    }                                                                         \
    DCHECK_STATIC_ROOT(value, CamelName);                                     \
    roots_table()[RootIndex::k##CamelName] = value.ptr();                     \
  }
118#undef ROOT_ACCESSOR
119#undef CHECK_STATIC_ROOT
120#undef STATIC_ROOTS_FAILED_MSG
121
122void Heap::SetRootMaterializedObjects(Tagged<FixedArray> objects) {
123 roots_table()[RootIndex::kMaterializedObjects] = objects.ptr();
124}
125
126void Heap::SetRootScriptList(Tagged<Object> value) {
127 roots_table()[RootIndex::kScriptList] = value.ptr();
128}
129
130void Heap::SetMessageListeners(Tagged<ArrayList> value) {
131 roots_table()[RootIndex::kMessageListeners] = value.ptr();
132}
133
134void Heap::SetFunctionsMarkedForManualOptimization(Tagged<Object> hash_table) {
135 DCHECK(IsObjectHashTable(hash_table) || IsUndefined(hash_table, isolate()));
136 roots_table()[RootIndex::kFunctionsMarkedForManualOptimization] =
137 hash_table.ptr();
138}
139
#if V8_ENABLE_WEBASSEMBLY
// Updates both wasm-related weak-array roots in a single call.
void Heap::SetWasmCanonicalRttsAndJSToWasmWrappers(
    Tagged<WeakFixedArray> rtts, Tagged<WeakFixedArray> js_to_wasm_wrappers) {
  set_wasm_canonical_rtts(rtts);
  set_js_to_wasm_wrappers(js_to_wasm_wrappers);
}
#endif  // V8_ENABLE_WEBASSEMBLY
147
148PagedSpace* Heap::paged_space(int idx) const {
149 DCHECK(idx == OLD_SPACE || idx == CODE_SPACE || idx == SHARED_SPACE ||
150 idx == TRUSTED_SPACE || idx == SHARED_TRUSTED_SPACE);
151 return static_cast<PagedSpace*>(space_[idx].get());
152}
153
154Space* Heap::space(int idx) const { return space_[idx].get(); }
155
156Address* Heap::NewSpaceAllocationTopAddress() {
157 return new_space_ || v8_flags.sticky_mark_bits
158 ? isolate()->isolate_data()->new_allocation_info_.top_address()
159 : nullptr;
160}
161
162Address* Heap::NewSpaceAllocationLimitAddress() {
163 return new_space_ || v8_flags.sticky_mark_bits
164 ? isolate()->isolate_data()->new_allocation_info_.limit_address()
165 : nullptr;
166}
167
168Address* Heap::OldSpaceAllocationTopAddress() {
169 return allocator()->old_space_allocator()->allocation_top_address();
170}
171
172Address* Heap::OldSpaceAllocationLimitAddress() {
173 return allocator()->old_space_allocator()->allocation_limit_address();
174}
175
176inline const base::AddressRegion& Heap::code_region() {
177 static constexpr base::AddressRegion kEmptyRegion;
178 return code_range_ ? code_range_->reservation()->region() : kEmptyRegion;
179}
180
181Address Heap::code_range_base() {
182 return code_range_ ? code_range_->base() : kNullAddress;
183}
184
185int Heap::MaxRegularHeapObjectSize(AllocationType allocation) {
186 if (allocation == AllocationType::kCode) {
190 }
192}
193
194AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type,
195 AllocationOrigin origin,
196 AllocationAlignment alignment) {
197 return heap_allocator_->AllocateRaw(size_in_bytes, type, origin, alignment);
198}
199
200Address Heap::AllocateRawOrFail(int size, AllocationType allocation,
201 AllocationOrigin origin,
202 AllocationAlignment alignment) {
203 return heap_allocator_
204 ->AllocateRawWith<HeapAllocator::kRetryOrFail>(size, allocation, origin,
205 alignment)
206 .address();
207}
208
209void Heap::RegisterExternalString(Tagged<String> string) {
210 DCHECK(IsExternalString(string));
211 DCHECK(!IsThinString(string));
213}
214
215void Heap::FinalizeExternalString(Tagged<String> string) {
216 DCHECK(IsExternalString(string));
217 Tagged<ExternalString> ext_string = Cast<ExternalString>(string);
219 page->DecrementExternalBackingStoreBytes(
221 ext_string->ExternalPayloadSize());
222 ext_string->DisposeResource(isolate());
223}
224
225Address Heap::NewSpaceTop() {
226 return new_space_ || v8_flags.sticky_mark_bits
227 ? allocator()->new_space_allocator()->top()
228 : kNullAddress;
229}
230
231Address Heap::NewSpaceLimit() {
232 return new_space_ || v8_flags.sticky_mark_bits
233 ? allocator()->new_space_allocator()->limit()
234 : kNullAddress;
235}
236
237// static
238bool Heap::InFromPage(Tagged<Object> object) {
240 return IsHeapObject(object) && InFromPage(Cast<HeapObject>(object));
241}
242
243// static
244bool Heap::InFromPage(Tagged<MaybeObject> object) {
245 Tagged<HeapObject> heap_object;
246 return object.GetHeapObject(&heap_object) && InFromPage(heap_object);
247}
248
249// static
250bool Heap::InFromPage(Tagged<HeapObject> heap_object) {
251 return MemoryChunk::FromHeapObject(heap_object)->IsFromPage();
252}
253
254// static
255bool Heap::InToPage(Tagged<Object> object) {
257 return IsHeapObject(object) && InToPage(Cast<HeapObject>(object));
258}
259
260// static
261bool Heap::InToPage(Tagged<MaybeObject> object) {
262 Tagged<HeapObject> heap_object;
263 return object.GetHeapObject(&heap_object) && InToPage(heap_object);
264}
265
266// static
267bool Heap::InToPage(Tagged<HeapObject> heap_object) {
268 return MemoryChunk::FromHeapObject(heap_object)->IsToPage();
269}
270
271bool Heap::InOldSpace(Tagged<Object> object) {
272 return old_space_->Contains(object) &&
273 (!v8_flags.sticky_mark_bits || !HeapLayout::InYoungGeneration(object));
274}
275
276// static
277Heap* Heap::FromWritableHeapObject(Tagged<HeapObject> obj) {
279 // RO_SPACE can be shared between heaps, so we can't use RO_SPACE objects to
280 // find a heap. The exception is when the ReadOnlySpace is writeable, during
281 // bootstrapping, so explicitly allow this case.
282 SLOW_DCHECK(chunk->IsWritable());
283 Heap* heap = chunk->heap();
284 SLOW_DCHECK(heap != nullptr);
285 return heap;
286}
287
// Copies |byte_size| bytes (must be tagged-aligned) of tagged values from
// |src| to |dst|.
void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  DCHECK(IsAligned(byte_size, kTaggedSize));
  const size_t num_tagged = static_cast<size_t>(byte_size / kTaggedSize);
  CopyTagged(dst, src, num_tagged);
}
292
293bool Heap::IsPendingAllocationInternal(Tagged<HeapObject> object) {
295
297 if (chunk->InReadOnlySpace()) return false;
298
299 BaseSpace* base_space = chunk->Metadata()->owner();
300 Address addr = object.address();
301
302 switch (base_space->identity()) {
303 case NEW_SPACE: {
304 return allocator()->new_space_allocator()->IsPendingAllocation(addr);
305 }
306
307 case OLD_SPACE: {
308 return allocator()->old_space_allocator()->IsPendingAllocation(addr);
309 }
310
311 case CODE_SPACE: {
312 return allocator()->code_space_allocator()->IsPendingAllocation(addr);
313 }
314
315 case TRUSTED_SPACE: {
316 return allocator()->trusted_space_allocator()->IsPendingAllocation(addr);
317 }
318
319 case LO_SPACE:
320 case CODE_LO_SPACE:
321 case TRUSTED_LO_SPACE:
322 case NEW_LO_SPACE: {
323 LargeObjectSpace* large_space =
324 static_cast<LargeObjectSpace*>(base_space);
325 base::MutexGuard guard(large_space->pending_allocation_mutex());
326 return addr == large_space->pending_object();
327 }
328
329 case SHARED_SPACE:
330 case SHARED_LO_SPACE:
333 // TODO(v8:13267): Ensure that all shared space objects have a memory
334 // barrier after initialization.
335 return false;
336
337 case RO_SPACE:
338 UNREACHABLE();
339 }
340
341 UNREACHABLE();
342}
343
// Tracing wrapper around IsPendingAllocationInternal; logs positives when
// --trace-pending-allocations is set.
bool Heap::IsPendingAllocation(Tagged<HeapObject> object) {
  const bool pending = IsPendingAllocationInternal(object);
  if (v8_flags.trace_pending_allocations && pending) {
    StdoutStream{} << "Pending allocation: " << std::hex << "0x" << object.ptr()
                   << "\n";
  }
  return pending;
}
352
353bool Heap::IsPendingAllocation(Tagged<Object> object) {
354 return IsHeapObject(object) && IsPendingAllocation(Cast<HeapObject>(object));
355}
356
// Adds an external string to the young or old bucket of the table, taking the
// table mutex when concurrent insertion from client isolates is possible.
// NOTE(review): the signature (heap-inl.h:357) and part of the lock condition
// (:363) were lost in extraction; reconstructed from the cross-reference
// section (AddString declaration, is_shared_space_isolate, heap_) — confirm
// the exact condition against upstream.
void Heap::ExternalStringTable::AddString(Tagged<String> string) {
  std::optional<base::MutexGuard> guard;

  // With --shared-string-table client isolates may insert into the main
  // isolate's table concurrently.
  if (v8_flags.shared_string_table &&
      heap_->isolate()->is_shared_space_isolate()) {
    guard.emplace(&mutex_);
  }

  DCHECK(IsExternalString(string));
  DCHECK(!Contains(string));

  if (HeapLayout::InYoungGeneration(string)) {
    young_strings_.push_back(string);
  } else {
    old_strings_.push_back(string);
  }
}
376
377Tagged<Boolean> Heap::ToBoolean(bool condition) {
378 ReadOnlyRoots roots(this);
379 return roots.boolean_value(condition);
380}
381
// Hands out the next template serial number, wrapping into the non-unique
// range on Smi overflow, and persists the counter in the roots table.
// NOTE(review): heap-inl.h:390 and :392 were lost in extraction; the overflow
// assignment and range DCHECK are reconstructed from the cross-reference
// section (kFirstNonUniqueSerialNumber, kUninitializedSerialNumber,
// DCHECK_GT) — confirm upstream.
uint32_t Heap::GetNextTemplateSerialNumber() {
  uint32_t next_serial_number =
      static_cast<uint32_t>(next_template_serial_number().value());
  if (next_serial_number < Smi::kMaxValue) {
    ++next_serial_number;
  } else {
    // In case of overflow, restart from a range where it's ok for serial
    // numbers to be non-unique.
    next_serial_number = TemplateInfo::kFirstNonUniqueSerialNumber;
  }
  DCHECK_GT(next_serial_number, TemplateInfo::kUninitializedSerialNumber);
  set_next_template_serial_number(Smi::FromInt(next_serial_number));
  return next_serial_number;
}
396
int Heap::MaxNumberToStringCacheSize() const {
  // Compute the size of the number string cache based on the max newspace size.
  // The number string cache has a minimum size based on twice the initial cache
  // size to ensure that it is bigger after being made 'full size'.
  size_t cache_entries = max_semi_space_size_ / 512;
  cache_entries = std::min<size_t>(0x4000, cache_entries);
  cache_entries =
      std::max<size_t>(kInitialNumberStringCacheSize * 2, cache_entries);
  // There is a string and a number per entry so the length is twice the number
  // of entries.
  return static_cast<int>(cache_entries * 2);
}
409
// Accounts |amount| bytes of external backing store against the heap, with
// overflow checking on the relaxed atomic counter.
void Heap::IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                              size_t amount) {
  const uint64_t delta = static_cast<uint64_t>(amount);
  base::CheckedIncrement(&backing_store_bytes_, delta,
                         std::memory_order_relaxed);
  // TODO(mlippautz): Implement interrupt for global memory allocations that can
  // trigger garbage collections.
}
417
// Un-accounts |amount| bytes of external backing store, with underflow
// checking on the relaxed atomic counter.
void Heap::DecrementExternalBackingStoreBytes(ExternalBackingStoreType type,
                                              size_t amount) {
  const uint64_t delta = static_cast<uint64_t>(amount);
  base::CheckedDecrement(&backing_store_bytes_, delta,
                         std::memory_order_relaxed);
}
423
427
431
434
435PagedNewSpace* Heap::paged_new_space() const {
437}
438
439SemiSpaceNewSpace* Heap::semi_space_new_space() const {
441}
442
443StickySpace* Heap::sticky_space() const {
444 DCHECK(v8_flags.sticky_mark_bits);
446}
447
451
456
457} // namespace internal
458} // namespace v8
459
460#endif // V8_HEAP_HEAP_INL_H_
#define SLOW_DCHECK(condition)
Definition checks.h:21
AllocationSpace identity() const
Definition base-space.h:32
V8_WARN_UNUSED_RESULT V8_INLINE AllocationResult AllocateRaw(int size_in_bytes, AllocationType allocation, AllocationOrigin origin=AllocationOrigin::kRuntime, AllocationAlignment alignment=kTaggedAligned)
V8_WARN_UNUSED_RESULT V8_INLINE Tagged< HeapObject > AllocateRawWith(int size, AllocationType allocation, AllocationOrigin origin=AllocationOrigin::kRuntime, AllocationAlignment alignment=kTaggedAligned)
static V8_INLINE bool InYoungGeneration(Tagged< Object > object)
bool Contains(Tagged< String > string)
Definition heap.cc:2685
std::vector< TaggedBase > old_strings_
Definition heap.h:1699
std::vector< TaggedBase > young_strings_
Definition heap.h:1698
void AddString(Tagged< String > string)
Definition heap-inl.h:357
ExternalStringTable external_string_table_
Definition heap.h:2364
std::atomic< uint64_t > backing_store_bytes_
Definition heap.h:2121
NewSpace * new_space() const
Definition heap.h:727
ExternalMemoryAccounting external_memory_
Definition heap.h:2073
int ignore_local_gc_requests_depth_
Definition heap.h:2370
std::atomic< size_t > always_allocate_scope_count_
Definition heap.h:2129
static const int kInitialNumberStringCacheSize
Definition heap.h:1705
V8_INLINE RootsTable & roots_table()
Definition heap-inl.h:69
size_t max_semi_space_size_
Definition heap.h:2084
std::unique_ptr< CodeRange > code_range_
Definition heap.h:2295
OldSpace * old_space() const
Definition heap.h:730
static bool InFromPage(Tagged< Object > object)
Definition heap-inl.h:238
NewSpace * new_space_
Definition heap.h:2142
bool IsPendingAllocation(Tagged< HeapObject > object)
Definition heap-inl.h:344
int max_regular_code_object_size_
Definition heap.h:2376
static bool InToPage(Tagged< Object > object)
Definition heap-inl.h:255
OldSpace * old_space_
Definition heap.h:2143
Isolate * isolate() const
Definition heap-inl.h:61
HeapAllocator * heap_allocator_
Definition heap.h:2079
bool deserialization_complete() const
Definition heap.h:638
bool IsPendingAllocationInternal(Tagged< HeapObject > object)
Definition heap-inl.h:293
bool is_shared_space_isolate() const
Definition isolate.h:2292
static Isolate * FromHeap(const Heap *heap)
Definition isolate.h:1202
base::Mutex * pending_allocation_mutex()
bool IsForwardingAddress() const
Tagged< HeapObject > ToForwardingAddress(Tagged< HeapObject > map_word_host)
static constexpr int MaxRegularCodeObjectSize()
static V8_INLINE MemoryChunkMetadata * FromHeapObject(Tagged< HeapObject > o)
V8_INLINE MemoryChunkMetadata * Metadata()
static V8_INLINE MemoryChunk * FromHeapObject(Tagged< HeapObject > object)
V8_INLINE bool InReadOnlySpace() const
static V8_INLINE PageMetadata * FromHeapObject(Tagged< HeapObject > o)
static PagedNewSpace * From(NewSpace *space)
Definition new-spaces.h:598
bool Contains(Address a) const
V8_INLINE Tagged< Boolean > boolean_value(bool value) const
Definition roots-inl.h:119
static SemiSpaceNewSpace * From(NewSpace *space)
Definition new-spaces.h:259
static constexpr Tagged< Smi > FromInt(int value)
Definition smi.h:38
static constexpr int kMaxValue
Definition smi.h:101
static StickySpace * From(OldSpace *space)
V8_INLINE constexpr StorageType ptr() const
bool GetHeapObject(Tagged< HeapObject > *result) const
static const int kUninitializedSerialNumber
Definition templates.h:41
static const int kFirstNonUniqueSerialNumber
Definition templates.h:44
static ThreadId Current()
Definition thread-id.h:32
NormalPageSpace * space_
Definition compactor.cc:324
DeclarationScope * scope_
#define ROOT_ACCESSOR(Type, name, CamelName)
Isolate * isolate
ZoneVector< RpoNumber > & result
void CheckedDecrement(std::atomic< T > *number, T amount, std::memory_order order=std::memory_order_seq_cst)
void CheckedIncrement(std::atomic< T > *number, T amount, std::memory_order order=std::memory_order_seq_cst)
static V8_INLINE bool HasWeakHeapObjectTag(const Tagged< Object > value)
Definition objects.h:653
constexpr int kTaggedSize
Definition globals.h:542
constexpr int kMaxRegularHeapObjectSize
Definition globals.h:680
void CopyTagged(Address dst, const Address src, size_t num_tagged)
Definition slots-inl.h:479
@ SHARED_TRUSTED_LO_SPACE
Definition globals.h:1319
@ SHARED_TRUSTED_SPACE
Definition globals.h:1314
Tagged< T > ForwardingAddress(Tagged< T > heap_obj)
Definition heap-inl.h:48
V8_INLINE constexpr bool IsHeapObject(TaggedImpl< kRefType, StorageType > obj)
Definition objects.h:669
V8_EXPORT_PRIVATE FlagValues v8_flags
ExternalBackingStoreType
Definition globals.h:1605
static constexpr Address kNullAddress
Definition v8-internal.h:53
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
static constexpr RelaxedLoadTag kRelaxedLoad
Definition globals.h:2909
#define MUTABLE_ROOT_LIST(V)
Definition roots.h:483
#define ROOT_LIST(V)
Definition roots.h:488
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define DCHECK_GT(v1, v2)
Definition logging.h:487
constexpr bool IsAligned(T value, U alignment)
Definition macros.h:403
Heap * heap_