v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
v8-internal.h
Go to the documentation of this file.
1// Copyright 2018 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef INCLUDE_V8_INTERNAL_H_
6#define INCLUDE_V8_INTERNAL_H_
7
8#include <stddef.h>
9#include <stdint.h>
10#include <string.h>
11
12#include <atomic>
13#include <iterator>
14#include <limits>
15#include <memory>
16#include <optional>
17#include <type_traits>
18
19#include "v8config.h" // NOLINT(build/include_directory)
20
21// TODO(pkasting): Use <compare>/spaceship unconditionally after dropping
22// support for old libstdc++ versions.
23#if __has_include(<version>)
24#include <version>
25#endif
26#if defined(__cpp_lib_three_way_comparison) && \
27 __cpp_lib_three_way_comparison >= 201711L && \
28 defined(__cpp_lib_concepts) && __cpp_lib_concepts >= 202002L
29#include <compare>
30#include <concepts>
31
32#define V8_HAVE_SPACESHIP_OPERATOR 1
33#else
34#define V8_HAVE_SPACESHIP_OPERATOR 0
35#endif
36
37namespace v8 {
38
39class Array;
40class Context;
41class Data;
42class Isolate;
43
44namespace internal {
45
46class Heap;
47class LocalHeap;
48class Isolate;
49class IsolateGroup;
50class LocalIsolate;
51
// An Address is an untagged machine-word-sized integer used throughout the
// internal API to carry raw pointers and tagged values alike.
using Address = uintptr_t;
static constexpr Address kNullAddress = 0;
54
// Common byte-size multiples.
constexpr int KB = 1024;
constexpr int MB = KB * 1024;
constexpr int GB = MB * 1024;
#ifdef V8_TARGET_ARCH_X64
// Only 64-bit x64 builds need a TB constant; it would overflow int, hence
// the size_t arithmetic.
constexpr size_t TB = size_t{GB} * 1024;
#endif
61
// Sizes of fundamental types as seen across the API boundary.
const int kApiSystemPointerSize = sizeof(void*);
const int kApiDoubleSize = sizeof(double);
const int kApiInt32Size = sizeof(int32_t);
const int kApiInt64Size = sizeof(int64_t);
const int kApiSizetSize = sizeof(size_t);
70
// Tag information for HeapObject.
const int kHeapObjectTag = 1;
const int kWeakHeapObjectTag = 3;
const int kHeapObjectTagSize = 2;
const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;

// Tag information for forwarding pointers stored in object headers.
// 0b00 at the lowest 2 bits in the header indicates that the map word is a
// forwarding pointer.
const int kForwardingTag = 0;
const int kForwardingTagSize = 2;
const intptr_t kForwardingTagMask = (1 << kForwardingTagSize) - 1;

// Tag information for Smi.
const int kSmiTag = 0;
const int kSmiTagSize = 1;
const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;
89
// Smi (small integer) tagging is parameterized on the size of a tagged
// pointer; the specializations below cover 32-bit and 64-bit tagged values.
template <size_t tagged_ptr_size>
struct SmiTagging;

constexpr intptr_t kIntptrAllBitsSet = intptr_t{-1};
constexpr uintptr_t kUintptrAllBitsSet =
    static_cast<uintptr_t>(kIntptrAllBitsSet);
97// Smi constants for systems where tagged pointer is a 32-bit value.
98template <>
99struct SmiTagging<4> {
100 enum { kSmiShiftSize = 0, kSmiValueSize = 31 };
101
102 static constexpr intptr_t kSmiMinValue =
103 static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
104 static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);
105
106 V8_INLINE static constexpr int SmiToInt(Address value) {
108 // Truncate and shift down (requires >> to be sign extending).
109 return static_cast<int32_t>(static_cast<uint32_t>(value)) >> shift_bits;
110 }
111
112 template <class T, typename std::enable_if_t<std::is_integral_v<T> &&
113 std::is_signed_v<T>>* = nullptr>
114 V8_INLINE static constexpr bool IsValidSmi(T value) {
115 // Is value in range [kSmiMinValue, kSmiMaxValue].
116 // Use unsigned operations in order to avoid undefined behaviour in case of
117 // signed integer overflow.
118 return (static_cast<uintptr_t>(value) -
119 static_cast<uintptr_t>(kSmiMinValue)) <=
120 (static_cast<uintptr_t>(kSmiMaxValue) -
121 static_cast<uintptr_t>(kSmiMinValue));
122 }
123
124 template <class T,
125 typename std::enable_if_t<std::is_integral_v<T> &&
126 std::is_unsigned_v<T>>* = nullptr>
127 V8_INLINE static constexpr bool IsValidSmi(T value) {
128 static_assert(kSmiMaxValue <= std::numeric_limits<uintptr_t>::max());
130 }
131
132 // Same as the `intptr_t` version but works with int64_t on 32-bit builds
133 // without slowing down anything else.
134 V8_INLINE static constexpr bool IsValidSmi(int64_t value) {
135 return (static_cast<uint64_t>(value) -
136 static_cast<uint64_t>(kSmiMinValue)) <=
137 (static_cast<uint64_t>(kSmiMaxValue) -
138 static_cast<uint64_t>(kSmiMinValue));
139 }
140
141 V8_INLINE static constexpr bool IsValidSmi(uint64_t value) {
142 static_assert(kSmiMaxValue <= std::numeric_limits<uint64_t>::max());
144 }
145};
146
147// Smi constants for systems where tagged pointer is a 64-bit value.
148template <>
149struct SmiTagging<8> {
150 enum { kSmiShiftSize = 31, kSmiValueSize = 32 };
151
152 static constexpr intptr_t kSmiMinValue =
153 static_cast<intptr_t>(kUintptrAllBitsSet << (kSmiValueSize - 1));
154 static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1);
155
156 V8_INLINE static constexpr int SmiToInt(Address value) {
158 // Shift down and throw away top 32 bits.
159 return static_cast<int>(static_cast<intptr_t>(value) >> shift_bits);
160 }
161
162 template <class T, typename std::enable_if_t<std::is_integral_v<T> &&
163 std::is_signed_v<T>>* = nullptr>
164 V8_INLINE static constexpr bool IsValidSmi(T value) {
165 // To be representable as a long smi, the value must be a 32-bit integer.
166 return std::numeric_limits<int32_t>::min() <= value &&
167 value <= std::numeric_limits<int32_t>::max();
168 }
169
170 template <class T,
171 typename std::enable_if_t<std::is_integral_v<T> &&
172 std::is_unsigned_v<T>>* = nullptr>
173 V8_INLINE static constexpr bool IsValidSmi(T value) {
174 return value <= std::numeric_limits<int32_t>::max();
175 }
176};
177
178#ifdef V8_COMPRESS_POINTERS
179// See v8:7703 or src/common/ptr-compr-inl.h for details about pointer
180// compression.
181constexpr size_t kPtrComprCageReservationSize = size_t{1} << 32;
182constexpr size_t kPtrComprCageBaseAlignment = size_t{1} << 32;
183
184static_assert(
186 "Pointer compression can be enabled only for 64-bit architectures");
187const int kApiTaggedSize = kApiInt32Size;
188#else
190#endif
191
195
196#ifdef V8_31BIT_SMIS_ON_64BIT_ARCH
197using PlatformSmiTagging = SmiTagging<kApiInt32Size>;
198#else
200#endif
201
202// TODO(ishell): Consinder adding kSmiShiftBits = kSmiShiftSize + kSmiTagSize
203// since it's used much more often than the inividual constants.
204const int kSmiShiftSize = PlatformSmiTagging::kSmiShiftSize;
205const int kSmiValueSize = PlatformSmiTagging::kSmiValueSize;
206const int kSmiMinValue = static_cast<int>(PlatformSmiTagging::kSmiMinValue);
207const int kSmiMaxValue = static_cast<int>(PlatformSmiTagging::kSmiMaxValue);
208constexpr bool SmiValuesAre31Bits() { return kSmiValueSize == 31; }
209constexpr bool SmiValuesAre32Bits() { return kSmiValueSize == 32; }
210constexpr bool Is64() { return kApiSystemPointerSize == sizeof(int64_t); }
211
212V8_INLINE static constexpr Address IntToSmi(int value) {
213 return (static_cast<Address>(value) << (kSmiTagSize + kSmiShiftSize)) |
214 kSmiTag;
215}
216
/*
 * Sandbox related types, constants, and functions.
 */

// Returns true if the V8 sandbox was compiled in (V8_ENABLE_SANDBOX).
constexpr bool SandboxIsEnabled() {
#ifdef V8_ENABLE_SANDBOX
  return true;
#else
  return false;
#endif
}
227
228// SandboxedPointers are guaranteed to point into the sandbox. This is achieved
229// for example by storing them as offset rather than as raw pointers.
231
232#ifdef V8_ENABLE_SANDBOX
233
234// Size of the sandbox, excluding the guard regions surrounding it.
235#if defined(V8_TARGET_OS_ANDROID)
236// On Android, most 64-bit devices seem to be configured with only 39 bits of
237// virtual address space for userspace. As such, limit the sandbox to 128GB (a
238// quarter of the total available address space).
239constexpr size_t kSandboxSizeLog2 = 37; // 128 GB
240#else
241// Everywhere else use a 1TB sandbox.
242constexpr size_t kSandboxSizeLog2 = 40; // 1 TB
243#endif // V8_TARGET_OS_ANDROID
244constexpr size_t kSandboxSize = 1ULL << kSandboxSizeLog2;
245
246// Required alignment of the sandbox. For simplicity, we require the
247// size of the guard regions to be a multiple of this, so that this specifies
248// the alignment of the sandbox including and excluding surrounding guard
249// regions. The alignment requirement is due to the pointer compression cage
250// being located at the start of the sandbox.
251constexpr size_t kSandboxAlignment = kPtrComprCageBaseAlignment;
252
253// Sandboxed pointers are stored inside the heap as offset from the sandbox
254// base shifted to the left. This way, it is guaranteed that the offset is
255// smaller than the sandbox size after shifting it to the right again. This
256// constant specifies the shift amount.
257constexpr uint64_t kSandboxedPointerShift = 64 - kSandboxSizeLog2;
258
259// Size of the guard regions surrounding the sandbox. This assumes a worst-case
260// scenario of a 32-bit unsigned index used to access an array of 64-bit values
261// with an additional 4GB (compressed pointer) offset. In particular, accesses
262// to TypedArrays are effectively computed as
263// `entry_pointer = array->base + array->offset + index * array->element_size`.
264// See also https://crbug.com/40070746 for more details.
265constexpr size_t kSandboxGuardRegionSize = 32ULL * GB + 4ULL * GB;
266
267static_assert((kSandboxGuardRegionSize % kSandboxAlignment) == 0,
268 "The size of the guard regions around the sandbox must be a "
269 "multiple of its required alignment.");
270
271// On OSes where reserving virtual memory is too expensive to reserve the
272// entire address space backing the sandbox, notably Windows pre 8.1, we create
273// a partially reserved sandbox that doesn't actually reserve most of the
274// memory, and so doesn't have the desired security properties as unrelated
275// memory allocations could end up inside of it, but which still ensures that
276// objects that should be located inside the sandbox are allocated within
277// kSandboxSize bytes from the start of the sandbox. The minimum size of the
278// region that is actually reserved for such a sandbox is specified by this
279// constant and should be big enough to contain the pointer compression cage as
280// well as the ArrayBuffer partition.
281constexpr size_t kSandboxMinimumReservationSize = 8ULL * GB;
282
283static_assert(kSandboxMinimumReservationSize > kPtrComprCageReservationSize,
284 "The minimum reservation size for a sandbox must be larger than "
285 "the pointer compression cage contained within it.");
286
287// The maximum buffer size allowed inside the sandbox. This is mostly dependent
288// on the size of the guard regions around the sandbox: an attacker must not be
289// able to construct a buffer that appears larger than the guard regions and
290// thereby "reach out of" the sandbox.
291constexpr size_t kMaxSafeBufferSizeForSandbox = 32ULL * GB - 1;
292static_assert(kMaxSafeBufferSizeForSandbox <= kSandboxGuardRegionSize,
293 "The maximum allowed buffer size must not be larger than the "
294 "sandbox's guard regions");
295
296constexpr size_t kBoundedSizeShift = 29;
297static_assert(1ULL << (64 - kBoundedSizeShift) ==
298 kMaxSafeBufferSizeForSandbox + 1,
299 "The maximum size of a BoundedSize must be synchronized with the "
300 "kMaxSafeBufferSizeForSandbox");
301
302#endif // V8_ENABLE_SANDBOX
303
#ifdef V8_COMPRESS_POINTERS

#ifdef V8_TARGET_OS_ANDROID
// The size of the virtual memory reservation for an external pointer table.
// This determines the maximum number of entries in a table. Using a maximum
// size allows omitting bounds checks on table accesses if the indices are
// guaranteed (e.g. through shifting) to be below the maximum index. This
// value must be a power of two.
constexpr size_t kExternalPointerTableReservationSize = 256 * MB;

// The external pointer table indices stored in HeapObjects as external
// pointers are shifted to the left by this amount to guarantee that they are
// smaller than the maximum table size even after the C++ compiler multiplies
// them by 8 to be used as indexes into a table of 64 bit pointers.
constexpr uint32_t kExternalPointerIndexShift = 7;
#else
constexpr size_t kExternalPointerTableReservationSize = 512 * MB;
constexpr uint32_t kExternalPointerIndexShift = 6;
#endif  // V8_TARGET_OS_ANDROID

// The maximum number of entries in an external pointer table.
constexpr int kExternalPointerTableEntrySize = 8;
constexpr int kExternalPointerTableEntrySizeLog2 = 3;
constexpr size_t kMaxExternalPointers =
    kExternalPointerTableReservationSize / kExternalPointerTableEntrySize;
static_assert((1 << (32 - kExternalPointerIndexShift)) == kMaxExternalPointers,
              "kExternalPointerTableReservationSize and "
              "kExternalPointerIndexShift don't match");

#else  // !V8_COMPRESS_POINTERS

// Needed for the V8.SandboxedExternalPointersCount histogram.
constexpr size_t kMaxExternalPointers = 0;

#endif  // V8_COMPRESS_POINTERS
339
// Bit layout of a tagged external pointer: bit 48 is the GC mark bit, the
// type tag lives in bits [49, 56) (mask 0x00fe...), and the remaining bits
// carry the pointer payload.
constexpr uint64_t kExternalPointerMarkBit = 1ULL << 48;
constexpr uint64_t kExternalPointerTagShift = 49;
constexpr uint64_t kExternalPointerTagMask = 0x00fe000000000000ULL;
constexpr uint64_t kExternalPointerTagAndMarkbitMask = 0x00ff000000000000ULL;
constexpr uint64_t kExternalPointerPayloadMask = 0xff00ffffffffffffULL;
349
350// A ExternalPointerHandle represents a (opaque) reference to an external
351// pointer that can be stored inside the sandbox. A ExternalPointerHandle has
352// meaning only in combination with an (active) Isolate as it references an
353// external pointer stored in the currently active Isolate's
354// ExternalPointerTable. Internally, an ExternalPointerHandles is simply an
355// index into an ExternalPointerTable that is shifted to the left to guarantee
356// that it is smaller than the size of the table.
357using ExternalPointerHandle = uint32_t;
358
359// ExternalPointers point to objects located outside the sandbox. When the V8
360// sandbox is enabled, these are stored on heap as ExternalPointerHandles,
361// otherwise they are simply raw pointers.
362#ifdef V8_ENABLE_SANDBOX
364#else
366#endif
367
370
371// See `ExternalPointerHandle` for the main documentation. The difference to
372// `ExternalPointerHandle` is that the handle does not represent an arbitrary
373// external pointer but always refers to an object managed by `CppHeap`. The
374// handles are using in combination with a dedicated table for `CppHeap`
375// references.
376using CppHeapPointerHandle = uint32_t;
377
378// The actual pointer to objects located on the `CppHeap`. When pointer
379// compression is enabled these pointers are stored as `CppHeapPointerHandle`.
380// In non-compressed configurations the pointers are simply stored as raw
381// pointers.
382#ifdef V8_COMPRESS_POINTERS
384#else
386#endif
387
390
// Bit layout of a CppHeap pointer table entry: bit 0 is the mark bit, the
// tag starts at bit 1, and the pointer payload starts at bit 16.
constexpr uint64_t kCppHeapPointerMarkBit = 1ULL;
constexpr uint64_t kCppHeapPointerTagShift = 1;
constexpr uint64_t kCppHeapPointerPayloadShift = 16;

#ifdef V8_COMPRESS_POINTERS
// CppHeapPointers use a dedicated pointer table. These constants control the
// size and layout of the table. See the corresponding constants for the
// external pointer table for further details.
constexpr size_t kCppHeapPointerTableReservationSize =
    kExternalPointerTableReservationSize;
constexpr uint32_t kCppHeapPointerIndexShift = kExternalPointerIndexShift;

constexpr int kCppHeapPointerTableEntrySize = 8;
constexpr int kCppHeapPointerTableEntrySizeLog2 = 3;
constexpr size_t kMaxCppHeapPointers =
    kCppHeapPointerTableReservationSize / kCppHeapPointerTableEntrySize;
static_assert((1 << (32 - kCppHeapPointerIndexShift)) == kMaxCppHeapPointers,
              "kCppHeapPointerTableReservationSize and "
              "kCppHeapPointerIndexShift don't match");

#else  // !V8_COMPRESS_POINTERS

// Needed for the V8.SandboxedCppHeapPointersCount histogram.
constexpr size_t kMaxCppHeapPointers = 0;

#endif  // V8_COMPRESS_POINTERS
417
// Generic tag range struct to represent ranges of type tags.
//
// When referencing external objects via pointer tables, type tags are
// frequently necessary to guarantee type safety for the external objects. When
// support for subtyping is necessary, range-based type checks are used in
// which all subtypes of a given supertype use contiguous tags. This struct can
// then be used to represent such a type range.
//
// As an example, consider the following type hierarchy:
//
//          A     F
//         / \
//        B   E
//       / \
//      C   D
//
// A potential type id assignment for range-based type checks is
// {A: 0, B: 1, C: 2, D: 3, E: 4, F: 5}. With that, the type check for type A
// would check for the range [A, E], while the check for B would check range
// [B, D], and for F it would simply check [F, F].
//
// In addition, there is an option for performance tweaks: if the size of the
// type range corresponding to a supertype is a power of two and starts at a
// power of two (e.g. [0x100, 0x13f]), then the compiler can often optimize
// the type check to use even fewer instructions (essentially replace a AND +
// SUB with a single AND).
//
template <typename Tag>
struct TagRange {
  static_assert(std::is_enum_v<Tag> &&
                    std::is_same_v<std::underlying_type_t<Tag>, uint16_t>,
                "Tag parameter must be an enum with base type uint16_t");

  // Construct the inclusive tag range [first, last].
  constexpr TagRange(Tag first, Tag last) : first(first), last(last) {}

  // Construct a tag range consisting of a single tag.
  //
  // A single tag is always implicitly convertible to a tag range. This greatly
  // increases readability as most of the time, the exact tag of a field is
  // known and so no tag range needs to explicitly be created for it.
  constexpr TagRange(Tag tag)  // NOLINT(runtime/explicit)
      : first(tag), last(tag) {}

  // Construct an empty tag range.
  constexpr TagRange() : TagRange(static_cast<Tag>(0)) {}

  // A tag range is considered empty if it only contains the null tag.
  constexpr bool IsEmpty() const { return first == 0 && last == 0; }

  // Number of tags in the (inclusive) range; zero for the empty range.
  constexpr size_t Size() const {
    if (IsEmpty()) {
      return 0;
    } else {
      return last - first + 1;
    }
  }

  // Returns true if |tag| lies within the inclusive range [first, last].
  constexpr bool Contains(Tag tag) const {
    // Need to perform the math with uint32_t. Otherwise, the uint16_ts would
    // be promoted to (signed) int, allowing the compiler to (wrongly) assume
    // that an underflow cannot happen as that would be undefined behavior.
    return static_cast<uint32_t>(tag) - first <=
           static_cast<uint32_t>(last) - first;
  }

  // Returns true if |tag_range| is fully contained within this range.
  constexpr bool Contains(TagRange tag_range) const {
    return tag_range.first >= first && tag_range.last <= last;
  }

  constexpr bool operator==(const TagRange other) const {
    return first == other.first && last == other.last;
  }

  // Packs both 16-bit tags into a single value usable as a hash.
  constexpr size_t hash_value() const {
    static_assert(std::is_same_v<std::underlying_type_t<Tag>, uint16_t>);
    return (static_cast<size_t>(first) << 16) | last;
  }

  // The first and last (both inclusive) tag of this range.
  const Tag first;
  const Tag last;
};
501
502//
503// External Pointers.
504//
505// When the sandbox is enabled, external pointers are stored in an external
506// pointer table and are referenced from HeapObjects through an index (a
507// "handle"). When stored in the table, the pointers are tagged with per-type
508// tags to prevent type confusion attacks between different external objects.
509//
510// When loading an external pointer, a range of allowed tags can be specified.
511// This way, type hierarchies can be supported. The main requirement for that
512// is that all (transitive) child classes of a given parent class have type ids
513// in the same range, and that there are no unrelated types in that range. For
514// more details about how to assign type tags to types, see the TagRange class.
515//
516// The external pointer sandboxing mechanism ensures that every access to an
517// external pointer field will result in a valid pointer of the expected type
518// even in the presence of an attacker able to corrupt memory inside the
519// sandbox. However, if any data related to the external object is stored
520// inside the sandbox it may still be corrupted and so must be validated before
521// use or moved into the external object. Further, an attacker will always be
522// able to substitute different external pointers of the same type for each
523// other. Therefore, code using external pointers must be written in a
524// "substitution-safe" way, i.e. it must always be possible to substitute
525// external pointers of the same type without causing memory corruption outside
526// of the sandbox. Generally this is achieved by referencing any group of
527// related external objects through a single external pointer.
528//
529// Currently we use bit 62 for the marking bit which should always be unused as
530// it's part of the non-canonical address range. When Arm's top-byte ignore
531// (TBI) is enabled, this bit will be part of the ignored byte, and we assume
532// that the Embedder is not using this byte (really only this one bit) for any
533// other purpose. This bit also does not collide with the memory tagging
534// extension (MTE) which would use bits [56, 60).
535//
536// External pointer tables are also available even when the sandbox is off but
537// pointer compression is on. In that case, the mechanism can be used to ease
538// alignment requirements as it turns unaligned 64-bit raw pointers into
539// aligned 32-bit indices. To "opt-in" to the external pointer table mechanism
540// for this purpose, instead of using the ExternalPointer accessors one needs to
541// use ExternalPointerHandles directly and use them to access the pointers in an
542// ExternalPointerTable.
543//
544// The tag is currently in practice limited to 15 bits since it needs to fit
545// together with a marking bit into the unused parts of a pointer.
546enum ExternalPointerTag : uint16_t {
549
550 // When adding new tags, please ensure that the code using these tags is
551 // "substitution-safe", i.e. still operate safely if external pointers of the
552 // same type are swapped by an attacker. See comment above for more details.
553
554 // Shared external pointers are owned by the shared Isolate and stored in the
555 // shared external pointer table associated with that Isolate, where they can
556 // be accessed from multiple threads at the same time. The objects referenced
557 // in this way must therefore always be thread-safe.
563
564 // External pointers using these tags are kept in a per-Isolate external
565 // pointer table and can only be accessed when this Isolate is active.
568 // This tag essentially stands for a `void*` pointer in the V8 API, and it is
569 // the Embedder's responsibility to ensure type safety (against substitution)
570 // and lifetime validity of these objects.
576
577 // InterceptorInfo external pointers.
595
597
603
604 // Foreigns
607
617
618 // Managed
641 // External resources whose lifetime is tied to their entry in the external
642 // pointer table but which are not referenced via a Managed
645
649 // The tags are limited to 7 bits, so the last tag is 0x7f.
651};
652
654
671
672// True if the external pointer must be accessed from the shared isolate's
673// external pointer table.
675 ExternalPointerTagRange tag_range) {
677}
678
679// True if the external pointer may live in a read-only object, in which case
680// the table entry will be in the shared read-only segment of the external
681// pointer table.
686
687// True if the external pointer references an external object whose lifetime is
688// tied to the entry in the external pointer table.
689// In this case, the entry in the ExternalPointerTable always points to an
690// object derived from ExternalPointerTable::ManagedResource.
692 ExternalPointerTagRange tag_range) {
694}
695
696// When an external poiner field can contain the null external pointer handle,
697// the type checking mechanism needs to also check for null.
698// TODO(saelo): this is mostly a temporary workaround to introduce range-based
699// type checks. In the future, we should either (a) change the type tagging
700// scheme so that null always passes or (b) (more likely) introduce dedicated
701// null entries for those tags that need them (similar to other well-known
702// empty value constants such as the empty fixed array).
709
// Indirect Pointers.
//
// When the sandbox is enabled, indirect pointers are used to reference
// HeapObjects that live outside of the sandbox (but are still managed by V8's
// garbage collector). When object A references an object B through an indirect
// pointer, object A will contain a IndirectPointerHandle, i.e. a shifted
// 32-bit index, which identifies an entry in a pointer table (either the
// trusted pointer table for TrustedObjects, or the code pointer table if it is
// a Code object). This table entry then contains the actual pointer to object
// B. Further, object B owns this pointer table entry, and it is responsible
// for updating the "self-pointer" in the entry when it is relocated in memory.
// This way, in contrast to "normal" pointers, indirect pointers never need to
// be tracked by the GC (i.e. there is no remembered set for them).
// These pointers do not exist when the sandbox is disabled.

// An IndirectPointerHandle represents a 32-bit index into a pointer table.
using IndirectPointerHandle = uint32_t;

// A null handle always references an entry that contains nullptr.
constexpr IndirectPointerHandle kNullIndirectPointerHandle = 0;

// When the sandbox is enabled, indirect pointers are used to implement:
// - TrustedPointers: an indirect pointer using the trusted pointer table (TPT)
//   and referencing a TrustedObject in one of the trusted heap spaces.
// - CodePointers, an indirect pointer using the code pointer table (CPT) and
//   referencing a Code object together with its instruction stream.
737//
738// Trusted Pointers.
739//
740// A pointer to a TrustedObject.
741// When the sandbox is enabled, these are indirect pointers using the trusted
742// pointer table (TPT). They are used to reference trusted objects (located in
743// one of V8's trusted heap spaces, outside of the sandbox) from inside the
744// sandbox in a memory-safe way. When the sandbox is disabled, these are
745// regular tagged pointers.
747
748// The size of the virtual memory reservation for the trusted pointer table.
749// As with the external pointer table, a maximum table size in combination with
750// shifted indices allows omitting bounds checks.
752
753// The trusted pointer handles are stored shifted to the left by this amount
754// to guarantee that they are smaller than the maximum table size.
755constexpr uint32_t kTrustedPointerHandleShift = 9;
756
757// A null handle always references an entry that contains nullptr.
760
761// The maximum number of entries in an trusted pointer table.
764constexpr size_t kMaxTrustedPointers =
766static_assert((1 << (32 - kTrustedPointerHandleShift)) == kMaxTrustedPointers,
767 "kTrustedPointerTableReservationSize and "
768 "kTrustedPointerHandleShift don't match");
769
770//
771// Code Pointers.
772//
773// A pointer to a Code object.
774// Essentially a specialized version of a trusted pointer that (when the
775// sandbox is enabled) uses the code pointer table (CPT) instead of the TPT.
776// Each entry in the CPT contains both a pointer to a Code object as well as a
777// pointer to the Code's entrypoint. This allows calling/jumping into Code with
778// one fewer memory access (compared to the case where the entrypoint pointer
779// first needs to be loaded from the Code object). As such, a CodePointerHandle
780// can be used both to obtain the referenced Code object and to directly load
781// its entrypoint.
782//
783// When the sandbox is disabled, these are regular tagged pointers.
785
786// The size of the virtual memory reservation for the code pointer table.
787// As with the other tables, a maximum table size in combination with shifted
788// indices allows omitting bounds checks.
789constexpr size_t kCodePointerTableReservationSize = 128 * MB;
790
791// Code pointer handles are shifted by a different amount than indirect pointer
792// handles as the tables have a different maximum size.
793constexpr uint32_t kCodePointerHandleShift = 9;
794
795// A null handle always references an entry that contains nullptr.
797
798// It can sometimes be necessary to distinguish a code pointer handle from a
799// trusted pointer handle. A typical example would be a union trusted pointer
800// field that can refer to both Code objects and other trusted objects. To
801// support these use-cases, we use a simple marking scheme where some of the
802// low bits of a code pointer handle are set, while they will be unset on a
803// trusted pointer handle. This way, the correct table to resolve the handle
804// can be determined even in the absence of a type tag.
805constexpr uint32_t kCodePointerHandleMarker = 0x1;
806static_assert(kCodePointerHandleShift > 0);
807static_assert(kTrustedPointerHandleShift > 0);
808
809// The maximum number of entries in a code pointer table.
810constexpr int kCodePointerTableEntrySize = 16;
812constexpr size_t kMaxCodePointers =
814static_assert(
816 "kCodePointerTableReservationSize and kCodePointerHandleShift don't match");
817
820
821// Constants that can be used to mark places that should be modified once
822// certain types of objects are moved out of the sandbox and into trusted space.
828
829// {obj} must be the raw tagged pointer representation of a HeapObject
830// that's guaranteed to never be in ReadOnlySpace.
832
833// Returns if we need to throw when an error occurs. This infers the language
834// mode based on the current context and the closure. This returns true if the
835// language mode is strict.
843#ifdef V8_MAP_PACKING
844 V8_INLINE static constexpr Address UnpackMapWord(Address mapword) {
845 // TODO(wenyuzhao): Clear header metadata.
846 return mapword ^ kMapWordXorMask;
847 }
848#endif
849
850 public:
851 // These values match non-compiler-dependent values defined within
852 // the implementation of v8.
853 static const int kHeapObjectMapOffset = 0;
855 static const int kStringResourceOffset =
857
859 static const int kJSObjectHeaderSize = 3 * kApiTaggedSize;
860#ifdef V8_COMPRESS_POINTERS
863#else // !V8_COMPRESS_POINTERS
866#endif // !V8_COMPRESS_POINTERS
867 static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
870#ifdef V8_ENABLE_SANDBOX
872#else
874#endif
877 static const int kStringEncodingMask = 0x8;
878 static const int kExternalTwoByteRepresentationTag = 0x02;
879 static const int kExternalOneByteRepresentationTag = 0x0a;
880
881 static const uint32_t kNumIsolateDataSlots = 4;
883 static const int kNumberOfBooleanFlags = 6;
884 static const int kErrorMessageParamSize = 1;
885 static const int kTablesAlignmentPaddingSize = 1;
891 static const int kHandleScopeDataSize =
893
894 // ExternalPointerTable and TrustedPointerTable layout guarantees.
899
900 // IsolateData layout guarantees.
901 static const int kIsolateCageBaseOffset = 0;
902 static const int kIsolateStackGuardOffset =
904 static const int kVariousBooleanFlagsOffset =
906 static const int kErrorMessageParamOffset =
911 static const int kBuiltinTier0TableOffset =
913 static const int kNewAllocationInfoOffset =
915 static const int kOldAllocationInfoOffset =
917
934 static const int kIsolateEmbedderDataOffset =
936#ifdef V8_COMPRESS_POINTERS
937 static const int kIsolateExternalPointerTableOffset =
939 static const int kIsolateSharedExternalPointerTableAddressOffset =
940 kIsolateExternalPointerTableOffset + kExternalPointerTableSize;
941 static const int kIsolateCppHeapPointerTableOffset =
942 kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize;
943#ifdef V8_ENABLE_SANDBOX
944 static const int kIsolateTrustedCageBaseOffset =
945 kIsolateCppHeapPointerTableOffset + kExternalPointerTableSize;
946 static const int kIsolateTrustedPointerTableOffset =
947 kIsolateTrustedCageBaseOffset + kApiSystemPointerSize;
948 static const int kIsolateSharedTrustedPointerTableAddressOffset =
949 kIsolateTrustedPointerTableOffset + kTrustedPointerTableSize;
950 static const int kIsolateTrustedPointerPublishingScopeOffset =
951 kIsolateSharedTrustedPointerTableAddressOffset + kApiSystemPointerSize;
952 static const int kIsolateCodePointerTableBaseAddressOffset =
953 kIsolateTrustedPointerPublishingScopeOffset + kApiSystemPointerSize;
955 kIsolateCodePointerTableBaseAddressOffset + kApiSystemPointerSize;
956#else
958 kIsolateCppHeapPointerTableOffset + kExternalPointerTableSize;
959#endif // V8_ENABLE_SANDBOX
960#else
963#endif // V8_COMPRESS_POINTERS
968 static const int kIsolateRootsOffset =
970
971 // Assert scopes
972 static const int kDisallowGarbageCollectionAlign = alignof(uint32_t);
973 static const int kDisallowGarbageCollectionSize = sizeof(uint32_t);
974
#if V8_STATIC_ROOTS_BOOL

// These constants are copied from static-roots.h and guarded by static asserts.
// Each entry maps an exported read-only root to its fixed compressed-pointer
// offset inside the read-only heap (only meaningful with static roots).
#define EXPORTED_STATIC_ROOTS_PTR_LIST(V) \
  V(UndefinedValue, 0x11)                 \
  V(NullValue, 0x2d)                      \
  V(TrueValue, 0x71)                      \
  V(FalseValue, 0x55)                     \
  V(EmptyString, 0x49)                    \
  V(TheHoleValue, 0x761)

  using Tagged_t = uint32_t;
  struct StaticReadOnlyRoot {
#define DEF_ROOT(name, value) static constexpr Tagged_t k##name = value;
    EXPORTED_STATIC_ROOTS_PTR_LIST(DEF_ROOT)
#undef DEF_ROOT

    // Use 0 for kStringMapLowerBound since string maps are the first maps.
    static constexpr Tagged_t kStringMapLowerBound = 0;
    static constexpr Tagged_t kStringMapUpperBound = 0x425;

    // PLUSONE expands to "+1" once per list entry, so the expression below
    // counts the exported roots plus the two string-map bounds above.
#define PLUSONE(...) +1
    static constexpr size_t kNumberOfExportedStaticRoots =
        2 + EXPORTED_STATIC_ROOTS_PTR_LIST(PLUSONE);
#undef PLUSONE
  };

#endif  // V8_STATIC_ROOTS_BOOL
1003
  // Root-list indices of frequently used read-only roots; must match the
  // root-list order in the implementation.
  static const int kUndefinedValueRootIndex = 4;
  static const int kTheHoleValueRootIndex = 5;
  static const int kNullValueRootIndex = 6;
  static const int kTrueValueRootIndex = 7;
  static const int kFalseValueRootIndex = 8;
  static const int kEmptyStringRootIndex = 9;

  // Global-handle node layout: the flags byte follows the first
  // system-pointer-sized field; the low two bits of it encode the state.
  static const int kNodeFlagsOffset = 1 * kApiSystemPointerSize + 3;
  static const int kNodeStateMask = 0x3;
  static const int kNodeStateIsWeakValue = 2;

  // Instance-type constants; must match the non-compiler-dependent values
  // defined by the implementation.
  static const int kFirstNonstringType = 0x80;
  static const int kOddballType = 0x83;
  static const int kForeignType = 0xcc;
  static const int kJSSpecialApiObjectType = 0x410;
  static const int kJSObjectType = 0x421;
  static const int kFirstJSApiObjectType = 0x422;
  static const int kLastJSApiObjectType = 0x80A;
  // Defines a range [kFirstEmbedderJSApiObjectType, kJSApiObjectTypesCount]
  // of JSApiObject instance type values that an embedder can use.
  static const int kFirstEmbedderJSApiObjectType = 0;
  // NOTE(review): the matching upper bound (kLastEmbedderJSApiObjectType)
  // appears to be missing from this extraction — verify against upstream.

  static const int kUndefinedOddballKind = 4;
  static const int kNullOddballKind = 3;

  // Constants used by PropertyCallbackInfo to check if we should throw when an
  // error occurs.
  static const int kDontThrow = 0;
  static const int kThrowOnError = 1;
  static const int kInferShouldThrowMode = 2;

  // Soft limit for AdjustAmountofExternalAllocatedMemory. Trigger an
  // incremental GC once the external memory reaches this limit.
  static constexpr size_t kExternalAllocationSoftLimit = 64 * 1024 * 1024;

#ifdef V8_MAP_PACKING
  static const uintptr_t kMapWordMetadataMask = 0xffffULL << 48;
  // The lowest two bits of mapwords are always `0b10`
  static const uintptr_t kMapWordSignature = 0b10;
  // XORing a (non-compressed) map with this mask ensures that the two
  // low-order bits are 0b10. The 0 at the end makes this look like a Smi,
  // although real Smis have all lower 32 bits unset. We only rely on these
  // values passing as Smis in very few places.
  static const int kMapWordXorMask = 0b11;
#endif
1052
1055#ifdef V8_ENABLE_CHECKS
1056 CheckInitializedImpl(isolate);
1057#endif
1058 }
1059
  // Returns true iff `value` carries the heap-object tag in its low bits,
  // i.e. it encodes a tagged heap-object pointer rather than a Smi.
  V8_INLINE static constexpr bool HasHeapObjectTag(Address value) {
    return (value & kHeapObjectTagMask) == static_cast<Address>(kHeapObjectTag);
  }
1063
  // Decodes the integer payload of a Smi-encoded Address.
  V8_INLINE static constexpr int SmiValue(Address value) {
    return PlatformSmiTagging::SmiToInt(value);
  }
1067
  // Encodes `value` as a Smi by shifting it into the payload bits and
  // setting the Smi tag. Does not range-check; see IsValidSmi.
  V8_INLINE static constexpr Address AddressToSmi(Address value) {
    return (value << (kSmiTagSize + PlatformSmiTagging::kSmiShiftSize)) |
           kSmiTag;
  }
1072
  // Encodes an int as a Smi; does not check that the value is in Smi range.
  V8_INLINE static constexpr Address IntToSmi(int value) {
    return AddressToSmi(static_cast<Address>(value));
  }
1076
  // Encodes any integral value as a Smi; does not check Smi range
  // (use TryIntegralToSmi for a checked conversion).
  template <typename T,
            typename std::enable_if_t<std::is_integral_v<T>>* = nullptr>
  V8_INLINE static constexpr Address IntegralToSmi(T value) {
    return AddressToSmi(static_cast<Address>(value));
  }
1082
  // Returns true iff `value` is representable as a Smi on this platform.
  template <typename T,
            typename std::enable_if_t<std::is_integral_v<T>>* = nullptr>
  V8_INLINE static constexpr bool IsValidSmi(T value) {
    return PlatformSmiTagging::IsValidSmi(value);
  }
1088
1089 template <typename T,
1090 typename std::enable_if_t<std::is_integral_v<T>>* = nullptr>
1091 static constexpr std::optional<Address> TryIntegralToSmi(T value) {
1092 if (V8_LIKELY(PlatformSmiTagging::IsValidSmi(value))) {
1093 return {AddressToSmi(static_cast<Address>(value))};
1094 }
1095 return {};
1096 }
1097
1098#if V8_STATIC_ROOTS_BOOL
  // With static roots, compares a tagged value against the known compressed
  // pointer of a read-only root; the upper (cage-base) bits are discarded.
  V8_INLINE static bool is_identical(Address obj, Tagged_t constant) {
    return static_cast<Tagged_t>(obj) == constant;
  }
1102
  // Checks whether the (compressed) map of `obj` lies in [first_map,
  // last_map]. Only available with static roots, where maps have known
  // constant compressed values.
  V8_INLINE static bool CheckInstanceMapRange(Address obj, Tagged_t first_map,
                                              Tagged_t last_map) {
    // NOTE(review): the load of `map` (presumably a tagged read at
    // kHeapObjectMapOffset) appears to be missing from this extraction —
    // verify against the upstream header.
#ifdef V8_MAP_PACKING
    map = UnpackMapWord(map);
#endif
    return map >= first_map && map <= last_map;
  }
#endif
1112
1115#ifdef V8_MAP_PACKING
1116 map = UnpackMapWord(map);
1117#endif
1119 }
1120
1122 if (!HasHeapObjectTag(obj)) return kNullAddress;
1124#ifdef V8_MAP_PACKING
1125 map = UnpackMapWord(map);
1126#endif
1127 return map;
1128 }
1129
1133
  // Returns true iff the instance type denotes an external string with
  // two-byte (UTF-16) encoding.
  V8_INLINE static bool IsExternalTwoByteString(int instance_type) {
    int representation = (instance_type & kStringRepresentationAndEncodingMask);
    return representation == kExternalTwoByteRepresentationTag;
  }
1138
  // Returns true iff objects of this instance type can carry embedder
  // internal fields: JSObject, JSSpecialApiObject, or any JSApiObject.
  V8_INLINE static constexpr bool CanHaveInternalField(int instance_type) {
    static_assert(kJSObjectType + 1 == kFirstJSApiObjectType);
    static_assert(kJSObjectType < kLastJSApiObjectType);
    // Check for IsJSObject() || IsJSSpecialApiObject() || IsJSApiObject()
    return instance_type == kJSSpecialApiObjectType ||
           // inlined version of base::IsInRange
           (static_cast<unsigned>(static_cast<unsigned>(instance_type) -
                                  static_cast<unsigned>(kJSObjectType)) <=
            static_cast<unsigned>(kLastJSApiObjectType - kJSObjectType));
  }
1150
1151 V8_INLINE static uint8_t GetNodeFlag(Address* obj, int shift) {
1152 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
1153 return *addr & static_cast<uint8_t>(1U << shift);
1154 }
1155
1156 V8_INLINE static void UpdateNodeFlag(Address* obj, bool value, int shift) {
1157 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
1158 uint8_t mask = static_cast<uint8_t>(1U << shift);
1159 *addr = static_cast<uint8_t>((*addr & ~mask) | (value << shift));
1160 }
1161
1162 V8_INLINE static uint8_t GetNodeState(Address* obj) {
1163 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
1164 return *addr & kNodeStateMask;
1165 }
1166
1167 V8_INLINE static void UpdateNodeState(Address* obj, uint8_t value) {
1168 uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
1169 *addr = static_cast<uint8_t>((*addr & ~kNodeStateMask) | value);
1170 }
1171
  // Stores `data` into the isolate's embedder-data slot `slot`.
  V8_INLINE static void SetEmbedderData(v8::Isolate* isolate, uint32_t slot,
                                        void* data) {
    Address addr = reinterpret_cast<Address>(isolate) +
    // NOTE(review): the slot-offset term of this address computation appears
    // to be missing from this extraction — verify against upstream.
    *reinterpret_cast<void**>(addr) = data;
  }
1178
  // Returns the pointer stored in the isolate's embedder-data slot `slot`.
  V8_INLINE static void* GetEmbedderData(const v8::Isolate* isolate,
                                         uint32_t slot) {
    Address addr = reinterpret_cast<Address>(isolate) +
    // NOTE(review): the slot-offset term of this address computation appears
    // to be missing from this extraction — verify against upstream.
    return *reinterpret_cast<void* const*>(addr);
  }
1185
1187 Address addr =
1189 ++(*reinterpret_cast<size_t*>(addr));
1190 }
1191
  // Returns the address of root-list slot `index` inside the isolate; the
  // roots are a flat array of system-pointer-sized entries starting at
  // kIsolateRootsOffset.
  V8_INLINE static Address* GetRootSlot(v8::Isolate* isolate, int index) {
    Address addr = reinterpret_cast<Address>(isolate) + kIsolateRootsOffset +
                   index * kApiSystemPointerSize;
    return reinterpret_cast<Address*>(addr);
  }
1197
  // Returns the value of root-list entry `index`. With static roots, the
  // exported read-only roots are reconstructed directly from the cage base
  // plus their constant compressed offsets, avoiding a memory load; all
  // other indices fall through to the isolate's root table.
  V8_INLINE static Address GetRoot(v8::Isolate* isolate, int index) {
#if V8_STATIC_ROOTS_BOOL
    Address base = *reinterpret_cast<Address*>(
        reinterpret_cast<uintptr_t>(isolate) + kIsolateCageBaseOffset);
    switch (index) {
#define DECOMPRESS_ROOT(name, ...) \
  case k##name##RootIndex:         \
    return base + StaticReadOnlyRoot::k##name;
      EXPORTED_STATIC_ROOTS_PTR_LIST(DECOMPRESS_ROOT)
#undef DECOMPRESS_ROOT
#undef EXPORTED_STATIC_ROOTS_PTR_LIST
      default:
        break;
    }
#endif  // V8_STATIC_ROOTS_BOOL
    return *GetRootSlot(isolate, index);
  }
1215
1216#ifdef V8_ENABLE_SANDBOX
1217 V8_INLINE static Address* GetExternalPointerTableBase(v8::Isolate* isolate) {
1218 Address addr = reinterpret_cast<Address>(isolate) +
1219 kIsolateExternalPointerTableOffset +
1221 return *reinterpret_cast<Address**>(addr);
1222 }
1223
1224 V8_INLINE static Address* GetSharedExternalPointerTableBase(
1225 v8::Isolate* isolate) {
1226 Address addr = reinterpret_cast<Address>(isolate) +
1227 kIsolateSharedExternalPointerTableAddressOffset;
1228 addr = *reinterpret_cast<Address*>(addr);
1230 return *reinterpret_cast<Address**>(addr);
1231 }
1232#endif
1233
1234 template <typename T>
1235 V8_INLINE static T ReadRawField(Address heap_object_ptr, int offset) {
1236 Address addr = heap_object_ptr + offset - kHeapObjectTag;
1237#ifdef V8_COMPRESS_POINTERS
1238 if (sizeof(T) > kApiTaggedSize) {
1239 // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
1240 // fields (external pointers, doubles and BigInt data) are only
1241 // kTaggedSize aligned so we have to use unaligned pointer friendly way of
1242 // accessing them in order to avoid undefined behavior in C++ code.
1243 T r;
1244 memcpy(&r, reinterpret_cast<void*>(addr), sizeof(T));
1245 return r;
1246 }
1247#endif
1248 return *reinterpret_cast<const T*>(addr);
1249 }
1250
1252 int offset) {
1253#ifdef V8_COMPRESS_POINTERS
1254 uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
1256 return base + static_cast<Address>(static_cast<uintptr_t>(value));
1257#else
1258 return ReadRawField<Address>(heap_object_ptr, offset);
1259#endif
1260 }
1261
1263 int offset) {
1264#ifdef V8_COMPRESS_POINTERS
1265 uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
1266 return static_cast<Address>(static_cast<uintptr_t>(value));
1267#else
1268 return ReadRawField<Address>(heap_object_ptr, offset);
1269#endif
1270 }
1271
1273#ifdef V8_ENABLE_SANDBOX
1274 return reinterpret_cast<v8::Isolate*>(
1276#else
1277 // Not used in non-sandbox mode.
1278 return nullptr;
1279#endif
1280 }
1281
1282 template <ExternalPointerTagRange tag_range>
1284 Address heap_object_ptr,
1285 int offset) {
1286#ifdef V8_ENABLE_SANDBOX
1287 static_assert(!tag_range.IsEmpty());
1288 // See src/sandbox/external-pointer-table.h. Logic duplicated here so
1289 // it can be inlined and doesn't require an additional call.
1290 Address* table = IsSharedExternalPointerType(tag_range)
1291 ? GetSharedExternalPointerTableBase(isolate)
1292 : GetExternalPointerTableBase(isolate);
1295 uint32_t index = handle >> kExternalPointerIndexShift;
1296 std::atomic<Address>* ptr =
1297 reinterpret_cast<std::atomic<Address>*>(&table[index]);
1298 Address entry = std::atomic_load_explicit(ptr, std::memory_order_relaxed);
1299 ExternalPointerTag actual_tag = static_cast<ExternalPointerTag>(
1301 if (V8_LIKELY(tag_range.Contains(actual_tag))) {
1302 return entry & kExternalPointerPayloadMask;
1303 } else {
1304 return 0;
1305 }
1306 return entry;
1307#else
1308 return ReadRawField<Address>(heap_object_ptr, offset);
1309#endif // V8_ENABLE_SANDBOX
1310 }
1311
1312#ifdef V8_COMPRESS_POINTERS
1314 return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
1315 }
1316
  // Compresses a full tagged pointer to its low 32 bits (its offset within
  // the pointer-compression cage).
  V8_INLINE static uint32_t CompressTagged(Address value) {
    return static_cast<uint32_t>(value);
  }
1320
1321 V8_INLINE static Address DecompressTaggedField(Address heap_object_ptr,
1322 uint32_t value) {
1324 return base + static_cast<Address>(static_cast<uintptr_t>(value));
1325 }
1326
1327#endif // V8_COMPRESS_POINTERS
1328};
1329
1330// Only perform cast check for types derived from v8::Data since
1331// other types do not implement the Cast method.
1332template <bool PerformCheck>
1334 template <class T>
1335 static void Perform(T* data);
1336};
1337
1338template <>
1339template <class T>
1341 T::Cast(data);
1342}
1343
1344template <>
1345template <class T>
1347
1348template <class T>
1351 !std::is_same<Data, std::remove_cv_t<T>>::value>::Perform(data);
1352}
1353
1354// A base class for backing stores, which is needed due to vagaries of
1355// how static casts work with std::shared_ptr.
1357
1358// The maximum value in enum GarbageCollectionReason, defined in heap.h.
1359// This is needed for histograms sampling garbage collection reasons.
1361
1362// Base class for the address block allocator compatible with standard
1363// containers, which registers its allocated range as strong roots.
1365 public:
1366 Heap* heap() const { return heap_; }
1367
1369 const StrongRootAllocatorBase& b) {
1370 // TODO(pkasting): Replace this body with `= default` after dropping support
1371 // for old gcc versions.
1372 return a.heap_ == b.heap_;
1373 }
1374
1375 protected:
1378 explicit StrongRootAllocatorBase(Isolate* isolate);
1379 explicit StrongRootAllocatorBase(v8::Isolate* isolate);
1380 explicit StrongRootAllocatorBase(LocalIsolate* isolate);
1381
1382 // Allocate/deallocate a range of n elements of type internal::Address.
1383 Address* allocate_impl(size_t n);
1384 void deallocate_impl(Address* p, size_t n) noexcept;
1385
1386 private:
1388};
1389
1390// The general version of this template behaves just as std::allocator, with
1391// the exception that the constructor takes the isolate as parameter. Only
1392// specialized versions, e.g., internal::StrongRootAllocator<internal::Address>
1393// and internal::StrongRootAllocator<v8::Local<T>> register the allocated range
1394// as strong roots.
1395template <typename T>
1396class StrongRootAllocator : private std::allocator<T> {
1397 public:
1398 using value_type = T;
1399
1400 template <typename HeapOrIsolateT>
1401 explicit StrongRootAllocator(HeapOrIsolateT*) {}
1402 template <typename U>
1404
1405 using std::allocator<T>::allocate;
1406 using std::allocator<T>::deallocate;
1407};
1408
// TODO(pkasting): Replace with `requires` clauses after dropping support for
// old gcc versions.
// Detection idiom: true iff `Iterator` declares a nested `iterator_concept`
// type (the partial specialization is viable only when the alias exists).
template <typename Iterator, typename = void>
inline constexpr bool kHaveIteratorConcept = false;
template <typename Iterator>
inline constexpr bool kHaveIteratorConcept<
    Iterator, std::void_t<typename Iterator::iterator_concept>> = true;

// Likewise, true iff `Iterator` declares a nested `iterator_category` type.
template <typename Iterator, typename = void>
inline constexpr bool kHaveIteratorCategory = false;
template <typename Iterator>
inline constexpr bool kHaveIteratorCategory<
    Iterator, std::void_t<typename Iterator::iterator_category>> = true;
1422
1423// Helper struct that contains an `iterator_concept` type alias only when either
1424// `Iterator` or `std::iterator_traits<Iterator>` do.
1425// Default: no alias.
1426template <typename Iterator, typename = void>
1428// Use `Iterator::iterator_concept` if available.
1429template <typename Iterator>
1431 Iterator, std::enable_if_t<kHaveIteratorConcept<Iterator>>> {
1432 using iterator_concept = typename Iterator::iterator_concept;
1433};
1434// Otherwise fall back to `std::iterator_traits<Iterator>` if possible.
1435template <typename Iterator>
1437 Iterator, std::enable_if_t<kHaveIteratorCategory<Iterator> &&
1438 !kHaveIteratorConcept<Iterator>>> {
1439 // There seems to be no feature-test macro covering this, so use the
1440 // presence of `<ranges>` as a crude proxy, since it was added to the
1441 // standard as part of the Ranges papers.
1442 // TODO(pkasting): Add this unconditionally after dropping support for old
1443 // libstdc++ versions.
1444#if __has_include(<ranges>)
1445 using iterator_concept =
1446 typename std::iterator_traits<Iterator>::iterator_concept;
1447#endif
1448};
1449
1450// A class of iterators that wrap some different iterator type.
1451// If specified, ElementType is the type of element accessed by the wrapper
1452// iterator; in this case, the actual reference and pointer types of Iterator
1453// must be convertible to ElementType& and ElementType*, respectively.
1454template <typename Iterator, typename ElementType = void>
1456 public:
1457 static_assert(
1458 std::is_void_v<ElementType> ||
1459 (std::is_convertible_v<typename std::iterator_traits<Iterator>::pointer,
1460 std::add_pointer_t<ElementType>> &&
1461 std::is_convertible_v<typename std::iterator_traits<Iterator>::reference,
1462 std::add_lvalue_reference_t<ElementType>>));
1463
1465 typename std::iterator_traits<Iterator>::difference_type;
1467 std::conditional_t<std::is_void_v<ElementType>,
1468 typename std::iterator_traits<Iterator>::value_type,
1469 ElementType>;
1470 using pointer =
1471 std::conditional_t<std::is_void_v<ElementType>,
1472 typename std::iterator_traits<Iterator>::pointer,
1473 std::add_pointer_t<ElementType>>;
1475 std::conditional_t<std::is_void_v<ElementType>,
1476 typename std::iterator_traits<Iterator>::reference,
1477 std::add_lvalue_reference_t<ElementType>>;
1479 typename std::iterator_traits<Iterator>::iterator_category;
1480
1481 constexpr WrappedIterator() noexcept = default;
1482 constexpr explicit WrappedIterator(Iterator it) noexcept : it_(it) {}
1483
1484 // TODO(pkasting): Switch to `requires` and concepts after dropping support
1485 // for old gcc and libstdc++ versions.
1486 template <typename OtherIterator, typename OtherElementType,
1487 typename = std::enable_if_t<
1488 std::is_convertible_v<OtherIterator, Iterator>>>
1491 : it_(other.base()) {}
1492
1493 [[nodiscard]] constexpr reference operator*() const noexcept { return *it_; }
1494 [[nodiscard]] constexpr pointer operator->() const noexcept {
1495 if constexpr (std::is_pointer_v<Iterator>) {
1496 return it_;
1497 } else {
1498 return it_.operator->();
1499 }
1500 }
1501
1502 template <typename OtherIterator, typename OtherElementType>
1503 [[nodiscard]] constexpr bool operator==(
1505 const noexcept {
1506 return it_ == other.base();
1507 }
1508#if V8_HAVE_SPACESHIP_OPERATOR
1509 template <typename OtherIterator, typename OtherElementType>
1510 [[nodiscard]] constexpr auto operator<=>(
1512 const noexcept {
1513 if constexpr (std::three_way_comparable_with<Iterator, OtherIterator>) {
1514 return it_ <=> other.base();
1515 } else if constexpr (std::totally_ordered_with<Iterator, OtherIterator>) {
1516 if (it_ < other.base()) {
1517 return std::strong_ordering::less;
1518 }
1519 return (it_ > other.base()) ? std::strong_ordering::greater
1520 : std::strong_ordering::equal;
1521 } else {
1522 if (it_ < other.base()) {
1523 return std::partial_ordering::less;
1524 }
1525 if (other.base() < it_) {
1526 return std::partial_ordering::greater;
1527 }
1528 return (it_ == other.base()) ? std::partial_ordering::equivalent
1529 : std::partial_ordering::unordered;
1530 }
1531 }
1532#else
1533 // Assume that if spaceship isn't present, operator rewriting might not be
1534 // either.
1535 template <typename OtherIterator, typename OtherElementType>
1536 [[nodiscard]] constexpr bool operator!=(
1538 const noexcept {
1539 return it_ != other.base();
1540 }
1541
1542 template <typename OtherIterator, typename OtherElementType>
1543 [[nodiscard]] constexpr bool operator<(
1545 const noexcept {
1546 return it_ < other.base();
1547 }
1548 template <typename OtherIterator, typename OtherElementType>
1549 [[nodiscard]] constexpr bool operator<=(
1551 const noexcept {
1552 return it_ <= other.base();
1553 }
1554 template <typename OtherIterator, typename OtherElementType>
1555 [[nodiscard]] constexpr bool operator>(
1557 const noexcept {
1558 return it_ > other.base();
1559 }
1560 template <typename OtherIterator, typename OtherElementType>
1561 [[nodiscard]] constexpr bool operator>=(
1563 const noexcept {
1564 return it_ >= other.base();
1565 }
1566#endif
1567
  // Pre-increment: advances the wrapped iterator.
  constexpr WrappedIterator& operator++() noexcept {
    ++it_;
    return *this;
  }
  // Post-increment: returns the value held before advancing.
  constexpr WrappedIterator operator++(int) noexcept {
    WrappedIterator result(*this);
    ++(*this);
    return result;
  }

  // Pre-decrement: steps the wrapped iterator back.
  constexpr WrappedIterator& operator--() noexcept {
    --it_;
    return *this;
  }
  // Post-decrement: returns the value held before stepping back.
  constexpr WrappedIterator operator--(int) noexcept {
    WrappedIterator result(*this);
    --(*this);
    return result;
  }
  // Random-access advance by `n`, implemented via the compound assignment.
  [[nodiscard]] constexpr WrappedIterator operator+(
      difference_type n) const noexcept {
    WrappedIterator result(*this);
    result += n;
    return result;
  }
  // Symmetric overload so `n + it` works like `it + n`.
  [[nodiscard]] friend constexpr WrappedIterator operator+(
      difference_type n, const WrappedIterator& x) noexcept {
    return x + n;
  }
1598 it_ += n;
1599 return *this;
1600 }
1601 [[nodiscard]] constexpr WrappedIterator operator-(
1602 difference_type n) const noexcept {
1603 return *this + -n;
1604 }
1606 return *this += -n;
1607 }
1608 template <typename OtherIterator, typename OtherElementType>
1609 [[nodiscard]] constexpr auto operator-(
1611 const noexcept {
1612 return it_ - other.base();
1613 }
1614 [[nodiscard]] constexpr reference operator[](
1615 difference_type n) const noexcept {
1616 return it_[n];
1617 }
1618
1619 [[nodiscard]] constexpr const Iterator& base() const noexcept { return it_; }
1620
1621 private:
1622 Iterator it_;
1623};
1624
1625// Helper functions about values contained in handles.
1626// A value is either an indirect pointer or a direct pointer, depending on
1627// whether direct local support is enabled.
1628class ValueHelper final {
1629 public:
1630 // ValueHelper::InternalRepresentationType is an abstract type that
1631 // corresponds to the internal representation of v8::Local and essentially
1632 // to what T* really is (these two are always in sync). This type is used in
1633 // methods like GetDataFromSnapshotOnce that need access to a handle's
1634 // internal representation. In particular, if `x` is a `v8::Local<T>`, then
1635 // `v8::Local<T>::FromRepr(x.repr())` gives exactly the same handle as `x`.
1636#ifdef V8_ENABLE_DIRECT_HANDLE
1637 static constexpr Address kTaggedNullAddress = 1;
1638
1641#else
1643 static constexpr InternalRepresentationType kEmpty = nullptr;
1644#endif // V8_ENABLE_DIRECT_HANDLE
1645
  // Returns whether `value` is the internal representation of an empty
  // handle (compares against kEmpty for the active handle mode).
  template <typename T>
  V8_INLINE static bool IsEmpty(T* value) {
    return ValueAsRepr(value) == kEmpty;
  }
1650
1651 // Returns a handle's "value" for all kinds of abstract handles. For Local,
1652 // it is equivalent to `*handle`. The variadic parameters support handle
1653 // types with extra type parameters, like `Persistent<T, M>`.
1654 template <template <typename T, typename... Ms> typename H, typename T,
1655 typename... Ms>
1657 return handle.template value<T>();
1658 }
1659
1660#ifdef V8_ENABLE_DIRECT_HANDLE
1661
1662 template <typename T>
1663 V8_INLINE static Address ValueAsAddress(const T* value) {
1664 return reinterpret_cast<Address>(value);
1665 }
1666
1667 template <typename T, bool check_null = true, typename S>
1668 V8_INLINE static T* SlotAsValue(S* slot) {
1669 if (check_null && slot == nullptr) {
1670 return reinterpret_cast<T*>(kTaggedNullAddress);
1671 }
1672 return *reinterpret_cast<T**>(slot);
1673 }
1674
1675 template <typename T>
1676 V8_INLINE static InternalRepresentationType ValueAsRepr(const T* value) {
1677 return reinterpret_cast<InternalRepresentationType>(value);
1678 }
1679
1680 template <typename T>
1682 return reinterpret_cast<T*>(repr);
1683 }
1684
1685#else // !V8_ENABLE_DIRECT_HANDLE
1686
  // With indirect handles, the handle's T* points at a slot holding the
  // object's tagged address; dereference the slot to obtain the address.
  template <typename T>
  V8_INLINE static Address ValueAsAddress(const T* value) {
    return *reinterpret_cast<const Address*>(value);
  }

  // With indirect handles, a slot pointer is itself the handle value, so
  // this is a plain pointer reinterpretation (check_null is unused here).
  template <typename T, bool check_null = true, typename S>
  V8_INLINE static T* SlotAsValue(S* slot) {
    return reinterpret_cast<T*>(slot);
  }
1696
1697 template <typename T>
1699 return const_cast<InternalRepresentationType>(
1700 reinterpret_cast<const Address*>(value));
1701 }
1702
1703 template <typename T>
1705 return reinterpret_cast<T*>(repr);
1706 }
1707
1708#endif // V8_ENABLE_DIRECT_HANDLE
1709};
1710
1714class HandleHelper final {
1715 public:
1726 template <typename T1, typename T2>
1727 V8_INLINE static bool EqualHandles(const T1& lhs, const T2& rhs) {
1728 if (lhs.IsEmpty()) return rhs.IsEmpty();
1729 if (rhs.IsEmpty()) return false;
1730 return lhs.ptr() == rhs.ptr();
1731 }
1732};
1733
1735
1736// These functions are here just to match friend declarations in
1737// XxxCallbackInfo classes allowing these functions to access the internals
1738// of the info objects. These functions are supposed to be called by debugger
1739// macros.
1740void PrintFunctionCallbackInfo(void* function_callback_info);
1741void PrintPropertyCallbackInfo(void* property_callback_info);
1742
1743} // namespace internal
1744} // namespace v8
1745
1746#endif // INCLUDE_V8_INTERNAL_H_
#define T
static V8_INLINE bool EqualHandles(const T1 &lhs, const T2 &rhs)
static V8_INLINE Address LoadMap(Address obj)
static constexpr size_t kExternalAllocationSoftLimit
static V8_INLINE constexpr bool HasHeapObjectTag(Address value)
static const int kIsolateCageBaseOffset
static const int kEmbedderDataArrayHeaderSize
static const int kHeapObjectMapOffset
static const int kEmbedderDataSlotSize
static const int kIsolateApiCallbackThunkArgumentOffset
static const int kJSAPIObjectWithEmbedderSlotsHeaderSize
static const int kOddballType
static V8_INLINE void * GetEmbedderData(const v8::Isolate *isolate, uint32_t slot)
static const int kInferShouldThrowMode
static const int kNewAllocationInfoOffset
static const int kStringEncodingMask
static const int kIsolateFastCCallCallerPcOffset
static V8_INLINE void CheckInitialized(v8::Isolate *isolate)
static const int kIsolateThreadLocalTopOffset
static V8_INLINE Address GetRoot(v8::Isolate *isolate, int index)
static const uint32_t kNumIsolateDataSlots
static const int kForeignType
static V8_INLINE constexpr bool CanHaveInternalField(int instance_type)
static V8_INLINE T ReadRawField(Address heap_object_ptr, int offset)
static const int kFirstEmbedderJSApiObjectType
static const int kNumberOfBooleanFlags
static V8_INLINE constexpr Address IntToSmi(int value)
static const int kThreadLocalTopSize
static const int kIsolateRootsOffset
static V8_INLINE Address ReadTaggedPointerField(Address heap_object_ptr, int offset)
static const int kExternalPointerTableSize
static const int kUndefinedOddballKind
static const int kMapInstanceTypeOffset
static V8_INLINE uint8_t GetNodeFlag(Address *obj, int shift)
static const int kIsolateStackGuardOffset
static const int kLinearAllocationAreaSize
static const int kFastCCallAlignmentPaddingSize
static const int kDisallowGarbageCollectionAlign
static const int kIsolateFastCCallCallerFpOffset
static const int kErrorMessageParamSize
static const int kJSObjectType
static const int kBuiltinTier0TableOffset
static V8_INLINE constexpr Address IntegralToSmi(T value)
static const int kIsolateLongTaskStatsCounterOffset
static const int kNativeContextEmbedderDataOffset
static const int kLastJSApiObjectType
static const int kIsolateHandleScopeDataOffset
static const int kFirstNonstringType
static const int kEmptyStringRootIndex
static const int kBuiltinTier0EntryTableOffset
static const int kFixedArrayHeaderSize
static const int kNullOddballKind
static const int kUndefinedValueRootIndex
static const int kExternalTwoByteRepresentationTag
static const int kDontThrow
static V8_INLINE int GetOddballKind(Address obj)
static const int kStackGuardSize
static V8_INLINE Address * GetRootSlot(v8::Isolate *isolate, int index)
static const int kNodeStateMask
static V8_INLINE int GetInstanceType(Address obj)
static V8_INLINE void UpdateNodeFlag(Address *obj, bool value, int shift)
static const int kNodeStateIsWeakValue
static const int kFirstJSApiObjectType
static const int kStringResourceOffset
static const int kErrorMessageParamOffset
static const int kExternalPointerTableBasePointerOffset
static const int kFalseValueRootIndex
static const int kIsolateRegexpExecVectorArgumentOffset
static const int kIsolateFastApiCallTargetOffset
static const int kTrueValueRootIndex
static const int kThrowOnError
static const int kOddballKindOffset
static const int kBuiltinTier0TableSize
static V8_INLINE Address ReadTaggedSignedField(Address heap_object_ptr, int offset)
static const int kContinuationPreservedEmbedderDataOffset
static V8_INLINE constexpr int SmiValue(Address value)
static const int kNullValueRootIndex
static V8_INLINE void SetEmbedderData(v8::Isolate *isolate, uint32_t slot, void *data)
static const int kTrustedPointerTableSize
static const int kTheHoleValueRootIndex
static V8_INLINE void UpdateNodeState(Address *obj, uint8_t value)
static const int kTablesAlignmentPaddingSize
static V8_EXPORT void CheckInitializedImpl(v8::Isolate *isolate)
static V8_INLINE constexpr Address AddressToSmi(Address value)
static V8_INLINE Address ReadExternalPointerField(v8::Isolate *isolate, Address heap_object_ptr, int offset)
static const int kHandleScopeDataSize
static const int kExternalOneByteRepresentationTag
static const int kBuiltinTier0EntryTableSize
static const int kDisallowGarbageCollectionSize
static const int kOldAllocationInfoOffset
static const int kIsolateEmbedderDataOffset
static V8_INLINE uint8_t GetNodeState(Address *obj)
static const int kEmbedderDataSlotExternalPointerOffset
static const int kNodeFlagsOffset
static const int kTrustedPointerTableBasePointerOffset
static const int kRegExpStaticResultOffsetsVectorSize
static const int kLastEmbedderJSApiObjectType
static const int kVariousBooleanFlagsOffset
static constexpr std::optional< Address > TryIntegralToSmi(T value)
static const int kNodeClassIdOffset
static const int kStringRepresentationAndEncodingMask
static const int kJSObjectHeaderSize
static V8_INLINE void IncrementLongTasksStatsCounter(v8::Isolate *isolate)
static const int kJSSpecialApiObjectType
static V8_INLINE bool IsExternalTwoByteString(int instance_type)
static V8_INLINE constexpr bool IsValidSmi(T value)
static V8_INLINE v8::Isolate * GetIsolateForSandbox(Address obj)
friend bool operator==(const StrongRootAllocatorBase &a, const StrongRootAllocatorBase &b)
StrongRootAllocator(HeapOrIsolateT *)
StrongRootAllocator(const StrongRootAllocator< U > &other) noexcept
static V8_INLINE Address ValueAsAddress(const T *value)
static V8_INLINE T * SlotAsValue(S *slot)
internal::Address * InternalRepresentationType
static V8_INLINE bool IsEmpty(T *value)
static V8_INLINE InternalRepresentationType ValueAsRepr(const T *value)
static V8_INLINE T * HandleAsValue(const H< T, Ms... > &handle)
static V8_INLINE T * ReprAsValue(InternalRepresentationType repr)
static constexpr InternalRepresentationType kEmpty
std::conditional_t< std::is_void_v< ElementType >, typename std::iterator_traits< Iterator >::pointer, std::add_pointer_t< ElementType > > pointer
constexpr WrappedIterator & operator-=(difference_type n) noexcept
constexpr WrappedIterator operator--(int) noexcept
constexpr WrappedIterator & operator+=(difference_type n) noexcept
constexpr const Iterator & base() const noexcept
constexpr WrappedIterator & operator++() noexcept
constexpr pointer operator->() const noexcept
typename std::iterator_traits< Iterator >::difference_type difference_type
std::conditional_t< std::is_void_v< ElementType >, typename std::iterator_traits< Iterator >::reference, std::add_lvalue_reference_t< ElementType > > reference
constexpr reference operator[](difference_type n) const noexcept
constexpr bool operator!=(const WrappedIterator< OtherIterator, OtherElementType > &other) const noexcept
constexpr bool operator>=(const WrappedIterator< OtherIterator, OtherElementType > &other) const noexcept
constexpr bool operator<(const WrappedIterator< OtherIterator, OtherElementType > &other) const noexcept
constexpr WrappedIterator & operator--() noexcept
constexpr WrappedIterator() noexcept=default
typename std::iterator_traits< Iterator >::iterator_category iterator_category
constexpr bool operator<=(const WrappedIterator< OtherIterator, OtherElementType > &other) const noexcept
constexpr WrappedIterator(const WrappedIterator< OtherIterator, OtherElementType > &other) noexcept
constexpr bool operator>(const WrappedIterator< OtherIterator, OtherElementType > &other) const noexcept
constexpr reference operator*() const noexcept
constexpr auto operator-(const WrappedIterator< OtherIterator, OtherElementType > &other) const noexcept
friend constexpr WrappedIterator operator+(difference_type n, const WrappedIterator &x) noexcept
constexpr WrappedIterator operator+(difference_type n) const noexcept
constexpr WrappedIterator operator++(int) noexcept
constexpr WrappedIterator operator-(difference_type n) const noexcept
constexpr bool operator==(const WrappedIterator< OtherIterator, OtherElementType > &other) const noexcept
std::conditional_t< std::is_void_v< ElementType >, typename std::iterator_traits< Iterator >::value_type, ElementType > value_type
bool is_empty
Definition sweeper.cc:229
Isolate * isolate
int32_t offset
std::map< const std::string, const std::string > map
ZoneVector< RpoNumber > & result
int x
uint32_t const mask
int n
Definition mul-fft.cc:296
int r
Definition mul-fft.cc:298
STL namespace.
const intptr_t kHeapObjectTagMask
Definition v8-internal.h:75
V8_INLINE IndirectHandle< T > handle(Tagged< T > object, Isolate *isolate)
Definition handles-inl.h:72
constexpr uint64_t kCppHeapPointerMarkBit
constexpr int kCodePointerTableEntrySizeLog2
constexpr bool kRuntimeGeneratedCodeObjectsLiveInTrustedSpace
IndirectPointerHandle TrustedPointerHandle
constexpr uint64_t kExternalPointerTagShift
const int kApiSystemPointerSize
Definition v8-internal.h:65
constexpr int H
constexpr Address kTaggedNullAddress
Definition handles.h:53
constexpr bool SandboxIsEnabled()
const int kApiDoubleSize
Definition v8-internal.h:66
Address CppHeapPointer_t
constexpr size_t kMaxCppHeapPointers
constexpr intptr_t kIntptrAllBitsSet
Definition v8-internal.h:93
V8_INLINE constexpr PtrComprCageBase GetPtrComprCageBaseFromOnHeapAddress(Address address)
constexpr int GB
Definition v8-internal.h:57
void VerifyHandleIsNonEmpty(bool is_empty)
Definition api.cc:557
const int kApiInt32Size
Definition v8-internal.h:67
const int kForwardingTagSize
Definition v8-internal.h:82
static V8_INLINE constexpr bool IsSharedExternalPointerType(ExternalPointerTagRange tag_range)
const intptr_t kForwardingTagMask
Definition v8-internal.h:83
void PrintPropertyCallbackInfo(void *property_callback_info)
constexpr ExternalPointerTagRange kAnyManagedResourceExternalPointerTag(kFirstManagedResourceTag, kLastManagedResourceTag)
constexpr uint64_t kExternalPointerPayloadMask
const int kSmiTagSize
Definition v8-internal.h:87
const int kApiInt64Size
Definition v8-internal.h:68
constexpr ExternalPointerTagRange kAnyExternalPointerTagRange(kFirstExternalPointerTag, kLastExternalPointerTag)
constexpr uint64_t kExternalPointerTagMask
constexpr int kCodePointerTableEntryCodeObjectOffset
constexpr int kTrustedPointerTableEntrySizeLog2
constexpr int kTrustedPointerTableEntrySize
constexpr uint64_t kCppHeapPointerPayloadShift
constexpr ExternalPointer_t kNullExternalPointer
IndirectPointerHandle CodePointerHandle
constexpr CppHeapPointer_t kNullCppHeapPointer
constexpr ExternalPointerTagRange kAnySharedExternalPointerTagRange(kFirstSharedExternalPointerTag, kLastSharedExternalPointerTag)
V8_EXPORT internal::Isolate * IsolateFromNeverReadOnlySpaceObject(Address obj)
Address Tagged_t
Definition globals.h:547
Address SandboxedPointer_t
const int kApiSizetSize
Definition v8-internal.h:69
constexpr uint64_t kExternalPointerTagAndMarkbitMask
constexpr int kCodePointerTableEntryEntrypointOffset
constexpr size_t kMaxExternalPointers
constexpr size_t kCodePointerTableReservationSize
constexpr TrustedPointerHandle kNullTrustedPointerHandle
uint32_t IndirectPointerHandle
const int kWeakHeapObjectTag
Definition v8-internal.h:73
constexpr ExternalPointerHandle kNullExternalPointerHandle
constexpr ExternalPointerTagRange kAnyMaybeReadOnlyExternalPointerTagRange(kFirstMaybeReadOnlyExternalPointerTag, kLastMaybeReadOnlyExternalPointerTag)
Address ExternalPointer_t
constexpr int S
constexpr uintptr_t kUintptrAllBitsSet
Definition v8-internal.h:94
const int kForwardingTag
Definition v8-internal.h:81
const intptr_t kHeapObjectReferenceTagMask
Definition v8-internal.h:76
constexpr bool SmiValuesAre31Bits()
constexpr size_t kMaxTrustedPointers
constexpr uint64_t kCppHeapPointerTagShift
constexpr ExternalPointerTagRange kAnyInterceptorInfoExternalPointerTagRange(kFirstInterceptorInfoExternalPointerTag, kLastInterceptorInfoExternalPointerTag)
static V8_INLINE constexpr bool IsMaybeReadOnlyExternalPointerType(ExternalPointerTagRange tag_range)
constexpr bool kBuiltinCodeObjectsLiveInTrustedSpace
constexpr uint32_t kTrustedPointerHandleShift
V8_INLINE void PerformCastCheck(T *data)
constexpr uint32_t kCodePointerHandleShift
constexpr ExternalPointerTagRange kAnyManagedExternalPointerTagRange(kFirstManagedExternalPointerTag, kLastManagedExternalPointerTag)
const int kHeapObjectTag
Definition v8-internal.h:72
const int kSmiShiftSize
constexpr size_t kMaxCodePointers
constexpr bool kHaveIteratorCategory
@ kLastForeignExternalPointerTag
@ kApiIndexedPropertyDescriptorCallbackTag
@ kFirstMaybeReadOnlyExternalPointerTag
@ kExternalPointerEvacuationEntryTag
@ kFirstSharedExternalPointerTag
@ kApiNamedPropertyDefinerCallbackTag
@ kWasmExportedFunctionDataSignatureTag
@ kLastSharedExternalPointerTag
@ kApiIndexedPropertySetterCallbackTag
@ kLastExternalPointerTag
@ kMicrotaskCallbackDataTag
@ kApiIndexedPropertyGetterCallbackTag
@ kApiNamedPropertyDescriptorCallbackTag
@ kIcuSimpleDateFormatTag
@ kApiIndexedPropertyDefinerCallbackTag
@ kD8ModuleEmbedderDataTag
@ kExternalStringResourceTag
@ kExternalObjectValueTag
@ kApiNamedPropertyDeleterCallbackTag
@ kWasmIndirectFunctionTargetTag
@ kApiAccessCheckCallbackTag
@ kApiAbortScriptExecutionCallbackTag
@ kIcuLocalizedNumberFormatterTag
@ kApiNamedPropertyGetterCallbackTag
@ kApiIndexedPropertyEnumeratorCallbackTag
@ kWasmInternalFunctionCallTargetTag
@ kExternalPointerFreeEntryTag
@ kFirstInterceptorInfoExternalPointerTag
@ kLastManagedExternalPointerTag
@ kExternalPointerNullTag
@ kExternalStringResourceDataTag
@ kLastManagedResourceTag
@ kExternalPointerZappedEntryTag
@ kApiNamedPropertyQueryCallbackTag
@ kWasmTypeInfoNativeTypeTag
@ kEmbedderDataSlotPayloadTag
@ kFirstForeignExternalPointerTag
@ kDisplayNamesInternalTag
@ kFirstManagedResourceTag
@ kFirstManagedExternalPointerTag
@ kApiIndexedPropertyQueryCallbackTag
@ kApiIndexedPropertyDeleterCallbackTag
@ kLastInterceptorInfoExternalPointerTag
@ kIcuRelativeDateTimeFormatterTag
@ kNativeContextMicrotaskQueueTag
@ kLastMaybeReadOnlyExternalPointerTag
@ kArrayBufferExtensionTag
@ kIcuDateIntervalFormatTag
@ kFirstExternalPointerTag
@ kApiNamedPropertyEnumeratorCallbackTag
@ kFunctionTemplateInfoCallbackTag
const int kSmiValueSize
constexpr ExternalPointerTagRange kAnyForeignExternalPointerTagRange(kFirstForeignExternalPointerTag, kLastForeignExternalPointerTag)
constexpr bool SmiValuesAre32Bits()
constexpr IndirectPointerHandle kNullIndirectPointerHandle
static V8_INLINE constexpr bool IsManagedExternalPointerType(ExternalPointerTagRange tag_range)
void PrintFunctionCallbackInfo(void *function_callback_info)
uint32_t ExternalPointerHandle
constexpr size_t kTrustedPointerTableReservationSize
const intptr_t kSmiTagMask
Definition v8-internal.h:88
const int kHeapObjectTagSize
Definition v8-internal.h:74
return value
Definition map-inl.h:893
const int kSmiMaxValue
constexpr bool Is64()
constexpr bool kAllCodeObjectsLiveInTrustedSpace
uint32_t CppHeapPointerHandle
const int kSmiTag
Definition v8-internal.h:86
constexpr CodePointerHandle kNullCodePointerHandle
static constexpr Address kNullAddress
Definition v8-internal.h:53
constexpr CppHeapPointerHandle kNullCppHeapPointerHandle
constexpr int kGarbageCollectionReasonMaxValue
constexpr int kCodePointerTableEntrySize
static V8_INLINE constexpr Address IntToSmi(int value)
SmiTagging< kApiTaggedSize > PlatformSmiTagging
constexpr uint32_t kCodePointerHandleMarker
const int kSmiMinValue
V8_EXPORT bool ShouldThrowOnError(internal::Isolate *isolate)
constexpr bool kHaveIteratorConcept
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in allocation gc speed threshold for starting incremental marking via a task in percent of available threshold for starting incremental marking immediately in percent of available Use a single schedule for determining a marking schedule between JS and C objects schedules the minor GC task with kUserVisible priority max worker number of concurrent for NumberOfWorkerThreads start background threads that allocate memory 
concurrent_array_buffer_sweeping use parallel threads to clear weak refs in the atomic pause trace progress of the incremental marking trace object counts and memory usage * MB
Definition flags.cc:2197
constexpr uint64_t kExternalPointerShiftedTagMask
constexpr uint64_t kExternalPointerMarkBit
const int kApiTaggedSize
static V8_INLINE constexpr bool ExternalPointerCanBeEmpty(ExternalPointerTagRange tag_range)
constexpr bool PointerCompressionIsEnabled()
static void Perform(T *data)
static V8_INLINE constexpr bool IsValidSmi(uint64_t value)
static V8_INLINE constexpr bool IsValidSmi(int64_t value)
static V8_INLINE constexpr int SmiToInt(Address value)
static V8_INLINE constexpr bool IsValidSmi(T value)
static V8_INLINE constexpr bool IsValidSmi(T value)
static V8_INLINE constexpr int SmiToInt(Address value)
constexpr size_t Size() const
constexpr bool IsEmpty() const
constexpr bool operator==(const TagRange other) const
constexpr bool Contains(Tag tag) const
constexpr TagRange(Tag tag)
constexpr size_t hash_value() const
constexpr TagRange(Tag first, Tag last)
constexpr bool Contains(TagRange tag_range) const
#define T1(name, string, precedence)
Definition token.cc:28
#define T2(name, string, precedence)
Definition token.cc:30
Heap * heap_
#define V8_EXPORT
Definition v8config.h:800
#define V8_INLINE
Definition v8config.h:500
#define V8_LIKELY(condition)
Definition v8config.h:661