#ifndef V8_OBJECTS_SLOTS_H_
#define V8_OBJECTS_SLOTS_H_

#include "src/base/memory.h"
#include "src/common/globals.h"
#include "src/sandbox/isolate.h"

namespace v8 {
namespace internal {

class ExposedTrustedObject;
// A SlotBase object describes a kind of a memory "slot": a wrapper around an
// address that provides typed pointer arithmetic in units of the slot's data
// size. Subclasses implement the actual load/store operations.
template <typename Subclass, typename Data,
          size_t SlotDataAlignment = sizeof(Data)>
class SlotBase {
 public:
  using TData = Data;

  static constexpr size_t kSlotDataSize = sizeof(Data);
  static constexpr size_t kSlotDataAlignment = SlotDataAlignment;

  Subclass& operator++() {  // Prefix increment.
    ptr_ += kSlotDataSize;
    return *static_cast<Subclass*>(this);
  }
  Subclass operator++(int) {  // Postfix increment.
    Subclass result = *static_cast<Subclass*>(this);
    ptr_ += kSlotDataSize;
    return result;
  }
  Subclass& operator--() {  // Prefix decrement.
    ptr_ -= kSlotDataSize;
    return *static_cast<Subclass*>(this);
  }
  Subclass operator--(int) {  // Postfix decrement.
    Subclass result = *static_cast<Subclass*>(this);
    ptr_ -= kSlotDataSize;
    return result;
  }
  size_t operator-(const SlotBase& other) const {
    DCHECK_GE(ptr_, other.ptr_);
    return static_cast<size_t>((ptr_ - other.ptr_) / kSlotDataSize);
  }
  Subclass operator+(int i) const {
    return Subclass(ptr_ + i * kSlotDataSize);
  }
  friend Subclass operator+(int i, const Subclass& slot) {
    return Subclass(slot.ptr_ + i * kSlotDataSize);
  }
  Subclass& operator+=(int i) {
    ptr_ += i * kSlotDataSize;
    return *static_cast<Subclass*>(this);
  }
  Subclass operator-(int i) const {
    return Subclass(ptr_ - i * kSlotDataSize);
  }
  Subclass& operator-=(int i) {
    ptr_ -= i * kSlotDataSize;
    return *static_cast<Subclass*>(this);
  }
  void* ToVoidPtr() const { return reinterpret_cast<void*>(address()); }

  Address address() const { return ptr_; }

 protected:
  explicit SlotBase(Address ptr) : ptr_(ptr) {
    DCHECK(IsAligned(ptr, kSlotDataAlignment));
  }

 private:
  // The slot's address: usually an on-heap location, so generic code must
  // not treat it as a dereferenceable C++ pointer.
  Address ptr_;
};
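// Illustrative use of the pointer arithmetic above (a sketch; assumes the
// FullObjectSlot subclass declared below, and hypothetical `begin`/`end`
// addresses delimiting a contiguous run of full-pointer slots):
//
//   for (FullObjectSlot slot(begin); slot.address() < end; ++slot) {
//     // Each ++ advances the underlying address by kSlotDataSize bytes,
//     // i.e. sizeof(Address) for FullObjectSlot.
//   }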
// A FullObjectSlot instance describes a full-pointer-sized field ("slot")
// holding a tagged pointer (a Smi or a strong reference to a heap object).
class FullObjectSlot : public SlotBase<FullObjectSlot, Address> {
 public:
  using TObject = Tagged<Object>;

  // The tagged value stored in this slot is never a weak pointer.
  static constexpr bool kCanBeWeak = false;

  FullObjectSlot() : SlotBase(kNullAddress) {}
  explicit FullObjectSlot(Address ptr) : SlotBase(ptr) {}
  explicit FullObjectSlot(const Address* ptr)
      : SlotBase(reinterpret_cast<Address>(ptr)) {}

#if defined(V8_HOST_ARCH_32_BIT) || \
    defined(V8_HOST_ARCH_64_BIT) && !V8_COMPRESS_POINTERS_BOOL
  // Only valid when a tagged value occupies a full system pointer, i.e. on
  // 32-bit hosts or on 64-bit hosts without pointer compression.
  explicit FullObjectSlot(TaggedBase* ptr)
      : SlotBase(reinterpret_cast<Address>(ptr)) {}
#endif

  template <typename T>
  explicit FullObjectSlot(SlotBase<T, TData, kSlotDataAlignment> slot)
      : SlotBase(slot.address()) {}

  // Compares the memory representation of the value stored in the slot with
  // the given raw value.
  inline bool contains_map_value(Address raw_value) const;
  inline bool Relaxed_ContainsMapValue(Address raw_value) const;

  inline Tagged<Object> Relaxed_Load() const;
  inline Address Relaxed_Load_Raw() const;
};
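// Illustrative use (a sketch; `field_address` is a hypothetical address of
// a full-pointer field and `expected_map_word` a hypothetical raw value):
//
//   FullObjectSlot slot(field_address);
//   Tagged<Object> value = slot.Relaxed_Load();   // atomic, relaxed order
//   bool is_map = slot.contains_map_value(expected_map_word);  // raw compare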
// A FullMaybeObjectSlot instance describes a full-pointer-sized field
// ("slot") holding a tagged pointer that may be strong, weak, or cleared.
class FullMaybeObjectSlot
    : public SlotBase<FullMaybeObjectSlot, Address> {
 public:
  using TObject = Tagged<MaybeObject>;

  // The tagged value stored in this slot can be a weak pointer.
  static constexpr bool kCanBeWeak = true;

  FullMaybeObjectSlot() : SlotBase(kNullAddress) {}
  explicit FullMaybeObjectSlot(Address ptr) : SlotBase(ptr) {}
  explicit FullMaybeObjectSlot(Tagged<MaybeObject>* ptr)
      : SlotBase(reinterpret_cast<Address>(ptr)) {}

#if defined(V8_HOST_ARCH_32_BIT) || \
    defined(V8_HOST_ARCH_64_BIT) && !V8_COMPRESS_POINTERS_BOOL
  explicit FullMaybeObjectSlot(TaggedBase* ptr)
      : SlotBase(reinterpret_cast<Address>(ptr)) {}
#endif

  template <typename T>
  explicit FullMaybeObjectSlot(SlotBase<T, TData, kSlotDataAlignment> slot)
      : SlotBase(slot.address()) {}

  inline Tagged<MaybeObject> operator*() const;
  inline Address Relaxed_Load_Raw() const;
};
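// Unlike FullObjectSlot, this slot may hold a weak reference
// (kCanBeWeak == true), so loaded values must be unwrapped before being
// treated as strong pointers. Sketch with a hypothetical `field_address`:
//
//   FullMaybeObjectSlot slot(field_address);
//   Tagged<MaybeObject> maybe = *slot;  // strong, weak, or cleared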
// A FullHeapObjectSlot instance describes a full-pointer-sized field
// ("slot") holding a heap object pointer (strong or weak).
class FullHeapObjectSlot : public SlotBase<FullHeapObjectSlot, Address> {
 public:
  FullHeapObjectSlot() : SlotBase(kNullAddress) {}
  explicit FullHeapObjectSlot(Address ptr) : SlotBase(ptr) {}

#if defined(V8_HOST_ARCH_32_BIT) || \
    defined(V8_HOST_ARCH_64_BIT) && !V8_COMPRESS_POINTERS_BOOL
  explicit FullHeapObjectSlot(TaggedBase* ptr)
      : SlotBase(reinterpret_cast<Address>(ptr)) {}
#endif

  template <typename T>
  explicit FullHeapObjectSlot(SlotBase<T, TData, kSlotDataAlignment> slot)
      : SlotBase(slot.address()) {}
};
// An UnalignedSlot is a random-access iterator over values of type T stored
// at possibly unaligned addresses; all memory accesses go through
// base::ReadUnalignedValue / base::WriteUnalignedValue.
template <typename T>
class UnalignedSlot : public SlotBase<UnalignedSlot<T>, T, 1> {
 public:
  using difference_type = int;
  using iterator_category = std::random_access_iterator_tag;

  // A stand-in for "T&" that routes reads and writes through the unaligned
  // access helpers.
  class Reference {
   public:
    explicit Reference(Address address) : address_(address) {}
    Reference(const Reference&) V8_NOEXCEPT = default;

    Reference& operator=(const Reference& other) V8_NOEXCEPT {
      base::WriteUnalignedValue<T>(address_, other.value());
      return *this;
    }
    Reference& operator=(T value) {
      base::WriteUnalignedValue<T>(address_, value);
      return *this;
    }

    // References must be swappable so that std:: algorithms can permute the
    // underlying values.
    void swap(Reference& other) {
      T tmp = value();
      base::WriteUnalignedValue<T>(address_, other.value());
      base::WriteUnalignedValue<T>(other.address_, tmp);
    }
    friend void swap(Reference lhs, Reference rhs) { lhs.swap(rhs); }

    bool operator<(const Reference& other) const {
      return value() < other.value();
    }
    bool operator==(const Reference& other) const {
      return value() == other.value();
    }

    T value() const { return base::ReadUnalignedValue<T>(address_); }

   private:
    Address address_;
  };

  explicit UnalignedSlot(Address address)
      : SlotBase<UnalignedSlot<T>, T, 1>(address) {}
  explicit UnalignedSlot(T* address)
      : SlotBase<UnalignedSlot<T>, T, 1>(reinterpret_cast<Address>(address)) {}

  Reference operator*() const { return Reference(this->address()); }

  friend difference_type operator-(UnalignedSlot a, UnalignedSlot b) {
    return static_cast<int>(a.address() - b.address()) / sizeof(T);
  }
};
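// Because UnalignedSlot<T> models a random-access iterator whose Reference
// proxy is assignable, comparable, and swappable, standard algorithms can
// run directly on unaligned storage. Sketch with a hypothetical `base`
// address holding `n` unaligned uint64_t values:
//
//   std::sort(UnalignedSlot<uint64_t>(base),
//             UnalignedSlot<uint64_t>(base + n * sizeof(uint64_t)));
//   // Element swaps are routed through base::Read/WriteUnalignedValue.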
// An ExternalPointerSlot instance describes a field ("slot") holding a
// pointer to memory outside the V8 heap. With pointer compression, the slot
// holds an ExternalPointerHandle into the external pointer table rather
// than the raw address, together with the expected tag range.
class ExternalPointerSlot
    : public SlotBase<ExternalPointerSlot, ExternalPointer_t,
                      kTaggedSize /* slot alignment */> {
 public:
  ExternalPointerSlot()
      : SlotBase(kNullAddress)
#ifdef V8_COMPRESS_POINTERS
        ,
        tag_range_()
#endif
  {
  }

  ExternalPointerSlot(Address ptr, ExternalPointerTag tag)
      : SlotBase(ptr)
#ifdef V8_COMPRESS_POINTERS
        ,
        tag_range_(tag)
#endif
  {
  }

  ExternalPointerSlot(Address ptr, ExternalPointerTagRange tag_range)
      : SlotBase(ptr)
#ifdef V8_COMPRESS_POINTERS
        ,
        tag_range_(tag_range)
#endif
  {
  }

  template <ExternalPointerTag tag>
  explicit ExternalPointerSlot(ExternalPointerMember<tag>* member)
      : SlotBase(member->storage_address())
#ifdef V8_COMPRESS_POINTERS
        ,
        tag_range_(tag)
#endif
  {
  }

#ifdef V8_COMPRESS_POINTERS
  // Whether the slot stores an ExternalPointerHandle into the external
  // pointer table instead of the pointer itself.
  bool HasExternalPointerHandle() const { return V8_ENABLE_SANDBOX_BOOL; }
#endif

  inline Address load(IsolateForSandbox isolate);
  inline void store(IsolateForSandbox isolate, Address value,
                    ExternalPointerTag tag);

#ifdef V8_COMPRESS_POINTERS
  ExternalPointerTagRange tag_range() const { return tag_range_; }
  inline bool ExactTagIsKnown() const;

  // Valid only if the slot's tag range contains exactly one tag.
  ExternalPointerTag exact_tag() const {
    DCHECK(ExactTagIsKnown());
    return tag_range_.first;
  }

 private:
  ExternalPointerHandle* handle_location() const {
    DCHECK(HasExternalPointerHandle());
    return reinterpret_cast<ExternalPointerHandle*>(address());
  }

  ExternalPointerTagRange tag_range_;
#endif  // V8_COMPRESS_POINTERS
};
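// Illustrative use (a sketch; `field_address`, `isolate`, and `new_value`
// are hypothetical). The tag passed at construction and on store is checked
// against the pointer-table entry when the sandbox is enabled:
//
//   ExternalPointerSlot slot(field_address, kArrayBufferExtensionTag);
//   Address raw = slot.load(isolate);
//   slot.store(isolate, new_value, kArrayBufferExtensionTag);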
// A CppHeapPointerSlot instance describes a field ("slot") holding a
// pointer to a C++ object on the managed C++ (cppgc) heap.
class CppHeapPointerSlot
    : public SlotBase<CppHeapPointerSlot, CppHeapPointer_t,
                      sizeof(CppHeapPointer_t)> {
 public:
  explicit CppHeapPointerSlot(Address ptr) : SlotBase(ptr) {}

#ifdef V8_COMPRESS_POINTERS
  // With pointer compression, the slot stores a CppHeapPointerHandle into
  // the cpp-heap pointer table.
  inline CppHeapPointerHandle Relaxed_LoadHandle() const;
  inline void Relaxed_StoreHandle(CppHeapPointerHandle handle) const;
  inline void Release_StoreHandle(CppHeapPointerHandle handle) const;
#endif

  // Loads the pointer only if the table entry's tag is within `tag_range`.
  inline Address try_load(IsolateForPointerCompression isolate,
                          CppHeapPointerTagRange tag_range) const;
  inline void store(IsolateForPointerCompression isolate, Address value,
                    CppHeapPointerTag tag) const;
  inline void init() const;
};
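// Illustrative use (a sketch; `field_address`, `isolate`, and
// `embedder_tag_range` are hypothetical; try_load is assumed here to yield
// a null value when the entry's tag falls outside the given range):
//
//   CppHeapPointerSlot slot(field_address);
//   Address obj = slot.try_load(isolate, embedder_tag_range);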
// An IndirectPointerSlot instance describes a field ("slot") holding an
// IndirectPointerHandle: an index into a pointer table through which
// trusted objects are referenced when the sandbox is enabled.
class IndirectPointerSlot
    : public SlotBase<IndirectPointerSlot, IndirectPointerHandle,
                      kTaggedSize /* slot alignment */> {
 public:
  IndirectPointerSlot()
      : SlotBase(kNullAddress)
#ifdef V8_ENABLE_SANDBOX
        ,
        tag_(kIndirectPointerNullTag)
#endif
  {
  }

  IndirectPointerSlot(Address ptr, IndirectPointerTag tag)
      : SlotBase(ptr)
#ifdef V8_ENABLE_SANDBOX
        ,
        tag_(tag)
#endif
  {
  }

  inline Tagged<Object> load(IsolateForSandbox isolate) const;
  inline void store(Tagged<ExposedTrustedObject> value) const;

  inline IndirectPointerHandle Relaxed_LoadHandle() const;
  inline IndirectPointerHandle Acquire_LoadHandle() const;
  inline void Relaxed_StoreHandle(IndirectPointerHandle handle) const;
  inline void Release_StoreHandle(IndirectPointerHandle handle) const;

  // kAllowUnpublishedEntries relaxes the tag check for table entries that
  // have not been published yet; the default requires an exact tag match.
  template <TagCheckStrictness allow_unpublished = kRequireExactMatch>
  inline Tagged<Object> Relaxed_Load(IsolateForSandbox isolate) const;
#ifdef V8_ENABLE_SANDBOX
  inline Tagged<Object> Relaxed_Load_AllowUnpublished(
      IsolateForSandbox isolate) const;
#endif
  template <TagCheckStrictness allow_unpublished = kRequireExactMatch>
  inline Tagged<Object> Acquire_Load(IsolateForSandbox isolate) const;

#ifdef V8_ENABLE_SANDBOX
  IndirectPointerTag tag() const { return tag_; }

  inline Tagged<Object> ResolveHandle(IndirectPointerHandle handle,
                                      IsolateForSandbox isolate) const;

 private:
  IndirectPointerTag tag_;
#endif  // V8_ENABLE_SANDBOX
};
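// Illustrative use (a sketch; `field_address` and `isolate` are
// hypothetical, and `tag` names the expected trusted-object type). The
// 32-bit handle stored in the slot is resolved through a pointer table:
//
//   IndirectPointerSlot slot(field_address, tag);
//   Tagged<Object> obj = slot.Relaxed_Load(isolate);
//   // The default TagCheckStrictness (kRequireExactMatch) enforces `tag`.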
class WritableJitAllocation;

// A WriteProtectedSlot wraps another slot type and routes writes through a
// WritableJitAllocation so that slots in write-protected (e.g. JIT code)
// memory can be mutated.
template <typename SlotT>
class WriteProtectedSlot : public SlotT {
 public:
  using TObject = typename SlotT::TObject;
  using SlotT::kCanBeWeak;

  WriteProtectedSlot(WritableJitAllocation& jit_allocation, Address ptr)
      : SlotT(ptr), jit_allocation_(jit_allocation) {}

  inline TObject Relaxed_Load() const;
  TObject Relaxed_Load(PtrComprCageBase cage_base) const {
    return SlotT::Relaxed_Load(cage_base);
  }

  inline void Relaxed_Store(TObject value) const;

 private:
  WritableJitAllocation& jit_allocation_;
};
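// Illustrative use (a sketch; `jit_alloc` is a hypothetical
// WritableJitAllocation for a code page that was temporarily made writable,
// and `slot_address` lies inside that allocation):
//
//   WriteProtectedSlot<FullObjectSlot> slot(jit_alloc, slot_address);
//   slot.Relaxed_Store(value);  // write is routed via jit_allocation_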
}  // namespace internal
}  // namespace v8

#endif  // V8_OBJECTS_SLOTS_H_