v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
heap-object.h
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_HEAP_OBJECT_H_
#define V8_OBJECTS_HEAP_OBJECT_H_

#include "src/base/macros.h"
#include "src/objects/casting.h"
#include "src/objects/slots.h"
#include "src/sandbox/isolate.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
namespace v8 {
namespace internal {

class Heap;
class PrimitiveHeapObject;
class ExternalPointerSlot;
class IndirectPointerSlot;
class ExposedTrustedObject;
class ObjectVisitor;
class WritableFreeSpace;

V8_OBJECT class HeapObjectLayout {
 public:
  HeapObjectLayout() = delete;

  // [map]: Contains a map which contains the object's reflective
  // information.
  inline Tagged<Map> map() const;
  inline Tagged<Map> map(AcquireLoadTag) const;

  inline MapWord map_word(RelaxedLoadTag) const;

  inline void set_map(Isolate* isolate, Tagged<Map> value);
  template <typename IsolateT>
  inline void set_map(IsolateT* isolate, Tagged<Map> value, ReleaseStoreTag);

  // This method behaves the same as `set_map` but marks the map transition as
  // safe for the concurrent marker (object layout doesn't change) during
  // verification.
  template <typename IsolateT>
  inline void set_map_safe_transition(IsolateT* isolate, Tagged<Map> value,
                                      ReleaseStoreTag);

  inline void set_map_safe_transition_no_write_barrier(
      Isolate* isolate, Tagged<Map> value, RelaxedStoreTag = kRelaxedStore);

  // Initialize the map immediately after the object is allocated.
  // Do not use this outside Heap.
  template <typename IsolateT>
  inline void set_map_after_allocation(
      IsolateT* isolate, Tagged<Map> value,
      WriteBarrierMode mode = UPDATE_WRITE_BARRIER);

  // The no-write-barrier version. This is OK if the object is white and in
  // new space, or if the value is an immortal immutable object, like the maps
  // of primitive (non-JS) objects like strings, heap numbers etc.
  inline void set_map_no_write_barrier(Isolate* isolate, Tagged<Map> value,
                                       RelaxedStoreTag = kRelaxedStore);

  // Access the map word using acquire load and release store.
  inline void set_map_word_forwarded(Tagged<HeapObject> target_object,
                                     ReleaseStoreTag);

  // Set the map word using relaxed store.
  inline void set_map_word_forwarded(Tagged<HeapObject> target_object,
                                     RelaxedStoreTag);

  // Returns the tagged pointer to this HeapObject.
  // TODO(leszeks): Consider bottlenecking this through Tagged<>.
  inline Address ptr() const { return address() + kHeapObjectTag; }

  // Returns the address of this HeapObject.
  inline Address address() const { return reinterpret_cast<Address>(this); }

  // This is slower than GetReadOnlyRoots, but safe to call during
  // bootstrapping.
  inline ReadOnlyRoots EarlyGetReadOnlyRoots() const;

  // Returns the heap object's size in bytes.
  inline int Size() const;

  // Given a heap object's map pointer, returns the heap size in bytes.
  // Useful when the map pointer field is used for other purposes.
  // GC internal.
  V8_EXPORT_PRIVATE int SizeFromMap(Tagged<Map> map) const;

  // Returns the write barrier mode for this object. Callers of this function
  // must be able to present a reference to a DisallowGarbageCollection
  // object as a sign that they are not going to use this function
  // from code that allocates and thus invalidates the returned write
  // barrier mode.
  inline WriteBarrierMode GetWriteBarrierMode(
      const DisallowGarbageCollection& promise);

#ifdef OBJECT_PRINT
  void PrintHeader(std::ostream& os, const char* id);
#endif

 private:
  friend class HeapObject;
  friend class Heap;
  friend class CodeStubAssembler;

  // HeapObjects shouldn't be copied or moved by C++ code, only by the GC.
  // TODO(leszeks): Consider making these non-deleted if the GC starts using
  // HeapObjectLayout rather than manual per-byte access.
  HeapObjectLayout(const HeapObjectLayout&) V8_NOEXCEPT = delete;
  HeapObjectLayout(HeapObjectLayout&&) V8_NOEXCEPT = delete;
  HeapObjectLayout& operator=(const HeapObjectLayout&) V8_NOEXCEPT = delete;
  HeapObjectLayout& operator=(HeapObjectLayout&&) V8_NOEXCEPT = delete;

  TaggedMember<Map> map_;
} V8_OBJECT_END;

static_assert(sizeof(HeapObjectLayout) == kTaggedSize);

inline bool operator==(const HeapObjectLayout* obj, StrongTaggedBase ptr) {
  return Tagged<HeapObject>(obj) == ptr;
}
inline bool operator==(StrongTaggedBase ptr, const HeapObjectLayout* obj) {
  return ptr == Tagged<HeapObject>(obj);
}
inline bool operator!=(const HeapObjectLayout* obj, StrongTaggedBase ptr) {
  return Tagged<HeapObject>(obj) != ptr;
}
inline bool operator!=(StrongTaggedBase ptr, const HeapObjectLayout* obj) {
  return ptr != Tagged<HeapObject>(obj);
}

template <typename T>
struct ObjectTraits {
  using BodyDescriptor = typename T::BodyDescriptor;
};

// HeapObject is the superclass for all classes describing heap allocated
// objects.
class HeapObject : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
 public:
  constexpr HeapObject() = default;

  // [map]: Contains a map which contains the object's reflective
  // information.
  DECL_GETTER(map, Tagged<Map>)
  inline void set_map(Isolate* isolate, Tagged<Map> value);

  // This method behaves the same as `set_map` but marks the map transition as
  // safe for the concurrent marker (object layout doesn't change) during
  // verification.
  template <typename IsolateT>
  inline void set_map_safe_transition(IsolateT* isolate, Tagged<Map> value);

  inline ObjectSlot map_slot() const;

  // The no-write-barrier version. This is OK if the object is white and in
  // new space, or if the value is an immortal immutable object, like the maps
  // of primitive (non-JS) objects like strings, heap numbers etc.
  inline void set_map_no_write_barrier(Isolate* isolate, Tagged<Map> value,
                                       RelaxedStoreTag = kRelaxedStore);
  inline void set_map_no_write_barrier(Isolate* isolate, Tagged<Map> value,
                                       ReleaseStoreTag);
  inline void set_map_safe_transition_no_write_barrier(
      Isolate* isolate, Tagged<Map> value, RelaxedStoreTag = kRelaxedStore);

  // Access the map using acquire load and release store.
  DECL_ACQUIRE_GETTER(map, Tagged<Map>)
  template <typename IsolateT>
  inline void set_map(IsolateT* isolate, Tagged<Map> value, ReleaseStoreTag);
  template <typename IsolateT>
  inline void set_map_safe_transition(IsolateT* isolate, Tagged<Map> value,
                                      ReleaseStoreTag);

  // Compare-and-swaps map word using release store; returns true if the map
  // word was actually swapped.
  inline bool release_compare_and_swap_map_word_forwarded(
      MapWord old_map_word, Tagged<HeapObject> new_target_object);

  // Compare-and-swaps map word using relaxed store; returns true if the map
  // word was actually swapped.
  inline bool relaxed_compare_and_swap_map_word_forwarded(
      MapWord old_map_word, Tagged<HeapObject> new_target_object);

  // Initialize the map immediately after the object is allocated.
  // Do not use this outside Heap.
  template <typename IsolateT>
  inline void set_map_after_allocation(
      IsolateT* isolate, Tagged<Map> value,
      WriteBarrierMode mode = UPDATE_WRITE_BARRIER);

  static inline void SetFillerMap(const WritableFreeSpace& writable_page,
                                  Tagged<Map> value);

  // During garbage collection, the map word of a heap object does not
  // necessarily contain a map pointer.
  DECL_RELAXED_GETTER(map_word, MapWord)
  inline void set_map_word(Tagged<Map> map, RelaxedStoreTag);
  inline void set_map_word_forwarded(Tagged<HeapObject> target_object,
                                     RelaxedStoreTag);

  // Access the map word using acquire load and release store.
  DECL_ACQUIRE_GETTER(map_word, MapWord)
  inline void set_map_word(Tagged<Map> map, ReleaseStoreTag);
  inline void set_map_word_forwarded(Tagged<HeapObject> target_object,
                                     ReleaseStoreTag);

  // This is slower than GetReadOnlyRoots, but safe to call during
  // bootstrapping.
  inline ReadOnlyRoots EarlyGetReadOnlyRoots() const;

  // Converts an address to a HeapObject pointer.
  static inline Tagged<HeapObject> FromAddress(Address address) {
    DCHECK_TAG_ALIGNED(address);
    return Tagged<HeapObject>(address + kHeapObjectTag);
  }

  // Returns the address of this HeapObject.
  inline Address address() const { return ptr() - kHeapObjectTag; }

  // Returns the heap object's size in bytes.
  DECL_GETTER(Size, int)

  // Given a heap object's map pointer, returns the heap size in bytes.
  // Useful when the map pointer field is used for other purposes.
  // GC internal.
  V8_EXPORT_PRIVATE int SizeFromMap(Tagged<Map> map) const;

  template <class T>
  inline T ReadField(size_t offset) const
    requires(std::is_arithmetic_v<T> || std::is_enum_v<T> ||
             std::is_pointer_v<T>)
  {
    return ReadMaybeUnalignedValue<T>(field_address(offset));
  }

  template <class T>
  inline void WriteField(size_t offset, T value) const
    requires(std::is_arithmetic_v<T> || std::is_enum_v<T> ||
             std::is_pointer_v<T>)
  {
    return WriteMaybeUnalignedValue<T>(field_address(offset), value);
  }
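
  // Illustrative sketch (not part of the original header): raw typed field
  // access through the helpers above; `kExampleOffset` is a hypothetical byte
  // offset into the object.
  //
  //   double value = obj->ReadField<double>(kExampleOffset);
  //   obj->WriteField<double>(kExampleOffset, value * 2.0);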

  // Atomically reads a field using relaxed memory ordering. Can only be used
  // with integral types whose size is <= kTaggedSize (to guarantee alignment).
  template <class T>
  inline T Relaxed_ReadField(size_t offset) const
    requires((std::is_arithmetic_v<T> || std::is_enum_v<T>) &&
             !std::is_floating_point_v<T>);

  // Atomically writes a field using relaxed memory ordering. Can only be used
  // with integral types whose size is <= kTaggedSize (to guarantee alignment).
  template <class T>
  inline void Relaxed_WriteField(size_t offset, T value)
    requires((std::is_arithmetic_v<T> || std::is_enum_v<T>) &&
             !std::is_floating_point_v<T>);

  // Atomically reads a field using acquire memory ordering. Can only be used
  // with integral types whose size is <= kTaggedSize (to guarantee alignment).
  template <class T>
  inline T Acquire_ReadField(size_t offset) const
    requires((std::is_arithmetic_v<T> || std::is_enum_v<T>) &&
             !std::is_floating_point_v<T>);

  // Atomically compares and swaps a field using seq cst memory ordering.
  // Contains the required logic to properly handle number comparison.
  template <typename CompareAndSwapImpl>
  static Tagged<Object> SeqCst_CompareAndSwapField(
      Tagged<Object> expected_value, Tagged<Object> new_value,
      CompareAndSwapImpl compare_and_swap_impl);

  //
  // SandboxedPointer_t field accessors.
  //
  inline Address ReadSandboxedPointerField(size_t offset,
                                           PtrComprCageBase cage_base) const;
  inline void WriteSandboxedPointerField(size_t offset,
                                         PtrComprCageBase cage_base,
                                         Address value);
  inline void WriteSandboxedPointerField(size_t offset, Isolate* isolate,
                                         Address value);

  //
  // BoundedSize field accessors.
  //
  inline size_t ReadBoundedSizeField(size_t offset) const;
  inline void WriteBoundedSizeField(size_t offset, size_t value);

  //
  // ExternalPointer_t field accessors.
  //
  template <ExternalPointerTag tag>
  inline void InitExternalPointerField(
      size_t offset, IsolateForSandbox isolate, Address value,
      WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
  template <ExternalPointerTagRange tag_range>
  inline Address ReadExternalPointerField(size_t offset,
                                          IsolateForSandbox isolate) const;
  // Similar to `ReadExternalPointerField()` but uses the CppHeapPointerTable.
  template <CppHeapPointerTag lower_bound, CppHeapPointerTag upper_bound>
  inline Address ReadCppHeapPointerField(
      size_t offset, IsolateForPointerCompression isolate) const;
  inline Address ReadCppHeapPointerField(
      size_t offset, IsolateForPointerCompression isolate,
      CppHeapPointerTagRange tag_range) const;
  template <ExternalPointerTag tag>
  inline void WriteExternalPointerField(size_t offset,
                                        IsolateForSandbox isolate,
                                        Address value);
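
  // Illustrative sketch (not part of the original header): when the sandbox is
  // enabled these accessors go through the external pointer table, so every
  // access names the pointer's type tag. `kExampleTag`, `kExampleOffset` and
  // `raw_external_address` are hypothetical.
  //
  //   obj->WriteExternalPointerField<kExampleTag>(kExampleOffset, isolate,
  //                                               raw_external_address);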

  // Set up a lazily-initialized external pointer field. If the sandbox is
  // enabled, this will set the field to kNullExternalPointerHandle. It will
  // *not* allocate an entry in the external pointer table. That will only
  // happen on the first call to WriteLazilyInitializedExternalPointerField. If
  // the sandbox is disabled, this is equivalent to InitExternalPointerField
  // with a nullptr value.
  inline void SetupLazilyInitializedExternalPointerField(size_t offset);

  // Returns true if the lazily-initialized external pointer field has been
  // initialized. If the sandbox is enabled, returns true if
  // the field is not equal to kNullExternalPointerHandle (this check will
  // *not* try to read the actual value from the table). If the sandbox
  // is disabled, returns true if the field is not equal to kNullAddress.
  inline bool IsLazilyInitializedExternalPointerFieldInitialized(
      size_t offset) const;

  // Writes and possibly initializes a lazily-initialized external pointer
  // field. When the sandbox is enabled, a lazily initialized external pointer
  // field initially contains the kNullExternalPointerHandle and will only be
  // properly initialized (i.e. allocate an entry in the external pointer table)
  // once a value is written into it for the first time. If the sandbox is
  // disabled, this is equivalent to WriteExternalPointerField.
  template <ExternalPointerTag tag>
  inline void WriteLazilyInitializedExternalPointerField(
      size_t offset, IsolateForSandbox isolate, Address value);

  inline void SetupLazilyInitializedCppHeapPointerField(size_t offset);
  template <CppHeapPointerTag tag>
  inline void WriteLazilyInitializedCppHeapPointerField(
      size_t offset, IsolateForPointerCompression isolate, Address value);
  inline void WriteLazilyInitializedCppHeapPointerField(
      size_t offset, IsolateForPointerCompression isolate, Address value,
      CppHeapPointerTag tag);

#if V8_ENABLE_SANDBOX
  //
  // Indirect pointers.
  //
  // These are only available when the sandbox is enabled, in which case they
  // are the under-the-hood implementation of trusted pointers.
  inline void InitSelfIndirectPointerField(
      size_t offset, IsolateForSandbox isolate,
      TrustedPointerPublishingScope* opt_publishing_scope);
#endif  // V8_ENABLE_SANDBOX

  // Trusted pointers.
  //
  // A pointer to a trusted object. When the sandbox is enabled, these are
  // indirect pointers using the TrustedPointerTable (TPT). When the sandbox
  // is disabled, they are regular tagged pointers. They must always point to an
  // ExposedTrustedObject as (only) these objects can be referenced through the
  // trusted pointer table.
  template <IndirectPointerTag tag>
  inline Tagged<ExposedTrustedObject> ReadTrustedPointerField(
      size_t offset, IsolateForSandbox isolate) const;
  template <IndirectPointerTag tag>
  inline Tagged<ExposedTrustedObject> ReadTrustedPointerField(
      size_t offset, IsolateForSandbox isolate, AcquireLoadTag) const;
  // Like ReadTrustedPointerField, but if the field is cleared, this will
  // return Smi::zero().
  template <IndirectPointerTag tag>
  inline Tagged<Object> ReadMaybeEmptyTrustedPointerField(
      size_t offset, IsolateForSandbox isolate, AcquireLoadTag) const;

  template <IndirectPointerTag tag>
  inline void WriteTrustedPointerField(size_t offset,
                                       Tagged<ExposedTrustedObject> value);

  // Trusted pointer fields can be cleared/empty, in which case they no longer
  // point to any object. When the sandbox is enabled, this will set the field's
  // indirect pointer handle to the null handle (referencing the zeroth entry
  // in the TrustedPointerTable which just contains nullptr). When the sandbox
  // is disabled, this will set the field to Smi::zero().
  inline bool IsTrustedPointerFieldEmpty(size_t offset) const;
  inline bool IsTrustedPointerFieldUnpublished(size_t offset,
                                               IndirectPointerTag tag,
                                               IsolateForSandbox isolate) const;
  inline void ClearTrustedPointerField(size_t offset);
  inline void ClearTrustedPointerField(size_t offset, ReleaseStoreTag);
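
  // Illustrative sketch (not part of the original header): a trusted pointer
  // field resolves to the referenced ExposedTrustedObject (through the TPT
  // when the sandbox is enabled), and an empty field must be checked for
  // explicitly. `kExampleTag` and `kExampleOffset` are hypothetical.
  //
  //   if (!obj->IsTrustedPointerFieldEmpty(kExampleOffset)) {
  //     Tagged<ExposedTrustedObject> trusted =
  //         obj->ReadTrustedPointerField<kExampleTag>(kExampleOffset, isolate);
  //     // ... use `trusted` ...
  //   }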

  // Code pointers.
  //
  // These are special versions of trusted pointers that always point to Code
  // objects. When the sandbox is enabled, they are indirect pointers using the
  // code pointer table (CPT) instead of the TrustedPointerTable. When the
  // sandbox is disabled, they are regular tagged pointers.
  inline Tagged<Code> ReadCodePointerField(size_t offset,
                                           IsolateForSandbox isolate) const;
  inline void WriteCodePointerField(size_t offset, Tagged<Code> value);

  inline bool IsCodePointerFieldEmpty(size_t offset) const;
  inline void ClearCodePointerField(size_t offset);

  inline Address ReadCodeEntrypointViaCodePointerField(
      size_t offset, CodeEntrypointTag tag) const;
  inline void WriteCodeEntrypointViaCodePointerField(size_t offset,
                                                     Address value,
                                                     CodeEntrypointTag tag);
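
  // Illustrative sketch (not part of the original header): code pointer fields
  // resolve to Code objects, and the entrypoint can be read directly via the
  // code pointer table without materializing the Code object.
  // `kExampleOffset` and `kExampleEntrypointTag` are hypothetical.
  //
  //   Tagged<Code> code = obj->ReadCodePointerField(kExampleOffset, isolate);
  //   Address entry = obj->ReadCodeEntrypointViaCodePointerField(
  //       kExampleOffset, kExampleEntrypointTag);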

  // JSDispatchHandles.
  //
  // These are references to entries in the JSDispatchTable, which contain the
  // current code for a function.
  template <typename ObjectType>
  static inline JSDispatchHandle AllocateAndInstallJSDispatchHandle(
      ObjectType host, size_t offset, Isolate* isolate,
      uint16_t parameter_count, DirectHandle<Code> code,
      WriteBarrierMode mode = UPDATE_WRITE_BARRIER);

  // Returns the field at offset in obj, as a read/write Object reference.
  // Does no checking, and is safe to use during GC, while maps are invalid.
  // Does not invoke write barrier, so should only be assigned to
  // during marking GC.
  inline ObjectSlot RawField(int byte_offset) const;
  inline MaybeObjectSlot RawMaybeWeakField(int byte_offset) const;
  inline InstructionStreamSlot RawInstructionStreamField(int byte_offset) const;
  inline ExternalPointerSlot RawExternalPointerField(
      int byte_offset, ExternalPointerTagRange tag_range) const;
  inline CppHeapPointerSlot RawCppHeapPointerField(int byte_offset) const;
  inline IndirectPointerSlot RawIndirectPointerField(
      int byte_offset, IndirectPointerTag tag) const;

  // Returns the write barrier mode for this object. Callers of this function
  // must be able to present a reference to a DisallowGarbageCollection
  // object as a sign that they are not going to use this function
  // from code that allocates and thus invalidates the returned write
  // barrier mode.
  inline WriteBarrierMode GetWriteBarrierMode(
      const DisallowGarbageCollection& promise);
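
  // Illustrative sketch (not part of the original header): callers prove they
  // will not allocate (which could invalidate the returned mode) by holding a
  // DisallowGarbageCollection scope while querying it.
  //
  //   DisallowGarbageCollection no_gc;
  //   WriteBarrierMode mode = obj->GetWriteBarrierMode(no_gc);
  //   // ... perform stores using `mode` ...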

  // Dispatched behavior.
  void HeapObjectShortPrint(std::ostream& os);
  void Print();
  static void Print(Tagged<Object> obj);
  static void Print(Tagged<Object> obj, std::ostream& os);
#ifdef OBJECT_PRINT
  void PrintHeader(std::ostream& os, const char* id);
#endif

#ifdef VERIFY_HEAP
  inline void VerifyObjectField(Isolate* isolate, int offset);
  inline void VerifySmiField(int offset);
  inline void VerifyMaybeObjectField(Isolate* isolate, int offset);

  // Verify a pointer is a valid HeapObject pointer that points to object
  // areas in the heap.
  static void VerifyHeapPointer(Isolate* isolate, Tagged<Object> p);
  static void VerifyCodePointer(Isolate* isolate, Tagged<Object> p);
#endif

  static inline AllocationAlignment RequiredAlignment(Tagged<Map> map);
  bool inline CheckRequiredAlignment(PtrComprCageBase cage_base) const;

  // Whether the object needs rehashing. That is the case if the object's
  // content depends on v8_flags.hash_seed. When the object is deserialized into
  // a heap with a different hash seed, these objects need to adapt.
  bool NeedsRehashing(InstanceType instance_type) const;
  bool NeedsRehashing(PtrComprCageBase cage_base) const;

  // Rehashing support is not implemented for all objects that need rehashing.
  // With objects that need rehashing but cannot be rehashed, rehashing has to
  // be disabled.
  bool CanBeRehashed(PtrComprCageBase cage_base) const;

  // Rehash the object based on the layout inferred from its map.
  template <typename IsolateT>
  void RehashBasedOnMap(IsolateT* isolate);

  // Layout description.
  static constexpr int kMapOffset = offsetof(HeapObjectLayout, map_);
  static constexpr int kHeaderSize = sizeof(HeapObjectLayout);

  static_assert(kMapOffset == Internals::kHeapObjectMapOffset);

  inline Address GetFieldAddress(int field_offset) const;

  HeapObject* operator->() { return this; }
  const HeapObject* operator->() const { return this; }

 protected:
  struct SkipTypeCheckTag {};
  friend class Tagged<HeapObject>;
  V8_INLINE constexpr HeapObject(Address ptr, HeapObject::SkipTypeCheckTag)
      : TaggedImpl(ptr) {}
  explicit inline HeapObject(Address ptr);

  // Static overwrites of TaggedImpl's IsSmi/IsHeapObject, to avoid conflicts
  // with IsSmi(Tagged<HeapObject>) inside HeapObject subclasses' methods.
  template <typename T>
  static bool IsSmi(T obj);
  template <typename T>
  static bool IsHeapObject(T obj);

  inline Address field_address(size_t offset) const {
    return ptr() + offset - kHeapObjectTag;
  }

 private:
  enum class VerificationMode {
    kSafeMapTransition,
    kPotentialLayoutChange,
  };

  enum class EmitWriteBarrier {
    kYes,
    kNo,
  };

  template <EmitWriteBarrier emit_write_barrier, typename MemoryOrder,
            typename IsolateT>
  V8_INLINE void set_map(IsolateT* isolate, Tagged<Map> value,
                         MemoryOrder order, VerificationMode mode);
};

template <typename T>
// static
bool HeapObject::IsSmi(T obj) {
  return i::IsSmi(obj);
}
template <typename T>
// static
bool HeapObject::IsHeapObject(T obj) {
  return i::IsHeapObject(obj);
}

// Define Tagged<HeapObject> now that HeapObject exists.
constexpr HeapObject Tagged<HeapObject>::operator*() const {
  return ToRawPtr();
}
constexpr detail::TaggedOperatorArrowRef<HeapObject>
Tagged<HeapObject>::operator->() const {
  return detail::TaggedOperatorArrowRef<HeapObject>{ToRawPtr()};
}
constexpr HeapObject Tagged<HeapObject>::ToRawPtr() const {
  return HeapObject(this->ptr(), HeapObject::SkipTypeCheckTag{});
}

// Overload Is* predicates for HeapObject.
#define IS_TYPE_FUNCTION_DECL(Type)                                            \
  V8_INLINE bool Is##Type(Tagged<HeapObject> obj);                             \
  V8_INLINE bool Is##Type(Tagged<HeapObject> obj, PtrComprCageBase cage_base); \
  V8_INLINE bool Is##Type(HeapObject obj);                                     \
  V8_INLINE bool Is##Type(HeapObject obj, PtrComprCageBase cage_base);         \
  V8_INLINE bool Is##Type(const HeapObjectLayout* obj);                        \
  V8_INLINE bool Is##Type(const HeapObjectLayout* obj,                         \
                          PtrComprCageBase cage_base);
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
IS_TYPE_FUNCTION_DECL(HashTableBase)
IS_TYPE_FUNCTION_DECL(SmallOrderedHashTable)
IS_TYPE_FUNCTION_DECL(PropertyDictionary)
#undef IS_TYPE_FUNCTION_DECL

// Most calls to Is<Oddball> should go via the Tagged<Object> overloads, with
// an Isolate/LocalIsolate/ReadOnlyRoots parameter.
#define IS_TYPE_FUNCTION_DECL(Type, Value, _)                             \
  V8_INLINE bool Is##Type(Tagged<HeapObject> obj);                        \
  V8_INLINE bool Is##Type(HeapObject obj);                                \
  V8_INLINE bool Is##Type(const HeapObjectLayout* obj, Isolate* isolate); \
  V8_INLINE bool Is##Type(const HeapObjectLayout* obj);
ODDBALL_LIST(IS_TYPE_FUNCTION_DECL)
HOLE_LIST(IS_TYPE_FUNCTION_DECL)
IS_TYPE_FUNCTION_DECL(NullOrUndefined, , /* unused */)
#undef IS_TYPE_FUNCTION_DECL

#define DECL_STRUCT_PREDICATE(NAME, Name, name)                                \
  V8_INLINE bool Is##Name(Tagged<HeapObject> obj);                             \
  V8_INLINE bool Is##Name(Tagged<HeapObject> obj, PtrComprCageBase cage_base); \
  V8_INLINE bool Is##Name(HeapObject obj);                                     \
  V8_INLINE bool Is##Name(HeapObject obj, PtrComprCageBase cage_base);         \
  V8_INLINE bool Is##Name(const HeapObjectLayout* obj);                        \
  V8_INLINE bool Is##Name(const HeapObjectLayout* obj,                         \
                          PtrComprCageBase cage_base);
STRUCT_LIST(DECL_STRUCT_PREDICATE)
#undef DECL_STRUCT_PREDICATE

// Whether the object is located outside of the sandbox or in read-only
// space. Currently only needed due to Code objects. Once they are fully
// migrated into trusted space, this can be replaced by !InsideSandbox().
V8_INLINE bool OutsideSandboxOrInReadonlySpace(Tagged<HeapObject> obj);

// Returns true if obj is guaranteed to be a read-only object or a specific
// (small) Smi. If the method returns false, we need more checks for RO space
// objects or Smis. This can be used as a fast RO space/Smi check for objects
// that can e.g. be excluded from GC processing.
V8_INLINE constexpr bool FastInReadOnlySpaceOrSmallSmi(Tagged_t obj);

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_HEAP_OBJECT_H_