#ifndef V8_OBJECTS_MAP_INL_H_
#define V8_OBJECTS_MAP_INL_H_
#include "src/objects/map.h"

#if V8_ENABLE_WEBASSEMBLY
#include "src/wasm/wasm-objects-inl.h"
#endif  // V8_ENABLE_WEBASSEMBLY

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8::internal {

#include "torque-generated/src/objects/map-tq-inl.inc"
TQ_OBJECT_CONSTRUCTORS_IMPL(Map)

ACCESSORS(Map, instance_descriptors, Tagged<DescriptorArray>,
          kInstanceDescriptorsOffset)
#if V8_ENABLE_WEBASSEMBLY
// Wasm struct maps reuse this slot; the accessor is guarded accordingly.
ACCESSORS_CHECKED(Map, instance_descriptors, Tagged<DescriptorArray>,
                  kInstanceDescriptorsOffset, IsWasmStructMap(*this))
#endif  // V8_ENABLE_WEBASSEMBLY
RELAXED_ACCESSORS(Map, instance_descriptors, Tagged<DescriptorArray>,
                  kInstanceDescriptorsOffset)
RELEASE_ACQUIRE_ACCESSORS(Map, instance_descriptors, Tagged<DescriptorArray>,
                          kInstanceDescriptorsOffset)

RELEASE_ACQUIRE_WEAK_ACCESSORS(Map, raw_transitions,
                               kTransitionsOrPrototypeInfoOffset)
RELAXED_WEAK_ACCESSORS(Map, raw_transitions,
                       kTransitionsOrPrototypeInfoOffset)

bool Map::TryGetPrototypeInfo(Tagged<PrototypeInfo>* result) const {
  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
  Tagged<Object> maybe_proto_info =
      TaggedField<Object,
                  kTransitionsOrPrototypeInfoOffset>::load(cage_base, *this);
  if (!IsPrototypeInfoFast(maybe_proto_info)) return false;
  *result = Cast<PrototypeInfo>(maybe_proto_info);
  return true;
}

ACCESSORS_CHECKED(Map, prototype_info, Tagged<UnionOf<Smi, PrototypeInfo>>,
                  kTransitionsOrPrototypeInfoOffset, IsPrototypeMap(*this))
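// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original header, kept under #if 0 so
// it does not participate in compilation): the kTransitionsOrPrototypeInfo
// slot is overloaded -- it holds transition data for ordinary maps and a
// PrototypeInfo for prototype maps, with a flag disambiguating. A minimal
// model of such an overloaded slot, using hypothetical stand-in types:
#if 0
#include <cassert>
#include <variant>

struct TransitionArray {};  // hypothetical stand-in
struct PrototypeInfo {};    // hypothetical stand-in

class MiniMap {
 public:
  // The same storage is interpreted differently depending on the flag,
  // mirroring how Map guards prototype_info on IsPrototypeMap(*this).
  PrototypeInfo* prototype_info() const {
    assert(is_prototype_map_);
    return std::get<PrototypeInfo*>(slot_);
  }
  TransitionArray* raw_transitions() const {
    assert(!is_prototype_map_);
    return std::get<TransitionArray*>(slot_);
  }

 private:
  bool is_prototype_map_ = false;
  std::variant<TransitionArray*, PrototypeInfo*> slot_;
};

int main() {
  MiniMap m;
  assert(m.raw_transitions() == nullptr);  // ordinary map: transitions view
}
#endif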
// |bit_field| fields.
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field, has_non_instance_prototype,
                    Map::Bits1::HasNonInstancePrototypeBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field, has_prototype_slot,
                    Map::Bits1::HasPrototypeSlotBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_callable, Map::Bits1::IsCallableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_named_interceptor,
                    Map::Bits1::HasNamedInterceptorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_indexed_interceptor,
                    Map::Bits1::HasIndexedInterceptorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_undetectable,
                    Map::Bits1::IsUndetectableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_access_check_needed,
                    Map::Bits1::IsAccessCheckNeededBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_constructor,
                    Map::Bits1::IsConstructorBit)

// |bit_field2| fields.
BIT_FIELD_ACCESSORS(Map, bit_field2, new_target_is_base,
                    Map::Bits2::NewTargetIsBaseBit)
BIT_FIELD_ACCESSORS(Map, bit_field2, is_immutable_proto,
                    Map::Bits2::IsImmutablePrototypeBit)

// |bit_field3| fields.
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field3, owns_descriptors,
                    Map::Bits3::OwnsDescriptorsBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field3, is_deprecated,
                    Map::Bits3::IsDeprecatedBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field3, is_in_retained_map_list,
                    Map::Bits3::IsInRetainedMapListBit)
BIT_FIELD_ACCESSORS2(Map, release_acquire_bit_field3, relaxed_bit_field3,
                     is_prototype_map, Map::Bits3::IsPrototypeMapBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field3, is_migration_target,
                    Map::Bits3::IsMigrationTargetBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field3, is_extensible,
                    Map::Bits3::IsExtensibleBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, may_have_interesting_properties,
                    Map::Bits3::MayHaveInterestingPropertiesBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field3, construction_counter,
                    Map::Bits3::ConstructionCounterBits)
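// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original header, kept under #if 0):
// BIT_FIELD_ACCESSORS expands to a getter/setter pair built on a
// BitField<T, shift, size> template. A minimal, self-contained
// re-implementation of the decode/update arithmetic (the real template lives
// in src/base/bit-field.h) behaves like this; the shift/size values below are
// made up for the demo:
#if 0
#include <cassert>
#include <cstdint>

template <typename T, int kShift, int kSize>
struct MiniBitField {
  static constexpr uint32_t kMask = ((uint32_t{1} << kSize) - 1) << kShift;
  static constexpr T decode(uint32_t word) {
    return static_cast<T>((word & kMask) >> kShift);
  }
  static constexpr uint32_t update(uint32_t word, T value) {
    return (word & ~kMask) |
           ((static_cast<uint32_t>(value) << kShift) & kMask);
  }
};

int main() {
  using IsDeprecatedBit = MiniBitField<bool, 5, 1>;  // hypothetical position
  uint32_t bit_field3 = 0;
  bit_field3 = IsDeprecatedBit::update(bit_field3, true);
  assert(IsDeprecatedBit::decode(bit_field3));
  bit_field3 = IsDeprecatedBit::update(bit_field3, false);
  assert(!IsDeprecatedBit::decode(bit_field3));
}
#endif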
  DCHECK(has_indexed_interceptor());
// static
bool Map::CanHaveFastTransitionableElementsKind(InstanceType instance_type) {
  return instance_type == JS_ARRAY_TYPE ||
         instance_type == JS_PRIMITIVE_WRAPPER_TYPE ||
         instance_type == JS_ARGUMENTS_OBJECT_TYPE;
}
bool Map::IsDetached(Isolate* isolate) const {
  return instance_type() == JS_OBJECT_TYPE && NumberOfOwnDescriptors() > 0 &&
         IsUndefined(GetBackPointer(), isolate);
}
// static
Handle<Map> Map::Normalize(Isolate* isolate, DirectHandle<Map> fast_map,
                           PropertyNormalizationMode mode,
                           const char* reason) {
  const bool kUseCache = true;
  return Normalize(isolate, fast_map, fast_map->elements_kind(), {}, mode,
                   kUseCache, reason);
}
bool Map::TooManyFastProperties(StoreOrigin store_origin) const {
  if (UnusedPropertyFields() != 0) return false;
  if (is_prototype_map()) return false;
  int limit = std::max({kFastPropertiesSoftLimit, GetInObjectProperties()});
  int external = NumberOfFields(ConcurrencyMode::kSynchronous) -
                 GetInObjectProperties();
  return external > limit;
}
int Map::NumberOfOwnDescriptors() const {
  return Bits3::NumberOfOwnDescriptorsBits::decode(
      release_acquire_bit_field3());
}

void Map::SetNumberOfOwnDescriptors(int number) {
  CHECK_LE(static_cast<unsigned>(number), kMaxNumberOfDescriptors);
  set_release_acquire_bit_field3(
      Bits3::NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}
int Map::EnumLength() const {
  return Bits3::EnumLengthBits::decode(bit_field3());
}

void Map::SetEnumLength(int length) {
  if (length != kInvalidEnumCacheSentinel) {
    CHECK_LE(static_cast<unsigned>(length), kMaxNumberOfDescriptors);
    DCHECK_LE(length, NumberOfOwnDescriptors());
  }
  set_relaxed_bit_field3(Bits3::EnumLengthBits::update(bit_field3(), length));
}
  CHECK_LT(static_cast<unsigned>(id), 256);
int Map::instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(*this, kInstanceSizeInWordsOffset);
}

void Map::set_instance_size_in_words(int value) {
  RELAXED_WRITE_BYTE_FIELD(*this, kInstanceSizeInWordsOffset,
                           static_cast<uint8_t>(value));
}

int Map::instance_size() const {
  return instance_size_in_words() << kTaggedSizeLog2;
}

void Map::set_instance_size(int size_in_bytes) {
  CHECK(IsAligned(size_in_bytes, kTaggedSize));
  DCHECK_LE(static_cast<unsigned>(size_in_bytes), JSObject::kMaxInstanceSize);
  int size_in_words = size_in_bytes >> kTaggedSizeLog2;
  CHECK_LE(static_cast<unsigned>(size_in_words), 255);
  set_instance_size_in_words(size_in_words);
}
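// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original header, kept under #if 0):
// the instance size is stored as a word count in a single byte and converted
// to bytes by shifting with kTaggedSizeLog2. Assuming 8-byte tagged values
// (kTaggedSizeLog2 == 3; with pointer compression it would be 2):
#if 0
#include <cassert>
#include <cstdint>

int main() {
  constexpr int kTaggedSizeLog2 = 3;  // assumption: 64-bit, no compression
  uint8_t instance_size_in_words = 16;
  int instance_size = instance_size_in_words << kTaggedSizeLog2;
  assert(instance_size == 128);  // 16 words * 8 bytes
  // The byte-sized field caps instances at 255 words, hence the
  // CHECK_LE(..., 255) in the setters above.
}
#endif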
int Map::inobject_properties_start_or_constructor_function_index() const {
  return RELAXED_READ_BYTE_FIELD(
      *this, kInobjectPropertiesStartOrConstructorFunctionIndexOffset);
}

void Map::set_inobject_properties_start_or_constructor_function_index(
    int value) {
  CHECK_LT(static_cast<unsigned>(value), 256);
  RELAXED_WRITE_BYTE_FIELD(
      *this, kInobjectPropertiesStartOrConstructorFunctionIndexOffset,
      static_cast<uint8_t>(value));
}
int Map::GetInObjectPropertiesStartInWords() const {
  DCHECK(IsJSObjectMap(*this));
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetInObjectPropertiesStartInWords(int value) {
  CHECK(IsJSObjectMap(*this));
  set_inobject_properties_start_or_constructor_function_index(value);
}
int Map::GetInObjectProperties() const {
  DCHECK(IsJSObjectMap(*this));
  return instance_size_in_words() - GetInObjectPropertiesStartInWords();
}
int Map::GetConstructorFunctionIndex() const {
#if V8_ENABLE_WEBASSEMBLY
  DCHECK(IsPrimitiveMap(*this) || IsWasmObjectMap(*this));
#else
  DCHECK(IsPrimitiveMap(*this));
#endif  // V8_ENABLE_WEBASSEMBLY
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetConstructorFunctionIndex(int value) {
  DCHECK(IsPrimitiveMap(*this));
  set_inobject_properties_start_or_constructor_function_index(value);
}
int Map::UnusedPropertyFields() const {
#if V8_ENABLE_WEBASSEMBLY
  DCHECK(!IsWasmObjectMap(*this));
#endif  // V8_ENABLE_WEBASSEMBLY
  int value = used_or_unused_instance_size_in_words();
  int unused;
  if (value >= JSObject::kFieldsAdded) {
    unused = instance_size_in_words() - value;
  } else {
    // For out-of-object properties this byte encodes the slack in the
    // property array.
    unused = value;
  }
  return unused;
}

int Map::UnusedInObjectProperties() const {
#if V8_ENABLE_WEBASSEMBLY
  DCHECK(!IsWasmObjectMap(*this));
#endif  // V8_ENABLE_WEBASSEMBLY
  // Like UnusedPropertyFields(), but returns 0 for out-of-object properties.
  int value = used_or_unused_instance_size_in_words();
  if (value >= JSObject::kFieldsAdded) {
    return instance_size_in_words() - value;
  }
  return 0;
}
int Map::used_or_unused_instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(*this,
                                 kUsedOrUnusedInstanceSizeInWordsOffset);
}

void Map::set_used_or_unused_instance_size_in_words(int value) {
  CHECK_LE(static_cast<unsigned>(value), 255);
  RELAXED_WRITE_BYTE_FIELD(*this, kUsedOrUnusedInstanceSizeInWordsOffset,
                           static_cast<uint8_t>(value));
}
int Map::UsedInstanceSize() const {
#if V8_ENABLE_WEBASSEMBLY
  DCHECK(!IsWasmObjectMap(*this));
#endif  // V8_ENABLE_WEBASSEMBLY
  int words = used_or_unused_instance_size_in_words();
  if (words < JSObject::kFieldsAdded) {
    // All in-object properties are used and this byte tracks the slack in
    // the property array.
    return instance_size();
  }
  return words * kTaggedSize;
}
void Map::SetInObjectUnusedPropertyFields(int value) {
  if (!IsJSObjectMap(*this)) {
    CHECK_EQ(0, value);
    set_used_or_unused_instance_size_in_words(0);
    DCHECK_EQ(0, UnusedPropertyFields());
    return;
  }
  CHECK_LE(0, value);
  DCHECK_LE(value, GetInObjectProperties());
  int used_inobject_properties = GetInObjectProperties() - value;
  set_used_or_unused_instance_size_in_words(
      GetInObjectPropertyOffset(used_inobject_properties) / kTaggedSize);
  DCHECK_EQ(value, UnusedPropertyFields());
}

void Map::SetOutOfObjectUnusedPropertyFields(int value) {
  CHECK_LT(static_cast<unsigned>(value), JSObject::kFieldsAdded);
  // For out-of-object properties this byte encodes the slack in the
  // property array.
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(value, UnusedPropertyFields());
}

void Map::CopyUnusedPropertyFields(Tagged<Map> map) {
  set_used_or_unused_instance_size_in_words(
      map->used_or_unused_instance_size_in_words());
  DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
}

void Map::CopyUnusedPropertyFieldsAdjustedForInstanceSize(Tagged<Map> map) {
  int value = map->used_or_unused_instance_size_in_words();
  if (value >= JSPrimitiveWrapper::kFieldsAdded) {
    // Unused in-object fields: adjust the used-size offset so it is relative
    // to this map's instance size.
    value += instance_size_in_words() - map->instance_size_in_words();
  }
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
}
void Map::AccountAddedPropertyField() {
  // Update the used instance size and the unused property fields number.
  int value = used_or_unused_instance_size_in_words();
  if (value >= JSObject::kFieldsAdded) {
    if (value == instance_size_in_words()) {
      AccountAddedOutOfObjectPropertyField(0);
    } else {
      // The property is added in-object, so simply increment the map's used
      // instance size.
      set_used_or_unused_instance_size_in_words(value + 1);
    }
  } else {
    AccountAddedOutOfObjectPropertyField(value);
  }
}

void Map::AccountAddedOutOfObjectPropertyField(int unused_in_property_array) {
  unused_in_property_array--;
  if (unused_in_property_array < 0) {
    unused_in_property_array += JSObject::kFieldsAdded;
  }
  CHECK_LT(static_cast<unsigned>(unused_in_property_array),
           JSObject::kFieldsAdded);
  set_used_or_unused_instance_size_in_words(unused_in_property_array);
  DCHECK_EQ(unused_in_property_array, UnusedPropertyFields());
}
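// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original header, kept under #if 0):
// the used_or_unused_instance_size_in_words byte is itself overloaded.
// Values >= JSObject::kFieldsAdded record the used in-object size in words;
// smaller values record the slack left in the out-of-object property array.
// A standalone model of the decode logic above, with kFieldsAdded assumed to
// be 3:
#if 0
#include <cassert>

constexpr int kFieldsAdded = 3;  // assumption; see JSObject::kFieldsAdded

int UnusedFields(int used_or_unused, int instance_size_in_words) {
  return used_or_unused >= kFieldsAdded
             ? instance_size_in_words - used_or_unused  // in-object slack
             : used_or_unused;                          // property-array slack
}

int main() {
  // A 20-word instance with 18 words used: 2 unused in-object fields.
  assert(UnusedFields(18, 20) == 2);
  // A value below kFieldsAdded: 2 unused slots in the property array.
  assert(UnusedFields(2, 20) == 2);
}
#endif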
#if V8_ENABLE_WEBASSEMBLY
uint8_t Map::WasmByte1() const {
  DCHECK(IsWasmObjectMap(*this));
  return inobject_properties_start_or_constructor_function_index();
}

uint8_t Map::WasmByte2() const {
  DCHECK(IsWasmObjectMap(*this));
  return used_or_unused_instance_size_in_words();
}

void Map::SetWasmByte1(uint8_t value) {
  CHECK(IsWasmObjectMap(*this));
  set_inobject_properties_start_or_constructor_function_index(value);
}

void Map::SetWasmByte2(uint8_t value) {
  CHECK(IsWasmObjectMap(*this));
  set_used_or_unused_instance_size_in_words(value);
}
#endif  // V8_ENABLE_WEBASSEMBLY
uint8_t Map::bit_field() const {
  return ReadField<uint8_t>(kBitFieldOffset);
}

void Map::set_bit_field(uint8_t value) {
  set_relaxed_bit_field(value);
}

uint8_t Map::relaxed_bit_field() const {
  return RELAXED_READ_BYTE_FIELD(*this, kBitFieldOffset);
}

void Map::set_relaxed_bit_field(uint8_t value) {
  RELAXED_WRITE_BYTE_FIELD(*this, kBitFieldOffset, value);
}

uint8_t Map::bit_field2() const {
  return ReadField<uint8_t>(kBitField2Offset);
}

void Map::set_bit_field2(uint8_t value) {
  WriteField<uint8_t>(kBitField2Offset, value);
}

uint32_t Map::bit_field3() const {
  return relaxed_bit_field3();
}

void Map::set_bit_field3(uint32_t value) { set_relaxed_bit_field3(value); }

uint32_t Map::relaxed_bit_field3() const {
  return RELAXED_READ_UINT32_FIELD(*this, kBitField3Offset);
}

void Map::set_relaxed_bit_field3(uint32_t value) {
  RELAXED_WRITE_UINT32_FIELD(*this, kBitField3Offset, value);
}

uint32_t Map::release_acquire_bit_field3() const {
  return ACQUIRE_READ_UINT32_FIELD(*this, kBitField3Offset);
}

void Map::set_release_acquire_bit_field3(uint32_t value) {
  RELEASE_WRITE_UINT32_FIELD(*this, kBitField3Offset, value);
}
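// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original header, kept under #if 0):
// the relaxed vs. release/acquire bit_field3 accessors map onto the usual C++
// memory-order split. A release store paired with an acquire load publishes
// everything written before the store, which is what lets a reader trust the
// state it observes alongside the field:
#if 0
#include <atomic>
#include <cassert>
#include <cstdint>

std::atomic<uint32_t> bit_field3{0};
int payload = 0;  // stands in for data guarded by the field

void writer() {
  payload = 42;  // happens-before the release store below
  bit_field3.store(1, std::memory_order_release);
}

void reader() {
  if (bit_field3.load(std::memory_order_acquire) == 1) {
    assert(payload == 42);  // guaranteed by the acquire/release pairing
  }
}

int main() {
  writer();
  reader();
}
#endif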
void Map::set_elements_kind(ElementsKind elements_kind) {
  CHECK_LT(static_cast<int>(elements_kind), kElementsKindCount);
  set_bit_field2(
      Map::Bits2::ElementsKindBits::update(bit_field2(), elements_kind));
}

ElementsKind Map::elements_kind() const {
  return Map::Bits2::ElementsKindBits::decode(bit_field2());
}
bool Map::has_any_typed_array_or_wasm_array_elements() const {
  ElementsKind kind = elements_kind();
  return IsTypedArrayOrRabGsabTypedArrayElementsKind(kind) ||
#if V8_ENABLE_WEBASSEMBLY
         IsWasmArrayElementsKind(kind) ||
#endif  // V8_ENABLE_WEBASSEMBLY
         false;
}
void Map::set_is_dictionary_map(bool value) {
  uint32_t new_bit_field3 =
      Bits3::IsDictionaryMapBit::update(bit_field3(), value);
  new_bit_field3 = Bits3::IsUnstableBit::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}

bool Map::is_dictionary_map() const {
  return Bits3::IsDictionaryMapBit::decode(relaxed_bit_field3());
}

void Map::mark_unstable() {
  set_release_acquire_bit_field3(
      Bits3::IsUnstableBit::update(bit_field3(), true));
}
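// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original header, kept under #if 0):
// note how set_is_dictionary_map() composes two BitField updates on a local
// copy and performs a single store, so no observer ever sees a half-updated
// word. The bit positions below are assumed for the demo:
#if 0
#include <cstdint>

uint32_t SetDictionaryAndUnstable(uint32_t bit_field3, bool value) {
  constexpr uint32_t kIsDictionaryMapMask = 1u << 0;  // position assumed
  constexpr uint32_t kIsUnstableMask = 1u << 1;       // position assumed
  uint32_t updated = value ? (bit_field3 | kIsDictionaryMapMask)
                           : (bit_field3 & ~kIsDictionaryMapMask);
  updated = value ? (updated | kIsUnstableMask) : (updated & ~kIsUnstableMask);
  return updated;  // the caller stores the fully composed word once
}

int main() { return SetDictionaryAndUnstable(0, true) == 0x3 ? 0 : 1; }
#endif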
bool Map::CanTransition() const {
  // Only JSObject and subtypes have map transitions and back pointers.
  InstanceType type = instance_type();
  DCHECK_IMPLIES(InstanceTypeChecker::IsAlwaysSharedSpaceJSObject(type),
                 InstanceTypeChecker::IsJSObject(type));
  return InstanceTypeChecker::IsJSObject(type) &&
         !InstanceTypeChecker::IsMaybeReadOnlyJSObject(type) &&
         !InstanceTypeChecker::IsAlwaysSharedSpaceJSObject(type);
}
bool IsNullOrUndefinedMap(Tagged<Map> map) {
  auto roots = GetReadOnlyRoots();
  return map == roots.null_map() || map == roots.undefined_map();
}

bool IsPrimitiveMap(Tagged<Map> map) {
  return map->instance_type() <= LAST_PRIMITIVE_HEAP_OBJECT_TYPE;
}
void Map::UpdateDescriptors(Isolate* isolate,
                            Tagged<DescriptorArray> descriptors,
                            int number_of_own_descriptors) {
  SetInstanceDescriptors(isolate, descriptors, number_of_own_descriptors);
}

void Map::InitializeDescriptors(Isolate* isolate,
                                Tagged<DescriptorArray> descriptors) {
  SetInstanceDescriptors(isolate, descriptors,
                         descriptors->number_of_descriptors());
}

void Map::clear_padding() {
  if (FIELD_SIZE(kOptionalPaddingOffset) == 0) return;
  memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
         FIELD_SIZE(kOptionalPaddingOffset));
}

void Map::AppendDescriptor(Isolate* isolate, Descriptor* desc) {
  Tagged<DescriptorArray> descriptors = instance_descriptors(isolate);
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
  {
    // The following two operations need to happen before the marking write
    // barrier.
    descriptors->Append(desc);
    SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
#ifndef V8_DISABLE_WRITE_BARRIERS
    WriteBarrier::ForDescriptorArray(descriptors,
                                     number_of_own_descriptors + 1);
#endif
  }
  // Properly mark the map if the {desc} is an "interesting symbol".
  if (desc->GetKey()->IsInteresting(isolate)) {
    set_may_have_interesting_properties(true);
  }
}
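// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original header, kept under #if 0):
// AppendDescriptor writes the new descriptor first and only then bumps the
// published count (via the release-store inside SetNumberOfOwnDescriptors),
// so a concurrent reader that observes the larger count also observes the
// fully written entry. A minimal model of that ordering, assuming fewer than
// eight appends of non-zero values:
#if 0
#include <array>
#include <atomic>
#include <cassert>

std::array<int, 8> descriptors{};   // stands in for the DescriptorArray
std::atomic<int> number_of_own{0};  // stands in for the bit_field3 count

void Append(int desc) {
  int n = number_of_own.load(std::memory_order_relaxed);
  descriptors[n] = desc;                                   // 1. write entry
  number_of_own.store(n + 1, std::memory_order_release);   // 2. publish it
}

void Reader() {
  int n = number_of_own.load(std::memory_order_acquire);
  // Every entry below the published count is fully written.
  for (int i = 0; i < n; i++) assert(descriptors[i] != 0);
}

int main() {
  Append(7);
  Reader();
}
#endif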
// static
bool Map::ConcurrentIsHeapObjectWithMap(PtrComprCageBase cage_base,
                                        Tagged<Object> object,
                                        Tagged<Map> meta_map) {
  if (!IsHeapObject(object)) return false;
  Tagged<HeapObject> heap_object = Cast<HeapObject>(object);
  return heap_object->map(cage_base) == meta_map;
}

Tagged<HeapObject> Map::GetBackPointer(PtrComprCageBase cage_base) const {
  Tagged<Map> back_pointer;
  if (TryGetBackPointer(cage_base, &back_pointer)) {
    return back_pointer;
  }
  return GetReadOnlyRoots().undefined_value();
}

bool Map::TryGetBackPointer(PtrComprCageBase cage_base,
                            Tagged<Map>* back_pointer) const {
  Tagged<Object> object = constructor_or_back_pointer(cage_base, kRelaxedLoad);
  Tagged<Map> meta_map = map(cage_base);
  if (ConcurrentIsHeapObjectWithMap(cage_base, object, meta_map)) {
    DCHECK(IsMap(object));
    // Only maps tied to a native context have back pointers.
    DCHECK(IsNativeContext(meta_map->native_context_or_null()));
    *back_pointer = Cast<Map>(object);
    return true;
  }
  return false;
}
void Map::SetBackPointer(Tagged<HeapObject> value, WriteBarrierMode mode) {
  CHECK_GE(instance_type(), FIRST_JS_RECEIVER_TYPE);
  CHECK(IsMap(value));
  CHECK(IsUndefined(GetBackPointer()));
  set_constructor_or_back_pointer(value, mode);
}
RELEASE_ACQUIRE_ACCESSORS(Map, prototype_validity_cell,
                          Tagged<UnionOf<Smi, Cell>>,
                          kPrototypeValidityCellOffset)
ACCESSORS_CHECKED2(Map, constructor_or_back_pointer, Tagged<Object>,
                   kConstructorOrBackPointerOrNativeContextOffset,
                   !IsContextMap(*this),
                   IsNull(value) || !IsContextMap(*this))
RELAXED_ACCESSORS_CHECKED2(Map, constructor_or_back_pointer, Tagged<Object>,
                           kConstructorOrBackPointerOrNativeContextOffset,
                           !IsContextMap(*this),
                           IsNull(value) || !IsContextMap(*this))
ACCESSORS_CHECKED(Map, native_context, Tagged<NativeContext>,
                  kConstructorOrBackPointerOrNativeContextOffset,
                  IsContextMap(*this) || IsMapMap(*this))
ACCESSORS_CHECKED(Map, native_context_or_null, Tagged<Object>,
                  kConstructorOrBackPointerOrNativeContextOffset,
                  (IsNull(value) || IsNativeContext(value)) &&
                      (IsContextMap(*this) || IsMapMap(*this)))

Tagged<Object> Map::raw_native_context_or_null() {
  DCHECK(IsContextMap(*this) || IsMapMap(*this));
  return TaggedField<
      Object, kConstructorOrBackPointerOrNativeContextOffset>::Relaxed_Load(
      *this);
}
#if V8_ENABLE_WEBASSEMBLY
ACCESSORS_CHECKED(Map, wasm_type_info, Tagged<WasmTypeInfo>,
                  kConstructorOrBackPointerOrNativeContextOffset,
                  IsWasmStructMap(*this) || IsWasmArrayMap(*this) ||
                      IsWasmFuncRefMap(*this))
#endif  // V8_ENABLE_WEBASSEMBLY
bool Map::IsPrototypeValidityCellValid() const {
  Tagged<Object> validity_cell = prototype_validity_cell(kRelaxedLoad);
  if (IsSmi(validity_cell)) {
    // Smi validity cells are always considered valid.
    DCHECK_EQ(Smi::ToInt(validity_cell), Map::kPrototypeChainValid);
    return true;
  }
  Tagged<Object> cell_value = Cast<Cell>(validity_cell)->value();
  return cell_value == Smi::FromInt(Map::kPrototypeChainValid);
}
bool Map::BelongsToSameNativeContextAs(Tagged<Map> other_map) const {
  Tagged<Map> this_meta_map = map();
  // The meta map carries the native context.
  DCHECK(IsNativeContext(this_meta_map->native_context_or_null()));
  return this_meta_map == other_map->map();
}

bool Map::BelongsToSameNativeContextAs(Tagged<Context> context) const {
  Tagged<Map> context_meta_map = context->map()->map();
  Tagged<Map> this_meta_map = map();
  DCHECK(IsNativeContext(context_meta_map->native_context_or_null()));
  return this_meta_map == context_meta_map;
}
DEF_GETTER(Map, GetConstructorRaw, Tagged<Object>) {
  Tagged<Object> maybe_constructor = constructor_or_back_pointer(cage_base);
  // Follow any back pointers.
  Tagged<Map> meta_map = map(cage_base);
  while (
      ConcurrentIsHeapObjectWithMap(cage_base, maybe_constructor, meta_map)) {
    DCHECK(IsMap(maybe_constructor));
    // Only maps tied to a native context have back pointers.
    DCHECK(IsNativeContext(meta_map->native_context_or_null()));
    maybe_constructor =
        Cast<Map>(maybe_constructor)->constructor_or_back_pointer(cage_base);
  }
  DCHECK(!IsMap(maybe_constructor));
  return maybe_constructor;
}
DEF_GETTER(Map, GetNonInstancePrototype, Tagged<Object>) {
  DCHECK(has_non_instance_prototype());
  Tagged<Object> raw_constructor = GetConstructorRaw(cage_base);
  CHECK(IsTuple2(raw_constructor));
  // Get the prototype from the {constructor, non-instance_prototype} tuple.
  Tagged<Object> result = Cast<Tuple2>(raw_constructor)->value2();
  DCHECK(!IsJSReceiver(result));
  return result;
}

DEF_GETTER(Map, GetConstructor, Tagged<Object>) {
  Tagged<Object> maybe_constructor = GetConstructorRaw(cage_base);
  if (IsTuple2(maybe_constructor)) {
    // Get the constructor from the {constructor, non-instance_prototype}
    // tuple.
    maybe_constructor = Cast<Tuple2>(maybe_constructor)->value1();
  }
  return maybe_constructor;
}
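// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original header, kept under #if 0):
// the constructor slot doubles as a back pointer, so GetConstructor() walks
// the transition chain until the value is no longer a map. The walk is the
// classic "follow parent links to the root" loop:
#if 0
#include <cassert>

struct MiniMap {
  MiniMap* back_pointer = nullptr;  // non-null while this slot is a back ptr
  void* constructor = nullptr;      // meaningful only at the chain's root
};

void* GetConstructor(MiniMap* map) {
  while (map->back_pointer != nullptr) map = map->back_pointer;
  return map->constructor;
}

int main() {
  int ctor_stand_in = 0;
  MiniMap root{nullptr, &ctor_stand_in};
  MiniMap child{&root, nullptr};
  MiniMap grandchild{&child, nullptr};
  assert(GetConstructor(&grandchild) == &ctor_stand_in);
}
#endif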
Tagged<Object> Map::TryGetConstructor(PtrComprCageBase cage_base,
                                      int max_steps) {
  Tagged<Object> maybe_constructor = constructor_or_back_pointer(cage_base);
  // Follow any back pointers, bailing out after |max_steps|.
  while (IsMap(maybe_constructor, cage_base)) {
    if (max_steps-- == 0) return Smi::FromInt(0);
    maybe_constructor =
        Cast<Map>(maybe_constructor)->constructor_or_back_pointer(cage_base);
  }
  if (IsTuple2(maybe_constructor)) {
    // Get the constructor from the {constructor, non-instance_prototype}
    // tuple.
    maybe_constructor = Cast<Tuple2>(maybe_constructor)->value1();
  }
  return maybe_constructor;
}
DEF_GETTER(Map, GetFunctionTemplateInfo, Tagged<FunctionTemplateInfo>) {
  Tagged<Object> constructor = GetConstructor(cage_base);
  if (IsJSFunction(constructor, cage_base)) {
    Tagged<SharedFunctionInfo> sfi =
        Cast<JSFunction>(constructor)->shared(cage_base);
    DCHECK(sfi->IsApiFunction());
    return sfi->api_func_data();
  }
  DCHECK(IsFunctionTemplateInfo(constructor, cage_base));
  return Cast<FunctionTemplateInfo>(constructor);
}
void Map::SetConstructor(Tagged<Object> constructor, WriteBarrierMode mode) {
  // Never overwrite a back pointer with a constructor.
  CHECK(!IsMap(constructor_or_back_pointer()));
  // Maps with a non-instance prototype store a {constructor,
  // non-instance_prototype} Tuple2 in this slot.
  DCHECK_EQ(has_non_instance_prototype(), IsTuple2(constructor));
  set_constructor_or_back_pointer(constructor, mode);
}
Handle<Map> Map::CopyInitialMap(Isolate* isolate, DirectHandle<Map> map) {
  return CopyInitialMap(isolate, map, map->instance_size(),
                        map->GetInObjectProperties(),
                        map->UnusedPropertyFields());
}
bool Map::IsInobjectSlackTrackingInProgress() const {
  return construction_counter() != Map::kNoSlackTracking;
}

void Map::InobjectSlackTrackingStep(Isolate* isolate) {
  // Slack tracking should only be performed on an initial map.
  DCHECK(IsUndefined(GetBackPointer()));
  if (!IsInobjectSlackTrackingInProgress()) return;
  int counter = construction_counter();
  set_construction_counter(counter - 1);
  if (counter == kSlackTrackingCounterEnd) {
    MapUpdater::CompleteInobjectSlackTracking(isolate, *this);
  }
}
// static
int Map::SlackForArraySize(int old_size, int size_limit) {
  const int max_slack = size_limit - old_size;
  CHECK_LE(0, max_slack);
  if (old_size < 4) {
    DCHECK_LE(1, max_slack);
    return 1;
  }
  return std::min(max_slack, old_size / 4);
}
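// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original header, kept under #if 0): a
// worked example of SlackForArraySize(). For old_size = 8 and
// size_limit = 12, max_slack is 4 and old_size / 4 is 2, so the function
// returns min(4, 2) == 2:
#if 0
#include <algorithm>
#include <cassert>

int SlackFor(int old_size, int size_limit) {
  const int max_slack = size_limit - old_size;
  if (old_size < 4) return std::min(max_slack, 1);  // small arrays: at most 1
  return std::min(max_slack, old_size / 4);
}

int main() { assert(SlackFor(8, 12) == 2); }
#endif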
DEF_HEAP_OBJECT_PREDICATE(HeapObject, IsNormalizedMapCache) {
  if (!IsWeakFixedArray(obj, cage_base)) return false;
  if (Cast<WeakFixedArray>(obj)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
  return true;
}

}  // namespace v8::internal

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_MAP_INL_H_