v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
map-inl.h
Go to the documentation of this file.
1// Copyright 2017 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_OBJECTS_MAP_INL_H_
6#define V8_OBJECTS_MAP_INL_H_
7
8#include "src/objects/map.h"
9// Include the non-inl header before the rest of the headers.
10
30
31#if V8_ENABLE_WEBASSEMBLY
33#endif // V8_ENABLE_WEBASSEMBLY
34
35// Has to be the last include (doesn't have include guards):
37
38namespace v8 {
39namespace internal {
40
41#include "torque-generated/src/objects/map-tq-inl.inc"
42
44
46 kInstanceDescriptorsOffset)
47#if V8_ENABLE_WEBASSEMBLY
48ACCESSORS_CHECKED(Map, custom_descriptor, Tagged<WasmStruct>,
49 kInstanceDescriptorsOffset, IsWasmStructMap(*this))
50#endif // V8_ENABLE_WEBASSEMBLY
51
53 kInstanceDescriptorsOffset)
55 kInstanceDescriptorsOffset)
56
57// A freshly allocated layout descriptor can be set on an existing map.
58// We need to use release-store and acquire-load accessor pairs to ensure
59// that the concurrent marking thread observes initializing stores of the
60// layout descriptor.
63 kTransitionsOrPrototypeInfoOffset)
67 kTransitionsOrPrototypeInfoOffset)
68
70 IsNull(value) || IsJSProxy(value) || IsWasmObject(value) ||
71 (IsJSObject(value) &&
72 (HeapLayout::InWritableSharedSpace(value) ||
73 value->map()->is_prototype_map())))
74
75DEF_GETTER(Map, prototype_info, Tagged<UnionOf<Smi, PrototypeInfo>>) {
78 kTransitionsOrPrototypeInfoOffset>::load(cage_base, *this);
79 DCHECK(this->is_prototype_map());
80 return value;
81}
93
94// |bit_field| fields.
95// Concurrent access to |has_prototype_slot| and |has_non_instance_prototype|
// is explicitly allowlisted here. The former is never modified after the map
// is set up, but it is read by the concurrent marker when pointer compression
// is enabled. The latter bit can be modified on live objects.
99BIT_FIELD_ACCESSORS(Map, relaxed_bit_field, has_non_instance_prototype,
100 Map::Bits1::HasNonInstancePrototypeBit)
102 Map::Bits1::HasPrototypeSlotBit)
103
104// These are fine to be written as non-atomic since we don't have data races.
105// However, they have to be read atomically from the background since the
106// |bit_field| as a whole can mutate when using the above setters.
108 Map::Bits1::IsCallableBit)
110 Map::Bits1::HasNamedInterceptorBit)
112 Map::Bits1::HasIndexedInterceptorBit)
114 Map::Bits1::IsUndetectableBit)
116 Map::Bits1::IsAccessCheckNeededBit)
118 Map::Bits1::IsConstructorBit)
119
120// |bit_field2| fields.
121BIT_FIELD_ACCESSORS(Map, bit_field2, new_target_is_base,
122 Map::Bits2::NewTargetIsBaseBit)
124 Map::Bits2::IsImmutablePrototypeBit)
125
126// |bit_field3| fields.
128 Map::Bits3::OwnsDescriptorsBit)
130 Map::Bits3::IsDeprecatedBit)
132 Map::Bits3::IsInRetainedMapListBit)
134 Map::Bits3::IsPrototypeMapBit)
136 Map::Bits3::IsMigrationTargetBit)
138 Map::Bits3::IsExtensibleBit)
139BIT_FIELD_ACCESSORS(Map, bit_field3, may_have_interesting_properties,
140 Map::Bits3::MayHaveInterestingPropertiesBit)
142 Map::Bits3::ConstructionCounterBits)
143
144DEF_GETTER(Map, GetNamedInterceptor, Tagged<InterceptorInfo>) {
146 Tagged<FunctionTemplateInfo> info = GetFunctionTemplateInfo(cage_base);
147 return Cast<InterceptorInfo>(info->GetNamedPropertyHandler(cage_base));
148}
149
// Returns the indexed-property interceptor for this map, read from the
// constructor's FunctionTemplateInfo. Only valid to call when
// has_indexed_interceptor() is true (DCHECK'd below).
DEF_GETTER(Map, GetIndexedInterceptor, Tagged<InterceptorInfo>) {
  DCHECK(has_indexed_interceptor());
  Tagged<FunctionTemplateInfo> info = GetFunctionTemplateInfo(cage_base);
  return Cast<InterceptorInfo>(info->GetIndexedPropertyHandler(cage_base));
}
155
156// static
158 Tagged<FieldType> field_type) {
159 return !representation.IsHeapObject() || IsAny(field_type);
160}
161
162// static
164 return instance_type == JS_ARRAY_TYPE ||
165 instance_type == JS_PRIMITIVE_WRAPPER_TYPE ||
166 instance_type == JS_ARGUMENTS_OBJECT_TYPE;
167}
168
172
173bool Map::IsDetached(Isolate* isolate) const {
174 if (is_prototype_map()) return true;
175 return instance_type() == JS_OBJECT_TYPE && NumberOfOwnDescriptors() > 0 &&
176 IsUndefined(GetBackPointer(), isolate);
177}
178
179// static
181 Isolate* isolate, InstanceType instance_type,
182 Representation* representation, DirectHandle<FieldType>* field_type) {
183 if (CanHaveFastTransitionableElementsKind(instance_type)) {
184 // We don't support propagation of field generalization through elements
185 // kind transitions because they are inserted into the transition tree
186 // before field transitions. In order to avoid complexity of handling
187 // such a case we ensure that all maps with transitionable elements kinds
188 // have the most general field representation and type.
189 *field_type = FieldType::Any(isolate);
190 *representation = Representation::Tagged();
191 }
192}
193
195 PropertyNormalizationMode mode, const char* reason) {
196 const bool kUseCache = true;
197 return Normalize(isolate, fast_map, fast_map->elements_kind(), {}, mode,
198 kUseCache, reason);
199}
200
202 PropertyNormalizationMode mode) const {
203 return EquivalentToForNormalization(other, elements_kind(), prototype(),
204 mode);
205}
206
207bool Map::TooManyFastProperties(StoreOrigin store_origin) const {
208 if (UnusedPropertyFields() != 0) return false;
209 if (store_origin != StoreOrigin::kMaybeKeyed) return false;
210 if (is_prototype_map()) return false;
211 int limit = std::max(
212 {v8_flags.fast_properties_soft_limit.value(), GetInObjectProperties()});
213 int external =
215 return external > limit;
216}
217
219 return instance_descriptors(isolate)->GetKey(LastAdded());
220}
221
223 return instance_descriptors(isolate)->GetDetails(LastAdded());
224}
225
227 int number_of_own_descriptors = NumberOfOwnDescriptors();
228 DCHECK_GT(number_of_own_descriptors, 0);
229 return InternalIndex(number_of_own_descriptors - 1);
230}
231
233 return Bits3::NumberOfOwnDescriptorsBits::decode(
235}
236
239 CHECK_LE(static_cast<unsigned>(number),
240 static_cast<unsigned>(kMaxNumberOfDescriptors));
241 set_release_acquire_bit_field3(
242 Bits3::NumberOfOwnDescriptorsBits::update(bit_field3(), number));
243}
244
248
// Decodes the cached enum length from |bit_field3|. The value may be
// kInvalidEnumCacheSentinel (see SetEnumLength).
int Map::EnumLength() const {
  return Bits3::EnumLengthBits::decode(bit_field3());
}
252
253void Map::SetEnumLength(int length) {
254 if (length != kInvalidEnumCacheSentinel) {
256 CHECK_LE(static_cast<unsigned>(length),
257 static_cast<unsigned>(kMaxNumberOfDescriptors));
258 }
259 set_relaxed_bit_field3(Bits3::EnumLengthBits::update(bit_field3(), length));
260}
261
266 result = GetReadOnlyRoots().empty_fixed_array();
268 result = GetReadOnlyRoots().empty_byte_array();
269 } else if (has_dictionary_elements()) {
270 result = GetReadOnlyRoots().empty_slow_element_dictionary();
271 } else {
272 UNREACHABLE();
273 }
275 return result;
276}
277
// Reads the heap-visitor id byte with relaxed atomic semantics, since the
// field may be accessed concurrently.
VisitorId Map::visitor_id() const {
  return static_cast<VisitorId>(
      RELAXED_READ_BYTE_FIELD(*this, kVisitorIdOffset));
}
282
283void Map::set_visitor_id(VisitorId id) {
284 CHECK_LT(static_cast<unsigned>(id), 256);
285 RELAXED_WRITE_BYTE_FIELD(*this, kVisitorIdOffset, static_cast<uint8_t>(id));
286}
287
// Relaxed read of the instance size, measured in tagged words.
int Map::instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(*this, kInstanceSizeInWordsOffset);
}
291
// Relaxed write of the instance size in tagged words. Callers are
// responsible for ensuring |value| fits in a byte (see
// Map::set_instance_size, which CHECKs the range before calling this).
void Map::set_instance_size_in_words(int value) {
  RELAXED_WRITE_BYTE_FIELD(*this, kInstanceSizeInWordsOffset,
                           static_cast<uint8_t>(value));
}
296
// Instance size in bytes: the stored word count scaled by the tagged size.
int Map::instance_size() const {
  return instance_size_in_words() << kTaggedSizeLog2;
}
300
301void Map::set_instance_size(int size_in_bytes) {
302 CHECK(IsAligned(size_in_bytes, kTaggedSize));
303 DCHECK_LE(static_cast<unsigned>(size_in_bytes), JSObject::kMaxInstanceSize);
304 int size_in_words = size_in_bytes >>= kTaggedSizeLog2;
305 CHECK_LE(static_cast<unsigned>(size_in_words), kMaxUInt8);
306 set_instance_size_in_words(size_in_words);
307}
308
309int Map::inobject_properties_start_or_constructor_function_index() const {
310 // TODO(solanes, v8:7790, v8:11353): Make this and the setter non-atomic
311 // when TSAN sees the map's store synchronization.
313 *this, kInobjectPropertiesStartOrConstructorFunctionIndexOffset);
314}
315
316void Map::set_inobject_properties_start_or_constructor_function_index(
317 int value) {
318 CHECK_LE(static_cast<unsigned>(value), kMaxUInt8);
320 *this, kInobjectPropertiesStartOrConstructorFunctionIndexOffset,
321 static_cast<uint8_t>(value));
322}
323
325 DCHECK(IsJSObjectMap(*this));
326 return inobject_properties_start_or_constructor_function_index();
327}
328
330 CHECK(IsJSObjectMap(*this));
331 set_inobject_properties_start_or_constructor_function_index(value);
332}
333
335 bool ret = used_or_unused_instance_size_in_words() < JSObject::kFieldsAdded;
338 return ret;
339}
340
342 DCHECK(IsJSObjectMap(*this));
343 return instance_size_in_words() - GetInObjectPropertiesStartInWords();
344}
345
347#if V8_ENABLE_WEBASSEMBLY
348 // We allow WasmNull here so builtins can produce error messages when
349 // called from Wasm, without having to special-case WasmNull at every
350 // caller of such a builtin.
351 DCHECK(IsPrimitiveMap(*this) || instance_type() == WASM_NULL_TYPE);
352#else
353 DCHECK(IsPrimitiveMap(*this));
354#endif
355 return inobject_properties_start_or_constructor_function_index();
356}
357
359 CHECK(IsPrimitiveMap(*this));
360 set_inobject_properties_start_or_constructor_function_index(value);
361}
362
363int Map::GetInObjectPropertyOffset(int index) const {
365}
366
368 Isolate* isolate, DirectHandle<Map> split_map,
369 DirectHandle<DescriptorArray> descriptors) {
370 return AddMissingTransitions(isolate, split_map, descriptors);
371}
372
// Reads the 16-bit instance type with relaxed atomic semantics.
InstanceType Map::instance_type() const {
  // TODO(solanes, v8:7790, v8:11353, v8:11945): Make this and the setter
  // non-atomic when TSAN sees the map's store synchronization.
  return static_cast<InstanceType>(
      RELAXED_READ_UINT16_FIELD(*this, kInstanceTypeOffset));
}
379
// Stores the instance type with a relaxed atomic 16-bit write (see the
// TODO on the getter about making this non-atomic).
void Map::set_instance_type(InstanceType value) {
  RELAXED_WRITE_UINT16_FIELD(*this, kInstanceTypeOffset, value);
}
383
385#if V8_ENABLE_WEBASSEMBLY
386 DCHECK(!IsWasmObjectMap(*this));
387#endif // V8_ENABLE_WEBASSEMBLY
388 int value = used_or_unused_instance_size_in_words();
389 DCHECK_IMPLIES(!IsJSObjectMap(*this), value == 0);
390 int unused;
391 if (value >= JSObject::kFieldsAdded) {
392 unused = instance_size_in_words() - value;
393 } else {
394 // For out of object properties "used_or_unused_instance_size_in_words"
395 // byte encodes the slack in the property array.
396 unused = value;
397 }
398 return unused;
399}
400
402 // Like Map::UnusedPropertyFields(), but returns 0 for out of object
403 // properties.
404#if V8_ENABLE_WEBASSEMBLY
405 DCHECK(!IsWasmObjectMap(*this));
406#endif // V8_ENABLE_WEBASSEMBLY
407 int value = used_or_unused_instance_size_in_words();
408 DCHECK_IMPLIES(!IsJSObjectMap(*this), value == 0);
409 if (value >= JSObject::kFieldsAdded) {
410 return instance_size_in_words() - value;
411 }
412 return 0;
413}
414
// Raw relaxed read of the byte that encodes either the used instance size
// in words or the slack in the property array — callers decode the two
// cases by comparing against JSObject::kFieldsAdded.
int Map::used_or_unused_instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(*this, kUsedOrUnusedInstanceSizeInWordsOffset);
}
418
419void Map::set_used_or_unused_instance_size_in_words(int value) {
420 CHECK_LE(static_cast<unsigned>(value), 255);
421 RELAXED_WRITE_BYTE_FIELD(*this, kUsedOrUnusedInstanceSizeInWordsOffset,
422 static_cast<uint8_t>(value));
423}
424
426#if V8_ENABLE_WEBASSEMBLY
427 DCHECK(!IsWasmObjectMap(*this));
428#endif // V8_ENABLE_WEBASSEMBLY
429 int words = used_or_unused_instance_size_in_words();
430 if (words < JSObject::kFieldsAdded) {
431 // All in-object properties are used and the words is tracking the slack
432 // in the property array.
433 return instance_size();
434 }
435 return words * kTaggedSize;
436}
437
439 static_assert(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
440 if (!IsJSObjectMap(*this)) {
441 CHECK_EQ(0, value);
442 set_used_or_unused_instance_size_in_words(0);
443 return;
444 }
445 CHECK_LE(0, value);
447 int used_inobject_properties = GetInObjectProperties() - value;
448 set_used_or_unused_instance_size_in_words(
449 GetInObjectPropertyOffset(used_inobject_properties) / kTaggedSize);
451}
452
454 static_assert(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
455 CHECK_LT(static_cast<unsigned>(value), JSObject::kFieldsAdded);
456 // For out of object properties "used_instance_size_in_words" byte encodes
457 // the slack in the property array.
458 set_used_or_unused_instance_size_in_words(value);
460}
461
463 set_used_or_unused_instance_size_in_words(
464 map->used_or_unused_instance_size_in_words());
465 DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
466}
467
469 int value = map->used_or_unused_instance_size_in_words();
470 if (value >= JSPrimitiveWrapper::kFieldsAdded) {
471 // Unused in-object fields. Adjust the offset from the object’s start
472 // so it matches the distance to the object’s end.
473 value += instance_size_in_words() - map->instance_size_in_words();
474 }
475 set_used_or_unused_instance_size_in_words(value);
476 DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
477}
478
480 // Update used instance size and unused property fields number.
481 static_assert(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
482#ifdef DEBUG
483 int new_unused = UnusedPropertyFields() - 1;
484 if (new_unused < 0) new_unused += JSObject::kFieldsAdded;
485#endif
486 int value = used_or_unused_instance_size_in_words();
487 if (value >= JSObject::kFieldsAdded) {
488 if (value == instance_size_in_words()) {
490 } else {
491 // The property is added in-object, so simply increment the counter.
492 set_used_or_unused_instance_size_in_words(value + 1);
493 }
494 } else {
496 }
497 DCHECK_EQ(new_unused, UnusedPropertyFields());
498}
499
500void Map::AccountAddedOutOfObjectPropertyField(int unused_in_property_array) {
501 unused_in_property_array--;
502 if (unused_in_property_array < 0) {
503 unused_in_property_array += JSObject::kFieldsAdded;
504 }
505 CHECK_LT(static_cast<unsigned>(unused_in_property_array),
507 set_used_or_unused_instance_size_in_words(unused_in_property_array);
508 DCHECK_EQ(unused_in_property_array, UnusedPropertyFields());
509}
510
511#if V8_ENABLE_WEBASSEMBLY
// Wasm object maps repurpose the inobject-properties-start byte as opaque
// storage; this returns its raw value.
uint8_t Map::WasmByte1() const {
  DCHECK(IsWasmObjectMap(*this));
  return inobject_properties_start_or_constructor_function_index();
}
516
// Wasm object maps repurpose the used-or-unused-instance-size byte as
// opaque storage; this returns its raw value.
uint8_t Map::WasmByte2() const {
  DCHECK(IsWasmObjectMap(*this));
  return used_or_unused_instance_size_in_words();
}
521
// Writes the first repurposed Wasm byte (only valid on Wasm object maps).
void Map::SetWasmByte1(uint8_t value) {
  CHECK(IsWasmObjectMap(*this));
  set_inobject_properties_start_or_constructor_function_index(value);
}
526
// Writes the second repurposed Wasm byte (only valid on Wasm object maps).
void Map::SetWasmByte2(uint8_t value) {
  CHECK(IsWasmObjectMap(*this));
  set_used_or_unused_instance_size_in_words(value);
}
531#endif // V8_ENABLE_WEBASSEMBLY
532
// Accessor for |bit_field|; currently forwards to the relaxed variant.
uint8_t Map::bit_field() const {
  // TODO(solanes, v8:7790, v8:11353): Make this non-atomic when TSAN sees the
  // map's store synchronization.
  return relaxed_bit_field();
}
538
// Setter for |bit_field|; currently forwards to the relaxed variant.
void Map::set_bit_field(uint8_t value) {
  // TODO(solanes, v8:7790, v8:11353): Make this non-atomic when TSAN sees the
  // map's store synchronization.
  set_relaxed_bit_field(value);
}
544
// Relaxed atomic read of the one-byte |bit_field|.
uint8_t Map::relaxed_bit_field() const {
  return RELAXED_READ_BYTE_FIELD(*this, kBitFieldOffset);
}
548
// Relaxed atomic write of the one-byte |bit_field|.
void Map::set_relaxed_bit_field(uint8_t value) {
  RELAXED_WRITE_BYTE_FIELD(*this, kBitFieldOffset, value);
}
552
// Plain (non-atomic) read of |bit_field2|.
uint8_t Map::bit_field2() const { return ReadField<uint8_t>(kBitField2Offset); }
554
// Plain (non-atomic) write of |bit_field2|.
void Map::set_bit_field2(uint8_t value) {
  WriteField<uint8_t>(kBitField2Offset, value);
}
558
// Accessor for |bit_field3|; currently forwards to the relaxed variant.
uint32_t Map::bit_field3() const {
  // TODO(solanes, v8:7790, v8:11353): Make this and the setter non-atomic
  // when TSAN sees the map's store synchronization.
  return relaxed_bit_field3();
}
564
// Setter for |bit_field3|; forwards to the relaxed variant (see the TODO
// on the getter).
void Map::set_bit_field3(uint32_t value) { set_relaxed_bit_field3(value); }
566
// Relaxed atomic read of the 32-bit |bit_field3|.
uint32_t Map::relaxed_bit_field3() const {
  return RELAXED_READ_UINT32_FIELD(*this, kBitField3Offset);
}
570
// Relaxed atomic write of the 32-bit |bit_field3|.
void Map::set_relaxed_bit_field3(uint32_t value) {
  RELAXED_WRITE_UINT32_FIELD(*this, kBitField3Offset, value);
}
574
// Acquire-load of |bit_field3|, pairing with set_release_acquire_bit_field3
// so concurrent readers observe fully-initialized values.
uint32_t Map::release_acquire_bit_field3() const {
  return ACQUIRE_READ_UINT32_FIELD(*this, kBitField3Offset);
}
578
// Release-store of |bit_field3|, pairing with release_acquire_bit_field3.
void Map::set_release_acquire_bit_field3(uint32_t value) {
  RELEASE_WRITE_UINT32_FIELD(*this, kBitField3Offset, value);
}
582
584 return is_prototype_map() && !owns_descriptors();
585}
586
589 if (!has_prototype_info()) return false;
590 return Cast<PrototypeInfo>(prototype_info())->should_be_fast_map();
591}
592
595 return PrototypeInfo::IsPrototypeInfoFast(prototype_info());
596}
597
600 Tagged<Object> maybe_proto_info = prototype_info();
601 if (!PrototypeInfo::IsPrototypeInfoFast(maybe_proto_info)) return false;
602 *result = Cast<PrototypeInfo>(maybe_proto_info);
603 return true;
604}
605
606void Map::set_elements_kind(ElementsKind elements_kind) {
607 CHECK_LT(static_cast<int>(elements_kind), kElementsKindCount);
608 set_bit_field2(
609 Map::Bits2::ElementsKindBits::update(bit_field2(), elements_kind));
610}
611
// Decodes the elements kind from |bit_field2|.
ElementsKind Map::elements_kind() const {
  return Map::Bits2::ElementsKindBits::decode(bit_field2());
}
615
617 return IsSmiElementsKind(elements_kind());
618}
619
621 return IsObjectElementsKind(elements_kind());
622}
623
625 return IsSmiOrObjectElementsKind(elements_kind());
626}
627
629 return IsDoubleElementsKind(elements_kind());
630}
631
633 return IsFastElementsKind(elements_kind());
634}
635
637 return IsFastPackedElementsKind(elements_kind());
638}
639
641 return IsSloppyArgumentsElementsKind(elements_kind());
642}
643
645 return elements_kind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
646}
647
649 return elements_kind() == FAST_STRING_WRAPPER_ELEMENTS;
650}
651
655
657 ElementsKind kind = elements_kind();
659#if V8_ENABLE_WEBASSEMBLY
661#endif // V8_ENABLE_WEBASSEMBLY
662 false;
663}
664
666 return IsDictionaryElementsKind(elements_kind());
667}
668
670 return IsAnyNonextensibleElementsKind(elements_kind());
671}
672
674 return IsNonextensibleElementsKind(elements_kind());
675}
676
678 return IsSealedElementsKind(elements_kind());
679}
680
682 return IsFrozenElementsKind(elements_kind());
683}
684
686 return IsSharedArrayElementsKind(elements_kind());
687}
688
689void Map::set_is_dictionary_map(bool value) {
690 uint32_t new_bit_field3 =
691 Bits3::IsDictionaryMapBit::update(bit_field3(), value);
692 new_bit_field3 = Bits3::IsUnstableBit::update(new_bit_field3, value);
693 set_bit_field3(new_bit_field3);
694}
695
// Reads the dictionary-map bit from |bit_field3| with a relaxed load.
bool Map::is_dictionary_map() const {
  return Bits3::IsDictionaryMapBit::decode(relaxed_bit_field3());
}
699
701 set_release_acquire_bit_field3(
702 Bits3::IsUnstableBit::update(bit_field3(), true));
703}
704
// A map is stable iff its unstable bit is clear; read with acquire
// semantics so concurrent observers see a consistent |bit_field3|.
bool Map::is_stable() const {
  return !Bits3::IsUnstableBit::decode(release_acquire_bit_field3());
}
708
711 PropertyDetails details = instance_descriptors(kRelaxedLoad)->GetDetails(i);
712 if (details.representation().MightCauseMapDeprecation()) return true;
713 if (details.kind() == PropertyKind::kData &&
715 return true;
716 }
717 }
718 return false;
719}
720
728
729bool Map::CanTransition() const {
730 // Only JSObject and subtypes have map transitions and back pointers.
731 const InstanceType type = instance_type();
732 // JSExternalObjects are non-extensible and thus the map is allocated in
  // read-only space.
736 // Shared JS objects have fixed shapes and do not transition. Their maps are
737 // either in shared space or RO space.
738 DCHECK_IMPLIES(InstanceTypeChecker::IsAlwaysSharedSpaceJSObject(type),
740 return InstanceTypeChecker::IsJSObject(type) &&
742 !InstanceTypeChecker::IsAlwaysSharedSpaceJSObject(type);
743}
744
746 return map == GetReadOnlyRoots().boolean_map();
747}
748
750 auto roots = GetReadOnlyRoots();
751 return map == roots.null_map() || map == roots.undefined_map();
752}
753
755 return map->instance_type() <= LAST_PRIMITIVE_HEAP_OBJECT_TYPE;
756}
757
759 Tagged<DescriptorArray> descriptors,
760 int number_of_own_descriptors) {
761 SetInstanceDescriptors(isolate, descriptors, number_of_own_descriptors);
762}
763
765 Tagged<DescriptorArray> descriptors) {
766 SetInstanceDescriptors(isolate, descriptors,
767 descriptors->number_of_descriptors());
768}
769
771 if (FIELD_SIZE(kOptionalPaddingOffset) == 0) return;
772 DCHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
773 memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
774 FIELD_SIZE(kOptionalPaddingOffset));
775}
776
778 Tagged<DescriptorArray> descriptors = instance_descriptors(isolate);
779 int number_of_own_descriptors = NumberOfOwnDescriptors();
780 DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
781 {
782 // The following two operations need to happen before the marking write
783 // barrier.
784 descriptors->Append(desc);
785 SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
786#ifndef V8_DISABLE_WRITE_BARRIERS
788 number_of_own_descriptors + 1);
789#endif
790 }
791 // Properly mark the map if the {desc} is an "interesting symbol".
792 if (desc->GetKey()->IsInteresting(isolate)) {
793 set_may_have_interesting_properties(true);
794 }
795 PropertyDetails details = desc->GetDetails();
796 if (details.location() == PropertyLocation::kField) {
799 }
800
801// This function does not support appending double field descriptors and
802// it should never try to (otherwise, layout descriptor must be updated too).
803#ifdef DEBUG
805 !details.representation().IsDouble());
806#endif
807}
808
809// static
811 Tagged<Object> object,
812 Tagged<Map> meta_map) {
813 if (!IsHeapObject(object)) return false;
814 Tagged<HeapObject> heap_object = Cast<HeapObject>(object);
815 return heap_object->map(cage_base) == meta_map;
816}
817
819 Tagged<Map> back_pointer;
820 if (TryGetBackPointer(cage_base, &back_pointer)) {
821 return back_pointer;
822 }
823 return GetReadOnlyRoots().undefined_value();
824}
825
827 Tagged<Map>* back_pointer) const {
829 // We don't expect maps from another native context in the transition tree,
830 // so just compare object's map against current map's meta map.
831 Tagged<Map> meta_map = map(cage_base);
832 if (ConcurrentIsHeapObjectWithMap(cage_base, object, meta_map)) {
833 DCHECK(IsMap(object));
834 // Sanity check - only contextful maps can transition.
835 DCHECK(IsNativeContext(meta_map->native_context_or_null()));
836 *back_pointer = Cast<Map>(object);
837 return true;
838 }
839 // If it was a map that'd mean that there are maps from different native
840 // contexts in the transition tree.
841 DCHECK(!IsMap(object));
842 return false;
843}
844
846 CHECK_GE(instance_type(), FIRST_JS_RECEIVER_TYPE);
847 CHECK(IsMap(value));
848 CHECK(IsUndefined(GetBackPointer()));
849 CHECK_EQ(Cast<Map>(value)->GetConstructorRaw(),
851 set_constructor_or_back_pointer(value, mode);
852}
853
854// static
856 RootIndex map_idx = TryGetMapRootIdxFor(type).value();
857 return UncheckedCast<Map>(roots.object_at(map_idx));
858}
859
860// static
862 ConcurrencyMode cmode) {
863 return TransitionsAccessor(isolate, *this, IsConcurrent(cmode))
864 .SearchSpecial(ReadOnlyRoots(isolate).elements_transition_symbol());
865}
866
867ACCESSORS(Map, dependent_code, Tagged<DependentCode>, kDependentCodeOffset)
868RELAXED_ACCESSORS(Map, prototype_validity_cell, (Tagged<UnionOf<Smi, Cell>>),
869 kPrototypeValidityCellOffset)
872 !IsContextMap(*this), IsNull(value) || !IsContextMap(*this))
875 !IsContextMap(*this),
876 IsNull(value) || !IsContextMap(*this))
879 IsContextMap(*this) || IsMapMap(*this))
880ACCESSORS_CHECKED(Map, native_context_or_null, Tagged<Object>,
882 (IsNull(value) || IsNativeContext(value)) &&
883 (IsContextMap(*this) || IsMapMap(*this)))
884// Unlike native_context_or_null() this getter allows the value to be
885// equal to Smi::uninitialized_deserialization_value().
889 kConstructorOrBackPointerOrNativeContextOffset>::load(cage_base, *this);
890 DCHECK(IsNull(value) || IsNativeContext(value) ||
892 DCHECK(IsContextMap(*this) || IsMapMap(*this));
893 return value;
894}
895#if V8_ENABLE_WEBASSEMBLY
898 IsWasmStructMap(*this) || IsWasmArrayMap(*this) ||
899 IsWasmFuncRefMap(*this))
900#endif // V8_ENABLE_WEBASSEMBLY
901
903 Tagged<Object> validity_cell = prototype_validity_cell(kRelaxedLoad);
904 if (IsSmi(validity_cell)) {
905 // Smi validity cells should always be considered valid.
907 return true;
908 }
909 Tagged<Smi> cell_value = Cast<Smi>(Cast<Cell>(validity_cell)->value());
910 return cell_value == Smi::FromInt(Map::kPrototypeChainValid);
911}
912
914 Tagged<Map> this_meta_map = map();
915 // If the meta map is contextless (as in case of remote object's meta map)
916 // we can't be sure the maps belong to the same context.
917 if (this_meta_map == GetReadOnlyRoots().meta_map()) return false;
918 DCHECK(IsNativeContext(this_meta_map->native_context_or_null()));
919 return this_meta_map == other_map->map();
920}
921
923 Tagged<Map> context_meta_map = context->map()->map();
924 Tagged<Map> this_meta_map = map();
925 DCHECK_NE(context_meta_map, GetReadOnlyRoots().meta_map());
926 return this_meta_map == context_meta_map;
927}
928
// Returns the raw contents of the constructor slot after following the
// back-pointer chain: either the constructor itself or, for maps with a
// non-instance prototype, a {constructor, non-instance_prototype} Tuple2
// (callers such as GetConstructor unwrap it). Safe for concurrent use.
DEF_GETTER(Map, GetConstructorRaw, Tagged<Object>) {
  Tagged<Object> maybe_constructor = constructor_or_back_pointer(cage_base);
  // Follow any back pointers.
  // We don't expect maps from another native context in the transition tree,
  // so just compare object's map against current map's meta map.
  Tagged<Map> meta_map = map(cage_base);
  while (
      ConcurrentIsHeapObjectWithMap(cage_base, maybe_constructor, meta_map)) {
    DCHECK(IsMap(maybe_constructor));
    // Sanity check - only contextful maps can transition.
    DCHECK(IsNativeContext(meta_map->native_context_or_null()));
    maybe_constructor =
        Cast<Map>(maybe_constructor)->constructor_or_back_pointer(cage_base);
  }
  // If it was a map that'd mean that there are maps from different native
  // contexts in the transition tree.
  DCHECK(!IsMap(maybe_constructor));
  return maybe_constructor;
}
948
// Returns the non-instance prototype stored in the second slot of the
// {constructor, non-instance_prototype} Tuple2 held in the constructor
// field. Only valid when has_non_instance_prototype() is true.
DEF_GETTER(Map, GetNonInstancePrototype, Tagged<Object>) {
  DCHECK(has_non_instance_prototype());
  Tagged<Object> raw_constructor = GetConstructorRaw(cage_base);
  CHECK(IsTuple2(raw_constructor));
  // Get prototype from the {constructor, non-instance_prototype} tuple.
  Tagged<Tuple2> non_instance_prototype_constructor_tuple =
      Cast<Tuple2>(raw_constructor);
  Tagged<Object> result = non_instance_prototype_constructor_tuple->value2();
  DCHECK(!IsJSReceiver(result));
  DCHECK(!IsFunctionTemplateInfo(result));
  return result;
}
961
962DEF_GETTER(Map, GetConstructor, Tagged<Object>) {
963 Tagged<Object> maybe_constructor = GetConstructorRaw(cage_base);
964 if (IsTuple2(maybe_constructor)) {
965 // Get constructor from the {constructor, non-instance_prototype} tuple.
966 maybe_constructor = Cast<Tuple2>(maybe_constructor)->value1();
967 }
968 return maybe_constructor;
969}
970
972 int max_steps) {
973 Tagged<Object> maybe_constructor = constructor_or_back_pointer(cage_base);
974 // Follow any back pointers.
975 while (IsMap(maybe_constructor, cage_base)) {
976 if (max_steps-- == 0) return Smi::FromInt(0);
977 maybe_constructor =
978 Cast<Map>(maybe_constructor)->constructor_or_back_pointer(cage_base);
979 }
980 if (IsTuple2(maybe_constructor)) {
981 // Get constructor from the {constructor, non-instance_prototype} tuple.
982 maybe_constructor = Cast<Tuple2>(maybe_constructor)->value1();
983 }
984 return maybe_constructor;
985}
986
987DEF_GETTER(Map, GetFunctionTemplateInfo, Tagged<FunctionTemplateInfo>) {
988 Tagged<Object> constructor = GetConstructor(cage_base);
989 if (IsJSFunction(constructor, cage_base)) {
991 Cast<JSFunction>(constructor)->shared(cage_base);
992 DCHECK(sfi->IsApiFunction());
993 return sfi->api_func_data();
994 }
995 DCHECK(IsFunctionTemplateInfo(constructor, cage_base));
996 return Cast<FunctionTemplateInfo>(constructor);
997}
998
1000 // Never overwrite a back pointer with a constructor.
1002 // Constructor field must contain {constructor, non-instance_prototype} tuple
1003 // for maps with non-instance prototype.
1004 DCHECK_EQ(has_non_instance_prototype(), IsTuple2(constructor));
1005 set_constructor_or_back_pointer(constructor, mode);
1006}
1007
1009 return CopyInitialMap(isolate, map, map->instance_size(),
1010 map->GetInObjectProperties(),
1011 map->UnusedPropertyFields());
1012}
1013
1017
1020 // Slack tracking should only be performed on an initial map.
1021 DCHECK(IsUndefined(GetBackPointer()));
1022 if (!this->IsInobjectSlackTrackingInProgress()) return;
1023 int counter = construction_counter();
1024 set_construction_counter(counter - 1);
1025 if (counter == kSlackTrackingCounterEnd) {
1027 }
1028}
1029
1030int Map::SlackForArraySize(int old_size, int size_limit) {
1031 const int max_slack = size_limit - old_size;
1032 CHECK_LE(0, max_slack);
1033 if (old_size < 4) {
1034 DCHECK_LE(1, max_slack);
1035 return 1;
1036 }
1037 return std::min(max_slack, old_size / 4);
1038}
1039
// Instance size in bytes after removing |slack| unused tagged words.
int Map::InstanceSizeFromSlack(int slack) const {
  return instance_size() - slack * kTaggedSize;
}
1043
1045
1046int NormalizedMapCache::GetIndex(Isolate* isolate, Tagged<Map> map,
1047 Tagged<HeapObject> prototype) {
1049 return map->Hash(isolate, prototype) % NormalizedMapCache::kEntries;
1050}
1051
1053 if (!IsWeakFixedArray(obj, cage_base)) return false;
1055 return false;
1056 }
1057 return true;
1058}
1059
1060} // namespace internal
1061} // namespace v8
1062
1064
1065#endif // V8_OBJECTS_MAP_INL_H_
Builtins::Kind kind
Definition builtins.cc:40
static void DeoptimizeDependencyGroups(Isolate *isolate, ObjectT object, DependencyGroups groups)
static V8_EXPORT_PRIVATE Tagged< FieldType > Any()
Definition field-type.cc:22
static V8_INLINE bool InYoungGeneration(Tagged< Object > object)
static V8_INLINE bool InReadOnlySpace(Tagged< HeapObject > object)
static V8_INLINE bool InAnySharedSpace(Tagged< HeapObject > object)
static const int kFieldsAdded
Definition js-objects.h:954
static const int kMaxInstanceSize
Definition js-objects.h:945
static void CompleteInobjectSlackTracking(Isolate *isolate, Tagged< Map > initial_map)
static V8_INLINE bool ConcurrentIsHeapObjectWithMap(PtrComprCageBase cage_base, Tagged< Object > object, Tagged< Map > meta_map)
Definition map-inl.h:810
InternalIndex::Range IterateOwnDescriptors() const
Definition map-inl.h:245
void SetEnumLength(int length)
Definition map-inl.h:253
int NumberOfFields(ConcurrencyMode cmode) const
Definition map.cc:610
bool IsPrototypeValidityCellValid() const
Definition map-inl.h:902
bool has_sealed_elements() const
Definition map-inl.h:677
Tagged< Map > ElementsTransitionMap(Isolate *isolate, ConcurrencyMode cmode)
Definition map-inl.h:861
void CopyUnusedPropertyFieldsAdjustedForInstanceSize(Tagged< Map > map)
Definition map-inl.h:468
bool HasOutOfObjectProperties() const
Definition map-inl.h:334
uint8_t WasmByte1() const
void SetWasmByte2(uint8_t value)
void AccountAddedOutOfObjectPropertyField(int unused_in_property_array)
Definition map-inl.h:500
void SetBackPointer(Tagged< HeapObject > value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition map-inl.h:845
bool has_prototype_info() const
Definition map-inl.h:593
static const int kNoSlackTracking
Definition map.h:349
void SetConstructorFunctionIndex(int value)
Definition map-inl.h:358
int UsedInstanceSize() const
Definition map-inl.h:425
bool IsDetached(Isolate *isolate) const
Definition map-inl.h:173
Tagged< Name > GetLastDescriptorName(Isolate *isolate) const
Definition map-inl.h:218
bool IsInobjectSlackTrackingInProgress() const
Definition map-inl.h:1014
bool EquivalentToForNormalization(const Tagged< Map > other, ElementsKind elements_kind, Tagged< HeapObject > prototype, PropertyNormalizationMode mode) const
Definition map.cc:2331
int GetInObjectPropertiesStartInWords() const
Definition map-inl.h:324
bool should_be_fast_prototype_map() const
Definition map-inl.h:587
bool CanHaveFastTransitionableElementsKind() const
Definition map-inl.h:169
bool has_fast_double_elements() const
Definition map-inl.h:628
int GetInObjectPropertyOffset(int index) const
Definition map-inl.h:363
void SetOutOfObjectUnusedPropertyFields(int unused_property_fields)
Definition map-inl.h:453
PropertyDetails GetLastDescriptorDetails(Isolate *isolate) const
Definition map-inl.h:222
int InstanceSizeFromSlack(int slack) const
Definition map-inl.h:1040
static Tagged< Map > GetMapFor(ReadOnlyRoots roots, InstanceType type)
Definition map-inl.h:855
bool has_fast_object_elements() const
Definition map-inl.h:620
bool has_dictionary_elements() const
Definition map-inl.h:665
bool is_stable() const
Definition map-inl.h:705
bool has_any_typed_array_or_wasm_array_elements() const
Definition map-inl.h:656
static const int kSlackTrackingCounterEnd
Definition map.h:348
void SetNumberOfOwnDescriptors(int number)
Definition map-inl.h:237
void SetWasmByte1(uint8_t value)
bool CanTransition() const
Definition map-inl.h:729
bool has_typed_array_or_rab_gsab_typed_array_elements() const
Definition map-inl.h:652
static bool IsMostGeneralFieldType(Representation representation, Tagged< FieldType > field_type)
Definition map-inl.h:157
int GetInObjectProperties() const
Definition map-inl.h:341
bool has_fast_smi_elements() const
Definition map-inl.h:616
static constexpr std::optional< RootIndex > TryGetMapRootIdxFor(InstanceType type)
Definition map.h:891
bool has_any_nonextensible_elements() const
Definition map-inl.h:669
InternalIndex LastAdded() const
Definition map-inl.h:226
void InobjectSlackTrackingStep(Isolate *isolate)
Definition map-inl.h:1018
Tagged< FixedArrayBase > GetInitialElements() const
Definition map-inl.h:262
void AppendDescriptor(Isolate *isolate, Descriptor *desc)
Definition map-inl.h:777
bool has_sloppy_arguments_elements() const
Definition map-inl.h:640
bool has_fast_packed_elements() const
Definition map-inl.h:636
V8_EXPORT_PRIVATE void SetInstanceDescriptors(Isolate *isolate, Tagged< DescriptorArray > descriptors, int number_of_own_descriptors, WriteBarrierMode barrier_mode=UPDATE_WRITE_BARRIER)
Definition map.cc:2363
void CopyUnusedPropertyFields(Tagged< Map > map)
Definition map-inl.h:462
bool has_frozen_elements() const
Definition map-inl.h:681
bool has_fast_smi_or_object_elements() const
Definition map-inl.h:624
bool TooManyFastProperties(StoreOrigin store_origin) const
Definition map-inl.h:207
bool has_nonextensible_elements() const
Definition map-inl.h:673
static V8_EXPORT_PRIVATE Handle< Map > Normalize(Isolate *isolate, DirectHandle< Map > map, ElementsKind new_elements_kind, DirectHandle< JSPrototype > new_prototype, PropertyNormalizationMode mode, bool use_cache, const char *reason)
Definition map.cc:1282
static DirectHandle< Map > AddMissingTransitionsForTesting(Isolate *isolate, DirectHandle< Map > split_map, DirectHandle< DescriptorArray > descriptors)
Definition map-inl.h:367
bool has_fast_sloppy_arguments_elements() const
Definition map-inl.h:644
void AccountAddedPropertyField()
Definition map-inl.h:479
int UnusedInObjectProperties() const
Definition map-inl.h:401
void SetInObjectUnusedPropertyFields(int unused_property_fields)
Definition map-inl.h:438
bool has_shared_array_elements() const
Definition map-inl.h:685
bool BelongsToSameNativeContextAs(Tagged< Map > other_map) const
Definition map-inl.h:913
bool TryGetBackPointer(PtrComprCageBase cage_base, Tagged< Map > *back_pointer) const
Definition map-inl.h:826
void UpdateDescriptors(Isolate *isolate, Tagged< DescriptorArray > descriptors, int number_of_own_descriptors)
Definition map-inl.h:758
static void GeneralizeIfCanHaveTransitionableFastElementsKind(Isolate *isolate, InstanceType instance_type, Representation *representation, DirectHandle< FieldType > *field_type)
Definition map-inl.h:180
void InitializeDescriptors(Isolate *isolate, Tagged< DescriptorArray > descriptors)
Definition map-inl.h:764
V8_INLINE void clear_padding()
Definition map-inl.h:770
static V8_EXPORT_PRIVATE Handle< Map > AddMissingTransitions(Isolate *isolate, DirectHandle< Map > map, DirectHandle< DescriptorArray > descriptors)
Definition map.cc:1618
void SetConstructor(Tagged< Object > constructor, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition map-inl.h:999
int NumberOfOwnDescriptors() const
Definition map-inl.h:232
static int SlackForArraySize(int old_size, int size_limit)
Definition map-inl.h:1030
int GetConstructorFunctionIndex() const
Definition map-inl.h:346
static constexpr int kPrototypeChainValid
Definition map.h:517
void SetInObjectPropertiesStartInWords(int value)
Definition map-inl.h:329
bool has_fast_elements() const
Definition map-inl.h:632
void mark_unstable()
Definition map-inl.h:700
uint8_t WasmByte2() const
void init_prototype_and_constructor_or_back_pointer(ReadOnlyRoots roots)
bool CanBeDeprecated() const
Definition map-inl.h:709
int UnusedPropertyFields() const
Definition map-inl.h:384
bool is_abandoned_prototype_map() const
Definition map-inl.h:583
bool has_fast_string_wrapper_elements() const
Definition map-inl.h:648
int EnumLength() const
Definition map-inl.h:249
Tagged< Object > TryGetConstructor(PtrComprCageBase cage_base, int max_steps)
Definition map-inl.h:971
bool TryGetPrototypeInfo(Tagged< PrototypeInfo > *result) const
Definition map-inl.h:598
static Handle< Map > CopyInitialMap(Isolate *isolate, DirectHandle< Map > map)
Definition map-inl.h:1008
void NotifyLeafMapLayoutChange(Isolate *isolate)
Definition map-inl.h:721
static const int kEntries
Definition map.h:1098
PropertyLocation location() const
Representation representation() const
static bool IsPrototypeInfoFast(Tagged< Object > object)
V8_INLINE Tagged< Object > object_at(RootIndex root_index) const
Definition roots-inl.h:143
constexpr bool IsHeapObject() const
static constexpr Representation Tagged()
constexpr bool IsDouble() const
static constexpr Tagged< Smi > uninitialized_deserialization_value()
Definition smi.h:114
static constexpr Tagged< Smi > FromInt(int value)
Definition smi.h:38
static void store(Tagged< HeapObject > host, PtrType value)
Tagged< Map > SearchSpecial(Tagged< Symbol > name)
static void ForDescriptorArray(Tagged< DescriptorArray >, int number_of_own_descriptors)
std::map< const std::string, const std::string > map
ZoneVector< RpoNumber > & result
V8_INLINE constexpr bool IsMaybeReadOnlyJSObject(InstanceType instance_type)
constexpr int kTaggedSize
Definition globals.h:542
bool IsWasmArrayElementsKind(ElementsKind kind)
bool IsPrimitiveMap(Tagged< Map > map)
Definition map-inl.h:754
constexpr int kMaxUInt8
Definition globals.h:378
ReadOnlyRoots GetReadOnlyRoots()
Definition roots-inl.h:86
Map::Bits1::HasPrototypeSlotBit Map::Bits1::HasNamedInterceptorBit Map::Bits1::IsUndetectableBit Map::Bits1::IsConstructorBit Map::Bits2::IsImmutablePrototypeBit Map::Bits3::IsDeprecatedBit is_prototype_map
Definition map-inl.h:133
Map::Bits1::HasPrototypeSlotBit Map::Bits1::HasNamedInterceptorBit Map::Bits1::IsUndetectableBit Map::Bits1::IsConstructorBit bit_field2
Definition map-inl.h:123
bool IsSealedElementsKind(ElementsKind kind)
Tagged(T object) -> Tagged< T >
constexpr bool IsSmiElementsKind(ElementsKind kind)
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in allocation gc speed threshold for starting incremental marking via a task in percent of available threshold for starting incremental marking immediately in percent of available Use a single schedule for determining a marking schedule between JS and C objects schedules the minor GC task with kUserVisible priority max worker number of concurrent for NumberOfWorkerThreads start background threads that allocate memory 
concurrent_array_buffer_sweeping use parallel threads to clear weak refs in the atomic pause trace progress of the incremental marking trace object counts and memory usage report a tick only when allocated zone memory changes by this amount TracingFlags::gc_stats store(v8::tracing::TracingCategoryObserver::ENABLED_BY_NATIVE)) DEFINE_GENERIC_IMPLICATION(trace_gc_object_stats
V8_INLINE constexpr bool IsSmi(TaggedImpl< kRefType, StorageType > obj)
Definition objects.h:665
bool IsAnyNonextensibleElementsKind(ElementsKind kind)
constexpr bool IsObjectElementsKind(ElementsKind kind)
Map::Bits1::HasPrototypeSlotBit Map::Bits1::HasNamedInterceptorBit Map::Bits1::IsUndetectableBit Map::Bits1::IsConstructorBit is_immutable_proto
Definition map-inl.h:123
bool IsNonextensibleElementsKind(ElementsKind kind)
@ FAST_SLOPPY_ARGUMENTS_ELEMENTS
@ FAST_STRING_WRAPPER_ELEMENTS
Map::Bits1::HasPrototypeSlotBit Map::Bits1::HasNamedInterceptorBit Map::Bits1::IsUndetectableBit Map::Bits1::IsConstructorBit Map::Bits2::IsImmutablePrototypeBit Map::Bits3::IsDeprecatedBit Map::Bits3::IsPrototypeMapBit Map::Bits3::IsExtensibleBit construction_counter
Definition map-inl.h:141
Map::Bits1::HasPrototypeSlotBit Map::Bits1::HasNamedInterceptorBit is_undetectable
Definition map-inl.h:113
kConstructorOrBackPointerOrNativeContextOffset
Definition map-inl.h:871
Handle< To > UncheckedCast(Handle< From > value)
Definition handles-inl.h:55
bool IsSmiOrObjectElementsKind(ElementsKind kind)
bool IsFastPackedElementsKind(ElementsKind kind)
constexpr int kTaggedSizeLog2
Definition globals.h:543
static const int kInvalidEnumCacheSentinel
Map::Bits1::HasPrototypeSlotBit has_named_interceptor
Definition map-inl.h:109
!IsContextMap !IsContextMap IsContextMap this IsMapMap this raw_native_context_or_null
Definition map-inl.h:886
kInstanceDescriptorsOffset kTransitionsOrPrototypeInfoOffset kPrototypeOffset
Definition map-inl.h:69
instance_descriptors
Definition map-inl.h:52
bool IsSloppyArgumentsElementsKind(ElementsKind kind)
bool IsBooleanMap(Tagged< Map > map)
Definition map-inl.h:745
kInstanceDescriptorsOffset raw_transitions
Definition map-inl.h:61
Map::Bits1::HasPrototypeSlotBit Map::Bits1::HasNamedInterceptorBit Map::Bits1::IsUndetectableBit Map::Bits1::IsConstructorBit Map::Bits2::IsImmutablePrototypeBit is_deprecated
Definition map-inl.h:129
bool IsFrozenElementsKind(ElementsKind kind)
bool IsNullOrUndefinedMap(Tagged< Map > map)
Definition map-inl.h:749
typename detail::FlattenUnionHelper< Union<>, Ts... >::type UnionOf
Definition union.h:123
bool IsFastElementsKind(ElementsKind kind)
bool IsSharedArrayElementsKind(ElementsKind kind)
static const int kMaxNumberOfDescriptors
Map::Bits1::HasPrototypeSlotBit Map::Bits1::HasNamedInterceptorBit Map::Bits1::IsUndetectableBit Map::Bits1::IsConstructorBit Map::Bits2::IsImmutablePrototypeBit Map::Bits3::IsDeprecatedBit Map::Bits3::IsPrototypeMapBit bit_field3
Definition map-inl.h:137
bool IsDictionaryElementsKind(ElementsKind kind)
V8_INLINE constexpr bool IsHeapObject(TaggedImpl< kRefType, StorageType > obj)
Definition objects.h:669
V8_EXPORT_PRIVATE FlagValues v8_flags
bool IsAny(Tagged< FieldType > obj)
Definition field-type.h:51
V8_INLINE bool IsWasmObject(T obj, Isolate *=nullptr)
Definition objects.h:725
return value
Definition map-inl.h:893
bool IsTypedArrayOrRabGsabTypedArrayElementsKind(ElementsKind kind)
constexpr bool IsDoubleElementsKind(ElementsKind kind)
constexpr int kElementsKindCount
Map::Bits1::HasPrototypeSlotBit Map::Bits1::HasNamedInterceptorBit Map::Bits1::IsUndetectableBit Map::Bits1::IsConstructorBit Map::Bits2::IsImmutablePrototypeBit Map::Bits3::IsDeprecatedBit Map::Bits3::IsPrototypeMapBit is_extensible
Definition map-inl.h:137
constructor_or_back_pointer
Definition map-inl.h:870
Map::Bits1::HasPrototypeSlotBit Map::Bits1::HasNamedInterceptorBit Map::Bits1::IsUndetectableBit Map::Bits1::IsConstructorBit Map::Bits2::IsImmutablePrototypeBit Map::Bits3::IsDeprecatedBit Map::Bits3::IsPrototypeMapBit relaxed_bit_field3
Definition map-inl.h:137
kInstanceDescriptorsOffset kTransitionsOrPrototypeInfoOffset IsNull(value)||IsJSProxy(value)||IsWasmObject(value)||(IsJSObject(value) &&(HeapLayout
Definition map-inl.h:70
constexpr bool IsConcurrent(ConcurrencyMode mode)
Definition globals.h:2599
kInstanceDescriptorsOffset kTransitionsOrPrototypeInfoOffset prototype
Definition map-inl.h:69
!IsContextMap !IsContextMap native_context
Definition map-inl.h:877
Map::Bits1::HasPrototypeSlotBit Map::Bits1::HasNamedInterceptorBit Map::Bits1::IsUndetectableBit Map::Bits1::IsConstructorBit Map::Bits2::IsImmutablePrototypeBit release_acquire_bit_field3
Definition map-inl.h:129
PropertyNormalizationMode
Definition objects.h:60
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
static constexpr RelaxedLoadTag kRelaxedLoad
Definition globals.h:2909
#define RELAXED_WRITE_BYTE_FIELD(p, offset, value)
#define RELAXED_READ_UINT32_FIELD(p, offset)
#define ACCESSORS(holder, name, type, offset)
#define TQ_OBJECT_CONSTRUCTORS_IMPL(Type)
#define RELAXED_WRITE_UINT32_FIELD(p, offset, value)
#define DEF_HEAP_OBJECT_PREDICATE(holder, name)
#define ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, set_condition)
#define ACQUIRE_READ_UINT32_FIELD(p, offset)
#define BIT_FIELD_ACCESSORS(holder, field, name, BitField)
#define RELEASE_WRITE_UINT32_FIELD(p, offset, value)
#define RELAXED_ACCESSORS(holder, name, type, offset)
#define RELAXED_WRITE_UINT16_FIELD(p, offset, value)
#define RELAXED_READ_BYTE_FIELD(p, offset)
#define BIT_FIELD_ACCESSORS2(holder, get_field, set_field, name, BitField)
#define RELAXED_READ_UINT16_FIELD(p, offset)
#define RELEASE_ACQUIRE_ACCESSORS(holder, name, type, offset)
#define ACCESSORS_CHECKED(holder, name, type, offset, condition)
#define NEVER_READ_ONLY_SPACE_IMPL(Type)
#define RELAXED_ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, set_condition)
#define DEF_GETTER(Camel, Lower, Bit)
#define DCHECK_LE(v1, v2)
Definition logging.h:490
#define CHECK_GE(lhs, rhs)
#define CHECK(condition)
Definition logging.h:124
#define CHECK_LT(lhs, rhs)
#define CHECK_LE(lhs, rhs)
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define DCHECK_GT(v1, v2)
Definition logging.h:487
constexpr bool IsAligned(T value, U alignment)
Definition macros.h:403
#define FIELD_SIZE(Name)
Definition utils.h:259