v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
objects-inl.h
1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_OBJECTS_OBJECTS_INL_H_
6#define V8_OBJECTS_OBJECTS_INL_H_
7
8// Review notes:
9//
10// - The use of macros in these inline functions may seem superfluous
11// but it is absolutely needed to make sure gcc generates optimal
12// code. gcc is not happy when attempting to inline too deep.
13
14#include "src/objects/objects.h"
15// Include the non-inl header before the rest of the headers.
16
17#include "include/v8-internal.h"
18#include "src/base/bits.h"
19#include "src/base/memory.h"
22#include "src/common/globals.h"
25#include "src/heap/factory.h"
32#include "src/objects/casting.h"
38#include "src/objects/js-proxy-inl.h" // TODO(jkummerow): Drop.
39#include "src/objects/keys.h"
41#include "src/objects/lookup-inl.h" // TODO(jkummerow): Drop.
50#include "src/objects/slots.h"
51#include "src/objects/smi-inl.h"
56#include "src/roots/roots.h"
63#include "src/sandbox/isolate.h"
65
66// Has to be the last include (doesn't have include guards):
67#include "src/objects/object-macros.h"
68
69namespace v8 {
70namespace internal {
71
72template <typename T>
73class Managed;
74template <typename T>
75class TrustedManaged;
76
78
79Tagged<Smi> PropertyDetails::AsSmi() const {
80 // Ensure the upper 2 bits have the same value by sign extending it. This is
81 // necessary to be able to use the 31st bit of the property details.
82 int value = value_ << 1;
83 return Smi::FromInt(value >> 1);
84}
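// (Illustrative sketch, not from the original sources; the bit pattern is made
// up.) A Smi payload is 31 bits, so bit 31 must mirror bit 30 for the value to
// round-trip through Smi::FromInt:
//
//   value_              = 0x40000001   // bit 30 set, bit 31 clear
//   value_ << 1         = 0x80000002   // bit 30 pushed into bit 31
//   (value_ << 1) >> 1  = 0xC0000001   // arithmetic shift sign-extends, so the
//                                      // top two bits now agree
//
// That sign extension is what lets bit 31 of the property details be reused.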
85
90
92 return IsSmi(obj) &&
94}
95
100
101#define IS_TYPE_FUNCTION_DEF(type_) \
102 bool Is##type_(Tagged<Object> obj) { \
103 return IsHeapObject(obj) && Is##type_(Cast<HeapObject>(obj)); \
104 } \
105 bool Is##type_(Tagged<Object> obj, PtrComprCageBase cage_base) { \
106 return IsHeapObject(obj) && Is##type_(Cast<HeapObject>(obj), cage_base); \
107 } \
108 bool Is##type_(HeapObject obj) { \
109 static_assert(kTaggedCanConvertToRawObjects); \
110 return Is##type_(Tagged<HeapObject>(obj)); \
111 } \
112 bool Is##type_(HeapObject obj, PtrComprCageBase cage_base) { \
113 static_assert(kTaggedCanConvertToRawObjects); \
114 return Is##type_(Tagged<HeapObject>(obj), cage_base); \
115 } \
116 bool Is##type_(const HeapObjectLayout* obj) { \
117 return Is##type_(Tagged<HeapObject>(obj)); \
118 } \
119 bool Is##type_(const HeapObjectLayout* obj, PtrComprCageBase cage_base) { \
120 return Is##type_(Tagged<HeapObject>(obj), cage_base); \
121 }
123IS_TYPE_FUNCTION_DEF(HashTableBase)
124IS_TYPE_FUNCTION_DEF(SmallOrderedHashTable)
126#undef IS_TYPE_FUNCTION_DEF
127
129 return IsHole(obj, cage_base);
130}
131
132bool IsAnyHole(Tagged<Object> obj) { return IsHole(obj); }
133
134#define IS_TYPE_FUNCTION_DEF(Type, Value, _) \
135 bool Is##Type(Tagged<Object> obj, Isolate* isolate) { \
136 return Is##Type(obj, ReadOnlyRoots(isolate)); \
137 } \
138 bool Is##Type(Tagged<Object> obj, LocalIsolate* isolate) { \
139 return Is##Type(obj, ReadOnlyRoots(isolate)); \
140 } \
141 bool Is##Type(Tagged<Object> obj) { \
142 return Is##Type(obj, GetReadOnlyRoots()); \
143 } \
144 bool Is##Type(Tagged<HeapObject> obj) { \
145 return Is##Type(obj, GetReadOnlyRoots()); \
146 } \
147 bool Is##Type(HeapObject obj) { \
148 static_assert(kTaggedCanConvertToRawObjects); \
149 return Is##Type(Tagged<HeapObject>(obj)); \
150 } \
151 bool Is##Type(const HeapObjectLayout* obj, Isolate* isolate) { \
152 return Is##Type(Tagged<HeapObject>(obj), isolate); \
153 } \
154 bool Is##Type(const HeapObjectLayout* obj) { \
155 return Is##Type(Tagged<HeapObject>(obj)); \
156 }
159#undef IS_TYPE_FUNCTION_DEF
160
161#if V8_STATIC_ROOTS_BOOL
162#define IS_TYPE_FUNCTION_DEF(Type, Value, CamelName) \
163 bool Is##Type(Tagged<Object> obj, ReadOnlyRoots roots) { \
164 SLOW_DCHECK(CheckObjectComparisonAllowed(obj.ptr(), roots.Value().ptr())); \
165 return V8HeapCompressionScheme::CompressObject(obj.ptr()) == \
166 StaticReadOnlyRoot::k##CamelName; \
167 }
168#else
169#define IS_TYPE_FUNCTION_DEF(Type, Value, _) \
170 bool Is##Type(Tagged<Object> obj, ReadOnlyRoots roots) { \
171 return obj == roots.Value(); \
172 }
173#endif
176#undef IS_TYPE_FUNCTION_DEF
177
179 return IsNullOrUndefined(obj, ReadOnlyRoots(isolate));
180}
181
183 return IsNullOrUndefined(obj, ReadOnlyRoots(local_isolate));
184}
185
187 return IsNull(obj, roots) || IsUndefined(obj, roots);
188}
189
193
197
198bool IsZero(Tagged<Object> obj) { return obj == Smi::zero(); }
199
201 Tagged<Symbol> symbol;
202 return TryCast<Symbol>(obj, &symbol) && !symbol->is_private();
203}
205 Tagged<Symbol> symbol;
206 return TryCast<Symbol>(obj, &symbol) && symbol->is_private();
207}
208
212
213// TODO(leszeks): Expand Is<T> to all types.
214#define IS_HELPER_DEF(Type, ...) \
215 template <> \
216 struct CastTraits<Type> { \
217 static inline bool AllowFrom(Tagged<Object> value) { \
218 return Is##Type(value); \
219 } \
220 static inline bool AllowFrom(Tagged<HeapObject> value) { \
221 return Is##Type(value); \
222 } \
223 };
228
229#define IS_HELPER_DEF_STRUCT(NAME, Name, name) IS_HELPER_DEF(Name)
231#undef IS_HELPER_DEF_STRUCT
232
234#undef IS_HELPER_DEF
235
236template <typename... T>
237struct CastTraits<Union<T...>> {
238 static inline bool AllowFrom(Tagged<Object> value) {
239 return (Is<T>(value) || ...);
240 }
241 static inline bool AllowFrom(Tagged<HeapObject> value) {
242 return (Is<T>(value) || ...);
243 }
244};
245template <>
247 static inline bool AllowFrom(Tagged<Object> value) {
248 return IsPrimitive(value);
249 }
250 static inline bool AllowFrom(Tagged<HeapObject> value) {
251 return IsPrimitive(value);
252 }
253};
254template <>
256 static inline bool AllowFrom(Tagged<Object> value) {
257 return IsPrimitive(value) || IsJSReceiver(value);
258 }
259 static inline bool AllowFrom(Tagged<HeapObject> value) {
260 return IsPrimitive(value) || IsJSReceiver(value);
261 }
262};
263template <>
265 template <typename From>
266 static inline bool AllowFrom(Tagged<From> value) {
268 return TryCast<AllocationSite>(value, &site) && site->HasWeakNext();
269 }
270};
271
272template <>
274 static inline bool AllowFrom(Tagged<Object> value) {
275 return value == FieldType::None() || value == FieldType::Any() ||
276 IsMap(value);
277 }
278 static inline bool AllowFrom(Tagged<HeapObject> value) {
279 return IsMap(value);
280 }
281};
282
283template <typename T>
284struct CastTraits<Managed<T>> : public CastTraits<Foreign> {};
285template <typename T>
286struct CastTraits<TrustedManaged<T>> : public CastTraits<TrustedForeign> {};
287template <typename T>
288struct CastTraits<PodArray<T>> : public CastTraits<ByteArray> {};
289template <typename T>
290struct CastTraits<TrustedPodArray<T>> : public CastTraits<TrustedByteArray> {};
291template <typename T, typename Base>
292struct CastTraits<FixedIntegerArrayBase<T, Base>> : public CastTraits<Base> {};
293template <typename Base>
294struct CastTraits<FixedAddressArrayBase<Base>> : public CastTraits<Base> {};
295
296template <>
297struct CastTraits<JSRegExpResultIndices> : public CastTraits<JSArray> {};
298template <>
300 : public CastTraits<TrustedWeakFixedArray> {};
301template <>
302struct CastTraits<FreshlyAllocatedBigInt> : public CastTraits<BigInt> {};
303template <>
304struct CastTraits<JSIteratorResult> : public CastTraits<JSObject> {};
305
306template <>
308 : public CastTraits<TrustedByteArray> {};
309
310template <class T>
312 requires((std::is_arithmetic_v<T> || std::is_enum_v<T>) &&
313 !std::is_floating_point_v<T>)
314{
315 // Pointer compression causes types larger than kTaggedSize to be
316 // unaligned. Atomic loads must be aligned.
318 using AtomicT = typename base::AtomicTypeFromByteWidth<sizeof(T)>::type;
319 return static_cast<T>(base::AsAtomicImpl<AtomicT>::Relaxed_Load(
320 reinterpret_cast<AtomicT*>(field_address(offset))));
321}
322
323template <class T>
325 requires((std::is_arithmetic_v<T> || std::is_enum_v<T>) &&
326 !std::is_floating_point_v<T>)
327{
328 // Pointer compression causes types larger than kTaggedSize to be
329 // unaligned. Atomic stores must be aligned.
331 using AtomicT = typename base::AtomicTypeFromByteWidth<sizeof(T)>::type;
333 reinterpret_cast<AtomicT*>(field_address(offset)),
334 static_cast<AtomicT>(value));
335}
336
337template <class T>
339 requires((std::is_arithmetic_v<T> || std::is_enum_v<T>) &&
340 !std::is_floating_point_v<T>)
341{
342 // Pointer compression causes types larger than kTaggedSize to be
343 // unaligned. Atomic loads must be aligned.
345 using AtomicT = typename base::AtomicTypeFromByteWidth<sizeof(T)>::type;
346 return static_cast<T>(base::AsAtomicImpl<AtomicT>::Acquire_Load(
347 reinterpret_cast<AtomicT*>(field_address(offset))));
348}
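// (Illustrative usage sketch, not from the original sources; kFlagsOffset and
// kSomeBit are hypothetical names.) Relaxed_ReadField / Relaxed_WriteField /
// Acquire_ReadField wrap a raw in-object field in an atomic access of the same
// byte width, so concurrent threads (e.g. the GC) can read it without tearing:
//
//   uint16_t flags = obj->Relaxed_ReadField<uint16_t>(kFlagsOffset);
//   obj->Relaxed_WriteField<uint16_t>(kFlagsOffset, flags | kSomeBit);
//
// As the comments above note, fields wider than kTaggedSize may be unaligned
// under pointer compression, so such fields cannot go through this path.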
349
350// static
351template <typename CompareAndSwapImpl>
353 Tagged<Object> expected, Tagged<Object> value,
354 CompareAndSwapImpl compare_and_swap_impl) {
355 Tagged<Object> actual_expected = expected;
356 do {
357 Tagged<Object> old_value = compare_and_swap_impl(actual_expected, value);
358 if (old_value == actual_expected || !IsNumber(old_value) ||
359 !IsNumber(actual_expected)) {
360 return old_value;
361 }
364 Object::NumberValue(Cast<Number>(actual_expected)))) {
365 return old_value;
366 }
367 // The pointer comparison failed, but the numbers are equal. This can
368 // happen even if both numbers are HeapNumbers with the same value.
369 // Try again in the next iteration.
370 actual_expected = old_value;
371 } while (true);
372}
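// (Commentary sketch, not from the original sources.) The loop above retries the
// compare-and-swap only in one ambiguous case: the raw pointer comparison failed,
// yet both the expected value and the value actually found in the field are
// Numbers with the same numeric value (for example, two distinct HeapNumber
// objects both holding 1.5). The found value then becomes the new "expected" and
// the CAS is retried, so callers observe SameNumberValue semantics rather than
// pointer identity.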
373
375#if V8_STATIC_ROOTS_BOOL
376 // The following assert ensures that the page size check covers all our static
377 // roots. This is not strictly necessary and can be relaxed in future as the
378 // most prominent static roots are anyways allocated at the beginning of the
379 // first page.
380 static_assert(StaticReadOnlyRoot::kLastAllocatedRoot < kRegularPageSize);
381 return obj < kRegularPageSize;
382#else // !V8_STATIC_ROOTS_BOOL
383 return false;
384#endif // !V8_STATIC_ROOTS_BOOL
385}
386
388#ifdef V8_COMPRESS_POINTERS
389 // This check is only valid for objects in the main cage.
390 DCHECK(obj.IsSmi() || obj.IsInMainCageBase());
393#else // V8_COMPRESS_POINTERS
394 return false;
395#endif // V8_COMPRESS_POINTERS
396}
397
399#ifdef V8_ENABLE_SANDBOX
400 return !InsideSandbox(obj.address()) ||
401 MemoryChunk::FromHeapObject(obj)->SandboxSafeInReadOnlySpace();
402#else
403 return true;
404#endif
405}
406
408 // Do not optimize objects in the shared heap because it is not
409 // threadsafe. Objects in the shared heap have fixed layouts and their maps
410 // never change.
411 return IsJSObject(obj) && !HeapLayout::InWritableSharedSpace(*obj);
412}
413
415 const InstanceType instance_type = map->instance_type();
416 return InstanceTypeChecker::IsJSAPIObjectWithEmbedderSlots(instance_type) ||
417 InstanceTypeChecker::IsJSSpecialObject(instance_type);
418}
419
421 return IsJSApiWrapperObject(js_obj->map());
422}
423
425 return IsInternalizedString(obj, cage_base) || IsSymbol(obj, cage_base);
426}
427
429 return obj->map(cage_base)->is_callable();
430}
431
433 return IsCallable(obj, cage_base) && IsJSProxy(obj, cage_base);
434}
435
437 InstanceType type = obj->map(cage_base)->instance_type();
438 return IsCallable(obj, cage_base) &&
439 (type == JS_API_OBJECT_TYPE || type == JS_SPECIAL_API_OBJECT_TYPE);
440}
441
443 return IsForeign(obj, cage_base) &&
444 Cast<Foreign>(obj)->foreign_address_unchecked() != kNullAddress;
445}
446
448 return obj->map(cage_base)->is_constructor();
449}
450
451DEF_HEAP_OBJECT_PREDICATE(HeapObject, IsSourceTextModuleInfo) {
452 return obj->map(cage_base) == GetReadOnlyRoots().module_info_map();
453}
454
456 if (!IsString(obj, cage_base)) return false;
457 return StringShape(Cast<String>(obj)->map()).IsCons();
458}
459
461 if (!IsString(obj, cage_base)) return false;
462 return StringShape(Cast<String>(obj)->map()).IsThin();
463}
464
466 if (!IsString(obj, cage_base)) return false;
467 return StringShape(Cast<String>(obj)->map()).IsSliced();
468}
469
471 if (!IsString(obj, cage_base)) return false;
472 return StringShape(Cast<String>(obj)->map()).IsSequential();
473}
474
476 if (!IsString(obj, cage_base)) return false;
478}
479
481 if (!IsString(obj, cage_base)) return false;
483}
484
485DEF_HEAP_OBJECT_PREDICATE(HeapObject, IsExternalOneByteString) {
486 if (!IsString(obj, cage_base)) return false;
488}
489
490DEF_HEAP_OBJECT_PREDICATE(HeapObject, IsExternalTwoByteString) {
491 if (!IsString(obj, cage_base)) return false;
493}
494
496 if (IsSmi(obj)) return true;
497 Tagged<HeapObject> heap_object = Cast<HeapObject>(obj);
498 PtrComprCageBase cage_base = GetPtrComprCageBase(heap_object);
499 return IsHeapNumber(heap_object, cage_base);
500}
501
503 return obj.IsSmi() || IsHeapNumber(obj, cage_base);
504}
505
507 if (IsSmi(obj)) return true;
508 Tagged<HeapObject> heap_object = Cast<HeapObject>(obj);
509 PtrComprCageBase cage_base = GetPtrComprCageBase(heap_object);
510 return IsHeapNumber(heap_object, cage_base) ||
511 IsBigInt(heap_object, cage_base);
512}
513
515 return IsNumber(obj, cage_base) || IsBigInt(obj, cage_base);
516}
517
518DEF_HEAP_OBJECT_PREDICATE(HeapObject, IsTemplateLiteralObject) {
519 return IsJSArray(obj, cage_base);
520}
521
522#if V8_INTL_SUPPORT
523DEF_HEAP_OBJECT_PREDICATE(HeapObject, IsJSSegmentDataObject) {
524 return IsJSObject(obj, cage_base);
525}
526DEF_HEAP_OBJECT_PREDICATE(HeapObject, IsJSSegmentDataObjectWithIsWordLike) {
527 return IsJSObject(obj, cage_base);
528}
529#endif // V8_INTL_SUPPORT
530
531DEF_HEAP_OBJECT_PREDICATE(HeapObject, IsDeoptimizationData) {
532 // Must be a (protected) fixed array.
533 if (!IsProtectedFixedArray(obj, cage_base)) return false;
534
535 // There's no sure way to detect the difference between a fixed array and
536 // a deoptimization data array. Since this is used for asserts we can
537 // check that the length is zero or else the fixed size plus a multiple of
538 // the entry size.
539 int length = Cast<ProtectedFixedArray>(obj)->length();
540 if (length == 0) return true;
541
543 return length >= 0 && length % DeoptimizationData::kDeoptEntrySize == 0;
544}
545
547 return IsFixedArrayExact(obj, cage_base);
548}
549
551 return IsWeakArrayList(obj, cage_base);
552}
553
554DEF_HEAP_OBJECT_PREDICATE(HeapObject, IsOSROptimizedCodeCache) {
555 return IsWeakFixedArray(obj, cage_base);
556}
557
559 return IsJSPrimitiveWrapper(obj, cage_base) &&
560 IsString(Cast<JSPrimitiveWrapper>(obj)->value(), cage_base);
561}
562
564 return IsJSPrimitiveWrapper(obj, cage_base) &&
565 IsBoolean(Cast<JSPrimitiveWrapper>(obj)->value(), cage_base);
566}
567
569 return IsJSPrimitiveWrapper(obj, cage_base) &&
570 IsScript(Cast<JSPrimitiveWrapper>(obj)->value(), cage_base);
571}
572
574 return IsJSPrimitiveWrapper(obj, cage_base) &&
575 IsNumber(Cast<JSPrimitiveWrapper>(obj)->value(), cage_base);
576}
577
579 return IsJSPrimitiveWrapper(obj, cage_base) &&
580 IsBigInt(Cast<JSPrimitiveWrapper>(obj)->value(), cage_base);
581}
582
584 return IsJSPrimitiveWrapper(obj, cage_base) &&
585 IsSymbol(Cast<JSPrimitiveWrapper>(obj)->value(), cage_base);
586}
587
589 return IsHashTable(obj, cage_base);
590}
591
593 return IsHashTable(obj, cage_base);
594}
595
596DEF_HEAP_OBJECT_PREDICATE(HeapObject, IsCompilationCacheTable) {
597 return IsHashTable(obj, cage_base);
598}
599
601 return IsHashTable(obj, cage_base);
602}
603
605 return IsHashTable(obj, cage_base);
606}
607
608DEF_HEAP_OBJECT_PREDICATE(HeapObject, IsObjectTwoHashTable) {
609 return IsHashTable(obj, cage_base);
610}
611
613 return IsHashTable(obj, cage_base);
614}
615
616// static
618 if (obj.IsSmi()) return true;
619 Tagged<HeapObject> this_heap_object = Cast<HeapObject>(obj);
620 PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object);
621 return IsPrimitiveMap(this_heap_object->map(cage_base));
622}
623
624// static
626 return obj.IsSmi() || IsPrimitiveMap(Cast<HeapObject>(obj)->map(cage_base));
627}
628
629// static
631 if (IsSmi(*object)) return Just(false);
632 auto heap_object = Cast<HeapObject>(object);
633 if (IsJSArray(*heap_object)) return Just(true);
634 if (!IsJSProxy(*heap_object)) return Just(false);
635 return JSProxy::IsArray(Cast<JSProxy>(object));
636}
637
639 return obj->map(cage_base)->is_undetectable();
640}
641
643 if (IsJSGlobalProxy(obj, cage_base)) {
646 proxy->GetIsolate()->context()->global_object();
647 return proxy->IsDetachedFrom(global);
648 }
649 return obj->map(cage_base)->is_access_check_needed();
650}
651
652#define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
653 bool Is##Name(Tagged<Object> obj) { \
654 return IsHeapObject(obj) && Is##Name(Cast<HeapObject>(obj)); \
655 } \
656 bool Is##Name(Tagged<Object> obj, PtrComprCageBase cage_base) { \
657 return IsHeapObject(obj) && Is##Name(Cast<HeapObject>(obj), cage_base); \
658 } \
659 bool Is##Name(HeapObject obj) { \
660 static_assert(kTaggedCanConvertToRawObjects); \
661 return Is##Name(Tagged<HeapObject>(obj)); \
662 } \
663 bool Is##Name(HeapObject obj, PtrComprCageBase cage_base) { \
664 static_assert(kTaggedCanConvertToRawObjects); \
665 return Is##Name(Tagged<HeapObject>(obj), cage_base); \
666 } \
667 bool Is##Name(const HeapObjectLayout* obj) { \
668 return Is##Name(Tagged<HeapObject>(obj)); \
669 } \
670 bool Is##Name(const HeapObjectLayout* obj, PtrComprCageBase cage_base) { \
671 return Is##Name(Tagged<HeapObject>(obj), cage_base); \
672 }
673// static
675#undef MAKE_STRUCT_PREDICATE
676
677// static
679 DCHECK(IsNumber(obj));
680 return IsSmi(obj) ? static_cast<double>(UncheckedCast<Smi>(obj).value())
681 : UncheckedCast<HeapNumber>(obj)->value();
682}
683// TODO(leszeks): Remove in favour of Tagged<Number>
684// static
686 return NumberValue(Cast<Number>(obj));
687}
692 return NumberValue(Cast<Number>(obj));
693}
694
695// static
696template <typename T, template <typename> typename HandleType>
697 requires(std::is_convertible_v<HandleType<T>, DirectHandle<T>>)
698Maybe<double> Object::IntegerValue(Isolate* isolate, HandleType<T> input) {
700 isolate, input, ConvertToNumber(isolate, input), Nothing<double>());
701 if (IsSmi(*input)) {
702 return Just(static_cast<double>(Cast<Smi>(*input).value()));
703 }
704 return Just(DoubleToInteger(Cast<HeapNumber>(*input)->value()));
705}
706
707// static
708bool Object::SameNumberValue(double value1, double value2) {
709 // Compare values bitwise, to cover -0 being different from 0 -- we'd need to
710 // look at sign bits anyway if we'd done a double comparison, so we may as
711 // well compare bitwise immediately.
712 uint64_t value1_bits = base::bit_cast<uint64_t>(value1);
713 uint64_t value2_bits = base::bit_cast<uint64_t>(value2);
714 if (value1_bits == value2_bits) {
715 return true;
716 }
717 // SameNumberValue(NaN, NaN) is true even for NaNs with different bit
718 // representations.
719 return std::isnan(value1) && std::isnan(value2);
720}
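// (Illustrative values, not from the original sources.)
//
//   SameNumberValue(0.0, -0.0) == false   // bit patterns differ in the sign bit
//   SameNumberValue(1.5, 1.5)  == true    // identical bit patterns
//   SameNumberValue(NaN, NaN)  == true    // any two NaNs, even with different bits
//
// This is the SameValue treatment of +0/-0 and NaN; a plain double comparison
// would instead report 0.0 == -0.0 and NaN != NaN.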
721
722// static
724 return IsHeapNumber(obj) && std::isnan(Cast<HeapNumber>(obj)->value());
725}
726
727// static
729 return IsHeapNumber(obj) && i::IsMinusZero(Cast<HeapNumber>(obj)->value());
730}
731
732// static
734 // Dictionary is covered under FixedArray. ByteArray is used
735 // for the JSTypedArray backing stores.
736 return IsFixedArray(obj) || IsFixedDoubleArray(obj) || IsByteArray(obj);
737}
738
739// static
741 DCHECK(!IsPropertyCell(obj));
742 if (filter == PRIVATE_NAMES_ONLY) {
743 if (!IsSymbol(obj)) return true;
744 return !Cast<Symbol>(obj)->is_private_name();
745 } else if (IsSymbol(obj)) {
746 if (filter & SKIP_SYMBOLS) return true;
747
748 if (Cast<Symbol>(obj)->is_private()) return true;
749 } else {
750 if (filter & SKIP_STRINGS) return true;
751 }
752 return false;
753}
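// (Illustrative cases, not from the original sources; the argument names are
// placeholders.) Returning true means the key is filtered out of the result set:
//
//   FilterKey(a_string,         SKIP_STRINGS)       -> true
//   FilterKey(a_public_symbol,  SKIP_SYMBOLS)       -> true
//   FilterKey(a_private_symbol, ALL_PROPERTIES)     -> true   // private symbols are always dropped
//   FilterKey(a_private_name,   PRIVATE_NAMES_ONLY) -> false  // only private names survive this filter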
754
755// static
757 PtrComprCageBase cage_base) {
758 if (IsSmi(obj)) {
759 return Representation::Smi();
760 }
761 Tagged<HeapObject> heap_object = Cast<HeapObject>(obj);
762 if (IsHeapNumber(heap_object, cage_base)) {
763 return Representation::Double();
764 } else if (IsUninitialized(heap_object)) {
765 return Representation::None();
766 }
768}
769
770// static
772 PtrComprCageBase cage_base) {
773 if (IsSmi(obj)) return PACKED_SMI_ELEMENTS;
774 if (IsHeapNumber(obj, cage_base)) return PACKED_DOUBLE_ELEMENTS;
775#ifdef V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
776 if (IsUndefined(obj, GetReadOnlyRoots())) {
778 }
779#endif // V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
780 return PACKED_ELEMENTS;
781}
782
783// static
785 Representation representation,
786 bool allow_coercion) {
787 if (representation.IsSmi()) {
788 return IsSmi(obj);
789 } else if (representation.IsDouble()) {
790 return allow_coercion ? IsNumber(obj) : IsHeapNumber(obj);
791 } else if (representation.IsHeapObject()) {
792 return IsHeapObject(obj);
793 } else if (representation.IsNone()) {
794 return false;
795 }
796 return true;
797}
798
799// static
800bool Object::ToUint32(Tagged<Object> obj, uint32_t* value) {
801 if (IsSmi(obj)) {
802 int num = Smi::ToInt(obj);
803 if (num < 0) return false;
804 *value = static_cast<uint32_t>(num);
805 return true;
806 }
807 if (IsHeapNumber(obj)) {
808 double num = Cast<HeapNumber>(obj)->value();
809 return DoubleToUint32IfEqualToSelf(num, value);
810 }
811 return false;
812}
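// (Illustrative sketch, not from the original sources.) ToUint32 only succeeds
// for values that are already exact uint32 integers:
//
//   uint32_t out;
//   Object::ToUint32(Smi::FromInt(7), &out);    // true, out == 7
//   Object::ToUint32(Smi::FromInt(-1), &out);   // false: negative
//   // A HeapNumber holding 3.5 or 2^32 also fails, because
//   // DoubleToUint32IfEqualToSelf requires the double to equal its uint32 cast.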
813
814// static
815template <typename T, template <typename> typename HandleType>
816 requires(std::is_convertible_v<HandleType<T>, DirectHandle<T>>)
817typename HandleType<JSReceiver>::MaybeType Object::ToObject(
818 Isolate* isolate, HandleType<T> object, const char* method_name) {
819 if (IsJSReceiver(*object)) return Cast<JSReceiver>(object);
820 return ToObjectImpl(isolate, object, method_name);
821}
822
823// static
824template <template <typename> typename HandleType>
825typename HandleType<Name>::MaybeType Object::ToName(Isolate* isolate,
826 HandleType<Object> input)
827 requires(std::is_convertible_v<HandleType<Object>, DirectHandle<Object>>)
828{
829 if (IsName(*input)) return Cast<Name>(input);
830 return ConvertToName(isolate, input);
831}
832
833// static
834template <typename T, template <typename> typename HandleType>
835 requires(std::is_convertible_v<HandleType<T>, DirectHandle<T>>)
836typename HandleType<Object>::MaybeType Object::ToPropertyKey(
837 Isolate* isolate, HandleType<T> value) {
838 if (IsSmi(*value) || IsName(Cast<HeapObject>(*value))) return value;
839 return ConvertToPropertyKey(isolate, value);
840}
841
842// static
843template <typename T, template <typename> typename HandleType>
844 requires(std::is_convertible_v<HandleType<T>, DirectHandle<T>>)
845typename HandleType<Object>::MaybeType Object::ToPrimitive(
846 Isolate* isolate, HandleType<T> input, ToPrimitiveHint hint) {
847 if (IsPrimitive(*input)) return input;
848 return JSReceiver::ToPrimitive(isolate, Cast<JSReceiver>(input), hint);
849}
850
851// static
852template <typename T, template <typename> typename HandleType>
853 requires(std::is_convertible_v<HandleType<T>, DirectHandle<T>>)
854typename HandleType<Number>::MaybeType Object::ToNumber(Isolate* isolate,
855 HandleType<T> input) {
856 if (IsNumber(*input)) return Cast<Number>(input); // Shortcut.
857 return ConvertToNumber(isolate, Cast<Object>(input));
858}
859
860// static
861template <typename T, template <typename> typename HandleType>
862 requires(std::is_convertible_v<HandleType<T>, DirectHandle<T>>)
863typename HandleType<Object>::MaybeType Object::ToNumeric(Isolate* isolate,
864 HandleType<T> input) {
865 if (IsNumber(*input) || IsBigInt(*input)) return input; // Shortcut.
866 return ConvertToNumeric(isolate, Cast<Object>(input));
867}
868
869// static
870template <typename T, template <typename> typename HandleType>
871 requires(std::is_convertible_v<HandleType<T>, DirectHandle<T>>)
872typename HandleType<Number>::MaybeType Object::ToInteger(Isolate* isolate,
873 HandleType<T> input) {
874 if (IsSmi(*input)) return Cast<Smi>(input);
875 return ConvertToInteger(isolate, Cast<Object>(input));
876}
877
878// static
879template <typename T, template <typename> typename HandleType>
880 requires(std::is_convertible_v<HandleType<T>, DirectHandle<T>>)
881typename HandleType<Number>::MaybeType Object::ToInt32(Isolate* isolate,
882 HandleType<T> input) {
883 if (IsSmi(*input)) return Cast<Smi>(input);
884 return ConvertToInt32(isolate, Cast<Object>(input));
885}
886
887// static
888template <typename T, template <typename> typename HandleType>
889 requires(std::is_convertible_v<HandleType<T>, DirectHandle<T>>)
890typename HandleType<Number>::MaybeType Object::ToUint32(Isolate* isolate,
891 HandleType<T> input) {
892 if (IsSmi(*input)) {
893 return typename HandleType<Number>::MaybeType(
894 Smi::ToUint32Smi(Cast<Smi>(*input)), isolate);
895 }
896 return ConvertToUint32(isolate, Cast<Object>(input));
897}
898
899// static
900template <typename T, template <typename> typename HandleType>
901 requires(std::is_convertible_v<HandleType<T>, DirectHandle<T>>)
902typename HandleType<String>::MaybeType Object::ToString(Isolate* isolate,
903 HandleType<T> input) {
904 if (IsString(*input)) return Cast<String>(input);
905 return ConvertToString(isolate, Cast<Object>(input));
906}
907
908// static
910 DirectHandle<Object> input) {
911 if (IsSmi(*input)) {
912 int value = std::max(Smi::ToInt(*input), 0);
913 return handle(Smi::FromInt(value), isolate);
914 }
915 return ConvertToLength(isolate, input);
916}
917
918// static
919template <typename T, template <typename> typename HandleType>
920 requires(std::is_convertible_v<HandleType<T>, DirectHandle<T>>)
921typename HandleType<Object>::MaybeType Object::ToIndex(
922 Isolate* isolate, HandleType<T> input, MessageTemplate error_index) {
923 if (IsSmi(*input) && Smi::ToInt(*input) >= 0) return input;
924 return ConvertToIndex(isolate, Cast<Object>(input), error_index);
925}
926
928 DirectHandle<JSAny> object,
929 DirectHandle<Name> name) {
930 LookupIterator it(isolate, object, name);
931 if (!it.IsFound()) return it.factory()->undefined_value();
932 return GetProperty(&it);
933}
934
936 DirectHandle<JSAny> object,
937 uint32_t index) {
938 LookupIterator it(isolate, object, index);
939 if (!it.IsFound()) return it.factory()->undefined_value();
940 return GetProperty(&it);
941}
942
944 DirectHandle<JSAny> object,
945 uint32_t index,
947 ShouldThrow should_throw) {
948 LookupIterator it(isolate, object, index);
950 SetProperty(&it, value, StoreOrigin::kMaybeKeyed, Just(should_throw)));
951 return value;
952}
953
958
964
970
974
978
979template <ExternalPointerTag tag>
981 IsolateForSandbox isolate,
982 Address value,
983 WriteBarrierMode mode) {
985 value);
986 CONDITIONAL_EXTERNAL_POINTER_WRITE_BARRIER(*this, static_cast<int>(offset),
987 tag, mode);
988}
989
990template <ExternalPointerTagRange tag_range>
995
996template <CppHeapPointerTag lower_bound, CppHeapPointerTag upper_bound>
1002
1008
1009template <ExternalPointerTag tag>
1015
1017#ifdef V8_ENABLE_SANDBOX
1018 auto location =
1019 reinterpret_cast<ExternalPointerHandle*>(field_address(offset));
1021#else
1023#endif // V8_ENABLE_SANDBOX
1024}
1025
1027 size_t offset) const {
1028#ifdef V8_ENABLE_SANDBOX
1029 auto location =
1030 reinterpret_cast<ExternalPointerHandle*>(field_address(offset));
1033#else
1036#endif // V8_ENABLE_SANDBOX
1037}
1038
1039template <ExternalPointerTag tag>
1041 size_t offset, IsolateForSandbox isolate, Address value) {
1042#ifdef V8_ENABLE_SANDBOX
1043 static_assert(tag != kExternalPointerNullTag);
1044 ExternalPointerTable& table = isolate.GetExternalPointerTableFor(tag);
1045 auto location =
1046 reinterpret_cast<ExternalPointerHandle*>(field_address(offset));
1049 // Field has not been initialized yet.
1050 handle = table.AllocateAndInitializeEntry(
1051 isolate.GetExternalPointerTableSpaceFor(tag, address()), value, tag);
1053 // In this case, we're adding a reference from an existing object to a new
1054 // table entry, so we always require a write barrier.
1055 EXTERNAL_POINTER_WRITE_BARRIER(*this, static_cast<int>(offset), tag);
1056 } else {
1057 table.Set(handle, value, tag);
1058 }
1059#else
1061#endif // V8_ENABLE_SANDBOX
1062}
1063
1067
1068template <CppHeapPointerTag tag>
1074
1081
1082#if V8_ENABLE_SANDBOX
1083
1084void HeapObject::InitSelfIndirectPointerField(
1085 size_t offset, IsolateForSandbox isolate,
1086 TrustedPointerPublishingScope* opt_publishing_scope) {
1087 DCHECK(IsExposedTrustedObject(*this));
1088 InstanceType instance_type = map()->instance_type();
1091 opt_publishing_scope);
1092}
1093#endif // V8_ENABLE_SANDBOX
1094
1095template <IndirectPointerTag tag>
1097 size_t offset, IsolateForSandbox isolate) const {
1098 // Currently, trusted pointer loads always use acquire semantics as the
1099 // under-the-hood indirect pointer loads use acquire loads anyway.
1101}
1102
1103template <IndirectPointerTag tag>
1105 size_t offset, IsolateForSandbox isolate,
1106 AcquireLoadTag acquire_load) const {
1107 Tagged<Object> object =
1108 ReadMaybeEmptyTrustedPointerField<tag>(offset, isolate, acquire_load);
1109 DCHECK(IsExposedTrustedObject(object));
1110 return Cast<ExposedTrustedObject>(object);
1111}
1112
1113template <IndirectPointerTag tag>
1115 size_t offset, IsolateForSandbox isolate,
1116 AcquireLoadTag acquire_load) const {
1117#ifdef V8_ENABLE_SANDBOX
1119 acquire_load);
1120#else
1121 return TaggedField<Object>::Acquire_Load(*this, static_cast<int>(offset));
1122#endif
1123}
1124
1125template <IndirectPointerTag tag>
1128 // Currently, trusted pointer stores always use release semantics as the
1129 // under-the-hood indirect pointer stores use release stores anyway.
1130#ifdef V8_ENABLE_SANDBOX
1133#else
1135 *this, static_cast<int>(offset), value);
1136#endif
1137}
1138
1140#ifdef V8_ENABLE_SANDBOX
1143#else
1144 PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
1145 return IsSmi(TaggedField<Object>::Acquire_Load(cage_base, *this,
1146 static_cast<int>(offset)));
1147#endif
1148}
1149
1151 size_t offset, IndirectPointerTag tag, IsolateForSandbox isolate) const {
1152#ifdef V8_ENABLE_SANDBOX
1154 const TrustedPointerTable& table = isolate.GetTrustedPointerTableFor(tag);
1155 return table.IsUnpublished(handle);
1156#else
1157 return false;
1158#endif
1159}
1160
1162#ifdef V8_ENABLE_SANDBOX
1164#else
1165 TaggedField<Smi>::Release_Store(*this, static_cast<int>(offset), Smi::zero());
1166#endif
1167}
1168
1172
1178
1182
1186
1190
1195
1201
1202// static
1203template <typename ObjectType>
1205 ObjectType host, size_t offset, Isolate* isolate, uint16_t parameter_count,
1207#ifdef V8_ENABLE_LEAPTIERING
1208 JSDispatchTable::Space* space =
1209 isolate->GetJSDispatchTableSpaceFor(host->field_address(offset));
1211 isolate->factory()->NewJSDispatchHandle(parameter_count, code, space);
1212
1213 // Use a Release_Store to ensure that the store of the pointer into the table
1214 // is not reordered after the store of the handle. Otherwise, other threads
1215 // may access an uninitialized table entry and crash.
1216 auto location =
1217 reinterpret_cast<JSDispatchHandle*>(host->field_address(offset));
1220
1221 return handle;
1222#else
1223 UNREACHABLE();
1224#endif // V8_ENABLE_LEAPTIERING
1225}
1226
1227ObjectSlot HeapObject::RawField(int byte_offset) const {
1228 return ObjectSlot(field_address(byte_offset));
1229}
1230
1232 return MaybeObjectSlot(field_address(byte_offset));
1233}
1234
1236 int byte_offset) const {
1237 return InstructionStreamSlot(field_address(byte_offset));
1238}
1239
1241 int byte_offset, ExternalPointerTagRange tag_range) const {
1242 return ExternalPointerSlot(field_address(byte_offset), tag_range);
1243}
1244
1246 return CppHeapPointerSlot(field_address(byte_offset));
1247}
1248
1250 int byte_offset, IndirectPointerTag tag) const {
1251 return IndirectPointerSlot(field_address(byte_offset), tag);
1252}
1253
1255 DCHECK(map.is_null() || !MapWord::IsPacked(map.ptr()));
1256#ifdef V8_MAP_PACKING
1257 return MapWord(Pack(map.ptr()));
1258#else
1259 return MapWord(map.ptr());
1260#endif
1261}
1262
1264#ifdef V8_MAP_PACKING
1265 return UncheckedCast<Map>(Tagged<Object>(Unpack(value_)));
1266#else
1268#endif
1269}
1270
1272#ifdef V8_EXTERNAL_CODE_SPACE
1273 // When external code space is enabled forwarding pointers are encoded as
1274 // Smi representing a diff from the source object address in kObjectAlignment
1275 // chunks.
1276 return HAS_SMI_TAG(value_);
1277#else
1279#endif // V8_EXTERNAL_CODE_SPACE
1280}
1281
1283 Tagged<HeapObject> object) {
1284#ifdef V8_EXTERNAL_CODE_SPACE
1285 // When external code space is enabled forwarding pointers are encoded as
1286 // Smi representing a diff from the source object address in kObjectAlignment
1287 // chunks.
1288 intptr_t diff = static_cast<intptr_t>(object.ptr() - map_word_host.ptr());
1290 MapWord map_word(Smi::FromIntptr(diff / kObjectAlignment).ptr());
1291 DCHECK(map_word.IsForwardingAddress());
1292 return map_word;
1293#else
1294 return MapWord(object.ptr() - kHeapObjectTag);
1295#endif // V8_EXTERNAL_CODE_SPACE
1296}
1297
1299 Tagged<HeapObject> map_word_host) {
1301#ifdef V8_EXTERNAL_CODE_SPACE
1302 // When the sandbox or the external code space is enabled, forwarding
1303 // pointers are encoded as Smi representing a diff from the source object
1304 // address in kObjectAlignment chunks. This is required as we are using
1305 // multiple pointer compression cages in these scenarios.
1306 intptr_t diff =
1307 static_cast<intptr_t>(Tagged<Smi>(value_).value()) * kObjectAlignment;
1308 Address address = map_word_host.address() + diff;
1309 return HeapObject::FromAddress(address);
1310#else
1311 // The sandbox requires the external code space.
1314#endif // V8_EXTERNAL_CODE_SPACE
1315}
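// (Worked example, not from the original sources; the numbers are made up.) With
// V8_EXTERNAL_CODE_SPACE the forwarding map word stores a relative offset in
// kObjectAlignment units rather than an absolute pointer:
//
//   encode: diff = target.ptr() - host.ptr()              // say 0x240
//           map_word = Smi::FromIntptr(diff / kObjectAlignment)
//   decode: host.address() + Smi value * kObjectAlignment  -> target address
//
// The heap-object tag cancels out in the subtraction, so re-tagging via
// HeapObject::FromAddress lands exactly on the target. Encoding a relative
// offset is what keeps this working across multiple pointer-compression cages.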
1316
1317#ifdef VERIFY_HEAP
1318void HeapObject::VerifyObjectField(Isolate* isolate, int offset) {
1319 Object::VerifyPointer(isolate,
1320 TaggedField<Object>::load(isolate, *this, offset));
1321 static_assert(!COMPRESS_POINTERS_BOOL || kTaggedSize == kInt32Size);
1322}
1323
1324void HeapObject::VerifyMaybeObjectField(Isolate* isolate, int offset) {
1325 Object::VerifyMaybeObjectPointer(
1326 isolate, TaggedField<MaybeObject>::load(isolate, *this, offset));
1327 static_assert(!COMPRESS_POINTERS_BOOL || kTaggedSize == kInt32Size);
1328}
1329
1330void HeapObject::VerifySmiField(int offset) {
1332 static_assert(!COMPRESS_POINTERS_BOOL || kTaggedSize == kInt32Size);
1333}
1334
1335#endif
1336
1340
1344
1345Tagged<Map> HeapObject::map() const {
1346 // This method is never used for objects located in code space
1347 // (InstructionStream and free space fillers) and thus it is fine to use
1348 // auto-computed cage base value.
1350 PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
1351 return HeapObject::map(cage_base);
1352}
1353
1354Tagged<Map> HeapObject::map(PtrComprCageBase cage_base) const {
1355 return map_word(cage_base, kRelaxedLoad).ToMap();
1356}
1357
1359 // TODO(leszeks): Support MapWord members and access via that instead.
1360 return Tagged<HeapObject>(this)->map();
1361}
1362
1364 // TODO(leszeks): Support MapWord members and access via that instead.
1365 return Tagged<HeapObject>(this)->map(kAcquireLoad);
1366}
1367
1369 // TODO(leszeks): Support MapWord members and access via that instead.
1370 return Tagged<HeapObject>(this)->map_word(kRelaxedLoad);
1371}
1372
1374 // TODO(leszeks): Support MapWord members and access via that instead.
1375 return Tagged<HeapObject>(this)->set_map(isolate, value);
1376}
1377
1378template <typename IsolateT>
1379void HeapObjectLayout::set_map(IsolateT* isolate, Tagged<Map> value,
1381 // TODO(leszeks): Support MapWord members and access via that instead.
1382 return Tagged<HeapObject>(this)->set_map(isolate, value, kReleaseStore);
1383}
1384
1385template <typename IsolateT>
1387 Tagged<Map> value,
1389 // TODO(leszeks): Support MapWord members and access via that instead.
1390 return Tagged<HeapObject>(this)->set_map_safe_transition(isolate, value,
1392}
1393
1398
1399template <typename IsolateT>
1405
1406template <typename IsolateT>
1411
1412template <typename IsolateT>
1418
1420 Isolate* isolate, Tagged<Map> value, RelaxedStoreTag tag) {
1421 // TODO(leszeks): Support MapWord members and access via that instead.
1422 return Tagged<HeapObject>(this)->set_map_safe_transition_no_write_barrier(
1423 isolate, value, tag);
1424}
1425
1432
1439
1441 Tagged<Map> value,
1442 RelaxedStoreTag tag) {
1443 // TODO(leszeks): Support MapWord members and access via that instead.
1444 Tagged<HeapObject>(this)->set_map_no_write_barrier(isolate, value, tag);
1445}
1446
1447// Unsafe accessor omitting write barrier.
1453
1459
1460template <HeapObject::EmitWriteBarrier emit_write_barrier, typename MemoryOrder,
1461 typename IsolateT>
1462void HeapObject::set_map(IsolateT* isolate, Tagged<Map> value,
1463 MemoryOrder order, VerificationMode mode) {
1464#if V8_ENABLE_WEBASSEMBLY
1465 // In {WasmGraphBuilder::SetMap} and {WasmGraphBuilder::LoadMap}, we treat
1466 // maps as immutable. Therefore we are not allowed to mutate them here.
1467 DCHECK(!IsWasmStructMap(value) && !IsWasmArrayMap(value));
1468#endif
1469 // Object layout changes are currently not supported on background threads.
1470 // This method might change object layout and therefore can't be used on
1471 // background threads.
1473 !LocalHeap::Current());
1474 if (v8_flags.verify_heap && !value.is_null()) {
1476 HeapVerifier::VerifySafeMapTransition(isolate->heap()->AsHeap(), *this,
1477 value);
1478 } else {
1480 HeapVerifier::VerifyObjectLayoutChange(isolate->heap()->AsHeap(), *this,
1481 value);
1482 }
1483 }
1484 set_map_word(value, order);
1485 Heap::NotifyObjectLayoutChangeDone(*this);
1486#ifndef V8_DISABLE_WRITE_BARRIERS
1487 if (!value.is_null()) {
1488 if (emit_write_barrier == EmitWriteBarrier::kYes) {
1491 } else {
1492 DCHECK_EQ(emit_write_barrier, EmitWriteBarrier::kNo);
1493 SLOW_DCHECK(!WriteBarrier::IsRequired(*this, value));
1494 }
1495 }
1496#endif
1497}
1498
1499template <typename IsolateT>
1501 Tagged<Map> value,
1502 WriteBarrierMode mode) {
1503 // TODO(leszeks): Support MapWord members and access via that instead.
1504 Tagged<HeapObject>(this)->set_map_after_allocation(isolate, value, mode);
1505}
1506
1507template <typename IsolateT>
1509 WriteBarrierMode mode) {
1511#ifndef V8_DISABLE_WRITE_BARRIERS
1512 if (mode != SKIP_WRITE_BARRIER) {
1513 DCHECK(!value.is_null());
1514 WriteBarrier::ForValue(*this, MaybeObjectSlot(map_slot()), value, mode);
1515 } else {
1517 // We allow writes of a null map before root initialisation.
1518 value.is_null() ? !isolate->read_only_heap()->roots_init_complete()
1519 : !WriteBarrier::IsRequired(*this, value));
1520 }
1521#endif
1522}
1523
1524// static
1526 Tagged<Map> value) {
1528}
1529
1531 return map_word(cage_base, kAcquireLoad).ToMap();
1532}
1533
1537
1538MapWord HeapObject::map_word(RelaxedLoadTag tag) const {
1539 // This method is never used for objects located in code space
1540 // (InstructionStream and free space fillers) and thus it is fine to use
1541 // auto-computed cage base value.
1543 PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
1544 return HeapObject::map_word(cage_base, tag);
1545}
1546MapWord HeapObject::map_word(PtrComprCageBase cage_base,
1547 RelaxedLoadTag tag) const {
1548 return MapField::Relaxed_Load_Map_Word(cage_base, *this);
1549}
1550
1554
1560
1561MapWord HeapObject::map_word(AcquireLoadTag tag) const {
1562 // This method is never used for objects located in code space
1563 // (InstructionStream and free space fillers) and thus it is fine to use
1564 // auto-computed cage base value.
1566 PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
1567 return HeapObject::map_word(cage_base, tag);
1568}
1569MapWord HeapObject::map_word(PtrComprCageBase cage_base,
1570 AcquireLoadTag tag) const {
1571 return MapField::Acquire_Load_No_Unpack(cage_base, *this);
1572}
1573
1577
1579 ReleaseStoreTag tag) {
1580 // TODO(leszeks): Support MapWord members and access via that instead.
1581 Tagged<HeapObject>(this)->set_map_word_forwarded(target_object, tag);
1582}
1583
1585 RelaxedStoreTag tag) {
1586 // TODO(leszeks): Support MapWord members and access via that instead.
1587 Tagged<HeapObject>(this)->set_map_word_forwarded(target_object, tag);
1588}
1589
1595
1597 MapWord old_map_word, Tagged<HeapObject> new_target_object) {
1599 *this, old_map_word,
1600 MapWord::FromForwardingAddress(*this, new_target_object));
1601 return result == static_cast<Tagged_t>(old_map_word.ptr());
1602}
1603
1605 MapWord old_map_word, Tagged<HeapObject> new_target_object) {
1607 *this, old_map_word,
1608 MapWord::FromForwardingAddress(*this, new_target_object));
1609 return result == static_cast<Tagged_t>(old_map_word.ptr());
1610}
1611
1612int HeapObjectLayout::Size() const { return Tagged<HeapObject>(this)->Size(); }
1613
1614// TODO(v8:11880): consider dropping parameterless version.
1615int HeapObject::Size() const {
1617 PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
1618 return HeapObject::Size(cage_base);
1619}
1620int HeapObject::Size(PtrComprCageBase cage_base) const {
1621 return SizeFromMap(map(cage_base));
1622}
1623
1625 return instance_type <= LAST_SPECIAL_RECEIVER_TYPE;
1626}
1627
1628// This should be in objects/map-inl.h, but can't, because of a cyclic
1629// dependency.
1631 bool result = IsSpecialReceiverInstanceType(map->instance_type());
1633 !result, !map->has_named_interceptor() && !map->is_access_check_needed());
1634 return result;
1635}
1636
1638 return instance_type <= LAST_CUSTOM_ELEMENTS_RECEIVER;
1639}
1640
1641// This should be in objects/map-inl.h, but can't, because of a cyclic
1642// dependency.
1644 return IsCustomElementsReceiverInstanceType(map->instance_type());
1645}
1646
1647// static
1648bool Object::ToArrayLength(Tagged<Object> obj, uint32_t* index) {
1649 return Object::ToUint32(obj, index);
1650}
1651
1652// static
1653bool Object::ToArrayIndex(Tagged<Object> obj, uint32_t* index) {
1654 return Object::ToUint32(obj, index) && *index != kMaxUInt32;
1655}
1656
1657// static
1658bool Object::ToIntegerIndex(Tagged<Object> obj, size_t* index) {
1659 if (IsSmi(obj)) {
1660 int num = Smi::ToInt(obj);
1661 if (num < 0) return false;
1662 *index = static_cast<size_t>(num);
1663 return true;
1664 }
1665 if (IsHeapNumber(obj)) {
1666 double num = Cast<HeapNumber>(obj)->value();
1667 if (!(num >= 0)) return false; // Negation to catch NaNs.
1668 constexpr double max =
1669 std::min(kMaxSafeInteger,
1670 // The maximum size_t is reserved as "invalid" sentinel.
1671 static_cast<double>(std::numeric_limits<size_t>::max() - 1));
1672 if (num > max) return false;
1673 size_t result = static_cast<size_t>(num);
1674 if (num != result) return false; // Conversion lost fractional precision.
1675 *index = result;
1676 return true;
1677 }
1678 return false;
1679}
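// (Illustrative cases, not from the original sources.) The bound computed above
// is min(2^53 - 1, size_t max - 1), so on 64-bit targets kMaxSafeInteger is the
// effective limit:
//
//   size_t idx;
//   Object::ToIntegerIndex(Smi::FromInt(42), &idx);   // true, idx == 42
//   // A HeapNumber holding 1.5 fails (the fractional part would be lost), and
//   // NaN, negative values, and anything above the bound fail as well.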
1680
1685
1690
1691// static
1693 // TODO(v8:4153): We should think about requiring double alignment
1694 // in general for ByteArray, since they are used as backing store for typed
1695 // arrays now.
1696 // TODO(ishell, v8:8875): Consider using aligned allocations for BigInt.
1698 int instance_type = map->instance_type();
1699
1700 static_assert(!USE_ALLOCATION_ALIGNMENT_BOOL ||
1702 kTaggedSize);
1703 if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) return kDoubleAligned;
1704
1705 static_assert(!USE_ALLOCATION_ALIGNMENT_BOOL ||
1706 (offsetof(HeapNumber, value_) & kDoubleAlignmentMask) ==
1707 kTaggedSize);
1708 if (instance_type == HEAP_NUMBER_TYPE) return kDoubleUnaligned;
1709 }
1710 return kTaggedAligned;
1711}
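// (Commentary sketch, not from the original sources.) kDoubleAlignmentMask is 7,
// so the static_assert above states that HeapNumber's double payload sits
// kTaggedSize bytes past an 8-byte boundary. kDoubleUnaligned then asks the
// allocator to start the object at an address that is itself offset by
// kTaggedSize from 8-byte alignment, which puts the payload back on an 8-byte
// boundary; kDoubleAligned requests an 8-byte-aligned object start instead.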
1712
1715 CHECK_EQ(0, Heap::GetFillToAlign(address(), alignment));
1716 return true;
1717}
1718
1719Address HeapObject::GetFieldAddress(int field_offset) const {
1720 return field_address(field_offset);
1721}
1722
1723// static
1727 if (result.IsJust()) {
1728 switch (result.FromJust()) {
1730 return Just(true);
1734 return Just(false);
1735 }
1736 }
1737 return Nothing<bool>();
1738}
1739
1740// static
1744 if (result.IsJust()) {
1745 switch (result.FromJust()) {
1748 return Just(true);
1751 return Just(false);
1752 }
1753 }
1754 return Nothing<bool>();
1755}
1756
1757// static
1761 if (result.IsJust()) {
1762 switch (result.FromJust()) {
1764 return Just(true);
1768 return Just(false);
1769 }
1770 }
1771 return Nothing<bool>();
1772}
1773
1774// static
1778 if (result.IsJust()) {
1779 switch (result.FromJust()) {
1782 return Just(true);
1785 return Just(false);
1786 }
1787 }
1788 return Nothing<bool>();
1789}
1790
1792 DirectHandle<JSAny> object,
1793 DirectHandle<Name> name) {
1794 return GetPropertyOrElement(isolate, object, PropertyKey(isolate, name));
1795}
1796
1798 DirectHandle<JSAny> object,
1799 PropertyKey key) {
1800 LookupIterator it(isolate, object, key);
1801 return GetProperty(&it);
1802}
1803
1805 Isolate* isolate, DirectHandle<JSAny> object, DirectHandle<Name> name,
1806 DirectHandle<Object> value, Maybe<ShouldThrow> should_throw,
1807 StoreOrigin store_origin) {
1808 return SetPropertyOrElement(isolate, object, PropertyKey(isolate, name),
1809 value, should_throw, store_origin);
1810}
1811
1813 Isolate* isolate, DirectHandle<JSAny> object, PropertyKey key,
1814 DirectHandle<Object> value, Maybe<ShouldThrow> should_throw,
1815 StoreOrigin store_origin) {
1816 LookupIterator it(isolate, object, key);
1817 MAYBE_RETURN_NULL(SetProperty(&it, value, store_origin, should_throw));
1818 return value;
1819}
1820
1821// static
1824 if (IsSmi(object)) {
1825 uint32_t hash = ComputeUnseededHash(Smi::ToInt(object));
1826 return Smi::FromInt(hash & Smi::kMaxValue);
1827 }
1828 auto instance_type = Cast<HeapObject>(object)->map()->instance_type();
1829 if (InstanceTypeChecker::IsHeapNumber(instance_type)) {
1830 double num = Cast<HeapNumber>(object)->value();
1831 if (std::isnan(num)) return Smi::FromInt(Smi::kMaxValue);
1832 // Use ComputeUnseededHash for all values in Signed32 range, including -0,
1833 // which is considered equal to 0 because collections use SameValueZero.
1834 uint32_t hash;
1835 // Check range before conversion to avoid undefined behavior.
1836 if (num >= kMinInt && num <= kMaxInt && FastI2D(FastD2I(num)) == num) {
1837 hash = ComputeUnseededHash(FastD2I(num));
1838 } else {
1840 }
1841 return Smi::FromInt(hash & Smi::kMaxValue);
1842 } else if (InstanceTypeChecker::IsName(instance_type)) {
1843 uint32_t hash = Cast<Name>(object)->EnsureHash();
1844 return Smi::FromInt(hash);
1845 } else if (InstanceTypeChecker::IsOddball(instance_type)) {
1846 uint32_t hash = Cast<Oddball>(object)->to_string()->EnsureHash();
1847 return Smi::FromInt(hash);
1848 } else if (InstanceTypeChecker::IsBigInt(instance_type)) {
1849 uint32_t hash = Cast<BigInt>(object)->Hash();
1850 return Smi::FromInt(hash & Smi::kMaxValue);
1851 } else if (InstanceTypeChecker::IsSharedFunctionInfo(instance_type)) {
1852 uint32_t hash = Cast<SharedFunctionInfo>(object)->Hash();
1853 return Smi::FromInt(hash & Smi::kMaxValue);
1854 } else if (InstanceTypeChecker::IsScopeInfo(instance_type)) {
1855 uint32_t hash = Cast<ScopeInfo>(object)->Hash();
1856 return Smi::FromInt(hash & Smi::kMaxValue);
1857 } else if (InstanceTypeChecker::IsScript(instance_type)) {
1858 int id = Cast<Script>(object)->id();
1860 } else if (InstanceTypeChecker::IsTemplateInfo(instance_type)) {
1861 uint32_t hash = Cast<TemplateInfo>(object)->GetHash();
1862 DCHECK_EQ(hash, hash & Smi::kMaxValue);
1863 return Smi::FromInt(hash);
1864 }
1865
1866 DCHECK(!InstanceTypeChecker::IsHole(instance_type));
1867 DCHECK(IsJSReceiver(object));
1868 return object;
1869}
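// (Commentary sketch, not from the original sources.) GetSimpleHash covers every
// value whose hash can be computed without an isolate: Smis, HeapNumbers, Names,
// Oddballs, BigInts, SharedFunctionInfos, ScopeInfos, Scripts and TemplateInfos
// all come back as a Smi hash. A HeapNumber holding 42.0 hashes exactly like the
// Smi 42, because the Signed32 fast path feeds the same integer through
// ComputeUnseededHash, keeping SameValueZero-equal keys on one hash. Anything
// else (a JSReceiver) is returned unchanged, and the next function falls back to
// the receiver's identity hash.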
1870
1871// static
1874 Tagged<Object> hash = GetSimpleHash(obj);
1875 if (IsSmi(hash)) return hash;
1876
1877 // Make sure that we never cast internal objects to JSReceivers.
1878 CHECK(IsJSReceiver(obj));
1880 return receiver->GetIdentityHash();
1881}
1882
1884 // This logic should be kept in sync with fast paths in
1885 // CodeStubAssembler::SharedValueBarrier.
1886
1887 // Smis are trivially shared.
1888 if (IsSmi(obj)) return true;
1889
1891
1892 // RO objects are shared when the RO space is shared.
1893 if (HeapLayout::InReadOnlySpace(object)) {
1894 return true;
1895 }
1896
1897 // Check if this object is already shared.
1898 InstanceType instance_type = object->map()->instance_type();
1899 if (InstanceTypeChecker::IsAlwaysSharedSpaceJSObject(instance_type)) {
1901 return true;
1902 }
1903 switch (instance_type) {
1911 return true;
1918 if (v8_flags.shared_string_table) {
1920 return true;
1921 }
1922 return false;
1923 case HEAP_NUMBER_TYPE:
1924 return HeapLayout::InWritableSharedSpace(object);
1925 default:
1926 return false;
1927 }
1928}
1929
1930// static
1931template <typename T, template <typename> typename HandleType>
1932 requires(std::is_convertible_v<HandleType<T>, DirectHandle<T>>)
1933typename HandleType<Object>::MaybeType Object::Share(
1934 Isolate* isolate, HandleType<T> value,
1935 ShouldThrow throw_if_cannot_be_shared) {
1936 // Sharing values requires the RO space be shared.
1937 if (IsShared(*value)) return value;
1938 return ShareSlow(isolate, Cast<HeapObject>(value), throw_if_cannot_be_shared);
1939}
1940
1941// https://tc39.es/ecma262/#sec-canbeheldweakly
1942// static
1944 if (IsJSReceiver(obj)) {
1945 // TODO(v8:12547) Shared structs and arrays should only be able to point
1946 // to shared values in weak collections. For now, disallow them as weak
1947 // collection keys.
1948 if (v8_flags.harmony_struct) {
1949 return !IsJSSharedStruct(obj) && !IsJSSharedArray(obj);
1950 }
1951 return true;
1952 }
1953 return IsSymbol(obj) && !Cast<Symbol>(obj)->is_in_public_symbol_table();
1954}
1955
1960
1962 isolate_ = isolate;
1963 prev_ = isolate->relocatable_top();
1964 isolate->set_relocatable_top(this);
1965}
1966
1968 DCHECK_EQ(isolate_->relocatable_top(), this);
1969 isolate_->set_relocatable_top(prev_);
1970}
1971
1972// Predictably converts HeapObject or Address to uint32 by calculating
1973// offset of the address in respective MemoryChunk.
1974static inline uint32_t ObjectAddressForHashing(Address object) {
1975 return MemoryChunk::AddressToOffset(object);
1976}
1977
1978static inline DirectHandle<Object> MakeEntryPair(Isolate* isolate, size_t index,
1979 DirectHandle<Object> value) {
1980 DirectHandle<Object> key = isolate->factory()->SizeToString(index);
1981 DirectHandle<FixedArray> entry_storage = isolate->factory()->NewFixedArray(2);
1982 {
1983 entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
1984 entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
1985 }
1986 return isolate->factory()->NewJSArrayWithElements(entry_storage,
1987 PACKED_ELEMENTS, 2);
1988}
1989
1992 DirectHandle<Object> value) {
1993 DirectHandle<FixedArray> entry_storage = isolate->factory()->NewFixedArray(2);
1994 {
1995 entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
1996 entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
1997 }
1998 return isolate->factory()->NewJSArrayWithElements(entry_storage,
1999 PACKED_ELEMENTS, 2);
2000}
2001
2002} // namespace internal
2003} // namespace v8
2004
2006
2007#endif // V8_OBJECTS_OBJECTS_INL_H_
#define T
constexpr int kRegularPageSize
int16_t parameter_count
Definition builtins.cc:67
#define SLOW_DCHECK(condition)
Definition checks.h:21
bool IsArray() const
Definition api.cc:3525
uint32_t GetHash()
Definition api.cc:4282
static void Release_Store(T *addr, typename std::remove_reference< T >::type new_value)
static T Acquire_Load(T *addr)
static void Relaxed_Store(T *addr, typename std::remove_reference< T >::type new_value)
static T Relaxed_Load(T *addr)
static V8_EXPORT_PRIVATE Tagged< FieldType > Any()
Definition field-type.cc:22
static V8_EXPORT_PRIVATE Tagged< FieldType > None()
Definition field-type.cc:17
static V8_INLINE bool InWritableSharedSpace(Tagged< HeapObject > object)
static V8_INLINE bool InReadOnlySpace(Tagged< HeapObject > object)
static V8_INLINE bool InAnySharedSpace(Tagged< HeapObject > object)
static V8_INLINE bool InCodeSpace(Tagged< HeapObject > object)
void set_map_safe_transition_no_write_barrier(Isolate *isolate, Tagged< Map > value, RelaxedStoreTag=kRelaxedStore)
MapWord map_word(RelaxedLoadTag) const
WriteBarrierMode GetWriteBarrierMode(const DisallowGarbageCollection &promise)
void set_map_after_allocation(IsolateT *isolate, Tagged< Map > value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
ReadOnlyRoots EarlyGetReadOnlyRoots() const
void set_map(Isolate *isolate, Tagged< Map > value)
Tagged< Map > map() const
void set_map_word_forwarded(Tagged< HeapObject > target_object, ReleaseStoreTag)
void set_map_safe_transition(IsolateT *isolate, Tagged< Map > value, ReleaseStoreTag)
void set_map_no_write_barrier(Isolate *isolate, Tagged< Map > value, RelaxedStoreTag=kRelaxedStore)
bool release_compare_and_swap_map_word_forwarded(MapWord old_map_word, Tagged< HeapObject > new_target_object)
ExternalPointerSlot RawExternalPointerField(int byte_offset, ExternalPointerTagRange tag_range) const
void WriteCodePointerField(size_t offset, Tagged< Code > value)
void Relaxed_WriteField(size_t offset, T value)
static Tagged< HeapObject > FromAddress(Address address)
T Acquire_ReadField(size_t offset) const
void WriteSandboxedPointerField(size_t offset, PtrComprCageBase cage_base, Address value)
ObjectSlot map_slot() const
void SetupLazilyInitializedCppHeapPointerField(size_t offset)
void WriteBoundedSizeField(size_t offset, size_t value)
void set_map_word(Tagged< Map > map, RelaxedStoreTag)
bool relaxed_compare_and_swap_map_word_forwarded(MapWord old_map_word, Tagged< HeapObject > new_target_object)
T Relaxed_ReadField(size_t offset) const
static Tagged< Object > SeqCst_CompareAndSwapField(Tagged< Object > expected_value, Tagged< Object > new_value, CompareAndSwapImpl compare_and_swap_impl)
Address ReadCppHeapPointerField(size_t offset, IsolateForPointerCompression isolate) const
Address ReadSandboxedPointerField(size_t offset, PtrComprCageBase cage_base) const
void InitExternalPointerField(size_t offset, IsolateForSandbox isolate, Address value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
void WriteTrustedPointerField(size_t offset, Tagged< ExposedTrustedObject > value)
void WriteCodeEntrypointViaCodePointerField(size_t offset, Address value, CodeEntrypointTag tag)
Address ReadCodeEntrypointViaCodePointerField(size_t offset, CodeEntrypointTag tag) const
Address field_address(size_t offset) const
static void SetFillerMap(const WritableFreeSpace &writable_page, Tagged< Map > value)
static constexpr int kMapOffset
ObjectSlot RawField(int byte_offset) const
MaybeObjectSlot RawMaybeWeakField(int byte_offset) const
void WriteLazilyInitializedExternalPointerField(size_t offset, IsolateForSandbox isolate, Address value)
V8_EXPORT_PRIVATE int SizeFromMap(Tagged< Map > map) const
Definition objects.cc:1939
void set_map(Isolate *isolate, Tagged< Map > value)
static JSDispatchHandle AllocateAndInstallJSDispatchHandle(ObjectType host, size_t offset, Isolate *isolate, uint16_t parameter_count, DirectHandle< Code > code, WriteBarrierMode mode=WriteBarrierMode::UPDATE_WRITE_BARRIER)
Tagged< Object > ReadMaybeEmptyTrustedPointerField(size_t offset, IsolateForSandbox isolate, AcquireLoadTag) const
static AllocationAlignment RequiredAlignment(Tagged< Map > map)
size_t ReadBoundedSizeField(size_t offset) const
Address ReadExternalPointerField(size_t offset, IsolateForSandbox isolate) const
void set_map_safe_transition(IsolateT *isolate, Tagged< Map > value)
ReadOnlyRoots EarlyGetReadOnlyRoots() const
void ClearTrustedPointerField(size_t offest)
void set_map_after_allocation(IsolateT *isolate, Tagged< Map > value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
void set_map_word_forwarded(Tagged< HeapObject > target_object, RelaxedStoreTag)
void set_map_safe_transition_no_write_barrier(Isolate *isolate, Tagged< Map > value, RelaxedStoreTag=kRelaxedStore)
Address GetFieldAddress(int field_offset) const
bool IsTrustedPointerFieldEmpty(size_t offset) const
void SetupLazilyInitializedExternalPointerField(size_t offset)
bool IsCodePointerFieldEmpty(size_t offset) const
Tagged< Code > ReadCodePointerField(size_t offset, IsolateForSandbox isolate) const
void set_map_no_write_barrier(Isolate *isolate, Tagged< Map > value, RelaxedStoreTag=kRelaxedStore)
void ClearCodePointerField(size_t offest)
WriteBarrierMode GetWriteBarrierMode(const DisallowGarbageCollection &promise)
CppHeapPointerSlot RawCppHeapPointerField(int byte_offset) const
Address address() const
bool CheckRequiredAlignment(PtrComprCageBase cage_base) const
Tagged< ExposedTrustedObject > ReadTrustedPointerField(size_t offset, IsolateForSandbox isolate) const
void WriteExternalPointerField(size_t offset, IsolateForSandbox isolate, Address value)
void WriteLazilyInitializedCppHeapPointerField(size_t offset, IsolateForPointerCompression isolate, Address value)
IndirectPointerSlot RawIndirectPointerField(int byte_offset, IndirectPointerTag tag) const
InstructionStreamSlot RawInstructionStreamField(int byte_offset) const
bool IsTrustedPointerFieldUnpublished(size_t offset, IndirectPointerTag tag, IsolateForSandbox isolate) const
bool IsLazilyInitializedExternalPointerFieldInitialized(size_t offset) const
static void VerifyObjectLayoutChange(Heap *heap, Tagged< HeapObject > object, Tagged< Map > new_map)
static void VerifySafeMapTransition(Heap *heap, Tagged< HeapObject > object, Tagged< Map > new_map)
static V8_WARN_UNUSED_RESULT Maybe< bool > IsArray(DirectHandle< JSProxy > proxy)
Definition objects.cc:2864
static V8_WARN_UNUSED_RESULT HandleType< Object >::MaybeType ToPrimitive(Isolate *isolate, HandleType< JSReceiver > receiver, ToPrimitiveHint hint=ToPrimitiveHint::kDefault)
static constexpr bool IsPacked(Address)
Definition objects.h:846
Tagged< Map > ToMap() const
constexpr Address ptr() const
Definition objects.h:820
bool IsForwardingAddress() const
constexpr MapWord(Address value)
Definition objects.h:855
static MapWord FromForwardingAddress(Tagged< HeapObject > map_word_host, Tagged< HeapObject > object)
static MapWord FromMap(const Tagged< Map > map)
Tagged< HeapObject > ToForwardingAddress(Tagged< HeapObject > map_word_host)
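The MapWord entries above describe the first word of every heap object: it normally holds the map, but during a moving GC it may instead hold a forwarding pointer to the object's new location. A hedged sketch of the usual dispatch in GC code follows; the relaxed map-word accessor and the surrounding setup are assumptions for illustration, not taken from this listing.
  // Sketch: 'object' is a Tagged<HeapObject> that may already have been evacuated.
  MapWord map_word = object->map_word(kRelaxedLoad);
  if (map_word.IsForwardingAddress()) {
    // Already moved: follow the forwarding pointer to the new copy.
    Tagged<HeapObject> forwarded = map_word.ToForwardingAddress(object);
    // ... continue with 'forwarded' ...
  } else {
    Tagged<Map> map = map_word.ToMap();  // normal case: the word is a map pointer
    // ... continue with 'map' ...
  }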
static constexpr uint32_t AddressToOffset(Address address)
static V8_INLINE MemoryChunk * FromHeapObject(Tagged< HeapObject > object)
static DirectHandle< Object > AsHandle(DirectHandle< Object > key)
static V8_WARN_UNUSED_RESULT MaybeHandle< Object > ToLength(Isolate *isolate, DirectHandle< Object > input)
static bool CanBeHeldWeakly(Tagged< Object > obj)
V8_EXPORT_PRIVATE static V8_WARN_UNUSED_RESULT MaybeHandle< Number > ConvertToLength(Isolate *isolate, DirectHandle< Object > input)
Definition objects.cc:732
static V8_WARN_UNUSED_RESULT Maybe< bool > GreaterThan(Isolate *isolate, DirectHandle< Object > x, DirectHandle< Object > y)
static V8_WARN_UNUSED_RESULT bool ToArrayIndex(Tagged< Object > obj, uint32_t *index)
static bool ToArrayLength(Tagged< Object > obj, uint32_t *index)
static V8_WARN_UNUSED_RESULT HandleType< String >::MaybeType ToString(Isolate *isolate, HandleType< T > input)
V8_EXPORT_PRIVATE static V8_WARN_UNUSED_RESULT Maybe< bool > SetProperty(LookupIterator *it, DirectHandle< Object > value, StoreOrigin store_origin, Maybe< ShouldThrow > should_throw=Nothing< ShouldThrow >())
Definition objects.cc:2439
static V8_WARN_UNUSED_RESULT MaybeHandle< Object > GetPropertyOrElement(Isolate *isolate, DirectHandle< JSAny > object, DirectHandle< Name > name)
static V8_WARN_UNUSED_RESULT HandleType< Number >::MaybeType ToNumber(Isolate *isolate, HandleType< T > input)
static bool SameNumberValue(double number1, double number2)
static ElementsKind OptimalElementsKind(Tagged< Object > obj, PtrComprCageBase cage_base)
static V8_WARN_UNUSED_RESULT HandleType< Number >::MaybeType ToInteger(Isolate *isolate, HandleType< T > input)
static V8_WARN_UNUSED_RESULT HandleType< JSReceiver >::MaybeType ToObject(Isolate *isolate, HandleType< T > object, const char *method_name=nullptr)
static Representation OptimalRepresentation(Tagged< Object > obj, PtrComprCageBase cage_base)
static bool FilterKey(Tagged< Object > obj, PropertyFilter filter)
static V8_WARN_UNUSED_RESULT HandleType< Object >::MaybeType ToIndex(Isolate *isolate, HandleType< T > input, MessageTemplate error_index)
static V8_WARN_UNUSED_RESULT HandleType< Object >::MaybeType ToPropertyKey(Isolate *isolate, HandleType< T > value)
static V8_WARN_UNUSED_RESULT MaybeDirectHandle< Object > SetElement(Isolate *isolate, DirectHandle< JSAny > object, uint32_t index, DirectHandle< Object > value, ShouldThrow should_throw)
static V8_WARN_UNUSED_RESULT Maybe< bool > LessThan(Isolate *isolate, DirectHandle< Object > x, DirectHandle< Object > y)
static V8_WARN_UNUSED_RESULT MaybeDirectHandle< Object > SetPropertyOrElement(Isolate *isolate, DirectHandle< JSAny > object, DirectHandle< Name > name, DirectHandle< Object > value, Maybe< ShouldThrow > should_throw=Nothing< ShouldThrow >(), StoreOrigin store_origin=StoreOrigin::kMaybeKeyed)
static double NumberValue(Tagged< Number > obj)
static V8_WARN_UNUSED_RESULT Maybe< double > IntegerValue(Isolate *isolate, HandleType< T > input)
static V8_EXPORT_PRIVATE bool ToInt32(Tagged< Object > obj, int32_t *value)
Definition objects.cc:1438
static V8_WARN_UNUSED_RESULT Maybe< bool > GreaterThanOrEqual(Isolate *isolate, DirectHandle< Object > x, DirectHandle< Object > y)
static V8_WARN_UNUSED_RESULT HandleType< Object >::MaybeType ToPrimitive(Isolate *isolate, HandleType< T > input, ToPrimitiveHint hint=ToPrimitiveHint::kDefault)
static V8_WARN_UNUSED_RESULT Maybe< bool > LessThanOrEqual(Isolate *isolate, DirectHandle< Object > x, DirectHandle< Object > y)
static V8_WARN_UNUSED_RESULT HandleType< Name >::MaybeType ToName(Isolate *isolate, HandleType< Object > input)
V8_EXPORT_PRIVATE static V8_WARN_UNUSED_RESULT MaybeHandle< Object > GetProperty(LookupIterator *it, bool is_global_reference=false)
Definition objects.cc:1248
static bool ToUint32(Tagged< Object > obj, uint32_t *value)
static bool HasValidElements(Tagged< Object > obj)
static bool ToIntegerIndex(Tagged< Object > obj, size_t *index)
static bool FitsRepresentation(Tagged< Object > obj, Representation representation, bool allow_coercion=true)
static Tagged< Object > GetSimpleHash(Tagged< Object > object)
V8_EXPORT_PRIVATE static V8_WARN_UNUSED_RESULT Maybe< ComparisonResult > Compare(Isolate *isolate, DirectHandle< Object > x, DirectHandle< Object > y)
Definition objects.cc:841
static HandleType< Object >::MaybeType Share(Isolate *isolate, HandleType< T > value, ShouldThrow throw_if_cannot_be_shared)
static V8_WARN_UNUSED_RESULT HandleType< Object >::MaybeType ToNumeric(Isolate *isolate, HandleType< T > input)
static V8_WARN_UNUSED_RESULT MaybeHandle< Object > GetElement(Isolate *isolate, DirectHandle< JSAny > object, uint32_t index)
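The Object entries above are the engine-side counterparts of the ECMAScript abstract operations (ToNumber, ToLength, GetProperty, Compare, and so on); failure is reported through Maybe/MaybeHandle rather than C++ exceptions. A hedged sketch of the Maybe<bool> pattern, assuming a runtime-style function returning Tagged<Object> with 'isolate', 'x', and 'y' in scope:
  // Sketch: relational comparison can run arbitrary JS (valueOf/toString),
  // so it can throw; IsNothing() signals a pending exception on the isolate.
  Maybe<bool> greater = Object::GreaterThan(isolate, x, y);
  if (greater.IsNothing()) {
    return ReadOnlyRoots(isolate).exception();  // propagate the pending exception
  }
  bool x_is_greater = greater.FromJust();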
detail::ArrayHeaderBase< HeapObjectLayout, true > Header
constexpr PropertyDetails(PropertyKind kind, PropertyAttributes attributes, PropertyCellType cell_type, int dictionary_index=0)
PropertyLocation location() const
Tagged< Smi > AsSmi() const
Definition objects-inl.h:79
static V8_EXPORT_PRIVATE ReadOnlyRoots EarlyGetReadOnlyRoots(Tagged< HeapObject > object)
Relocatable(Isolate *isolate)
Relocatable * prev_
Definition objects.h:901
constexpr bool IsHeapObject() const
constexpr bool IsNone() const
static constexpr Representation Double()
constexpr bool IsSmi() const
static constexpr Representation HeapObject()
static constexpr Representation None()
static constexpr Representation Smi()
constexpr bool IsDouble() const
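Representation classifies how a field stores its value (None, Smi, Double, or a tagged HeapObject) and works together with the Object::OptimalRepresentation and Object::FitsRepresentation entries listed earlier. A small hedged example, with 'obj' and 'cage_base' assumed to be in scope:
  // Sketch: with coercion allowed (the default), a Smi can be stored in a
  // double field, so both Smi- and Double-representable values fit Double.
  Representation rep = Object::OptimalRepresentation(obj, cage_base);
  if (rep.IsSmi() || rep.IsDouble()) {
    DCHECK(Object::FitsRepresentation(obj, Representation::Double()));
  }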
static V8_EXPORT_PRIVATE constexpr Tagged< Smi > const kNoSharedNameSentinel
static constexpr int ToInt(const Tagged< Object > object)
Definition smi.h:33
static constexpr Tagged< Smi > FromInt(int value)
Definition smi.h:38
static constexpr Tagged< Smi > FromIntptr(intptr_t value)
Definition smi.h:43
static constexpr Tagged< Smi > ToUint32Smi(Tagged< Smi > smi)
Definition smi.h:27
static constexpr Tagged< Smi > zero()
Definition smi.h:99
static constexpr int kMaxValue
Definition smi.h:101
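The Smi entries above cover tagging and untagging of small integers. A minimal usage sketch, assuming a v8::internal compilation unit with these headers available; the checks are illustrative only:
  Tagged<Smi> three = Smi::FromInt(3);      // tag a C++ int as a Smi
  int raw = Smi::ToInt(three);              // untag back to an int
  DCHECK_EQ(raw, 3);
  DCHECK(Smi::IsValid(Smi::kMaxValue));     // kMaxValue is the largest representable Smi
  DCHECK_EQ(Smi::ToInt(Smi::zero()), 0);    // zero() is the canonical Smi 0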
V8_INLINE bool IsThin() const
Definition string-inl.h:174
V8_INLINE bool IsExternalOneByte() const
Definition string-inl.h:238
V8_INLINE bool IsCons() const
Definition string-inl.h:170
V8_INLINE bool IsSliced() const
Definition string-inl.h:178
V8_INLINE bool IsExternalTwoByte() const
Definition string-inl.h:247
V8_INLINE bool IsSequentialOneByte() const
Definition string-inl.h:230
V8_INLINE bool IsSequentialTwoByte() const
Definition string-inl.h:234
V8_INLINE bool IsSequential() const
Definition string-inl.h:192
static Tagged_t Relaxed_CompareAndSwap(Tagged< HeapObject > host, PtrType old, PtrType value)
static void Release_Store(Tagged< HeapObject > host, PtrType value)
static PtrType load(Tagged< HeapObject > host, int offset=0)
static void Relaxed_Store_Map_Word(Tagged< HeapObject > host, PtrType value)
static Address address(Tagged< HeapObject > host, int offset=0)
static PtrType Acquire_Load_No_Unpack(PtrComprCageBase cage_base, Tagged< HeapObject > host, int offset=0)
static Tagged_t Release_CompareAndSwap(Tagged< HeapObject > host, PtrType old, PtrType value)
static PtrType Acquire_Load(Tagged< HeapObject > host, int offset=0)
static void Release_Store_Map_Word(Tagged< HeapObject > host, PtrType value)
static PtrType Relaxed_Load_Map_Word(PtrComprCageBase cage_base, Tagged< HeapObject > host)
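The Acquire_/Release_/Relaxed_ entries above belong to the TaggedField accessor template, which binds a field's static offset and lets each call site pick the memory ordering it needs. A hedged sketch of the pattern; kValueOffset and new_value are hypothetical names used only for illustration:
  // Sketch: 'host' is a Tagged<HeapObject>; kValueOffset is hypothetical.
  using ValueField = TaggedField<Object, kValueOffset>;
  Tagged<Object> current = ValueField::Acquire_Load(host);  // pairs with Release_Store below
  ValueField::Release_Store(host, new_value);               // publish for concurrent readers
  Tagged<Object> plain = ValueField::load(host);            // non-atomic main-thread load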
V8_INLINE constexpr StorageType ptr() const
static bool constexpr IsValid(intptr_t value)
constexpr V8_INLINE bool IsSmi() const
Definition tagged.h:508
static V8_INLINE constexpr Tagged_t CompressAny(Address tagged)
V8_INLINE void WriteHeaderSlot(Tagged< T > value, RelaxedStoreTag) const
static WriteBarrierMode GetWriteBarrierModeForObject(Tagged< HeapObject > object, const DisallowGarbageCollection &promise)
static void ForValue(Tagged< HeapObject > host, MaybeObjectSlot slot, Tagged< T > value, WriteBarrierMode mode)
Register const value_
#define V8_EXTERNAL_CODE_SPACE_BOOL
Definition globals.h:255
#define COMPRESS_POINTERS_BOOL
Definition globals.h:99
#define HAS_SMI_TAG(value)
Definition globals.h:1771
#define USE_ALLOCATION_ALIGNMENT_BOOL
Definition globals.h:1562
#define V8_ENABLE_SANDBOX_BOOL
Definition globals.h:160
#define ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value)
Definition isolate.h:276
#define MAYBE_RETURN_NULL(call)
Definition isolate.h:413
Isolate * isolate
int32_t offset
TNode< Object > receiver
std::map< const std::string, const std::string > map
ZoneVector< RpoNumber > & result
int x
constexpr uint64_t double_to_uint64(double d)
Definition double.h:17
V8_INLINE Dest bit_cast(Source const &source)
Definition macros.h:95
constexpr int kMinInt
Definition globals.h:375
V8_INLINE IndirectHandle< T > handle(Tagged< T > object, Isolate *isolate)
Definition handles-inl.h:72
V8_INLINE void WriteExternalPointerField(Address field_address, IsolateForSandbox isolate, Address value)
V8_INLINE Address ReadCppHeapPointerField(Address field_address, IsolateForPointerCompression isolate)
V8_INLINE Address ReadCodeEntrypointViaCodePointerField(Address field_address, CodeEntrypointTag tag)
bool TryCast(Tagged< From > value, Tagged< To > *out)
Definition casting.h:77
constexpr int kTaggedSize
Definition globals.h:542
constexpr double kMaxSafeInteger
Definition globals.h:1985
V8_INLINE void WriteIndirectPointerField(Address field_address, Tagged< ExposedTrustedObject > value, ReleaseStoreTag)
bool IsNaN(Tagged< Object > obj)
bool IsPrimitiveMap(Tagged< Map > map)
Definition map-inl.h:754
@ SKIP_WRITE_BARRIER
Definition objects.h:52
@ UPDATE_WRITE_BARRIER
Definition objects.h:55
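SKIP_WRITE_BARRIER and UPDATE_WRITE_BARRIER feed the 'mode' parameters of the setters listed above; skipping is only valid when the store can require no remembered-set or marking update. A hedged sketch using HeapObject::GetWriteBarrierMode, with 'host', 'isolate', and 'map' assumed to be in scope:
  // Sketch: while GC is disallowed, the barrier mode can be computed once
  // and reused for subsequent stores on the same object.
  DisallowGarbageCollection no_gc;
  WriteBarrierMode mode = host->GetWriteBarrierMode(no_gc);
  host->set_map_after_allocation(isolate, map, mode);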
uint32_t ComputeLongHash(uint64_t key)
Definition utils.h:282
static DirectHandle< Object > MakeEntryPair(Isolate *isolate, size_t index, DirectHandle< Object > value)
bool DoubleToUint32IfEqualToSelf(double value, uint32_t *uint32_value)
bool IsNumeric(Tagged< Object > obj)
SlotTraits::TObjectSlot ObjectSlot
Definition globals.h:1243
constexpr intptr_t kObjectAlignment
Definition globals.h:930
bool Is(IndirectHandle< U > value)
Definition handles-inl.h:51
bool IsNumber(Tagged< Object > obj)
ReadOnlyRoots GetReadOnlyRoots()
Definition roots-inl.h:86
bool IsAnyHole(Tagged< Object > obj, PtrComprCageBase cage_base)
const intptr_t kForwardingTagMask
Definition v8-internal.h:83
bool IsSpecialReceiverInstanceType(InstanceType instance_type)
V8_INLINE Tagged< Object > ReadIndirectPointerField(Address field_address, IsolateForSandbox isolate, AcquireLoadTag)
V8_INLINE size_t ReadBoundedSizeField(Address field_address)
bool IsCustomElementsReceiverMap(Tagged< Map > map)
Tagged(T object) -> Tagged< T >
static void WriteMaybeUnalignedValue(Address p, V value)
Definition ptr-compr.h:225
double DoubleToInteger(double x)
uint32_t ComputeUnseededHash(uint32_t key)
Definition utils.h:271
V8_INLINE Address ReadSandboxedPointerField(Address field_address, PtrComprCageBase cage_base)
V8_INLINE void WriteLazilyInitializedCppHeapPointerField(Address field_address, IsolateForPointerCompression isolate, Address value)
V8_INLINE void WriteBoundedSizeField(Address field_address, size_t value)
V8_INLINE constexpr bool IsSmi(TaggedImpl< kRefType, StorageType > obj)
Definition objects.h:665
bool IsSpecialReceiverMap(Tagged< Map > map)
bool IsPublicSymbol(Tagged< Object > obj)
Address Tagged_t
Definition globals.h:547
SlotTraits::TInstructionStreamSlot InstructionStreamSlot
Definition globals.h:1265
V8_INLINE Address ReadExternalPointerField(Address field_address, IsolateForSandbox isolate)
double FastI2D(int x)
bool IsCustomElementsReceiverInstanceType(InstanceType instance_type)
Union< Smi, HeapNumber > Number
Definition globals.h:1181
bool IsNullOrUndefined(Tagged< Object > obj, Isolate *isolate)
Handle< To > UncheckedCast(Handle< From > value)
Definition handles-inl.h:55
uint32_t IndirectPointerHandle
V8_INLINE bool OutsideSandboxOrInReadonlySpace(Tagged< HeapObject > obj)
V8_INLINE PtrComprCageBase GetPtrComprCageBase()
constexpr ExternalPointerHandle kNullExternalPointerHandle
bool IsPrimitive(Tagged< Object > obj)
const int kForwardingTag
Definition v8-internal.h:81
bool IsShared(Tagged< Object > obj)
bool IsJSObjectThatCanBeTrackedAsPrototype(Tagged< Object > obj)
Definition objects-inl.h:96
constexpr int kInt32Size
Definition globals.h:401
@ UNCACHED_EXTERNAL_INTERNALIZED_ONE_BYTE_STRING_TYPE
@ SHARED_SEQ_ONE_BYTE_STRING_TYPE
@ SHARED_SEQ_TWO_BYTE_STRING_TYPE
@ SHARED_EXTERNAL_TWO_BYTE_STRING_TYPE
@ UNCACHED_EXTERNAL_INTERNALIZED_TWO_BYTE_STRING_TYPE
@ LAST_CUSTOM_ELEMENTS_RECEIVER
@ SHARED_UNCACHED_EXTERNAL_TWO_BYTE_STRING_TYPE
@ SHARED_UNCACHED_EXTERNAL_ONE_BYTE_STRING_TYPE
@ EXTERNAL_INTERNALIZED_TWO_BYTE_STRING_TYPE
@ SHARED_EXTERNAL_ONE_BYTE_STRING_TYPE
@ EXTERNAL_INTERNALIZED_ONE_BYTE_STRING_TYPE
@ INTERNALIZED_ONE_BYTE_STRING_TYPE
@ INTERNALIZED_TWO_BYTE_STRING_TYPE
const int kHeapObjectTag
Definition v8-internal.h:72
V8_INLINE void InitSelfIndirectPointerField(Address field_address, IsolateForSandbox isolate, Tagged< HeapObject > host, IndirectPointerTag tag, TrustedPointerPublishingScope *opt_publishing_scope)
int FastD2I(double x)
@ kExternalPointerNullTag
V8_INLINE constexpr bool IsHeapObject(TaggedImpl< kRefType, StorageType > obj)
Definition objects.h:669
V8_EXPORT_PRIVATE FlagValues v8_flags
V8_INLINE void InitExternalPointerField(Address host_address, Address field_address, IsolateForSandbox isolate, Address value)
bool IsUniqueName(Tagged< Name > obj)
constexpr IndirectPointerHandle kNullIndirectPointerHandle
NameDictionary PropertyDictionary
Definition dictionary.h:26
uint32_t ExternalPointerHandle
static uint32_t ObjectAddressForHashing(Address object)
V8_INLINE void WriteSandboxedPointerField(Address field_address, PtrComprCageBase cage_base, Address pointer)
static bool IsMinusZero(double value)
bool IsJSApiWrapperObject(Tagged< Map > map)
bool IsTaggedIndex(Tagged< Object > obj)
Definition objects-inl.h:91
bool IsPrivateSymbol(Tagged< Object > obj)
static V ReadMaybeUnalignedValue(Address p)
Definition ptr-compr.h:207
bool IsNoSharedNameSentinel(Tagged< Object > obj)
static constexpr Address kNullAddress
Definition v8-internal.h:53
constexpr int kMaxInt
Definition globals.h:374
SlotTraits::TMaybeObjectSlot MaybeObjectSlot
Definition globals.h:1248
constexpr intptr_t kDoubleAlignmentMask
Definition globals.h:950
V8_INLINE bool InsideSandbox(uintptr_t address)
Definition sandbox.h:334
constexpr uint32_t kMaxUInt32
Definition globals.h:387
V8_INLINE void WriteCodeEntrypointViaCodePointerField(Address field_address, Address value, CodeEntrypointTag tag)
V8_INLINE IndirectPointerTag IndirectPointerTagFromInstanceType(InstanceType instance_type)
V8_INLINE constexpr bool FastInReadOnlySpaceOrSmallSmi(Tagged_t obj)
static bool IsZero(const Operand &rt)
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
static constexpr ReleaseStoreTag kReleaseStore
Definition globals.h:2910
Maybe< T > Nothing()
Definition v8-maybe.h:112
static constexpr RelaxedLoadTag kRelaxedLoad
Definition globals.h:2909
static constexpr RelaxedStoreTag kRelaxedStore
Definition globals.h:2911
CppHeapPointerTag
Definition v8-sandbox.h:28
static constexpr AcquireLoadTag kAcquireLoad
Definition globals.h:2908
Maybe< T > Just(const T &t)
Definition v8-maybe.h:117
#define HEAP_OBJECT_TYPE_LIST(V)
#define HEAP_OBJECT_TRUSTED_TYPE_LIST(V)
#define HOLE_LIST(V)
#define VIRTUAL_OBJECT_TYPE_LIST(V)
#define ODDBALL_LIST(V)
#define HEAP_OBJECT_ORDINARY_TYPE_LIST(V)
#define CONDITIONAL_JS_DISPATCH_HANDLE_WRITE_BARRIER(object, handle, mode)
#define DEF_ACQUIRE_GETTER(holder, name,...)
#define EXTERNAL_POINTER_WRITE_BARRIER(object, offset, tag)
#define DEF_HEAP_OBJECT_PREDICATE(holder, name)
#define ACQUIRE_READ_UINT32_FIELD(p, offset)
#define CONDITIONAL_EXTERNAL_POINTER_WRITE_BARRIER(object, offset, tag, mode)
#define RELEASE_WRITE_UINT32_FIELD(p, offset, value)
#define STRUCT_LIST(V)
#define IS_HELPER_DEF_STRUCT(NAME, Name, name)
#define IS_TYPE_FUNCTION_DEF(type_)
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)
#define IS_HELPER_DEF(Type,...)
#define CHECK(condition)
Definition logging.h:124
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
constexpr bool IsAligned(T value, U alignment)
Definition macros.h:403
static bool AllowFrom(Tagged< HeapObject > value)
static bool AllowFrom(Tagged< Object > value)
static bool AllowFrom(Tagged< Object > value)
static bool AllowFrom(Tagged< HeapObject > value)
static bool AllowFrom(Tagged< HeapObject > value)
static bool AllowFrom(Tagged< Object > value)
static bool AllowFrom(Tagged< HeapObject > value)
static bool AllowFrom(Tagged< Object > value)