v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
objects-debug.cc
Go to the documentation of this file.
1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/base/logging.h"
8#include "src/date/date.h"
20#include "src/objects/bigint.h"
41#include "src/objects/objects.h"
45#include "src/roots/roots.h"
46#ifdef V8_INTL_SUPPORT
49#endif // V8_INTL_SUPPORT
51#ifdef V8_INTL_SUPPORT
55#endif // V8_INTL_SUPPORT
59#ifdef V8_INTL_SUPPORT
64#endif // V8_INTL_SUPPORT
68#ifdef V8_INTL_SUPPORT
73#endif // V8_INTL_SUPPORT
94#include "src/regexp/regexp.h"
96#include "src/utils/ostreams.h"
97#include "torque-generated/class-verifiers.h"
98
99#if V8_ENABLE_WEBASSEMBLY
100#include "src/base/strings.h"
103#endif // V8_ENABLE_WEBASSEMBLY
104
105namespace v8 {
106namespace internal {
107
108// Heap Verification Overview
109// --------------------------
110// - Each InstanceType has a separate XXXVerify method which checks an object's
111// integrity in isolation.
112// - --verify-heap will iterate over all gc spaces and call ObjectVerify() on
113// every encountered tagged pointer.
114// - Verification should be pushed down to the specific instance type if its
115// integrity is independent of an outer object.
116// - In cases where the InstanceType is too generic (e.g. FixedArray) the
117// XXXVerify of the outer method has to do recursive verification.
118// - If the corresponding objects have inheritance the parent's Verify method
119// is called as well.
120// - For any field containing pointers VerifyPointer(...) should be called.
121//
122// Caveats
123// -------
124// - Assume that any of the verify methods is incomplete!
125// - Some integrity checks are only partially done due to objects being in
126// partially initialized states when a gc happens, for instance when outer
127// objects are allocated before inner ones.
128//
129
130#ifdef VERIFY_HEAP
131
// Expands to a trivial Class::ClassVerify method that simply delegates to
// the Torque-generated verifier for that class.
#define USE_TORQUE_VERIFIER(Class)                                \
  void Class::Class##Verify(Isolate* isolate) {                   \
    TorqueGeneratedClassVerifiers::Class##Verify(*this, isolate); \
  }
136
137// static
138void Object::ObjectVerify(Tagged<Object> obj, Isolate* isolate) {
139 RCS_SCOPE(isolate, RuntimeCallCounterId::kObjectVerify);
140 if (IsSmi(obj)) {
141 Smi::SmiVerify(Cast<Smi>(obj), isolate);
142 } else {
143 Cast<HeapObject>(obj)->HeapObjectVerify(isolate);
144 }
145 PtrComprCageBase cage_base(isolate);
146 CHECK(!IsConstructor(obj, cage_base) || IsCallable(obj, cage_base));
147}
148
149void Object::VerifyPointer(Isolate* isolate, Tagged<Object> p) {
150 if (IsHeapObject(p)) {
151 HeapObject::VerifyHeapPointer(isolate, p);
152 } else {
153 CHECK(IsSmi(p));
154 }
155}
156
// Verifies a tagged value that may be a Smi or a heap object.
// NOTE(review): one line of the original (between the outer `if` and the
// first CHECK, presumably a nested condition) is elided in this extraction,
// so the braces below do not balance here — verify against upstream before
// editing.
void Object::VerifyAnyTagged(Isolate* isolate, Tagged<Object> p) {
  if (IsHeapObject(p)) {
      CHECK(IsValidHeapObject(isolate->heap(), Cast<HeapObject>(p)));
    } else {
      HeapObject::VerifyHeapPointer(isolate, p);
    }
  } else {
    CHECK(IsSmi(p));
  }
}
168
169void Object::VerifyMaybeObjectPointer(Isolate* isolate, Tagged<MaybeObject> p) {
170 Tagged<HeapObject> heap_object;
171 if (p.GetHeapObject(&heap_object)) {
172 HeapObject::VerifyHeapPointer(isolate, heap_object);
173 } else {
174 CHECK(p.IsSmi() || p.IsCleared() || MapWord::IsPacked(p.ptr()));
175 }
176}
177
// static
// A Smi must not satisfy any heap-object predicates.
// NOTE(review): one line of the original (between the signature and the
// first CHECK) is elided in this extraction — verify against upstream.
void Smi::SmiVerify(Tagged<Smi> obj, Isolate* isolate) {
  CHECK(!IsCallable(obj));
  CHECK(!IsConstructor(obj));
}
184
// static
// A TaggedIndex only has to satisfy its own type predicate.
void TaggedIndex::TaggedIndexVerify(Tagged<TaggedIndex> obj, Isolate* isolate) {
  CHECK(IsTaggedIndex(obj));
}
189
// Dispatches to the per-type verifier selected by the object's instance
// type. Strings are special-cased because several string representations
// share the string instance types; Torque-generated classes are handled by
// the MAKE_TORQUE_CASE expansion near the bottom.
void HeapObject::HeapObjectVerify(Isolate* isolate) {
  CHECK(IsHeapObject(*this));
  PtrComprCageBase cage_base(isolate);
  // The map itself must be a valid pointer to a Map.
  Object::VerifyPointer(isolate, map(cage_base));
  CHECK(IsMap(map(cage_base), cage_base));

  // NOTE(review): one line of the original is elided here in this
  // extraction — verify against upstream.

  // Only TrustedObjects live in trusted space. See also TrustedObjectVerify.
  // NOTE(review): the second operand of this CHECK_IMPLIES (the space check)
  // is elided in this extraction, so the statement below is syntactically
  // incomplete here — verify against upstream.
  CHECK_IMPLIES(!IsTrustedObject(*this) && !IsFreeSpaceOrFiller(*this),

  switch (map(cage_base)->instance_type()) {
#define STRING_TYPE_CASE(TYPE, size, name, CamelName) case TYPE:
    STRING_TYPE_LIST(STRING_TYPE_CASE)
#undef STRING_TYPE_CASE
      // Pick the verifier matching the concrete string representation.
      if (IsConsString(*this, cage_base)) {
        Cast<ConsString>(*this)->ConsStringVerify(isolate);
      } else if (IsSlicedString(*this, cage_base)) {
        Cast<SlicedString>(*this)->SlicedStringVerify(isolate);
      } else if (IsThinString(*this, cage_base)) {
        Cast<ThinString>(*this)->ThinStringVerify(isolate);
      } else if (IsSeqString(*this, cage_base)) {
        Cast<SeqString>(*this)->SeqStringVerify(isolate);
      } else if (IsExternalString(*this, cage_base)) {
        Cast<ExternalString>(*this)->ExternalStringVerify(isolate);
      } else {
        Cast<String>(*this)->StringVerify(isolate);
      }
      break;
    // FixedArray types
    case HASH_TABLE_TYPE:
    case ORDERED_HASH_MAP_TYPE:
    case ORDERED_HASH_SET_TYPE:
    case ORDERED_NAME_DICTIONARY_TYPE:
    case NAME_TO_INDEX_HASH_TABLE_TYPE:
    case REGISTERED_SYMBOL_TABLE_TYPE:
    case NAME_DICTIONARY_TYPE:
    case GLOBAL_DICTIONARY_TYPE:
    case NUMBER_DICTIONARY_TYPE:
    case SIMPLE_NUMBER_DICTIONARY_TYPE:
    case EPHEMERON_HASH_TABLE_TYPE:
      Cast<FixedArray>(*this)->FixedArrayVerify(isolate);
      break;
    case AWAIT_CONTEXT_TYPE:
    case BLOCK_CONTEXT_TYPE:
    case CATCH_CONTEXT_TYPE:
    case DEBUG_EVALUATE_CONTEXT_TYPE:
    case EVAL_CONTEXT_TYPE:
    case FUNCTION_CONTEXT_TYPE:
    case MODULE_CONTEXT_TYPE:
    case SCRIPT_CONTEXT_TYPE:
    case WITH_CONTEXT_TYPE:
      Cast<Context>(*this)->ContextVerify(isolate);
      break;
    case NATIVE_CONTEXT_TYPE:
      Cast<NativeContext>(*this)->NativeContextVerify(isolate);
      break;
    case FEEDBACK_METADATA_TYPE:
      Cast<FeedbackMetadata>(*this)->FeedbackMetadataVerify(isolate);
      break;
    case TRANSITION_ARRAY_TYPE:
      Cast<TransitionArray>(*this)->TransitionArrayVerify(isolate);
      break;

    case INSTRUCTION_STREAM_TYPE:
      Cast<InstructionStream>(*this)->InstructionStreamVerify(isolate);
      break;
    case JS_API_OBJECT_TYPE:
    case JS_ARRAY_ITERATOR_PROTOTYPE_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_ERROR_TYPE:
    case JS_ITERATOR_PROTOTYPE_TYPE:
    case JS_MAP_ITERATOR_PROTOTYPE_TYPE:
    case JS_OBJECT_PROTOTYPE_TYPE:
    case JS_PROMISE_PROTOTYPE_TYPE:
    case JS_REG_EXP_PROTOTYPE_TYPE:
    case JS_SET_ITERATOR_PROTOTYPE_TYPE:
    case JS_SET_PROTOTYPE_TYPE:
    case JS_SPECIAL_API_OBJECT_TYPE:
    case JS_STRING_ITERATOR_PROTOTYPE_TYPE:
    case JS_TYPED_ARRAY_PROTOTYPE_TYPE:
      Cast<JSObject>(*this)->JSObjectVerify(isolate);
      break;
#if V8_ENABLE_WEBASSEMBLY
    case WASM_TRUSTED_INSTANCE_DATA_TYPE:
      Cast<WasmTrustedInstanceData>(*this)->WasmTrustedInstanceDataVerify(
          isolate);
      break;
    case WASM_DISPATCH_TABLE_TYPE:
      Cast<WasmDispatchTable>(*this)->WasmDispatchTableVerify(isolate);
      break;
    case WASM_VALUE_OBJECT_TYPE:
      Cast<WasmValueObject>(*this)->WasmValueObjectVerify(isolate);
      break;
    case WASM_EXCEPTION_PACKAGE_TYPE:
      Cast<WasmExceptionPackage>(*this)->WasmExceptionPackageVerify(isolate);
      break;
#endif  // V8_ENABLE_WEBASSEMBLY
    case JS_SET_KEY_VALUE_ITERATOR_TYPE:
    case JS_SET_VALUE_ITERATOR_TYPE:
      Cast<JSSetIterator>(*this)->JSSetIteratorVerify(isolate);
      break;
    case JS_MAP_KEY_ITERATOR_TYPE:
    case JS_MAP_KEY_VALUE_ITERATOR_TYPE:
    case JS_MAP_VALUE_ITERATOR_TYPE:
      Cast<JSMapIterator>(*this)->JSMapIteratorVerify(isolate);
      break;
    case FILLER_TYPE:
      // Fillers carry no payload; nothing to verify.
      break;
    case CODE_TYPE:
      Cast<Code>(*this)->CodeVerify(isolate);
      break;
    case CODE_WRAPPER_TYPE:
      Cast<CodeWrapper>(*this)->CodeWrapperVerify(isolate);
      break;

#define MAKE_TORQUE_CASE(Name, TYPE)          \
  case TYPE:                                  \
    Cast<Name>(*this)->Name##Verify(isolate); \
    break;
      // Every class that has its fields defined in a .tq file and corresponds
      // to exactly one InstanceType value is included in the following list.
      TORQUE_INSTANCE_CHECKERS_SINGLE_FULLY_DEFINED(MAKE_TORQUE_CASE)
      TORQUE_INSTANCE_CHECKERS_MULTIPLE_FULLY_DEFINED(MAKE_TORQUE_CASE)
#undef MAKE_TORQUE_CASE

    case TUPLE2_TYPE:
      Cast<Tuple2>(*this)->Tuple2Verify(isolate);
      break;

    case CLASS_POSITIONS_TYPE:
      Cast<ClassPositions>(*this)->ClassPositionsVerify(isolate);
      break;

    case ACCESSOR_PAIR_TYPE:
      Cast<AccessorPair>(*this)->AccessorPairVerify(isolate);
      break;

    case ALLOCATION_SITE_TYPE:
      Cast<AllocationSite>(*this)->AllocationSiteVerify(isolate);
      break;

    case LOAD_HANDLER_TYPE:
      Cast<LoadHandler>(*this)->LoadHandlerVerify(isolate);
      break;

    case STORE_HANDLER_TYPE:
      Cast<StoreHandler>(*this)->StoreHandlerVerify(isolate);
      break;

    case BIG_INT_BASE_TYPE:
      Cast<BigIntBase>(*this)->BigIntBaseVerify(isolate);
      break;

    case JS_CLASS_CONSTRUCTOR_TYPE:
    case JS_PROMISE_CONSTRUCTOR_TYPE:
    case JS_REG_EXP_CONSTRUCTOR_TYPE:
    case JS_ARRAY_CONSTRUCTOR_TYPE:
#define TYPED_ARRAY_CONSTRUCTORS_SWITCH(Type, type, TYPE, Ctype) \
  case TYPE##_TYPED_ARRAY_CONSTRUCTOR_TYPE:
      // NOTE(review): the line that expands the macro above over the typed
      // array list is elided in this extraction — verify against upstream.
#undef TYPED_ARRAY_CONSTRUCTORS_SWITCH
      Cast<JSFunction>(*this)->JSFunctionVerify(isolate);
      break;
    case JS_LAST_DUMMY_API_OBJECT_TYPE:
      UNREACHABLE();
  }
}
359
// static
// Verifies that {p} is a valid pointer into the isolate's heap.
// NOTE(review): one line of the original is elided here in this extraction
// (likely a precondition check) — verify against upstream.
void HeapObject::VerifyHeapPointer(Isolate* isolate, Tagged<Object> p) {
  // If you crashed here and {isolate->is_shared()}, there is a bug causing the
  // host of {p} to point to a non-shared object.
  CHECK(IsValidHeapObject(isolate->heap(), Cast<HeapObject>(p)));
  // With an external code space, InstructionStreams must be verified via
  // VerifyCodePointer instead of this method.
  CHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsInstructionStream(p));
}
368
// static
// Verifies that {p} is a valid InstructionStream pointer into code space.
// NOTE(review): one line of the original is elided here in this extraction —
// verify against upstream.
void HeapObject::VerifyCodePointer(Isolate* isolate, Tagged<Object> p) {
  CHECK(IsValidCodeObject(isolate->heap(), Cast<HeapObject>(p)));
  PtrComprCageBase cage_base(isolate);
  CHECK(IsInstructionStream(Cast<HeapObject>(p), cage_base));
}
376
// A Name is a primitive heap object; run the parent verifier first, then
// confirm the type predicate.
void Name::NameVerify(Isolate* isolate) {
  PrimitiveHeapObjectVerify(isolate);
  CHECK(IsName(this));
}
381
// A Symbol must have a non-zero hash (computed eagerly at creation) and a
// description that is either undefined or a string.
// NOTE(review): two lines of the original are elided at the end of this
// function in this extraction — verify against upstream.
void Symbol::SymbolVerify(Isolate* isolate) {
  NameVerify(isolate);
  CHECK(IsSymbol(this));
  uint32_t hash;
  const bool has_hash = TryGetHash(&hash);
  CHECK(has_hash);
  CHECK_GT(hash, 0);
  CHECK(IsUndefined(description(), isolate) || IsString(description()));
}
393
// Verifies the fields of a BytecodeArray: length, constant pool, handler
// table, back-pointing wrapper, and (optional) source position table.
void BytecodeArray::BytecodeArrayVerify(Isolate* isolate) {
  ExposedTrustedObjectVerify(isolate);

  {
    // The raw length field must be a Smi and non-negative.
    CHECK(IsSmi(TaggedField<Object>::load(*this, kLengthOffset)));
    CHECK_LE(0, length());
    // NOTE(review): one line of the original is elided here in this
    // extraction — verify against upstream.
  }
  {
    auto o = constant_pool();
    Object::VerifyPointer(isolate, o);
    CHECK(IsTrustedFixedArray(o));
  }
  {
    auto o = handler_table();
    Object::VerifyPointer(isolate, o);
    CHECK(IsTrustedByteArray(o));
  }
  {
    auto o = wrapper();
    Object::VerifyPointer(isolate, o);
    CHECK(IsBytecodeWrapper(o));
    // Our wrapper must point back to us.
    CHECK_EQ(o->bytecode(isolate), *this);
  }
  {
    // Use the raw accessor here as source positions may not be available.
    auto o = raw_source_position_table(kAcquireLoad);
    Object::VerifyPointer(isolate, o);
    CHECK(o == Smi::zero() || IsTrustedByteArray(o));
  }

  // TODO(oth): Walk bytecodes and immediate values to validate sanity.
  // - All bytecodes are known and well formed.
  // - Jumps must go to new instructions starts.
  // - No Illegal bytecodes.
  // - No consecutive sequences of prefix Wide / ExtraWide.
  // - String constants for loads should be internalized strings.
}
433
434void BytecodeWrapper::BytecodeWrapperVerify(Isolate* isolate) {
435 if (!this->has_bytecode()) return;
436 auto bytecode = this->bytecode(isolate);
437 Object::VerifyPointer(isolate, bytecode);
438 CHECK_EQ(bytecode->wrapper(), *this);
439}
440
441bool JSObject::ElementsAreSafeToExamine(PtrComprCageBase cage_base) const {
442 // If a GC was caused while constructing this object, the elements
443 // pointer may point to a one pointer filler map.
444 return elements(cage_base) != GetReadOnlyRoots().one_pointer_filler_map();
445}
446
447namespace {
448
449void VerifyJSObjectElements(Isolate* isolate, Tagged<JSObject> object) {
450 // Only TypedArrays can have these specialized elements.
451 if (IsJSTypedArray(object)) {
452 // TODO(bmeurer,v8:4153): Fix CreateTypedArray to either not instantiate
453 // the object or properly initialize it on errors during construction.
454 /* CHECK(object->HasTypedArrayOrRabGsabTypedArrayElements()); */
455 return;
456 }
457 CHECK(!IsByteArray(object->elements()));
458
459 if (object->HasDoubleElements()) {
460 if (object->elements()->length() > 0) {
461 CHECK(IsFixedDoubleArray(object->elements()));
462 }
463 return;
464 }
465
466 if (object->HasSloppyArgumentsElements()) {
467 CHECK(IsSloppyArgumentsElements(object->elements()));
468 return;
469 }
470
471 Tagged<FixedArray> elements = Cast<FixedArray>(object->elements());
472 if (object->HasSmiElements()) {
473 // We might have a partially initialized backing store, in which case we
474 // allow the hole + smi values.
475 for (int i = 0; i < elements->length(); i++) {
476 Tagged<Object> value = elements->get(i);
477 CHECK(IsSmi(value) || IsTheHole(value, isolate));
478 }
479 } else if (object->HasObjectElements()) {
480 for (int i = 0; i < elements->length(); i++) {
481 Tagged<Object> element = elements->get(i);
482 CHECK(!HasWeakHeapObjectTag(element));
483 }
484 }
485}
486} // namespace
487
// Verifies a JSObject: elements pointer, unused-property-field accounting,
// each own fast property against its descriptor details, the enum cache,
// and finally the elements backing store.
void JSObject::JSObjectVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSObjectVerify(*this, isolate);
  VerifyHeapPointer(isolate, elements());

  CHECK_IMPLIES(HasSloppyArgumentsElements(), IsJSArgumentsObject(*this));
  if (HasFastProperties()) {
    int actual_unused_property_fields = map()->GetInObjectProperties() +
                                        property_array()->length() -
                                        map()->NextFreePropertyIndex();
    if (map()->UnusedPropertyFields() != actual_unused_property_fields) {
      // There are two reasons why this can happen:
      // - in the middle of StoreTransitionStub when the new extended backing
      //   store is already set into the object and the allocation of the
      //   HeapNumber triggers GC while the map isn't updated yet.
      // - deletion of the last property can leave additional backing store
      //   capacity behind.
      CHECK_GT(actual_unused_property_fields, map()->UnusedPropertyFields());
      int delta = actual_unused_property_fields - map()->UnusedPropertyFields();
      // NOTE(review): one line of the original using {delta} is elided here
      // in this extraction — verify against upstream.
    }
    Tagged<DescriptorArray> descriptors = map()->instance_descriptors(isolate);
    bool is_transitionable_fast_elements_kind =
        IsTransitionableFastElementsKind(map()->elements_kind());

    // Cross-check each own fast property against its descriptor details.
    for (InternalIndex i : map()->IterateOwnDescriptors()) {
      PropertyDetails details = descriptors->GetDetails(i);
      if (details.location() == PropertyLocation::kField) {
        CHECK_EQ(PropertyKind::kData, details.kind());
        Representation r = details.representation();
        FieldIndex index = FieldIndex::ForDetails(map(), details);
        if (COMPRESS_POINTERS_BOOL && index.is_inobject()) {
          VerifyObjectField(isolate, index.offset());
        }
        Tagged<Object> value = RawFastPropertyAt(index);
        CHECK_IMPLIES(r.IsDouble(), IsHeapNumber(value));
        if (IsUninitialized(value, isolate)) continue;
        CHECK_IMPLIES(r.IsSmi(), IsSmi(value));
        CHECK_IMPLIES(r.IsHeapObject(), IsHeapObject(value));
        Tagged<FieldType> field_type = descriptors->GetFieldType(i);
        bool type_is_none = IsNone(field_type);
        bool type_is_any = IsAny(field_type);
        if (r.IsNone()) {
          CHECK(type_is_none);
        } else if (r.IsHeapObject()) {
          CHECK(!type_is_none);
          if (!type_is_any) {
            // NOTE(review): the opening line of this CHECK is elided in this
            // extraction, so the statement below is incomplete here — verify
            // against upstream.
                  map()->is_deprecated() ||
                  FieldType::NowContains(field_type, value));
          }
        } else {
          CHECK(type_is_any);
        }
        CHECK_IMPLIES(is_transitionable_fast_elements_kind,
                      Map::IsMostGeneralFieldType(r, field_type));
      }
    }

    if (map()->EnumLength() != kInvalidEnumCacheSentinel) {
      Tagged<EnumCache> enum_cache = descriptors->enum_cache();
      Tagged<FixedArray> keys = enum_cache->keys();
      Tagged<FixedArray> indices = enum_cache->indices();
      CHECK_LE(map()->EnumLength(), keys->length());
      CHECK_IMPLIES(indices != ReadOnlyRoots(isolate).empty_fixed_array(),
                    keys->length() == indices->length());
    }
  }

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine(isolate)) {
    CHECK_EQ((map()->has_fast_smi_or_object_elements() ||
              map()->has_any_nonextensible_elements() ||
              (elements() == GetReadOnlyRoots().empty_fixed_array()) ||
              HasFastStringWrapperElements()),
             (elements()->map() == GetReadOnlyRoots().fixed_array_map() ||
              elements()->map() == GetReadOnlyRoots().fixed_cow_array_map()));
    CHECK_EQ(map()->has_fast_object_elements(), HasObjectElements());
    VerifyJSObjectElements(isolate, *this);
  }
}
569
// Verifies a Map: instance size bounds, back-pointer / descriptor-sharing
// invariants, meta-map/native-context ties, JSObject layout, constructor
// fields, and the "interesting properties" bits.
// NOTE(review): this extraction elides a number of original lines (marked
// below); several statements are syntactically incomplete here — verify
// against upstream before editing.
void Map::MapVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::MapVerify(*this, isolate);
  Heap* heap = isolate->heap();
  CHECK(!HeapLayout::InYoungGeneration(Tagged<Map>(*this)));
  CHECK(FIRST_TYPE <= instance_type() && instance_type() <= LAST_TYPE);
  CHECK(instance_size() == kVariableSizeSentinel ||
        (kTaggedSize <= instance_size() &&
         static_cast<size_t>(instance_size()) < heap->Capacity()));
#if V8_ENABLE_WEBASSEMBLY
  bool is_wasm_struct = InstanceTypeChecker::IsWasmStruct(instance_type());
#else
  constexpr bool is_wasm_struct = false;
#endif  // V8_ENABLE_WEBASSEMBLY
  if (IsContextMap(*this)) {
    // The map for the NativeContext is allocated before the NativeContext
    // itself, so it may happen that during a GC the native_context() is still
    // null.
    CHECK(IsNull(native_context_or_null()) ||
          IsNativeContext(native_context_or_null()));
    // The context's meta map is tied to the same native context.
    CHECK_EQ(native_context_or_null(), map()->native_context_or_null());
  } else {
    if (IsUndefined(GetBackPointer(), isolate)) {
      if (!is_wasm_struct) {
        // Root maps must not have descriptors in the descriptor array that do
        // not belong to the map.
        // NOTE(review): two lines of the original are elided here in this
        // extraction — verify against upstream.
      }
    } else {
      // If there is a parent map it must be non-stable.
      Tagged<Map> parent = Cast<Map>(GetBackPointer());
      CHECK(!parent->is_stable());
      Tagged<DescriptorArray> descriptors = instance_descriptors(isolate);
      if (!is_deprecated() && !parent->is_deprecated()) {
        // NOTE(review): the opening line of this CHECK is elided in this
        // extraction — verify against upstream.
                 parent->IsInobjectSlackTrackingInProgress());
      }
      if (descriptors == parent->instance_descriptors(isolate)) {
        if (NumberOfOwnDescriptors() == parent->NumberOfOwnDescriptors() + 1) {
          // Descriptors sharing through property transitions takes over
          // ownership from the parent map.
          CHECK(!parent->owns_descriptors());
        } else {
          CHECK_EQ(NumberOfOwnDescriptors(), parent->NumberOfOwnDescriptors());
          // Descriptors sharing through special transitions properly takes over
          // ownership from the parent map unless it uses the canonical empty
          // descriptor array.
          if (descriptors != ReadOnlyRoots(isolate).empty_descriptor_array()) {
            CHECK_IMPLIES(owns_descriptors(), !parent->owns_descriptors());
            CHECK_IMPLIES(parent->owns_descriptors(), !owns_descriptors());
          }
        }
      }
    }
  }
  if (!is_wasm_struct) {
    SLOW_DCHECK(instance_descriptors(isolate)->IsSortedNoDuplicates());
  }
  SLOW_DCHECK(TransitionsAccessor(isolate, *this).IsSortedNoDuplicates());
  // NOTE(review): the opening line of this check is elided in this
  // extraction — verify against upstream.
              TransitionsAccessor(isolate, *this).IsConsistentWithBackPointers());
  // Only JSFunction maps have has_prototype_slot() bit set and constructible
  // JSFunction objects must have prototype slot.
  CHECK_IMPLIES(has_prototype_slot(), IsJSFunctionMap(*this));

  // NOTE(review): the opening `if` guarding this section is elided in this
  // extraction (note the `} else if` below) — verify against upstream.
  // Native context-specific objects must have their own contextful meta map
  // modulo the following exceptions.
  if (instance_type() == NATIVE_CONTEXT_TYPE ||
      instance_type() == JS_GLOBAL_PROXY_TYPE) {
    // 1) During creation of the NativeContext the native context field might
    //    not be initialized yet.
    // 2) The same applies to the placeholder JSGlobalProxy object created by
    //    Factory::NewUninitializedJSGlobalProxy.
    CHECK(IsNull(map()->native_context_or_null()) ||
          IsNativeContext(map()->native_context_or_null()));

  } else if (instance_type() == JS_SPECIAL_API_OBJECT_TYPE) {
    // 3) Remote Api objects' maps have the RO meta map (and thus are not
    //    tied to any native context) while all the other Api objects are
    //    tied to a native context.
    CHECK_IMPLIES(map() != GetReadOnlyRoots().meta_map(),
                  IsNativeContext(map()->native_context_or_null()));

  } else {
    // For all the other objects native context specific objects the
    // native context field must already be initialized.
    CHECK(IsNativeContext(map()->native_context_or_null()));
  }
  } else if (InstanceTypeChecker::IsAlwaysSharedSpaceJSObject(
                 instance_type())) {
    // Shared objects' maps must use the RO meta map.
    CHECK_EQ(map(), GetReadOnlyRoots().meta_map());
  }

  if (IsJSObjectMap(*this)) {
    int header_end_offset = JSObject::GetHeaderSize(*this);
    int inobject_fields_start_offset = GetInObjectPropertyOffset(0);
    // Ensure that embedder fields are located exactly between header and
    // inobject properties.
    CHECK_EQ(header_end_offset, JSObject::GetEmbedderFieldsStartOffset(*this));
    // NOTE(review): the middle line of this CHECK_EQ (the embedder-fields
    // size term) is elided in this extraction — verify against upstream.
    CHECK_EQ(header_end_offset +
             inobject_fields_start_offset);

    if (IsJSSharedStructMap(*this) || IsJSSharedArrayMap(*this) ||
        IsJSAtomicsMutex(*this) || IsJSAtomicsCondition(*this)) {
      // NOTE(review): one line is elided here in this extraction.
      // TODO(v8:14089): Verify what should be checked in this configuration
      // and again merge with the else-branch below.
      // CHECK(InSharedHeap());
      CHECK(IsUndefined(GetBackPointer(), isolate));
      // Object maybe_cell = prototype_validity_cell(kRelaxedLoad);
      // if (maybe_cell.IsCell()) CHECK(maybe_cell.InSharedHeap());
      // NOTE(review): several lines are elided here in this extraction.
      // CHECK(instance_descriptors(isolate).InSharedHeap());
      if (IsJSSharedArrayMap(*this)) {
        // NOTE(review): the body of this `if` is elided in this extraction.
      }
    } else {
      // NOTE(review): one line is elided here in this extraction.
      CHECK(IsUndefined(GetBackPointer(), isolate));
      Tagged<Object> maybe_cell = prototype_validity_cell(kRelaxedLoad);
      if (IsCell(maybe_cell))
        // NOTE(review): several lines are elided here in this extraction.
      if (IsJSSharedArrayMap(*this)) {
        // NOTE(review): the body of this `if` is elided in this extraction.
      }
    }
  }

  // Check constructor value in JSFunction's maps.
  if (IsJSFunctionMap(*this) && !IsMap(constructor_or_back_pointer())) {
    Tagged<Object> maybe_constructor = constructor_or_back_pointer();
    // Constructor field might still contain a tuple if this map used to
    // have non-instance prototype earlier.
    CHECK_IMPLIES(has_non_instance_prototype(), IsTuple2(maybe_constructor));
    if (IsTuple2(maybe_constructor)) {
      Tagged<Tuple2> tuple = Cast<Tuple2>(maybe_constructor);
      // Unwrap the {constructor, non-instance_prototype} pair.
      maybe_constructor = tuple->value1();
      CHECK(!IsJSReceiver(tuple->value2()));
    }
    CHECK(IsJSFunction(maybe_constructor) ||
          IsFunctionTemplateInfo(maybe_constructor) ||
          // The above check might fail until empty function setup is done.
          IsUndefined(isolate->raw_native_context()->get(
              Context::EMPTY_FUNCTION_INDEX)));
  }
  }

  if (!may_have_interesting_properties()) {
    // NOTE(review): one line is elided here in this extraction.
    CHECK(!is_dictionary_map());
    // NOTE(review): one line is elided here in this extraction.
    Tagged<DescriptorArray> const descriptors = instance_descriptors(isolate);
    for (InternalIndex i : IterateOwnDescriptors()) {
      CHECK(!descriptors->GetKey(i)->IsInteresting(isolate));
    }
  }
  CHECK_IMPLIES(has_named_interceptor(), may_have_interesting_properties());
  CHECK_IMPLIES(is_dictionary_map(), may_have_interesting_properties());
  CHECK_IMPLIES(is_dictionary_map(), owns_descriptors());
  CHECK_IMPLIES(is_access_check_needed(), may_have_interesting_properties());
  // NOTE(review): the opening line of this CHECK_IMPLIES is elided in this
  // extraction — verify against upstream.
      IsJSObjectMap(*this) && !CanHaveFastTransitionableElementsKind(),
      IsDictionaryElementsKind(elements_kind()) ||
          IsTerminalElementsKind(elements_kind()) ||
          IsAnyHoleyNonextensibleElementsKind(elements_kind()) ||
          IsSharedArrayElementsKind(elements_kind()));
  // NOTE(review): one line is elided here in this extraction.
  if (is_prototype_map()) {
    CHECK(prototype_info() == Smi::zero() || IsPrototypeInfo(prototype_info()));
  }
}
752
// A dictionary-mode map must own no fast descriptors and use the canonical
// empty descriptor array.
// NOTE(review): two lines of the original are elided in this extraction
// (one before each CHECK_EQ) — verify against upstream.
void Map::DictionaryMapVerify(Isolate* isolate) {
  MapVerify(isolate);
  CHECK(is_dictionary_map());
  CHECK_EQ(ReadOnlyRoots(isolate).empty_descriptor_array(),
           instance_descriptors(isolate));
  CHECK_EQ(Map::GetVisitorId(*this), visitor_id());
}
762
763void EmbedderDataArray::EmbedderDataArrayVerify(Isolate* isolate) {
764 TorqueGeneratedClassVerifiers::EmbedderDataArrayVerify(*this, isolate);
765 EmbedderDataSlot start(*this, 0);
766 EmbedderDataSlot end(*this, length());
767 for (EmbedderDataSlot slot = start; slot < end; ++slot) {
768 Tagged<Object> e = slot.load_tagged();
769 Object::VerifyPointer(isolate, e);
770 }
771}
772
// The length field of any FixedArrayBase must always hold a Smi.
void FixedArrayBase::FixedArrayBaseVerify(Isolate* isolate) {
  CHECK(IsSmi(length_.load()));
}
776
777void FixedArray::FixedArrayVerify(Isolate* isolate) {
778 CHECK(IsSmi(length_.load()));
779
780 for (int i = 0; i < length(); ++i) {
781 Object::VerifyPointer(isolate, get(i));
782 }
783
784 if (this == ReadOnlyRoots(isolate).empty_fixed_array()) {
785 CHECK_EQ(length(), 0);
786 CHECK_EQ(map(), ReadOnlyRoots(isolate).fixed_array_map());
787 }
788}
789
790void TrustedFixedArray::TrustedFixedArrayVerify(Isolate* isolate) {
791 TrustedObjectVerify(isolate);
792 CHECK(IsSmi(length_.load()));
793
794 for (int i = 0; i < length(); ++i) {
795 Object::VerifyPointer(isolate, get(i));
796 }
797}
798
799void ProtectedFixedArray::ProtectedFixedArrayVerify(Isolate* isolate) {
800 TrustedObjectVerify(isolate);
801
802 CHECK(IsSmi(length_.load()));
803
804 for (int i = 0; i < length(); ++i) {
805 Tagged<Object> element = get(i);
806 CHECK(IsSmi(element) || IsTrustedObject(element));
807 Object::VerifyPointer(isolate, element);
808 }
809}
810
// Verifies a RegExpMatchInfo: capacity bounds, last subject/input fields,
// and that all capture slots hold Smis.
// NOTE(review): two lines of the original are elided in this extraction
// (between the capacity checks and the last_subject check) — verify against
// upstream.
void RegExpMatchInfo::RegExpMatchInfoVerify(Isolate* isolate) {
  CHECK(IsSmi(length_.load()));
  CHECK_GE(capacity(), kMinCapacity);
  CHECK_LE(capacity(), kMaxCapacity);
  CHECK(IsString(last_subject()));
  Object::VerifyPointer(isolate, last_input());
  for (int i = 0; i < capacity(); ++i) {
    CHECK(IsSmi(get(i)));
  }
}
823
824void FeedbackCell::FeedbackCellVerify(Isolate* isolate) {
825 Tagged<Object> v = value();
826 Object::VerifyPointer(isolate, v);
827 CHECK(IsUndefined(v) || IsClosureFeedbackCellArray(v) || IsFeedbackVector(v));
828
829#ifdef V8_ENABLE_LEAPTIERING
830 JSDispatchHandle handle = dispatch_handle();
831 if (handle == kNullJSDispatchHandle) return;
832
833 JSDispatchTable* jdt = IsolateGroup::current()->js_dispatch_table();
834 Tagged<Code> code = jdt->GetCode(handle);
835 CodeKind kind = code->kind();
836 CHECK(kind == CodeKind::FOR_TESTING || kind == CodeKind::BUILTIN ||
837 kind == CodeKind::INTERPRETED_FUNCTION || kind == CodeKind::BASELINE ||
838 kind == CodeKind::MAGLEV || kind == CodeKind::TURBOFAN_JS);
839#endif
840}
841
842void ClosureFeedbackCellArray::ClosureFeedbackCellArrayVerify(
843 Isolate* isolate) {
844 CHECK(IsSmi(length_.load()));
845 for (int i = 0; i < length(); ++i) {
846 Object::VerifyPointer(isolate, get(i));
847 }
848}
849
850void WeakFixedArray::WeakFixedArrayVerify(Isolate* isolate) {
851 CHECK(IsSmi(length_.load()));
852 for (int i = 0; i < length(); i++) {
853 Object::VerifyMaybeObjectPointer(isolate, get(i));
854 }
855}
856
857void TrustedWeakFixedArray::TrustedWeakFixedArrayVerify(Isolate* isolate) {
858 CHECK(IsSmi(length_.load()));
859 for (int i = 0; i < length(); i++) {
860 Object::VerifyMaybeObjectPointer(isolate, get(i));
861 }
862}
863
// Verifies a ProtectedWeakFixedArray: heap references must be weak and
// point to trusted objects; everything else must be a Smi or cleared.
// NOTE(review): the original line declaring the loop variable {p}
// (presumably the per-element load) is elided in this extraction, so the
// body below references an undeclared {p} here — verify against upstream.
void ProtectedWeakFixedArray::ProtectedWeakFixedArrayVerify(Isolate* isolate) {
  TrustedObjectVerify(isolate);
  CHECK(IsSmi(length_.load()));
  for (int i = 0; i < length(); i++) {
    Tagged<HeapObject> heap_object;
    if (p.GetHeapObject(&heap_object)) {
      // We could relax this, but for now we assume that strong pointers in a
      // weak fixed array are unintentional and should be reported.
      CHECK(p.IsWeak());
      CHECK(IsTrustedObject(heap_object));
      HeapObject::VerifyHeapPointer(isolate, heap_object);
    } else {
      CHECK(p.IsSmi() || p.IsCleared());
    }
  }
}
881
882void ScriptContextTable::ScriptContextTableVerify(Isolate* isolate) {
883 CHECK(IsSmi(capacity_.load()));
884 CHECK(IsSmi(length_.load()));
885 int len = length(kAcquireLoad);
886 CHECK_LE(0, len);
887 CHECK_LE(len, capacity());
888 CHECK(IsNameToIndexHashTable(names_to_context_index()));
889 for (int i = 0; i < len; ++i) {
890 Tagged<Context> o = get(i);
891 Object::VerifyPointer(isolate, o);
892 CHECK(IsContext(o));
893 CHECK(o->IsScriptContext());
894 }
895}
896
897void ArrayList::ArrayListVerify(Isolate* isolate) {
898 CHECK_LE(0, length());
899 CHECK_LE(length(), capacity());
900 CHECK_IMPLIES(capacity() == 0,
901 this == ReadOnlyRoots(isolate).empty_array_list());
902 for (int i = 0; i < capacity(); ++i) {
903 Object::VerifyPointer(isolate, get(i));
904 }
905}
906
907void PropertyArray::PropertyArrayVerify(Isolate* isolate) {
908 TorqueGeneratedClassVerifiers::PropertyArrayVerify(*this, isolate);
909 if (length() == 0) {
910 CHECK_EQ(*this, ReadOnlyRoots(isolate).empty_property_array());
911 return;
912 }
913 // There are no empty PropertyArrays.
914 CHECK_LT(0, length());
915 for (int i = 0; i < length(); i++) {
916 Tagged<Object> e = get(i);
917 Object::VerifyPointer(isolate, e);
918 }
919}
920
// ByteArrays contain raw bytes only; there are no tagged fields to verify.
void ByteArray::ByteArrayVerify(Isolate* isolate) {}
922
// A TrustedByteArray only needs the generic trusted-object checks; its
// payload is raw bytes.
void TrustedByteArray::TrustedByteArrayVerify(Isolate* isolate) {
  TrustedObjectVerify(isolate);
}
926
927void FixedDoubleArray::FixedDoubleArrayVerify(Isolate* isolate) {
928 for (int i = 0; i < length(); i++) {
929 if (!is_the_hole(i)) {
930 uint64_t value = get_representation(i);
931 uint64_t unexpected =
932 base::bit_cast<uint64_t>(std::numeric_limits<double>::quiet_NaN()) &
933 uint64_t{0x7FF8000000000000};
934 // Create implementation specific sNaN by inverting relevant bit.
935 unexpected ^= uint64_t{0x0008000000000000};
936 CHECK((value & uint64_t{0x7FF8000000000000}) != unexpected ||
937 (value & uint64_t{0x0007FFFFFFFFFFFF}) == uint64_t{0});
938 }
939 }
940}
941
// Verifies a Context: extension slot, every element slot, and — for script
// contexts — the side data table used for const/mutable-heap-number
// tracking.
void Context::ContextVerify(Isolate* isolate) {
  if (has_extension()) VerifyExtensionSlot(extension());
  TorqueGeneratedClassVerifiers::ContextVerify(*this, isolate);
  for (int i = 0; i < length(); i++) {
    VerifyObjectField(isolate, OffsetOfElementAt(i));
  }
  if (IsScriptContext()) {
    // NOTE(review): the original line declaring {side_data} is elided in
    // this extraction, so the reference below is undeclared here — verify
    // against upstream.
    CHECK(IsFixedArray(side_data));
    Tagged<FixedArray> side_data_array = Cast<FixedArray>(side_data);
    // The array might not be empty if the script context is deserialized from
    // snapshot. However, as long as the flags are enabled the feedback slots
    // must be initialized properly.
    if (v8_flags.script_context_mutable_heap_number ||
        v8_flags.const_tracking_let) {
      for (int i = 0; i < side_data_array->length(); i++) {
        Tagged<Object> element = side_data_array->get(i);
        if (IsSmi(element)) {
          int value = element.ToSmi().value();
          // NOTE(review): two lines checking {value}'s range are elided in
          // this extraction — verify against upstream.
        } else {
          // The slot contains `undefined` before the variable is initialized.
          CHECK(IsUndefined(element) || IsContextSidePropertyCell(element));
        }
      }
    }
  }
}
971
// Verifies a NativeContext on top of the generic Context checks.
// NOTE(review): one line of the original (between the two CHECKs) is elided
// in this extraction — verify against upstream.
void NativeContext::NativeContextVerify(Isolate* isolate) {
  ContextVerify(isolate);
  CHECK(retained_maps() == Smi::zero() || IsWeakArrayList(retained_maps()));
  CHECK_EQ(kVariableSizeSentinel, map()->instance_size());
}
978
// Verifies FeedbackMetadata: the empty metadata must be the canonical
// read-only instance; otherwise every slot is iterated.
// NOTE(review): two lines of the original (the checks on {kind} inside the
// loop) are elided in this extraction, which is why {kind} appears unused
// here — verify against upstream.
void FeedbackMetadata::FeedbackMetadataVerify(Isolate* isolate) {
  if (slot_count() == 0 && create_closure_slot_count() == 0) {
    CHECK_EQ(ReadOnlyRoots(isolate).empty_feedback_metadata(), *this);
  } else {
    FeedbackMetadataIterator iter(*this);
    while (iter.HasNext()) {
      iter.Next();
      FeedbackSlotKind kind = iter.kind();
    }
  }
}
992
// DescriptorArray: the empty array must be the canonical singleton; otherwise
// private-symbol properties must be non-enumerable and field indices must be
// assigned densely in descriptor order.
void DescriptorArray::DescriptorArrayVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::DescriptorArrayVerify(*this, isolate);
  if (number_of_all_descriptors() == 0) {
    CHECK_EQ(ReadOnlyRoots(isolate).empty_descriptor_array(), *this);
    CHECK_EQ(0, number_of_all_descriptors());
    CHECK_EQ(ReadOnlyRoots(isolate).empty_enum_cache(), enum_cache());
  } else {
    CHECK_LT(0, number_of_all_descriptors());
    CHECK_LE(number_of_descriptors(), number_of_all_descriptors());

    // Check that properties with private symbols names are non-enumerable, and
    // that fields are in order.
    int expected_field_index = 0;
    for (InternalIndex descriptor :
         InternalIndex::Range(number_of_descriptors())) {
      // NOTE(review): the declaration of `key` appears to be missing in this
      // rendering — confirm against upstream.
          *(GetDescriptorSlot(descriptor.as_int()) + kEntryKeyIndex);
      // number_of_descriptors() may be out of sync with the actual descriptors
      // written during descriptor array construction.
      if (IsUndefined(key, isolate)) continue;
      PropertyDetails details = GetDetails(descriptor);
      if (Cast<Name>(key)->IsPrivate()) {
        CHECK_NE(details.attributes() & DONT_ENUM, 0);
      }
      Tagged<MaybeObject> value = GetValue(descriptor);
      if (details.location() == PropertyLocation::kField) {
        // Field descriptors must use consecutive field indices, and their
        // value slot holds a FieldType (None/Any) or a weak Map.
        CHECK_EQ(details.field_index(), expected_field_index);
        CHECK(value == FieldType::None() || value == FieldType::Any() ||
              IsMap(value.GetHeapObjectAssumeWeak()));
        expected_field_index += details.field_width_in_words();
      } else {
        CHECK(!value.IsWeakOrCleared());
        CHECK(!IsMap(Cast<Object>(value)));
      }
    }
  }
}
1031
1032void TransitionArray::TransitionArrayVerify(Isolate* isolate) {
1033 WeakFixedArrayVerify(isolate);
1035
1036 ReadOnlyRoots roots(isolate);
1037 Tagged<Map> owner;
1038
1039 // Check all entries have the same owner
1040 for (int i = 0; i < number_of_transitions(); ++i) {
1041 Tagged<Map> target = GetTarget(i);
1042 Tagged<Map> parent = Cast<Map>(target->constructor_or_back_pointer());
1043 if (owner.is_null()) {
1044 parent = owner;
1045 } else {
1046 CHECK_EQ(parent, owner);
1047 }
1048 }
1049 // Check all entries have the same owner
1052 int length = TransitionArray::NumberOfPrototypeTransitions(proto_trans);
1053 for (int i = 0; i < length; ++i) {
1055 Tagged<MaybeObject> maybe_target = proto_trans->get(index);
1057 if (maybe_target.GetHeapObjectIfWeak(&target)) {
1058 if (v8_flags.move_prototype_transitions_first) {
1059 Tagged<Map> parent =
1061 if (owner.is_null()) {
1062 parent = Cast<Map>(target);
1063 } else {
1064 CHECK_EQ(parent, owner);
1065 }
1066 } else {
1067 CHECK(IsUndefined(Cast<Map>(target)->GetBackPointer()));
1068 }
1069 }
1070 }
1071 }
1072 // Check all entries are valid
1073 if (HasSideStepTransitions()) {
1075 for (uint32_t index = SideStepTransition::kFirstMapIdx;
1077 Tagged<MaybeObject> maybe_target = side_trans->get(index);
1079 if (maybe_target.GetHeapObjectIfWeak(&target)) {
1080 CHECK(IsMap(target));
1081 if (!owner.is_null()) {
1082 CHECK_EQ(target->map(), owner->map());
1083 }
1084 } else {
1085 CHECK(maybe_target == SideStepTransition::Unreachable ||
1086 maybe_target == SideStepTransition::Empty ||
1087 maybe_target.IsCleared());
1088 }
1089 }
1090 Tagged<MaybeObject> maybe_cell =
1091 side_trans->get(SideStepTransition::index_of(
1093 Tagged<HeapObject> cell;
1094 if (maybe_cell.GetHeapObjectIfWeak(&cell)) {
1095 CHECK(IsCell(cell));
1096 } else {
1097 CHECK(maybe_cell == SideStepTransition::Empty || maybe_cell.IsCleared());
1098 }
1099 }
1100}
1101
1102namespace {
// Namespace-local helper: verifies the parameter map of a sloppy-arguments
// object — each mapped entry is either the hole or a Smi index into the
// context, entries are strictly increasing, and no mapped entry also exists
// in the arguments backing store.
void SloppyArgumentsElementsVerify(Isolate* isolate,
                                   Tagged<JSObject> holder) {
  elements->SloppyArgumentsElementsVerify(isolate);
  ElementsKind kind = holder->GetElementsKind();
  bool is_fast = kind == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
  Tagged<Context> context_object = elements->context();
  Tagged<FixedArray> arg_elements = elements->arguments();
  if (arg_elements->length() == 0) {
    // An empty arguments store must be the canonical empty array.
    CHECK(arg_elements == ReadOnlyRoots(isolate).empty_fixed_array());
    return;
  }
  ElementsAccessor* accessor;
  if (is_fast) {
  } else {
  }
  int nofMappedParameters = 0;
  int maxMappedIndex = 0;
  // NOTE(review): `nofMappedParameters` starts at 0, so this loop body never
  // executes and the per-entry checks below are dead; the bound was likely
  // intended to be the number of mapped entries — confirm against upstream.
  for (int i = 0; i < nofMappedParameters; i++) {
    // Verify that each context-mapped argument is either the hole or a valid
    // Smi within context length range.
    Tagged<Object> mapped = elements->mapped_entries(i, kRelaxedLoad);
    if (IsTheHole(mapped, isolate)) {
      // Slow sloppy arguments can be holey.
      if (!is_fast) continue;
      // Fast sloppy arguments elements are never holey. Either the element is
      // context-mapped or present in the arguments elements.
      CHECK(accessor->HasElement(holder, i, arg_elements));
      continue;
    }
    int mappedIndex = Smi::ToInt(mapped);
    nofMappedParameters++;
    // Mapped indices must be non-decreasing.
    CHECK_LE(maxMappedIndex, mappedIndex);
    maxMappedIndex = mappedIndex;
    Tagged<Object> value = context_object->get(mappedIndex);
    CHECK(IsObject(value));
    // None of the context-mapped entries should exist in the arguments
    // elements.
    CHECK(!accessor->HasElement(holder, i, arg_elements));
  }
  CHECK_LE(nofMappedParameters, context_object->length());
  CHECK_LE(nofMappedParameters, arg_elements->length());
  CHECK_LE(maxMappedIndex, context_object->length());
  CHECK_LE(maxMappedIndex, arg_elements->length());
}
1150} // namespace
1151
1152void JSArgumentsObject::JSArgumentsObjectVerify(Isolate* isolate) {
1153 TorqueGeneratedClassVerifiers::JSArgumentsObjectVerify(*this, isolate);
1154 if (IsSloppyArgumentsElementsKind(GetElementsKind())) {
1155 SloppyArgumentsElementsVerify(
1156 isolate, Cast<SloppyArgumentsElements>(elements()), *this);
1157 }
1158 Tagged<NativeContext> native_context = map()->map()->native_context();
1159 if (map() == native_context->get(Context::SLOPPY_ARGUMENTS_MAP_INDEX) ||
1160 map() == native_context->get(Context::SLOW_ALIASED_ARGUMENTS_MAP_INDEX) ||
1161 map() == native_context->get(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX)) {
1162 VerifyObjectField(isolate, JSSloppyArgumentsObject::kLengthOffset);
1163 VerifyObjectField(isolate, JSSloppyArgumentsObject::kCalleeOffset);
1164 } else if (map() ==
1165 native_context->get(Context::STRICT_ARGUMENTS_MAP_INDEX)) {
1166 VerifyObjectField(isolate, JSStrictArgumentsObject::kLengthOffset);
1167 }
1168}
1169
// Nothing beyond the Torque-generated field checks is required here.
void JSAsyncFunctionObject::JSAsyncFunctionObjectVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSAsyncFunctionObjectVerify(*this, isolate);
}
1173
// Nothing beyond the Torque-generated field checks is required here.
void JSAsyncGeneratorObject::JSAsyncGeneratorObjectVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSAsyncGeneratorObjectVerify(*this, isolate);
}
1177
1178void JSDate::JSDateVerify(Isolate* isolate) {
1179 TorqueGeneratedClassVerifiers::JSDateVerify(*this, isolate);
1180
1181 if (IsSmi(month())) {
1182 int month = Smi::ToInt(this->month());
1183 CHECK(0 <= month && month <= 11);
1184 }
1185 if (IsSmi(day())) {
1186 int day = Smi::ToInt(this->day());
1187 CHECK(1 <= day && day <= 31);
1188 }
1189 if (IsSmi(hour())) {
1190 int hour = Smi::ToInt(this->hour());
1191 CHECK(0 <= hour && hour <= 23);
1192 }
1193 if (IsSmi(min())) {
1194 int min = Smi::ToInt(this->min());
1195 CHECK(0 <= min && min <= 59);
1196 }
1197 if (IsSmi(sec())) {
1198 int sec = Smi::ToInt(this->sec());
1199 CHECK(0 <= sec && sec <= 59);
1200 }
1201 if (IsSmi(weekday())) {
1202 int weekday = Smi::ToInt(this->weekday());
1203 CHECK(0 <= weekday && weekday <= 6);
1204 }
1205 if (IsSmi(cache_stamp())) {
1206 CHECK(Smi::ToInt(cache_stamp()) <=
1207 Smi::ToInt(isolate->date_cache()->stamp()));
1208 }
1209}
1210
// Base verification shared by all string shapes.
void String::StringVerify(Isolate* isolate) {
  PrimitiveHeapObjectVerify(isolate);
  CHECK(IsString(this, isolate));
  // Length is a non-negative Smi; the empty string is a unique root object.
  CHECK(length() >= 0 && length() <= Smi::kMaxValue);
  CHECK_IMPLIES(length() == 0, this == ReadOnlyRoots(isolate).empty_string());
  if (IsInternalizedString(this)) {
    // Internalized strings always have their hash computed.
    CHECK(HasHashCode());
  }
}
1221
// ConsString: length must equal the sum of the two halves.
void ConsString::ConsStringVerify(Isolate* isolate) {
  StringVerify(isolate);
  CHECK(IsConsString(this, isolate));
  CHECK(length() == first()->length() + second()->length());
  if (IsFlat()) {
    // A flat cons can only be created by String::SlowFlatten.
    // Afterwards, the first part may be externalized or internalized.
    // NOTE(review): the CHECK( line opening this condition appears truncated
    // in this rendering — confirm against upstream.
    IsThinString(first()));
  }
}
1234
// ThinString: base string checks plus the shape check. (Further checks on
// the actual() string appear to be elided in this rendering.)
void ThinString::ThinStringVerify(Isolate* isolate) {
  StringVerify(isolate);
  CHECK(IsThinString(this, isolate));
}
1242
// SlicedString: base string checks plus (in DEBUG) a minimum-length check
// that is skipped when Turbofan string builders are in use.
void SlicedString::SlicedStringVerify(Isolate* isolate) {
  StringVerify(isolate);
  CHECK(IsSlicedString(this, isolate));
#ifdef DEBUG
  if (!isolate->has_turbofan_string_builders()) {
    // Turbofan's string builder optimization can introduce SlicedString that
    // are less than SlicedString::kMinLength characters. Their live range and
    // scope are pretty limited, but they can be visible to the GC, which
    // shouldn't treat them as invalid.
  }
#endif
}
1258
// ExternalString: base string checks plus the shape check.
void ExternalString::ExternalStringVerify(Isolate* isolate) {
  StringVerify(isolate);
  CHECK(IsExternalString(this, isolate));
}
1263
1264void JSBoundFunction::JSBoundFunctionVerify(Isolate* isolate) {
1265 TorqueGeneratedClassVerifiers::JSBoundFunctionVerify(*this, isolate);
1266 CHECK(IsCallable(*this));
1267 CHECK_EQ(IsConstructor(*this), IsConstructor(bound_target_function()));
1268 // Ensure that the function's meta map belongs to the same native context
1269 // as the target function (i.e. meta maps are the same).
1270 CHECK_EQ(map()->map(), bound_target_function()->map()->map());
1271}
1272
// Verifies a JSFunction: core pointer fields (shared info, context, feedback
// cell, code), callability, the meta-map/native-context relationship, the
// leaptiering dispatch-table consistency, and the `prototype` property
// contract.
void JSFunction::JSFunctionVerify(Isolate* isolate) {
  // Don't call TorqueGeneratedClassVerifiers::JSFunctionVerify here because the
  // Torque class definition contains the field `prototype_or_initial_map` which
  // may not be allocated.

  // This assertion exists to encourage updating this verification function if
  // new fields are added in the Torque class layout definition.
  static_assert(JSFunction::TorqueGeneratedClass::kHeaderSize ==
                8 * kTaggedSize);

  JSFunctionOrBoundFunctionOrWrappedFunctionVerify(isolate);
  CHECK(IsJSFunction(*this));
  Object::VerifyPointer(isolate, shared(isolate));
  CHECK(IsSharedFunctionInfo(shared(isolate)));
  Object::VerifyPointer(isolate, context(isolate, kRelaxedLoad));
  CHECK(IsContext(context(isolate, kRelaxedLoad)));
  Object::VerifyPointer(isolate, raw_feedback_cell(isolate));
  CHECK(IsFeedbackCell(raw_feedback_cell(isolate)));
  Object::VerifyPointer(isolate, code(isolate));
  CHECK(IsCode(code(isolate)));
  CHECK(map(isolate)->is_callable());
  // Ensure that the function's meta map belongs to the same native context.
  CHECK_EQ(map()->map()->native_context_or_null(), native_context());

#ifdef V8_ENABLE_LEAPTIERING
  JSDispatchTable* jdt = IsolateGroup::current()->js_dispatch_table();
  JSDispatchHandle handle = dispatch_handle();
  uint16_t parameter_count = jdt->GetParameterCount(handle);
  // NOTE(review): the CHECK_EQ( line opening this comparison appears
  // truncated in this rendering — confirm against upstream.
            shared(isolate)->internal_formal_parameter_count_with_receiver());
  Tagged<Code> code_from_table = jdt->GetCode(handle);
  CHECK(code_from_table->parameter_count() == kDontAdaptArgumentsSentinel ||
        code_from_table->parameter_count() == parameter_count);
  CHECK(!code_from_table->marked_for_deoptimization());
  CHECK_IMPLIES(code_from_table->is_optimized_code(),
                code_from_table->js_dispatch_handle() != kNullJSDispatchHandle);

  // Currently, a JSFunction must have the same dispatch entry as its
  // FeedbackCell, unless the FeedbackCell has no entry.
  JSDispatchHandle feedback_cell_handle =
      raw_feedback_cell(isolate)->dispatch_handle();
  // Only the many-closures cell may (and must) lack a dispatch entry.
  CHECK_EQ(
      raw_feedback_cell(isolate) == *isolate->factory()->many_closures_cell(),
      feedback_cell_handle == kNullJSDispatchHandle);
  if (feedback_cell_handle != kNullJSDispatchHandle) {
    CHECK_EQ(feedback_cell_handle, handle);
  }
  if (code_from_table->is_context_specialized()) {
    // Context-specialized code implies a one-closure feedback cell.
    CHECK_EQ(raw_feedback_cell(isolate)->map(),
             ReadOnlyRoots(isolate).one_closure_cell_map());
  }

  // Verify the entrypoint corresponds to the code or a tiering builtin.
  Address entrypoint = jdt->GetEntrypoint(handle);
#define CASE(name, ...) \
  entrypoint == BUILTIN_CODE(isolate, name)->instruction_start() ||
        entrypoint == code_from_table->instruction_start());
#undef CASE

#endif  // V8_ENABLE_LEAPTIERING

  DirectHandle<JSFunction> function(*this, isolate);
  LookupIterator it(isolate, function, isolate->factory()->prototype_string(),
  if (has_prototype_slot()) {
    VerifyObjectField(isolate, kPrototypeOrInitialMapOffset);
  }

  if (has_prototype_property()) {
    // Functions with a prototype property expose it through an AccessorInfo.
    CHECK(it.IsFound());
    CHECK(IsAccessorInfo(*it.GetAccessors()));
  } else {
    CHECK(!it.IsFound() || it.state() != LookupIterator::ACCESSOR ||
          !IsAccessorInfo(*it.GetAccessors()));
  }

  CHECK_IMPLIES(shared()->HasBuiltinId(),
                shared()->builtin_id(), shared()->length(),
                shared()->internal_formal_parameter_count_with_receiver()));
}
1357
// Verifies a wrapped function: callable, with its meta map tied to the same
// native context as its creation context.
void JSWrappedFunction::JSWrappedFunctionVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSWrappedFunctionVerify(*this, isolate);
  CHECK(IsCallable(*this));
  // Ensure that the function's meta map belongs to the same native context.
  CHECK_EQ(map()->map()->native_context_or_null(), context());
}
1364
1365namespace {
1366
// Returns false for the builtins whose SharedFunctionInfos carry manually
// assigned function map indices; the generic map-index check must be skipped
// for those.
bool ShouldVerifySharedFunctionInfoFunctionIndex(
  if (!sfi->HasBuiltinId()) return true;
  switch (sfi->builtin_id()) {
    case Builtin::kPromiseCapabilityDefaultReject:
    case Builtin::kPromiseCapabilityDefaultResolve:
      // For these we manually set custom function indices.
      return false;
    default:
      return true;
  }
  // Unreachable: every switch path above returns.
  UNREACHABLE();
}
1380
1381} // namespace
1382
// Verifies a SharedFunctionInfo on a LocalIsolate: scope info, script link,
// compilation-state-dependent fields, the function map index, and
// builtin-related invariants.
void SharedFunctionInfo::SharedFunctionInfoVerify(LocalIsolate* isolate) {
  ReadOnlyRoots roots(isolate);

  // A ScopeInfo in the name_or_scope_info slot must be non-empty and distinct
  // from the canonical empty scope info.
  Tagged<Object> value = name_or_scope_info(kAcquireLoad);
  if (IsScopeInfo(value)) {
    CHECK(!Cast<ScopeInfo>(value)->IsEmpty());
    CHECK_NE(value, roots.empty_scope_info());
  }

#if V8_ENABLE_WEBASSEMBLY
  bool is_wasm = HasWasmExportedFunctionData() || HasAsmWasmData() ||
                 HasWasmJSFunctionData() || HasWasmCapiFunctionData() ||
                 HasWasmResumeData();
#else
  bool is_wasm = false;
#endif  // V8_ENABLE_WEBASSEMBLY
  // Every SFI must have exactly one kind of function data.
  CHECK(is_wasm || IsApiFunction() || HasBytecodeArray() || HasBuiltinId() ||

  {
    Tagged<HeapObject> script = this->script(kAcquireLoad);
    CHECK(IsUndefined(script, roots) || IsScript(script));
  }

  if (!is_compiled()) {
    CHECK(IsScopeInfo(outer_scope_info()) ||
          IsTheHole(outer_scope_info(), roots));
  } else if (HasBytecodeArray() && HasFeedbackMetadata()) {
    CHECK(IsFeedbackMetadata(feedback_metadata()));
  }

  if (HasBytecodeArray() && !IsDontAdaptArguments()) {
  }

  if (ShouldVerifySharedFunctionInfoFunctionIndex(*this)) {
    int expected_map_index =
    CHECK_EQ(expected_map_index, function_map_index());
  }

  // NOTE(review): the declaration of `info` appears to be missing in this
  // rendering — confirm against upstream.
  if (!info->IsEmpty()) {
    CHECK(kind() == info->function_kind());
    CHECK_EQ(internal::IsModule(kind()), info->scope_type() == MODULE_SCOPE);
  }

  if (IsApiFunction()) {
  } else if (!HasBuiltinId()) {
  } else {
    if (builtin_id() != Builtin::kCompileLazy &&
        builtin_id() != Builtin::kEmptyFunction) {
    } else {
    }
  }
      builtin_id(), length(),
}
1450
// Main-isolate entry point; delegates to the LocalIsolate variant above.
void SharedFunctionInfo::SharedFunctionInfoVerify(Isolate* isolate) {
  // TODO(leszeks): Add a TorqueGeneratedClassVerifier for LocalIsolate.
  SharedFunctionInfoVerify(isolate->AsLocalIsolate());
}
1455
// The wrapper only needs its shared_info pointer verified.
void SharedFunctionInfoWrapper::SharedFunctionInfoWrapperVerify(
    Isolate* isolate) {
  Object::VerifyPointer(isolate, shared_info());
}
1460
// A global proxy carries no own elements of its own.
void JSGlobalProxy::JSGlobalProxyVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSGlobalProxyVerify(*this, isolate);
  // Make sure that this object has no properties, elements.
  CHECK_EQ(0, Cast<FixedArray>(elements())->length());
}
1467
1468void JSGlobalObject::JSGlobalObjectVerify(Isolate* isolate) {
1469 CHECK(IsJSGlobalObject(*this));
1470 // Do not check the dummy global object for the builtins.
1471 if (global_dictionary(kAcquireLoad)->NumberOfElements() == 0 &&
1472 elements()->length() == 0) {
1473 return;
1474 }
1475 JSObjectVerify(isolate);
1476}
1477
// Shape check shared by all primitive heap objects.
void PrimitiveHeapObject::PrimitiveHeapObjectVerify(Isolate* isolate) {
  CHECK(IsPrimitiveHeapObject(this, isolate));
}
1481
// HeapNumber: primitive checks plus the shape check.
void HeapNumber::HeapNumberVerify(Isolate* isolate) {
  PrimitiveHeapObjectVerify(isolate);
  CHECK(IsHeapNumber(this, isolate));
}
1486
1487void Oddball::OddballVerify(Isolate* isolate) {
1488 PrimitiveHeapObjectVerify(isolate);
1489 CHECK(IsOddball(this, isolate));
1490
1491 Heap* heap = isolate->heap();
1492 Tagged<Object> string = to_string();
1493 Object::VerifyPointer(isolate, string);
1494 CHECK(IsString(string));
1495 Tagged<Object> type = type_of();
1496 Object::VerifyPointer(isolate, type);
1497 CHECK(IsString(type));
1498 Tagged<Object> kind_value = kind_.load();
1499 Object::VerifyPointer(isolate, kind_value);
1500 CHECK(IsSmi(kind_value));
1501
1502 Tagged<Object> number = to_number();
1503 Object::VerifyPointer(isolate, number);
1504 CHECK(IsSmi(number) || IsHeapNumber(number));
1505 if (IsHeapObject(number)) {
1506 CHECK(number == ReadOnlyRoots(heap).nan_value() ||
1507 number == ReadOnlyRoots(heap).hole_nan_value());
1508 } else {
1509 CHECK(IsSmi(number));
1510 int value = Smi::ToInt(number);
1511 // Hidden oddballs have negative smis.
1512 const int kLeastHiddenOddballNumber = -7;
1513 CHECK_LE(value, 1);
1514 CHECK_GE(value, kLeastHiddenOddballNumber);
1515 }
1516
1517 ReadOnlyRoots roots(heap);
1518 if (map() == roots.undefined_map()) {
1519 CHECK(this == roots.undefined_value());
1520 } else if (map() == roots.null_map()) {
1521 CHECK(this == roots.null_value());
1522 } else if (map() == roots.boolean_map()) {
1523 CHECK(this == roots.true_value() || this == roots.false_value());
1524 } else {
1525 UNREACHABLE();
1526 }
1527}
1528
// A Hole must use the hole map and be one of the hole roots in HOLE_LIST.
void Hole::HoleVerify(Isolate* isolate) {
  ReadOnlyRoots roots(isolate->heap());
  CHECK_EQ(map(), roots.hole_map());

#define COMPARE_ROOTS_VALUE(_, Value, __) \
  if (*this == roots.Value()) { \
    return; \
  }
  HOLE_LIST(COMPARE_ROOTS_VALUE);
#undef COMPARE_ROOTS_VALUE

  // Not one of the known hole roots: the heap is corrupted.
  UNREACHABLE();
}
1542
// Torque-generated field checks only (additional checks appear elided in
// this rendering).
void PropertyCell::PropertyCellVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::PropertyCellVerify(*this, isolate);
}
1548
// Nothing beyond the Torque-generated field checks is required here.
void ContextSidePropertyCell::ContextSidePropertyCellVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::ContextSidePropertyCellVerify(*this, isolate);
}
1552
// With the sandbox enabled, trusted objects must live in trusted space
// (Code is the current exception).
void TrustedObject::TrustedObjectVerify(Isolate* isolate) {
#if defined(V8_ENABLE_SANDBOX)
  // All trusted objects must live in trusted space.
  // TODO(saelo): Some objects are trusted but do not yet live in trusted space.
  CHECK(HeapLayout::InTrustedSpace(*this) || IsCode(*this));
#endif
}
1560
// Layout-class shim: forwards to TrustedObject::TrustedObjectVerify.
void TrustedObjectLayout::TrustedObjectVerify(Isolate* isolate) {
  UncheckedCast<TrustedObject>(this)->TrustedObjectVerify(isolate);
}
1564
// With the sandbox enabled, the self indirect pointer must resolve back to
// this object, and its table entry's read-only-ness must match the object's.
void ExposedTrustedObject::ExposedTrustedObjectVerify(Isolate* isolate) {
  TrustedObjectVerify(isolate);
#if defined(V8_ENABLE_SANDBOX)
  // Check that the self indirect pointer is consistent, i.e. points back to
  // this object.
  InstanceType instance_type = map()->instance_type();
  // NOTE(review): the derivation of `tag` from instance_type appears to be
  // missing in this rendering — confirm against upstream.
  // We can't use ReadIndirectPointerField here because the tag is not a
  // compile-time constant.
  IndirectPointerSlot slot =
      RawIndirectPointerField(kSelfIndirectPointerOffset, tag);
  Tagged<Object> self = slot.load(isolate);
  CHECK_EQ(self, *this);
  // If the object is in the read-only space, the self indirect pointer entry
  // must be in the read-only segment, and vice versa.
  if (tag == kCodeIndirectPointerTag) {
    CodePointerTable::Space* space =
        IsolateForSandbox(isolate).GetCodePointerTableSpaceFor(slot.address());
    // During snapshot creation, the code pointer space of the read-only heap is
    // not marked as an internal read-only space.
    bool is_space_read_only =
        space == isolate->read_only_heap()->code_pointer_space();
    CHECK_EQ(is_space_read_only, HeapLayout::InReadOnlySpace(*this));
  } else {
  }
#endif
}
1593
// Verifies a Code object: its metadata offsets are monotonically ordered,
// its cached entry point matches its InstructionStream, and its wrapper
// points back to it.
void Code::CodeVerify(Isolate* isolate) {
  ExposedTrustedObjectVerify(isolate);
  CHECK(IsCode(*this));
  if (has_instruction_stream()) {
    Tagged<InstructionStream> istream = instruction_stream();
    CHECK_EQ(istream->code(kAcquireLoad), *this);
    // Metadata tables are laid out back-to-back in this order.
    CHECK_LE(safepoint_table_offset(), handler_table_offset());
    CHECK_LE(handler_table_offset(), constant_pool_offset());
    CHECK_LE(constant_pool_offset(), code_comments_offset());
    CHECK_LE(code_comments_offset(), unwinding_info_offset());
    CHECK_LE(unwinding_info_offset(), metadata_size());

    // Ensure the cached code entry point corresponds to the InstructionStream
    // object associated with this Code.
#if defined(V8_COMPRESS_POINTERS) && defined(V8_SHORT_BUILTIN_CALLS)
    if (istream->instruction_start() == instruction_start()) {
      // Most common case, all good.
    } else {
      // When shared pointer compression cage is enabled and it has the
      // embedded code blob copy then the
      // InstructionStream::instruction_start() might return the address of
      // the remapped builtin regardless of whether the builtins copy existed
      // when the instruction_start value was cached in the Code (see
      // InstructionStream::OffHeapInstructionStart()). So, do a reverse
      // Code object lookup via instruction_start value to ensure it
      // corresponds to this current Code object.
      Tagged<Code> lookup_result =
          isolate->heap()->FindCodeForInnerPointer(instruction_start());
      CHECK_EQ(lookup_result, *this);
    }
#else
    CHECK_EQ(istream->instruction_start(), instruction_start());
#endif  // V8_COMPRESS_POINTERS && V8_SHORT_BUILTIN_CALLS
  }

  // Our wrapper must point back to us.
  CHECK_EQ(wrapper()->code(isolate), *this);
}
1633
1634void CodeWrapper::CodeWrapperVerify(Isolate* isolate) {
1635 if (!this->has_code()) return;
1636 auto code = this->code(isolate);
1637 Object::VerifyPointer(isolate, code);
1638 CHECK_EQ(code->wrapper(), *this);
1639}
1640
// Verifies an InstructionStream: alignment of the instruction area, the
// back-link to its Code, and that GC-relevant reloc entries are not visited
// twice for the same pc.
void InstructionStream::InstructionStreamVerify(Isolate* isolate) {
  TrustedObjectVerify(isolate);
  // NOTE(review): the declaration of `code` appears to be missing in this
  // rendering — confirm against upstream.
  if (!TryGetCode(&code, kAcquireLoad)) return;
  CHECK(
      IsAligned(code->instruction_size(),
                static_cast<unsigned>(InstructionStream::kMetadataAlignment)));
#if (!defined(_MSC_VER) || defined(__clang__)) && !defined(V8_OS_ZOS)
  // See also: PlatformEmbeddedFileWriterWin::AlignToCodeAlignment
  // and: PlatformEmbeddedFileWriterZOS::AlignToCodeAlignment
#endif  // (!defined(_MSC_VER) || defined(__clang__)) && !defined(V8_OS_ZOS)
  CHECK_EQ(*this, code->instruction_stream());
      isolate->heap()->InSpace(*this, CODE_LO_SPACE));
  Address last_gc_pc = kNullAddress;

  Object::ObjectVerify(relocation_info(), isolate);

  for (RelocIterator it(code); !it.done(); it.next()) {
    it.rinfo()->Verify(isolate);
    // Ensure that GC will not iterate twice over the same pointer.
    if (RelocInfo::IsGCRelocMode(it.rinfo()->rmode())) {
      CHECK(it.rinfo()->pc() != last_gc_pc);
      last_gc_pc = it.rinfo()->pc();
    }
  }
}
1672
// Verifies a JSArray: the elements backing store matches the elements kind,
// and the array length is consistent with the backing store size (fast
// kinds) or the dictionary element count (dictionary kind).
void JSArray::JSArrayVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSArrayVerify(*this, isolate);
  // If a GC was caused while constructing this array, the elements
  // pointer may point to a one pointer filler map.
  if (!ElementsAreSafeToExamine(isolate)) return;
  if (IsUndefined(elements(), isolate)) return;
  CHECK(IsFixedArray(elements()) || IsFixedDoubleArray(elements()));
  if (elements()->length() == 0) {
    CHECK_EQ(elements(), ReadOnlyRoots(isolate).empty_fixed_array());
  }
  // Verify that the length and the elements backing store are in sync.
  if (IsSmi(length()) && (HasFastElements() || HasAnyNonextensibleElements())) {
    if (elements()->length() > 0) {
      CHECK_IMPLIES(HasDoubleElements(), IsFixedDoubleArray(elements()));
      CHECK_IMPLIES(HasSmiOrObjectElements() || HasAnyNonextensibleElements(),
                    IsFixedArray(elements()));
    }
    int size = Smi::ToInt(length());
    // Holey / Packed backing stores might have slack or might have not been
    // properly initialized yet.
    CHECK(size <= elements()->length() ||
          elements() == ReadOnlyRoots(isolate).empty_fixed_array());
  } else {
    CHECK(HasDictionaryElements());
    uint32_t array_length;
    CHECK(Object::ToArrayLength(length(), &array_length));
    if (array_length == 0xFFFFFFFF) {
      CHECK(Object::ToArrayLength(length(), &array_length));
    }
    if (array_length != 0) {
      // NOTE(review): the declaration of `dict` appears to be missing in this
      // rendering — confirm against upstream.
      // The dictionary can never have more elements than the array length + 1.
      // If the backing store grows the verification might be triggered with
      // the old length in place.
      uint32_t nof_elements = static_cast<uint32_t>(dict->NumberOfElements());
      if (nof_elements != 0) nof_elements--;
      CHECK_LE(nof_elements, array_length);
    }
  }
}
1713
// The backing table is an OrderedHashSet, or undefined while uninitialized.
void JSSet::JSSetVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSSetVerify(*this, isolate);
  CHECK(IsOrderedHashSet(table()) || IsUndefined(table(), isolate));
  // TODO(arv): Verify OrderedHashTable too.
}
1719
// The backing table is an OrderedHashMap, or undefined while uninitialized.
void JSMap::JSMapVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSMapVerify(*this, isolate);
  CHECK(IsOrderedHashMap(table()) || IsUndefined(table(), isolate));
  // TODO(arv): Verify OrderedHashTable too.
}
1725
// A set iterator holds an OrderedHashSet and a Smi cursor.
void JSSetIterator::JSSetIteratorVerify(Isolate* isolate) {
  CHECK(IsJSSetIterator(*this));
  JSCollectionIteratorVerify(isolate);
  CHECK(IsOrderedHashSet(table()));
  CHECK(IsSmi(index()));
}
1732
// A map iterator holds an OrderedHashMap and a Smi cursor.
void JSMapIterator::JSMapIteratorVerify(Isolate* isolate) {
  CHECK(IsJSMapIterator(*this));
  JSCollectionIteratorVerify(isolate);
  CHECK(IsOrderedHashMap(table()));
  CHECK(IsSmi(index()));
}
1739
// JSShadowRealm relies entirely on the Torque-generated verifier.
USE_TORQUE_VERIFIER(JSShadowRealm)
1741
1742namespace {
1743
// Namespace-local helper: checks that a value stored in a shared object is
// itself shareable (with a carve-out for ThinStrings under the shared string
// table).
void VerifyElementIsShared(Tagged<Object> element) {
  // Exception for ThinStrings:
  // When storing a ThinString in a shared object, we want to store the actual
  // string, which is shared when sharing the string table.
  // It is possible that a stored shared string migrates to a ThinString later
  // on, which is fine as the ThinString resides in shared space if the original
  // string was in shared space.
  if (IsThinString(element)) {
    CHECK(v8_flags.shared_string_table);
  } else {
    CHECK(IsShared(element));
  }
}
1758
1759} // namespace
1760
// Verifies a JSSharedStruct: fast properties only, descriptor invariants for
// the registry-key and elements-template descriptors, and all field values
// shareable.
void JSSharedStruct::JSSharedStructVerify(Isolate* isolate) {
  CHECK(IsJSSharedStruct(*this));
  JSObjectVerify(isolate);
  CHECK(HasFastProperties());
  // Shared structs can only point to primitives or other shared HeapObjects,
  // even internally.
  Tagged<Map> struct_map = map();
  CHECK(HeapLayout::InAnySharedSpace(property_array()));
  Tagged<DescriptorArray> descriptors =
      struct_map->instance_descriptors(isolate);
  for (InternalIndex i : struct_map->IterateOwnDescriptors()) {
    PropertyDetails details = descriptors->GetDetails(i);
    CHECK_EQ(PropertyKind::kData, details.kind());

    if (JSSharedStruct::IsRegistryKeyDescriptor(isolate, struct_map, i)) {
      // The registry key lives in the descriptor and is an internalized
      // string.
      CHECK_EQ(PropertyLocation::kDescriptor, details.location());
      CHECK(IsInternalizedString(descriptors->GetStrongValue(i)));
    } else if (JSSharedStruct::IsElementsTemplateDescriptor(isolate, struct_map,
                                                            i)) {
      // The elements template lives in the descriptor and is a
      // NumberDictionary.
      CHECK_EQ(PropertyLocation::kDescriptor, details.location());
      CHECK(IsNumberDictionary(descriptors->GetStrongValue(i)));
    } else {
      // Regular data fields: tagged in-object values that must be shareable.
      CHECK_EQ(PropertyLocation::kField, details.location());
      CHECK(details.representation().IsTagged());
      CHECK(!IsNumberDictionary(descriptors->GetStrongValue(i)));
      CHECK(!IsInternalizedString(descriptors->GetStrongValue(i)));
      FieldIndex field_index = FieldIndex::ForDetails(struct_map, details);
      VerifyElementIsShared(RawFastPropertyAt(field_index));
    }
  }
}
1793
// Shape check plus standard JSObject verification.
void JSAtomicsMutex::JSAtomicsMutexVerify(Isolate* isolate) {
  CHECK(IsJSAtomicsMutex(*this));
  JSObjectVerify(isolate);
}
1799
// Shape check plus standard JSObject verification.
void JSAtomicsCondition::JSAtomicsConditionVerify(Isolate* isolate) {
  CHECK(IsJSAtomicsCondition(*this));
  JSObjectVerify(isolate);
}
1805
// Disposable stack: length is a multiple of 3 (entries are presumably stored
// in groups of three — confirm against the stack layout), and the backing
// store has sufficient capacity.
void JSDisposableStackBase::JSDisposableStackBaseVerify(Isolate* isolate) {
  CHECK(IsJSDisposableStackBase(*this));
  JSObjectVerify(isolate);
  CHECK_EQ(length() % 3, 0);
  CHECK_GE(stack()->capacity(), length());
}
1812
// Shape check plus the shared disposable-stack verification.
void JSSyncDisposableStack::JSSyncDisposableStackVerify(Isolate* isolate) {
  CHECK(IsJSSyncDisposableStack(*this));
  JSDisposableStackBase::JSDisposableStackBaseVerify(isolate);
}
1817
// Shape check plus the shared disposable-stack verification.
void JSAsyncDisposableStack::JSAsyncDisposableStackVerify(Isolate* isolate) {
  CHECK(IsJSAsyncDisposableStack(*this));
  JSDisposableStackBase::JSDisposableStackBaseVerify(isolate);
}
1822
1823void JSSharedArray::JSSharedArrayVerify(Isolate* isolate) {
1824 CHECK(IsJSSharedArray(*this));
1825 JSObjectVerify(isolate);
1826 CHECK(HasFastProperties());
1827 // Shared arrays can only point to primitives or other shared HeapObjects,
1828 // even internally.
1829 Tagged<FixedArray> storage = Cast<FixedArray>(elements());
1830 uint32_t length = storage->length();
1831 for (uint32_t j = 0; j < length; j++) {
1832 Tagged<Object> element_value = storage->get(j);
1833 VerifyElementIsShared(element_value);
1834 }
1835}
1836
// Iterator-map helper: the mapper is callable and the counter non-negative.
void JSIteratorMapHelper::JSIteratorMapHelperVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSIteratorMapHelperVerify(*this, isolate);
  CHECK(IsCallable(mapper()));
  CHECK_GE(Object::NumberValue(counter()), 0);
}
1842
// Iterator-filter helper: the predicate is callable, counter non-negative.
void JSIteratorFilterHelper::JSIteratorFilterHelperVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSIteratorFilterHelperVerify(*this, isolate);
  CHECK(IsCallable(predicate()));
  CHECK_GE(Object::NumberValue(counter()), 0);
}
1848
// Iterator-take helper: the remaining count never goes negative.
void JSIteratorTakeHelper::JSIteratorTakeHelperVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSIteratorTakeHelperVerify(*this, isolate);
  CHECK_GE(Object::NumberValue(remaining()), 0);
}
1853
// Iterator-drop helper: the remaining count never goes negative.
void JSIteratorDropHelper::JSIteratorDropHelperVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSIteratorDropHelperVerify(*this, isolate);
  CHECK_GE(Object::NumberValue(remaining()), 0);
}
1858
// Iterator-flatMap helper: the mapper is callable, counter non-negative.
void JSIteratorFlatMapHelper::JSIteratorFlatMapHelperVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSIteratorFlatMapHelperVerify(*this, isolate);
  CHECK(IsCallable(mapper()));
  CHECK_GE(Object::NumberValue(counter()), 0);
}
1864
1865void WeakCell::WeakCellVerify(Isolate* isolate) {
1866 CHECK(IsWeakCell(*this));
1867
1868 CHECK(IsUndefined(target(), isolate) || Object::CanBeHeldWeakly(target()));
1869
1870 CHECK(IsWeakCell(prev()) || IsUndefined(prev(), isolate));
1871 if (IsWeakCell(prev())) {
1872 CHECK_EQ(Cast<WeakCell>(prev())->next(), *this);
1873 }
1874
1875 CHECK(IsWeakCell(next()) || IsUndefined(next(), isolate));
1876 if (IsWeakCell(next())) {
1877 CHECK_EQ(Cast<WeakCell>(next())->prev(), *this);
1878 }
1879
1880 CHECK_IMPLIES(IsUndefined(unregister_token(), isolate),
1881 IsUndefined(key_list_prev(), isolate));
1882 CHECK_IMPLIES(IsUndefined(unregister_token(), isolate),
1883 IsUndefined(key_list_next(), isolate));
1884
1885 CHECK(IsWeakCell(key_list_prev()) || IsUndefined(key_list_prev(), isolate));
1886
1887 CHECK(IsWeakCell(key_list_next()) || IsUndefined(key_list_next(), isolate));
1888
1889 CHECK(IsUndefined(finalization_registry(), isolate) ||
1890 IsJSFinalizationRegistry(finalization_registry()));
1891}
1892
1893void JSWeakRef::JSWeakRefVerify(Isolate* isolate) {
1894 CHECK(IsJSWeakRef(*this));
1895 JSObjectVerify(isolate);
1896 CHECK(IsUndefined(target(), isolate) || Object::CanBeHeldWeakly(target()));
1897}
1898
1899void JSFinalizationRegistry::JSFinalizationRegistryVerify(Isolate* isolate) {
1900 TorqueGeneratedClassVerifiers::JSFinalizationRegistryVerify(*this, isolate);
1901 if (IsWeakCell(active_cells())) {
1902 CHECK(IsUndefined(Cast<WeakCell>(active_cells())->prev(), isolate));
1903 }
1904 if (IsWeakCell(cleared_cells())) {
1905 CHECK(IsUndefined(Cast<WeakCell>(cleared_cells())->prev(), isolate));
1906 }
1907}
1908
1909void JSWeakMap::JSWeakMapVerify(Isolate* isolate) {
1910 TorqueGeneratedClassVerifiers::JSWeakMapVerify(*this, isolate);
1911 CHECK(IsEphemeronHashTable(table()) || IsUndefined(table(), isolate));
1912}
1913
// Verifies a JSArrayIterator: next_index is a non-negative number and fits
// the length range implied by the iterated object's kind.
void JSArrayIterator::JSArrayIteratorVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSArrayIteratorVerify(*this, isolate);

  CHECK_GE(Object::NumberValue(next_index()), 0);

  if (IsJSTypedArray(iterated_object())) {
    // JSTypedArray::length is limited to Smi range.
    CHECK(IsSmi(next_index()));
  } else if (IsJSArray(iterated_object())) {
    // JSArray::length is limited to Uint32 range.
    CHECK_LE(Object::NumberValue(next_index()), kMaxUInt32);
  }
}
1929
// Verifies a JSStringIterator: runs the Torque-generated field checks and
// ensures the iteration position has not gone negative.
void JSStringIterator::JSStringIteratorVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSStringIteratorVerify(*this, isolate);
  CHECK_GE(index(), 0);
}
1935
1936void JSWeakSet::JSWeakSetVerify(Isolate* isolate) {
1937 TorqueGeneratedClassVerifiers::JSWeakSetVerify(*this, isolate);
1938 CHECK(IsEphemeronHashTable(table()) || IsUndefined(table(), isolate));
1939}
1940
1941void CallableTask::CallableTaskVerify(Isolate* isolate) {
1942 TorqueGeneratedClassVerifiers::CallableTaskVerify(*this, isolate);
1943 CHECK(IsCallable(callable()));
1944}
1945
1946void JSPromise::JSPromiseVerify(Isolate* isolate) {
1947 TorqueGeneratedClassVerifiers::JSPromiseVerify(*this, isolate);
1948 if (status() == Promise::kPending) {
1949 CHECK(IsSmi(reactions()) || IsPromiseReaction(reactions()));
1950 }
1951}
1952
1953template <typename Derived>
1954void SmallOrderedHashTable<Derived>::SmallOrderedHashTableVerify(
1955 Isolate* isolate) {
1956 CHECK(IsSmallOrderedHashTable(*this));
1957
1958 int capacity = Capacity();
1959 CHECK_GE(capacity, kMinCapacity);
1960 CHECK_LE(capacity, kMaxCapacity);
1961
1962 for (int entry = 0; entry < NumberOfBuckets(); entry++) {
1963 int bucket = GetFirstEntry(entry);
1964 if (bucket == kNotFound) continue;
1965 CHECK_GE(bucket, 0);
1966 CHECK_LE(bucket, capacity);
1967 }
1968
1969 for (int entry = 0; entry < NumberOfElements(); entry++) {
1970 int chain = GetNextEntry(entry);
1971 if (chain == kNotFound) continue;
1972 CHECK_GE(chain, 0);
1973 CHECK_LE(chain, capacity);
1974 }
1975
1976 for (int entry = 0; entry < NumberOfElements(); entry++) {
1977 for (int offset = 0; offset < Derived::kEntrySize; offset++) {
1978 Tagged<Object> val = GetDataEntry(entry, offset);
1979 Object::VerifyPointer(isolate, val);
1980 }
1981 }
1982
1983 for (int entry = NumberOfElements() + NumberOfDeletedElements();
1984 entry < Capacity(); entry++) {
1985 for (int offset = 0; offset < Derived::kEntrySize; offset++) {
1986 Tagged<Object> val = GetDataEntry(entry, offset);
1987 CHECK(IsTheHole(val, isolate));
1988 }
1989 }
1990}
1991
1992void SmallOrderedHashMap::SmallOrderedHashMapVerify(Isolate* isolate) {
1993 CHECK(IsSmallOrderedHashMap(*this));
1994 SmallOrderedHashTable<SmallOrderedHashMap>::SmallOrderedHashTableVerify(
1995 isolate);
1996 for (int entry = NumberOfElements(); entry < NumberOfDeletedElements();
1997 entry++) {
1998 for (int offset = 0; offset < kEntrySize; offset++) {
1999 Tagged<Object> val = GetDataEntry(entry, offset);
2000 CHECK(IsTheHole(val, isolate));
2001 }
2002 }
2003}
2004
2005void SmallOrderedHashSet::SmallOrderedHashSetVerify(Isolate* isolate) {
2006 CHECK(IsSmallOrderedHashSet(*this));
2007 SmallOrderedHashTable<SmallOrderedHashSet>::SmallOrderedHashTableVerify(
2008 isolate);
2009 for (int entry = NumberOfElements(); entry < NumberOfDeletedElements();
2010 entry++) {
2011 for (int offset = 0; offset < kEntrySize; offset++) {
2012 Tagged<Object> val = GetDataEntry(entry, offset);
2013 CHECK(IsTheHole(val, isolate));
2014 }
2015 }
2016}
2017
2018void SmallOrderedNameDictionary::SmallOrderedNameDictionaryVerify(
2019 Isolate* isolate) {
2020 CHECK(IsSmallOrderedNameDictionary(*this));
2021 SmallOrderedHashTable<
2022 SmallOrderedNameDictionary>::SmallOrderedHashTableVerify(isolate);
2023 for (int entry = NumberOfElements(); entry < NumberOfDeletedElements();
2024 entry++) {
2025 for (int offset = 0; offset < kEntrySize; offset++) {
2026 Tagged<Object> val = GetDataEntry(entry, offset);
2027 CHECK(IsTheHole(val, isolate) ||
2028 (PropertyDetails::Empty().AsSmi() == Cast<Smi>(val)));
2029 }
2030 }
2031}
2032
2033void SwissNameDictionary::SwissNameDictionaryVerify(Isolate* isolate) {
2034 this->SwissNameDictionaryVerify(isolate, false);
2035}
2036
// Full verification of a SwissNameDictionary. Counts present and deleted
// entries from the control table; with |slow_checks| additionally validates
// per-entry H2 hash bytes, the deleted-element count, the copied control
// group at the end of the control table, and the enumeration table.
void SwissNameDictionary::SwissNameDictionaryVerify(Isolate* isolate,
                                                    bool slow_checks) {

  meta_table()->ByteArrayVerify(isolate);

  int seen_deleted = 0;
  int seen_present = 0;

  for (int i = 0; i < Capacity(); i++) {
    ctrl_t ctrl = GetCtrl(i);

    // Full slots are always checked; empty/deleted ones only in slow mode.
    if (IsFull(ctrl) || slow_checks) {
      Tagged<Object> value = ValueAtRaw(i);

      if (IsFull(ctrl)) {
        ++seen_present;

        Tagged<Name> name = Cast<Name>(key);
        if (slow_checks) {
          // The control byte of a full slot is the H2 part of the key's hash.
          CHECK_EQ(swiss_table::H2(name->hash()), ctrl);
        }

        CHECK(!IsTheHole(key));
        CHECK(!IsTheHole(value));
        name->NameVerify(isolate);
        Object::ObjectVerify(value, isolate);
      } else if (IsDeleted(ctrl)) {
        ++seen_deleted;
        CHECK(IsTheHole(key));
        CHECK(IsTheHole(value));
      } else if (IsEmpty(ctrl)) {
        CHECK(IsTheHole(key));
        CHECK(IsTheHole(value));
      } else {
        // Something unexpected. Note that we don't use kSentinel at the moment.
        UNREACHABLE();
      }
    }
  }

  CHECK_EQ(seen_present, NumberOfElements());
  if (slow_checks) {
    CHECK_EQ(seen_deleted, NumberOfDeletedElements());

    // Verify copy of first group at end (= after Capacity() slots) of control
    // table.
    for (int i = 0; i < std::min(static_cast<int>(Group::kWidth), Capacity());
         ++i) {
      CHECK_EQ(CtrlTable()[i], CtrlTable()[Capacity() + i]);
    }
    // If 2 * capacity is smaller than the capacity plus group width, the slots
    // after that must be empty.
    for (int i = 2 * Capacity(); i < Capacity() + kGroupWidth; ++i) {
      CHECK_EQ(Ctrl::kEmpty, CtrlTable()[i]);
    }

    for (int enum_index = 0; enum_index < UsedCapacity(); ++enum_index) {
      int entry = EntryForEnumerationIndex(enum_index);
      CHECK_LT(entry, Capacity());
      ctrl_t ctrl = GetCtrl(entry);

      // Enum table must not point to empty slots.
      CHECK(IsFull(ctrl) || IsDeleted(ctrl));
    }
  }
}
2107
// Verifies a JSRegExp wrapper: source/flags fields hold their expected
// representations (or undefined while uninitialized), and, if data is
// attached, it carries a known type tag.
void JSRegExp::JSRegExpVerify(Isolate* isolate) {
  Tagged<Object> source = TaggedField<Object>::load(*this, kSourceOffset);
  Tagged<Object> flags = TaggedField<Object>::load(*this, kFlagsOffset);
  CHECK(IsString(source) || IsUndefined(source));
  CHECK(IsSmi(flags) || IsUndefined(flags));
  // A regexp without attached data has nothing further to verify.
  if (!has_data()) return;

  Tagged<RegExpData> data = this->data(isolate);
  switch (data->type_tag()) {
      return;
      CHECK(Is<IrRegExpData>(data));
      return;
  }
  UNREACHABLE();
}
2127
2128void RegExpData::RegExpDataVerify(Isolate* isolate) {
2129 ExposedTrustedObjectVerify(isolate);
2130 CHECK(IsSmi(TaggedField<Object>::load(*this, kTypeTagOffset)));
2131 CHECK(IsString(source()));
2132 CHECK(IsSmi(TaggedField<Object>::load(*this, kFlagsOffset)));
2133}
2134
2135void AtomRegExpData::AtomRegExpDataVerify(Isolate* isolate) {
2136 ExposedTrustedObjectVerify(isolate);
2137 RegExpDataVerify(isolate);
2138 CHECK(IsString(pattern()));
2139}
2140
// Verifies an IrRegExpData: protected bytecode fields, the pairing of code
// and bytecode per character width, Smi-encoded counters, and per-type-tag
// invariants (experimental vs. compiled regexps).
void IrRegExpData::IrRegExpDataVerify(Isolate* isolate) {
  ExposedTrustedObjectVerify(isolate);
  RegExpDataVerify(isolate);

  VerifyProtectedPointerField(isolate, kLatin1BytecodeOffset);
  VerifyProtectedPointerField(isolate, kUc16BytecodeOffset);

  // Bytecode can only exist alongside the matching code object.
  CHECK_IMPLIES(!has_latin1_code(), !has_latin1_bytecode());
  CHECK_IMPLIES(!has_uc16_code(), !has_uc16_bytecode());

  CHECK_IMPLIES(has_latin1_code(), Is<Code>(latin1_code(isolate)));
  CHECK_IMPLIES(has_uc16_code(), Is<Code>(uc16_code(isolate)));
  CHECK_IMPLIES(has_latin1_bytecode(), Is<TrustedByteArray>(latin1_bytecode()));
  CHECK_IMPLIES(has_uc16_bytecode(), Is<TrustedByteArray>(uc16_bytecode()));

  // Counters are stored as Smis.
  CHECK(IsSmi(TaggedField<Object>::load(*this, kMaxRegisterCountOffset)));
  CHECK(IsSmi(TaggedField<Object>::load(*this, kCaptureCountOffset)));
  CHECK(IsSmi(TaggedField<Object>::load(*this, kTicksUntilTierUpOffset)));
  CHECK(IsSmi(TaggedField<Object>::load(*this, kBacktrackLimitOffset)));

  switch (type_tag()) {
      if (has_latin1_code()) {
        // Experimental regexps share one trampoline and one bytecode array
        // between both character widths.
        CHECK_EQ(latin1_code(isolate)->builtin_id(),
                 Builtin::kRegExpExperimentalTrampoline);
        CHECK_EQ(latin1_code(isolate), uc16_code(isolate));
        CHECK(Is<TrustedByteArray>(latin1_bytecode()));
        CHECK_EQ(latin1_bytecode(), uc16_bytecode());
      } else {
        CHECK(!has_uc16_code());
        CHECK(!has_latin1_bytecode());
        CHECK(!has_uc16_bytecode());
      }

      CHECK_EQ(max_register_count(), JSRegExp::kUninitializedValue);
      CHECK_EQ(ticks_until_tier_up(), JSRegExp::kUninitializedValue);
      CHECK_EQ(backtrack_limit(), JSRegExp::kUninitializedValue);

      break;
    }
      bool can_be_interpreted = RegExp::CanGenerateBytecode();
      CHECK_IMPLIES(has_latin1_bytecode(), can_be_interpreted);
      CHECK_IMPLIES(has_uc16_bytecode(), can_be_interpreted);

      static_assert(JSRegExp::kUninitializedValue == -1);
      CHECK_GE(max_register_count(), JSRegExp::kUninitializedValue);
      CHECK_GE(capture_count(), 0);
      if (v8_flags.regexp_tier_up) {
        // With tier-up enabled, ticks_until_tier_up should actually be >= 0.
        // However FlagScopes in unittests can modify the flag and verification
        // on Isolate deinitialization will fail.
        CHECK_GE(ticks_until_tier_up(), JSRegExp::kUninitializedValue);
        CHECK_LE(ticks_until_tier_up(), v8_flags.regexp_tier_up_ticks);
      } else {
        CHECK_EQ(ticks_until_tier_up(), JSRegExp::kUninitializedValue);
      }
      CHECK_GE(backtrack_limit(), 0);

      break;
    }
    default:
      UNREACHABLE();
  }
}
2211
2212void RegExpDataWrapper::RegExpDataWrapperVerify(Isolate* isolate) {
2213 if (!this->has_data()) return;
2214 auto data = this->data(isolate);
2215 Object::VerifyPointer(isolate, data);
2216 CHECK_EQ(data->wrapper(), *this);
2217}
2218
// Verifies a JSProxy: its map's callable/constructor bits mirror the target
// (while not revoked), its prototype slot is null, and it carries no own
// properties.
void JSProxy::JSProxyVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSProxyVerify(*this, isolate);
  CHECK(IsJSFunction(map()->GetConstructor()));
  if (!IsRevoked()) {
    CHECK_EQ(IsCallable(target()), map()->is_callable());
    CHECK_EQ(IsConstructor(target()), map()->is_constructor());
  }
  CHECK(IsNull(map()->prototype(), isolate));
  // There should be no properties on a Proxy.
}
2230
2231void JSArrayBuffer::JSArrayBufferVerify(Isolate* isolate) {
2232 TorqueGeneratedClassVerifiers::JSArrayBufferVerify(*this, isolate);
2233 if (FIELD_SIZE(kOptionalPaddingOffset) != 0) {
2234 CHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
2235 CHECK_EQ(0,
2236 *reinterpret_cast<uint32_t*>(address() + kOptionalPaddingOffset));
2237 }
2238}
2239
// Verifies a JSArrayBufferView via the Torque-generated field checks.
void JSArrayBufferView::JSArrayBufferViewVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSArrayBufferViewVerify(*this, isolate);
}
2245
// Verifies a JSTypedArray via the Torque-generated field checks.
void JSTypedArray::JSTypedArrayVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::JSTypedArrayVerify(*this, isolate);
}
2250
2251void JSDataView::JSDataViewVerify(Isolate* isolate) {
2252 TorqueGeneratedClassVerifiers::JSDataViewVerify(*this, isolate);
2253 CHECK(!IsVariableLength());
2254 if (!WasDetached()) {
2255 CHECK_EQ(reinterpret_cast<uint8_t*>(
2256 Cast<JSArrayBuffer>(buffer())->backing_store()) +
2257 byte_offset(),
2258 data_pointer());
2259 }
2260}
2261
2262void JSRabGsabDataView::JSRabGsabDataViewVerify(Isolate* isolate) {
2263 TorqueGeneratedClassVerifiers::JSRabGsabDataViewVerify(*this, isolate);
2264 CHECK(IsVariableLength());
2265 if (!WasDetached()) {
2266 CHECK_EQ(reinterpret_cast<uint8_t*>(
2267 Cast<JSArrayBuffer>(buffer())->backing_store()) +
2268 byte_offset(),
2269 data_pointer());
2270 }
2271}
2272
2273void AsyncGeneratorRequest::AsyncGeneratorRequestVerify(Isolate* isolate) {
2274 TorqueGeneratedClassVerifiers::AsyncGeneratorRequestVerify(*this, isolate);
2275 CHECK_GE(resume_mode(), JSGeneratorObject::kNext);
2276 CHECK_LE(resume_mode(), JSGeneratorObject::kThrow);
2277}
2278
2279void BigIntBase::BigIntBaseVerify(Isolate* isolate) {
2280 CHECK_GE(length(), 0);
2281 CHECK_IMPLIES(is_zero(), !sign()); // There is no -0n.
2282}
2283
2284void SourceTextModuleInfoEntry::SourceTextModuleInfoEntryVerify(
2285 Isolate* isolate) {
2286 TorqueGeneratedClassVerifiers::SourceTextModuleInfoEntryVerify(*this,
2287 isolate);
2288 CHECK_IMPLIES(IsString(import_name()), module_request() >= 0);
2289 CHECK_IMPLIES(IsString(export_name()) && IsString(import_name()),
2290 IsUndefined(local_name(), isolate));
2291}
2292
// Verifies the state-machine invariants shared by all Module kinds.
void Module::ModuleVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::ModuleVerify(*this, isolate);

  // A module holds an exception exactly when it is in the errored state.
  CHECK_EQ(status() == Module::kErrored, !IsTheHole(exception(), isolate));

  CHECK(IsUndefined(module_namespace(), isolate) ||
        IsJSModuleNamespace(module_namespace()));
  if (IsJSModuleNamespace(module_namespace())) {
    // The namespace object must point back at this module.
    CHECK_EQ(Cast<JSModuleNamespace>(module_namespace())->module(), *this);
  }

  // Only errored/evaluating/evaluated modules may have a top-level capability.
  if (!(status() == kErrored || status() == kEvaluating ||
        status() == kEvaluatingAsync || status() == kEvaluated)) {
    CHECK(IsUndefined(top_level_capability()));
  }

  CHECK_NE(hash(), 0);
}
2312
// Verifies a ModuleRequest: import attributes are stored as flat
// (key, value, location) triples of (String, String, Smi).
void ModuleRequest::ModuleRequestVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::ModuleRequestVerify(*this, isolate);
  CHECK_EQ(0,
           import_attributes()->length() % ModuleRequest::kAttributeEntrySize);

  for (int i = 0; i < import_attributes()->length();
    CHECK(IsString(import_attributes()->get(i)));      // Attribute key
    CHECK(IsString(import_attributes()->get(i + 1)));  // Attribute value
    CHECK(IsSmi(import_attributes()->get(i + 2)));     // Attribute location
  }
}
2325
// Verifies a SourceTextModule: the representation of code() tracks the
// module's lifecycle state, and requested_modules stays in sync with the
// module-request metadata.
void SourceTextModule::SourceTextModuleVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::SourceTextModuleVerify(*this, isolate);

  if (status() == kErrored) {
    CHECK(IsSharedFunctionInfo(code()));
  } else if (status() == kEvaluating || status() == kEvaluatingAsync ||
             status() == kEvaluated) {
    CHECK(IsJSGeneratorObject(code()));
  } else {
    // Pre-evaluation states: SharedFunctionInfo before linking, JSFunction
    // while linking, JSGeneratorObject once linked.
    if (status() == kLinked) {
      CHECK(IsJSGeneratorObject(code()));
    } else if (status() == kLinking) {
      CHECK(IsJSFunction(code()));
    } else if (status() == kPreLinking) {
      CHECK(IsSharedFunctionInfo(code()));
    } else if (status() == kUnlinked) {
      CHECK(IsSharedFunctionInfo(code()));
    }
    CHECK(!pending_async_dependencies());
  }

  CHECK_EQ(requested_modules()->length(), info()->module_requests()->length());
}
2351
2352void SyntheticModule::SyntheticModuleVerify(Isolate* isolate) {
2353 TorqueGeneratedClassVerifiers::SyntheticModuleVerify(*this, isolate);
2354
2355 for (int i = 0; i < export_names()->length(); i++) {
2356 CHECK(IsString(export_names()->get(i)));
2357 }
2358}
2359
2360void PrototypeInfo::PrototypeInfoVerify(Isolate* isolate) {
2361 TorqueGeneratedClassVerifiers::PrototypeInfoVerify(*this, isolate);
2362 if (IsWeakArrayList(prototype_users())) {
2363 PrototypeUsers::Verify(Cast<WeakArrayList>(prototype_users()));
2364 } else {
2365 CHECK(IsSmi(prototype_users()));
2366 }
2367 Tagged<HeapObject> derived = derived_maps(isolate);
2368 if (!IsUndefined(derived)) {
2369 auto derived_list = Cast<WeakArrayList>(derived);
2370 CHECK_GT(derived_list->length(), 0);
2371 for (int i = 0; i < derived_list->length(); ++i) {
2372 derived_list->Get(i).IsWeakOrCleared();
2373 }
2374 }
2375}
2376
// Verifies a PrototypeUsers list: walks the free-slot chain, then checks that
// every payload slot is either a weak/cleared Map pointer or a Smi free-slot
// marker, and that the counts add up to the list length.
void PrototypeUsers::Verify(Tagged<WeakArrayList> array) {
  if (array->length() == 0) {
    // Allow empty & uninitialized lists.
    return;
  }
  // Verify empty slot chain.
  int empty_slot = Smi::ToInt(empty_slot_index(array));
  int empty_slots_count = 0;
  while (empty_slot != kNoEmptySlotsMarker) {
    // Slot 0 holds the chain head, so a valid free slot is in (0, length).
    CHECK_GT(empty_slot, 0);
    CHECK_LT(empty_slot, array->length());
    // Each free slot stores the index of the next free slot as a Smi.
    empty_slot = array->Get(empty_slot).ToSmi().value();
    ++empty_slots_count;
  }

  // Verify that all elements are either weak pointers or SMIs marking empty
  // slots.
  int weak_maps_count = 0;
  for (int i = kFirstIndex; i < array->length(); ++i) {
    Tagged<HeapObject> heap_object;
    Tagged<MaybeObject> object = array->Get(i);
    if ((object.GetHeapObjectIfWeak(&heap_object) && IsMap(heap_object)) ||
        object.IsCleared()) {
      ++weak_maps_count;
    } else {
      CHECK(IsSmi(object));
    }
  }

  // Weak entries plus free slots plus the header slot account for everything.
  CHECK_EQ(weak_maps_count + empty_slots_count + 1, array->length());
}
2408
2409void EnumCache::EnumCacheVerify(Isolate* isolate) {
2410 TorqueGeneratedClassVerifiers::EnumCacheVerify(*this, isolate);
2411 Heap* heap = isolate->heap();
2412 if (*this == ReadOnlyRoots(heap).empty_enum_cache()) {
2413 CHECK_EQ(ReadOnlyRoots(heap).empty_fixed_array(), keys());
2414 CHECK_EQ(ReadOnlyRoots(heap).empty_fixed_array(), indices());
2415 }
2416}
2417
2418void ObjectBoilerplateDescription::ObjectBoilerplateDescriptionVerify(
2419 Isolate* isolate) {
2420 CHECK(IsSmi(length_.load()));
2421 CHECK(IsSmi(backing_store_size_.load()));
2422 CHECK(IsSmi(flags_.load()));
2423 // The keys of the boilerplate should not be thin strings. The values can be.
2424 for (int i = 0; i < boilerplate_properties_count(); ++i) {
2425 CHECK(!IsThinString(name(i), isolate));
2426 }
2427}
2428
2429void ClassBoilerplate::ClassBoilerplateVerify(Isolate* isolate) {
2430 CHECK(IsSmi(TaggedField<Object>::load(*this, kArgumentsCountOffset)));
2431 Object::VerifyPointer(isolate, static_properties_template());
2432 Object::VerifyPointer(isolate, static_elements_template());
2433 Object::VerifyPointer(isolate, static_computed_properties());
2434 CHECK(IsFixedArray(static_computed_properties()));
2435 Object::VerifyPointer(isolate, instance_properties_template());
2436 Object::VerifyPointer(isolate, instance_elements_template());
2437 Object::VerifyPointer(isolate, instance_computed_properties());
2438 CHECK(IsFixedArray(instance_computed_properties()));
2439}
2440
2441void RegExpBoilerplateDescription::RegExpBoilerplateDescriptionVerify(
2442 Isolate* isolate) {
2443 {
2444 auto o = data(isolate);
2445 Object::VerifyPointer(isolate, o);
2446 CHECK(IsRegExpData(o));
2447 }
2448 {
2449 auto o = source();
2450 Object::VerifyPointer(isolate, o);
2451 CHECK(IsString(o));
2452 }
2453 CHECK(IsSmi(TaggedField<Object>::load(*this, kFlagsOffset)));
2454}
2455
2456#if V8_ENABLE_WEBASSEMBLY
2457
2458void WasmTrustedInstanceData::WasmTrustedInstanceDataVerify(Isolate* isolate) {
2459 // Check all tagged fields.
2460 for (uint16_t offset : kTaggedFieldOffsets) {
2461 VerifyObjectField(isolate, offset);
2462 }
2463
2464 // Check all protected fields.
2465 for (uint16_t offset : kProtectedFieldOffsets) {
2466 VerifyProtectedPointerField(isolate, offset);
2467 }
2468
2469 int num_dispatch_tables = dispatch_tables()->length();
2470 for (int i = 0; i < num_dispatch_tables; ++i) {
2471 Tagged<Object> table = dispatch_tables()->get(i);
2472 if (table == Smi::zero()) continue;
2473 CHECK(IsWasmDispatchTable(table));
2474 if (i == 0) CHECK_EQ(table, dispatch_table0());
2475 }
2476 if (num_dispatch_tables == 0) CHECK_EQ(0, dispatch_table0()->length());
2477}
2478
// Verifies a WasmDispatchTable: per-entry implicit-argument objects and the
// invariants of the "uses" back-reference list.
void WasmDispatchTable::WasmDispatchTableVerify(Isolate* isolate) {
  TrustedObjectVerify(isolate);

  int len = length();
  CHECK_LE(len, capacity());
  for (int i = 0; i < len; ++i) {
    Object::VerifyPointer(isolate, arg);
    CHECK(IsWasmTrustedInstanceData(arg) || IsWasmImportData(arg) ||
          arg == Smi::zero());
    if (!v8_flags.wasm_jitless) {
      // call_target always null with the interpreter.
    }
  }

  // Check invariants of the "uses" list (which are specific to
  // WasmDispatchTable, not inherent to any ProtectedWeakFixedArray).
  Tagged<ProtectedWeakFixedArray> uses = protected_uses();
  if (uses->length() > 0) {
    // Slot 0 stores the used length; entries follow in pairs.
    CHECK(IsSmi(uses->get(0)));
    int capacity = uses->length();
    CHECK(capacity & 1);  // Capacity is odd: reserved slot + 2*num_entries.
    int used_length = Cast<Smi>(uses->get(0)).value();
    CHECK_LE(used_length, capacity);
    for (int i = 1; i < used_length; i += 2) {
      CHECK(uses->get(i).IsCleared() ||
            IsWasmTrustedInstanceData(uses->get(i).GetHeapObjectAssumeWeak()));
      CHECK(IsSmi(uses->get(i + 1)));
    }
  }
}
2511
2512void WasmTableObject::WasmTableObjectVerify(Isolate* isolate) {
2513 TorqueGeneratedClassVerifiers::WasmTableObjectVerify(*this, isolate);
2514 if (has_trusted_dispatch_table() &&
2515 !has_trusted_dispatch_table_unpublished(isolate)) {
2516 CHECK_EQ(trusted_dispatch_table(isolate)->length(), current_length());
2517 }
2518}
2519
2520void WasmValueObject::WasmValueObjectVerify(Isolate* isolate) {
2521 JSObjectVerify(isolate);
2522 CHECK(IsWasmValueObject(*this));
2523}
2524
2525void WasmExceptionPackage::WasmExceptionPackageVerify(Isolate* isolate) {
2526 JSObjectVerify(isolate);
2527 CHECK(IsWasmExceptionPackage(*this));
2528}
2529
2530void WasmExportedFunctionData::WasmExportedFunctionDataVerify(
2531 Isolate* isolate) {
2532 TorqueGeneratedClassVerifiers::WasmExportedFunctionDataVerify(*this, isolate);
2533 Tagged<Code> wrapper = wrapper_code(isolate);
2534 CHECK(
2535 wrapper->kind() == CodeKind::JS_TO_WASM_FUNCTION ||
2536 wrapper->kind() == CodeKind::C_WASM_ENTRY ||
2537 (wrapper->is_builtin() &&
2538 (wrapper->builtin_id() == Builtin::kJSToWasmWrapper ||
2539#if V8_ENABLE_DRUMBRAKE
2540 wrapper->builtin_id() == Builtin::kGenericJSToWasmInterpreterWrapper ||
2541#endif // V8_ENABLE_DRUMBRAKE
2542 wrapper->builtin_id() == Builtin::kWasmPromising ||
2543 wrapper->builtin_id() == Builtin::kWasmStressSwitch)));
2544}
2545
2546#endif // V8_ENABLE_WEBASSEMBLY
2547
// Dispatches layout-class verification to Struct::StructVerify.
void StructLayout::StructVerify(Isolate* isolate) {
  Cast<Struct>(this)->StructVerify(isolate);
}
2551
2552void Tuple2::Tuple2Verify(Isolate* isolate) {
2553 StructVerify(isolate);
2554 CHECK(IsTuple2(this));
2555 Object::VerifyPointer(isolate, value1_.load());
2556 Object::VerifyPointer(isolate, value2_.load());
2557}
2558
2559void AccessorPair::AccessorPairVerify(Isolate* isolate) {
2560 StructVerify(isolate);
2561 CHECK(IsAccessorPair(this));
2562 Object::VerifyPointer(isolate, getter_.load());
2563 Object::VerifyPointer(isolate, setter_.load());
2564}
2565
2566void ClassPositions::ClassPositionsVerify(Isolate* isolate) {
2567 StructVerify(isolate);
2568 CHECK(IsClassPositions(this));
2569 CHECK(IsSmi(Tagged<Object>(start_.load())));
2570 CHECK(IsSmi(Tagged<Object>(end_.load())));
2571}
2572
// Verifies the fields shared by all IC data handlers: the smi handler, the
// validity cell, and the variable-length data fields.
void DataHandler::DataHandlerVerify(Isolate* isolate) {
  StructVerify(isolate);
  CHECK(IsDataHandler(this));
  Object::VerifyPointer(isolate, smi_handler());
        IsStoreHandler(this) && IsCode(smi_handler()));
  Object::VerifyPointer(isolate, validity_cell());
  CHECK(IsSmi(validity_cell()) || IsCell(validity_cell()));
  // Remaining data fields hold arbitrary (maybe-weak) pointers.
  for (int i = 0; i < data_field_count(); ++i) {
    Object::VerifyMaybeObjectPointer(isolate, data()[i].load());
  }
}
2585
// Verifies a load IC handler; currently only the generic DataHandler
// invariants are checked.
void LoadHandler::LoadHandlerVerify(Isolate* isolate) {
  DataHandler::DataHandlerVerify(isolate);
  // TODO(ishell): check handler integrity
}
2590
// Verifies a store IC handler; currently only the generic DataHandler
// invariants are checked.
void StoreHandler::StoreHandlerVerify(Isolate* isolate) {
  DataHandler::DataHandlerVerify(isolate);
  // TODO(ishell): check handler integrity
}
2595
// Verifies an AllocationSite: dependent code, the literal boilerplate (when
// the site points to one), and the nested-site chain.
void AllocationSite::AllocationSiteVerify(Isolate* isolate) {
  CHECK(IsAllocationSite(this));
  CHECK(IsDependentCode(dependent_code()));
  if (PointsToLiteral()) {
    CHECK(IsJSObject(transition_info_or_boilerplate_.load()));
  } else {
  }
  // nested_site chains further AllocationSites, terminated by the zero Smi.
  CHECK(IsAllocationSite(nested_site()) || nested_site() == Smi::zero());
}
2606
2607void AllocationMemento::AllocationMementoVerify(Isolate* isolate) {
2608 StructVerify(isolate);
2609 CHECK(IsAllocationMemento(this));
2610 CHECK(IsAllocationSite(allocation_site_.load()));
2611}
2612
// Verifies a Script: Wasm scripts keep line_ends empty, and each infos() slot
// is a weak SharedFunctionInfo/ScopeInfo, a cleared weak reference, or strong
// undefined.
void Script::ScriptVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::ScriptVerify(*this, isolate);
#if V8_ENABLE_WEBASSEMBLY
  if (type() == Script::Type::kWasm) {
    CHECK_EQ(line_ends(), ReadOnlyRoots(isolate).empty_fixed_array());
  } else {
  }
#else   // V8_ENABLE_WEBASSEMBLY
#endif  // V8_ENABLE_WEBASSEMBLY
  for (int i = 0; i < infos()->length(); ++i) {
    Tagged<MaybeObject> maybe_object = infos()->get(i);
    Tagged<HeapObject> heap_object;
    CHECK(!maybe_object.GetHeapObjectIfWeak(isolate, &heap_object) ||
          (maybe_object.GetHeapObjectIfStrong(&heap_object) &&
           IsUndefined(heap_object, isolate)) ||
          Is<SharedFunctionInfo>(heap_object) || Is<ScopeInfo>(heap_object));
  }
}
2633
// Verifies a NormalizedMapCache: with slow asserts enabled, every live entry
// must be a dictionary-mode map; otherwise the slot is cleared or undefined.
void NormalizedMapCache::NormalizedMapCacheVerify(Isolate* isolate) {
  Cast<WeakFixedArray>(this)->WeakFixedArrayVerify(isolate);
  if (v8_flags.enable_slow_asserts) {
    for (int i = 0; i < length(); i++) {
      Tagged<HeapObject> heap_object;
      if (e.GetHeapObjectIfWeak(&heap_object)) {
        Cast<Map>(heap_object)->DictionaryMapVerify(isolate);
      } else {
        CHECK(e.IsCleared() || (e.GetHeapObjectIfStrong(&heap_object) &&
                                IsUndefined(heap_object, isolate)));
      }
    }
  }
}
2649
// Verifies a PreparseData: non-negative lengths, and every child slot holds
// either null or a nested PreparseData.
void PreparseData::PreparseDataVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::PreparseDataVerify(*this, isolate);
  CHECK_LE(0, data_length());
  CHECK_LE(0, children_length());

  for (int i = 0; i < children_length(); ++i) {
    CHECK(IsNull(child) || IsPreparseData(child));
    Object::VerifyPointer(isolate, child);
  }
}
2661
2662void CallSiteInfo::CallSiteInfoVerify(Isolate* isolate) {
2663 TorqueGeneratedClassVerifiers::CallSiteInfoVerify(*this, isolate);
2664#if V8_ENABLE_WEBASSEMBLY
2665 CHECK_IMPLIES(IsAsmJsWasm(), IsWasm());
2666 CHECK_IMPLIES(IsWasm(), IsWasmInstanceObject(receiver_or_instance()));
2667 CHECK_IMPLIES(IsWasm() || IsBuiltin(), IsSmi(function()));
2668 CHECK_IMPLIES(!IsWasm() && !IsBuiltin(), IsJSFunction(function()));
2669 CHECK_IMPLIES(IsAsync(), !IsWasm());
2670 CHECK_IMPLIES(IsConstructor(), !IsWasm());
2671#endif // V8_ENABLE_WEBASSEMBLY
2672}
2673
2674void FunctionTemplateRareData::FunctionTemplateRareDataVerify(
2675 Isolate* isolate) {
2676 CHECK(IsFixedArray(c_function_overloads()) ||
2677 IsUndefined(c_function_overloads(), isolate));
2678}
2679
// Verifies a StackFrameInfo via the Torque-generated field checks.
void StackFrameInfo::StackFrameInfoVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::StackFrameInfoVerify(*this, isolate);
}
2683
// Verifies a StackTraceInfo via the Torque-generated field checks.
void StackTraceInfo::StackTraceInfoVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::StackTraceInfoVerify(*this, isolate);
}
2687
// Verifies an ErrorStackData via the Torque-generated field checks.
void ErrorStackData::ErrorStackDataVerify(Isolate* isolate) {
  TorqueGeneratedClassVerifiers::ErrorStackDataVerify(*this, isolate);
}
2691
2692void SloppyArgumentsElements::SloppyArgumentsElementsVerify(Isolate* isolate) {
2693 CHECK(IsSmi(length_.load()));
2694 {
2695 auto o = context();
2696 Object::VerifyPointer(isolate, o);
2697 CHECK(IsContext(o));
2698 }
2699 {
2700 auto o = arguments();
2701 Object::VerifyPointer(isolate, o);
2702 CHECK(IsFixedArray(o));
2703 }
2704 for (int i = 0; i < length(); ++i) {
2705 auto o = mapped_entries(i, kRelaxedLoad);
2706 CHECK(IsSmi(o) || IsTheHole(o));
2707 }
2708}
2709
2710// Helper class for verifying the string table.
2711class StringTableVerifier : public RootVisitor {
2712 public:
2713 explicit StringTableVerifier(Isolate* isolate) : isolate_(isolate) {}
2714
2715 void VisitRootPointers(Root root, const char* description,
2716 FullObjectSlot start, FullObjectSlot end) override {
2717 UNREACHABLE();
2718 }
2719 void VisitRootPointers(Root root, const char* description,
2720 OffHeapObjectSlot start,
2721 OffHeapObjectSlot end) override {
2722 // Visit all HeapObject pointers in [start, end).
2723 for (OffHeapObjectSlot p = start; p < end; ++p) {
2724 Tagged<Object> o = p.load(isolate_);
2725 CHECK(!HasWeakHeapObjectTag(o));
2726 if (IsHeapObject(o)) {
2727 Tagged<HeapObject> object = Cast<HeapObject>(o);
2728 // Check that the string is actually internalized.
2729 CHECK(IsInternalizedString(object));
2730 }
2731 }
2732 }
2733
2734 private:
2735 Isolate* isolate_;
2736};
2737
2738void StringTable::VerifyIfOwnedBy(Isolate* isolate) {
2739 CHECK_EQ(isolate->string_table(), this);
2740 if (!isolate->OwnsStringTables()) return;
2741 StringTableVerifier verifier(isolate);
2742 IterateElements(&verifier);
2743}
2744
2745#endif // VERIFY_HEAP
2746
2747#ifdef DEBUG
2748
// Accumulates per-object property/element usage statistics into |info|,
// distinguishing fast vs. slow (dictionary) representations.
void JSObject::IncrementSpillStatistics(Isolate* isolate,
                                        SpillInformation* info) {
  info->number_of_objects_++;
  // Named properties
  if (HasFastProperties()) {
    info->number_of_objects_with_fast_properties_++;
    info->number_of_fast_used_fields_ += map()->NextFreePropertyIndex();
    info->number_of_fast_unused_fields_ += map()->UnusedPropertyFields();
  } else if (IsJSGlobalObject(*this)) {
        Cast<JSGlobalObject>(*this)->global_dictionary(kAcquireLoad);
    info->number_of_slow_used_properties_ += dict->NumberOfElements();
    info->number_of_slow_unused_properties_ +=
        dict->Capacity() - dict->NumberOfElements();
    Tagged<SwissNameDictionary> dict = property_dictionary_swiss();
    info->number_of_slow_used_properties_ += dict->NumberOfElements();
    info->number_of_slow_unused_properties_ +=
        dict->Capacity() - dict->NumberOfElements();
  } else {
    Tagged<NameDictionary> dict = property_dictionary();
    info->number_of_slow_used_properties_ += dict->NumberOfElements();
    info->number_of_slow_unused_properties_ +=
        dict->Capacity() - dict->NumberOfElements();
  }
  // Indexed properties
  switch (GetElementsKind()) {
    case HOLEY_SMI_ELEMENTS:
    case HOLEY_ELEMENTS:
    case PACKED_ELEMENTS:
    case SHARED_ARRAY_ELEMENTS: {
      // Fast elements: count holes to split used vs. unused slots.
      info->number_of_objects_with_fast_elements_++;
      int holes = 0;
      int len = e->length();
      for (int i = 0; i < len; i++) {
        if (IsTheHole(e->get(i), isolate)) holes++;
      }
      info->number_of_fast_used_elements_ += len - holes;
      info->number_of_fast_unused_elements_ += holes;
      break;
    }

#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) case TYPE##_ELEMENTS:

#undef TYPED_ARRAY_CASE
      {
        // Typed-array backing stores have no holes; all slots count as used.
        info->number_of_objects_with_fast_elements_++;
        info->number_of_fast_used_elements_ += e->length();
        break;
      }
      info->number_of_slow_used_elements_ += dict->NumberOfElements();
      info->number_of_slow_unused_elements_ +=
          dict->Capacity() - dict->NumberOfElements();
      break;
    }
    case NO_ELEMENTS:
      break;
  }
}
2828
2829void JSObject::SpillInformation::Clear() {
2830 number_of_objects_ = 0;
2831 number_of_objects_with_fast_properties_ = 0;
2832 number_of_objects_with_fast_elements_ = 0;
2833 number_of_fast_used_fields_ = 0;
2834 number_of_fast_unused_fields_ = 0;
2835 number_of_slow_used_properties_ = 0;
2836 number_of_slow_unused_properties_ = 0;
2837 number_of_fast_used_elements_ = 0;
2838 number_of_fast_unused_elements_ = 0;
2839 number_of_slow_used_elements_ = 0;
2840 number_of_slow_unused_elements_ = 0;
2841}
2842
2843void JSObject::SpillInformation::Print() {
2844 PrintF("\n JSObject Spill Statistics (#%d):\n", number_of_objects_);
2845
2846 PrintF(" - fast properties (#%d): %d (used) %d (unused)\n",
2847 number_of_objects_with_fast_properties_, number_of_fast_used_fields_,
2848 number_of_fast_unused_fields_);
2849
2850 PrintF(" - slow properties (#%d): %d (used) %d (unused)\n",
2851 number_of_objects_ - number_of_objects_with_fast_properties_,
2852 number_of_slow_used_properties_, number_of_slow_unused_properties_);
2853
2854 PrintF(" - fast elements (#%d): %d (used) %d (unused)\n",
2855 number_of_objects_with_fast_elements_, number_of_fast_used_elements_,
2856 number_of_fast_unused_elements_);
2857
2858 PrintF(" - slow elements (#%d): %d (used) %d (unused)\n",
2859 number_of_objects_ - number_of_objects_with_fast_elements_,
2860 number_of_slow_used_elements_, number_of_slow_unused_elements_);
2861
2862 PrintF("\n");
2863}
2864
2865bool DescriptorArray::IsSortedNoDuplicates() {
2866 Tagged<Name> current_key;
2867 uint32_t current = 0;
2868 for (int i = 0; i < number_of_descriptors(); i++) {
2870 uint32_t hash;
2871 const bool has_hash = key->TryGetHash(&hash);
2872 CHECK(has_hash);
2873 if (key == current_key) {
2874 Print(*this);
2875 return false;
2876 }
2877 current_key = key;
2878 if (hash < current) {
2879 Print(*this);
2880 return false;
2881 }
2882 current = hash;
2883 }
2884 return true;
2885}
2886
// Debug check: verifies the transition array is sorted (strictly ascending
// per CompareKeys over key/hash/kind/attributes) and has no duplicates.
// Prints the array and returns false on the first violation.
bool TransitionArray::IsSortedNoDuplicates() {
  Tagged<Name> prev_key;
  // NOTE(review): the declaration of |prev_kind| appears to be elided here
  // (it is assigned and read below) — restore from upstream.
  PropertyAttributes prev_attributes = NONE;
  uint32_t prev_hash = 0;

  for (int i = 0; i < number_of_transitions(); i++) {
    // NOTE(review): the declaration of |key| (the i-th transition key)
    // appears to be elided here.
    uint32_t hash;
    const bool has_hash = key->TryGetHash(&hash);
    CHECK(has_hash);
    // NOTE(review): the declaration of |kind| appears to be elided here.
    PropertyAttributes attributes = NONE;
    // NOTE(review): the "if (...IsSpecialTransition(...)) {" branch header
    // appears to be elided here; the block below handles ordinary property
    // transitions, the else-branch handles special (non-property) ones.
      Tagged<Map> target = GetTarget(i);
      PropertyDetails details =
      // NOTE(review): the GetTargetDetails(...) call continuation appears to
      // be elided from the initializer above.
      kind = details.kind();
      attributes = details.attributes();
    } else {
      // Duplicate entries are not allowed for non-property transitions.
      DCHECK_NE(prev_key, key);
    }

    // Entries must be strictly increasing; equality also fails the check.
    int cmp = CompareKeys(prev_key, prev_hash, prev_kind, prev_attributes, key,
                          hash, kind, attributes);
    if (cmp >= 0) {
      Print(this);
      return false;
    }
    prev_key = key;
    prev_hash = hash;
    prev_attributes = attributes;
    prev_kind = kind;
  }
  return true;
}
2924
2925bool TransitionsAccessor::IsSortedNoDuplicates() {
2926 // Simple and non-existent transitions are always sorted.
2927 if (encoding() != kFullTransitionArray) return true;
2928 return transitions()->IsSortedNoDuplicates();
2929}
2930
2931static bool CheckOneBackPointer(Tagged<Map> current_map, Tagged<Map> target) {
2932 return target->GetBackPointer() == current_map;
2933}
2934
// Debug check: walks every transition reachable from |map_| and verifies the
// back-pointer invariant (each target's back pointer is |map_|) plus several
// related map invariants. Returns false if any target fails the back-pointer
// check; the DCHECKs fire directly for the other invariants.
bool TransitionsAccessor::IsConsistentWithBackPointers() {
  // NOTE(review): a DisallowGarbageCollection |no_gc| declaration appears to
  // be elided here (it is passed by address below) — restore from upstream.
  bool success = true;
  ReadOnlyRoots roots(isolate_);
  DCHECK_IMPLIES(map_->IsInobjectSlackTrackingInProgress(),
  // NOTE(review): the second argument of the DCHECK_IMPLIES above appears to
  // be elided here.
  auto CheckTarget =
      [&](Tagged<Map> target) {
#ifdef DEBUG
        if (!map_->is_deprecated() && !target->is_deprecated()) {
          // Slack tracking state must agree between source and target maps.
          DCHECK_EQ(map_->IsInobjectSlackTrackingInProgress(),
                    target->IsInobjectSlackTrackingInProgress());
          // Check prototype transitions are first.
          DCHECK_IMPLIES(map_->prototype() != target->prototype(),
                         IsUndefined(map_->GetBackPointer()));
        }
        DCHECK_EQ(target->map(), map_->map());
#endif  // DEBUG
        // Record the failure instead of aborting so all targets get checked.
        if (!CheckOneBackPointer(map_, target)) {
          success = false;
        }
      };
  // NOTE(review): the call head (presumably ForEachTransition(...) taking
  // &no_gc and the three callbacks) appears to be elided here.
      &no_gc, [&](Tagged<Map> target) { CheckTarget(target); },
      [&](Tagged<Map> proto_target) {
        // Prototype transitions only obey the back-pointer invariant when
        // they are ordered first.
        if (v8_flags.move_prototype_transitions_first) {
          CheckTarget(proto_target);
        }
      },
      [&](Tagged<Object> side_step) {
        if (!side_step.IsSmi()) {
          // Side-step entries are maps with matching meta-map, finished
          // slack tracking, and the same used in-object property count.
          DCHECK_EQ(Cast<Map>(side_step)->map(), map_->map());
          DCHECK(!Cast<Map>(side_step)->IsInobjectSlackTrackingInProgress());
          DCHECK_EQ(
              Cast<Map>(side_step)->GetInObjectProperties() -
                  Cast<Map>(side_step)->UnusedInObjectProperties(),
              map_->GetInObjectProperties() - map_->UnusedInObjectProperties());
        }
      });
  return success;
}
2976
2977#undef USE_TORQUE_VERIFIER
2978
2979#endif // DEBUG
2980
2981} // namespace internal
2982} // namespace v8
Isolate * isolate_
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype)
#define BUILTIN_LIST_BASE_TIERING(TFC)
int16_t parameter_count
Definition builtins.cc:67
interpreter::Bytecode bytecode
Definition builtins.cc:43
union v8::internal::@341::BuiltinMetadata::KindSpecificData data
Builtins::Kind kind
Definition builtins.cc:40
#define SLOW_DCHECK(condition)
Definition checks.h:21
TaggedMember< Object > setter_
Definition struct.h:115
TaggedMember< Object > getter_
Definition struct.h:114
TaggedMember< AllocationSite > allocation_site_
TaggedMember< UnionOf< Smi, JSObject > > transition_info_or_boilerplate_
Tagged< UnionOf< Smi, AllocationSite > > nested_site() const
Tagged< DependentCode > dependent_code() const
bool is_zero() const
Definition bigint.h:150
bool sign() const
Definition bigint.h:141
uint32_t length() const
Definition bigint.h:90
static bool CheckFormalParameterCount(Builtin builtin, int function_length, int formal_parameter_count_with_receiver)
Definition builtins.cc:166
TaggedMember< Smi > end_
Definition struct.h:136
TaggedMember< Smi > start_
Definition struct.h:135
int safepoint_table_offset() const
Definition code.h:263
bool has_instruction_stream() const
Definition code-inl.h:739
static const uint32_t kMinLength
Definition string.h:1029
V8_INLINE bool IsFlat() const
Tagged< String > first() const
Tagged< String > second() const
bool has_extension() const
static V8_INLINE constexpr int OffsetOfElementAt(int index)
Definition contexts.h:512
bool IsScriptContext() const
Tagged< HeapObject > extension() const
V8_INLINE Tagged< Object > get(int index) const
static int FunctionMapIndex(LanguageMode language_mode, FunctionKind kind, bool has_shared_name)
Tagged< UnionOf< Smi, Cell > > validity_cell() const
Tagged< UnionOf< Smi, Code > > smi_handler() const
PropertyDetails GetDetails(InternalIndex descriptor_number)
ObjectSlot GetDescriptorSlot(int descriptor)
Tagged< MaybeObject > GetValue(InternalIndex descriptor_number)
Tagged< Name > GetSortedKey(int descriptor_number)
static ElementsAccessor * ForKind(ElementsKind elements_kind)
Definition elements.h:29
int32_t slot_count(AcquireLoadTag) const
int32_t create_closure_slot_count(AcquireLoadTag) const
static FieldIndex ForDetails(Tagged< Map > map, PropertyDetails details)
static bool NowStable(Tagged< FieldType > type)
Definition field-type.cc:63
static V8_EXPORT_PRIVATE Tagged< FieldType > Any()
Definition field-type.cc:22
static V8_EXPORT_PRIVATE Tagged< FieldType > None()
Definition field-type.cc:17
static bool NowContains(Tagged< FieldType > type, Tagged< Object > value)
bool is_the_hole(Isolate *isolate, int index)
uint64_t get_representation(int index)
static V8_INLINE bool InYoungGeneration(Tagged< Object > object)
static V8_INLINE bool InWritableSharedSpace(Tagged< HeapObject > object)
static V8_INLINE bool InTrustedSpace(Tagged< HeapObject > object)
static V8_INLINE bool InReadOnlySpace(Tagged< HeapObject > object)
static V8_INLINE bool InAnySharedSpace(Tagged< HeapObject > object)
Tagged< Map > map() const
bool CheckRequiredAlignment(PtrComprCageBase cage_base) const
IndirectPointerSlot RawIndirectPointerField(int byte_offset, IndirectPointerTag tag) const
bool TryGetCode(Tagged< Code > *code_out, AcquireLoadTag tag) const
static constexpr int kMetadataAlignment
Tagged< TrustedByteArray > relocation_info() const
static IsolateGroup * current()
static constexpr size_t kMaxByteLength
Tagged< Context > context()
Tagged< NativeContext > native_context()
int GetEmbedderFieldCount() const
static const int kFieldsAdded
Definition js-objects.h:954
Tagged< JSAny > RawFastPropertyAt(FieldIndex index) const
Tagged< FixedArrayBase > elements(PtrComprCageBase cage_base, AcquireLoadTag tag) const =delete
static V8_EXPORT_PRIVATE int GetHeaderSize(InstanceType instance_type, bool function_has_prototype_slot=false)
Tagged< Object > reactions() const
V8_EXPORT_PRIVATE Promise::PromiseState status() const
Definition objects.cc:4993
V8_INLINE bool IsRevoked() const
static constexpr int kUninitializedValue
Definition js-regexp.h:130
static bool IsRegistryKeyDescriptor(Isolate *isolate, Tagged< Map > instance_map, InternalIndex i)
Definition js-struct.cc:284
static bool IsElementsTemplateDescriptor(Isolate *isolate, Tagged< Map > instance_map, InternalIndex i)
Definition js-struct.cc:301
V8_EXPORT_PRIVATE size_t element_size() const
static constexpr size_t kMaxByteLength
static constexpr bool IsPacked(Address)
Definition objects.h:846
InternalIndex::Range IterateOwnDescriptors() const
Definition map-inl.h:245
bool IsInobjectSlackTrackingInProgress() const
Definition map-inl.h:1014
bool CanHaveFastTransitionableElementsKind() const
Definition map-inl.h:169
int GetInObjectPropertyOffset(int index) const
Definition map-inl.h:363
bool is_stable() const
Definition map-inl.h:705
static bool IsMostGeneralFieldType(Representation representation, Tagged< FieldType > field_type)
Definition map-inl.h:157
bool OnlyHasSimpleProperties() const
Definition map.cc:1202
bool has_shared_array_elements() const
Definition map-inl.h:685
static V8_EXPORT_PRIVATE VisitorId GetVisitorId(Tagged< Map > map)
Definition map.cc:65
int NumberOfOwnDescriptors() const
Definition map-inl.h:232
int UnusedPropertyFields() const
Definition map-inl.h:384
int EnumLength() const
Definition map-inl.h:249
static constexpr int MaxRegularCodeObjectSize()
static const size_t kAttributeEntrySize
bool HasForwardingIndex(AcquireLoadTag) const
Definition name-inl.h:157
bool TryGetHash(uint32_t *hash) const
Definition name-inl.h:237
bool IsPrivateBrand()
Definition name-inl.h:269
bool HasHashCode() const
Definition name-inl.h:153
uint32_t hash() const
Definition name-inl.h:228
static bool CanBeHeldWeakly(Tagged< Object > obj)
static bool ToArrayLength(Tagged< Object > obj, uint32_t *index)
static double NumberValue(Tagged< Number > obj)
TaggedMember< Smi > kind_
Definition oddball.h:73
Tagged< String > type_of() const
Definition oddball-inl.h:42
Tagged< String > to_string() const
Definition oddball-inl.h:32
Tagged< Number > to_number() const
Definition oddball-inl.h:37
Tagged< Object > get_child_raw(int index) const
Tagged< JSAny > get(int index) const
PropertyDetails property_details() const
static bool CheckDataIsCompatible(PropertyDetails details, Tagged< Object > value)
Definition objects.cc:6525
static constexpr PropertyDetails Empty(PropertyCellType cell_type=PropertyCellType::kNoCell)
static const int kNoEmptySlotsMarker
static Tagged< Smi > empty_slot_index(Tagged< WeakArrayList > array)
static V8_EXPORT_PRIVATE bool Contains(Address address)
Tagged< Object > last_input() const
static constexpr int kMinCapacity
Tagged< String > last_subject() const
static bool CanGenerateBytecode()
Definition regexp.cc:112
static constexpr bool IsGCRelocMode(Mode mode)
Definition reloc-info.h:189
Tagged< NameToIndexHashTable > names_to_context_index() const
Tagged< Context > get(int index) const
bool CanHaveLineEnds() const
Definition script-inl.h:192
uint16_t internal_formal_parameter_count_with_receiver() const
Tagged< BytecodeArray > GetBytecodeArray(IsolateT *isolate) const
Tagged< ScopeInfo > EarlyScopeInfo(AcquireLoadTag tag)
Tagged< HeapObject > script() const
Tagged< String > parent() const
static const uint32_t kMinLength
Definition string.h:1130
Tagged< Context > context() const
Tagged< UnionOf< FixedArray, NumberDictionary > > arguments() const
Tagged< UnionOf< Smi, Hole > > mapped_entries(int index, RelaxedLoadTag) const
V8_INLINE Tagged< Object > GetDataEntry(int entry, int relative_index)
static constexpr int ToInt(const Tagged< Object > object)
Definition smi.h:33
static constexpr Tagged< Smi > zero()
Definition smi.h:99
static constexpr int kMaxValue
Definition smi.h:101
bool HasAsyncEvaluationOrdinal() const
Definition module-inl.h:147
Tagged< SourceTextModuleInfo > info() const
Definition module-inl.h:60
void IterateElements(RootVisitor *visitor)
void VerifyIfOwnedBy(Isolate *isolate)
static const uint32_t kMaxLength
Definition string.h:511
uint32_t length() const
Definition string-inl.h:127
Tagged< Object > KeyAt(InternalIndex entry)
static constexpr bool IsValidCapacity(int capacity)
int EntryForEnumerationIndex(int enumeration_index)
Tagged< PrimitiveHeapObject > description() const
Definition name-inl.h:25
static PtrType load(Tagged< HeapObject > host, int offset=0)
constexpr bool IsCleared() const
Tagged< String > actual() const
static int LengthFor(int number_of_transitions)
Tagged< Name > GetKey(int transition_number)
Tagged< WeakFixedArray > GetSideStepTransitions()
static int CompareKeys(Tagged< Name > key1, uint32_t hash1, PropertyKind kind1, PropertyAttributes attributes1, Tagged< Name > key2, uint32_t hash2, PropertyKind kind2, PropertyAttributes attributes2)
Tagged< Map > GetTarget(int transition_number)
static const int kProtoTransitionHeaderSize
static int NumberOfPrototypeTransitions(Tagged< WeakFixedArray > proto_transitions)
Tagged< WeakFixedArray > GetPrototypeTransitions()
void ForEachTransition(DisallowGarbageCollection *no_gc, Callback callback, ProtoCallback proto_transition_callback, SideStepCallback side_step_transition_callback)
Tagged< TransitionArray > transitions()
static bool IsSpecialTransition(ReadOnlyRoots roots, Tagged< Name > name)
static PropertyDetails GetTargetDetails(Tagged< Name > name, Tagged< Map > target)
TaggedMember< Object > value2_
Definition struct.h:61
TaggedMember< Object > value1_
Definition struct.h:60
Tagged< Object > implicit_arg(int index) const
static constexpr std::array< uint16_t, 6 > kProtectedFieldOffsets
static constexpr std::array< uint16_t, kTaggedFieldsCount > kTaggedFieldOffsets
Handle< Code > code
#define V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL
Definition globals.h:242
#define V8_EXTERNAL_CODE_SPACE_BOOL
Definition globals.h:255
#define COMPRESS_POINTERS_BOOL
Definition globals.h:99
#define COMPRESS_POINTERS_IN_MULTIPLE_CAGES_BOOL
Definition globals.h:117
JSRegExp::Flags flags_
int start
int end
#define RAB_GSAB_TYPED_ARRAYS(V)
#define TYPED_ARRAYS(V)
DirectHandle< FixedArray > capture_name_map
#define MAKE_TORQUE_CASE(Name, TYPE)
int32_t offset
TNode< Context > context
TNode< Object > target
SharedFunctionInfoRef shared
std::map< const std::string, const std::string > map
std::string pattern
double hour
ZoneStack< RpoNumber > & stack
InstructionOperand source
const int length_
Definition mul-fft.cc:473
int r
Definition mul-fft.cc:298
unsigned short uint16_t
Definition unicode.cc:39
V8_INLINE Dest bit_cast(Source const &source)
Definition macros.h:95
V8_INLINE constexpr bool IsSeqString(InstanceType instance_type)
V8_INLINE constexpr bool IsThinString(InstanceType instance_type)
V8_INLINE constexpr bool IsConsString(InstanceType instance_type)
V8_INLINE constexpr bool IsExternalString(InstanceType instance_type)
V8_INLINE constexpr bool IsInternalizedString(InstanceType instance_type)
V8_INLINE constexpr bool IsNativeContextSpecific(InstanceType instance_type)
V8_INLINE constexpr bool IsFreeSpaceOrFiller(InstanceType instance_type)
V8_INLINE constexpr bool IsSlicedString(InstanceType instance_type)
V8_INLINE constexpr bool IsHeapObject(InstanceType instance_type)
Map::Bits3::NumberOfOwnDescriptorsBits Map::Bits3::ConstructionCounterBits is_access_check_needed
static swiss_table::ctrl_t H2(uint32_t hash)
constexpr WasmCodePointer kInvalidWasmCodePointer
V8_INLINE IndirectHandle< T > handle(Tagged< T > object, Isolate *isolate)
Definition handles-inl.h:72
static V8_INLINE bool HasWeakHeapObjectTag(const Tagged< Object > value)
Definition objects.h:653
V8_WARN_UNUSED_RESULT bool IsValidCodeObject(Heap *heap, Tagged< HeapObject > object)
constexpr int kTaggedSize
Definition globals.h:542
kStaticElementsTemplateOffset kInstancePropertiesTemplateOffset instance_computed_properties
constexpr double kMaxSafeInteger
Definition globals.h:1985
PerThreadAssertScopeDebugOnly< false, HEAP_ALLOCATION_ASSERT > DisallowHeapAllocation
bool IsNone(Tagged< FieldType > obj)
Definition field-type.h:50
PerThreadAssertScopeDebugOnly< false, SAFEPOINTS_ASSERT, HEAP_ALLOCATION_ASSERT > DisallowGarbageCollection
bool IsAnyHoleyNonextensibleElementsKind(ElementsKind kind)
bool Is(IndirectHandle< U > value)
Definition handles-inl.h:51
ReadOnlyRoots GetReadOnlyRoots()
Definition roots-inl.h:86
void PrintF(const char *format,...)
Definition utils.cc:39
Map::Bits1::HasPrototypeSlotBit Map::Bits1::HasNamedInterceptorBit Map::Bits1::IsUndetectableBit Map::Bits1::IsConstructorBit Map::Bits2::IsImmutablePrototypeBit Map::Bits3::IsDeprecatedBit is_prototype_map
Definition map-inl.h:133
constexpr int kEmbedderDataSlotSize
Definition globals.h:664
Tagged< DescriptorArray >
Definition map-inl.h:52
Tagged(T object) -> Tagged< T >
static constexpr int kFeedbackSlotKindCount
V8_INLINE constexpr bool IsSmi(TaggedImpl< kRefType, StorageType > obj)
Definition objects.h:665
constexpr intptr_t kCodeAlignment
Definition globals.h:964
constexpr uint16_t kDontAdaptArgumentsSentinel
Definition globals.h:2779
kInterpreterTrampolineOffset Tagged< HeapObject >
kStaticElementsTemplateOffset kInstancePropertiesTemplateOffset Tagged< FixedArray >
base::StrongAlias< JSDispatchHandleAliasTag, uint32_t > JSDispatchHandle
Definition globals.h:557
@ HOLEY_NONEXTENSIBLE_ELEMENTS
@ SLOW_STRING_WRAPPER_ELEMENTS
@ PACKED_NONEXTENSIBLE_ELEMENTS
@ SLOW_SLOPPY_ARGUMENTS_ELEMENTS
@ FAST_SLOPPY_ARGUMENTS_ELEMENTS
@ FAST_STRING_WRAPPER_ELEMENTS
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in name
Definition flags.cc:2086
return Cast< NumberDictionary >(elements(cage_base))
const int kVariableSizeSentinel
Definition objects.h:84
void Print(Tagged< Object > obj)
Definition objects.h:774
Handle< To > UncheckedCast(Handle< From > value)
Definition handles-inl.h:55
static const int kInvalidEnumCacheSentinel
Map::Bits1::HasPrototypeSlotBit has_named_interceptor
Definition map-inl.h:109
constexpr JSDispatchHandle kNullJSDispatchHandle(0)
bool IsModule(FunctionKind kind)
instance_descriptors
Definition map-inl.h:52
bool IsShared(Tagged< Object > obj)
bool IsSloppyArgumentsElementsKind(ElementsKind kind)
DONT_OVERRIDE DISABLE_ALLOCATION_SITES HOLEY_ELEMENTS
Map::Bits1::HasPrototypeSlotBit Map::Bits1::HasNamedInterceptorBit Map::Bits1::IsUndetectableBit Map::Bits1::IsConstructorBit Map::Bits2::IsImmutablePrototypeBit is_deprecated
Definition map-inl.h:129
bool IsTerminalElementsKind(ElementsKind kind)
bool IsSharedArrayElementsKind(ElementsKind kind)
DONT_OVERRIDE DISABLE_ALLOCATION_SITES DISABLE_ALLOCATION_SITES HOLEY_DOUBLE_ELEMENTS
bool IsDictionaryElementsKind(ElementsKind kind)
V8_INLINE constexpr bool IsObject(TaggedImpl< kRefType, StorageType > obj)
Definition objects.h:661
V8_INLINE constexpr bool IsHeapObject(TaggedImpl< kRefType, StorageType > obj)
Definition objects.h:669
V8_EXPORT_PRIVATE FlagValues v8_flags
bool IsAny(Tagged< FieldType > obj)
Definition field-type.h:51
bool IsUniqueName(Tagged< Name > obj)
kStaticElementsTemplateOffset instance_properties_template
return value
Definition map-inl.h:893
bool IsTaggedIndex(Tagged< Object > obj)
Definition objects-inl.h:91
kMemory0SizeOffset Address kNewAllocationLimitAddressOffset Address kOldAllocationLimitAddressOffset uint8_t kGlobalsStartOffset kJumpTableStartOffset std::atomic< uint32_t > kTieringBudgetArrayOffset kDataSegmentStartsOffset kElementSegmentsOffset kInstanceObjectOffset kMemoryObjectsOffset kTaggedGlobalsBufferOffset kTablesOffset dispatch_table0
static constexpr Address kNullAddress
Definition v8-internal.h:53
bool IsTransitionableFastElementsKind(ElementsKind from_kind)
Map::Bits1::HasPrototypeSlotBit Map::Bits1::HasNamedInterceptorBit Map::Bits1::IsUndetectableBit Map::Bits1::IsConstructorBit Map::Bits2::IsImmutablePrototypeBit Map::Bits3::IsDeprecatedBit Map::Bits3::IsPrototypeMapBit is_extensible
Definition map-inl.h:137
constructor_or_back_pointer
Definition map-inl.h:870
!IsContextMap !IsContextMap Tagged< NativeContext >
Definition map-inl.h:877
constexpr uint32_t kMaxUInt32
Definition globals.h:387
kInstanceDescriptorsOffset kTransitionsOrPrototypeInfoOffset IsNull(value)||IsJSProxy(value)||IsWasmObject(value)||(IsJSObject(value) &&(HeapLayout
Definition map-inl.h:70
V8_INLINE IndirectPointerTag IndirectPointerTagFromInstanceType(InstanceType instance_type)
kInstanceDescriptorsOffset kTransitionsOrPrototypeInfoOffset prototype
Definition map-inl.h:69
V8_WARN_UNUSED_RESULT bool IsValidHeapObject(Heap *heap, Tagged< HeapObject > object)
!IsContextMap !IsContextMap native_context
Definition map-inl.h:877
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
static constexpr RelaxedLoadTag kRelaxedLoad
Definition globals.h:2909
static constexpr AcquireLoadTag kAcquireLoad
Definition globals.h:2908
#define HOLE_LIST(V)
#define STRING_TYPE_LIST(V)
#define RCS_SCOPE(...)
#define UNREACHABLE()
Definition logging.h:67
#define CHECK_GE(lhs, rhs)
#define CHECK_IMPLIES(lhs, rhs)
#define CHECK(condition)
Definition logging.h:124
#define CHECK_GT(lhs, rhs)
#define CHECK_LT(lhs, rhs)
#define CHECK_LE(lhs, rhs)
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define CHECK_NE(lhs, rhs)
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
constexpr bool IsAligned(T value, U alignment)
Definition macros.h:403
static constexpr int index_of(Kind kind)
Definition transitions.h:44
static constexpr uint32_t kLastMapIdx
Definition transitions.h:49
static constexpr Tagged< Smi > Empty
Definition transitions.h:40
static constexpr uint32_t kFirstMapIdx
Definition transitions.h:47
static constexpr Tagged< Smi > Unreachable
Definition transitions.h:41
#define TYPED_ARRAY_CONSTRUCTORS_SWITCH(Type, type, TYPE, Ctype)
#define FIELD_SIZE(Name)
Definition utils.h:259
wasm::ValueType type