deserializer.cc
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/snapshot/deserializer.h"

#include <inttypes.h>

#include "src/base/logging.h"
#include "src/common/assert-scope.h"
#include "src/common/globals.h"
#include "src/execution/isolate.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/heap/heap.h"
#include "src/logging/log.h"
#include "src/objects/backing-store.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/objects.h"
#include "src/objects/slots.h"
#include "src/objects/string.h"
#include "src/roots/roots.h"
#include "src/snapshot/embedded/embedded-data-inl.h"
#include "src/snapshot/shared-heap-serializer.h"
#include "src/utils/memcopy.h"

// Has to be the last include (doesn't have include guards)
#include "src/objects/object-macros.h"
namespace v8::internal {

#ifdef V8_COMPRESS_POINTERS
#define PRIxTAGGED PRIx32
#else
#define PRIxTAGGED PRIxPTR
#endif
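
// PRIxTAGGED picks the printf format specifier matching the width of a
// Tagged_t value: 32 bits under pointer compression, pointer-sized otherwise.
// It is only used by the --trace-deserialization output below.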

// A SlotAccessor for a slot in a HeapObject, which abstracts the slot
// operations done by the deserializer in a way which is GC-safe. In
// particular, rather than an absolute slot address, this accessor holds a
// Handle to the HeapObject, which is updated if the HeapObject moves.
class SlotAccessorForHeapObject {
 public:
  static SlotAccessorForHeapObject ForSlotIndex(Handle<HeapObject> object,
                                                int index) {
    return SlotAccessorForHeapObject(object, index * kTaggedSize);
  }
  static SlotAccessorForHeapObject ForSlotOffset(Handle<HeapObject> object,
                                                 int offset) {
    return SlotAccessorForHeapObject(object, offset);
  }

  MaybeObjectSlot slot() const { return object_->RawMaybeWeakField(offset_); }
  ExternalPointerSlot external_pointer_slot(ExternalPointerTag tag) const {
    return object_->RawExternalPointerField(offset_, tag);
  }
  Handle<HeapObject> object() const { return object_; }
  int offset() const { return offset_; }

  // Writes the given value to this slot, with an offset (e.g. for repeat
  // writes). Returns the number of slots written (which is one).
  int Write(Tagged<MaybeObject> value, int slot_offset, WriteBarrierMode mode) {
    MaybeObjectSlot current_slot = slot() + slot_offset;
    current_slot.Relaxed_Store(value);
#ifdef V8_STATIC_ROOTS_BOOL
    if (mode != SKIP_WRITE_BARRIER &&
        FastInReadOnlySpaceOrSmallSmi(value.ptr())) {
      // TODO(jgruber): Remove this once WriteBarrier::ForValue() contains the
      // same check.
      mode = SKIP_WRITE_BARRIER;
    }
#endif  // V8_STATIC_ROOTS_BOOL
    WriteBarrier::ForValue(*object_, current_slot, value, mode);
    return 1;
  }
  int Write(Tagged<HeapObject> value, HeapObjectReferenceType ref_type,
            int slot_offset, WriteBarrierMode mode) {
    return Write(Tagged<HeapObjectReference>(value, ref_type), slot_offset,
                 mode);
  }
  int Write(DirectHandle<HeapObject> value, HeapObjectReferenceType ref_type,
            int slot_offset, WriteBarrierMode mode) {
    return Write(*value, ref_type, slot_offset, mode);
  }

  int WriteIndirectPointerTo(Tagged<HeapObject> value, WriteBarrierMode mode) {
    // Only ExposedTrustedObjects can be referenced via indirect pointers, so
    // we must have one of these objects here. See the comments in
    // trusted-object.h for more details.
    DCHECK(IsExposedTrustedObject(value));
    Tagged<ExposedTrustedObject> object = Cast<ExposedTrustedObject>(value);

    InstanceType instance_type = value->map()->instance_type();
    IndirectPointerTag tag = IndirectPointerTagFromInstanceType(instance_type);
    IndirectPointerSlot dest = object_->RawIndirectPointerField(offset_, tag);
    dest.store(object);

    WriteBarrier::ForIndirectPointer(*object_, dest, value, mode);
    return 1;
  }

  int WriteProtectedPointerTo(Tagged<TrustedObject> value,
                              WriteBarrierMode mode) {
    DCHECK(IsTrustedObject(*object_));
    Tagged<TrustedObject> host = Cast<TrustedObject>(*object_);
    ProtectedPointerSlot dest = host->RawProtectedPointerField(offset_);
    dest.store(value);
    WriteBarrier::ForProtectedPointer(host, dest, value, mode);
    return 1;
  }

 private:
  SlotAccessorForHeapObject(Handle<HeapObject> object, int offset)
      : object_(object), offset_(offset) {}

  const Handle<HeapObject> object_;
  const int offset_;
};
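
// As an illustrative sketch, the deserializer fills the i-th tagged field of
// a freshly read object roughly like so (with value a Tagged<MaybeObject>):
//
//   SlotAccessorForHeapObject accessor =
//       SlotAccessorForHeapObject::ForSlotIndex(obj, i);
//   accessor.Write(value, 0, UPDATE_WRITE_BARRIER);
//
// Since the accessor holds a Handle rather than a raw slot address, the write
// stays valid even if a GC during deserialization moves the object.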

// A SlotAccessor for absolute full slot addresses.
class SlotAccessorForRootSlots {
 public:
  explicit SlotAccessorForRootSlots(FullMaybeObjectSlot slot) : slot_(slot) {}

  FullMaybeObjectSlot slot() const { return slot_; }
  ExternalPointerSlot external_pointer_slot(ExternalPointerTag tag) const {
    UNREACHABLE();
  }
  Handle<HeapObject> object() const { UNREACHABLE(); }
  int offset() const { UNREACHABLE(); }

  // Writes the given value to this slot, with an offset (e.g. for repeat
  // writes). Returns the number of slots written (which is one).
  int Write(Tagged<MaybeObject> value, int slot_offset, WriteBarrierMode mode) {
    FullMaybeObjectSlot current_slot = slot() + slot_offset;
    current_slot.Relaxed_Store(value);
    return 1;
  }
  int Write(Tagged<HeapObject> value, HeapObjectReferenceType ref_type,
            int slot_offset, WriteBarrierMode mode) {
    return Write(Tagged<HeapObjectReference>(value, ref_type), slot_offset,
                 mode);
  }
  int Write(DirectHandle<HeapObject> value, HeapObjectReferenceType ref_type,
            int slot_offset, WriteBarrierMode mode) {
    return Write(*value, ref_type, slot_offset, mode);
  }
  int WriteIndirectPointerTo(Tagged<HeapObject> value, WriteBarrierMode mode) {
    UNREACHABLE();
  }
  int WriteProtectedPointerTo(Tagged<TrustedObject> value,
                              WriteBarrierMode mode) {
    UNREACHABLE();
  }

 private:
  const FullMaybeObjectSlot slot_;
};
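
// Root slots live in off-heap storage (the isolate's roots table or the
// stack), so the Write() above intentionally performs no write-barrier
// bookkeeping; there is no host HeapObject whose remembered set could need
// updating.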

// A SlotAccessor for creating a Handle, which saves a Handle allocation when
// a Handle already exists.
template <typename IsolateT>
class SlotAccessorForHandle {
 public:
  SlotAccessorForHandle(DirectHandle<HeapObject>* handle, IsolateT* isolate)
      : handle_(handle), isolate_(isolate) {}

  MaybeObjectSlot slot() const { UNREACHABLE(); }
  ExternalPointerSlot external_pointer_slot(ExternalPointerTag tag) const {
    UNREACHABLE();
  }
  Handle<HeapObject> object() const { UNREACHABLE(); }
  int offset() const { UNREACHABLE(); }

  int Write(Tagged<MaybeObject> value, int slot_offset, WriteBarrierMode mode) {
    UNREACHABLE();
  }
  int Write(Tagged<HeapObject> value, HeapObjectReferenceType ref_type,
            int slot_offset, WriteBarrierMode mode) {
    DCHECK_EQ(slot_offset, 0);
    DCHECK_EQ(ref_type, HeapObjectReferenceType::STRONG);
    *handle_ = direct_handle(value, isolate_);
    return 1;
  }
  int Write(DirectHandle<HeapObject> value, HeapObjectReferenceType ref_type,
            int slot_offset, WriteBarrierMode mode) {
    DCHECK_EQ(slot_offset, 0);
    DCHECK_EQ(ref_type, HeapObjectReferenceType::STRONG);
    *handle_ = value;
    return 1;
  }
  int WriteIndirectPointerTo(Tagged<HeapObject> value, WriteBarrierMode mode) {
    UNREACHABLE();
  }
  int WriteProtectedPointerTo(Tagged<TrustedObject> value,
                              WriteBarrierMode mode) {
    UNREACHABLE();
  }

 private:
  DirectHandle<HeapObject>* handle_;
  IsolateT* isolate_;
};
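
// This accessor backs ReadObject() below: rather than writing the
// deserialized object into a heap slot, it stores it into an existing
// DirectHandle, which is how single objects are handed back to the caller.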

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::WriteHeapPointer(SlotAccessor slot_accessor,
                                             Tagged<HeapObject> heap_object,
                                             ReferenceDescriptor descr,
                                             WriteBarrierMode mode) {
  if (descr.is_indirect_pointer) {
    return slot_accessor.WriteIndirectPointerTo(heap_object, mode);
  } else {
    return slot_accessor.Write(heap_object, descr.type, 0, mode);
  }
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::WriteHeapPointer(
    SlotAccessor slot_accessor, DirectHandle<HeapObject> heap_object,
    ReferenceDescriptor descr, WriteBarrierMode mode) {
  if (descr.is_indirect_pointer) {
    return slot_accessor.WriteIndirectPointerTo(*heap_object, mode);
  } else if (descr.is_protected_pointer) {
    DCHECK(IsTrustedObject(*heap_object));
    return slot_accessor.WriteProtectedPointerTo(
        Cast<TrustedObject>(*heap_object), mode);
  } else {
    return slot_accessor.Write(heap_object, descr.type, 0, mode);
  }
}

template <typename IsolateT>
int Deserializer<IsolateT>::WriteExternalPointer(Tagged<HeapObject> host,
                                                 ExternalPointerSlot dest,
                                                 Address value,
                                                 ExternalPointerTag tag) {
  DCHECK(!next_reference_is_weak_ && !next_reference_is_indirect_pointer_ &&
         !next_reference_is_protected_pointer);

#ifdef V8_ENABLE_SANDBOX
  ExternalPointerTable::ManagedResource* managed_resource = nullptr;
  ExternalPointerTable* owning_table = nullptr;
  ExternalPointerHandle original_handle = kNullExternalPointerHandle;
  if (IsManagedExternalPointerType(tag)) {
    // This can currently only happen during snapshot stress mode as we cannot
    // normally serialize managed resources. In snapshot stress mode, the new
    // isolate will be destroyed and the old isolate (really, the old isolate's
    // external pointer table) therefore effectively retains ownership of the
    // resource. As such, we need to save and restore the relevant fields of
    // the external resource. Once the external pointer table itself destroys
    // the managed resource when freeing the corresponding table entry, this
    // workaround can be removed again.
    DCHECK(v8_flags.stress_snapshot);
    managed_resource =
        reinterpret_cast<ExternalPointerTable::ManagedResource*>(value);
    owning_table = managed_resource->owning_table_;
    original_handle = managed_resource->ept_entry_;
    managed_resource->owning_table_ = nullptr;
    managed_resource->ept_entry_ = kNullExternalPointerHandle;
  }
#endif  // V8_ENABLE_SANDBOX

  dest.init(main_thread_isolate(), host, value, tag);

#ifdef V8_ENABLE_SANDBOX
  if (managed_resource) {
    managed_resource->owning_table_ = owning_table;
    managed_resource->ept_entry_ = original_handle;
  }
#endif  // V8_ENABLE_SANDBOX

  // ExternalPointers can only be written into HeapObject fields, therefore
  // they cover (kExternalPointerSlotSize / kTaggedSize) slots.
  return (kExternalPointerSlotSize / kTaggedSize);
}
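
// kExternalPointerSlotSize is the 4-byte table-handle size when the sandbox
// is enabled and a full system pointer otherwise, so the return value above
// works out to one tagged slot on sandboxed and on uncompressed builds, and
// two tagged slots when pointers are compressed without the sandbox.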

namespace {
#ifdef DEBUG
int GetNumApiReferences(Isolate* isolate) {
  int num_api_references = 0;
  // The read-only deserializer is run by read-only heap set-up before the
  // heap is fully set up. External reference table relies on a few parts of
  // this set-up (like old-space), so it may be uninitialized at this point.
  if (isolate->isolate_data()->external_reference_table()->is_initialized()) {
    // Count the number of external references registered through the API.
    if (isolate->api_external_references() != nullptr) {
      while (isolate->api_external_references()[num_api_references] != 0) {
        num_api_references++;
      }
    }
  }
  return num_api_references;
}
int GetNumApiReferences(LocalIsolate* isolate) { return 0; }
#endif
}  // namespace

template <typename IsolateT>
Deserializer<IsolateT>::Deserializer(IsolateT* isolate,
                                     base::Vector<const uint8_t> payload,
                                     uint32_t magic_number,
                                     bool deserializing_user_code,
                                     bool can_rehash)
    : isolate_(isolate),
      attached_objects_(isolate),
      source_(payload),
      magic_number_(magic_number),
      new_maps_(isolate),
      new_allocation_sites_(isolate),
      new_code_objects_(isolate),
      accessor_infos_(isolate),
      function_template_infos_(isolate),
      new_scripts_(isolate),
      new_descriptor_arrays_(isolate->heap()),
      deserializing_user_code_(deserializing_user_code),
      should_rehash_((v8_flags.rehash_snapshot && can_rehash) ||
                     deserializing_user_code),
      to_rehash_(isolate) {
  DCHECK_NOT_NULL(isolate);
  isolate->RegisterDeserializerStarted();

  // We start the indices here at 1, so that we can distinguish between an
  // actual index and an empty backing store (serialized as
  // kEmptyBackingStoreRefSentinel) in a deserialized object requiring fix-up.
  static_assert(kEmptyBackingStoreRefSentinel == 0);
  backing_stores_.push_back({});

  back_refs_.reserve(2048);
  js_dispatch_entries_.reserve(512);

#ifdef DEBUG
  num_api_references_ = GetNumApiReferences(isolate);
#endif  // DEBUG
  CHECK_EQ(magic_number_, SerializedData::kMagicNumber);
}

template <typename IsolateT>
void Deserializer<IsolateT>::Rehash() {
  DCHECK(should_rehash());
  for (DirectHandle<HeapObject> item : to_rehash_) {
    item->RehashBasedOnMap(isolate());
  }
}
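
// Rehashing is needed because hash values do not survive serialization: each
// isolate seeds string hashes (and the hash tables built from them) with its
// own hash seed, so every object queued in to_rehash_ recomputes its hash
// against the new isolate here.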

template <typename IsolateT>
Deserializer<IsolateT>::~Deserializer() {
#ifdef DEBUG
  // Do not perform checks if we aborted deserialization.
  if (source_.position() == 0) return;
  // Check that we only have padding bytes remaining.
  while (source_.HasMore()) DCHECK_EQ(kNop, source_.Get());
  // Check that there are no remaining forward refs.
  DCHECK_EQ(num_unresolved_forward_refs_, 0);
  DCHECK(unresolved_forward_refs_.empty());
#endif  // DEBUG
  isolate_->RegisterDeserializerFinished();
}

// This is called on the roots. It is the driver of the deserialization
// process. It is also called on the body of each function.
template <typename IsolateT>
void Deserializer<IsolateT>::VisitRootPointers(Root root,
                                               const char* description,
                                               FullObjectSlot start,
                                               FullObjectSlot end) {
  ReadData(FullMaybeObjectSlot(start), FullMaybeObjectSlot(end));
}

template <typename IsolateT>
void Deserializer<IsolateT>::Synchronize(VisitorSynchronization::SyncTag tag) {
  static const uint8_t expected = kSynchronize;
  CHECK_EQ(expected, source_.Get());
  if (v8_flags.trace_deserialization) {
    const char* name;
    switch (tag) {
#define CASE(ID, NAME)             \
  case VisitorSynchronization::ID: \
    name = NAME;                   \
    break;
      ROOT_ID_LIST(CASE)
#undef CASE
      default:
        name = "(!unknown!)";
        break;
    }
    PrintF("Synchronize %d %s\n", tag, name);
  }
}

template <typename IsolateT>
void Deserializer<IsolateT>::DeserializeDeferredObjects() {
  if (v8_flags.trace_deserialization) {
    PrintF("-- Deferred objects\n");
  }
  for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) {
    SnapshotSpace space = NewObject::Decode(code);
    ReadObject(space);
  }
}

template <typename IsolateT>
void Deserializer<IsolateT>::LogNewMapEvents() {
  if (V8_LIKELY(!v8_flags.log_maps)) return;
  DisallowGarbageCollection no_gc;
  for (DirectHandle<Map> map : new_maps_) {
    DCHECK(v8_flags.log_maps);
    LOG(isolate(), MapCreate(*map));
    LOG(isolate(), MapDetails(*map));
  }
}

template <typename IsolateT>
void Deserializer<IsolateT>::WeakenDescriptorArrays() {
  isolate()->heap()->WeakenDescriptorArrays(std::move(new_descriptor_arrays_));
}

template <typename IsolateT>
void Deserializer<IsolateT>::LogScriptEvents(Tagged<Script> script) {
  DisallowGarbageCollection no_gc;
  LOG(isolate(), ScriptEvent(ScriptEventType::kDeserialize, script->id()));
  LOG(isolate(), ScriptDetails(script));
}

namespace {
template <typename IsolateT>
uint32_t ComputeRawHashField(IsolateT* isolate, Tagged<String> string) {
  // Make sure raw_hash_field() is computed.
  string->EnsureHash(SharedStringAccessGuardIfNeeded(isolate));
  return string->raw_hash_field();
}
}  // namespace

StringTableInsertionKey::StringTableInsertionKey(
    Isolate* isolate, DirectHandle<String> string,
    DeserializingUserCodeOption deserializing_user_code)
    : StringTableKey(ComputeRawHashField(isolate, *string), string->length()),
      string_(string) {
#ifdef DEBUG
  deserializing_user_code_ = deserializing_user_code;
#endif
  DCHECK(IsInternalizedString(*string));
}

StringTableInsertionKey::StringTableInsertionKey(
    LocalIsolate* isolate, DirectHandle<String> string,
    DeserializingUserCodeOption deserializing_user_code)
    : StringTableKey(ComputeRawHashField(isolate, *string), string->length()),
      string_(string) {
#ifdef DEBUG
  deserializing_user_code_ = deserializing_user_code;
#endif
  DCHECK(IsInternalizedString(*string));
}

template <typename IsolateT>
bool StringTableInsertionKey::IsMatch(IsolateT* isolate,
                                      Tagged<String> string) {
  // We want to compare the content of two strings here.
  return string_->SlowEquals(string, SharedStringAccessGuardIfNeeded(isolate));
}
template bool StringTableInsertionKey::IsMatch(Isolate* isolate,
                                               Tagged<String> string);
template bool StringTableInsertionKey::IsMatch(LocalIsolate* isolate,
                                               Tagged<String> string);

namespace {

void NoExternalReferencesCallback() {
  // The following check will trigger if a function or object template
  // with references to native functions has been deserialized from
  // snapshot, but no actual external references were provided when the
  // isolate was created.
  FATAL("No external references provided via API");
}

void PostProcessExternalString(Tagged<ExternalString> string,
                               Isolate* isolate) {
  DisallowGarbageCollection no_gc;
  uint32_t index = string->GetResourceRefForDeserialization();
  Address address =
      static_cast<Address>(isolate->api_external_references()[index]);
  string->InitExternalPointerFields(isolate);
  string->set_address_as_resource(isolate, address);
  isolate->heap()->UpdateExternalString(string, 0,
                                        string->ExternalPayloadSize());
  isolate->heap()->RegisterExternalString(string);
}

}  // namespace
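
// External strings are the one string kind whose character payload lives
// outside the heap, so the snapshot records an index into the embedder's
// api_external_references() array instead of a pointer; the helper above
// turns that index back into a live resource address on the new isolate.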

// Should be called only on the main thread (not thread safe).
template <>
void Deserializer<Isolate>::PostProcessNewJSReceiver(
    Tagged<Map> map, DirectHandle<JSReceiver> obj, InstanceType instance_type,
    SnapshotSpace space) {
  DCHECK_EQ(map->instance_type(), instance_type);

  if (InstanceTypeChecker::IsJSDataView(instance_type) ||
      InstanceTypeChecker::IsJSRabGsabDataView(instance_type)) {
    auto data_view = Cast<JSDataViewOrRabGsabDataView>(*obj);
    auto buffer = Cast<JSArrayBuffer>(data_view->buffer());
    if (buffer->was_detached()) {
      // Directly set the data pointer to point to the EmptyBackingStoreBuffer.
      // Otherwise, we might end up setting it to EmptyBackingStoreBuffer() +
      // byte_offset() which would result in an invalid pointer.
      data_view->set_data_pointer(main_thread_isolate(),
                                  EmptyBackingStoreBuffer());
    } else {
      void* backing_store = buffer->backing_store();
      data_view->set_data_pointer(
          main_thread_isolate(),
          reinterpret_cast<uint8_t*>(backing_store) + data_view->byte_offset());
    }
  } else if (InstanceTypeChecker::IsJSTypedArray(instance_type)) {
    auto typed_array = Cast<JSTypedArray>(*obj);
    // Note: ByteArray objects must not be deferred s.t. they are
    // available here for is_on_heap(). See also: CanBeDeferred.
    // Fixup typed array pointers.
    if (typed_array->is_on_heap()) {
      typed_array->AddExternalPointerCompensationForDeserialization(
          main_thread_isolate());
    } else {
      // Serializer writes backing store ref as a DataPtr() value.
      uint32_t store_index =
          typed_array->GetExternalBackingStoreRefForDeserialization();
      auto backing_store = backing_stores_[store_index];
      void* start = backing_store ? backing_store->buffer_start() : nullptr;
      if (!start) start = EmptyBackingStoreBuffer();
      typed_array->SetOffHeapDataPtr(main_thread_isolate(), start,
                                     typed_array->byte_offset());
    }
  } else if (InstanceTypeChecker::IsJSArrayBuffer(instance_type)) {
    auto buffer = Cast<JSArrayBuffer>(*obj);
    uint32_t store_index = buffer->GetBackingStoreRefForDeserialization();
    buffer->init_extension();
    if (store_index == kEmptyBackingStoreRefSentinel) {
      buffer->set_backing_store(main_thread_isolate(),
                                EmptyBackingStoreBuffer());
    } else {
      auto bs = backing_store(store_index);
      SharedFlag shared =
          bs && bs->is_shared() ? SharedFlag::kShared : SharedFlag::kNotShared;
      DCHECK_IMPLIES(bs,
                     buffer->is_resizable_by_js() == bs->is_resizable_by_js());
      ResizableFlag resizable = bs && bs->is_resizable_by_js()
                                    ? ResizableFlag::kResizable
                                    : ResizableFlag::kNotResizable;
      buffer->Setup(shared, resizable, bs, main_thread_isolate());
    }
  }
}

template <>
void Deserializer<LocalIsolate>::PostProcessNewJSReceiver(
    Tagged<Map> map, DirectHandle<JSReceiver> obj, InstanceType instance_type,
    SnapshotSpace space) {
  UNREACHABLE();
}
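
// JSArrayBuffers, typed arrays and data views all carry raw pointers into
// off-heap backing stores, which cannot be serialized verbatim; the
// post-processing above re-derives each such pointer from the backing-store
// table that ReadOffHeapBackingStore() (further down) populated.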
template <typename IsolateT>
void Deserializer<IsolateT>::PostProcessNewObject(DirectHandle<Map> map,
                                                  Handle<HeapObject> obj,
                                                  SnapshotSpace space) {
  DisallowGarbageCollection no_gc;
  Tagged<Map> raw_map = *map;
  DCHECK_EQ(raw_map, obj->map(isolate_));
  InstanceType instance_type = raw_map->instance_type();
  Tagged<HeapObject> raw_obj = *obj;
  DCHECK_IMPLIES(deserializing_user_code(), should_rehash());
  if (should_rehash()) {
    if (InstanceTypeChecker::IsString(instance_type)) {
      // Uninitialize hash field as we need to recompute the hash.
      Tagged<String> string = Cast<String>(raw_obj);
      string->set_raw_hash_field(String::kEmptyHashField);
      // Rehash strings before read-only space is sealed. Strings outside
      // read-only space are rehashed lazily (e.g. when rehashing dictionaries).
      if (space == SnapshotSpace::kReadOnlyHeap) {
        PushObjectToRehash(obj);
      }
    } else if (raw_obj->NeedsRehashing(instance_type)) {
      PushObjectToRehash(obj);
    }

    if (deserializing_user_code()) {
      if (IsInternalizedString(raw_obj)) {
        // Canonicalize the internalized string. If it already exists in the
        // string table, set the string to point to the existing one and patch
        // the deserialized string handle to point to the existing one.
        // TODO(leszeks): This handle patching is ugly, consider adding an
        // explicit internalized string bytecode. Also, the new thin string
        // should be dead, try immediately freeing it.
        Handle<String> string = Cast<String>(obj);

        StringTableInsertionKey key(
            isolate(), string,
            DeserializingUserCodeOption::kIsDeserializingUserCode);
        Tagged<String> result =
            *isolate()->string_table()->LookupKey(isolate(), &key);

        if (result != raw_obj) {
          Cast<String>(raw_obj)->MakeThin(isolate(), result);
          // Mutate the given object handle so that the backreference entry is
          // also updated.
          obj.PatchValue(result);
        }
        return;
      } else if (InstanceTypeChecker::IsScript(instance_type)) {
        new_scripts_.push_back(Cast<Script>(obj));
      } else if (InstanceTypeChecker::IsAllocationSite(instance_type)) {
        // We should link new allocation sites, but we can't do this immediately
        // because |AllocationSite::HasWeakNext()| internally accesses
        // |Heap::roots_| that may not have been initialized yet. So defer this
        // to |ObjectDeserializer::CommitPostProcessedObjects()|.
        new_allocation_sites_.push_back(Cast<AllocationSite>(obj));
      } else {
        // We don't defer ByteArray because JSTypedArray needs the base_pointer
        // ByteArray immediately if it's on heap.
        DCHECK(CanBeDeferred(*obj, SlotType::kAnySlot) ||
               InstanceTypeChecker::IsByteArray(instance_type));
      }
    }
  }

  if (InstanceTypeChecker::IsInstructionStream(instance_type)) {
    // We flush all code pages after deserializing the startup snapshot.
    // Hence we only remember each individual code object when deserializing
    // user code.
    if (deserializing_user_code()) {
      new_code_objects_.push_back(Cast<InstructionStream>(obj));
    }
  } else if (InstanceTypeChecker::IsCode(instance_type)) {
    Tagged<Code> code = Cast<Code>(raw_obj);
    if (!code->has_instruction_stream()) {
      code->SetInstructionStartForOffHeapBuiltin(
          main_thread_isolate(), EmbeddedData::FromBlob(main_thread_isolate())
                                     .InstructionStartOf(code->builtin_id()));
    } else {
      code->UpdateInstructionStart(main_thread_isolate(),
                                   code->instruction_stream());
    }
  } else if (InstanceTypeChecker::IsSharedFunctionInfo(instance_type)) {
    Tagged<SharedFunctionInfo> sfi = Cast<SharedFunctionInfo>(raw_obj);
    // Reset the id to avoid collisions - it must be unique in this isolate.
    sfi->set_unique_id(isolate()->GetAndIncNextUniqueSfiId());
  } else if (InstanceTypeChecker::IsMap(instance_type)) {
    if (v8_flags.log_maps) {
      // Keep track of all seen Maps to log them later since they might be only
      // partially initialized at this point.
      new_maps_.push_back(Cast<Map>(obj));
    }
  } else if (InstanceTypeChecker::IsAccessorInfo(instance_type)) {
#ifdef USE_SIMULATOR
    accessor_infos_.push_back(Cast<AccessorInfo>(obj));
#endif
  } else if (InstanceTypeChecker::IsFunctionTemplateInfo(instance_type)) {
#ifdef USE_SIMULATOR
    function_template_infos_.push_back(Cast<FunctionTemplateInfo>(obj));
#endif
  } else if (InstanceTypeChecker::IsExternalString(instance_type)) {
    PostProcessExternalString(Cast<ExternalString>(raw_obj),
                              main_thread_isolate());
  } else if (InstanceTypeChecker::IsJSReceiver(instance_type)) {
    // PostProcessNewJSReceiver may trigger GC.
    no_gc.Release();
    return PostProcessNewJSReceiver(raw_map, Cast<JSReceiver>(obj),
                                    instance_type, space);
  } else if (InstanceTypeChecker::IsDescriptorArray(instance_type)) {
    DCHECK(InstanceTypeChecker::IsStrongDescriptorArray(instance_type));
    auto descriptors = Cast<DescriptorArray>(obj);
    new_descriptor_arrays_.Push(*descriptors);
  } else if (InstanceTypeChecker::IsNativeContext(instance_type)) {
    Cast<NativeContext>(raw_obj)->init_microtask_queue(main_thread_isolate(),
                                                       nullptr);
  } else if (InstanceTypeChecker::IsScript(instance_type)) {
    LogScriptEvents(Cast<Script>(*obj));
  }
}
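
// Post-processing is the catch-all for state that cannot be expressed in the
// snapshot byte stream itself: anything isolate-specific (string hashes and
// canonicalization, SFI ids, external pointers, builtin entry points,
// microtask queues) is re-established here right after an object's fields
// have been read.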

template <typename IsolateT>
typename Deserializer<IsolateT>::ReferenceDescriptor
Deserializer<IsolateT>::GetAndResetNextReferenceDescriptor() {
  DCHECK(!(next_reference_is_weak_ && next_reference_is_indirect_pointer_));
  ReferenceDescriptor desc;
  desc.type = next_reference_is_weak_ ? HeapObjectReferenceType::WEAK
                                      : HeapObjectReferenceType::STRONG;
  next_reference_is_weak_ = false;
  desc.is_indirect_pointer = next_reference_is_indirect_pointer_;
  next_reference_is_indirect_pointer_ = false;
  desc.is_protected_pointer = next_reference_is_protected_pointer;
  next_reference_is_protected_pointer = false;
  return desc;
}
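
// The next_reference_is_* flags are set by the prefix bytecodes (kWeakPrefix,
// kIndirectPointerPrefix, kProtectedPointerPrefix) handled further down. Each
// prefix applies to exactly one following reference, which is why this getter
// resets the flags as it consumes them.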

template <typename IsolateT>
Handle<HeapObject> Deserializer<IsolateT>::GetBackReferencedObject() {
  return GetBackReferencedObject(source_.GetUint30());
}

template <typename IsolateT>
Handle<HeapObject> Deserializer<IsolateT>::GetBackReferencedObject(
    uint32_t index) {
  Handle<HeapObject> obj = back_refs_[index];

  // We don't allow ThinStrings in backreferences -- if internalization produces
  // a thin string, then it should also update the backref handle.
  DCHECK(!IsThinString(*obj, isolate()));

  hot_objects_.Add(obj);
  DCHECK(!HasWeakHeapObjectTag(*obj));
  return obj;
}
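
// Back-references index into back_refs_, which records every object in the
// order ReadObject()/ReadMetaMap() allocated it; the serializer emits the
// matching index whenever it refers to an object it has already emitted once.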

template <typename IsolateT>
DirectHandle<HeapObject> Deserializer<IsolateT>::ReadObject() {
  DirectHandle<HeapObject> ret;
  CHECK_EQ(ReadSingleBytecodeData(
               source_.Get(), SlotAccessorForHandle<IsolateT>(&ret, isolate())),
           1);
  return ret;
}

namespace {
AllocationType SpaceToAllocation(SnapshotSpace space) {
  switch (space) {
    case SnapshotSpace::kCode:
      return AllocationType::kCode;
    case SnapshotSpace::kOld:
      return AllocationType::kOld;
    case SnapshotSpace::kReadOnlyHeap:
      return AllocationType::kReadOnly;
    case SnapshotSpace::kTrusted:
      return AllocationType::kTrusted;
  }
}
}  // namespace

template <typename IsolateT>
Handle<HeapObject> Deserializer<IsolateT>::ReadObject(SnapshotSpace space) {
  const int size_in_tagged = source_.GetUint30();
  const int size_in_bytes = size_in_tagged * kTaggedSize;

  // The map can't be a forward ref. If you want the map to be a forward ref,
  // then you're probably serializing the meta-map, in which case you want to
  // use the kNewContextlessMetaMap/kNewContextfulMetaMap bytecode.
  DCHECK_NE(source()->Peek(), kRegisterPendingForwardRef);
  DirectHandle<Map> map = Cast<Map>(ReadObject());

  AllocationType allocation = SpaceToAllocation(space);

  // When sharing a string table, all in-place internalizable and internalized
  // strings are allocated in the shared heap.
  //
  // TODO(12007): When shipping, add a new SharedOld SnapshotSpace.
  if (v8_flags.shared_string_table) {
    InstanceType instance_type = map->instance_type();
    if (InstanceTypeChecker::IsInternalizedString(instance_type) ||
        String::IsInPlaceInternalizable(instance_type)) {
      allocation = isolate()
                       ->factory()
                       ->RefineAllocationTypeForInPlaceInternalizableString(
                           allocation, *map);
    }
  }

  // Filling an object's fields can cause GCs and heap walks, so this object has
  // to be in a 'sufficiently initialised' state by the time the next allocation
  // can happen. For this to be the case, the object is carefully deserialized
  // as follows:
  //   * The space for the object is allocated.
  //   * The map is set on the object so that the GC knows what type the object
  //     has.
  //   * The rest of the object is filled with a fixed Smi value
  //     - This is a Smi so that tagged fields become initialized to a valid
  //       tagged value.
  //     - It's a fixed value, "Smi::uninitialized_deserialization_value()", so
  //       that we can DCHECK for it when reading objects that are assumed to be
  //       partially initialized objects.
  //   * The fields of the object are deserialized in order, under the
  //     assumption that objects are laid out in such a way that any fields
  //     required for object iteration (e.g. length fields) are deserialized
  //     before fields with objects.
  //     - We ensure this is the case by DCHECKing on object allocation that the
  //       previously allocated object has a valid size (see `Allocate`).
  Tagged<HeapObject> raw_obj =
      Allocate(allocation, size_in_bytes, HeapObject::RequiredAlignment(*map));
  raw_obj->set_map_after_allocation(isolate_, *map);
  MemsetTagged(raw_obj->RawField(kTaggedSize),
               Smi::uninitialized_deserialization_value(), size_in_tagged - 1);
  DCHECK(raw_obj->CheckRequiredAlignment(isolate()));

  // Make sure BytecodeArrays have a valid age, so that the marker doesn't
  // break when making them older.
  if (IsSharedFunctionInfo(raw_obj, isolate())) {
    Cast<SharedFunctionInfo>(raw_obj)->set_age(0);
  } else if (IsEphemeronHashTable(raw_obj)) {
    // Make sure EphemeronHashTables have valid HeapObject keys, so that the
    // marker does not break when marking EphemeronHashTable, see
    // MarkingVisitorBase::VisitEphemeronHashTable.
    Tagged<EphemeronHashTable> table = Cast<EphemeronHashTable>(raw_obj);
    MemsetTagged(Cast<HeapObject>(table)->RawField(table->kElementsStartOffset),
                 ReadOnlyRoots(isolate()).undefined_value(),
                 (size_in_bytes - table->kElementsStartOffset) / kTaggedSize);
  }

#ifdef DEBUG
  PtrComprCageBase cage_base(isolate());
  // We want to make sure that all embedder pointers are initialized to null.
  if (IsJSObject(raw_obj, cage_base) &&
      Cast<JSObject>(raw_obj)->MayHaveEmbedderFields()) {
    Tagged<JSObject> js_obj = Cast<JSObject>(raw_obj);
    for (int i = 0; i < js_obj->GetEmbedderFieldCount(); ++i) {
      void* pointer;
      CHECK(EmbedderDataSlot(js_obj, i).ToAlignedPointer(main_thread_isolate(),
                                                         &pointer));
      CHECK_NULL(pointer);
    }
  } else if (IsEmbedderDataArray(raw_obj, cage_base)) {
    Tagged<EmbedderDataArray> array = Cast<EmbedderDataArray>(raw_obj);
    EmbedderDataSlot start(array, 0);
    EmbedderDataSlot end(array, array->length());
    for (EmbedderDataSlot slot = start; slot < end; ++slot) {
      void* pointer;
      CHECK(slot.ToAlignedPointer(main_thread_isolate(), &pointer));
      CHECK_NULL(pointer);
    }
  }
#endif

  Handle<HeapObject> obj = handle(raw_obj, isolate());
  back_refs_.push_back(obj);
  if (v8_flags.trace_deserialization) {
    PrintF(" %*s(set obj backref %u)\n", depth_, "",
           static_cast<int>(back_refs_.size() - 1));
  }

  ReadData(obj, 1, size_in_tagged);
  PostProcessNewObject(map, obj, space);

#ifdef DEBUG
  if (IsInstructionStream(*obj, cage_base)) {
    DCHECK(space == SnapshotSpace::kCode ||
           space == SnapshotSpace::kReadOnlyHeap);
  } else {
    DCHECK_NE(space, SnapshotSpace::kCode);
  }
  if (IsTrustedObject(*obj)) {
    DCHECK_EQ(space, SnapshotSpace::kTrusted);
  } else {
    DCHECK_NE(space, SnapshotSpace::kTrusted);
  }
#endif  // DEBUG

  return obj;
}

template <typename IsolateT>
Handle<HeapObject> Deserializer<IsolateT>::ReadMetaMap(SnapshotSpace space) {
  const int size_in_bytes = Map::kSize;
  const int size_in_tagged = size_in_bytes / kTaggedSize;

  Tagged<HeapObject> raw_obj =
      Allocate(SpaceToAllocation(space), size_in_bytes, kTaggedAligned);
  raw_obj->set_map_after_allocation(isolate_, UncheckedCast<Map>(raw_obj));
  MemsetTagged(raw_obj->RawField(kTaggedSize),
               Smi::uninitialized_deserialization_value(), size_in_tagged - 1);
  DCHECK(raw_obj->CheckRequiredAlignment(isolate()));

  Handle<HeapObject> obj = handle(raw_obj, isolate());
  back_refs_.push_back(obj);
  if (v8_flags.trace_deserialization) {
    PrintF(" %*s(set obj backref %u)\n", depth_, "",
           static_cast<int>(back_refs_.size() - 1));
  }

  // Set the instance-type manually, to allow backrefs to read it.
  UncheckedCast<Map>(*obj)->set_instance_type(MAP_TYPE);

  ReadData(obj, 1, size_in_tagged);
  PostProcessNewObject(Cast<Map>(obj), obj, space);

  return obj;
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadRepeatedRoot(SlotAccessor slot_accessor,
                                             int repeat_count) {
  CHECK_LE(2, repeat_count);

  uint8_t id = source_.Get();
  RootIndex root_index = static_cast<RootIndex>(id);
  if (v8_flags.trace_deserialization) {
    PrintF("%s", RootsTable::name(root_index));
  }
  DCHECK(RootsTable::IsReadOnly(root_index));

  Tagged<HeapObject> heap_object =
      Cast<HeapObject>(isolate()->root(root_index));

  for (int i = 0; i < repeat_count; i++) {
    slot_accessor.Write(heap_object, HeapObjectReferenceType::STRONG, i,
                        SKIP_WRITE_BARRIER);
  }
  return repeat_count;
}
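
// Because ReadRepeatedRoot only materializes read-only roots (see the DCHECK
// above) and read-only objects are never moved or collected, the repeated
// stores can safely skip the write barrier.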

namespace {

// Template used by the below CASE_RANGE macro to statically verify that the
// given number of cases matches the number of expected cases for that
// bytecode.
template <int byte_code_count, int expected>
constexpr uint8_t VerifyBytecodeCount(uint8_t bytecode) {
  static_assert(byte_code_count == expected);
  return bytecode;
}

}  // namespace

// Helper macro (and its implementation detail) for specifying a range of
// cases. Use as "case CASE_RANGE(byte_code, num_bytecodes):"
#define CASE_RANGE(byte_code, num_bytecodes) \
  CASE_R##num_bytecodes(                     \
      (VerifyBytecodeCount<byte_code##Count, num_bytecodes>(byte_code)))
#define CASE_R1(byte_code) byte_code
#define CASE_R2(byte_code) CASE_R1(byte_code) : case CASE_R1(byte_code + 1)
#define CASE_R3(byte_code) CASE_R2(byte_code) : case CASE_R1(byte_code + 2)
#define CASE_R4(byte_code) CASE_R2(byte_code) : case CASE_R2(byte_code + 2)
#define CASE_R8(byte_code) CASE_R4(byte_code) : case CASE_R4(byte_code + 4)
#define CASE_R16(byte_code) CASE_R8(byte_code) : case CASE_R8(byte_code + 8)
#define CASE_R32(byte_code) CASE_R16(byte_code) : case CASE_R16(byte_code + 16)
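
// The CASE_Rn macros expand recursively into a chain of case labels; for
// example "case CASE_RANGE(kHotObject, 8):" becomes
// "case kHotObject: case kHotObject + 1: ... case kHotObject + 7:", and the
// VerifyBytecodeCount instantiation fails to compile if kHotObjectCount ever
// drifts away from 8.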

// This generates a case range for all the spaces.
// clang-format off
#define CASE_RANGE_ALL_SPACES(bytecode)                              \
  SpaceEncoder<bytecode>::Encode(SnapshotSpace::kOld):               \
  case SpaceEncoder<bytecode>::Encode(SnapshotSpace::kCode):         \
  case SpaceEncoder<bytecode>::Encode(SnapshotSpace::kReadOnlyHeap): \
  case SpaceEncoder<bytecode>::Encode(SnapshotSpace::kTrusted)
// clang-format on

template <typename IsolateT>
void Deserializer<IsolateT>::ReadData(Handle<HeapObject> object,
                                      int start_slot_index,
                                      int end_slot_index) {
  int current = start_slot_index;
  while (current < end_slot_index) {
    uint8_t data = source_.Get();
    current += ReadSingleBytecodeData(
        data, SlotAccessorForHeapObject::ForSlotIndex(object, current));
  }
  CHECK_EQ(current, end_slot_index);
}

template <typename IsolateT>
void Deserializer<IsolateT>::ReadData(FullMaybeObjectSlot start,
                                      FullMaybeObjectSlot end) {
  FullMaybeObjectSlot current = start;
  while (current < end) {
    uint8_t data = source_.Get();
    current += ReadSingleBytecodeData(data, SlotAccessorForRootSlots(current));
  }
  CHECK_EQ(current, end);
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadSingleBytecodeData(uint8_t data,
                                                   SlotAccessor slot_accessor) {
  if (v8_flags.trace_deserialization) {
    PrintF("%02x ", data);
  }
  switch (data) {
    case CASE_RANGE_ALL_SPACES(kNewObject):
      return ReadNewObject(data, slot_accessor);
    case kBackref:
      return ReadBackref(data, slot_accessor);
    case kReadOnlyHeapRef:
      return ReadReadOnlyHeapRef(data, slot_accessor);
    case kRootArray:
      return ReadRootArray(data, slot_accessor);
    case kStartupObjectCache:
      return ReadStartupObjectCache(data, slot_accessor);
    case kSharedHeapObjectCache:
      return ReadSharedHeapObjectCache(data, slot_accessor);
    case kNewContextlessMetaMap:
    case kNewContextfulMetaMap:
      return ReadNewMetaMap(data, slot_accessor);
    case kSandboxedExternalReference:
    case kExternalReference:
      return ReadExternalReference(data, slot_accessor);
    case kSandboxedRawExternalReference:
      return ReadRawExternalReference(data, slot_accessor);
    case kAttachedReference:
      return ReadAttachedReference(data, slot_accessor);
    case kNop:
      return 0;
    case kRegisterPendingForwardRef:
      return ReadRegisterPendingForwardRef(data, slot_accessor);
    case kResolvePendingForwardRef:
      return ReadResolvePendingForwardRef(data, slot_accessor);
    case kSynchronize:
      // If we get here then that indicates that you have a mismatch between
      // the number of GC roots when serializing and deserializing.
      UNREACHABLE();
    case kVariableRawData:
      return ReadVariableRawData(data, slot_accessor);
    case kVariableRepeatRoot:
      return ReadVariableRepeatRoot(data, slot_accessor);
    case kOffHeapBackingStore:
    case kOffHeapResizableBackingStore:
      return ReadOffHeapBackingStore(data, slot_accessor);
    case kSandboxedApiReference:
    case kApiReference:
      return ReadApiReference(data, slot_accessor);
    case kClearedWeakReference:
      return ReadClearedWeakReference(data, slot_accessor);
    case kWeakPrefix:
      return ReadWeakPrefix(data, slot_accessor);
    case kIndirectPointerPrefix:
      return ReadIndirectPointerPrefix(data, slot_accessor);
    case kInitializeSelfIndirectPointer:
      return ReadInitializeSelfIndirectPointer(data, slot_accessor);
    case kAllocateJSDispatchEntry:
      return ReadAllocateJSDispatchEntry(data, slot_accessor);
    case kJSDispatchEntry:
      return ReadJSDispatchEntry(data, slot_accessor);
    case kProtectedPointerPrefix:
      return ReadProtectedPointerPrefix(data, slot_accessor);
    case CASE_RANGE(kRootArrayConstants, 32):
      return ReadRootArrayConstants(data, slot_accessor);
    case CASE_RANGE(kHotObject, 8):
      return ReadHotObject(data, slot_accessor);
    case CASE_RANGE(kFixedRawData, 32):
      return ReadFixedRawData(data, slot_accessor);
    case CASE_RANGE(kFixedRepeatRoot, 16):
      return ReadFixedRepeatRoot(data, slot_accessor);

#ifdef DEBUG
#define UNUSED_CASE(byte_code) \
  case byte_code:              \
    UNREACHABLE();
      UNUSED_SERIALIZER_BYTE_CODES(UNUSED_CASE)
#endif
#undef UNUSED_CASE
  }

  // The above switch, including UNUSED_SERIALIZER_BYTE_CODES, covers all
  // possible bytecodes; but, clang doesn't realize this, so we have an
  // explicit UNREACHABLE here too.
  UNREACHABLE();
}
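
// Each Read* handler returns the number of slots it filled in the current
// object: 1 for most references, a count for raw-data and repeat bytecodes,
// and 0 for pure side-effect bytecodes (prefixes, backing stores, forward-ref
// resolution). ReadData() advances its slot cursor by this return value.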

namespace {
const char* SnapshotSpaceName(SnapshotSpace space) {
  switch (space) {
    case SnapshotSpace::kReadOnlyHeap:
      return "ReadOnlyHeap";
    case SnapshotSpace::kOld:
      return "Old";
    case SnapshotSpace::kCode:
      return "Code";
    case SnapshotSpace::kTrusted:
      return "Trusted";
  }
  return "(!unknown space!)";
}
}  // namespace

// Deserialize a new object and write a pointer to it to the current
// object.
template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadNewObject(uint8_t data,
                                          SlotAccessor slot_accessor) {
  SnapshotSpace space = NewObject::Decode(data);
  if (v8_flags.trace_deserialization) {
    PrintF("%*sNewObject [%s]\n", depth_, "", SnapshotSpaceName(space));
    ++depth_;
  }

  // Save the descriptor before recursing down into reading the object.
  ReferenceDescriptor descr = GetAndResetNextReferenceDescriptor();
  DirectHandle<HeapObject> heap_object = ReadObject(space);
  if (v8_flags.trace_deserialization) {
    --depth_;
  }
  return WriteHeapPointer(slot_accessor, heap_object, descr);
}

// Find a recently deserialized object using its offset from the current
// allocation point and write a pointer to it to the current object.
template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadBackref(uint8_t data,
                                        SlotAccessor slot_accessor) {
  uint32_t index = source_.GetUint30();
  DirectHandle<HeapObject> heap_object = GetBackReferencedObject(index);
  if (v8_flags.trace_deserialization) {
    PrintF("%*sBackref [%u]\n", depth_, "", index);
    // Don't print the backref object, since it might still be being
    // initialized.
    // TODO(leszeks): Have some sort of initialization marker on backrefs to
    // allow them to be printed when valid.
  }
  return WriteHeapPointer(slot_accessor, heap_object,
                          GetAndResetNextReferenceDescriptor());
}

// Reference an object in the read-only heap.
template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadReadOnlyHeapRef(uint8_t data,
                                                SlotAccessor slot_accessor) {
  uint32_t chunk_index = source_.GetUint30();
  uint32_t chunk_offset = source_.GetUint30();

  ReadOnlySpace* read_only_space = isolate()->heap()->read_only_space();
  ReadOnlyPageMetadata* page = read_only_space->pages()[chunk_index];
  Address address = page->OffsetToAddress(chunk_offset);
  Tagged<HeapObject> heap_object = HeapObject::FromAddress(address);

  if (v8_flags.trace_deserialization) {
    PrintF("%*sReadOnlyHeapRef [%u, %u] : ", depth_, "", chunk_index,
           chunk_offset);
    ShortPrint(heap_object);
    PrintF("\n");
  }

  return WriteHeapPointer(slot_accessor, heap_object,
                          GetAndResetNextReferenceDescriptor(),
                          SKIP_WRITE_BARRIER);
}

// Find an object in the roots array and write a pointer to it to the
// current object.
template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadRootArray(uint8_t data,
                                          SlotAccessor slot_accessor) {
  int id = source_.GetUint30();
  RootIndex root_index = static_cast<RootIndex>(id);
  DirectHandle<HeapObject> heap_object =
      Cast<HeapObject>(isolate()->root_handle(root_index));

  if (v8_flags.trace_deserialization) {
    PrintF("%*sRootArray [%u] : %s\n", depth_, "", id,
           RootsTable::name(root_index));
  }
  hot_objects_.Add(heap_object);
  return WriteHeapPointer(
      slot_accessor, heap_object, GetAndResetNextReferenceDescriptor(),
      RootsTable::IsReadOnly(root_index) ? SKIP_WRITE_BARRIER
                                         : UPDATE_WRITE_BARRIER);
}

// Find an object in the startup object cache and write a pointer to it to
// the current object.
template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadStartupObjectCache(uint8_t data,
                                                   SlotAccessor slot_accessor) {
  int cache_index = source_.GetUint30();
  // TODO(leszeks): Could we use the address of the startup_object_cache
  // entry as a Handle backing?
  DirectHandle<HeapObject> heap_object = Cast<HeapObject>(
      main_thread_isolate()->startup_object_cache()->at(cache_index));
  if (v8_flags.trace_deserialization) {
    PrintF("%*sStartupObjectCache [%u] : ", depth_, "", cache_index);
    ShortPrint(*heap_object);
    PrintF("\n");
  }
  return WriteHeapPointer(slot_accessor, heap_object,
                          GetAndResetNextReferenceDescriptor());
}

// Find an object in the shared heap object cache and write a pointer to it
// to the current object.
template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadSharedHeapObjectCache(
    uint8_t data, SlotAccessor slot_accessor) {
  int cache_index = source_.GetUint30();
  // TODO(leszeks): Could we use the address of the
  // shared_heap_object_cache entry as a Handle backing?
  DirectHandle<HeapObject> heap_object = Cast<HeapObject>(
      main_thread_isolate()->shared_heap_object_cache()->at(cache_index));
  DCHECK(SharedHeapSerializer::ShouldBeInSharedHeapObjectCache(*heap_object));
  return WriteHeapPointer(slot_accessor, heap_object,
                          GetAndResetNextReferenceDescriptor());
}

// Deserialize a new meta-map and write a pointer to it to the current
// object.
template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadNewMetaMap(uint8_t data,
                                           SlotAccessor slot_accessor) {
  SnapshotSpace space = data == kNewContextlessMetaMap
                            ? SnapshotSpace::kReadOnlyHeap
                            : SnapshotSpace::kOld;
  DirectHandle<HeapObject> heap_object = ReadMetaMap(space);
  if (v8_flags.trace_deserialization) {
    PrintF("%*sNewMetaMap [%s]\n", depth_, "", SnapshotSpaceName(space));
  }
  return slot_accessor.Write(heap_object, HeapObjectReferenceType::STRONG, 0,
                             UPDATE_WRITE_BARRIER);
}

// Find an external reference and write a pointer to it to the current
// object.
template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadExternalReference(uint8_t data,
                                                  SlotAccessor slot_accessor) {
  DCHECK_IMPLIES(data == kSandboxedExternalReference, V8_ENABLE_SANDBOX_BOOL);
  Address address = ReadExternalReferenceCase();
  ExternalPointerTag tag = kExternalPointerNullTag;
  if (data == kSandboxedExternalReference) {
    tag = ReadExternalPointerTag();
  }
  if (v8_flags.trace_deserialization) {
    PrintF("%*sExternalReference [%" PRIxPTR ", %i]\n", depth_, "", address,
           tag);
  }
  return WriteExternalPointer(*slot_accessor.object(),
                              slot_accessor.external_pointer_slot(tag), address,
                              tag);
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadRawExternalReference(
    uint8_t data, SlotAccessor slot_accessor) {
  DCHECK_IMPLIES(data == kSandboxedRawExternalReference,
                 V8_ENABLE_SANDBOX_BOOL);
  Address address;
  source_.CopyRaw(&address, kSystemPointerSize);
  ExternalPointerTag tag = kExternalPointerNullTag;
  if (data == kSandboxedRawExternalReference) {
    tag = ReadExternalPointerTag();
  }
  if (v8_flags.trace_deserialization) {
    PrintF("%*sRawExternalReference [%" PRIxPTR ", %i]\n", depth_, "", address,
           tag);
  }
  return WriteExternalPointer(*slot_accessor.object(),
                              slot_accessor.external_pointer_slot(tag), address,
                              tag);
}

// Find an object in the attached references and write a pointer to it to
// the current object.
template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadAttachedReference(uint8_t data,
                                                  SlotAccessor slot_accessor) {
  int index = source_.GetUint30();
  DirectHandle<HeapObject> heap_object = attached_objects_[index];
  if (v8_flags.trace_deserialization) {
    PrintF("%*sAttachedReference [%u] : ", depth_, "", index);
    ShortPrint(*heap_object);
    PrintF("\n");
  }
  return WriteHeapPointer(slot_accessor, heap_object,
                          GetAndResetNextReferenceDescriptor());
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadRegisterPendingForwardRef(
    uint8_t data, SlotAccessor slot_accessor) {
  ReferenceDescriptor descr = GetAndResetNextReferenceDescriptor();
  unresolved_forward_refs_.emplace_back(slot_accessor.object(),
                                        slot_accessor.offset(), descr);
  num_unresolved_forward_refs_++;
  return 1;
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadResolvePendingForwardRef(
    uint8_t data, SlotAccessor slot_accessor) {
  // Pending forward refs can only be resolved after the heap object's map
  // field is deserialized; currently they only appear immediately after
  // the map field or after the 'self' indirect pointer for trusted objects.
  DCHECK(slot_accessor.offset() == HeapObject::kHeaderSize ||
         slot_accessor.offset() == ExposedTrustedObject::kHeaderSize);
  DirectHandle<HeapObject> obj = slot_accessor.object();
  int index = source_.GetUint30();
  auto& forward_ref = unresolved_forward_refs_[index];
  auto slot = SlotAccessorForHeapObject::ForSlotOffset(forward_ref.object,
                                                       forward_ref.offset);
  WriteHeapPointer(slot, obj, forward_ref.descr);
  num_unresolved_forward_refs_--;
  if (num_unresolved_forward_refs_ == 0) {
    // If there are no more pending fields, clear the entire pending field
    // vector.
    unresolved_forward_refs_.clear();
  } else {
    // Otherwise, at least clear the pending field.
    forward_ref.object = Handle<HeapObject>();
  }
  return 0;
}
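
// Forward references deal with slots that must point at objects which have
// not been deserialized yet: kRegisterPendingForwardRef records the
// (object, offset) of the unresolved slot and moves on, and a later
// kResolvePendingForwardRef, emitted once the target object exists, patches
// that recorded slot with the freshly deserialized object.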

// Deserialize raw data of variable length.
template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadVariableRawData(uint8_t data,
                                                SlotAccessor slot_accessor) {
  // This operation is only supported for tagged-size slots, else we might
  // become misaligned.
  DCHECK_EQ(decltype(slot_accessor.slot())::kSlotDataSize, kTaggedSize);
  int size_in_tagged = source_.GetUint30();
  if (v8_flags.trace_deserialization) {
    PrintF("%*sVariableRawData [%u] :", depth_, "", size_in_tagged);
    for (int i = 0; i < size_in_tagged; ++i) {
      PrintF(" %0*" PRIxTAGGED, kTaggedSize / 2,
             reinterpret_cast<const Tagged_t*>(source_.data())[i]);
    }
    PrintF("\n");
  }
  // TODO(leszeks): Only copy slots when there are Smis in the serialized
  // data.
  source_.CopySlots(slot_accessor.slot().location(), size_in_tagged);
  return size_in_tagged;
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadVariableRepeatRoot(uint8_t data,
                                                   SlotAccessor slot_accessor) {
  int repeats = VariableRepeatRootCount::Decode(source_.GetUint30());
  if (v8_flags.trace_deserialization) {
    PrintF("%*sVariableRepeat [%u] : ", depth_, "", repeats);
  }
  int ret = ReadRepeatedRoot(slot_accessor, repeats);
  if (v8_flags.trace_deserialization) {
    PrintF("\n");
  }
  return ret;
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadOffHeapBackingStore(
    uint8_t data, SlotAccessor slot_accessor) {
  int byte_length = source_.GetUint32();
  if (v8_flags.trace_deserialization) {
    PrintF("%*sOffHeapBackingStore [%d]\n", depth_, "", byte_length);
  }

  std::unique_ptr<BackingStore> backing_store;
  if (data == kOffHeapBackingStore) {
    backing_store = BackingStore::Allocate(main_thread_isolate(), byte_length,
                                           SharedFlag::kNotShared,
                                           InitializedFlag::kUninitialized);
  } else {
    int max_byte_length = source_.GetUint32();
    size_t page_size, initial_pages, max_pages;
    Maybe<bool> result =
        JSArrayBuffer::GetResizableBackingStorePageConfiguration(
            nullptr, byte_length, max_byte_length, kDontThrow, &page_size,
            &initial_pages, &max_pages);
    DCHECK(result.FromJust());
    USE(result);
    backing_store = BackingStore::TryAllocateAndPartiallyCommitMemory(
        main_thread_isolate(), byte_length, max_byte_length, page_size,
        initial_pages, max_pages, WasmMemoryFlag::kNotWasm,
        SharedFlag::kNotShared);
  }
  CHECK_NOT_NULL(backing_store);
  source_.CopyRaw(backing_store->buffer_start(), byte_length);
  backing_stores_.push_back(std::move(backing_store));
  return 0;
}
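
// The backing store's contents are copied straight out of the snapshot byte
// stream, and the store is appended to backing_stores_; objects deserialized
// later (see PostProcessNewJSReceiver above) refer to it by its index in that
// vector, with index 0 reserved for the empty-backing-store sentinel.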

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadApiReference(uint8_t data,
                                             SlotAccessor slot_accessor) {
  DCHECK_IMPLIES(data == kSandboxedApiReference, V8_ENABLE_SANDBOX_BOOL);
  uint32_t reference_id = static_cast<uint32_t>(source_.GetUint30());
  Address address;
  if (main_thread_isolate()->api_external_references()) {
    DCHECK_WITH_MSG(reference_id < num_api_references_,
                    "too few external references provided through the API");
    address = static_cast<Address>(
        main_thread_isolate()->api_external_references()[reference_id]);
  } else {
    address = reinterpret_cast<Address>(NoExternalReferencesCallback);
  }
  ExternalPointerTag tag = kExternalPointerNullTag;
  if (data == kSandboxedApiReference) {
    tag = ReadExternalPointerTag();
  }
  if (v8_flags.trace_deserialization) {
    PrintF("%*sApiReference [%" PRIxPTR ", %i]\n", depth_, "", address, tag);
  }
  return WriteExternalPointer(*slot_accessor.object(),
                              slot_accessor.external_pointer_slot(tag), address,
                              tag);
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadClearedWeakReference(
    uint8_t data, SlotAccessor slot_accessor) {
  if (v8_flags.trace_deserialization) {
    PrintF("%*sClearedWeakReference\n", depth_, "");
  }
  return slot_accessor.Write(ClearedValue(isolate()), 0, SKIP_WRITE_BARRIER);
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadWeakPrefix(uint8_t data,
                                           SlotAccessor slot_accessor) {
  if (v8_flags.trace_deserialization) {
    PrintF("%*sWeakPrefix\n", depth_, "");
  }
  // We shouldn't have two weak prefixes in a row.
  DCHECK(!next_reference_is_weak_);
  // We shouldn't have weak refs without a current object.
  DCHECK_NE(slot_accessor.object()->address(), kNullAddress);
  next_reference_is_weak_ = true;
  return 0;
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadIndirectPointerPrefix(
    uint8_t data, SlotAccessor slot_accessor) {
  if (v8_flags.trace_deserialization) {
    PrintF("%*sIndirectPointerPrefix\n", depth_, "");
  }
  // We shouldn't have two indirect pointer prefixes in a row.
  DCHECK(!next_reference_is_indirect_pointer_);
  // We shouldn't have an indirect pointer prefix without a current object.
  DCHECK_NE(slot_accessor.object()->address(), kNullAddress);
  next_reference_is_indirect_pointer_ = true;
  return 0;
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadInitializeSelfIndirectPointer(
    uint8_t data, SlotAccessor slot_accessor) {
#ifdef V8_ENABLE_SANDBOX
  DCHECK_NE(slot_accessor.object()->address(), kNullAddress);
  DCHECK(IsExposedTrustedObject(*slot_accessor.object()));
  DCHECK_EQ(slot_accessor.offset(),
            ExposedTrustedObject::kSelfIndirectPointerOffset);

  Tagged<ExposedTrustedObject> host =
      Cast<ExposedTrustedObject>(*slot_accessor.object());
  host->init_self_indirect_pointer(isolate());

  return 1;
#else
  UNREACHABLE();
#endif  // V8_ENABLE_SANDBOX
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadAllocateJSDispatchEntry(
    uint8_t data, SlotAccessor slot_accessor) {
#ifdef V8_ENABLE_LEAPTIERING
  DCHECK_NE(slot_accessor.object()->address(), kNullAddress);
  DirectHandle<HeapObject> host = slot_accessor.object();

  uint32_t parameter_count = source_.GetUint30();

  if (v8_flags.trace_deserialization) {
    PrintF("%*sAllocateJSDispatchEntry [%u]\n", depth_, "", parameter_count);
  }

  DirectHandle<Code> code = Cast<Code>(ReadObject());

  JSDispatchTable::Space* space =
      isolate()->GetJSDispatchTableSpaceFor(host->address());
  JSDispatchHandle handle =
      isolate()->factory()->NewJSDispatchHandle(parameter_count, code, space);
  js_dispatch_entries_.push_back(handle);
  host->Relaxed_WriteField<JSDispatchHandle::underlying_type>(
      slot_accessor.offset(), handle.value());

  return 1;
#else
  UNREACHABLE();
#endif  // V8_ENABLE_LEAPTIERING
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadJSDispatchEntry(uint8_t data,
                                                SlotAccessor slot_accessor) {
#ifdef V8_ENABLE_LEAPTIERING
  DCHECK_NE(slot_accessor.object()->address(), kNullAddress);
  DirectHandle<HeapObject> host = slot_accessor.object();
  uint32_t entry_id = source_.GetUint30();
  DCHECK_LT(entry_id, js_dispatch_entries_.size());

  if (v8_flags.trace_deserialization) {
    PrintF("%*sJSDispatchEntry [%u]\n", depth_, "", entry_id);
  }

  JSDispatchHandle handle = js_dispatch_entries_[entry_id];

  host->Relaxed_WriteField<JSDispatchHandle::underlying_type>(
      slot_accessor.offset(), handle.value());

  return 1;
#else
  UNREACHABLE();
#endif  // V8_ENABLE_LEAPTIERING
}
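
// kAllocateJSDispatchEntry and kJSDispatchEntry mirror the back-reference
// scheme for dispatch-table entries: the first occurrence allocates an entry
// and appends its handle to js_dispatch_entries_, and later occurrences reuse
// the handle by index, so several functions can share one dispatch entry
// across the snapshot.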

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadProtectedPointerPrefix(
    uint8_t data, SlotAccessor slot_accessor) {
  // We shouldn't have two protected pointer prefixes in a row.
  DCHECK(!next_reference_is_protected_pointer);
  // We shouldn't have a protected pointer prefix without a current object.
  DCHECK_NE(slot_accessor.object()->address(), kNullAddress);
  next_reference_is_protected_pointer = true;
  return 0;
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadRootArrayConstants(uint8_t data,
                                                   SlotAccessor slot_accessor) {
  // The first kRootArrayConstantsCount roots are guaranteed to be in
  // the old space.
  static_assert(static_cast<int>(RootIndex::kFirstImmortalImmovableRoot) == 0);
  static_assert(kRootArrayConstantsCount <=
                static_cast<int>(RootIndex::kLastImmortalImmovableRoot));

  RootIndex root_index = RootArrayConstant::Decode(data);
  DirectHandle<HeapObject> heap_object =
      Cast<HeapObject>(isolate()->root_handle(root_index));
  if (v8_flags.trace_deserialization) {
    PrintF("%*sRootArrayConstants [%u] : %s\n", depth_, "",
           static_cast<int>(root_index), RootsTable::name(root_index));
  }
  return slot_accessor.Write(heap_object, HeapObjectReferenceType::STRONG, 0,
                             SKIP_WRITE_BARRIER);
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadHotObject(uint8_t data,
                                          SlotAccessor slot_accessor) {
  int index = HotObject::Decode(data);
  DirectHandle<HeapObject> hot_object = hot_objects_.Get(index);
  if (v8_flags.trace_deserialization) {
    PrintF("%*sHotObject [%u] : ", depth_, "", index);
    ShortPrint(*hot_object);
    PrintF("\n");
  }
  return WriteHeapPointer(slot_accessor, hot_object,
                          GetAndResetNextReferenceDescriptor());
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadFixedRawData(uint8_t data,
                                             SlotAccessor slot_accessor) {
  using TSlot = decltype(slot_accessor.slot());

  // Deserialize raw data of fixed length from 1 to 32 times kTaggedSize.
  int size_in_tagged = FixedRawDataWithSize::Decode(data);
  static_assert(TSlot::kSlotDataSize == kTaggedSize ||
                TSlot::kSlotDataSize == 2 * kTaggedSize);
  int size_in_slots = size_in_tagged / (TSlot::kSlotDataSize / kTaggedSize);
  // kFixedRawData can have kTaggedSize != TSlot::kSlotDataSize when
  // serializing Smi roots in pointer-compressed builds. In this case, the
  // size in bytes is unconditionally the (full) slot size.
  DCHECK_IMPLIES(kTaggedSize != TSlot::kSlotDataSize, size_in_slots == 1);
  if (v8_flags.trace_deserialization) {
    PrintF("%*sFixedRawData [%u] :", depth_, "", size_in_tagged);
    for (int i = 0; i < size_in_tagged; ++i) {
      PrintF(" %0*" PRIxTAGGED, kTaggedSize / 2,
             reinterpret_cast<const Tagged_t*>(source_.data())[i]);
    }
    PrintF("\n");
  }
  // TODO(leszeks): Only copy slots when there are Smis in the serialized
  // data.
  source_.CopySlots(slot_accessor.slot().location(), size_in_slots);
  return size_in_slots;
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadFixedRepeatRoot(uint8_t data,
                                                SlotAccessor slot_accessor) {
  int repeats = FixedRepeatRootWithCount::Decode(data);
  if (v8_flags.trace_deserialization) {
    PrintF("%*sFixedRepeat [%u] : ", depth_, "", repeats);
  }
  int ret = ReadRepeatedRoot(slot_accessor, repeats);
  if (v8_flags.trace_deserialization) {
    PrintF("\n");
  }
  return ret;
}

#undef CASE_RANGE_ALL_SPACES
#undef CASE_RANGE
#undef CASE_R32
#undef CASE_R16
#undef CASE_R8
#undef CASE_R4
#undef CASE_R3
#undef CASE_R2
#undef CASE_R1

template <typename IsolateT>
Address Deserializer<IsolateT>::ReadExternalReferenceCase() {
  uint32_t reference_id = static_cast<uint32_t>(source_.GetUint30());
  return main_thread_isolate()->external_reference_table()->address(
      reference_id);
}

template <typename IsolateT>
ExternalPointerTag Deserializer<IsolateT>::ReadExternalPointerTag() {
  return static_cast<ExternalPointerTag>(source_.GetUint30());
}

template <typename IsolateT>
Tagged<HeapObject> Deserializer<IsolateT>::Allocate(
    AllocationType allocation, int size, AllocationAlignment alignment) {
#ifdef DEBUG
  if (!previous_allocation_obj_.is_null()) {
    // Make sure that the previous object is initialized sufficiently to
    // be iterated over by the GC.
    int object_size = previous_allocation_obj_->Size(isolate_);
    DCHECK_LE(object_size, previous_allocation_size_);
  }
#endif

  Tagged<HeapObject> obj =
      HeapObject::FromAddress(isolate()->heap()->AllocateRawOrFail(
          size, allocation, AllocationOrigin::kRuntime, alignment));

#ifdef DEBUG
  previous_allocation_obj_ = direct_handle(obj, isolate());
  previous_allocation_size_ = size;
#endif

  return obj;
}
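
// The DEBUG-only bookkeeping above enforces the invariant described in
// ReadObject(): by the time the next allocation happens, the previously
// allocated object must already report a valid size no larger than the space
// reserved for it, i.e. it is sufficiently initialized for a GC heap walk.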

template class EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE) Deserializer<Isolate>;
template class EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
    Deserializer<LocalIsolate>;

}  // namespace v8::internal
int WriteProtectedPointerTo(Tagged< TrustedObject > value, WriteBarrierMode mode)
int Write(DirectHandle< HeapObject > value, HeapObjectReferenceType ref_type, int slot_offset, WriteBarrierMode mode)
Handle< HeapObject > object() const
DirectHandle< HeapObject > * handle_
int WriteIndirectPointerTo(Tagged< HeapObject > value, WriteBarrierMode mode)
int Write(Tagged< HeapObject > value, HeapObjectReferenceType ref_type, int slot_offset, WriteBarrierMode mode)
int Write(Tagged< MaybeObject > value, int slot_offset, WriteBarrierMode mode)
SlotAccessorForHandle(DirectHandle< HeapObject > *handle, IsolateT *isolate)
ExternalPointerSlot external_pointer_slot(ExternalPointerTag tag) const
Handle< HeapObject > object() const
static SlotAccessorForHeapObject ForSlotIndex(Handle< HeapObject > object, int index)
int Write(Tagged< MaybeObject > value, int slot_offset, WriteBarrierMode mode)
SlotAccessorForHeapObject(Handle< HeapObject > object, int offset)
int Write(DirectHandle< HeapObject > value, HeapObjectReferenceType ref_type, int slot_offset, WriteBarrierMode mode)
int WriteIndirectPointerTo(Tagged< HeapObject > value, WriteBarrierMode mode)
ExternalPointerSlot external_pointer_slot(ExternalPointerTag tag) const
int WriteProtectedPointerTo(Tagged< TrustedObject > value, WriteBarrierMode mode)
static SlotAccessorForHeapObject ForSlotOffset(Handle< HeapObject > object, int offset)
const Handle< HeapObject > object_
int Write(Tagged< HeapObject > value, HeapObjectReferenceType ref_type, int slot_offset, WriteBarrierMode mode)
int WriteIndirectPointerTo(Tagged< HeapObject > value, WriteBarrierMode mode)
FullMaybeObjectSlot slot() const
SlotAccessorForRootSlots(FullMaybeObjectSlot slot)
int Write(Tagged< MaybeObject > value, int slot_offset, WriteBarrierMode mode)
int Write(Tagged< HeapObject > value, HeapObjectReferenceType ref_type, int slot_offset, WriteBarrierMode mode)
int WriteProtectedPointerTo(Tagged< TrustedObject > value, WriteBarrierMode mode)
int Write(DirectHandle< HeapObject > value, HeapObjectReferenceType ref_type, int slot_offset, WriteBarrierMode mode)
ExternalPointerSlot external_pointer_slot(ExternalPointerTag tag) const
Handle< HeapObject > object() const
static constexpr Tagged< Smi > uninitialized_deserialization_value()
Definition smi.h:114
bool IsMatch(IsolateT *isolate, Tagged< String > string)
StringTableInsertionKey(Isolate *isolate, DirectHandle< String > string, DeserializingUserCodeOption deserializing_user_code)
DirectHandle< String > LookupKey(IsolateT *isolate, StringTableKey *key)
static bool IsInPlaceInternalizable(Tagged< String > string)
static void ForProtectedPointer(Tagged< TrustedObject > host, ProtectedPointerSlot slot, Tagged< TrustedObject > value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static void ForIndirectPointer(Tagged< HeapObject > host, IndirectPointerSlot slot, Tagged< HeapObject > value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static void ForValue(Tagged< HeapObject > host, MaybeObjectSlot slot, Tagged< T > value, WriteBarrierMode mode)
#define V8_ENABLE_SANDBOX_BOOL
Definition globals.h:160
DirectHandle< String > string_
Handle< String > source_
Definition compiler.cc:3791
int start
int end
#define PRIxTAGGED
#define CASE_RANGE_ALL_SPACES(bytecode)
#define CASE_RANGE(byte_code, num_bytecodes)
#define EXPORT_TEMPLATE_DEFINE(export)
Isolate * isolate
std::map< const std::string, const std::string > map
ZoneVector< RpoNumber > & result
#define LOG(isolate, Call)
Definition log.h:78
InstructionOperand source
V8_INLINE constexpr bool IsExternalString(InstanceType instance_type)
V8_INLINE constexpr bool IsInternalizedString(InstanceType instance_type)
V8_INLINE IndirectHandle< T > handle(Tagged< T > object, Isolate *isolate)
Definition handles-inl.h:72
static V8_INLINE bool HasWeakHeapObjectTag(const Tagged< Object > value)
Definition objects.h:653
constexpr int kTaggedSize
Definition globals.h:542
PerThreadAssertScopeDebugOnly< false, SAFEPOINTS_ASSERT, HEAP_ALLOCATION_ASSERT > DisallowGarbageCollection
@ SKIP_WRITE_BARRIER
Definition objects.h:52
@ UPDATE_WRITE_BARRIER
Definition objects.h:55
void PrintF(const char *format,...)
Definition utils.cc:39
Tagged(T object) -> Tagged< T >
constexpr int kMaxUInt16
Definition globals.h:382
void MemsetTagged(Tagged_t *start, Tagged< MaybeObject > value, size_t counter)
Definition slots-inl.h:486
Address Tagged_t
Definition globals.h:547
V8_INLINE DirectHandle< T > direct_handle(Tagged< T > object, Isolate *isolate)
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in name
Definition flags.cc:2086
Handle< To > UncheckedCast(Handle< From > value)
Definition handles-inl.h:55
constexpr int kSystemPointerSize
Definition globals.h:410
constexpr ExternalPointerHandle kNullExternalPointerHandle
constexpr JSDispatchHandle kNullJSDispatchHandle(0)
V8_INLINE void * EmptyBackingStoreBuffer()
Definition sandbox.h:345
void ShortPrint(Tagged< Object > obj, FILE *out)
Definition objects.cc:1865
constexpr int kExternalPointerSlotSize
Definition globals.h:613
Tagged< ClearedWeakValue > ClearedValue(PtrComprCageBase cage_base)
@ kExternalPointerNullTag
V8_EXPORT_PRIVATE FlagValues v8_flags
static V8_INLINE constexpr bool IsManagedExternalPointerType(ExternalPointerTagRange tag_range)
uint32_t ExternalPointerHandle
return value
Definition map-inl.h:893
static constexpr Address kNullAddress
Definition v8-internal.h:53
V8_INLINE IndirectPointerTag IndirectPointerTagFromInstanceType(InstanceType instance_type)
V8_INLINE constexpr bool FastInReadOnlySpaceOrSmallSmi(Tagged_t obj)
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
#define JS_DISPATCH_HANDLE_WRITE_BARRIER(object, handle)
#define UNUSED_SERIALIZER_BYTE_CODES(V)
#define FATAL(...)
Definition logging.h:47
#define DCHECK_LE(v1, v2)
Definition logging.h:490
#define CHECK(condition)
Definition logging.h:124
#define DCHECK_WITH_MSG(condition, msg)
Definition logging.h:182
#define CHECK_LE(lhs, rhs)
#define DCHECK_NOT_NULL(val)
Definition logging.h:492
#define CHECK_NULL(val)
#define CHECK_NOT_NULL(val)
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define USE(...)
Definition macros.h:293
#define V8_EXPORT_PRIVATE
Definition macros.h:460
#define V8_STATIC_ROOTS_BOOL
Definition v8config.h:1001
#define V8_LIKELY(condition)
Definition v8config.h:661
#define ROOT_ID_LIST(V)
Definition visitors.h:20