v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
read-only-deserializer.cc
Go to the documentation of this file.
1// Copyright 2018 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
8#include "src/heap/heap-inl.h"
12#include "src/objects/slots.h"
16
17namespace v8 {
18namespace internal {
19
21 public:
22 static void Deserialize(Isolate* isolate, SnapshotByteSource* source) {
24 }
25
26 private:
28
30 : source_(source), isolate_(isolate) {}
31
33 while (true) {
34 int bytecode_as_int = source_->Get();
35 DCHECK_LT(bytecode_as_int, ro::kNumberOfBytecodes);
36 switch (static_cast<Bytecode>(bytecode_as_int)) {
37 case Bytecode::kAllocatePage:
38 AllocatePage(false);
39 break;
40 case Bytecode::kAllocatePageAt:
41 AllocatePage(true);
42 break;
43 case Bytecode::kSegment:
45 break;
46 case Bytecode::kRelocateSegment:
47 UNREACHABLE(); // Handled together with kSegment.
48 case Bytecode::kReadOnlyRootsTable:
50 break;
51 case Bytecode::kFinalizeReadOnlySpace:
53 return;
54 }
55 }
56 }
57
58 void AllocatePage(bool fixed_offset) {
59 CHECK_EQ(V8_STATIC_ROOTS_BOOL, fixed_offset);
60 size_t expected_page_index = static_cast<size_t>(source_->GetUint30());
61 size_t actual_page_index = static_cast<size_t>(-1);
62 size_t area_size_in_bytes = static_cast<size_t>(source_->GetUint30());
63 if (fixed_offset) {
64#ifdef V8_COMPRESS_POINTERS
65 uint32_t compressed_page_addr = source_->GetUint32();
66 Address pos = isolate_->cage_base() + compressed_page_addr;
67 actual_page_index = ro_space()->AllocateNextPageAt(pos);
68#else
70#endif // V8_COMPRESS_POINTERS
71 } else {
72 actual_page_index = ro_space()->AllocateNextPage();
73 }
74 CHECK_EQ(actual_page_index, expected_page_index);
76 area_size_in_bytes);
77 }
78
80 uint32_t page_index = source_->GetUint30();
81 ReadOnlyPageMetadata* page = PageAt(page_index);
82
83 // Copy over raw contents.
84 Address start = page->area_start() + source_->GetUint30();
85 int size_in_bytes = source_->GetUint30();
86 CHECK_LE(start + size_in_bytes, page->area_end());
87 source_->CopyRaw(reinterpret_cast<void*>(start), size_in_bytes);
88
90 uint8_t relocate_marker_bytecode = source_->Get();
91 CHECK_EQ(relocate_marker_bytecode, Bytecode::kRelocateSegment);
92 int tagged_slots_size_in_bits = size_in_bytes / kTaggedSize;
93 // The const_cast is unfortunate, but we promise not to mutate data.
94 uint8_t* data =
95 const_cast<uint8_t*>(source_->data() + source_->position());
96 ro::BitSet tagged_slots(data, tagged_slots_size_in_bits);
98 source_->Advance(static_cast<int>(tagged_slots.size_in_bytes()));
99 }
100 }
101
103 ReadOnlyPageMetadata* page = PageAt(encoded.page_index);
104 return page->OffsetToAddress(encoded.offset * kTaggedSize);
105 }
106
108 const ro::BitSet& tagged_slots) {
110 for (size_t i = 0; i < tagged_slots.size_in_bits(); i++) {
111 // TODO(jgruber): Depending on sparseness, different iteration methods
112 // could be more efficient.
113 if (!tagged_slots.contains(static_cast<int>(i))) continue;
114 Address slot_addr = segment_start + i * kTaggedSize;
115 Address obj_addr = Decode(ro::EncodedTagged::FromAddress(slot_addr));
116 Address obj_ptr = obj_addr + kHeapObjectTag;
117
118 Tagged_t* dst = reinterpret_cast<Tagged_t*>(slot_addr);
121 : static_cast<Tagged_t>(obj_ptr);
122 }
123 }
124
125 ReadOnlyPageMetadata* PageAt(size_t index) const {
126 DCHECK_LT(index, ro_space()->pages().size());
127 return ro_space()->pages()[index];
128 }
129
131 ReadOnlyRoots roots(isolate_);
134 } else {
135 for (size_t i = 0; i < ReadOnlyRoots::kEntriesCount; i++) {
136 uint32_t encoded_as_int = source_->GetUint32();
137 Address rudolf = Decode(ro::EncodedTagged::FromUint32(encoded_as_int));
138 roots.read_only_roots_[i] = rudolf + kHeapObjectTag;
139 }
140 }
141 }
142
146
149};
150
152 const SnapshotData* data,
153 bool can_rehash)
154 : Deserializer(isolate, data->Payload(), data->GetMagicNumber(), false,
155 can_rehash) {}
156
158 base::ElapsedTimer timer;
159 if (V8_UNLIKELY(v8_flags.profile_deserialization)) timer.Start();
160 NestedTimedHistogramScope histogram_timer(
161 isolate()->counters()->snapshot_deserialize_rospace());
162 HandleScope scope(isolate());
163
165 ReadOnlyHeap* ro_heap = isolate()->read_only_heap();
168
169 ReadOnlyRoots roots(isolate());
171#ifdef DEBUG
172 roots.VerifyTypes();
173 roots.VerifyNameForProtectors();
174#endif
175
176 if (should_rehash()) {
178 Rehash();
179 }
180
181 if (V8_UNLIKELY(v8_flags.profile_deserialization)) {
182 // ATTENTION: The Memory.json benchmark greps for this exact output. Do not
183 // change it without also updating Memory.json.
184 const int bytes = source()->length();
185 const double ms = timer.Elapsed().InMillisecondsF();
186 PrintF("[Deserializing read-only space (%d bytes) took %0.3f ms]\n", bytes,
187 ms);
188 }
189}
190
192 // The following check will trigger if a function or object template with
193 // references to native functions have been deserialized from snapshot, but
194 // no actual external references were provided when the isolate was created.
195 FATAL("No external references provided via API");
196}
197
199 public:
// Captures the isolate and a copy of its EmbeddedData (obtained via
// EmbeddedData::FromBlob) up front, so that per-object post-processing
// does not have to re-derive it for every object.
// NOTE(review): presumably the embedded blob holds the off-heap builtin
// code this post-processor patches Code objects against — confirm.
 200 explicit ObjectPostProcessor(Isolate* isolate)
 201 : isolate_(isolate), embedded_data_(EmbeddedData::FromBlob(isolate_)) {}
202
203 void Finalize() {
204#ifdef V8_ENABLE_SANDBOX
205 std::vector<ReadOnlyArtifacts::ExternalPointerRegistryEntry> registry;
206 registry.reserve(external_pointer_slots_.size());
207 for (auto& slot : external_pointer_slots_) {
208 registry.emplace_back(slot.Relaxed_LoadHandle(), slot.load(isolate_),
209 slot.exact_tag());
210 }
211
213 std::move(registry));
214#endif // V8_ENABLE_SANDBOX
215 }
216#define POST_PROCESS_TYPE_LIST(V) \
217 V(AccessorInfo) \
218 V(InterceptorInfo) \
219 V(JSExternalObject) \
220 V(FunctionTemplateInfo) \
221 V(Code) \
222 V(SharedFunctionInfo)
223
225 InstanceType instance_type) {
226 DCHECK_EQ(o->map(isolate_)->instance_type(), instance_type);
227#define V(TYPE) \
228 if (InstanceTypeChecker::Is##TYPE(instance_type)) { \
229 return PostProcess##TYPE(Cast<TYPE>(o)); \
230 }
232#undef V
233 // If we reach here, no postprocessing is needed for this object.
234 }
235#undef POST_PROCESS_TYPE_LIST
236
237 private:
238 Address GetAnyExternalReferenceAt(int index, bool is_api_reference) const {
239 if (is_api_reference) {
240 const intptr_t* refs = isolate_->api_external_references();
241 Address address =
242 refs == nullptr
243 ? reinterpret_cast<Address>(NoExternalReferencesCallback)
244 : static_cast<Address>(refs[index]);
245 DCHECK_NE(address, kNullAddress);
246 return address;
247 }
248 // Note we allow `address` to be kNullAddress since some of our tests
249 // rely on this (e.g. when testing an incompletely initialized ER table).
251 }
252
254 ExternalPointerSlot slot) {
255 // Constructing no_gc here is not the intended use pattern (instead we
256 // should pass it along the entire callchain); but there's little point of
257 // doing that here - all of the code in this file relies on GC being
258 // disabled, and that's guarded at entry points.
262 Address slot_value =
263 GetAnyExternalReferenceAt(encoded.index, encoded.is_api_reference);
264 DCHECK(slot.ExactTagIsKnown());
265 slot.init(isolate_, host, slot_value, slot.exact_tag());
266#ifdef V8_ENABLE_SANDBOX
267 // Register these slots during deserialization s.t. later isolates (which
268 // share the RO space we are currently deserializing) can properly
269 // initialize their external pointer table RO space. Note that slot values
270 // are only fully finalized at the end of deserialization, thus we only
271 // register the slot itself now and read the handle/value in Finalize.
272 external_pointer_slots_.emplace_back(slot);
273#endif // V8_ENABLE_SANDBOX
274 }
276 ExternalPointerSlot slot) {
277 // Constructing no_gc here is not the intended use pattern (instead we
278 // should pass it along the entire callchain); but there's little point of
279 // doing that here - all of the code in this file relies on GC being
280 // disabled, and that's guarded at entry points.
284 Address slot_value =
285 GetAnyExternalReferenceAt(encoded.index, encoded.is_api_reference);
286 DCHECK(slot.ExactTagIsKnown());
287 if (slot_value == kNullAddress) {
289 } else {
290 slot.init(isolate_, host, slot_value, slot.exact_tag());
291#ifdef V8_ENABLE_SANDBOX
292 // Register these slots during deserialization s.t. later isolates (which
293 // share the RO space we are currently deserializing) can properly
294 // initialize their external pointer table RO space. Note that slot values
295 // are only fully finalized at the end of deserialization, thus we only
296 // register the slot itself now and read the handle/value in Finalize.
297 external_pointer_slots_.emplace_back(slot);
298#endif // V8_ENABLE_SANDBOX
299 }
300 }
303 o, o->RawExternalPointerField(AccessorInfo::kSetterOffset,
305 DecodeExternalPointerSlot(o, o->RawExternalPointerField(
306 AccessorInfo::kMaybeRedirectedGetterOffset,
308 if (USE_SIMULATOR_BOOL) o->init_getter_redirection(isolate_);
309 }
311 const bool is_named = o->is_named();
312
313#define PROCESS_FIELD(Name, name) \
314 DecodeLazilyInitializedExternalPointerSlot( \
315 o, o->RawExternalPointerField( \
316 InterceptorInfo::k##Name##Offset, \
317 is_named ? kApiNamedProperty##Name##CallbackTag \
318 : kApiIndexedProperty##Name##CallbackTag));
319
321#undef PROCESS_FIELD
322 }
325 o, o->RawExternalPointerField(JSExternalObject::kValueOffset,
327 }
330 o, o->RawExternalPointerField(
331 FunctionTemplateInfo::kMaybeRedirectedCallbackOffset,
333 if (USE_SIMULATOR_BOOL) o->init_callback_redirection(isolate_);
334 }
336 o->init_self_indirect_pointer(isolate_);
337 o->wrapper()->set_code(o);
338 // RO space only contains builtin Code objects which don't have an
339 // attached InstructionStream.
340 DCHECK(o->is_builtin());
341 DCHECK(!o->has_instruction_stream());
342 o->SetInstructionStartForOffHeapBuiltin(
343 isolate_,
344 EmbeddedData::FromBlob(isolate_).InstructionStartOf(o->builtin_id()));
345 }
347 // Reset the id to avoid collisions - it must be unique in this isolate.
348 o->set_unique_id(isolate_->GetAndIncNextUniqueSfiId());
349 }
350
353
354#ifdef V8_ENABLE_SANDBOX
355 std::vector<ExternalPointerSlot> external_pointer_slots_;
356#endif // V8_ENABLE_SANDBOX
357};
358
360 // Since we are not deserializing individual objects we need to scan the
361 // heap and search for objects that need post-processing.
362 //
363 // See also Deserializer<IsolateT>::PostProcessNewObject.
364 PtrComprCageBase cage_base(isolate());
365#ifdef V8_COMPRESS_POINTERS
366 ExternalPointerTable::UnsealReadOnlySegmentScope unseal_scope(
367 &isolate()->external_pointer_table());
368#endif // V8_COMPRESS_POINTERS
369 ObjectPostProcessor post_processor(isolate());
370 ReadOnlyHeapObjectIterator it(isolate()->read_only_heap());
371 for (Tagged<HeapObject> o = it.Next(); !o.is_null(); o = it.Next()) {
372 const InstanceType instance_type = o->map(cage_base)->instance_type();
373 if (should_rehash()) {
374 if (InstanceTypeChecker::IsString(instance_type)) {
376 str->set_raw_hash_field(Name::kEmptyHashField);
378 } else if (o->NeedsRehashing(instance_type)) {
380 }
381 }
382
383 post_processor.PostProcessIfNeeded(o, instance_type);
384 }
385 post_processor.Finalize();
386}
387
388} // namespace internal
389} // namespace v8
#define INTERCEPTOR_INFO_CALLBACK_LIST(V)
SourcePosition pos
void PushObjectToRehash(DirectHandle< HeapObject > object)
static EmbeddedData FromBlob()
void init(IsolateForSandbox isolate, Tagged< HeapObject > host, Address value, ExternalPointerTag tag)
Definition slots-inl.h:205
uint32_t GetContentAsIndexAfterDeserialization(const DisallowGarbageCollection &no_gc)
Definition slots-inl.h:293
ExternalPointerTag exact_tag() const
Definition slots.h:419
void InitializeHashSeed()
Definition heap.cc:5889
ReadOnlyHeap * read_only_heap() const
Definition isolate.h:1201
ReadOnlyArtifacts * read_only_artifacts() const
Definition isolate.h:774
ExternalReferenceTable * external_reference_table_unsafe()
Definition isolate.h:1278
Address cage_base() const
Definition isolate.h:1213
uint32_t GetAndIncNextUniqueSfiId()
Definition isolate.h:1823
static constexpr int kEmptyHashField
Definition name.h:133
void PostProcessFunctionTemplateInfo(Tagged< FunctionTemplateInfo > o)
void PostProcessJSExternalObject(Tagged< JSExternalObject > o)
void PostProcessAccessorInfo(Tagged< AccessorInfo > o)
void PostProcessInterceptorInfo(Tagged< InterceptorInfo > o)
void PostProcessSharedFunctionInfo(Tagged< SharedFunctionInfo > o)
Address GetAnyExternalReferenceAt(int index, bool is_api_reference) const
void DecodeLazilyInitializedExternalPointerSlot(Tagged< HeapObject > host, ExternalPointerSlot slot)
void DecodeExternalPointerSlot(Tagged< HeapObject > host, ExternalPointerSlot slot)
V8_INLINE void PostProcessIfNeeded(Tagged< HeapObject > o, InstanceType instance_type)
void set_external_pointer_registry(std::vector< ExternalPointerRegistryEntry > &&registry)
ReadOnlyDeserializer(Isolate *isolate, const SnapshotData *data, bool can_rehash)
void DecodeTaggedSlots(Address segment_start, const ro::BitSet &tagged_slots)
Address Decode(ro::EncodedTagged encoded) const
ReadOnlyHeapImageDeserializer(Isolate *isolate, SnapshotByteSource *source)
ReadOnlyPageMetadata * PageAt(size_t index) const
static void Deserialize(Isolate *isolate, SnapshotByteSource *source)
ReadOnlySpace * read_only_space() const
void InitFromStaticRootsTable(Address cage_base)
Definition roots.cc:115
static constexpr size_t kEntriesCount
Definition roots.h:711
V8_INLINE void VerifyNameForProtectorsPages() const
Definition roots-inl.h:136
size_t AllocateNextPageAt(Address pos)
void InitializePageForDeserialization(ReadOnlyPageMetadata *page, size_t area_size_in_bytes)
const std::vector< ReadOnlyPageMetadata * > & pages() const
void CopyRaw(void *to, int number_of_bytes)
V8_INLINE constexpr bool is_null() const
Definition tagged.h:502
static V8_INLINE Tagged_t CompressObject(Address tagged)
#define COMPRESS_POINTERS_BOOL
Definition globals.h:99
#define USE_SIMULATOR_BOOL
Definition globals.h:73
int start
Isolate * isolate
static constexpr int kNumberOfBytecodes
constexpr int kTaggedSize
Definition globals.h:542
void PrintF(const char *format,...)
Definition utils.cc:39
Address Tagged_t
Definition globals.h:547
V8_INLINE DirectHandle< T > direct_handle(Tagged< T > object, Isolate *isolate)
const int kHeapObjectTag
Definition v8-internal.h:72
@ kExternalObjectValueTag
@ kFunctionTemplateInfoCallbackTag
V8_EXPORT_PRIVATE FlagValues v8_flags
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in allocation gc speed threshold for starting incremental marking via a task in percent of available threshold for starting incremental marking immediately in percent of available Use a single schedule for determining a marking schedule between JS and C objects schedules the minor GC task with kUserVisible priority max worker number of concurrent for NumberOfWorkerThreads start background threads that allocate memory 
concurrent_array_buffer_sweeping use parallel threads to clear weak refs in the atomic pause trace progress of the incremental marking trace object counts and memory usage report a tick only when allocated zone memory changes by this amount TracingFlags::gc_stats TracingFlags::gc_stats track native contexts that are expected to be garbage collected verify heap pointers before and after GC memory reducer runs GC with ReduceMemoryFootprint flag Maximum number of memory reducer GCs scheduled Old gen GC speed is computed directly from gc tracer counters Perform compaction on full GCs based on V8 s default heuristics Perform compaction on every full GC Perform code space compaction when finalizing a full GC with stack Stress GC compaction to flush out bugs with moving objects flush of baseline code when it has not been executed recently Use time base code flushing instead of age Use a progress bar to scan large objects in increments when incremental marking is active force incremental marking for small heaps and run it more often force marking at random points between and force scavenge at random points between and reclaim otherwise unreachable unmodified wrapper objects when possible less compaction in non memory reducing mode use high priority threads for concurrent Marking Test mode only flag It allows an unit test to select evacuation candidates pages(requires --stress_compaction).") DEFINE_BOOL(cppheap_incremental_marking
static constexpr Address kNullAddress
Definition v8-internal.h:53
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
#define PROCESS_FIELD(Name, name)
#define POST_PROCESS_TYPE_LIST(V)
const Address segment_start
ro::BitSet tagged_slots
#define FATAL(...)
Definition logging.h:47
#define CHECK_LE(lhs, rhs)
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
static EncodedExternalReference FromUint32(uint32_t v)
static EncodedTagged FromAddress(Address address)
#define V8_STATIC_ROOTS_BOOL
Definition v8config.h:1001
#define V8_INLINE
Definition v8config.h:500
#define V8_UNLIKELY(condition)
Definition v8config.h:660