v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
read-only-promotion.cc
// Copyright 2023 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/read-only-promotion.h"

#include <unordered_set>

#include "src/heap/heap.h"
#include "src/utils/ostreams.h"

namespace v8 {
namespace internal {
namespace {

// Convenience aliases:
using HeapObjectSet = std::unordered_set<Tagged<HeapObject>, Object::Hasher,
                                         Object::KeyEqualSafe>;
using HeapObjectMap = std::unordered_map<Tagged<HeapObject>, Tagged<HeapObject>,
                                         Object::Hasher, Object::KeyEqualSafe>;
using HeapObjectList = std::vector<Tagged<HeapObject>>;

bool Contains(const HeapObjectSet& s, Tagged<HeapObject> o) {
  return s.count(o) != 0;
}
bool Contains(const HeapObjectMap& s, Tagged<HeapObject> o) {
  return s.count(o) != 0;
}

class Committee final {
 public:
  static HeapObjectList DeterminePromotees(
      Isolate* isolate, const DisallowGarbageCollection& no_gc,
      const SafepointScope& safepoint_scope) {
    return Committee(isolate).DeterminePromotees(safepoint_scope);
  }

 private:
  explicit Committee(Isolate* isolate)
      : isolate_(isolate), ref_encoder_(isolate) {}

  const ExternalReferenceEncoder& ref_encoder() const { return ref_encoder_; }

  HeapObjectList DeterminePromotees(const SafepointScope& safepoint_scope) {
    DCHECK(promo_accepted_.empty());
    DCHECK(promo_rejected_.empty());

    // List of promotees in discovery (i.e. insertion) order. The
    // `HeapObjectIterator` visits pages in their GC insertion order, which is
    // deterministic and independent of the absolute page addresses handed out
    // by the OS allocator in 32-bit builds (which don't use pointer cages).
    //
    // We keep a separate HeapObjectList since the standard library has no
    // equivalent of a hash set that maintains insertion order.
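    //
    // For illustration only (this sketch is not part of the original file):
    // the usual idiom, which the deduplication loop below also uses, pairs a
    // vector with a hash set so membership tests stay O(1) while iteration
    // order stays deterministic:
    //
    //   std::vector<Tagged<HeapObject>> order;  // preserves insertion order
    //   HeapObjectSet seen;                     // fast membership test
    //   if (seen.insert(o).second) order.push_back(o);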
    HeapObjectList promo_accepted_list;

    // We assume that a full and precise GC has reclaimed all dead objects
    // and therefore that no filtering of unreachable objects is required here.
    HeapObjectIterator it(isolate_->heap(), safepoint_scope);
    for (Tagged<HeapObject> o = it.Next(); !o.is_null(); o = it.Next()) {

      // Note that cycles prevent us from promoting/rejecting each subgraph as
      // we visit it, since locally we cannot determine whether the deferred
      // decision on the 'cycle object' will be 'promote' or 'reject'. This
      // could be solved if necessary (with more complex code), but for now
      // there are no performance issues.
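      // (Illustrative example, not in the original source: given a cycle
      // A -> B -> A, while evaluating B we cannot yet tell whether A will be
      // accepted or rejected, so the verdict for the whole cycle is deferred
      // to the outermost EvaluateSubgraph call on A.)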
      HeapObjectSet accepted_subgraph;  // Either all are accepted or none.
      HeapObjectList accepted_subgraph_list;
      HeapObjectSet visited;  // Cycle detection.
      if (!EvaluateSubgraph(o, &accepted_subgraph, &visited,
                            &accepted_subgraph_list)) {
        continue;
      }
      if (accepted_subgraph.empty()) {
        continue;
      }

      if (V8_UNLIKELY(v8_flags.trace_read_only_promotion)) {
        LogAcceptedPromotionSet(accepted_subgraph);
      }
      promo_accepted_.insert(accepted_subgraph.begin(),
                             accepted_subgraph.end());
      promo_accepted_list.insert(promo_accepted_list.end(),
                                 accepted_subgraph_list.begin(),
                                 accepted_subgraph_list.end());
    }

    // Remove duplicates from the promo_accepted_list. Note we have to jump
    // through these hoops in order to preserve deterministic ordering
    // (otherwise simply using the promo_accepted_ set would be sufficient).
    HeapObjectSet seen_promotees;
    HeapObjectList promotees;
    promotees.reserve(promo_accepted_list.size());
    for (Tagged<HeapObject> o : promo_accepted_list) {
      if (Contains(seen_promotees, o)) continue;
      seen_promotees.insert(o);
      promotees.push_back(o);
    }
    CHECK_EQ(promotees.size(), promo_accepted_.size());

    return promotees;
  }

  // Returns `false` if the subgraph rooted at `o` is rejected.
  // Returns `true` if it is accepted, or if we've reached a cycle and `o`
  // will be processed further up the call chain.
  bool EvaluateSubgraph(Tagged<HeapObject> o, HeapObjectSet* accepted_subgraph,
                        HeapObjectSet* visited, HeapObjectList* promotees) {
    if (HeapLayout::InReadOnlySpace(o)) return true;
    if (Contains(promo_rejected_, o)) return false;
    if (Contains(promo_accepted_, o)) return true;
    if (Contains(*visited, o)) return true;
    visited->insert(o);
    if (!IsPromoCandidate(this, isolate_, o)) {
      const auto& [it, inserted] = promo_rejected_.insert(o);
      if (V8_UNLIKELY(v8_flags.trace_read_only_promotion) && inserted) {
        LogRejectedPromotionForFailedPredicate(o);
      }
      return false;
    }
    // Recurse into outgoing pointers.
    CandidateVisitor v(this, accepted_subgraph, visited, promotees);
    VisitObject(isolate_, o, &v);
    if (!v.all_slots_are_promo_candidates()) {
      const auto& [it, inserted] = promo_rejected_.insert(o);
      if (V8_UNLIKELY(v8_flags.trace_read_only_promotion) && inserted) {
        LogRejectedPromotionForInvalidSubgraph(o,
                                               v.first_rejected_slot_offset());
      }
      if (Tagged<TemplateInfo> info; TryCast<TemplateInfo>(o, &info)) {
        CHECK_WITH_MSG(!info->should_promote_to_read_only(),
                       "v8::Template was asked to be promoted to "
                       "read only space but it wasn't possible. "
                       "Use --trace-read-only-promotion for debugging.");
      }
      return false;
    }

    accepted_subgraph->insert(o);
    promotees->push_back(o);
    return true;
  }
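
  // (Editorial note, not in the original source: EvaluateSubgraph is in
  // effect a memoized depth-first search. promo_accepted_/promo_rejected_
  // cache final verdicts across all roots, while `visited` only guards the
  // current traversal against cycles; the `true` returned for a visited but
  // still-undecided object is provisional and is finalized further up the
  // call chain.)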

#define PROMO_CANDIDATE_TYPE_LIST(V) \
  V(AccessCheckInfo)                 \
  V(AccessorInfo)                    \
  V(Code)                            \
  V(CodeWrapper)                     \
  V(JSExternalObject)                \
  V(FunctionTemplateInfo)            \
  V(InterceptorInfo)                 \
  V(ScopeInfo)                       \
  V(SharedFunctionInfo)              \
  V(Symbol)
  // TODO(jgruber): Don't forget to extend ReadOnlyPromotionImpl::Verify when
  // adding new object types here.

  static bool IsPromoCandidate(Committee* committee, Isolate* isolate,
                               Tagged<HeapObject> o) {
    const InstanceType itype = o->map(isolate)->instance_type();
#define V(TYPE)                                                       \
  if (InstanceTypeChecker::Is##TYPE(itype)) {                         \
    return IsPromoCandidate##TYPE(committee, isolate, Cast<TYPE>(o)); \
    /* NOLINTNEXTLINE(readability/braces) */                          \
  } else
    PROMO_CANDIDATE_TYPE_LIST(V)
    /* if { ... } else */ {
      return false;
    }
#undef V
    UNREACHABLE();
  }
#undef PROMO_CANDIDATE_TYPE_LIST
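
  // For illustration only (assumed macro expansion, not in the original
  // file): inside IsPromoCandidate, PROMO_CANDIDATE_TYPE_LIST(V) unrolls
  // into a dispatch chain of the form
  //
  //   if (InstanceTypeChecker::IsAccessCheckInfo(itype)) {
  //     return IsPromoCandidateAccessCheckInfo(committee, isolate,
  //                                            Cast<AccessCheckInfo>(o));
  //   } else if (InstanceTypeChecker::IsAccessorInfo(itype)) {
  //     ...
  //   } else {
  //     return false;
  //   }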

#define DEF_PROMO_CANDIDATE(Type)                                             \
  static bool IsPromoCandidate##Type(Committee* committee, Isolate* isolate, \
                                     Tagged<Type> o) {                       \
    return true;                                                             \
  }

  DEF_PROMO_CANDIDATE(AccessCheckInfo)
  DEF_PROMO_CANDIDATE(AccessorInfo)

  static bool IsPromoCandidateJSExternalObject(Committee* committee,
                                               Isolate* isolate,
                                               Tagged<JSExternalObject> o) {
    // Check if the external pointer value is serializable.
    DCHECK(IsNull(o->map()->map()->native_context_or_null()));
    Address address = reinterpret_cast<Address>(o->value());
    auto maybe_index = committee->ref_encoder().TryEncode(address);
    return !maybe_index.IsNothing();
  }
  static bool IsPromoCandidateFunctionTemplateInfo(
      Committee* committee, Isolate* isolate, Tagged<FunctionTemplateInfo> o) {
    // This flag is set explicitly by the embedder by calling
    // v8::FunctionTemplate::SealAndPrepareForPromotionToReadOnly(..).
    return o->should_promote_to_read_only();
  }
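
  // Embedder-side sketch (assumed usage, not part of this file): the flag
  // tested above would be set roughly like
  //
  //   v8::Local<v8::FunctionTemplate> tmpl =
  //       v8::FunctionTemplate::New(v8_isolate, callback);
  //   tmpl->SealAndPrepareForPromotionToReadOnly();
  //
  // after which the template becomes eligible for RO-space promotion.
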
  static bool IsPromoCandidateCode(Committee* committee, Isolate* isolate,
                                   Tagged<Code> o) {
    return Builtins::kCodeObjectsAreInROSpace && o->is_builtin();
  }
  static bool IsPromoCandidateCodeWrapper(Committee* committee,
                                          Isolate* isolate,
                                          Tagged<CodeWrapper> o) {
    return IsPromoCandidateCode(committee, isolate, o->code(isolate));
  }
  DEF_PROMO_CANDIDATE(InterceptorInfo)
  DEF_PROMO_CANDIDATE(ScopeInfo)
  static bool IsPromoCandidateSharedFunctionInfo(Committee* committee,
                                                 Isolate* isolate,
                                                 Tagged<SharedFunctionInfo> o) {
    // Only internal SFIs are guaranteed to remain immutable.
    if (o->has_script(kAcquireLoad)) return false;
    // kIllegal is used for js_global_object_function, which is created during
    // bootstrapping but never rooted. We currently assume that all objects in
    // the snapshot are live. But RO space is 1) not GC'd and 2) serialized
    // verbatim, preserving dead objects. As a workaround, exclude this builtin
    // id from RO allocation.
    // TODO(jgruber): A better solution. Remove the liveness assumption (see
    // test-heap-profiler.cc)? Overwrite dead RO objects with fillers
    // pre-serialization? Implement a RO GC pass pre-serialization?
    if (o->HasBuiltinId() && o->builtin_id() != Builtin::kIllegal) {
      return true;
    }
    // Api functions are good candidates for promotion.
    if (o->IsApiFunction()) return true;
    return false;
  }
  DEF_PROMO_CANDIDATE(Symbol)

#undef DEF_PROMO_CANDIDATE

  // Recurses into all tagged slots of an object and tracks whether predicates
  // failed on any part of the subgraph.
  class CandidateVisitor : public ObjectVisitor {
   public:
    CandidateVisitor(Committee* committee, HeapObjectSet* accepted_subgraph,
                     HeapObjectSet* visited, HeapObjectList* promotees)
        : committee_(committee),
          accepted_subgraph_(accepted_subgraph),
          visited_(visited),
          promotees_(promotees) {}

    int first_rejected_slot_offset() const {
      return first_rejected_slot_offset_;
    }
    bool all_slots_are_promo_candidates() const {
      return first_rejected_slot_offset_ == -1;
    }

    void VisitPointers(Tagged<HeapObject> host, MaybeObjectSlot start,
                       MaybeObjectSlot end) final {
      if (!all_slots_are_promo_candidates()) return;
      for (MaybeObjectSlot slot = start; slot < end; slot++) {
        Tagged<MaybeObject> maybe_object = slot.load(committee_->isolate_);
        Tagged<HeapObject> heap_object;
        if (!maybe_object.GetHeapObject(&heap_object)) continue;
        if (!committee_->EvaluateSubgraph(heap_object, accepted_subgraph_,
                                          visited_, promotees_)) {
          first_rejected_slot_offset_ =
              static_cast<int>(slot.address() - host.address());
          DCHECK_GE(first_rejected_slot_offset_, 0);
          return;
        }
      }
    }
    void VisitPointers(Tagged<HeapObject> host, ObjectSlot start,
                       ObjectSlot end) final {
      VisitPointers(host, MaybeObjectSlot(start), MaybeObjectSlot(end));
    }
    void VisitInstructionStreamPointer(Tagged<Code> host,
                                       InstructionStreamSlot slot) final {
      DCHECK(host->is_builtin());
    }
    void VisitMapPointer(Tagged<HeapObject> host) final {
      MaybeObjectSlot slot = host->RawMaybeWeakField(HeapObject::kMapOffset);
      VisitPointers(host, slot, slot + 1);
    }

   private:
    Committee* const committee_;
    HeapObjectSet* const accepted_subgraph_;
    HeapObjectSet* const visited_;
    HeapObjectList* const promotees_;
    int first_rejected_slot_offset_ = -1;
  };

  static void LogAcceptedPromotionSet(const HeapObjectSet& os) {
    std::cout << "ro-promotion: accepted set {";
    for (Tagged<HeapObject> o : os) {
      std::cout << reinterpret_cast<void*>(o.ptr()) << ", ";
    }
    std::cout << "}\n";
  }

  static void LogRejectedPromotionForFailedPredicate(Tagged<HeapObject> o) {
    std::cout << "ro-promotion: rejected due to failed predicate "
              << reinterpret_cast<void*>(o.ptr()) << " ("
              << o->map()->instance_type() << ")"
              << "\n";
  }

  void LogRejectedPromotionForInvalidSubgraph(Tagged<HeapObject> o,
                                              int first_rejected_slot_offset) {
    std::cout << "ro-promotion: rejected due to rejected subgraph "
              << reinterpret_cast<void*>(o.ptr()) << " ("
              << o->map()->instance_type() << ")"
              << " at slot offset " << first_rejected_slot_offset << " ";

    MaybeObjectSlot slot = o->RawMaybeWeakField(first_rejected_slot_offset);
    Tagged<MaybeObject> maybe_object = slot.load(isolate_);
    Tagged<HeapObject> heap_object;
    if (maybe_object.GetHeapObject(&heap_object)) {
      std::cout << reinterpret_cast<void*>(heap_object.ptr()) << " ("
                << heap_object->map()->instance_type() << ")"
                << "\n";
    } else {
      std::cout << "<cleared weak object>\n";
    }
  }

  Isolate* const isolate_;
  ExternalReferenceEncoder ref_encoder_;
  HeapObjectSet promo_accepted_;
  HeapObjectSet promo_rejected_;
};

class ReadOnlyPromotionImpl final : public AllStatic {
 public:
  static void CopyToReadOnlyHeap(
      Isolate* isolate, const std::vector<Tagged<HeapObject>>& promotees,
      HeapObjectMap* moves) {
    ReadOnlySpace* rospace = isolate->heap()->read_only_space();
    for (Tagged<HeapObject> src : promotees) {
      const int size = src->Size(isolate);
      Tagged<HeapObject> dst =
          rospace->AllocateRaw(size, kTaggedAligned).ToObjectChecked();
      Heap::CopyBlock(dst.address(), src.address(), size);
      moves->emplace(src, dst);

      if (V8_UNLIKELY(v8_flags.trace_read_only_promotion_verbose)) {
        LogPromotedObject(src, dst);
      }
    }
  }
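  // (Editorial note, not in the original source: Heap::CopyBlock copies raw
  // bytes, so the new RO-space copies, and everything else on the heap, still
  // point at the old mutable-space addresses at this point. The `moves` map
  // recorded above is exactly what UpdatePointers() below consumes to rewrite
  // every such reference.)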

  static void UpdatePointers(Isolate* isolate,
                             const SafepointScope& safepoint_scope,
                             const HeapObjectMap& moves) {
    Heap* heap = isolate->heap();
#ifdef V8_COMPRESS_POINTERS
    ExternalPointerTable::UnsealReadOnlySegmentScope unseal_scope(
        &isolate->external_pointer_table());
#endif  // V8_COMPRESS_POINTERS
    UpdatePointersVisitor v(isolate, &moves);

    // Iterate all roots.
    EmbedderStackStateScope stack_scope(
        heap, EmbedderStackStateOrigin::kExplicitInvocation,
        StackState::kNoHeapPointers);
    heap->IterateRoots(&v, base::EnumSet<SkipRoot>{});

    // Iterate all objects on the mutable heap.
    // We assume that a full and precise GC has reclaimed all dead objects
    // and therefore that no filtering of unreachable objects is required here.
    HeapObjectIterator it(heap, safepoint_scope);
    for (Tagged<HeapObject> o = it.Next(); !o.is_null(); o = it.Next()) {
      VisitObject(isolate, o, &v);
    }

    // Iterate all objects we just copied into RO space.
    for (auto [src, dst] : moves) {
      VisitObject(isolate, dst, &v);
    }

#ifdef V8_ENABLE_LEAPTIERING
    // Iterate all entries in the JSDispatchTable as they could contain
    // pointers to promoted Code objects.
    JSDispatchTable* const jdt = IsolateGroup::current()->js_dispatch_table();
    jdt->IterateActiveEntriesIn(
        heap->js_dispatch_table_space(), [&](JSDispatchHandle handle) {
          Tagged<Code> old_code = jdt->GetCode(handle);
          auto it = moves.find(old_code);
          if (it == moves.end()) return;
          Tagged<HeapObject> new_code = it->second;
          CHECK(IsCode(new_code));
          // TODO(saelo): is it worth logging something in this case?
          jdt->SetCodeNoWriteBarrier(handle, Cast<Code>(new_code));
        });
#endif  // V8_ENABLE_LEAPTIERING
  }

  static void DeleteDeadObjects(Isolate* isolate,
                                const SafepointScope& safepoint_scope,
                                const HeapObjectMap& moves) {
    // After moving a source object to a new destination, overwrite the source
    // memory with a filler. This is needed for moved objects that are verified
    // by the heap verifier to have a 1-1 relation with some other object (e.g.
    // objects related to trusted space). The verifier won't compute liveness
    // and instead just iterates linearly over pages. Without this change the
    // verifier would fail on this now-dead object.
    for (auto [src, dst] : moves) {
      USE(dst);
      isolate->heap()->CreateFillerObjectAt(src.address(), src->Size(isolate));
    }
  }

  static void Verify(Isolate* isolate, const SafepointScope& safepoint_scope) {
#ifdef DEBUG
    // Verify that certain objects were promoted as expected.
    //
    // Known objects.
    Heap* heap = isolate->heap();
    CHECK(HeapLayout::InReadOnlySpace(
        heap->promise_all_resolve_element_closure_shared_fun()));
    CHECK(HeapLayout::InReadOnlySpace(heap->error_stack_getter_fun_template()));
    CHECK(HeapLayout::InReadOnlySpace(heap->error_stack_setter_fun_template()));

    // TODO(jgruber): Extend here with more objects as they are added to
    // the promotion algorithm.

    // Builtin Code objects.
    if (Builtins::kCodeObjectsAreInROSpace) {
      Builtins* builtins = isolate->builtins();
      for (int i = 0; i < Builtins::kBuiltinCount; i++) {
        CHECK(HeapLayout::InReadOnlySpace(
            builtins->code(static_cast<Builtin>(i))));
      }
    }
#endif  // DEBUG
  }

 private:
  class UpdatePointersVisitor final : public ObjectVisitor, public RootVisitor {
   public:
    UpdatePointersVisitor(Isolate* isolate, const HeapObjectMap* moves)
        : isolate_(isolate), moves_(moves) {
#ifdef V8_ENABLE_SANDBOX
      for (auto [_src, dst] : *moves_) {
        promoted_objects_.emplace(dst);
        if (IsCode(dst)) {
          PromoteCodePointerEntryFor(Cast<Code>(dst));
        }
      }
#endif  // V8_ENABLE_SANDBOX
    }

    // The RootVisitor interface.
    void VisitRootPointers(Root root, const char* description,
                           FullObjectSlot start, FullObjectSlot end) final {
      for (FullObjectSlot slot = start; slot < end; slot++) {
        ProcessSlot(root, slot);
      }
    }

    // The ObjectVisitor interface.
    void VisitPointers(Tagged<HeapObject> host, MaybeObjectSlot start,
                       MaybeObjectSlot end) final {
      for (MaybeObjectSlot slot = start; slot < end; slot++) {
        ProcessSlot(host, slot);
      }
    }
    void VisitPointers(Tagged<HeapObject> host, ObjectSlot start,
                       ObjectSlot end) final {
      VisitPointers(host, MaybeObjectSlot(start), MaybeObjectSlot(end));
    }
    void VisitInstructionStreamPointer(Tagged<Code> host,
                                       InstructionStreamSlot slot) final {
      // InstructionStream objects never move to RO space.
    }
    void VisitMapPointer(Tagged<HeapObject> host) final {
      ProcessSlot(host, host->RawMaybeWeakField(HeapObject::kMapOffset));
    }
    void VisitExternalPointer(Tagged<HeapObject> host,
                              ExternalPointerSlot slot) final {
#ifdef V8_ENABLE_SANDBOX
      if (promoted_objects_.find(host) == promoted_objects_.end()) return;

      // If we reach here, `host` is a moved object with external pointer slots
      // located in RO space. To preserve the 1:1 relation between slots and
      // table entries, allocate a new entry (in
      // read_only_external_pointer_space) now.
      RecordProcessedSlotIfDebug(slot.address());
      Address slot_value = slot.load(isolate_);
      DCHECK(slot.ExactTagIsKnown());
      slot.init(isolate_, host, slot_value, slot.exact_tag());

      if (V8_UNLIKELY(v8_flags.trace_read_only_promotion_verbose)) {
        LogUpdatedExternalPointerTableEntry(host, slot, slot_value);
      }
#endif  // V8_ENABLE_SANDBOX
    }
    void VisitIndirectPointer(Tagged<HeapObject> host, IndirectPointerSlot slot,
                              IndirectPointerMode mode) final {
#ifdef V8_ENABLE_SANDBOX
      if (slot.tag() == kCodeIndirectPointerTag) {
        VisitCodePointer(host, slot);
      }
#endif  // V8_ENABLE_SANDBOX
    }
    void VisitTrustedPointerTableEntry(Tagged<HeapObject> host,
                                       IndirectPointerSlot slot) final {
#ifdef V8_ENABLE_SANDBOX
      if (slot.tag() == kCodeIndirectPointerTag) {
        VisitCodePointer(host, slot);
      }
#endif  // V8_ENABLE_SANDBOX
    }
    void VisitRootPointers(Root root, const char* description,
                           OffHeapObjectSlot start,
                           OffHeapObjectSlot end) override {
      // We shouldn't have moved any string table contents or SharedStructType
      // registry contents (which is what OffHeapObjectSlot currently refers
      // to).
      for (OffHeapObjectSlot slot = start; slot < end; slot++) {
        Tagged<Object> o = slot.load(isolate_);
        if (!IsHeapObject(o)) continue;
        CHECK(!Contains(*moves_, Cast<HeapObject>(o)));
      }
    }

   private:
    void ProcessSlot(Root root, FullObjectSlot slot) {
      Tagged<Object> old_slot_value_obj = slot.load(isolate_);
#ifdef V8_ENABLE_DIRECT_HANDLE
      if (old_slot_value_obj.ptr() == kTaggedNullAddress) return;
#endif
      if (!IsHeapObject(old_slot_value_obj)) return;
      Tagged<HeapObject> old_slot_value = Cast<HeapObject>(old_slot_value_obj);
      auto it = moves_->find(old_slot_value);
      if (it == moves_->end()) return;
      Tagged<HeapObject> new_slot_value = it->second;
      slot.store(new_slot_value);
      if (V8_UNLIKELY(v8_flags.trace_read_only_promotion_verbose)) {
        LogUpdatedPointer(root, slot, old_slot_value, new_slot_value);
      }
    }
    void ProcessSlot(Tagged<HeapObject> host, MaybeObjectSlot slot) {
      Tagged<HeapObject> old_slot_value;
      if (!slot.load(isolate_).GetHeapObject(&old_slot_value)) return;
      auto it = moves_->find(old_slot_value);
      if (it == moves_->end()) return;
      Tagged<HeapObject> new_slot_value = it->second;
      slot.store(new_slot_value);
      if (V8_UNLIKELY(v8_flags.trace_read_only_promotion_verbose)) {
        LogUpdatedPointer(host, slot, old_slot_value, new_slot_value);
      }
    }

#ifdef V8_ENABLE_SANDBOX
    void VisitCodePointer(Tagged<HeapObject> host, IndirectPointerSlot slot) {
      CHECK_EQ(kCodeIndirectPointerTag, slot.tag());
      IndirectPointerHandle old_handle = slot.Relaxed_LoadHandle();
      auto it = code_pointer_moves_.find(old_handle);
      if (it == code_pointer_moves_.end()) return;

      // If we reach here, `host` is a moved object with a code pointer slot
      // located in RO space. To preserve the 1:1 relation between slots and
      // table entries, we need to use the relocated code pointer table entry.
      RecordProcessedSlotIfDebug(slot.address());
      IndirectPointerHandle new_handle = it->second;
      slot.Relaxed_StoreHandle(new_handle);

      if (V8_UNLIKELY(v8_flags.trace_read_only_promotion_verbose)) {
        LogUpdatedCodePointerTableEntry(host, slot, old_handle, new_handle);
      }
    }

    void PromoteCodePointerEntryFor(Tagged<Code> code) {
      // If we reach here, `code` is a moved Code object located in RO space.

      IndirectPointerSlot slot = code->RawIndirectPointerField(
          Code::kSelfIndirectPointerOffset, kCodeIndirectPointerTag);
      CodeEntrypointTag entrypoint_tag = code->entrypoint_tag();

      IndirectPointerHandle old_handle = slot.Relaxed_LoadHandle();
      CodePointerTable* cpt = IsolateGroup::current()->code_pointer_table();

      // To preserve the 1:1 relation between slots and code table entries,
      // allocate a new entry (in the code_pointer_space of the RO heap) now.
      // The slot will be updated later, when the Code object is visited.
      CodePointerTable::Space* space =
          IsolateForSandbox(isolate_).GetCodePointerTableSpaceFor(
              slot.address());
      IndirectPointerHandle new_handle = cpt->AllocateAndInitializeEntry(
          space, code.address(), cpt->GetEntrypoint(old_handle, entrypoint_tag),
          entrypoint_tag);

      code_pointer_moves_.emplace(old_handle, new_handle);

      if (V8_UNLIKELY(v8_flags.trace_read_only_promotion_verbose)) {
        LogPromotedCodePointerTableEntry(code, old_handle, new_handle);
      }
    }
#endif  // V8_ENABLE_SANDBOX

    void LogUpdatedPointer(Root root, FullObjectSlot slot,
                           Tagged<HeapObject> old_slot_value,
                           Tagged<HeapObject> new_slot_value) {
      std::cout << "ro-promotion: updated pointer {root "
                << static_cast<int>(root) << " slot "
                << reinterpret_cast<void*>(slot.address()) << " from "
                << reinterpret_cast<void*>(old_slot_value.ptr()) << " to "
                << reinterpret_cast<void*>(new_slot_value.ptr()) << "}\n";
    }
    void LogUpdatedPointer(Tagged<HeapObject> host, MaybeObjectSlot slot,
                           Tagged<HeapObject> old_slot_value,
                           Tagged<HeapObject> new_slot_value) {
      std::cout << "ro-promotion: updated pointer {host "
                << reinterpret_cast<void*>(host.address()) << " slot "
                << reinterpret_cast<void*>(slot.address()) << " from "
                << reinterpret_cast<void*>(old_slot_value.ptr()) << " to "
                << reinterpret_cast<void*>(new_slot_value.ptr()) << "}\n";
    }
    void LogUpdatedExternalPointerTableEntry(Tagged<HeapObject> host,
                                             ExternalPointerSlot slot,
                                             Address slot_value) {
      std::cout << "ro-promotion: updated external pointer slot {host "
                << reinterpret_cast<void*>(host.address()) << " slot "
                << reinterpret_cast<void*>(slot.address()) << " slot_value "
                << reinterpret_cast<void*>(slot_value) << "}\n";
    }
    void LogUpdatedCodePointerTableEntry(Tagged<HeapObject> host,
                                         IndirectPointerSlot slot,
                                         IndirectPointerHandle old_handle,
                                         IndirectPointerHandle new_handle) {
      std::cout << "ro-promotion: updated code pointer table entry {host "
                << reinterpret_cast<void*>(host.address()) << " slot "
                << reinterpret_cast<void*>(slot.address()) << " from "
                << AsHex(old_handle, 8, true) << " to "
                << AsHex(new_handle, 8, true) << "}\n";
    }

#ifdef DEBUG
    void RecordProcessedSlotIfDebug(Address slot_address) {
      // If this fails, we're visiting some object multiple times by accident.
      CHECK_EQ(processed_slots_.count(slot_address), 0);
      processed_slots_.insert(slot_address);
    }
    std::unordered_set<Address> processed_slots_;  // To avoid dupe processing.
#else
    void RecordProcessedSlotIfDebug(Address slot_address) const {}
#endif  // DEBUG

    Isolate* const isolate_;
    const HeapObjectMap* moves_;

#ifdef V8_ENABLE_SANDBOX
    HeapObjectSet promoted_objects_;

    // When an object owning a pointer table entry is relocated to RO space,
    // it cannot just update the entry to point to its new location
    // (see b/330450848). A new pointer table entry must be allocated for the
    // relocated object, in a RO segment of the table.
    using IndirectPointerHandleMap =
        std::unordered_map<IndirectPointerHandle, IndirectPointerHandle>;
    IndirectPointerHandleMap code_pointer_moves_;
#endif  // V8_ENABLE_SANDBOX
  };

  static void LogPromotedObject(Tagged<HeapObject> src,
                                Tagged<HeapObject> dst) {
    std::cout << "ro-promotion: promoted object {from "
              << reinterpret_cast<void*>(src.ptr()) << " to "
              << reinterpret_cast<void*>(dst.ptr()) << "}\n";
  }

  static void LogPromotedCodePointerTableEntry(
      Tagged<Code> code, IndirectPointerHandle old_handle,
      IndirectPointerHandle new_handle) {
    std::cout << "ro-promotion: promoted code pointer table entry {code "
              << reinterpret_cast<void*>(code.ptr()) << " slot "
              << AsHex(old_handle, 8, true) << " to "
              << AsHex(new_handle, 8, true) << "}\n";
  }
};

}  // namespace

// static
void ReadOnlyPromotion::Promote(Isolate* isolate,
                                const SafepointScope& safepoint_scope,
                                const DisallowGarbageCollection& no_gc) {
  // Visit the mutable heap and determine the set of objects that can be
  // promoted to RO space.
  std::vector<Tagged<HeapObject>> promotees =
      Committee::DeterminePromotees(isolate, no_gc, safepoint_scope);
  // Physically copy promotee objects to RO space and track all object moves.
  HeapObjectMap moves;
  ReadOnlyPromotionImpl::CopyToReadOnlyHeap(isolate, promotees, &moves);
  // Update all references to moved objects to point at their new location in
  // RO space.
  ReadOnlyPromotionImpl::UpdatePointers(isolate, safepoint_scope, moves);
  ReadOnlyPromotionImpl::DeleteDeadObjects(isolate, safepoint_scope, moves);
  ReadOnlyPromotionImpl::Verify(isolate, safepoint_scope);
}

}  // namespace internal
}  // namespace v8