v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
marking-state.h
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_CPPGC_MARKING_STATE_H_
#define V8_HEAP_CPPGC_MARKING_STATE_H_

#include <algorithm>
#include <memory>
#include <utility>
#include <vector>

#include "include/cppgc/trace-trait.h"
#include "include/cppgc/visitor.h"
#include "src/base/logging.h"
#include "src/base/macros.h"
#include "src/heap/base/cached-unordered-map.h"
#include "src/heap/base/stack.h"
#include "src/heap/cppgc/compaction-worklists.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/liveness-broker.h"
#include "src/heap/cppgc/marking-worklists.h"

namespace cppgc {
namespace internal {

// C++ marking implementation.
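// MarkingStateBase bundles the per-marker state: a reference to the heap, a
// thread-local view of the shared marking worklist, and a reference to the
// worklist of not-fully-constructed objects. MarkAndPush() marks an object and
// enqueues it for tracing, MarkNoPush() only sets the mark bit, and
// PushMarked() enqueues an already-marked object. Publish() flushes the local
// worklist view so other markers can pick up the work.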
class MarkingStateBase {
 public:
  inline MarkingStateBase(HeapBase& heap, MarkingWorklists&);
  virtual ~MarkingStateBase() = default;

  MarkingStateBase(const MarkingStateBase&) = delete;
  MarkingStateBase& operator=(const MarkingStateBase&) = delete;

  inline void MarkAndPush(const void*, TraceDescriptor);
  inline void MarkAndPush(HeapObjectHeader&);

  inline void PushMarked(HeapObjectHeader&, TraceDescriptor desc);

  V8_EXPORT_PRIVATE virtual void Publish();

  MarkingWorklists::MarkingWorklist::Local& marking_worklist() {
    return marking_worklist_;
  }
  MarkingWorklists::NotFullyConstructedWorklist&
  not_fully_constructed_worklist() {
    return not_fully_constructed_worklist_;
  }

 protected:
  inline void MarkAndPush(HeapObjectHeader&, TraceDescriptor);

  inline bool MarkNoPush(HeapObjectHeader&);

  HeapBase& heap_;

  MarkingWorklists::MarkingWorklist::Local marking_worklist_;
  MarkingWorklists::NotFullyConstructedWorklist&
      not_fully_constructed_worklist_;
};

MarkingStateBase::MarkingStateBase(HeapBase& heap,
                                   MarkingWorklists& marking_worklists)
    : heap_(heap),
      marking_worklist_(*marking_worklists.marking_worklist()),
      not_fully_constructed_worklist_(
          *marking_worklists.not_fully_constructed_worklist()) {}

void MarkingStateBase::MarkAndPush(const void* object, TraceDescriptor desc) {
  DCHECK_NOT_NULL(object);
  MarkAndPush(
      HeapObjectHeader::FromObject(const_cast<void*>(desc.base_object_payload)),
      desc);
}

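// Objects that are still in construction cannot be traced through their
// TraceDescriptor yet, so they are deferred to the not-fully-constructed
// worklist instead of being marked and pushed directly.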
void MarkingStateBase::MarkAndPush(HeapObjectHeader& header,
                                   TraceDescriptor desc) {
  DCHECK_NOT_NULL(desc.callback);

  if (header.IsInConstruction<AccessMode::kAtomic>()) {
    not_fully_constructed_worklist_.Push<AccessMode::kAtomic>(&header);
  } else if (MarkNoPush(header)) {
    PushMarked(header, desc);
  }
}

bool MarkingStateBase::MarkNoPush(HeapObjectHeader& header) {
  // A GC should only mark the objects that belong in its heap.
  DCHECK_EQ(&heap_, &BasePage::FromPayload(&header)->heap());
  // Never mark free space objects. This would e.g. hint to marking a promptly
  // freed backing store.
  DCHECK(!header.IsFree<AccessMode::kAtomic>());
  return header.TryMarkAtomic();
}

void MarkingStateBase::MarkAndPush(HeapObjectHeader& header) {
  MarkAndPush(
      header,
      {header.ObjectStart(),
       GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace});
}

void MarkingStateBase::PushMarked(HeapObjectHeader& header,
                                  TraceDescriptor desc) {
  DCHECK(header.IsMarked<AccessMode::kAtomic>());
  DCHECK(!header.IsInConstruction<AccessMode::kAtomic>());
  DCHECK_NOT_NULL(desc.callback);

  marking_worklist_.Push(desc);
}

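// BasicMarkingState extends MarkingStateBase with the state shared by the
// mutator and concurrent markers: worklists for weak callbacks, write-barrier
// entries, ephemeron pairs, bailed-out concurrent marking work, optional
// movable-slot recording for compaction, and per-page accounting of marked
// bytes.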
class BasicMarkingState : public MarkingStateBase {
 public:
  BasicMarkingState(HeapBase& heap, MarkingWorklists&, CompactionWorklists*);
  ~BasicMarkingState() override;

  BasicMarkingState(const BasicMarkingState&) = delete;
  BasicMarkingState& operator=(const BasicMarkingState&) = delete;

  inline void RegisterWeakReferenceIfNeeded(const void*, TraceDescriptor,
                                            WeakCallback, const void*);
  inline void RegisterWeakContainerCallback(WeakCallback, const void*);
  inline void RegisterWeakCustomCallback(WeakCallback, const void*);

  void RegisterMovableReference(const void** slot) {
    if (!movable_slots_worklist_) return;
#if defined(CPPGC_CAGED_HEAP)
    if (V8_UNLIKELY(!CagedHeapBase::IsWithinCage(slot))) return;
#else   // !defined(CPPGC_CAGED_HEAP)
    if (V8_UNLIKELY(heap::base::Stack::IsOnStack(slot))) return;
#endif  // !defined(CPPGC_CAGED_HEAP)

    movable_slots_worklist_->Push(slot);
  }

  // Weak containers are special in that they may require re-tracing if
  // reachable through stack, even if the container was already traced before.
  // ProcessWeakContainer records which weak containers were already marked so
  // that conservative stack scanning knows to retrace them.
  inline void ProcessWeakContainer(const void*, TraceDescriptor, WeakCallback,
                                   const void*);

  inline void ProcessEphemeron(const void*, const void*, TraceDescriptor,
                               Visitor&);

  inline void AccountMarkedBytes(const HeapObjectHeader&);
  inline void AccountMarkedBytes(BasePage*, size_t);
  size_t marked_bytes() const { return marked_bytes_; }
  // Returns marked_bytes() and resets the counter internally, basically
  // consuming the value of the marked bytes.
  size_t RecentlyMarkedBytes() {
    return marked_bytes_ - std::exchange(last_marked_bytes_, marked_bytes_);
  }

  V8_EXPORT_PRIVATE void Publish() override;

  MarkingWorklists::PreviouslyNotFullyConstructedWorklist::Local&
  previously_not_fully_constructed_worklist() {
    return previously_not_fully_constructed_worklist_;
  }
  MarkingWorklists::WeakCallbackWorklist::Local&
  weak_container_callback_worklist() {
    return weak_container_callback_worklist_;
  }
  MarkingWorklists::WeakCallbackWorklist::Local&
  parallel_weak_callback_worklist() {
    return parallel_weak_callback_worklist_;
  }
  MarkingWorklists::WeakCustomCallbackWorklist::Local&
  weak_custom_callback_worklist() {
    return weak_custom_callback_worklist_;
  }
  MarkingWorklists::WriteBarrierWorklist::Local& write_barrier_worklist() {
    return write_barrier_worklist_;
  }
  MarkingWorklists::ConcurrentMarkingBailoutWorklist::Local&
  concurrent_marking_bailout_worklist() {
    return concurrent_marking_bailout_worklist_;
  }
  MarkingWorklists::EphemeronPairsWorklist::Local&
  discovered_ephemeron_pairs_worklist() {
    return discovered_ephemeron_pairs_worklist_;
  }
  MarkingWorklists::EphemeronPairsWorklist::Local&
  ephemeron_pairs_for_processing_worklist() {
    return ephemeron_pairs_for_processing_worklist_;
  }
  MarkingWorklists::WeakContainersWorklist& weak_containers_worklist() {
    return weak_containers_worklist_;
  }

  CompactionWorklists::MovableReferencesWorklist::Local*
  movable_slots_worklist() {
    return movable_slots_worklist_.get();
  }

  void set_in_atomic_pause() { in_atomic_pause_ = true; }

 protected:
  inline void RegisterWeakContainer(HeapObjectHeader&);

  MarkingWorklists::PreviouslyNotFullyConstructedWorklist::Local
      previously_not_fully_constructed_worklist_;
  MarkingWorklists::WeakCallbackWorklist::Local
      weak_container_callback_worklist_;
  MarkingWorklists::WeakCallbackWorklist::Local
      parallel_weak_callback_worklist_;
  MarkingWorklists::WeakCustomCallbackWorklist::Local
      weak_custom_callback_worklist_;
  MarkingWorklists::WriteBarrierWorklist::Local write_barrier_worklist_;
  MarkingWorklists::ConcurrentMarkingBailoutWorklist::Local
      concurrent_marking_bailout_worklist_;
  MarkingWorklists::EphemeronPairsWorklist::Local
      discovered_ephemeron_pairs_worklist_;
  MarkingWorklists::EphemeronPairsWorklist::Local
      ephemeron_pairs_for_processing_worklist_;
  MarkingWorklists::WeakContainersWorklist& weak_containers_worklist_;
  // Existence of the worklist (|movable_slots_worklist_| != nullptr) denotes
  // that compaction is currently enabled and slots must be recorded.
  std::unique_ptr<CompactionWorklists::MovableReferencesWorklist::Local>
      movable_slots_worklist_;

  size_t marked_bytes_ = 0;
  size_t last_marked_bytes_ = 0;
  bool in_ephemeron_processing_ = false;
  bool discovered_new_ephemeron_pairs_ = false;
  bool in_atomic_pause_ = false;
  heap::base::CachedUnorderedMap<BasePage*, int64_t, v8::base::hash<BasePage*>>
      marked_bytes_map_;
};

void BasicMarkingState::RegisterWeakReferenceIfNeeded(
    const void* object, TraceDescriptor desc, WeakCallback weak_callback,
    const void* parameter) {
  // Filter out already marked values. The write barrier for WeakMember
  // ensures that any newly set value after this point is kept alive and does
  // not require the callback.
  const HeapObjectHeader& header =
      HeapObjectHeader::FromObject(desc.base_object_payload);
  if (!header.IsInConstruction<AccessMode::kAtomic>() &&
      header.IsMarked<AccessMode::kAtomic>())
    return;
  parallel_weak_callback_worklist_.Push({weak_callback, parameter});
}

void BasicMarkingState::RegisterWeakContainerCallback(WeakCallback callback,
                                                      const void* object) {
  DCHECK_NOT_NULL(callback);
  weak_container_callback_worklist_.Push({callback, object});
}

void BasicMarkingState::RegisterWeakCustomCallback(WeakCallback callback,
                                                   const void* object) {
  DCHECK_NOT_NULL(callback);
  weak_custom_callback_worklist_.Push({callback, object});
}

void BasicMarkingState::RegisterWeakContainer(HeapObjectHeader& header) {
  weak_containers_worklist_.Push<AccessMode::kAtomic>(&header);
}

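// Marks a weak container and records it for potential re-tracing: containers
// still in construction are deferred, already-marked containers are skipped,
// and final weak processing of the backing store is registered before the
// container itself is either traced or accounted.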
void BasicMarkingState::ProcessWeakContainer(const void* object,
                                             TraceDescriptor desc,
                                             WeakCallback callback,
                                             const void* data) {
  DCHECK_NOT_NULL(object);

  HeapObjectHeader& header =
      HeapObjectHeader::FromObject(const_cast<void*>(object));

  if (header.IsInConstruction<AccessMode::kAtomic>()) {
    not_fully_constructed_worklist_.Push<AccessMode::kAtomic>(&header);
    return;
  }

  RegisterWeakContainer(header);

  // Only mark the container initially. Its buckets will be processed after
  // marking.
  if (!MarkNoPush(header)) return;

  // Register final weak processing of the backing store.
  weak_container_callback_worklist_.Push({callback, data});

  // Weak containers might not require tracing. In such cases the callback in
  // the TraceDescriptor will be nullptr. For ephemerons the callback will be
  // non-nullptr so that the container is traced and the ephemeron pairs are
  // processed.
  if (desc.callback) {
    PushMarked(header, desc);
  } else {
    // For weak containers, there's no trace callback and no processing loop to
    // update the marked bytes, hence inline that here.
    AccountMarkedBytes(header);
  }
}

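// Ephemeron semantics: the value is kept alive only if the key is live. If
// the key is not (yet) known to be live, the pair is re-queued on the
// discovered-ephemeron-pairs worklist and retried later.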
void BasicMarkingState::ProcessEphemeron(const void* key, const void* value,
                                         TraceDescriptor value_desc,
                                         Visitor& visitor) {
  // ProcessEphemeron is not expected to find new ephemerons recursively, which
  // would break the main marking loop.
  DCHECK(!in_ephemeron_processing_);
  in_ephemeron_processing_ = true;
  // Keys are considered live even in incremental/concurrent marking settings
  // because the write barrier for WeakMember ensures that any newly set value
  // after this point is kept alive and does not require the callback.
  const bool key_in_construction =
      HeapObjectHeader::FromObject(key).IsInConstruction<AccessMode::kAtomic>();
  const bool key_considered_as_live =
      key_in_construction
          ? in_atomic_pause_
          : HeapObjectHeader::FromObject(key).IsMarked<AccessMode::kAtomic>();
  DCHECK_IMPLIES(
      key_in_construction && in_atomic_pause_,
      HeapObjectHeader::FromObject(key).IsMarked<AccessMode::kAtomic>());
  if (key_considered_as_live) {
    if (value_desc.base_object_payload) {
      MarkAndPush(value_desc.base_object_payload, value_desc);
    } else {
      // If value_desc.base_object_payload is nullptr, the value is not GCed and
      // should be immediately traced.
      value_desc.callback(&visitor, value);
    }
  } else {
    discovered_ephemeron_pairs_worklist_.Push({key, value, value_desc});
    discovered_new_ephemeron_pairs_ = true;
  }
  in_ephemeron_processing_ = false;
}

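// Marked-bytes accounting: large objects contribute their full payload size,
// regular objects their allocated size. Counts are kept both as a running
// total and per BasePage in marked_bytes_map_.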
void BasicMarkingState::AccountMarkedBytes(const HeapObjectHeader& header) {
  const size_t marked_bytes =
      header.IsLargeObject<AccessMode::kAtomic>()
          ? reinterpret_cast<const LargePage*>(BasePage::FromPayload(&header))
                ->PayloadSize()
          : header.AllocatedSize<AccessMode::kAtomic>();
  auto* base_page =
      BasePage::FromPayload(&const_cast<HeapObjectHeader&>(header));
  AccountMarkedBytes(base_page, marked_bytes);
}

void BasicMarkingState::AccountMarkedBytes(BasePage* base_page,
                                           size_t marked_bytes) {
  marked_bytes_ += marked_bytes;
  marked_bytes_map_[base_page] += static_cast<int64_t>(marked_bytes);
}

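// MutatorMarkingState is the marking state used on the mutator thread. On top
// of BasicMarkingState it tracks marked weak containers that may have to be
// re-traced during conservative stack scanning, caching the most recently
// re-traced ones so they are not processed repeatedly.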
class MutatorMarkingState : public BasicMarkingState {
 public:
  MutatorMarkingState(HeapBase& heap, MarkingWorklists& marking_worklists,
                      CompactionWorklists* compaction_worklists)
      : BasicMarkingState(heap, marking_worklists, compaction_worklists),
        retrace_marked_objects_worklist_(
            *marking_worklists.retrace_marked_objects_worklist()) {}
  ~MutatorMarkingState() override = default;

  inline bool MarkNoPush(HeapObjectHeader& header) {
    return MutatorMarkingState::BasicMarkingState::MarkNoPush(header);
  }

  inline void ReTraceMarkedWeakContainer(cppgc::Visitor&, HeapObjectHeader&);

  inline void DynamicallyMarkAddress(ConstAddress);

  // Moves objects in not_fully_constructed_worklist_ to
  // previously_not_fully_constructed_worklist_.
  void FlushNotFullyConstructedObjects();

  // Moves ephemeron pairs in discovered_ephemeron_pairs_worklist_ to
  // ephemeron_pairs_for_processing_worklist_.
  void FlushDiscoveredEphemeronPairs();

  inline void InvokeWeakRootsCallbackIfNeeded(const void*, TraceDescriptor,
                                              WeakCallback, const void*);

  inline bool IsMarkedWeakContainer(HeapObjectHeader&);

  MarkingWorklists::RetraceMarkedObjectsWorklist::Local&
  retrace_marked_objects_worklist() {
    return retrace_marked_objects_worklist_;
  }

  V8_EXPORT_PRIVATE void Publish() override;

 private:
  // Weak containers are strongly retraced during conservative stack scanning.
  // Stack scanning happens once per GC at the start of the atomic pause.
  // Because the visitor is not retained between GCs, there is no need to clear
  // the set at the end of GC.
  class RecentlyRetracedWeakContainers {
    static constexpr size_t kMaxCacheSize = 8;

   public:
    inline bool Contains(const HeapObjectHeader*) const;
    inline void Insert(const HeapObjectHeader*);

   private:
    std::vector<const HeapObjectHeader*> recently_retraced_cache_;
    size_t last_used_index_ = -1;
  } recently_retraced_weak_containers_;

  MarkingWorklists::RetraceMarkedObjectsWorklist::Local
      retrace_marked_objects_worklist_;
};

void MutatorMarkingState::ReTraceMarkedWeakContainer(cppgc::Visitor& visitor,
                                                     HeapObjectHeader& header) {
  DCHECK(weak_containers_worklist_.Contains<AccessMode::kAtomic>(&header));
  recently_retraced_weak_containers_.Insert(&header);
  retrace_marked_objects_worklist_.Push(&header);
}

void MutatorMarkingState::DynamicallyMarkAddress(ConstAddress address) {
  HeapObjectHeader& header =
      BasePage::FromPayload(address)->ObjectHeaderFromInnerAddress(
          const_cast<Address>(address));
  DCHECK(!header.IsInConstruction());
  if (MarkNoPush(header)) {
    marking_worklist_.Push(
        {reinterpret_cast<void*>(header.ObjectStart()),
         GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace});
  }
}

void MutatorMarkingState::InvokeWeakRootsCallbackIfNeeded(
    const void* object, TraceDescriptor desc, WeakCallback weak_callback,
    const void* parameter) {
  // Since weak roots are only traced at the end of marking, we can execute
  // the callback instead of registering it.
#if DEBUG
  const HeapObjectHeader& header =
      HeapObjectHeader::FromObject(desc.base_object_payload);
  DCHECK_IMPLIES(header.IsInConstruction(),
                 header.IsMarked<AccessMode::kAtomic>());
#endif  // DEBUG
  weak_callback(LivenessBrokerFactory::Create(), parameter);
}

bool MutatorMarkingState::IsMarkedWeakContainer(HeapObjectHeader& header) {
  const bool result =
      weak_containers_worklist_.Contains<AccessMode::kAtomic>(&header) &&
      !recently_retraced_weak_containers_.Contains(&header);
  DCHECK_IMPLIES(result, header.IsMarked<AccessMode::kAtomic>());
  DCHECK_IMPLIES(result, !header.IsInConstruction());
  return result;
}

bool MutatorMarkingState::RecentlyRetracedWeakContainers::Contains(
    const HeapObjectHeader* header) const {
  return std::find(recently_retraced_cache_.begin(),
                   recently_retraced_cache_.end(),
                   header) != recently_retraced_cache_.end();
}

void MutatorMarkingState::RecentlyRetracedWeakContainers::Insert(
    const HeapObjectHeader* header) {
  last_used_index_ = (last_used_index_ + 1) % kMaxCacheSize;
  if (recently_retraced_cache_.size() <= last_used_index_)
    recently_retraced_cache_.push_back(header);
  else
    recently_retraced_cache_[last_used_index_] = header;
}

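// ConcurrentMarkingState is used by concurrent markers running off the main
// thread. When a Trace method bails out and defers an object, the bytes that
// were already accounted for it are subtracted again so they are not counted
// twice.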
class ConcurrentMarkingState : public BasicMarkingState {
 public:
  ConcurrentMarkingState(HeapBase& heap, MarkingWorklists& marking_worklists,
                         CompactionWorklists* compaction_worklists)
      : BasicMarkingState(heap, marking_worklists, compaction_worklists) {}

  ~ConcurrentMarkingState() override = default;

  inline void AccountDeferredMarkedBytes(BasePage* base_page,
                                         size_t deferred_bytes) {
    // AccountDeferredMarkedBytes is called from Trace methods, which are always
    // called after AccountMarkedBytes, so there should be no underflow here.
    DCHECK_LE(deferred_bytes, marked_bytes_);
    marked_bytes_ -= deferred_bytes;
    marked_bytes_map_[base_page] -= static_cast<int64_t>(deferred_bytes);
  }
};

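// Drains |worklist_local| until it is empty or |ShouldYield| requests a yield.
// The yield predicate is only consulted every kDeadlineCheckInterval processed
// items to keep the check cheap. Returns true iff the worklist was fully
// drained.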
template <size_t kDeadlineCheckInterval, typename Predicate,
          typename CreateStatsScopeCallback, typename WorklistLocal,
          typename ProcessWorklistItemCallback>
bool DrainWorklistWithPredicate(
    Predicate ShouldYield, CreateStatsScopeCallback CreateStatsScope,
    WorklistLocal& worklist_local,
    ProcessWorklistItemCallback ProcessWorklistItem) {
  if (worklist_local.IsLocalAndGlobalEmpty()) {
    return true;
  }
  if (ShouldYield()) {
    return false;
  }
  const auto stats_scope = CreateStatsScope();
  size_t processed_callback_count = kDeadlineCheckInterval;
  typename WorklistLocal::ItemType item;
  while (worklist_local.Pop(&item)) {
    ProcessWorklistItem(item);
    if (V8_UNLIKELY(--processed_callback_count == 0)) {
      if (ShouldYield()) {
        return false;
      }
      processed_callback_count = kDeadlineCheckInterval;
    }
  }
  return true;
}

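// Traces an object that is already marked and fully constructed by invoking
// its trace callback through the object header.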
template <AccessMode mode>
void DynamicallyTraceMarkedObject(Visitor& visitor,
                                  const HeapObjectHeader& header) {
  DCHECK(!header.IsInConstruction<mode>());
  DCHECK(header.IsMarked<AccessMode::kAtomic>());
  header.TraceImpl<mode>(&visitor);
}

}  // namespace internal
}  // namespace cppgc

#endif  // V8_HEAP_CPPGC_MARKING_STATE_H_