#ifndef V8_HEAP_MARKING_VISITOR_INL_H_
#define V8_HEAP_MARKING_VISITOR_INL_H_

// ... (includes elided)

namespace v8 {
namespace internal {

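// MarkingVisitorBase is the CRTP base for the main-thread and concurrent
// marking visitors of the mark-compact collector; the ConcreteVisitor
// parameter supplies the marking state and the slot-recording and
// reference-summarizer hooks used throughout this file.

// Tries to mark `object` and push it onto the appropriate marking worklist,
// reporting the strong edge retainer -> object to the reference summarizer.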
template <typename ConcreteVisitor>
bool MarkingVisitorBase<ConcreteVisitor>::MarkObject(
    Tagged<HeapObject> retainer, Tagged<HeapObject> object,
    MarkingHelper::WorklistTarget target_worklist) {
  SynchronizePageAccess(object);
  concrete_visitor()->AddStrongReferenceForReferenceSummarizer(retainer,
                                                               object);
  return MarkingHelper::TryMarkAndPush(heap_, local_marking_worklists_,
                                       concrete_visitor()->marking_state(),
                                       target_worklist, object);
}

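// Strong references: mark the target (if it should be marked at all) and
// record the slot so it can be updated if the target is evacuated.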
template <typename ConcreteVisitor>
template <typename THeapObjectSlot>
void MarkingVisitorBase<ConcreteVisitor>::ProcessStrongHeapObject(
    Tagged<HeapObject> host, THeapObjectSlot slot,
    Tagged<HeapObject> heap_object) {
  SynchronizePageAccess(heap_object);
  const auto target_worklist =
      MarkingHelper::ShouldMarkObject(heap_, heap_object);
  if (!target_worklist) {
    return;
  }
  if (V8_UNLIKELY(!IsHeapObject(heap_object))) {
    // Diagnose heap corruption: crash with enough context to identify the
    // faulting host, slot, and map.
    heap_->isolate()->PushStackTraceAndDie(
        reinterpret_cast<void*>(host->map().ptr()),
        reinterpret_cast<void*>(host->address()),
        reinterpret_cast<void*>(slot.address()));
  }
  MarkObject(host, heap_object, target_worklist.value());
  concrete_visitor()->RecordSlot(host, slot, heap_object);
}

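// A weak reference is "trivial" unless its value is a Map and its host is an
// object with custom weakness handling (Map, TransitionArray, or
// DescriptorArray).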
// static
template <typename ConcreteVisitor>
constexpr bool
MarkingVisitorBase<ConcreteVisitor>::IsTrivialWeakReferenceValue(
    Tagged<HeapObject> host, Tagged<HeapObject> heap_object) {
  return !IsMap(heap_object) ||
         !(IsMap(host) || IsTransitionArray(host) || IsDescriptorArray(host));
}

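// Weak references: values that are already marked only need their slot
// recorded; values with unknown liveness are deferred to the weak-reference
// worklists and resolved once the transitive closure is complete.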
template <typename ConcreteVisitor>
template <typename THeapObjectSlot>
void MarkingVisitorBase<ConcreteVisitor>::ProcessWeakHeapObject(
    Tagged<HeapObject> host, THeapObjectSlot slot,
    Tagged<HeapObject> heap_object) {
  SynchronizePageAccess(heap_object);
  concrete_visitor()->AddWeakReferenceForReferenceSummarizer(host,
                                                             heap_object);
  const auto target_worklist =
      MarkingHelper::ShouldMarkObject(heap_, heap_object);
  if (!target_worklist) {
    return;
  }
  if (concrete_visitor()->marking_state()->IsMarked(heap_object)) {
    // Weak references with live values are directly processed here to reduce
    // the processing time of weak cells during the main GC pause.
    concrete_visitor()->RecordSlot(host, slot, heap_object);
  } else if (V8_LIKELY(IsTrivialWeakReferenceValue(host, heap_object))) {
    // If we do not know about the liveness of the value, defer processing
    // until the liveness of the whole transitive closure is known.
    local_weak_objects_->weak_references_trivial_local.Push(
        HeapObjectAndSlot{host, slot});
  } else {
    // Custom weakness (Map values in Map/TransitionArray/DescriptorArray
    // hosts) needs non-trivial processing.
    local_weak_objects_->weak_references_non_trivial_local.Push(
        HeapObjectAndSlot{host, slot});
  }
}

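// Iterates a slot range and dispatches each pointer to the strong or weak
// handler above. Read-only and Smi targets are filtered out up front, except
// for trusted slots, which skip the fast filter.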
template <typename ConcreteVisitor>
template <typename TSlot>
void MarkingVisitorBase<ConcreteVisitor>::VisitPointersImpl(
    Tagged<HeapObject> host, TSlot start, TSlot end) {
  using THeapObjectSlot = typename TSlot::THeapObjectSlot;
  for (TSlot slot = start; slot < end; ++slot) {
    typename TSlot::TObject object;
    if constexpr (SlotHoldsTrustedPointerV<TSlot>) {
      object = slot.Relaxed_Load();
    } else {
      const std::optional<Tagged<Object>> optional_object =
          this->GetObjectFilterReadOnlyAndSmiFast(slot);
      if (!optional_object) {
        continue;
      }
      object = *optional_object;
    }
    Tagged<HeapObject> heap_object;
    if (object.GetHeapObjectIfStrong(&heap_object)) {
      // If the reference changes concurrently from strong to weak, the write
      // barrier will treat the weak reference as strong, so we won't miss
      // the weak reference.
      ProcessStrongHeapObject(host, THeapObjectSlot(slot), heap_object);
    } else if (TSlot::kCanBeWeak &&
               object.GetHeapObjectIfWeak(&heap_object)) {
      ProcessWeakHeapObject(host, THeapObjectSlot(slot), heap_object);
    }
  }
}

template <typename ConcreteVisitor>
template <typename TSlot>
void MarkingVisitorBase<ConcreteVisitor>::VisitStrongPointerImpl(
    Tagged<HeapObject> host, TSlot slot) {
  static_assert(!TSlot::kCanBeWeak);
  using THeapObjectSlot = typename TSlot::THeapObjectSlot;
  typename TSlot::TObject object = slot.Relaxed_Load();
  Tagged<HeapObject> heap_object;
  if (object.GetHeapObject(&heap_object)) {
    ProcessStrongHeapObject(host, THeapObjectSlot(slot), heap_object);
  }
}

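// Embedded object pointers and code targets are reached through RelocInfo of
// InstructionStream objects; besides marking the target, the reloc slot is
// recorded so it can be updated when the target moves.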
template <typename ConcreteVisitor>
void MarkingVisitorBase<ConcreteVisitor>::VisitEmbeddedPointer(
    Tagged<InstructionStream> host, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsEmbeddedObjectMode(rinfo->rmode()));
  Tagged<HeapObject> object =
      rinfo->target_object(ObjectVisitorWithCageBases::cage_base());
  const auto target_worklist = MarkingHelper::ShouldMarkObject(heap_, object);
  if (!target_worklist) {
    return;
  }
  if (!concrete_visitor()->marking_state()->IsMarked(object)) {
    Tagged<Code> code = UncheckedCast<Code>(host->raw_code(kAcquireLoad));
    if (code->IsWeakObject(object)) {
      local_weak_objects_->weak_objects_in_code_local.Push(
          HeapObjectAndCode{object, code});
      concrete_visitor()->AddWeakReferenceForReferenceSummarizer(host,
                                                                 object);
    } else {
      MarkObject(host, object, target_worklist.value());
    }
  }
  concrete_visitor()->RecordRelocSlot(host, rinfo, object);
}

template <typename ConcreteVisitor>
void MarkingVisitorBase<ConcreteVisitor>::VisitCodeTarget(
    Tagged<InstructionStream> host, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTargetMode(rinfo->rmode()));
  Tagged<InstructionStream> target =
      InstructionStream::FromTargetAddress(rinfo->target_address());
  const auto target_worklist = MarkingHelper::ShouldMarkObject(heap_, target);
  if (!target_worklist) {
    return;
  }
  MarkObject(host, target, target_worklist.value());
  concrete_visitor()->RecordRelocSlot(host, rinfo, target);
}

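// With pointer compression, external pointers are routed through an external
// pointer table; marking sets a mark bit on the table entry. Shared-type
// entries use the shared table, all others the per-heap young or old space.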
template <typename ConcreteVisitor>
void MarkingVisitorBase<ConcreteVisitor>::VisitExternalPointer(
    Tagged<HeapObject> host, ExternalPointerSlot slot) {
#ifdef V8_COMPRESS_POINTERS
  DCHECK(!slot.tag_range().IsEmpty());
  if (slot.HasExternalPointerHandle()) {
    ExternalPointerHandle handle = slot.Relaxed_LoadHandle();
    ExternalPointerTable* table;
    ExternalPointerTable::Space* space;
    if (IsSharedExternalPointerType(slot.tag_range())) {
      table = shared_external_pointer_table_;
      space = shared_external_pointer_space_;
    } else {
      table = external_pointer_table_;
      if (v8_flags.sticky_mark_bits) {
        // Everything is considered old during major GC.
        if (table->Contains(heap_->young_external_pointer_space(), handle)) {
          space = heap_->young_external_pointer_space();
        } else {
          space = heap_->old_external_pointer_space();
        }
      } else {
        space = HeapLayout::InYoungGeneration(host)
                    ? heap_->young_external_pointer_space()
                    : heap_->old_external_pointer_space();
      }
    }
    table->Mark(space, handle, slot.address());
  }
#endif  // V8_COMPRESS_POINTERS
}

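// CppHeapPointer slots point at objects on the managed C++ heap (Oilpan).
// Besides marking the table entry, a successfully loaded pointer is pushed
// onto the C++ marking state for unified-heap marking.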
template <typename ConcreteVisitor>
void MarkingVisitorBase<ConcreteVisitor>::VisitCppHeapPointer(
    Tagged<HeapObject> host, CppHeapPointerSlot slot) {
#ifdef V8_COMPRESS_POINTERS
  CppHeapPointerTable* table = cpp_heap_pointer_table_;
  CppHeapPointerTable::Space* space = heap_->cpp_heap_pointer_space();
  // ... (mark the table entry; elided)
#endif  // V8_COMPRESS_POINTERS
  if (auto cpp_heap_pointer =
          slot.try_load(heap_->isolate(), kAnyCppHeapPointer)) {
    local_marking_worklists_->cpp_marking_state()->MarkAndPush(
        reinterpret_cast<void*>(cpp_heap_pointer));
  }
}

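// Indirect pointers (sandbox builds) reference trusted objects through a
// pointer table. Strong slots mark the referenced object; no remembered-set
// entry is needed because the table entry is updated on relocation.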
template <typename ConcreteVisitor>
void MarkingVisitorBase<ConcreteVisitor>::VisitIndirectPointer(
    Tagged<HeapObject> host, IndirectPointerSlot slot,
    IndirectPointerMode mode) {
#ifdef V8_ENABLE_SANDBOX
  if (mode == IndirectPointerMode::kStrong) {
    // Load the referenced object (if the slot is initialized) and mark it as
    // alive if necessary. The marker may see trusted pointers that have not
    // been published yet.
    Tagged<Object> value =
        slot.Relaxed_Load_AllowUnpublished(heap_->isolate());
    if (IsHeapObject(value)) {
      Tagged<HeapObject> obj = Cast<HeapObject>(value);
      SynchronizePageAccess(obj);
      const auto target_worklist = MarkingHelper::ShouldMarkObject(heap_, obj);
      if (!target_worklist) {
        return;
      }
      MarkObject(host, obj, target_worklist.value());
    }
  }
#else
  UNREACHABLE();
#endif  // V8_ENABLE_SANDBOX
}

template <typename ConcreteVisitor>
void MarkingVisitorBase<ConcreteVisitor>::VisitTrustedPointerTableEntry(
    Tagged<HeapObject> host, IndirectPointerSlot slot) {
  concrete_visitor()->MarkPointerTableEntry(host, slot);
}

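// With leaptiering, a JSFunction references its code through a handle into
// the JSDispatchTable; marking the entry keeps the dispatched code alive.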
template <typename ConcreteVisitor>
void MarkingVisitorBase<ConcreteVisitor>::VisitJSDispatchTableEntry(
    Tagged<HeapObject> host, JSDispatchHandle handle) {
#ifdef V8_ENABLE_LEAPTIERING
  JSDispatchTable* jdt = IsolateGroup::current()->js_dispatch_table();
  JSDispatchTable::Space* space = heap_->js_dispatch_table_space();
  JSDispatchTable::Space* ro_space =
      heap_->isolate()->read_only_heap()->js_dispatch_table_space();
  jdt->VerifyEntry(handle, space, ro_space);
  jdt->Mark(handle);
#endif  // V8_ENABLE_LEAPTIERING
}

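// JSFunction: if the function's baseline code is about to be flushed, its
// code field is skipped here (i.e. treated weakly); otherwise the code is
// visited strongly. Functions whose bytecode was already flushed are queued
// so their code field can be reset after marking.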
template <typename ConcreteVisitor>
size_t MarkingVisitorBase<ConcreteVisitor>::VisitJSFunction(
    Tagged<Map> map, Tagged<JSFunction> js_function,
    MaybeObjectSize maybe_object_size) {
  if (ShouldFlushBaselineCode(js_function)) {
#ifndef V8_ENABLE_LEAPTIERING
    local_weak_objects_->baseline_flushing_candidates_local.Push(js_function);
#endif  // !V8_ENABLE_LEAPTIERING
    return Base::VisitJSFunction(map, js_function, maybe_object_size);
  }

#ifdef V8_ENABLE_LEAPTIERING
  // The function's code is reached through the dispatch handle stored at
  // JSFunction::kDispatchHandleOffset.
  Tagged<HeapObject> obj = ...;  // Dispatch-table entry lookup elided.
  SynchronizePageAccess(obj);
  const auto target_worklist = MarkingHelper::ShouldMarkObject(heap_, obj);
  if (target_worklist) {
    MarkObject(js_function, obj, target_worklist.value());
  }
#else
#ifdef V8_ENABLE_SANDBOX
  VisitIndirectPointer(js_function,
                       js_function->RawIndirectPointerField(
                           JSFunction::kCodeOffset, kCodeIndirectPointerTag),
                       IndirectPointerMode::kStrong);
#else
  VisitPointer(js_function, js_function->RawField(JSFunction::kCodeOffset));
#endif  // V8_ENABLE_SANDBOX
#endif  // V8_ENABLE_LEAPTIERING

  if (js_function->NeedsResetDueToFlushedBytecode(heap_->isolate())) {
    local_weak_objects_->flushed_js_functions_local.Push(js_function);
  }
  return Base::VisitJSFunction(map, js_function, maybe_object_size);
}

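// SharedFunctionInfo drives bytecode flushing: flushable bytecode is aged
// here, and the function-data field is only visited strongly while the SFI
// is not an old-enough flushing candidate.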
template <typename ConcreteVisitor>
size_t MarkingVisitorBase<ConcreteVisitor>::VisitSharedFunctionInfo(
    Tagged<Map> map, Tagged<SharedFunctionInfo> shared_info,
    MaybeObjectSize maybe_object_size) {
  const bool can_flush_bytecode = HasBytecodeArrayForFlushing(shared_info);

  // We found a BytecodeArray that can be flushed. Increment the age unless
  // ages are pinned for this GC cycle.
  if (can_flush_bytecode && !should_keep_ages_unchanged_) {
    MakeOlder(shared_info);
  }

  if (!can_flush_bytecode || !ShouldFlushCode(shared_info)) {
    // If the SharedFunctionInfo doesn't have old bytecode, visit the function
    // data strongly.
#ifdef V8_ENABLE_SANDBOX
    VisitIndirectPointer(shared_info,
                         shared_info->RawIndirectPointerField(
                             SharedFunctionInfo::kTrustedFunctionDataOffset,
                             kUnknownIndirectPointerTag),
                         IndirectPointerMode::kStrong);
#else
    VisitPointer(
        shared_info,
        shared_info->RawField(SharedFunctionInfo::kTrustedFunctionDataOffset));
#endif  // V8_ENABLE_SANDBOX
    VisitPointer(shared_info,
                 shared_info->RawField(
                     SharedFunctionInfo::kUntrustedFunctionDataOffset));
  } else if (!IsByteCodeFlushingEnabled(code_flush_mode_)) {
    // If bytecode flushing is disabled but baseline code flushing is enabled
    // then we have to visit the bytecode hanging off the baseline code.
    Tagged<Code> baseline_code = shared_info->baseline_code(kAcquireLoad);
    VisitProtectedPointer(
        baseline_code, baseline_code->RawProtectedPointerField(
                           Code::kDeoptimizationDataOrInterpreterDataOffset));
    local_weak_objects_->code_flushing_candidates_local.Push(shared_info);
  } else {
    // Record this SFI as a flushing candidate; its old bytecode will be
    // flushed after marking.
    local_weak_objects_->code_flushing_candidates_local.Push(shared_info);
  }

  return Base::VisitSharedFunctionInfo(map, shared_info, maybe_object_size);
}

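// Helpers implementing the code-flushing policy. An SFI qualifies only if
// flushing is enabled, the function is not resumable, and its function data
// is (or hangs off) a BytecodeArray.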
template <typename ConcreteVisitor>
bool MarkingVisitorBase<ConcreteVisitor>::HasBytecodeArrayForFlushing(
    Tagged<SharedFunctionInfo> sfi) const {
  if (IsFlushingDisabled(code_flush_mode_)) return false;
  // Resumable functions cannot have their bytecode flushed.
  if (IsResumableFunction(sfi->kind())) return false;
  // Take a snapshot of the function-data field: this can be called from the
  // concurrent marker, so the load must tolerate concurrent updates.
  Tagged<Object> data = ...;  // Function-data snapshot (load elided).
  if (IsCode(data)) {
    Tagged<Code> baseline_code = Cast<Code>(data);
    DCHECK_EQ(baseline_code->kind(), CodeKind::BASELINE);
    // Flushing baseline code also requires inspecting its bytecode.
    if (!IsBaselineCodeFlushingEnabled(code_flush_mode_)) return false;
    data = baseline_code->bytecode_or_interpreter_data();
  } else if (!IsByteCodeFlushingEnabled(code_flush_mode_)) {
    return false;
  }
  return IsBytecodeArray(data);
}

template <typename ConcreteVisitor>
bool MarkingVisitorBase<ConcreteVisitor>::ShouldFlushCode(
    Tagged<SharedFunctionInfo> sfi) const {
  return IsOld(sfi) || V8_UNLIKELY(IsForceFlushingEnabled(code_flush_mode_));
}

template <typename ConcreteVisitor>
bool MarkingVisitorBase<ConcreteVisitor>::IsOld(
    Tagged<SharedFunctionInfo> sfi) const {
  if (v8_flags.flush_code_based_on_time) {
    return sfi->age() >= v8_flags.bytecode_old_time;
  } else if (v8_flags.flush_code_based_on_tab_visibility) {
    return isolate_in_background_ ||
           V8_UNLIKELY(sfi->age() == SharedFunctionInfo::kMaxAge);
  } else {
    return sfi->age() >= v8_flags.bytecode_old_age;
  }
}

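// Ages an SFI according to the active strategy: time-based flushing adds the
// seconds since the last GC, the default strategy bumps the age by one per
// GC cycle, and tab-visibility flushing needs no aging at all.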
template <typename ConcreteVisitor>
void MarkingVisitorBase<ConcreteVisitor>::MakeOlder(
    Tagged<SharedFunctionInfo> sfi) const {
  if (v8_flags.flush_code_based_on_time) {
    if (code_flushing_increase_ == 0) {
      return;
    }
    uint16_t current_age;
    uint16_t updated_age;
    do {
      current_age = sfi->age();
      // When the age is 0, it was reset by the function prologue in
      // Ignition/Sparkplug; in that case restart at 1 rather than adding the
      // full time increment.
      updated_age = current_age == 0
                        ? 1
                        : SaturateAdd(current_age, code_flushing_increase_);
    } while (sfi->CompareExchangeAge(current_age, updated_age) != current_age);
  } else if (v8_flags.flush_code_based_on_tab_visibility) {
    // No aging is needed; flushing is decided by tab visibility alone.
  } else {
    uint16_t age = sfi->age();
    if (age < v8_flags.bytecode_old_age) {
      sfi->CompareExchangeAge(age, age + 1);
    }
  }
}

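// Baseline code may only be flushed if the JSFunction's code is the BASELINE
// Code object of its SharedFunctionInfo and that SFI is itself a flushing
// candidate. Reads use acquire semantics since this runs concurrently with
// the main thread.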
template <typename ConcreteVisitor>
bool MarkingVisitorBase<ConcreteVisitor>::ShouldFlushBaselineCode(
    Tagged<JSFunction> js_function) const {
  if (!IsBaselineCodeFlushingEnabled(code_flush_mode_)) return false;
  // Do raw reads of the shared and code fields: this may be called from a
  // concurrent thread while the referenced objects are still being
  // initialized, so defend with acquire loads.
  Tagged<Object> maybe_shared =
      ACQUIRE_READ_FIELD(*js_function, JSFunction::kSharedFunctionInfoOffset);
  if (!IsSharedFunctionInfo(maybe_shared)) return false;

  Tagged<Object> maybe_code = js_function->raw_code(kAcquireLoad);
#ifdef THREAD_SANITIZER
  // TSAN does not see the memory fence implied by the acquire load above, so
  // synchronize the page explicitly.
  MemoryChunk::FromAddress(maybe_code.ptr())->SynchronizedLoad();
#endif  // THREAD_SANITIZER
  if (!IsCode(maybe_code)) return false;
  Tagged<Code> code = UncheckedCast<Code>(maybe_code);
  if (code->kind() != CodeKind::BASELINE) return false;

  Tagged<SharedFunctionInfo> shared = Cast<SharedFunctionInfo>(maybe_shared);
  return HasBytecodeArrayForFlushing(shared) && ShouldFlushCode(shared);
}

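// Large FixedArrays are marked incrementally, kChunkSize at a time, via the
// page's MarkingProgressTracker; the array is re-pushed onto the worklist
// (up to kMaxQueuedWorklistItems copies) so that markers keep draining it.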
template <typename ConcreteVisitor>
size_t MarkingVisitorBase<ConcreteVisitor>::VisitFixedArrayWithProgressTracker(
    Tagged<Map> map, Tagged<FixedArray> object,
    MarkingProgressTracker& progress_tracker) {
  static constexpr size_t kMaxQueuedWorklistItems = 8u;
  DCHECK(concrete_visitor()->marking_state()->IsMarked(object));

  const size_t size = FixedArray::BodyDescriptor::SizeOf(map, object);
  const size_t chunk = progress_tracker.GetNextChunkToMark();
  const size_t total_chunks = progress_tracker.TotalNumberOfChunks();
  size_t start = 0;
  size_t end = 0;
  if (chunk == 0) {
    // This is the first chunk: queue up to kMaxQueuedWorklistItems further
    // worklist items (one per chunk) and visit the map pointer.
    if (const auto target_worklist =
            MarkingHelper::ShouldMarkObject(heap_, object)) {
      const size_t scheduled_chunks =
          std::min(total_chunks, kMaxQueuedWorklistItems);
      for (size_t i = 1; i < scheduled_chunks; ++i) {
        local_marking_worklists_->Push(object);
      }
    }
    concrete_visitor()
        ->template VisitMapPointerIfNeeded<VisitorId::kVisitFixedArray>(
            object);
    start = FixedArray::BodyDescriptor::kStartOffset;
    end = std::min(size, MarkingProgressTracker::kChunkSize);
  } else {
    start = chunk * MarkingProgressTracker::kChunkSize;
    end = std::min(size, start + MarkingProgressTracker::kChunkSize);
    // Replenish the queue while there are more chunks than queued items.
    if (chunk + kMaxQueuedWorklistItems < total_chunks) {
      if (const auto target_worklist =
              MarkingHelper::ShouldMarkObject(heap_, object)) {
        local_marking_worklists_->Push(object);
      }
    }
  }
  if (start < end) {
    VisitPointers(object, MaybeObjectSlot(object.address() + start),
                  MaybeObjectSlot(object.address() + end));
  }
  return end - start;
}

template <typename ConcreteVisitor>
size_t MarkingVisitorBase<ConcreteVisitor>::VisitFixedArray(
    Tagged<Map> map, Tagged<FixedArray> object,
    MaybeObjectSize maybe_object_size) {
  MarkingProgressTracker& progress_tracker =
      MutablePageMetadata::FromHeapObject(object)->marking_progress_tracker();
  return concrete_visitor()->CanUpdateValuesInHeap() &&
                 progress_tracker.IsEnabled()
             ? VisitFixedArrayWithProgressTracker(map, object, progress_tracker)
             : Base::VisitFixedArray(map, object, maybe_object_size);
}

template <typename ConcreteVisitor>
size_t MarkingVisitorBase<ConcreteVisitor>::VisitJSArrayBuffer(
    Tagged<Map> map, Tagged<JSArrayBuffer> object,
    MaybeObjectSize maybe_object_size) {
  object->MarkExtension();
  return Base::VisitJSArrayBuffer(map, object, maybe_object_size);
}

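// Ephemeron semantics: a value is kept alive only while its key is alive.
// Keys are treated weakly here; a value is visited strongly once its key is
// marked, otherwise the pair is recorded for the ephemeron fixpoint
// iteration at the end of marking.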
template <typename ConcreteVisitor>
size_t MarkingVisitorBase<ConcreteVisitor>::VisitEphemeronHashTable(
    Tagged<Map> map, Tagged<EphemeronHashTable> table, MaybeObjectSize) {
  local_weak_objects_->ephemeron_hash_tables_local.Push(table);
  const bool use_key_to_values = key_to_values_ != nullptr;

  for (InternalIndex i : table->IterateEntries()) {
    ObjectSlot key_slot =
        table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
    Tagged<HeapObject> key = Cast<HeapObject>(table->KeyAt(i, kRelaxedLoad));
    SynchronizePageAccess(key);
    concrete_visitor()->RecordSlot(table, key_slot, key);
    concrete_visitor()->AddWeakReferenceForReferenceSummarizer(table, key);

    ObjectSlot value_slot =
        table->RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));

    if (MarkingHelper::IsMarkedOrAlwaysLive(
            heap_, concrete_visitor()->marking_state(), key)) {
      VisitPointer(table, value_slot);
    } else {
      Tagged<Object> value_obj = table->ValueAt(i);
      if (IsHeapObject(value_obj)) {
        Tagged<HeapObject> value = Cast<HeapObject>(value_obj);
        SynchronizePageAccess(value);
        concrete_visitor()->RecordSlot(table, value_slot, value);
        concrete_visitor()->AddWeakReferenceForReferenceSummarizer(table,
                                                                   value);
        const auto target_worklist =
            MarkingHelper::ShouldMarkObject(heap_, value);
        if (!target_worklist) {
          continue;
        }
        // Revisit ephemerons with both key and value unreachable at the end
        // of the concurrent marking cycle.
        if (concrete_visitor()->marking_state()->IsUnmarked(value)) {
          if (V8_LIKELY(!use_key_to_values)) {
            local_weak_objects_->next_ephemerons_local.Push(
                Ephemeron{key, value});
          } else {
            auto it = key_to_values_->try_emplace(key).first;
            it->second.push_back(value);
          }
        }
      }
    }
  }
  return table->SizeFromMap(map);
}

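// JSWeakRef and WeakCell drop their targets when those die: live targets
// just get their slots recorded, while cells with potentially dead targets
// or unregister tokens are deferred to worklists processed after marking.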
template <typename ConcreteVisitor>
size_t MarkingVisitorBase<ConcreteVisitor>::VisitJSWeakRef(
    Tagged<Map> map, Tagged<JSWeakRef> weak_ref,
    MaybeObjectSize maybe_object_size) {
  if (IsHeapObject(weak_ref->target())) {
    Tagged<HeapObject> target = Cast<HeapObject>(weak_ref->target());
    SynchronizePageAccess(target);
    concrete_visitor()->AddWeakReferenceForReferenceSummarizer(weak_ref,
                                                               target);
    if (MarkingHelper::IsMarkedOrAlwaysLive(
            heap_, concrete_visitor()->marking_state(), target)) {
      // Record the slot inside the JSWeakRef, since its body iteration
      // skips it.
      ObjectSlot slot = weak_ref->RawField(JSWeakRef::kTargetOffset);
      concrete_visitor()->RecordSlot(weak_ref, slot, target);
    } else {
      // The target may die; process this JSWeakRef once the liveness of the
      // whole transitive closure is known.
      local_weak_objects_->js_weak_refs_local.Push(weak_ref);
    }
  }
  return Base::VisitJSWeakRef(map, weak_ref, maybe_object_size);
}

template <typename ConcreteVisitor>
size_t MarkingVisitorBase<ConcreteVisitor>::VisitWeakCell(
    Tagged<Map> map, Tagged<WeakCell> weak_cell,
    MaybeObjectSize maybe_object_size) {
  Tagged<HeapObject> target = weak_cell->relaxed_target();
  Tagged<HeapObject> unregister_token = weak_cell->relaxed_unregister_token();
  SynchronizePageAccess(target);
  SynchronizePageAccess(unregister_token);
  if (MarkingHelper::IsMarkedOrAlwaysLive(
          heap_, concrete_visitor()->marking_state(), target) &&
      MarkingHelper::IsMarkedOrAlwaysLive(
          heap_, concrete_visitor()->marking_state(), unregister_token)) {
    // Record the slots inside the WeakCell, since its body iteration skips
    // them.
    ObjectSlot slot = weak_cell->RawField(WeakCell::kTargetOffset);
    concrete_visitor()->RecordSlot(weak_cell, slot, target);
    slot = weak_cell->RawField(WeakCell::kUnregisterTokenOffset);
    concrete_visitor()->RecordSlot(weak_cell, slot, unregister_token);
  } else {
    // The WeakCell points to a potentially dead target or unregister token;
    // process it once the liveness of the transitive closure is known.
    local_weak_objects_->weak_cells_local.Push(weak_cell);
    concrete_visitor()->AddWeakReferenceForReferenceSummarizer(weak_cell,
                                                               target);
    concrete_visitor()->AddWeakReferenceForReferenceSummarizer(
        weak_cell, unregister_token);
  }
  return Base::VisitWeakCell(map, weak_cell, maybe_object_size);
}

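// DescriptorArrays are shared between maps, so only descriptors owned by a
// live map are marked; the epoch-based DescriptorArrayMarkingState keeps
// this safe when several markers race on the same array.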
template <typename ConcreteVisitor>
size_t MarkingVisitorBase<ConcreteVisitor>::VisitDescriptorArrayStrongly(
    Tagged<Map> map, Tagged<DescriptorArray> array,
    MaybeObjectSize maybe_object_size) {
  this->template VisitMapPointerIfNeeded<VisitorId::kVisitDescriptorArray>(
      array);
  const int size = DescriptorArray::BodyDescriptor::SizeOf(map, array);
  VisitPointers(array, array->GetFirstPointerSlot(),
                array->GetDescriptorSlot(0));
  VisitPointers(
      array, MaybeObjectSlot(array->GetDescriptorSlot(0)),
      MaybeObjectSlot(
          array->GetDescriptorSlot(array->number_of_descriptors())));
  return size;
}

template <typename ConcreteVisitor>
size_t MarkingVisitorBase<ConcreteVisitor>::VisitDescriptorArray(
    Tagged<Map> map, Tagged<DescriptorArray> array,
    MaybeObjectSize maybe_object_size) {
  if (!concrete_visitor()->CanUpdateValuesInHeap()) {
    // If we cannot update the values in the heap, just treat the array
    // strongly.
    return VisitDescriptorArrayStrongly(map, array, maybe_object_size);
  }
  // Acquire the range of descriptors still to be marked in this epoch; the
  // array may be re-added to the worklist several times.
  const auto [start, end] =
      DescriptorArrayMarkingState::AcquireDescriptorRangeToMark(
          mark_compact_epoch_, array);
  if (start != end) {
    DCHECK_LT(start, end);
    VisitPointers(array, MaybeObjectSlot(array->GetDescriptorSlot(start)),
                  MaybeObjectSlot(array->GetDescriptorSlot(end)));
    if (start == 0) {
      // We are processing the array for the first time: visit the header and
      // all fields preceding the descriptors, and account for the full size.
      size_t size = DescriptorArray::BodyDescriptor::SizeOf(map, array);
      VisitPointers(array, array->GetFirstPointerSlot(),
                    array->GetDescriptorSlot(0));
      concrete_visitor()
          ->template VisitMapPointerIfNeeded<VisitorId::kVisitDescriptorArray>(
              array);
      return size;
    }
  }
  return 0;
}

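// Marks exactly the descriptors owned by `map`. If a concurrent marker sees
// the own-descriptor count out of sync with the array, the marking write
// barrier guarantees the missing descriptors are still marked.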
template <typename ConcreteVisitor>
void MarkingVisitorBase<ConcreteVisitor>::VisitDescriptorsForMap(
    Tagged<Map> map) {
  if (!concrete_visitor()->CanUpdateValuesInHeap() || !map->CanTransition()) {
    return;
  }
  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked.
  Tagged<Object> maybe_descriptors =
      TaggedField<Object, Map::kInstanceDescriptorsOffset>::Acquire_Load(
          heap_->isolate(), map);
  // A Smi means this Map is still being deserialized and has no initialized
  // descriptor field yet.
  if (IsSmi(maybe_descriptors)) {
    DCHECK_EQ(maybe_descriptors, Smi::uninitialized_deserialization_value());
    return;
  }
  Tagged<DescriptorArray> descriptors =
      Cast<DescriptorArray>(maybe_descriptors);
  SynchronizePageAccess(descriptors);
  // Strong descriptor arrays are marked through the normal visitor mechanism.
  if (IsStrongDescriptorArray(descriptors)) {
    return;
  }
  if (v8_flags.black_allocated_pages &&
      HeapLayout::InBlackAllocatedPage(descriptors)) {
    return;
  }
  const int number_of_own_descriptors = map->NumberOfOwnDescriptors();
  if (number_of_own_descriptors) {
    // A concurrent marker may observe number_of_own_descriptors out of sync
    // with the descriptors; the marking write barrier then ensures that all
    // required descriptors are marked.
    const auto descriptors_to_mark = std::min<int>(
        number_of_own_descriptors, descriptors->number_of_descriptors());
    concrete_visitor()->marking_state()->TryMark(descriptors);
    if (DescriptorArrayMarkingState::TryUpdateIndicesToMark(
            mark_compact_epoch_, descriptors, descriptors_to_mark)) {
      const auto target_worklist =
          MarkingHelper::ShouldMarkObject(heap_, descriptors);
      if (target_worklist) {
        local_marking_worklists_->Push(descriptors);
      }
    }
  }
}

template <typename ConcreteVisitor>
size_t MarkingVisitorBase<ConcreteVisitor>::VisitMap(
    Tagged<Map> meta_map, Tagged<Map> map, MaybeObjectSize maybe_object_size) {
  VisitDescriptorsForMap(map);
  // Mark the pointer fields of the Map; its descriptor array was handled
  // above.
  return Base::VisitMap(meta_map, map, maybe_object_size);
}

template <typename ConcreteVisitor>
size_t MarkingVisitorBase<ConcreteVisitor>::VisitTransitionArray(
    Tagged<Map> map, Tagged<TransitionArray> array,
    MaybeObjectSize maybe_object_size) {
  local_weak_objects_->transition_arrays_local.Push(array);
  return Base::VisitTransitionArray(map, array, maybe_object_size);
}

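// Marks the pointer-table entry behind a trusted object's 'self' indirect
// pointer: Code objects go through the code pointer table, all other trusted
// objects through the (possibly shared) trusted pointer table.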
template <typename ConcreteVisitor>
void FullMarkingVisitorBase<ConcreteVisitor>::MarkPointerTableEntry(
    Tagged<HeapObject> obj, IndirectPointerSlot slot) {
#ifdef V8_ENABLE_SANDBOX
  IndirectPointerTag tag = slot.tag();
  DCHECK_NE(tag, kUnknownIndirectPointerTag);
  IndirectPointerHandle handle = slot.Relaxed_LoadHandle();
  // The handle must not be null: otherwise we'd fail to mark the entry alive.
  DCHECK_NE(handle, kNullIndirectPointerHandle);
  if (tag == kCodeIndirectPointerTag) {
    CodePointerTable* table = IsolateGroup::current()->code_pointer_table();
    CodePointerTable::Space* space = this->heap_->code_pointer_space();
    table->Mark(space, handle);
  } else {
    bool use_shared_table = IsSharedTrustedPointerType(tag);
    DCHECK_EQ(use_shared_table, HeapLayout::InWritableSharedSpace(obj));
    TrustedPointerTable* table = use_shared_table
                                     ? this->shared_trusted_pointer_table_
                                     : this->trusted_pointer_table_;
    TrustedPointerTable::Space* space =
        use_shared_table
            ? this->heap_->isolate()->shared_trusted_pointer_space()
            : this->heap_->trusted_pointer_space();
    table->Mark(space, handle);
  }
#else
  UNREACHABLE();
#endif  // V8_ENABLE_SANDBOX
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_MARKING_VISITOR_INL_H_