#ifndef V8_HANDLES_TRACED_HANDLES_INL_H_
#define V8_HANDLES_TRACED_HANDLES_INL_H_

#include "src/handles/traced-handles.h"
#include "src/heap/cppgc-js/cpp-heap.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/heap-layout-inl.h"
#include "src/heap/heap-write-barrier-inl.h"

namespace v8::internal {
TracedNode* TracedNodeBlock::AllocateNode() {
  DCHECK_NE(used_, capacity_);
  DCHECK_NE(first_free_node_, kInvalidFreeListNodeIndex);
  auto* node = at(first_free_node_);
  first_free_node_ = node->next_free();
  used_++;
  DCHECK(!node->is_in_use());
  return node;
}
std::pair<TracedNodeBlock*, TracedNode*> TracedHandles::AllocateNode() {
  if (V8_UNLIKELY(usable_blocks_.empty())) {
    RefillUsableNodeBlocks();
  }
  TracedNodeBlock* block = usable_blocks_.Front();
  auto* node = block->AllocateNode();
  DCHECK(node->IsMetadataCleared());
  if (V8_UNLIKELY(block->IsFull())) {
    usable_blocks_.Remove(block);
  }
  return std::make_pair(block, node);
}
bool TracedHandles::NeedsTrackingInYoungNodes(Tagged<Object> object,
                                              TracedNode* node) const {
  DCHECK(!node->is_in_young_list());
  return HeapLayout::InYoungGeneration(object);
}
CppHeap* TracedHandles::GetCppHeapIfUnifiedYoungGC(Isolate* isolate) const {
  if (!v8_flags.cppgc_young_generation) return nullptr;
  auto* cpp_heap = CppHeap::From(isolate->heap()->cpp_heap());
  if (cpp_heap && cpp_heap->generational_gc_supported()) return cpp_heap;
  return nullptr;
}
bool TracedHandles::IsCppGCHostOld(CppHeap& cpp_heap, Address host) const {
  DCHECK(host);
  DCHECK(cpp_heap.generational_gc_supported());
  auto* host_ptr = reinterpret_cast<void*>(host);
  auto* page = cppgc::internal::BasePage::FromInnerAddress(&cpp_heap, host_ptr);
  // A host that does not reside on the C++ heap (e.g. a stack-allocated
  // TracedReference) is treated as young and needs no remembered-set entry.
  if (!page) return false;
  return !page->ObjectHeaderFromInnerAddress(host_ptr).IsYoung();
}
bool TracedHandles::NeedsToBeRemembered(
    Tagged<Object> value, TracedNode* node, Address* slot,
    TracedReferenceStoreMode store_mode) const {
  DCHECK(!node->has_old_host());
  auto* cpp_heap = GetCppHeapIfUnifiedYoungGC(isolate_);
  if (!cpp_heap) return false;
  if (store_mode == TracedReferenceStoreMode::kInitializingStore) {
    // Initializing stores are not recorded.
    return false;
  }
  // If marking is running, the write barrier covers this store anyway.
  if (is_marking_) return false;
  if (!HeapLayout::InYoungGeneration(value)) return false;
  return IsCppGCHostOld(*cpp_heap, reinterpret_cast<Address>(slot));
}
// Publishes the node's internal state with a release store so that concurrent
// readers observe a fully initialized node.
FullObjectSlot TracedNode::Publish(Tagged<Object> object,
                                   bool needs_young_bit_update,
                                   bool needs_black_allocation,
                                   bool has_old_host, bool is_droppable_value) {
  DCHECK(IsMetadataCleared());
  flags_ = needs_young_bit_update << IsInYoungList::kShift |
           has_old_host << HasOldHost::kShift |
           is_droppable_value << IsDroppable::kShift | 1 << IsInUse::kShift;
  if (needs_black_allocation) set_markbit();
  reinterpret_cast<std::atomic<Address>*>(&object_)->store(
      object.ptr(), std::memory_order_release);
  return FullObjectSlot(&object_);
}
FullObjectSlot TracedHandles::Create(
    Address value, Address* slot, TracedReferenceStoreMode store_mode,
    TracedReferenceHandling reference_handling) {
  DCHECK_NOT_NULL(slot);
  Tagged<Object> object(value);
  auto [block, node] = AllocateNode();
  const bool needs_young_bit_update = NeedsTrackingInYoungNodes(object, node);
  const bool has_old_host = NeedsToBeRemembered(object, node, slot, store_mode);
  const bool needs_black_allocation =
      is_marking_ && store_mode != TracedReferenceStoreMode::kInitializingStore;
  const bool is_droppable =
      reference_handling == TracedReferenceHandling::kDroppable;
  auto result_slot = node->Publish(object, needs_young_bit_update,
                                   needs_black_allocation, has_old_host,
                                   is_droppable);
  // Young-list insertion and the write barrier happen after publishing, as
  // they do not need to be part of the atomically visible node state.
  if (needs_young_bit_update && !block->InYoungList()) {
    young_blocks_.PushFront(block);
    DCHECK(block->InYoungList());
  }
  if (needs_black_allocation) {
    WriteBarrier::MarkingFromTracedHandle(object);
  }
#ifdef VERIFY_HEAP
  if (v8_flags.verify_heap) {
    Object::ObjectVerify(*result_slot, isolate_);
  }
#endif  // VERIFY_HEAP
  return result_slot;
}
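// Illustrative sketch, not part of this header: embedders reach Create()
// through the public v8::TracedReference API. Constructing or Reset()-ing a
// reference allocates a TracedNode and publishes the object as shown above.
//
//   v8::TracedReference<v8::Object> ref;
//   ref.Reset(isolate, local_object);  // funnels into TracedHandles::Create()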
}  // namespace v8::internal

#endif  // V8_HANDLES_TRACED_HANDLES_INL_H_