v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
traced-handles.h
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HANDLES_TRACED_HANDLES_H_
#define V8_HANDLES_TRACED_HANDLES_H_

#include <atomic>
#include <cstdint>
#include <limits>
#include <utility>
#include <vector>

#include "src/base/bit-field.h"
#include "src/base/doubly-threaded-list.h"
#include "src/base/macros.h"
#include "src/common/globals.h"
#include "src/handles/handles.h"
#include "src/objects/objects.h"

namespace v8::internal {

class CppHeap;
class Isolate;
class TracedHandles;

class TracedNode final {
 public:
#ifdef V8_HOST_ARCH_64_BIT
  using IndexType = uint16_t;
#else   // !V8_HOST_ARCH_64_BIT
  using IndexType = uint8_t;
#endif  // !V8_HOST_ARCH_64_BIT

  static TracedNode* FromLocation(Address* location) {
    return reinterpret_cast<TracedNode*>(location);
  }

  static const TracedNode* FromLocation(const Address* location) {
    return reinterpret_cast<const TracedNode*>(location);
  }

  TracedNode(IndexType, IndexType);

  IndexType index() const { return index_; }

  bool is_weak() const { return IsWeak::decode(flags_); }
  void set_weak(bool v) { flags_ = IsWeak::update(flags_, v); }

  bool is_droppable() const { return IsDroppable::decode(flags_); }

  bool is_in_use() const { return IsInUse::decode(flags_); }

  IndexType next_free() const { return next_free_index_; }
  void set_next_free(IndexType next_free_index) {
    next_free_index_ = next_free_index;
  }

  void set_markbit() { is_marked_.store(true, std::memory_order_relaxed); }

  bool markbit() const { return is_marked_.load(std::memory_order_relaxed); }

  bool IsMetadataCleared() const { return flags_ == 0 && !markbit(); }

  void clear_markbit() { is_marked_.store(false, std::memory_order_relaxed); }

  bool has_old_host() const { return HasOldHost::decode(flags_); }

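  // Object pointer accessors. The atomic variant below uses a relaxed store so
  // that the slot can be written while it is read concurrently (e.g. by a
  // concurrent marker) without a data race.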
  template <AccessMode access_mode = AccessMode::NON_ATOMIC>
  void set_raw_object(Address value) {
    if constexpr (access_mode == AccessMode::NON_ATOMIC) {
      object_ = value;
    } else {
      reinterpret_cast<std::atomic<Address>*>(&object_)->store(
          value, std::memory_order_relaxed);
    }
  }
  Address raw_object() const { return object_; }

  V8_INLINE FullObjectSlot Publish(Tagged<Object> object,
                                   bool needs_young_bit_update,
                                   bool needs_black_allocation,
                                   bool has_old_host, bool is_droppable);
  void Release(Address zap_value);

 private:
  using IsInUse = base::BitField8<bool, 0, 1>;
  using IsInYoungList = IsInUse::Next<bool, 1>;
  using IsWeak = IsInYoungList::Next<bool, 1>;
  using IsDroppable = IsWeak::Next<bool, 1>;
  using HasOldHost = IsDroppable::Next<bool, 1>;

  Address object_ = kNullAddress;
  // When a node is not in use, this index is used to build the free list.
  IndexType next_free_index_;
  IndexType index_;
  uint8_t flags_ = 0;
  // Marking bit could be stored in flags_ as well but is kept separately for
  // clarity.
  std::atomic<bool> is_marked_ = false;
};

// TracedNode should not take more than 2 words.
static_assert(sizeof(TracedNode) <= 2 * kSystemPointerSize);
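// (On 64-bit hosts that budget is 16 bytes: 8 for the object pointer, 2 + 2
// for the two IndexType fields, 1 for the flags and typically 1 for the
// atomic mark bit.)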

class TracedNodeBlock final {
  class NodeIteratorImpl
      : public base::iterator<std::forward_iterator_tag, TracedNode> {
   public:
    explicit NodeIteratorImpl(TracedNodeBlock* block) : block_(block) {}
    NodeIteratorImpl(TracedNodeBlock* block,
                     TracedNode::IndexType current_index)
        : block_(block), current_index_(current_index) {}
    NodeIteratorImpl(const NodeIteratorImpl& other) V8_NOEXCEPT
        : block_(other.block_),
          current_index_(other.current_index_) {}

    TracedNode* operator*() { return block_->at(current_index_); }

    bool operator==(const NodeIteratorImpl& rhs) const {
      return rhs.block_ == block_ && rhs.current_index_ == current_index_;
    }
    bool operator!=(const NodeIteratorImpl& rhs) const {
      return !(*this == rhs);
    }
    NodeIteratorImpl& operator++() {
      ++current_index_;
      return *this;
    }
    NodeIteratorImpl operator++(int) {
      NodeIteratorImpl tmp(*this);
      operator++();
      return tmp;
    }

   private:
    TracedNodeBlock* block_ = nullptr;
    TracedNode::IndexType current_index_ = 0;
  };

  struct ListNode final {
    TracedNodeBlock** prev_ = nullptr;
    TracedNodeBlock* next_ = nullptr;
  };

  template <typename ConcreteTraits>
  struct BaseListTraits {
    static TracedNodeBlock*** prev(TracedNodeBlock* tnb) {
      return &ConcreteTraits::GetListNode(tnb).prev_;
    }
    static TracedNodeBlock** next(TracedNodeBlock* tnb) {
      return &ConcreteTraits::GetListNode(tnb).next_;
    }
    static bool non_empty(TracedNodeBlock* tnb) { return tnb != nullptr; }
    static bool in_use(const TracedNodeBlock* tnb) {
      return *prev(const_cast<TracedNodeBlock*>(tnb)) != nullptr;
    }
  };

 public:
  struct OverallListTraits : BaseListTraits<OverallListTraits> {
    static ListNode& GetListNode(TracedNodeBlock* tnb) {
      return tnb->overall_list_node_;
    }
  };

  struct UsableListTraits : BaseListTraits<UsableListTraits> {
    static ListNode& GetListNode(TracedNodeBlock* tnb) {
      return tnb->usable_list_node_;
    }
  };

  struct YoungListTraits : BaseListTraits<YoungListTraits> {
    static ListNode& GetListNode(TracedNodeBlock* tnb) {
      return tnb->young_list_node_;
    }
  };

  using YoungList =
      base::DoublyThreadedList<TracedNodeBlock*, YoungListTraits>;
  using Iterator = NodeIteratorImpl;

#if defined(V8_USE_ADDRESS_SANITIZER)
  static constexpr size_t kMinCapacity = 1;
  static constexpr size_t kMaxCapacity = 1;
#else   // !defined(V8_USE_ADDRESS_SANITIZER)
#ifdef V8_HOST_ARCH_64_BIT
  static constexpr size_t kMinCapacity = 256;
#else   // !V8_HOST_ARCH_64_BIT
  static constexpr size_t kMinCapacity = 128;
#endif  // !V8_HOST_ARCH_64_BIT
  static constexpr size_t kMaxCapacity =
      std::numeric_limits<TracedNode::IndexType>::max() - 1;
#endif  // !defined(V8_USE_ADDRESS_SANITIZER)
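  // Note: with ASAN every block holds a single node, which makes
  // out-of-bounds accesses easier to detect; otherwise capacity is bounded by
  // what IndexType can address, with the last representable index left unused.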

  static_assert(kMinCapacity <= kMaxCapacity);

  static TracedNodeBlock* Create(TracedHandles&);
  static void Delete(TracedNodeBlock*);

  static TracedNodeBlock& From(TracedNode& node);
  static const TracedNodeBlock& From(const TracedNode& node);

  V8_INLINE TracedNode* AllocateNode();
  void FreeNode(TracedNode* node, Address zap_value);

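  // Nodes are laid out directly behind the block header, so `this + 1` is the
  // address of the first TracedNode in the block.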
  TracedNode* at(TracedNode::IndexType index) {
    return &(reinterpret_cast<TracedNode*>(this + 1)[index]);
  }
  const TracedNode* at(TracedNode::IndexType index) const {
    return const_cast<TracedNodeBlock*>(this)->at(index);
  }

  const void* nodes_begin_address() const { return at(0); }
  const void* nodes_end_address() const { return at(capacity_); }

  Iterator begin() { return Iterator(this); }
  Iterator end() { return Iterator(this, capacity_); }

  bool IsFull() const { return used_ == capacity_; }
  bool IsEmpty() const { return used_ == 0; }
  size_t used() const { return used_; }

  size_t size_bytes() const {
    return sizeof(*this) + capacity_ * sizeof(TracedNode);
  }

  bool InYoungList() const { return YoungListTraits::in_use(this); }
  bool InUsableList() const { return UsableListTraits::in_use(this); }

  bool NeedsReprocessing() const { return reprocess_; }
  void SetReprocessing(bool value) { reprocess_ = value; }

  TracedNode::IndexType ConsumeLocallyFreed() {
    const auto locally_freed = locally_freed_;
    locally_freed_ = 0;
    return locally_freed;
  }

 private:
  TracedNodeBlock(TracedHandles&, TracedNode::IndexType);

  ListNode overall_list_node_;
  ListNode usable_list_node_;
  ListNode young_list_node_;
  TracedNode::IndexType used_ = 0;
  const TracedNode::IndexType capacity_;
  TracedNode::IndexType first_free_node_ = 0;
  TracedNode::IndexType locally_freed_ = 0;
  bool reprocess_ = false;
};

// TracedHandles hold handles that must go through cppgc's tracing methods. The
// handles do not otherwise keep their pointees alive.
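// These are the slots that back the public v8::TracedReference API.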
class V8_EXPORT_PRIVATE TracedHandles final {
 public:
  enum class MarkMode : uint8_t { kOnlyYoung, kAll };

  static void Destroy(Address* location);
  static void Copy(const Address* const* from, Address** to);
  static void Move(Address** from, Address** to);

  static Tagged<Object> Mark(Address* location, MarkMode mark_mode);
  static Tagged<Object> MarkConservatively(Address* inner_location,
                                           Address* traced_node_block_base,
                                           MarkMode mark_mode);

  static bool IsValidInUseNode(const Address* location);

  explicit TracedHandles(Isolate*);
  ~TracedHandles();

  TracedHandles(const TracedHandles&) = delete;
  TracedHandles& operator=(const TracedHandles&) = delete;

  V8_INLINE FullObjectSlot Create(Address value, Address* slot,
                                  TracedReferenceStoreMode store_mode,
                                  TracedReferenceHandling reference_handling);

  using NodeBounds = std::vector<std::pair<const void*, const void*>>;
  const NodeBounds GetNodeBounds() const;

  void SetIsMarking(bool);
  void SetIsSweepingOnMutatorThread(bool);

  // Updates the list of young nodes that is maintained separately.
  void UpdateListOfYoungNodes();

  // Deletes empty blocks. Sweeping must not be running.
  void DeleteEmptyBlocks();

  void ResetDeadNodes(WeakSlotCallbackWithHeap should_reset_handle);
  void ResetYoungDeadNodes(WeakSlotCallbackWithHeap should_reset_handle);

  // Computes whether young weak objects should be considered roots for young
  // generation garbage collections or just be treated weakly. By default,
  // objects are considered roots. An object is not treated as a root when both
  // - `JSObject::IsUnmodifiedApiObject` returns true, and
  // - the `EmbedderRootsHandler` does not consider it a root either.
  void ComputeWeaknessForYoungObjects();
  // Processes the weak objects that have been computed in
  // `ComputeWeaknessForYoungObjects()`.
  void ProcessWeakYoungObjects(RootVisitor* v,
                               WeakSlotCallbackWithHeap should_reset_handle);

  void Iterate(RootVisitor*);
  void IterateYoung(RootVisitor*);
  void IterateYoungRoots(RootVisitor*);
  void IterateAndMarkYoungRootsWithOldHosts(RootVisitor*);
  void IterateYoungRootsWithOldHostsForTesting(RootVisitor*);

  size_t used_node_count() const { return used_nodes_; }
  size_t total_size_bytes() const { return block_size_bytes_; }
  size_t used_size_bytes() const { return sizeof(TracedNode) * used_nodes_; }

  bool HasYoung() const;

 private:
  V8_INLINE std::pair<TracedNodeBlock*, TracedNode*> AllocateNode();
  V8_NOINLINE V8_PRESERVE_MOST void RefillUsableNodeBlocks();
  void FreeNode(TracedNode* node, Address zap_value);

  V8_INLINE bool NeedsToBeRemembered(Tagged<Object> value, TracedNode* node,
                                     Address* slot,
                                     TracedReferenceStoreMode store_mode) const;
  V8_INLINE bool NeedsTrackingInYoungNodes(Tagged<Object> object,
                                           TracedNode* node) const;
  V8_INLINE CppHeap* GetCppHeapIfUnifiedYoungGC(Isolate* isolate) const;
  V8_INLINE bool IsCppGCHostOld(CppHeap& cpp_heap, Address host) const;

  void Destroy(TracedNodeBlock& node_block, TracedNode& node);
  void Copy(const TracedNode& from_node, Address** to);
  void Move(TracedNode& from_node, Address** from, Address** to);

  bool SupportsClearingWeakNonLiveWrappers();

  size_t num_blocks_ = 0;
  // Fully empty blocks that are neither referenced from any stale references
  // in destructors nor from young nodes.
  std::vector<TracedNodeBlock*> empty_blocks_;
  bool is_marking_ = false;
  bool is_sweeping_on_mutator_thread_ = false;
  size_t used_nodes_ = 0;
  size_t block_size_bytes_ = 0;
  bool disable_block_handling_on_free_ = false;
};

}  // namespace v8::internal

#endif  // V8_HANDLES_TRACED_HANDLES_H_
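
For orientation, here is a minimal, hypothetical embedder-side sketch of how this machinery is typically reached: the public v8::TracedReference type (declared in include/v8-traced-handle.h) owns a slot that is created and released through TracedHandles, so the code below only exercises the public API. The wrapper class and its method names are illustrative, not part of V8.

#include <v8.h>

// Hypothetical embedder wrapper: the JS object stays alive only as long as
// the unified (V8 + cppgc) garbage collector can trace the reference.
class NodeWrapper {
 public:
  void Attach(v8::Isolate* isolate, v8::Local<v8::Object> js_object) {
    // Reset() allocates a traced slot for the handle; resetting or destroying
    // the reference releases that slot again.
    js_object_.Reset(isolate, js_object);
  }
  void Detach() { js_object_.Reset(); }

 private:
  v8::TracedReference<v8::Object> js_object_;
};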