#ifndef V8_HEAP_HEAP_VISITOR_INL_H_
#define V8_HEAP_HEAP_VISITOR_INL_H_

#if V8_ENABLE_WEBASSEMBLY
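// Fixed arrays, fixed double arrays, and weak fixed arrays are the array
// kinds for which right-trimming is supported (cf. SupportsRightTrim()).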
template <VisitorId visitor_id>
    case kVisitFixedArray:
    case kVisitFixedDoubleArray:
    case kVisitWeakFixedArray:
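// ContainsReadOnlyMap<visitor_id>() reports whether objects with the given
// visitor id are guaranteed to have a read-only map; the specializations
// generated below only DCHECK that the map indeed lives in read-only space.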
template <VisitorId visitor_id>
#define DEFINE_READ_ONLY_MAP_SPECIALIZATION(VisitorIdType)            \
  inline bool ContainsReadOnlyMap<VisitorId::kVisit##VisitorIdType>(  \
      PtrComprCageBase cage_base, Tagged<HeapObject> object) {        \
    DCHECK(HeapLayout::InReadOnlySpace(object->map(cage_base)));      \

#undef DEFINE_READ_ONLY_MAP_SPECIALIZATION
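// A minimal sketch (illustrative only; the class name and the chosen
// overrides are assumptions, not part of this header) of how a concrete
// visitor plugs into the CRTP interface defined below:
//
//   class SketchVisitor final : public HeapVisitor<SketchVisitor> {
//    public:
//     explicit SketchVisitor(Isolate* isolate)
//         : HeapVisitor<SketchVisitor>(isolate) {}
//
//     // Configuration predicates queried via `if constexpr` in the base.
//     static constexpr bool ShouldVisitMapPointer() { return false; }
//     static constexpr bool ShouldUseUncheckedCast() { return true; }
//
//     // Typed hook invoked from the dispatch switch in Visit().
//     size_t VisitFixedArray(Tagged<Map> map, Tagged<FixedArray> object,
//                            MaybeObjectSize maybe_object_size);
//   };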
template <typename ConcreteVisitor>

template <typename ConcreteVisitor>

template <typename ConcreteVisitor>
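// When the concrete visitor opts into ShouldUseUncheckedCast(), object casts
// below skip the checked Cast<> path.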
template <typename ConcreteVisitor>
  if constexpr (ConcreteVisitor::ShouldUseUncheckedCast()) {

template <typename ConcreteVisitor>
  requires(!ConcreteVisitor::UsePrecomputedObjectSize())
  return Visit(object->map(cage_base()), object);
template <typename ConcreteVisitor>
  requires(!ConcreteVisitor::UsePrecomputedObjectSize())

template <typename ConcreteVisitor>
  requires(ConcreteVisitor::UsePrecomputedObjectSize())
template <typename ConcreteVisitor>
  if constexpr (ConcreteVisitor::UsePrecomputedObjectSize()) {
    static_assert(!ConcreteVisitor::EnableConcurrentVisitation());

  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
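  // Dispatch on the map's visitor id: most ids expand into generated cases
  // that cast the object and call the matching typed Visit##TypeName() hook
  // on the concrete visitor.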
  switch (map->visitor_id()) {
#define CASE(TypeName)                                                 \
  case kVisit##TypeName:                                               \
    DCHECK(!InstanceTypeChecker::IsTrustedObject(map) ||               \
           !HeapLayout::InTrustedSpace(object));                       \
    return visitor->Visit##TypeName(                                   \
        map, ConcreteVisitor::template Cast<TypeName>(object, heap_),  \

#define CASE(TypeName)                                                 \
  case kVisit##TypeName:                                               \
    DCHECK(InstanceTypeChecker::IsTrustedObject(map));                 \
    SBXCHECK(OutsideSandboxOrInReadonlySpace(object));                 \
    return visitor->Visit##TypeName(                                   \
        map, ConcreteVisitor::template Cast<TypeName>(object, heap_),  \
    case kVisitShortcutCandidate:
      return visitor->VisitShortcutCandidate(
    case kVisitJSObjectFast:
      return visitor->VisitJSObjectFast(
    case kVisitJSApiObject:
      return visitor->VisitJSApiObject(
    case kVisitStruct:
      return visitor->VisitStruct(map, object, maybe_object_size);
    case kVisitFiller:
      return visitor->VisitFiller(map, object, maybe_object_size);
    case kVisitFreeSpace:
  // Record the object, its map, and its visitor id in the crash parameters
  // before dying.
  isolate->PushParamsAndDie(
      reinterpret_cast<void*>(object.ptr()),
      reinterpret_cast<void*>(map.ptr()),
      reinterpret_cast<void*>(static_cast<intptr_t>(map->visitor_id())));
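// VisitMapPointerIfNeeded<visitor_id>() visits the host's map slot unless the
// concrete visitor opted out of map visitation (or of read-only map
// visitation, for visitor ids whose maps are known to be read-only).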
template <typename ConcreteVisitor>
template <VisitorId visitor_id>
  if constexpr (!ConcreteVisitor::ShouldVisitMapPointer()) {

  if constexpr (!ConcreteVisitor::ShouldVisitReadOnlyMapPointer()) {

  static_cast<ConcreteVisitor*>(this)->VisitMapPointer(host);
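// VISIT(TypeName) expands into a Visit##TypeName() wrapper that forwards to
// VisitWithBodyDescriptor() with the type's BodyDescriptor.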
#define VISIT(TypeName)                                                  \
  template <typename ConcreteVisitor>                                    \
  size_t HeapVisitor<ConcreteVisitor>::Visit##TypeName(                  \
      Tagged<Map> map, Tagged<TypeName> object,                          \
      MaybeObjectSize maybe_object_size) {                               \
    return static_cast<ConcreteVisitor*>(this)                           \
        ->template VisitWithBodyDescriptor<                              \
            VisitorId::kVisit##TypeName, TypeName,                       \
            ObjectTraits<TypeName>::BodyDescriptor>(map, object,         \
                                                    maybe_object_size);  \
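// VISIT_WITH_SLACK(TypeName) is used for JSObject subclasses that may still
// carry in-object slack; it forwards to VisitJSObjectSubclass(), which can
// restrict iteration to the used part of the instance.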
#define VISIT_WITH_SLACK(TypeName)                                            \
  template <typename ConcreteVisitor>                                         \
  size_t HeapVisitor<ConcreteVisitor>::Visit##TypeName(                       \
      Tagged<Map> map, Tagged<TypeName> object,                               \
      MaybeObjectSize maybe_object_size) {                                    \
    return static_cast<ConcreteVisitor*>(this)                                \
        ->template VisitJSObjectSubclass<TypeName, TypeName::BodyDescriptor>( \
            map, object, maybe_object_size);                                  \

#undef VISIT_WITH_SLACK
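// Shortcut candidates are ConsStrings; they are forwarded to the regular
// VisitConsString() path.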
template <typename ConcreteVisitor>
  return static_cast<ConcreteVisitor*>(this)->VisitConsString(
      map, object, maybe_object_size);
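// Filler and free-space objects are only expected for visitors that declare
// CanEncounterFillerOrFreeSpace(); for fillers the size is either the
// precomputed object size or the map's instance size.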
template <typename ConcreteVisitor>
  if constexpr (!ConcreteVisitor::CanEncounterFillerOrFreeSpace()) {

  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
  visitor->template VisitMapPointerIfNeeded<VisitorId::kVisitFiller>(object);
  return ConcreteVisitor::UsePrecomputedObjectSize()
             ? maybe_object_size.AssumeSize()
             : map->instance_size();
template <typename ConcreteVisitor>
  if constexpr (!ConcreteVisitor::CanEncounterFillerOrFreeSpace()) {

  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
  visitor->template VisitMapPointerIfNeeded<VisitorId::kVisitFreeSpace>(
      object);
template <typename ConcreteVisitor>
  return static_cast<ConcreteVisitor*>(this)
      ->template VisitJSObjectSubclass<JSObject, JSObject::FastBodyDescriptor>(
          map, object, maybe_object_size);
template <typename ConcreteVisitor>
  return static_cast<ConcreteVisitor*>(this)
      ->template VisitJSObjectSubclass<
          map, object, maybe_object_size);
template <typename ConcreteVisitor>
  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
  int size = ConcreteVisitor::UsePrecomputedObjectSize()
                 ? static_cast<int>(maybe_object_size.AssumeSize())
                 : map->instance_size();
  visitor->template VisitMapPointerIfNeeded<VisitorId::kVisitStruct>(object);
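// VisitJSObjectSubclass() can truncate visitation to the used part of the
// instance (map->UsedInstanceSize()), so in-object slack is skipped unless
// the concrete visitor requests the full object via ShouldVisitFullJSObject().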
template <typename ConcreteVisitor>
template <typename T, typename TBodyDescriptor>
  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
  visitor->template VisitMapPointerIfNeeded<VisitorId::kVisitJSObject>(object);
  const size_t size = ConcreteVisitor::UsePrecomputedObjectSize()
                          ? maybe_object_size.AssumeSize()
                          : TBodyDescriptor::SizeOf(map, object);
  int visitation_size = static_cast<int>(size);

  if (!ConcreteVisitor::ShouldVisitFullJSObject()) {
    const int used_size = map->UsedInstanceSize();
    visitation_size = used_size;

  TBodyDescriptor::IterateBody(map, object, visitation_size, visitor);
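// VisitWithBodyDescriptor() is the common path for types with a fixed layout:
// it visits the map slot if needed, determines the size (precomputed or via
// the BodyDescriptor), and iterates the object body.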
template <typename ConcreteVisitor>
template <VisitorId visitor_id, typename T, typename TBodyDescriptor>
                !ConcreteVisitor::EnableConcurrentVisitation()) {
    DCHECK_EQ(object->SizeFromMap(map), TBodyDescriptor::SizeOf(map, object));

  DCHECK(!map->IsInobjectSlackTrackingInProgress());

  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
  visitor->template VisitMapPointerIfNeeded<visitor_id>(object);
  const int size = ConcreteVisitor::UsePrecomputedObjectSize()
                       ? static_cast<int>(maybe_object_size.AssumeSize())
                       : TBodyDescriptor::SizeOf(map, object);
  TBodyDescriptor::IterateBody(map, object, size, visitor);
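// GetObjectFilterReadOnlyAndSmiFast() performs a relaxed raw load of the
// slot; as the name suggests, it lets callers quickly filter out Smis and
// read-only objects (hence the std::optional return).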
template <typename ConcreteVisitor>
template <typename TSlot>
std::optional<Tagged<Object>>

  auto raw = slot.Relaxed_Load_Raw();
template <typename ConcreteVisitor>
ConcurrentHeapVisitor<ConcreteVisitor>::ConcurrentHeapVisitor(Isolate* isolate)
    : HeapVisitor<ConcreteVisitor>(isolate) {}
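// ConcurrentVisitorCastHelper maps each type to an UncheckedCast; it is used
// by ConcurrentHeapVisitor::Cast() when concurrent visitation is enabled.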
struct ConcurrentVisitorCastHelper {

#define UNCHECKED_CAST(VisitorId, TypeName)                                \
  V8_INLINE Tagged<TypeName> ConcurrentVisitorCastHelper<TypeName>::Cast(  \
      Tagged<HeapObject> object) {                                         \
    return UncheckedCast<TypeName>(object);                                \
template <typename ConcreteVisitor>
template <typename T>
Tagged<T> ConcurrentHeapVisitor<ConcreteVisitor>::Cast(
    Tagged<HeapObject> object, const Heap* heap) {
  if constexpr (ConcreteVisitor::EnableConcurrentVisitation()) {
    return ConcurrentVisitorCastHelper<T>::Cast(object);
#define VISIT_AS_LOCKED_STRING(VisitorId, TypeName)                          \
  template <typename ConcreteVisitor>                                        \
  size_t ConcurrentHeapVisitor<ConcreteVisitor>::Visit##TypeName(            \
      Tagged<Map> map, Tagged<TypeName> object,                              \
      MaybeObjectSize maybe_object_size) {                                   \
    if constexpr (ConcreteVisitor::EnableConcurrentVisitation()) {           \
      return VisitStringLocked(object);                                      \
    return HeapVisitor<ConcreteVisitor>::Visit##TypeName(map, object,        \
                                                         maybe_object_size); \

#undef VISIT_AS_LOCKED_STRING
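// VisitStringLocked() handles string shapes that may be transitioned by
// another thread (the unsafe transition targets): it re-reads the visitor id
// and iterates with the matching BodyDescriptor via UncheckedCast.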
template <typename ConcreteVisitor>
  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);

  switch (map->visitor_id()) {
#define UNSAFE_STRING_TRANSITION_TARGET_CASE(VisitorIdType, TypeName)         \
  case kVisit##VisitorIdType:                                                 \
        ->template VisitMapPointerIfNeeded<VisitorId::kVisit##VisitorIdType>( \
    size = ObjectTraits<TypeName>::BodyDescriptor::SizeOf(map, object);       \
    ObjectTraits<TypeName>::BodyDescriptor::IterateBody(                      \
        map, UncheckedCast<TypeName>(object), size, visitor);                 \

#undef UNSAFE_STRING_TRANSITION_TARGET_CASE
template <typename ConcreteVisitor>
NewSpaceVisitor<ConcreteVisitor>::NewSpaceVisitor(Isolate* isolate)
    : ConcurrentHeapVisitor<ConcreteVisitor>(isolate) {}
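// A minimal sketch (illustrative only; the class name and overrides are
// assumptions, not part of this header) of a visitor that opts into
// concurrent visitation:
//
//   class SketchConcurrentVisitor final
//       : public ConcurrentHeapVisitor<SketchConcurrentVisitor> {
//    public:
//     explicit SketchConcurrentVisitor(Isolate* isolate)
//         : ConcurrentHeapVisitor<SketchConcurrentVisitor>(isolate) {}
//
//     // With this predicate returning true, Cast<T>() goes through
//     // ConcurrentVisitorCastHelper and strings are visited via
//     // VisitStringLocked().
//     static constexpr bool EnableConcurrentVisitation() { return true; }
//   };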