heap-visitor-inl.h
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_VISITOR_INL_H_
#define V8_HEAP_HEAP_VISITOR_INL_H_

#include "src/heap/heap-visitor.h"
// Include the non-inl header before the rest of the headers.

#include <optional>

#include "src/base/logging.h"
// ... (additional src/heap and src/objects includes elided in this listing)
#include "src/objects/map.h"
#include "src/objects/oddball.h"
// ...

#if V8_ENABLE_WEBASSEMBLY
#include "src/wasm/wasm-objects.h"
#endif  // V8_ENABLE_WEBASSEMBLY

namespace v8 {
namespace internal {

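// Array types that the GC may shrink in place ("right-trim"). For these, an
// object's size can legitimately decrease while a concurrent visitor runs,
// which is why VisitWithBodyDescriptor() below skips its size-consistency
// DCHECK for them under concurrent visitation.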
template <VisitorId visitor_id>
constexpr bool SupportsRightTrim() {
  switch (visitor_id) {
    case kVisitFixedArray:
    case kVisitFixedDoubleArray:
    case kVisitWeakFixedArray:
      return true;
    default:
      return false;
  }
}
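
// ContainsReadOnlyMap<visitor_id>() reports whether every object with the
// given visitor id is guaranteed to have a map in read-only space. The
// primary template conservatively answers false; specializations for the ids
// in VISITOR_IDS_WITH_READ_ONLY_MAPS_LIST are generated below.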
template <VisitorId visitor_id>
inline bool ContainsReadOnlyMap(PtrComprCageBase, Tagged<HeapObject>) {
  return false;
}

#define DEFINE_READ_ONLY_MAP_SPECIALIZATION(VisitorIdType)                    \
  template <>                                                                 \
  inline bool ContainsReadOnlyMap<VisitorId::kVisit##VisitorIdType>(          \
      PtrComprCageBase cage_base, Tagged<HeapObject> object) {                \
    /* If you see this DCHECK fail we encountered a Map with a VisitorId that \
     * should have only ever appeared in read-only space. */                  \
    DCHECK(HeapLayout::InReadOnlySpace(object->map(cage_base)));              \
    return true;                                                              \
  }
VISITOR_IDS_WITH_READ_ONLY_MAPS_LIST(DEFINE_READ_ONLY_MAP_SPECIALIZATION)
#undef DEFINE_READ_ONLY_MAP_SPECIALIZATION

template <typename ConcreteVisitor>
HeapVisitor<ConcreteVisitor>::HeapVisitor(LocalIsolate* isolate)
    : ObjectVisitorWithCageBases(PtrComprCageBase(isolate->cage_base()),
                                 PtrComprCageBase(isolate->code_cage_base())),
      heap_(isolate->heap()->heap()) {}

template <typename ConcreteVisitor>
HeapVisitor<ConcreteVisitor>::HeapVisitor(Isolate* isolate)
    : ObjectVisitorWithCageBases(isolate), heap_(isolate->heap()) {}

template <typename ConcreteVisitor>
HeapVisitor<ConcreteVisitor>::HeapVisitor(Heap* heap)
    : ObjectVisitorWithCageBases(heap), heap_(heap) {}

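// Cast helper used by the dispatch below: concrete visitors that opt in via
// ShouldUseUncheckedCast() skip the (debug-only) type checks of i::Cast.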
template <typename ConcreteVisitor>
template <typename T>
Tagged<T> HeapVisitor<ConcreteVisitor>::Cast(Tagged<HeapObject> object,
                                             const Heap* heap) {
  if constexpr (ConcreteVisitor::ShouldUseUncheckedCast()) {
    return i::UncheckedCast<T>(object);
  }
  return i::Cast<T>(object);
}

template <typename ConcreteVisitor>
size_t HeapVisitor<ConcreteVisitor>::Visit(Tagged<HeapObject> object)
  requires(!ConcreteVisitor::UsePrecomputedObjectSize())
{
  return Visit(object->map(cage_base()), object);
}

template <typename ConcreteVisitor>
size_t HeapVisitor<ConcreteVisitor>::Visit(Tagged<Map> map,
                                           Tagged<HeapObject> object)
  requires(!ConcreteVisitor::UsePrecomputedObjectSize())
{
  return Visit(map, object, MaybeObjectSize());
}

template <typename ConcreteVisitor>
size_t HeapVisitor<ConcreteVisitor>::Visit(Tagged<Map> map,
                                           Tagged<HeapObject> object,
                                           int object_size)
  requires(ConcreteVisitor::UsePrecomputedObjectSize())
{
  return Visit(map, object, MaybeObjectSize(object_size));
}

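// Central dispatch: every heap object's map carries a VisitorId, and the
// switch below routes the object to the matching Visit##TypeName() method on
// the concrete visitor. The CASE macros expand one case per entry of the
// type lists.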
template <typename ConcreteVisitor>
size_t HeapVisitor<ConcreteVisitor>::Visit(Tagged<Map> map,
                                           Tagged<HeapObject> object,
                                           MaybeObjectSize maybe_object_size) {
  if constexpr (ConcreteVisitor::UsePrecomputedObjectSize()) {
    DCHECK_EQ(maybe_object_size.AssumeSize(), object->SizeFromMap(map));
    static_assert(!ConcreteVisitor::EnableConcurrentVisitation());
  } else {
    DCHECK(maybe_object_size.IsNone());
  }

  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
  switch (map->visitor_id()) {
#define CASE(TypeName)                                                     \
  case kVisit##TypeName:                                                   \
    /* If this DCHECK fails, it means that the object type wasn't added    \
     * to the TRUSTED_VISITOR_ID_LIST.                                     \
     * Note: This would normally be just !IsTrustedObject(obj), however we \
     * might see trusted objects here before they've been migrated to      \
     * trusted space, hence the second condition. */                       \
    DCHECK(!InstanceTypeChecker::IsTrustedObject(map) ||                   \
           !HeapLayout::InTrustedSpace(object));                           \
    return visitor->Visit##TypeName(                                       \
        map, ConcreteVisitor::template Cast<TypeName>(object, heap_),      \
        maybe_object_size);
    TYPED_VISITOR_ID_LIST(CASE)
    TYPED_VISITOR_WITH_SLACK_ID_LIST(CASE)
    TORQUE_VISITOR_ID_LIST(CASE)
#undef CASE
#define CASE(TypeName)                                                     \
  case kVisit##TypeName:                                                   \
    DCHECK(InstanceTypeChecker::IsTrustedObject(map));                     \
    /* Trusted objects are protected from modifications by an attacker as  \
     * they are located outside of the sandbox. However, an attacker can   \
     * still craft their own fake trusted objects inside the sandbox. In   \
     * this case, bad things might happen if these objects are then        \
     * processed by e.g. an object visitor as they will typically assume   \
     * that these objects are trustworthy. The following check defends     \
     * against that by ensuring that the object is outside of the sandbox. \
     * See also crbug.com/c/1505089. */                                    \
    SBXCHECK(OutsideSandboxOrInReadonlySpace(object));                     \
    return visitor->Visit##TypeName(                                       \
        map, ConcreteVisitor::template Cast<TypeName>(object, heap_),      \
        maybe_object_size);
    TRUSTED_VISITOR_ID_LIST(CASE)
#undef CASE
    case kVisitShortcutCandidate:
      return visitor->VisitShortcutCandidate(
          map, ConcreteVisitor::template Cast<ConsString>(object, heap_),
          maybe_object_size);
    case kVisitJSObjectFast:
      return visitor->VisitJSObjectFast(
          map, ConcreteVisitor::template Cast<JSObject>(object, heap_),
          maybe_object_size);
    case kVisitJSApiObject:
      return visitor->VisitJSApiObject(
          map, ConcreteVisitor::template Cast<JSObject>(object, heap_),
          maybe_object_size);
    case kVisitStruct:
      return visitor->VisitStruct(map, object, maybe_object_size);
    case kVisitFiller:
      return visitor->VisitFiller(map, object, maybe_object_size);
    case kVisitFreeSpace:
      return visitor->VisitFreeSpace(map, i::Cast<FreeSpace>(object),
                                     maybe_object_size);
    case kDataOnlyVisitorIdCount:
    case kVisitorIdCount:
      UNREACHABLE();
  }
  // TODO(chromium:327992715): Remove once we have some clarity why execution
  // can reach this point.
  {
    Isolate* isolate;
    if (GetIsolateFromHeapObject(object, &isolate)) {
      isolate->PushParamsAndDie(
          reinterpret_cast<void*>(object.ptr()),
          reinterpret_cast<void*>(map.ptr()),
          reinterpret_cast<void*>(static_cast<intptr_t>(map->visitor_id())));
    }
  }
  UNREACHABLE();
}
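
// Usage sketch (hypothetical example, not part of V8): concrete visitors
// derive from HeapVisitor via CRTP and configure the dispatch through the
// static constexpr predicates queried above, e.g.:
//
//   class ExampleVisitor final : public HeapVisitor<ExampleVisitor> {
//    public:
//     explicit ExampleVisitor(Isolate* isolate)
//         : HeapVisitor<ExampleVisitor>(isolate) {}
//     static constexpr bool ShouldUseUncheckedCast() { return false; }
//     static constexpr bool UsePrecomputedObjectSize() { return false; }
//     static constexpr bool EnableConcurrentVisitation() { return false; }
//     static constexpr bool ShouldVisitMapPointer() { return true; }
//     static constexpr bool ShouldVisitReadOnlyMapPointer() { return true; }
//     static constexpr bool CanEncounterFillerOrFreeSpace() { return true; }
//     static constexpr bool ShouldVisitFullJSObject() { return false; }
//     // Visit##TypeName() overrides may be added to customize behavior.
//   };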

template <typename ConcreteVisitor>
template <VisitorId visitor_id>
void HeapVisitor<ConcreteVisitor>::VisitMapPointerIfNeeded(
    Tagged<HeapObject> host) {
  if constexpr (!ConcreteVisitor::ShouldVisitMapPointer()) {
    return;
  }
  if constexpr (!ConcreteVisitor::ShouldVisitReadOnlyMapPointer()) {
    if (ContainsReadOnlyMap<visitor_id>(cage_base(), host)) {
      return;
    }
  }
  static_cast<ConcreteVisitor*>(this)->VisitMapPointer(host);
}

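// Generates the default Visit##TypeName() implementations: each forwards to
// VisitWithBodyDescriptor() with the type's statically known BodyDescriptor.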
#define VISIT(TypeName)                                                 \
  template <typename ConcreteVisitor>                                   \
  size_t HeapVisitor<ConcreteVisitor>::Visit##TypeName(                 \
      Tagged<Map> map, Tagged<TypeName> object,                         \
      MaybeObjectSize maybe_object_size) {                              \
    return static_cast<ConcreteVisitor*>(this)                          \
        ->template VisitWithBodyDescriptor<                             \
            VisitorId::kVisit##TypeName, TypeName,                      \
            ObjectTraits<TypeName>::BodyDescriptor>(map, object,        \
                                                    maybe_object_size); \
  }

TYPED_VISITOR_ID_LIST(VISIT)
TORQUE_VISITOR_ID_LIST(VISIT)
TRUSTED_VISITOR_ID_LIST(VISIT)
#undef VISIT

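// JSObject subclasses in TYPED_VISITOR_WITH_SLACK_ID_LIST may carry unused
// in-object slack, so their visitors go through VisitJSObjectSubclass(),
// which can restrict iteration to the used part of the object (see below).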
#define VISIT_WITH_SLACK(TypeName)                                            \
  template <typename ConcreteVisitor>                                         \
  size_t HeapVisitor<ConcreteVisitor>::Visit##TypeName(                       \
      Tagged<Map> map, Tagged<TypeName> object,                               \
      MaybeObjectSize maybe_object_size) {                                    \
    return static_cast<ConcreteVisitor*>(this)                                \
        ->template VisitJSObjectSubclass<TypeName, TypeName::BodyDescriptor>( \
            map, object, maybe_object_size);                                  \
  }

TYPED_VISITOR_WITH_SLACK_ID_LIST(VISIT_WITH_SLACK)
#undef VISIT_WITH_SLACK

template <typename ConcreteVisitor>
size_t HeapVisitor<ConcreteVisitor>::VisitShortcutCandidate(
    Tagged<Map> map, Tagged<ConsString> object,
    MaybeObjectSize maybe_object_size) {
  return static_cast<ConcreteVisitor*>(this)->VisitConsString(
      map, object, maybe_object_size);
}

template <typename ConcreteVisitor>
size_t HeapVisitor<ConcreteVisitor>::VisitFiller(
    Tagged<Map> map, Tagged<HeapObject> object,
    MaybeObjectSize maybe_object_size) {
  if constexpr (!ConcreteVisitor::CanEncounterFillerOrFreeSpace()) {
    UNREACHABLE();
  }
  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
  visitor->template VisitMapPointerIfNeeded<VisitorId::kVisitFiller>(object);
  return ConcreteVisitor::UsePrecomputedObjectSize()
             ? maybe_object_size.AssumeSize()
             : map->instance_size();
}

template <typename ConcreteVisitor>
size_t HeapVisitor<ConcreteVisitor>::VisitFreeSpace(
    Tagged<Map> map, Tagged<FreeSpace> object,
    MaybeObjectSize maybe_object_size) {
  if constexpr (!ConcreteVisitor::CanEncounterFillerOrFreeSpace()) {
    UNREACHABLE();
  }
  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
  visitor->template VisitMapPointerIfNeeded<VisitorId::kVisitFreeSpace>(object);
  return object->size(kRelaxedLoad);
}

template <typename ConcreteVisitor>
size_t HeapVisitor<ConcreteVisitor>::VisitJSObjectFast(
    Tagged<Map> map, Tagged<JSObject> object,
    MaybeObjectSize maybe_object_size) {
  return static_cast<ConcreteVisitor*>(this)
      ->template VisitJSObjectSubclass<JSObject, JSObject::FastBodyDescriptor>(
          map, object, maybe_object_size);
}

template <typename ConcreteVisitor>
size_t HeapVisitor<ConcreteVisitor>::VisitJSApiObject(
    Tagged<Map> map, Tagged<JSObject> object,
    MaybeObjectSize maybe_object_size) {
  return static_cast<ConcreteVisitor*>(this)
      ->template VisitJSObjectSubclass<
          JSObject, JSAPIObjectWithEmbedderSlots::BodyDescriptor>(
          map, object, maybe_object_size);
}

template <typename ConcreteVisitor>
size_t HeapVisitor<ConcreteVisitor>::VisitStruct(
    Tagged<Map> map, Tagged<HeapObject> object,
    MaybeObjectSize maybe_object_size) {
  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
  int size = ConcreteVisitor::UsePrecomputedObjectSize()
                 ? static_cast<int>(maybe_object_size.AssumeSize())
                 : map->instance_size();
  visitor->template VisitMapPointerIfNeeded<VisitorId::kVisitStruct>(object);
  StructBodyDescriptor::IterateBody(map, object, size, visitor);
  return size;
}

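// Visits a JSObject subclass while in-object slack tracking may still be in
// progress: the map's instance size can shrink later, so the visitor may
// iterate only the currently used part of the object.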
template <typename ConcreteVisitor>
template <typename T, typename TBodyDescriptor>
size_t HeapVisitor<ConcreteVisitor>::VisitJSObjectSubclass(
    Tagged<Map> map, Tagged<T> object, MaybeObjectSize maybe_object_size) {
  // JSObject types are subject to slack tracking. At the end of slack
  // tracking, a Map's instance size is adjusted properly. Since this changes
  // the instance size, we cannot DCHECK that `SizeFromMap()` is consistent
  // with `TBodyDescriptor::SizeOf()`, as that would require taking a snapshot
  // of the Map.

  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
  visitor->template VisitMapPointerIfNeeded<VisitorId::kVisitJSObject>(object);

  const size_t size = ConcreteVisitor::UsePrecomputedObjectSize()
                          ? maybe_object_size.AssumeSize()
                          : TBodyDescriptor::SizeOf(map, object);

  int visitation_size = static_cast<int>(size);

  if (!ConcreteVisitor::ShouldVisitFullJSObject()) {
    // It is important to visit only the used fields and to ignore the slack
    // fields, because the slack fields may be trimmed concurrently and we
    // don't want to find fillers (slack) during pointer visitation.
    const int used_size = map->UsedInstanceSize();
    DCHECK_LE(used_size, size);
    DCHECK_GE(used_size, JSObject::GetHeaderSize(map));
    visitation_size = used_size;
  }

  TBodyDescriptor::IterateBody(map, object, visitation_size, visitor);

  return size;
}

template <typename ConcreteVisitor>
template <VisitorId visitor_id, typename T, typename TBodyDescriptor>
size_t HeapVisitor<ConcreteVisitor>::VisitWithBodyDescriptor(
    Tagged<Map> map, Tagged<T> object, MaybeObjectSize maybe_object_size) {
  // If you see the following DCHECK fail, then the size computation of the
  // BodyDescriptor doesn't match the size returned via obj.Size(). This is
  // problematic, as the GC requires those sizes to match for accounting
  // reasons. The fix likely involves adding a padding field in the object
  // definitions.
  //
  // We can only perform this check for types that do not support right
  // trimming when running concurrently. `RefineAllocatedBytesAfterSweeping()`
  // ensures that we only see sizes that get smaller during marking.
#ifdef DEBUG
  if constexpr (!SupportsRightTrim<visitor_id>() ||
                !ConcreteVisitor::EnableConcurrentVisitation()) {
    DCHECK_EQ(object->SizeFromMap(map), TBodyDescriptor::SizeOf(map, object));
  }
#endif  // DEBUG
  DCHECK(!map->IsInobjectSlackTrackingInProgress());

  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
  visitor->template VisitMapPointerIfNeeded<visitor_id>(object);
  const int size = ConcreteVisitor::UsePrecomputedObjectSize()
                       ? static_cast<int>(maybe_object_size.AssumeSize())
                       : TBodyDescriptor::SizeOf(map, object);
  TBodyDescriptor::IterateBody(map, object, size, visitor);
  return size;
}

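// Fast pre-filter for slot visitation: a relaxed raw load suffices to detect
// small Smis and read-only objects, which need no further work (the filter
// returns std::nullopt for them); all other values are converted back to a
// full tagged object.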
template <typename ConcreteVisitor>
template <typename TSlot>
std::optional<Tagged<Object>>
HeapVisitor<ConcreteVisitor>::GetObjectFilterReadOnlyAndSmiFast(
    TSlot slot) const {
  auto raw = slot.Relaxed_Load_Raw();
  // `raw` is either Tagged_t or Address, depending on the slot type. Both can
  // be cast to Tagged_t for the fast check.
  if (FastInReadOnlySpaceOrSmallSmi(static_cast<Tagged_t>(raw))) {
    return std::nullopt;
  }
  return TSlot::RawToTagged(ObjectVisitorWithCageBases::cage_base(), raw);
}

template <typename ConcreteVisitor>
ConcurrentHeapVisitor<ConcreteVisitor>::ConcurrentHeapVisitor(Isolate* isolate)
    : HeapVisitor<ConcreteVisitor>(isolate) {}

template <typename T>
struct ConcurrentVisitorCastHelper {
  static V8_INLINE Tagged<T> Cast(Tagged<HeapObject> object) {
    return i::Cast<T>(object);
  }
};

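// During concurrent visitation a string's map can change under the visitor
// (e.g. an in-place transition to a ThinString or an external string), so a
// checked cast could fail spuriously. The specializations generated below
// therefore switch to unchecked casts for the affected string types.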
#define UNCHECKED_CAST(VisitorId, TypeName)                               \
  template <>                                                             \
  V8_INLINE Tagged<TypeName> ConcurrentVisitorCastHelper<TypeName>::Cast( \
      Tagged<HeapObject> object) {                                        \
    return UncheckedCast<TypeName>(object);                               \
  }
UNSAFE_STRING_TRANSITION_SOURCES(UNCHECKED_CAST)
// Casts are also needed for unsafe ones for the initial dispatch in
// HeapVisitor.
UNSAFE_STRING_TRANSITION_TARGETS(UNCHECKED_CAST)
#undef UNCHECKED_CAST

template <typename ConcreteVisitor>
template <typename T>
Tagged<T> ConcurrentHeapVisitor<ConcreteVisitor>::Cast(
    Tagged<HeapObject> object, const Heap* heap) {
  if constexpr (ConcreteVisitor::EnableConcurrentVisitation()) {
    return ConcurrentVisitorCastHelper<T>::Cast(object);
  }
  return i::Cast<T>(object);
}

#define VISIT_AS_LOCKED_STRING(VisitorId, TypeName)                          \
  template <typename ConcreteVisitor>                                        \
  size_t ConcurrentHeapVisitor<ConcreteVisitor>::Visit##TypeName(            \
      Tagged<Map> map, Tagged<TypeName> object,                              \
      MaybeObjectSize maybe_object_size) {                                   \
    if constexpr (ConcreteVisitor::EnableConcurrentVisitation()) {           \
      return VisitStringLocked(object);                                      \
    }                                                                        \
    return HeapVisitor<ConcreteVisitor>::Visit##TypeName(map, object,        \
                                                         maybe_object_size); \
  }

UNSAFE_STRING_TRANSITION_SOURCES(VISIT_AS_LOCKED_STRING)
#undef VISIT_AS_LOCKED_STRING

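// Slow path for strings whose representation may change concurrently: take
// the per-object lock, re-read the map, and visit the string according to
// its (possibly updated) visitor id.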
template <typename ConcreteVisitor>
template <typename T>
size_t ConcurrentHeapVisitor<ConcreteVisitor>::VisitStringLocked(
    Tagged<T> object) {
  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
  ObjectLockGuard guard(object);
  // The object has been locked. At this point shared read access is
  // guaranteed, but we must re-read the map and check whether the string has
  // transitioned.
  Tagged<Map> map = object->map();
  int size;
  switch (map->visitor_id()) {
#define UNSAFE_STRING_TRANSITION_TARGET_CASE(VisitorIdType, TypeName)         \
  case kVisit##VisitorIdType:                                                 \
    visitor                                                                   \
        ->template VisitMapPointerIfNeeded<VisitorId::kVisit##VisitorIdType>( \
            object);                                                          \
    size = ObjectTraits<TypeName>::BodyDescriptor::SizeOf(map, object);       \
    ObjectTraits<TypeName>::BodyDescriptor::IterateBody(                      \
        map, UncheckedCast<TypeName>(object), size, visitor);                 \
    break;

    UNSAFE_STRING_TRANSITION_TARGETS(UNSAFE_STRING_TRANSITION_TARGET_CASE)
#undef UNSAFE_STRING_TRANSITION_TARGET_CASE
    default:
      UNREACHABLE();
  }
  return size;
}

template <typename ConcreteVisitor>
NewSpaceVisitor<ConcreteVisitor>::NewSpaceVisitor(Isolate* isolate)
    : ConcurrentHeapVisitor<ConcreteVisitor>(isolate) {}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_HEAP_VISITOR_INL_H_