scavenger-inl.h
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SCAVENGER_INL_H_
#define V8_HEAP_SCAVENGER_INL_H_

#include "src/heap/scavenger.h"
// Include the non-inl header before the rest of the headers.

#include "src/heap/new-spaces.h"
#include "src/objects/map.h"

namespace v8 {
namespace internal {

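// Heuristic that decides when a scavenger task should interrupt draining the
// regular copied-object worklist and process the promoted-object worklist
// instead: once the current push segment of the promoted list is half full.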
bool Scavenger::ShouldEagerlyProcessPromotedList() const {
  // Threshold when to prioritize processing of the promoted list. Right
  // now we only look into the regular object list.
  const int kProcessPromotedListThreshold = kPromotedListSegmentSize / 2;
  return local_promoted_list_.PushSegmentSize() >=
         kProcessPromotedListThreshold;
}

void Scavenger::SynchronizePageAccess(Tagged<MaybeObject> object) const {
#ifdef THREAD_SANITIZER
  // Perform a dummy acquire load to tell TSAN that there is no data race
  // with page initialization.
  Tagged<HeapObject> heap_object;
  if (object.GetHeapObject(&heap_object)) {
    MemoryChunk::FromHeapObject(heap_object)->SynchronizedLoad();
  }
#endif
}

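// Copies `source` into the already reserved `target` memory. The forwarding
// pointer is installed first via a relaxed CAS on the map word; only the
// winning thread copies the object body and reports pretenuring feedback.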
bool Scavenger::MigrateObject(Tagged<Map> map, Tagged<HeapObject> source,
                              Tagged<HeapObject> target, int size,
                              PromotionHeapChoice promotion_heap_choice) {
  // This CAS can be relaxed because we do not access the object body if the
  // object was already copied by another thread. We only access the page header
  // of such objects and this is safe because of the memory fence after page
  // header initialization.
  if (!source->relaxed_compare_and_swap_map_word_forwarded(
          MapWord::FromMap(map), target)) {
    // Other task migrated the object.
    return false;
  }

  // Copy the content of source to target. Note that we do this on purpose
  // *after* the CAS. This avoids copying of the object in the (unlikely)
  // failure case. It also helps us to ensure that we do not rely on non-relaxed
  // memory ordering for the CAS above.
  target->set_map_word(map, kRelaxedStore);
  heap()->CopyBlock(target.address() + kTaggedSize,
                    source.address() + kTaggedSize, size - kTaggedSize);

  if (V8_UNLIKELY(is_logging_)) {
    heap()->OnMoveEvent(source, target, size);
  }

  PretenuringHandler::UpdateAllocationSite(heap(), map, source, size,
                                           &local_pretenuring_feedback_);

  return true;
}

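// Tries to copy `object` into the to-space of the young generation using the
// per-task evacuation allocator. If another thread wins the forwarding race,
// the reserved memory is returned to the allocator and the slot is updated to
// point at the winner's copy.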
template <typename THeapObjectSlot>
CopyAndForwardResult Scavenger::SemiSpaceCopyObject(
    Tagged<Map> map, THeapObjectSlot slot, Tagged<HeapObject> object,
    int object_size, ObjectFields object_fields) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  DCHECK(heap()->AllowedToBeMigrated(map, object, NEW_SPACE));
  AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
  AllocationResult allocation =
      allocator_.Allocate(NEW_SPACE, object_size, alignment);

  Tagged<HeapObject> target;
  if (allocation.To(&target)) {
    DCHECK(heap()->marking_state()->IsUnmarked(target));
    const bool self_success =
        MigrateObject(map, object, target, object_size, kPromoteIntoLocalHeap);
    if (!self_success) {
      allocator_.FreeLast(NEW_SPACE, target, object_size);
      MapWord map_word = object->map_word(kRelaxedLoad);
      UpdateHeapObjectReferenceSlot(slot, map_word.ToForwardingAddress(object));
      SynchronizePageAccess(*slot);
      DCHECK(!Heap::InFromPage(*slot));
      return Heap::InToPage(*slot)
                 ? CopyAndForwardResult::SUCCESS_YOUNG_GENERATION
                 : CopyAndForwardResult::SUCCESS_OLD_GENERATION;
    }
    UpdateHeapObjectReferenceSlot(slot, target);
    if (object_fields == ObjectFields::kMaybePointers) {
      local_copied_list_.Push(target);
    }
    copied_size_ += object_size;
    return CopyAndForwardResult::SUCCESS_YOUNG_GENERATION;
  }
  return CopyAndForwardResult::FAILURE;
}

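// Promotes `object` out of the young generation, either into the local old
// space or into the shared space, depending on `promotion_heap_choice`.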
template <typename THeapObjectSlot,
          Scavenger::PromotionHeapChoice promotion_heap_choice>
CopyAndForwardResult Scavenger::PromoteObject(Tagged<Map> map,
                                              THeapObjectSlot slot,
                                              Tagged<HeapObject> object,
                                              int object_size,
                                              ObjectFields object_fields) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  DCHECK_GE(object_size, Heap::kMinObjectSizeInTaggedWords * kTaggedSize);
  AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
  AllocationResult allocation = allocator_.Allocate(
      promotion_heap_choice == kPromoteIntoLocalHeap ? OLD_SPACE : SHARED_SPACE,
      object_size, alignment);

  Tagged<HeapObject> target;
  if (allocation.To(&target)) {
    DCHECK(heap()->non_atomic_marking_state()->IsUnmarked(target));
    const bool self_success =
        MigrateObject(map, object, target, object_size, promotion_heap_choice);
    if (!self_success) {
      allocator_.FreeLast(promotion_heap_choice == kPromoteIntoLocalHeap
                              ? OLD_SPACE
                              : SHARED_SPACE,
                          target, object_size);

      MapWord map_word = object->map_word(kRelaxedLoad);
      UpdateHeapObjectReferenceSlot(slot, map_word.ToForwardingAddress(object));
      SynchronizePageAccess(*slot);
      DCHECK(!Heap::InFromPage(*slot));
      return Heap::InToPage(*slot)
                 ? CopyAndForwardResult::SUCCESS_YOUNG_GENERATION
                 : CopyAndForwardResult::SUCCESS_OLD_GENERATION;
    }
    UpdateHeapObjectReferenceSlot(slot, target);

    if (object_fields == ObjectFields::kMaybePointers) {
      local_promoted_list_.Push({target, map, object_size});
    }
    promoted_size_ += object_size;
    return CopyAndForwardResult::SUCCESS_OLD_GENERATION;
  }
  return CopyAndForwardResult::FAILURE;
}

SlotCallbackResult Scavenger::RememberedSetEntryNeeded(
    CopyAndForwardResult result) {
  DCHECK_NE(CopyAndForwardResult::FAILURE, result);
  return result == CopyAndForwardResult::SUCCESS_YOUNG_GENERATION ? KEEP_SLOT
                                                                  : REMOVE_SLOT;
}

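// Young large objects are never copied. They survive in place: the map word is
// self-forwarded via CAS and the object is recorded as surviving so that it
// can be promoted to the old large object space later.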
bool Scavenger::HandleLargeObject(Tagged<Map> map, Tagged<HeapObject> object,
                                  int object_size, ObjectFields object_fields) {
  if (MemoryChunk::FromHeapObject(object)->InNewLargeObjectSpace()) {
    DCHECK_EQ(NEW_LO_SPACE,
              MutablePageMetadata::FromHeapObject(object)->owner_identity());
    if (object->relaxed_compare_and_swap_map_word_forwarded(
            MapWord::FromMap(map), object)) {
      local_surviving_new_large_objects_.insert({object, map});
      promoted_size_ += object_size;
      if (object_fields == ObjectFields::kMaybePointers) {
        local_promoted_list_.Push({object, map, object_size});
      }
    }
    return true;
  }
  return false;
}

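// Default evacuation policy: large objects survive in place; other objects are
// copied within the semi-space unless they should be promoted (i.e. they
// already survived a previous scavenge), in which case promotion is attempted
// first, with the other semi-space as a fallback. Running out of both spaces
// is a fatal OOM.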
template <typename THeapObjectSlot,
          Scavenger::PromotionHeapChoice promotion_heap_choice>
SlotCallbackResult Scavenger::EvacuateObjectDefault(
    Tagged<Map> map, THeapObjectSlot slot, Tagged<HeapObject> object,
    int object_size, ObjectFields object_fields) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  SLOW_DCHECK(object->SizeFromMap(map) == object_size);
  CopyAndForwardResult result;

  if (HandleLargeObject(map, object, object_size, object_fields)) {
    return REMOVE_SLOT;
  }

  SLOW_DCHECK(static_cast<size_t>(object_size) <=
              MemoryChunkLayout::AllocatableMemoryInDataPage());

  if (!heap()->semi_space_new_space()->ShouldBePromoted(object.address())) {
    // A semi-space copy may fail due to fragmentation. In that case, we
    // try to promote the object.
    result = SemiSpaceCopyObject(map, slot, object, object_size, object_fields);
    if (result != CopyAndForwardResult::FAILURE) {
      return RememberedSetEntryNeeded(result);
    }
  }

  // We may want to promote this object if the object was already semi-space
  // copied in a previous young generation GC or if the semi-space copy above
  // failed.
  result = PromoteObject<THeapObjectSlot, promotion_heap_choice>(
      map, slot, object, object_size, object_fields);
  if (result != CopyAndForwardResult::FAILURE) {
    return RememberedSetEntryNeeded(result);
  }

  // If promotion failed, we try to copy the object to the other semi-space.
  result = SemiSpaceCopyObject(map, slot, object, object_size, object_fields);
  if (result != CopyAndForwardResult::FAILURE) {
    return RememberedSetEntryNeeded(result);
  }

  heap()->FatalProcessOutOfMemory("Scavenger: semi-space copy");
  UNREACHABLE();
}

template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::EvacuateThinString(Tagged<Map> map,
                                                 THeapObjectSlot slot,
                                                 Tagged<ThinString> object,
                                                 int object_size) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  if (shortcut_strings_) {
    // The ThinString should die after Scavenge, so avoid writing the proper
    // forwarding pointer and instead just signal the actual object as forwarded
    // reference.
    Tagged<String> actual = object->actual();
    // ThinStrings always refer to internalized strings, which are always in old
    // space.
    DCHECK(!HeapLayout::InYoungGeneration(actual));
    UpdateHeapObjectReferenceSlot(slot, actual);
    return REMOVE_SLOT;
  }

  DCHECK_EQ(ObjectFields::kMaybePointers,
            Map::ObjectFieldsFrom(map->visitor_id()));
  return EvacuateObjectDefault(map, slot, object, object_size,
                               ObjectFields::kMaybePointers);
}

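// Cons strings whose second part is the empty string are "shortcutted": the
// slot is updated to point directly at the first part, so the ConsString
// wrapper itself does not need to survive the scavenge.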
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::EvacuateShortcutCandidate(
    Tagged<Map> map, THeapObjectSlot slot, Tagged<ConsString> object,
    int object_size) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  DCHECK(IsShortcutCandidate(map->instance_type()));

  if (shortcut_strings_ &&
      object->unchecked_second() == ReadOnlyRoots(heap()).empty_string()) {
    Tagged<HeapObject> first = Cast<HeapObject>(object->unchecked_first());

    UpdateHeapObjectReferenceSlot(slot, first);

    if (!HeapLayout::InYoungGeneration(first)) {
      object->set_map_word_forwarded(first, kRelaxedStore);
      return REMOVE_SLOT;
    }

    MapWord first_word = first->map_word(kRelaxedLoad);
    if (first_word.IsForwardingAddress()) {
      Tagged<HeapObject> target = first_word.ToForwardingAddress(first);

      UpdateHeapObjectReferenceSlot(slot, target);
      SynchronizePageAccess(target);
      object->set_map_word_forwarded(target, kRelaxedStore);
      return HeapLayout::InYoungGeneration(target) ? KEEP_SLOT : REMOVE_SLOT;
    }
    Tagged<Map> first_map = first_word.ToMap();
    SlotCallbackResult result = EvacuateObjectDefault(
        first_map, slot, first, first->SizeFromMap(first_map),
        Map::ObjectFieldsFrom(first_map->visitor_id()));
    object->set_map_word_forwarded(slot.ToHeapObject(), kRelaxedStore);
    return result;
  }
  DCHECK_EQ(ObjectFields::kMaybePointers,
            Map::ObjectFieldsFrom(map->visitor_id()));
  return EvacuateObjectDefault(map, slot, object, object_size,
                               ObjectFields::kMaybePointers);
}

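// Sequential strings that can be internalized in place are promoted into the
// shared heap when the shared string table is enabled, so that they can be
// referenced across isolates.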
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::EvacuateInPlaceInternalizableString(
    Tagged<Map> map, THeapObjectSlot slot, Tagged<String> object,
    int object_size, ObjectFields object_fields) {
  DCHECK(String::IsInPlaceInternalizable(map->instance_type()));
  DCHECK_EQ(object_fields, Map::ObjectFieldsFrom(map->visitor_id()));
  if (shared_string_table_) {
    return EvacuateObjectDefault<THeapObjectSlot, kPromoteIntoSharedHeap>(
        map, slot, object, object_size, object_fields);
  }
  return EvacuateObjectDefault(map, slot, object, object_size, object_fields);
}

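// Dispatches evacuation on the visitor id of the map: thin strings, shortcut
// cons strings, and in-place internalizable sequential strings get special
// handling; everything else goes through EvacuateObjectDefault.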
314template <typename THeapObjectSlot>
315SlotCallbackResult Scavenger::EvacuateObject(THeapObjectSlot slot,
316 Tagged<Map> map,
317 Tagged<HeapObject> source) {
318 static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
319 std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
320 "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
321 SLOW_DCHECK(Heap::InFromPage(source));
322 SLOW_DCHECK(!MapWord::FromMap(map).IsForwardingAddress());
323 int size = source->SizeFromMap(map);
324 // Cannot use ::cast() below because that would add checks in debug mode
325 // that require re-reading the map.
326 VisitorId visitor_id = map->visitor_id();
327 switch (visitor_id) {
328 case kVisitThinString:
329 // At the moment we don't allow weak pointers to thin strings.
330 DCHECK(!(*slot).IsWeak());
331 return EvacuateThinString(map, slot, UncheckedCast<ThinString>(source),
332 size);
333 case kVisitShortcutCandidate:
334 DCHECK(!(*slot).IsWeak());
335 // At the moment we don't allow weak pointers to cons strings.
336 return EvacuateShortcutCandidate(map, slot,
337 UncheckedCast<ConsString>(source), size);
338 case kVisitSeqOneByteString:
339 case kVisitSeqTwoByteString:
340 DCHECK(String::IsInPlaceInternalizable(map->instance_type()));
341 static_assert(Map::ObjectFieldsFrom(kVisitSeqOneByteString) ==
342 Map::ObjectFieldsFrom(kVisitSeqTwoByteString));
344 map, slot, UncheckedCast<String>(source), size,
345 Map::ObjectFieldsFrom(kVisitSeqOneByteString));
346 default:
347 return EvacuateObjectDefault(map, slot, source, size,
348 Map::ObjectFieldsFrom(visitor_id));
349 }
350}
351
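// Main per-object entry point of the scavenger: if `object` has already been
// forwarded, only the slot is updated; otherwise the object is evacuated and a
// forwarding pointer is installed.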
352template <typename THeapObjectSlot>
353SlotCallbackResult Scavenger::ScavengeObject(THeapObjectSlot p,
354 Tagged<HeapObject> object) {
355 static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
356 std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
357 "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
358 DCHECK(Heap::InFromPage(object));
359
360 // Check whether object was already successfully forwarded by the CAS in
361 // MigrateObject. No memory ordering required because we only access the page
362 // header of a relocated object. Page header initialization uses a memory
363 // fence.
364 MapWord first_word = object->map_word(kRelaxedLoad);
365
366 // If the first word is a forwarding address, the object has already been
367 // copied.
368 if (first_word.IsForwardingAddress()) {
369 Tagged<HeapObject> dest = first_word.ToForwardingAddress(object);
371 dest == object);
372 if (dest == object) {
375 chunk->IsLargePage()
376 ? REMOVE_SLOT
377 : KEEP_SLOT;
378 }
379
382 // A forwarded object in new space is either in the second (to) semi space,
383 // a large object, or a pinned object on a quarantined page.
384 // Pinned objects have a self forwarding map word. However, since forwarding
385 // addresses are set with relaxed atomics and before the object is actually
386 // copied, it is unfortunately not safe to access `dest` to check whether it
387 // is pinned or not.
389 Heap::InToPage(dest) || Heap::IsLargeObject(dest) ||
390 MemoryChunk::FromHeapObject(dest)->IsQuarantined());
391
392 // This load forces us to have memory ordering for the map load above. We
393 // need to have the page header properly initialized.
395 return !chunk->InYoungGeneration() || chunk->IsLargePage() ? REMOVE_SLOT
396 : KEEP_SLOT;
397 }
398
399 Tagged<Map> map = first_word.ToMap();
400 // AllocationMementos are unrooted and shouldn't survive a scavenge
401 DCHECK_NE(ReadOnlyRoots(heap()).allocation_memento_map(), map);
402 // Call the slow part of scavenge object.
403 return EvacuateObject(p, map, object);
404}
405
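// Used when processing remembered-set (old-to-new) slots: scavenges the
// referenced object if it still lives in the from-page and reports whether the
// slot needs to stay in the remembered set.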
template <typename TSlot>
SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap, TSlot slot) {
  static_assert(
      std::is_same<TSlot, FullMaybeObjectSlot>::value ||
          std::is_same<TSlot, MaybeObjectSlot>::value,
      "Only FullMaybeObjectSlot and MaybeObjectSlot are expected here");
  using THeapObjectSlot = typename TSlot::THeapObjectSlot;
  Tagged<MaybeObject> object = *slot;
  if (Heap::InFromPage(object)) {
    Tagged<HeapObject> heap_object = object.GetHeapObject();

    SlotCallbackResult result =
        ScavengeObject(THeapObjectSlot(slot), heap_object);
    DCHECK_IMPLIES(result == REMOVE_SLOT,
                   !HeapLayout::InYoungGeneration((*slot).GetHeapObject()) ||
                       MemoryChunk::FromHeapObject((*slot).GetHeapObject())
                           ->IsLargePage() ||
                       MemoryChunk::FromHeapObject((*slot).GetHeapObject())
                           ->IsQuarantined());
    return result;
  } else if (Heap::InToPage(object)) {
    // Already updated slot. This can happen when processing of the work list
    // is interleaved with processing roots.
    return KEEP_SLOT;
  }
  // Slots can point to "to" space if the slot has been recorded multiple
  // times in the remembered set. We remove the redundant slot now.
  return REMOVE_SLOT;
}

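// Visitor that iterates the body of a copied or promoted object and scavenges
// every young-generation object it references.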
class ScavengeVisitor final : public NewSpaceVisitor<ScavengeVisitor> {
 public:
  explicit ScavengeVisitor(Scavenger* scavenger);

  V8_INLINE void VisitPointers(Tagged<HeapObject> host, ObjectSlot start,
                               ObjectSlot end) final;

  V8_INLINE void VisitPointers(Tagged<HeapObject> host, MaybeObjectSlot start,
                               MaybeObjectSlot end) final;

  V8_INLINE size_t VisitJSArrayBuffer(Tagged<Map> map,
                                      Tagged<JSArrayBuffer> object,
                                      MaybeObjectSize);
  V8_INLINE size_t VisitJSApiObject(Tagged<Map> map, Tagged<JSObject> object,
                                    MaybeObjectSize);
  V8_INLINE size_t VisitEphemeronHashTable(Tagged<Map> map,
                                           Tagged<EphemeronHashTable> object,
                                           MaybeObjectSize);
  V8_INLINE void VisitExternalPointer(Tagged<HeapObject> host,
                                      ExternalPointerSlot slot);

  V8_INLINE static constexpr bool CanEncounterFillerOrFreeSpace() {
    return false;
  }

  template <typename T>
  static V8_INLINE Tagged<T> Cast(Tagged<HeapObject> object, const Heap* heap) {
    return GCSafeCast<T>(object, heap);
  }

 private:
  template <typename TSlot>
  V8_INLINE void VisitHeapObjectImpl(TSlot slot,
                                     Tagged<HeapObject> heap_object);

  template <typename TSlot>
  V8_INLINE void VisitPointersImpl(Tagged<HeapObject> host, TSlot start,
                                   TSlot end);

  Scavenger* const scavenger_;
};

ScavengeVisitor::ScavengeVisitor(Scavenger* scavenger)
    : scavenger_(scavenger) {}

void ScavengeVisitor::VisitPointers(Tagged<HeapObject> host, ObjectSlot start,
                                    ObjectSlot end) {
  return VisitPointersImpl(host, start, end);
}

void ScavengeVisitor::VisitPointers(Tagged<HeapObject> host,
                                    MaybeObjectSlot start,
                                    MaybeObjectSlot end) {
  return VisitPointersImpl(host, start, end);
}

template <typename TSlot>
void ScavengeVisitor::VisitHeapObjectImpl(TSlot slot,
                                          Tagged<HeapObject> heap_object) {
  if (HeapLayout::InYoungGeneration(heap_object)) {
    using THeapObjectSlot = typename TSlot::THeapObjectSlot;
    scavenger_->ScavengeObject(THeapObjectSlot(slot), heap_object);
  }
}

template <typename TSlot>
void ScavengeVisitor::VisitPointersImpl(Tagged<HeapObject> host, TSlot start,
                                        TSlot end) {
  for (TSlot slot = start; slot < end; ++slot) {
    const std::optional<Tagged<Object>> optional_object =
        this->GetObjectFilterReadOnlyAndSmiFast(slot);
    if (!optional_object) {
      continue;
    }
    typename TSlot::TObject object = *optional_object;
    Tagged<HeapObject> heap_object;
    // Treat weak references as strong.
    if (object.GetHeapObject(&heap_object)) {
      VisitHeapObjectImpl(slot, heap_object);
    }
  }
}

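// JSArrayBuffers need extra work: the ArrayBufferExtension is marked as
// reachable from the young generation so that the backing store is kept alive
// when the array buffers are swept.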
size_t ScavengeVisitor::VisitJSArrayBuffer(Tagged<Map> map,
                                           Tagged<JSArrayBuffer> object,
                                           MaybeObjectSize) {
  object->YoungMarkExtension();
  int size = JSArrayBuffer::BodyDescriptor::SizeOf(map, object);
  JSArrayBuffer::BodyDescriptor::IterateBody(map, object, size, this);
  return size;
}

void ScavengeVisitor::VisitExternalPointer(Tagged<HeapObject> host,
                                           ExternalPointerSlot slot) {
#ifdef V8_COMPRESS_POINTERS
  DCHECK(!slot.tag_range().IsEmpty());
  DCHECK(!IsSharedExternalPointerType(slot.tag_range()));

  // TODO(chromium:337580006): Remove when pointer compression always uses
  // EPT.
  if (!slot.HasExternalPointerHandle()) return;

  ExternalPointerHandle handle = slot.Relaxed_LoadHandle();
  Heap* heap = scavenger_->heap();
  ExternalPointerTable& table = heap->isolate()->external_pointer_table();
  table.Mark(heap->young_external_pointer_space(), handle, slot.address());
#endif  // V8_COMPRESS_POINTERS
}

size_t ScavengeVisitor::VisitEphemeronHashTable(
    Tagged<Map> map, Tagged<EphemeronHashTable> table, MaybeObjectSize) {
  // Register table with the scavenger, so it can take care of the weak keys
  // later. This allows us to only iterate the tables' values, which are treated
  // as strong independently of whether the key is live.
  scavenger_->AddEphemeronHashTable(table);
  for (InternalIndex i : table->IterateEntries()) {
    ObjectSlot value_slot =
        table->RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));
    VisitPointer(table, value_slot);
  }

  return table->SizeFromMap(map);
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SCAVENGER_INL_H_
Definition v8config.h:660