#include <unordered_map>
class IterateAndScavengePromotedObjectsVisitor final
    : public HeapVisitor<IterateAndScavengePromotedObjectsVisitor> {
 
#ifdef V8_COMPRESS_POINTERS
    // ...
    if (!slot.HasExternalPointerHandle()) return;
    // ...
    ExternalPointerTable& table = heap->isolate()->external_pointer_table();
    // ...
    table.Evacuate(heap->young_external_pointer_space(), /* ... */
                   ExternalPointerTable::EvacuateMarkMode::kTransferMark);
#endif  // V8_COMPRESS_POINTERS
 
 
  template <typename TSlot>
  V8_INLINE void VisitPointersImpl(Tagged<HeapObject> host, TSlot start,
                                   TSlot end) {
    using THeapObjectSlot = typename TSlot::THeapObjectSlot;
    // Treat weak references as strong.
    for (TSlot slot = start; slot < end; ++slot) {
      typename TSlot::TObject object = *slot;
      Tagged<HeapObject> heap_object;
      if (object.GetHeapObject(&heap_object)) {
        HandleSlot(host, THeapObjectSlot(slot), heap_object);
      }
    }
  }
 
 
  template <typename THeapObjectSlot>
  V8_INLINE void HandleSlot(Tagged<HeapObject> host, THeapObjectSlot slot,
                            Tagged<HeapObject> target) {
    static_assert(
        std::is_same_v<THeapObjectSlot, FullHeapObjectSlot> ||
            std::is_same_v<THeapObjectSlot, HeapObjectSlot>,
        "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
    // ...
    if (Heap::InFromPage(target)) {
      SlotCallbackResult result = scavenger_->ScavengeObject(slot, target);
      bool success = (*slot).GetHeapObject(&target);
      USE(success);
      DCHECK(success);

      if (result == KEEP_SLOT) {
        SLOW_DCHECK(IsHeapObject(target));
        MemoryChunk* chunk = MemoryChunk::FromHeapObject(host);
        MutablePageMetadata* page =
            MutablePageMetadata::cast(chunk->Metadata());
        // The sweeper is stopped during scavenge, so the remembered set can
        // be updated directly here.
        RememberedSet<OLD_TO_NEW>::Insert<AccessMode::ATOMIC>(
            page, chunk->Offset(slot.address()));
      }
    } else if (record_slots_ &&
               MarkCompactCollector::IsOnEvacuationCandidate(target)) {
      MemoryChunk* chunk = MemoryChunk::FromHeapObject(host);
      MutablePageMetadata* page = MutablePageMetadata::cast(chunk->Metadata());
      RememberedSet<OLD_TO_OLD>::Insert<AccessMode::ATOMIC>(
          page, chunk->Offset(slot.address()));
    }
  }
 
 
 
bool IsUnscavengedHeapObject(Heap* heap, Tagged<Object> object) {
  return Heap::InFromPage(object) && !Cast<HeapObject>(object)
                                          ->map_word(kRelaxedLoad)
                                          .IsForwardingAddress();
}

bool IsUnscavengedHeapObject(Heap* heap, Tagged<HeapObject> heap_object) {
  return Heap::InFromPage(heap_object) &&
         !heap_object->map_word(kRelaxedLoad).IsForwardingAddress();
}

bool IsUnscavengedHeapObjectSlot(Heap* heap, FullObjectSlot p) {
  return IsUnscavengedHeapObject(heap, *p);
}
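// A minimal standalone sketch of the "unscavenged" predicate above: an object
// is unscavenged while it still sits in from-space and its header has not yet
// been replaced by a forwarding pointer. ToyObject and the explicit space
// bounds are hypothetical stand-ins, not V8's actual representation.
#include <cstdint>

struct ToyObject {
  // Holds either the map pointer or, once copied, the forwarding pointer
  // (tagged in the low bit).
  uintptr_t header;
  bool IsForwarded() const { return (header & 1) != 0; }
};

inline bool IsUnscavenged(const ToyObject* object, uintptr_t from_start,
                          uintptr_t from_end) {
  const uintptr_t addr = reinterpret_cast<uintptr_t>(object);
  return addr >= from_start && addr < from_end && !object->IsForwarded();
}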
 
ScavengerCollector::JobTask::JobTask(
    ScavengerCollector* collector,
    std::vector<std::unique_ptr<Scavenger>>* scavengers,
    std::vector<std::pair<ParallelWorkItem, MutablePageMetadata*>>
        old_to_new_chunks,
    const Scavenger::CopiedList& copied_list,
    const Scavenger::PinnedList& pinned_list,
    const Scavenger::PromotedList& promoted_list)
    : collector_(collector),
      scavengers_(scavengers),
      old_to_new_chunks_(std::move(old_to_new_chunks)),
      remaining_memory_chunks_(old_to_new_chunks_.size()),
      generator_(old_to_new_chunks_.size()),
      copied_list_(copied_list),
      pinned_list_(pinned_list),
      promoted_list_(promoted_list),
      trace_id_(reinterpret_cast<uint64_t>(this) ^
                /* ... (the tracer's current SCAVENGER epoch) */ 0) {}
 
 
void ScavengerCollector::JobTask::Run(JobDelegate* delegate) {
  DCHECK_LT(delegate->GetTaskId(), scavengers_->size());
  // ...
  collector_->estimate_concurrency_.fetch_add(1, std::memory_order_relaxed);

  Scavenger* scavenger = (*scavengers_)[delegate->GetTaskId()].get();
  if (delegate->IsJoiningThread()) {
    TRACE_GC_WITH_FLOW(collector_->heap_->tracer(),
                       GCTracer::Scope::SCAVENGER_SCAVENGE_PARALLEL, trace_id_,
                       TRACE_EVENT_FLAG_FLOW_IN);
    ProcessItems(delegate, scavenger);
  } else {
    TRACE_GC_EPOCH_WITH_FLOW(
        collector_->heap_->tracer(),
        GCTracer::Scope::SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL,
        ThreadKind::kBackground, trace_id_, TRACE_EVENT_FLAG_FLOW_IN);
    ProcessItems(delegate, scavenger);
  }
}
 
size_t ScavengerCollector::JobTask::GetMaxConcurrency(
    size_t worker_count) const {
  // Account for the local segments held by each active worker in addition to
  // the global segments of the copied, pinned and promoted worklists.
  size_t wanted_num_workers =
      std::max<size_t>(remaining_memory_chunks_.load(std::memory_order_relaxed),
                       worker_count + copied_list_.Size() +
                           pinned_list_.Size() + promoted_list_.Size());
  if (!collector_->heap_->ShouldUseBackgroundThreads() ||
      collector_->heap_->ShouldOptimizeForBattery()) {
    return std::min<size_t>(wanted_num_workers, 1);
  }
  return std::min<size_t>(scavengers_->size(), wanted_num_workers);
}
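// A standalone sketch of the concurrency heuristic above: request enough
// workers to cover the larger of (a) chunks still awaiting scavenging and
// (b) current workers plus the global worklist backlog, clamp to a single
// worker when background threads are undesirable, and never exceed the
// number of pre-created Scavenger instances. All names are illustrative.
#include <algorithm>
#include <cstddef>

size_t MaxConcurrency(size_t worker_count, size_t remaining_chunks,
                      size_t worklist_backlog, size_t num_scavengers,
                      bool allow_background_threads) {
  const size_t wanted =
      std::max(remaining_chunks, worker_count + worklist_backlog);
  if (!allow_background_threads) {
    return std::min<size_t>(wanted, 1);  // main thread only
  }
  return std::min(num_scavengers, wanted);
}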
 
 
void ScavengerCollector::JobTask::ProcessItems(JobDelegate* delegate,
                                               Scavenger* scavenger) {
  double scavenging_time = 0.0;
  {
    TimedScope scope(&scavenging_time);
    ConcurrentScavengePages(scavenger);
    scavenger->Process(delegate);
  }
  if (V8_UNLIKELY(v8_flags.trace_parallel_scavenge)) {
    PrintIsolate(collector_->heap_->isolate(),
                 "scavenge[%p]: time=%.2f copied=%zu promoted=%zu\n",
                 static_cast<void*>(this), scavenging_time,
                 scavenger->bytes_copied(), scavenger->bytes_promoted());
  }
}
 
void ScavengerCollector::JobTask::ConcurrentScavengePages(
    Scavenger* scavenger) {
  while (remaining_memory_chunks_.load(std::memory_order_relaxed) > 0) {
    std::optional<size_t> index = generator_.GetNext();
    if (!index) return;
    for (size_t i = *index; i < old_to_new_chunks_.size(); ++i) {
      auto& work_item = old_to_new_chunks_[i];
      if (!work_item.first.TryAcquire()) {
        break;
      }
      scavenger->ScavengePage(work_item.second);
      if (remaining_memory_chunks_.fetch_sub(1, std::memory_order_relaxed) <=
          1) {
        return;
      }
    }
  }
}
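// A standalone sketch of the page-acquisition pattern above: items live in a
// shared vector, each is claimed at most once via an atomic flag, and a
// shared counter tracks how much work remains. WorkItem and DrainFrom are
// illustrative stand-ins for V8's ParallelWorkItem and the loop above.
#include <atomic>
#include <cstddef>
#include <vector>

struct WorkItem {
  std::atomic<bool> acquired{false};
  // exchange() returns the previous value, so only the first caller wins.
  bool TryAcquire() {
    return !acquired.exchange(true, std::memory_order_relaxed);
  }
};

template <typename ProcessFn>
void DrainFrom(std::vector<WorkItem>& items, std::atomic<size_t>& remaining,
               size_t start_index, ProcessFn process) {
  for (size_t i = start_index; i < items.size(); ++i) {
    if (!items[i].TryAcquire()) break;  // another worker is active here
    process(i);
    if (remaining.fetch_sub(1, std::memory_order_relaxed) <= 1) return;
  }
}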
 
 
class GlobalHandlesWeakRootsUpdatingVisitor final : public RootVisitor {
 public:
  void VisitRootPointer(Root root, const char* description,
                        FullObjectSlot p) final {
    UpdatePointer(p);
  }
  void VisitRootPointers(Root root, const char* description,
                         FullObjectSlot start, FullObjectSlot end) final {
    for (FullObjectSlot p = start; p < end; ++p) {
      UpdatePointer(p);
    }
  }

 private:
  void UpdatePointer(FullObjectSlot p) {
    Tagged<Object> object = *p;
    DCHECK(!HasWeakHeapObjectTag(object));
    // The object may be in the old generation, since global handles only
    // approximate the list of young nodes. This also bails out for Smis.
    if (!Heap::InYoungGeneration(object)) return;

    Tagged<HeapObject> heap_object = Cast<HeapObject>(object);
    CHECK(Heap::InFromPage(heap_object));
    MapWord first_word = heap_object->map_word(kRelaxedLoad);
    CHECK(first_word.IsForwardingAddress());
    Tagged<HeapObject> dest = first_word.ToForwardingAddress(heap_object);
    if (heap_object == dest) {
      // Self-forwarded objects (large objects and objects pinned in place)
      // do not move.
      DCHECK(Heap::IsLargeObject(heap_object) ||
             MemoryChunk::FromHeapObject(heap_object)->IsQuarantined());
      return;
    }
    UpdateHeapObjectReferenceSlot(FullHeapObjectSlot(p), dest);
    DCHECK_IMPLIES(Heap::InYoungGeneration(dest),
                   Heap::InToPage(dest) ||
                       (Heap::IsLargeObject(dest) && Heap::InFromPage(dest)
                        /* ... */));
  }
};
 
class YoungGenerationConservativeStackVisitor
    : public ConservativeStackVisitorBase<
          YoungGenerationConservativeStackVisitor> {
 public:
  YoungGenerationConservativeStackVisitor(Isolate* isolate,
                                          RootVisitor* root_visitor)
      : ConservativeStackVisitorBase(isolate, root_visitor),
        isolate_(isolate) {
    // Both semispaces must have clean marking bitmaps on entry; the bitmaps
    // serve as scratch space for resolving inner pointers below.
    DCHECK(std::all_of(
        isolate_->heap()->semi_space_new_space()->to_space().begin(),
        isolate_->heap()->semi_space_new_space()->to_space().end(),
        [](const PageMetadata* page) {
          return page->marking_bitmap()->IsClean();
        }));
    DCHECK(std::all_of(
        isolate_->heap()->semi_space_new_space()->from_space().begin(),
        isolate_->heap()->semi_space_new_space()->from_space().end(),
        [](const PageMetadata* page) {
          return page->marking_bitmap()->IsClean();
        }));
  }

  ~YoungGenerationConservativeStackVisitor() {
    DCHECK(std::all_of(
        isolate_->heap()->semi_space_new_space()->to_space().begin(),
        isolate_->heap()->semi_space_new_space()->to_space().end(),
        [](const PageMetadata* page) {
          return page->marking_bitmap()->IsClean();
        }));
    for (PageMetadata* page :
         isolate_->heap()->semi_space_new_space()->from_space()) {
      // ... (reset the scratch marks left by inner-pointer resolution)
    }
  }

 private:
  static constexpr bool kOnlyVisitMainV8Cage = true;

  static bool FilterPage(const MemoryChunk* chunk) {
    return chunk->IsFromPage();
  }

  static bool FilterNormalObject(Tagged<HeapObject> object, MapWord map_word,
                                 MarkingBitmap* bitmap) {
    // ...
    if (map_word.IsForwardingAddress()) {
      // ...
    }
    // ...
  }

  static void HandleObjectFound(Tagged<HeapObject> object, size_t object_size,
                                MarkingBitmap* bitmap) {
    // ...
    Address object_address = object->address();
    if (object_address + object_size <
        PageMetadata::FromHeapObject(object)->area_end()) {
      // ...
    }
  }

  Isolate* const isolate_;

  friend class ConservativeStackVisitorBase<
      YoungGenerationConservativeStackVisitor>;
};
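// A standalone sketch of inner-pointer resolution, the job the marking-bitmap
// helpers above perform: map an arbitrary stack word to the start of the
// object containing it, if any. The heap is simplified to a sorted vector of
// [start, start + size) allocations; all names are illustrative.
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <optional>
#include <vector>

struct Allocation {
  uintptr_t start;
  size_t size;
};

// `allocations` must be sorted by start address.
std::optional<uintptr_t> FindBasePointer(
    uintptr_t maybe_inner_ptr, const std::vector<Allocation>& allocations) {
  auto it = std::upper_bound(
      allocations.begin(), allocations.end(), maybe_inner_ptr,
      [](uintptr_t addr, const Allocation& a) { return addr < a.start; });
  if (it == allocations.begin()) return std::nullopt;
  const Allocation& candidate = *std::prev(it);
  if (maybe_inner_ptr < candidate.start + candidate.size) {
    return candidate.start;  // the word points into this object
  }
  return std::nullopt;  // the word points into a gap between objects
}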
 
template <typename ConcreteVisitor>
class ObjectPinningVisitorBase : public RootVisitor {
 public:
  ObjectPinningVisitorBase(const Heap* heap, Scavenger& scavenger,
                           ScavengerCollector::PinnedObjects& pinned_objects)
      : RootVisitor(),
        heap_(heap),
        scavenger_(scavenger),
        pinned_objects_(pinned_objects) {}

  void VisitRootPointer(Root root, const char* description,
                        FullObjectSlot p) final {
    DCHECK(root == Root::kStackRoots || root == Root::kHandleScope);
    static_cast<ConcreteVisitor*>(this)->HandlePointer(p);
  }

  void VisitRootPointers(Root root, const char* description,
                         FullObjectSlot start, FullObjectSlot end) final {
    DCHECK(root == Root::kStackRoots || root == Root::kHandleScope);
    for (FullObjectSlot p = start; p < end; ++p) {
      static_cast<ConcreteVisitor*>(this)->HandlePointer(p);
    }
  }

 protected:
  void HandleHeapObject(Tagged<HeapObject> object) {
    // ...
    if (IsAllocationMemento(object)) {
      // Allocation mementos are not supposed to survive a GC; no pinning.
      return;
    }
    if (scavenger_.PromoteIfLargeObject(object)) {
      // Large objects are never moved, so pinning is not required.
      return;
    }
    // ...
    DCHECK(Heap::InFromPage(object));
    Address object_address = object.address();
    MapWord map_word = object->map_word(kRelaxedLoad);
    DCHECK(!map_word.IsForwardingAddress());
    // Each object must be recorded at most once.
    DCHECK(std::all_of(
        pinned_objects_.begin(), pinned_objects_.end(),
        [object_address](ScavengerCollector::PinnedObjectEntry& entry) {
          return entry.address != object_address;
        }));
    int object_size = object->SizeFromMap(map_word.ToMap());
    pinned_objects_.push_back(
        {object_address, map_word, static_cast<size_t>(object_size)});
    MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
    if (!chunk->IsQuarantined()) {
      // ...
      if (v8_flags.scavenger_promote_quarantined_pages &&
          heap_->semi_space_new_space()->ShouldPageBePromoted(chunk)) {
        // ...
      }
    }
    scavenger_.PinAndPushObject(chunk, object, map_word);
  }

 private:
  const Heap* const heap_;
  Scavenger& scavenger_;
  ScavengerCollector::PinnedObjects& pinned_objects_;
};
 
class ConservativeObjectPinningVisitor final
    : public ObjectPinningVisitorBase<ConservativeObjectPinningVisitor> {
 public:
  ConservativeObjectPinningVisitor(
      const Heap* heap, Scavenger& scavenger,
      ScavengerCollector::PinnedObjects& pinned_objects)
      : ObjectPinningVisitorBase<ConservativeObjectPinningVisitor>(
            heap, scavenger, pinned_objects) {}

 private:
  void HandlePointer(FullObjectSlot p) {
    HandleHeapObject(Cast<HeapObject>(*p));
  }

  friend class ObjectPinningVisitorBase<ConservativeObjectPinningVisitor>;
};
 
class PreciseObjectPinningVisitor final
    : public ObjectPinningVisitorBase<PreciseObjectPinningVisitor> {
 public:
  PreciseObjectPinningVisitor(const Heap* heap, Scavenger& scavenger,
                              ScavengerCollector::PinnedObjects& pinned_objects)
      : ObjectPinningVisitorBase<PreciseObjectPinningVisitor>(heap, scavenger,
                                                              pinned_objects) {}

 private:
  void HandlePointer(FullObjectSlot p) {
    Tagged<Object> object = *p;
    if (!IsHeapObject(object)) {
      return;
    }
    Tagged<HeapObject> heap_object = Cast<HeapObject>(object);
    // ... (skip objects that are not on from-space pages)
    HandleHeapObject(heap_object);
  }

  friend class ObjectPinningVisitorBase<PreciseObjectPinningVisitor>;
};
 
// Treats some precise references as conservative references, to stress-test
// object pinning in the Scavenger.
class TreatConservativelyVisitor final : public RootVisitor {
 public:
  TreatConservativelyVisitor(YoungGenerationConservativeStackVisitor* v,
                             Heap* heap)
      : stack_visitor_(v),
        rng_(heap->isolate()->fuzzer_rng()),
        stressing_threshold_(
            v8_flags.stress_scavenger_conservative_object_pinning_random
                ? rng_->NextDouble()
                : 0) {}

  void VisitRootPointer(Root root, const char* description,
                        FullObjectSlot p) final {
    HandlePointer(p);
  }

  void VisitRootPointers(Root root, const char* description,
                         FullObjectSlot start, FullObjectSlot end) final {
    for (FullObjectSlot p = start; p < end; ++p) {
      HandlePointer(p);
    }
  }

 private:
  void HandlePointer(FullObjectSlot p) {
    if (rng_->NextDouble() < stressing_threshold_) {
      return;
    }
    Tagged<Object> object = *p;
    stack_visitor_->VisitPointer(reinterpret_cast<void*>(object.ptr()));
  }

  YoungGenerationConservativeStackVisitor* const stack_visitor_;
  base::RandomNumberGenerator* const rng_;
  double stressing_threshold_;
};
 
void RestorePinnedObjects(
    SemiSpaceNewSpace& new_space,
    const ScavengerCollector::PinnedObjects& pinned_objects) {
  // Restore the map words of pinned objects and tally the quarantined size.
  DCHECK_EQ(0, new_space.QuarantinedPageCount());
  size_t quarantined_objects_size = 0;
  for (const auto& [object_address, map_word, object_size] : pinned_objects) {
    DCHECK(!map_word.IsForwardingAddress());
    Tagged<HeapObject> object = HeapObject::FromAddress(object_address);
    // ... (write the saved map word back into the object's header)
    MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
    DCHECK(chunk->IsQuarantined());
    if (!chunk->IsFlagSet(MemoryChunk::WILL_BE_PROMOTED)) {
      quarantined_objects_size += object_size;
    }
  }
  new_space.SetQuarantinedSize(quarantined_objects_size);
}
 
void QuarantinePinnedPages(SemiSpaceNewSpace& new_space) {
  PageMetadata* next_page = new_space.from_space().first_page();
  while (next_page) {
    // Capture the successor before the current page is moved out of the
    // from-space list.
    PageMetadata* current_page = next_page;
    next_page = current_page->next_page();
    MemoryChunk* chunk = current_page->Chunk();
    DCHECK(chunk->IsFromPage());
    if (!chunk->IsQuarantined()) {
      continue;
    }
    if (chunk->IsFlagSet(MemoryChunk::WILL_BE_PROMOTED)) {
      new_space.PromotePageToOldSpace(current_page, /* ... */);
      DCHECK(!chunk->InYoungGeneration());
    } else {
      new_space.MoveQuarantinedPage(chunk);
      DCHECK(!chunk->IsFromPage());
      DCHECK(chunk->IsToPage());
    }
    DCHECK(current_page->marking_bitmap()->IsClean());
    DCHECK(!chunk->IsFromPage());
    DCHECK(!chunk->IsQuarantined());
  }
}
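// A standalone sketch of the traversal pattern above: when processing may
// unlink the current node from the list, capture the successor first. The
// node type is illustrative.
struct PageNode {
  PageNode* next = nullptr;
};

template <typename ProcessFn>
void ForEachAllowingUnlink(PageNode* head, ProcessFn process) {
  PageNode* next = head;
  while (next != nullptr) {
    PageNode* current = next;
    next = current->next;  // capture before process() may relink current
    process(current);
  }
}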
 
ScavengerCollector::QuarantinedPageSweeper::JobTask::JobTask(
    Heap* heap, const PinnedObjects&& pinned_objects)
    : heap_(heap),
      trace_id_(reinterpret_cast<uint64_t>(this) ^
                /* ... (the tracer's current SCAVENGER epoch) */ 0),
      should_zap_(heap::ShouldZapGarbage()),
      pinned_objects_(std::move(pinned_objects)) {
  DCHECK(!pinned_objects.empty());
}
 
 
void ScavengerCollector::QuarantinedPageSweeper::JobTask::Run(
    JobDelegate* delegate) {
#ifdef V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES
  // ... (set up the pointer-compression cage base for this thread)
#endif  // V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES
  TRACE_GC_EPOCH_WITH_FLOW(
      heap_->tracer(),
      GCTracer::Scope::SCAVENGER_BACKGROUND_QUARANTINED_PAGE_SWEEPING,
      ThreadKind::kBackground, trace_id_, TRACE_EVENT_FLAG_FLOW_IN);
  DCHECK(!is_done_.load(std::memory_order_relaxed));
  // On the first run, bucket the pinned objects by their page.
  if (pinned_object_per_page_.empty()) {
    for (const auto& entry : pinned_objects_) {
      MemoryChunk* chunk = MemoryChunk::FromAddress(entry.address);
      ObjectsAndSizes& objects_for_page = pinned_object_per_page_[chunk];
      // Each object may be recorded only once per page.
      DCHECK(!std::any_of(objects_for_page.begin(), objects_for_page.end(),
                          [entry](auto& object_and_size) {
                            return object_and_size.first == entry.address;
                          }));
      objects_for_page.emplace_back(entry.address, entry.size);
    }
    // Initialize the iterator for the sweeping loop below.
    next_page_iterator_ = pinned_object_per_page_.begin();
    DCHECK_NE(next_page_iterator_, pinned_object_per_page_.end());
  }
  // Sweep page by page, yielding between pages when requested.
  while (next_page_iterator_ != pinned_object_per_page_.end()) {
    if (delegate->ShouldYield()) {
      return;
    }
    MemoryChunk* chunk = next_page_iterator_->first;
    PageMetadata* page = static_cast<PageMetadata*>(chunk->Metadata());
    if (chunk->IsQuarantined()) {
      // The page stays in new space: gaps between pinned objects only need
      // filler objects.
      SweepPage(CreateFillerFreeSpaceHandler, chunk, page,
                next_page_iterator_->second);
    } else {
      // The page was promoted to old space: gaps go to the free list, and
      // the page's free-list categories are relinked afterwards (see
      // RelinkQuarantinedPageFreeList).
      DCHECK_EQ(page->area_size(), page->allocated_bytes());
      size_t filler_size_on_page =
          SweepPage(AddToFreeListFreeSpaceHandler, chunk, page,
                    next_page_iterator_->second);
      // ... (relink the free list using filler_size_on_page)
    }
    next_page_iterator_++;
  }
  is_done_.store(true, std::memory_order_relaxed);
  pinned_object_per_page_.clear();
  // ...
}
 
 
// static
void ScavengerCollector::QuarantinedPageSweeper::JobTask::
    CreateFillerFreeSpaceHandler(Heap* heap, Address address, size_t size,
                                 bool should_zap) {
  if (should_zap) {
    // ... (zap the block)
  }
  heap->CreateFillerObjectAt(address, static_cast<int>(size));
}
 
 
size_t ScavengerCollector::QuarantinedPageSweeper::JobTask::SweepPage(
    FreeSpaceHandler free_space_handler, MemoryChunk* chunk,
    PageMetadata* page, ObjectsAndSizes& pinned_objects_on_page) {
  DCHECK(!pinned_objects_on_page.empty());
  Address start = page->area_start();
  std::sort(pinned_objects_on_page.begin(), pinned_objects_on_page.end());
  size_t filler_size_on_page = 0;
  for (const auto& [object_address, object_size] : pinned_objects_on_page) {
    DCHECK_LE(start, object_address);
    // Fill the gap before this pinned object, if any.
    if (start != object_address) {
      size_t filler_size = object_address - start;
      free_space_handler(heap_, start, filler_size, should_zap_);
      filler_size_on_page += filler_size;
    }
    start = object_address + object_size;
  }
  // Fill the tail of the page after the last pinned object.
  if (start != page->area_end()) {
    size_t filler_size = page->area_end() - start;
    free_space_handler(heap_, start, filler_size, should_zap_);
    filler_size_on_page += filler_size;
  }
  return filler_size_on_page;
}
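// A standalone sketch of the gap-sweeping algorithm above: given the live
// (pinned) [address, size) ranges on a page, invoke a handler on every gap
// between them, including the tail, and return the total gap size. The types
// are illustrative.
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <functional>
#include <utility>
#include <vector>

using ToyAddress = uintptr_t;

size_t SweepGaps(ToyAddress area_start, ToyAddress area_end,
                 std::vector<std::pair<ToyAddress, size_t>>& live_ranges,
                 const std::function<void(ToyAddress, size_t)>& on_free) {
  std::sort(live_ranges.begin(), live_ranges.end());
  ToyAddress cursor = area_start;
  size_t freed = 0;
  for (const auto& [address, size] : live_ranges) {
    if (cursor != address) {
      on_free(cursor, address - cursor);  // gap before this live range
      freed += address - cursor;
    }
    cursor = address + size;
  }
  if (cursor != area_end) {
    on_free(cursor, area_end - cursor);  // tail after the last live range
    freed += area_end - cursor;
  }
  return freed;
}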
 
 
void ScavengerCollector::QuarantinedPageSweeper::StartSweeping(
    const PinnedObjects&& pinned_objects) {
  DCHECK(!pinned_objects.empty());
  auto job = std::make_unique<JobTask>(heap_, std::move(pinned_objects));
  // ... (post the job to the platform and keep its JobHandle)
}
 
 
  std::vector<std::unique_ptr<Scavenger>> scavengers;
  // ...
  for (int i = 0; i < num_scavenge_tasks; ++i) {
    scavengers.emplace_back(
        new Scavenger(this, heap_, is_logging, &empty_chunks, &copied_list,
                      &pinned_list, &promoted_list, &ephemeron_table_list));
  }
  Scavenger& main_thread_scavenger = *scavengers[kMainThreadId];
 
  {
    TRACE_GC(heap_->tracer(),
             GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_IDENTIFY);
    isolate_->traced_handles()->ComputeWeaknessForYoungObjects();
  }
 
  std::vector<std::pair<ParallelWorkItem, MutablePageMetadata*>>
      old_to_new_chunks;
  // ...
    // Pin objects reachable from the stack conservatively, and from precise
    // roots if requested, before any object is moved.
    if (v8_flags.scavenger_conservative_object_pinning &&
        heap_->IsGCWithStack()) {
      TRACE_GC(heap_->tracer(),
               GCTracer::Scope::SCAVENGER_SCAVENGE_PIN_OBJECTS);
      ConservativeObjectPinningVisitor conservative_pinning_visitor(
          heap_, main_thread_scavenger, pinned_objects);
      // ...
      YoungGenerationConservativeStackVisitor stack_visitor(
          isolate_, &conservative_pinning_visitor);
      heap_->IterateConservativeStackRoots(&stack_visitor);
      if (V8_UNLIKELY(v8_flags.stress_scavenger_conservative_object_pinning)) {
        TreatConservativelyVisitor handles_visitor(&stack_visitor, heap_);
        heap_->IterateRootsForPrecisePinning(&handles_visitor);
      }
    }
    if (v8_flags.scavenger_precise_object_pinning) {
      PreciseObjectPinningVisitor precise_pinning_visitor(
          heap_, main_thread_scavenger, pinned_objects);
      // ... (a wrapping visitor is constructed with
      //      (heap_, &precise_pinning_visitor) and fed the precise roots)
    }
 
    // With precise pinning enabled, the precise-pinning roots are also
    // scavenged through the root scavenge visitor.
    if (v8_flags.scavenger_precise_object_pinning) {
      // ...
      heap_->IterateRootsForPrecisePinning(&root_scavenge_visitor);
    }
 
    {
      // Parallel phase: scavenge copied and promoted objects on all tasks.
      TRACE_GC_ARG1(heap_->tracer(),
                    GCTracer::Scope::SCAVENGER_SCAVENGE_PARALLEL_PHASE,
                    /* ... */);
      auto job = std::make_unique<JobTask>(
          this, &scavengers, std::move(old_to_new_chunks), copied_list,
          pinned_list, promoted_list);
      // ... (post the job at blocking priority and join it)
    }
 
    TRACE_GC(heap_->tracer(),
             GCTracer::Scope::SCAVENGER_SCAVENGE_WEAK_GLOBAL_HANDLES_PROCESS);
    GlobalHandlesWeakRootsUpdatingVisitor visitor;
    isolate_->global_handles()->ProcessWeakYoungObjects(
        &visitor, &IsUnscavengedHeapObjectSlot);
    isolate_->traced_handles()->ProcessWeakYoungObjects(
        &visitor, &IsUnscavengedHeapObjectSlot);
 
    for (auto& scavenger : scavengers) {
      scavenger->Finalize();
    }
 
#ifdef V8_COMPRESS_POINTERS
    // ... (sweep the young external pointer table entries)
#endif  // V8_COMPRESS_POINTERS

    // ...
    heap_->UpdateYoungReferencesInExternalStringTable(
        &Heap::UpdateYoungReferenceInExternalStringTableEntry);
 
    TRACE_GC(heap_->tracer(),
             GCTracer::Scope::SCAVENGER_SCAVENGE_RESTORE_AND_QUARANTINE_PINNED);
    RestorePinnedObjects(*new_space, pinned_objects);
    QuarantinePinnedPages(*new_space);
 
  // Start concurrent sweeping of pages holding pinned (quarantined) objects.
  if (!pinned_objects.empty()) {
    quarantined_page_sweeper_.StartSweeping(std::move(pinned_objects));
  }
 
    MutablePageMetadata* chunk = nullptr;
    while (empty_chunks_local.Pop(&chunk)) {
      RememberedSet<OLD_TO_NEW>::CheckPossiblyEmptyBuckets(chunk);
      RememberedSet<OLD_TO_NEW_BACKGROUND>::CheckPossiblyEmptyBuckets(chunk);
    }
 
 
int ScavengerCollector::NumberOfScavengeTasks() {
  if (!v8_flags.parallel_scavenge) return 1;
  // Scale the task count with new-space capacity...
  const int num_scavenge_tasks =
      static_cast<int>(
          SemiSpaceNewSpace::From(heap_->new_space())->TotalCapacity()) /
          MB +
      1;
  // ...and clamp it against the worker pool and kMaxScavengerTasks.
  int tasks = std::max(
      1, std::min({num_scavenge_tasks, kMaxScavengerTasks /* , ... */}));
  // ...
  return tasks;
}
 
Scavenger::Scavenger(ScavengerCollector* collector, Heap* heap,
                     bool is_logging, EmptyChunksList* empty_chunks,
                     CopiedList* copied_list, PinnedList* pinned_list,
                     PromotedList* promoted_list,
                     EphemeronRememberedSet::TableList* ephemeron_table_list)
    : collector_(collector),
      heap_(heap),
      local_empty_chunks_(*empty_chunks),
      local_copied_list_(*copied_list),
      local_pinned_list_(*pinned_list),
      local_promoted_list_(*promoted_list),
      local_ephemeron_table_list_(*ephemeron_table_list),
      // ...
      is_logging_(is_logging),
      shared_string_table_(v8_flags.shared_string_table &&
                           heap->isolate()->has_shared_space()),
      mark_shared_heap_(heap->isolate()->is_shared_space_isolate()),
      /* ... */ {
  DCHECK(!heap->incremental_marking()->IsMarking());
}
 
 
void Scavenger::IterateAndScavengePromotedObject(Tagged<HeapObject> target,
                                                 Tagged<Map> map, int size) {
  // ...
  IterateAndScavengePromotedObjectsVisitor visitor(/* ... */);
  visitor.Visit(map, target, size);
  // ...
  if (IsJSArrayBufferMap(map)) {
    // Promoted JSArrayBuffers must mark their backing-store extension.
    GCSafeCast<JSArrayBuffer>(target, heap_)->YoungMarkExtensionPromoted();
  }
}
 
void Scavenger::RememberPromotedEphemeron(Tagged<EphemeronHashTable> table,
                                          int index) {
  auto indices = local_ephemeron_remembered_set_.insert(
      {table, std::unordered_set<int>()});
  indices.first->second.insert(index);
}
 
 
void Scavenger::ScavengePage(MutablePageMetadata* page) {
  const bool record_old_to_shared_slots = heap_->isolate()->has_shared_space();
  MemoryChunk* chunk = page->Chunk();
  // ...
  RememberedSet<OLD_TO_NEW>::IterateAndTrackEmptyBuckets(
      page,
      [this, chunk, page, record_old_to_shared_slots](MaybeObjectSlot slot) {
        SlotCallbackResult result = CheckAndScavengeObject(heap_, slot);
        // A new-space string may have been promoted into the shared heap
        // during GC, in which case the slot becomes old-to-shared.
        if (result == REMOVE_SLOT && record_old_to_shared_slots) {
          CheckOldToNewSlotForSharedUntyped(chunk, page, slot);
        }
        return result;
      },
      &local_empty_chunks_);

  if (chunk->executable()) {
    std::vector<std::tuple<Tagged<HeapObject>, SlotType, Address>> slot_updates;
    // Typed slots on executable pages cannot be rewritten while iterating;
    // collect the updates first and apply them below under a writable JIT
    // allocation.
    RememberedSet<OLD_TO_NEW>::IterateTyped(
        page, [this, chunk, page, record_old_to_shared_slots, &slot_updates](
                  SlotType slot_type, Address slot_address) {
          Tagged<HeapObject> old_target =
              UpdateTypedSlotHelper::GetTargetObject(heap_, slot_type,
                                                     slot_address);
          Tagged<HeapObject> new_target = old_target;
          FullMaybeObjectSlot slot(&new_target);
          SlotCallbackResult result = CheckAndScavengeObject(heap_, slot);
          if (result == REMOVE_SLOT && record_old_to_shared_slots) {
            CheckOldToNewSlotForSharedTyped(chunk, page, slot_type,
                                            slot_address, *slot);
          }
          if (new_target != old_target) {
            slot_updates.emplace_back(new_target, slot_type, slot_address);
          }
          return result;
        });
    // ...
    if (typed_slot_count == 0) {
      // ... (release the typed slot set, which became empty)
    }
    WritableJitPage jit_page = ThreadIsolation::LookupWritableJitPage(
        page->area_start(), page->area_size());
    for (auto& slot_update : slot_updates) {
      Tagged<HeapObject> new_target = std::get<0>(slot_update);
      SlotType slot_type = std::get<1>(slot_update);
      Address slot_address = std::get<2>(slot_update);
      WritableJitAllocation jit_allocation =
          jit_page.LookupAllocationContaining(slot_address);
      UpdateTypedSlotHelper::UpdateTypedSlot(
          jit_allocation, heap_, slot_type, slot_address,
          [new_target](FullMaybeObjectSlot slot) {
            slot.store(new_target);
            return KEEP_SLOT;
          });
    }
  }
  // ...
  RememberedSet<OLD_TO_NEW_BACKGROUND>::Iterate(
      page,
      [this, chunk, page, record_old_to_shared_slots](MaybeObjectSlot slot) {
        SlotCallbackResult result = CheckAndScavengeObject(heap_, slot);
        if (result == REMOVE_SLOT && record_old_to_shared_slots) {
          CheckOldToNewSlotForSharedUntyped(chunk, page, slot);
        }
        return result;
      },
      SlotSet::KEEP_EMPTY_BUCKETS);
  // ...
}
 
      // Drained from the copied list in Scavenger::Process:
      scavenge_visitor.Visit(object);
 
 
void ScavengerCollector::ClearYoungEphemerons(
    EphemeronRememberedSet::TableList* ephemeron_table_list) {
  ephemeron_table_list->Iterate([this](Tagged<EphemeronHashTable> table) {
    for (InternalIndex i : table->IterateEntries()) {
      // Keys in EphemeronHashTables must be heap objects.
      HeapObjectSlot key_slot(
          table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i)));
      Tagged<HeapObject> key = key_slot.ToHeapObject();
      if (IsUnscavengedHeapObject(heap_, key)) {
        table->RemoveEntry(i);
      } else {
        Tagged<HeapObject> forwarded = ForwardingAddress(key);
        key_slot.StoreHeapObject(forwarded);
      }
    }
  });
  ephemeron_table_list->Clear();
}
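// A standalone sketch of the ephemeron-clearing logic above and below: drop
// entries whose keys are dead, and re-key entries whose keys moved. The
// liveness predicate and forwarding function are hypothetical stand-ins for
// the GC's notions.
#include <functional>
#include <unordered_map>
#include <utility>
#include <vector>

template <typename K, typename V>
void ClearDeadEphemerons(std::unordered_map<K, V>& table,
                         const std::function<bool(const K&)>& is_dead,
                         const std::function<K(const K&)>& forward) {
  std::vector<std::pair<K, V>> moved;
  for (auto it = table.begin(); it != table.end();) {
    if (is_dead(it->first)) {
      it = table.erase(it);  // dead key: drop the whole entry
    } else if (K dest = forward(it->first); dest != it->first) {
      moved.emplace_back(dest, std::move(it->second));  // key relocated
      it = table.erase(it);
    } else {
      ++it;
    }
  }
  // Re-insert relocated entries only after iteration, since insertion may
  // rehash and invalidate iterators.
  for (auto& [key, value] : moved) {
    table.emplace(std::move(key), std::move(value));
  }
}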
 
 
void ScavengerCollector::ClearOldEphemerons() {
  EphemeronRememberedSet::TableMap* table_map =
      heap_->ephemeron_remembered_set()->tables();
  for (auto it = table_map->begin(); it != table_map->end();) {
    Tagged<EphemeronHashTable> table = it->first;
    auto& indices = it->second;
    for (auto iti = indices.begin(); iti != indices.end();) {
      // Keys in EphemeronHashTables must be heap objects.
      HeapObjectSlot key_slot(table->RawFieldOfElementAt(
          EphemeronHashTable::EntryToIndex(InternalIndex(*iti))));
      Tagged<HeapObject> key = key_slot.ToHeapObject();
      if (IsUnscavengedHeapObject(heap_, key)) {
        table->RemoveEntry(InternalIndex(*iti));
        iti = indices.erase(iti);
      } else {
        Tagged<HeapObject> forwarded = ForwardingAddress(key);
        key_slot.StoreHeapObject(forwarded);
        if (!Heap::InYoungGeneration(forwarded)) {
          // The key left the young generation; it need not be remembered.
          iti = indices.erase(iti);
        } else {
          ++iti;
        }
      }
    }
    if (indices.empty()) {
      it = table_map->erase(it);
    } else {
      ++it;
    }
  }
}
 
 
  // In Scavenger::Finalize: hand over locally recorded ephemeron key writes.
  for (auto& it : local_ephemeron_remembered_set_) {
    heap_->ephemeron_remembered_set()->RecordEphemeronKeyWrites(
        it.first, std::move(it.second));
  }
 
 
template <typename TSlot>
void Scavenger::CheckOldToNewSlotForSharedUntyped(MemoryChunk* chunk,
                                                  MutablePageMetadata* page,
                                                  TSlot slot) {
  Tagged<MaybeObject> object = *slot;
  Tagged<HeapObject> heap_object;
  if (object.GetHeapObject(&heap_object) &&
      InWritableSharedSpace(heap_object)) {
    RememberedSet<OLD_TO_SHARED>::Insert<AccessMode::ATOMIC>(
        page, chunk->Offset(slot.address()));
  }
}

void Scavenger::CheckOldToNewSlotForSharedTyped(
    MemoryChunk* chunk, MutablePageMetadata* page, SlotType slot_type,
    Address slot_address, Tagged<MaybeObject> new_target) {
  Tagged<HeapObject> heap_object;
  if (new_target.GetHeapObject(&heap_object) &&
      InWritableSharedSpace(heap_object)) {
    const uintptr_t offset = chunk->Offset(slot_address);
    DCHECK_LT(offset, static_cast<uintptr_t>(TypedSlotSet::kMaxOffset));
    RememberedSet<OLD_TO_SHARED>::InsertTyped(page, slot_type,
                                              static_cast<uint32_t>(offset));
  }
}
 
 
  // From the evacuation paths: an object's size is always derived from its
  // map, and re-derived sizes must agree.
  int object_size = object->SizeFromMap(map);
  // ...
  DCHECK_EQ(object_size, object->SizeFromMap(map));
  // ...
  DCHECK_EQ(object_size, object->SizeFromMap(map));
 
 
void Scavenger::VisitPinnedObjects() {
  // For each ObjectAndMap entry, re-visit the pinned object using the map
  // that was saved when the object was pinned.
  // ...
  scavenge_visitor.Visit(object_and_map.second, object_and_map.first);
  // ...
}
 
 