void MarkRangeAsYoung(BasePage& page, Address begin, Address end) {
#if defined(CPPGC_YOUNG_GENERATION)
  DCHECK_LT(begin, end);

  if (!page.heap().generational_gc_supported()) return;

  // If the page is newly allocated, the range covers the whole payload and
  // the boundary cards can be unconditionally marked as young.
  const bool new_page =
      (begin == page.PayloadStart()) && (end == page.PayloadEnd());

  auto& age_table = CagedHeapLocalData::Get().age_table;
  age_table.SetAgeForRange(CagedHeap::OffsetFromAddress(begin),
                           CagedHeap::OffsetFromAddress(end),
                           AgeTable::Age::kYoung,
                           new_page ? AgeTable::AdjacentCardsPolicy::kIgnore
                                    : AgeTable::AdjacentCardsPolicy::kConsider);
  page.set_as_containing_young_objects(true);
#endif  // defined(CPPGC_YOUNG_GENERATION)
}
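
// A conceptual model of the age table used above (illustrative only; the real
// AgeTable packs one age per fixed-size card of the caged heap):
//
//   enum class Age : uint8_t { kOld, kYoung, kMixed };
//
// SetAgeForRange() marks every card overlapping [begin, end) as kYoung. The
// boundary cards of a brand-new page cannot be shared with older objects, so
// adjacent cards may be ignored (kIgnore); a buffer carved out of an existing
// page must consider them (kConsider), so that a partially covered card
// degrades to kMixed instead of being claimed entirely as young.
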
void AddToFreeList(NormalPageSpace& space, Address start, size_t size) {
  // No need for SetMemoryInaccessible() here: LAB memory is handed back as
  // free, already-inaccessible memory.
  space.free_list().Add({start, size});
  // Concurrent marking may be running while the LAB is set up next to a live
  // object sharing the same cell in the bitmap.
  NormalPage::From(BasePage::FromPayload(start))
      ->object_start_bitmap()
      .SetBit<AccessMode::kAtomic>(start);
}

void ReplaceLinearAllocationBuffer(NormalPageSpace& space,
                                   StatsCollector& stats_collector,
                                   Address new_buffer, size_t new_size) {
  auto& lab = space.linear_allocation_buffer();
  if (lab.size()) {
    // Return the remainder of the old buffer to the free list.
    AddToFreeList(space, lab.start(), lab.size());
    stats_collector.NotifyExplicitFree(lab.size());
  }

  lab.Set(new_buffer, new_size);
  if (new_size) {
    DCHECK_NOT_NULL(new_buffer);
    stats_collector.NotifyAllocation(new_size);
    auto* page = NormalPage::From(BasePage::FromPayload(new_buffer));
    MarkRangeAsYoung(*page, new_buffer, new_buffer + new_size);
  }
}
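
// The LAB is a plain bump-pointer window over a normal page. A minimal model
// of the fast path this function refills (illustrative only; the real
// LinearAllocationBuffer lives in NormalPageSpace and also interacts with the
// object-start bitmap):
//
//   struct Lab {
//     Address start = nullptr;
//     size_t size = 0;
//     Address Allocate(size_t bytes) {
//       if (bytes > size) return nullptr;  // Miss: go out-of-line.
//       Address result = start;
//       start += bytes;
//       size -= bytes;
//       return result;
//     }
//   };
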
LargePage* TryAllocateLargeObjectImpl(PageBackend& page_backend,
                                      LargePageSpace& space, size_t size) {
  LargePage* page = LargePage::TryCreate(page_backend, space, size);
  if (page) return page;

  Sweeper& sweeper = space.raw_heap()->heap()->sweeper();

  // Lazily sweep pages of this space for a bounded amount of time and retry.
  if (sweeper.SweepForAllocationIfRunning(
          &space, size, v8::base::TimeDelta::FromMicroseconds(500)) &&
      (page = LargePage::TryCreate(page_backend, space, size))) {
    return page;
  }

  // Then finish sweeping this space entirely and retry.
  if (sweeper.SweepForAllocationIfRunning(&space, size,
                                          v8::base::TimeDelta::Max()) &&
      (page = LargePage::TryCreate(page_backend, space, size))) {
    return page;
  }

  // Finally, finish sweeping the whole heap and retry one last time.
  if (sweeper.FinishIfRunning() &&
      (page = LargePage::TryCreate(page_backend, space, size))) {
    return page;
  }

  return nullptr;
}
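
// Note: SweepForAllocationIfRunning(space, min_wanted_size, max_duration)
// returns false when the sweeper is idle; otherwise it sweeps `space` until it
// has recovered at least `min_wanted_size` bytes or `max_duration` elapses.
// Hence the escalation above: a time-bounded sweep of the space, an unbounded
// sweep of the space (TimeDelta::Max()), and finally heap-wide completion.
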
void* TryAllocateLargeObject(PageBackend& page_backend, LargePageSpace& space,
                             StatsCollector& stats_collector, size_t size,
                             GCInfoIndex gcinfo) {
  LargePage* page = TryAllocateLargeObjectImpl(page_backend, space, size);
  if (!page) return nullptr;

  space.AddPage(page);

  auto* header = new (page->ObjectHeader())
      HeapObjectHeader(HeapObjectHeader::kLargeObjectSizeInHeader, gcinfo);

  stats_collector.NotifyAllocation(size);
  MarkRangeAsYoung(*page, page->PayloadStart(), page->PayloadEnd());

  return header->ObjectStart();
}
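
// For context, user code never calls these helpers directly; allocations
// enter through the public cppgc API. A minimal sketch (`Node` is a made-up
// example type):
//
//   #include "include/cppgc/allocation.h"
//   #include "include/cppgc/garbage-collected.h"
//
//   class Node final : public cppgc::GarbageCollected<Node> {
//    public:
//     void Trace(cppgc::Visitor*) const {}
//   };
//
//   Node* MakeNode(cppgc::AllocationHandle& handle) {
//     // Small objects bump the LAB; objects at or above
//     // kLargeObjectSizeThreshold take the large-object path above.
//     return cppgc::MakeGarbageCollected<Node>(handle);
//   }
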
ObjectAllocator::ObjectAllocator(RawHeap& heap, PageBackend& page_backend,
                                 StatsCollector& stats_collector,
                                 PreFinalizerHandler& prefinalizer_handler,
                                 FatalOutOfMemoryHandler& oom_handler,
                                 GarbageCollector& garbage_collector)
    : raw_heap_(heap),
      page_backend_(page_backend),
      stats_collector_(stats_collector),
      prefinalizer_handler_(prefinalizer_handler),
      oom_handler_(oom_handler),
      garbage_collector_(garbage_collector) {}

constexpr GCConfig kOnAllocationFailureGCConfig = {
    CollectionType::kMajor, StackState::kMayContainHeapPointers,
    GCConfig::MarkingType::kAtomic,
    GCConfig::SweepingType::kIncrementalAndConcurrent,
    GCConfig::FreeMemoryHandling::kDiscardWherePossible};
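
// This config is the most aggressive reclamation available on allocation
// failure: a major collection with conservative stack scanning, atomic
// marking that completes the cycle synchronously, and incremental-and-
// concurrent sweeping so the retrying allocation paths can pull pages out of
// the sweeper on demand (SweepForAllocationIfRunning());
// kDiscardWherePossible additionally returns freed pages to the OS.
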
void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space,
                                             size_t size, AlignVal alignment,
                                             GCInfoIndex gcinfo) {
  DCHECK_EQ(0, size & kAllocationMask);
  DCHECK_LE(kFreeListEntrySize, size);
  // Out-of-line allocation allows for checking this in all situations.
  CHECK(!in_disallow_gc_scope());

  // Allocations at or above the threshold go directly to the large-object
  // space: retry after at most two GCs before reporting OOM.
  if (size >= kLargeObjectSizeThreshold) {
    auto& large_space = LargePageSpace::From(
        *raw_heap_.Space(RawHeap::RegularSpaceType::kLarge));
    void* result = TryAllocateLargeObject(page_backend_, large_space,
                                          stats_collector_, size, gcinfo);
    if (!result) {
      for (int i = 0; i < 2; i++) {
        auto config = kOnAllocationFailureGCConfig;
        garbage_collector_.CollectGarbage(config);
        result = TryAllocateLargeObject(page_backend_, large_space,
                                        stats_collector_, size, gcinfo);
        if (result) return result;
      }
#if defined(CPPGC_CAGED_HEAP)
      const auto last_alloc_status =
          CagedHeap::Instance().page_allocator().get_last_allocation_status();
      const std::string suffix =
          v8::base::BoundedPageAllocator::AllocationStatusToString(
              last_alloc_status);
      oom_handler_(("Oilpan: Large allocation. " + suffix).c_str());
#else
      oom_handler_("Oilpan: Large allocation.");
#endif  // defined(CPPGC_CAGED_HEAP)
    }
    return result;
  }

  size_t request_size = size;
  // Adjust the request size so that a dynamic alignment request larger than
  // the default allocation granularity can still be satisfied.
  const size_t dynamic_alignment = static_cast<size_t>(alignment);
  if (dynamic_alignment != kAllocationGranularity) {
    CHECK_EQ(2 * sizeof(HeapObjectHeader), dynamic_alignment);
    request_size += kAllocationGranularity;
  }

  // Normal-object path: try to refill the linear allocation buffer, applying
  // the same GC-and-retry scheme on failure.
  if (!TryRefillLinearAllocationBuffer(space, request_size)) {
    for (int i = 0; i < 2; i++) {
      auto config = kOnAllocationFailureGCConfig;
      garbage_collector_.CollectGarbage(config);
      if (TryRefillLinearAllocationBuffer(space, request_size)) break;
      if (i == 1) {
#if defined(CPPGC_CAGED_HEAP)
        const auto last_alloc_status =
            CagedHeap::Instance().page_allocator().get_last_allocation_status();
        const std::string suffix =
            v8::base::BoundedPageAllocator::AllocationStatusToString(
                last_alloc_status);
        oom_handler_(("Oilpan: Normal allocation. " + suffix).c_str());
#else
        oom_handler_("Oilpan: Normal allocation.");
#endif  // defined(CPPGC_CAGED_HEAP)
      }
    }
  }

  // The allocation must succeed, as the LAB was refilled above.
  void* result = (dynamic_alignment == kAllocationGranularity)
                     ? AllocateObjectOnSpace(space, size, gcinfo)
                     : AllocateObjectOnSpace(space, size, alignment, gcinfo);
  CHECK(result);
  return result;
}
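
// Note on the retry scheme: both slow paths run at most two full GCs before
// invoking the fatal OOM handler. With concurrent sweeping, a second cycle can
// observe memory that the first cycle had not finished sweeping, so one extra
// attempt is worthwhile before giving up.
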
bool ObjectAllocator::TryExpandAndRefillLinearAllocationBuffer(
    NormalPageSpace& space) {
  auto* const new_page = NormalPage::TryCreate(page_backend_, space);
  if (!new_page) return false;

  space.AddPage(new_page);
  // Set the linear allocation buffer to the new page's full payload.
  ReplaceLinearAllocationBuffer(space, stats_collector_,
                                new_page->PayloadStart(),
                                new_page->PayloadSize());
  return true;
}

bool ObjectAllocator::TryRefillLinearAllocationBufferFromFreeList(
    NormalPageSpace& space, size_t size) {
  const FreeList::Block entry = space.free_list().Allocate(size);
  if (!entry.address) return false;

  // The block is about to be reused, so stop counting its page's memory as
  // discarded.
  auto& page = *NormalPage::From(BasePage::FromPayload(entry.address));
  if (page.discarded_memory()) {
    stats_collector_.DecrementDiscardedMemory(page.discarded_memory());
    page.ResetDiscardedMemory();
  }

  ReplaceLinearAllocationBuffer(
      space, stats_collector_, static_cast<Address>(entry.address), entry.size);
  return true;
}

void ObjectAllocator::MarkAllPagesAsYoung() {
  class YoungMarker : public HeapVisitor<YoungMarker> {
   public:
    bool VisitNormalPage(NormalPage& page) {
      MarkRangeAsYoung(page, page.PayloadStart(), page.PayloadEnd());
      return true;
    }

    bool VisitLargePage(LargePage& page) {
      MarkRangeAsYoung(page, page.PayloadStart(), page.PayloadEnd());
      return true;
    }
  } visitor;
  USE(visitor);

#if defined(CPPGC_YOUNG_GENERATION)
  visitor.Traverse(raw_heap_);
#endif  // defined(CPPGC_YOUNG_GENERATION)
}
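
// Marking every page young is the conservative reset used when generational
// GC support becomes active: existing objects must not be treated as old (and
// thus skipped) by a following minor GC. The visitor is declared
// unconditionally so that non-generational builds stay warning-free, hence
// USE(visitor).
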
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
void ObjectAllocator::UpdateAllocationTimeout() {
  allocation_timeout_ = garbage_collector_.UpdateAllocationTimeout();
}

void ObjectAllocator::TriggerGCOnAllocationTimeoutIfNeeded() {
  if (!allocation_timeout_) return;
  DCHECK_GT(*allocation_timeout_, 0);
  if (--*allocation_timeout_ == 0) {
    garbage_collector_.CollectGarbage(kOnAllocationFailureGCConfig);
    allocation_timeout_ = garbage_collector_.UpdateAllocationTimeout();
    DCHECK(allocation_timeout_);
    DCHECK_GT(*allocation_timeout_, 0);
  }
}
#endif  // V8_ENABLE_ALLOCATION_TIMEOUT
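
// V8_ENABLE_ALLOCATION_TIMEOUT is a testing/fuzzing aid: the garbage collector
// hands out a countdown of allocations, and once it reaches zero a full GC is
// forced and a fresh countdown is requested. This surfaces GC-unsafe code that
// would otherwise only fail when a collection happens to strike at the wrong
// moment.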