Address AlignAddress(Address address, size_t alignment) {
  return reinterpret_cast<Address>(
      RoundUp(reinterpret_cast<uintptr_t>(address), alignment));
}
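// Illustrative arithmetic, not from the original file: RoundUp rounds an
// integer up to the next multiple of `alignment`. Assuming, for example, a
// 16-byte alignment, an address value of 0x1008 maps to 0x1010, while 0x1010
// is already aligned and returned unchanged.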
// static
const BasePage* BasePage::FromInnerAddress(const HeapBase* heap,
                                           const void* address) {
  return reinterpret_cast<const BasePage*>(
      heap->page_backend()->Lookup(static_cast<ConstAddress>(address)));
}
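// Lookup() resolves the owning page through the page backend rather than by
// masking the address with kPageOffsetMask; presumably so that inner pointers
// into large pages and addresses outside the heap are handled correctly too.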
  if (page->discarded_memory()) {
    page->space().raw_heap()->heap()->stats_collector()
        ->DecrementDiscardedMemory(page->discarded_memory());
  }
  if (page->is_large()) {
HeapObjectHeader* BasePage::TryObjectHeaderFromInnerAddress(
    void* address) const {
  return const_cast<HeapObjectHeader*>(
      TryObjectHeaderFromInnerAddress(const_cast<const void*>(address)));
}

const HeapObjectHeader* BasePage::TryObjectHeaderFromInnerAddress(
    const void* address) const {
  if (header->IsFree()) return nullptr;
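  // Free-list entries reuse the HeapObjectHeader layout (tagged with
  // kFreeListGCInfoIndex), so an inner pointer may resolve to a header that
  // describes free space; the IsFree() check above filters those out.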
#if defined(CPPGC_YOUNG_GENERATION)
void BasePage::AllocateSlotSet() {
  slot_set_ = decltype(slot_set_)(
      static_cast<SlotSet*>(
          SlotSet::Allocate(SlotSet::BucketsForSize(AllocatedSize()))),
      SlotSetDeleter{});
}

void BasePage::SlotSetDeleter::operator()(SlotSet* slot_set) const {
  SlotSet::Delete(slot_set);
}
void BasePage::ResetSlotSet() { slot_set_.reset(); }
#endif  // defined(CPPGC_YOUNG_GENERATION)
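// A minimal sketch (illustrative, not part of this file) of the pattern used
// for slot_set_: a std::unique_ptr with a custom deleter, so that reset()
// both clears the pointer and routes the free through SlotSet::Delete():
//
//   struct Deleter {
//     void operator()(SlotSet* s) const { SlotSet::Delete(s); }
//   };
//   std::unique_ptr<SlotSet, Deleter> slot_set_;
//   slot_set_.reset();  // frees the old SlotSet via Deleter::operator()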
#if defined(CPPGC_YOUNG_GENERATION)
  if (!memory) return nullptr;

  auto* normal_page =
      new (memory) NormalPage(*space.raw_heap()->heap(), space);
  normal_page->SynchronizedStore();
  normal_page->heap().stats_collector()->NotifyAllocatedMemory(kPageSize);
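  // `new (memory)` is a placement new: the NormalPage header is constructed
  // in-place at the start of the kPageSize region the backend reserved, so
  // header and payload share a single mapping and no separate heap allocation
  // is made for the page object itself.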
#if defined(V8_USE_MEMORY_SANITIZER)
  MSAN_ALLOCATED_UNINITIALIZED_MEMORY(normal_page->PayloadStart(),
                                      normal_page->PayloadSize());
#elif defined(V8_USE_ADDRESS_SANITIZER)
  DCHECK_EQ(space.end(), std::find(space.begin(), space.end(), page));
  return iterator(reinterpret_cast<HeapObjectHeader*>(PayloadStart()),
                  lab.start(), lab.size());
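// The linear allocation buffer bounds passed above presumably let the
// iterator skip over the LAB, whose memory contains no constructed
// HeapObjectHeaders yet.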
  return AlignAddress((reinterpret_cast<Address>(this + 1)),
                      kAllocationGranularity);
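  // Illustrative arithmetic (assuming, say, sizeof(NormalPage) == 40 and
  // kAllocationGranularity == 16, both hypothetical values): for a page at
  // 0x10000, `this + 1` yields 0x10028, and AlignAddress rounds that up to
  // 0x10030, the first granule-aligned payload address past the header.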
  auto* heap = space.raw_heap()->heap();
  void* memory = page_backend.TryAllocateLargePageMemory(allocation_size);
  if (!memory) return nullptr;
  page->heap().stats_collector()->NotifyAllocatedMemory(allocation_size);
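  // `allocation_size` covers the page header as well as the payload (assuming
  // the usual relation AllocationSize(size) == PageHeaderSize() + size), so
  // the stats collector is charged for the whole mapping rather than just the
  // object itself.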
  const size_t payload_size = page->PayloadSize();
  DCHECK_EQ(space.end(), std::find(space.begin(), space.end(), page));