// Mutating objects is unsafe while the collector is active.
bool InGC(HeapHandle& heap_handle) {
  const auto& heap = HeapBase::From(heap_handle);
  return heap.in_atomic_pause() || heap.marker() ||
         heap.sweeper().IsSweepingInProgress();
}
// In FreeUnreferencedObject(heap_handle, object):
  if (InGC(heap_handle)) {
    return;  // Freeing is refused while a GC is in progress.
  }
  auto& header = HeapObjectHeader::FromObject(object);
  BasePage* base_page = BasePage::FromPayload(object);

#if defined(CPPGC_YOUNG_GENERATION)
  if (auto& heap_base = HeapBase::From(heap_handle);
      heap_base.generational_gc_supported()) {
    // Old-to-new slots in the freed range must not be revisited by a minor GC.
    heap_base.remembered_set().InvalidateRememberedSlotsInRange(
        object, reinterpret_cast<uint8_t*>(object) + object_size);
    heap_base.remembered_set().InvalidateRememberedSourceObject(header);
    if (header.IsMarked()) {
      // ... the page's marked-bytes counter is decremented accordingly.
    }
  }
#endif  // defined(CPPGC_YOUNG_GENERATION)
  // Regular objects are returned to their page. (Large objects instead have
  // their whole page removed from the space and destroyed.)
  const size_t header_size = header.AllocatedSize();
  auto* normal_page = NormalPage::From(base_page);
  auto& normal_space = *static_cast<NormalPageSpace*>(&base_page->space());
  auto& lab = normal_space.linear_allocation_buffer();
  ConstAddress payload_end = header.ObjectEnd();
  SetMemoryInaccessible(&header, header_size);
  if (payload_end == lab.start()) {
    // The freed block directly precedes the linear allocation buffer: grow
    // the LAB instead of going through the free list.
    lab.Set(reinterpret_cast<Address>(&header), lab.size() + header_size);
    normal_page->object_start_bitmap().ClearBit(lab.start());
  } else {
    base_page->heap().stats_collector()->NotifyExplicitFree(header_size);
    normal_space.free_list().Add({&header, header_size});
  }
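// Usage sketch (not part of this file; `MyNode` and `DropNode` are
// hypothetical). Embedders reach the free path above through
// cppgc::subtle::FreeUnreferencedObject from include/cppgc/explicit-management.h:
//
//   class MyNode final : public cppgc::GarbageCollected<MyNode> {
//    public:
//     void Trace(cppgc::Visitor*) const {}
//   };
//
//   void DropNode(cppgc::HeapHandle& heap_handle, MyNode& node) {
//     // The caller must guarantee that no other live object still references
//     // `node`; the API trades safety for immediate reclamation.
//     cppgc::subtle::FreeUnreferencedObject(heap_handle, node);
//   }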
// In Grow(header, base_page, new_size, size_delta):
  auto& normal_space = *static_cast<NormalPageSpace*>(&base_page.space());
  auto& lab = normal_space.linear_allocation_buffer();
  if (lab.start() == header.ObjectEnd() && lab.size() >= size_delta) {
    // The LAB directly follows the object, so growing just consumes LAB
    // bytes. LABs already count as used memory, so no accounting is needed.
    Address delta_start = lab.Allocate(size_delta);
    SetMemoryAccessible(delta_start, size_delta);
    header.SetAllocatedSize(new_size);
#if defined(CPPGC_YOUNG_GENERATION)
    if (auto& heap_base = *normal_space.raw_heap()->heap();
        heap_base.generational_gc_supported()) {
      // ... a marked object that grows also adds to the page's marked bytes.
    }
#endif  // defined(CPPGC_YOUNG_GENERATION)
    return true;
  }
  return false;  // Growing in place is only supported via the LAB.
bool Shrink(HeapObjectHeader& header, BasePage& base_page, size_t new_size,
            size_t size_delta) {
  DCHECK(!base_page.is_large());

  auto& normal_space = *static_cast<NormalPageSpace*>(&base_page.space());
  auto& lab = normal_space.linear_allocation_buffer();
  Address free_start = header.ObjectEnd() - size_delta;
  if (lab.start() == header.ObjectEnd()) {
    DCHECK_EQ(free_start, lab.start() - size_delta);
    // The object directly precedes the LAB: return the freed tail to the LAB.
    // LABs count as used memory, so no allocated-size accounting is needed.
    lab.Set(free_start, lab.size() + size_delta);
    SetMemoryInaccessible(lab.start(), size_delta);
    header.SetAllocatedSize(new_size);
  } else if (size_delta >= ObjectAllocator::kSmallestSpaceSize) {
    // Only deltas large enough to form a useful free-list entry are returned.
    SetMemoryInaccessible(free_start, size_delta);
    base_page.heap().stats_collector()->NotifyExplicitFree(size_delta);
    normal_space.free_list().Add({free_start, size_delta});
    header.SetAllocatedSize(new_size);
  }
#if defined(CPPGC_YOUNG_GENERATION)
  auto& heap = base_page.heap();
  if (heap.generational_gc_supported()) {
    // Remembered slots in the freed tail must not be revisited by a minor GC.
    heap.remembered_set().InvalidateRememberedSlotsInRange(
        free_start, free_start + size_delta);
    if (header.IsMarked()) {
      // Keep the page's marked-bytes accounting in sync with the shrunk size.
      base_page.DecrementMarkedBytes(/* ... */);
    }
  }
#endif  // defined(CPPGC_YOUNG_GENERATION)
  // Report success even if the delta was too small to release any memory, so
  // that embedders do not start copying objects over small deltas.
  return true;
}
// In Resize(object, new_object_size):
  BasePage* base_page = BasePage::FromPayload(object);
  if (InGC(base_page->heap())) {
    return false;  // Resizing is refused while a GC is in progress.
  }
  if (base_page->is_large()) {
    return false;  // Large objects cannot be grown or shrunk in place.
  }

  // The allocated size includes the header and is rounded up to the
  // allocation granularity.
  const size_t new_size = RoundUp(sizeof(HeapObjectHeader) + new_object_size,
                                  kAllocationGranularity);
  auto& header = HeapObjectHeader::FromObject(object);
  const size_t old_size = header.AllocatedSize();

  if (new_size > old_size) {
    return Grow(header, *base_page, new_size, new_size - old_size);
  } else if (old_size > new_size) {
    return Shrink(header, *base_page, new_size, old_size - new_size);
  }
  // Sizes are equal once alignment and the header are accounted for.
  return true;
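// Usage sketch (not part of this file; `MyBuffer` and `allocation_handle` are
// hypothetical). Embedders reach Grow()/Shrink() through cppgc::subtle::Resize
// and cppgc::AdditionalBytes from include/cppgc/explicit-management.h and
// include/cppgc/allocation.h:
//
//   class MyBuffer final : public cppgc::GarbageCollected<MyBuffer> {
//    public:
//     void Trace(cppgc::Visitor*) const {}
//   };
//
//   // Allocate with 64 extra inline bytes, then give most of them back.
//   MyBuffer* buffer = cppgc::MakeGarbageCollected<MyBuffer>(
//       allocation_handle, cppgc::AdditionalBytes(64));
//   bool resized = cppgc::subtle::Resize(*buffer, cppgc::AdditionalBytes(8));
//   // `resized` is false if a GC was running or the object sits on a large
//   // page; the object itself stays valid either way.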