#ifndef V8_HEAP_CPPGC_HEAP_PAGE_H_
#define V8_HEAP_CPPGC_HEAP_PAGE_H_
  static inline BasePage* FromPayload(void*);
  static inline const BasePage* FromPayload(const void*);
  size_t AllocatedSize() const;
  size_t AllocatedBytesAtLastGC() const;
  // |address| must refer to a real object.
  template <AccessMode = AccessMode::kNonAtomic>
  HeapObjectHeader& ObjectHeaderFromInnerAddress(void* address) const;
  template <AccessMode = AccessMode::kNonAtomic>
  const HeapObjectHeader& ObjectHeaderFromInnerAddress(
      const void* address) const;
  // SynchronizedLoad()/SynchronizedStore() sync a page with concurrent
  // threads after allocation. The std::atomic_thread_fence is sufficient in
  // practice but is not recognized by TSAN, so TSAN builds additionally do an
  // atomic load/store of the |type_| field.
  void SynchronizedLoad() const {
#if defined(THREAD_SANITIZER)
    v8::base::AsAtomicPtr(&type_)->load(std::memory_order_acquire);
#endif
  }
  void SynchronizedStore() {
    std::atomic_thread_fence(std::memory_order_seq_cst);
#if defined(THREAD_SANITIZER)
    v8::base::AsAtomicPtr(&type_)->store(type_, std::memory_order_release);
#endif
  }
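  // Usage sketch (illustrative, not part of this header): a freshly
  // initialized page is published with SynchronizedStore(), and a concurrent
  // marker/sweeper calls SynchronizedLoad() before reading page fields:
  //
  //   page->SynchronizedStore();  // allocating thread: publish after setup
  //   ...
  //   page->SynchronizedLoad();   // concurrent thread: sync before reads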
  void IncrementDiscardedMemory(size_t value) {
    DCHECK_GE(discarded_memory_ + value, discarded_memory_);  // No overflow.
    discarded_memory_ += value;
  }
  V8_INLINE void IncrementMarkedBytes(size_t value) {
    const size_t old_marked_bytes =
        marked_bytes_.fetch_add(value, std::memory_order_relaxed);
    USE(old_marked_bytes);
    DCHECK_GE(old_marked_bytes + value, old_marked_bytes);  // No overflow.
  }
  V8_INLINE void DecrementMarkedBytes(size_t value) {
    const size_t old_marked_bytes =
        marked_bytes_.fetch_sub(value, std::memory_order_relaxed);
    USE(old_marked_bytes);
    DCHECK_LE(old_marked_bytes - value, old_marked_bytes);  // No underflow.
  }
  V8_INLINE void ResetMarkedBytes(size_t new_value = 0) {
    marked_bytes_.store(new_value, std::memory_order_relaxed);
  }
  size_t marked_bytes() const {
    return marked_bytes_.load(std::memory_order_relaxed);
  }
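  // Usage sketch (illustrative; header.TryMarkAtomic() and
  // header.AllocatedSize() are assumed for the example and are not declared
  // in this excerpt): marking code bumps the page's live-byte counter as it
  // marks objects:
  //
  //   if (header.TryMarkAtomic()) {
  //     BasePage::FromPayload(&header)->IncrementMarkedBytes(
  //         header.AllocatedSize());
  //   }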
  void set_as_containing_young_objects(bool value) {
    contains_young_objects_ = value;
  }
#if defined(CPPGC_YOUNG_GENERATION)
  // Remembered set recording old-to-young references for generational GC.
  V8_INLINE SlotSet* slot_set() const { return slot_set_.get(); }
  V8_INLINE SlotSet& GetOrAllocateSlotSet();
#endif  // defined(CPPGC_YOUNG_GENERATION)
  void ChangeOwner(BaseSpace&);
  struct SlotSetDeleter {
    void operator()(SlotSet*) const;
    size_t page_size_ = 0;
  };
  bool contains_young_objects_ = false;
#if defined(CPPGC_YOUNG_GENERATION)
  std::unique_ptr<SlotSet, SlotSetDeleter> slot_set_;
#endif  // defined(CPPGC_YOUNG_GENERATION)
  size_t discarded_memory_ = 0;
  std::atomic<size_t> marked_bytes_{0};
  template <typename T>
  class IteratorImpl {
   public:
    explicit IteratorImpl(T* p, ConstAddress lab_start = nullptr,
                          size_t lab_size = 0)
        : p_(p), lab_start_(lab_start), lab_size_(lab_size) {
      DCHECK_EQ(0, (lab_size & (sizeof(T) - 1)));
      // Skip the linear allocation buffer (LAB); it contains no live objects.
      if (reinterpret_cast<ConstAddress>(p_) == lab_start_) {
        p_ += (lab_size_ / sizeof(T));
      }
    }
    IteratorImpl& operator++() {
      const size_t size = p_->AllocatedSize();
      DCHECK_EQ(0, (size & (sizeof(T) - 1)));
      p_ += (size / sizeof(T));
      // Landing on the LAB start means the next bytes are the LAB; skip it.
      if (reinterpret_cast<ConstAddress>(p_) == lab_start_) {
        p_ += (lab_size_ / sizeof(T));
      }
      return *this;
    }

   private:
    T* p_;
    ConstAddress lab_start_;
    size_t lab_size_;
  };
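  // Usage sketch (illustrative; IsFree() and Process() are assumptions, not
  // declared in this excerpt): NormalPage builds begin()/end() on this
  // iterator, so a range-for visits every object header while transparently
  // skipping the LAB:
  //
  //   for (const HeapObjectHeader& header : *normal_page) {
  //     if (!header.IsFree()) Process(header);
  //   }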
  static NormalPage* From(BasePage* page) {
    DCHECK(!page->is_large());
    return static_cast<NormalPage*>(page);
  }
  static const NormalPage* From(const BasePage* page) {
    return From(const_cast<BasePage*>(page));
  }
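  // Usage sketch (Use() is a hypothetical consumer): code holding a BasePage*
  // dispatches on is_large() before downcasting:
  //
  //   if (page->is_large()) {
  //     Use(LargePage::From(page));
  //   } else {
  //     Use(NormalPage::From(page));
  //   }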
  const_iterator begin() const;
  static constexpr size_t PayloadSize();
  bool PayloadContains(ConstAddress address) const {
    return (PayloadStart() <= address) && (address < PayloadEnd());
  }
  void SetAllocatedBytesAtLastGC(size_t bytes) {
    allocated_bytes_at_last_gc_ = bytes;
  }
  PlatformAwareObjectStartBitmap& object_start_bitmap() {
    return object_start_bitmap_;
  }
  const PlatformAwareObjectStartBitmap& object_start_bitmap() const {
    return object_start_bitmap_;
  }
  size_t allocated_bytes_at_last_gc_ = 0;
  PlatformAwareObjectStartBitmap object_start_bitmap_;
  // Returns the allocation size required for a payload of |size| bytes.
  static size_t AllocationSize(size_t size);
  static const LargePage* From(const BasePage* page) {
    return From(const_cast<BasePage*>(page));
  }
  bool PayloadContains(ConstAddress address) const {
    return (PayloadStart() <= address) && (address < PayloadEnd());
  }
  // LargePage object payloads are guaranteed to be aligned to this constant.
  static constexpr size_t kGuaranteedObjectAlignment =
      2 * kAllocationGranularity;
template <AccessMode mode = AccessMode::kNonAtomic>
const HeapObjectHeader* ObjectHeaderFromInnerAddressImpl(const BasePage* page,
                                                         const void* address) {
  // Large pages hold a single object, so its header is the page's header.
  if (page->is_large()) return LargePage::From(page)->ObjectHeader();
  // On normal pages, look the header up via the object start bitmap.
  return NormalPage::From(page)->object_start_bitmap().FindHeader<mode>(
      static_cast<ConstAddress>(address));
}
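// Usage sketch (illustrative; BasePage::FromInnerAddress() is assumed here
// and is not part of this excerpt): resolving an interior pointer that is
// known to point into a live object back to that object's header:
//
//   if (const BasePage* page = BasePage::FromInnerAddress(&heap, inner_ptr)) {
//     const HeapObjectHeader* header =
//         ObjectHeaderFromInnerAddressImpl<AccessMode::kAtomic>(page,
//                                                               inner_ptr);
//     ...
//   }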
template <AccessMode mode>
HeapObjectHeader& BasePage::ObjectHeaderFromInnerAddress(void* address) const {
  return const_cast<HeapObjectHeader&>(
      ObjectHeaderFromInnerAddress<mode>(const_cast<const void*>(address)));
}

template <AccessMode mode>
const HeapObjectHeader& BasePage::ObjectHeaderFromInnerAddress(
    const void* address) const {
  // |address| may have been found by a concurrent marker on a freshly
  // allocated page; sync with the page initialization first.
  SynchronizedLoad();
  const HeapObjectHeader* header =
      ObjectHeaderFromInnerAddressImpl<mode>(this, address);
  // |address| must not point into a free-list entry.
  DCHECK_NE(kFreeListGCInfoIndex, header->GetGCInfoIndex<mode>());
  return *header;
}
#if defined(CPPGC_YOUNG_GENERATION)
SlotSet& BasePage::GetOrAllocateSlotSet() {
  if (!slot_set_) AllocateSlotSet();
  return *slot_set_;
}
#endif  // defined(CPPGC_YOUNG_GENERATION)
  V8_INLINE size_t operator()(
      const cppgc::internal::BasePage* base_page) const {
#ifdef CPPGC_POINTER_COMPRESSION
    using AddressType = uint32_t;
#else
    using AddressType = uintptr_t;
#endif  // CPPGC_POINTER_COMPRESSION
    return static_cast<AddressType>(reinterpret_cast<uintptr_t>(base_page)) >>
           cppgc::internal::api_constants::kPageSizeBits;
  }
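  // Design note: page headers are kPageSize-aligned, so the low kPageSizeBits
  // bits of a BasePage* are always zero; shifting them out avoids feeding
  // constant-zero bits into the hash.
  //
  // Usage sketch (hypothetical container; assumes this functor is the hash<>
  // specialization for const cppgc::internal::BasePage*):
  //
  //   std::unordered_set<const cppgc::internal::BasePage*,
  //                      hash<const cppgc::internal::BasePage*>>
  //       visited_pages;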
#endif  // V8_HEAP_CPPGC_HEAP_PAGE_H_