v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
heap-page.h
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_CPPGC_HEAP_PAGE_H_
#define V8_HEAP_CPPGC_HEAP_PAGE_H_

#include <atomic>

#include "include/cppgc/internal/base-page-handle.h"
#include "src/base/hashing.h"
#include "src/base/iterator.h"
#include "src/base/macros.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/object-start-bitmap.h"

namespace cppgc {
namespace internal {

class BaseSpace;
class NormalPageSpace;
class LargePageSpace;
class HeapBase;
class PageBackend;
class SlotSet;

class V8_EXPORT_PRIVATE BasePage : public BasePageHandle {
 public:
  static inline BasePage* FromPayload(void*);
  static inline const BasePage* FromPayload(const void*);

  static BasePage* FromInnerAddress(const HeapBase*, void*);
  static const BasePage* FromInnerAddress(const HeapBase*, const void*);

  static void Destroy(BasePage*);

  BasePage(const BasePage&) = delete;
  BasePage& operator=(const BasePage&) = delete;

  HeapBase& heap() const;

  BaseSpace& space() const { return *space_; }

  bool is_large() const { return type_ == PageType::kLarge; }

  Address PayloadStart();
  ConstAddress PayloadStart() const;
  Address PayloadEnd();
  ConstAddress PayloadEnd() const;

  // Size of the payload with the page header.
  size_t AllocatedSize() const;

  // Returns the size of live objects on the page at the last GC.
  // The counter is updated after sweeping.
  size_t AllocatedBytesAtLastGC() const;

  // |address| must refer to a real object.
  template <AccessMode = AccessMode::kNonAtomic>
  HeapObjectHeader& ObjectHeaderFromInnerAddress(void* address) const;
  template <AccessMode = AccessMode::kNonAtomic>
  const HeapObjectHeader& ObjectHeaderFromInnerAddress(
      const void* address) const;

  // |address| is guaranteed to point into the page but not necessarily into
  // the payload. Returns nullptr when pointing into free list entries and the
  // valid header otherwise. The function is not thread-safe and cannot be
  // called when e.g. sweeping is in progress.
  HeapObjectHeader* TryObjectHeaderFromInnerAddress(void* address) const;
  const HeapObjectHeader* TryObjectHeaderFromInnerAddress(
      const void* address) const;
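
  // Usage sketch (assumes `heap` is the owning HeapBase and `inner_ptr` is a
  // non-const pointer into a live object): map an interior pointer to its page
  // first, then to the enclosing object's header.
  //   BasePage* page = BasePage::FromInnerAddress(&heap, inner_ptr);
  //   HeapObjectHeader& header = page->ObjectHeaderFromInnerAddress(inner_ptr);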

  // SynchronizedLoad and SynchronizedStore are used to sync pages after they
  // are allocated. std::atomic_thread_fence is sufficient in practice but is
  // not recognized by tsan. Atomic load and store of the |type_| field are
  // added for tsan builds.
  void SynchronizedLoad() const {
#if defined(THREAD_SANITIZER)
    v8::base::AsAtomicPtr(&type_)->load(std::memory_order_acquire);
#endif
  }
  void SynchronizedStore() {
    std::atomic_thread_fence(std::memory_order_seq_cst);
#if defined(THREAD_SANITIZER)
    v8::base::AsAtomicPtr(&type_)->store(type_, std::memory_order_release);
#endif
  }
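
  // Intended pairing (sketch; assumes the allocating thread publishes a fully
  // initialized page and a concurrent marker later reads its fields):
  //   page->SynchronizedStore();  // allocating thread, after initialization
  //   page->SynchronizedLoad();   // concurrent thread, before reading |type_|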

  void IncrementDiscardedMemory(size_t value) {
    DCHECK_GE(discarded_memory_ + value, discarded_memory_);
    discarded_memory_ += value;
  }
  void ResetDiscardedMemory() { discarded_memory_ = 0; }
  size_t discarded_memory() const { return discarded_memory_; }

  void IncrementMarkedBytes(size_t value) {
    const size_t old_marked_bytes =
        marked_bytes_.fetch_add(value, std::memory_order_relaxed);
    USE(old_marked_bytes);
    DCHECK_GE(old_marked_bytes + value, old_marked_bytes);
  }
  void DecrementMarkedBytes(size_t value) {
    const size_t old_marked_bytes =
        marked_bytes_.fetch_sub(value, std::memory_order_relaxed);
    USE(old_marked_bytes);
    DCHECK_LE(old_marked_bytes - value, old_marked_bytes);
  }
  void ResetMarkedBytes(size_t new_value = 0) {
    marked_bytes_.store(new_value, std::memory_order_relaxed);
  }
  size_t marked_bytes() const {
    return marked_bytes_.load(std::memory_order_relaxed);
  }

  bool contains_young_objects() const { return contains_young_objects_; }
  void set_as_containing_young_objects(bool value) {
    contains_young_objects_ = value;
  }

#if defined(CPPGC_YOUNG_GENERATION)
  V8_INLINE SlotSet* slot_set() const { return slot_set_.get(); }
  V8_INLINE SlotSet& GetOrAllocateSlotSet();
  void ResetSlotSet();
#endif  // defined(CPPGC_YOUNG_GENERATION)

  void ChangeOwner(BaseSpace&);

 protected:
  enum class PageType : uint8_t { kNormal, kLarge };
  BasePage(HeapBase&, BaseSpace&, PageType);

 private:
  struct SlotSetDeleter {
    void operator()(SlotSet*) const;
    size_t page_size_ = 0;
  };
  void AllocateSlotSet();

  BaseSpace* space_;
  PageType type_;
  bool contains_young_objects_ = false;
#if defined(CPPGC_YOUNG_GENERATION)
  std::unique_ptr<SlotSet, SlotSetDeleter> slot_set_;
#endif  // defined(CPPGC_YOUNG_GENERATION)
  size_t discarded_memory_ = 0;
  std::atomic<size_t> marked_bytes_{0};
};

class V8_EXPORT_PRIVATE NormalPage final : public BasePage {
  template <typename T>
  class IteratorImpl : v8::base::iterator<std::forward_iterator_tag, T> {
   public:
    explicit IteratorImpl(T* p, ConstAddress lab_start = nullptr,
                          size_t lab_size = 0)
        : p_(p), lab_start_(lab_start), lab_size_(lab_size) {
      DCHECK(p);
      DCHECK_EQ(0, (lab_size & (sizeof(T) - 1)));
      if (reinterpret_cast<ConstAddress>(p_) == lab_start_) {
        p_ += (lab_size_ / sizeof(T));
      }
    }

    T& operator*() { return *p_; }
    const T& operator*() const { return *p_; }

    bool operator==(IteratorImpl other) const { return p_ == other.p_; }
    bool operator!=(IteratorImpl other) const { return !(*this == other); }

    IteratorImpl& operator++() {
      const size_t size = p_->AllocatedSize();
      DCHECK_EQ(0, (size & (sizeof(T) - 1)));
      p_ += (size / sizeof(T));
      if (reinterpret_cast<ConstAddress>(p_) == lab_start_) {
        p_ += (lab_size_ / sizeof(T));
      }
      return *this;
    }
    IteratorImpl operator++(int) {
      IteratorImpl temp(*this);
      ++(*this);
      return temp;
    }

    T* base() const { return p_; }

   private:
    T* p_;
    ConstAddress lab_start_;
    size_t lab_size_;
  };

 public:
  using iterator = IteratorImpl<HeapObjectHeader>;
  using const_iterator = IteratorImpl<const HeapObjectHeader>;

  // Allocates a new page in the detached state.
  static NormalPage* TryCreate(PageBackend&, NormalPageSpace&);
  // Destroys and frees the page. The page must be detached from the
  // corresponding space (i.e. be swept when called).
  static void Destroy(NormalPage*);

  static NormalPage* From(BasePage* page) {
    DCHECK(!page->is_large());
    return static_cast<NormalPage*>(page);
  }
  static const NormalPage* From(const BasePage* page) {
    return From(const_cast<BasePage*>(page));
  }

  iterator begin();
  const_iterator begin() const;

  iterator end() {
    return iterator(reinterpret_cast<HeapObjectHeader*>(PayloadEnd()));
  }
  const_iterator end() const {
    return const_iterator(
        reinterpret_cast<const HeapObjectHeader*>(PayloadEnd()));
  }
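
  // Iteration sketch (assumes `page` is a NormalPage*): range-based iteration
  // visits every HeapObjectHeader on the page, with the iterator skipping the
  // linear allocation buffer recorded at construction.
  //   for (HeapObjectHeader& header : *page) { /* visit each header */ }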

  Address PayloadStart();
  ConstAddress PayloadStart() const;
  Address PayloadEnd();
  ConstAddress PayloadEnd() const;

  static constexpr size_t PayloadSize();

  bool PayloadContains(ConstAddress address) const {
    return (PayloadStart() <= address) && (address < PayloadEnd());
  }

  size_t AllocatedBytesAtLastGC() const { return allocated_bytes_at_last_gc_; }

  void SetAllocatedBytesAtLastGC(size_t bytes) {
    allocated_bytes_at_last_gc_ = bytes;
  }

  PlatformAwareObjectStartBitmap& object_start_bitmap() {
    return object_start_bitmap_;
  }
  const PlatformAwareObjectStartBitmap& object_start_bitmap() const {
    return object_start_bitmap_;
  }

 private:
  NormalPage(HeapBase& heap, BaseSpace& space);
  ~NormalPage() = default;

  size_t allocated_bytes_at_last_gc_ = 0;
  PlatformAwareObjectStartBitmap object_start_bitmap_;
};

class V8_EXPORT_PRIVATE LargePage final : public BasePage {
 public:
  static constexpr size_t PageHeaderSize() {
    // Header should be un-aligned to `kAllocationGranularity` so that adding a
    // `HeapObjectHeader` gets the user object aligned to
    // `kGuaranteedObjectAlignment`.
    return RoundUp<kGuaranteedObjectAlignment>(sizeof(LargePage) +
                                               sizeof(HeapObjectHeader)) -
           sizeof(HeapObjectHeader);
  }
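
  // Worked example with assumed sizes (illustration only): if
  // sizeof(LargePage) == 40, sizeof(HeapObjectHeader) == 8, and
  // kGuaranteedObjectAlignment == 16, then RoundUp<16>(40 + 8) == 48 and
  // PageHeaderSize() == 48 - 8 == 40. The HeapObjectHeader then occupies
  // offsets [40, 48) and the user object starts at offset 48, a multiple of
  // kGuaranteedObjectAlignment.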

  // Returns the allocation size required for a payload of size |size|.
  static size_t AllocationSize(size_t size);
  // Allocates a new page in the detached state.
  static LargePage* TryCreate(PageBackend&, LargePageSpace&, size_t);
  // Destroys and frees the page. The page must be detached from the
  // corresponding space (i.e. be swept when called).
  static void Destroy(LargePage*);

  static LargePage* From(BasePage* page) {
    DCHECK(page->is_large());
    return static_cast<LargePage*>(page);
  }
  static const LargePage* From(const BasePage* page) {
    return From(const_cast<BasePage*>(page));
  }

  HeapObjectHeader* ObjectHeader();
  const HeapObjectHeader* ObjectHeader() const;

  Address PayloadStart();
  ConstAddress PayloadStart() const;
  Address PayloadEnd();
  ConstAddress PayloadEnd() const;

  size_t PayloadSize() const { return payload_size_; }
  size_t ObjectSize() const {
    DCHECK_GT(payload_size_, sizeof(HeapObjectHeader));
    return payload_size_ - sizeof(HeapObjectHeader);
  }

  size_t AllocatedBytesAtLastGC() const { return ObjectSize(); }

  bool PayloadContains(ConstAddress address) const {
    return (PayloadStart() <= address) && (address < PayloadEnd());
  }

 private:
  static constexpr size_t kGuaranteedObjectAlignment =
      2 * kAllocationGranularity;

  LargePage(HeapBase& heap, BaseSpace& space, size_t);
  ~LargePage() = default;

  size_t payload_size_;
};

// static
BasePage* BasePage::FromPayload(void* payload) {
  return static_cast<BasePage*>(BasePageHandle::FromPayload(payload));
}

// static
const BasePage* BasePage::FromPayload(const void* payload) {
  return static_cast<const BasePage*>(BasePageHandle::FromPayload(payload));
}

template <AccessMode mode = AccessMode::kNonAtomic>
const HeapObjectHeader* ObjectHeaderFromInnerAddressImpl(const BasePage* page,
                                                         const void* address) {
  if (page->is_large()) {
    return LargePage::From(page)->ObjectHeader();
  }
  const PlatformAwareObjectStartBitmap& bitmap =
      NormalPage::From(page)->object_start_bitmap();
  const HeapObjectHeader* header =
      bitmap.FindHeader<mode>(static_cast<ConstAddress>(address));
  DCHECK_LT(address, reinterpret_cast<ConstAddress>(header) +
                         header->AllocatedSize<AccessMode::kAtomic>());
  return header;
}
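
// The lookup above has two paths: a LargePage holds exactly one object, so its
// single header is returned directly; on a NormalPage the object-start bitmap
// is consulted to find the header of the object enclosing |address|.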

template <AccessMode mode>
HeapObjectHeader& BasePage::ObjectHeaderFromInnerAddress(
    void* address) const {
  return const_cast<HeapObjectHeader&>(
      ObjectHeaderFromInnerAddress<mode>(const_cast<const void*>(address)));
}

template <AccessMode mode>
const HeapObjectHeader& BasePage::ObjectHeaderFromInnerAddress(
    const void* address) const {
  // This method might be called for |address| found via a Trace method of
  // another object. If |address| is on a newly allocated page, there will
  // be no sync between the page allocation and a concurrent marking thread,
  // resulting in a race with page initialization (specifically with writing
  // the page |type_| field). This can occur when tracing a Member holding a
  // reference to a mixin type.
  SynchronizedLoad();
  const HeapObjectHeader* header =
      ObjectHeaderFromInnerAddressImpl<mode>(this, address);
  DCHECK_NE(kFreeListGCInfoIndex,
            header->GetGCInfoIndex<AccessMode::kAtomic>());
  return *header;
}

#if defined(CPPGC_YOUNG_GENERATION)
SlotSet& BasePage::GetOrAllocateSlotSet() {
  if (!slot_set_) AllocateSlotSet();
  return *slot_set_;
}
#endif  // defined(CPPGC_YOUNG_GENERATION)

// static
constexpr inline size_t NormalPage::PayloadSize() {
  return kPageSize - RoundUp(sizeof(NormalPage), kAllocationGranularity);
}
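
// Illustrative arithmetic (assumed values only): with kPageSize == 1 << 17 and
// RoundUp(sizeof(NormalPage), kAllocationGranularity) == 256, PayloadSize()
// evaluates to 131072 - 256 == 130816 bytes; the real values depend on the
// build configuration.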

}  // namespace internal
}  // namespace cppgc

namespace v8::base {

template <>
struct hash<const cppgc::internal::BasePage*> {
  V8_INLINE size_t
  operator()(const cppgc::internal::BasePage* base_page) const {
#ifdef CPPGC_POINTER_COMPRESSION
    using AddressType = uint32_t;
#else
    using AddressType = uintptr_t;
#endif
    return static_cast<AddressType>(reinterpret_cast<uintptr_t>(base_page)) >>
           cppgc::internal::api_constants::kPageSizeBits;
  }
};
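
// Note: assuming pages are allocated at (1 << kPageSizeBits) alignment, the
// low bits of a BasePage pointer are always zero, so shifting them out yields
// a compact per-page hash value; under CPPGC_POINTER_COMPRESSION the result is
// additionally truncated to 32 bits via AddressType.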

}  // namespace v8::base

#endif  // V8_HEAP_CPPGC_HEAP_PAGE_H_