v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
heap-page.cc
Go to the documentation of this file.
1// Copyright 2020 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
7#include <algorithm>
8#include <cstddef>
9
11#include "src/base/logging.h"
16#include "src/heap/cppgc/heap.h"
23
24namespace cppgc::internal {
25
26namespace {
27
28Address AlignAddress(Address address, size_t alignment) {
29 return reinterpret_cast<Address>(
30 RoundUp(reinterpret_cast<uintptr_t>(address), alignment));
31}
32
33} // namespace
34
36 return static_cast<HeapBase&>(heap_handle_);
37}
38
39// static
41 return const_cast<BasePage*>(
42 FromInnerAddress(heap, const_cast<const void*>(address)));
43}
44
45// static
47 const void* address) {
48 return reinterpret_cast<const BasePage*>(
49 heap->page_backend()->Lookup(static_cast<ConstAddress>(address)));
50}
51
52// static
54 if (page->discarded_memory()) {
55 page->space()
56 .raw_heap()
57 ->heap()
58 ->stats_collector()
59 ->DecrementDiscardedMemory(page->discarded_memory());
60 }
61 if (page->is_large()) {
63 } else {
65 }
66}
67
72
74 return const_cast<BasePage*>(this)->PayloadStart();
75}
76
81
83 return const_cast<BasePage*>(this)->PayloadEnd();
84}
85
92
97
99 void* address) const {
100 return const_cast<HeapObjectHeader*>(
101 TryObjectHeaderFromInnerAddress(const_cast<const void*>(address)));
102}
103
105 const void* address) const {
106 if (is_large()) {
107 if (!LargePage::From(this)->PayloadContains(
108 static_cast<ConstAddress>(address)))
109 return nullptr;
110 } else {
111 const NormalPage* normal_page = NormalPage::From(this);
112 if (!normal_page->PayloadContains(static_cast<ConstAddress>(address)))
113 return nullptr;
114 // Check that the space has no linear allocation buffer.
115 DCHECK(!NormalPageSpace::From(normal_page->space())
117 .size());
118 }
119
120 // |address| is on the heap, so the inner-address lookup can get the header.
121 const HeapObjectHeader* header =
123 if (header->IsFree()) return nullptr;
125 return header;
126}
127
128#if defined(CPPGC_YOUNG_GENERATION)
130 DCHECK_NULL(slot_set_);
131 slot_set_ = decltype(slot_set_)(
132 static_cast<SlotSet*>(
133 SlotSet::Allocate(SlotSet::BucketsForSize(AllocatedSize()))),
134 SlotSetDeleter{AllocatedSize()});
135}
136
137void BasePage::SlotSetDeleter::operator()(SlotSet* slot_set) const {
138 DCHECK_NOT_NULL(slot_set);
139 SlotSet::Delete(slot_set);
140}
141
142void BasePage::ResetSlotSet() { slot_set_.reset(); }
143#endif // defined(CPPGC_YOUNG_GENERATION)
144
147 space_(&space),
148 type_(type)
149#if defined(CPPGC_YOUNG_GENERATION)
150 ,
151 slot_set_(nullptr, SlotSetDeleter{})
152#endif // defined(CPPGC_YOUNG_GENERATION)
153{
154 DCHECK_EQ(0u, reinterpret_cast<uintptr_t>(this) & kPageOffsetMask);
155 DCHECK_EQ(&heap.raw_heap(), space_->raw_heap());
156}
157
159 DCHECK_EQ(space_->raw_heap(), space.raw_heap());
160 space_ = &space;
161}
162
163// static
165 NormalPageSpace& space) {
166 void* memory = page_backend.TryAllocateNormalPageMemory();
167 if (!memory) return nullptr;
168
169 auto* normal_page = new (memory) NormalPage(*space.raw_heap()->heap(), space);
170 normal_page->SynchronizedStore();
171 normal_page->heap().stats_collector()->NotifyAllocatedMemory(kPageSize);
172 // Memory is zero initialized as
173 // a) memory retrieved from the OS is zeroed;
174 // b) memory retrieved from the page pool was swept and thus is zeroed except
175 // for the first header which will anyways serve as header again.
176 //
177 // The following is a subset of SetMemoryInaccessible() to establish the
178 // invariant that memory is in the same state as it would be after sweeping.
179 // This allows to return newly allocated pages to go into that LAB and back
180 // into the free list.
181 Address begin = normal_page->PayloadStart() + sizeof(HeapObjectHeader);
182 const size_t size = normal_page->PayloadSize() - sizeof(HeapObjectHeader);
183#if defined(V8_USE_MEMORY_SANITIZER)
185#elif defined(V8_USE_ADDRESS_SANITIZER)
187#elif DEBUG
189#endif // Release builds.
191 return normal_page;
192}
193
194// static
196 DCHECK(page);
197 HeapBase& heap = page->heap();
198 const BaseSpace& space = page->space();
199 DCHECK_EQ(space.end(), std::find(space.begin(), space.end(), page));
200 USE(space);
201 page->~NormalPage();
202 PageBackend* backend = heap.page_backend();
203 heap.stats_collector()->NotifyFreedMemory(kPageSize);
204 backend->FreeNormalPageMemory(reinterpret_cast<Address>(page));
205}
206
208 : BasePage(heap, space, PageType::kNormal), object_start_bitmap_() {
210 static_cast<size_t>(PayloadEnd() - PayloadStart()));
211}
212
215 return iterator(reinterpret_cast<HeapObjectHeader*>(PayloadStart()),
216 lab.start(), lab.size());
217}
218
221 return const_iterator(
222 reinterpret_cast<const HeapObjectHeader*>(PayloadStart()), lab.start(),
223 lab.size());
224}
225
227 return AlignAddress((reinterpret_cast<Address>(this + 1)),
229}
230
232 return const_cast<NormalPage*>(this)->PayloadStart();
233}
234
236
238 return const_cast<NormalPage*>(this)->PayloadEnd();
239}
240
242 : BasePage(heap, space, PageType::kLarge), payload_size_(size) {}
243
244// static
245size_t LargePage::AllocationSize(size_t payload_size) {
246 return PageHeaderSize() + payload_size;
247}
248
249// static
251 LargePageSpace& space, size_t size) {
252 // Ensure that the API-provided alignment guarantees does not violate the
253 // internally guaranteed alignment of large page allocations.
254 static_assert(kGuaranteedObjectAlignment <=
256 static_assert(
258
260 const size_t allocation_size = AllocationSize(size);
261
262 auto* heap = space.raw_heap()->heap();
263 void* memory = page_backend.TryAllocateLargePageMemory(allocation_size);
264 if (!memory) return nullptr;
265
266 LargePage* page = new (memory) LargePage(*heap, space, size);
267 page->SynchronizedStore();
268 page->heap().stats_collector()->NotifyAllocatedMemory(allocation_size);
269 return page;
270}
271
272// static
274 DCHECK(page);
275 HeapBase& heap = page->heap();
276 const size_t payload_size = page->PayloadSize();
277#if DEBUG
278 const BaseSpace& space = page->space();
279 {
280 // Destroy() happens on the mutator but another concurrent sweeper task may
281 // add a live object using `BaseSpace::AddPage()` while iterating the
282 // pages.
283 v8::base::MutexGuard guard(&space.pages_mutex());
284 DCHECK_EQ(space.end(), std::find(space.begin(), space.end(), page));
285 }
286#endif // DEBUG
287 page->~LargePage();
288 PageBackend* backend = heap.page_backend();
289 heap.stats_collector()->NotifyFreedMemory(AllocationSize(payload_size));
290 backend->FreeLargePageMemory(reinterpret_cast<Address>(page));
291}
292
294 return reinterpret_cast<HeapObjectHeader*>(PayloadStart());
295}
296
298 return reinterpret_cast<const HeapObjectHeader*>(PayloadStart());
299}
300
302 return reinterpret_cast<Address>(this) + PageHeaderSize();
303}
304
306 return const_cast<LargePage*>(this)->PayloadStart();
307}
308
310
312 return const_cast<LargePage*>(this)->PayloadEnd();
313}
314
315} // namespace cppgc::internal
#define ASAN_POISON_MEMORY_REGION(start, size)
Definition asan.h:64
HeapObjectHeader * TryObjectHeaderFromInnerAddress(void *address) const
Definition heap-page.cc:98
HeapBase & heap() const
Definition heap-page.cc:35
size_t AllocatedSize() const
Definition heap-page.cc:86
size_t AllocatedBytesAtLastGC() const
Definition heap-page.cc:93
static void Destroy(BasePage *)
Definition heap-page.cc:53
void ChangeOwner(BaseSpace &)
Definition heap-page.cc:158
BaseSpace & space() const
Definition heap-page.h:45
static BasePage * FromInnerAddress(const HeapBase *, void *)
Definition heap-page.cc:40
BasePage(const BasePage &)=delete
static LargePage * TryCreate(PageBackend &, LargePageSpace &, size_t)
Definition heap-page.cc:250
static LargePage * From(BasePage *page)
Definition heap-page.h:275
static constexpr size_t kGuaranteedObjectAlignment
Definition heap-page.h:304
HeapObjectHeader * ObjectHeader()
Definition heap-page.cc:293
size_t AllocatedBytesAtLastGC() const
Definition heap-page.h:297
static size_t AllocationSize(size_t size)
Definition heap-page.cc:245
size_t PayloadSize() const
Definition heap-page.h:291
LargePage(HeapBase &heap, BaseSpace &space, size_t)
Definition heap-page.cc:241
static void Destroy(LargePage *)
Definition heap-page.cc:273
static constexpr size_t PageHeaderSize()
Definition heap-page.h:258
static NormalPageSpace & From(BaseSpace &space)
Definition heap-space.h:93
LinearAllocationBuffer & linear_allocation_buffer()
Definition heap-space.h:103
static void Destroy(NormalPage *)
Definition heap-page.cc:195
static NormalPage * From(BasePage *page)
Definition heap-page.h:205
static NormalPage * TryCreate(PageBackend &, NormalPageSpace &)
Definition heap-page.cc:164
bool PayloadContains(ConstAddress address) const
Definition heap-page.h:231
static constexpr size_t PayloadSize()
Definition heap-page.h:368
size_t AllocatedBytesAtLastGC() const
Definition heap-page.h:235
NormalPage(HeapBase &heap, BaseSpace &space)
Definition heap-page.cc:207
IteratorImpl< HeapObjectHeader > iterator
Definition heap-page.h:196
IteratorImpl< const HeapObjectHeader > const_iterator
Definition heap-page.h:197
void FreeNormalPageMemory(Address writeable_base)
void FreeLargePageMemory(Address writeable_base)
Address TryAllocateLargePageMemory(size_t size)
BasePage * page
Definition sweeper.cc:218
bool defined
#define MSAN_ALLOCATED_UNINITIALIZED_MEMORY(start, size)
Definition msan.h:29
static constexpr size_t kMaxSupportedAlignment
constexpr size_t kPageSize
Definition globals.h:42
uint8_t * Address
Definition globals.h:17
V8_INLINE void CheckMemoryIsInaccessible(const void *address, size_t size)
Definition memory.h:73
V8_INLINE void ZapMemory(void *address, size_t size)
Definition memory.h:25
constexpr size_t kPageOffsetMask
Definition globals.h:43
const HeapObjectHeader * ObjectHeaderFromInnerAddressImpl(const BasePage *page, const void *address)
Definition heap-page.h:324
const uint8_t * ConstAddress
Definition globals.h:18
constexpr size_t kAllocationGranularity
Definition globals.h:37
constexpr GCInfoIndex kFreeListGCInfoIndex
Definition globals.h:48
constexpr size_t kLargeObjectSizeThreshold
Definition globals.h:46
#define DCHECK_LE(v1, v2)
Definition logging.h:490
#define DCHECK_NULL(val)
Definition logging.h:491
#define DCHECK_NOT_NULL(val)
Definition logging.h:492
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define USE(...)
Definition macros.h:293
constexpr T RoundUp(T x, intptr_t m)
Definition macros.h:387