#if !defined(CPPGC_CAGED_HEAP)
#error "Must be compiled with caged heap enabled"
#endif  // !defined(CPPGC_CAGED_HEAP)

uintptr_t CagedHeapBase::g_heap_base_ = 0u;
size_t CagedHeapBase::g_age_table_size_ = 0u;

VirtualMemory ReserveCagedHeap(PageAllocator& platform_allocator) {
  DCHECK_EQ(0u, api_constants::kCagedHeapMaxReservationSize %
                    platform_allocator.AllocatePageSize());

  static constexpr size_t kAllocationTries = 4;
  for (size_t i = 0; i < kAllocationTries; ++i) {
#if defined(CPPGC_POINTER_COMPRESSION)
    // With pointer compression, reserve twice the cage size and alignment;
    // the cage base is later offset into the reservation (see kBaseOffset in
    // the constructor).
    static constexpr size_t kTryReserveSize =
        2 * api_constants::kCagedHeapMaxReservationSize;
    static constexpr size_t kTryReserveAlignment =
        2 * api_constants::kCagedHeapReservationAlignment;
#else   // !defined(CPPGC_POINTER_COMPRESSION)
    static constexpr size_t kTryReserveSize =
        api_constants::kCagedHeapMaxReservationSize;
    static constexpr size_t kTryReserveAlignment =
        api_constants::kCagedHeapReservationAlignment;
#endif  // !defined(CPPGC_POINTER_COMPRESSION)

    void* hint = reinterpret_cast<void*>(RoundDown(
        reinterpret_cast<uintptr_t>(platform_allocator.GetRandomMmapAddr()),
        kTryReserveAlignment));

    VirtualMemory memory(&platform_allocator, kTryReserveSize,
                         kTryReserveAlignment, hint);
    if (memory.IsReserved()) return memory;
  }

  GetGlobalOOMHandler()("Oilpan: CagedHeap reservation.");
}
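ReserveCagedHeap() above retries the reservation up to kAllocationTries times, each attempt hinted at a random mmap address rounded down to the reservation alignment. Below is a minimal standalone sketch of just that hint arithmetic; the alignment value and the random address are invented stand-ins for api_constants::kCagedHeapReservationAlignment and PageAllocator::GetRandomMmapAddr().

```cpp
#include <cstdint>
#include <cstdio>

namespace {

// Illustrative stand-in for api_constants::kCagedHeapReservationAlignment.
constexpr uintptr_t kTryReserveAlignment = uintptr_t{4} * 1024 * 1024 * 1024;

// Same rounding the RoundDown() helper performs for a power-of-two modulus.
constexpr uintptr_t RoundDown(uintptr_t x, uintptr_t m) { return x & ~(m - 1); }

}  // namespace

int main() {
  // Pretend this value came from platform_allocator.GetRandomMmapAddr().
  const uintptr_t fake_random_addr = uintptr_t{0x7f3a12345678};
  // Round down to the alignment boundary so the OS hint is cage-aligned.
  const uintptr_t hint = RoundDown(fake_random_addr, kTryReserveAlignment);
  std::printf("hint = %#zx\n", static_cast<size_t>(hint));
  return 0;
}
```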
// static
void CagedHeap::InitializeIfNeeded(PageAllocator& platform_allocator,
                                   size_t desired_heap_size) {
  static v8::base::LeakyObject<CagedHeap> caged_heap(platform_allocator,
                                                     desired_heap_size);
}

CagedHeap::CagedHeap(PageAllocator& platform_allocator,
                     size_t desired_heap_size)
    : reserved_area_(ReserveCagedHeap(platform_allocator)) {
  using CagedAddress = CagedHeap::AllocatorType::Address;

#if defined(CPPGC_POINTER_COMPRESSION)
  // Place the cage in the upper half of the doubled reservation made in
  // ReserveCagedHeap().
  static constexpr size_t kBaseOffset =
      api_constants::kCagedHeapMaxReservationSize;
#else   // !defined(CPPGC_POINTER_COMPRESSION)
  static constexpr size_t kBaseOffset = 0;
#endif  // !defined(CPPGC_POINTER_COMPRESSION)

  void* const cage_start =
      static_cast<uint8_t*>(reserved_area_.address()) + kBaseOffset;

  CagedHeapBase::g_heap_base_ = reinterpret_cast<uintptr_t>(cage_start);

#if defined(CPPGC_POINTER_COMPRESSION)
  // The globally published cage base may only be set once.
  CHECK(!CageBaseGlobal::IsSet());
  CageBaseGlobalUpdater::UpdateCageBase(CagedHeapBase::g_heap_base_);
#endif  // defined(CPPGC_POINTER_COMPRESSION)

  const size_t total_heap_size = std::clamp<size_t>(
      v8::base::bits::RoundUpToPowerOfTwo64(desired_heap_size),
      api_constants::kCagedHeapDefaultReservationSize,
      api_constants::kCagedHeapMaxReservationSize);

  const size_t local_data_size =
      CagedHeapLocalData::CalculateLocalDataSizeForHeapSize(total_heap_size);
  const CagedAddress caged_heap_start = RoundUp(
      reinterpret_cast<CagedAddress>(cage_start) + local_data_size, kPageSize);
  const size_t local_data_size_with_padding =
      caged_heap_start - reinterpret_cast<CagedAddress>(cage_start);

  page_bounded_allocator_ = std::make_unique<v8::base::BoundedPageAllocator>(
      &platform_allocator, caged_heap_start,
      total_heap_size - local_data_size_with_padding, kPageSize,
      v8::base::PageInitializationMode::kAllocatedPagesMustBeZeroInitialized,
      v8::base::PageFreeingMode::kMakeInaccessible);

  instance_ = this;
  CagedHeapBase::g_age_table_size_ = AgeTable::CalculateAgeTableSizeForHeapSize(
      api_constants::kCagedHeapDefaultReservationSize);
}

// static
void CagedHeap::CommitAgeTable(PageAllocator& platform_allocator) {
  if (!platform_allocator.SetPermissions(
          reinterpret_cast<void*>(CagedHeapBase::g_heap_base_),
          RoundUp(CagedHeapBase::g_age_table_size_,
                  platform_allocator.CommitPageSize()),
          PageAllocator::kReadWrite)) {
    GetGlobalOOMHandler()("Oilpan: CagedHeap commit CageMetadata.");
  }
}
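The constructor places the cage metadata (CagedHeapLocalData, which includes the age table) at the cage base and hands the BoundedPageAllocator only the page-aligned remainder. The following self-contained sketch reproduces that layout arithmetic with invented constants standing in for api_constants and CalculateLocalDataSizeForHeapSize().

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Illustrative stand-ins; the real values come from api_constants and
// CagedHeapLocalData::CalculateLocalDataSizeForHeapSize().
constexpr size_t kPageSize = 4096;
constexpr uintptr_t kCageStart = uintptr_t{0x400000000000};        // hypothetical cage base
constexpr size_t kTotalHeapSize = size_t{4} * 1024 * 1024 * 1024;  // 4 GiB cage
constexpr size_t kLocalDataSize = 2 * 1024 * 1024 + 123;           // not page-aligned

// Power-of-two round-up, mirroring the RoundUp() helper used above.
constexpr uintptr_t RoundUp(uintptr_t x, uintptr_t m) { return (x + m - 1) & ~(m - 1); }

int main() {
  // The heap area begins at the first page boundary after the local data.
  const uintptr_t caged_heap_start = RoundUp(kCageStart + kLocalDataSize, kPageSize);
  // Any round-up padding is charged against the usable heap size.
  const size_t local_data_size_with_padding =
      static_cast<size_t>(caged_heap_start - kCageStart);
  const size_t bounded_allocator_size = kTotalHeapSize - local_data_size_with_padding;
  std::printf("heap starts at cage offset %#zx, usable size %zu bytes\n",
              local_data_size_with_padding, bounded_allocator_size);
  return 0;
}
```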
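For orientation, here is a hedged sketch of how the static entry points in this file are typically driven: initialize the cage once, then commit the age table before generational bookkeeping starts. The wrapper function name is a placeholder, and the snippet assumes it sits inside the same cppgc::internal context as the code above rather than being a standalone program.

```cpp
// Sketch only: assumes cppgc::internal scope, CPPGC_CAGED_HEAP enabled, and a
// PageAllocator& supplied by the embedder (e.g. via its cppgc Platform).
void SetUpCagedHeapForIllustration(PageAllocator& platform_allocator,
                                   size_t desired_heap_size) {
  // Reserves the cage and publishes CagedHeapBase::g_heap_base_; only the
  // first call performs the reservation.
  CagedHeap::InitializeIfNeeded(platform_allocator, desired_heap_size);

  // Flips the age-table prefix of the cage metadata to read/write so
  // generational GC bookkeeping can be recorded there.
  CagedHeap::CommitAgeTable(platform_allocator);

  // After initialization the singleton (and its bounded page allocator) is
  // available to the rest of the heap.
  CagedHeap& cage = CagedHeap::Instance();
  (void)cage;
}
```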