class PageAllocatorInitializer {
 public:
  PageAllocatorInitializer() {
    page_allocator_ = V8::GetCurrentPlatform()->GetPageAllocator();
    if (page_allocator_ == nullptr) {
      static base::LeakyObject<base::PageAllocator> default_page_allocator;
      page_allocator_ = default_page_allocator.get();
    }
#if defined(LEAK_SANITIZER)
    static base::LeakyObject<base::LsanPageAllocator> lsan_allocator(
        page_allocator_);
    page_allocator_ = lsan_allocator.get();
#endif
  }

  PageAllocator* page_allocator() const { return page_allocator_; }

  void SetPageAllocatorForTesting(PageAllocator* allocator) {
    page_allocator_ = allocator;
  }

 private:
  PageAllocator* page_allocator_;
};

DEFINE_LAZY_LEAKY_OBJECT_GETTER(PageAllocatorInitializer,
                                GetPageAllocatorInitializer)
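// DEFINE_LAZY_LEAKY_OBJECT_GETTER expands to a GetPageAllocatorInitializer()
// accessor backed by a never-destroyed singleton, so the process-wide page
// allocator is set up once on first use and intentionally leaked at exit.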
// We will attempt allocation this many times. After each failure, we call
// OnCriticalMemoryPressure to try to free up some memory before retrying.
const int kAllocationTries = 2;

v8::PageAllocator* GetPlatformPageAllocator() {
  DCHECK_NOT_NULL(GetPageAllocatorInitializer()->page_allocator());
  return GetPageAllocatorInitializer()->page_allocator();
}
v8::VirtualAddressSpace* GetPlatformVirtualAddressSpace() {
#if defined(LEAK_SANITIZER)
  static base::LeakyObject<base::LsanVirtualAddressSpace> vas(
      std::make_unique<base::VirtualAddressSpace>());
#else
  static base::LeakyObject<base::VirtualAddressSpace> vas;
#endif
  return vas.get();
}
#ifdef V8_ENABLE_SANDBOX
v8::PageAllocator* GetSandboxPageAllocator() {
  CHECK(Sandbox::current()->is_initialized());
  return Sandbox::current()->page_allocator();
}
#endif
v8::PageAllocator* SetPlatformPageAllocatorForTesting(
    v8::PageAllocator* new_page_allocator) {
  v8::PageAllocator* old_page_allocator = GetPlatformPageAllocator();
  GetPageAllocatorInitializer()->SetPageAllocatorForTesting(new_page_allocator);
  return old_page_allocator;
}
void* Malloced::operator new(size_t size) {
  void* result = AllocWithRetry(size);
  if (V8_UNLIKELY(result == nullptr)) {
    V8::FatalProcessOutOfMemory(nullptr, "Malloced operator new");
  }
  return result;
}
char* StrDup(const char* str) {
  size_t length = strlen(str);
  char* result = NewArray<char>(length + 1);
  MemCopy(result, str, length);
  result[length] = '\0';
  return result;
}

char* StrNDup(const char* str, size_t n) {
  size_t length = strlen(str);
  if (n < length) length = n;
  char* result = NewArray<char>(length + 1);
  MemCopy(result, str, length);
  result[length] = '\0';
  return result;
}
void* AllocWithRetry(size_t size, MallocFn malloc_fn) {
  void* result = nullptr;
  for (int i = 0; i < kAllocationTries; ++i) {
    result = malloc_fn(size);
    if (V8_LIKELY(result != nullptr)) break;
    OnCriticalMemoryPressure();
  }
  return result;
}
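// Illustrative usage sketch, not part of the original file: AllocWithRetry
// still returns nullptr once all kAllocationTries attempts fail, so callers
// must handle OOM themselves (as Malloced::operator new does above).
// base::Malloc is assumed here to be a valid MallocFn.
//
//   void* buffer = AllocWithRetry(4096, base::Malloc);
//   if (buffer == nullptr) {
//     V8::FatalProcessOutOfMemory(nullptr, "example call site");
//   }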
base::AllocationResult<void*> AllocAtLeastWithRetry(size_t size) {
  base::AllocationResult<char*> result = {nullptr, 0u};
  for (int i = 0; i < kAllocationTries; ++i) {
    result = base::AllocateAtLeast<char>(size);
    if (V8_LIKELY(result.ptr != nullptr)) break;
    OnCriticalMemoryPressure();
  }
  return {result.ptr, result.count};
}
void* AlignedAllocWithRetry(size_t size, size_t alignment) {
  void* result = nullptr;
  for (int i = 0; i < kAllocationTries; ++i) {
    result = base::AlignedAlloc(size, alignment);
    if (V8_LIKELY(result != nullptr)) return result;
    OnCriticalMemoryPressure();
  }
  V8::FatalProcessOutOfMemory(nullptr, "AlignedAlloc");
}
void* AllocatePages(v8::PageAllocator* page_allocator, void* hint, size_t size,
                    size_t alignment, PageAllocator::Permission access) {
  DCHECK_NOT_NULL(page_allocator);
  DCHECK(IsAligned(size, page_allocator->AllocatePageSize()));
  if (!hint && v8_flags.randomize_all_allocations) {
    hint = AlignedAddress(page_allocator->GetRandomMmapAddr(), alignment);
  }
  void* result = nullptr;
  for (int i = 0; i < kAllocationTries; ++i) {
    result = page_allocator->AllocatePages(hint, size, alignment, access);
    if (V8_LIKELY(result != nullptr)) break;
    OnCriticalMemoryPressure();
  }
  return result;
}
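// Illustrative usage sketch, not part of the original file: allocating and
// releasing one read/write page through the platform allocator. The helper
// above randomizes the placement hint and retries under memory pressure.
//
//   v8::PageAllocator* pa = GetPlatformPageAllocator();
//   size_t page_size = pa->AllocatePageSize();
//   void* page = AllocatePages(pa, nullptr, page_size, page_size,
//                              PageAllocator::kReadWrite);
//   if (page != nullptr) FreePages(pa, page, page_size);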
void FreePages(v8::PageAllocator* page_allocator, void* address,
               const size_t size) {
  DCHECK_NOT_NULL(page_allocator);
  if (!page_allocator->FreePages(address, size)) {
    V8::FatalProcessOutOfMemory(nullptr, "FreePages");
  }
}
VirtualMemory::VirtualMemory(v8::PageAllocator* page_allocator, size_t size,
                             void* hint, size_t alignment,
                             PageAllocator::Permission permissions)
    : page_allocator_(page_allocator) {
  DCHECK_NOT_NULL(page_allocator);
  size_t page_size = page_allocator_->AllocatePageSize();
  alignment = RoundUp(alignment, page_size);
  Address address = reinterpret_cast<Address>(
      AllocatePages(page_allocator_, hint, RoundUp(size, page_size), alignment,
                    permissions));
  if (address != kNullAddress) {
    DCHECK(IsAligned(address, alignment));
    region_ = base::AddressRegion(address, size);
  }
}
bool VirtualMemory::RecommitPages(Address address, size_t size,
                                  PageAllocator::Permission access) {
  CHECK(InVM(address, size));
  bool result = page_allocator_->RecommitPages(
      reinterpret_cast<void*>(address), size, access);
  DCHECK(result);
  return result;
}
bool VirtualMemory::DiscardSystemPages(Address address, size_t size) {
  CHECK(InVM(address, size));
  bool result = page_allocator_->DiscardSystemPages(
      reinterpret_cast<void*>(address), size);
  DCHECK(result);
  return result;
}
size_t VirtualMemory::Release(Address free_start) {
  DCHECK(IsReserved());
  // Note: order matters here, because the VirtualMemory object itself might
  // live inside the region that is being released.
  const size_t old_size = region_.size();
  const size_t free_size = old_size - (free_start - region_.begin());
  CHECK(InVM(free_start, free_size));
  region_.set_size(old_size - free_size);
  ReleasePages(page_allocator_, reinterpret_cast<void*>(region_.begin()),
               old_size, region_.size());
  return free_size;
}
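// In the arithmetic above, free_start points into the reserved region, so
// free_size is the tail [free_start, region_.begin() + old_size) being
// returned to the OS while the region shrinks to the remaining prefix.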
VirtualMemoryCage::VirtualMemoryCage(VirtualMemoryCage&& other) V8_NOEXCEPT {
  *this = std::move(other);
}

VirtualMemoryCage& VirtualMemoryCage::operator=(VirtualMemoryCage&& other)
    V8_NOEXCEPT {
  base_ = other.base_;
  size_ = other.size_;
  page_allocator_ = std::move(other.page_allocator_);
  reservation_ = std::move(other.reservation_);
  other.base_ = kNullAddress;
  other.size_ = 0;
  return *this;
}
bool VirtualMemoryCage::InitReservation(
    const ReservationParams& params,
    base::AddressRegion existing_reservation) {
  DCHECK(!reservation_.IsReserved());

  const size_t allocate_page_size = params.page_allocator->AllocatePageSize();
  CHECK(IsAligned(params.reservation_size, allocate_page_size));
  CHECK(params.base_alignment == ReservationParams::kAnyBaseAlignment ||
        IsAligned(params.base_alignment, allocate_page_size));

  if (!existing_reservation.is_empty()) {
    CHECK_EQ(existing_reservation.size(), params.reservation_size);
    reservation_ =
        VirtualMemory(params.page_allocator, existing_reservation.begin(),
                      existing_reservation.size());
    base_ = reservation_.address();
  } else {
    Address hint = params.requested_start_hint;
    VirtualMemory reservation(params.page_allocator, params.reservation_size,
                              reinterpret_cast<void*>(hint),
                              params.base_alignment, params.permissions);
    // The virtual memory reservation fails only due to OOM.
    if (!reservation.IsReserved()) return false;
    reservation_ = std::move(reservation);
    base_ = reservation_.address();
  }
  CHECK_NE(base_, kNullAddress);
  CHECK(IsAligned(base_, params.base_alignment));

  const Address allocatable_base = RoundUp(base_, params.page_size);
  const size_t allocatable_size = RoundDown(
      params.reservation_size - (allocatable_base - base_), params.page_size);
  size_ = allocatable_base + allocatable_size - base_;

  page_allocator_ = std::make_unique<base::BoundedPageAllocator>(
      params.page_allocator, allocatable_base, allocatable_size,
      params.page_size, params.page_initialization_mode,
      params.page_freeing_mode);
  return true;
}
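// Illustrative usage sketch, not part of the original file; the values are
// invented, and only the ReservationParams field names come from the code
// above.
//
//   VirtualMemoryCage cage;
//   VirtualMemoryCage::ReservationParams params;
//   params.page_allocator = GetPlatformPageAllocator();
//   params.page_size = params.page_allocator->AllocatePageSize();
//   params.reservation_size = 512 * params.page_size;
//   params.base_alignment = ReservationParams::kAnyBaseAlignment;
//   params.requested_start_hint = kNullAddress;
//   // permissions, page_initialization_mode and page_freeing_mode as needed.
//   if (cage.InitReservation(params)) {
//     // The cage's BoundedPageAllocator now serves pages from inside the
//     // reservation, e.g. via AllocatePages(cage.page_allocator(), ...).
//   }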