static constexpr size_t kZonePageSize = 256 * KB;

VirtualMemory ReserveAddressSpace(v8::PageAllocator* platform_allocator) {
  // Placement hint: a random mmap address rounded down to the cage alignment.
  void* hint = reinterpret_cast<void*>(RoundDown(
      reinterpret_cast<uintptr_t>(platform_allocator->GetRandomMmapAddr()),
      ZoneCompression::kReservationAlignment));
  VirtualMemory memory(platform_allocator, ZoneCompression::kReservationSize,
                       hint, ZoneCompression::kReservationAlignment);
  if (memory.IsReserved()) return memory;
  base::FatalOOM(base::OOMType::kProcess,
                 "Failed to reserve memory for compressed zones");
  UNREACHABLE();
}

std::unique_ptr<v8::base::BoundedPageAllocator> CreateBoundedAllocator(
    v8::PageAllocator* platform_allocator, Address reservation_start) {
  CHECK(reservation_start);
  auto allocator = std::make_unique<v8::base::BoundedPageAllocator>(
      platform_allocator, reservation_start,
      ZoneCompression::kReservationSize, kZonePageSize,
      base::PageInitializationMode::kAllocatedPagesCanBeUninitialized,
      base::PageFreeingMode::kMakeInaccessible);
  // Keep the first cage page inaccessible: a decompressed null pointer must
  // fault instead of aliasing a live zone page.
  allocator->AllocatePagesAt(reservation_start, kZonePageSize,
                             v8::PageAllocator::kNoAccess);
  return allocator;
}
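The guard page exists because compressed zone pointers are offsets within the cage: nullptr compresses to offset 0, which must never be a usable address. A minimal sketch of such an offset-based scheme, assuming a power-of-two cage alignment; kCageAlignment, Compress, and Decompress are illustrative names, not V8's ZoneCompression API:

#include <cstdint>

// Illustrative sketch only, not V8's ZoneCompression.
constexpr uintptr_t kCageAlignment = uintptr_t{1} << 32;  // e.g. a 4 GB cage

uint32_t Compress(uintptr_t ptr) {
  // Keep only the offset inside the cage; nullptr compresses to offset 0.
  return static_cast<uint32_t>(ptr & (kCageAlignment - 1));
}

uintptr_t Decompress(uintptr_t cage_base, uint32_t offset) {
  // Offset 0 decompresses onto the cage's first page, which is why
  // CreateBoundedAllocator() above maps that page kNoAccess.
  return cage_base + offset;
}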
AccountingAllocator::AccountingAllocator() {
  if (COMPRESS_ZONES_BOOL) {
    v8::PageAllocator* platform_page_allocator = GetPlatformPageAllocator();
    VirtualMemory memory = ReserveAddressSpace(platform_page_allocator);
    reserved_area_ = std::make_unique<VirtualMemory>(std::move(memory));
    bounded_page_allocator_ = CreateBoundedAllocator(
        platform_page_allocator, reserved_area_->address());
  }
}

Segment* AccountingAllocator::AllocateSegment(size_t bytes,
                                              bool supports_compression) {
  void* memory;
  if (COMPRESS_ZONES_BOOL && supports_compression) {
    // Compressed segments live in the cage and are page-granular.
    bytes = RoundUp(bytes, kZonePageSize);
    memory = AllocatePages(bounded_page_allocator_.get(), nullptr, bytes,
                           kZonePageSize, PageAllocator::kReadWrite);
  } else {
    auto result = AllocAtLeastWithRetry(bytes);
    memory = result.ptr;
    bytes = result.count;
  }
  if (memory == nullptr) return nullptr;

  // Bump the current usage, then race other threads to raise the maximum.
  size_t current =
      current_memory_usage_.fetch_add(bytes, std::memory_order_relaxed) + bytes;
  size_t max = max_memory_usage_.load(std::memory_order_relaxed);
  while (current > max &&
         !max_memory_usage_.compare_exchange_weak(max, current,
                                                  std::memory_order_relaxed)) {
    // Retry: compare_exchange_weak reloaded `max` on failure.
  }
  return new (memory) Segment(bytes);
}

void AccountingAllocator::ReturnSegment(Segment* segment,
                                        bool supports_compression) {
  size_t segment_size = segment->total_size();
  current_memory_usage_.fetch_sub(segment_size, std::memory_order_relaxed);
  if (COMPRESS_ZONES_BOOL && supports_compression) {
    FreePages(bounded_page_allocator_.get(), segment, segment_size);
  } else {
    free(segment);
  }
}
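The accounting in AllocateSegment uses a standard lock-free pattern: fetch_add the delta into a running counter, then publish a new peak with a compare-and-swap retry loop. A standalone sketch of the pattern (Account, current, and peak are illustrative names, not V8's):

#include <atomic>
#include <cstddef>

std::atomic<size_t> current{0};
std::atomic<size_t> peak{0};

void Account(size_t bytes) {
  // fetch_add returns the previous value, so `now` is the usage including
  // this allocation.
  size_t now = current.fetch_add(bytes, std::memory_order_relaxed) + bytes;
  size_t max = peak.load(std::memory_order_relaxed);
  // On failure compare_exchange_weak reloads `max`, so the loop exits as
  // soon as some thread has published a peak of at least `now`.
  while (now > max &&
         !peak.compare_exchange_weak(max, now, std::memory_order_relaxed)) {
  }
}

Relaxed ordering is enough here because the counters are pure statistics; no other data is synchronized through them.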
Referenced declarations:

  // v8::PageAllocator
  virtual void* GetRandomMmapAddr() = 0;
  virtual size_t AllocatePageSize() = 0;

  // v8::internal::AccountingAllocator
  virtual ~AccountingAllocator();
  Segment* AllocateSegment(size_t bytes, bool supports_compression);
  void ReturnSegment(Segment* memory, bool supports_compression);
  std::atomic<size_t> current_memory_usage_;
  std::atomic<size_t> max_memory_usage_;
  std::unique_ptr<base::BoundedPageAllocator> bounded_page_allocator_;
  std::unique_ptr<VirtualMemory> reserved_area_;

  // v8::internal::Segment
  size_t total_size() const;

#define COMPRESS_ZONES_BOOL
  // base::PageInitializationMode
  kAllocatedPagesCanBeUninitialized
  // Allocation helpers
  void FatalOOM(OOMType type, const char* msg);
  v8::PageAllocator* GetPlatformPageAllocator();
  void* AllocatePages(v8::PageAllocator* page_allocator, void* hint,
                      size_t size, size_t alignment,
                      PageAllocator::Permission access);
  base::AllocationResult<void*> AllocAtLeastWithRetry(size_t size);
  void FreePages(v8::PageAllocator* page_allocator, void* address,
                 const size_t size);
#define DCHECK_LE(v1, v2)
#define DCHECK(condition)

  // Power-of-two rounding helpers; a sketch of plausible definitions follows
  // this list.
  template <typename T>
  constexpr T RoundUp(T x, intptr_t m);
  template <typename T>
  constexpr T RoundDown(T x, intptr_t m);
  template <typename T, typename U>
  constexpr bool IsAligned(T value, U alignment);
  // v8::internal::ZoneCompression
  static const size_t kReservationSize;
  static const size_t kReservationAlignment;
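The rounding helpers declared above are the usual power-of-two bit tricks. A sketch of plausible definitions, assuming m (and alignment) is a power of two; V8's real versions add overloads and debug checks:

#include <cstdint>

// Assumes m is a power of two, so -m is a mask that clears the low bits.
template <typename T>
constexpr T RoundDown(T x, intptr_t m) {
  return x & static_cast<T>(-m);
}

template <typename T>
constexpr T RoundUp(T x, intptr_t m) {
  return RoundDown<T>(static_cast<T>(x + (m - 1)), m);
}

template <typename T, typename U>
constexpr bool IsAligned(T value, U alignment) {
  return (value & (alignment - 1)) == 0;
}

// Matches the RoundUp call in AllocateSegment(): a 300 KB compressed
// segment occupies two 256 KB zone pages.
static_assert(RoundUp(300 * 1024, 256 * 1024) == 512 * 1024,
              "compressed segment sizes are kZonePageSize-granular");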