// Fragments of V8's MemoryAllocator (heap memory management).
// MemoryAllocator constructor: wire up the three backing page allocators.
      data_page_allocator_(isolate->page_allocator()),
      code_page_allocator_(code_page_allocator),
      trusted_page_allocator_(trusted_page_allocator),
// CommitMemory():
  size_t size = reservation->size();
// ...
// UncommitMemory():
  size_t size = reservation->size();
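// Illustration (assumed analogy, not V8 code): V8 routes committing and
// uncommitting through v8::PageAllocator / VirtualMemory::SetPermissions. On
// a POSIX system the same idea can be modeled as: "commit" makes reserved but
// inaccessible pages usable, "uncommit" drops their contents and protection
// while keeping the address range reserved. Linux-flavored sketch:
#include <sys/mman.h>
#include <cstddef>

bool CommitRegion(void* base, size_t size) {
  // Make the reserved-but-inaccessible pages readable and writable.
  return mprotect(base, size, PROT_READ | PROT_WRITE) == 0;
}

bool UncommitRegion(void* base, size_t size) {
  // Give the page contents back to the OS, then re-protect the range.
  return madvise(base, size, MADV_DONTNEED) == 0 &&
         mprotect(base, size, PROT_NONE) == 0;
}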
// AllocateAlignedMemory(): reserve a chunk_size region aligned to `alignment`
// (signature as declared in the header).
Address MemoryAllocator::AllocateAlignedMemory(
    size_t chunk_size, size_t area_size, size_t alignment,
    AllocationSpace space, Executability executable, void* hint,
    VirtualMemory* controller) {
// ...
  // A chunk ending exactly at the top of the address space would make
  // base + chunk_size wrap around to 0 (kNullAddress), so such a
  // reservation is unusable and rejected here.
  if ((reservation.address() + static_cast<Address>(chunk_size)) == 0u) {
// ...
  *controller = std::move(reservation);
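// Illustration (hypothetical helper, not V8's exact code): the classic way to
// obtain an allocation aligned stronger than the allocator guarantees is to
// over-reserve by `alignment` bytes and trim to an aligned base, which is the
// job AllocateAlignedMemory() performs via VirtualMemory. Minimal sketch:
#include <cstddef>
#include <cstdint>
#include <cstdlib>

// alignment must be a power of two.
uintptr_t AlignUp(uintptr_t value, uintptr_t alignment) {
  return (value + alignment - 1) & ~(alignment - 1);
}

void* ReserveAligned(size_t size, size_t alignment) {
  // Over-reserve so an aligned base is guaranteed to exist in the region.
  void* raw = std::malloc(size + alignment);  // stand-in for a page reservation
  if (raw == nullptr) return nullptr;
  uintptr_t base = AlignUp(reinterpret_cast<uintptr_t>(raw), alignment);
  // A real implementation keeps `raw` (or releases the unused head and tail
  // pages); this sketch leaks it for brevity.
  return reinterpret_cast<void*>(base);
}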
// HandleAllocationFailure(): running out of memory while deserializing the
// snapshot is not recoverable.
  Heap* heap = isolate_->heap();
  if (!heap->deserialization_complete()) {
    heap->FatalProcessOutOfMemory(
        executable == EXECUTABLE
            ? "Executable MemoryChunk allocation failed during deserialization."
            : "MemoryChunk allocation failed during deserialization.");
  }
// AllocateUninitializedChunkAt(): reserve and commit memory for a chunk near
// the given hint address; chunk metadata is not initialized yet.
std::optional<MemoryAllocator::MemoryChunkAllocationResult>
MemoryAllocator::AllocateUninitializedChunkAt(BaseSpace* space,
                                              size_t area_size,
                                              Executability executable,
                                              Address hint,
                                              PageSize page_size) {
#ifndef V8_COMPRESS_POINTERS
// ...
  // Randomize the mapping hint when none was supplied.
  hint = reinterpret_cast<Address>(
      AlignedAddress(isolate_->heap()->GetRandomMmapAddr(), alignment));
// ...
  Address base = AllocateAlignedMemory(
      chunk_size, area_size, alignment,
      space->identity(), executable, reinterpret_cast<void*>(hint),
      &reservation);
// ...
  LOG(isolate_,
      NewEvent("MemoryChunk", reinterpret_cast<void*>(base), chunk_size));
// ...
  Address area_end = area_start + area_size;
// ...
  return MemoryChunkAllocationResult{
      reinterpret_cast<void*>(base), nullptr, chunk_size, area_start, area_end,
      std::move(reservation),
  };
// PartialFreeMemory(): give the tail of a chunk, starting at start_free, back
// to the OS and adjust the allocator's accounting.
void MemoryAllocator::PartialFreeMemory(MemoryChunkMetadata* chunk,
                                        Address start_free,
                                        size_t bytes_to_free,
                                        Address new_area_end) {
// ...
  const size_t released_bytes = reservation->Release(start_free);
  DCHECK_GE(size_, released_bytes);
  size_ -= released_bytes;
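// Illustration (assumed analogy, not V8 code): with raw POSIX mappings,
// releasing a page-aligned tail of an existing mapping is a plain munmap of
// that tail; the bookkeeping then mirrors `size_ -= released_bytes` above.
#include <sys/mman.h>
#include <cstddef>

// free_start must be page-aligned; returns the number of bytes released.
size_t ReleaseTail(char* region_start, size_t old_size, char* free_start) {
  size_t released = static_cast<size_t>(region_start + old_size - free_start);
  if (munmap(free_start, released) != 0) return 0;  // nothing was released
  return released;
}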
// UnregisterMemoryChunk() fragments: executable chunks are additionally
// dropped from the executable-chunk registry.
    UnregisterExecutableMemoryChunk(/* ... */);
// ...
                                    chunk_metadata->size());
// ...
// UnregisterReadOnlyPage():
  DCHECK(!page->Chunk()->executable());
// DeleteMemoryChunk(): return the chunk's pages to the platform allocator,
// rounded up to the allocator's page size.
  FreePages(allocator, /* ... */,
            RoundUp(chunk->size(), allocator->AllocatePageSize()));
// ...
// PreFreeMemory(): log the deletion and remember the unmapped page so the
// heap can later recognize stale pointers into it.
  LOG(isolate_, DeleteEvent("MemoryChunk", chunk_metadata));
// ...
  isolate_->heap()->RememberUnmappedPage(
      reinterpret_cast<Address>(chunk_metadata),
      chunk->IsEvacuationCandidate());
// AllocatePage(): obtain chunk memory (preferring the pool), then construct
// the PageMetadata and the MemoryChunk header in place.
PageMetadata* MemoryAllocator::AllocatePage(
    MemoryAllocator::AllocationMode alloc_mode, Space* space,
    Executability executable) {
  std::optional<MemoryChunkAllocationResult> chunk_info;
// ...
  if (!chunk_info) return nullptr;
// ...
  PageMetadata* metadata;
  if (chunk_info->optional_metadata) {
    // A pooled chunk comes with reusable metadata storage: construct in place.
    metadata = new (chunk_info->optional_metadata) PageMetadata(
        isolate_->heap(), space, chunk_info->size, chunk_info->area_start,
        chunk_info->area_end, std::move(chunk_info->reservation));
  } else {
    metadata = new PageMetadata(
        isolate_->heap(), space, chunk_info->size, chunk_info->area_start,
        chunk_info->area_end, std::move(chunk_info->reservation));
  }
// ...
  flags &= ~MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING;
// ...
  // Two surrounding branches construct the chunk header the same way:
  MemoryChunk* chunk;
  chunk = new (chunk_info->chunk) MemoryChunk(flags, metadata);
// ...
  chunk = new (chunk_info->chunk) MemoryChunk(flags, metadata);
// ...
  if (chunk->executable()) RegisterExecutableMemoryChunk(metadata);
// ...
  space->InitializePage(metadata);
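// Illustration: the placement-new calls above construct objects in storage
// that already exists (pooled metadata or the reserved chunk itself) rather
// than heap-allocating. Standalone sketch of the pattern with a hypothetical
// Widget type; note the explicit destructor call and the absence of delete:
#include <new>

struct Widget {
  explicit Widget(int id) : id_(id) {}
  int id_;
};

alignas(Widget) unsigned char storage[sizeof(Widget)];

void PlacementNewDemo() {
  Widget* w = new (storage) Widget(42);  // construct in pre-owned storage
  // ... use w ...
  w->~Widget();  // destroy explicitly; the storage is not heap-owned
}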
// AllocateReadOnlyPage(): like AllocatePage(), but builds ReadOnlyPageMetadata.
ReadOnlyPageMetadata* MemoryAllocator::AllocateReadOnlyPage(
    ReadOnlySpace* space, Address hint) {
  std::optional<MemoryChunkAllocationResult> chunk_info =
      AllocateUninitializedChunkAt(/* ... */);
// ...
  ReadOnlyPageMetadata* metadata = new ReadOnlyPageMetadata(
      isolate_->heap(), space, chunk_info->size, chunk_info->area_start,
      chunk_info->area_end, std::move(chunk_info->reservation));
// RemapSharedPage(): thin wrapper over the platform shared-memory remap.
std::unique_ptr<::v8::PageAllocator::SharedMemoryMapping>
MemoryAllocator::RemapSharedPage(
    ::v8::PageAllocator::SharedMemory* shared_memory, Address new_address) {
  return shared_memory->RemapTo(reinterpret_cast<void*>(new_address));
}
// AllocateLargePage(): same shape as AllocatePage(), but with
// LargePageMetadata and an explicit executability flag.
LargePageMetadata* MemoryAllocator::AllocateLargePage(
    LargeObjectSpace* space, size_t object_size, Executability executable) {
  std::optional<MemoryChunkAllocationResult> chunk_info =
      AllocateUninitializedChunk(space, object_size, executable,
                                 PageSize::kLarge);
// ...
  if (!chunk_info) return nullptr;
// ...
  LargePageMetadata* metadata;
  if (chunk_info->optional_metadata) {
    metadata = new (chunk_info->optional_metadata) LargePageMetadata(
        isolate_->heap(), space, chunk_info->size, chunk_info->area_start,
        chunk_info->area_end, std::move(chunk_info->reservation), executable);
  } else {
    metadata = new LargePageMetadata(
        isolate_->heap(), space, chunk_info->size, chunk_info->area_start,
        chunk_info->area_end, std::move(chunk_info->reservation), executable);
  }
// ...
  // As in AllocatePage(), two surrounding branches build the header in place:
  MemoryChunk* chunk;
  chunk = new (chunk_info->chunk) MemoryChunk(flags, metadata);
// ...
  chunk = new (chunk_info->chunk) MemoryChunk(flags, metadata);
// ...
  if (chunk->executable()) RegisterExecutableMemoryChunk(metadata);
// AllocateUninitializedPageFromPool(): reuse a previously freed chunk, if the
// pool has one, instead of mapping fresh memory.
std::optional<MemoryAllocator::MemoryChunkAllocationResult>
MemoryAllocator::AllocateUninitializedPageFromPool(Space* space) {
// ... (chunk_metadata is taken from the page pool)
  if (chunk_metadata == nullptr) return {};  // pool is empty
// ...
  return MemoryChunkAllocationResult{
      chunk_metadata->Chunk(), chunk_metadata, size, area_start, area_end,
      std::move(reservation),
  };
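// Illustration (hypothetical, simplified): a page pool keeps fully set-up
// chunks from freed pages so a later allocation can skip the OS round-trip.
// V8's pool is keyed per isolate and thread-safe; this sketch is neither.
#include <vector>

template <typename PageT>
class SimplePagePool {
 public:
  void Add(PageT* page) { pages_.push_back(page); }
  PageT* Remove() {
    if (pages_.empty()) return nullptr;  // caller falls back to a fresh mapping
    PageT* page = pages_.back();
    pages_.pop_back();
    return page;
  }

 private:
  std::vector<PageT*> pages_;
};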
// LookupChunkContainingAddress(): addr may point anywhere, so first probe the
// hash set of normal pages, then fall back to an ordered lookup among the
// large pages.
    DCHECK_LE((*normal_page_it)->address(), addr);
// ...
  } else if (auto large_page_it = large_pages_.upper_bound(chunk);
             large_page_it != large_pages_.begin()) {
    // The candidate is the last large page starting at or before addr.
    DCHECK_IMPLIES(large_page_it != large_pages_.end(),
                   addr < (*large_page_it)->address());
    auto* large_page_chunk = *std::next(large_page_it, -1);
    DCHECK_LE(large_page_chunk->address(), addr);
    if (large_page_chunk->Metadata()->Contains(addr)) return large_page_chunk;
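// Illustration: the large-page lookup above uses the standard "upper_bound,
// then step back" idiom for finding the greatest element <= the key in an
// ordered set. Self-contained version of the same idiom:
#include <cassert>
#include <iterator>
#include <set>

const int* GreatestLessOrEqual(const std::set<int>& s, int key) {
  auto it = s.upper_bound(key);          // first element strictly greater
  if (it == s.begin()) return nullptr;   // every element is greater than key
  return &*std::prev(it);                // predecessor is the best candidate
}

void LookupDemo() {
  std::set<int> page_starts{100, 200, 300};
  assert(*GreatestLessOrEqual(page_starts, 250) == 200);  // inside [200, 300)
  assert(GreatestLessOrEqual(page_starts, 50) == nullptr);
}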