#ifdef V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES
thread_local IsolateGroup* IsolateGroup::current_ = nullptr;

IsolateGroup* IsolateGroup::current_non_inlined() { return current_; }

void IsolateGroup::set_current_non_inlined(IsolateGroup* group) {
  current_ = group;
}

// Sets the current IsolateGroup for this thread and restores the previous one
// on destruction.
class IsolateGroupAccessScope final {
 public:
  explicit IsolateGroupAccessScope(IsolateGroup* group)
      : previous_(IsolateGroup::current()) {
    IsolateGroup::set_current(group);
  }
  ~IsolateGroupAccessScope() { IsolateGroup::set_current(previous_); }

 private:
  IsolateGroup* previous_;
};
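// Usage sketch (illustrative only; assumes `group` is a valid IsolateGroup*):
//   {
//     IsolateGroupAccessScope scope(group);
//     // IsolateGroup::current() now returns `group` on this thread.
//   }  // the previously current group is restored here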
#ifdef V8_COMPRESS_POINTERS
struct PtrComprCageReservationParams
    : public VirtualMemoryCage::ReservationParams {
  PtrComprCageReservationParams() {
    page_allocator = GetPlatformPageAllocator();

    reservation_size = kPtrComprCageReservationSize;
    base_alignment = kPtrComprCageBaseAlignment;

    requested_start_hint = RoundDown(
        reinterpret_cast<Address>(page_allocator->GetRandomMmapAddr()),
        base_alignment);

#if V8_OS_FUCHSIA && !V8_EXTERNAL_CODE_SPACE
    permissions = PageAllocator::Permission::kNoAccessWillJitLater;
#else
    permissions = PageAllocator::Permission::kNoAccess;
#endif
    page_initialization_mode =
        base::PageInitializationMode::kAllocatedPagesCanBeUninitialized;
    page_freeing_mode = base::PageFreeingMode::kMakeInaccessible;
  }
};
#endif  // V8_COMPRESS_POINTERS
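// Note: instances of PtrComprCageReservationParams are handed to
// VirtualMemoryCage::InitReservation() by the IsolateGroup::Initialize()
// variants below when the pointer compression cage is reserved.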
#ifdef V8_ENABLE_LEAPTIERING
  js_dispatch_table_.TearDown();
#endif  // V8_ENABLE_LEAPTIERING

#ifdef V8_ENABLE_SANDBOX
  code_pointer_table_.TearDown();
#endif  // V8_ENABLE_SANDBOX

#ifdef V8_COMPRESS_POINTERS
  DCHECK(reservation_.IsReserved());
  reservation_.Free();
#endif  // V8_COMPRESS_POINTERS

#ifdef V8_ENABLE_SANDBOX
  sandbox_->TearDown();
#endif  // V8_ENABLE_SANDBOX
#ifdef V8_ENABLE_SANDBOX
void IsolateGroup::Initialize(bool process_wide, Sandbox* sandbox) {
  DCHECK(!reservation_.IsReserved());
  CHECK(sandbox->is_initialized());
  PtrComprCageReservationParams params;
  Address base = sandbox->address_space()->AllocatePages(
      sandbox->base(), params.reservation_size, params.base_alignment,
      PagePermissions::kNoAccess);
  CHECK_EQ(sandbox->base(), base);
  base::AddressRegion existing_reservation(base, params.reservation_size);
  params.page_allocator = sandbox->page_allocator();
  if (!reservation_.InitReservation(params, existing_reservation)) {
    V8::FatalProcessOutOfMemory(
        nullptr,
        "Failed to reserve virtual memory for process-wide V8 "
        "pointer compression cage");
  }
  pointer_compression_cage_ = &reservation_;
  trusted_pointer_compression_cage_ =
      TrustedRange::EnsureProcessWideTrustedRange(kMaximalTrustedRangeSize);
  sandbox_ = sandbox;

  code_pointer_table()->Initialize();
  optimizing_compile_task_executor_ =
      std::make_unique<OptimizingCompileTaskExecutor>();
#ifdef V8_ENABLE_LEAPTIERING
  js_dispatch_table()->Initialize();
#endif  // V8_ENABLE_LEAPTIERING
}
#elif defined(V8_COMPRESS_POINTERS)
void IsolateGroup::Initialize(bool process_wide) {
  DCHECK(!reservation_.IsReserved());
  PtrComprCageReservationParams params;
  if (!reservation_.InitReservation(params)) {
    V8::FatalProcessOutOfMemory(
        nullptr,
        "Failed to reserve virtual memory for process-wide V8 "
        "pointer compression cage");
  }
  pointer_compression_cage_ = &reservation_;
  trusted_pointer_compression_cage_ = &reservation_;
  optimizing_compile_task_executor_ =
      std::make_unique<OptimizingCompileTaskExecutor>();
#ifdef V8_ENABLE_LEAPTIERING
  js_dispatch_table()->Initialize();
#endif  // V8_ENABLE_LEAPTIERING
}
#else
void IsolateGroup::Initialize(bool process_wide) {
  optimizing_compile_task_executor_ =
      std::make_unique<OptimizingCompileTaskExecutor>();
#ifdef V8_ENABLE_LEAPTIERING
  js_dispatch_table()->Initialize();
#endif  // V8_ENABLE_LEAPTIERING
}
#endif  // V8_ENABLE_SANDBOX
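// Rough call-site sketch (illustrative; the sandbox variant mirrors the call
// shown just below):
//   IsolateGroup* group = ...;
// #ifdef V8_ENABLE_SANDBOX
//   group->Initialize(/*process_wide=*/true, Sandbox::GetDefault());
// #else
//   group->Initialize(/*process_wide=*/true);
// #endif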
#ifdef V8_ENABLE_SANDBOX
  group->Initialize(true, Sandbox::GetDefault());

#ifdef V8_COMPRESS_POINTERS

#ifdef V8_EXTERNAL_CODE_SPACE

#ifdef V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES
  IsolateGroup::set_current(group);
void InitCodeRangeOnce(std::unique_ptr<CodeRange>* code_range_member,
                       v8::PageAllocator* page_allocator,
                       size_t requested_size, bool immutable) {
  CodeRange* code_range = new CodeRange();
  if (!code_range->InitReservation(page_allocator, requested_size, immutable)) {
    V8::FatalProcessOutOfMemory(
        nullptr, "Failed to reserve virtual memory for CodeRange");
  }
  code_range_member->reset(code_range);
#ifdef V8_EXTERNAL_CODE_SPACE
#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
  ExternalCodeCompressionScheme::InitBase(
      ExternalCodeCompressionScheme::PrepareCageBaseAddress(
          code_range->base()));
#endif  // V8_COMPRESS_POINTERS_IN_SHARED_CAGE
#endif  // V8_EXTERNAL_CODE_SPACE
}
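// Assumed wiring (sketch, not verbatim from this excerpt): EnsureCodeRange()
// is expected to route through this helper exactly once per group, e.g. via
// base::CallOnce:
//   CodeRange* IsolateGroup::EnsureCodeRange(size_t requested_size) {
//     base::CallOnce(&init_code_range_, InitCodeRangeOnce, &code_range_,
//                    page_allocator_, requested_size, /*immutable=*/true);
//     return code_range_.get();
//   }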
PageAllocator* IsolateGroup::GetBackingStorePageAllocator() {
#ifdef V8_ENABLE_SANDBOX
  return sandbox()->page_allocator();
  DCHECK_EQ(isolate->isolate_group(), this);

  DCHECK_EQ(isolate->isolate_group(), this);
  const bool inserted = isolates_.insert(isolate).second;

  isolate->owns_shareable_data_ = false;

  isolate->is_shared_space_isolate_ = true;
  DCHECK(isolate->owns_shareable_data_);

  if (isolate->is_shared_space_isolate()) {

  DCHECK(!isolate->is_shared_space_isolate());
337 "Creation of new isolate groups requires enabling "
338 "multiple pointer compression cages at build-time");
342#ifdef V8_ENABLE_SANDBOX
370#ifdef V8_ENABLE_SANDBOX
void SandboxedArrayBufferAllocator::LazyInitialize(Sandbox* sandbox) {
  base::MutexGuard guard(&mutex_);
  if (is_initialized()) {
    return;
  }
  CHECK(sandbox->is_initialized());
  sandbox_ = sandbox;

  // Reserve backing memory inside the sandbox, halving the requested size on
  // failure until the minimum is reached.
  constexpr size_t max_backing_memory_size = 8ULL * GB;
  constexpr size_t min_backing_memory_size = 1ULL * GB;
  size_t backing_memory_size = max_backing_memory_size;
  Address backing_memory_base = 0;
  while (!backing_memory_base &&
         backing_memory_size >= min_backing_memory_size) {
    backing_memory_base = sandbox_->address_space()->AllocatePages(
        VirtualAddressSpace::kNoHint, backing_memory_size, kChunkSize,
        PagePermissions::kNoAccess);
    if (!backing_memory_base) {
      backing_memory_size /= 2;
    }
  }
  if (!backing_memory_base) {
    V8::FatalProcessOutOfMemory(
        nullptr, "Could not reserve backing memory for ArrayBufferAllocators");
  }

  region_alloc_ = std::make_unique<base::RegionAllocator>(
      backing_memory_base, backing_memory_size, kAllocationGranularity);
  end_of_accessible_region_ = region_alloc_->begin();
  // When freed regions are merged back, release the underlying memory: shrink
  // the accessible region if the merged region reaches the allocator's end,
  // otherwise discard whole chunks in the middle.
  region_alloc_->set_on_merge_callback([this](Address start, size_t size) {
    mutex_.AssertHeld();
    Address end = start + size;
    if (end == region_alloc_->end() &&
        start <= end_of_accessible_region_ - kChunkSize) {
      Address new_end_of_accessible_region = RoundUp(start, kChunkSize);
      size_t size_to_decommit =
          end_of_accessible_region_ - new_end_of_accessible_region;
      if (!sandbox_->address_space()->DecommitPages(
              new_end_of_accessible_region, size_to_decommit)) {
        // (error handling elided)
      }
      end_of_accessible_region_ = new_end_of_accessible_region;
    } else if (size >= 2 * kChunkSize) {
      Address chunk_start = RoundUp(start, kChunkSize);
      Address chunk_end = RoundDown(end, kChunkSize);
      if (!sandbox_->address_space()->DiscardSystemPages(
              chunk_start, chunk_end - chunk_start)) {
        // (error handling elided)
      }
    }
  });
}
SandboxedArrayBufferAllocator::~SandboxedArrayBufferAllocator() {
  // If the sandbox has already been torn down, there is nothing left to free.
  if (is_initialized() && sandbox_->is_initialized()) {
    sandbox_->address_space()->FreePages(region_alloc_->begin(),
                                         region_alloc_->size());
  }
}
void* SandboxedArrayBufferAllocator::Allocate(size_t length) {
  length = RoundUp(length, kAllocationGranularity);
  Address region = region_alloc_->AllocateRegion(length);
  if (region == base::RegionAllocator::kAllocationFailure) return nullptr;

  // If the allocation extends past the currently accessible part of the
  // backing memory, grow the accessible region first.
  Address end = region + length;
  size_t length_to_memset = length;
  if (end > end_of_accessible_region_) {
    Address new_end_of_accessible_region = RoundUp(end, kChunkSize);
    size_t size = new_end_of_accessible_region - end_of_accessible_region_;
    if (!sandbox_->address_space()->SetPagePermissions(
            end_of_accessible_region_, size, PagePermissions::kReadWrite)) {
      if (!region_alloc_->FreeRegion(region)) {
        V8::FatalProcessOutOfMemory(
            nullptr, "SandboxedArrayBufferAllocator::Allocate()");
      }
      return nullptr;
    }

    // Pages that were just made accessible are guaranteed to be zeroed, so
    // only the previously accessible prefix of the region needs clearing.
    length_to_memset = end_of_accessible_region_ - region;
    end_of_accessible_region_ = new_end_of_accessible_region;
  }

  void* mem = reinterpret_cast<void*>(region);
  memset(mem, 0, length_to_memset);
  return mem;
}
void SandboxedArrayBufferAllocator::Free(void* data) {
  region_alloc_->FreeRegion(reinterpret_cast<Address>(data));
}

PageAllocator* SandboxedArrayBufferAllocator::page_allocator() {
  return sandbox_->page_allocator();
}

SandboxedArrayBufferAllocator*
IsolateGroup::GetSandboxedArrayBufferAllocator() {
  backend_allocator_.LazyInitialize(sandbox());
  return &backend_allocator_;
}
OptimizingCompileTaskExecutor* IsolateGroup::optimizing_compile_task_executor() {
  return optimizing_compile_task_executor_.get();
}