#ifndef V8_COMMON_CODE_MEMORY_ACCESS_INL_H_
#define V8_COMMON_CODE_MEMORY_ACCESS_INL_H_

#include "src/common/code-memory-access.h"

#if V8_HAS_PKU_JIT_WRITE_PROTECT
#include "src/base/platform/memory-protection-key.h"
#endif
#if V8_HAS_PTHREAD_JIT_WRITE_PROTECT
#include <pthread.h>  // For pthread_jit_write_protect_np().
#endif
#if V8_HAS_BECORE_JIT_WRITE_PROTECT
#include <BrowserEngineCore/BEMemory.h>
#endif
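
// Inline helpers for writing into JIT (RWX) code memory. Depending on the
// platform, one of three hardware-assisted write-protection mechanisms is
// selected by the macros above:
//  - V8_HAS_PTHREAD_JIT_WRITE_PROTECT: Apple's per-thread MAP_JIT toggle
//    (pthread_jit_write_protect_np).
//  - V8_HAS_BECORE_JIT_WRITE_PROTECT: the BrowserEngineCore BEMemory API.
//  - V8_HAS_PKU_JIT_WRITE_PROTECT: memory protection keys (Intel PKU/pkeys).
// RwxMemoryWriteScope further below lifts the write restriction for the
// current thread using whichever mechanism is available.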
WritableJitAllocation::WritableJitAllocation(
    Address addr, size_t size, ThreadIsolation::JitAllocationType type,
    JitAllocationSource source, bool enforce_write_api)
    : address_(addr),
      page_ref_(ThreadIsolation::LookupJitPage(addr, size)),
      write_scope_("WritableJitAllocation"),
      allocation_(source == JitAllocationSource::kRegister
                      ? page_ref_->RegisterAllocation(addr, size, type)
                      : page_ref_->LookupAllocation(addr, size, type)),
      enforce_write_api_(enforce_write_api) {
  // ...
}

// Variant for writable allocations in non-executable memory: no JIT page
// lookup and no write scope are needed.
WritableJitAllocation::WritableJitAllocation(
    Address addr, size_t size, ThreadIsolation::JitAllocationType type,
    bool enforce_write_api)
    : address_(addr),
      allocation_(size, type),
      enforce_write_api_(enforce_write_api) {}
std::optional<RwxMemoryWriteScope>
WritableJitAllocation::WriteScopeForApiEnforcement() const {
  if (enforce_write_api_) {
    return std::optional<RwxMemoryWriteScope>("WriteScopeForApiEnforcement");
  }
  return std::nullopt;
}
#ifdef V8_ENABLE_WEBASSEMBLY

WritableJumpTablePair::WritableJumpTablePair(Address jump_table_address,
                                             size_t jump_table_size,
                                             Address far_jump_table_address,
                                             size_t far_jump_table_size)
    : writable_jump_table_(jump_table_address, jump_table_size,
                           ThreadIsolation::JitAllocationType::kWasmJumpTable,
                           /*enforce_write_api=*/true),
      writable_far_jump_table_(
          far_jump_table_address, far_jump_table_size,
          ThreadIsolation::JitAllocationType::kWasmFarJumpTable,
          /*enforce_write_api=*/true),
      write_scope_("WritableJumpTablePair"),
      jump_table_pages_(ThreadIsolation::SplitJitPages(
          far_jump_table_address, far_jump_table_size, jump_table_address,
          jump_table_size)) {
  CHECK(jump_table_pages_.value().second.Contains(
      jump_table_address, jump_table_size,
      ThreadIsolation::JitAllocationType::kWasmJumpTable));
  CHECK(jump_table_pages_.value().first.Contains(
      far_jump_table_address, far_jump_table_size,
      ThreadIsolation::JitAllocationType::kWasmFarJumpTable));
}

#endif  // V8_ENABLE_WEBASSEMBLY
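
// Note: the jump table and the far jump table may live on different JIT
// pages, so both page references are looked up together (jump_table_pages_),
// and the CHECKs above re-validate that each table is fully contained in a
// registered allocation of the expected wasm jump-table type.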
template <typename T, size_t offset>
void WritableJitAllocation::WriteHeaderSlot(T value) {
  std::optional<RwxMemoryWriteScope> write_scope =
      WriteScopeForApiEnforcement();
  WriteMaybeUnalignedValue<T>(address_ + offset, value);
}

template <typename T, size_t offset>
void WritableJitAllocation::WriteHeaderSlot(Tagged<T> value, ReleaseStoreTag) {
  std::optional<RwxMemoryWriteScope> write_scope =
      WriteScopeForApiEnforcement();
  TaggedField<T, offset>::Release_Store(HeapObject::FromAddress(address_),
                                        value);
}

template <typename T, size_t offset>
void WritableJitAllocation::WriteHeaderSlot(Tagged<T> value, RelaxedStoreTag) {
  std::optional<RwxMemoryWriteScope> write_scope =
      WriteScopeForApiEnforcement();
  if constexpr (offset == HeapObject::kMapOffset) {
    TaggedField<T, offset>::Relaxed_Store_Map_Word(
        HeapObject::FromAddress(address_), value);
  } else {
    TaggedField<T, offset>::Relaxed_Store(HeapObject::FromAddress(address_),
                                          value);
  }
}

template <typename T, size_t offset>
void WritableJitAllocation::WriteProtectedPointerHeaderSlot(Tagged<T> value,
                                                            ReleaseStoreTag) {
  std::optional<RwxMemoryWriteScope> write_scope =
      WriteScopeForApiEnforcement();
  // ...
}

template <typename T, size_t offset>
void WritableJitAllocation::WriteProtectedPointerHeaderSlot(Tagged<T> value,
                                                            RelaxedStoreTag) {
  std::optional<RwxMemoryWriteScope> write_scope =
      WriteScopeForApiEnforcement();
  // ...
}
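
// Note: the kMapOffset special case above goes through Relaxed_Store_Map_Word
// because the map slot is also read concurrently (e.g. by the GC) and, with
// map packing enabled, is stored in an encoded form that a plain tagged store
// would not produce.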
  // Dispatch for runtime offsets into the templated writers above.
  switch (offset) {
    case InstructionStream::kCodeOffset:
      // ...
      break;
    case InstructionStream::kRelocationInfoOffset:
      WriteProtectedPointerHeaderSlot<
          T, InstructionStream::kRelocationInfoOffset>(value, tag);
      break;
    // ...
  }
// The remaining write helpers follow the same pattern: open a write scope if
// API enforcement is on, then perform the raw store.

template <typename T>
void WritableJitAllocation::WriteUnalignedValue(Address address, T value) {
  std::optional<RwxMemoryWriteScope> write_scope =
      WriteScopeForApiEnforcement();
  base::WriteUnalignedValue<T>(address, value);
}

template <typename T>
void WritableJitAllocation::WriteValue(Address address, T value) {
  std::optional<RwxMemoryWriteScope> write_scope =
      WriteScopeForApiEnforcement();
  // Relaxed atomic store: the slot may be read concurrently by other threads.
  reinterpret_cast<std::atomic<T>*>(address)->store(value,
                                                    std::memory_order_relaxed);
}

void WritableJitAllocation::CopyCode(size_t dst_offset, const uint8_t* src,
                                     size_t num_bytes) {
  std::optional<RwxMemoryWriteScope> write_scope =
      WriteScopeForApiEnforcement();
  CopyBytes(reinterpret_cast<uint8_t*>(address_ + dst_offset), src, num_bytes);
}

void WritableJitAllocation::CopyData(size_t dst_offset, const uint8_t* src,
                                     size_t num_bytes) {
  std::optional<RwxMemoryWriteScope> write_scope =
      WriteScopeForApiEnforcement();
  CopyBytes(reinterpret_cast<uint8_t*>(address_ + dst_offset), src, num_bytes);
}

void WritableJitAllocation::ClearBytes(size_t offset, size_t len) {
  std::optional<RwxMemoryWriteScope> write_scope =
      WriteScopeForApiEnforcement();
  memset(reinterpret_cast<void*>(address_ + offset), 0, len);
}
WritableJitPage::WritableJitPage(Address addr, size_t size)
    : write_scope_("WritableJitPage"),
      page_ref_(ThreadIsolation::LookupJitPage(addr, size)) {}

WritableJitAllocation WritableJitPage::LookupAllocationContaining(
    Address addr) {
  auto pair = page_ref_.AllocationContaining(addr);
  return WritableJitAllocation(pair.first, pair.second.Size(),
                               pair.second.Type(), false);
}
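
// Usage sketch (illustrative only, not taken from a real call site): a
// WritableJitPage makes a whole JIT page writable at once, e.g. when freeing
// ranges of dead code:
//
//   WritableJitPage page(page_start, page_size);
//   WritableJitAllocation dead = page.LookupAllocationContaining(object_addr);
//   WritableFreeSpace free_space = page.FreeRange(object_addr, object_size);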
WritableFreeSpace::WritableFreeSpace(base::Address addr, size_t size,
                                     bool executable)
    : address_(addr), size_(static_cast<int>(size)), executable_(executable) {}
template <typename T, size_t offset>
void WritableFreeSpace::WriteHeaderSlot(Tagged<T> value,
                                        RelaxedStoreTag) const {
  // ...
}
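
// RwxMemoryWriteScope usage sketch (illustrative only): the scope is a
// stack-allocated RAII guard. While it is alive the current thread can write
// to RWX code pages; when it goes out of scope, write access is dropped again.
//
//   {
//     RwxMemoryWriteScope write_scope("PatchCode");
//     // ... write to code pages ...
//   }  // write access revoked here
//
// The platform-specific SetWritable()/SetExecutable() implementations below
// perform the actual permission flips.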
#if V8_HAS_PTHREAD_JIT_WRITE_PROTECT

bool RwxMemoryWriteScope::IsSupported() { return true; }

void RwxMemoryWriteScope::SetWritable() { pthread_jit_write_protect_np(0); }

void RwxMemoryWriteScope::SetExecutable() { pthread_jit_write_protect_np(1); }

#elif V8_HAS_BECORE_JIT_WRITE_PROTECT

bool RwxMemoryWriteScope::IsSupported() {
  return be_memory_inline_jit_restrict_with_witness_supported() != 0;
}

void RwxMemoryWriteScope::SetWritable() {
  be_memory_inline_jit_restrict_rwx_to_rw_with_witness();
}

void RwxMemoryWriteScope::SetExecutable() {
  be_memory_inline_jit_restrict_rwx_to_rx_with_witness();
}

#elif V8_HAS_PKU_JIT_WRITE_PROTECT

bool RwxMemoryWriteScope::IsSupported() {
  static_assert(base::MemoryProtectionKey::kNoMemoryProtectionKey == -1);
  DCHECK(ThreadIsolation::initialized());
  return ThreadIsolation::PkeyIsAvailable();
}

void RwxMemoryWriteScope::SetWritable() {
  DCHECK(ThreadIsolation::initialized());
  DCHECK_NE(
      base::MemoryProtectionKey::GetKeyPermission(ThreadIsolation::pkey()),
      base::MemoryProtectionKey::kNoRestrictions);
  base::MemoryProtectionKey::SetPermissionsForKey(
      ThreadIsolation::pkey(), base::MemoryProtectionKey::kNoRestrictions);
}

void RwxMemoryWriteScope::SetExecutable() {
  DCHECK(ThreadIsolation::initialized());
  DCHECK_EQ(
      base::MemoryProtectionKey::GetKeyPermission(ThreadIsolation::pkey()),
      base::MemoryProtectionKey::kNoRestrictions);
  base::MemoryProtectionKey::SetPermissionsForKey(
      ThreadIsolation::pkey(), base::MemoryProtectionKey::kDisableWrite);
}

#endif  // V8_HAS_PTHREAD_JIT_WRITE_PROTECT

#endif  // V8_COMMON_CODE_MEMORY_ACCESS_INL_H_