// static
void JumpTableAssembler::GenerateLazyCompileTable(
    Address base, uint32_t num_slots, uint32_t num_imported_functions,
    Address wasm_compile_lazy_target) {
  uint32_t lazy_compile_table_size = num_slots * kLazyCompileTableSlotSize;
  // ...
  for (uint32_t slot_index = 0; slot_index < num_slots; ++slot_index) {
    // Every slot receives a thunk that passes the function index (imports
    // included) on to the generic lazy-compile target.
    jtasm.EmitLazyCompileJumpSlot(slot_index + num_imported_functions,
                                  wasm_compile_lazy_target);
  }
  // ...
}
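// A standalone sketch (not part of the original file) of the slot-layout
// arithmetic used above: slot i starts at base + i * kLazyCompileTableSlotSize,
// so the table occupies exactly num_slots * kLazyCompileTableSlotSize bytes.
// kExampleLazySlotSize is a made-up value; the real size is per-architecture.
namespace lazy_table_example {
constexpr uint32_t kExampleLazySlotSize = 16;  // hypothetical
constexpr uint32_t LazySlotOffset(uint32_t slot_index) {
  return slot_index * kExampleLazySlotSize;
}
static_assert(LazySlotOffset(0) == 0);
static_assert(LazySlotOffset(3) == 48);
}  // namespace lazy_table_example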
// static
void JumpTableAssembler::InitializeJumpsToLazyCompileTable(
    Address base, uint32_t num_slots, Address lazy_compile_table_start) {
  // ...
  for (uint32_t slot_index = 0; slot_index < num_slots; ++slot_index) {
    // ...
    // Each jump-table slot initially branches to the matching slot in the
    // lazy-compile table.
    Address target =
        lazy_compile_table_start +
        JumpTableAssembler::LazyCompileSlotIndexToOffset(slot_index);
    int offset_before_emit = jtasm.pc_offset();
    // ...
  }
  // ...
}
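// Sketch (hypothetical slot sizes, not the real per-architecture values) of
// why the same slot index maps to different byte offsets in the two tables:
// the jump table and the lazy-compile table use different slot sizes.
namespace init_jumps_example {
constexpr uint32_t kExampleLazySlotSize = 16;  // hypothetical
constexpr unsigned long long InitialJumpTarget(
    unsigned long long lazy_table_start, uint32_t slot_index) {
  return lazy_table_start + slot_index * kExampleLazySlotSize;
}
static_assert(InitialJumpTarget(0x1000, 2) == 0x1020);
}  // namespace init_jumps_example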
template <typename T>
void JumpTableAssembler::emit(T value) {
  // ...
  // write_end is the address of the last byte of the store (inclusive).
  Address write_end = write_start + sizeof(T) - 1;
  // ...
}
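// Sketch for the -1 above: write_end is the inclusive address of the last
// byte written, so a 4-byte store starting at 0x1000 ends at 0x1003.
namespace emit_example {
static_assert(0x1000 + sizeof(uint32_t) - 1 == 0x1003);
}  // namespace emit_example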
#if V8_TARGET_ARCH_X64
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  const uint8_t inst[kLazyCompileTableSlotSize] = {
      // ... push <func_index>, then a near jmp to lazy_compile_target
  };
  // The rel32 is relative to the end of the slot, where the jmp ends.
  intptr_t displacement =
      lazy_compile_target - (pc_ + kLazyCompileTableSlotSize);
  // ...
}
bool JumpTableAssembler::EmitJumpSlot(Address target) {
  // ... compute the rel32 displacement and bail out if it does not fit
#ifdef V8_ENABLE_CET_IBT
  // With CET/IBT, indirect branch targets must start with an endbr64
  // landing pad; 4-byte nops can fill the rest of the slot.
  uint32_t endbr_insn = 0xfa1e0ff3;  // endbr64 (bytes f3 0f 1e fa)
  uint32_t nop = 0x00401f0f;         // 4-byte nop (bytes 0f 1f 40 00)
  // ...
#endif
  uint8_t inst[kJumpTableSlotSize] = {
      // ... 0xe9 near jmp; the rel32 is patched in below
  };
  int32_t displacement32 = base::checked_cast<int32_t>(displacement);
  memcpy(&inst[1], &displacement32, sizeof(int32_t));
  // ...
}
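// Illustration (standalone, hypothetical addresses) of the rel32 math above:
// a 5-byte `jmp rel32` encodes target minus the end of the instruction, so
// the displacement is taken relative to pc + 5.
namespace x64_jmp_example {
constexpr long long NearJmpDisplacement(unsigned long long pc,
                                        unsigned long long target) {
  constexpr int kJmpRel32Size = 5;  // 0xe9 opcode + 4 displacement bytes
  return static_cast<long long>(target) -
         static_cast<long long>(pc + kJmpRel32Size);
}
static_assert(NearJmpDisplacement(0x1000, 0x1005) == 0);
static_assert(NearJmpDisplacement(0x1000, 0x0fff) == -6);
}  // namespace x64_jmp_example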
void JumpTableAssembler::EmitFarJumpSlot(Address target) {
  const uint8_t inst[kFarJumpTableSlotSize] = {
      0xff, 0x25, 0x02, 0, 0, 0,  // jmp [rip + 2]: load the target below
      // ... (two bytes of padding)
      0, 0, 0, 0, 0, 0, 0, 0,     // 8-byte jump target, patchable at runtime
  };
  // ...
}
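// Layout sketch for the far-jump slot above: `jmp [rip+2]` is 6 bytes, and
// rip-relative addressing is relative to the end of the instruction, so the
// loaded pointer lives at slot + 6 + 2 = slot + 8, right after the padding.
namespace x64_far_example {
constexpr int kJmpRipRelSize = 6;  // ff 25 + disp32
constexpr int kDisp = 2;           // skips the two padding bytes
static_assert(kJmpRipRelSize + kDisp == 8);  // pointer at slot + 8
}  // namespace x64_far_example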
#elif V8_TARGET_ARCH_IA32
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  const uint8_t inst[kLazyCompileTableSlotSize] = {
      // ... materialize func_index, then a near jmp whose rel32 is relative
      // to the end of the slot
  };
  intptr_t displacement =
      lazy_compile_target - (pc_ + kLazyCompileTableSlotSize);
  // ...
}
bool JumpTableAssembler::EmitJumpSlot(Address target) {
  const uint8_t inst[kJumpTableSlotSize] = {
      // ... 0xe9 near jmp; in a 32-bit address space a rel32 always reaches
  };
  // ...
}

// Near and far jump slots are identical on ia32, so no separate far-jump
// encoding is needed.
static_assert(kJumpTableSlotSize == kFarJumpTableSlotSize);
#elif V8_TARGET_ARCH_ARM
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  const uint32_t inst[kLazyCompileTableSlotSize / 4] = {
      // ... materialize func_index, then branch to lazy_compile_target
  };
  // ...
}
bool JumpTableAssembler::EmitJumpSlot(Address target) {
  // The near-jump slot is exactly two 32-bit words.
  static_assert(kJumpTableSlotSize == 2 * kInstrSize);
  // ...
  const uint32_t inst[kJumpTableSlotSize / kInstrSize] = {
      // ...
  };
  // ...
}

// Near and far jump slots have the same size on arm.
static_assert(kJumpTableSlotSize == kFarJumpTableSlotSize);
#elif V8_TARGET_ARCH_ARM64
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  // Split the index into 16-bit halves for a movz/movk pair.
  uint16_t func_index_low = func_index & 0xffff;
  uint16_t func_index_high = func_index >> 16;
  // ...
  const uint32_t inst[kLazyCompileTableSlotSize / 4] = {
      // ... movz/movk loading func_index, then an unconditional branch whose
      // imm26 comes from Assembler::ImmUncondBranch(
      //     base::checked_cast<int32_t>(target_offset))
  };
  // ...
}
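// Sketch of the 16-bit split above: movz writes the low half and movk the
// high half, so reassembling the two halves yields the original index.
namespace arm64_split_example {
constexpr bool SplitRoundTrips(uint32_t func_index) {
  uint16_t low = func_index & 0xffff;
  uint16_t high = func_index >> 16;
  return (uint32_t{high} << 16 | low) == func_index;
}
static_assert(SplitRoundTrips(0));
static_assert(SplitRoundTrips(0x12345678));
}  // namespace arm64_split_example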
bool JumpTableAssembler::EmitJumpSlot(Address target) {
#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
  // With CFI, the slot starts with a BTI landing pad, which the branch
  // offset computation below must skip over.
  static constexpr ptrdiff_t kCodeEntryMarkerSize = kInstrSize;
#else
  static constexpr ptrdiff_t kCodeEntryMarkerSize = 0;
#endif
  // ... the branch offset is computed relative to
  //     reinterpret_cast<uint8_t*>(pc_ + kCodeEntryMarkerSize)
  // ...
#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
  uint32_t bti_inst = 0xd503245f;  // bti c: valid target for indirect branches
  // ...
#endif
  // b <target>, with the 26-bit instruction offset encoded via
  // Assembler::ImmUncondBranch().
  uint32_t branch_inst = ...;
  // ...
}
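// Range sketch for the near-jump encoding above: the ARM64 `b` immediate is
// a signed 26-bit count of 4-byte instructions, i.e. +-2^25 instructions or
// +-128 MB of code.
namespace arm64_range_example {
constexpr long long kMaxBranchRangeBytes = (1LL << 25) * 4;
static_assert(kMaxBranchRangeBytes == 128 * 1024 * 1024);
}  // namespace arm64_range_example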
void JumpTableAssembler::EmitFarJumpSlot(Address target) {
  // ...
  const uint32_t inst[kFarJumpTableSlotSize / 4] = {
      // ... load the 8-byte target stored at the end of the slot, then
      // branch through the loaded register
  };
  // ...
}

// static
void JumpTableAssembler::PatchFarJumpSlot(WritableJitAllocation& jit_allocation,
                                          Address slot, Address target) {
  // ...
  // Only the target pointer is rewritten (with a relaxed atomic store); the
  // instructions stay untouched, so concurrently executing code always sees
  // a consistent slot.
  jit_allocation.WriteValue(slot + kTargetOffset, target, kRelaxedStore);
  // ...
}
#elif V8_TARGET_ARCH_S390X
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  uint8_t inst[kLazyCompileTableSlotSize] = {
      0xc0, 0x71, 0x00, 0x00, 0x00, 0x00,  // lgfi r7, <func_index> (patched below)
      0xc0, 0x10, 0x00, 0x00, 0x00, 0x00,  // larl r1, <pc-relative>
      0xe3, 0x10, 0x10, 0x12, 0x00, 0x04,  // lg r1, 0x12(r1)
      // ... (branch via r1)
      0xb9, 0x04, 0x00, 0x00,              // 4 padding bytes
      0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0  // 8-byte target, emitted below
  };
#if V8_TARGET_LITTLE_ENDIAN
  // ... byte-swap func_index: s390 immediates are stored big-endian, so a
  // little-endian host (e.g. the simulator) must swap before patching.
#endif
  memcpy(&inst[2], &func_index, sizeof(int32_t));
  for (size_t i = 0; i < (kLazyCompileTableSlotSize - sizeof(Address)); i++) {
    // ... emit inst[i]
  }
  // ...
}
bool JumpTableAssembler::EmitJumpSlot(Address target) {
  intptr_t relative_target = target - pc_;
  // Relative branches on s390 encode the offset in 2-byte halfwords, hence
  // the division by 2.
  if (!is_int32(relative_target / 2)) {
    return false;  // Out of range; use the far jump table instead.
  }
  uint8_t inst[kJumpTableSlotSize] = {
      // ... brcl (branch relative on condition long); offset patched below
  };
  int32_t relative_target_addr = static_cast<int32_t>(relative_target / 2);
#if V8_TARGET_LITTLE_ENDIAN
  // ... byte-swap for the big-endian instruction stream
#endif
  memcpy(&inst[2], &relative_target_addr, sizeof(int32_t));
  // ...
}
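// Sketch for the /2 above: because s390 relative branches count halfwords,
// a signed 32-bit encoded offset spans +-4 GB of code, and every branch
// target must be 2-byte aligned.
namespace s390_example {
constexpr long long EncodedHalfwords(long long byte_offset) {
  return byte_offset / 2;
}
static_assert(EncodedHalfwords(0x2000) == 0x1000);
}  // namespace s390_example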
void JumpTableAssembler::EmitFarJumpSlot(Address target) {
  const uint8_t inst[kFarJumpTableSlotSize] = {
      0xc0, 0x10, 0x00, 0x00, 0x00, 0x00,  // larl r1, <pc-relative>
      0xe3, 0x10, 0x10, 0x10, 0x00, 0x04,  // lg r1, 0x10(r1)
      // ... (branch via r1)
      0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0  // 8-byte jump target
  };
  // ...
  for (size_t i = 0; i < (kFarJumpTableSlotSize - sizeof(Address)); i++) {
    // ... emit inst[i]
  }
  // ...
}

// static
void JumpTableAssembler::PatchFarJumpSlot(WritableJitAllocation& jit_allocation,
                                          Address slot, Address target) {
  Address target_addr = slot + 8;
  jit_allocation.WriteValue(target_addr, target, kRelaxedStore);
  // ...
}
#elif V8_TARGET_ARCH_MIPS64
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  // Split the index into 16-bit halves for a lui/ori pair.
  uint32_t func_index_low = func_index & 0xffff;
  uint32_t func_index_high = func_index >> 16;
  // ...
  const uint32_t inst[kLazyCompileTableSlotSize / 4] = {
      // ... lui/ori materializing func_index, then the jump
  };
  // ...
}
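// Note on the 16-bit split above: unlike the RISC-V hi20/lo12 split further
// down, no rounding correction is needed here, because ori zero-extends its
// immediate, so the lui/ori halves combine with a plain OR.
namespace mips_split_example {
constexpr bool LuiOriRoundTrips(uint32_t v) {
  return ((v >> 16) << 16 | (v & 0xffff)) == v;
}
static_assert(LuiOriRoundTrips(0xdeadbeef));
}  // namespace mips_split_example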
bool JumpTableAssembler::EmitJumpSlot(Address target) {
  // ...
  const uint32_t inst[kJumpTableSlotSize / kInstrSize] = {
      // ...
  };
  // ...
}

// Near and far jump slots share one size on mips64.
static_assert(kJumpTableSlotSize == kFarJumpTableSlotSize);
#elif V8_TARGET_ARCH_LOONG64
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  // Split the index into the 12 low and 20 high bits for materialization.
  uint32_t func_index_low_12 = func_index & 0xfff;
  uint32_t func_index_high_20 = func_index >> 12;
  // ...
  const uint32_t inst[kLazyCompileTableSlotSize / 4] = {
      // ... materialize func_index, then branch to lazy_compile_target
  };
  // ...
  // The 26-bit branch offset is counted in instructions (hence >> 2) and
  // split into a low 16-bit and a high 10-bit field.
  uint32_t target_offset_offs26 = (target_offset & 0xfffffff) >> 2;
  uint32_t target_offset_low_16 = target_offset_offs26 & 0xffff;
  uint32_t target_offset_high_10 = target_offset_offs26 >> 16;
  // ... (branch assembled from target_offset_low_16 and
  //      target_offset_high_10)
  // ...
}

bool JumpTableAssembler::EmitJumpSlot(Address target) {
  // ...
  uint32_t target_offset_offs26 = (target_offset & 0xfffffff) >> 2;
  uint32_t target_offset_low_16 = target_offset_offs26 & 0xffff;
  uint32_t target_offset_high_10 = target_offset_offs26 >> 16;
  // b <target>: 0x50000000 is the LoongArch `b` opcode; offs[15:0] goes into
  // the rk position and offs[25:16] into the low bits.
  uint32_t branch_inst =
      0x50000000 | target_offset_low_16 << kRkShift | target_offset_high_10;
  // ...
}
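// Decode sketch for the `b` encoding above. EncodeB/DecodeOffs26 are
// illustration-only helpers for a non-negative offset; kRkShiftExample
// mirrors the rk-field shift of 10 used by kRkShift.
namespace loong_b_example {
constexpr uint32_t kRkShiftExample = 10;
constexpr uint32_t EncodeB(uint32_t byte_offset) {
  uint32_t offs26 = (byte_offset & 0xfffffff) >> 2;
  return 0x50000000 | (offs26 & 0xffff) << kRkShiftExample | offs26 >> 16;
}
constexpr uint32_t DecodeOffs26(uint32_t insn) {
  uint32_t low16 = (insn >> kRkShiftExample) & 0xffff;
  uint32_t high10 = insn & 0x3ff;
  return (high10 << 16 | low16) << 2;  // re-scale to a byte offset
}
static_assert(DecodeOffs26(EncodeB(0x1234)) == 0x1234);
}  // namespace loong_b_example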
void JumpTableAssembler::EmitFarJumpSlot(Address target) {
  const uint32_t inst[kFarJumpTableSlotSize / 4] = {
      // ... load the 8-byte target stored in the slot, then jump via register
  };
  // ...
}

// static
void JumpTableAssembler::PatchFarJumpSlot(WritableJitAllocation& jit_allocation,
                                          Address slot, Address target) {
  // ...
  jit_allocation.WriteValue(target_addr, target, kRelaxedStore);
}
#elif V8_TARGET_ARCH_PPC64
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  const uint32_t inst[kLazyCompileTableSlotSize / 4] = {
      // ... materialize func_index, then branch to lazy_compile_target
  };
  // ...
}

bool JumpTableAssembler::EmitJumpSlot(Address target) {
  intptr_t relative_target = target - pc_;
  // The PPC `b` instruction takes a signed 26-bit byte offset.
  if (!is_int26(relative_target)) {
    return false;  // Out of range; the caller falls back to the far jump table.
  }
  const uint32_t inst[kJumpTableSlotSize / kInstrSize] = {
      // ...
  };
  // ...
}

void JumpTableAssembler::EmitFarJumpSlot(Address target) {
  const uint32_t inst[kFarJumpTableSlotSize / 4] = {
      // ...
  };
  // ...
}

// static
void JumpTableAssembler::PatchFarJumpSlot(WritableJitAllocation& jit_allocation,
                                          Address slot, Address target) {
  // The 8-byte target sits at the very end of the far-jump slot.
  Address target_addr = slot + kFarJumpTableSlotSize - 8;
  jit_allocation.WriteValue(target_addr, target, kRelaxedStore);
  // ...
}
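// Range sketch for the is_int26 check above: a signed 26-bit byte offset
// reaches +-2^25 bytes, i.e. +-32 MB, before the far jump table is needed.
namespace ppc_range_example {
static_assert((1LL << 25) == 32 * 1024 * 1024);
}  // namespace ppc_range_example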
#elif V8_TARGET_ARCH_RISCV64
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  static_assert(kLazyCompileTableSlotSize == 3 * kInstrSize);
  // lui/addi split of func_index; adding 0x800 first compensates for the
  // sign extension of the low 12 bits.
  int64_t high_20 = (func_index + 0x800) >> 12;
  int64_t low_12 = int64_t(func_index) << 52 >> 52;
  // ...
  // jal covers +-1 MB, which must suffice to reach the lazy-compile target.
  DCHECK(is_int21(target_offset));
  // ...
  const uint32_t inst[kLazyCompileTableSlotSize / 4] = {
      // ... lui + addi materializing func_index, then a jal whose J-type
      // immediate scatters target_offset as:
      //   uint32_t(target_offset & 0xff000) |         // imm[19:12] in place
      //   uint32_t((target_offset & 0x800) << 9) |    // imm[11]   -> bit 20
      //   uint32_t((target_offset & 0x7fe) << 20) |   // imm[10:1] -> bits 30:21
      //   uint32_t((target_offset & 0x100000) << 11)  // imm[20]   -> bit 31
  };
  // ...
}
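// Decode sketch for the J-type immediate scatter above. EncodeJImm and
// DecodeJImm are illustration-only helpers covering just the immediate
// bits, not the opcode or rd fields.
namespace riscv_j_example {
constexpr uint32_t EncodeJImm(uint32_t off) {
  return uint32_t(off & 0xff000) | uint32_t((off & 0x800) << 9) |
         uint32_t((off & 0x7fe) << 20) | uint32_t((off & 0x100000) << 11);
}
constexpr uint32_t DecodeJImm(uint32_t insn) {
  return (insn & 0xff000) | ((insn >> 9) & 0x800) | ((insn >> 20) & 0x7fe) |
         ((insn >> 11) & 0x100000);
}
static_assert(DecodeJImm(EncodeJImm(0x7fe)) == 0x7fe);
static_assert(DecodeJImm(EncodeJImm(0xff000)) == 0xff000);
}  // namespace riscv_j_example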
bool JumpTableAssembler::EmitJumpSlot(Address target) {
  static_assert(kJumpTableSlotSize == 2 * kInstrSize);
  intptr_t relative_target = target - pc_;
  // An auipc/jalr pair reaches +-2 GB, so the offset must fit in 32 bits.
  if (!is_int32(relative_target)) {
    return false;  // Out of range; use the far jump table instead.
  }
  // ...
  // Split the offset into auipc's high 20 bits and jalr's signed low 12.
  int64_t high_20 = (relative_target + 0x800) >> 12;
  int64_t low_12 = int64_t(relative_target) << 52 >> 52;
  // ...
}
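// Sketch of the +0x800 correction above: the low 12 bits are consumed
// sign-extended, so the high part must round up whenever bit 11 is set.
// HiLoRoundTrips mirrors the arithmetic, using mask-and-subtract in place
// of the << 52 >> 52 sign-extension trick.
namespace riscv_split_example {
constexpr bool HiLoRoundTrips(long long off) {
  long long high_20 = (off + 0x800) >> 12;
  long long low_12 = (off & 0xfff) - ((off & 0x800) << 1);  // sign-extend
  return high_20 * 4096 + low_12 == off;
}
static_assert(HiLoRoundTrips(0x7ff));
static_assert(HiLoRoundTrips(0x800));  // bit 11 set: needs the round-up
static_assert(HiLoRoundTrips(-0x1234));
}  // namespace riscv_split_example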
void JumpTableAssembler::EmitFarJumpSlot(Address target) {
  // The 8-byte target lives 4 instructions into the slot; auipc/ld address
  // it pc-relatively, so the offset is a compile-time constant.
  uint32_t high_20 = (int64_t(4 * kInstrSize + 0x800) >> 12);
  uint32_t low_12 = (int64_t(4 * kInstrSize) << 52 >> 52);
  // ...
  const uint32_t inst[kFarJumpTableSlotSize / 4] = {
      // ... auipc + load + indirect jump, then the 8-byte target
  };
  // ...
}

// static
void JumpTableAssembler::PatchFarJumpSlot(WritableJitAllocation& jit_allocation,
                                          Address slot, Address target) {
  int kTargetOffset = kFarJumpTableSlotSize - sizeof(Address);
  // Patch only the data word; the instructions are never rewritten.
  jit_allocation.WriteValue(slot + kTargetOffset, target, kRelaxedStore);
  // ...
}
#elif V8_TARGET_ARCH_RISCV32
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  static_assert(kLazyCompileTableSlotSize == 3 * kInstrSize);
  int64_t high_20 = (func_index + 0x800) >> 12;
  int64_t low_12 = int64_t(func_index) << 52 >> 52;
  // ...
  DCHECK(is_int21(target_offset));
  // ...
  const uint32_t inst[kLazyCompileTableSlotSize / 4] = {
      // ... lui + addi materializing func_index, then a jal using the same
      // J-type immediate bit-scatter as on riscv64:
      //   uint32_t(target_offset & 0xff000) |         // imm[19:12] in place
      //   uint32_t((target_offset & 0x800) << 9) |    // imm[11]   -> bit 20
      //   uint32_t((target_offset & 0x7fe) << 20) |   // imm[10:1] -> bits 30:21
      //   uint32_t((target_offset & 0x100000) << 11)  // imm[20]   -> bit 31
  };
  // ...
}

bool JumpTableAssembler::EmitJumpSlot(Address target) {
  // Like the riscv64 far-jump slot, the slot stores the target in-line and
  // loads it pc-relatively (hence the 4 * kInstrSize offset).
  uint32_t high_20 = (int64_t(4 * kInstrSize + 0x800) >> 12);
  uint32_t low_12 = (int64_t(4 * kInstrSize) << 52 >> 52);
  // ...
  const uint32_t inst[kJumpTableSlotSize / 4] = {
      // ...
  };
  // ...
}

// Near and far jump slots are identical on riscv32.
static_assert(kJumpTableSlotSize == kFarJumpTableSlotSize);
#else
#error Unknown architecture.
#endif