#define EMIT_INSTR_HANDLER(name) EmitFnId(k_##name);
#define EMIT_INSTR_HANDLER_WITH_PC(name, pc) EmitFnId(k_##name, pc);

#define START_EMIT_INSTR_HANDLER()                       \
  {                                                      \
    size_t _current_code_offset = code_.size();          \
    size_t _current_slots_size = slots_.size();          \
    DCHECK(!no_nested_emit_instr_handler_guard_);        \
    no_nested_emit_instr_handler_guard_ = true;          \
    stack_.clear_history();                              \
    if (v8_flags.drumbrake_compact_bytecode) {           \
      handler_size_ = InstrHandlerSize::Small;           \
    } else {                                             \
      DCHECK_EQ(handler_size_, InstrHandlerSize::Large); \
    }                                                    \
    current_instr_encoding_failed_ = false;

#define START_EMIT_INSTR_HANDLER_WITH_ID(name) \
  START_EMIT_INSTR_HANDLER()                   \
  EMIT_INSTR_HANDLER(name)

#define START_EMIT_INSTR_HANDLER_WITH_PC(name, pc) \
  START_EMIT_INSTR_HANDLER()                       \
  EMIT_INSTR_HANDLER_WITH_PC(name, pc)
#define END_EMIT_INSTR_HANDLER()                                               \
  if (v8_flags.drumbrake_compact_bytecode && current_instr_encoding_failed_) { \
    code_.resize(_current_code_offset);                                        \
    slots_.resize(_current_slots_size);                                        \
    current_instr_encoding_failed_ = false;                                    \
    handler_size_ = InstrHandlerSize::Large;                                   \
  }                                                                            \
  DCHECK(!current_instr_encoding_failed_);                                     \
  no_nested_emit_instr_handler_guard_ = false;                                 \
  }
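// Together, START/END_EMIT_INSTR_HANDLER bracket the emission of one
// instruction and implement speculative encoding for
// --drumbrake-compact-bytecode: START snapshots code_ and slots_ and
// optimistically picks the small handler encoding; if an operand later fails
// to fit (current_instr_encoding_failed_), END rolls both buffers back to
// the snapshot and switches handler_size_ to Large so the instruction can be
// re-emitted with the wide encoding. The
// no_nested_emit_instr_handler_guard_ flag only asserts that these brackets
// are never nested.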
#define EMIT_MEM64_INSTR_HANDLER(name, mem64_name, is_memory64) \
  if (V8_UNLIKELY(is_memory64)) {                               \
    EMIT_INSTR_HANDLER(mem64_name);                             \
  } else {                                                      \
    EMIT_INSTR_HANDLER(name);                                   \
  }

#define EMIT_MEM64_INSTR_HANDLER_WITH_PC(name, mem64_name, is_memory64, pc) \
  if (V8_UNLIKELY(is_memory64)) {                                           \
    EMIT_INSTR_HANDLER_WITH_PC(mem64_name, pc);                             \
  } else {                                                                  \
    EMIT_INSTR_HANDLER_WITH_PC(name, pc);                                   \
  }
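// Illustrative use only (the handler names here are hypothetical): a load
// emitted against a memory64 memory needs the 64-bit-index variant of the
// same handler, so codegen dispatches on is_memory64:
//   EMIT_MEM64_INSTR_HANDLER(s2s_I32LoadMem, s2s_I32LoadMem_Idx64,
//                            is_memory64);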
                 const uint8_t* module_start, Zone* zone)
    : interpreter_code_(zone),
      bytecode_generation_time_(),
      generated_code_size_(0) {
  if (module == nullptr) return;
  for (const WasmFunction& function : module->functions) {
    if (function.imported) {
      DCHECK(!function.code.is_set());
      AddFunction(&function, nullptr, nullptr);
    } else {
      AddFunction(&function, module_start + function.code.offset(),
                  module_start + function.code.end_offset());
    }
  }
}
  DCHECK_EQ(code->function->imported, code->start == nullptr);
  DCHECK(!code->bytecode && code->start);
  int bytecode_generation_time_usecs =
                 bytecode_generation_time_usecs);
  if (prev_code_size_mb < code_size_mb) {
  auto it = map_.find(current_thread_id);
  if (it == map_.end()) {
    map_[current_thread_id] = std::make_unique<WasmInterpreterThread>(isolate);
    it = map_.find(current_thread_id);
  }
  return it->second.get();
  auto it = map_.begin();
  while (it != map_.end()) {
    WasmInterpreterThread* thread = it->second.get();
    if (thread->GetIsolate() == isolate) {
      thread->TerminateExecutionTimers();
                                               uint32_t catch_block_index,
  if (caught_exceptions_.is_null()) {
    uint32_t blocks_count = current_function_->GetBlocksCount();
    Handle<FixedArray> caught_exceptions =
        isolate->factory()->NewFixedArrayWithHoles(blocks_count);
    caught_exceptions_ = isolate->global_handles()->Create(*caught_exceptions);
  }
  caught_exceptions_->set(catch_block_index, *exception);
    Isolate* isolate, uint32_t catch_block_index) const {
  DCHECK(!IsTheHole(*exception));
  if (!caught_exceptions_.is_null()) {
    isolate->global_handles()->Destroy(caught_exceptions_.location());
                                       bool track_jitless_wasm)
    : execute_ratio_histogram_(
          track_jitless_wasm
              ? isolate->counters()->wasm_jitless_execution_ratio()
              : isolate->counters()->wasm_jit_execution_ratio()),
      slow_wasm_histogram_(
          track_jitless_wasm
              ? isolate->counters()->wasm_jitless_execution_too_slow()
              : isolate->counters()->wasm_jit_execution_too_slow()),
      window_has_started_(false),
      next_interval_time_(),
      start_interval_time_(),
      window_running_time_(),
      sample_duration_(base::TimeDelta::FromMilliseconds(std::max(
          0, v8_flags.wasm_exec_time_histogram_sample_duration.value()))),
      slow_threshold_(v8_flags.wasm_exec_time_histogram_slow_threshold.value()),
      slow_threshold_samples_count_(std::max(
          1, v8_flags.wasm_exec_time_slow_threshold_samples_count.value())),
      isolate_(isolate) {
  int cooldown_interval_in_msec = std::max(
      0, v8_flags.wasm_exec_time_histogram_sample_period.value() -
             v8_flags.wasm_exec_time_histogram_sample_duration.value());
  for (int sample : samples_) sum += sample;
  Address* global_handle_location =
      reinterpret_cast<Address*>(data.GetParameter());
  return weak_instance;
std::optional<wasm::ValueType> GetWasmReturnTypeFromSignature(
    const FunctionSig* wasm_signature) {
  if (wasm_signature->return_count() == 0) return {};
  DCHECK_EQ(wasm_signature->return_count(), 1);
  return wasm_signature->GetReturn(0);
}
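// Note that only zero- or one-result signatures are expected here (the
// DCHECK above rules out multi-value returns); an empty optional means the
// function returns nothing.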
std::vector<WasmInterpreterStackEntry>
  std::vector<WasmInterpreterStackEntry> stack_trace;
  if (frame_state->current_function_) {
        frame_state->current_function_->GetFunctionIndex(),
        frame_state->current_bytecode_
            ? frame_state->current_function_->GetPcFromTrapCode(
                  frame_state->current_bytecode_)
            : 0});
  }
  frame_state = frame_state->previous_frame_;
  while (frame_state && frame_state->current_function_) {
        frame_state->current_function_->GetFunctionIndex(),
        frame_state->current_bytecode_
            ? frame_state->current_function_->GetPcFromTrapCode(
                  frame_state->current_bytecode_)
            : 0});
    frame_state = frame_state->previous_frame_;
  }
  if (stack_trace.empty()) {
  std::vector<int> function_indexes;
  const FrameState* frame_state = &current_frame_state_;
  while (frame_state->current_function_) {
    function_indexes.push_back(
        frame_state->current_function_->GetFunctionIndex());
    frame_state = frame_state->previous_frame_;
  }
  if (static_cast<size_t>(index) < function_indexes.size()) {
    return function_indexes[function_indexes.size() - index - 1];
  }
459 "WasmInterpreterThread::WasmInterpreterThread",
460 "Cannot allocate Wasm interpreter stack");
473 new_size = std::max(
size_t{8},
480 static_cast<int>(new_size));
488 static_cast<int>(index +
count));
  if (!isolate->has_exception()) {
        isolate->factory()->NewWasmRuntimeError(message);
        isolate->factory()->wasm_uncatchable_symbol(),
        isolate->factory()->true_value(), NONE);
    isolate->Throw(*error_obj);
  if (v8_flags.wasm_enable_exec_time_histograms && v8_flags.slow_histograms) {
  if (v8_flags.wasm_enable_exec_time_histograms && v8_flags.slow_histograms) {
  if (v8_flags.wasm_enable_exec_time_histograms && v8_flags.slow_histograms) {
#if !defined(V8_DRUMBRAKE_BOUNDS_CHECKS)
#define ITEM_ENUM_DEFINE(name) name##counter,
#undef ITEM_ENUM_DEFINE
                                    int64_t r0, double fp0);
  if (v8_flags.drumbrake_compact_bytecode) {                                 \
    trap_handler::RegisterHandlerData(                                       \
        reinterpret_cast<Address>(kInstructionTable[k_##name]),              \
        embedded_data.InstructionSizeOf(Builtin::k##name##_s), 0, nullptr);  \
  }                                                                          \
  trap_handler::RegisterHandlerData(                                         \
      reinterpret_cast<Address>(                                             \
          kInstructionTable[k_##name + kInstructionCount]),                  \
      embedded_data.InstructionSizeOf(Builtin::k##name##_l), 0, nullptr);

  if (v8_flags.drumbrake_compact_bytecode) {                                   \
    kInstructionTable[index] = reinterpret_cast<PWasmOp*>(                     \
        isolate->builtins()->code(Builtin::k##name##_s)->instruction_start()); \
  }                                                                            \
  kInstructionTable[kInstructionCount + index++] = reinterpret_cast<PWasmOp*>( \
      isolate->builtins()->code(Builtin::k##name##_l)->instruction_start());
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wcast-calling-convention"
#pragma clang diagnostic pop

#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_compact_bytecode) {
#define DEFINE_INSTR_HANDLER(name) kInstructionHandlerNames[index++] = #name;
#undef DEFINE_INSTR_HANDLER
      codemap_(isolate, module, module_bytes_.data(), &zone_) {
  module->SetWasmInterpreter(wasm_runtime_);
#if !defined(V8_DRUMBRAKE_BOUNDS_CHECKS)
  return thread->state();
#ifdef DRUMBRAKE_ENABLE_PROFILING

static const char* prev_op_name_s = nullptr;
static std::map<std::pair<const char*, const char*>, uint64_t>*
    ops_pairs_count_s = nullptr;
static std::map<const char*, uint64_t>* ops_count_s = nullptr;

static void ProfileOp(const char* op_name) {
  if (!ops_pairs_count_s) {
    ops_pairs_count_s =
        new std::map<std::pair<const char*, const char*>, uint64_t>();
    ops_count_s = new std::map<const char*, uint64_t>();
  }
  if (prev_op_name_s) {
    (*ops_pairs_count_s)[{prev_op_name_s, op_name}]++;
  }
  (*ops_count_s)[op_name]++;
  prev_op_name_s = op_name;
}
template <typename A, typename B>
std::pair<B, A> flip_pair(const std::pair<A, B>& p) {
  return std::pair<B, A>(p.second, p.first);
}

template <typename A, typename B>
std::multimap<B, A> flip_map(const std::map<A, B>& src) {
  std::multimap<B, A> dst;
  std::transform(src.begin(), src.end(), std::inserter(dst, dst.begin()),
                 flip_pair<A, B>);
  return dst;
}
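// flip_map inverts a map so it can be iterated in value order; e.g. an
// op-name -> count map becomes a count -> op-name multimap (a multimap,
// because several ops can share the same count). A minimal sketch:
//   std::map<const char*, uint64_t> counts{{"i32.add", 7}, {"i32.sub", 7}};
//   std::multimap<uint64_t, const char*> by_count = flip_map(counts);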
static void PrintOpsCount() {
  std::multimap<uint64_t, const char*> count_ops_map = flip_map(*ops_count_s);
  uint64_t total_count = 0;
  for (auto& pair : count_ops_map) {
    printf("%10lld, %s\n", pair.first, pair.second);
    total_count += pair.first;
  }
  printf("Total count: %10lld\n\n", total_count);

  std::multimap<uint64_t, std::pair<const char*, const char*>>
      count_pairs_ops_map = flip_map(*ops_pairs_count_s);
  for (auto& pair : count_pairs_ops_map) {
    printf("%10lld, %s -> %s\n", pair.first, pair.second.first,
           pair.second.second);
  }
}

static void PrintAndClearProfilingData() {
  PrintOpsCount();
  delete ops_count_s;
  ops_count_s = nullptr;
  delete ops_pairs_count_s;
  ops_pairs_count_s = nullptr;
}
#define NextOp()                                                             \
  ProfileOp(__FUNCTION__);                                                   \
  MUSTTAIL return kInstructionTable[ReadFnId(code) & kInstructionTableMask]( \
      code, sp, wasm_runtime, r0, fp0)
#else
#define NextOp()                                                             \
  MUSTTAIL return kInstructionTable[ReadFnId(code) & kInstructionTableMask]( \
      code, sp, wasm_runtime, r0, fp0)
#endif  // DRUMBRAKE_ENABLE_PROFILING
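// NextOp() is the core of the threaded dispatch model: instead of returning
// to a central interpreter loop, every handler reads the id of the next
// instruction handler from the bytecode stream and tail-calls (MUSTTAIL)
// through kInstructionTable, so control hops directly from handler to
// handler with no C++ stack growth.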
                                     int64_t r0, double fp0) {
                               TrapReason::kTrapUnreachable, fp0);
                                     int64_t r0, double fp0) {
#define TRAP(trap_reason) \
  MUSTTAIL return HandlersBase::Trap(code, sp, wasm_runtime, trap_reason, fp0);

#define INLINED_TRAP(trap_reason)           \
  wasm_runtime->SetTrap(trap_reason, code); \
  MUSTTAIL return s_unwind_func_addr(code, sp, wasm_runtime, trap_reason, .0);
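// Traps follow the same tail-call discipline: a faulting handler never
// returns normally; it jumps into the shared Trap/unwind path, which records
// the trap reason (SetTrap) and unwinds the current interpreter frame.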
template <bool Compressed>
  template <typename T>
        reinterpret_cast<Address>(code));
#if V8_ENABLE_DRUMBRAKE_TRACING
    if (v8_flags.trace_drumbrake_compact_bytecode) {
      printf("Read slot_offset_t %d\n", res);
  template <typename T>
  static inline T JSMax(T x, T y) {
    if (std::isnan(x) || std::isnan(y)) {
      return std::numeric_limits<T>::quiet_NaN();
    }
    if (std::signbit(x) < std::signbit(y)) return x;
    return x > y ? x : y;
  }

  template <typename T>
  static inline T JSMin(T x, T y) {
    if (std::isnan(x) || std::isnan(y)) {
      return std::numeric_limits<T>::quiet_NaN();
    }
    if (std::signbit(x) < std::signbit(y)) return y;
    return x > y ? y : x;
  }
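  // JSMin/JSMax implement Wasm float min/max semantics, which differ from
  // std::min/std::max in two ways: any NaN operand produces NaN, and the
  // signbit comparison orders -0.0 below +0.0. For example
  // JSMax(-0.0f, 0.0f) == +0.0f, whereas std::max(-0.0f, 0.0f) returns its
  // first argument, -0.0f.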
  template <typename T>
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
    if (v8_flags.trace_drumbrake_execution)
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
    if (v8_flags.trace_drumbrake_execution)
  template <typename T>
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  template <typename T>
    if (rval == -1) return 0;
  template <typename T>
  template <typename IntT>
                                       int64_t r0, double fp0) {
  template <typename FloatT>
                                       int64_t r0, double fp0) {
  template <typename T>
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
  template <typename IntT>
                                       int64_t r0, double fp0) {
                 static_cast<IntT>(r0));
  template <typename FloatT>
                                       int64_t r0, double fp0) {
                   static_cast<FloatT>(fp0));
  template <typename T>
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
  template <typename T>
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
  template <typename T>
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
  template <typename IntT, typename IntU = IntT, typename MemIdx = uint32_t,
                                       int64_t r0, double fp0) {
    MemIdx index = static_cast<MemIdx>(r0);
        effective_index < index ||
      TRAP(TrapReason::kTrapMemOutOfBounds)
    uint8_t* address = memory_start + effective_index;
    r0 = static_cast<IntT>(value);
  template <typename FloatT, typename MemIdx = uint32_t,
                                       int64_t r0, double fp0) {
    MemIdx index = static_cast<MemIdx>(r0);
        effective_index < index ||
      TRAP(TrapReason::kTrapMemOutOfBounds)
    uint8_t* address = memory_start + effective_index;
  template <typename T, typename U = T, typename MemIdx = uint32_t,
                                       int64_t r0, double fp0) {
    MemIdx index = static_cast<MemIdx>(r0);
        effective_index < index ||
      TRAP(TrapReason::kTrapMemOutOfBounds)
    uint8_t* address = memory_start + effective_index;
  template <typename IntT, typename IntU = IntT, typename MemIdx = uint32_t,
                                       int64_t r0, double fp0) {
        effective_index < index ||
      TRAP(TrapReason::kTrapMemOutOfBounds)
    uint8_t* address = memory_start + effective_index;
    r0 = static_cast<IntT>(
  template <typename FloatT, typename MemIdx = uint32_t,
                                       int64_t r0, double fp0) {
        effective_index < index ||
      TRAP(TrapReason::kTrapMemOutOfBounds)
    uint8_t* address = memory_start + effective_index;
    fp0 = static_cast<FloatT>(
  template <typename T, typename U = T, typename MemIdx = uint32_t,
                                       int64_t r0, double fp0) {
        effective_index < index ||
      TRAP(TrapReason::kTrapMemOutOfBounds)
    uint8_t* address = memory_start + effective_index;
  template <typename T, typename U = T, typename MemIdx = uint32_t,
                                       int64_t r0, double fp0) {
        effective_index < index ||
      TRAP(TrapReason::kTrapMemOutOfBounds)
    uint8_t* address = memory_start + effective_index;
                 static_cast<T>(value));
  template <typename IntT, typename IntU = IntT, typename MemIdx = uint32_t,
                                       int64_t r0, double fp0) {
    IntT value = static_cast<IntT>(r0);
        effective_index < index ||
      TRAP(TrapReason::kTrapMemOutOfBounds)
    uint8_t* address = memory_start + effective_index;
        reinterpret_cast<Address>(address),
  template <typename FloatT, typename MemIdx = uint32_t,
                                       int64_t r0, double fp0) {
    FloatT value = static_cast<FloatT>(fp0);
        effective_index < index ||
      TRAP(TrapReason::kTrapMemOutOfBounds)
    uint8_t* address = memory_start + effective_index;
        reinterpret_cast<Address>(address),
  template <typename T, typename U = T, typename MemIdx = uint32_t,
                                       int64_t r0, double fp0) {
        effective_index < index ||
      TRAP(TrapReason::kTrapMemOutOfBounds)
    uint8_t* address = memory_start + effective_index;
        reinterpret_cast<Address>(address),
  template <typename T, typename MemIdx = uint32_t,
                                       int64_t r0, double fp0) {
    uint64_t load_index = r0;
    uint64_t effective_load_index = load_offset + load_index;
    uint64_t effective_store_index = store_offset + store_index;
        effective_load_index < load_index ||
        effective_store_index < store_offset ||
      TRAP(TrapReason::kTrapMemOutOfBounds)
    uint8_t* load_address = memory_start + effective_load_index;
    uint8_t* store_address = memory_start + effective_store_index;
        reinterpret_cast<Address>(store_address),
  template <typename T, typename MemIdx = uint32_t,
                                       int64_t r0, double fp0) {
    uint64_t effective_load_index = load_offset + load_index;
    uint64_t effective_store_index = store_offset + store_index;
        effective_load_index < load_index ||
        effective_store_index < store_offset ||
      TRAP(TrapReason::kTrapMemOutOfBounds)
    uint8_t* load_address = memory_start + effective_load_index;
    uint8_t* store_address = memory_start + effective_store_index;
        reinterpret_cast<Address>(store_address),
#if defined(V8_DRUMBRAKE_BOUNDS_CHECKS)
  template <typename T, typename U = T>
                                       int64_t r0, double fp0) {
    uint64_t index = static_cast<uint32_t>(r0);
        effective_index < index ||
      TRAP(TrapReason::kTrapMemOutOfBounds)
    uint8_t* address = memory_start + effective_index;
               static_cast<T>(value));
  static auto constexpr r2s_I32LoadMem8S_LocalSet =
      r2s_LoadMem_LocalSet<int32_t, int8_t>;
  static auto constexpr r2s_I32LoadMem8U_LocalSet =
      r2s_LoadMem_LocalSet<int32_t, uint8_t>;
  static auto constexpr r2s_I32LoadMem16S_LocalSet =
      r2s_LoadMem_LocalSet<int32_t, int16_t>;
  static auto constexpr r2s_I32LoadMem16U_LocalSet =
      r2s_LoadMem_LocalSet<int32_t, uint16_t>;
  static auto constexpr r2s_I64LoadMem8S_LocalSet =
      r2s_LoadMem_LocalSet<int64_t, int8_t>;
  static auto constexpr r2s_I64LoadMem8U_LocalSet =
      r2s_LoadMem_LocalSet<int64_t, uint8_t>;
  static auto constexpr r2s_I64LoadMem16S_LocalSet =
      r2s_LoadMem_LocalSet<int64_t, int16_t>;
  static auto constexpr r2s_I64LoadMem16U_LocalSet =
      r2s_LoadMem_LocalSet<int64_t, uint16_t>;
  static auto constexpr r2s_I64LoadMem32S_LocalSet =
      r2s_LoadMem_LocalSet<int64_t, int32_t>;
  static auto constexpr r2s_I64LoadMem32U_LocalSet =
      r2s_LoadMem_LocalSet<int64_t, uint32_t>;
  static auto constexpr r2s_I32LoadMem_LocalSet = r2s_LoadMem_LocalSet<int32_t>;
  static auto constexpr r2s_I64LoadMem_LocalSet = r2s_LoadMem_LocalSet<int64_t>;
  static auto constexpr r2s_F32LoadMem_LocalSet = r2s_LoadMem_LocalSet<float>;
  static auto constexpr r2s_F64LoadMem_LocalSet = r2s_LoadMem_LocalSet<double>;
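  // These aliases instantiate fused "superinstructions": a load whose result
  // immediately feeds a local.set is executed by a single combined handler,
  // saving one dispatch step and one push/pop round-trip on the value stack
  // for this common bytecode pair.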
  static auto constexpr s2s_I32LoadMem8S_LocalSet =
      s2s_LoadMem_LocalSet<int32_t, int8_t>;
  static auto constexpr s2s_I32LoadMem8U_LocalSet =
      s2s_LoadMem_LocalSet<int32_t, uint8_t>;
  static auto constexpr s2s_I32LoadMem16S_LocalSet =
      s2s_LoadMem_LocalSet<int32_t, int16_t>;
  static auto constexpr s2s_I32LoadMem16U_LocalSet =
      s2s_LoadMem_LocalSet<int32_t, uint16_t>;
  static auto constexpr s2s_I64LoadMem8S_LocalSet =
      s2s_LoadMem_LocalSet<int64_t, int8_t>;
  static auto constexpr s2s_I64LoadMem8U_LocalSet =
      s2s_LoadMem_LocalSet<int64_t, uint8_t>;
  static auto constexpr s2s_I64LoadMem16S_LocalSet =
      s2s_LoadMem_LocalSet<int64_t, int16_t>;
  static auto constexpr s2s_I64LoadMem16U_LocalSet =
      s2s_LoadMem_LocalSet<int64_t, uint16_t>;
  static auto constexpr s2s_I64LoadMem32S_LocalSet =
      s2s_LoadMem_LocalSet<int64_t, int32_t>;
  static auto constexpr s2s_I64LoadMem32U_LocalSet =
      s2s_LoadMem_LocalSet<int64_t, uint32_t>;
  template <typename T, typename U = T>
                                       int64_t r0, double fp0) {
        effective_index < index ||
      TRAP(TrapReason::kTrapMemOutOfBounds)
    uint8_t* address = memory_start + effective_index;
        reinterpret_cast<Address>(address),
  static auto constexpr s2s_LocalGet_I32StoreMem8 =
      s2s_LocalGet_StoreMem<int32_t, int8_t>;
  static auto constexpr s2s_LocalGet_I32StoreMem16 =
      s2s_LocalGet_StoreMem<int32_t, int16_t>;
  static auto constexpr s2s_LocalGet_I64StoreMem8 =
      s2s_LocalGet_StoreMem<int64_t, int8_t>;
  static auto constexpr s2s_LocalGet_I64StoreMem16 =
      s2s_LocalGet_StoreMem<int64_t, int16_t>;
  static auto constexpr s2s_LocalGet_I64StoreMem32 =
      s2s_LocalGet_StoreMem<int64_t, int32_t>;
  static auto constexpr s2s_LocalGet_I32StoreMem =
      s2s_LocalGet_StoreMem<int32_t>;
  static auto constexpr s2s_LocalGet_I64StoreMem =
      s2s_LocalGet_StoreMem<int64_t>;
  static auto constexpr s2s_LocalGet_F32StoreMem = s2s_LocalGet_StoreMem<float>;
  static auto constexpr s2s_LocalGet_F64StoreMem =
      s2s_LocalGet_StoreMem<double>;
  template <typename IntT>
                                       int64_t r0, double fp0) {
    r0 = r0 ? val1 : val2;
  template <typename FloatT>
                                       int64_t r0, double fp0) {
    fp0 = r0 ? val1 : val2;
  template <typename T>
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
  template <typename IntT>
                                       int64_t r0, double fp0) {
    r0 = cond ? val1 : val2;
  template <typename FloatT>
                                       int64_t r0, double fp0) {
    fp0 = cond ? val1 : val2;
  template <typename T>
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
#define FOREACH_ARITHMETIC_BINOP(V) \
  V(I32Add, uint32_t, r0, +, I32)   \
  V(I32Sub, uint32_t, r0, -, I32)   \
  V(I32Mul, uint32_t, r0, *, I32)   \
  V(I32And, uint32_t, r0, &, I32)   \
  V(I32Ior, uint32_t, r0, |, I32)   \
  V(I32Xor, uint32_t, r0, ^, I32)   \
  V(I64Add, uint64_t, r0, +, I64)   \
  V(I64Sub, uint64_t, r0, -, I64)   \
  V(I64Mul, uint64_t, r0, *, I64)   \
  V(I64And, uint64_t, r0, &, I64)   \
  V(I64Ior, uint64_t, r0, |, I64)   \
  V(I64Xor, uint64_t, r0, ^, I64)   \
  V(F32Add, float, fp0, +, F32)     \
  V(F32Sub, float, fp0, -, F32)     \
  V(F32Mul, float, fp0, *, F32)     \
  V(F32Div, float, fp0, /, F32)     \
  V(F64Add, double, fp0, +, F64)    \
  V(F64Sub, double, fp0, -, F64)    \
  V(F64Mul, double, fp0, *, F64)    \
  V(F64Div, double, fp0, /, F64)
#define DEFINE_BINOP(name, ctype, reg, op, type)                            \
  INSTRUCTION_HANDLER_FUNC r2r_##name(const uint8_t* code, uint32_t* sp,    \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {             \
    ctype rval = static_cast<ctype>(reg);                                   \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                        \
    reg = static_cast<ctype>(lval op rval);                                 \
    NextOp();                                                               \
  }                                                                         \
                                                                            \
  INSTRUCTION_HANDLER_FUNC r2s_##name(const uint8_t* code, uint32_t* sp,    \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {             \
    ctype rval = static_cast<ctype>(reg);                                   \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                        \
    push<ctype>(sp, code, wasm_runtime, lval op rval);                      \
    NextOp();                                                               \
  }                                                                         \
                                                                            \
  INSTRUCTION_HANDLER_FUNC s2r_##name(const uint8_t* code, uint32_t* sp,    \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {             \
    ctype rval = pop<ctype>(sp, code, wasm_runtime);                        \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                        \
    reg = static_cast<ctype>(lval op rval);                                 \
    NextOp();                                                               \
  }                                                                         \
                                                                            \
  INSTRUCTION_HANDLER_FUNC s2s_##name(const uint8_t* code, uint32_t* sp,    \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {             \
    ctype rval = pop<ctype>(sp, code, wasm_runtime);                        \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                        \
    push<ctype>(sp, code, wasm_runtime, lval op rval);                      \
    NextOp();                                                               \
  }
FOREACH_ARITHMETIC_BINOP(DEFINE_BINOP)
#undef DEFINE_BINOP
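// Every binop is emitted in four variants named by where its operands and
// result live: r2r (top operand cached in the r0/fp0 register, result to the
// register), r2s (register to stack), s2r (stack to register) and s2s (stack
// to stack). For example, DEFINE_BINOP(I32Add, uint32_t, r0, +, I32) expands
// s2s_I32Add into: pop rval, pop lval, push lval + rval, then NextOp().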
#define FOREACH_SIGNED_DIV_BINOP(V) \
  V(I32DivS, int32_t, r0, /, I32)   \
  V(I64DivS, int64_t, r0, /, I64)

#define FOREACH_UNSIGNED_DIV_BINOP(V) \
  V(I32DivU, uint32_t, r0, /, I32)    \
  V(I64DivU, uint64_t, r0, /, I64)

#define FOREACH_REM_BINOP(V)                 \
  V(I32RemS, int32_t, r0, ExecuteRemS, I32)  \
  V(I64RemS, int64_t, r0, ExecuteRemS, I64)  \
  V(I32RemU, uint32_t, r0, ExecuteRemU, I32) \
  V(I64RemU, uint64_t, r0, ExecuteRemU, I64)

#define FOREACH_TRAPPING_BINOP(V) \
  FOREACH_SIGNED_DIV_BINOP(V)     \
  FOREACH_UNSIGNED_DIV_BINOP(V)   \
  FOREACH_REM_BINOP(V)
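// Integer division and remainder are the only binops that can trap: a zero
// divisor always traps, and signed division additionally traps on the one
// unrepresentable case, INT_MIN / -1 (e.g. i32.div_s of -2147483648 by -1).
// The remainder of that same pair is well defined: ExecuteRemS returns 0.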
#define DEFINE_BINOP(name, ctype, reg, op, type)                           \
  INSTRUCTION_HANDLER_FUNC r2r_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = static_cast<ctype>(reg);                                  \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    if (rval == 0) {                                                       \
      TRAP(TrapReason::kTrapDivByZero)                                     \
    } else if (rval == -1 && lval == std::numeric_limits<ctype>::min()) {  \
      TRAP(TrapReason::kTrapDivUnrepresentable)                            \
    }                                                                      \
    reg = static_cast<ctype>(lval op rval);                                \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC r2s_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = static_cast<ctype>(reg);                                  \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    if (rval == 0) {                                                       \
      TRAP(TrapReason::kTrapDivByZero)                                     \
    } else if (rval == -1 && lval == std::numeric_limits<ctype>::min()) {  \
      TRAP(TrapReason::kTrapDivUnrepresentable)                            \
    }                                                                      \
    push<ctype>(sp, code, wasm_runtime, lval op rval);                     \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC s2r_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = pop<ctype>(sp, code, wasm_runtime);                       \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    if (rval == 0) {                                                       \
      TRAP(TrapReason::kTrapDivByZero)                                     \
    } else if (rval == -1 && lval == std::numeric_limits<ctype>::min()) {  \
      TRAP(TrapReason::kTrapDivUnrepresentable)                            \
    }                                                                      \
    reg = static_cast<ctype>(lval op rval);                                \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC s2s_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = pop<ctype>(sp, code, wasm_runtime);                       \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    if (rval == 0) {                                                       \
      TRAP(TrapReason::kTrapDivByZero)                                     \
    } else if (rval == -1 && lval == std::numeric_limits<ctype>::min()) {  \
      TRAP(TrapReason::kTrapDivUnrepresentable)                            \
    }                                                                      \
    push<ctype>(sp, code, wasm_runtime, lval op rval);                     \
    NextOp();                                                              \
  }
FOREACH_SIGNED_DIV_BINOP(DEFINE_BINOP)
#undef DEFINE_BINOP
#define DEFINE_BINOP(name, ctype, reg, op, type)                           \
  INSTRUCTION_HANDLER_FUNC r2r_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = static_cast<ctype>(reg);                                  \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    if (rval == 0) {                                                       \
      TRAP(TrapReason::kTrapDivByZero)                                     \
    }                                                                      \
    reg = static_cast<ctype>(lval op rval);                                \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC r2s_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = static_cast<ctype>(reg);                                  \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    if (rval == 0) {                                                       \
      TRAP(TrapReason::kTrapDivByZero)                                     \
    }                                                                      \
    push<ctype>(sp, code, wasm_runtime, lval op rval);                     \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC s2r_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = pop<ctype>(sp, code, wasm_runtime);                       \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    if (rval == 0) {                                                       \
      TRAP(TrapReason::kTrapDivByZero)                                     \
    }                                                                      \
    reg = static_cast<ctype>(lval op rval);                                \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC s2s_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = pop<ctype>(sp, code, wasm_runtime);                       \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    if (rval == 0) {                                                       \
      TRAP(TrapReason::kTrapDivByZero)                                     \
    }                                                                      \
    push<ctype>(sp, code, wasm_runtime, lval op rval);                     \
    NextOp();                                                              \
  }
FOREACH_UNSIGNED_DIV_BINOP(DEFINE_BINOP)
#undef DEFINE_BINOP
#define DEFINE_BINOP(name, ctype, reg, op, type)                           \
  INSTRUCTION_HANDLER_FUNC r2r_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = static_cast<ctype>(reg);                                  \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    if (rval == 0) {                                                       \
      TRAP(TrapReason::kTrapRemByZero)                                     \
    }                                                                      \
    reg = static_cast<ctype>(op(lval, rval));                              \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC r2s_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = static_cast<ctype>(reg);                                  \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    if (rval == 0) {                                                       \
      TRAP(TrapReason::kTrapRemByZero)                                     \
    }                                                                      \
    push<ctype>(sp, code, wasm_runtime, op(lval, rval));                   \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC s2r_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = pop<ctype>(sp, code, wasm_runtime);                       \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    if (rval == 0) {                                                       \
      TRAP(TrapReason::kTrapRemByZero)                                     \
    }                                                                      \
    reg = static_cast<ctype>(op(lval, rval));                              \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC s2s_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = pop<ctype>(sp, code, wasm_runtime);                       \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    if (rval == 0) {                                                       \
      TRAP(TrapReason::kTrapRemByZero)                                     \
    }                                                                      \
    push<ctype>(sp, code, wasm_runtime, op(lval, rval));                   \
    NextOp();                                                              \
  }
FOREACH_REM_BINOP(DEFINE_BINOP)
#undef DEFINE_BINOP
#define FOREACH_COMPARISON_BINOP(V) \
  V(I32Eq, uint32_t, r0, ==, I32)   \
  V(I32Ne, uint32_t, r0, !=, I32)   \
  V(I32LtU, uint32_t, r0, <, I32)   \
  V(I32LeU, uint32_t, r0, <=, I32)  \
  V(I32GtU, uint32_t, r0, >, I32)   \
  V(I32GeU, uint32_t, r0, >=, I32)  \
  V(I32LtS, int32_t, r0, <, I32)    \
  V(I32LeS, int32_t, r0, <=, I32)   \
  V(I32GtS, int32_t, r0, >, I32)    \
  V(I32GeS, int32_t, r0, >=, I32)   \
  V(I64Eq, uint64_t, r0, ==, I64)   \
  V(I64Ne, uint64_t, r0, !=, I64)   \
  V(I64LtU, uint64_t, r0, <, I64)   \
  V(I64LeU, uint64_t, r0, <=, I64)  \
  V(I64GtU, uint64_t, r0, >, I64)   \
  V(I64GeU, uint64_t, r0, >=, I64)  \
  V(I64LtS, int64_t, r0, <, I64)    \
  V(I64LeS, int64_t, r0, <=, I64)   \
  V(I64GtS, int64_t, r0, >, I64)    \
  V(I64GeS, int64_t, r0, >=, I64)   \
  V(F32Eq, float, fp0, ==, F32)     \
  V(F32Ne, float, fp0, !=, F32)     \
  V(F32Lt, float, fp0, <, F32)      \
  V(F32Le, float, fp0, <=, F32)     \
  V(F32Gt, float, fp0, >, F32)      \
  V(F32Ge, float, fp0, >=, F32)     \
  V(F64Eq, double, fp0, ==, F64)    \
  V(F64Ne, double, fp0, !=, F64)    \
  V(F64Lt, double, fp0, <, F64)     \
  V(F64Le, double, fp0, <=, F64)    \
  V(F64Gt, double, fp0, >, F64)     \
  V(F64Ge, double, fp0, >=, F64)
#define DEFINE_BINOP(name, ctype, reg, op, type)                           \
  INSTRUCTION_HANDLER_FUNC r2r_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = static_cast<ctype>(reg);                                  \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    r0 = (lval op rval) ? 1 : 0;                                           \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC r2s_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = static_cast<ctype>(reg);                                  \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    push<int32_t>(sp, code, wasm_runtime, lval op rval ? 1 : 0);           \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC s2r_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = pop<ctype>(sp, code, wasm_runtime);                       \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    r0 = (lval op rval) ? 1 : 0;                                           \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC s2s_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = pop<ctype>(sp, code, wasm_runtime);                       \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    push<int32_t>(sp, code, wasm_runtime, lval op rval ? 1 : 0);           \
    NextOp();                                                              \
  }
FOREACH_COMPARISON_BINOP(DEFINE_BINOP)
#undef DEFINE_BINOP
#define FOREACH_MORE_BINOP(V)                                                \
  V(I32Shl, uint32_t, r0, (lval << (rval & 31)), I32)                        \
  V(I32ShrU, uint32_t, r0, (lval >> (rval & 31)), I32)                       \
  V(I32ShrS, int32_t, r0, (lval >> (rval & 31)), I32)                        \
  V(I64Shl, uint64_t, r0, (lval << (rval & 63)), I64)                        \
  V(I64ShrU, uint64_t, r0, (lval >> (rval & 63)), I64)                       \
  V(I64ShrS, int64_t, r0, (lval >> (rval & 63)), I64)                        \
  V(I32Rol, uint32_t, r0, (base::bits::RotateLeft32(lval, rval & 31)), I32)  \
  V(I32Ror, uint32_t, r0, (base::bits::RotateRight32(lval, rval & 31)), I32) \
  V(I64Rol, uint64_t, r0, (base::bits::RotateLeft64(lval, rval & 63)), I64)  \
  V(I64Ror, uint64_t, r0, (base::bits::RotateRight64(lval, rval & 63)), I64) \
  V(F32Min, float, fp0, (JSMin<float>(lval, rval)), F32)                     \
  V(F32Max, float, fp0, (JSMax<float>(lval, rval)), F32)                     \
  V(F64Min, double, fp0, (JSMin<double>(lval, rval)), F64)                   \
  V(F64Max, double, fp0, (JSMax<double>(lval, rval)), F64)                   \
  V(F32CopySign, float, fp0,                                                 \
    Float32::FromBits((base::ReadUnalignedValue<uint32_t>(                   \
                           reinterpret_cast<Address>(&lval)) &               \
                       ~kFloat32SignBitMask) |                               \
                      (base::ReadUnalignedValue<uint32_t>(                   \
                           reinterpret_cast<Address>(&rval)) &               \
                       kFloat32SignBitMask))                                 \
        .get_scalar(),                                                       \
    F32)                                                                     \
  V(F64CopySign, double, fp0,                                                \
    Float64::FromBits((base::ReadUnalignedValue<uint64_t>(                   \
                           reinterpret_cast<Address>(&lval)) &               \
                       ~kFloat64SignBitMask) |                               \
                      (base::ReadUnalignedValue<uint64_t>(                   \
                           reinterpret_cast<Address>(&rval)) &               \
                       kFloat64SignBitMask))                                 \
        .get_scalar(),                                                       \
    F64)
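// copysign works purely on bit patterns: both operands are reinterpreted as
// integers (ReadUnalignedValue acts as a safe bit copy), the magnitude bits
// of lval are combined with the sign bit of rval, and the result is
// reassembled via Float32/Float64::FromBits. This is exact even for NaNs and
// signed zeros, where an arithmetic formulation would lose the sign.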
#define DEFINE_BINOP(name, ctype, reg, op, type)                           \
  INSTRUCTION_HANDLER_FUNC r2r_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = static_cast<ctype>(reg);                                  \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    reg = static_cast<ctype>(op);                                          \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC r2s_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = static_cast<ctype>(reg);                                  \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    push<ctype>(sp, code, wasm_runtime, op);                               \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC s2r_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = pop<ctype>(sp, code, wasm_runtime);                       \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    reg = static_cast<ctype>(op);                                          \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC s2s_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype rval = pop<ctype>(sp, code, wasm_runtime);                       \
    ctype lval = pop<ctype>(sp, code, wasm_runtime);                       \
    push<ctype>(sp, code, wasm_runtime, op);                               \
    NextOp();                                                              \
  }
FOREACH_MORE_BINOP(DEFINE_BINOP)
#undef DEFINE_BINOP
#define FOREACH_SIMPLE_UNOP(V)                        \
  V(F32Abs, float, fp0, abs(val), F32)                \
  V(F32Neg, float, fp0, -val, F32)                    \
  V(F32Ceil, float, fp0, ceilf(val), F32)             \
  V(F32Floor, float, fp0, floorf(val), F32)           \
  V(F32Trunc, float, fp0, truncf(val), F32)           \
  V(F32NearestInt, float, fp0, nearbyintf(val), F32)  \
  V(F32Sqrt, float, fp0, sqrt(val), F32)              \
  V(F64Abs, double, fp0, abs(val), F64)               \
  V(F64Neg, double, fp0, (-val), F64)                 \
  V(F64Ceil, double, fp0, ceil(val), F64)             \
  V(F64Floor, double, fp0, floor(val), F64)           \
  V(F64Trunc, double, fp0, trunc(val), F64)           \
  V(F64NearestInt, double, fp0, nearbyint(val), F64)  \
  V(F64Sqrt, double, fp0, sqrt(val), F64)
#define DEFINE_UNOP(name, ctype, reg, op, type)                            \
  INSTRUCTION_HANDLER_FUNC r2r_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype val = static_cast<ctype>(reg);                                   \
    reg = static_cast<ctype>(op);                                          \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC r2s_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype val = static_cast<ctype>(reg);                                   \
    push<ctype>(sp, code, wasm_runtime, op);                               \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC s2r_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype val = pop<ctype>(sp, code, wasm_runtime);                        \
    reg = static_cast<ctype>(op);                                          \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC s2s_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    ctype val = pop<ctype>(sp, code, wasm_runtime);                        \
    push<ctype>(sp, code, wasm_runtime, op);                               \
    NextOp();                                                              \
  }
FOREACH_SIMPLE_UNOP(DEFINE_UNOP)
#undef DEFINE_UNOP
#define FOREACH_ADDITIONAL_CONVERT_UNOP(V) \
  V(I32ConvertI64, int64_t, I64, r0, int32_t, I32, r0)

                                     int64_t r0, double fp0) {
                                     int64_t r0, double fp0) {
                                     int64_t r0, double fp0) {
                                     int64_t r0, double fp0) {
#define FOREACH_I64_CONVERT_FROM_FLOAT_UNOP(V)         \
  V(I64SConvertF32, float, F32, fp0, int64_t, I64, r0) \
  V(I64SConvertF64, double, F64, fp0, int64_t, I64, r0) \
  V(I64UConvertF32, float, F32, fp0, uint64_t, I64, r0) \
  V(I64UConvertF64, double, F64, fp0, uint64_t, I64, r0)

#define FOREACH_I32_CONVERT_FROM_FLOAT_UNOP(V)          \
  V(I32SConvertF32, float, F32, fp0, int32_t, I32, r0)  \
  V(I32UConvertF32, float, F32, fp0, uint32_t, I32, r0) \
  V(I32SConvertF64, double, F64, fp0, int32_t, I32, r0) \
  V(I32UConvertF64, double, F64, fp0, uint32_t, I32, r0)

#define FOREACH_OTHER_CONVERT_UNOP(V)                    \
  V(I64SConvertI32, int32_t, I32, r0, int64_t, I64, r0)  \
  V(I64UConvertI32, uint32_t, I32, r0, uint64_t, I64, r0) \
  V(F32SConvertI32, int32_t, I32, r0, float, F32, fp0)   \
  V(F32UConvertI32, uint32_t, I32, r0, float, F32, fp0)  \
  V(F32SConvertI64, int64_t, I64, r0, float, F32, fp0)   \
  V(F32UConvertI64, uint64_t, I64, r0, float, F32, fp0)  \
  V(F32ConvertF64, double, F64, fp0, float, F32, fp0)    \
  V(F64SConvertI32, int32_t, I32, r0, double, F64, fp0)  \
  V(F64UConvertI32, uint32_t, I32, r0, double, F64, fp0) \
  V(F64SConvertI64, int64_t, I64, r0, double, F64, fp0)  \
  V(F64UConvertI64, uint64_t, I64, r0, double, F64, fp0) \
  V(F64ConvertF32, float, F32, fp0, double, F64, fp0)

#define FOREACH_CONVERT_UNOP(V)          \
  FOREACH_I64_CONVERT_FROM_FLOAT_UNOP(V) \
  FOREACH_I32_CONVERT_FROM_FLOAT_UNOP(V) \
  FOREACH_OTHER_CONVERT_UNOP(V)
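// Trapping float -> int conversions must reject values whose truncation does
// not fit the target type (including NaN), e.g. i32.trunc_f32_s applied to
// 3e9f. The range check below runs before the C++ cast, since an
// out-of-range float-to-integer conversion is undefined behavior in C++.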
#define DEFINE_UNOP(name, from_ctype, from_type, from_reg, to_ctype, to_type, \
                    to_reg)                                                   \
  INSTRUCTION_HANDLER_FUNC r2r_##name(const uint8_t* code, uint32_t* sp,      \
                                      WasmInterpreterRuntime* wasm_runtime,   \
                                      int64_t r0, double fp0) {               \
    if (!base::IsValueInRangeForNumericType<to_ctype>(from_reg)) {            \
      TRAP(TrapReason::kTrapFloatUnrepresentable)                             \
    }                                                                         \
    to_reg = static_cast<to_ctype>(static_cast<from_ctype>(from_reg));        \
    NextOp();                                                                 \
  }                                                                           \
                                                                              \
  INSTRUCTION_HANDLER_FUNC r2s_##name(const uint8_t* code, uint32_t* sp,      \
                                      WasmInterpreterRuntime* wasm_runtime,   \
                                      int64_t r0, double fp0) {               \
    if (!base::IsValueInRangeForNumericType<to_ctype>(from_reg)) {            \
      TRAP(TrapReason::kTrapFloatUnrepresentable)                             \
    }                                                                         \
    to_ctype val = static_cast<from_ctype>(from_reg);                         \
    push<to_ctype>(sp, code, wasm_runtime, val);                              \
    NextOp();                                                                 \
  }                                                                           \
                                                                              \
  INSTRUCTION_HANDLER_FUNC s2r_##name(const uint8_t* code, uint32_t* sp,      \
                                      WasmInterpreterRuntime* wasm_runtime,   \
                                      int64_t r0, double fp0) {               \
    from_ctype from_val = pop<from_ctype>(sp, code, wasm_runtime);            \
    if (!base::IsValueInRangeForNumericType<to_ctype>(from_val)) {            \
      TRAP(TrapReason::kTrapFloatUnrepresentable)                             \
    }                                                                         \
    to_reg = static_cast<to_ctype>(from_val);                                 \
    NextOp();                                                                 \
  }                                                                           \
                                                                              \
  INSTRUCTION_HANDLER_FUNC s2s_##name(const uint8_t* code, uint32_t* sp,      \
                                      WasmInterpreterRuntime* wasm_runtime,   \
                                      int64_t r0, double fp0) {               \
    from_ctype from_val = pop<from_ctype>(sp, code, wasm_runtime);            \
    if (!base::IsValueInRangeForNumericType<to_ctype>(from_val)) {            \
      TRAP(TrapReason::kTrapFloatUnrepresentable)                             \
    }                                                                         \
    to_ctype val = static_cast<to_ctype>(from_val);                           \
    push<to_ctype>(sp, code, wasm_runtime, val);                              \
    NextOp();                                                                 \
  }
#define DEFINE_UNOP(name, from_ctype, from_type, from_reg, to_ctype, to_type, \
                    to_reg)                                                   \
  INSTRUCTION_HANDLER_FUNC r2r_##name(const uint8_t* code, uint32_t* sp,      \
                                      WasmInterpreterRuntime* wasm_runtime,   \
                                      int64_t r0, double fp0) {               \
    if (!is_inbounds<to_ctype>(from_reg)) {                                   \
      TRAP(TrapReason::kTrapFloatUnrepresentable)                             \
    }                                                                         \
    to_reg = static_cast<to_ctype>(static_cast<from_ctype>(from_reg));        \
    NextOp();                                                                 \
  }                                                                           \
                                                                              \
  INSTRUCTION_HANDLER_FUNC r2s_##name(const uint8_t* code, uint32_t* sp,      \
                                      WasmInterpreterRuntime* wasm_runtime,   \
                                      int64_t r0, double fp0) {               \
    if (!is_inbounds<to_ctype>(from_reg)) {                                   \
      TRAP(TrapReason::kTrapFloatUnrepresentable)                             \
    }                                                                         \
    to_ctype val = static_cast<from_ctype>(from_reg);                         \
    push<to_ctype>(sp, code, wasm_runtime, val);                              \
    NextOp();                                                                 \
  }                                                                           \
                                                                              \
  INSTRUCTION_HANDLER_FUNC s2r_##name(const uint8_t* code, uint32_t* sp,      \
                                      WasmInterpreterRuntime* wasm_runtime,   \
                                      int64_t r0, double fp0) {               \
    from_ctype from_val = pop<from_ctype>(sp, code, wasm_runtime);            \
    if (!is_inbounds<to_ctype>(from_val)) {                                   \
      TRAP(TrapReason::kTrapFloatUnrepresentable)                             \
    }                                                                         \
    to_reg = static_cast<to_ctype>(from_val);                                 \
    NextOp();                                                                 \
  }                                                                           \
                                                                              \
  INSTRUCTION_HANDLER_FUNC s2s_##name(const uint8_t* code, uint32_t* sp,      \
                                      WasmInterpreterRuntime* wasm_runtime,   \
                                      int64_t r0, double fp0) {               \
    from_ctype from_val = pop<from_ctype>(sp, code, wasm_runtime);            \
    if (!is_inbounds<to_ctype>(from_val)) {                                   \
      TRAP(TrapReason::kTrapFloatUnrepresentable)                             \
    }                                                                         \
    to_ctype val = static_cast<to_ctype>(from_val);                           \
    push<to_ctype>(sp, code, wasm_runtime, val);                              \
    NextOp();                                                                 \
  }
#define DEFINE_UNOP(name, from_ctype, from_type, from_reg, to_ctype, to_type, \
                    to_reg)                                                   \
  INSTRUCTION_HANDLER_FUNC r2r_##name(const uint8_t* code, uint32_t* sp,      \
                                      WasmInterpreterRuntime* wasm_runtime,   \
                                      int64_t r0, double fp0) {               \
    to_reg = static_cast<to_ctype>(static_cast<from_ctype>(from_reg));        \
    NextOp();                                                                 \
  }                                                                           \
                                                                              \
  INSTRUCTION_HANDLER_FUNC r2s_##name(const uint8_t* code, uint32_t* sp,      \
                                      WasmInterpreterRuntime* wasm_runtime,   \
                                      int64_t r0, double fp0) {               \
    to_ctype val = static_cast<from_ctype>(from_reg);                         \
    push<to_ctype>(sp, code, wasm_runtime, val);                              \
    NextOp();                                                                 \
  }                                                                           \
                                                                              \
  INSTRUCTION_HANDLER_FUNC s2r_##name(const uint8_t* code, uint32_t* sp,      \
                                      WasmInterpreterRuntime* wasm_runtime,   \
                                      int64_t r0, double fp0) {               \
    to_reg = static_cast<to_ctype>(pop<from_ctype>(sp, code, wasm_runtime));  \
    NextOp();                                                                 \
  }                                                                           \
                                                                              \
  INSTRUCTION_HANDLER_FUNC s2s_##name(const uint8_t* code, uint32_t* sp,      \
                                      WasmInterpreterRuntime* wasm_runtime,   \
                                      int64_t r0, double fp0) {               \
    to_ctype val = pop<from_ctype>(sp, code, wasm_runtime);                   \
    push<to_ctype>(sp, code, wasm_runtime, val);                              \
    NextOp();                                                                 \
  }
#define FOREACH_REINTERPRET_UNOP(V)                        \
  V(F32ReinterpretI32, int32_t, I32, r0, float, F32, fp0)  \
  V(F64ReinterpretI64, int64_t, I64, r0, double, F64, fp0) \
  V(I32ReinterpretF32, float, F32, fp0, int32_t, I32, r0)  \
  V(I64ReinterpretF64, double, F64, fp0, int64_t, I64, r0)
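// Reinterpret ops copy raw bits between the integer and float domains
// without any numeric conversion. Going through ReadUnalignedValue on the
// address of the source value is effectively a memcpy of the bit pattern,
// which avoids the strict-aliasing pitfalls of casting the lvalue directly.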
#define DEFINE_UNOP(name, from_ctype, from_type, from_reg, to_ctype, to_type,  \
                    to_reg)                                                    \
  INSTRUCTION_HANDLER_FUNC r2r_##name(const uint8_t* code, uint32_t* sp,       \
                                      WasmInterpreterRuntime* wasm_runtime,    \
                                      int64_t r0, double fp0) {                \
    from_ctype value = static_cast<from_ctype>(from_reg);                      \
    to_reg =                                                                   \
        base::ReadUnalignedValue<to_ctype>(reinterpret_cast<Address>(&value)); \
    NextOp();                                                                  \
  }                                                                            \
                                                                               \
  INSTRUCTION_HANDLER_FUNC r2s_##name(const uint8_t* code, uint32_t* sp,       \
                                      WasmInterpreterRuntime* wasm_runtime,    \
                                      int64_t r0, double fp0) {                \
    from_ctype val = static_cast<from_ctype>(from_reg);                        \
    push<to_ctype>(                                                            \
        sp, code, wasm_runtime,                                                \
        base::ReadUnalignedValue<to_ctype>(reinterpret_cast<Address>(&val)));  \
    NextOp();                                                                  \
  }                                                                            \
                                                                               \
  INSTRUCTION_HANDLER_FUNC s2r_##name(const uint8_t* code, uint32_t* sp,       \
                                      WasmInterpreterRuntime* wasm_runtime,    \
                                      int64_t r0, double fp0) {                \
    from_ctype val = pop<from_ctype>(sp, code, wasm_runtime);                  \
    to_reg =                                                                   \
        base::ReadUnalignedValue<to_ctype>(reinterpret_cast<Address>(&val));   \
    NextOp();                                                                  \
  }                                                                            \
                                                                               \
  INSTRUCTION_HANDLER_FUNC s2s_##name(const uint8_t* code, uint32_t* sp,       \
                                      WasmInterpreterRuntime* wasm_runtime,    \
                                      int64_t r0, double fp0) {                \
    from_ctype val = pop<from_ctype>(sp, code, wasm_runtime);                  \
    push<to_ctype>(                                                            \
        sp, code, wasm_runtime,                                                \
        base::ReadUnalignedValue<to_ctype>(reinterpret_cast<Address>(&val)));  \
    NextOp();                                                                  \
  }
FOREACH_REINTERPRET_UNOP(DEFINE_UNOP)
#undef DEFINE_UNOP
#define FOREACH_BITS_UNOP(V)                                                  \
  V(I32Clz, uint32_t, I32, uint32_t, I32, base::bits::CountLeadingZeros(val)) \
  V(I32Ctz, uint32_t, I32, uint32_t, I32, base::bits::CountTrailingZeros(val)) \
  V(I32Popcnt, uint32_t, I32, uint32_t, I32, base::bits::CountPopulation(val)) \
  V(I32Eqz, uint32_t, I32, int32_t, I32, val == 0 ? 1 : 0)                    \
  V(I64Clz, uint64_t, I64, uint64_t, I64, base::bits::CountLeadingZeros(val)) \
  V(I64Ctz, uint64_t, I64, uint64_t, I64, base::bits::CountTrailingZeros(val)) \
  V(I64Popcnt, uint64_t, I64, uint64_t, I64, base::bits::CountPopulation(val)) \
  V(I64Eqz, uint64_t, I64, int32_t, I32, val == 0 ? 1 : 0)
#define DEFINE_REG_BINOP(name, from_ctype, from_type, to_ctype, to_type, op) \
  INSTRUCTION_HANDLER_FUNC r2r_##name(const uint8_t* code, uint32_t* sp,     \
                                      WasmInterpreterRuntime* wasm_runtime,  \
                                      int64_t r0, double fp0) {              \
    from_ctype val = static_cast<from_ctype>(r0);                            \
    r0 = static_cast<to_ctype>(op);                                          \
    NextOp();                                                                \
  }                                                                          \
                                                                             \
  INSTRUCTION_HANDLER_FUNC r2s_##name(const uint8_t* code, uint32_t* sp,     \
                                      WasmInterpreterRuntime* wasm_runtime,  \
                                      int64_t r0, double fp0) {              \
    from_ctype val = static_cast<from_ctype>(r0);                            \
    push<to_ctype>(sp, code, wasm_runtime, op);                              \
    NextOp();                                                                \
  }                                                                          \
                                                                             \
  INSTRUCTION_HANDLER_FUNC s2r_##name(const uint8_t* code, uint32_t* sp,     \
                                      WasmInterpreterRuntime* wasm_runtime,  \
                                      int64_t r0, double fp0) {              \
    from_ctype val = pop<from_ctype>(sp, code, wasm_runtime);                \
    r0 = static_cast<to_ctype>(op);                                          \
    NextOp();                                                                \
  }                                                                          \
                                                                             \
  INSTRUCTION_HANDLER_FUNC s2s_##name(const uint8_t* code, uint32_t* sp,     \
                                      WasmInterpreterRuntime* wasm_runtime,  \
                                      int64_t r0, double fp0) {              \
    from_ctype val = pop<from_ctype>(sp, code, wasm_runtime);                \
    push<to_ctype>(sp, code, wasm_runtime, op);                              \
    NextOp();                                                                \
  }
FOREACH_BITS_UNOP(DEFINE_REG_BINOP)
#undef DEFINE_REG_BINOP
#define FOREACH_EXTENSION_UNOP(V)          \
  V(I32SExtendI8, int8_t, I32, int32_t, I32)   \
  V(I32SExtendI16, int16_t, I32, int32_t, I32) \
  V(I64SExtendI8, int8_t, I64, int64_t, I64)   \
  V(I64SExtendI16, int16_t, I64, int64_t, I64) \
  V(I64SExtendI32, int32_t, I64, int64_t, I64)
#define DEFINE_UNOP(name, from_ctype, from_type, to_ctype, to_type)        \
  INSTRUCTION_HANDLER_FUNC r2r_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    from_ctype val = static_cast<from_ctype>(static_cast<to_ctype>(r0));   \
    r0 = static_cast<to_ctype>(val);                                       \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC r2s_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    from_ctype val = static_cast<from_ctype>(static_cast<to_ctype>(r0));   \
    push<to_ctype>(sp, code, wasm_runtime, val);                           \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC s2r_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    from_ctype val =                                                       \
        static_cast<from_ctype>(pop<to_ctype>(sp, code, wasm_runtime));    \
    r0 = static_cast<to_ctype>(val);                                       \
    NextOp();                                                              \
  }                                                                        \
                                                                           \
  INSTRUCTION_HANDLER_FUNC s2s_##name(const uint8_t* code, uint32_t* sp,   \
                                      WasmInterpreterRuntime* wasm_runtime, \
                                      int64_t r0, double fp0) {            \
    from_ctype val =                                                       \
        static_cast<from_ctype>(pop<to_ctype>(sp, code, wasm_runtime));    \
    push<to_ctype>(sp, code, wasm_runtime, val);                           \
    NextOp();                                                              \
  }
FOREACH_EXTENSION_UNOP(DEFINE_UNOP)
#undef DEFINE_UNOP
#define FOREACH_TRUNCSAT_UNOP(V)                           \
  V(I32SConvertSatF32, float, F32, fp0, int32_t, I32, r0)  \
  V(I32UConvertSatF32, float, F32, fp0, uint32_t, I32, r0) \
  V(I32SConvertSatF64, double, F64, fp0, int32_t, I32, r0) \
  V(I32UConvertSatF64, double, F64, fp0, uint32_t, I32, r0) \
  V(I64SConvertSatF32, float, F32, fp0, int64_t, I64, r0)  \
  V(I64UConvertSatF32, float, F32, fp0, uint64_t, I64, r0) \
  V(I64SConvertSatF64, double, F64, fp0, int64_t, I64, r0) \
  V(I64UConvertSatF64, double, F64, fp0, uint64_t, I64, r0)
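// The saturating (.sat) conversions never trap: base::saturated_cast clamps
// out-of-range inputs to the target type's min/max and maps NaN to 0, which
// matches the Wasm nontrapping float-to-int semantics, so these handlers
// need no range check and no TRAP path.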
#define DEFINE_UNOP(name, from_ctype, from_type, from_reg, to_ctype, to_type, \
                    to_reg)                                                   \
  INSTRUCTION_HANDLER_FUNC r2r_##name(const uint8_t* code, uint32_t* sp,      \
                                      WasmInterpreterRuntime* wasm_runtime,   \
                                      int64_t r0, double fp0) {               \
    to_reg =                                                                  \
        base::saturated_cast<to_ctype>(static_cast<from_ctype>(from_reg));    \
    NextOp();                                                                 \
  }                                                                           \
                                                                              \
  INSTRUCTION_HANDLER_FUNC r2s_##name(const uint8_t* code, uint32_t* sp,      \
                                      WasmInterpreterRuntime* wasm_runtime,   \
                                      int64_t r0, double fp0) {               \
    to_ctype val =                                                            \
        base::saturated_cast<to_ctype>(static_cast<from_ctype>(from_reg));    \
    push<to_ctype>(sp, code, wasm_runtime, val);                              \
    NextOp();                                                                 \
  }                                                                           \
                                                                              \
  INSTRUCTION_HANDLER_FUNC s2r_##name(const uint8_t* code, uint32_t* sp,      \
                                      WasmInterpreterRuntime* wasm_runtime,   \
                                      int64_t r0, double fp0) {               \
    to_reg = base::saturated_cast<to_ctype>(                                  \
        pop<from_ctype>(sp, code, wasm_runtime));                             \
    NextOp();                                                                 \
  }                                                                           \
                                                                              \
  INSTRUCTION_HANDLER_FUNC s2s_##name(const uint8_t* code, uint32_t* sp,      \
                                      WasmInterpreterRuntime* wasm_runtime,   \
                                      int64_t r0, double fp0) {               \
    to_ctype val = base::saturated_cast<to_ctype>(                            \
        pop<from_ctype>(sp, code, wasm_runtime));                             \
    push<to_ctype>(sp, code, wasm_runtime, val);                              \
    NextOp();                                                                 \
  }
FOREACH_TRUNCSAT_UNOP(DEFINE_UNOP)
#undef DEFINE_UNOP
                                     int64_t r0, double fp0) {
                                     int64_t r0, double fp0) {
  if (delta_pages <= std::numeric_limits<uint32_t>::max()) {
                                     int64_t r0, double fp0) {
                                     int64_t r0, double fp0) {
                                     int64_t r0, double fp0) {
                                     int64_t r0, double fp0) {
                                     int64_t r0, double fp0) {
                                     int64_t r0, double fp0) {
                                     int64_t r0, double fp0) {
                                     int64_t r0, double fp0) {
                                     int64_t r0, double fp0) {
                                     int64_t r0, double fp0) {
                                     int64_t r0, double fp0) {
                                     int64_t r0, double fp0) {
                                     int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution) {
  wasm_runtime->ExecuteFunction(code, function_index, stack_pos,
                                ref_stack_fp_offset, slot_offset,
                                return_slot_offset);
                                     int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution) {
  wasm_runtime->UnwindCurrentStackFrame(sp, slot_offset, rets_size, args_size,
                                        rets_refs, args_refs,
                                        ref_stack_fp_offset);
  wasm_runtime->PrepareTailCall(code, function_index, stack_pos,
                                return_slot_offset);
                                     int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution) {
  wasm_runtime->ExecuteImportedFunction(code, function_index, stack_pos,
                                        ref_stack_fp_offset, slot_offset,
                                        return_slot_offset);
                                     int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution) {
  wasm_runtime->UnwindCurrentStackFrame(sp, slot_offset, rets_size, args_size,
                                        rets_refs, args_refs,
                                        ref_stack_fp_offset);
  wasm_runtime->ExecuteImportedFunction(code, function_index, stack_pos, 0, 0,
                                        return_slot_offset, true);
                                     int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution) {
  wasm_runtime->ExecuteIndirectCall(code, table_index, sig_index, entry_index,
                                    stack_pos, sp, ref_stack_fp_offset,
                                    slot_offset, return_slot_offset, false);
                                     int64_t r0, double fp0) {
  if (entry_index_64 > std::numeric_limits<uint32_t>::max()) {
    TRAP(TrapReason::kTrapTableOutOfBounds)
  }
  uint32_t entry_index = static_cast<uint32_t>(entry_index_64);
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution) {
  wasm_runtime->ExecuteIndirectCall(code, table_index, sig_index, entry_index,
                                    stack_pos, sp, ref_stack_fp_offset,
                                    slot_offset, return_slot_offset, false);
                                     int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution) {
  wasm_runtime->UnwindCurrentStackFrame(sp, slot_offset, rets_size, args_size,
                                        rets_refs, args_refs,
                                        ref_stack_fp_offset);
  wasm_runtime->ExecuteIndirectCall(code, table_index, sig_index, entry_index,
                                    stack_pos, sp, 0, 0, return_slot_offset,
                                     int64_t r0, double fp0) {
  if (entry_index_64 > std::numeric_limits<uint32_t>::max()) {
    TRAP(TrapReason::kTrapTableOutOfBounds)
  }
  uint32_t entry_index = static_cast<uint32_t>(entry_index_64);
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution) {
  wasm_runtime->UnwindCurrentStackFrame(sp, slot_offset, rets_size, args_size,
                                        rets_refs, args_refs,
                                        ref_stack_fp_offset);
  wasm_runtime->ExecuteIndirectCall(code, table_index, sig_index, entry_index,
                                    stack_pos, sp, 0, 0, return_slot_offset,
                                     int64_t r0, double fp0) {
  uint32_t cond = static_cast<int32_t>(r0);
  uint32_t index = cond < table_length ? cond : table_length;
                                     int64_t r0, double fp0) {
  uint32_t index = cond < table_length ? cond : table_length;
                                     int64_t r0, double fp0) {
  DCHECK(params_count > 1 && params_count < 32);
  for (uint32_t i = 0; i < params_count; i++) {
    bool is_64 = arg_size_mask & (1 << i);
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
    if (v8_flags.trace_drumbrake_execution &&
        v8_flags.trace_drumbrake_execution_verbose) {
      wasm_runtime->Trace("COPYSLOT64 %d %d %" PRIx64 "\n", from, to,
    to += sizeof(uint64_t) / sizeof(uint32_t);
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
    if (v8_flags.trace_drumbrake_execution &&
        v8_flags.trace_drumbrake_execution_verbose) {
      wasm_runtime->Trace("COPYSLOT32 %d %d %08x\n", from, to,
    to += sizeof(uint32_t) / sizeof(uint32_t);
                                     int64_t r0, double fp0) {
                           reinterpret_cast<Address>(sp + from0)));
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("COPYSLOT32 %d %d %08x\n", from0, to,
  to += sizeof(uint32_t) / sizeof(uint32_t);
                           reinterpret_cast<Address>(sp + from1)));
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("COPYSLOT32 %d %d %08x\n", from1, to,
                                     int64_t r0, double fp0) {
                           reinterpret_cast<Address>(sp + from0)));
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("COPYSLOT64 %d %d %" PRIx64 "\n", from0, to,
  to += sizeof(uint64_t) / sizeof(uint32_t);
                           reinterpret_cast<Address>(sp + from1)));
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("COPYSLOT32 %d %d %08x\n", from1, to,
                                     int64_t r0, double fp0) {
                           reinterpret_cast<Address>(sp + from0)));
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("COPYSLOT32 %d %d %08x\n", from0, to,
  to += sizeof(uint32_t) / sizeof(uint32_t);
                           reinterpret_cast<Address>(sp + from1)));
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("COPYSLOT64 %d %d %" PRIx64 "\n", from1, to,
                                     int64_t r0, double fp0) {
                           reinterpret_cast<Address>(sp + from0)));
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("COPYSLOT64 %d %d %" PRIx64 "\n", from0, to,
  to += sizeof(uint64_t) / sizeof(uint32_t);
                           reinterpret_cast<Address>(sp + from1)));
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("COPYSLOT64 %d %d %" PRIx64 "\n", from1, to,
                                     int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("COPYSLOT32 %d %d %08x\n", from, to,
                                     int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("COPYSLOT32 %d %d %08x\n", from, to,
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("COPYSLOT32 %d %d %08x\n", from, to,
                                     int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("COPYSLOT64 %d %d %" PRIx64 "\n", from, to,
                                     int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
        "COPYSLOT128 %d %d %" PRIx64 "`%" PRIx64 "\n", from, to,
        reinterpret_cast<Address>(sp + to + sizeof(uint64_t))));
                                     int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("COPYSLOT64 %d %d %" PRIx64 "\n", from, to,
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("COPYSLOT64 %d %d %" PRIx64 "\n", from, to,
                                     int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
                                     int64_t r0, double fp0) {
                           reinterpret_cast<Address>(sp + preserve),
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("PRESERVECOPYSLOT32 %d %d %08x\n", from, to,
                                     int64_t r0, double fp0) {
                           reinterpret_cast<Address>(sp + preserve),
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("PRESERVECOPYSLOT64 %d %d %" PRIx64 "\n", from, to,
                                     int64_t r0, double fp0) {
                           reinterpret_cast<Address>(sp + preserve),
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
        "PRESERVECOPYSLOT64 %d %d %" PRIx64 "`%" PRIx64 "\n", from, to,
        reinterpret_cast<Address>(sp + to + sizeof(uint64_t))));
                                     int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
                                     int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("COPYR0TOSLOT64 %d %" PRIx64 "\n", to,
                                     int64_t r0, double fp0) {
                             static_cast<float>(fp0));
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
                                     int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("COPYFP0TOSLOT64 %d %" PRIx64 "\n", to,
3730 int64_t
r0,
double fp0) {
3734 reinterpret_cast<Address>(
sp + preserve),
3739#ifdef V8_ENABLE_DRUMBRAKE_TRACING
3740 if (
v8_flags.trace_drumbrake_execution &&
3741 v8_flags.trace_drumbrake_execution_verbose) {
3742 wasm_runtime->Trace(
"PRESERVECOPYR0TOSLOT32 %d %d %08x\n", to, preserve,
3753 int64_t
r0,
double fp0) {
3757 reinterpret_cast<Address>(
sp + preserve),
3761#ifdef V8_ENABLE_DRUMBRAKE_TRACING
3762 if (
v8_flags.trace_drumbrake_execution &&
3763 v8_flags.trace_drumbrake_execution_verbose) {
3764 wasm_runtime->Trace(
"PRESERVECOPYR0TOSLOT64 %d %d %" PRIx64
"\n", to,
3776 int64_t
r0,
double fp0) {
3780 reinterpret_cast<Address>(
sp + preserve),
3783 static_cast<float>(
fp0));
3785#ifdef V8_ENABLE_DRUMBRAKE_TRACING
3786 if (
v8_flags.trace_drumbrake_execution &&
3787 v8_flags.trace_drumbrake_execution_verbose) {
3788 wasm_runtime->Trace(
"PRESERVECOPYFP0TOSLOT32 %d %d %08x\n", to, preserve,
                                       int64_t r0, double fp0) {
                reinterpret_cast<Address>(sp + preserve),
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    wasm_runtime->Trace("PRESERVECOPYFP0TOSLOT64 %d %d %" PRIx64 "\n", to,
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
  wasm_runtime->MemoryInit(code, data_segment_index, dst, src, size);
                                       int64_t r0, double fp0) {
  wasm_runtime->MemoryInit(code, data_segment_index, dst, src, size);
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
  if (wasm_runtime->TableGet(code, table_index, entry_index, &ref)) {
                                       int64_t r0, double fp0) {
  if (entry_index_64 > std::numeric_limits<uint32_t>::max()) {
    TRAP(TrapReason::kTrapTableOutOfBounds)
  }
  uint32_t entry_index = static_cast<uint32_t>(entry_index_64);
  if (wasm_runtime->TableGet(code, table_index, entry_index, &ref)) {
                                       int64_t r0, double fp0) {
  wasm_runtime->TableSet(code, table_index, entry_index, ref);
                                       int64_t r0, double fp0) {
  if (entry_index_64 > std::numeric_limits<uint32_t>::max()) {
    TRAP(TrapReason::kTrapTableOutOfBounds)
  }
  uint32_t entry_index = static_cast<uint32_t>(entry_index_64);
  wasm_runtime->TableSet(code, table_index, entry_index, ref);
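// Added illustration (not part of the original source): the table64
// variants above pop a 64-bit entry index and clamp it to 32 bits before
// indexing, trapping when it does not fit. A minimal standalone sketch
// of that clamping step, with hypothetical names (DemoClampTableIndex):
static inline bool DemoClampTableIndex(uint64_t entry_index_64,
                                       uint32_t* entry_index) {
  if (entry_index_64 > std::numeric_limits<uint32_t>::max()) {
    return false;  // caller raises kTrapTableOutOfBounds
  }
  *entry_index = static_cast<uint32_t>(entry_index_64);
  return true;
}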
                                       int64_t r0, double fp0) {
  wasm_runtime->TableInit(code, table_index, element_segment_index, dst, src,
                                       int64_t r0, double fp0) {
  if (dst_64 > std::numeric_limits<uint32_t>::max()) {
    TRAP(TrapReason::kTrapTableOutOfBounds)
  }
  uint32_t dst = static_cast<uint32_t>(dst_64);
  wasm_runtime->TableInit(code, table_index, element_segment_index, dst, src,
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
  wasm_runtime->TableCopy(code, dst_table_index, src_table_index, dst, src,
template <typename IntN, typename IntM, typename IntK>
                                       int64_t r0, double fp0) {
  if (src_64 > std::numeric_limits<uint32_t>::max() ||
      dst_64 > std::numeric_limits<uint32_t>::max() ||
      size_64 > std::numeric_limits<uint32_t>::max()) {
    TRAP(TrapReason::kTrapTableOutOfBounds)
  }
  uint32_t size = static_cast<uint32_t>(size_64);
  uint32_t src = static_cast<uint32_t>(src_64);
  uint32_t dst = static_cast<uint32_t>(dst_64);
  wasm_runtime->TableCopy(code, dst_table_index, src_table_index, dst, src,
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
  if (delta_64 > std::numeric_limits<uint32_t>::max()) {
  uint32_t delta = static_cast<uint32_t>(delta_64);
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
  if (count_64 > std::numeric_limits<uint32_t>::max() ||
      start_64 > std::numeric_limits<uint32_t>::max()) {
    TRAP(TrapReason::kTrapTableOutOfBounds)
  }
  uint32_t count = static_cast<uint32_t>(count_64);
  uint32_t start = static_cast<uint32_t>(start_64);
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
template <typename MemIdx = uint32_t, typename MemOffsetT = memory_offset32_t>
                                       int64_t r0, double fp0) {
  const uint32_t align_mask = sizeof(int32_t) - 1;
  if (V8_UNLIKELY((effective_index & align_mask) != 0)) {
    TRAP(TrapReason::kTrapUnalignedAccess)
  }
          effective_index < index ||
    TRAP(TrapReason::kTrapMemOutOfBounds)
template <typename MemIdx = uint32_t, typename MemOffsetT = memory_offset32_t>
                                       int64_t r0, double fp0) {
  const uint32_t align_mask = sizeof(int32_t) - 1;
  if (V8_UNLIKELY((effective_index & align_mask) != 0)) {
    TRAP(TrapReason::kTrapUnalignedAccess)
  }
          effective_index < index ||
    TRAP(TrapReason::kTrapMemOutOfBounds)
  TRAP(TrapReason::kTrapUnreachable)
template <typename MemIdx = uint32_t, typename MemOffsetT = memory_offset32_t>
                                       int64_t r0, double fp0) {
  const uint32_t align_mask = sizeof(int64_t) - 1;
  if (V8_UNLIKELY((effective_index & align_mask) != 0)) {
    TRAP(TrapReason::kTrapUnalignedAccess)
  }
          effective_index < index ||
    TRAP(TrapReason::kTrapMemOutOfBounds)
  TRAP(TrapReason::kTrapUnreachable)
                                       int64_t r0, double fp0) {
  std::atomic_thread_fence(std::memory_order_seq_cst);
#define FOREACH_ATOMIC_BINOP(V) \
  V(I32AtomicAdd, Uint32, uint32_t, I32, uint32_t, I32, std::atomic_fetch_add) \
  V(I32AtomicAdd8U, Uint8, uint8_t, I32, uint32_t, I32, std::atomic_fetch_add) \
  V(I32AtomicAdd16U, Uint16, uint16_t, I32, uint32_t, I32, \
    std::atomic_fetch_add) \
  V(I32AtomicSub, Uint32, uint32_t, I32, uint32_t, I32, std::atomic_fetch_sub) \
  V(I32AtomicSub8U, Uint8, uint8_t, I32, uint32_t, I32, std::atomic_fetch_sub) \
  V(I32AtomicSub16U, Uint16, uint16_t, I32, uint32_t, I32, \
    std::atomic_fetch_sub) \
  V(I32AtomicAnd, Uint32, uint32_t, I32, uint32_t, I32, std::atomic_fetch_and) \
  V(I32AtomicAnd8U, Uint8, uint8_t, I32, uint32_t, I32, std::atomic_fetch_and) \
  V(I32AtomicAnd16U, Uint16, uint16_t, I32, uint32_t, I32, \
    std::atomic_fetch_and) \
  V(I32AtomicOr, Uint32, uint32_t, I32, uint32_t, I32, std::atomic_fetch_or) \
  V(I32AtomicOr8U, Uint8, uint8_t, I32, uint32_t, I32, std::atomic_fetch_or) \
  V(I32AtomicOr16U, Uint16, uint16_t, I32, uint32_t, I32, \
    std::atomic_fetch_or) \
  V(I32AtomicXor, Uint32, uint32_t, I32, uint32_t, I32, std::atomic_fetch_xor) \
  V(I32AtomicXor8U, Uint8, uint8_t, I32, uint32_t, I32, std::atomic_fetch_xor) \
  V(I32AtomicXor16U, Uint16, uint16_t, I32, uint32_t, I32, \
    std::atomic_fetch_xor) \
  V(I32AtomicExchange, Uint32, uint32_t, I32, uint32_t, I32, \
    std::atomic_exchange) \
  V(I32AtomicExchange8U, Uint8, uint8_t, I32, uint32_t, I32, \
    std::atomic_exchange) \
  V(I32AtomicExchange16U, Uint16, uint16_t, I32, uint32_t, I32, \
    std::atomic_exchange) \
  V(I64AtomicAdd, Uint64, uint64_t, I64, uint64_t, I64, std::atomic_fetch_add) \
  V(I64AtomicAdd8U, Uint8, uint8_t, I32, uint64_t, I64, std::atomic_fetch_add) \
  V(I64AtomicAdd16U, Uint16, uint16_t, I32, uint64_t, I64, \
    std::atomic_fetch_add) \
  V(I64AtomicAdd32U, Uint32, uint32_t, I32, uint64_t, I64, \
    std::atomic_fetch_add) \
  V(I64AtomicSub, Uint64, uint64_t, I64, uint64_t, I64, std::atomic_fetch_sub) \
  V(I64AtomicSub8U, Uint8, uint8_t, I32, uint64_t, I64, std::atomic_fetch_sub) \
  V(I64AtomicSub16U, Uint16, uint16_t, I32, uint64_t, I64, \
    std::atomic_fetch_sub) \
  V(I64AtomicSub32U, Uint32, uint32_t, I32, uint64_t, I64, \
    std::atomic_fetch_sub) \
  V(I64AtomicAnd, Uint64, uint64_t, I64, uint64_t, I64, std::atomic_fetch_and) \
  V(I64AtomicAnd8U, Uint8, uint8_t, I32, uint64_t, I64, std::atomic_fetch_and) \
  V(I64AtomicAnd16U, Uint16, uint16_t, I32, uint64_t, I64, \
    std::atomic_fetch_and) \
  V(I64AtomicAnd32U, Uint32, uint32_t, I32, uint64_t, I64, \
    std::atomic_fetch_and) \
  V(I64AtomicOr, Uint64, uint64_t, I64, uint64_t, I64, std::atomic_fetch_or) \
  V(I64AtomicOr8U, Uint8, uint8_t, I32, uint64_t, I64, std::atomic_fetch_or) \
  V(I64AtomicOr16U, Uint16, uint16_t, I32, uint64_t, I64, \
    std::atomic_fetch_or) \
  V(I64AtomicOr32U, Uint32, uint32_t, I32, uint64_t, I64, \
    std::atomic_fetch_or) \
  V(I64AtomicXor, Uint64, uint64_t, I64, uint64_t, I64, std::atomic_fetch_xor) \
  V(I64AtomicXor8U, Uint8, uint8_t, I32, uint64_t, I64, std::atomic_fetch_xor) \
  V(I64AtomicXor16U, Uint16, uint16_t, I32, uint64_t, I64, \
    std::atomic_fetch_xor) \
  V(I64AtomicXor32U, Uint32, uint32_t, I32, uint64_t, I64, \
    std::atomic_fetch_xor) \
  V(I64AtomicExchange, Uint64, uint64_t, I64, uint64_t, I64, \
    std::atomic_exchange) \
  V(I64AtomicExchange8U, Uint8, uint8_t, I32, uint64_t, I64, \
    std::atomic_exchange) \
  V(I64AtomicExchange16U, Uint16, uint16_t, I32, uint64_t, I64, \
    std::atomic_exchange) \
  V(I64AtomicExchange32U, Uint32, uint32_t, I32, uint64_t, I64, \
    std::atomic_exchange)
#define ATOMIC_BINOP(name, Type, ctype, type, op_ctype, op_type, operation) \
  template <typename MemIdx, typename MemOffsetT> \
  INSTRUCTION_HANDLER_FUNC s2s_##name##I(const uint8_t* code, uint32_t* sp, \
                                         WasmInterpreterRuntime* wasm_runtime, \
                                         int64_t r0, double fp0) { \
    ctype val = static_cast<ctype>(pop<op_ctype>(sp, code, wasm_runtime)); \
    uint64_t offset = Read<MemOffsetT>(code); \
    uint64_t index = pop<MemIdx>(sp, code, wasm_runtime); \
    uint64_t effective_index = offset + index; \
    if (V8_UNLIKELY(!IsAligned(effective_index, sizeof(ctype)))) { \
      TRAP(TrapReason::kTrapUnalignedAccess) \
    } \
    if (V8_UNLIKELY( \
            effective_index < index || \
            !base::IsInBounds<uint64_t>(effective_index, sizeof(ctype), \
                                        wasm_runtime->GetMemorySize()))) { \
      TRAP(TrapReason::kTrapMemOutOfBounds) \
    } \
    static_assert(sizeof(std::atomic<ctype>) == sizeof(ctype), \
                  "Size mismatch for types std::atomic<" #ctype \
                  ">, and " #ctype); \
    uint8_t* memory_start = wasm_runtime->GetMemoryStart(); \
    uint8_t* address = memory_start + effective_index; \
    op_ctype result = static_cast<op_ctype>( \
        operation(reinterpret_cast<std::atomic<ctype>*>(address), val)); \
    push<op_ctype>(sp, code, wasm_runtime, result); \
    NextOp(); \
  } \
  static auto constexpr s2s_##name = \
      s2s_##name##I<uint32_t, memory_offset32_t>; \
  static auto constexpr s2s_##name##_Idx64 = \
      s2s_##name##I<uint64_t, memory_offset64_t>;
FOREACH_ATOMIC_BINOP(ATOMIC_BINOP)
#undef ATOMIC_BINOP
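// Added illustration (not from the V8 sources): what one ATOMIC_BINOP
// expansion boils down to, on a plain buffer and with hypothetical names
// (DemoAtomicAdd32). The alignment, overflow, and bounds checks mirror
// the macro above; the demo aborts instead of trapping, since there is
// no interpreter frame to unwind. Assumes <atomic>, <cstdint> and
// <cstdlib> are available, as the handlers above already rely on them.
static inline uint32_t DemoAtomicAdd32(uint8_t* mem, uint64_t mem_size,
                                       uint64_t offset, uint64_t index,
                                       uint32_t val) {
  uint64_t effective_index = offset + index;
  if (effective_index % sizeof(uint32_t) != 0) std::abort();  // unaligned
  if (effective_index < index ||  // offset + index overflowed
      effective_index + sizeof(uint32_t) > mem_size) {
    std::abort();  // out of bounds
  }
  static_assert(sizeof(std::atomic<uint32_t>) == sizeof(uint32_t));
  // Like I32AtomicAdd: returns the value stored before the addition.
  return std::atomic_fetch_add(
      reinterpret_cast<std::atomic<uint32_t>*>(mem + effective_index), val);
}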
#define FOREACH_ATOMIC_COMPARE_EXCHANGE_OP(V) \
  V(I32AtomicCompareExchange, Uint32, uint32_t, I32, uint32_t, I32) \
  V(I32AtomicCompareExchange8U, Uint8, uint8_t, I32, uint32_t, I32) \
  V(I32AtomicCompareExchange16U, Uint16, uint16_t, I32, uint32_t, I32) \
  V(I64AtomicCompareExchange, Uint64, uint64_t, I64, uint64_t, I64) \
  V(I64AtomicCompareExchange8U, Uint8, uint8_t, I32, uint64_t, I64) \
  V(I64AtomicCompareExchange16U, Uint16, uint16_t, I32, uint64_t, I64) \
  V(I64AtomicCompareExchange32U, Uint32, uint32_t, I32, uint64_t, I64)

#define ATOMIC_COMPARE_EXCHANGE_OP(name, Type, ctype, type, op_ctype, op_type) \
  template <typename MemIdx = uint32_t, typename MemOffsetT> \
  INSTRUCTION_HANDLER_FUNC s2s_##name##I(const uint8_t* code, uint32_t* sp, \
                                         WasmInterpreterRuntime* wasm_runtime, \
                                         int64_t r0, double fp0) { \
    ctype new_val = static_cast<ctype>(pop<op_ctype>(sp, code, wasm_runtime)); \
    ctype old_val = static_cast<ctype>(pop<op_ctype>(sp, code, wasm_runtime)); \
    uint64_t offset = Read<MemOffsetT>(code); \
    uint64_t index = pop<MemIdx>(sp, code, wasm_runtime); \
    uint64_t effective_index = offset + index; \
    if (V8_UNLIKELY(!IsAligned(effective_index, sizeof(ctype)))) { \
      TRAP(TrapReason::kTrapUnalignedAccess) \
    } \
    if (V8_UNLIKELY( \
            effective_index < index || \
            !base::IsInBounds<uint64_t>(effective_index, sizeof(ctype), \
                                        wasm_runtime->GetMemorySize()))) { \
      TRAP(TrapReason::kTrapMemOutOfBounds) \
    } \
    static_assert(sizeof(std::atomic<ctype>) == sizeof(ctype), \
                  "Size mismatch for types std::atomic<" #ctype \
                  ">, and " #ctype); \
    uint8_t* memory_start = wasm_runtime->GetMemoryStart(); \
    uint8_t* address = memory_start + effective_index; \
    std::atomic_compare_exchange_strong( \
        reinterpret_cast<std::atomic<ctype>*>(address), &old_val, new_val); \
    push<op_ctype>(sp, code, wasm_runtime, static_cast<op_ctype>(old_val)); \
    NextOp(); \
  } \
  static auto constexpr s2s_##name = \
      s2s_##name##I<uint32_t, memory_offset32_t>; \
  static auto constexpr s2s_##name##_Idx64 = \
      s2s_##name##I<uint64_t, memory_offset64_t>;
FOREACH_ATOMIC_COMPARE_EXCHANGE_OP(ATOMIC_COMPARE_EXCHANGE_OP)
#undef ATOMIC_COMPARE_EXCHANGE_OP
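// Added illustration (not from the V8 sources): the compare-exchange
// handlers push the value observed in memory whether or not the exchange
// happened. That falls out of std::atomic_compare_exchange_strong, which
// rewrites `expected` with the observed value on failure, and leaves it
// equal to the old memory contents on success. Hypothetical demo name:
static inline uint32_t DemoCmpxchg32(std::atomic<uint32_t>* slot,
                                     uint32_t expected, uint32_t new_val) {
  std::atomic_compare_exchange_strong(slot, &expected, new_val);
  return expected;  // always the value the memory held before the call
}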
#define FOREACH_ATOMIC_LOAD_OP(V) \
  V(I32AtomicLoad, Uint32, uint32_t, I32, uint32_t, I32) \
  V(I32AtomicLoad8U, Uint8, uint8_t, I32, uint32_t, I32) \
  V(I32AtomicLoad16U, Uint16, uint16_t, I32, uint32_t, I32) \
  V(I64AtomicLoad, Uint64, uint64_t, I64, uint64_t, I64) \
  V(I64AtomicLoad8U, Uint8, uint8_t, I32, uint64_t, I64) \
  V(I64AtomicLoad16U, Uint16, uint16_t, I32, uint64_t, I64) \
  V(I64AtomicLoad32U, Uint32, uint32_t, I32, uint64_t, I64)

#define ATOMIC_LOAD_OP(name, Type, ctype, type, op_ctype, op_type) \
  template <typename MemIdx, typename MemOffsetT> \
  INSTRUCTION_HANDLER_FUNC s2s_##name##I(const uint8_t* code, uint32_t* sp, \
                                         WasmInterpreterRuntime* wasm_runtime, \
                                         int64_t r0, double fp0) { \
    uint64_t offset = Read<MemOffsetT>(code); \
    uint64_t index = pop<MemIdx>(sp, code, wasm_runtime); \
    uint64_t effective_index = offset + index; \
    if (V8_UNLIKELY(!IsAligned(effective_index, sizeof(ctype)))) { \
      TRAP(TrapReason::kTrapUnalignedAccess) \
    } \
    if (V8_UNLIKELY( \
            effective_index < index || \
            !base::IsInBounds<uint64_t>(effective_index, sizeof(ctype), \
                                        wasm_runtime->GetMemorySize()))) { \
      TRAP(TrapReason::kTrapMemOutOfBounds) \
    } \
    static_assert(sizeof(std::atomic<ctype>) == sizeof(ctype), \
                  "Size mismatch for types std::atomic<" #ctype \
                  ">, and " #ctype); \
    uint8_t* memory_start = wasm_runtime->GetMemoryStart(); \
    uint8_t* address = memory_start + effective_index; \
    ctype val = \
        std::atomic_load(reinterpret_cast<std::atomic<ctype>*>(address)); \
    push<op_ctype>(sp, code, wasm_runtime, static_cast<op_ctype>(val)); \
    NextOp(); \
  } \
  static auto constexpr s2s_##name = \
      s2s_##name##I<uint32_t, memory_offset32_t>; \
  static auto constexpr s2s_##name##_Idx64 = \
      s2s_##name##I<uint64_t, memory_offset64_t>;
FOREACH_ATOMIC_LOAD_OP(ATOMIC_LOAD_OP)
#undef ATOMIC_LOAD_OP
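// Added illustration (not from the V8 sources): the narrow load variants
// widen the loaded value to the operand type before pushing, e.g.
// I64AtomicLoad8U loads a uint8_t and pushes it as uint64_t. Since the
// memory type is unsigned, the static_cast zero-extends. Hypothetical
// equivalent:
static inline uint64_t DemoAtomicLoad8AsU64(const uint8_t* addr) {
  static_assert(sizeof(std::atomic<uint8_t>) == sizeof(uint8_t));
  uint8_t v = std::atomic_load(
      reinterpret_cast<const std::atomic<uint8_t>*>(addr));
  return static_cast<uint64_t>(v);  // zero-extension, never sign-extension
}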
#define FOREACH_ATOMIC_STORE_OP(V) \
  V(I32AtomicStore, Uint32, uint32_t, I32, uint32_t, I32) \
  V(I32AtomicStore8U, Uint8, uint8_t, I32, uint32_t, I32) \
  V(I32AtomicStore16U, Uint16, uint16_t, I32, uint32_t, I32) \
  V(I64AtomicStore, Uint64, uint64_t, I64, uint64_t, I64) \
  V(I64AtomicStore8U, Uint8, uint8_t, I32, uint64_t, I64) \
  V(I64AtomicStore16U, Uint16, uint16_t, I32, uint64_t, I64) \
  V(I64AtomicStore32U, Uint32, uint32_t, I32, uint64_t, I64)

#define ATOMIC_STORE_OP(name, Type, ctype, type, op_ctype, op_type) \
  template <typename MemIdx = uint32_t, typename MemOffsetT> \
  INSTRUCTION_HANDLER_FUNC s2s_##name##I(const uint8_t* code, uint32_t* sp, \
                                         WasmInterpreterRuntime* wasm_runtime, \
                                         int64_t r0, double fp0) { \
    ctype val = static_cast<ctype>(pop<op_ctype>(sp, code, wasm_runtime)); \
    uint64_t offset = Read<MemOffsetT>(code); \
    uint64_t index = pop<MemIdx>(sp, code, wasm_runtime); \
    uint64_t effective_index = offset + index; \
    if (V8_UNLIKELY(!IsAligned(effective_index, sizeof(ctype)))) { \
      TRAP(TrapReason::kTrapUnalignedAccess) \
    } \
    if (V8_UNLIKELY( \
            effective_index < index || \
            !base::IsInBounds<uint64_t>(effective_index, sizeof(ctype), \
                                        wasm_runtime->GetMemorySize()))) { \
      TRAP(TrapReason::kTrapMemOutOfBounds) \
    } \
    static_assert(sizeof(std::atomic<ctype>) == sizeof(ctype), \
                  "Size mismatch for types std::atomic<" #ctype \
                  ">, and " #ctype); \
    uint8_t* memory_start = wasm_runtime->GetMemoryStart(); \
    uint8_t* address = memory_start + effective_index; \
    std::atomic_store(reinterpret_cast<std::atomic<ctype>*>(address), val); \
    NextOp(); \
  } \
  static auto constexpr s2s_##name = \
      s2s_##name##I<uint32_t, memory_offset32_t>; \
  static auto constexpr s2s_##name##_Idx64 = \
      s2s_##name##I<uint64_t, memory_offset64_t>;
FOREACH_ATOMIC_STORE_OP(ATOMIC_STORE_OP)
#undef ATOMIC_STORE_OP
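// Added illustration (not from the V8 sources): the narrow store variants
// pop the wider operand and truncate it to the memory type before the
// seq_cst store, e.g. I64AtomicStore8U keeps only the low 8 bits.
// Hypothetical sketch:
static inline void DemoAtomicStore8FromU64(uint8_t* addr, uint64_t val) {
  std::atomic_store(reinterpret_cast<std::atomic<uint8_t>*>(addr),
                    static_cast<uint8_t>(val));  // low byte only
}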
#if V8_TARGET_BIG_ENDIAN
#define LANE(i, type) ((sizeof(type.val) / sizeof(type.val[0])) - (i)-1)
#else
#define LANE(i, type) (i)
#endif  // V8_TARGET_BIG_ENDIAN
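// Added illustration (not from the V8 sources): on a big-endian target
// the lane order inside the 16-byte Simd128 value is reversed, so LANE
// remaps lane 0 of a four-lane vector to element 3. With a hypothetical
// int4-like struct:
//
//   struct Demo4 { int32_t val[4]; };
//   Demo4 s = {{10, 20, 30, 40}};
//   int32_t lane0 = s.val[LANE(0, s)];  // s.val[3] on big-endian,
//                                       // s.val[0] on little-endian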
#define SPLAT_CASE(format, stype, valType, op_type, num) \
  INSTRUCTION_HANDLER_FUNC s2s_Simd##format##Splat( \
      const uint8_t* code, uint32_t* sp, WasmInterpreterRuntime* wasm_runtime, \
      int64_t r0, double fp0) { \
    valType v = pop<valType>(sp, code, wasm_runtime); \
    stype s; \
    for (int i = 0; i < num; i++) s.val[i] = v; \
    push<Simd128>(sp, code, wasm_runtime, Simd128(s)); \
    NextOp(); \
  }
#define EXTRACT_LANE_CASE(format, stype, op_type, name) \
  INSTRUCTION_HANDLER_FUNC s2s_Simd##format##ExtractLane( \
      const uint8_t* code, uint32_t* sp, WasmInterpreterRuntime* wasm_runtime, \
      int64_t r0, double fp0) { \
    uint16_t lane = Read<int16_t>(code); \
    DCHECK_LT(lane, 4); \
    Simd128 v = pop<Simd128>(sp, code, wasm_runtime); \
    stype s = v.to_##name(); \
    push(sp, code, wasm_runtime, s.val[LANE(lane, s)]); \
    NextOp(); \
  }
#undef EXTRACT_LANE_CASE
#define EXTRACT_LANE_EXTEND_CASE(format, stype, name, sign, extended_type) \
  INSTRUCTION_HANDLER_FUNC s2s_Simd##format##ExtractLane##sign( \
      const uint8_t* code, uint32_t* sp, WasmInterpreterRuntime* wasm_runtime, \
      int64_t r0, double fp0) { \
    uint16_t lane = Read<int16_t>(code); \
    DCHECK_LT(lane, 16); \
    Simd128 s = pop<Simd128>(sp, code, wasm_runtime); \
    stype ss = s.to_##name(); \
    auto res = ss.val[LANE(lane, ss)]; \
    DCHECK(std::is_signed<decltype(res)>::value); \
    if (std::is_unsigned<extended_type>::value) { \
      using unsigned_type = std::make_unsigned<decltype(res)>::type; \
      push(sp, code, wasm_runtime, \
           static_cast<extended_type>(static_cast<unsigned_type>(res))); \
    } else { \
      push(sp, code, wasm_runtime, static_cast<extended_type>(res)); \
    } \
    NextOp(); \
  }
#undef EXTRACT_LANE_EXTEND_CASE
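// Added illustration (not from the V8 sources): extracting an unsigned
// lane first reinterprets the signed lane value as unsigned so the
// widening cast zero-extends instead of sign-extending. For a lane that
// holds all ones:
//
//   int8_t res = -1;                                          // 0xff
//   uint32_t u =
//       static_cast<uint32_t>(static_cast<uint8_t>(res));     // 255
//   int32_t s = static_cast<int32_t>(res);                    // -1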
#define BINOP_CASE(op, name, stype, count, expr) \
  INSTRUCTION_HANDLER_FUNC s2s_Simd##op(const uint8_t* code, uint32_t* sp, \
                                        WasmInterpreterRuntime* wasm_runtime, \
                                        int64_t r0, double fp0) { \
    stype s2 = pop<Simd128>(sp, code, wasm_runtime).to_##name(); \
    stype s1 = pop<Simd128>(sp, code, wasm_runtime).to_##name(); \
    stype res; \
    for (size_t i = 0; i < count; ++i) { \
      auto a = s1.val[LANE(i, s1)]; \
      auto b = s2.val[LANE(i, s2)]; \
      res.val[LANE(i, res)] = expr; \
    } \
    push<Simd128>(sp, code, wasm_runtime, Simd128(res)); \
    NextOp(); \
  }
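// Added illustration (not from the V8 sources): a BINOP_CASE expansion is
// a plain lane-wise loop over both operands, with `expr` evaluated per
// lane. Assuming an invocation shaped like BINOP_CASE(I32x4Add, i32x4,
// int4, 4, a + b) (names assumed), a four-lane add behaves like this
// hypothetical helper:
static inline void DemoLaneWiseAdd4(const int32_t a[4], const int32_t b[4],
                                    int32_t out[4]) {
  for (size_t i = 0; i < 4; ++i) {
    // LANE(i, x) is the identity on little-endian targets.
    out[i] = a[i] + b[i];
  }
}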
#define UNOP_CASE(op, name, stype, count, expr) \
  INSTRUCTION_HANDLER_FUNC s2s_Simd##op(const uint8_t* code, uint32_t* sp, \
                                        WasmInterpreterRuntime* wasm_runtime, \
                                        int64_t r0, double fp0) { \
    stype s = pop<Simd128>(sp, code, wasm_runtime).to_##name(); \
    stype res; \
    for (size_t i = 0; i < count; ++i) { \
      auto a = s.val[LANE(i, s)]; \
      res.val[LANE(i, res)] = expr; \
    } \
    push<Simd128>(sp, code, wasm_runtime, Simd128(res)); \
    NextOp(); \
  }
#define BITMASK_CASE(op, name, stype, count) \
  INSTRUCTION_HANDLER_FUNC s2s_Simd##op(const uint8_t* code, uint32_t* sp, \
                                        WasmInterpreterRuntime* wasm_runtime, \
                                        int64_t r0, double fp0) { \
    stype s = pop<Simd128>(sp, code, wasm_runtime).to_##name(); \
    int32_t res = 0; \
    for (size_t i = 0; i < count; ++i) { \
      bool sign = std::signbit(static_cast<double>(s.val[LANE(i, s)])); \
      res |= (sign << i); \
    } \
    push<int32_t>(sp, code, wasm_runtime, res); \
    NextOp(); \
  }
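// Added illustration (not from the V8 sources): the bitmask ops gather
// one sign bit per lane into the low bits of an i32. For four lanes
// {-1, 0, -5, 7} the result is 0b0101 = 5 (lanes 0 and 2 are negative).
// Hypothetical equivalent:
static inline int32_t DemoBitMask4(const int32_t lanes[4]) {
  int32_t res = 0;
  for (size_t i = 0; i < 4; ++i) {
    res |= (lanes[i] < 0) << i;  // sign bit of lane i lands in bit i
  }
  return res;
}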
#define CMPOP_CASE(op, name, stype, out_stype, count, expr) \
  INSTRUCTION_HANDLER_FUNC s2s_Simd##op(const uint8_t* code, uint32_t* sp, \
                                        WasmInterpreterRuntime* wasm_runtime, \
                                        int64_t r0, double fp0) { \
    stype s2 = pop<Simd128>(sp, code, wasm_runtime).to_##name(); \
    stype s1 = pop<Simd128>(sp, code, wasm_runtime).to_##name(); \
    out_stype res; \
    for (size_t i = 0; i < count; ++i) { \
      auto a = s1.val[LANE(i, s1)]; \
      auto b = s2.val[LANE(i, s2)]; \
      auto result = expr; \
      res.val[LANE(i, res)] = result ? -1 : 0; \
    } \
    push<Simd128>(sp, code, wasm_runtime, Simd128(res)); \
    NextOp(); \
  }

           static_cast<uint32_t>(a) > static_cast<uint32_t>(b))
           static_cast<uint32_t>(a) >= static_cast<uint32_t>(b))
           static_cast<uint32_t>(a) < static_cast<uint32_t>(b))
           static_cast<uint32_t>(a) <= static_cast<uint32_t>(b))
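// Added illustration (not from the V8 sources): comparison handlers
// materialize each lane result as an all-ones or all-zeros mask (-1 or
// 0), and the unsigned variants simply cast both operands before
// comparing, as in the fragments above:
//
//   int32_t a = -1, b = 1;
//   bool lt_s = a < b;                                           // true
//   bool lt_u = static_cast<uint32_t>(a) < static_cast<uint32_t>(b);
//   // false: 0xffffffff is the largest uint32_t
//   int32_t lane = lt_u ? -1 : 0;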
#define REPLACE_LANE_CASE(format, name, stype, ctype, op_type) \
  INSTRUCTION_HANDLER_FUNC s2s_Simd##format##ReplaceLane( \
      const uint8_t* code, uint32_t* sp, WasmInterpreterRuntime* wasm_runtime, \
      int64_t r0, double fp0) { \
    uint16_t lane = Read<int16_t>(code); \
    DCHECK_LT(lane, 16); \
    ctype new_val = pop<ctype>(sp, code, wasm_runtime); \
    Simd128 simd_val = pop<Simd128>(sp, code, wasm_runtime); \
    stype s = simd_val.to_##name(); \
    s.val[LANE(lane, s)] = new_val; \
    push<Simd128>(sp, code, wasm_runtime, Simd128(s)); \
    NextOp(); \
  }
#undef REPLACE_LANE_CASE
template <typename MemIdx, typename MemOffsetT>
                                       int64_t r0, double fp0) {
          effective_index < index ||
    TRAP(TrapReason::kTrapMemOutOfBounds)
  }
  uint8_t* address = memory_start + effective_index;
static auto constexpr s2s_SimdS128LoadMem =
    s2s_SimdS128LoadMemI<uint32_t, memory_offset32_t>;
static auto constexpr s2s_SimdS128LoadMem_Idx64 =
    s2s_SimdS128LoadMemI<uint64_t, memory_offset64_t>;

template <typename MemIdx, typename MemOffsetT>
                                       int64_t r0, double fp0) {
          effective_index < index ||
    TRAP(TrapReason::kTrapMemOutOfBounds)
  }
  uint8_t* address = memory_start + effective_index;
#define SHIFT_CASE(op, name, stype, count, expr) \
  INSTRUCTION_HANDLER_FUNC s2s_Simd##op(const uint8_t* code, uint32_t* sp, \
                                        WasmInterpreterRuntime* wasm_runtime, \
                                        int64_t r0, double fp0) { \
    uint32_t shift = pop<uint32_t>(sp, code, wasm_runtime); \
    stype s = pop<Simd128>(sp, code, wasm_runtime).to_##name(); \
    stype res; \
    for (size_t i = 0; i < count; ++i) { \
      auto a = s.val[LANE(i, s)]; \
      res.val[LANE(i, res)] = expr; \
    } \
    push<Simd128>(sp, code, wasm_runtime, Simd128(res)); \
    NextOp(); \
  }

           static_cast<uint64_t>(a) << (shift % 64))
                                       int64_t r0, double fp0) {
  for (size_t dst = 0; i < end; ++i, ++dst) {

#define CONVERT_CASE(op, src_type, name, dst_type, count, start_index, ctype, \
                     expr) \
  INSTRUCTION_HANDLER_FUNC s2s_Simd##op(const uint8_t* code, uint32_t* sp, \
                                        WasmInterpreterRuntime* wasm_runtime, \
                                        int64_t r0, double fp0) { \
    src_type s = pop<Simd128>(sp, code, wasm_runtime).to_##name(); \
    dst_type res = {0}; \
    for (size_t i = 0; i < count; ++i) { \
      ctype a = s.val[LANE(start_index + i, s)]; \
      res.val[LANE(i, res)] = expr; \
    } \
    push<Simd128>(sp, code, wasm_runtime, Simd128(res)); \
    NextOp(); \
  }

             static_cast<float>(a))
             base::saturated_cast<int32_t>(a))
             base::saturated_cast<uint32_t>(a))
             base::saturated_cast<int32_t>(a))
             base::saturated_cast<uint32_t>(a))
             base::saturated_cast<int32_t>(a))
             base::saturated_cast<uint32_t>(a))
             double, base::saturated_cast<uint32_t>(a))
#define PACK_CASE(op, src_type, name, dst_type, count, dst_ctype) \
  INSTRUCTION_HANDLER_FUNC s2s_Simd##op(const uint8_t* code, uint32_t* sp, \
                                        WasmInterpreterRuntime* wasm_runtime, \
                                        int64_t r0, double fp0) { \
    src_type s2 = pop<Simd128>(sp, code, wasm_runtime).to_##name(); \
    src_type s1 = pop<Simd128>(sp, code, wasm_runtime).to_##name(); \
    dst_type res; \
    for (size_t i = 0; i < count; ++i) { \
      int64_t v = i < count / 2 ? s1.val[LANE(i, s1)] \
                                : s2.val[LANE(i - count / 2, s2)]; \
      res.val[LANE(i, res)] = base::saturated_cast<dst_ctype>(v); \
    } \
    push<Simd128>(sp, code, wasm_runtime, Simd128(res)); \
    NextOp(); \
  }
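// Added illustration (not from the V8 sources): packing narrows each
// source lane with saturation, taking the first half of the output lanes
// from s1 and the second half from s2. base::saturated_cast clamps
// instead of wrapping:
//
//   int16_t v1 = 300, v2 = -200;
//   int8_t p1 = base::saturated_cast<int8_t>(v1);    // 127, not 44
//   uint8_t p2 = base::saturated_cast<uint8_t>(v2);  // 0, not 56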
  for (size_t i = 0; i < 4; ++i) {
                    bool_val.val[LANE(i, bool_val)]);
                                       int64_t r0, double fp0) {
  for (size_t i = 0; i < 4; i++) {
                                       int64_t r0, double fp0) {
  for (size_t i = 0; i < 8; i++) {
                                       int64_t r0, double fp0) {
  for (size_t i = 0; i < 4; i++) {
    res.val[LANE(i, res)] = base::AddWithWraparound(a + b + c + d, acc);
                                       int64_t r0, double fp0) {
        lane < kSimd128Size && lane >= 0 ? v1.val[LANE(lane, v1)] : 0;
                                       int64_t r0, double fp0) {
    int lane = value.val[i];
                                       int64_t r0, double fp0) {
  bool res = s.val[LANE(0, s)] | s.val[LANE(1, s)] | s.val[LANE(2, s)] |
#define REDUCTION_CASE(op, name, stype, count) \
  INSTRUCTION_HANDLER_FUNC s2s_Simd##op(const uint8_t* code, uint32_t* sp, \
                                        WasmInterpreterRuntime* wasm_runtime, \
                                        int64_t r0, double fp0) { \
    stype s = pop<Simd128>(sp, code, wasm_runtime).to_##name(); \
    int32_t res = 1; \
    for (size_t i = 0; i < count; ++i) { \
      res = res & static_cast<bool>(s.val[LANE(i, s)]); \
    } \
    push<int32_t>(sp, code, wasm_runtime, res); \
    NextOp(); \
  }
#undef REDUCTION_CASE
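// Added illustration (not from the V8 sources): the AllTrue reductions
// AND together the "is this lane non-zero" bit of every lane, so a
// single zero lane makes the whole result 0. Hypothetical four-lane
// equivalent:
static inline int32_t DemoI32x4AllTrue(const int32_t lanes[4]) {
  int32_t res = 1;
  for (size_t i = 0; i < 4; ++i) {
    res = res & static_cast<bool>(lanes[i]);
  }
  return res;  // 1 only if every lane was non-zero
}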
#define QFM_CASE(op, name, stype, count, operation) \
  INSTRUCTION_HANDLER_FUNC s2s_Simd##op(const uint8_t* code, uint32_t* sp, \
                                        WasmInterpreterRuntime* wasm_runtime, \
                                        int64_t r0, double fp0) { \
    stype c = pop<Simd128>(sp, code, wasm_runtime).to_##name(); \
    stype b = pop<Simd128>(sp, code, wasm_runtime).to_##name(); \
    stype a = pop<Simd128>(sp, code, wasm_runtime).to_##name(); \
    stype res; \
    for (size_t i = 0; i < count; i++) { \
      res.val[LANE(i, res)] = \
          operation(a.val[LANE(i, a)] * b.val[LANE(i, b)]) + \
          c.val[LANE(i, c)]; \
    } \
    push<Simd128>(sp, code, wasm_runtime, Simd128(res)); \
    NextOp(); \
  }
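// Added illustration (not from the V8 sources): each Qfma/Qfms lane
// computes res = operation(a * b) + c, where `operation` is assumed to
// be unary plus for the fused multiply-add and unary minus for the
// fused multiply-subtract. One f32 lane then behaves like:
//
//   float a = 2.0f, b = 3.0f, c = 1.0f;
//   float qfma_lane = +(a * b) + c;  // 7.0f
//   float qfms_lane = -(a * b) + c;  // -5.0f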
template <typename s_type, typename load_type, typename MemIdx,
          typename MemOffsetT>
                                       int64_t r0, double fp0) {
          effective_index < index ||
    TRAP(TrapReason::kTrapMemOutOfBounds)
  uint8_t* address = memory_start + effective_index;
    s.val[LANE(i, s)] = v;

template <typename s_type, typename wide_type, typename narrow_type,
          typename MemIdx, typename MemOffsetT>
                                       int64_t r0, double fp0) {
  static_assert(sizeof(wide_type) == sizeof(narrow_type) * 2,
                "size mismatch for wide and narrow types");
          effective_index < index ||
    TRAP(TrapReason::kTrapMemOutOfBounds)
  uint8_t* address = memory_start + effective_index;
  constexpr int lanes = kSimd128Size / sizeof(wide_type);
  for (int i = 0; i < lanes; i++) {
    uint8_t shift = i * (sizeof(narrow_type) * 8);
    narrow_type el = static_cast<narrow_type>(v >> shift);
    s.val[LANE(i, s)] = static_cast<wide_type>(el);
template <typename s_type, typename load_type, typename MemIdx,
          typename MemOffsetT>
                                       int64_t r0, double fp0) {
          effective_index < index ||
    TRAP(TrapReason::kTrapMemOutOfBounds)
  uint8_t* address = memory_start + effective_index;
    s.val[LANE(i, s)] = 0;
  s.val[LANE(0, s)] = v;

template <typename s_type, typename memory_type, typename MemIdx = uint32_t,
                                       int64_t r0, double fp0) {
          effective_index < index ||
    TRAP(TrapReason::kTrapMemOutOfBounds)
  uint8_t* address = memory_start + effective_index;
                         reinterpret_cast<Address>(address));
  value.val[LANE(lane, value)] = loaded;

template <typename s_type, typename memory_type, typename MemIdx = uint32_t,
                                       int64_t r0, double fp0) {
          effective_index < index ||
    TRAP(TrapReason::kTrapMemOutOfBounds)
  uint8_t* address = memory_start + effective_index;
  memory_type res = value.val[LANE(lane, value)];
template <typename DstSimdType, typename SrcSimdType, typename Wide,
                                       int64_t r0, double fp0) {
  constexpr int lanes = kSimd128Size / sizeof(DstSimdType::val[0]);
  for (int i = 0; i < lanes; ++i) {
        static_cast<Narrow>(v.val[LANE(i * 2 + 1, v)]));
                                       int64_t r0, double fp0) {
  uint32_t encoded_index = 0;
  for (size_t index = 0; index < sig->parameter_count(); index++) {
    switch (sig->GetParam(index).kind()) {
                             *reinterpret_cast<uint32_t*>(&f32));
                             *reinterpret_cast<uint64_t*>(&f64));
        if (IsWasmNull(*ref, isolate)) {
          encoded_values->set(encoded_index++, *ref);
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution) {
    TRAP(TrapReason::kTrapNullDereference)
  wasm_runtime->ExecuteCallRef(code, func_ref, sig_index, stack_pos, sp,
                               ref_stack_fp_offset, slot_offset,
                               return_slot_offset, false);
                                       int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution) {
    TRAP(TrapReason::kTrapNullDereference)
  wasm_runtime->UnwindCurrentStackFrame(sp, slot_offset, rets_size, args_size,
                                        rets_refs, args_refs,
                                        ref_stack_fp_offset);
  wasm_runtime->ExecuteCallRef(code, func_ref, sig_index, stack_pos, sp, 0, 0,
                               return_slot_offset, true);
                                       int64_t r0, double fp0) {
  std::pair<DirectHandle<WasmStruct>, const StructType*> struct_new_result =
  const StructType* struct_type = struct_new_result.second;
  Address field_addr = (*struct_obj).ptr() + field_offset;
      *reinterpret_cast<int8_t*>(field_addr) =
                                          *struct_obj, field_addr,
                                       int64_t r0, double fp0) {
  std::pair<DirectHandle<WasmStruct>, const StructType*> struct_new_result =
  const StructType* struct_type = struct_new_result.second;
  Address field_addr = (*struct_obj).ptr() + field_offset;
      *reinterpret_cast<int8_t*>(field_addr) = int8_t{};
                                          *struct_obj, field_addr,
template <typename T, typename U = T>
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapNullDereference)
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapNullDereference)
template <typename T, typename U = T>
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapNullDereference)
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapNullDereference)
  Address field_addr = (*struct_obj).ptr() + field_offset;
template <typename T, typename U = T>
                                       int64_t r0, double fp0) {
  std::pair<DirectHandle<WasmArray>, const ArrayType*> array_new_result =
      wasm_runtime->ArrayNewUninitialized(elem_count, array_index);
    TRAP(TrapReason::kTrapArrayTooLarge)
  const ArrayType* array_type = array_new_result.second;
  Address element_addr = array->ElementAddress(0);
  for (uint32_t i = 0; i < elem_count; i++) {
    element_addr += element_size;
                                       int64_t r0, double fp0) {
  std::pair<DirectHandle<WasmArray>, const ArrayType*> array_new_result =
      wasm_runtime->ArrayNewUninitialized(elem_count, array_index);
    TRAP(TrapReason::kTrapArrayTooLarge)
  const ArrayType* array_type = array_new_result.second;
  Address element_addr = array->ElementAddress(0);
  uint32_t element_offset = array->element_offset(0);
  for (uint32_t i = 0; i < elem_count; i++) {
    element_offset += sizeof(Tagged_t);
                                       int64_t r0, double fp0) {
  std::pair<DirectHandle<WasmArray>, const ArrayType*> array_new_result =
      wasm_runtime->ArrayNewUninitialized(elem_count, array_index);
    TRAP(TrapReason::kTrapArrayTooLarge)
  if (elem_count > 0) {
    const ArrayType* array_type = array_new_result.second;
    Address element_addr = array->ElementAddress(elem_count - 1);
    uint32_t element_offset = array->element_offset(elem_count - 1);
    for (uint32_t i = 0; i < elem_count; i++) {
        *reinterpret_cast<int8_t*>(element_addr) =
      element_addr -= element_size;
      element_offset -= element_size;
  std::pair<DirectHandle<WasmArray>, const ArrayType*> array_new_result =
      wasm_runtime->ArrayNewUninitialized(elem_count, array_index);
    TRAP(TrapReason::kTrapArrayTooLarge)
  const ArrayType* array_type = array_new_result.second;
  Address element_addr = array->ElementAddress(0);
  uint32_t element_offset = array->element_offset(0);
  for (uint32_t i = 0; i < elem_count; i++) {
      *reinterpret_cast<int8_t*>(element_addr) = int8_t{};
    element_addr += element_size;
    element_offset += element_size;
template <TrapReason OutOfBoundsError>
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapArrayOutOfBounds)
    TRAP(OutOfBoundsError)
    TRAP(OutOfBoundsError)
    TRAP(TrapReason::kTrapArrayTooLarge)
template <bool init_data>
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapArrayOutOfBounds)
    TRAP(TrapReason::kTrapElementSegmentOutOfBounds)
    TRAP(TrapReason::kTrapArrayOutOfBounds)
                             ? TrapReason::kTrapDataSegmentOutOfBounds
                             : TrapReason::kTrapElementSegmentOutOfBounds;
    TRAP(TrapReason::kTrapNullDereference)
  bool ok = wasm_runtime->WasmArrayInitSegment(data_index, array, dest_offset,
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapNullDereference)
  DCHECK(IsWasmArray(*array_obj));
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapArrayOutOfBounds)
    TRAP(TrapReason::kTrapArrayOutOfBounds)
    TRAP(TrapReason::kTrapNullDereference)
    TRAP(TrapReason::kTrapArrayOutOfBounds)
    TRAP(TrapReason::kTrapNullDereference)
    TRAP(TrapReason::kTrapArrayOutOfBounds)
  ok = wasm_runtime->WasmArrayCopy(dest_array, dest_offset, src_array,
template <typename T, typename U = T>
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapNullDereference)
  DCHECK(IsWasmArray(*array_obj));
    TRAP(TrapReason::kTrapArrayOutOfBounds)
  Address element_addr = array->ElementAddress(index);
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapNullDereference)
  DCHECK(IsWasmArray(*array_obj));
    TRAP(TrapReason::kTrapArrayOutOfBounds)
template <typename T, typename U = T>
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapNullDereference)
  DCHECK(IsWasmArray(*array_obj));
    TRAP(TrapReason::kTrapArrayOutOfBounds)
  Address element_addr = array->ElementAddress(index);
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapNullDereference)
  DCHECK(IsWasmArray(*array_obj));
    TRAP(TrapReason::kTrapArrayOutOfBounds)
  Address element_addr = array->ElementAddress(index);
  uint32_t element_offset = array->element_offset(index);
template <typename T, typename U = T>
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapNullDereference)
  DCHECK(IsWasmArray(*array_obj));
    TRAP(TrapReason::kTrapArrayOutOfBounds)
  for (uint32_t i = 0; i < size; i++) {
    element_addr += sizeof(T);
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapNullDereference)
  DCHECK(IsWasmArray(*array_obj));
    TRAP(TrapReason::kTrapArrayOutOfBounds)
  uint32_t element_offset = array->element_offset(offset);
  for (uint32_t i = 0; i < size; i++) {
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapNullDereference)
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapNullDereference)
      0x7fffffff & static_cast<uint32_t>(i::Smi::ToInt(*ref)));
template <bool null_succeeds>
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapIllegalCast)
template <bool null_succeeds>
                                       int64_t r0, double fp0) {
  bool cast_succeeds =
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapIllegalCast)
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapIllegalCast)
                                       int64_t r0, double fp0) {
  TRAP(TrapReason::kTrapIllegalCast)
}
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
    TRAP(TrapReason::kTrapNullDereference)
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
                                       int64_t r0, double fp0) {
  if (v8_flags.trace_drumbrake_execution) {
                        "@%-3u: %-24s: ", pc,
                                       int64_t r0, double fp0) {
template <typename T>
                                       int64_t r0, double fp0) {
static auto constexpr trace_PushConstI32Slot = trace_PushConstSlot<int32_t>;
static auto constexpr trace_PushConstI64Slot = trace_PushConstSlot<int64_t>;
static auto constexpr trace_PushConstF32Slot = trace_PushConstSlot<float>;
static auto constexpr trace_PushConstF64Slot = trace_PushConstSlot<double>;
static auto constexpr trace_PushConstS128Slot = trace_PushConstSlot<Simd128>;
static auto constexpr trace_PushConstRefSlot = trace_PushConstSlot<WasmRef>;
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
                                       int64_t r0, double fp0) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING

void WasmBytecodeGenerator::TracePushConstSlot(uint32_t slot_index) {
  if (v8_flags.trace_drumbrake_execution) {

void WasmBytecodeGenerator::TracePushCopySlot(uint32_t from_stack_index) {
  if (v8_flags.trace_drumbrake_execution) {

void WasmBytecodeGenerator::TraceSetSlotType(uint32_t stack_index,
  if (v8_flags.trace_drumbrake_execution) {

void ShadowStack::Print(WasmInterpreterRuntime* wasm_runtime,
                        const uint32_t* sp, size_t start_params,
                        size_t start_locals, size_t start_stack,
  for (size_t i = 0; i < stack_.size(); i++) {
    char slot_kind = i < start_locals - start_params  ? 'p'
                     : i < start_stack - start_params ? 'l'
    const uint8_t* addr =
        reinterpret_cast<const uint8_t*>(sp) + stack_[i].slot_offset_;
               start_params + stack_.size(), 'R',
               reinterpret_cast<const uint8_t*>(&r0));
               start_params + stack_.size(), 'R',
               reinterpret_cast<const uint8_t*>(&r0));
    float f = static_cast<float>(fp0);
               start_params + stack_.size(), 'R',
               reinterpret_cast<const uint8_t*>(&f));
               start_params + stack_.size(), 'R',
               reinterpret_cast<const uint8_t*>(&fp0));

void ShadowStack::Slot::Print(WasmInterpreterRuntime* wasm_runtime,
                              ValueType type, size_t index, char kind,
                              const uint8_t* addr) {
  switch (type.kind()) {
                          "%c%zu:i32:%d ", kind, index,
                          "%c%zu:i64:%" PRId64, kind, index,
                          "%c%zu:f64:%f ", kind, index,
          reinterpret_cast<Address>(addr + 4));
          reinterpret_cast<Address>(addr + 8));
          reinterpret_cast<Address>(addr + 12));
                          s.val[0], s.val[1], s.val[2], s.val[3]);
                          "%c%zu:ref:%" PRIx64, kind, index,
#if !V8_DRUMBRAKE_BOUNDS_CHECKS
#define V(_) nullptr,
#define V(name) Handlers<true>::name,
#define V(name) Handlers<true>::name,
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
        FOREACH_TRACE_INSTR_HANDLER(V)
#if !V8_DRUMBRAKE_BOUNDS_CHECKS
#define V(_) nullptr,
#define V(name) Handlers<false>::name,
#define V(name) Handlers<false>::name,
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
        FOREACH_TRACE_INSTR_HANDLER(V)
  BlockIndex try_block_index = catch_it->second;
  const auto& try_it = try_blocks_.find(try_block_index);
  const auto& try_it =
  return try_it != try_blocks_.end() ? &try_it->second : nullptr;
  return &try_it->second;
  const auto& it = try_blocks_.find(try_block_index);
  return it->second.end_instruction_code_offset;
  return {it->second.first_param_slot_offset,
          it->second.first_param_ref_stack_index};
  return it->second.try_block_index;
      TryBlock{parent_or_matching_try_block_index, ancestor_try_block_index}});
                                     uint32_t first_param_slot_offset,
                                     uint32_t first_param_ref_stack_index,
                                     first_param_ref_stack_index}});
  it->second.catch_handlers.emplace_back(
  if (try_block_index < 0) {
    try_block_index = block_index;
  const auto& try_it = try_blocks_.find(try_block_index);
  try_it->second.end_instruction_code_offset = code_offset;
  return try_block_index;
  const TryBlock& try_block = try_it->second;
  if (inside_catch_handler) {
  if (try_block_index < 0) return;
    size_t code_length, uint32_t stack_frame_size,
    size_t blocks_count, const uint8_t* const_slots_data,
    size_t const_slots_length, uint32_t ref_slots_count,
    const std::map<CodeOffset, pc_t>&& code_pc_map)
    : code_(code_data, code_data + code_length),
      code_bytes_(code_.data()),
      signature_(signature),
      canonical_signature_(canonical_signature),
      interpreter_code_(interpreter_code),
      const_slots_values_(const_slots_data,
                          const_slots_data + const_slots_length),
      blocks_count_(static_cast<uint32_t>(blocks_count)),
      args_slots_size_(ArgsSizeInSlots(signature_)),
      return_count_(static_cast<uint32_t>(signature_->return_count())),
      rets_slots_size_(RetsSizeInSlots(signature_)),
          static_cast<uint32_t>(interpreter_code_->locals.num_locals)),
      locals_slots_size_(LocalsSizeInSlots(interpreter_code_)),
      total_frame_size_in_bytes_(stack_frame_size * kSlotSize +
      ref_args_count_(RefArgsCount(signature_)),
      ref_rets_count_(RefRetsCount(signature_)),
      ref_locals_count_(RefLocalsCount(interpreter_code)),
      ref_slots_count_(ref_slots_count),
      code_pc_map_(code_pc_map) {}
    : const_slot_offset_(0),
      ref_slots_count_(0),
      function_index_(function_index),
      wasm_code_(wasm_code),
      args_slots_size_(0),
      rets_slots_size_(0),
      current_block_index_(-1),
      is_instruction_reachable_(true),
      unreachable_block_count_(0),
      was_current_instruction_reachable_(true),
      last_instr_offset_(kInvalidCodeOffset),
      current_instr_encoding_failed_(false)
      no_nested_emit_instr_handler_guard_(false)
  code_.reserve(wasm_code_size * 6);
  slots_.reserve(wasm_code_size / 2);
  blocks_.reserve(wasm_code_size / 8);
  static_assert(sizeof(size_t) == sizeof(uint64_t));
  const int64x2 s = s128.to_i64x2();
  return s.val[0] ^ s.val[1];
  for (uint32_t i = start_slot_index; i < stack_.size(); i++) {
                                          uint32_t* new_slot_index) {
  *new_slot_index = UINT_MAX;
  for (uint32_t i = start_slot_index; i < stack_.size(); i++) {
  if (*new_slot_index == UINT_MAX) return false;
  DCHECK_GT(start_slot_index, stack_index);
  for (uint32_t i = start_slot_index; i < stack_.size(); i++) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    printf("Preserve UpdateStack: [%d] = %d\n", i,
           slots_[*new_slot_index].slot_offset);
                                        uint32_t from_slot_index,
                                        uint32_t to_slot_index,
                                        bool copy_from_reg) {
  if (copy_from_reg) {
  if (copy_from_reg) {
  if (copy_from_reg) {
  if (copy_from_reg) {
  if (!copy_from_reg) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_bytecode_generator &&
      v8_flags.trace_drumbrake_execution_verbose) {
    printf("emit CopySlot: %d(%d) -> %d(%d)\n", from_slot_index,
           slots_[from_slot_index].slot_offset, to_slot_index,
           slots_[to_slot_index].slot_offset);
                                                uint32_t from_slot_index,
                                                uint32_t to_stack_index,
                                                bool copy_from_reg) {
  uint32_t to_slot_index = stack_[to_stack_index];
  uint32_t new_slot_index;
  if (copy_from_reg) {
  if (copy_from_reg) {
  if (copy_from_reg) {
  if (copy_from_reg) {
  if (!copy_from_reg) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution &&
      v8_flags.trace_drumbrake_execution_verbose) {
    printf("emit s2s_PreserveCopySlot: %d %d %d\n",
           slots_[from_slot_index].slot_offset,
           slots_[to_slot_index].slot_offset,
           slots_[new_slot_index].slot_offset);
                                      uint32_t to_stack_index, bool is_tee,
                                      bool copy_from_reg) {
  if (!is_tee && !copy_from_reg) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_execution) {
  if (num_args_and_locals >= stack_size()) return;
  for (uint32_t local_index = 0; local_index < num_args_and_locals;
    uint32_t new_slot_index;
                                               size_t* rets_slots_count,
                                               size_t* params_slots_count) {
  uint32_t first_slot_index = 0;
  *rets_slots_count = 0;
  *params_slots_count = 0;
  bool first_slot_found = false;
    *rets_slots_count = sig->return_count();
    for (uint32_t i = 0; i < *rets_slots_count; i++) {
      if (!first_slot_found) {
        first_slot_index = slot_index;
        first_slot_found = true;
    *params_slots_count = sig->parameter_count();
    for (uint32_t i = 0; i < *params_slots_count; i++) {
      if (!first_slot_found) {
        first_slot_index = slot_index;
        first_slot_found = true;
    *rets_slots_count = 1;
  return first_slot_index;
                                              uint32_t target_block_index,
                                              bool update_stack) {
  uint32_t params_count = ParamsCount(target_block_data);
  uint32_t first_param_slot_index =
  for (uint32_t i = 0; i < params_count; i++) {
    uint32_t from_slot_index =
    uint32_t to_slot_index = first_param_slot_index + i;
    if (from_slot_index != to_slot_index) {
                    first_param_slot_index + i);
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
      if (v8_flags.trace_drumbrake_execution) {
                                             uint32_t target_block_index,
                                             WasmOpcode opcode) {
  bool is_branch = kExprBr == opcode || kExprBrIf == opcode ||
                   kExprBrTable == opcode || kExprBrOnNull == opcode ||
                   kExprBrOnNonNull == opcode || kExprBrOnCast == opcode;
  bool is_target_loop_block = target_block_data.opcode_ == kExprLoop;
  if (is_target_loop_block && is_branch) {
  uint32_t params_count =
      target_block_index == 0 ? 0 : ParamsCount(target_block_data);
  if (!is_target_loop_block || !is_branch) {
    uint32_t count =
        std::min(static_cast<uint32_t>(stack_.size()), rets_count);
    for (uint32_t i = 0; i < count; i++) {
      if (from_slot_index != to_slot_index) {
  bool is_else = (kExprElse == opcode);
  bool is_return = (kExprReturn == opcode);
  bool is_catch = (kExprCatch == opcode || kExprCatchAll == opcode);
  if (!is_branch && !is_return && !is_else && !is_catch) {
    uint32_t new_stack_height =
        target_block_data.stack_size_ - params_count + rets_count;
           !was_current_instruction_reachable_);
    for (uint32_t i = 0; i < rets_count; i++) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
      if (v8_flags.trace_drumbrake_execution) {
  uint32_t params_count = if_block_index == 0 ? 0 : ParamsCount(if_block_data);
  for (uint32_t i = 0; i < params_count; i++) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
    if (v8_flags.trace_drumbrake_execution) {
  uint32_t const_slots_size = 0;
  while (pc < limit) {
    if (opcode == kExprI32Const || opcode == kExprF32Const) {
      const_slots_size += sizeof(uint32_t) / kSlotSize;
    } else if (opcode == kExprI64Const || opcode == kExprF64Const) {
      const_slots_size += sizeof(uint64_t) / kSlotSize;
    } else if (opcode == kSimdPrefix) {
      auto [opcode_index, opcode_len] =
      opcode = (kSimdPrefix << 8) | opcode_index;
      if (opcode == kExprS128Const || opcode == kExprI8x16Shuffle) {
  return const_slots_size;
  if (opcode == kExprEnd && block_data.IsElse()) {
  if (!is_try_catch) {
  if (is_try_catch && (opcode == kExprEnd || opcode == kExprDelegate)) {
    int32_t try_block_index =
  uint32_t final_stack_size =
      uint32_t prefixed_opcode_length;
      std::tie(opcode, prefixed_opcode_length) =
      len = prefixed_opcode_length;
    case kExprUnreachable:
      std::optional<wasm::ValueType> wasm_return_type =
          GetWasmReturnTypeFromSignature(&imm.sig);
      DCHECK(wasm_return_type.has_value());
          wasm_return_type.value().raw_bit_field();
    case kExprBrOnNonNull:
    case kExprDelegate: {
    case kExprBrTable: {
      len = static_cast<int>(1 + iterator.pc() - imm.start);
    case kExprCallFunction:
    case kExprReturnCall: {
    case kExprCallIndirect:
    case kExprReturnCallIndirect: {
    case kExprSelectWithType: {
    case kExprLocalGet: {
    case kExprLocalSet: {
    case kExprLocalTee: {
    case kExprGlobalGet: {
    case kExprGlobalSet: {
    case kExprTableGet: {
    case kExprTableSet: {
#define LOAD_CASE(name, ctype, mtype, rep, type) \
  case kExpr##name: { \
    MemoryAccessImmediate imm( \
        &decoder, wasm_code_->at(pc + 1), sizeof(ctype), \
        Decoder::kNoValidation); \
    len = 1 + imm.length; \
    optional.offset = imm.offset; \

#define STORE_CASE(name, ctype, mtype, rep, type) \
  case kExpr##name: { \
    MemoryAccessImmediate imm( \
        &decoder, wasm_code_->at(pc + 1), sizeof(ctype), \
        Decoder::kNoValidation); \
    len = 1 + imm.length; \
    optional.offset = imm.offset; \
    case kExprMemorySize: {
    case kExprMemoryGrow: {
    case kExprI32Const: {
    case kExprI64Const: {
    case kExprF32Const: {
    case kExprF64Const: {
#define EXECUTE_BINOP(name, ctype, reg, op, type) \
#define EXECUTE_UNOP(name, ctype, reg, op, type) \
#define EXECUTE_UNOP(name, from_ctype, from_type, from_reg, to_ctype, to_type, \
#define EXECUTE_UNOP(name, from_ctype, from_type, to_ctype, to_type, op) \
#define EXECUTE_UNOP(name, from_ctype, from_type, to_ctype, to_type) \
    case kExprRefNull: {
    case kExprRefIsNull:
    case kExprRefAsNonNull: {
    case kExprRefFunc: {
    case kNumericPrefix:
      bool is_valid_simd_op =
    case kExprReturnCallRef: {
                                             pc_t pc, int* const len) {
    case kExprStructNew:
    case kExprStructNewDefault: {
    case kExprStructGet:
    case kExprStructGetS:
    case kExprStructGetU:
    case kExprStructSet: {
    case kExprArrayNewDefault:
    case kExprArrayGetS:
    case kExprArrayGetU:
    case kExprArrayFill: {
    case kExprArrayNewFixed: {
      *len += array_imm.length;
    case kExprArrayNewData:
    case kExprArrayNewElem:
    case kExprArrayInitData:
    case kExprArrayInitElem: {
      *len += array_imm.length;
    case kExprArrayCopy: {
      *len += dest_array_imm.length;
      *len += src_array_imm.length;
    case kExprAnyConvertExtern:
    case kExprExternConvertAny:
    case kExprRefCastNull:
    case kExprRefTestNull: {
    case kExprBrOnCastFail: {
      *len += flags_imm.length;
      *len += source_imm.length;
      *len += target_imm.length;
      FATAL("Unknown or unimplemented opcode #%d:%s", code->start[pc],
#define DECODE_UNOP(name, from_ctype, from_type, from_reg, to_ctype, to_type, \
    case kExprMemoryInit: {
    case kExprDataDrop: {
    case kExprMemoryCopy: {
    case kExprMemoryFill: {
    case kExprTableInit: {
    case kExprElemDrop: {
    case kExprTableCopy: {
    case kExprTableGrow: {
    case kExprTableSize: {
    case kExprTableFill: {
      FATAL("Unknown or unimplemented opcode #%d:%s", code->start[pc],
    case kExprAtomicNotify:
    case kExprI32AtomicWait: {
    case kExprI64AtomicWait: {
    case kExprAtomicFence:
#define ATOMIC_BINOP(name, Type, ctype, type, op_ctype, op_type, operation) \
  case kExpr##name: { \
    MachineType memtype = MachineType::Type(); \
    MemoryAccessImmediate imm(decoder, code->at(pc + *len), \
                              ElementSizeLog2Of(memtype.representation()), \
                              Decoder::kNoValidation); \
    optional->offset = imm.offset; \
    *len += imm.length; \
#define ATOMIC_OP(name, Type, ctype, type, op_ctype, op_type) \
  case kExpr##name: { \
    MachineType memtype = MachineType::Type(); \
    MemoryAccessImmediate imm(decoder, code->at(pc + *len), \
                              ElementSizeLog2Of(memtype.representation()), \
                              Decoder::kNoValidation); \
    optional->offset = imm.offset; \
    *len += imm.length; \
      FATAL("Unknown or unimplemented opcode #%d:%s", code->start[pc],
#if !defined(V8_DRUMBRAKE_BOUNDS_CHECKS)
    TRAP(TrapReason::kTrapMemOutOfBounds)
#ifdef DRUMBRAKE_ENABLE_PROFILING
  PrintAndClearProfilingData();
  if (v8_flags.drumbrake_compact_bytecode) {
    printf("Total bytecode size: %zu bytes.\n", total_bytecode_size);
    size_t space_saved_in_bytes =
    double saved_pct = (total_bytecode_size + space_saved_in_bytes == 0)
                           ? 0.0
                           : 100.0 * space_saved_in_bytes /
                                 (total_bytecode_size + space_saved_in_bytes);
    printf("Bytes saved: %zu (%.1f%%).\n", space_saved_in_bytes, saved_pct);
                                         bool is_indirect_call) {
  if (is_indirect_call) {
  bool fast_path =
      sig->parameter_count() > 1 && sig->parameter_count() < 32 &&
  if (sig->parameter_count() == 2) {
    uint32_t copyslot32_two_args_func_id =
        ((kind0 == kI64 || kind0 == kF64) ? 0x01 : 0x00) |
        ((kind1 == kI64 || kind1 == kF64) ? 0x02 : 0x00);
        k_s2s_CopySlot_ll, k_s2s_CopySlot_lq, k_s2s_CopySlot_ql,
    EmitFnId(kCopySlot32TwoArgFuncs[copyslot32_two_args_func_id]);
    uint32_t arg_size_mask = 0;
    for (size_t index = 0; index < sig->parameter_count(); index++) {
        arg_size_mask |= (1 << index);
    for (size_t index = 0; index < sig->parameter_count(); index++) {
    for (size_t index = 0; index < sig->parameter_count(); index++) {
  bool types_unrelated =
  return (types_unrelated &&
bool WasmBytecodeGenerator::HasSideEffects(WasmOpcode opcode) {
    case kExprBrOnNonNull:
    case kExprBrOnCastFail:
    case kExprCallFunction:
    case kExprReturnCall:
    case kExprCallIndirect:
    case kExprReturnCallIndirect:
    case kExprReturnCallRef:
    case kExprS128Const:
    case kExprI8x16Shuffle:
    case kExprUnreachable:
    case kExprSelectWithType:
    case kExprGlobalGet:
    case kExprGlobalSet:
    case kExprI32LoadMem:
    case kExprI32LoadMem8S:
    case kExprI32LoadMem8U:
    case kExprI32LoadMem16S:
    case kExprI32LoadMem16U:
    case kExprI64LoadMem:
    case kExprI64LoadMem8S:
    case kExprI64LoadMem8U:
    case kExprI64LoadMem16S:
    case kExprI64LoadMem16U:
    case kExprI64LoadMem32S:
    case kExprI64LoadMem32U:
    case kExprI32StoreMem:
    case kExprI32StoreMem8:
    case kExprI32StoreMem16:
    case kExprI64StoreMem:
    case kExprI64StoreMem8:
    case kExprI64StoreMem16:
    case kExprI64StoreMem32:
    case kExprMemoryGrow:
    case kExprMemorySize:
    case kExprI32Popcnt:
    case kExprI64Popcnt:
    case kExprF32NearestInt:
    case kExprF32CopySign:
    case kExprF64NearestInt:
    case kExprF64CopySign:
    case kExprI32ConvertI64:
    case kExprI32SConvertF32:
    case kExprI32UConvertF32:
    case kExprI32SConvertF64:
    case kExprI32UConvertF64:
    case kExprI64SConvertI32:
    case kExprI64UConvertI32:
    case kExprI64SConvertF32:
    case kExprI64UConvertF32:
    case kExprI64SConvertF64:
    case kExprI64UConvertF64:
    case kExprF32SConvertI32:
    case kExprF32UConvertI32:
    case kExprF32SConvertI64:
    case kExprF32UConvertI64:
    case kExprF32ConvertF64:
    case kExprF64SConvertI32:
    case kExprF64UConvertI32:
    case kExprF64SConvertI64:
    case kExprF64UConvertI64:
    case kExprF64ConvertF32:
    case kExprI32ReinterpretF32:
    case kExprI64ReinterpretF64:
    case kExprF32ReinterpretI32:
    case kExprF64ReinterpretI64:
    case kExprI32SExtendI8:
    case kExprI32SExtendI16:
    case kExprI64SExtendI8:
    case kExprI64SExtendI16:
    case kExprI64SExtendI32:
    case kExprRefIsNull:
    case kExprRefAsNonNull:
    case kExprStructNew:
    case kExprStructNewDefault:
    case kExprStructGet:
    case kExprStructGetS:
    case kExprStructGetU:
    case kExprStructSet:
    case kExprArrayNewDefault:
    case kExprArrayGetS:
    case kExprArrayGetU:
    case kExprArrayFill:
    case kExprRefCastNull:
    case kExprAnyConvertExtern:
    case kExprExternConvertAny:
    case kExprRefTestNull:
    case kNumericPrefix:
    case kExprI32SConvertSatF32:
    case kExprI32UConvertSatF32:
    case kExprI32SConvertSatF64:
    case kExprI32UConvertSatF64:
    case kExprI64SConvertSatF32:
    case kExprI64UConvertSatF32:
    case kExprI64SConvertSatF64:
    case kExprI64UConvertSatF64:
    case kExprMemoryInit:
    case kExprMemoryCopy:
    case kExprMemoryFill:
    case kExprTableInit:
    case kExprTableCopy:
    case kExprTableGrow:
    case kExprTableSize:
    case kExprTableFill:
    case kExprAtomicNotify:
    case kExprI32AtomicWait:
    case kExprI64AtomicWait:
    case kExprAtomicFence:
    case kExprI32AtomicLoad:
    case kExprI64AtomicLoad:
    case kExprI32AtomicStore:
    case kExprI64AtomicStore:
    case kExprI32AtomicAdd:
    case kExprI64AtomicAdd:
    case kExprI32AtomicSub:
    case kExprI64AtomicSub:
    case kExprI32AtomicAnd:
    case kExprI64AtomicAnd:
    case kExprI32AtomicOr:
    case kExprI64AtomicOr:
    case kExprI32AtomicXor:
    case kExprI64AtomicXor:
    case kExprI32AtomicExchange:
    case kExprI64AtomicExchange:
    case kExprI32AtomicCompareExchange:
    case kExprI64AtomicCompareExchange:
    case kExprS128LoadMem:
    case kExprS128Load8Splat:
    case kExprS128Load16Splat:
    case kExprS128Load32Splat:
    case kExprS128Load64Splat:
    case kExprS128StoreMem:
    case kExprI8x16Swizzle:
    case kExprI8x16Splat:
    case kExprI16x8Splat:
    case kExprI32x4Splat:
    case kExprI64x2Splat:
    case kExprF32x4Splat:
    case kExprF64x2Splat:
    case kExprI8x16ExtractLaneS:
    case kExprI8x16ExtractLaneU:
    case kExprI16x8ExtractLaneS:
    case kExprI16x8ExtractLaneU:
    case kExprI32x4ExtractLane:
    case kExprI64x2ExtractLane:
    case kExprF32x4ExtractLane:
    case kExprF64x2ExtractLane:
    case kExprI8x16ReplaceLane:
    case kExprI16x8ReplaceLane:
    case kExprI32x4ReplaceLane:
    case kExprI64x2ReplaceLane:
    case kExprF32x4ReplaceLane:
    case kExprF64x2ReplaceLane:
    case kExprS128AndNot:
    case kExprS128Select:
    case kExprV128AnyTrue:
    case kExprS128Load8Lane:
    case kExprS128Load16Lane:
    case kExprS128Load32Lane:
    case kExprS128Load64Lane:
    case kExprS128Store8Lane:
    case kExprS128Store16Lane:
    case kExprS128Store32Lane:
    case kExprS128Store64Lane:
    case kExprS128Load32Zero:
    case kExprS128Load64Zero:
    case kExprF32x4DemoteF64x2Zero:
    case kExprI32x4AllTrue:
    case kExprI32x4BitMask:
    case kExprI32x4SConvertI16x8Low:
    case kExprI32x4ExtMulLowI16x8S:
    case kExprI32x4ExtMulHighI16x8S:
    case kExprI32x4ExtMulLowI16x8U:
    case kExprI32x4ExtMulHighI16x8U:
    case kExprI64x2AllTrue:
    case kExprI64x2BitMask:
    case kExprI64x2SConvertI32x4Low:
    case kExprI64x2SConvertI32x4High:
    case kExprI64x2UConvertI32x4Low:
    case kExprI64x2UConvertI32x4High:
    case kExprF32x4Sqrt:
    case kExprF64x2ConvertLowI32x4S:
    case kExprF64x2ConvertLowI32x4U:
    case kExprI8x16RelaxedSwizzle:
    case kExprI32x4RelaxedTruncF32x4S:
    case kExprI32x4RelaxedTruncF32x4U:
    case kExprI32x4RelaxedTruncF64x2SZero:
    case kExprI32x4RelaxedTruncF64x2UZero:
    case kExprF32x4Qfma:
    case kExprF32x4Qfms:
    case kExprF64x2Qfma:
    case kExprF64x2Qfms:
    case kExprI8x16RelaxedLaneSelect:
    case kExprI16x8RelaxedLaneSelect:
    case kExprI32x4RelaxedLaneSelect:
    case kExprI64x2RelaxedLaneSelect:
    case kExprF32x4RelaxedMin:
    case kExprF32x4RelaxedMax:
    case kExprF64x2RelaxedMin:
    case kExprF64x2RelaxedMax:
    case kExprI16x8RelaxedQ15MulRS:
    case kExprI16x8DotI8x16I7x16S:
    case kExprI32x4DotI8x16I7x16AddS:
    case kExprF16x8Splat:
    case kExprF16x8ExtractLane:
    case kExprF16x8ReplaceLane:
    case kExprF16x8Sqrt:
    case kExprF16x8Ceil:
    case kExprF16x8Floor:
    case kExprF16x8Trunc:
    case kExprF16x8NearestInt:
    case kExprF16x8Pmin:
    case kExprF16x8Pmax:
    case kExprI16x8SConvertF16x8:
    case kExprI16x8UConvertF16x8:
    case kExprF16x8SConvertI16x8:
    case kExprF16x8UConvertI16x8:
    case kExprF16x8DemoteF32x4Zero:
    case kExprF16x8DemoteF64x2Zero:
    case kExprF32x4PromoteLowF16x8:
    case kExprF16x8Qfma:
    case kExprF16x8Qfms:
    case kExprNopForTestingUnsupportedInLiftoff:
    case kExprI32AsmjsDivS:
    case kExprI32AsmjsDivU:
    case kExprI32AsmjsRemS:
    case kExprI32AsmjsRemU:
    case kExprI32AsmjsSConvertF32:
    case kExprI32AsmjsUConvertF32:
    case kExprI32AsmjsSConvertF64:
    case kExprI32AsmjsUConvertF64:
    case kExprRefCastNop:
9266 case kExprStringNewUtf8:
9267 case kExprStringNewWtf16:
9268 case kExprStringConst:
9269 case kExprStringMeasureUtf8:
9270 case kExprStringMeasureWtf8:
9271 case kExprStringMeasureWtf16:
9272 case kExprStringEncodeUtf8:
9273 case kExprStringEncodeWtf16:
9274 case kExprStringConcat:
9276 case kExprStringIsUSVSequence:
9277 case kExprStringNewLossyUtf8:
9278 case kExprStringNewWtf8:
9279 case kExprStringEncodeLossyUtf8:
9280 case kExprStringEncodeWtf8:
9281 case kExprStringNewUtf8Try:
9282 case kExprStringAsWtf8:
9283 case kExprStringViewWtf8Advance:
9284 case kExprStringViewWtf8EncodeUtf8:
9285 case kExprStringViewWtf8Slice:
9286 case kExprStringViewWtf8EncodeLossyUtf8:
9287 case kExprStringViewWtf8EncodeWtf8:
9288 case kExprStringAsWtf16:
9289 case kExprStringViewWtf16Length:
9290 case kExprStringViewWtf16GetCodeunit:
9291 case kExprStringViewWtf16Encode:
9292 case kExprStringViewWtf16Slice:
9293 case kExprStringAsIter:
9294 case kExprStringViewIterNext:
9295 case kExprStringViewIterAdvance:
9296 case kExprStringViewIterRewind:
9297 case kExprStringViewIterSlice:
9298 case kExprStringCompare:
9299 case kExprStringFromCodePoint:
9300 case kExprStringHash:
9301 case kExprStringNewUtf8Array:
9302 case kExprStringNewWtf16Array:
9303 case kExprStringEncodeUtf8Array:
9304 case kExprStringEncodeWtf16Array:
9305 case kExprStringNewLossyUtf8Array:
9306 case kExprStringNewWtf8Array:
9307 case kExprStringEncodeLossyUtf8Array:
9308 case kExprStringEncodeWtf8Array:
9309 case kExprStringNewUtf8ArrayTry:
9312 case kExprF32LoadMemF16:
9313 case kExprF32StoreMemF16:
  if (!v8_flags.drumbrake_compact_bytecode) {
    // ...
  }
  size_t current_instr_code_offset = code_.size();
  size_t current_slots_size = slots_.size();
  // ...
  code_.resize(current_instr_code_offset);
  slots_.resize(current_slots_size);
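  // Sketch (illustration only; not part of the original source): the two
  // size snapshots above implement DrumBrake's try-and-rollback encoding.
  // An instruction is first emitted in the compact form; if that encoding
  // fails, the partially emitted bytes are discarded and the large form is
  // emitted instead. Assuming hypothetical TryEmitCompact()/EmitLarge()
  // helpers, the pattern is:
  //
  //   size_t saved_code_size = code_.size();
  //   size_t saved_slots_size = slots_.size();
  //   if (!TryEmitCompact(instr)) {
  //     code_.resize(saved_code_size);   // drop the partial compact encoding
  //     slots_.resize(saved_slots_size);
  //     EmitLarge(instr);                // fall back to the large layout
  //   }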
  if (instr.opcode == kExprBlock || instr.opcode == kExprLoop ||
      instr.opcode == kExprIf || instr.opcode == kExprTry) {
    // ...
  } else if (instr.opcode == kExprEnd || instr.opcode == kExprDelegate) {
    // ...
  } else if (instr.opcode == kExprElse || instr.opcode == kExprCatch ||
             instr.opcode == kExprCatchAll) {
    // ...
  }
  if (v8_flags.drumbrake_register_optimization) {
    switch (next_reg_mode) {
      // ...
    }
  }

#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_bytecode_generator) {
    printf("PRE @%-3u: %-24s: %3s %-7s -> %-7s\n", instr.pc, /* ... */);
  }
  if (v8_flags.trace_drumbrake_execution) {
    // ...
  }
#endif  // V8_ENABLE_DRUMBRAKE_TRACING

  switch (instr.opcode) {
    case kExprUnreachable: {
      // ...
      // ... ancestor_try_block_index);
    }
    case kExprCatch:
    case kExprCatchAll: {
      // ...
      int32_t catch_block_index = /* ... */;
      // ...
      uint32_t first_param_slot_index = UINT_MAX;
      uint32_t first_ref_param_slot_index = UINT_MAX;
      if (instr.opcode == kExprCatch) {
        // ...
        for (size_t i = 0; i < sig->parameter_count(); ++i) {
          // ...
          if (first_param_slot_index == UINT_MAX) {
            first_param_slot_index = slot_index;
          }
          if (/* ... */ first_ref_param_slot_index == UINT_MAX) {
            first_ref_param_slot_index = slot_index;
          }
        }
      }
      // ...
      blocks_[catch_block_index].first_block_index_ =
          blocks_[try_block_index].first_block_index_;
      // ...
      if (instr.opcode == kExprCatch) {
        // ... (emitted arguments include:
        //      first_param_slot_index == UINT_MAX
        //          ? /* ... */
        //          : slots_[first_param_slot_index].slot_offset,
        //      first_ref_param_slot_index == UINT_MAX
        //          ? /* ... */
        //          : slots_[first_ref_param_slot_index].ref_stack_index,
        //      static_cast<int>(code_.size()));
      } else {
        // ... static_cast<int>(code_.size()));
      }
      // ...
    }
    case kExprDelegate: {
      // ...
      if (target_block_index > 0) {
        // ...
        delegated_try_block_index =
            target_block.IsTry() ? target_block_index : /* ... */;
      }
      // ...
    }

    // ... (the exception's arguments are copied from the stack):
      size_t stack_index = stack_.size() - sig->parameter_count();
      for (size_t index = 0; index < sig->parameter_count();
           index++, stack_index++) {
        // ...
        uint32_t ref_index = slots_[stack_[stack_index]].ref_stack_index;
        Emit(&ref_index, sizeof(uint32_t));
        // ...
      }

    case kExprRethrow: {
      // ...
      // ... blocks_[target_branch_index].IsCatchAll());
      Emit(&target_branch_index, sizeof(int32_t));
      // ...
    }
      if (params_count > 0) {
        // ...
      }
      // ...
      Emit(&if_false_code_offset, sizeof(if_false_code_offset));
      // ... (placeholder patched later at:)
      //     reinterpret_cast<Address>(code_.data() + if_false_code_offset),
      // ...

    case kExprBrOnNull: {
      // ...
      Emit(&if_false_code_offset, sizeof(if_false_code_offset));
      // ... (placeholder patched later at:)
      //     reinterpret_cast<Address>(code_.data() + if_false_code_offset),
      // ...
    }

    case kExprBrOnNonNull: {
      // ...
      Emit(&if_false_code_offset, sizeof(if_false_code_offset));
      // ... (placeholder patched later at:)
      //     reinterpret_cast<Address>(code_.data() + if_false_code_offset),
      // ...
    }
    case kExprBrOnCast: {
      // ...
      const int32_t target_branch_index = /* ... */;
      // ...
      HeapType br_on_cast_data_target_type(/* ... */);
      // ...
      Emit(&no_branch_code_offset, sizeof(no_branch_code_offset));
      // ... (placeholder patched later at:)
      //     reinterpret_cast<Address>(code_.data() + no_branch_code_offset),
      // ...
    }

    case kExprBrOnCastFail: {
      // ...
      int32_t target_branch_index = /* ... */;
      // ...
      HeapType br_on_cast_data_target_type = /* ... */;
      // ...
      Emit(&no_branch_code_offset, sizeof(no_branch_code_offset));
      // ... (placeholder patched later at:)
      //     reinterpret_cast<Address>(code_.data() + no_branch_code_offset),
      // ...
    }
    case kExprBrTable: {
      // ...
      const uint32_t labels_count = instr.optional.br_table.table_count;
      // ...
      for (uint32_t i = 0; i <= labels_count; i++) {
        // ...
        Emit(&label_offset, sizeof(label_offset));
      }
      // ...
      for (uint32_t i = 0; i <= labels_count; i++) {
        // ...
        uint32_t label_offset = labels_offset_start + i * sizeof(uint32_t);
        int32_t delta = branch_code_start - label_offset;
        // ... (patched at:)
        //     reinterpret_cast<Address>(code_.data() + label_offset), delta);
      }
      // ...
    }
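    // Sketch (illustration only; not part of the original source): br_table
    // first reserves labels_count + 1 placeholder offsets, then patches each
    // entry with the signed distance from the entry itself to its branch
    // code. Assuming the patch uses the same unaligned-write helper as the
    // block-offset patching later in this file, the step amounts to:
    //
    //   uint32_t label_offset = labels_offset_start + i * sizeof(uint32_t);
    //   int32_t delta = branch_code_start - label_offset;
    //   base::WriteUnalignedValue<int32_t>(
    //       reinterpret_cast<Address>(code_.data() + label_offset), delta);
    //
    // so that, at run time, adding an entry's delta to the entry's own
    // position yields the branch target.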
    case kExprCallFunction:
    case kExprReturnCall: {
      // ...
      const bool is_tail_call = (instr.opcode == kExprReturnCall);
      // ...
      std::vector<uint32_t> rets_slots;
      rets_slots.resize(sig->return_count());
      for (size_t index = 0; index < sig->return_count(); index++) {
        rets_slots[index] =
            is_tail_call ? static_cast<uint32_t>(index) : /* ... */;
      }
      // ...
      for (size_t index = sig->parameter_count(); index > 0; index--) {
        Pop(sig->GetParam(index - 1).kind(), false);
      }
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
      if (v8_flags.trace_drumbrake_execution) {
        // ...
      }
#endif  // V8_ENABLE_DRUMBRAKE_TRACING
      if (!is_tail_call) {
        // ...
        for (size_t index = 0; index < sig->return_count(); index++) {
          // ...
        }
      }
      // ...
      if (is_tail_call) {
        // ...
      }
      // ...
    }
    case kExprCallIndirect:
    case kExprReturnCallIndirect: {
      // ...
      const bool is_tail_call = (instr.opcode == kExprReturnCallIndirect);
      // ...
      std::vector<uint32_t> rets_slots;
      rets_slots.resize(sig->return_count());
      for (size_t index = 0; index < sig->return_count(); index++) {
        rets_slots[index] =
            is_tail_call ? static_cast<uint32_t>(index) : /* ... */;
      }
      // ...
      bool is_table64 =
          module_->tables[instr.optional.indirect_call.table_index]
              .is_table64();
      if (is_tail_call) {
        // ... ( ..., s2s_ReturnCallIndirect64, is_table64, ... );
      } else {
        // ... ( ..., is_table64, instr.pc);
      }
      // ...
      for (size_t index = sig->parameter_count(); index > 0; index--) {
        Pop(sig->GetParam(index - 1).kind(), false);
      }
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
      if (v8_flags.trace_drumbrake_execution) {
        // ...
      }
#endif  // V8_ENABLE_DRUMBRAKE_TRACING
      if (!is_tail_call) {
        // ...
        for (size_t index = 0; index < sig->return_count(); index++) {
          // ...
        }
      }
      // ...
      if (is_tail_call) {
        // ...
      }
      // ...
    }
    case kExprCallRef:
    case kExprReturnCallRef: {
      // ...
      const bool is_tail_call = (instr.opcode == kExprReturnCallRef);
      // ...
      std::vector<uint32_t> rets_slots;
      rets_slots.resize(sig->return_count());
      for (size_t index = 0; index < sig->return_count(); index++) {
        rets_slots[index] =
            is_tail_call ? static_cast<uint32_t>(index) : /* ... */;
      }
      // ...
      if (is_tail_call) {
        // ...
      }
      // ...
      for (size_t index = sig->parameter_count(); index > 0; index--) {
        Pop(sig->GetParam(index - 1).kind(), false);
      }
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
      if (v8_flags.trace_drumbrake_execution) {
        // ...
      }
#endif  // V8_ENABLE_DRUMBRAKE_TRACING
      if (!is_tail_call) {
        // ...
        for (size_t index = 0; index < sig->return_count(); index++) {
          // ...
        }
      }
      // ...
      if (is_tail_call) {
        // ...
      }
      // ...
    }
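    // Sketch (illustration only; not part of the original source): the three
    // call cases above share one emission shape. Schematically, with names
    // taken from the surrounding code:
    //
    //   std::vector<uint32_t> rets_slots(sig->return_count());
    //   for (size_t i = 0; i < sig->return_count(); i++) {
    //     // A tail call reuses the caller's return slots directly.
    //     rets_slots[i] = is_tail_call ? static_cast<uint32_t>(i)
    //                                  : /* a newly created slot */ 0;
    //   }
    //   for (size_t i = sig->parameter_count(); i > 0; i--) {
    //     Pop(sig->GetParam(i - 1).kind(), false);  // arguments are consumed
    //   }
    //   if (!is_tail_call) {
    //     // Only a regular call leaves its results on the caller's stack.
    //     for (size_t i = 0; i < sig->return_count(); i++) { /* push */ }
    //   }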
      // ...
      switch (top_stack_slot_type) {
        // ...
      }
      // ...
    case kExprSelectWithType: {
      // ...
    }
    case kExprLocalGet: {
      // ...
    }
    case kExprLocalSet: {
      // ...
      DCHECK(CheckEqualKind(kind, top_stack_slot_type));
      // ... ( ..., instr.optional.index, false, false);
    }
    case kExprLocalTee: {
      // ...
      DCHECK(CheckEqualKind(kind, top_stack_slot_type));
      // ... ( ..., instr.optional.index, true, false);
    }
    case kExprGlobalGet: {
      // ...
    }
    case kExprGlobalSet: {
      switch (top_stack_slot_type) {
        // ...
      }
    }
    case kExprTableGet: {
      bool is_table64 = module_->tables[instr.optional.index].is_table64();
      // ...
    }
    case kExprTableSet: {
      bool is_table64 = module_->tables[instr.optional.index].is_table64();
      // ...
    }
#define LOAD_CASE(name, ctype, mtype, rep, type) \
  case kExpr##name: { \
    /* ... */ \
    EMIT_MEM64_INSTR_HANDLER_WITH_PC(r2r_##name, r2r_##name##_Idx64, \
                                     is_memory64_, instr.pc); \
    EmitMemoryOffset(instr.optional.offset); \
    return RegMode::k##type##Reg; \
    /* ... */ \
    EMIT_MEM64_INSTR_HANDLER_WITH_PC(r2s_##name, r2s_##name##_Idx64, \
                                     is_memory64_, instr.pc); \
    EmitMemoryOffset(instr.optional.offset); \
    /* ... */ \
    return RegMode::kNoReg; \
    /* ... */ \
    EMIT_MEM64_INSTR_HANDLER_WITH_PC(s2r_##name, s2r_##name##_Idx64, \
                                     is_memory64_, instr.pc); \
    EmitMemoryOffset(instr.optional.offset); \
    /* ... */ \
    return RegMode::k##type##Reg; \
    /* ... */ \
    EMIT_MEM64_INSTR_HANDLER_WITH_PC(s2s_##name, s2s_##name##_Idx64, \
                                     is_memory64_, instr.pc); \
    EmitMemoryOffset(instr.optional.offset); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#define STORE_CASE(name, ctype, mtype, rep, type) \
  case kExpr##name: { \
    /* ... */ \
    EMIT_MEM64_INSTR_HANDLER_WITH_PC(r2s_##name, r2s_##name##_Idx64, \
                                     is_memory64_, instr.pc); \
    EmitMemoryOffset(instr.optional.offset); \
    /* ... */ \
    return RegMode::kNoReg; \
    /* ... */ \
    EMIT_MEM64_INSTR_HANDLER_WITH_PC(s2s_##name, s2s_##name##_Idx64, \
                                     is_memory64_, instr.pc); \
    /* ... */ \
    EmitMemoryOffset(instr.optional.offset); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
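// Note (added for illustration; not part of the original source): the
// r2r_/r2s_/s2r_/s2s_ prefixes selected by LOAD_CASE and STORE_CASE name
// where an instruction's input comes from and where its result goes: 'r' is
// the implicit register (r0 for integers, fp0 for floats), 's' is a stack
// slot. A load emitted in s2r mode, for instance, takes its address operand
// from a slot and leaves the loaded value in the register, which is why that
// branch returns RegMode::k##type##Reg while the s2s branch returns
// RegMode::kNoReg.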
    case kExprMemoryGrow: {
      // ...
    }
    case kExprMemorySize:
      // ...
    case kExprI32Const: {
      // ...
    }
    case kExprI64Const: {
      // ...
    }
    case kExprF32Const: {
      // ...
    }
    case kExprF64Const: {
      // ...
    }
#define EXECUTE_BINOP(name, ctype, reg, op, type) \
  case kExpr##name: { \
    /* ... */ \
    EMIT_INSTR_HANDLER(r2r_##name); \
    /* ... */ \
    return RegMode::kI32Reg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(r2s_##name); \
    /* ... */ \
    return RegMode::kNoReg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(s2r_##name); \
    /* ... */ \
    return RegMode::kI32Reg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(s2s_##name); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#undef EXECUTE_BINOP

#define EXECUTE_BINOP(name, ctype, reg, op, type) \
  case kExpr##name: { \
    /* ... */ \
    EMIT_INSTR_HANDLER(r2r_##name); \
    /* ... */ \
    return RegMode::k##type##Reg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(r2s_##name); \
    /* ... */ \
    return RegMode::kNoReg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(s2r_##name); \
    /* ... */ \
    return RegMode::k##type##Reg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(s2s_##name); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#undef EXECUTE_BINOP

#define EXECUTE_BINOP(name, ctype, reg, op, type) \
  case kExpr##name: { \
    /* ... */ \
    EMIT_INSTR_HANDLER_WITH_PC(r2r_##name, instr.pc); \
    /* ... */ \
    return RegMode::k##type##Reg; \
    /* ... */ \
    EMIT_INSTR_HANDLER_WITH_PC(r2s_##name, instr.pc); \
    /* ... */ \
    return RegMode::kNoReg; \
    /* ... */ \
    EMIT_INSTR_HANDLER_WITH_PC(s2r_##name, instr.pc); \
    /* ... */ \
    return RegMode::k##type##Reg; \
    /* ... */ \
    EMIT_INSTR_HANDLER_WITH_PC(s2s_##name, instr.pc); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#undef EXECUTE_BINOP
#define EXECUTE_UNOP(name, ctype, reg, op, type) \
  case kExpr##name: { \
    /* ... */ \
    EMIT_INSTR_HANDLER(r2r_##name); \
    return RegMode::k##type##Reg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(r2s_##name); \
    /* ... */ \
    return RegMode::kNoReg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(s2r_##name); \
    /* ... */ \
    return RegMode::k##type##Reg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(s2s_##name); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...

#define EXECUTE_UNOP(name, from_ctype, from_type, from_reg, to_ctype, \
                     to_type, /* ... */) \
  case kExpr##name: { \
    /* ... */ \
    EMIT_INSTR_HANDLER(r2r_##name); \
    return RegMode::k##to_type##Reg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(r2s_##name); \
    /* ... */ \
    return RegMode::kNoReg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(s2r_##name); \
    from_type##Pop(); \
    return RegMode::k##to_type##Reg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(s2s_##name); \
    from_type##Pop(); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...

#define EXECUTE_UNOP(name, from_ctype, from_type, from_reg, to_ctype, \
                     to_type, /* ... */) \
  case kExpr##name: { \
    /* ... */ \
    EMIT_INSTR_HANDLER_WITH_PC(r2r_##name, instr.pc); \
    return RegMode::k##to_type##Reg; \
    /* ... */ \
    EMIT_INSTR_HANDLER_WITH_PC(r2s_##name, instr.pc); \
    /* ... */ \
    return RegMode::kNoReg; \
    /* ... */ \
    EMIT_INSTR_HANDLER_WITH_PC(s2r_##name, instr.pc); \
    from_type##Pop(); \
    return RegMode::k##to_type##Reg; \
    /* ... */ \
    EMIT_INSTR_HANDLER_WITH_PC(s2s_##name, instr.pc); \
    from_type##Pop(); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...

#define EXECUTE_UNOP(name, from_ctype, from_type, to_ctype, to_type, op) \
  case kExpr##name: { \
    /* ... */ \
    EMIT_INSTR_HANDLER(r2r_##name); \
    return RegMode::k##to_type##Reg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(r2s_##name); \
    /* ... */ \
    return RegMode::kNoReg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(s2r_##name); \
    from_type##Pop(); \
    return RegMode::k##to_type##Reg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(s2s_##name); \
    from_type##Pop(); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...

#define EXECUTE_UNOP(name, from_ctype, from_type, to_ctype, to_type) \
  case kExpr##name: { \
    /* ... */ \
    EMIT_INSTR_HANDLER(r2r_##name); \
    return RegMode::k##to_type##Reg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(r2s_##name); \
    /* ... */ \
    return RegMode::kNoReg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(s2r_##name); \
    from_type##Pop(); \
    return RegMode::k##to_type##Reg; \
    /* ... */ \
    EMIT_INSTR_HANDLER(s2s_##name); \
    from_type##Pop(); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
    case kExprRefNull: {
      // ...
    }
    case kExprRefIsNull:
      // ...
    case kExprRefFunc: {
      // ...
    }
    case kExprRefAsNonNull: {
      // ...
    }
    case kExprStructNew: {
      // ...
    }
    case kExprStructNewDefault: {
      // ...
    }
    case kExprStructGet:
    case kExprStructGetS:
    case kExprStructGetU: {
      // ...
      uint32_t field_index = instr.optional.gc_field_immediate.field_index;
      // ...
    }
    case kExprStructSet: {
      // ...
      uint32_t field_index = instr.optional.gc_field_immediate.field_index;
      // ...
    }
    case kExprArrayNew: {
      uint32_t array_index = instr.optional.gc_array_new_fixed.array_index;
      // ...
    }
    case kExprArrayNewFixed: {
      // ...
      uint32_t length = instr.optional.gc_array_new_fixed.length;
      uint32_t array_index = instr.optional.gc_array_new_fixed.array_index;
      // ...
      for (uint32_t i = 0; i < length; i++) {
        // ...
      }
      // ...
    }
    case kExprArrayNewDefault: {
      // ...
    }
    case kExprArrayNewData: {
      // ...
      uint32_t array_index =
          instr.optional.gc_array_new_or_init_data.array_index;
      // ...
      uint32_t data_index = instr.optional.gc_array_new_or_init_data.data_index;
      // ...
    }
    case kExprArrayNewElem: {
      // ...
      uint32_t array_index =
          instr.optional.gc_array_new_or_init_data.array_index;
      // ...
      uint32_t data_index = instr.optional.gc_array_new_or_init_data.data_index;
      // ...
    }
    case kExprArrayInitData: {
      // ...
      uint32_t array_index =
          instr.optional.gc_array_new_or_init_data.array_index;
      // ...
      uint32_t data_index = instr.optional.gc_array_new_or_init_data.data_index;
      // ...
    }
    case kExprArrayInitElem: {
      // ...
      uint32_t array_index =
          instr.optional.gc_array_new_or_init_data.array_index;
      // ...
      uint32_t data_index = instr.optional.gc_array_new_or_init_data.data_index;
      // ...
    }
    case kExprArrayLen: {
      // ...
    }
    case kExprArrayCopy: {
      // ...
    }
    case kExprArrayGet:
    case kExprArrayGetS:
    case kExprArrayGetU: {
      // ...
    }
    case kExprArraySet: {
      // ...
    }
    case kExprArrayFill: {
      // ...
    }
    case kExprRefI31: {
      // ...
    }
    case kExprI31GetS: {
      // ...
    }
    case kExprI31GetU: {
      // ...
    }
    case kExprRefCast:
    case kExprRefCastNull: {
      // ... instr.optional.gc_heap_type_immediate.heap_type_bit_field);
      // ...
      RefPush(resulting_value_type);
      // ...
      RefPush(resulting_value_type);
      // ...
      if (instr.opcode == kExprRefCast) {
        // ...
      }
      // ...
      RefPush(resulting_value_type);
      // ...
    }
    case kExprRefTest:
    case kExprRefTestNull: {
      // ... instr.optional.gc_heap_type_immediate.heap_type_bit_field);
      // ...
      if (instr.opcode == kExprRefTest) {
        // ...
      }
      // ...
    }
    case kExprAnyConvertExtern: {
      // ...
    }
    case kExprExternConvertAny: {
      // ...
    }
    case kExprMemoryInit:
      // ...
    case kExprDataDrop:
      // ...
    case kExprMemoryCopy:
      // ...
    case kExprMemoryFill:
      // ...
    case kExprTableInit: {
      bool is_table64 = module_->tables[instr.optional.index].is_table64();
      // ... ( ..., is_table64, instr.pc);
      // ...
    }
    case kExprElemDrop:
      // ...
    case kExprTableCopy: {
      bool is_src_table64 =
          module_->tables[instr.optional.table_copy.src_table_index]
              .is_table64();
      bool is_dst_table64 =
          module_->tables[instr.optional.table_copy.dst_table_index]
              .is_table64();
      // ...
      if (is_src_table64) {
        if (is_dst_table64) {
          // ...
        }
        // ...
      } else {
        if (is_dst_table64) {
          // ...
        }
        // ...
      }
      // ...
      is_dst_table64 && is_src_table64 ? I64Pop() : I32Pop();
      // ...
    }
    case kExprTableGrow: {
      bool is_table64 = module_->tables[instr.optional.index].is_table64();
      // ...
    }
    case kExprTableSize: {
      bool is_table64 = module_->tables[instr.optional.index].is_table64();
      // ...
    }
    case kExprTableFill: {
      bool is_table64 = module_->tables[instr.optional.index].is_table64();
      // ...
    }
    case kExprAtomicNotify:
      // ...
    case kExprI32AtomicWait:
      // ...
    case kExprI64AtomicWait:
      // ...
    case kExprAtomicFence:
      // ...
#define ATOMIC_BINOP(name, Type, ctype, type, op_ctype, op_type, operation) \
  case kExpr##name: { \
    EMIT_MEM64_INSTR_HANDLER_WITH_PC(s2s_##name, s2s_##name##_Idx64, \
                                     is_memory64_, instr.pc); \
    /* ... */ \
    EmitMemoryOffset(instr.optional.offset); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...

#define ATOMIC_COMPARE_EXCHANGE_OP(name, Type, ctype, type, op_ctype, op_type) \
  case kExpr##name: { \
    EMIT_MEM64_INSTR_HANDLER_WITH_PC(s2s_##name, s2s_##name##_Idx64, \
                                     is_memory64_, instr.pc); \
    /* ... */ \
    EmitMemoryOffset(instr.optional.offset); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#undef ATOMIC_COMPARE_EXCHANGE_OP

#define ATOMIC_LOAD_OP(name, Type, ctype, type, op_ctype, op_type) \
  case kExpr##name: { \
    EMIT_MEM64_INSTR_HANDLER_WITH_PC(s2s_##name, s2s_##name##_Idx64, \
                                     is_memory64_, instr.pc); \
    EmitMemoryOffset(instr.optional.offset); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#undef ATOMIC_LOAD_OP

#define ATOMIC_STORE_OP(name, Type, ctype, type, op_ctype, op_type) \
  case kExpr##name: { \
    EMIT_MEM64_INSTR_HANDLER_WITH_PC(s2s_##name, s2s_##name##_Idx64, \
                                     is_memory64_, instr.pc); \
    /* ... */ \
    EmitMemoryOffset(instr.optional.offset); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#undef ATOMIC_STORE_OP
#define SPLAT_CASE(format, stype, valType, op_type, num) \
  case kExpr##format##Splat: { \
    EMIT_INSTR_HANDLER(s2s_Simd##format##Splat); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      SPLAT_CASE(F64x2, float64x2, double, F64, 2)
      // ...
      SPLAT_CASE(I8x16, int8x16, int32_t, I32, 16)
      // ...

#define EXTRACT_LANE_CASE(format, stype, op_type, name) \
  case kExpr##format##ExtractLane: { \
    EMIT_INSTR_HANDLER(s2s_Simd##format##ExtractLane); \
    /* ... */ \
    EmitI16Const(instr.optional.simd_lane); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#undef EXTRACT_LANE_CASE

#define EXTRACT_LANE_EXTEND_CASE(format, stype, name, sign, extended_type) \
  case kExpr##format##ExtractLane##sign: { \
    EMIT_INSTR_HANDLER(s2s_Simd##format##ExtractLane##sign); \
    /* ... */ \
    EmitI16Const(instr.optional.simd_lane); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#undef EXTRACT_LANE_EXTEND_CASE
#define BINOP_CASE(op, name, stype, count, expr) \
  case kExpr##op: { \
    EMIT_INSTR_HANDLER(s2s_Simd##op); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      BINOP_CASE(F64x2Add, f64x2, float64x2, 2, a + b)
      BINOP_CASE(F64x2Sub, f64x2, float64x2, 2, a - b)
      BINOP_CASE(F64x2Mul, f64x2, float64x2, 2, a * b)
      // ...
      BINOP_CASE(F64x2Pmin, f64x2, float64x2, 2, std::min(a, b))
      BINOP_CASE(F64x2Pmax, f64x2, float64x2, 2, std::max(a, b))
      BINOP_CASE(F32x4RelaxedMin, f32x4, float32x4, 4, std::min(a, b))
      BINOP_CASE(F32x4RelaxedMax, f32x4, float32x4, 4, std::max(a, b))
      BINOP_CASE(F64x2RelaxedMin, f64x2, float64x2, 2, std::min(a, b))
      BINOP_CASE(F64x2RelaxedMax, f64x2, float64x2, 2, std::max(a, b))
      BINOP_CASE(F32x4Add, f32x4, float32x4, 4, a + b)
      BINOP_CASE(F32x4Sub, f32x4, float32x4, 4, a - b)
      BINOP_CASE(F32x4Mul, f32x4, float32x4, 4, a * b)
      BINOP_CASE(F32x4Div, f32x4, float32x4, 4, a / b)
      // ...
      BINOP_CASE(F32x4Pmin, f32x4, float32x4, 4, std::min(a, b))
      BINOP_CASE(F32x4Pmax, f32x4, float32x4, 4, std::max(a, b))
      BINOP_CASE(I64x2Add, i64x2, int64x2, 2, base::AddWithWraparound(a, b))
      BINOP_CASE(I64x2Sub, i64x2, int64x2, 2, base::SubWithWraparound(a, b))
      // ...
      BINOP_CASE(I32x4Add, i32x4, int32x4, 4, base::AddWithWraparound(a, b))
      BINOP_CASE(I32x4Sub, i32x4, int32x4, 4, base::SubWithWraparound(a, b))
      // ...
      BINOP_CASE(I32x4MinS, i32x4, int32x4, 4, a < b ? a : b)
      BINOP_CASE(I32x4MinU, i32x4, int32x4, 4,
                 static_cast<uint32_t>(a) < static_cast<uint32_t>(b) ? a : b)
      BINOP_CASE(I32x4MaxS, i32x4, int32x4, 4, a > b ? a : b)
      BINOP_CASE(I32x4MaxU, i32x4, int32x4, 4,
                 static_cast<uint32_t>(a) > static_cast<uint32_t>(b) ? a : b)
      BINOP_CASE(S128And, i32x4, int32x4, 4, a & b)
      BINOP_CASE(S128Or, i32x4, int32x4, 4, a | b)
      BINOP_CASE(S128Xor, i32x4, int32x4, 4, a ^ b)
      BINOP_CASE(S128AndNot, i32x4, int32x4, 4, a & ~b)
      BINOP_CASE(I16x8Add, i16x8, int16x8, 8, base::AddWithWraparound(a, b))
      BINOP_CASE(I16x8Sub, i16x8, int16x8, 8, base::SubWithWraparound(a, b))
      // ...
      BINOP_CASE(I16x8MinS, i16x8, int16x8, 8, a < b ? a : b)
      BINOP_CASE(I16x8MinU, i16x8, int16x8, 8,
                 static_cast<uint16_t>(a) < static_cast<uint16_t>(b) ? a : b)
      BINOP_CASE(I16x8MaxS, i16x8, int16x8, 8, a > b ? a : b)
      BINOP_CASE(I16x8MaxU, i16x8, int16x8, 8,
                 static_cast<uint16_t>(a) > static_cast<uint16_t>(b) ? a : b)
      // ...
      BINOP_CASE(I16x8RoundingAverageU, i16x8, int16x8, 8, /* ... */)
      BINOP_CASE(I16x8Q15MulRSatS, i16x8, int16x8, 8, /* ... */)
      BINOP_CASE(I16x8RelaxedQ15MulRS, i16x8, int16x8, 8, /* ... */)
      BINOP_CASE(I8x16Add, i8x16, int8x16, 16, base::AddWithWraparound(a, b))
      BINOP_CASE(I8x16Sub, i8x16, int8x16, 16, base::SubWithWraparound(a, b))
      BINOP_CASE(I8x16MinS, i8x16, int8x16, 16, a < b ? a : b)
      BINOP_CASE(I8x16MinU, i8x16, int8x16, 16,
                 static_cast<uint8_t>(a) < static_cast<uint8_t>(b) ? a : b)
      BINOP_CASE(I8x16MaxS, i8x16, int8x16, 16, a > b ? a : b)
      BINOP_CASE(I8x16MaxU, i8x16, int8x16, 16,
                 static_cast<uint8_t>(a) > static_cast<uint8_t>(b) ? a : b)
      // ...
      BINOP_CASE(I8x16RoundingAverageU, i8x16, int8x16, 16, /* ... */)
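      // Sketch (illustration only; not part of the original source): each
      // BINOP_CASE(op, name, stype, count, expr) line expands to a case that
      // emits one interpreter handler and adjusts the value stack. For the
      // I32x4Add entry the expansion is, schematically:
      //
      //   case kExprI32x4Add: {
      //     EMIT_INSTR_HANDLER(s2s_SimdI32x4Add);
      //     /* pop the two v128 operands, push the v128 result */
      //     return RegMode::kNoReg;
      //   }
      //
      // The expr argument (e.g. base::AddWithWraparound(a, b)) is the
      // per-lane operation that the handler applies at run time.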
#define UNOP_CASE(op, name, stype, count, expr) \
  case kExpr##op: { \
    EMIT_INSTR_HANDLER(s2s_Simd##op); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      UNOP_CASE(F64x2Abs, f64x2, float64x2, 2, std::abs(a))
      UNOP_CASE(F64x2Neg, f64x2, float64x2, 2, -a)
      UNOP_CASE(F64x2Sqrt, f64x2, float64x2, 2, std::sqrt(a))
      UNOP_CASE(F64x2Ceil, f64x2, float64x2, 2,
                (AixFpOpWorkaround<double, &ceil>(a)))
      UNOP_CASE(F64x2Floor, f64x2, float64x2, 2,
                (AixFpOpWorkaround<double, &floor>(a)))
      UNOP_CASE(F64x2Trunc, f64x2, float64x2, 2,
                (AixFpOpWorkaround<double, &trunc>(a)))
      UNOP_CASE(F64x2NearestInt, f64x2, float64x2, 2,
                (AixFpOpWorkaround<double, &nearbyint>(a)))
      UNOP_CASE(F32x4Abs, f32x4, float32x4, 4, std::abs(a))
      UNOP_CASE(F32x4Neg, f32x4, float32x4, 4, -a)
      UNOP_CASE(F32x4Sqrt, f32x4, float32x4, 4, std::sqrt(a))
      UNOP_CASE(F32x4Ceil, f32x4, float32x4, 4,
                (AixFpOpWorkaround<float, &ceilf>(a)))
      UNOP_CASE(F32x4Floor, f32x4, float32x4, 4,
                (AixFpOpWorkaround<float, &floorf>(a)))
      UNOP_CASE(F32x4Trunc, f32x4, float32x4, 4,
                (AixFpOpWorkaround<float, &truncf>(a)))
      UNOP_CASE(F32x4NearestInt, f32x4, float32x4, 4,
                (AixFpOpWorkaround<float, &nearbyintf>(a)))
      // ...
      UNOP_CASE(I64x2Abs, i64x2, int64x2, 2, std::llabs(a))
      UNOP_CASE(I32x4Abs, i32x4, int32x4, 4, std::abs(a))
      UNOP_CASE(S128Not, i32x4, int32x4, 4, ~a)
      // ...
      UNOP_CASE(I16x8Abs, i16x8, int16x8, 8, std::abs(a))
      // ...
      UNOP_CASE(I8x16Abs, i8x16, int8x16, 16, std::abs(a))
      UNOP_CASE(I8x16Popcnt, i8x16, int8x16, 16, /* ... */)
#define BITMASK_CASE(op, name, stype, count) \
  case kExpr##op: { \
    EMIT_INSTR_HANDLER(s2s_Simd##op); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#define CMPOP_CASE(op, name, stype, out_stype, count, expr) \
  case kExpr##op: { \
    EMIT_INSTR_HANDLER(s2s_Simd##op); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      CMPOP_CASE(F64x2Eq, f64x2, float64x2, int64x2, 2, a == b)
      CMPOP_CASE(F64x2Ne, f64x2, float64x2, int64x2, 2, a != b)
      CMPOP_CASE(F64x2Gt, f64x2, float64x2, int64x2, 2, a > b)
      CMPOP_CASE(F64x2Ge, f64x2, float64x2, int64x2, 2, a >= b)
      CMPOP_CASE(F64x2Lt, f64x2, float64x2, int64x2, 2, a < b)
      CMPOP_CASE(F64x2Le, f64x2, float64x2, int64x2, 2, a <= b)
      CMPOP_CASE(F32x4Eq, f32x4, float32x4, int32x4, 4, a == b)
      CMPOP_CASE(F32x4Ne, f32x4, float32x4, int32x4, 4, a != b)
      CMPOP_CASE(F32x4Gt, f32x4, float32x4, int32x4, 4, a > b)
      CMPOP_CASE(F32x4Ge, f32x4, float32x4, int32x4, 4, a >= b)
      CMPOP_CASE(F32x4Lt, f32x4, float32x4, int32x4, 4, a < b)
      CMPOP_CASE(F32x4Le, f32x4, float32x4, int32x4, 4, a <= b)
      CMPOP_CASE(I64x2Eq, i64x2, int64x2, int64x2, 2, a == b)
      CMPOP_CASE(I64x2Ne, i64x2, int64x2, int64x2, 2, a != b)
      CMPOP_CASE(I64x2LtS, i64x2, int64x2, int64x2, 2, a < b)
      CMPOP_CASE(I64x2GtS, i64x2, int64x2, int64x2, 2, a > b)
      CMPOP_CASE(I64x2LeS, i64x2, int64x2, int64x2, 2, a <= b)
      CMPOP_CASE(I64x2GeS, i64x2, int64x2, int64x2, 2, a >= b)
      CMPOP_CASE(I32x4Eq, i32x4, int32x4, int32x4, 4, a == b)
      CMPOP_CASE(I32x4Ne, i32x4, int32x4, int32x4, 4, a != b)
      CMPOP_CASE(I32x4GtS, i32x4, int32x4, int32x4, 4, a > b)
      CMPOP_CASE(I32x4GeS, i32x4, int32x4, int32x4, 4, a >= b)
      CMPOP_CASE(I32x4LtS, i32x4, int32x4, int32x4, 4, a < b)
      CMPOP_CASE(I32x4LeS, i32x4, int32x4, int32x4, 4, a <= b)
      CMPOP_CASE(I32x4GtU, i32x4, int32x4, int32x4, 4,
                 static_cast<uint32_t>(a) > static_cast<uint32_t>(b))
      CMPOP_CASE(I32x4GeU, i32x4, int32x4, int32x4, 4,
                 static_cast<uint32_t>(a) >= static_cast<uint32_t>(b))
      CMPOP_CASE(I32x4LtU, i32x4, int32x4, int32x4, 4,
                 static_cast<uint32_t>(a) < static_cast<uint32_t>(b))
      CMPOP_CASE(I32x4LeU, i32x4, int32x4, int32x4, 4,
                 static_cast<uint32_t>(a) <= static_cast<uint32_t>(b))
      CMPOP_CASE(I16x8Eq, i16x8, int16x8, int16x8, 8, a == b)
      CMPOP_CASE(I16x8Ne, i16x8, int16x8, int16x8, 8, a != b)
      CMPOP_CASE(I16x8GtS, i16x8, int16x8, int16x8, 8, a > b)
      CMPOP_CASE(I16x8GeS, i16x8, int16x8, int16x8, 8, a >= b)
      CMPOP_CASE(I16x8LtS, i16x8, int16x8, int16x8, 8, a < b)
      CMPOP_CASE(I16x8LeS, i16x8, int16x8, int16x8, 8, a <= b)
      CMPOP_CASE(I16x8GtU, i16x8, int16x8, int16x8, 8,
                 static_cast<uint16_t>(a) > static_cast<uint16_t>(b))
      CMPOP_CASE(I16x8GeU, i16x8, int16x8, int16x8, 8,
                 static_cast<uint16_t>(a) >= static_cast<uint16_t>(b))
      CMPOP_CASE(I16x8LtU, i16x8, int16x8, int16x8, 8,
                 static_cast<uint16_t>(a) < static_cast<uint16_t>(b))
      CMPOP_CASE(I16x8LeU, i16x8, int16x8, int16x8, 8,
                 static_cast<uint16_t>(a) <= static_cast<uint16_t>(b))
      CMPOP_CASE(I8x16Eq, i8x16, int8x16, int8x16, 16, a == b)
      CMPOP_CASE(I8x16Ne, i8x16, int8x16, int8x16, 16, a != b)
      CMPOP_CASE(I8x16GtS, i8x16, int8x16, int8x16, 16, a > b)
      CMPOP_CASE(I8x16GeS, i8x16, int8x16, int8x16, 16, a >= b)
      CMPOP_CASE(I8x16LtS, i8x16, int8x16, int8x16, 16, a < b)
      CMPOP_CASE(I8x16LeS, i8x16, int8x16, int8x16, 16, a <= b)
      CMPOP_CASE(I8x16GtU, i8x16, int8x16, int8x16, 16,
                 static_cast<uint8_t>(a) > static_cast<uint8_t>(b))
      CMPOP_CASE(I8x16GeU, i8x16, int8x16, int8x16, 16,
                 static_cast<uint8_t>(a) >= static_cast<uint8_t>(b))
      CMPOP_CASE(I8x16LtU, i8x16, int8x16, int8x16, 16,
                 static_cast<uint8_t>(a) < static_cast<uint8_t>(b))
      CMPOP_CASE(I8x16LeU, i8x16, int8x16, int8x16, 16,
                 static_cast<uint8_t>(a) <= static_cast<uint8_t>(b))
#define REPLACE_LANE_CASE(format, name, stype, ctype, op_type) \
  case kExpr##format##ReplaceLane: { \
    EMIT_INSTR_HANDLER(s2s_Simd##format##ReplaceLane); \
    /* ... */ \
    EmitI16Const(instr.optional.simd_lane); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#undef REPLACE_LANE_CASE

    case kExprS128LoadMem: {
      // ...
    }
    case kExprS128StoreMem: {
      // ...
    }
#define SHIFT_CASE(op, name, stype, count, expr) \
  case kExpr##op: { \
    EMIT_INSTR_HANDLER(s2s_Simd##op); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      SHIFT_CASE(I64x2Shl, i64x2, int64x2, 2,
                 static_cast<uint64_t>(a) << (shift % 64))
      SHIFT_CASE(I64x2ShrS, i64x2, int64x2, 2, a >> (shift % 64))
      SHIFT_CASE(I64x2ShrU, i64x2, int64x2, 2,
                 static_cast<uint64_t>(a) >> (shift % 64))
      SHIFT_CASE(I32x4Shl, i32x4, int32x4, 4,
                 static_cast<uint32_t>(a) << (shift % 32))
      SHIFT_CASE(I32x4ShrS, i32x4, int32x4, 4, a >> (shift % 32))
      SHIFT_CASE(I32x4ShrU, i32x4, int32x4, 4,
                 static_cast<uint32_t>(a) >> (shift % 32))
      SHIFT_CASE(I16x8Shl, i16x8, int16x8, 8,
                 static_cast<uint16_t>(a) << (shift % 16))
      SHIFT_CASE(I16x8ShrS, i16x8, int16x8, 8, a >> (shift % 16))
      SHIFT_CASE(I16x8ShrU, i16x8, int16x8, 8,
                 static_cast<uint16_t>(a) >> (shift % 16))
      SHIFT_CASE(I8x16Shl, i8x16, int8x16, 16,
                 static_cast<uint8_t>(a) << (shift % 8))
      SHIFT_CASE(I8x16ShrS, i8x16, int8x16, 16, a >> (shift % 8))
      SHIFT_CASE(I8x16ShrU, i8x16, int8x16, 16,
                 static_cast<uint8_t>(a) >> (shift % 8))
#define EXT_MUL_CASE(op) \
  case kExpr##op: { \
    EMIT_INSTR_HANDLER(s2s_Simd##op); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#define CONVERT_CASE(op, src_type, name, dst_type, count, start_index, ctype, \
                     /* ... */) \
  case kExpr##op: { \
    EMIT_INSTR_HANDLER(s2s_Simd##op); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      CONVERT_CASE(F32x4SConvertI32x4, int32x4, i32x4, float32x4, 4, 0,
                   int32_t, static_cast<float>(a))
      CONVERT_CASE(F32x4UConvertI32x4, int32x4, i32x4, float32x4, 4, 0,
                   uint32_t, static_cast<float>(a))
      CONVERT_CASE(I32x4SConvertF32x4, float32x4, f32x4, int32x4, 4, 0, float,
                   base::saturated_cast<int32_t>(a))
      CONVERT_CASE(I32x4UConvertF32x4, float32x4, f32x4, int32x4, 4, 0, float,
                   base::saturated_cast<uint32_t>(a))
      CONVERT_CASE(I32x4RelaxedTruncF32x4S, float32x4, f32x4, int32x4, 4, 0,
                   float, base::saturated_cast<int32_t>(a))
      CONVERT_CASE(I32x4RelaxedTruncF32x4U, float32x4, f32x4, int32x4, 4, 0,
                   float, base::saturated_cast<uint32_t>(a))
      CONVERT_CASE(I64x2SConvertI32x4Low, int32x4, i32x4, int64x2, 2, 0,
                   /* ... */)
      CONVERT_CASE(I64x2SConvertI32x4High, int32x4, i32x4, int64x2, 2, 2,
                   /* ... */)
      CONVERT_CASE(I64x2UConvertI32x4Low, int32x4, i32x4, int64x2, 2, 0,
                   /* ... */)
      CONVERT_CASE(I64x2UConvertI32x4High, int32x4, i32x4, int64x2, 2, 2,
                   /* ... */)
      CONVERT_CASE(I32x4SConvertI16x8High, int16x8, i16x8, int32x4, 4, 4,
                   /* ... */)
      CONVERT_CASE(I32x4UConvertI16x8High, int16x8, i16x8, int32x4, 4, 4,
                   /* ... */)
      CONVERT_CASE(I32x4SConvertI16x8Low, int16x8, i16x8, int32x4, 4, 0,
                   /* ... */)
      CONVERT_CASE(I32x4UConvertI16x8Low, int16x8, i16x8, int32x4, 4, 0,
                   /* ... */)
      CONVERT_CASE(I16x8SConvertI8x16High, int8x16, i8x16, int16x8, 8, 8,
                   /* ... */)
      CONVERT_CASE(I16x8UConvertI8x16High, int8x16, i8x16, int16x8, 8, 8,
                   /* ... */)
      CONVERT_CASE(I16x8SConvertI8x16Low, int8x16, i8x16, int16x8, 8, 0,
                   int8_t, /* ... */)
      CONVERT_CASE(I16x8UConvertI8x16Low, int8x16, i8x16, int16x8, 8, 0,
                   /* ... */)
      CONVERT_CASE(F64x2ConvertLowI32x4S, int32x4, i32x4, float64x2, 2, 0,
                   int32_t, static_cast<double>(a))
      CONVERT_CASE(F64x2ConvertLowI32x4U, int32x4, i32x4, float64x2, 2, 0,
                   uint32_t, static_cast<double>(a))
      CONVERT_CASE(I32x4TruncSatF64x2SZero, float64x2, f64x2, int32x4, 2, 0,
                   double, base::saturated_cast<int32_t>(a))
      CONVERT_CASE(I32x4TruncSatF64x2UZero, float64x2, f64x2, int32x4, 2, 0,
                   double, base::saturated_cast<uint32_t>(a))
      CONVERT_CASE(I32x4RelaxedTruncF64x2SZero, float64x2, f64x2, int32x4, 2,
                   0, double, base::saturated_cast<int32_t>(a))
      CONVERT_CASE(I32x4RelaxedTruncF64x2UZero, float64x2, f64x2, int32x4, 2,
                   0, double, base::saturated_cast<uint32_t>(a))
      CONVERT_CASE(F32x4DemoteF64x2Zero, float64x2, f64x2, float32x4, 2, 0,
                   double, DoubleToFloat32(a))
      CONVERT_CASE(F64x2PromoteLowF32x4, float32x4, f32x4, float64x2, 2, 0,
                   float, static_cast<double>(a))
#define PACK_CASE(op, src_type, name, dst_type, count, dst_ctype) \
  case kExpr##op: { \
    EMIT_INSTR_HANDLER(s2s_Simd##op); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      PACK_CASE(I16x8SConvertI32x4, int32x4, i32x4, int16x8, 8, int16_t)
      PACK_CASE(I16x8UConvertI32x4, int32x4, i32x4, int16x8, 8, uint16_t)
      PACK_CASE(I8x16SConvertI16x8, int16x8, i16x8, int8x16, 16, int8_t)
      PACK_CASE(I8x16UConvertI16x8, int16x8, i16x8, int8x16, 16, uint8_t)
      // ...

#define SELECT_CASE(op) \
  case kExpr##op: { \
    EMIT_INSTR_HANDLER(s2s_Simd##op); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
    case kExprI32x4DotI16x8S: {
      // ...
    }
    case kExprS128Const: {
      // ...
    }
    case kExprI16x8DotI8x16I7x16S: {
      // ...
    }
    case kExprI32x4DotI8x16I7x16AddS: {
      // ...
    }
    case kExprI8x16RelaxedSwizzle: {
      // ...
    }
    case kExprI8x16Swizzle: {
      // ...
    }
    case kExprI8x16Shuffle: {
      // ...
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
      TracePushConstSlot(slot_index);
#endif  // V8_ENABLE_DRUMBRAKE_TRACING
      // ...
    }
    case kExprV128AnyTrue: {
      // ...
    }

#define REDUCTION_CASE(op, name, stype, count, operation) \
  case kExpr##op: { \
    EMIT_INSTR_HANDLER(s2s_Simd##op); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#undef REDUCTION_CASE
#define QFM_CASE(op, name, stype, count, operation) \
  case kExpr##op: { \
    EMIT_INSTR_HANDLER(s2s_Simd##op); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      QFM_CASE(F32x4Qfma, f32x4, float32x4, 4, +)
      QFM_CASE(F32x4Qfms, f32x4, float32x4, 4, -)
      QFM_CASE(F64x2Qfma, f64x2, float64x2, 2, +)
      QFM_CASE(F64x2Qfms, f64x2, float64x2, 2, -)
#define LOAD_SPLAT_CASE(op) \
  case kExprS128##op: { \
    EMIT_MEM64_INSTR_HANDLER_WITH_PC( \
        s2s_SimdS128##op, s2s_SimdS128##op##_Idx64, is_memory64_, instr.pc); \
    EmitMemoryOffset(instr.optional.offset); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#undef LOAD_SPLAT_CASE

#define LOAD_EXTEND_CASE(op) \
  case kExprS128##op: { \
    EMIT_MEM64_INSTR_HANDLER_WITH_PC( \
        s2s_SimdS128##op, s2s_SimdS128##op##_Idx64, is_memory64_, instr.pc); \
    EmitMemoryOffset(instr.optional.offset); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#undef LOAD_EXTEND_CASE

#define LOAD_ZERO_EXTEND_CASE(op, load_type) \
  case kExprS128##op: { \
    EMIT_MEM64_INSTR_HANDLER_WITH_PC( \
        s2s_SimdS128##op, s2s_SimdS128##op##_Idx64, is_memory64_, instr.pc); \
    EmitMemoryOffset(instr.optional.offset); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#undef LOAD_ZERO_EXTEND_CASE

#define LOAD_LANE_CASE(op) \
  case kExprS128##op: { \
    EMIT_MEM64_INSTR_HANDLER_WITH_PC( \
        s2s_SimdS128##op, s2s_SimdS128##op##_Idx64, is_memory64_, instr.pc); \
    /* ... */ \
    EmitMemoryOffset(instr.optional.simd_loadstore_lane.offset); \
    /* ... */ \
    EmitI16Const(instr.optional.simd_loadstore_lane.lane); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#undef LOAD_LANE_CASE

#define STORE_LANE_CASE(op) \
  case kExprS128##op: { \
    EMIT_MEM64_INSTR_HANDLER_WITH_PC( \
        s2s_SimdS128##op, s2s_SimdS128##op##_Idx64, is_memory64_, instr.pc); \
    /* ... */ \
    EmitMemoryOffset(instr.optional.simd_loadstore_lane.offset); \
    /* ... */ \
    EmitI16Const(instr.optional.simd_loadstore_lane.lane); \
    return RegMode::kNoReg; \
  }
      // ...
#undef STORE_LANE_CASE

#define EXT_ADD_PAIRWISE_CASE(op) \
  case kExpr##op: { \
    EMIT_INSTR_HANDLER(s2s_Simd##op); \
    /* ... */ \
    return RegMode::kNoReg; \
  }
      // ...
#undef EXT_ADD_PAIRWISE_CASE
    default:
      FATAL("Unknown or unimplemented opcode #%d:%s", /* ... */);
  if (!v8_flags.drumbrake_compact_bytecode) {
    // ...
  }
  size_t current_instr_code_offset = code_.size();
  size_t current_slots_size = slots_.size();
  // ...
  code_.resize(current_instr_code_offset);
  slots_.resize(current_slots_size);
  if (curr_instr.orig >= kExprI32LoadMem &&
      curr_instr.orig <= kExprI64LoadMem32U &&
      next_instr.orig == kExprLocalSet) {
    // ...
    switch (curr_instr.orig) {
      // ...
#define LOAD_CASE(name, ctype, mtype, rep, type) \
  case kExpr##name: { \
    if (reg_mode == RegMode::kNoReg) { \
      EMIT_MEM64_INSTR_HANDLER_WITH_PC(s2s_##name##_LocalSet, \
                                       s2s_##name##_LocalSet_Idx64, \
                                       is_memory64_, curr_instr.pc); \
      EmitMemoryOffset(curr_instr.optional.offset); \
      /* ... */ \
      EmitSlotOffset(slots_[stack_[to_stack_index]].slot_offset); \
      reg_mode = RegMode::kNoReg; \
    } else { \
      EMIT_MEM64_INSTR_HANDLER_WITH_PC(r2s_##name, r2s_##name##_Idx64, \
                                       is_memory64_, curr_instr.pc); \
      EmitMemoryOffset(static_cast<uint64_t>(curr_instr.optional.offset)); \
      EmitSlotOffset(slots_[stack_[to_stack_index]].slot_offset); \
      reg_mode = RegMode::kNoReg; \
    } \
    /* ... */ \
  }
      // ...
    }
  } else if (curr_instr.orig == kExprI32LoadMem &&
             next_instr.orig == kExprI32StoreMem) {
    // ...
  } else if (curr_instr.orig == kExprI64LoadMem &&
             next_instr.orig == kExprI64StoreMem) {
    // ...
  } else if (curr_instr.orig == kExprF32LoadMem &&
             next_instr.orig == kExprF32StoreMem) {
    // ...
  } else if (curr_instr.orig == kExprF64LoadMem &&
             next_instr.orig == kExprF64StoreMem) {
    // ...
  } else if (curr_instr.orig >= kExprI32Const &&
             curr_instr.orig <= kExprF32Const &&
             next_instr.orig == kExprLocalSet) {
    switch (curr_instr.orig) {
      case kExprI32Const: {
        uint32_t from_slot_index = /* ... */;
        // ...
      }
      case kExprI64Const: {
        uint32_t from_slot_index = /* ... */;
        // ...
      }
      case kExprF32Const: {
        uint32_t from_slot_index = /* ... */;
        // ...
      }
      case kExprF64Const: {
        uint32_t from_slot_index = /* ... */;
        // ...
      }
      // ...
    }
  } else if (curr_instr.orig == kExprLocalGet &&
             next_instr.orig >= kExprI32StoreMem &&
             next_instr.orig <= kExprI64StoreMem32) {
    switch (next_instr.orig) {
      // ...
#define STORE_CASE(name, ctype, mtype, rep, type) \
  case kExpr##name: { \
    EMIT_MEM64_INSTR_HANDLER_WITH_PC(s2s_##name, s2s_##name##_Idx64, \
                                     is_memory64_, curr_instr.pc); \
    EmitSlotOffset(slots_[stack_[curr_instr.optional.index]].slot_offset); \
    EmitMemoryOffset(next_instr.optional.offset); \
    /* ... */ \
    reg_mode = RegMode::kNoReg; \
    /* ... */ \
  }
      // ...
    }
  }
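  // Sketch (illustration only; not part of the original source): the chains
  // above form a peephole pass over a two-instruction window (curr_instr,
  // next_instr). Recognized pairs are replaced by one fused handler, saving
  // a dispatch each, e.g.:
  //
  //   if (curr_instr.orig >= kExprI32LoadMem &&
  //       curr_instr.orig <= kExprI64LoadMem32U &&
  //       next_instr.orig == kExprLocalSet) {
  //     // load + local.set  ->  fused s2s_<Load>_LocalSet handler
  //   } else if (curr_instr.orig == kExprI32LoadMem &&
  //              next_instr.orig == kExprI32StoreMem) {
  //     // load + store     ->  combined load/store handler
  //   }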
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  if (v8_flags.trace_drumbrake_bytecode_generator) {
    // ...
  }
#endif  // V8_ENABLE_DRUMBRAKE_TRACING

  // ...
  for (uint32_t index = 0; index < args_count_; index++) {
    // ...
  }
  while (pc < limit) {
    // ...
    if (curr_instr) pc += curr_instr.length;
    // ...
    if (!curr_instr) break;
    // ...
    if (next_instr) pc += next_instr.length;
    // ...
    curr_instr = next_instr;
    // ...
  }
  // ...
  if (pc == limit && curr_instr) {
    // ...
  }
  // ...
  return std::make_unique<WasmBytecode>(/* ... */);
}

// ...
  int32_t block_index = static_cast<int32_t>(blocks_.size());
  // ...
  uint32_t first_block_index = 0;
  size_t rets_slots_count = 0;
  size_t params_slots_count = 0;
  if (block_index > 0 && (opcode != kExprElse && opcode != kExprCatch &&
                          opcode != kExprCatchAll)) {
    // ... ( ..., &params_slots_count);
  }
  // ...
  if (opcode == kExprCatch || opcode == kExprCatchAll) {
    parent_block_index = /* ... */;
  }
  // ... ( ..., signature, first_block_index, rets_slots_count, ...);
  // ...
  if (opcode == kExprIf && params_slots_count > 0) {
    DCHECK_GE(stack_size, params_slots_count);
    blocks_.back().SaveParams(&stack_[stack_size - params_slots_count],
                              params_slots_count);
  }
  // ...
  if (opcode == kExprLoop) {
    // ...
  }
// ...
/* ... */ (bool return_matching_try_for_catch_blocks) const {
  // ...
  while (index >= 0) {
    // ...
    if (block.IsTry()) return index;
    if (return_matching_try_for_catch_blocks &&
        (block.IsCatch() || block.IsCatchAll())) {
      return block.parent_try_block_index_;
    }
    // ...
  }
  // ...
}

// ... ( ..., k_s2s_OnLoopBeginNoRefSlots);
  static const uint32_t kElseBlockStartOffset = /* ... */;
  // ...
  for (int block_index = 0; block_index < static_cast<int>(blocks_.size());
       block_index++) {
    // ...
    if (block_data.IsLoop()) {
      // ...
    }
    // ... ( ... kElseBlockStartOffset;)
    // ...
    target_offset = static_cast<uint32_t>(/* ... */);
    // ...
    int32_t delta = target_offset - current_code_offset;
    // ... (patched at:)
    //     reinterpret_cast<Address>(code_.data() + current_code_offset),
    //     delta);
  }
  if (func_id == k_s2s_CopySlot32 && prev_instr_handler == k_s2s_CopySlot32) {
    // ...
    // ... v8_flags.drumbrake_compact_bytecode);
    // ... (rewrites the previous handler id:)
    //     reinterpret_cast<Address>(prev_instr_addr), k_s2s_CopySlot32x2);
    // ...
  } else if (func_id == k_s2s_CopySlot64 &&
             prev_instr_handler == k_s2s_CopySlot64) {
    // ...
    // ... v8_flags.drumbrake_compact_bytecode);
    // ... (rewrites the previous handler id:)
    //     reinterpret_cast<Address>(prev_instr_addr), k_s2s_CopySlot64x2);
  }
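// Sketch (illustration only; not part of the original source): the merge
// above is another peephole. When two identical slot-copy handlers are
// emitted back to back, the id of the previous handler is overwritten in
// place so that a single x2 handler performs both copies in one dispatch.
// Assuming the same unaligned-write helper used for branch patching:
//
//   if (func_id == k_s2s_CopySlot32 &&
//       prev_instr_handler == k_s2s_CopySlot32) {
//     base::WriteUnalignedValue(
//         reinterpret_cast<Address>(prev_instr_addr), k_s2s_CopySlot32x2);
//   }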
ClearThreadInWasmScope::~ClearThreadInWasmScope() {
  // ...
}
virtual size_t AllocatePageSize()=0
TimeDelta Elapsed() const
void emplace_back(Args &&... args)
double TimesOf(const TimeDelta &other) const
int64_t InMicroseconds() const
static constexpr TimeDelta FromMilliseconds(int64_t milliseconds)
static bool IsHighResolution()
V8_INLINE bool is_identical_to(Handle< S > other) const
static EmbeddedData FromBlob()
Handle< FixedArray > CopyFixedArrayAndGrow(DirectHandle< FixedArray > array, int grow_by, AllocationType allocation=AllocationType::kYoung)
static void Destroy(Address *location)
static void MakeWeak(Address *location, void *parameter, WeakCallbackInfo< void >::Callback weak_callback, v8::WeakCallbackType type)
IndirectHandle< Object > Create(Tagged< Object > value)
V8_INLINE Address * location() const
V8_EXPORT_PRIVATE void AddSample(int sample)
static V8_INLINE constexpr Address IntToSmi(int value)
GlobalHandles * global_handles() const
Tagged< Context > context() const
v8::internal::Factory * factory()
static V8_EXPORT_PRIVATE void AddProperty(Isolate *isolate, DirectHandle< JSObject > object, DirectHandle< Name > name, DirectHandle< Object > value, PropertyAttributes attributes)
constexpr MachineRepresentation representation() const
static constexpr MachineType Uint64()
static constexpr MachineType Uint32()
size_t return_count() const
size_t parameter_count() const
static bool constexpr IsValid(T value)
V8_INLINE constexpr StorageType ptr() const
constexpr int ToInteger() const
static ThreadId Current()
static V8_INLINE Tagged_t CompressObject(Address tagged)
static V8_INLINE Address DecompressTagged(TOnHeapAddress on_heap_addr, Tagged_t raw_value)
static V8_EXPORT_PRIVATE void FatalProcessOutOfMemory(Isolate *isolate, const char *location, const OOMDetails &details=kNoOOMDetails)
static constexpr int MaxLength(uint32_t element_size_bytes)
static DirectHandle< Object > GetExceptionValues(Isolate *isolate, DirectHandle< WasmExceptionPackage > exception_package)
ValueType element_type() const
const uint8_t * pc() const
ClearThreadInWasmScope(Isolate *isolate)
std::pair< uint32_t, uint32_t > read_u32v(const uint8_t *pc, Name< ValidationTag > name="LEB32")
std::pair< WasmOpcode, uint32_t > read_prefixed_opcode(const uint8_t *pc, Name< ValidationTag > name="prefixed opcode")
static constexpr struct v8::internal::wasm::Decoder::NoValidationTag kNoValidation
INSTRUCTION_HANDLER_FUNC Trap(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static constexpr PWasmOp * s_unwind_func_addr
INSTRUCTION_HANDLER_FUNC s2s_Unwind(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_Unreachable(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_TableInit(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_LoadMem_LocalSet(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_ReturnCall(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
CONVERT_CASE(F32x4SConvertI32x4, int32x4, i32x4, float32x4, 4, 0, int32_t, static_cast< float >(a)) CONVERT_CASE(F32x4UConvertI32x4
traits::memory_offset32_t memory_offset32_t
static auto constexpr s2s_I64StructGet
static auto constexpr s2s_SimdS128Load32x2U_Idx64
INSTRUCTION_HANDLER_FUNC s2s_GlobalSet(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_SimdI8x16Swizzle(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_Table64Init(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_I31GetS(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdI16x8ExtMulLowI8x16U
static auto constexpr s2s_SimdS128Load8Splat_Idx64
INSTRUCTION_HANDLER_FUNC s2s_Memory64Grow(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_F64Select
static auto constexpr r2s_F64GlobalSet
INSTRUCTION_HANDLER_FUNC s2s_StructNewDefault(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2s_I32StoreMem_Idx64
static auto constexpr s2s_F32ArrayFill
a a a a DoubleToFloat32(a)) CONVERT_CASE(F64x2PromoteLowF32x4
INSTRUCTION_HANDLER_FUNC s2s_Memory64Copy(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2s_F32GlobalSet
INSTRUCTION_HANDLER_FUNC s2s_RefEq(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I64LoadMem8U_LocalSet_Idx64
INSTRUCTION_HANDLER_FUNC r2s_CopyFp0ToSlot32(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2s_I32LoadMem8S_Idx64
a a a a uint32_t WasmInterpreterRuntime int64_t r0
static auto constexpr s2s_SimdS128Load64Zero
static auto constexpr s2s_I32LoadMem16U_Idx64
INSTRUCTION_HANDLER_FUNC s2s_PreserveCopySlot32(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_DoSimdExtAddPairwise(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_F64LoadStoreMem_Idx64
INSTRUCTION_HANDLER_FUNC s2s_CopySlotMulti(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2r_I64Select
static auto constexpr s2s_SimdI32x4ExtMulHighI16x8U
static auto constexpr r2r_F64Select
INSTRUCTION_HANDLER_FUNC s2r_GlobalGetF(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_CallIndirect64(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2s_I64LoadMem32U_Idx64
INSTRUCTION_HANDLER_FUNC s2s_ArrayInitSegment(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdS128Load64Lane_Idx64
static void StoreRefIntoMemory(Tagged< HeapObject > host, Address dst_addr, uint32_t offset, Tagged< Object > value, WriteBarrierMode mode)
static auto constexpr s2s_SimdS128Load64Splat_Idx64
INSTRUCTION_HANDLER_FUNC s2s_MemoryGrow(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_RefTest
static auto constexpr s2s_SimdI16x8ExtAddPairwiseI8x16S
INSTRUCTION_HANDLER_FUNC r2s_Drop(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
SHIFT_CASE(I64x2Shl, i64x2, int64x2, 2, static_cast< uint64_t >(a)<<(shift % 64)) SHIFT_CASE(I64x2ShrU
static auto constexpr s2s_I64StoreMem16_Idx64
static auto constexpr s2s_S128Drop
INSTRUCTION_HANDLER_FUNC r2s_StoreMemI(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_LoadStoreMem(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I64LoadMem32S_Idx64
INSTRUCTION_HANDLER_FUNC s2s_RefIsNonNull(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I16ArrayFill
static constexpr auto s2s_SimdS128Select
INSTRUCTION_HANDLER_FUNC s2s_ElemDrop(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdS128Load16Lane
INSTRUCTION_HANDLER_FUNC s2s_MemoryFill(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_Select(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_ReturnCallIndirect64(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_TableGrow(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2s_F32LoadMem_Idx64
static auto constexpr s2s_RefTestNull
INSTRUCTION_HANDLER_FUNC s2s_TableSet(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_Table64Fill(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_RefIsNull(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2s_F32Drop
static auto constexpr r2r_I64LoadMem16U_Idx64
INSTRUCTION_HANDLER_FUNC s2s_BranchOnCast(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_BranchIf(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I32LoadStoreMem_Idx64
static auto constexpr s2s_I64AtomicWait_Idx64
static auto constexpr s2s_SimdS128Store16Lane
INSTRUCTION_HANDLER_FUNC s2s_OnLoopBegin(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_RefSelect(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_Rethrow(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_SimdV128AnyTrue(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdS128Load16x4U_Idx64
INSTRUCTION_HANDLER_FUNC s2s_SimdI32x4DotI8x16I7x16AddS(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I32GlobalGet
static auto constexpr s2s_Table64Copy_32_64_32
static auto constexpr r2r_I32LoadMem16S_Idx64
static auto constexpr s2s_I8ArrayFill
static auto constexpr r2s_F64Select
static auto constexpr s2s_S128StructGet
static auto constexpr s2s_RefCast
INSTRUCTION_HANDLER_FUNC r2s_PreserveCopyFp0ToSlot64(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_RefStructGet(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_F64StructGet
static auto constexpr s2s_F32ArrayGet
static auto constexpr r2s_F32LoadStoreMem_Idx64
static auto constexpr r2r_I32LoadMem16U_Idx64
static void push(uint32_t *&sp, const uint8_t *&code, WasmInterpreterRuntime *wasm_runtime, T val)
static auto constexpr s2s_SimdS128Load8x8S_Idx64
static auto constexpr s2s_SimdS128Load8x8U
static auto constexpr r2r_F32LoadMem_Idx64
static auto constexpr s2s_I32StructSet
INSTRUCTION_HANDLER_FUNC s2s_If(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_DoSimdLoadExtend(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I64StoreMem_Idx64
static auto constexpr s2s_I8UArrayGet
static auto constexpr r2r_F32Select
static auto constexpr r2r_I64LoadMem32U_Idx64
INSTRUCTION_HANDLER_FUNC s2s_RefGlobalSet(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I64LoadMem16U_LocalSet_Idx64
static auto constexpr s2s_SimdS128StoreMem
INSTRUCTION_HANDLER_FUNC s2s_TableFill(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2s_S128Select
static auto constexpr r2r_I64Select
INSTRUCTION_HANDLER_FUNC s2s_Table64Grow(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_LoadMem(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdS128Store32Lane
static auto constexpr r2s_F64LoadStoreMem_Idx64
INSTRUCTION_HANDLER_FUNC r2s_CopyR0ToSlot32(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdI32x4ExtMulHighI16x8S
static auto constexpr s2s_S128ArrayFill
INSTRUCTION_HANDLER_FUNC r2s_BrTable(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_CallRef(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC r2s_If(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdI32x4ExtAddPairwiseI16x8S
static auto constexpr r2s_I64LoadMem32S_Idx64
static auto constexpr s2s_SimdS128Load16Splat_Idx64
static auto constexpr s2s_I64StoreMem32_Idx64
INSTRUCTION_HANDLER_FUNC s2s_CopySlot_ql(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static_cast(a) > static_cast< uint32_t >(b) ? a
static auto constexpr s2s_I32ArrayNew
static bool DoRefCast(WasmRef ref, ValueType ref_type, HeapType target_type, bool null_succeeds, WasmInterpreterRuntime *wasm_runtime)
static auto constexpr s2s_I32GlobalSet
INSTRUCTION_HANDLER_FUNC s2s_DoSimdLoadLane(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_F32StoreMem_Idx64
static auto constexpr s2s_F64ArrayGet
static auto constexpr s2s_SimdS128Store64Lane_Idx64
static auto constexpr s2s_I8UStructGet
WasmRef pop(uint32_t *&sp, const uint8_t *&code, WasmInterpreterRuntime *wasm_runtime)
INSTRUCTION_HANDLER_FUNC s2s_ArraySet(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_DoSimdLoadZeroExtend(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2r_I32LoadMem16U_Idx64
INSTRUCTION_HANDLER_FUNC s2s_Throw(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I64LoadStoreMem_Idx64
INSTRUCTION_HANDLER_FUNC s2s_CopySlot_lq(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdI16x8ExtMulLowI8x16S
INSTRUCTION_HANDLER_FUNC s2s_Memory64Fill(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2s_I32LoadMem16U_Idx64
static auto constexpr s2s_SimdI32x4ExtAddPairwiseI16x8U
INSTRUCTION_HANDLER_FUNC s2r_LoadMemF(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdS128Load32Zero_Idx64
INSTRUCTION_HANDLER_FUNC s2s_DataDrop(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I32LoadMem8S_Idx64
static auto constexpr s2s_SimdS128Load8Splat
static auto constexpr s2s_SimdS128Load16Lane_Idx64
static auto constexpr r2r_I64LoadMem8U_Idx64
static T ExecuteRemU(T lval, T rval)
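
// A minimal sketch of the unsigned-remainder helper above, assuming the
// calling instruction handler has already trapped on a zero divisor (Wasm
// i32.rem_u / i64.rem_u trap when the divisor is 0):
template <typename T>
static T ExecuteRemUSketch(T lval, T rval) {
  return lval % rval;  // Well-defined for unsigned T once rval != 0.
}
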
static auto constexpr s2r_I64LoadMem32U_Idx64
static auto constexpr s2s_I64ArrayFill
static auto constexpr s2s_SimdS128Store8Lane_Idx64
static constexpr auto s2s_SimdI16x8RelaxedLaneSelect
static auto constexpr s2s_SimdS128Load8Lane_Idx64
static auto constexpr s2s_I16UStructGet
INSTRUCTION_HANDLER_FUNC s2s_I64AtomicWait(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_DoSimdExtMul(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I32LoadMem_LocalSet_Idx64
static auto constexpr s2s_RefCastNull
static constexpr auto s2s_SimdI32x4RelaxedLaneSelect
INSTRUCTION_HANDLER_FUNC s2s_SimdI8x16Shuffle(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdI64x2ExtMulLowI32x4S
INSTRUCTION_HANDLER_FUNC s2s_RefGlobalGet(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_AtomicFence(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_F32ArraySet
static auto constexpr s2s_SimdS128Load8x8U_Idx64
INSTRUCTION_HANDLER_FUNC s2s_SimdI16x8DotI8x16I7x16S(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2s_I64Select
static auto constexpr r2s_I64Drop
static auto constexpr s2s_I8SArrayGet
static auto constexpr s2s_I32LoadMem16U_LocalSet_Idx64
static auto constexpr s2s_I32StructGet
INSTRUCTION_HANDLER_FUNC s2s_AssertNullTypecheck(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC RefTest(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I8ArrayNew
static auto constexpr s2s_I32LoadMem_Idx64
static auto constexpr r2r_I32LoadMem_Idx64
INSTRUCTION_HANDLER_FUNC s2s_StoreMem(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_ArrayNewElem
INSTRUCTION_HANDLER_FUNC s2r_SelectF(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I16UArrayGet
INSTRUCTION_HANDLER_FUNC s2s_ArrayNewDefault(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I16SStructGet
INSTRUCTION_HANDLER_FUNC r2r_LoadMemI(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2r_I64LoadMem_Idx64
static auto constexpr s2r_F64GlobalGet
static auto constexpr s2s_SimdS128Load64Splat
static auto constexpr s2s_I16StructSet
INSTRUCTION_HANDLER_FUNC s2s_AssertNotNullTypecheck(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I32LoadMem16S_Idx64
static T pop(uint32_t *&sp, const uint8_t *&code, WasmInterpreterRuntime *wasm_runtime)
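
// The templated push/pop entries suggest slot-based operand access: each
// access decodes a slot offset from the bytecode stream and copies the value
// between the frame's slot array and the caller. A hedged sketch; that the
// offset is a 32-bit count of 4-byte slots is an assumption of this sketch.
#include <cstdint>
#include <cstring>

template <typename T>
static T PopSketch(uint32_t*& sp, const uint8_t*& code) {
  uint32_t slot_offset;
  std::memcpy(&slot_offset, code, sizeof(slot_offset));  // Unaligned read.
  code += sizeof(slot_offset);
  T value;
  std::memcpy(&value, sp + slot_offset, sizeof(T));      // Copy out of the slot.
  return value;
}

template <typename T>
static void PushSketch(uint32_t*& sp, const uint8_t*& code, T value) {
  uint32_t slot_offset;
  std::memcpy(&slot_offset, code, sizeof(slot_offset));
  code += sizeof(slot_offset);
  std::memcpy(sp + slot_offset, &value, sizeof(T));      // Copy into the slot.
}
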
static auto constexpr s2s_I64LoadMem8S_Idx64
static auto constexpr s2s_SimdS128LoadMem
INSTRUCTION_HANDLER_FUNC s2s_CopySlot32x2(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static constexpr auto s2s_SimdI64x2RelaxedLaneSelect
INSTRUCTION_HANDLER_FUNC s2s_CallIndirect(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdS128Load16x4U
INSTRUCTION_HANDLER_FUNC r2s_LoadMem(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_ReturnCallImportedFunction(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_F32GlobalGet
static auto constexpr s2s_F64ArrayNew
static auto constexpr s2s_F64ArraySet
INSTRUCTION_HANDLER_FUNC s2s_BranchOnNonNull(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_RefStructSet(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_CallImportedFunction(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I8ArraySet
static auto constexpr s2r_I32LoadMem8S_Idx64
INSTRUCTION_HANDLER_FUNC r2s_PreserveCopyR0ToSlot64(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2r_I32LoadMem16S_Idx64
static auto constexpr s2s_SimdS128Load32Splat
static auto constexpr s2r_I64GlobalGet
static auto constexpr s2s_SimdI16x8ExtMulHighI8x16S
INSTRUCTION_HANDLER_FUNC s2s_CallFunction(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_ReturnCallIndirect(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I64LoadMem8U_Idx64
static auto constexpr s2s_I32ArrayGet
INSTRUCTION_HANDLER_FUNC s2s_ArrayLen(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I32LoadMem16S_LocalSet_Idx64
INSTRUCTION_HANDLER_FUNC r2s_PreserveCopyFp0ToSlot32(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2s_I64LoadMem_Idx64
INSTRUCTION_HANDLER_FUNC s2s_ExternConvertAny(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC r2s_BranchIf(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2s_I32LoadMem8U_Idx64
static auto constexpr s2s_F32StructSet
INSTRUCTION_HANDLER_FUNC s2s_TableCopy(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC r2r_LoadMemF(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_ArrayNewData
static auto constexpr s2s_I32LoadMem8U_Idx64
static auto constexpr s2s_I16ArrayNew
static auto constexpr r2s_I64LoadMem8S_Idx64
static auto constexpr s2s_I8StructSet
static auto constexpr s2r_F32Select
static auto constexpr s2s_SimdS128Load32Lane_Idx64
static auto constexpr s2s_F32Select
INSTRUCTION_HANDLER_FUNC s2s_GlobalGet(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_F32StructGet
static auto constexpr s2s_SimdS128Store16Lane_Idx64
INSTRUCTION_HANDLER_FUNC s2s_Else(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_Drop(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_Branch(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_BranchIfWithParams(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_Memory64Init(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_RefFunc(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdI64x2ExtMulHighI32x4S
INSTRUCTION_HANDLER_FUNC r2s_GlobalSetF(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2r_F64LoadMem_Idx64
INSTRUCTION_HANDLER_FUNC s2s_Catch(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_SimdS128StoreMemI(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_S128Select
INSTRUCTION_HANDLER_FUNC s2s_PreserveCopySlot64(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_RefArrayGet(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdS128Load64Zero_Idx64
static auto constexpr r2s_I64LoadMem8U_Idx64
static auto constexpr s2s_I16SArrayGet
static auto constexpr r2s_F64StoreMem_Idx64
INSTRUCTION_HANDLER_FUNC s2s_DoSimdLoadSplat(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I64GlobalSet
static auto constexpr r2s_I64LoadMem16S_Idx64
static auto constexpr r2s_I32LoadStoreMem_Idx64
static auto constexpr s2s_I8SStructGet
static auto constexpr r2r_I64LoadMem_Idx64
INSTRUCTION_HANDLER_FUNC s2s_SimdI32x4DotI16x8S(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdI16x8ExtAddPairwiseI8x16U
static auto constexpr s2s_SimdS128Load32Lane
INSTRUCTION_HANDLER_FUNC s2s_Table64Set(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_S128GlobalGet
INSTRUCTION_HANDLER_FUNC r2s_CopyR0ToSlot64(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_RefAsNonNull(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2r_I64LoadMem16S_Idx64
static auto constexpr s2s_I32AtomicWait_Idx64
INSTRUCTION_HANDLER_FUNC s2s_ArrayFill(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_F64LoadMem_LocalSet_Idx64
static auto constexpr s2s_I64LoadMem32U_LocalSet_Idx64
static auto constexpr s2s_SimdS128StoreMem_Idx64
INSTRUCTION_HANDLER_FUNC s2s_TableGet(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_F64GlobalSet
INSTRUCTION_HANDLER_FUNC s2s_ReturnCallRef(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_ArrayNew(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_F64GlobalGet
INSTRUCTION_HANDLER_FUNC s2s_CopySlot64(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2s_F64LoadMem_Idx64
static auto constexpr s2s_I32LoadMem8S_LocalSet_Idx64
INSTRUCTION_HANDLER_FUNC s2s_BrTable(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_TableSize(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_MemorySize(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_F32LoadMem_LocalSet_Idx64
static auto constexpr s2s_SimdS128Load8x8S
static auto constexpr s2s_F64ArrayFill
static auto constexpr s2s_SimdS128LoadMem_Idx64
static auto constexpr r2s_I64LoadMem16U_Idx64
static auto constexpr r2s_I64StoreMem32_Idx64
static auto constexpr s2s_Table64Copy_64_64_64
static auto constexpr s2s_F32GlobalSet
INSTRUCTION_HANDLER_FUNC s2s_CopySlot64x2(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2r_I64LoadMem16S_Idx64
INSTRUCTION_HANDLER_FUNC s2r_I32ConvertI64(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I64ArrayNew
static auto constexpr r2r_F64LoadMem_Idx64
INSTRUCTION_HANDLER_FUNC s2s_Table64CopyImpl(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_Memory64Size(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC r2s_BranchIfWithParams(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdS128Load32Splat_Idx64
static auto constexpr s2s_S128ArrayGet
INSTRUCTION_HANDLER_FUNC s2s_RefArrayFill(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I64LoadMem8S_LocalSet_Idx64
static auto constexpr s2r_I64LoadMem32S_Idx64
static auto constexpr s2s_SimdI32x4ExtMulLowI16x8S
INSTRUCTION_HANDLER_FUNC s2s_ArrayGet(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_Table64Size(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_ArrayCopy(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC r2s_CopyFp0ToSlot64(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static T ExecuteRemS(T lval, T rval)
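
// Sketch of the signed counterpart. Wasm's rem_s must not trap on
// INT_MIN % -1 (unlike div_s), but that expression overflows in C++, so the
// divisor == -1 case is answered directly. Assumes the handler already
// trapped on a zero divisor.
template <typename T>
static T ExecuteRemSSketch(T lval, T rval) {
  if (rval == T{-1}) return T{0};  // Avoids UB when lval is the minimum value.
  return lval % rval;
}
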
static auto constexpr s2s_F32ArrayNew
INSTRUCTION_HANDLER_FUNC s2s_OnLoopBeginNoRefSlots(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2r_GlobalGetI(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2r_I64LoadMem8U_Idx64
static auto constexpr s2s_S128ArraySet
static T Read(const uint8_t *&code)
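
// Read<T> above is the basic decoder primitive: fetch a T from the (possibly
// unaligned) bytecode stream and advance the stream pointer past it. A sketch:
#include <cstring>

template <typename T>
static T ReadSketch(const uint8_t*& code) {
  T value;
  std::memcpy(&value, code, sizeof(T));  // memcpy tolerates unaligned data.
  code += sizeof(T);
  return value;
}
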
static auto constexpr r2s_F32StoreMem_Idx64
INSTRUCTION_HANDLER_FUNC s2s_BranchOnNullWithParams(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_BranchOnCastFail(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdS128Load64Lane
static auto constexpr s2s_Table64Copy_64_32_32
static auto constexpr r2s_I64LoadStoreMem_Idx64
static auto constexpr s2s_F32Drop
static auto constexpr r2r_I64LoadMem8S_Idx64
INSTRUCTION_HANDLER_FUNC r2s_PreserveCopyR0ToSlot32(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC r2s_I32ConvertI64(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_I31GetU(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_TrapIllegalCast(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2r_I64LoadMem8S_Idx64
static auto constexpr s2s_I32StoreMem16_Idx64
static auto constexpr s2s_F64LoadMem_Idx64
static auto constexpr s2s_SimdS128Store8Lane
static auto constexpr s2s_I32Drop
INSTRUCTION_HANDLER_FUNC s2s_BranchOnNonNullWithParams(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2s_F32Select
static auto constexpr s2s_SimdS128Load16x4S
INSTRUCTION_HANDLER_FUNC s2s_MemoryInit(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2s_I32LoadMem_Idx64
static auto constexpr r2r_I32LoadMem8U_Idx64
static auto constexpr r2s_F64Drop
static uint32_t ReadGlobalIndex(const uint8_t *&code)
static auto constexpr s2s_SimdS128Load32Zero
static auto constexpr s2s_I64Select
static auto constexpr r2s_I32StoreMem8_Idx64
static auto constexpr s2s_I64GlobalGet
static constexpr auto s2s_SimdI8x16RelaxedLaneSelect
INSTRUCTION_HANDLER_FUNC s2r_SelectI(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_F64StoreMem_Idx64
static auto constexpr s2s_SimdI64x2ExtMulLowI32x4U
static auto constexpr s2s_I64LoadMem_Idx64
INSTRUCTION_HANDLER_FUNC s2r_LoadMemI(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_RefTestSucceeds(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_CopySlot_ll(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I64Drop
static auto constexpr r2s_I32Drop
INSTRUCTION_HANDLER_FUNC s2s_I32ConvertI64(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static uint8_t * ReadMemoryAddress(uint8_t *&code)
static constexpr auto s2s_SimdI8x16RelaxedSwizzle
static auto constexpr r2s_I32LoadMem16S_Idx64
static auto constexpr r2s_I32StoreMem16_Idx64
INSTRUCTION_HANDLER_FUNC s2s_ArrayNewSegment(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_DoSimdStoreLane(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_AtomicNotify(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_CopySlot128(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_RefI31(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_Table64Get(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_AtomicNotify_Idx64
static auto constexpr s2r_I32LoadMem8U_Idx64
static auto constexpr s2s_SimdS128Load32x2U
INSTRUCTION_HANDLER_FUNC r2s_RefSelect(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2r_I32Select
static auto constexpr s2r_F32LoadMem_Idx64
static auto constexpr s2r_I32LoadMem_Idx64
INSTRUCTION_HANDLER_FUNC r2s_GlobalSetI(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I64LoadMem32U_Idx64
static auto constexpr s2s_SimdS128Load16Splat
static auto constexpr s2s_SimdS128Store32Lane_Idx64
static auto constexpr r2s_I32GlobalSet
static auto constexpr s2s_I32StoreMem_Idx64
INSTRUCTION_HANDLER_FUNC r2s_StoreMemF(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdS128Store64Lane
static auto constexpr s2r_I64LoadMem16U_Idx64
static auto constexpr s2s_F64StructSet
INSTRUCTION_HANDLER_FUNC r2r_SelectF(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I64LoadMem16S_LocalSet_Idx64
static auto constexpr r2s_I64StoreMem8_Idx64
void push(uint32_t *&sp, const uint8_t *&code, WasmInterpreterRuntime *wasm_runtime, WasmRef ref)
static auto constexpr s2s_I32StoreMem8_Idx64
static auto constexpr r2r_I32LoadMem8S_Idx64
static auto constexpr s2s_I32ArrayFill
INSTRUCTION_HANDLER_FUNC s2s_Return(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_CopySlot_qq(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2s_I32Select
static auto constexpr s2s_F64Drop
static auto constexpr s2s_I16ArraySet
static auto constexpr s2s_SimdI32x4ExtMulLowI16x8U
static auto constexpr s2r_F32GlobalGet
INSTRUCTION_HANDLER_FUNC s2s_ArrayNewFixed(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_StructSet(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_AnyConvertExtern(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdS128Load32x2S_Idx64
INSTRUCTION_HANDLER_FUNC RefCast(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC r2s_RefDrop(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I32ArraySet
INSTRUCTION_HANDLER_FUNC s2s_RefDrop(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC r2r_SelectI(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_CopySlotRef(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC r2s_Select(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I64StructSet
static auto constexpr s2s_S128StructSet
static auto constexpr s2s_SimdS128Load8Lane
traits::memory_offset64_t memory_offset64_t
static auto constexpr s2s_I64ArraySet
INSTRUCTION_HANDLER_FUNC s2s_RefArrayNew(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2r_I64LoadMem32S_Idx64
static auto constexpr s2s_I32LoadMem8U_LocalSet_Idx64
static auto constexpr s2s_F32LoadStoreMem_Idx64
static auto constexpr s2s_SimdI16x8ExtMulHighI8x16U
static auto constexpr s2s_I32Select
static auto constexpr s2s_F32LoadMem_Idx64
INSTRUCTION_HANDLER_FUNC r2s_LoadStoreMem(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I64LoadMem_LocalSet_Idx64
INSTRUCTION_HANDLER_FUNC s2s_CopySlot32(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr r2r_I32Select
static auto constexpr s2s_SimdS128Load32x2S
INSTRUCTION_HANDLER_FUNC s2s_RefArraySet(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
traits::slot_offset_t slot_offset_t
INSTRUCTION_HANDLER_FUNC s2s_MemoryCopy(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_PreserveCopySlot128(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I64StoreMem8_Idx64
static auto constexpr r2s_I64StoreMem_Idx64
static auto constexpr s2s_ArrayInitElem
INSTRUCTION_HANDLER_FUNC s2s_BranchOnNull(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_I32AtomicWait(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I64ArrayGet
static auto constexpr r2s_I64GlobalSet
static auto constexpr s2s_SimdS128Load16x4S_Idx64
static auto constexpr s2r_I32GlobalGet
static auto constexpr s2s_ArrayInitData
INSTRUCTION_HANDLER_FUNC s2s_RefNull(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_S128GlobalSet
static auto constexpr r2s_I64StoreMem16_Idx64
INSTRUCTION_HANDLER_FUNC r2r_I32ConvertI64(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
INSTRUCTION_HANDLER_FUNC s2s_StructGet(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_SimdI64x2ExtMulHighI32x4U
static auto constexpr s2s_I64LoadMem32S_LocalSet_Idx64
INSTRUCTION_HANDLER_FUNC s2s_RefTestFails(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static auto constexpr s2s_I64LoadMem16S_Idx64
static auto constexpr s2s_S128ArrayNew
static auto constexpr s2s_I64LoadMem16U_Idx64
static auto constexpr s2r_F64Select
INSTRUCTION_HANDLER_FUNC s2s_StructNew(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
constexpr Representation representation() const
constexpr bool is_index() const
constexpr ModuleTypeIndex ref_index() const
static constexpr HeapType FromBits(uint32_t bits)
static constexpr int ToTagged(int offset)
uint32_t field_offset(uint32_t index) const
uint32_t field_count() const
ValueType field(uint32_t index) const
V8_EXPORT_PRIVATE const CanonicalSig * LookupFunctionSignature(CanonicalTypeIndex index) const
constexpr ValueKind kind() const
constexpr bool is_reference() const
constexpr bool is_string_view() const
constexpr bool is_nullable() const
constexpr uint32_t raw_bit_field() const
static constexpr ValueType FromRawBitField(uint32_t bits)
constexpr HeapType heap_type() const
static constexpr ValueType RefNull(ModuleTypeIndex index, bool shared, RefTypeKind kind)
static constexpr ValueType Ref(ModuleTypeIndex index, bool shared, RefTypeKind kind)
static constexpr ValueType RefMaybeNull(ModuleTypeIndex index, Nullability nullable, bool shared, RefTypeKind kind)
void resize(Stack::size_type count)
void reserve(Stack::size_type new_cap)
void push_back(const uint32_t &value)
static void PrintBytecodeCompressionStats()
void EmitI32Const(int32_t value)
static std::atomic< size_t > emitted_short_memory_offset_count_
int32_t EndBlock(WasmOpcode opcode)
ValueType GetParamType(const WasmBytecodeGenerator::BlockData &block_data, size_t index) const
void BeginElseBlock(uint32_t if_block_index, bool dummy)
void Pop(ValueKind kind, bool emit=true)
void EmitFnId(InstructionHandler func_id, uint32_t pc=UINT_MAX)
void CopyToSlotAndPop(ValueType value_type, uint32_t to, bool is_tee, bool copy_from_reg)
void EmitBranchOffset(uint32_t delta)
void EmitStackIndex(int32_t value)
std::vector< uint8_t > code_
uint32_t rets_slots_size_
void EmitBranchTableOffset(uint32_t delta, uint32_t code_pos)
void EmitCopySlot(ValueType value_type, uint32_t from_slot_index, uint32_t to_slot_index, bool copy_from_reg=false)
uint32_t _PushSlot(ValueType value_type)
std::vector< Simd128 > simd_immediates_
void SetSlotType(uint32_t stack_index, ValueType type)
base::SmallVector< uint32_t, 8 > br_table_labels_
bool TypeCheckAlwaysSucceeds(ValueType obj_type, HeapType type) const
const FunctionSig * GetFunctionSignature(uint32_t function_index) const
WasmEHDataGenerator eh_data_
void PushSlot(uint32_t slot_index)
bool HasVoidSignature(const WasmBytecodeGenerator::BlockData &block_data) const
uint32_t function_index() const
void EmitMemoryOffset(uint64_t value)
uint32_t CurrentCodePos() const
uint32_t CreateConstSlot(T value)
uint32_t stack_top_index() const
void DecodeAtomicOp(WasmOpcode opcode, WasmInstruction::Optional *optional, Decoder *decoder, InterpreterCode *code, pc_t pc, int *const len)
base::SmallVector< uint32_t, 16 > loop_begin_code_offsets_
ValueKind GetTopStackType(RegMode reg_mode) const
RegMode DoEncodeInstruction(const WasmInstruction &instr, RegMode curr_reg_mode, RegMode next_reg_mode)
void DecodeGCOp(WasmOpcode opcode, WasmInstruction::Optional *optional, Decoder *decoder, InterpreterCode *code, pc_t pc, int *const len)
void PushCopySlot(uint32_t from_stack_index)
std::map< CodeOffset, pc_t > code_pc_map_
ValueType GetReturnType(const WasmBytecodeGenerator::BlockData &block_data, size_t index) const
void I32Pop(bool emit=true)
uint32_t GetStackFrameSize() const
static const CodeOffset kInvalidCodeOffset
void StoreBlockParamsAndResultsIntoSlots(uint32_t target_block_index, WasmOpcode opcode)
void EmitTryCatchBranchOffset()
static std::atomic< size_t > total_bytecode_size_
uint32_t ReturnsCount(const WasmBytecodeGenerator::BlockData &block_data) const
WasmBytecodeGenerator(uint32_t function_index, InterpreterCode *wasm_code, const WasmModule *module)
uint32_t ParamsCount(const WasmBytecodeGenerator::BlockData &block_data) const
bool DoEncodeSuperInstruction(RegMode &reg_mode, const WasmInstruction &curr_instr, const WasmInstruction &next_instr)
void F32Push(bool emit=true)
int GetCurrentTryBlockIndex(bool return_matching_try_for_catch_blocks) const
void EmitGlobalIndex(uint32_t index)
void RestoreIfElseParams(uint32_t if_block_index)
void MemIndexPop(bool emit=true)
uint32_t PushConstSlot(T value)
WasmInstruction DecodeInstruction(pc_t pc, Decoder &decoder)
void S128Pop(bool emit=true)
void StoreBlockParamsIntoSlots(uint32_t target_block_index, bool update_stack)
void UpdateStack(uint32_t index, uint32_t slot_index)
bool EncodeSuperInstruction(RegMode &reg_mode, const WasmInstruction &curr_instr, const WasmInstruction &next_instr)
bool HasSharedSlot(uint32_t stack_index) const
InterpreterCode * wasm_code_
ValueKind GetGlobalType(uint32_t index) const
void InitSlotsForFunctionArgs(const FunctionSig *sig, bool is_indirect_call)
uint32_t unreachable_block_count_
CodeOffset last_instr_offset_
MemIndexPushFunc int_mem_push_
uint32_t ReserveBlockSlots(uint8_t opcode, const WasmInstruction::Optional::Block &block_data, size_t *rets_slots_count, size_t *params_slots_count)
void PreserveArgsAndLocals()
void EmitRefStackIndex(int32_t value)
bool TypeCheckAlwaysFails(ValueType obj_type, HeapType expected_type, bool null_succeeds) const
void F64Push(bool emit=true)
void I64Push(bool emit=true)
int32_t current_block_index_
void EmitSlotOffset(uint32_t value)
int32_t BeginBlock(WasmOpcode opcode, const WasmInstruction::Optional::Block signature)
bool DecodeSimdOp(WasmOpcode opcode, WasmInstruction::Optional *optional, Decoder *decoder, InterpreterCode *code, pc_t pc, int *const len)
ValueType RefPop(bool emit=true)
void RefPush(ValueType type, bool emit=true)
void EmitIfElseBranchOffset()
uint32_t CreateSlot(ValueType value_type)
uint32_t ScanConstInstructions() const
bool TryCompactInstructionHandler(InstructionHandler func_addr)
InstrHandlerSize handler_size_
void F64Pop(bool emit=true)
bool current_instr_encoding_failed_
void I32Push(bool emit=true)
RegMode EncodeInstruction(const WasmInstruction &instr, RegMode curr_reg_mode, RegMode next_reg_mode)
void EmitStructFieldOffset(int32_t value)
MemIndexPopFunc int_mem_pop_
const WasmModule * module_
void DecodeNumericOp(WasmOpcode opcode, WasmInstruction::Optional *optional, Decoder *decoder, InterpreterCode *code, pc_t pc, int *const len)
int32_t GetTargetBranch(uint32_t delta) const
uint32_t args_slots_size_
void EmitRefValueType(int32_t value)
bool FindSharedSlot(uint32_t stack_index, uint32_t *new_slot_index)
static std::atomic< size_t > emitted_short_slot_offset_count_
bool is_instruction_reachable_
uint32_t ref_slots_count_
void I64Pop(bool emit=true)
void SetUnreachableMode()
void Emit(const void *buff, size_t len)
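
// Emit(buff, len) appends raw bytes to code_; the typed helpers in this list
// (EmitI32Const, EmitGlobalIndex, EmitSlotOffset, ...) plausibly reduce to
// appending the raw bytes of their operand. A sketch under that assumption
// (the real generator also tracks offsets and compact encodings):
#include <cstdint>
#include <cstring>
#include <vector>

struct EmitterSketch {
  std::vector<uint8_t> code_;

  void Emit(const void* buff, size_t len) {
    const uint8_t* p = static_cast<const uint8_t*>(buff);
    code_.insert(code_.end(), p, p + len);  // Raw byte append.
  }

  void EmitI32Const(int32_t value) { Emit(&value, sizeof(value)); }
  void EmitGlobalIndex(uint32_t index) { Emit(&index, sizeof(index)); }
};
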
std::vector< Slot > slots_
bool ToRegisterIsAllowed(const WasmInstruction &instr)
uint32_t stack_size() const
void F32Pop(bool emit=true)
void PatchBranchOffsets()
void S128Push(bool emit=true)
void CopyToSlot(ValueType value_type, uint32_t from_slot_index, uint32_t to_stack_index, bool copy_from_reg)
void PatchLoopBeginInstructions()
std::vector< BlockData > blocks_
std::vector< uint8_t > const_slots_values_
std::unique_ptr< WasmBytecode > GenerateBytecode()
void MemIndexPush(bool emit=true)
static bool HasRefOrSimdArgs(const FunctionSig *sig)
WasmBytecode(int func_index, const uint8_t *code_data, size_t code_length, uint32_t stack_frame_size, const FunctionSig *signature, const CanonicalSig *canonical_signature, const InterpreterCode *interpreter_code, size_t blocks_count, const uint8_t *const_slots_data, size_t const_slots_length, uint32_t ref_slots_count, const WasmEHData &&eh_data, const std::map< CodeOffset, pc_t > &&code_pc_map)
static uint32_t RetsSizeInSlots(const FunctionSig *sig)
pc_t GetPcFromTrapCode(const uint8_t *current_code) const
const uint8_t * code_bytes_
static uint32_t ArgsSizeInSlots(const FunctionSig *sig)
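
// Args/RetsSizeInSlots presumably sum each value's size rounded up to whole
// slots (kSlotSize appears elsewhere in this listing). A sketch with
// kSlotSize = 4 assumed, so an i64/f64 occupies 2 slots and a Simd128 4:
#include <cstdint>
#include <vector>

static uint32_t SizeInSlotsSketch(const std::vector<uint32_t>& value_sizes) {
  constexpr uint32_t kSlotSize = 4;  // Assumption for the sketch.
  uint32_t slots = 0;
  for (uint32_t size : value_sizes) {
    slots += (size + kSlotSize - 1) / kSlotSize;  // Round up per value.
  }
  return slots;
}
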
std::map< CodeOffset, pc_t > code_pc_map_
static uint32_t RefArgsCount(const FunctionSig *sig)
static uint32_t RefRetsCount(const FunctionSig *sig)
void AddCatchBlock(BlockIndex catch_block_index, int tag_index, uint32_t first_param_slot_offset, uint32_t first_param_ref_stack_index, CodeOffset code_offset)
BlockIndex current_try_block_index_
void AddTryBlock(BlockIndex try_block_index, BlockIndex parent_or_matching_try_block_index, BlockIndex ancestor_try_block_index)
void AddDelegatedBlock(BlockIndex delegated_try_block_index)
BlockIndex GetCurrentTryBlockIndex() const
void RecordPotentialExceptionThrowingInstruction(WasmOpcode opcode, CodeOffset code_offset)
BlockIndex EndTryCatchBlocks(BlockIndex block_index, CodeOffset code_offset)
ExceptionPayloadSlotOffsets GetExceptionPayloadStartSlotOffsets(BlockIndex catch_block_index) const
static const int kCatchAllTagIndex
std::unordered_map< BlockIndex, CatchBlock > catch_blocks_
std::unordered_map< CodeOffset, BlockIndex > code_trycatch_map_
const TryBlock * GetTryBlock(CodeOffset code_offset) const
std::unordered_map< BlockIndex, TryBlock > try_blocks_
static const int kDelegateToCallerIndex
size_t GetEndInstructionOffsetFor(BlockIndex catch_block_index) const
BlockIndex GetTryBranchOf(BlockIndex catch_block_index) const
const TryBlock * GetParentTryBlock(const TryBlock *try_block) const
const TryBlock * GetDelegateTryBlock(const TryBlock *try_block) const
static constexpr WasmEnabledFeatures All()
base::TimeTicks start_interval_time_
base::TimeTicks next_interval_time_
base::TimeDelta window_running_time_
const size_t slow_threshold_samples_count_
base::TimeDelta cooldown_interval_
std::vector< int > samples_
Histogram * slow_wasm_histogram_
base::ElapsedTimer window_execute_timer_
const int slow_threshold_
void BeginInterval(bool start_timer)
static const int kMaxPercentValue
const base::TimeDelta sample_duration_
Histogram * execute_ratio_histogram_
void AddSample(int running_ratio)
WasmExecutionTimer(Isolate *isolate, bool track_jitless_wasm)
WasmInterpreterThread * GetCurrentInterpreterThread(Isolate *isolate)
void NotifyIsolateDisposal(Isolate *isolate)
std::vector< WasmInterpreterStackEntry > CaptureStackTrace(const TrapStatus *trap_status=nullptr) const
FrameState current_frame_state_
int GetFunctionIndex(int index) const
WasmInterpreterThread(Isolate *isolate)
void StopExecutionTimer()
static void NotifyIsolateDisposal(Isolate *isolate)
static WasmInterpreterThreadMap * thread_interpreter_map_s
static constexpr uint32_t kInitialStackSize
void TerminateExecutionTimers()
void StartExecutionTimer()
void RaiseException(Isolate *isolate, MessageTemplate message)
static constexpr uint32_t kMaxStackSize
Handle< FixedArray > reference_stack_
void EnsureRefStackSpace(size_t new_size)
void ClearRefStackValues(size_t index, size_t count)
size_t current_ref_stack_size_
WasmExecutionTimer execution_timer_
static WasmInterpreterThread * GetCurrentInterpreterThread(Isolate *isolate)
uint32_t current_stack_size_
static void SetRuntimeLastWasmError(Isolate *isolate, MessageTemplate message)
static TrapReason GetRuntimeLastWasmError(Isolate *isolate)
std::atomic< size_t > generated_code_size_
const WasmModule * module() const
base::TimeDelta bytecode_generation_time_
void Preprocess(uint32_t function_index)
const WasmModule * module_
ZoneVector< InterpreterCode > interpreter_code_
CodeMap(Isolate *isolate, const WasmModule *module, const uint8_t *module_start, Zone *zone)
static void GlobalTearDown()
static void NotifyIsolateDisposal(Isolate *isolate)
WasmInterpreterThread::State ContinueExecution(WasmInterpreterThread *thread, bool called_from_js)
std::shared_ptr< WasmInterpreterRuntime > wasm_runtime_
static void InitializeOncePerProcess()
IndirectHandle< WasmInstanceObject > instance_object_
WasmInterpreter(Isolate *isolate, const WasmModule *module, const ModuleWireBytes &wire_bytes, DirectHandle< WasmInstanceObject > instance)
static constexpr TrapReason MessageIdToTrapReason(MessageTemplate message)
static constexpr const char * OpcodeName(WasmOpcode)
static constexpr bool IsPrefixOpcode(WasmOpcode)
#define COMPRESS_POINTERS_BOOL
ZoneList< RegExpInstruction > code_
#define BINOP_CASE(opcode, assembler_op)
#define FOREACH_LOAD_STORE_DUPLICATED_INSTR_HANDLER(V)
#define FOREACH_LOAD_STORE_INSTR_HANDLER(V,...)
#define FOREACH_NO_BOUNDSCHECK_INSTR_HANDLER(V)
#define FOREACH_INSTR_HANDLER(V)
#define ATOMIC_LOAD_OP(name, type)
#define ATOMIC_COMPARE_EXCHANGE_OP(name, type)
#define ATOMIC_STORE_OP(name, type)
#define ATOMIC_OP(op, type, kind)
V8_BASE_EXPORT constexpr uint64_t RoundUpToPowerOfTwo64(uint64_t value)
constexpr unsigned CountPopulation(T value)
static V ReadUnalignedValue(Address p)
int16_t MulWithWraparound(int16_t a, int16_t b)
void CallOnce(OnceType *once, std::function< void()> init_func)
constexpr bool IsInBounds(T index, T length, T max)
signed_type NegateWithWraparound(signed_type a)
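
// The *WithWraparound helpers exist because signed overflow is undefined
// behavior in C++. The standard trick, sketched here, is to compute in the
// unsigned counterpart type and convert back:
#include <cstdint>

static int16_t MulWithWraparoundSketch(int16_t a, int16_t b) {
  // Widen to uint32_t first: two uint16_t operands would promote to int,
  // whose overflow is UB.
  return static_cast<int16_t>(static_cast<uint32_t>(a) *
                              static_cast<uint32_t>(b));
}

static int32_t NegateWithWraparoundSketch(int32_t a) {
  return static_cast<int32_t>(0u - static_cast<uint32_t>(a));
}
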
static void WriteUnalignedValue(Address p, V value)
WordWithBits< 128 > Simd128
bool IsTrapHandlerEnabled()
void SetLandingPad(uintptr_t landing_pad)
TH_DISABLE_ASAN bool IsThreadInWasm()
static void Populate(HeapType *unfinished_type, const WasmModule *module)
RegMode GetRegMode(ValueKind kind)
static ValueType value_type()
static const size_t kSlotSize
const char * GetOperatorModeString(OperatorMode mode)
PWasmOp * kInstructionTable[kInstructionTableSize]
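
// kInstructionTable maps a handler id to a PWasmOp*. A hedged sketch of the
// threaded-dispatch idea: every handler finishes by reading the next handler
// id from the bytecode and tail-calling through the table. The 16-bit id and
// the clang musttail attribute are assumptions of this sketch.
#include <cstdint>
#include <cstring>

struct RuntimeSketch;  // Stand-in for WasmInterpreterRuntime.
using PWasmOpSketch = void (*)(const uint8_t* code, uint32_t* sp,
                               RuntimeSketch* rt, int64_t r0, double fp0);
extern PWasmOpSketch dispatch_table[];  // One entry per handler id.

static void HandlerSketch(const uint8_t* code, uint32_t* sp, RuntimeSketch* rt,
                          int64_t r0, double fp0) {
  // ... this instruction's own work goes here ...
  uint16_t next_id;
  std::memcpy(&next_id, code, sizeof(next_id));  // Fetch next handler id.
  code += sizeof(next_id);
  [[clang::musttail]] return dispatch_table[next_id](code, sp, rt, r0, fp0);
}
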
INSTRUCTION_HANDLER_FUNC TrapMemOutOfBounds(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
constexpr uint32_t kBranchOnCastDataTargetTypeBitSize
constexpr uint64_t kFloat64SignBitMask
constexpr IndependentValueType kWasmF32
constexpr IndependentHeapType kWasmAnyRef
void InitInstructionTableOnce(Isolate *isolate)
TypeCanonicalizer * GetTypeCanonicalizer()
BoundsCheckedHandlersCounter
constexpr IndependentHeapType kWasmExternRef
constexpr IndependentValueType kWasmI32
DirectHandle< Object > WasmRef
static const ptrdiff_t kCodeOffsetSize
constexpr IndependentHeapType kWasmRefI31
InstructionHandler s_unwind_code
constexpr IndependentHeapType kWasmVoid
constexpr uint32_t kFloat32SignBitMask
V8_INLINE bool IsSubtypeOf(ValueType subtype, ValueType supertype, const WasmModule *sub_module, const WasmModule *super_module)
constexpr int value_kind_size(ValueKind kind)
constexpr IndependentHeapType kWasmBottom
static constexpr uint32_t kInstructionTableSize
void InitTrapHandlersOnce(Isolate *isolate)
const char * GetRegModeString(RegMode reg_mode)
Signature< ValueType > FunctionSig
constexpr IndependentValueType kWasmF64
constexpr IndependentValueType kWasmI64
static int StructFieldOffset(const StructType *struct_type, int field_index)
V8_INLINE IndirectHandle< T > handle(Tagged< T > object, Isolate *isolate)
bool SetPermissions(v8::PageAllocator *page_allocator, void *address, size_t size, PageAllocator::Permission access)
constexpr int kTaggedSize
Wide MultiplyLong(Narrow a, Narrow b)
constexpr int kSimd128Size
v8::PageAllocator * GetPlatformPageAllocator()
void EncodeI32ExceptionValue(DirectHandle< FixedArray > encoded_values, uint32_t *encoded_index, uint32_t value)
void * AllocatePages(v8::PageAllocator *page_allocator, void *hint, size_t size, size_t alignment, PageAllocator::Permission access)
V8_INLINE constexpr bool IsSmi(TaggedImpl< kRefType, StorageType > obj)
V8_INLINE DirectHandle< T > direct_handle(Tagged< T > object, Isolate *isolate)
T SaturateRoundingQMul(T a, T b)
Tagged< MaybeWeak< T > > MakeWeak(Tagged< T > value)
Wide AddLong(Narrow a, Narrow b)
V8_EXPORT_PRIVATE FlagValues v8_flags
V8_EXPORT_PRIVATE constexpr int ElementSizeLog2Of(MachineRepresentation)
float DoubleToFloat32(double x)
T RoundingAverageUnsigned(T a, T b)
constexpr uint32_t kSlotsZapValue
bool is_signed(Condition cond)
void FreePages(v8::PageAllocator *page_allocator, void *address, const size_t size)
void EncodeI64ExceptionValue(DirectHandle< FixedArray > encoded_values, uint32_t *encoded_index, uint64_t value)
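
// Encode{I32,I64}ExceptionValue pack trap payloads into a FixedArray of Smis.
// Since a Smi cannot hold a full 32-bit payload on every configuration, the
// value is plausibly split into 16-bit halves; a standalone sketch with the
// FixedArray/Smi machinery replaced by a plain vector:
#include <cstdint>
#include <vector>

static void EncodeI32Sketch(std::vector<uint32_t>& encoded, uint32_t value) {
  encoded.push_back(value >> 16);     // High half first.
  encoded.push_back(value & 0xFFFF);  // Then the low half.
}

static void EncodeI64Sketch(std::vector<uint32_t>& encoded, uint64_t value) {
  EncodeI32Sketch(encoded, static_cast<uint32_t>(value >> 32));
  EncodeI32Sketch(encoded, static_cast<uint32_t>(value));
}
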
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode)
#define V8_DECLARE_ONCE(NAME)
#define SHIFT_CASE(from, to)
#define SPLAT_CASE(from, to)
#define DCHECK_LE(v1, v2)
#define CHECK_LE(lhs, rhs)
#define DCHECK_NOT_NULL(val)
#define DCHECK_IMPLIES(v1, v2)
#define DCHECK_NE(v1, v2)
#define DCHECK_GE(v1, v2)
#define DCHECK(condition)
#define DCHECK_LT(v1, v2)
#define DCHECK_EQ(v1, v2)
#define DCHECK_GT(v1, v2)
uint32_t target_type_bit_fields
DirectHandle< Object > GetCaughtException(Isolate *isolate, uint32_t catch_block_index) const
void SetCaughtException(Isolate *isolate, uint32_t catch_block_index, DirectHandle< Object > exception)
void DisposeCaughtExceptionsArray(Isolate *isolate)
const WasmFunction * function
const uint8_t * at(pc_t pc)
static constexpr ModuleTypeIndex Invalid()
constexpr bool valid() const
uint32_t end_code_offset_
base::SmallVector< uint32_t, 4 > branch_code_offsets_
int32_t parent_try_block_index_
uint32_t begin_code_offset_
uint32_t first_block_index_
int32_t if_else_block_index_
uint32_t GetParam(size_t index) const
size_t operator()(const Simd128 &s128) const
BlockIndex delegate_try_index
void SetDelegated(BlockIndex delegate_try_idx)
std::vector< CatchHandler > catch_handlers
BlockIndex ancestor_try_index
bool IsTryDelegate() const
BlockIndex parent_or_matching_try_block
constexpr ValueType value_type() const
ModuleTypeIndex sig_index
uint32_t value_type_bitfield
uint32_t dest_array_index
uint32_t element_segment_index
RegMode InputRegMode() const
std::vector< WasmMemory > memories
BranchOnCastData br_on_cast_data
struct v8::internal::wasm::WasmInstruction::Optional::Block block
struct v8::internal::wasm::WasmInstruction::Optional::BrTable br_table
struct v8::internal::wasm::WasmInstruction::Optional::TableInit table_init
struct v8::internal::wasm::WasmInstruction::Optional::GC_ArrayNewFixed gc_array_new_fixed
struct v8::internal::wasm::WasmInstruction::Optional::IndirectCall indirect_call
struct v8::internal::wasm::WasmInstruction::Optional::GC_FieldImmediate gc_field_immediate
uint32_t ref_type_bit_field
struct v8::internal::wasm::WasmInstruction::Optional::GC_ArrayCopy gc_array_copy
struct v8::internal::wasm::WasmInstruction::Optional::GC_HeapTypeImmediate gc_heap_type_immediate
struct v8::internal::wasm::WasmInstruction::Optional::TableCopy table_copy
struct v8::internal::wasm::WasmInstruction::Optional::GC_ArrayNewOrInitData gc_array_new_or_init_data
#define V8_LIKELY(condition)
#define V8_UNLIKELY(condition)
const wasm::WasmModule * module_
#define DEFINE_UNOP(name, ctype, reg, op, type)
#define FOREACH_I64_CONVERT_FROM_FLOAT_UNOP(V)
#define FOREACH_ATOMIC_COMPARE_EXCHANGE_OP(V)
#define CONVERT_CASE(op, src_type, name, dst_type, count, start_index, ctype, expr)
#define DEFINE_BINOP(name, ctype, reg, op, type)
#define EXTRACT_LANE_EXTEND_CASE(format, stype, name, sign, extended_type)
#define INLINED_TRAP(trap_reason)
#define EXECUTE_UNOP(name, ctype, reg, op, type)
#define FOREACH_ATOMIC_BINOP(V)
#define FOREACH_REM_BINOP(V)
#define FOREACH_ATOMIC_STORE_OP(V)
#define PACK_CASE(op, src_type, name, dst_type, count, dst_ctype)
#define EMIT_MEM64_INSTR_HANDLER(name, mem64_name, is_memory64)
#define FOREACH_UNSIGNED_DIV_BINOP(V)
#define START_EMIT_INSTR_HANDLER()
#define EXT_ADD_PAIRWISE_CASE(op)
#define FOREACH_ADDITIONAL_CONVERT_UNOP(V)
#define FOREACH_ATOMIC_LOAD_OP(V)
#define REDUCTION_CASE(op, name, stype, count)
#define STORE_CASE(name, ctype, mtype, rep, type)
#define FOREACH_OTHER_CONVERT_UNOP(V)
#define FOREACH_CONVERT_UNOP(V)
#define QFM_CASE(op, name, stype, count, operation)
#define FOREACH_REINTERPRET_UNOP(V)
#define FOREACH_I32_CONVERT_FROM_FLOAT_UNOP(V)
#define FOREACH_BITS_UNOP(V)
#define TRAP(trap_reason)
#define EMIT_INSTR_HANDLER(name)
#define LOAD_SPLAT_CASE(op)
#define DEFINE_REG_BINOP(name, from_ctype, from_type, to_ctype, to_type, op)
#define START_EMIT_INSTR_HANDLER_WITH_ID(name)
#define FOREACH_SIGNED_DIV_BINOP(V)
#define CMPOP_CASE(op, name, stype, out_stype, count, expr)
#define END_EMIT_INSTR_HANDLER()
#define FOREACH_TRUNCSAT_UNOP(V)
#define EMIT_INSTR_HANDLER_WITH_PC(name, pc)
#define STORE_LANE_CASE(op)
#define LOAD_LANE_CASE(op)
#define EXTRACT_LANE_CASE(format, stype, op_type, name)
#define BITMASK_CASE(op, name, stype, count)
#define LOAD_ZERO_EXTEND_CASE(op, load_type)
#define FOREACH_ARITHMETIC_BINOP(V)
#define DECODE_UNOP(name, from_ctype, from_type, from_reg, to_ctype, to_type, to_reg)
#define EXECUTE_BINOP(name, ctype, reg, op, type)
#define FOREACH_TRAPPING_BINOP(V)
#define ITEM_ENUM_DEFINE(name)
#define FOREACH_SIMPLE_UNOP(V)
#define FOREACH_MORE_BINOP(V)
#define EMIT_MEM64_INSTR_HANDLER_WITH_PC(name, mem64_name, is_memory64, pc)
#define FOREACH_COMPARISON_BINOP(V)
#define ATOMIC_BINOP(name, Type, ctype, type, op_ctype, op_type, operation)
#define FOREACH_EXTENSION_UNOP(V)
#define LOAD_CASE(name, ctype, mtype, rep, type)
#define LOAD_EXTEND_CASE(op)
#define REPLACE_LANE_CASE(format, name, stype, ctype, op_type)
#define UNOP_CASE(op, name, stype, count, expr)
#define DEFINE_INSTR_HANDLER(name)
#define INSTRUCTION_HANDLER_FUNC