#include "absl/container/btree_map.h"

using compiler::AccessBuilder;
using compiler::CallDescriptor;
using compiler::Operator;
using compiler::TrapId;
using compiler::turboshaft::Simd128ConstantOp;
using compiler::turboshaft::WasmArrayNullable;
using compiler::turboshaft::WasmStackCheckOp;
using compiler::turboshaft::WasmStringRefNullable;
using compiler::turboshaft::WasmStructNullable;
using compiler::turboshaft::WasmTypeAnnotationOp;
using compiler::turboshaft::WasmTypeCastOp;
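// Maps DataViewOp getters/setters to ExternalArrayType values and element
// sizes via macro-generated switch cases (enclosing helper definitions are
// elided in this excerpt).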
    case DataViewOp::kGet##Name: \
    case DataViewOp::kSet##Name: \
      return kExternal##Name##Array;

  switch (array_type) {
#define ELEMENTS_KIND_TO_ELEMENT_SIZE(Type, type, TYPE, ctype) \
  case kExternal##Type##Array:                                 \
    return sizeof(ctype);
#undef ELEMENTS_KIND_TO_ELEMENT_SIZE
bool ReverseBytesSupported(size_t size_in_bytes) {
  switch (size_in_bytes) {
enum class BranchHintingMode {

class BranchHintingStresser {
    OpIndex isolate_root = __ LoadRootRegister();
    int builtin_slot_offset =
    static_assert(std::is_same<Smi, BuiltinPtr>(), "BuiltinPtr must be Smi");
    return stub_mode == StubCallMode::kCallWasmRuntimeStub
               ? __ RelocatableWasmBuiltinCallTarget(builtin)
    Builtin builtin = Is64() ? Builtin::kI64ToBigInt : Builtin::kI32PairToBigInt;
        interface_descriptor,
    if constexpr (Is64()) {
      V<Word32> low_word = __ TruncateWord64ToWord32(input);
      V<Word32> high_word = __ TruncateWord64ToWord32(__ ShiftRightLogical(
    __ Call(target, {low_word, high_word}, ts_call_descriptor));
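    // The next fragments compute {target, implicit_arg} pairs for imported
    // functions, dispatch-table (call_indirect) entries, and
    // WasmInternalFunction references, respectively (intervening lines
    // elided).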
    return {target, implicit_arg};
        __ WordPtrMul(__ ChangeUint32ToUintPtr(func_index.value()),
    __ LoadProtectedPointerField(dispatch_table, dispatch_table_entry_offset,
    return {target, implicit_arg};
            WasmInternalFunction::kProtectedImplicitArgOffset));
        WasmInternalFunction::kRawCallTargetOffset);
    return {target, implicit_arg};
    switch (type.kind()) {
            kWasmTrustedInstanceDataIndirectPointerTag,
            WasmInstanceObject::kTrustedDataOffset));
      Zone* zone, OpIndex thread_in_wasm_flag_address, bool new_value) {
    OpIndex message_id = __ TaggedIndexConstant(static_cast<int32_t>(
        new_value ? AbortReason::kUnexpectedThreadInWasmSet
                  : AbortReason::kUnexpectedThreadInWasmUnset));
    CallRuntime(zone, Runtime::kAbort, {message_id}, __ NoContextConstant());
    __ Store(thread_in_wasm_flag_address, __ Word32Constant(new_value),
    OpIndex isolate_root = __ LoadRootRegister();
    OpIndex thread_in_wasm_flag_address =
                   std::initializer_list<OpIndex> args) {
                 std::initializer_list<OpIndex> args) {
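    // Stack-switching helpers: the fragments below swap between the wasm
    // stack and the central stack, updating the stack pointer, the JS limit,
    // and the is_on_central_stack flag.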
    __ AtomicCompareExchange(
        __ IsolateField(IsolateFieldId::kJsLimitAddress),
        __ UintPtrConstant(0),
    OpIndex isolate_root = __ LoadRootRegister();
        IsolateData::is_on_central_stack_flag_offset());
        IsolateData::active_stack_offset());
    __ SetStackPointer(central_stack_sp);
        IsolateData::is_on_central_stack_flag_offset());
      old_limit_var = old_limit;
    return {old_sp_var, old_limit_var};
        IsolateData::is_on_central_stack_flag_offset());
        IsolateData::active_stack_offset());
    __ SetStackPointer(old_sp);
  template <typename... Args>
  template <typename... Args>
                     std::unique_ptr<AssumptionsJournal>* assumptions,
                     std::unique_ptr<AssumptionsJournal>* assumptions,
    for (; index < decoder->sig_->parameter_count(); index++) {
    for (; index < decoder->sig_->parameter_count(); index++) {
      if (!type.is_defaultable()) {
        DCHECK(type.is_reference());
        op = __ RootConstant(RootIndex::kOptimizedOut);
          decoder->module_->type_feedback.feedback_for_function.find(
          decoder->module_->type_feedback.feedback_for_function.end());
                decoder->module_->type_feedback.feedback_for_function
                    ->second.feedback_vector.size());
                decoder->module_->type_feedback.feedback_for_function
                    ->second.call_targets.size());
                             WASM_TRUSTED_INSTANCE_DATA_TYPE))) {
        OpIndex message_id = __ TaggedIndexConstant(
            static_cast<int32_t>(AbortReason::kUnexpectedInstanceType));
                    __ NoContextConstant());
    StackCheck(WasmStackCheckOp::Kind::kFunctionEntry, decoder);
                  __ NoContextConstant());
    if (v8_flags.stress_branch_hinting) {
    } else if (branch_hints_it != decoder->module_->branch_hints.end()) {
        __ output_graph().source_positions()[__ output_graph().EndIndex()];
    for (OpIndex index : __ output_graph().AllOperationIndices()) {
          __ output_graph().operation_origins()[index]);
    uint32_t node_count =
        __ output_graph().NumberOfOperationsForDebugging();
    bool can_be_innermost = false;
    block->assigned = assigned;
    uint32_t arity = block->start_merge.arity;
    for (uint32_t i = 0; i < arity; i++) {
      OpIndex phi = __ PendingLoopPhi(stack_base[i].op,
      block->start_merge[i].op = phi;
    StackCheck(WasmStackCheckOp::Kind::kLoop, decoder);
    block->merge_block = loop_merge;
    block->false_or_loop_or_catch_block = loop;
    __ Goto(target->merge_block);
      IF ({cond.op, hint}) {
      __ Branch({cond.op, hint}, target->merge_block, non_branching);
      __ Bind(non_branching);
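  // BrTable lowering: one or two distinct targets become conditional
  // branches; larger tables are emitted as a SwitchOp with intermediate
  // blocks for each case.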
  template <typename ValidationTag>
    using TargetMap = absl::btree_map<uint32_t, CaseVector>;
      uint32_t target = iterator.next();
    switch (cases.size()) {
        return __ Word32Equal(key.op, __ Word32Constant(cases[0]));
        return __ Word32BitwiseOr(__ Word32Equal(key.op, cases[0]),
                                  __ Word32Equal(key.op, cases[1]));
    auto insert_cond_branch = [this, &decoder](OpIndex cond,
      __ Branch({cond, hint}, target->merge_block, non_branching);
      __ Bind(non_branching);
    for (auto const& [target, cases] : table_analysis.other_targets()) {
      insert_cond_branch(generate_cond(key, cases), target);
      OpIndex cond = __ Word32BitwiseAnd(lower, upper);
          __ output_graph().graph_zone()
              ->AllocateArray<compiler::turboshaft::SwitchOp::Case>(
      while (new_block_iterator.has_next()) {
        new_block_iterator.next();
    while (branch_iterator.has_next()) {
      TSBlock* intermediate = intermediate_blocks[i];
      __ Bind(intermediate);
    if (__ current_block() != nullptr) {
      __ Goto(block->merge_block);
    switch (block->kind) {
        if (block->reachable()) {
          __ Goto(block->merge_block);
                        &block->start_merge);
          __ Goto(block->merge_block);
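    // Loop finalization: loops whose backedge turned out unreachable are
    // downgraded to plain merges, and PendingLoopPhi operations are patched
    // with their backedge inputs.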
    if (__ current_block() != nullptr) {
      __ Goto(block->merge_block);
    if (block->reachable()) {
    if (!block->false_or_loop_or_catch_block->IsBound()) {
    } else if (block->merge_block->PredecessorCount() == 0) {
      block->false_or_loop_or_catch_block->SetKind(
      for (auto& op : __ output_graph().operations(
               *block->false_or_loop_or_catch_block)) {
        if (!pending_phi) break;
        OpIndex replaced = __ output_graph().Index(op);
      __ Goto(block->false_or_loop_or_catch_block);
      auto operations = __ output_graph().operations(
          *block->false_or_loop_or_catch_block);
      auto to = operations.begin();
      while (to != operations.end() &&
      for (auto it = block->assigned->begin(); it != block->assigned->end();
        if (*it == static_cast<int>(ssa_env_.size())) break;
        OpIndex replaced = __ output_graph().Index(*to);
      for (uint32_t i = 0; i < block->br_merge()->arity; ++i, ++to) {
        OpIndex replaced = __ output_graph().Index(*to);
            {pending_phi.first(), (*block->br_merge())[i].op}),
    size_t return_count = decoder->sig_->return_count();
    Value* stack_base = return_count == 0
                                     return_count + drop_values));
    for (size_t i = 0; i < return_count; i++) {
      return_values[i] = stack_base[i].op;
    if (return_count == 1) {
      info = __ StackSlot(size, size);
                  __ NoContextConstant());
          v8_flags.experimental_wasm_growable_stacks);
    if (__ generating_unreachable_operations()) return;
    for (size_t i = 0; i < return_count; i++) {
    result->op = __ Word32Constant(value);
    result->op = __ Word64Constant(value);
    result->op = __ Float32Constant(value);
    result->op = __ Float64Constant(value);
        decoder->module_->functions[function_index].sig_index;
    bool shared = decoder->module_->type(sig_index).is_shared;
    __ AssertNotNull(arg.op, arg.type, TrapId::kTrapNullDereference);
    bool shared = decoder->module_->globals[imm.index].shared;
    bool shared = decoder->module_->globals[imm.index].shared;
    __ AssertNotNull(obj.op, obj.type, TrapId::kTrapIllegalCast);
    bool use_select = false;
        if (SupportedOperations::word32_select()) use_select = true;
        if (SupportedOperations::word64_select()) use_select = true;
        if (SupportedOperations::float32_select()) use_select = true;
        if (SupportedOperations::float64_select()) use_select = true;
        use_select ? Implementation::kCMove : Implementation::kBranch);
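    // Endianness conversion (big-endian targets): values are byte-swapped
    // with a ReverseBytes operation when the width supports it, otherwise
    // with a shift-and-mask loop over individual bytes.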
    int value_size_in_bits = 8 * value_size_in_bytes;
    bool is_float = false;
    switch (wasmtype.kind()) {
        value = __ BitcastFloat64ToWord64(node);
        result = __ Word64Constant(static_cast<uint64_t>(0));
        value = __ BitcastFloat32ToWord32(node);
        DCHECK(ReverseBytesSupported(value_size_in_bytes));
        value_size_in_bits = 8 * value_size_in_bytes;
          value = __ Word32ShiftLeft(value, 16);
          value = __ Word32ShiftLeft(value, 16);
    uint32_t shift_count;
    if (ReverseBytesSupported(value_size_in_bytes)) {
      switch (value_size_in_bytes) {
      for (i = 0, shift_count = value_size_in_bits - 8;
           i < value_size_in_bits / 2; i += 8, shift_count -= 16) {
        if (value_size_in_bits > 32) {
          shift_lower = __ Word64ShiftLeft(value, shift_count);
          shift_higher = __ Word64ShiftRightLogical(value, shift_count);
          lower_byte = __ Word64BitwiseAnd(shift_lower,
                                           static_cast<uint64_t>(0xFF)
                                               << (value_size_in_bits - 8 - i));
          higher_byte = __ Word64BitwiseAnd(shift_higher,
                                            static_cast<uint64_t>(0xFF) << i);
          shift_lower = __ Word32ShiftLeft(value, shift_count);
          shift_higher = __ Word32ShiftRightLogical(value, shift_count);
          lower_byte = __ Word32BitwiseAnd(shift_lower,
                                           static_cast<uint32_t>(0xFF)
                                               << (value_size_in_bits - 8 - i));
          higher_byte = __ Word32BitwiseAnd(shift_higher,
                                            static_cast<uint32_t>(0xFF) << i);
    switch (wasmtype.kind()) {
    int value_size_in_bits = 8 * value_size_in_bytes;
    bool is_float = false;
        value = __ BitcastFloat64ToWord64(node);
        result = __ Word64Constant(static_cast<uint64_t>(0));
        value = __ BitcastFloat32ToWord32(node);
        DCHECK(ReverseBytesSupported(value_size_in_bytes));
    uint32_t shift_count;
    if (ReverseBytesSupported(value_size_in_bytes < 4 ? 4
                                                      : value_size_in_bytes)) {
      switch (value_size_in_bytes) {
          result = __ Word32ReverseBytes(__ Word32ShiftLeft(value, 16));
          result = __ Word32ReverseBytes(value);
          result = __ Word64ReverseBytes(value);
          result = __ Simd128ReverseBytes(value);
      for (i = 0, shift_count = value_size_in_bits - 8;
           i < value_size_in_bits / 2; i += 8, shift_count -= 16) {
        if (value_size_in_bits > 32) {
          shift_lower = __ Word64ShiftLeft(value, shift_count);
          shift_higher = __ Word64ShiftRightLogical(value, shift_count);
          lower_byte = __ Word64BitwiseAnd(shift_lower,
                                           static_cast<uint64_t>(0xFF)
                                               << (value_size_in_bits - 8 - i));
          higher_byte = __ Word64BitwiseAnd(shift_higher,
                                            static_cast<uint64_t>(0xFF) << i);
          shift_lower = __ Word32ShiftLeft(value, shift_count);
          shift_higher = __ Word32ShiftRightLogical(value, shift_count);
          lower_byte = __ Word32BitwiseAnd(shift_lower,
                                           static_cast<uint32_t>(0xFF)
                                               << (value_size_in_bits - 8 - i));
          higher_byte = __ Word32BitwiseAnd(shift_higher,
                                            static_cast<uint32_t>(0xFF) << i);
    if (value_size_in_bits < 32) {
      int shift_bit_count = 32 - value_size_in_bits;
          __ Word32ShiftRightArithmeticShiftOutZeros(result, shift_bit_count);
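    // LoadMem: f16 loads fall back to an i16 load plus a
    // wasm_float16_to_float32 conversion call when the CPU lacks float16
    // support; offsets that do not fit in int32 are folded into the base
    // pointer instead.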
    bool needs_f16_to_f32_conv = false;
    if (type.value() == LoadType::kF32LoadF16 &&
        !SupportedOperations::float16()) {
      needs_f16_to_f32_conv = true;
      type = LoadType::kI32Load16U;
    auto [final_index, strategy] =
    const bool offset_in_int_range =
        imm.offset <= std::numeric_limits<int32_t>::max();
        offset_in_int_range ? mem_start : __ WordPtrAdd(mem_start, imm.offset);
    int32_t offset = offset_in_int_range ? static_cast<int32_t>(imm.offset) : 0;
#if V8_TARGET_BIG_ENDIAN
      load = repr.IsSigned() ? __ ChangeInt32ToInt64(load)
                             : __ ChangeUint32ToUint64(load);
    if (needs_f16_to_f32_conv) {
          load, ExternalReference::wasm_float16_to_float32(),
    auto [final_index, strategy] =
    compiler::turboshaft::Simd128LoadTransformOp::LoadKind load_kind =
    using TransformKind =
        compiler::turboshaft::Simd128LoadTransformOp::TransformKind;
    TransformKind transform_kind;
        transform_kind = TransformKind::k8x8S;
        transform_kind = TransformKind::k8x8U;
        transform_kind = TransformKind::k16x4S;
        transform_kind = TransformKind::k16x4U;
        transform_kind = TransformKind::k32x2S;
        transform_kind = TransformKind::k32x2U;
        transform_kind = TransformKind::k8Splat;
        transform_kind = TransformKind::k16Splat;
        transform_kind = TransformKind::k32Splat;
        transform_kind = TransformKind::k64Splat;
        transform_kind = TransformKind::k32Zero;
        transform_kind = TransformKind::k64Zero;
        load_kind, transform_kind, 0);
    using compiler::turboshaft::Simd128LaneMemoryOp;
    auto [final_index, strategy] =
    Simd128LaneMemoryOp::LaneKind lane_kind;
        lane_kind = Simd128LaneMemoryOp::LaneKind::k8;
        lane_kind = Simd128LaneMemoryOp::LaneKind::k16;
        lane_kind = Simd128LaneMemoryOp::LaneKind::k32;
        lane_kind = Simd128LaneMemoryOp::LaneKind::k64;
        value.op, Simd128LaneMemoryOp::Mode::kLoad, kind, lane_kind, laneidx,
                const Value& value) {
    bool needs_f32_to_f16_conv = false;
    if (type.value() == StoreType::kF32StoreF16 &&
        !SupportedOperations::float16()) {
      needs_f32_to_f16_conv = true;
      type = StoreType::kI32Store16;
    auto [final_index, strategy] =
    OpIndex store_value = value.op;
      store_value = __ TruncateWord64ToWord32(store_value);
    if (needs_f32_to_f16_conv) {
          store_value, ExternalReference::wasm_float32_to_float16(),
#if defined(V8_TARGET_BIG_ENDIAN)
    const bool offset_in_int_range =
        imm.offset <= std::numeric_limits<int32_t>::max();
        offset_in_int_range ? mem_start : __ WordPtrAdd(mem_start, imm.offset);
    int32_t offset = offset_in_int_range ? static_cast<int32_t>(imm.offset) : 0;
    __ Store(base, final_index, store_value, store_kind, repr,
                 const Value& value, const uint8_t laneidx) {
    using compiler::turboshaft::Simd128LaneMemoryOp;
    auto [final_index, strategy] =
    Simd128LaneMemoryOp::LaneKind lane_kind;
        lane_kind = Simd128LaneMemoryOp::LaneKind::k8;
        lane_kind = Simd128LaneMemoryOp::LaneKind::k16;
        lane_kind = Simd128LaneMemoryOp::LaneKind::k32;
        lane_kind = Simd128LaneMemoryOp::LaneKind::k64;
        final_index, value.op, Simd128LaneMemoryOp::Mode::kStore, kind,
        lane_kind,
    if (imm.memory->is_memory64()) {
      result->op = __ ChangeIntPtrToInt64(result_wordptr);
      result->op = __ TruncateWordPtrToWord32(result_wordptr);
    if (!imm.memory->is_memory64()) {
          decoder, {__ Word32Constant(imm.index), value.op});
                   value.op,
                   __ Word64Constant(static_cast<int64_t>(kMaxInt))))) {
                       BuiltinCallDescriptor::WasmMemoryGrow>(
                       decoder, {__ Word32Constant(imm.index),
                                 __ TruncateWord64ToWord32(value.op)})));
        GOTO(done, __ Word64Constant(int64_t{-1}));
    BIND(done, result_64);
    return __ WasmTypeCheck(value.op, rtt, config);
    if (__ generating_unreachable_operations()) return false;
    const WasmTypeCastOp* cast =
        __ output_graph().Get(value.op).TryCast<WasmTypeCastOp>();
                  __ Word32Constant(0));
    GOTO(clamped_start_label, length);
    BIND(clamped_start_label, clamped_start);
    start = clamped_start;
            decoder, {string, search, start_smi});
    return __ UntagSmi(result_value);
            BuiltinCallDescriptor::StringToLowerCaseIntl>(
            decoder, __ NoContextConstant(), {string});
    OpIndex isolate_root = __ LoadRootRegister();
    __ Store(isolate_root, __ Word32Constant(op_type),
        BuiltinCallDescriptor::ThrowDataViewOutOfBounds>(decoder, {});
        BuiltinCallDescriptor::ThrowDataViewDetachedError>(decoder, {});
      GOTO(heapnumber_label);
      GOTO(done_label, smi_length);
    BIND(heapnumber_label);
    V<Float64> float_value = __ template LoadField<Float64>(
    if constexpr (Is64()) {
               __ TruncateFloat64ToInt64OverflowUndefined(float_value)));
               __ ChangeInt32ToIntPtr(
                   __ TruncateFloat64ToInt32OverflowUndefined(float_value)));
    BIND(done_label, length);
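    // GetDataViewByteLength: plain JSDataViews return their cached byte
    // length; resizable (RAB) and growable shared (GSAB) backed views
    // recompute it from the buffer, raising a type error or detached error
    // for out-of-bounds views.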
    IF (LIKELY(__ HasInstanceType(dataview, InstanceType::JS_DATA_VIEW_TYPE))) {
        GOTO(done_label, view_byte_length);
                   dataview, InstanceType::JS_RAB_GSAB_DATA_VIEW_TYPE)),
        bit_field, JSArrayBufferView::IsLengthTrackingBit::kMask);
    V<Word32> backed_by_rab_bit = __ Word32BitwiseAnd(
        bit_field, JSArrayBufferView::IsBackedByRabBit::kMask);
    IF (backed_by_rab_bit) {
      IF (length_tracking) {
        IF (LIKELY(__ UintPtrLessThanOrEqual(view_byte_offset,
                                             buffer_byte_length))) {
          final_length = __ WordPtrSub(buffer_byte_length, view_byte_offset);
        GOTO(done_label, final_length);
                __ WordPtrAdd(view_byte_offset, view_byte_length),
        GOTO(done_label, view_byte_length);
        decoder->zone(), Runtime::kGrowableSharedArrayBufferByteLength,
        {buffer}, __ NoContextConstant());
    IF (LIKELY(__ UintPtrLessThanOrEqual(view_byte_offset, gsab_length))) {
      gsab_buffer_byte_length = __ WordPtrSub(gsab_length, view_byte_offset);
      GOTO(done_label, gsab_buffer_byte_length);
    BIND(type_error_label);
    BIND(done_label, final_view_byte_length);
    return final_view_byte_length;
        __ WordPtrSub(view_byte_length, GetTypeSize(op_type));
    return __ LoadField<WordPtr>(
        (op_type == DataViewOp::kGetInt8 || op_type == DataViewOp::kGetUint8)
            ? __ Word32Constant(1)
    return __ LoadDataViewElement(dataview, data_ptr, offset, is_little_endian,
                                  GetExternalArrayType(op_type));
        (op_type == DataViewOp::kSetInt8 || op_type == DataViewOp::kSetUint8)
            ? __ Word32Constant(1)
    __ StoreDataViewElement(dataview, data_ptr, offset, value, is_little_endian,
                            GetExternalArrayType(op_type));
  template <typename T>
    return __ AnnotateWasmType(value, type);
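    // Well-known fast API calls: the receiver map is checked against a
    // cached map (updating the IC on mismatch) before arguments are
    // marshalled, with overflow-checked float-to-integer truncations that
    // branch to value_out_of_range on failure.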
    uint32_t func_index = imm.index;
    V<Object> data = __ LoadFixedArrayElement(imports_array, func_index);
                                 WasmFastApiCallData::kCachedMapOffset);
      cached_map = __ BitcastWordPtrToTagged(__ WordPtrBitwiseAnd(
      GOTO_IF(__ TaggedEqual(map, cached_map), if_equal_maps);
      GOTO(if_unknown_receiver);
    BIND(if_unknown_receiver);
          BuiltinCallDescriptor::WasmFastApiCallTypeCheckAndUpdateIC>(
    GOTO(if_equal_maps);
    BIND(if_equal_maps);
    size_t param_count = sig->parameter_count();
    if (sig->return_count()) {
    inputs[0] = receiver_handle;
    for (size_t i = 1; i < param_count; ++i) {
      if (sig->GetParam(i).is_reference()) {
        inputs[i] = __ AdaptLocalArgument(args[i].op);
      } else if (callback_sig->GetParam(i - 1).representation() ==
                __ TryTruncateFloat64ToInt64(args[i].op);
            inputs[i] = __ template Projection<0>(truncate);
                __ Word32Equal(__ template Projection<1>(truncate), 0)),
                value_out_of_range);
            inputs[i] = __ ChangeInt32ToInt64(args[i].op);
                __ TryTruncateFloat32ToInt64(args[i].op);
            inputs[i] = __ template Projection<0>(truncate);
                __ Word32Equal(__ template Projection<1>(truncate), 0)),
                value_out_of_range);
                __ TryTruncateFloat64ToUint64(args[i].op);
            inputs[i] = __ template Projection<0>(truncate);
                __ Word32Equal(__ template Projection<1>(truncate), 0)),
                value_out_of_range);
            inputs[i] = __ ChangeUint32ToUint64(args[i].op);
                __ TryTruncateFloat32ToUint64(args[i].op);
            inputs[i] = __ template Projection<0>(truncate);
                __ Word32Equal(__ template Projection<1>(truncate), 0)),
                value_out_of_range);
    options_object = __ StackSlot(kSize, kAlign);
                  "We expected 'isolate' to be pointer sized, but it is not.");
    __ StoreOffHeap(options_object,
                    __ IsolateField(IsolateFieldId::kIsolateAddress),
                                  WasmFastApiCallData::kCallbackDataOffset);
    V<WordPtr> data_argument_to_pass = __ AdaptLocalArgument(callback_data);
    __ StoreOffHeap(options_object, data_argument_to_pass,
    inputs[param_count] = options_object;
    __ Store(__ LoadRootRegister(),
    __ Store(__ LoadRootRegister(),
                 __ TaggedEqual(exception, LOAD_ROOT(TheHoleValue)))) {
          BuiltinCallDescriptor::WasmPropagateException>(
        ret_val = __ WordBitwiseAnd(ret_val, __ Word32Constant(0xff),
        ret_val = __ ChangeInt64ToFloat64(ret_val);
        ret_val = __ TruncateWord64ToWord32(ret_val);
        ret_val = __ ChangeInt64ToFloat32(ret_val);
        ret_val = __ ChangeUint64ToFloat64(ret_val);
        ret_val = __ TruncateWord64ToWord32(ret_val);
        ret_val = __ ChangeUint64ToFloat32(ret_val);
    BIND(value_out_of_range);
    auto [target, implicit_arg] =
    if (sig->return_count()) {
      returns[0].op = ret_val;
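    // HandleWellKnownImport: recognized JS imports (js-string builtins,
    // DataView accessors, Math functions, fast API calls) get specialized
    // lowerings; each match records the assumption in the journal and the
    // detected feature on the decoder.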
    uint32_t index = imm.index;
    if (!decoder->module_) return false;
        decoder->module_->type_feedback.well_known_imports;
    WKI imported_op = well_known_imports.get(index);
    switch (imported_op) {
      case WKI::kUninstantiated:
      case WKI::kLinkError:
      case WKI::kStringCast: {
        decoder->detected_->add_imported_strings();
      case WKI::kStringTest: {
        decoder->detected_->add_imported_strings();
      case WKI::kStringCharCodeAt: {
        decoder->detected_->add_imported_strings();
      case WKI::kStringCodePointAt: {
        decoder->detected_->add_imported_strings();
      case WKI::kStringCompare: {
            decoder, {a_string, b_string}));
        decoder->detected_->add_imported_strings();
      case WKI::kStringConcat: {
                {head_string, tail_string});
        decoder->detected_->add_imported_strings();
      case WKI::kStringEquals: {
        static constexpr bool kNullSucceeds = true;
        decoder->detected_->add_imported_strings();
      case WKI::kStringFromCharCode: {
            BuiltinCallDescriptor::WasmStringFromCodePoint>(decoder, {capped});
        decoder->detected_->add_imported_strings();
      case WKI::kStringFromCodePoint: {
            BuiltinCallDescriptor::WasmStringFromCodePoint>(decoder,
        decoder->detected_->add_imported_strings();
      case WKI::kStringFromWtf16Array: {
            BuiltinCallDescriptor::WasmStringNewWtf16Array>(
        decoder->detected_->add_imported_strings();
      case WKI::kStringFromUtf8Array:
        decoder->detected_->add_imported_strings();
      case WKI::kStringIntoUtf8Array: {
        decoder->detected_->add_imported_strings();
      case WKI::kStringToUtf8Array: {
            BuiltinCallDescriptor::WasmStringToUtf8Array>(decoder, {string});
        result = __ AnnotateWasmType(result_value, returns[0].type);
        decoder->detected_->add_imported_strings();
      case WKI::kStringLength: {
        result = __ template LoadField<Word32>(
        decoder->detected_->add_imported_strings();
      case WKI::kStringMeasureUtf8: {
        decoder->detected_->add_imported_strings();
      case WKI::kStringSubstring: {
            BuiltinCallDescriptor::WasmStringViewWtf16Slice>(
            decoder, {view, args[1].op, args[2].op});
        decoder->detected_->add_imported_strings();
      case WKI::kStringToWtf16Array: {
            BuiltinCallDescriptor::WasmStringEncodeWtf16Array>(
        decoder->detected_->add_imported_strings();
      case WKI::kDoubleToString: {
            BuiltinCallDescriptor::WasmFloat64ToString>(decoder, {args[0].op});
        decoder->detected_->Add(
                ? WasmDetectedFeature::stringref
                : WasmDetectedFeature::imported_strings);
      case WKI::kIntToString: {
            decoder, {args[0].op, args[1].op});
                ? WasmDetectedFeature::stringref
                : WasmDetectedFeature::imported_strings);
      case WKI::kParseFloat: {
        if (args[0].type.is_nullable()) {
               __ Float64Constant(std::numeric_limits<double>::quiet_NaN()));
              BuiltinCallDescriptor::WasmStringToDouble>(decoder, {args[0].op});
          GOTO(done, not_null_res);
          BIND(done, result_f64);
              BuiltinCallDescriptor::WasmStringToDouble>(decoder, {args[0].op});
      case WKI::kStringIndexOf: {
        if (args[0].type.is_nullable()) {
                BuiltinCallDescriptor::ThrowIndexOfCalledOnNull>(decoder, {});
        if (args[1].type.is_nullable()) {
          BIND(search_done_label, search_value);
          search = search_value;
      case WKI::kStringIndexOfImported: {
        decoder->detected_->add_imported_strings();
      case WKI::kStringToLocaleLowerCaseStringref:
      case WKI::kStringToLowerCaseStringref: {
        if (args[0].type.is_nullable()) {
                BuiltinCallDescriptor::ThrowToLowerCaseCalledOnNull>(decoder,
        V<String> result_value = CallStringToLowercase(decoder, string);
      case WKI::kStringToLowerCaseImported: {
        V<String> result_value = CallStringToLowercase(decoder, string);
        decoder->detected_->add_imported_strings();
      case WKI::kDataViewGetBigInt64: {
      case WKI::kDataViewGetBigUint64:
      case WKI::kDataViewGetFloat32:
      case WKI::kDataViewGetFloat64:
      case WKI::kDataViewGetInt8:
      case WKI::kDataViewGetInt16:
      case WKI::kDataViewGetInt32:
      case WKI::kDataViewGetUint8:
      case WKI::kDataViewGetUint16:
      case WKI::kDataViewGetUint32:
      case WKI::kDataViewSetBigInt64:
      case WKI::kDataViewSetBigUint64:
      case WKI::kDataViewSetFloat32:
      case WKI::kDataViewSetFloat64:
      case WKI::kDataViewSetInt8:
      case WKI::kDataViewSetInt16:
      case WKI::kDataViewSetInt32:
      case WKI::kDataViewSetUint8:
      case WKI::kDataViewSetUint16:
      case WKI::kDataViewSetUint32:
      case WKI::kDataViewByteLength: {
        if constexpr (Is64()) {
              __ ChangeInt64ToFloat64(__ ChangeIntPtrToInt64(view_byte_length));
              __ TruncateWordPtrToWord32(view_byte_length));
      case WKI::kMathF64Acos:
      case WKI::kMathF64Asin:
      case WKI::kMathF64Atan:
      case WKI::kMathF64Atan2:
      case WKI::kMathF64Cos:
      case WKI::kMathF64Sin:
      case WKI::kMathF64Tan:
      case WKI::kMathF64Exp:
      case WKI::kMathF64Log:
      case WKI::kMathF64Pow:
      case WKI::kMathF64Sqrt:
      case WKI::kFastAPICall: {
        if (v8_flags.trace_wasm_inlining) {
    (*assumptions_)->RecordAssumption(index, imported_op);
    if (imm.index < decoder->module_->num_imported_functions) {
      auto [target, implicit_arg] =
              decoder->module_->functions[imm.index].code.length())) {
        if (v8_flags.trace_wasm_inlining) {
          PrintF("[function %d%s: inlining direct call #%d to function %d]\n",
    if (imm.index < decoder->module_->num_imported_functions) {
      auto [target, implicit_arg] =
              decoder->module_->functions[imm.index].code.length())) {
        if (v8_flags.trace_wasm_inlining) {
              "[function %d%s: inlining direct tail call #%d to function %d]\n",
    if (v8_flags.wasm_inlining_call_indirect) {
      if (__ generating_unreachable_operations()) return;
                    std::numeric_limits<int>::max())) {
        constexpr bool kNeedsTypeOrNullCheck = false;
            decoder, index_wordptr, imm, kNeedsTypeOrNullCheck);
        size_t return_count = imm.sig->return_count();
        std::vector<base::SmallVector<OpIndex, 2>> case_returns(return_count);
        constexpr int kSlowpathCase = 1;
        for (size_t i = 0; i < feedback_cases.size() + kSlowpathCase; i++) {
          if (use_deopt_slowpath &&
            if (v8_flags.trace_wasm_inlining) {
                  "[function %d%s: Not emitting deopt slow-path for "
                  "call_indirect #%d as feedback contains non-inlineable "
            use_deopt_slowpath = false;
        __ Branch({__ TaggedEqual(implicit_arg, instance),
                   kUnlikelyCrossInstanceCall},
                  case_blocks[0], no_inline_block);
        for (size_t i = 0; i < feedback_cases.size(); i++) {
          __ Bind(case_blocks[i]);
          if (!tree || !tree->is_inlined()) {
            __ Goto(case_blocks[i + 1]);
            use_deopt_slowpath = false;
          uint32_t inlined_index = tree->function_index();
                  decoder->module_->functions[inlined_index].sig)) {
            __ Goto(case_blocks[i + 1]);
              __ RelocatableWasmIndirectCallTarget(inlined_index);
          bool is_last_feedback_case = (i == feedback_cases.size() - 1);
          if (use_deopt_slowpath && is_last_feedback_case) {
            DeoptIfNot(decoder, __ Word32Equal(target, inlined_target),
            __ Branch({__ Word32Equal(target, inlined_target), hint},
                      inline_block, case_blocks[i + 1]);
            __ Bind(inline_block);
          if (v8_flags.trace_wasm_inlining) {
                "[function %d%s: Speculatively inlining call_indirect #%d, "
                "case #%zu, to function %d]\n",
                         static_cast<uint32_t>(i), false, args,
                         direct_returns.data());
          if (__ current_block() != nullptr) {
            for (size_t ret = 0; ret < direct_returns.size(); ret++) {
              case_returns[ret].push_back(direct_returns[ret].op);
        __ Bind(no_inline_block);
        if (use_deopt_slowpath) {
          Deopt(decoder, frame_state);
          auto [call_target, call_implicit_arg] =
                          indirect_returns.data(),
          for (size_t ret = 0; ret < indirect_returns.size(); ret++) {
            case_returns[ret].push_back(indirect_returns[ret].op);
        for (size_t i = 0; i < case_returns.size(); i++) {
    auto [target, implicit_arg] =
    if (v8_flags.wasm_inlining_call_indirect) {
                    std::numeric_limits<int>::max())) {
        constexpr bool kNeedsTypeOrNullCheck = false;
            decoder, index_wordptr, imm, kNeedsTypeOrNullCheck);
        constexpr int kSlowpathCase = 1;
        for (size_t i = 0; i < feedback_cases.size() + kSlowpathCase; i++) {
        __ Branch({__ TaggedEqual(implicit_arg, instance),
                   kUnlikelyCrossInstanceCall},
                  case_blocks[0], no_inline_block);
        for (size_t i = 0; i < feedback_cases.size(); i++) {
          __ Bind(case_blocks[i]);
          if (!tree || !tree->is_inlined()) {
            __ Goto(case_blocks[i + 1]);
          uint32_t inlined_index = tree->function_index();
                  decoder->module_->functions[inlined_index].sig)) {
            __ Goto(case_blocks[i + 1]);
              __ RelocatableWasmIndirectCallTarget(inlined_index);
          bool is_last_case = (i == feedback_cases.size() - 1);
          __ Branch({__ Word32Equal(target, inlined_target), hint},
                    inline_block, case_blocks[i + 1]);
          __ Bind(inline_block);
          if (v8_flags.trace_wasm_inlining) {
                "[function %d%s: Speculatively inlining return_call_indirect "
                "#%d, case #%zu, to function %d]\n",
                         static_cast<uint32_t>(i), true, args, nullptr);
        __ Bind(no_inline_block);
    auto [target, implicit_arg] =
    if (__ generating_unreachable_operations()) return;
                  std::numeric_limits<int>::max())) {
      size_t return_count = sig->return_count();
      std::vector<base::SmallVector<OpIndex, 2>> case_returns(return_count);
      constexpr int kSlowpathCase = 1;
      for (size_t i = 0; i < feedback_cases.size() + kSlowpathCase; i++) {
      __ Goto(case_blocks[0]);
      for (size_t i = 0; i < feedback_cases.size(); i++) {
        __ Bind(case_blocks[i]);
        if (!tree || !tree->is_inlined()) {
          __ Goto(case_blocks[i + 1]);
          use_deopt_slowpath = false;
        uint32_t inlined_index = tree->function_index();
        DCHECK(!decoder->module_->function_is_shared(inlined_index));
            __ LoadFixedArrayElement(func_refs, inlined_index);
        bool is_last_feedback_case = (i == feedback_cases.size() - 1);
        if (use_deopt_slowpath && is_last_feedback_case) {
            if (v8_flags.trace_wasm_inlining) {
                  "[function %d%s: Not emitting deopt slow-path for "
                  "call_ref #%d as feedback contains non-inlineable "
            use_deopt_slowpath = false;
        bool emit_deopt = use_deopt_slowpath && is_last_feedback_case;
          if (frame_state.valid()) {
            DeoptIfNot(decoder, __ TaggedEqual(func_ref.op, inlined_func_ref),
            use_deopt_slowpath = false;
          __ Branch({__ TaggedEqual(func_ref.op, inlined_func_ref), hint},
                    inline_block, case_blocks[i + 1]);
          __ Bind(inline_block);
        if (v8_flags.trace_wasm_inlining) {
              "[function %d%s: Speculatively inlining call_ref #%d, case #%zu, "
              "to function %d]\n",
                         false, args, direct_returns.data());
        if (__ current_block() != nullptr) {
          for (size_t ret = 0; ret < direct_returns.size(); ret++) {
            case_returns[ret].push_back(direct_returns[ret].op);
      if (!use_deopt_slowpath) {
        __ Bind(no_inline_block);
        auto [target, implicit_arg] =
        for (size_t ret = 0; ret < ref_returns.size(); ret++) {
          case_returns[ret].push_back(ref_returns[ret].op);
      for (size_t i = 0; i < case_returns.size(); i++) {
                           func_ref.op, func_ref.type);
                  std::numeric_limits<int>::max())) {
      constexpr int kSlowpathCase = 1;
      for (size_t i = 0; i < feedback_cases.size() + kSlowpathCase; i++) {
      __ Goto(case_blocks[0]);
      for (size_t i = 0; i < feedback_cases.size(); i++) {
        __ Bind(case_blocks[i]);
        if (!tree || !tree->is_inlined()) {
          __ Goto(case_blocks[i + 1]);
        uint32_t inlined_index = tree->function_index();
        DCHECK(!decoder->module_->function_is_shared(inlined_index));
            __ LoadFixedArrayElement(func_refs, inlined_index);
        bool is_last_case = (i == feedback_cases.size() - 1);
        __ Branch({__ TaggedEqual(func_ref.op, inlined_func_ref), hint},
                  inline_block, case_blocks[i + 1]);
        __ Bind(inline_block);
        if (v8_flags.trace_wasm_inlining) {
              "[function %d%s: Speculatively inlining return_call_ref #%d, "
              "case #%zu, to function %d]\n",
                         true, args, nullptr);
      __ Bind(no_inline_block);
    auto [target, implicit_arg] =
                  bool pass_null_along_branch, Value* result_on_fallthrough) {
    result_on_fallthrough->op = ref_object.op;
      int drop_values = pass_null_along_branch ? 0 : 1;
      BrOrRet(decoder, depth, drop_values);
                   uint32_t depth, bool) {
#define HANDLE_BINARY_OPCODE(kind)                                            \
        __ Simd128Binop(V<compiler::turboshaft::Simd128>::Cast(args[0].op),   \
                        V<compiler::turboshaft::Simd128>::Cast(args[1].op),   \
                        compiler::turboshaft::Simd128BinopOp::Kind::k##kind); \
#undef HANDLE_BINARY_OPCODE

#define HANDLE_F16X8_BIN_OPTIONAL_OPCODE(kind, extern_ref)                    \
  case kExprF16x8##kind:                                                      \
    if (SupportedOperations::float16()) {                                     \
      result->op = __ Simd128Binop(                                           \
          V<compiler::turboshaft::Simd128>::Cast(args[0].op),                 \
          V<compiler::turboshaft::Simd128>::Cast(args[1].op),                 \
          compiler::turboshaft::Simd128BinopOp::Kind::kF16x8##kind);          \
      result->op = CallCStackSlotToStackSlot(args[0].op, args[1].op,          \
                                             ExternalReference::extern_ref(), \
                                             MemoryRepresentation::Simd128()); \
#undef HANDLE_F16X8_BIN_OPTIONAL_OPCODE
#define HANDLE_F16X8_INVERSE_COMPARISON(kind, ts_kind, extern_ref)            \
  case kExprF16x8##kind:                                                      \
    if (SupportedOperations::float16()) {                                     \
      result->op = __ Simd128Binop(                                           \
          V<compiler::turboshaft::Simd128>::Cast(args[1].op),                 \
          V<compiler::turboshaft::Simd128>::Cast(args[0].op),                 \
          compiler::turboshaft::Simd128BinopOp::Kind::kF16x8##ts_kind);       \
      result->op = CallCStackSlotToStackSlot(args[1].op, args[0].op,          \
                                             ExternalReference::extern_ref(), \
                                             MemoryRepresentation::Simd128()); \
#undef HANDLE_F16X8_INVERSE_COMPARISON

#define HANDLE_INVERSE_COMPARISON(wasm_kind, ts_kind)            \
  case kExpr##wasm_kind:                                         \
    result->op = __ Simd128Binop(                                \
        V<compiler::turboshaft::Simd128>::Cast(args[1].op),      \
        V<compiler::turboshaft::Simd128>::Cast(args[0].op),      \
        compiler::turboshaft::Simd128BinopOp::Kind::k##ts_kind); \
#undef HANDLE_INVERSE_COMPARISON
#define HANDLE_UNARY_NON_OPTIONAL_OPCODE(kind)                                \
        __ Simd128Unary(V<compiler::turboshaft::Simd128>::Cast(args[0].op),   \
                        compiler::turboshaft::Simd128UnaryOp::Kind::k##kind); \
      FOREACH_SIMD_128_UNARY_NON_OPTIONAL_OPCODE(
#undef HANDLE_UNARY_NON_OPTIONAL_OPCODE

#define HANDLE_UNARY_OPTIONAL_OPCODE(kind, feature, external_ref) \
    if (SupportedOperations::feature()) {                         \
      result->op = __ Simd128Unary(                               \
          V<compiler::turboshaft::Simd128>::Cast(args[0].op),     \
          compiler::turboshaft::Simd128UnaryOp::Kind::k##kind);   \
      result->op = CallCStackSlotToStackSlot(                     \
          args[0].op, ExternalReference::external_ref(),          \
          MemoryRepresentation::Simd128());                       \
                                   wasm_f16x8_nearest_int)
                                   wasm_i16x8_sconvert_f16x8)
                                   wasm_i16x8_uconvert_f16x8)
                                   wasm_f16x8_sconvert_i16x8)
                                   wasm_f16x8_uconvert_i16x8)
                                   wasm_f16x8_demote_f32x4_zero)
                                   float64_to_float16_raw_bits,
                                   wasm_f16x8_demote_f64x2_zero)
                                   wasm_f32x4_promote_low_f16x8)
                                   wasm_f32x4_nearest_int)
                                   wasm_f64x2_nearest_int)
#undef HANDLE_UNARY_OPTIONAL_OPCODE
#define HANDLE_SHIFT_OPCODE(kind)                                             \
        __ Simd128Shift(V<compiler::turboshaft::Simd128>::Cast(args[0].op),   \
                        V<Word32>::Cast(args[1].op),                          \
                        compiler::turboshaft::Simd128ShiftOp::Kind::k##kind); \
#undef HANDLE_SHIFT_OPCODE

#define HANDLE_TEST_OPCODE(kind)                                             \
        __ Simd128Test(V<compiler::turboshaft::Simd128>::Cast(args[0].op),   \
                       compiler::turboshaft::Simd128TestOp::Kind::k##kind);  \
#undef HANDLE_TEST_OPCODE

#define HANDLE_SPLAT_OPCODE(kind)                                             \
  case kExpr##kind##Splat:                                                    \
        __ Simd128Splat(V<Any>::Cast(args[0].op),                             \
                        compiler::turboshaft::Simd128SplatOp::Kind::k##kind); \
#undef HANDLE_SPLAT_OPCODE
      case kExprF16x8Splat:
        if (SupportedOperations::float16()) {
              compiler::turboshaft::Simd128SplatOp::Kind::kF16x8);
              args[0].op, ExternalReference::wasm_float32_to_float16(),
              compiler::turboshaft::Simd128SplatOp::Kind::kI16x8);
#define HANDLE_TERNARY_MASK_OPCODE(kind)                        \
    result->op = __ Simd128Ternary(                             \
        V<compiler::turboshaft::Simd128>::Cast(args[2].op),     \
        V<compiler::turboshaft::Simd128>::Cast(args[0].op),     \
        V<compiler::turboshaft::Simd128>::Cast(args[1].op),     \
        compiler::turboshaft::Simd128TernaryOp::Kind::k##kind); \
#undef HANDLE_TERNARY_MASK_OPCODE

#define HANDLE_TERNARY_OTHER_OPCODE(kind)                       \
    result->op = __ Simd128Ternary(                             \
        V<compiler::turboshaft::Simd128>::Cast(args[0].op),     \
        V<compiler::turboshaft::Simd128>::Cast(args[1].op),     \
        V<compiler::turboshaft::Simd128>::Cast(args[2].op),     \
        compiler::turboshaft::Simd128TernaryOp::Kind::k##kind); \
#undef HANDLE_TERNARY_OTHER_OPCODE

#define HANDLE_F16X8_TERN_OPCODE(kind, extern_ref)                          \
    if (SupportedOperations::float16()) {                                   \
      result->op = __ Simd128Ternary(                                       \
          V<compiler::turboshaft::Simd128>::Cast(args[0].op),               \
          V<compiler::turboshaft::Simd128>::Cast(args[1].op),               \
          V<compiler::turboshaft::Simd128>::Cast(args[2].op),               \
          compiler::turboshaft::Simd128TernaryOp::Kind::k##kind);           \
      result->op = CallCStackSlotToStackSlot(                               \
          ExternalReference::extern_ref(), MemoryRepresentation::Simd128(), \
          {{args[0].op, MemoryRepresentation::Simd128()},                   \
           {args[1].op, MemoryRepresentation::Simd128()},                   \
           {args[2].op, MemoryRepresentation::Simd128()}});                 \
#undef HANDLE_F16X8_TERN_OPCODE
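    // Lane accesses: extract or replace a single SIMD lane. f16x8 lanes are
    // routed through i16x8 plus explicit f16<->f32 conversion calls when
    // native float16 is unavailable.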
    using compiler::turboshaft::Simd128ExtractLaneOp;
    using compiler::turboshaft::Simd128ReplaceLaneOp;
      case kExprI8x16ExtractLaneS:
        result->op = __ Simd128ExtractLane(
            input_val, Simd128ExtractLaneOp::Kind::kI8x16S, imm.lane);
      case kExprI8x16ExtractLaneU:
        result->op = __ Simd128ExtractLane(
            input_val, Simd128ExtractLaneOp::Kind::kI8x16U, imm.lane);
      case kExprI16x8ExtractLaneS:
        result->op = __ Simd128ExtractLane(
            input_val, Simd128ExtractLaneOp::Kind::kI16x8S, imm.lane);
      case kExprI16x8ExtractLaneU:
        result->op = __ Simd128ExtractLane(
            input_val, Simd128ExtractLaneOp::Kind::kI16x8U, imm.lane);
      case kExprI32x4ExtractLane:
        result->op = __ Simd128ExtractLane(
            input_val, Simd128ExtractLaneOp::Kind::kI32x4, imm.lane);
      case kExprI64x2ExtractLane:
        result->op = __ Simd128ExtractLane(
            input_val, Simd128ExtractLaneOp::Kind::kI64x2, imm.lane);
      case kExprF16x8ExtractLane:
        if (SupportedOperations::float16()) {
          result->op = __ Simd128ExtractLane(
              input_val, Simd128ExtractLaneOp::Kind::kF16x8, imm.lane);
          auto f16 = __ Simd128ExtractLane(
              input_val, Simd128ExtractLaneOp::Kind::kI16x8S, imm.lane);
              f16, ExternalReference::wasm_float16_to_float32(),
      case kExprF32x4ExtractLane:
        result->op = __ Simd128ExtractLane(
            input_val, Simd128ExtractLaneOp::Kind::kF32x4, imm.lane);
      case kExprF64x2ExtractLane:
        result->op = __ Simd128ExtractLane(
            input_val, Simd128ExtractLaneOp::Kind::kF64x2, imm.lane);
      case kExprI8x16ReplaceLane:
            Simd128ReplaceLaneOp::Kind::kI8x16, imm.lane);
      case kExprI16x8ReplaceLane:
            Simd128ReplaceLaneOp::Kind::kI16x8, imm.lane);
      case kExprI32x4ReplaceLane:
            Simd128ReplaceLaneOp::Kind::kI32x4, imm.lane);
      case kExprI64x2ReplaceLane:
            Simd128ReplaceLaneOp::Kind::kI64x2, imm.lane);
      case kExprF16x8ReplaceLane:
        if (SupportedOperations::float16()) {
          result->op = __ Simd128ReplaceLane(
              Simd128ReplaceLaneOp::Kind::kF16x8, imm.lane);
              inputs[1].op, ExternalReference::wasm_float32_to_float16(),
              Simd128ReplaceLaneOp::Kind::kI16x8,
      case kExprF32x4ReplaceLane:
            Simd128ReplaceLaneOp::Kind::kF32x4, imm.lane);
      case kExprF64x2ReplaceLane:
            Simd128ReplaceLaneOp::Kind::kF64x2, imm.lane);
        compiler::turboshaft::Simd128ShuffleOp::Kind::kI8x16, imm.value);
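    // Throw: tag arguments are encoded into a FixedArray of tagged values,
    // with i64 values split into two i32 halves and s128 values stored as
    // four extracted i32 lanes.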
             const Value arg_values[]) {
    size_t count = imm.tag->sig->parameter_count();
    for (size_t index = 0; index < count; index++) {
            BuiltinCallDescriptor::WasmAllocateFixedArray>(
            decoder, {__ IntPtrConstant(encoded_size)});
    for (size_t i = 0; i < count; i++) {
      switch (sig->GetParam(i).kind()) {
          value = __ BitcastFloat32ToWord32(value);
          value = __ BitcastFloat64ToWord64(value);
              __ TruncateWord64ToWord32(__ Word64ShiftRightLogical(value, 32));
          OpIndex lower_half = __ TruncateWord64ToWord32(value);
        case wasm::kRefNull:
          __ StoreFixedArrayElement(values_array, index, value,
          using Kind = compiler::turboshaft::Simd128ExtractLaneOp::Kind;
                             value_s128, Kind::kI32x4, 0)));
                             value_s128, Kind::kI32x4, 1)));
                             value_s128, Kind::kI32x4, 2)));
                             value_s128, Kind::kI32x4, 3)));
        __ LoadFixedArrayElement(instance_tags, imm.index));
    if (v8_flags.trace_wasm_inlining) {
          "[function %d%s: Disabling deoptimizations for speculative "
          "inlining due to legacy exception handling usage]\n",
                    nullptr, &block->exception);
            {block->exception, LOAD_ROOT(wasm_exception_tag_symbol)}));
        __ LoadFixedArrayElement(instance_tags, imm.index));
    block->false_or_loop_or_catch_block = if_no_catch;
    if (imm.tag->sig->parameter_count() == 1 &&
          __ TaggedEqual(caught_tag, LOAD_ROOT(UndefinedValue));
                                             WasmTagObject::kTagOffset);
        GOTO_IF(__ TaggedEqual(expected_tag, js_tag), if_catch,
        GOTO(no_catch_merge);
        IF (__ TaggedEqual(caught_tag, expected_tag)) {
          GOTO(if_catch, values[0].op);
        GOTO(no_catch_merge);
      BIND(no_catch_merge);
      __ Goto(if_no_catch);
      BIND(if_catch, caught_exception);
      values[0].op = caught_exception;
                   if_catch, if_no_catch);
                    nullptr, &block->exception);
    if (block->exception.valid()) {
          decoder, {block->exception});
      __ Goto(target_catch);
    DCHECK(block->is_try_catchall() || block->is_try_catch());
      if (v8_flags.trace_wasm_inlining) {
            "[function %d%s: Disabling deoptimizations for speculative "
            "inlining due to legacy exception handling usage]\n",
                    nullptr, &block->exception);
                    nullptr, &block->exception);
      values.last().op = block->exception;
            {block->exception, LOAD_ROOT(wasm_exception_tag_symbol)}));
    block->false_or_loop_or_catch_block = if_no_catch;
          __ TaggedEqual(caught_tag, LOAD_ROOT(UndefinedValue));
                                             WasmTagObject::kTagOffset);
        GOTO_IF(__ TaggedEqual(expected_tag, js_tag), if_catch,
        GOTO(no_catch_merge);
        IF (__ TaggedEqual(caught_tag, expected_tag)) {
                     values.SubVector(0, values.size() - 1));
          values.last().op = block->exception;
          GOTO(if_catch, values[0].op);
        GOTO(no_catch_merge);
      BIND(no_catch_merge);
      __ Goto(if_no_catch);
      BIND(if_catch, caught_exception);
      values[0].op = caught_exception;
                   if_catch, if_no_catch);
                 values.SubVector(0, values.size() - 1));
    values.last().op = block->exception;
      bool is_last = &catch_case == &block->catch_cases.last();
                      nullptr, &block->exception);
      ThrowRef(decoder, block->exception);
    OpIndex effective_offset = __ WordPtrAdd(converted_index, imm.offset);
    result->op = CallC(&sig, ExternalReference::wasm_atomic_notify(),
                       {addr, num_waiters_to_wake});
    constexpr StubCallMode kStubMode = StubCallMode::kCallWasmRuntimeStub;
    OpIndex effective_offset = __ WordPtrAdd(converted_index, imm.offset);
    if (opcode == kExprI32AtomicWait) {
          decoder, {__ Word32Constant(imm.memory->index), effective_offset,
                    expected, bigint_timeout});
          decoder, {__ Word32Constant(imm.memory->index), effective_offset,
                    bigint_expected, bigint_timeout});
    if (opcode == WasmOpcode::kExprAtomicNotify) {
    if (opcode == WasmOpcode::kExprI32AtomicWait ||
        opcode == WasmOpcode::kExprI64AtomicWait) {
    struct AtomicOpInfo {
      Binop bin_op = Binop::kAdd;
            in_out_rep(in_out_rep),
            memory_rep(memory_rep) {}
          : op_type(op_type), in_out_rep(in_out_rep), memory_rep(memory_rep) {}

#define CASE_BINOP(OPCODE, BINOP, RESULT, INPUT)                             \
  case WasmOpcode::kExpr##OPCODE:                                           \
    return AtomicOpInfo(Binop::k##BINOP, RegisterRepresentation::RESULT(),  \
                        MemoryRepresentation::INPUT());
#define RMW_OPERATION(V)                                         \
  V(I32AtomicAdd, Add, Word32, Uint32)                           \
  V(I32AtomicAdd8U, Add, Word32, Uint8)                          \
  V(I32AtomicAdd16U, Add, Word32, Uint16)                        \
  V(I32AtomicSub, Sub, Word32, Uint32)                           \
  V(I32AtomicSub8U, Sub, Word32, Uint8)                          \
  V(I32AtomicSub16U, Sub, Word32, Uint16)                        \
  V(I32AtomicAnd, And, Word32, Uint32)                           \
  V(I32AtomicAnd8U, And, Word32, Uint8)                          \
  V(I32AtomicAnd16U, And, Word32, Uint16)                        \
  V(I32AtomicOr, Or, Word32, Uint32)                             \
  V(I32AtomicOr8U, Or, Word32, Uint8)                            \
  V(I32AtomicOr16U, Or, Word32, Uint16)                          \
  V(I32AtomicXor, Xor, Word32, Uint32)                           \
  V(I32AtomicXor8U, Xor, Word32, Uint8)                          \
  V(I32AtomicXor16U, Xor, Word32, Uint16)                        \
  V(I32AtomicExchange, Exchange, Word32, Uint32)                 \
  V(I32AtomicExchange8U, Exchange, Word32, Uint8)                \
  V(I32AtomicExchange16U, Exchange, Word32, Uint16)              \
  V(I32AtomicCompareExchange, CompareExchange, Word32, Uint32)   \
  V(I32AtomicCompareExchange8U, CompareExchange, Word32, Uint8)  \
  V(I32AtomicCompareExchange16U, CompareExchange, Word32, Uint16) \
  V(I64AtomicAdd, Add, Word64, Uint64)                           \
  V(I64AtomicAdd8U, Add, Word64, Uint8)                          \
  V(I64AtomicAdd16U, Add, Word64, Uint16)                        \
  V(I64AtomicAdd32U, Add, Word64, Uint32)                        \
  V(I64AtomicSub, Sub, Word64, Uint64)                           \
  V(I64AtomicSub8U, Sub, Word64, Uint8)                          \
  V(I64AtomicSub16U, Sub, Word64, Uint16)                        \
  V(I64AtomicSub32U, Sub, Word64, Uint32)                        \
  V(I64AtomicAnd, And, Word64, Uint64)                           \
  V(I64AtomicAnd8U, And, Word64, Uint8)                          \
  V(I64AtomicAnd16U, And, Word64, Uint16)                        \
  V(I64AtomicAnd32U, And, Word64, Uint32)                        \
  V(I64AtomicOr, Or, Word64, Uint64)                             \
  V(I64AtomicOr8U, Or, Word64, Uint8)                            \
  V(I64AtomicOr16U, Or, Word64, Uint16)                          \
  V(I64AtomicOr32U, Or, Word64, Uint32)                          \
  V(I64AtomicXor, Xor, Word64, Uint64)                           \
  V(I64AtomicXor8U, Xor, Word64, Uint8)                          \
  V(I64AtomicXor16U, Xor, Word64, Uint16)                        \
  V(I64AtomicXor32U, Xor, Word64, Uint32)                        \
  V(I64AtomicExchange, Exchange, Word64, Uint64)                 \
  V(I64AtomicExchange8U, Exchange, Word64, Uint8)                \
  V(I64AtomicExchange16U, Exchange, Word64, Uint16)              \
  V(I64AtomicExchange32U, Exchange, Word64, Uint32)              \
  V(I64AtomicCompareExchange, CompareExchange, Word64, Uint64)   \
  V(I64AtomicCompareExchange8U, CompareExchange, Word64, Uint8)  \
  V(I64AtomicCompareExchange16U, CompareExchange, Word64, Uint16) \
  V(I64AtomicCompareExchange32U, CompareExchange, Word64, Uint32)

#define CASE_LOAD(OPCODE, RESULT, INPUT)                         \
  case WasmOpcode::kExpr##OPCODE:                                \
    return AtomicOpInfo(kLoad, RegisterRepresentation::RESULT(), \
                        MemoryRepresentation::INPUT());
#define LOAD_OPERATION(V)            \
  V(I32AtomicLoad, Word32, Uint32)   \
  V(I32AtomicLoad16U, Word32, Uint16) \
  V(I32AtomicLoad8U, Word32, Uint8)  \
  V(I64AtomicLoad, Word64, Uint64)   \
  V(I64AtomicLoad32U, Word64, Uint32) \
  V(I64AtomicLoad16U, Word64, Uint16) \
  V(I64AtomicLoad8U, Word64, Uint8)
#undef LOAD_OPERATION

#define CASE_STORE(OPCODE, INPUT, OUTPUT)                         \
  case WasmOpcode::kExpr##OPCODE:                                 \
    return AtomicOpInfo(kStore, RegisterRepresentation::INPUT(),  \
                        MemoryRepresentation::OUTPUT());
#define STORE_OPERATION(V)            \
  V(I32AtomicStore, Word32, Uint32)   \
  V(I32AtomicStore16U, Word32, Uint16) \
  V(I32AtomicStore8U, Word32, Uint8)  \
  V(I64AtomicStore, Word64, Uint64)   \
  V(I64AtomicStore32U, Word64, Uint32) \
  V(I64AtomicStore16U, Word64, Uint16) \
  V(I64AtomicStore8U, Word64, Uint8)
#undef STORE_OPERATION
    AtomicOpInfo info = AtomicOpInfo::Get(opcode);
    std::tie(index, bounds_check_result) =
    if (info.op_type == kBinop) {
      if (info.bin_op == Binop::kCompareExchange) {
        result->op = __ AtomicCompareExchange(
            args[2].op, info.in_out_rep, info.memory_rep, access_kind);
                         args[1].op, info.bin_op, info.in_out_rep,
                         info.memory_rep, access_kind);
    if (info.op_type == kStore) {
        value = __ TruncateWord64ToWord32(value);
#ifdef V8_TARGET_BIG_ENDIAN
          value, info.memory_rep.ToMachineType().representation(), wasm_type);
#if V8_TARGET_BIG_ENDIAN
    bool needs_zero_extension_64 = false;
        info.memory_rep.SizeInBytes() < 8 &&
        info.memory_rep.SizeInBytes() != 1) {
      needs_zero_extension_64 = true;
                      info.memory_rep, loaded_value_rep);
#ifdef V8_TARGET_BIG_ENDIAN
        result->op, info.memory_rep.ToMachineType(), wasm_type);
    if (needs_zero_extension_64) {
        CallC(&sig, ExternalReference::wasm_memory_init(),
    __ TrapIfNot(result, TrapId::kTrapMemOutOfBounds);
        CallC(&sig, ExternalReference::wasm_memory_copy(),
               src_uintptr, size_uintptr});
    __ TrapIfNot(result, TrapId::kTrapMemOutOfBounds);
        CallC(&sig, ExternalReference::wasm_memory_fill(),
              {__ Word32Constant(imm.index), dst_uintptr, value.op,
               size_uintptr});
    __ TrapIfNot(result, TrapId::kTrapMemOutOfBounds);
    bool shared = decoder->enabled_.has_shared() &&
    __ Store(data_segment_sizes, __ Word32Constant(0),
                               WasmTableObject::kCurrentLengthOffset);
        index_wordptr, __ ChangeUint32ToUintPtr(__ UntagSmi(size_smi)));
    __ TrapIfNot(in_bounds, TrapId::kTrapTableOutOfBounds);
                              WasmTableObject::kEntriesOffset);
      V<Word32> instance_type = __ LoadInstanceTypeField(entry_map);
          UNLIKELY(__ Word32Equal(instance_type, InstanceType::TUPLE2_TYPE)),
      GOTO(resolved, entry);
    bool extract_shared_data = !shared_ && imm.table->shared;
            BuiltinCallDescriptor::WasmFunctionTableGet>(
            decoder, {__ IntPtrConstant(imm.index), index_wordptr,
                      __ Word32Constant(extract_shared_data ? 1 : 0)}));
    BIND(resolved, resolved_entry);
    result->op = resolved_entry;
    bool extract_shared_data = !shared_ && imm.table->shared;
          decoder, {__ IntPtrConstant(imm.index),
                    __ Word32Constant(extract_shared_data ? 1 : 0),
                    index_wordptr, value.op});
          decoder, {__ IntPtrConstant(imm.index),
                    __ Word32Constant(extract_shared_data ? 1 : 0),
                    index_wordptr, value.op});
                 const Value& size_val) {
    DCHECK_EQ(table->shared, table->shared);
         __ NumberConstant((!shared_ && table->shared) ? 1 : 0),
                 const Value& size_val) {
        decoder, {dst_wordptr, src_wordptr, size_wordptr,
                  __ NumberConstant((!shared_ && table_is_shared) ? 1 : 0)});
    if (!imm.table->is_table64()) {
      delta_wordptr = __ ChangeUint32ToUintPtr(delta.op);
    } else if constexpr (Is64()) {
      delta_wordptr = delta.op;
                   __ Word64ShiftRightLogical(delta.op, 32))),
          end, __ Word32Constant(-1));
    bool extract_shared_data = !shared_ && imm.table->shared;
            decoder, {__ NumberConstant(imm.index), delta_wordptr,
                      __ Word32Constant(extract_shared_data), value.op}));
    if (imm.table->is_table64()) {
      result->op = __ ChangeInt32ToInt64(result_i32);
    bool extract_shared_data = !shared_ && imm.table->shared;
        {start_wordptr, count_wordptr, __ Word32Constant(extract_shared_data),
         __ NumberConstant(imm.index), value.op});
                             WasmTableObject::kCurrentLengthOffset));
    if (imm.table->is_table64()) {
      result->op = __ ChangeUint32ToUint64(size_word32);
      result->op = size_word32;
    bool shared = decoder->module_->elem_segments[imm.index].shared;
    __ StoreFixedArrayElement(elem_segments, imm.index,
    uint32_t field_count = imm.struct_type->field_count();
    for (uint32_t i = 0; i < field_count; ++i) {
      args_vector[i] = args[i].op;
    uint32_t field_count = imm.struct_type->field_count();
    for (uint32_t i = 0; i < field_count; i++) {
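    // ArrayCopy: short copies become an inline per-element loop (the length
    // threshold depends on the element kind); longer copies call the
    // wasm_array_copy C function. The loop direction is chosen so that
    // overlapping source and destination ranges copy correctly.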
                const Value& length, const Value& initial_value,
               const Value& value) {
        src.type.is_nullable()
    IF_NOT (__ Word32Equal(length.op, 0)) {
      int array_copy_max_loop_length;
      switch (element_type.kind()) {
          array_copy_max_loop_length = 20;
          array_copy_max_loop_length = 35;
          array_copy_max_loop_length = 100;
        case wasm::kRefNull:
          array_copy_max_loop_length = 15;
      IF (__ Uint32LessThan(array_copy_max_loop_length, length.op)) {
        CallC(&sig, ExternalReference::wasm_array_copy(),
              {dst_array, dst_index.op, src_array, src_index.op, length.op});
            __ Word32Sub(__ Word32Add(src_index.op, length.op), 1);
        IF (__ Uint32LessThan(src_index.op, dst_index.op)) {
              __ Word32Sub(__ Word32Add(dst_index.op, length.op), 1);
            __ ArraySet(dst_array, dst_index_loop, value, element_type);
            src_index_loop = __ Word32Sub(src_index_loop, 1);
            dst_index_loop = __ Word32Sub(dst_index_loop, 1);
            __ ArraySet(dst_array, dst_index_loop, value, element_type);
            IF_NOT (__ Uint32LessThan(src_index_loop, src_end_index)) BREAK;
            src_index_loop = __ Word32Add(src_index_loop, 1);
            dst_index_loop = __ Word32Add(dst_index_loop, 1);
                 const Value& length) {
    const bool emit_write_barrier =
        imm.array_type->element_type().is_reference();
                 array_value, index.op, length.op,
    int element_count = length_imm.index;
    bool shared = decoder->module_->type(array_imm.index).is_shared;
    V<WasmArray> array = __ WasmAllocateArray(rtt, element_count, type);
    for (int i = 0; i < element_count; i++) {
      __ ArraySet(array, __ Word32Constant(i), elements[i].op, element_type);
    bool is_element = array_imm.array_type->element_type().is_reference();
    bool segment_is_shared =
            ? decoder->module_->elem_segments[segment_imm.index].shared
            : decoder->module_->data_segments[segment_imm.index].shared);
        {__ Word32Constant(segment_imm.index), offset.op, length.op,
                  const Value& length) {
    bool is_element = array_imm.array_type->element_type().is_reference();
    bool segment_is_shared =
            ? decoder->module_->elem_segments[segment_imm.index].shared
            : decoder->module_->data_segments[segment_imm.index].shared);
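    // i31 references are unboxed Smis: ref.i31 and i31.get_s/u reduce to
    // shift and bitcast sequences over the (possibly pointer-compressed)
    // Smi encoding.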
    if constexpr (Is64()) {
      result->op = __ BitcastWord32ToWord64(shifted);
      V<WordPtr> input_wordptr = __ ChangeUint32ToUintPtr(input.op);
      result->op = __ WordPtrShiftRightArithmetic(
      result->op = __ Word32ShiftRightArithmeticShiftOutZeros(
          __ TruncateWordPtrToWord32(__ BitcastTaggedToWordPtr(input_non_null)),
      result->op = __ TruncateWordPtrToWord32(
          __ WordPtrShiftRightArithmeticShiftOutZeros(
              __ BitcastTaggedToWordPtr(input_non_null),
      result->op = __ Word32ShiftRightLogical(
          __ TruncateWordPtrToWord32(__ BitcastTaggedToWordPtr(input_non_null)),
      result->op = __ TruncateWordPtrToWord32(__ WordPtrShiftRightLogical(
          __ WordPtrShiftLeft(__ BitcastTaggedToWordPtr(input_non_null), 1),
                                 Map::kInstanceDescriptorsOffset);
    if (target.is_exact() ||
        decoder->module_->type(target.ref_index()).is_final) {
                        target.ref_index());
      result->op = __ WasmTypeCheck(object.op, rtt, config);
      result->op = __ WasmTypeCheck(object.op, rtt, config);
    if (v8_flags.experimental_wasm_assume_ref_cast_succeeds) {
                        target.ref_index());
      result->op = __ WasmTypeCast(object.op, rtt, config);
    if (v8_flags.experimental_wasm_assume_ref_cast_succeeds) {
      result->op = __ WasmTypeCast(object.op, rtt, config);
    if (v8_flags.experimental_wasm_assume_ref_cast_succeeds) {
      result->op = __ WasmTypeCast(object.op, rtt, config);
4956 return BrOnCastImpl(decoder, rtt, config, object, value_on_branch, br_depth,
4961 const Value& object, const Value& descriptor,
4962 Value* value_on_branch, uint32_t br_depth,
4969 return BrOnCastImpl(decoder, rtt, config, object, value_on_branch, br_depth,
4980 return BrOnCastImpl(decoder, rtt, config, object, value_on_branch, br_depth,
4985 const Value& object, Value* value_on_fallthrough,
4993 return BrOnCastFailImpl(decoder, rtt, config, object, value_on_fallthrough,
4998 const Value& object, const Value& descriptor,
4999 Value* value_on_fallthrough, uint32_t br_depth,
5006 return BrOnCastFailImpl(decoder, rtt, config, object, value_on_fallthrough,
5017 return BrOnCastFailImpl(decoder, rtt, config, object, value_on_fallthrough,
5031 decoder, {index, size.op, memory, variant_smi});
5040 if (__ generating_unreachable_operations()) return nullptr;
5047 array = didnt_throw->throwing_operation();
5050 if (call == nullptr) return nullptr;
5053     .MatchWasmStubCallConstant(call->callee(), &stub_id)) {
5056 DCHECK_LT(stub_id, static_cast<uint64_t>(Builtin::kFirstBytecodeHandler));
5057 if (stub_id == static_cast<uint64_t>(Builtin::kWasmArrayNewSegment)) {
5075 OpIndex segment_index = array_new->input(1);
5086 OpIndex segment_length = array_new->input(3);
5090 BuiltinCallDescriptor::WasmStringFromDataSegment>(
5091     decoder, {segment_length, start.op, end.op, index_smi, offset_smi,
5096 BuiltinCallDescriptor::WasmStringNewWtf8Array>(
5123 decoder, {__ Word32Constant(imm.index), index, size.op});
5131 BuiltinCallDescriptor::WasmStringNewWtf16Array>(
5140 decoder, {__ Word32Constant(imm.index)});
5155 case unibrow::Utf8Variant::kUtf8:
5157   BuiltinCallDescriptor::WasmStringMeasureUtf8>(decoder, {string});
5159 case unibrow::Utf8Variant::kWtf8:
5161   BuiltinCallDescriptor::WasmStringMeasureWtf8>(decoder, {string});
5162 case unibrow::Utf8Variant::kUtf8NoTrap:
5168 return __ template LoadField<Word32>(
5183 V<Word32> mem_index = __ Word32Constant(memory.index);
5184 V<Word32> utf8 = __ Word32Constant(static_cast<int32_t>(variant));
5186 BuiltinCallDescriptor::WasmStringEncodeWtf8>(
5205 BuiltinCallDescriptor::WasmStringEncodeWtf8Array>(
5206     decoder, {str, array, start, utf8});
5215 BuiltinCallDescriptor::WasmStringEncodeWtf16>(
5223 BuiltinCallDescriptor::WasmStringEncodeWtf16Array>(
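// StringEqual fast paths: identical references compare equal without a call,
// and differing lengths compare unequal; only the remaining cases reach the
// string-equality builtin.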
5243 GOTO_IF(__ TaggedEqual(a, b), done, __ Word32Constant(1));
5256 GOTO_IF_NOT(__ Word32Equal(a_length, b_length), done, __ Word32Constant(0));
5265 decoder, {a, b, length});
5267 GOTO(done, __ IsRootConstant(result, RootIndex::kTrueValue));
5269 BIND(done, eq_result);
5281 BuiltinCallDescriptor::WasmStringIsUSVSequence>(
5296 BuiltinCallDescriptor::WasmStringViewWtf8Advance>(
5311 BuiltinCallDescriptor::WasmStringViewWtf8Encode>(
5312     decoder, {address, pos.op, bytes.op,
5323 BuiltinCallDescriptor::WasmStringViewWtf8Slice>(
5334 auto prepare = __ StringPrepareForGetCodeUnit(string);
5336 V<WordPtr> base_offset = __ template Projection<1>(prepare);
5337 V<Word32> charwidth_shift = __ template Projection<2>(prepare);
5341 __ TrapIfNot(__ Uint32LessThan(offset, length),
5342              TrapId::kTrapStringOffsetOutOfBounds);
5350 GOTO_IF(__ Word32Equal(charwidth_shift, 0), onebyte);
5354 __ WordPtrMul(__ ChangeInt32ToIntPtr(offset), 2), base_offset);
5362 GOTO(done, result_value);
5366 object_offset = __ WordPtrAdd(__ ChangeInt32ToIntPtr(offset), base_offset);
5370 base_ptr = __ BitcastTaggedToWordPtr(base);
5374 GOTO(done, result_value);
5378 BuiltinCallDescriptor::WasmStringViewWtf16GetCodeUnit>(
5381 BIND(done, final_result);
5386 return final_result;
5397 auto prepare = __ StringPrepareForGetCodeUnit(string);
5399 V<WordPtr> base_offset = __ template Projection<1>(prepare);
5400 V<Word32> charwidth_shift = __ template Projection<2>(prepare);
5404 __ TrapIfNot(__ Uint32LessThan(offset, length),
5405              TrapId::kTrapStringOffsetOutOfBounds);
5413 GOTO_IF(__ Word32Equal(charwidth_shift, 0), onebyte);
5417 __ WordPtrMul(__ ChangeInt32ToIntPtr(offset), 2), base_offset);
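// Surrogate-pair decoding: a lead surrogate lies in [0xD800, 0xDBFF]
// (checked via (lead & 0xFC00) == 0xD800) and a trail surrogate in
// [0xDC00, 0xDFFF]. The combined code point is
//   (lead << 10) + trail + (0x10000 - (0xD800 << 10) - 0xDC00),
// which folds the subtraction of both surrogate offsets into one bias.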
5426 __ Word32Equal(__ Word32BitwiseAnd(lead, 0xFC00), 0xD800);
5429 GOTO_IF_NOT(__ Uint32LessThan(trail_offset, length), done, lead);
5431 base_ptr, __ WordPtrAdd(object_offset, __ IntPtrConstant(2)),
5434 __ Word32Equal(__ Word32BitwiseAnd(trail, 0xFC00), 0xDC00);
5437 __ Word32Constant(0x10000 - (0xD800 << 10) - 0xDC00);
5439 __ Word32Add(trail, surrogate_bias));
5444 object_offset = __ WordPtrAdd(__ ChangeInt32ToIntPtr(offset), base_offset);
5448 base_ptr = __ BitcastTaggedToWordPtr(base);
5456 BuiltinCallDescriptor::WasmStringCodePointAt>(
5459 BIND(done, final_result);
5464 return final_result;
5476 BuiltinCallDescriptor::WasmStringViewWtf16Encode>(
5477     decoder, {address, pos.op, codeunits.op, string, mem_index});
5485 BuiltinCallDescriptor::WasmStringViewWtf16Slice>(
5502 BuiltinCallDescriptor::WasmStringViewIterNext>(decoder, {iter});
5509 BuiltinCallDescriptor::WasmStringViewIterAdvance>(
5510     decoder, {iter, codepoints.op});
5517 BuiltinCallDescriptor::WasmStringViewIterRewind>(decoder,
5518     {iter, codepoints.op});
5525 BuiltinCallDescriptor::WasmStringViewIterSlice>(decoder,
5526     {iter, codepoints.op});
5536 decoder, {lhs_val, rhs_val}));
5542 BuiltinCallDescriptor::WasmStringFromCodePoint>(decoder,
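// String hash: the hash is cached in the string's raw hash field. If the
// "hash not computed" bit is set, fall through to a runtime call; otherwise
// the cached value is used directly.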
5553 V<Word32> raw_hash = __ template LoadField<Word32>(
5559 __ Word32BitwiseAnd(raw_hash, hash_not_computed_mask);
5560 GOTO_IF(hash_not_computed, runtime_label);
5566 GOTO(end_label, hash);
5568 BIND(runtime_label);
5571 decoder, {string_val});
5572 GOTO(end_label, hash_runtime);
5574 BIND(end_label, hash_val);
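// InstanceCache: caches the trusted instance data, managed object maps,
// native context, and (when safe) memory 0's start and size so repeated
// loads within a function body can be reused. The memory size is only
// cached when the memory is not shared or cannot grow.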
5599 : mem_start_(assembler), mem_size_(assembler), asm_(assembler) {}
5603 DCHECK(!trusted_data_.valid());
5605 managed_object_maps_ =
5608     WasmTrustedInstanceData::kManagedObjectMapsOffset);
5612     WasmTrustedInstanceData::kNativeContextOffset);
5626 memory_size_cached_ = !mem.is_shared || !memory_can_grow_;
5633 if (memory_size_cached_) {
5634   mem_size_ = LoadMemSize();
5636 mem_start_ = LoadMemStart();
5643 if (memory_can_move()) mem_start_ = LoadMemStart();
5644 if (memory_can_grow_ && memory_size_cached_) mem_size_ = LoadMemSize();
5656 if (!memory_size_cached_) return LoadMemSize();
5661 static constexpr uint8_t kUnused = ~uint8_t{0};
5669 if (!memory_can_move()) kind = kind.Immutable();
5671     WasmTrustedInstanceData::kMemory0StartOffset);
5677 if (memory_is_shared_ && memory_can_grow_) {
5682 if (!memory_can_grow_) kind = kind.Immutable();
5684     WasmTrustedInstanceData::kMemory0SizeOffset);
5703 bool memory_is_shared_{false};
5704 bool memory_can_grow_{false};
5705 bool memory_can_move_{false};
5706 bool memory_size_cached_{false};
5708 bool has_memory_{false};
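// BlockPhis: per-block bookkeeping for phi construction. It records one phi
// per local and per merge value (plus incoming exceptions) and collects the
// inputs each predecessor contributes.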
5724 : incoming_exceptions_(decoder->zone()) {
5727 uint32_t merge_arity = merge != nullptr ? merge->arity : 0;
5729 phi_count_ = num_locals + merge_arity;
5730 phi_types_ = decoder->zone()->AllocateArray<ValueType>(phi_count_);
5733 std::uninitialized_copy(locals.begin(), locals.end(), phi_types_);
5734 for (uint32_t i = 0; i < merge_arity; i++) {
5735   new (&phi_types_[num_locals + i]) ValueType((*merge)[i].type);
5737 AllocatePhiInputs(decoder->zone());
5744 phi_inputs_capacity_total_ = phi_count_ * input_capacity_per_phi_;
5748 constexpr uint32_t kNoInputs = 0;
5749 input_count_per_phi_ = std::vector(phi_count_, kNoInputs);
5761 uint32_t return_count = static_cast<uint32_t>(return_types.size());
5762 phi_count_ = return_count;
5763 phi_types_ = zone()->AllocateArray<ValueType>(phi_count_);
5765 std::uninitialized_copy(return_types.begin(), return_types.end(),
5767 AllocatePhiInputs(zone());
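// Phi inputs live in one flat array, strided by input_capacity_per_phi_:
// the inputs of phi i occupy [i * input_capacity_per_phi_, ...). Inputs are
// added round-robin (one per phi, in phi order), which the CHECKs below
// verify, so all phis always end up with the same input count.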
5771 if (V8_UNLIKELY(phi_inputs_total_ >= phi_inputs_capacity_total_)) {
5778 size_t phi_inputs_start = phi_i * input_capacity_per_phi_;
5779 size_t phi_input_offset_from_start = inputs_per_phi_;
5780 CHECK_EQ(input_count_per_phi_[phi_i]++, phi_input_offset_from_start);
5781 size_t phi_input_offset = phi_inputs_start + phi_input_offset_from_start;
5782 CHECK_EQ(next_phi_input_add_offset_, phi_input_offset);
5784 new (&phi_inputs_[next_phi_input_add_offset_]) OpIndex(input);
5786 phi_inputs_total_++;
5787 next_phi_input_add_offset_ += input_capacity_per_phi_;
5788 if (next_phi_input_add_offset_ >= phi_inputs_capacity_total_) {
5791   next_phi_input_add_offset_ = inputs_per_phi_;
5793 EnsureAllPhisHaveSameInputCount();
5803 size_t phi_inputs_start = phi_i * input_capacity_per_phi_;
5804 return base::VectorOf(&phi_inputs_[phi_inputs_start], inputs_per_phi_);
5808 incoming_exceptions_.push_back(exception);
5816 void DcheckConsistency() { EnsureAllPhisHaveSameInputCount(); }
5844 uint32_t phi_inputs_capacity_total_ = 0;
5845 uint32_t phi_inputs_total_ = 0;
5846 uint32_t next_phi_input_add_offset_ = 0;
5849 uint32_t phi_count_ = 0;
5850 uint32_t inputs_per_phi_ = 0;
5851 static constexpr uint32_t kInitialInputCapacityPerPhi = 2;
5852 uint32_t input_capacity_per_phi_ = kInitialInputCapacityPerPhi;
5855 std::vector<uint32_t> input_count_per_phi_;
5856 void EnsureAllPhisHaveSameInputCount() const {
5857   CHECK_EQ(phi_inputs_total_, phi_count() * inputs_per_phi_);
5858   CHECK_EQ(phi_count(), input_count_per_phi_.size());
5859   CHECK(std::all_of(input_count_per_phi_.begin(),
5860                     input_count_per_phi_.end(),
5861                     [=, this](uint32_t input_count) {
5862                       return input_count == inputs_per_phi_;
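// GrowInputsVector doubles the per-phi input capacity, reallocates the flat
// input array from the zone, and copies each phi's existing inputs into its
// new, wider slot.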
5878 DCHECK_NE(phi_inputs_capacity_total_, 0);
5880 OpIndex* old_phi_inputs = phi_inputs_;
5881 uint32_t old_input_capacity_per_phi = input_capacity_per_phi_;
5882 uint32_t old_phi_inputs_capacity_total = phi_inputs_capacity_total_;
5884 input_capacity_per_phi_ *= 2;
5885 phi_inputs_capacity_total_ *= 2;
5886 phi_inputs_ = zone()->AllocateArray<OpIndex>(phi_inputs_capacity_total_);
5892 EnsureAllPhisHaveSameInputCount();
5894 for (size_t phi_i = 0; phi_i < phi_count(); ++phi_i) {
5896   &old_phi_inputs[phi_i * old_input_capacity_per_phi];
5897   const OpIndex* old_end = old_begin + inputs_per_phi_;
5898   OpIndex* begin = &phi_inputs_[phi_i * input_capacity_per_phi_];
5899   std::uninitialized_copy(old_begin, old_end, begin);
5902 zone()->DeleteArray(old_phi_inputs, old_phi_inputs_capacity_total);
5908 TrapId trap_id = TrapId::kTrapNullDereference) {
5910 if (value.type.is_nullable()) {
5911   not_null_value = __ AssertNotNull(value.op, value.type, trap_id);
5913 return not_null_value;
5929 uint32_t drop_values = 0,
5932 if (__ current_block() == nullptr) return;
5936 uint32_t merge_arity = static_cast<uint32_t>(phis_for_block.phi_count()) -
5945     : stack_values != nullptr
5946     ? &(*stack_values)[0]
5947     : decoder->stack_value(merge_arity + drop_values);
5948 for (size_t i = 0; i < merge_arity; i++) {
5953 if (exception.valid()) {
5960 for (size_t i = 1; i < elements.size(); i++) {
5961   if (elements[i] != elements[0]) {
5973 OpIndex* exception = nullptr) {
5977 BlockPhis& block_phis = block_phis_it->second;
5979 uint32_t merge_arity = merge != nullptr ? merge->arity : 0;
5985 block_phis.DcheckConsistency();
5991 for (uint32_t i = 0; i < merge_arity; i++) {
5998 if (exception != nullptr && !exception->valid()) {
6005 switch (type.kind()) {
6009   return __ Word32Constant(int32_t{0});
6011   return __ Word64Constant(int64_t{0});
6014   return __ Float32Constant(0.0f);
6016   return __ Float64Constant(0.0);
6021   return __ Simd128Constant(value);
6034 const Value* func_ref_or_index,
6045 size_t param_count = decoder->sig_->parameter_count();
6046 for (size_t i = 0; i < param_count; ++i) {
6070 if (args != nullptr) {
6071   for (const Value& arg :
6073     builder.AddInput(arg.type.machine_type(), arg.op);
6076 if (func_ref_or_index) {
6078   func_ref_or_index->op);
6081 const size_t kExtraLocals = func_ref_or_index != nullptr ? 1 : 0;
6082 size_t wasm_local_count = ssa_env_.size() - param_count;
6083 size_t local_count = kExtraLocals + decoder->stack_size() +
6088 compiler::FrameStateType::kLiftoffFunction,
6089 static_cast<uint16_t>(param_count), 0, static_cast<int>(local_count),
6100 constexpr size_t max_deopt_input_count = 500;
6102 constexpr size_t max_isel_input_count =
6103     std::numeric_limits<uint16_t>::max();
6111 static_assert((max_deopt_input_count * 2 + 42) *
6113               max_isel_input_count);
6114 if (builder.Inputs().size() >= max_deopt_input_count) {
6115   if (v8_flags.trace_wasm_inlining) {
6117     "[function %d%s: Disabling deoptimizations for speculative "
6118     "inlining as the deoptimization FrameState takes too many inputs "
6121     builder.Inputs().size(), max_deopt_input_count);
6139 __ DeoptimizeIfNot(deopt_condition, frame_state,
6140                    DeoptimizeReason::kWrongCallTarget,
6147 __ Deoptimize(frame_state, DeoptimizeReason::kWrongCallTarget,
6158 auto function_feedback = feedback.feedback_for_function.find(func_index_);
6159 CHECK_NE(function_feedback, feedback.feedback_for_function.end());
6171 V<Word32> check = __ template Projection<1>(truncated);
6172 __ TrapIf(__ Word32Equal(check, 0), TrapId::kTrapFloatUnrepresentable);
6180 V<WordPtr> stack_slot = __ StackSlot(slot_size, slot_size);
6186 return {stack_slot, overflow};
6193 __ TrapIf(__ Word32Equal(overflow, 0),
6194           compiler::TrapId::kTrapFloatUnrepresentable);
6205 V<WordPtr> stack_slot = __ StackSlot(slot_size, slot_size);
6210 CallC(&sig, ccall_ref, stack_slot);
6218 uint8_t slot_size = std::max(input_representation.SizeInBytes(),
6220 V<WordPtr> stack_slot = __ StackSlot(slot_size, slot_size);
6225 CallC(&sig, ccall_ref, stack_slot);
6227 result_representation);
6231 wasm::TrapId trap_zero) {
6244 __ TrapIf(__ Word32Equal(rc, 0), trap_zero);
6245 __ TrapIf(__ Word32Equal(rc, -1), TrapId::kTrapDivUnrepresentable);
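// The 64-bit division C helpers signal errors through their return code:
// 0 means "divisor was zero" and -1 means "result unrepresentable"
// (INT64_MIN / -1); both are turned into wasm traps here.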
6253 return __ Word32Equal(arg, 0);
6255 return __ Float32Abs(arg);
6257 return __ Float32Negate(arg);
6259 return __ Float32Sqrt(arg);
6261 return __ Float64Abs(arg);
6263 return __ Float64Negate(arg);
6265 return __ Float64Sqrt(arg);
6266 case kExprI32SConvertF32: {
6270   __ TrapIf(__ Word32Equal(__ Float32Equal(converted_back, truncated), 0),
6271             TrapId::kTrapFloatUnrepresentable);
6274 case kExprI32UConvertF32: {
6278   __ TrapIf(__ Word32Equal(__ Float32Equal(converted_back, truncated), 0),
6279             TrapId::kTrapFloatUnrepresentable);
6282 case kExprI32SConvertF64: {
6285   __ TruncateFloat64ToInt32OverflowUndefined(truncated);
6287   __ TrapIf(__ Word32Equal(__ Float64Equal(converted_back, truncated), 0),
6288             TrapId::kTrapFloatUnrepresentable);
6291 case kExprI32UConvertF64: {
6295   __ TrapIf(__ Word32Equal(__ Float64Equal(converted_back, truncated), 0),
6296             TrapId::kTrapFloatUnrepresentable);
6299 case kExprI64SConvertF32:
6301   __ TryTruncateFloat32ToInt64(arg))
6304   ExternalReference::wasm_float32_to_int64());
6305 case kExprI64UConvertF32:
6307   __ TryTruncateFloat32ToUint64(arg))
6310   ExternalReference::wasm_float32_to_uint64());
6311 case kExprI64SConvertF64:
6313   __ TryTruncateFloat64ToInt64(arg))
6316   ExternalReference::wasm_float64_to_int64());
6317 case kExprI64UConvertF64:
6319   __ TryTruncateFloat64ToUint64(arg))
6322   ExternalReference::wasm_float64_to_uint64());
6323 case kExprF64SConvertI32:
6324   return __ ChangeInt32ToFloat64(arg);
6325 case kExprF64UConvertI32:
6326   return __ ChangeUint32ToFloat64(arg);
6327 case kExprF32SConvertI32:
6328   return __ ChangeInt32ToFloat32(arg);
6329 case kExprF32UConvertI32:
6330   return __ ChangeUint32ToFloat32(arg);
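// Saturating (_sat) float->int conversions never trap. The pattern in the
// cases below: truncate with undefined overflow behavior, convert the result
// back, and compare; if the round-trip matches, the fast result is used.
// Otherwise NaN saturates to 0, negative inputs to the minimum, and positive
// inputs to the maximum of the target integer type.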
6331 case kExprI32SConvertSatF32: {
6334   __ TruncateFloat32ToInt32OverflowUndefined(truncated);
6335   V<Float32> converted_back = __ ChangeInt32ToFloat32(converted);
6339   IF (LIKELY(__ Float32Equal(truncated, converted_back))) {
6340     GOTO(done, converted);
6343   IF (__ Float32Equal(arg, arg)) {
6345     IF (__ Float32LessThan(arg, 0)) {
6348       __ Word32Constant(std::numeric_limits<int32_t>::min()));
6352       __ Word32Constant(std::numeric_limits<int32_t>::max()));
6356     GOTO(done, __ Word32Constant(0));
6363 case kExprI32UConvertSatF32: {
6366   __ TruncateFloat32ToUint32OverflowUndefined(truncated);
6367   V<Float32> converted_back = __ ChangeUint32ToFloat32(converted);
6371   IF (LIKELY(__ Float32Equal(truncated, converted_back))) {
6372     GOTO(done, converted);
6375   IF (__ Float32Equal(arg, arg)) {
6377     IF (__ Float32LessThan(arg, 0)) {
6379       GOTO(done, __ Word32Constant(0));
6383       __ Word32Constant(std::numeric_limits<uint32_t>::max()));
6387     GOTO(done, __ Word32Constant(0));
6394 case kExprI32SConvertSatF64: {
6397   __ TruncateFloat64ToInt32OverflowUndefined(truncated);
6398   V<Float64> converted_back = __ ChangeInt32ToFloat64(converted);
6402   IF (LIKELY(__ Float64Equal(truncated, converted_back))) {
6403     GOTO(done, converted);
6406   IF (__ Float64Equal(arg, arg)) {
6408     IF (__ Float64LessThan(arg, 0)) {
6411       __ Word32Constant(std::numeric_limits<int32_t>::min()));
6415       __ Word32Constant(std::numeric_limits<int32_t>::max()));
6419     GOTO(done, __ Word32Constant(0));
6426 case kExprI32UConvertSatF64: {
6429   __ TruncateFloat64ToUint32OverflowUndefined(truncated);
6430   V<Float64> converted_back = __ ChangeUint32ToFloat64(converted);
6434   IF (LIKELY(__ Float64Equal(truncated, converted_back))) {
6435     GOTO(done, converted);
6438   IF (__ Float64Equal(arg, arg)) {
6440     IF (__ Float64LessThan(arg, 0)) {
6442       GOTO(done, __ Word32Constant(0));
6446       __ Word32Constant(std::numeric_limits<uint32_t>::max()));
6450     GOTO(done, __ Word32Constant(0));
6458 if constexpr (!
Is64()) {
6462 ExternalReference::wasm_float32_to_int64_sat(),
is_signed);
6467 if (SupportedOperations::sat_conversion_is_safe()) {
6468 return __ Projection<0>(converted);
6471 GOTO(done,
__ Projection<0>(converted));
6474 IF (
__ Float32Equal(arg, arg)) {
6476 IF (
__ Float32LessThan(arg, 0)) {
6479 __ Word64Constant(std::numeric_limits<int64_t>::min()));
6483 __ Word64Constant(std::numeric_limits<int64_t>::max()));
6487 GOTO(done,
__ Word64Constant(int64_t{0}));
6494 case kExprI64UConvertSatF32: {
6495 if constexpr (!
Is64()) {
6499 ExternalReference::wasm_float32_to_uint64_sat(),
is_signed);
6504 if (SupportedOperations::sat_conversion_is_safe()) {
6505 return __ template Projection<0>(converted);
6508 IF (
LIKELY(
__ template Projection<1>(converted))) {
6509 GOTO(done,
__ template Projection<0>(converted));
6512 IF (
__ Float32Equal(arg, arg)) {
6514 IF (
__ Float32LessThan(arg, 0)) {
6516 GOTO(done,
__ Word64Constant(int64_t{0}));
6520 __ Word64Constant(std::numeric_limits<uint64_t>::max()));
6524 GOTO(done,
__ Word64Constant(int64_t{0}));
6531 case kExprI64SConvertSatF64: {
6532 if constexpr (!
Is64()) {
6536 ExternalReference::wasm_float64_to_int64_sat(),
is_signed);
6541 if (SupportedOperations::sat_conversion_is_safe()) {
6542 return __ template Projection<0>(converted);
6545 IF (
LIKELY(
__ template Projection<1>(converted))) {
6546 GOTO(done,
__ template Projection<0>(converted));
6549 IF (
__ Float64Equal(arg, arg)) {
6551 IF (
__ Float64LessThan(arg, 0)) {
6554 __ Word64Constant(std::numeric_limits<int64_t>::min()));
6558 __ Word64Constant(std::numeric_limits<int64_t>::max()));
6562 GOTO(done,
__ Word64Constant(int64_t{0}));
6569 case kExprI64UConvertSatF64: {
6570 if constexpr (!
Is64()) {
6574 ExternalReference::wasm_float64_to_uint64_sat(),
is_signed);
6579 if (SupportedOperations::sat_conversion_is_safe()) {
6580 return __ template Projection<0>(converted);
6583 IF (
LIKELY(
__ template Projection<1>(converted))) {
6584 GOTO(done,
__ template Projection<0>(converted));
6587 IF (
__ Float64Equal(arg, arg)) {
6589 IF (
__ Float64LessThan(arg, 0)) {
6591 GOTO(done,
__ Word64Constant(int64_t{0}));
6595 __ Word64Constant(std::numeric_limits<uint64_t>::max()));
6599 GOTO(done,
__ Word64Constant(int64_t{0}));
6606 case kExprF32ConvertF64:
6607   return __ TruncateFloat64ToFloat32(arg);
6608 case kExprF64ConvertF32:
6609   return __ ChangeFloat32ToFloat64(arg);
6610 case kExprF32ReinterpretI32:
6611   return __ BitcastWord32ToFloat32(arg);
6612 case kExprI32ReinterpretF32:
6613   return __ BitcastFloat32ToWord32(arg);
6615   return __ Word32CountLeadingZeros(arg);
6617   if (SupportedOperations::word32_ctz()) {
6618     return __ Word32CountTrailingZeros(arg);
6624   return CallC(&sig, ExternalReference::wasm_word32_ctz(), arg);
6626 case kExprI32Popcnt:
6627   if (SupportedOperations::word32_popcnt()) {
6628     return __ Word32PopCount(arg);
6633   return CallC(&sig, ExternalReference::wasm_word32_popcnt(), arg);
6636   if (SupportedOperations::float32_round_down()) {
6637     return __ Float32RoundDown(arg);
6640   ExternalReference::wasm_f32_floor(),
6644   if (SupportedOperations::float32_round_up()) {
6645     return __ Float32RoundUp(arg);
6648   ExternalReference::wasm_f32_ceil(),
6652   if (SupportedOperations::float32_round_to_zero()) {
6653     return __ Float32RoundToZero(arg);
6656   ExternalReference::wasm_f32_trunc(),
6659 case kExprF32NearestInt:
6660   if (SupportedOperations::float32_round_ties_even()) {
6661     return __ Float32RoundTiesEven(arg);
6664   arg, ExternalReference::wasm_f32_nearest_int(),
6668   if (SupportedOperations::float64_round_down()) {
6669     return __ Float64RoundDown(arg);
6672   ExternalReference::wasm_f64_floor(),
6676   if (SupportedOperations::float64_round_up()) {
6677     return __ Float64RoundUp(arg);
6680   ExternalReference::wasm_f64_ceil(),
6684   if (SupportedOperations::float64_round_to_zero()) {
6685     return __ Float64RoundToZero(arg);
6688   ExternalReference::wasm_f64_trunc(),
6691 case kExprF64NearestInt:
6692   if (SupportedOperations::float64_round_ties_even()) {
6693     return __ Float64RoundTiesEven(arg);
6696   arg, ExternalReference::wasm_f64_nearest_int(),
6701   arg, ExternalReference::f64_acos_wrapper_function(),
6705   arg, ExternalReference::f64_asin_wrapper_function(),
6708   return __ Float64Atan(arg);
6710   return __ Float64Cos(arg);
6712   return __ Float64Sin(arg);
6714   return __ Float64Tan(arg);
6716   return __ Float64Exp(arg);
6718   return __ Float64Log(arg);
6719 case kExprI32ConvertI64:
6720   return __ TruncateWord64ToWord32(arg);
6721 case kExprI64SConvertI32:
6722   return __ ChangeInt32ToInt64(arg);
6723 case kExprI64UConvertI32:
6724   return __ ChangeUint32ToUint64(arg);
6725 case kExprF64ReinterpretI64:
6726   return __ BitcastWord64ToFloat64(arg);
6727 case kExprI64ReinterpretF64:
6728   return __ BitcastFloat64ToWord64(arg);
6730   return __ Word64CountLeadingZeros(arg);
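// 64-bit ctz/popcnt on 32-bit targets: when no 64-bit machine instruction is
// available, the operation is split into 32-bit halves. For ctz: if the low
// word is zero the result is 32 + ctz(high word), otherwise ctz(low word).
// For popcnt the counts of the two halves are simply added.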
6732 if (SupportedOperations::word64_ctz() ||
6733     (!Is64() && SupportedOperations::word32_ctz())) {
6734   return __ Word64CountTrailingZeros(arg);
6735 } else if (Is64()) {
6740   return __ ChangeUint32ToUint64(
6741       CallC(&sig, ExternalReference::wasm_word64_ctz(), arg));
6745   __ TruncateWord64ToWord32(__ Word64ShiftRightLogical(arg, 32));
6746   OpIndex lower_word = __ TruncateWord64ToWord32(arg);
6751   IF (__ Word32Equal(lower_word, 0)) {
6753     __ Word32Add(CallC(&sig, ExternalReference::wasm_word32_ctz(),
6758     CallC(&sig, ExternalReference::wasm_word32_ctz(), lower_word));
6761   return __ ChangeUint32ToUint64(result);
6763 case kExprI64Popcnt:
6764   if (SupportedOperations::word64_popcnt() ||
6765       (!Is64() && SupportedOperations::word32_popcnt())) {
6766     return __ Word64PopCount(arg);
6767   } else if (Is64()) {
6772     return __ ChangeUint32ToUint64(
6773         CallC(&sig, ExternalReference::wasm_word64_popcnt(), arg));
6777   __ TruncateWord64ToWord32(__ Word64ShiftRightLogical(arg, 32));
6778   OpIndex lower_word = __ TruncateWord64ToWord32(arg);
6782   return __ ChangeUint32ToUint64(__ Word32Add(
6783       CallC(&sig, ExternalReference::wasm_word32_popcnt(), lower_word),
6784       CallC(&sig, ExternalReference::wasm_word32_popcnt(),
6788   return __ Word64Equal(arg, 0);
6789 case kExprF32SConvertI64:
6790   if constexpr (!Is64()) {
6792     arg, ExternalReference::wasm_int64_to_float32(),
6795   return __ ChangeInt64ToFloat32(arg);
6796 case kExprF32UConvertI64:
6797   if constexpr (!Is64()) {
6799     arg, ExternalReference::wasm_uint64_to_float32(),
6802   return __ ChangeUint64ToFloat32(arg);
6803 case kExprF64SConvertI64:
6804   if constexpr (!Is64()) {
6806     arg, ExternalReference::wasm_int64_to_float64(),
6809   return __ ChangeInt64ToFloat64(arg);
6810 case kExprF64UConvertI64:
6811   if constexpr (!Is64()) {
6813     arg, ExternalReference::wasm_uint64_to_float64(),
6816   return __ ChangeUint64ToFloat64(arg);
6817 case kExprI32SExtendI8:
6818   return __ Word32SignExtend8(arg);
6819 case kExprI32SExtendI16:
6820   return __ Word32SignExtend16(arg);
6821 case kExprI64SExtendI8:
6822   return __ Word64SignExtend8(arg);
6823 case kExprI64SExtendI16:
6824   return __ Word64SignExtend16(arg);
6825 case kExprI64SExtendI32:
6826   return __ ChangeInt32ToInt64(__ TruncateWord64ToWord32(arg));
6827 case kExprRefIsNull:
6829 case kExprI32AsmjsLoadMem8S:
6831 case kExprI32AsmjsLoadMem8U:
6833 case kExprI32AsmjsLoadMem16S:
6835 case kExprI32AsmjsLoadMem16U:
6837 case kExprI32AsmjsLoadMem:
6839 case kExprF32AsmjsLoadMem:
6841 case kExprF64AsmjsLoadMem:
6843 case kExprI32AsmjsSConvertF32:
6844 case kExprI32AsmjsUConvertF32:
6845   return __ JSTruncateFloat64ToWord32(__ ChangeFloat32ToFloat64(arg));
6846 case kExprI32AsmjsSConvertF64:
6847 case kExprI32AsmjsUConvertF64:
6848   return __ JSTruncateFloat64ToWord32(arg);
6849 case kExprRefAsNonNull:
6852   return __ Word32Equal(__ IsNull(arg, input_type), 0);
6853 case kExprAnyConvertExtern:
6854   return __ AnyConvertExtern(arg);
6855 case kExprExternConvertAny:
6856   return __ ExternConvertAny(arg);
6865 return __ Word32Add(lhs, rhs);
6867 return __ Word32Sub(lhs, rhs);
6869 return __ Word32Mul(lhs, rhs);
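// Wasm integer division traps: i32.div_s must trap on a zero divisor and on
// the single unrepresentable case kMinInt / -1; i32.rem_s only traps on a
// zero divisor, since kMinInt % -1 is defined to be 0 (handled by the fast
// path below).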
6870 case kExprI32DivS: {
6871   __ TrapIf(__ Word32Equal(rhs, 0), TrapId::kTrapDivByZero);
6872   V<Word32> unrepresentable_condition = __ Word32BitwiseAnd(
6873       __ Word32Equal(rhs, -1), __ Word32Equal(lhs, kMinInt));
6874   __ TrapIf(unrepresentable_condition, TrapId::kTrapDivUnrepresentable);
6875   return __ Int32Div(lhs, rhs);
6878   __ TrapIf(__ Word32Equal(rhs, 0), TrapId::kTrapDivByZero);
6879   return __ Uint32Div(lhs, rhs);
6880 case kExprI32RemS: {
6881   __ TrapIf(__ Word32Equal(rhs, 0), TrapId::kTrapRemByZero);
6884   GOTO(done, __ Word32Constant(0));
6886   GOTO(done, __ Int32Mod(lhs, rhs));
6893   __ TrapIf(__ Word32Equal(rhs, 0), TrapId::kTrapRemByZero);
6894   return __ Uint32Mod(lhs, rhs);
6896   return __ Word32BitwiseAnd(lhs, rhs);
6898   return __ Word32BitwiseOr(lhs, rhs);
6900   return __ Word32BitwiseXor(lhs, rhs);
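// Wasm defines shift counts modulo the operand width, so the right-hand side
// is masked with 0x1f (32-bit) or 0x3f (64-bit) before the machine shift is
// emitted. Rotate-left is emulated as rotate-right by (width - count) when
// the target has no rol instruction.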
6903 return __ Word32ShiftLeft(lhs, __ Word32BitwiseAnd(rhs, 0x1f));
6905 return __ Word32ShiftRightArithmetic(lhs,
6906                                      __ Word32BitwiseAnd(rhs, 0x1f));
6908 return __ Word32ShiftRightLogical(lhs, __ Word32BitwiseAnd(rhs, 0x1f));
6910 return __ Word32RotateRight(lhs, __ Word32BitwiseAnd(rhs, 0x1f));
6912 if (SupportedOperations::word32_rol()) {
6913   return __ Word32RotateLeft(lhs, __ Word32BitwiseAnd(rhs, 0x1f));
6915 return __ Word32RotateRight(
6916     lhs, __ Word32Sub(32, __ Word32BitwiseAnd(rhs, 0x1f)));
6919 return __ Word32Equal(lhs, rhs);
6921 return __ Word32Equal(__ Word32Equal(lhs, rhs), 0);
6923 return __ Int32LessThan(lhs, rhs);
6925 return __ Int32LessThanOrEqual(lhs, rhs);
6927 return __ Uint32LessThan(lhs, rhs);
6929 return __ Uint32LessThanOrEqual(lhs, rhs);
6931 return __ Int32LessThan(rhs, lhs);
6933 return __ Int32LessThanOrEqual(rhs, lhs);
6935 return __ Uint32LessThan(rhs, lhs);
6937 return __ Uint32LessThanOrEqual(rhs, lhs);
6939 return __ Word64Add(lhs, rhs);
6941 return __ Word64Sub(lhs, rhs);
6943 return __ Word64Mul(lhs, rhs);
6944 case kExprI64DivS: {
6945   if constexpr (!Is64()) {
6946     return BuildDiv64Call(lhs, rhs, ExternalReference::wasm_int64_div(),
6947                           wasm::TrapId::kTrapDivByZero);
6949   __ TrapIf(__ Word64Equal(rhs, 0), TrapId::kTrapDivByZero);
6950   V<Word32> unrepresentable_condition = __ Word32BitwiseAnd(
6951       __ Word64Equal(rhs, -1),
6952       __ Word64Equal(lhs, std::numeric_limits<int64_t>::min()));
6953   __ TrapIf(unrepresentable_condition, TrapId::kTrapDivUnrepresentable);
6954   return __ Int64Div(lhs, rhs);
6957   if constexpr (!Is64()) {
6958     return BuildDiv64Call(lhs, rhs, ExternalReference::wasm_uint64_div(),
6959                           wasm::TrapId::kTrapDivByZero);
6961   __ TrapIf(__ Word64Equal(rhs, 0), TrapId::kTrapDivByZero);
6962   return __ Uint64Div(lhs, rhs);
6963 case kExprI64RemS: {
6964   if constexpr (!Is64()) {
6965     return BuildDiv64Call(lhs, rhs, ExternalReference::wasm_int64_mod(),
6966                           wasm::TrapId::kTrapRemByZero);
6968   __ TrapIf(__ Word64Equal(rhs, 0), TrapId::kTrapRemByZero);
6971   GOTO(done, __ Word64Constant(int64_t{0}));
6973   GOTO(done, __ Int64Mod(lhs, rhs));
6980   if constexpr (!Is64()) {
6981     return BuildDiv64Call(lhs, rhs, ExternalReference::wasm_uint64_mod(),
6982                           wasm::TrapId::kTrapRemByZero);
6984   __ TrapIf(__ Word64Equal(rhs, 0), TrapId::kTrapRemByZero);
6985   return __ Uint64Mod(lhs, rhs);
6987   return __ Word64BitwiseAnd(lhs, rhs);
6989   return __ Word64BitwiseOr(lhs, rhs);
6991   return __ Word64BitwiseXor(lhs, rhs);
6994   return __ Word64ShiftLeft(
6995       lhs, __ Word32BitwiseAnd(__ TruncateWord64ToWord32(rhs), 0x3f));
6997   return __ Word64ShiftRightArithmetic(
6998       lhs, __ Word32BitwiseAnd(__ TruncateWord64ToWord32(rhs), 0x3f));
7000   return __ Word64ShiftRightLogical(
7001       lhs, __ Word32BitwiseAnd(__ TruncateWord64ToWord32(rhs), 0x3f));
7003   return __ Word64RotateRight(
7004       lhs, __ Word32BitwiseAnd(__ TruncateWord64ToWord32(rhs), 0x3f));
7006   if (SupportedOperations::word64_rol()) {
7007     return __ Word64RotateLeft(
7008         lhs, __ Word32BitwiseAnd(__ TruncateWord64ToWord32(rhs), 0x3f));
7010   return __ Word64RotateRight(
7011       lhs, __ Word32BitwiseAnd(
7012           __ Word32Sub(64, __ TruncateWord64ToWord32(rhs)), 0x3f));
7015   return __ Word64Equal(lhs, rhs);
7017   return __ Word32Equal(__ Word64Equal(lhs, rhs), 0);
7019   return __ Int64LessThan(lhs, rhs);
7021   return __ Int64LessThanOrEqual(lhs, rhs);
7023   return __ Uint64LessThan(lhs, rhs);
7025   return __ Uint64LessThanOrEqual(lhs, rhs);
7027   return __ Int64LessThan(rhs, lhs);
7029   return __ Int64LessThanOrEqual(rhs, lhs);
7031   return __ Uint64LessThan(rhs, lhs);
7033   return __ Uint64LessThanOrEqual(rhs, lhs);
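// copysign is lowered to bit arithmetic: clear the sign bit of lhs, isolate
// the sign bit of rhs, OR them together, and bitcast back to float.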
7034 case kExprF32CopySign: {
7036   __ Word32BitwiseAnd(__ BitcastFloat32ToWord32(lhs), 0x7fffffff);
7038   __ Word32BitwiseAnd(__ BitcastFloat32ToWord32(rhs), 0x80000000);
7039   return __ BitcastWord32ToFloat32(
7040       __ Word32BitwiseOr(lhs_without_sign, rhs_sign));
7043 return __ Float32Add(lhs, rhs);
7045 return __ Float32Sub(lhs, rhs);
7047 return __ Float32Mul(lhs, rhs);
7049 return __ Float32Div(lhs, rhs);
7051 return __ Float32Equal(lhs, rhs);
7053 return __ Word32Equal(__ Float32Equal(lhs, rhs), 0);
7055 return __ Float32LessThan(lhs, rhs);
7057 return __ Float32LessThanOrEqual(lhs, rhs);
7059 return __ Float32LessThan(rhs, lhs);
7061 return __ Float32LessThanOrEqual(rhs, lhs);
7063 return __ Float32Min(rhs, lhs);
7065 return __ Float32Max(rhs, lhs);
7066 case kExprF64CopySign: {
7068   __ BitcastFloat64ToWord64(lhs), 0x7fffffffffffffff);
7069   V<Word64> rhs_sign = __ Word64BitwiseAnd(__ BitcastFloat64ToWord64(rhs),
7070                                            0x8000000000000000);
7071   return __ BitcastWord64ToFloat64(
7072       __ Word64BitwiseOr(lhs_without_sign, rhs_sign));
7075 return __ Float64Add(lhs, rhs);
7077 return __ Float64Sub(lhs, rhs);
7079 return __ Float64Mul(lhs, rhs);
7081 return __ Float64Div(lhs, rhs);
7083 return __ Float64Equal(lhs, rhs);
7085 return __ Word32Equal(__ Float64Equal(lhs, rhs), 0);
7087 return __ Float64LessThan(lhs, rhs);
7089 return __ Float64LessThanOrEqual(lhs, rhs);
7091 return __ Float64LessThan(rhs, lhs);
7093 return __ Float64LessThanOrEqual(rhs, lhs);
7095 return __ Float64Min(lhs, rhs);
7097 return __ Float64Max(lhs, rhs);
7099 return __ Float64Power(lhs, rhs);
7101 return __ Float64Atan2(lhs, rhs);
7104 lhs, rhs, ExternalReference::f64_mod_wrapper_function(),
7107 return __ TaggedEqual(lhs, rhs);
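// asm.js integer division never traps: division by zero yields 0, and the
// overflow case wraps (0 - lhs reproduces kMinInt for kMinInt / -1). The
// remainder variants below likewise return 0 instead of trapping.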
7108 case kExprI32AsmjsDivS: {
7110   if (SupportedOperations::int32_div_is_safe()) {
7111     return __ Int32Div(lhs, rhs);
7115   GOTO(done, __ Word32Constant(0));
7118   GOTO(done, __ Word32Sub(0, lhs));
7120   GOTO(done, __ Int32Div(lhs, rhs));
7126 case kExprI32AsmjsDivU: {
7128   if (SupportedOperations::uint32_div_is_safe()) {
7129     return __ Uint32Div(lhs, rhs);
7133   GOTO(done, __ Word32Constant(0));
7135   GOTO(done, __ Uint32Div(lhs, rhs));
7140 case kExprI32AsmjsRemS: {
7159   IF (__ Int32LessThan(0, rhs)) {
7161     IF (__ Word32Equal(__ Word32BitwiseAnd(rhs, mask), 0)) {
7165       GOTO(done, __ Word32Sub(0, combined));
7170       GOTO(done, __ Int32Mod(lhs, rhs));
7173   IF (__ Int32LessThan(rhs, -1)) {
7174     GOTO(done, __ Int32Mod(lhs, rhs));
7176     GOTO(done, __ Word32Constant(0));
7182 case kExprI32AsmjsRemU: {
7186   GOTO(done, __ Word32Constant(0));
7188   GOTO(done, __ Uint32Mod(lhs, rhs));
7193 case kExprI32AsmjsStoreMem8:
7196 case kExprI32AsmjsStoreMem16:
7199 case kExprI32AsmjsStoreMem:
7202 case kExprF32AsmjsStoreMem:
7205 case kExprF64AsmjsStoreMem:
7226 if (!memory->is_memory64()) {
7231 converted_index =
__ ChangeUint32ToUintPtr(index);
7237 const uintptr_t align_mask = repr.
SizeInBytes() - 1;
7240 if (
static_cast<bool>(alignment_check) && align_mask != 0) {
7250 V<Word32> cond =
__ TruncateWordPtrToWord32(
__ WordPtrBitwiseAnd(
7251 effective_offset,
__ IntPtrConstant(align_mask)));
7252 __ TrapIfNot(
__ Word32Equal(cond,
__ Word32Constant(0)),
7253 TrapId::kTrapUnalignedAccess);
7267 __ TruncateWord64ToWord32(
__ Word64ShiftRightLogical(index, 32));
7268 __ TrapIf(high_word, TrapId::kTrapMemOutOfBounds);
7272 DCHECK_LT(end_offset, memory->max_memory_size);
7275 if (end_offset <= memory->min_memory_size && index.valid() &&
7279 uintptr_t constant_index = memory->is_memory64()
7280 ? constant_index_op.
word64()
7281 : constant_index_op.
word32();
7282 if (constant_index < memory->min_memory_size - end_offset) {
7287#if V8_TRAP_HANDLER_SUPPORTED
7289 enforce_bounds_check ==
7291 if (memory->is_memory64()) {
7299 __ TrapIfNot(cond, TrapId::kTrapMemOutOfBounds);
7308 if (end_offset > memory->min_memory_size) {
7312 __ UintPtrLessThan(
__ UintPtrConstant(end_offset), memory_size),
7313 TrapId::kTrapMemOutOfBounds);
7318 V<WordPtr> effective_size =
__ WordPtrSub(memory_size, end_offset);
7319 __ TrapIfNot(
__ UintPtrLessThan(converted_index, effective_size),
7320 TrapId::kTrapMemOutOfBounds);
7332 MemoryBasesAndSizes,
7342 if (offset == 0) return mem_start;
7343 return __ WordPtrAdd(mem_start, offset);
7354 MemoryBasesAndSizes,
7391 __ Store(info, __ Word32Constant(is_store ? 1 : 0),
7400 __ NoContextConstant());
7412 decoder->module_->functions[function_index].sig_index;
7413 bool shared = decoder->module_->type(sig_index).is_shared;
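// call_indirect below: bounds-check the table index, then (unless statically
// redundant) compare the entry's canonical signature id against the expected
// one; for non-final types a failed fast compare falls back to walking the
// rtt supertype chain. Null entries are rejected with kTrapFuncSigMismatch.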
7424 bool needs_type_or_null_check = true) {
7446 bool needs_dynamic_size =
7447     !table->has_maximum_size || table->maximum_size != table->initial_size;
7448 if (needs_dynamic_size) {
7449   table_length = __ LoadField<Word32>(
7450       dispatch_table, AccessBuilder::ForWasmDispatchTableLength());
7452   table_length = __ Word32Constant(table->initial_size);
7455 index_wordptr, __ ChangeUint32ToUintPtr(table_length));
7456 __ TrapIfNot(in_bounds, TrapId::kTrapTableOutOfBounds);
7461 bool needs_type_check =
7462     needs_type_or_null_check &&
7466 bool needs_null_check =
7467     needs_type_or_null_check && table->type.is_nullable();
7469 V<WordPtr> dispatch_table_entry_offset = __ WordPtrAdd(
7473 if (needs_type_check) {
7476   __ RelocatableWasmCanonicalSignatureId(sig_id.index);
7479   __ Load(dispatch_table, dispatch_table_entry_offset,
7482   V<Word32> sigs_match = __ Word32Equal(expected_canonical_sig, loaded_sig);
7483   if (!decoder->module_->type(sig_index).is_final) {
7490     if (needs_null_check) {
7492       __ TrapIf(__ Word32Equal(loaded_sig, -1),
7493                 TrapId::kTrapFuncSigMismatch);
7495     bool shared = decoder->module_->type(sig_index).is_shared;
7508     rtts, __ ChangeInt32ToIntPtr(loaded_sig),
7518     Map::kConstructorOrBackPointerOrNativeContextOffset);
7522     if (static_cast<uint32_t>(rtt_depth) >=
7527         WasmTypeInfo::kSupertypesLengthOffset));
7528     __ TrapIfNot(__ Uint32LessThan(rtt_depth, supertypes_length),
7534         WasmTypeInfo::kSupertypesOffset + kTaggedSize * rtt_depth);
7535     __ TrapIfNot(__ TaggedEqual(maybe_match, formal_rtt),
7541     __ TrapIfNot(sigs_match, TrapId::kTrapFuncSigMismatch);
7543 } else if (needs_null_check) {
7545   __ Load(dispatch_table, dispatch_table_entry_offset,
7548   __ TrapIf(__ Word32Equal(-1, loaded_sig), TrapId::kTrapFuncSigMismatch);
7557 dispatch_table, dispatch_table_entry_offset,
7561 return {target, implicit_arg};
7569 if (type.is_nullable() &&
7572     __ AssertNotNull(func_ref, type, TrapId::kTrapNullDereference));
7583 func_ref, load_kind, kWasmInternalFunctionIndirectPointerTag,
7584 WasmFuncRef::kTrustedInternalOffset));
7590 return type.is_object_reference()
7608 arg_indices[0] = ref;
7609 for (uint32_t i = 0; i < sig->parameter_count(); i++) {
7610   arg_indices[i + 1] = args[i].op;
7615 check_for_exception, OpEffects().CanCallAnything());
7617 if (sig->return_count() == 1) {
7619 } else if (sig->return_count() > 1) {
7620   for (uint32_t i = 0; i < sig->return_count(); i++) {
7643 arg_indices[0] = ref;
7644 for (uint32_t i = 0; i < sig->parameter_count(); i++) {
7645   arg_indices[i + 1] = args[i].op;
7649 if (__ generating_unreachable_operations()) return;
7653 size_t return_count = sig->return_count();
7660 for (size_t i = 0; i < return_count; i++) {
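// CallBuiltinThroughJumptable: the three overloads below differ only in
// whether the Descriptor requires a context and in how the call target is
// materialized; each packs the descriptor arguments (plus the context, if
// needed) into an OpIndex array and emits the call with the descriptor's
// declared effects.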
7667 template <typename Descriptor>
7672 requires(!Descriptor::kNeedsContext)
7677 __ RelocatableWasmBuiltinCallTarget(Descriptor::kFunction);
7678 auto arguments = std::apply(
7681     OpIndex, std::tuple_size_v<typename Descriptor::arguments_t> + 1>{
7682     std::forward<decltype(as)>(as)...};
7688 Descriptor::Create(StubCallMode::kCallWasmRuntimeStub,
7690 check_for_exception, Descriptor::kEffects);
7693 template <typename Descriptor>
7697 const typename Descriptor::arguments_t& args,
7699 requires Descriptor::kNeedsContext
7704 __ RelocatableWasmBuiltinCallTarget(Descriptor::kFunction);
7705 auto arguments = std::apply(
7706     [context](auto&&... as) {
7708       OpIndex, std::tuple_size_v<typename Descriptor::arguments_t> + 1>{
7709       std::forward<decltype(as)>(as)..., context};
7715 Descriptor::Create(StubCallMode::kCallWasmRuntimeStub,
7717 check_for_exception, Descriptor::kEffects);
7720 template <typename Descriptor>
7725 requires(!Descriptor::kNeedsContext)
7730 auto arguments = std::apply(
7733     OpIndex, std::tuple_size_v<typename Descriptor::arguments_t> + 1>{
7734     std::forward<decltype(as)>(as)...};
7742 check_for_exception, Descriptor::kEffects);
7767 bool handled_in_this_frame =
7778 if (handled_in_this_frame) {
7787 TSBlock* exception_block = __ NewBlock();
7790 Assembler::CatchScope scope(asm_, exception_block);
7793 __ Goto(success_block);
7796 __ Bind(exception_block);
7797 OpIndex exception = __ CatchBlockBegin();
7798 if (handled_in_this_frame) {
7809 __ Goto(catch_block);
7811 __ Bind(success_block);
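// C calls marshal their arguments through a stack slot: each argument is
// stored at an increasing offset and the slot address is passed as a single
// pointer parameter. When the call returns its result in memory, the slot is
// sized to hold the larger of the arguments and the result.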
7826 return CallC(&sig, ref, stack_slot_param);
7831 std::initializer_list<std::pair<OpIndex, MemoryRepresentation>> args) {
7833 for (auto arg : args) slot_size += arg.second.SizeInBytes();
7836 V<WordPtr> stack_slot_param = __ StackSlot(slot_size, 0);
7838 for (auto arg : args) {
7839   __ Store(stack_slot_param, arg.first,
7842   offset += arg.second.SizeInBytes();
7846 return CallC(&sig, ref, stack_slot_param);
7851 std::initializer_list<std::pair<OpIndex, MemoryRepresentation>> args) {
7853 for (auto arg : args) slot_size += arg.second.SizeInBytes();
7856 slot_size = std::max<int>(slot_size, res_type.SizeInBytes());
7857 V<WordPtr> stack_slot_param = __ StackSlot(slot_size, 0);
7859 for (auto arg : args) {
7860   __ Store(stack_slot_param, arg.first,
7863   offset += arg.second.SizeInBytes();
7867 CallC(&sig, ref, stack_slot_param);
7886 {{arg0, arg_type}, {arg1, arg_type}});
7891 TrapId trap_reason) {
7899 if constexpr (Is64()) {
7902   __ TrapIf(__ TruncateWord64ToWord32(
7911 TrapId::kTrapMemOutOfBounds);
7917 TrapId::kTrapTableOutOfBounds);
7935 return __ TruncateWordPtrToWord32(__ WordPtrShiftRightLogical(
7944 __ StoreFixedArrayElement(values_array, index, upper_half,
7947 __ StoreFixedArrayElement(values_array, index + 1, lower_half,
7955     __ LoadFixedArrayElement(exception_values_array, index))),
7958     __ LoadFixedArrayElement(exception_values_array, index + 1)));
7959 return __ Word32BitwiseOr(upper_half, lower_half);
7965 __ ChangeUint32ToUint64(
7970 return __ Word64BitwiseOr(upper_half, lower_half);
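// Exception payloads are stored in a FixedArray of Smi-encoded halves: each
// 32-bit value is split across two consecutive entries (and a 64-bit value
// across four), then reassembled with shifts and bitwise ORs when caught.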
7978 {exception, LOAD_ROOT(wasm_exception_values_symbol)}));
7981 for (Value& value : values) {
7982   switch (value.type.kind()) {
7994     value.op = __ BitcastWord32ToFloat32(
7999     value.op = __ BitcastWord64ToFloat64(
8005     value_s128 = __ Simd128Splat(
8007         compiler::turboshaft::Simd128SplatOp::Kind::kI32x4);
8009     using Kind = compiler::turboshaft::Simd128ReplaceLaneOp::Kind;
8010     value_s128 = __ Simd128ReplaceLane(
8015     value_s128 = __ Simd128ReplaceLane(
8020     value.op = __ Simd128ReplaceLane(
8029     value.op = __ LoadFixedArrayElement(exception_values_array, index);
8057 V<WordPtr> index_ptr = __ ChangeUint32ToUintPtr(index);
8073 V<WordPtr> index_ptr = __ ChangeUint32ToUintPtr(index);
8085 __ SetVariable(result, __ Word32Constant(0));
8089 std::numeric_limits<float>::quiet_NaN()));
8093 std::numeric_limits<double>::quiet_NaN()));
8109 __ AssertNotNull(array, array_type, TrapId::kTrapNullDereference);
8115 __ TrapIfNot(__ Uint32LessThan(index, length),
8116              TrapId::kTrapArrayOutOfBounds);
8126 V<Word32> array_length = __ ArrayLength(array, null_check);
8127 V<Word32> range_end = __ Word32Add(index, length);
8130 __ Uint32LessThanOrEqual(range_end, array_length),
8132 __ Uint32LessThanOrEqual(index, range_end));
8133 __ TrapIfNot(range_valid, TrapId::kTrapArrayOutOfBounds);
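// The range check above guards both ends: range_end = index + length can
// wrap on overflow, so in addition to range_end <= array_length the check
// also requires index <= range_end.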
8140 Value* value_on_branch, uint32_t br_depth,
8142 OpIndex cast_succeeds = __ WasmTypeCheck(object.op, rtt, config);
8143 IF (cast_succeeds) {
8145   Forward(decoder, object, value_on_branch);
8157 const Value& object, Value* value_on_fallthrough,
8159 OpIndex cast_succeeds = __ WasmTypeCheck(object.op, rtt, config);
8160 IF (__ Word32Equal(cast_succeeds, 0)) {
8168 value_on_fallthrough->op =
8176 bool shared = decoder->module_->type(index).is_shared;
8178 V<WasmArray> array = __ WasmAllocateArray(rtt, length, array_type);
8190 bool force_explicit_null_check =
8192 bool explicit_null_check =
8194 bool implicit_null_check =
8197 if (explicit_null_check) {
8199   TrapId::kTrapNullDereference);
8204 return __ Load(descriptor_struct, load_kind,
8206                WasmStruct::kHeaderSize);
8215 if (type.has_descriptor()) {
8222 if (type.is_descriptor()) {
8224   BuiltinCallDescriptor::WasmAllocateDescriptorStruct>(
8236 static_assert(Heap::kMinObjectSizeInTaggedWords == 2 &&
8238               "empty struct might require initialization of padding field");
8239 return struct_value;
8244 if (__ generating_unreachable_operations()) return false;
8245 const Simd128ConstantOp* s128_op =
8246     __ output_graph().Get(op).TryCast<Simd128ConstantOp>();
8247 return s128_op && s128_op->IsZero();
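// ArrayFillImpl below: fills shorter than kArrayNewMinimumSizeForMemSet
// elements use an inline loop; longer fills call the
// ExternalReference::wasm_array_fill() C function, passing whether a write
// barrier is required for the stored elements.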
8252 bool emit_write_barrier) {
8262 constexpr uint32_t kArrayNewMinimumSizeForMemSet = 16;
8264 length, __ Word32Constant(kArrayNewMinimumSizeForMemSet))) {
8271 CallC(&sig, ExternalReference::wasm_array_fill(),
8273       __ Word32Constant(emit_write_barrier ? 1 : 0),
8281 WHILE(__ Uint32LessThan(current_index, __ Word32Add(index, length))) {
8282   __ ArraySet(array, current_index, value, type->element_type());
8283   current_index = __ Word32Add(current_index, 1);
8295 switch (type.kind()) {
8302 case wasm::kRefNull:
8308   value = __ Word64Constant(uint64_t{0});
8326 type.is_reference());
8337 for (size_t i = 0; i < sig->return_count(); ++i) {
8341 for (size_t i = 0; i < sig->parameter_count(); ++i) {
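// Inlining: the callee body is decoded with a nested WasmFullDecoder into the
// same graph. Fresh blocks capture returns (and, for non-tail calls, thrown
// exceptions) so the caller can merge the results back in as phis.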
8357 inlinee.sig->parameter_count() + 1, decoder->zone_);
8358 bool inlinee_is_shared = decoder->module_->function_is_shared(func_index);
8360 for (size_t i = 0; i < inlinee.sig->parameter_count(); i++) {
8361   inlinee_args[i + 1] = args[i].op;
8369 function_bytes.end(), inlinee_is_shared};
8385 decoder, sig, callee,
8387 decoder->module_->function_is_shared(func_index)),
8392 decoder->module_->function_is_shared(func_index)),
8397 decoder->module_->set_function_validated(func_index);
8403 TSBlock* callee_catch_block = nullptr;
8410 callee_return_block = nullptr;
8411 inlinee_return_phis = nullptr;
8415 inlinee_mode = mode_;
8428 callee_catch_block = __ NewBlock();
8432 callee_return_block = __ NewBlock();
8433 inlinee_return_phis = &fresh_return_phis;
8438 frame_state = is_tail_call
8451 callee_return_block, inlinee_return_phis,
8452 callee_catch_block, is_tail_call, frame_state);
8458 {static_cast<int>(func_index), is_tail_call, call_position});
8459 inlinee_decoder.interface().set_inlining_id(
8461 inlinee_decoder.interface().set_parent_position(call_position);
8465 inlinee_decoder.interface().disable_deopts();
8469 inlinee_decoder.interface().set_inlining_decisions(
8475 inlinee_decoder.interface().set_no_liftoff_inlining_budget(
8478 inlinee_decoder.Decode();
8480 DCHECK(inlinee_decoder.ok());
8480 DCHECK(inlinee_decoder.ok());
8483 inlinee_return_phis !=
nullptr);
8488 __ Bind(callee_catch_block);
8491 bool handled_in_this_frame = decoder->
current_catch() != -1;
8493 if (handled_in_this_frame) {
8508 __ Goto(catch_block);
8511 if (!is_tail_call) {
8512 __ Bind(callee_return_block);
8514 size_t return_count = inlinee.
sig->return_count();
8515 for (
size_t i = 0;
i < return_count;
i++) {
8523 inlinee_decoder.interface().no_liftoff_inlining_budget());
8529#define TRAPREASON_TO_TRAPID(name) \
8530 case wasm::k##name: \
8531 static_assert(static_cast<int>(TrapId::k##name) == \
8532 static_cast<int>(Builtin::kThrowWasm##name), \
8533 "trap id mismatch"); \
8534 return TrapId::k##name;
8536#undef TRAPREASON_TO_TRAPID
8591 if (!v8_flags.wasm_inlining) return false;
8615 if (tree && tree->is_inlined()) {
8616   DCHECK(!decoder->module_->function_is_shared(tree->function_index()));
8649 auto iter = feedback.deopt_count_for_function.find(func_index_);
8650 if (iter != feedback.deopt_count_for_function.end() &&
8651     iter->second >= v8_flags.wasm_deopts_per_function_limit) {
8653   if (v8_flags.trace_wasm_inlining) {
8655     "[function %d%s: Disabling deoptimizations for speculative "
8656     "inlining as the deoptimization limit (%u) for this function "
8657     "is reached or exceeded (%zu)]\n",
8659     iter->second, v8_flags.wasm_deopts_per_function_limit.value());
8669 return (element_is_shared && !shared_)
8678 if (type_is_shared && !shared_) {
8681   shared_instance, ManagedObjectMaps,
8745 std::unique_ptr<AssumptionsJournal>* assumptions,
8747 DCHECK(env->module->function_was_validated(func_index));
8753 &zone, env, assembler, assumptions, inlining_positions,
8754 func_index, func_body.is_shared, wire_bytes);
8760#undef LOAD_IMMUTABLE_INSTANCE_FIELD
8761#undef LOAD_INSTANCE_FIELD
#define GOTO_IF_NOT(cond, label,...)
#define GOTO_IF(cond, label,...)
union v8::internal::@341::BuiltinMetadata::KindSpecificData data
#define SBXCHECK(condition)
static constexpr int kLastUsedBit
static constexpr T decode(U value)
static constexpr U encode(T value)
static constexpr int kShift
constexpr void Add(E element)
void emplace_back(Args &&... args)
constexpr bool empty() const
constexpr size_t size() const
constexpr T * begin() const
constexpr T * end() const
bool Contains(int i) const
static CallInterfaceDescriptor CallInterfaceDescriptorFor(Builtin builtin)
static V8_INLINE constexpr int OffsetOfElementAt(int index)
static const int kInvalidContext
static ExternalReference Create(const SCTableReference &table_ref)
static constexpr int OffsetOfElementAt(int index)
auto Returns(ReturnTypes... return_types) const
static constexpr int real_jslimit_offset()
static constexpr int exception_offset()
static constexpr int BuiltinSlotOffset(Builtin id)
static constexpr uint32_t thread_in_wasm_flag_address_offset()
static constexpr uint32_t central_stack_sp_offset()
static constexpr uint32_t context_offset()
static uint32_t error_message_param_offset()
static constexpr uint32_t central_stack_limit_offset()
constexpr bool IsSigned() const
static constexpr MachineType Pointer()
static constexpr MachineType Uint8()
constexpr MachineRepresentation representation() const
static constexpr MachineType Int32()
static constexpr MachineType AnyTagged()
static constexpr MachineType Uint64()
static constexpr MachineType Uint32()
static constexpr MachineType Uint16()
static constexpr MachineType Int16()
static constexpr MachineType Bool()
static constexpr MachineType Int64()
static constexpr MachineType TaggedPointer()
static constexpr MachineType UintPtr()
static constexpr MachineType Int8()
static constexpr int kHashNotComputedMask
static V8_EXPORT_PRIVATE const Function * FunctionForId(FunctionId id)
size_t return_count() const
T GetParam(size_t index) const
T GetReturn(size_t index=0) const
size_t parameter_count() const
static constexpr Tagged< Smi > FromInt(int value)
static constexpr int kMaxValue
static const int kNotInlined
static constexpr size_t kTargetBias
static constexpr size_t kSigBias
static constexpr size_t kImplicitArgBias
static constexpr size_t kEntrySize
static constexpr size_t kEntriesOffset
static constexpr int OffsetOf(int index)
static uint32_t GetEncodedSize(const wasm::WasmTagSig *tag)
T * AllocateArray(size_t length)
static FieldAccess ForJSArrayBufferViewBuffer()
static FieldAccess ForJSArrayBufferViewByteLength()
static FieldAccess ForJSArrayBufferViewByteOffset()
static FieldAccess ForHeapNumberValue()
static FieldAccess ForJSArrayBufferByteLength()
static FieldAccess ForStringLength()
static FieldAccess ForNameRawHashField()
static FieldAccess ForJSDataViewDataPointer()
static FieldAccess ForJSArrayBufferViewBitField()
static CallDescriptor * GetSimplifiedCDescriptor(Zone *zone, const MachineSignature *sig, CallDescriptor::Flags flags=CallDescriptor::kNoFlags, Operator::Properties properties=Operator::kNoThrow)
static CallDescriptor * GetStubCallDescriptor(Zone *zone, const CallInterfaceDescriptor &descriptor, int stack_parameter_count, CallDescriptor::Flags flags, Operator::Properties properties=Operator::kNoProperties, StubCallMode stub_mode=StubCallMode::kCallCodeObject)
static CallDescriptor * GetRuntimeCallDescriptor(Zone *zone, Runtime::FunctionId function, int js_parameter_count, Operator::Properties properties, CallDescriptor::Flags flags, LazyDeoptOnThrow lazy_deopt_on_throw=LazyDeoptOnThrow::kNo)
static OutputFrameStateCombine Ignore()
PipelineData * data() const
constant_type constant_value() const
const FrameStateData * AllocateFrameStateData(const FrameStateInfo &info, Zone *zone)
void AddParentFrameState(V< FrameState > parent)
base::Vector< const OpIndex > Inputs()
void AddInput(MachineType type, OpIndex input)
static constexpr MemoryRepresentation FromMachineRepresentation(MachineRepresentation rep)
RegisterRepresentation ToRegisterRepresentation() const
static MemoryRepresentation FromMachineType(MachineType type)
static constexpr MemoryRepresentation Int8()
static constexpr MemoryRepresentation UncompressedTaggedPointer()
static constexpr MemoryRepresentation Uint32()
static constexpr MemoryRepresentation TaggedSigned()
static constexpr MemoryRepresentation Int32()
static constexpr MemoryRepresentation Int64()
static constexpr MemoryRepresentation Uint16()
constexpr uint8_t SizeInBytes() const
static constexpr MemoryRepresentation TaggedPointer()
static constexpr MemoryRepresentation UintPtr()
static constexpr MemoryRepresentation Uint8()
static constexpr MemoryRepresentation Int16()
static constexpr MemoryRepresentation Uint64()
MachineType ToMachineType() const
static constexpr MemoryRepresentation Float32()
static constexpr MemoryRepresentation Float64()
static constexpr OpIndex Invalid()
constexpr bool valid() const
static constexpr OpIndex FromOffset(uint32_t offset)
bool MatchIntegralWord32Constant(V< Any > matched, uint32_t *constant) const
ZoneWithName< kCompilationZoneName > & compilation_zone()
static constexpr RegisterRepresentation Simd128()
static constexpr RegisterRepresentation Word32()
static constexpr RegisterRepresentation Float64()
static constexpr RegisterRepresentation WordPtr()
static constexpr RegisterRepresentation Float32()
static constexpr RegisterRepresentation Word64()
static constexpr RegisterRepresentation Tagged()
static bool IsUnalignedLoadSupported(MemoryRepresentation repr)
static V< T > Cast(V< U > index)
static constexpr WordRepresentation Word32()
static constexpr WordRepresentation Word64()
uint32_t cur_index() const
const uint8_t * pc() const
uint32_t V8_INLINE pc_offset(const uint8_t *pc) const
constexpr ModuleTypeIndex ref_index() const
base::Vector< CasesPerCallSite > function_calls()
static int NoLiftoffBudget(const WasmModule *module, uint32_t func_index)
base::Vector< bool > has_non_inlineable_targets()
static constexpr uint32_t kMaxInliningNestingDepth
uint32_t function_index()
static constexpr int kMaxInlinedCount
static InliningTree * CreateRoot(Zone *zone, const WasmModule *module, uint32_t function_index)
static constexpr uint32_t stack_switch_target_sp_offset()
static constexpr uint32_t stack_switch_source_fp_offset()
base::Vector< const OpIndex > phi_inputs(size_t phi_i) const
V8_NOINLINE V8_PRESERVE_MOST void GrowInputsVector()
void AddIncomingException(OpIndex exception)
uint32_t phi_count() const
void AllocatePhiInputs(Zone *zone)
base::Vector< const OpIndex > incoming_exceptions() const
V8_INLINE BlockPhis(FullDecoder *decoder, Merge< Value > *merge)
ValueType phi_type(size_t phi_i) const
void AddInputForPhi(size_t phi_i, OpIndex input)
void InitReturnPhis(base::Vector< const ValueType > return_types)
ZoneVector< OpIndex > incoming_exceptions_
bool TryAddTarget(uint32_t target, uint32_t index)
void AddDefault(uint32_t target)
std::optional< uint32_t > primary_target_
const TargetMap & other_targets() const
const CaseVector & primary_indices() const
std::optional< uint32_t > default_target_
uint32_t default_target() const
absl::btree_map< uint32_t, CaseVector > TargetMap
bool LowerToBranches(Decoder *decoder, const BranchTableImmediate &imm)
static constexpr int32_t kMaxTableCount
uint32_t primary_target() const
static constexpr int32_t kMaxComparesPerTarget
CaseVector primary_indices_
static constexpr uint32_t kMaxTargets
V< WasmTrustedInstanceData > trusted_instance_data()
void Initialize(V< WasmTrustedInstanceData > trusted_instance_data, const WasmModule *mod)
V< WordPtr > memory0_start()
V< FixedArray > managed_object_maps()
V< WordPtr > LoadMemStart()
void ReloadCachedMemory()
V< NativeContext > native_context()
InstanceCache(Assembler &assembler)
ScopedVar< WordPtr > mem_start_
V< WasmTrustedInstanceData > trusted_data_
V< FixedArray > managed_object_maps_
V< WordPtr > memory0_size()
V< NativeContext > native_context_
V< WordPtr > LoadMemSize()
ScopedVar< WordPtr > mem_size_
void Delegate(FullDecoder *decoder, uint32_t depth, Control *block)
BranchHintingMode branch_hinting_mode_
void SetDataViewOpForErrorMessage(DataViewOp op_type)
void RefGetDesc(FullDecoder *decoder, const Value &ref_val, Value *result)
V< Word32 > StringCodePointAt(FullDecoder *decoder, V< String > string, V< Word32 > offset)
void OnFirstError(FullDecoder *)
void ThrowRef(FullDecoder *decoder, Value *value)
void StoreMem(FullDecoder *decoder, StoreType type, const MemoryAccessImmediate &imm, const Value &index, const Value &value)
V< WordPtr > GetDataViewByteLength(FullDecoder *decoder, V< Object > dataview, V< WordPtr > offset, DataViewOp op_type)
void MemoryFill(FullDecoder *decoder, const MemoryIndexImmediate &imm, const Value &dst, const Value &value, const Value &size)
void LocalGet(FullDecoder *decoder, Value *result, const IndexImmediate &imm)
base::Vector< OpIndex > real_parameters_
void NextInstruction(FullDecoder *decoder, WasmOpcode)
void StringEncodeWtf16Array(FullDecoder *decoder, const Value &str, const Value &array, const Value &start, Value *result)
void BuildWasmMaybeReturnCall(FullDecoder *decoder, const FunctionSig *sig, V< CallTarget > callee, V< HeapObject > ref, const Value args[], compiler::WasmCallKind call_kind=compiler::kWasmFunction)
BranchHint GetBranchHint(FullDecoder *decoder)
compiler::ExactOrSubtype GetExactness(FullDecoder *decoder, HeapType target)
BranchHintingStresser branch_hinting_stresser_
WasmFullDecoder< ValidationTag, TurboshaftGraphBuildingInterface > FullDecoder
bool is_inlined_tail_call_
void BrOnCastDescFail(FullDecoder *decoder, HeapType target_type, const Value &object, const Value &descriptor, Value *value_on_fallthrough, uint32_t br_depth, bool null_succeeds)
V< WordPtr > MemStart(uint32_t index)
void FallThruTo(FullDecoder *decoder, Control *block)
void LoadMem(FullDecoder *decoder, LoadType type, const MemoryAccessImmediate &imm, const Value &index, Value *result)
void CatchAll(FullDecoder *decoder, Control *block)
void ThrowRef(FullDecoder *decoder, OpIndex exn)
void Deopt(FullDecoder *decoder, V< FrameState > frame_state)
void StructNewDefault(FullDecoder *decoder, const StructIndexImmediate &imm, const Value &descriptor, Value *result)
void ArrayNewSegment(FullDecoder *decoder, const ArrayIndexImmediate &array_imm, const IndexImmediate &segment_imm, const Value &offset, const Value &length, Value *result)
void I31GetU(FullDecoder *decoder, const Value &input, Value *result)
std::unique_ptr< InstanceCache > owned_instance_cache_
void ReturnCallRef(FullDecoder *decoder, const Value &func_ref, const FunctionSig *sig, const Value args[])
void DeoptIfNot(FullDecoder *decoder, OpIndex deopt_condition, V< FrameState > frame_state)
void CatchCase(FullDecoder *decoder, Control *block, const CatchCase &catch_case, base::Vector< Value > values)
TSBlock * return_catch_block_
V< FixedArray > managed_object_maps(bool type_is_shared)
OpIndex MaybePhi(base::Vector< const OpIndex > elements, ValueType type)
void TableGet(FullDecoder *decoder, const Value &index, Value *result, const TableIndexImmediate &imm)
void MemoryInit(FullDecoder *decoder, const MemoryInitImmediate &imm, const Value &dst, const Value &src, const Value &size)
compiler::turboshaft::detail::index_type_for_t< typename Descriptor::results_t > CallBuiltinThroughJumptable(FullDecoder *decoder, V< Context > context, const typename Descriptor::arguments_t &args, CheckForException check_for_exception=CheckForException::kNo)
bool should_inline(FullDecoder *decoder, int feedback_slot, int size)
void TableCopy(FullDecoder *decoder, const TableCopyImmediate &imm, const Value &dst_val, const Value &src_val, const Value &size_val)
void Loop(FullDecoder *decoder, Control *block)
void ElemDrop(FullDecoder *decoder, const IndexImmediate &imm)
ZoneAbslFlatHashMap< TSBlock *, BlockPhis > block_phis_
OpIndex AsmjsLoadMem(V< Word32 > index, MemoryRepresentation repr)
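// --- Illustrative sketch of asm.js load semantics (an assumption about
// intent, not the actual implementation): unlike Wasm loads, asm.js loads
// must not trap on out-of-bounds indices; they yield a default value
// (0 for integer heaps, NaN for float heaps).
#include <cstddef>
#include <cstdint>
#include <cstring>

int32_t SketchAsmjsLoadI32(const uint8_t* mem_start, std::size_t mem_size,
                           uint32_t index) {
  if (index > mem_size || mem_size - index < sizeof(int32_t)) {
    return 0;  // out of bounds: no trap, just the default value
  }
  int32_t result;
  std::memcpy(&result, mem_start + index, sizeof(result));
  return result;
}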
void BoundsCheckArray(V< WasmArrayNullable > array, V< Word32 > index, ValueType array_type)
void StringAsWtf16(FullDecoder *decoder, const Value &str, Value *result)
void I64Const(FullDecoder *decoder, Value *result, int64_t value)
static constexpr int kPositionFieldSize
InliningTree * inlining_decisions_
OpIndex DataViewGetter(FullDecoder *decoder, const Value args[], DataViewOp op_type)
void DataViewDetachedBufferCheck(FullDecoder *decoder, V< Object > dataview, DataViewOp op_type)
V< WordPtr > ChangeTaggedNumberToIntPtr(V< Object > tagged)
V< WordPtr > MemSize(uint32_t index)
const WireBytesStorage * wire_bytes_
void StackCheck(WasmStackCheckOp::Kind kind, FullDecoder *decoder)
V< Object > NullCheck(const Value &value, TrapId trap_id=TrapId::kTrapNullDereference)
void StringViewIterSlice(FullDecoder *decoder, const Value &view, const Value &codepoints, Value *result)
void StringViewIterNext(FullDecoder *decoder, const Value &view, Value *result)
void Else(FullDecoder *decoder, Control *if_block)
void TableFill(FullDecoder *decoder, const TableIndexImmediate &imm, const Value &start, const Value &value, const Value &count)
void StoreLane(FullDecoder *decoder, StoreType type, const MemoryAccessImmediate &imm, const Value &index, const Value &value, const uint8_t laneidx)
TrapId GetTrapIdForTrap(wasm::TrapReason reason)
void NopForTestingUnsupportedInLiftoff(FullDecoder *decoder)
static constexpr bool kUsesPoppedArgs
OpIndex BinOpImpl(WasmOpcode opcode, OpIndex lhs, OpIndex rhs)
V< Word32 > IsExternRefString(const Value value)
void LoadLane(FullDecoder *decoder, LoadType type, const Value &value, const Value &index, const MemoryAccessImmediate &imm, const uint8_t laneidx, Value *result)
ZoneVector< WasmInliningPosition > * inlining_positions_
void UnpackWasmException(FullDecoder *decoder, V< Object > exception, base::Vector< Value > values)
void Drop(FullDecoder *decoder)
void CallRef(FullDecoder *decoder, const Value &func_ref, const FunctionSig *sig, const Value args[], Value returns[])
void ArrayNewDefault(FullDecoder *decoder, const ArrayIndexImmediate &imm, const Value &length, Value *result)
void RefTest(FullDecoder *decoder, HeapType target, const Value &object, Value *result, bool null_succeeds)
void StringNewWtf16Array(FullDecoder *decoder, const Value &array, const Value &start, const Value &end, Value *result)
void ReturnCallIndirect(FullDecoder *decoder, const Value &index, const CallIndirectImmediate &imm, const Value args[])
V< HeapObject > StringNewWtf8ArrayImpl(FullDecoder *decoder, const unibrow::Utf8Variant variant, const Value &array, const Value &start, const Value &end, ValueType result_type)
void ArrayNewFixed(FullDecoder *decoder, const ArrayIndexImmediate &array_imm, const IndexImmediate &length_imm, const Value elements[], Value *result)
void StringNewWtf8Array(FullDecoder *decoder, const unibrow::Utf8Variant variant, const Value &array, const Value &start, const Value &end, Value *result)
OpIndex CallCStackSlotToStackSlot(OpIndex arg, ExternalReference ref, MemoryRepresentation arg_type, MemoryRepresentation res_type)
void AtomicFence(FullDecoder *decoder)
void LoadTransform(FullDecoder *decoder, LoadType type, LoadTransformationKind transform, const MemoryAccessImmediate &imm, const Value &index, Value *result)
void MemoryCopy(FullDecoder *decoder, const MemoryCopyImmediate &imm, const Value &dst, const Value &src, const Value &size)
void DataViewSetter(FullDecoder *decoder, const Value args[], DataViewOp op_type)
V< Word32 > StringEqImpl(FullDecoder *decoder, V< String > a, V< String > b, ValueType a_type, ValueType b_type)
void StringViewWtf16Slice(FullDecoder *decoder, const Value &view, const Value &start, const Value &end, Value *result)
void StringMeasureWtf8(FullDecoder *decoder, const unibrow::Utf8Variant variant, const Value &str, Value *result)
void AssertNullTypecheck(FullDecoder *decoder, const Value &obj, Value *result)
const BranchHintMap * branch_hints_
void AtomicNotify(FullDecoder *decoder, const MemoryAccessImmediate &imm, OpIndex index, OpIndex num_waiters_to_wake, Value *result)
void FinishFunction(FullDecoder *decoder)
V< Map > GetRttFromDescriptor(const Value &descriptor)
void F64Const(FullDecoder *decoder, Value *result, double value)
void AtomicOp(FullDecoder *decoder, WasmOpcode opcode, const Value args[], const size_t argc, const MemoryAccessImmediate &imm, Value *result)
void BrTable(FullDecoder *decoder, const BranchTableImmediate &imm, const Value &key)
V< Word32 > CallCStackSlotToInt32(ExternalReference ref, std::initializer_list< std::pair< OpIndex, MemoryRepresentation > > args)
void DataViewBoundsCheck(FullDecoder *decoder, V< WordPtr > left, V< WordPtr > right, DataViewOp op_type)
void DataDrop(FullDecoder *decoder, const IndexImmediate &imm)
void If(FullDecoder *decoder, const Value &cond, Control *if_block)
void ThrowDataViewOutOfBoundsError(FullDecoder *decoder, DataViewOp op_type)
V< WasmTableObject > LoadTable(FullDecoder *decoder, const TableIndexImmediate &imm)
bool HandleWellKnownImport(FullDecoder *decoder, const CallFunctionImmediate &imm, const Value args[], Value returns[])
OpIndex BuildDiv64Call(OpIndex lhs, OpIndex rhs, ExternalReference ccall_ref, wasm::TrapId trap_zero)
void StructGet(FullDecoder *decoder, const Value &struct_object, const FieldImmediate &field, bool is_signed, Value *result)
void LocalSet(FullDecoder *decoder, const Value &value, const IndexImmediate &imm)
V< WordPtr > TableAddressToUintPtrOrOOBTrap(AddressType address_type, V< Word > index)
V< WordPtr > MemoryAddressToUintPtrOrOOBTrap(AddressType address_type, V< Word > index)
void BrOnNonNull(FullDecoder *decoder, const Value &ref_object, Value *result, uint32_t depth, bool)
bool IsSimd128ZeroConstant(OpIndex op)
void ArraySet(FullDecoder *decoder, const Value &array_obj, const ArrayIndexImmediate &imm, const Value &index, const Value &value)
void RefCastDesc(FullDecoder *decoder, const Value &object, const Value &desc, Value *result)
void BrIf(FullDecoder *decoder, const Value &cond, uint32_t depth)
void StringViewIterRewind(FullDecoder *decoder, const Value &view, const Value &codepoints, Value *result)
void CallIndirect(FullDecoder *decoder, const Value &index, const CallIndirectImmediate &imm, const Value args[], Value returns[])
void StringFromCodePoint(FullDecoder *decoder, const Value &code_point, Value *result)
OptionalV< FrameState > parent_frame_state_
BlockPhis * return_phis()
OpIndex CallCStackSlotToInt32(OpIndex arg, ExternalReference ref, MemoryRepresentation arg_type)
V< WasmTrustedInstanceData > trusted_instance_data(bool element_is_shared)
void Simd8x16ShuffleOp(FullDecoder *decoder, const Simd128Immediate &imm, const Value &input0, const Value &input1, Value *result)
void BrOnNull(FullDecoder *decoder, const Value &ref_object, uint32_t depth, bool pass_null_along_branch, Value *result_on_fallthrough)
V< WordPtr > StoreInStackSlot(OpIndex value, wasm::ValueType type)
V< Any > DefaultValue(ValueType type)
void RefAsNonNull(FullDecoder *decoder, const Value &arg, Value *result)
V< WordPtr > GetDataViewByteLength(FullDecoder *decoder, V< Object > dataview, DataViewOp op_type)
void InlineWasmCall(FullDecoder *decoder, uint32_t func_index, const FunctionSig *sig, uint32_t feedback_case, bool is_tail_call, const Value args[], Value returns[])
V< WordPtr > GetDataViewDataPtr(FullDecoder *decoder, V< Object > dataview, V< WordPtr > offset, DataViewOp op_type)
void StringEncodeWtf8Array(FullDecoder *decoder, const unibrow::Utf8Variant variant, const Value &str, const Value &array, const Value &start, Value *result)
void Select(FullDecoder *decoder, const Value &cond, const Value &fval, const Value &tval, Value *result)
void RefCastAbstract(FullDecoder *decoder, const Value &object, HeapType type, Value *result, bool null_succeeds)
std::pair< V< Word32 >, V< ExposedTrustedObject > > BuildIndirectCallTargetAndImplicitArg(FullDecoder *decoder, V< WordPtr > index_wordptr, CallIndirectImmediate imm, bool needs_type_or_null_check=true)
void StringNewWtf8(FullDecoder *decoder, const MemoryIndexImmediate &imm, const unibrow::Utf8Variant variant, const Value &offset, const Value &size, Value *result)
compiler::turboshaft::detail::index_type_for_t< typename Descriptor::results_t > CallBuiltinByPointer(FullDecoder *decoder, const typename Descriptor::arguments_t &args, CheckForException check_for_exception=CheckForException::kNo)
void SimdOp(FullDecoder *decoder, WasmOpcode opcode, const Value *args, Value *result)
std::pair< V< WordPtr >, compiler::BoundsCheckResult > BoundsCheckMem(const wasm::WasmMemory *memory, MemoryRepresentation repr, OpIndex index, uintptr_t offset, compiler::EnforceBoundsCheck enforce_bounds_check, compiler::AlignmentCheck alignment_check)
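// --- Illustrative sketch of the explicit-bounds-check arithmetic (the real
// BoundsCheckMem also supports trap-handler-based checks and alignment
// checks). The comparison is done in 64-bit space so that
// `index + offset + access_size` cannot wrap around; `static_offset` and
// `access_size` are compile-time constants small enough not to overflow.
#include <cstdint>

bool SketchInBounds(uint64_t index, uint64_t static_offset,
                    uint64_t access_size, uint64_t mem_size) {
  // The access covers [index + static_offset, index + static_offset +
  // access_size), and that whole range must fit inside the memory.
  uint64_t end_offset = static_offset + access_size;
  if (end_offset > mem_size) return false;  // even index 0 would be OOB
  return index <= mem_size - end_offset;
}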
uint32_t liftoff_frame_size_
void StringEq(FullDecoder *decoder, const Value &a, const Value &b, Value *result)
void BrOnCastFail(FullDecoder *decoder, HeapType target_type, const Value &object, Value *value_on_fallthrough, uint32_t br_depth, bool null_succeeds)
void CurrentMemoryPages(FullDecoder *decoder, const MemoryIndexImmediate &imm, Value *result)
void ArrayFill(FullDecoder *decoder, ArrayIndexImmediate &imm, const Value &array, const Value &index, const Value &value, const Value &length)
void set_no_liftoff_inlining_budget(int no_liftoff_inlining_budget)
void BindBlockAndGeneratePhis(FullDecoder *decoder, TSBlock *tsblock, Merge< Value > *merge, OpIndex *exception=nullptr)
void TableSize(FullDecoder *decoder, const TableIndexImmediate &imm, Value *result)
void StringEncodeWtf8(FullDecoder *decoder, const MemoryIndexImmediate &memory, const unibrow::Utf8Variant variant, const Value &str, const Value &offset, Value *result)
void ArrayGet(FullDecoder *decoder, const Value &array_obj, const ArrayIndexImmediate &imm, const Value &index, bool is_signed, Value *result)
void ArrayNew(FullDecoder *decoder, const ArrayIndexImmediate &imm, const Value &length, const Value &initial_value, Value *result)
V< Word64 > ExtractTruncationProjections(V< Tuple< Word64, Word32 > > truncated)
OpIndex StringMeasureWtf8Impl(FullDecoder *decoder, const unibrow::Utf8Variant variant, V< String > string)
void set_inlining_id(uint8_t inlining_id)
OpIndex BuildIntToFloatConversionInstruction(OpIndex input, ExternalReference ccall_ref, MemoryRepresentation input_representation, MemoryRepresentation result_representation)
void RefFunc(FullDecoder *decoder, uint32_t function_index, Value *result)
void DoReturn(FullDecoder *decoder, uint32_t drop_values)
OpIndex BuildChangeEndiannessStore(OpIndex node, MachineRepresentation mem_rep, wasm::ValueType wasmtype)
void DataViewRangeCheck(FullDecoder *decoder, V< WordPtr > left, V< WordPtr > right, DataViewOp op_type)
int no_liftoff_inlining_budget_
void Forward(FullDecoder *decoder, const Value &from, Value *to)
LoadOp::Kind GetMemoryAccessKind(MemoryRepresentation repr, compiler::BoundsCheckResult bounds_check_result)
void StringAsIter(FullDecoder *decoder, const Value &str, Value *result)
void ArrayLen(FullDecoder *decoder, const Value &array_obj, Value *result)
void CatchException(FullDecoder *decoder, const TagIndexImmediate &imm, Control *block, base::Vector< Value > values)
std::pair< V< Word32 >, V< HeapObject > > BuildImportedFunctionTargetAndImplicitArg(FullDecoder *decoder, uint32_t function_index)
compiler::turboshaft::detail::index_type_for_t< typename Descriptor::results_t > CallBuiltinThroughJumptable(FullDecoder *decoder, const typename Descriptor::arguments_t &args, CheckForException check_for_exception=CheckForException::kNo)
void BuildEncodeException32BitValue(V< FixedArray > values_array, uint32_t index, V< Word32 > value)
InstanceCache & instance_cache_
OpIndex CallCStackSlotToStackSlot(OpIndex arg, ExternalReference ref, MemoryRepresentation arg_type)
void TraceMemoryOperation(FullDecoder *decoder, bool is_store, MemoryRepresentation repr, V< WordPtr > index, uintptr_t offset)
uint32_t GetLiftoffFrameSize(const FullDecoder *decoder)
void PopControl(FullDecoder *decoder, Control *block)
void GlobalGet(FullDecoder *decoder, Value *result, const GlobalIndexImmediate &imm)
static constexpr int kPositionFieldShift
void TableGrow(FullDecoder *decoder, const TableIndexImmediate &imm, const Value &value, const Value &delta, Value *result)
void StartFunctionBody(FullDecoder *decoder, Control *block)
void set_parent_position(SourcePosition position)
void GlobalSet(FullDecoder *decoder, const Value &value, const GlobalIndexImmediate &imm)
static constexpr uint8_t kNoInliningId
V< Word32 > GetStringIndexOf(FullDecoder *decoder, V< String > string, V< String > search, V< Word32 > start)
bool IsExplicitStringCast(const Value value)
void ArrayInitSegment(FullDecoder *decoder, const ArrayIndexImmediate &array_imm, const IndexImmediate &segment_imm, const Value &array, const Value &array_index, const Value &segment_offset, const Value &length)
void StringConst(FullDecoder *decoder, const StringConstImmediate &imm, Value *result)
void Throw(FullDecoder *decoder, const TagIndexImmediate &imm, const Value arg_values[])
void AsmjsStoreMem(V< Word32 > index, OpIndex value, MemoryRepresentation repr)
void StringHash(FullDecoder *decoder, const Value &string, Value *result)
void StringViewWtf16Encode(FullDecoder *decoder, const MemoryIndexImmediate &imm, const Value &view, const Value &offset, const Value &pos, const Value &codeunits, Value *result)
void WellKnown_FastApi(FullDecoder *decoder, const CallFunctionImmediate &imm, const Value args[], Value returns[])
V< WordPtr > MemBuffer(uint32_t mem_index, uintptr_t offset)
TurboshaftGraphBuildingInterface(Zone *zone, CompilationEnv *env, Assembler &assembler, std::unique_ptr< AssumptionsJournal > *assumptions, ZoneVector< WasmInliningPosition > *inlining_positions, int func_index, bool shared, const WireBytesStorage *wire_bytes)
void BrOnCastDesc(FullDecoder *decoder, HeapType target_type, const Value &object, const Value &descriptor, Value *value_on_branch, uint32_t br_depth, bool null_succeeds)
void MemoryGrow(FullDecoder *decoder, const MemoryIndexImmediate &imm, const Value &value, Value *result)
void StringIsUSVSequence(FullDecoder *decoder, const Value &str, Value *result)
void RefTestAbstract(FullDecoder *decoder, const Value &object, HeapType type, Value *result, bool null_succeeds)
bool InlineTargetIsTypeCompatible(const WasmModule *module, const FunctionSig *sig, const FunctionSig *inlinee)
void StartFunction(FullDecoder *decoder)
V< Word64 > BuildDecodeException64BitValue(V< FixedArray > exception_values_array, int index)
void TraceInstruction(FullDecoder *decoder, uint32_t markid)
V< WordPtr > MemOrTableAddressToUintPtrOrOOBTrap(AddressType address_type, V< Word > index, TrapId trap_reason)
void StringMeasureWtf16(FullDecoder *decoder, const Value &str, Value *result)
void LocalTee(FullDecoder *decoder, const Value &value, Value *result, const IndexImmediate &imm)
SourcePosition OpIndexToSourcePosition(OpIndex index)
V< T > AnnotateAsString(V< T > value, wasm::ValueType type)
void ArrayCopy(FullDecoder *decoder, const Value &dst, const Value &dst_index, const Value &src, const Value &src_index, const ArrayIndexImmediate &src_imm, const Value &length)
void BuildWasmCall(FullDecoder *decoder, const FunctionSig *sig, V< CallTarget > callee, V< HeapObject > ref, const Value args[], Value returns[], compiler::WasmCallKind call_kind=compiler::WasmCallKind::kWasmFunction, CheckForException check_for_exception=CheckForException::kCatchInThisFrame)
static constexpr int kInliningIdFieldSize
OpIndex BuildCcallConvertFloatSat(OpIndex arg, MemoryRepresentation float_type, ExternalReference ccall_ref, bool is_signed)
void Try(FullDecoder *decoder, Control *block)
void StringEncodeWtf16(FullDecoder *decoder, const MemoryIndexImmediate &imm, const Value &str, const Value &offset, Value *result)
void UnOp(FullDecoder *decoder, WasmOpcode opcode, const Value &value, Value *result)
void set_inlining_decisions(InliningTree *inlining_decisions)
void RefNull(FullDecoder *decoder, ValueType type, Value *result)
void BrOnCastFailImpl(FullDecoder *decoder, V< Map > rtt, compiler::WasmTypeCheckConfig config, const Value &object, Value *value_on_fallthrough, uint32_t br_depth, bool null_succeeds)
void AssertNotNullTypecheck(FullDecoder *decoder, const Value &obj, Value *result)
void StructSet(FullDecoder *decoder, const Value &struct_object, const FieldImmediate &field, const Value &field_value)
std::pair< V< Word32 >, V< ExposedTrustedObject > > BuildFunctionReferenceTargetAndImplicitArg(V< WasmFuncRef > func_ref, ValueType type)
int no_liftoff_inlining_budget()
void MaybeSetPositionToParent(OpIndex call, CheckForException check_for_exception)
OpIndex CallCStackSlotToStackSlot(OpIndex arg0, OpIndex arg1, ExternalReference ref, MemoryRepresentation arg_type)
void F32Const(FullDecoder *decoder, Value *result, float value)
ZoneVector< OpIndex > ssa_env_
void RefI31(FullDecoder *decoder, const Value &input, Value *result)
OpIndex BuildChangeEndiannessLoad(OpIndex node, MachineType memtype, wasm::ValueType wasmtype)
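// --- Illustrative sketch, not the graph-building code: on big-endian
// targets, Wasm's little-endian memory values must be byte-reversed after
// loads (BuildChangeEndiannessLoad) and before stores
// (BuildChangeEndiannessStore); native byteswap instructions are used for
// the common sizes where available.
#include <cstddef>
#include <cstdint>
#include <cstring>

uint64_t SketchReverseBytes(uint64_t value, std::size_t size_in_bytes) {
  uint8_t bytes[sizeof(uint64_t)];
  std::memcpy(bytes, &value, sizeof(bytes));
  for (std::size_t i = 0; i < size_in_bytes / 2; ++i) {
    uint8_t tmp = bytes[i];
    bytes[i] = bytes[size_in_bytes - 1 - i];
    bytes[size_in_bytes - 1 - i] = tmp;
  }
  std::memcpy(&value, bytes, sizeof(bytes));
  return value;
}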
OpIndex AnnotateResultIfReference(OpIndex result, wasm::ValueType type)
void CallDirect(FullDecoder *decoder, const CallFunctionImmediate &imm, const Value args[], Value returns[])
void AtomicWait(FullDecoder *decoder, WasmOpcode opcode, const MemoryAccessImmediate &imm, OpIndex index, OpIndex expected, V< Word64 > timeout, Value *result)
void StringViewWtf8Advance(FullDecoder *decoder, const Value &view, const Value &pos, const Value &bytes, Value *result)
TurboshaftGraphBuildingInterface(Zone *zone, CompilationEnv *env, Assembler &assembler, Mode mode, InstanceCache &instance_cache, std::unique_ptr< AssumptionsJournal > *assumptions, ZoneVector< WasmInliningPosition > *inlining_positions, int func_index, bool shared, const WireBytesStorage *wire_bytes, base::Vector< OpIndex > real_parameters, TSBlock *return_block, BlockPhis *return_phis, TSBlock *catch_block, bool is_inlined_tail_call, OptionalV< FrameState > parent_frame_state)
std::unique_ptr< AssumptionsJournal > * assumptions_
void BrOnCast(FullDecoder *decoder, HeapType target_type, const Value &object, Value *value_on_branch, uint32_t br_depth, bool null_succeeds)
void ThrowDataViewTypeError(FullDecoder *decoder, V< Object > dataview, DataViewOp op_type)
Decoder::NoValidationTag ValidationTag
V< FrameState > CreateFrameState(FullDecoder *decoder, const FunctionSig *callee_sig, const Value *func_ref_or_index, const Value args[])
void StringViewWtf8Encode(FullDecoder *decoder, const MemoryIndexImmediate &memory, const unibrow::Utf8Variant variant, const Value &view, const Value &addr, const Value &pos, const Value &bytes, Value *next_pos, Value *bytes_written)
OpIndex CallAndMaybeCatchException(FullDecoder *decoder, V< CallTarget > callee, base::Vector< const OpIndex > args, const TSCallDescriptor *descriptor, CheckForException check_for_exception, OpEffects effects)
void ThrowDataViewDetachedError(FullDecoder *decoder, DataViewOp op_type)
void StringCompare(FullDecoder *decoder, const Value &lhs, const Value &rhs, Value *result)
V< Word32 > BuildDecodeException32BitValue(V< FixedArray > exception_values_array, int index)
OpIndex BuildCcallConvertFloat(OpIndex arg, MemoryRepresentation float_type, ExternalReference ccall_ref)
OpIndex CallCStackSlotToStackSlot(ExternalReference ref, MemoryRepresentation res_type, std::initializer_list< std::pair< OpIndex, MemoryRepresentation > > args)
void StringViewWtf8Slice(FullDecoder *decoder, const Value &view, const Value &start, const Value &end, Value *result)
void Block(FullDecoder *decoder, Control *block)
TSBlock * NewBlockWithPhis(FullDecoder *decoder, Merge< Value > *merge)
void SetupControlFlowEdge(FullDecoder *decoder, TSBlock *block, uint32_t drop_values=0, V< Object > exception=OpIndex::Invalid(), Merge< Value > *stack_values=nullptr)
void I32Const(FullDecoder *decoder, Value *result, int32_t value)
compiler::NullCheckStrategy null_check_strategy_
void TableInit(FullDecoder *decoder, const TableInitImmediate &imm, const Value &dst_val, const Value &src_val, const Value &size_val)
void StringViewIterAdvance(FullDecoder *decoder, const Value &view, const Value &codepoints, Value *result)
void ArrayFillImpl(V< WasmArray > array, V< Word32 > index, V< Any > value, OpIndex length, const wasm::ArrayType *type, bool emit_write_barrier)
SourcePosition parent_position_
void SimdLaneOp(FullDecoder *decoder, WasmOpcode opcode, const SimdLaneImmediate &imm, base::Vector< const Value > inputs, Value *result)
V< Word32 > GetCodeUnitImpl(FullDecoder *decoder, V< String > string, V< Word32 > offset)
void StringConcat(FullDecoder *decoder, const Value &head, const Value &tail, Value *result)
void I31GetS(FullDecoder *decoder, const Value &input, Value *result)
V< Word32 > LoadStringLength(V< Object > string)
void BrOnCastAbstract(FullDecoder *decoder, const Value &object, HeapType type, Value *value_on_branch, uint32_t br_depth, bool null_succeeds)
void S128Const(FullDecoder *decoder, const Simd128Immediate &imm, Value *result)
void BrOnCastImpl(FullDecoder *decoder, V< Map > rtt, compiler::WasmTypeCheckConfig config, const Value &object, Value *value_on_branch, uint32_t br_depth, bool null_succeeds)
V< String > ExternRefToString(const Value value, bool null_succeeds=false)
void Rethrow(FullDecoder *decoder, Control *block)
void TableSet(FullDecoder *decoder, const Value &index, const Value &value, const TableIndexImmediate &imm)
OpIndex UnOpImpl(WasmOpcode opcode, OpIndex arg, ValueType input_type)
std::pair< OpIndex, V< Word32 > > BuildCCallForFloatConversion(OpIndex arg, MemoryRepresentation float_type, ExternalReference ccall_ref)
V< HeapObject > ArrayNewImpl(FullDecoder *decoder, ModuleTypeIndex index, const ArrayType *array_type, V< Word32 > length, V< Any > initial_value)
void StringAsWtf8(FullDecoder *decoder, const Value &str, Value *result)
V< WasmStruct > StructNewImpl(FullDecoder *decoder, const StructIndexImmediate &imm, const Value &descriptor, OpIndex args[])
void StructNew(FullDecoder *decoder, const StructIndexImmediate &imm, const Value &descriptor, const Value args[], Value *result)
V< WasmArray > BoundsCheckArrayWithLength(V< WasmArrayNullable > array, V< Word32 > index, V< Word32 > length, compiler::CheckForNull null_check)
void BrOnCastFailAbstract(FullDecoder *decoder, const Value &object, HeapType type, Value *value_on_fallthrough, uint32_t br_depth, bool null_succeeds)
void StringNewWtf16(FullDecoder *decoder, const MemoryIndexImmediate &imm, const Value &offset, const Value &size, Value *result)
void ReturnCall(FullDecoder *decoder, const CallFunctionImmediate &imm, const Value args[])
V< Word32 > ChangeSmiToUint32(V< Smi > value)
void BinOp(FullDecoder *decoder, WasmOpcode opcode, const Value &lhs, const Value &rhs, Value *result)
const CallOp * IsArrayNewSegment(V< Object > array)
void TryTable(FullDecoder *decoder, Control *block)
void RefCast(FullDecoder *decoder, const Value &object, Value *result)
OpIndex WasmPositionToOpIndex(WasmCodePosition position, int inlining_id)
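// --- Illustrative sketch of packing a source position together with an
// inlining id into one integer, as WasmPositionToOpIndex/
// OpIndexToSourcePosition do via kPositionFieldShift/kPositionFieldSize and
// kInliningIdFieldSize. The field widths here are assumptions.
#include <cstdint>

constexpr int kSketchInliningIdBits = 8;
constexpr uint64_t kSketchInliningIdMask =
    (uint64_t{1} << kSketchInliningIdBits) - 1;

uint64_t SketchPackPosition(uint64_t position, uint8_t inlining_id) {
  return (position << kSketchInliningIdBits) | inlining_id;
}

uint64_t SketchUnpackPosition(uint64_t packed) {
  return packed >> kSketchInliningIdBits;
}

uint8_t SketchUnpackInliningId(uint64_t packed) {
  return static_cast<uint8_t>(packed & kSketchInliningIdMask);
}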
V< Smi > ChangeUint31ToSmi(V< Word32 > value)
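// --- Illustrative sketch of Smi tagging with 31-bit payloads (the
// SmiValuesAre31Bits() configuration): a Smi stores the integer shifted left
// by one with a 0 tag bit, so any uint31 fits without a heap allocation.
// This mirrors the ChangeUint31ToSmi/ChangeSmiToUint32 pair above.
#include <cstdint>

uint32_t SketchUint31ToSmi(uint32_t value) {  // requires value < 2^31
  return value << 1;                          // low tag bit 0 marks a Smi
}

uint32_t SketchSmiToUint32(uint32_t smi) { return smi >> 1; }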
void BrOrRet(FullDecoder *decoder, uint32_t depth, uint32_t drop_values=0)
void StringViewWtf16GetCodeUnit(FullDecoder *decoder, const Value &view, const Value &pos, Value *result)
OpIndex StringEncodeWtf8ArrayImpl(FullDecoder *decoder, const unibrow::Utf8Variant variant, V< String > str, V< WasmArray > array, V< Word32 > start)
std::optional< bool > deopts_enabled_
void Trap(FullDecoder *decoder, TrapReason reason)
constexpr int value_kind_size() const
constexpr MachineType machine_type() const
constexpr ValueKind kind() const
constexpr MachineRepresentation machine_representation() const
constexpr Nullability nullability() const
constexpr bool is_nullable() const
constexpr uint32_t raw_bit_field() const
constexpr ValueType Unpacked() const
static constexpr ValueType Ref(ModuleTypeIndex index, bool shared, RefTypeKind kind)
static constexpr ValueType RefMaybeNull(ModuleTypeIndex index, Nullability nullable, bool shared, RefTypeKind kind)
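// --- Illustrative sketch (the field layout is an assumption): a
// ValueType-style handle packs kind, nullability, sharedness and a
// module-relative type index into one 32-bit word, so it is cheap to copy
// and compare, and raw_bit_field() exposes the word directly.
#include <cstdint>

class SketchValueType {
 public:
  static constexpr SketchValueType Ref(uint32_t index, bool shared,
                                       bool nullable) {
    uint32_t bits = (index << 3) | (shared ? 4u : 0u) | (nullable ? 2u : 0u) |
                    1u;  // bit 0: is_ref
    return SketchValueType(bits);
  }
  constexpr bool is_ref() const { return (bits_ & 1) != 0; }
  constexpr bool is_nullable() const { return (bits_ & 2) != 0; }
  constexpr bool is_shared() const { return (bits_ & 4) != 0; }
  constexpr uint32_t ref_index() const { return bits_ >> 3; }
  constexpr uint32_t raw_bit_field() const { return bits_; }

 private:
  constexpr explicit SketchValueType(uint32_t bits) : bits_(bits) {}
  uint32_t bits_;
};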
WasmDetectedFeatures * detected_
const WasmModule * module_
base::Vector< ValueType > local_types() const
const WasmEnabledFeatures enabled_
uint32_t num_locals() const
static BitVector * AnalyzeLoopAssignment(WasmDecoder *decoder, const uint8_t *pc, uint32_t locals_count, Zone *zone, bool *loop_is_innermost=nullptr)
ValueType local_type(uint32_t index) const
WasmCodePosition position() const
bool HasCatchAll(Control *block) const
int32_t current_catch() const
Control * control_at(uint32_t depth)
uint32_t control_depth_of_current_catch() const
uint32_t pc_relative_offset() const
Value * stack_value(uint32_t depth) const
uint32_t stack_size() const
uint32_t control_depth() const
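// --- Illustrative sketch of control_at(depth): Wasm branch depths count
// outward from the innermost control structure, so depth 0 is the top of
// the control stack and control_depth() - 1 is the function-level block.
#include <cstdint>
#include <vector>

struct SketchControl {
  uint32_t stack_depth;
};

struct SketchDecoder {
  std::vector<SketchControl> control_;

  uint32_t control_depth() const {
    return static_cast<uint32_t>(control_.size());
  }
  SketchControl* control_at(uint32_t depth) {  // depth 0 == innermost
    return &control_[control_.size() - 1 - depth];
  }
};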
V< BigInt > BuildChangeInt64ToBigInt(V< Word64 > input, StubCallMode stub_mode)
V< WasmTrustedInstanceData > LoadTrustedDataFromInstanceObject(V< HeapObject > instance_object)
RegisterRepresentation RepresentationFor(ValueTypeBase type)
V< WordPtr > BuildSwitchToTheCentralStack(V< WordPtr > old_limit)
void BuildSetNewStackLimit(V< WordPtr > old_limit, V< WordPtr > new_limit)
std::pair< V< WordPtr >, V< WordPtr > > BuildSwitchToTheCentralStackIfNeeded()
OpIndex GetBuiltinPointerTarget(Builtin builtin)
V< WordPtr > GetTargetForBuiltinCall(Builtin builtin, StubCallMode stub_mode)
std::pair< V< Word32 >, V< HeapObject > > BuildImportedFunctionTargetAndImplicitArg(ConstOrV< Word32 > func_index, V< WasmTrustedInstanceData > trusted_instance_data)
std::pair< V< Word32 >, V< ExposedTrustedObject > > BuildFunctionTargetAndImplicitArg(V< WasmInternalFunction > internal_function)
compiler::turboshaft::OpIndex OpIndex
OpIndex CallRuntime(Zone *zone, Runtime::FunctionId f, std::initializer_list< const OpIndex > args, V< Context > context)
void BuildSwitchBackFromCentralStack(V< WordPtr > old_sp, V< WordPtr > old_limit)
void BuildModifyThreadInWasmFlagHelper(Zone *zone, OpIndex thread_in_wasm_flag_address, bool new_value)
OpIndex CallC(const MachineSignature *sig, ExternalReference ref, std::initializer_list< OpIndex > args)
void BuildModifyThreadInWasmFlag(Zone *zone, bool new_value)
WellKnownImport get(int index) const
#define COMPRESS_POINTERS_BOOL
#define FOREACH_WASM_TRAPREASON(V)
Tagged< NativeContext > native_context_
#define DATAVIEW_OP_LIST(V)
base::Vector< const DirectHandle< Object > > args
#define ELEMENTS_KIND_TO_ELEMENT_SIZE(Type, type, TYPE, ctype)
std::optional< TNode< JSArray > > a
ZoneVector< RpoNumber > & result
ZoneVector< Entry > entries
constexpr bool IsInBounds(T index, T length, T max)
constexpr Vector< T > VectorOf(T *start, size_t size)
typename IndexTypeFor< T >::type index_type_for_t
V8_INLINE const Operation & Get(const Graph &graph, OpIndex index)
UnionOf< String, Null > StringOrNull
uint64_t OperationStorageSlot
std::conditional_t< Is64(), Word64, Word32 > WordPtr
typename detail::TypeForBits< Bits >::float_type float_type
WordWithBits< 128 > Simd128
CallDescriptor * GetWasmCallDescriptor(Zone *zone, const Signature< T > *fsig, WasmCallKind call_kind, bool need_frame_state)
static constexpr int kCharWidthBailoutSentinel
kProtectedByTrapHandler
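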
bool IsTrapHandlerEnabled()
const char * WellKnownImportName(WellKnownImport wki)
constexpr uint32_t kWasmPageSizeLog2
int GetSubtypingDepth(const WasmModule *module, ModuleTypeIndex type_index)
V8_EXPORT_PRIVATE void BuildTSGraph(compiler::turboshaft::PipelineData *data, AccountingAllocator *allocator, CompilationEnv *env, WasmDetectedFeatures *detected, Graph &graph, const FunctionBody &func_body, const WireBytesStorage *wire_bytes, std::unique_ptr< AssumptionsJournal > *assumptions, ZoneVector< WasmInliningPosition > *inlining_positions, int func_index)
constexpr int kWasmInstanceDataParameterIndex
uint32_t max_table_size()
constexpr uint32_t kMinimumSupertypeArraySize
constexpr IndependentHeapType kWasmRefNullExternString
constexpr size_t kV8MaxWasmFunctionSize
constexpr IndependentValueType kWasmF32
constexpr IndependentHeapType kWasmAnyRef
V8_NOINLINE bool EquivalentTypes(ValueType type1, ValueType type2, const WasmModule *module1, const WasmModule *module2)
bool is_asmjs_module(const WasmModule *module)
constexpr IndependentHeapType kWasmExternRef
constexpr IndependentValueType kWasmI32
constexpr IndependentHeapType kWasmRefI31
constexpr size_t kMaxMemory64Size
constexpr IndependentHeapType kWasmRefExternString
static constexpr bool kNotShared
DecodeResult ValidateFunctionBody(Zone *zone, WasmEnabledFeatures enabled, const WasmModule *module, WasmDetectedFeatures *detected, const FunctionBody &body)
WireBytesRef Get(const NameMap &map, uint32_t index)
V8_INLINE bool IsSubtypeOf(ValueType subtype, ValueType supertype, const WasmModule *sub_module, const WasmModule *super_module)
constexpr size_t kV8MaxWasmTableSize
constexpr bool kPartialOOBWritesAreNoops
constexpr IndependentValueType kWasmS128
constexpr IndependentValueType kWasmF64
constexpr IndependentValueType kWasmI64
constexpr IndependentHeapType kWasmRefString
bool TryCast(Tagged< From > value, Tagged< To > *out)
constexpr NullMaybeHandleType kNullMaybeHandle
constexpr int kTaggedSize
constexpr int kSimd128Size
constexpr int kBitsPerByte
bool Is(IndirectHandle< U > value)
void PrintF(const char *format,...)
const Address kWeakHeapObjectMask
V8_INLINE constexpr bool IsSmi(TaggedImpl< kRefType, StorageType > obj)
constexpr int kSystemPointerSize
constexpr int kTaggedSizeLog2
Handle< T > IndirectHandle
constexpr int kBitsPerInt
constexpr bool SmiValuesAre31Bits()
V8_EXPORT_PRIVATE FlagValues v8_flags
V8_EXPORT_PRIVATE constexpr int ElementSizeInBytes(MachineRepresentation)
bool is_signed(Condition cond)
static bool IsZero(const Operand &rt)
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
V8_INLINE Local< Primitive > Null(Isolate *isolate)
i::Address Load(i::Address address)
const size_t segment_offset
#define DCHECK_LE(v1, v2)
#define DCHECK_NOT_NULL(val)
#define DCHECK_IMPLIES(v1, v2)
#define DCHECK_NE(v1, v2)
#define CHECK_NE(lhs, rhs)
#define DCHECK_GE(v1, v2)
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
#define DCHECK_LT(v1, v2)
#define DCHECK_EQ(v1, v2)
#define DCHECK_GT(v1, v2)
#define V8_EXPORT_PRIVATE
static constexpr Kind TaggedBase()
constexpr Kind NotLoadEliminable()
static constexpr Kind RawAligned()
static constexpr Kind Protected()
constexpr Kind Immutable() const
static constexpr Kind TrapOnNull()
constexpr Kind Atomic() const
static constexpr Kind MaybeUnaligned(MemoryRepresentation rep)
static constexpr Kind RawUnaligned()
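// --- Illustrative sketch of the LoadOp::Kind-style builder pattern above:
// static factories pick a base kind and constexpr "modifier" members return
// an adjusted copy, so kinds compose as TaggedBase().Immutable().Atomic().
// The member set here is reduced for illustration.
struct SketchKind {
  bool tagged_base;
  bool immutable;
  bool atomic;

  static constexpr SketchKind TaggedBase() { return {true, false, false}; }
  static constexpr SketchKind RawAligned() { return {false, false, false}; }
  constexpr SketchKind Immutable() const {
    SketchKind k = *this;
    k.immutable = true;
    return k;
  }
  constexpr SketchKind Atomic() const {
    SketchKind k = *this;
    k.atomic = true;
    return k;
  }
};

// usage: constexpr SketchKind k = SketchKind::TaggedBase().Immutable();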
const underlying_operation_t< Op > * TryCast() const
underlying_operation_t< Op > & Cast()
RegisterRepresentation rep
static const TSCallDescriptor * Create(const CallDescriptor *descriptor, CanThrow can_throw, LazyDeoptOnThrow lazy_deopt_on_throw, Zone *graph_zone, const JSWasmCallParameters *js_wasm_call_parameters=nullptr)
static constexpr int bits
union v8::internal::wasm::CatchCase::MaybeTagIndex maybe_tag
BranchDepthImmediate br_imm
const WasmEnabledFeatures enabled_features
std::atomic< const MachineSignature * > * fast_api_signatures
const std::atomic< Address > * fast_api_targets
const WasmModule *const module
ControlBase(Zone *zone, ControlKind kind, uint32_t stack_depth, uint32_t init_stack_depth, const uint8_t *pc, Reachability reachability)
static constexpr uint32_t kUninitializedLiftoffFrameSize
Control(Args &&... args) V8_NOEXCEPT
TSBlock * false_or_loop_or_catch_block
Value(Args &&... args) V8_NOEXCEPT
ValueBase(const uint8_t *pc, ValueType type)
BoundsCheckStrategy bounds_checks
std::vector< WasmMemory > memories
#define OFFSET_OF_DATA_START(Type)
#define HANDLE_F16X8_INVERSE_COMPARISON(kind, ts_kind, extern_ref)
#define HANDLE_UNARY_OPTIONAL_OPCODE(kind, feature, external_ref)
#define HANDLE_SHIFT_OPCODE(kind)
#define HANDLE_TERNARY_MASK_OPCODE(kind)
#define HANDLE_BINARY_OPCODE(kind)
#define HANDLE_INVERSE_COMPARISON(wasm_kind, ts_kind)
#define HANDLE_TEST_OPCODE(kind)
#define HANDLE_SPLAT_OPCODE(kind)
#define CASE_LOAD(OPCODE, RESULT, INPUT)
#define HANDLE_F16X8_TERN_OPCODE(kind, extern_ref)
#define LOAD_OPERATION(V)
#define HANDLE_TERNARY_OTHER_OPCODE(kind)
#define STORE_OPERATION(V)
#define HANDLE_UNARY_NON_OPTIONAL_OPCODE(kind)
#define CASE_BINOP(OPCODE, BINOP, RESULT, INPUT)
#define HANDLE_F16X8_BIN_OPTIONAL_OPCODE(kind, extern_ref)
#define CASE_STORE(OPCODE, INPUT, OUTPUT)
TagIndexImmediate tag_imm
#define V8_STATIC_ROOTS_BOOL
#define V8_UNLIKELY(condition)
#define LOAD_IMMUTABLE_PROTECTED_INSTANCE_FIELD(instance, name, type)
#define LOAD_PROTECTED_INSTANCE_FIELD(instance, name, type)
#define LOAD_IMMUTABLE_INSTANCE_FIELD(instance, name, representation)
#define TRAPREASON_TO_TRAPID(name)