#define __ ACCESS_MASM(masm)

// RequiredStackSizeForCallerSaved (fragment):
                                                 Register exclusion3) const {
  RegList exclusions = {exclusion1, exclusion2, exclusion3};

// PushCallerSaved (fragment):
                                    Register exclusion2, Register exclusion3) {
  RegList exclusions = {exclusion1, exclusion2, exclusion3};

// PopCallerSaved (fragment):
                                    Register exclusion2, Register exclusion3) {
  RegList exclusions = {exclusion1, exclusion2, exclusion3};

// ExternalReferenceAsOperand (fragments):
    ExternalReference reference, Register scratch) {
  if (reference.IsIsolateFieldId()) {
  if (options().enable_root_relative_access) {
  if (options().isolate_independent_code) {
  Move(scratch, reference);

// GetLabelAddress (fragment):
  int current_instr_code_object_relative_offset =
  sub(dest, dest, Operand(current_instr_code_object_relative_offset));

// Jump overloads (fragments):
  Jump(static_cast<intptr_t>(target), rmode, cond);
  if (isolate()->builtins()->IsBuiltinHandle(code, &builtin)) {
  Jump(static_cast<intptr_t>(code.address()), rmode, cond);
  Move(scratch, reference);

// Call overloads and builtin-entry helpers (fragments):
                          bool check_constant_pool) {
  mov(ip, Operand(target, rmode));
                          bool check_constant_pool) {
  if (isolate()->builtins()->IsBuiltinHandle(code, &builtin)) {
  Call(code.address(), rmode, cond, mode);
  add(target, target, Operand(IsolateData::builtin_entry_table_offset()));

// CallBuiltin / TailCallBuiltin (fragments):
  switch (options().builtin_call_jump_mode) {
  if (options().use_pc_relative_calls_and_jumps_for_mksnapshot) {
  switch (options().builtin_call_jump_mode) {
  if (options().use_pc_relative_calls_and_jumps_for_mksnapshot) {

#ifdef V8_ENABLE_LEAPTIERING

void MacroAssembler::LoadEntrypointFromJSDispatchTable(Register destination,
                                                       Register dispatch_handle,
  Move(scratch, ExternalReference::js_dispatch_table_address());
  static_assert(kJSDispatchHandleShift == 0);
  add(scratch, scratch,

// LoadCodeInstructionStart / CallJSFunction / JumpJSFunction (fragments):
                                          Register code_object,
                                      uint16_t argument_count) {
#if V8_ENABLE_LEAPTIERING
  LoadEntrypointFromJSDispatchTable(code, dispatch_handle, scratch);
#if V8_ENABLE_LEAPTIERING
                                      uint16_t argument_count) {
  mov(dispatch_handle_reg,
  static_assert(!JSDispatchTable::kSupportsCompaction);
  LoadEntrypointFromJSDispatchTable(code, dispatch_handle_reg, scratch);
#if V8_ENABLE_LEAPTIERING
  LoadEntrypointFromJSDispatchTable(code, dispatch_handle, scratch);

#ifdef V8_ENABLE_WEBASSEMBLY

void MacroAssembler::ResolveWasmCodePointer(Register target) {
  ExternalReference global_jump_table =
      ExternalReference::wasm_code_pointer_table();
  Move(scratch, global_jump_table);
  static_assert(sizeof(wasm::WasmCodePointerTableEntry) == 4);

void MacroAssembler::CallWasmCodePointer(Register target,
  ResolveWasmCodePointer(target);

// StoreReturnAddressAndCall (fragment):
  add(lr, pc, Operand(4));

// Stack-pointer alignment (fragment):
  uint32_t frame_alignment_mask = ~(static_cast<uint32_t>(frame_alignment) - 1);
  and_(sp, sp, Operand(frame_alignment_mask));
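// --- Illustrative sketch (not part of the original file): the rounding the
// two lines above perform. For a power-of-two alignment, ~(alignment - 1) is
// a mask that clears the low bits, rounding sp down to an aligned address.
// Plain C++, name hypothetical:
constexpr uint32_t AlignDown(uint32_t sp, uint32_t alignment) {
  return sp & ~(alignment - 1);  // e.g. alignment 8 -> mask 0xFFFFFFF8
}
static_assert(AlignDown(0x1007, 8) == 0x1000, "rounds down, never up");
static_assert(AlignDown(0x1000, 8) == 0x1000, "already aligned is unchanged");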
  mov(scratch, Operand(smi));

// PushArray (fragments):
                               PushArrayOrder order) {
  mov(counter, Operand(0));
  add(counter, counter, Operand(1));
  sub(counter, counter, Operand(1), SetCC);

// Move overloads (fragments):
  mov(dst, Operand(smi));
  mov(dst, Operand(value));
  if (reference.IsIsolateFieldId()) {
  if (options().isolate_independent_code) {
  CHECK(!reference.IsIsolateFieldId());
  mov(dst, Operand(reference));
  vmov(dst, src, cond);
  vmov(dst, src, cond);

// MovePair (fragment):
  } else if (dst1 != src0) {

// Swap overloads (fragments):
  DCHECK(srcdst0 != srcdst1);
  mov(scratch, srcdst0);
  mov(srcdst0, srcdst1);
  mov(srcdst1, scratch);

  DCHECK(srcdst0 != srcdst1);
  vswp(srcdst0, srcdst1);
  DwVfpRegister scratch = temps.AcquireD();
  vmov(scratch, srcdst0);
  vmov(srcdst0, srcdst1);
  vmov(srcdst1, scratch);

  DCHECK(srcdst0 != srcdst1);
  vswp(srcdst0, srcdst1);

// Mls (fragment):
  CpuFeatureScope scope(this, ARMv7);
  mls(dst, src1, src2, srcA, cond);

// And (fragments):
  if (!src2.IsRegister() && !src2.MustOutputRelocInfo(this) &&
      src2.immediate() == 0) {
  } else if (!(src2.InstructionsRequired(this) == 1) &&
             !src2.MustOutputRelocInfo(this) &&
  CpuFeatureScope scope(this, ARMv7);

// Ubfx / Sbfx / Bfc (fragments): without ARMv7's bitfield instructions, the
// fields are extracted and cleared with masks and shifts.
  int mask = (1u << (width + lsb)) - 1u - ((1u << lsb) - 1u);
  CpuFeatureScope scope(this, ARMv7);
  ubfx(dst, src1, lsb, width, cond);

  int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
  int shift_up = 32 - lsb - width;
  int shift_down = lsb + shift_up;
  if (shift_down != 0) {
  CpuFeatureScope scope(this, ARMv7);
  sbfx(dst, src1, lsb, width, cond);

  int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
  CpuFeatureScope scope(this, ARMv7);
  Move(dst, src, cond);
  bfc(dst, lsb, width, cond);
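// --- Illustrative sketch (not part of the original file): the mask and shift
// arithmetic used by Ubfx/Sbfx/Bfc above when the ARMv7 ubfx/sbfx/bfc
// instructions are unavailable. The mask selects bits [lsb, lsb + width);
// sbfx is emulated by shifting the field to the top and arithmetically
// shifting back, which sign-extends it. Names hypothetical; assumes
// lsb + width < 32 and an arithmetic right shift for signed values.
constexpr uint32_t BitfieldMask(int lsb, int width) {
  return ((1u << (width + lsb)) - 1u) - ((1u << lsb) - 1u);
}
static_assert(BitfieldMask(4, 8) == 0x00000FF0u, "bits 4..11 selected");

constexpr int32_t SignExtractEmulated(uint32_t src, int lsb, int width) {
  int shift_up = 32 - lsb - width;  // move the field to the top bits
  int shift_down = lsb + shift_up;  // == 32 - width; shift back, sign-extending
  return static_cast<int32_t>(src << shift_up) >> shift_down;
}
static_assert(SignExtractEmulated(0x00000F00u, 8, 4) == -1,
              "an all-ones field sign-extends to -1");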
#if V8_ENABLE_WEBASSEMBLY
  if (mode == StubCallMode::kCallWasmRuntimeStub) {

// MoveObjectAndSlot (fragments):
                                       Register object, Operand offset) {
  if (dst_slot != object) {
  Move(dst_object, object);
  if (!offset.IsRegister() || (offset.rm() != dst_object)) {
  Move(dst_object, dst_slot);
  add(dst_slot, dst_slot, dst_object);
  sub(dst_object, dst_slot, dst_object);

// RecordWrite (fragments):
  Check(eq, AbortReason::kWrongAddressOrValuePassedToRecordWrite);
  if (v8_flags.disable_write_barriers) {

// PushCommonFrame / PushStandardFrame (fragments):
  if (marker_reg.is_valid()) {
    if (marker_reg.code() > fp.code()) {
  mov(fp, Operand(sp));
  stm(db_w, sp, {marker_reg, fp, lr});

  DCHECK(!function_reg.is_valid() || function_reg.code() < cp.code());
  stm(db_w, sp, {function_reg, cp, fp, lr});

// VFPCanonicalizeNaN and the VFPCompareAndSetFlags / VFPCompareAndLoadFlags
// overloads (fragments):
                                       const DwVfpRegister src,
                                            const SwVfpRegister src2,
                                            const DwVfpRegister src2,
                                             const SwVfpRegister src2,
                                             const Register fpscr_flags,
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
                                             const Register fpscr_flags,
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
                                             const DwVfpRegister src2,
                                             const Register fpscr_flags,
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
                                             const Register fpscr_flags,
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);

// VmovHigh / VmovLow overloads (fragments):
  if (src.code() < 16) {
  vmov(dst, loc.high());
  if (dst.code() < 16) {
  vmov(loc.high(), src);
  if (src.code() < 16) {
  vmov(dst, loc.low());
  if (dst.code() < 16) {
  vmov(loc.low(), src);

// VmovExtended (fragments):
  if (src_code & 0x1) {
  if (dst_code & 0x1) {
  if (src_code == dst_code) return;
  int dst_offset = dst_code & 1;
  int src_offset = src_code & 1;
  DwVfpRegister scratch = temps.AcquireD();
  if (src_offset == dst_offset) {
  vdup(Neon32, scratch, src_d_reg, src_offset);
  src_d_reg = scratch;
  src_offset = dst_offset ^ 1;
  if (dst_d_reg == src_d_reg) {
  if (dst_d_reg == src_d_reg) {
  LowDwVfpRegister d_scratch = temps.AcquireLowD();
  LowDwVfpRegister d_scratch2 = temps.AcquireLowD();
  int s_scratch_code = d_scratch.low().code();
  int s_scratch_code2 = d_scratch2.low().code();
  vmov(d_scratch, dst_d_reg);
  vmov(dst_d_reg, d_scratch);
  vmov(d_scratch, src_d_reg);
  vmov(d_scratch, src_d_reg);
  vmov(d_scratch2, dst_d_reg);
  vmov(dst_d_reg, d_scratch2);
  LowDwVfpRegister scratch = temps.AcquireLowD();
  int dst_s_code = scratch.low().code() + (dst_code & 1);
  LowDwVfpRegister scratch = temps.AcquireLowD();
  int src_s_code = scratch.low().code() + (src_code & 1);

// ExtractLane / ReplaceLane / LoadLane / StoreLane (fragments):
  int byte = lane << size;
  int double_lane = double_byte >> size;
  DwVfpRegister double_source =
  vmov(dt, dst, double_source, double_lane);

  int byte = lane << size;
  int double_lane = double_byte >> size;
  vmov(dt, dst, src, double_lane);

  int s_code = src.code() * 4 + lane;
  vmov(dst, double_dst);

  int byte = lane << size;
  int double_lane = double_byte >> size;
  DwVfpRegister double_dst =
  vmov(dt, double_dst, double_lane, src_lane);

                                 SwVfpRegister src_lane, int lane) {
  int s_code = dst.code() * 4 + lane;
                                 DwVfpRegister src_lane, int lane) {
  vmov(double_dst, src_lane);

                              uint8_t lane, NeonMemOperand src) {
  vld1s(sz, dst_list, lane, src);
                              uint8_t lane, NeonMemOperand dst) {
  vst1s(sz, src_list, lane, dst);
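// --- Illustrative sketch (not part of the original file): how the lane
// arithmetic above maps a Q-register lane to a (D register, lane) pair.
// With size = NeonSz(dt) (0/1/2 for 8/16/32-bit lanes), lane << size is the
// lane's byte offset within the 16-byte Q register; bytes 8 and up live in
// the high D half. Names hypothetical:
struct DLaneRef { int double_word; int double_lane; };
constexpr DLaneRef MapQLaneToD(int lane, int size) {
  int byte = lane << size;      // byte offset inside the Q register
  int double_word = byte >> 3;  // 0 = low D register, 1 = high D register
  int double_byte = byte & 7;   // byte offset within that D register
  return DLaneRef{double_word, double_byte >> size};
}
static_assert(MapQLaneToD(3, 2).double_word == 1,
              "lane 3 of 4 x 32-bit lanes sits in the high D register");
static_assert(MapQLaneToD(3, 2).double_lane == 1,
              "...as the second lane of that D register");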
// LslPair (register shift, fragments):
                             Register src_low, Register src_high,
  Register scratch = temps.Acquire();
  b(gt, &less_than_32);
  lsl(dst_high, src_low, Operand(scratch));
  mov(dst_low, Operand(0));
  bind(&less_than_32);
  lsl(dst_high, src_high, Operand(shift));
  orr(dst_high, dst_high, Operand(src_low, LSR, scratch));
  lsl(dst_low, src_low, Operand(shift));

// LslPair (immediate shift, fragments):
                             Register src_low, Register src_high,
    Move(dst_high, src_high);
    Move(dst_low, src_low);
  } else if (shift == 32) {
    Move(dst_high, src_low);
    Move(dst_low, Operand(0));
  } else if (shift >= 32) {
    lsl(dst_high, src_low, Operand(shift));
    mov(dst_low, Operand(0));
    lsl(dst_high, src_high, Operand(shift));
    orr(dst_high, dst_high, Operand(src_low, LSR, 32 - shift));
    lsl(dst_low, src_low, Operand(shift));

// LsrPair (register shift, fragments):
                             Register src_low, Register src_high,
  Register scratch = temps.Acquire();
  b(gt, &less_than_32);
  lsr(dst_low, src_high, Operand(scratch));
  mov(dst_high, Operand(0));
  bind(&less_than_32);
  lsr(dst_low, src_low, Operand(shift));
  orr(dst_low, dst_low, Operand(src_high, LSL, scratch));
  lsr(dst_high, src_high, Operand(shift));

// LsrPair (immediate shift, fragments):
                             Register src_low, Register src_high,
    mov(dst_low, src_high);
    mov(dst_high, Operand(0));
  } else if (shift > 32) {
    lsr(dst_low, src_high, Operand(shift));
    mov(dst_high, Operand(0));
  } else if (shift == 0) {
    Move(dst_low, src_low);
    Move(dst_high, src_high);
    lsr(dst_low, src_low, Operand(shift));
    orr(dst_low, dst_low, Operand(src_high, LSL, 32 - shift));
    lsr(dst_high, src_high, Operand(shift));

// AsrPair (register shift, fragments):
                             Register src_low, Register src_high,
  Register scratch = temps.Acquire();
  b(gt, &less_than_32);
  asr(dst_low, src_high, Operand(scratch));
  asr(dst_high, src_high, Operand(31));
  bind(&less_than_32);
  lsr(dst_low, src_low, Operand(shift));
  orr(dst_low, dst_low, Operand(src_high, LSL, scratch));
  asr(dst_high, src_high, Operand(shift));

// AsrPair (immediate shift, fragments):
                             Register src_low, Register src_high,
    mov(dst_low, src_high);
    asr(dst_high, src_high, Operand(31));
  } else if (shift > 32) {
    asr(dst_low, src_high, Operand(shift));
    asr(dst_high, src_high, Operand(31));
  } else if (shift == 0) {
    Move(dst_low, src_low);
    Move(dst_high, src_high);
    lsr(dst_low, src_low, Operand(shift));
    orr(dst_low, dst_low, Operand(src_high, LSL, 32 - shift));
    asr(dst_high, src_high, Operand(shift));
// StubPrologue / EnterFrame (fragments):
  Register scratch = temps.Acquire();
                                bool load_constant_pool_pointer_reg) {
  scratch = temps.Acquire();
#if V8_ENABLE_WEBASSEMBLY

// AllocateStackSpace (register-sized count, fragments): moves sp down one
// stack page at a time.
  DwVfpRegister scratch = temps.AcquireD();
  Label touch_next_page;
  bind(&touch_next_page);
  sub(sp, sp, Operand(kStackPageSize));
  sub(bytes_scratch, bytes_scratch, Operand(kStackPageSize));
  cmp(bytes_scratch, Operand(kStackPageSize));
  b(gt, &touch_next_page);
  sub(sp, sp, bytes_scratch);

// AllocateStackSpace (immediate count, fragments):
  DwVfpRegister scratch = no_dreg;
  while (bytes > kStackPageSize) {
    scratch = temps.AcquireD();
    sub(sp, sp, Operand(kStackPageSize));
    bytes -= kStackPageSize;
  if (bytes == 0) return;
  sub(sp, sp, Operand(bytes));

// EnterExitFrame (fragments):
  DCHECK(frame_type == StackFrame::EXIT ||
         frame_type == StackFrame::BUILTIN_EXIT ||
         frame_type == StackFrame::API_ACCESSOR_EXIT ||
         frame_type == StackFrame::API_CALLBACK_EXIT);
  using ER = ExternalReference;
  ER c_entry_fp_address =
      ER::Create(IsolateAddressId::kCEntryFPAddress, isolate());
  ER context_address = ER::Create(IsolateAddressId::kContextAddress, isolate());

// ActivationFrameAlignment (fragment):
  return v8_flags.sim_stack_alignment;

// LeaveExitFrame (fragments):
  using ER = ExternalReference;
  ER context_address = ER::Create(IsolateAddressId::kContextAddress, isolate());
  ER c_entry_fp_address =
      ER::Create(IsolateAddressId::kCEntryFPAddress, isolate());
  mov(sp, Operand(fp));

// LoadStackLimit (fragment):
      : IsolateData::jslimit_offset();

// StackOverflowCheck (fragments): scratch holds the stack limit on entry.
                                        Label* stack_overflow) {
  sub(scratch, sp, scratch);
  b(le, stack_overflow);
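// --- Illustrative sketch (not part of the original file): the test above.
// With the limit in scratch, sub(scratch, sp, scratch) computes the space
// left, and a signed branch also catches the already-overflowed (negative)
// case; the full check compares that space against the size of the arguments
// about to be pushed. Plain C++ analogue, names hypothetical:
inline bool WouldOverflowStack(uintptr_t sp, uintptr_t limit,
                               uintptr_t args_size_in_bytes) {
  intptr_t space_left = static_cast<intptr_t>(sp - limit);  // may be negative
  return space_left <= static_cast<intptr_t>(args_size_in_bytes);
}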
// InvokePrologue (fragments): pads missing arguments with undefined when the
// actual argument count is below the formal parameter count.
                                    Register actual_parameter_count,
  Label regular_invoke;
  DCHECK_EQ(expected_parameter_count, r2);
  sub(expected_parameter_count, expected_parameter_count,
      actual_parameter_count, SetCC);
  b(le, &regular_invoke);
  Label stack_overflow;
  Register num = r5, src = r6, dest = r9;
  mov(num, actual_parameter_count);
  LoadRoot(scratch, RootIndex::kUndefinedValue);
  sub(expected_parameter_count, expected_parameter_count, Operand(1), SetCC);
  bind(&stack_overflow);
  bind(&regular_invoke);

// CallDebugOnFunctionCall (fragments):
                                             Register expected_parameter_count,
                                             Register actual_parameter_count) {
  SmiTag(expected_parameter_count);
  Push(expected_parameter_count);
  SmiTag(actual_parameter_count);
  Push(actual_parameter_count);
  Pop(actual_parameter_count);
  Pop(expected_parameter_count);
  SmiUntag(expected_parameter_count);

// InvokeFunctionCode (fragments):
                                        Register expected_parameter_count,
                                        Register actual_parameter_count,
  Label debug_hook, continue_after_hook;
  ExternalReference debug_hook_active =
      ExternalReference::debug_hook_on_function_call_address(isolate());
  Move(r4, debug_hook_active);
  cmp(r4, Operand(0));
  bind(&continue_after_hook);
  LoadRoot(r3, RootIndex::kUndefinedValue);
  InvokePrologue(expected_parameter_count, actual_parameter_count, type);
  constexpr int unused_argument_count = 0;
                          actual_parameter_count);
  b(&continue_after_hook);

// InvokeFunctionWithNewTarget / InvokeFunction (fragments):
    Register fun, Register new_target, Register actual_parameter_count,
                          SharedFunctionInfo::kFormalParameterCountOffset));
                                    Register expected_parameter_count,
                                    Register actual_parameter_count,
                     actual_parameter_count, type);

// CompareObjectType / CompareInstanceType (fragments):
  Register scratch = temps.Acquire();
  const Register temp = type_reg == no_reg ? temps.Acquire() : type_reg;
                                           Register type_reg, Register scratch,
  cmp(type_reg, Operand(type));

// CompareRange (fragments): biases by lower_limit so a single unsigned
// compare tests the whole range.
                                 unsigned lower_limit, unsigned higher_limit) {
  if (lower_limit != 0) {
    sub(scratch, value, Operand(lower_limit));
    cmp(scratch, Operand(higher_limit - lower_limit));
    cmp(value, Operand(higher_limit));

// CompareInstanceTypeRange / JumpIfIsInRange (fragments):
  CompareRange(type_reg, scratch, lower_limit, higher_limit);
  Register scratch = temps.Acquire();
                                unsigned lower_limit, unsigned higher_limit,
                                Label* on_in_range) {
  CompareRange(value, scratch, lower_limit, higher_limit);

// TryInlineTruncateDoubleToI / TruncateDoubleToI (fragments):
                                               DwVfpRegister double_input,
  if (temps.CanAcquireVfp<SwVfpRegister>()) {
    single_scratch = temps.AcquireS();
  LowDwVfpRegister double_scratch =
  single_scratch = double_scratch.low();
  Register scratch = temps.Acquire();
  cmp(scratch, Operand(0x7FFFFFFE));
                                       DwVfpRegister double_input,
#if V8_ENABLE_WEBASSEMBLY
  if (stub_mode == StubCallMode::kCallWasmRuntimeStub) {

#ifndef V8_ENABLE_LEAPTIERING

// TailCallOptimizedCodeSlot (fragments):
void TailCallOptimizedCodeSlot(MacroAssembler* masm,
                               Register optimized_code_entry,
  Label heal_optimized_code_slot;
                            &heal_optimized_code_slot);
  __ ldr(optimized_code_entry,
  __ b(ne, &heal_optimized_code_slot);
  __ bind(&heal_optimized_code_slot);

#ifdef V8_ENABLE_DEBUG_CODE
  Assert(eq, AbortReason::kExpectedFeedbackCell);
  Assert(eq, AbortReason::kExpectedFeedbackVector);

// ReplaceClosureCodeWithOptimizedCode (fragment):
    Register optimized_code, Register closure) {
#ifdef V8_ENABLE_LEAPTIERING

#ifndef V8_ENABLE_LEAPTIERING

// LoadFeedbackVectorFlagsAndCheckIfNeedsProcessing /
// LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing (fragments):
    Register flags, Register feedback_vector, CodeKind current_code_kind) {
  if (current_code_kind != CodeKind::MAGLEV) {
  tst(flags, Operand(kFlagsMask));
    Register flags, Register feedback_vector, CodeKind current_code_kind,
    Label* flags_need_processing) {
                                                 flags_need_processing);

// OptimizeCodeOrTailCallOptimizedCodeSlot (fragments):
    Register flags, Register feedback_vector) {
  Label maybe_has_optimized_code, maybe_needs_logging;
  b(eq, &maybe_needs_logging);
  bind(&maybe_needs_logging);
  tst(flags, Operand(FeedbackVector::LogNextExecutionBit::kMask));
  b(eq, &maybe_has_optimized_code);
  bind(&maybe_has_optimized_code);
  ldr(optimized_code_entry,
      FieldMemOperand(feedback_vector,
                      FeedbackVector::kMaybeOptimizedCodeOffset));
  TailCallOptimizedCodeSlot(this, optimized_code_entry, r6);

// CallRuntime / TailCallRuntime (fragments):
                                 int num_arguments) {
  CHECK(f->nargs < 0 || f->nargs == num_arguments);
  mov(r0, Operand(num_arguments));
  bool switch_to_central_stack = options().is_wasm;
  if (function->nargs >= 0) {
  mov(r0, Operand(function->nargs));

// JumpToExternalReference (fragments):
                                             bool builtin_exit_frame) {
#if defined(__thumb__)

// LoadWeakValue (fragments):
                                   Label* target_if_cleared) {
  b(eq, target_if_cleared);

// EmitIncrementCounter / EmitDecrementCounter (fragments):
                                          Register scratch2) {
  if (v8_flags.native_code_counters && counter->Enabled()) {
  add(scratch1, scratch1, Operand(value));
                                          Register scratch2) {
  if (v8_flags.native_code_counters && counter->Enabled()) {
  sub(scratch1, scratch1, Operand(value));

#ifdef V8_ENABLE_DEBUG_CODE

// Assert* helpers (fragments):
  Check(eq, AbortReason::kOperandIsNotAMap);
  Check(ne, AbortReason::kOperandIsASmiAndNotAConstructor);
  tst(object, Operand(Map::Bits1::IsConstructorBit::kMask));
  Check(ne, AbortReason::kOperandIsNotAConstructor);
  Check(ne, AbortReason::kOperandIsASmiAndNotAFunction);
                           LAST_JS_FUNCTION_TYPE);
  Check(ls, AbortReason::kOperandIsNotAFunction);
  Check(ne, AbortReason::kOperandIsASmiAndNotAFunction);
  Check(ls, AbortReason::kOperandIsNotACallableFunction);
  Check(ne, AbortReason::kOperandIsASmiAndNotABoundFunction);
  Check(eq, AbortReason::kOperandIsNotABoundFunction);
  Check(ne, AbortReason::kOperandIsASmiAndNotAGeneratorObject);
                           FIRST_JS_GENERATOR_OBJECT_TYPE,
                           LAST_JS_GENERATOR_OBJECT_TYPE);
  Check(ls, AbortReason::kOperandIsNotAGeneratorObject);
  Label done_checking;
  b(eq, &done_checking);
  Assert(eq, AbortReason::kExpectedUndefinedOrCell);
  bind(&done_checking);

// Abort (fragments):
  Abort(abort_reason);
  Move(r1, ExternalReference::abort_with_reason());

// LoadFeedbackVector (fragments):
                                        Register scratch, Label* fbv_undef) {
  cmp(scratch, Operand(FEEDBACK_VECTOR_TYPE));
  LoadRoot(dst, RootIndex::kUndefinedValue);

// Map / native-context / isolate-field loads (fragments):
      dst, Map::kConstructorOrBackPointerOrNativeContextOffset));
  ExternalReference isolate_root = ExternalReference::isolate_root(isolate());

// SmiTag (fragment): "add rd, rn, rn" is a shift left by one.
  add(dst, src, Operand(src), s);
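// --- Illustrative sketch (not part of the original file): on 32-bit V8 a
// Smi is the 31-bit payload shifted left by one with tag bit 0, so the
// add(dst, src, Operand(src), s) above is the tagging shift (add rd, rn, rn
// == lsl #1), with SetCC available to detect payload overflow.
constexpr int32_t SmiTagValue(int32_t value) {
  return value * 2;  // same bits as value << 1 for in-range payloads
}
static_assert(SmiTagValue(21) == 42, "tagged Smi is payload * 2");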
// JumpIfNotSmi (fragment):
  b(ne, not_smi_label);

// CheckFor32DRegs (fragments):
  Move(scratch, ExternalReference::cpu_features());
  tst(scratch, Operand(1u << VFP32DREGS));

// Float{Max,Min}Helper and their out-of-line counterparts (fragments):
template <typename T>
                                   Label* out_of_line) {
  CpuFeatureScope scope(this, ARMv8);
  bool aliased_result_reg = result == left || result == right;

template <typename T>

template <typename T>
                                   Label* out_of_line) {
  CpuFeatureScope scope(this, ARMv8);
  bool aliased_result_reg = result == left || result == right;

template <typename T>

// FloatMin / FloatMax instantiations for SwVfpRegister and DwVfpRegister
// (fragments):
                          SwVfpRegister right, Label* out_of_line) {
                          SwVfpRegister right, Label* out_of_line) {
                          DwVfpRegister right, Label* out_of_line) {
                          DwVfpRegister right, Label* out_of_line) {
                                   SwVfpRegister right) {
                                   SwVfpRegister right) {
                                   DwVfpRegister right) {
                                   DwVfpRegister right) {

// CalculateStackPassedWords (fragments):
                                              int num_double_arguments) {
  int stack_passed_words = 0;
    stack_passed_words +=
  num_reg_arguments += 2 * num_double_arguments;
  return stack_passed_words;
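// --- Illustrative sketch (not part of the original file): the AAPCS
// counting done in CalculateStackPassedWords above. Hard-float passes the
// first kRegisterPassedArguments (4) word arguments in r0-r3 and the first
// kDoubleRegisterPassedArguments (8) doubles in d0-d7; soft-float instead
// burns two core registers per double. Everything beyond that spills to the
// stack. Name hypothetical:
inline int StackPassedWordsRef(int num_reg_args, int num_double_args,
                               bool hardfloat) {
  int words = 0;
  if (hardfloat) {
    if (num_double_args > 8) words += 2 * (num_double_args - 8);
  } else {
    num_reg_args += 2 * num_double_args;  // each double occupies 2 words
  }
  if (num_reg_args > 4) words += num_reg_args - 4;
  return words;
}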
// PrepareCallCFunction (fragments):
                                          int num_double_arguments,
  int stack_passed_arguments =
  if (!scratch.is_valid()) scratch = temps.Acquire();
  } else if (stack_passed_arguments > 0) {

// MovToFloatParameters (fragment):
                                          DwVfpRegister src2) {

// CallCFunction overloads (fragments):
                                   int num_reg_arguments,
                                   int num_double_arguments,
                                   Label* return_label) {
  Register scratch = temps.Acquire();
  Move(scratch, function);
  return CallCFunction(scratch, num_reg_arguments, num_double_arguments,
                       set_isolate_data_slots, return_label);

                                   int num_double_arguments,
                                   Label* return_label) {
  int frame_alignment_mask = frame_alignment - 1;
  Label alignment_as_expected;
  tst(sp, Operand(frame_alignment_mask));
  b(eq, &alignment_as_expected);
  bind(&alignment_as_expected);
  if (return_label) bind(return_label);
  int stack_passed_arguments =
  return call_pc_offset;

                                   Label* return_label) {
  return CallCFunction(function, num_arguments, 0, set_isolate_data_slots,
                                   Label* return_label) {
  return CallCFunction(function, num_arguments, 0, set_isolate_data_slots,

// CheckPageFlag (fragments):
                                   Label* condition_met) {
  Register scratch = temps.Acquire();
  b(cc, condition_met);

// GetRegisterThatIsNotOneOf (fragments):
                                   Register reg4, Register reg5,
  RegList regs = {reg1, reg2, reg3, reg4, reg5, reg6};
  for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
    int code = config->GetAllocatableGeneralCode(i);
    if (regs.has(candidate)) continue;

// ComputeCodeStartAddress / BailoutIfDeoptimized (fragments):
  Register scratch = temps.Acquire();
#ifdef V8_ENABLE_LEAPTIERING
  Assert(kZero, AbortReason::kInvalidDeoptimizedCode);

// I64x2BitMask (fragments):
  QwNeonRegister tmp1 = temps.AcquireQ();
  add(dst, dst, Operand(tmp, LSL, 1));

// I64x2Eq / I64x2Ne / I64x2GtS / I64x2GeS (fragments):
                            QwNeonRegister src2) {
  vand(dst, dst, scratch);
                            QwNeonRegister src2) {
  vorn(dst, dst, tmp);
                            QwNeonRegister src2) {
                            QwNeonRegister src2) {

// I64x2AllTrue / I64x2Abs (fragments):
  QwNeonRegister tmp = temps.AcquireQ();
  add(dst, dst, Operand(1));
  veor(dst, src, tmp);

// F64x2ConvertLowHelper and the conversions built on it (fragments):
using AssemblerFunc = void (Assembler::*)(DwVfpRegister, SwVfpRegister,

void F64x2ConvertLowHelper(Assembler* assm, QwNeonRegister dst,
                           QwNeonRegister src, AssemblerFunc convert_fn) {
  LowDwVfpRegister tmp = temps.AcquireLowD();
  assm->vmov(tmp, src_d);
                                         QwNeonRegister src) {
                                         QwNeonRegister src) {
                                         QwNeonRegister src) {

// Switch (fragments): range-checked jump-table dispatch.
                            int case_value_base, Label** labels,
  if (case_value_base != 0) {
    sub(value, value, Operand(case_value_base));
  cmp(value, Operand(num_labels));
  for (int i = 0; i < num_labels; ++i) {
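// --- Illustrative sketch (not part of the original file): the control flow
// of Switch above. Subtracting case_value_base first lets one unsigned
// bounds check cover both "below base" and "past the end" before indexing
// the branch table that the loop emits. Plain C++ analogue, names
// hypothetical:
inline void SwitchRef(int value, int case_value_base,
                      void (*const* cases)(), int num_cases,
                      void (*fallthrough)()) {
  unsigned index = static_cast<unsigned>(value - case_value_base);
  if (index < static_cast<unsigned>(num_cases)) {
    cases[index]();  // the emitted code branches through a table of b's
  } else {
    fallthrough();
  }
}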
// JumpIfCodeIsMarkedForDeoptimization / JumpIfCodeIsTurbofanned (fragments):
    Register code, Register scratch, Label* if_marked_for_deoptimization) {
  b(if_marked_for_deoptimization, ne);
                                             Label* if_turbofanned) {
  b(if_turbofanned, ne);

// TryLoadOptimizedOsrCode (fragments):
                                             Register feedback_vector,
  Label fallthrough, clear_slot;
  LoadWeakValue(scratch_and_result, scratch_and_result, &fallthrough);
  ldr(scratch_and_result,
  if (min_opt_level == CodeKind::TURBOFAN_JS) {
  Move(scratch_and_result, Operand(0));

// CallApiFunctionAndReturn (fragments):
                               Register function_address,
                               ExternalReference thunk_ref, Register thunk_arg,
                               int slots_to_drop_on_return,
  using ER = ExternalReference;
  Isolate* isolate = masm->isolate();
      ER::handle_scope_next_address(isolate), no_reg);
      ER::handle_scope_limit_address(isolate), no_reg);
      ER::handle_scope_level_address(isolate), no_reg);
  Register prev_next_address_reg = r4;
      scratch, scratch2, prev_next_address_reg, prev_limit_reg));
      scratch, scratch2, prev_next_address_reg, prev_limit_reg));
      scratch, scratch2, prev_next_address_reg, prev_limit_reg));
      "Allocate HandleScope in callee-save registers.");
  __ ldr(prev_next_address_reg, next_mem_op);
  __ ldr(prev_limit_reg, limit_mem_op);
  __ ldr(prev_level_reg, level_mem_op);
  __ add(scratch, prev_level_reg, Operand(1));
  __ str(scratch, level_mem_op);

  Label profiler_or_side_effects_check_enabled, done_api_call;
  if (with_profiling) {
    __ RecordComment("Check if profiler or side effects check is enabled");
    __ cmp(scratch, Operand(0));
    __ b(ne, &profiler_or_side_effects_check_enabled);
#ifdef V8_RUNTIME_CALL_STATS
    __ Move(scratch, ER::address_of_runtime_stats_flag());
    __ cmp(scratch, Operand(0));
    __ b(ne, &profiler_or_side_effects_check_enabled);

  Label propagate_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  __ ldr(return_value, return_value_operand);
      "No more valid handles (the result handle was the last one)."
      "Restore previous handle scope.");
  __ str(prev_next_address_reg, next_mem_op);
  __ ldr(scratch, level_mem_op);
  __ sub(scratch, scratch, Operand(1));
  __ cmp(scratch, prev_level_reg);
  __ Check(eq, AbortReason::kUnexpectedLevelAfterReturnFromApiCall);
  __ str(prev_level_reg, level_mem_op);
  __ ldr(scratch, limit_mem_op);
  __ cmp(scratch, prev_limit_reg);
  __ b(ne, &delete_allocated_handles);

  __ bind(&leave_exit_frame);
  Register argc_reg = prev_limit_reg;
  if (argc_operand != nullptr) {
    __ ldr(argc_reg, *argc_operand);
      "Check if the function scheduled an exception.");
  __ LoadRoot(scratch, RootIndex::kTheHoleValue);
      ER::exception_address(isolate), no_reg));
  __ cmp(scratch, scratch2);
  __ b(ne, &propagate_exception);
                   AbortReason::kAPICallReturnedInvalidObject);
  if (argc_operand == nullptr) {

  if (with_profiling) {
    __ bind(&profiler_or_side_effects_check_enabled);
    if (thunk_arg.is_valid()) {
          IsolateFieldId::kApiCallbackThunkArgument);
      __ str(thunk_arg, thunk_arg_mem_op);
    __ Move(scratch, thunk_ref);
    __ b(&done_api_call);

  __ bind(&propagate_exception);

      masm, "HandleScope limit has changed. Delete allocated extensions.");
  __ bind(&delete_allocated_handles);
  __ str(prev_limit_reg, limit_mem_op);
  Register saved_result = prev_limit_reg;
  __ mov(saved_result, return_value);
  __ mov(return_value, saved_result);
  __ jmp(&leave_exit_frame);
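// --- Illustrative sketch (not part of the original file): the HandleScope
// bookkeeping that CallApiFunctionAndReturn performs around the API call.
// Field and function names hypothetical:
struct HandleScopeData {
  void** next;   // next free handle slot
  void** limit;  // end of the current handle block
  int level;     // nesting depth
};

inline void EnterApiScope(HandleScopeData* d, HandleScopeData* saved) {
  *saved = *d;    // the three ldr's of next/limit/level above
  d->level += 1;  // add(scratch, prev_level_reg, Operand(1)); str(...)
}

// Returns false when the call grew the handle block, i.e. the slow path
// (the b(ne, &delete_allocated_handles) above) must free the extensions.
inline bool LeaveApiScope(HandleScopeData* d, const HandleScopeData* saved) {
  d->next = saved->next;
  d->level = saved->level;
  return d->limit == saved->limit;
}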