7#if V8_TARGET_ARCH_LOONG64 
   37#define __ ACCESS_MASM(masm) 
   42static inline bool IsZero(
const Operand& rk) {
 
   44    return rk.rm() == zero_reg;
 
   46    return rk.immediate() == 0;
 
   53                                                    Register exclusion3)
 const {
 
   56  RegList exclusions = {exclusion1, exclusion2, exclusion3};
 
   68                                    Register exclusion2, Register exclusion3) {
 
   72  RegList exclusions = {exclusion1, exclusion2, exclusion3};
 
   86                                   Register exclusion2, Register exclusion3) {
 
   94  RegList exclusions = {exclusion1, exclusion2, exclusion3};
 
  123  if (marker_reg.is_valid()) {
 
  134  if (function_reg.is_valid()) {
 
  141  Add_d(fp, sp, Operand(
offset));
 
  150                                      SmiCheck smi_check, SlotDescriptor slot) {
 
  172    Branch(&ok, 
eq, scratch, Operand(zero_reg));
 
  173    Abort(AbortReason::kUnalignedCellInWriteBarrier);
 
  185#ifdef V8_ENABLE_SANDBOX 
  186  srli_d(value, value, kSandboxedPointerShift);
 
  195#ifdef V8_ENABLE_SANDBOX 
  206#ifdef V8_ENABLE_SANDBOX 
  211  slli_d(scratch, scratch, kSandboxedPointerShift);
 
  212  St_d(scratch, dst_field_operand);
 
  221                                              Register isolate_root) {
 
  224#ifdef V8_ENABLE_SANDBOX 
  225  DCHECK(!tag_range.IsEmpty());
 
  228  Register external_table = temps.Acquire();
 
  229  if (isolate_root == 
no_reg) {
 
  235                  IsolateData::external_pointer_table_offset() +
 
  251  if (tag_range.Size() == 1) {
 
  256    SbxCheck(
eq, AbortReason::kExternalPointerTagMismatch, scratch,
 
  257             Operand(tag_range.first));
 
  272#ifdef V8_ENABLE_SANDBOX 
  281#ifdef V8_ENABLE_SANDBOX 
  291#ifdef V8_ENABLE_SANDBOX 
  305#ifdef V8_ENABLE_SANDBOX 
  309                    value, ExposedTrustedObject::kSelfIndirectPointerOffset));
 
  310  St_w(scratch, dst_field_operand);
 
  316#ifdef V8_ENABLE_SANDBOX 
  317void MacroAssembler::ResolveIndirectPointerHandle(Register 
destination,
 
  324    Label is_trusted_pointer_handle, done;
 
  330    bind(&is_trusted_pointer_handle);
 
  334  } 
else if (tag == kCodeIndirectPointerTag) {
 
  341void MacroAssembler::ResolveTrustedPointerHandle(Register 
destination,
 
  360void MacroAssembler::ResolveCodePointerHandle(Register 
destination,
 
  365  LoadCodePointerTableBase(table);
 
  375void MacroAssembler::LoadCodeEntrypointViaCodePointer(Register 
destination,
 
  382  LoadCodePointerTableBase(scratch);
 
  388    li(scratch, Operand(tag));
 
  393void MacroAssembler::LoadCodePointerTableBase(Register 
destination) {
 
  394#ifdef V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES 
  398       ExternalReference::code_pointer_table_base_address(
isolate()));
 
  409  li(
destination, ExternalReference::global_code_pointer_table_base_address());
 
  414#ifdef V8_ENABLE_LEAPTIERING 
  415void MacroAssembler::LoadEntrypointFromJSDispatchTable(Register 
destination,
 
  416                                                       Register dispatch_handle,
 
  422  li(scratch, ExternalReference::js_dispatch_table_address());
 
  423  srli_d(index, dispatch_handle, kJSDispatchHandleShift);
 
  429void MacroAssembler::LoadEntrypointFromJSDispatchTable(
 
  434  li(scratch, ExternalReference::js_dispatch_table_address());
 
  441  static_assert(!JSDispatchTable::kSupportsCompaction);
 
  442  int offset = JSDispatchTable::OffsetOfEntry(dispatch_handle) +
 
  443               JSDispatchEntry::kEntrypointOffset;
 
  447void MacroAssembler::LoadParameterCountFromJSDispatchTable(
 
  448    Register 
destination, Register dispatch_handle, Register scratch) {
 
  453  li(scratch, ExternalReference::js_dispatch_table_address());
 
  454  srli_d(index, dispatch_handle, kJSDispatchHandleShift);
 
  457  static_assert(JSDispatchEntry::kParameterCountMask == 0xffff);
 
  461void MacroAssembler::LoadEntrypointAndParameterCountFromJSDispatchTable(
 
  462    Register entrypoint, Register 
parameter_count, Register dispatch_handle,
 
  468  li(scratch, ExternalReference::js_dispatch_table_address());
 
  469  srli_d(index, dispatch_handle, kJSDispatchHandleShift);
 
  472  Ld_d(entrypoint, 
MemOperand(scratch, JSDispatchEntry::kEntrypointOffset));
 
  473  static_assert(JSDispatchEntry::kParameterCountMask == 0xffff);
 
  475        MemOperand(scratch, JSDispatchEntry::kCodeObjectOffset));
 
  482#ifdef V8_ENABLE_SANDBOX 
  560#if V8_ENABLE_WEBASSEMBLY 
  561  if (mode == StubCallMode::kCallWasmRuntimeStub) {
 
  574                                       Register 
object, Operand 
offset) {
 
  582  if (dst_slot != 
object) {
 
  583    Add_d(dst_slot, 
object, 
offset);
 
  584    mov(dst_object, 
object);
 
  592  if (
offset.IsImmediate() || (
offset.rm() != dst_object)) {
 
  593    mov(dst_object, dst_slot);
 
  594    Add_d(dst_slot, dst_slot, 
offset);
 
  603  Add_d(dst_slot, dst_slot, dst_object);
 
  604  Sub_d(dst_object, dst_slot, dst_object);
 
  615                                 SlotDescriptor slot) {
 
  621    Add_d(scratch, 
object, 
offset);
 
  622    if (slot.contains_indirect_pointer()) {
 
  624                               slot.indirect_pointer_tag());
 
  626      DCHECK(slot.contains_direct_pointer());
 
  629    Assert(
eq, AbortReason::kWrongAddressOrValuePassedToRecordWrite, scratch,
 
  633  if (
v8_flags.disable_write_barriers) {
 
  659  if (slot.contains_direct_pointer()) {
 
  661    Add_d(slot_address, 
object, 
offset);
 
  665    DCHECK(slot.contains_indirect_pointer());
 
  667                               slot.indirect_pointer_tag());
 
  679void MacroAssembler::Add_w(Register rd, Register rj, 
const Operand& rk) {
 
  681    add_w(rd, rj, rk.rm());
 
  683    if (is_int12(rk.immediate()) && !
MustUseReg(rk.rmode())) {
 
  691      add_w(rd, rj, scratch);
 
  696void MacroAssembler::Add_d(Register rd, Register rj, 
const Operand& rk) {
 
  698    add_d(rd, rj, rk.rm());
 
  700    if (is_int12(rk.immediate()) && !
MustUseReg(rk.rmode())) {
 
  708      add_d(rd, rj, scratch);
 
  713void MacroAssembler::Sub_w(Register rd, Register rj, 
const Operand& rk) {
 
  715    sub_w(rd, rj, rk.rm());
 
  717    DCHECK(is_int32(rk.immediate()));
 
  718    if (is_int12(-rk.immediate()) && !
MustUseReg(rk.rmode())) {
 
  725      if (-rk.immediate() >> 12 == 0 && !
MustUseReg(rk.rmode())) {
 
  727        li(scratch, -rk.immediate());
 
  728        add_w(rd, rj, scratch);
 
  732        sub_w(rd, rj, scratch);
 
  738void MacroAssembler::Sub_d(Register rd, Register rj, 
const Operand& rk) {
 
  740    sub_d(rd, rj, rk.rm());
 
  741  } 
else if (is_int12(-rk.immediate()) && !
MustUseReg(rk.rmode())) {
 
  747    if (li_neg_count < li_count && !
MustUseReg(rk.rmode())) {
 
  749      DCHECK(rk.immediate() != std::numeric_limits<int32_t>::min());
 
  752      li(scratch, Operand(-rk.immediate()));
 
  753      add_d(rd, rj, scratch);
 
  759      sub_d(rd, rj, scratch);
 
  764void MacroAssembler::Mul_w(Register rd, Register rj, 
const Operand& rk) {
 
  766    mul_w(rd, rj, rk.rm());
 
  773    mul_w(rd, rj, scratch);
 
  777void MacroAssembler::Mulh_w(Register rd, Register rj, 
const Operand& rk) {
 
  790void MacroAssembler::Mulh_wu(Register rd, Register rj, 
const Operand& rk) {
 
  803void MacroAssembler::Mul_d(Register rd, Register rj, 
const Operand& rk) {
 
  805    mul_d(rd, rj, rk.rm());
 
  812    mul_d(rd, rj, scratch);
 
  816void MacroAssembler::Mulh_d(Register rd, Register rj, 
const Operand& rk) {
 
  829void MacroAssembler::Mulh_du(Register rd, Register rj, 
const Operand& rk) {
 
  842void MacroAssembler::Div_w(Register rd, Register rj, 
const Operand& rk) {
 
  844    div_w(rd, rj, rk.rm());
 
  851    div_w(rd, rj, scratch);
 
  855void MacroAssembler::Mod_w(Register rd, Register rj, 
const Operand& rk) {
 
  857    mod_w(rd, rj, rk.rm());
 
  864    mod_w(rd, rj, scratch);
 
  868void MacroAssembler::Mod_wu(Register rd, Register rj, 
const Operand& rk) {
 
  881void MacroAssembler::Div_d(Register rd, Register rj, 
const Operand& rk) {
 
  883    div_d(rd, rj, rk.rm());
 
  890    div_d(rd, rj, scratch);
 
  894void MacroAssembler::Div_wu(Register rd, Register rj, 
const Operand& rk) {
 
  907void MacroAssembler::Div_du(Register rd, Register rj, 
const Operand& rk) {
 
  920void MacroAssembler::Mod_d(Register rd, Register rj, 
const Operand& rk) {
 
  922    mod_d(rd, rj, rk.rm());
 
  929    mod_d(rd, rj, scratch);
 
  933void MacroAssembler::Mod_du(Register rd, Register rj, 
const Operand& rk) {
 
  948    and_(rd, rj, rk.rm());
 
  950    if (is_uint12(rk.immediate()) && !
MustUseReg(rk.rmode())) {
 
  951      andi(rd, rj, 
static_cast<int32_t>(rk.immediate()));
 
  958      and_(rd, rj, scratch);
 
  965    or_(rd, rj, rk.rm());
 
  967    if (is_uint12(rk.immediate()) && !
MustUseReg(rk.rmode())) {
 
  968      ori(rd, rj, 
static_cast<int32_t>(rk.immediate()));
 
  975      or_(rd, rj, scratch);
 
  982    xor_(rd, rj, rk.rm());
 
  984    if (is_uint12(rk.immediate()) && !
MustUseReg(rk.rmode())) {
 
  985      xori(rd, rj, 
static_cast<int32_t>(rk.immediate()));
 
  992      xor_(rd, rj, scratch);
 
  997void MacroAssembler::Nor(Register rd, Register rj, 
const Operand& rk) {
 
  999    nor(rd, rj, rk.rm());
 
 1003    Register scratch = temps.Acquire();
 
 1006    nor(rd, rj, scratch);
 
 1010void MacroAssembler::Andn(Register rd, Register rj, 
const Operand& rk) {
 
 1012    andn(rd, rj, rk.rm());
 
 1016    Register scratch = temps.Acquire();
 
 1019    andn(rd, rj, scratch);
 
 1025    orn(rd, rj, rk.rm());
 
 1029    Register scratch = temps.Acquire();
 
 1032    orn(rd, rj, scratch);
 
 1038  sub_d(rj, zero_reg, rk.rm());
 
 1041void MacroAssembler::Slt(Register rd, Register rj, 
const Operand& rk) {
 
 1043    slt(rd, rj, rk.rm());
 
 1045    if (is_int12(rk.immediate()) && !
MustUseReg(rk.rmode())) {
 
 1046      slti(rd, rj, 
static_cast<int32_t>(rk.immediate()));
 
 1051      Register scratch = temps.Acquire();
 
 1054      slt(rd, rj, scratch);
 
 1059void MacroAssembler::Sltu(Register rd, Register rj, 
const Operand& rk) {
 
 1061    sltu(rd, rj, rk.rm());
 
 1063    if (is_int12(rk.immediate()) && !
MustUseReg(rk.rmode())) {
 
 1069      Register scratch = temps.Acquire();
 
 1072      sltu(rd, rj, scratch);
 
 1077void MacroAssembler::Sle(Register rd, Register rj, 
const Operand& rk) {
 
 1079    slt(rd, rk.rm(), rj);
 
 1081    if (rk.immediate() == 0 && !
MustUseReg(rk.rmode())) {
 
 1082      slt(rd, zero_reg, rj);
 
 1086      Register scratch = temps.Acquire();
 
 1090      slt(rd, scratch, rj);
 
 1096void MacroAssembler::Sleu(Register rd, Register rj, 
const Operand& rk) {
 
 1098    sltu(rd, rk.rm(), rj);
 
 1100    if (rk.immediate() == 0 && !
MustUseReg(rk.rmode())) {
 
 1101      sltu(rd, zero_reg, rj);
 
 1105      Register scratch = temps.Acquire();
 
 1109      sltu(rd, scratch, rj);
 
 1115void MacroAssembler::Sge(Register rd, Register rj, 
const Operand& rk) {
 
 1120void MacroAssembler::Sgeu(Register rd, Register rj, 
const Operand& rk) {
 
 1125void MacroAssembler::Sgt(Register rd, Register rj, 
const Operand& rk) {
 
 1127    slt(rd, rk.rm(), rj);
 
 1129    if (rk.immediate() == 0 && !
MustUseReg(rk.rmode())) {
 
 1130      slt(rd, zero_reg, rj);
 
 1134      Register scratch = temps.Acquire();
 
 1138      slt(rd, scratch, rj);
 
 1143void MacroAssembler::Sgtu(Register rd, Register rj, 
const Operand& rk) {
 
 1145    sltu(rd, rk.rm(), rj);
 
 1147    if (rk.immediate() == 0 && !
MustUseReg(rk.rmode())) {
 
 1148      sltu(rd, zero_reg, rj);
 
 1152      Register scratch = temps.Acquire();
 
 1156      sltu(rd, scratch, rj);
 
 1161void MacroAssembler::Rotr_w(Register rd, Register rj, 
const Operand& rk) {
 
 1165    int64_t ror_value = rk.immediate() % 32;
 
 1166    if (ror_value < 0) {
 
 1173void MacroAssembler::Rotr_d(Register rd, Register rj, 
const Operand& rk) {
 
 1177    int64_t dror_value = rk.immediate() % 64;
 
 1178    if (dror_value < 0) dror_value += 64;
 
 1184  DCHECK(sa >= 1 && sa <= 31);
 
 1189    Register tmp = rd == rk ? temps.Acquire() : rd;
 
 1197  DCHECK(sa >= 1 && sa <= 63);
 
 1202    Register tmp = rd == rk ? temps.Acquire() : rd;
 
 1213  DCHECK(operand_size == 4 || operand_size == 8);
 
 1214  if (operand_size == 4) {
 
 1225  if (source.hasIndexReg()) {
 
 1226    ldx_b(rd, source.base(), source.index());
 
 1228    ld_b(rd, source.base(), source.offset());
 
 1235  if (source.hasIndexReg()) {
 
 1236    ldx_bu(rd, source.base(), source.index());
 
 1238    ld_bu(rd, source.base(), source.offset());
 
 1245  if (source.hasIndexReg()) {
 
 1246    stx_b(rd, source.base(), source.index());
 
 1248    st_b(rd, source.base(), source.offset());
 
 1255  if (source.hasIndexReg()) {
 
 1256    ldx_h(rd, source.base(), source.index());
 
 1258    ld_h(rd, source.base(), source.offset());
 
 1265  if (source.hasIndexReg()) {
 
 1266    ldx_hu(rd, source.base(), source.index());
 
 1268    ld_hu(rd, source.base(), source.offset());
 
 1275  if (source.hasIndexReg()) {
 
 1276    stx_h(rd, source.base(), source.index());
 
 1278    st_h(rd, source.base(), source.offset());
 
 1285  if (!(source.hasIndexReg()) && is_int16(source.offset()) &&
 
 1286      (source.offset() & 0b11) == 0) {
 
 1287    ldptr_w(rd, source.base(), source.offset());
 
 1292  if (source.hasIndexReg()) {
 
 1293    ldx_w(rd, source.base(), source.index());
 
 1295    ld_w(rd, source.base(), source.offset());
 
 1303  if (source.hasIndexReg()) {
 
 1304    ldx_wu(rd, source.base(), source.index());
 
 1306    ld_wu(rd, source.base(), source.offset());
 
 1313  if (!(source.hasIndexReg()) && is_int16(source.offset()) &&
 
 1314      (source.offset() & 0b11) == 0) {
 
 1315    stptr_w(rd, source.base(), source.offset());
 
 1320  if (source.hasIndexReg()) {
 
 1321    stx_w(rd, source.base(), source.index());
 
 1323    st_w(rd, source.base(), source.offset());
 
 1330  if (!(source.hasIndexReg()) && is_int16(source.offset()) &&
 
 1331      (source.offset() & 0b11) == 0) {
 
 1332    ldptr_d(rd, source.base(), source.offset());
 
 1337  if (source.hasIndexReg()) {
 
 1338    ldx_d(rd, source.base(), source.index());
 
 1340    ld_d(rd, source.base(), source.offset());
 
 1347  if (!(source.hasIndexReg()) && is_int16(source.offset()) &&
 
 1348      (source.offset() & 0b11) == 0) {
 
 1349    stptr_d(rd, source.base(), source.offset());
 
 1354  if (source.hasIndexReg()) {
 
 1355    stx_d(rd, source.base(), source.index());
 
 1357    st_d(rd, source.base(), source.offset());
 
 1364  if (tmp.hasIndexReg()) {
 
 1365    fldx_s(fd, tmp.base(), tmp.index());
 
 1367    fld_s(fd, tmp.base(), tmp.offset());
 
 1374  if (tmp.hasIndexReg()) {
 
 1375    fstx_s(fs, tmp.base(), tmp.index());
 
 1377    fst_s(fs, tmp.base(), tmp.offset());
 
 1384  if (tmp.hasIndexReg()) {
 
 1385    fldx_d(fd, tmp.base(), tmp.index());
 
 1387    fld_d(fd, tmp.base(), tmp.offset());
 
 1394  if (tmp.hasIndexReg()) {
 
 1395    fstx_d(fs, tmp.base(), tmp.index());
 
 1397    fst_d(fs, tmp.base(), tmp.offset());
 
 1402  DCHECK(!rj.hasIndexReg());
 
 1403  bool is_one_instruction = is_int14(rj.offset());
 
 1404  if (is_one_instruction) {
 
 1405    ll_w(rd, rj.base(), rj.offset());
 
 1408    Register scratch = temps.Acquire();
 
 1409    li(scratch, rj.offset());
 
 1410    add_d(scratch, scratch, rj.base());
 
 1411    ll_w(rd, scratch, 0);
 
 1416  DCHECK(!rj.hasIndexReg());
 
 1417  bool is_one_instruction = is_int14(rj.offset());
 
 1418  if (is_one_instruction) {
 
 1419    ll_d(rd, rj.base(), rj.offset());
 
 1422    Register scratch = temps.Acquire();
 
 1423    li(scratch, rj.offset());
 
 1424    add_d(scratch, scratch, rj.base());
 
 1425    ll_d(rd, scratch, 0);
 
 1430  DCHECK(!rj.hasIndexReg());
 
 1431  bool is_one_instruction = is_int14(rj.offset());
 
 1432  if (is_one_instruction) {
 
 1433    sc_w(rd, rj.base(), rj.offset());
 
 1436    Register scratch = temps.Acquire();
 
 1437    li(scratch, rj.offset());
 
 1438    add_d(scratch, scratch, rj.base());
 
 1439    sc_w(rd, scratch, 0);
 
 1444  DCHECK(!rj.hasIndexReg());
 
 1445  bool is_one_instruction = is_int14(rj.offset());
 
 1446  if (is_one_instruction) {
 
 1447    sc_d(rd, rj.base(), rj.offset());
 
 1450    Register scratch = temps.Acquire();
 
 1451    li(scratch, rj.offset());
 
 1452    add_d(scratch, scratch, rj.base());
 
 1453    sc_d(rd, scratch, 0);
 
 1466  li(dst, Operand(value), mode);
 
 1472    if (reference.IsIsolateFieldId()) {
 
 1473      Add_d(dst, 
kRootRegister, Operand(reference.offset_from_root_register()));
 
 1476    if (
options().isolate_independent_code) {
 
 1484  CHECK(!reference.IsIsolateFieldId());
 
 1485  li(dst, Operand(reference), mode);
 
 1489  if (is_int12(
static_cast<int32_t>(value)) ||
 
 1498  if (is_int12(
static_cast<int32_t>(
j.immediate()))) {
 
 1499    addi_d(rd, zero_reg, 
j.immediate());
 
 1500  } 
else if (is_uint12(
static_cast<int32_t>(
j.immediate()))) {
 
 1503    lu12i_w(rd, 
j.immediate() >> 12 & 0xfffff);
 
 1511  if (is_int32(value)) {
 
 1513  } 
else if (is_int52(value)) {
 
 1515  } 
else if ((value & 0xffffffffL) == 0) {
 
 1521    } 
else if (tzc + lzc > 12) {
 
 1527    int64_t imm21 = (value >> 31) & 0x1fffffL;
 
 1528    if (imm21 != 0x1fffffL && imm21 != 0) {
 
 1544  int64_t imm = 
j.immediate();
 
 1547  if (is_int32(imm)) {
 
 1549  } 
else if (is_int52(imm)) {
 
 1551    lu32i_d(rd, imm >> 32 & 0xfffff);
 
 1552  } 
else if ((imm & 0xffffffffL) == 0) {
 
 1558    } 
else if (tzc + lzc > 12) {
 
 1561      slli_d(rd, rd, tzc + 20);
 
 1564      lu32i_d(rd, imm >> 32 & 0xfffff);
 
 1568    int64_t imm21 = (imm >> 31) & 0x1fffffL;
 
 1570    if (imm21 != 0x1fffffL && imm21 != 0) 
lu32i_d(rd, imm >> 32 & 0xfffff);
 
 1581    Handle<HeapObject> 
handle(
reinterpret_cast<Address*
>(
j.immediate()));
 
 1584    lu12i_w(rd, immediate >> 12 & 0xfffff);
 
 1588    if (
j.IsHeapNumberRequest()) {
 
 1592      Handle<HeapObject> 
handle(
reinterpret_cast<Address*
>(
j.immediate()));
 
 1595      immediate = 
j.immediate();
 
 1599    lu12i_w(rd, immediate >> 12 & 0xfffff);
 
 1605      DCHECK(is_int32(immediate) || is_uint32(immediate));
 
 1608    lu32i_d(rd, immediate >> 32 & 0xfffff);
 
 1612    lu12i_w(rd, 
j.immediate() >> 12 & 0xfffff);
 
 1614    lu32i_d(rd, 
j.immediate() >> 32 & 0xfffff);
 
 1617    lu12i_w(rd, 
j.immediate() >> 12 & 0xfffff);
 
 1619    lu32i_d(rd, 
j.immediate() >> 32 & 0xfffff);
 
 1632    if ((regs.bits() & (1 << 
i)) != 0) {
 
 1637  addi_d(sp, sp, stack_offset);
 
 1645    if ((regs1.bits() & (1 << 
i)) != 0) {
 
 1651    if ((regs2.bits() & (1 << 
i)) != 0) {
 
 1656  addi_d(sp, sp, stack_offset);
 
 1666    if ((regs1.bits() & (1 << 
i)) != 0) {
 
 1672    if ((regs2.bits() & (1 << 
i)) != 0) {
 
 1678    if ((regs3.bits() & (1 << 
i)) != 0) {
 
 1683  addi_d(sp, sp, stack_offset);
 
 1690    if ((regs.bits() & (1 << 
i)) != 0) {
 
 1695  addi_d(sp, sp, stack_offset);
 
 1703    if ((regs2.bits() & (1 << 
i)) != 0) {
 
 1709    if ((regs1.bits() & (1 << 
i)) != 0) {
 
 1714  addi_d(sp, sp, stack_offset);
 
 1724    if ((regs3.bits() & (1 << 
i)) != 0) {
 
 1730    if ((regs2.bits() & (1 << 
i)) != 0) {
 
 1736    if ((regs1.bits() & (1 << 
i)) != 0) {
 
 1741  addi_d(sp, sp, stack_offset);
 
 1745  int16_t num_to_push = regs.Count();
 
 1748  Sub_d(sp, sp, Operand(stack_offset));
 
 1750    if ((regs.bits() & (1 << 
i)) != 0) {
 
 1761    if ((regs.bits() & (1 << 
i)) != 0) {
 
 1766  addi_d(sp, sp, stack_offset);
 
 1792  Register scratch = temps.Acquire();
 
 1800  Register scratch = temps.Acquire();
 
 1811  Register scratch = temps.Acquire();
 
 1819  Register scratch = temps.Acquire();
 
 1822  Label msb_clear, conversion_done;
 
 1824  Branch(&msb_clear, 
ge, rj, Operand(zero_reg));
 
 1827  andi(scratch, rj, 1);
 
 1829  or_(scratch, scratch, rj);
 
 1833  Branch(&conversion_done);
 
 1840  bind(&conversion_done);
 
 1846  Register scratch = temps.Acquire();
 
 1854  Register scratch = temps.Acquire();
 
 1865  Register scratch = temps.Acquire();
 
 1873  Register scratch = temps.Acquire();
 
 1881  andi(scratch, rj, 1);
 
 1883  or_(scratch, scratch, rj);
 
 1887  Branch(&conversion_done);
 
 1894  bind(&conversion_done);
 
 1914                                  FPURegister scratch) {
 
 1920                                  FPURegister scratch) {
 
 1922  Register scratch2 = temps.Acquire();
 
 1928                                  FPURegister scratch) {
 
 1930  Register scratch2 = temps.Acquire();
 
 1936                                  FPURegister scratch, Register 
result) {
 
 1938  Register scratch2 = temps.Acquire();
 
 1944                                  FPURegister scratch, Register 
result) {
 
 1946  Register scratch2 = temps.Acquire();
 
 1968                                  FPURegister scratch) {
 
 1974    Register scratch1 = temps.Acquire();
 
 1975    li(scratch1, 0x41F0000000000000);
 
 1980  Label simple_convert;
 
 1985  Add_w(rd, zero_reg, -1);
 
 1990  bind(&simple_convert);
 
 1999                                  FPURegister scratch) {
 
 2004    Register scratch1 = temps.Acquire();
 
 2005    li(scratch1, 0x4F800000);
 
 2010  Label simple_convert;
 
 2015  Add_w(rd, zero_reg, -1);
 
 2020  bind(&simple_convert);
 
 2029                                  FPURegister scratch, Register 
result) {
 
 2031  Register scratch1 = temps.Acquire();
 
 2036  Label simple_convert, done, fail;
 
 2039    Move(scratch, -1.0);
 
 2046  li(scratch1, 0x43E0000000000000);
 
 2056  fsub_d(scratch, fj, scratch);
 
 2059  Or(rd, rd, Operand(1UL << 63));
 
 2063  bind(&simple_convert);
 
 2070    addi_d(scratch1, zero_reg, -1);
 
 2071    srli_d(scratch1, scratch1, 1);  
 
 2081                                  FPURegister scratch, Register 
result) {
 
 2085  Label simple_convert, done, fail;
 
 2088    Move(scratch, -1.0f);
 
 2097    Register scratch1 = temps.Acquire();
 
 2098    li(scratch1, 0x5F000000);
 
 2109  fsub_s(scratch, fj, scratch);
 
 2112  Or(rd, rd, Operand(1UL << 63));
 
 2116  bind(&simple_convert);
 
 2125      Register scratch1 = temps.Acquire();
 
 2126      addi_d(scratch1, zero_reg, -1);
 
 2127      srli_d(scratch1, scratch1, 1);  
 
 2141  Register scratch = temps.Acquire();
 
 2142  Register scratch2 = temps.Acquire();
 
 2144  li(scratch2, Operand(mode));
 
 2170  Register scratch = temps.Acquire();
 
 2171  Register scratch2 = temps.Acquire();
 
 2173  li(scratch2, Operand(mode));
 
 2219  bool long_branch = target->is_bound()
 
 2220                         ? !
is_near(target, OffsetSize::kOffset21)
 
 2233  bool long_branch = target->is_bound()
 
 2234                         ? !
is_near(target, OffsetSize::kOffset21)
 
 2248  Register scratch = temps.Acquire();
 
 2249  DCHECK(src_low != scratch);
 
 2257  Register scratch = temps.Acquire();
 
 2258  li(scratch, Operand(
static_cast<int32_t>(src)));
 
 2271    Register scratch = temps.Acquire();
 
 2272    li(scratch, Operand(
static_cast<int64_t
>(src)));
 
 2280  Register scratch = temps.Acquire();
 
 2283  or_(rd, rd, scratch);
 
 2288  Register scratch = temps.Acquire();
 
 2291  or_(rd, rd, scratch);
 
 2306  Register scratch = temps.Acquire();
 
 2313  Register scratch = temps.Acquire();
 
 2347  uint32_t 
shift = 24;         
 
 2351  Register scratch = temps.Acquire();
 
 2352  Register scratch2 = temps.Acquire();
 
 2355  And(scratch, scratch, scratch2);
 
 2356  Sub_w(scratch, rj, scratch);
 
 2358  And(rd, scratch, scratch2);
 
 2359  srli_w(scratch, scratch, 2);
 
 2360  And(scratch, scratch, scratch2);
 
 2361  Add_w(scratch, rd, scratch);
 
 2363  Add_w(rd, rd, scratch);
 
 2365  And(rd, rd, scratch2);
 
 2367  Mul_w(rd, rd, scratch);
 
 2373  int64_t 
B0 = 0x5555555555555555l;     
 
 2374  int64_t 
B1 = 0x3333333333333333l;     
 
 2375  int64_t 
B2 = 0x0F0F0F0F0F0F0F0Fl;     
 
 2376  int64_t value = 0x0101010101010101l;  
 
 2377  uint32_t 
shift = 56;                  
 
 2381  Register scratch = temps.Acquire();
 
 2382  Register scratch2 = temps.Acquire();
 
 2385  And(scratch, scratch, scratch2);
 
 2386  Sub_d(scratch, rj, scratch);
 
 2388  And(rd, scratch, scratch2);
 
 2389  srli_d(scratch, scratch, 2);
 
 2390  And(scratch, scratch, scratch2);
 
 2391  Add_d(scratch, rd, scratch);
 
 2393  Add_d(rd, rd, scratch);
 
 2395  And(rd, rd, scratch2);
 
 2397  Mul_d(rd, rd, scratch);
 
 2402                                 int size, 
bool sign_extend) {
 
 2425  Rotr_d(dest, dest, 
pos);
 
 2429    Register scratch = temps.Acquire();
 
 2430    Sub_d(scratch, zero_reg, 
pos);
 
 2431    Rotr_d(dest, dest, scratch);
 
 2441  Register scratch = temps.Acquire();
 
 2442  Register scratch2 = temps.Acquire();
 
 2446  li(scratch, 1L << 63);
 
 2447  Xor(scratch, scratch, scratch2);
 
 2448  rotri_d(scratch2, scratch, 1);
 
 2450  Branch(done, 
ne, scratch, Operand(scratch2));
 
 2467#if V8_ENABLE_WEBASSEMBLY 
 2468  if (stub_mode == StubCallMode::kCallWasmRuntimeStub) {
 
 2483                                 const Operand& rhs) {
 
 2487      if (rhs.IsImmediate()) {
 
 2488        if (rhs.immediate() == 0) {
 
 2492            Sltu(dst, zero_reg, lhs);
 
 2494        } 
else if (is_int12(-rhs.immediate())) {
 
 2495          Add_d(dst, lhs, Operand(-rhs.immediate()));
 
 2499            Sltu(dst, zero_reg, dst);
 
 2506            Sltu(dst, zero_reg, dst);
 
 2514          Sltu(dst, zero_reg, dst);
 
 2532      Sltu(dst, lhs, rhs);
 
 2535      Sgeu(dst, lhs, rhs);
 
 2538      Sgtu(dst, lhs, rhs);
 
 2541      Sleu(dst, lhs, rhs);
 
 2549#define BRANCH_ARGS_CHECK(cond, rj, rk)                                  \ 
 2550  DCHECK((cond == cc_always && rj == zero_reg && rk.rm() == zero_reg) || \ 
 2551         (cond != cc_always && (rj != zero_reg || rk.rm() != zero_reg))) 
 2563                            const Operand& rk, 
bool need_link) {
 
 2564  if (
L->is_bound()) {
 
 2595                            RootIndex index, 
bool need_sign_extend) {
 
 2600    if (need_sign_extend) {
 
 2601      left = temps.Acquire();
 
 2605    Branch(
L, cond, left, Operand(right));
 
 2608    Branch(
L, cond, rj, Operand(right));
 
 2630                                           Register rj, 
const Operand& rk,
 
 2634  Register scratch = temps.Acquire();
 
 2645        if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset26)) 
return false;
 
 2654        if (rk.is_reg() && rj.code() == rk.rm().code()) {
 
 2657          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset16)) 
return false;
 
 2662          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset21)) 
return false;
 
 2667          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset16)) 
return false;
 
 2676        if (rk.is_reg() && rj.code() == rk.rm().code()) {
 
 2677          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset16)) 
return false;
 
 2684          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset21)) 
return false;
 
 2689          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset16)) 
return false;
 
 2701        if (rk.is_reg() && rj.code() == rk.rm().code()) {
 
 2704          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset16)) 
return false;
 
 2709          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset16)) 
return false;
 
 2719        if (rk.is_reg() && rj.code() == rk.rm().code()) {
 
 2720          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset26)) 
return false;
 
 2725          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset16)) 
return false;
 
 2730          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset16)) 
return false;
 
 2740        if (rk.is_reg() && rj.code() == rk.rm().code()) {
 
 2743          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset16)) 
return false;
 
 2748          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset16)) 
return false;
 
 2758        if (rk.is_reg() && rj.code() == rk.rm().code()) {
 
 2759          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset26)) 
return false;
 
 2764          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset16)) 
return false;
 
 2769          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset16)) 
return false;
 
 2781        if (rk.is_reg() && rj.code() == rk.rm().code()) {
 
 2784          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset26)) 
return false;
 
 2789          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset16)) 
return false;
 
 2799        if (rk.is_reg() && rj.code() == rk.rm().code()) {
 
 2800          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset26)) 
return false;
 
 2805          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset26)) 
return false;
 
 2810          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset16)) 
return false;
 
 2820        if (rk.is_reg() && rj.code() == rk.rm().code()) {
 
 2825          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset16)) 
return false;
 
 2835        if (rk.is_reg() && rj.code() == rk.rm().code()) {
 
 2836          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset26)) 
return false;
 
 2841          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset21)) 
return false;
 
 2845          if (
L->is_bound() && !
is_near(
L, OffsetSize::kOffset16)) 
return false;
 
 2861                                 const Operand& rk, 
bool need_link) {
 
 2869                                            Register r1, 
const Operand& r2,
 
 2873    Register scratch0 = temps.Acquire();
 
 2876      Branch(
label, cond, scratch0, Operand(zero_reg), need_link);
 
 2878      Register scratch1 = temps.Acquire();
 
 2880        slli_w(scratch1, r2.rm(), 0);
 
 2884      Branch(
label, cond, scratch0, Operand(scratch1), need_link);
 
 2900                                            int constant_index) {
 
 2927    ExternalReference reference, Register scratch) {
 
 2929    if (reference.IsIsolateFieldId()) {
 
 2932    if (
options().enable_root_relative_access) {
 
 2939    if (
options().isolate_independent_code) {
 
 2949        DCHECK(scratch.is_valid());
 
 2958  DCHECK(scratch.is_valid());
 
 2959  li(scratch, reference);
 
 2981  offset -= 
reinterpret_cast<int64_t
>(
pc);
 
 2987                          const Operand& rk) {
 
 2990    jirl(zero_reg, target, 0);
 
 2995    jirl(zero_reg, target, 0);
 
 3001                          Condition cond, Register rj, 
const Operand& rk) {
 
 3009    Register scratch = temps.Acquire();
 
 3010    li(scratch, Operand(target, rmode));
 
 3011    jirl(zero_reg, scratch, 0);
 
 3017                          Register rj, 
const Operand& rk) {
 
 3018  Jump(
static_cast<intptr_t
>(target), rmode, cond, rj, rk);
 
 3022                          Condition cond, Register rj, 
const Operand& rk) {
 
 3031  if (
isolate()->builtins()->IsBuiltinHandle(code, &builtin)) {
 
 3044  Register scratch = temps.Acquire();
 
 3045  li(scratch, reference);
 
 3051                          const Operand& rk) {
 
 3097    Branch(target, 
cc, obj, Operand(temp));
 
 3104                                     unsigned higher_limit,
 
 3105                                     Label* on_in_range) {
 
 3107  if (lower_limit != 0) {
 
 3109    Register scratch = temps.Acquire();
 
 3110    Sub_d(scratch, value, Operand(lower_limit));
 
 3111    Branch(on_in_range, 
ls, scratch, Operand(higher_limit - lower_limit));
 
 3113    Branch(on_in_range, 
ls, value, Operand(higher_limit - lower_limit));
 
 3118                          Register rj, 
const Operand& rk) {
 
 3124  intptr_t offset_diff = target - 
pc_offset();
 
 3126    bl(offset_diff >> 2);
 
 3129    Register scratch = temps.Acquire();
 
 3131    jirl(
ra, scratch, (offset_diff & 0x3ffff) >> 2);
 
 3134    Register scratch = temps.Acquire();
 
 3135    li(scratch, Operand(
static_cast<int64_t
>(target), rmode), 
ADDRESS_LOAD);
 
 3142                          Condition cond, Register rj, 
const Operand& rk) {
 
 3145  if (
isolate()->builtins()->IsBuiltinHandle(code, &builtin)) {
 
 3152  Call(
static_cast<Address>(target_index), rmode, cond, rj, rk);
 
 3165  Ld_d(target, 
MemOperand(target, IsolateData::builtin_entry_table_offset()));
 
 3189  switch (
options().builtin_call_jump_mode) {
 
 3197      bl(
static_cast<int>(builtin));
 
 3207      if (
options().use_pc_relative_calls_and_jumps_for_mksnapshot) {
 
 3211        bl(code_target_index);
 
 3223                                     Register type, Operand range) {
 
 3240  switch (
options().builtin_call_jump_mode) {
 
 3253      b(
static_cast<int>(builtin));
 
 3258      if (
options().use_pc_relative_calls_and_jumps_for_mksnapshot) {
 
 3262        b(code_target_index);
 
 3282  static constexpr int kNumInstructionsToJump = 2;
 
 3287  pcaddi(
ra, kNumInstructionsToJump + 1);
 
 3295  jirl(zero_reg, target, 0);
 
 3316                          const Operand& op) {
 
 3336    Xor(reg1, reg1, Operand(reg2));
 
 3337    Xor(reg2, reg2, Operand(reg1));
 
 3338    Xor(reg1, reg1, Operand(reg2));
 
 3350  Register scratch = temps.Acquire();
 
 3351  li(scratch, Operand(smi));
 
 3357  Register scratch = temps.Acquire();
 
 3367    mov(scratch, zero_reg);
 
 3373    Add_d(scratch, scratch, Operand(1));
 
 3384    Add_d(scratch, scratch, Operand(-1));
 
 3416  Register scratch = temps.Acquire();
 
 3435                        : IsolateData::jslimit_offset();
 
 3442                                        Label* stack_overflow) {
 
 3451  sub_d(scratch1, sp, scratch1);
 
 3455  Branch(stack_overflow, 
le, scratch1, Operand(scratch2));
 
 3459    Register code_data_container, Register scratch, 
Condition cond,
 
 3463  Branch(target, cond, scratch, Operand(zero_reg));
 
 3471                                    Register actual_parameter_count,
 
 3474  Label regular_invoke;
 
 3481  DCHECK_EQ(expected_parameter_count, a2);
 
 3485  sub_d(expected_parameter_count, expected_parameter_count,
 
 3486        actual_parameter_count);
 
 3487  Branch(®ular_invoke, 
le, expected_parameter_count, Operand(zero_reg));
 
 3489  Label stack_overflow;
 
 3498    Sub_d(sp, sp, Operand(t0));
 
 3501    mov(t0, actual_parameter_count);
 
 3505    Sub_d(t0, t0, Operand(1));
 
 3508    Branch(©, 
gt, t0, Operand(zero_reg));
 
 3512  LoadRoot(t0, RootIndex::kUndefinedValue);
 
 3517    Sub_d(expected_parameter_count, expected_parameter_count, Operand(1));
 
 3519    Branch(&loop, 
gt, expected_parameter_count, Operand(zero_reg));
 
 3523  bind(&stack_overflow);
 
 3531  bind(®ular_invoke);
 
 3536    Register expected_parameter_count_or_dispatch_handle,
 
 3537    Register actual_parameter_count) {
 
 3539                     expected_parameter_count_or_dispatch_handle,
 
 3540                     actual_parameter_count));
 
 3546  SmiTag(expected_parameter_count_or_dispatch_handle);
 
 3547  SmiTag(actual_parameter_count);
 
 3548  Push(expected_parameter_count_or_dispatch_handle, actual_parameter_count);
 
 3560  Pop(expected_parameter_count_or_dispatch_handle, actual_parameter_count);
 
 3562  SmiUntag(expected_parameter_count_or_dispatch_handle);
 
 3565#ifdef V8_ENABLE_LEAPTIERING 
 3567    Register function, Register actual_parameter_count, 
InvokeType type,
 
 3581                     argument_adaption_mode);
 
 3585    Register function, Register 
new_target, Register actual_parameter_count,
 
 3601    Register function, Register 
new_target, Register actual_parameter_count,
 
 3610  Ld_w(dispatch_handle,
 
 3614  Label debug_hook, continue_after_hook;
 
 3616    li(t0, ExternalReference::debug_hook_on_function_call_address(
isolate()));
 
 3620  bind(&continue_after_hook);
 
 3624    LoadRoot(a3, RootIndex::kUndefinedValue);
 
 3629    Register expected_parameter_count = a2;
 
 3630    LoadParameterCountFromJSDispatchTable(expected_parameter_count,
 
 3631                                          dispatch_handle, scratch);
 
 3632    InvokePrologue(expected_parameter_count, actual_parameter_count, type);
 
 3639                                    dispatch_handle, scratch);
 
 3654                          actual_parameter_count);
 
 3655  Branch(&continue_after_hook);
 
 3661                                        Register expected_parameter_count,
 
 3662                                        Register actual_parameter_count,
 
 3670  Label debug_hook, continue_after_hook;
 
 3672    li(t0, ExternalReference::debug_hook_on_function_call_address(
isolate()));
 
 3676  bind(&continue_after_hook);
 
 3680    LoadRoot(a3, RootIndex::kUndefinedValue);
 
 3683  InvokePrologue(expected_parameter_count, actual_parameter_count, type);
 
 3688  constexpr int unused_argument_count = 0;
 
 3704                          actual_parameter_count);
 
 3705  Branch(&continue_after_hook);
 
 3713    Register function, Register 
new_target, Register actual_parameter_count,
 
 3721  Register expected_parameter_count = a2;
 
 3727  Ld_hu(expected_parameter_count,
 
 3729                        SharedFunctionInfo::kFormalParameterCountOffset));
 
 3732                     actual_parameter_count, type);
 
 3736                                    Register expected_parameter_count,
 
 3737                                    Register actual_parameter_count,
 
 3750                     actual_parameter_count, type);
 
 3758                                   Register type_reg) {
 
 3767  if (lower_limit != 0 || type_reg != range) {
 
 3768    Sub_d(range, type_reg, Operand(lower_limit));
 
 3776                                   const Operand& right, Register overflow) {
 
 3780  Register scratch = temps.Acquire();
 
 3781  Register scratch2 = temps.Acquire();
 
 3783  if (!right.is_reg()) {
 
 3784    li(scratch, Operand(right));
 
 3785    right_reg = scratch;
 
 3787    right_reg = right.rm();
 
 3790  DCHECK(left != scratch2 && right_reg != scratch2 && dst != scratch2 &&
 
 3791         overflow != scratch2);
 
 3792  DCHECK(overflow != left && overflow != right_reg);
 
 3794  if (dst == left || dst == right_reg) {
 
 3795    add_d(scratch2, left, right_reg);
 
 3796    xor_(overflow, scratch2, left);
 
 3797    xor_(scratch, scratch2, right_reg);
 
 3798    and_(overflow, overflow, scratch);
 
 3801    add_d(dst, left, right_reg);
 
 3802    xor_(overflow, dst, left);
 
 3803    xor_(scratch, dst, right_reg);
 
 3804    and_(overflow, overflow, scratch);
 
 3809                                   const Operand& right, Register overflow) {
 
 3813  Register scratch = temps.Acquire();
 
 3814  Register scratch2 = temps.Acquire();
 
 3816  if (!right.is_reg()) {
 
 3817    li(scratch, Operand(right));
 
 3818    right_reg = scratch;
 
 3820    right_reg = right.rm();
 
 3823  DCHECK(left != scratch2 && right_reg != scratch2 && dst != scratch2 &&
 
 3824         overflow != scratch2);
 
 3825  DCHECK(overflow != left && overflow != right_reg);
 
 3827  if (dst == left || dst == right_reg) {
 
 3828    Sub_d(scratch2, left, right_reg);
 
 3829    xor_(overflow, left, scratch2);
 
 3830    xor_(scratch, left, right_reg);
 
 3831    and_(overflow, overflow, scratch);
 
 3834    sub_d(dst, left, right_reg);
 
 3835    xor_(overflow, left, dst);
 
 3836    xor_(scratch, left, right_reg);
 
 3837    and_(overflow, overflow, scratch);
 
 3842                                   const Operand& right, Register overflow) {
 
 3846  Register scratch = temps.Acquire();
 
 3847  Register scratch2 = temps.Acquire();
 
 3849  if (!right.is_reg()) {
 
 3850    li(scratch, Operand(right));
 
 3851    right_reg = scratch;
 
 3853    right_reg = right.rm();
 
 3856  DCHECK(left != scratch2 && right_reg != scratch2 && dst != scratch2 &&
 
 3857         overflow != scratch2);
 
 3858  DCHECK(overflow != left && overflow != right_reg);
 
 3860  if (dst == left || dst == right_reg) {
 
 3861    Mul_w(scratch2, left, right_reg);
 
 3862    Mulh_w(overflow, left, right_reg);
 
 3865    Mul_w(dst, left, right_reg);
 
 3866    Mulh_w(overflow, left, right_reg);
 
 3869  srai_d(scratch2, dst, 32);
 
 3870  xor_(overflow, overflow, scratch2);
 
 3874                                   const Operand& right, Register overflow) {
 
 3878  Register scratch = temps.Acquire();
 
 3879  Register scratch2 = temps.Acquire();
 
 3881  if (!right.is_reg()) {
 
 3882    li(scratch, Operand(right));
 
 3883    right_reg = scratch;
 
 3885    right_reg = right.rm();
 
 3888  DCHECK(left != scratch2 && right_reg != scratch2 && dst != scratch2 &&
 
 3889         overflow != scratch2);
 
 3890  DCHECK(overflow != left && overflow != right_reg);
 
 3892  if (dst == left || dst == right_reg) {
 
 3893    Mul_d(scratch2, left, right_reg);
 
 3894    Mulh_d(overflow, left, right_reg);
 
 3897    Mul_d(dst, left, right_reg);
 
 3898    Mulh_d(overflow, left, right_reg);
 
 3901  srai_d(scratch2, dst, 63);
 
 3902  xor_(overflow, overflow, scratch2);
 
 3906                                 int num_arguments) {
 
 3913  CHECK(f->nargs < 0 || f->nargs == num_arguments);
 
 3921  bool switch_to_central_stack = 
options().is_wasm;
 
 3929  if (function->nargs >= 0) {
 
 3936                                             bool builtin_exit_frame) {
 
 3942                                   Label* target_if_cleared) {
 
 3950                                          Register scratch2) {
 
 3952  if (
v8_flags.native_code_counters && counter->Enabled()) {
 
 3959    Add_w(scratch1, scratch1, Operand(value));
 
 3966                                          Register scratch2) {
 
 3968  if (
v8_flags.native_code_counters && counter->Enabled()) {
 
 3975    Sub_w(scratch1, scratch1, Operand(value));
 
 4017    li(a0, Operand(
static_cast<int>(reason)));
 
 4018    li(a1, ExternalReference::abort_with_reason());
 
 4036      Register scratch = temps.Acquire();
 
 4055    static const int kExpectedAbortInstructions = 10;
 
 4057    DCHECK_LE(abort_instructions, kExpectedAbortInstructions);
 
 4058    while (abort_instructions++ < kExpectedAbortInstructions) {
 
 4074                                        Register scratch, Label* fbv_undef) {
 
 4084  Branch(&done, 
eq, scratch, Operand(FEEDBACK_VECTOR_TYPE));
 
 4087  LoadRoot(dst, RootIndex::kUndefinedValue);
 
 4097               dst, Map::kConstructorOrBackPointerOrNativeContextOffset));
 
 4103  Register scratch = temps.Acquire();
 
 4119#if V8_ENABLE_WEBASSEMBLY 
 4120  if (type == StackFrame::WASM || type == StackFrame::WASM_LIFTOFF_SETUP) {
 
 4136  DCHECK(frame_type == StackFrame::EXIT ||
 
 4137         frame_type == StackFrame::BUILTIN_EXIT ||
 
 4138         frame_type == StackFrame::API_ACCESSOR_EXIT ||
 
 4139         frame_type == StackFrame::API_CALLBACK_EXIT);
 
 4141  using ER = ExternalReference;
 
 4174  ER c_entry_fp_address =
 
 4175      ER::Create(IsolateAddressId::kCEntryFPAddress, 
isolate());
 
 4178  ER context_address = ER::Create(IsolateAddressId::kContextAddress, 
isolate());
 
 4187  if (frame_alignment > 0) {
 
 4189    And(sp, sp, Operand(-frame_alignment));  
 
 4202  using ER = ExternalReference;
 
 4205  ER context_address = ER::Create(IsolateAddressId::kContextAddress, 
isolate());
 
 4214  ER c_entry_fp_address =
 
 4215      ER::Create(IsolateAddressId::kCEntryFPAddress, 
isolate());
 
 4226#if V8_HOST_ARCH_LOONG64 
 4237  return v8_flags.sim_stack_alignment;
 
 4258  Register scratch = temps.Acquire();
 
 4260  Branch(smi_label, 
eq, scratch, Operand(zero_reg));
 
 4266  Register scratch = temps.Acquire();
 
 4268  Branch(not_smi_label, 
ne, scratch, Operand(zero_reg));
 
 4278    scratch = temps.Acquire();
 
 4281    if (std::optional<RootIndex> expected =
 
 4285      Branch(target, 
cc, scratch, Operand(ptr));
 
 4290  Branch(target, 
cc, scratch, Operand(instance_type));
 
 4294                                               Register scratch, Label* target,
 
 4297  CHECK(
cc == Condition::kUnsignedLessThan ||
 
 4298        cc == Condition::kUnsignedGreaterThanEqual);
 
 4302    LoadMap(scratch, heap_object);
 
 4304    Branch(&ok, Condition::kUnsignedLessThanEqual, scratch,
 
 4305           Operand(LAST_JS_RECEIVER_TYPE - FIRST_JS_RECEIVER_TYPE));
 
 4307    LoadMap(scratch, heap_object);
 
 4310    Branch(&ok, Condition::kUnsignedLessThanEqual, scratch,
 
 4311           Operand(LAST_PRIMITIVE_HEAP_OBJECT_TYPE -
 
 4312                   FIRST_PRIMITIVE_HEAP_OBJECT_TYPE));
 
 4314    Abort(AbortReason::kInvalidReceiver);
 
 4325    static_assert(LAST_JS_RECEIVER_TYPE == 
LAST_TYPE);
 
 4327    Branch(target, 
cc, scratch, Operand(FIRST_JS_RECEIVER_TYPE));
 
 4331#ifdef V8_ENABLE_DEBUG_CODE 
 4354  Branch(&ok, 
kEqual, map_tmp, RootIndex::kHeapNumberMap);
 
 4358  Branch(&ok, 
kEqual, 
object, RootIndex::kUndefinedValue);
 
 4366  Abort(abort_reason);
 
 4375  Register scratch = temps.Acquire();
 
 4377  Check(
ne, AbortReason::kOperandIsASmi, scratch, Operand(zero_reg));
 
 4385  Register scratch = temps.Acquire();
 
 4387  Check(
eq, AbortReason::kOperandIsASmi, scratch, Operand(zero_reg));
 
 4394  const int frame_alignment_mask = frame_alignment - 1;
 
 4397    Label alignment_as_expected;
 
 4401      Register scratch = temps.Acquire();
 
 4402      andi(scratch, sp, frame_alignment_mask);
 
 4403      Branch(&alignment_as_expected, 
eq, scratch, Operand(zero_reg));
 
 4407    bind(&alignment_as_expected);
 
 4416  Register scratch = temps.Acquire();
 
 4419  Check(
ne, AbortReason::kOperandIsASmiAndNotAConstructor, scratch,
 
 4424  And(scratch, scratch, Operand(Map::Bits1::IsConstructorBit::kMask));
 
 4425  Check(
ne, AbortReason::kOperandIsNotAConstructor, scratch, Operand(zero_reg));
 
 4433  Register scratch = temps.Acquire();
 
 4436  Check(
ne, AbortReason::kOperandIsASmiAndNotAFunction, scratch,
 
 4441  Check(
ls, AbortReason::kOperandIsNotAFunction, scratch,
 
 4442        Operand(LAST_JS_FUNCTION_TYPE - FIRST_JS_FUNCTION_TYPE));
 
 4451  Register scratch = temps.Acquire();
 
 4454  Check(
ne, AbortReason::kOperandIsASmiAndNotAFunction, scratch,
 
 4460  Check(
ls, AbortReason::kOperandIsNotACallableFunction, scratch,
 
 4471  Register scratch = temps.Acquire();
 
 4474  Check(
ne, AbortReason::kOperandIsASmiAndNotABoundFunction, scratch,
 
 4477  Check(
eq, AbortReason::kOperandIsNotABoundFunction, scratch,
 
 4478        Operand(JS_BOUND_FUNCTION_TYPE));
 
 4486  Register scratch = temps.Acquire();
 
 4489  Check(
ne, AbortReason::kOperandIsASmiAndNotAGeneratorObject, scratch,
 
 4492  Sub_d(scratch, scratch, Operand(FIRST_JS_GENERATOR_OBJECT_TYPE));
 
 4494      ls, AbortReason::kOperandIsNotAGeneratorObject, scratch,
 
 4495      Operand(LAST_JS_GENERATOR_OBJECT_TYPE - FIRST_JS_GENERATOR_OBJECT_TYPE));
 
 4506  Label done_checking;
 
 4508  LoadRoot(scratch, RootIndex::kUndefinedValue);
 
 4509  Branch(&done_checking, 
eq, 
object, Operand(scratch));
 
 4511  Assert(
eq, AbortReason::kExpectedUndefinedOrCell, scratch,
 
 4512         Operand(ALLOCATION_SITE_TYPE));
 
 4513  bind(&done_checking);
 
 4519                                FPURegister src2, Label* out_of_line) {
 
 4539                                FPURegister src2, Label* out_of_line) {
 
 4559                                FPURegister src2, Label* out_of_line) {
 
 4579                                FPURegister src2, Label* out_of_line) {
 
 4599                                              int num_double_arguments) {
 
 4600  int stack_passed_words = 0;
 
 4609      stack_passed_words += num_count;
 
 4611      stack_passed_words +=
 
 4615  return stack_passed_words;
 
 4619                                          int num_double_arguments,
 
 4626  int stack_passed_arguments =
 
 4634    bstrins_d(sp, zero_reg, std::log2(frame_alignment) - 1, 0);
 
 4647                                  int num_reg_arguments,
 
 4648                                  int num_double_arguments,
 
 4650                                  Label* return_location) {
 
 4654  Register scratch = temps.Acquire();
 
 4655  li(scratch, function);
 
 4657                             set_isolate_data_slots, return_location);
 
 4661                                  int num_double_arguments,
 
 4663                                  Label* return_location) {
 
 4666                             set_isolate_data_slots, return_location);
 
 4671                                  Label* return_location) {
 
 4672  return CallCFunction(function, num_arguments, 0, set_isolate_data_slots,
 
 4678                                  Label* return_location) {
 
 4679  return CallCFunction(function, num_arguments, 0, set_isolate_data_slots,
 
 4684    Register function, 
int num_reg_arguments, 
int num_double_arguments,
 
 4696  temps.Include({t0, t1, t2, function});
 
 4697  temps.Exclude(function);
 
 4702#if V8_HOST_ARCH_LOONG64 
 4705    int frame_alignment_mask = frame_alignment - 1;
 
 4708      Label alignment_as_expected;
 
 4711        Register scratch = temps.Acquire();
 
 4712        And(scratch, sp, Operand(frame_alignment_mask));
 
 4713        Branch(&alignment_as_expected, 
eq, scratch, Operand(zero_reg));
 
 4718      bind(&alignment_as_expected);
 
 4733      Register pc_scratch = temps.Acquire();
 
 4747    if (return_location) 
bind(return_location);
 
 4755    int stack_passed_arguments =
 
 4766    return call_pc_offset;
 
 4770#undef BRANCH_ARGS_CHECK 
 4773                                   Label* condition_met) {
 
 4776  Register scratch = temps.Acquire();
 
 4779  And(scratch, scratch, Operand(
mask));
 
 4780  Branch(condition_met, 
cc, scratch, Operand(zero_reg));
 
 4784                                   Register reg4, Register reg5,
 
 4786  RegList regs = {reg1, reg2, reg3, reg4, reg5, reg6};
 
 4789  for (
int i = 0; 
i < config->num_allocatable_general_registers(); ++
i) {
 
 4790    int code = config->GetAllocatableGeneralCode(
i);
 
 4792    if (regs.has(candidate)) 
continue;
 
 4814  Register scratch = temps.Acquire();
 
 4822#ifdef V8_ENABLE_LEAPTIERING 
 4824    Label not_deoptimized;
 
 4826    Branch(¬_deoptimized, 
eq, scratch, Operand(zero_reg));
 
 4827    Abort(AbortReason::kInvalidDeoptimizedCode);
 
 4828    bind(¬_deoptimized);
 
 4843  Register scratch = temps.Acquire();
 
 4853                                              Register code_object,
 
 4856#ifdef V8_ENABLE_SANDBOX 
 4857  LoadCodeEntrypointViaCodePointer(
 
 4885                                    uint16_t argument_count) {
 
 4887#ifdef V8_ENABLE_LEAPTIERING 
 4892  Ld_w(dispatch_handle,
 
 4894  LoadEntrypointAndParameterCountFromJSDispatchTable(code, 
parameter_count,
 
 4895                                                     dispatch_handle, scratch);
 
 4899           Operand(argument_count));
 
 4909#if V8_ENABLE_LEAPTIERING 
 4911                                         uint16_t argument_count) {
 
 4922  static_assert(!JSDispatchTable::kSupportsCompaction);
 
 4923  LoadEntrypointFromJSDispatchTable(code, dispatch_handle, scratch);
 
 4934#ifdef V8_ENABLE_LEAPTIERING 
 4947#ifdef V8_ENABLE_WEBASSEMBLY 
 4949void MacroAssembler::ResolveWasmCodePointer(Register target,
 
 4950                                            uint64_t signature_hash) {
 
 4952  ExternalReference global_jump_table =
 
 4953      ExternalReference::wasm_code_pointer_table();
 
 4955  Register scratch = temps.Acquire();
 
 4956  li(scratch, global_jump_table);
 
 4957#ifdef V8_ENABLE_SANDBOX 
 4958  static_assert(
sizeof(wasm::WasmCodePointerTableEntry) == 16);
 
 4959  Alsl_d(target, target, scratch, 4);
 
 4961       MemOperand(target, wasm::WasmCodePointerTable::kOffsetOfSignatureHash));
 
 4962  bool has_second_tmp = temps.hasAvailable();
 
 4963  Register signature_hash_register = has_second_tmp ? temps.Acquire() : 
target;
 
 4964  if (!has_second_tmp) {
 
 4965    Push(signature_hash_register);
 
 4967  li(signature_hash_register, Operand(signature_hash));
 
 4968  SbxCheck(Condition::kEqual, AbortReason::kWasmSignatureMismatch, scratch,
 
 4969           Operand(signature_hash_register));
 
 4970  if (!has_second_tmp) {
 
 4971    Pop(signature_hash_register);
 
 4974  static_assert(
sizeof(wasm::WasmCodePointerTableEntry) == 8);
 
 4975  Alsl_d(target, target, scratch, 3);
 
 4981void MacroAssembler::CallWasmCodePointer(Register target,
 
 4982                                         uint64_t signature_hash,
 
 4984  ResolveWasmCodePointer(target, signature_hash);
 
 4992void MacroAssembler::CallWasmCodePointerNoSignatureCheck(Register target) {
 
 4993  ExternalReference global_jump_table =
 
 4994      ExternalReference::wasm_code_pointer_table();
 
 4996  Register scratch = temps.Acquire();
 
 4997  li(scratch, global_jump_table);
 
 4998  constexpr unsigned int kEntrySizeLog2 =
 
 4999      std::bit_width(
sizeof(wasm::WasmCodePointerTableEntry)) - 1;
 
 5000  Alsl_d(target, target, scratch, kEntrySizeLog2);
 
 5006void MacroAssembler::LoadWasmCodePointer(Register dst, 
MemOperand src) {
 
 5007  static_assert(
sizeof(WasmCodePointer) == 4);
 
 5015#ifndef V8_ENABLE_LEAPTIERING 
 5017void TailCallOptimizedCodeSlot(MacroAssembler* masm,
 
 5018                               Register optimized_code_entry) {
 
 5026  Label heal_optimized_code_slot;
 
 5031                   &heal_optimized_code_slot);
 
 5035      optimized_code_entry,
 
 5041                                              &heal_optimized_code_slot);
 
 5056  __ bind(&heal_optimized_code_slot);
 
 5063#ifdef V8_ENABLE_DEBUG_CODE 
 5067    Assert(
eq, AbortReason::kExpectedFeedbackCell, scratch,
 
 5068           Operand(FEEDBACK_CELL_TYPE));
 
 5074    Assert(
eq, AbortReason::kExpectedFeedbackVector, scratch,
 
 5075           Operand(FEEDBACK_VECTOR_TYPE));
 
 5081    Register optimized_code, Register closure) {
 
 5085#ifdef V8_ENABLE_LEAPTIERING 
 5107    FrameScope scope(
this, StackFrame::INTERNAL);
 
 5114#ifdef V8_ENABLE_LEAPTIERING 
 5116    static_assert(kJSDispatchHandleShift > 0);
 
 5127#ifdef V8_ENABLE_LEAPTIERING 
 5139#ifndef V8_ENABLE_LEAPTIERING 
 5144    Register flags, Register feedback_vector, 
CodeKind current_code_kind,
 
 5145    Label* flags_need_processing) {
 
 5150  uint32_t flag_mask =
 
 5153  And(scratch, flags, Operand(flag_mask));
 
 5154  Branch(flags_need_processing, 
ne, scratch, Operand(zero_reg));
 
 5158    Register flags, Register feedback_vector) {
 
 5161  Label maybe_has_optimized_code, maybe_needs_logging;
 
 5165    Register scratch = temps.Acquire();
 
 5168    Branch(&maybe_needs_logging, 
eq, scratch, Operand(zero_reg));
 
 5173  bind(&maybe_needs_logging);
 
 5176    Register scratch = temps.Acquire();
 
 5177    And(scratch, flags, Operand(FeedbackVector::LogNextExecutionBit::kMask));
 
 5178    Branch(&maybe_has_optimized_code, 
eq, scratch, Operand(zero_reg));
 
 5183  bind(&maybe_has_optimized_code);
 
 5187                                  FeedbackVector::kMaybeOptimizedCodeOffset));
 
 5189  TailCallOptimizedCodeSlot(
this, optimized_code_entry);
 
 5227  Register scratch = temps.Acquire();
 
 5228  Add_d(scratch, dst.base(), dst.offset());
 
 5265#if V8_ENABLE_SANDBOX 
 5268  Register scratch = temps.Acquire();
 
 5304                              Register function_address,
 
 5305                              ExternalReference thunk_ref, Register thunk_arg,
 
 5306                              int slots_to_drop_on_return,
 
 5309  using ER = ExternalReference;
 
 5311  Isolate* isolate = masm->isolate();
 
 5313      ER::handle_scope_next_address(isolate), 
no_reg);
 
 5315      ER::handle_scope_limit_address(isolate), 
no_reg);
 
 5317      ER::handle_scope_level_address(isolate), 
no_reg);
 
 5326  Register prev_next_address_reg = s0;
 
 5335                     scratch, scratch2, prev_next_address_reg, prev_limit_reg));
 
 5340                     scratch, scratch2, prev_next_address_reg, prev_limit_reg));
 
 5342                     scratch, scratch2, prev_next_address_reg, prev_limit_reg));
 
 5345                            "Allocate HandleScope in callee-save registers.");
 
 5346    __ Ld_d(prev_next_address_reg, next_mem_op);
 
 5347    __ Ld_d(prev_limit_reg, limit_mem_op);
 
 5348    __ Ld_w(prev_level_reg, level_mem_op);
 
 5349    __ Add_w(scratch, prev_level_reg, Operand(1));
 
 5350    __ St_w(scratch, level_mem_op);
 
  Label profiler_or_side_effects_check_enabled, done_api_call;

  if (with_profiling) {
    __ RecordComment("Check if profiler or side effects check is enabled");
    __ Branch(&profiler_or_side_effects_check_enabled, ne, scratch,

#ifdef V8_RUNTIME_CALL_STATS
    __ li(scratch, ER::address_of_runtime_stats_flag());
    __ Branch(&profiler_or_side_effects_check_enabled, ne, scratch,

  Label propagate_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  __ Ld_d(return_value, return_value_operand);

        "No more valid handles (the result handle was the last one)."
        "Restore previous handle scope.");
    __ St_d(prev_next_address_reg, next_mem_op);
      __ Ld_w(scratch, level_mem_op);
      __ Sub_w(scratch, scratch, Operand(1));
      __ Check(eq, AbortReason::kUnexpectedLevelAfterReturnFromApiCall, scratch,
               Operand(prev_level_reg));
    __ St_w(prev_level_reg, level_mem_op);
    __ Ld_d(scratch, limit_mem_op);
    __ Branch(&delete_allocated_handles, ne, prev_limit_reg, Operand(scratch));
 
  __ bind(&leave_exit_frame);

  Register argc_reg = prev_limit_reg;
  if (argc_operand != nullptr) {
    __ Ld_d(argc_reg, *argc_operand);

                            "Check if the function scheduled an exception.");
    __ LoadRoot(scratch, RootIndex::kTheHoleValue);
                          ER::exception_address(isolate), no_reg));
    __ Branch(&propagate_exception, ne, scratch, Operand(scratch2));
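// The exception check above loads the isolate's exception slot and compares
// it against the-hole, which doubles as the "no pending exception" sentinel:
// a single tagged-word compare replaces a separate boolean flag. A minimal
// sketch of the sentinel pattern (the constant below is a stand-in, not
// V8's actual the-hole value):
#include <cstdint>

constexpr uintptr_t kTheHoleSentinel = 0xdeadbeef;  // hypothetical sentinel

inline bool HasScheduledException(uintptr_t exception_slot) {
  // Mirrors Branch(&propagate_exception, ne, scratch, Operand(scratch2)).
  return exception_slot != kTheHoleSentinel;
}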
 
                 AbortReason::kAPICallReturnedInvalidObject);

  if (argc_operand == nullptr) {
    if (slots_to_drop_on_return != 0) {

  if (with_profiling) {
    __ bind(&profiler_or_side_effects_check_enabled);
    if (thunk_arg.is_valid()) {
          IsolateFieldId::kApiCallbackThunkArgument);
      __ St_d(thunk_arg, thunk_arg_mem_op);
    __ li(scratch, thunk_ref);
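// When profiling or the side-effects check is active, the call is redirected
// through a thunk: the callback argument is stashed in the isolate's
// kApiCallbackThunkArgument slot and thunk_ref is called instead, so the
// wrapper can record the invocation before forwarding. A hedged
// function-pointer sketch of the same shape (names are illustrative):
using ApiCallback = void (*)(void* info);

struct ThunkSlot {
  ApiCallback real_callback = nullptr;  // kApiCallbackThunkArgument stand-in
};

inline void ProfilingThunk(ThunkSlot* slot, void* info) {
  // ... record the callback invocation for the profiler ...
  slot->real_callback(info);  // forward to the stored callback
}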
 
  __ bind(&propagate_exception);

        masm, "HandleScope limit has changed. Delete allocated extensions.");
    __ bind(&delete_allocated_handles);
    __ St_d(prev_limit_reg, limit_mem_op);

    Register saved_result = prev_limit_reg;
    __ mov(saved_result, a0);
    __ jmp(&leave_exit_frame);
 