#if V8_TARGET_ARCH_S390X

#if V8_HOST_ARCH_S390X && !V8_OS_ZOS
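
// Returns whether the named CPU feature (e.g. "vx") is available, based on
// the hardware capabilities the platform reports (getauxval(AT_HWCAP) on
// Linux, a direct query on z/OS).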
static bool supportsCPUFeature(const char* feature) {
#if V8_OS_ZOS
  // z/OS only supports querying the vector facility ("vx").
  assert(strcmp(feature, "vx") == 0);
  return __is_vxf_available();
#else
  static std::set<std::string>& features = *new std::set<std::string>();
  static std::set<std::string>& all_available_features =
      *new std::set<std::string>({"iesan3", "zarch", "stfle", "msa", "ldisp",
                                  "eimm", "dfp", "etf3eh", "highgprs", "te",
                                  "vx"});
  if (features.empty()) {
#if V8_OS_LINUX
#define HWCAP_S390_VX 2048
#define CHECK_AVAILABILITY_FOR(mask, value) \
  if (f & mask) features.insert(value);

    // Populate the feature set from the kernel-reported hardware capabilities.
    uint64_t f = getauxval(AT_HWCAP);
    CHECK_AVAILABILITY_FOR(HWCAP_S390_ESAN3, "iesan3")
    CHECK_AVAILABILITY_FOR(HWCAP_S390_ZARCH, "zarch")
    CHECK_AVAILABILITY_FOR(HWCAP_S390_STFLE, "stfle")
    CHECK_AVAILABILITY_FOR(HWCAP_S390_MSA, "msa")
    CHECK_AVAILABILITY_FOR(HWCAP_S390_LDISP, "ldisp")
    CHECK_AVAILABILITY_FOR(HWCAP_S390_EIMM, "eimm")
    CHECK_AVAILABILITY_FOR(HWCAP_S390_DFP, "dfp")
    CHECK_AVAILABILITY_FOR(HWCAP_S390_ETF3EH, "etf3eh")
    CHECK_AVAILABILITY_FOR(HWCAP_S390_HIGH_GPRS, "highgprs")
    CHECK_AVAILABILITY_FOR(HWCAP_S390_TE, "te")
    CHECK_AVAILABILITY_FOR(HWCAP_S390_VX, "vx")
#else
    // HWCAP probing unavailable: assume every feature is present.
    features.insert(all_available_features.begin(),
                    all_available_features.end());
#endif
  }
  USE(all_available_features);
  return features.find(feature) != features.end();
#endif  // !V8_OS_ZOS
}
#undef CHECK_AVAILABILITY_FOR
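
// Checks whether the STFLE (store facility list extended) instruction is
// available, by reading the HWCAP entry of /proc/self/auxv (or querying the
// platform directly on z/OS).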
static bool supportsSTFLE() {
#if V8_OS_ZOS
  return __is_stfle_available();
#elif V8_HOST_ARCH_S390X
  static bool read_tried = false;
  static uint32_t auxv_hwcap = 0;

  if (!read_tried) {
    int fd = open("/proc/self/auxv", O_RDONLY);
    read_tried = true;
    if (fd != -1) {
      static Elf64_auxv_t buffer[16];
      Elf64_auxv_t* auxv_element;
      int bytes_read = 0;
      while (bytes_read >= 0) {
        bytes_read = read(fd, buffer, sizeof(buffer));
        // Locate the AT_HWCAP entry if it is in this chunk.
        for (auxv_element = buffer;
             auxv_element + sizeof(auxv_element) <= buffer + bytes_read &&
             auxv_element->a_type != AT_NULL;
             auxv_element++) {
          if (auxv_element->a_type == AT_HWCAP) {
            auxv_hwcap = auxv_element->a_un.a_val;
            goto done_reading;
          }
        }
      }
    done_reading:
      close(fd);
    }
  }

  // The AT_HWCAP entry was not found.
  if (0 == auxv_hwcap) {
    return false;
  }

  // HWCAP_S390_STFLE is defined as 4 in include/asm/elf.h.
  const uint32_t _HWCAP_S390_STFLE = 4;
  return (auxv_hwcap & _HWCAP_S390_STFLE);
#else
  return false;
#endif
}
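
// Runtime probing of CPU features. ProbeImpl() executes STFLE (when
// available) to read the facility bits and cross-checks the vector
// facilities against the kernel-reported "vx" capability.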
bool CpuFeatures::SupportsWasmSimd128() {
#if V8_ENABLE_WEBASSEMBLY
  return CpuFeatures::IsSupported(VECTOR_ENHANCE_FACILITY_1);
#else
  return false;
#endif
}

void CpuFeatures::ProbeImpl(bool cross_compile) {
  supported_ |= CpuFeaturesImpliedByCompiler();
  icache_line_size_ = 256;

  // Only use statically determined features for cross compile (snapshot).
  if (cross_compile) return;

  // Probe for additional features at runtime.
  static bool performSTFLE = supportsSTFLE();

#if V8_HOST_ARCH_S390X
  if (performSTFLE) {
    // STFLE stores a list of facility bits; GPR0 specifies the number of
    // doublewords to store minus one.
    int64_t facilities[3] = {0L};
    int16_t reg0;
#ifdef V8_OS_ZOS
    asm volatile(" stfle %0" : "=m"(facilities), __ZL_NR("+", r0)(reg0)::"cc");
#else
    asm volatile(
        "lgfi 0,2\n"               // request 3 doublewords of facility bits
        ".insn s,0xb2b00000,%0\n"  // STFLE - store facility list extended
        : "=Q"(facilities), "=r"(reg0)
        :
        : "cc", "r0");
#endif

    uint64_t one = static_cast<uint64_t>(1);
    // Bit 45 - Distinct Operands Facility.
    if (facilities[0] & (one << (63 - 45))) {
      supported_ |= (1u << DISTINCT_OPS);
    }
    // Bit 34 - General Instructions Extension Facility.
    if (facilities[0] & (one << (63 - 34))) {
      supported_ |= (1u << GENERAL_INSTR_EXT);
    }
    // Bit 37 - Floating Point Extension Facility.
    if (facilities[0] & (one << (63 - 37))) {
      supported_ |= (1u << FLOATING_POINT_EXT);
    }
    // Bit 129 - Vector Facility.
    if (facilities[2] & (one << (63 - (129 - 128))) &&
        supportsCPUFeature("vx")) {
      supported_ |= (1u << VECTOR_FACILITY);
    }
    // Bit 135 - Vector Enhancement Facility 1.
    if (facilities[2] & (one << (63 - (135 - 128))) &&
        supportsCPUFeature("vx")) {
      supported_ |= (1u << VECTOR_ENHANCE_FACILITY_1);
    }
    // Bit 148 - Vector Enhancement Facility 2.
    if (facilities[2] & (one << (63 - (148 - 128))) &&
        supportsCPUFeature("vx")) {
      supported_ |= (1u << VECTOR_ENHANCE_FACILITY_2);
    }
    // Bit 58 - Miscellaneous Instruction Extensions Facility 2.
    if (facilities[0] & (1lu << (63 - 58))) {
      supported_ |= (1u << MISC_INSTR_EXT2);
    }
  }
#else
  // Without host STFLE support, assume the simulated features are available.
  USE(performSTFLE);
  USE(supportsCPUFeature);
  supported_ |= (1u << VECTOR_ENHANCE_FACILITY_1);
  supported_ |= (1u << VECTOR_ENHANCE_FACILITY_2);
#endif
}
void CpuFeatures::PrintTarget() {
  const char* s390_arch = "s390x";
  PrintF("target %s\n", s390_arch);
}

void CpuFeatures::PrintFeatures() {
  PrintF("VECTOR_ENHANCE_FACILITY_1=%d\n",
         CpuFeatures::IsSupported(VECTOR_ENHANCE_FACILITY_1));
  PrintF("VECTOR_ENHANCE_FACILITY_2=%d\n",
         CpuFeatures::IsSupported(VECTOR_ENHANCE_FACILITY_2));
}
Register ToRegister(int num) {
  DCHECK(num >= 0 && num < kNumRegisters);
  const Register kRegisters[] = {r0, r1, r2,  r3, r4, r5,  r6,  r7,
                                 r8, r9, r10, fp, ip, r13, r14, sp};
  return kRegisters[num];
}
uint32_t RelocInfo::wasm_call_tag() const {
  return static_cast<uint32_t>(
      Assembler::target_address_at(pc_, constant_pool_));
}
Operand Operand::EmbeddedNumber(double value) {
  int32_t smi;
  if (DoubleToSmiInteger(value, &smi)) return Operand(Smi::FromInt(smi));
  Operand result(0, RelocInfo::FULL_EMBEDDED_OBJECT);
  result.is_heap_number_request_ = true;
  result.value_.heap_number_request = HeapNumberRequest(value);
  return result;
}

MemOperand::MemOperand(Register rn, int32_t offset)
    : baseRegister(rn), indexRegister(r0), offset_(offset) {}

MemOperand::MemOperand(Register rx, Register rb, int32_t offset)
    : baseRegister(rb), indexRegister(rx), offset_(offset) {}
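
// Allocates the heap numbers requested during assembly and patches the
// requesting instructions with the allocated objects' addresses.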
void Assembler::AllocateAndInstallRequestedHeapNumbers(LocalIsolate* isolate) {
  DCHECK_IMPLIES(isolate == nullptr, heap_number_requests_.empty());
  for (auto& request : heap_number_requests_) {
    Address pc = reinterpret_cast<Address>(buffer_start_) + request.offset();
    Handle<HeapObject> object =
        isolate->factory()->NewHeapNumber<AllocationType::kOld>(
            request.heap_number());
    set_target_address_at(pc, kNullAddress, object.address(), nullptr,
                          SKIP_ICACHE_FLUSH);
  }
}
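
// Relocation info is written backwards from the end of the buffer, while
// instructions are emitted forwards from the start.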
Assembler::Assembler(const AssemblerOptions& options,
                     std::unique_ptr<AssemblerBuffer> buffer)
    : AssemblerBase(options, std::move(buffer)),
      scratch_register_list_(DefaultTmpList()),
      scratch_double_register_list_(DefaultFPTmpList()) {
  reloc_info_writer.Reposition(buffer_start_ + buffer_->size(), pc_);
  relocations_.reserve(128);
}
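
// Finalizes the generated code and fills in the CodeDesc layout offsets
// (safepoint table, handler table, constant pool, code comments).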
void Assembler::GetCode(Isolate* isolate, CodeDesc* desc) {
  GetCode(isolate->main_thread_local_isolate(), desc);
}
void Assembler::GetCode(LocalIsolate* isolate, CodeDesc* desc,
                        SafepointTableBuilderBase* safepoint_table_builder,
                        int handler_table_offset) {
  DataAlign(InstructionStream::kMetadataAlignment);

  int code_comments_size = WriteCodeComments();

  AllocateAndInstallRequestedHeapNumbers(isolate);

  // Set up code descriptor.
  static constexpr int kConstantPoolSize = 0;
  static constexpr int kBuiltinJumpTableInfoSize = 0;
  const int instruction_size = pc_offset();
  const int builtin_jump_table_info_offset =
      instruction_size - kBuiltinJumpTableInfoSize;
  const int code_comments_offset =
      builtin_jump_table_info_offset - code_comments_size;
  const int constant_pool_offset = code_comments_offset - kConstantPoolSize;
  const int handler_table_offset2 = (handler_table_offset == kNoHandlerTable)
                                        ? constant_pool_offset
                                        : handler_table_offset;
  const int safepoint_table_offset =
      (safepoint_table_builder == kNoSafepointTable)
          ? handler_table_offset2
          : safepoint_table_builder->safepoint_table_offset();
  const int reloc_info_offset =
      static_cast<int>(reloc_info_writer.pos() - buffer_->start());
  CodeDesc::Initialize(desc, this, safepoint_table_offset,
                       handler_table_offset2, constant_pool_offset,
                       code_comments_offset, builtin_jump_table_info_offset,
                       reloc_info_offset);
}
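
// Pads the instruction stream with nops until pc_offset() is a multiple of m.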
void Assembler::Align(int m) {
  DCHECK(m >= 4 && base::bits::IsPowerOfTwo(m));
  while ((pc_offset() & (m - 1)) != 0) {
    nop(0);
  }
}

void Assembler::CodeTargetAlign() { Align(8); }
Condition Assembler::GetCondition(Instr instr) {
  switch (instr & kCondMask) {
bool Assembler::Is64BitLoadIntoIP(SixByteInstr instr1, SixByteInstr instr2) {
  return (((instr1 >> 32) == 0xC0C8) && ((instr2 >> 32) == 0xC0C9));
}
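
// Returns the position the linked instruction at pos refers to, or
// kEndOfChain if it terminates the label's link chain.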
int Assembler::target_at(int pos) {
  SixByteInstr instr = instr_at(pos);
  // Check which type of branch this is: 16- or 32-bit displacement.
  Opcode opcode = Instruction::S390OpcodeValue(buffer_start_ + pos);

  if (BRC == opcode || BRCT == opcode || BRCTG == opcode || BRXH == opcode) {
    int16_t imm16 = SIGN_EXT_IMM16((instr & kImm16Mask));
    imm16 <<= 1;  // immediate is in # of halfwords
    if (imm16 == 0) return kEndOfChain;
    return pos + imm16;
  } else if (LLILF == opcode || BRCL == opcode || LARL == opcode ||
             BRASL == opcode || LGRL == opcode) {
    int32_t imm32 =
        static_cast<int32_t>(instr & (static_cast<uint64_t>(0xFFFFFFFF)));
    if (LLILF != opcode)
      imm32 <<= 1;  // BR* and LARL treat the immediate as # of halfwords
    if (imm32 == 0) return kEndOfChain;
    return pos + imm32;
  } else if (BRXHG == opcode) {
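
// Patches the instruction at pos to refer to target_pos and reports whether
// the patched instruction is a branch.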
void Assembler::target_at_put(int pos, int target_pos, bool* is_branch) {
  SixByteInstr instr = instr_at(pos);
  Opcode opcode = Instruction::S390OpcodeValue(buffer_start_ + pos);

  if (is_branch != nullptr) {
    *is_branch =
        (opcode == BRC || opcode == BRCT || opcode == BRCTG || opcode == BRCL ||
         opcode == BRASL || opcode == BRXH || opcode == BRXHG);
  }

  if (BRC == opcode || BRCT == opcode || BRCTG == opcode || BRXH == opcode) {
    int16_t imm16 = target_pos - pos;
    instr &= (~0xFFFF);
    DCHECK(is_int16(imm16));
    instr_at_put<FourByteInstr>(pos, instr | (imm16 >> 1));
    return;
  } else if (BRCL == opcode || LARL == opcode || BRASL == opcode ||
             LGRL == opcode) {
    // Immediate is in # of halfwords.
    int32_t imm32 = target_pos - pos;
    instr &= (~static_cast<uint64_t>(0xFFFFFFFF));
    instr_at_put<SixByteInstr>(pos, instr | (imm32 >> 1));
    return;
  } else if (LLILF == opcode) {
    DCHECK(target_pos == kEndOfChain || target_pos >= 0);
    // Emitted label constant, not part of a branch; make it relative to the
    // InstructionStream pointer of the generated code object.
    int32_t imm32 =
        target_pos + (InstructionStream::kHeaderSize - kHeapObjectTag);
    instr &= (~static_cast<uint64_t>(0xFFFFFFFF));
    instr_at_put<SixByteInstr>(pos, instr | imm32);
    return;
  } else if (BRXHG == opcode) {
    // Immediate is in bits 16-31 of the 48-bit instruction.
    int32_t imm16 = target_pos - pos;
    instr &= (0xFFFF0000FFFF);  // clear bits 16-31
    imm16 &= 0xFFFF;            // clear high halfword
    imm16 <<= 16;
    // Immediate is in # of halfwords.
    instr_at_put<SixByteInstr>(pos, instr | (imm16 >> 1));
    return;
  }
}
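
// Returns how many bits of displacement the instruction at pos can encode.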
int Assembler::max_reach_from(int pos) {
  Opcode opcode = Instruction::S390OpcodeValue(buffer_start_ + pos);
  if (BRC == opcode || BRCT == opcode || BRCTG == opcode || BRXH == opcode ||
      BRXHG == opcode) {
    return 16;
  } else if (LLILF == opcode || BRCL == opcode || LARL == opcode ||
             BRASL == opcode || LGRL == opcode) {
    return 31;
  }
  return 16;
}
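
// Binds a label to pos and retargets every instruction on its link chain.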
void Assembler::bind_to(Label* L, int pos) {
  DCHECK(0 <= pos && pos <= pc_offset());  // must have a valid binding position
  bool is_branch = false;
  while (L->is_linked()) {
    int fixup_pos = L->pos();
#ifdef DEBUG
    int32_t offset = pos - fixup_pos;
    int maxReach = max_reach_from(fixup_pos);
#endif
    next(L);  // call next before overwriting link with target at fixup_pos
    DCHECK(is_intn(offset, maxReach));
    target_at_put(fixup_pos, pos, &is_branch);
  }
  L->bind_to(pos);

  // Keep track of the last bound label so we don't eliminate any instructions
  // before a bound label.
  if (pos > last_bound_pos_) last_bound_pos_ = pos;
}
void Assembler::bind(Label* L) {
  DCHECK(!L->is_bound());  // label can only be bound once
  bind_to(L, pc_offset());
}

void Assembler::next(Label* L) {
  DCHECK(L->is_linked());
  int link = target_at(L->pos());
  if (link == kEndOfChain) {
int Assembler::link(Label* L) {
  int position;
  if (L->is_bound()) {
    position = L->pos();
  } else {
    if (L->is_linked()) {
void Assembler::load_label_offset(Register r1, Label* L) {
  int target_pos;
  int constant;
  if (L->is_bound()) {
    target_pos = L->pos();
    constant = target_pos + (InstructionStream::kHeaderSize - kHeapObjectTag);
  } else {
    if (L->is_linked()) {
      target_pos = L->pos();  // L's link
    } else {
      // Mark the first reference with the current position; target_at()
      // converts this back to kEndOfChain.
      target_pos = pc_offset();
    }
    L->link_to(pc_offset());
    constant = target_pos - pc_offset();
  }
  llilf(r1, Operand(constant));
}
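
// Pseudo op: emits a short (brc) or long (brcl) conditional branch depending
// on whether the bound offset fits in a 16-bit halfword displacement.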
void Assembler::branchOnCond(Condition c, int branch_offset, bool is_bound,
                             bool force_long_branch) {
  int offset_in_halfwords = branch_offset / 2;
  if (is_bound && is_int16(offset_in_halfwords) && !force_long_branch) {
    brc(c, Operand(offset_in_halfwords));  // short branch
  } else {
    brcl(c, Operand(offset_in_halfwords));  // long branch
  }
}
void Assembler::stop(Condition cond, int32_t code, CRegister cr) {

void Assembler::bkpt(uint32_t imm16) {
void Assembler::nop(int type) {
  switch (type) {
    case DEBUG_BREAK_NOP:
      oill(r3, Operand::Zero());
      break;
    case BASR_CALL_TYPE_NOP:
      // ...
    case BRAS_CALL_TYPE_NOP:
      // ...
    case BRASL_CALL_TYPE_NOP:
      // ...
  }
}
void Assembler::larl(Register r1, Label* l) {
  larl(r1, Operand(branch_offset(l)));
}

void Assembler::lgrl(Register r1, Label* l) {
  lgrl(r1, Operand(branch_offset(l)));
}
void Assembler::EnsureSpaceFor(int space_needed) {
  if (buffer_space() <= (kGap + space_needed)) {
    GrowBuffer(space_needed);
  }
}
void Assembler::call(Handle<Code> target, RelocInfo::Mode rmode) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  EnsureSpace ensure_space(this);

  RecordRelocInfo(rmode);
  int32_t target_index = AddCodeTarget(target);
  brasl(r14, Operand(target_index));
}
void Assembler::jump(Handle<Code> target, RelocInfo::Mode rmode,
                     Condition cond) {
  DCHECK(RelocInfo::IsRelativeCodeTarget(rmode));
  EnsureSpace ensure_space(this);

  RecordRelocInfo(rmode);
  int32_t target_index = AddCodeTarget(target);
  brcl(cond, Operand(target_index));
}
bool Assembler::IsNop(SixByteInstr instr, int type) {
  DCHECK((0 == type) || (DEBUG_BREAK_NOP == type));
  if (DEBUG_BREAK_NOP == type) {
    return ((instr & 0xFFFFFFFF) == 0xA53B0000);  // oill r3, 0
  }
  return ((instr & 0xFFFF) == 0x1800);  // lr r0, r0
}
void Assembler::dumy(int r1, int x2, int b2, int d2) {
#if defined(USE_SIMULATOR)
  Opcode op = DUMY;
  uint64_t code = (static_cast<uint64_t>(op & 0xFF00)) * B32 |
                  (static_cast<uint64_t>(r1) & 0xF) * B36 |
                  (static_cast<uint64_t>(x2) & 0xF) * B32 |
                  (static_cast<uint64_t>(b2) & 0xF) * B28 |
                  (static_cast<uint64_t>(d2 & 0x0FFF)) * B16 |
                  (static_cast<uint64_t>(d2 & 0x0FF000)) >> 4 |
                  (static_cast<uint64_t>(op & 0x00FF));
  emit6bytes(code);
#endif
}
void Assembler::GrowBuffer(int needed) {
  DCHECK_EQ(buffer_start_, buffer_->start());

  // Compute the new buffer size.
  int old_size = buffer_->size();
  int new_size = std::min(2 * old_size, old_size + 1 * MB);
  int space = buffer_space() + (new_size - old_size);
  new_size += (space < needed) ? needed - space : 0;

  // Some internal data structures overflow for very large buffers,
  // so they must ensure that kMaximalBufferSize is not too large.
  if (new_size > kMaximalBufferSize) {
    V8::FatalProcessOutOfMemory(nullptr, "Assembler::GrowBuffer");
  }

  // Set up the new buffer.
  std::unique_ptr<AssemblerBuffer> new_buffer = buffer_->Grow(new_size);
  uint8_t* new_start = new_buffer->start();

  // Copy the data.
  intptr_t pc_delta = new_start - buffer_start_;
  intptr_t rc_delta = (new_start + new_size) - (buffer_start_ + old_size);
  size_t reloc_size = (buffer_start_ + old_size) - reloc_info_writer.pos();
  MemMove(new_start, buffer_start_, pc_offset());
  MemMove(reloc_info_writer.pos() + rc_delta, reloc_info_writer.pos(),
          reloc_size);

  // Switch buffers.
  buffer_ = std::move(new_buffer);
  buffer_start_ = new_start;
  pc_ += pc_delta;
  reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
                               reloc_info_writer.last_pc() + pc_delta);
}
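
// Raw data emitters: each writes a value of the given width at pc_ and
// advances pc_ past it.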
void Assembler::db(uint8_t data) {
  CheckBuffer();
  *reinterpret_cast<uint8_t*>(pc_) = data;
  pc_ += sizeof(uint8_t);
}

void Assembler::dh(uint16_t data) {
  CheckBuffer();
  *reinterpret_cast<uint16_t*>(pc_) = data;
  pc_ += sizeof(uint16_t);
}

void Assembler::dd(uint32_t data) {
  CheckBuffer();
  *reinterpret_cast<uint32_t*>(pc_) = data;
  pc_ += sizeof(uint32_t);
}

void Assembler::dq(uint64_t value) {
  CheckBuffer();
  *reinterpret_cast<uint64_t*>(pc_) = value;
  pc_ += sizeof(uint64_t);
}

void Assembler::dp(uintptr_t data) {
  CheckBuffer();
  *reinterpret_cast<uintptr_t*>(pc_) = data;
  pc_ += sizeof(uintptr_t);
}
void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
  if (!ShouldRecordRelocInfo(rmode)) return;
  DeferredRelocInfo rinfo(pc_offset(), rmode, data);
  relocations_.push_back(rinfo);
}
void Assembler::emit_label_addr(Label* label) {
  RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
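
// Writes out the deferred relocation entries, resolving internal references
// against the final buffer start address.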
void Assembler::EmitRelocations() {
  EnsureSpaceFor(relocations_.size() * kMaxRelocSize);

  for (std::vector<DeferredRelocInfo>::iterator it = relocations_.begin();
       it != relocations_.end(); it++) {
    RelocInfo::Mode rmode = it->rmode();
    Address pc = reinterpret_cast<Address>(buffer_start_) + it->position();
    RelocInfo rinfo(pc, rmode, it->data());

    // Fix up internal references now that they are guaranteed to be bound.
    if (RelocInfo::IsInternalReference(rmode)) {
      // Jump table entry.
      Address pos = Memory<Address>(pc);
      Memory<Address>(pc) = reinterpret_cast<Address>(buffer_start_) + pos;
    } else if (RelocInfo::IsInternalReferenceEncoded(rmode)) {
      // mov sequence.
      Address pos = target_address_at(pc, 0);
      set_target_address_at(pc, 0,
                            reinterpret_cast<Address>(buffer_start_) + pos,
                            nullptr, SKIP_ICACHE_FLUSH);
    }

    reloc_info_writer.Write(&rinfo);
  }
}
RegList Assembler::DefaultTmpList() { return {r1, ip}; }