v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
assembler-ia32.cc
Go to the documentation of this file.
1// Copyright (c) 1994-2006 Sun Microsystems Inc.
2// All Rights Reserved.
3//
4// Redistribution and use in source and binary forms, with or without
5// modification, are permitted provided that the following conditions
6// are met:
7//
8// - Redistributions of source code must retain the above copyright notice,
9// this list of conditions and the following disclaimer.
10//
11// - Redistribution in binary form must reproduce the above copyright
12// notice, this list of conditions and the following disclaimer in the
13// documentation and/or other materials provided with the
14// distribution.
15//
16// - Neither the name of Sun Microsystems or the names of contributors may
17// be used to endorse or promote products derived from this software without
18// specific prior written permission.
19//
20// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
23// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
24// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
25// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
26// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
28// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
29// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
31// OF THE POSSIBILITY OF SUCH DAMAGE.
32
33// The original source code covered by the above license has been modified
34// significantly by Google Inc.
35// Copyright 2012 the V8 project authors. All rights reserved.
36
38
39#include <cstring>
40
41#if V8_TARGET_ARCH_IA32
42
43#if V8_LIBC_MSVCRT
44#include <intrin.h> // _xgetbv()
45#endif
46#if V8_OS_DARWIN
47#include <sys/sysctl.h>
48#endif
49
50#include "src/base/bits.h"
51#include "src/base/cpu.h"
56#include "src/init/v8.h"
58
59namespace v8 {
60namespace internal {
61
62Immediate Immediate::EmbeddedNumber(double value) {
63 int32_t smi;
64 if (DoubleToSmiInteger(value, &smi)) return Immediate(Smi::FromInt(smi));
66 result.is_heap_number_request_ = true;
67 result.value_.heap_number_request = HeapNumberRequest(value);
68 return result;
69}
70
71// -----------------------------------------------------------------------------
72// Implementation of CpuFeatures
73
74namespace {
75
// Reads the given extended control register (XCR); used by OSHasAVXSupport()
// below to find out which register states the OS saves and restores.
V8_INLINE uint64_t xgetbv(unsigned int xcr) {
#if V8_LIBC_MSVCRT
  // MSVC runtime exposes the instruction as an intrinsic.
  return _xgetbv(xcr);
#else
  unsigned eax, edx;
  // Check xgetbv; this uses a .byte sequence instead of the instruction
  // directly because older assemblers do not include support for xgetbv and
  // there is no easy way to conditionally compile based on the assembler
  // used.
  __asm__ volatile(".byte 0x0F, 0x01, 0xD0" : "=a"(eax), "=d"(edx) : "c"(xcr));
  // Result is returned split across edx:eax.
  return static_cast<uint64_t>(eax) | (static_cast<uint64_t>(edx) << 32);
#endif
}
89
// Returns true when the operating system preserves AVX (YMM) register state,
// i.e. when it is actually safe to execute AVX instructions.
bool OSHasAVXSupport() {
#if V8_OS_DARWIN
  // Mac OS X up to 10.9 has a bug where AVX transitions were indeed being
  // caused by ISRs, so we detect that here and disable AVX in that case.
  char buffer[128];
  size_t buffer_size = arraysize(buffer);
  int ctl_name[] = {CTL_KERN, KERN_OSRELEASE};
  if (sysctl(ctl_name, 2, buffer, &buffer_size, nullptr, 0) != 0) {
    FATAL("V8 failed to get kernel version");
  }
  // The buffer now contains a string of the form XX.YY.ZZ, where
  // XX is the major kernel version component.
  char* period_pos = strchr(buffer, '.');
  DCHECK_NOT_NULL(period_pos);
  *period_pos = '\0';
  long kernel_version_major = strtol(buffer, nullptr, 10); // NOLINT
  // Darwin kernel 13 corresponds to OS X 10.9; older releases are affected.
  if (kernel_version_major <= 13) return false;
#endif // V8_OS_DARWIN
  // Check whether OS claims to support AVX.
  // Bits 1 and 2 of XCR0 cover SSE and AVX (YMM) state respectively; both
  // must be enabled for AVX code to be safe.
  uint64_t feature_mask = xgetbv(0); // XCR_XFEATURE_ENABLED_MASK
  return (feature_mask & 0x6) == 0x6;
}
112
113#undef _XCR_XFEATURE_ENABLED_MASK
114
115} // namespace
116
118#if V8_ENABLE_WEBASSEMBLY
119 if (IsSupported(SSE4_1)) return true;
120 if (v8_flags.wasm_simd_ssse3_codegen && IsSupported(SSSE3)) return true;
121#endif // V8_ENABLE_WEBASSEMBLY
122 return false;
123}
124
125void CpuFeatures::ProbeImpl(bool cross_compile) {
126 base::CPU cpu;
127 CHECK(cpu.has_sse2()); // SSE2 support is mandatory.
128 CHECK(cpu.has_cmov()); // CMOV support is mandatory.
129
130 // Only use statically determined features for cross compile (snapshot).
131 if (cross_compile) return;
132
133 if (cpu.has_sse42()) SetSupported(SSE4_2);
134 if (cpu.has_sse41()) SetSupported(SSE4_1);
135 if (cpu.has_ssse3()) SetSupported(SSSE3);
136 if (cpu.has_sse3()) SetSupported(SSE3);
137 if (cpu.has_avx() && cpu.has_osxsave() && OSHasAVXSupport()) {
138 SetSupported(AVX);
139 if (cpu.has_avx2()) SetSupported(AVX2);
140 if (cpu.has_fma3()) SetSupported(FMA3);
141 }
142
143 if (cpu.has_bmi1() && v8_flags.enable_bmi1) SetSupported(BMI1);
144 if (cpu.has_bmi2() && v8_flags.enable_bmi2) SetSupported(BMI2);
145 if (cpu.has_lzcnt() && v8_flags.enable_lzcnt) SetSupported(LZCNT);
146 if (cpu.has_popcnt() && v8_flags.enable_popcnt) SetSupported(POPCNT);
147 if (strcmp(v8_flags.mcpu, "auto") == 0) {
148 if (cpu.is_atom()) SetSupported(INTEL_ATOM);
149 } else if (strcmp(v8_flags.mcpu, "atom") == 0) {
150 SetSupported(INTEL_ATOM);
151 }
152
153 // Ensure that supported cpu features make sense. E.g. it is wrong to support
154 // AVX but not SSE4_2, if we have --enable-avx and --no-enable-sse4-2, the
155 // code above would set AVX to supported, and SSE4_2 to unsupported, then the
156 // checks below will set AVX to unsupported.
157 if (!v8_flags.enable_sse3) SetUnsupported(SSE3);
158 if (!v8_flags.enable_ssse3 || !IsSupported(SSE3)) SetUnsupported(SSSE3);
159 if (!v8_flags.enable_sse4_1 || !IsSupported(SSSE3)) SetUnsupported(SSE4_1);
160 if (!v8_flags.enable_sse4_2 || !IsSupported(SSE4_1)) SetUnsupported(SSE4_2);
161 if (!v8_flags.enable_avx || !IsSupported(SSE4_2)) SetUnsupported(AVX);
162 if (!v8_flags.enable_avx2 || !IsSupported(AVX)) SetUnsupported(AVX2);
163 if (!v8_flags.enable_fma3 || !IsSupported(AVX)) SetUnsupported(FMA3);
164
165 // Set a static value on whether Simd is supported.
166 // This variable is only used for certain archs to query SupportWasmSimd128()
167 // at runtime in builtins using an extern ref. Other callers should use
168 // CpuFeatures::SupportWasmSimd128().
170}
171
174 printf(
175 "SSE3=%d SSSE3=%d SSE4_1=%d AVX=%d AVX2=%d FMA3=%d BMI1=%d BMI2=%d "
176 "LZCNT=%d "
177 "POPCNT=%d ATOM=%d\n",
183 CpuFeatures::IsSupported(INTEL_ATOM));
184}
185
186// -----------------------------------------------------------------------------
187// Implementation of Displacement
188
// Initializes the displacement for an as-yet-unbound label: |next| is the
// previous position in the label's link chain (0 when unlinked).
void Displacement::init(Label* L, Type type) {
  DCHECK(!L->is_bound());
  int next = 0;
  if (L->is_linked()) {
    next = L->pos();
    DCHECK_GT(next, 0); // Displacements must be at positions > 0
  }
  // Ensure that we _never_ overflow the next field.
  // NOTE(review): |next| and |type| are computed but never stored here — the
  // line(s) that pack them into the displacement data appear to be missing
  // from this copy; confirm against the upstream source.
}
200
201// -----------------------------------------------------------------------------
202// Implementation of RelocInfo
203
204const int RelocInfo::kApplyMask =
209
211 // The deserializer needs to know whether a pointer is specially coded. Being
212 // specially coded on IA32 means that it is a relative address, as used by
213 // branch instructions. These are also the ones that need changing when a
214 // code object moves.
216}
217
218bool RelocInfo::IsInConstantPool() { return false; }
219
// Reads the 32-bit tag stored (unaligned) at this relocation's pc.
uint32_t RelocInfo::wasm_call_tag() const {
  // NOTE(review): upstream guards this with a DCHECK on rmode_; that check is
  // not visible in this copy — confirm it was not dropped intentionally.
  return ReadUnalignedValue<uint32_t>(pc_);
}
224
225// -----------------------------------------------------------------------------
226// Implementation of Operand
227
// Memory operand [base + disp]; chooses the shortest ModRM encoding that can
// represent the displacement / relocation.
Operand::Operand(Register base, int32_t disp, RelocInfo::Mode rmode) {
  // [base + disp/r]
  if (disp == 0 && RelocInfo::IsNoInfo(rmode) && base != ebp) {
    // [base] — mod 00, no displacement. ebp is excluded because mod 00 with
    // ebp means "disp32 only" in the ModRM encoding.
    set_modrm(0, base);
    // esp as a base always requires a SIB byte.
    if (base == esp) set_sib(times_1, esp, base);
  } else if (is_int8(disp) && RelocInfo::IsNoInfo(rmode)) {
    // [base + disp8] — mod 01 with an 8-bit displacement.
    set_modrm(1, base);
    if (base == esp) set_sib(times_1, esp, base);
    set_disp8(disp);
  } else {
    // [base + disp/r] — mod 10 with a full 32-bit displacement.
    set_modrm(2, base);
    if (base == esp) set_sib(times_1, esp, base);
    set_dispr(disp, rmode);
  }
}
246
// Memory operand [base + index*scale + disp]; always uses a SIB byte (the
// esp code in the ModRM r/m field selects SIB addressing).
Operand::Operand(Register base, Register index, ScaleFactor scale, int32_t disp,
                 RelocInfo::Mode rmode) {
  DCHECK(index != esp); // illegal addressing mode
  // [base + index*scale + disp/r]
  if (disp == 0 && RelocInfo::IsNoInfo(rmode) && base != ebp) {
    // [base + index*scale] — mod 00; ebp base would mean "disp32 follows".
    set_modrm(0, esp);
    set_sib(scale, index, base);
  } else if (is_int8(disp) && RelocInfo::IsNoInfo(rmode)) {
    // [base + index*scale + disp8] — mod 01.
    set_modrm(1, esp);
    set_sib(scale, index, base);
    set_disp8(disp);
  } else {
    // [base + index*scale + disp/r] — mod 10, 32-bit displacement.
    set_modrm(2, esp);
    set_sib(scale, index, base);
    set_dispr(disp, rmode);
  }
}
267
// Memory operand [index*scale + disp] with no base register; mod 00 + SIB
// with ebp as the base code means "no base, 32-bit displacement follows".
Operand::Operand(Register index, ScaleFactor scale, int32_t disp,
                 RelocInfo::Mode rmode) {
  DCHECK(index != esp); // illegal addressing mode
  // [index*scale + disp/r]
  set_modrm(0, esp);
  set_sib(scale, index, ebp);
  set_dispr(disp, rmode);
}
276
277bool Operand::is_reg_only() const {
278 return (buf_[0] & 0xF8) == 0xC0; // Addressing mode is register only.
279}
280
Register Operand::reg() const {
  // The low three bits of the ModRM byte hold the register code. Only
  // meaningful for register-only operands — upstream asserts is_reg_only()
  // here; that check is not visible in this copy.
  return Register::from_code(buf_[0] & 0x07);
}
285
286bool operator!=(Operand op, XMMRegister r) { return !op.is_reg(r); }
287
// Allocates the HeapNumbers recorded via Immediate::EmbeddedNumber and
// patches each request's recorded code offset with the resulting handle.
// |isolate| may only be null when no requests were recorded.
void Assembler::AllocateAndInstallRequestedHeapNumbers(LocalIsolate* isolate) {
  DCHECK_IMPLIES(isolate == nullptr, heap_number_requests_.empty());
  for (auto& request : heap_number_requests_) {
    Handle<HeapObject> object =
        isolate->factory()->NewHeapNumber<AllocationType::kOld>(
            request.heap_number());
    // The request stores the offset of the immediate field to patch.
    Address pc = reinterpret_cast<Address>(buffer_start_) + request.offset();
    WriteUnalignedValue(pc, object);
  }
}
298
299// -----------------------------------------------------------------------------
300// Implementation of Assembler.
301
302// Emit a single byte. Must always be inlined.
303#define EMIT(x) *pc_++ = (x)
304
// Positions the relocation writer at the end of the buffer (relocation info
// grows toward the instructions; see the no-overlap DCHECK in GetCode) and
// normalizes the SSE feature lattice: each higher level implies the lower.
Assembler::Assembler(const AssemblerOptions& options,
                     std::unique_ptr<AssemblerBuffer> buffer)
    : AssemblerBase(options, std::move(buffer)) {
  reloc_info_writer.Reposition(buffer_start_ + buffer_->size(), pc_);
  if (CpuFeatures::IsSupported(SSE4_2)) {
    EnableCpuFeature(SSE4_1);
  }
  if (CpuFeatures::IsSupported(SSE4_1)) {
    EnableCpuFeature(SSSE3);
  }
  if (CpuFeatures::IsSupported(SSSE3)) {
    EnableCpuFeature(SSE3);
  }
}
319
// Convenience overload: forwards to the LocalIsolate variant using the
// isolate's main-thread LocalIsolate.
void Assembler::GetCode(Isolate* isolate, CodeDesc* desc) {
  GetCode(isolate->main_thread_local_isolate(), desc);
}
// Finalizes assembly: flushes code comments, installs requested heap
// numbers, and fills in |desc| with the layout of the generated code.
void Assembler::GetCode(LocalIsolate* isolate, CodeDesc* desc,
                        SafepointTableBuilder* safepoint_table_builder,
                        int handler_table_offset) {
  // As a crutch to avoid having to add manual Align calls wherever we use a
  // raw workflow to create InstructionStream objects (mostly in tests), add
  // another Align call here. It does no harm - the end of the InstructionStream
  // object is aligned to the (larger) kCodeAlignment anyways.
  // TODO(jgruber): Consider moving responsibility for proper alignment to
  // metadata table builders (safepoint, handler, constant pool, code
  // comments).
  DataAlign(InstructionStream::kMetadataAlignment);

  const int code_comments_size = WriteCodeComments();

  // Finalize code (at this point overflow() may be true, but the gap ensures
  // that we are still not overlapping instructions and relocation info).
  DCHECK(pc_ <= reloc_info_writer.pos()); // No overlap.

  AllocateAndInstallRequestedHeapNumbers(isolate);

  // Set up code descriptor.
  // TODO(jgruber): Reconsider how these offsets and sizes are maintained up to
  // this point to make CodeDesc initialization less fiddly.

  // Metadata sections are laid out back-to-front from the end of the
  // instruction area: jump-table info, then code comments, then the (empty
  // on ia32) constant pool, then handler and safepoint tables.
  static constexpr int kConstantPoolSize = 0;
  static constexpr int kBuiltinJumpTableInfoSize = 0;
  const int instruction_size = pc_offset();
  const int builtin_jump_table_info_offset =
      instruction_size - kBuiltinJumpTableInfoSize;
  const int code_comments_offset =
      builtin_jump_table_info_offset - code_comments_size;
  const int constant_pool_offset = code_comments_offset - kConstantPoolSize;
  const int handler_table_offset2 = (handler_table_offset == kNoHandlerTable)
                                        ? constant_pool_offset
                                        : handler_table_offset;
  const int safepoint_table_offset =
      (safepoint_table_builder == kNoSafepointTable)
          ? handler_table_offset2
          : safepoint_table_builder->safepoint_table_offset();
  const int reloc_info_offset =
      static_cast<int>(reloc_info_writer.pos() - buffer_->start());
  CodeDesc::Initialize(desc, this, safepoint_table_offset,
                       handler_table_offset2, constant_pool_offset,
                       code_comments_offset, builtin_jump_table_info_offset,
                       reloc_info_offset);
}
369
// Collection pass of jump optimization: scan every recorded far jump and
// remember those whose 32-bit displacement already fits in 8 bits — these
// could be shrunk to short jumps when the code is re-assembled.
void Assembler::FinalizeJumpOptimizationInfo() {
  // Collection stage
  auto jump_opt = jump_optimization_info();
  if (jump_opt && jump_opt->is_collecting()) {
    auto& dict = jump_opt->may_optimizable_farjmp;
    int num = static_cast<int>(jump_opt->farjmps.size());
    // Only populate the dictionary once.
    if (num && dict.empty()) {
      bool can_opt = false;
      for (int i = 0; i < num; i++) {
        auto jmp_info = jump_opt->farjmps[i];
        // The 32-bit displacement is stored right after the opcode bytes.
        int disp = long_at(jmp_info.pos + jmp_info.opcode_size);
        if (is_int8(disp)) {
          jmp_info.distance = disp;
          dict[i] = jmp_info;
          can_opt = true;
        }
      }
      if (can_opt) {
        jump_opt->set_optimizable();
      }
    }
  }
}
393
394void Assembler::Align(int m) {
395 DCHECK(base::bits::IsPowerOfTwo(m));
396 int mask = m - 1;
397 int addr = pc_offset();
398 Nop((m - (addr & mask)) & mask);
399}
400
401bool Assembler::IsNop(Address addr) {
402 uint8_t* a = reinterpret_cast<uint8_t*>(addr);
403 while (*a == 0x66) a++;
404 if (*a == 0x90) return true;
405 if (a[0] == 0xF && a[1] == 0x1F) return true;
406 return false;
407}
408
// Emits exactly |bytes| bytes of padding using recommended multi-byte nop
// encodings. Runs longer than 11 bytes are emitted 8-11 bytes per loop
// iteration via the default case falling through to case 8.
void Assembler::Nop(int bytes) {
  EnsureSpace ensure_space(this);
  // Multi byte nops from http://support.amd.com/us/Processor_TechDocs/40546.pdf
  while (bytes > 0) {
    switch (bytes) {
      case 2:
        // 0x66 operand-size prefix widens the following nop by one byte.
        EMIT(0x66);
        [[fallthrough]];
      case 1:
        EMIT(0x90);
        return;
      case 3:
        EMIT(0xF);
        EMIT(0x1F);
        EMIT(0);
        return;
      case 4:
        EMIT(0xF);
        EMIT(0x1F);
        EMIT(0x40);
        EMIT(0);
        return;
      case 6:
        EMIT(0x66);
        [[fallthrough]];
      case 5:
        EMIT(0xF);
        EMIT(0x1F);
        EMIT(0x44);
        EMIT(0);
        EMIT(0);
        return;
      case 7:
        EMIT(0xF);
        EMIT(0x1F);
        EMIT(0x80);
        EMIT(0);
        EMIT(0);
        EMIT(0);
        EMIT(0);
        return;
      default:
      // 9-11 bytes: prepend one 0x66 prefix per extra byte, then fall
      // through to the 8-byte long nop.
      case 11:
        EMIT(0x66);
        bytes--;
        [[fallthrough]];
      case 10:
        EMIT(0x66);
        bytes--;
        [[fallthrough]];
      case 9:
        EMIT(0x66);
        bytes--;
        [[fallthrough]];
      case 8:
        EMIT(0xF);
        EMIT(0x1F);
        EMIT(0x84);
        EMIT(0);
        EMIT(0);
        EMIT(0);
        EMIT(0);
        EMIT(0);
        bytes -= 8;
    }
  }
}
476
// Aligns the current position to 16 bytes and, during the jump-optimization
// collection pass, records the request so re-assembly can reproduce it.
void Assembler::CodeTargetAlign() {
  Align(16); // Preferred alignment of jump targets on ia32.
  auto jump_opt = jump_optimization_info();
  if (jump_opt && jump_opt->is_collecting()) {
    jump_opt->align_pos_size[pc_offset()] = 16;
  }
}
484
// cpuid — opcode 0F A2.
void Assembler::cpuid() {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xA2);
}
490
// pushad — push all general-purpose registers (opcode 0x60).
void Assembler::pushad() {
  EnsureSpace ensure_space(this);
  EMIT(0x60);
}

// popad — pop all general-purpose registers (opcode 0x61).
void Assembler::popad() {
  EnsureSpace ensure_space(this);
  EMIT(0x61);
}

// pushfd — push EFLAGS (opcode 0x9C).
void Assembler::pushfd() {
  EnsureSpace ensure_space(this);
  EMIT(0x9C);
}

// popfd — pop EFLAGS (opcode 0x9D).
void Assembler::popfd() {
  EnsureSpace ensure_space(this);
  EMIT(0x9D);
}
510
// push imm — uses the sign-extended 8-bit form (6A ib) when the value fits,
// otherwise the 32-bit form (68 id).
void Assembler::push(const Immediate& x) {
  EnsureSpace ensure_space(this);
  if (x.is_int8()) {
    EMIT(0x6A);
    EMIT(x.immediate());
  } else {
    EMIT(0x68);
    emit(x);
  }
}

// push imm32 — always the full 32-bit form (68 id).
void Assembler::push_imm32(int32_t imm32) {
  EnsureSpace ensure_space(this);
  EMIT(0x68);
  emit(imm32);
}

// push r32 — single-byte form 50+rd.
void Assembler::push(Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x50 | src.code());
}

// push r/m32 — FF /6 (esi's code 6 supplies the /digit field).
void Assembler::push(Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xFF);
  emit_operand(esi, src);
}

// pop r32 — single-byte form 58+rd.
void Assembler::pop(Register dst) {
  DCHECK_NOT_NULL(reloc_info_writer.last_pc());
  EnsureSpace ensure_space(this);
  EMIT(0x58 | dst.code());
}

// pop r/m32 — 8F /0 (eax's code 0 supplies the /digit field).
void Assembler::pop(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0x8F);
  emit_operand(eax, dst);
}

// leave — tear down the ebp frame (opcode 0xC9).
void Assembler::leave() {
  EnsureSpace ensure_space(this);
  EMIT(0xC9);
}
555
// mov r8, r/m8 — opcode 8A /r.
void Assembler::mov_b(Register dst, Operand src) {
  CHECK(dst.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x8A);
  emit_operand(dst, src);
}

// mov r/m8, imm8 — opcode C6 /0.
void Assembler::mov_b(Operand dst, const Immediate& src) {
  EnsureSpace ensure_space(this);
  EMIT(0xC6);
  emit_operand(eax, dst);
  EMIT(static_cast<int8_t>(src.immediate()));
}

// mov r/m8, r8 — opcode 88 /r.
void Assembler::mov_b(Operand dst, Register src) {
  CHECK(src.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x88);
  emit_operand(src, dst);
}

// mov r16, r/m16 — 66-prefixed 8B /r.
void Assembler::mov_w(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x8B);
  emit_operand(dst, src);
}

// mov r/m16, r16 — 66-prefixed 89 /r.
void Assembler::mov_w(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x89);
  emit_operand(src, dst);
}

// mov r/m16, imm16 — 66-prefixed C7 /0; immediate emitted little-endian.
void Assembler::mov_w(Operand dst, const Immediate& src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0xC7);
  emit_operand(eax, dst);
  EMIT(static_cast<int8_t>(src.immediate() & 0xFF));
  EMIT(static_cast<int8_t>(src.immediate() >> 8));
}
599
// mov r32, imm32 — B8+rd id.
void Assembler::mov(Register dst, int32_t imm32) {
  EnsureSpace ensure_space(this);
  EMIT(0xB8 | dst.code());
  emit(imm32);
}

// mov r32, imm — B8+rd with an Immediate (may carry relocation info).
void Assembler::mov(Register dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  EMIT(0xB8 | dst.code());
  emit(x);
}

// mov r32, handle — B8+rd with an embedded heap object.
void Assembler::mov(Register dst, Handle<HeapObject> handle) {
  EnsureSpace ensure_space(this);
  EMIT(0xB8 | dst.code());
  emit(handle);
}

// mov r32, r/m32 — 8B /r.
void Assembler::mov(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x8B);
  emit_operand(dst, src);
}

// mov r32, r32 — 89 /r, register-direct ModRM (mod 11, src in reg field).
void Assembler::mov(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x89);
  EMIT(0xC0 | src.code() << 3 | dst.code());
}

// mov r/m32, imm — C7 /0.
void Assembler::mov(Operand dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  EMIT(0xC7);
  emit_operand(eax, dst);
  emit(x);
}

// mov r/m32, address — C7 /0 with explicit relocation mode.
void Assembler::mov(Operand dst, Address src, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  EMIT(0xC7);
  emit_operand(eax, dst);
  emit(src, rmode);
}

// mov r/m32, handle — C7 /0 with an embedded heap object.
void Assembler::mov(Operand dst, Handle<HeapObject> handle) {
  EnsureSpace ensure_space(this);
  EMIT(0xC7);
  emit_operand(eax, dst);
  emit(handle);
}

// mov r/m32, r32 — 89 /r.
void Assembler::mov(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x89);
  emit_operand(src, dst);
}
656
// movsx r32, r/m8 — sign-extend byte, opcode 0F BE /r.
void Assembler::movsx_b(Register dst, Operand src) {
  DCHECK_IMPLIES(src.is_reg_only(), src.reg().is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xBE);
  emit_operand(dst, src);
}

// movsx r32, r/m16 — sign-extend word, opcode 0F BF /r.
void Assembler::movsx_w(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xBF);
  emit_operand(dst, src);
}

// movzx r32, r/m8 — zero-extend byte, opcode 0F B6 /r.
void Assembler::movzx_b(Register dst, Operand src) {
  DCHECK_IMPLIES(src.is_reg_only(), src.reg().is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xB6);
  emit_operand(dst, src);
}

// movzx r32, r/m16 — zero-extend word, opcode 0F B7 /r.
void Assembler::movzx_w(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xB7);
  emit_operand(dst, src);
}
686
// movq xmm, m64 — F3 0F 7E /r.
void Assembler::movq(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x7E);
  emit_operand(dst, src);
}

// movq m64, xmm — 66 0F D6 /r.
void Assembler::movq(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xD6);
  emit_operand(src, dst);
}
702
// cmovcc r32, r/m32 — conditional move; the condition code is folded into
// the second opcode byte.
void Assembler::cmov(Condition cc, Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  // Opcode: 0f 40 + cc /r.
  EMIT(0x0F);
  EMIT(0x40 + cc);
  emit_operand(dst, src);
}
710
// cld — clear direction flag (opcode 0xFC).
void Assembler::cld() {
  EnsureSpace ensure_space(this);
  EMIT(0xFC);
}

// rep movs — repeat dword string move (F3 A5).
void Assembler::rep_movs() {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0xA5);
}

// rep stos — repeat dword store (F3 AB).
void Assembler::rep_stos() {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0xAB);
}

// stos — single dword store (opcode 0xAB).
void Assembler::stos() {
  EnsureSpace ensure_space(this);
  EMIT(0xAB);
}
732
// xadd r/m32, r32 — 0F C1 /r.
void Assembler::xadd(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xC1);
  emit_operand(src, dst);
}

// xadd r/m8, r8 — 0F C0 /r.
void Assembler::xadd_b(Operand dst, Register src) {
  DCHECK(src.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xC0);
  emit_operand(src, dst);
}

// xadd r/m16, r16 — 66-prefixed 0F C1 /r.
void Assembler::xadd_w(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xC1);
  emit_operand(src, dst);
}
755
// xchg r32, r32 — uses the 90+rd short form when one side is eax, otherwise
// 87 /r with a register-direct ModRM byte.
void Assembler::xchg(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  if (src == eax || dst == eax) { // Single-byte encoding.
    EMIT(0x90 | (src == eax ? dst.code() : src.code()));
  } else {
    EMIT(0x87);
    EMIT(0xC0 | src.code() << 3 | dst.code());
  }
}

// xchg r32, r/m32 — 87 /r.
void Assembler::xchg(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x87);
  emit_operand(dst, src);
}

// xchg r8, r/m8 — 86 /r.
void Assembler::xchg_b(Register reg, Operand op) {
  DCHECK(reg.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x86);
  emit_operand(reg, op);
}

// xchg r16, r/m16 — 66-prefixed 87 /r.
void Assembler::xchg_w(Register reg, Operand op) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x87);
  emit_operand(reg, op);
}
785
// lock — bus-lock prefix (0xF0) for the following instruction.
void Assembler::lock() {
  EnsureSpace ensure_space(this);
  EMIT(0xF0);
}

// cmpxchg r/m32, r32 — 0F B1 /r.
void Assembler::cmpxchg(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xB1);
  emit_operand(src, dst);
}

// cmpxchg r/m8, r8 — 0F B0 /r.
void Assembler::cmpxchg_b(Operand dst, Register src) {
  DCHECK(src.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xB0);
  emit_operand(src, dst);
}

// cmpxchg r/m16, r16 — 66-prefixed 0F B1 /r.
void Assembler::cmpxchg_w(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xB1);
  emit_operand(src, dst);
}

// cmpxchg8b m64 — 0F C7 /1 (ecx's code 1 supplies the /digit field).
// (sic: "enure_space" is an upstream variable-name typo; harmless.)
void Assembler::cmpxchg8b(Operand dst) {
  EnsureSpace enure_space(this);
  EMIT(0x0F);
  EMIT(0xC7);
  emit_operand(ecx, dst);
}
820
// mfence — 0F AE F0.
void Assembler::mfence() {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xAE);
  EMIT(0xF0);
}

// lfence — 0F AE E8.
void Assembler::lfence() {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xAE);
  EMIT(0xE8);
}

// pause — spin-wait hint, F3 90.
void Assembler::pause() {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x90);
}
840
// adc r32, imm32 — arithmetic group /2.
void Assembler::adc(Register dst, int32_t imm32) {
  EnsureSpace ensure_space(this);
  emit_arith(2, Operand(dst), Immediate(imm32));
}

// adc r32, r/m32 — 13 /r.
void Assembler::adc(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x13);
  emit_operand(dst, src);
}

// add r32, r/m32 — 03 /r.
void Assembler::add(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x03);
  emit_operand(dst, src);
}

// add r/m32, r32 — 01 /r.
void Assembler::add(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x01);
  emit_operand(src, dst);
}

// add r/m32, imm — arithmetic group /0.
void Assembler::add(Operand dst, const Immediate& x) {
  DCHECK_NOT_NULL(reloc_info_writer.last_pc());
  EnsureSpace ensure_space(this);
  emit_arith(0, dst, x);
}
869
// and r32, imm32 — forwards to the Immediate overload.
void Assembler::and_(Register dst, int32_t imm32) {
  and_(dst, Immediate(imm32));
}

// and r32, imm — arithmetic group /4.
void Assembler::and_(Register dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  emit_arith(4, Operand(dst), x);
}

// and r32, r/m32 — 23 /r.
void Assembler::and_(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x23);
  emit_operand(dst, src);
}

// and r/m32, imm — arithmetic group /4.
void Assembler::and_(Operand dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  emit_arith(4, dst, x);
}

// and r/m32, r32 — 21 /r.
void Assembler::and_(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x21);
  emit_operand(src, dst);
}
895
// cmp r/m8, imm8 — short form 3C ib when the operand is al, otherwise
// 80 /7 ib.
void Assembler::cmpb(Operand op, Immediate imm8) {
  DCHECK(imm8.is_int8() || imm8.is_uint8());
  EnsureSpace ensure_space(this);
  if (op.is_reg(eax)) {
    EMIT(0x3C);
  } else {
    EMIT(0x80);
    emit_operand(edi, op); // edi == 7
  }
  emit_b(imm8);
}

// cmp r/m8, r8 — 38 /r.
void Assembler::cmpb(Operand op, Register reg) {
  CHECK(reg.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x38);
  emit_operand(reg, op);
}

// cmp r8, r/m8 — 3A /r.
void Assembler::cmpb(Register reg, Operand op) {
  CHECK(reg.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x3A);
  emit_operand(reg, op);
}

// cmp r/m16, imm16 — 66-prefixed 81 /7 iw.
void Assembler::cmpw(Operand op, Immediate imm16) {
  DCHECK(imm16.is_int16() || imm16.is_uint16());
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x81);
  emit_operand(edi, op);
  emit_w(imm16);
}

// cmp r16, r/m16 — 66-prefixed 3B /r.
void Assembler::cmpw(Register reg, Operand op) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x3B);
  emit_operand(reg, op);
}

// cmp r/m16, r16 — 66-prefixed 39 /r.
void Assembler::cmpw(Operand op, Register reg) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x39);
  emit_operand(reg, op);
}
944
// cmp r32, imm32 — arithmetic group /7.
void Assembler::cmp(Register reg, int32_t imm32) {
  EnsureSpace ensure_space(this);
  emit_arith(7, Operand(reg), Immediate(imm32));
}

// cmp r32, handle — arithmetic group /7 with embedded object.
void Assembler::cmp(Register reg, Handle<HeapObject> handle) {
  EnsureSpace ensure_space(this);
  emit_arith(7, Operand(reg), Immediate(handle));
}

// cmp r32, r/m32 — 3B /r.
void Assembler::cmp(Register reg, Operand op) {
  EnsureSpace ensure_space(this);
  EMIT(0x3B);
  emit_operand(reg, op);
}

// cmp r/m32, r32 — 39 /r.
void Assembler::cmp(Operand op, Register reg) {
  EnsureSpace ensure_space(this);
  EMIT(0x39);
  emit_operand(reg, op);
}

// cmp r/m32, imm — arithmetic group /7.
void Assembler::cmp(Operand op, const Immediate& imm) {
  EnsureSpace ensure_space(this);
  emit_arith(7, op, imm);
}

// cmp r/m32, handle — arithmetic group /7 with embedded object.
void Assembler::cmp(Operand op, Handle<HeapObject> handle) {
  EnsureSpace ensure_space(this);
  emit_arith(7, op, Immediate(handle));
}

// cmp r/m8, al — 38 /r with al (register code 0) as the source.
void Assembler::cmpb_al(Operand op) {
  EnsureSpace ensure_space(this);
  EMIT(0x38);            // CMP r/m8, r8
  emit_operand(eax, op); // eax has same code as register al.
}

// cmp r/m16, ax — 66-prefixed 39 /r with ax as the source.
void Assembler::cmpw_ax(Operand op) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x39);            // CMP r/m16, r16
  emit_operand(eax, op); // eax has same code as register ax.
}
989
// dec r8 — FE /1, register-direct form (C8+code).
void Assembler::dec_b(Register dst) {
  CHECK(dst.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0xFE);
  EMIT(0xC8 | dst.code());
}

// dec r/m8 — FE /1 (ecx's code 1 supplies the /digit field).
void Assembler::dec_b(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xFE);
  emit_operand(ecx, dst);
}

// dec r32 — single-byte form 48+rd.
void Assembler::dec(Register dst) {
  EnsureSpace ensure_space(this);
  EMIT(0x48 | dst.code());
}

// dec r/m32 — FF /1.
void Assembler::dec(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xFF);
  emit_operand(ecx, dst);
}

// cdq — sign-extend eax into edx:eax (opcode 0x99).
void Assembler::cdq() {
  EnsureSpace ensure_space(this);
  EMIT(0x99);
}
1018
// idiv r/m32 — signed divide edx:eax, F7 /7.
void Assembler::idiv(Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  emit_operand(edi, src);
}

// div r/m32 — unsigned divide edx:eax, F7 /6.
void Assembler::div(Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  emit_operand(esi, src);
}
1030
// imul r32 — one-operand form F7 /5 (result in edx:eax).
void Assembler::imul(Register reg) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  EMIT(0xE8 | reg.code());
}

// imul r32, r/m32 — two-operand form 0F AF /r.
void Assembler::imul(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xAF);
  emit_operand(dst, src);
}

// imul r32, r32, imm32 — forwards to the Operand overload.
void Assembler::imul(Register dst, Register src, int32_t imm32) {
  imul(dst, Operand(src), imm32);
}

// imul r32, r/m32, imm — 6B /r ib when the immediate fits in 8 bits,
// otherwise 69 /r id.
void Assembler::imul(Register dst, Operand src, int32_t imm32) {
  EnsureSpace ensure_space(this);
  if (is_int8(imm32)) {
    EMIT(0x6B);
    emit_operand(dst, src);
    EMIT(imm32);
  } else {
    EMIT(0x69);
    emit_operand(dst, src);
    emit(imm32);
  }
}
1060
// inc r32 — single-byte form 40+rd.
void Assembler::inc(Register dst) {
  EnsureSpace ensure_space(this);
  EMIT(0x40 | dst.code());
}

// inc r/m32 — FF /0.
void Assembler::inc(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xFF);
  emit_operand(eax, dst);
}

// lea r32, m — 8D /r.
void Assembler::lea(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x8D);
  emit_operand(dst, src);
}
1077
// lea r32, [src + label_offset] — emits a hand-built mod-10 ModRM byte with
// a 32-bit displacement that is resolved from the label.
void Assembler::lea(Register dst, Register src, Label* lbl) {
  EnsureSpace ensure_space(this);
  EMIT(0x8D);

  // ModRM byte for dst,[src]+disp32.
  EMIT(((0x2) << 6) | (dst.code() << 3) | src.code());

  if (lbl->is_bound()) {
    // Bound labels must lie at or before the current position (offset <= 0).
    int offs = lbl->pos() - (pc_offset() + sizeof(int32_t));
    DCHECK_LE(offs, 0);
    emit(offs);
  } else {
    // Unbound: link this displacement into the label's chain for later fixup.
    emit_disp(lbl, Displacement::OTHER);
  }
}
1093
// mul r32 — unsigned multiply with eax, F7 /4.
void Assembler::mul(Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  EMIT(0xE0 | src.code());
}

// neg r32 — F7 /3, register-direct form.
void Assembler::neg(Register dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  EMIT(0xD8 | dst.code());
}

// neg r/m32 — F7 /3 (ebx's code 3 supplies the /digit field).
void Assembler::neg(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  emit_operand(ebx, dst);
}

// not r32 — F7 /2, register-direct form.
void Assembler::not_(Register dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  EMIT(0xD0 | dst.code());
}

// not r/m32 — F7 /2 (edx's code 2 supplies the /digit field).
void Assembler::not_(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  emit_operand(edx, dst);
}
1123
// or r32, imm32 — arithmetic group /1.
void Assembler::or_(Register dst, int32_t imm32) {
  EnsureSpace ensure_space(this);
  emit_arith(1, Operand(dst), Immediate(imm32));
}

// or r32, r/m32 — 0B /r.
void Assembler::or_(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0B);
  emit_operand(dst, src);
}

// or r/m32, imm — arithmetic group /1.
void Assembler::or_(Operand dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  emit_arith(1, dst, x);
}

// or r/m32, r32 — 09 /r.
void Assembler::or_(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x09);
  emit_operand(src, dst);
}
1145
// rcl r32, imm8 — rotate-through-carry left; shift-by-1 short form D1 /2,
// otherwise C1 /2 ib.
void Assembler::rcl(Register dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  DCHECK(is_uint5(imm8)); // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    EMIT(0xD0 | dst.code());
  } else {
    EMIT(0xC1);
    EMIT(0xD0 | dst.code());
    EMIT(imm8);
  }
}

// rcr r32, imm8 — rotate-through-carry right; D1 /3 for count 1, else
// C1 /3 ib.
void Assembler::rcr(Register dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  DCHECK(is_uint5(imm8)); // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    EMIT(0xD8 | dst.code());
  } else {
    EMIT(0xC1);
    EMIT(0xD8 | dst.code());
    EMIT(imm8);
  }
}
1171
// rol: rotate left by an immediate count (D1 /0 for count 1, else C1 /0 ib).
// eax (code 0) supplies the /0 opcode extension, it is not an operand.
void Assembler::rol(Operand dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  DCHECK(is_uint5(imm8));  // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    emit_operand(eax, dst);
  } else {
    EMIT(0xC1);
    emit_operand(eax, dst);
    EMIT(imm8);
  }
}

// rol_cl: rotate left by the count held in CL (D3 /0).
void Assembler::rol_cl(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xD3);
  emit_operand(eax, dst);
}

// ror: rotate right by an immediate count (D1 /1 or C1 /1 ib);
// ecx (code 1) encodes the /1 extension.
void Assembler::ror(Operand dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  DCHECK(is_uint5(imm8));  // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    emit_operand(ecx, dst);
  } else {
    EMIT(0xC1);
    emit_operand(ecx, dst);
    EMIT(imm8);
  }
}

// ror_cl: rotate right by the count held in CL (D3 /1).
void Assembler::ror_cl(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xD3);
  emit_operand(ecx, dst);
}
1209
// sar: arithmetic right shift by an immediate count (D1 /7 or C1 /7 ib);
// edi (code 7) encodes the /7 opcode extension.
void Assembler::sar(Operand dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  DCHECK(is_uint5(imm8));  // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    emit_operand(edi, dst);
  } else {
    EMIT(0xC1);
    emit_operand(edi, dst);
    EMIT(imm8);
  }
}

// sar_cl: arithmetic right shift by the count held in CL (D3 /7).
void Assembler::sar_cl(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xD3);
  emit_operand(edi, dst);
}

// sbb: subtract with borrow, reg -= r/m + CF (1B /r).
void Assembler::sbb(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x1B);
  emit_operand(dst, src);
}
1234
// shld: double-precision shift left by an immediate (0F A4 /r ib);
// dst is the r/m operand, src occupies the ModR/M reg field.
void Assembler::shld(Register dst, Register src, uint8_t shift) {
  DCHECK(is_uint5(shift));
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xA4);
  emit_operand(src, Operand(dst));
  EMIT(shift);
}

// shld_cl: double-precision shift left by the count held in CL (0F A5 /r).
void Assembler::shld_cl(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xA5);
  emit_operand(src, Operand(dst));
}
1250
1251void Assembler::shl(Operand dst, uint8_t imm8) {
1252 EnsureSpace ensure_space(this);
1253 DCHECK(is_uint5(imm8)); // illegal shift count
1254 if (imm8 == 1) {
1255 EMIT(0xD1);
1256 emit_operand(esp, dst);
1257 } else {
1258 EMIT(0xC1);
1259 emit_operand(esp, dst);
1260 EMIT(imm8);
1261 }
1262}
1263
// shl_cl: logical left shift by the count held in CL (D3 /4).
void Assembler::shl_cl(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xD3);
  emit_operand(esp, dst);
}

// shr: logical right shift by an immediate count (D1 /5 or C1 /5 ib);
// ebp (code 5) encodes the /5 opcode extension.
void Assembler::shr(Operand dst, uint8_t imm8) {
  EnsureSpace ensure_space(this);
  DCHECK(is_uint5(imm8));  // illegal shift count
  if (imm8 == 1) {
    EMIT(0xD1);
    emit_operand(ebp, dst);
  } else {
    EMIT(0xC1);
    emit_operand(ebp, dst);
    EMIT(imm8);
  }
}

// shr_cl: logical right shift by the count held in CL (D3 /5).
void Assembler::shr_cl(Operand dst) {
  EnsureSpace ensure_space(this);
  EMIT(0xD3);
  emit_operand(ebp, dst);
}
1288
// shrd: double-precision shift right by an immediate (0F AC /r ib);
// dst is the r/m operand, src occupies the ModR/M reg field.
void Assembler::shrd(Register dst, Register src, uint8_t shift) {
  DCHECK(is_uint5(shift));
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xAC);
  emit_operand(src, Operand(dst));
  EMIT(shift);
}

// shrd_cl: double-precision shift right by the count held in CL (0F AD /r).
void Assembler::shrd_cl(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xAD);
  emit_operand(src, dst);
}
1304
// sub: r/m -= imm, via the generic arith encoder with /5 (SUB) extension.
void Assembler::sub(Operand dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  emit_arith(5, dst, x);
}

// sub: reg -= r/m (2B /r).
void Assembler::sub(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x2B);
  emit_operand(dst, src);
}

// sub: r/m -= reg (29 /r).
void Assembler::sub(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x29);
  emit_operand(src, dst);
}

// sub_sp_32: esp -= imm, always encoded as 81 /5 id with a full 32-bit
// immediate (bypassing emit_arith, which could pick the short 83 form),
// so the encoding length does not depend on the immediate's value.
void Assembler::sub_sp_32(uint32_t imm) {
  EnsureSpace ensure_space(this);
  EMIT(0x81);  // using a literal 32-bit immediate.
  // Register code 5 supplies the /5 (SUB) opcode extension in ModR/M.
  static constexpr Register ireg = Register::from_code(5);
  emit_operand(ireg, Operand(esp));
  emit(imm);
}
1329
// test: AND reg with imm, setting flags and discarding the result.
// Narrows to a byte test when the immediate fits in 8 bits.
void Assembler::test(Register reg, const Immediate& imm) {
  if (imm.is_uint8()) {
    test_b(reg, imm);
    return;
  }

  EnsureSpace ensure_space(this);
  // This is not using emit_arith because test doesn't support
  // sign-extension of 8-bit operands.
  if (reg == eax) {
    EMIT(0xA9);  // A9 id: short accumulator form
  } else {
    EMIT(0xF7);  // F7 /0 id
    EMIT(0xC0 | reg.code());
  }
  emit(imm);
}

// test: AND reg with r/m, setting flags only (85 /r).
void Assembler::test(Register reg, Operand op) {
  EnsureSpace ensure_space(this);
  EMIT(0x85);
  emit_operand(reg, op);
}

// test_b: byte test of a byte register against r/m8 (84 /r).
void Assembler::test_b(Register reg, Operand op) {
  CHECK(reg.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x84);
  emit_operand(reg, op);
}

// test: AND r/m with imm, setting flags only. Delegates to the register or
// byte forms where a shorter encoding applies; otherwise F7 /0 id
// (eax here only encodes the /0 extension).
void Assembler::test(Operand op, const Immediate& imm) {
  if (op.is_reg_only()) {
    test(op.reg(), imm);
    return;
  }
  if (imm.is_uint8()) {
    return test_b(op, imm);
  }
  EnsureSpace ensure_space(this);
  EMIT(0xF7);
  emit_operand(eax, op);
  emit(imm);
}
1374
// test_b: byte test of a register against an 8-bit immediate.
void Assembler::test_b(Register reg, Immediate imm8) {
  DCHECK(imm8.is_uint8());
  EnsureSpace ensure_space(this);
  // Only use test against byte for registers that have a byte
  // variant: eax, ebx, ecx, and edx.
  if (reg == eax) {
    EMIT(0xA8);  // A8 ib: short accumulator form
    emit_b(imm8);
  } else if (reg.is_byte_register()) {
    emit_arith_b(0xF6, 0xC0, reg, static_cast<uint8_t>(imm8.immediate()));
  } else {
    // No low-byte form exists for this register; fall back to a 16-bit test
    // (66 F7 /0 iw). The immediate is uint8, so the high byte is zero.
    EMIT(0x66);
    EMIT(0xF7);
    EMIT(0xC0 | reg.code());
    emit_w(imm8);
  }
}

// test_b: byte test of r/m8 against an 8-bit immediate (F6 /0 ib);
// eax here only encodes the /0 opcode extension.
void Assembler::test_b(Operand op, Immediate imm8) {
  if (op.is_reg_only()) {
    test_b(op.reg(), imm8);
    return;
  }
  EnsureSpace ensure_space(this);
  EMIT(0xF6);
  emit_operand(eax, op);
  emit_b(imm8);
}
1403
// test_w: 16-bit test of a register against an immediate. The 0x66 operand
// size prefix selects 16-bit operands; the A9 accumulator form already
// carries the prefix-free encoding path only for eax.
void Assembler::test_w(Register reg, Immediate imm16) {
  DCHECK(imm16.is_int16() || imm16.is_uint16());
  EnsureSpace ensure_space(this);
  if (reg == eax) {
    EMIT(0xA9);
    emit_w(imm16);
  } else {
    EMIT(0x66);
    EMIT(0xF7);
    EMIT(0xC0 | reg.code());
    emit_w(imm16);
  }
}

// test_w: 16-bit test of reg against r/m16 (66 85 /r).
void Assembler::test_w(Register reg, Operand op) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x85);
  emit_operand(reg, op);
}

// test_w: 16-bit test of r/m16 against an immediate (66 F7 /0 iw);
// eax here only encodes the /0 opcode extension.
void Assembler::test_w(Operand op, Immediate imm16) {
  DCHECK(imm16.is_int16() || imm16.is_uint16());
  if (op.is_reg_only()) {
    test_w(op.reg(), imm16);
    return;
  }
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0xF7);
  emit_operand(eax, op);
  emit_w(imm16);
}
1437
// xor_: reg ^= imm32, via the generic arith encoder with /6 (XOR) extension.
void Assembler::xor_(Register dst, int32_t imm32) {
  EnsureSpace ensure_space(this);
  emit_arith(6, Operand(dst), Immediate(imm32));
}

// xor_: reg ^= r/m (33 /r).
void Assembler::xor_(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x33);
  emit_operand(dst, src);
}

// xor_: r/m ^= reg (31 /r).
void Assembler::xor_(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x31);
  emit_operand(src, dst);
}

// xor_: r/m ^= imm, via the generic arith encoder with /6 (XOR) extension.
void Assembler::xor_(Operand dst, const Immediate& x) {
  EnsureSpace ensure_space(this);
  emit_arith(6, dst, x);
}
1459
// bswap: reverse the byte order of a 32-bit register (0F C8+rd).
void Assembler::bswap(Register dst) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xC8 + dst.code());
}

// bt: bit test — copy the bit of r/m selected by src into CF (0F A3 /r).
void Assembler::bt(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xA3);
  emit_operand(src, dst);
}

// bts: bit test and set (0F AB /r).
void Assembler::bts(Operand dst, Register src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xAB);
  emit_operand(src, dst);
}

// bsr: bit scan reverse — index of the highest set bit (0F BD /r).
void Assembler::bsr(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xBD);
  emit_operand(dst, src);
}

// bsf: bit scan forward — index of the lowest set bit (0F BC /r).
void Assembler::bsf(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xBC);
  emit_operand(dst, src);
}
1493
// hlt: halt the processor (F4).
void Assembler::hlt() {
  EnsureSpace ensure_space(this);
  EMIT(0xF4);
}

// int3: one-byte breakpoint trap (CC).
void Assembler::int3() {
  EnsureSpace ensure_space(this);
  EMIT(0xCC);
}

// nop: one-byte no-op (90).
void Assembler::nop() {
  EnsureSpace ensure_space(this);
  EMIT(0x90);
}
1508
1509void Assembler::ret(int imm16) {
1510 EnsureSpace ensure_space(this);
1511 DCHECK(is_uint16(imm16));
1512 if (imm16 == 0) {
1513 EMIT(0xC3);
1514 } else {
1515 EMIT(0xC2);
1516 EMIT(imm16 & 0xFF);
1517 EMIT((imm16 >> 8) & 0xFF);
1518 }
1519}
1520
// ud2: guaranteed undefined-instruction trap (0F 0B).
void Assembler::ud2() {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x0B);
}
1526
1527// Labels refer to positions in the (to be) generated code.
1528// There are bound, linked, and unused labels.
1529//
1530// Bound labels refer to known positions in the already
1531// generated code. pos() is the position the label refers to.
1532//
1533// Linked labels refer to unknown positions in the code
1534// to be generated; pos() is the position of the 32bit
1535// Displacement of the last instruction using the label.
1536
// Debug helper: prints the state of label L (unused / bound / linked).
// For a linked label, walks its displacement chain and prints every
// link site together with the displacement's type.
void Assembler::print(const Label* L) {
  if (L->is_unused()) {
    PrintF("unused label\n");
  } else if (L->is_bound()) {
    PrintF("bound label to %d\n", L->pos());
  } else if (L->is_linked()) {
    // Iterate a local copy so the original label's link state is untouched.
    Label l;
    l.link_to(L->pos());
    PrintF("unbound label");
    while (l.is_linked()) {
      Displacement disp = disp_at(&l);
      PrintF("@ %d ", l.pos());
      disp.print();
      PrintF("\n");
      disp.next(&l);  // advances l to the previous link in the chain
    }
  } else {
    PrintF("label in inconsistent state (pos = %d)\n", L->pos_);
  }
}
1557
// Binds label L to code position pos, back-patching every instruction that
// was linked to it. Three kinds of link chains are resolved: 32-bit
// displacements (long links), 8-bit displacements (near links), and sites
// recorded by the far-jump shortening optimization.
void Assembler::bind_to(Label* L, int pos) {
  EnsureSpace ensure_space(this);
  DCHECK(0 <= pos && pos <= pc_offset());  // must have a valid binding position
  // Resolve the chain of 32-bit links. Each linked displacement stores the
  // position of the previous link; disp.next(L) walks toward the chain end.
  while (L->is_linked()) {
    Displacement disp = disp_at(L);
    int fixup_pos = L->pos();
    if (disp.type() == Displacement::CODE_ABSOLUTE) {
      // Patch in an absolute buffer address and remember the spot so it can
      // be re-fixed when the assembler buffer moves.
      long_at_put(fixup_pos, reinterpret_cast<int>(buffer_start_ + pos));
      internal_reference_positions_.push_back(fixup_pos);
    } else if (disp.type() == Displacement::CODE_RELATIVE) {
      // Relative to InstructionStream heap object pointer.
      long_at_put(fixup_pos,
                  pos + InstructionStream::kHeaderSize - kHeapObjectTag);
    } else {
      if (disp.type() == Displacement::UNCONDITIONAL_JUMP) {
        DCHECK_EQ(byte_at(fixup_pos - 1), 0xE9);  // jmp expected
      }
      // Relative address, relative to point after address.
      int imm32 = pos - (fixup_pos + sizeof(int32_t));
      long_at_put(fixup_pos, imm32);
    }
    disp.next(L);
  }
  // Resolve the chain of near (8-bit) links. Near links are chained through
  // non-positive byte offsets; patched jumps must be forward (0..127).
  while (L->is_near_linked()) {
    int fixup_pos = L->near_link_pos();
    int offset_to_next =
        static_cast<int>(*reinterpret_cast<int8_t*>(addr_at(fixup_pos)));
    DCHECK_LE(offset_to_next, 0);
    // Relative address, relative to point after address.
    int disp = pos - fixup_pos - sizeof(int8_t);
    CHECK(0 <= disp && disp <= 127);
    set_byte_at(fixup_pos, disp);
    if (offset_to_next < 0) {
      L->link_to(fixup_pos + offset_to_next, Label::kNear);
    } else {
      L->UnuseNear();
    }
  }

  // Optimization stage
  // Patch the rel8 displacement of every far jump to L that was shortened
  // by the jump optimizer (see record_farjmp_position).
  auto jump_opt = jump_optimization_info();
  if (jump_opt && jump_opt->is_optimizing()) {
    auto it = jump_opt->label_farjmp_maps.find(L);
    if (it != jump_opt->label_farjmp_maps.end()) {
      auto& pos_vector = it->second;
      for (auto fixup_pos : pos_vector) {
        int disp = pos - (fixup_pos + sizeof(int8_t));
        CHECK(is_int8(disp));
        set_byte_at(fixup_pos, disp);
      }
      jump_opt->label_farjmp_maps.erase(it);
    }
  }
  L->bind_to(pos);
}
1613
// Binds L to the current assembler position. A label may be bound only once.
void Assembler::bind(Label* L) {
  EnsureSpace ensure_space(this);
  DCHECK(!L->is_bound());  // label can only be bound once
  bind_to(L, pc_offset());
}

// Records the position of a shortened far jump's rel8 displacement so that
// bind_to() can patch it once the label's target is known.
void Assembler::record_farjmp_position(Label* L, int pos) {
  auto& pos_vector = jump_optimization_info()->label_farjmp_maps[L];
  pos_vector.push_back(pos);
}
1624
// Decides whether the far jump with collection index idx can be shrunk to
// its rel8 form during the optimizing pass. Returns true when no alignment
// directive lies in the jump's range, or when the displacement still fits
// in int8 after accounting for the worst-case growth that alignment padding
// can add once earlier jumps have been shortened.
bool Assembler::is_optimizable_farjmp(int idx) {
  // Predictable code size forbids shrinking encodings.
  if (predictable_code_size()) return false;

  auto jump_opt = jump_optimization_info();
  CHECK(jump_opt->is_optimizing());

  auto& dict = jump_opt->may_optimizable_farjmp;
  if (dict.find(idx) != dict.end()) {
    auto record_jmp_info = dict[idx];

    int record_pos = record_jmp_info.pos;

    // 4 bytes for jmp rel32 operand.
    const int operand_size = 4;
    int record_dest = record_jmp_info.pos + record_jmp_info.opcode_size +
                      operand_size + record_jmp_info.distance;

    const int max_align_in_jmp_range =
        jump_opt->MaxAlignInRange(record_pos, record_dest);

    if (max_align_in_jmp_range == 0) {
      return true;
    }

    // ja rel32 -> ja rel8, the opcode size 2bytes -> 1byte
    // 0F 87 -> 77
    const int saved_opcode_size = record_jmp_info.opcode_size - 1;

    // jmp rel32 -> rel8, the operand size 4bytes -> 1byte
    constexpr int saved_operand_size = 4 - 1;

    // The shorter encoding may further decrease the base address of the
    // relative jump, while the jump target could stay in place because of
    // alignment.
    int cur_jmp_length_max_increase =
        (record_pos - pc_offset() + saved_opcode_size + saved_operand_size) %
        max_align_in_jmp_range;

    if (is_int8(record_jmp_info.distance + cur_jmp_length_max_increase)) {
      return true;
    }
  }
  return false;
}
1669
// call to a label: a bound label yields a backward rel32 call (E8 cd);
// an unbound one links a 32-bit displacement, patched later by bind_to().
void Assembler::call(Label* L) {
  EnsureSpace ensure_space(this);
  if (L->is_bound()) {
    const int long_size = 5;  // E8 + 4-byte displacement
    int offs = L->pos() - pc_offset();
    DCHECK_LE(offs, 0);
    // 1110 1000 #32-bit disp.
    EMIT(0xE8);
    emit(offs - long_size);
  } else {
    // 1110 1000 #32-bit disp.
    EMIT(0xE8);
    emit_disp(L, Displacement::OTHER);
  }
}

// call to an absolute address, encoded pc-relative (E8 cd) against the end
// of this instruction.
void Assembler::call(Address entry, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  DCHECK(!RelocInfo::IsCodeTarget(rmode));
  EMIT(0xE8);
  emit(entry - (reinterpret_cast<Address>(pc_) + sizeof(int32_t)), rmode);
}

// WebAssembly call: unlike call(Address, rmode) above, the target is
// emitted verbatim (no pc-relative adjustment); relocation resolves it.
void Assembler::wasm_call(Address entry, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  EMIT(0xE8);
  emit(entry, rmode);
}

// Indirect call through a register or memory operand (FF /2;
// edx only encodes the /2 opcode extension).
void Assembler::call(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xFF);
  emit_operand(edx, adr);
}

// call to a Code object, emitted through a code-target relocation.
void Assembler::call(Handle<Code> code, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  EMIT(0xE8);
  emit(code, rmode);
}
1711
1712void Assembler::jmp_rel(int offset) {
1713 EnsureSpace ensure_space(this);
1714 const int short_size = 2;
1715 const int long_size = 5;
1716 if (is_int8(offset - short_size) && !predictable_code_size()) {
1717 // 1110 1011 #8-bit disp.
1718 EMIT(0xEB);
1719 EMIT((offset - short_size) & 0xFF);
1720 } else {
1721 // 1110 1001 #32-bit disp.
1722 EMIT(0xE9);
1723 emit(offset - long_size);
1724 }
1725}
1726
// jmp to a label. A bound label gets a direct backward jump; an unbound one
// either links a near (rel8) displacement, takes the jump-optimization
// shortcut (emit rel8 now and patch at bind), or links a long (rel32)
// displacement.
void Assembler::jmp(Label* L, Label::Distance distance) {
  if (L->is_bound()) {
    int offset = L->pos() - pc_offset();
    DCHECK_LE(offset, 0);  // backward jump.
    jmp_rel(offset);
    return;
  }

  EnsureSpace ensure_space(this);
  if (distance == Label::kNear) {
    EMIT(0xEB);
    emit_near_disp(L);
  } else {
    auto jump_opt = jump_optimization_info();
    if (V8_UNLIKELY(jump_opt)) {
      if (jump_opt->is_optimizing() &&
          is_optimizable_farjmp(jump_opt->farjmp_num++)) {
        // Shrink to EB cb; the zero placeholder is patched in bind_to().
        EMIT(0xEB);
        record_farjmp_position(L, pc_offset());
        EMIT(0);
        return;
      }
      if (jump_opt->is_collecting()) {
        // Record {pos, opcode_size, distance} for the optimizing pass.
        jump_opt->farjmps.push_back({pc_offset(), 1, 0});
      }
    }
    // 1110 1001 #32-bit disp.
    EMIT(0xE9);
    emit_disp(L, Displacement::UNCONDITIONAL_JUMP);
  }
}
1758
// jmp to an absolute address (E9 cd). Wasm call targets are emitted
// verbatim for relocation; all other targets are encoded pc-relative.
void Assembler::jmp(Address entry, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  DCHECK(!RelocInfo::IsCodeTarget(rmode));
  EMIT(0xE9);
  if (RelocInfo::IsWasmCall(rmode)) {
    emit(entry, rmode);
  } else {
    emit(entry - (reinterpret_cast<Address>(pc_) + sizeof(int32_t)), rmode);
  }
}

// Indirect jump through a register or memory operand (FF /4;
// esp only encodes the /4 opcode extension).
void Assembler::jmp(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xFF);
  emit_operand(esp, adr);
}

// jmp to a Code object, emitted through a code-target relocation.
void Assembler::jmp(Handle<Code> code, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  EMIT(0xE9);
  emit(code, rmode);
}
1782
// Conditional jump to a label. Bound labels pick the shortest backward
// encoding (70+cc cb or 0F 80+cc cd); unbound labels link a near or long
// displacement, or take the jump-optimization shortening path.
void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
  EnsureSpace ensure_space(this);
  DCHECK(0 <= cc && static_cast<int>(cc) < 16);
  if (L->is_bound()) {
    const int short_size = 2;  // 70+cc cb
    const int long_size = 6;   // 0F 80+cc cd
    int offs = L->pos() - pc_offset();
    DCHECK_LE(offs, 0);
    if (is_int8(offs - short_size)) {
      // 0111 tttn #8-bit disp
      EMIT(0x70 | cc);
      EMIT((offs - short_size) & 0xFF);
    } else {
      // 0000 1111 1000 tttn #32-bit disp
      EMIT(0x0F);
      EMIT(0x80 | cc);
      emit(offs - long_size);
    }
  } else if (distance == Label::kNear) {
    EMIT(0x70 | cc);
    emit_near_disp(L);
  } else {
    auto jump_opt = jump_optimization_info();
    if (V8_UNLIKELY(jump_opt)) {
      if (jump_opt->is_optimizing() &&
          is_optimizable_farjmp(jump_opt->farjmp_num++)) {
        // 0111 tttn #8-bit disp
        // Zero placeholder is patched in bind_to().
        EMIT(0x70 | cc);
        record_farjmp_position(L, pc_offset());
        EMIT(0);
        return;
      }
      if (jump_opt->is_collecting()) {
        // Record {pos, opcode_size, distance} for the optimizing pass.
        jump_opt->farjmps.push_back({pc_offset(), 2, 0});
      }
    }
    // 0000 1111 1000 tttn #32-bit disp
    // Note: could eliminate cond. jumps to this jump if condition
    // is the same however, seems to be rather unlikely case.
    EMIT(0x0F);
    EMIT(0x80 | cc);
    emit_disp(L, Displacement::OTHER);
  }
}

// Conditional jump to an absolute address, encoded pc-relative
// (0F 80+cc cd).
void Assembler::j(Condition cc, uint8_t* entry, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  DCHECK((0 <= cc) && (static_cast<int>(cc) < 16));
  // 0000 1111 1000 tttn #32-bit disp.
  EMIT(0x0F);
  EMIT(0x80 | cc);
  emit(entry - (pc_ + sizeof(int32_t)), rmode);
}

// Conditional jump to a Code object via a code-target relocation.
void Assembler::j(Condition cc, Handle<Code> code, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  // 0000 1111 1000 tttn #32-bit disp
  EMIT(0x0F);
  EMIT(0x80 | cc);
  emit(code, rmode);
}
1844
1845// FPU instructions.
1846
// fld: push st(i) onto the FPU stack (D9 C0+i).
void Assembler::fld(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD9, 0xC0, i);
}

// fstp: store st(0) into st(i) and pop (DD D8+i).
void Assembler::fstp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xD8, i);
}

// fld1: push the constant +1.0 (D9 E8).
void Assembler::fld1() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xE8);
}

// fldpi: push the constant pi (D9 EB).
void Assembler::fldpi() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xEB);
}

// fldz: push the constant +0.0 (D9 EE).
void Assembler::fldz() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xEE);
}

// fldln2: push the constant ln(2) (D9 ED).
void Assembler::fldln2() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xED);
}
1880
// fld_s: push a float32 from memory (D9 /0; eax encodes the /0 extension).
void Assembler::fld_s(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  emit_operand(eax, adr);
}

// fld_d: push a float64 from memory (DD /0).
void Assembler::fld_d(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDD);
  emit_operand(eax, adr);
}

// fstp_s: store st(0) to memory as float32 and pop (D9 /3; ebx = /3).
void Assembler::fstp_s(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  emit_operand(ebx, adr);
}

// fst_s: store st(0) to memory as float32 without popping (D9 /2; edx = /2).
void Assembler::fst_s(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  emit_operand(edx, adr);
}

// fstp_d: store st(0) to memory as float64 and pop (DD /3).
void Assembler::fstp_d(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDD);
  emit_operand(ebx, adr);
}

// fst_d: store st(0) to memory as float64 without popping (DD /2).
void Assembler::fst_d(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDD);
  emit_operand(edx, adr);
}
1916
// fild_s: push a 32-bit integer from memory (DB /0; eax = /0 extension).
void Assembler::fild_s(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  emit_operand(eax, adr);
}

// fild_d: push a 64-bit integer from memory (DF /5; ebp = /5 extension).
void Assembler::fild_d(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDF);
  emit_operand(ebp, adr);
}

// fistp_s: store st(0) as a 32-bit integer and pop (DB /3; ebx = /3).
void Assembler::fistp_s(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  emit_operand(ebx, adr);
}

// fisttp_s: truncating store of st(0) as a 32-bit integer, then pop
// (DB /1; ecx = /1). Requires SSE3.
void Assembler::fisttp_s(Operand adr) {
  DCHECK(IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  emit_operand(ecx, adr);
}

// fisttp_d: truncating store of st(0) as a 64-bit integer, then pop
// (DD /1). Requires SSE3.
void Assembler::fisttp_d(Operand adr) {
  DCHECK(IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  EMIT(0xDD);
  emit_operand(ecx, adr);
}

// fist_s: store st(0) as a 32-bit integer without popping (DB /2).
void Assembler::fist_s(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  emit_operand(edx, adr);
}

// fistp_d: store st(0) as a 64-bit integer and pop (DF /7; edi = /7).
void Assembler::fistp_d(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDF);
  emit_operand(edi, adr);
}
1960
// fabs: st(0) = |st(0)| (D9 E1).
void Assembler::fabs() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xE1);
}

// fchs: st(0) = -st(0) (D9 E0).
void Assembler::fchs() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xE0);
}

// fcos: st(0) = cos(st(0)) (D9 FF).
void Assembler::fcos() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xFF);
}

// fsin: st(0) = sin(st(0)) (D9 FE).
void Assembler::fsin() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xFE);
}

// fptan: partial tangent of st(0) (D9 F2).
void Assembler::fptan() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF2);
}

// fyl2x: st(1) * log2(st(0)), popping once (D9 F1).
void Assembler::fyl2x() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF1);
}

// f2xm1: st(0) = 2^st(0) - 1 (D9 F0).
void Assembler::f2xm1() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF0);
}

// fscale: scale st(0) by 2^trunc(st(1)) (D9 FD).
void Assembler::fscale() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xFD);
}

// fninit: initialize the FPU without checking pending exceptions (DB E3).
void Assembler::fninit() {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  EMIT(0xE3);
}
2014
// The DC-group forms below target st(i); the D8-group forms target st(0);
// the DE-group forms target st(i) and pop the stack.

// fadd: st(i) += st(0) (DC C0+i).
void Assembler::fadd(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xC0, i);
}

// fadd_i: st(0) += st(i) (D8 C0+i).
void Assembler::fadd_i(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD8, 0xC0, i);
}

// fsub: st(i) -= st(0) (DC E8+i).
void Assembler::fsub(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xE8, i);
}

// fsub_i: st(0) -= st(i) (D8 E0+i).
void Assembler::fsub_i(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD8, 0xE0, i);
}

// fisub_s: st(0) -= 32-bit integer from memory (DA /4; esp = /4 extension).
void Assembler::fisub_s(Operand adr) {
  EnsureSpace ensure_space(this);
  EMIT(0xDA);
  emit_operand(esp, adr);
}

// fmul_i: st(0) *= st(i) (D8 C8+i).
void Assembler::fmul_i(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD8, 0xC8, i);
}

// fmul: st(i) *= st(0) (DC C8+i).
void Assembler::fmul(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xC8, i);
}

// fdiv: st(i) /= st(0) (DC F8+i).
void Assembler::fdiv(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xF8, i);
}

// fdiv_i: st(0) /= st(i) (D8 F0+i).
void Assembler::fdiv_i(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD8, 0xF0, i);
}

// faddp: st(i) += st(0), then pop (DE C0+i).
void Assembler::faddp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xC0, i);
}

// fsubp: st(i) = st(i) - st(0), then pop (DE E8+i).
void Assembler::fsubp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xE8, i);
}

// fsubrp: st(i) = st(0) - st(i), then pop (DE E0+i).
void Assembler::fsubrp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xE0, i);
}

// fmulp: st(i) *= st(0), then pop (DE C8+i).
void Assembler::fmulp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xC8, i);
}

// fdivp: st(i) /= st(0), then pop (DE F8+i).
void Assembler::fdivp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xF8, i);
}
2085
// fprem: partial remainder of st(0)/st(1), truncating (D9 F8).
void Assembler::fprem() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF8);
}

// fprem1: IEEE partial remainder of st(0)/st(1) (D9 F5).
void Assembler::fprem1() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF5);
}

// fxch: exchange st(0) and st(i) (D9 C8+i).
void Assembler::fxch(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD9, 0xC8, i);
}

// fincstp: increment the FPU stack-top pointer (D9 F7).
void Assembler::fincstp() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xF7);
}

// ffree: tag st(i) as empty (DD C0+i).
void Assembler::ffree(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xC0, i);
}

// ftst: compare st(0) with 0.0 (D9 E4).
void Assembler::ftst() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xE4);
}

// fucomp: unordered compare st(0) with st(i), then pop (DD E8+i).
void Assembler::fucomp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xE8, i);
}

// fucompp: unordered compare st(0) with st(1), then pop twice (DA E9).
void Assembler::fucompp() {
  EnsureSpace ensure_space(this);
  EMIT(0xDA);
  EMIT(0xE9);
}

// fucomi: unordered compare st(0) with st(i), setting EFLAGS (DB E8+i).
void Assembler::fucomi(int i) {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  EMIT(0xE8 + i);
}

// fucomip: unordered compare st(0) with st(1) into EFLAGS, then pop (DF E9).
void Assembler::fucomip() {
  EnsureSpace ensure_space(this);
  EMIT(0xDF);
  EMIT(0xE9);
}

// fcompp: compare st(0) with st(1), then pop twice (DE D9).
void Assembler::fcompp() {
  EnsureSpace ensure_space(this);
  EMIT(0xDE);
  EMIT(0xD9);
}

// fnstsw_ax: store the FPU status word into AX without exception check
// (DF E0).
void Assembler::fnstsw_ax() {
  EnsureSpace ensure_space(this);
  EMIT(0xDF);
  EMIT(0xE0);
}

// fwait: wait for pending FPU exceptions (9B).
void Assembler::fwait() {
  EnsureSpace ensure_space(this);
  EMIT(0x9B);
}

// frndint: round st(0) to an integer per the FPU rounding mode (D9 FC).
void Assembler::frndint() {
  EnsureSpace ensure_space(this);
  EMIT(0xD9);
  EMIT(0xFC);
}

// fnclex: clear FPU exception flags without exception check (DB E2).
void Assembler::fnclex() {
  EnsureSpace ensure_space(this);
  EMIT(0xDB);
  EMIT(0xE2);
}

// sahf: load AH into the low byte of EFLAGS (9E).
void Assembler::sahf() {
  EnsureSpace ensure_space(this);
  EMIT(0x9E);
}
2176
// setcc: set a byte register to 1 if condition cc holds, else 0
// (0F 90+cc, register form). Only byte-addressable registers are legal.
void Assembler::setcc(Condition cc, Register reg) {
  DCHECK(reg.is_byte_register());
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x90 | cc);
  EMIT(0xC0 | reg.code());
}
2184
// cvttss2si: truncating float32 -> int32 conversion (F3 0F 2C /r).
void Assembler::cvttss2si(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  // The [src] might contain ebx's register code, but in
  // this case, it refers to xmm3, so it is OK to emit.
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x2C);
  emit_operand(dst, src);
}

// cvttsd2si: truncating float64 -> int32 conversion (F2 0F 2C /r).
void Assembler::cvttsd2si(Register dst, Operand src) {
  EnsureSpace ensure_space(this);
  // The [src] might contain ebx's register code, but in
  // this case, it refers to xmm3, so it is OK to emit.
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x2C);
  emit_operand(dst, src);
}

// cvtsd2si: rounding float64 -> int32 conversion (F2 0F 2D /r).
void Assembler::cvtsd2si(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x2D);
  emit_sse_operand(dst, src);
}

// cvtsi2ss: int32 -> float32 conversion (F3 0F 2A /r).
void Assembler::cvtsi2ss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x2A);
  emit_sse_operand(dst, src);
}

// cvtsi2sd: int32 -> float64 conversion (F2 0F 2A /r).
void Assembler::cvtsi2sd(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x2A);
  emit_sse_operand(dst, src);
}

// cvtss2sd: float32 -> float64 conversion (F3 0F 5A /r).
void Assembler::cvtss2sd(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x5A);
  emit_sse_operand(dst, src);
}
2236
// cvtdq2pd: packed int32 -> float64 conversion (F3 0F E6 /r).
void Assembler::cvtdq2pd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0xE6);
  emit_sse_operand(dst, src);
}

// cvtpd2ps: packed float64 -> float32 conversion (66 0F 5A /r).
void Assembler::cvtpd2ps(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x5A);
  emit_sse_operand(dst, src);
}

// cvttps2dq: truncating packed float32 -> int32 conversion (F3 0F 5B /r).
void Assembler::cvttps2dq(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x5B);
  emit_sse_operand(dst, src);
}

// cvttpd2dq: truncating packed float64 -> int32 conversion (66 0F E6 /r).
void Assembler::cvttpd2dq(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xE6);
  emit_sse_operand(dst, src);
}

// cmpps: packed float32 compare with predicate imm8 (0F C2 /r ib).
void Assembler::cmpps(XMMRegister dst, Operand src, uint8_t cmp) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xC2);
  emit_sse_operand(dst, src);
  EMIT(cmp);
}

// cmppd: packed float64 compare with predicate imm8 (66 0F C2 /r ib).
void Assembler::cmppd(XMMRegister dst, Operand src, uint8_t cmp) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xC2);
  emit_sse_operand(dst, src);
  EMIT(cmp);
}
2285
// haddps: horizontal add of packed float32 (F2 0F 7C /r). Requires SSE3.
void Assembler::haddps(XMMRegister dst, Operand src) {
  DCHECK(IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x7C);
  emit_sse_operand(dst, src);
}

// ucomisd: unordered scalar float64 compare into EFLAGS (66 0F 2E /r).
void Assembler::ucomisd(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x2E);
  emit_sse_operand(dst, src);
}
2302
// roundps: round packed float32 per mode (66 0F 3A 08 /r ib). SSE4.1.
// Bit 3 of the immediate suppresses the precision exception.
void Assembler::roundps(XMMRegister dst, XMMRegister src, RoundingMode mode) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x08);
  emit_sse_operand(dst, src);
  // Mask precision exception.
  EMIT(static_cast<uint8_t>(mode) | 0x8);
}

// roundpd: round packed float64 per mode (66 0F 3A 09 /r ib). SSE4.1.
void Assembler::roundpd(XMMRegister dst, XMMRegister src, RoundingMode mode) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x09);
  emit_sse_operand(dst, src);
  // Mask precision exception.
  EMIT(static_cast<uint8_t>(mode) | 0x8);
}

// roundss: round scalar float32 per mode (66 0F 3A 0A /r ib). SSE4.1.
void Assembler::roundss(XMMRegister dst, XMMRegister src, RoundingMode mode) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x0A);
  emit_sse_operand(dst, src);
  // Mask precision exception.
  EMIT(static_cast<uint8_t>(mode) | 0x8);
}

// roundsd: round scalar float64 per mode (66 0F 3A 0B /r ib). SSE4.1.
void Assembler::roundsd(XMMRegister dst, XMMRegister src, RoundingMode mode) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x0B);
  emit_sse_operand(dst, src);
  // Mask precision exception.
  EMIT(static_cast<uint8_t>(mode) | 0x8);
}
2350
// movmskpd: extract the sign bits of packed float64 into a GPR
// (66 0F 50 /r).
void Assembler::movmskpd(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x50);
  emit_sse_operand(dst, src);
}

// movmskps: extract the sign bits of packed float32 into a GPR (0F 50 /r).
void Assembler::movmskps(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x50);
  emit_sse_operand(dst, src);
}

// pmovmskb: extract the top bit of each byte lane into a GPR (66 0F D7 /r).
void Assembler::pmovmskb(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xD7);
  emit_sse_operand(dst, src);
}

// cmpltsd: scalar float64 compare with the less-than predicate
// (F2 0F C2 /r 01).
void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0xC2);
  emit_sse_operand(dst, src);
  EMIT(1);  // LT == 1
}
2382
// movaps: aligned 128-bit load (0F 28 /r).
void Assembler::movaps(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x28);
  emit_sse_operand(dst, src);
}

// movups: unaligned 128-bit load (0F 10 /r).
void Assembler::movups(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x10);
  emit_sse_operand(dst, src);
}

// movups: unaligned 128-bit store (0F 11 /r).
void Assembler::movups(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x11);
  emit_sse_operand(src, dst);
}

// movddup: duplicate the low float64 into both lanes (F2 0F 12 /r). SSE3.
void Assembler::movddup(XMMRegister dst, Operand src) {
  DCHECK(IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x12);
  emit_sse_operand(dst, src);
}

// movshdup: duplicate odd-indexed float32 lanes (F3 0F 16 /r). SSE3.
void Assembler::movshdup(XMMRegister dst, XMMRegister src) {
  DCHECK(IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x16);
  emit_sse_operand(dst, src);
}
2421
// shufps: shuffle packed singles according to the 2-bit-per-lane selector
// in imm8. Encoding: 0F C6 /r ib.
void Assembler::shufps(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  DCHECK(is_uint8(imm8));
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0xC6);
  emit_sse_operand(dst, src);
  EMIT(imm8);
}

// shufpd: shuffle packed doubles according to imm8. 66 0F C6 /r ib.
void Assembler::shufpd(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  DCHECK(is_uint8(imm8));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xC6);
  emit_sse_operand(dst, src);
  EMIT(imm8);
}
2440
// movhlps: move the high two singles of {src} into the low half of {dst}.
// Encoding: 0F 12 /r (register form).
void Assembler::movhlps(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x12);
  emit_sse_operand(dst, src);
}

// movlhps: move the low two singles of {src} into the high half of {dst}.
// Encoding: 0F 16 /r (register form).
void Assembler::movlhps(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x16);
  emit_sse_operand(dst, src);
}

// movlps (load): load 64 bits into the low half of {dst}. 0F 12 /r.
void Assembler::movlps(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x12);
  emit_sse_operand(dst, src);
}

// movlps (store): store the low half of {src} to memory. 0F 13 /r.
void Assembler::movlps(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x13);
  emit_sse_operand(src, dst);
}

// movhps (load): load 64 bits into the high half of {dst}. 0F 16 /r.
void Assembler::movhps(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x16);
  emit_sse_operand(dst, src);
}

// movhps (store): store the high half of {src} to memory. 0F 17 /r.
void Assembler::movhps(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x17);
  emit_sse_operand(src, dst);
}
2482
// movdqa (store): store 128 aligned bits. Encoding: 66 0F 7F /r.
void Assembler::movdqa(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x7F);
  emit_sse_operand(src, dst);
}

// movdqa (load): load 128 aligned bits. Encoding: 66 0F 6F /r.
void Assembler::movdqa(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x6F);
  emit_sse_operand(dst, src);
}

// movdqa (register-register): 66 0F 6F /r, load form.
void Assembler::movdqa(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x6F);
  emit_sse_operand(dst, src);
}

// movdqu (store): store 128 unaligned bits. Encoding: F3 0F 7F /r.
void Assembler::movdqu(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x7F);
  emit_sse_operand(src, dst);
}

// movdqu (load): load 128 unaligned bits. Encoding: F3 0F 6F /r.
void Assembler::movdqu(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x6F);
  emit_sse_operand(dst, src);
}

// movdqu (register-register): deliberately uses the store form (F3 0F 7F)
// with operands swapped; equivalent to the load form for reg-reg moves.
void Assembler::movdqu(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x7F);
  emit_sse_operand(src, dst);
}
2530
// prefetch: emit a PREFETCHh hint (0F 18 /level) for the given memory
// operand. The 2-bit {level} selects the hint (0 = NTA, 1-3 = T0-T2).
void Assembler::prefetch(Operand src, int level) {
  DCHECK(is_uint2(level));
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x18);
  // Emit hint number in Reg position of RegR/M.
  XMMRegister code = XMMRegister::from_code(level);
  emit_sse_operand(code, src);
}
2540
// movsd (store): store scalar double to memory. F2 0F 11 /r.
void Assembler::movsd(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF2);  // double
  EMIT(0x0F);
  EMIT(0x11);  // store
  emit_sse_operand(src, dst);
}

// movsd (load): load scalar double from memory. F2 0F 10 /r.
void Assembler::movsd(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF2);  // double
  EMIT(0x0F);
  EMIT(0x10);  // load
  emit_sse_operand(dst, src);
}

// movss (store): store scalar single to memory. F3 0F 11 /r.
void Assembler::movss(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);  // float
  EMIT(0x0F);
  EMIT(0x11);  // store
  emit_sse_operand(src, dst);
}

// movss (load): load scalar single from memory. F3 0F 10 /r.
void Assembler::movss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);  // float
  EMIT(0x0F);
  EMIT(0x10);  // load
  emit_sse_operand(dst, src);
}
2572
// movd (load): move 32 bits from memory/GPR operand into the low lane of
// {dst}, zeroing the rest. Encoding: 66 0F 6E /r.
void Assembler::movd(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x6E);
  emit_sse_operand(dst, src);
}

// movd (store): move the low 32 bits of {src} out. 66 0F 7E /r.
void Assembler::movd(Operand dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x7E);
  emit_sse_operand(src, dst);
}
2588
// extractps (SSE4.1): extract the single-precision lane selected by imm8
// from {src} into a memory operand. Encoding: 66 0F 3A 17 /r ib.
void Assembler::extractps(Operand dst, XMMRegister src, uint8_t imm8) {
  DCHECK(IsEnabled(SSE4_1));
  DCHECK(is_uint8(imm8));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x17);
  emit_sse_operand(src, dst);
  EMIT(imm8);
}

// extractps (SSE4.1): same, but into a general-purpose register.
void Assembler::extractps(Register dst, XMMRegister src, uint8_t imm8) {
  DCHECK(IsEnabled(SSE4_1));
  DCHECK(is_uint8(imm8));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x17);
  emit_sse_operand(src, dst);
  EMIT(imm8);
}
2612
// pcmpgtq (SSE4.2): packed signed 64-bit greater-than compare.
// Encoding: 66 0F 38 37 /r.
void Assembler::pcmpgtq(XMMRegister dst, XMMRegister src) {
  DCHECK(IsEnabled(SSE4_2));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x38);
  EMIT(0x37);
  emit_sse_operand(dst, src);
}
2622
// Packed shift-by-immediate instructions. These use the ModR/M reg field
// as an opcode extension, so a general-purpose register whose code equals
// the extension (/6 for psll*, /2 for psrl*, /4 for psra*) is passed as
// the first operand to emit_sse_operand.

// psllw: shift packed words left. 66 0F 71 /6 ib.
void Assembler::psllw(XMMRegister reg, uint8_t shift) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x71);
  emit_sse_operand(esi, reg);  // esi == 6
  EMIT(shift);
}

// pslld: shift packed dwords left. 66 0F 72 /6 ib.
void Assembler::pslld(XMMRegister reg, uint8_t shift) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x72);
  emit_sse_operand(esi, reg);  // esi == 6
  EMIT(shift);
}

// psrlw: shift packed words right (logical). 66 0F 71 /2 ib.
void Assembler::psrlw(XMMRegister reg, uint8_t shift) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x71);
  emit_sse_operand(edx, reg);  // edx == 2
  EMIT(shift);
}

// psrld: shift packed dwords right (logical). 66 0F 72 /2 ib.
void Assembler::psrld(XMMRegister reg, uint8_t shift) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x72);
  emit_sse_operand(edx, reg);  // edx == 2
  EMIT(shift);
}

// psraw: shift packed words right (arithmetic). 66 0F 71 /4 ib.
void Assembler::psraw(XMMRegister reg, uint8_t shift) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x71);
  emit_sse_operand(esp, reg);  // esp == 4
  EMIT(shift);
}

// psrad: shift packed dwords right (arithmetic). 66 0F 72 /4 ib.
void Assembler::psrad(XMMRegister reg, uint8_t shift) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x72);
  emit_sse_operand(esp, reg);  // esp == 4
  EMIT(shift);
}

// psllq: shift packed qwords left. 66 0F 73 /6 ib.
void Assembler::psllq(XMMRegister reg, uint8_t shift) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x73);
  emit_sse_operand(esi, reg);  // esi == 6
  EMIT(shift);
}

// psrlq: shift packed qwords right (logical). 66 0F 73 /2 ib.
void Assembler::psrlq(XMMRegister reg, uint8_t shift) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x73);
  emit_sse_operand(edx, reg);  // edx == 2
  EMIT(shift);
}
2694
// pshufhw: shuffle the high four words per the selector in {shuffle}.
// Encoding: F3 0F 70 /r ib.
void Assembler::pshufhw(XMMRegister dst, Operand src, uint8_t shuffle) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x70);
  emit_sse_operand(dst, src);
  EMIT(shuffle);
}

// pshuflw: shuffle the low four words. Encoding: F2 0F 70 /r ib.
void Assembler::pshuflw(XMMRegister dst, Operand src, uint8_t shuffle) {
  EnsureSpace ensure_space(this);
  EMIT(0xF2);
  EMIT(0x0F);
  EMIT(0x70);
  emit_sse_operand(dst, src);
  EMIT(shuffle);
}

// pshufd: shuffle the four dwords. Encoding: 66 0F 70 /r ib.
void Assembler::pshufd(XMMRegister dst, Operand src, uint8_t shuffle) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x70);
  emit_sse_operand(dst, src);
  EMIT(shuffle);
}
2721
// pblendw (SSE4.1): blend packed words from {src} into {dst} per the bit
// mask. Encoding: 66 0F 3A 0E /r ib.
void Assembler::pblendw(XMMRegister dst, Operand src, uint8_t mask) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x0E);
  emit_sse_operand(dst, src);
  EMIT(mask);
}

// palignr (SSSE3): concatenate {dst}:{src} and extract a byte-aligned
// 16-byte window at offset {mask}. Encoding: 66 0F 3A 0F /r ib.
void Assembler::palignr(XMMRegister dst, Operand src, uint8_t mask) {
  DCHECK(IsEnabled(SSSE3));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x0F);
  emit_sse_operand(dst, src);
  EMIT(mask);
}
2743
// pextrb (SSE4.1): extract byte {offset} of {src}. 66 0F 3A 14 /r ib.
void Assembler::pextrb(Operand dst, XMMRegister src, uint8_t offset) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x14);
  emit_sse_operand(src, dst);
  EMIT(offset);
}

// pextrw (SSE4.1 memory form): extract word {offset}. 66 0F 3A 15 /r ib.
void Assembler::pextrw(Operand dst, XMMRegister src, uint8_t offset) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x15);
  emit_sse_operand(src, dst);
  EMIT(offset);
}

// pextrd (SSE4.1): extract dword {offset}. 66 0F 3A 16 /r ib.
void Assembler::pextrd(Operand dst, XMMRegister src, uint8_t offset) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x16);
  emit_sse_operand(src, dst);
  EMIT(offset);
}
2776
// insertps (SSE4.1): insert a single-precision lane per the control byte
// {offset}. Encoding: 66 0F 3A 21 /r ib.
void Assembler::insertps(XMMRegister dst, Operand src, uint8_t offset) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x21);
  emit_sse_operand(dst, src);
  EMIT(offset);
}

// pinsrb (SSE4.1): insert a byte at lane {offset}. 66 0F 3A 20 /r ib.
void Assembler::pinsrb(XMMRegister dst, Operand src, uint8_t offset) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x20);
  emit_sse_operand(dst, src);
  EMIT(offset);
}

// pinsrw (SSE2, no feature check needed): insert a word at lane {offset}.
// Encoding: 66 0F C4 /r ib.
void Assembler::pinsrw(XMMRegister dst, Operand src, uint8_t offset) {
  DCHECK(is_uint8(offset));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0xC4);
  emit_sse_operand(dst, src);
  EMIT(offset);
}

// pinsrd (SSE4.1): insert a dword at lane {offset}. 66 0F 3A 22 /r ib.
void Assembler::pinsrd(XMMRegister dst, Operand src, uint8_t offset) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(0x3A);
  EMIT(0x22);
  emit_sse_operand(dst, src);
  EMIT(offset);
}
2819
// Scalar single-precision arithmetic: all use the F3 0F prefix pair
// followed by the operation's opcode byte.

// addss: dst = dst + src (scalar single). F3 0F 58 /r.
void Assembler::addss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x58);
  emit_sse_operand(dst, src);
}

// subss: dst = dst - src. F3 0F 5C /r.
void Assembler::subss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x5C);
  emit_sse_operand(dst, src);
}

// mulss: dst = dst * src. F3 0F 59 /r.
void Assembler::mulss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x59);
  emit_sse_operand(dst, src);
}

// divss: dst = dst / src. F3 0F 5E /r.
void Assembler::divss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x5E);
  emit_sse_operand(dst, src);
}

// sqrtss: dst = sqrt(src). F3 0F 51 /r.
void Assembler::sqrtss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x51);
  emit_sse_operand(dst, src);
}

// ucomiss: unordered scalar single compare, sets EFLAGS. 0F 2E /r
// (no mandatory prefix).
void Assembler::ucomiss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(0x2E);
  emit_sse_operand(dst, src);
}

// maxss: dst = max(dst, src). F3 0F 5F /r.
void Assembler::maxss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x5F);
  emit_sse_operand(dst, src);
}

// minss: dst = min(dst, src). F3 0F 5D /r.
void Assembler::minss(XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0x5D);
  emit_sse_operand(dst, src);
}
2882
// Packed single-precision floating-point SSE instructions.
// Generic emitter: 0F <opcode> /r with no mandatory prefix.
void Assembler::ps(uint8_t opcode, XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x0F);
  EMIT(opcode);
  emit_sse_operand(dst, src);
}

// Packed double-precision floating-point SSE instructions.
// Generic emitter: 66 0F <opcode> /r.
void Assembler::pd(uint8_t opcode, XMMRegister dst, Operand src) {
  EnsureSpace ensure_space(this);
  EMIT(0x66);
  EMIT(0x0F);
  EMIT(opcode);
  emit_sse_operand(dst, src);
}
2899
2900// AVX instructions
2901
// Generic AVX emitter for scalar-single ops (VEX.F3.0F <op>).
void Assembler::vss(uint8_t op, XMMRegister dst, XMMRegister src1,
                    Operand src2) {
  vinstr(op, dst, src1, src2, kF3, k0F, kWIG);
}

// Generic AVX emitter for packed-single ops (VEX.0F <op>, no prefix).
void Assembler::vps(uint8_t op, XMMRegister dst, XMMRegister src1,
                    Operand src2) {
  vinstr(op, dst, src1, src2, kNoPrefix, k0F, kWIG);
}

// Generic AVX emitter for packed-double ops (VEX.66.0F <op>).
void Assembler::vpd(uint8_t op, XMMRegister dst, XMMRegister src1,
                    Operand src2) {
  vinstr(op, dst, src1, src2, k66, k0F, kWIG);
}
2916
// vshufpd: AVX shuffle of packed doubles (VEX.66.0F C6 /r ib).
void Assembler::vshufpd(XMMRegister dst, XMMRegister src1, Operand src2,
                        uint8_t imm8) {
  DCHECK(is_uint8(imm8));
  vpd(0xC6, dst, src1, src2);
  EMIT(imm8);
}
2923
// vmovhlps: dst = {src1.high, src2.high->low}. VEX.0F 12 (register form).
void Assembler::vmovhlps(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
  vinstr(0x12, dst, src1, src2, kNoPrefix, k0F, kWIG);
}

// vmovlhps: dst = {src2.low->high, src1.low}. VEX.0F 16 (register form).
void Assembler::vmovlhps(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
  vinstr(0x16, dst, src1, src2, kNoPrefix, k0F, kWIG);
}

// vmovlps (load): merge 64 bits from memory into the low half. VEX.0F 12.
void Assembler::vmovlps(XMMRegister dst, XMMRegister src1, Operand src2) {
  vinstr(0x12, dst, src1, src2, kNoPrefix, k0F, kWIG);
}

// vmovlps (store): VEX.0F 13. xmm0 fills the unused VEX.vvvv slot.
void Assembler::vmovlps(Operand dst, XMMRegister src) {
  vinstr(0x13, src, xmm0, dst, kNoPrefix, k0F, kWIG);
}

// vmovhps (load): merge 64 bits from memory into the high half. VEX.0F 16.
void Assembler::vmovhps(XMMRegister dst, XMMRegister src1, Operand src2) {
  vinstr(0x16, dst, src1, src2, kNoPrefix, k0F, kWIG);
}

// vmovhps (store): VEX.0F 17. xmm0 fills the unused VEX.vvvv slot.
void Assembler::vmovhps(Operand dst, XMMRegister src) {
  vinstr(0x17, src, xmm0, dst, kNoPrefix, k0F, kWIG);
}
2947
// vcmpps: packed-single compare with predicate {cmp}. VEX.0F C2 /r ib.
void Assembler::vcmpps(XMMRegister dst, XMMRegister src1, Operand src2,
                       uint8_t cmp) {
  vps(0xC2, dst, src1, src2);
  EMIT(cmp);
}

// vcmppd: packed-double compare with predicate {cmp}. VEX.66.0F C2 /r ib.
void Assembler::vcmppd(XMMRegister dst, XMMRegister src1, Operand src2,
                       uint8_t cmp) {
  vpd(0xC2, dst, src1, src2);
  EMIT(cmp);
}

// vshufps: AVX shuffle of packed singles. VEX.0F C6 /r ib.
void Assembler::vshufps(XMMRegister dst, XMMRegister src1, Operand src2,
                        uint8_t imm8) {
  DCHECK(is_uint8(imm8));
  vps(0xC6, dst, src1, src2);
  EMIT(imm8);
}
2966
// AVX packed shift-by-immediate instructions. The opcode extension
// (/6 shift-left, /2 logical-right, /4 arithmetic-right) is encoded by
// passing an XMMRegister with that code in the ModR/M reg position; the
// destination travels in VEX.vvvv (the src1 slot of vinstr).

// vpsllw: VEX.66.0F 71 /6 ib.
void Assembler::vpsllw(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  XMMRegister iop = XMMRegister::from_code(6);
  vinstr(0x71, iop, dst, Operand(src), k66, k0F, kWIG);
  EMIT(imm8);
}

// vpslld: VEX.66.0F 72 /6 ib.
void Assembler::vpslld(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  XMMRegister iop = XMMRegister::from_code(6);
  vinstr(0x72, iop, dst, Operand(src), k66, k0F, kWIG);
  EMIT(imm8);
}

// vpsllq: VEX.66.0F 73 /6 ib.
void Assembler::vpsllq(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  XMMRegister iop = XMMRegister::from_code(6);
  vinstr(0x73, iop, dst, Operand(src), k66, k0F, kWIG);
  EMIT(imm8);
}

// vpsrlw: VEX.66.0F 71 /2 ib.
void Assembler::vpsrlw(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  XMMRegister iop = XMMRegister::from_code(2);
  vinstr(0x71, iop, dst, Operand(src), k66, k0F, kWIG);
  EMIT(imm8);
}

// vpsrld: VEX.66.0F 72 /2 ib.
void Assembler::vpsrld(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  XMMRegister iop = XMMRegister::from_code(2);
  vinstr(0x72, iop, dst, Operand(src), k66, k0F, kWIG);
  EMIT(imm8);
}

// vpsrlq: VEX.66.0F 73 /2 ib.
void Assembler::vpsrlq(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  XMMRegister iop = XMMRegister::from_code(2);
  vinstr(0x73, iop, dst, Operand(src), k66, k0F, kWIG);
  EMIT(imm8);
}

// vpsraw: VEX.66.0F 71 /4 ib.
void Assembler::vpsraw(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  XMMRegister iop = XMMRegister::from_code(4);
  vinstr(0x71, iop, dst, Operand(src), k66, k0F, kWIG);
  EMIT(imm8);
}

// vpsrad: VEX.66.0F 72 /4 ib.
void Assembler::vpsrad(XMMRegister dst, XMMRegister src, uint8_t imm8) {
  XMMRegister iop = XMMRegister::from_code(4);
  vinstr(0x72, iop, dst, Operand(src), k66, k0F, kWIG);
  EMIT(imm8);
}
3014
// vpshufhw: AVX high-word shuffle. VEX.F3.0F 70 /r ib. VEX.vvvv is unused
// for this instruction, so xmm0 is passed as a placeholder.
void Assembler::vpshufhw(XMMRegister dst, Operand src, uint8_t shuffle) {
  vinstr(0x70, dst, xmm0, src, kF3, k0F, kWIG);
  EMIT(shuffle);
}

// vpshuflw: AVX low-word shuffle. VEX.F2.0F 70 /r ib.
void Assembler::vpshuflw(XMMRegister dst, Operand src, uint8_t shuffle) {
  vinstr(0x70, dst, xmm0, src, kF2, k0F, kWIG);
  EMIT(shuffle);
}

// vpshufd: AVX dword shuffle. VEX.66.0F 70 /r ib.
void Assembler::vpshufd(XMMRegister dst, Operand src, uint8_t shuffle) {
  vinstr(0x70, dst, xmm0, src, k66, k0F, kWIG);
  EMIT(shuffle);
}
3029
// Variable blends: the fourth (mask) register is encoded in bits 7:4 of
// the trailing immediate byte, per the VEX /is4 convention.

// vblendvps: VEX.66.0F3A.W0 4A /r /is4.
void Assembler::vblendvps(XMMRegister dst, XMMRegister src1, XMMRegister src2,
                          XMMRegister mask) {
  vinstr(0x4A, dst, src1, src2, k66, k0F3A, kW0);
  EMIT(mask.code() << 4);
}

// vblendvpd: VEX.66.0F3A.W0 4B /r /is4.
void Assembler::vblendvpd(XMMRegister dst, XMMRegister src1, XMMRegister src2,
                          XMMRegister mask) {
  vinstr(0x4B, dst, src1, src2, k66, k0F3A, kW0);
  EMIT(mask.code() << 4);
}

// vpblendvb: VEX.66.0F3A.W0 4C /r /is4.
void Assembler::vpblendvb(XMMRegister dst, XMMRegister src1, XMMRegister src2,
                          XMMRegister mask) {
  vinstr(0x4C, dst, src1, src2, k66, k0F3A, kW0);
  EMIT(mask.code() << 4);
}
3047
// vpblendw: AVX word blend by immediate mask. VEX.66.0F3A 0E /r ib.
void Assembler::vpblendw(XMMRegister dst, XMMRegister src1, Operand src2,
                         uint8_t mask) {
  vinstr(0x0E, dst, src1, src2, k66, k0F3A, kWIG);
  EMIT(mask);
}

// vpalignr: AVX byte-aligned extract. VEX.66.0F3A 0F /r ib.
void Assembler::vpalignr(XMMRegister dst, XMMRegister src1, Operand src2,
                         uint8_t mask) {
  vinstr(0x0F, dst, src1, src2, k66, k0F3A, kWIG);
  EMIT(mask);
}
3059
// AVX lane extracts. VEX.vvvv is unused; xmm0 is a placeholder.

// vpextrb: VEX.66.0F3A 14 /r ib.
void Assembler::vpextrb(Operand dst, XMMRegister src, uint8_t offset) {
  vinstr(0x14, src, xmm0, dst, k66, k0F3A, kWIG);
  EMIT(offset);
}

// vpextrw: VEX.66.0F3A 15 /r ib.
void Assembler::vpextrw(Operand dst, XMMRegister src, uint8_t offset) {
  vinstr(0x15, src, xmm0, dst, k66, k0F3A, kWIG);
  EMIT(offset);
}

// vpextrd: VEX.66.0F3A 16 /r ib.
void Assembler::vpextrd(Operand dst, XMMRegister src, uint8_t offset) {
  vinstr(0x16, src, xmm0, dst, k66, k0F3A, kWIG);
  EMIT(offset);
}
3074
// vinsertps: AVX single-precision lane insert. VEX.66.0F3A 21 /r ib.
void Assembler::vinsertps(XMMRegister dst, XMMRegister src1, Operand src2,
                          uint8_t offset) {
  vinstr(0x21, dst, src1, src2, k66, k0F3A, kWIG);
  EMIT(offset);
}

// vpinsrb: AVX byte insert. VEX.66.0F3A 20 /r ib.
void Assembler::vpinsrb(XMMRegister dst, XMMRegister src1, Operand src2,
                        uint8_t offset) {
  vinstr(0x20, dst, src1, src2, k66, k0F3A, kWIG);
  EMIT(offset);
}

// vpinsrw: AVX word insert — note the 0F (not 0F3A) map. VEX.66.0F C4.
void Assembler::vpinsrw(XMMRegister dst, XMMRegister src1, Operand src2,
                        uint8_t offset) {
  vinstr(0xC4, dst, src1, src2, k66, k0F, kWIG);
  EMIT(offset);
}

// vpinsrd: AVX dword insert. VEX.66.0F3A 22 /r ib.
void Assembler::vpinsrd(XMMRegister dst, XMMRegister src1, Operand src2,
                        uint8_t offset) {
  vinstr(0x22, dst, src1, src2, k66, k0F3A, kWIG);
  EMIT(offset);
}
3098
// AVX rounding. The immediate is the rounding mode with bit 3 set to
// suppress the precision (inexact) exception.

// vroundsd: VEX.66.0F3A 0B /r ib.
void Assembler::vroundsd(XMMRegister dst, XMMRegister src1, XMMRegister src2,
                         RoundingMode mode) {
  vinstr(0x0b, dst, src1, src2, k66, k0F3A, kWIG);
  EMIT(static_cast<uint8_t>(mode) | 0x8);  // Mask precision exception.
}

// vroundss: VEX.66.0F3A 0A /r ib.
void Assembler::vroundss(XMMRegister dst, XMMRegister src1, XMMRegister src2,
                         RoundingMode mode) {
  vinstr(0x0a, dst, src1, src2, k66, k0F3A, kWIG);
  EMIT(static_cast<uint8_t>(mode) | 0x8);  // Mask precision exception.
}

// vroundps: VEX.66.0F3A 08 /r ib. vvvv unused (xmm0 placeholder).
void Assembler::vroundps(XMMRegister dst, XMMRegister src, RoundingMode mode) {
  vinstr(0x08, dst, xmm0, Operand(src), k66, k0F3A, kWIG);
  EMIT(static_cast<uint8_t>(mode) | 0x8);  // Mask precision exception.
}

// vroundpd: VEX.66.0F3A 09 /r ib. vvvv unused (xmm0 placeholder).
void Assembler::vroundpd(XMMRegister dst, XMMRegister src, RoundingMode mode) {
  vinstr(0x09, dst, xmm0, Operand(src), k66, k0F3A, kWIG);
  EMIT(static_cast<uint8_t>(mode) | 0x8);  // Mask precision exception.
}
3117
// vmovmskpd: AVX double sign-bit extraction into a GPR. VEX.66.0F 50 /r.
// vvvv is unused, so xmm0 is passed to the prefix emitter.
void Assembler::vmovmskpd(Register dst, XMMRegister src) {
  DCHECK(IsEnabled(AVX));
  EnsureSpace ensure_space(this);
  emit_vex_prefix(xmm0, kL128, k66, k0F, kWIG);
  EMIT(0x50);
  emit_sse_operand(dst, src);
}

// vmovmskps: AVX single sign-bit extraction. VEX.0F 50 /r.
void Assembler::vmovmskps(Register dst, XMMRegister src) {
  DCHECK(IsEnabled(AVX));
  EnsureSpace ensure_space(this);
  emit_vex_prefix(xmm0, kL128, kNoPrefix, k0F, kWIG);
  EMIT(0x50);
  emit_sse_operand(dst, src);
}

// vpmovmskb: AVX byte sign-bit extraction. VEX.66.0F D7 /r.
void Assembler::vpmovmskb(Register dst, XMMRegister src) {
  DCHECK(IsEnabled(AVX));
  EnsureSpace ensure_space(this);
  emit_vex_prefix(xmm0, kL128, k66, k0F, kWIG);
  EMIT(0xD7);
  emit_sse_operand(dst, src);
}
3141
// vextractps: AVX extract of a single-precision lane to memory.
// VEX.66.0F3A 17 /r ib. vvvv unused (xmm0 placeholder).
void Assembler::vextractps(Operand dst, XMMRegister src, uint8_t imm8) {
  vinstr(0x17, src, xmm0, dst, k66, k0F3A, VexW::kWIG);
  EMIT(imm8);
}

// vpcmpgtq: AVX packed signed 64-bit greater-than. VEX.66.0F38 37 /r.
void Assembler::vpcmpgtq(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
  vinstr(0x37, dst, src1, src2, k66, k0F38, VexW::kWIG);
}
3150
// Generic BMI1 emitter: VEX.LZ.0F38.W0 <op> with {vreg} in VEX.vvvv and
// {reg} in the ModR/M reg field.
void Assembler::bmi1(uint8_t op, Register reg, Register vreg, Operand rm) {
  DCHECK(IsEnabled(BMI1));
  EnsureSpace ensure_space(this);
  emit_vex_prefix(vreg, kLZ, kNoPrefix, k0F38, kW0);
  EMIT(op);
  emit_operand(reg, rm);
}
3158
// tzcnt (BMI1): count trailing zero bits. Encoding: F3 0F BC /r.
void Assembler::tzcnt(Register dst, Operand src) {
  DCHECK(IsEnabled(BMI1));
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0xBC);
  emit_operand(dst, src);
}

// lzcnt (LZCNT): count leading zero bits. Encoding: F3 0F BD /r.
void Assembler::lzcnt(Register dst, Operand src) {
  DCHECK(IsEnabled(LZCNT));
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0xBD);
  emit_operand(dst, src);
}

// popcnt (POPCNT): count set bits. Encoding: F3 0F B8 /r.
void Assembler::popcnt(Register dst, Operand src) {
  DCHECK(IsEnabled(POPCNT));
  EnsureSpace ensure_space(this);
  EMIT(0xF3);
  EMIT(0x0F);
  EMIT(0xB8);
  emit_operand(dst, src);
}
3185
// Generic BMI2 emitter: VEX.LZ.<pp>.0F38.W0 <op> with {vreg} in VEX.vvvv.
void Assembler::bmi2(SIMDPrefix pp, uint8_t op, Register reg, Register vreg,
                     Operand rm) {
  DCHECK(IsEnabled(BMI2));
  EnsureSpace ensure_space(this);
  emit_vex_prefix(vreg, kLZ, pp, k0F38, kW0);
  EMIT(op);
  emit_operand(reg, rm);
}

// rorx (BMI2): rotate right by immediate without touching flags.
// Encoding: VEX.LZ.F2.0F3A.W0 F0 /r ib. VEX.vvvv must be 0b1111 (unused),
// hence the zero-code register.
void Assembler::rorx(Register dst, Operand src, uint8_t imm8) {
  DCHECK(IsEnabled(BMI2));
  DCHECK(is_uint8(imm8));
  Register vreg = Register::from_code(0);  // VEX.vvvv unused
  EnsureSpace ensure_space(this);
  emit_vex_prefix(vreg, kLZ, kF2, k0F3A, kW0);
  EMIT(0xF0);
  emit_operand(dst, src);
  EMIT(imm8);
}
3205
// Generic table-driven emitters for whole families of SSE instructions;
// the caller supplies the mandatory prefix / escape / opcode bytes.

// SSE: <escape> <opcode> /r.
void Assembler::sse_instr(XMMRegister dst, Operand src, uint8_t escape,
                          uint8_t opcode) {
  EnsureSpace ensure_space(this);
  EMIT(escape);
  EMIT(opcode);
  emit_sse_operand(dst, src);
}

// SSE2: <prefix> <escape> <opcode> /r.
void Assembler::sse2_instr(XMMRegister dst, Operand src, uint8_t prefix,
                           uint8_t escape, uint8_t opcode) {
  EnsureSpace ensure_space(this);
  EMIT(prefix);
  EMIT(escape);
  EMIT(opcode);
  emit_sse_operand(dst, src);
}

// SSSE3: <prefix> <escape1> <escape2> <opcode> /r.
void Assembler::ssse3_instr(XMMRegister dst, Operand src, uint8_t prefix,
                            uint8_t escape1, uint8_t escape2, uint8_t opcode) {
  DCHECK(IsEnabled(SSSE3));
  EnsureSpace ensure_space(this);
  EMIT(prefix);
  EMIT(escape1);
  EMIT(escape2);
  EMIT(opcode);
  emit_sse_operand(dst, src);
}

// SSE4.1: <prefix> <escape1> <escape2> <opcode> /r.
void Assembler::sse4_instr(XMMRegister dst, Operand src, uint8_t prefix,
                           uint8_t escape1, uint8_t escape2, uint8_t opcode) {
  DCHECK(IsEnabled(SSE4_1));
  EnsureSpace ensure_space(this);
  EMIT(prefix);
  EMIT(escape1);
  EMIT(escape2);
  EMIT(opcode);
  emit_sse_operand(dst, src);
}
3244
// Core VEX-encoded instruction emitters. {src1} travels in VEX.vvvv,
// {dst} in the ModR/M reg field, {src2} in r/m. The two short overloads
// default the vector length to 128 bits.

void Assembler::vinstr(uint8_t op, XMMRegister dst, XMMRegister src1,
                       XMMRegister src2, SIMDPrefix pp, LeadingOpcode m, VexW w,
                       CpuFeature feature) {
  vinstr(op, dst, src1, src2, kL128, pp, m, w, feature);
}

void Assembler::vinstr(uint8_t op, XMMRegister dst, XMMRegister src1,
                       Operand src2, SIMDPrefix pp, LeadingOpcode m, VexW w,
                       CpuFeature feature) {
  vinstr(op, dst, src1, src2, kL128, pp, m, w, feature);
}

void Assembler::vinstr(uint8_t op, XMMRegister dst, XMMRegister src1,
                       XMMRegister src2, VectorLength l, SIMDPrefix pp,
                       LeadingOpcode m, VexW w, CpuFeature feature) {
  DCHECK(IsEnabled(feature));
  EnsureSpace ensure_space(this);
  emit_vex_prefix(src1, l, pp, m, w);
  EMIT(op);
  emit_sse_operand(dst, src2);
}

void Assembler::vinstr(uint8_t op, XMMRegister dst, XMMRegister src1,
                       Operand src2, VectorLength l, SIMDPrefix pp,
                       LeadingOpcode m, VexW w, CpuFeature feature) {
  DCHECK(IsEnabled(feature));
  EnsureSpace ensure_space(this);
  emit_vex_prefix(src1, l, pp, m, w);
  EMIT(op);
  emit_sse_operand(dst, src2);
}
3276
// Emit a memory operand with XMM register {reg} in the ModR/M reg field;
// XMM and GP register codes share the same 3-bit encoding space.
void Assembler::emit_sse_operand(XMMRegister reg, Operand adr) {
  Register ireg = Register::from_code(reg.code());
  emit_operand(ireg, adr);
}
3281
3282void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) {
3283 EMIT(0xC0 | dst.code() << 3 | src.code());
3284}
3285
3286void Assembler::emit_sse_operand(Register dst, XMMRegister src) {
3287 EMIT(0xC0 | dst.code() << 3 | src.code());
3288}
3289
3290void Assembler::emit_sse_operand(XMMRegister dst, Register src) {
3291 EMIT(0xC0 | (dst.code() << 3) | src.code());
3292}
3293
// Emit a VEX prefix. The compact 2-byte form (C5) only encodes the 0F
// opcode map and W=0, so the 3-byte form (C4) is used whenever another
// map or W1 is required.
void Assembler::emit_vex_prefix(XMMRegister vreg, VectorLength l, SIMDPrefix pp,
                                LeadingOpcode mm, VexW w) {
  if (mm != k0F || w != kW0) {
    EMIT(0xC4);
    // Change RXB from "110" to "111" to align with gdb disassembler.
    EMIT(0xE0 | mm);
    EMIT(w | ((~vreg.code() & 0xF) << 3) | l | pp);
  } else {
    EMIT(0xC5);
    EMIT(((~vreg.code()) << 3) | l | pp);
  }
}

// Convenience overload for GPR operands (BMI instructions): the register
// code maps directly onto the XMM encoding space.
void Assembler::emit_vex_prefix(Register vreg, VectorLength l, SIMDPrefix pp,
                                LeadingOpcode mm, VexW w) {
  XMMRegister ivreg = XMMRegister::from_code(vreg.code());
  emit_vex_prefix(ivreg, l, pp, mm, w);
}
3312
// Doubles the assembly buffer when it overflows. Copies the generated
// code (from the start) and the relocation info (which grows downward
// from the end) into the new buffer, then patches absolute internal
// references and pc-relative relocations to account for the move.
void Assembler::GrowBuffer() {
  DCHECK(buffer_overflow());
  DCHECK_EQ(buffer_start_, buffer_->start());

  // Compute new buffer size.
  int old_size = buffer_->size();
  int new_size = 2 * old_size;

  // Some internal data structures overflow for very large buffers,
  // they must ensure that kMaximalBufferSize is not too large.
  if (new_size > kMaximalBufferSize) {
    V8::FatalProcessOutOfMemory(nullptr, "Assembler::GrowBuffer");
  }

  // Set up new buffer.
  std::unique_ptr<AssemblerBuffer> new_buffer = buffer_->Grow(new_size);
  DCHECK_EQ(new_size, new_buffer->size());
  uint8_t* new_start = new_buffer->start();

  // Copy the data. pc_delta shifts code; rc_delta shifts the reloc info,
  // which stays anchored at the (new) end of the buffer.
  intptr_t pc_delta = new_start - buffer_start_;
  intptr_t rc_delta = (new_start + new_size) - (buffer_start_ + old_size);
  size_t reloc_size = (buffer_start_ + old_size) - reloc_info_writer.pos();
  MemMove(new_start, buffer_start_, pc_offset());
  MemMove(rc_delta + reloc_info_writer.pos(), reloc_info_writer.pos(),
          reloc_size);

  // Switch buffers.
  buffer_ = std::move(new_buffer);
  buffer_start_ = new_start;
  pc_ += pc_delta;
  reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
                               reloc_info_writer.last_pc() + pc_delta);

  // Relocate internal references (absolute addresses into the buffer).
  for (auto pos : internal_reference_positions_) {
    Address p = reinterpret_cast<Address>(buffer_start_ + pos);
    WriteUnalignedValue(p, ReadUnalignedValue<int>(p) + pc_delta);
  }

  // Relocate pc-relative references.
  int mode_mask = RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET);
  DCHECK_EQ(mode_mask, RelocInfo::kApplyMask & mode_mask);
  base::Vector<uint8_t> instructions{buffer_start_,
                                     static_cast<size_t>(pc_offset())};
  base::Vector<const uint8_t> reloc_info{reloc_info_writer.pos(), reloc_size};
  WritableJitAllocation jit_allocation =
      WritableJitAllocation::ForNonExecutableMemory(
          reinterpret_cast<Address>(instructions.begin()), instructions.size(),
          ThreadIsolation::JitAllocationType::kInstructionStream);
  for (WritableRelocIterator it(jit_allocation, instructions, reloc_info, 0,
                                mode_mask);
       !it.done(); it.next()) {
    it.rinfo()->apply(pc_delta);
  }

  DCHECK(!buffer_overflow());
}
3371
// Emit an 8-bit arithmetic instruction with an 8-bit immediate:
// <op1> <op2|reg> <imm8>. op1 must have bit 0 clear (8-bit form).
void Assembler::emit_arith_b(int op1, int op2, Register dst, int imm8) {
  DCHECK(is_uint8(op1) && is_uint8(op2));  // wrong opcode
  DCHECK(is_uint8(imm8));
  DCHECK_EQ(op1 & 0x01, 0);  // should be 8bit operation
  EMIT(op1);
  EMIT(op2 | dst.code());
  EMIT(imm8);
}
3380
// Emit a group-1 arithmetic instruction (add/or/adc/... selected by the
// 3-bit {sel} opcode extension) with an immediate operand, choosing the
// shortest of three encodings: sign-extended imm8 (83 /sel), the eax
// short form ((sel<<3)|05), or the full imm32 form (81 /sel).
void Assembler::emit_arith(int sel, Operand dst, const Immediate& x) {
  DCHECK((0 <= sel) && (sel <= 7));
  Register ireg = Register::from_code(sel);
  if (x.is_int8()) {
    EMIT(0x83);  // using a sign-extended 8-bit immediate.
    emit_operand(ireg, dst);
    EMIT(x.immediate() & 0xFF);
  } else if (dst.is_reg(eax)) {
    EMIT((sel << 3) | 0x05);  // short form if the destination is eax.
    emit(x);
  } else {
    EMIT(0x81);  // using a literal 32-bit immediate.
    emit_operand(ireg, dst);
    emit(x);
  }
}
3397
// Thin wrappers dispatching to the integer-code emit_operand below.
void Assembler::emit_operand(Register reg, Operand adr) {
  emit_operand(reg.code(), adr);
}

void Assembler::emit_operand(XMMRegister reg, Operand adr) {
  Register ireg = Register::from_code(reg.code());
  emit_operand(ireg, adr);
}
3406
// Emit a pre-encoded operand (ModR/M [+ SIB] [+ displacement]) with the
// given {code} written into the 3-bit reg field of the ModR/M byte, and
// record relocation info for a trailing 32-bit displacement if needed.
void Assembler::emit_operand(int code, Operand adr) {
  // Isolate-independent code may not embed relocatable addresses.
  DCHECK_IMPLIES(options().isolate_independent_code,
                 adr.rmode() != RelocInfo::CODE_TARGET);
  DCHECK_IMPLIES(options().isolate_independent_code,
                 adr.rmode() != RelocInfo::FULL_EMBEDDED_OBJECT);
  DCHECK_IMPLIES(options().isolate_independent_code,
                 adr.rmode() != RelocInfo::EXTERNAL_REFERENCE);

  const unsigned length = adr.encoded_bytes().length();
  DCHECK_GT(length, 0);

  // Emit updated ModRM byte containing the given register.
  EMIT((adr.encoded_bytes()[0] & ~0x38) | (code << 3));

  // Emit the rest of the encoded operand.
  for (unsigned i = 1; i < length; i++) EMIT(adr.encoded_bytes()[i]);

  // Emit relocation information if necessary.
  if (length >= sizeof(int32_t) && !RelocInfo::IsNoInfo(adr.rmode())) {
    pc_ -= sizeof(int32_t);  // pc_ must be *at* disp32
    RecordRelocInfo(adr.rmode());
    if (adr.rmode() == RelocInfo::INTERNAL_REFERENCE) {  // Fixup for labels
      emit_label(ReadUnalignedValue<Label*>(reinterpret_cast<Address>(pc_)));
    } else {
      pc_ += sizeof(int32_t);
    }
  }
}
3436
// Emit a 32-bit absolute code address for {label}. Bound labels are
// emitted directly (and their position remembered so GrowBuffer can
// re-patch them); unbound labels are chained via a displacement to be
// fixed up when the label binds.
void Assembler::emit_label(Label* label) {
  if (label->is_bound()) {
    internal_reference_positions_.push_back(pc_offset());
    emit(reinterpret_cast<uint32_t>(buffer_start_ + label->pos()));
  } else {
    emit_disp(label, Displacement::CODE_ABSOLUTE);
  }
}
3445
// Emit a two-byte x87 instruction whose second byte encodes the FPU
// stack offset {i} (0..7) added to the base opcode {b2}.
void Assembler::emit_farith(int b1, int b2, int i) {
  DCHECK(is_uint8(b1) && is_uint8(b2));  // wrong opcode
  DCHECK(0 <= i && i < 8);               // illegal stack offset
  EMIT(b1);
  EMIT(b2 + i);
}
3452
// Raw data emission helpers ("define byte/dword/qword").

void Assembler::db(uint8_t data) {
  EnsureSpace ensure_space(this);
  EMIT(data);
}

void Assembler::dd(uint32_t data) {
  EnsureSpace ensure_space(this);
  emit(data);
}

void Assembler::dq(uint64_t data) {
  EnsureSpace ensure_space(this);
  emit_q(data);
}

// Emit the 32-bit address of {label}, recorded as an internal reference
// so it is relocated if the buffer moves.
void Assembler::dd(Label* label) {
  EnsureSpace ensure_space(this);
  RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
  emit_label(label);
}
3473
// Record relocation info for the current pc, unless the mode is filtered
// out by ShouldRecordRelocInfo (e.g. modes the current options suppress).
void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
  if (!ShouldRecordRelocInfo(rmode)) return;
  RelocInfo rinfo(reinterpret_cast<Address>(pc_), rmode, data);
  reloc_info_writer.Write(&rinfo);
}
3479
3480#undef EMIT
3481
3482} // namespace internal
3483} // namespace v8
3484
3485#endif // V8_TARGET_ARCH_IA32
interpreter::OperandScale scale
Definition builtins.cc:44
SourcePosition pos
static constexpr bool is_valid(T value)
Definition bit-field.h:50
static constexpr U encode(T value)
Definition bit-field.h:55
std::forward_list< HeapNumberRequest > heap_number_requests_
Definition assembler.h:507
static const int kMaximalBufferSize
void AllocateAndInstallRequestedHeapNumbers(LocalIsolate *isolate)
Assembler(const AssemblerOptions &, std::unique_ptr< AssemblerBuffer >={})
Instruction * pc() const
static bool IsSupported(CpuFeature f)
static bool supports_wasm_simd_128_
static void SetSupported(CpuFeature f)
static void SetUnsupported(CpuFeature f)
static void ProbeImpl(bool cross_compile)
void next(Label *L) const
void init(Label *L, Type type)
Immediate(Handle< T > handle, RelocInfo::Mode mode=RelocInfo::FULL_EMBEDDED_OBJECT)
static Immediate EmbeddedNumber(double number)
void set_disp8(int8_t disp)
void set_sib(ScaleFactor scale, Register index, Register base)
RelocInfo::Mode rmode()
void set_dispr(int32_t disp, RelocInfo::Mode rmode)
void set_modrm(int mod, Register rm)
bool is_reg_only() const
V8_INLINE Operand(int32_t immediate, RelocInfo::Mode rmode=RelocInfo::NO_INFO)
static constexpr Register from_code(int code)
static const int kApplyMask
Definition reloc-info.h:369
uint32_t wasm_call_tag() const
static constexpr int ModeMask(Mode mode)
Definition reloc-info.h:272
static constexpr bool IsNoInfo(Mode mode)
Definition reloc-info.h:257
static constexpr Tagged< Smi > FromInt(int value)
Definition smi.h:38
base::OwnedVector< uint8_t > buffer_
Definition assembler.cc:111
Label label
int32_t offset
DirectHandle< JSReceiver > options
ZoneVector< RpoNumber > & result
#define iop(name,...)
LiftoffRegister reg
int pc_offset
int x
uint32_t const mask
int m
Definition mul-fft.cc:294
int r
Definition mul-fft.cc:298
STL namespace.
int int32_t
Definition unicode.cc:40
uintptr_t Address
Definition memory.h:13
bool operator!=(ExternalReference lhs, ExternalReference rhs)
bool DoubleToSmiInteger(double value, int *smi_int_value)
void PrintF(const char *format,...)
Definition utils.cc:39
constexpr int L
V8_EXPORT_PRIVATE void MemMove(void *dest, const void *src, size_t size)
Definition memcopy.h:189
V8_EXPORT_PRIVATE FlagValues v8_flags
uint32_t test
#define FATAL(...)
Definition logging.h:47
#define DCHECK_LE(v1, v2)
Definition logging.h:490
#define CHECK(condition)
Definition logging.h:124
#define DCHECK_NOT_NULL(val)
Definition logging.h:492
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define DCHECK_GT(v1, v2)
Definition logging.h:487
#define arraysize(array)
Definition macros.h:67
#define V8_INLINE
Definition v8config.h:500
#define V8_UNLIKELY(condition)
Definition v8config.h:660