v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
liftoff-assembler.h
1// Copyright 2017 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_WASM_BASELINE_LIFTOFF_ASSEMBLER_H_
6#define V8_WASM_BASELINE_LIFTOFF_ASSEMBLER_H_
7
8#include <iosfwd>
9#include <memory>
10
11#include "src/base/bits.h"
20#include "src/wasm/wasm-value.h"
21
22// Forward declarations.
23namespace v8::internal::compiler {
24class CallDescriptor;
25} // namespace v8::internal::compiler
26
27namespace v8::internal::wasm {
28
29inline constexpr Condition Negate(Condition cond) {
30 switch (cond) {
31 case kEqual:
32 return kNotEqual;
33 case kNotEqual:
34 return kEqual;
35 case kLessThan:
36 return kGreaterThanEqual;
37 case kLessThanEqual:
38 return kGreaterThan;
39 case kGreaterThanEqual:
40 return kLessThan;
41 case kGreaterThan:
42 return kLessThanEqual;
43 case kUnsignedLessThan:
44 return kUnsignedGreaterThanEqual;
45 case kUnsignedLessThanEqual:
46 return kUnsignedGreaterThan;
47 case kUnsignedGreaterThanEqual:
48 return kUnsignedLessThan;
49 case kUnsignedGreaterThan:
50 return kUnsignedLessThanEqual;
51 default:
52 UNREACHABLE();
53 }
54}
55
56inline constexpr Condition Flip(Condition cond) {
57 switch (cond) {
58 case kEqual:
59 return kEqual;
60 case kNotEqual:
61 return kNotEqual;
62 case kLessThan:
63 return kGreaterThan;
64 case kLessThanEqual:
65 return kGreaterThanEqual;
66 case kGreaterThanEqual:
67 return kLessThanEqual;
68 case kGreaterThan:
69 return kLessThan;
70 case kUnsignedLessThan:
71 return kUnsignedGreaterThan;
72 case kUnsignedLessThanEqual:
73 return kUnsignedGreaterThanEqual;
74 case kUnsignedGreaterThanEqual:
75 return kUnsignedLessThanEqual;
76 case kUnsignedGreaterThan:
77 return kUnsignedLessThan;
78 default:
79 UNREACHABLE();
80 }
81}
82
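// Editor's illustration (not part of the original file): {Negate} returns the
// logical complement of a condition, {Flip} the condition that holds when the
// two operands are swapped. A minimal compile-time check of that algebra:
static_assert(Negate(kLessThan) == kGreaterThanEqual);
static_assert(Flip(kLessThan) == kGreaterThan);
static_assert(Negate(Negate(kUnsignedGreaterThan)) == kUnsignedGreaterThan);
static_assert(Flip(Flip(kEqual)) == kEqual);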
83class LiftoffAssembler;
84class FreezeCacheState {
85 public:
86#if DEBUG
87 explicit FreezeCacheState(LiftoffAssembler& assm);
88 FreezeCacheState(FreezeCacheState&& other) V8_NOEXCEPT;
89 ~FreezeCacheState();
90
91 private:
92 LiftoffAssembler& assm_;
93#else
94 explicit FreezeCacheState(LiftoffAssembler&) {}
95#endif
96};
97
98class LiftoffAssembler : public MacroAssembler {
99 public:
100 // Each slot in our stack frame currently has exactly 8 bytes.
101 static constexpr int kStackSlotSize = 8;
102
103 static constexpr ValueKind kIntPtrKind =
104 kSystemPointerSize == kInt32Size ? kI32 : kI64;
105 // A tagged value known to be a Smi can be treated like a ptr-sized int.
106 static constexpr ValueKind kSmiKind = kTaggedSize == kInt32Size ? kI32 : kI64;
107
109
111
112 struct CacheState {
114
115 // Allow move construction and move assignment.
116 CacheState(CacheState&&) V8_NOEXCEPT = default;
117 CacheState& operator=(CacheState&&) V8_NOEXCEPT = default;
118 // Disallow copy construction.
119 CacheState(const CacheState&) = delete;
120
122 // Generates two lists of locations that contain references. {slots}
123 // contains the indices of slots on the value stack that contain references.
124 // {spills} contains all registers that contain references. The
125 // {spill_location} defines where register values will be spilled for a
126 // function call within the out-of-line code. {kStackSlots} means that the
127 // values in the registers will be written back to their stack slots.
128 // {kTopOfStack} means that the registers will be spilled on the stack with
129 // a {push} instruction.
130 void GetTaggedSlotsForOOLCode(/*out*/ ZoneVector<int>* slots,
131 /*out*/ LiftoffRegList* spills,
132 SpillLocation spill_location);
133
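// Editor's sketch (hypothetical names, not part of the original file): an
// out-of-line call site that needs the GC roots would do roughly
//   ZoneVector<int> slots(zone);
//   LiftoffRegList spills;
//   state.GetTaggedSlotsForOOLCode(&slots, &spills, kStackSlots);
// and then record {slots} in the safepoint while spilling {spills}.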
135
138
139 // TODO(jkummerow): Wrap all accesses to {stack_state} in accessors that
140 // check {frozen}.
146 static constexpr int kNoCachedMemIndex = -1;
147 // The index of the cached memory start, or {kNoCachedMemIndex} if none is
148 // cached ({cached_mem_start} will be {no_reg} in that case).
151#if DEBUG
152 uint32_t frozen = 0;
153#endif
154
155 bool has_unused_register(RegClass rc, LiftoffRegList pinned = {}) const {
156 if (kNeedI64RegPair && rc == kGpRegPair) {
157 LiftoffRegList available_regs =
158 kGpCacheRegList.MaskOut(pinned).MaskOut(used_registers);
159 return available_regs.GetNumRegsSet() >= 2;
160 } else if (kNeedS128RegPair && rc == kFpRegPair) {
161 LiftoffRegList available_regs =
162 kFpCacheRegList.MaskOut(pinned).MaskOut(used_registers);
163 return available_regs.HasAdjacentFpRegsSet();
164 }
165 LiftoffRegList candidates = GetCacheRegList(rc);
166 return has_unused_register(candidates.MaskOut(pinned));
167 }
168
169 bool has_unused_register(LiftoffRegList candidates) const {
170 LiftoffRegList available_regs = candidates.MaskOut(used_registers);
171 return !available_regs.is_empty();
172 }
173
174 LiftoffRegister unused_register(RegClass rc,
175 LiftoffRegList pinned = {}) const {
176 if (kNeedI64RegPair && rc == kGpRegPair) {
177 Register low = pinned.set(unused_register(kGpReg, pinned)).gp();
178 Register high = unused_register(kGpReg, pinned).gp();
179 return LiftoffRegister::ForPair(low, high);
180 } else if (kNeedS128RegPair && rc == kFpRegPair) {
181 LiftoffRegList available_regs =
182 kFpCacheRegList.MaskOut(pinned).MaskOut(used_registers);
183 DoubleRegister low =
184 available_regs.GetAdjacentFpRegsSet().GetFirstRegSet().fp();
186 return LiftoffRegister::ForFpPair(low);
187 }
188 LiftoffRegList candidates = GetCacheRegList(rc);
189 return unused_register(candidates, pinned);
190 }
191
192 LiftoffRegister unused_register(LiftoffRegList candidates,
193 LiftoffRegList pinned = {}) const {
194 LiftoffRegList available_regs =
195 candidates.MaskOut(used_registers).MaskOut(pinned);
196 return available_regs.GetFirstRegSet();
197 }
198
199 // Volatile registers are registers which are used for caching values that
200 // can easily be reloaded. Those are returned first if we run out of free
201 // registers.
207
229
230 void SetCacheRegister(Register* cache, Register reg) {
231 DCHECK(!frozen);
232 DCHECK_EQ(no_reg, *cache);
233 *cache = reg;
234 int liftoff_code = LiftoffRegister{reg}.liftoff_code();
235 DCHECK_EQ(0, register_use_count[liftoff_code]);
236 register_use_count[liftoff_code] = 1;
237 used_registers.set(reg);
238 }
239
243
249
250 Register TrySetCachedInstanceRegister(LiftoffRegList pinned) {
251 DCHECK_EQ(no_reg, cached_instance_data);
252 LiftoffRegList available_regs =
253 kGpCacheRegList.MaskOut(pinned).MaskOut(used_registers);
254 if (available_regs.is_empty()) return no_reg;
255 // Prefer the {kWasmImplicitArgRegister}, because that's where the
256 // instance data initially is, and where it needs to be for calls.
257 Register new_cache_reg = available_regs.has(kWasmImplicitArgRegister)
258 ? kWasmImplicitArgRegister
259 : available_regs.GetFirstRegSet().gp();
260 SetInstanceCacheRegister(new_cache_reg);
261 DCHECK_EQ(new_cache_reg, cached_instance_data);
262 return new_cache_reg;
263 }
264
265 void ClearCacheRegister(Register* cache) {
266 DCHECK(!frozen);
267 V8_ASSUME(cache == &cached_instance_data || cache == &cached_mem_start);
268 if (*cache == no_reg) return;
269 int liftoff_code = LiftoffRegister{*cache}.liftoff_code();
270 DCHECK_EQ(1, register_use_count[liftoff_code]);
271 register_use_count[liftoff_code] = 0;
272 used_registers.clear(*cache);
273 *cache = no_reg;
274 }
275
279
287
292
293 void inc_used(LiftoffRegister reg) {
294 DCHECK(!frozen);
295 if (reg.is_pair()) {
296 inc_used(reg.low());
297 inc_used(reg.high());
298 return;
299 }
300 used_registers.set(reg);
301 DCHECK_GT(kMaxInt, register_use_count[reg.liftoff_code()]);
302 ++register_use_count[reg.liftoff_code()];
303 }
304
305 // Returns whether this was the last use.
307 // Note that we do not DCHECK(!frozen) here due to a special case: When
308 // performing a call_indirect, we first create an OOL trap label (which
309 // freezes the state to make sure that the safepoint table remains valid)
310 // and then we drop values (which doesn't invalidate the safepoint table,
311 // so it is actually fine to do).
312 void dec_used(LiftoffRegister reg) {
313 if (reg.is_pair()) {
314 dec_used(reg.low());
315 dec_used(reg.high());
316 return;
317 }
318 int code = reg.liftoff_code();
319 DCHECK_LT(0, register_use_count[code]);
320 if (--register_use_count[code] == 0) used_registers.clear(reg);
321 }
322
323 bool is_used(LiftoffRegister reg) const {
324 if (reg.is_pair()) return is_used(reg.low()) || is_used(reg.high());
325 bool used = used_registers.has(reg);
326 DCHECK_EQ(used, register_use_count[reg.liftoff_code()] != 0);
327 return used;
328 }
329
330 uint32_t get_use_count(LiftoffRegister reg) const {
331 if (reg.is_pair()) {
332 DCHECK_EQ(register_use_count[reg.low().liftoff_code()],
333 register_use_count[reg.high().liftoff_code()]);
334 reg = reg.low();
335 }
336 DCHECK_GT(arraysize(register_use_count), reg.liftoff_code());
337 return register_use_count[reg.liftoff_code()];
338 }
339
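// Editor's note (illustrative): the counters above implement plain reference
// counting, e.g.
//   inc_used(reg);   // another value stack slot now lives in {reg}
//   DCHECK(is_used(reg));
//   dec_used(reg);   // once the count drops to zero, {reg} is free again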
340 void clear_used(LiftoffRegister reg) {
341 DCHECK(!frozen);
342 if (reg.is_pair()) {
343 clear_used(reg.low());
344 clear_used(reg.high());
345 return;
346 }
347 register_use_count[reg.liftoff_code()] = 0;
348 used_registers.clear(reg);
349 }
350
351 bool is_free(LiftoffRegister reg) const { return !is_used(reg); }
352
353 void reset_used_registers() {
354 DCHECK(!frozen);
355 used_registers = {};
356 memset(register_use_count, 0, sizeof(register_use_count));
357 }
358
359 LiftoffRegister GetNextSpillReg(LiftoffRegList candidates) {
360 DCHECK(!frozen);
361 DCHECK(!candidates.is_empty());
362 // This method should only be called if none of the candidates is free.
363 DCHECK(candidates.MaskOut(used_registers).is_empty());
364 LiftoffRegList unspilled = candidates.MaskOut(last_spilled_regs);
365 if (unspilled.is_empty()) {
366 unspilled = candidates;
367 last_spilled_regs = {};
368 }
369 LiftoffRegister reg = unspilled.GetFirstRegSet();
370 return reg;
371 }
372
373 void Steal(CacheState& source);
374
375 void Split(const CacheState& source);
376
377 uint32_t stack_height() const {
378 return static_cast<uint32_t>(stack_state.size());
379 }
380
381 private:
382 // Make the copy assignment operator private (to be used from {Split()}).
383 CacheState& operator=(const CacheState&) V8_NOEXCEPT = default;
384 };
385
386 explicit LiftoffAssembler(Zone*, std::unique_ptr<AssemblerBuffer>);
387 ~LiftoffAssembler() override;
388
389 Zone* zone() const { return cache_state_.stack_state.get_allocator().zone(); }
390
391 // Load a cache slot to a free register.
392 V8_INLINE LiftoffRegister LoadToRegister(VarState slot,
393 LiftoffRegList pinned) {
394 if (V8_LIKELY(slot.is_reg())) return slot.reg();
395 return LoadToRegister_Slow(slot, pinned);
396 }
397
398 // Slow path called for the method above.
399 V8_NOINLINE V8_PRESERVE_MOST LiftoffRegister LoadToRegister_Slow(
400 VarState slot, LiftoffRegList pinned);
401
402 // Load a non-register cache slot to a given (fixed) register.
404
405 // Load a cache slot to a register that has no other uses, so it can be
406 // modified.
407 LiftoffRegister LoadToModifiableRegister(VarState slot,
408 LiftoffRegList pinned) {
409 LiftoffRegister reg = LoadToRegister(slot, pinned);
410 // TODO(jkummerow): The following line is overly optimistic, as long as
411 // we don't pop the VarState, the register will never be considered free.
412 if (cache_state()->is_free(reg) && !pinned.has(reg)) return reg;
413
414 LiftoffRegister new_reg = GetUnusedRegister(reg.reg_class(), pinned);
415 // {new_reg} could be equal to {reg}, but it's unused by the stack now.
416 // Also, {reg} still holds the previous value, even if it was spilled.
417 if (new_reg != reg) Move(new_reg, reg, slot.kind());
418 return new_reg;
419 }
420
421 // Pop a VarState from the stack, updating the register use count accordingly.
423 V8_INLINE VarState PopVarState() {
424 VarState slot = cache_state_.stack_state.back();
425 cache_state_.stack_state.pop_back();
426 if (V8_LIKELY(slot.is_reg())) cache_state_.dec_used(slot.reg());
427 return slot;
428 }
429
430 V8_INLINE LiftoffRegister PopToRegister(LiftoffRegList pinned = {}) {
431 VarState slot = PopVarState();
432 return LoadToRegister(slot, pinned);
433 }
434
435 inline void PopToFixedRegister(LiftoffRegister reg);
436
437 // Use this to pop a value into a register that has no other uses, so it
438 // can be modified.
439 LiftoffRegister PopToModifiableRegister(LiftoffRegList pinned = {}) {
440 ValueKind kind = cache_state_.stack_state.back().kind();
441 LiftoffRegister reg = PopToRegister(pinned);
442 if (cache_state()->is_free(reg) && !pinned.has(reg)) return reg;
443
444 LiftoffRegister new_reg = GetUnusedRegister(reg.reg_class(), pinned);
445 // {new_reg} could be equal to {reg}, but it's unused by the stack now.
446 // Also, {reg} still holds the previous value, even if it was spilled.
447 if (new_reg != reg) Move(new_reg, reg, kind);
448 return new_reg;
449 }
450
451 // Returns the register which holds the value of stack slot {index}. If the
452 // value is not stored in a register yet, a register is allocated for it. The
453 // register is then assigned to the stack slot. The value stack height is not
454 // modified. The top of the stack is index 0, i.e. {PopToRegister()} and
455 // {PeekToRegister(0)} should result in the same register.
456 // When the value is finally popped, the use counter of its register has to be
457 // decremented. This can be done by popping the value with {DropValues}.
458 V8_INLINE LiftoffRegister PeekToRegister(int index, LiftoffRegList pinned) {
459 DCHECK_LT(index, cache_state_.stack_state.size());
460 VarState& slot = cache_state_.stack_state.end()[-1 - index];
461 if (V8_LIKELY(slot.is_reg())) return slot.reg();
462 LiftoffRegister reg = LoadToRegister(slot, pinned);
463 cache_state_.inc_used(reg);
464 slot.MakeRegister(reg);
465 return reg;
466 }
467
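// Editor's sketch (hypothetical usage, following the contract above): reading
// the stack top without popping it:
//   LiftoffRegister top = PeekToRegister(0, {});
//   ... use {top} ...
//   DropValues(1);  // pops the value and decrements the register's use count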
468 void DropValues(int count) {
469 DCHECK_GE(cache_state_.stack_state.size(), static_cast<size_t>(count));
470 for (VarState& slot :
471 base::VectorOf(cache_state_.stack_state.end() - count, count)) {
472 if (slot.is_reg()) {
473 cache_state_.dec_used(slot.reg());
474 }
475 }
476 cache_state_.stack_state.pop_back(count);
477 }
478
479 // Drop a specific value from the stack; this is an expensive operation which
480 // is currently only used for exceptions.
481 // Careful: this indexes "from the other end", i.e. offset=0 is the value at
482 // the bottom of the stack.
484
485 // Spill all loop inputs to the stack to free registers and to ensure that we
486 // can merge different values on the back-edge.
487 void SpillLoopArgs(int num);
488
489 V8_INLINE static int NextSpillOffset(ValueKind kind, int top_spill_offset);
490 V8_INLINE int NextSpillOffset(ValueKind kind);
491 inline int TopSpillOffset() const;
492
494
495 // Assumes that the exception is in {kReturnRegister0}. This is where the
496 // exception is stored by the unwinder after a throwing call.
497 inline void PushException();
498
499 inline void PushConstant(ValueKind kind, int32_t i32_const);
500
501 inline void PushStack(ValueKind kind);
502
504
507 }
508
509 // Get an unused register for class {rc}, reusing one of {try_first} if
510 // possible.
511 LiftoffRegister GetUnusedRegister(
512 RegClass rc, std::initializer_list<LiftoffRegister> try_first,
513 LiftoffRegList pinned) {
514 DCHECK(!cache_state_.frozen);
515 for (LiftoffRegister reg : try_first) {
516 DCHECK_EQ(reg.reg_class(), rc);
517 if (cache_state_.is_free(reg)) return reg;
518 }
519 return GetUnusedRegister(rc, pinned);
520 }
521
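// Editor's illustration: a typical binop lowering prefers reusing one of the
// operand registers for the result, falling back to any free register:
//   LiftoffRegister dst = GetUnusedRegister(kGpReg, {lhs, rhs}, pinned);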
522 // Get an unused register for class {rc}, excluding registers from {pinned},
523 // potentially spilling to free one.
524 LiftoffRegister GetUnusedRegister(RegClass rc, LiftoffRegList pinned) {
525 DCHECK(!cache_state_.frozen);
526 if (kNeedI64RegPair && rc == kGpRegPair) {
530 return LiftoffRegister::ForPair(low, high);
531 } else if (kNeedS128RegPair && rc == kFpRegPair) {
532 // kFpRegPair specific logic here because we need adjacent registers, not
533 // just any two registers (like kGpRegPair).
534 if (cache_state_.has_unused_register(rc, pinned)) {
535 return cache_state_.unused_register(rc, pinned);
536 }
537 DoubleRegister low_fp = SpillAdjacentFpRegisters(pinned).fp();
538 return LiftoffRegister::ForFpPair(low_fp);
539 }
540 LiftoffRegList candidates = GetCacheRegList(rc);
541 return GetUnusedRegister(candidates.MaskOut(pinned));
542 }
543
544 // Get an unused register of {candidates}, potentially spilling to free one.
545 LiftoffRegister GetUnusedRegister(LiftoffRegList candidates) {
546 DCHECK(!cache_state_.frozen);
547 DCHECK(!candidates.is_empty());
548 if (V8_LIKELY(cache_state_.has_unused_register(candidates))) {
549 return cache_state_.unused_register(candidates);
550 }
551 return SpillOneRegister(candidates);
552 }
553
554 // Performs operations on locals and the top {arity} value stack entries
555 // that would (very likely) have to be done by branches. Doing this up front
556 // avoids making each subsequent (conditional) branch repeat this work.
557 void PrepareForBranch(uint32_t arity, LiftoffRegList pinned);
558
559 // These methods handle control-flow merges. {MergeIntoNewState} is used to
560 // generate a new {CacheState} for a merge point, and also emits code to
561 // transfer values from the current state to the new merge state.
562 // {MergeFullStackWith} and {MergeStackWith} then later generate the code for
563 // more merges into an existing state.
564 V8_NODISCARD CacheState MergeIntoNewState(uint32_t num_locals, uint32_t arity,
565 uint32_t stack_depth);
566 void MergeFullStackWith(CacheState& target);
567 enum JumpDirection { kForwardJump, kBackwardJump };
568 void MergeStackWith(CacheState& target, uint32_t arity, JumpDirection);
569
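// Editor's sketch of the merge protocol described above (hypothetical names):
//   CacheState merge = MergeIntoNewState(num_locals(), arity, stack_depth);
//   ... emit code for the first branch ...
//   MergeStackWith(merge, arity, kForwardJump);  // later branch, same target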
570 void Spill(VarState* slot);
571 void SpillLocals();
572 void SpillAllRegisters();
573 inline void LoadSpillAddress(Register dst, int offset, ValueKind kind);
574
575 // Clear any uses of {reg} in both the cache and in {possible_uses}.
576 // Any use in the stack is spilled. If any register in {possible_uses} matches
577 // {reg}, then the content of {reg} is moved to a new temporary register, and
578 // all matches in {possible_uses} are rewritten to that temporary register.
580 std::initializer_list<Register*> possible_uses,
581 LiftoffRegList pinned);
582
583 // Spills all passed registers.
584 template <typename... Regs>
585 void SpillRegisters(Regs... regs) {
586 for (LiftoffRegister r : {LiftoffRegister(regs)...}) {
587 if (cache_state_.is_free(r)) continue;
588 if (r.is_gp() && cache_state_.cached_instance_data == r.gp()) {
590 } else if (r.is_gp() && cache_state_.cached_mem_start == r.gp()) {
593 } else {
595 }
596 }
597 }
598
599 // Call this method whenever spilling something, such that the number of used
600 // spill slots can be tracked and the stack frame will be allocated big enough.
601 void RecordUsedSpillOffset(int offset) {
602 if (offset >= max_used_spill_offset_) max_used_spill_offset_ = offset;
603 }
604
605 void RecordOolSpillSpaceSize(int size) {
606 if (size > ool_spill_space_size_) ool_spill_space_size_ = size;
607 }
608
609 // Load parameters into the right registers / stack slots for the call.
610 void PrepareBuiltinCall(const ValueKindSig* sig,
611 compiler::CallDescriptor* call_descriptor,
612 std::initializer_list<VarState> params);
613
614 // Load parameters into the right registers / stack slots for the call.
615 // Move {*target} into another register if needed and update {*target} to that
616 // register, or {no_reg} if target was spilled to the stack.
617 void PrepareCall(const ValueKindSig*, compiler::CallDescriptor*,
618 Register* target = nullptr,
619 Register target_instance = no_reg);
620 // Process return values of the call.
621 void FinishCall(const ValueKindSig*, compiler::CallDescriptor*);
622
623 // Move {src} into {dst}. {src} and {dst} must be different.
624 void Move(LiftoffRegister dst, LiftoffRegister src, ValueKind);
625
626 // Parallel register move: For a list of tuples <dst, src, kind>, move the
627 // {src} register of kind {kind} into {dst}. If {src} equals {dst}, ignore
628 // that tuple.
629 struct ParallelRegisterMoveTuple {
630 LiftoffRegister dst;
631 LiftoffRegister src;
632 ValueKind kind;
633 template <typename Dst, typename Src>
634 ParallelRegisterMoveTuple(Dst dst, Src src, ValueKind kind)
635 : dst(dst), src(src), kind(kind) {}
636 };
637
638 void ParallelRegisterMove(base::Vector<const ParallelRegisterMoveTuple>);
639
640 void ParallelRegisterMove(
641 std::initializer_list<ParallelRegisterMoveTuple> moves) {
642 ParallelRegisterMove(base::VectorOf(moves));
643 }
644
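// Editor's note (illustrative): the moves are performed as if in parallel, so
// overlapping transfers such as
//   ParallelRegisterMove({{regA, regB, kI32}, {regB, regA, kI32}});
// are legal; the implementation is expected to resolve such cycles instead of
// clobbering a source early.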
645 // Move the top stack values into the expected return locations specified by
646 // the given call descriptor.
647 void MoveToReturnLocations(const FunctionSig*, compiler::CallDescriptor*);
648 // Slow path for multi-return, called from {MoveToReturnLocations}.
649 V8_NOINLINE V8_PRESERVE_MOST void MoveToReturnLocationsMultiReturn(
650 const FunctionSig*, compiler::CallDescriptor*);
651#if DEBUG
652 void SetCacheStateFrozen() { cache_state_.frozen++; }
653 void UnfreezeCacheState() {
654 DCHECK_GT(cache_state_.frozen, 0);
655 cache_state_.frozen--;
656 }
657#endif
658#ifdef ENABLE_SLOW_DCHECKS
659 // Validate that the register use counts reflect the state of the cache.
660 bool ValidateCacheState() const;
661#endif
662
663 inline void LoadFixedArrayLengthAsInt32(LiftoffRegister dst, Register array,
664 LiftoffRegList pinned);
665
666 inline void LoadSmiAsInt32(LiftoffRegister dst, Register src_addr,
667 int32_t offset);
668
669 ////////////////////////////////////
670 // Platform-specific part.        //
671 ////////////////////////////////////
672
673 // This function emits machine code to prepare the stack frame, before the
674 // size of the stack frame is known. It returns an offset in the machine code
675 // which can later be patched (via {PatchPrepareStackFrame}) when the size of
676 // the frame is known.
677 inline int PrepareStackFrame();
679 inline void PrepareTailCall(int num_callee_stack_params,
680 int stack_param_delta);
681 inline void AlignFrameSize();
682 inline void PatchPrepareStackFrame(int offset, SafepointTableBuilder*,
683 bool feedback_vector_slot,
684 size_t stack_param_slots);
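// Editor's sketch of the patching protocol described above (hypothetical
// names):
//   int frame_start = PrepareStackFrame();  // emitted before the body
//   ... generate the function body, learning the final frame size ...
//   PatchPrepareStackFrame(frame_start, safepoint_builder,
//                          feedback_vector_slot, stack_param_slots);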
685 inline void FinishCode();
686 inline void AbortCompilation();
687 inline static constexpr int StaticStackFrameSize();
688 inline static int SlotSizeForType(ValueKind kind);
689 inline static bool NeedsAlignment(ValueKind kind);
690
691 inline void CheckTierUp(int declared_func_index, int budget_used,
692 Label* ool_label, const FreezeCacheState& frozen);
693 inline Register LoadOldFramePointer();
694 inline void CheckStackShrink();
695 inline void LoadConstant(LiftoffRegister, WasmValue);
696 inline void LoadInstanceDataFromFrame(Register dst);
697 inline void LoadTrustedPointer(Register dst, Register src_addr, int offset,
699 inline void LoadFromInstance(Register dst, Register instance, int offset,
700 int size);
701 inline void LoadTaggedPointerFromInstance(Register dst, Register instance,
702 int offset);
703 inline void SpillInstanceData(Register instance);
704 inline void ResetOSRTarget();
705 inline void LoadTaggedPointer(Register dst, Register src_addr,
706 Register offset_reg, int32_t offset_imm,
707 uint32_t* protected_load_pc = nullptr,
708 bool offset_reg_needs_shift = false);
709 inline void LoadProtectedPointer(Register dst, Register src_addr,
710 int32_t offset);
711 inline void LoadFullPointer(Register dst, Register src_addr,
712 int32_t offset_imm);
713 inline void LoadCodePointer(Register dst, Register src_addr, int32_t offset);
714#ifdef V8_ENABLE_SANDBOX
715 inline void LoadCodeEntrypointViaCodePointer(Register dst, Register src_addr,
716 int offset_imm);
717#endif
722 inline void StoreTaggedPointer(Register dst_addr, Register offset_reg,
723 int32_t offset_imm, Register src,
724 LiftoffRegList pinned,
725 uint32_t* protected_store_pc = nullptr,
727 // Warning: may clobber {dst} on some architectures!
728 inline void IncrementSmi(LiftoffRegister dst, int offset);
729 inline void Load(LiftoffRegister dst, Register src_addr, Register offset_reg,
730 uintptr_t offset_imm, LoadType type,
731 uint32_t* protected_load_pc = nullptr,
732 bool is_load_mem = false, bool i64_offset = false,
733 bool needs_shift = false);
734 inline void Store(Register dst_addr, Register offset_reg,
735 uintptr_t offset_imm, LiftoffRegister src, StoreType type,
736 LiftoffRegList pinned,
737 uint32_t* protected_store_pc = nullptr,
738 bool is_store_mem = false, bool i64_offset = false);
739 inline void AtomicLoad(LiftoffRegister dst, Register src_addr,
740 Register offset_reg, uintptr_t offset_imm,
741 LoadType type, LiftoffRegList pinned, bool i64_offset);
742 inline void AtomicStore(Register dst_addr, Register offset_reg,
743 uintptr_t offset_imm, LiftoffRegister src,
744 StoreType type, LiftoffRegList pinned,
745 bool i64_offset);
746
747 inline void AtomicAdd(Register dst_addr, Register offset_reg,
748 uintptr_t offset_imm, LiftoffRegister value,
750 bool i64_offset);
751
752 inline void AtomicSub(Register dst_addr, Register offset_reg,
753 uintptr_t offset_imm, LiftoffRegister value,
755 bool i64_offset);
756
757 inline void AtomicAnd(Register dst_addr, Register offset_reg,
758 uintptr_t offset_imm, LiftoffRegister value,
760 bool i64_offset);
761
762 inline void AtomicOr(Register dst_addr, Register offset_reg,
763 uintptr_t offset_imm, LiftoffRegister value,
764 LiftoffRegister result, StoreType type, bool i64_offset);
765
766 inline void AtomicXor(Register dst_addr, Register offset_reg,
767 uintptr_t offset_imm, LiftoffRegister value,
769 bool i64_offset);
770
771 inline void AtomicExchange(Register dst_addr, Register offset_reg,
772 uintptr_t offset_imm, LiftoffRegister value,
774 bool i64_offset);
775
776 inline void AtomicCompareExchange(Register dst_addr, Register offset_reg,
777 uintptr_t offset_imm,
778 LiftoffRegister expected,
779 LiftoffRegister new_value,
780 LiftoffRegister value, StoreType type,
781 bool i64_offset);
782
783 inline void AtomicFence();
784
785 inline void LoadCallerFrameSlot(LiftoffRegister, uint32_t caller_slot_idx,
786 ValueKind);
787 inline void StoreCallerFrameSlot(LiftoffRegister, uint32_t caller_slot_idx,
788 ValueKind, Register frame_pointer);
790 inline void MoveStackValue(uint32_t dst_offset, uint32_t src_offset,
791 ValueKind);
792
793 inline void Move(Register dst, Register src, ValueKind);
794 inline void Move(DoubleRegister dst, DoubleRegister src, ValueKind);
795
796 inline void Spill(int offset, LiftoffRegister, ValueKind);
797 inline void Spill(int offset, WasmValue);
798 inline void Fill(LiftoffRegister, int offset, ValueKind);
799 // Only used on 32-bit systems: Fill a register from a "half stack slot", i.e.
800 // 4 bytes on the stack holding half of a 64-bit value.
801 inline void FillI64Half(Register, int offset, RegPairHalf);
802 inline void FillStackSlotsWithZero(int start, int size);
803
804 inline void emit_trace_instruction(uint32_t markid);
805
806 // i32 binops.
807 inline void emit_i32_add(Register dst, Register lhs, Register rhs);
808 inline void emit_i32_addi(Register dst, Register lhs, int32_t imm);
809 inline void emit_i32_sub(Register dst, Register lhs, Register rhs);
810 inline void emit_i32_subi(Register dst, Register lhs, int32_t imm);
811 inline void emit_i32_mul(Register dst, Register lhs, Register rhs);
812 inline void emit_i32_muli(Register dst, Register lhs, int32_t imm);
813 inline void emit_i32_divs(Register dst, Register lhs, Register rhs,
814 Label* trap_div_by_zero,
815 Label* trap_div_unrepresentable);
816 inline void emit_i32_divu(Register dst, Register lhs, Register rhs,
817 Label* trap_div_by_zero);
818 inline void emit_i32_rems(Register dst, Register lhs, Register rhs,
819 Label* trap_rem_by_zero);
820 inline void emit_i32_remu(Register dst, Register lhs, Register rhs,
821 Label* trap_rem_by_zero);
822 inline void emit_i32_and(Register dst, Register lhs, Register rhs);
823 inline void emit_i32_andi(Register dst, Register lhs, int32_t imm);
824 inline void emit_i32_or(Register dst, Register lhs, Register rhs);
825 inline void emit_i32_ori(Register dst, Register lhs, int32_t imm);
826 inline void emit_i32_xor(Register dst, Register lhs, Register rhs);
827 inline void emit_i32_xori(Register dst, Register lhs, int32_t imm);
828 inline void emit_i32_shl(Register dst, Register src, Register amount);
829 inline void emit_i32_shli(Register dst, Register src, int32_t amount);
830 inline void emit_i32_sar(Register dst, Register src, Register amount);
831 inline void emit_i32_sari(Register dst, Register src, int32_t amount);
832 inline void emit_i32_shr(Register dst, Register src, Register amount);
833 inline void emit_i32_shri(Register dst, Register src, int32_t amount);
834
835 // i32 unops.
836 inline void emit_i32_clz(Register dst, Register src);
837 inline void emit_i32_ctz(Register dst, Register src);
838 inline bool emit_i32_popcnt(Register dst, Register src);
839
840 // i64 binops.
841 // Most variants taking an immediate as second input only need to support
842 // 32-bit immediates, because that's the only type of constant we track.
843 // Some (like addition) are also called in other situations where we can have
844 // bigger immediates. In that case we type the immediate as int64_t.
845 inline void emit_i64_add(LiftoffRegister dst, LiftoffRegister lhs,
846 LiftoffRegister rhs);
847 inline void emit_i64_addi(LiftoffRegister dst, LiftoffRegister lhs,
848 int64_t imm);
849 inline void emit_i64_sub(LiftoffRegister dst, LiftoffRegister lhs,
850 LiftoffRegister rhs);
851 inline void emit_i64_mul(LiftoffRegister dst, LiftoffRegister lhs,
852 LiftoffRegister rhs);
853 inline void emit_i64_muli(LiftoffRegister dst, LiftoffRegister lhs,
854 int32_t imm);
855 inline bool emit_i64_divs(LiftoffRegister dst, LiftoffRegister lhs,
856 LiftoffRegister rhs, Label* trap_div_by_zero,
857 Label* trap_div_unrepresentable);
858 inline bool emit_i64_divu(LiftoffRegister dst, LiftoffRegister lhs,
859 LiftoffRegister rhs, Label* trap_div_by_zero);
860 inline bool emit_i64_rems(LiftoffRegister dst, LiftoffRegister lhs,
861 LiftoffRegister rhs, Label* trap_rem_by_zero);
862 inline bool emit_i64_remu(LiftoffRegister dst, LiftoffRegister lhs,
863 LiftoffRegister rhs, Label* trap_rem_by_zero);
864 inline void emit_i64_and(LiftoffRegister dst, LiftoffRegister lhs,
865 LiftoffRegister rhs);
866 inline void emit_i64_andi(LiftoffRegister dst, LiftoffRegister lhs,
867 int32_t imm);
868 inline void emit_i64_or(LiftoffRegister dst, LiftoffRegister lhs,
869 LiftoffRegister rhs);
870 inline void emit_i64_ori(LiftoffRegister dst, LiftoffRegister lhs,
871 int32_t imm);
872 inline void emit_i64_xor(LiftoffRegister dst, LiftoffRegister lhs,
873 LiftoffRegister rhs);
874 inline void emit_i64_xori(LiftoffRegister dst, LiftoffRegister lhs,
875 int32_t imm);
876 inline void emit_i64_shl(LiftoffRegister dst, LiftoffRegister src,
877 Register amount);
878 inline void emit_i64_shli(LiftoffRegister dst, LiftoffRegister src,
879 int32_t amount);
880 inline void emit_i64_sar(LiftoffRegister dst, LiftoffRegister src,
881 Register amount);
882 inline void emit_i64_sari(LiftoffRegister dst, LiftoffRegister src,
883 int32_t amount);
884 inline void emit_i64_shr(LiftoffRegister dst, LiftoffRegister src,
885 Register amount);
886 inline void emit_i64_shri(LiftoffRegister dst, LiftoffRegister src,
887 int32_t amount);
888
889 // i64 unops.
890 inline void emit_i64_clz(LiftoffRegister dst, LiftoffRegister src);
891 inline void emit_i64_ctz(LiftoffRegister dst, LiftoffRegister src);
892 inline bool emit_i64_popcnt(LiftoffRegister dst, LiftoffRegister src);
893
894 inline void emit_u32_to_uintptr(Register dst, Register src);
895 // For security hardening: unconditionally clear {dst}'s high word.
896 inline void clear_i32_upper_half(Register dst);
897
898 inline void emit_ptrsize_add(Register dst, Register lhs, Register rhs);
899 inline void emit_ptrsize_sub(Register dst, Register lhs, Register rhs);
900 inline void emit_ptrsize_and(Register dst, Register lhs, Register rhs);
901 inline void emit_ptrsize_shri(Register dst, Register src, int amount);
902 inline void emit_ptrsize_addi(Register dst, Register lhs, intptr_t imm);
903 inline void emit_ptrsize_muli(Register dst, Register lhs, int32_t imm);
906
907 // f32 binops.
908 inline void emit_f32_add(DoubleRegister dst, DoubleRegister lhs,
909 DoubleRegister rhs);
910 inline void emit_f32_sub(DoubleRegister dst, DoubleRegister lhs,
911 DoubleRegister rhs);
912 inline void emit_f32_mul(DoubleRegister dst, DoubleRegister lhs,
913 DoubleRegister rhs);
914 inline void emit_f32_div(DoubleRegister dst, DoubleRegister lhs,
915 DoubleRegister rhs);
916 inline void emit_f32_min(DoubleRegister dst, DoubleRegister lhs,
917 DoubleRegister rhs);
918 inline void emit_f32_max(DoubleRegister dst, DoubleRegister lhs,
919 DoubleRegister rhs);
921 DoubleRegister rhs);
922
923 // f32 unops.
924 inline void emit_f32_abs(DoubleRegister dst, DoubleRegister src);
925 inline void emit_f32_neg(DoubleRegister dst, DoubleRegister src);
926 inline bool emit_f32_ceil(DoubleRegister dst, DoubleRegister src);
927 inline bool emit_f32_floor(DoubleRegister dst, DoubleRegister src);
928 inline bool emit_f32_trunc(DoubleRegister dst, DoubleRegister src);
930 inline void emit_f32_sqrt(DoubleRegister dst, DoubleRegister src);
931
932 // f64 binops.
933 inline void emit_f64_add(DoubleRegister dst, DoubleRegister lhs,
934 DoubleRegister rhs);
935 inline void emit_f64_sub(DoubleRegister dst, DoubleRegister lhs,
936 DoubleRegister rhs);
937 inline void emit_f64_mul(DoubleRegister dst, DoubleRegister lhs,
938 DoubleRegister rhs);
939 inline void emit_f64_div(DoubleRegister dst, DoubleRegister lhs,
940 DoubleRegister rhs);
941 inline void emit_f64_min(DoubleRegister dst, DoubleRegister lhs,
942 DoubleRegister rhs);
943 inline void emit_f64_max(DoubleRegister dst, DoubleRegister lhs,
944 DoubleRegister rhs);
946 DoubleRegister rhs);
947
948 // f64 unops.
949 inline void emit_f64_abs(DoubleRegister dst, DoubleRegister src);
950 inline void emit_f64_neg(DoubleRegister dst, DoubleRegister src);
951 inline bool emit_f64_ceil(DoubleRegister dst, DoubleRegister src);
952 inline bool emit_f64_floor(DoubleRegister dst, DoubleRegister src);
953 inline bool emit_f64_trunc(DoubleRegister dst, DoubleRegister src);
955 inline void emit_f64_sqrt(DoubleRegister dst, DoubleRegister src);
956
957 inline bool emit_type_conversion(WasmOpcode opcode, LiftoffRegister dst,
958 LiftoffRegister src, Label* trap = nullptr);
959
960 inline void emit_i32_signextend_i8(Register dst, Register src);
961 inline void emit_i32_signextend_i16(Register dst, Register src);
965
966 inline void emit_jump(Label*);
967 inline void emit_jump(Register);
968
969 inline void emit_cond_jump(Condition, Label*, ValueKind value, Register lhs,
970 Register rhs, const FreezeCacheState& frozen);
971 inline void emit_i32_cond_jumpi(Condition, Label*, Register lhs, int imm,
972 const FreezeCacheState& frozen);
973 // ptrsize compare+jump, but with 32-bit immediate. This will get
974 // sign-extended on 64-bit architectures before the comparison.
975 inline void emit_ptrsize_cond_jumpi(Condition, Label*, Register lhs,
976 int32_t imm,
977 const FreezeCacheState& frozen);
978 // Set {dst} to 1 if condition holds, 0 otherwise.
979 inline void emit_i32_eqz(Register dst, Register src);
980 inline void emit_i32_set_cond(Condition, Register dst, Register lhs,
981 Register rhs);
982 inline void emit_i64_eqz(Register dst, LiftoffRegister src);
989
990 // Optional select support: Returns false if generic code (via branches)
991 // should be emitted instead.
992 inline bool emit_select(LiftoffRegister dst, Register condition,
993 LiftoffRegister true_value,
994 LiftoffRegister false_value, ValueKind kind);
995
996 enum SmiCheckMode { kJumpOnSmi, kJumpOnNotSmi };
997 inline void emit_smi_check(Register obj, Label* target, SmiCheckMode mode,
998 const FreezeCacheState& frozen);
999
1000 inline void LoadTransform(LiftoffRegister dst, Register src_addr,
1001 Register offset_reg, uintptr_t offset_imm,
1002 LoadType type, LoadTransformationKind transform,
1003 uint32_t* protected_load_pc, bool i64_offset);
1004 inline void LoadLane(LiftoffRegister dst, LiftoffRegister src, Register addr,
1005 Register offset_reg, uintptr_t offset_imm, LoadType type,
1006 uint8_t lane, uint32_t* protected_load_pc,
1007 bool i64_offset);
1008 inline void StoreLane(Register dst, Register offset, uintptr_t offset_imm,
1009 LiftoffRegister src, StoreType type, uint8_t lane,
1010 uint32_t* protected_store_pc, bool i64_offset);
1012 LiftoffRegister rhs, const uint8_t shuffle[16],
1013 bool is_swizzle);
1015 LiftoffRegister rhs);
1017 LiftoffRegister lhs,
1018 LiftoffRegister rhs);
1020 LiftoffRegister src);
1022 LiftoffRegister src);
1024 LiftoffRegister src);
1026 LiftoffRegister src);
1028 LiftoffRegister src1,
1029 LiftoffRegister src2,
1031 int lane_width);
1033 inline void emit_i8x16_splat(LiftoffRegister dst, LiftoffRegister src);
1034 inline void emit_i16x8_splat(LiftoffRegister dst, LiftoffRegister src);
1035 inline void emit_i32x4_splat(LiftoffRegister dst, LiftoffRegister src);
1036 inline void emit_i64x2_splat(LiftoffRegister dst, LiftoffRegister src);
1037 inline bool emit_f16x8_splat(LiftoffRegister dst, LiftoffRegister src);
1038 inline void emit_f32x4_splat(LiftoffRegister dst, LiftoffRegister src);
1039 inline void emit_f64x2_splat(LiftoffRegister dst, LiftoffRegister src);
1040 inline void emit_i8x16_eq(LiftoffRegister dst, LiftoffRegister lhs,
1041 LiftoffRegister rhs);
1042 inline void emit_i8x16_ne(LiftoffRegister dst, LiftoffRegister lhs,
1043 LiftoffRegister rhs);
1044 inline void emit_i8x16_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
1045 LiftoffRegister rhs);
1046 inline void emit_i8x16_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
1047 LiftoffRegister rhs);
1048 inline void emit_i8x16_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
1049 LiftoffRegister rhs);
1050 inline void emit_i8x16_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
1051 LiftoffRegister rhs);
1052 inline void emit_i16x8_eq(LiftoffRegister dst, LiftoffRegister lhs,
1053 LiftoffRegister rhs);
1054 inline void emit_i16x8_ne(LiftoffRegister dst, LiftoffRegister lhs,
1055 LiftoffRegister rhs);
1056 inline void emit_i16x8_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
1057 LiftoffRegister rhs);
1058 inline void emit_i16x8_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
1059 LiftoffRegister rhs);
1060 inline void emit_i16x8_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
1061 LiftoffRegister rhs);
1062 inline void emit_i16x8_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
1063 LiftoffRegister rhs);
1064 inline void emit_i32x4_eq(LiftoffRegister dst, LiftoffRegister lhs,
1065 LiftoffRegister rhs);
1066 inline void emit_i32x4_ne(LiftoffRegister dst, LiftoffRegister lhs,
1067 LiftoffRegister rhs);
1068 inline void emit_i32x4_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
1069 LiftoffRegister rhs);
1070 inline void emit_i32x4_gt_u(LiftoffRegister dst, LiftoffRegister lhs,
1071 LiftoffRegister rhs);
1072 inline void emit_i32x4_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
1073 LiftoffRegister rhs);
1074 inline void emit_i32x4_ge_u(LiftoffRegister dst, LiftoffRegister lhs,
1075 LiftoffRegister rhs);
1076 inline void emit_i64x2_eq(LiftoffRegister dst, LiftoffRegister lhs,
1077 LiftoffRegister rhs);
1078 inline void emit_i64x2_ne(LiftoffRegister dst, LiftoffRegister lhs,
1079 LiftoffRegister rhs);
1080 inline void emit_i64x2_gt_s(LiftoffRegister dst, LiftoffRegister lhs,
1081 LiftoffRegister rhs);
1082 inline void emit_i64x2_ge_s(LiftoffRegister dst, LiftoffRegister lhs,
1083 LiftoffRegister rhs);
1084 inline bool emit_f16x8_eq(LiftoffRegister dst, LiftoffRegister lhs,
1085 LiftoffRegister rhs);
1086 inline bool emit_f16x8_ne(LiftoffRegister dst, LiftoffRegister lhs,
1087 LiftoffRegister rhs);
1088 inline bool emit_f16x8_lt(LiftoffRegister dst, LiftoffRegister lhs,
1089 LiftoffRegister rhs);
1090 inline bool emit_f16x8_le(LiftoffRegister dst, LiftoffRegister lhs,
1091 LiftoffRegister rhs);
1092 inline void emit_f32x4_eq(LiftoffRegister dst, LiftoffRegister lhs,
1093 LiftoffRegister rhs);
1094 inline void emit_f32x4_ne(LiftoffRegister dst, LiftoffRegister lhs,
1095 LiftoffRegister rhs);
1096 inline void emit_f32x4_lt(LiftoffRegister dst, LiftoffRegister lhs,
1097 LiftoffRegister rhs);
1098 inline void emit_f32x4_le(LiftoffRegister dst, LiftoffRegister lhs,
1099 LiftoffRegister rhs);
1100 inline void emit_f64x2_eq(LiftoffRegister dst, LiftoffRegister lhs,
1101 LiftoffRegister rhs);
1102 inline void emit_f64x2_ne(LiftoffRegister dst, LiftoffRegister lhs,
1103 LiftoffRegister rhs);
1104 inline void emit_f64x2_lt(LiftoffRegister dst, LiftoffRegister lhs,
1105 LiftoffRegister rhs);
1106 inline void emit_f64x2_le(LiftoffRegister dst, LiftoffRegister lhs,
1107 LiftoffRegister rhs);
1108 inline void emit_s128_const(LiftoffRegister dst, const uint8_t imms[16]);
1109 inline void emit_s128_not(LiftoffRegister dst, LiftoffRegister src);
1110 inline void emit_s128_and(LiftoffRegister dst, LiftoffRegister lhs,
1111 LiftoffRegister rhs);
1112 inline void emit_s128_or(LiftoffRegister dst, LiftoffRegister lhs,
1113 LiftoffRegister rhs);
1114 inline void emit_s128_xor(LiftoffRegister dst, LiftoffRegister lhs,
1115 LiftoffRegister rhs);
1116 inline void emit_s128_select(LiftoffRegister dst, LiftoffRegister src1,
1118 inline void emit_i8x16_neg(LiftoffRegister dst, LiftoffRegister src);
1122 inline void emit_i8x16_shl(LiftoffRegister dst, LiftoffRegister lhs,
1123 LiftoffRegister rhs);
1124 inline void emit_i8x16_shli(LiftoffRegister dst, LiftoffRegister lhs,
1125 int32_t rhs);
1127 LiftoffRegister rhs);
1129 int32_t rhs);
1131 LiftoffRegister rhs);
1133 int32_t rhs);
1134 inline void emit_i8x16_add(LiftoffRegister dst, LiftoffRegister lhs,
1135 LiftoffRegister rhs);
1137 LiftoffRegister rhs);
1139 LiftoffRegister rhs);
1140 inline void emit_i8x16_sub(LiftoffRegister dst, LiftoffRegister lhs,
1141 LiftoffRegister rhs);
1143 LiftoffRegister rhs);
1145 LiftoffRegister rhs);
1147 LiftoffRegister rhs);
1149 LiftoffRegister rhs);
1151 LiftoffRegister rhs);
1153 LiftoffRegister rhs);
1154 inline void emit_i16x8_neg(LiftoffRegister dst, LiftoffRegister src);
1157 inline void emit_i16x8_shl(LiftoffRegister dst, LiftoffRegister lhs,
1158 LiftoffRegister rhs);
1159 inline void emit_i16x8_shli(LiftoffRegister dst, LiftoffRegister lhs,
1160 int32_t rhs);
1162 LiftoffRegister rhs);
1164 int32_t rhs);
1166 LiftoffRegister rhs);
1168 int32_t rhs);
1169 inline void emit_i16x8_add(LiftoffRegister dst, LiftoffRegister lhs,
1170 LiftoffRegister rhs);
1172 LiftoffRegister rhs);
1174 LiftoffRegister rhs);
1175 inline void emit_i16x8_sub(LiftoffRegister dst, LiftoffRegister lhs,
1176 LiftoffRegister rhs);
1178 LiftoffRegister rhs);
1180 LiftoffRegister rhs);
1181 inline void emit_i16x8_mul(LiftoffRegister dst, LiftoffRegister lhs,
1182 LiftoffRegister rhs);
1184 LiftoffRegister rhs);
1186 LiftoffRegister rhs);
1188 LiftoffRegister rhs);
1190 LiftoffRegister rhs);
1192 LiftoffRegister src);
1194 LiftoffRegister src);
1196 LiftoffRegister src1,
1197 LiftoffRegister src2);
1199 LiftoffRegister src1,
1200 LiftoffRegister src2);
1202 LiftoffRegister src1,
1203 LiftoffRegister src2);
1205 LiftoffRegister src1,
1206 LiftoffRegister src2);
1208 LiftoffRegister src1,
1209 LiftoffRegister src2);
1211 LiftoffRegister src1,
1212 LiftoffRegister src2);
1214 LiftoffRegister src1,
1215 LiftoffRegister src2);
1217 LiftoffRegister src1,
1218 LiftoffRegister src2,
1219 LiftoffRegister acc);
1220 inline void emit_i32x4_neg(LiftoffRegister dst, LiftoffRegister src);
1223 inline void emit_i32x4_shl(LiftoffRegister dst, LiftoffRegister lhs,
1224 LiftoffRegister rhs);
1225 inline void emit_i32x4_shli(LiftoffRegister dst, LiftoffRegister lhs,
1226 int32_t rhs);
1228 LiftoffRegister rhs);
1230 int32_t rhs);
1232 LiftoffRegister rhs);
1234 int32_t rhs);
1235 inline void emit_i32x4_add(LiftoffRegister dst, LiftoffRegister lhs,
1236 LiftoffRegister rhs);
1237 inline void emit_i32x4_sub(LiftoffRegister dst, LiftoffRegister lhs,
1238 LiftoffRegister rhs);
1239 inline void emit_i32x4_mul(LiftoffRegister dst, LiftoffRegister lhs,
1240 LiftoffRegister rhs);
1242 LiftoffRegister rhs);
1244 LiftoffRegister rhs);
1246 LiftoffRegister rhs);
1248 LiftoffRegister rhs);
1250 LiftoffRegister rhs);
1252 LiftoffRegister src);
1254 LiftoffRegister src);
1256 LiftoffRegister src1,
1257 LiftoffRegister src2);
1259 LiftoffRegister src1,
1260 LiftoffRegister src2);
1262 LiftoffRegister src1,
1263 LiftoffRegister src2);
1265 LiftoffRegister src1,
1266 LiftoffRegister src2);
1267 inline void emit_i64x2_neg(LiftoffRegister dst, LiftoffRegister src);
1269 inline void emit_i64x2_shl(LiftoffRegister dst, LiftoffRegister lhs,
1270 LiftoffRegister rhs);
1271 inline void emit_i64x2_shli(LiftoffRegister dst, LiftoffRegister lhs,
1272 int32_t rhs);
1274 LiftoffRegister rhs);
1276 int32_t rhs);
1278 LiftoffRegister rhs);
1280 int32_t rhs);
1281 inline void emit_i64x2_add(LiftoffRegister dst, LiftoffRegister lhs,
1282 LiftoffRegister rhs);
1283 inline void emit_i64x2_sub(LiftoffRegister dst, LiftoffRegister lhs,
1284 LiftoffRegister rhs);
1285 inline void emit_i64x2_mul(LiftoffRegister dst, LiftoffRegister lhs,
1286 LiftoffRegister rhs);
1288 LiftoffRegister src1,
1289 LiftoffRegister src2);
1291 LiftoffRegister src1,
1292 LiftoffRegister src2);
1294 LiftoffRegister src1,
1295 LiftoffRegister src2);
1297 LiftoffRegister src1,
1298 LiftoffRegister src2);
1301 LiftoffRegister src);
1303 LiftoffRegister src);
1305 LiftoffRegister src);
1307 LiftoffRegister src);
1308 inline bool emit_f16x8_abs(LiftoffRegister dst, LiftoffRegister src);
1309 inline bool emit_f16x8_neg(LiftoffRegister dst, LiftoffRegister src);
1310 inline bool emit_f16x8_sqrt(LiftoffRegister dst, LiftoffRegister src);
1311 inline bool emit_f16x8_ceil(LiftoffRegister dst, LiftoffRegister src);
1312 inline bool emit_f16x8_floor(LiftoffRegister dst, LiftoffRegister src);
1313 inline bool emit_f16x8_trunc(LiftoffRegister dst, LiftoffRegister src);
1315 inline bool emit_f16x8_add(LiftoffRegister dst, LiftoffRegister lhs,
1316 LiftoffRegister rhs);
1317 inline bool emit_f16x8_sub(LiftoffRegister dst, LiftoffRegister lhs,
1318 LiftoffRegister rhs);
1319 inline bool emit_f16x8_mul(LiftoffRegister dst, LiftoffRegister lhs,
1320 LiftoffRegister rhs);
1321 inline bool emit_f16x8_div(LiftoffRegister dst, LiftoffRegister lhs,
1322 LiftoffRegister rhs);
1323 inline bool emit_f16x8_min(LiftoffRegister dst, LiftoffRegister lhs,
1324 LiftoffRegister rhs);
1325 inline bool emit_f16x8_max(LiftoffRegister dst, LiftoffRegister lhs,
1326 LiftoffRegister rhs);
1327 inline bool emit_f16x8_pmin(LiftoffRegister dst, LiftoffRegister lhs,
1328 LiftoffRegister rhs);
1329 inline bool emit_f16x8_pmax(LiftoffRegister dst, LiftoffRegister lhs,
1330 LiftoffRegister rhs);
1331 inline void emit_f32x4_abs(LiftoffRegister dst, LiftoffRegister src);
1332 inline void emit_f32x4_neg(LiftoffRegister dst, LiftoffRegister src);
1333 inline void emit_f32x4_sqrt(LiftoffRegister dst, LiftoffRegister src);
1334 inline bool emit_f32x4_ceil(LiftoffRegister dst, LiftoffRegister src);
1335 inline bool emit_f32x4_floor(LiftoffRegister dst, LiftoffRegister src);
1336 inline bool emit_f32x4_trunc(LiftoffRegister dst, LiftoffRegister src);
1338 inline void emit_f32x4_add(LiftoffRegister dst, LiftoffRegister lhs,
1339 LiftoffRegister rhs);
1340 inline void emit_f32x4_sub(LiftoffRegister dst, LiftoffRegister lhs,
1341 LiftoffRegister rhs);
1342 inline void emit_f32x4_mul(LiftoffRegister dst, LiftoffRegister lhs,
1343 LiftoffRegister rhs);
1344 inline void emit_f32x4_div(LiftoffRegister dst, LiftoffRegister lhs,
1345 LiftoffRegister rhs);
1346 inline void emit_f32x4_min(LiftoffRegister dst, LiftoffRegister lhs,
1347 LiftoffRegister rhs);
1348 inline void emit_f32x4_max(LiftoffRegister dst, LiftoffRegister lhs,
1349 LiftoffRegister rhs);
1350 inline void emit_f32x4_pmin(LiftoffRegister dst, LiftoffRegister lhs,
1351 LiftoffRegister rhs);
1352 inline void emit_f32x4_pmax(LiftoffRegister dst, LiftoffRegister lhs,
1353 LiftoffRegister rhs);
1355 LiftoffRegister rhs);
1357 LiftoffRegister rhs);
1358 inline void emit_f64x2_abs(LiftoffRegister dst, LiftoffRegister src);
1359 inline void emit_f64x2_neg(LiftoffRegister dst, LiftoffRegister src);
1360 inline void emit_f64x2_sqrt(LiftoffRegister dst, LiftoffRegister src);
1361 inline bool emit_f64x2_ceil(LiftoffRegister dst, LiftoffRegister src);
1362 inline bool emit_f64x2_floor(LiftoffRegister dst, LiftoffRegister src);
1363 inline bool emit_f64x2_trunc(LiftoffRegister dst, LiftoffRegister src);
1365 inline void emit_f64x2_add(LiftoffRegister dst, LiftoffRegister lhs,
1366 LiftoffRegister rhs);
1367 inline void emit_f64x2_sub(LiftoffRegister dst, LiftoffRegister lhs,
1368 LiftoffRegister rhs);
1369 inline void emit_f64x2_mul(LiftoffRegister dst, LiftoffRegister lhs,
1370 LiftoffRegister rhs);
1371 inline void emit_f64x2_div(LiftoffRegister dst, LiftoffRegister lhs,
1372 LiftoffRegister rhs);
1373 inline void emit_f64x2_min(LiftoffRegister dst, LiftoffRegister lhs,
1374 LiftoffRegister rhs);
1375 inline void emit_f64x2_max(LiftoffRegister dst, LiftoffRegister lhs,
1376 LiftoffRegister rhs);
1377 inline void emit_f64x2_pmin(LiftoffRegister dst, LiftoffRegister lhs,
1378 LiftoffRegister rhs);
1379 inline void emit_f64x2_pmax(LiftoffRegister dst, LiftoffRegister lhs,
1380 LiftoffRegister rhs);
1382 LiftoffRegister rhs);
1384 LiftoffRegister rhs);
1386 LiftoffRegister src);
1388 LiftoffRegister src);
1390 LiftoffRegister src);
1392 LiftoffRegister src);
1394 LiftoffRegister src);
1396 LiftoffRegister src);
1398 LiftoffRegister src);
1400 LiftoffRegister src);
1402 LiftoffRegister src);
1404 LiftoffRegister src);
1406 LiftoffRegister src);
1408 LiftoffRegister src);
1410 LiftoffRegister src);
1412 LiftoffRegister src);
1414 LiftoffRegister src);
1416 LiftoffRegister src);
1418 LiftoffRegister src);
1420 LiftoffRegister lhs,
1421 LiftoffRegister rhs);
1423 LiftoffRegister lhs,
1424 LiftoffRegister rhs);
1426 LiftoffRegister lhs,
1427 LiftoffRegister rhs);
1429 LiftoffRegister lhs,
1430 LiftoffRegister rhs);
1432 LiftoffRegister src);
1434 LiftoffRegister src);
1436 LiftoffRegister src);
1438 LiftoffRegister src);
1440 LiftoffRegister src);
1442 LiftoffRegister src);
1444 LiftoffRegister src);
1446 LiftoffRegister src);
1448 LiftoffRegister rhs);
1450 LiftoffRegister lhs,
1451 LiftoffRegister rhs);
1453 LiftoffRegister lhs,
1454 LiftoffRegister rhs);
1455 inline void emit_i8x16_abs(LiftoffRegister dst, LiftoffRegister src);
1456 inline void emit_i16x8_abs(LiftoffRegister dst, LiftoffRegister src);
1457 inline void emit_i32x4_abs(LiftoffRegister dst, LiftoffRegister src);
1458 inline void emit_i64x2_abs(LiftoffRegister dst, LiftoffRegister src);
1460 LiftoffRegister lhs,
1461 uint8_t imm_lane_idx);
1463 LiftoffRegister lhs,
1464 uint8_t imm_lane_idx);
1466 LiftoffRegister lhs,
1467 uint8_t imm_lane_idx);
1469 LiftoffRegister lhs,
1470 uint8_t imm_lane_idx);
1472 uint8_t imm_lane_idx);
1474 uint8_t imm_lane_idx);
1476 uint8_t imm_lane_idx);
1478 uint8_t imm_lane_idx);
1480 uint8_t imm_lane_idx);
1482 LiftoffRegister src2,
1483 uint8_t imm_lane_idx);
1485 LiftoffRegister src2,
1486 uint8_t imm_lane_idx);
1488 LiftoffRegister src2,
1489 uint8_t imm_lane_idx);
1491 LiftoffRegister src2,
1492 uint8_t imm_lane_idx);
1494 LiftoffRegister src2,
1495 uint8_t imm_lane_idx);
1497 LiftoffRegister src2,
1498 uint8_t imm_lane_idx);
1500 LiftoffRegister src2,
1501 uint8_t imm_lane_idx);
1502 inline bool emit_f16x8_qfma(LiftoffRegister dst, LiftoffRegister src1,
1503 LiftoffRegister src2, LiftoffRegister src3);
1504 inline bool emit_f16x8_qfms(LiftoffRegister dst, LiftoffRegister src1,
1505 LiftoffRegister src2, LiftoffRegister src3);
1506 inline void emit_f32x4_qfma(LiftoffRegister dst, LiftoffRegister src1,
1507 LiftoffRegister src2, LiftoffRegister src3);
1508 inline void emit_f32x4_qfms(LiftoffRegister dst, LiftoffRegister src1,
1509 LiftoffRegister src2, LiftoffRegister src3);
1510 inline void emit_f64x2_qfma(LiftoffRegister dst, LiftoffRegister src1,
1511 LiftoffRegister src2, LiftoffRegister src3);
1512 inline void emit_f64x2_qfms(LiftoffRegister dst, LiftoffRegister src1,
1513 LiftoffRegister src2, LiftoffRegister src3);
1514
1515 inline void set_trap_on_oob_mem64(Register index, uint64_t max_index,
1516 Label* trap_label);
1517
1518 inline void StackCheck(Label* ool_code);
1519
1520 inline void AssertUnreachable(AbortReason reason);
1521
1522 inline void PushRegisters(LiftoffRegList);
1523 inline void PopRegisters(LiftoffRegList);
1524
1525 inline void RecordSpillsInSafepoint(
1526 SafepointTableBuilder::Safepoint& safepoint, LiftoffRegList all_spills,
1527 LiftoffRegList ref_spills, int spill_offset);
1528
1529 inline void DropStackSlotsAndRet(uint32_t num_stack_slots);
1530
1531 // Execute a C call. Arguments are pushed to the stack and a pointer to this
1532 // region is passed to the C function. If {out_argument_kind != kVoid},
1533 // this is the return value of the C function, stored in {rets[0]}. Further
1534 // outputs (specified in {sig->returns()}) are read from the buffer and stored
1535 // in the remaining {rets} registers.
1536 inline void CallCWithStackBuffer(const std::initializer_list<VarState> args,
1537 const LiftoffRegister* rets,
1538 ValueKind return_kind,
1539 ValueKind out_argument_kind, int stack_bytes,
1540 ExternalReference ext_ref);
1541
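// Editor's sketch (hypothetical values): calling a C helper that takes one f64
// through the stack buffer and writes an f64 result back might look roughly
// like
//   CallCWithStackBuffer({{kF64, src, 0}}, &ret, kVoid, kF64,
//                        /*stack_bytes=*/8, ext_ref);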
1542 // Execute a C call with arguments passed according to the C calling
1543 // conventions.
1544 inline void CallC(const std::initializer_list<VarState> args,
1545 ExternalReference ext_ref);
1546
1547 inline void CallNativeWasmCode(Address addr);
1548 inline void TailCallNativeWasmCode(Address addr);
1549 // Indirect call: If {target == no_reg}, then pop the target from the stack.
1550 inline void CallIndirect(const ValueKindSig* sig,
1551 compiler::CallDescriptor* call_descriptor,
1552 Register target);
1553 inline void TailCallIndirect(compiler::CallDescriptor* call_descriptor,
1554 Register target);
1555 inline void CallBuiltin(Builtin builtin);
1556
1557 // Reserve space in the current frame; store the address of that space in {addr}.
1558 inline void AllocateStackSlot(Register addr, uint32_t size);
1559 inline void DeallocateStackSlot(uint32_t size);
1560
1561 // Instrumentation for shadow-stack-compatible OSR on x64.
1562 inline void MaybeOSR();
1563
1564 // Set the i32 at address {dst} to a non-zero value if {src} is a NaN.
1566 ValueKind kind);
1567
1568 // Set the i32 at address {dst} to a non-zero value if {src} contains a NaN.
1570 Register tmp_gp,
1571 LiftoffRegister tmp_s128,
1572 ValueKind lane_kind);
1573
1574 // Unconditionally set the i32 at address {dst} to a non-zero value.
1575 inline void emit_store_nonzero(Register dst);
1576
1577 inline bool supports_f16_mem_access();
1578
1579 ////////////////////////////////////
1580 // End of platform-specific part. //
1581 ////////////////////////////////////
1582
1583 uint32_t num_locals() const { return num_locals_; }
1584 void set_num_locals(uint32_t num_locals);
1585
1586 int GetTotalFrameSlotCountForGC() const;
1587 int OolSpillCount() const;
1588
1590
1591 ValueKind local_kind(uint32_t index) {
1592 DCHECK_GT(num_locals_, index);
1593 ValueKind* locals =
1594 num_locals_ <= kInlineLocalKinds ? local_kinds_ : more_local_kinds_;
1595 return locals[index];
1596 }
1597
1598 void set_local_kind(uint32_t index, ValueKind kind) {
1599 ValueKind* locals =
1600 num_locals_ <= kInlineLocalKinds ? local_kinds_ : more_local_kinds_;
1601 locals[index] = kind;
1602 }
1603
1604 CacheState* cache_state() { return &cache_state_; }
1605 const CacheState* cache_state() const { return &cache_state_; }
1606
1609 const char* bailout_detail() const { return bailout_detail_; }
1610
1611 inline void bailout(LiftoffBailoutReason reason, const char* detail);
1612
1613 private:
1615 LiftoffRegList pinned);
1616
1617 // Spill one of the candidate registers.
1618 V8_NOINLINE V8_PRESERVE_MOST LiftoffRegister SpillOneRegister(
1619 LiftoffRegList candidates);
1620 // Spill one or two fp registers to get a pair of adjacent fp registers.
1621 LiftoffRegister SpillAdjacentFpRegisters(LiftoffRegList pinned);
1622
1623 uint32_t num_locals_ = 0;
1624 static constexpr uint32_t kInlineLocalKinds = 16;
1625 union {
1626 ValueKind local_kinds_[kInlineLocalKinds];
1627 ValueKind* more_local_kinds_;
1628 };
1629 static_assert(sizeof(ValueKind) == 1,
1630 "Reconsider this inlining if ValueKind gets bigger");
1632 // The maximum spill offset for slots in the value stack.
1633 int max_used_spill_offset_ = StaticStackFrameSize();
1634 // The amount of memory needed for register spills in OOL code.
1635 int ool_spill_space_size_ = 0;
1636 LiftoffBailoutReason bailout_reason_ = kSuccess;
1637 const char* bailout_detail_ = nullptr;
1638};
1639
1640#if DEBUG
1641inline FreezeCacheState::FreezeCacheState(LiftoffAssembler& assm)
1642 : assm_(assm) {
1643 assm.SetCacheStateFrozen();
1644}
1645inline FreezeCacheState::FreezeCacheState(FreezeCacheState&& other) V8_NOEXCEPT
1646 : assm_(other.assm_) {
1647 assm_.SetCacheStateFrozen();
1648}
1649inline FreezeCacheState::~FreezeCacheState() { assm_.UnfreezeCacheState(); }
1650#endif
1651
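// Editor's illustration: freezing is RAII-scoped; a conditional branch that
// must not perturb the cache state looks roughly like
//   FreezeCacheState frozen(assembler);
//   assembler.emit_cond_jump(kEqual, &label, kI32, lhs, rhs, frozen);
// The frozen state is released when {frozen} goes out of scope.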
1652class LiftoffStackSlots {
1653 public:
1654 explicit LiftoffStackSlots(LiftoffAssembler* wasm_asm) : asm_(wasm_asm) {}
1657
1658 void Add(const LiftoffAssembler::VarState& src, uint32_t src_offset,
1659 RegPairHalf half, int dst_slot) {
1660 DCHECK_LE(0, dst_slot);
1661 slots_.emplace_back(src, src_offset, half, dst_slot);
1662 }
1663
1664 void Add(const LiftoffAssembler::VarState& src, int dst_slot) {
1665 DCHECK_LE(0, dst_slot);
1666 slots_.emplace_back(src, dst_slot);
1667 }
1668
1669 void SortInPushOrder() {
1670 std::sort(slots_.begin(), slots_.end(), [](const Slot& a, const Slot& b) {
1671 return a.dst_slot_ > b.dst_slot_;
1672 });
1673 }
1674
1675 inline void Construct(int param_slots);
1676
1677 private:
1678 // A logical slot, which may occupy multiple stack slots.
1679 struct Slot {
1680 Slot(const LiftoffAssembler::VarState& src, uint32_t src_offset,
1681 RegPairHalf half, int dst_slot)
1682 : src_(src),
1683 src_offset_(src_offset),
1684 half_(half),
1685 dst_slot_(dst_slot) {}
1686 Slot(const LiftoffAssembler::VarState& src, int dst_slot)
1687 : src_(src), half_(kLowWord), dst_slot_(dst_slot) {}
1688
1689 LiftoffAssembler::VarState src_;
1690 uint32_t src_offset_ = 0;
1691 RegPairHalf half_;
1692 int dst_slot_ = 0;
1693 };
1694
1695 // Returns the size in bytes of the given logical slot.
1696 static int SlotSizeInBytes(const Slot& slot) {
1697 const ValueKind kind = slot.src_.kind();
1698 if (kind == kS128) return kSimd128Size;
1699 if (kind == kF64) return kDoubleSize;
1700 return kSystemPointerSize;
1701 }
1702
1705};
1706
1707#if DEBUG
1708bool CompatibleStackSlotTypes(ValueKind a, ValueKind b);
1709#endif
1710
1711} // namespace v8::internal::wasm
1712
1713#endif // V8_WASM_BASELINE_LIFTOFF_ASSEMBLER_H_
void emit_i64x2_shr_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_uconvert_i8x16_high(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32x4_pmin(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_alltrue(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_dot_i8x16_i7x16_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_i16x8_sconvert_i8x16_high(LiftoffRegister dst, LiftoffRegister src)
void emit_f64x2_qfma(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_i32_subi(Register dst, Register lhs, int32_t imm)
void emit_i32_shli(Register dst, Register src, int32_t amount)
void emit_i32x4_extmul_low_i16x8_u(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void AtomicAdd(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
V8_NODISCARD CacheState MergeIntoNewState(uint32_t num_locals, uint32_t arity, uint32_t stack_depth)
void AtomicSub(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
void LoadTransform(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, LoadTransformationKind transform, uint32_t *protected_load_pc, bool i64_offset)
void emit_i32x4_uconvert_i16x8_low(LiftoffRegister dst, LiftoffRegister src)
void emit_s128_store_nonzero_if_nan(Register dst, LiftoffRegister src, Register tmp_gp, LiftoffRegister tmp_s128, ValueKind lane_kind)
void emit_i32x4_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_sar(Register dst, Register src, Register amount)
void emit_i16x8_shri_u(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
bool emit_i16x8_sconvert_f16x8(LiftoffRegister dst, LiftoffRegister src)
void AtomicAnd(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
void emit_i16x8_sconvert_i32x4(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_add_sat_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_shl(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_add_sat_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void AtomicCompareExchange(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister expected, LiftoffRegister new_value, LiftoffRegister value, StoreType type, bool i64_offset)
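As a reminder of the contract behind this emitter (per Wasm's atomic.rmw.cmpxchg): the store happens only if the cell still holds {expected}, and the value loaded from memory is always returned. A portable sketch using std::atomic (illustration only, not the emitter's code):

#include <atomic>
#include <cstdint>

// Portable model of a 32-bit compare-exchange: the returned value is what the
// cell contained before the operation, whether or not the store took place.
uint32_t cmpxchg_u32(std::atomic<uint32_t>& cell, uint32_t expected,
                     uint32_t new_value) {
  cell.compare_exchange_strong(expected, new_value);
  return expected;  // on failure, compare_exchange_strong wrote the old value here
}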
void emit_i8x16_shl(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void MergeStackWith(CacheState &target, uint32_t arity, JumpDirection)
void emit_s128_not(LiftoffRegister dst, LiftoffRegister src)
void emit_i64_signextend_i16(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_gt_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_dot_i8x16_i7x16_add_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister acc)
bool emit_f16x8_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_f64x2_convert_low_i32x4_u(LiftoffRegister dst, LiftoffRegister src)
void emit_f32_copysign(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_f32x4_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_pmax(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void set_local_kind(uint32_t index, ValueKind kind)
void emit_i32x4_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_shli(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_f64_min(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i64x2_shr_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_sconvert_i16x8(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f64_trunc(DoubleRegister dst, DoubleRegister src)
void emit_f64_set_cond(Condition condition, Register dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_f32x4_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_abs(LiftoffRegister dst, LiftoffRegister src)
void Fill(LiftoffRegister, int offset, ValueKind)
void emit_i32_shr(Register dst, Register src, Register amount)
void emit_i64_signextend_i32(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_f32x4_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void emit_ptrsize_muli(Register dst, Register lhs, int32_t imm)
void LoadFullPointer(Register dst, Register src_addr, int32_t offset_imm)
void emit_i8x16_max_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_extmul_low_i32x4_u(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_i16x8_max_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_abs(LiftoffRegister dst, LiftoffRegister src)
LiftoffRegister PeekToRegister(int index, LiftoffRegList pinned)
void emit_i32_andi(Register dst, Register lhs, int32_t imm)
bool emit_f16x8_abs(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_ge_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_ptrsize_set_cond(Condition condition, Register dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_convert_low_i32x4_s(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
bool emit_f32x4_promote_low_f16x8(LiftoffRegister dst, LiftoffRegister src)
void Store(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister src, StoreType type, LiftoffRegList pinned, uint32_t *protected_store_pc=nullptr, bool is_store_mem=false, bool i64_offset=false)
bool emit_f16x8_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void Load(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, uint32_t *protected_load_pc=nullptr, bool is_load_mem=false, bool i64_offset=false, bool needs_shift=false)
void emit_i64x2_shri_u(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
bool emit_f16x8_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_shr_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32x4_uconvert_i32x4(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_extmul_high_i32x4_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_f32x4_relaxed_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_shri_s(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_f64_sub(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i32_signextend_i16(Register dst, Register src)
void emit_i8x16_gt_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_divs(Register dst, Register lhs, Register rhs, Label *trap_div_by_zero, Label *trap_div_unrepresentable)
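The two trap labels correspond to the two trapping cases of Wasm i32.div_s: division by zero, and the one quotient that does not fit in 32 bits. A scalar sketch where exceptions stand in for branches to the trap labels (illustrative only):

#include <cstdint>
#include <limits>
#include <stdexcept>

// Scalar model of Wasm i32.div_s and its trap conditions.
int32_t i32_div_s(int32_t lhs, int32_t rhs) {
  if (rhs == 0) throw std::runtime_error("div by zero");        // trap_div_by_zero
  if (lhs == std::numeric_limits<int32_t>::min() && rhs == -1)  // INT32_MIN / -1
    throw std::runtime_error("unrepresentable");                // trap_div_unrepresentable
  return lhs / rhs;
}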
void emit_f32_neg(DoubleRegister dst, DoubleRegister src)
void emit_f64x2_promote_low_f32x4(LiftoffRegister dst, LiftoffRegister src)
static V8_INLINE int NextSpillOffset(ValueKind kind, int top_spill_offset)
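A simplified sketch of the offset bookkeeping, under the assumption of uniform 8-byte stack slots; the real helper takes the ValueKind so that wider kinds (e.g. s128) can get correctly sized and aligned slots:

// Hypothetical sketch only: advance the spill offset by one uniform slot.
constexpr int kSlotSize = 8;
constexpr int NextSpillOffsetSketch(int top_spill_offset) {
  return top_spill_offset + kSlotSize;
}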
void emit_ptrsize_add(Register dst, Register lhs, Register rhs)
void emit_i64_addi(LiftoffRegister dst, LiftoffRegister lhs, int64_t imm)
LiftoffRegister PopToModifiableRegister(LiftoffRegList pinned={})
void LoadLane(LiftoffRegister dst, LiftoffRegister src, Register addr, Register offset_reg, uintptr_t offset_imm, LoadType type, uint8_t lane, uint32_t *protected_load_pc, bool i64_offset)
void emit_i16x8_sub_sat_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_bitmask(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_alltrue(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_and(Register dst, Register lhs, Register rhs)
void emit_i16x8_extract_lane_s(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
V8_INLINE LiftoffRegister LoadToRegister(VarState slot, LiftoffRegList pinned)
void emit_f64_copysign(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
bool emit_f64x2_trunc(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_rounding_average_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
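The rounding average is defined in the Wasm spec as (a + b + 1) / 2 with the sum computed without overflow. One lane in scalar form (illustrative):

#include <cstdint>

// Scalar model of one i8x16.avgr_u lane: average rounded up, computed widened.
uint8_t avgr_u(uint8_t a, uint8_t b) {
  return static_cast<uint8_t>((unsigned{a} + unsigned{b} + 1) >> 1);
}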
void PrepareCall(const ValueKindSig *, compiler::CallDescriptor *, Register *target=nullptr, Register target_instance=no_reg)
void emit_i8x16_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_relaxed_swizzle(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_abs(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void CallFrameSetupStub(int declared_function_index)
void emit_i64x2_uconvert_i32x4_high(LiftoffRegister dst, LiftoffRegister src)
void emit_f32_abs(DoubleRegister dst, DoubleRegister src)
void emit_i32_remu(Register dst, Register lhs, Register rhs, Label *trap_rem_by_zero)
bool emit_f64x2_ceil(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadSpillAddress(Register dst, int offset, ValueKind kind)
void emit_i16x8_extmul_high_i8x16_u(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_f32x4_qfms(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_f64_neg(DoubleRegister dst, DoubleRegister src)
void emit_i64x2_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void emit_i32_ctz(Register dst, Register src)
LiftoffBailoutReason bailout_reason() const
void StoreCallerFrameSlot(LiftoffRegister, uint32_t caller_slot_idx, ValueKind, Register frame_pointer)
void emit_i32x4_shli(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void AtomicExchange(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
void emit_i32x4_min_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_xor(Register dst, Register lhs, Register rhs)
LiftoffRegister LoadI64HalfIntoRegister(VarState slot, RegPairHalf half, LiftoffRegList pinned)
void emit_s128_select(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister mask)
bool emit_i64_rems(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, Label *trap_rem_by_zero)
void emit_ptrsize_shri(Register dst, Register src, int amount)
void emit_i8x16_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void PrepareForBranch(uint32_t arity, LiftoffRegList pinned)
bool emit_f16x8_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_sconvert_i32x4_high(LiftoffRegister dst, LiftoffRegister src)
void CallC(const std::initializer_list< VarState > args, ExternalReference ext_ref)
void emit_i32x4_trunc_sat_f64x2_s_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_or(Register dst, Register lhs, Register rhs)
bool emit_f16x8_floor(LiftoffRegister dst, LiftoffRegister src)
void emit_i64_signextend_i8(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_extadd_pairwise_i8x16_s(LiftoffRegister dst, LiftoffRegister src)
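The extadd_pairwise family widens and sums adjacent lanes: each i16 output lane is the sum of two neighbouring sign-extended i8 input lanes. A scalar sketch (illustrative, not V8 code):

#include <array>
#include <cstdint>

// Scalar model of i16x8.extadd_pairwise_i8x16_s.
std::array<int16_t, 8> extadd_pairwise_s(const std::array<int8_t, 16>& src) {
  std::array<int16_t, 8> dst{};
  for (int i = 0; i < 8; ++i) {
    dst[i] = static_cast<int16_t>(int{src[2 * i]} + int{src[2 * i + 1]});
  }
  return dst;
}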
uint32_t GetNumUses(LiftoffRegister reg) const
const CacheState * cache_state() const
void emit_i64x2_bitmask(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_extmul_low_i32x4_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
bool emit_i64_divu(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, Label *trap_div_by_zero)
bool emit_f16x8_demote_f32x4_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_qfma(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_f32x4_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_gt_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_shri_s(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
bool emit_f64x2_nearest_int(LiftoffRegister dst, LiftoffRegister src)
void DropStackSlotsAndRet(uint32_t num_stack_slots)
void emit_f32x4_abs(LiftoffRegister dst, LiftoffRegister src)
void emit_s128_and_not(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_s128_relaxed_laneselect(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister mask, int lane_width)
void emit_i32x4_shri_u(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_f64x2_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_i16x8_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void emit_i32x4_sconvert_f32x4(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_shri_u(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_i32_sari(Register dst, Register src, int32_t amount)
void LoadConstant(LiftoffRegister, WasmValue)
void emit_i32x4_uconvert_i16x8_high(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_sub_sat_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_ptrsize_sub(Register dst, Register lhs, Register rhs)
void emit_i16x8_max_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f32x4_trunc(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_max_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_xor(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void PrepareTailCall(int num_callee_stack_params, int stack_param_delta)
void emit_f32x4_relaxed_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f64_nearest_int(DoubleRegister dst, DoubleRegister src)
bool emit_f16x8_ceil(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_divu(Register dst, Register lhs, Register rhs, Label *trap_div_by_zero)
void emit_cond_jump(Condition, Label *, ValueKind value, Register lhs, Register rhs, const FreezeCacheState &frozen)
void LoadFromInstance(Register dst, Register instance, int offset, int size)
void emit_i8x16_min_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_smi_check(Register obj, Label *target, SmiCheckMode mode, const FreezeCacheState &frozen)
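The check itself is a single bit test: in V8's pointer-tagging scheme a Smi has the low tag bit clear, while heap-object pointers have it set. A sketch of the test (illustrative name):

#include <cstdint>

// Sketch of the tag test behind a Smi check: low bit clear means Smi,
// low bit set means heap-object pointer.
bool IsSmiTagged(uintptr_t tagged_value) {
  return (tagged_value & 1) == 0;
}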
void emit_f64_max(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_f32x4_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void LoadProtectedPointer(Register dst, Register src_addr, int32_t offset)
void emit_i32x4_extmul_low_i16x8_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_f64_add(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i64_sar(LiftoffRegister dst, LiftoffRegister src, Register amount)
void emit_i16x8_shr_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_ori(Register dst, Register lhs, int32_t imm)
void emit_f64x2_qfms(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
bool emit_f32x4_ceil(LiftoffRegister dst, LiftoffRegister src)
bool emit_i64_popcnt(LiftoffRegister dst, LiftoffRegister src)
void emit_u32_to_uintptr(Register dst, Register src)
void emit_i32x4_trunc_sat_f64x2_u_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_mul(Register dst, Register lhs, Register rhs)
void emit_i8x16_extract_lane_u(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
bool emit_f64_floor(DoubleRegister dst, DoubleRegister src)
void emit_f32_set_cond(Condition condition, Register dst, DoubleRegister lhs, DoubleRegister rhs)
bool emit_f16x8_uconvert_i16x8(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_min_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_demote_f64x2_zero(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_div(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_gt_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f32x4_nearest_int(LiftoffRegister dst, LiftoffRegister src)
void emit_i64_shri(LiftoffRegister dst, LiftoffRegister src, int32_t amount)
void emit_i32x4_sconvert_i16x8_high(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_shri_s(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void emit_i64_set_cond(Condition condition, Register dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f64x2_floor(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_sub_sat_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
LiftoffRegister SpillAdjacentFpRegisters(LiftoffRegList pinned)
void emit_i32x4_sconvert_i16x8_low(LiftoffRegister dst, LiftoffRegister src)
void emit_f64x2_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_le(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_i32_popcnt(Register dst, Register src)
void ParallelRegisterMove(base::Vector< const ParallelRegisterMoveTuple >)
void emit_i16x8_rounding_average_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadCodePointer(Register dst, Register src_addr, int32_t offset)
void emit_i32x4_replace_lane(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, uint8_t imm_lane_idx)
void MoveStackValue(uint32_t dst_offset, uint32_t src_offset, ValueKind)
void emit_i64x2_gt_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_min_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_ge_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
LiftoffRegister GetUnusedRegister(LiftoffRegList candidates)
void Move(LiftoffRegister dst, LiftoffRegister src, ValueKind)
void emit_i8x16_ge_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64x2_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_uconvert_i32x4(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_lt(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_popcnt(LiftoffRegister dst, LiftoffRegister src)
void AtomicStore(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister src, StoreType type, LiftoffRegList pinned, bool i64_offset)
void emit_f64_abs(DoubleRegister dst, DoubleRegister src)
void emit_i64_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_pmin(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void PatchPrepareStackFrame(int offset, SafepointTableBuilder *, bool feedback_vector_slot, size_t stack_param_slots)
void emit_f32x4_sconvert_i32x4(LiftoffRegister dst, LiftoffRegister src)
void emit_ptrsize_cond_jumpi(Condition, Label *, Register lhs, int32_t imm, const FreezeCacheState &frozen)
void LoadCallerFrameSlot(LiftoffRegister, uint32_t caller_slot_idx, ValueKind)
LiftoffRegister GetUnusedRegister(RegClass rc, LiftoffRegList pinned)
void emit_i64x2_ne(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_or(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32_sqrt(DoubleRegister dst, DoubleRegister src)
void emit_i64x2_alltrue(LiftoffRegister dst, LiftoffRegister src)
void emit_f32_sub(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i16x8_ge_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f64x2_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_xori(Register dst, Register lhs, int32_t imm)
void emit_i64_ctz(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_set_cond(Condition, Register dst, Register lhs, Register rhs)
void emit_f32x4_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_i64_divs(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, Label *trap_div_by_zero, Label *trap_div_unrepresentable)
void emit_i16x8_sconvert_i8x16_low(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_sub(Register dst, Register lhs, Register rhs)
void TailCallIndirect(compiler::CallDescriptor *call_descriptor, Register target)
void PopToFixedRegister(LiftoffRegister reg)
void emit_i16x8_q15mulr_sat_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
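Q15 multiplication with rounding is defined as (a * b + 0x4000) >> 15, saturated to the i16 range; only -32768 * -32768 actually overflows. A scalar sketch of one lane (illustrative):

#include <algorithm>
#include <cstdint>

// Scalar model of one i16x8.q15mulr_sat_s lane.
int16_t q15mulr_sat_s(int16_t a, int16_t b) {
  int32_t product = (int32_t{a} * int32_t{b} + 0x4000) >> 15;
  return static_cast<int16_t>(std::clamp<int32_t>(product, INT16_MIN, INT16_MAX));
}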
void emit_i64x2_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_shli(LiftoffRegister dst, LiftoffRegister lhs, int32_t rhs)
void PushRegister(ValueKind kind, LiftoffRegister reg)
void emit_i64_shr(LiftoffRegister dst, LiftoffRegister src, Register amount)
void emit_i16x8_bitmask(LiftoffRegister dst, LiftoffRegister src)
void FinishCall(const ValueKindSig *, compiler::CallDescriptor *)
void emit_i32x4_sub(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_max_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void MoveToReturnLocations(const FunctionSig *, compiler::CallDescriptor *)
void emit_f32_div(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
void emit_i64_andi(LiftoffRegister dst, LiftoffRegister lhs, int32_t imm)
bool emit_f16x8_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_relaxed_trunc_f32x4_s(LiftoffRegister dst, LiftoffRegister src)
void emit_i64x2_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_sqrt(LiftoffRegister dst, LiftoffRegister src)
void emit_i16x8_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadSmiAsInt32(LiftoffRegister dst, Register src_addr, int32_t offset)
void emit_i8x16_shr_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_extmul_low_i8x16_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_f64x2_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void AllocateStackSlot(Register addr, uint32_t size)
void emit_i32x4_extract_lane(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_s128_or(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadTaggedPointer(Register dst, Register src_addr, Register offset_reg, int32_t offset_imm, uint32_t *protected_load_pc=nullptr, bool offset_reg_needs_shift=false)
void emit_f64x2_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_alltrue(LiftoffRegister dst, LiftoffRegister src)
void LoadToFixedRegister(VarState slot, LiftoffRegister reg)
void emit_i32x4_shr_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_abs(LiftoffRegister dst, LiftoffRegister src)
void emit_f32x4_splat(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_shr_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_cond_jumpi(Condition, Label *, Register lhs, int imm, const FreezeCacheState &frozen)
void emit_i64_eqz(Register dst, LiftoffRegister src)
void StoreTaggedPointer(Register dst_addr, Register offset_reg, int32_t offset_imm, Register src, LiftoffRegList pinned, uint32_t *protected_store_pc=nullptr, SkipWriteBarrier=kNoSkipWriteBarrier)
void emit_i64_clz(LiftoffRegister dst, LiftoffRegister src)
void bailout(LiftoffBailoutReason reason, const char *detail)
void emit_f32x4_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void IncrementSmi(LiftoffRegister dst, int offset)
LiftoffRegister GetUnusedRegister(RegClass rc, std::initializer_list< LiftoffRegister > try_first, LiftoffRegList pinned)
void emit_i16x8_gt_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_shl(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_splat(LiftoffRegister dst, LiftoffRegister src)
bool emit_f16x8_sconvert_i16x8(LiftoffRegister dst, LiftoffRegister src)
void MergeFullStackWith(CacheState &target)
void emit_f64x2_sqrt(LiftoffRegister dst, LiftoffRegister src)
void emit_i8x16_shuffle(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs, const uint8_t shuffle[16], bool is_swizzle)
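Unlike swizzle, shuffle takes compile-time indices into the 32-byte concatenation of both operands; the {is_swizzle} flag marks the special case where every index stays within the first operand. A scalar sketch (illustrative, not V8 code):

#include <array>
#include <cstdint>

// Scalar model of i8x16.shuffle: index 0..15 picks from lhs, 16..31 from rhs.
std::array<uint8_t, 16> i8x16_shuffle(const std::array<uint8_t, 16>& lhs,
                                      const std::array<uint8_t, 16>& rhs,
                                      const uint8_t shuffle[16]) {
  std::array<uint8_t, 16> dst{};
  for (int i = 0; i < 16; ++i) {
    dst[i] = shuffle[i] < 16 ? lhs[shuffle[i]] : rhs[shuffle[i] - 16];
  }
  return dst;
}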
void emit_i32x4_extadd_pairwise_i16x8_u(LiftoffRegister dst, LiftoffRegister src)
V8_NOINLINE V8_PRESERVE_MOST LiftoffRegister SpillOneRegister(LiftoffRegList candidates)
static constexpr ValueKind kIntPtrKind
bool emit_f32_ceil(DoubleRegister dst, DoubleRegister src)
void emit_i32_addi(Register dst, Register lhs, int32_t imm)
void emit_i16x8_extadd_pairwise_i8x16_u(LiftoffRegister dst, LiftoffRegister src)
void emit_i32x4_extmul_high_i16x8_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_i64x2_add(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i64_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i8x16_max_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void AtomicOr(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister value, LiftoffRegister result, StoreType type, bool i64_offset)
bool emit_select(LiftoffRegister dst, Register condition, LiftoffRegister true_value, LiftoffRegister false_value, ValueKind kind)
void PushConstant(ValueKind kind, int32_t i32_const)
bool emit_i16x8_uconvert_f16x8(LiftoffRegister dst, LiftoffRegister src)
bool emit_type_conversion(WasmOpcode opcode, LiftoffRegister dst, LiftoffRegister src, Label *trap=nullptr)
void emit_i8x16_neg(LiftoffRegister dst, LiftoffRegister src)
void PrepareBuiltinCall(const ValueKindSig *sig, compiler::CallDescriptor *call_descriptor, std::initializer_list< VarState > params)
void AtomicLoad(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, LiftoffRegList pinned, bool i64_offset)
void emit_i64x2_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void LoadReturnStackSlot(LiftoffRegister, int offset, ValueKind)
void emit_i32x4_neg(LiftoffRegister dst, LiftoffRegister src)
void emit_s128_xor(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i16x8_ge_u(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32x4_relaxed_trunc_f32x4_u(LiftoffRegister dst, LiftoffRegister src)
bool emit_f32_trunc(DoubleRegister dst, DoubleRegister src)
void LoadTaggedPointerFromInstance(Register dst, Register instance, int offset)
void emit_v128_anytrue(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_shl(Register dst, Register src, Register amount)
void emit_i32x4_extmul_high_i16x8_u(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2)
void emit_f32x4_lt(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_s128_const(LiftoffRegister dst, const uint8_t imms[16])
void emit_f64_sqrt(DoubleRegister dst, DoubleRegister src)
void emit_i64_and(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void StoreLane(Register dst, Register offset, uintptr_t offset_imm, LiftoffRegister src, StoreType type, uint8_t lane, uint32_t *protected_store_pc, bool i64_offset)
void CheckTierUp(int declared_func_index, int budget_used, Label *ool_label, const FreezeCacheState &frozen)
void emit_i16x8_extract_lane_u(LiftoffRegister dst, LiftoffRegister lhs, uint8_t imm_lane_idx)
void emit_f64x2_relaxed_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void CallIndirect(const ValueKindSig *sig, compiler::CallDescriptor *call_descriptor, Register target)
void RecordSpillsInSafepoint(SafepointTableBuilder::Safepoint &safepoint, LiftoffRegList all_spills, LiftoffRegList ref_spills, int spill_offset)
void emit_f64x2_relaxed_max(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
bool emit_f16x8_qfma(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, LiftoffRegister src3)
void emit_i8x16_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
V8_NOINLINE V8_PRESERVE_MOST void MoveToReturnLocationsMultiReturn(const FunctionSig *, compiler::CallDescriptor *)
V8_NOINLINE V8_PRESERVE_MOST void SpillRegister(LiftoffRegister)
void emit_f64x2_pmin(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_f32_add(DoubleRegister dst, DoubleRegister lhs, DoubleRegister rhs)
bool emit_f16x8_sqrt(LiftoffRegister dst, LiftoffRegister src)
void LoadTrustedPointer(Register dst, Register src_addr, int offset, IndirectPointerTag tag)
void emit_i32_add(Register dst, Register lhs, Register rhs)
void emit_i32x4_relaxed_trunc_f64x2_s_zero(LiftoffRegister dst, LiftoffRegister src)
void emit_i32_muli(Register dst, Register lhs, int32_t imm)
void emit_f32x4_eq(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs)
void emit_i32_signextend_i8(Register dst, Register src)
void emit_i8x16_bitmask(LiftoffRegister dst, LiftoffRegister src)
constexpr Register set(Register reg)
constexpr LiftoffRegister clear(LiftoffRegister reg)
LiftoffRegList MaskOut(const LiftoffRegList mask) const
constexpr bool HasAdjacentFpRegsSet() const
bool has(LiftoffRegister reg) const
constexpr LiftoffRegList GetAdjacentFpRegsSet() const
constexpr unsigned GetNumRegsSet() const
LiftoffRegister GetFirstRegSet() const
constexpr DoubleRegister fp() const
static LiftoffRegister ForFpPair(DoubleRegister low)
static LiftoffRegister ForPair(Register low, Register high)
void Add(const LiftoffAssembler::VarState &src, uint32_t src_offset, RegPairHalf half, int dst_slot)
base::SmallVector< Slot, 8 > slots_
static int SlotSizeInBytes(const Slot &slot)
LiftoffStackSlots & operator=(const LiftoffStackSlots &)=delete
void Add(const LiftoffAssembler::VarState &src, int dst_slot)
LiftoffStackSlots(LiftoffAssembler *wasm_asm)
LiftoffStackSlots(const LiftoffStackSlots &)=delete
void MakeRegister(LiftoffRegister r)
constexpr Condition Flip(Condition cond)
constexpr Condition Negate(Condition cond)
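The two helpers are easy to confuse: Negate yields the complementary predicate for the same operand order, while Flip yields the predicate that holds once the operands are swapped. A self-checking sketch in plain C++:

#include <cassert>

int main() {
  int a = 1, b = 2;
  // Negate(kLessThan) == kGreaterThanEqual: same operands, complementary test.
  assert((a < b) == !(a >= b));
  // Flip(kLessThan) == kGreaterThan: swapped operands, mirrored test.
  assert((a < b) == (b > a));
}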
static constexpr int kAfterMaxLiftoffRegCode
static constexpr bool kNeedS128RegPair
static constexpr LiftoffRegList GetCacheRegList(RegClass rc)
int declared_function_index(const WasmModule *module, int func_index)
static constexpr LiftoffRegList kGpCacheRegList
static constexpr LiftoffRegList kFpCacheRegList
static constexpr bool kNeedI64RegPair
constexpr Register no_reg
constexpr int kTaggedSize  // globals.h:542
constexpr int kSimd128Size  // globals.h:706
constexpr int kSystemPointerSize  // globals.h:410
constexpr int kInt32Size  // globals.h:401
constexpr Register kWasmImplicitArgRegister
constexpr int kMaxInt  // globals.h:374
constexpr int kDoubleSize  // globals.h:407
#define V8_NOEXCEPT
#define DCHECK_LE(v1, v2)  // logging.h:490
#define DCHECK_NE(v1, v2)  // logging.h:486
#define DCHECK_GE(v1, v2)  // logging.h:488
#define DCHECK(condition)  // logging.h:482
#define DCHECK_LT(v1, v2)  // logging.h:489
#define DCHECK_EQ(v1, v2)  // logging.h:485
#define DCHECK_GT(v1, v2)  // logging.h:487
#define arraysize(array)  // macros.h:67
uint32_t register_use_count[kAfterMaxLiftoffRegCode]
void SetMemStartCacheRegister(Register reg, int memory_index)
Register TrySetCachedInstanceRegister(LiftoffRegList pinned)
bool has_unused_register(LiftoffRegList candidates) const
LiftoffRegister GetNextSpillReg(LiftoffRegList candidates)
LiftoffRegister unused_register(RegClass rc, LiftoffRegList pinned={}) const
LiftoffRegister take_volatile_register(LiftoffRegList candidates)
CacheState & operator=(const CacheState &) V8_NOEXCEPT=default
void DefineSafepoint(SafepointTableBuilder::Safepoint &safepoint)
void GetTaggedSlotsForOOLCode(ZoneVector< int > *slots, LiftoffRegList *spills, SpillLocation spill_location)
void DefineSafepointWithCalleeSavedRegisters(SafepointTableBuilder::Safepoint &safepoint)
bool has_unused_register(RegClass rc, LiftoffRegList pinned={}) const
uint32_t get_use_count(LiftoffRegister reg) const
CacheState(CacheState &&) V8_NOEXCEPT=default
void SetCacheRegister(Register *cache, Register reg)
V8_INLINE void ClearCacheRegister(Register *cache)
LiftoffRegister unused_register(LiftoffRegList candidates, LiftoffRegList pinned={}) const
bool has_volatile_register(LiftoffRegList candidates)
Slot(const LiftoffAssembler::VarState &src, int dst_slot)
Slot(const LiftoffAssembler::VarState &src, uint32_t src_offset, RegPairHalf half, int dst_slot)
#define V8_INLINE  // v8config.h:500
#define V8_ASSUME  // v8config.h:533
#define V8_LIKELY(condition)  // v8config.h:661
#define V8_NOINLINE  // v8config.h:586
#define V8_PRESERVE_MOST  // v8config.h:598
#define V8_NODISCARD  // v8config.h:693