#ifndef V8_BASELINE_LOONG64_BASELINE_ASSEMBLER_LOONG64_INL_H_
#define V8_BASELINE_LOONG64_BASELINE_ASSEMBLER_LOONG64_INL_H_

#include "src/baseline/baseline-assembler.h"
#include "src/codegen/interface-descriptors.h"
#include "src/codegen/loong64/assembler-loong64-inl.h"
#include "src/objects/literal-objects-inl.h"

namespace v8 {
namespace internal {
namespace baseline {
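// RAII scope that hands out scratch registers to baseline code generators.
// The outermost scope opened on an assembler also donates {t0, t1, t2, t3}
// to the scratch pool; nested scopes just share the wrapped
// UseScratchRegisterScope.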
class BaselineAssembler::ScratchRegisterScope {
 public:
  explicit ScratchRegisterScope(BaselineAssembler* assembler)
      : assembler_(assembler),
        prev_scope_(assembler->scratch_register_scope_),
        wrapped_scope_(assembler->masm()) {
    if (!assembler_->scratch_register_scope_) {
      // If we haven't opened a scratch scope yet, for the first one add a
      // couple of extra registers.
      wrapped_scope_.Include({t0, t1, t2, t3});
    }
    assembler_->scratch_register_scope_ = this;
  }
  ~ScratchRegisterScope() {
    assembler_->scratch_register_scope_ = prev_scope_;
  }

  Register AcquireScratch() { return wrapped_scope_.Acquire(); }

 private:
  BaselineAssembler* assembler_;
  ScratchRegisterScope* prev_scope_;
  UseScratchRegisterScope wrapped_scope_;
};

#define __ masm_->
MemOperand BaselineAssembler::RegisterFrameOperand(
    interpreter::Register interpreter_register) {
  return MemOperand(fp, interpreter_register.ToOperand() * kSystemPointerSize);
}
void BaselineAssembler::RegisterFrameAddress(
    interpreter::Register interpreter_register, Register rscratch) {
  return __ Add_d(rscratch, fp,
                  interpreter_register.ToOperand() * kSystemPointerSize);
}
MemOperand BaselineAssembler::FeedbackVectorOperand() {
  return MemOperand(fp, BaselineFrameConstants::kFeedbackVectorFromFp);
}
MemOperand BaselineAssembler::FeedbackCellOperand() {
  return MemOperand(fp, BaselineFrameConstants::kFeedbackCellFromFp);
}
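// LOONG64 has no condition-flags register, so the jump helpers below all
// lower to explicit compare-and-branch instructions on a (cc, lhs, rhs)
// triple.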
void BaselineAssembler::Bind(Label* label) { __ bind(label); }
void BaselineAssembler::JumpTarget() {
  // NOP.
}
void BaselineAssembler::JumpIfRoot(Register value, RootIndex index,
                                   Label* target, Label::Distance) {
  __ JumpIfRoot(value, index, target);
}
void BaselineAssembler::JumpIfNotRoot(Register value, RootIndex index,
                                      Label* target, Label::Distance) {
  __ JumpIfNotRoot(value, index, target);
}
void BaselineAssembler::JumpIfSmi(Register value, Label* target,
                                  Label::Distance) {
  __ JumpIfSmi(value, target);
}
void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
                                     Label::Distance) {
  __ JumpIfNotSmi(value, target);
}
void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
                                        Label* target,
                                        Label::Distance distance) {
  JumpIf(cc, left, Operand(right), target, distance);
}
void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
                                      Label* target, Label::Distance) {
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ And(scratch, value, Operand(mask));
  __ Branch(target, cc, scratch, Operand(zero_reg));
}
void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
                               Label* target, Label::Distance) {
  __ Branch(target, cc, lhs, Operand(rhs));
}
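// With static roots, a JS primitive can be recognized from its map alone, so
// a single MacroAssembler helper suffices; the section below is only built
// when V8_STATIC_ROOTS_BOOL is set.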
#if V8_STATIC_ROOTS_BOOL
void BaselineAssembler::JumpIfJSAnyIsPrimitive(Register heap_object,
                                               Label* target,
                                               Label::Distance distance) {
  __ AssertNotSmi(heap_object);
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ JumpIfJSAnyIsPrimitive(heap_object, scratch, target, distance);
}
#endif  // V8_STATIC_ROOTS_BOOL
void BaselineAssembler::JumpIfObjectTypeFast(Condition cc, Register object,
                                             InstanceType instance_type,
                                             Label* target,
                                             Label::Distance distance) {
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  if (cc == eq || cc == ne) {
    // Equality checks can use the MacroAssembler helper directly.
    __ JumpIfObjectType(target, cc, object, instance_type, scratch);
    return;
  }
  JumpIfObjectType(cc, object, instance_type, scratch, target, distance);
}
void BaselineAssembler::JumpIfObjectType(Condition cc, Register object,
                                         InstanceType instance_type,
                                         Register map, Label* target,
                                         Label::Distance) {
  ScratchRegisterScope temps(this);
  Register type = temps.AcquireScratch();
  __ GetObjectType(object, map, type);
  __ Branch(target, cc, type, Operand(instance_type));
}
void BaselineAssembler::JumpIfInstanceType(Condition cc, Register map,
                                           InstanceType instance_type,
                                           Label* target, Label::Distance) {
  ScratchRegisterScope temps(this);
  Register type = temps.AcquireScratch();
  if (v8_flags.debug_code) {
    __ AssertNotSmi(map);
    __ GetObjectType(map, type, type);
    __ Assert(eq, AbortReason::kUnexpectedValue, type, Operand(MAP_TYPE));
  }
  __ Ld_hu(type, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(target, cc, type, Operand(instance_type));
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register value,
                                  Tagged<Smi> smi, Label* target,
                                  Label::Distance) {
  __ CompareTaggedAndBranch(target, cc, value, Operand(smi));
}
void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
                                  Label* target, Label::Distance) {
  __ AssertSmi(lhs);
  __ AssertSmi(rhs);
  __ CompareTaggedAndBranch(target, cc, lhs, Operand(rhs));
}
void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
                                     MemOperand operand, Label* target,
                                     Label::Distance) {
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ Ld_d(scratch, operand);
  __ CompareTaggedAndBranch(target, cc, value, Operand(scratch));
}
void BaselineAssembler::JumpIfTagged(Condition cc, MemOperand operand,
                                     Register value, Label* target,
                                     Label::Distance) {
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ Ld_d(scratch, operand);
  __ CompareTaggedAndBranch(target, cc, scratch, Operand(value));
}
void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
                                   Label* target, Label::Distance) {
  __ Branch(target, cc, value, Operand(byte));
}
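// Moves. On this port MoveMaybeSmi and MoveSmi are plain register moves; no
// decompression or tag fixup is needed.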
void BaselineAssembler::Move(interpreter::Register output, Register source) {
  Move(RegisterFrameOperand(output), source);
}
void BaselineAssembler::Move(Register output, Tagged<TaggedIndex> value) {
  __ li(output, Operand(value.ptr()));
}
void BaselineAssembler::Move(MemOperand output, Register source) {
  __ St_d(source, output);
}
void BaselineAssembler::Move(Register output, ExternalReference reference) {
  __ li(output, Operand(reference));
}
void BaselineAssembler::Move(Register output, Handle<HeapObject> value) {
  __ li(output, Operand(value));
}
void BaselineAssembler::Move(Register output, int32_t value) {
  __ li(output, Operand(value));
}
void BaselineAssembler::MoveMaybeSmi(Register output, Register source) {
  __ Move(output, source);
}
void BaselineAssembler::MoveSmi(Register output, Register source) {
  __ Move(output, source);
}
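// The variadic Push/PushReverse/Pop implementations below recurse over their
// argument packs one value at a time, materializing non-register arguments
// into scratch registers first.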
namespace detail {

template <typename Arg>
inline Register ToRegister(BaselineAssembler* basm,
                           BaselineAssembler::ScratchRegisterScope* scope,
                           Arg arg) {
  Register reg = scope->AcquireScratch();
  basm->Move(reg, arg);
  return reg;
}
inline Register ToRegister(BaselineAssembler* basm,
                           BaselineAssembler::ScratchRegisterScope* scope,
                           Register reg) {
  return reg;
}
template <typename... Args>
struct PushAllHelper;
template <>
struct PushAllHelper<> {
  // Base case: nothing to push.
  static int Push(BaselineAssembler* basm) { return 0; }
  static int PushReverse(BaselineAssembler* basm) { return 0; }
};
template <typename Arg>
struct PushAllHelper<Arg> {
  static int Push(BaselineAssembler* basm, Arg arg) {
    BaselineAssembler::ScratchRegisterScope scope(basm);
    basm->masm()->Push(ToRegister(basm, &scope, arg));
    return 1;
  }
  static int PushReverse(BaselineAssembler* basm, Arg arg) {
    return Push(basm, arg);
  }
};
template <typename Arg, typename... Args>
struct PushAllHelper<Arg, Args...> {
  static int Push(BaselineAssembler* basm, Arg arg, Args... args) {
    PushAllHelper<Arg>::Push(basm, arg);
    return 1 + PushAllHelper<Args...>::Push(basm, args...);
  }
  static int PushReverse(BaselineAssembler* basm, Arg arg, Args... args) {
    int nargs = PushAllHelper<Args...>::PushReverse(basm, args...);
    PushAllHelper<Arg>::Push(basm, arg);
    return nargs + 1;
  }
};
template <>
struct PushAllHelper<interpreter::RegisterList> {
  static int Push(BaselineAssembler* basm, interpreter::RegisterList list) {
    for (int reg_index = 0; reg_index < list.register_count(); ++reg_index) {
      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
    }
    return list.register_count();
  }
  static int PushReverse(BaselineAssembler* basm,
                         interpreter::RegisterList list) {
    for (int reg_index = list.register_count() - 1; reg_index >= 0;
         --reg_index) {
      PushAllHelper<interpreter::Register>::Push(basm, list[reg_index]);
    }
    return list.register_count();
  }
};
template <typename... T>
struct PopAllHelper;
template <>
struct PopAllHelper<> {
  static void Pop(BaselineAssembler* basm) {}
};
template <>
struct PopAllHelper<Register> {
  static void Pop(BaselineAssembler* basm, Register reg) {
    basm->masm()->Pop(reg);
  }
};
template <typename... T>
struct PopAllHelper<Register, T...> {
  static void Pop(BaselineAssembler* basm, Register reg, T... tail) {
    PopAllHelper<Register>::Pop(basm, reg);
    PopAllHelper<T...>::Pop(basm, tail...);
  }
};

}  // namespace detail
template <typename... T>
int BaselineAssembler::Push(T... vals) {
  return detail::PushAllHelper<T...>::Push(this, vals...);
}

template <typename... T>
void BaselineAssembler::PushReverse(T... vals) {
  detail::PushAllHelper<T...>::PushReverse(this, vals...);
}

template <typename... T>
void BaselineAssembler::Pop(T... registers) {
  detail::PopAllHelper<T...>::Pop(this, registers...);
}
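// For example, Push(a, b, c) pushes a first (so a ends up deepest) and
// returns 3; PushReverse(a, b, c) pushes c first, leaving a on top.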
void BaselineAssembler::LoadTaggedField(Register output, Register source,
                                        int offset) {
  __ LoadTaggedField(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
                                              int offset) {
  __ LoadTaggedSignedField(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::LoadTaggedSignedFieldAndUntag(Register output,
                                                      Register source,
                                                      int offset) {
  LoadTaggedSignedField(output, source, offset);
  SmiUntag(output);
}
void BaselineAssembler::LoadWord16FieldZeroExtend(Register output,
                                                  Register source,
                                                  int offset) {
  __ Ld_hu(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::LoadWord8Field(Register output, Register source,
                                       int offset) {
  __ Ld_b(output, FieldMemOperand(source, offset));
}
void BaselineAssembler::StoreTaggedSignedField(Register target, int offset,
                                               Tagged<Smi> value) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ li(scratch, Operand(value));
  __ StoreTaggedField(scratch, FieldMemOperand(target, offset));
}
void BaselineAssembler::StoreTaggedFieldWithWriteBarrier(Register target,
                                                         int offset,
                                                         Register value) {
  ASM_CODE_COMMENT(masm_);
  __ StoreTaggedField(value, FieldMemOperand(target, offset));
  ScratchRegisterScope temps(this);
  Register scratch = temps.AcquireScratch();
  __ RecordWriteField(target, offset, value, scratch, kRAHasNotBeenSaved,
                      SaveFPRegsMode::kIgnore);
}
void BaselineAssembler::StoreTaggedFieldNoWriteBarrier(Register target,
                                                       int offset,
                                                       Register value) {
  __ StoreTaggedField(value, FieldMemOperand(target, offset));
}
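// Looks up cached optimized OSR code for the current function in the given
// feedback vector slot. The slot holds a weak reference; if the code has been
// deoptimized in the meantime, the slot is cleared instead.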
void BaselineAssembler::TryLoadOptimizedOsrCode(Register scratch_and_result,
                                                Register feedback_vector,
                                                FeedbackSlot slot,
                                                Label* on_result,
                                                Label::Distance) {
  Label fallthrough;
  LoadTaggedField(scratch_and_result, feedback_vector,
                  FeedbackVector::OffsetOfElementAt(slot.ToInt()));
  __ LoadWeakValue(scratch_and_result, scratch_and_result, &fallthrough);

  // Is it marked_for_deoptimization? If yes, clear the slot.
  {
    ScratchRegisterScope temps(this);

    // The entry references a CodeWrapper object. Unwrap it now.
    __ LoadCodePointerField(
        scratch_and_result,
        FieldMemOperand(scratch_and_result, CodeWrapper::kCodeOffset));

    Register scratch = temps.AcquireScratch();
    __ TestCodeIsMarkedForDeoptimizationAndJump(scratch_and_result, scratch,
                                                eq, on_result);
    __ li(scratch, __ ClearedValue());
    StoreTaggedFieldNoWriteBarrier(
        feedback_vector, FeedbackVector::OffsetOfElementAt(slot.ToInt()),
        scratch);
  }

  __ bind(&fallthrough);
  Move(scratch_and_result, 0);
}
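// The interrupt budget lives in the FeedbackCell. It is decremented by
// adding a negative weight; as long as the budget stays non-negative, the
// skip label is taken and no budget interrupt fires.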
void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    int32_t weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFeedbackCell(feedback_cell);

  Register interrupt_budget = scratch_scope.AcquireScratch();
  __ Ld_w(interrupt_budget,
          FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  __ Add_w(interrupt_budget, interrupt_budget, weight);
  __ St_w(interrupt_budget,
          FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  if (skip_interrupt_label) {
    DCHECK_LT(weight, 0);
    __ Branch(skip_interrupt_label, ge, interrupt_budget, Operand(zero_reg));
  }
}
void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
    Register weight, Label* skip_interrupt_label) {
  ASM_CODE_COMMENT(masm_);
  ScratchRegisterScope scratch_scope(this);
  Register feedback_cell = scratch_scope.AcquireScratch();
  LoadFeedbackCell(feedback_cell);

  Register interrupt_budget = scratch_scope.AcquireScratch();
  __ Ld_w(interrupt_budget,
          FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  __ Add_w(interrupt_budget, interrupt_budget, weight);
  __ St_w(interrupt_budget,
          FieldMemOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset));
  if (skip_interrupt_label)
    __ Branch(skip_interrupt_label, ge, interrupt_budget, Operand(zero_reg));
}
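// Context-slot and module-variable accesses first walk `depth` levels up the
// context chain via Context::kPreviousOffset before touching the slot.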
void BaselineAssembler::LdaContextSlot(Register context, uint32_t index,
                                       uint32_t depth,
                                       CompressionMode compression_mode) {
  for (; depth > 0; --depth) {
    LoadTaggedField(context, context, Context::kPreviousOffset);
  }
  LoadTaggedField(kInterpreterAccumulatorRegister, context,
                  Context::OffsetOfElementAt(index));
}

void BaselineAssembler::StaContextSlot(Register context, Register value,
                                       uint32_t index, uint32_t depth) {
  for (; depth > 0; --depth) {
    LoadTaggedField(context, context, Context::kPreviousOffset);
  }
  StoreTaggedFieldWithWriteBarrier(context, Context::OffsetOfElementAt(index),
                                   value);
}
void BaselineAssembler::LdaModuleVariable(Register context, int cell_index,
                                          uint32_t depth) {
  for (; depth > 0; --depth) {
    LoadTaggedField(context, context, Context::kPreviousOffset);
  }
  LoadTaggedField(context, context, Context::kExtensionOffset);
  if (cell_index > 0) {
    LoadTaggedField(context, context, SourceTextModule::kRegularExportsOffset);
    // The actual array index is (cell_index - 1).
    cell_index -= 1;
  } else {
    LoadTaggedField(context, context, SourceTextModule::kRegularImportsOffset);
    // The actual array index is (-cell_index - 1).
    cell_index = -cell_index - 1;
  }
  LoadFixedArrayElement(context, context, cell_index);
  LoadTaggedField(kInterpreterAccumulatorRegister, context, Cell::kValueOffset);
}

void BaselineAssembler::StaModuleVariable(Register context, Register value,
                                          int cell_index, uint32_t depth) {
  for (; depth > 0; --depth) {
    LoadTaggedField(context, context, Context::kPreviousOffset);
  }
  LoadTaggedField(context, context, Context::kExtensionOffset);
  LoadTaggedField(context, context, SourceTextModule::kRegularExportsOffset);

  // The actual array index is (cell_index - 1).
  cell_index -= 1;
  LoadFixedArrayElement(context, context, cell_index);
  StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
}
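// With 31-bit Smis (pointer compression) the Smi occupies the low 32 bits of
// the slot, so a 32-bit load/add/store cycle suffices; otherwise the full
// 64-bit word is updated.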
void BaselineAssembler::IncrementSmi(MemOperand lhs) {
  BaselineAssembler::ScratchRegisterScope temps(this);
  Register tmp = temps.AcquireScratch();
  if (SmiValuesAre31Bits()) {
    __ Ld_w(tmp, lhs);
    __ Add_w(tmp, tmp, Operand(Smi::FromInt(1)));
    __ St_w(tmp, lhs);
  } else {
    __ Ld_d(tmp, lhs);
    __ Add_d(tmp, tmp, Operand(Smi::FromInt(1)));
    __ St_d(tmp, lhs);
  }
}
void BaselineAssembler::Word32And(Register output, Register lhs, int rhs) {
  __ And(output, lhs, Operand(rhs));
}
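// Switch lowers to a bounds check against num_labels followed by a jump
// table emitted by GenerateSwitchTable; out-of-range case values fall
// through.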
void BaselineAssembler::Switch(Register reg, int case_value_base,
                               Label** labels, int num_labels) {
  ASM_CODE_COMMENT(masm_);
  Label fallthrough;
  if (case_value_base != 0) {
    __ Sub_d(reg, reg, Operand(case_value_base));
  }

  __ Branch(&fallthrough, kUnsignedGreaterThanEqual, reg, Operand(num_labels));

  __ GenerateSwitchTable(reg, num_labels,
                         [labels](size_t i) { return labels[i]; });

  __ bind(&fallthrough);
}
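// EmitReturn charges the remaining interrupt budget, corrects params_size
// against the actual argument count, then tears down the BASELINE frame and
// drops the arguments.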
#undef __
#define __ basm.

void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
  ASM_CODE_COMMENT(masm);
  BaselineAssembler basm(masm);

  Register weight = BaselineLeaveFrameDescriptor::WeightRegister();
  Register params_size = BaselineLeaveFrameDescriptor::ParamsSizeRegister();

  {
    ASM_CODE_COMMENT_STRING(masm, "Update Interrupt Budget");

    Label skip_interrupt_label;
    __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, &skip_interrupt_label);
    __ masm()->SmiTag(params_size);
    __ masm()->Push(params_size, kInterpreterAccumulatorRegister);

    __ LoadContext(kContextRegister);
    __ LoadFunction(kJSFunctionRegister);
    __ masm()->Push(kJSFunctionRegister);
    __ CallRuntime(Runtime::kBytecodeBudgetInterrupt_Sparkplug, 1);

    __ masm()->Pop(params_size, kInterpreterAccumulatorRegister);
    __ masm()->SmiUntag(params_size);
    __ Bind(&skip_interrupt_label);
  }

  BaselineAssembler::ScratchRegisterScope temps(&basm);
  Register actual_params_size = temps.AcquireScratch();
  // Compute the size of the actual parameters + receiver.
  __ Move(actual_params_size,
          MemOperand(fp, StandardFrameConstants::kArgCOffset));

  // If actual is bigger than formal, then we should use it to free up the
  // stack arguments.
  Label corrected_args_count;
  __ masm()->Branch(&corrected_args_count, ge, params_size,
                    Operand(actual_params_size));
  __ masm()->Move(params_size, actual_params_size);
  __ Bind(&corrected_args_count);

  // Leave the frame (also dropping the register file).
  __ masm()->LeaveFrame(StackFrame::BASELINE);

  // Drop receiver + arguments.
  __ masm()->DropArguments(params_size);
  __ masm()->Ret();
}

#undef __
inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
    Register reg) {
  assembler_->masm()->Assert(eq, AbortReason::kAccumulatorClobbered, reg,
                             Operand(kInterpreterAccumulatorRegister));
}

}  // namespace baseline
}  // namespace internal
}  // namespace v8

#endif  // V8_BASELINE_LOONG64_BASELINE_ASSEMBLER_LOONG64_INL_H_