5#ifndef V8_BASELINE_MIPS64_BASELINE_ASSEMBLER_MIPS64_INL_H_
6#define V8_BASELINE_MIPS64_BASELINE_ASSEMBLER_MIPS64_INL_H_
// RAII scope that hands out temporary ("scratch") registers; on MIPS64 the
// pool of baseline scratch registers is {t0, t1, t2, t3}.
// NOTE(review): extraction-damaged fragment — surrounding member/constructor
// lines were lost; confirm against upstream before relying on details.
17class BaselineAssembler::ScratchRegisterScope {
26 wrapped_scope_.Include({t0, t1, t2, t3});
// Returns the MemOperand of the stack slot backing an interpreter register.
52MemOperand BaselineAssembler::RegisterFrameOperand(
53 interpreter::Register interpreter_register) {
// Materializes the ADDRESS of an interpreter register's frame slot into
// `rscratch` via fp-relative Daddu.
// NOTE(review): the second Daddu operand line is missing here.
56void BaselineAssembler::RegisterFrameAddress(
57 interpreter::Register interpreter_register, Register rscratch) {
58 return __ Daddu(rscratch, fp,
// Frame operands for the cached feedback vector / feedback cell slots
// (bodies lost in extraction).
61MemOperand BaselineAssembler::FeedbackVectorOperand() {
64MemOperand BaselineAssembler::FeedbackCellOperand() {
68void BaselineAssembler::Bind(Label*
label) {
__ bind(
label); }
// JumpTarget: body not visible in this fragment — on MIPS64 this is
// presumably a no-op (no jump-target alignment) — TODO confirm upstream.
70void BaselineAssembler::JumpTarget() {
// Jumps to `target` when `value` equals the root at `index`.
// NOTE(review): the trailing Label*/distance parameter lines and closing
// braces of the functions below were lost in extraction.
76void BaselineAssembler::JumpIfRoot(Register value,
RootIndex index,
78 __ JumpIfRoot(value, index, target);
// Jumps to `target` when `value` does NOT equal the root at `index`.
80void BaselineAssembler::JumpIfNotRoot(Register value,
RootIndex index,
82 __ JumpIfNotRoot(value, index, target);
// Jumps to `target` when `value` is a Smi (tag-bit check).
84void BaselineAssembler::JumpIfSmi(Register value, Label* target,
86 __ JumpIfSmi(value, target);
// Jumps to `target` when `value` is NOT a Smi.
88void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
90 __ JumpIfNotSmi(value, target);
// Compares `left` against the immediate `right` under condition `cc` and
// jumps to `target`; simply wraps the immediate in an Operand and delegates
// to JumpIf.
// NOTE(review): the Label* target / distance parameter lines were lost in
// extraction (both names are used in the body).
92void BaselineAssembler::JumpIfImmediate(
Condition cc, Register left,
int right,
95 JumpIf(
cc, left, Operand(right), target, distance);
// Tests `value` against `mask` and branches to `target` when the masked
// result compares `cc` against zero.
98void BaselineAssembler::TestAndBranch(Register value,
int mask,
Condition cc,
100 ScratchRegisterScope temps(
this);
101 Register scratch = temps.AcquireScratch();
// NOTE(review): the masking instruction (presumably
// `__ And(scratch, value, Operand(mask))`) was lost in extraction —
// scratch is compared against zero below, so it must be computed here.
103 __ Branch(target,
cc, scratch, Operand(zero_reg));
// Core conditional jump: branches to `target` when `lhs` compares `cc`
// against `rhs` (closing brace and target/distance parameter lines lost in
// extraction).
106void BaselineAssembler::JumpIf(
Condition cc, Register lhs,
const Operand& rhs,
108 __ Branch(target,
cc, lhs, Operand(rhs));
// Fast-path object type check: acquires a scratch register for the map and
// dispatches to JumpIfObjectType (lines for instance_type/target/distance
// parameters lost in extraction).
110void BaselineAssembler::JumpIfObjectTypeFast(
Condition cc, Register
object,
114 ScratchRegisterScope temps(
this);
115 Register scratch = temps.AcquireScratch();
116 JumpIfObjectType(
cc,
object, instance_type, scratch, target, distance);
// Loads `object`'s map into `map` and its instance type into a scratch
// register (GetObjectType), then branches to `target` when the type
// compares `cc` against `instance_type`.
118void BaselineAssembler::JumpIfObjectType(
Condition cc, Register
object,
120 Register map, Label* target,
122 ScratchRegisterScope temps(
this);
123 Register type = temps.AcquireScratch();
124 __ GetObjectType(
object, map, type);
125 __ Branch(target,
cc, type, Operand(instance_type));
// Branches on the instance type of an already-loaded `map` register.
127void BaselineAssembler::JumpIfInstanceType(
Condition cc, Register map,
130 ScratchRegisterScope temps(
this);
131 Register type = temps.AcquireScratch();
// Debug-only sanity checks: `map` must be a non-Smi whose own instance
// type is MAP_TYPE. NOTE(review): the enclosing `if (v8_flags.debug_code)`
// guard line appears to be lost in extraction — TODO confirm.
133 __ AssertNotSmi(map);
134 __ GetObjectType(map, type, type);
135 __ Assert(
eq, AbortReason::kUnexpectedValue, type, Operand(MAP_TYPE));
// NOTE(review): the load of the instance-type field into `type` (between
// the assert and the branch) was lost in extraction.
138 __ Branch(target,
cc, type, Operand(instance_type));
// Loads the 64-bit pointer at `operand` into a scratch register and
// branches to `target` when `value` compares `cc` against it.
140void BaselineAssembler::JumpIfPointer(
Condition cc, Register value,
143 ScratchRegisterScope temps(
this);
144 Register scratch = temps.AcquireScratch();
145 __ Ld(scratch, operand);
146 __ Branch(target,
cc, value, Operand(scratch));
// NOTE(review): body fragment of a JumpIf overload that compares `value`
// against a Smi constant — the signature line was lost entirely in
// extraction (presumably JumpIf(Condition cc, Register value,
// Tagged<Smi> smi, Label* target, ...) — TODO confirm upstream).
// Materializes the Smi into a scratch register, then branches on `cc`.
150 ScratchRegisterScope temps(
this);
151 Register scratch = temps.AcquireScratch();
152 __ li(scratch, Operand(smi));
154 __ Branch(target,
cc, value, Operand(scratch));
// Register-vs-register comparison of two (presumably already-tagged) Smi
// values; branches to `target` on `cc`. NOTE(review): any Smi asserts and
// the target/distance parameter lines were lost in extraction.
156void BaselineAssembler::JumpIfSmi(
Condition cc, Register lhs, Register rhs,
160 __ Branch(target,
cc, lhs, Operand(rhs));
// Compares `value` against the tagged value loaded from `operand` and
// branches to `target` on `cc` (MemOperand/target parameter lines lost in
// extraction).
162void BaselineAssembler::JumpIfTagged(
Condition cc, Register value,
165 ScratchRegisterScope temps(
this);
166 Register scratch = temps.AcquireScratch();
167 __ Ld(scratch, operand);
168 __ Branch(target,
cc, value, Operand(scratch));
// NOTE(review): fragment of the mirror JumpIfTagged overload with the
// memory operand as the LEFT comparison operand (note the swapped Branch
// arguments vs the overload above) — the opening signature line was lost
// in extraction.
171 Register value, Label* target,
173 ScratchRegisterScope temps(
this);
174 Register scratch = temps.AcquireScratch();
175 __ Ld(scratch, operand);
176 __ Branch(target,
cc, scratch, Operand(value));
// Branches to `target` when `value` compares `cc` against the byte
// immediate (target/distance parameter lines lost in extraction).
178void BaselineAssembler::JumpIfByte(
Condition cc, Register value, int32_t
byte,
180 __ Branch(target,
cc, value, Operand(
byte));
183void BaselineAssembler::Move(interpreter::Register output, Register source) {
184 Move(RegisterFrameOperand(output), source);
186void BaselineAssembler::Move(Register output, Tagged<TaggedIndex> value) {
187 __ li(output, Operand(value.ptr()));
189void BaselineAssembler::Move(
MemOperand output, Register source) {
190 __ Sd(source, output);
192void BaselineAssembler::Move(Register output, ExternalReference reference) {
193 __ li(output, Operand(reference));
195void BaselineAssembler::Move(Register output, Handle<HeapObject> value) {
196 __ li(output, Operand(value));
198void BaselineAssembler::Move(Register output, int32_t value) {
199 __ li(output, Operand(value));
201void BaselineAssembler::MoveMaybeSmi(Register output, Register source) {
202 __ Move(output, source);
204void BaselineAssembler::MoveSmi(Register output, Register source) {
205 __ Move(output, source);
// detail:: variadic push/pop machinery. PushAllHelper/PopAllHelper recurse
// over the argument pack, pushing/popping one value per stack slot via the
// assembler; the public Push/PushReverse/Pop entry points forward here.
// NOTE(review): this whole section is heavily truncated by extraction —
// only scattered signature/body lines survive; confirm details upstream.
210template <
typename Arg>
// ToRegister: materializes an arbitrary argument into a scratch register
// (acquired from `scope`) so it can be pushed.
212 BaselineAssembler::ScratchRegisterScope* scope,
215 basm->Move(
reg, arg);
219 BaselineAssembler::ScratchRegisterScope* scope,
// Base case: empty pack — nothing to push.
224template <
typename... Args>
227struct PushAllHelper<> {
// Single-argument case: push the one value.
234template <
typename Arg>
235struct PushAllHelper<Arg> {
242 return Push(basm, arg);
// Recursive case: push head, then recurse on the tail.
248template <
typename Arg,
typename... Args>
249struct PushAllHelper<Arg, Args...> {
// RegisterList specialization: pushes each interpreter register in list
// order, indexed 0..register_count()-1.
261struct PushAllHelper<interpreter::RegisterList> {
263 for (
int reg_index = 0; reg_index < list.
register_count(); ++reg_index) {
// PopAllHelper mirrors PushAllHelper for popping registers.
278template <
typename... T>
281struct PopAllHelper<> {
293template <
typename... T>
294struct PopAllHelper<
Register, T...> {
// Public entry points: forward the pack (plus `this`) to the helpers.
303template <
typename... T>
304int BaselineAssembler::Push(T... vals) {
305 return detail::PushAllHelper<T...>::Push(
this, vals...);
308template <
typename... T>
309void BaselineAssembler::PushReverse(T... vals) {
310 detail::PushAllHelper<T...>::PushReverse(
this, vals...);
313template <
typename... T>
314void BaselineAssembler::Pop(T...
registers) {
315 detail::PopAllHelper<T...>::Pop(
this,
registers...);
// Tagged/raw field load & store helpers. Bodies are mostly lost in
// extraction; comments below describe only what the surviving lines show.
318void BaselineAssembler::LoadTaggedField(Register output, Register source,
322void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
// Loads a tagged signed (Smi) field and untags it; delegates to
// LoadTaggedSignedField (the SmiUntag line is missing here).
326void BaselineAssembler::LoadTaggedSignedFieldAndUntag(Register output,
329 LoadTaggedSignedField(output, source,
offset);
332void BaselineAssembler::LoadWord16FieldZeroExtend(Register output,
333 Register source,
int offset) {
336void BaselineAssembler::LoadWord8Field(Register output, Register source,
// Stores a Smi constant into a field: materializes `value` into a scratch
// register via li (the actual store instruction was lost in extraction).
340void BaselineAssembler::StoreTaggedSignedField(Register target,
int offset,
343 ScratchRegisterScope temps(
this);
344 Register scratch = temps.AcquireScratch();
345 __ li(scratch, Operand(value));
// Tagged store WITH write barrier (the store + RecordWriteField lines were
// lost in extraction — TODO confirm against upstream).
348void BaselineAssembler::StoreTaggedFieldWithWriteBarrier(Register target,
353 ScratchRegisterScope temps(
this);
354 Register scratch = temps.AcquireScratch();
// Tagged store with NO write barrier (body lost).
358void BaselineAssembler::StoreTaggedFieldNoWriteBarrier(Register target,
// Attempts to load cached optimized code from the feedback vector into
// `scratch_and_result`. On any failure path it falls through and leaves 0
// in `scratch_and_result`.
364void BaselineAssembler::TryLoadOptimizedOsrCode(Register scratch_and_result,
365 Register feedback_vector,
370 LoadTaggedField(scratch_and_result, feedback_vector,
// The cached slot is a weak reference: LoadWeakValue branches to
// `fallthrough` when the weak cell has been cleared.
372 __ LoadWeakValue(scratch_and_result, scratch_and_result, &fallthrough);
375 ScratchRegisterScope temps(
this);
378 __ Ld(scratch_and_result,
381 Register scratch = temps.AcquireScratch();
// Checks whether the cached code was marked for deoptimization; the branch
// target argument line is missing here — TODO confirm which label `eq`
// jumps to upstream.
382 __ TestCodeIsMarkedForDeoptimizationAndJump(scratch_and_result, scratch,
eq,
// Code was deoptimized: clear the stale slot in the feedback vector.
385 StoreTaggedFieldNoWriteBarrier(
389 __ bind(&fallthrough);
// Signal "no optimized code available" with 0.
390 Move(scratch_and_result, 0);
// Adds the immediate `weight` to the FeedbackCell's 32-bit interrupt
// budget; if `skip_interrupt_label` is provided, jumps there while the
// budget remains >= 0 (i.e. not yet exhausted).
393void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
394 int32_t weight, Label* skip_interrupt_label) {
396 ScratchRegisterScope scratch_scope(
this);
397 Register feedback_cell = scratch_scope.AcquireScratch();
398 LoadFeedbackCell(feedback_cell);
400 Register interrupt_budget = scratch_scope.AcquireScratch();
// 32-bit load / add / store of the budget field; the MemOperand argument
// lines (field offset within the FeedbackCell) were lost in extraction.
401 __ Lw(interrupt_budget,
403 __ Addu(interrupt_budget, interrupt_budget, weight);
404 __ Sw(interrupt_budget,
406 if (skip_interrupt_label) {
// NOTE(review): a DCHECK on the sign of `weight` presumably sits between
// these lines upstream — TODO confirm.
408 __ Branch(skip_interrupt_label,
ge, interrupt_budget, Operand(zero_reg));
// Register-weight variant: adds `weight` (a register) to the FeedbackCell's
// 32-bit interrupt budget and jumps to `skip_interrupt_label` (if non-null)
// while the budget remains >= 0.
411void BaselineAssembler::AddToInterruptBudgetAndJumpIfNotExceeded(
412 Register weight, Label* skip_interrupt_label) {
414 ScratchRegisterScope scratch_scope(
this);
415 Register feedback_cell = scratch_scope.AcquireScratch();
416 LoadFeedbackCell(feedback_cell);
418 Register interrupt_budget = scratch_scope.AcquireScratch();
// Load / add / store of the budget field; MemOperand argument lines lost
// in extraction.
419 __ Lw(interrupt_budget,
421 __ Addu(interrupt_budget, interrupt_budget, weight);
422 __ Sw(interrupt_budget,
424 if (skip_interrupt_label)
425 __ Branch(skip_interrupt_label,
ge, interrupt_budget, Operand(zero_reg));
// Walks `depth` levels up the context chain (loop body lost in extraction)
// and loads slot `index` from the resulting context into the accumulator.
428void BaselineAssembler::LdaContextSlot(Register context, uint32_t index,
430 CompressionMode compression_mode) {
431 for (; depth > 0; --depth) {
// Walks `depth` levels up the context chain (loop body lost in extraction)
// and stores `value` into slot `index` of the resulting context.
438void BaselineAssembler::StaContextSlot(Register context, Register value,
439 uint32_t index, uint32_t depth) {
440 for (; depth > 0; --depth) {
// Loads a module variable cell: positive cell_index selects the module's
// regular exports, non-positive selects regular imports (rebased to a
// 0-based array index), then the cell is loaded from the fixed array.
447void BaselineAssembler::LdaModuleVariable(Register context,
int cell_index,
// Walk up the context chain to the module context (loop body truncated).
449 for (; depth > 0; --depth) {
453 if (cell_index > 0) {
454 LoadTaggedField(context, context, SourceTextModule::kRegularExportsOffset);
// NOTE(review): the export-index adjustment (presumably `cell_index -= 1;`)
// between these branches was lost in extraction — TODO confirm.
458 LoadTaggedField(context, context, SourceTextModule::kRegularImportsOffset);
// Import indices are negative: rebase -1 -> 0, -2 -> 1, ...
460 cell_index = -cell_index - 1;
462 LoadFixedArrayElement(context, context, cell_index);
// Stores `value` into a module export cell (only the regular-exports path
// is visible here) with a write barrier on the cell's value field.
466void BaselineAssembler::StaModuleVariable(Register context, Register value,
467 int cell_index, uint32_t depth) {
// Walk up the context chain to the module context (loop body truncated).
468 for (; depth > 0; --depth) {
472 LoadTaggedField(context, context, SourceTextModule::kRegularExportsOffset);
// NOTE(review): an index adjustment between these lines was presumably
// lost in extraction — TODO confirm against upstream.
476 LoadFixedArrayElement(context, context, cell_index);
477 StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
// Increments the Smi stored at memory location `lhs` in place; the
// load/add/store sequence using `tmp` was lost in extraction.
480void BaselineAssembler::IncrementSmi(
MemOperand lhs) {
481 BaselineAssembler::ScratchRegisterScope temps(
this);
482 Register tmp = temps.AcquireScratch();
494void BaselineAssembler::Word32And(Register output, Register lhs,
int rhs) {
495 __ And(output, lhs, Operand(rhs));
// Emits a jump table over `num_labels` consecutive cases. `reg` is first
// rebased by `case_value_base` (Dsubu); values outside the table fall
// through to the label bound at the end.
498void BaselineAssembler::Switch(Register
reg,
int case_value_base,
499 Label** labels,
int num_labels) {
502 if (case_value_base != 0) {
503 __ Dsubu(
reg,
reg, Operand(case_value_base));
// NOTE(review): the out-of-range guard branch (to `fallthrough`) between
// the rebase and the table generation was lost in extraction.
508 __ GenerateSwitchTable(
reg, num_labels,
// Table entry i resolves to labels[i].
509 [labels](
size_t i) {
return labels[
i]; });
511 __ bind(&fallthrough);
// Baseline function epilogue: charges the return's interrupt-budget weight,
// calls the Sparkplug budget-interrupt runtime when the budget is
// exhausted, clamps params_size to the actual argument count, then tears
// down the BASELINE frame and drops the arguments.
518void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
520 BaselineAssembler basm(masm);
523 Register params_size = BaselineLeaveFrameDescriptor::ParamsSizeRegister();
528 Label skip_interrupt_label;
529 __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, &skip_interrupt_label);
// Budget exhausted: params_size must survive the runtime call, so tag it
// as a Smi (the surrounding push/pop of the accumulator and params_size
// was lost in extraction).
530 __ masm()->SmiTag(params_size);
536 __ CallRuntime(Runtime::kBytecodeBudgetInterrupt_Sparkplug, 1);
539 __ masm()->SmiUntag(params_size);
541 __ Bind(&skip_interrupt_label);
544 BaselineAssembler::ScratchRegisterScope temps(&basm);
545 Register actual_params_size = temps.AcquireScratch();
// Load the dynamic argument count from the frame (operand line truncated).
547 __ Move(actual_params_size,
// Use whichever count is larger, so extra arguments are also dropped.
552 Label corrected_args_count;
553 __ masm()->Branch(&corrected_args_count,
ge, params_size,
554 Operand(actual_params_size));
555 __ masm()->Move(params_size, actual_params_size);
556 __ Bind(&corrected_args_count);
559 __ masm()->LeaveFrame(StackFrame::BASELINE);
562 __ masm()->DropArguments(params_size);
// Debug helper: asserts (kAccumulatorClobbered) that `reg` still compares
// equal to the saved accumulator value — trailing Assert arguments and
// closing brace lost in extraction.
569inline void EnsureAccumulatorPreservedScope::AssertEqualToAccumulator(
571 assembler_->masm()->Assert(
eq, AbortReason::kAccumulatorClobbered,
reg,
#define Assert(condition)
static constexpr int kFeedbackCellFromFp
static constexpr Register WeightRegister()
static const int kExtensionOffset
static V8_INLINE constexpr int OffsetOfElementAt(int index)
static const int kPreviousOffset
static constexpr int OffsetOfElementAt(int index)
bool is_reg(Register reg) const
static constexpr Tagged< Smi > FromInt(int value)
static constexpr int kArgCOffset
static constexpr int kFeedbackVectorFromFp
BaselineAssembler * assembler_
UseScratchRegisterScope wrapped_scope_
ScratchRegisterScope * prev_scope_
Register AcquireScratch()
ScratchRegisterScope(BaselineAssembler *assembler)
ScratchRegisterScope * scratch_register_scope_
int register_count() const
#define ASM_CODE_COMMENT_STRING(asm,...)
#define ASM_CODE_COMMENT(asm)
base::Vector< const DirectHandle< Object > > args
BytecodeAssembler & assembler_
RegListBase< RegisterT > registers
MaglevAssembler *const masm_
Register ToRegister(BaselineAssembler *basm, BaselineAssembler::ScratchRegisterScope *scope, Arg arg)
void And(LiftoffAssembler *lasm, Register dst, Register lhs, Register rhs)
@ kUnsignedGreaterThanEqual
constexpr Register kInterpreterAccumulatorRegister
MemOperand FieldMemOperand(Register object, int offset)
constexpr int kSystemPointerSize
constexpr bool SmiValuesAre31Bits()
constexpr Register kContextRegister
Tagged< ClearedWeakValue > ClearedValue(PtrComprCageBase cage_base)
V8_EXPORT_PRIVATE FlagValues v8_flags
Register ToRegister(int num)
constexpr Register kJSFunctionRegister
#define DCHECK_LT(v1, v2)
static void Pop(BaselineAssembler *basm, Register reg, T... tail)
static void Pop(BaselineAssembler *basm, Register reg)
static void Pop(BaselineAssembler *basm)
static int Push(BaselineAssembler *basm, Arg arg, Args... args)
static int PushReverse(BaselineAssembler *basm, Arg arg, Args... args)
static int Push(BaselineAssembler *basm, Arg arg)
static int PushReverse(BaselineAssembler *basm, Arg arg)
static int Push(BaselineAssembler *basm, interpreter::RegisterList list)
static int PushReverse(BaselineAssembler *basm, interpreter::RegisterList list)
static int PushReverse(BaselineAssembler *basm)
static int Push(BaselineAssembler *basm)