v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
baseline-assembler-ia32-inl.h
Go to the documentation of this file.
1// Copyright 2021 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_BASELINE_IA32_BASELINE_ASSEMBLER_IA32_INL_H_
6#define V8_BASELINE_IA32_BASELINE_ASSEMBLER_IA32_INL_H_
7
13
14namespace v8 {
15namespace internal {
16namespace baseline {
17
18namespace detail {
19
// Registers the baseline assembler may allocate as scratch on ia32.
// NOTE(review): eax/ebx are absent — presumably reserved (accumulator /
// other fixed roles); confirm against the register conventions.
20static constexpr Register kScratchRegisters[] = {ecx, edx, esi, edi};
22
23} // namespace detail
24
26 public:
35
40
41 private:
45};
46
47namespace detail {
48
49#define __ masm_->
50
51#ifdef DEBUG
52inline bool Clobbers(Register target, MemOperand op) {
53 return op.is_reg(target);
54}
55#endif
56
57} // namespace detail
58
60 interpreter::Register interpreter_register) {
61 return MemOperand(ebp, interpreter_register.ToOperand() * kSystemPointerSize);
62}
64 interpreter::Register interpreter_register, Register rscratch) {
65 return __ lea(rscratch, MemOperand(ebp, interpreter_register.ToOperand() *
67}
70}
73}
74
75void BaselineAssembler::Bind(Label* label) { __ bind(label); }
76
78 // NOP on ia32.
79}
80
81void BaselineAssembler::Jump(Label* target, Label::Distance distance) {
82 __ jmp(target, distance);
83}
84
85void BaselineAssembler::JumpIfRoot(Register value, RootIndex index,
86 Label* target, Label::Distance distance) {
87 __ JumpIfRoot(value, index, target, distance);
88}
89
90void BaselineAssembler::JumpIfNotRoot(Register value, RootIndex index,
91 Label* target, Label::Distance distance) {
92 __ JumpIfNotRoot(value, index, target, distance);
93}
94
95void BaselineAssembler::JumpIfSmi(Register value, Label* target,
96 Label::Distance distance) {
97 __ JumpIfSmi(value, target, distance);
98}
99
100void BaselineAssembler::JumpIfImmediate(Condition cc, Register left, int right,
101 Label* target,
102 Label::Distance distance) {
103 __ cmp(left, Immediate(right));
104 __ j(cc, target, distance);
105}
106
107void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
108 Label::Distance distance) {
109 __ JumpIfNotSmi(value, target, distance);
110}
111
112void BaselineAssembler::TestAndBranch(Register value, int mask, Condition cc,
113 Label* target, Label::Distance distance) {
114 if ((mask & 0xff) == mask) {
115 __ test_b(value, Immediate(mask));
116 } else {
117 __ test(value, Immediate(mask));
118 }
119 __ j(cc, target, distance);
120}
121
122void BaselineAssembler::JumpIf(Condition cc, Register lhs, const Operand& rhs,
123 Label* target, Label::Distance distance) {
124 __ cmp(lhs, rhs);
125 __ j(cc, target, distance);
126}
127
129 InstanceType instance_type,
130 Label* target,
131 Label::Distance distance) {
132 ScratchRegisterScope temps(this);
133 Register scratch = temps.AcquireScratch();
134 JumpIfObjectType(cc, object, instance_type, scratch, target, distance);
135}
136
138 InstanceType instance_type,
139 Register map, Label* target,
140 Label::Distance distance) {
141 __ AssertNotSmi(object);
142 __ CmpObjectType(object, instance_type, map);
143 __ j(cc, target, distance);
144}
146 InstanceType instance_type,
147 Label* target,
148 Label::Distance distance) {
149 if (v8_flags.debug_code) {
150 __ movd(xmm0, eax);
151 __ AssertNotSmi(map);
152 __ CmpObjectType(map, MAP_TYPE, eax);
153 __ Assert(equal, AbortReason::kUnexpectedValue);
154 __ movd(eax, xmm0);
155 }
156 __ CmpInstanceType(map, instance_type);
157 __ j(cc, target, distance);
158}
160 MemOperand operand, Label* target,
161 Label::Distance distance) {
162 JumpIf(cc, value, operand, target, distance);
163}
164void BaselineAssembler::JumpIfSmi(Condition cc, Register value, Tagged<Smi> smi,
165 Label* target, Label::Distance distance) {
166 if (smi.value() == 0) {
167 __ test(value, value);
168 } else {
169 __ cmp(value, Immediate(smi));
170 }
171 __ j(cc, target, distance);
172}
173void BaselineAssembler::JumpIfSmi(Condition cc, Register lhs, Register rhs,
174 Label* target, Label::Distance distance) {
175 __ AssertSmi(lhs);
176 __ AssertSmi(rhs);
177 __ cmp(lhs, rhs);
178 __ j(cc, target, distance);
179}
180void BaselineAssembler::JumpIfTagged(Condition cc, Register value,
181 MemOperand operand, Label* target,
182 Label::Distance distance) {
183 __ cmp(operand, value);
184 __ j(cc, target, distance);
185}
187 Register value, Label* target,
188 Label::Distance distance) {
189 __ cmp(operand, value);
190 __ j(cc, target, distance);
191}
192void BaselineAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
193 Label* target, Label::Distance distance) {
194 __ cmpb(value, Immediate(byte));
195 __ j(cc, target, distance);
196}
197void BaselineAssembler::Move(interpreter::Register output, Register source) {
198 return __ mov(RegisterFrameOperand(output), source);
199}
200void BaselineAssembler::Move(Register output, Tagged<TaggedIndex> value) {
201 __ Move(output, Immediate(value.ptr()));
202}
203void BaselineAssembler::Move(MemOperand output, Register source) {
204 __ mov(output, source);
205}
206void BaselineAssembler::Move(Register output, ExternalReference reference) {
207 __ Move(output, Immediate(reference));
208}
209void BaselineAssembler::Move(Register output, Handle<HeapObject> value) {
210 __ Move(output, value);
211}
212void BaselineAssembler::Move(Register output, int32_t value) {
213 __ Move(output, Immediate(value));
214}
215void BaselineAssembler::MoveMaybeSmi(Register output, Register source) {
216 __ mov(output, source);
217}
218void BaselineAssembler::MoveSmi(Register output, Register source) {
219 __ mov(output, source);
220}
221
222namespace detail {
223inline void PushSingle(MacroAssembler* masm, RootIndex source) {
224 masm->PushRoot(source);
225}
226inline void PushSingle(MacroAssembler* masm, Register reg) { masm->Push(reg); }
228 masm->Push(Immediate(value.ptr()));
229}
230inline void PushSingle(MacroAssembler* masm, Tagged<Smi> value) {
231 masm->Push(value);
232}
233inline void PushSingle(MacroAssembler* masm, Handle<HeapObject> object) {
234 masm->Push(object);
235}
236inline void PushSingle(MacroAssembler* masm, int32_t immediate) {
237 masm->Push(Immediate(immediate));
238}
239inline void PushSingle(MacroAssembler* masm, MemOperand operand) {
240 masm->Push(operand);
241}
244}
245
246template <typename Arg>
247struct PushHelper {
248 static int Push(BaselineAssembler* basm, Arg arg) {
249 PushSingle(basm->masm(), arg);
250 return 1;
251 }
252 static int PushReverse(BaselineAssembler* basm, Arg arg) {
253 return Push(basm, arg);
254 }
255};
256
257template <>
258struct PushHelper<interpreter::RegisterList> {
260 for (int reg_index = 0; reg_index < list.register_count(); ++reg_index) {
261 PushSingle(basm->masm(), list[reg_index]);
262 }
263 return list.register_count();
264 }
267 for (int reg_index = list.register_count() - 1; reg_index >= 0;
268 --reg_index) {
269 PushSingle(basm->masm(), list[reg_index]);
270 }
271 return list.register_count();
272 }
273};
274
275template <typename... Args>
276struct PushAllHelper;
277template <>
278struct PushAllHelper<> {
279 static int Push(BaselineAssembler* masm) { return 0; }
280 static int PushReverse(BaselineAssembler* masm) { return 0; }
281};
282template <typename Arg, typename... Args>
283struct PushAllHelper<Arg, Args...> {
284 static int Push(BaselineAssembler* masm, Arg arg, Args... args) {
285 int nargs = PushHelper<Arg>::Push(masm, arg);
286 return nargs + PushAllHelper<Args...>::Push(masm, args...);
287 }
288 static int PushReverse(BaselineAssembler* masm, Arg arg, Args... args) {
289 int nargs = PushAllHelper<Args...>::PushReverse(masm, args...);
290 return nargs + PushHelper<Arg>::PushReverse(masm, arg);
291 }
292};
293
294} // namespace detail
295
296template <typename... T>
297int BaselineAssembler::Push(T... vals) {
298 return detail::PushAllHelper<T...>::Push(this, vals...);
299}
300
301template <typename... T>
302void BaselineAssembler::PushReverse(T... vals) {
303 detail::PushAllHelper<T...>::PushReverse(this, vals...);
304}
305
306template <typename... T>
308 (__ Pop(registers), ...);
309}
310
311void BaselineAssembler::LoadTaggedField(Register output, Register source,
312 int offset) {
313 __ mov(output, FieldOperand(source, offset));
314}
315
316void BaselineAssembler::LoadTaggedSignedField(Register output, Register source,
317 int offset) {
318 __ mov(output, FieldOperand(source, offset));
319}
320
322 Register source,
323 int offset) {
324 LoadTaggedSignedField(output, source, offset);
325 SmiUntag(output);
326}
327
329 Register source, int offset) {
330 __ movzx_w(output, FieldOperand(source, offset));
331}
332
333void BaselineAssembler::LoadWord8Field(Register output, Register source,
334 int offset) {
335 __ mov_b(output, FieldOperand(source, offset));
336}
337
338void BaselineAssembler::StoreTaggedSignedField(Register target, int offset,
339 Tagged<Smi> value) {
340 __ mov(FieldOperand(target, offset), Immediate(value));
341}
342
344 int offset,
345 Register value) {
347 BaselineAssembler::ScratchRegisterScope scratch_scope(this);
348 Register scratch = scratch_scope.AcquireScratch();
349 DCHECK(!AreAliased(scratch, target, value));
350 __ mov(FieldOperand(target, offset), value);
351 __ RecordWriteField(target, offset, value, scratch, SaveFPRegsMode::kIgnore);
352}
353
355 int offset,
356 Register value) {
357 DCHECK(!AreAliased(target, value));
358 __ mov(FieldOperand(target, offset), value);
359}
360
361void BaselineAssembler::TryLoadOptimizedOsrCode(Register scratch_and_result,
362 Register feedback_vector,
363 FeedbackSlot slot,
364 Label* on_result,
365 Label::Distance distance) {
366 Label fallthrough;
367 LoadTaggedField(scratch_and_result, feedback_vector,
369 __ LoadWeakValue(scratch_and_result, &fallthrough);
370
371 // Is it marked_for_deoptimization? If yes, clear the slot.
372 {
373 ScratchRegisterScope temps(this);
374
375 // The entry references a CodeWrapper object. Unwrap it now.
376 __ mov(scratch_and_result,
377 FieldOperand(scratch_and_result, CodeWrapper::kCodeOffset));
378
379 __ TestCodeIsMarkedForDeoptimization(scratch_and_result);
380 __ j(equal, on_result, distance);
381 __ mov(FieldOperand(feedback_vector,
383 __ ClearedValue());
384 }
385
386 __ bind(&fallthrough);
387 __ Move(scratch_and_result, 0);
388}
389
391 int32_t weight, Label* skip_interrupt_label) {
393 ScratchRegisterScope scratch_scope(this);
394 Register feedback_cell = scratch_scope.AcquireScratch();
395 LoadFeedbackCell(feedback_cell);
396 __ add(FieldOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset),
397 Immediate(weight));
398 if (skip_interrupt_label) {
399 DCHECK_LT(weight, 0);
400 __ j(greater_equal, skip_interrupt_label);
401 }
402}
403
405 Register weight, Label* skip_interrupt_label) {
407 ScratchRegisterScope scratch_scope(this);
408 Register feedback_cell = scratch_scope.AcquireScratch();
409 DCHECK(!AreAliased(feedback_cell, weight));
410 LoadFeedbackCell(feedback_cell);
411 __ add(FieldOperand(feedback_cell, FeedbackCell::kInterruptBudgetOffset),
412 weight);
413 if (skip_interrupt_label) __ j(greater_equal, skip_interrupt_label);
414}
415
416void BaselineAssembler::LdaContextSlot(Register context, uint32_t index,
417 uint32_t depth,
418 CompressionMode compression_mode) {
419 for (; depth > 0; --depth) {
420 LoadTaggedField(context, context, Context::kPreviousOffset);
421 }
424}
425
426void BaselineAssembler::StaContextSlot(Register context, Register value,
427 uint32_t index, uint32_t depth) {
428 for (; depth > 0; --depth) {
429 LoadTaggedField(context, context, Context::kPreviousOffset);
430 }
432 value);
433}
434
435void BaselineAssembler::LdaModuleVariable(Register context, int cell_index,
436 uint32_t depth) {
437 for (; depth > 0; --depth) {
438 LoadTaggedField(context, context, Context::kPreviousOffset);
439 }
441 if (cell_index > 0) {
442 LoadTaggedField(context, context, SourceTextModule::kRegularExportsOffset);
443 // The actual array index is (cell_index - 1).
444 cell_index -= 1;
445 } else {
446 LoadTaggedField(context, context, SourceTextModule::kRegularImportsOffset);
447 // The actual array index is (-cell_index - 1).
448 cell_index = -cell_index - 1;
449 }
450 LoadFixedArrayElement(context, context, cell_index);
451 LoadTaggedField(kInterpreterAccumulatorRegister, context, Cell::kValueOffset);
452}
453
454void BaselineAssembler::StaModuleVariable(Register context, Register value,
455 int cell_index, uint32_t depth) {
456 for (; depth > 0; --depth) {
457 LoadTaggedField(context, context, Context::kPreviousOffset);
458 }
460 LoadTaggedField(context, context, SourceTextModule::kRegularExportsOffset);
461
462 // The actual array index is (cell_index - 1).
463 cell_index -= 1;
464 LoadFixedArrayElement(context, context, cell_index);
465 StoreTaggedFieldWithWriteBarrier(context, Cell::kValueOffset, value);
466}
467
469 __ add(lhs, Immediate(Smi::FromInt(1)));
470}
471
472void BaselineAssembler::Word32And(Register output, Register lhs, int rhs) {
473 Move(output, lhs);
474 __ and_(output, Immediate(rhs));
475}
476
477void BaselineAssembler::Switch(Register reg, int case_value_base,
478 Label** labels, int num_labels) {
480 ScratchRegisterScope scope(this);
481 Register table = scope.AcquireScratch();
482 DCHECK(!AreAliased(reg, table));
483 Label fallthrough, jump_table;
484 if (case_value_base != 0) {
485 __ sub(reg, Immediate(case_value_base));
486 }
487 __ cmp(reg, Immediate(num_labels));
488 __ j(above_equal, &fallthrough);
489 __ lea(table, MemOperand(&jump_table));
490 __ jmp(Operand(table, reg, times_system_pointer_size, 0));
491 // Emit the jump table inline, under the assumption that it's not too big.
493 __ bind(&jump_table);
494 for (int i = 0; i < num_labels; ++i) {
495 __ dd(labels[i]);
496 }
497 __ bind(&fallthrough);
498}
499
500#undef __
501#define __ basm.
502
// Emits the baseline (Sparkplug) function epilogue: updates the interrupt
// budget (calling the budget-interrupt runtime function when exhausted),
// tears down the BASELINE frame, drops receiver + arguments, and returns.
// NOTE(review): several original source lines (504-505, 507, 515-518, 521,
// 534) were lost when this listing was extracted; the surviving code is kept
// verbatim below. The missing lines presumably set up |weight|/|basm|, spill
// and reload state around the runtime call, and complete the mov of the
// actual argument count — confirm against upstream before relying on this.
503void BaselineAssembler::EmitReturn(MacroAssembler* masm) {
 506
 508  Register params_size = BaselineLeaveFrameDescriptor::ParamsSizeRegister();
 509  {
 510    ASM_CODE_COMMENT_STRING(masm, "Update Interrupt Budget");
 511
 512    Label skip_interrupt_label;
    // Branches to skip_interrupt_label while the budget stays non-negative.
 513    __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, &skip_interrupt_label);
    // Tag params_size so it survives the runtime call as a Smi.
 514    __ masm()->SmiTag(params_size);
 516
 519    __ CallRuntime(Runtime::kBytecodeBudgetInterrupt_Sparkplug, 1);
 520
    // Restore the raw (untagged) parameter count after the call.
 522    __ masm()->SmiUntag(params_size);
 523
 524    __ Bind(&skip_interrupt_label);
 525  }
 526
 527  BaselineAssembler::ScratchRegisterScope scope(&basm);
 528  Register scratch = scope.AcquireScratch();
 529  DCHECK(!AreAliased(weight, params_size, scratch));
 530
 531  Register actual_params_size = scratch;
 532  // Compute the size of the actual parameters + receiver.
 533  __ masm()->mov(actual_params_size,
 535
 536  // If actual is bigger than formal, then we should use it to free up the stack
 537  // arguments.
 538  __ masm()->cmp(params_size, actual_params_size);
    // params_size = max(params_size, actual_params_size) via cmov.
 539  __ masm()->cmov(kLessThan, params_size, actual_params_size);
 540
 541  // Leave the frame (also dropping the register file).
 542  __ masm()->LeaveFrame(StackFrame::BASELINE);
 543
 544  // Drop receiver + arguments.
 545  __ masm()->DropArguments(params_size, scratch);
 546  __ masm()->Ret();
 547}
548
549#undef __
550
552 Register reg) {
554 assembler_->masm()->Assert(equal, AbortReason::kAccumulatorClobbered);
555}
556
557} // namespace baseline
558} // namespace internal
559} // namespace v8
560
561#endif // V8_BASELINE_IA32_BASELINE_ASSEMBLER_IA32_INL_H_
#define Assert(condition)
void cmov(Condition cc, Register dst, Register src)
void cmp(Register src1, const Operand &src2, Condition cond=al)
static const int kExtensionOffset
Definition contexts.h:500
static V8_INLINE constexpr int OffsetOfElementAt(int index)
Definition contexts.h:512
static const int kPreviousOffset
Definition contexts.h:492
static constexpr int OffsetOfElementAt(int index)
void mov(Register rd, Register rj)
void SmiUntag(Register reg, SBit s=LeaveCC)
void Assert(Condition cond, AbortReason reason) NOOP_UNLESS_DEBUG_CODE
void SmiTag(Register reg, SBit s=LeaveCC)
void Ret(Condition cond=al)
int LeaveFrame(StackFrame::Type type)
static constexpr Tagged< Smi > FromInt(int value)
Definition smi.h:38
void JumpIfByte(Condition cc, Register value, int32_t byte, Label *target, Label::Distance distance=Label::kFar)
void CallRuntime(Runtime::FunctionId function, int nargs)
void JumpIf(Condition cc, Register lhs, const Operand &rhs, Label *target, Label::Distance distance=Label::kFar)
void LoadFixedArrayElement(Register output, Register array, int32_t index)
static MemOperand RegisterFrameOperand(interpreter::Register interpreter_register)
void JumpIfNotRoot(Register value, RootIndex index, Label *target, Label::Distance distance=Label::kFar)
void JumpIfPointer(Condition cc, Register value, MemOperand operand, Label *target, Label::Distance distance=Label::kFar)
void Move(Register output, Register source)
void MoveSmi(Register output, Register source)
void TestAndBranch(Register value, int mask, Condition cc, Label *target, Label::Distance distance=Label::kFar)
void LoadWord8Field(Register output, Register source, int offset)
void LoadWord16FieldZeroExtend(Register output, Register source, int offset)
void AddToInterruptBudgetAndJumpIfNotExceeded(int32_t weight, Label *skip_interrupt_label)
void LdaContextSlot(Register context, uint32_t index, uint32_t depth, CompressionMode compression_mode=CompressionMode::kDefault)
void LoadTaggedField(Register output, Register source, int offset)
void Jump(Label *target, Label::Distance distance=Label::kFar)
void Switch(Register reg, int case_value_base, Label **labels, int num_labels)
void StaModuleVariable(Register context, Register value, int cell_index, uint32_t depth)
void JumpIfObjectTypeFast(Condition cc, Register object, InstanceType instance_type, Label *target, Label::Distance distance=Label::kFar)
void MoveMaybeSmi(Register output, Register source)
void StoreTaggedFieldNoWriteBarrier(Register target, int offset, Register value)
void TryLoadOptimizedOsrCode(Register scratch_and_result, Register feedback_vector, FeedbackSlot slot, Label *on_result, Label::Distance distance)
void LoadTaggedSignedFieldAndUntag(Register output, Register source, int offset)
void JumpIfInstanceType(Condition cc, Register map, InstanceType instance_type, Label *target, Label::Distance distance=Label::kFar)
void JumpIfImmediate(Condition cc, Register left, int right, Label *target, Label::Distance distance=Label::kFar)
void JumpIfRoot(Register value, RootIndex index, Label *target, Label::Distance distance=Label::kFar)
void JumpIfSmi(Register value, Label *target, Label::Distance distance=Label::kFar)
void JumpIfNotSmi(Register value, Label *target, Label::Distance distance=Label::kFar)
void JumpIfObjectType(Condition cc, Register object, InstanceType instance_type, Register map, Label *target, Label::Distance distance=Label::kFar)
void LdaModuleVariable(Register context, int cell_index, uint32_t depth)
void StoreTaggedFieldWithWriteBarrier(Register target, int offset, Register value)
void LoadTaggedSignedField(Register output, Register source, int offset)
void RegisterFrameAddress(interpreter::Register interpreter_register, Register rscratch)
void Word32And(Register output, Register lhs, int rhs)
void StoreTaggedSignedField(Register target, int offset, Tagged< Smi > value)
void StaContextSlot(Register context, Register value, uint32_t index, uint32_t depth)
void JumpIfTagged(Condition cc, Register value, MemOperand operand, Label *target, Label::Distance distance=Label::kFar)
#define ASM_CODE_COMMENT_STRING(asm,...)
Definition assembler.h:618
#define ASM_CODE_COMMENT(asm)
Definition assembler.h:617
base::Vector< const DirectHandle< Object > > args
Definition execution.cc:74
Label label
int32_t offset
LiftoffRegister reg
uint32_t const mask
RegListBase< RegisterT > registers
void PushSingle(MacroAssembler *masm, RootIndex source)
static constexpr Register kScratchRegisters[]
Operand FieldOperand(Register object, int offset)
constexpr Register kInterpreterAccumulatorRegister
constexpr int kSystemPointerSize
Definition globals.h:410
constexpr Register kContextRegister
V8_EXPORT_PRIVATE bool AreAliased(const CPURegister &reg1, const CPURegister &reg2, const CPURegister &reg3=NoReg, const CPURegister &reg4=NoReg, const CPURegister &reg5=NoReg, const CPURegister &reg6=NoReg, const CPURegister &reg7=NoReg, const CPURegister &reg8=NoReg)
Tagged< ClearedWeakValue > ClearedValue(PtrComprCageBase cage_base)
V8_EXPORT_PRIVATE FlagValues v8_flags
static const uint16_t * Align(const uint16_t *chars)
Definition api.cc:5641
uint32_t test
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define arraysize(array)
Definition macros.h:67
static int PushReverse(BaselineAssembler *masm, Arg arg, Args... args)
static int Push(BaselineAssembler *masm, Arg arg, Args... args)
static int PushReverse(BaselineAssembler *basm, interpreter::RegisterList list)
static int Push(BaselineAssembler *basm, interpreter::RegisterList list)
static int PushReverse(BaselineAssembler *basm, Arg arg)
static int Push(BaselineAssembler *basm, Arg arg)