v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
macro-assembler-ia32.cc
1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_ARCH_IA32
6
7#include <stdint.h>
8
10#include "src/base/bits.h"
11#include "src/base/logging.h"
12#include "src/base/macros.h"
23#include "src/codegen/label.h"
27#include "src/codegen/reglist.h"
29#include "src/common/globals.h"
35#include "src/flags/flags.h"
37#include "src/handles/handles.h"
39#include "src/heap/factory.h"
43#include "src/objects/code.h"
48#include "src/objects/map.h"
49#include "src/objects/objects.h"
50#include "src/objects/oddball.h"
53#include "src/objects/smi.h"
54#include "src/roots/roots-inl.h"
55#include "src/roots/roots.h"
56#include "src/runtime/runtime.h"
57#include "src/utils/utils.h"
58
59// Satisfy cpplint check, but don't include platform-specific header. It is
60// included recursively via macro-assembler.h.
61#if 0
62#include "src/codegen/ia32/macro-assembler-ia32.h"
63#endif
64
65#define __ ACCESS_MASM(masm)
66
67namespace v8 {
68namespace internal {
69
70Operand StackArgumentsAccessor::GetArgumentOperand(int index) const {
71 DCHECK_GE(index, 0);
72 // arg[0] = esp + kPCOnStackSize;
73 // arg[i] = arg[0] + i * kSystemPointerSize;
74 return Operand(esp, kPCOnStackSize + index * kSystemPointerSize);
75}
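To make the addressing concrete, here is a small standalone C++ illustration (not part of this file; it assumes ia32's 4-byte return-address slot and 4-byte system pointers, and the demo itself is hypothetical):

#include <cstdio>

int main() {
  const int kPCOnStackSize = 4;      // return-address slot (ia32 assumption)
  const int kSystemPointerSize = 4;  // 32-bit system pointers
  for (int index = 0; index < 3; ++index) {
    // Mirrors Operand(esp, kPCOnStackSize + index * kSystemPointerSize).
    std::printf("arg[%d] -> [esp + %d]\n", index,
                kPCOnStackSize + index * kSystemPointerSize);
  }
  return 0;
}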
76
77// -------------------------------------------------------------------------
78// MacroAssembler implementation.
79
80void MacroAssembler::InitializeRootRegister() {
81 ASM_CODE_COMMENT(this);
82 ExternalReference isolate_root = ExternalReference::isolate_root(isolate());
83 Move(kRootRegister, Immediate(isolate_root));
84}
85
86Operand MacroAssembler::RootAsOperand(RootIndex index) {
87 DCHECK(root_array_available());
88 return Operand(kRootRegister, RootRegisterOffsetForRootIndex(index));
89}
90
91void MacroAssembler::LoadRoot(Register destination, RootIndex index) {
92 ASM_CODE_COMMENT(this);
93 if (root_array_available()) {
94 mov(destination, RootAsOperand(index));
95 return;
96 }
97
99 Handle<Object> object = isolate()->root_handle(index);
100 if (IsSmi(*object)) {
101 mov(destination, Immediate(Cast<Smi>(*object)));
102 return;
103 } else {
104 DCHECK(IsHeapObject(*object));
106 return;
107 }
108 }
109
110 ExternalReference isolate_root = ExternalReference::isolate_root(isolate());
111 lea(destination,
112 Operand(isolate_root.address(), RelocInfo::EXTERNAL_REFERENCE));
113 mov(destination, Operand(destination, RootRegisterOffsetForRootIndex(index)));
114}
115
116void MacroAssembler::CompareRoot(Register with, Register scratch,
117 RootIndex index) {
118 ASM_CODE_COMMENT(this);
119 if (root_array_available()) {
120 CompareRoot(with, index);
121 } else {
122 ExternalReference isolate_root = ExternalReference::isolate_root(isolate());
123 lea(scratch,
124 Operand(isolate_root.address(), RelocInfo::EXTERNAL_REFERENCE));
125 cmp(with, Operand(scratch, RootRegisterOffsetForRootIndex(index)));
126 }
127}
128
129void MacroAssembler::CompareRoot(Register with, RootIndex index) {
130 ASM_CODE_COMMENT(this);
131 if (root_array_available()) {
132 cmp(with, RootAsOperand(index));
133 return;
134 }
135
137 Handle<Object> object = isolate()->root_handle(index);
138 if (IsHeapObject(*object)) {
139 cmp(with, Cast<HeapObject>(object));
140 } else {
141 cmp(with, Immediate(Cast<Smi>(*object)));
142 }
143}
144
145void MacroAssembler::PushRoot(RootIndex index) {
146 ASM_CODE_COMMENT(this);
147 if (root_array_available()) {
149 push(RootAsOperand(index));
150 return;
151 }
152
153 // TODO(v8:6666): Add a scratch register or remove all uses.
155 Handle<Object> object = isolate()->root_handle(index);
156 if (IsHeapObject(*object)) {
157 Push(Cast<HeapObject>(object));
158 } else {
159 Push(Cast<Smi>(*object));
160 }
161}
162
163void MacroAssembler::CompareRange(Register value, unsigned lower_limit,
164 unsigned higher_limit, Register scratch) {
165 ASM_CODE_COMMENT(this);
166 DCHECK_LT(lower_limit, higher_limit);
167 if (lower_limit != 0) {
168 lea(scratch, Operand(value, 0u - lower_limit));
169 cmp(scratch, Immediate(higher_limit - lower_limit));
170 } else {
171 cmp(value, Immediate(higher_limit));
172 }
173}
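CompareRange relies on the classic single-branch range check: for unsigned x, lower <= x <= higher exactly when x - lower <= higher - lower in wrap-around arithmetic (the caller's j(below_equal, ...) is that unsigned comparison). A minimal sketch, with a hypothetical InRange helper:

#include <cassert>
#include <cstdint>

bool InRange(uint32_t value, uint32_t lower, uint32_t higher) {
  // Values below `lower` wrap around to a huge unsigned number and fail.
  return value - lower <= higher - lower;
}

int main() {
  assert(InRange(5, 3, 7));
  assert(!InRange(2, 3, 7));  // 2 - 3 wraps to 0xFFFFFFFF
  assert(!InRange(8, 3, 7));
}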
174
175void MacroAssembler::JumpIfIsInRange(Register value, unsigned lower_limit,
176 unsigned higher_limit, Register scratch,
177 Label* on_in_range,
178 Label::Distance near_jump) {
179 CompareRange(value, lower_limit, higher_limit, scratch);
180 j(below_equal, on_in_range, near_jump);
181}
182
183void MacroAssembler::PushArray(Register array, Register size, Register scratch,
184 PushArrayOrder order) {
185 ASM_CODE_COMMENT(this);
186 DCHECK(!AreAliased(array, size, scratch));
187 Register counter = scratch;
188 Label loop, entry;
189 if (order == PushArrayOrder::kReverse) {
190 mov(counter, 0);
191 jmp(&entry);
192 bind(&loop);
193 Push(Operand(array, counter, times_system_pointer_size, 0));
194 inc(counter);
195 bind(&entry);
196 cmp(counter, size);
197 j(less, &loop, Label::kNear);
198 } else {
199 mov(counter, size);
200 jmp(&entry);
201 bind(&loop);
202 Push(Operand(array, counter, times_system_pointer_size, 0));
203 bind(&entry);
204 dec(counter);
205 j(greater_equal, &loop, Label::kNear);
206 }
207}
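A hedged C++ rendering of the two loop shapes above (illustration only; PushArraySketch is a made-up name and a vector stands in for the machine stack): kReverse pushes array[0] first, while the default order pushes array[size-1] first so that index 0 ends up on top.

#include <cstddef>
#include <vector>

void PushArraySketch(const int* array, std::size_t size,
                     std::vector<int>* stack, bool reverse_order) {
  if (reverse_order) {
    for (std::size_t i = 0; i < size; ++i) stack->push_back(array[i]);
  } else {
    for (std::size_t i = size; i-- > 0;) stack->push_back(array[i]);
  }
}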
208
209Operand MacroAssembler::ExternalReferenceAsOperand(ExternalReference reference,
210 Register scratch) {
211 if (root_array_available()) {
212 if (reference.IsIsolateFieldId()) {
213 return Operand(kRootRegister, reference.offset_from_root_register());
214 }
215 if (options().enable_root_relative_access) {
216 intptr_t delta =
217 RootRegisterOffsetForExternalReference(isolate(), reference);
218 return Operand(kRootRegister, delta);
219 }
220 if (options().isolate_independent_code) {
221 if (IsAddressableThroughRootRegister(isolate(), reference)) {
222 // Some external references can be efficiently loaded as an offset from
223 // kRootRegister.
224 intptr_t offset =
225 RootRegisterOffsetForExternalReference(isolate(), reference);
226 return Operand(kRootRegister, offset);
227 } else {
228 // Otherwise, do a memory load from the external reference table.
229 mov(scratch, Operand(kRootRegister,
230 RootRegisterOffsetForExternalReferenceTableEntry(
231 isolate(), reference)));
232 return Operand(scratch, 0);
233 }
234 }
235 }
236 Move(scratch, Immediate(reference));
237 return Operand(scratch, 0);
238}
239
240// TODO(v8:6666): If possible, refactor into a platform-independent function in
241// MacroAssembler.
242Operand MacroAssembler::HeapObjectAsOperand(Handle<HeapObject> object) {
243 DCHECK(root_array_available());
244
245 Builtin builtin;
246 RootIndex root_index;
247 if (isolate()->roots_table().IsRootHandle(object, &root_index)) {
248 return RootAsOperand(root_index);
249 } else if (isolate()->builtins()->IsBuiltinHandle(object, &builtin)) {
250 return Operand(kRootRegister, RootRegisterOffsetForBuiltin(builtin));
251 } else if (object.is_identical_to(code_object_) &&
254 } else {
255 // Objects in the constants table need an additional indirection, which
256 // cannot be represented as a single Operand.
257 UNREACHABLE();
258 }
259}
260
261void MacroAssembler::LoadFromConstantsTable(Register destination,
262 int constant_index) {
263 ASM_CODE_COMMENT(this);
264 DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kBuiltinsConstantsTable));
265 LoadRoot(destination, RootIndex::kBuiltinsConstantsTable);
266 mov(destination,
267 FieldOperand(destination, FixedArray::OffsetOfElementAt(constant_index)));
268}
269
270void MacroAssembler::LoadRootRegisterOffset(Register destination,
271 intptr_t offset) {
272 ASM_CODE_COMMENT(this);
273 DCHECK(is_int32(offset));
275 if (offset == 0) {
276 Move(destination, kRootRegister);
277 } else {
278 lea(destination, Operand(kRootRegister, static_cast<int32_t>(offset)));
279 }
280}
281
282void MacroAssembler::LoadRootRelative(Register destination, int32_t offset) {
283 ASM_CODE_COMMENT(this);
284 DCHECK(root_array_available());
285 mov(destination, Operand(kRootRegister, offset));
286}
287
288void MacroAssembler::StoreRootRelative(int32_t offset, Register value) {
289 ASM_CODE_COMMENT(this);
290 DCHECK(root_array_available());
291 mov(Operand(kRootRegister, offset), value);
292}
293
294void MacroAssembler::LoadAddress(Register destination,
295 ExternalReference source) {
296 if (root_array_available()) {
297 if (source.IsIsolateFieldId()) {
298 lea(destination,
299 Operand(kRootRegister, source.offset_from_root_register()));
300 return;
301 }
302 if (options().isolate_independent_code) {
303 IndirectLoadExternalReference(destination, source);
304 return;
305 }
306 }
307 // External references should not get created with IDs if
308 // `!root_array_available()`.
309 CHECK(!source.IsIsolateFieldId());
310 mov(destination, Immediate(source));
311}
312
313int MacroAssembler::RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
314 Register exclusion) const {
315 int bytes = 0;
316 RegList saved_regs = kCallerSaved - exclusion;
317 bytes += kSystemPointerSize * saved_regs.Count();
318
319 if (fp_mode == SaveFPRegsMode::kSave) {
320 // Count all allocatable XMM registers.
322 }
323
324 return bytes;
325}
326
327int MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
328 Register exclusion) {
329 ASM_CODE_COMMENT(this);
330 // We don't allow a GC in a write barrier slow path so there is no need to
331 // store the registers in any particular way, but we do have to store and
332 // restore them.
333 int bytes = 0;
334 RegList saved_regs = kCallerSaved - exclusion;
335 for (Register reg : saved_regs) {
336 push(reg);
337 bytes += kSystemPointerSize;
338 }
339
340 if (fp_mode == SaveFPRegsMode::kSave) {
341 // Save all allocatable XMM registers.
343 const int delta = kStackSavedSavedFPSize * i;
344 AllocateStackSpace(delta);
346#if V8_ENABLE_WEBASSEMBLY
347 Movdqu(Operand(esp, --i * kStackSavedSavedFPSize), reg);
348#else
349 Movsd(Operand(esp, --i * kStackSavedSavedFPSize), reg);
350#endif // V8_ENABLE_WEBASSEMBLY
351 }
352 bytes += delta;
353 }
354
355 return bytes;
356}
357
358int MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion) {
359 ASM_CODE_COMMENT(this);
360 int bytes = 0;
361 if (fp_mode == SaveFPRegsMode::kSave) {
362 // Restore all allocatable XMM registers.
364 const int delta = kStackSavedSavedFPSize * i;
366#if V8_ENABLE_WEBASSEMBLY
367 Movdqu(reg, Operand(esp, --i * kStackSavedSavedFPSize));
368#else
369 Movsd(reg, Operand(esp, --i * kStackSavedSavedFPSize));
370#endif // V8_ENABLE_WEBASSEMBLY
371 }
372 add(esp, Immediate(delta));
373 bytes += delta;
374 }
375
376 RegList saved_regs = kCallerSaved - exclusion;
377 for (Register reg : base::Reversed(saved_regs)) {
378 pop(reg);
379 bytes += kSystemPointerSize;
380 }
381
382 return bytes;
383}
384
385void MacroAssembler::RecordWriteField(Register object, int offset,
386 Register value, Register slot_address,
387 SaveFPRegsMode save_fp,
388 SmiCheck smi_check) {
389 ASM_CODE_COMMENT(this);
390 // First, check if a write barrier is even needed. The tests below
391 // catch stores of Smis.
392 Label done;
393
394 // Skip barrier if writing a smi.
395 if (smi_check == SmiCheck::kInline) {
396 JumpIfSmi(value, &done);
397 }
398
399 // Although the object register is tagged, the offset is relative to the start
400 // of the object, so the offset must be a multiple of kTaggedSize.
401 DCHECK(IsAligned(offset, kTaggedSize));
402
403 lea(slot_address, FieldOperand(object, offset));
404 if (v8_flags.slow_debug_code) {
405 Label ok;
406 test_b(slot_address, Immediate(kTaggedSize - 1));
407 j(zero, &ok, Label::kNear);
408 int3();
409 bind(&ok);
410 }
411
412 RecordWrite(object, slot_address, value, save_fp, SmiCheck::kOmit);
413
414 bind(&done);
415
416 // Clobber clobbered input registers when running with the debug-code flag
417 // turned on to provoke errors.
418 if (v8_flags.slow_debug_code) {
419 mov(value, Immediate(base::bit_cast<int32_t>(kZapValue)));
420 mov(slot_address, Immediate(base::bit_cast<int32_t>(kZapValue)));
421 }
422}
423
424void MacroAssembler::MaybeSaveRegisters(RegList registers) {
425 for (Register reg : registers) {
426 push(reg);
427 }
428}
429
430void MacroAssembler::MaybeRestoreRegisters(RegList registers) {
431 for (Register reg : base::Reversed(registers)) {
432 pop(reg);
433 }
434}
435
436void MacroAssembler::CallEphemeronKeyBarrier(Register object,
437 Register slot_address,
438 SaveFPRegsMode fp_mode) {
439 ASM_CODE_COMMENT(this);
440 DCHECK(!AreAliased(object, slot_address));
441 RegList registers =
442 WriteBarrierDescriptor::ComputeSavedRegisters(object, slot_address);
443 MaybeSaveRegisters(registers);
444
445 Register object_parameter = WriteBarrierDescriptor::ObjectRegister();
446 Register slot_address_parameter =
447 WriteBarrierDescriptor::SlotAddressRegister();
448
449 push(object);
450 push(slot_address);
451 pop(slot_address_parameter);
452 pop(object_parameter);
453
454 CallBuiltin(Builtins::EphemeronKeyBarrier(fp_mode));
455 MaybeRestoreRegisters(registers);
456}
457
458void MacroAssembler::CallRecordWriteStubSaveRegisters(Register object,
459 Register slot_address,
460 SaveFPRegsMode fp_mode,
461 StubCallMode mode) {
462 ASM_CODE_COMMENT(this);
463 DCHECK(!AreAliased(object, slot_address));
464 RegList registers =
465 WriteBarrierDescriptor::ComputeSavedRegisters(object, slot_address);
466 MaybeSaveRegisters(registers);
467
468 Register object_parameter = WriteBarrierDescriptor::ObjectRegister();
469 Register slot_address_parameter =
470 WriteBarrierDescriptor::SlotAddressRegister();
471
472 push(object);
473 push(slot_address);
474 pop(slot_address_parameter);
475 pop(object_parameter);
476
477 CallRecordWriteStub(object_parameter, slot_address_parameter, fp_mode, mode);
478
479 MaybeRestoreRegisters(registers);
480}
481
482void MacroAssembler::CallRecordWriteStub(Register object, Register slot_address,
483 SaveFPRegsMode fp_mode,
484 StubCallMode mode) {
485 ASM_CODE_COMMENT(this);
486 // Use CallRecordWriteStubSaveRegisters if the object and slot registers
487 // need to be caller saved.
488 DCHECK_EQ(WriteBarrierDescriptor::ObjectRegister(), object);
489 DCHECK_EQ(WriteBarrierDescriptor::SlotAddressRegister(), slot_address);
490#if V8_ENABLE_WEBASSEMBLY
491 if (mode == StubCallMode::kCallWasmRuntimeStub) {
492 // Use {wasm_call} for direct Wasm call within a module.
493 auto wasm_target =
494 static_cast<Address>(wasm::WasmCode::GetRecordWriteBuiltin(fp_mode));
495 wasm_call(wasm_target, RelocInfo::WASM_STUB_CALL);
496#else
497 if (false) {
498#endif
499 } else {
500 CallBuiltin(Builtins::RecordWrite(fp_mode));
501 }
502}
503
504void MacroAssembler::RecordWrite(Register object, Register slot_address,
505 Register value, SaveFPRegsMode fp_mode,
506 SmiCheck smi_check) {
507 ASM_CODE_COMMENT(this);
508 DCHECK(!AreAliased(object, value, slot_address));
509 AssertNotSmi(object);
510
511 if (v8_flags.disable_write_barriers) {
512 return;
513 }
514
515 if (v8_flags.slow_debug_code) {
516 ASM_CODE_COMMENT_STRING(this, "Verify slot_address");
517 Label ok;
518 cmp(value, Operand(slot_address, 0));
519 j(equal, &ok, Label::kNear);
520 int3();
521 bind(&ok);
522 }
523
524 // First, check if a write barrier is even needed. The tests below
525 // catch stores of Smis and stores into young gen.
526 Label done;
527
528 if (smi_check == SmiCheck::kInline) {
529 // Skip barrier if writing a smi.
530 JumpIfSmi(value, &done, Label::kNear);
531 }
532
533 CheckPageFlag(value,
534 value, // Used as scratch.
535 MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
536 Label::kNear);
537 CheckPageFlag(object,
538 value, // Used as scratch.
539 MemoryChunk::kPointersFromHereAreInterestingMask, zero, &done,
540 Label::kNear);
541 RecordComment("CheckPageFlag]");
542
543 CallRecordWriteStub(object, slot_address, fp_mode);
544
545 bind(&done);
546
547 // Clobber clobbered registers when running with the debug-code flag
548 // turned on to provoke errors.
549 if (v8_flags.slow_debug_code) {
550 ASM_CODE_COMMENT_STRING(this, "Clobber slot_address and value");
551 mov(slot_address, Immediate(base::bit_cast<int32_t>(kZapValue)));
552 mov(value, Immediate(base::bit_cast<int32_t>(kZapValue)));
553 }
554}
555
556void MacroAssembler::Cvtsi2ss(XMMRegister dst, Operand src) {
557 xorps(dst, dst);
558 cvtsi2ss(dst, src);
559}
560
561void MacroAssembler::Cvtsi2sd(XMMRegister dst, Operand src) {
562 xorpd(dst, dst);
563 cvtsi2sd(dst, src);
564}
565
566void MacroAssembler::Cvtui2ss(XMMRegister dst, Operand src, Register tmp) {
567 Label done;
568 Register src_reg = src.is_reg_only() ? src.reg() : tmp;
569 if (src_reg == tmp) mov(tmp, src);
570 cvtsi2ss(dst, src_reg);
571 test(src_reg, src_reg);
572 j(positive, &done, Label::kNear);
573
574 // Compute {src/2 | (src&1)} (retain the LSB to avoid rounding errors).
575 if (src_reg != tmp) mov(tmp, src_reg);
576 shr(tmp, 1);
577 // The LSB is shifted into CF. If it is set, set the LSB in {tmp}.
578 Label msb_not_set;
579 j(not_carry, &msb_not_set, Label::kNear);
580 or_(tmp, Immediate(1));
581 bind(&msb_not_set);
582 cvtsi2ss(dst, tmp);
583 addss(dst, dst);
584 bind(&done);
585}
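The fallback path above converts unsigned values with the sign bit set by halving first. A sketch of the same arithmetic in plain C++ (Uint32ToFloat is a hypothetical name; the engine does this in SSE registers):

#include <cassert>
#include <cstdint>

float Uint32ToFloat(uint32_t src) {
  if (src < 0x80000000u) {  // sign bit clear: signed conversion is exact
    return static_cast<float>(static_cast<int32_t>(src));
  }
  // Halve, but keep the LSB so round-to-nearest matches the unhalved value.
  uint32_t halved = (src >> 1) | (src & 1);
  float f = static_cast<float>(static_cast<int32_t>(halved));
  return f + f;  // the addss(dst, dst) step
}

int main() {
  assert(Uint32ToFloat(0x80000000u) == 2147483648.0f);
  assert(Uint32ToFloat(123u) == 123.0f);
}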
586
587void MacroAssembler::Cvttss2ui(Register dst, Operand src, XMMRegister tmp) {
588 Label done;
589 cvttss2si(dst, src);
590 test(dst, dst);
591 j(positive, &done);
592 Move(tmp, static_cast<float>(INT32_MIN));
593 addss(tmp, src);
594 cvttss2si(dst, tmp);
595 or_(dst, Immediate(0x80000000));
596 bind(&done);
597}
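Going the other way, values of 2^31 and above are biased down before the truncating conversion and the top bit is OR'ed back in afterwards. A minimal sketch (Float32ToUint32 is a hypothetical name; cvttss2si's out-of-range behavior is modeled by the explicit range test):

#include <cassert>
#include <cstdint>

uint32_t Float32ToUint32(float src) {
  if (src < 2147483648.0f) {  // fits the signed range: convert directly
    return static_cast<uint32_t>(static_cast<int32_t>(src));
  }
  float biased = src + static_cast<float>(INT32_MIN);  // src - 2^31
  return static_cast<uint32_t>(static_cast<int32_t>(biased)) | 0x80000000u;
}

int main() {
  assert(Float32ToUint32(3.5f) == 3u);
  assert(Float32ToUint32(2147483648.0f) == 0x80000000u);
}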
598
599void MacroAssembler::Cvtui2sd(XMMRegister dst, Operand src, Register scratch) {
600 Label done;
601 cmp(src, Immediate(0));
602 ExternalReference uint32_bias = ExternalReference::address_of_uint32_bias();
603 Cvtsi2sd(dst, src);
604 j(not_sign, &done, Label::kNear);
605 addsd(dst, ExternalReferenceAsOperand(uint32_bias, scratch));
606 bind(&done);
607}
608
609void MacroAssembler::Cvttsd2ui(Register dst, Operand src, XMMRegister tmp) {
610 Move(tmp, -2147483648.0);
611 addsd(tmp, src);
612 cvttsd2si(dst, tmp);
613 add(dst, Immediate(0x80000000));
614}
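The double-precision variant needs no branch at all: every uint32 value minus 2^31 fits the int32 range exactly, and adding 0x80000000 undoes the bias modulo 2^32. A sketch under the same caveats as above:

#include <cassert>
#include <cstdint>

uint32_t Float64ToUint32(double src) {
  int32_t biased = static_cast<int32_t>(src - 2147483648.0);  // cvttsd2si step
  return static_cast<uint32_t>(biased) + 0x80000000u;         // add step
}

int main() {
  assert(Float64ToUint32(0.0) == 0u);
  assert(Float64ToUint32(4294967295.0) == 4294967295u);
}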
615
616void MacroAssembler::ShlPair(Register high, Register low, uint8_t shift) {
617 DCHECK_GE(63, shift);
618 if (shift >= 32) {
619 mov(high, low);
620 if (shift != 32) shl(high, shift - 32);
621 xor_(low, low);
622 } else {
623 shld(high, low, shift);
624 shl(low, shift);
625 }
626}
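ShlPair synthesizes a 64-bit shift from two 32-bit halves: shld feeds low's top bits into high, and shifts of 32 or more move low into high outright. A C++ sketch of the same semantics (ShlPairSketch is a made-up name):

#include <cassert>
#include <cstdint>

void ShlPairSketch(uint32_t* high, uint32_t* low, unsigned shift) {
  if (shift >= 32) {
    *high = *low << (shift - 32);  // shift == 32 just copies low into high
    *low = 0;
  } else if (shift > 0) {
    *high = (*high << shift) | (*low >> (32 - shift));  // the shld step
    *low <<= shift;
  }
}

int main() {
  uint32_t hi = 0x00000001u, lo = 0x80000000u;
  ShlPairSketch(&hi, &lo, 1);  // 0x1'80000000 << 1 == 0x3'00000000
  assert(hi == 0x00000003u && lo == 0u);
}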
627
628void MacroAssembler::ShlPair_cl(Register high, Register low) {
629 ASM_CODE_COMMENT(this);
630 shld_cl(high, low);
631 shl_cl(low);
632 Label done;
633 test(ecx, Immediate(0x20));
634 j(equal, &done, Label::kNear);
635 mov(high, low);
636 xor_(low, low);
637 bind(&done);
638}
639
640void MacroAssembler::ShrPair(Register high, Register low, uint8_t shift) {
641 DCHECK_GE(63, shift);
642 if (shift >= 32) {
643 mov(low, high);
644 if (shift != 32) shr(low, shift - 32);
645 xor_(high, high);
646 } else {
647 shrd(low, high, shift);
648 shr(high, shift);
649 }
650}
651
652void MacroAssembler::ShrPair_cl(Register high, Register low) {
653 ASM_CODE_COMMENT(this);
654 shrd_cl(low, high);
655 shr_cl(high);
656 Label done;
657 test(ecx, Immediate(0x20));
658 j(equal, &done, Label::kNear);
659 mov(low, high);
660 xor_(high, high);
661 bind(&done);
662}
663
664void MacroAssembler::SarPair(Register high, Register low, uint8_t shift) {
665 ASM_CODE_COMMENT(this);
666 DCHECK_GE(63, shift);
667 if (shift >= 32) {
668 mov(low, high);
669 if (shift != 32) sar(low, shift - 32);
670 sar(high, 31);
671 } else {
672 shrd(low, high, shift);
673 sar(high, shift);
674 }
675}
676
677void MacroAssembler::SarPair_cl(Register high, Register low) {
678 ASM_CODE_COMMENT(this);
679 shrd_cl(low, high);
680 sar_cl(high);
681 Label done;
682 test(ecx, Immediate(0x20));
683 j(equal, &done, Label::kNear);
684 mov(low, high);
685 sar(high, 31);
686 bind(&done);
687}
688
689void MacroAssembler::LoadMap(Register destination, Register object) {
690 mov(destination, FieldOperand(object, HeapObject::kMapOffset));
691}
692
693void MacroAssembler::LoadFeedbackVector(Register dst, Register closure,
694 Register scratch, Label* fbv_undef,
695 Label::Distance distance) {
696 Label done;
697
698 // Load the feedback vector from the closure.
699 mov(dst, FieldOperand(closure, JSFunction::kFeedbackCellOffset));
700 mov(dst, FieldOperand(dst, FeedbackCell::kValueOffset));
701
702 // Check if feedback vector is valid.
703 mov(scratch, FieldOperand(dst, HeapObject::kMapOffset));
704 CmpInstanceType(scratch, FEEDBACK_VECTOR_TYPE);
705 j(equal, &done, Label::kNear);
706
707 // Not valid, load undefined.
708 LoadRoot(dst, RootIndex::kUndefinedValue);
709 jmp(fbv_undef, distance);
710
711 bind(&done);
712}
713
714void MacroAssembler::CmpObjectType(Register heap_object, InstanceType type,
715 Register map) {
716 ASM_CODE_COMMENT(this);
717 LoadMap(map, heap_object);
718 CmpInstanceType(map, type);
719}
720
721void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
722 cmpw(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
723}
724
725void MacroAssembler::CmpInstanceTypeRange(Register map,
726 Register instance_type_out,
727 Register scratch,
728 InstanceType lower_limit,
729 InstanceType higher_limit) {
730 ASM_CODE_COMMENT(this);
731 DCHECK_LT(lower_limit, higher_limit);
732 movzx_w(instance_type_out, FieldOperand(map, Map::kInstanceTypeOffset));
733 CompareRange(instance_type_out, lower_limit, higher_limit, scratch);
734}
735
736void MacroAssembler::TestCodeIsMarkedForDeoptimization(Register code) {
737 test(FieldOperand(code, Code::kFlagsOffset),
738 Immediate(1 << Code::kMarkedForDeoptimizationBit));
739}
740
741Immediate MacroAssembler::ClearedValue() const {
742 return Immediate(static_cast<int32_t>(i::ClearedValue(isolate()).ptr()));
743}
744
745namespace {
746
747#ifndef V8_ENABLE_LEAPTIERING
748void TailCallOptimizedCodeSlot(MacroAssembler* masm,
749 Register optimized_code_entry) {
750 // ----------- S t a t e -------------
751 // -- eax : actual argument count
752 // -- edx : new target (preserved for callee if needed, and caller)
753 // -- edi : target function (preserved for callee if needed, and caller)
754 // -----------------------------------
755 ASM_CODE_COMMENT(masm);
756 DCHECK(!AreAliased(edx, edi, optimized_code_entry));
757
758 Register closure = edi;
759 __ Push(eax);
760 __ Push(edx);
761
762 Label heal_optimized_code_slot;
763
764 // If the optimized code is cleared, go to runtime to update the optimization
765 // marker field.
766 __ LoadWeakValue(optimized_code_entry, &heal_optimized_code_slot);
767
768 // The entry references a CodeWrapper object. Unwrap it now.
769 __ mov(optimized_code_entry,
770 FieldOperand(optimized_code_entry, CodeWrapper::kCodeOffset));
771
772 // Check if the optimized code is marked for deopt. If it is, bailout to a
773 // given label.
774 __ TestCodeIsMarkedForDeoptimization(optimized_code_entry);
775 __ j(not_zero, &heal_optimized_code_slot);
776
777 // Optimized code is good, get it into the closure and link the closure
778 // into the optimized functions list, then tail call the optimized code.
779 __ Push(optimized_code_entry);
780 __ ReplaceClosureCodeWithOptimizedCode(optimized_code_entry, closure, edx,
781 ecx);
782 static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
783 __ Pop(optimized_code_entry);
784 __ LoadCodeInstructionStart(ecx, optimized_code_entry);
785 __ Pop(edx);
786 __ Pop(eax);
787 __ jmp(ecx);
788
789 // Optimized code slot contains deoptimized code or code is cleared and
790 // optimized code marker isn't updated. Evict the code, update the marker
791 // and re-enter the closure's code.
792 __ bind(&heal_optimized_code_slot);
793 __ Pop(edx);
794 __ Pop(eax);
795 __ GenerateTailCallToReturnedCode(Runtime::kHealOptimizedCodeSlot);
796}
797#endif // V8_ENABLE_LEAPTIERING
798
799} // namespace
800
801#ifdef V8_ENABLE_DEBUG_CODE
802void MacroAssembler::AssertFeedbackCell(Register object, Register scratch) {
803 if (v8_flags.debug_code) {
804 CmpObjectType(object, FEEDBACK_CELL_TYPE, scratch);
805 Assert(equal, AbortReason::kExpectedFeedbackCell);
806 }
807}
808void MacroAssembler::AssertFeedbackVector(Register object, Register scratch) {
809 if (v8_flags.debug_code) {
810 CmpObjectType(object, FEEDBACK_VECTOR_TYPE, scratch);
811 Assert(equal, AbortReason::kExpectedFeedbackVector);
812 }
813}
814#endif // V8_ENABLE_DEBUG_CODE
815
816void MacroAssembler::ReplaceClosureCodeWithOptimizedCode(
817 Register optimized_code, Register closure, Register value,
818 Register slot_address) {
819 ASM_CODE_COMMENT(this);
820#ifdef V8_ENABLE_LEAPTIERING
821 UNREACHABLE();
822#else
823 // Store the optimized code in the closure.
824 mov(FieldOperand(closure, JSFunction::kCodeOffset), optimized_code);
825 mov(value, optimized_code); // Write barrier clobbers slot_address below.
826 RecordWriteField(closure, JSFunction::kCodeOffset, value, slot_address,
827 SaveFPRegsMode::kIgnore, SmiCheck::kOmit);
828#endif // V8_ENABLE_LEAPTIERING
829}
830
831void MacroAssembler::GenerateTailCallToReturnedCode(
832 Runtime::FunctionId function_id) {
833 // ----------- S t a t e -------------
834 // -- eax : actual argument count
835 // -- edx : new target (preserved for callee)
836 // -- edi : target function (preserved for callee)
837 // -----------------------------------
838 ASM_CODE_COMMENT(this);
839 {
840 FrameScope scope(this, StackFrame::INTERNAL);
841 // Push a copy of the target function, the new target and the actual
842 // argument count.
843 push(kJavaScriptCallTargetRegister);
844 push(kJavaScriptCallNewTargetRegister);
845 SmiTag(kJavaScriptCallArgCountRegister);
846 push(kJavaScriptCallArgCountRegister);
847 // Function is also the parameter to the runtime call.
848 push(kJavaScriptCallTargetRegister);
849
850 CallRuntime(function_id, 1);
851 mov(ecx, eax);
852
853 // Restore target function, new target and actual argument count.
854 pop(kJavaScriptCallArgCountRegister);
855 SmiUntag(kJavaScriptCallArgCountRegister);
856 pop(kJavaScriptCallNewTargetRegister);
857 pop(kJavaScriptCallTargetRegister);
858 }
859
860 static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
861 JumpCodeObject(ecx);
862}
863
864#ifndef V8_ENABLE_LEAPTIERING
865
866// Read off the flags in the feedback vector and check if there
867// is optimized code or a tiering state that needs to be processed.
868// Registers flags and feedback_vector must be aliased.
869void MacroAssembler::CheckFeedbackVectorFlagsAndJumpIfNeedsProcessing(
870 Register flags, XMMRegister saved_feedback_vector,
871 CodeKind current_code_kind, Label* flags_need_processing) {
872 ASM_CODE_COMMENT(this);
873 DCHECK(CodeKindCanTierUp(current_code_kind));
874 Register feedback_vector = flags;
875
876 // Store feedback_vector. We may need it if we need to load the optimize code
877 // slot entry.
878 movd(saved_feedback_vector, feedback_vector);
879 mov_w(flags, FieldOperand(feedback_vector, FeedbackVector::kFlagsOffset));
880
881 // Check if there is optimized code or a tiering state that needs to be
882 // processed.
883 uint32_t kFlagsMask = FeedbackVector::kFlagsTieringStateIsAnyRequested |
884 FeedbackVector::kFlagsMaybeHasTurbofanCode |
885 FeedbackVector::kFlagsLogNextExecution;
886 if (current_code_kind != CodeKind::MAGLEV) {
887 kFlagsMask |= FeedbackVector::kFlagsMaybeHasMaglevCode;
888 }
889 test_w(flags, Immediate(kFlagsMask));
890 j(not_zero, flags_need_processing);
891}
892
893void MacroAssembler::OptimizeCodeOrTailCallOptimizedCodeSlot(
894 Register flags, XMMRegister saved_feedback_vector) {
895 ASM_CODE_COMMENT(this);
896 Label maybe_has_optimized_code, maybe_needs_logging;
897 // Check if optimized code is available.
898 test(flags, Immediate(FeedbackVector::kFlagsTieringStateIsAnyRequested));
899 j(zero, &maybe_needs_logging);
900
901 GenerateTailCallToReturnedCode(Runtime::kCompileOptimized);
902
903 bind(&maybe_needs_logging);
904 test(flags, Immediate(FeedbackVector::LogNextExecutionBit::kMask));
905 j(zero, &maybe_has_optimized_code);
906 GenerateTailCallToReturnedCode(Runtime::kFunctionLogNextExecution);
907
908 bind(&maybe_has_optimized_code);
909 Register optimized_code_entry = flags;
910 Register feedback_vector = flags;
911 movd(feedback_vector, saved_feedback_vector); // Restore feedback vector.
912 mov(optimized_code_entry,
913 FieldOperand(feedback_vector, FeedbackVector::kMaybeOptimizedCodeOffset));
914 TailCallOptimizedCodeSlot(this, optimized_code_entry);
915}
916
917#endif // !V8_ENABLE_LEAPTIERING
918
919#ifdef V8_ENABLE_DEBUG_CODE
920void MacroAssembler::AssertSmi(Register object) {
921 if (v8_flags.debug_code) {
922 ASM_CODE_COMMENT(this);
923 test(object, Immediate(kSmiTagMask));
924 Check(equal, AbortReason::kOperandIsNotASmi);
925 }
926}
927
928void MacroAssembler::AssertSmi(Operand object) {
929 if (!v8_flags.debug_code) return;
930 ASM_CODE_COMMENT(this);
931 test(object, Immediate(kSmiTagMask));
932 Check(equal, AbortReason::kOperandIsNotASmi);
933}
934
935void MacroAssembler::AssertConstructor(Register object) {
936 if (v8_flags.debug_code) {
937 ASM_CODE_COMMENT(this);
938 test(object, Immediate(kSmiTagMask));
939 Check(not_equal, AbortReason::kOperandIsASmiAndNotAConstructor);
940 Push(object);
941 LoadMap(object, object);
942 test_b(FieldOperand(object, Map::kBitFieldOffset),
943 Immediate(Map::Bits1::IsConstructorBit::kMask));
944 Pop(object);
945 Check(not_zero, AbortReason::kOperandIsNotAConstructor);
946 }
947}
948
949void MacroAssembler::AssertFunction(Register object, Register scratch) {
950 if (v8_flags.debug_code) {
951 ASM_CODE_COMMENT(this);
952 test(object, Immediate(kSmiTagMask));
953 Check(not_equal, AbortReason::kOperandIsASmiAndNotAFunction);
954 Push(object);
955 LoadMap(object, object);
956 CmpInstanceTypeRange(object, scratch, scratch, FIRST_JS_FUNCTION_TYPE,
957 LAST_JS_FUNCTION_TYPE);
958 Pop(object);
959 Check(below_equal, AbortReason::kOperandIsNotAFunction);
960 }
961}
962
963void MacroAssembler::AssertCallableFunction(Register object, Register scratch) {
964 if (v8_flags.debug_code) {
965 ASM_CODE_COMMENT(this);
966 test(object, Immediate(kSmiTagMask));
967 Check(not_equal, AbortReason::kOperandIsASmiAndNotAFunction);
968 Push(object);
969 LoadMap(object, object);
970 CmpInstanceTypeRange(object, scratch, scratch,
971 FIRST_CALLABLE_JS_FUNCTION_TYPE,
972 LAST_CALLABLE_JS_FUNCTION_TYPE);
973 Pop(object);
974 Check(below_equal, AbortReason::kOperandIsNotACallableFunction);
975 }
976}
977
978void MacroAssembler::AssertBoundFunction(Register object) {
979 if (v8_flags.debug_code) {
980 ASM_CODE_COMMENT(this);
981 test(object, Immediate(kSmiTagMask));
982 Check(not_equal, AbortReason::kOperandIsASmiAndNotABoundFunction);
983 Push(object);
984 CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
985 Pop(object);
986 Check(equal, AbortReason::kOperandIsNotABoundFunction);
987 }
988}
989
990void MacroAssembler::AssertGeneratorObject(Register object) {
991 if (!v8_flags.debug_code) return;
992 ASM_CODE_COMMENT(this);
993
994 test(object, Immediate(kSmiTagMask));
995 Check(not_equal, AbortReason::kOperandIsASmiAndNotAGeneratorObject);
996
997 {
998 Push(object);
999 Register map = object;
1000
1001 LoadMap(map, object);
1002
1003 // Check if JSGeneratorObject
1004 CmpInstanceTypeRange(map, map, map, FIRST_JS_GENERATOR_OBJECT_TYPE,
1005 LAST_JS_GENERATOR_OBJECT_TYPE);
1006 Pop(object);
1007 }
1008
1009 Check(below_equal, AbortReason::kOperandIsNotAGeneratorObject);
1010}
1011
1012void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
1013 Register scratch) {
1014 if (v8_flags.debug_code) {
1015 ASM_CODE_COMMENT(this);
1016 Label done_checking;
1017 AssertNotSmi(object);
1018 CompareRoot(object, scratch, RootIndex::kUndefinedValue);
1019 j(equal, &done_checking);
1020 LoadRoot(scratch, RootIndex::kAllocationSiteWithWeakNextMap);
1021 cmp(FieldOperand(object, 0), scratch);
1022 Assert(equal, AbortReason::kExpectedUndefinedOrCell);
1023 bind(&done_checking);
1024 }
1025}
1026
1027void MacroAssembler::AssertNotSmi(Register object) {
1028 if (v8_flags.debug_code) {
1029 ASM_CODE_COMMENT(this);
1030 test(object, Immediate(kSmiTagMask));
1031 Check(not_equal, AbortReason::kOperandIsASmi);
1032 }
1033}
1034
1035void MacroAssembler::AssertJSAny(Register object, Register map_tmp,
1036 AbortReason abort_reason) {
1037 if (!v8_flags.debug_code) return;
1038
1039 ASM_CODE_COMMENT(this);
1040 DCHECK(!AreAliased(object, map_tmp));
1041 Label ok;
1042
1043 JumpIfSmi(object, &ok, Label::kNear);
1044
1045 mov(map_tmp, FieldOperand(object, HeapObject::kMapOffset));
1046
1047 CmpInstanceType(map_tmp, LAST_NAME_TYPE);
1048 j(below_equal, &ok, Label::kNear);
1049
1050 CmpInstanceType(map_tmp, FIRST_JS_RECEIVER_TYPE);
1051 j(above_equal, &ok, Label::kNear);
1052
1053 CompareRoot(map_tmp, RootIndex::kHeapNumberMap);
1054 j(equal, &ok, Label::kNear);
1055
1056 CompareRoot(map_tmp, RootIndex::kBigIntMap);
1057 j(equal, &ok, Label::kNear);
1058
1059 CompareRoot(object, RootIndex::kUndefinedValue);
1060 j(equal, &ok, Label::kNear);
1061
1062 CompareRoot(object, RootIndex::kTrueValue);
1063 j(equal, &ok, Label::kNear);
1064
1065 CompareRoot(object, RootIndex::kFalseValue);
1066 j(equal, &ok, Label::kNear);
1067
1068 CompareRoot(object, RootIndex::kNullValue);
1069 j(equal, &ok, Label::kNear);
1070
1071 Abort(abort_reason);
1072
1073 bind(&ok);
1074}
1075
1076void MacroAssembler::Assert(Condition cc, AbortReason reason) {
1077 if (v8_flags.debug_code) Check(cc, reason);
1078}
1079
1080void MacroAssembler::AssertUnreachable(AbortReason reason) {
1081 if (v8_flags.debug_code) Abort(reason);
1082}
1083#endif // V8_ENABLE_DEBUG_CODE
1084
1085void MacroAssembler::StubPrologue(StackFrame::Type type) {
1086 ASM_CODE_COMMENT(this);
1087 push(ebp); // Caller's frame pointer.
1088 mov(ebp, esp);
1089 push(Immediate(StackFrame::TypeToMarker(type)));
1090}
1091
1092void MacroAssembler::Prologue() {
1093 ASM_CODE_COMMENT(this);
1094 push(ebp); // Caller's frame pointer.
1095 mov(ebp, esp);
1096 push(kContextRegister); // Callee's context.
1097 push(kJSFunctionRegister); // Callee's JS function.
1098 push(kJavaScriptCallArgCountRegister); // Actual argument count.
1099}
1100
1101void MacroAssembler::DropArguments(Register count) {
1102 lea(esp, Operand(esp, count, times_system_pointer_size, 0));
1103}
1104
1105void MacroAssembler::DropArguments(Register count, Register scratch) {
1106 DCHECK(!AreAliased(count, scratch));
1107 PopReturnAddressTo(scratch);
1108 DropArguments(count);
1109 PushReturnAddressFrom(scratch);
1110}
1111
1112void MacroAssembler::DropArgumentsAndPushNewReceiver(Register argc,
1113 Register receiver,
1114 Register scratch) {
1115 DCHECK(!AreAliased(argc, receiver, scratch));
1116 PopReturnAddressTo(scratch);
1117 DropArguments(argc);
1118 Push(receiver);
1119 PushReturnAddressFrom(scratch);
1120}
1121
1122void MacroAssembler::DropArgumentsAndPushNewReceiver(Register argc,
1123 Operand receiver,
1124 Register scratch) {
1125 DCHECK(!AreAliased(argc, scratch));
1126 DCHECK(!receiver.is_reg(scratch));
1127 PopReturnAddressTo(scratch);
1128 DropArguments(argc);
1129 Push(receiver);
1130 PushReturnAddressFrom(scratch);
1131}
1132
1133void MacroAssembler::EnterFrame(StackFrame::Type type) {
1134 ASM_CODE_COMMENT(this);
1135 push(ebp);
1136 mov(ebp, esp);
1137 if (!StackFrame::IsJavaScript(type)) {
1138 Push(Immediate(StackFrame::TypeToMarker(type)));
1139 }
1140#if V8_ENABLE_WEBASSEMBLY
1141 if (type == StackFrame::WASM) Push(kWasmImplicitArgRegister);
1142#endif // V8_ENABLE_WEBASSEMBLY
1143}
1144
1145void MacroAssembler::LeaveFrame(StackFrame::Type type) {
1146 ASM_CODE_COMMENT(this);
1147 if (v8_flags.debug_code && !StackFrame::IsJavaScript(type)) {
1148 cmp(Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset),
1149 Immediate(StackFrame::TypeToMarker(type)));
1150 Check(equal, AbortReason::kStackFrameTypesMustMatch);
1151 }
1152 leave();
1153}
1154
1155#ifdef V8_OS_WIN
1156void MacroAssembler::AllocateStackSpace(Register bytes_scratch) {
1157 ASM_CODE_COMMENT(this);
1158 // On Windows, we cannot increment the stack size by more than one page
1159 // (minimum page size is 4KB) without accessing at least one byte on the
1160 // page. Check this:
1161 // https://msdn.microsoft.com/en-us/library/aa227153(v=vs.60).aspx.
1162 Label check_offset;
1163 Label touch_next_page;
1164 jmp(&check_offset);
1165 bind(&touch_next_page);
1166 sub(esp, Immediate(kStackPageSize));
1167 // Just to touch the page, before we increment further.
1168 mov(Operand(esp, 0), Immediate(0));
1169 sub(bytes_scratch, Immediate(kStackPageSize));
1170
1171 bind(&check_offset);
1172 cmp(bytes_scratch, kStackPageSize);
1173 j(greater_equal, &touch_next_page);
1174
1175 sub(esp, bytes_scratch);
1176}
1177
1178void MacroAssembler::AllocateStackSpace(int bytes) {
1179 ASM_CODE_COMMENT(this);
1180 DCHECK_GE(bytes, 0);
1181 while (bytes >= kStackPageSize) {
1182 sub(esp, Immediate(kStackPageSize));
1183 mov(Operand(esp, 0), Immediate(0));
1184 bytes -= kStackPageSize;
1185 }
1186 if (bytes == 0) return;
1187 sub(esp, Immediate(bytes));
1188}
1189#endif
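A rough C++ rendering of the probing loop (illustration only; assumes 4 KiB pages and uses a plain pointer where the code above moves esp): each new page is written once before the next decrement so Windows can commit its guard region one page at a time.

void AllocateStackSpaceSketch(volatile unsigned char* sp, int bytes) {
  const int kStackPageSize = 4096;  // assumption: minimum page size
  while (bytes >= kStackPageSize) {
    sp -= kStackPageSize;
    *sp = 0;  // touch the page before moving further down
    bytes -= kStackPageSize;
  }
  sp -= bytes;  // the final partial page needs no probe
}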
1190
1191void MacroAssembler::EnterExitFrame(int extra_slots,
1192 StackFrame::Type frame_type,
1193 Register c_function) {
1194 ASM_CODE_COMMENT(this);
1195 DCHECK(frame_type == StackFrame::EXIT ||
1196 frame_type == StackFrame::BUILTIN_EXIT ||
1197 frame_type == StackFrame::API_ACCESSOR_EXIT ||
1198 frame_type == StackFrame::API_CALLBACK_EXIT);
1199
1200 // Set up the frame structure on the stack.
1204 push(ebp);
1205 mov(ebp, esp);
1206
1207 push(Immediate(StackFrame::TypeToMarker(frame_type)));
1209 push(Immediate(0)); // Saved entry sp, patched below.
1210
1211 // Save the frame pointer and the context in top.
1212 DCHECK(!AreAliased(ebp, kContextRegister, c_function));
1213 using ER = ExternalReference;
1214 ER r0 = ER::Create(IsolateAddressId::kCEntryFPAddress, isolate());
1215 mov(ExternalReferenceAsOperand(r0, no_reg), ebp);
1216 ER r1 = ER::Create(IsolateAddressId::kContextAddress, isolate());
1217 mov(ExternalReferenceAsOperand(r1, no_reg), esi);
1218 static_assert(edx == kRuntimeCallFunctionRegister);
1219 ER r2 = ER::Create(IsolateAddressId::kCFunctionAddress, isolate());
1220 mov(ExternalReferenceAsOperand(r2, no_reg), c_function);
1221
1222 AllocateStackSpace(extra_slots * kSystemPointerSize);
1223
1224 // Get the required frame alignment for the OS.
1225 const int kFrameAlignment = base::OS::ActivationFrameAlignment();
1226 if (kFrameAlignment > 0) {
1227 DCHECK(base::bits::IsPowerOfTwo(kFrameAlignment));
1228 and_(esp, -kFrameAlignment);
1229 }
1230
1231 // Patch the saved entry sp.
1232 mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
1233}
1234
1235void MacroAssembler::LeaveExitFrame(Register scratch) {
1236 ASM_CODE_COMMENT(this);
1237
1238 leave();
1239
1240 // Clear the top frame.
1241 ExternalReference c_entry_fp_address =
1242 ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate());
1243 mov(ExternalReferenceAsOperand(c_entry_fp_address, scratch), Immediate(0));
1244
1245 // Restore the current context from top and clear it in debug mode.
1246 ExternalReference context_address =
1247 ExternalReference::Create(IsolateAddressId::kContextAddress, isolate());
1248 mov(esi, ExternalReferenceAsOperand(context_address, scratch));
1249
1250#ifdef DEBUG
1251 push(eax);
1252 mov(ExternalReferenceAsOperand(context_address, eax),
1253 Immediate(Context::kInvalidContext));
1254 pop(eax);
1255#endif
1256}
1257
1258void MacroAssembler::PushStackHandler(Register scratch) {
1259 ASM_CODE_COMMENT(this);
1260 // Adjust this code if not the case.
1261 static_assert(StackHandlerConstants::kSize == 2 * kSystemPointerSize);
1262 static_assert(StackHandlerConstants::kNextOffset == 0);
1263
1264 push(Immediate(0)); // Padding.
1265
1266 // Link the current handler as the next handler.
1267 ExternalReference handler_address =
1268 ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
1269 push(ExternalReferenceAsOperand(handler_address, scratch));
1270
1271 // Set this new handler as the current one.
1272 mov(ExternalReferenceAsOperand(handler_address, scratch), esp);
1273}
1274
1275void MacroAssembler::PopStackHandler(Register scratch) {
1276 ASM_CODE_COMMENT(this);
1277 static_assert(StackHandlerConstants::kNextOffset == 0);
1278 ExternalReference handler_address =
1279 ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
1280 pop(ExternalReferenceAsOperand(handler_address, scratch));
1281 add(esp, Immediate(StackHandlerConstants::kSize - kSystemPointerSize));
1282}
1283
1284void MacroAssembler::CallRuntime(const Runtime::Function* f,
1285 int num_arguments) {
1286 ASM_CODE_COMMENT(this);
1287 // If the expected number of arguments of the runtime function is
1288 // constant, we check that the actual number of arguments match the
1289 // expectation.
1290 CHECK(f->nargs < 0 || f->nargs == num_arguments);
1291
1292 // TODO(1236192): Most runtime routines don't need the number of
1293 // arguments passed in because it is constant. At some point we
1294 // should remove this need and make the runtime routine entry code
1295 // smarter.
1296 Move(kRuntimeCallArgCountRegister, Immediate(num_arguments));
1297 Move(kRuntimeCallFunctionRegister, Immediate(ExternalReference::Create(f)));
1298 bool switch_to_central_stack = options().is_wasm;
1299 CallBuiltin(Builtins::RuntimeCEntry(f->result_size, switch_to_central_stack));
1300}
1301
1302void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
1303 // ----------- S t a t e -------------
1304 // -- esp[0] : return address
1305 // -- esp[4] : argument num_arguments - 1
1306 // ...
1307 // -- esp[4 * num_arguments] : argument 0 (receiver)
1308 //
1309 // For runtime functions with variable arguments:
1310 // -- eax : number of arguments
1311 // -----------------------------------
1312 ASM_CODE_COMMENT(this);
1313 const Runtime::Function* function = Runtime::FunctionForId(fid);
1314 DCHECK_EQ(1, function->result_size);
1315 if (function->nargs >= 0) {
1316 // TODO(1236192): Most runtime routines don't need the number of
1317 // arguments passed in because it is constant. At some point we
1318 // should remove this need and make the runtime routine entry code
1319 // smarter.
1320 Move(kRuntimeCallArgCountRegister, Immediate(function->nargs));
1321 }
1322 JumpToExternalReference(ExternalReference::Create(fid));
1323}
1324
1325void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
1326 bool builtin_exit_frame) {
1327 ASM_CODE_COMMENT(this);
1328 // Set the entry point and jump to the C entry runtime stub.
1329 mov(kRuntimeCallFunctionRegister, Immediate(ext));
1330 TailCallBuiltin(Builtins::CEntry(1, ArgvMode::kStack, builtin_exit_frame));
1331}
1332
1333Operand MacroAssembler::StackLimitAsOperand(StackLimitKind kind) {
1334 DCHECK(root_array_available());
1335 intptr_t offset = kind == StackLimitKind::kRealStackLimit
1336 ? IsolateData::real_jslimit_offset()
1337 : IsolateData::jslimit_offset();
1338
1339 CHECK(is_int32(offset));
1340 return Operand(kRootRegister, static_cast<int32_t>(offset));
1341}
1342
1343void MacroAssembler::CompareStackLimit(Register with, StackLimitKind kind) {
1344 ASM_CODE_COMMENT(this);
1345 cmp(with, StackLimitAsOperand(kind));
1346}
1347
1348void MacroAssembler::StackOverflowCheck(Register num_args, Register scratch,
1349 Label* stack_overflow,
1350 bool include_receiver) {
1351 ASM_CODE_COMMENT(this);
1352 DCHECK_NE(num_args, scratch);
1353 // Check the stack for overflow. We are not trying to catch
1354 // interruptions (e.g. debug break and preemption) here, so the "real stack
1355 // limit" is checked.
1356 ExternalReference real_stack_limit =
1357 ExternalReference::address_of_real_jslimit(isolate());
1358 // Compute the space that is left as a negative number in scratch. If
1359 // we already overflowed, this will be a positive number.
1360 mov(scratch, ExternalReferenceAsOperand(real_stack_limit, scratch));
1361 sub(scratch, esp);
1362 // TODO(victorgomes): Remove {include_receiver} and always require one extra
1363 // word of the stack space.
1364 lea(scratch, Operand(scratch, num_args, times_system_pointer_size, 0));
1365 if (include_receiver) {
1366 add(scratch, Immediate(kSystemPointerSize));
1367 }
1368 // See if we overflowed, i.e. scratch is positive.
1369 cmp(scratch, Immediate(0));
1370 // TODO(victorgomes): Save some bytes in the builtins that use stack checks
1371 // by jumping to a builtin that throws the exception.
1372 j(greater, stack_overflow); // Signed comparison.
1373}
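The arithmetic above can be read as follows: limit - esp is negative while there is headroom, and adding the bytes the call will consume flips it positive exactly when the frame would dip below the real stack limit. A sketch with a hypothetical WouldOverflow helper (kSystemPointerSize is 4 on ia32):

#include <cstdint>

bool WouldOverflow(uintptr_t real_limit, uintptr_t esp, int num_args,
                   bool include_receiver) {
  intptr_t headroom = static_cast<intptr_t>(real_limit - esp);
  headroom += static_cast<intptr_t>(num_args) * 4;
  if (include_receiver) headroom += 4;
  return headroom > 0;  // mirrors j(greater, stack_overflow)
}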
1374
1375void MacroAssembler::InvokePrologue(Register expected_parameter_count,
1376 Register actual_parameter_count,
1377 InvokeType type) {
1378 if (expected_parameter_count == actual_parameter_count) return;
1379 ASM_CODE_COMMENT(this);
1380 DCHECK_EQ(actual_parameter_count, eax);
1381 DCHECK_EQ(expected_parameter_count, ecx);
1382 Label regular_invoke;
1383
1384 // If overapplication or if the actual argument count is equal to the
1385 // formal parameter count, no need to push extra undefined values.
1386 sub(expected_parameter_count, actual_parameter_count);
1387 j(less_equal, &regular_invoke, Label::kFar);
1388
1389 // We need to preserve edx, edi, esi and ebx.
1390 movd(xmm0, edx);
1391 movd(xmm1, edi);
1392 movd(xmm2, esi);
1393 movd(xmm3, ebx);
1394
1395 Label stack_overflow;
1396 StackOverflowCheck(expected_parameter_count, edx, &stack_overflow);
1397
1398 Register scratch = esi;
1399
1400 // Underapplication. Move the arguments already in the stack, including the
1401 // receiver and the return address.
1402 {
1403 Label copy, check;
1404 Register src = edx, dest = esp, num = edi, current = ebx;
1405 mov(src, esp);
1406 lea(scratch,
1407 Operand(expected_parameter_count, times_system_pointer_size, 0));
1408 AllocateStackSpace(scratch);
1409 // Extra words are the receiver (if not already included in argc) and the
1410 // return address (if a jump).
1411 int extra_words = type == InvokeType::kCall ? 0 : 1;
1412 lea(num, Operand(eax, extra_words)); // Number of words to copy.
1413 Move(current, 0);
1414 // Fall-through to the loop body because there are non-zero words to copy.
1415 bind(&copy);
1416 mov(scratch, Operand(src, current, times_system_pointer_size, 0));
1417 mov(Operand(dest, current, times_system_pointer_size, 0), scratch);
1418 inc(current);
1419 bind(&check);
1420 cmp(current, num);
1421 j(less, &copy);
1422 lea(edx, Operand(esp, num, times_system_pointer_size, 0));
1423 }
1424
1425 // Fill remaining expected arguments with undefined values.
1426 movd(ebx, xmm3); // Restore root.
1427 LoadRoot(scratch, RootIndex::kUndefinedValue);
1428 {
1429 Label loop;
1430 bind(&loop);
1431 dec(expected_parameter_count);
1432 mov(Operand(edx, expected_parameter_count, times_system_pointer_size, 0),
1433 scratch);
1434 j(greater, &loop, Label::kNear);
1435 }
1436
1437 // Restore remaining registers.
1438 movd(esi, xmm2);
1439 movd(edi, xmm1);
1440 movd(edx, xmm0);
1441
1442 jmp(&regular_invoke);
1443
1444 bind(&stack_overflow);
1445 {
1446 FrameScope frame(
1447 this, has_frame() ? StackFrame::NO_FRAME_TYPE : StackFrame::INTERNAL);
1448 CallRuntime(Runtime::kThrowStackOverflow);
1449 int3(); // This should be unreachable.
1450 }
1451
1452 bind(&regular_invoke);
1453}
1454
1455void MacroAssembler::CallDebugOnFunctionCall(Register fun, Register new_target,
1456 Register expected_parameter_count,
1457 Register actual_parameter_count) {
1458 ASM_CODE_COMMENT(this);
1459
1460 // We have no available register. So we spill the root register (ebx) and
1461 // recover it later.
1462 movd(xmm0, kRootRegister);
1463
1464 // Load receiver to pass it later to DebugOnFunctionCall hook.
1465 // Receiver is located on top of the stack if we have a frame (usually a
1466 // construct frame), or after the return address if we do not yet have a
1467 // frame.
1469 mov(receiver, Operand(esp, has_frame() ? 0 : kSystemPointerSize));
1470
1471 FrameScope frame(
1472 this, has_frame() ? StackFrame::NO_FRAME_TYPE : StackFrame::INTERNAL);
1473
1474 SmiTag(expected_parameter_count);
1475 Push(expected_parameter_count);
1476
1477 SmiTag(actual_parameter_count);
1478 Push(actual_parameter_count);
1479 SmiUntag(actual_parameter_count);
1480
1481 if (new_target.is_valid()) {
1482 Push(new_target);
1483 }
1484 Push(fun);
1485 Push(fun);
1486 Push(receiver);
1487
1488 // Recover root register.
1489 movd(kRootRegister, xmm0);
1490
1491 CallRuntime(Runtime::kDebugOnFunctionCall);
1492 Pop(fun);
1493 if (new_target.is_valid()) {
1494 Pop(new_target);
1495 }
1496 Pop(actual_parameter_count);
1497 SmiUntag(actual_parameter_count);
1498
1499 Pop(expected_parameter_count);
1500 SmiUntag(expected_parameter_count);
1501}
1502
1503void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
1504 Register expected_parameter_count,
1505 Register actual_parameter_count,
1506 InvokeType type) {
1507 ASM_CODE_COMMENT(this);
1508 // You can't call a function without a valid frame.
1509 DCHECK_IMPLIES(type == InvokeType::kCall, has_frame());
1510 DCHECK_EQ(function, edi);
1511 DCHECK_IMPLIES(new_target.is_valid(), new_target == edx);
1512 DCHECK(expected_parameter_count == ecx || expected_parameter_count == eax);
1513 DCHECK_EQ(actual_parameter_count, eax);
1514
1515 // On function call, call into the debugger if necessary.
1516 Label debug_hook, continue_after_hook;
1517 {
1518 ExternalReference debug_hook_active =
1519 ExternalReference::debug_hook_on_function_call_address(isolate());
1520 push(eax);
1521 cmpb(ExternalReferenceAsOperand(debug_hook_active, eax), Immediate(0));
1522 pop(eax);
1523 j(not_equal, &debug_hook);
1524 }
1525 bind(&continue_after_hook);
1526
1527 // Clear the new.target register if not given.
1528 if (!new_target.is_valid()) {
1529 Move(edx, isolate()->factory()->undefined_value());
1530 }
1531
1532 InvokePrologue(expected_parameter_count, actual_parameter_count, type);
1533 // We call indirectly through the code field in the function to
1534 // allow recompilation to take effect without changing any of the
1535 // call sites.
1536 constexpr int unused_argument_count = 0;
1537 switch (type) {
1538 case InvokeType::kCall:
1539 CallJSFunction(function, unused_argument_count);
1540 break;
1541 case InvokeType::kJump:
1542 JumpJSFunction(function);
1543 break;
1544 }
1545 Label done;
1546 jmp(&done, Label::kNear);
1547
1548 // Deferred debug hook.
1549 bind(&debug_hook);
1550 CallDebugOnFunctionCall(function, new_target, expected_parameter_count,
1551 actual_parameter_count);
1552 jmp(&continue_after_hook);
1553
1554 bind(&done);
1555}
1556
1557void MacroAssembler::InvokeFunction(Register fun, Register new_target,
1558 Register actual_parameter_count,
1559 InvokeType type) {
1560 ASM_CODE_COMMENT(this);
1561 // You can't call a function without a valid frame.
1562 DCHECK(type == InvokeType::kJump || has_frame());
1563
1564 DCHECK(fun == edi);
1565 mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
1566 mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
1567 movzx_w(ecx,
1568 FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
1569
1570 InvokeFunctionCode(edi, new_target, ecx, actual_parameter_count, type);
1571}
1572
1573void MacroAssembler::LoadGlobalProxy(Register dst) {
1574 LoadNativeContextSlot(dst, Context::GLOBAL_PROXY_INDEX);
1575}
1576
1577void MacroAssembler::LoadNativeContextSlot(Register destination, int index) {
1578 ASM_CODE_COMMENT(this);
1579 // Load the native context from the current context.
1580 LoadMap(destination, esi);
1581 mov(destination,
1582 FieldOperand(destination,
1583 Map::kConstructorOrBackPointerOrNativeContextOffset));
1584 // Load the function from the native context.
1585 mov(destination, Operand(destination, Context::SlotOffset(index)));
1586}
1587
1588void MacroAssembler::Ret() { ret(0); }
1589
1590void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
1591 if (is_uint16(bytes_dropped)) {
1592 ret(bytes_dropped);
1593 } else {
1594 pop(scratch);
1595 add(esp, Immediate(bytes_dropped));
1596 push(scratch);
1597 ret(0);
1598 }
1599}
1600
1601void MacroAssembler::Push(Immediate value) {
1602 if (root_array_available()) {
1603 if (value.is_external_reference()) {
1604 ExternalReference reference = value.external_reference();
1605 if (reference.IsIsolateFieldId()) {
1606 push(kRootRegister);
1607 add(Operand(esp, 0), Immediate(reference.offset_from_root_register()));
1608 return;
1609 }
1610 if (options().isolate_independent_code) {
1611 push(kRootRegister);
1612 add(Operand(esp, 0), Immediate(RootRegisterOffsetForExternalReference(
1613 isolate(), reference)));
1614 return;
1615 }
1616 }
1617 if (value.is_embedded_object()) {
1618 Push(HeapObjectAsOperand(value.embedded_object()));
1619 return;
1620 }
1621 }
1622 push(value);
1623}
1624
1625void MacroAssembler::Drop(int stack_elements) {
1626 if (stack_elements > 0) {
1627 add(esp, Immediate(stack_elements * kSystemPointerSize));
1628 }
1629}
1630
1631void MacroAssembler::Move(Register dst, Register src) {
1632 if (dst != src) {
1633 mov(dst, src);
1634 }
1635}
1636
1637void MacroAssembler::Move(Register dst, const Immediate& src) {
1638 if (!src.is_heap_number_request() && src.is_zero()) {
1639 xor_(dst, dst); // Shorter than mov of 32-bit immediate 0.
1640 } else if (src.is_external_reference()) {
1641 LoadAddress(dst, src.external_reference());
1642 } else {
1643 mov(dst, src);
1644 }
1645}
1646
1647namespace {
1648bool ShouldUsePushPopForMove(bool root_array_available,
1649 bool isolate_independent_code,
1650 const Immediate& src) {
1651 if (root_array_available) {
1652 if (src.is_external_reference() &&
1653 src.external_reference().IsIsolateFieldId()) {
1654 return true;
1655 }
1656 if (isolate_independent_code) {
1657 if (src.is_external_reference()) return true;
1658 if (src.is_embedded_object()) return true;
1659 if (src.is_heap_number_request()) return true;
1660 }
1661 }
1662 return false;
1663}
1664} // namespace
1665
1666void MacroAssembler::Move(Operand dst, const Immediate& src) {
1667 // Since there's no scratch register available, take a detour through the
1668 // stack.
1669 if (ShouldUsePushPopForMove(root_array_available(),
1670 options().isolate_independent_code, src)) {
1671 Push(src);
1672 pop(dst);
1673 } else if (src.is_embedded_object()) {
1674 mov(dst, src.embedded_object());
1675 } else {
1676 mov(dst, src);
1677 }
1678}
1679
1680void MacroAssembler::Move(Register dst, Operand src) { mov(dst, src); }
1681
1682void MacroAssembler::Move(Register dst, Handle<HeapObject> src) {
1683 if (root_array_available() && options().isolate_independent_code) {
1684 IndirectLoadConstant(dst, src);
1685 return;
1686 }
1687 mov(dst, src);
1688}
1689
1690void MacroAssembler::Move(XMMRegister dst, uint32_t src) {
1691 if (src == 0) {
1692 pxor(dst, dst);
1693 } else {
1694 unsigned cnt = base::bits::CountPopulation(src);
1695 unsigned nlz = base::bits::CountLeadingZeros32(src);
1696 unsigned ntz = base::bits::CountTrailingZeros32(src);
1697 if (nlz + cnt + ntz == 32) {
1698 pcmpeqd(dst, dst);
1699 if (ntz == 0) {
1700 psrld(dst, 32 - cnt);
1701 } else {
1702 pslld(dst, 32 - cnt);
1703 if (nlz != 0) psrld(dst, nlz);
1704 }
1705 } else {
1706 push(eax);
1707 mov(eax, Immediate(src));
1708 movd(dst, Operand(eax));
1709 pop(eax);
1710 }
1711 }
1712}
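The nlz + cnt + ntz == 32 test recognizes constants that are a single contiguous run of ones; those can be materialized from pcmpeqd (all ones) plus at most two shifts instead of a memory round-trip. In plain C++ (assuming GCC/Clang builtins; IsContiguousOnes is a made-up name):

#include <cassert>

bool IsContiguousOnes(unsigned v) {
  if (v == 0) return false;  // __builtin_clz/ctz are undefined for 0
  return __builtin_clz(v) + __builtin_popcount(v) + __builtin_ctz(v) == 32;
}

int main() {
  assert(IsContiguousOnes(0x00FFFF00u));
  assert(IsContiguousOnes(0xFFFFFFFFu));
  assert(!IsContiguousOnes(0x00FF00F0u));
}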
1713
1714void MacroAssembler::Move(XMMRegister dst, uint64_t src) {
1715 if (src == 0) {
1716 pxor(dst, dst);
1717 } else {
1718 uint32_t lower = static_cast<uint32_t>(src);
1719 uint32_t upper = static_cast<uint32_t>(src >> 32);
1720 unsigned cnt = base::bits::CountPopulation(src);
1721 unsigned nlz = base::bits::CountLeadingZeros64(src);
1722 unsigned ntz = base::bits::CountTrailingZeros64(src);
1723 if (nlz + cnt + ntz == 64) {
1724 pcmpeqd(dst, dst);
1725 if (ntz == 0) {
1726 psrlq(dst, 64 - cnt);
1727 } else {
1728 psllq(dst, 64 - cnt);
1729 if (nlz != 0) psrlq(dst, nlz);
1730 }
1731 } else if (lower == 0) {
1732 Move(dst, upper);
1733 psllq(dst, 32);
1734 } else if (CpuFeatures::IsSupported(SSE4_1)) {
1735 CpuFeatureScope scope(this, SSE4_1);
1736 push(eax);
1737 Move(eax, Immediate(lower));
1738 movd(dst, Operand(eax));
1739 if (upper != lower) {
1740 Move(eax, Immediate(upper));
1741 }
1742 pinsrd(dst, Operand(eax), 1);
1743 pop(eax);
1744 } else {
1745 push(Immediate(upper));
1746 push(Immediate(lower));
1747 movsd(dst, Operand(esp, 0));
1748 add(esp, Immediate(kDoubleSize));
1749 }
1750 }
1751}
1752
1753void MacroAssembler::PextrdPreSse41(Register dst, XMMRegister src,
1754 uint8_t imm8) {
1755 if (imm8 == 0) {
1756 Movd(dst, src);
1757 return;
1758 }
1759 // Without AVX or SSE, we can only have 64-bit values in xmm registers.
1760 // We don't have an xmm scratch register, so move the data via the stack. This
1761 // path is rarely required, so it's acceptable to be slow.
1762 DCHECK_LT(imm8, 2);
1763 AllocateStackSpace(kDoubleSize);
1764 movsd(Operand(esp, 0), src);
1765 mov(dst, Operand(esp, imm8 * kUInt32Size));
1766 add(esp, Immediate(kDoubleSize));
1767}
1768
1769void MacroAssembler::PinsrdPreSse41(XMMRegister dst, Operand src, uint8_t imm8,
1770 uint32_t* load_pc_offset) {
1771 // Without AVX or SSE, we can only have 64-bit values in xmm registers.
1772 // We don't have an xmm scratch register, so move the data via the stack. This
1773 // path is rarely required, so it's acceptable to be slow.
1774 DCHECK_LT(imm8, 2);
1775 AllocateStackSpace(kDoubleSize);
1776 // Write original content of {dst} to the stack.
1777 movsd(Operand(esp, 0), dst);
1778 // Overwrite the portion specified in {imm8}.
1779 if (src.is_reg_only()) {
1780 mov(Operand(esp, imm8 * kUInt32Size), src.reg());
1781 } else {
1782 movss(dst, src);
1783 movss(Operand(esp, imm8 * kUInt32Size), dst);
1784 }
1785 // Load back the full value into {dst}.
1786 movsd(dst, Operand(esp, 0));
1787 add(esp, Immediate(kDoubleSize));
1788}
1789
1790void MacroAssembler::Lzcnt(Register dst, Operand src) {
1791 if (CpuFeatures::IsSupported(LZCNT)) {
1792 CpuFeatureScope scope(this, LZCNT);
1793 lzcnt(dst, src);
1794 return;
1795 }
1796 Label not_zero_src;
1797 bsr(dst, src);
1798 j(not_zero, &not_zero_src, Label::kNear);
1799 mov(dst, 63); // 63^31 == 32
1800 bind(&not_zero_src);
1801 xor_(dst, Immediate(31)); // for x in [0..31], 31^x == 31-x.
1802}
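Why the bsr fallback works: bsr yields the index of the highest set bit (0..31), and 31 ^ x == 31 - x for any x in [0, 31], which is exactly the leading-zero count. Seeding dst with 63 makes the zero-input case come out as 63 ^ 31 == 32, matching lzcnt's defined result for zero. A one-line check:

#include <cassert>

int main() {
  for (int x = 0; x <= 31; ++x) assert((31 ^ x) == 31 - x);
  assert((63 ^ 31) == 32);
}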
1803
1804void MacroAssembler::Tzcnt(Register dst, Operand src) {
1805 if (CpuFeatures::IsSupported(BMI1)) {
1806 CpuFeatureScope scope(this, BMI1);
1807 tzcnt(dst, src);
1808 return;
1809 }
1810 Label not_zero_src;
1811 bsf(dst, src);
1812 j(not_zero, &not_zero_src, Label::kNear);
1813 mov(dst, 32); // The result of tzcnt is 32 if src = 0.
1814 bind(&not_zero_src);
1815}
1816
1817void MacroAssembler::Popcnt(Register dst, Operand src) {
1818 if (CpuFeatures::IsSupported(POPCNT)) {
1819 CpuFeatureScope scope(this, POPCNT);
1820 popcnt(dst, src);
1821 return;
1822 }
1823 FATAL("no POPCNT support");
1824}
1825
1826void MacroAssembler::LoadWeakValue(Register in_out, Label* target_if_cleared) {
1827 ASM_CODE_COMMENT(this);
1828 cmp(in_out, Immediate(kClearedWeakHeapObjectLower32));
1829 j(equal, target_if_cleared);
1830
1831 and_(in_out, Immediate(~kWeakHeapObjectMask));
1832}
1833
1834void MacroAssembler::EmitIncrementCounter(StatsCounter* counter, int value,
1835 Register scratch) {
1836 DCHECK_GT(value, 0);
1837 if (v8_flags.native_code_counters && counter->Enabled()) {
1838 ASM_CODE_COMMENT(this);
1839 Operand operand =
1840 ExternalReferenceAsOperand(ExternalReference::Create(counter), scratch);
1841 if (value == 1) {
1842 inc(operand);
1843 } else {
1844 add(operand, Immediate(value));
1845 }
1846 }
1847}
1848
1849void MacroAssembler::EmitDecrementCounter(StatsCounter* counter, int value,
1850 Register scratch) {
1851 DCHECK_GT(value, 0);
1852 if (v8_flags.native_code_counters && counter->Enabled()) {
1853 ASM_CODE_COMMENT(this);
1854 Operand operand =
1855 ExternalReferenceAsOperand(ExternalReference::Create(counter), scratch);
1856 if (value == 1) {
1857 dec(operand);
1858 } else {
1859 sub(operand, Immediate(value));
1860 }
1861 }
1862}
1863
1864void MacroAssembler::Check(Condition cc, AbortReason reason) {
1865 Label L;
1866 j(cc, &L);
1867 Abort(reason);
1868 // will not return here
1869 bind(&L);
1870}
1871
1872void MacroAssembler::CheckStackAlignment() {
1873 ASM_CODE_COMMENT(this);
1874 int frame_alignment = base::OS::ActivationFrameAlignment();
1875 int frame_alignment_mask = frame_alignment - 1;
1876 if (frame_alignment > kSystemPointerSize) {
1877 DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
1878 Label alignment_as_expected;
1879 test(esp, Immediate(frame_alignment_mask));
1880 j(zero, &alignment_as_expected);
1881 // Abort if stack is not aligned.
1882 int3();
1883 bind(&alignment_as_expected);
1884 }
1885}
1886
1887void MacroAssembler::AlignStackPointer() {
1888 const int kFrameAlignment = base::OS::ActivationFrameAlignment();
1889 if (kFrameAlignment > 0) {
1890 DCHECK(base::bits::IsPowerOfTwo(kFrameAlignment));
1891 DCHECK(is_int8(kFrameAlignment));
1892 and_(esp, Immediate(-kFrameAlignment));
1893 }
1894}
1895
1896void MacroAssembler::Abort(AbortReason reason) {
1897 ASM_CODE_COMMENT(this);
1898 if (v8_flags.code_comments) {
1899 RecordComment("Abort message:", SourceLocation{});
1900 RecordComment(GetAbortReason(reason), SourceLocation{});
1901 }
1902
1903 // Without debug code, save the code size and just trap.
1904 if (!v8_flags.debug_code || v8_flags.trap_on_abort) {
1905 int3();
1906 return;
1907 }
1908
1909 if (should_abort_hard()) {
1910 // We don't care if we constructed a frame. Just pretend we did.
1911 FrameScope assume_frame(this, StackFrame::NO_FRAME_TYPE);
1912 PrepareCallCFunction(1, eax);
1913 mov(Operand(esp, 0), Immediate(static_cast<int>(reason)));
1914 CallCFunction(ExternalReference::abort_with_reason(), 1);
1915 return;
1916 }
1917
1918 Move(edx, Smi::FromInt(static_cast<int>(reason)));
1919
1920 {
1921 // We don't actually want to generate a pile of code for this, so just
1922 // claim there is a stack frame, without generating one.
1923 FrameScope scope(this, StackFrame::NO_FRAME_TYPE);
1924 if (root_array_available()) {
1925 // Generate an indirect call via builtins entry table here in order to
1926 // ensure that the interpreter_entry_return_pc_offset is the same for
1927 // InterpreterEntryTrampoline and InterpreterEntryTrampolineForProfiling
1928 // when v8_flags.debug_code is enabled.
1929 Call(EntryFromBuiltinAsOperand(Builtin::kAbort));
1930 } else {
1931 CallBuiltin(Builtin::kAbort);
1932 }
1933 }
1934
1935 // will not return here
1936 int3();
1937}
1938
1939void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
1940 ASM_CODE_COMMENT(this);
1941 int frame_alignment = base::OS::ActivationFrameAlignment();
1942 if (frame_alignment != 0) {
1943 // Make stack end at alignment and make room for num_arguments words
1944 // and the original value of esp.
1945 mov(scratch, esp);
1946 AllocateStackSpace((num_arguments + 1) * kSystemPointerSize);
1947 AlignStackPointer();
1948 mov(Operand(esp, num_arguments * kSystemPointerSize), scratch);
1949 } else {
1950 AllocateStackSpace(num_arguments * kSystemPointerSize);
1951 }
1952}
1953
1954int MacroAssembler::CallCFunction(ExternalReference function, int num_arguments,
1955 SetIsolateDataSlots set_isolate_data_slots,
1956 Label* return_location) {
1957 // Note: The "CallCFunction" code comment will be generated by the other
1958 // CallCFunction method called below.
1959 // Trashing eax is ok as it will be the return value.
1960 Move(eax, Immediate(function));
1961 return CallCFunction(eax, num_arguments, set_isolate_data_slots,
1962 return_location);
1963}
1964
1965int MacroAssembler::CallCFunction(Register function, int num_arguments,
1966 SetIsolateDataSlots set_isolate_data_slots,
1967 Label* return_location) {
1968 ASM_CODE_COMMENT(this);
1969 DCHECK_LE(num_arguments, kMaxCParameters);
1970 DCHECK(has_frame());
1971 // Check stack alignment.
1972 if (v8_flags.debug_code) {
1973 CheckStackAlignment();
1974 }
1975
1976 Label get_pc;
1977
1978 if (set_isolate_data_slots == SetIsolateDataSlots::kYes) {
1979 // Save the frame pointer and PC so that the stack layout remains iterable,
1980 // even without an ExitFrame which normally exists between JS and C frames.
1981 // Find two caller-saved scratch registers.
1982 Register pc_scratch = eax;
1983 Register scratch = ecx;
1984 if (function == eax) pc_scratch = edx;
1985 if (function == ecx) scratch = edx;
1986 LoadLabelAddress(pc_scratch, &get_pc);
1987
1988 // The root array is always available in production code; it is only
1989 // unavailable in one unit test. The following code is not needed in that
1990 // test though, so we don't provide code here for the case where the root
1991 // array is not available.
1992 CHECK(root_array_available());
1993 mov(ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerPC),
1994 pc_scratch);
1995 mov(ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerFP), ebp);
1996 }
1997
1998 call(function);
1999 int call_pc_offset = pc_offset();
2000 bind(&get_pc);
2001 if (return_location) bind(return_location);
2002
2003 if (set_isolate_data_slots == SetIsolateDataSlots::kYes) {
2004 // We don't unset the PC; the FP is the source of truth.
2005 mov(ExternalReferenceAsOperand(IsolateFieldId::kFastCCallCallerFP),
2006 Immediate(0));
2007 }
2008
2009 if (base::OS::ActivationFrameAlignment() != 0) {
2010 mov(esp, Operand(esp, num_arguments * kSystemPointerSize));
2011 } else {
2012 add(esp, Immediate(num_arguments * kSystemPointerSize));
2013 }
2014
2015 return call_pc_offset;
2016}
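
// A typical calling sequence (a sketch; ext_ref stands for some
// ExternalReference to a C function taking two int arguments):
//   __ PrepareCallCFunction(2, eax);
//   __ mov(Operand(esp, 0 * kSystemPointerSize), Immediate(1));
//   __ mov(Operand(esp, 1 * kSystemPointerSize), Immediate(2));
//   __ CallCFunction(ext_ref, 2);
// The returned value is the pc offset just past the call instruction, which
// callers may use for bookkeeping such as safepoint recording.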
2017
2018void MacroAssembler::PushPC() {
2019 // Push the current PC onto the stack as "return address" via calling
2020 // the next instruction.
2021 // This does not pollute the RAS:
2022 // see https://blog.stuffedcow.net/2018/04/ras-microbenchmarks/#call0.
2023 Label get_pc;
2024 call(&get_pc);
2025 bind(&get_pc);
2026}
2027
2028void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
2029 ASM_CODE_COMMENT(this);
2030 DCHECK_IMPLIES(options().isolate_independent_code,
2031 Builtins::IsIsolateIndependentBuiltin(*code_object));
2032 Builtin builtin = Builtin::kNoBuiltinId;
2033 if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin)) {
2034 CallBuiltin(builtin);
2035 return;
2036 }
2037 DCHECK(RelocInfo::IsCodeTarget(rmode));
2038 call(code_object, rmode);
2039}
2040
2041void MacroAssembler::LoadEntryFromBuiltinIndex(Register builtin_index,
2042 Register target) {
2043 ASM_CODE_COMMENT(this);
2044 static_assert(kSystemPointerSize == 4);
2045 static_assert(kSmiShiftSize == 0);
2046 static_assert(kSmiTagSize == 1);
2047 static_assert(kSmiTag == 0);
2048
2049 // The builtin_index register contains the builtin index as a Smi.
2050 // Untagging is folded into the indexing operand below (we use
2051 // times_half_system_pointer_size instead of times_system_pointer_size since
2052 // smis are already shifted by one).
2053 mov(target,
2054 Operand(kRootRegister, builtin_index, times_half_system_pointer_size,
2055 IsolateData::builtin_entry_table_offset()));
2056}
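
// Worked example: builtin index 3 is held as the Smi 6 (3 << 1); applying
// scale factor times_half_system_pointer_size (x2) yields byte offset
// 6 * 2 = 12 == 3 * kSystemPointerSize, i.e. the entry-table slot at index 3.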
2057
2058void MacroAssembler::CallBuiltinByIndex(Register builtin_index,
2059 Register target) {
2060 ASM_CODE_COMMENT(this);
2061 LoadEntryFromBuiltinIndex(builtin_index, target);
2062 call(target);
2063}
2064
2065void MacroAssembler::CallBuiltin(Builtin builtin) {
2066 ASM_CODE_COMMENT_STRING(this, CommentForOffHeapTrampoline("call", builtin));
2067 switch (options().builtin_call_jump_mode) {
2068 case BuiltinCallJumpMode::kAbsolute: {
2069 call(BuiltinEntry(builtin), RelocInfo::OFF_HEAP_TARGET);
2070 break;
2071 }
2072 case BuiltinCallJumpMode::kPCRelative:
2073 UNREACHABLE();
2074 case BuiltinCallJumpMode::kIndirect:
2075 call(EntryFromBuiltinAsOperand(builtin));
2076 break;
2077 case BuiltinCallJumpMode::kForMksnapshot: {
2078 Handle<Code> code = isolate()->builtins()->code_handle(builtin);
2079 call(code, RelocInfo::CODE_TARGET);
2080 break;
2081 }
2082 }
2083}
2084
2085void MacroAssembler::TailCallBuiltin(Builtin builtin) {
2086 ASM_CODE_COMMENT_STRING(this,
2087 CommentForOffHeapTrampoline("tail call", builtin));
2088 switch (options().builtin_call_jump_mode) {
2089 case BuiltinCallJumpMode::kAbsolute: {
2090 jmp(BuiltinEntry(builtin), RelocInfo::OFF_HEAP_TARGET);
2091 break;
2092 }
2093 case BuiltinCallJumpMode::kPCRelative:
2094 UNREACHABLE();
2095 case BuiltinCallJumpMode::kIndirect:
2096 jmp(EntryFromBuiltinAsOperand(builtin));
2097 break;
2098 case BuiltinCallJumpMode::kForMksnapshot: {
2099 Handle<Code> code = isolate()->builtins()->code_handle(builtin);
2100 jmp(code, RelocInfo::CODE_TARGET);
2101 break;
2102 }
2103 }
2104}
2105
2106Operand MacroAssembler::EntryFromBuiltinAsOperand(Builtin builtin) {
2107 ASM_CODE_COMMENT(this);
2108 return Operand(kRootRegister, IsolateData::BuiltinEntrySlotOffset(builtin));
2109}
2110
2111void MacroAssembler::LoadCodeInstructionStart(Register destination,
2112 Register code_object,
2113 CodeEntrypointTag tag) {
2114 ASM_CODE_COMMENT(this);
2115 mov(destination, FieldOperand(code_object, Code::kInstructionStartOffset));
2116}
2117
2118void MacroAssembler::CallCodeObject(Register code_object) {
2119 LoadCodeInstructionStart(code_object, code_object);
2120 call(code_object);
2121}
2122
2123void MacroAssembler::JumpCodeObject(Register code_object, JumpMode jump_mode) {
2124 LoadCodeInstructionStart(code_object, code_object);
2125 switch (jump_mode) {
2126 case JumpMode::kJump:
2127 jmp(code_object);
2128 return;
2129 case JumpMode::kPushAndReturn:
2130 push(code_object);
2131 ret(0);
2132 return;
2133 }
2134}
2135
2136#ifdef V8_ENABLE_LEAPTIERING
2137void MacroAssembler::LoadEntrypointFromJSDispatchTable(
2138 Register destination, Register dispatch_handle) {
2139 // TODO(olivf): If there ever is a caller that has a spare register here, we
2140 // could write this without needing an additional scratch register.
2141 DCHECK(AreAliased(destination, dispatch_handle));
2142
2143 static_assert(kJSDispatchHandleShift == 0);
2144 shl(dispatch_handle, kJSDispatchTableEntrySizeLog2);
2145
2146 DCHECK(!AreAliased(dispatch_handle, eax));
2147 movd(xmm0, eax);
2148 LoadAddress(eax, ExternalReference::js_dispatch_table_address());
2149 mov(destination, Operand(eax, dispatch_handle, times_1,
2150 JSDispatchEntry::kEntrypointOffset));
2151 movd(eax, xmm0);
2152}
2153#endif // V8_ENABLE_LEAPTIERING
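
// Worked example for LoadEntrypointFromJSDispatchTable above: a dispatch
// handle h selects the table entry at byte offset
// h << kJSDispatchTableEntrySizeLog2, so the entrypoint is loaded from
// table_base + (h << kJSDispatchTableEntrySizeLog2) +
// JSDispatchEntry::kEntrypointOffset. Note that eax is spilled to xmm0
// rather than to the stack, presumably to leave esp and memory untouched.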
2154
2155void MacroAssembler::CallJSFunction(Register function_object,
2156 uint16_t argument_count) {
2157#if V8_ENABLE_LEAPTIERING
2158 static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
2159 mov(ecx, FieldOperand(function_object, JSFunction::kDispatchHandleOffset));
2160 LoadEntrypointFromJSDispatchTable(ecx, ecx);
2161 call(ecx);
2162#else
2163 static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
2164 mov(ecx, FieldOperand(function_object, JSFunction::kCodeOffset));
2165 CallCodeObject(ecx);
2166#endif // V8_ENABLE_LEAPTIERING
2167}
2168
2169void MacroAssembler::JumpJSFunction(Register function_object,
2170 JumpMode jump_mode) {
2171 static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
2172#if V8_ENABLE_LEAPTIERING
2173 mov(ecx, FieldOperand(function_object, JSFunction::kDispatchHandleOffset));
2174 LoadEntrypointFromJSDispatchTable(ecx, ecx);
2175 jmp(ecx);
2176#else
2177 mov(ecx, FieldOperand(function_object, JSFunction::kCodeOffset));
2178 JumpCodeObject(ecx, jump_mode);
2179#endif // V8_ENABLE_LEAPTIERING
2180}
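
// With leaptiering, Call/JumpJSFunction always enter a function through its
// dispatch-table entrypoint, so tiering up only needs to update the table
// entry; without it, they load and enter the JSFunction's Code object
// directly.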
2181
2182#ifdef V8_ENABLE_WEBASSEMBLY
2183
2184void MacroAssembler::ResolveWasmCodePointer(Register target) {
2185 ASM_CODE_COMMENT(this);
2186 static_assert(!V8_ENABLE_SANDBOX_BOOL);
2187 Register scratch = target == eax ? ebx : eax;
2188 // TODO(sroettger): the load from table[target] is possible with a single
2189 // instruction.
2190 Push(scratch);
2191 Move(scratch, Immediate(ExternalReference::wasm_code_pointer_table()));
2192 static_assert(sizeof(wasm::WasmCodePointerTableEntry) == 4);
2193 Move(target, Operand(scratch, target, ScaleFactor::times_4, 0));
2194 Pop(scratch);
2195}
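
// Worked example: with 4-byte table entries, code-pointer index i resolves
// to the address loaded from wasm_code_pointer_table + i * 4; the Push/Pop
// pair preserves the borrowed scratch register across the lookup.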
2196
2197void MacroAssembler::CallWasmCodePointer(Register target,
2198 CallJumpMode call_jump_mode) {
2199 ResolveWasmCodePointer(target);
2200 if (call_jump_mode == CallJumpMode::kTailCall) {
2201 jmp(target);
2202 } else {
2203 call(target);
2204 }
2205}
2206
2207#endif
2208
2209void MacroAssembler::Jump(const ExternalReference& reference) {
2210 DCHECK(root_array_available());
2211 jmp(Operand(kRootRegister, RootRegisterOffsetForExternalReferenceTableEntry(
2212 isolate(), reference)));
2213}
2214
2215void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
2216 DCHECK_IMPLIES(options().isolate_independent_code,
2217 Builtins::IsIsolateIndependentBuiltin(*code_object));
2218 Builtin builtin = Builtin::kNoBuiltinId;
2219 if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin)) {
2220 TailCallBuiltin(builtin);
2221 return;
2222 }
2223 DCHECK(RelocInfo::IsCodeTarget(rmode));
2224 jmp(code_object, rmode);
2225}
2226
2227void MacroAssembler::LoadLabelAddress(Register dst, Label* lbl) {
2228 // Computes the address of a label using position-independent code.
2229 // The instruction delta 10 is the difference between the PC value we
2230 // obtain (pushed by PushPC) and the value we actually need, which is
2231 // the address just after the lea instruction itself.
2232
2233
2234 // The byte distance between the acquired PC and the end of the sequence.
2235 const int kInsDelta = 10;
2236 PushPC();
2237#ifdef DEBUG
2238 const int kStart = pc_offset();
2239#endif
2240 pop(dst);
2241 add(dst, Immediate(kInsDelta)); // point to after next instruction
2242 lea(dst, dst, lbl);
2243 DCHECK(pc_offset() - kStart == kInsDelta);
2244}
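
// Expected encoding of the three instructions after PushPC (a sketch; the
// sizes assume the imm8/disp32 forms emitted here):
//   pop dst                  1 byte
//   add dst, 10              3 bytes (opcode, modrm, imm8)
//   lea dst, [dst + label]   6 bytes (opcode, modrm, disp32)
// for a total of kInsDelta == 10 bytes, which the DCHECK above verifies.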
2245
2246void MacroAssembler::MemoryChunkHeaderFromObject(Register object,
2247 Register header) {
2248 constexpr intptr_t alignment_mask =
2249 MemoryChunk::GetAlignmentMaskForAssembler();
2250 if (header == object) {
2251 and_(header, Immediate(~alignment_mask));
2252 } else {
2253 mov(header, Immediate(~alignment_mask));
2254 and_(header, object);
2255 }
2256}
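
// Worked example: if, say, chunks were 256 KiB aligned, the alignment mask
// would be 0x3FFFF, and "object & ~0x3FFFF" would round any address inside
// the chunk down to the MemoryChunk header at the chunk base.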
2257
2258void MacroAssembler::CheckPageFlag(Register object, Register scratch, int mask,
2259 Condition cc, Label* condition_met,
2260 Label::Distance condition_met_distance) {
2261 ASM_CODE_COMMENT(this);
2262 DCHECK(cc == zero || cc == not_zero);
2263 MemoryChunkHeaderFromObject(object, scratch);
2264 if (mask < (1 << kBitsPerByte)) {
2265 test_b(Operand(scratch, MemoryChunk::FlagsOffset()), Immediate(mask));
2266 } else {
2267 test(Operand(scratch, MemoryChunk::FlagsOffset()), Immediate(mask));
2268 }
2269 j(cc, condition_met, condition_met_distance);
2270}
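
// A typical use (a sketch; the mask is one of the MemoryChunk flag masks):
//   __ CheckPageFlag(object, scratch,
//                    MemoryChunk::kPointersFromHereAreInterestingMask,
//                    not_zero, &needs_write_barrier);
// Masks that fit in a byte use the shorter test_b encoding against the low
// byte of the flags word.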
2271
2272void MacroAssembler::ComputeCodeStartAddress(Register dst) {
2273 ASM_CODE_COMMENT(this);
2274 // In order to get the address of the current instruction, we first need
2275 // to use a call and then use a pop, thus pushing the return address to
2276 // the stack and then popping it into the register.
2277 Label current;
2278 call(&current);
2279 int pc = pc_offset();
2280 bind(&current);
2281 pop(dst);
2282 if (pc != 0) {
2283 sub(dst, Immediate(pc));
2284 }
2285}
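
// Worked example: if the label above is bound at pc_offset() == 0x40, the
// popped return address equals code_start + 0x40, and subtracting 0x40
// recovers code_start itself.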
2286
2287void MacroAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
2288 DeoptimizeKind kind, Label* ret,
2289 Label*) {
2290 ASM_CODE_COMMENT(this);
2291#if V8_ENABLE_WEBASSEMBLY
2292 if (options().is_wasm) {
2293 CHECK(v8_flags.wasm_deopt);
2294 wasm_call(static_cast<Address>(target), RelocInfo::WASM_STUB_CALL);
2295#else
2296 // For balance.
2297 if (false) {
2298#endif // V8_ENABLE_WEBASSEMBLY
2299 } else {
2300 CallBuiltin(target);
2301 }
2302 DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
2303 (kind == DeoptimizeKind::kLazy) ? Deoptimizer::kLazyDeoptExitSize
2304 : Deoptimizer::kEagerDeoptExitSize);
2305}
2306
2307void MacroAssembler::Trap() { int3(); }
2308void MacroAssembler::DebugBreak() { int3(); }
2309
2310// Calls an API function. Allocates HandleScope, extracts returned value
2311// from handle and propagates exceptions. Clobbers C argument registers
2312// and C caller-saved registers. Restores context. On return removes
2313// (*argc_operand + slots_to_drop_on_return) * kSystemPointerSize
2314// (GCed, includes the call JS arguments space and the additional space
2315// allocated for the fast call).
2316void CallApiFunctionAndReturn(MacroAssembler* masm, bool with_profiling,
2317 Register function_address,
2318 ExternalReference thunk_ref, Register thunk_arg,
2319 int slots_to_drop_on_return,
2320 MemOperand* argc_operand,
2321 MemOperand return_value_operand) {
2322 ASM_CODE_COMMENT(masm);
2323
2324 using ER = ExternalReference;
2325
2326 Isolate* isolate = masm->isolate();
2327 MemOperand next_mem_op = __ ExternalReferenceAsOperand(
2328 ER::handle_scope_next_address(isolate), no_reg);
2329 MemOperand limit_mem_op = __ ExternalReferenceAsOperand(
2330 ER::handle_scope_limit_address(isolate), no_reg);
2331 MemOperand level_mem_op = __ ExternalReferenceAsOperand(
2332 ER::handle_scope_level_address(isolate), no_reg);
2333
2334 Register return_value = eax;
2335 DCHECK(function_address == edx || function_address == eax);
2336 // Use scratch as the "opposite" of the function_address register.
2337 Register scratch = function_address == edx ? ecx : edx;
2338
2339 // Allocate HandleScope in callee-saved registers.
2340 // We will need to restore the HandleScope after the call to the API function;
2341 // by allocating it in callee-saved registers it is preserved across the C call.
2342 Register prev_next_address_reg = esi;
2343 Register prev_limit_reg = edi;
2344
2345 DCHECK(!AreAliased(return_value, scratch, prev_next_address_reg,
2346 prev_limit_reg));
2347 // function_address and thunk_arg might overlap, but this function must not
2348 // corrupt them until the call is made (i.e. overlap with return_value is
2349 // fine).
2350 DCHECK(!AreAliased(function_address, // incoming parameters
2351 scratch, prev_next_address_reg, prev_limit_reg));
2352 DCHECK(!AreAliased(thunk_arg, // incoming parameters
2353 scratch, prev_next_address_reg, prev_limit_reg));
2354 {
2355 ASM_CODE_COMMENT_STRING(masm,
2356 "Allocate HandleScope in callee-save registers.");
2357 __ add(level_mem_op, Immediate(1));
2358 __ mov(prev_next_address_reg, next_mem_op);
2359 __ mov(prev_limit_reg, limit_mem_op);
2360 }
2361
2362 Label profiler_or_side_effects_check_enabled, done_api_call;
2363 if (with_profiling) {
2364 __ RecordComment("Check if profiler or side effects check is enabled");
2365 __ cmpb(__ ExternalReferenceAsOperand(IsolateFieldId::kExecutionMode),
2366 Immediate(0));
2367 __ j(not_zero, &profiler_or_side_effects_check_enabled);
2368#ifdef V8_RUNTIME_CALL_STATS
2369 __ RecordComment("Check if RCS is enabled");
2370 __ Move(scratch, Immediate(ER::address_of_runtime_stats_flag()));
2371 __ cmp(Operand(scratch, 0), Immediate(0));
2372 __ j(not_zero, &profiler_or_side_effects_check_enabled);
2373#endif // V8_RUNTIME_CALL_STATS
2374 }
2375
2376 __ RecordComment("Call the api function directly.");
2377 __ call(function_address);
2378 __ bind(&done_api_call);
2379
2380 __ RecordComment("Load the value from ReturnValue");
2381 __ mov(return_value, return_value_operand);
2382
2383 Label propagate_exception;
2384 Label delete_allocated_handles;
2385 Label leave_exit_frame;
2386
2387 {
2388 ASM_CODE_COMMENT_STRING(
2389 masm,
2390 "No more valid handles (the result handle was the last one)."
2391 "Restore previous handle scope.");
2392 __ mov(next_mem_op, prev_next_address_reg);
2393 __ sub(level_mem_op, Immediate(1));
2394 __ Assert(above_equal, AbortReason::kInvalidHandleScopeLevel);
2395 __ cmp(prev_limit_reg, limit_mem_op);
2396 __ j(not_equal, &delete_allocated_handles);
2397 }
2398
2399 __ RecordComment("Leave the API exit frame.");
2400 __ bind(&leave_exit_frame);
2401 Register argc_reg = prev_limit_reg;
2402 if (argc_operand != nullptr) {
2403 __ mov(argc_reg, *argc_operand);
2404 }
2405 __ LeaveExitFrame(scratch);
2406
2407 {
2408 ASM_CODE_COMMENT_STRING(masm,
2409 "Check if the function scheduled an exception.");
2410 __ mov(scratch, __ ExternalReferenceAsOperand(
2411 ER::exception_address(isolate), no_reg));
2412 __ CompareRoot(scratch, RootIndex::kTheHoleValue);
2413 __ j(not_equal, &propagate_exception);
2414 }
2415
2416 __ AssertJSAny(return_value, scratch,
2417 AbortReason::kAPICallReturnedInvalidObject);
2418
2419 if (argc_operand == nullptr) {
2420 DCHECK_NE(slots_to_drop_on_return, 0);
2421 __ ret(slots_to_drop_on_return * kSystemPointerSize);
2422 } else {
2423 __ pop(scratch);
2424 // {argc_operand} was loaded into {argc_reg} above.
2425 __ lea(esp, Operand(esp, argc_reg, times_system_pointer_size,
2426 slots_to_drop_on_return * kSystemPointerSize));
2427 __ jmp(scratch);
2428 }
2429
2430 if (with_profiling) {
2431 ASM_CODE_COMMENT_STRING(masm, "Call the api function via thunk wrapper.");
2432 __ bind(&profiler_or_side_effects_check_enabled);
2433 // Additional parameter is the address of the actual callback function.
2434 if (thunk_arg.is_valid()) {
2435 MemOperand thunk_arg_mem_op = __ ExternalReferenceAsOperand(
2436 IsolateFieldId::kApiCallbackThunkArgument);
2437 __ mov(thunk_arg_mem_op, thunk_arg);
2438 }
2439 __ Move(scratch, Immediate(thunk_ref));
2440 __ call(scratch);
2441 __ jmp(&done_api_call);
2442 }
2443
2444 __ RecordComment("An exception was thrown. Propagate it.");
2445 __ bind(&propagate_exception);
2446 __ TailCallRuntime(Runtime::kPropagateException);
2447
2448 {
2449 ASM_CODE_COMMENT_STRING(
2450 masm, "HandleScope limit has changed. Delete allocated extensions.");
2451 __ bind(&delete_allocated_handles);
2452 __ mov(limit_mem_op, prev_limit_reg);
2453 // Save the return value in a callee-save register.
2454 Register saved_result = prev_limit_reg;
2455 __ mov(saved_result, return_value);
2456 __ Move(scratch, Immediate(ER::isolate_address()));
2457 __ mov(Operand(esp, 0), scratch);
2458 __ Move(scratch, Immediate(ER::delete_handle_scope_extensions()));
2459 __ call(scratch);
2460 __ mov(return_value, saved_result);
2461 __ jmp(&leave_exit_frame);
2462 }
2463}
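
// In summary: the fast path calls the API function directly; when profiling
// or side-effect checking is active, the call detours through the thunk
// wrapper instead. Both paths rejoin at done_api_call to restore the
// HandleScope, leave the exit frame, and return, dropping either a fixed
// number of slots or the dynamic *argc_operand count.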
2464
2465// SMI related operations
2466
2467void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
2468 AssertSmi(smi1);
2469 AssertSmi(smi2);
2470 cmp(smi1, smi2);
2471}
2472
2473void MacroAssembler::SmiCompare(Register dst, Tagged<Smi> src) {
2474 AssertSmi(dst);
2475 cmp(dst, Immediate(src));
2476}
2477
2478void MacroAssembler::SmiCompare(Register dst, Operand src) {
2479 AssertSmi(dst);
2480 AssertSmi(src);
2481 cmp(dst, src);
2482}
2483
2484void MacroAssembler::SmiCompare(Operand dst, Register src) {
2485 AssertSmi(dst);
2486 AssertSmi(src);
2487 cmp(dst, src);
2488}
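
// On ia32 a Smi stores its value shifted left by one with a zero tag bit
// (kSmiTagSize == 1, kSmiShiftSize == 0), so Smi::FromInt(5) is the word 10,
// and a plain cmp of two tagged words orders Smis exactly like their untagged
// values.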
2489
2490} // namespace internal
2491} // namespace v8
2492
2493#undef __
2494
2495#endif // V8_TARGET_ARCH_IA32