v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
maglev-assembler-x64-inl.h
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_MAGLEV_X64_MAGLEV_ASSEMBLER_X64_INL_H_
#define V8_MAGLEV_X64_MAGLEV_ASSEMBLER_X64_INL_H_

#include <tuple>
#include <type_traits>
#include <utility>

#include "src/common/globals.h"
#include "src/maglev/maglev-assembler.h"
// ... (the remaining codegen/maglev includes were elided in this listing)

namespace v8 {
namespace internal {
namespace maglev {

constexpr Condition ConditionForFloat64(Operation operation) {
  switch (operation) {
    case Operation::kEqual:
    case Operation::kStrictEqual:
      return equal;
    case Operation::kLessThan:
      return below;
    case Operation::kLessThanOrEqual:
      return below_equal;
    case Operation::kGreaterThan:
      return above;
    case Operation::kGreaterThanOrEqual:
      return above_equal;
    default:
      UNREACHABLE();
  }
}

constexpr Condition ConditionForNaN() { return parity_even; }
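
// Note: Ucomisd is an unordered compare that sets ZF/PF/CF the way an
// unsigned integer compare would, which is why ConditionForFloat64 maps
// kLessThan to `below` (CF) rather than `less` (SF/OF), and why
// ConditionForNaN is `parity_even`: PF is only set for the UNORDERED result,
// i.e. when one operand is NaN. See the flag table in
// CompareFloat64AndJumpIf below.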

constexpr ScaleFactor ScaleFactorFromInt(int n) {
  switch (n) {
    case 1:
      return times_1;
    case 2:
      return times_2;
    case 4:
      return times_4;
    case 8:
      return times_8;
    default:
      UNREACHABLE();
  }
}

class MaglevAssembler::TemporaryRegisterScope
    : public TemporaryRegisterScopeBase<TemporaryRegisterScope> {
  using Base = TemporaryRegisterScopeBase<TemporaryRegisterScope>;

 public:
  struct SavedData : public Base::SavedData {
    // ... (scratch-register bookkeeping fields elided in this listing)
  };

  // ... (single-argument constructor elided in this listing)
  TemporaryRegisterScope(MaglevAssembler* masm, const SavedData& saved_data)
      : Base(masm, saved_data) /* further initializers elided */ {}

  // ... (scratch acquire/reset helpers elided in this listing)

 private:
  // ... (members elided in this listing)
};

inline MapCompare::MapCompare(MaglevAssembler* masm, Register object,
                              size_t map_count)
    : masm_(masm), object_(object), map_count_(map_count) {
  if (map_count_ != 1) {
    // ... (elided in this listing: acquires a temporary register as map_ and
    // loads the object's map into it, for use by Generate() and GetMap())
  }
}

void MapCompare::Generate(Handle<Map> map, Condition cond, Label* if_true,
                          Label::Distance distance) {
  if (map_count_ == 1) {
    masm_->Cmp(FieldOperand(object_, HeapObject::kMapOffset), map);
    masm_->JumpIf(cond, if_true, distance);
  } else {
    masm_->CompareTaggedAndJumpIf(map_, map, cond, if_true, distance);
  }
}

inline Register MapCompare::GetMap() {
  if (map_count_ == 1) {
    // Load the map; the object is in register_for_map_compare_. This
    // avoids loading the map in the fast path of CheckMapsWithMigration.
    masm_->LoadMap(kScratchRegister, object_);
    return kScratchRegister;
  } else {
    return map_;
  }
}

int MapCompare::TemporaryCount(size_t map_count) {
  return map_count == 1 ? 0 : 1;
}

namespace detail {

template <typename... Args>
struct PushAllHelper;

template <>
struct PushAllHelper<> {
  static void Push(MaglevAssembler* masm) {}
  static void PushReverse(MaglevAssembler* masm) {}
};

inline void PushInput(MaglevAssembler* masm, const Input& input) {
  if (input.operand().IsConstant()) {
    input.node()->LoadToRegister(masm, kScratchRegister);
    masm->Push(kScratchRegister);
  } else {
    // TODO(leszeks): Consider special casing the value. (Toon: could possibly
    // be done through Input directly?)
    const compiler::AllocatedOperand& operand =
        compiler::AllocatedOperand::cast(input.operand());

    if (operand.IsRegister()) {
      masm->Push(operand.GetRegister());
    } else {
      DCHECK(operand.IsStackSlot());
      masm->Push(masm->GetStackSlot(operand));
    }
  }
}

template <typename T, typename... Args>
inline void PushIterator(MaglevAssembler* masm, base::iterator_range<T> range,
                         Args... args) {
  for (auto iter = range.begin(), end = range.end(); iter != end; ++iter) {
    masm->Push(*iter);
  }
  PushAllHelper<Args...>::Push(masm, args...);
}

template <typename T, typename... Args>
inline void PushIteratorReverse(MaglevAssembler* masm,
                                base::iterator_range<T> range, Args... args) {
  PushAllHelper<Args...>::PushReverse(masm, args...);
  for (auto iter = range.rbegin(), end = range.rend(); iter != end; ++iter) {
    masm->Push(*iter);
  }
}

template <typename... Args>
struct PushAllHelper<Input, Args...> {
  static void Push(MaglevAssembler* masm, const Input& arg, Args... args) {
    PushInput(masm, arg);
    PushAllHelper<Args...>::Push(masm, args...);
  }
  static void PushReverse(MaglevAssembler* masm, const Input& arg,
                          Args... args) {
    PushAllHelper<Args...>::PushReverse(masm, args...);
    PushInput(masm, arg);
  }
};
template <typename Arg, typename... Args>
struct PushAllHelper<Arg, Args...> {
  static void Push(MaglevAssembler* masm, Arg arg, Args... args) {
    if constexpr (is_iterator_range<Arg>::value) {
      PushIterator(masm, arg, args...);
    } else {
      masm->MacroAssembler::Push(arg);
      PushAllHelper<Args...>::Push(masm, args...);
    }
  }
  static void PushReverse(MaglevAssembler* masm, Arg arg, Args... args) {
    if constexpr (is_iterator_range<Arg>::value) {
      PushIteratorReverse(masm, arg, args...);
    } else {
      PushAllHelper<Args...>::PushReverse(masm, args...);
      masm->Push(arg);
    }
  }
};
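
// Note how PushReverse recurses into the *remaining* arguments before
// pushing the current one (and walks ranges back to front), so the whole
// argument list lands on the stack in reverse order, while Push emits the
// current argument first and then recurses.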

}  // namespace detail

template <typename... T>
void MaglevAssembler::Push(T... vals) {
  detail::PushAllHelper<T...>::Push(this, vals...);
}

template <typename... T>
void MaglevAssembler::PushReverse(T... vals) {
  detail::PushAllHelper<T...>::PushReverse(this, vals...);
}

inline void MaglevAssembler::BindJumpTarget(Label* label) {
  MacroAssembler::BindJumpTarget(label);
}

inline void MaglevAssembler::BindBlock(BasicBlock* block) {
  bind(block->label());
}

inline void MaglevAssembler::SmiTagInt32AndSetFlags(Register dst,
                                                    Register src) {
  Move(dst, src);
  if (SmiValuesAre31Bits()) {
    addl(dst, dst);
  } else {
    SmiTag(dst);
  }
}
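
// With 31-bit smis, tagging is a logical shift left by one, so `addl dst,
// dst` both tags the value and sets the overflow flag when the input does
// not fit in the smi range; CheckInt32IsSmi below relies on the same trick.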

inline void MaglevAssembler::CheckInt32IsSmi(Register obj, Label* fail,
                                             Register scratch) {
  DCHECK(!SmiValuesAre32Bits());

  if (scratch == Register::no_reg()) {
    scratch = kScratchRegister;
  }
  movl(scratch, obj);
  addl(scratch, scratch);
  JumpIf(kOverflow, fail);
}

inline void MaglevAssembler::SmiAddConstant(Register dst, Register src,
                                            int value, Label* fail,
                                            Label::Distance distance) {
  AssertSmi(src);
  Move(dst, src);
  if (value != 0) {
    if (SmiValuesAre31Bits()) {
      addl(dst, Immediate(Smi::FromInt(value)));
    } else {
      DCHECK(SmiValuesAre32Bits());
      Move(kScratchRegister, Smi::FromInt(value));
      addq(dst, kScratchRegister);
    }
    JumpIf(kOverflow, fail, distance);
  }
}

inline void MaglevAssembler::SmiSubConstant(Register dst, Register src,
                                            int value, Label* fail,
                                            Label::Distance distance) {
  AssertSmi(src);
  Move(dst, src);
  if (value != 0) {
    if (SmiValuesAre31Bits()) {
      subl(dst, Immediate(Smi::FromInt(value)));
    } else {
      DCHECK(SmiValuesAre32Bits());
      Move(kScratchRegister, Smi::FromInt(value));
      subq(dst, kScratchRegister);
    }
    JumpIf(kOverflow, fail, distance);
  }
}

inline void MaglevAssembler::MoveHeapNumber(Register dst, double value) {
  movq_heap_number(dst, value);
}

inline Condition MaglevAssembler::IsRootConstant(Input input,
                                                 RootIndex root_index) {
  if (input.operand().IsRegister()) {
    CompareRoot(ToRegister(input), root_index);
  } else {
    DCHECK(input.operand().IsStackSlot());
    CompareRoot(ToMemOperand(input), root_index);
  }
  return equal;
}

inline Register MaglevAssembler::GetFramePointer() { return rbp; }

inline MemOperand MaglevAssembler::GetStackSlot(
    const compiler::AllocatedOperand& operand) {
  return MemOperand(rbp, GetFramePointerOffsetForStackSlot(operand));
}

inline MemOperand MaglevAssembler::ToMemOperand(
    const compiler::InstructionOperand& operand) {
  return GetStackSlot(compiler::AllocatedOperand::cast(operand));
}

inline MemOperand MaglevAssembler::ToMemOperand(const ValueLocation& location) {
  return ToMemOperand(location.operand());
}

inline void MaglevAssembler::BuildTypedArrayDataPointer(Register data_pointer,
                                                        Register object) {
  DCHECK_NE(data_pointer, object);
  LoadExternalPointerField(
      data_pointer, FieldOperand(object, JSTypedArray::kExternalPointerOffset));
  if (JSTypedArray::kMaxSizeInHeap == 0) return;

  Register base = kScratchRegister;
  if (COMPRESS_POINTERS_BOOL) {
    movl(base, FieldOperand(object, JSTypedArray::kBasePointerOffset));
  } else {
    movq(base, FieldOperand(object, JSTypedArray::kBasePointerOffset));
  }
  addq(data_pointer, base);
}
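
// The typed-array data pointer is computed as external_pointer + base: for
// off-heap buffers the base pointer field is Smi zero, so the addq is a
// no-op, while for on-heap arrays it rebases the offset-like external
// pointer onto the (movable) heap object.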

inline MemOperand MaglevAssembler::TypedArrayElementOperand(
    Register data_pointer, Register index, int element_size) {
  return Operand(data_pointer, index, ScaleFactorFromInt(element_size), 0);
}

inline MemOperand MaglevAssembler::DataViewElementOperand(Register data_pointer,
                                                          Register index) {
  return Operand(data_pointer, index, times_1, 0);
}

inline void MaglevAssembler::LoadTaggedFieldByIndex(Register result,
                                                    Register object,
                                                    Register index, int scale,
                                                    int offset) {
  LoadTaggedField(
      result, FieldOperand(object, index, ScaleFactorFromInt(scale), offset));
}

inline void MaglevAssembler::LoadBoundedSizeFromObject(Register result,
                                                       Register object,
                                                       int offset) {
  movq(result, FieldOperand(object, offset));
#ifdef V8_ENABLE_SANDBOX
  shrq(result, Immediate(kBoundedSizeShift));
#endif  // V8_ENABLE_SANDBOX
}

inline void MaglevAssembler::LoadExternalPointerField(Register result,
                                                      Operand operand) {
#ifdef V8_ENABLE_SANDBOX
  LoadSandboxedPointerField(result, operand);
#else
  movq(result, operand);
#endif
}

void MaglevAssembler::LoadFixedArrayElement(Register result, Register array,
                                            Register index) {
  if (v8_flags.debug_code) {
    AssertObjectType(array, FIXED_ARRAY_TYPE, AbortReason::kUnexpectedValue);
    CompareInt32AndAssert(index, 0, kUnsignedGreaterThanEqual,
                          AbortReason::kUnexpectedNegativeValue);
  }
  LoadTaggedFieldByIndex(result, array, index, kTaggedSize,
                         OFFSET_OF_DATA_START(FixedArray));
}

inline void MaglevAssembler::LoadTaggedFieldWithoutDecompressing(
    Register result, Register object, int offset) {
  MacroAssembler::LoadTaggedFieldWithoutDecompressing(
      result, FieldMemOperand(object, offset));
}

void MaglevAssembler::LoadFixedArrayElementWithoutDecompressing(
    Register result, Register array, Register index) {
  if (v8_flags.debug_code) {
    AssertObjectType(array, FIXED_ARRAY_TYPE, AbortReason::kUnexpectedValue);
    CompareInt32AndAssert(index, 0, kUnsignedGreaterThanEqual,
                          AbortReason::kUnexpectedNegativeValue);
  }
  LoadTaggedFieldWithoutDecompressing(
      result, FieldOperand(array, index, times_tagged_size,
                           OFFSET_OF_DATA_START(FixedArray)));
}

inline void MaglevAssembler::LoadFixedDoubleArrayElement(DoubleRegister result,
                                                         Register array,
                                                         Register index) {
  if (v8_flags.debug_code) {
    AssertObjectType(array, FIXED_DOUBLE_ARRAY_TYPE,
                     AbortReason::kUnexpectedValue);
    CompareInt32AndAssert(index, 0, kUnsignedGreaterThanEqual,
                          AbortReason::kUnexpectedNegativeValue);
  }
  Movsd(result, FieldOperand(array, index, times_8,
                             OFFSET_OF_DATA_START(FixedDoubleArray)));
}

inline void MaglevAssembler::StoreFixedDoubleArrayElement(
    Register array, Register index, DoubleRegister value) {
  Movsd(FieldOperand(array, index, times_8,
                     OFFSET_OF_DATA_START(FixedDoubleArray)),
        value);
}

inline void MaglevAssembler::LoadSignedField(Register result, Operand operand,
                                             int size) {
  if (size == 1) {
    movsxbl(result, operand);
  } else if (size == 2) {
    movsxwl(result, operand);
  } else {
    DCHECK_EQ(size, 4);
    movl(result, operand);
  }
}

inline void MaglevAssembler::LoadUnsignedField(Register result, Operand operand,
                                               int size) {
  if (size == 1) {
    movzxbl(result, operand);
  } else if (size == 2) {
    movzxwl(result, operand);
  } else {
    DCHECK_EQ(size, 4);
    movl(result, operand);
  }
}

inline void MaglevAssembler::SetSlotAddressForTaggedField(Register slot_reg,
                                                          Register object,
                                                          int offset) {
  leaq(slot_reg, FieldOperand(object, offset));
}
inline void MaglevAssembler::SetSlotAddressForFixedArrayElement(
    Register slot_reg, Register object, Register index) {
  leaq(slot_reg, FieldOperand(object, index, times_tagged_size,
                              OFFSET_OF_DATA_START(FixedArray)));
}

inline void MaglevAssembler::StoreTaggedFieldNoWriteBarrier(Register object,
                                                            int offset,
                                                            Register value) {
  MacroAssembler::StoreTaggedField(FieldOperand(object, offset), value);
}

inline void MaglevAssembler::StoreFixedArrayElementNoWriteBarrier(
    Register array, Register index, Register value) {
  MacroAssembler::StoreTaggedField(
      FieldOperand(array, index, times_tagged_size,
                   OFFSET_OF_DATA_START(FixedArray)),
      value);
}

inline void MaglevAssembler::StoreTaggedSignedField(Register object, int offset,
                                                    Register value) {
  AssertSmi(value);
  MacroAssembler::StoreTaggedField(FieldOperand(object, offset), value);
}

inline void MaglevAssembler::StoreTaggedSignedField(Register object, int offset,
                                                    Tagged<Smi> value) {
  MacroAssembler::StoreTaggedSignedField(FieldOperand(object, offset), value);
}

inline void MaglevAssembler::StoreInt32Field(Register object, int offset,
                                             int32_t value) {
  movl(FieldOperand(object, offset), Immediate(value));
}

inline void MaglevAssembler::StoreField(Operand operand, Register value,
                                        int size) {
  DCHECK(size == 1 || size == 2 || size == 4);
  if (size == 1) {
    movb(operand, value);
  } else if (size == 2) {
    movw(operand, value);
  } else {
    DCHECK_EQ(size, 4);
    movl(operand, value);
  }
}

#ifdef V8_ENABLE_SANDBOX

inline void MaglevAssembler::StoreTrustedPointerFieldNoWriteBarrier(
    Register object, int offset, Register value) {
  MacroAssembler::StoreTrustedPointerField(FieldOperand(object, offset),
                                           value);
}

#endif  // V8_ENABLE_SANDBOX

inline void MaglevAssembler::ReverseByteOrder(Register value, int size) {
  if (size == 2) {
    bswapl(value);
    sarl(value, Immediate(16));
  } else if (size == 4) {
    bswapl(value);
  } else {
    DCHECK_EQ(size, 1);
  }
}
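
// For a two-byte value, bswapl moves the interesting bytes into the top half
// of the 32-bit register; the arithmetic shift right by 16 brings them back
// down while sign-extending the result. A single byte needs no swap at all.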

inline MemOperand MaglevAssembler::StackSlotOperand(StackSlot stack_slot) {
  return MemOperand(rbp, stack_slot.index);
}

inline void MaglevAssembler::IncrementInt32(Register reg) { incl(reg); }

inline void MaglevAssembler::DecrementInt32(Register reg) { decl(reg); }

inline void MaglevAssembler::AddInt32(Register reg, int amount) {
  addl(reg, Immediate(amount));
}

inline void MaglevAssembler::AndInt32(Register reg, int mask) {
  andl(reg, Immediate(mask));
}

inline void MaglevAssembler::OrInt32(Register reg, int mask) {
  orl(reg, Immediate(mask));
}

inline void MaglevAssembler::AndInt32(Register reg, Register other) {
  andl(reg, other);
}

inline void MaglevAssembler::OrInt32(Register reg, Register other) {
  orl(reg, other);
}

inline void MaglevAssembler::ShiftLeft(Register reg, int amount) {
  shll(reg, Immediate(amount));
}

inline void MaglevAssembler::IncrementAddress(Register reg, int32_t delta) {
  leaq(reg, MemOperand(reg, delta));
}

inline void MaglevAssembler::LoadAddress(Register dst, MemOperand location) {
  leaq(dst, location);
}

inline void MaglevAssembler::Call(Label* target) { call(target); }

inline void MaglevAssembler::EmitEnterExitFrame(int extra_slots,
                                                StackFrame::Type frame_type,
                                                Register c_function,
                                                Register scratch) {
  EnterExitFrame(extra_slots, frame_type, c_function);
}

inline void MaglevAssembler::Move(StackSlot dst, Register src) {
  movq(StackSlotOperand(dst), src);
}

inline void MaglevAssembler::Move(StackSlot dst, DoubleRegister src) {
  Movsd(StackSlotOperand(dst), src);
}

inline void MaglevAssembler::Move(Register dst, StackSlot src) {
  movq(dst, StackSlotOperand(src));
}

inline void MaglevAssembler::Move(DoubleRegister dst, StackSlot src) {
  Movsd(dst, StackSlotOperand(src));
}

inline void MaglevAssembler::Move(MemOperand dst, Register src) {
  movq(dst, src);
}

inline void MaglevAssembler::Move(Register dst, Tagged<TaggedIndex> i) {
  MacroAssembler::Move(dst, i.ptr());
}

inline void MaglevAssembler::Move(DoubleRegister dst, DoubleRegister src) {
  MacroAssembler::Move(dst, src);
}

inline void MaglevAssembler::Move(Register dst, Tagged<Smi> src) {
  MacroAssembler::Move(dst, src);
}

inline void MaglevAssembler::Move(Register dst, ExternalReference src) {
  MacroAssembler::Move(dst, src);
}

inline void MaglevAssembler::Move(Register dst, MemOperand src) {
  MacroAssembler::Move(dst, src);
}

inline void MaglevAssembler::Move(Register dst, Register src) {
  MacroAssembler::Move(dst, src);
}

inline void MaglevAssembler::Move(Register dst, int32_t i) {
  // Move as a uint32 to avoid sign extension.
  MacroAssembler::Move(dst, static_cast<uint32_t>(i));
}

inline void MaglevAssembler::Move(Register dst, uint32_t i) {
  // Move as a uint32 to avoid sign extension.
  MacroAssembler::Move(dst, i);
}

inline void MaglevAssembler::Move(Register dst, IndirectPointerTag i) {
  MacroAssembler::Move(dst, i);
}

inline void MaglevAssembler::Move(DoubleRegister dst, double n) {
  MacroAssembler::Move(dst, n);
}

inline void MaglevAssembler::Move(DoubleRegister dst, Float64 n) {
  MacroAssembler::Move(dst, n.get_bits());
}

inline void MaglevAssembler::Move(Register dst, Handle<HeapObject> obj) {
  MacroAssembler::Move(dst, obj);
}

void MaglevAssembler::MoveTagged(Register dst, Handle<HeapObject> obj) {
#ifdef V8_COMPRESS_POINTERS
  MacroAssembler::Move(dst, obj, RelocInfo::COMPRESSED_EMBEDDED_OBJECT);
#else
  MacroAssembler::Move(dst, obj);
#endif
}

inline void MaglevAssembler::LoadInt32(Register dst, MemOperand src) {
  movl(dst, src);
}

inline void MaglevAssembler::StoreInt32(MemOperand dst, Register src) {
  movl(dst, src);
}

inline void MaglevAssembler::LoadFloat32(DoubleRegister dst, MemOperand src) {
  Movss(dst, src);
  Cvtss2sd(dst, dst);
}
inline void MaglevAssembler::StoreFloat32(MemOperand dst, DoubleRegister src) {
  Cvtsd2ss(kScratchDoubleReg, src);
  Movss(dst, kScratchDoubleReg);
}
inline void MaglevAssembler::LoadFloat64(DoubleRegister dst, MemOperand src) {
  Movsd(dst, src);
}
inline void MaglevAssembler::StoreFloat64(MemOperand dst, DoubleRegister src) {
  Movsd(dst, src);
}

inline void MaglevAssembler::LoadUnalignedFloat64(DoubleRegister dst,
                                                  Register base,
                                                  Register index) {
  LoadFloat64(dst, Operand(base, index, times_1, 0));
}
inline void MaglevAssembler::LoadUnalignedFloat64AndReverseByteOrder(
    DoubleRegister dst, Register base, Register index) {
  movq(kScratchRegister, Operand(base, index, times_1, 0));
  bswapq(kScratchRegister);
  Movq(dst, kScratchRegister);
}
inline void MaglevAssembler::StoreUnalignedFloat64(Register base,
                                                   Register index,
                                                   DoubleRegister src) {
  StoreFloat64(Operand(base, index, times_1, 0), src);
}
inline void MaglevAssembler::ReverseByteOrderAndStoreUnalignedFloat64(
    Register base, Register index, DoubleRegister src) {
  Movq(kScratchRegister, src);
  bswapq(kScratchRegister);
  movq(Operand(base, index, times_1, 0), kScratchRegister);
}

inline void MaglevAssembler::SignExtend32To64Bits(Register dst, Register src) {
  movsxlq(dst, src);
}
inline void MaglevAssembler::NegateInt32(Register val) { negl(val); }

inline void MaglevAssembler::ToUint8Clamped(Register result,
                                            DoubleRegister value, Label* min,
                                            Label* max, Label* done) {
  // (one setup line elided in this listing)
  Move(kScratchDoubleReg, 0.0);
  Ucomisd(kScratchDoubleReg, value);
  // Set to 0 if NaN.
  j(parity_even, min);
  j(above_equal, min);
  Move(kScratchDoubleReg, 255.0);
  Ucomisd(value, kScratchDoubleReg);
  j(above_equal, max);
  // If value is in [0, 255], round to the nearest integer.
  Roundsd(kScratchDoubleReg, value, kRoundToNearest);
  TruncateDoubleToInt32(result, kScratchDoubleReg);
  jmp(done);
}
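
// Clamps a double to the uint8 range: NaN and values <= 0.0 branch to {min},
// values >= 255.0 branch to {max}, and everything in between is rounded to
// the nearest integer before truncation to int32. The NaN check must come
// first because Ucomisd also sets ZF/CF for unordered operands.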

template <typename NodeT>
inline void MaglevAssembler::DeoptIfBufferDetached(Register array,
                                                   Register scratch,
                                                   NodeT* node) {
  // A detached buffer leads to megamorphic feedback, so we won't have a deopt
  // loop if we deopt here.
  LoadTaggedField(scratch,
                  FieldOperand(array, JSArrayBufferView::kBufferOffset));
  LoadTaggedField(scratch,
                  FieldOperand(scratch, JSArrayBuffer::kBitFieldOffset));
  testl(scratch, Immediate(JSArrayBuffer::WasDetachedBit::kMask));
  EmitEagerDeoptIf(not_zero, DeoptimizeReason::kArrayBufferWasDetached, node);
}

inline void MaglevAssembler::LoadByte(Register dst, MemOperand src) {
  movzxbl(dst, src);
}

inline Condition MaglevAssembler::IsCallableAndNotUndetectable(
    Register map, Register scratch) {
  movb(scratch, FieldOperand(map, Map::kBitFieldOffset));
  andl(scratch, Immediate(Map::Bits1::IsUndetectableBit::kMask |
                          Map::Bits1::IsCallableBit::kMask));
  cmpl(scratch, Immediate(Map::Bits1::IsCallableBit::kMask));
  return kEqual;
}

inline Condition MaglevAssembler::IsNotCallableNorUndetactable(
    Register map, Register scratch) {
  testb(FieldOperand(map, Map::kBitFieldOffset),
        Immediate(Map::Bits1::IsUndetectableBit::kMask |
                  Map::Bits1::IsCallableBit::kMask));
  return kEqual;
}

inline void MaglevAssembler::LoadInstanceType(Register instance_type,
                                              Register heap_object) {
  LoadMap(instance_type, heap_object);
  movzxwl(instance_type, FieldOperand(instance_type, Map::kInstanceTypeOffset));
}

inline void MaglevAssembler::JumpIfObjectType(Register heap_object,
                                              InstanceType type, Label* target,
                                              Label::Distance distance) {
  IsObjectType(heap_object, type, kScratchRegister);
  JumpIf(kEqual, target, distance);
}

inline void MaglevAssembler::JumpIfNotObjectType(Register heap_object,
                                                 InstanceType type,
                                                 Label* target,
                                                 Label::Distance distance) {
  IsObjectType(heap_object, type, kScratchRegister);
  JumpIf(kNotEqual, target, distance);
}

inline void MaglevAssembler::AssertObjectType(Register heap_object,
                                              InstanceType type,
                                              AbortReason reason) {
  AssertNotSmi(heap_object);
  IsObjectType(heap_object, type, kScratchRegister);
  Assert(kEqual, reason);
}

inline void MaglevAssembler::BranchOnObjectType(
    Register heap_object, InstanceType type, Label* if_true,
    Label::Distance true_distance, bool fallthrough_when_true, Label* if_false,
    Label::Distance false_distance, bool fallthrough_when_false) {
  IsObjectType(heap_object, type, kScratchRegister);
  Branch(kEqual, if_true, true_distance, fallthrough_when_true, if_false,
         false_distance, fallthrough_when_false);
}

inline void MaglevAssembler::JumpIfObjectTypeInRange(Register heap_object,
                                                     InstanceType lower_limit,
                                                     InstanceType higher_limit,
                                                     Label* target,
                                                     Label::Distance distance) {
  IsObjectTypeInRange(heap_object, lower_limit, higher_limit, kScratchRegister);
  JumpIf(kUnsignedLessThanEqual, target, distance);
}

inline void MaglevAssembler::JumpIfObjectTypeNotInRange(
    Register heap_object, InstanceType lower_limit, InstanceType higher_limit,
    Label* target, Label::Distance distance) {
  IsObjectTypeInRange(heap_object, lower_limit, higher_limit, kScratchRegister);
  JumpIf(kUnsignedGreaterThan, target, distance);
}

inline void MaglevAssembler::AssertObjectTypeInRange(Register heap_object,
                                                     InstanceType lower_limit,
                                                     InstanceType higher_limit,
                                                     AbortReason reason) {
  AssertNotSmi(heap_object);
  IsObjectTypeInRange(heap_object, lower_limit, higher_limit, kScratchRegister);
  Assert(kUnsignedLessThanEqual, reason);
}

inline void MaglevAssembler::BranchOnObjectTypeInRange(
    Register heap_object, InstanceType lower_limit, InstanceType higher_limit,
    Label* if_true, Label::Distance true_distance, bool fallthrough_when_true,
    Label* if_false, Label::Distance false_distance,
    bool fallthrough_when_false) {
  IsObjectTypeInRange(heap_object, lower_limit, higher_limit, kScratchRegister);
  Branch(kUnsignedLessThanEqual, if_true, true_distance, fallthrough_when_true,
         if_false, false_distance, fallthrough_when_false);
}

inline void MaglevAssembler::JumpIfJSAnyIsNotPrimitive(
    Register heap_object, Label* target, Label::Distance distance) {
  MacroAssembler::JumpIfJSAnyIsNotPrimitive(heap_object, kScratchRegister,
                                            target, distance);
}

#if V8_STATIC_ROOTS_BOOL
inline void MaglevAssembler::JumpIfObjectInRange(Register heap_object,
                                                 Tagged_t lower_limit,
                                                 Tagged_t higher_limit,
                                                 Label* target,
                                                 Label::Distance distance) {
  // Only allowed for comparisons against RORoots.
  DCHECK_LE(lower_limit, StaticReadOnlyRoot::kLastAllocatedRoot);
  DCHECK_LE(higher_limit, StaticReadOnlyRoot::kLastAllocatedRoot);
  AssertNotSmi(heap_object);
  CompareRange(heap_object, lower_limit, higher_limit);
  JumpIf(kUnsignedLessThanEqual, target, distance);
}

inline void MaglevAssembler::JumpIfObjectNotInRange(Register heap_object,
                                                    Tagged_t lower_limit,
                                                    Tagged_t higher_limit,
                                                    Label* target,
                                                    Label::Distance distance) {
  // Only allowed for comparisons against RORoots.
  DCHECK_LE(lower_limit, StaticReadOnlyRoot::kLastAllocatedRoot);
  DCHECK_LE(higher_limit, StaticReadOnlyRoot::kLastAllocatedRoot);
  AssertNotSmi(heap_object);
  CompareRange(heap_object, lower_limit, higher_limit);
  JumpIf(kUnsignedGreaterThan, target, distance);
}

inline void MaglevAssembler::AssertObjectInRange(Register heap_object,
                                                 Tagged_t lower_limit,
                                                 Tagged_t higher_limit,
                                                 AbortReason reason) {
  // Only allowed for comparisons against RORoots.
  DCHECK_LE(lower_limit, StaticReadOnlyRoot::kLastAllocatedRoot);
  DCHECK_LE(higher_limit, StaticReadOnlyRoot::kLastAllocatedRoot);
  AssertNotSmi(heap_object);
  CompareRange(heap_object, lower_limit, higher_limit);
  Assert(kUnsignedLessThanEqual, reason);
}
#endif

inline void MaglevAssembler::CompareMapWithRoot(Register object,
                                                RootIndex index,
                                                Register scratch) {
  if (CanBeImmediate(index)) {
    cmp_tagged(FieldOperand(object, HeapObject::kMapOffset),
               Immediate(static_cast<uint32_t>(ReadOnlyRootPtr(index))));
    return;
  }
  LoadMap(scratch, object);
  CompareRoot(scratch, index);
}

inline void MaglevAssembler::CompareInstanceType(Register map,
                                                 InstanceType instance_type) {
  CmpInstanceType(map, instance_type);
}

inline void MaglevAssembler::CompareInstanceTypeRange(
    Register map, Register instance_type_out, InstanceType lower_limit,
    InstanceType higher_limit) {
  CmpInstanceTypeRange(map, instance_type_out, lower_limit, higher_limit);
}

inline void MaglevAssembler::CompareFloat64AndJumpIf(
    DoubleRegister src1, DoubleRegister src2, Condition cond, Label* target,
    Label* nan_failed, Label::Distance distance) {
  Ucomisd(src1, src2);
  // Ucomisd sets these flags accordingly:
  // UNORDERED (one of the operands is a NaN): ZF,PF,CF := 111;
  // GREATER_THAN: ZF,PF,CF := 000;
  // LESS_THAN: ZF,PF,CF := 001;
  // EQUAL: ZF,PF,CF := 100;
  // Since ZF can be set by NaN or EQUAL, we check for NaN first.
  JumpIf(ConditionForNaN(), nan_failed);
  JumpIf(cond, target, distance);
}

inline void MaglevAssembler::CompareFloat64AndBranch(
    DoubleRegister src1, DoubleRegister src2, Condition cond,
    BasicBlock* if_true, BasicBlock* if_false, BasicBlock* next_block,
    BasicBlock* nan_failed) {
  Ucomisd(src1, src2);
  JumpIf(ConditionForNaN(), nan_failed->label());
  Branch(cond, if_true, if_false, next_block);
}

inline void MaglevAssembler::PrepareCallCFunction(int num_reg_arguments,
                                                  int num_double_registers) {
  MacroAssembler::PrepareCallCFunction(num_reg_arguments +
                                       num_double_registers);
}

inline void MaglevAssembler::CallSelf() {
  DCHECK(allow_call());
  DCHECK(code_gen_state()->entry_label()->is_bound());
  Call(code_gen_state()->entry_label());
}

inline void MaglevAssembler::Jump(Label* target, Label::Distance distance) {
  // Any eager deopts should go through JumpIf to enable us to support the
  // `--deopt-every-n-times` stress mode. See EmitEagerDeoptStress.
  DCHECK(!IsDeoptLabel(target));
  jmp(target, distance);
}

inline void MaglevAssembler::JumpToDeopt(Label* target) {
  DCHECK(IsDeoptLabel(target));
  jmp(target);
}

inline void MaglevAssembler::EmitEagerDeoptStress(Label* target) {
  if (V8_LIKELY(v8_flags.deopt_every_n_times <= 0)) {
    return;
  }

  ExternalReference counter = ExternalReference::stress_deopt_count(isolate());
  // The following code assumes that `Isolate::stress_deopt_count_` is 8 bytes
  // wide.
  static constexpr size_t kSizeofRAX = 8;
  static_assert(sizeof(decltype(*isolate()->stress_deopt_count_address())) ==
                kSizeofRAX);

  Label fallthrough;
  pushfq();
  pushq(rax);
  load_rax(counter);
  decl(rax);
  JumpIf(not_zero, &fallthrough, Label::kNear);

  RecordComment("-- deopt_every_n_times hit, jump to eager deopt");
  Move(rax, v8_flags.deopt_every_n_times);
  store_rax(counter);
  popq(rax);
  popfq();
  JumpToDeopt(target);

  bind(&fallthrough);
  store_rax(counter);
  popq(rax);
  popfq();
}

inline void MaglevAssembler::JumpIf(Condition cond, Label* target,
                                    Label::Distance distance) {
  // The least common denominator of all eager deopts is that they eventually
  // (should) bottom out in `JumpIf`. We use the opportunity here to trigger
  // extra eager deoptimizations with the `--deopt-every-n-times` stress mode.
  // Since `IsDeoptLabel` is slow we duplicate the test for the flag here.
  if (V8_UNLIKELY(v8_flags.deopt_every_n_times > 0)) {
    if (IsDeoptLabel(target)) {
      EmitEagerDeoptStress(target);
    }
  }
  DCHECK_IMPLIES(IsDeoptLabel(target), distance == Label::kFar);
  j(cond, target, distance);
}

inline void MaglevAssembler::JumpIfRoot(Register with, RootIndex index,
                                        Label* if_equal,
                                        Label::Distance distance) {
  MacroAssembler::JumpIfRoot(with, index, if_equal, distance);
}

inline void MaglevAssembler::JumpIfNotRoot(Register with, RootIndex index,
                                           Label* if_not_equal,
                                           Label::Distance distance) {
  MacroAssembler::JumpIfNotRoot(with, index, if_not_equal, distance);
}

inline void MaglevAssembler::JumpIfSmi(Register src, Label* on_smi,
                                       Label::Distance distance) {
  MacroAssembler::JumpIfSmi(src, on_smi, distance);
}

inline void MaglevAssembler::JumpIfNotSmi(Register src, Label* on_not_smi,
                                          Label::Distance distance) {
  MacroAssembler::JumpIfNotSmi(src, on_not_smi, distance);
}

void MaglevAssembler::JumpIfByte(Condition cc, Register value, int32_t byte,
                                 Label* target, Label::Distance distance) {
  cmpb(value, Immediate(byte));
  j(cc, target, distance);
}

void MaglevAssembler::JumpIfHoleNan(DoubleRegister value, Register scratch,
                                    Label* target, Label::Distance distance) {
  // TODO(leszeks): Right now this only accepts Zone-allocated target labels.
  // This works because all callsites are jumping to either a deopt, deferred
  // code, or a basic block. If we ever need to jump to an on-stack label, we
  // have to add support for it here and change the caller to pass a
  // ZoneLabelRef.
  DCHECK(compilation_info()->zone()->Contains(target));
  ZoneLabelRef is_hole = ZoneLabelRef::UnsafeFromLabelPointer(target);
  ZoneLabelRef is_not_hole(this);
  Ucomisd(value, value);
  JumpIf(ConditionForNaN(),
         MakeDeferredCode(
             [](MaglevAssembler* masm, DoubleRegister value, Register scratch,
                ZoneLabelRef is_hole, ZoneLabelRef is_not_hole) {
               masm->Pextrd(scratch, value, 1);
               masm->CompareInt32AndJumpIf(scratch, kHoleNanUpper32, kEqual,
                                           *is_hole);
               masm->Jump(*is_not_hole);
             },
             value, scratch, is_hole, is_not_hole));
  bind(*is_not_hole);
}
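
// The hole is encoded as a NaN with a distinguished upper half, so a plain
// Ucomisd self-compare first filters out all non-NaN values cheaply; only
// the rare NaN path, emitted as deferred code, inspects the upper 32 bits
// against kHoleNanUpper32.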

void MaglevAssembler::JumpIfNotHoleNan(DoubleRegister value, Register scratch,
                                       Label* target,
                                       Label::Distance distance) {
  JumpIfNotNan(value, target, distance);
  Pextrd(scratch, value, 1);
  CompareInt32AndJumpIf(scratch, kHoleNanUpper32, kNotEqual, target, distance);
}

void MaglevAssembler::JumpIfNotHoleNan(MemOperand operand, Label* target,
                                       Label::Distance distance) {
  movl(kScratchRegister, MemOperand(operand, kDoubleSize / 2));
  CompareInt32AndJumpIf(kScratchRegister, kHoleNanUpper32, kNotEqual, target,
                        distance);
}

void MaglevAssembler::JumpIfNan(DoubleRegister value, Label* target,
                                Label::Distance distance) {
  Ucomisd(value, value);
  JumpIf(ConditionForNaN(), target, distance);
}

void MaglevAssembler::JumpIfNotNan(DoubleRegister value, Label* target,
                                   Label::Distance distance) {
  Ucomisd(value, value);
  JumpIf(NegateCondition(ConditionForNaN()), target, distance);
}

void MaglevAssembler::CompareInt32AndJumpIf(Register r1, Register r2,
                                            Condition cond, Label* target,
                                            Label::Distance distance) {
  cmpl(r1, r2);
  JumpIf(cond, target, distance);
}

void MaglevAssembler::CompareIntPtrAndJumpIf(Register r1, Register r2,
                                             Condition cond, Label* target,
                                             Label::Distance distance) {
  cmpq(r1, r2);
  JumpIf(cond, target, distance);
}

inline void MaglevAssembler::CompareInt32AndJumpIf(Register r1, int32_t value,
                                                   Condition cond,
                                                   Label* target,
                                                   Label::Distance distance) {
  Cmp(r1, value);
  JumpIf(cond, target, distance);
}

void MaglevAssembler::CompareIntPtrAndJumpIf(Register r1, int32_t value,
                                             Condition cond, Label* target,
                                             Label::Distance distance) {
  cmpq(r1, Immediate(value));
  JumpIf(cond, target, distance);
}

inline void MaglevAssembler::CompareInt32AndBranch(
    Register r1, int32_t value, Condition cond, Label* if_true,
    Label::Distance true_distance, bool fallthrough_when_true, Label* if_false,
    Label::Distance false_distance, bool fallthrough_when_false) {
  Cmp(r1, value);
  Branch(cond, if_true, true_distance, fallthrough_when_true, if_false,
         false_distance, fallthrough_when_false);
}

inline void MaglevAssembler::CompareInt32AndBranch(
    Register r1, Register r2, Condition cond, Label* if_true,
    Label::Distance true_distance, bool fallthrough_when_true, Label* if_false,
    Label::Distance false_distance, bool fallthrough_when_false) {
  cmpl(r1, r2);
  Branch(cond, if_true, true_distance, fallthrough_when_true, if_false,
         false_distance, fallthrough_when_false);
}

inline void MaglevAssembler::CompareIntPtrAndBranch(
    Register r1, int32_t value, Condition cond, Label* if_true,
    Label::Distance true_distance, bool fallthrough_when_true, Label* if_false,
    Label::Distance false_distance, bool fallthrough_when_false) {
  cmpq(r1, Immediate(value));
  Branch(cond, if_true, true_distance, fallthrough_when_true, if_false,
         false_distance, fallthrough_when_false);
}

inline void MaglevAssembler::CompareInt32AndAssert(Register r1, Register r2,
                                                   Condition cond,
                                                   AbortReason reason) {
  cmpl(r1, r2);
  Assert(cond, reason);
}
inline void MaglevAssembler::CompareInt32AndAssert(Register r1, int32_t value,
                                                   Condition cond,
                                                   AbortReason reason) {
  Cmp(r1, value);
  Assert(cond, reason);
}

inline void MaglevAssembler::CompareSmiAndJumpIf(Register r1, Tagged<Smi> value,
                                                 Condition cond, Label* target,
                                                 Label::Distance distance) {
  AssertSmi(r1);
  Cmp(r1, value);
  JumpIf(cond, target, distance);
}

inline void MaglevAssembler::CompareSmiAndAssert(Register r1, Tagged<Smi> value,
                                                 Condition cond,
                                                 AbortReason reason) {
  if (!v8_flags.debug_code) return;
  AssertSmi(r1);
  Cmp(r1, value);
  Assert(cond, reason);
}

inline void MaglevAssembler::CompareByteAndJumpIf(MemOperand left, int8_t right,
                                                  Condition cond,
                                                  Register scratch,
                                                  Label* target,
                                                  Label::Distance distance) {
  cmpb(left, Immediate(right));
  JumpIf(cond, target, distance);
}

inline void MaglevAssembler::CompareTaggedAndJumpIf(Register r1,
                                                    Tagged<Smi> value,
                                                    Condition cond,
                                                    Label* target,
                                                    Label::Distance distance) {
  Cmp(r1, value);
  JumpIf(cond, target, distance);
}

inline void MaglevAssembler::CompareTaggedAndJumpIf(Register r1,
                                                    Handle<HeapObject> obj,
                                                    Condition cond,
                                                    Label* target,
                                                    Label::Distance distance) {
  Cmp(r1, obj);
  JumpIf(cond, target, distance);
}

inline void MaglevAssembler::CompareTaggedAndJumpIf(Register src1,
                                                    Register src2,
                                                    Condition cond,
                                                    Label* target,
                                                    Label::Distance distance) {
  cmp_tagged(src1, src2);
  JumpIf(cond, target, distance);
}

inline void MaglevAssembler::CompareDoubleAndJumpIfZeroOrNaN(
    DoubleRegister reg, Label* target, Label::Distance distance) {
  // Sets scratch register to 0.0.
  Xorpd(kScratchDoubleReg, kScratchDoubleReg);
  // Sets ZF if equal to 0.0, -0.0 or NaN.
  Ucomisd(kScratchDoubleReg, reg);
  JumpIf(kZero, target, distance);
}

inline void MaglevAssembler::CompareDoubleAndJumpIfZeroOrNaN(
    MemOperand operand, Label* target, Label::Distance distance) {
  // Sets scratch register to 0.0.
  Xorpd(kScratchDoubleReg, kScratchDoubleReg);
  // Sets ZF if equal to 0.0, -0.0 or NaN.
  Ucomisd(kScratchDoubleReg, operand);
  JumpIf(kZero, target, distance);
}

inline void MaglevAssembler::TestInt32AndJumpIfAnySet(
    Register r1, int32_t mask, Label* target, Label::Distance distance) {
  testl(r1, Immediate(mask));
  JumpIf(kNotZero, target, distance);
}

inline void MaglevAssembler::TestInt32AndJumpIfAnySet(
    MemOperand operand, int32_t mask, Label* target, Label::Distance distance) {
  testl(operand, Immediate(mask));
  JumpIf(kNotZero, target, distance);
}

inline void MaglevAssembler::TestUint8AndJumpIfAnySet(
    MemOperand operand, uint8_t mask, Label* target, Label::Distance distance) {
  testb(operand, Immediate(mask));
  JumpIf(kNotZero, target, distance);
}

inline void MaglevAssembler::TestInt32AndJumpIfAllClear(
    Register r1, int32_t mask, Label* target, Label::Distance distance) {
  testl(r1, Immediate(mask));
  JumpIf(kZero, target, distance);
}

inline void MaglevAssembler::TestInt32AndJumpIfAllClear(
    MemOperand operand, int32_t mask, Label* target, Label::Distance distance) {
  testl(operand, Immediate(mask));
  JumpIf(kZero, target, distance);
}

inline void MaglevAssembler::TestUint8AndJumpIfAllClear(
    MemOperand operand, uint8_t mask, Label* target, Label::Distance distance) {
  testb(operand, Immediate(mask));
  JumpIf(kZero, target, distance);
}

inline void MaglevAssembler::LoadHeapNumberValue(DoubleRegister result,
                                                 Register heap_number) {
  Movsd(result, FieldOperand(heap_number, offsetof(HeapNumber, value_)));
}

inline void MaglevAssembler::LoadHeapInt32Value(Register result,
                                                Register heap_number) {
  movl(result, FieldOperand(heap_number, offsetof(HeapNumber, value_)));
}

inline void MaglevAssembler::StoreHeapInt32Value(Register value,
                                                 Register heap_number) {
  movl(FieldOperand(heap_number, offsetof(HeapNumber, value_)), value);
}

inline void MaglevAssembler::Int32ToDouble(DoubleRegister result,
                                           Register src) {
  Cvtlsi2sd(result, src);
}

inline void MaglevAssembler::Uint32ToDouble(DoubleRegister result,
                                            Register src) {
  // TODO(leszeks): Cvtlui2sd does a manual movl to clear the top bits of the
  // input register. We could eliminate this movl by ensuring that word32
  // registers are always written with 32-bit ops and not 64-bit ones.
  Cvtlui2sd(result, src);
}

inline void MaglevAssembler::IntPtrToDouble(DoubleRegister result,
                                            Register src) {
  Cvtqsi2sd(result, src);
}

inline void MaglevAssembler::Pop(Register dst) { MacroAssembler::Pop(dst); }

template <typename NodeT>
inline void MaglevAssembler::EmitEagerDeoptIfNotEqual(DeoptimizeReason reason,
                                                      NodeT* node) {
  EmitEagerDeoptIf(not_equal, reason, node);
}

inline void MaglevAssembler::AssertStackSizeCorrect() {
  if (v8_flags.slow_debug_code) {
    movq(kScratchRegister, rbp);
    subq(kScratchRegister, rsp);
    cmpq(kScratchRegister,
         Immediate(code_gen_state()->stack_slots() * kSystemPointerSize +
                   StandardFrameConstants::kFixedFrameSizeFromFp));
    Assert(equal, AbortReason::kStackAccessBelowStackPointer);
  }
}

inline Condition MaglevAssembler::FunctionEntryStackCheck(
    int stack_check_offset) {
  Register stack_cmp_reg = rsp;
  if (stack_check_offset >= kStackLimitSlackForDeoptimizationInBytes) {
    stack_cmp_reg = kScratchRegister;
    leaq(stack_cmp_reg, Operand(rsp, -stack_check_offset));
  }
  cmpq(stack_cmp_reg,
       StackLimitAsOperand(StackLimitKind::kInterruptStackLimit));
  return kUnsignedGreaterThanEqual;
}

inline void MaglevAssembler::FinishCode() {}

template <typename Dest, typename Source>
inline void MaglevAssembler::MoveRepr(MachineRepresentation repr, Dest dst,
                                      Source src) {
  switch (repr) {
    case MachineRepresentation::kWord32:
      return movl(dst, src);
    case MachineRepresentation::kTaggedSigned:
    case MachineRepresentation::kTaggedPointer:
    case MachineRepresentation::kTagged:
    case MachineRepresentation::kWord64:
      return movq(dst, src);
    default:
      UNREACHABLE();
  }
}
template <>
inline void MaglevAssembler::MoveRepr(MachineRepresentation repr,
                                      MemOperand dst, MemOperand src) {
  MoveRepr(repr, kScratchRegister, src);
  MoveRepr(repr, dst, kScratchRegister);
}
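
// x64 has no memory-to-memory mov, so the MemOperand-to-MemOperand
// specialization bounces the value through kScratchRegister.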

inline void MaglevAssembler::MaybeEmitPlaceHolderForDeopt() {
  if (v8_flags.cet_compatible) {
    Nop(kIntraSegmentJmpInstrSize);
  }
}

}  // namespace maglev
}  // namespace internal
}  // namespace v8

#endif  // V8_MAGLEV_X64_MAGLEV_ASSEMBLER_X64_INL_H_