v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
maglev-assembler.h
Go to the documentation of this file.
1// Copyright 2022 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_MAGLEV_MAGLEV_ASSEMBLER_H_
6#define V8_MAGLEV_MAGLEV_ASSEMBLER_H_
7
10#include "src/common/globals.h"
11#include "src/flags/flags.h"
15
16namespace v8 {
17namespace internal {
18namespace maglev {
19
20class Graph;
21class MaglevAssembler;
22
24 AllocationType alloc_type) {
25 if (alloc_type == AllocationType::kYoung) {
26 return ExternalReference::new_space_allocation_top_address(isolate);
27 }
29 return ExternalReference::old_space_allocation_top_address(isolate);
30}
31
33 Isolate* isolate, AllocationType alloc_type) {
34 if (alloc_type == AllocationType::kYoung) {
35 return ExternalReference::new_space_allocation_limit_address(isolate);
36 }
38 return ExternalReference::old_space_allocation_limit_address(isolate);
39}
40
42 if (alloc_type == AllocationType::kYoung) {
43 return Builtin::kAllocateInYoungGeneration;
44 }
46 return Builtin::kAllocateInOldGeneration;
47}
48
49// Label allowed to be passed to deferred code.
51 public:
52 explicit ZoneLabelRef(Zone* zone) : label_(zone->New<Label>()) {}
53 explicit inline ZoneLabelRef(MaglevAssembler* masm);
54
56 // This is an unsafe operation, {label} must be zone allocated.
57 return ZoneLabelRef(label);
58 }
59
60 Label* operator*() { return label_; }
61
62 private:
64
65 // Unsafe constructor. {label} must be zone allocated.
67};
68
// The slot index is the offset from the frame pointer.
struct StackSlot {
  int32_t index;  // Frame-pointer-relative slot index (not a byte offset).
};
73
74// Helper for generating the platform-specific parts of map comparison
75// operations.
77 public:
78 inline explicit MapCompare(MaglevAssembler* masm, Register object,
79 size_t map_count);
80
81 inline void Generate(Handle<Map> map, Condition cond, Label* if_true,
82 Label::Distance distance = Label::kFar);
83 inline Register GetObject() const { return object_; }
84 inline Register GetMap();
85
86 // For counting the temporaries needed by the above operations:
87 static inline int TemporaryCount(size_t map_count);
88
89 private:
92 const size_t map_count_;
94};
95
97 public:
99
100 MaglevAssembler(Isolate* isolate, Zone* zone,
101 MaglevCodeGenState* code_gen_state)
102 : MacroAssembler(isolate, zone, CodeObjectRequired::kNo),
103 code_gen_state_(code_gen_state) {}
104
105 static constexpr RegList GetAllocatableRegisters() {
106#if defined(V8_TARGET_ARCH_ARM)
107 return kAllocatableGeneralRegisters - kMaglevExtraScratchRegister;
108#elif defined(V8_TARGET_ARCH_RISCV64)
109 return kAllocatableGeneralRegisters - kMaglevExtraScratchRegister -
111#else
112 return kAllocatableGeneralRegisters;
113#endif
114 }
115
#if defined(V8_TARGET_ARCH_RISCV64)
  // RISC-V has no condition-flags register; a dedicated general-purpose
  // register is used to emulate one.
  static constexpr Register GetFlagsRegister() { return kMaglevFlagsRegister; }
#endif  // V8_TARGET_ARCH_RISCV64
119
121 return kAllocatableDoubleRegisters;
122 }
123
124 inline MemOperand GetStackSlot(const compiler::AllocatedOperand& operand);
125 inline MemOperand ToMemOperand(const compiler::InstructionOperand& operand);
126 inline MemOperand ToMemOperand(const ValueLocation& location);
127
128 inline Register GetFramePointer();
129
131 const compiler::AllocatedOperand& operand) {
132 int index = operand.index();
133 if (operand.representation() != MachineRepresentation::kTagged) {
134 index += code_gen_state()->tagged_slots();
135 }
136 return GetFramePointerOffsetForStackSlot(index);
137 }
138
139 template <typename Dest, typename Source>
140 inline void MoveRepr(MachineRepresentation repr, Dest dst, Source src);
141
142 void Allocate(RegisterSnapshot register_snapshot, Register result,
143 int size_in_bytes,
144 AllocationType alloc_type = AllocationType::kYoung,
145 AllocationAlignment alignment = kTaggedAligned);
146
147 void Allocate(RegisterSnapshot register_snapshot, Register result,
148 Register size_in_bytes,
149 AllocationType alloc_type = AllocationType::kYoung,
150 AllocationAlignment alignment = kTaggedAligned);
151
152 void AllocateHeapNumber(RegisterSnapshot register_snapshot, Register result,
153 DoubleRegister value);
154
155 void AllocateTwoByteString(RegisterSnapshot register_snapshot,
156 Register result, int length);
157
158 void LoadSingleCharacterString(Register result, int char_code);
159 void LoadSingleCharacterString(Register result, Register char_code,
160 Register scratch);
161
162 void EnsureWritableFastElements(RegisterSnapshot register_snapshot,
163 Register elements, Register object,
164 Register scratch);
165
166 inline void BindJumpTarget(Label* label);
167 inline void BindBlock(BasicBlock* block);
168
169 inline Condition IsRootConstant(Input input, RootIndex root_index);
170
171 inline void Branch(Condition condition, BasicBlock* if_true,
172 BasicBlock* if_false, BasicBlock* next_block);
173 inline void Branch(Condition condition, Label* if_true,
174 Label::Distance true_distance, bool fallthrough_when_true,
175 Label* if_false, Label::Distance false_distance,
176 bool fallthrough_when_false);
177
178 Register FromAnyToRegister(const Input& input, Register scratch);
179
180 inline void LoadTaggedField(Register result, MemOperand operand);
181 inline void LoadTaggedField(Register result, Register object, int offset);
182 inline void LoadTaggedFieldWithoutDecompressing(Register result,
183 Register object, int offset);
184 inline void LoadTaggedSignedField(Register result, MemOperand operand);
185 inline void LoadTaggedSignedField(Register result, Register object,
186 int offset);
187 inline void LoadAndUntagTaggedSignedField(Register result, Register object,
188 int offset);
189 inline void LoadTaggedFieldByIndex(Register result, Register object,
190 Register index, int scale, int offset);
191 inline void LoadBoundedSizeFromObject(Register result, Register object,
192 int offset);
193 inline void LoadExternalPointerField(Register result, MemOperand operand);
194
196 Register index);
197 inline void LoadFixedArrayElementWithoutDecompressing(Register result,
198 Register array,
199 Register index);
201 Register index);
202 inline void StoreFixedDoubleArrayElement(Register array, Register index,
203 DoubleRegister value);
204
205 inline void LoadSignedField(Register result, MemOperand operand,
206 int element_size);
207 inline void LoadUnsignedField(Register result, MemOperand operand,
208 int element_size);
209 template <typename BitField>
210 inline void LoadBitField(Register result, MemOperand operand) {
211 static constexpr int load_size = sizeof(typename BitField::BaseType);
212 LoadUnsignedField(result, operand, load_size);
213 DecodeField<BitField>(result);
214 }
215
216 enum StoreMode { kField, kElement };
217 enum ValueIsCompressed { kValueIsDecompressed, kValueIsCompressed };
218 enum ValueCanBeSmi { kValueCannotBeSmi, kValueCanBeSmi };
219
220 inline void SetSlotAddressForTaggedField(Register slot_reg, Register object,
221 int offset);
222 inline void SetSlotAddressForFixedArrayElement(Register slot_reg,
223 Register object,
224 Register index);
225
226 template <StoreMode store_mode>
227 using OffsetTypeFor = std::conditional_t<store_mode == kField, int, Register>;
228
229 template <StoreMode store_mode>
230 void CheckAndEmitDeferredWriteBarrier(Register object,
232 Register value,
233 RegisterSnapshot register_snapshot,
234 ValueIsCompressed value_is_compressed,
235 ValueCanBeSmi value_can_be_smi);
236
238 Register object, int offset, Register value,
239 RegisterSnapshot register_snapshot, IndirectPointerTag tag);
240
241 // Preserves all registers that are in the register snapshot, but is otherwise
242 // allowed to clobber both input registers if they are not in the snapshot.
243 //
244 // For maximum efficiency, prefer:
245 // * Having `object` == WriteBarrierDescriptor::ObjectRegister(),
246 // * Not having WriteBarrierDescriptor::SlotAddressRegister() in the
247 // register snapshot,
248 // * Not having `value` in the register snapshot, allowing it to be
249 // clobbered.
251 Register value,
252 RegisterSnapshot register_snapshot,
253 ValueIsCompressed value_is_compressed,
254 ValueCanBeSmi value_can_be_smi);
255 inline void StoreTaggedFieldNoWriteBarrier(Register object, int offset,
256 Register value);
257 inline void StoreTaggedSignedField(Register object, int offset,
258 Register value);
259 inline void StoreTaggedSignedField(Register object, int offset,
260 Tagged<Smi> value);
261
262 inline void StoreInt32Field(Register object, int offset, int32_t value);
263
264 inline void AssertElidedWriteBarrier(Register object, Register value,
265 RegisterSnapshot snapshot);
266
267#ifdef V8_ENABLE_SANDBOX
268
270 Register object, int offset, Register value,
271 RegisterSnapshot register_snapshot, IndirectPointerTag tag);
272 inline void StoreTrustedPointerFieldNoWriteBarrier(Register object,
273 int offset,
274 Register value);
275#endif // V8_ENABLE_SANDBOX
276
277 inline void StoreField(MemOperand operand, Register value, int element_size);
278 inline void ReverseByteOrder(Register value, int element_size);
279
280 inline void BuildTypedArrayDataPointer(Register data_pointer,
281 Register object);
282 inline MemOperand TypedArrayElementOperand(Register data_pointer,
283 Register index, int element_size);
284 inline MemOperand DataViewElementOperand(Register data_pointer,
285 Register index);
286
287 enum class CharCodeMaskMode { kValueIsInRange, kMustApplyMask };
288
289 // Warning: Input registers {string} and {index} will be scratched.
290 // {result} is allowed to alias with one the other 3 input registers.
291 // {result} is an int32.
292 void StringCharCodeOrCodePointAt(
294 RegisterSnapshot& register_snapshot, Register result, Register string,
295 Register index, Register scratch1, Register scratch2,
296 Label* result_fits_one_byte);
297 // Warning: Input {char_code} will be scratched.
298 void StringFromCharCode(RegisterSnapshot register_snapshot,
299 Label* char_code_fits_one_byte, Register result,
300 Register char_code, Register scratch,
301 CharCodeMaskMode mask_mode);
302
303 void ToBoolean(Register value, CheckType check_type, ZoneLabelRef is_true,
304 ZoneLabelRef is_false, bool fallthrough_when_true);
305
306 void TestTypeOf(Register object,
308 Label* if_true, Label::Distance true_distance,
309 bool fallthrough_when_true, Label* if_false,
310 Label::Distance false_distance, bool fallthrough_when_false);
311
312 inline void SmiTagInt32AndJumpIfFail(Register dst, Register src, Label* fail,
313 Label::Distance distance = Label::kFar);
314 inline void SmiTagInt32AndJumpIfFail(Register reg, Label* fail,
315 Label::Distance distance = Label::kFar);
316 inline void SmiTagInt32AndJumpIfSuccess(
317 Register dst, Register src, Label* success,
318 Label::Distance distance = Label::kFar);
319 inline void SmiTagInt32AndJumpIfSuccess(
320 Register reg, Label* success, Label::Distance distance = Label::kFar);
321 inline void UncheckedSmiTagInt32(Register dst, Register src);
322 inline void UncheckedSmiTagInt32(Register reg);
323
324 inline void SmiTagUint32AndJumpIfFail(Register dst, Register src, Label* fail,
325 Label::Distance distance = Label::kFar);
326 inline void SmiTagUint32AndJumpIfFail(Register reg, Label* fail,
327 Label::Distance distance = Label::kFar);
328 inline void SmiTagIntPtrAndJumpIfFail(Register dst, Register src, Label* fail,
329 Label::Distance distance = Label::kFar);
330 inline void SmiTagUint32AndJumpIfSuccess(
331 Register dst, Register src, Label* success,
332 Label::Distance distance = Label::kFar);
333 inline void SmiTagUint32AndJumpIfSuccess(
334 Register reg, Label* success, Label::Distance distance = Label::kFar);
335 inline void SmiTagIntPtrAndJumpIfSuccess(
336 Register dst, Register src, Label* success,
337 Label::Distance distance = Label::kFar);
338 inline void UncheckedSmiTagUint32(Register dst, Register src);
339 inline void UncheckedSmiTagUint32(Register reg);
340
341 // Try to smi-tag {obj}. Result is thrown away.
342 inline void CheckInt32IsSmi(Register obj, Label* fail,
343 Register scratch = Register::no_reg());
344
345 inline void CheckIntPtrIsSmi(Register obj, Label* fail,
346 Label::Distance distance = Label::kFar);
347
348 // Add/Subtract a constant (not smi tagged) to a smi. Jump to {fail} if the
349 // result doesn't fit.
350 inline void SmiAddConstant(Register dst, Register src, int value, Label* fail,
351 Label::Distance distance = Label::kFar);
352 inline void SmiAddConstant(Register reg, int value, Label* fail,
353 Label::Distance distance = Label::kFar);
354 inline void SmiSubConstant(Register dst, Register src, int value, Label* fail,
355 Label::Distance distance = Label::kFar);
356 inline void SmiSubConstant(Register reg, int value, Label* fail,
357 Label::Distance distance = Label::kFar);
358
359 inline void MoveHeapNumber(Register dst, double value);
360
361#ifdef V8_TARGET_ARCH_RISCV64
362 inline Condition CheckSmi(Register src);
363 // Abort execution if argument is not a Map, enabled via
364 // --debug-code.
365 void AssertMap(Register object) NOOP_UNLESS_DEBUG_CODE;
366
367 void CompareRoot(const Register& obj, RootIndex index,
369 void CmpTagged(const Register& rs1, const Register& rs2);
370 void CompareTaggedRoot(const Register& obj, RootIndex index);
371 void Cmp(const Register& rn, int imm);
372 void Assert(Condition cond, AbortReason reason);
373 void IsObjectType(Register heap_object, Register scratch1, Register scratch2,
374 InstanceType type);
375#endif
376
377 void TruncateDoubleToInt32(Register dst, DoubleRegister src);
378 void TryTruncateDoubleToInt32(Register dst, DoubleRegister src, Label* fail);
379 void TryTruncateDoubleToUint32(Register dst, DoubleRegister src, Label* fail);
380
381 void TryChangeFloat64ToIndex(Register result, DoubleRegister value,
382 Label* success, Label* fail);
383
384 inline void MaybeEmitPlaceHolderForDeopt();
385 inline void DefineLazyDeoptPoint(LazyDeoptInfo* info);
386 inline void DefineExceptionHandlerPoint(NodeBase* node);
387 inline void DefineExceptionHandlerAndLazyDeoptPoint(NodeBase* node);
388
389 template <typename Function, typename... Args>
390 inline Label* MakeDeferredCode(Function&& deferred_code_gen, Args&&... args);
391 template <typename Function, typename... Args>
392 inline void JumpToDeferredIf(Condition cond, Function&& deferred_code_gen,
393 Args&&... args);
394 void JumpIfNotCallable(Register object, Register scratch,
395 CheckType check_type, Label* target,
396 Label::Distance distance = Label::kFar);
397 void JumpIfUndetectable(Register object, Register scratch,
398 CheckType check_type, Label* target,
399 Label::Distance distance = Label::kFar);
400 void JumpIfNotUndetectable(Register object, Register scratch, CheckType,
401 Label* target,
402 Label::Distance distance = Label::kFar);
403 template <typename NodeT>
404 inline Label* GetDeoptLabel(NodeT* node, DeoptimizeReason reason);
405 inline bool IsDeoptLabel(Label* label);
406 inline void EmitEagerDeoptStress(Label* label);
407 template <typename NodeT>
408 inline void EmitEagerDeopt(NodeT* node, DeoptimizeReason reason);
409 template <typename NodeT>
410 inline void EmitEagerDeoptIf(Condition cond, DeoptimizeReason reason,
411 NodeT* node);
412 template <typename NodeT>
413 inline void EmitEagerDeoptIfNotEqual(DeoptimizeReason reason, NodeT* node);
414 template <typename NodeT>
415 inline void EmitEagerDeoptIfSmi(NodeT* node, Register object,
416 DeoptimizeReason reason);
417 template <typename NodeT>
418 inline void EmitEagerDeoptIfNotSmi(NodeT* node, Register object,
419 DeoptimizeReason reason);
420
421 void MaterialiseValueNode(Register dst, ValueNode* value);
422
423 inline void IncrementInt32(Register reg);
424 inline void DecrementInt32(Register reg);
425 inline void AddInt32(Register reg, int amount);
426 inline void AndInt32(Register reg, int mask);
427 inline void OrInt32(Register reg, int mask);
428 inline void AndInt32(Register reg, Register other);
429 inline void OrInt32(Register reg, Register other);
430 inline void ShiftLeft(Register reg, int amount);
431 inline void IncrementAddress(Register reg, int32_t delta);
432 inline void LoadAddress(Register dst, MemOperand location);
433
434 inline void Call(Label* target);
435
436 inline void EmitEnterExitFrame(int extra_slots, StackFrame::Type frame_type,
437 Register c_function, Register scratch);
438
439 inline MemOperand StackSlotOperand(StackSlot slot);
440 inline void Move(StackSlot dst, Register src);
441 inline void Move(StackSlot dst, DoubleRegister src);
442 inline void Move(Register dst, StackSlot src);
443 inline void Move(DoubleRegister dst, StackSlot src);
444 inline void Move(MemOperand dst, Register src);
445 inline void Move(Register dst, MemOperand src);
446 inline void Move(DoubleRegister dst, DoubleRegister src);
447 inline void Move(Register dst, Tagged<Smi> src);
448 inline void Move(Register dst, ExternalReference src);
449 inline void Move(Register dst, Register src);
450 inline void Move(Register dst, Tagged<TaggedIndex> i);
451 inline void Move(Register dst, int32_t i);
452 inline void Move(Register dst, uint32_t i);
453 inline void Move(Register dst, IndirectPointerTag i);
454 inline void Move(DoubleRegister dst, double n);
455 inline void Move(DoubleRegister dst, Float64 n);
456 inline void Move(Register dst, Handle<HeapObject> obj);
457
458 inline void MoveTagged(Register dst, Handle<HeapObject> obj);
459
460 inline void LoadMapForCompare(Register dst, Register obj);
461
462 inline void LoadByte(Register dst, MemOperand src);
463
464 inline void LoadInt32(Register dst, MemOperand src);
465 inline void StoreInt32(MemOperand dst, Register src);
466
467 inline void LoadFloat32(DoubleRegister dst, MemOperand src);
468 inline void StoreFloat32(MemOperand dst, DoubleRegister src);
469 inline void LoadFloat64(DoubleRegister dst, MemOperand src);
470 inline void StoreFloat64(MemOperand dst, DoubleRegister src);
471
472 inline void LoadUnalignedFloat64(DoubleRegister dst, Register base,
473 Register index);
474 inline void LoadUnalignedFloat64AndReverseByteOrder(DoubleRegister dst,
476 Register index);
477 inline void StoreUnalignedFloat64(Register base, Register index,
478 DoubleRegister src);
479 inline void ReverseByteOrderAndStoreUnalignedFloat64(Register base,
480 Register index,
481 DoubleRegister src);
482
483 inline void SignExtend32To64Bits(Register dst, Register src);
484 inline void NegateInt32(Register val);
485
486 inline void ToUint8Clamped(Register result, DoubleRegister value, Label* min,
487 Label* max, Label* done);
488
489 template <typename NodeT>
490 inline void DeoptIfBufferDetached(Register array, Register scratch,
491 NodeT* node);
492
493 inline Condition IsCallableAndNotUndetectable(Register map, Register scratch);
494 inline Condition IsNotCallableNorUndetactable(Register map, Register scratch);
495
496 inline void LoadInstanceType(Register instance_type, Register heap_object);
497 inline void JumpIfObjectType(Register heap_object, InstanceType type,
498 Label* target,
499 Label::Distance distance = Label::kFar);
500 inline void JumpIfNotObjectType(Register heap_object, InstanceType type,
501 Label* target,
502 Label::Distance distance = Label::kFar);
503 inline void AssertObjectType(Register heap_object, InstanceType type,
504 AbortReason reason);
505 inline void BranchOnObjectType(Register heap_object, InstanceType type,
506 Label* if_true, Label::Distance true_distance,
507 bool fallthrough_when_true, Label* if_false,
508 Label::Distance false_distance,
509 bool fallthrough_when_false);
510
511 inline void JumpIfObjectTypeInRange(Register heap_object,
512 InstanceType lower_limit,
513 InstanceType higher_limit, Label* target,
514 Label::Distance distance = Label::kFar);
515 inline void JumpIfObjectTypeNotInRange(
516 Register heap_object, InstanceType lower_limit, InstanceType higher_limit,
517 Label* target, Label::Distance distance = Label::kFar);
518 inline void AssertObjectTypeInRange(Register heap_object,
519 InstanceType lower_limit,
520 InstanceType higher_limit,
521 AbortReason reason);
522 inline void BranchOnObjectTypeInRange(
523 Register heap_object, InstanceType lower_limit, InstanceType higher_limit,
524 Label* if_true, Label::Distance true_distance, bool fallthrough_when_true,
525 Label* if_false, Label::Distance false_distance,
526 bool fallthrough_when_false);
527
528#if V8_STATIC_ROOTS_BOOL
529 inline void JumpIfObjectInRange(Register heap_object, Tagged_t lower_limit,
530 Tagged_t higher_limit, Label* target,
531 Label::Distance distance = Label::kFar);
532 inline void JumpIfObjectNotInRange(Register heap_object, Tagged_t lower_limit,
533 Tagged_t higher_limit, Label* target,
534 Label::Distance distance = Label::kFar);
535 inline void AssertObjectInRange(Register heap_object, Tagged_t lower_limit,
536 Tagged_t higher_limit, AbortReason reason);
537#endif
538
539 inline void JumpIfJSAnyIsNotPrimitive(Register heap_object, Label* target,
540 Label::Distance distance = Label::kFar);
541
542 inline void JumpIfStringMap(Register map, Label* target,
543 Label::Distance distance = Label::kFar,
544 bool jump_if_true = true);
545 inline void JumpIfString(Register heap_object, Label* target,
546 Label::Distance distance = Label::kFar);
547 inline void JumpIfNotString(Register heap_object, Label* target,
548 Label::Distance distance = Label::kFar);
549 inline void CheckJSAnyIsStringAndBranch(Register heap_object, Label* if_true,
550 Label::Distance true_distance,
551 bool fallthrough_when_true,
552 Label* if_false,
553 Label::Distance false_distance,
554 bool fallthrough_when_false);
555
556 inline void CompareMapWithRoot(Register object, RootIndex index,
557 Register scratch);
558
559 inline void CompareInstanceTypeAndJumpIf(Register map, InstanceType type,
560 Condition cond, Label* target,
561 Label::Distance distance);
562
563 inline void CompareInstanceType(Register map, InstanceType instance_type);
564 inline void CompareInstanceTypeRange(Register map, InstanceType lower_limit,
565 InstanceType higher_limit);
566 inline Condition CompareInstanceTypeRange(Register map,
567 Register instance_type_out,
568 InstanceType lower_limit,
569 InstanceType higher_limit);
570
571 template <typename NodeT>
572 inline void CompareInstanceTypeRangeAndEagerDeoptIf(
573 Register map, Register instance_type_out, InstanceType lower_limit,
574 InstanceType higher_limit, Condition cond, DeoptimizeReason reason,
575 NodeT* node);
576
577 template <typename NodeT>
578 void CompareRootAndEmitEagerDeoptIf(Register reg, RootIndex index,
579 Condition cond, DeoptimizeReason reason,
580 NodeT* node);
581 template <typename NodeT>
582 void CompareMapWithRootAndEmitEagerDeoptIf(Register reg, RootIndex index,
583 Register scratch, Condition cond,
584 DeoptimizeReason reason,
585 NodeT* node);
586 template <typename NodeT>
587 void CompareTaggedRootAndEmitEagerDeoptIf(Register reg, RootIndex index,
588 Condition cond,
589 DeoptimizeReason reason,
590 NodeT* node);
591 template <typename NodeT>
592 void CompareUInt32AndEmitEagerDeoptIf(Register reg, int imm, Condition cond,
593 DeoptimizeReason reason, NodeT* node);
594 inline void CompareTaggedAndJumpIf(Register reg, Tagged<Smi> smi,
595 Condition cond, Label* target,
596 Label::Distance distance = Label::kFar);
597 inline void CompareTaggedAndJumpIf(Register reg, Handle<HeapObject> obj,
598 Condition cond, Label* target,
599 Label::Distance distance = Label::kFar);
600 inline void CompareTaggedAndJumpIf(Register src1, Register src2,
601 Condition cond, Label* target,
602 Label::Distance distance = Label::kFar);
603
604 inline void CompareFloat64AndJumpIf(DoubleRegister src1, DoubleRegister src2,
605 Condition cond, Label* target,
606 Label* nan_failed,
607 Label::Distance distance = Label::kFar);
608 inline void CompareFloat64AndBranch(DoubleRegister src1, DoubleRegister src2,
609 Condition cond, BasicBlock* if_true,
610 BasicBlock* if_false,
611 BasicBlock* next_block,
612 BasicBlock* nan_failed);
613 inline void PrepareCallCFunction(int num_reg_arguments,
614 int num_double_registers = 0);
615
616 inline void CallSelf();
617 inline void CallBuiltin(Builtin builtin);
618 template <Builtin kBuiltin, typename... Args>
619 inline void CallBuiltin(Args&&... args);
620 inline void CallRuntime(Runtime::FunctionId fid);
621 inline void CallRuntime(Runtime::FunctionId fid, int num_args);
622
623 inline void Jump(Label* target, Label::Distance distance = Label::kFar);
624 inline void JumpToDeopt(Label* target);
625 inline void JumpIf(Condition cond, Label* target,
626 Label::Distance distance = Label::kFar);
627
628 inline void JumpIfRoot(Register with, RootIndex index, Label* if_equal,
629 Label::Distance distance = Label::kFar);
630 inline void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal,
631 Label::Distance distance = Label::kFar);
632 inline void JumpIfSmi(Register src, Label* on_smi,
633 Label::Distance near_jump = Label::kFar);
634 inline void JumpIfNotSmi(Register src, Label* on_not_smi,
635 Label::Distance near_jump = Label::kFar);
636 inline void JumpIfByte(Condition cc, Register value, int32_t byte,
637 Label* target, Label::Distance distance = Label::kFar);
638
639 inline void JumpIfHoleNan(DoubleRegister value, Register scratch,
640 Label* target,
641 Label::Distance distance = Label::kFar);
642 inline void JumpIfNotHoleNan(DoubleRegister value, Register scratch,
643 Label* target,
644 Label::Distance distance = Label::kFar);
645 inline void JumpIfNan(DoubleRegister value, Label* target,
646 Label::Distance distance = Label::kFar);
647 inline void JumpIfNotNan(DoubleRegister value, Label* target,
648 Label::Distance distance = Label::kFar);
649 inline void JumpIfNotHoleNan(MemOperand operand, Label* target,
650 Label::Distance distance = Label::kFar);
651
652 inline void CompareInt32AndJumpIf(Register r1, Register r2, Condition cond,
653 Label* target,
654 Label::Distance distance = Label::kFar);
655 inline void CompareIntPtrAndJumpIf(Register r1, Register r2, Condition cond,
656 Label* target,
657 Label::Distance distance = Label::kFar);
658 inline void CompareIntPtrAndJumpIf(Register r1, int32_t value, Condition cond,
659 Label* target,
660 Label::Distance distance = Label::kFar);
661 inline void CompareInt32AndJumpIf(Register r1, int32_t value, Condition cond,
662 Label* target,
663 Label::Distance distance = Label::kFar);
664 inline void CompareInt32AndBranch(Register r1, int32_t value, Condition cond,
665 BasicBlock* if_true, BasicBlock* if_false,
666 BasicBlock* next_block);
667 inline void CompareInt32AndBranch(Register r1, Register r2, Condition cond,
668 BasicBlock* if_true, BasicBlock* if_false,
669 BasicBlock* next_block);
670 inline void CompareInt32AndBranch(Register r1, int32_t value, Condition cond,
671 Label* if_true,
672 Label::Distance true_distance,
673 bool fallthrough_when_true, Label* if_false,
674 Label::Distance false_distance,
675 bool fallthrough_when_false);
676 inline void CompareInt32AndBranch(Register r1, Register r2, Condition cond,
677 Label* if_true,
678 Label::Distance true_distance,
679 bool fallthrough_when_true, Label* if_false,
680 Label::Distance false_distance,
681 bool fallthrough_when_false);
682 inline void CompareIntPtrAndBranch(Register r1, int32_t value, Condition cond,
683 BasicBlock* if_true, BasicBlock* if_false,
684 BasicBlock* next_block);
685 inline void CompareIntPtrAndBranch(Register r1, int32_t value, Condition cond,
686 Label* if_true,
687 Label::Distance true_distance,
688 bool fallthrough_when_true,
689 Label* if_false,
690 Label::Distance false_distance,
691 bool fallthrough_when_false);
692 inline void CompareInt32AndAssert(Register r1, Register r2, Condition cond,
693 AbortReason reason);
694 inline void CompareInt32AndAssert(Register r1, int32_t value, Condition cond,
695 AbortReason reason);
696 inline void CompareSmiAndJumpIf(Register r1, Tagged<Smi> value,
697 Condition cond, Label* target,
698 Label::Distance distance = Label::kFar);
699 inline void CompareSmiAndAssert(Register r1, Tagged<Smi> value,
700 Condition cond, AbortReason reason);
701 inline void CompareByteAndJumpIf(MemOperand left, int8_t right,
702 Condition cond, Register scratch,
703 Label* target,
704 Label::Distance distance = Label::kFar);
705
706 inline void CompareDoubleAndJumpIfZeroOrNaN(
707 DoubleRegister reg, Label* target,
708 Label::Distance distance = Label::kFar);
709 inline void CompareDoubleAndJumpIfZeroOrNaN(
710 MemOperand operand, Label* target,
711 Label::Distance distance = Label::kFar);
712
713 inline void TestInt32AndJumpIfAnySet(Register r1, int32_t mask, Label* target,
714 Label::Distance distance = Label::kFar);
715 inline void TestInt32AndJumpIfAnySet(MemOperand operand, int32_t mask,
716 Label* target,
717 Label::Distance distance = Label::kFar);
718 inline void TestUint8AndJumpIfAnySet(MemOperand operand, uint8_t mask,
719 Label* target,
720 Label::Distance distance = Label::kFar);
721
722 inline void TestInt32AndJumpIfAllClear(
723 Register r1, int32_t mask, Label* target,
724 Label::Distance distance = Label::kFar);
725 inline void TestInt32AndJumpIfAllClear(
726 MemOperand operand, int32_t mask, Label* target,
727 Label::Distance distance = Label::kFar);
728 inline void TestUint8AndJumpIfAllClear(
729 MemOperand operand, uint8_t mask, Label* target,
730 Label::Distance distance = Label::kFar);
731
732 inline void Int32ToDouble(DoubleRegister result, Register src);
733 inline void Uint32ToDouble(DoubleRegister result, Register src);
734 inline void SmiToDouble(DoubleRegister result, Register smi);
735 inline void IntPtrToDouble(DoubleRegister result, Register src);
736
737 inline void StringLength(Register result, Register string);
738 inline void LoadThinStringValue(Register result, Register string);
739
740 // The registers WriteBarrierDescriptor::ObjectRegister and
741 // WriteBarrierDescriptor::SlotAddressRegister can be clobbered.
743 Register array, Register index, Register value,
744 RegisterSnapshot register_snapshot);
746 Register index,
747 Register value);
748
749 // TODO(victorgomes): Import baseline Pop(T...) methods.
750 inline void Pop(Register dst);
751 using MacroAssembler::Pop;
752
753 template <typename... T>
754 inline void Push(T... vals);
755 template <typename... T>
756 inline void PushReverse(T... vals);
757
758 void OSRPrologue(Graph* graph);
759 void Prologue(Graph* graph);
760
761 inline void FinishCode();
762
763 inline void AssertStackSizeCorrect();
764 inline Condition FunctionEntryStackCheck(int stack_check_offset);
765
766 inline void SetMapAsRoot(Register object, RootIndex map);
767
768 inline void LoadHeapNumberValue(DoubleRegister result, Register heap_number);
769 inline void StoreHeapNumberValue(DoubleRegister value, Register heap_number);
770
771 inline void LoadHeapInt32Value(Register result, Register heap_number);
772 inline void StoreHeapInt32Value(Register value, Register heap_number);
773
774 inline void LoadHeapNumberOrOddballValue(DoubleRegister result,
775 Register object);
776
777 void LoadDataField(const PolymorphicAccessInfo& access_info, Register result,
778 Register object, Register scratch);
779
780 void MaybeEmitDeoptBuiltinsCall(size_t eager_deopt_count,
781 Label* eager_deopt_entry,
782 size_t lazy_deopt_count,
783 Label* lazy_deopt_entry);
784
785 void GenerateCheckConstTrackingLetCellFooter(Register context, Register data,
786 int index, Label* done);
787
788 void TryMigrateInstance(Register object, RegisterSnapshot& register_snapshot,
789 Label* fail);
790
791 void TryMigrateInstanceAndMarkMapAsMigrationTarget(
792 Register object, RegisterSnapshot& register_snapshot);
793
795 return code_gen_state()->broker()->target_native_context();
796 }
797
798 MaglevCodeGenState* code_gen_state() const { return code_gen_state_; }
800 return code_gen_state()->safepoint_table_builder();
801 }
803 return code_gen_state()->compilation_info();
804 }
805
807 return scratch_register_scope_;
808 }
809
810#ifdef DEBUG
811 bool allow_allocate() const { return allow_allocate_; }
812 void set_allow_allocate(bool value) { allow_allocate_ = value; }
813
814 bool allow_call() const { return allow_call_; }
815 void set_allow_call(bool value) { allow_call_ = value; }
816
817 bool allow_deferred_call() const { return allow_deferred_call_; }
818 void set_allow_deferred_call(bool value) { allow_deferred_call_ = value; }
819#endif // DEBUG
820
821 private:
822 template <typename Derived>
823 class TemporaryRegisterScopeBase;
824
825 inline constexpr int GetFramePointerOffsetForStackSlot(int index) {
826 return StandardFrameConstants::kExpressionsOffset -
827 index * kSystemPointerSize;
828 }
829
830 inline void SmiTagInt32AndSetFlags(Register dst, Register src);
831
833 TemporaryRegisterScope* scratch_register_scope_ = nullptr;
834#ifdef DEBUG
835 bool allow_allocate_ = false;
836 bool allow_call_ = false;
837 bool allow_deferred_call_ = false;
838#endif // DEBUG
839};
840
841// Shared logic for per-architecture TemporaryRegisterScope.
842template <typename Derived>
844 public:
849
865 const SavedData& saved_data)
866 : masm_(masm),
868 available_(saved_data.available_),
870 masm_->scratch_register_scope_ = static_cast<Derived*>(this);
871 }
874 // TODO(leszeks): Clear used registers.
875 }
876
878 available_ = {};
880 static_cast<Derived*>(this)->ResetToDefaultImpl();
881 }
882
885 return available_.PopFirst();
886 }
887 void Include(const RegList list) {
889 available_ = available_ | list;
890 }
891
900
902 void SetAvailable(RegList list) { available_ = list; }
903
906
907 protected:
911
913 Derived* prev_scope_;
916};
917
919 public:
925
930
932 // TODO(leszeks): Avoid emitting safepoints when there are no registers to
933 // save.
934 auto safepoint = masm->safepoint_table_builder()->DefineSafepoint(masm);
935 int pushed_reg_index = 0;
938 safepoint.DefineTaggedRegister(pushed_reg_index);
939 }
940 pushed_reg_index++;
941 }
942#ifdef V8_TARGET_ARCH_ARM64
943 pushed_reg_index = RoundUp<2>(pushed_reg_index);
944#endif
945 int num_double_slots = snapshot_.live_double_registers.Count() *
947#ifdef V8_TARGET_ARCH_ARM64
948 num_double_slots = RoundUp<2>(num_double_slots);
949#endif
950 safepoint.SetNumExtraSpillSlots(pushed_reg_index + num_double_slots);
951 }
952
953 inline void DefineSafepointWithLazyDeopt(LazyDeoptInfo* lazy_deopt_info);
954
955 private:
958};
959
961 : ZoneLabelRef(masm->compilation_info()->zone()) {}
962
963// ---
964// Deopt
965// ---
966
968 for (auto deopt : code_gen_state_->eager_deopts()) {
969 if (deopt->deopt_entry_label() == label) {
970 return true;
971 }
972 }
973 return false;
974}
975
976template <typename NodeT>
978 DeoptimizeReason reason) {
979 static_assert(NodeT::kProperties.can_eager_deopt());
980 EagerDeoptInfo* deopt_info = node->eager_deopt_info();
981 if (deopt_info->reason() != DeoptimizeReason::kUnknown) {
982 DCHECK_EQ(deopt_info->reason(), reason);
983 }
984 if (deopt_info->deopt_entry_label()->is_unused()) {
985 code_gen_state()->PushEagerDeopt(deopt_info);
986 deopt_info->set_reason(reason);
987 }
988 return node->eager_deopt_info()->deopt_entry_label();
989}
990
991template <typename NodeT>
993 DeoptimizeReason reason) {
994 RecordComment("-- jump to eager deopt");
995 JumpToDeopt(GetDeoptLabel(node, reason));
996}
997
998template <typename NodeT>
1000 DeoptimizeReason reason,
1001 NodeT* node) {
1002 RecordComment("-- Jump to eager deopt");
1003 JumpIf(cond, GetDeoptLabel(node, reason));
1004}
1005
1006template <typename NodeT>
1008 DeoptimizeReason reason) {
1009 RecordComment("-- Jump to eager deopt");
1010 JumpIfSmi(object, GetDeoptLabel(node, reason));
1011}
1012
1013template <typename NodeT>
1015 DeoptimizeReason reason) {
1016 RecordComment("-- Jump to eager deopt");
1017 JumpIfNotSmi(object, GetDeoptLabel(node, reason));
1018}
1019
1020
1021// Helpers for pushing arguments.
1022template <typename T>
1024 public:
1025 // Although we pretend to be a random access iterator, only methods that are
1026 // required for Push() are implemented right now.
1027 typedef std::random_access_iterator_tag iterator_category;
1028 typedef T value_type;
1029 typedef int difference_type;
1030 typedef T* pointer;
1031 typedef T reference;
1032 RepeatIterator(T val, int count) : val_(val), count_(count) {}
1033 reference operator*() const { return val_; }
1034 pointer operator->() { return &val_; }
1036 ++count_;
1037 return *this;
1038 }
1040 --count_;
1041 return *this;
1042 }
1044 count_ += diff;
1045 return *this;
1046 }
1047 bool operator!=(const RepeatIterator<T>& that) const {
1048 return count_ != that.count_;
1049 }
1050 bool operator==(const RepeatIterator<T>& that) const {
1051 return count_ == that.count_;
1052 }
1054 return count_ - it.count_;
1055 }
1056
1057 private:
1060};
1061
1062template <typename T>
1063auto RepeatValue(T val, int count) {
1065 RepeatIterator<T>(val, count));
1066}
1067
1068namespace detail {
1069
1070template <class T>
1071struct is_iterator_range : std::false_type {};
1072template <typename T>
1073struct is_iterator_range<base::iterator_range<T>> : std::true_type {};
1074
1075} // namespace detail
1076
1077// General helpers.
1078
1080 switch (cond) {
1081#define CASE(Name) \
1082 case AssertCondition::k##Name: \
1083 return k##Name;
1085#undef CASE
1086 }
1087}
1088
1089constexpr Condition ConditionFor(Operation operation) {
1090 switch (operation) {
1091 case Operation::kEqual:
1092 case Operation::kStrictEqual:
1093 return kEqual;
1094 case Operation::kLessThan:
1095 return kLessThan;
1096 case Operation::kLessThanOrEqual:
1097 return kLessThanEqual;
1098 case Operation::kGreaterThan:
1099 return kGreaterThan;
1100 case Operation::kGreaterThanOrEqual:
1101 return kGreaterThanEqual;
1102 default:
1103 UNREACHABLE();
1104 }
1105}
1106
1108 switch (operation) {
1109 case Operation::kEqual:
1110 case Operation::kStrictEqual:
1111 return kEqual;
1112 case Operation::kLessThan:
1113 return kUnsignedLessThan;
1114 case Operation::kLessThanOrEqual:
1116 case Operation::kGreaterThan:
1117 return kUnsignedGreaterThan;
1118 case Operation::kGreaterThanOrEqual:
1120 default:
1121 UNREACHABLE();
1122 }
1123}
1124
1125} // namespace maglev
1126} // namespace internal
1127} // namespace v8
1128
1129#endif // V8_MAGLEV_MAGLEV_ASSEMBLER_H_
#define Assert(condition)
interpreter::OperandScale scale
Definition builtins.cc:44
V8_INLINE void RecordComment(const char *comment, const SourceLocation &loc=SourceLocation::Current())
Definition assembler.h:417
V8_INLINE bool is_unused() const
Definition label.h:65
void PushAll(RegList registers)
void PopAll(RegList registers)
Safepoint DefineSafepoint(Assembler *assembler)
constexpr bool is_empty() const
constexpr bool has(RegisterT reg) const
constexpr unsigned Count() const
constexpr RegisterT PopFirst()
static constexpr Register no_reg()
MachineRepresentation representation() const
DeoptimizeReason reason() const
Definition maglev-ir.h:1675
void set_reason(DeoptimizeReason reason)
Definition maglev-ir.h:1676
TemporaryRegisterScopeBase(MaglevAssembler *masm, const SavedData &saved_data)
compiler::NativeContextRef native_context() const
Label * GetDeoptLabel(NodeT *node, DeoptimizeReason reason)
void JumpIfSmi(Register src, Label *on_smi, Label::Distance near_jump=Label::kFar)
MaglevAssembler(Isolate *isolate, Zone *zone, MaglevCodeGenState *code_gen_state)
TemporaryRegisterScope * scratch_register_scope_
TemporaryRegisterScope * scratch_register_scope() const
MaglevSafepointTableBuilder * safepoint_table_builder() const
void CompareInstanceTypeRange(Register map, InstanceType lower_limit, InstanceType higher_limit)
void JumpIfNotSmi(Register src, Label *on_not_smi, Label::Distance near_jump=Label::kFar)
void EmitEagerDeoptIfSmi(NodeT *node, Register object, DeoptimizeReason reason)
static constexpr DoubleRegList GetAllocatableDoubleRegisters()
MaglevCompilationInfo * compilation_info() const
int GetFramePointerOffsetForStackSlot(const compiler::AllocatedOperand &operand)
MaglevCodeGenState * code_gen_state() const
void JumpIf(Condition cond, Label *target, Label::Distance distance=Label::kFar)
void LoadBitField(Register result, MemOperand operand)
void EmitEagerDeoptIf(Condition cond, DeoptimizeReason reason, NodeT *node)
void EmitEagerDeoptIfNotSmi(NodeT *node, Register object, DeoptimizeReason reason)
constexpr int GetFramePointerOffsetForStackSlot(int index)
static constexpr RegList GetAllocatableRegisters()
void CheckAndEmitDeferredIndirectPointerWriteBarrier(Register object, int offset, Register value, RegisterSnapshot register_snapshot, IndirectPointerTag tag)
void EmitEagerDeopt(NodeT *node, DeoptimizeReason reason)
MaglevCodeGenState *const code_gen_state_
std::conditional_t< store_mode==kField, int, Register > OffsetTypeFor
const std::vector< EagerDeoptInfo * > & eager_deopts() const
static int TemporaryCount(size_t map_count)
MapCompare(MaglevAssembler *masm, Register object, size_t map_count)
void Generate(Handle< Map > map, Condition cond, Label *if_true, Label::Distance distance=Label::kFar)
std::random_access_iterator_tag iterator_category
difference_type operator-(const RepeatIterator< T > &it) const
RepeatIterator & operator+=(difference_type diff)
bool operator!=(const RepeatIterator< T > &that) const
bool operator==(const RepeatIterator< T > &that) const
SaveRegisterStateForCall(MaglevAssembler *masm, RegisterSnapshot snapshot)
void DefineSafepointWithLazyDeopt(LazyDeoptInfo *lazy_deopt_info)
static ZoneLabelRef UnsafeFromLabelPointer(Label *label)
#define NOOP_UNLESS_DEBUG_CODE
Definition assembler.h:628
bool is_empty
Definition sweeper.cc:229
base::Vector< const DirectHandle< Object > > args
Definition execution.cc:74
Label label
int32_t offset
ZoneVector< RpoNumber > & result
LiftoffRegister reg
FunctionLiteral * literal
Definition liveedit.cc:294
uint32_t const mask
ComparisonMode
#define ASSERT_CONDITION(V)
Definition maglev-ir.h:6413
auto make_iterator_range(ForwardIterator begin, ForwardIterator end)
Definition iterator.h:65
Builtin AllocateBuiltin(AllocationType alloc_type)
constexpr Condition ConditionFor(Operation operation)
ExternalReference SpaceAllocationTopAddress(Isolate *isolate, AllocationType alloc_type)
Condition ToCondition(AssertCondition cond)
auto RepeatValue(T val, int count)
constexpr Condition UnsignedConditionFor(Operation operation)
ExternalReference SpaceAllocationLimitAddress(Isolate *isolate, AllocationType alloc_type)
Address Tagged_t
Definition globals.h:547
static constexpr RegList kAllocatableGeneralRegisters
Definition reglist.h:36
constexpr int kSystemPointerSize
Definition globals.h:410
constexpr Register kMaglevExtraScratchRegister
constexpr Register kMaglevFlagsRegister
constexpr int kDoubleSize
Definition globals.h:407
static constexpr DoubleRegList kAllocatableDoubleRegisters
Definition reglist.h:43
#define CHECK(condition)
Definition logging.h:124
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
constexpr T RoundUp(T x, intptr_t m)
Definition macros.h:387
#define V8_EXPORT_PRIVATE
Definition macros.h:460
std::unique_ptr< ValueMirror > value