V8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
maglev-assembler.cc
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/maglev/maglev-assembler.h"

#include "src/codegen/interface-descriptors-inl.h"
#include "src/maglev/maglev-assembler-inl.h"
#include "src/numbers/conversions.h"

namespace v8 {
namespace internal {
namespace maglev {

// Inside deferred-code lambdas the assembler is reached through the captured
// masm pointer, hence this shorthand.
#define __ masm->

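// Allocates a HeapNumber containing `value` into `result`. The value register
// is marked live in the snapshot so a runtime allocation cannot clobber it.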
void MaglevAssembler::AllocateHeapNumber(RegisterSnapshot register_snapshot,
                                         Register result,
                                         DoubleRegister value) {
  // If we end up calling the runtime, the value register must be spilled:
  // even if it is not live in the next node, the allocation call might
  // otherwise trash it.
  register_snapshot.live_double_registers.set(value);
  Allocate(register_snapshot, result, sizeof(HeapNumber));
  SetMapAsRoot(result, RootIndex::kHeapNumberMap);
  StoreFloat64(FieldMemOperand(result, offsetof(HeapNumber, value_)), value);
}

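// Allocates an uninitialized SeqTwoByteString of static `length`, setting up
// its map, (empty) hash field, and length.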
void MaglevAssembler::AllocateTwoByteString(RegisterSnapshot register_snapshot,
                                            Register result, int length) {
  int size = SeqTwoByteString::SizeFor(length);
  Allocate(register_snapshot, result, size);
  // Zero the last word of the allocation so trailing padding is initialized.
  StoreInt32Field(result, size - kObjectAlignment, 0);
  SetMapAsRoot(result, RootIndex::kSeqTwoByteStringMap);
  StoreInt32Field(result, offsetof(Name, raw_hash_field_),
                  Name::kEmptyHashField);
  StoreInt32Field(result, offsetof(String, length_), length);
}

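// Returns `input` in a register: constants and stack slots are materialized
// into `scratch`; operands already assigned a register are returned directly.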
Register MaglevAssembler::FromAnyToRegister(const Input& input,
                                            Register scratch) {
  if (input.operand().IsConstant()) {
    input.node()->LoadToRegister(this, scratch);
    return scratch;
  }
  const compiler::AllocatedOperand& operand =
      compiler::AllocatedOperand::cast(input.operand());
  if (operand.IsRegister()) {
    return ToRegister(input);
  } else {
    DCHECK(operand.IsStackSlot());
    Move(scratch, ToMemOperand(input));
    return scratch;
  }
}

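// Loads the canonical one-character string for the (static) char code from
// the single-character string table root.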
void MaglevAssembler::LoadSingleCharacterString(Register result,
                                                int char_code) {
  DCHECK_GE(char_code, 0);
  DCHECK_LT(char_code, String::kMaxOneByteCharCode);
  Register table = result;
  LoadRoot(table, RootIndex::kSingleCharacterStringTable);
  LoadTaggedField(result, table,
                  OFFSET_OF_DATA_START(FixedArray) + char_code * kTaggedSize);
}

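// Loads the data field described by `access_info`: resolves a constant holder
// if present, chases the out-of-object property array when needed, and then
// loads the field itself.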
void MaglevAssembler::LoadDataField(const PolymorphicAccessInfo& access_info,
                                    Register result, Register object,
                                    Register scratch) {
  Register load_source = object;
  // Resolve the property holder.
  if (access_info.holder().has_value()) {
    load_source = scratch;
    Move(load_source, access_info.holder().value().object());
  }
  FieldIndex field_index = access_info.field_index();
  if (!field_index.is_inobject()) {
    Register load_source_object = load_source;
    if (load_source == object) {
      load_source = scratch;
    }
    // The field is in the property array; load that array first.
    AssertNotSmi(load_source_object);
    LoadTaggedField(load_source, load_source_object,
                    JSReceiver::kPropertiesOrHashOffset);
  }
  AssertNotSmi(load_source);
  LoadTaggedField(result, load_source, field_index.offset());
}

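// Helpers testing the map's "undetectable" bit; undetectable objects (such as
// document.all) masquerade as undefined. JumpIfNotUndetectable also treats
// Smis as not undetectable, while JumpIfUndetectable skips past them.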
void MaglevAssembler::JumpIfNotUndetectable(Register object, Register scratch,
                                            CheckType check_type, Label* target,
                                            Label::Distance distance) {
  if (check_type == CheckType::kCheckHeapObject) {
    JumpIfSmi(object, target, distance);
  } else if (v8_flags.debug_code) {
    AssertNotSmi(object);
  }
  // For heap objects, check the map's undetectable bit.
  LoadMap(scratch, object);
  TestUint8AndJumpIfAllClear(FieldMemOperand(scratch, Map::kBitFieldOffset),
                             Map::Bits1::IsUndetectableBit::kMask, target,
                             distance);
}

void MaglevAssembler::JumpIfUndetectable(Register object, Register scratch,
                                         CheckType check_type, Label* target,
                                         Label::Distance distance) {
  Label detectable;
  if (check_type == CheckType::kCheckHeapObject) {
    JumpIfSmi(object, &detectable, Label::kNear);
  } else if (v8_flags.debug_code) {
    AssertNotSmi(object);
  }
  // For heap objects, check the map's undetectable bit.
  LoadMap(scratch, object);
  TestUint8AndJumpIfAnySet(FieldMemOperand(scratch, Map::kBitFieldOffset),
                           Map::Bits1::IsUndetectableBit::kMask, target,
                           distance);
  bind(&detectable);
}

void MaglevAssembler::JumpIfNotCallable(Register object, Register scratch,
                                        CheckType check_type, Label* target,
                                        Label::Distance distance) {
  if (check_type == CheckType::kCheckHeapObject) {
    JumpIfSmi(object, target, distance);
  } else if (v8_flags.debug_code) {
    AssertNotSmi(object);
  }
  LoadMap(scratch, object);
  static_assert(Map::kBitFieldOffsetEnd + 1 - Map::kBitFieldOffset == 1);
  TestUint8AndJumpIfAllClear(FieldMemOperand(scratch, Map::kBitFieldOffset),
                             Map::Bits1::IsCallableBit::kMask, target,
                             distance);
}

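// Makes sure `elements` refers to a writable (non-copy-on-write) FixedArray.
// If its map is not the FixedArray map, deferred code calls a builtin that
// copies the elements and returns the writable copy in `elements`.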
void MaglevAssembler::EnsureWritableFastElements(
    RegisterSnapshot register_snapshot, Register elements, Register object,
    Register scratch) {
  ZoneLabelRef done(this);
  CompareMapWithRoot(elements, RootIndex::kFixedArrayMap, scratch);
  JumpToDeferredIf(
      kNotEqual,
      [](MaglevAssembler* masm, ZoneLabelRef done, Register object,
         Register result_reg, RegisterSnapshot snapshot) {
        {
          snapshot.live_registers.clear(result_reg);
          snapshot.live_tagged_registers.clear(result_reg);
          SaveRegisterStateForCall save_register_state(masm, snapshot);
          __ CallBuiltin<Builtin::kCopyFastSmiOrObjectElements>(object);
          save_register_state.DefineSafepoint();
          __ Move(result_reg, kReturnRegister0);
        }
        __ Jump(*done);
      },
      done, object, elements, register_snapshot);
  bind(*done);
}

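// Implements the ECMAScript ToBoolean conversion on `value`, branching to
// is_true/is_false. Falsey values are false, Smi zero, the empty string,
// undefined, null, undetectable objects, zero/NaN HeapNumbers, and
// zero-length BigInts; everything else is truthy.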
void MaglevAssembler::ToBoolean(Register value, CheckType check_type,
                                ZoneLabelRef is_true, ZoneLabelRef is_false,
                                bool fallthrough_when_true) {
  TemporaryRegisterScope temps(this);

  if (check_type == CheckType::kCheckHeapObject) {
    // Check if {value} is a Smi.
    Condition is_smi = CheckSmi(value);
    JumpToDeferredIf(
        is_smi,
        [](MaglevAssembler* masm, Register value, ZoneLabelRef is_true,
           ZoneLabelRef is_false) {
          // Check if {value} is not zero.
          __ CompareSmiAndJumpIf(value, Smi::FromInt(0), kEqual, *is_false);
          __ Jump(*is_true);
        },
        value, is_true, is_false);
  } else if (v8_flags.debug_code) {
    AssertNotSmi(value);
  }

#if V8_STATIC_ROOTS_BOOL
  // Check if {value} is a falsey root or the true value.
  // Undefined is the first root, so it's the smallest possible pointer
  // value, which means we don't have to subtract it for the range check.
  ReadOnlyRoots roots(isolate_);
  static_assert(StaticReadOnlyRoot::kFirstAllocatedRoot ==
                StaticReadOnlyRoot::kUndefinedValue);
  static_assert(StaticReadOnlyRoot::kUndefinedValue + sizeof(Undefined) ==
                StaticReadOnlyRoot::kNullValue);
  static_assert(StaticReadOnlyRoot::kNullValue + sizeof(Null) ==
                StaticReadOnlyRoot::kempty_string);
  static_assert(StaticReadOnlyRoot::kempty_string +
                    SeqOneByteString::SizeFor(0) ==
                StaticReadOnlyRoot::kFalseValue);
  static_assert(StaticReadOnlyRoot::kFalseValue + sizeof(False) ==
                StaticReadOnlyRoot::kTrueValue);
  CompareInt32AndJumpIf(value, StaticReadOnlyRoot::kTrueValue,
                        kUnsignedLessThan, *is_false);
  // Reuse the condition flags from the above int32 compare to also check for
  // the true value itself.
  JumpIf(kEqual, *is_true);
#else
  // Check if {value} is false.
  JumpIfRoot(value, RootIndex::kFalseValue, *is_false);

  // Check if {value} is true.
  JumpIfRoot(value, RootIndex::kTrueValue, *is_true);

  // Check if {value} is the empty string.
  JumpIfRoot(value, RootIndex::kempty_string, *is_false);

  // Only check null and undefined if we're not going to check the
  // undetectable bit.
  if (compilation_info()
          ->broker()
          ->dependencies()
          ->DependOnNoUndetectableObjectsProtector()) {
    // Check if {value} is undefined.
    JumpIfRoot(value, RootIndex::kUndefinedValue, *is_false);

    // Check if {value} is null.
    JumpIfRoot(value, RootIndex::kNullValue, *is_false);
  }
#endif
  Register map = temps.AcquireScratch();
  LoadMap(map, value);

  if (!compilation_info()
           ->broker()
           ->dependencies()
           ->DependOnNoUndetectableObjectsProtector()) {
    // Check if {value} is undetectable.
    TestUint8AndJumpIfAnySet(FieldMemOperand(map, Map::kBitFieldOffset),
                             Map::Bits1::IsUndetectableBit::kMask, *is_false);
  }

  // Check if {value} is a HeapNumber.
  JumpIfRoot(map, RootIndex::kHeapNumberMap,
             MakeDeferredCode(
                 [](MaglevAssembler* masm, Register value, ZoneLabelRef is_true,
                    ZoneLabelRef is_false) {
                   __ CompareDoubleAndJumpIfZeroOrNaN(
                       FieldMemOperand(value, offsetof(HeapNumber, value_)),
                       *is_false);
                   __ Jump(*is_true);
                 },
                 value, is_true, is_false));

  // Check if {value} is a BigInt.
  // {map} is not needed after this check; we pass it to the deferred code so
  // it can be added to the temporary registers there.
  JumpIfRoot(map, RootIndex::kBigIntMap,
             MakeDeferredCode(
                 [](MaglevAssembler* masm, Register value, Register map,
                    ZoneLabelRef is_true, ZoneLabelRef is_false) {
                   TemporaryRegisterScope temps(masm);
                   temps.IncludeScratch(map);
                   __ TestInt32AndJumpIfAllClear(
                       FieldMemOperand(value, offsetof(BigInt, bitfield_)),
                       BigInt::LengthBits::kMask, *is_false);
                   __ Jump(*is_true);
                 },
                 value, map, is_true, is_false));
  // Otherwise true.
  if (!fallthrough_when_true) {
    Jump(*is_true);
  }
}

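// Materializes `value` into `dst` as a tagged value. Constants become Smis
// when they fit and heap numbers otherwise; untagged stack values are
// smi-tagged when possible, falling back to a NewHeapNumber builtin call
// instead of deoptimizing.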
void MaglevAssembler::MaterialiseValueNode(Register dst, ValueNode* value) {
  switch (value->opcode()) {
    case Opcode::kInt32Constant: {
      int32_t int_value = value->Cast<Int32Constant>()->value();
      if (Smi::IsValid(int_value)) {
        Move(dst, Smi::FromInt(int_value));
      } else {
        MoveHeapNumber(dst, int_value);
      }
      return;
    }
    case Opcode::kUint32Constant: {
      uint32_t uint_value = value->Cast<Uint32Constant>()->value();
      if (Smi::IsValid(uint_value)) {
        Move(dst, Smi::FromInt(uint_value));
      } else {
        MoveHeapNumber(dst, uint_value);
      }
      return;
    }
    case Opcode::kFloat64Constant: {
      double double_value =
          value->Cast<Float64Constant>()->value().get_scalar();
      int smi_value;
      if (DoubleToSmiInteger(double_value, &smi_value)) {
        Move(dst, Smi::FromInt(smi_value));
      } else {
        MoveHeapNumber(dst, double_value);
      }
      return;
    }
    default:
      break;
  }
  DCHECK(!value->allocation().IsConstant());
  DCHECK(value->allocation().IsAnyStackSlot());
  using D = NewHeapNumberDescriptor;
  DoubleRegister builtin_input_value = D::GetDoubleRegisterParameter(D::kValue);
  MemOperand src = ToMemOperand(value->allocation());
  switch (value->properties().value_representation()) {
    case ValueRepresentation::kInt32: {
      Label done;
      TemporaryRegisterScope temps(this);
      Register scratch = temps.AcquireScratch();
      Move(scratch, src);
      SmiTagInt32AndJumpIfSuccess(dst, scratch, &done, Label::kNear);
      // If smi tagging fails, instead of bailing out (deopting), we change
      // the representation to a HeapNumber.
      Int32ToDouble(builtin_input_value, scratch);
      CallBuiltin<Builtin::kNewHeapNumber>(builtin_input_value);
      Move(dst, kReturnRegister0);
      bind(&done);
      break;
    }
    case ValueRepresentation::kUint32: {
      Label done;
      TemporaryRegisterScope temps(this);
      Register scratch = temps.AcquireScratch();
      Move(scratch, src);
      SmiTagUint32AndJumpIfSuccess(dst, scratch, &done, Label::kNear);
      // If smi tagging fails, instead of bailing out (deopting), we change
      // the representation to a HeapNumber.
      Uint32ToDouble(builtin_input_value, scratch);
      CallBuiltin<Builtin::kNewHeapNumber>(builtin_input_value);
      Move(dst, kReturnRegister0);
      bind(&done);
      break;
    }
    case ValueRepresentation::kFloat64:
      LoadFloat64(builtin_input_value, src);
      CallBuiltin<Builtin::kNewHeapNumber>(builtin_input_value);
      Move(dst, kReturnRegister0);
      break;
    case ValueRepresentation::kHoleyFloat64: {
      Label done, box;
      JumpIfNotHoleNan(src, &box, Label::kNear);
      LoadRoot(dst, RootIndex::kUndefinedValue);
      Jump(&done);
      bind(&box);
      LoadFloat64(builtin_input_value, src);
      CallBuiltin<Builtin::kNewHeapNumber>(builtin_input_value);
      Move(dst, kReturnRegister0);
      bind(&done);
      break;
    }
    case ValueRepresentation::kIntPtr: {
      Label done;
      TemporaryRegisterScope temps(this);
      Register scratch = temps.AcquireScratch();
      Move(scratch, src);
      SmiTagIntPtrAndJumpIfSuccess(dst, scratch, &done, Label::kNear);
      // If smi tagging fails, instead of bailing out (deopting), we change
      // the representation to a HeapNumber.
      IntPtrToDouble(builtin_input_value, scratch);
      CallBuiltin<Builtin::kNewHeapNumber>(builtin_input_value);
      Move(dst, kReturnRegister0);
      bind(&done);
      break;
    }
    case ValueRepresentation::kTagged:
      UNREACHABLE();
  }
}

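// Emits the type checks behind `typeof object == <literal>`, branching to
// is_true/is_false with optional fallthrough on either edge.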
void MaglevAssembler::TestTypeOf(
    Register object, interpreter::TestTypeOfFlags::LiteralFlag literal,
    Label* is_true, Label::Distance true_distance, bool fallthrough_when_true,
    Label* is_false, Label::Distance false_distance,
    bool fallthrough_when_false) {
  // If both true and false are fallthroughs, we don't have to do anything.
  if (fallthrough_when_true && fallthrough_when_false) return;

  // IMPORTANT: Note that `object` could be a register that aliases registers
  // in the TemporaryRegisterScope. Make sure that all reads of `object` happen
  // before any writes to scratch registers.
  using LiteralFlag = interpreter::TestTypeOfFlags::LiteralFlag;
  switch (literal) {
    case LiteralFlag::kNumber: {
      TemporaryRegisterScope temps(this);
      Register scratch = temps.AcquireScratch();
      JumpIfSmi(object, is_true, true_distance);
      CompareMapWithRoot(object, RootIndex::kHeapNumberMap, scratch);
      Branch(kEqual, is_true, true_distance, fallthrough_when_true, is_false,
             false_distance, fallthrough_when_false);
      return;
    }
    case LiteralFlag::kString: {
      JumpIfSmi(object, is_false, false_distance);
      CheckJSAnyIsStringAndBranch(object, is_true, true_distance,
                                  fallthrough_when_true, is_false,
                                  false_distance, fallthrough_when_false);
      return;
    }
    case LiteralFlag::kSymbol: {
      JumpIfSmi(object, is_false, false_distance);
      BranchOnObjectType(object, SYMBOL_TYPE, is_true, true_distance,
                         fallthrough_when_true, is_false, false_distance,
                         fallthrough_when_false);
      return;
    }
    case LiteralFlag::kBoolean:
      JumpIfRoot(object, RootIndex::kTrueValue, is_true, true_distance);
      CompareRoot(object, RootIndex::kFalseValue);
      Branch(kEqual, is_true, true_distance, fallthrough_when_true, is_false,
             false_distance, fallthrough_when_false);
      return;
    case LiteralFlag::kBigInt: {
      JumpIfSmi(object, is_false, false_distance);
      BranchOnObjectType(object, BIGINT_TYPE, is_true, true_distance,
                         fallthrough_when_true, is_false, false_distance,
                         fallthrough_when_false);
      return;
    }
    case LiteralFlag::kUndefined: {
      TemporaryRegisterScope temps(this);
      Register map = temps.AcquireScratch();
      // Make sure `object` isn't a valid temp here, since we reuse it.
      DCHECK(!temps.Available().has(object));
      JumpIfSmi(object, is_false, false_distance);
      // Check that it has the undetectable bit set and that it is not null.
      LoadMap(map, object);
      TestUint8AndJumpIfAllClear(FieldMemOperand(map, Map::kBitFieldOffset),
                                 Map::Bits1::IsUndetectableBit::kMask, is_false,
                                 false_distance);
      CompareRoot(object, RootIndex::kNullValue);
      Branch(kNotEqual, is_true, true_distance, fallthrough_when_true, is_false,
             false_distance, fallthrough_when_false);
      return;
    }
    case LiteralFlag::kFunction: {
      TemporaryRegisterScope temps(this);
      Register scratch = temps.AcquireScratch();
      JumpIfSmi(object, is_false, false_distance);
      // Check that the callable bit is set and the undetectable bit is not.
      LoadMap(scratch, object);
      Branch(IsCallableAndNotUndetectable(scratch, scratch), is_true,
             true_distance, fallthrough_when_true, is_false, false_distance,
             fallthrough_when_false);
      return;
    }
    case LiteralFlag::kObject: {
      TemporaryRegisterScope temps(this);
      Register scratch = temps.AcquireScratch();
      JumpIfSmi(object, is_false, false_distance);
      // If the object is null then return true.
      JumpIfRoot(object, RootIndex::kNullValue, is_true, true_distance);
      // Check if the object is a receiver type ...
      LoadMap(scratch, object);
      CompareInstanceTypeAndJumpIf(scratch, FIRST_JS_RECEIVER_TYPE, kLessThan,
                                   is_false, false_distance);
      // ... and is neither undefined (undetectable) nor callable.
      Branch(IsNotCallableNorUndetactable(scratch, scratch), is_true,
             true_distance, fallthrough_when_true, is_false, false_distance,
             fallthrough_when_false);
      return;
    }
    case LiteralFlag::kOther:
      if (!fallthrough_when_false) {
        Jump(is_false, false_distance);
      }
      return;
  }
  UNREACHABLE();
}

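// Emits a write barrier for a tagged store. Fast paths skip the barrier for
// Smi values, read-only values (with static roots), and uninteresting pages;
// otherwise deferred code computes the slot address and calls the RecordWrite
// stub.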
template <MaglevAssembler::StoreMode store_mode>
void MaglevAssembler::CheckAndEmitDeferredWriteBarrier(
    Register object, OffsetTypeFor<store_mode> offset, Register value,
    RegisterSnapshot register_snapshot, ValueIsCompressed value_is_compressed,
    ValueCanBeSmi value_can_be_smi) {
  ZoneLabelRef done(this);
  Label* deferred_write_barrier = MakeDeferredCode(
      [](MaglevAssembler* masm, ZoneLabelRef done, Register object,
         OffsetTypeFor<store_mode> offset, Register value,
         RegisterSnapshot register_snapshot, ValueIsCompressed value_type) {
        ASM_CODE_COMMENT_STRING(masm, "Write barrier slow path");
        if (PointerCompressionIsEnabled() && value_type == kValueIsCompressed) {
          __ DecompressTagged(value, value);
        }

        {
          // Use the value as the scratch register if possible, since
          // CheckPageFlag emits slightly better code when value == scratch.
          TemporaryRegisterScope temp(masm);
          Register scratch = temp.AcquireScratch();
          if (value != object && !register_snapshot.live_registers.has(value)) {
            scratch = value;
          }
          __ CheckPageFlag(value, scratch,
                           MemoryChunk::kPointersToHereAreInterestingMask,
                           kEqual, *done);
        }

        Register stub_object_reg = WriteBarrierDescriptor::ObjectRegister();
        Register slot_reg = WriteBarrierDescriptor::SlotAddressRegister();

        RegList saved;
        // The RecordWrite stub promises to restore all allocatable registers,
        // but not necessarily non-allocatable registers like temporaries. Make
        // sure we're not trying to keep any non-allocatable registers alive.
        CHECK((register_snapshot.live_registers - kAllocatableGeneralRegisters)
                  .is_empty());
        if (object != stub_object_reg &&
            register_snapshot.live_registers.has(stub_object_reg)) {
          saved.set(stub_object_reg);
        }
        if (register_snapshot.live_registers.has(slot_reg)) {
          saved.set(slot_reg);
        }

        __ PushAll(saved);

        if (object != stub_object_reg) {
          __ Move(stub_object_reg, object);
          object = stub_object_reg;
        }

        if constexpr (store_mode == kElement) {
          __ SetSlotAddressForFixedArrayElement(slot_reg, object, offset);
        } else {
          static_assert(store_mode == kField);
          __ SetSlotAddressForTaggedField(slot_reg, object, offset);
        }

        SaveFPRegsMode const save_fp_mode =
            !register_snapshot.live_double_registers.is_empty()
                ? SaveFPRegsMode::kSave
                : SaveFPRegsMode::kIgnore;

        __ CallRecordWriteStub(object, slot_reg, save_fp_mode);

        __ PopAll(saved);
        __ Jump(*done);
      },
      done, object, offset, value, register_snapshot, value_is_compressed);

  if (!value_can_be_smi) {
    AssertNotSmi(value);
  }

#if V8_STATIC_ROOTS_BOOL
  // Quick check for read-only and small Smi values.
  static_assert(StaticReadOnlyRoot::kLastAllocatedRoot < kRegularPageSize);
  JumpIfUnsignedLessThan(value, kRegularPageSize, *done);
#endif  // V8_STATIC_ROOTS_BOOL

  if (value_can_be_smi) {
    JumpIfSmi(value, *done);
  }

  TemporaryRegisterScope temp(this);
  Register scratch = temp.AcquireScratch();
  CheckPageFlag(object, scratch,
                MemoryChunk::kPointersFromHereAreInterestingMask, kNotEqual,
                deferred_write_barrier);
  bind(*done);
}

#ifdef V8_ENABLE_SANDBOX

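// Sandbox-only variant for indirect (trusted) pointer fields: the deferred
// slow path calls the IndirectPointerBarrier builtin, and the barrier is
// needed only while incremental marking is active.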
void MaglevAssembler::CheckAndEmitDeferredIndirectPointerWriteBarrier(
    Register object, int offset, Register value,
    RegisterSnapshot register_snapshot, IndirectPointerTag tag) {
  ZoneLabelRef done(this);
  Label* deferred_write_barrier = MakeDeferredCode(
      [](MaglevAssembler* masm, ZoneLabelRef done, Register object, int offset,
         Register value, RegisterSnapshot register_snapshot,
         IndirectPointerTag tag) {
        ASM_CODE_COMMENT_STRING(masm, "Write barrier slow path");

        Register stub_object_reg =
            IndirectPointerWriteBarrierDescriptor::ObjectRegister();
        Register slot_reg =
            IndirectPointerWriteBarrierDescriptor::SlotAddressRegister();
        Register tag_reg =
            IndirectPointerWriteBarrierDescriptor::IndirectPointerTagRegister();

        RegList saved;
        if (object != stub_object_reg &&
            register_snapshot.live_registers.has(stub_object_reg)) {
          saved.set(stub_object_reg);
        }
        if (register_snapshot.live_registers.has(slot_reg)) {
          saved.set(slot_reg);
        }
        if (register_snapshot.live_registers.has(tag_reg)) {
          saved.set(tag_reg);
        }

        __ PushAll(saved);

        if (object != stub_object_reg) {
          __ Move(stub_object_reg, object);
          object = stub_object_reg;
        }
        __ SetSlotAddressForTaggedField(slot_reg, object, offset);
        __ Move(tag_reg, tag);

        SaveFPRegsMode const save_fp_mode =
            !register_snapshot.live_double_registers.is_empty()
                ? SaveFPRegsMode::kSave
                : SaveFPRegsMode::kIgnore;

        __ CallBuiltin(Builtins::IndirectPointerBarrier(save_fp_mode));

        __ PopAll(saved);
        __ Jump(*done);
      },
      done, object, offset, value, register_snapshot, tag);

  AssertNotSmi(value);

  JumpIfMarking(deferred_write_barrier);
  bind(*done);
}

#endif  // V8_ENABLE_SANDBOX

void MaglevAssembler::StoreTaggedFieldWithWriteBarrier(
    Register object, int offset, Register value,
    RegisterSnapshot register_snapshot, ValueIsCompressed value_is_compressed,
    ValueCanBeSmi value_can_be_smi) {
  AssertNotSmi(object);
  StoreTaggedFieldNoWriteBarrier(object, offset, value);
  CheckAndEmitDeferredWriteBarrier<kField>(
      object, offset, value, register_snapshot, value_is_compressed,
      value_can_be_smi);
}

#ifdef V8_ENABLE_SANDBOX

void MaglevAssembler::StoreTrustedPointerFieldWithWriteBarrier(
    Register object, int offset, Register value,
    RegisterSnapshot register_snapshot, IndirectPointerTag tag) {
  AssertNotSmi(object);
  StoreTrustedPointerFieldNoWriteBarrier(object, offset, value);
  CheckAndEmitDeferredIndirectPointerWriteBarrier(object, offset, value,
                                                  register_snapshot, tag);
}

#endif  // V8_ENABLE_SANDBOX

void MaglevAssembler::StoreFixedArrayElementWithWriteBarrier(
    Register array, Register index, Register value,
    RegisterSnapshot register_snapshot) {
  if (v8_flags.debug_code) {
    AssertObjectType(array, FIXED_ARRAY_TYPE, AbortReason::kUnexpectedValue);
    CompareInt32AndAssert(index, 0, kGreaterThanEqual,
                          AbortReason::kUnexpectedNegativeValue);
  }
  StoreFixedArrayElementNoWriteBarrier(array, index, value);
  CheckAndEmitDeferredWriteBarrier<kElement>(
      array, index, value, register_snapshot, kValueIsDecompressed,
      kValueCanBeSmi);
}

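// Footer of the "const tracking let" check: loads the script context's side
// data for the slot and jumps to `done` unless the recorded property kind is
// still kConst, in which case execution falls through to the caller's deopt.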
void MaglevAssembler::GenerateCheckConstTrackingLetCellFooter(Register context,
                                                              Register data,
                                                              int index,
                                                              Label* done) {
  Label smi_data, deopt;

  // Load the const tracking let side data.
  LoadTaggedField(
      data, context,
      Context::OffsetOfElementAt(Context::CONTEXT_SIDE_TABLE_PROPERTY_INDEX));

  LoadTaggedField(data, data,
                  FixedArray::OffsetOfElementAt(
                      index - Context::MIN_CONTEXT_EXTENDED_SLOTS));

  // Load the property.
  JumpIfSmi(data, &smi_data, Label::kNear);
  JumpIfRoot(data, RootIndex::kUndefinedValue, &deopt);
  if (v8_flags.debug_code) {
    AssertObjectType(data, CONTEXT_SIDE_PROPERTY_CELL_TYPE,
                     AbortReason::kUnexpectedValue);
  }
  LoadTaggedField(data, data,
                  ContextSidePropertyCell::kPropertyDetailsRawOffset);

  // It must be different from kConst.
  bind(&smi_data);
  CompareTaggedAndJumpIf(data, ContextSidePropertyCell::Const(), kNotEqual,
                         done, Label::kNear);
  bind(&deopt);
}

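// Calls Runtime::kTryMigrateInstance on `object`, keeping the return value
// alive across the register-restoring pop; jumps to `fail` when the runtime
// signals failure by returning Smi zero.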
void MaglevAssembler::TryMigrateInstance(Register object,
                                         RegisterSnapshot& register_snapshot,
                                         Label* fail) {
  Register return_val = Register::no_reg();
  {
    SaveRegisterStateForCall save_register_state(this, register_snapshot);

    Push(object);
    Move(kContextRegister, native_context().object());
    CallRuntime(Runtime::kTryMigrateInstance);
    save_register_state.DefineSafepoint();

    // Make sure the return value is preserved across the live-register
    // restoring pop-all.
    return_val = kReturnRegister0;
    TemporaryRegisterScope temps(this);
    Register scratch = temps.AcquireScratch();
    if (register_snapshot.live_registers.has(return_val)) {
      DCHECK(!register_snapshot.live_registers.has(scratch));
      Move(scratch, return_val);
      return_val = scratch;
    }
  }

  // On failure, the returned value is Smi zero.
  CompareTaggedAndJumpIf(return_val, Smi::zero(), kEqual, fail);
}

void MaglevAssembler::TryMigrateInstanceAndMarkMapAsMigrationTarget(
    Register object, RegisterSnapshot& register_snapshot) {
  SaveRegisterStateForCall save_register_state(this, register_snapshot);
  Push(object);
  Move(kContextRegister, native_context().object());
  CallRuntime(Runtime::kTryMigrateInstanceAndMarkMapAsMigrationTarget);
  save_register_state.DefineSafepoint();
}

}  // namespace maglev
}  // namespace internal
}  // namespace v8