#ifdef V8_ENABLE_MAGLEV
#define DEF_NAME(Name) #Name,
  return names[static_cast<int>(opcode)];
[[maybe_unused]] struct Do_not_use_kScratchRegister_in_arch_independent_code {
} kScratchRegister;
[[maybe_unused]] struct
    Do_not_use_kScratchDoubleRegister_in_arch_independent_code {
} kScratchDoubleRegister;
static_assert(
    !std::is_same_v<decltype(kScratchDoubleRegister), DoubleRegister>);
template <size_t InputCount, typename Base, typename Derived>
int StaticInputCount(FixedInputNodeTMixin<InputCount, Base, Derived>*) {
                                   OpProperties new_properties) {
  if (new_opcode == Opcode::kDead) return;
  DCHECK_IMPLIES(new_properties.can_eager_deopt(),
                 properties().can_eager_deopt());
  DCHECK_IMPLIES(new_properties.can_lazy_deopt(),
                 properties().can_lazy_deopt());
  DCHECK_IMPLIES(new_properties.needs_register_snapshot(),
                 properties().needs_register_snapshot());
  size_t old_sizeof = -1;
  case Opcode::k##op:        \
    old_sizeof = sizeof(op); \
  switch (new_opcode) {
  case Opcode::k##op: {                                                   \
    DCHECK_EQ(old_input_count, StaticInputCount(static_cast<op*>(this))); \
    DCHECK_LE(sizeof(op), old_sizeof);                                    \
  int bound_inputs = input_count();
  if (merge_state()->is_unmerged_loop()) --bound_inputs;
  for (int i = 0; i < bound_inputs; i++) {
    phi_input->RecordUseReprHint(repr_mask);
  for (uint32_t i = 0; i < inputs; ++i) {
    phi->SetUseRequires31BitValue();
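// Debug-printer helper: detects a store into an inlined allocation that
// escape analysis has already analysed and elided.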
bool IsStoreToNonEscapedObject(const NodeBase* node) {
    return alloc->HasBeenAnalysed() && alloc->HasBeenElided();
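// Overload set used by PrintImpl below; the NodeBase overloads of PrintResult
// and PrintTargets are intentionally empty so that only node kinds with a
// result or with control-flow targets print the extra information.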
void PrintInputs(std::ostream& os, MaglevGraphLabeller* graph_labeller,
                 const NodeBase* node) {
  if (!node->has_inputs()) return;
  for (int i = 0; i < node->input_count(); i++) {
    if (i != 0) os << ", ";
    graph_labeller->PrintInput(os, node->input(i));
void PrintResult(std::ostream& os, MaglevGraphLabeller* graph_labeller,
                 const NodeBase* node) {}

void PrintResult(std::ostream& os, MaglevGraphLabeller* graph_labeller,
                 const ValueNode* node) {
  os << " → " << node->result().operand();
  if (node->result().operand().IsAllocated() && node->is_spilled() &&
      node->spill_slot() != node->result().operand()) {
    os << " (spilled: " << node->spill_slot() << ")";
  if (node->has_valid_live_range()) {
    os << ", live range: [" << node->live_range().start << "-"
       << node->live_range().end << "]";
  if (!node->has_id()) {
    os << ", " << node->use_count() << " uses";
    if (const InlinedAllocation* alloc = node->TryCast<InlinedAllocation>()) {
      os << " (" << alloc->non_escaping_use_count() << " non escaping uses)";
      if (alloc->HasBeenAnalysed() && alloc->HasBeenElided()) {
    } else if (!node->is_used()) {
      if (node->opcode() != Opcode::kAllocationBlock &&
          node->properties().is_required_when_unused()) {
        os << ", but required";
void PrintTargets(std::ostream& os, MaglevGraphLabeller* graph_labeller,
                  const NodeBase* node) {}

void PrintTargets(std::ostream& os, MaglevGraphLabeller* graph_labeller,
                  const UnconditionalControlNode* node) {
  os << " b" << node->target()->id();

void PrintTargets(std::ostream& os, MaglevGraphLabeller* graph_labeller,
                  const BranchControlNode* node) {
  os << " b" << node->if_true()->id() << " b" << node->if_false()->id();

void PrintTargets(std::ostream& os, MaglevGraphLabeller* graph_labeller,
                  const Switch* node) {
  for (int i = 0; i < node->size(); i++) {
    const BasicBlockRef& target = node->Cast<Switch>()->targets()[i];
    os << " b" << target.block_ptr()->id();
  if (node->Cast<Switch>()->has_fallthrough()) {
    BasicBlock* fallthrough_target = node->Cast<Switch>()->fallthrough();
    os << " b" << fallthrough_target->id();
class MaybeUnparkForPrint {
  MaybeUnparkForPrint() {
    LocalHeap* local_heap = LocalHeap::Current();
    if (local_heap->IsParked()) {
      scope_.emplace(local_heap);
template <typename NodeT>
void PrintImpl(std::ostream& os, MaglevGraphLabeller* graph_labeller,
               const NodeT* node, bool skip_targets) {
  MaybeUnparkForPrint unpark;
  os << node->opcode();
  node->PrintParams(os, graph_labeller);
  PrintInputs(os, graph_labeller, node);
  PrintResult(os, graph_labeller, node);
  if (IsStoreToNonEscapedObject(node)) {
  PrintTargets(os, graph_labeller, node);
    case RootIndex::kFalseValue:
    case RootIndex::kNullValue:
    case RootIndex::kUndefinedValue:
    case RootIndex::kNanValue:
    case RootIndex::kHoleNanValue:
    case RootIndex::kMinusZeroValue:
    case RootIndex::kempty_string:
#ifdef V8_ENABLE_WEBASSEMBLY
    case RootIndex::kWasmNull:
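// RootToBoolean above hard-codes the ToBoolean value of the roots listed in
// its switch; CheckToBooleanOnAllRoots cross-checks that table against
// Object::BooleanValue for every read-only root in debug builds.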
bool CheckToBooleanOnAllRoots(LocalIsolate* local_isolate) {
  ReadOnlyRoots roots(local_isolate);
#define DO_CHECK(type, name, CamelName)                            \
    if (roots.name() != roots.undefined_value() ||                 \
        RootIndex::k##CamelName == RootIndex::kUndefinedValue) {   \
      DCHECK_EQ(Object::BooleanValue(roots.name(), local_isolate), \
                RootToBoolean(RootIndex::k##CamelName));           \
  for (size_t i = 0; i < count; ++i) {
  input_location_count_ = count;
  static bool check_once = CheckToBooleanOnAllRoots(local_isolate);
  return RootToBoolean(index_);
  switch (node->opcode()) {
  case Opcode::k##Name: {                                \
    return node->Cast<Name>()->ToBoolean(local_isolate); \
      : top_frame_(top_frame), feedback_to_update_(feedback_to_update) {}
    return unit().register_count() + unit().parameter_count() -
    return unit().register_count() - result_location.index();
    for (int i = 0;; i++) {
        target_frame = target_frame->parent();
      if (i == handler_info->depth()) break;
      target_frame = target_frame->parent();
                     bool skip_targets) const {
  case Opcode::k##Name: \
    return PrintImpl(os, graph_labeller, this->Cast<Name>(), skip_targets);
  Print(std::cout, &labeller);
  std::cout << std::endl;
  if (!hint_.IsInvalid()) return;
  auto operand = compiler::UnallocatedOperand::cast(result_.operand());
  if (operand.HasSameAsInputPolicy()) {
  for (Input& input : *this) {
    if (input.node()->has_id() && input.node()->id() < this->id()) {
      input.node()->SetHint(hint);
             compiler::UnallocatedOperand::cast(result().operand())
                 .virtual_register());
#define CASE(MathName, ExtName, EnumName) \
  case Ieee754Function::k##EnumName:      \
    return ExternalReference::ieee754_##ExtName##_function();
  switch (type.representation()) {
    std::ostringstream str;
    str << "Type representation error: node ";
    if (graph_labeller) {
      str << "#" << graph_labeller->NodeId(node) << " : ";
    str << node->opcode() << " (input @" << i << " = " << input->opcode()
        << ") type " << got << " is not " << expected;
    FATAL("%s", str.str().c_str());
  if (got != expected) {
    std::ostringstream str;
    str << "Opcode error: node ";
    if (graph_labeller) {
      str << "#" << graph_labeller->NodeId(node) << " : ";
    str << node->opcode() << " (input @" << i << " = " << input->opcode()
        << ") opcode " << got << " is not " << expected;
    FATAL("%s", str.str().c_str());
  for (int i = 0; i < input_count(); i++) {
  switch (value_representation()) {
#define CASE_REPR(repr)                                        \
  case ValueRepresentation::k##repr:                           \
    for (int i = 0; i < input_count(); i++) {                  \
      CheckValueInputIs(this, i, ValueRepresentation::k##repr, \
  for (int i = 0; i < input_count(); i++) {
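// With pointer compression enabled, the call sequence needs full tagged
// pointers, so each generic call node below marks every input as needing
// decompression.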
#ifdef V8_COMPRESS_POINTERS
void Call::MarkTaggedInputsAsDecompressing() {
  for (int i = 0; i < input_count(); i++) {
    input(i).node()->SetTaggedResultNeedsDecompress();
  for (int i = 0; i < input_count(); i++) {
#ifdef V8_COMPRESS_POINTERS
void CallForwardVarargs::MarkTaggedInputsAsDecompressing() {
  for (int i = 0; i < input_count(); i++) {
    input(i).node()->SetTaggedResultNeedsDecompress();
#ifdef V8_COMPRESS_POINTERS
void CallWithArrayLike::MarkTaggedInputsAsDecompressing() {
    input(i).node()->SetTaggedResultNeedsDecompress();
  for (int i = 0; i < input_count(); i++) {
#ifdef V8_COMPRESS_POINTERS
void CallWithSpread::MarkTaggedInputsAsDecompressing() {
  for (int i = 0; i < input_count(); i++) {
    input(i).node()->SetTaggedResultNeedsDecompress();
  for (int i = 0; i < input_count(); i++) {
#ifdef V8_COMPRESS_POINTERS
void CallSelf::MarkTaggedInputsAsDecompressing() {
  for (int i = 0; i < input_count(); i++) {
    input(i).node()->SetTaggedResultNeedsDecompress();
  for (int i = 0; i < input_count(); i++) {
#ifdef V8_COMPRESS_POINTERS
void CallKnownJSFunction::MarkTaggedInputsAsDecompressing() {
  for (int i = 0; i < input_count(); i++) {
    input(i).node()->SetTaggedResultNeedsDecompress();
  for (int i = 0; i < input_count(); i++) {
#ifdef V8_COMPRESS_POINTERS
void CallKnownApiFunction::MarkTaggedInputsAsDecompressing() {
  for (int i = 0; i < input_count(); i++) {
    input(i).node()->SetTaggedResultNeedsDecompress();
  for (int i = 0; i < input_count(); i++) {
#ifdef V8_COMPRESS_POINTERS
void Construct::MarkTaggedInputsAsDecompressing() {
  for (int i = 0; i < input_count(); i++) {
    input(i).node()->SetTaggedResultNeedsDecompress();
  for (int i = 0; i < input_count(); i++) {
#ifdef V8_COMPRESS_POINTERS
void ConstructWithSpread::MarkTaggedInputsAsDecompressing() {
  for (int i = 0; i < input_count(); i++) {
    input(i).node()->SetTaggedResultNeedsDecompress();
  int count = input_count();
  if (descriptor.HasContextParameter()) {
  if (descriptor.AllowVarArgs()) {
    DCHECK_GE(all_input_count, descriptor.GetParameterCount());
    DCHECK_EQ(all_input_count, descriptor.GetParameterCount());
            ? descriptor.GetParameterType(i)
#ifdef V8_COMPRESS_POINTERS
void CallBuiltin::MarkTaggedInputsAsDecompressing() {
  int count = input_count();
  if (descriptor.HasContextParameter()) {
    input(count - 1).node()->SetTaggedResultNeedsDecompress();
            ? descriptor.GetParameterType(i)
    if (type.IsTagged() && !type.IsTaggedSigned()) {
      input(i).node()->SetTaggedResultNeedsDecompress();
  for (int i = 0; i < input_count(); i++) {
#ifdef V8_COMPRESS_POINTERS
void CallCPPBuiltin::MarkTaggedInputsAsDecompressing() {
  for (int i = 0; i < input_count(); i++) {
    input(i).node()->SetTaggedResultNeedsDecompress();
  for (int i = 0; i < input_count(); i++) {
#ifdef V8_COMPRESS_POINTERS
void CallRuntime::MarkTaggedInputsAsDecompressing() {
  for (int i = 0; i < input_count(); i++) {
    input(i).node()->SetTaggedResultNeedsDecompress();
  if (auto host_alloc =
  CHECK_EQ(host_alloc->allocation_block()->allocation_type(),
  alloc->object()->ForEachInput([&](ValueNode* value) {
      next_alloc->allocation_block()->TryPretenure();
    } else if (auto phi = value->TryCast<Phi>()) {
      for (int i = 0; i < phi->input_count(); ++i) {
          phi_alloc->allocation_block()->TryPretenure();
  case Opcode::k##Name: \
    return this->Cast<Name>()->DoReify(isolate);
  return isolate->root_handle(index());
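// LoadToRegisterHelper dispatches DoLoadToRegister to a node's concrete type;
// the two overloads separate values that live in general-purpose registers
// from those that live in double registers.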
#ifdef V8_ENABLE_MAGLEV
template <typename NodeT>
void LoadToRegisterHelper(NodeT* node, MaglevAssembler* masm, Register reg) {
                    NodeT::kProperties.value_representation())) {
    return node->DoLoadToRegister(masm, reg);
template <typename NodeT>
void LoadToRegisterHelper(NodeT* node, MaglevAssembler* masm,
                    NodeT::kProperties.value_representation())) {
    return node->DoLoadToRegister(masm, reg);
  case Opcode::k##Name: \
    return LoadToRegisterHelper(this->Cast<Name>(), masm, reg);
  case Opcode::k##Name: \
    return LoadToRegisterHelper(this->Cast<Name>(), masm, reg);
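// Nodes in this list exist only in Turbolev (Turboshaft-frontend) graphs, so
// their Maglev code-generation hooks must never be reached.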
#define TURBOLEV_UNREACHABLE_NODE(Name)                                \
  void Name::SetValueLocationConstraints() { UNREACHABLE(); }          \
  void Name::GenerateCode(MaglevAssembler*, const ProcessingState&) {  \
#undef TURBOLEV_UNREACHABLE_NODE
                              const ProcessingState& state) {}
                              const ProcessingState& state) {}
                              const ProcessingState& state) {}
                              const ProcessingState& state) {}
                              const ProcessingState& state) {}
                              const ProcessingState& state) {}
                              const ProcessingState& state) {}
                              const ProcessingState& state) {
#ifndef V8_ENABLE_SANDBOX
                              const ProcessingState& state) {}
                                    const ProcessingState& state) {
  set_temporaries_needed(2);
                                          const ProcessingState& state) {
  const int stack_check_offset = masm->code_gen_state()->stack_check_offset();
  DCHECK_LE(register_snapshot().live_registers.Count(), 1);
          register_snapshot().live_tagged_registers.has(
              ? Builtin::kMaglevFunctionEntryStackCheck_WithNewTarget
              : Builtin::kMaglevFunctionEntryStackCheck_WithoutNewTarget;
  ZoneLabelRef done(masm);
  if (masm->isolate()->is_short_builtin_calls_enabled()) {
  masm->DefineLazyDeoptPoint(lazy_deopt_info());
  __ JumpToDeferredIf(
      [](MaglevAssembler* masm, ZoneLabelRef done,
         int stack_check_offset) {
        masm->DefineLazyDeoptPoint(node->lazy_deopt_info());
      done, this, builtin, stack_check_offset);
                                 const ProcessingState& state) {
                                const ProcessingState& state) {
  for (Node* node : state.block()->nodes()) {
  __ EmitEagerDeopt(this, deoptimize_reason());
  for (Input& input : *this) {
      CallInterfaceDescriptorFor<Builtin::kNewSloppyArgumentsElements>::type;
      CallInterfaceDescriptorFor<Builtin::kNewStrictArgumentsElements>::type;
      CallInterfaceDescriptorFor<Builtin::kNewRestArgumentsElements>::type;
    SloppyArgsD::GetRegisterParameter(SloppyArgsD::kArgumentCount) ==
    StrictArgsD::GetRegisterParameter(StrictArgsD::kArgumentCount));
    SloppyArgsD::GetRegisterParameter(SloppyArgsD::kArgumentCount) ==
    RestArgsD::GetRegisterParameter(RestArgsD::kArgumentCount));
    SloppyArgsD::GetRegisterParameter(SloppyArgsD::kArgumentCount));
                                           const ProcessingState& state) {
      __ CallBuiltin<Builtin::kNewSloppyArgumentsElements>(
      __ CallBuiltin<Builtin::kNewStrictArgumentsElements>(
      __ CallBuiltin<Builtin::kNewRestArgumentsElements>(
  set_temporaries_needed(1);
                                           const ProcessingState& state) {
  Label allocate_elements, done;
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  RegisterSnapshot snapshot = register_snapshot();
  snapshot.live_registers.set(length);
  __ CompareInt32AndJumpIf(length, 0, kNotEqual, &allocate_elements,
  __ LoadRoot(elements, RootIndex::kEmptyFixedArray);
  __ bind(&allocate_elements);
  __ CompareInt32AndJumpIf(
      __ GetDeoptLabel(this,
                       DeoptimizeReason::kGreaterThanMaxFastElementArray));
  __ Move(size_in_bytes, length);
  __ SetMapAsRoot(elements, RootIndex::kFixedArrayMap);
  __ UncheckedSmiTagInt32(smi_length, length);
  __ StoreTaggedFieldNoWriteBarrier(elements, offsetof(FixedArray, length_),
  __ LoadTaggedRoot(the_hole, RootIndex::kTheHoleValue);
  __ DecrementInt32(length);
  __ StoreFixedArrayElementNoWriteBarrier(elements, length, the_hole);
  switch (operation) {
  case Operation::k##name: \
    return Builtin::k##name##_WithFeedback;
template <class Derived, Operation kOperation>
  using D = UnaryOp_WithFeedbackDescriptor;
  UseFixed(operand_input(), D::GetRegisterParameter(D::kValue));
template <class Derived, Operation kOperation>
    MaglevAssembler* masm, const ProcessingState& state) {
  __ CallBuiltin<BuiltinFor(kOperation)>(
      masm->native_context().object(),
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
template <class Derived, Operation kOperation>
void BinaryWithFeedbackNode<Derived,
                            kOperation>::SetValueLocationConstraints() {
  using D = BinaryOp_WithFeedbackDescriptor;
  UseFixed(left_input(), D::GetRegisterParameter(D::kLeft));
  UseFixed(right_input(), D::GetRegisterParameter(D::kRight));
template <class Derived, Operation kOperation>
    MaglevAssembler* masm, const ProcessingState& state) {
  __ CallBuiltin<BuiltinFor(kOperation)>(
      masm->native_context().object(),
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
#define DEF_OPERATION(Name)                               \
  void Name::SetValueLocationConstraints() {              \
    Base::SetValueLocationConstraints();                  \
  void Name::GenerateCode(MaglevAssembler* masm,          \
                          const ProcessingState& state) { \
    Base::GenerateCode(masm, state);                      \
template <typename T>
  static Register Get(compiler::AllocatedOperand target) {
    return target.GetRegister();
    return target.GetDoubleRegister();
                                 const ProcessingState& state) {
  case Opcode::k##Name:                           \
    return node_->Cast<Name>()->DoLoadToRegister( \
        masm, GetRegister<Name::OutputRegister>::Get(target()));
                        const ProcessingState& state) {
  if (source().IsRegister()) {
    if (target().IsAnyRegister()) {
      __ MoveRepr(repr, masm->ToMemOperand(target()), source_reg);
  } else if (source().IsDoubleRegister()) {
    if (target().IsAnyRegister()) {
      __ StoreFloat64(masm->ToMemOperand(target()), source_reg);
    if (target().IsRegister()) {
    } else if (target().IsDoubleRegister()) {
      __ MoveRepr(repr, masm->ToMemOperand(target()), source_op);
                              const ProcessingState& state) {
                            const ProcessingState& state) {
                     DeoptimizeReason::kNotASmi, this);
                                      const ProcessingState& state) {
  __ EmitEagerDeoptIfNotSmi(this, value, DeoptimizeReason::kNotASmi);
  __ SmiToInt32(value);
                                        const ProcessingState& state) {
  __ AssertSmi(value);
  __ SmiToInt32(value);
                                    const ProcessingState& state) {
  Label* fail = __ GetDeoptLabel(this, DeoptimizeReason::kNotASmi);
                                     const ProcessingState& state) {
  Label* fail = __ GetDeoptLabel(this, DeoptimizeReason::kNotASmi);
                                  const ProcessingState& state) {
  __ CompareInt32AndJumpIf(
      __ GetDeoptLabel(this, DeoptimizeReason::kNotUint32));
  set_temporaries_needed(1);
                                   const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  __ Move(scratch, std::numeric_limits<uint32_t>::max());
  __ CompareIntPtrAndJumpIf(
      __ GetDeoptLabel(this, DeoptimizeReason::kNotUint32));
                                        const ProcessingState& state) {}
  set_temporaries_needed(1);
                                       const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  Label* fail = __ GetDeoptLabel(this, DeoptimizeReason::kNotASmi);
  __ TryTruncateDoubleToInt32(scratch, value, fail);
  __ CheckInt32IsSmi(scratch, fail, scratch);
                                 const ProcessingState& state) {
  Label* fail = __ GetDeoptLabel(this, DeoptimizeReason::kNotASmi);
           GetGeneralRegistersUsedAsInputs(eager_deopt_info()));
  __ SmiTagInt32AndJumpIfFail(reg, fail);
                               const ProcessingState& state) {
  Label* fail = __ GetDeoptLabel(this, DeoptimizeReason::kNotASmi);
  __ CheckInt32IsSmi(reg, fail);
                                  const ProcessingState& state) {
  Label* fail = __ GetDeoptLabel(this, DeoptimizeReason::kNotASmi);
           GetGeneralRegistersUsedAsInputs(eager_deopt_info()));
  __ SmiTagUint32AndJumpIfFail(reg, fail);
                                  const ProcessingState& state) {
  Label* fail = __ GetDeoptLabel(this, DeoptimizeReason::kNotASmi);
           GetGeneralRegistersUsedAsInputs(eager_deopt_info()));
  __ SmiTagIntPtrAndJumpIfFail(reg, reg, fail);
                              const ProcessingState& state) {
                              const ProcessingState& state) {
                              const ProcessingState& state) {
                                    const ProcessingState& state) {
  Label* deopt_label = __ GetDeoptLabel(this, DeoptimizeReason::kOverflow);
                                     const ProcessingState& state) {
  Label* deopt_label = __ GetDeoptLabel(this, DeoptimizeReason::kOverflow);
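// Checks that `value` is a HeapNumber or, depending on the conversion type,
// an Oddball/Boolean; relies on HEAP_NUMBER_TYPE and ODDBALL_TYPE being
// adjacent instance types so a single range check suffices.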
void JumpToFailIfNotHeapNumberOrOddball(
    MaglevAssembler* masm, Register value,
  if (!fail && !v8_flags.debug_code) return;
  static_assert(InstanceType::HEAP_NUMBER_TYPE + 1 ==
                InstanceType::ODDBALL_TYPE);
  switch (conversion_type) {
      MaglevAssembler::TemporaryRegisterScope temps(masm);
      Register map = temps.AcquireScratch();
#if V8_STATIC_ROOTS_BOOL
      static_assert(StaticReadOnlyRoot::kBooleanMap + Map::kSize ==
                    StaticReadOnlyRoot::kHeapNumberMap);
      __ LoadMapForCompare(map, value);
        __ JumpIfObjectNotInRange(map, StaticReadOnlyRoot::kBooleanMap,
                                  StaticReadOnlyRoot::kHeapNumberMap, fail);
        __ AssertObjectInRange(map, StaticReadOnlyRoot::kBooleanMap,
                               StaticReadOnlyRoot::kHeapNumberMap,
                               AbortReason::kUnexpectedValue);
      __ LoadMap(map, value);
      __ CompareRoot(map, RootIndex::kHeapNumberMap);
      __ CompareRoot(map, RootIndex::kBooleanMap);
        __ JumpIfObjectTypeNotInRange(value, InstanceType::HEAP_NUMBER_TYPE,
                                      InstanceType::ODDBALL_TYPE, fail);
        __ AssertObjectTypeInRange(value, InstanceType::HEAP_NUMBER_TYPE,
                                   InstanceType::ODDBALL_TYPE,
                                   AbortReason::kUnexpectedValue);
        __ JumpIfNotObjectType(value, InstanceType::HEAP_NUMBER_TYPE, fail);
        __ AssertObjectType(value, InstanceType::HEAP_NUMBER_TYPE,
                            AbortReason::kUnexpectedValue);
void TryUnboxNumberOrOddball(MaglevAssembler* masm, DoubleRegister dst,
                             Register clobbered_src,
  Label is_not_smi, done;
  __ SmiToInt32(clobbered_src);
  __ Int32ToDouble(dst, clobbered_src);
  __ bind(&is_not_smi);
  JumpToFailIfNotHeapNumberOrOddball(masm, clobbered_src, conversion_type,
  __ LoadHeapNumberOrOddballValue(dst, clobbered_src);
template <typename Derived, ValueRepresentation FloatType>
void CheckedNumberOrOddballToFloat64OrHoleyFloat64<
    Derived, FloatType>::SetValueLocationConstraints() {
template <typename Derived, ValueRepresentation FloatType>
void CheckedNumberOrOddballToFloat64OrHoleyFloat64<
    Derived, FloatType>::GenerateCode(MaglevAssembler* masm,
                                      const ProcessingState& state) {
      __ GetDeoptLabel(this, deoptimize_reason()));
    MaglevAssembler* masm, const ProcessingState& state) {
  set_double_temporaries_needed(1);
                                       const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Label is_not_smi, done;
  Label* deopt_label = __ GetDeoptLabel(this, DeoptimizeReason::kNotInt32);
  __ bind(&is_not_smi);
  JumpToFailIfNotHeapNumberOrOddball(
  __ LoadHeapNumberValue(double_value, value);
void EmitTruncateNumberOrOddballToInt32(
    MaglevAssembler* masm, Register value, Register result_reg,
  Label is_not_smi, done;
  __ SmiToInt32(value);
  __ bind(&is_not_smi);
  JumpToFailIfNotHeapNumberOrOddball(masm, value, conversion_type,
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  __ LoadHeapNumberOrOddballValue(double_value, value);
  __ TruncateDoubleToInt32(result_reg, double_value);
  set_double_temporaries_needed(1);
                                       const ProcessingState& state) {
  ZoneLabelRef done(masm);
  __ MakeDeferredCode(
      [](MaglevAssembler* masm, Register object, Register result_reg,
        MaglevAssembler::TemporaryRegisterScope temps(masm);
        Register map = temps.AcquireScratch();
        __ LoadMapForCompare(map, object);
            map, RootIndex::kHeapNumberMap, &check_string,
        __ LoadHeapNumberValue(number_value, object);
        __ TryChangeFloat64ToIndex(
            result_reg, number_value, *done,
            __ GetDeoptLabel(node, DeoptimizeReason::kNotInt32));
        __ bind(&check_string);
            map, __ GetDeoptLabel(node, DeoptimizeReason::kNotInt32),
          RegisterSnapshot snapshot = node->register_snapshot();
          snapshot.live_registers.clear(result_reg);
          DCHECK(!snapshot.live_tagged_registers.has(result_reg));
          SaveRegisterStateForCall save_register_state(masm, snapshot);
          AllowExternalCallThatCantCauseGC scope(masm);
          __ PrepareCallCFunction(1);
              ExternalReference::string_to_array_index_function(), 1);
        __ CompareInt32AndJumpIf(
            __ GetDeoptLabel(node, DeoptimizeReason::kNotInt32));
      object, result_reg, done, this));
  __ SmiToInt32(result_reg, object);
    MaglevAssembler* masm, const ProcessingState& state) {
  Label* deopt_label =
      __ GetDeoptLabel(this, DeoptimizeReason::kNotANumberOrOddball);
  EmitTruncateNumberOrOddballToInt32(masm, value, result_reg,
                                     conversion_type(),
    MaglevAssembler* masm, const ProcessingState& state) {
  EmitTruncateNumberOrOddballToInt32(masm, value, result_reg,
                                     conversion_type(),
                                const ProcessingState& state) {
                               const ProcessingState& state) {
                                 const ProcessingState& state) {
                              const ProcessingState& state) {
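  // Near vs. far jump selection for the per-map compares below: the threshold
  // assumes up to 10 map compares stay in near-jump range with 4-byte tagged
  // values, and up to 5 with 8-byte tagged values.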
  constexpr int kMapCountForNearJumps = kTaggedSize == 4 ? 10 : 5;
          : Label::Distance::kFar;
    __ AssertNotSmi(object);
  if (maps_include_heap_number) {
    __ JumpIfSmi(object, &done, jump_distance);
    __ EmitEagerDeoptIfSmi(this, object, DeoptimizeReason::kWrongMap);
  MapCompare map_compare(masm, object, maps_.size());
  size_t map_count = maps().size();
  for (size_t i = 0; i < map_count - 1; ++i) {
    map_compare.Generate(map, kEqual, &done, jump_distance);
  Label* fail = __ GetDeoptLabel(this, DeoptimizeReason::kWrongMap);
  map_compare.Generate(last_map, kNotEqual, fail);
          Runtime::kTryMigrateInstanceAndMarkMapAsMigrationTarget)
    MaglevAssembler* masm, const ProcessingState& state) {
  constexpr int kMapCountForNearJumps = kTaggedSize == 4 ? 10 : 5;
          : Label::Distance::kFar;
    __ AssertNotSmi(object);
  if (maps_include_heap_number) {
    __ JumpIfSmi(object, &done, jump_distance);
    __ EmitEagerDeoptIfSmi(this, object, DeoptimizeReason::kWrongMap);
  MapCompare map_compare(masm, object, maps_.size());
  size_t map_count = maps().size();
  for (size_t i = 0; i < map_count - 1; ++i) {
    map_compare.Generate(map, kEqual, &done, jump_distance);
  map_compare.Generate(
      __ MakeDeferredCode(
          [](MaglevAssembler* masm, RegisterSnapshot register_snapshot,
            Label* deopt = __ GetDeoptLabel(node, DeoptimizeReason::kWrongMap);
            __ TestInt32AndJumpIfAllClear(
                Map::Bits3::IsDeprecatedBit::kMask, deopt);
            __ TryMigrateInstanceAndMarkMapAsMigrationTarget(
                map_compare.GetObject(), register_snapshot);
            __ JumpToDeopt(deopt);
          register_snapshot(), map_compare, this));
                                         const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  ZoneLabelRef map_checks(masm), done(masm);
    __ AssertNotSmi(object);
  if (maps_include_heap_number) {
    __ JumpIfSmi(object, *done);
    __ EmitEagerDeoptIfSmi(this, object, DeoptimizeReason::kWrongMap);
  __ bind(*map_checks);
  RegisterSnapshot save_registers = register_snapshot();
  save_registers.live_registers.set(object);
  save_registers.live_tagged_registers.set(object);
  size_t map_count = maps().size();
  bool has_migration_targets = false;
  MapCompare map_compare(masm, object, maps_.size());
  for (size_t i = 0; i < map_count; ++i) {
    map_handle = maps().at(i).object();
    const bool last_map = (i == map_count - 1);
      map_compare.Generate(map_handle, kEqual, *done);
    if (map_handle->is_migration_target()) {
      has_migration_targets = true;
  if (!has_migration_targets) {
    map_compare.Generate(map_handle, kNotEqual,
                         __ GetDeoptLabel(this, DeoptimizeReason::kWrongMap));
    map_compare.Generate(
        __ MakeDeferredCode(
            [](MaglevAssembler* masm, RegisterSnapshot register_snapshot,
               ZoneLabelRef map_checks, MapCompare map_compare,
                  __ GetDeoptLabel(node, DeoptimizeReason::kWrongMap);
              __ TestInt32AndJumpIfAllClear(
                  Map::Bits3::IsDeprecatedBit::kMask, deopt);
              __ TryMigrateInstance(map_compare.GetObject(), register_snapshot,
              __ Jump(*map_checks);
            save_registers, map_checks, map_compare, this));
                                 const ProcessingState& state) {
  constexpr int kMapCountForNearJumps = kTaggedSize == 4 ? 10 : 5;
          : Label::Distance::kFar;
  size_t map_count = maps().size();
  for (size_t i = 0; i < map_count - 1; ++i) {
    __ CompareTaggedAndJumpIf(map, map_at_i, kEqual, &done, jump_distance);
  Label* fail = __ GetDeoptLabel(this, DeoptimizeReason::kWrongMap);
  __ CompareTaggedAndJumpIf(map, last_map, kNotEqual, fail);
                                          const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  ZoneLabelRef done(masm);
  RegisterSnapshot save_registers = register_snapshot();
  save_registers.live_registers.set(object);
  save_registers.live_tagged_registers.set(object);
  __ TestInt32AndJumpIfAnySet(
      Map::Bits3::IsDeprecatedBit::kMask,
      __ MakeDeferredCode(
          [](MaglevAssembler* masm, RegisterSnapshot register_snapshot,
             ZoneLabelRef done, Register object, Register map,
            Label* deopt = __ GetDeoptLabel(node, DeoptimizeReason::kWrongMap);
            __ TryMigrateInstance(object, register_snapshot, deopt);
          save_registers, done, object, map, this));
  using D = CallInterfaceDescriptorFor<Builtin::kDeleteProperty>::type;
  return D::GetStackParameterCount();
  using D = CallInterfaceDescriptorFor<Builtin::kDeleteProperty>::type;
  UseFixed(object(), D::GetRegisterParameter(D::kObject));
  UseFixed(key(), D::GetRegisterParameter(D::kKey));
                                  const ProcessingState& state) {
  __ CallBuiltin<Builtin::kDeleteProperty>(
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
  using D = CallInterfaceDescriptorFor<Builtin::kForInPrepare>::type;
  return D::GetStackParameterCount();
  using D = CallInterfaceDescriptorFor<Builtin::kForInPrepare>::type;
                                const ProcessingState& state) {
  __ CallBuiltin<Builtin::kForInPrepare>(
  using D = CallInterfaceDescriptorFor<Builtin::kForInNext>::type;
  return D::GetStackParameterCount();
  using D = CallInterfaceDescriptorFor<Builtin::kForInNext>::type;
                             const ProcessingState& state) {
  __ CallBuiltin<Builtin::kForInNext>(context(),
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
  using D = CallInterfaceDescriptorFor<Builtin::kGetIteratorWithFeedback>::type;
  return D::GetStackParameterCount();
  using D = CallInterfaceDescriptorFor<Builtin::kGetIteratorWithFeedback>::type;
                               const ProcessingState& state) {
  __ CallBuiltin<Builtin::kGetIteratorWithFeedback>(
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
                                 const ProcessingState& state) {
  if (Int32Constant* constant =
    int32_t right_value = constant->value();
    __ CompareInt32AndJumpIf(
    __ LoadRoot(result, RootIndex::kFalseValue);
    __ LoadRoot(result, RootIndex::kTrueValue);
                                const ProcessingState& state) {
    __ LoadRoot(result,
                flip() ? RootIndex::kTrueValue : RootIndex::kFalseValue);
                flip() ? RootIndex::kFalseValue : RootIndex::kTrueValue);
                                  const ProcessingState& state) {
    __ LoadRoot(result,
                flip() ? RootIndex::kTrueValue : RootIndex::kFalseValue);
                flip() ? RootIndex::kFalseValue : RootIndex::kTrueValue);
                                   const ProcessingState& state) {
  Label is_false, end;
  __ CompareFloat64AndJumpIf(left, right,
  __ LoadRoot(result, RootIndex::kTrueValue);
  __ LoadRoot(result, RootIndex::kFalseValue);
  set_double_temporaries_needed(1);
                                 const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Label is_false, end;
  __ Move(double_scratch, 0.0);
  __ LoadRoot(result,
              flip() ? RootIndex::kFalseValue : RootIndex::kTrueValue);
              flip() ? RootIndex::kTrueValue : RootIndex::kFalseValue);
  set_temporaries_needed(1);
                                const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
                   __ GetDeoptLabel(this, DeoptimizeReason::kHole));
  set_temporaries_needed(1);
                                       const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
    __ AssertNotSmi(object);
  __ LoadTaggedField(tmp, object, offset());
  __ AssertNotSmi(tmp);
  set_temporaries_needed(1);
                                      const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
    __ AssertNotSmi(object);
  __ LoadTaggedField(tmp, object, offset());
  __ AssertNotSmi(tmp);
                                   const ProcessingState& state) {
    __ AssertNotSmi(object);
                                        const ProcessingState& state) {
    __ AssertNotSmi(object);
template <typename T>
template <typename T>
                                     const ProcessingState& state) {
    __ AssertNotSmi(object);
  if (this->decompresses_tagged_result()) {
  set_temporaries_needed(2);
  set_double_temporaries_needed(1);
    MaglevAssembler* masm, const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  ZoneLabelRef done(masm);
  __ AssertObjectType(script_context, SCRIPT_CONTEXT_TYPE,
                      AbortReason::kUnexpectedInstanceType);
  if (value == script_context) {
    __ Move(tmp, script_context);
    script_context = tmp;
  __ LoadTaggedField(value, script_context, offset());
  __ JumpIfSmi(value, *done);
  __ CompareMapWithRoot(value, RootIndex::kHeapNumberMap, scratch);
  __ JumpToDeferredIf(
      [](MaglevAssembler* masm, Register script_context, Register result_reg,
         ZoneLabelRef done) {
        Label property_loaded;
        Label check_heap_number, allocate;
        __ LoadTaggedField(scratch, script_context,
        __ JumpIfSmi(scratch, &property_loaded);
        __ AssertObjectType(scratch, CONTEXT_SIDE_PROPERTY_CELL_TYPE,
                            AbortReason::kUnexpectedInstanceType);
        __ LoadTaggedField(scratch, scratch,
                           ContextSidePropertyCell::kPropertyDetailsRawOffset);
        __ bind(&property_loaded);
        MaglevAssembler::TemporaryRegisterScope temps(masm);
        if (v8_flags.script_context_mutable_heap_int32) {
          __ CompareTaggedAndJumpIf(scratch,
          __ LoadHeapInt32Value(scratch, result_reg);
          __ Int32ToDouble(double_value, scratch);
        __ bind(&check_heap_number);
        __ CompareTaggedAndJumpIf(scratch,
        __ LoadHeapNumberValue(double_value, result_reg);
        __ AllocateHeapNumber(node->register_snapshot(), result_reg,
      script_context, value, scratch, this, done);
  set_temporaries_needed(1);
  set_double_temporaries_needed(1);
                                                const ProcessingState& state) {
    __ AssertNotSmi(object);
  __ AssertSmi(field_index);
  ZoneLabelRef done(masm);
  static constexpr int32_t kIsDoubleBitMask = 1 << kSmiTagBitsInValue;
  __ TestInt32AndJumpIfAnySet(
      field_index, kIsDoubleBitMask,
      __ MakeDeferredCode(
          [](MaglevAssembler* masm, Register object, Register field_index,
             Register result_reg, RegisterSnapshot register_snapshot,
             ZoneLabelRef done) {
            static constexpr int kIsDoubleBit = 1;
            Label if_outofobject, loaded_field;
            __ CompareInt32AndJumpIf(field_index, 0, kLessThan,
              __ SignExtend32To64Bits(field_index, field_index);
              __ LoadTaggedFieldByIndex(
                  result_reg, object, field_index, scale,
                  JSObject::kHeaderSize -
              __ Jump(&loaded_field);
            __ bind(&if_outofobject);
              MaglevAssembler::TemporaryRegisterScope temps(masm);
              Register property_array = temps.Acquire();
              __ NegateInt32(field_index);
              __ LoadTaggedFieldByIndex(
                  result_reg, property_array, field_index, scale,
              __ Jump(&loaded_field);
            __ bind(&loaded_field);
            __ JumpIfSmi(result_reg, *done);
            MaglevAssembler::TemporaryRegisterScope temps(masm);
            if (map == result_reg) {
            __ LoadMapForCompare(map, result_reg);
            __ JumpIfNotRoot(map, RootIndex::kHeapNumberMap, *done);
            __ LoadHeapNumberValue(double_value, result_reg);
            __ AllocateHeapNumber(register_snapshot, result_reg, double_value);
          object, field_index, result_reg, register_snapshot(), done));
    static constexpr int kIsDoubleBit = 0;
    Label if_outofobject;
    __ CompareInt32AndJumpIf(field_index, 0, kLessThan, &if_outofobject);
      __ SignExtend32To64Bits(field_index, field_index);
      __ LoadTaggedFieldByIndex(
          result_reg, object, field_index, scale,
    __ bind(&if_outofobject);
      MaglevAssembler::TemporaryRegisterScope temps(masm);
      Register property_array = temps.Acquire();
      __ NegateInt32(field_index);
      __ LoadTaggedFieldByIndex(
          result_reg, property_array, field_index, scale,
                                          const ProcessingState& state) {
  if (this->decompresses_tagged_result()) {
    __ LoadFixedArrayElementWithoutDecompressing(result_reg, elements, index);
                                   const ProcessingState& state) {
    MaglevAssembler* masm, const ProcessingState& state) {
  __ LoadFixedDoubleArrayElement(result_reg, elements, index);
  set_temporaries_needed(1);
    MaglevAssembler* masm, const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  __ LoadFixedDoubleArrayElement(result_reg, elements, index);
  __ JumpIfHoleNan(result_reg, temps.Acquire(),
                   __ GetDeoptLabel(this, DeoptimizeReason::kHole));
                                        const ProcessingState& state) {
    __ AssertObjectType(elements, FIXED_DOUBLE_ARRAY_TYPE,
                        AbortReason::kUnexpectedValue);
                        AbortReason::kUnexpectedNegativeValue);
  set_temporaries_needed(1);
                            const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  __ MoveTagged(value, map_.object());
    if (inlined->allocation_block()->allocation_type() ==
      __ AssertElidedWriteBarrier(object, value, register_snapshot());
      register_snapshot(),
    MaglevAssembler* masm, const ProcessingState& state) {
      object, offset(), value, register_snapshot(),
    MaglevAssembler* masm, const ProcessingState& state) {
#ifdef V8_ENABLE_SANDBOX
      register_snapshot(), tag());
  set_temporaries_needed(1);
                                                 const ProcessingState& state) {
    __ AssertObjectTypeInRange(object,
                               FIRST_JS_DATA_VIEW_OR_RAB_GSAB_DATA_VIEW_TYPE,
                               LAST_JS_DATA_VIEW_OR_RAB_GSAB_DATA_VIEW_TYPE,
                               AbortReason::kUnexpectedValue);
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register data_pointer = temps.Acquire();
  Register reg_with_result = result_reg;
    reg_with_result = data_pointer;
  __ LoadExternalPointerField(
      data_pointer, FieldMemOperand(object, JSDataView::kDataPointerOffset));
  MemOperand element_address = __ DataViewElementOperand(data_pointer, index);
  __ LoadSignedField(reg_with_result, element_address, element_size);
      __ ReverseByteOrder(result_reg, element_size);
    ZoneLabelRef keep_byte_order(masm), reverse_byte_order(masm);
    __ bind(*reverse_byte_order);
    __ ReverseByteOrder(reg_with_result, element_size);
    __ bind(*keep_byte_order);
    if (reg_with_result != result_reg) {
      __ Move(result_reg, reg_with_result);
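// The store path mirrors the load above: only elements wider than one byte
// need byte-order handling, chosen either statically via
// is_little_endian_constant() or at run time from is_little_endian_input().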
void StoreSignedIntDataViewElement::SetValueLocationConstraints() {
  if (is_little_endian_constant() ||
    UseAny(is_little_endian_input());
  set_temporaries_needed(1);

void StoreSignedIntDataViewElement::GenerateCode(MaglevAssembler* masm,
                                                 const ProcessingState& state) {
    __ AssertObjectTypeInRange(object,
                               FIRST_JS_DATA_VIEW_OR_RAB_GSAB_DATA_VIEW_TYPE,
                               LAST_JS_DATA_VIEW_OR_RAB_GSAB_DATA_VIEW_TYPE,
                               AbortReason::kUnexpectedValue);
  if (element_size > 1) {
    if (is_little_endian_constant()) {
        __ ReverseByteOrder(value, element_size);
      ZoneLabelRef keep_byte_order(masm), reverse_byte_order(masm);
      __ bind(*reverse_byte_order);
      __ ReverseByteOrder(value, element_size);
      __ bind(*keep_byte_order);
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register data_pointer = temps.Acquire();
  __ LoadExternalPointerField(
      data_pointer, FieldMemOperand(object, JSDataView::kDataPointerOffset));
  MemOperand element_address = __ DataViewElementOperand(data_pointer, index);
  __ StoreField(element_address, value, element_size);
  set_temporaries_needed(1);
                                              const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register data_pointer = temps.Acquire();
    __ AssertObjectTypeInRange(object,
                               FIRST_JS_DATA_VIEW_OR_RAB_GSAB_DATA_VIEW_TYPE,
                               LAST_JS_DATA_VIEW_OR_RAB_GSAB_DATA_VIEW_TYPE,
                               AbortReason::kUnexpectedValue);
  __ LoadExternalPointerField(
      data_pointer, FieldMemOperand(object, JSDataView::kDataPointerOffset));
    __ LoadUnalignedFloat64(result_reg, data_pointer, index);
    __ LoadUnalignedFloat64AndReverseByteOrder(result_reg, data_pointer,
    ZoneLabelRef keep_byte_order(masm), reverse_byte_order(masm);
    __ bind(*keep_byte_order);
    __ LoadUnalignedFloat64(result_reg, data_pointer, index);
    __ bind(*reverse_byte_order);
    __ LoadUnalignedFloat64AndReverseByteOrder(result_reg, data_pointer, index);
  set_temporaries_needed(1);
                                               const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register data_pointer = temps.Acquire();
    __ AssertObjectTypeInRange(object,
                               FIRST_JS_DATA_VIEW_OR_RAB_GSAB_DATA_VIEW_TYPE,
                               LAST_JS_DATA_VIEW_OR_RAB_GSAB_DATA_VIEW_TYPE,
                               AbortReason::kUnexpectedValue);
  __ LoadExternalPointerField(
      data_pointer, FieldMemOperand(object, JSDataView::kDataPointerOffset));
    __ StoreUnalignedFloat64(data_pointer, index, value);
    __ ReverseByteOrderAndStoreUnalignedFloat64(data_pointer, index, value);
    ZoneLabelRef keep_byte_order(masm), reverse_byte_order(masm);
    __ bind(*keep_byte_order);
    __ StoreUnalignedFloat64(data_pointer, index, value);
    __ bind(*reverse_byte_order);
    __ ReverseByteOrderAndStoreUnalignedFloat64(data_pointer, index, value);
                                         const ProcessingState& state) {
  __ LoadBitField<Map::Bits3::EnumLengthBits>(
  using D = CallInterfaceDescriptorFor<Builtin::kLoadGlobalIC>::type;
  return D::GetStackParameterCount();
      CallInterfaceDescriptorFor<Builtin::kLoadGlobalICInsideTypeof>::type;
  return D::GetStackParameterCount();
                              const ProcessingState& state) {
    __ CallBuiltin<Builtin::kLoadGlobalIC>(
    __ CallBuiltin<Builtin::kLoadGlobalICInsideTypeof>(
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
  using D = CallInterfaceDescriptorFor<Builtin::kStoreGlobalIC>::type;
  return D::GetStackParameterCount();
  using D = CallInterfaceDescriptorFor<Builtin::kStoreGlobalIC>::type;
                               const ProcessingState& state) {
  __ CallBuiltin<Builtin::kStoreGlobalIC>(
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
                               const ProcessingState& state) {
  Label* fail = __ GetDeoptLabel(this, deoptimize_reason());
                                    const ProcessingState& state) {
  Label* fail = __ GetDeoptLabel(this, deoptimize_reason());
  using D = CallInterfaceDescriptorFor<Builtin::kStringEqual>::type;
  RequireSpecificTemporary(D::GetRegisterParameter(D::kLength));
                                          const ProcessingState& state) {
  using D = CallInterfaceDescriptorFor<Builtin::kStringEqual>::type;
  ZoneLabelRef end(masm);
  Register target = D::GetRegisterParameter(D::kLeft);
  __ EmitEagerDeoptIfSmi(this, target, deoptimize_reason());
  __ MakeDeferredCode(
        Register target = D::GetRegisterParameter(D::kLeft);
        Register string_length = D::GetRegisterParameter(D::kLength);
        __ StringLength(string_length, target);
        Label* fail = __ GetDeoptLabel(node, deoptimize_reason);
        __ CompareInt32AndJumpIf(string_length, node->value().length(),
        RegisterSnapshot snapshot = node->register_snapshot();
          SaveRegisterStateForCall save_register_state(masm, snapshot);
          __ CallBuiltin<Builtin::kStringEqual>(
              node->target_input(),
              node->value().object(),
          save_register_state.DefineSafepoint();
        __ EmitEagerDeoptIf(kNotEqual, deoptimize_reason, node);
      this, end, deoptimize_reason()));
  __ EmitEagerDeopt(this, deoptimize_reason());
                                       const ProcessingState& state) {
  Label* fail = __ GetDeoptLabel(this, deoptimize_reason());
                            const ProcessingState& state) {
  __ EmitEagerDeoptIfNotSmi(this, object, DeoptimizeReason::kNotASmi);
                                   const ProcessingState& state) {
  __ EmitEagerDeoptIfSmi(this, object, DeoptimizeReason::kSmi);
                               const ProcessingState& state) {
    __ AssertNotSmi(object);
    __ EmitEagerDeoptIfSmi(this, object, DeoptimizeReason::kNotASymbol);
  __ JumpIfNotObjectType(object, SYMBOL_TYPE,
                         __ GetDeoptLabel(this, DeoptimizeReason::kNotASymbol));
                                     const ProcessingState& state) {
    __ AssertNotSmi(object);
    __ EmitEagerDeoptIfSmi(this, object, DeoptimizeReason::kWrongInstanceType);
    __ JumpIfNotObjectType(
        __ GetDeoptLabel(this, DeoptimizeReason::kWrongInstanceType));
    __ JumpIfObjectTypeNotInRange(
        __ GetDeoptLabel(this, DeoptimizeReason::kWrongInstanceType));
                                            const ProcessingState& state) {
    __ AssertNotSmi(indices);
    __ AssertObjectType(indices, FIXED_ARRAY_TYPE,
                        AbortReason::kOperandIsNotAFixedArray);
  __ CompareInt32AndJumpIf(length, 0, kEqual, &done);
  __ JumpIfRoot(indices, RootIndex::kEmptyFixedArray,
                __ GetDeoptLabel(this, DeoptimizeReason::kWrongEnumIndices));
                                       const ProcessingState& state) {
#ifdef V8_TARGET_ARCH_RISCV64
  __ ZeroExtendWord(index, index);
  __ AssertZeroExtended(index);
  __ CompareIntPtrAndJumpIf(
      __ GetDeoptLabel(this, DeoptimizeReason::kOutOfBounds));
                                    const ProcessingState& state) {
  Label* fail = __ GetDeoptLabel(this, deoptimize_reason());
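// Script context slots carry side data (const tracking, mutable heap
// number/int32 representations). The deferred path below deopts on stores to
// slots recorded as constant and updates mutable HeapNumber/Int32 cells in
// place; everything else falls through to a normal tagged store with a write
// barrier.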
  set_temporaries_needed(2);
  set_double_temporaries_needed(1);
    MaglevAssembler* masm, const ProcessingState& state) {
  __ RecordComment("StoreScriptContextSlotWithWriteBarrier");
  ZoneLabelRef done(masm);
  ZoneLabelRef do_normal_store(masm);
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  Register old_value = temps.Acquire();
  __ AssertObjectType(context, SCRIPT_CONTEXT_TYPE,
                      AbortReason::kUnexpectedInstanceType);
  __ LoadTaggedField(old_value, context, offset());
  __ CompareTaggedAndJumpIf(old_value, new_value, kEqual, *done);
  __ LoadTaggedField(scratch, scratch,
  __ CompareTaggedAndJumpIf(
      __ MakeDeferredCode(
          [](MaglevAssembler* masm, Register context, Register old_value,
             Register new_value, Register property,
             ZoneLabelRef do_normal_store) {
            Label check_smi, check_mutable_int32, mutable_heap_number;
            __ CompareRootAndEmitEagerDeoptIf(
                property, RootIndex::kUndefinedValue, kEqual,
                DeoptimizeReason::kStoreToConstant, node);
            __ JumpIfSmi(property, &check_smi);
            __ AssertObjectType(property, CONTEXT_SIDE_PROPERTY_CELL_TYPE,
                                AbortReason::kUnexpectedInstanceType);
                ContextSidePropertyCell::kPropertyDetailsRawOffset);
            __ bind(&check_smi);
            __ CompareTaggedAndJumpIf(
                __ GetDeoptLabel(node, DeoptimizeReason::kStoreToConstant));
            if (v8_flags.script_context_mutable_heap_number) {
              __ CompareTaggedAndJumpIf(property,
              __ EmitEagerDeoptIfNotSmi(node, new_value,
                                        DeoptimizeReason::kStoreToConstant);
              __ Jump(*do_normal_store);
              MaglevAssembler::TemporaryRegisterScope temps(masm);
              __ bind(&check_mutable_int32);
              if (v8_flags.script_context_mutable_heap_int32) {
                __ CompareTaggedAndJumpIf(
                Label new_value_is_not_smi;
                __ JumpIfNotSmi(new_value, &new_value_is_not_smi);
                __ StoreHeapInt32Value(new_value_int32, old_value);
                __ bind(&new_value_is_not_smi);
                __ CompareMapWithRoot(new_value, RootIndex::kHeapNumberMap,
                    DeoptimizeReason::kStoreToConstant, node);
                __ LoadHeapNumberValue(double_scratch, new_value);
                __ TryTruncateDoubleToInt32(
                    new_value_int32, double_scratch,
                    __ GetDeoptLabel(node,
                                     DeoptimizeReason::kStoreToConstant));
                __ StoreHeapInt32Value(new_value_int32, old_value);
              __ bind(&mutable_heap_number);
              Label new_value_is_not_smi;
              __ JumpIfNotSmi(new_value, &new_value_is_not_smi);
              __ Int32ToDouble(double_scratch, new_value_int32);
              __ StoreHeapNumberValue(double_scratch, old_value);
              __ bind(&new_value_is_not_smi);
              __ CompareMapWithRoot(new_value, RootIndex::kHeapNumberMap,
                  DeoptimizeReason::kStoreToConstant, node);
              __ LoadHeapNumberValue(double_scratch, new_value);
              __ StoreHeapNumberValue(double_scratch, old_value);
            __ Jump(*do_normal_store);
          context, old_value, new_value, scratch, this, done, do_normal_store));
  __ bind(*do_normal_store);
  __ StoreTaggedFieldWithWriteBarrier(
      context, offset(), new_value, register_snapshot(),
                              const ProcessingState& state) {
    __ AssertNotSmi(object);
    __ EmitEagerDeoptIfSmi(this, object, DeoptimizeReason::kNotAString);
  __ JumpIfNotString(object,
                     __ GetDeoptLabel(this, DeoptimizeReason::kNotAString));
  set_temporaries_needed(1);
                                             const ProcessingState& state) {
    __ AssertNotSmi(object);
    __ EmitEagerDeoptIfSmi(this, object,
                           DeoptimizeReason::kNotAStringOrStringWrapper);
      __ GetDeoptLabel(this, DeoptimizeReason::kNotAStringOrStringWrapper);
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  __ JumpIfString(object, &done);
  __ JumpIfNotObjectType(object, InstanceType::JS_PRIMITIVE_WRAPPER_TYPE,
  __ LoadMap(scratch, object);
  __ LoadBitField<Map::Bits2::ElementsKindBits>(
  set_temporaries_needed(1);
                                          const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  auto deopt = __ GetDeoptLabel(this, DeoptimizeReason::kNotDetectableReceiver);
  __ JumpIfNotCallable(object, scratch, check_type(), deopt);
  set_temporaries_needed(1);
    MaglevAssembler* masm, const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
    __ AssertNotSmi(object);
    __ EmitEagerDeoptIfSmi(
        this, object, DeoptimizeReason::kNotAJavaScriptObjectOrNullOrUndefined);
  __ JumpIfObjectTypeNotInRange(
      object, FIRST_JS_RECEIVER_TYPE, LAST_JS_RECEIVER_TYPE,
          this, DeoptimizeReason::kNotAJavaScriptObjectOrNullOrUndefined));
                                const ProcessingState& state) {
                               RootIndex::kTheHoleValue, kEqual,
                               DeoptimizeReason::kHole, this);
  set_temporaries_needed(1);
                                 const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.AcquireScratch();
                   __ GetDeoptLabel(this, DeoptimizeReason::kHole),
                              const ProcessingState& state) {
  using D = CallInterfaceDescriptorFor<Builtin::kToObject>::type;
  return D::GetStackParameterCount();
  using D = CallInterfaceDescriptorFor<Builtin::kToObject>::type;
                                  const ProcessingState& state) {
  Label convert_to_object, done;
  __ JumpIfJSAnyIsNotPrimitive(receiver, &done);
  compiler::JSHeapBroker* broker = masm->compilation_info()->broker();
    Label convert_global_proxy;
    __ JumpIfRoot(receiver, RootIndex::kUndefinedValue, &convert_global_proxy,
                  receiver, RootIndex::kNullValue, &convert_to_object,
    __ bind(&convert_global_proxy);
  __ bind(&convert_to_object);
                                          const ProcessingState& state) {
  Label done, do_throw;
  __ CompareRoot(construct_result, RootIndex::kUndefinedValue);
  __ Jump(__ MakeDeferredCode(
        __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
        masm->DefineExceptionHandlerAndLazyDeoptPoint(node);
        __ Abort(AbortReason::kUnexpectedReturnFromThrow);
                                      const ProcessingState& state) {
  DCHECK_EQ(construct_result, result_reg);
  Label done, use_receiver;
  __ JumpIfRoot(construct_result, RootIndex::kUndefinedValue, &use_receiver,
  __ bind(&use_receiver);
  __ Move(result_reg, implicit_receiver);
                                       const ProcessingState& state) {
  __ CallBuiltin<Builtin::kCreateObjectFromSlowBoilerplate>(
      masm->native_context().object(),
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
      CallInterfaceDescriptorFor<Builtin::kCreateShallowArrayLiteral>::type;
  return D::GetStackParameterCount();
                                             const ProcessingState& state) {
  __ CallBuiltin<Builtin::kCreateShallowArrayLiteral>(
      masm->native_context().object(),
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
                                      const ProcessingState& state) {
  __ CallBuiltin<Builtin::kCreateArrayFromSlowBoilerplate>(
      masm->native_context().object(),
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
      CallInterfaceDescriptorFor<Builtin::kCreateShallowObjectLiteral>::type;
  return D::GetStackParameterCount();
                                              const ProcessingState& state) {
  __ CallBuiltin<Builtin::kCreateShallowObjectLiteral>(
      masm->native_context().object(),
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
                                   const ProcessingState& state) {
                      : Runtime::kNewClosure)
                                   const ProcessingState& state) {
      pretenured() ? Runtime::kNewClosure_Tenured : Runtime::kNewClosure;
  __ CallRuntime(function_id);
  using D = CallInterfaceDescriptorFor<Builtin::kFastNewClosure>::type;
  return D::GetStackParameterCount();
  using D = CallInterfaceDescriptorFor<Builtin::kFastNewClosure>::type;
  static_assert(D::HasContextParameter());
                                       const ProcessingState& state) {
  __ CallBuiltin<Builtin::kFastNewClosure>(
  masm->DefineLazyDeoptPoint(lazy_deopt_info());
    using D = CallInterfaceDescriptorFor<
        Builtin::kFastNewFunctionContextFunction>::type;
    return D::GetStackParameterCount();
        CallInterfaceDescriptorFor<Builtin::kFastNewFunctionContextEval>::type;
    return D::GetStackParameterCount();
          static_cast<uint32_t>(
    using D = CallInterfaceDescriptorFor<
        Builtin::kFastNewFunctionContextFunction>::type;
    static_assert(D::HasContextParameter());
        CallInterfaceDescriptorFor<Builtin::kFastNewFunctionContextEval>::type;
    static_assert(D::HasContextParameter());
                                               const ProcessingState& state) {
    __ CallBuiltin<Builtin::kFastNewFunctionContextFunction>(
    __ CallBuiltin<Builtin::kFastNewFunctionContextEval>(
  masm->DefineLazyDeoptPoint(lazy_deopt_info());
  using D = CallInterfaceDescriptorFor<Builtin::kCreateRegExpLiteral>::type;
  return D::GetStackParameterCount();
                                        const ProcessingState& state) {
  __ CallBuiltin<Builtin::kCreateRegExpLiteral>(
      masm->native_context().object(),
  masm->DefineLazyDeoptPoint(lazy_deopt_info());
  using D = CallInterfaceDescriptorFor<Builtin::kGetTemplateObject>::type;
  return D::GetStackParameterCount();
  using D = GetTemplateObjectDescriptor;
                                       const ProcessingState& state) {
  __ CallBuiltin<Builtin::kGetTemplateObject>(
      masm->native_context().object(),
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
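// HasInPrototypeChain walks the prototype chain directly in generated code;
// proxies, objects with named interceptors, and access-check objects bail out
// to Runtime::kHasInPrototypeChain via the deferred path.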
  set_temporaries_needed(2);
                                          const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Label return_false, return_true;
  ZoneLabelRef done(masm);
  __ JumpIfSmi(object_reg, &return_false,
  __ LoadMap(map, object_reg);
  Register scratch = temps.Acquire();
  ZoneLabelRef if_objectisdirect(masm);
  Condition jump_cond = __ CompareInstanceTypeRange(
  __ JumpToDeferredIf(
      [](MaglevAssembler* masm, RegisterSnapshot snapshot,
         Register object_reg, Register map, Register instance_type,
         ZoneLabelRef if_objectisdirect, ZoneLabelRef done) {
        Label return_runtime;
        __ JumpIfEqual(instance_type, JS_PROXY_TYPE, &return_runtime);
        int mask = Map::Bits1::HasNamedInterceptorBit::kMask |
                   Map::Bits1::IsAccessCheckNeededBit::kMask;
        __ TestUint8AndJumpIfAllClear(
            *if_objectisdirect);
        __ bind(&return_runtime);
          snapshot.live_registers.clear(result_reg);
          SaveRegisterStateForCall save_register_state(masm, snapshot);
          __ Push(object_reg, node->prototype().object());
          __ CallRuntime(Runtime::kHasInPrototypeChain, 2);
          masm->DefineExceptionHandlerPoint(node);
          save_register_state.DefineSafepointWithLazyDeopt(
              node->lazy_deopt_info());
      register_snapshot(), object_reg, map, instance_type, result_reg, this,
      if_objectisdirect, done);
  __ bind(*if_objectisdirect);
  Register object_prototype = scratch;
  __ LoadTaggedField(object_prototype, map, Map::kPrototypeOffset);
  __ JumpIfRoot(object_prototype, RootIndex::kNullValue, &return_false,
  __ AssertNotSmi(object_prototype);
  __ LoadMap(map, object_prototype);
  __ bind(&return_true);
  __ LoadRoot(result_reg, RootIndex::kTrueValue);
  __ bind(&return_false);
  __ LoadRoot(result_reg, RootIndex::kFalseValue);
                              const ProcessingState& state) {
  __ CallRuntime(Runtime::kAbort, 1);
                                 const ProcessingState& state) {
    __ JumpIf(__ IsRootConstant(value(), RootIndex::kFalseValue), &next);
    __ JumpIf(__ IsRootConstant(value(), RootIndex::kTrueValue), &next);
    __ Abort(AbortReason::kUnexpectedValue);
  Label return_false, done;
  __ JumpIf(__ IsRootConstant(value(), RootIndex::kTrueValue), &return_false);
  __ bind(&return_false);
void LoadNamedGeneric::SetValueLocationConstraints() {
  using D = LoadWithVectorDescriptor;
  // ...
}

void LoadNamedGeneric::GenerateCode(MaglevAssembler* masm,
                                    const ProcessingState& state) {
  __ CallBuiltin<Builtin::kLoadIC>(/*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
void LoadNamedFromSuperGeneric::SetValueLocationConstraints() {
  using D = LoadWithReceiverAndVectorDescriptor;
  // ...
  UseFixed(/*...*/, D::GetRegisterParameter(D::kLookupStartObject));
  // ...
}

void LoadNamedFromSuperGeneric::GenerateCode(MaglevAssembler* masm,
                                             const ProcessingState& state) {
  __ CallBuiltin<Builtin::kLoadSuperIC>(/*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
int SetNamedGeneric::MaxCallStackArgs() const {
  using D = CallInterfaceDescriptorFor<Builtin::kStoreIC>::type;
  return D::GetStackParameterCount();
}

void SetNamedGeneric::SetValueLocationConstraints() {
  using D = CallInterfaceDescriptorFor<Builtin::kStoreIC>::type;
  // ...
}

void SetNamedGeneric::GenerateCode(MaglevAssembler* masm,
                                   const ProcessingState& state) {
  __ CallBuiltin<Builtin::kStoreIC>(/*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
int DefineNamedOwnGeneric::MaxCallStackArgs() const {
  using D = CallInterfaceDescriptorFor<Builtin::kDefineNamedOwnIC>::type;
  return D::GetStackParameterCount();
}

void DefineNamedOwnGeneric::SetValueLocationConstraints() {
  using D = CallInterfaceDescriptorFor<Builtin::kDefineNamedOwnIC>::type;
  // ...
}

void UpdateJSArrayLength::GenerateCode(MaglevAssembler* masm,
                                       const ProcessingState& state) {
  // ...
  Label done, tag_length;
  __ AssertObjectType(object, JS_ARRAY_TYPE, AbortReason::kUnexpectedValue);
  static_assert(/*...*/ "MaxLength not a Smi");
  // ... AbortReason::kUnexpectedValue);
  // ...
  __ IncrementInt32(index);
  __ SmiTag(length, index);
  __ StoreTaggedSignedField(object, JSArray::kLengthOffset, length);
  // ...
  __ bind(&tag_length);
  // ...
}
void EnsureWritableFastElements::SetValueLocationConstraints() {
  // ...
  set_temporaries_needed(1);
}

void EnsureWritableFastElements::GenerateCode(MaglevAssembler* masm,
                                              const ProcessingState& state) {
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  // ...
}
void MaybeGrowFastElements::SetValueLocationConstraints() {
  // ...
  set_temporaries_needed(1);
}

void MaybeGrowFastElements::GenerateCode(MaglevAssembler* masm,
                                         const ProcessingState& state) {
  // ...
  ZoneLabelRef done(masm);
  __ CompareInt32AndJumpIf(
      /*...*/,
      __ MakeDeferredCode(
          [](MaglevAssembler* masm, ZoneLabelRef done, Register object,
             Register index, Register elements, MaybeGrowFastElements* node) {
            // ...
            {
              RegisterSnapshot snapshot = node->register_snapshot();
              snapshot.live_registers.clear(result_reg);
              snapshot.live_tagged_registers.clear(result_reg);
              SaveRegisterStateForCall save_register_state(masm, snapshot);
              using D = GrowArrayElementsDescriptor;
              if (index == D::GetRegisterParameter(D::kObject)) {
                // ...
              }
              __ SmiTag(result_reg, index);
              // ...
              __ CallBuiltin<Builtin::kGrowFastDoubleElements>(object, index);
              // ...
              __ CallBuiltin<Builtin::kGrowFastSmiOrObjectElements>(object,
                                                                    index);
              // ...
              save_register_state.DefineSafepoint();
            }
            __ EmitEagerDeoptIfSmi(node, result_reg,
                                   DeoptimizeReason::kCouldNotGrowElements);
            // ...
          },
          done, object, index, elements, this));
  // ...
}
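
// Growing runs out of line: the builtin returns the new backing store, and a
// Smi result signals failure, which triggers an eager deopt with
// kCouldNotGrowElements rather than handling the slow store inline.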
void ExtendPropertiesBackingStore::SetValueLocationConstraints() {
  // ...
  set_temporaries_needed(2);
}

void ExtendPropertiesBackingStore::GenerateCode(MaglevAssembler* masm,
                                                const ProcessingState& state) {
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register new_property_array =
      result_reg == object || result_reg == old_property_array
          ? temps.Acquire()
          : result_reg;
  Register scratch = temps.Acquire();
  DCHECK(!AreAliased(object, old_property_array, new_property_array, scratch));
  // ...
  {
    // Allocate the new property array.
    RegisterSnapshot snapshot = register_snapshot();
    snapshot.live_registers.set(object);
    snapshot.live_registers.set(old_property_array);
    snapshot.live_tagged_registers.set(object);
    snapshot.live_tagged_registers.set(old_property_array);
    // ...
    __ Allocate(snapshot, new_property_array, size_in_bytes, /*...*/);
    __ SetMapAsRoot(new_property_array, RootIndex::kPropertyArrayMap);
  }
  {
    // Copy the existing properties over.
    RegisterSnapshot snapshot = register_snapshot();
    snapshot.live_registers.set(object);
    snapshot.live_registers.set(old_property_array);
    snapshot.live_registers.set(new_property_array);
    snapshot.live_tagged_registers.set(object);
    snapshot.live_tagged_registers.set(old_property_array);
    snapshot.live_tagged_registers.set(new_property_array);
    // ...
    __ LoadTaggedFieldWithoutDecompressing(/*...*/);
    __ StoreTaggedFieldWithWriteBarrier(/*...*/);
  }
  // Initialize the fresh slots with undefined.
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  // ...
  __ StoreTaggedFieldNoWriteBarrier(/*...*/);
  // ...
  __ LoadTaggedField(scratch, object, JSObject::kPropertiesOrHashOffset);
  // ...
  __ JumpIfSmi(scratch, &done);
  // ...
  __ LoadTaggedField(scratch, old_property_array,
                     PropertyArray::kLengthAndHashOffset);
  // ...
  __ UncheckedSmiTagInt32(scratch, scratch);
  __ StoreTaggedFieldNoWriteBarrier(
      new_property_array, PropertyArray::kLengthAndHashOffset, scratch);
  // ...
  {
    RegisterSnapshot snapshot = register_snapshot();
    // ...
    snapshot.live_registers.set(new_property_array);
    snapshot.live_tagged_registers.set(new_property_array);
    // ...
    __ StoreTaggedFieldWithWriteBarrier(
        object, JSObject::kPropertiesOrHashOffset, new_property_array,
        snapshot, /*...*/);
  }
  // ...
  if (result_reg != new_property_array) {
    __ Move(result_reg, new_property_array);
  }
}
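
// The old and new property arrays stay live across the allocation so the GC
// can relocate them; the final write-barriered store publishes the new
// backing store on the object before the result is moved into place.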
int SetKeyedGeneric::MaxCallStackArgs() const {
  using D = CallInterfaceDescriptorFor<Builtin::kKeyedStoreIC>::type;
  return D::GetStackParameterCount();
}

void SetKeyedGeneric::SetValueLocationConstraints() {
  using D = CallInterfaceDescriptorFor<Builtin::kKeyedStoreIC>::type;
  // ...
}

void SetKeyedGeneric::GenerateCode(MaglevAssembler* masm,
                                   const ProcessingState& state) {
  __ CallBuiltin<Builtin::kKeyedStoreIC>(/*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
int DefineKeyedOwnGeneric::MaxCallStackArgs() const {
  using D = CallInterfaceDescriptorFor<Builtin::kDefineKeyedOwnIC>::type;
  return D::GetStackParameterCount();
}

void DefineKeyedOwnGeneric::SetValueLocationConstraints() {
  using D = CallInterfaceDescriptorFor<Builtin::kDefineKeyedOwnIC>::type;
  // ...
}

void DefineKeyedOwnGeneric::GenerateCode(MaglevAssembler* masm,
                                         const ProcessingState& state) {
  __ CallBuiltin<Builtin::kDefineKeyedOwnIC>(/*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
int StoreInArrayLiteralGeneric::MaxCallStackArgs() const {
  using D = CallInterfaceDescriptorFor<Builtin::kStoreInArrayLiteralIC>::type;
  return D::GetStackParameterCount();
}

void StoreInArrayLiteralGeneric::SetValueLocationConstraints() {
  using D = CallInterfaceDescriptorFor<Builtin::kStoreInArrayLiteralIC>::type;
  // ...
}

void StoreInArrayLiteralGeneric::GenerateCode(MaglevAssembler* masm,
                                              const ProcessingState& state) {
  __ CallBuiltin<Builtin::kStoreInArrayLiteralIC>(/*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
void GeneratorRestoreRegister::SetValueLocationConstraints() {
  // ...
  set_temporaries_needed(1);
}

void GeneratorRestoreRegister::GenerateCode(MaglevAssembler* masm,
                                            const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  // ...
  Register value = (array == result_reg ? temp : result_reg);
  // ...
  __ StoreTaggedFieldNoWriteBarrier(/*...*/);
  // ...
  if (value != result_reg) {
    __ Move(result_reg, value);
  }
}
void GeneratorStore::GenerateCode(MaglevAssembler* masm,
                                  const ProcessingState& state) {
  // ...
  __ LoadTaggedField(array, generator,
                     JSGeneratorObject::kParametersAndRegistersOffset);

  RegisterSnapshot register_snapshot_during_store = register_snapshot();
  // ...
  register_snapshot_during_store.live_registers.set(array);
  register_snapshot_during_store.live_tagged_registers.set(array);
  register_snapshot_during_store.live_registers.set(generator);
  register_snapshot_during_store.live_tagged_registers.set(generator);
  // ...
  register_snapshot_during_store.live_registers.set(value);
  register_snapshot_during_store.live_tagged_registers.set(value);
  __ StoreTaggedFieldWithWriteBarrier(
      /*...*/ register_snapshot_during_store,
      value_input.node()->decompresses_tagged_result()
      /*...*/);
  // ...
  __ StoreTaggedSignedField(generator, JSGeneratorObject::kContinuationOffset,
                            /*...*/);
  __ StoreTaggedSignedField(generator, JSGeneratorObject::kInputOrDebugPosOffset,
                            /*...*/);
  // ...
  __ StoreTaggedFieldWithWriteBarrier(
      generator, JSGeneratorObject::kContextOffset, context,
      register_snapshot(), /*...*/);
  // ...
}
int GetKeyedGeneric::MaxCallStackArgs() const {
  using D = CallInterfaceDescriptorFor<Builtin::kKeyedLoadIC>::type;
  return D::GetStackParameterCount();
}

void GetKeyedGeneric::SetValueLocationConstraints() {
  using D = CallInterfaceDescriptorFor<Builtin::kKeyedLoadIC>::type;
  // ...
}

void GetKeyedGeneric::GenerateCode(MaglevAssembler* masm,
                                   const ProcessingState& state) {
  __ CallBuiltin<Builtin::kKeyedLoadIC>(/*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
void Int32ToNumber::GenerateCode(MaglevAssembler* masm,
                                 const ProcessingState& state) {
  // ...
  ZoneLabelRef done(masm);
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  // ...
  bool input_output_alias = (object == value);
  // ...
  if (input_output_alias) {
    res = temps.AcquireScratch();
  }
  __ SmiTagInt32AndJumpIfFail(
      /*...*/,
      __ MakeDeferredCode(
          [](MaglevAssembler* masm, Register object, Register value,
             /*...*/) {
            MaglevAssembler::TemporaryRegisterScope temps(masm);
            // ...
            if (scratch.is_valid()) {
              temps.IncludeScratch(scratch);
            }
            // ...
            __ Int32ToDouble(double_value, value);
            __ AllocateHeapNumber(node->register_snapshot(), object,
                                  /*...*/);
            // ...
          },
          /*...*/));
  // ...
  if (input_output_alias) {
    __ Move(object, res);
  }
  // ...
}
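
// Fast path: the int32 fits in a Smi and is tagged in place. The deferred
// path converts to double and allocates a HeapNumber, which is why the
// register snapshot is threaded through to the allocation.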
#ifdef V8_TARGET_ARCH_X64
// ...
#endif

void Uint32ToNumber::GenerateCode(MaglevAssembler* masm,
                                  const ProcessingState& state) {
  ZoneLabelRef done(masm);
  // ...
  __ SmiTagUint32AndJumpIfFail(
      /*...*/,
      __ MakeDeferredCode(
          [](MaglevAssembler* masm, Register object, Register value,
             /*...*/) {
            MaglevAssembler::TemporaryRegisterScope temps(masm);
            // ...
            __ Uint32ToDouble(double_value, value);
            __ AllocateHeapNumber(node->register_snapshot(), object,
                                  /*...*/);
            // ...
          },
          object, value, done, this));
  // ...
}
#ifdef V8_TARGET_ARCH_X64
// ...
#endif

void IntPtrToNumber::GenerateCode(MaglevAssembler* masm,
                                  const ProcessingState& state) {
  ZoneLabelRef done(masm);
  // ...
  __ SmiTagIntPtrAndJumpIfFail(
      /*...*/,
      __ MakeDeferredCode(
          [](MaglevAssembler* masm, Register object, Register value,
             /*...*/) {
            MaglevAssembler::TemporaryRegisterScope temps(masm);
            // ...
            __ IntPtrToDouble(double_value, value);
            __ AllocateHeapNumber(node->register_snapshot(), object,
                                  /*...*/);
            // ...
          },
          object, value, done, this));
  // ...
}
void Float64ToTagged::GenerateCode(MaglevAssembler* masm,
                                   const ProcessingState& state) {
  // ...
  Label box;
  __ TryTruncateDoubleToInt32(object, value, &box);
  __ SmiTagInt32AndJumpIfFail(object, &box);
  // ...
  __ AllocateHeapNumber(register_snapshot(), object, value);
  // ...
}

void Float64ToHeapNumberForField::GenerateCode(MaglevAssembler* masm,
                                               const ProcessingState& state) {
  // ...
  __ AllocateHeapNumber(register_snapshot(), object, value);
  // ...
}
void HoleyFloat64ToTagged::GenerateCode(MaglevAssembler* masm,
                                        const ProcessingState& state) {
  ZoneLabelRef done(masm);
  // ...
  Label box;
  __ TryTruncateDoubleToInt32(object, value, &box);
  __ SmiTagInt32AndJumpIfFail(object, &box);
  // ...
  // When the input is the hole NaN, the deferred path materializes undefined
  // instead of boxing it.
      __ MakeDeferredCode(
          [](MaglevAssembler* masm, Register object, ZoneLabelRef done) {
            __ LoadRoot(object, RootIndex::kUndefinedValue);
            // ...
          },
          /*...*/);
  // ...
  __ AllocateHeapNumber(register_snapshot(), object, value);
  // ...
}
void CheckedSmiTagFloat64::SetValueLocationConstraints() {
  // ...
  set_double_temporaries_needed(1);
}
// ...

void CheckedSmiTagFloat64::GenerateCode(MaglevAssembler* masm,
                                        const ProcessingState& state) {
  // ...
  __ TryTruncateDoubleToInt32(
      object, value, __ GetDeoptLabel(this, DeoptimizeReason::kNotASmi));
  __ SmiTagInt32AndJumpIfFail(
      object, __ GetDeoptLabel(this, DeoptimizeReason::kNotASmi));
}
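
// Both the truncation and the tag can fail: a non-integral double fails the
// truncation, and an int32 outside Smi range fails the tag. Either way the
// node deopts with kNotASmi.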
void StoreFloat64::GenerateCode(MaglevAssembler* masm,
                                const ProcessingState& state) {
  // ...
  __ AssertNotSmi(object);
  // ...
}

void StoreInt32::GenerateCode(MaglevAssembler* masm,
                              const ProcessingState& state) {
  // ...
  __ AssertNotSmi(object);
  // ...
}

void StoreTaggedFieldNoWriteBarrier::GenerateCode(
    MaglevAssembler* masm, const ProcessingState& state) {
  // ...
  __ AssertNotSmi(object);
  // ...
  __ AssertElidedWriteBarrier(object, value, register_snapshot());
  // ...
}
void StringAt::SetValueLocationConstraints() {
  // ...
  set_temporaries_needed(1);
}

void StringAt::GenerateCode(MaglevAssembler* masm,
                            const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  // ...
  ZoneLabelRef done(masm);
  Label cached_one_byte_string;

  RegisterSnapshot save_registers = register_snapshot();
  __ StringCharCodeOrCodePointAt(/*...*/, &cached_one_byte_string);
  __ StringFromCharCode(save_registers, &cached_one_byte_string, result_string,
                        /*...*/);
  // ...
}
void BuiltinStringPrototypeCharCodeOrCodePointAt::
    SetValueLocationConstraints() {
  // ...
  set_temporaries_needed(/*...*/);
}

void BuiltinStringPrototypeCharCodeOrCodePointAt::GenerateCode(
    MaglevAssembler* masm, const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch1 = temps.Acquire();
  // ...
  scratch2 = temps.Acquire();
  // ...
  ZoneLabelRef done(masm);
  RegisterSnapshot save_registers = register_snapshot();
  __ StringCharCodeOrCodePointAt(/*...*/,
                                 string, index, scratch1, scratch2, *done);
  // ...
}
void StringLength::GenerateCode(MaglevAssembler* masm,
                                const ProcessingState& state) {
  // ...
}
void StringConcat::SetValueLocationConstraints() {
  using D = StringAdd_CheckNoneDescriptor;
  UseFixed(lhs(), D::GetRegisterParameter(D::kLeft));
  UseFixed(rhs(), D::GetRegisterParameter(D::kRight));
  // ...
}

void StringConcat::GenerateCode(MaglevAssembler* masm,
                                const ProcessingState& state) {
  __ CallBuiltin<Builtin::kStringAdd_CheckNone>(
      masm->native_context().object(), /*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
void ConsStringMap::GenerateCode(MaglevAssembler* masm,
                                 const ProcessingState& state) {
  // ...
  bool left_contains_one_byte_res_map =
      /*...*/ RootIndex::kConsOneByteStringMap;
  // ...
#ifdef V8_STATIC_ROOTS
  static_assert(InstanceTypeChecker::kOneByteStringMapBit == 0 ||
                InstanceTypeChecker::kTwoByteStringMapBit == 0);
  // ...
  if constexpr (InstanceTypeChecker::kOneByteStringMapBit == 0) {
    // ...
    __ TestInt32AndJumpIfAnySet(reg,
                                InstanceTypeChecker::kStringMapEncodingMask,
                                /*...*/);
    // ...
  }
  // ...
  __ TestInt32AndJumpIfAllClear(reg,
                                InstanceTypeChecker::kStringMapEncodingMask,
                                /*...*/);
  // ...
  if (left_contains_one_byte_res_map) {
    TestForTwoByte(right, no_reg);
  } else {
    TestForTwoByte(left, right);
  }
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.AcquireScratch();
  // ...
  if (left_contains_one_byte_res_map) {
    // ...
  }
  if (left != right) {
    // ...
    __ AndInt32(scratch, left);
  }
  // ...
  if (!left_contains_one_byte_res_map) {
    __ LoadRoot(res, RootIndex::kConsOneByteStringMap);
  }
  // ...
  __ LoadRoot(res, RootIndex::kConsTwoByteStringMap);
  // ...
}
void UnwrapStringWrapper::GenerateCode(MaglevAssembler* masm,
                                       const ProcessingState& state) {
  // ...
  __ LoadTaggedField(input, input, JSPrimitiveWrapper::kValueOffset);
  // ...
}
void UnwrapThinString::GenerateCode(MaglevAssembler* masm,
                                    const ProcessingState& state) {
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.AcquireScratch();
#ifdef V8_STATIC_ROOTS
  __ LoadCompressedMap(scratch, input);
  __ JumpIfObjectNotInRange(
      scratch,
      InstanceTypeChecker::kUniqueMapRangeOfStringType::kThinString.first,
      InstanceTypeChecker::kUniqueMapRangeOfStringType::kThinString.second,
      /*...*/);
#else
  __ LoadInstanceType(scratch, input);
  // ...
#endif
  __ LoadThinStringValue(input, input);
  // ...
}
void StringEqual::SetValueLocationConstraints() {
  using D = StringEqualDescriptor;
  UseFixed(lhs(), D::GetRegisterParameter(D::kLeft));
  UseFixed(rhs(), D::GetRegisterParameter(D::kRight));
  set_temporaries_needed(1);
  RequireSpecificTemporary(D::GetRegisterParameter(D::kLength));
  // ...
}

void StringEqual::GenerateCode(MaglevAssembler* masm,
                               const ProcessingState& state) {
  using D = StringEqualDescriptor;
  Label done, if_equal, if_not_equal;
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register left_length = temps.Acquire();
  Register right_length = D::GetRegisterParameter(D::kLength);

  __ CmpTagged(left, right);
  // ...
  __ StringLength(left_length, left);
  __ StringLength(right_length, right);
  __ CompareInt32AndJumpIf(left_length, right_length, kNotEqual, &if_not_equal,
                           /*...*/);
  // ...
  DCHECK_EQ(right_length, D::GetRegisterParameter(D::kLength));
  __ CallBuiltin<Builtin::kStringEqual>(lhs(), rhs(),
                                        D::GetRegisterParameter(D::kLength));
  masm->DefineLazyDeoptPoint(this->lazy_deopt_info());
  // ...
  __ bind(&if_not_equal);
  // ...
}
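
// Two fast paths precede the builtin call: pointer-identical strings are
// equal, and strings with different lengths are unequal. Only the remaining
// cases pay for Builtin::kStringEqual.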
void TaggedEqual::GenerateCode(MaglevAssembler* masm,
                               const ProcessingState& state) {
  Label done, if_equal;
  // ...
}

void TaggedNotEqual::GenerateCode(MaglevAssembler* masm,
                                  const ProcessingState& state) {
  Label done, if_equal;
  // ...
}
int TestInstanceOf::MaxCallStackArgs() const {
  using D = CallInterfaceDescriptorFor<Builtin::kInstanceOf_WithFeedback>::type;
  return D::GetStackParameterCount();
}

void TestInstanceOf::SetValueLocationConstraints() {
  using D = CallInterfaceDescriptorFor<Builtin::kInstanceOf_WithFeedback>::type;
  // ...
  UseFixed(object(), D::GetRegisterParameter(D::kLeft));
  // ...
}

void TestInstanceOf::GenerateCode(MaglevAssembler* masm,
                                  const ProcessingState& state) {
  __ CallBuiltin<Builtin::kInstanceOf_WithFeedback>(/*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
void TestTypeOf::SetValueLocationConstraints() {
  // ...
#ifdef V8_TARGET_ARCH_ARM
  set_temporaries_needed(1);
#endif
}

void TestTypeOf::GenerateCode(MaglevAssembler* masm,
                              const ProcessingState& state) {
#ifdef V8_TARGET_ARCH_ARM
  // Make sure a scratch register is available for the dispatch below.
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  temps.IncludeScratch(temps.Acquire());
#endif
  // ...
  Label is_true, is_false, done;
  // ...
}
void ToBoolean::GenerateCode(MaglevAssembler* masm,
                             const ProcessingState& state) {
  // ...
  ZoneLabelRef object_is_true(masm), object_is_false(masm);
  // ...
  __ bind(*object_is_true);
  __ LoadRoot(return_value, RootIndex::kTrueValue);
  // ...
  __ bind(*object_is_false);
  __ LoadRoot(return_value, RootIndex::kFalseValue);
  // ...
}
void ToBooleanLogicalNot::GenerateCode(MaglevAssembler* masm,
                                       const ProcessingState& state) {
  // ...
  ZoneLabelRef object_is_true(masm), object_is_false(masm);
  __ ToBoolean(object, check_type(), object_is_true, object_is_false, true);
  // Note the inversion: a truthy input produces False, a falsy one True.
  __ bind(*object_is_true);
  __ LoadRoot(return_value, RootIndex::kFalseValue);
  // ...
  __ bind(*object_is_false);
  __ LoadRoot(return_value, RootIndex::kTrueValue);
  // ...
}
int ToName::MaxCallStackArgs() const {
  using D = CallInterfaceDescriptorFor<Builtin::kToName>::type;
  return D::GetStackParameterCount();
}

void ToName::SetValueLocationConstraints() {
  using D = CallInterfaceDescriptorFor<Builtin::kToName>::type;
  // ...
}

void ToName::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) {
  __ CallBuiltin<Builtin::kToName>(context(), /*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
void ToNumberOrNumeric::SetValueLocationConstraints() {
  // ...
  set_temporaries_needed(1);
}

void ToNumberOrNumeric::GenerateCode(MaglevAssembler* masm,
                                     const ProcessingState& state) {
  ZoneLabelRef done(masm);
  Label move_and_return;
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  __ CompareMapWithRoot(object, RootIndex::kHeapNumberMap, scratch);
  __ JumpToDeferredIf(
      /*...*/,
      [](/*...*/) {
        {
          RegisterSnapshot snapshot = node->register_snapshot();
          snapshot.live_registers.clear(result_reg);
          SaveRegisterStateForCall save_register_state(masm, snapshot);
          // Dispatch on mode: kToNumber vs. kToNumeric.
          // ...
          __ CallBuiltin<Builtin::kToNumber>(masm->native_context().object(),
                                             object);
          // ...
          __ CallBuiltin<Builtin::kToNumeric>(masm->native_context().object(),
                                              object);
          // ...
          masm->DefineExceptionHandlerPoint(node);
          save_register_state.DefineSafepointWithLazyDeopt(
              node->lazy_deopt_info());
        }
        // ...
      },
      mode(), object, result_reg, this, done);
  __ bind(&move_and_return);
  __ Move(result_reg, object);
  // ...
}
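
// Inputs that are already heap numbers are returned unchanged via
// move_and_return; everything else funnels through the ToNumber/ToNumeric
// builtins in deferred code with a safepoint and lazy deopt attached.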
int ToObject::MaxCallStackArgs() const {
  using D = CallInterfaceDescriptorFor<Builtin::kToObject>::type;
  return D::GetStackParameterCount();
}

void ToObject::SetValueLocationConstraints() {
  using D = CallInterfaceDescriptorFor<Builtin::kToObject>::type;
  // ...
}

void ToObject::GenerateCode(MaglevAssembler* masm,
                            const ProcessingState& state) {
  // ...
  Label call_builtin, done;
  // ...
  __ AssertNotSmi(value);
  // ...
  __ bind(&call_builtin);
  __ CallBuiltin<Builtin::kToObject>(context(), /*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
  // ...
}
int ToString::MaxCallStackArgs() const {
  using D = CallInterfaceDescriptorFor<Builtin::kToString>::type;
  return D::GetStackParameterCount();
}

void ToString::SetValueLocationConstraints() {
  using D = CallInterfaceDescriptorFor<Builtin::kToString>::type;
  // ...
}

void ToString::GenerateCode(MaglevAssembler* masm,
                            const ProcessingState& state) {
  // ...
  Label call_builtin, done;
  // ...
  __ bind(&call_builtin);
  // ...
  __ CallBuiltin<Builtin::kToStringConvertSymbol>(context(), /*...*/);
  // ...
  __ CallBuiltin<Builtin::kToString>(context(), /*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
  // ...
}
void NumberToString::SetValueLocationConstraints() {
  using D = CallInterfaceDescriptorFor<Builtin::kNumberToString>::type;
  // ...
}

void NumberToString::GenerateCode(MaglevAssembler* masm,
                                  const ProcessingState& state) {
  // ...
  masm->DefineLazyDeoptPoint(this->lazy_deopt_info());
}
void ThrowReferenceErrorIfHole::GenerateCode(MaglevAssembler* masm,
                                             const ProcessingState& state) {
  __ JumpToDeferredIf(
      __ IsRootConstant(value(), RootIndex::kTheHoleValue),
      [](MaglevAssembler* masm, ThrowReferenceErrorIfHole* node) {
        __ Push(node->name().object());
        __ CallRuntime(Runtime::kThrowAccessedUninitializedVariable, 1);
        masm->DefineExceptionHandlerAndLazyDeoptPoint(node);
        __ Abort(AbortReason::kUnexpectedReturnFromThrow);
      },
      this);
}
void ThrowSuperNotCalledIfHole::GenerateCode(MaglevAssembler* masm,
                                             const ProcessingState& state) {
  __ JumpToDeferredIf(
      __ IsRootConstant(value(), RootIndex::kTheHoleValue),
      [](MaglevAssembler* masm, ThrowSuperNotCalledIfHole* node) {
        __ CallRuntime(Runtime::kThrowSuperNotCalled, 0);
        masm->DefineExceptionHandlerAndLazyDeoptPoint(node);
        __ Abort(AbortReason::kUnexpectedReturnFromThrow);
      },
      this);
}
void ThrowSuperAlreadyCalledIfNotHole::GenerateCode(
    MaglevAssembler* masm, const ProcessingState& state) {
  __ JumpToDeferredIf(
      /*...*/,
      [](MaglevAssembler* masm, ThrowSuperAlreadyCalledIfNotHole* node) {
        __ CallRuntime(Runtime::kThrowSuperAlreadyCalledError, 0);
        masm->DefineExceptionHandlerAndLazyDeoptPoint(node);
        __ Abort(AbortReason::kUnexpectedReturnFromThrow);
      },
      this);
}
void ThrowIfNotCallable::SetValueLocationConstraints() {
  // ...
  set_temporaries_needed(1);
}

void ThrowIfNotCallable::GenerateCode(MaglevAssembler* masm,
                                      const ProcessingState& state) {
  Label* if_not_callable = __ MakeDeferredCode(
      [](MaglevAssembler* masm, ThrowIfNotCallable* node) {
        __ Push(node->value());
        __ CallRuntime(Runtime::kThrowCalledNonCallable, 1);
        masm->DefineExceptionHandlerAndLazyDeoptPoint(node);
        __ Abort(AbortReason::kUnexpectedReturnFromThrow);
      },
      this);
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  // ...
}
void ThrowIfNotSuperConstructor::SetValueLocationConstraints() {
  // ...
  set_temporaries_needed(1);
}

void ThrowIfNotSuperConstructor::GenerateCode(MaglevAssembler* masm,
                                              const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  // ...
  static_assert(Map::kBitFieldOffsetEnd + 1 - Map::kBitFieldOffset == 1);
  __ TestUint8AndJumpIfAllClear(
      /*...*/,
      Map::Bits1::IsConstructorBit::kMask,
      __ MakeDeferredCode(
          [](MaglevAssembler* masm, ThrowIfNotSuperConstructor* node) {
            // ...
            __ CallRuntime(Runtime::kThrowNotSuperConstructor, 2);
            masm->DefineExceptionHandlerAndLazyDeoptPoint(node);
            __ Abort(AbortReason::kUnexpectedReturnFromThrow);
          },
          this));
}
void TruncateUint32ToInt32::SetValueLocationConstraints() {
  // ...
}
void TruncateUint32ToInt32::GenerateCode(MaglevAssembler* masm,
                                         const ProcessingState& state) {
  // No code emitted: the bits are reinterpreted in place.
}

void TruncateFloat64ToInt32::SetValueLocationConstraints() {
  // ...
}
void TruncateFloat64ToInt32::GenerateCode(MaglevAssembler* masm,
                                          const ProcessingState& state) {
  // ...
}

void CheckedTruncateFloat64ToInt32::GenerateCode(MaglevAssembler* masm,
                                                 const ProcessingState& state) {
  __ TryTruncateDoubleToInt32(
      /*...*/, __ GetDeoptLabel(this, DeoptimizeReason::kNotInt32));
}

void CheckedTruncateFloat64ToUint32::GenerateCode(
    MaglevAssembler* masm, const ProcessingState& state) {
  __ TryTruncateDoubleToUint32(
      /*...*/, __ GetDeoptLabel(this, DeoptimizeReason::kNotUint32));
}

void UnsafeTruncateFloat64ToInt32::SetValueLocationConstraints() {
  // ...
}
void UnsafeTruncateFloat64ToInt32::GenerateCode(MaglevAssembler* masm,
                                                const ProcessingState& state) {
  // ...
  // Debug-only check that the double really was an int32.
  __ Abort(AbortReason::kFloat64IsNotAInt32);
  // ...
}
void CheckedUint32ToInt32::GenerateCode(MaglevAssembler* masm,
                                        const ProcessingState& state) {
  // ...
  Label* fail = __ GetDeoptLabel(this, DeoptimizeReason::kNotInt32);
  // A uint32 fits an int32 iff the sign bit is clear.
  __ CompareInt32AndJumpIf(input_reg, 0, kLessThan, fail);
}

void UnsafeTruncateUint32ToInt32::SetValueLocationConstraints() {
  // ...
}
void UnsafeTruncateUint32ToInt32::GenerateCode(MaglevAssembler* masm,
                                               const ProcessingState& state) {
  // ...
  // ... AbortReason::kUint32IsNotAInt32);
  // ...
}
void Int32ToUint8Clamped::GenerateCode(MaglevAssembler* masm,
                                       const ProcessingState& state) {
  // ...
  // Clamp to [0, 255].
  __ Move(result_reg, 255);
  // ...
  __ Move(result_reg, 0);
  // ...
}

void Uint32ToUint8Clamped::GenerateCode(MaglevAssembler* masm,
                                        const ProcessingState& state) {
  // ...
  __ Move(value, 255);
  // ...
}

void Float64ToUint8Clamped::GenerateCode(MaglevAssembler* masm,
                                         const ProcessingState& state) {
  // ...
  Label min, max, done;
  __ ToUint8Clamped(result_reg, value, &min, &max, &done);
  // ...
  __ Move(result_reg, 0);
  // ...
  __ Move(result_reg, 255);
  // ...
}
void CheckNumber::GenerateCode(MaglevAssembler* masm,
                               const ProcessingState& state) {
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.AcquireScratch();
  // ...
  __ LoadMapForCompare(scratch, value);
  __ CompareTaggedRoot(scratch, RootIndex::kHeapNumberMap);
  // ...
  // kToNumeric mode also accepts BigInts.
  __ CompareTaggedRootAndEmitEagerDeoptIf(
      scratch, RootIndex::kBigIntMap, kNotEqual,
      DeoptimizeReason::kNotANumber, this);
  // ...
  __ CompareMapWithRootAndEmitEagerDeoptIf(
      value, RootIndex::kHeapNumberMap, scratch, kNotEqual,
      DeoptimizeReason::kNotANumber, this);
  // ...
}
void CheckedInternalizedString::GenerateCode(MaglevAssembler* masm,
                                             const ProcessingState& state) {
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register instance_type = temps.AcquireScratch();
  if (check_type() == CheckType::kOmitHeapObjectCheck) {
    __ AssertNotSmi(object);
  } else {
    __ EmitEagerDeoptIfSmi(this, object, DeoptimizeReason::kWrongMap);
  }
  __ LoadInstanceType(instance_type, object);
  __ RecordComment("Test IsInternalizedString");
  // ...
  ZoneLabelRef done(masm);
  __ TestInt32AndJumpIfAnySet(
      /*...*/,
      __ MakeDeferredCode(
          [](MaglevAssembler* masm, ZoneLabelRef done,
             CheckedInternalizedString* node, Register object,
             Register instance_type) {
            __ RecordComment("Deferred Test IsThinString");
            // ...
            __ TestInt32AndJumpIfAnySet(
                /*...*/,
                __ GetDeoptLabel(node, DeoptimizeReason::kWrongMap));
            // ...
            __ TestInt32AndJumpIfAllClear(
                /*...*/,
                __ GetDeoptLabel(node, DeoptimizeReason::kWrongMap));
            // ...
            __ LoadTaggedField(object, object, offsetof(ThinString, actual_));
            // ...
            __ RecordComment("DCHECK IsInternalizedString");
            // ...
            __ LoadInstanceType(instance_type, object);
            __ TestInt32AndJumpIfAllClear(
                /*...*/);
            __ Abort(AbortReason::kUnexpectedValue);
            // ...
          },
          done, this, object, instance_type));
  // ...
}
void CheckedNumberToUint8Clamped::SetValueLocationConstraints() {
  // ...
  set_temporaries_needed(1);
  set_double_temporaries_needed(1);
}

void CheckedNumberToUint8Clamped::GenerateCode(MaglevAssembler* masm,
                                               const ProcessingState& state) {
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  // ...
  Label is_not_smi, min, max, done;
  // Smi fast path: untag and clamp directly.
  __ JumpIfNotSmi(value, &is_not_smi);
  __ SmiToInt32(value);
  // ...
  __ bind(&is_not_smi);
  // Otherwise the input must be a HeapNumber.
  __ CompareMapWithRootAndEmitEagerDeoptIf(value, RootIndex::kHeapNumberMap,
                                           /*...*/,
                                           DeoptimizeReason::kNotANumber, this);
  __ LoadHeapNumberValue(double_value, value);
  // ...
  __ ToUint8Clamped(value, double_value, &min, &max, &done);
  // ...
  __ Move(result_reg, 0);
  // ...
  __ Move(result_reg, 255);
  // ...
}
void StoreFixedArrayElementWithWriteBarrier::GenerateCode(
    MaglevAssembler* masm, const ProcessingState& state) {
  // ...
  /*...*/ register_snapshot());
  // ...
}

void StoreFixedArrayElementNoWriteBarrier::GenerateCode(
    MaglevAssembler* masm, const ProcessingState& state) {
  // ...
  __ AssertElidedWriteBarrier(elements, value, register_snapshot());
  // ...
}
void Call::SetValueLocationConstraints() {
  using D = CallTrampolineDescriptor;
  // ...
}

void Call::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) {
  // Generic call: dispatch on receiver mode and target type.
  // ...
  __ CallBuiltin<Builtin::kCall_ReceiverIsNullOrUndefined>(/*...*/);
  // ...
  __ CallBuiltin<Builtin::kCall_ReceiverIsNotNullOrUndefined>(/*...*/);
  // ...
  __ CallBuiltin<Builtin::kCallFunction_ReceiverIsNullOrUndefined>(/*...*/);
  // ...
  __ CallBuiltin<Builtin::kCallFunction_ReceiverIsNotNullOrUndefined>(/*...*/);
  // ...
  __ CallBuiltin<Builtin::kCallFunction_ReceiverIsAny>(/*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
void CallForwardVarargs::SetValueLocationConstraints() {
  using D = CallTrampolineDescriptor;
  // ...
}

void CallForwardVarargs::GenerateCode(MaglevAssembler* masm,
                                      const ProcessingState& state) {
  // ...
  __ CallBuiltin<Builtin::kCallFunctionForwardVarargs>(/*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
int CallSelf::MaxCallStackArgs() const {
  int actual_parameter_count = num_args() + 1;
  // ...
}

void CallSelf::SetValueLocationConstraints() {
  // ...
  set_temporaries_needed(1);
}

void CallSelf::GenerateCode(MaglevAssembler* masm,
                            const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  int actual_parameter_count = num_args() + 1;
  // Pad missing arguments with undefined when the callee declares more
  // formal parameters than the call site passes.
  int number_of_undefineds = /*...*/;
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
int CallKnownJSFunction::MaxCallStackArgs() const {
  int actual_parameter_count = num_args() + 1;
  // ...
}

void CallKnownJSFunction::SetValueLocationConstraints() {
  // ...
  set_temporaries_needed(1);
}

void CallKnownJSFunction::GenerateCode(MaglevAssembler* masm,
                                       const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  int actual_parameter_count = num_args() + 1;
  // ...
  int number_of_undefineds = /*...*/;
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  // ...
  __ CallBuiltin(builtin);
  // ...
#if V8_ENABLE_LEAPTIERING
  // ...
#endif
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
int CallKnownApiFunction::MaxCallStackArgs() const {
  int actual_parameter_count = num_args() + 1;
  return actual_parameter_count;
}

void CallKnownApiFunction::SetValueLocationConstraints() {
  // ...
  set_temporaries_needed(2);
}

void CallKnownApiFunction::GenerateCode(MaglevAssembler* masm,
                                        const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  // ... CallApiCallbackOptimizedDescriptor::ApiFunctionAddressRegister()});
  // ...
  compiler::JSHeapBroker* broker = masm->compilation_info()->broker();
  // ...
  ExternalReference reference = /*...*/;
  __ Move(CallApiCallbackOptimizedDescriptor::ApiFunctionAddressRegister(),
          reference);
  // ...
  __ CallBuiltin(Builtin::kCallApiCallbackOptimizedNoProfiling);
  // ...
  __ CallBuiltin(Builtin::kCallApiCallbackOptimized);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
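
// GenerateCallApiCallbackOptimizedInline open-codes the
// CallApiCallbackOptimized builtin: it enters an API_CALLBACK_EXIT frame,
// fills the FunctionCallbackInfo slots relative to fp, and calls the C++
// callback directly, skipping the profiling thunk.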
void CallKnownApiFunction::GenerateCallApiCallbackOptimizedInline(
    MaglevAssembler* masm, const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  Register scratch2 = temps.Acquire();

  using FCA = FunctionCallbackArguments;
  using ER = ExternalReference;
  using FC = ApiCallbackExitFrameConstants;

  static_assert(FCA::kArgsLength == 6);
  static_assert(FCA::kNewTargetIndex == 5);
  static_assert(FCA::kTargetIndex == 4);
  static_assert(FCA::kReturnValueIndex == 3);
  static_assert(FCA::kContextIndex == 2);
  static_assert(FCA::kIsolateIndex == 1);
  static_assert(FCA::kUnusedIndex == 0);
  // ...
  __ StoreRootRelative(IsolateData::topmost_script_having_context_offset(),
                       /*...*/);
  // ...
  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
  // ...
  __ Move(scratch2, ER::isolate_address());
  // ...
  __ Push(scratch2, scratch);
  // ...
  Register api_function_address =
      CallApiCallbackOptimizedDescriptor::ApiFunctionAddressRegister();
  // ...
  compiler::JSHeapBroker* broker = masm->compilation_info()->broker();
  // ...
  ExternalReference reference = /*...*/;
  __ Move(api_function_address, reference);

  Label done, call_api_callback_builtin_inline;
  __ Call(&call_api_callback_builtin_inline);
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
  // ...
  __ bind(&call_api_callback_builtin_inline);
  // ...
  __ EmitEnterExitFrame(FC::getExtraSlotsCountFrom<ExitFrameConstants>(),
                        StackFrame::API_CALLBACK_EXIT, api_function_address,
                        /*...*/);
#ifdef V8_TARGET_ARCH_ARM64
  // ...
#endif
  // ...
  __ Move(argc_operand, scratch);
  // ...
  __ LoadAddress(scratch, MemOperand(fp, FC::kImplicitArgsArrayOffset));
  __ Move(MemOperand(fp, FC::kFCIImplicitArgsOffset), scratch);
  // ...
  __ LoadAddress(scratch, MemOperand(fp, FC::kFirstArgumentOffset));
  __ Move(MemOperand(fp, FC::kFCIValuesOffset), scratch);
  // ...
  __ RecordComment("v8::FunctionCallback's argument.");
  __ LoadAddress(function_callback_info_arg,
                 MemOperand(fp, FC::kFunctionCallbackInfoOffset));
  // ...
  const int kSlotsToDropOnReturn = /*...*/;
  // ...
  const bool with_profiling = false;
  ExternalReference no_thunk_ref;
  // ...
  /*...*/ no_thunk_ref, no_thunk_arg, kSlotsToDropOnReturn,
          nullptr, return_value_operand);
  __ RecordComment("end of inlined CallApiCallbackOptimized builtin");
  // ...
}
int CallBuiltin::MaxCallStackArgs() const {
  // ...
  if (!descriptor.AllowVarArgs()) {
    return descriptor.GetStackParameterCount();
  }
  // ...
  DCHECK_GE(all_input_count, descriptor.GetRegisterParameterCount());
  return all_input_count - descriptor.GetRegisterParameterCount();
}

void CallBuiltin::SetValueLocationConstraints() {
  // ...
  bool has_context = descriptor.HasContextParameter();
  // ...
  UseFixed(input(i), descriptor.GetRegisterParameter(i));
  // ...
}

template <typename... Args>
void CallBuiltin::PushArguments(MaglevAssembler* masm, Args... extra_args) {
  // ...
}

void CallBuiltin::PassFeedbackSlotInRegister(MaglevAssembler* masm) {
  // ...
  __ Move(descriptor.GetRegisterParameter(slot_index), /*...*/);
  // ...
  __ Move(descriptor.GetRegisterParameter(slot_index), /*...*/);
  // ...
}

void CallBuiltin::PushFeedbackAndArguments(MaglevAssembler* masm) {
  // ...
  int vector_index = slot_index + 1;
  // ...
  if (vector_index < descriptor.GetRegisterParameterCount()) {
    // ...
    __ Move(descriptor.GetRegisterParameter(vector_index), feedback().vector);
    // ...
  } else if (vector_index == descriptor.GetRegisterParameterCount()) {
    // ...
    DCHECK_EQ(descriptor.GetStackParameterCount(), 1);
    // ...
  }
}
void CallBuiltin::GenerateCode(MaglevAssembler* masm,
                               const ProcessingState& state) {
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
int CallCPPBuiltin::MaxCallStackArgs() const {
  using D = CallInterfaceDescriptorFor<kCEntry_Builtin>::type;
  return D::GetStackParameterCount() + num_args();
}

void CallCPPBuiltin::SetValueLocationConstraints() {
  using D = CallInterfaceDescriptorFor<kCEntry_Builtin>::type;
  // ...
  set_temporaries_needed(1);
  RequireSpecificTemporary(D::GetRegisterParameter(D::kArity));
  RequireSpecificTemporary(D::GetRegisterParameter(D::kCFunction));
}

void CallCPPBuiltin::GenerateCode(MaglevAssembler* masm,
                                  const ProcessingState& state) {
  using D = CallInterfaceDescriptorFor<kCEntry_Builtin>::type;
  constexpr Register kArityReg = D::GetRegisterParameter(D::kArity);
  constexpr Register kCFunctionReg = D::GetRegisterParameter(D::kCFunction);

  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  __ LoadRoot(scratch, RootIndex::kTheHoleValue);
  // ...
  ExternalReference builtin_address = /*...*/;
  __ Move(kCFunctionReg, builtin_address);
  // ...
  __ CallBuiltin(Builtin::kCEntry_Return1_ArgvOnStack_BuiltinExit);
}
void CallRuntime::GenerateCode(MaglevAssembler* masm,
                               const ProcessingState& state) {
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
int CallWithSpread::MaxCallStackArgs() const {
  int argc_no_spread = num_args() - 1;
  using D = CallInterfaceDescriptorFor<Builtin::kCallWithSpread>::type;
  return argc_no_spread + D::GetStackParameterCount();
}

void CallWithSpread::SetValueLocationConstraints() {
  using D = CallInterfaceDescriptorFor<Builtin::kCallWithSpread>::type;
  // ...
}

void CallWithSpread::GenerateCode(MaglevAssembler* masm,
                                  const ProcessingState& state) {
  __ CallBuiltin<Builtin::kCallWithSpread>(/*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
int CallWithArrayLike::MaxCallStackArgs() const {
  using D = CallInterfaceDescriptorFor<Builtin::kCallWithArrayLike>::type;
  return D::GetStackParameterCount();
}

void CallWithArrayLike::SetValueLocationConstraints() {
  using D = CallInterfaceDescriptorFor<Builtin::kCallWithArrayLike>::type;
  // ...
}

void CallWithArrayLike::GenerateCode(MaglevAssembler* masm,
                                     const ProcessingState& state) {
  // ...
  static_assert(
      CallInterfaceDescriptorFor<
          Builtin::kCallWithArrayLike>::type::GetStackParameterCount() == 0);
  // ...
  __ CallBuiltin<Builtin::kCallWithArrayLike>(/*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
int Construct::MaxCallStackArgs() const {
  using D = Construct_WithFeedbackDescriptor;
  return num_args() + D::GetStackParameterCount();
}

void Construct::SetValueLocationConstraints() {
  using D = Construct_WithFeedbackDescriptor;
  // ...
}

void Construct::GenerateCode(MaglevAssembler* masm,
                             const ProcessingState& state) {
  __ CallBuiltin<Builtin::kConstruct_WithFeedback>(/*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
int ConstructWithSpread::MaxCallStackArgs() const {
  int argc_no_spread = num_args() - 1;
  using D = CallInterfaceDescriptorFor<
      Builtin::kConstructWithSpread_WithFeedback>::type;
  return argc_no_spread + D::GetStackParameterCount();
}

void ConstructWithSpread::SetValueLocationConstraints() {
  using D = CallInterfaceDescriptorFor<
      Builtin::kConstructWithSpread_WithFeedback>::type;
  // ...
}

void ConstructWithSpread::GenerateCode(MaglevAssembler* masm,
                                       const ProcessingState& state) {
  __ CallBuiltin<Builtin::kConstructWithSpread_WithFeedback>(/*...*/);
  // ...
  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
void SetPendingMessage::GenerateCode(MaglevAssembler* masm,
                                     const ProcessingState& state) {
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.AcquireScratch();
  MemOperand pending_message_operand = __ ExternalReferenceAsOperand(/*...*/);
  if (new_message != return_value) {
    __ Move(return_value, pending_message_operand);
    __ Move(pending_message_operand, new_message);
  } else {
    // Swap the old and new message via the scratch register.
    __ Move(scratch, pending_message_operand);
    __ Move(pending_message_operand, new_message);
    __ Move(return_value, scratch);
  }
}
void StoreDoubleField::GenerateCode(MaglevAssembler* masm,
                                    const ProcessingState& state) {
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register heap_number = temps.AcquireScratch();

  __ AssertNotSmi(object);
  __ LoadTaggedField(heap_number, object, offset());
  __ AssertNotSmi(heap_number);
  __ StoreHeapNumberValue(value, heap_number);
}

void StoreHeapInt32::GenerateCode(MaglevAssembler* masm,
                                  const ProcessingState& state) {
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register heap_number = temps.AcquireScratch();

  __ AssertNotSmi(object);
  __ LoadTaggedField(heap_number, object, offset());
  __ AssertNotSmi(heap_number);
  __ StoreHeapInt32Value(value, heap_number);
}
template <typename NodeT>
void GenerateTransitionElementsKind(
    MaglevAssembler* masm, NodeT* node, Register object, Register map,
    base::Vector<const compiler::MapRef> transition_sources,
    const compiler::MapRef transition_target, ZoneLabelRef done,
    std::optional<Register> result_opt) {
  // ...
  DCHECK(!IsHeapNumberMap(*transition_target.object()));

  for (const compiler::MapRef transition_source : transition_sources) {
    bool is_simple = /*...*/(
        transition_source.elements_kind(), transition_target.elements_kind());
    // ...
    __ CompareTaggedAndJumpIf(
        map, transition_source.object(), kEqual,
        __ MakeDeferredCode(
            [](MaglevAssembler* masm, Register object, Register map,
               RegisterSnapshot register_snapshot,
               compiler::MapRef transition_target, bool is_simple,
               ZoneLabelRef done, std::optional<Register> result_opt) {
              if (is_simple) {
                // A simple transition only rewrites the map word.
                __ MoveTagged(map, transition_target.object());
                __ StoreTaggedFieldWithWriteBarrier(/*...*/);
              } else {
                // Otherwise the elements must be copied in the runtime.
                SaveRegisterStateForCall save_state(masm, register_snapshot);
                __ Push(object, transition_target.object());
                // ...
                __ CallRuntime(Runtime::kTransitionElementsKind);
                save_state.DefineSafepoint();
              }
              if (result_opt) {
                __ MoveTagged(*result_opt, transition_target.object());
              }
              // ...
            },
            object, map, node->register_snapshot(), transition_target,
            is_simple, done, result_opt));
  }
}
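
// Shared by TransitionElementsKind and TransitionElementsKindOrCheckMap:
// each matching source map either gets its map word rewritten in place
// (simple transitions) or goes through Runtime::kTransitionElementsKind in
// deferred code.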
void TransitionElementsKind::GenerateCode(MaglevAssembler* masm,
                                          const ProcessingState& state) {
  // ...
  ZoneLabelRef done(masm);

  __ AssertNotSmi(object);
  GenerateTransitionElementsKind(masm, this, object, map,
                                 /*...*/);
  // ...
  __ Move(result_register, map);
  // ...
}

void TransitionElementsKindOrCheckMap::GenerateCode(
    MaglevAssembler* masm, const ProcessingState& state) {
  // ...
  ZoneLabelRef done(masm);
  // ...
  GenerateTransitionElementsKind(masm, this, object, map,
                                 /*...*/);
  // ...
  __ EmitEagerDeopt(this, DeoptimizeReason::kWrongMap);
  // ...
}
void CheckTypedArrayNotDetached::SetValueLocationConstraints() {
  // ...
  set_temporaries_needed(1);
}

void CheckTypedArrayNotDetached::GenerateCode(MaglevAssembler* masm,
                                              const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  // ...
  Register scratch = temps.Acquire();
  __ DeoptIfBufferDetached(object, scratch, this);
}
void GetContinuationPreservedEmbedderData::GenerateCode(
    MaglevAssembler* masm, const ProcessingState& state) {
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  MemOperand reference = __ ExternalReferenceAsOperand(
      IsolateFieldId::kContinuationPreservedEmbedderData);
  // ...
}

void SetContinuationPreservedEmbedderData::GenerateCode(
    MaglevAssembler* masm, const ProcessingState& state) {
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  MemOperand reference = __ ExternalReferenceAsOperand(
      IsolateFieldId::kContinuationPreservedEmbedderData);
  __ Move(reference, data);
}
template <typename ResultReg, typename NodeT>
void GenerateTypedArrayLoad(MaglevAssembler* masm, NodeT* node,
                            Register object, Register index,
                            ResultReg result_reg, /*...*/) {
  __ AssertNotSmi(object);
  {
    MaglevAssembler::TemporaryRegisterScope temps(masm);
    __ AssertObjectType(object, JS_TYPED_ARRAY_TYPE,
                        AbortReason::kUnexpectedValue);
  }

  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  // ...
  __ BuildTypedArrayDataPointer(data_pointer, object);
  // ...
  MemOperand operand =
      __ TypedArrayElementOperand(data_pointer, index, element_size);
  if constexpr (std::is_same_v<ResultReg, Register>) {
    if (/*...*/) {
      __ LoadSignedField(result_reg, operand, element_size);
    } else {
      __ LoadUnsignedField(result_reg, operand, element_size);
    }
  } else {
#ifdef DEBUG
    bool result_reg_is_double = std::is_same_v<ResultReg, DoubleRegister>;
    DCHECK(result_reg_is_double);
    // ...
#endif
    switch (/*...*/) {
      case FLOAT32_ELEMENTS:
        __ LoadFloat32(result_reg, operand);
        break;
      case FLOAT64_ELEMENTS:
        __ LoadFloat64(result_reg, operand);
        break;
      default:
        UNREACHABLE();
    }
  }
}
template <typename ValueReg, typename NodeT>
void GenerateTypedArrayStore(MaglevAssembler* masm, NodeT* node,
                             Register object, Register index, ValueReg value,
                             /*...*/) {
  __ AssertNotSmi(object);
  {
    MaglevAssembler::TemporaryRegisterScope temps(masm);
    __ AssertObjectType(object, JS_TYPED_ARRAY_TYPE,
                        AbortReason::kUnexpectedValue);
  }

  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  // ...
  __ BuildTypedArrayDataPointer(data_pointer, object);
  // ...
  MemOperand operand =
      __ TypedArrayElementOperand(data_pointer, index, element_size);
  if constexpr (std::is_same_v<ValueReg, Register>) {
    __ StoreField(operand, value, element_size);
  } else {
#ifdef DEBUG
    bool value_is_double = std::is_same_v<ValueReg, DoubleRegister>;
    DCHECK(value_is_double);
#endif
    switch (/*...*/) {
      case FLOAT32_ELEMENTS:
        __ StoreFloat32(operand, value);
        break;
      case FLOAT64_ELEMENTS:
        __ StoreFloat64(operand, value);
        break;
      default:
        UNREACHABLE();
    }
  }
}
#define DEF_LOAD_TYPED_ARRAY(Name, ResultReg, ToResultReg)              \
  void Name::SetValueLocationConstraints() {                            \
    UseRegister(object_input());                                        \
    UseRegister(index_input());                                         \
    DefineAsRegister(this);                                             \
    set_temporaries_needed(1);                                          \
  }                                                                     \
  void Name::GenerateCode(MaglevAssembler* masm,                        \
                          const ProcessingState& state) {               \
    Register object = ToRegister(object_input());                       \
    Register index = ToRegister(index_input());                         \
    ResultReg result_reg = ToResultReg(result());                       \
                                                                        \
    GenerateTypedArrayLoad(masm, this, object, index, result_reg,       \
                           elements_kind_);                             \
  }

DEF_LOAD_TYPED_ARRAY(LoadSignedIntTypedArrayElement, Register, ToRegister)
DEF_LOAD_TYPED_ARRAY(LoadUnsignedIntTypedArrayElement, Register, ToRegister)
// ...
#undef DEF_LOAD_TYPED_ARRAY

#define DEF_STORE_TYPED_ARRAY(Name, ValueReg, ToValueReg)                   \
  void Name::SetValueLocationConstraints() {                                \
    UseRegister(object_input());                                            \
    UseRegister(index_input());                                             \
    UseRegister(value_input());                                             \
    set_temporaries_needed(1);                                              \
  }                                                                         \
  void Name::GenerateCode(MaglevAssembler* masm,                            \
                          const ProcessingState& state) {                   \
    Register object = ToRegister(object_input());                           \
    Register index = ToRegister(index_input());                             \
    ValueReg value = ToValueReg(value_input());                             \
                                                                            \
    GenerateTypedArrayStore(masm, this, object, index, value,               \
                            elements_kind_);                                \
  }

DEF_STORE_TYPED_ARRAY(StoreIntTypedArrayElement, Register, ToRegister)
DEF_STORE_TYPED_ARRAY(StoreDoubleTypedArrayElement, DoubleRegister,
                      ToDoubleRegister)
#undef DEF_STORE_TYPED_ARRAY
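
// The macros stamp out one node class per typed-array access shape; the
// element-kind dispatch lives in the shared GenerateTypedArrayLoad/Store
// helpers above.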
void Jump::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) {
  // Fall through when the target is the next block in emission order.
  if (target() != state.next_block()) {
    // ...
  }
}

void CheckpointedJump::GenerateCode(MaglevAssembler* masm,
                                    const ProcessingState& state) {
  // ...
  if (target() != state.next_block()) {
    // ...
  }
}
void AttemptOnStackReplacement(MaglevAssembler* masm,
                               ZoneLabelRef no_code_for_osr,
                               TryOnStackReplacement* node, Register scratch0,
                               Register scratch1, int32_t loop_depth,
                               FeedbackSlot feedback_slot,
                               BytecodeOffset osr_offset) {
  // ...
  __ AssertFeedbackVector(scratch0, scratch1);

  // Case 1: maybe there is already cached OSR code.
  Register maybe_target_code = scratch1;
  __ TryLoadOptimizedOsrCode(scratch1, CodeKind::TURBOFAN_JS, scratch0,
                             /*...*/);

  // Case 2: bump the OSR urgency and request compilation.
  __ LoadByte(scratch0, /*...*/);
  __ DecodeField<FeedbackVector::OsrUrgencyBits>(scratch0);
  // ...
  {
    RegisterSnapshot snapshot = node->register_snapshot();
    DCHECK(!snapshot.live_registers.has(maybe_target_code));
    SaveRegisterStateForCall save_register_state(masm, snapshot);
    if (node->unit()->is_inline()) {
      // OSR from an inlined function.
      CHECK(!node->unit()->is_osr());
      // ...
      __ CallRuntime(Runtime::kCompileOptimizedOSRFromMaglevInlined, 2);
    } else {
      __ CallRuntime(Runtime::kCompileOptimizedOSRFromMaglev, 1);
    }
    save_register_state.DefineSafepoint();
  }
  // ...
  // A zero result means no code was ready yet.
  __ CompareInt32AndJumpIf(maybe_target_code, 0, kEqual, *no_code_for_osr);
  // ...
  // ... GetGeneralRegistersUsedAsInputs(node->eager_deopt_info()));
  __ EmitEagerDeopt(node, DeoptimizeReason::kPrepareForOnStackReplacement);
  // ...
  __ Jump(*no_code_for_osr);
}
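
// The OSR handshake: cached TURBOFAN_JS code triggers an eager deopt with
// kPrepareForOnStackReplacement so execution can re-enter at the optimized
// loop; a zero result from the runtime means nothing is ready and the loop
// simply continues.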
int TryOnStackReplacement::MaxCallStackArgs() const {
  // For the kCompileOptimizedOSRFromMaglevInlined call.
  if (unit()->is_inline()) return 2;
  // For the kCompileOptimizedOSRFromMaglev call.
  return 1;
}

void TryOnStackReplacement::SetValueLocationConstraints() {
  // ...
  set_temporaries_needed(2);
}

void TryOnStackReplacement::GenerateCode(MaglevAssembler* masm,
                                         const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch0 = temps.Acquire();
  Register scratch1 = temps.Acquire();

  const Register osr_state = scratch1;
  __ AssertFeedbackVector(scratch0, scratch1);
  __ LoadByte(osr_state, /*...*/);

  ZoneLabelRef no_code_for_osr(masm);
  // ...
  __ DecodeField<
      base::BitFieldUnion<FeedbackVector::OsrUrgencyBits,
                          FeedbackVector::MaybeHasTurbofanOsrCodeBit>>(
      osr_state);
  // ...
  static_assert(FeedbackVector::MaybeHasTurbofanOsrCodeBit::encode(true) >
                FeedbackVector::kMaxOsrUrgency);
  __ CompareInt32AndJumpIf(
      /*...*/,
      __ MakeDeferredCode(AttemptOnStackReplacement, no_code_for_osr, this,
                          /*...*/));
  __ bind(*no_code_for_osr);
}
void Deopt::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) {
  // ...
}

void BranchIfSmi::GenerateCode(MaglevAssembler* masm,
                               const ProcessingState& state) {
  // ...
  /*...*/ state.next_block());
}

void BranchIfRootConstant::GenerateCode(MaglevAssembler* masm,
                                        const ProcessingState& state) {
  // ...
  /*...*/ state.next_block());
}

void BranchIfToBooleanTrue::GenerateCode(MaglevAssembler* masm,
                                         const ProcessingState& state) {
  // ...
  ZoneLabelRef true_label = /*...*/;
  ZoneLabelRef false_label = /*...*/;
  bool fallthrough_when_true = (if_true() == state.next_block());
  __ ToBoolean(/*...*/, true_label, false_label, fallthrough_when_true);
}
void BranchIfInt32ToBooleanTrue::GenerateCode(MaglevAssembler* masm,
                                              const ProcessingState& state) {
  // ...
}

void BranchIfIntPtrToBooleanTrue::GenerateCode(MaglevAssembler* masm,
                                               const ProcessingState& state) {
  // ...
}

void BranchIfFloat64ToBooleanTrue::GenerateCode(MaglevAssembler* masm,
                                                const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  // ...
  // Compare against 0.0 to decide truthiness.
  __ Move(double_scratch, 0.0);
  // ...
}

void BranchIfFloat64IsHole::GenerateCode(MaglevAssembler* masm,
                                         const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  // ...
  bool fallthrough_when_true = if_true() == state.next_block();
  bool fallthrough_when_false = if_false() == state.next_block();
  if (fallthrough_when_false) {
    if (fallthrough_when_true) {
      // Both branches fall through: nothing to emit.
      // ...
    }
    // ...
  }
  // ...
  if (!fallthrough_when_true) {
    // ...
  }
  // ...
}
void HoleyFloat64IsHole::SetValueLocationConstraints() {
  // ...
  set_temporaries_needed(1);
}

void HoleyFloat64IsHole::GenerateCode(MaglevAssembler* masm,
                                      const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  // ...
  Label done, if_not_hole;
  // ...
  __ bind(&if_not_hole);
  // ...
}
void BranchIfReferenceEqual::GenerateCode(MaglevAssembler* masm,
                                          const ProcessingState& state) {
  // ...
  __ CmpTagged(left, right);
  // ...
}

void BranchIfInt32Compare::GenerateCode(MaglevAssembler* masm,
                                        const ProcessingState& state) {
  // ...
}

void BranchIfUint32Compare::GenerateCode(MaglevAssembler* masm,
                                         const ProcessingState& state) {
  // ...
}

void BranchIfUndefinedOrNull::GenerateCode(MaglevAssembler* masm,
                                           const ProcessingState& state) {
  // ...
  __ JumpIfRoot(value, RootIndex::kUndefinedValue, if_true()->label());
  // ...
  auto* next_block = state.next_block();
  // ...
}
void BranchIfUndetectable::GenerateCode(MaglevAssembler* masm,
                                        const ProcessingState& state) {
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();

  auto* next_block = state.next_block();
  // ...
  if (next_block != if_true()) {
    // ...
  }
}

void TestUndetectable::SetValueLocationConstraints() {
  // ...
  set_temporaries_needed(1);
}

void TestUndetectable::GenerateCode(MaglevAssembler* masm,
                                    const ProcessingState& state) {
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();

  Label return_false, done;
  __ JumpIfNotUndetectable(object, scratch, check_type(), &return_false,
                           /*...*/);
  // ...
  __ LoadRoot(return_value, RootIndex::kTrueValue);
  // ...
  __ bind(&return_false);
  __ LoadRoot(return_value, RootIndex::kFalseValue);
  // ...
}
void BranchIfJSReceiver::GenerateCode(MaglevAssembler* masm,
                                      const ProcessingState& state) {
  // ...
}

void BranchIfTypeOf::GenerateCode(MaglevAssembler* masm,
                                  const ProcessingState& state) {
  // ...
}
void Switch::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) {
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  std::unique_ptr<Label*[]> labels = std::make_unique<Label*[]>(size());
  for (int i = 0; i < size(); i++) {
    BasicBlock* block = (targets())[i].block_ptr();
    block->set_start_block_of_switch_case(true);
    labels[i] = block->label();
  }
  // ...
  __ SignExtend32To64Bits(val, val);
  // ...
}
void HandleNoHeapWritesInterrupt::GenerateCode(MaglevAssembler* masm,
                                               const ProcessingState& state) {
  ZoneLabelRef done(masm);
  Label* deferred = __ MakeDeferredCode(
      [](MaglevAssembler* masm, ZoneLabelRef done, Node* node) {
        // ...
        {
          SaveRegisterStateForCall save_register_state(
              masm, node->register_snapshot());
          // ...
          __ CallRuntime(Runtime::kHandleNoHeapWritesInterrupts, 0);
          save_register_state.DefineSafepointWithLazyDeopt(
              node->lazy_deopt_info());
        }
        // ...
      },
      done, this);
  // ...
  MaglevAssembler::TemporaryRegisterScope temps(masm);
  Register scratch = temps.AcquireScratch();
  MemOperand check = __ ExternalReferenceAsOperand(
      ExternalReference::address_of_no_heap_write_interrupt_request(
          /*...*/),
      scratch);
  // ...
}
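
// ----------------------------------------------------------------------------
// Debug printing. The PrintParams overrides below feed the graph labeller's
// textual output; they print only node-specific parameters, since inputs and
// results are handled generically.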
  os << "(" << value() << ")";
// ...
  os << "(" << value() << ")";
// ...
  os << "(" << value() << ")";
// ...
  os << "(" << value() << ")";
// ...
  if (value().is_nan()) {
    os << "(NaN [0x" << std::hex << value().get_bits() << std::dec << "]";
    if (value().is_hole_nan()) {
      // ...
    } else if (value().get_bits() ==
               base::bit_cast<uint64_t>(
                   std::numeric_limits<double>::quiet_NaN())) {
      os << ", quiet NaN";
    }
    // ...
  }
// ...
  os << "(" << *object_.object() << ")";
// ...
  os << "(" << *object_.object() << ")";
// ...
  os << "(" << input() << ")";
// ...
  os << " [pretenured]";
// ...
  if (object()->has_static_map()) {
    // ...
  }
// ...
  os << "(CharCodeAt)";
  // ...
  os << "(CodePointAt)";
// ...
  os << *map.object();
// ...
  os << *map.object();
// ...
  os << *map.object();
// ...
  os << ", " << *source.object();
// ...
  os << *map.object();
// ...
  os << "(" << condition() << ", " << deoptimize_reason() << ")";
// ...
  os << "(" << index_ << ")";
template <typename Derived, ValueRepresentation FloatType>
void CheckedNumberOrOddballToFloat64OrHoleyFloat64<Derived, FloatType>::
    PrintParams(std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(" << conversion_type() << ")";
}

void UncheckedNumberOrOddballToFloat64::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(" << conversion_type() << ")";
}

void CheckedTruncateNumberOrOddballToInt32::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(" << conversion_type() << ")";
}

void TruncateNumberOrOddballToInt32::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(" << conversion_type() << ")";
}
template <typename T>
void AbstractLoadTaggedField<T>::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(0x" << std::hex << offset() << std::dec;
  // ...
  if (!result().operand().IsUnallocated()) {
    if (decompresses_tagged_result()) {
      os << ", decompressed";
    } else {
      os << ", compressed";
    }
  }
  os << ")";
}

void LoadTaggedFieldForScriptContextSlot::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(0x" << std::hex << offset() << std::dec << ")";
}
void LoadDoubleField::PrintParams(std::ostream& os,
                                  MaglevGraphLabeller* graph_labeller) const {
  os << "(0x" << std::hex << offset() << std::dec << ")";
}

void LoadFloat64::PrintParams(std::ostream& os,
                              MaglevGraphLabeller* graph_labeller) const {
  os << "(0x" << std::hex << offset() << std::dec << ")";
}

void LoadHeapInt32::PrintParams(std::ostream& os,
                                MaglevGraphLabeller* graph_labeller) const {
  os << "(0x" << std::hex << offset() << std::dec << ")";
}

void LoadInt32::PrintParams(std::ostream& os,
                            MaglevGraphLabeller* graph_labeller) const {
  os << "(0x" << std::hex << offset() << std::dec << ")";
}

void LoadFixedArrayElement::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  // ...
  if (!result().operand().IsUnallocated()) {
    if (decompresses_tagged_result()) {
      os << "(decompressed)";
    } else {
      os << "(compressed)";
    }
  }
}

void StoreDoubleField::PrintParams(std::ostream& os,
                                   MaglevGraphLabeller* graph_labeller) const {
  os << "(0x" << std::hex << offset() << std::dec << ")";
}

void StoreHeapInt32::PrintParams(std::ostream& os,
                                 MaglevGraphLabeller* graph_labeller) const {
  os << "(0x" << std::hex << offset() << std::dec << ")";
}

void StoreFloat64::PrintParams(std::ostream& os,
                               MaglevGraphLabeller* graph_labeller) const {
  os << "(0x" << std::hex << offset() << std::dec << ")";
}

void StoreInt32::PrintParams(std::ostream& os,
                             MaglevGraphLabeller* graph_labeller) const {
  os << "(0x" << std::hex << offset() << std::dec << ")";
}

void StoreTaggedFieldNoWriteBarrier::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(0x" << std::hex << offset() << std::dec << ")";
}
  switch (kind) {
    case StoreMap::Kind::kInitializing:
      os << "Initializing";
      break;
    case StoreMap::Kind::kInlinedAllocation:
      os << "InlinedAllocation";
      break;
    case StoreMap::Kind::kTransitioning:
      os << "Transitioning";
      break;
  }
// ...

void StoreMap::PrintParams(std::ostream& os,
                           MaglevGraphLabeller* graph_labeller) const {
  os << "(" << *map_.object() << ", " << kind() << ")";
}

void StoreTaggedFieldWithWriteBarrier::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(0x" << std::hex << offset() << std::dec << ")";
}

void StoreTrustedPointerFieldWithWriteBarrier::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(0x" << std::hex << offset() << std::dec << ")";
}
void LoadNamedGeneric::PrintParams(std::ostream& os,
                                   MaglevGraphLabeller* graph_labeller) const {
  os << "(" << *name_.object() << ")";
}

void LoadNamedFromSuperGeneric::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(" << *name_.object() << ")";
}

void SetNamedGeneric::PrintParams(std::ostream& os,
                                  MaglevGraphLabeller* graph_labeller) const {
  os << "(" << *name_.object() << ")";
}

void DefineNamedOwnGeneric::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(" << *name_.object() << ")";
}
void HasInPrototypeChain::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  // ...
}

void GapMove::PrintParams(std::ostream& os,
                          MaglevGraphLabeller* graph_labeller) const {
  // ...
}

void ConstantGapMove::PrintParams(std::ostream& os,
                                  MaglevGraphLabeller* graph_labeller) const {
  // ...
  os << " → " << target() << ")";
}
void Float64Compare::PrintParams(std::ostream& os,
                                 MaglevGraphLabeller* graph_labeller) const {
  os << "(" << operation() << ")";
}

void Float64ToBoolean::PrintParams(std::ostream& os,
                                   MaglevGraphLabeller* graph_labeller) const {
  // ...
}

void Int32Compare::PrintParams(std::ostream& os,
                               MaglevGraphLabeller* graph_labeller) const {
  os << "(" << operation() << ")";
}

void Int32ToBoolean::PrintParams(std::ostream& os,
                                 MaglevGraphLabeller* graph_labeller) const {
  // ...
}

void IntPtrToBoolean::PrintParams(std::ostream& os,
                                  MaglevGraphLabeller* graph_labeller) const {
  // ...
}

void Float64Ieee754Unary::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  switch (ieee_function_) {
#define CASE(MathName, ExtName, EnumName) \
  case Ieee754Function::k##EnumName:      \
    os << "(" << #EnumName << ")";        \
    break;
    // ...
#undef CASE
  }
}
void Float64Round::PrintParams(std::ostream& os,
                               MaglevGraphLabeller* graph_labeller) const {
  switch (kind()) {
    // ...
    case Kind::kNearest:
      // ...
      break;
      // ...
  }
}

void Phi::PrintParams(std::ostream& os,
                      MaglevGraphLabeller* graph_labeller) const {
  os << "(" << (owner().is_valid() ? owner().ToString() : "VO") << ")";
}
void Call::PrintParams(std::ostream& os,
                       MaglevGraphLabeller* graph_labeller) const {
  os << "(" << receiver_mode_ << ", ";
  switch (target_type_) {
    case TargetType::kJSFunction:
      // ...
      break;
    case TargetType::kAny:
      // ...
      break;
  }
  // ...
}

void CallSelf::PrintParams(std::ostream& os,
                           MaglevGraphLabeller* graph_labeller) const {
  // ...
}

void CallKnownJSFunction::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(" << shared_function_info_.object() << ")";
}

void CallKnownApiFunction::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  // ...
      os << "no profiling, ";
      break;
    case kNoProfilingInlined:
      os << "no profiling inlined, ";
      break;
  // ...
  os << function_template_info_.object() << ")";
}
void CallBuiltin::PrintParams(std::ostream& os,
                              MaglevGraphLabeller* graph_labeller) const {
  // ...
}

void CallCPPBuiltin::PrintParams(std::ostream& os,
                                 MaglevGraphLabeller* graph_labeller) const {
  // ...
}

void CallForwardVarargs::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  if (start_index_ == 0) return;
  os << "(" << start_index_ << ")";
}

void CallRuntime::PrintParams(std::ostream& os,
                              MaglevGraphLabeller* graph_labeller) const {
  // ...
}

void TestTypeOf::PrintParams(std::ostream& os,
                             MaglevGraphLabeller* graph_labeller) const {
  // ...
}

void ReduceInterruptBudgetForLoop::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(" << amount() << ")";
}

void ReduceInterruptBudgetForReturn::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(" << amount() << ")";
}
void Deopt::PrintParams(std::ostream& os,
                        MaglevGraphLabeller* graph_labeller) const {
  // ...
}

void BranchIfRootConstant::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  // ...
}

void BranchIfFloat64Compare::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(" << operation_ << ")";
}

void BranchIfInt32Compare::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(" << operation_ << ")";
}

void BranchIfUint32Compare::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(" << operation_ << ")";
}

void BranchIfTypeOf::PrintParams(std::ostream& os,
                                 MaglevGraphLabeller* graph_labeller) const {
  // ...
}

void ExtendPropertiesBackingStore::PrintParams(
    std::ostream& os, MaglevGraphLabeller* graph_labeller) const {
  os << "(" << old_length_ << ")";
}
// ...
      KnownNodeAspects::LoadedPropertyMapKey::Elements());
  // ...
  elements_properties->second.clear();
  if (v8_flags.trace_maglev_graph_building) {
    std::cout << "  * Removing non-constant cached [Elements]";
    // ...
  }
// ...

void StoreMap::ClearUnstableNodeAspects(/*...*/) {
  DCHECK(properties().can_write());
  // ...
  switch (kind()) {
    case Kind::kInitializing:
    case Kind::kInlinedAllocation:
      return;
    case Kind::kTransitioning: {
      // ...
      if (node_info->possible_maps_are_known() &&
          node_info->possible_maps().size() == 1) {
        // ...
        return map.equals(old_map);
        // ...
        if (v8_flags.trace_maglev_graph_building) {
          std::cout << "  ! StoreMap: Clearing unstable map "
          // ...
        }
        // ...
      }
      // ...
    }
  }
  // ...
  if (v8_flags.trace_maglev_graph_building) {
    std::cout << "  ! StoreMap: Clearing unstable maps" << std::endl;
  }
  // ...
}

void CheckMapsWithMigration::ClearUnstableNodeAspects(
    /*...*/) {
  // ...
}

void MigrateMapIfNeeded::ClearUnstableNodeAspects(
    /*...*/) {
  // ...
}

std::optional<int32_t> NodeBase::TryGetInt32ConstantInput(int index) {
  Node* node = input(index).node();
  if (auto smi = node->TryCast<SmiConstant>()) {
    return smi->value().value();
  }
  if (auto i32 = node->TryCast<Int32Constant>()) {
    return i32->value();
  }
  return {};
}
#define Assert(condition)
interpreter::OperandScale scale
#define SBXCHECK_EQ(lhs, rhs)
static constexpr int kShift
constexpr bool is_subset_of(EnumSet set) const
constexpr void Add(E element)
static constexpr int kNewTargetIndex
static constexpr int kPaddingIndex
static constexpr int kArgcIndex
static constexpr int kNumExtraArgs
static constexpr int kTargetIndex
static constexpr int kReceiverIndex
static CallInterfaceDescriptor CallInterfaceDescriptorFor(Builtin builtin)
static Address CppEntryOf(Builtin builtin)
static int GetFormalParameterCount(Builtin builtin)
static V8_EXPORT_PRIVATE const char * name(Builtin builtin)
static constexpr Register FunctionTemplateInfoRegister()
static DEFINE_PARAMETERS_VARARGS(kApiFunctionAddress, kActualArgumentsCount, kFunctionTemplateInfo) DEFINE_PARAMETER_TYPES(MachineType constexpr Register ActualArgumentsCountRegister()
int GetStackParameterCount() const
static int MaximumFunctionContextSlots()
static Tagged< Smi > SmiMarker()
static Tagged< Smi > Other()
static Tagged< Smi > MutableInt32()
static Tagged< Smi > Const()
static Tagged< Smi > MutableHeapNumber()
static V8_INLINE constexpr int OffsetOfElementAt(int index)
@ MIN_CONTEXT_EXTENDED_SLOTS
@ CONTEXT_SIDE_TABLE_PROPERTY_INDEX
static V8_EXPORT_PRIVATE ExternalReference address_of_pending_message(LocalIsolate *local_isolate)
static ExternalReference Create(const SCTableReference &table_ref)
static constexpr int kMaxOsrUrgency
static constexpr int kMaxLength
uint64_t get_bits() const
double get_scalar() const
static constexpr int kMapOffset
static V8_INLINE constexpr bool IsValidSmi(T value)
static V8_INLINE Isolate * Current()
LocalIsolate * AsLocalIsolate()
LocalHeap * main_thread_local_heap()
static const int kInitialMaxFastElementArray
static const int kFieldsAdded
static constexpr MachineType AnyTagged()
Isolate * isolate() const
void CallBuiltin(Builtin builtin, Condition cond=al)
static constexpr int OffsetOfElementAt(int index)
static constexpr int SizeFor(int length)
static const int kNoHashSentinel
static constexpr Register no_reg()
static const char * name(RootIndex root_index)
static constexpr bool IsReadOnly(RootIndex root_index)
static V8_EXPORT_PRIVATE const Function * FunctionForId(FunctionId id)
static constexpr Tagged< Smi > FromInt(int value)
static constexpr int kMaxValue
static constexpr int kExpressionsOffset
static constexpr int OffsetOfElementAt(int index)
static Tagged< TaggedIndex > FromIntptr(intptr_t value)
static constexpr int kRegisterFileFromFp
static constexpr Register ObjectRegister()
static constexpr Register SlotAddressRegister()
T * AllocateArray(size_t length)
IndirectHandle< FeedbackCell > object() const
IndirectHandle< FeedbackVector > object() const
IndirectHandle< HeapObject > object() const
IndirectHandle< InternalizedString > object() const
MachineRepresentation representation() const
static LocationOperand * cast(InstructionOperand *op)
IndirectHandle< Map > object() const
IndirectHandle< Name > object() const
IndirectHandle< ScopeInfo > object() const
Builtin builtin_id() const
IndirectHandle< SharedFunctionInfo > object() const
@ REGISTER_OR_SLOT_OR_CONSTANT
static constexpr Register virtual_accumulator()
constexpr int index() const
constexpr int ToParameterIndex() const
constexpr bool is_valid() const
std::string ToString() const
constexpr bool is_parameter() const
static const char * ToString(LiteralFlag literal_flag)