#elif V8_TARGET_ARCH_ARM64
#elif V8_TARGET_ARCH_IA32
#elif V8_TARGET_ARCH_ARM
#elif V8_TARGET_ARCH_PPC64
#elif V8_TARGET_ARCH_S390X
#elif V8_TARGET_ARCH_RISCV64
#elif V8_TARGET_ARCH_RISCV32
#elif V8_TARGET_ARCH_MIPS64
#elif V8_TARGET_ARCH_LOONG64
#else
#error Unsupported target architecture.
#endif
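// Each architecture branch selects the matching baseline-compiler-<arch>-inl.h
// implementation header for the target.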
#define RCS_BASELINE_SCOPE(rcs)                                    \
  RCS_SCOPE(stats_,                                                \
            local_isolate_->is_main_thread()                       \
                ? RuntimeCallCounterId::kCompileBaseline##rcs      \
                : RuntimeCallCounterId::kCompileBackgroundBaseline##rcs)
template <typename IsolateT>
Handle<TrustedByteArray> BytecodeOffsetTableBuilder::ToBytecodeOffsetTable(
    IsolateT* isolate) {
  if (bytes_.empty()) return isolate->factory()->empty_trusted_byte_array();
  Handle<TrustedByteArray> table =
      isolate->factory()->NewTrustedByteArray(static_cast<int>(bytes_.size()));
  MemCopy(table->begin(), bytes_.data(), bytes_.size());
  return table;
}
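// The table built here maps baseline code pc offsets back to the bytecode
// offsets they were generated from (the builder records one entry per visited
// bytecode via AddPosition()).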
#ifdef DEBUG
bool Clobbers(Register target, Tagged<Smi> smi) { return false; }
bool Clobbers(Register target, Tagged<TaggedIndex> index) { return false; }
bool Clobbers(Register target, int32_t imm) { return false; }
bool Clobbers(Register target, RootIndex index) { return false; }
bool Clobbers(Register target, interpreter::Register reg) { return false; }
bool Clobbers(Register target, interpreter::RegisterList list) { return false; }
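// Debug-only helpers backing CheckSettingDoesntClobber() below: arguments that
// are not machine registers (immediates, handles, root indices, interpreter
// registers or register lists) can never alias the destination register, so
// these overloads conservatively report "no clobber".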
bool MachineTypeMatches(MachineType type, Register reg) { return true; }
bool MachineTypeMatches(MachineType type, MemOperand reg) { return true; }
bool MachineTypeMatches(MachineType type, DirectHandle<HeapObject> handle) {
  return type.IsTagged() && !type.IsTaggedSigned();
}
bool MachineTypeMatches(MachineType type, Tagged<Smi> smi) {
  return type.IsTagged() && !type.IsTaggedPointer();
}
bool MachineTypeMatches(MachineType type, Tagged<TaggedIndex> index) {
  // A TaggedIndex is Smi-like, so accept the same machine types as for Smis.
  return type.IsTagged() && !type.IsTaggedPointer();
}
bool MachineTypeMatches(MachineType type, int32_t imm) {
  // 32-bit immediates are also accepted for word64 parameters; they are
  // zero-extended when moved into the register.
  return type.representation() == MachineRepresentation::kWord32 ||
         type.representation() == MachineRepresentation::kWord64;
}
bool MachineTypeMatches(MachineType type, RootIndex index) {
  return type.IsTagged() && !type.IsTaggedSigned();
}
bool MachineTypeMatches(MachineType type, interpreter::Register reg) {
  return type.IsTagged();
}
template <typename Descriptor, typename... Args>
struct CheckArgsHelper;

template <typename Descriptor>
struct CheckArgsHelper<Descriptor> {
  static void Check(BaselineAssembler* masm, int i) {
    if (Descriptor::AllowVarArgs()) {
      CHECK_GE(i, Descriptor::GetParameterCount());
    } else {
      CHECK_EQ(i, Descriptor::GetParameterCount());
    }
  }
};

template <typename Descriptor, typename Arg, typename... Args>
struct CheckArgsHelper<Descriptor, Arg, Args...> {
  static void Check(BaselineAssembler* masm, int i, Arg arg, Args... args) {
    if (i >= Descriptor::GetParameterCount()) {
      CHECK(Descriptor::AllowVarArgs());
      return;
    }
    CHECK(MachineTypeMatches(Descriptor().GetParameterType(i), arg));
    CheckArgsHelper<Descriptor, Args...>::Check(masm, i + 1, args...);
  }
};

template <typename Descriptor, typename... Args>
struct CheckArgsHelper<Descriptor, interpreter::RegisterList, Args...> {
  static void Check(BaselineAssembler* masm, int i,
                    interpreter::RegisterList list, Args... args) {
    for (int reg_index = 0; reg_index < list.register_count();
         ++reg_index, ++i) {
      if (i >= Descriptor::GetParameterCount()) {
        CHECK(Descriptor::AllowVarArgs());
        return;
      }
      CHECK(MachineTypeMatches(Descriptor().GetParameterType(i),
                               list[reg_index]));
    }
    CheckArgsHelper<Descriptor, Args...>::Check(masm, i, args...);
  }
};

template <typename Descriptor, typename... Args>
void CheckArgs(BaselineAssembler* masm, Args... args) {
  CheckArgsHelper<Descriptor, Args...>::Check(masm, 0, args...);
}
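// CheckArgsHelper recurses over the argument pack at compile time, checking
// one argument per step against the MachineType that the builtin's call
// interface descriptor declares for that parameter position; an
// interpreter::RegisterList consumes one descriptor slot per contained
// register. A hypothetical call such as
//   CheckArgs<ToObjectDescriptor>(masm, receiver_reg);
// would verify that parameter 0 of that descriptor accepts a tagged value.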
void CheckSettingDoesntClobber(Register target) {}
template <typename Arg, typename... Args>
void CheckSettingDoesntClobber(Register target, Arg arg, Args... args) {
  DCHECK(!Clobbers(target, arg));
  CheckSettingDoesntClobber(target, args...);
}
#else  // DEBUG

template <typename Descriptor, typename... Args>
void CheckArgs(Args... args) {}

template <typename... Args>
void CheckSettingDoesntClobber(Register target, Args... args) {}

#endif  // DEBUG
template <typename Descriptor, int ArgIndex, bool kIsRegister, typename... Args>
struct ArgumentSettingHelper;

template <typename Descriptor, int ArgIndex, bool kIsRegister>
struct ArgumentSettingHelper<Descriptor, ArgIndex, kIsRegister> {
  static void Set(BaselineAssembler* masm) {
    // Should only be reached once all register parameters have been consumed.
    static_assert(ArgIndex == Descriptor::GetRegisterParameterCount());
  }
};

template <typename Descriptor, int ArgIndex, typename Arg, typename... Args>
struct ArgumentSettingHelper<Descriptor, ArgIndex, true, Arg, Args...> {
  static void Set(BaselineAssembler* masm, Arg arg, Args... args) {
    static_assert(ArgIndex < Descriptor::GetRegisterParameterCount());
    Register target = Descriptor::GetRegisterParameter(ArgIndex);
    CheckSettingDoesntClobber(target, args...);
    masm->Move(target, arg);
    ArgumentSettingHelper<Descriptor, ArgIndex + 1,
                          (ArgIndex + 1 <
                           Descriptor::GetRegisterParameterCount()),
                          Args...>::Set(masm, args...);
  }
};

template <typename Descriptor, int ArgIndex>
struct ArgumentSettingHelper<Descriptor, ArgIndex, true,
                             interpreter::RegisterList> {
  static void Set(BaselineAssembler* masm, interpreter::RegisterList list) {
    static_assert(ArgIndex < Descriptor::GetRegisterParameterCount());
    DCHECK_EQ(ArgIndex + list.register_count(),
              Descriptor::GetRegisterParameterCount());
    for (int i = 0; ArgIndex + i < Descriptor::GetRegisterParameterCount();
         ++i) {
      Register target = Descriptor::GetRegisterParameter(ArgIndex + i);
      masm->Move(target, masm->RegisterFrameOperand(list[i]));
    }
  }
};
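// ArgumentSettingHelper places the arguments in order into the registers named
// by the descriptor, asserting in debug builds that each destination register
// does not still hold a yet-to-be-moved argument; a RegisterList must fill
// exactly the remaining register parameters (see the DCHECK_EQ above) and is
// copied out of the interpreter frame one register at a time.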
template <typename Descriptor, int ArgIndex, typename Arg, typename... Args>
struct ArgumentSettingHelper<Descriptor, ArgIndex, false, Arg, Args...> {
  static void Set(BaselineAssembler* masm, Arg arg, Args... args) {
    // Stack-passed parameters are not expected for baseline builtin calls.
    UNREACHABLE();
  }
};
template <Builtin kBuiltin, typename... Args>
void MoveArgumentsForBuiltin(BaselineAssembler* masm, Args... args) {
  using Descriptor = typename CallInterfaceDescriptorFor<kBuiltin>::type;
  CheckArgs<Descriptor>(masm, args...);
  ArgumentSettingHelper<Descriptor, 0,
                        (0 < Descriptor::GetRegisterParameterCount()),
                        Args...>::Set(masm, args...);
  if (Descriptor::HasContextParameter()) {
    masm->LoadContext(Descriptor::ContextRegister());
  }
}
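// MoveArgumentsForBuiltin is the common entry point used by CallBuiltin and
// TailCallBuiltin below: it (debug-)checks the arguments against the builtin's
// descriptor, moves them into the descriptor's parameter registers, and loads
// the context register when the builtin expects a context parameter.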
  options.builtin_call_jump_mode =
      isolate->is_short_builtin_calls_enabled()
          ? BuiltinCallJumpMode::kPCRelative
          : kFallbackBuiltinCallJumpModeForBaseline;

#ifdef V8_TARGET_ARCH_IA32
const int kAverageBytecodeToInstructionRatio = 5;
#else
const int kAverageBytecodeToInstructionRatio = 7;
#endif
std::unique_ptr<AssemblerBuffer> AllocateBuffer(
    DirectHandle<BytecodeArray> bytecodes) {
      stats_(local_isolate->runtime_call_stats()),
      shared_function_info_(shared_function_info),
      local_isolate->GetMainThreadIsolateUnsafe(), &zone_,
      BaselineAssemblerOptions(local_isolate->GetMainThreadIsolateUnsafe()),

  for (int i = 0; i < table.NumberOfRangeEntries(); ++i) {

  return bytecode->length() * kAverageBytecodeToInstructionRatio;
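// Example: a 200-byte bytecode array is estimated at roughly 200 * 7 = 1400
// bytes of baseline code (200 * 5 = 1000 on IA32); AllocateBuffer() sizes the
// assembler buffer from this estimate, preferring some slack over growing and
// copying the buffer while emitting code.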
  return iterator().GetRegisterOperand(operand_index);
  effect_state_.CheckEffect();
  effect_state_.CheckEffect();
  std::tie(reg0, reg1) = iterator().GetRegisterPairOperand(operand_index);
template <typename Type>
  return iterator().GetConstantAtIndexAsSmi(operand_index);
template <typename Type>
  return iterator().GetUnsignedImmediateOperand(operand_index);
  return iterator().GetImmediateOperand(operand_index);
  return iterator().GetIndexOperand(operand_index);
  return iterator().GetFlag8Operand(operand_index);
  return iterator().GetFlag16Operand(operand_index);
  return iterator().GetRegisterCountOperand(operand_index);
  return __ FeedbackVectorOperand();
  __ Move(output, __ FeedbackVectorOperand());
  __ LoadTaggedField(output, output,
                     FeedbackVector::kClosureFeedbackCellArrayOffset);
  Label done, set_true;
  __ LoadRoot(output, RootIndex::kFalseValue);
  __ LoadRoot(output, RootIndex::kTrueValue);
  switch (iterator().current_bytecode()) {
    case interpreter::Bytecode::kJumpLoop:
  effect_state_.clear();
  std::ostringstream str;

#ifdef V8_TRACE_UNOPTIMIZED
  TraceBytecode(Runtime::kTraceUnoptimizedBytecodeEntry);
#endif

  std::optional<EnsureAccumulatorPreservedScope> accumulator_preserved_scope;
  accumulator_preserved_scope.emplace(&basm_);

#define BYTECODE_CASE(name, ...)       \
  case interpreter::Bytecode::k##name: \
    Visit##name();                     \
    break;

#ifdef V8_TRACE_UNOPTIMIZED
  TraceBytecode(Runtime::kTraceUnoptimizedBytecodeExit);
#endif
  __ RecordComment(" -- Verify frame size");

  __ RecordComment(" -- Verify feedback vector");
  __ Move(scratch, __ FeedbackVectorOperand());
  Label is_smi, is_ok;
  __ JumpIfSmi(scratch, &is_smi);
  __ JumpIfObjectTypeFast(kEqual, scratch, FEEDBACK_VECTOR_TYPE, &is_ok);
  __ Bind(&is_smi);
  __ masm()->Abort(AbortReason::kExpectedFeedbackVector);
  __ Bind(&is_ok);

#ifdef V8_TRACE_UNOPTIMIZED
void BaselineCompiler::TraceBytecode(Runtime::FunctionId function_id) {
  if (!v8_flags.trace_baseline_exec) return;
  ASM_CODE_COMMENT_STRING(
      &masm_, function_id == Runtime::kTraceUnoptimizedBytecodeEntry
                  ? "Trace bytecode entry"
                  : "Trace bytecode exit");

#define DECLARE_VISITOR(name, ...) void Visit##name();
BYTECODE_LIST(DECLARE_VISITOR, DECLARE_VISITOR)
#undef DECLARE_VISITOR

#define DECLARE_VISITOR(name, ...) \
  void VisitIntrinsic##name(interpreter::RegisterList args);
INTRINSICS_LIST(DECLARE_VISITOR)
#undef DECLARE_VISITOR
  __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, skip_interrupt_label);
  CallRuntime(stack_check_behavior == kEnableStackCheck
                  ? Runtime::kBytecodeBudgetInterruptWithStackCheck_Sparkplug
                  : Runtime::kBytecodeBudgetInterrupt_Sparkplug,
              __ FunctionOperand());
  int target_offset = iterator().GetJumpTargetOffset();

#if defined(DEBUG) || defined(V8_ENABLE_CET_SHADOW_STACK)
constexpr static bool BuiltinMayDeopt(Builtin id) {
  switch (id) {
    case Builtin::kSuspendGeneratorBaseline:
    case Builtin::kBaselineOutOfLinePrologue:
    case Builtin::kIncBlockCounter:
    case Builtin::kToObject:
    case Builtin::kStoreScriptContextSlotBaseline:
    case Builtin::kStoreCurrentScriptContextSlotBaseline:
      return true;
    case Builtin::kFindNonDefaultConstructorOrConstruct:
template <Builtin kBuiltin, typename... Args>
void BaselineCompiler::CallBuiltin(Args... args) {
  effect_state_.CheckEffect();
  if (BuiltinMayDeopt(kBuiltin)) {
    effect_state_.MayDeopt();
  }
  detail::MoveArgumentsForBuiltin<kBuiltin>(&basm_, args...);
  __ CallBuiltin(kBuiltin);
#ifdef V8_ENABLE_CET_SHADOW_STACK
  if (BuiltinMayDeopt(kBuiltin)) {
    __ MaybeEmitPlaceHolderForDeopt();
  }
#endif  // V8_ENABLE_CET_SHADOW_STACK
}
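// Builtins listed in BuiltinMayDeopt() can deoptimize while the baseline frame
// is live, so calls to them are recorded in the effect state and, when CET
// shadow stacks are enabled, additionally emit a deopt placeholder.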
template <Builtin kBuiltin, typename... Args>
void BaselineCompiler::TailCallBuiltin(Args... args) {
  effect_state_.CheckEffect();
  detail::MoveArgumentsForBuiltin<kBuiltin>(&basm_, args...);
  __ TailCallBuiltin(kBuiltin);
}

template <typename... Args>
void BaselineCompiler::CallRuntime(Runtime::FunctionId function, Args... args) {
  effect_state_.CheckEffect();
  effect_state_.MayDeopt();
  __ LoadContext(kContextRegister);
  int nargs = __ Push(args...);
  __ CallRuntime(function, nargs);
#ifdef V8_ENABLE_CET_SHADOW_STACK
  __ MaybeEmitPlaceHolderForDeopt();
#endif  // V8_ENABLE_CET_SHADOW_STACK
}
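// Unlike builtin calls, runtime calls are always treated as potentially
// deoptimizing: the effect state is marked unconditionally, the arguments are
// pushed onto the stack (Push() returns the count forwarded to CallRuntime),
// and the context register is reloaded before the call.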
void BaselineCompiler::VisitLdaZero() {
void BaselineCompiler::VisitLdaSmi() {
void BaselineCompiler::VisitLdaUndefined() {
void BaselineCompiler::VisitLdaNull() {
void BaselineCompiler::VisitLdaTheHole() {
void BaselineCompiler::VisitLdaTrue() {
void BaselineCompiler::VisitLdaFalse() {
void BaselineCompiler::VisitLdaConstant() {
void BaselineCompiler::VisitLdaGlobal() {
void BaselineCompiler::VisitLdaGlobalInsideTypeof() {
void BaselineCompiler::VisitStaGlobal() {

void BaselineCompiler::VisitPushContext() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register context = scratch_scope.AcquireScratch();
  __ LoadContext(context);

void BaselineCompiler::VisitPopContext() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register context = scratch_scope.AcquireScratch();
  __ StoreContext(context);

void BaselineCompiler::VisitLdaContextSlot() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register context = scratch_scope.AcquireScratch();
  uint32_t index = Index(1);
  uint32_t depth = Uint(2);
  __ LdaContextSlot(context, index, depth);

void BaselineCompiler::VisitLdaScriptContextSlot() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register context = scratch_scope.AcquireScratch();
  uint32_t index = Index(1);
  uint32_t depth = Uint(2);
  __ LdaContextSlot(context, index, depth,

void BaselineCompiler::VisitLdaImmutableContextSlot() { VisitLdaContextSlot(); }

void BaselineCompiler::VisitLdaCurrentContextSlot() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register context = scratch_scope.AcquireScratch();
  __ LoadContext(context);

void BaselineCompiler::VisitLdaCurrentScriptContextSlot() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register context = scratch_scope.AcquireScratch();
  uint32_t index = Index(0);
  __ LoadContext(context);

void BaselineCompiler::VisitLdaImmutableCurrentContextSlot() {
  VisitLdaCurrentContextSlot();
}

void BaselineCompiler::VisitStaContextSlot() {
  uint32_t index = Index(1);
  uint32_t depth = Uint(2);
  __ StaContextSlot(context, value, index, depth);

void BaselineCompiler::VisitStaCurrentContextSlot() {
  __ LoadContext(context);
  __ StoreTaggedFieldWithWriteBarrier(

void BaselineCompiler::VisitStaScriptContextSlot() {
  SaveAccumulatorScope accumulator_scope(this, &basm_);

void BaselineCompiler::VisitStaCurrentScriptContextSlot() {
  SaveAccumulatorScope accumulator_scope(this, &basm_);

void BaselineCompiler::VisitLdaLookupSlot() {
void BaselineCompiler::VisitLdaLookupContextSlot() {
void BaselineCompiler::VisitLdaLookupScriptContextSlot() {
void BaselineCompiler::VisitLdaLookupGlobalSlot() {
void BaselineCompiler::VisitLdaLookupSlotInsideTypeof() {
void BaselineCompiler::VisitLdaLookupContextSlotInsideTypeof() {
void BaselineCompiler::VisitLdaLookupScriptContextSlotInsideTypeof() {
void BaselineCompiler::VisitLdaLookupGlobalSlotInsideTypeof() {

void BaselineCompiler::VisitStaLookupSlot() {
  uint32_t flags = Flag8(1);
    function_id = Runtime::kStoreLookupSlot_Strict;
    function_id = Runtime::kStoreLookupSlot_SloppyHoisting;
    function_id = Runtime::kStoreLookupSlot_Sloppy;
void BaselineCompiler::VisitLdar() {
void BaselineCompiler::VisitStar() {

#define SHORT_STAR_VISITOR(Name, ...)                                         \
  void BaselineCompiler::Visit##Name() {                                      \
    __ StoreRegister(                                                         \
        interpreter::Register::FromShortStar(interpreter::Bytecode::k##Name), \
        kInterpreterAccumulatorRegister);                                     \
  }
SHORT_STAR_BYTECODE_LIST(SHORT_STAR_VISITOR)
#undef SHORT_STAR_VISITOR
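// Short-star bytecodes (Star0 through Star15) encode their destination
// register in the opcode itself, so the generated visitors recover it with
// Register::FromShortStar() rather than decoding an operand.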
void BaselineCompiler::VisitMov() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register scratch = scratch_scope.AcquireScratch();

void BaselineCompiler::VisitGetNamedProperty() {

void BaselineCompiler::VisitGetNamedPropertyFromSuper() {
      LoadWithReceiverAndVectorDescriptor::LookupStartObjectRegister(),
      LoadWithReceiverAndVectorDescriptor::LookupStartObjectRegister(),

void BaselineCompiler::VisitGetKeyedProperty() {
void BaselineCompiler::VisitGetEnumeratedKeyedProperty() {

void BaselineCompiler::VisitLdaModuleVariable() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register scratch = scratch_scope.AcquireScratch();
  __ LoadContext(scratch);
  int cell_index = Int(0);
  __ LdaModuleVariable(scratch, cell_index, depth);

void BaselineCompiler::VisitStaModuleVariable() {
  int cell_index = Int(0);
        AbortReason::kUnsupportedModuleOperation)));
  __ LoadContext(scratch);
  int depth = Uint(1);
  __ StaModuleVariable(scratch, value, cell_index, depth);

void BaselineCompiler::VisitSetNamedProperty() {
void BaselineCompiler::VisitDefineNamedOwnProperty() {
void BaselineCompiler::VisitSetKeyedProperty() {
void BaselineCompiler::VisitDefineKeyedOwnProperty() {
void BaselineCompiler::VisitStaInArrayLiteral() {

void BaselineCompiler::VisitDefineKeyedOwnPropertyInLiteral() {
  CallRuntime(Runtime::kDefineKeyedOwnPropertyInLiteral,

void BaselineCompiler::VisitAdd() {
void BaselineCompiler::VisitSub() {
void BaselineCompiler::VisitMul() {
void BaselineCompiler::VisitDiv() {
void BaselineCompiler::VisitMod() {
void BaselineCompiler::VisitExp() {
void BaselineCompiler::VisitBitwiseOr() {
void BaselineCompiler::VisitBitwiseXor() {
void BaselineCompiler::VisitBitwiseAnd() {
void BaselineCompiler::VisitShiftLeft() {
void BaselineCompiler::VisitShiftRight() {
void BaselineCompiler::VisitShiftRightLogical() {
void BaselineCompiler::VisitAddSmi() {
void BaselineCompiler::VisitSubSmi() {
void BaselineCompiler::VisitMulSmi() {
void BaselineCompiler::VisitDivSmi() {
void BaselineCompiler::VisitModSmi() {
void BaselineCompiler::VisitExpSmi() {
void BaselineCompiler::VisitBitwiseOrSmi() {
void BaselineCompiler::VisitBitwiseXorSmi() {
void BaselineCompiler::VisitBitwiseAndSmi() {
void BaselineCompiler::VisitShiftLeftSmi() {
void BaselineCompiler::VisitShiftRightSmi() {
void BaselineCompiler::VisitShiftRightLogicalSmi() {
void BaselineCompiler::VisitInc() {
void BaselineCompiler::VisitDec() {
void BaselineCompiler::VisitNegate() {
void BaselineCompiler::VisitBitwiseNot() {
void BaselineCompiler::VisitToBooleanLogicalNot() {

void BaselineCompiler::VisitLogicalNot() {
      RootIndex::kFalseValue, if_true,

void BaselineCompiler::VisitTypeOf() {

void BaselineCompiler::VisitDeletePropertyStrict() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register scratch = scratch_scope.AcquireScratch();

void BaselineCompiler::VisitDeletePropertySloppy() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register scratch = scratch_scope.AcquireScratch();

void BaselineCompiler::VisitGetSuperConstructor() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register prototype = scratch_scope.AcquireScratch();

void BaselineCompiler::VisitFindNonDefaultConstructorOrConstruct() {
  SaveAccumulatorScope accumulator_scope(this, &basm_);
constexpr Builtin ConvertReceiverModeToCompactBuiltin(ConvertReceiverMode mode) {
      return Builtin::kCall_ReceiverIsAny_Baseline_Compact;
      return Builtin::kCall_ReceiverIsNullOrUndefined_Baseline_Compact;
      return Builtin::kCall_ReceiverIsNotNullOrUndefined_Baseline_Compact;

constexpr Builtin ConvertReceiverModeToBuiltin(ConvertReceiverMode mode) {
      return Builtin::kCall_ReceiverIsAny_Baseline;
      return Builtin::kCall_ReceiverIsNullOrUndefined_Baseline;
      return Builtin::kCall_ReceiverIsNotNullOrUndefined_Baseline;
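// The *_Compact call builtins take the feedback slot and argument count packed
// into a single parameter, which keeps the emitted call sequence shorter; the
// regular *_Baseline variants pass them separately.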
void BaselineCompiler::VisitCallAnyReceiver() {
  uint32_t arg_count = args.register_count();

void BaselineCompiler::VisitCallProperty() {
  uint32_t arg_count = args.register_count();

void BaselineCompiler::VisitCallProperty0() {
void BaselineCompiler::VisitCallProperty1() {
void BaselineCompiler::VisitCallProperty2() {

void BaselineCompiler::VisitCallUndefinedReceiver() {
  interpreter::RegisterList args = iterator().GetRegisterListOperand(1);
            Index(3), arg_count, RootIndex::kUndefinedValue, args);

void BaselineCompiler::VisitCallUndefinedReceiver0() {
void BaselineCompiler::VisitCallUndefinedReceiver1() {
void BaselineCompiler::VisitCallUndefinedReceiver2() {

void BaselineCompiler::VisitCallWithSpread() {
  interpreter::RegisterList args = iterator().GetRegisterListOperand(1);
  interpreter::Register spread_register = args.last_register();
  uint32_t arg_count = args.register_count();

void BaselineCompiler::VisitCallRuntime() {
              iterator().GetRegisterListOperand(1));

void BaselineCompiler::VisitCallRuntimeForPair() {
  auto builtin = iterator().GetRuntimeIdOperand(0);
    case Runtime::kLoadLookupSlotForCall: {
      auto in = iterator().GetRegisterListOperand(1);
      auto out = iterator().GetRegisterPairOperand(3);
      BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
      Register out_reg = scratch_scope.AcquireScratch();
      __ RegisterFrameAddress(out.first, out_reg);
      CallRuntime(Runtime::kLoadLookupSlotForCall_Baseline, in.first_register(),

void BaselineCompiler::VisitCallJSRuntime() {
  interpreter::RegisterList args = iterator().GetRegisterListOperand(1);
                      iterator().GetNativeContextIndexOperand(0));
              RootIndex::kUndefinedValue,
void BaselineCompiler::VisitInvokeIntrinsic() {
  interpreter::RegisterList args = iterator().GetRegisterListOperand(1);
  switch (intrinsic_id) {
#define CASE(Name, ...)          \
  case Runtime::kInline##Name:   \
    VisitIntrinsic##Name(args);  \
    break;
    INTRINSICS_LIST(CASE)
#undef CASE
void BaselineCompiler::VisitIntrinsicCopyDataProperties(
    interpreter::RegisterList args) {

void BaselineCompiler::
    VisitIntrinsicCopyDataPropertiesWithExcludedPropertiesOnStack(
        interpreter::RegisterList args) {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register rscratch = scratch_scope.AcquireScratch();
                   args[0], args.register_count() - 1, rscratch);

void BaselineCompiler::VisitIntrinsicCreateIterResultObject(
    interpreter::RegisterList args) {

void BaselineCompiler::VisitIntrinsicCreateAsyncFromSyncIterator(
    interpreter::RegisterList args) {

void BaselineCompiler::VisitIntrinsicCreateJSGeneratorObject(
    interpreter::RegisterList args) {

void BaselineCompiler::VisitIntrinsicGeneratorGetResumeMode(
    interpreter::RegisterList args) {
                     JSGeneratorObject::kResumeModeOffset);

void BaselineCompiler::VisitIntrinsicGeneratorClose(
    interpreter::RegisterList args) {
                            JSGeneratorObject::kContinuationOffset,

void BaselineCompiler::VisitIntrinsicGetImportMetaObject(
    interpreter::RegisterList args) {

void BaselineCompiler::VisitIntrinsicAsyncFunctionAwait(
    interpreter::RegisterList args) {

void BaselineCompiler::VisitIntrinsicAsyncFunctionEnter(
    interpreter::RegisterList args) {

void BaselineCompiler::VisitIntrinsicAsyncFunctionReject(
    interpreter::RegisterList args) {

void BaselineCompiler::VisitIntrinsicAsyncFunctionResolve(
    interpreter::RegisterList args) {

void BaselineCompiler::VisitIntrinsicAsyncGeneratorAwait(
    interpreter::RegisterList args) {

void BaselineCompiler::VisitIntrinsicAsyncGeneratorReject(
    interpreter::RegisterList args) {

void BaselineCompiler::VisitIntrinsicAsyncGeneratorResolve(
    interpreter::RegisterList args) {

void BaselineCompiler::VisitIntrinsicAsyncGeneratorYieldWithAwait(
    interpreter::RegisterList args) {

void BaselineCompiler::VisitConstruct() {
  interpreter::RegisterList args = iterator().GetRegisterListOperand(1);
              RootIndex::kUndefinedValue,

void BaselineCompiler::VisitConstructWithSpread() {
  interpreter::RegisterList args = iterator().GetRegisterListOperand(1);
  interpreter::Register spread_register = args.last_register();
      CallInterfaceDescriptorFor<Builtin::kConstructWithSpread_Baseline>::type;
      Descriptor::GetRegisterParameter(Descriptor::kNewTarget);
              RootIndex::kUndefinedValue,

void BaselineCompiler::VisitConstructForwardAllArgs() {
  using Descriptor = CallInterfaceDescriptorFor<
      Builtin::kConstructForwardAllArgs_Baseline>::type;
      Descriptor::GetRegisterParameter(Descriptor::kNewTarget);
void BaselineCompiler::VisitTestEqual() {
void BaselineCompiler::VisitTestEqualStrict() {
void BaselineCompiler::VisitTestLessThan() {
void BaselineCompiler::VisitTestGreaterThan() {
void BaselineCompiler::VisitTestLessThanOrEqual() {
void BaselineCompiler::VisitTestGreaterThanOrEqual() {
void BaselineCompiler::VisitTestReferenceEqual() {

void BaselineCompiler::VisitTestInstanceOf() {
      CallInterfaceDescriptorFor<Builtin::kInstanceOf_Baseline>::type;
  Register callable = Descriptor::GetRegisterParameter(Descriptor::kRight);

void BaselineCompiler::VisitTestIn() {

void BaselineCompiler::VisitTestUndetectable() {
  Label done, is_smi, not_undetectable;
  __ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
  __ TestAndBranch(map_bit_field, Map::Bits1::IsUndetectableBit::kMask, kZero,
  __ Bind(&not_undetectable);

void BaselineCompiler::VisitTestNull() {
            RootIndex::kNullValue, is_true,

void BaselineCompiler::VisitTestUndefined() {
            RootIndex::kUndefinedValue, is_true,

void BaselineCompiler::VisitTestTypeOf() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  switch (literal_flag) {
    case interpreter::TestTypeOfFlags::LiteralFlag::kNumber: {
      Label is_smi, is_heap_number;
      __ Bind(&is_heap_number);
    case interpreter::TestTypeOfFlags::LiteralFlag::kString: {
      Label is_smi, bad_instance_type;
      __ Bind(&bad_instance_type);
    case interpreter::TestTypeOfFlags::LiteralFlag::kSymbol: {
      Label is_smi, bad_instance_type;
      __ Bind(&bad_instance_type);
    case interpreter::TestTypeOfFlags::LiteralFlag::kBoolean: {
      Label is_true, is_false;
    case interpreter::TestTypeOfFlags::LiteralFlag::kBigInt: {
      Label is_smi, bad_instance_type;
      __ Bind(&bad_instance_type);
    case interpreter::TestTypeOfFlags::LiteralFlag::kUndefined: {
      Label is_smi, is_null, not_undetectable;
      __ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
      __ TestAndBranch(map_bit_field, Map::Bits1::IsUndetectableBit::kMask,
      __ Bind(&not_undetectable);
    case interpreter::TestTypeOfFlags::LiteralFlag::kFunction: {
      Label is_smi, not_callable, undetectable;
      __ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
      __ TestAndBranch(map_bit_field, Map::Bits1::IsCallableBit::kMask, kZero,
      __ TestAndBranch(map_bit_field, Map::Bits1::IsUndetectableBit::kMask,
      __ Bind(&not_callable);
      __ Bind(&undetectable);
    case interpreter::TestTypeOfFlags::LiteralFlag::kObject: {
      Label is_smi, is_null, bad_instance_type, undetectable_or_callable;
      static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      Register map = scratch_scope.AcquireScratch();
                             FIRST_JS_RECEIVER_TYPE, map, &bad_instance_type,
      __ LoadWord8Field(map_bit_field, map, Map::kBitFieldOffset);
      __ TestAndBranch(map_bit_field,
                       Map::Bits1::IsUndetectableBit::kMask |
                           Map::Bits1::IsCallableBit::kMask,
      __ Bind(&bad_instance_type);
      __ Bind(&undetectable_or_callable);
    case interpreter::TestTypeOfFlags::LiteralFlag::kOther:

void BaselineCompiler::VisitToName() {
void BaselineCompiler::VisitToNumber() {
void BaselineCompiler::VisitToNumeric() {

void BaselineCompiler::VisitToObject() {
  SaveAccumulatorScope save_accumulator(this, &basm_);

void BaselineCompiler::VisitToString() {
void BaselineCompiler::VisitToBoolean() {

void BaselineCompiler::VisitCreateRegExpLiteral() {

void BaselineCompiler::VisitCreateArrayLiteral() {
  uint32_t flags = Flag8(2);

void BaselineCompiler::VisitCreateArrayFromIterable() {
void BaselineCompiler::VisitCreateEmptyArrayLiteral() {

void BaselineCompiler::VisitCreateObjectLiteral() {
  uint32_t flags = Flag8(2);

void BaselineCompiler::VisitCreateEmptyObjectLiteral() {

void BaselineCompiler::VisitCloneObject() {
  uint32_t flags = Flag8(1);

void BaselineCompiler::VisitGetTemplateObject() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);

void BaselineCompiler::VisitCreateClosure() {
      FastNewClosureBaselineDescriptor::GetRegisterParameter(
          FastNewClosureBaselineDescriptor::kFeedbackCell);
  __ LoadFixedArrayElement(feedback_cell, feedback_cell, Index(1));
  uint32_t flags = Flag8(2);
            ? Runtime::kNewClosure_Tenured
            : Runtime::kNewClosure;

void BaselineCompiler::VisitCreateBlockContext() {
void BaselineCompiler::VisitCreateCatchContext() {

void BaselineCompiler::VisitCreateFunctionContext() {
  uint32_t slot_count = Uint(1);

void BaselineCompiler::VisitCreateEvalContext() {
  uint32_t slot_count = Uint(1);
  if (slot_count < static_cast<uint32_t>(

void BaselineCompiler::VisitCreateWithContext() {

void BaselineCompiler::VisitCreateMappedArguments() {
    CallRuntime(Runtime::kNewSloppyArguments, __ FunctionOperand());

void BaselineCompiler::VisitCreateUnmappedArguments() {
void BaselineCompiler::VisitCreateRestParameter() {
void BaselineCompiler::VisitJumpLoop() {
  Label osr_armed, osr_not_armed;
  using D = OnStackReplacementDescriptor;
  const int loop_depth = iterator().GetImmediateOperand(1);
    BaselineAssembler::ScratchRegisterScope temps(&basm_);
    feedback_vector = temps.AcquireScratch();
    osr_state = temps.AcquireScratch();
    __ LoadWord8Field(osr_state, feedback_vector,
                      FeedbackVector::kOsrStateOffset);
    static_assert(FeedbackVector::MaybeHasMaglevOsrCodeBit::encode(true) >
    static_assert(FeedbackVector::MaybeHasTurbofanOsrCodeBit::encode(true) >

  __ Bind(&osr_not_armed);
  int weight = iterator().GetRelativeJumpTargetOffset() -
               iterator().current_bytecode_size_without_prefix();
  effect_state_.safe_to_skip = true;

  __ Bind(&osr_armed);
  Register maybe_target_code = D::MaybeTargetCodeRegister();
    BaselineAssembler::ScratchRegisterScope temps(&basm_);
    Register scratch0 = temps.AcquireScratch();
    Register scratch1 = temps.AcquireScratch();
    __ TryLoadOptimizedOsrCode(maybe_target_code, scratch0,
                               iterator().GetSlotOperand(2), &osr,
    __ DecodeField<FeedbackVector::OsrUrgencyBits>(scratch1);
    __ Push(maybe_target_code);
    Register expected_param_count = D::ExpectedParameterCountRegister();
    __ Pop(maybe_target_code);
                    expected_param_count);
  __ AddToInterruptBudgetAndJumpIfNotExceeded(weight, nullptr);
  effect_state_.safe_to_skip = false;
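// JumpLoop is the back edge of every loop: the common path just charges the
// (negative) jump distance against the interrupt budget, while the OSR-armed
// path inspects the feedback vector's OSR state byte and, if cached OSR code
// or sufficient urgency is found, dispatches to the on-stack-replacement
// builtin described by OnStackReplacementDescriptor.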
void BaselineCompiler::VisitJumpConstant() { VisitJump(); }

void BaselineCompiler::VisitJumpIfNullConstant() { VisitJumpIfNull(); }

void BaselineCompiler::VisitJumpIfNotNullConstant() { VisitJumpIfNotNull(); }

void BaselineCompiler::VisitJumpIfUndefinedConstant() {
  VisitJumpIfUndefined();
}

void BaselineCompiler::VisitJumpIfNotUndefinedConstant() {
  VisitJumpIfNotUndefined();
}

void BaselineCompiler::VisitJumpIfUndefinedOrNullConstant() {
  VisitJumpIfUndefinedOrNull();
}

void BaselineCompiler::VisitJumpIfTrueConstant() { VisitJumpIfTrue(); }

void BaselineCompiler::VisitJumpIfFalseConstant() { VisitJumpIfFalse(); }

void BaselineCompiler::VisitJumpIfJSReceiverConstant() {
  VisitJumpIfJSReceiver();
}

void BaselineCompiler::VisitJumpIfForInDoneConstant() {
  VisitJumpIfForInDone();
}

void BaselineCompiler::VisitJumpIfToBooleanTrueConstant() {
  VisitJumpIfToBooleanTrue();
}

void BaselineCompiler::VisitJumpIfToBooleanFalseConstant() {
  VisitJumpIfToBooleanFalse();
}

void BaselineCompiler::VisitJumpIfToBooleanTrue() {
  __ Bind(&dont_jump);

void BaselineCompiler::VisitJumpIfToBooleanFalse() {
  __ Bind(&dont_jump);

void BaselineCompiler::VisitJumpIfTrue() { JumpIfRoot(RootIndex::kTrueValue); }

void BaselineCompiler::VisitJumpIfFalse() {

void BaselineCompiler::VisitJumpIfNull() { JumpIfRoot(RootIndex::kNullValue); }

void BaselineCompiler::VisitJumpIfNotNull() {

void BaselineCompiler::VisitJumpIfUndefined() {

void BaselineCompiler::VisitJumpIfNotUndefined() {

void BaselineCompiler::VisitJumpIfUndefinedOrNull() {
  Label do_jump, dont_jump;
  __ Bind(&dont_jump);

void BaselineCompiler::VisitJumpIfJSReceiver() {
  Label is_smi, dont_jump;
#if V8_STATIC_ROOTS_BOOL
                          FIRST_JS_RECEIVER_TYPE, &dont_jump);
  __ Bind(&dont_jump);
void BaselineCompiler::VisitJumpIfForInDone() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register index = scratch_scope.AcquireScratch();

void BaselineCompiler::VisitSwitchOnSmiNoFeedback() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  interpreter::JumpTableTargetOffsets offsets =
      iterator().GetJumpTableTargetOffsets();

  if (offsets.size() == 0) return;

  int case_value_base = (*offsets.begin()).case_value;

  std::unique_ptr<Label*[]> labels = std::make_unique<Label*[]>(offsets.size());
  for (interpreter::JumpTableTargetOffset offset : offsets) {
    labels[offset.case_value - case_value_base] =
  Register case_value = scratch_scope.AcquireScratch();
  __ Switch(case_value, case_value_base, labels.get(), offsets.size());
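// SwitchOnSmiNoFeedback lowers the bytecode's jump table directly into a
// BaselineAssembler::Switch over (case_value - case_value_base), binding one
// forward label per jump-table entry; values outside the table fall through to
// the next bytecode.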
void BaselineCompiler::VisitForInEnumerate() {

void BaselineCompiler::VisitForInPrepare() {
  interpreter::Register first = iterator().GetRegisterOperand(0);
  interpreter::Register second(first.index() + 1);
  interpreter::Register third(first.index() + 2);

void BaselineCompiler::VisitForInNext() {
  interpreter::Register cache_type, cache_array;
  std::tie(cache_type, cache_array) = iterator().GetRegisterPairOperand(2);

void BaselineCompiler::VisitForInStep() {

void BaselineCompiler::VisitSetPendingMessage() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register pending_message = scratch_scope.AcquireScratch();
  __ Move(pending_message,
  Register tmp = scratch_scope.AcquireScratch();

void BaselineCompiler::VisitThrow() {

void BaselineCompiler::VisitReThrow() {

void BaselineCompiler::VisitReturn() {
  int profiling_weight = iterator().current_offset() +
                         iterator().current_bytecode_size_without_prefix();
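// The profiling weight charged on return is the bytecode distance from the
// start of the function to the end of the Return bytecode, so larger functions
// consume more interrupt budget per invocation.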
void BaselineCompiler::VisitThrowReferenceErrorIfHole() {

void BaselineCompiler::VisitThrowSuperNotCalledIfHole() {

void BaselineCompiler::VisitThrowSuperAlreadyCalledIfNotHole() {
  CallRuntime(Runtime::kThrowSuperAlreadyCalledError);

void BaselineCompiler::VisitThrowIfNotSuperConstructor() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register map_bit_field = scratch_scope.AcquireScratch();
  __ LoadMap(map_bit_field, reg);
  __ LoadWord8Field(map_bit_field, map_bit_field, Map::kBitFieldOffset);
  __ TestAndBranch(map_bit_field, Map::Bits1::IsConstructorBit::kMask, kNotZero,
  CallRuntime(Runtime::kThrowNotSuperConstructor, reg, __ FunctionOperand());

void BaselineCompiler::VisitSwitchOnGeneratorState() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register generator_object = scratch_scope.AcquireScratch();
  __ JumpIfRoot(generator_object, RootIndex::kUndefinedValue, &fallthrough);
                     JSGeneratorObject::kContinuationOffset);
  __ StoreTaggedSignedField(
      generator_object, JSGeneratorObject::kContinuationOffset,
  Register context = scratch_scope.AcquireScratch();
  __ LoadTaggedField(context, generator_object,
                     JSGeneratorObject::kContextOffset);
  __ StoreContext(context);
  interpreter::JumpTableTargetOffsets offsets =
      iterator().GetJumpTableTargetOffsets();
  if (0 < offsets.size()) {
    DCHECK_EQ(0, (*offsets.begin()).case_value);
    std::unique_ptr<Label*[]> labels =
        std::make_unique<Label*[]>(offsets.size());
    for (interpreter::JumpTableTargetOffset offset : offsets) {
  __ Bind(&fallthrough);
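// On entry to a resumed generator, the continuation index stored in the
// generator object picks the resume point out of the bytecode's jump table;
// the continuation field is reset before dispatching and the generator's saved
// context is restored into the context register.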
void BaselineCompiler::VisitSuspendGenerator() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register generator_object = scratch_scope.AcquireScratch();
  SaveAccumulatorScope accumulator_scope(this, &basm_);
  int bytecode_offset =
              static_cast<int>(Uint(3)),

void BaselineCompiler::VisitResumeGenerator() {
  BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
  Register generator_object = scratch_scope.AcquireScratch();
void BaselineCompiler::VisitGetIterator() {

void BaselineCompiler::VisitDebugger() {

void BaselineCompiler::VisitIncBlockCounter() {
  SaveAccumulatorScope accumulator_scope(this, &basm_);

void BaselineCompiler::VisitAbort() {

void BaselineCompiler::VisitWide() {

void BaselineCompiler::VisitExtraWide() {

void BaselineCompiler::VisitIllegal() {

#define DEBUG_BREAK(Name, ...) \
  void BaselineCompiler::Visit##Name() { UNREACHABLE(); }
DEBUG_BREAK_BYTECODE_LIST(DEBUG_BREAK)
#undef DEBUG_BREAK

  compiler_->effect_state_.accumulator_on_stack = true;
  compiler_->effect_state_.accumulator_on_stack = false;
#undef RCS_BASELINE_SCOPE