#if V8_ENABLE_WEBASSEMBLY
// ...
#endif  // V8_ENABLE_WEBASSEMBLY

CodeGenerator::CodeGenerator(
    Zone* codegen_zone, Frame* frame, Linkage* linkage,
    InstructionSequence* instructions, OptimizedCompilationInfo* info,
    Isolate* isolate, std::optional<OsrHelper> osr_helper,
    int start_source_position, JumpOptimizationInfo* jump_opt,
    const AssemblerOptions& options, Builtin builtin,
    size_t max_unoptimized_frame_height, size_t max_pushed_argument_count,
    const char* debug_name)
    : zone_(codegen_zone),
      // ...
      frame_access_state_(nullptr),
      // ...
      instructions_(instructions),
      // ...
      labels_(codegen_zone->AllocateArray<Label>(
          instructions->InstructionBlockCount())),
      // ...
      start_source_position_(start_source_position),
      // ...
      safepoints_(codegen_zone),
      // ...
      deoptimization_exits_(codegen_zone),
      protected_deoptimization_literals_(codegen_zone),
      deoptimization_literals_(codegen_zone),
      translations_(codegen_zone),
      max_unoptimized_frame_height_(max_unoptimized_frame_height),
      max_pushed_argument_count_(max_pushed_argument_count),
      caller_registers_saved_(false),
      jump_tables_(nullptr),
      // ...
      osr_helper_(std::move(osr_helper)),
      // ...
#if V8_ENABLE_WEBASSEMBLY
      // ...
#endif  // V8_ENABLE_WEBASSEMBLY
      block_starts_(codegen_zone),
      instr_starts_(codegen_zone),
      debug_name_(debug_name) {
  for (int i = 0; i < instructions->InstructionBlockCount(); ++i) {
    // ...
  }
  CreateFrameAccessState(frame);
  CHECK_EQ(info->is_osr(), osr_helper_.has_value());
  masm_.set_jump_optimization_info(jump_opt);
  CodeKind code_kind = info->code_kind();
  if (code_kind == CodeKind::WASM_FUNCTION ||
      code_kind == CodeKind::WASM_TO_CAPI_FUNCTION ||
      code_kind == CodeKind::WASM_TO_JS_FUNCTION ||
      code_kind == CodeKind::JS_TO_WASM_FUNCTION) {
    masm_.set_abort_hard(true);
  }
  masm_.set_builtin(builtin);
}

#if V8_ENABLE_WEBASSEMBLY
// ...
#endif  // V8_ENABLE_WEBASSEMBLY
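// GetStackCheckOffset() (below) returns the larger of two quantities: how much
// taller the unoptimized frames this code may deoptimize into can be, relative
// to the optimized frame, and the number of bytes pushed onto the stack for
// outgoing arguments, so the emitted stack check conservatively covers both.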
uint32_t CodeGenerator::GetStackCheckOffset() {
  // ...
  size_t incoming_parameter_count = /* ... */;
  DCHECK(is_int32(incoming_parameter_count));
  int32_t optimized_frame_height = /* ... */;
  // ...
  int32_t signed_max_unoptimized_frame_height = /* ... */;
  // ...
  uint32_t frame_height_delta = static_cast<uint32_t>(std::max(
      signed_max_unoptimized_frame_height - optimized_frame_height, 0));
  uint32_t max_pushed_argument_bytes = /* ... */;
  return std::max(frame_height_delta, max_pushed_argument_bytes);
}
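// AssembleDeoptimizerCall() records the deoptimization reason for the exit and
// emits the actual deoptimization call; its last argument is the shared
// per-DeoptimizeKind entry label (see jump_deoptimization_entry_labels_).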
CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
    DeoptimizationExit* exit) {
  // ...
  Label* jump_deoptimization_entry_label = /* ... */;
  // ...
  masm()->RecordDeoptReason(/* ... */, exit->pos(), deoptimization_id);
  // ...
  masm()->CallForDeoptimization(/* ... */, jump_deoptimization_entry_label);
  // ...
}
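// AssembleCode() begins with the prologue: optional debug checks of the
// code-start register (and, under V8_ENABLE_LEAPTIERING, the dispatch handle
// register), special handling for wasm-to-JS wrappers, a bailout check for
// optimized JS code, and registration of a deoptimization literal for every
// inlined SharedFunctionInfo.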
void CodeGenerator::AssembleCode() {
  OptimizedCompilationInfo* info = this->info();
  auto call_descriptor = linkage()->GetIncomingDescriptor();
  if (call_descriptor->IsJSFunctionCall()) {
    // ...
  } else if (info->has_bytecode_array()) {
    // ...
  }
  if (info->source_positions()) {
    // ...
  }
  if (v8_flags.debug_code && info->called_with_code_start_register()) {
    // ...
  }
#ifdef V8_ENABLE_LEAPTIERING
  if (v8_flags.debug_code && call_descriptor->IsJSFunctionCall()) {
    AssembleDispatchHandleRegisterCheck();
  }
#endif  // V8_ENABLE_LEAPTIERING
#if V8_ENABLE_WEBASSEMBLY
  if (info->code_kind() == CodeKind::WASM_TO_JS_FUNCTION ||
      info->builtin() == Builtin::kWasmToJsWrapperCSA || /* ... */) {
    // ...
  }
#endif  // V8_ENABLE_WEBASSEMBLY
  if (info->IsOptimizing()) {
    DCHECK(call_descriptor->IsJSFunctionCall());
    // ...
  }
  for (OptimizedCompilationInfo::InlinedFunctionHolder& inlined :
       info->inlined_functions()) {
    if (!inlined.shared_info.equals(info->shared_info())) {
      int index = DefineDeoptimizationLiteral(/* ... */);
      inlined.RegisterInlinedFunctionId(index);
    }
  }
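  // The main loop visits the instruction blocks in assembly order. With
  // --code-comments, each block is annotated with its RPO number, whether it
  // is deferred, its frame requirements, and its loop nesting; blocks that
  // must construct a frame do so here and, if the incoming call descriptor
  // requests it, initialize the root register.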
  if (info->trace_turbo_json()) {
    // ...
  }
  // ...
  for (const InstructionBlock* block : /* assembly-order blocks */) {
    // ...
    if (block->ShouldAlignLoopHeader()) {
      // ...
    } else if (block->ShouldAlignCodeTarget()) {
      // ...
    }
    // ...
    if (info->trace_turbo_json()) {
      // ...
    }
    // ...
    if (v8_flags.code_comments && !block->omitted_by_jump_threading()) {
      std::ostringstream buffer;
      buffer << "-- B" << block->rpo_number().ToInt() << " start";
      if (block->IsDeferred()) buffer << " (deferred)";
      if (!block->needs_frame()) buffer << " (no frame)";
      if (block->must_construct_frame()) buffer << " (construct frame)";
      if (block->must_deconstruct_frame()) buffer << " (deconstruct frame)";
      // ...
      if (block->IsLoopHeader()) {
        buffer << " (loop up to " << block->loop_end().ToInt() << ")";
      }
      if (block->loop_header().IsValid()) {
        buffer << " (in loop " << block->loop_header().ToInt() << ")";
      }
      // ...
    }
    // ...
#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
    if (block->IsSwitchTarget()) {
      // ...
    }
#endif  // V8_ENABLE_CONTROL_FLOW_INTEGRITY
    // ...
    if (block->must_construct_frame()) {
      // ...
      if (call_descriptor->InitializeRootRegister()) {
        // ...
      }
    }
#ifdef CAN_USE_RVV_INSTRUCTIONS
    // ...
#endif  // CAN_USE_RVV_INSTRUCTIONS
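  // After the instruction blocks, out-of-line code fragments are emitted (each
  // jumps back to its exit label if one is bound), followed by the
  // deoptimization exits. The exits are sorted so that lazy deopts come last,
  // and the safepoint table is updated with each lazy exit's trampoline pc via
  // safepoints()->UpdateDeoptimizationInfo().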
    // ...
    if (ool->exit()->is_bound()) masm()->jmp(ool->exit());
    // ...

  int last_updated = 0;
  auto cmp = [](const DeoptimizationExit* a, const DeoptimizationExit* b) {
    static_assert(/* ... */,
                  "lazy deopts are expected to be emitted last");
    if (a->kind() != b->kind()) {
      return a->kind() < b->kind();
    }
    return a->pc_offset() < b->pc_offset();
  };
  // ...
#ifdef V8_TARGET_ARCH_PPC64
  // ...
#endif  // V8_TARGET_ARCH_PPC64
  for (DeoptimizationExit* exit : deoptimization_exits_) {
    if (exit->emitted()) continue;
    // ...
    int trampoline_pc = exit->label()->pos();
    last_updated = safepoints()->UpdateDeoptimizationInfo(
        exit->pc_offset(), trampoline_pc, last_updated,
        exit->deoptimization_id());
    // ...
  }
#ifndef V8_TARGET_ARCH_X64
void CodeGenerator::AssembleArchBinarySearchSwitchRange(
    Register input, RpoNumber def_block, std::pair<int32_t, Label*>* begin,
    std::pair<int32_t, Label*>* end) { /* ... */ }
#endif  // V8_TARGET_ARCH_X64

#if V8_ENABLE_WEBASSEMBLY
// ...
#endif  // V8_ENABLE_WEBASSEMBLY
#if defined(V8_OS_WIN64)
// ...
#endif  // V8_OS_WIN64
// ...
  if (info()->function_context_specializing()) { /* ... */ }
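// RecordSafepoint() marks the frame's tagged slots and every stack-slot
// reference operand in the ReferenceMap as tagged; indices below the fixed
// frame header (frame()->GetFixedSlotCount()) are skipped because they are not
// spill slots.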
void CodeGenerator::RecordSafepoint(ReferenceMap* references, int pc_offset) {
  auto safepoint = safepoints()->DefineSafepoint(masm(), pc_offset);
  for (int tagged : /* ... */) {
    safepoint.DefineTaggedStackSlot(tagged);
  }
  int frame_header_offset = frame()->GetFixedSlotCount();
  for (const InstructionOperand& operand : references->reference_operands()) {
    if (operand.IsStackSlot()) {
      int index = /* ... */;
      if (index < frame_header_offset) continue;
      safepoint.DefineTaggedStackSlot(index);
    }
  }
}
CodeGenerator::CodeGenResult CodeGenerator::AssembleBlock(
    const InstructionBlock* block) {
  if (block->IsHandler()) { /* ... */ }
  for (int i = block->code_start(); i < block->code_end(); ++i) {
    // ...
  }
}

bool CodeGenerator::IsValidPush(InstructionOperand source,
                                PushTypeFlags push_type) {
  if (source.IsImmediate() && /* ... */) return true;
  if (source.IsRegister() && /* ... */) return true;
  if (source.IsStackSlot() && /* ... */) return true;
  // ...
}
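// GetPushCompatibleMoves() scans the instruction's parallel gap moves and
// records the ones whose destinations are push-compatible stack slots (at or
// above first_push_compatible_index); only the contiguous run at the end of
// the resulting vector is kept, so those moves can be emitted as pushes when
// assembling tail calls.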
void CodeGenerator::GetPushCompatibleMoves(Instruction* instr,
                                           PushTypeFlags push_type,
                                           ZoneVector<MoveOperands*>* pushes) {
  static constexpr int first_push_compatible_index =
      kReturnAddressStackSlotCount;
  // ...
    if (parallel_move != nullptr) {
      for (auto move : *parallel_move) {
        // ...
        if (source.IsAnyStackSlot() &&
            LocationOperand::cast(source).index() >=
                first_push_compatible_index) {
          // ...
        }
        // ...
        if (destination.IsStackSlot() &&
            LocationOperand::cast(destination).index() >=
                first_push_compatible_index) {
          // ...
          if (index >= static_cast<int>(pushes->size())) {
            pushes->resize(index + 1);
          }
          (*pushes)[index] = move;
        }
      }
    }
  // ...
  size_t push_count_upper_bound = pushes->size();
  size_t push_begin = push_count_upper_bound;
  for (auto move : /* ... */) {
    if (move == nullptr) break;
    // ...
  }
  size_t push_count = pushes->size() - push_begin;
  std::copy(pushes->begin() + push_begin,
            pushes->begin() + push_begin + push_count, pushes->begin());
  pushes->resize(push_count);
}
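// MoveType::InferMove() and MoveType::InferSwap() classify a move or swap by
// the kinds of its operands (constant, register, or stack slot). Below them,
// branch assembly resolves its targets: a branch whose true and false targets
// are the same block is redundant, and when the false target is a loop header
// in assembly order the condition is negated and the targets swapped.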
  if (source->IsConstant()) { /* ... */ }
  if (source->IsAnyRegister()) { /* ... */ }
  DCHECK(source->IsAnyStackSlot());
  // ...
  if (source->IsAnyRegister()) { /* ... */ }
  DCHECK(source->IsAnyStackSlot());
  // ...
  if (true_rpo == false_rpo) { /* ... */ }
  if (/* ... */ instructions()
                    ->InstructionBlockAt(false_rpo)
                    ->IsLoopHeaderInAssemblyOrder()) {
    // ...
    std::swap(true_rpo, false_rpo);
  }
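// AssembleInstruction() interleaves gap moves, source positions, and the
// architecture-specific code for one instruction. A lone kArchNop in a block
// with no successors is special-cased, the frame is deconstructed before a
// jump out of a block marked must_deconstruct_frame(), and when
// info()->trace_turbo_json() is set the pc offset of each phase is recorded.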
CodeGenerator::CodeGenResult CodeGenerator::AssembleInstruction(
    int instruction_index, const InstructionBlock* block) {
  Instruction* instr = instructions()->InstructionAt(instruction_index);
  if (info()->trace_turbo_json()) {
    // ...
  }
  int first_unused_stack_slot;
  // ...
  if (instr->opcode() == kArchNop && block->successors().empty() &&
      block->code_end() - block->code_start() == 1) {
    // ...
  }
  // ...
  DCHECK_IMPLIES(
      block->must_deconstruct_frame(),
      instr != instructions()->InstructionAt(block->last_instruction_index()) ||
          /* ... */);
  if (instr->IsJump() && block->must_deconstruct_frame()) {
    AssembleDeconstructFrame();
  }
  if (info()->trace_turbo_json()) {
    // ...
  }
  // ...
  if (info()->trace_turbo_json()) {
    // ...
  }
  // ...
  if (target.IsValid()) {
    // ...
  }
  // ...
  size_t frame_state_offset = /* ... */;
  size_t immediate_args_count = /* ... */;
  DeoptimizationExit* const exit = AddDeoptimizationExit(
      instr, frame_state_offset, immediate_args_count);
  // ...
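// Source positions are recorded into the source position table via
// source_position_table_builder_.AddPosition(); with --code-comments enabled,
// the position is additionally emitted as an assembler comment.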
#if V8_ENABLE_WEBASSEMBLY
// ...
#endif  // V8_ENABLE_WEBASSEMBLY

// ...
  if (instr->IsNop() && instr->AreMovesRedundant()) return;
  // ...
  if (!source_position.IsKnown()) return;
  source_position_table_builder_.AddPosition(masm()->pc_offset(),
                                             source_position, false);
  // ...
#if V8_ENABLE_WEBASSEMBLY
  if (!info->IsWasm()) return;
  // ...
#endif  // V8_ENABLE_WEBASSEMBLY
  // ...
  std::ostringstream buffer;
  // ...
  if (info->trace_turbo_json() || !masm()->isolate() ||
      masm()->isolate()->concurrent_recompilation_enabled()) {
    buffer << source_position;
  }
  // ...
bool CodeGenerator::GetSlotAboveSPBeforeTailCall(Instruction* instr,
                                                 int* slot) {
  if (instr->IsTailCall()) { /* ... */ }
  // ...
}

StubCallMode CodeGenerator::DetermineStubCallMode() const {
#if V8_ENABLE_WEBASSEMBLY
  CodeKind code_kind = info()->code_kind();
  if (code_kind == CodeKind::WASM_FUNCTION) {
    return StubCallMode::kCallWasmRuntimeStub;
  }
  if (code_kind == CodeKind::WASM_TO_CAPI_FUNCTION ||
      code_kind == CodeKind::WASM_TO_JS_FUNCTION) {
    // ...
  }
#endif  // V8_ENABLE_WEBASSEMBLY
  // ...
}
DirectHandle<TrustedPodArray<InliningPosition>> CreateInliningPositions(
    OptimizedCompilationInfo* info, Isolate* isolate) {
  const OptimizedCompilationInfo::InlinedFunctionList& inlined_functions =
      info->inlined_functions();
  DirectHandle<TrustedPodArray<InliningPosition>> inl_positions =
      TrustedPodArray<InliningPosition>::New(
          isolate, static_cast<int>(inlined_functions.size()));
  for (size_t i = 0; i < inlined_functions.size(); ++i) {
    inl_positions->set(static_cast<int>(i), inlined_functions[i].position);
  }
  return inl_positions;
}
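// GenerateDeoptimizationData() packages what was collected during assembly
// into a DeoptimizationData object: the frame translation, the inlined
// function count and inlining positions, the protected and unprotected
// deoptimization literal arrays, OSR information, and a bytecode offset plus
// translation index for every deoptimization exit.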
Handle<DeoptimizationData> CodeGenerator::GenerateDeoptimizationData() {
  OptimizedCompilationInfo* info = this->info();
  int deopt_count = static_cast<int>(deoptimization_exits_.size());
  if (deopt_count == 0 && !info->is_osr()) {
    return DeoptimizationData::Empty(isolate());
  }
  Handle<DeoptimizationData> data =
      DeoptimizationData::New(isolate(), deopt_count);
  DirectHandle<DeoptimizationFrameTranslation> translation_array =
      translations_.ToFrameTranslation(
          isolate()->main_thread_local_isolate()->factory());
  data->SetFrameTranslation(*translation_array);
  data->SetInlinedFunctionCount(
      Smi::FromInt(static_cast<int>(inlined_function_count_)));
  data->SetOptimizationId(Smi::FromInt(info->optimization_id()));
  // ...
  if (info->has_shared_info()) {
    DirectHandle<SharedFunctionInfoWrapper> sfi_wrapper =
        isolate()->factory()->NewSharedFunctionInfoWrapper(info->shared_info());
    data->SetWrappedSharedFunctionInfo(*sfi_wrapper);
  } else {
    data->SetWrappedSharedFunctionInfo(Smi::zero());
  }
  // ...
  for (size_t i = 0; i < protected_deoptimization_literals_.size(); ++i) {
    IndirectHandle<TrustedObject> object =
        protected_deoptimization_literals_[i];
    CHECK(!object.is_null());
    protected_literals->set(i, *object);
  }
  data->SetProtectedLiteralArray(*protected_literals);
  // ...
  for (size_t i = 0; i < deoptimization_literals_.size(); ++i) {
    // ...
    CHECK(!object.is_null());
    literals->set(i, *object);
  }
  data->SetLiteralArray(*literals);
  DirectHandle<TrustedPodArray<InliningPosition>> inl_pos =
      CreateInliningPositions(info, isolate());
  data->SetInliningPositions(*inl_pos);
  if (info->is_osr()) {
    // ...
  }
  // ...
  for (int i = 0; i < deopt_count; i++) {
    DeoptimizationExit* deoptimization_exit = deoptimization_exits_[i];
    // ...
    data->SetBytecodeOffset(i, deoptimization_exit->bailout_id());
    data->SetTranslationIndex(
        i, Smi::FromInt(deoptimization_exit->translation_id()));
    // ...
  }
  // ...
  data->Verify(info->bytecode_array());
  return data;
}
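// For WebAssembly code, the deoptimization data is instead serialized into a
// flat byte vector: the wasm frame translations plus one WasmDeoptEntry
// (bytecode offset and translation index) per exit. In debug builds the result
// is read back through wasm::WasmDeoptView and checked against the exits and
// literals it was built from.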
#if V8_ENABLE_WEBASSEMBLY
  // ...
  if (deopt_count == 0) {
    // ...
  }
  // ...
  auto deopt_entries = /* ... */;
  for (int i = 0; i < deopt_count; i++) {
    // ...
    DCHECK_EQ(i, deoptimization_exit->deoptimization_id());
    deopt_entries[i] = {deoptimization_exit->bailout_id(),
                        deoptimization_exit->translation_id()};
  }
  base::Vector<const uint8_t> frame_translations =
      translations_.ToFrameTranslationWasm();
  // ...
  wasm::WasmDeoptData data = view.GetDeoptData();
  // ...
  DCHECK_EQ(data.translation_array_size, frame_translations.size());
  for (int i = 0; i < deopt_count; i++) {
    // ...
    wasm::WasmDeoptEntry entry = view.GetDeoptEntry(i);
    DCHECK_EQ(exit->bailout_id(), entry.bytecode_offset);
    DCHECK_EQ(exit->translation_id(), entry.translation_index);
  }
  std::vector<DeoptimizationLiteral> literals =
      view.BuildDeoptimizationLiteralArray();
  // ...
#ifndef V8_TARGET_ARCH_X64
// ...
#endif  // V8_TARGET_ARCH_X64

void CodeGenerator::RecordCallPosition(Instruction* instr) {
  const bool needs_frame_state = /* ... */;
  // ...
  Constant handler_input =
      i.ToConstant(instr->InputAt(instr->InputCount() - 1));
  // ...
  if (needs_frame_state) {
    // ...
  }
}
// ...
  size_t frame_state_offset = 1;
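// Frame states are lowered into translations recursively:
// TranslateStateValueDescriptor() walks a StateValueDescriptor tree and emits
// the matching FrameTranslationBuilder entries for captured objects, arguments
// elements and lengths, duplicated objects, plain operands, string
// concatenations, and optimized-out values.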
  int const state_id = i.InputInt32(frame_state_offset);
  // ...

void CodeGenerator::TranslateStateValueDescriptor(
    StateValueDescriptor* desc, StateValueList* nested,
    InstructionOperandIterator* iter) {
  if (desc->IsNestedObject()) {
    // ...
    for (auto field : *nested) {
      // ...
    }
  } else if (desc->IsArgumentsElements()) {
    // ...
  } else if (desc->IsArgumentsLength()) {
    // ...
  } else if (desc->IsRestLength()) {
    // ...
  } else if (desc->IsDuplicate()) {
    // ...
  } else if (desc->IsPlain()) {
    // ...
  } else if (desc->IsStringConcat()) {
    // ...
    for (auto field : *nested) {
      // ...
    }
  } else {
    DCHECK(desc->IsOptimizedOut());
    // ...
  }
}
void CodeGenerator::BuildTranslationForFrameStateDescriptor(
    FrameStateDescriptor* descriptor, InstructionOperandIterator* iter,
    OutputFrameStateCombine state_combine) {
  // ...
  Handle<SharedFunctionInfo> shared_info;
  if (!descriptor->shared_info().ToHandle(&shared_info)) {
    if (!info()->has_shared_info()
#if V8_ENABLE_WEBASSEMBLY
        && descriptor->type() != compiler::FrameStateType::kLiftoffFunction
#endif  // V8_ENABLE_WEBASSEMBLY
    ) {
      // ...
    }
    // ...
  }
  const BytecodeOffset bailout_id = descriptor->bailout_id();
  const int shared_info_id =
#if V8_ENABLE_WEBASSEMBLY
      // ...
#endif  // V8_ENABLE_WEBASSEMBLY
      /* ... */;
  const unsigned int height =
      static_cast<unsigned int>(descriptor->GetHeight());
  switch (descriptor->type()) {
    case FrameStateType::kUnoptimizedFunction: {
      int return_offset = 0;
      int return_count = 0;
      if (!state_combine.IsOutputIgnored()) {
        // ...
        return_count = static_cast<int>(iter->instruction()->OutputCount());
      }
      translations_.BeginInterpretedFrame(bailout_id, shared_info_id,
                                          bytecode_array_id, height,
                                          return_offset, return_count);
      break;
    }
    case FrameStateType::kInlinedExtraArguments:
      translations_.BeginInlinedExtraArguments(
          shared_info_id, height,
          descriptor->bytecode_array().ToHandleChecked()->parameter_count());
      break;
    // ...
#if V8_ENABLE_WEBASSEMBLY
    case FrameStateType::kWasmInlinedIntoJS:
      translations_.BeginWasmInlinedIntoJSFrame(bailout_id, shared_info_id,
                                                height);
      break;
    case FrameStateType::kJSToWasmBuiltinContinuation: {
      const JSToWasmFrameStateDescriptor* js_to_wasm_descriptor =
          static_cast<const JSToWasmFrameStateDescriptor*>(descriptor);
      translations_.BeginJSToWasmBuiltinContinuationFrame(
          bailout_id, shared_info_id, height,
          js_to_wasm_descriptor->return_kind());
      break;
    }
    case FrameStateType::kLiftoffFunction:
      // ...
      break;
#endif  // V8_ENABLE_WEBASSEMBLY
    case FrameStateType::kJavaScriptBuiltinContinuation:
      translations_.BeginJavaScriptBuiltinContinuationFrame(
          bailout_id, shared_info_id, height);
      break;
    case FrameStateType::kJavaScriptBuiltinContinuationWithCatch:
      translations_.BeginJavaScriptBuiltinContinuationWithCatchFrame(
          bailout_id, shared_info_id, height);
      break;
    // ...
  }
  // ...
DeoptimizationExit* CodeGenerator::BuildTranslation(
    Instruction* instr, int pc_offset, size_t frame_state_offset,
    size_t immediate_args_count, OutputFrameStateCombine state_combine) {
  // ...
  frame_state_offset++;
  // ...
  if (immediate_args_count != 0) {
    // ...
    InstructionOperandIterator imm_iter(
        instr, frame_state_offset - immediate_args_count - 1);
    for (size_t i = 0; i < immediate_args_count; i++) {
      immediate_args->emplace_back(ImmediateOperand::cast(imm_iter.Advance()));
    }
    // ...
  }
void CodeGenerator::AddTranslationForOperand(Instruction* instr,
                                             InstructionOperand* op,
                                             MachineType type) {
  // ...
#if defined(V8_COMPRESS_POINTERS)
  // ...
#endif  // V8_COMPRESS_POINTERS
  switch (type.representation()) { /* ... */ }
  // ...
#if defined(V8_COMPRESS_POINTERS)
  // ...
#endif  // V8_COMPRESS_POINTERS
  switch (type.representation()) { /* ... */ }
  // ...
  CHECK(op->IsImmediate());
  // ...
#if V8_ENABLE_WEBASSEMBLY
  switch (type.representation()) { /* ... */ }
#endif  // V8_ENABLE_WEBASSEMBLY
  // ...
  switch (constant.type()) {
    // ...
    if (constant.ToInt32() == 0) { /* ... */ }
    // ...
      literal = DeoptimizationLiteral(
          static_cast<double>(static_cast<uint32_t>(constant.ToInt32())));
    // ...
  }
  if (literal.object().equals(info()->closure()) &&
      info()->function_context_specializing()) {
    // ...
  }
}

DeoptimizationExit* CodeGenerator::AddDeoptimizationExit(
    Instruction* instr, size_t frame_state_offset,
    size_t immediate_args_count) { /* ... */ }