void TraceSequence(OptimizedCompilationInfo* info,
                   InstructionSequence* sequence, JSHeapBroker* broker,
                   CodeTracer* code_tracer, const char* phase_name) {
  if (info->trace_turbo_json()) {
    UnparkedScopeIfNeeded scope(broker);
    AllowHandleDereference allow_deref;
    TurboJsonFile json_of(info, std::ios_base::app);
    json_of << "{\"name\":\"" << phase_name << "\",\"type\":\"sequence\""
            << ",\"blocks\":" << InstructionSequenceAsJSON{sequence}
            << ",\"register_allocation\":{"
            << "\"fixed_double_live_ranges\": {}"
            << ",\"fixed_live_ranges\": {}"
            << ",\"live_ranges\": {}"
            << "}},\n";
  }
  if (info->trace_turbo_graph()) {
    UnparkedScopeIfNeeded scope(broker);
    AllowHandleDereference allow_deref;
    CodeTracer::StreamScope tracing_scope(code_tracer);
    tracing_scope.stream() << "----- Instruction sequence " << phase_name
                           << " -----\n"
                           << *sequence;
  }
}
ZoneVector<uint32_t> TurboshaftSpecialRPONumberer::ComputeSpecialRPO() {
  // ...
  // Phase 1: Create a post-order traversal of the graph, detecting loops
  // along the way. Push() puts a block together with its successors onto
  // the DFS stack and marks it as on-stack:
  auto Push = [&](const Block* block) {
    auto succs = SuccessorBlocks(*block, *graph_);
    stack.emplace_back(block, 0, std::move(succs));
    set_rpo_number(block, kBlockOnStack);
  };

  const Block* order = nullptr;
  Push(&graph_->StartBlock());
  while (!stack.empty()) {
    // ... iterative DFS: unvisited successors are pushed; a successor that
    // is still on the stack is a backedge and makes its target a loop
    // header; finished blocks are prepended to `order` via PushFront() ...
  }

  // Phase 2 (only when loops were found, after ComputeLoopInfo() has
  // collected each loop's members and outgoing edges): walk the graph
  // again so that the members of every loop are contiguous in the order.
  Push(&graph_->StartBlock());
  while (!stack.empty()) {
    // ...
    const Block* succ = nullptr;
    // ... pick the next successor; a loop header additionally hands out
    // its loop's recorded outgoing edges:
    if (block != entry && outgoing_index < info->outgoing.size()) {
      succ = info->outgoing[outgoing_index];
      // ...
    }
    if (succ != nullptr) {
      // ... visit `succ` now, or defer it to the enclosing loop's
      // outgoing list ...
    } else {
      // Done with this block. When popping a loop header, splice the
      // loop's entire body into the order in one step:
      for (const Block* b = info->start; true;
           b = block_data_[b->index()].rpo_next) {
        if (block_data_[b->index()].rpo_next == info->end) {
          // ...
          break;
        }
      }
      // ...
    }
  }
  // ...
}
void TurboshaftSpecialRPONumberer::ComputeLoopInfo(
    size_t num_loops, ZoneVector<Backedge>& backedges) {
  ZoneVector<const Block*> stack(zone());
  loops_.resize(num_loops, LoopInfo{});

  // Compute loop membership starting from each backedge.
  for (auto [backedge, header_index] : backedges) {
    const Block* header = SuccessorBlocks(*backedge, *graph_)[header_index];
    size_t loop_num = loop_number(header);
    loops_[loop_num].header = header;
    loops_[loop_num].members = zone()->New<SparseBitVector>(zone());

    if (backedge != header) {
      DCHECK(!loops_[loop_num].members->Contains(backedge->index().id()));
      loops_[loop_num].members->Add(backedge->index().id());
      stack.push_back(backedge);
    }

    // Propagate loop membership backwards: every predecessor of a member,
    // up to (but excluding) the header, is also a member of the loop.
    while (!stack.empty()) {
      const Block* block = stack.back();
      stack.pop_back();
      for (const Block* pred : block->PredecessorsIterable()) {
        if (pred != header) {
          if (!loops_[loop_num].members->Contains(pred->index().id())) {
            loops_[loop_num].members->Add(pred->index().id());
            stack.push_back(pred);
          }
        }
      }
    }
  }
}
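The backward walk above is the textbook natural-loop membership computation. As a minimal standalone sketch of the same idea, assuming nothing beyond the STL (ToyBlock and LoopMembers are illustrative names, not part of the V8 API):

// A sketch of the same backward walk with plain STL containers instead of
// V8's Zone allocators and SparseBitVector.
#include <cstdint>
#include <set>
#include <vector>

struct ToyBlock {
  uint32_t id;
  std::vector<const ToyBlock*> predecessors;
};

// Returns the natural-loop members for a backedge: the backedge source and
// every block that reaches it without passing through the header.
std::set<uint32_t> LoopMembers(const ToyBlock* header,
                               const ToyBlock* backedge) {
  std::set<uint32_t> members;
  std::vector<const ToyBlock*> stack;
  if (backedge != header) {
    members.insert(backedge->id);
    stack.push_back(backedge);
  }
  while (!stack.empty()) {
    const ToyBlock* block = stack.back();
    stack.pop_back();
    for (const ToyBlock* pred : block->predecessors) {
      // insert().second is true only the first time we see this block.
      if (pred != header && members.insert(pred->id).second) {
        stack.push_back(pred);
      }
    }
  }
  return members;
}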
ZoneVector<uint32_t> TurboshaftSpecialRPONumberer::ComputeBlockPermutation(
    const Block* entry) {
  ZoneVector<uint32_t> result(graph_->block_count(), zone());
  size_t i = 0;
  // Walk the rpo_next chain starting at `entry`, recording block ids.
  for (const Block* b = entry; b; b = block_data_[b->index()].rpo_next) {
    result[i++] = b->index().id();
  }
  DCHECK_EQ(i, graph_->block_count());
  return result;
}
void PropagateDeferred(Graph& graph) {
  graph.StartBlock().set_custom_data(
      0, Block::CustomDataKind::kDeferredInSchedule);
  for (Block& block : graph.blocks()) {
    const Block* predecessor = block.LastPredecessor();
    if (predecessor == nullptr) {
      continue;
    } else if (block.IsLoop()) {
      // Loop headers inherit the flag from their forward predecessor;
      // the backedge is ignored.
      // ...
    } else if (predecessor->NeighboringPredecessor() == nullptr) {
      // Single predecessor: deferred if the predecessor is deferred or if
      // this block is an unlikely successor of it.
      const bool is_deferred =
          predecessor->get_custom_data(
              Block::CustomDataKind::kDeferredInSchedule) ||
          IsUnlikelySuccessor(predecessor, &block, graph);
      block.set_custom_data(is_deferred,
                            Block::CustomDataKind::kDeferredInSchedule);
    } else {
      // Multiple predecessors: deferred only if every predecessor is; any
      // non-deferred predecessor clears the flag again.
      block.set_custom_data(true, Block::CustomDataKind::kDeferredInSchedule);
      for (; predecessor;
           predecessor = predecessor->NeighboringPredecessor()) {
        if (!predecessor->get_custom_data(
                Block::CustomDataKind::kDeferredInSchedule)) {
          block.set_custom_data(
              false, Block::CustomDataKind::kDeferredInSchedule);
          break;
        }
      }
    }
  }
}
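As a condensed sketch of the same propagation rule, assuming blocks are visited in reverse post-order so that predecessors are handled first (ToyNode and PropagateDeferredToy are illustrative names, not V8 API; the loop-header special case is omitted for brevity):

#include <vector>

struct ToyNode {
  std::vector<const ToyNode*> preds;
  bool unlikely_entry = false;  // the branch into this block is unlikely
  bool deferred = false;
};

void PropagateDeferredToy(std::vector<ToyNode*>& rpo_order) {
  for (ToyNode* n : rpo_order) {
    if (n->preds.empty()) {
      n->deferred = false;  // the start block is never deferred
    } else if (n->preds.size() == 1) {
      n->deferred = n->preds[0]->deferred || n->unlikely_entry;
    } else {
      // Deferred only if every predecessor is deferred.
      bool all_deferred = true;
      for (const ToyNode* p : n->preds)
        all_deferred = all_deferred && p->deferred;
      n->deferred = all_deferred;
    }
  }
}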
void ProfileApplicationPhase::Run(PipelineData* data, Zone* temp_zone,
                                  const ProfileDataFromFile* profile) {
  Graph& graph = data->graph();
  for (auto& op : graph.AllOperations()) {
    if (BranchOp* branch = op.TryCast<BranchOp>()) {
      uint32_t true_block_id = branch->if_true->index().id();
      uint32_t false_block_id = branch->if_false->index().id();
      BranchHint hint = profile->GetHint(true_block_id, false_block_id);
      if (hint != BranchHint::kNone) {
        // Overwrite the branch hint with the profile-based one.
        branch->hint = hint;
      }
    }
  }
}
void SpecialRPOSchedulingPhase::Run(PipelineData* data, Zone* temp_zone) {
  Graph& graph = data->graph();

  // Compute the special RPO, unless the graph already has one.
  if (!data->graph_has_special_rpo()) {
    TurboshaftSpecialRPONumberer numberer(graph, temp_zone);
    auto schedule = numberer.ComputeSpecialRPO();
    graph.ReorderBlocks(base::VectorOf(schedule));
    data->set_graph_has_special_rpo();
  }

  // Determine deferred blocks.
  PropagateDeferred(graph);
}
std::optional<BailoutReason> InstructionSelectionPhase::Run(
    PipelineData* data, Zone* temp_zone, const CallDescriptor* call_descriptor,
    Linkage* linkage, CodeTracer* code_tracer) {
  Graph& graph = data->graph();

  // Initialize an instruction sequence.
  data->InitializeInstructionComponent(call_descriptor);

  // Run the actual instruction selection.
  InstructionSelector selector = InstructionSelector::ForTurboshaft(
      temp_zone, graph.op_id_count(), linkage, data->sequence(), &graph,
      data->frame(),
      data->info()->switch_jump_table()
          ? InstructionSelector::kEnableSwitchJumpTable
          : InstructionSelector::kDisableSwitchJumpTable,
      &data->info()->tick_counter(), data->broker(),
      &data->max_unoptimized_frame_height(),
      &data->max_pushed_argument_count(),
      data->info()->source_positions()
          ? InstructionSelector::kAllSourcePositions
          : InstructionSelector::kCallSourcePositions,
      InstructionSelector::SupportedFeatures(),
      v8_flags.turbo_instruction_scheduling
          ? InstructionSelector::kEnableScheduling
          : InstructionSelector::kDisableScheduling,
      data->assembler_options().enable_root_relative_access
          ? InstructionSelector::kEnableRootsRelativeAddressing
          : InstructionSelector::kDisableRootsRelativeAddressing,
      data->info()->trace_turbo_json()
          ? InstructionSelector::kEnableTraceTurboJson
          : InstructionSelector::kDisableTraceTurboJson);
  if (std::optional<BailoutReason> bailout = selector.SelectInstructions()) {
    return bailout;
  }
  TraceSequence(data->info(), data->sequence(), data->broker(), code_tracer,
                "after instruction selection");
  return std::nullopt;
}