v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
pipelines.h
1// Copyright 2024 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_COMPILER_TURBOSHAFT_PIPELINES_H_
6#define V8_COMPILER_TURBOSHAFT_PIPELINES_H_
7
8#include <optional>
9
33
34#if V8_ENABLE_WEBASSEMBLY
36#endif // V8_ENABLE_WEBASSEMBLY
37
38namespace v8::internal::compiler::turboshaft {
39
40inline constexpr char kTempZoneName[] = "temp-zone";
41
42#define RUN_MAYBE_ABORT(phase, ...) \
43 if (V8_UNLIKELY(!Run<phase>(__VA_ARGS__))) return {};
44
45struct SimplificationAndNormalizationPhase {
46 DECL_TURBOSHAFT_PHASE_CONSTANTS(SimplificationAndNormalization)
47
48 void Run(PipelineData* data, Zone* temp_zone);
49};
50
51class V8_EXPORT_PRIVATE Pipeline {
52 public:
53 explicit Pipeline(PipelineData* data) : data_(data) {}
54
55 PipelineData* data() const { return data_; }
56 void BeginPhaseKind(const char* phase_kind_name) {
57 if (auto statistics = data()->pipeline_statistics()) {
58 statistics->BeginPhaseKind(phase_kind_name);
59 }
60 }
61 void EndPhaseKind() {
62 if (auto statistics = data()->pipeline_statistics()) {
63 statistics->EndPhaseKind();
64 }
65 }
66
67 template <TurboshaftPhase Phase, typename... Args>
68 V8_WARN_UNUSED_RESULT auto Run(Args&&... args) {
69 // Setup run scope.
70 PhaseScope phase_scope(data_->pipeline_statistics(), Phase::phase_name());
71 ZoneWithName<Phase::kPhaseName> temp_zone(data_->zone_stats(),
72 Phase::phase_name());
73 NodeOriginTable::PhaseScope origin_scope(data_->node_origins(),
74 Phase::phase_name());
75#ifdef V8_RUNTIME_CALL_STATS
76 RuntimeCallTimerScope runtime_call_timer_scope(data_->runtime_call_stats(),
77 Phase::kRuntimeCallCounterId,
78 Phase::kCounterMode);
79#endif
80
81 Phase phase;
82 using result_t =
83 decltype(phase.Run(data_, temp_zone, std::forward<Args>(args)...));
84 if constexpr (std::is_same_v<result_t, void>) {
85 phase.Run(data_, temp_zone, std::forward<Args>(args)...);
87 PrintGraph(temp_zone, Phase::phase_name());
88 }
89 return !data_->info()->was_cancelled();
90 } else {
91 auto result = phase.Run(data_, temp_zone, std::forward<Args>(args)...);
93 PrintGraph(temp_zone, Phase::phase_name());
94 }
95 return !data_->info()->was_cancelled() ? result : std::nullopt;
96 }
97 UNREACHABLE();
98 }
99
100 void PrintGraph(Zone* zone, const char* phase_name) {
101 CodeTracer* code_tracer = nullptr;
102 if (data_->info()->trace_turbo_graph()) {
103 // NOTE: We must not call `GetCodeTracer` if tracing is not enabled,
104 // because it may not yet be initialized then and doing so from the
105 // background thread is not threadsafe.
106 code_tracer = data_->GetCodeTracer();
107 DCHECK_NOT_NULL(code_tracer);
108 }
109 PrintTurboshaftGraph(data_, zone, code_tracer, phase_name);
110 }
111
112 void TraceSequence(const char* phase_name) {
113 if (info()->trace_turbo_json()) {
115 AllowHandleDereference allow_deref;
116 TurboJsonFile json_of(info(), std::ios_base::app);
117 json_of
118 << "{\"name\":\"" << phase_name << "\",\"type\":\"sequence\""
119 << ",\"blocks\":" << InstructionSequenceAsJSON{data()->sequence()}
120 << ",\"register_allocation\":{"
121 << RegisterAllocationDataAsJSON{*(data()->register_allocation_data()),
122 *(data()->sequence())}
123 << "}},\n";
124 }
125 if (info()->trace_turbo_graph()) {
127 AllowHandleDereference allow_deref;
128 CodeTracer::StreamScope tracing_scope(data()->GetCodeTracer());
129 tracing_scope.stream()
130 << "----- Instruction sequence " << phase_name << " -----\n"
131 << *data()->sequence();
132 }
133 }
134
135 bool CreateGraphWithMaglev(Linkage* linkage) {
136 UnparkedScopeIfNeeded unparked_scope(data_->broker());
137
138 BeginPhaseKind("V8.TFGraphCreation");
139 turboshaft::Tracing::Scope tracing_scope(data_->info());
140 std::optional<BailoutReason> bailout =
141 Run<turboshaft::MaglevGraphBuildingPhase>(linkage);
142 EndPhaseKind();
143
144 if (bailout.has_value()) {
145 data_->info()->AbortOptimization(bailout.value());
146 return false;
147 }
148
149 return true;
150 }
151
152 bool CreateGraphFromTurbofan(compiler::TFPipelineData* turbofan_data,
153 Linkage* linkage) {
154 CHECK_IMPLIES(!v8_flags.disable_optimizing_compilers, v8_flags.turboshaft);
155
156 UnparkedScopeIfNeeded scope(data_->broker(),
157 v8_flags.turboshaft_trace_reduction ||
158 v8_flags.turboshaft_trace_emitted);
159
160 turboshaft::Tracing::Scope tracing_scope(data_->info());
161
162 if (std::optional<BailoutReason> bailout =
163 Run<turboshaft::BuildGraphPhase>(turbofan_data, linkage)) {
164 info()->AbortOptimization(*bailout);
165 return false;
166 }
167
168 return true;
169 }
170
171 bool OptimizeTurboshaftGraph(Linkage* linkage) {
172 UnparkedScopeIfNeeded scope(data_->broker(),
173 v8_flags.turboshaft_trace_reduction ||
174 v8_flags.turboshaft_trace_emitted);
175
176 turboshaft::Tracing::Scope tracing_scope(data_->info());
177
178 BeginPhaseKind("V8.TurboshaftOptimize");
179
180#ifdef V8_ENABLE_WEBASSEMBLY
181 // TODO(dlehmann,353475584): Once the Wasm-in-JS TS inlining MVP is feature-
182 // complete and cleaned-up, move its reducer into the beginning of the
183 // `MachineLoweringPhase` since we can reuse the `DataViewLoweringReducer`
184 // there and avoid a separate phase.
185 if (v8_flags.turboshaft_wasm_in_js_inlining) {
187 }
188#endif // !V8_ENABLE_WEBASSEMBLY
189
191
192 if (v8_flags.turboshaft_loop_unrolling) {
194 }
195
196 if (v8_flags.turbo_store_elimination) {
198 }
199
201
202 if (v8_flags.turboshaft_typed_optimizations) {
204 }
205
206 if (v8_flags.turboshaft_assert_types) {
208 }
209
210 // Perform dead code elimination, reduce stack checks, simplify loads on
211 // platforms where required, ...
213
214#ifdef V8_ENABLE_DEBUG_CODE
215 if (V8_UNLIKELY(v8_flags.turboshaft_enable_debug_features)) {
216 // This phase has to run very late to allow all previous phases to use
217 // debug features.
219 }
220#endif // V8_ENABLE_DEBUG_CODE
221
222 return true;
223 }
224
229
230 V8_WARN_UNUSED_RESULT bool PrepareForInstructionSelection(
231 const ProfileDataFromFile* profile = nullptr) {
232 if (V8_UNLIKELY(data()->pipeline_kind() == TurboshaftPipelineKind::kCSA ||
233 data()->pipeline_kind() ==
234 TurboshaftPipelineKind::kTSABuiltin)) {
235 if (profile) {
237 }
238
239 if (v8_flags.reorder_builtins &&
240 Builtins::IsBuiltinId(info()->builtin())) {
241 UnparkedScopeIfNeeded unparked_scope(data()->broker());
242 BasicBlockCallGraphProfiler::StoreCallGraph(info(), data()->graph());
243 }
244
245 if (v8_flags.turbo_profiling) {
246 UnparkedScopeIfNeeded unparked_scope(data()->broker());
247
248 // Basic block profiling disables concurrent compilation, so handle
249 // deref is fine.
250 AllowHandleDereference allow_handle_dereference;
251 const size_t block_count = data()->graph().block_count();
252 BasicBlockProfilerData* profiler_data =
253 BasicBlockProfiler::Get()->NewData(block_count);
254
255 // Set the function name.
256 profiler_data->SetFunctionName(info()->GetDebugName());
257 // Capture the schedule string before instrumentation.
258 if (v8_flags.turbo_profiling_verbose) {
259 std::ostringstream os;
260 os << data()->graph();
261 profiler_data->SetSchedule(os);
262 }
263
264 info()->set_profiler_data(profiler_data);
265
267 } else {
268 // We run an empty copying phase to make sure that we have the same
269 // control flow as when taking the profile.
270 ZoneWithName<kTempZoneName> temp_zone(data()->zone_stats(),
271 kTempZoneName);
272 CopyingPhase<>::Run(data(), temp_zone);
273 }
274 }
275
276 // DecompressionOptimization has to run as the last phase because it
277 // constructs a (slightly) invalid graph that mixes Tagged and Compressed
278 // representations.
280
281 return Run<SpecialRPOSchedulingPhase>();
282 }
283
284 [[nodiscard]] bool SelectInstructions(Linkage* linkage) {
285 auto call_descriptor = linkage->GetIncomingDescriptor();
286
287 // Depending on which code path led us to this function, the frame may or
288 // may not have been initialized. If it hasn't yet, initialize it now.
289 if (!data_->frame()) {
290 data_->InitializeFrameData(call_descriptor);
291 }
292
293 // Select and schedule instructions covering the scheduled graph.
294 CodeTracer* code_tracer = nullptr;
295 if (info()->trace_turbo_graph()) {
296 // NOTE: We must not call `GetCodeTracer` if tracing is not enabled,
297 // because it may not yet be initialized then and doing so from the
298 // background thread is not threadsafe.
299 code_tracer = data_->GetCodeTracer();
300 }
301
302 if (std::optional<BailoutReason> bailout = Run<InstructionSelectionPhase>(
303 call_descriptor, linkage, code_tracer)) {
304 data_->info()->AbortOptimization(*bailout);
305 EndPhaseKind();
306 return false;
307 }
308
309 return true;
310
311 // TODO(nicohartmann@): We might need to provide this.
312 // if (info()->trace_turbo_json()) {
313 // UnparkedScopeIfNeeded scope(turbofan_data->broker());
314 // AllowHandleDereference allow_deref;
315 // TurboCfgFile tcf(isolate());
316 // tcf << AsC1V("CodeGen", turbofan_data->schedule(),
317 // turbofan_data->source_positions(),
318 // turbofan_data->sequence());
319
320 // std::ostringstream source_position_output;
321 // // Output source position information before the graph is deleted.
322 // if (data_->source_positions() != nullptr) {
323 // data_->source_positions()->PrintJson(source_position_output);
324 // } else {
325 // source_position_output << "{}";
326 // }
327 // source_position_output << ",\n\"nodeOrigins\" : ";
328 // data_->node_origins()->PrintJson(source_position_output);
329 // data_->set_source_position_output(source_position_output.str());
330 // }
331 }
332
333 V8_WARN_UNUSED_RESULT bool AllocateRegisters(
334 CallDescriptor* call_descriptor) {
335 BeginPhaseKind("V8.TFRegisterAllocation");
336
337 bool run_verifier = v8_flags.turbo_verify_allocation;
338
339 // Allocate registers.
340 const RegisterConfiguration* config = RegisterConfiguration::Default();
341 std::unique_ptr<const RegisterConfiguration> restricted_config;
342 if (call_descriptor->HasRestrictedAllocatableRegisters()) {
343 RegList registers = call_descriptor->AllocatableRegisters();
344 DCHECK_LT(0, registers.Count());
345 restricted_config.reset(
346 RegisterConfiguration::RestrictGeneralRegisters(registers));
347 config = restricted_config.get();
348 }
349 if (!AllocateRegisters(config, call_descriptor, run_verifier)) return false;
350
351 // Verify the instruction sequence has the same hash in two stages.
352 VerifyGeneratedCodeIsIdempotent();
353
355
356 // TODO(mtrofin): move this off to the register allocator.
357 bool generate_frame_at_start =
358 data_->sequence()->instruction_blocks().front()->must_construct_frame();
359 // Optimize jumps.
360 if (v8_flags.turbo_jt) {
361 RUN_MAYBE_ABORT(JumpThreadingPhase, generate_frame_at_start);
362 }
363
364 EndPhaseKind();
365
366 return !info()->was_cancelled();
367 }
368
370 // TODO(nicohartmann): Are there any graphs which are still verifiable?
371 return true;
372 }
373
374 void VerifyGeneratedCodeIsIdempotent() {
375 JumpOptimizationInfo* jump_opt = data()->jump_optimization_info();
376 if (jump_opt == nullptr) return;
377
378 InstructionSequence* code = data()->sequence();
379 int instruction_blocks = code->InstructionBlockCount();
380 int virtual_registers = code->VirtualRegisterCount();
381 size_t hash_code =
382 base::hash_combine(instruction_blocks, virtual_registers);
383 for (Instruction* instr : *code) {
384 hash_code = base::hash_combine(hash_code, instr->opcode(),
385 instr->InputCount(), instr->OutputCount());
386 }
387 for (int i = 0; i < virtual_registers; i++) {
388 hash_code = base::hash_combine(hash_code, code->GetRepresentation(i));
389 }
390 if (jump_opt->is_collecting()) {
391 jump_opt->hash_code = hash_code;
392 } else {
393 CHECK_EQ(hash_code, jump_opt->hash_code);
394 }
395 }
396
397 V8_WARN_UNUSED_RESULT bool AllocateRegisters(
398 const RegisterConfiguration* config, CallDescriptor* call_descriptor,
399 bool run_verifier);
400
401 V8_WARN_UNUSED_RESULT bool AssembleCode(Linkage* linkage) {
402 BeginPhaseKind("V8.TFCodeGeneration");
403 data()->InitializeCodeGenerator(linkage);
404
405 UnparkedScopeIfNeeded unparked_scope(data()->broker());
406
408 if (info()->trace_turbo_json()) {
409 TurboJsonFile json_of(info(), std::ios_base::app);
410 json_of
411 << "{\"name\":\"code generation\"" << ", \"type\":\"instructions\""
412 << InstructionStartsAsJSON{&data()->code_generator()->instr_starts()}
414 &data()->code_generator()->offsets_info()};
415 json_of << "},\n";
416 }
417
418 data()->ClearInstructionComponent();
419 EndPhaseKind();
420 return !info()->was_cancelled();
421 }
422
423 MaybeHandle<Code> GenerateCode(CallDescriptor* call_descriptor) {
424 Linkage linkage(call_descriptor);
425 if (!PrepareForInstructionSelection()) return {};
426 if (!SelectInstructions(&linkage)) {
427 return MaybeHandle<Code>();
428 }
429 if (!AllocateRegisters(linkage.GetIncomingDescriptor())) return {};
430 if (!AssembleCode(&linkage)) return {};
431 return FinalizeCode();
432 }
433
434 [[nodiscard]] bool GenerateCode(
435 Linkage* linkage, std::shared_ptr<OsrHelper> osr_helper = {},
436 JumpOptimizationInfo* jump_optimization_info = nullptr,
437 const ProfileDataFromFile* profile = nullptr, int initial_graph_hash = 0);
438
439 OptimizedCompilationInfo* info() { return data_->info(); }
440
441 MaybeIndirectHandle<Code> FinalizeCode(bool retire_broker = true) {
442 BeginPhaseKind("V8.TFFinalizeCode");
443 if (data_->broker() && retire_broker) {
444 data_->broker()->Retire();
445 }
447
448 MaybeIndirectHandle<Code> maybe_code = data_->code();
449 Handle<Code> code;
450 if (!maybe_code.ToHandle(&code)) {
451 return maybe_code;
452 }
453
454 data_->info()->SetCode(code);
455 PrintCode(data_->isolate(), code, data_->info());
456
457 // Functions with many inline candidates are sensitive to correct call
458 // frequency feedback and should therefore not be tiered up early.
459 if (v8_flags.profile_guided_optimization &&
460 info()->could_not_inline_all_candidates() &&
461 info()->shared_info()->cached_tiering_decision() !=
462 CachedTieringDecision::kDelayMaglev) {
463 info()->shared_info()->set_cached_tiering_decision(
464 CachedTieringDecision::kNormal);
465 }
466
467 if (info()->trace_turbo_json()) {
468 TurboJsonFile json_of(info(), std::ios_base::app);
469
470 json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\""
471 << BlockStartsAsJSON{&data_->code_generator()->block_starts()}
472 << "\"data\":\"";
473#ifdef ENABLE_DISASSEMBLER
474 std::stringstream disassembly_stream;
475 code->Disassemble(nullptr, disassembly_stream, data_->isolate());
476 std::string disassembly_string(disassembly_stream.str());
477 for (const auto& c : disassembly_string) {
478 json_of << AsEscapedUC16ForJSON(c);
479 }
480#endif // ENABLE_DISASSEMBLER
481 json_of << "\"}\n],\n";
482 json_of << "\"nodePositions\":";
483 // TODO(nicohartmann): We should try to always provide source positions.
484 json_of << (data_->source_position_output().empty()
485 ? "{}"
486 : data_->source_position_output())
487 << ",\n";
488 JsonPrintAllSourceWithPositions(json_of, data_->info(), data_->isolate());
489 if (info()->has_bytecode_array()) {
490 json_of << ",\n";
491 JsonPrintAllBytecodeSources(json_of, info());
492 }
493 json_of << "\n}";
494 }
495 if (info()->trace_turbo_json() || info()->trace_turbo_graph()) {
496 CodeTracer::StreamScope tracing_scope(data_->GetCodeTracer());
497 tracing_scope.stream()
498 << "---------------------------------------------------\n"
499 << "Finished compiling method " << info()->GetDebugName().get()
500 << " using TurboFan" << std::endl;
501 }
502 EndPhaseKind();
503 return code;
504 }
505
506 bool CommitDependencies(Handle<Code> code) {
507 return data_->depedencies() == nullptr ||
508 data_->depedencies()->Commit(code);
509 }
510
511 private:
512#ifdef DEBUG
513 virtual bool IsBuiltinPipeline() const { return false; }
514#endif
515
516 PipelineData* data_;
517};
518
519class BuiltinPipeline : public Pipeline {
520 public:
521 explicit BuiltinPipeline(PipelineData* data) : Pipeline(data) {}
522
523 void OptimizeBuiltin();
524
525#ifdef DEBUG
526 bool IsBuiltinPipeline() const override { return true; }
527#endif
528};
529
530#undef RUN_MAYBE_ABORT
531
532} // namespace v8::internal::compiler::turboshaft
533
534#endif // V8_COMPILER_TURBOSHAFT_PIPELINES_H_
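
The central mechanism in this header is the pairing of small phase structs (declared with DECL_TURBOSHAFT_PHASE_CONSTANTS and a Run(PipelineData*, Zone*) method) with the templated Pipeline::Run<Phase>() driver and the RUN_MAYBE_ABORT early-exit macro: each step runs one phase under per-phase zone/statistics/tracing scopes and bails out of the enclosing function as soon as a phase reports cancellation. The stand-alone sketch below mirrors that shape with simplified, hypothetical stand-ins (SketchPipelineData, SketchPipeline, LoweringPhase, OptimizePhase, SKETCH_RUN_OR_ABORT are not V8 names); it illustrates the pattern only and is not the actual V8 implementation.

// Illustrative sketch only: simplified stand-ins for V8's PipelineData,
// phase structs, and the RUN_MAYBE_ABORT early-exit pattern. None of these
// types are the real V8 classes.
#include <iostream>
#include <string>
#include <vector>

struct SketchPipelineData {        // stand-in for PipelineData
  std::vector<std::string> trace;  // records which phases ran
  bool cancelled = false;          // stand-in for info()->was_cancelled()
};

// Each phase is a struct with a name and a Run method, mirroring the
// DECL_TURBOSHAFT_PHASE_CONSTANTS + Run(PipelineData*, Zone*) shape.
struct LoweringPhase {
  static constexpr const char* phase_name() { return "Lowering"; }
  void Run(SketchPipelineData* data) { data->trace.push_back(phase_name()); }
};

struct OptimizePhase {
  static constexpr const char* phase_name() { return "Optimize"; }
  void Run(SketchPipelineData* data) { data->trace.push_back(phase_name()); }
};

class SketchPipeline {
 public:
  explicit SketchPipeline(SketchPipelineData* data) : data_(data) {}

  // Like Pipeline::Run<Phase>: run one phase (per-phase zones, statistics and
  // tracing scopes are omitted here) and report whether the pipeline may
  // continue; false means the compilation job was cancelled.
  template <typename Phase>
  [[nodiscard]] bool Run() {
    Phase phase;
    phase.Run(data_);
    return !data_->cancelled;
  }

 private:
  SketchPipelineData* data_;
};

// Same early-return idea as RUN_MAYBE_ABORT(phase, ...): if a phase reports
// cancellation, the enclosing function bails out immediately.
#define SKETCH_RUN_OR_ABORT(pipeline, Phase) \
  if (!(pipeline).Run<Phase>()) return false;

// A phase-running function in the style of OptimizeTurboshaftGraph: phases
// are listed linearly, and any cancellation aborts the whole sequence.
bool OptimizeGraph(SketchPipeline& pipeline) {
  SKETCH_RUN_OR_ABORT(pipeline, LoweringPhase);
  SKETCH_RUN_OR_ABORT(pipeline, OptimizePhase);
  return true;
}

int main() {
  SketchPipelineData data;
  SketchPipeline pipeline(&data);
  if (!OptimizeGraph(pipeline)) return 1;
  for (const auto& name : data.trace) std::cout << name << "\n";
  return 0;
}

The macro exists for the same reason as RUN_MAYBE_ABORT above: functions such as OptimizeTurboshaftGraph can list their phases linearly while still propagating a bailout from any one of them to the caller.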