void JSHeapBroker::AttachLocalIsolateForMaglev(
    maglev::MaglevCompilationInfo* info, LocalIsolate* local_isolate) {
  DCHECK_NOT_NULL(local_isolate);
  local_isolate_ = local_isolate;
  // Hand the compilation info's persistent handles over to the local heap
  // for the duration of the background compile.
  local_isolate_->heap()->AttachPersistentHandles(
      info->DetachPersistentHandles());
}

void JSHeapBroker::DetachLocalIsolateForMaglev(
    maglev::MaglevCompilationInfo* info) {
  DCHECK_NOT_NULL(local_isolate_);
  std::unique_ptr<PersistentHandles> ph =
      local_isolate_->heap()->DetachPersistentHandles();
  local_isolate_ = nullptr;
  info->set_persistent_handles(std::move(ph));
}
constexpr char kMaglevCompilerName[] = "Maglev";
class LocalIsolateScope final {
 public:
  explicit LocalIsolateScope(MaglevCompilationInfo* info,
                             LocalIsolate* local_isolate)
      : info_(info) {
    info_->broker()->AttachLocalIsolateForMaglev(info_, local_isolate);
  }

  ~LocalIsolateScope() {
    info_->broker()->DetachLocalIsolateForMaglev(info_);
  }

 private:
  MaglevCompilationInfo* const info_;
};
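// Usage (see ExecuteJobImpl below): a stack-allocated scope brackets the
// background compile, e.g.
//
//   LocalIsolateScope scope{info(), local_isolate};
//
// so the handles are detached again even on early returns.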
Zone* ExportedMaglevCompilationInfo::zone() const { return info_->zone(); }

void ExportedMaglevCompilationInfo::set_canonical_handles(
    std::unique_ptr<CanonicalHandlesMap>&& canonical_handles) {
  info_->set_canonical_handles(std::move(canonical_handles));
}
// static
std::unique_ptr<MaglevCompilationJob> MaglevCompilationJob::New(
    Isolate* isolate, IndirectHandle<JSFunction> function,
    BytecodeOffset osr_offset) {
  auto info = MaglevCompilationInfo::New(isolate, function, osr_offset);
  return std::unique_ptr<MaglevCompilationJob>(
      new MaglevCompilationJob(isolate, std::move(info)));
}
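// Pipeline statistics are heap-allocated only when they will actually be
// consumed, i.e. when the relevant trace category or one of the
// --maglev-stats flags is active; otherwise this returns nullptr.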
MaglevPipelineStatistics* CreatePipelineStatistics(
    Isolate* isolate, MaglevCompilationInfo* compilation_info,
    compiler::ZoneStats* zone_stats) {
  MaglevPipelineStatistics* pipeline_stats = nullptr;
  bool tracing_enabled;
  TRACE_EVENT_CATEGORY_GROUP_ENABLED(TRACE_DISABLED_BY_DEFAULT("v8.maglev"),
                                     &tracing_enabled);
  if (tracing_enabled || v8_flags.maglev_stats || v8_flags.maglev_stats_nvp) {
    pipeline_stats = new MaglevPipelineStatistics(
        compilation_info, isolate->GetMaglevStatistics(), zone_stats);
  }
  return pipeline_stats;
}
MaglevCompilationJob::MaglevCompilationJob(
    Isolate* isolate, std::unique_ptr<MaglevCompilationInfo>&& info)
    : OptimizedCompilationJob(kMaglevCompilerName, State::kReadyToPrepare),
      info_(std::move(info)),
      zone_stats_(isolate->allocator()),
      pipeline_statistics_(
          CreatePipelineStatistics(isolate, info_.get(), &zone_stats_)) {
  DCHECK(maglev::IsMaglevEnabled());
}
MaglevCompilationJob::~MaglevCompilationJob() = default;
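// The job follows OptimizedCompilationJob's three-phase contract: Prepare
// runs on the main thread, Execute may run on a background thread, and
// Finalize runs on the main thread again to install the result.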
CompilationJob::Status MaglevCompilationJob::PrepareJobImpl(Isolate* isolate) {
  BeginPhaseKind("V8.MaglevPrepareJob");
  if (info()->collect_source_positions()) {
    SharedFunctionInfo::EnsureSourcePositionsAvailable(
        isolate,
        info()->toplevel_compilation_unit()->shared_function_info().object());
  }
  EndPhaseKind();
  return CompilationJob::SUCCEEDED;
}
CompilationJob::Status MaglevCompilationJob::ExecuteJobImpl(
    RuntimeCallStats* stats, LocalIsolate* local_isolate) {
  BeginPhaseKind("V8.MaglevExecuteJob");
  LocalIsolateScope scope{info(), local_isolate};
  if (!maglev::MaglevCompiler::Compile(local_isolate, info())) {
    EndPhaseKind();
    return CompilationJob::FAILED;
  }
  EndPhaseKind();
  return CompilationJob::SUCCEEDED;
}
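// Finalization turns the compile result into a Code object, records the
// cached tiering decision used by profile-guided optimization, and registers
// the maps the code depends on as weak, so the code can be deoptimized if
// they die.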
CompilationJob::Status MaglevCompilationJob::FinalizeJobImpl(Isolate* isolate) {
  BeginPhaseKind("V8.MaglevFinalizeJob");
  Handle<Code> code;
  auto [maybe_code, bailout_reason] =
      maglev::MaglevCompiler::GenerateCode(isolate, info());
  if (!maybe_code.ToHandle(&code)) {
    EndPhaseKind();
    return CompilationJob::FAILED;
  }
  if (v8_flags.profile_guided_optimization &&
      info()->could_not_inline_all_candidates() &&
      info()->toplevel_function()->shared()->cached_tiering_decision() !=
          CachedTieringDecision::kDelayMaglev) {
    info()->toplevel_function()->shared()->set_cached_tiering_decision(
        CachedTieringDecision::kNormal);
  }
  info()->set_code(code);
  GlobalHandleVector<Map> maps = CollectRetainedMaps(isolate, code);
  RegisterWeakObjectsInOptimizedCode(
      isolate, info()->broker()->target_native_context().object(), code,
      std::move(maps));
  EndPhaseKind();
  return CompilationJob::SUCCEEDED;
}
GlobalHandleVector<Map> MaglevCompilationJob::CollectRetainedMaps(
    Isolate* isolate, DirectHandle<Code> code) {
  if (v8_flags.maglev_build_code_on_background) {
    return info()->code_generator()->RetainedMaps(isolate);
  }
  return OptimizedCompilationJob::CollectRetainedMaps(isolate, code);
}
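// Canonical handles are dropped on the main thread so that, when jobs are
// destructed on a background thread, the identity map's strong roots do not
// have to be unregistered from there.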
void MaglevCompilationJob::DisposeOnMainThread(Isolate* isolate) {
  DCHECK_EQ(ThreadId::Current(), isolate->thread_id());
  info()->DetachCanonicalHandles()->Clear();
}
MaybeIndirectHandle<Code> MaglevCompilationJob::code() const {
  return info_->get_code();
}

IndirectHandle<JSFunction> MaglevCompilationJob::function() const {
  return info_->toplevel_function();
}

BytecodeOffset MaglevCompilationJob::osr_offset() const {
  return info_->toplevel_osr_offset();
}

bool MaglevCompilationJob::is_osr() const { return info_->toplevel_is_osr(); }

bool MaglevCompilationJob::specialize_to_function_context() const {
  return info_->specialize_to_function_context();
}
void MaglevCompilationJob::RecordCompilationStats(Isolate* isolate) const {
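  // Don't record samples from machines without high-resolution timers, as
  // that can cause serious reporting issues.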
  if (base::TimeTicks::IsHighResolution()) {
    Counters* const counters = isolate->counters();
    counters->maglev_optimize_prepare()->AddSample(
        static_cast<int>(time_taken_to_prepare_.InMicroseconds()));
    counters->maglev_optimize_execute()->AddSample(
        static_cast<int>(time_taken_to_execute_.InMicroseconds()));
    counters->maglev_optimize_finalize()->AddSample(
        static_cast<int>(time_taken_to_finalize_.InMicroseconds()));
    counters->maglev_optimize_total_time()->AddSample(
        static_cast<int>(ElapsedTime().InMicroseconds()));
  }
  if (v8_flags.trace_opt_stats) {
    static double compilation_time = 0.0;
    static int compiled_functions = 0;
    static int code_size = 0;
    compilation_time += (time_taken_to_prepare_.InMillisecondsF() +
                         time_taken_to_execute_.InMillisecondsF() +
                         time_taken_to_finalize_.InMillisecondsF());
    compiled_functions++;
    code_size += function()->shared()->SourceSize();
    PrintF(
        "[maglev] Compiled: %d functions with %d byte source size in %fms.\n",
        compiled_functions, code_size, compilation_time);
  }
}
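// The trace id xors the job and info addresses with the toplevel function's
// address and literal id; it only needs to be unique enough to correlate
// flow events, not globally stable.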
uint64_t MaglevCompilationJob::trace_id() const {
  return reinterpret_cast<uint64_t>(this) ^
         reinterpret_cast<uint64_t>(info_.get()) ^
         info_->toplevel_function().address() ^
         info_->toplevel_function()->shared()->function_literal_id();
}
void MaglevCompilationJob::BeginPhaseKind(const char* name) {
  if (V8_UNLIKELY(pipeline_statistics_ != nullptr)) {
    pipeline_statistics_->BeginPhaseKind(name);
  }
}
void MaglevCompilationJob::EndPhaseKind() {
  if (V8_UNLIKELY(pipeline_statistics_ != nullptr)) {
    pipeline_statistics_->EndPhaseKind();
  }
}
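// The JobTask is posted to V8::GetCurrentPlatform(). It processes the
// dispatcher's incoming queue on a worker thread.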
class MaglevConcurrentDispatcher::JobTask final : public v8::JobTask {
 public:
  explicit JobTask(MaglevConcurrentDispatcher* dispatcher)
      : dispatcher_(dispatcher) {}

  void Run(JobDelegate* delegate) override {
    if (incoming_queue()->IsEmpty() && destruction_queue()->IsEmpty()) {
      return;
    }
    LocalIsolate local_isolate(isolate(), ThreadKind::kBackground);
    while (!delegate->ShouldYield()) {
      std::unique_ptr<MaglevCompilationJob> job_to_destruct;
      std::unique_ptr<MaglevCompilationJob> job;
      if (incoming_queue()->Dequeue(&job)) {
        RCS_SCOPE(&local_isolate,
                  RuntimeCallCounterId::kOptimizeBackgroundMaglev);
        CompilationJob::Status status = job->ExecuteJob(
            local_isolate.runtime_call_stats(), &local_isolate);
        if (status == CompilationJob::SUCCEEDED) {
          outgoing_queue()->Enqueue(std::move(job));
          // Ask the main thread to install the finished code.
          isolate()->stack_guard()->RequestInstallMaglevCode();
        }
      } else if (destruction_queue()->Dequeue(&job)) {
        // Maglev jobs aren't cheap to destruct, so destroy them here, off
        // the main thread.
        TRACE_EVENT_WITH_FLOW0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
                               "V8.MaglevDestructBackground", job->trace_id(),
                               TRACE_EVENT_FLAG_FLOW_IN);
        // Release the job at the end of this loop iteration.
        job_to_destruct = std::move(job);
      } else {
        break;
      }
    }
  }
  size_t GetMaxConcurrency(size_t worker_count) const override {
    size_t num_tasks =
        incoming_queue()->size() + destruction_queue()->size() + worker_count;
    size_t max_threads = v8_flags.concurrent_maglev_max_threads;
    if (max_threads > 0) {
      return std::min(max_threads, num_tasks);
    }
    return num_tasks;
  }

 private:
  Isolate* isolate() const { return dispatcher_->isolate_; }
  QueueT* incoming_queue() const { return &dispatcher_->incoming_queue_; }
  QueueT* outgoing_queue() const { return &dispatcher_->outgoing_queue_; }
  QueueT* destruction_queue() const {
    return &dispatcher_->destruction_queue_;
  }

  MaglevConcurrentDispatcher* const dispatcher_;
};
MaglevConcurrentDispatcher::MaglevConcurrentDispatcher(Isolate* isolate)
    : isolate_(isolate) {
  // ... (enablement checks elided in this excerpt; concurrent compilation is
  // turned off while the relevant trace categories are active) ...
  PrintF("Concurrent maglev has been disabled for tracing.\n");
  // ... (task priority selection elided) ...
  job_handle_ = V8::GetCurrentPlatform()->PostJob(
      priority, std::make_unique<JobTask>(this));
}
MaglevConcurrentDispatcher::~MaglevConcurrentDispatcher() {
  if (is_enabled() && job_handle_->IsValid()) {
    // Cancel the job so the worker no longer touches the queues owned by
    // this dispatcher.
    job_handle_->Cancel();
  }
}
void MaglevConcurrentDispatcher::EnqueueJob(
    std::unique_ptr<MaglevCompilationJob>&& job) {
  incoming_queue_.Enqueue(std::move(job));
  job_handle_->NotifyConcurrencyIncrease();
}
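// Runs on the main thread (typically in response to the stack-guard
// interrupt requested above) and finalizes every job that finished executing
// in the background.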
void MaglevConcurrentDispatcher::FinalizeFinishedJobs() {
  HandleScope handle_scope(isolate_);
  while (!outgoing_queue_.IsEmpty()) {
    std::unique_ptr<MaglevCompilationJob> job;
    outgoing_queue_.Dequeue(&job);
    RCS_SCOPE(isolate_,
              RuntimeCallCounterId::kOptimizeConcurrentFinalizeMaglev);
    Compiler::FinalizeMaglevCompilationJob(job.get(), isolate_);
    if (v8_flags.maglev_destroy_on_background) {
      // Maglev jobs aren't cheap to destruct; hand them back to the worker.
      job->DisposeOnMainThread(isolate_);
      destruction_queue_.Enqueue(std::move(job));
      job_handle_->NotifyConcurrencyIncrease();
    } else {
      TRACE_EVENT_WITH_FLOW0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
                             "V8.MaglevDestruct", job->trace_id(),
                             TRACE_EVENT_FLAG_FLOW_IN);
      job.reset();
    }
  }
}
void MaglevConcurrentDispatcher::AwaitCompileJobs() {
  // Use Join to wait until there are no more queued or running jobs.
  {
    AllowGarbageCollection allow_before_parking;
    isolate_->main_thread_local_isolate()->ExecuteMainThreadWhileParked(
        [this]() { job_handle_->Join(); });
  }
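  // Join() invalidates the handle, so post a fresh JobTask to keep the
  // dispatcher usable afterwards.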
  // ... (task priority selection elided) ...
  job_handle_ = V8::GetCurrentPlatform()->PostJob(
      priority, std::make_unique<JobTask>(this));
  DCHECK(incoming_queue_.IsEmpty());
}
void MaglevConcurrentDispatcher::Flush(BlockingBehavior behavior) {
  while (!incoming_queue_.IsEmpty()) {
    std::unique_ptr<MaglevCompilationJob> job;
    if (incoming_queue_.Dequeue(&job)) {
      Compiler::DisposeMaglevCompilationJob(job.get(), isolate_);
    }
  }
  while (!destruction_queue_.IsEmpty()) {
    std::unique_ptr<MaglevCompilationJob> job;
    destruction_queue_.Dequeue(&job);
  }
  if (behavior == BlockingBehavior::kBlock && job_handle_->IsValid()) {
    AwaitCompileJobs();
  }
  while (!outgoing_queue_.IsEmpty()) {
    std::unique_ptr<MaglevCompilationJob> job;
    if (outgoing_queue_.Dequeue(&job)) {
      Compiler::DisposeMaglevCompilationJob(job.get(), isolate_);
    }
  }
}