v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
runtime-call-stats.cc
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifdef V8_RUNTIME_CALL_STATS

#include "src/logging/runtime-call-stats.h"

#include <iomanip>

#include "src/flags/flags.h"
#include "src/utils/ostreams.h"

namespace v8 {
namespace internal {

base::TimeTicks (*RuntimeCallTimer::Now)() = &base::TimeTicks::Now;

base::TimeTicks RuntimeCallTimer::NowCPUTime() {
  base::ThreadTicks ticks = base::ThreadTicks::Now();
  return base::TimeTicks::FromInternalValue(ticks.ToInternalValue());
}

class RuntimeCallStatEntries {
 public:
  void Print(std::ostream& os) {
    if (total_call_count_ == 0) return;
    std::sort(entries_.rbegin(), entries_.rend());
    os << std::setw(50) << "Runtime Function/C++ Builtin" << std::setw(12)
       << "Time" << std::setw(18) << "Count" << std::endl
       << std::string(88, '=') << std::endl;
    for (Entry& entry : entries_) {
      entry.SetTotal(total_time_, total_call_count_);
      entry.Print(os);
    }
    os << std::string(88, '-') << std::endl;
    Entry("Total", total_time_, total_call_count_).Print(os);
  }

  // By default, the compiler will usually inline this, which results in a
  // large binary size increase: std::vector::push_back expands to a large
  // number of instructions, and this function is invoked repeatedly by macros.
  V8_NOINLINE void Add(RuntimeCallCounter* counter) {
    if (counter->count() == 0) return;
    entries_.push_back(
        Entry(counter->name(), counter->time(), counter->count()));
    total_time_ += counter->time();
    total_call_count_ += counter->count();
  }

 private:
  class Entry {
   public:
    Entry(const char* name, base::TimeDelta time, uint64_t count)
        : name_(name),
          time_(time.InMicroseconds()),
          count_(count),
          time_percent_(100),
          count_percent_(100) {}

    bool operator<(const Entry& other) const {
      if (time_ < other.time_) return true;
      if (time_ > other.time_) return false;
      return count_ < other.count_;
    }

    V8_NOINLINE void Print(std::ostream& os) {
      os.precision(2);
      os << std::fixed << std::setprecision(2);
      os << std::setw(50) << name_;
      os << std::setw(10) << static_cast<double>(time_) / 1000 << "ms ";
      os << std::setw(6) << time_percent_ << "%";
      os << std::setw(10) << count_ << " ";
      os << std::setw(6) << count_percent_ << "%";
      os << std::endl;
    }

    V8_NOINLINE void SetTotal(base::TimeDelta total_time,
                              uint64_t total_count) {
      if (total_time.InMicroseconds() == 0) {
        time_percent_ = 0;
      } else {
        time_percent_ = 100.0 * time_ / total_time.InMicroseconds();
      }
      count_percent_ = 100.0 * count_ / total_count;
    }

   private:
    const char* name_;
    int64_t time_;
    uint64_t count_;
    double time_percent_;
    double count_percent_;
  };

  uint64_t total_call_count_ = 0;
  base::TimeDelta total_time_;
  std::vector<Entry> entries_;
};
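
// For reference, the table Print() emits is shaped roughly like this (column
// widths follow the std::setw calls above; names and values are illustrative):
//
//                     Runtime Function/C++ Builtin        Time             Count
//   =============================================================================
//                                     JS_Execution     11.08ms  90.00%      1998  99.90%
//                                  API_Context_New      1.23ms  10.00%         2   0.10%
//   -----------------------------------------------------------------------------
//                                            Total     12.31ms 100.00%      2000 100.00%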

void RuntimeCallCounter::Reset() {
  count_ = 0;
  time_ = 0;
}

void RuntimeCallCounter::Dump(v8::tracing::TracedValue* value) {
  value->BeginArray(name_);
  value->AppendDouble(count_);
  value->AppendDouble(time_);
  value->EndArray();
}
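
// Dump() serializes a counter as a two-element array keyed by its name, count
// first and time in microseconds second, so the "runtime-call-stats" payload
// of a trace event ends up shaped roughly like this (names and values are
// illustrative):
//
//   { "JS_Execution": [1998, 11080], "API_Context_New": [2, 1230] }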

void RuntimeCallCounter::Add(RuntimeCallCounter* other) {
  count_ += other->count();
  time_ += other->time().InMicroseconds();
}

void RuntimeCallTimer::Snapshot() {
  base::TimeTicks now = Now();
  // Pause only the topmost timer in the timer stack.
  Pause(now);
  // Commit all the timers' elapsed time to the counters.
  RuntimeCallTimer* timer = this;
  while (timer != nullptr) {
    timer->CommitTimeToCounter();
    timer = timer->parent();
  }
  Resume(now);
}
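
// Shape of the structure Snapshot() walks: the timer stack is a singly linked
// list threaded through parent(), topmost timer first, e.g.
//
//   current timer -> [timer C] -> [timer B] -> [timer A] -> nullptr
//
// Pause(now) stops only timer C; the loop then commits the elapsed time of C,
// B and A to their counters; Resume(now) restarts C so measurement continues
// after the snapshot has been taken.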

RuntimeCallStats::RuntimeCallStats(ThreadType thread_type)
    : in_use_(false), thread_type_(thread_type) {
  static const char* const kNames[] = {
#define CALL_BUILTIN_COUNTER(name) "GC_" #name,
      FOR_EACH_GC_COUNTER(CALL_BUILTIN_COUNTER)  //
#undef CALL_BUILTIN_COUNTER
#define CALL_RUNTIME_COUNTER(name) #name,
      FOR_EACH_MANUAL_COUNTER(CALL_RUNTIME_COUNTER)  //
#undef CALL_RUNTIME_COUNTER
#define CALL_RUNTIME_COUNTER(name, nargs, ressize) #name,
      FOR_EACH_INTRINSIC(CALL_RUNTIME_COUNTER)  //
#undef CALL_RUNTIME_COUNTER
#define CALL_BUILTIN_COUNTER(name, Argc) #name,
      BUILTIN_LIST_C(CALL_BUILTIN_COUNTER)  //
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) "API_" #name,
      FOR_EACH_API_COUNTER(CALL_BUILTIN_COUNTER)  //
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) #name,
      FOR_EACH_HANDLER_COUNTER(CALL_BUILTIN_COUNTER)  //
#undef CALL_BUILTIN_COUNTER
#define THREAD_SPECIFIC_COUNTER(name) #name,
      FOR_EACH_THREAD_SPECIFIC_COUNTER(THREAD_SPECIFIC_COUNTER)  //
#undef THREAD_SPECIFIC_COUNTER
  };
  for (int i = 0; i < kNumberOfCounters; i++) {
    this->counters_[i] = RuntimeCallCounter(kNames[i]);
  }
  if (v8_flags.rcs_cpu_time) {
    CHECK(base::ThreadTicks::IsSupported());
    base::ThreadTicks::WaitUntilInitialized();
    RuntimeCallTimer::Now = &RuntimeCallTimer::NowCPUTime;
  }
}
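
// The kNames table above is built with X-macro lists. A minimal sketch of the
// pattern, using a hypothetical list rather than one of V8's real FOR_EACH_*
// macros:
//
//   #define FOR_EACH_EXAMPLE_COUNTER(V) V(Foo) V(Bar)
//   #define EXAMPLE_ENTRY(name) #name,
//   static const char* const kExampleNames[] = {
//       FOR_EACH_EXAMPLE_COUNTER(EXAMPLE_ENTRY)};  // -> {"Foo", "Bar"}
//   #undef EXAMPLE_ENTRY
//
// kNumberOfCounters is the total across all of the lists, which keeps
// counters_[i] and kNames[i] aligned with the RuntimeCallCounterId values
// generated from the same lists.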

namespace {
constexpr RuntimeCallCounterId FirstCounter(RuntimeCallCounterId first, ...) {
  return first;
}

#define THREAD_SPECIFIC_COUNTER(name) RuntimeCallCounterId::k##name,
constexpr RuntimeCallCounterId kFirstThreadVariantCounter =
    FirstCounter(FOR_EACH_THREAD_SPECIFIC_COUNTER(THREAD_SPECIFIC_COUNTER) 0);
#undef THREAD_SPECIFIC_COUNTER

#define THREAD_SPECIFIC_COUNTER(name) +1
constexpr int kThreadVariantCounterCount =
    0 FOR_EACH_THREAD_SPECIFIC_COUNTER(THREAD_SPECIFIC_COUNTER);
#undef THREAD_SPECIFIC_COUNTER

constexpr auto kLastThreadVariantCounter = static_cast<RuntimeCallCounterId>(
    static_cast<int>(kFirstThreadVariantCounter) + kThreadVariantCounterCount -
    1);
}  // namespace

bool RuntimeCallStats::HasThreadSpecificCounterVariants(
    RuntimeCallCounterId id) {
  // Check that it's in the range of the thread-specific variant counters and
  // also that it's one of the background counters.
  return id >= kFirstThreadVariantCounter && id <= kLastThreadVariantCounter;
}

bool RuntimeCallStats::IsBackgroundThreadSpecificVariant(
    RuntimeCallCounterId id) {
  return HasThreadSpecificCounterVariants(id) &&
         (static_cast<int>(id) - static_cast<int>(kFirstThreadVariantCounter)) %
                 2 ==
             1;
}
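
// Thread-specific counters are laid out in adjacent pairs, which is what the
// "% 2 == 1" test above relies on. With offsets taken relative to
// kFirstThreadVariantCounter (purely for illustration):
//
//   offset 0 -> main-thread variant of counter 0   (offset % 2 == 0)
//   offset 1 -> background variant of counter 0    (offset % 2 == 1)
//   offset 2 -> main-thread variant of counter 1
//   offset 3 -> background variant of counter 1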

void RuntimeCallStats::Enter(RuntimeCallTimer* timer,
                             RuntimeCallCounterId counter_id) {
  DCHECK(IsCalledOnTheSameThread());
  RuntimeCallCounter* counter = GetCounter(counter_id);
  DCHECK_NOT_NULL(counter->name());
  timer->Start(counter, current_timer());
  current_timer_.SetValue(timer);
  current_counter_.SetValue(counter);
}

void RuntimeCallStats::Leave(RuntimeCallTimer* timer) {
  DCHECK(IsCalledOnTheSameThread());
  RuntimeCallTimer* stack_top = current_timer();
  if (stack_top == nullptr) return;  // Missing timer is a result of Reset().
  CHECK(stack_top == timer);
  current_timer_.SetValue(timer->Stop());
  RuntimeCallTimer* cur_timer = current_timer();
  current_counter_.SetValue(cur_timer ? cur_timer->counter() : nullptr);
}
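
// Enter() and Leave() are normally driven by a scope object rather than called
// directly. A minimal usage sketch, assuming the RCS_SCOPE convenience macro
// from the corresponding header and an Isolate* with --runtime-call-stats
// enabled (the counter id is illustrative):
//
//   {
//     RCS_SCOPE(isolate, RuntimeCallCounterId::kJS_Execution);
//     // ... work attributed to kJS_Execution; the scope's constructor calls
//     // Enter() to push a RuntimeCallTimer, its destructor calls Leave().
//   }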

void RuntimeCallStats::Add(RuntimeCallStats* other) {
  for (int i = 0; i < kNumberOfCounters; i++) {
    GetCounter(i)->Add(other->GetCounter(i));
  }
}

// static
void RuntimeCallStats::CorrectCurrentCounterId(RuntimeCallCounterId counter_id,
                                               CounterMode mode) {
  DCHECK(IsCalledOnTheSameThread());
  if (mode == RuntimeCallStats::CounterMode::kThreadSpecific) {
    counter_id = CounterIdForThread(counter_id);
  }
  DCHECK(IsCounterAppropriateForThread(counter_id));

  RuntimeCallTimer* timer = current_timer();
  if (timer == nullptr) return;
  RuntimeCallCounter* counter = GetCounter(counter_id);
  timer->set_counter(counter);
  current_counter_.SetValue(counter);
}

bool RuntimeCallStats::IsCalledOnTheSameThread() {
  if (thread_id_.IsValid()) return thread_id_ == ThreadId::Current();
  thread_id_ = ThreadId::Current();
  return true;
}

void RuntimeCallStats::Print() {
  StdoutStream os;
  Print(os);
}

void RuntimeCallStats::Print(std::ostream& os) {
  RuntimeCallStatEntries entries;
  if (current_timer_.Value() != nullptr) {
    current_timer_.Value()->Snapshot();
  }
  for (int i = 0; i < kNumberOfCounters; i++) {
    entries.Add(GetCounter(i));
  }
  entries.Print(os);
}

void RuntimeCallStats::Reset() {
  if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;

  // In tracing we only want to trace the time spent on top-level trace events.
  // If the runtime counter stack is not empty, clear the whole stack and then
  // reset the counters so that we can dump them into top-level trace events
  // accurately.
  while (current_timer_.Value()) {
    current_timer_.SetValue(current_timer_.Value()->Stop());
  }

  for (int i = 0; i < kNumberOfCounters; i++) {
    GetCounter(i)->Reset();
  }

  in_use_ = true;
}

void RuntimeCallStats::Dump(v8::tracing::TracedValue* value) {
  for (int i = 0; i < kNumberOfCounters; i++) {
    if (GetCounter(i)->count() > 0) GetCounter(i)->Dump(value);
  }
  in_use_ = false;
}
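
// Reset() and Dump() bracket a top-level trace event: Reset() unwinds any live
// timer stack and zeroes the counters when the event starts (marking the table
// in use), and Dump() attaches every non-zero counter to the event and clears
// in_use_ when it ends. Schematically (the stats pointer is illustrative):
//
//   stats->Reset();                              // top-level event begins
//   // ... JS and runtime work accumulate counts and times ...
//   auto value = v8::tracing::TracedValue::Create();
//   stats->Dump(value.get());                    // counters attached to event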

WorkerThreadRuntimeCallStats::WorkerThreadRuntimeCallStats()
    : isolate_thread_id_(ThreadId::Current()) {}

WorkerThreadRuntimeCallStats::~WorkerThreadRuntimeCallStats() {
  if (tls_key_) base::Thread::DeleteThreadLocalKey(*tls_key_);
}

base::Thread::LocalStorageKey WorkerThreadRuntimeCallStats::GetKey() {
  base::MutexGuard lock(&mutex_);
  if (!tls_key_) tls_key_ = base::Thread::CreateThreadLocalKey();
  return *tls_key_;
}

RuntimeCallStats* WorkerThreadRuntimeCallStats::NewTable() {
  // Never create a new worker table on the isolate's main thread.
  DCHECK_NE(ThreadId::Current(), isolate_thread_id_);
  std::unique_ptr<RuntimeCallStats> new_table =
      std::make_unique<RuntimeCallStats>(RuntimeCallStats::kWorkerThread);
  RuntimeCallStats* result = new_table.get();

  base::MutexGuard lock(&mutex_);
  tables_.push_back(std::move(new_table));
  return result;
}

void WorkerThreadRuntimeCallStats::AddToMainTable(
    RuntimeCallStats* main_call_stats) {
  base::MutexGuard lock(&mutex_);
  for (auto& worker_stats : tables_) {
    DCHECK_NE(main_call_stats, worker_stats.get());
    main_call_stats->Add(worker_stats.get());
    worker_stats->Reset();
  }
}

WorkerThreadRuntimeCallStatsScope::WorkerThreadRuntimeCallStatsScope(
    WorkerThreadRuntimeCallStats* worker_stats) {
  if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;

  table_ = reinterpret_cast<RuntimeCallStats*>(
      base::Thread::GetThreadLocal(worker_stats->GetKey()));
  if (table_ == nullptr) {
    if (V8_UNLIKELY(!TracingFlags::is_runtime_stats_enabled())) return;
    table_ = worker_stats->NewTable();
    base::Thread::SetThreadLocal(worker_stats->GetKey(), table_);
  }

  if ((TracingFlags::runtime_stats.load(std::memory_order_relaxed) &
       v8::tracing::TracingCategoryObserver::ENABLED_BY_TRACING) != 0) {
    table_->Reset();
  }
}

WorkerThreadRuntimeCallStatsScope::~WorkerThreadRuntimeCallStatsScope() {
  if (V8_LIKELY(table_ == nullptr)) return;

  if ((TracingFlags::runtime_stats.load(std::memory_order_relaxed) &
       v8::tracing::TracingCategoryObserver::ENABLED_BY_TRACING) != 0) {
    auto value = v8::tracing::TracedValue::Create();
    table_->Dump(value.get());
    TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("v8.runtime_stats"),
                         "V8.RuntimeStats", TRACE_EVENT_SCOPE_THREAD,
                         "runtime-call-stats", std::move(value));
  }
}
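
// A minimal usage sketch for the scope above, as seen from a background task
// (the job type is hypothetical):
//
//   void SomeBackgroundJob::Run(WorkerThreadRuntimeCallStats* worker_stats) {
//     WorkerThreadRuntimeCallStatsScope rcs_scope(worker_stats);
//     // rcs_scope.Get() yields this thread's RuntimeCallStats table (nullptr
//     // when runtime stats are disabled); timers opened against it are dumped
//     // into a "V8.RuntimeStats" instant trace event when the scope ends.
//   }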

}  // namespace internal
}  // namespace v8

#endif  // V8_RUNTIME_CALL_STATS