auto it = std::lower_bound(
return it->line_number;

auto it = std::lower_bound(
return it->inlining_id;
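
// A minimal, self-contained sketch of the lookup pattern used by the
// fragments above: position entries are kept sorted by pc_offset and a binary
// search picks the entry covering the queried offset. The names here
// (PositionEntry, FindLineNumber) are illustrative stand-ins, not the V8
// types, and upper_bound is used for brevity instead of the original
// lower_bound call.
#include <algorithm>
#include <vector>

struct PositionEntry {
  int pc_offset;
  int line_number;
  int inlining_id;
  bool operator<(const PositionEntry& other) const {
    return pc_offset < other.pc_offset;
  }
};

int FindLineNumber(const std::vector<PositionEntry>& table, int pc_offset) {
  if (table.empty()) return -1;  // no position info recorded
  // First entry strictly after pc_offset, then step back to the entry that
  // covers it (or stay at the first entry if everything starts later).
  auto it = std::upper_bound(table.begin(), table.end(),
                             PositionEntry{pc_offset, 0, 0});
  if (it != table.begin()) --it;
  return it->line_number;
}
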
base::OS::Print(" - source position table at %p\n", this);
base::OS::Print("  %d --> line_number: %d inlining_id: %d\n",
                pos_info.pc_offset, pos_info.line_number, pos_info.inlining_id);
return kProgramEntry.get();

return kIdleEntry.get();

LogEventListener::CodeTag::kBuiltin,
return kGcEntry.get();

return kUnresolvedEntry.get();

return kRootEntry.get();
static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_)));
static_cast<uint32_t>(reinterpret_cast<uintptr_t>(resource_name_)));
if (this == entry) return true;
std::unordered_set<CodeEntry*, Hasher, Equals> inline_entries,
std::unordered_map<int, std::vector<CodeEntryAndLineNumber>> inline_stacks) {
rare_data_->inline_stacks_ = std::move(inline_stacks);
auto it = rare_data_->inline_stacks_.find(inlining_id);
return it != rare_data_->inline_stacks_.end() ? &it->second : nullptr;
const char* deopt_reason, int deopt_id,
std::vector<CpuProfileDeoptFrame> inlined_frames) {
if (!IsScript(shared->script())) return;

if (shared->optimization_disabled()) {
size_t estimated_size = 0;

for (const auto& inline_entry : rare_data_->inline_entries_) {
  estimated_size += inline_entry->EstimatedSize();
}
estimated_size += rare_data_->inline_entries_.size() *
                  sizeof(decltype(rare_data_->inline_entries_)::value_type);

for (const auto& inline_stack_pair : rare_data_->inline_stacks_) {
  estimated_size += inline_stack_pair.second.size() *
                    sizeof(decltype(inline_stack_pair.second)::value_type);
}
    (sizeof(decltype(rare_data_->inline_stacks_)::key_type) +
     sizeof(decltype(rare_data_->inline_stacks_)::value_type));

rare_data_->deopt_inlined_frames_.capacity() *
    sizeof(decltype(rare_data_->deopt_inlined_frames_)::value_type);

return sizeof(*this) + estimated_size;
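
// Standalone sketch of the estimation pattern above: approximate a
// container's footprint as element count (or capacity) times
// sizeof(value_type), plus any per-element estimates. RareDataLike and
// EstimateSize are hypothetical names used only for illustration.
#include <cstddef>
#include <string>
#include <unordered_map>
#include <vector>

struct RareDataLike {
  std::unordered_map<int, std::vector<int>> inline_stacks;
  std::vector<std::string> frames;
};

size_t EstimateSize(const RareDataLike& data) {
  size_t estimated = sizeof(data);
  for (const auto& pair : data.inline_stacks) {
    estimated += pair.second.size() * sizeof(int);
  }
  estimated += data.inline_stacks.size() *
               (sizeof(decltype(data.inline_stacks)::key_type) +
                sizeof(decltype(data.inline_stacks)::value_type));
  // Vectors may have reserved more than they use, so count capacity.
  estimated += data.frames.capacity() * sizeof(std::string);
  return estimated;
}
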
if (rare_data_->deopt_inlined_frames_.empty()) {

info.stack = rare_data_->deopt_inlined_frames_;
strings.Release(name_);
base::OS::Print("CodeEntry: at %p\n", this);

base::OS::Print(" - name: %s\n", name_);

base::OS::Print(" - script_id: %d\n", script_id_);
base::OS::Print(" - position: %d\n", position_);

base::OS::Print(" - deopt_reason: %s\n", rare_data_->deopt_reason_);
base::OS::Print(" - bailout_reason: %s\n", rare_data_->bailout_reason_);
base::OS::Print(" - deopt_id: %d\n", rare_data_->deopt_id_);

base::OS::Print(" - inline stacks:\n");
for (auto it = rare_data_->inline_stacks_.begin();
     it != rare_data_->inline_stacks_.end(); it++) {
  base::OS::Print("    inlining_id: [%d]\n", it->first);
  for (const auto& e : it->second) {
    base::OS::Print("     %s --> %d\n", e.code_entry->name(),

base::OS::Print(" - inline stacks: (empty)\n");

if (!rare_data_->deopt_inlined_frames_.empty()) {
  base::OS::Print(" - deopt inlined frames:\n");

  base::OS::Print("script_id: %d position: %zu\n", frame.script_id,

base::OS::Print(" - deopt inlined frames: (empty)\n");

base::OS::Print("\n");
case LogEventListener::CodeTag::kEval:
case LogEventListener::CodeTag::kScript:
case LogEventListener::CodeTag::kFunction:

case LogEventListener::CodeTag::kBuiltin:
case LogEventListener::CodeTag::kHandler:
case LogEventListener::CodeTag::kBytecodeHandler:
case LogEventListener::CodeTag::kNativeFunction:
case LogEventListener::CodeTag::kNativeScript:

case LogEventListener::CodeTag::kCallback:

case LogEventListener::CodeTag::kRegExp:
case LogEventListener::CodeTag::kStub:
case LogEventListener::CodeTag::kLength:
return map_entry != children_.end() ? map_entry->second : nullptr;

return map_entry->second;
unsigned int length) const {
if (entries == nullptr || length == 0) return false;

unsigned line_count = static_cast<unsigned>(line_ticks_.size());

if (line_count == 0) return true;
if (length < line_count) return false;

entry->line = p->first;
entry->hit_count = p->second;
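
// Sketch of the export contract used by the GetLineTicks fragment above: copy
// a hash map of (line -> hit count) into a caller-provided array, refusing if
// the buffer is too small. LineTickLike and CopyLineTicks are hypothetical
// stand-ins for v8::CpuProfileNode::LineTick and the V8 method.
#include <unordered_map>

struct LineTickLike {
  int line;
  unsigned hit_count;
};

bool CopyLineTicks(const std::unordered_map<int, int>& line_ticks,
                   LineTickLike* entries, unsigned length) {
  if (entries == nullptr || length == 0) return false;
  unsigned line_count = static_cast<unsigned>(line_ticks.size());
  if (line_count == 0) return true;        // nothing to copy, but not an error
  if (length < line_count) return false;   // caller's buffer is too small
  LineTickLike* entry = entries;
  for (auto p = line_ticks.begin(); p != line_ticks.end(); ++p, ++entry) {
    entry->line = p->first;
    entry->hit_count = static_cast<unsigned>(p->second);
  }
  return true;
}
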
base::OS::Print("%5u %*s %s:%d %d %d #%d", self_ticks_, indent, "",

base::OS::Print("\n");

    "%*s;;; deopted at script_id: %d position: %zu with reason '%s'.\n",
    indent + 10, "", info.stack[0].script_id, info.stack[0].position,
for (size_t index = 1; index < info.stack.size(); ++index) {
  base::OS::Print("%*s;;; Inline point: script_id %d position: %zu.\n",
                  indent + 10, "", info.stack[index].script_id,
                  info.stack[index].position);

base::OS::Print("%*s bailed out due to '%s'\n", indent + 10, "",

child.second->Print(indent + 2);
code_entries_(storage),

int src_line, bool update_stats) {

for (auto it = path.rbegin(); it != path.rend(); ++it) {
  if (*it == nullptr) continue;

node->CollectDeoptInfo(last_entry);

node->IncrementSelfTicks();

node->IncrementLineTicks(src_line);

int src_line, bool update_stats,

for (auto it = path.rbegin(); it != path.rend(); ++it) {
  if (it->code_entry == nullptr) continue;
  last_entry = it->code_entry;
  node = node->FindOrAddChild(it->code_entry, parent_line_number);
  parent_line_number = mode == ProfilingMode::kCallerLineNumbers

node->CollectDeoptInfo(last_entry);

node->IncrementSelfTicks();

node->IncrementLineTicks(src_line);
template <typename Callback>

std::vector<Position> stack;
stack.emplace_back(root_);
while (!stack.empty()) {

  if (current.has_current_child()) {
    callback->BeforeTraversingChild(current.node, current.current_child());
    stack.emplace_back(current.current_child());
  } else {
    callback->AfterAllChildrenTraversed(current.node);
    if (stack.size() > 1) {
      Position& parent = stack[stack.size() - 2];
      callback->AfterChildTraversed(parent.node, current.node);
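
// Self-contained sketch of the explicit-stack depth-first traversal above:
// each stack slot remembers which child of its node is visited next, the
// visitor is notified before descending into a child and again once all
// children of a node are done. Node and Visitor are illustrative types, not
// the V8 ones, and the child index is advanced when a child completes rather
// than when it is pushed.
#include <cstddef>
#include <vector>

struct Node {
  std::vector<Node*> children;
};

struct Visitor {
  void BeforeTraversingChild(Node* /*parent*/, Node* /*child*/) {}
  void AfterAllChildrenTraversed(Node* /*node*/) {}
  void AfterChildTraversed(Node* /*parent*/, Node* /*child*/) {}
};

void TraverseDepthFirst(Node* root, Visitor* visitor) {
  struct Position {
    Node* node;
    size_t child_index;
    bool has_current_child() const {
      return child_index < node->children.size();
    }
    Node* current_child() const { return node->children[child_index]; }
  };
  std::vector<Position> stack;
  stack.push_back({root, 0});
  while (!stack.empty()) {
    Position& current = stack.back();
    if (current.has_current_child()) {
      visitor->BeforeTraversingChild(current.node, current.current_child());
      stack.push_back({current.current_child(), 0});
    } else {
      visitor->AfterAllChildrenTraversed(current.node);
      if (stack.size() > 1) {
        Position& parent = stack[stack.size() - 2];
        visitor->AfterChildTraversed(parent.node, current.node);
        ++parent.child_index;  // move the parent on to its next child
      }
      stack.pop_back();
    }
  }
}
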
std::unique_ptr<DiscardedSamplesDelegate> delegate)

delegate_(std::move(delegate)),
top_down_(profiler->isolate(), profiler->code_entries()),
streaming_next_sample_(0),

"Profile", id_, "data", std::move(value));

if (options_.has_filter_context()) {

if (source_sampling_interval.IsZero()) return true;
const std::optional<uint64_t> trace_id) {

bool is_buffer_full =

bool should_record_sample =

if (should_record_sample) {
  samples_.push_back({top_frame_node, timestamp, src_line, state_tag,
                      embedder_state_tag, trace_id});
} else if (is_buffer_full && delegate_ != nullptr) {

task_runner->PostTask(std::make_unique<CpuProfileMaxSamplesCallbackTask>(

const int kSamplesFlushCount = 100;
const int kNodesFlushCount = 10;
value->BeginDictionary("callFrame");
value->SetString("functionName", entry->name());

value->SetInteger("scriptId", entry->script_id());

value->SetInteger("lineNumber", entry->line_number() - 1);

value->SetInteger("columnNumber", entry->column_number() - 1);

value->EndDictionary();
value->SetInteger("id", node->id());
if (node->parent()) {
  value->SetInteger("parent", node->parent()->id());

if (deopt_reason && deopt_reason[0] && strcmp(deopt_reason, "no reason")) {
  value->SetString("deoptReason", deopt_reason);
if (pending_nodes.empty() && samples_.empty()) return;

value->BeginDictionary("cpuProfile");
if (!pending_nodes.empty()) {
  value->BeginArray("nodes");
  for (auto node : pending_nodes) {
    value->BeginDictionary();
    BuildNodeValue(node, value.get());
    value->EndDictionary();

value->BeginArray("samples");

value->AppendInteger(samples_[i].node->id());

value->BeginDictionary("trace_ids");

value->SetUnsignedInteger(
    std::to_string(samples_[i].trace_id.value()).c_str(),

value->EndDictionary();

value->EndDictionary();

value->BeginArray("timeDeltas");

value->AppendInteger(static_cast<int>(
    (samples_[i].timestamp - lastTimestamp).InMicroseconds()));
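
// Sketch of the "timeDeltas" encoding above: instead of absolute timestamps,
// each streamed sample carries the microseconds elapsed since the previous
// sample, with the first delta measured from a reference start time. Plain
// int64_t microsecond values stand in for base::TimeTicks; BuildTimeDeltas is
// an illustrative helper, not a V8 function.
#include <cstdint>
#include <vector>

std::vector<int> BuildTimeDeltas(const std::vector<int64_t>& timestamps_us,
                                 int64_t start_time_us) {
  std::vector<int> deltas;
  deltas.reserve(timestamps_us.size());
  int64_t last = start_time_us;
  for (int64_t ts : timestamps_us) {
    deltas.push_back(static_cast<int>(ts - last));
    last = ts;
  }
  return deltas;
}
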
bool has_non_zero_lines =
    [](const SampleInfo& sample) { return sample.line != 0; });
if (has_non_zero_lines) {
  value->BeginArray("lines");

"ProfileChunk", id_, "data", std::move(value));

"ProfileChunk", id_, "data", std::move(value));
std::vector<const v8::CpuProfileNode*>* nodes) {
nodes->emplace_back(node);
const int childrenCount = node->GetChildrenCount();
for (int i = 0; i < childrenCount; i++)
  FlattenNodesTree(node->GetChild(i), nodes);

std::vector<v8::CpuProfileNode::LineTick> entries(lineCount);
if (node->GetLineTicks(&entries[0], lineCount)) {
  for (int i = 0; i < lineCount; i++) {
for (int i = 0; i < childrenCount; i++) {

const int childrenCount = node->GetChildrenCount();

const char* deoptReason = node->GetBailoutReason();
if (deoptReason && deoptReason[0] && strcmp(deoptReason, "no reason")) {

unsigned lineCount = node->GetHitLineCount();

std::vector<const v8::CpuProfileNode*> nodes;

for (size_t i = 0; i < nodes.size(); i++) {

uint64_t ts = profile_->sample(i).timestamp.since_origin().InMicroseconds();
base::OS::Print("[Top down]:\n");

for (auto* inline_entry : entry->rare_data_->inline_entries_) {
: code_entries_(storage) {}

if (CodeEntry* entry = slot.second.entry) {

for (auto i = range.first; i != range.second; ++i) {
  if (i->second.entry == entry) {

if (left->first + left->second.size <= start) ++left;

for (; right != code_map_.end() && right->first < end; ++right) {

Address* out_instruction_start) {

if (it == code_map_.begin()) return nullptr;

Address start_address = it->first;
Address end_address = start_address + it->second.size;
CodeEntry* ret = addr < end_address ? it->second.entry : nullptr;
DCHECK(!ret || (addr >= start_address && addr < end_address));
if (ret && out_instruction_start) *out_instruction_start = start_address;
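
// Sketch of the address lookup above: code ranges are keyed by start address,
// so finding the range containing an arbitrary address means taking the
// greatest start address <= addr (upper_bound, then step back) and checking
// that the address falls within that range's size. The map and names here are
// illustrative, not the V8 code map.
#include <cstdint>
#include <map>

struct CodeRangeInfo {
  const char* name;
  unsigned size;
};

const CodeRangeInfo* FindRange(const std::map<uint64_t, CodeRangeInfo>& ranges,
                               uint64_t addr) {
  auto it = ranges.upper_bound(addr);        // first range starting after addr
  if (it == ranges.begin()) return nullptr;  // every range starts after addr
  --it;                             // candidate starting at or before addr
  uint64_t start = it->first;
  uint64_t end = start + it->second.size;
  return addr < end ? &it->second : nullptr;  // inside [start, end)?
}
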
if (from == to) return;

auto range = code_map_.equal_range(from);

size_t distance = std::distance(range.first, range.second);
auto it = range.first;
while (distance--) {

  DCHECK_EQ(info.entry->instruction_start(), from);
  info.entry->set_instruction_start(to);

  DCHECK(from + info.size <= to || to + info.size <= from);

base::OS::Print("%p %5d %s\n", reinterpret_cast<void*>(pair.first),
                pair.second.size, pair.second.entry->name());

size_t map_size = 0;

map_size += sizeof(pair.first) + sizeof(pair.second) +
            pair.second.entry->EstimatedSize();

return sizeof(*this) + map_size;
: profiler_(nullptr), current_profiles_mutex_(), isolate_(isolate) {

std::unique_ptr<DiscardedSamplesDelegate> delegate) {

std::move(delegate));

std::unique_ptr<DiscardedSamplesDelegate> delegate) {

if ((profile->title() != nullptr && title != nullptr &&
     strcmp(profile->title(), title) == 0) ||
    profile->id() == id) {

std::move(delegate));

auto it = std::find_if(
    [=](const std::unique_ptr<CpuProfile>& p) { return id == p->id(); });

(*it)->FinishProfile();
profile = it->get();

if (title == nullptr) return nullptr;

const bool empty_title = title[0] == '\0';

auto it = std::find_if(
    [&](const std::unique_ptr<CpuProfile>& p) {
      return (empty_title ||
              (p->title() != nullptr && strcmp(p->title(), title) == 0));

[&](const std::unique_ptr<CpuProfile>& finished_profile) {
  return finished_profile.get() == profile;
int64_t GreatestCommonDivisor(int64_t a, int64_t b) {
  return b ? GreatestCommonDivisor(b, a % b) : a;
}

int64_t base_sampling_interval_us =

int64_t interval_us = 0;

int64_t profile_interval_us =
    std::max<int64_t>((profile->sampling_interval_us() +
                       base_sampling_interval_us - 1) /
                          base_sampling_interval_us,
                      1) *
    base_sampling_interval_us;
interval_us = GreatestCommonDivisor(interval_us, profile_interval_us);
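
// Sketch of the interval computation above: each profile's requested sampling
// interval is rounded up to a whole number of base intervals (at least one),
// and the greatest common divisor of those rounded intervals gives a tick
// period that can serve every active profile. Plain int64_t microsecond
// values are assumed; CommonSamplingIntervalUs and its fallback to the base
// interval are illustrative choices, not the V8 API.
#include <algorithm>
#include <cstdint>
#include <vector>

int64_t GreatestCommonDivisor(int64_t a, int64_t b) {
  return b ? GreatestCommonDivisor(b, a % b) : a;
}

int64_t CommonSamplingIntervalUs(const std::vector<int64_t>& requested_us,
                                 int64_t base_us) {
  int64_t interval_us = 0;
  for (int64_t wanted_us : requested_us) {
    // Round the request up to a whole number of base intervals (at least one).
    int64_t rounded_us =
        std::max<int64_t>((wanted_us + base_us - 1) / base_us, 1) * base_us;
    interval_us = GreatestCommonDivisor(interval_us, rounded_us);
  }
  return interval_us ? interval_us : base_us;
}
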
Address embedder_native_context_address,
const std::optional<uint64_t> trace_id) {

bool accepts_context = context_filter.Accept(native_context_address);
bool accepts_embedder_context =
    context_filter.Accept(embedder_native_context_address);

timestamp, accepts_context ? path : empty_path, src_line, update_stats,
sampling_interval, state,

profile->context_filter().OnMoveEvent(from, to);