V8 is Google's open source high-performance JavaScript and WebAssembly engine, written in C++.

sampling-heap-profiler.cc
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/profiler/sampling-heap-profiler.h"

#include <stdint.h>

#include <memory>

#include "src/api/api-inl.h"
#include "src/base/ieee754.h"
#include "src/base/utils/random-number-generator.h"
#include "src/execution/frames-inl.h"
#include "src/execution/isolate.h"
#include "src/heap/heap-layout-inl.h"
#include "src/heap/heap.h"
#include "src/profiler/strings-storage.h"

namespace v8 {
namespace internal {

// We sample with a Poisson process, with constant average sampling interval.
// This follows the exponential probability distribution with parameter
// λ = 1/rate where rate is the average number of bytes between samples.
//
// Let u be a uniformly distributed random number between 0 and 1, then
// next_sample = (- ln u) / λ
intptr_t SamplingHeapProfiler::Observer::GetNextSampleInterval(uint64_t rate) {
  if (v8_flags.sampling_heap_profiler_suppress_randomness)
    return static_cast<intptr_t>(rate);
  double u = random_->NextDouble();
  double next = (-base::ieee754::log(u)) * rate;
  return next < kTaggedSize
             ? kTaggedSize
             : (next > INT_MAX ? INT_MAX : static_cast<intptr_t>(next));
}
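
// Worked example: with the default sampling rate of 512 KiB (rate = 524288)
// and u = 0.5, the next sample fires after -ln(0.5) * 524288 ≈ 363,409
// allocated bytes; the result is then clamped to [kTaggedSize, INT_MAX].
// A self-contained sketch of the same draw using only the standard library
// (illustrative, not part of this file):
//
//   #include <cstdint>
//   #include <random>
//
//   // Draws an exponentially distributed interval with mean |rate| bytes,
//   // equivalent to (-ln u) * rate for uniform u in (0, 1).
//   int64_t NextSampleInterval(uint64_t rate, std::mt19937_64& rng) {
//     std::exponential_distribution<double> dist(1.0 / rate);
//     return static_cast<int64_t>(dist(rng));
//   }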

// Samples were collected according to a Poisson process. Since we have not
// recorded all allocations, we must approximate the shape of the underlying
// space of allocations based on the samples we have collected. Given that
// we sample at rate R, the probability that an allocation of size S will be
// sampled is 1-exp(-S/R). This function uses the above probability to
// approximate the true number of allocations with size *size* given that
// *count* samples were observed.
v8::AllocationProfile::Allocation SamplingHeapProfiler::ScaleSample(
    size_t size, unsigned int count) const {
  double scale = 1.0 / (1.0 - std::exp(-static_cast<double>(size) / rate_));
  // Round count instead of truncating.
  return {size, static_cast<unsigned int>(count * scale + 0.5)};
}
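
// Worked example: at rate_ = 512 KiB (524288 bytes), an allocation of size
// 1024 is sampled with probability 1 - exp(-1024 / 524288.0) ≈ 0.00195, so
// scale ≈ 512.25 and one observed sample is reported as roughly 512
// allocations of that size. For sizes much larger than rate_ the sampling
// probability approaches 1 and counts are reported nearly unscaled.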

SamplingHeapProfiler::SamplingHeapProfiler(
    Heap* heap, StringsStorage* names, uint64_t rate, int stack_depth,
    v8::HeapProfiler::SamplingFlags flags)
    : isolate_(Isolate::FromHeap(heap)),
      heap_(heap),
      allocation_observer_(heap_, static_cast<intptr_t>(rate), rate, this,
                           isolate_->random_number_generator()),
      names_(names),
      profile_root_(nullptr, "(root)", v8::UnboundScript::kNoScriptId, 0,
                    next_node_id()),
      stack_depth_(stack_depth),
      rate_(rate),
      flags_(flags) {
  CHECK_GT(rate_, 0u);
  heap_->AddAllocationObserversToAllSpaces(&allocation_observer_,
                                           &allocation_observer_);
}

SamplingHeapProfiler::~SamplingHeapProfiler() {
  heap_->RemoveAllocationObserversFromAllSpaces(&allocation_observer_,
                                                &allocation_observer_);
}
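
// For context: this class backs the sampling heap profiler exposed in
// include/v8-profiler.h. A sketch of how an embedder typically drives it
// through the public API (assumes an entered isolate; error handling
// omitted):
//
//   v8::HeapProfiler* heap_profiler = isolate->GetHeapProfiler();
//   heap_profiler->StartSamplingHeapProfiler(512 * 1024 /* rate, bytes */,
//                                            16 /* max stack depth */);
//   // ... run the workload to be profiled ...
//   std::unique_ptr<v8::AllocationProfile> profile(
//       heap_profiler->GetAllocationProfile());
//   heap_profiler->StopSamplingHeapProfiler();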

void SamplingHeapProfiler::SampleObject(Address soon_object, size_t size) {
  DisallowGarbageCollection no_gc;

  // Check if the area is iterable by confirming that it starts with a map.
  DCHECK(IsMap(HeapObject::FromAddress(soon_object)->map(isolate_), isolate_));

  HandleScope scope(isolate_);
  Tagged<HeapObject> heap_object = HeapObject::FromAddress(soon_object);
  Handle<Object> obj(heap_object, isolate_);

  // Since soon_object can be in code space or trusted space we can't use
  // v8::Utils::ToLocal.
  DCHECK(
      obj.is_null() ||
      (IsSmi(*obj) ||
       (V8_EXTERNAL_CODE_SPACE_BOOL && HeapLayout::InCodeSpace(heap_object)) ||
       HeapLayout::InTrustedSpace(heap_object) || !IsTheHole(*obj)));
  auto loc = Local<v8::Value>::FromSlot(obj.location());

  AllocationNode* node = AddStack();
  node->allocations_[size]++;
  auto sample =
      std::make_unique<Sample>(size, node, loc, this, next_sample_id());
  sample->global.SetWeak(sample.get(), OnWeakCallback,
                         WeakCallbackType::kParameter);
  samples_.emplace(sample.get(), std::move(sample));
}

void SamplingHeapProfiler::OnWeakCallback(
    const WeakCallbackInfo<Sample>& data) {
  Sample* sample = data.GetParameter();
  Heap* heap = reinterpret_cast<Isolate*>(data.GetIsolate())->heap();
  bool is_minor_gc = Heap::IsYoungGenerationCollector(
      heap->current_or_last_garbage_collector());
  bool should_keep_sample =
      is_minor_gc
          ? (sample->profiler->flags_ &
             v8::HeapProfiler::kSamplingIncludeObjectsCollectedByMinorGC)
          : (sample->profiler->flags_ &
             v8::HeapProfiler::kSamplingIncludeObjectsCollectedByMajorGC);
  if (should_keep_sample) {
    sample->global.Reset();
    return;
  }
  AllocationNode* node = sample->owner;
  DCHECK_GT(node->allocations_[sample->size], 0);
  node->allocations_[sample->size]--;
  if (node->allocations_[sample->size] == 0) {
    node->allocations_.erase(sample->size);
    while (node->allocations_.empty() && node->children_.empty() &&
           node->parent_ && !node->parent_->pinned_) {
      AllocationNode* parent = node->parent_;
      AllocationNode::FunctionId id = AllocationNode::function_id(
          node->script_id_, node->script_position_, node->name_);
      parent->children_.erase(id);
      node = parent;
    }
  }
  sample->profiler->samples_.erase(sample);
  // sample is deleted because its unique ptr was erased from samples_.
}

SamplingHeapProfiler::AllocationNode* SamplingHeapProfiler::FindOrAddChildNode(
    AllocationNode* parent, const char* name, int script_id,
    int start_position) {
  AllocationNode::FunctionId id =
      AllocationNode::function_id(script_id, start_position, name);
  AllocationNode* child = parent->FindChildNode(id);
  if (child) {
    DCHECK_EQ(strcmp(child->name_, name), 0);
    return child;
  }
  auto new_child = std::make_unique<AllocationNode>(
      parent, name, script_id, start_position, next_node_id());
  return parent->AddChildNode(id, std::move(new_child));
}

SamplingHeapProfiler::AllocationNode* SamplingHeapProfiler::AddStack() {
  AllocationNode* node = &profile_root_;

  std::vector<Tagged<SharedFunctionInfo>> stack;
  JavaScriptStackFrameIterator frame_it(isolate_);
  int frames_captured = 0;
  bool found_arguments_marker_frames = false;
  while (!frame_it.done() && frames_captured < stack_depth_) {
    JavaScriptFrame* frame = frame_it.frame();
    // If we are materializing objects during deoptimization, inlined
    // closures may not yet be materialized, and this includes the
    // closure on the stack. Skip over any such frames (they'll be
    // in the top frames of the stack). The allocations made in this
    // sensitive moment belong to the formerly optimized frame anyway.
    if (IsJSFunction(frame->unchecked_function())) {
      Tagged<SharedFunctionInfo> shared = frame->function()->shared();
      stack.push_back(shared);
      frames_captured++;
    } else {
      found_arguments_marker_frames = true;
    }
    frame_it.Advance();
  }

  if (frames_captured == 0) {
    const char* name = nullptr;
    switch (isolate_->current_vm_state()) {
      case GC:
        name = "(GC)";
        break;
      case PARSER:
        name = "(PARSER)";
        break;
      case COMPILER:
        name = "(COMPILER)";
        break;
      case BYTECODE_COMPILER:
        name = "(BYTECODE_COMPILER)";
        break;
      case OTHER:
        name = "(V8 API)";
        break;
      case EXTERNAL:
        name = "(EXTERNAL)";
        break;
      case LOGGING:
        name = "(LOGGING)";
        break;
      case IDLE:
        name = "(IDLE)";
        break;
      // Treat atomics wait as a normal JS event; we don't care about the
      // difference for allocations.
      case ATOMICS_WAIT:
      case JS:
        name = "(JS)";
        break;
    }
    return FindOrAddChildNode(node, name, v8::UnboundScript::kNoScriptId, 0);
  }

  // We need to process the stack in reverse order as the top of the stack is
  // the first element in the list.
  for (auto it = stack.rbegin(); it != stack.rend(); ++it) {
    Tagged<SharedFunctionInfo> shared = *it;
    const char* name = this->names()->GetCopy(shared->DebugNameCStr().get());
    int script_id = v8::UnboundScript::kNoScriptId;
    if (IsScript(shared->script())) {
      Tagged<Script> script = Cast<Script>(shared->script());
      script_id = script->id();
    }
    node = FindOrAddChildNode(node, name, script_id, shared->StartPosition());
  }

  if (found_arguments_marker_frames) {
    node =
        FindOrAddChildNode(node, "(deopt)", v8::UnboundScript::kNoScriptId, 0);
  }

  return node;
}

v8::AllocationProfile::Node* SamplingHeapProfiler::TranslateAllocationNode(
    AllocationProfile* profile, SamplingHeapProfiler::AllocationNode* node,
    const std::map<int, Handle<Script>>& scripts) {
  // By pinning the node we make sure its children won't get disposed if
  // a GC kicks in during the tree retrieval.
  node->pinned_ = true;
  Local<v8::String> script_name =
      ToApiHandle<v8::String>(isolate_->factory()->InternalizeUtf8String(""));
  int line = v8::AllocationProfile::kNoLineNumberInfo;
  int column = v8::AllocationProfile::kNoColumnNumberInfo;
  std::vector<v8::AllocationProfile::Allocation> allocations;
  allocations.reserve(node->allocations_.size());
  if (node->script_id_ != v8::UnboundScript::kNoScriptId) {
    auto script_iterator = scripts.find(node->script_id_);
    if (script_iterator != scripts.end()) {
      DirectHandle<Script> script = script_iterator->second;
      if (IsName(script->name())) {
        Tagged<Name> name = Cast<Name>(script->name());
        script_name = ToApiHandle<v8::String>(
            isolate_->factory()->InternalizeUtf8String(names_->GetName(name)));
      }
      Script::PositionInfo pos_info;
      Script::GetPositionInfo(script, node->script_position_, &pos_info);
      line = pos_info.line + 1;
      column = pos_info.column + 1;
    }
  }
  for (auto alloc : node->allocations_) {
    allocations.push_back(ScaleSample(alloc.first, alloc.second));
  }

  profile->nodes_.push_back(v8::AllocationProfile::Node{
      ToApiHandle<v8::String>(
          isolate_->factory()->InternalizeUtf8String(node->name_)),
      script_name, node->script_id_, node->script_position_, line, column,
      node->id_, std::vector<v8::AllocationProfile::Node*>(), allocations});
  v8::AllocationProfile::Node* current = &profile->nodes_.back();
  // The |children_| map may have nodes inserted into it during translation
  // because the translation may allocate strings on the JS heap that have
  // the potential to be sampled. That's ok since map iterators are not
  // invalidated upon std::map insertion.
  for (const auto& it : node->children_) {
    current->children.push_back(
        TranslateAllocationNode(profile, it.second.get(), scripts));
  }
  node->pinned_ = false;
  return current;
}
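
// A minimal illustration of the std::map guarantee relied on above:
// insertion never invalidates existing iterators or references, so the loop
// over |children_| stays valid even if the map grows mid-iteration (only
// erasing an element invalidates iterators to that element):
//
//   std::map<int, int> m{{1, 10}, {3, 30}};
//   auto it = m.find(1);
//   m.emplace(2, 20);  // insert while holding |it|
//   ++it;              // still valid; now points at {2, 20}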

v8::AllocationProfile* SamplingHeapProfiler::GetAllocationProfile() {
  if (flags_ & v8::HeapProfiler::kSamplingForceGC) {
    isolate_->heap()->CollectAllGarbage(
        GCFlag::kNoFlags, GarbageCollectionReason::kSamplingProfiler);
  }
  // To resolve positions to line/column numbers, we will need to look up
  // scripts. Build a map to allow fast mapping from script id to script.
  std::map<int, Handle<Script>> scripts;
  {
    Script::Iterator iterator(isolate_);
    for (Tagged<Script> script = iterator.Next(); !script.is_null();
         script = iterator.Next()) {
      scripts[script->id()] = handle(script, isolate_);
    }
  }
  auto profile = new v8::internal::AllocationProfile();
  TranslateAllocationNode(profile, &profile_root_, scripts);
  profile->samples_ = BuildSamples();

  return profile;
}
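
// The caller owns the returned profile. A sketch of consuming it through the
// public v8::AllocationProfile accessors (heap_profiler is illustrative):
//
//   std::unique_ptr<v8::AllocationProfile> profile(
//       heap_profiler->GetAllocationProfile());
//   v8::AllocationProfile::Node* root = profile->GetRootNode();
//   for (const v8::AllocationProfile::Sample& s : profile->GetSamples()) {
//     // s.node_id ties the sample back to a node in the tree; s.count is
//     // already scaled by ScaleSample.
//   }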

const std::vector<v8::AllocationProfile::Sample>
SamplingHeapProfiler::BuildSamples() const {
  std::vector<v8::AllocationProfile::Sample> samples;
  samples.reserve(samples_.size());
  for (const auto& it : samples_) {
    const Sample* sample = it.second.get();
    samples.emplace_back(v8::AllocationProfile::Sample{
        sample->owner->id_, sample->size, ScaleSample(sample->size, 1).count,
        sample->sample_id});
  }
  return samples;
}

}  // namespace internal
}  // namespace v8