v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
heap-snapshot-generator.cc
Go to the documentation of this file.
1// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
7#include <optional>
8#include <utility>
9
10#include "src/api/api-inl.h"
11#include "src/base/vector.h"
14#include "src/common/globals.h"
15#include "src/debug/debug.h"
19#include "src/heap/heap.h"
20#include "src/heap/safepoint.h"
47
48#if V8_ENABLE_WEBASSEMBLY
54#endif // V8_ENABLE_WEBASSEMBLY
55
56namespace v8::internal {
57
58#ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
59class HeapEntryVerifier {
60 public:
  // Summarizes all references from `obj` via the marking visitor up front,
  // then registers itself as the generator's active verifier so that edges
  // added to the snapshot can be cross-checked against that summary.
  HeapEntryVerifier(HeapSnapshotGenerator* generator, Tagged<HeapObject> obj)
      : generator_(generator),
        primary_object_(obj),
        reference_summary_(
            ReferenceSummary::SummarizeReferencesFrom(generator->heap(), obj)) {
    generator->set_verifier(this);
  }
  // On scope exit, verifies that every reference found by the marking
  // visitor was checked (or deliberately skipped), then unregisters this
  // verifier from the generator.
  ~HeapEntryVerifier() {
    CheckAllReferencesWereChecked();
    generator_->set_verifier(nullptr);
  }
72
73 // Checks that `host` retains `target`, according to the marking visitor. This
74 // allows us to verify, when adding edges to the snapshot, that they
75 // correspond to real retaining relationships.
76 void CheckStrongReference(Tagged<HeapObject> host,
77 Tagged<HeapObject> target) {
78 // All references should be from the current primary object.
80
81 checked_objects_.insert(target);
82
83 // Check whether there is a direct strong reference from host to target.
84 if (reference_summary_.strong_references().find(target) !=
85 reference_summary_.strong_references().end()) {
86 return;
87 }
88
89 // There is no direct reference from host to target, but sometimes heap
90 // snapshots include references that skip one, two, or three objects, such
91 // as __proto__ on a JSObject referring to its Map's prototype, or a
92 // property getter that bypasses the property array and accessor info. At
93 // this point, we must check for those indirect references.
94 for (size_t level = 0; level < 3; ++level) {
95 const UnorderedHeapObjectSet& indirect =
96 GetIndirectStrongReferences(level);
97 if (indirect.find(target) != indirect.end()) {
98 return;
99 }
100 }
101
102 FATAL("Could not find any matching reference");
103 }
104
105 // Checks that `host` has a weak reference to `target`, according to the
106 // marking visitor.
107 void CheckWeakReference(Tagged<HeapObject> host, Tagged<HeapObject> target) {
108 // All references should be from the current primary object.
110
111 checked_objects_.insert(target);
112 CHECK_NE(reference_summary_.weak_references().find(target),
113 reference_summary_.weak_references().end());
114 }
115
116 // Marks the relationship between `host` and `target` as checked, even if the
117 // marking visitor found no such relationship. This is necessary for
118 // ephemerons, where a pair of objects is required to retain the target.
119 // Use this function with care, since it bypasses verification.
120 void MarkReferenceCheckedWithoutChecking(Tagged<HeapObject> host,
121 Tagged<HeapObject> target) {
122 if (host == primary_object_) {
123 checked_objects_.insert(target);
124 }
125 }
126
127 // Verifies that all of the references found by the marking visitor were
128 // checked via a call to CheckStrongReference or CheckWeakReference, or
129 // deliberately skipped via a call to MarkReferenceCheckedWithoutChecking.
130 // This ensures that there aren't retaining relationships found by the marking
131 // visitor which were omitted from the heap snapshot.
132 void CheckAllReferencesWereChecked() {
133 // Both loops below skip pointers to read-only objects, because the heap
134 // snapshot deliberately omits many of those (see IsEssentialObject).
135 // Read-only objects can't ever retain normal read-write objects, so these
136 // are fine to skip.
137 for (Tagged<HeapObject> obj : reference_summary_.strong_references()) {
138 if (!MemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) {
139 CHECK_NE(checked_objects_.find(obj), checked_objects_.end());
140 }
141 }
142 for (Tagged<HeapObject> obj : reference_summary_.weak_references()) {
143 if (!MemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) {
144 CHECK_NE(checked_objects_.find(obj), checked_objects_.end());
145 }
146 }
147 }
148
149 private:
150 using UnorderedHeapObjectSet =
151 std::unordered_set<Tagged<HeapObject>, Object::Hasher,
152 Object::KeyEqualSafe>;
153
154 const UnorderedHeapObjectSet& GetIndirectStrongReferences(size_t level) {
155 CHECK_GE(indirect_strong_references_.size(), level);
156
157 if (indirect_strong_references_.size() == level) {
158 // Expansion is needed.
159 indirect_strong_references_.resize(level + 1);
160 const UnorderedHeapObjectSet& previous =
161 level == 0 ? reference_summary_.strong_references()
162 : indirect_strong_references_[level - 1];
163 for (Tagged<HeapObject> obj : previous) {
164 if (MemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) {
165 // Marking visitors don't expect to visit objects in read-only space,
166 // and will fail DCHECKs if they are used on those objects. Read-only
167 // objects can never retain anything outside read-only space, so
168 // skipping those objects doesn't weaken verification.
169 continue;
170 }
171
172 // Indirect references should only bypass internal structures, not
173 // user-visible objects or contexts.
174 if (IsJSReceiver(obj) || IsString(obj) || IsContext(obj)) {
175 continue;
176 }
177
178 ReferenceSummary summary =
179 ReferenceSummary::SummarizeReferencesFrom(generator_->heap(), obj);
180 indirect_strong_references_[level].insert(
181 summary.strong_references().begin(),
182 summary.strong_references().end());
183 }
184 }
185
186 return indirect_strong_references_[level];
187 }
188
190 HeapSnapshotGenerator* generator_;
191 Tagged<HeapObject> primary_object_;
192
193 // All objects referred to by primary_object_, according to a marking visitor.
194 ReferenceSummary reference_summary_;
195
196 // Objects that have been checked via a call to CheckStrongReference or
197 // CheckWeakReference, or deliberately skipped via a call to
198 // MarkReferenceCheckedWithoutChecking.
199 std::unordered_set<Tagged<HeapObject>, Object::Hasher, Object::KeyEqualSafe>
200 checked_objects_;
201
202 // Objects transitively retained by the primary object. The objects in the set
203 // at index i are retained by the primary object via a chain of i+1
204 // intermediate objects.
205 std::vector<UnorderedHeapObjectSet> indirect_strong_references_;
206};
207#endif
208
209HeapGraphEdge::HeapGraphEdge(Type type, const char* name, HeapEntry* from,
210 HeapEntry* to)
211 : bit_field_(TypeField::encode(type) |
212 FromIndexField::encode(from->index())),
213 to_entry_(to),
214 name_(name) {
215 DCHECK(type == kContextVariable || type == kProperty || type == kInternal ||
216 type == kShortcut || type == kWeak);
217}
218
220 HeapEntry* to)
221 : bit_field_(TypeField::encode(type) |
222 FromIndexField::encode(from->index())),
223 to_entry_(to),
224 index_(index) {
225 DCHECK(type == kElement || type == kHidden);
226}
227
// Constructs a snapshot node. `index` is this entry's position inside the
// snapshot's entry list (must be non-negative), and `trace_node_id` links
// the entry to an allocation trace node (0 when no trace is associated).
HeapEntry::HeapEntry(HeapSnapshot* snapshot, int index, Type type,
                     const char* name, SnapshotObjectId id, size_t self_size,
                     unsigned trace_node_id)
    : type_(static_cast<unsigned>(type)),
      index_(index),
      children_count_(0),
      self_size_(self_size),
      snapshot_(snapshot),
      name_(name),
      id_(id),
      trace_node_id_(trace_node_id) {
  DCHECK_GE(index, 0);
}
241
243 HeapSnapshotGenerator* generator,
244 ReferenceVerification verification) {
245#ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
246 if (verification == kOffHeapPointer || generator->verifier() == nullptr) {
247 // Off-heap pointers are outside the scope of this verification; we just
248 // trust the embedder to provide accurate data. If the verifier is null,
249 // then verification is disabled.
250 return;
251 }
252 if (verification == kCustomWeakPointer) {
253 // The caller declared that this is a weak pointer ignored by the marking
254 // visitor. All we can verify at this point is that the edge type declares
255 // it to be weak.
257 return;
258 }
259 Address from_address =
260 reinterpret_cast<Address>(generator->FindHeapThingForHeapEntry(this));
261 Address to_address =
262 reinterpret_cast<Address>(generator->FindHeapThingForHeapEntry(entry));
263 if (from_address == kNullAddress || to_address == kNullAddress) {
264 // One of these entries doesn't correspond to a real heap object.
265 // Verification is not possible.
266 return;
267 }
268 Tagged<HeapObject> from_obj = Cast<HeapObject>(Tagged<Object>(from_address));
270 if (MemoryChunk::FromHeapObject(to_obj)->InReadOnlySpace()) {
271 // We can't verify pointers into read-only space, because marking visitors
272 // might not mark those. For example, every Map has a pointer to the
273 // MetaMap, but marking visitors don't bother with following that link.
274 // Read-only objects are immortal and can never point to things outside of
275 // read-only space, so ignoring these objects is safe from the perspective
276 // of ensuring accurate retaining paths for normal read-write objects.
277 // Therefore, do nothing.
278 } else if (verification == kEphemeron) {
279 // Ephemerons can't be verified because they aren't marked directly by the
280 // marking visitor.
281 generator->verifier()->MarkReferenceCheckedWithoutChecking(from_obj,
282 to_obj);
283 } else if (type == HeapGraphEdge::kWeak) {
284 generator->verifier()->CheckWeakReference(from_obj, to_obj);
285 } else {
286 generator->verifier()->CheckStrongReference(from_obj, to_obj);
287 }
288#endif
289}
290
292 HeapEntry* entry,
293 HeapSnapshotGenerator* generator,
294 ReferenceVerification verification) {
296 snapshot_->edges().emplace_back(type, name, this, entry);
297 VerifyReference(type, entry, generator, verification);
298}
299
301 HeapEntry* entry,
302 HeapSnapshotGenerator* generator,
303 ReferenceVerification verification) {
305 snapshot_->edges().emplace_back(type, index, this, entry);
306 VerifyReference(type, entry, generator, verification);
307}
308
310 const char* description,
311 HeapEntry* child,
312 StringsStorage* names,
313 HeapSnapshotGenerator* generator,
314 ReferenceVerification verification) {
315 int index = children_count_ + 1;
316 const char* name = description
317 ? names->GetFormatted("%d / %s", index, description)
318 : names->GetName(index);
319 SetNamedReference(type, name, child, generator, verification);
320}
321
322void HeapEntry::Print(const char* prefix, const char* edge_name, int max_depth,
323 int indent) const {
324 static_assert(sizeof(unsigned) == sizeof(id()));
325 base::OS::Print("%6zu @%6u %*c %s%s: ", self_size(), id(), indent, ' ',
326 prefix, edge_name);
327 if (type() != kString) {
328 base::OS::Print("%s %.40s\n", TypeAsString(), name_);
329 } else {
330 base::OS::Print("\"");
331 const char* c = name_;
332 while (*c && (c - name_) <= 40) {
333 if (*c != '\n')
334 base::OS::Print("%c", *c);
335 else
336 base::OS::Print("\\n");
337 ++c;
338 }
339 base::OS::Print("\"\n");
340 }
341 if (--max_depth == 0) return;
342 for (auto i = children_begin(); i != children_end(); ++i) {
343 HeapGraphEdge& edge = **i;
344 const char* edge_prefix = "";
346 edge_name = index.begin();
347 switch (edge.type()) {
349 edge_prefix = "#";
350 edge_name = edge.name();
351 break;
353 SNPrintF(index, "%d", edge.index());
354 break;
356 edge_prefix = "$";
357 edge_name = edge.name();
358 break;
360 edge_name = edge.name();
361 break;
363 edge_prefix = "$";
364 SNPrintF(index, "%d", edge.index());
365 break;
367 edge_prefix = "^";
368 edge_name = edge.name();
369 break;
371 edge_prefix = "w";
372 edge_name = edge.name();
373 break;
374 default:
375 SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
376 }
377 edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
378 }
379}
380
381const char* HeapEntry::TypeAsString() const {
382 switch (type()) {
383 case kHidden:
384 return "/hidden/";
385 case kObject:
386 return "/object/";
387 case kClosure:
388 return "/closure/";
389 case kString:
390 return "/string/";
391 case kCode:
392 return "/code/";
393 case kArray:
394 return "/array/";
395 case kRegExp:
396 return "/regexp/";
397 case kHeapNumber:
398 return "/number/";
399 case kNative:
400 return "/native/";
401 case kSynthetic:
402 return "/synthetic/";
403 case kConsString:
404 return "/concatenated string/";
405 case kSlicedString:
406 return "/sliced string/";
407 case kSymbol:
408 return "/symbol/";
409 case kBigInt:
410 return "/bigint/";
411 case kObjectShape:
412 return "/object shape/";
413 default:
414 return "???";
415 }
416}
417
420 v8::HeapProfiler::NumericsMode numerics_mode)
421 : profiler_(profiler),
422 snapshot_mode_(snapshot_mode),
423 numerics_mode_(numerics_mode) {
424 // It is very important to keep objects that form a heap snapshot
425 // as small as possible. Check assumptions about data structure sizes.
426 static_assert(kSystemPointerSize != 4 || sizeof(HeapGraphEdge) == 12);
427 static_assert(kSystemPointerSize != 8 || sizeof(HeapGraphEdge) == 24);
428 static_assert(kSystemPointerSize != 4 || sizeof(HeapEntry) == 32);
429#if V8_CC_MSVC
430 static_assert(kSystemPointerSize != 8 || sizeof(HeapEntry) == 48);
431#else // !V8_CC_MSVC
432 static_assert(kSystemPointerSize != 8 || sizeof(HeapEntry) == 40);
433#endif // !V8_CC_MSVC
434 memset(&gc_subroot_entries_, 0, sizeof(gc_subroot_entries_));
435}
436
438
442
453
456 DCHECK(entries_.empty()); // Root entry must be the first one.
459 DCHECK_EQ(1u, entries_.size());
460 DCHECK_EQ(root_entry_, &entries_.front());
461}
462
468
474
475void HeapSnapshot::AddLocation(HeapEntry* entry, int scriptId, int line,
476 int col) {
477 locations_.emplace_back(entry->index(), scriptId, line, col);
478}
479
481 SnapshotObjectId id, size_t size,
482 unsigned trace_node_id) {
484 entries_.emplace_back(this, static_cast<int>(entries_.size()), type, name, id,
485 size, trace_node_id);
486 return &entries_.back();
487}
488
490 String::LineEndsVector&& line_ends) {
491 scripts_line_ends_map_.emplace(script_id, std::move(line_ends));
492}
493
495 DCHECK(scripts_line_ends_map_.find(script_id) !=
497 return scripts_line_ends_map_[script_id];
498}
499
501 DCHECK(children().empty());
502 int children_index = 0;
503 for (HeapEntry& entry : entries()) {
504 children_index = entry.set_children_index(children_index);
505 }
506 DCHECK_EQ(edges().size(), static_cast<size_t>(children_index));
507 children().resize(edges().size());
508 for (HeapGraphEdge& edge : edges()) {
509 edge.from()->add_child(&edge);
510 }
511}
512
514 if (entries_by_id_cache_.empty()) {
516 entries_by_id_cache_.reserve(entries_.size());
517 for (HeapEntry& entry : entries_) {
518 entries_by_id_cache_.emplace(entry.id(), &entry);
519 }
520 }
521 auto it = entries_by_id_cache_.find(id);
522 return it != entries_by_id_cache_.end() ? it->second : nullptr;
523}
524
525void HeapSnapshot::Print(int max_depth) { root()->Print("", "", max_depth, 0); }
526
527// We split IDs on evens for embedder objects (see
528// HeapObjectsMap::GenerateId) and odds for native objects.
538
539namespace {
540
541const v8::String::ExternalStringResourceBase* GetExternalStringResource(
542 Tagged<ExternalString> object, PtrComprCageBase cage_base) {
543 if (IsExternalOneByteString(object, cage_base)) {
544 return Cast<ExternalOneByteString>(object)->resource();
545 }
546 return Cast<ExternalTwoByteString>(object)->resource();
547}
548
549int ExternalStringSizeForSnapshot(Tagged<ExternalString> object,
550 PtrComprCageBase cage_base) {
552 GetExternalStringResource(object, cage_base);
553 size_t external_string_size = resource ? resource->EstimateMemoryUsage() : 0;
554 if (external_string_size ==
556 return object->ExternalPayloadSize();
557 }
558 DCHECK_LE(external_string_size, std::numeric_limits<int>::max());
559 return base::saturated_cast<int>(external_string_size);
560}
561
562int SizeForSnapshot(Tagged<HeapObject> object, PtrComprCageBase cage_base) {
563 // Since read-only space can be shared among Isolates, and JS developers have
564 // no control over the size of read-only space, we represent read-only objects
565 // as having zero size.
566 if (HeapLayout::InReadOnlySpace(object)) return 0;
567 int size = object->Size(cage_base);
568 if (IsExternalString(object, cage_base)) {
569 size +=
570 ExternalStringSizeForSnapshot(Cast<ExternalString>(object), cage_base);
571 }
572 return size;
573}
574
575} // namespace
576
578 : next_id_(kFirstAvailableObjectId),
579 next_native_id_(kFirstAvailableNativeId),
580 heap_(heap) {
581 // The dummy element at zero index is needed as entries_map_ cannot hold
582 // an entry with zero value. Otherwise it's impossible to tell if
583 // LookupOrInsert has added a new item or just returning exisiting one
584 // having the value of zero.
585 entries_.emplace_back(0, kNullAddress, 0, true);
586}
587
588bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
590 DCHECK_NE(kNullAddress, from);
591 if (from == to) return false;
592 void* from_value = entries_map_.Remove(reinterpret_cast<void*>(from),
593 ComputeAddressHash(from));
594 if (from_value == nullptr) {
595 // It may occur that some untracked object moves to an address X and there
596 // is a tracked object at that address. In this case we should remove the
597 // entry as we know that the object has died.
598 void* to_value = entries_map_.Remove(reinterpret_cast<void*>(to),
600 if (to_value != nullptr) {
601 int to_entry_info_index =
602 static_cast<int>(reinterpret_cast<intptr_t>(to_value));
603 entries_.at(to_entry_info_index).addr = kNullAddress;
604 }
605 } else {
607 reinterpret_cast<void*>(to), ComputeAddressHash(to));
608 if (to_entry->value != nullptr) {
609 // We found the existing entry with to address for an old object.
610 // Without this operation we will have two EntryInfo's with the same
611 // value in addr field. It is bad because later at RemoveDeadEntries
612 // one of this entry will be removed with the corresponding entries_map_
613 // entry.
614 int to_entry_info_index =
615 static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
616 entries_.at(to_entry_info_index).addr = kNullAddress;
617 }
618 int from_entry_info_index =
619 static_cast<int>(reinterpret_cast<intptr_t>(from_value));
620 entries_.at(from_entry_info_index).addr = to;
621 // Size of an object can change during its life, so to keep information
622 // about the object in entries_ consistent, we have to adjust size when the
623 // object is migrated.
624 if (v8_flags.heap_profiler_trace_objects) {
625 PrintF("Move object from %p to %p old size %6d new size %6d\n",
626 reinterpret_cast<void*>(from), reinterpret_cast<void*>(to),
627 entries_.at(from_entry_info_index).size, object_size);
628 }
629 entries_.at(from_entry_info_index).size = object_size;
630 to_entry->value = from_value;
631 }
632 return from_value != nullptr;
633}
634
638
641 reinterpret_cast<void*>(addr), ComputeAddressHash(addr));
642 if (entry == nullptr) return v8::HeapProfiler::kUnknownObjectId;
643 int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
644 EntryInfo& entry_info = entries_.at(entry_index);
645 DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy());
646 return entry_info.id;
647}
648
650 Address addr, unsigned int size, MarkEntryAccessed accessed,
651 IsNativeObject is_native_object) {
652 bool accessed_bool = accessed == MarkEntryAccessed::kYes;
653 bool is_native_object_bool = is_native_object == IsNativeObject::kYes;
654 DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy());
656 reinterpret_cast<void*>(addr), ComputeAddressHash(addr));
657 if (entry->value != nullptr) {
658 int entry_index =
659 static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
660 EntryInfo& entry_info = entries_.at(entry_index);
661 entry_info.accessed = accessed_bool;
662 if (v8_flags.heap_profiler_trace_objects) {
663 PrintF("Update object size : %p with old size %d and new size %d\n",
664 reinterpret_cast<void*>(addr), entry_info.size, size);
665 }
666 entry_info.size = size;
667 DCHECK_EQ(is_native_object_bool, entry_info.id % 2 == 0);
668 return entry_info.id;
669 }
670 entry->value = reinterpret_cast<void*>(entries_.size());
672 is_native_object_bool ? get_next_native_id() : get_next_id();
673 entries_.push_back(EntryInfo(id, addr, size, accessed_bool));
674 DCHECK(static_cast<uint32_t>(entries_.size()) > entries_map_.occupancy());
675 return id;
676}
677
684
686 Address canonical_addr) {
687 base::HashMap::Entry* entry =
688 entries_map_.Lookup(reinterpret_cast<void*>(canonical_addr),
689 ComputeAddressHash(canonical_addr));
691 {addr, reinterpret_cast<size_t>(entry->value)});
692 if (!result.second) {
693 result.first->second = reinterpret_cast<size_t>(entry->value);
694 }
695}
696
698
700 if (v8_flags.heap_profiler_trace_objects) {
701 PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
703 }
706 PtrComprCageBase cage_base(heap_->isolate());
708 for (Tagged<HeapObject> obj = iterator.Next(); !obj.is_null();
709 obj = iterator.Next()) {
710 FindOrAddEntry(obj.address(), SizeForSnapshot(obj, cage_base));
711 if (v8_flags.heap_profiler_trace_objects) {
712 int object_size = obj->Size(cage_base);
713 PrintF("Update object : %p %6d. Next address is %p\n",
714 reinterpret_cast<void*>(obj.address()), object_size,
715 reinterpret_cast<void*>(obj.address() + object_size));
716 }
717 }
719 if (v8_flags.heap_profiler_trace_objects) {
720 PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
722 }
723}
724
726 int64_t* timestamp_us) {
728 time_intervals_.emplace_back(next_id_);
729 int prefered_chunk_size = stream->GetChunkSize();
730 std::vector<v8::HeapStatsUpdate> stats_buffer;
731 DCHECK(!entries_.empty());
732 EntryInfo* entry_info = &entries_.front();
733 EntryInfo* end_entry_info = &entries_.back() + 1;
734 for (size_t time_interval_index = 0;
735 time_interval_index < time_intervals_.size(); ++time_interval_index) {
736 TimeInterval& time_interval = time_intervals_[time_interval_index];
737 SnapshotObjectId time_interval_id = time_interval.id;
738 uint32_t entries_size = 0;
739 EntryInfo* start_entry_info = entry_info;
740 while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
741 entries_size += entry_info->size;
742 ++entry_info;
743 }
744 uint32_t entries_count =
745 static_cast<uint32_t>(entry_info - start_entry_info);
746 if (time_interval.count != entries_count ||
747 time_interval.size != entries_size) {
748 stats_buffer.emplace_back(static_cast<uint32_t>(time_interval_index),
749 time_interval.count = entries_count,
750 time_interval.size = entries_size);
751 if (static_cast<int>(stats_buffer.size()) >= prefered_chunk_size) {
753 &stats_buffer.front(), static_cast<int>(stats_buffer.size()));
755 stats_buffer.clear();
756 }
757 }
758 }
759 DCHECK(entry_info == end_entry_info);
760 if (!stats_buffer.empty()) {
762 &stats_buffer.front(), static_cast<int>(stats_buffer.size()));
764 }
765 stream->EndOfStream();
766 if (timestamp_us) {
767 *timestamp_us =
768 (time_intervals_.back().timestamp - time_intervals_.front().timestamp)
769 .InMicroseconds();
770 }
771 return last_assigned_id();
772}
773
775 DCHECK(entries_.size() > 0 && entries_.at(0).id == 0 &&
776 entries_.at(0).addr == kNullAddress);
777
778 // Build up temporary reverse map.
779 std::unordered_map<size_t, NativeObject> reverse_merged_native_entries_map;
780 for (const auto& it : merged_native_entries_map_) {
781 auto result =
782 reverse_merged_native_entries_map.emplace(it.second, it.first);
783 DCHECK(result.second);
784 USE(result);
785 }
786
787 size_t first_free_entry = 1;
788 for (size_t i = 1; i < entries_.size(); ++i) {
789 EntryInfo& entry_info = entries_.at(i);
790 auto merged_reverse_it = reverse_merged_native_entries_map.find(i);
791 if (entry_info.accessed) {
792 if (first_free_entry != i) {
793 entries_.at(first_free_entry) = entry_info;
794 }
795 entries_.at(first_free_entry).accessed = false;
796 base::HashMap::Entry* entry =
797 entries_map_.Lookup(reinterpret_cast<void*>(entry_info.addr),
798 ComputeAddressHash(entry_info.addr));
799 DCHECK(entry);
800 entry->value = reinterpret_cast<void*>(first_free_entry);
801 if (merged_reverse_it != reverse_merged_native_entries_map.end()) {
802 auto it = merged_native_entries_map_.find(merged_reverse_it->second);
804 it->second = first_free_entry;
805 }
806 ++first_free_entry;
807 } else {
808 if (entry_info.addr) {
809 entries_map_.Remove(reinterpret_cast<void*>(entry_info.addr),
810 ComputeAddressHash(entry_info.addr));
811 if (merged_reverse_it != reverse_merged_native_entries_map.end()) {
812 merged_native_entries_map_.erase(merged_reverse_it->second);
813 }
814 }
815 }
816 }
817 entries_.erase(entries_.begin() + first_free_entry, entries_.end());
818
819 DCHECK(static_cast<uint32_t>(entries_.size()) - 1 ==
821}
822
826 : heap_(snapshot->profiler()->heap_object_map()->heap()),
827 snapshot_(snapshot),
828 names_(snapshot_->profiler()->names()),
829 heap_object_map_(snapshot_->profiler()->heap_object_map()),
830 progress_(progress),
831 generator_(nullptr),
832 global_object_name_resolver_(resolver) {}
833
838
841 HeapEntry* entry =
842 snapshot_->AddEntry(HeapEntry::kHeapNumber, "smi number", id, 0, 0);
843 // XXX: Smis do not appear in CombinedHeapObjectIterator, so we need to
844 // extract the references here
845 ExtractNumberReference(entry, smi);
846 return entry;
847}
848
850 Tagged<HeapObject> object) {
852
853 if (IsJSFunction(object)) {
854 return Cast<JSFunction>(object);
855 } else if (IsJSGeneratorObject(object)) {
857 return gen->function();
858 } else if (IsJSObject(object)) {
859 Tagged<JSObject> obj = Cast<JSObject>(object);
860 Tagged<JSFunction> maybe_constructor =
862
863 return maybe_constructor;
864 }
865
866 return JSFunction();
867}
868
870 Tagged<HeapObject> object) {
873 if (!func.is_null()) {
874 ExtractLocationForJSFunction(entry, func);
875 }
876}
877
879 Tagged<JSFunction> func) {
880 if (!IsScript(func->shared()->script())) return;
881 Tagged<Script> script = Cast<Script>(func->shared()->script());
882 int scriptId = script->id();
883 int start = func->shared()->StartPosition();
885 if (script->has_line_ends()) {
886 script->GetPositionInfo(start, &info);
887 } else {
888 script->GetPositionInfoWithLineEnds(
889 start, &info, snapshot_->GetScriptLineEnds(script->id()));
890 }
891 snapshot_->AddLocation(entry, scriptId, info.line, info.column);
892}
893
895 PtrComprCageBase cage_base(isolate());
896 InstanceType instance_type = object->map(cage_base)->instance_type();
897 if (InstanceTypeChecker::IsJSObject(instance_type)) {
898 if (InstanceTypeChecker::IsJSFunction(instance_type)) {
900 Tagged<SharedFunctionInfo> shared = func->shared();
901 const char* name = names_->GetName(shared->Name());
902 return AddEntry(object, HeapEntry::kClosure, name);
903
904 } else if (InstanceTypeChecker::IsJSBoundFunction(instance_type)) {
905 return AddEntry(object, HeapEntry::kClosure, "native_bind");
906 }
907 if (InstanceTypeChecker::IsJSRegExp(instance_type)) {
908 Tagged<JSRegExp> re = Cast<JSRegExp>(object);
909 return AddEntry(object, HeapEntry::kRegExp,
910 names_->GetName(re->source()));
911 }
912 // TODO(v8:12674) Fix and run full gcmole.
913 DisableGCMole no_gcmole;
914 const char* name = names_->GetName(
916 if (InstanceTypeChecker::IsJSGlobalObject(instance_type)) {
917 auto it = global_object_tag_map_.find(Cast<JSGlobalObject>(object));
918 if (it != global_object_tag_map_.end()) {
919 name = names_->GetFormatted("%s / %s", name, it->second);
920 }
921 }
922 return AddEntry(object, HeapEntry::kObject, name);
923
924 } else if (InstanceTypeChecker::IsString(instance_type)) {
925 Tagged<String> string = Cast<String>(object);
926 if (IsConsString(string, cage_base)) {
927 return AddEntry(object, HeapEntry::kConsString, "(concatenated string)");
928 } else if (IsSlicedString(string, cage_base)) {
929 return AddEntry(object, HeapEntry::kSlicedString, "(sliced string)");
930 } else {
931 return AddEntry(object, HeapEntry::kString,
932 names_->GetName(Cast<String>(object)));
933 }
934 } else if (InstanceTypeChecker::IsSymbol(instance_type)) {
935 if (Cast<Symbol>(object)->is_private())
936 return AddEntry(object, HeapEntry::kHidden, "private symbol");
937 else
938 return AddEntry(object, HeapEntry::kSymbol, "symbol");
939
940 } else if (InstanceTypeChecker::IsBigInt(instance_type)) {
941 return AddEntry(object, HeapEntry::kBigInt, "bigint");
942
943 } else if (InstanceTypeChecker::IsInstructionStream(instance_type) ||
944 InstanceTypeChecker::IsCode(instance_type)) {
945 return AddEntry(object, HeapEntry::kCode, "");
946
947 } else if (InstanceTypeChecker::IsSharedFunctionInfo(instance_type)) {
948 Tagged<String> name = Cast<SharedFunctionInfo>(object)->Name();
949 return AddEntry(object, HeapEntry::kCode, names_->GetName(name));
950
951 } else if (InstanceTypeChecker::IsScript(instance_type)) {
952 Tagged<Object> name = Cast<Script>(object)->name();
953 return AddEntry(object, HeapEntry::kCode,
954 IsString(name) ? names_->GetName(Cast<String>(name)) : "");
955
956 } else if (InstanceTypeChecker::IsNativeContext(instance_type)) {
957 return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
958
959 } else if (InstanceTypeChecker::IsContext(instance_type)) {
960 return AddEntry(object, HeapEntry::kObject, "system / Context");
961
962 } else if (InstanceTypeChecker::IsHeapNumber(instance_type)) {
963 return AddEntry(object, HeapEntry::kHeapNumber, "heap number");
964 } else if (InstanceTypeChecker::IsOddball(instance_type)) {
965 Tagged<String> name = Cast<Oddball>(object)->to_string();
966 return AddEntry(object, HeapEntry::kHidden, names_->GetName(name));
967 }
968#if V8_ENABLE_WEBASSEMBLY
969 if (InstanceTypeChecker::IsWasmObject(instance_type)) {
970 Tagged<WasmTypeInfo> info = object->map()->wasm_type_info();
973 info->type_index());
974 sb << " (wasm)" << '\0';
975 const char* name = names_->GetCopy(sb.start());
976 return AddEntry(object, HeapEntry::kObject, name);
977 }
978#endif // V8_ENABLE_WEBASSEMBLY
979
980 if (InstanceTypeChecker::IsForeign(instance_type)) {
981 Tagged<Foreign> foreign = Cast<Foreign>(object);
982 ExternalPointerTag tag = foreign->GetTag();
983
984 size_t size = SizeForSnapshot(object, cage_base);
985 const char* name = nullptr;
986 // TODO(saelo): consider creating a global mapping of ExternalPointerTags
987 // for Managed objects to their name if we need this anywhere else.
988 switch (tag) {
990 name = "system / Managed<Unknown>";
991 break;
992#if V8_ENABLE_WEBASSEMBLY
994 name = "system / Managed<WasmStreaming>";
995 break;
996 case kWasmFuncDataTag:
997 name = "system / Managed<wasm::FuncData>";
998 break;
1000 name = "system / Managed<wasm::ManagedData>";
1001 break;
1003 size = Cast<Managed<wasm::NativeModule>>(foreign)
1004 ->raw()
1005 ->EstimateCurrentMemoryConsumption();
1006 name = "system / Managed<wasm::NativeModule>";
1007 break;
1008#endif // V8_ENABLE_WEBASSEMBLY
1010 name = "system / Managed<icu::BreakIterator>";
1011 break;
1013 name = "system / Managed<icu::UnicodeString>";
1014 break;
1016 name = "system / Managed<icu::ListFormatter>";
1017 break;
1018 case kIcuLocaleTag:
1019 name = "system / Managed<icu::Locale>";
1020 break;
1022 name = "system / Managed<icu::SimpleDateFormat>";
1023 break;
1025 name = "system / Managed<icu::DateIntervalFormat>";
1026 break;
1028 name = "system / Managed<icu::RelativeDateTimeFormatter>";
1029 break;
1031 name = "system / Managed<icu::LocalizedNumberFormatter>";
1032 break;
1033 case kIcuPluralRulesTag:
1034 name = "system / Managed<icu::PluralRules>";
1035 break;
1036 case kIcuCollatorTag:
1037 name = "system / Managed<icu::Collator>";
1038 break;
1040 name = "system / Managed<DisplayNamesInternal>";
1041 break;
1042 default:
1044 }
1045 if (name != nullptr) {
1046 return AddEntry(object.address(), HeapEntry::kHidden, name, size);
1047 }
1048 }
1049
1050 return AddEntry(object, GetSystemEntryType(object),
1051 GetSystemEntryName(object));
1052}
1053
// Adds a snapshot entry for |object| with an explicitly chosen type and name,
// computing the reported size via SizeForSnapshot.
// NOTE(review): this overload's first signature line is elided in this view;
// it appears to take (Tagged<HeapObject>, HeapEntry::Type, const char*) —
// confirm against the full source.
1055 HeapEntry::Type type, const char* name) {
1056 PtrComprCageBase cage_base(isolate());
1057 return AddEntry(object.address(), type, name,
1058 SizeForSnapshot(object, cage_base));
1059}
1060
// Adds a snapshot entry for a raw address. Hidden entries are promoted to
// kNative when the heap_profiler_show_hidden_objects flag is set, and the
// allocation tracker (when active) supplies the allocation trace node id.
// NOTE(review): the signature line and the object-id lookup line are elided
// in this view (internal lines 1061/1067); object_id presumably comes from
// the heap profiler's object-id map — confirm against the full source.
1062 const char* name, size_t size) {
1063 if (v8_flags.heap_profiler_show_hidden_objects &&
1064 type == HeapEntry::kHidden) {
1065 type = HeapEntry::kNative;
1066 }
1068 address, static_cast<unsigned int>(size));
1069 unsigned trace_node_id = 0;
1070 if (AllocationTracker* allocation_tracker =
1072 trace_node_id =
1073 allocation_tracker->address_to_trace()->GetTraceNodeId(address);
1074 }
1075 return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
1076}
1077
// Returns the "system / ..." display name for internal (non-user) objects.
// Maps get a name derived from their instance type; FixedArray-like objects
// get an empty name (so TagObject can overwrite it later); everything else
// is named via the Torque-generated instance-type lists.
1079 if (IsMap(object)) {
1080 switch (Cast<Map>(object)->instance_type()) {
1081#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
1082 case instance_type: \
1083 return "system / Map (" #Name ")";
1085#undef MAKE_STRING_MAP_CASE
1086 default:
1087 return "system / Map";
1088 }
1089 }
1090
1091 InstanceType type = object->map()->instance_type();
1092
1093 // Empty string names are special: TagObject can overwrite them, and devtools
1094 // will report them as "(internal array)".
1095 if (InstanceTypeChecker::IsFixedArray(type) ||
1096 InstanceTypeChecker::IsFixedDoubleArray(type) ||
1097 InstanceTypeChecker::IsByteArray(type)) {
1098 return "";
1099 }
1100
1101 switch (type) {
1102#define MAKE_TORQUE_CASE(Name, TYPE) \
1103 case TYPE: \
1104 return "system / " #Name;
1105 // The following lists include every non-String instance type.
1106 // This includes a few types that already have non-"system" names assigned
1107 // by AddEntry, but this is a convenient way to avoid manual upkeep here.
1108 TORQUE_INSTANCE_CHECKERS_SINGLE_FULLY_DEFINED(MAKE_TORQUE_CASE)
1109 TORQUE_INSTANCE_CHECKERS_MULTIPLE_FULLY_DEFINED(MAKE_TORQUE_CASE)
1110 TORQUE_INSTANCE_CHECKERS_SINGLE_ONLY_DECLARED(MAKE_TORQUE_CASE)
1111 TORQUE_INSTANCE_CHECKERS_MULTIPLE_ONLY_DECLARED(MAKE_TORQUE_CASE)
1112#undef MAKE_TORQUE_CASE
1113
1114 // Strings were already handled by AddEntry.
1115#define MAKE_STRING_CASE(instance_type, size, name, Name) \
1116 case instance_type: \
1117 UNREACHABLE();
1119#undef MAKE_STRING_CASE
1120 }
1121}
1122
// Classifies an internal object into a HeapEntry bucket for the snapshot:
// code-related types -> kCode, plain array backing stores -> kArray,
// shape-describing types (non-read-only Maps, descriptor/transition arrays,
// etc.) -> an object-shape bucket, everything else -> kHidden.
// NOTE(review): the function signature line and the return on internal line
// 1165 are elided in this view — the shape branch presumably returns
// HeapEntry::kObjectShape; confirm against the full source.
1124 InstanceType type = object->map()->instance_type();
1125 if (InstanceTypeChecker::IsAllocationSite(type) ||
1126 InstanceTypeChecker::IsArrayBoilerplateDescription(type) ||
1127 InstanceTypeChecker::IsBytecodeArray(type) ||
1128 InstanceTypeChecker::IsBytecodeWrapper(type) ||
1129 InstanceTypeChecker::IsClosureFeedbackCellArray(type) ||
1130 InstanceTypeChecker::IsCode(type) ||
1131 InstanceTypeChecker::IsCodeWrapper(type) ||
1132 InstanceTypeChecker::IsFeedbackCell(type) ||
1133 InstanceTypeChecker::IsFeedbackMetadata(type) ||
1134 InstanceTypeChecker::IsFeedbackVector(type) ||
1135 InstanceTypeChecker::IsInstructionStream(type) ||
1136 InstanceTypeChecker::IsInterpreterData(type) ||
1137 InstanceTypeChecker::IsLoadHandler(type) ||
1138 InstanceTypeChecker::IsObjectBoilerplateDescription(type) ||
1139 InstanceTypeChecker::IsPreparseData(type) ||
1140 InstanceTypeChecker::IsRegExpBoilerplateDescription(type) ||
1141 InstanceTypeChecker::IsScopeInfo(type) ||
1142 InstanceTypeChecker::IsStoreHandler(type) ||
1143 InstanceTypeChecker::IsTemplateObjectDescription(type) ||
1144 InstanceTypeChecker::IsTurbofanType(type) ||
1145 InstanceTypeChecker::IsUncompiledData(type)) {
1146 return HeapEntry::kCode;
1147 }
1148
1149 // This check must come second, because some subtypes of FixedArray are
1150 // determined above to represent code content.
1151 if (InstanceTypeChecker::IsFixedArray(type) ||
1152 InstanceTypeChecker::IsFixedDoubleArray(type) ||
1153 InstanceTypeChecker::IsByteArray(type)) {
1154 return HeapEntry::kArray;
1155 }
1156
1157 // Maps in read-only space are for internal V8 data, not user-defined object
1158 // shapes.
1159 if ((InstanceTypeChecker::IsMap(type) &&
1160 !MemoryChunk::FromHeapObject(object)->InReadOnlySpace()) ||
1161 InstanceTypeChecker::IsDescriptorArray(type) ||
1162 InstanceTypeChecker::IsTransitionArray(type) ||
1163 InstanceTypeChecker::IsPrototypeInfo(type) ||
1164 InstanceTypeChecker::IsEnumCache(type)) {
1166 }
1167
1168 return HeapEntry::kHidden;
1169}
1170
// Precomputes line-ends tables for every script that does not yet have one
// and stores them on the snapshot. Scripts are first collected into handles
// (inside a HandleScope) because GetLineEnds may allocate.
// NOTE(review): the function signature line is elided in this view.
1172 std::vector<Handle<Script>> scripts;
1173 HandleScope scope(isolate());
1174
1175 {
1176 Script::Iterator iterator(isolate());
1177 for (Tagged<Script> script = iterator.Next(); !script.is_null();
1178 script = iterator.Next()) {
1179 if (!script->has_line_ends()) {
1180 scripts.push_back(handle(script, isolate()));
1181 }
1182 }
1183 }
1184
1185 for (auto& script : scripts) {
1186 snapshot_->AddScriptLineEnds(script->id(),
1187 Script::GetLineEnds(isolate(), script));
1188 }
1189}
1190
// Counts heap objects by exhausting an iterator, saturating at uint32_t max
// so the progress counter cannot overflow.
// NOTE(review): the signature and the declaration of the iterator `it` are
// elided in this view (internal lines 1191-1192) — confirm against the full
// source.
1193 uint32_t objects_count = 0;
1194 // Avoid overflowing the objects count. In worst case, we will show the same
1195 // progress for a longer period of time, but we do not expect to have that
1196 // many objects.
1197 while (!it.Next().is_null() &&
1198 objects_count != std::numeric_limits<uint32_t>::max())
1199 ++objects_count;
1200 return objects_count;
1201}
1202
1203#ifdef V8_TARGET_BIG_ENDIAN
1204namespace {
1205int AdjustEmbedderFieldIndex(Tagged<HeapObject> heap_obj, int field_index) {
1206 Tagged<Map> map = heap_obj->map();
1208 int emb_start_index = (JSObject::GetEmbedderFieldsStartOffset(map) +
1211 int emb_field_count = JSObject::GetEmbedderFieldCount(map);
1212 int emb_end_index = emb_start_index + emb_field_count;
1213 if (base::IsInRange(field_index, emb_start_index, emb_end_index)) {
1215 }
1216 }
1217 return 0;
1218}
1219} // namespace
1220#endif // V8_TARGET_BIG_ENDIAN
// Object visitor that walks every slot of a single parent object and records
// edges in the snapshot. Fields already reported explicitly by the extractors
// are marked in generator_->visited_fields_ and are skipped (and unmarked)
// here, so each field produces exactly one edge.
// NOTE(review): the class declaration line is elided in this view; this is
// the interior of IndexedReferencesExtractor (see surrounding context).
1222 public:
1224 Tagged<HeapObject> parent_obj, HeapEntry* parent)
1225 : ObjectVisitorWithCageBases(generator->isolate()),
1226 generator_(generator),
1227 parent_obj_(parent_obj),
1228 parent_start_(parent_obj_->RawMaybeWeakField(0)),
1230 parent_obj_->RawMaybeWeakField(parent_obj_->Size(cage_base()))),
1231 parent_(parent),
1232 next_index_(0) {}
1237 void VisitMapPointer(Tagged<HeapObject> object) override {
1238 VisitSlotImpl(cage_base(), object->map_slot());
1239 }
1241 MaybeObjectSlot end) override {
1242 // [start,end) must be a sub-region of [parent_start_, parent_end), i.e.
1243 // all the slots must point inside the object.
1246 for (MaybeObjectSlot slot = start; slot < end; ++slot) {
1247 VisitSlotImpl(cage_base(), slot);
1248 }
1249 }
1250
1255
1262
// Embedded object in relocation info: weak targets get a weak edge
// (NOTE(review): that branch's body, internal line 1268, is elided here),
// strong targets a plain edge with no field index (-1).
1264 RelocInfo* rinfo) override {
1265 Tagged<HeapObject> object = rinfo->target_object(cage_base());
1266 Tagged<Code> code = UncheckedCast<Code>(host->raw_code(kAcquireLoad));
1267 if (code->IsWeakObject(object)) {
1269 } else {
1270 VisitHeapObjectImpl(object, -1);
1271 }
1272 }
1273
1278
1280 ProtectedPointerSlot slot) override {
1281 // TODO(saelo): the cage base doesn't currently matter as it isn't used,
1282 // but technically we should either use the trusted cage base here or
1283 // remove the cage_base parameter.
1284 const PtrComprCageBase unused_cage_base(kNullAddress);
1285 VisitSlotImpl(unused_cage_base, slot);
1286 }
1287
1289 ProtectedMaybeObjectSlot slot) override {
1290 // TODO(saelo): the cage base doesn't currently matter as it isn't used,
1291 // but technically we should either use the trusted cage base here or
1292 // remove the cage_base parameter.
1293 const PtrComprCageBase unused_cage_base(kNullAddress);
1294 VisitSlotImpl(unused_cage_base, slot);
1295 }
1296
1298 JSDispatchHandle handle) override {
1299#ifdef V8_ENABLE_LEAPTIERING
1300 // TODO(saelo): implement proper support for these fields here, similar to
1301 // how we handle indirect pointer or protected pointer fields.
1302 // Currently we only expect to see FeedbackCells or JSFunctions here.
1303 if (IsJSFunction(host)) {
1304 int field_index = JSFunction::kDispatchHandleOffset / kTaggedSize;
1305 CHECK(generator_->visited_fields_[field_index]);
1306 generator_->visited_fields_[field_index] = false;
1307 } else if (IsCode(host) || IsFeedbackCell(host)) {
1308 // Nothing to do: the Code object is tracked as part of the JSFunction.
1309 } else {
1310 UNREACHABLE();
1311 }
1312#endif // V8_ENABLE_LEAPTIERING
1313 }
1314
1315 private:
// Core slot handler: computes the field index relative to the parent's
// first slot, consumes the visited_fields_ mark if present, otherwise emits
// a strong or weak edge depending on what the slot holds.
1316 template <typename TIsolateOrCageBase, typename TSlot>
1317 V8_INLINE void VisitSlotImpl(TIsolateOrCageBase isolate_or_cage_base,
1318 TSlot slot) {
1319 int field_index =
1320 static_cast<int>(slot.address() - parent_start_.address()) /
1321 TSlot::kSlotDataSize;
1322#ifdef V8_TARGET_BIG_ENDIAN
1323 field_index += AdjustEmbedderFieldIndex(parent_obj_, field_index);
1324#endif
1325 DCHECK_GE(field_index, 0);
1326 if (generator_->visited_fields_[field_index]) {
1327 generator_->visited_fields_[field_index] = false;
1328 } else {
1329 Tagged<HeapObject> heap_object;
1330 auto loaded_value = slot.load(isolate_or_cage_base);
1331 if (loaded_value.GetHeapObjectIfStrong(&heap_object)) {
1332 VisitHeapObjectImpl(heap_object, field_index);
1333 } else if (loaded_value.GetHeapObjectIfWeak(&heap_object)) {
1334 generator_->SetWeakReference(parent_, next_index_++, heap_object, {});
1335 }
1336 }
1337 }
1338
1340 int field_index) {
1341 DCHECK_LE(-1, field_index);
1342 // The last parameter {field_offset} is only used to check some well-known
1343 // skipped references, so passing -1 * kTaggedSize for objects embedded
1344 // into code is fine.
1346 heap_object, field_index * kTaggedSize);
1347 }
1348
1355};
1356
// Central type dispatch: routes |obj| to the type-specific Extract*References
// helper that records its named/indexed edges in the snapshot.
// NOTE(review): the signature line and many of the call lines inside the
// branches are elided in this view (only the conditions survive for several
// branches) — the visible structure matches a chain of Is<Type> checks each
// calling the corresponding Extract<Type>References.
1358 Tagged<HeapObject> obj) {
1359 if (IsJSGlobalProxy(obj)) {
1361 } else if (IsJSArrayBuffer(obj)) {
1363 } else if (IsJSObject(obj)) {
1364 if (IsJSWeakSet(obj)) {
1366 } else if (IsJSWeakMap(obj)) {
1368 } else if (IsJSSet(obj)) {
1370 } else if (IsJSMap(obj)) {
1372 } else if (IsJSPromise(obj)) {
1374 } else if (IsJSGeneratorObject(obj)) {
1376 } else if (IsJSWeakRef(obj)) {
1378#if V8_ENABLE_WEBASSEMBLY
1379 } else if (IsWasmInstanceObject(obj)) {
1380 ExtractWasmInstanceObjectReferences(Cast<WasmInstanceObject>(obj), entry);
1381 } else if (IsWasmModuleObject(obj)) {
1382 ExtractWasmModuleObjectReferences(Cast<WasmModuleObject>(obj), entry);
1383#endif // V8_ENABLE_WEBASSEMBLY
1384 }
1386 } else if (IsString(obj)) {
1388 } else if (IsSymbol(obj)) {
1390 } else if (IsMap(obj)) {
1391 ExtractMapReferences(entry, Cast<Map>(obj));
1392 } else if (IsSharedFunctionInfo(obj)) {
1394 } else if (IsScript(obj)) {
1396 } else if (IsAccessorInfo(obj)) {
1398 } else if (IsAccessorPair(obj)) {
1400 } else if (IsCode(obj)) {
1401 ExtractCodeReferences(entry, Cast<Code>(obj));
1402 } else if (IsInstructionStream(obj)) {
1404 } else if (IsCell(obj)) {
1405 ExtractCellReferences(entry, Cast<Cell>(obj));
1406 } else if (IsFeedbackCell(obj)) {
1408 } else if (IsPropertyCell(obj)) {
1410 } else if (IsPrototypeInfo(obj)) {
1412 } else if (IsAllocationSite(obj)) {
1414 } else if (IsArrayBoilerplateDescription(obj)) {
1417 } else if (IsRegExpBoilerplateDescription(obj)) {
1420 } else if (IsFeedbackVector(obj)) {
1422 } else if (IsDescriptorArray(obj)) {
1424 } else if (IsEnumCache(obj)) {
1426 } else if (IsTransitionArray(obj)) {
1428 } else if (IsWeakFixedArray(obj)) {
1431 } else if (IsWeakArrayList(obj)) {
1432 ExtractWeakArrayReferences(WeakArrayList::kHeaderSize, entry,
1433 Cast<WeakArrayList>(obj));
1434 } else if (IsContext(obj)) {
1436 } else if (IsEphemeronHashTable(obj)) {
1438 } else if (IsFixedArray(obj)) {
1440 } else if (IsWeakCell(obj)) {
1442 } else if (IsHeapNumber(obj)) {
1444 ExtractNumberReference(entry, obj);
1445 }
1446 } else if (IsBytecodeArray(obj)) {
1448 } else if (IsScopeInfo(obj)) {
1450#if V8_ENABLE_WEBASSEMBLY
1451 } else if (IsWasmStruct(obj)) {
1452 ExtractWasmStructReferences(Cast<WasmStruct>(obj), entry);
1453 } else if (IsWasmArray(obj)) {
1454 ExtractWasmArrayReferences(Cast<WasmArray>(obj), entry);
1455 } else if (IsWasmTrustedInstanceData(obj)) {
1456 ExtractWasmTrustedInstanceDataReferences(Cast<WasmTrustedInstanceData>(obj),
1457 entry);
1458#endif // V8_ENABLE_WEBASSEMBLY
1459 }
1460}
1461
1464
// Records the edges of a JSObject: its properties, elements, internal fields,
// __proto__, plus extra edges for the JS function-like subtypes (bound
// functions, plain functions, global objects, array buffer views).
// NOTE(review): the signature line and a few Cast<...> lines are elided in
// this view.
1466 Tagged<JSObject> js_obj) {
1467 Tagged<HeapObject> obj = js_obj;
1468 ExtractPropertyReferences(js_obj, entry);
1469 ExtractElementReferences(js_obj, entry);
1470 ExtractInternalReferences(js_obj, entry);
1471 Isolate* isolate = Isolate::FromHeap(heap_);
1472 PrototypeIterator iter(isolate, js_obj);
1473 ReadOnlyRoots roots(isolate);
1474 SetPropertyReference(entry, roots.proto_string(), iter.GetCurrent());
1475 if (IsJSBoundFunction(obj)) {
// Bound function: report bound arguments, receiver, target, and one named
// edge per bound argument.
1477 TagObject(js_fun->bound_arguments(), "(bound arguments)");
1478 SetInternalReference(entry, "bindings", js_fun->bound_arguments(),
1479 JSBoundFunction::kBoundArgumentsOffset);
1480 SetInternalReference(entry, "bound_this", js_fun->bound_this(),
1481 JSBoundFunction::kBoundThisOffset);
1482 SetInternalReference(entry, "bound_function",
1483 js_fun->bound_target_function(),
1484 JSBoundFunction::kBoundTargetFunctionOffset);
1485 Tagged<FixedArray> bindings = js_fun->bound_arguments();
1486 for (int i = 0; i < bindings->length(); i++) {
1487 const char* reference_name = names_->GetFormatted("bound_argument_%d", i);
1488 SetNativeBindReference(entry, reference_name, bindings->get(i));
1489 }
1490 } else if (IsJSFunction(obj)) {
1491 Tagged<JSFunction> js_fun = Cast<JSFunction>(js_obj);
1492 if (js_fun->has_prototype_slot()) {
1493 Tagged<Object> proto_or_map =
1494 js_fun->prototype_or_initial_map(kAcquireLoad);
1495 if (!IsTheHole(proto_or_map, isolate)) {
// The slot holds either the prototype itself or the initial map; report
// "prototype" either way, and "initial_map" additionally in the map case.
1496 if (!IsMap(proto_or_map)) {
1497 SetPropertyReference(entry, roots.prototype_string(), proto_or_map,
1498 nullptr,
1499 JSFunction::kPrototypeOrInitialMapOffset);
1500 } else {
1501 SetPropertyReference(entry, roots.prototype_string(),
1502 js_fun->prototype());
1503 SetInternalReference(entry, "initial_map", proto_or_map,
1504 JSFunction::kPrototypeOrInitialMapOffset);
1505 }
1506 }
1507 }
1508 Tagged<SharedFunctionInfo> shared_info = js_fun->shared();
1509 TagObject(js_fun->raw_feedback_cell(), "(function feedback cell)");
1510 SetInternalReference(entry, "feedback_cell", js_fun->raw_feedback_cell(),
1511 JSFunction::kFeedbackCellOffset);
1512 TagObject(shared_info, "(shared function info)");
1513 SetInternalReference(entry, "shared", shared_info,
1514 JSFunction::kSharedFunctionInfoOffset);
1515 TagObject(js_fun->context(), "(context)");
1516 SetInternalReference(entry, "context", js_fun->context(),
1517 JSFunction::kContextOffset);
1518#ifdef V8_ENABLE_LEAPTIERING
// With leaptiering the code is reached via the dispatch handle field.
1519 SetInternalReference(entry, "code", js_fun->code(isolate),
1520 JSFunction::kDispatchHandleOffset);
1521#else
1522 SetInternalReference(entry, "code", js_fun->code(isolate),
1523 JSFunction::kCodeOffset);
1524#endif // V8_ENABLE_LEAPTIERING
1525 } else if (IsJSGlobalObject(obj)) {
1527 SetInternalReference(entry, "global_proxy", global_obj->global_proxy(),
1528 JSGlobalObject::kGlobalProxyOffset);
1529 } else if (IsJSArrayBufferView(obj)) {
1531 SetInternalReference(entry, "buffer", view->buffer(),
1532 JSArrayBufferView::kBufferOffset);
1533 }
1534
1535 TagObject(js_obj->raw_properties_or_hash(), "(object properties)");
1536 SetInternalReference(entry, "properties", js_obj->raw_properties_or_hash(),
1537 JSObject::kPropertiesOrHashOffset);
1538
1539 TagObject(js_obj->elements(), "(object elements)");
1540 SetInternalReference(entry, "elements", js_obj->elements(),
1541 JSObject::kElementsOffset);
1542}
1543
1544namespace {
1545
1546class ExternalDataEntryAllocator : public HeapEntriesAllocator {
1547 public:
1548 ExternalDataEntryAllocator(size_t size, V8HeapExplorer* explorer,
1549 const char* name)
1550 : size_(size), explorer_(explorer), name_(name) {}
1551 HeapEntry* AllocateEntry(HeapThing ptr) override {
1552 return explorer_->AddEntry(reinterpret_cast<Address>(ptr),
1553 HeapEntry::kNative, name_, size_);
1554 }
1555 HeapEntry* AllocateEntry(Tagged<Smi> smi) override { UNREACHABLE(); }
1556
1557 private:
1558 size_t size_;
1559 V8HeapExplorer* explorer_;
1560 const char* name_;
1561};
1562
// Records shared-memory usage reported by an external string resource as a
// "backing_store" edge from the string's entry to a native data entry.
// NOTE(review): the base-class line of this class (internal line 1564) is
// elided in this view; it presumably derives from the embedder's
// SharedMemoryUsageRecorder interface given the RecordSharedMemoryUsage
// override below — confirm against the full source.
1563class ExternalStringRecorder
1565 public:
1566 ExternalStringRecorder(HeapEntry* entry, V8HeapExplorer* explorer,
1567 HeapSnapshotGenerator* generator,
1568 StringsStorage* names)
1569 : entry_(entry),
1570 explorer_(explorer),
1571 generator_(generator),
1572 names_(names) {}
// For each shared-memory block, find-or-add a native entry of the reported
// size and link it from the string entry.
1573 void RecordSharedMemoryUsage(const void* location, size_t size) final {
1574 ExternalDataEntryAllocator allocator(size, explorer_,
1575 "system / ExternalStringData");
1576 HeapEntry* data_entry =
1577 generator_->FindOrAddEntry(const_cast<HeapThing>(location), &allocator);
1578 entry_->SetNamedAutoIndexReference(HeapGraphEdge::kInternal,
1579 "backing_store", data_entry, names_,
1580 generator_, HeapEntry::kOffHeapPointer);
1581 }
1582
1583 private:
1584 HeapEntry* entry_;
1585 V8HeapExplorer* explorer_;
1586 HeapSnapshotGenerator* generator_;
1587 StringsStorage* names_;
1588};
1589
1590} // namespace
1591
// Records the internal structure of composite strings: cons halves, sliced
// parent, thin actual, and (for external strings) any shared off-heap data
// reported by the embedder's resource.
// NOTE(review): the signature line and the Cast<...> lines are elided in this
// view.
1593 Tagged<String> string) {
1594 if (IsConsString(string)) {
1596 SetInternalReference(entry, "first", cs->first(),
1597 offsetof(ConsString, first_));
1598 SetInternalReference(entry, "second", cs->second(),
1599 offsetof(ConsString, second_));
1600 } else if (IsSlicedString(string)) {
1602 SetInternalReference(entry, "parent", ss->parent(),
1603 offsetof(SlicedString, parent_));
1604 } else if (IsThinString(string)) {
1606 SetInternalReference(entry, "actual", ts->actual(),
1607 offsetof(ThinString, actual_));
1608 } else if (IsExternalString(string)) {
1610 if (const v8::String::ExternalStringResourceBase* resource =
1611 GetExternalStringResource(es, isolate())) {
1612 ExternalStringRecorder recorder(entry, this, generator_, names_);
1613 resource->EstimateSharedMemoryUsage(&recorder);
1614 }
1615 }
1616}
1617
// Records a Symbol's description as its "name" edge.
// NOTE(review): the first signature line is elided in this view.
1619 Tagged<Symbol> symbol) {
1620 SetInternalReference(entry, "name", symbol->description(),
1621 offsetof(Symbol, description_));
1622}
1623
// Records the backing table edge of a JSSet/JSMap.
1625 HeapEntry* entry, Tagged<JSCollection> collection) {
1626 SetInternalReference(entry, "table", collection->table(),
1627 JSCollection::kTableOffset);
1628}
1629
// Records the backing table edge of a JSWeakSet/JSWeakMap.
1631 HeapEntry* entry, Tagged<JSWeakCollection> obj) {
1632 SetInternalReference(entry, "table", obj->table(),
1633 JSWeakCollection::kTableOffset);
1634}
1635
// Records ephemeron (key/value) edges of a weak hash table. Each slot gets a
// weak edge, and additionally a descriptive named edge is added so devtools
// can explain why a value is retained by the key/table pair.
// NOTE(review): the line adding the key-side named reference (internal line
// 1656) and the edge-type arguments (1658/1661) are elided in this view.
1637 HeapEntry* entry, Tagged<EphemeronHashTable> table) {
1638 for (InternalIndex i : table->IterateEntries()) {
1639 int key_index = EphemeronHashTable::EntryToIndex(i) +
1640 EphemeronHashTable::kEntryKeyIndex;
1641 int value_index = EphemeronHashTable::EntryToValueIndex(i);
1642 Tagged<Object> key = table->get(key_index);
1643 Tagged<Object> value = table->get(value_index);
1644 SetWeakReference(entry, key_index, key,
1645 table->OffsetOfElementAt(key_index));
1646 SetWeakReference(entry, value_index, value,
1647 table->OffsetOfElementAt(value_index));
1648 HeapEntry* key_entry = GetEntry(key);
1649 HeapEntry* value_entry = GetEntry(value);
1650 HeapEntry* table_entry = GetEntry(table);
1651 if (key_entry && value_entry && !IsUndefined(key)) {
1652 const char* edge_name = names_->GetFormatted(
1653 "part of key (%s @%u) -> value (%s @%u) pair in WeakMap (table @%u)",
1654 key_entry->name(), key_entry->id(), value_entry->name(),
1655 value_entry->id(), table_entry->id());
1657 value_entry, names_, generator_,
1659 table_entry->SetNamedAutoIndexReference(
1660 HeapGraphEdge::kInternal, edge_name, value_entry, names_, generator_,
1662 }
1663 }
1664}
1665
1666// These static arrays are used to prevent excessive code-size in
1667// ExtractContextReferences below, which would happen if we called
1668// SetInternalReference for every native context field in a macro.
// Table of {index, name} pairs for native-context fields, generated from the
// context field macro so ExtractContextReferences can loop instead of
// emitting one SetInternalReference call per field.
// NOTE(review): several lines of this definition (the `index` member, the
// array declarator, and the macro expansion over the field list) are elided
// in this view — confirm against the full source.
1669static const struct {
1671 const char* name;
1673#define CONTEXT_FIELD_INDEX_NAME(index, _, name) {Context::index, #name},
1675#undef CONTEXT_FIELD_INDEX_NAME
1677
// Records a Context's edges: context-allocated locals (named from ScopeInfo),
// the function name slot when context-allocated, the standard scope_info /
// previous / extension slots, and — for native contexts — every field from
// the native_context_names table.
// NOTE(review): the signature line and several argument/continuation lines
// are elided in this view.
1679 Tagged<Context> context) {
1681 if (!IsNativeContext(context) && context->is_declaration_context()) {
1682 Tagged<ScopeInfo> scope_info = context->scope_info();
1683 // Add context allocated locals.
1684 for (auto it : ScopeInfo::IterateLocalNames(scope_info, no_gc)) {
1685 int idx = scope_info->ContextHeaderLength() + it->index();
1686 SetContextReference(entry, it->name(), context->get(idx),
1688 }
1689 if (scope_info->HasContextAllocatedFunctionName()) {
1690 Tagged<String> name = Cast<String>(scope_info->FunctionName());
1691 int idx = scope_info->FunctionContextSlotIndex(name);
1692 if (idx >= 0) {
1693 SetContextReference(entry, name, context->get(idx),
1695 }
1696 }
1697 }
1698
1700 entry, "scope_info", context->get(Context::SCOPE_INFO_INDEX),
1702 SetInternalReference(entry, "previous", context->get(Context::PREVIOUS_INDEX),
1704 if (context->has_extension()) {
1706 entry, "extension", context->get(Context::EXTENSION_INDEX),
1708 }
1709
1710 if (IsNativeContext(context)) {
1711 TagObject(context->normalized_map_cache(), "(context norm. map cache)");
1712 TagObject(context->embedder_data(), "(context data)");
1713 for (size_t i = 0; i < arraysize(native_context_names); i++) {
1714 int index = native_context_names[i].index;
1715 const char* name = native_context_names[i].name;
1716 SetInternalReference(entry, name, context->get(index),
1718 }
1719
1721 static_assert(Context::FIRST_WEAK_SLOT + 1 ==
1723 }
1724}
1725
// Records a Map's edges. The transitions-or-prototype-info slot is decoded
// first (weak map target, transition array, fixed array of transitions, or
// PrototypeInfo), then descriptors, prototype, and the
// constructor/back-pointer/native-context slot, and finally dependent code.
// NOTE(review): the function signature line and a few call/argument lines
// are elided in this view.
1727 Tagged<MaybeObject> maybe_raw_transitions_or_prototype_info =
1728 map->raw_transitions();
1729 Tagged<HeapObject> raw_transitions_or_prototype_info;
1730 if (maybe_raw_transitions_or_prototype_info.GetHeapObjectIfWeak(
1731 &raw_transitions_or_prototype_info)) {
1732 DCHECK(IsMap(raw_transitions_or_prototype_info));
1733 SetWeakReference(entry, "transition", raw_transitions_or_prototype_info,
1734 Map::kTransitionsOrPrototypeInfoOffset);
1735 } else if (maybe_raw_transitions_or_prototype_info.GetHeapObjectIfStrong(
1736 &raw_transitions_or_prototype_info)) {
1737 if (IsTransitionArray(raw_transitions_or_prototype_info)) {
1738 Tagged<TransitionArray> transitions =
1739 Cast<TransitionArray>(raw_transitions_or_prototype_info);
1740 if (map->CanTransition() && transitions->HasPrototypeTransitions()) {
1741 TagObject(transitions->GetPrototypeTransitions(),
1742 "(prototype transitions)");
1743 }
1744 TagObject(transitions, "(transition array)");
1745 SetInternalReference(entry, "transitions", transitions,
1746 Map::kTransitionsOrPrototypeInfoOffset);
1747 } else if (IsFixedArray(raw_transitions_or_prototype_info)) {
1748 TagObject(raw_transitions_or_prototype_info, "(transition)");
1749 SetInternalReference(entry, "transition",
1750 raw_transitions_or_prototype_info,
1751 Map::kTransitionsOrPrototypeInfoOffset);
1752 } else if (map->is_prototype_map()) {
1753 TagObject(raw_transitions_or_prototype_info, "prototype_info");
1754 SetInternalReference(entry, "prototype_info",
1755 raw_transitions_or_prototype_info,
1756 Map::kTransitionsOrPrototypeInfoOffset);
1757 }
1758 }
1759 Tagged<DescriptorArray> descriptors = map->instance_descriptors();
1760 TagObject(descriptors, "(map descriptors)");
1761 SetInternalReference(entry, "descriptors", descriptors,
1762 Map::kInstanceDescriptorsOffset);
1763 SetInternalReference(entry, "prototype", map->prototype(),
1764 Map::kPrototypeOffset);
// Context maps and meta-maps store a native context in the shared slot;
// other maps store a constructor, FunctionTemplateInfo, or back pointer.
1765 if (IsContextMap(map) || IsMapMap(map)) {
1766 Tagged<Object> native_context = map->native_context_or_null();
1767 TagObject(native_context, "(native context)");
1768 SetInternalReference(entry, "native_context", native_context,
1769 Map::kConstructorOrBackPointerOrNativeContextOffset);
1770 } else {
1772 map->constructor_or_back_pointer();
1773 if (IsMap(constructor_or_back_pointer)) {
1774 TagObject(constructor_or_back_pointer, "(back pointer)");
1776 Map::kConstructorOrBackPointerOrNativeContextOffset);
1777 } else if (IsFunctionTemplateInfo(constructor_or_back_pointer)) {
1778 TagObject(constructor_or_back_pointer, "(constructor function data)");
1779 SetInternalReference(entry, "constructor_function_data",
1781 Map::kConstructorOrBackPointerOrNativeContextOffset);
1782 } else {
1784 Map::kConstructorOrBackPointerOrNativeContextOffset);
1785 }
1786 }
1787 TagObject(map->dependent_code(), "(dependent code)");
1788 SetInternalReference(entry, "dependent_code", map->dependent_code(),
1789 Map::kDependentCodeOffset);
1790 TagObject(map->prototype_validity_cell(kRelaxedLoad),
1791 "(prototype validity cell)", HeapEntry::kObjectShape);
1792}
1793
// Records a SharedFunctionInfo's edges and tags its code / instruction
// stream with a human-readable name derived from the function's debug name.
1795 HeapEntry* entry, Tagged<SharedFunctionInfo> shared) {
1796 TagObject(shared, "(shared function info)");
1797 {
1798 std::unique_ptr<char[]> name = shared->DebugNameCStr();
1799 Tagged<Code> code = shared->GetCode(isolate());
// Anonymous functions fall back to a name based on the code kind.
1800 TagObject(code, name[0] != '\0'
1801 ? names_->GetFormatted("(code for %s)", name.get())
1802 : names_->GetFormatted("(%s code)",
1803 CodeKindToString(code->kind())));
1804 if (code->has_instruction_stream()) {
1805 TagObject(
1806 code->instruction_stream(),
1807 name[0] != '\0'
1808 ? names_->GetFormatted("(instruction stream for %s)", name.get())
1809 : names_->GetFormatted("(%s instruction stream)",
1810 CodeKindToString(code->kind())));
1811 }
1812 }
1813
1814 Tagged<Object> name_or_scope_info = shared->name_or_scope_info(kAcquireLoad);
1815 if (IsScopeInfo(name_or_scope_info)) {
1816 TagObject(name_or_scope_info, "(function scope info)");
1817 }
1818 SetInternalReference(entry, "name_or_scope_info", name_or_scope_info,
1819 SharedFunctionInfo::kNameOrScopeInfoOffset);
1820 SetInternalReference(entry, "script", shared->script(kAcquireLoad),
1821 SharedFunctionInfo::kScriptOffset);
1822 SetInternalReference(entry, "trusted_function_data",
1823 shared->GetTrustedData(isolate()),
1824 SharedFunctionInfo::kTrustedFunctionDataOffset);
1825 SetInternalReference(entry, "untrusted_function_data",
1826 shared->GetUntrustedData(),
1827 SharedFunctionInfo::kUntrustedFunctionDataOffset);
1829 entry, "raw_outer_scope_info_or_feedback_metadata",
1830 shared->raw_outer_scope_info_or_feedback_metadata(),
1831 SharedFunctionInfo::kOuterScopeInfoOrFeedbackMetadataOffset);
1832}
1833
// Records a Script's edges (source, name, context data, line ends) and, for
// wasm scripts, the wasm-specific fields that reuse otherwise unused slots.
// NOTE(review): the signature line is elided in this view.
1835 Tagged<Script> script) {
1836 SetInternalReference(entry, "source", script->source(),
1837 Script::kSourceOffset);
1838 SetInternalReference(entry, "name", script->name(), Script::kNameOffset);
1839 SetInternalReference(entry, "context_data", script->context_data(),
1840 Script::kContextDataOffset);
1841 TagObject(script->line_ends(), "(script line ends)", HeapEntry::kCode);
1842 SetInternalReference(entry, "line_ends", script->line_ends(),
1843 Script::kLineEndsOffset);
1844 TagObject(script->infos(), "(infos)", HeapEntry::kCode);
1845 TagObject(script->host_defined_options(), "(host-defined options)",
1847#if V8_ENABLE_WEBASSEMBLY
1848 if (script->type() == Script::Type::kWasm) {
1849 // Wasm reuses some otherwise unused fields for wasm-specific information.
1850 SetInternalReference(entry, "wasm_breakpoint_infos",
1851 script->wasm_breakpoint_infos(),
1852 Script::kEvalFromSharedOrWrappedArgumentsOffset);
1853 SetInternalReference(entry, "wasm_managed_native_module",
1854 script->wasm_managed_native_module(),
1855 Script::kEvalFromPositionOffset);
1856 SetInternalReference(entry, "wasm_weak_instance_list",
1857 script->wasm_weak_instance_list(),
1858 Script::kInfosOffset);
1859 }
1860#endif
1861}
1862
// Records an AccessorInfo's name and data edges.
1864 HeapEntry* entry, Tagged<AccessorInfo> accessor_info) {
1865 SetInternalReference(entry, "name", accessor_info->name(),
1866 AccessorInfo::kNameOffset);
1867 SetInternalReference(entry, "data", accessor_info->data(),
1868 AccessorInfo::kDataOffset);
1869}
1870
// Records an AccessorPair's getter and setter edges.
1872 HeapEntry* entry, Tagged<AccessorPair> accessors) {
1873 SetInternalReference(entry, "getter", accessors->getter(),
1874 offsetof(AccessorPair, getter_));
1875 SetInternalReference(entry, "setter", accessors->setter(),
1876 offsetof(AccessorPair, setter_));
1877}
1878
// Records a JSWeakRef's target as a weak edge.
// NOTE(review): the first signature line is elided in this view.
1880 Tagged<JSWeakRef> js_weak_ref) {
1881 SetWeakReference(entry, "target", js_weak_ref->target(),
1882 JSWeakRef::kTargetOffset);
1883}
1884
// Records a WeakCell's target and unregister token as weak edges.
// NOTE(review): the first signature line is elided in this view.
1886 Tagged<WeakCell> weak_cell) {
1887 SetWeakReference(entry, "target", weak_cell->target(),
1888 WeakCell::kTargetOffset);
1889 SetWeakReference(entry, "unregister_token", weak_cell->unregister_token(),
1890 WeakCell::kUnregisterTokenOffset);
1891}
1892
// Tags a builtin's Code object (and its instruction stream, when present)
// with the builtin's name for display in the snapshot.
// NOTE(review): the signature line is elided in this view; `code` and `name`
// are its parameters.
1894 TagObject(code, names_->GetFormatted("(%s builtin code)", name));
1895 if (code->has_instruction_stream()) {
1896 TagObject(code->instruction_stream(),
1897 names_->GetFormatted("(%s builtin instruction stream)", name));
1898 }
1899}
1900
// Records a Code object's edges: its instruction stream, then either
// interpreter data + bytecode offset table (baseline code) or deoptimization
// data + source position table (optimized code). Code without an instruction
// stream has nothing to report.
// NOTE(review): the signature line is elided in this view.
1902 Tagged<Code> code) {
1903 if (!code->has_instruction_stream()) return;
1904
1905 SetInternalReference(entry, "instruction_stream", code->instruction_stream(),
1906 Code::kInstructionStreamOffset);
1907
1908 if (code->kind() == CodeKind::BASELINE) {
1909 TagObject(code->bytecode_or_interpreter_data(), "(interpreter data)");
1910 SetInternalReference(entry, "interpreter_data",
1911 code->bytecode_or_interpreter_data(),
1912 Code::kDeoptimizationDataOrInterpreterDataOffset);
1913 TagObject(code->bytecode_offset_table(), "(bytecode offset table)",
1915 SetInternalReference(entry, "bytecode_offset_table",
1916 code->bytecode_offset_table(),
1917 Code::kPositionTableOffset);
1918 } else if (code->uses_deoptimization_data()) {
1919 Tagged<DeoptimizationData> deoptimization_data =
1920 Cast<DeoptimizationData>(code->deoptimization_data());
1921 TagObject(deoptimization_data, "(code deopt data)", HeapEntry::kCode);
1922 SetInternalReference(entry, "deoptimization_data", deoptimization_data,
1923 Code::kDeoptimizationDataOrInterpreterDataOffset);
1924 if (deoptimization_data->length() > 0) {
1925 TagObject(deoptimization_data->FrameTranslation(), "(code deopt data)",
1927 TagObject(deoptimization_data->LiteralArray(), "(code deopt data)",
1929 TagObject(deoptimization_data->InliningPositions(), "(code deopt data)",
1931 }
1932 TagObject(code->source_position_table(), "(source position table)",
1934 SetInternalReference(entry, "source_position_table",
1935 code->source_position_table(),
1936 Code::kPositionTableOffset);
1937 }
1938}
1939
// Records an InstructionStream's edges to its Code object and relocation
// info. Bails out if the stream's Code pointer is not yet initialized.
1941 HeapEntry* entry, Tagged<InstructionStream> istream) {
1943 if (!istream->TryGetCode(&code, kAcquireLoad))
1944 return; // Not yet initialized.
1945 TagObject(code, "(code)", HeapEntry::kCode);
1946 SetInternalReference(entry, "code", code, InstructionStream::kCodeOffset);
1947
1948 TagObject(istream->relocation_info(), "(code relocation info)",
1950 SetInternalReference(entry, "relocation_info", istream->relocation_info(),
1951 InstructionStream::kRelocationInfoOffset);
1952}
1953
// Records a Cell's value edge.
// NOTE(review): the first signature line is elided in this view.
1955 Tagged<Cell> cell) {
1956 SetInternalReference(entry, "value", cell->value(), Cell::kValueOffset);
1957}
1958
// Tags a FeedbackCell and records its value edge.
1960 HeapEntry* entry, Tagged<FeedbackCell> feedback_cell) {
1961 TagObject(feedback_cell, "(feedback cell)");
1962 SetInternalReference(entry, "value", feedback_cell->value(),
1963 FeedbackCell::kValueOffset);
1964}
1965
// Records a PropertyCell's value and dependent-code edges.
// NOTE(review): the first signature line is elided in this view.
1967 Tagged<PropertyCell> cell) {
1968 SetInternalReference(entry, "value", cell->value(),
1969 PropertyCell::kValueOffset);
1970 TagObject(cell->dependent_code(), "(dependent code)");
1971 SetInternalReference(entry, "dependent_code", cell->dependent_code(),
1972 PropertyCell::kDependentCodeOffset);
1973}
1974
// Tags a PrototypeInfo's caches for display; no explicit edges are added in
// the lines visible here.
// NOTE(review): the trailing TagObject arguments (internal lines 1978/1980)
// are elided in this view.
1976 HeapEntry* entry, Tagged<PrototypeInfo> info) {
1977 TagObject(info->prototype_chain_enum_cache(), "(prototype chain enum cache)",
1979 TagObject(info->prototype_users(), "(prototype users)",
1981}
1982
// Records an AllocationSite's transition info / boilerplate, nested site,
// and dependent code edges.
1984 HeapEntry* entry, Tagged<AllocationSite> site) {
1986 entry, "transition_info", site->transition_info_or_boilerplate(),
1987 offsetof(AllocationSite, transition_info_or_boilerplate_));
1988 SetInternalReference(entry, "nested_site", site->nested_site(),
1989 offsetof(AllocationSite, nested_site_));
1990 TagObject(site->dependent_code(), "(dependent code)", HeapEntry::kCode);
1991 SetInternalReference(entry, "dependent_code", site->dependent_code(),
1992 offsetof(AllocationSite, dependent_code_));
1993}
1994
// Records an ArrayBoilerplateDescription's constant-elements edge and tags
// the elements array as code content.
// NOTE(review): the signature lines are elided in this view; `value` is the
// boilerplate description parameter.
1997 Tagged<FixedArrayBase> constant_elements = value->constant_elements();
1998 SetInternalReference(entry, "constant_elements", constant_elements,
1999 ArrayBoilerplateDescription::kConstantElementsOffset);
2000 TagObject(constant_elements, "(constant elements)", HeapEntry::kCode);
2001}
2002
2007
2009 HeapEntry* entry, Tagged<JSArrayBuffer> buffer) {
2010 // Setup a reference to a native memory backing_store object.
2011 if (!buffer->backing_store()) return;
2012 size_t data_size = buffer->byte_length();
2013 ExternalDataEntryAllocator allocator(data_size, this,
2014 "system / JSArrayBufferData");
2015 HeapEntry* data_entry =
2016 generator_->FindOrAddEntry(buffer->backing_store(), &allocator);
2017 entry->SetNamedReference(HeapGraphEdge::kInternal, "backing_store",
2019}
2020
2022 Tagged<JSPromise> promise) {
2023 SetInternalReference(entry, "reactions_or_result",
2024 promise->reactions_or_result(),
2025 JSPromise::kReactionsOrResultOffset);
2026}
2027
2029 HeapEntry* entry, Tagged<JSGeneratorObject> generator) {
2030 SetInternalReference(entry, "function", generator->function(),
2031 JSGeneratorObject::kFunctionOffset);
2032 SetInternalReference(entry, "context", generator->context(),
2033 JSGeneratorObject::kContextOffset);
2034 SetInternalReference(entry, "receiver", generator->receiver(),
2035 JSGeneratorObject::kReceiverOffset);
2036 SetInternalReference(entry, "parameters_and_registers",
2037 generator->parameters_and_registers(),
2038 JSGeneratorObject::kParametersAndRegistersOffset);
2039}
2040
2042 Tagged<FixedArray> array) {
2043 for (int i = 0, l = array->length(); i < l; ++i) {
2044 DCHECK(!HasWeakHeapObjectTag(array->get(i)));
2045 SetInternalReference(entry, i, array->get(i), array->OffsetOfElementAt(i));
2046 }
2047}
2048
2050 Tagged<Object> number) {
2051 DCHECK(IsNumber(number));
2052
2053 // Must be large enough to fit any double, int, or size_t.
2054 char arr[32];
2056
2057 std::string_view string;
2058 if (IsSmi(number)) {
2059 int int_value = Smi::ToInt(number);
2060 string = IntToStringView(int_value, buffer);
2061 } else {
2062 double double_value = Cast<HeapNumber>(number)->value();
2063 string = DoubleToStringView(double_value, buffer);
2064 }
2065
2066 // GetCopy() requires a null-terminated C-String, as the underlying hash map
2067 // uses strcmp.
2068 const char* name = names_->GetCopy(std::string(string).c_str());
2069
2071 HeapEntry* child_entry =
2072 snapshot_->AddEntry(HeapEntry::kString, name, id, 0, 0);
2073 entry->SetNamedReference(HeapGraphEdge::kInternal, "value", child_entry,
2074 generator_);
2075}
2076
2078 HeapEntry* entry, Tagged<BytecodeArray> bytecode) {
2079 RecursivelyTagConstantPool(bytecode->constant_pool(), "(constant pool)",
2080 HeapEntry::kCode, 3);
2081 TagObject(bytecode->handler_table(), "(handler table)", HeapEntry::kCode);
2082 TagObject(bytecode->raw_source_position_table(kAcquireLoad),
2083 "(source position table)", HeapEntry::kCode);
2084}
2085
2087 Tagged<ScopeInfo> info) {
2088 if (!info->HasInlinedLocalNames()) {
2089 TagObject(info->context_local_names_hashtable(), "(context local names)",
2091 }
2092}
2093
2095 HeapEntry* entry, Tagged<FeedbackVector> feedback_vector) {
2096#ifndef V8_ENABLE_LEAPTIERING
2097 Tagged<MaybeObject> code = feedback_vector->maybe_optimized_code();
2098 Tagged<HeapObject> code_heap_object;
2099 if (code.GetHeapObjectIfWeak(&code_heap_object)) {
2100 SetWeakReference(entry, "optimized code", code_heap_object,
2101 FeedbackVector::kMaybeOptimizedCodeOffset);
2102 }
2103#endif // !V8_ENABLE_LEAPTIERING
2104 for (int i = 0; i < feedback_vector->length(); ++i) {
2105 Tagged<MaybeObject> maybe_entry = *(feedback_vector->slots_start() + i);
2106 Tagged<HeapObject> entry_obj;
2107 if (maybe_entry.GetHeapObjectIfStrong(&entry_obj) &&
2108 (entry_obj->map(isolate())->instance_type() == WEAK_FIXED_ARRAY_TYPE ||
2109 IsFixedArrayExact(entry_obj))) {
2110 TagObject(entry_obj, "(feedback)", HeapEntry::kCode);
2111 }
2112 }
2113}
2114
2116 HeapEntry* entry, Tagged<DescriptorArray> array) {
2117 SetInternalReference(entry, "enum_cache", array->enum_cache(),
2118 DescriptorArray::kEnumCacheOffset);
2119 MaybeObjectSlot start = MaybeObjectSlot(array->GetDescriptorSlot(0));
2121 array->GetDescriptorSlot(array->number_of_all_descriptors()));
2122 for (int i = 0; start + i < end; ++i) {
2123 MaybeObjectSlot slot = start + i;
2124 int offset = static_cast<int>(slot.address() - array.address());
2125 Tagged<MaybeObject> object = *slot;
2126 Tagged<HeapObject> heap_object;
2127 if (object.GetHeapObjectIfWeak(&heap_object)) {
2128 SetWeakReference(entry, i, heap_object, offset);
2129 } else if (object.GetHeapObjectIfStrong(&heap_object)) {
2130 SetInternalReference(entry, i, heap_object, offset);
2131 }
2132 }
2133}
2134
2136 Tagged<EnumCache> cache) {
2137 TagObject(cache->keys(), "(enum cache)", HeapEntry::kObjectShape);
2138 TagObject(cache->indices(), "(enum cache)", HeapEntry::kObjectShape);
2139}
2140
2142 HeapEntry* entry, Tagged<TransitionArray> transitions) {
2143 if (transitions->HasPrototypeTransitions()) {
2144 TagObject(transitions->GetPrototypeTransitions(), "(prototype transitions)",
2146 }
2147}
2148
2149template <typename T>
2151 HeapEntry* entry,
2152 Tagged<T> array) {
2153 for (int i = 0; i < array->length(); ++i) {
2154 Tagged<MaybeObject> object = array->get(i);
2155 Tagged<HeapObject> heap_object;
2156 if (object.GetHeapObjectIfWeak(&heap_object)) {
2157 SetWeakReference(entry, i, heap_object, header_size + i * kTaggedSize);
2158 } else if (object.GetHeapObjectIfStrong(&heap_object)) {
2159 SetInternalReference(entry, i, heap_object,
2160 header_size + i * kTaggedSize);
2161 }
2162 }
2163}
2164
2166 HeapEntry* entry) {
2167 Isolate* isolate = js_obj->GetIsolate();
2168 if (js_obj->HasFastProperties()) {
2170 js_obj->map()->instance_descriptors(isolate);
2171 for (InternalIndex i : js_obj->map()->IterateOwnDescriptors()) {
2172 PropertyDetails details = descs->GetDetails(i);
2173 switch (details.location()) {
2176 Representation r = details.representation();
2177 if (r.IsSmi() || r.IsDouble()) break;
2178 }
2179
2180 Tagged<Name> k = descs->GetKey(i);
2181 FieldIndex field_index =
2182 FieldIndex::ForDetails(js_obj->map(), details);
2183 Tagged<Object> value = js_obj->RawFastPropertyAt(field_index);
2184 int field_offset =
2185 field_index.is_inobject() ? field_index.offset() : -1;
2186
2187 SetDataOrAccessorPropertyReference(details.kind(), entry, k, value,
2188 nullptr, field_offset);
2189 break;
2190 }
2193 descs->GetKey(i),
2194 descs->GetStrongValue(i));
2195 break;
2196 }
2197 }
2198 } else if (IsJSGlobalObject(js_obj)) {
2199 // We assume that global objects can only have slow properties.
2200 Tagged<GlobalDictionary> dictionary =
2201 Cast<JSGlobalObject>(js_obj)->global_dictionary(kAcquireLoad);
2202 ReadOnlyRoots roots(isolate);
2203 for (InternalIndex i : dictionary->IterateEntries()) {
2204 if (!dictionary->IsKey(roots, dictionary->KeyAt(i))) continue;
2205 Tagged<PropertyCell> cell = dictionary->CellAt(i);
2206 Tagged<Name> name = cell->name();
2207 Tagged<Object> value = cell->value();
2208 PropertyDetails details = cell->property_details();
2209 SetDataOrAccessorPropertyReference(details.kind(), entry, name, value);
2210 }
2212 // SwissNameDictionary::IterateEntries creates a Handle, which should not
2213 // leak out of here.
2214 HandleScope scope(isolate);
2215
2216 Tagged<SwissNameDictionary> dictionary =
2217 js_obj->property_dictionary_swiss();
2218 ReadOnlyRoots roots(isolate);
2219 for (InternalIndex i : dictionary->IterateEntries()) {
2220 Tagged<Object> k = dictionary->KeyAt(i);
2221 if (!dictionary->IsKey(roots, k)) continue;
2222 Tagged<Object> value = dictionary->ValueAt(i);
2223 PropertyDetails details = dictionary->DetailsAt(i);
2225 value);
2226 }
2227 } else {
2228 Tagged<NameDictionary> dictionary = js_obj->property_dictionary();
2229 ReadOnlyRoots roots(isolate);
2230 for (InternalIndex i : dictionary->IterateEntries()) {
2231 Tagged<Object> k = dictionary->KeyAt(i);
2232 if (!dictionary->IsKey(roots, k)) continue;
2233 Tagged<Object> value = dictionary->ValueAt(i);
2234 PropertyDetails details = dictionary->DetailsAt(i);
2236 value);
2237 }
2238 }
2239}
2240
2243 Tagged<Object> callback_obj,
2244 int field_offset) {
2245 if (!IsAccessorPair(callback_obj)) return;
2246 Tagged<AccessorPair> accessors = Cast<AccessorPair>(callback_obj);
2247 SetPropertyReference(entry, key, accessors, nullptr, field_offset);
2248 Tagged<Object> getter = accessors->getter();
2249 if (!IsOddball(getter)) {
2250 SetPropertyReference(entry, key, getter, "get %s");
2251 }
2252 Tagged<Object> setter = accessors->setter();
2253 if (!IsOddball(setter)) {
2254 SetPropertyReference(entry, key, setter, "set %s");
2255 }
2256}
2257
2259 HeapEntry* entry) {
2261 if (js_obj->HasObjectElements()) {
2262 Tagged<FixedArray> elements = Cast<FixedArray>(js_obj->elements());
2263 int length = IsJSArray(js_obj) ? Smi::ToInt(Cast<JSArray>(js_obj)->length())
2264 : elements->length();
2265 for (int i = 0; i < length; ++i) {
2266 if (!IsTheHole(elements->get(i), roots)) {
2267 SetElementReference(entry, i, elements->get(i));
2268 }
2269 }
2270 } else if (js_obj->HasDictionaryElements()) {
2271 Tagged<NumberDictionary> dictionary = js_obj->element_dictionary();
2272 for (InternalIndex i : dictionary->IterateEntries()) {
2273 Tagged<Object> k = dictionary->KeyAt(i);
2274 if (!dictionary->IsKey(roots, k)) continue;
2275 uint32_t index =
2276 static_cast<uint32_t>(Object::NumberValue(Cast<Number>(k)));
2277 SetElementReference(entry, index, dictionary->ValueAt(i));
2278 }
2279 }
2280}
2281
2283 HeapEntry* entry) {
2284 int length = js_obj->GetEmbedderFieldCount();
2285 for (int i = 0; i < length; ++i) {
2286 Tagged<Object> o = js_obj->GetEmbedderField(i);
2287 SetInternalReference(entry, i, o, js_obj->GetEmbedderFieldOffset(i));
2288 }
2289}
2290
2291#if V8_ENABLE_WEBASSEMBLY
2292
2293void V8HeapExplorer::ExtractWasmStructReferences(Tagged<WasmStruct> obj,
2294 HeapEntry* entry) {
2295 Tagged<WasmTypeInfo> info = obj->map()->wasm_type_info();
2296 const wasm::CanonicalStructType* type =
2297 wasm::GetTypeCanonicalizer()->LookupStruct(info->type_index());
2300 Isolate* isolate = heap_->isolate();
2301 for (uint32_t i = 0; i < type->field_count(); i++) {
2303 names->PrintFieldName(sb, info->type_index(), i);
2304 sb << '\0';
2305 const char* field_name = names_->GetCopy(sb.start());
2306 switch (type->field(i).kind()) {
2307 case wasm::kI8:
2308 case wasm::kI16:
2309 case wasm::kI32:
2310 case wasm::kI64:
2311 case wasm::kF16:
2312 case wasm::kF32:
2313 case wasm::kF64:
2314 case wasm::kS128: {
2315 if (!snapshot_->capture_numeric_value()) continue;
2316 std::string value_string = obj->GetFieldValue(i).to_string();
2317 const char* value_name = names_->GetCopy(value_string.c_str());
2319 HeapEntry* child_entry =
2320 snapshot_->AddEntry(HeapEntry::kString, value_name, id, 0, 0);
2322 child_entry, generator_);
2323 break;
2324 }
2325 case wasm::kRef:
2326 case wasm::kRefNull: {
2327 int field_offset = type->field_offset(i);
2328 Tagged<Object> value = obj->RawField(field_offset).load(isolate);
2329 // We could consider hiding {null} fields by default (like we do for
2330 // arrays, see below), but for now we always include them, in the hope
2331 // that they might help identify opportunities for struct size
2332 // reductions.
2333 HeapEntry* value_entry = GetEntry(value);
2335 value_entry, generator_);
2336 MarkVisitedField(WasmStruct::kHeaderSize + field_offset);
2337 break;
2338 }
2339 case wasm::kVoid:
2340 case wasm::kTop:
2341 case wasm::kBottom:
2342 UNREACHABLE();
2343 }
2344 }
2345}
2346
2347void V8HeapExplorer::ExtractWasmArrayReferences(Tagged<WasmArray> obj,
2348 HeapEntry* entry) {
2349 const wasm::CanonicalValueType element_type =
2350 obj->map()->wasm_type_info()->element_type();
2351 if (!element_type.is_reference()) return;
2352 Isolate* isolate = heap_->isolate();
2353 ReadOnlyRoots roots(isolate);
2354 for (uint32_t i = 0; i < obj->length(); i++) {
2355 Tagged<Object> value = obj->ElementSlot(i).load(isolate);
2356 // By default, don't show {null} entries, to reduce noise: they can make
2357 // it difficult to find non-null entries in sparse arrays. We piggyback
2358 // on the "capture numeric values" flag as an opt-in to produce more
2359 // detailed/verbose snapshots, including {null} entries.
2360 if (value != roots.wasm_null() || snapshot_->capture_numeric_value()) {
2361 SetElementReference(entry, i, value);
2362 }
2363 MarkVisitedField(obj->element_offset(i));
2364 }
2365}
2366
2367void V8HeapExplorer::ExtractWasmTrustedInstanceDataReferences(
2369 PtrComprCageBase cage_base(heap_->isolate());
2370 for (size_t i = 0; i < WasmTrustedInstanceData::kTaggedFieldOffsets.size();
2371 i++) {
2376 }
2377 for (size_t i = 0; i < WasmTrustedInstanceData::kProtectedFieldNames.size();
2378 i++) {
2382 trusted_data->RawProtectedPointerField(offset).load(heap_->isolate()),
2383 offset);
2384 }
2385}
2386
2387#define ASSERT_FIRST_FIELD(Class, Field) \
2388 static_assert(Class::Super::kHeaderSize == Class::k##Field##Offset)
2389#define ASSERT_CONSECUTIVE_FIELDS(Class, Field, NextField) \
2390 static_assert(Class::k##Field##OffsetEnd + 1 == Class::k##NextField##Offset)
2391#define ASSERT_LAST_FIELD(Class, Field) \
2392 static_assert(Class::k##Field##OffsetEnd + 1 == Class::kHeaderSize)
2393
2394void V8HeapExplorer::ExtractWasmInstanceObjectReferences(
2395 Tagged<WasmInstanceObject> instance_object, HeapEntry* entry) {
2396 // The static assertions verify that we do not miss any fields here when we
2397 // update the class definition.
2398 ASSERT_FIRST_FIELD(WasmInstanceObject, TrustedData);
2399 SetInternalReference(entry, "trusted_data",
2400 instance_object->trusted_data(heap_->isolate()),
2401 WasmInstanceObject::kTrustedDataOffset);
2402 ASSERT_CONSECUTIVE_FIELDS(WasmInstanceObject, TrustedData, ModuleObject);
2403 SetInternalReference(entry, "module_object", instance_object->module_object(),
2404 WasmInstanceObject::kModuleObjectOffset);
2405 ASSERT_CONSECUTIVE_FIELDS(WasmInstanceObject, ModuleObject, ExportsObject);
2406 SetInternalReference(entry, "exports", instance_object->exports_object(),
2407 WasmInstanceObject::kExportsObjectOffset);
2408 ASSERT_LAST_FIELD(WasmInstanceObject, ExportsObject);
2409}
2410
2411void V8HeapExplorer::ExtractWasmModuleObjectReferences(
2412 Tagged<WasmModuleObject> module_object, HeapEntry* entry) {
2413 // The static assertions verify that we do not miss any fields here when we
2414 // update the class definition.
2415 ASSERT_FIRST_FIELD(WasmModuleObject, ManagedNativeModule);
2416 SetInternalReference(entry, "managed_native_module",
2417 module_object->managed_native_module(),
2418 WasmModuleObject::kManagedNativeModuleOffset);
2419 ASSERT_CONSECUTIVE_FIELDS(WasmModuleObject, ManagedNativeModule, Script);
2420 SetInternalReference(entry, "script", module_object->script(),
2421 WasmModuleObject::kScriptOffset);
2422 ASSERT_LAST_FIELD(WasmModuleObject, Script);
2423}
2424
2425#undef ASSERT_FIRST_FIELD
2426#undef ASSERT_CONSECUTIVE_FIELDS
2427#undef ASSERT_LAST_FIELD
2428
2429#endif // V8_ENABLE_WEBASSEMBLY
2430
2434 HandleScope scope(isolate);
2435 MaybeDirectHandle<JSFunction> maybe_constructor =
2437
2438 if (maybe_constructor.is_null()) return JSFunction();
2439
2440 return *maybe_constructor.ToHandleChecked();
2441}
2442
2444 Tagged<JSObject> object) {
2446 HandleScope scope(isolate);
2447 return *JSReceiver::GetConstructorName(isolate,
2448 direct_handle(object, isolate));
2449}
2450
2452 if (IsHeapObject(obj)) {
2453 return generator_->FindOrAddEntry(reinterpret_cast<void*>(obj.ptr()), this);
2454 }
2455
2456 DCHECK(IsSmi(obj));
2458 return nullptr;
2459 }
2460 return generator_->FindOrAddEntry(Cast<Smi>(obj), this);
2461}
2462
2464 public:
2466 : explorer_(explorer), visiting_weak_roots_(false) {}
2467
2469
2470 void VisitRootPointer(Root root, const char* description,
2471 FullObjectSlot p) override {
2472 Tagged<Object> object = *p;
2473#ifdef V8_ENABLE_DIRECT_HANDLE
2474 if (object.ptr() == kTaggedNullAddress) return;
2475#endif
2476 if (root == Root::kBuiltins) {
2477 explorer_->TagBuiltinCodeObject(Cast<Code>(object), description);
2478 }
2480 object);
2481 }
2482
2483 void VisitRootPointers(Root root, const char* description,
2485 for (FullObjectSlot p = start; p < end; ++p) {
2486 DCHECK(!MapWord::IsPacked(p.Relaxed_Load().ptr()));
2487 VisitRootPointer(root, description, p);
2488 }
2489 }
2490
2491 void VisitRootPointers(Root root, const char* description,
2493 OffHeapObjectSlot end) override {
2494 DCHECK_EQ(root, Root::kStringTable);
2495 PtrComprCageBase cage_base(explorer_->heap_->isolate());
2496 for (OffHeapObjectSlot p = start; p < end; ++p) {
2498 p.load(cage_base));
2499 }
2500 }
2501
2502 // Keep this synced with
2503 // MarkCompactCollector::RootMarkingVisitor::VisitRunningCode.
2505 FullObjectSlot istream_or_smi_zero_slot) final {
2506 Tagged<Object> istream_or_smi_zero = *istream_or_smi_zero_slot;
2507 if (istream_or_smi_zero != Smi::zero()) {
2508 Tagged<Code> code = Cast<Code>(*code_slot);
2509 code->IterateDeoptimizationLiterals(this);
2510 VisitRootPointer(Root::kStackRoots, nullptr, istream_or_smi_zero_slot);
2511 }
2512 VisitRootPointer(Root::kStackRoots, nullptr, code_slot);
2513 }
2514
2515 private:
2518};
2519
2521 HeapSnapshotGenerator* generator) {
2522 generator_ = generator;
2523
2524 // Create references to the synthetic roots.
2526 for (int root = 0; root < static_cast<int>(Root::kNumberOfRoots); root++) {
2527 SetGcRootsReference(static_cast<Root>(root));
2528 }
2529
2530 // Make sure builtin code objects get their builtin tags
2531 // first. Otherwise a particular JSFunction object could set
2532 // its custom name to a generic builtin.
2533 RootsReferencesExtractor extractor(this);
2534 ReadOnlyRoots(heap_).Iterate(&extractor);
2536 &extractor,
2538 // TODO(v8:11800): The heap snapshot generator incorrectly considers the weak
2539 // string tables as strong retainers. Move IterateWeakRoots after
2540 // SetVisitingWeakRoots.
2541 heap_->IterateWeakRoots(&extractor, {});
2542 extractor.SetVisitingWeakRoots();
2543 heap_->IterateWeakGlobalHandles(&extractor);
2544
2545 bool interrupted = false;
2546
2548 PtrComprCageBase cage_base(heap_->isolate());
2549 // Heap iteration need not be finished but progress reporting may depend on
2550 // it being finished.
2551 for (Tagged<HeapObject> obj = iterator.Next(); !obj.is_null();
2552 obj = iterator.Next(), progress_->ProgressStep()) {
2553 if (interrupted) continue;
2554
2555 max_pointers_ = obj->Size(cage_base) / kTaggedSize;
2556 if (max_pointers_ > visited_fields_.size()) {
2557 // Reallocate to right size.
2558 visited_fields_.resize(max_pointers_, false);
2559 }
2560
2561#ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY
2562 std::unique_ptr<HeapEntryVerifier> verifier;
2563 // MarkingVisitorBase doesn't expect that we will ever visit read-only
2564 // objects, and fails DCHECKs if we attempt to. Read-only objects can
2565 // never retain read-write objects, so there is no risk in skipping
2566 // verification for them.
2567 if (v8_flags.heap_snapshot_verify &&
2569 verifier = std::make_unique<HeapEntryVerifier>(generator, obj);
2570 }
2571#endif
2572
2573 HeapEntry* entry = GetEntry(obj);
2574 ExtractReferences(entry, obj);
2575 SetInternalReference(entry, "map", obj->map(cage_base),
2577 // Extract unvisited fields as hidden references and restore tags
2578 // of visited fields.
2579 IndexedReferencesExtractor refs_extractor(this, obj, entry);
2580 VisitObject(heap_->isolate(), obj, &refs_extractor);
2581
2582#if DEBUG
2583 // Ensure visited_fields_ doesn't leak to the next object.
2584 for (size_t i = 0; i < max_pointers_; ++i) {
2586 }
2587#endif // DEBUG
2588
2589 // Extract location for specific object types
2590 ExtractLocation(entry, obj);
2591
2592 if (!progress_->ProgressReport(false)) interrupted = true;
2593 }
2594
2595 generator_ = nullptr;
2596 return interrupted ? false : progress_->ProgressReport(true);
2597}
2598
2600 if (!IsHeapObject(object)) return false;
2601 // Avoid comparing objects in other pointer compression cages to objects
2602 // inside the main cage as the comparison may only look at the lower 32 bits.
2605 return true;
2606 }
2607 Isolate* isolate = heap_->isolate();
2608 ReadOnlyRoots roots(isolate);
2609 return !IsOddball(object, isolate) && object != roots.the_hole_value() &&
2610 object != roots.empty_byte_array() &&
2611 object != roots.empty_fixed_array() &&
2612 object != roots.empty_weak_fixed_array() &&
2613 object != roots.empty_descriptor_array() &&
2614 object != roots.fixed_array_map() && object != roots.cell_map() &&
2615 object != roots.global_property_cell_map() &&
2616 object != roots.shared_function_info_map() &&
2617 object != roots.free_space_map() &&
2618 object != roots.one_pointer_filler_map() &&
2619 object != roots.two_pointer_filler_map();
2620}
2621
2623 int field_offset) {
2624 if (IsAllocationSite(parent) &&
2625 field_offset == offsetof(AllocationSiteWithWeakNext, weak_next_))
2626 return false;
2627 if (IsContext(parent) &&
2629 return false;
2630 if (IsJSFinalizationRegistry(parent) &&
2631 field_offset == JSFinalizationRegistry::kNextDirtyOffset)
2632 return false;
2633 return true;
2634}
2635
2637 Tagged<String> reference_name,
2638 Tagged<Object> child_obj,
2639 int field_offset) {
2640 HeapEntry* child_entry = GetEntry(child_obj);
2641 if (child_entry == nullptr) return;
2643 names_->GetName(reference_name), child_entry,
2644 generator_);
2645 MarkVisitedField(field_offset);
2646}
2647
2649 if (offset < 0) return;
2650 int index = offset / kTaggedSize;
2651 DCHECK_LT(index, max_pointers_);
2652 DCHECK(!visited_fields_[index]);
2653 visited_fields_[index] = true;
2654}
2655
2657 const char* reference_name,
2658 Tagged<Object> child_obj) {
2659 HeapEntry* child_entry = GetEntry(child_obj);
2660 if (child_entry == nullptr) return;
2661 parent_entry->SetNamedReference(HeapGraphEdge::kShortcut, reference_name,
2662 child_entry, generator_);
2663}
2664
2666 Tagged<Object> child_obj) {
2667 HeapEntry* child_entry = GetEntry(child_obj);
2668 if (child_entry == nullptr) return;
2669 parent_entry->SetIndexedReference(HeapGraphEdge::kElement, index, child_entry,
2670 generator_);
2671}
2672
2674 const char* reference_name,
2675 Tagged<Object> child_obj,
2676 int field_offset) {
2677 if (!IsEssentialObject(child_obj)) {
2678 return;
2679 }
2680 HeapEntry* child_entry = GetEntry(child_obj);
2681 DCHECK_NOT_NULL(child_entry);
2682 parent_entry->SetNamedReference(HeapGraphEdge::kInternal, reference_name,
2683 child_entry, generator_);
2684 MarkVisitedField(field_offset);
2685}
2686
2688 Tagged<Object> child_obj,
2689 int field_offset) {
2690 if (!IsEssentialObject(child_obj)) {
2691 return;
2692 }
2693 HeapEntry* child_entry = GetEntry(child_obj);
2694 DCHECK_NOT_NULL(child_entry);
2696 names_->GetName(index), child_entry,
2697 generator_);
2698 MarkVisitedField(field_offset);
2699}
2700
2702 HeapEntry* parent_entry, int index,
2703 Tagged<Object> child_obj,
2704 int field_offset) {
2705 DCHECK_EQ(parent_entry, GetEntry(parent_obj));
2706 DCHECK(!MapWord::IsPacked(child_obj.ptr()));
2707 if (!IsEssentialObject(child_obj)) {
2708 return;
2709 }
2710 HeapEntry* child_entry = GetEntry(child_obj);
2711 DCHECK_NOT_NULL(child_entry);
2712 if (IsEssentialHiddenReference(parent_obj, field_offset)) {
2713 parent_entry->SetIndexedReference(HeapGraphEdge::kHidden, index,
2714 child_entry, generator_);
2715 }
2716}
2717
2719 HeapEntry* parent_entry, const char* reference_name,
2720 Tagged<Object> child_obj, int field_offset,
2721 HeapEntry::ReferenceVerification verification) {
2722 if (!IsEssentialObject(child_obj)) {
2723 return;
2724 }
2725 HeapEntry* child_entry = GetEntry(child_obj);
2726 DCHECK_NOT_NULL(child_entry);
2727 parent_entry->SetNamedReference(HeapGraphEdge::kWeak, reference_name,
2728 child_entry, generator_, verification);
2729 MarkVisitedField(field_offset);
2730}
2731
2732void V8HeapExplorer::SetWeakReference(HeapEntry* parent_entry, int index,
2733 Tagged<Object> child_obj,
2734 std::optional<int> field_offset) {
2735 if (!IsEssentialObject(child_obj)) {
2736 return;
2737 }
2738 HeapEntry* child_entry = GetEntry(child_obj);
2739 DCHECK_NOT_NULL(child_entry);
2741 names_->GetFormatted("%d", index),
2742 child_entry, generator_);
2743 if (field_offset.has_value()) {
2744 MarkVisitedField(*field_offset);
2745 }
2746}
2747
2749 PropertyKind kind, HeapEntry* parent_entry, Tagged<Name> reference_name,
2750 Tagged<Object> child_obj, const char* name_format_string,
2751 int field_offset) {
2753 ExtractAccessorPairProperty(parent_entry, reference_name, child_obj,
2754 field_offset);
2755 } else {
2756 SetPropertyReference(parent_entry, reference_name, child_obj,
2757 name_format_string, field_offset);
2758 }
2759}
2760
2762 Tagged<Name> reference_name,
2763 Tagged<Object> child_obj,
2764 const char* name_format_string,
2765 int field_offset) {
2766 HeapEntry* child_entry = GetEntry(child_obj);
2767 if (child_entry == nullptr) return;
2768 HeapGraphEdge::Type type =
2769 IsSymbol(reference_name) || Cast<String>(reference_name)->length() > 0
2772 const char* name = name_format_string != nullptr && IsString(reference_name)
2773 ? names_->GetFormatted(
2774 name_format_string,
2775 Cast<String>(reference_name)->ToCString().get())
2776 : names_->GetName(reference_name);
2777
2778 parent_entry->SetNamedReference(type, name, child_entry, generator_);
2779 MarkVisitedField(field_offset);
2780}
2781
2786
2788 HeapEntry* child_entry = GetEntry(child_obj);
2789 DCHECK_NOT_NULL(child_entry);
2791 HeapGraphEdge::kShortcut, nullptr, child_entry, names_, generator_);
2792}
2793
2798
2799void V8HeapExplorer::SetGcSubrootReference(Root root, const char* description,
2800 bool is_weak,
2801 Tagged<Object> child_obj) {
2802 if (IsSmi(child_obj)) {
2803 // TODO(arenevier): if we handle smis here, the snapshot gets 2 to 3 times
2804 // slower on large heaps. According to perf, The bulk of the extra works
2805 // happens in TemplateHashMapImpl::Probe method, when tyring to get
2806 // names->GetFormatted("%d / %s", index, description)
2807 return;
2808 }
2809 HeapEntry* child_entry = GetEntry(child_obj);
2810 if (child_entry == nullptr) return;
2811 auto child_heap_obj = Cast<HeapObject>(child_obj);
2812 const char* name = GetStrongGcSubrootName(child_heap_obj);
2813 HeapGraphEdge::Type edge_type =
2815 if (name != nullptr) {
2816 snapshot_->gc_subroot(root)->SetNamedReference(edge_type, name, child_entry,
2817 generator_);
2818 } else {
2820 edge_type, description, child_entry, names_, generator_);
2821 }
2822
2823 // For full heap snapshots we do not emit user roots but rather rely on
2824 // regular GC roots to retain objects.
2825 if (snapshot_->expose_internals()) return;
2826
2827 // Add a shortcut to JS global object reference at snapshot root.
2828 // That allows the user to easily find global objects. They are
2829 // also used as starting points in distance calculations.
2830 if (is_weak || !IsNativeContext(child_heap_obj)) return;
2831
2832 Tagged<JSGlobalObject> global =
2833 Cast<Context>(child_heap_obj)->global_object();
2834 if (!IsJSGlobalObject(global)) return;
2835
2836 if (!user_roots_.insert(global).second) return;
2837
2838 SetUserGlobalReference(global);
2839}
2840
2842 if (strong_gc_subroot_names_.empty()) {
2843 Isolate* isolate = Isolate::FromHeap(heap_);
2845 root_index <= RootIndex::kLastStrongOrReadOnlyRoot; ++root_index) {
2846 const char* name = RootsTable::name(root_index);
2847 Tagged<Object> root = isolate->root(root_index);
2848 CHECK(!IsSmi(root));
2849 strong_gc_subroot_names_.emplace(Cast<HeapObject>(root), name);
2850 }
2852 }
2853 auto it = strong_gc_subroot_names_.find(object);
2854 return it != strong_gc_subroot_names_.end() ? it->second : nullptr;
2855}
2856
2858 std::optional<HeapEntry::Type> type,
2859 bool overwrite_existing_name) {
2860 if (IsEssentialObject(obj)) {
2861 HeapEntry* entry = GetEntry(obj);
2862 if (overwrite_existing_name || entry->name()[0] == '\0') {
2863 entry->set_name(tag);
2864 }
2865 if (type.has_value()) {
2866 entry->set_type(*type);
2867 }
2868 }
2869}
2870
2872 const char* tag,
2873 HeapEntry::Type type,
2874 int recursion_limit) {
2875 --recursion_limit;
2876 if (IsFixedArrayExact(obj, isolate())) {
2878 TagObject(arr, tag, type);
2879 if (recursion_limit <= 0) return;
2880 for (int i = 0; i < arr->length(); ++i) {
2881 RecursivelyTagConstantPool(arr->get(i), tag, type, recursion_limit);
2882 }
2883 } else if (IsTrustedFixedArray(obj, isolate())) {
2885 TagObject(arr, tag, type, /*overwrite_existing_name=*/true);
2886 if (recursion_limit <= 0) return;
2887 for (int i = 0; i < arr->length(); ++i) {
2888 RecursivelyTagConstantPool(arr->get(i), tag, type, recursion_limit);
2889 }
2890 } else if (IsNameDictionary(obj, isolate()) ||
2891 IsNumberDictionary(obj, isolate())) {
2892 TagObject(obj, tag, type);
2893 }
2894}
2895
2897 public:
2899 std::function<void(Handle<JSGlobalObject>)> handler)
2900 : isolate_(isolate), handler_(handler) {}
2901
2902 void VisitRootPointers(Root root, const char* description,
2904 VisitRootPointersImpl(root, description, start, end);
2905 }
2906
2907 void VisitRootPointers(Root root, const char* description,
2909 OffHeapObjectSlot end) override {
2910 VisitRootPointersImpl(root, description, start, end);
2911 }
2912
2913 private:
2914 template <typename TSlot>
2915 void VisitRootPointersImpl(Root root, const char* description, TSlot start,
2916 TSlot end) {
2917 for (TSlot p = start; p < end; ++p) {
2918 DCHECK(!MapWord::IsPacked(p.Relaxed_Load(isolate_).ptr()));
2919 Tagged<Object> o = p.load(isolate_);
2920 if (!IsNativeContext(o, isolate_)) continue;
2921 Tagged<JSObject> proxy = Cast<Context>(o)->global_proxy();
2922 if (!IsJSGlobalProxy(proxy, isolate_)) continue;
2923 Tagged<Object> global = proxy->map(isolate_)->prototype(isolate_);
2924 if (!IsJSGlobalObject(global, isolate_)) continue;
2926 }
2927 }
2928
2930 std::function<void(Handle<JSGlobalObject>)> handler_;
2931};
2932
2935 if (!global_object_name_resolver_) return {};
2936
2937 Isolate* isolate = heap_->isolate();
2938 TemporaryGlobalObjectTags global_object_tags;
2939 HandleScope scope(isolate);
2940 GlobalObjectsEnumerator enumerator(
2941 isolate, [this, isolate, &global_object_tags](
2942 DirectHandle<JSGlobalObject> global_object) {
2943 if (const char* tag = global_object_name_resolver_->GetName(
2944 Utils::ToLocal(Cast<JSObject>(global_object)))) {
2945 global_object_tags.emplace_back(
2946 Global<v8::Object>(reinterpret_cast<v8::Isolate*>(isolate),
2947 Utils::ToLocal(Cast<JSObject>(global_object))),
2948 tag);
2949 global_object_tags.back().first.SetWeak();
2950 }
2951 });
2952 isolate->global_handles()->IterateAllRoots(&enumerator);
2953 isolate->traced_handles()->Iterate(&enumerator);
2954 return global_object_tags;
2955}
2956
2958 TemporaryGlobalObjectTags&& global_object_tags) {
2959 HandleScope scope(heap_->isolate());
2960 for (const auto& pair : global_object_tags) {
2961 if (!pair.first.IsEmpty()) {
2962 // Temporary local.
2963 auto local = Utils::OpenPersistent(pair.first);
2964 global_object_tag_map_.emplace(Cast<JSGlobalObject>(*local), pair.second);
2965 }
2966 }
2967}
2968
2970 public:
2971 struct Edge {
2974 const char* name;
2975 };
2976
2977 class V8NodeImpl : public Node {
2978 public:
2979 explicit V8NodeImpl(Tagged<Object> object) : object_(object) {}
2981
2982 // Node overrides.
2983 bool IsEmbedderNode() override { return false; }
2984 const char* Name() override {
2985 // The name should be retrieved via GetObject().
2986 UNREACHABLE();
2987 }
2988 size_t SizeInBytes() override {
2989 // The size should be retrieved via GetObject().
2990 UNREACHABLE();
2991 }
2992
2993 private:
2995 };
2996
2997 Node* V8Node(const v8::Local<v8::Value>& value) final {
2999 return V8Node(data);
3000 }
3001
3002 Node* V8Node(const v8::Local<v8::Data>& data) final {
3004 DCHECK(!object.is_null());
3005 return AddNode(std::unique_ptr<Node>(new V8NodeImpl(*object)));
3006 }
3007
3008 Node* AddNode(std::unique_ptr<Node> node) final {
3009 Node* result = node.get();
3010 nodes_.push_back(std::move(node));
3011 return result;
3012 }
3013
3014 void AddEdge(Node* from, Node* to, const char* name) final {
3015 edges_.push_back({from, to, name});
3016 }
3017
3018 void AddNativeSize(size_t size) final { native_size_ += size; }
3019 size_t native_size() const { return native_size_; }
3020
3021 const std::vector<std::unique_ptr<Node>>& nodes() { return nodes_; }
3022 const std::vector<Edge>& edges() { return edges_; }
3023
3024 private:
3025 std::vector<std::unique_ptr<Node>> nodes_;
3026 std::vector<Edge> edges_;
3027 size_t native_size_ = 0;
3028};
3029
3031 public:
3033 : snapshot_(snapshot),
3034 names_(snapshot_->profiler()->names()),
3035 heap_object_map_(snapshot_->profiler()->heap_object_map()) {}
3036 HeapEntry* AllocateEntry(HeapThing ptr) override;
3037 HeapEntry* AllocateEntry(Tagged<Smi> smi) override;
3038
3039 private:
3043};
3044
3045namespace {
3046
3047const char* EmbedderGraphNodeName(StringsStorage* names,
3049 const char* prefix = node->NamePrefix();
3050 return prefix ? names->GetFormatted("%s %s", prefix, node->Name())
3051 : names->GetCopy(node->Name());
3052}
3053
3054HeapEntry::Type EmbedderGraphNodeType(EmbedderGraphImpl::Node* node) {
3055 return node->IsRootNode() ? HeapEntry::kSynthetic : HeapEntry::kNative;
3056}
3057
3058// Merges the names of an embedder node and its wrapper node.
3059// If the wrapper node name contains a tag suffix (part after '/') then the
3060// result is the embedder node name concatenated with the tag suffix.
3061// Otherwise, the result is the embedder node name.
3062const char* MergeNames(StringsStorage* names, const char* embedder_name,
3063 const char* wrapper_name) {
3064 const char* suffix = strchr(wrapper_name, '/');
3065 return suffix ? names->GetFormatted("%s %s", embedder_name, suffix)
3066 : embedder_name;
3067}
3068
3069} // anonymous namespace
3070
3073 reinterpret_cast<EmbedderGraphImpl::Node*>(ptr);
3074 DCHECK(node->IsEmbedderNode());
3075 size_t size = node->SizeInBytes();
3076 Address lookup_address = reinterpret_cast<Address>(node->GetNativeObject());
3079 HeapObjectsMap::IsNativeObject is_native_object =
3081 if (!lookup_address) {
3082 // If there is not a native object associated with this embedder object,
3083 // then request the address of the embedder object.
3084 lookup_address = reinterpret_cast<Address>(node->GetAddress());
3085 is_native_object = HeapObjectsMap::IsNativeObject::kYes;
3086 }
3087 if (!lookup_address) {
3088 // If the Node implementation did not provide either a native address or an
3089 // embedder address, then use the address of the Node itself for the lookup.
3090 // In this case, we'll set the "accessed" flag on the newly created
3091 // HeapEntry to false, to indicate that this entry should not persist for
3092 // future snapshots.
3093 lookup_address = reinterpret_cast<Address>(node);
3095 }
3097 lookup_address, 0, accessed, is_native_object);
3098 auto* heap_entry = snapshot_->AddEntry(EmbedderGraphNodeType(node),
3099 EmbedderGraphNodeName(names_, node),
3100 id, static_cast<int>(size), 0);
3101 heap_entry->set_detachedness(node->GetDetachedness());
3102 return heap_entry;
3103}
3104
3106 DCHECK(false);
3107 return nullptr;
3108}
3109
3112 : isolate_(
3113 Isolate::FromHeap(snapshot->profiler()->heap_object_map()->heap())),
3114 snapshot_(snapshot),
3115 names_(snapshot_->profiler()->names()),
3116 heap_object_map_(snapshot_->profiler()->heap_object_map()),
3117 embedder_graph_entries_allocator_(
3118 new EmbedderGraphEntriesAllocator(snapshot)) {}
3119
3121 HeapEntry* entry, EmbedderGraph::Node* original_node,
3122 EmbedderGraph::Node* wrapper_node) {
3123 // The wrapper node may be an embedder node (for testing purposes) or a V8
3124 // node (production code).
3125 if (!wrapper_node->IsEmbedderNode()) {
3126 // For V8 nodes only we can add a lookup.
3128 static_cast<EmbedderGraphImpl::V8NodeImpl*>(wrapper_node);
3129 Tagged<Object> object = v8_node->GetObject();
3130 DCHECK(!IsSmi(object));
3131 if (original_node->GetNativeObject()) {
3132 Tagged<HeapObject> heap_object = Cast<HeapObject>(object);
3134 heap_object.address());
3136 original_node->GetNativeObject()));
3137 }
3138 }
3139 entry->set_detachedness(original_node->GetDetachedness());
3140 entry->set_name(MergeNames(
3141 names_, EmbedderGraphNodeName(names_, original_node), entry->name()));
3142 entry->set_type(EmbedderGraphNodeType(original_node));
3143 DCHECK_GE(entry->self_size() + original_node->SizeInBytes(),
3144 entry->self_size());
3145 entry->add_self_size(original_node->SizeInBytes());
3146}
3147
3150 // Return the entry for the wrapper node if present.
3151 if (node->WrapperNode()) {
3152 node = node->WrapperNode();
3153 }
3154 // Node is EmbedderNode.
3155 if (node->IsEmbedderNode()) {
3156 return generator_->FindOrAddEntry(node,
3158 }
3159 // Node is V8NodeImpl.
3160 Tagged<Object> object =
3161 static_cast<EmbedderGraphImpl::V8NodeImpl*>(node)->GetObject();
3162 if (IsSmi(object)) return nullptr;
3163 auto* entry = generator_->FindEntry(
3164 reinterpret_cast<void*>(Cast<Object>(object).ptr()));
3165 return entry;
3166}
3167
3169 HeapSnapshotGenerator* generator) {
3170 generator_ = generator;
3171
3172 if (v8_flags.heap_profiler_use_embedder_graph &&
3174 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate_));
3178 for (const auto& node : graph.nodes()) {
3179 // Only add embedder nodes as V8 nodes have been added already by the
3180 // V8HeapExplorer.
3181 if (!node->IsEmbedderNode()) continue;
3182
3183 if (auto* entry = EntryForEmbedderGraphNode(node.get())) {
3184 if (node->IsRootNode()) {
3188 }
3189 if (node->WrapperNode()) {
3190 MergeNodeIntoEntry(entry, node.get(), node->WrapperNode());
3191 }
3192 }
3193 }
3194 // Fill edges of the graph.
3195 for (const auto& edge : graph.edges()) {
3196 // |from| and |to| can be nullptr if the corresponding node is a V8 node
3197 // pointing to a Smi.
3198 HeapEntry* from = EntryForEmbedderGraphNode(edge.from);
3199 if (!from) continue;
3200 HeapEntry* to = EntryForEmbedderGraphNode(edge.to);
3201 if (!to) continue;
3202 if (edge.name == nullptr) {
3203 from->SetIndexedAutoIndexReference(HeapGraphEdge::kElement, to,
3204 generator_,
3206 } else {
3207 from->SetNamedReference(HeapGraphEdge::kInternal,
3208 names_->GetCopy(edge.name), to, generator_,
3210 }
3211 }
3212 snapshot_->set_extra_native_bytes(graph.native_size());
3213 }
3214 generator_ = nullptr;
3215 return true;
3216}
3217
3219 HeapSnapshot* snapshot, v8::ActivityControl* control,
3221 cppgc::EmbedderStackState stack_state)
3222 : snapshot_(snapshot),
3223 control_(control),
3224 v8_heap_explorer_(snapshot_, this, resolver),
3225 dom_explorer_(snapshot_, this),
3226 heap_(heap),
3227 stack_state_(stack_state) {}
3228
3229namespace {
3230class V8_NODISCARD NullContextForSnapshotScope {
3231 public:
3232 explicit NullContextForSnapshotScope(Isolate* isolate)
3233 : isolate_(isolate), prev_(isolate->context()) {
3234 isolate_->set_context(Context());
3235 }
3236 ~NullContextForSnapshotScope() { isolate_->set_context(prev_); }
3237
3238 private:
3239 Isolate* isolate_;
3240 Tagged<Context> prev_;
3241};
3242} // namespace
3243
3246 timer.Start();
3247
3249
3250 Isolate* isolate = heap_->isolate();
3251 auto temporary_global_object_tags =
3253
3254 EmbedderStackStateScope stack_scope(
3257
3258 // No allocation that could trigger GC from here onwards. We cannot use a
3259 // DisallowGarbageCollection scope as the HeapObjectIterator used during
3260 // snapshot creation enters a safepoint as well. However, in practice we
3261 // already enter a safepoint above so that should never trigger a GC.
3262 DisallowPositionInfoSlow no_position_info_slow;
3263
3264 NullContextForSnapshotScope null_context_scope(isolate);
3265
3267 std::move(temporary_global_object_tags));
3268
3270
3272
3274 if (!FillReferences()) return false;
3275
3278
3280
3281 if (i::v8_flags.profile_heap_snapshot) {
3282 base::OS::PrintError("[Heap snapshot took %0.3f ms]\n",
3283 timer.Elapsed().InMillisecondsF());
3284 }
3285 timer.Stop();
3286 if (!ProgressReport(true)) return false;
3287 return true;
3288}
3289
3291 // Same as above, but no allocations, no GC run, and no progress report.
3293 auto temporary_global_object_tags =
3295 NullContextForSnapshotScope null_context_scope(heap_->isolate());
3297 std::move(temporary_global_object_tags));
3300 if (!FillReferences()) return false;
3303 return true;
3304}
3305
3307 // Only increment the progress_counter_ until
3308 // equal to progress_total -1 == progress_counter.
3309 // This ensures that intermediate ProgressReport calls will never signal
3310 // that the work is finished (i.e. progress_counter_ == progress_total_).
3311 // Only the forced ProgressReport() at the end of GenerateSnapshot() should,
3312 // after setting progress_counter_ = progress_total_, signal that the
3313 // work is finished because signalling finished twice
3314 // breaks the DevTools frontend.
3315 if (control_ != nullptr && progress_total_ > progress_counter_ + 1) {
3317 }
3318}
3319
3321 const int kProgressReportGranularity = 10000;
3322 if (control_ != nullptr &&
3323 (force || progress_counter_ % kProgressReportGranularity == 0)) {
3326 }
3327 return true;
3328}
3329
3335
3340
3341// type, name, id, self_size, edge_count, trace_node_id, detachedness.
3344
3347 timer.Start();
3349 writer_ = new OutputStreamWriter(stream);
3351 if (AllocationTracker* tracker =
3354 static_cast<uint32_t>(tracker->function_info_list().size());
3355 }
3356 SerializeImpl();
3357 delete writer_;
3358 writer_ = nullptr;
3359
3360 if (i::v8_flags.profile_heap_snapshot) {
3361 base::OS::PrintError("[Serialization of heap snapshot took %0.3f ms]\n",
3362 timer.Elapsed().InMillisecondsF());
3363 }
3364 timer.Stop();
3365}
3366
3368 DCHECK_EQ(0, snapshot_->root()->index());
3369 writer_->AddCharacter('{');
3370 writer_->AddString("\"snapshot\":{");
3372 if (writer_->aborted()) return;
3373 writer_->AddString("},\n");
3374 writer_->AddString("\"nodes\":[");
3376 if (writer_->aborted()) return;
3377 writer_->AddString("],\n");
3378 writer_->AddString("\"edges\":[");
3380 if (writer_->aborted()) return;
3381 writer_->AddString("],\n");
3382
3383 writer_->AddString("\"trace_function_infos\":[");
3385 if (writer_->aborted()) return;
3386 writer_->AddString("],\n");
3387 writer_->AddString("\"trace_tree\":[");
3389 if (writer_->aborted()) return;
3390 writer_->AddString("],\n");
3391
3392 writer_->AddString("\"samples\":[");
3394 if (writer_->aborted()) return;
3395 writer_->AddString("],\n");
3396
3397 writer_->AddString("\"locations\":[");
3399 if (writer_->aborted()) return;
3400 writer_->AddString("],\n");
3401
3402 writer_->AddString("\"strings\":[");
3404 if (writer_->aborted()) return;
3405 writer_->AddCharacter(']');
3406 writer_->AddCharacter('}');
3407 writer_->Finalize();
3408}
3409
3411 base::HashMap::Entry* cache_entry =
3412 strings_.LookupOrInsert(const_cast<char*>(s), StringHash(s));
3413 if (cache_entry->value == nullptr) {
3414 cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
3415 }
3416 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
3417}
3418
3420 bool first_edge) {
3421 int edge_name_or_index = edge->type() == HeapGraphEdge::kElement ||
3422 edge->type() == HeapGraphEdge::kHidden
3423 ? edge->index()
3424 : GetStringId(edge->name());
3425 if (!first_edge) {
3426 writer_->AddCharacter(',');
3427 }
3428 writer_->AddNumber(static_cast<int>(edge->type()));
3429 writer_->AddCharacter(',');
3430 writer_->AddNumber(edge_name_or_index);
3431 writer_->AddCharacter(',');
3432 writer_->AddNumber(to_node_index(edge->to()));
3433}
3434
3436 const std::vector<HeapGraphEdge*>& edges = snapshot_->children();
3437 for (size_t i = 0; i < edges.size(); ++i) {
3438 DCHECK(i == 0 ||
3439 edges[i - 1]->from()->index() <= edges[i]->from()->index());
3440 SerializeEdge(edges[i], i == 0);
3441 if (writer_->aborted()) return;
3442 }
3443}
3444
3446 if (to_node_index(entry) != 0) {
3447 writer_->AddCharacter(',');
3448 }
3449 writer_->AddNumber(static_cast<int>(entry->type()));
3450 writer_->AddCharacter(',');
3451 writer_->AddNumber(GetStringId(entry->name()));
3452 writer_->AddCharacter(',');
3453 writer_->AddNumber(entry->id());
3454 writer_->AddCharacter(',');
3455 writer_->AddNumber(entry->self_size());
3456 writer_->AddCharacter(',');
3457 writer_->AddNumber(entry->children_count());
3458 writer_->AddCharacter(',');
3460 writer_->AddNumber(entry->trace_node_id());
3461 writer_->AddCharacter(',');
3462 } else {
3463 CHECK_EQ(0, entry->trace_node_id());
3464 }
3465 writer_->AddNumber(entry->detachedness());
3466}
3467
3469 const std::deque<HeapEntry>& entries = snapshot_->entries();
3470 for (const HeapEntry& entry : entries) {
3471 SerializeNode(&entry);
3472 if (writer_->aborted()) return;
3473 }
3474}
3475
3477 writer_->AddString("\"meta\":");
3478 // The object describing node serialization layout.
3479 // We use a set of macros to improve readability.
3480
3481 // clang-format off
3482#define JSON_A(s) "[" s "]"
3483#define JSON_S(s) "\"" s "\""
3484 writer_->AddString("{"
3485 JSON_S("node_fields") ":["
3486 JSON_S("type") ","
3487 JSON_S("name") ","
3488 JSON_S("id") ","
3489 JSON_S("self_size") ","
3490 JSON_S("edge_count") ",");
3491 if (trace_function_count_) writer_->AddString(JSON_S("trace_node_id") ",");
3493 JSON_S("detachedness")
3494 "],"
3495 JSON_S("node_types") ":" JSON_A(
3496 JSON_A(
3497 JSON_S("hidden") ","
3498 JSON_S("array") ","
3499 JSON_S("string") ","
3500 JSON_S("object") ","
3501 JSON_S("code") ","
3502 JSON_S("closure") ","
3503 JSON_S("regexp") ","
3504 JSON_S("number") ","
3505 JSON_S("native") ","
3506 JSON_S("synthetic") ","
3507 JSON_S("concatenated string") ","
3508 JSON_S("sliced string") ","
3509 JSON_S("symbol") ","
3510 JSON_S("bigint") ","
3511 JSON_S("object shape")) ","
3512 JSON_S("string") ","
3513 JSON_S("number") ","
3514 JSON_S("number") ","
3515 JSON_S("number") ","
3516 JSON_S("number") ","
3517 JSON_S("number")) ","
3518 JSON_S("edge_fields") ":" JSON_A(
3519 JSON_S("type") ","
3520 JSON_S("name_or_index") ","
3521 JSON_S("to_node")) ","
3522 JSON_S("edge_types") ":" JSON_A(
3523 JSON_A(
3524 JSON_S("context") ","
3525 JSON_S("element") ","
3526 JSON_S("property") ","
3527 JSON_S("internal") ","
3528 JSON_S("hidden") ","
3529 JSON_S("shortcut") ","
3530 JSON_S("weak")) ","
3531 JSON_S("string_or_number") ","
3532 JSON_S("node")) ","
3533 JSON_S("trace_function_info_fields") ":" JSON_A(
3534 JSON_S("function_id") ","
3535 JSON_S("name") ","
3536 JSON_S("script_name") ","
3537 JSON_S("script_id") ","
3538 JSON_S("line") ","
3539 JSON_S("column")) ","
3540 JSON_S("trace_node_fields") ":" JSON_A(
3541 JSON_S("id") ","
3542 JSON_S("function_info_index") ","
3543 JSON_S("count") ","
3544 JSON_S("size") ","
3545 JSON_S("children")) ","
3546 JSON_S("sample_fields") ":" JSON_A(
3547 JSON_S("timestamp_us") ","
3548 JSON_S("last_assigned_id")) ","
3549 JSON_S("location_fields") ":" JSON_A(
3550 JSON_S("object_index") ","
3551 JSON_S("script_id") ","
3552 JSON_S("line") ","
3553 JSON_S("column"))
3554 "}");
3555// clang-format on
3556#undef JSON_S
3557#undef JSON_A
3558 writer_->AddString(",\"node_count\":");
3559 writer_->AddNumber(snapshot_->entries().size());
3560 writer_->AddString(",\"edge_count\":");
3561 writer_->AddNumber(snapshot_->edges().size());
3562 writer_->AddString(",\"trace_function_count\":");
3564 writer_->AddString(",\"extra_native_bytes\":");
3566}
3567
3569 static const char hex_chars[] = "0123456789ABCDEF";
3570 w->AddString("\\u");
3571 w->AddCharacter(hex_chars[(u >> 12) & 0xF]);
3572 w->AddCharacter(hex_chars[(u >> 8) & 0xF]);
3573 w->AddCharacter(hex_chars[(u >> 4) & 0xF]);
3574 w->AddCharacter(hex_chars[u & 0xF]);
3575}
3576
3579 if (!tracker) return;
3580 AllocationTraceTree* traces = tracker->trace_tree();
3581 SerializeTraceNode(traces->root());
3582}
3583
3585 writer_->AddNumber(node->id());
3586 writer_->AddCharacter(',');
3587 writer_->AddNumber(node->function_info_index());
3588 writer_->AddCharacter(',');
3589 writer_->AddNumber(node->allocation_count());
3590 writer_->AddCharacter(',');
3591 writer_->AddNumber(node->allocation_size());
3592 writer_->AddCharacter(',');
3593 writer_->AddCharacter('[');
3594 int i = 0;
3595 for (AllocationTraceNode* child : node->children()) {
3596 if (i++ > 0) {
3597 writer_->AddCharacter(',');
3598 }
3599 SerializeTraceNode(child);
3600 }
3601 writer_->AddCharacter(']');
3602}
3603
3606 if (!tracker) return;
3607 int i = 0;
3608 for (AllocationTracker::FunctionInfo* info : tracker->function_info_list()) {
3609 if (i++ > 0) {
3610 writer_->AddCharacter(',');
3611 }
3612 writer_->AddNumber(info->function_id);
3613 writer_->AddCharacter(',');
3614 writer_->AddNumber(GetStringId(info->name));
3615 writer_->AddCharacter(',');
3616 writer_->AddNumber(GetStringId(info->script_name));
3617 writer_->AddCharacter(',');
3618 writer_->AddNumber(info->script_id);
3619 // 0-based positions are converted to 1-based during serialization.
3620 writer_->AddCharacter(',');
3621 writer_->AddNumber(info->line + 1);
3622 writer_->AddCharacter(',');
3623 writer_->AddNumber(info->column + 1);
3624 }
3625}
3626
3628 const std::vector<HeapObjectsMap::TimeInterval>& samples =
3630 if (samples.empty()) return;
3631 base::TimeTicks start_time = samples[0].timestamp;
3632 int i = 0;
3633 for (const HeapObjectsMap::TimeInterval& sample : samples) {
3634 if (i++ > 0) {
3635 writer_->AddCharacter(',');
3636 }
3637 base::TimeDelta time_delta = sample.timestamp - start_time;
3638 writer_->AddNumber(time_delta.InMicroseconds());
3639 writer_->AddCharacter(',');
3640 writer_->AddNumber(sample.last_assigned_id());
3641 }
3642}
3643
3645 writer_->AddCharacter('\"');
3646 for (; *s != '\0'; ++s) {
3647 switch (*s) {
3648 case '\b':
3649 writer_->AddString("\\b");
3650 continue;
3651 case '\f':
3652 writer_->AddString("\\f");
3653 continue;
3654 case '\n':
3655 writer_->AddString("\\n");
3656 continue;
3657 case '\r':
3658 writer_->AddString("\\r");
3659 continue;
3660 case '\t':
3661 writer_->AddString("\\t");
3662 continue;
3663 case '\"':
3664 case '\\':
3665 writer_->AddCharacter('\\');
3666 writer_->AddCharacter(*s);
3667 continue;
3668 default:
3669 if (*s > 31 && *s < 128) {
3670 writer_->AddCharacter(*s);
3671 } else if (*s <= 31) {
3672 // Special character with no dedicated literal.
3673 WriteUChar(writer_, *s);
3674 } else {
3675 // Convert UTF-8 into \u UTF-16 literal.
3676 size_t length = 1, cursor = 0;
3677 for (; length <= 4 && *(s + length) != '\0'; ++length) {
3678 }
3679 unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
3680 if (c != unibrow::Utf8::kBadChar) {
3681 WriteUChar(writer_, c);
3682 DCHECK_NE(cursor, 0);
3683 s += cursor - 1;
3684 } else {
3685 writer_->AddCharacter('?');
3686 }
3687 }
3688 }
3689 }
3690 writer_->AddCharacter('\"');
3691}
3692
3695 1);
3696 for (base::HashMap::Entry* entry = strings_.Start(); entry != nullptr;
3697 entry = strings_.Next(entry)) {
3698 int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
3699 sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
3700 }
3701 writer_->AddString("\"<dummy>\"");
3702 for (int i = 1; i < sorted_strings.length(); ++i) {
3703 writer_->AddCharacter(',');
3704 SerializeString(sorted_strings[i]);
3705 if (writer_->aborted()) return;
3706 }
3707}
3708
3710 const EntrySourceLocation& location) {
3712 writer_->AddCharacter(',');
3713 writer_->AddNumber(location.scriptId);
3714 writer_->AddCharacter(',');
3715 writer_->AddNumber(location.line);
3716 writer_->AddCharacter(',');
3717 writer_->AddNumber(location.col);
3718}
3719
3721 const std::vector<EntrySourceLocation>& locations = snapshot_->locations();
3722 for (size_t i = 0; i < locations.size(); i++) {
3723 if (i > 0) writer_->AddCharacter(',');
3724 SerializeLocation(locations[i]);
3725 if (writer_->aborted()) return;
3726 }
3727}
3728
3729} // namespace v8::internal
TFGraph * graph
Isolate * isolate_
#define DISALLOW_GARBAGE_COLLECTION(name)
uint32_t bit_field_
Builtins::Kind kind
Definition builtins.cc:40
PropertyT * getter
static const uchar kBadChar
Definition unicode.h:175
static uchar CalculateValue(const uint8_t *str, size_t length, size_t *cursor)
Definition unicode.cc:202
virtual ControlOption ReportProgressValue(uint32_t done, uint32_t total)=0
virtual Detachedness GetDetachedness()
virtual size_t SizeInBytes()=0
virtual bool IsEmbedderNode()
virtual NativeObject GetNativeObject()
virtual const char * GetName(Local< Object > object)=0
static const SnapshotObjectId kUnknownObjectId
virtual WriteResult WriteHeapStatsChunk(HeapStatsUpdate *data, int count)
virtual void EndOfStream()=0
virtual int GetChunkSize()
static constexpr size_t kDefaultMemoryEstimate
virtual void EstimateSharedMemoryUsage(SharedMemoryUsageRecorder *recorder) const
virtual size_t EstimateMemoryUsage() const
static v8::internal::DirectHandle< To > OpenDirectHandle(v8::Local< From > handle)
Definition api.h:279
static v8::internal::Handle< v8::internal::Object > OpenPersistent(const v8::PersistentBase< T > &persistent)
Definition api.h:262
Entry * Next(Entry *entry) const
Definition hashmap.h:345
uint32_t occupancy() const
Definition hashmap.h:111
Entry * LookupOrInsert(const Key &key, uint32_t hash)
Definition hashmap.h:223
Value Remove(const Key &key, uint32_t hash)
Definition hashmap.h:265
Entry * Lookup(const Key &key, uint32_t hash) const
Definition hashmap.h:214
int64_t InMicroseconds() const
Definition time.cc:251
int length() const
Definition vector.h:64
constexpr T * begin() const
Definition vector.h:96
const std::vector< AllocationTraceNode * > & children() const
AllocationTraceTree * trace_tree()
const std::vector< FunctionInfo * > & function_info_list() const
static V8_INLINE constexpr int OffsetOfElementAt(int index)
Definition contexts.h:512
static constexpr int kTaggedPayloadOffset
HeapEntry * AllocateEntry(HeapThing ptr) override
Node * V8Node(const v8::Local< v8::Data > &data) final
std::vector< std::unique_ptr< Node > > nodes_
void AddEdge(Node *from, Node *to, const char *name) final
const std::vector< Edge > & edges()
const std::vector< std::unique_ptr< Node > > & nodes()
Node * AddNode(std::unique_ptr< Node > node) final
Node * V8Node(const v8::Local< v8::Value > &value) final
static FieldIndex ForDetails(Tagged< Map > map, PropertyDetails details)
GlobalObjectsEnumerator(Isolate *isolate, std::function< void(Handle< JSGlobalObject >)> handler)
std::function< void(Handle< JSGlobalObject >)> handler_
void VisitRootPointers(Root root, const char *description, FullObjectSlot start, FullObjectSlot end) override
void VisitRootPointers(Root root, const char *description, OffHeapObjectSlot start, OffHeapObjectSlot end) override
void VisitRootPointersImpl(Root root, const char *description, TSlot start, TSlot end)
V8_INLINE std::vector< HeapGraphEdge * >::iterator children_end() const
void set_detachedness(v8::EmbedderGraph::Node::Detachedness value)
V8_EXPORT_PRIVATE void Print(const char *prefix, const char *edge_name, int max_depth, int indent) const
void SetIndexedAutoIndexReference(HeapGraphEdge::Type type, HeapEntry *child, HeapSnapshotGenerator *generator, ReferenceVerification verification=kVerify)
HeapEntry(HeapSnapshot *snapshot, int index, Type type, const char *name, SnapshotObjectId id, size_t self_size, unsigned trace_node_id)
SnapshotObjectId id() const
void VerifyReference(HeapGraphEdge::Type type, HeapEntry *entry, HeapSnapshotGenerator *generator, ReferenceVerification verification)
void SetNamedAutoIndexReference(HeapGraphEdge::Type type, const char *description, HeapEntry *child, StringsStorage *strings, HeapSnapshotGenerator *generator, ReferenceVerification verification=kVerify)
void SetIndexedReference(HeapGraphEdge::Type type, int index, HeapEntry *entry, HeapSnapshotGenerator *generator, ReferenceVerification verification=kVerify)
V8_INLINE std::vector< HeapGraphEdge * >::iterator children_begin() const
void set_name(const char *name)
V8_INLINE HeapGraphEdge * child(int i)
void SetNamedReference(HeapGraphEdge::Type type, const char *name, HeapEntry *entry, HeapSnapshotGenerator *generator, ReferenceVerification verification=kVerify)
HeapGraphEdge(Type type, const char *name, HeapEntry *from, HeapEntry *to)
static V8_INLINE bool InTrustedSpace(Tagged< HeapObject > object)
static V8_INLINE bool InReadOnlySpace(Tagged< HeapObject > object)
static V8_INLINE bool InCodeSpace(Tagged< HeapObject > object)
static constexpr int kMapOffset
static const SnapshotObjectId kFirstAvailableObjectId
static const SnapshotObjectId kInternalRootObjectId
static const SnapshotObjectId kGcRootsFirstSubrootId
void UpdateObjectSize(Address addr, int size)
SnapshotObjectId FindOrAddEntry(Address addr, unsigned int size, MarkEntryAccessed accessed=MarkEntryAccessed::kYes, IsNativeObject is_native_object=IsNativeObject::kNo)
bool MoveObject(Address from, Address to, int size)
SnapshotObjectId last_assigned_id() const
const std::vector< TimeInterval > & samples() const
SnapshotObjectId FindMergedNativeEntry(NativeObject addr)
SnapshotObjectId PushHeapObjectsStats(OutputStream *stream, int64_t *timestamp_us)
static const SnapshotObjectId kFirstAvailableNativeId
SnapshotObjectId FindEntry(Address addr)
static const SnapshotObjectId kGcRootsObjectId
std::unordered_map< NativeObject, size_t > merged_native_entries_map_
void AddMergedNativeEntry(NativeObject addr, Address canonical_addr)
std::vector< TimeInterval > time_intervals_
HeapObjectsMap * heap_object_map() const
void RemoveSnapshot(HeapSnapshot *snapshot)
AllocationTracker * allocation_tracker() const
void BuildEmbedderGraph(Isolate *isolate, v8::EmbedderGraph *graph)
bool ProgressReport(bool force=false) override
HeapEntry * FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator *allocator)
HeapSnapshotGenerator(HeapSnapshot *snapshot, v8::ActivityControl *control, v8::HeapProfiler::ObjectNameResolver *resolver, Heap *heap, cppgc::EmbedderStackState stack_state)
void SerializeTraceNode(AllocationTraceNode *node)
static V8_INLINE uint32_t StringHash(const void *string)
void SerializeLocation(const EntrySourceLocation &location)
void SerializeEdge(HeapGraphEdge *edge, bool first_edge)
V8_INLINE int to_node_index(const HeapEntry *e)
HeapSnapshot(HeapProfiler *profiler, v8::HeapProfiler::HeapSnapshotMode snapshot_mode, v8::HeapProfiler::NumericsMode numerics_mode)
void AddScriptLineEnds(int script_id, String::LineEndsVector &&line_ends)
std::deque< HeapEntry > & entries()
String::LineEndsVector & GetScriptLineEnds(int script_id)
void AddLocation(HeapEntry *entry, int scriptId, int line, int col)
HeapEntry * AddEntry(HeapEntry::Type type, const char *name, SnapshotObjectId id, size_t size, unsigned trace_node_id)
std::vector< EntrySourceLocation > locations_
std::deque< HeapGraphEdge > & edges()
HeapEntry * GetEntryById(SnapshotObjectId id)
std::unordered_map< SnapshotObjectId, HeapEntry * > entries_by_id_cache_
const std::vector< EntrySourceLocation > & locations() const
HeapEntry * gc_subroot_entries_[static_cast< int >(Root::kNumberOfRoots)]
void AddGcSubrootEntry(Root root, SnapshotObjectId id)
HeapEntry * gc_subroot(Root root) const
std::vector< HeapGraphEdge * > & children()
V8_EXPORT_PRIVATE void PreciseCollectAllGarbage(GCFlags gc_flags, GarbageCollectionReason gc_reason, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
Definition heap.cc:1411
V8_EXPORT_PRIVATE void CollectAllAvailableGarbage(GarbageCollectionReason gc_reason)
Definition heap.cc:1327
void IterateWeakRoots(RootVisitor *v, base::EnumSet< SkipRoot > options)
Definition heap.cc:4532
void IterateRoots(RootVisitor *v, base::EnumSet< SkipRoot > options, IterateRootsMode roots_mode=IterateRootsMode::kMainIsolate)
Definition heap.cc:4657
void IterateWeakGlobalHandles(RootVisitor *v)
Definition heap.cc:4811
Isolate * isolate() const
Definition heap-inl.h:61
V8_INLINE void VisitHeapObjectImpl(Tagged< HeapObject > heap_object, int field_index)
void VisitProtectedPointer(Tagged< TrustedObject > host, ProtectedMaybeObjectSlot slot) override
void VisitProtectedPointer(Tagged< TrustedObject > host, ProtectedPointerSlot slot) override
void VisitPointers(Tagged< HeapObject > host, ObjectSlot start, ObjectSlot end) override
void VisitEmbeddedPointer(Tagged< InstructionStream > host, RelocInfo *rinfo) override
void VisitInstructionStreamPointer(Tagged< Code > host, InstructionStreamSlot slot) override
V8_INLINE void VisitSlotImpl(TIsolateOrCageBase isolate_or_cage_base, TSlot slot)
void VisitMapPointer(Tagged< HeapObject > object) override
void VisitCodeTarget(Tagged< InstructionStream > host, RelocInfo *rinfo) override
IndexedReferencesExtractor(V8HeapExplorer *generator, Tagged< HeapObject > parent_obj, HeapEntry *parent)
void VisitJSDispatchTableEntry(Tagged< HeapObject > host, JSDispatchHandle handle) override
void VisitIndirectPointer(Tagged< HeapObject > host, IndirectPointerSlot slot, IndirectPointerMode mode) override
void VisitPointers(Tagged< HeapObject > host, MaybeObjectSlot start, MaybeObjectSlot end) override
static Tagged< InstructionStream > FromTargetAddress(Address address)
static Isolate * FromHeap(const Heap *heap)
Definition isolate.h:1202
bool MayHaveEmbedderFields() const
int GetEmbedderFieldCount() const
static MaybeDirectHandle< JSFunction > GetConstructor(Isolate *isolate, DirectHandle< JSReceiver > receiver)
static DirectHandle< String > GetConstructorName(Isolate *isolate, DirectHandle< JSReceiver > receiver)
static constexpr bool IsPacked(Address)
Definition objects.h:846
V8_INLINE DirectHandle< T > ToHandleChecked() const
V8_INLINE bool is_null() const
static V8_INLINE MemoryChunk * FromHeapObject(Tagged< HeapObject > object)
V8_INLINE bool InReadOnlySpace() const
HeapEntry * EntryForEmbedderGraphNode(EmbedderGraph::Node *node)
bool IterateAndExtractReferences(HeapSnapshotGenerator *generator)
NativeObjectsExplorer(HeapSnapshot *snapshot, SnapshottingProgressReportingInterface *progress)
void MergeNodeIntoEntry(HeapEntry *entry, EmbedderGraph::Node *original_node, EmbedderGraph::Node *wrapper_node)
std::unique_ptr< HeapEntriesAllocator > embedder_graph_entries_allocator_
PtrComprCageBase code_cage_base() const
Definition visitors.h:235
PtrComprCageBase cage_base() const
Definition visitors.h:225
static double NumberValue(Tagged< Number > obj)
PropertyLocation location() const
Representation representation() const
Tagged< T > GetCurrent() const
Definition prototype.h:52
void Iterate(RootVisitor *visitor)
Definition roots.cc:54
V8_INLINE Address target_address()
V8_INLINE Tagged< HeapObject > target_object(PtrComprCageBase cage_base)
static const char * RootName(Root root)
Definition visitors.cc:17
void VisitRootPointers(Root root, const char *description, OffHeapObjectSlot start, OffHeapObjectSlot end) override
void VisitRootPointer(Root root, const char *description, FullObjectSlot p) override
void VisitRootPointers(Root root, const char *description, FullObjectSlot start, FullObjectSlot end) override
void VisitRunningCode(FullObjectSlot code_slot, FullObjectSlot istream_or_smi_zero_slot) final
static const char * name(RootIndex root_index)
Definition roots.h:600
static LocalNamesRange< DirectHandle< ScopeInfo > > IterateLocalNames(DirectHandle< ScopeInfo > scope_info)
Tagged< Script > Next()
Definition objects.cc:4795
static V8_EXPORT_PRIVATE String::LineEndsVector GetLineEnds(Isolate *isolate, DirectHandle< Script > script)
Definition objects.cc:4316
Address address() const
Definition slots.h:78
static constexpr int ToInt(const Tagged< Object > object)
Definition smi.h:33
static constexpr Tagged< Smi > zero()
Definition smi.h:99
const char * GetCopy(const char *src)
const char * GetName(Tagged< Name > name)
static PtrType load(Tagged< HeapObject > host, int offset=0)
V8_INLINE constexpr StorageType ptr() const
bool GetHeapObjectIfStrong(Tagged< HeapObject > *result) const
bool GetHeapObjectIfWeak(Tagged< HeapObject > *result) const
V8_INLINE constexpr bool is_null() const
Definition tagged.h:502
void ExtractJSWeakCollectionReferences(HeapEntry *entry, Tagged< JSWeakCollection > collection)
void ExtractEphemeronHashTableReferences(HeapEntry *entry, Tagged< EphemeronHashTable > table)
const char * GetSystemEntryName(Tagged< HeapObject > object)
void ExtractPropertyReferences(Tagged< JSObject > js_obj, HeapEntry *entry)
void ExtractInstructionStreamReferences(HeapEntry *entry, Tagged< InstructionStream > code)
void SetInternalReference(HeapEntry *parent_entry, const char *reference_name, Tagged< Object > child, int field_offset=-1)
void SetPropertyReference(HeapEntry *parent_entry, Tagged< Name > reference_name, Tagged< Object > child, const char *name_format_string=nullptr, int field_offset=-1)
void RecursivelyTagConstantPool(Tagged< Object > obj, const char *tag, HeapEntry::Type type, int recursion_limit)
void ExtractDescriptorArrayReferences(HeapEntry *entry, Tagged< DescriptorArray > array)
void SetGcSubrootReference(Root root, const char *description, bool is_weak, Tagged< Object > child)
void SetElementReference(HeapEntry *parent_entry, int index, Tagged< Object > child)
HeapEntry::Type GetSystemEntryType(Tagged< HeapObject > object)
void MakeGlobalObjectTagMap(TemporaryGlobalObjectTags &&)
V8HeapExplorer(HeapSnapshot *snapshot, SnapshottingProgressReportingInterface *progress, v8::HeapProfiler::ObjectNameResolver *resolver)
void ExtractAccessorPairProperty(HeapEntry *entry, Tagged< Name > key, Tagged< Object > callback_obj, int field_offset=-1)
SnapshottingProgressReportingInterface * progress_
void ExtractJSCollectionReferences(HeapEntry *entry, Tagged< JSCollection > collection)
void ExtractAccessorInfoReferences(HeapEntry *entry, Tagged< AccessorInfo > accessor_info)
void ExtractEnumCacheReferences(HeapEntry *entry, Tagged< EnumCache > cache)
void ExtractJSPromiseReferences(HeapEntry *entry, Tagged< JSPromise > promise)
void ExtractAllocationSiteReferences(HeapEntry *entry, Tagged< AllocationSite > site)
static Tagged< JSFunction > GetConstructor(Isolate *isolate, Tagged< JSReceiver > receiver)
void ExtractBytecodeArrayReferences(HeapEntry *entry, Tagged< BytecodeArray > bytecode)
HeapEntry * AllocateEntry(HeapThing ptr) override
void ExtractPropertyCellReferences(HeapEntry *entry, Tagged< PropertyCell > cell)
std::unordered_set< Tagged< JSGlobalObject >, Object::Hasher > user_roots_
void ExtractSymbolReferences(HeapEntry *entry, Tagged< Symbol > symbol)
void ExtractJSGlobalProxyReferences(HeapEntry *entry, Tagged< JSGlobalProxy > proxy)
void ExtractScriptReferences(HeapEntry *entry, Tagged< Script > script)
static Tagged< String > GetConstructorName(Isolate *isolate, Tagged< JSObject > object)
void ExtractStringReferences(HeapEntry *entry, Tagged< String > obj)
void ExtractFeedbackVectorReferences(HeapEntry *entry, Tagged< FeedbackVector > feedback_vector)
void SetDataOrAccessorPropertyReference(PropertyKind kind, HeapEntry *parent_entry, Tagged< Name > reference_name, Tagged< Object > child, const char *name_format_string=nullptr, int field_offset=-1)
void ExtractJSWeakRefReferences(HeapEntry *entry, Tagged< JSWeakRef > js_weak_ref)
void ExtractContextReferences(HeapEntry *entry, Tagged< Context > context)
v8::HeapProfiler::ObjectNameResolver * global_object_name_resolver_
void ExtractNumberReference(HeapEntry *entry, Tagged< Object > number)
void ExtractJSObjectReferences(HeapEntry *entry, Tagged< JSObject > js_obj)
HeapEntry * GetEntry(Tagged< Object > obj)
void ExtractJSArrayBufferReferences(HeapEntry *entry, Tagged< JSArrayBuffer > buffer)
void ExtractCodeReferences(HeapEntry *entry, Tagged< Code > code)
void SetHiddenReference(Tagged< HeapObject > parent_obj, HeapEntry *parent_entry, int index, Tagged< Object > child, int field_offset)
bool IsEssentialHiddenReference(Tagged< Object > parent, int field_offset)
TemporaryGlobalObjectTags CollectTemporaryGlobalObjectsTags()
void SetNativeBindReference(HeapEntry *parent_entry, const char *reference_name, Tagged< Object > child)
void ExtractTransitionArrayReferences(HeapEntry *entry, Tagged< TransitionArray > transitions)
void ExtractRegExpBoilerplateDescriptionReferences(HeapEntry *entry, Tagged< RegExpBoilerplateDescription > value)
void SetUserGlobalReference(Tagged< Object > user_global)
void ExtractScopeInfoReferences(HeapEntry *entry, Tagged< ScopeInfo > info)
UnorderedHeapObjectMap< const char * > strong_gc_subroot_names_
std::unordered_map< Tagged< JSGlobalObject >, const char *, Object::Hasher > global_object_tag_map_
const char * GetStrongGcSubrootName(Tagged< HeapObject > object)
void ExtractPrototypeInfoReferences(HeapEntry *entry, Tagged< PrototypeInfo > info)
void ExtractLocation(HeapEntry *entry, Tagged< HeapObject > object)
std::vector< std::pair< v8::Global< v8::Object >, const char * > > TemporaryGlobalObjectTags
void ExtractWeakCellReferences(HeapEntry *entry, Tagged< WeakCell > weak_cell)
void ExtractCellReferences(HeapEntry *entry, Tagged< Cell > cell)
void ExtractLocationForJSFunction(HeapEntry *entry, Tagged< JSFunction > func)
void ExtractFixedArrayReferences(HeapEntry *entry, Tagged< FixedArray > array)
void ExtractMapReferences(HeapEntry *entry, Tagged< Map > map)
HeapEntry * AddEntry(Address address, HeapEntry::Type type, const char *name, size_t size)
bool IsEssentialObject(Tagged< Object > object)
void ExtractInternalReferences(Tagged< JSObject > js_obj, HeapEntry *entry)
void TagBuiltinCodeObject(Tagged< Code > code, const char *name)
void ExtractArrayBoilerplateDescriptionReferences(HeapEntry *entry, Tagged< ArrayBoilerplateDescription > value)
void SetWeakReference(HeapEntry *parent_entry, const char *reference_name, Tagged< Object > child_obj, int field_offset, HeapEntry::ReferenceVerification verification=HeapEntry::kVerify)
void ExtractFeedbackCellReferences(HeapEntry *entry, Tagged< FeedbackCell > feedback_cell)
void ExtractElementReferences(Tagged< JSObject > js_obj, HeapEntry *entry)
void TagObject(Tagged< Object > obj, const char *tag, std::optional< HeapEntry::Type > type={}, bool overwrite_existing_name=false)
void ExtractWeakArrayReferences(int header_size, HeapEntry *entry, Tagged< T > array)
void SetContextReference(HeapEntry *parent_entry, Tagged< String > reference_name, Tagged< Object > child, int field_offset)
void ExtractAccessorPairReferences(HeapEntry *entry, Tagged< AccessorPair > accessors)
void ExtractJSGeneratorObjectReferences(HeapEntry *entry, Tagged< JSGeneratorObject > generator)
Tagged< JSFunction > GetLocationFunction(Tagged< HeapObject > object)
void ExtractSharedFunctionInfoReferences(HeapEntry *entry, Tagged< SharedFunctionInfo > shared)
void ExtractReferences(HeapEntry *entry, Tagged< HeapObject > obj)
bool IterateAndExtractReferences(HeapSnapshotGenerator *generator)
static constexpr std::array< const char *, 6 > kProtectedFieldNames
static constexpr std::array< uint16_t, 6 > kProtectedFieldOffsets
static constexpr std::array< const char *, kTaggedFieldsCount > kTaggedFieldNames
static constexpr std::array< uint16_t, kTaggedFieldsCount > kTaggedFieldOffsets
void PrintTypeName(StringBuilder &out, CanonicalTypeIndex type_index, NamesProvider::IndexAsComment index_as_comment=NamesProvider::kDontPrintIndex)
V8_EXPORT_PRIVATE const CanonicalStructType * LookupStruct(CanonicalTypeIndex index) const
Register const index_
Handle< Code > code
const int size_
Definition assembler.cc:132
#define V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL
Definition globals.h:242
const ObjectRef type_
#define NATIVE_CONTEXT_FIELDS(V)
Definition contexts.h:46
int start
Handle< SharedFunctionInfo > info
int end
LineAndColumn previous
#define JSON_S(s)
#define CONTEXT_FIELD_INDEX_NAME(index, _, name)
V8HeapExplorer * explorer_
#define MAKE_TORQUE_CASE(Name, TYPE)
#define JSON_A(s)
StringsStorage * names_
#define MAKE_STRING_CASE(instance_type, size, name, Name)
HeapEntry * entry_
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name)
int32_t offset
Control control_
TNode< Object > receiver
std::unique_ptr< icu::DateTimePatternGenerator > generator_
Node * node
ZoneVector< RpoNumber > & result
ZoneVector< Entry > entries
Point from
Point to
const char * name_
int s
Definition mul-fft.cc:297
int r
Definition mul-fft.cc:298
EmbedderStackState
Definition common.h:15
unsigned int uchar
Definition unicode.h:21
unsigned short uint16_t
Definition unicode.cc:39
constexpr Vector< T > ArrayVector(T(&arr)[N])
Definition vector.h:354
constexpr bool IsInRange(T value, U lower_limit, U higher_limit)
Definition bounds.h:20
V8_INLINE constexpr bool IsExternalString(InstanceType instance_type)
TypeCanonicalizer * GetTypeCanonicalizer()
CanonicalTypeNamesProvider * GetCanonicalTypeNamesProvider()
V8_INLINE IndirectHandle< T > handle(Tagged< T > object, Isolate *isolate)
Definition handles-inl.h:72
static V8_INLINE bool HasWeakHeapObjectTag(const Tagged< Object > value)
Definition objects.h:653
static void WriteUChar(OutputStreamWriter *w, unibrow::uchar u)
constexpr int kTaggedSize
Definition globals.h:542
constexpr Address kTaggedNullAddress
Definition handles.h:53
bool IsNumber(Tagged< Object > obj)
ReadOnlyRoots GetReadOnlyRoots()
Definition roots-inl.h:86
void PrintF(const char *format,...)
Definition utils.cc:39
const char * CodeKindToString(CodeKind kind)
Definition code-kind.cc:10
Tagged(T object) -> Tagged< T >
V8_INLINE constexpr bool IsSmi(TaggedImpl< kRefType, StorageType > obj)
Definition objects.h:665
std::string_view IntToStringView(int n, base::Vector< char > buffer)
kInterpreterTrampolineOffset Tagged< HeapObject >
void VisitObject(Isolate *isolate, Tagged< HeapObject > object, ObjectVisitor *visitor)
V8_INLINE DirectHandle< T > direct_handle(Tagged< T > object, Isolate *isolate)
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in name
Definition flags.cc:2086
kMemory0SizeOffset Address kNewAllocationLimitAddressOffset Address kOldAllocationLimitAddressOffset uint8_t kGlobalsStartOffset kJumpTableStartOffset std::atomic< uint32_t > kTieringBudgetArrayOffset kDataSegmentStartsOffset kElementSegmentsOffset Tagged< WasmInstanceObject >
Handle< To > UncheckedCast(Handle< From > value)
Definition handles-inl.h:55
constexpr int kSystemPointerSize
Definition globals.h:410
static const struct v8::internal::@145 native_context_names[]
constexpr ExternalPointerTagRange kAnyManagedExternalPointerTagRange(kFirstManagedExternalPointerTag, kLastManagedExternalPointerTag)
@ kIcuSimpleDateFormatTag
@ kIcuLocalizedNumberFormatterTag
@ kDisplayNamesInternalTag
@ kIcuRelativeDateTimeFormatterTag
@ kIcuDateIntervalFormatTag
V8_INLINE constexpr bool IsHeapObject(TaggedImpl< kRefType, StorageType > obj)
Definition objects.h:669
V8_EXPORT_PRIVATE FlagValues v8_flags
return value
Definition map-inl.h:893
std::string_view DoubleToStringView(double v, base::Vector< char > buffer)
static constexpr Address kNullAddress
Definition v8-internal.h:53
SlotTraits::TMaybeObjectSlot MaybeObjectSlot
Definition globals.h:1248
kMemory0SizeOffset Address kNewAllocationLimitAddressOffset Address kOldAllocationLimitAddressOffset uint8_t kGlobalsStartOffset kJumpTableStartOffset std::atomic< uint32_t > kTieringBudgetArrayOffset kDataSegmentStartsOffset kElementSegmentsOffset instance_object
constructor_or_back_pointer
Definition map-inl.h:870
template const char * string
uint32_t ComputeAddressHash(Address address)
Definition utils.h:306
!IsContextMap !IsContextMap native_context
Definition map-inl.h:877
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
void * NativeObject
Definition v8-profiler.h:30
static constexpr RelaxedLoadTag kRelaxedLoad
Definition globals.h:2909
uint32_t SnapshotObjectId
Definition v8-profiler.h:31
static constexpr AcquireLoadTag kAcquireLoad
Definition globals.h:2908
#define STRING_TYPE_LIST(V)
BodyGen * gen
Tagged< HeapObject > primary_object_
BytecodeSequenceNode * parent_
Node * prev_
#define FATAL(...)
Definition logging.h:47
#define DCHECK_LE(v1, v2)
Definition logging.h:490
#define CHECK_GE(lhs, rhs)
#define DCHECK_NULL(val)
Definition logging.h:491
#define CHECK(condition)
Definition logging.h:124
#define CHECK_LE(lhs, rhs)
#define DCHECK_NOT_NULL(val)
Definition logging.h:492
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define CHECK_NE(lhs, rhs)
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define USE(...)
Definition macros.h:293
#define arraysize(array)
Definition macros.h:67
constexpr bool Contains(Tag tag) const
#define OFFSET_OF_DATA_START(Type)
Heap * heap_
#define V8_INLINE
Definition v8config.h:500
#define V8_NODISCARD
Definition v8config.h:693