V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.

debug-coverage.cc
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/debug/debug-coverage.h"

#include "src/ast/ast-source-ranges.h"
#include "src/base/hashmap.h"
#include "src/common/assert-scope.h"
#include "src/common/globals.h"
#include "src/debug/debug.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/frames-inl.h"
#include "src/execution/isolate.h"
#include "src/objects/objects.h"
16
17namespace v8 {
18namespace internal {
19
21 : public base::TemplateHashMapImpl<Tagged<SharedFunctionInfo>, uint32_t,
22 base::KeyEqualityMatcher<Tagged<Object>>,
23 base::DefaultAllocationPolicy> {
24 public:
25 using Entry =
27 inline void Add(Tagged<SharedFunctionInfo> key, uint32_t count) {
28 Entry* entry = LookupOrInsert(key, Hash(key), []() { return 0; });
29 uint32_t old_count = entry->value;
30 if (UINT32_MAX - count < old_count) {
31 entry->value = UINT32_MAX;
32 } else {
33 entry->value = old_count + count;
34 }
35 }
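
  // Illustrative sketch (hypothetical values): Add(sfi, UINT32_MAX - 1)
  // followed by Add(sfi, 5) would overflow uint32_t, so the stored count
  // saturates and Get(sfi) returns UINT32_MAX rather than a wrapped value.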

  inline uint32_t Get(Tagged<SharedFunctionInfo> key) {
    Entry* entry = Lookup(key, Hash(key));
    if (entry == nullptr) return 0;
    return entry->value;
  }

 private:
  static uint32_t Hash(Tagged<SharedFunctionInfo> key) {
    return static_cast<uint32_t>(key.ptr());
  }

  DISALLOW_GARBAGE_COLLECTION(no_gc)
};

namespace {
int StartPosition(Tagged<SharedFunctionInfo> info) {
  int start = info->function_token_position();
  if (start == kNoSourcePosition) start = info->StartPosition();
  return start;
}

bool CompareCoverageBlock(const CoverageBlock& a, const CoverageBlock& b) {
  DCHECK_NE(kNoSourcePosition, a.start);
  DCHECK_NE(kNoSourcePosition, b.start);
  if (a.start == b.start) return a.end > b.end;
  return a.start < b.start;
}
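
// Illustrative sketch (hypothetical positions): under this ordering,
// {start: 10, end: 50} sorts before {start: 10, end: 20} (same start, larger
// end first, i.e. outer blocks precede the blocks they enclose), which in
// turn sorts before {start: 30, end: 40}. Singletons with
// end == kNoSourcePosition (-1) therefore sort after full ranges that share
// their start position.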

void SortBlockData(std::vector<CoverageBlock>& v) {
  // Sort according to the block nesting structure.
  std::sort(v.begin(), v.end(), CompareCoverageBlock);
}

std::vector<CoverageBlock> GetSortedBlockData(
    Isolate* isolate, Tagged<SharedFunctionInfo> shared) {
  DCHECK(shared->HasCoverageInfo(isolate));

  Tagged<CoverageInfo> coverage_info =
      Cast<CoverageInfo>(shared->GetDebugInfo(isolate)->coverage_info());

  std::vector<CoverageBlock> result;
  if (coverage_info->slot_count() == 0) return result;

  for (int i = 0; i < coverage_info->slot_count(); i++) {
    const int start_pos = coverage_info->slots_start_source_position(i);
    const int until_pos = coverage_info->slots_end_source_position(i);
    const int count = coverage_info->slots_block_count(i);

    DCHECK_NE(kNoSourcePosition, start_pos);
    result.emplace_back(start_pos, until_pos, count);
  }

  SortBlockData(result);

  return result;
}

// A utility class to simplify logic for performing passes over block coverage
// ranges. Provides access to the implicit tree structure of ranges (i.e. access
// to parent and sibling blocks), and supports efficient in-place editing and
// deletion. The underlying backing store is the array of CoverageBlocks stored
// on the CoverageFunction.
class CoverageBlockIterator final {
 public:
  explicit CoverageBlockIterator(CoverageFunction* function)
      : function_(function) {
    DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
                          CompareCoverageBlock));
  }

  ~CoverageBlockIterator() {
    Finalize();
    DCHECK(std::is_sorted(function_->blocks.begin(), function_->blocks.end(),
                          CompareCoverageBlock));
  }

  bool HasNext() const {
    return read_index_ + 1 < static_cast<int>(function_->blocks.size());
  }

  bool Next() {
    if (!HasNext()) {
      if (!ended_) MaybeWriteCurrent();
      ended_ = true;
      return false;
    }

    // If a block has been deleted, subsequent iteration moves trailing blocks
    // to their updated position within the array.
    MaybeWriteCurrent();

    if (read_index_ == -1) {
      // Initialize the nesting stack with the function range.
      nesting_stack_.emplace_back(function_->start, function_->end,
                                  function_->count);
    } else if (!delete_current_) {
      nesting_stack_.emplace_back(GetBlock());
    }

    delete_current_ = false;
    read_index_++;

    DCHECK(IsActive());

    CoverageBlock& block = GetBlock();
    while (nesting_stack_.size() > 1 &&
           nesting_stack_.back().end <= block.start) {
      nesting_stack_.pop_back();
    }

    DCHECK_IMPLIES(block.start >= function_->end,
                   block.end == kNoSourcePosition);
    DCHECK_NE(block.start, kNoSourcePosition);
    DCHECK_LE(block.end, GetParent().end);

    return true;
  }

  CoverageBlock& GetBlock() {
    DCHECK(IsActive());
    return function_->blocks[read_index_];
  }

  CoverageBlock& GetNextBlock() {
    DCHECK(IsActive());
    DCHECK(HasNext());
    return function_->blocks[read_index_ + 1];
  }

  CoverageBlock& GetPreviousBlock() {
    DCHECK(IsActive());
    DCHECK_GT(read_index_, 0);
    return function_->blocks[read_index_ - 1];
  }

  CoverageBlock& GetParent() {
    DCHECK(IsActive());
    return nesting_stack_.back();
  }

  bool HasSiblingOrChild() {
    DCHECK(IsActive());
    return HasNext() && GetNextBlock().start < GetParent().end;
  }

  CoverageBlock& GetSiblingOrChild() {
    DCHECK(HasSiblingOrChild());
    DCHECK(IsActive());
    return GetNextBlock();
  }

  // A range is considered to be at top level if its parent range is the
  // function range.
  bool IsTopLevel() const { return nesting_stack_.size() == 1; }

  void DeleteBlock() {
    DCHECK(!delete_current_);
    DCHECK(IsActive());
    delete_current_ = true;
  }

 private:
  void MaybeWriteCurrent() {
    if (delete_current_) return;
    if (read_index_ >= 0 && write_index_ != read_index_) {
      function_->blocks[write_index_] = function_->blocks[read_index_];
    }
    write_index_++;
  }

  void Finalize() {
    while (Next()) {
      // Just iterate to the end.
    }
    function_->blocks.resize(write_index_);
  }

  bool IsActive() const { return read_index_ >= 0 && !ended_; }

  CoverageFunction* function_;
  std::vector<CoverageBlock> nesting_stack_;
  bool ended_ = false;
  bool delete_current_ = false;
  int read_index_ = -1;
  int write_index_ = -1;
};
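
// Illustrative sketch of a typical pass over the block tree (ShouldDrop is a
// hypothetical predicate; `f` is an already-sorted CoverageFunction*):
//
//   CoverageBlockIterator iter(f);
//   while (iter.Next()) {
//     CoverageBlock& block = iter.GetBlock();
//     if (ShouldDrop(block, iter.GetParent())) iter.DeleteBlock();
//   }
//
// Deleted blocks are skipped by MaybeWriteCurrent(), and the backing vector
// is compacted down to write_index_ when the iterator is finalized.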

bool HaveSameSourceRange(const CoverageBlock& lhs, const CoverageBlock& rhs) {
  return lhs.start == rhs.start && lhs.end == rhs.end;
}

void MergeDuplicateRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next() && iter.HasNext()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& next_block = iter.GetNextBlock();

    if (!HaveSameSourceRange(block, next_block)) continue;

    DCHECK_NE(kNoSourcePosition, block.end);  // Non-singleton range.
    next_block.count = std::max(block.count, next_block.count);
    iter.DeleteBlock();
  }
}

// Rewrite position singletons (produced by unconditional control flow
// like return statements, and by continuation counters) into source
// ranges that end at the next sibling range or the end of the parent
// range, whichever comes first.
void RewritePositionSingletonsToRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();

    if (block.start >= function->end) {
      DCHECK_EQ(block.end, kNoSourcePosition);
      iter.DeleteBlock();
    } else if (block.end == kNoSourcePosition) {
      // The current block ends at the next sibling block (if it exists) or the
      // end of the parent block otherwise.
      if (iter.HasSiblingOrChild()) {
        block.end = iter.GetSiblingOrChild().start;
      } else if (iter.IsTopLevel()) {
        // See https://crbug.com/v8/6661. Functions are special-cased because
        // we never want the closing brace to be uncovered. This is mainly to
        // avoid a noisy UI.
        block.end = parent.end - 1;
      } else {
        block.end = parent.end;
      }
    }
  }
}
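
// Illustrative sketch (hypothetical ranges, function range {0, 100}): a
// return-continuation singleton {40, kNoSourcePosition} followed by a sibling
// {60, 80} is rewritten to {40, 60}; without a sibling, at top level, it
// becomes {40, 99} (parent.end - 1), keeping the closing brace covered.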

void MergeConsecutiveRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();

    if (iter.HasSiblingOrChild()) {
      CoverageBlock& sibling = iter.GetSiblingOrChild();
      if (sibling.start == block.end && sibling.count == block.count) {
        // Best-effort: this pass may miss mergeable siblings in the presence
        // of child blocks.
        sibling.start = block.start;
        iter.DeleteBlock();
      }
    }
  }
}
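
// Illustrative sketch (hypothetical ranges): {10, 20, count: 3} directly
// followed by its sibling {20, 30, count: 3} is merged into a single
// {10, 30, count: 3}.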

void MergeNestedRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();

    if (parent.count == block.count) {
      // Transformation may not be valid if sibling blocks exist with a
      // differing count.
      iter.DeleteBlock();
    }
  }
}
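
// Illustrative sketch (hypothetical ranges): a child {20, 30, count: 2}
// nested in a parent {10, 50, count: 2} adds no information and is deleted;
// a child with a differing count under the same parent is kept.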

void RewriteFunctionScopeCounter(CoverageFunction* function) {
  // Every function must have at least the top-level function counter.
  DCHECK(!function->blocks.empty());

  CoverageBlockIterator iter(function);
  if (iter.Next()) {
    DCHECK(iter.IsTopLevel());

    CoverageBlock& block = iter.GetBlock();
    if (block.start == 0 &&
        block.end == SourceRange::kFunctionLiteralSourcePosition) {
      // If a function-scope block exists, overwrite the function count. It has
      // a more reliable count than what we get from the FeedbackVector (which
      // is imprecise e.g. for generator functions and optimized code).
      function->count = block.count;

      // Then delete it; for compatibility with non-block coverage modes, the
      // function-scope block is expected in CoverageFunction, not as a
      // CoverageBlock.
      iter.DeleteBlock();
    }
  }
}

void FilterAliasedSingletons(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  iter.Next();  // Advance once since we reference the previous block later.

  while (iter.Next()) {
    CoverageBlock& previous_block = iter.GetPreviousBlock();
    CoverageBlock& block = iter.GetBlock();

    bool is_singleton = block.end == kNoSourcePosition;
    bool aliases_start = block.start == previous_block.start;

    if (is_singleton && aliases_start) {
      // The previous block must have a full range since duplicate singletons
      // have already been merged.
      DCHECK_NE(previous_block.end, kNoSourcePosition);
      // Likewise, the next block must have another start position since
      // singletons are sorted to the end.
      DCHECK_IMPLIES(iter.HasNext(), iter.GetNextBlock().start != block.start);
      iter.DeleteBlock();
    }
  }
}
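
// Illustrative sketch (hypothetical ranges): given a full range {10, 20}
// followed by an aliased singleton {10, kNoSourcePosition}, the singleton is
// deleted here; rewriting it to a range later would wrongly extend coverage
// past position 20 (e.g. into an else branch).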

void FilterUncoveredRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    CoverageBlock& parent = iter.GetParent();
    if (block.count == 0 && parent.count == 0) iter.DeleteBlock();
  }
}

void FilterEmptyRanges(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    if (block.start == block.end) iter.DeleteBlock();
  }
}

void ClampToBinary(CoverageFunction* function) {
  CoverageBlockIterator iter(function);

  while (iter.Next()) {
    CoverageBlock& block = iter.GetBlock();
    if (block.count > 0) block.count = 1;
  }
}

void ResetAllBlockCounts(Isolate* isolate, Tagged<SharedFunctionInfo> shared) {
  DCHECK(shared->HasCoverageInfo(isolate));

  Tagged<CoverageInfo> coverage_info =
      Cast<CoverageInfo>(shared->GetDebugInfo(isolate)->coverage_info());

  for (int i = 0; i < coverage_info->slot_count(); i++) {
    coverage_info->ResetBlockCount(i);
  }
}

bool IsBlockMode(debug::CoverageMode mode) {
  switch (mode) {
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kBlockCount:
      return true;
    default:
      return false;
  }
}

bool IsBinaryMode(debug::CoverageMode mode) {
  switch (mode) {
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kPreciseBinary:
      return true;
    default:
      return false;
  }
}

void CollectBlockCoverageInternal(Isolate* isolate, CoverageFunction* function,
                                  Tagged<SharedFunctionInfo> info,
                                  debug::CoverageMode mode) {
  DCHECK(IsBlockMode(mode));

  // Functions with empty source ranges are not interesting to report. This can
  // happen e.g. for internally-generated functions like class constructors.
  if (!function->HasNonEmptySourceRange()) return;

  function->has_block_coverage = true;
  function->blocks = GetSortedBlockData(isolate, info);

  // If in binary mode, only report counts of 0/1.
  if (mode == debug::CoverageMode::kBlockBinary) ClampToBinary(function);

  // To stay compatible with non-block coverage modes, the function-scope count
  // is expected to be in the CoverageFunction, not as part of its blocks.
  // This finds the function-scope counter, overwrites CoverageFunction::count,
  // and removes it from the block list.
  //
  // Important: Must be called before other transformation passes.
  RewriteFunctionScopeCounter(function);

  // Functions without blocks don't need to be processed further.
  if (!function->HasBlocks()) return;

  // Remove singleton ranges with the same start position as a full range and
  // throw away their counts.
  // Singleton ranges are only intended to split existing full ranges and
  // should never expand into a full range. Consider the problematic example
  // 'if (cond) { ... } else { ... }'; if the then-block produces a
  // continuation singleton, it would incorrectly expand into the else range.
  // For more context, see https://crbug.com/v8/8237.
  FilterAliasedSingletons(function);

  // Rewrite all singletons (created e.g. by continuations and unconditional
  // control flow) to ranges.
  RewritePositionSingletonsToRanges(function);

  // Merge nested and consecutive ranges with identical counts.
  // Note that it's necessary to merge duplicate ranges prior to merging nested
  // ranges in order to avoid invalid transformations. See crbug.com/827530.
  MergeConsecutiveRanges(function);

  SortBlockData(function->blocks);
  MergeDuplicateRanges(function);
  MergeNestedRanges(function);

  MergeConsecutiveRanges(function);

  // Filter out ranges with count == 0 unless the immediate parent range has
  // a count != 0.
  FilterUncoveredRanges(function);

  // Filter out ranges of zero length.
  FilterEmptyRanges(function);
}

void CollectBlockCoverage(Isolate* isolate, CoverageFunction* function,
                          Tagged<SharedFunctionInfo> info,
                          debug::CoverageMode mode) {
  CollectBlockCoverageInternal(isolate, function, info, mode);

  // Reset all counters on the DebugInfo to zero.
  ResetAllBlockCounts(isolate, info);
}

void PrintBlockCoverage(const CoverageFunction* function,
                        Tagged<SharedFunctionInfo> info,
                        bool has_nonempty_source_range,
                        bool function_is_relevant) {
  DCHECK(v8_flags.trace_block_coverage);
  std::unique_ptr<char[]> function_name = function->name->ToCString();
  i::PrintF(
      "Coverage for function='%s', SFI=%p, has_nonempty_source_range=%d, "
      "function_is_relevant=%d\n",
      function_name.get(), reinterpret_cast<void*>(info.ptr()),
      has_nonempty_source_range, function_is_relevant);
  i::PrintF("{start: %d, end: %d, count: %d}\n", function->start, function->end,
            function->count);
  for (const auto& block : function->blocks) {
    i::PrintF("{start: %d, end: %d, count: %d}\n", block.start, block.end,
              block.count);
  }
}

void CollectAndMaybeResetCounts(Isolate* isolate,
                                SharedToCounterMap* counter_map,
                                v8::debug::CoverageMode coverage_mode) {
  const bool reset_count =
      coverage_mode != v8::debug::CoverageMode::kBestEffort;

  switch (isolate->code_coverage_mode()) {
    case v8::debug::CoverageMode::kBlockBinary:
    case v8::debug::CoverageMode::kBlockCount:
    case v8::debug::CoverageMode::kPreciseBinary:
    case v8::debug::CoverageMode::kPreciseCount: {
      // Feedback vectors are already listed to prevent losing them to GC.
      DCHECK(IsArrayList(
          *isolate->factory()->feedback_vectors_for_profiling_tools()));
      auto list = Cast<ArrayList>(
          isolate->factory()->feedback_vectors_for_profiling_tools());
      for (int i = 0; i < list->length(); i++) {
        Tagged<FeedbackVector> vector = Cast<FeedbackVector>(list->get(i));
        Tagged<SharedFunctionInfo> shared = vector->shared_function_info();
        DCHECK(shared->IsSubjectToDebugging());
        uint32_t count = static_cast<uint32_t>(vector->invocation_count());
        if (reset_count) vector->clear_invocation_count(kRelaxedStore);
        counter_map->Add(shared, count);
      }
      break;
    }
    case v8::debug::CoverageMode::kBestEffort: {
      DCHECK(!IsArrayList(
          *isolate->factory()->feedback_vectors_for_profiling_tools()));
      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, coverage_mode);
      AllowGarbageCollection allow_gc;
      HeapObjectIterator heap_iterator(isolate->heap());
      for (Tagged<HeapObject> current_obj = heap_iterator.Next();
           !current_obj.is_null(); current_obj = heap_iterator.Next()) {
        if (!IsJSFunction(current_obj)) continue;
        Tagged<JSFunction> func = Cast<JSFunction>(current_obj);
        Tagged<SharedFunctionInfo> shared = func->shared();
        if (!shared->IsSubjectToDebugging()) continue;
        if (!(func->has_feedback_vector() ||
              func->has_closure_feedback_cell_array())) {
          continue;
        }
        uint32_t count = 0;
        if (func->has_feedback_vector()) {
          count = static_cast<uint32_t>(
              func->feedback_vector()->invocation_count());
        } else if (func->shared()->HasBytecodeArray() &&
                   func->raw_feedback_cell()->interrupt_budget() <
                       TieringManager::InterruptBudgetFor(isolate, func, {})) {
          // We haven't allocated a feedback vector, but the function has
          // executed at least once. We don't have a precise invocation count
          // here.
          count = 1;
        }
        counter_map->Add(shared, count);
      }

      // Also check functions on the stack to collect the count map. With lazy
      // feedback allocation we may miss counting functions if the feedback
      // vector wasn't allocated yet and the function's interrupt budget wasn't
      // updated (i.e. it didn't execute return / jump).
      for (JavaScriptStackFrameIterator it(isolate); !it.done(); it.Advance()) {
        Tagged<SharedFunctionInfo> shared = it.frame()->function()->shared();
        if (counter_map->Get(shared) != 0) continue;
        counter_map->Add(shared, 1);
      }
      break;
    }
  }
}

// A {SFI, count} tuple is used to sort by source range (stored on
// the SFI) and call count (in the counter map).
struct SharedFunctionInfoAndCount {
  SharedFunctionInfoAndCount(Handle<SharedFunctionInfo> info, uint32_t count)
      : info(info),
        count(count),
        start(StartPosition(*info)),
        end(info->EndPosition()) {}

  // Sort by:
  // - start, ascending.
  // - end, descending.
  // - info.is_toplevel() first
  // - count, descending.
  bool operator<(const SharedFunctionInfoAndCount& that) const {
    if (this->start != that.start) return this->start < that.start;
    if (this->end != that.end) return this->end > that.end;
    if (this->info->is_toplevel() != that.info->is_toplevel()) {
      return this->info->is_toplevel();
    }
    return this->count > that.count;
  }

  Handle<SharedFunctionInfo> info;
  uint32_t count;
  int start;
  int end;
};
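
// Illustrative sketch (hypothetical SFIs): {0, 50, toplevel, count: 0},
// {0, 50, count: 1} and {10, 20, count: 7} sort as: the toplevel SFI first
// (identical source range, is_toplevel() breaks the tie), then
// {0, 50, count: 1}, then {10, 20, count: 7} (larger start position last).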

}  // anonymous namespace

std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
  DCHECK(!isolate->is_best_effort_code_coverage());
  std::unique_ptr<Coverage> result =
      Collect(isolate, isolate->code_coverage_mode());
  if (isolate->is_precise_binary_code_coverage() ||
      isolate->is_block_binary_code_coverage()) {
    // We do not have to hold onto feedback vectors for invocations we already
    // reported. So we can reset the list.
    isolate->SetFeedbackVectorsForProfilingTools(
        ReadOnlyRoots(isolate).empty_array_list());
  }
  return result;
}

std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
}

std::unique_ptr<Coverage> Coverage::Collect(
    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
  // Unsupported if jitless mode is enabled at build-time since related
  // optimizations deactivate invocation count updates.
  CHECK(!V8_JITLESS_BOOL);

  // Collect call counts for all functions.
  SharedToCounterMap counter_map;
  CollectAndMaybeResetCounts(isolate, &counter_map, collectionMode);

  // Iterate shared function infos of every script and build a mapping
  // between source ranges and invocation counts.
  std::unique_ptr<Coverage> result(new Coverage());

  std::vector<Handle<Script>> scripts;
  Script::Iterator scriptIt(isolate);
  for (Tagged<Script> script = scriptIt.Next(); !script.is_null();
       script = scriptIt.Next()) {
    if (script->IsUserJavaScript()) scripts.push_back(handle(script, isolate));
  }

  for (Handle<Script> script : scripts) {
    // Create and add new script data.
    result->emplace_back(script);
    std::vector<CoverageFunction>* functions = &result->back().functions;

    std::vector<SharedFunctionInfoAndCount> sorted;

    {
      // Sort functions by start position, from outer to inner functions.
      SharedFunctionInfo::ScriptIterator infos(isolate, *script);
      for (Tagged<SharedFunctionInfo> info = infos.Next(); !info.is_null();
           info = infos.Next()) {
        sorted.emplace_back(handle(info, isolate), counter_map.Get(info));
      }
      std::sort(sorted.begin(), sorted.end());
    }

    // Stack to track nested functions, referring function by index.
    std::vector<size_t> nesting;

    // Use sorted list to reconstruct function nesting.
    for (const SharedFunctionInfoAndCount& v : sorted) {
      Handle<SharedFunctionInfo> info = v.info;
      int start = v.start;
      int end = v.end;
      uint32_t count = v.count;

      // Find the correct outer function based on start position.
      //
      // This is, in general, not robust when considering two functions with
      // identical source ranges; then the notion of inner and outer is
      // unclear. Identical source ranges arise when the source range of the
      // top-most entity (e.g. function) in the script is identical to the
      // whole script, e.g. <script>function foo() {}</script>. The script has
      // its own shared function info, which has the same source range as the
      // SFI for `foo`. Node.js creates an additional wrapper for scripts
      // (again with identical source range) and those wrappers will have a
      // call count of zero even if the wrapped script was executed (see
      // v8:9212). We mitigate this issue by sorting top-level SFIs first
      // among SFIs with the same source range: This ensures top-level SFIs
      // are processed first. If a top-level SFI has a non-zero call count, it
      // gets recorded due to `function_is_relevant` below (e.g. script
      // wrappers), while top-level SFIs with zero call count do not get
      // reported (this ensures node's extra wrappers do not get reported). If
      // two SFIs with identical source ranges get reported, we report them in
      // decreasing order of call count, as in all known cases this
      // corresponds to the nesting order. In the case of the script tag
      // example above, we report the zero call count of `foo` last. As it
      // turns out, embedders started to rely on functions being reported in
      // nesting order.
      // TODO(jgruber): Investigate whether it is possible to remove node's
      // extra top-level wrapper script, or change its source range, or ensure
      // that it follows the invariant that nesting order is descending count
      // order for SFIs with identical source ranges.
      while (!nesting.empty() && functions->at(nesting.back()).end <= start) {
        nesting.pop_back();
      }

      if (count != 0) {
        switch (collectionMode) {
          case v8::debug::CoverageMode::kBlockCount:
          case v8::debug::CoverageMode::kPreciseCount:
            break;
          case v8::debug::CoverageMode::kBlockBinary:
          case v8::debug::CoverageMode::kPreciseBinary:
            count = info->has_reported_binary_coverage() ? 0 : 1;
            info->set_has_reported_binary_coverage(true);
            break;
          case v8::debug::CoverageMode::kBestEffort:
            count = 1;
            break;
        }
      }

      Handle<String> name = SharedFunctionInfo::DebugName(isolate, info);
      CoverageFunction function(start, end, count, name);

      if (IsBlockMode(collectionMode) && info->HasCoverageInfo(isolate)) {
        CollectBlockCoverage(isolate, &function, *info, collectionMode);
      }

      // Only include a function range if itself or its parent function is
      // covered, or if it contains non-trivial block coverage.
      bool is_covered = (count != 0);
      bool parent_is_covered =
          (!nesting.empty() && functions->at(nesting.back()).count != 0);
      bool has_block_coverage = !function.blocks.empty();
      bool function_is_relevant =
          (is_covered || parent_is_covered || has_block_coverage);

      // It must also have a non-empty source range (otherwise it is not
      // interesting to report).
      bool has_nonempty_source_range = function.HasNonEmptySourceRange();

      if (has_nonempty_source_range && function_is_relevant) {
        nesting.push_back(functions->size());
        functions->emplace_back(function);
      }

      if (v8_flags.trace_block_coverage) {
        PrintBlockCoverage(&function, *info, has_nonempty_source_range,
                           function_is_relevant);
      }
    }

    // Remove entries for scripts that have no coverage.
    if (functions->empty()) result->pop_back();
  }
  return result;
}
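
// Illustrative sketch of embedder-side consumption (hypothetical usage;
// Coverage derives from std::vector<CoverageScript>):
//
//   std::unique_ptr<Coverage> coverage = Coverage::CollectPrecise(isolate);
//   for (const CoverageScript& script : *coverage) {
//     for (const CoverageFunction& f : script.functions) {
//       // Inspect f.start, f.end, f.count, and f.blocks in block modes.
//     }
//   }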

void Coverage::SelectMode(Isolate* isolate, debug::CoverageMode mode) {
  if (mode != isolate->code_coverage_mode()) {
    // Changing the coverage mode can change the bytecode that would be
    // generated for a function, which can interfere with lazy source positions,
    // so just force source position collection whenever there's such a change.
    isolate->CollectSourcePositionsForAllBytecodeArrays();
    // Changing the coverage mode changes the generated bytecode and hence it is
    // not safe to flush bytecode. Set a flag here, so we can disable bytecode
    // flushing.
    isolate->set_disable_bytecode_flushing(true);
  }

  switch (mode) {
    case debug::CoverageMode::kBestEffort:
      // Note that DevTools switches back to best-effort coverage once the
      // recording is stopped. Since we delete coverage infos at that point, any
      // following coverage recording (without reloads) will be at function
      // granularity.
      isolate->debug()->RemoveAllCoverageInfos();
      isolate->SetFeedbackVectorsForProfilingTools(
          ReadOnlyRoots(isolate).undefined_value());
      break;
    case debug::CoverageMode::kBlockBinary:
    case debug::CoverageMode::kBlockCount:
    case debug::CoverageMode::kPreciseBinary:
    case debug::CoverageMode::kPreciseCount: {
      HandleScope scope(isolate);

      // Remove all optimized functions. Optimized and inlined functions do not
      // increment invocation count.
      Deoptimizer::DeoptimizeAll(isolate);

      std::vector<Handle<JSFunction>> funcs_needing_feedback_vector;
      {
        HeapObjectIterator heap_iterator(isolate->heap());
        for (Tagged<HeapObject> o = heap_iterator.Next(); !o.is_null();
             o = heap_iterator.Next()) {
          if (IsJSFunction(o)) {
            Tagged<JSFunction> func = Cast<JSFunction>(o);
            if (func->has_closure_feedback_cell_array()) {
              funcs_needing_feedback_vector.push_back(
                  Handle<JSFunction>(func, isolate));
            }
          } else if (IsBinaryMode(mode) && IsSharedFunctionInfo(o)) {
            // If collecting binary coverage, reset
            // SFI::has_reported_binary_coverage to avoid optimizing / inlining
            // functions before they have reported coverage.
            Tagged<SharedFunctionInfo> shared = Cast<SharedFunctionInfo>(o);
            shared->set_has_reported_binary_coverage(false);
          } else if (IsFeedbackVector(o)) {
            // In any case, clear any collected invocation counts.
            Cast<FeedbackVector>(o)->clear_invocation_count(kRelaxedStore);
          }
        }
      }

      for (DirectHandle<JSFunction> func : funcs_needing_feedback_vector) {
        IsCompiledScope is_compiled_scope(
            func->shared()->is_compiled_scope(isolate));
        CHECK(is_compiled_scope.is_compiled());
        JSFunction::EnsureFeedbackVector(isolate, func, &is_compiled_scope);
      }

      // Root all feedback vectors to avoid early collection.
      isolate->MaybeInitializeVectorListFromHeap();

      break;
    }
  }
  isolate->set_code_coverage_mode(mode);
}

}  // namespace internal
}  // namespace v8