#ifndef V8_HEAP_CPPGC_STATS_COLLECTOR_H_
#define V8_HEAP_CPPGC_STATS_COLLECTOR_H_
#define CPPGC_FOR_ALL_HISTOGRAM_SCOPES(V) \

#define CPPGC_FOR_ALL_SCOPES(V) \
  V(MarkIncrementalStart) \
  V(MarkIncrementalFinalize) \
  V(MarkAtomicPrologue) \
  V(MarkAtomicEpilogue) \
  V(MarkTransitiveClosure) \
  V(MarkTransitiveClosureWithDeadline) \
  V(MarkFlushEphemerons) \
  V(MarkProcessBailOutObjects) \
  V(MarkProcessMarkingWorklist) \
  V(MarkProcessRetraceWorklist) \
  V(MarkProcessWriteBarrierWorklist) \
  V(MarkProcessNotFullyconstructedWorklist) \
  V(MarkProcessEphemerons) \
  V(MarkVisitNotFullyConstructedObjects) \
  V(MarkVisitPersistents) \
  V(MarkVisitCrossThreadPersistents) \
  V(MarkVisitRememberedSets) \
  V(WeakContainerCallbacksProcessing) \
  V(CustomCallbacksProcessing) \
  V(SweepFinalizeEmptyPages) \
  V(SweepFinalizeSweptPages) \
  V(SweepFinishIfOutOfWork) \
  V(SweepInvokePreFinalizers) \
  V(SweepInLowPriorityTask) \
  V(SweepInTaskForStatistics) \
  V(SweepOnAllocation) \

#define CPPGC_FOR_ALL_HISTOGRAM_CONCURRENT_SCOPES(V) \
  V(ConcurrentWeakCallback)

#define CPPGC_FOR_ALL_CONCURRENT_SCOPES(V) \
  V(ConcurrentMarkProcessEphemeronWorklist) \
  V(ConcurrentMarkProcessMarkingWorklist) \
  V(ConcurrentMarkProcessNotFullyconstructedWorklist) \
  V(ConcurrentMarkProcessWriteBarrierWorklist)
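// The scope lists above follow the X-macro pattern: each V(name) entry is
// expanded by CPPGC_DECLARE_ENUM (defined below as `k##name,`) to build the
// scope-id enums. A rough sketch of what the preprocessor produces for the
// concurrent scopes, using only names visible in this excerpt:
//
//   enum ConcurrentScopeId {
//     kConcurrentWeakCallback,                  // histogram-reported scopes
//     kNumHistogramConcurrentScopeIds,          // boundary marker
//     kConcurrentMarkProcessEphemeronWorklist,  // trace-only scopes
//     kConcurrentMarkProcessMarkingWorklist,
//     kConcurrentMarkProcessNotFullyconstructedWorklist,
//     kConcurrentMarkProcessWriteBarrierWorklist,
//     kNumConcurrentScopeIds
//   };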
#if defined(CPPGC_DECLARE_ENUM)
static_assert(false, "CPPGC_DECLARE_ENUM macro is already defined");
#endif
#define CPPGC_DECLARE_ENUM(name) k##name,
      kNumHistogramScopeIds,
#undef CPPGC_DECLARE_ENUM

#define CPPGC_DECLARE_ENUM(name) k##name,
      kNumHistogramConcurrentScopeIds,
#undef CPPGC_DECLARE_ENUM
      kNumConcurrentScopeIds
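// Note that kNumHistogramScopeIds / kNumHistogramConcurrentScopeIds are not
// just counts: they double as boundaries. Ids below the boundary come from the
// *_HISTOGRAM_* lists and are reported to the metric recorder; ids above it
// are only emitted as trace events (IncreaseScopeTime() further down returns
// early for them).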
    size_t marked_bytes = 0;
    size_t object_size_before_sweep_bytes = -1;
    size_t memory_size_before_sweep_bytes = -1;
#if defined(CPPGC_CASE)
static_assert(false, "CPPGC_CASE macro is already defined");
#endif
#define CPPGC_CASE(name)                                   \
  case k##name:                                            \
    return type == CollectionType::kMajor ? "CppGC." #name \
                                          : "CppGC." #name ".Minor";
#define CPPGC_CASE(name)                                   \
  case k##name:                                            \
    return type == CollectionType::kMajor ? "CppGC." #name \
                                          : "CppGC." #name ".Minor";
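// CPPGC_CASE stamps out one switch case per scope, mapping a scope id and a
// CollectionType to its trace-event name in GetScopeName(). For example
// (illustrative):
//
//   GetScopeName(kMarkIncrementalStart, CollectionType::kMajor)
//       -> "CppGC.MarkIncrementalStart"
//   and, for any non-major (minor) collection type,
//       -> "CppGC.MarkIncrementalStart.Minor"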
  template <TraceCategory trace_category, ScopeContext scope_category>
  class InternalScope {
   public:
    using ScopeIdType = std::conditional_t<scope_category == kMutatorThread,
                                           ScopeId, ConcurrentScopeId>;
    template <typename... Args>
    InternalScope(StatsCollector* stats_collector, ScopeIdType scope_id,
                  Args... args)
        : stats_collector_(stats_collector),
          start_time_(v8::base::TimeTicks::Now()),
          scope_id_(scope_id) {
      DCHECK_LT(static_cast<int>(scope_id_),
                scope_category == kMutatorThread
                    ? static_cast<int>(kNumScopeIds)
                    : static_cast<int>(kNumConcurrentScopeIds));
      DCHECK_NE(static_cast<int>(scope_id_),
                scope_category == kMutatorThread
                    ? static_cast<int>(kNumHistogramScopeIds)
                    : static_cast<int>(kNumHistogramConcurrentScopeIds));
    void* operator new(size_t, void*) = delete;
    void* operator new(size_t) = delete;
    template <typename... Args>
    inline void StartTrace(Args... args);
    inline void StopTrace();

    inline void StartTraceImpl();
    template <typename Value1>
    inline void StartTraceImpl(const char* k1, Value1 v1);
    template <typename Value1, typename Value2>
    inline void StartTraceImpl(const char* k1, Value1 v1, const char* k2,
                               Value2 v2);
    inline void StopTraceImpl();

    inline void IncreaseScopeTime();
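    // InternalScope is an RAII timing helper: constructing one records the
    // start time and begins a trace event via StartTrace(); StopTrace() ends
    // the event and, for histogram scope ids, accumulates the elapsed time on
    // the owning StatsCollector. A hedged usage sketch follows; the kEnabled
    // enumerator is an assumption here, since the TraceCategory enumerators
    // are not shown in this excerpt:
    //
    //   {
    //     StatsCollector::InternalScope<StatsCollector::kEnabled,
    //                                   StatsCollector::kMutatorThread>
    //         stats_scope(stats_collector,
    //                     StatsCollector::kMarkIncrementalStart);
    //     // ... work attributed to the MarkIncrementalStart scope ...
    //   }  // Leaving the scope stops the trace and accumulates the time.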
  static constexpr size_t kAllocationThresholdBytes = 1024;

  void NotifyAllocation(size_t);
  void NotifyExplicitFree(size_t);
  void NotifySafePointForConservativeCollection();
  void NotifySafePointForTesting();
  void NotifyMarkingCompleted(size_t marked_bytes);
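  // Allocation bookkeeping is deliberately cheap on the hot path:
  // NotifyAllocation()/NotifyExplicitFree() only adjust
  // allocated_bytes_since_safepoint_ /
  // explicitly_freed_bytes_since_safepoint_. Registered observers are
  // informed lazily at safepoints, presumably only once the net delta since
  // the last safepoint reaches kAllocationThresholdBytes. A rough call
  // pattern from an allocator (illustrative only, not the actual cppgc
  // allocator code):
  //
  //   stats_collector->NotifyAllocation(allocated_size);
  //   stats_collector->NotifySafePointForConservativeCollection();
  //   // Observers only hear about the change once enough bytes accumulated.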
  size_t allocated_memory_size() const;
  size_t allocated_object_size() const;
  size_t marked_bytes() const;
  size_t marked_bytes_on_current_cycle() const;
  double GetRecentAllocationSpeedInBytesPerMs() const;

  void NotifyAllocatedMemory(int64_t);
  void NotifyFreedMemory(int64_t);

  void IncrementDiscardedMemory(size_t);
  void DecrementDiscardedMemory(size_t);
  void ResetDiscardedMemory();
  size_t discarded_memory_size() const;
  size_t resident_memory_size() const;
  void SetMetricRecorder(std::unique_ptr<MetricRecorder> histogram_recorder) {
    metric_recorder_ = std::move(histogram_recorder);
  }
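  // The embedder installs a MetricRecorder so that histogram samples recorded
  // via RecordHistogramSample() reach its metrics infrastructure; ownership
  // transfers to the StatsCollector. A hedged sketch (MyMetricRecorder is a
  // hypothetical subclass, not part of cppgc):
  //
  //   stats_collector->SetMetricRecorder(std::make_unique<MyMetricRecorder>());
  //   MetricRecorder* recorder = stats_collector->GetMetricRecorder();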
  template <typename Callback>
  void ForAllAllocationObservers(Callback callback);

  void AllocatedObjectSizeSafepointImpl();
  int64_t allocated_bytes_since_end_of_marking_ = 0;
  int64_t allocated_bytes_since_safepoint_ = 0;
  int64_t explicitly_freed_bytes_since_safepoint_ = 0;
#ifdef CPPGC_VERIFY_HEAP
  size_t tracked_live_bytes_ = 0;
#endif  // CPPGC_VERIFY_HEAP
  size_t marked_bytes_so_far_ = 0;
  int64_t memory_allocated_bytes_ = 0;
  int64_t memory_freed_bytes_since_end_of_marking_ = 0;
  std::atomic<size_t> discarded_bytes_{0};
  bool allocation_observer_deleted_ = false;
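  // allocation_observers_ together with allocation_observer_deleted_ allow an
  // observer to unregister itself from inside a callback: the slot is
  // presumably nulled out and the flag set, so ForAllAllocationObservers()
  // can skip the empty slot while iterating and compact the vector afterwards
  // instead of invalidating iterators mid-walk.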
template <typename Callback>

  switch (trace_category) {

template <typename... Args>

      (scope_category == kMutatorThread
           ? static_cast<int>(kNumHistogramScopeIds)
           : static_cast<int>(kNumHistogramConcurrentScopeIds)),

  StartTraceImpl(args...);
                   scope_category>::StopTrace() {

                   scope_category>::StartTraceImpl() {

template <typename Value1>
    trace_category, scope_category>::StartTraceImpl(const char* k1, Value1 v1) {

template <typename Value1, typename Value2>
    trace_category, scope_category>::StartTraceImpl(const char* k1, Value1 v1,
                                                    const char* k2, Value2 v2) {

                   scope_category>::StopTraceImpl() {

                   scope_category>::IncreaseScopeTime() {
  if (static_cast<int>(scope_id_) >=
      (scope_category == kMutatorThread
           ? static_cast<int>(kNumHistogramScopeIds)
           : static_cast<int>(kNumHistogramConcurrentScopeIds)))

  const int64_t us = time.InMicroseconds();
      static_cast<AtomicWord>(us));
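// Concurrent scopes report from worker threads, so their per-scope time is
// accumulated as microseconds via a relaxed atomic add into the
// concurrent_scope_data array, whereas mutator-thread scopes add their
// v8::base::TimeDelta into scope_data directly. A sketch of the concurrent
// path (current_event stands in for the collector's current Event; the actual
// member name is not shown in this excerpt):
//
//   const int64_t us = time.InMicroseconds();
//   v8::base::Relaxed_AtomicIncrement(
//       &current_event.concurrent_scope_data[scope_id_],
//       static_cast<AtomicWord>(us));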