#ifndef V8_LOGGING_COUNTERS_H_
#define V8_LOGGING_COUNTERS_H_
  void Set(int value) { GetPtr()->store(value, std::memory_order_relaxed); }

  void Increment(int value = 1) {
    GetPtr()->fetch_add(value, std::memory_order_relaxed);
  }

  void Decrement(int value = 1) {
    GetPtr()->fetch_sub(value, std::memory_order_relaxed);
  }
 
 
  // Reset the cached internal pointer.
  void Reset() { ptr_.store(nullptr, std::memory_order_relaxed); }

  // Returns the cached pointer into the embedder's stats table; falls back to
  // a slow-path lookup (which also caches the result) on first use.
  std::atomic<int>* GetPtr() {
    auto* ptr = ptr_.load(std::memory_order_acquire);
    if (V8_LIKELY(ptr)) return ptr;
    return SetupPtrFromStatsTable();
  }

  // A pointer to an atomic, set atomically in GetPtr().
  std::atomic<std::atomic<int>*> ptr_{nullptr};
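  // Usage sketch (illustrative, not part of this header): counters are
  // normally reached through the accessors generated on Counters, e.g.
  // assuming a counter such as global_handles declared in STATS_COUNTER_LIST:
  //
  //   StatsCounter* counter = isolate->counters()->global_handles();
  //   counter->Increment();    // relaxed fetch_add, safe from any thread
  //   counter->Decrement(2);   // relaxed fetch_sub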
 
 
  void EnsureCreated(bool create_new = true) {
    // Double-checked creation: the acquire load filters out the common
    // already-created case; the relaxed re-check runs under the mutex.
    if (create_new && histogram_.load(std::memory_order_acquire) == nullptr) {
      base::MutexGuard Guard(&mutex_);
      if (histogram_.load(std::memory_order_relaxed) == nullptr)
        histogram_.store(CreateHistogram(), std::memory_order_release);
    }
  }
 
 
 
  // Debug-time consistency check: flips the per-thread running state and
  // reports whether it previously matched |expected_is_running|, catching
  // unbalanced or nested Start/Stop pairs.
  bool ToggleRunningState(bool expected_is_running) const;
 
 
class NestedTimedHistogramScope;
class PauseNestedTimedHistogramScope;
 
  // AggregatingHistogramTimerScope: starts the aggregating ("total") timer.
  explicit AggregatingHistogramTimerScope(AggregatableHistogramTimer* histogram)
      : histogram_(histogram) {
    histogram_->Start();
  }

  // AggregatedHistogramTimerScope: times one aggregated sub-interval.
  explicit AggregatedHistogramTimerScope(AggregatableHistogramTimer* histogram)
      : histogram_(histogram) {
    timer_.Start();
  }
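  // Usage sketch (illustrative): the outer scope brackets a whole operation,
  // each inner scope adds its elapsed time to the aggregate, and the total is
  // sampled when the outer scope unwinds. Assuming the compile_lazy timer
  // from AGGREGATABLE_HISTOGRAM_TIMER_LIST:
  //
  //   AggregatingHistogramTimerScope outer(counters->compile_lazy());
  //   for (auto& unit : work) {
  //     AggregatedHistogramTimerScope inner(counters->compile_lazy());
  //     Process(unit);
  //   }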
 
 
 
// Turns irregular (time, value) samples into time-uniform samples for the
// backing histogram by linear interpolation over fixed intervals of
// v8_flags.histogram_interval milliseconds.
template <typename Histogram>
class AggregatedMemoryHistogram {
 public:
  // Records a sample at the given time (in ms); may emit zero or more
  // aggregated samples to the backing histogram.
  void AddSample(double current_ms, double current_value);

 private:
  // Time-weighted average of the running aggregate and the latest segment.
  double Aggregate(double current_ms, double current_value);
 
 
template <typename Histogram>
void AggregatedMemoryHistogram<Histogram>::AddSample(double current_ms,
                                                     double current_value) {
  if (!is_initialized_) {
    aggregate_value_ = current_value;
    start_ms_ = current_ms;
    last_value_ = current_value;
    last_ms_ = current_ms;
    is_initialized_ = true;
  } else {
    const double kEpsilon = 1e-6;
    const int kMaxSamples = 1000;
    if (current_ms < last_ms_ + kEpsilon) {
      // Two samples have (nearly) the same time: keep the latest value.
      last_value_ = current_value;
    } else {
      double sample_interval_ms = v8_flags.histogram_interval;
      double end_ms = start_ms_ + sample_interval_ms;
      if (end_ms <= current_ms + kEpsilon) {
        // Linearly interpolate between last_ms_ and current_ms.
        double slope = (current_value - last_value_) / (current_ms - last_ms_);
        int i;
        // Emit aggregated samples to the backing histogram from start_ms_ up
        // to current_ms, one per sample interval.
        for (i = 0; i < kMaxSamples && end_ms <= current_ms + kEpsilon; i++) {
          double end_value = last_value_ + (end_ms - last_ms_) * slope;
          double sample_value;
          if (i == 0) {
            // Take the carried-over aggregate_value_ into account.
            sample_value = Aggregate(end_ms, end_value);
          } else {
            // There is no aggregate_value_ for i > 0.
            sample_value = (last_value_ + end_value) / 2;
          }
          backing_histogram_->AddSample(static_cast<int>(sample_value + 0.5));
          last_value_ = end_value;
          last_ms_ = end_ms;
          end_ms += sample_interval_ms;
        }
        if (i == kMaxSamples) {
          // Hit the sample limit: drop the remaining samples and restart
          // aggregation from the current sample.
          aggregate_value_ = current_value;
          start_ms_ = current_ms;
        } else {
          aggregate_value_ = last_value_;
          start_ms_ = last_ms_;
        }
      }
      aggregate_value_ = current_ms > start_ms_ + kEpsilon
                             ? Aggregate(current_ms, current_value)
                             : aggregate_value_;
      last_value_ = current_value;
      last_ms_ = current_ms;
    }
  }
}
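// Illustrative sketch (not part of V8): any type with an AddSample(int)
// method satisfies the implicit Histogram requirement, so the interpolation
// above can be exercised with a trivial recorder. Names here are hypothetical
// and <vector> is assumed to be available.
//
//   struct RecordingHistogram {
//     std::vector<int> samples;
//     void AddSample(int sample) { samples.push_back(sample); }
//   };
//
//   RecordingHistogram recorder;
//   AggregatedMemoryHistogram<RecordingHistogram> memory(&recorder);
//   memory.AddSample(/*current_ms=*/0.0, /*current_value=*/10.0);
//   memory.AddSample(/*current_ms=*/400.0, /*current_value=*/30.0);
//   memory.AddSample(/*current_ms=*/1300.0, /*current_value=*/20.0);
//   // One aggregated sample is emitted per elapsed histogram interval
//   // (v8_flags.histogram_interval, 600 ms by default): here at t=600
//   // and t=1200.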
 
 
template <typename Histogram>
double AggregatedMemoryHistogram<Histogram>::Aggregate(double current_ms,
                                                       double current_value) {
  double interval_ms = current_ms - start_ms_;
  double value = (current_value + last_value_) / 2;
  // aggregate_value_ is the average over [start_ms_, last_ms_]; value is the
  // average over [last_ms_, current_ms]. Weight each by its share of the
  // whole interval.
  return aggregate_value_ * ((last_ms_ - start_ms_) / interval_ms) +
         value * ((current_ms - last_ms_) / interval_ms);
}
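// Worked example: with start_ms_ = 0, last_ms_ = 6, aggregate_value_ = 4,
// last_value_ = 2, and a new sample (current_ms = 10, current_value = 8):
// value = (8 + 2) / 2 = 5, interval_ms = 10, and the result is
// 4 * (6 / 10) + 5 * (4 / 10) = 2.4 + 2.0 = 4.4, i.e. the old aggregate and
// the newest linear segment weighted by the time each one covers.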
 
 
class Counters : public std::enable_shared_from_this<Counters> {
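  // Counters participates in shared ownership: enable_shared_from_this
  // (above) lets long-running work, for example background tasks, retain the
  // counter tables via std::shared_ptr independently of the owning isolate.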
 
#define HR(name, caption, min, max, num_buckets) \
  Histogram* name() {                            \
    name##_.EnsureCreated();                     \
    return &name##_;                             \
  }
  HISTOGRAM_RANGE_LIST(HR)
#undef HR
 
#if V8_ENABLE_DRUMBRAKE
#define HR(name, caption, min, max, num_buckets)     \
  Histogram* name() {                                \
    name##_.EnsureCreated(v8_flags.slow_histograms); \
    return &name##_;                                 \
  }
  HISTOGRAM_RANGE_LIST_SLOW(HR)
#undef HR
#endif  // V8_ENABLE_DRUMBRAKE
 
#define HT(name, caption, max, res) \
  NestedTimedHistogram* name() {    \
    name##_.EnsureCreated();        \
    return &name##_;                \
  }
  NESTED_TIMED_HISTOGRAM_LIST(HT)
#undef HT

#define HT(name, caption, max, res)                  \
  NestedTimedHistogram* name() {                     \
    name##_.EnsureCreated(v8_flags.slow_histograms); \
    return &name##_;                                 \
  }
  NESTED_TIMED_HISTOGRAM_LIST_SLOW(HT)
#undef HT

#define HT(name, caption, max, res) \
  TimedHistogram* name() {          \
    name##_.EnsureCreated();        \
    return &name##_;                \
  }
  TIMED_HISTOGRAM_LIST(HT)
#undef HT

#define AHT(name, caption)             \
  AggregatableHistogramTimer* name() { \
    name##_.EnsureCreated();           \
    return &name##_;                   \
  }
  AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
#undef AHT
 
#define HP(name, caption)       \
  PercentageHistogram* name() { \
    name##_.EnsureCreated();    \
    return &name##_;            \
  }
  HISTOGRAM_PERCENTAGE_LIST(HP)
#undef HP

#define HM(name, caption)         \
  LegacyMemoryHistogram* name() { \
    name##_.EnsureCreated();      \
    return &name##_;              \
  }
  HISTOGRAM_LEGACY_MEMORY_LIST(HM)
#undef HM
 
#define SC(name, caption) \
  StatsCounter* name() { return &name##_; }
  STATS_COUNTER_LIST(SC)
  STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC
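  // For example, a hypothetical entry SC(foo, "V8.Foo") expands to:
  //   StatsCounter* foo() { return &foo_; }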
 
  enum Id {
#define RATE_ID(name, caption, max, res) k_##name,
    NESTED_TIMED_HISTOGRAM_LIST(RATE_ID)
    NESTED_TIMED_HISTOGRAM_LIST_SLOW(RATE_ID)
    TIMED_HISTOGRAM_LIST(RATE_ID)
#undef RATE_ID
#define AGGREGATABLE_ID(name, caption) k_##name,
    AGGREGATABLE_HISTOGRAM_TIMER_LIST(AGGREGATABLE_ID)
#undef AGGREGATABLE_ID
#define PERCENTAGE_ID(name, caption) k_##name,
    HISTOGRAM_PERCENTAGE_LIST(PERCENTAGE_ID)
#undef PERCENTAGE_ID
#define MEMORY_ID(name, caption) k_##name,
    HISTOGRAM_LEGACY_MEMORY_LIST(MEMORY_ID)
#undef MEMORY_ID
#define COUNTER_ID(name, caption) k_##name,
    STATS_COUNTER_LIST(COUNTER_ID)
    STATS_COUNTER_NATIVE_CODE_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOf##name, kSizeOf##name,
    INSTANCE_TYPE_LIST(COUNTER_ID)
#undef COUNTER_ID
#define COUNTER_ID(name) kCountOfCODE_TYPE_##name, kSizeOfCODE_TYPE_##name,
    CODE_KIND_LIST(COUNTER_ID)
#undef COUNTER_ID
    stats_counter_count
  };
 
#ifdef V8_RUNTIME_CALL_STATS
  RuntimeCallStats* runtime_call_stats() { return &runtime_call_stats_; }

  WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats() {
    return &worker_thread_runtime_call_stats_;
  }
#endif  // V8_RUNTIME_CALL_STATS
 
#define HR(name, caption, min, max, num_buckets) Histogram name##_;
  HISTOGRAM_RANGE_LIST(HR)
#if V8_ENABLE_DRUMBRAKE
  HISTOGRAM_RANGE_LIST_SLOW(HR)
#endif  // V8_ENABLE_DRUMBRAKE
#undef HR
#define HT(name, caption, max, res) NestedTimedHistogram name##_;
  NESTED_TIMED_HISTOGRAM_LIST(HT)
  NESTED_TIMED_HISTOGRAM_LIST_SLOW(HT)
#undef HT
#define HT(name, caption, max, res) TimedHistogram name##_;
  TIMED_HISTOGRAM_LIST(HT)
#undef HT
#define AHT(name, caption) AggregatableHistogramTimer name##_;
  AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
#undef AHT
#define HP(name, caption) PercentageHistogram name##_;
  HISTOGRAM_PERCENTAGE_LIST(HP)
#undef HP
#define HM(name, caption) LegacyMemoryHistogram name##_;
  HISTOGRAM_LEGACY_MEMORY_LIST(HM)
#undef HM
#define SC(name, caption) StatsCounter name##_;
  STATS_COUNTER_LIST(SC)
  STATS_COUNTER_NATIVE_CODE_LIST(SC)
#undef SC

#ifdef V8_RUNTIME_CALL_STATS
  RuntimeCallStats runtime_call_stats_;
  WorkerThreadRuntimeCallStats worker_thread_runtime_call_stats_;
#endif  // V8_RUNTIME_CALL_STATS
 
  virtual void Visit(Histogram* histogram, const char* caption, int min,
                     int max, int num_buckets);
 
  virtual void Visit(AggregatableHistogramTimer* histogram,
                     const char* caption);
 
 
  void Visit(Histogram* histogram, const char* caption, int min, int max,
             int num_buckets) final;
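  // Sketch of a hypothetical subclass (not part of this header) wired to the
  // virtual hooks declared above; assumes <cstdio>:
  //
  //   class DumpingVisitor final : public CountersVisitor {
  //    public:
  //     using CountersVisitor::CountersVisitor;
  //     void Visit(Histogram* histogram, const char* caption, int min,
  //                int max, int num_buckets) final {
  //       std::printf("%s: range [%d, %d], %d buckets\n", caption, min, max,
  //                   num_buckets);
  //     }
  //   };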
 
 
#endif  // V8_LOGGING_COUNTERS_H_