static constexpr double kGrowingFactor = 1.5;
static constexpr size_t kMinLimitIncrease =

                                  size + kMinLimitIncrease);
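For context, a minimal self-contained sketch of the limit computation these fragments belong to. The helper name, the placeholder constant kAssumedMinLimitIncrease, and the parameter names are illustrative assumptions; the real initializer of kMinLimitIncrease is truncated in the fragment above:

#include <algorithm>
#include <cstddef>

// Placeholder value; the real constant's initializer is elided above.
constexpr std::size_t kAssumedMinLimitIncrease = 4 * 4096;

std::size_t ComputeLimitForAtomicGC(std::size_t allocated_object_size,
                                    std::size_t initial_heap_size) {
  static constexpr double kGrowingFactor = 1.5;
  const std::size_t size = std::max(allocated_object_size, initial_heap_size);
  // Grow the limit by a factor, but never by less than a fixed increase, so
  // small heaps still get a meaningful amount of headroom.
  return std::max(static_cast<std::size_t>(size * kGrowingFactor),
                  size + kAssumedMinLimitIncrease);
}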
  return std::make_unique<internal::CppHeap>(platform, params.custom_spaces,
                                             params.marking_support,
                                             params.sweeping_support);

    std::vector<cppgc::CustomSpaceIndex> custom_spaces,
    std::unique_ptr<CustomSpaceStatisticsReceiver> receiver) {

      std::move(custom_spaces), std::move(receiver));

      internal::CppHeap::CollectionType::kMajor, stack_state);

      internal::CppHeap::CollectionType::kMinor, stack_state);

            ? platform->GetPageAllocator()

  CppgcPlatformAdapter(const CppgcPlatformAdapter&) = delete;
  CppgcPlatformAdapter& operator=(const CppgcPlatformAdapter&) = delete;

  double MonotonicallyIncreasingTime() final {

  std::shared_ptr<TaskRunner> GetForegroundTaskRunner(

    if (!isolate_ && !is_in_detached_mode_) return nullptr;

                                  std::unique_ptr<JobTask> job_task) final {
class UnifiedHeapConcurrentMarker

  UnifiedHeapConcurrentMarker(

      UnifiedHeapMarkingState& unified_heap_marking_state,

            heap, marking_worklists, incremental_marking_schedule, platform),

  std::unique_ptr<cppgc::Visitor> CreateConcurrentMarkingVisitor(

std::unique_ptr<cppgc::Visitor>
UnifiedHeapConcurrentMarker::CreateConcurrentMarkingVisitor(

  return std::make_unique<ConcurrentUnifiedHeapMarkingVisitor>(

void FatalOutOfMemoryHandlerImpl(const std::string& reason,
                                 const SourceLocation&, HeapBase* heap) {

  auto* isolate = cpp_heap->isolate();

  if (v8_flags.heap_snapshot_on_oom) {

    isolate->heap()->heap_profiler()->WriteSnapshotToDiskAfterGC(

void GlobalFatalOutOfMemoryHandlerImpl(const std::string& reason,
                                       const SourceLocation&, HeapBase* heap) {

class UnifiedHeapConservativeMarkingVisitor final

  UnifiedHeapConservativeMarkingVisitor(
      HeapBase& heap, MutatorMarkingState& mutator_marking_state,

      : ConservativeMarkingVisitor(heap, mutator_marking_state, visitor) {}
  ~UnifiedHeapConservativeMarkingVisitor() override = default;

  void SetConservativeTracedHandlesMarkingVisitor(
      std::unique_ptr<ConservativeTracedHandlesMarkingVisitor>
          global_handle_marking_visitor) {

  void TraceConservativelyIfNeeded(const void* address) override {
    ConservativeMarkingVisitor::TraceConservativelyIfNeeded(address);
      std::shared_ptr<::heap::base::IncrementalMarkingSchedule>
          incremental_schedule,

    return mutator_unified_heap_marking_state_;

    return conservative_marking_visitor_;

    if (v8_flags.incremental_marking_unified_schedule) {

    MarkerBase::ScheduleIncrementalMarkingTask();

    if (v8_flags.incremental_marking_unified_schedule) {
      mutator_unified_heap_marking_state_.heap()
          ->incremental_marking()
          ->AdvanceOnAllocation();

    MarkerBase::AdvanceMarkingOnAllocationImpl();

    return conservative_marking_visitor_;

  std::shared_ptr<::heap::base::IncrementalMarkingSchedule> schedule_;

    std::shared_ptr<::heap::base::IncrementalMarkingSchedule>
        incremental_schedule,

      mutator_unified_heap_marking_state_(v8_heap, nullptr,
                                          config.collection_type),

          mutator_unified_heap_marking_state_),
      conservative_marking_visitor_(heap, mutator_marking_state_,

          platform_, mutator_unified_heap_marking_state_,
          config.collection_type) {
  if (cppgc_event.type == MetricRecorder::GCCycle::Type::kMinor) {

    tracer->NotifyYoungCppGCCompleted();

    tracer->NotifyFullCppGCCompleted();

  if (cpp_heap_.is_in_v8_marking_step_) {
    last_incremental_mark_event_ = cppgc_event;

  const std::shared_ptr<metrics::Recorder>& recorder =
      GetIsolate()->metrics_recorder();

  if (!recorder->HasEmbedderRecorder()) return;
  incremental_mark_batched_events_.events.emplace_back();
  incremental_mark_batched_events_.events.back().cpp_wall_clock_duration_in_us =

  if (incremental_mark_batched_events_.events.size() == kMaxBatchedEvents) {
    recorder->AddMainThreadEvent(std::move(incremental_mark_batched_events_),

    incremental_mark_batched_events_ = {};

  const std::shared_ptr<metrics::Recorder>& recorder =
      GetIsolate()->metrics_recorder();

  if (!recorder->HasEmbedderRecorder()) return;
  incremental_sweep_batched_events_.events.emplace_back();
  incremental_sweep_batched_events_.events.back()
      .cpp_wall_clock_duration_in_us = cppgc_event.duration_us;
  if (incremental_sweep_batched_events_.events.size() == kMaxBatchedEvents) {
    recorder->AddMainThreadEvent(std::move(incremental_sweep_batched_events_),

    incremental_sweep_batched_events_ = {};

  const std::shared_ptr<metrics::Recorder>& recorder =
      GetIsolate()->metrics_recorder();

  if (!incremental_mark_batched_events_.events.empty()) {
    recorder->AddMainThreadEvent(std::move(incremental_mark_batched_events_),

    incremental_mark_batched_events_ = {};

  if (!incremental_sweep_batched_events_.events.empty()) {
    recorder->AddMainThreadEvent(std::move(incremental_sweep_batched_events_),

    incremental_sweep_batched_events_ = {};
  return last_full_gc_event_.has_value();

  return last_young_gc_event_.has_value();

const std::optional<cppgc::internal::MetricRecorder::GCCycle>

  auto res = std::move(last_full_gc_event_);
  last_full_gc_event_.reset();

const std::optional<cppgc::internal::MetricRecorder::GCCycle>

  auto res = std::move(last_young_gc_event_);
  last_young_gc_event_.reset();

const std::optional<cppgc::internal::MetricRecorder::MainThreadIncrementalMark>

  auto res = std::move(last_incremental_mark_event_);
  last_incremental_mark_event_.reset();
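The Extract* fragments above all follow the same pattern: move the cached std::optional event out and reset it so the event is only reported once. A hypothetical minimal version of that pattern, with an illustrative helper name:

#include <optional>
#include <utility>

template <typename Event>
std::optional<Event> ExtractEvent(std::optional<Event>& slot) {
  std::optional<Event> result = std::move(slot);
  slot.reset();  // A moved-from optional is still engaged; clear it explicitly.
  return result;
}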
  incremental_mark_batched_events_.events.clear();
  incremental_sweep_batched_events_.events.clear();
  last_incremental_mark_event_.reset();
  last_full_gc_event_.reset();
  last_young_gc_event_.reset();

  return reinterpret_cast<Isolate*>(cpp_heap_.isolate());

  if (GetIsolate()->context().is_null())

  return GetIsolate()->GetOrRegisterRecorderContextId(

      &GlobalFatalOutOfMemoryHandlerImpl);

    const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>& custom_spaces,

          std::make_shared<CppgcPlatformAdapter>(platform), custom_spaces,

      kSupportsConservativeStackScan,

#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
class SweepingOnMutatorThreadForGlobalHandlesScope final {

  explicit SweepingOnMutatorThreadForGlobalHandlesScope(

  ~SweepingOnMutatorThreadForGlobalHandlesScope() {

class SweepingOnMutatorThreadForGlobalHandlesObserver final

  SweepingOnMutatorThreadForGlobalHandlesObserver(CppHeap& cpp_heap,
                                                  TracedHandles& traced_handles)

  void Start() override { traced_handles_.SetIsSweepingOnMutatorThread(true); }

  void End() override { traced_handles_.SetIsSweepingOnMutatorThread(false); }

class MoveListenerImpl final : public HeapProfilerNativeMoveListener,

  MoveListenerImpl(HeapProfiler* profiler, CppHeap* heap)
      : HeapProfilerNativeMoveListener(profiler), heap_(heap) {}
  ~MoveListenerImpl() {

      heap_->UnregisterMoveListener(this);

  void StartListening() override {

    heap_->RegisterMoveListener(this);

  void StopListening() override {
    if (!active_) return;

    heap_->UnregisterMoveListener(this);

  void OnMove(uint8_t* from, uint8_t* to,
              size_t size_including_header) override {
    ObjectMoveEvent(reinterpret_cast<Address>(from),

                    static_cast<int>(size_including_header));

  heap_ = isolate->heap();

  static_cast<CppgcPlatformAdapter*>(platform())

  if (auto* heap_profiler = heap()->heap_profiler()) {

    heap_profiler->set_native_move_listener(
        std::make_unique<MoveListenerImpl>(heap_profiler, this));

      std::make_unique<SweepingOnMutatorThreadForGlobalHandlesObserver>(

      i::GarbageCollectionReason::kExternalFinalize);

  if (auto* heap_profiler = heap()->heap_profiler()) {

    heap_profiler->set_native_move_listener(nullptr);
  return IsMemoryReducingGC(flags) || IsForceGC(flags);

constexpr size_t kIncrementalMarkingCheckInterval = 128 * KB;

  if (*collection_type_ == CollectionType::kMinor) return MarkingType::kAtomic;

  return MarkingType::kAtomic;

  if (marking_type == MarkingType::kIncrementalAndConcurrent && heap_ &&

    return MarkingType::kIncremental;

                v8_flags.cppheap_incremental_marking);
  if (v8_flags.cppheap_concurrent_marking) {

  } else if (v8_flags.cppheap_incremental_marking) {

          ? CppHeap::SweepingType::kIncremental
          : CppHeap::SweepingType::kIncrementalAndConcurrent;

    std::shared_ptr<::heap::base::IncrementalMarkingSchedule> schedule,

  if (collection_type == CollectionType::kMinor) {

  if (collection_type == CollectionType::kMajor)

#if defined(CPPGC_YOUNG_GENERATION)

  if (heap()->is_current_gc_forced()) {

  if (heap()->ShouldReduceMemory()) {

      (MarkingType::kAtomic == marking_config.marking_type) ||

              marking_config.stack_state);

  marker_ = std::make_unique<UnifiedHeapMarker>(

  auto* heap = isolate->heap();

    return heap->mark_compact_collector()->local_marking_worklists();

  return heap->minor_mark_sweep_collector()->local_marking_worklists();
                           size_t marked_bytes_limit) {

          : cppgc::internal::StatsCollector::kIncrementalMark);

    marker_->NotifyConcurrentMarkingOfWorkIfNeeded(
        cppgc::TaskPriority::kUserBlocking);

  marker_->AdvanceMarkingWithLimits(max_duration, marked_bytes_limit);

      std::make_unique<ConservativeTracedHandlesMarkingVisitor>(

  return marker_->JoinConcurrentMarkingIfNeeded();

  marker_->ReEnableConcurrentMarking();

  marker_->WriteBarrierForObject<

                                  size_t marked_bytes) {

  marker_->ProcessCrossThreadWeaknessIfNeeded();

#if defined(CPPGC_YOUNG_GENERATION)

  if (v8_flags.cppgc_young_generation) {
    EnableGenerationalGC();

      bytes_allocated_in_prefinalizers);

  USE(bytes_allocated_in_prefinalizers);

#if defined(CPPGC_YOUNG_GENERATION)
  ResetRememberedSet();

      compactable_space_handling;

  std::optional<SweepingOnMutatorThreadForGlobalHandlesScope>
      global_handles_scope;

          ? cppgc::internal::SweepingConfig::FreeMemoryHandling::
                kDiscardWherePossible
          : cppgc::internal::SweepingConfig::FreeMemoryHandling::

               SweepingType::kAtomic == sweeping_config.sweeping_type);
  if (bytes_to_report < 0) {
    used_size_.fetch_sub(static_cast<size_t>(-bytes_to_report),
                         std::memory_order_relaxed);
  } else {
    used_size_.fetch_add(static_cast<size_t>(bytes_to_report),
                         std::memory_order_relaxed);
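The fragment above applies a signed byte delta to an atomic size counter: negative deltas shrink it, positive ones grow it. A self-contained sketch of that bookkeeping, with illustrative names:

#include <atomic>
#include <cstddef>
#include <cstdint>

void ReportBufferedBytes(std::atomic<std::size_t>& used_size,
                         int64_t bytes_to_report) {
  if (bytes_to_report < 0) {
    // Negative delta: subtract the magnitude from the counter.
    used_size.fetch_sub(static_cast<std::size_t>(-bytes_to_report),
                        std::memory_order_relaxed);
  } else {
    used_size.fetch_add(static_cast<std::size_t>(bytes_to_report),
                        std::memory_order_relaxed);
  }
}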
  if (v8_flags.incremental_marking) {

    heap->StartIncrementalMarkingIfAllocationLimitIsReached(
        heap->main_thread_local_heap(),
        heap->GCFlagsForIncrementalMarking(),

    if (heap->incremental_marking()->IsMajorMarking()) {
      if (heap->AllocationLimitOvershotByLargeMargin()) {
        heap->FinalizeIncrementalMarkingAtomically(
            i::GarbageCollectionReason::kExternalFinalize);

        heap->incremental_marking()->AdvanceOnAllocation();

      ->RequestGarbageCollectionForTesting(

  static_cast<CppgcPlatformAdapter*>(platform())
      ->EnableDetachedModeForTesting();
void ReportCustomSpaceStatistics(

    std::vector<cppgc::CustomSpaceIndex> custom_spaces,
    std::unique_ptr<CustomSpaceStatisticsReceiver> receiver) {
  for (auto custom_space_index : custom_spaces) {

    size_t allocated_bytes = std::accumulate(
        space->begin(), space->end(), 0, [](size_t sum, auto* page) {
          return sum + page->AllocatedBytesAtLastGC();

    receiver->AllocatedBytes(custom_space_index, allocated_bytes);
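ReportCustomSpaceStatistics above sums per-page byte counts with std::accumulate. An equivalent minimal sketch over a plain container; the Page type and field are stand-ins, not the cppgc page type:

#include <cstddef>
#include <numeric>
#include <vector>

struct Page {
  std::size_t allocated_bytes_at_last_gc;
};

std::size_t AllocatedBytes(const std::vector<Page*>& pages) {
  // Fold the per-page counts into one total.
  return std::accumulate(pages.begin(), pages.end(), std::size_t{0},
                         [](std::size_t sum, const Page* page) {
                           return sum + page->allocated_bytes_at_last_gc;
                         });
}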
class CollectCustomSpaceStatisticsAtLastGCTask final : public v8::Task {

  CollectCustomSpaceStatisticsAtLastGCTask(

      std::vector<cppgc::CustomSpaceIndex> custom_spaces,
      std::unique_ptr<CustomSpaceStatisticsReceiver> receiver)

            cppgc::internal::StatsCollector::kSweepInTaskForStatistics)) {

      ReportCustomSpaceStatistics(heap_.raw_heap(), std::move(custom_spaces_),
                                  std::move(receiver_));

      heap_.platform()->GetForegroundTaskRunner()->PostDelayedTask(
          std::make_unique<CollectCustomSpaceStatisticsAtLastGCTask>(
              heap_, std::move(custom_spaces_), std::move(receiver_)),

    CollectCustomSpaceStatisticsAtLastGCTask::kTaskDelayMs;

    CollectCustomSpaceStatisticsAtLastGCTask::kStepSizeMs;

    std::vector<cppgc::CustomSpaceIndex> custom_spaces,
    std::unique_ptr<CustomSpaceStatisticsReceiver> receiver) {
  if (sweeper().IsSweepingInProgress()) {

        std::make_unique<CollectCustomSpaceStatisticsAtLastGCTask>(

        CollectCustomSpaceStatisticsAtLastGCTask::kTaskDelayMs.InSecondsF());

  ReportCustomSpaceStatistics(raw_heap(), std::move(custom_spaces),

  return std::make_unique<CppMarkingState>(
      std::make_unique<cppgc::internal::MarkingStateBase>(
          AsBase(), marker()->To<UnifiedHeapMarker>().GetMarkingWorklists()));

std::unique_ptr<CppMarkingState>

  return std::make_unique<CppMarkingState>(
      marker()->To<UnifiedHeapMarker>().GetMutatorMarkingState());

          cppgc::internal::GCConfig::FreeMemoryHandling::kDiscardWherePossible)

      config.sweeping_type == cppgc::internal::GCConfig::SweepingType::kAtomic,
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
std::optional<int> CppHeap::UpdateAllocationTimeout() {
  if (!v8_flags.cppgc_random_gc_interval) {
    return std::nullopt;
  }
  if (!allocation_timeout_rng_) {
    allocation_timeout_rng_.emplace(v8_flags.fuzzer_random_seed);
  }
  return allocation_timeout_rng_->NextInt(v8_flags.cppgc_random_gc_interval) +
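UpdateAllocationTimeout above picks a random allocation count before the next forced GC whenever the random-GC-interval flag is set. A hedged sketch of that selection using <random> instead of V8's RandomNumberGenerator; the exact offset added to NextInt() is truncated in the fragment, so the [1, interval] range here is an assumption:

#include <optional>
#include <random>

std::optional<int> NextAllocationTimeout(int random_gc_interval,
                                         std::mt19937& rng) {
  if (random_gc_interval <= 0) return std::nullopt;
  // Sample a timeout of at least one allocation.
  std::uniform_int_distribution<int> dist(1, random_gc_interval);
  return dist(rng);
}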
       !v8_flags.allow_allocation_in_fast_api_call) ||