v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.

cpp-heap.cc
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/cppgc-js/cpp-heap.h"

#include <cstdint>
#include <memory>
#include <numeric>
#include <optional>

#include "include/v8-isolate.h"
#include "include/v8-platform.h"
#include "src/base/logging.h"
#include "src/base/macros.h"
#include "src/flags/flags.h"
#include "src/handles/handles.h"
#include "src/heap/base/stack.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap.h"
#include "src/init/v8.h"

namespace v8 {

namespace internal {
class MinorGCHeapGrowing
    : public cppgc::internal::StatsCollector::AllocationObserver {
 public:
  explicit MinorGCHeapGrowing(cppgc::internal::StatsCollector& stats_collector)
      : stats_collector_(stats_collector) {
    stats_collector.RegisterObserver(this);
  }
  virtual ~MinorGCHeapGrowing() = default;

  void AllocatedObjectSizeIncreased(size_t) final {}
  void AllocatedObjectSizeDecreased(size_t) final {}
  void ResetAllocatedObjectSize(size_t allocated_object_size) final {
    ConfigureLimit(allocated_object_size);
  }

 private:
  void ConfigureLimit(size_t allocated_object_size) {
    // Constant growing factor for growing the heap limit.
    static constexpr double kGrowingFactor = 1.5;
    // For smaller heaps, allow allocating at least a LAB in each regular space
    // before triggering GC again.
    static constexpr size_t kMinLimitIncrease =
        cppgc::internal::kPageSize *
        cppgc::internal::RawHeap::kNumberOfRegularSpaces;

    const size_t size = std::max(allocated_object_size, initial_heap_size_);
    limit_for_atomic_gc_ = std::max(static_cast<size_t>(size * kGrowingFactor),
                                    size + kMinLimitIncrease);
  }

  cppgc::internal::StatsCollector& stats_collector_;
  size_t initial_heap_size_ = 1 * kMB;
  size_t limit_for_atomic_gc_ = 0;  // See ConfigureLimit().
};

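// Worked example (editorial note, illustrative numbers): with
// initial_heap_size_ = 1 * kMB and allocated_object_size = 4 MB after a GC,
// the next limit is max(4 MB * 1.5, 4 MB + kMinLimitIncrease) = 6 MB. The
// multiplicative kGrowingFactor term dominates for larger heaps, while the
// additive kMinLimitIncrease term keeps very small heaps from re-triggering
// GC after only a handful of allocations.
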
}  // namespace internal

// static
std::unique_ptr<CppHeap> CppHeap::Create(v8::Platform* platform,
                                         const CppHeapCreateParams& params) {
  return std::make_unique<internal::CppHeap>(platform, params.custom_spaces,
                                             params.marking_support,
                                             params.sweeping_support);
}
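
// Usage sketch (editorial note, embedder-side code; not part of this file).
// Assumes `platform` is the embedder's v8::Platform implementation. The
// isolate takes ownership of the CppHeap handed over via its creation
// parameters:
//
//   v8::CppHeapCreateParams heap_params({/* custom space factories */});
//   v8::Isolate::CreateParams create_params;
//   create_params.cpp_heap =
//       v8::CppHeap::Create(platform, heap_params).release();
//   v8::Isolate* isolate = v8::Isolate::New(create_params);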

cppgc::AllocationHandle& CppHeap::GetAllocationHandle() {
  return internal::CppHeap::From(this)->object_allocator();
}

cppgc::HeapHandle& CppHeap::GetHeapHandle() {
  return *internal::CppHeap::From(this);
}

void CppHeap::Terminate() { internal::CppHeap::From(this)->Terminate(); }

cppgc::HeapStatistics CppHeap::CollectStatistics(
    cppgc::HeapStatistics::DetailLevel detail_level) {
  return internal::CppHeap::From(this)->CollectStatistics(detail_level);
}

void CppHeap::CollectCustomSpaceStatisticsAtLastGC(
    std::vector<cppgc::CustomSpaceIndex> custom_spaces,
    std::unique_ptr<CustomSpaceStatisticsReceiver> receiver) {
  return internal::CppHeap::From(this)->CollectCustomSpaceStatisticsAtLastGC(
      std::move(custom_spaces), std::move(receiver));
}

void CppHeap::EnableDetachedGarbageCollectionsForTesting() {
  return internal::CppHeap::From(this)
      ->EnableDetachedGarbageCollectionsForTesting();
}

void CppHeap::CollectGarbageForTesting(cppgc::EmbedderStackState stack_state) {
  return internal::CppHeap::From(this)->CollectGarbageForTesting(
      internal::CppHeap::CollectionType::kMajor, stack_state);
}

void CppHeap::CollectGarbageInYoungGenerationForTesting(
    cppgc::EmbedderStackState stack_state) {
  return internal::CppHeap::From(this)->CollectGarbageForTesting(
      internal::CppHeap::CollectionType::kMinor, stack_state);
}

namespace internal {

namespace {

class CppgcPlatformAdapter final : public cppgc::Platform {
 public:
  explicit CppgcPlatformAdapter(v8::Platform* platform)
      : platform_(platform),
        page_allocator_(platform->GetPageAllocator()
                            ? platform->GetPageAllocator()
                            : &cppgc::internal::GetGlobalPageAllocator()) {}

  CppgcPlatformAdapter(const CppgcPlatformAdapter&) = delete;
  CppgcPlatformAdapter& operator=(const CppgcPlatformAdapter&) = delete;

  PageAllocator* GetPageAllocator() final { return page_allocator_; }

  double MonotonicallyIncreasingTime() final {
    return platform_->MonotonicallyIncreasingTime();
  }

  std::shared_ptr<TaskRunner> GetForegroundTaskRunner(
      TaskPriority priority) final {
    // If no Isolate has been set, there's no task runner to leverage for
    // foreground tasks. In detached mode the original platform handles the
    // task runner retrieval.
    if (!isolate_ && !is_in_detached_mode_) return nullptr;

    return platform_->GetForegroundTaskRunner(isolate_, priority);
  }

  std::unique_ptr<JobHandle> PostJob(TaskPriority priority,
                                     std::unique_ptr<JobTask> job_task) final {
    return platform_->PostJob(priority, std::move(job_task));
  }

  TracingController* GetTracingController() override {
    return platform_->GetTracingController();
  }

  void SetIsolate(v8::Isolate* isolate) { isolate_ = isolate; }
  void EnableDetachedModeForTesting() { is_in_detached_mode_ = true; }

 private:
  v8::Platform* platform_;
  cppgc::PageAllocator* page_allocator_;
  v8::Isolate* isolate_ = nullptr;
  bool is_in_detached_mode_ = false;
};

class UnifiedHeapConcurrentMarker
    : public cppgc::internal::ConcurrentMarkerBase {
 public:
  UnifiedHeapConcurrentMarker(
      cppgc::internal::HeapBase& heap, Heap* v8_heap,
      cppgc::internal::MarkingWorklists& marking_worklists,
      ::heap::base::IncrementalMarkingSchedule& incremental_marking_schedule,
      cppgc::Platform* platform,
      UnifiedHeapMarkingState& unified_heap_marking_state,
      CppHeap::CollectionType collection_type)
      : cppgc::internal::ConcurrentMarkerBase(
            heap, marking_worklists, incremental_marking_schedule, platform),
        v8_heap_(v8_heap),
        collection_type_(collection_type) {}

  std::unique_ptr<cppgc::Visitor> CreateConcurrentMarkingVisitor(
      cppgc::internal::ConcurrentMarkingState&) const final;

 private:
  Heap* const v8_heap_;
  CppHeap::CollectionType collection_type_;
};

std::unique_ptr<cppgc::Visitor>
UnifiedHeapConcurrentMarker::CreateConcurrentMarkingVisitor(
    cppgc::internal::ConcurrentMarkingState& marking_state) const {
  return std::make_unique<ConcurrentUnifiedHeapMarkingVisitor>(
      heap(), v8_heap_, marking_state, collection_type_);
}

void FatalOutOfMemoryHandlerImpl(const std::string& reason,
                                 const SourceLocation&, HeapBase* heap) {
  auto* cpp_heap = static_cast<v8::internal::CppHeap*>(heap);
  auto* isolate = cpp_heap->isolate();
  DCHECK_NOT_NULL(isolate);
  if (v8_flags.heap_snapshot_on_oom) {
    cppgc::internal::ClassNameAsHeapObjectNameScope names_scope(
        cpp_heap->AsBase());
    isolate->heap()->heap_profiler()->WriteSnapshotToDiskAfterGC(
        v8::HeapProfiler::HeapSnapshotMode::kExposeInternals);
  }
  V8::FatalProcessOutOfMemory(isolate, reason.c_str());
}

void GlobalFatalOutOfMemoryHandlerImpl(const std::string& reason,
                                       const SourceLocation&, HeapBase* heap) {
  V8::FatalProcessOutOfMemory(nullptr, reason.c_str());
}

class UnifiedHeapConservativeMarkingVisitor final
    : public ConservativeMarkingVisitor {
 public:
  UnifiedHeapConservativeMarkingVisitor(
      HeapBase& heap, MutatorMarkingState& mutator_marking_state,
      cppgc::Visitor& visitor)
      : ConservativeMarkingVisitor(heap, mutator_marking_state, visitor) {}
  ~UnifiedHeapConservativeMarkingVisitor() override = default;

  void SetConservativeTracedHandlesMarkingVisitor(
      std::unique_ptr<ConservativeTracedHandlesMarkingVisitor>
          global_handle_marking_visitor) {
    marking_visitor_ = std::move(global_handle_marking_visitor);
  }

  void TraceConservativelyIfNeeded(const void* address) override {
    ConservativeMarkingVisitor::TraceConservativelyIfNeeded(address);
    if (marking_visitor_) {
      marking_visitor_->VisitPointer(address);
    }
  }

 private:
  std::unique_ptr<ConservativeTracedHandlesMarkingVisitor> marking_visitor_;
};

}  // namespace

class UnifiedHeapMarker final : public cppgc::internal::MarkerBase {
 public:
  UnifiedHeapMarker(Heap* v8_heap, cppgc::internal::HeapBase& cpp_heap,
                    std::shared_ptr<::heap::base::IncrementalMarkingSchedule>
                        incremental_schedule,
                    cppgc::Platform* platform,
                    cppgc::internal::MarkingConfig config);

  ~UnifiedHeapMarker() final = default;

  cppgc::internal::MarkingWorklists& GetMarkingWorklists() {
    return marking_worklists_;
  }

  cppgc::internal::MutatorMarkingState& GetMutatorMarkingState() {
    return static_cast<cppgc::internal::MutatorMarkingState&>(
        marking_visitor_.marking_state_);
  }

  UnifiedHeapMarkingState& GetMutatorUnifiedHeapMarkingState() {
    return mutator_unified_heap_marking_state_;
  }

  UnifiedHeapConservativeMarkingVisitor& conservative_visitor() final {
    return conservative_marking_visitor_;
  }

  void ScheduleIncrementalMarkingTask() final {
    if (v8_flags.incremental_marking_unified_schedule) {
      // Not using stand-alone task from CppHeap.
      return;
    }
    MarkerBase::ScheduleIncrementalMarkingTask();
  }

  void AdvanceMarkingOnAllocationImpl() final {
    if (v8_flags.incremental_marking_unified_schedule) {
      mutator_unified_heap_marking_state_.heap()
          ->incremental_marking()
          ->AdvanceOnAllocation();
      return;
    }
    MarkerBase::AdvanceMarkingOnAllocationImpl();
  }

 protected:
  cppgc::Visitor& visitor() final { return marking_visitor_; }

  ::heap::base::StackVisitor& stack_visitor() final {
    return conservative_marking_visitor_;
  }

  cppgc::internal::ConcurrentMarkerBase& concurrent_marker() final {
    return concurrent_marker_;
  }

  ::heap::base::IncrementalMarkingSchedule& schedule() final {
    return *schedule_;
  }

 private:
  UnifiedHeapMarkingState mutator_unified_heap_marking_state_;
  MutatorUnifiedHeapMarkingVisitor marking_visitor_;
  UnifiedHeapConservativeMarkingVisitor conservative_marking_visitor_;
  std::shared_ptr<::heap::base::IncrementalMarkingSchedule> schedule_;
  UnifiedHeapConcurrentMarker concurrent_marker_;
};

UnifiedHeapMarker::UnifiedHeapMarker(
    Heap* v8_heap, cppgc::internal::HeapBase& heap,
    std::shared_ptr<::heap::base::IncrementalMarkingSchedule>
        incremental_schedule,
    cppgc::Platform* platform, cppgc::internal::MarkingConfig config)
    : cppgc::internal::MarkerBase(heap, platform, config),
      mutator_unified_heap_marking_state_(v8_heap, nullptr,
                                          config.collection_type),
      marking_visitor_(heap, mutator_marking_state_,
                       mutator_unified_heap_marking_state_),
      conservative_marking_visitor_(heap, mutator_marking_state_,
                                    marking_visitor_),
      schedule_(std::move(incremental_schedule)),
      concurrent_marker_(heap_, v8_heap, marking_worklists_, *schedule_,
                         platform_, mutator_unified_heap_marking_state_,
                         config.collection_type) {}

void CppHeap::MetricRecorderAdapter::AddMainThreadEvent(
    const GCCycle& cppgc_event) {
  auto* tracer = GetIsolate()->heap()->tracer();
  if (cppgc_event.type == MetricRecorder::GCCycle::Type::kMinor) {
    DCHECK(!last_young_gc_event_);
    last_young_gc_event_ = cppgc_event;
    tracer->NotifyYoungCppGCCompleted();
  } else {
    DCHECK(!last_full_gc_event_);
    last_full_gc_event_ = cppgc_event;
    tracer->NotifyFullCppGCCompleted();
  }
}

void CppHeap::MetricRecorderAdapter::AddMainThreadEvent(
    const MainThreadIncrementalMark& cppgc_event) {
  // Incremental marking steps might be nested in V8 marking steps. In such
  // cases, stash the relevant values and delegate to V8 to report them. For
  // non-nested steps, report to the Recorder directly.
  if (cpp_heap_.is_in_v8_marking_step_) {
    last_incremental_mark_event_ = cppgc_event;
    return;
  }
  // This is a standalone incremental marking step.
  const std::shared_ptr<metrics::Recorder>& recorder =
      GetIsolate()->metrics_recorder();
  DCHECK_NOT_NULL(recorder);
  if (!recorder->HasEmbedderRecorder()) return;
  incremental_mark_batched_events_.events.emplace_back();
  incremental_mark_batched_events_.events.back().cpp_wall_clock_duration_in_us =
      cppgc_event.duration_us;
  if (incremental_mark_batched_events_.events.size() == kMaxBatchedEvents) {
    recorder->AddMainThreadEvent(std::move(incremental_mark_batched_events_),
                                 GetContextId());
    incremental_mark_batched_events_ = {};
  }
}

void CppHeap::MetricRecorderAdapter::AddMainThreadEvent(
    const MainThreadIncrementalSweep& cppgc_event) {
  // Incremental sweeping steps are never nested inside V8 sweeping steps, so
  // report to the Recorder directly.
  const std::shared_ptr<metrics::Recorder>& recorder =
      GetIsolate()->metrics_recorder();
  DCHECK_NOT_NULL(recorder);
  if (!recorder->HasEmbedderRecorder()) return;
  incremental_sweep_batched_events_.events.emplace_back();
  incremental_sweep_batched_events_.events.back()
      .cpp_wall_clock_duration_in_us = cppgc_event.duration_us;
  if (incremental_sweep_batched_events_.events.size() == kMaxBatchedEvents) {
    recorder->AddMainThreadEvent(std::move(incremental_sweep_batched_events_),
                                 GetContextId());
    incremental_sweep_batched_events_ = {};
  }
}

void CppHeap::MetricRecorderAdapter::FlushBatchedIncrementalEvents() {
  const std::shared_ptr<metrics::Recorder>& recorder =
      GetIsolate()->metrics_recorder();
  DCHECK_NOT_NULL(recorder);
  if (!incremental_mark_batched_events_.events.empty()) {
    recorder->AddMainThreadEvent(std::move(incremental_mark_batched_events_),
                                 GetContextId());
    incremental_mark_batched_events_ = {};
  }
  if (!incremental_sweep_batched_events_.events.empty()) {
    recorder->AddMainThreadEvent(std::move(incremental_sweep_batched_events_),
                                 GetContextId());
    incremental_sweep_batched_events_ = {};
  }
}
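
// Editorial note on batching: incremental mark/sweep events are accumulated
// locally and only forwarded to the embedder's metrics::Recorder either when
// a batch fills up (kMaxBatchedEvents) or when FlushBatchedIncrementalEvents()
// is invoked at cycle boundaries. This keeps per-step reporting overhead low
// while still delivering every event.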

bool CppHeap::MetricRecorderAdapter::FullGCMetricsReportPending() const {
  return last_full_gc_event_.has_value();
}

bool CppHeap::MetricRecorderAdapter::YoungGCMetricsReportPending() const {
  return last_young_gc_event_.has_value();
}

const std::optional<cppgc::internal::MetricRecorder::GCCycle>
CppHeap::MetricRecorderAdapter::ExtractLastFullGcEvent() {
  auto res = std::move(last_full_gc_event_);
  last_full_gc_event_.reset();
  return res;
}

const std::optional<cppgc::internal::MetricRecorder::GCCycle>
CppHeap::MetricRecorderAdapter::ExtractLastYoungGcEvent() {
  auto res = std::move(last_young_gc_event_);
  last_young_gc_event_.reset();
  return res;
}

const std::optional<cppgc::internal::MetricRecorder::MainThreadIncrementalMark>
CppHeap::MetricRecorderAdapter::ExtractLastIncrementalMarkEvent() {
  auto res = std::move(last_incremental_mark_event_);
  last_incremental_mark_event_.reset();
  return res;
}

void CppHeap::MetricRecorderAdapter::ClearCachedEvents() {
  incremental_mark_batched_events_.events.clear();
  incremental_sweep_batched_events_.events.clear();
  last_incremental_mark_event_.reset();
  last_full_gc_event_.reset();
  last_young_gc_event_.reset();
}

Isolate* CppHeap::MetricRecorderAdapter::GetIsolate() const {
  DCHECK_NOT_NULL(cpp_heap_.isolate());
  return reinterpret_cast<Isolate*>(cpp_heap_.isolate());
}

v8::metrics::Recorder::ContextId CppHeap::MetricRecorderAdapter::GetContextId()
    const {
  DCHECK_NOT_NULL(GetIsolate());
  if (GetIsolate()->context().is_null())
    return v8::metrics::Recorder::ContextId::Empty();
  HandleScope scope(GetIsolate());
  return GetIsolate()->GetOrRegisterRecorderContextId(
      GetIsolate()->native_context());
}

// static
void CppHeap::InitializeOncePerProcess() {
  cppgc::internal::GetGlobalOOMHandler().SetCustomHandler(
      &GlobalFatalOutOfMemoryHandlerImpl);
}

CppHeap::CppHeap(
    v8::Platform* platform,
    const std::vector<std::unique_ptr<cppgc::CustomSpaceBase>>& custom_spaces,
    cppgc::Heap::MarkingType marking_support,
    cppgc::Heap::SweepingType sweeping_support)
    : cppgc::internal::HeapBase(
          std::make_shared<CppgcPlatformAdapter>(platform), custom_spaces,
          cppgc::internal::HeapBase::StackSupport::
              kSupportsConservativeStackScan,
          marking_support, sweeping_support, *this),
      minor_gc_heap_growing_(
          std::make_unique<MinorGCHeapGrowing>(*stats_collector())),
      cross_heap_remembered_set_(*this) {
  // Enter no GC scope. `AttachIsolate()` removes this and allows triggering
  // garbage collections.
  no_gc_scope_++;

#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  object_allocator().UpdateAllocationTimeout();
#endif  // V8_ENABLE_ALLOCATION_TIMEOUT
}

CppHeap::~CppHeap() { Terminate(); }

void CppHeap::Terminate() {
  // TODO(ahaas): Remove `already_terminated_` once the V8 API
  // CppHeap::Terminate has been removed.
  if (already_terminated_) return;
  already_terminated_ = true;
  // Must not be attached to a heap when invoking termination GCs.
  CHECK(!isolate_);
  // Gracefully terminate the C++ heap invoking destructors.
  HeapBase::Terminate();
}

namespace {

class SweepingOnMutatorThreadForGlobalHandlesScope final {
 public:
  explicit SweepingOnMutatorThreadForGlobalHandlesScope(
      TracedHandles& traced_handles)
      : traced_handles_(traced_handles) {
    traced_handles_.SetIsSweepingOnMutatorThread(true);
  }
  ~SweepingOnMutatorThreadForGlobalHandlesScope() {
    traced_handles_.SetIsSweepingOnMutatorThread(false);
  }

  TracedHandles& traced_handles_;
};

class SweepingOnMutatorThreadForGlobalHandlesObserver final
    : public cppgc::internal::Sweeper::SweepingOnMutatorThreadObserver {
 public:
  SweepingOnMutatorThreadForGlobalHandlesObserver(CppHeap& cpp_heap,
                                                  TracedHandles& traced_handles)
      : cppgc::internal::Sweeper::SweepingOnMutatorThreadObserver(
            cpp_heap.sweeper()),
        traced_handles_(traced_handles) {}

  void Start() override { traced_handles_.SetIsSweepingOnMutatorThread(true); }

  void End() override { traced_handles_.SetIsSweepingOnMutatorThread(false); }

 private:
  TracedHandles& traced_handles_;
};

class MoveListenerImpl final : public HeapProfilerNativeMoveListener,
                               public cppgc::internal::MoveListener {
 public:
  MoveListenerImpl(HeapProfiler* profiler, CppHeap* heap)
      : HeapProfilerNativeMoveListener(profiler), heap_(heap) {}
  ~MoveListenerImpl() {
    if (active_) {
      heap_->UnregisterMoveListener(this);
    }
  }

  // HeapProfilerNativeMoveListener implementation:
  void StartListening() override {
    if (active_) return;
    active_ = true;
    heap_->RegisterMoveListener(this);
  }
  void StopListening() override {
    if (!active_) return;
    active_ = false;
    heap_->UnregisterMoveListener(this);
  }

  // cppgc::internal::MoveListener implementation:
  void OnMove(uint8_t* from, uint8_t* to,
              size_t size_including_header) override {
    ObjectMoveEvent(reinterpret_cast<Address>(from),
                    reinterpret_cast<Address>(to),
                    static_cast<int>(size_including_header));
  }

 private:
  CppHeap* heap_;
  bool active_ = false;
};

}  // namespace

void CppHeap::AttachIsolate(Isolate* isolate) {
  // Since a new isolate is attached, we are also allowed to detach it again.
  is_detached_ = false;
  CHECK_NULL(isolate_);
  isolate_ = isolate;
  heap_ = isolate->heap();
  static_cast<CppgcPlatformAdapter*>(platform())
      ->SetIsolate(reinterpret_cast<v8::Isolate*>(isolate_));
  if (auto* heap_profiler = heap()->heap_profiler()) {
    heap_profiler->AddBuildEmbedderGraphCallback(&CppGraphBuilder::Run, this);
    heap_profiler->set_native_move_listener(
        std::make_unique<MoveListenerImpl>(heap_profiler, this));
  }
  SetMetricRecorder(std::make_unique<MetricRecorderAdapter>(*this));
  oom_handler().SetCustomHandler(&FatalOutOfMemoryHandlerImpl);
  sweeping_on_mutator_thread_observer_ =
      std::make_unique<SweepingOnMutatorThreadForGlobalHandlesObserver>(
          *this, *isolate_->traced_handles());
  no_gc_scope_--;

  // Propagate overridden stack state to the attached heap, if necessary.
  // TODO(b/326503098): This should not be required, to be removed when the
  // issue is resolved.
  if (detached_override_stack_state_) {
    override_stack_state_scope_ = std::make_unique<EmbedderStackStateScope>(
        heap_, EmbedderStackStateOrigin::kExplicitInvocation,
        *detached_override_stack_state_);
    detached_override_stack_state_.reset();
  }
}

void CppHeap::StartDetachingIsolate() {
  is_detached_ = true;
  // TODO(chromium:1056170): Investigate whether this can be enforced with a
  // CHECK across all relevant embedders and setups.
  if (!isolate_) return;

  // Store the last thread that owned the isolate, as it is the thread CppHeap
  // should also get terminated with.

  // Finish any ongoing garbage collection.
  if (isolate_->heap()->incremental_marking()->IsMarking()) {
    isolate_->heap()->FinalizeIncrementalMarkingAtomically(
        i::GarbageCollectionReason::kExternalFinalize);
  }
  sweeper().FinishIfRunning();
}

void CppHeap::DetachIsolate() {
  if (auto* heap_profiler = heap()->heap_profiler()) {
    heap_profiler->RemoveBuildEmbedderGraphCallback(&CppGraphBuilder::Run,
                                                    this);
    heap_profiler->set_native_move_listener(nullptr);
  }
  SetMetricRecorder(nullptr);

  // Propagate overridden stack state from the attached heap, if necessary.
  // TODO(b/326503098): This should not be required, to be removed when the
  // issue is resolved.
  if (auto overridden_stack_state = heap_->overridden_stack_state()) {
    detached_override_stack_state_ = overridden_stack_state;
    override_stack_state_scope_.reset();
  }
  isolate_ = nullptr;
  heap_ = nullptr;
  // Any future garbage collections will ignore the V8->C++ references.
  oom_handler().SetCustomHandler(nullptr);
  // Enter no GC scope.
  no_gc_scope_++;
}

::heap::base::Stack* CppHeap::stack() {
  return isolate_ ? &isolate_->heap()->stack() : HeapBase::stack();
}

namespace {

bool IsMemoryReducingGC(CppHeap::GarbageCollectionFlags flags) {
  return flags & CppHeap::GarbageCollectionFlagValues::kReduceMemory;
}

bool IsForceGC(CppHeap::GarbageCollectionFlags flags) {
  return flags & CppHeap::GarbageCollectionFlagValues::kForced;
}

bool ShouldReduceMemory(CppHeap::GarbageCollectionFlags flags) {
  return IsMemoryReducingGC(flags) || IsForceGC(flags);
}

constexpr size_t kIncrementalMarkingCheckInterval = 128 * KB;

}  // namespace

CppHeap::MarkingType CppHeap::SelectMarkingType() const {
  // For now, force atomic marking for minor collections.
  if (*collection_type_ == CollectionType::kMinor) return MarkingType::kAtomic;

  if (IsForceGC(current_gc_flags_) && !force_incremental_marking_for_testing_)
    return MarkingType::kAtomic;

  const MarkingType marking_type = marking_support();

  // CollectionType is major at this point. Check the surrounding
  // MarkCompactCollector for whether we should rely on background threads in
  // this GC cycle.
  if (marking_type == MarkingType::kIncrementalAndConcurrent && heap_ &&
      !heap_->ShouldUseBackgroundThreads()) {
    return MarkingType::kIncremental;
  }

  return marking_support();
}

CppHeap::SweepingType CppHeap::SelectSweepingType() const {
  if (IsForceGC(current_gc_flags_)) return SweepingType::kAtomic;

  return sweeping_support();
}

void CppHeap::UpdateGCCapabilitiesFromFlags() {
  CHECK_IMPLIES(v8_flags.cppheap_concurrent_marking,
                v8_flags.cppheap_incremental_marking);
  if (v8_flags.cppheap_concurrent_marking) {
    marking_support_ = static_cast<MarkingType>(
        std::min(marking_support_, MarkingType::kIncrementalAndConcurrent));
  } else if (v8_flags.cppheap_incremental_marking) {
    marking_support_ = static_cast<MarkingType>(
        std::min(marking_support_, MarkingType::kIncremental));
  } else {
    marking_support_ = MarkingType::kAtomic;
  }

  sweeping_support_ = v8_flags.single_threaded_gc
                          ? CppHeap::SweepingType::kIncremental
                          : CppHeap::SweepingType::kIncrementalAndConcurrent;

  page_backend_->page_pool().SetDecommitPooledPages(
      v8_flags.decommit_pooled_pages);
}
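
// Editorial note on flag interplay: the flags can only lower the statically
// configured support level. For example, a heap created with
// MarkingType::kIncrementalAndConcurrent that runs with
// --no-cppheap-concurrent-marking --cppheap-incremental-marking ends up with
// MarkingType::kIncremental, while disabling both flags forces atomic
// marking; --single-threaded-gc likewise restricts sweeping to incremental
// steps on the mutator thread.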

void CppHeap::InitializeMarking(
    CollectionType collection_type,
    std::shared_ptr<::heap::base::IncrementalMarkingSchedule> schedule,
    GarbageCollectionFlags gc_flags) {
  DCHECK(!collection_type_);

  if (collection_type == CollectionType::kMinor) {
    if (!generational_gc_supported()) return;
    // Notify the GC tracer that CppGC started a young GC cycle.
    isolate_->heap()->tracer()->NotifyYoungCppGCRunning();
  }

  collection_type_ = collection_type;

  CHECK(!sweeper().IsSweepingInProgress());

  // Check that previous cycle metrics for the same collection type have been
  // reported.
  if (GetMetricRecorder()) {
    if (collection_type == CollectionType::kMajor)
      DCHECK(!GetMetricRecorder()->FullGCMetricsReportPending());
    else
      DCHECK(!GetMetricRecorder()->YoungGCMetricsReportPending());
  }

#if defined(CPPGC_YOUNG_GENERATION)
  if (generational_gc_supported() &&
      *collection_type_ == CollectionType::kMajor) {
    stats_collector()->NotifyUnmarkingStarted(*collection_type_);
    cppgc::internal::StatsCollector::EnabledScope stats_scope(
        stats_collector(), cppgc::internal::StatsCollector::kUnmark);
    cppgc::internal::SequentialUnmarker unmarker(raw_heap());
  }
#endif  // defined(CPPGC_YOUNG_GENERATION)

  if (heap_) {
    if (heap()->is_current_gc_forced()) {
      gc_flags |= GarbageCollectionFlagValues::kForced;
    }
    if (heap()->ShouldReduceMemory()) {
      gc_flags |= GarbageCollectionFlagValues::kReduceMemory;
    }
  }
  current_gc_flags_ = gc_flags;

  const cppgc::internal::MarkingConfig marking_config{
      *collection_type_, StackState::kNoHeapPointers, SelectMarkingType(),
      IsForceGC(current_gc_flags_)
          ? cppgc::internal::MarkingConfig::IsForcedGC::kForced
          : cppgc::internal::MarkingConfig::IsForcedGC::kNotForced};
  DCHECK_IMPLIES(!v8_flags.cppheap_incremental_marking,
                 (MarkingType::kAtomic == marking_config.marking_type) ||
                     force_incremental_marking_for_testing_);
  if (ShouldReduceMemory(current_gc_flags_)) {
    // Only enable compaction when in a memory reduction garbage collection as
    // it may significantly increase the final garbage collection pause.
    compactor_.InitializeIfShouldCompact(marking_config.marking_type,
                                         marking_config.stack_state);
  }

  if (!schedule) {
    schedule =
        ::heap::base::IncrementalMarkingSchedule::Create(v8_flags.predictable);
  }

  marker_ = std::make_unique<UnifiedHeapMarker>(
      isolate_ ? isolate()->heap() : nullptr, AsBase(), schedule,
      platform_.get(), marking_config);
}

namespace {
MarkingWorklists::Local* GetV8MarkingWorklists(
    Isolate* isolate, cppgc::internal::CollectionType collection_type) {
  auto* heap = isolate->heap();
  if (collection_type == cppgc::internal::CollectionType::kMajor) {
    return heap->mark_compact_collector()->local_marking_worklists();
  } else {
    return heap->minor_mark_sweep_collector()->local_marking_worklists();
  }
}
}  // namespace

void CppHeap::StartMarking() {
  if (!TracingInitialized()) return;
  if (isolate_) {
    // Reuse the same local worklist for the mutator marking state, which
    // results in directly processing the objects by the JS logic. Also avoids
    // publishing local objects.
    marker_->To<UnifiedHeapMarker>().GetMutatorUnifiedHeapMarkingState().Update(
        GetV8MarkingWorklists(isolate_, *collection_type_));
  }
  marker_->StartMarking();
  marking_done_ = false;
}

size_t CppHeap::last_bytes_marked() const {
  return TracingInitialized() ? marker_->last_bytes_marked() : 0;
}

bool CppHeap::AdvanceMarking(v8::base::TimeDelta max_duration,
                             size_t marked_bytes_limit) {
  if (!TracingInitialized()) {
    return true;
  }
  is_in_v8_marking_step_ = true;
  cppgc::internal::StatsCollector::EnabledScope stats_scope(
      stats_collector(),
      in_atomic_pause_ ? cppgc::internal::StatsCollector::kAtomicMark
                       : cppgc::internal::StatsCollector::kIncrementalMark);
  if (in_atomic_pause_) {
    marker_->NotifyConcurrentMarkingOfWorkIfNeeded(
        cppgc::TaskPriority::kUserBlocking);
  }
  marking_done_ =
      marker_->AdvanceMarkingWithLimits(max_duration, marked_bytes_limit);
  is_in_v8_marking_step_ = false;
  return marking_done_;
}

bool CppHeap::IsMarkingDone() const {
  return !TracingInitialized() || marking_done_;
}

void CppHeap::EnterFinalPause(cppgc::EmbedderStackState stack_state) {
  // Enter atomic pause even if tracing is not initialized. This is needed to
  // make sure that we always enable young generation from the atomic pause.
  in_atomic_pause_ = true;
  if (!TracingInitialized()) return;
  auto& marker = marker_->To<UnifiedHeapMarker>();
  // Scan global handles conservatively in case we are attached to an Isolate.
  // TODO(1029379): Support global handle marking visitors with minor GC.
  if (isolate_) {
    auto& heap = *isolate()->heap();
    marker.conservative_visitor().SetConservativeTracedHandlesMarkingVisitor(
        std::make_unique<ConservativeTracedHandlesMarkingVisitor>(
            heap, *GetV8MarkingWorklists(isolate_, *collection_type_),
            *collection_type_));
  }
  marker.EnterAtomicPause(stack_state);
  compactor_.CancelIfShouldNotCompact(MarkingType::kAtomic, stack_state);
}

bool CppHeap::FinishConcurrentMarkingIfNeeded() {
  if (!TracingInitialized()) return true;
  return marker_->JoinConcurrentMarkingIfNeeded();
}

void CppHeap::ReEnableConcurrentMarking() {
  marker_->ReEnableConcurrentMarking();
}

void CppHeap::WriteBarrier(void* object) {
  marker_->WriteBarrierForObject<
      cppgc::internal::MarkerBase::WriteBarrierType::kDijkstra>(
      cppgc::internal::HeapObjectHeader::FromObject(object));
}

namespace {

void RecordEmbedderMarkingSpeed(GCTracer* tracer, base::TimeDelta marking_time,
                                size_t marked_bytes) {
  tracer->RecordEmbedderMarkingSpeed(marked_bytes, marking_time);
}

}  // namespace

void CppHeap::ProcessCrossThreadWeakness() {
  if (!TracingInitialized()) {
    return;
  }

  marker_->ProcessCrossThreadWeaknessIfNeeded();
}

void CppHeap::FinishMarkingAndProcessWeakness() {
#if defined(CPPGC_YOUNG_GENERATION)
  // Check if the young generation was enabled via flag. We must enable young
  // generation before calling the custom weak callbacks to make sure that the
  // callbacks for old objects are registered in the remembered set.
  if (v8_flags.cppgc_young_generation) {
    EnableGenerationalGC();
  }
#endif  // defined(CPPGC_YOUNG_GENERATION)

  if (!TracingInitialized()) {
    in_atomic_pause_ = false;
    return;
  }

  {
    cppgc::subtle::DisallowGarbageCollectionScope disallow_gc_scope(*this);
    marker_->LeaveAtomicPause();
  }
  marker_.reset();

  if (isolate_) {
    // The size is used for recomputing the global heap limit.
    used_size_ = stats_collector_->marked_bytes();
    // Force a check next time increased memory is reported. This allows for
    // setting limits close to actual heap sizes.
    allocated_size_limit_for_check_ = 0;

    RecordEmbedderMarkingSpeed(isolate_->heap()->tracer(),
                               stats_collector_->marking_time(), used_size_);
  }
}

void CppHeap::CompactAndSweep() {
  if (!TracingInitialized()) {
    return;
  }

  // The allocated bytes counter in v8 was reset to the current marked bytes, so
  // any pending allocated bytes updates should be discarded.
  buffered_allocated_bytes_ = 0;
  const size_t bytes_allocated_in_prefinalizers = ExecutePreFinalizers();
#if CPPGC_VERIFY_HEAP
  UnifiedHeapMarkingVerifier verifier(*this, *collection_type_);
  verifier.Run(stack_state_of_prev_gc(),
               stats_collector()->marked_bytes_on_current_cycle() +
                   bytes_allocated_in_prefinalizers);
#endif  // CPPGC_VERIFY_HEAP
  USE(bytes_allocated_in_prefinalizers);

#if defined(CPPGC_YOUNG_GENERATION)
  ResetRememberedSet();
  // We can reset the remembered set on each GC because surviving Oilpan objects
  // are immediately considered old.
  ResetCrossHeapRememberedSet();
#endif  // defined(CPPGC_YOUNG_GENERATION)

  {
    cppgc::internal::SweepingConfig::CompactableSpaceHandling
        compactable_space_handling;
    {
      std::optional<SweepingOnMutatorThreadForGlobalHandlesScope>
          global_handles_scope;
      if (isolate_) {
        global_handles_scope.emplace(*isolate_->traced_handles());
      }
      compactable_space_handling = compactor_.CompactSpacesIfEnabled();
    }
    const cppgc::internal::SweepingConfig sweeping_config{
        SelectSweepingType(), compactable_space_handling,
        ShouldReduceMemory(current_gc_flags_)
            ? cppgc::internal::SweepingConfig::FreeMemoryHandling::
                  kDiscardWherePossible
            : cppgc::internal::SweepingConfig::FreeMemoryHandling::
                  kDoNotDiscard};
    DCHECK_IMPLIES(!isolate_,
                   SweepingType::kAtomic == sweeping_config.sweeping_type);
    sweeper().Start(sweeping_config);
  }

  in_atomic_pause_ = false;
  collection_type_.reset();
}

void CppHeap::AllocatedObjectSizeIncreased(size_t bytes) {
  buffered_allocated_bytes_ += static_cast<int64_t>(bytes);
  ReportBufferedAllocationSizeIfPossible();
}

void CppHeap::AllocatedObjectSizeDecreased(size_t bytes) {
  buffered_allocated_bytes_ -= static_cast<int64_t>(bytes);
  ReportBufferedAllocationSizeIfPossible();
}

void CppHeap::ReportBufferedAllocationSizeIfPossible() {
  // Reporting memory to V8 may trigger GC.
  if (!IsGCAllowed()) {
    return;
  }

  // We are in attached state.
  DCHECK_NOT_NULL(isolate_);

  // The calls below may trigger full GCs that are synchronous and also execute
  // epilogue callbacks. Since such callbacks may allocate, the counter must
  // already be zeroed by that time.
  const int64_t bytes_to_report = buffered_allocated_bytes_;
  buffered_allocated_bytes_ = 0;

  if (bytes_to_report < 0) {
    DCHECK_GE(used_size_.load(std::memory_order_relaxed), bytes_to_report);
    used_size_.fetch_sub(static_cast<size_t>(-bytes_to_report),
                         std::memory_order_relaxed);
  } else {
    used_size_.fetch_add(static_cast<size_t>(bytes_to_report),
                         std::memory_order_relaxed);
    allocated_size_ += bytes_to_report;

    if (v8_flags.incremental_marking) {
      if (allocated_size_ > allocated_size_limit_for_check_) {
        Heap* heap = isolate_->heap();
        heap->StartIncrementalMarkingIfAllocationLimitIsReached(
            heap->main_thread_local_heap(),
            heap->GCFlagsForIncrementalMarking(),
            kGCCallbackScheduleIdleGarbageCollection);
        if (heap->incremental_marking()->IsMajorMarking()) {
          if (heap->AllocationLimitOvershotByLargeMargin()) {
            heap->FinalizeIncrementalMarkingAtomically(
                i::GarbageCollectionReason::kExternalFinalize);
          } else {
            heap->incremental_marking()->AdvanceOnAllocation();
          }
        }
        allocated_size_limit_for_check_ =
            allocated_size_ + kIncrementalMarkingCheckInterval;
      }
    }
  }
}
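
// Worked example (editorial note, illustrative numbers): if the mutator
// allocates 100 KB and frees 30 KB between reports, buffered_allocated_bytes_
// is +70 KB when ReportBufferedAllocationSizeIfPossible() runs; used_size_
// grows by 70 KB, and the incremental-marking path above is only evaluated
// once allocated_size_ crosses allocated_size_limit_for_check_, i.e. at most
// every kIncrementalMarkingCheckInterval (128 KB) of reported growth.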

void CppHeap::CollectGarbageForTesting(CollectionType collection_type,
                                       StackState stack_state) {
  if (!IsDetachedGCAllowed()) {
    return;
  }

  // Finish sweeping in case it is still running.
  sweeper().FinishIfRunning();

  if (isolate_) {
    reinterpret_cast<v8::Isolate*>(isolate_)
        ->RequestGarbageCollectionForTesting(
            v8::Isolate::kFullGarbageCollection, stack_state);
    return;
  }

  stack()->SetMarkerIfNeededAndCallback([this, collection_type, stack_state]() {
    // Perform an atomic GC, with starting incremental/concurrent marking and
    // immediately finalizing the garbage collection.
    if (!IsMarking()) {
      InitializeMarking(collection_type, nullptr,
                        GarbageCollectionFlagValues::kForced);
      StartMarking();
    }
    EnterFinalPause(stack_state);
    EnterProcessGlobalAtomicPause();
    CHECK(AdvanceMarking(v8::base::TimeDelta::Max(), SIZE_MAX));
    if (FinishConcurrentMarkingIfNeeded()) {
      FinishMarkingAndProcessWeakness();
    }
    CompactAndSweep();
    FinishAtomicSweepingIfRunning();
  });
}

void CppHeap::EnableDetachedGarbageCollectionsForTesting() {
  no_gc_scope_--;
  static_cast<CppgcPlatformAdapter*>(platform())
      ->EnableDetachedModeForTesting();
}

void CppHeap::StartIncrementalGarbageCollectionForTesting() {
  DCHECK(!in_no_gc_scope());
  DCHECK_NULL(isolate_);
  if (IsMarking()) return;
  force_incremental_marking_for_testing_ = true;
  InitializeMarking(CollectionType::kMajor, nullptr,
                    GarbageCollectionFlagValues::kForced);
  StartMarking();
  force_incremental_marking_for_testing_ = false;
}

void CppHeap::FinalizeIncrementalGarbageCollectionForTesting(
    cppgc::EmbedderStackState stack_state) {
  DCHECK(!in_no_gc_scope());
  DCHECK_NULL(isolate_);
  DCHECK(IsMarking());
  if (IsMarking()) {
    CollectGarbageForTesting(CollectionType::kMajor, stack_state);
  }
  sweeper().FinishIfRunning();
}

namespace {

void ReportCustomSpaceStatistics(
    cppgc::internal::RawHeap& raw_heap,
    std::vector<cppgc::CustomSpaceIndex> custom_spaces,
    std::unique_ptr<CustomSpaceStatisticsReceiver> receiver) {
  for (auto custom_space_index : custom_spaces) {
    const cppgc::internal::BaseSpace* space =
        raw_heap.CustomSpace(custom_space_index);
    size_t allocated_bytes = std::accumulate(
        space->begin(), space->end(), size_t{0}, [](size_t sum, auto* page) {
          return sum + page->AllocatedBytesAtLastGC();
        });
    receiver->AllocatedBytes(custom_space_index, allocated_bytes);
  }
}
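
// Illustrative receiver (editorial sketch; embedder-side code, not part of
// this file). `StatsLogger` is a hypothetical implementation of the public
// cppgc::CustomSpaceStatisticsReceiver interface consumed above:
//
//   class StatsLogger final : public cppgc::CustomSpaceStatisticsReceiver {
//    public:
//     void AllocatedBytes(cppgc::CustomSpaceIndex index, size_t bytes) final {
//       std::printf("custom space %zu: %zu bytes\n", index.value, bytes);
//     }
//   };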

class CollectCustomSpaceStatisticsAtLastGCTask final : public v8::Task {
 public:
  static constexpr v8::base::TimeDelta kTaskDelayMs =
      v8::base::TimeDelta::FromMilliseconds(10);

  CollectCustomSpaceStatisticsAtLastGCTask(
      cppgc::internal::HeapBase& heap,
      std::vector<cppgc::CustomSpaceIndex> custom_spaces,
      std::unique_ptr<CustomSpaceStatisticsReceiver> receiver)
      : heap_(heap),
        custom_spaces_(std::move(custom_spaces)),
        receiver_(std::move(receiver)) {}

  void Run() final {
    cppgc::internal::Sweeper& sweeper = heap_.sweeper();
    if (sweeper.PerformSweepOnMutatorThread(
            kStepSizeMs,
            cppgc::internal::StatsCollector::kSweepInTaskForStatistics)) {
      // Sweeping is done.
      DCHECK(!sweeper.IsSweepingInProgress());
      ReportCustomSpaceStatistics(heap_.raw_heap(), std::move(custom_spaces_),
                                  std::move(receiver_));
    } else {
      heap_.platform()->GetForegroundTaskRunner()->PostDelayedTask(
          std::make_unique<CollectCustomSpaceStatisticsAtLastGCTask>(
              heap_, std::move(custom_spaces_), std::move(receiver_)),
          kTaskDelayMs.InSecondsF());
    }
  }

 private:
  static constexpr v8::base::TimeDelta kStepSizeMs =
      v8::base::TimeDelta::FromMilliseconds(5);

  cppgc::internal::HeapBase& heap_;
  std::vector<cppgc::CustomSpaceIndex> custom_spaces_;
  std::unique_ptr<CustomSpaceStatisticsReceiver> receiver_;
};

constexpr v8::base::TimeDelta
    CollectCustomSpaceStatisticsAtLastGCTask::kTaskDelayMs;
constexpr v8::base::TimeDelta
    CollectCustomSpaceStatisticsAtLastGCTask::kStepSizeMs;

}  // namespace

void CppHeap::CollectCustomSpaceStatisticsAtLastGC(
    std::vector<cppgc::CustomSpaceIndex> custom_spaces,
    std::unique_ptr<CustomSpaceStatisticsReceiver> receiver) {
  if (sweeper().IsSweepingInProgress()) {
    platform()->GetForegroundTaskRunner()->PostDelayedTask(
        std::make_unique<CollectCustomSpaceStatisticsAtLastGCTask>(
            AsBase(), std::move(custom_spaces), std::move(receiver)),
        CollectCustomSpaceStatisticsAtLastGCTask::kTaskDelayMs.InSecondsF());
    return;
  }
  ReportCustomSpaceStatistics(raw_heap(), std::move(custom_spaces),
                              std::move(receiver));
}

CppHeap::MetricRecorderAdapter* CppHeap::GetMetricRecorder() const {
  return static_cast<MetricRecorderAdapter*>(
      stats_collector_->GetMetricRecorder());
}

void CppHeap::FinishSweepingIfRunning() {
  sweeper().FinishIfRunning();
  if (isolate_ && ShouldReduceMemory(current_gc_flags_)) {
    isolate_->traced_handles()->DeleteEmptyBlocks();
  }
}

void CppHeap::FinishAtomicSweepingIfRunning() {
  // Young generation GCs are optional and as such sweeping is not necessarily
  // running.
  if (sweeper().IsSweepingInProgress() &&
      SelectSweepingType() == SweepingType::kAtomic) {
    sweeper().FinishIfRunning();
  }
}

void CppHeap::FinishSweepingIfOutOfWork() { sweeper().FinishIfOutOfWork(); }

std::unique_ptr<CppMarkingState> CppHeap::CreateCppMarkingState() {
  if (!TracingInitialized()) return {};
  DCHECK(IsMarking());
  return std::make_unique<CppMarkingState>(
      std::make_unique<cppgc::internal::MarkingStateBase>(
          AsBase(), marker()->To<UnifiedHeapMarker>().GetMarkingWorklists()));
}

std::unique_ptr<CppMarkingState>
CppHeap::CreateCppMarkingStateForMutatorThread() {
  if (!TracingInitialized()) return {};
  DCHECK(IsMarking());
  return std::make_unique<CppMarkingState>(
      marker()->To<UnifiedHeapMarker>().GetMutatorMarkingState());
}

void CppHeap::CollectGarbage(cppgc::internal::GCConfig config) {
  if (!IsGCAllowed()) {
    return;
  }
  // TODO(mlippautz): Respect full config.
  const auto flags =
      (config.free_memory_handling ==
       cppgc::internal::GCConfig::FreeMemoryHandling::kDiscardWherePossible)
          ? GCFlag::kReduceMemoryFootprint
          : GCFlag::kNoFlags;
  isolate_->heap()->CollectAllGarbage(
      flags, GarbageCollectionReason::kCppHeapAllocationFailure);
  CHECK_IMPLIES(
      config.sweeping_type == cppgc::internal::GCConfig::SweepingType::kAtomic,
      !sweeper().IsSweepingInProgress());
}

std::optional<cppgc::EmbedderStackState> CppHeap::overridden_stack_state()
    const {
  return heap_ ? heap_->overridden_stack_state()
               : detached_override_stack_state_;
}

void CppHeap::set_override_stack_state(cppgc::EmbedderStackState state) {
  CHECK(!override_stack_state_scope_);
  CHECK(!detached_override_stack_state_);
  if (heap_) {
    override_stack_state_scope_ = std::make_unique<EmbedderStackStateScope>(
        heap_, EmbedderStackStateOrigin::kExplicitInvocation, state);
  } else {
    detached_override_stack_state_ = state;
  }
}

void CppHeap::clear_overridden_stack_state() {
  if (heap_) {
    CHECK(override_stack_state_scope_);
    override_stack_state_scope_.reset();
  } else {
    CHECK(detached_override_stack_state_);
    detached_override_stack_state_.reset();
  }
}

void CppHeap::StartIncrementalGarbageCollection(cppgc::internal::GCConfig) {
  UNIMPLEMENTED();
}

size_t CppHeap::epoch() const { UNIMPLEMENTED(); }

#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
std::optional<int> CppHeap::UpdateAllocationTimeout() {
  if (!v8_flags.cppgc_random_gc_interval) {
    return std::nullopt;
  }
  if (!allocation_timeout_rng_) {
    allocation_timeout_rng_.emplace(v8_flags.fuzzer_random_seed);
  }
  return allocation_timeout_rng_->NextInt(v8_flags.cppgc_random_gc_interval) +
         1;
}
#endif  // V8_ENABLE_ALLOCATION_TIMEOUT
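
// Editorial note: with --cppgc-random-gc-interval=N the returned timeout is
// uniformly distributed in [1, N] (NextInt(N) yields values in [0, N)), so in
// fuzzing configurations the allocator forces a GC after at most N
// allocations.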

void CppHeap::ResetCrossHeapRememberedSet() {
  if (!generational_gc_supported()) {
    DCHECK(cross_heap_remembered_set_.IsEmpty());
    return;
  }
  DCHECK(isolate_);
  cross_heap_remembered_set_.Reset(*isolate_);
}

void CppHeap::UpdateGCCapabilitiesFromFlagsForTesting() {
  UpdateGCCapabilitiesFromFlags();
}

bool CppHeap::IsGCAllowed() const {
  return isolate_ && HeapBase::IsGCAllowed();
}

bool CppHeap::IsGCForbidden() const {
  return (isolate_ && isolate_->InFastCCall() &&
          !v8_flags.allow_allocation_in_fast_api_call) ||
         HeapBase::IsGCForbidden();
}

bool CppHeap::CurrentThreadIsHeapThread() const {
  if (!is_detached_ && isolate_ &&
      V8_UNLIKELY(isolate_->was_locker_ever_used())) {
    // If v8::Locker has been used, we only check if the isolate is now locked
    // by the current thread.
    return isolate_->thread_manager()->IsLockedByCurrentThread();
  }
  return HeapBase::CurrentThreadIsHeapThread();
}

}  // namespace internal
}  // namespace v8