#ifndef V8_SANDBOX_COMPACTIBLE_EXTERNAL_ENTITY_TABLE_INL_H_
#define V8_SANDBOX_COMPACTIBLE_EXTERNAL_ENTITY_TABLE_INL_H_

#include <algorithm>

#include "src/logging/counters.h"
#include "src/sandbox/compactible-external-entity-table.h"
#include "src/sandbox/external-entity-table-inl.h"

#ifdef V8_COMPRESS_POINTERS

namespace v8 {
namespace internal {
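// Overview of table compaction, summarized from the code below: while a
// space is being compacted, a threshold index (the start of the evacuation
// area) is stored in start_of_evacuation_area_. Entries at or above the
// threshold are relocated via evacuation entries allocated below it; once
// all of them have been moved, the segments making up the evacuation area
// can be deallocated. If no entry below the threshold can be allocated,
// compaction is aborted by setting a marker bit in the threshold value.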
template <typename Entry, size_t size>
uint32_t CompactibleExternalEntityTable<Entry, size>::AllocateEntry(
    Space* space) {
  uint32_t index = Base::AllocateEntry(space);

  // During compaction, all new entries should be allocated below the start
  // of the evacuation area. If this allocation nevertheless landed inside
  // the evacuation area, the freelist below the threshold has been
  // exhausted, so compaction cannot succeed and must be aborted.
  uint32_t start_of_evacuation_area =
      space->start_of_evacuation_area_.load(std::memory_order_relaxed);
  if (V8_UNLIKELY(index >= start_of_evacuation_area)) {
    space->AbortCompacting(start_of_evacuation_area);
  }

  return index;
}
template <typename Entry, size_t size>
typename CompactibleExternalEntityTable<Entry, size>::CompactionResult
CompactibleExternalEntityTable<Entry, size>::FinishCompaction(
    Space* space, Histogram* counter) {
  DCHECK(space->BelongsTo(this));
  DCHECK(!space->is_internal_read_only_space());

  uint32_t start_of_evacuation_area =
      space->start_of_evacuation_area_.load(std::memory_order_relaxed);
  bool evacuation_was_successful = false;
  if (space->IsCompacting()) {
    auto outcome = ExternalEntityTableCompactionOutcome::kAborted;
    if (space->CompactingWasAborted()) {
      // Remove the abort marker to recover the original threshold value.
      start_of_evacuation_area &= ~Space::kCompactionAbortedMarker;
    } else {
      evacuation_was_successful = true;
      outcome = ExternalEntityTableCompactionOutcome::kSuccess;
    }
    DCHECK(IsAligned(start_of_evacuation_area, Base::kEntriesPerSegment));

    space->StopCompacting();
    counter->AddSample(static_cast<int>(outcome));
  }

  return {start_of_evacuation_area, evacuation_was_successful};
}
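// Note (interpretation, not from the original source): the returned
// CompactionResult packs the unmarked evacuation-area threshold together
// with a success flag; the caller is presumably the table's sweep routine,
// which uses the threshold to decide which entries are evacuation entries
// that still need to be resolved.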
template <typename Entry, size_t size>
void CompactibleExternalEntityTable<Entry, size>::MaybeCreateEvacuationEntry(
    Space* space, uint32_t index, Address handle_location) {
  // Cache the threshold in a local variable: another marking thread may
  // modify it concurrently, and the checks below must all use one value.
  uint32_t start_of_evacuation_area =
      space->start_of_evacuation_area_.load(std::memory_order_relaxed);
  if (index >= start_of_evacuation_area) {
    DCHECK(space->IsCompacting());
    uint32_t new_index =
        Base::AllocateEntryBelow(space, start_of_evacuation_area);
    if (new_index) {
      DCHECK_LT(new_index, start_of_evacuation_area);
      DCHECK(space->Contains(new_index));
      // This write must be atomic: another thread may concurrently read the
      // slot while trying (and failing) to allocate it from the freelist.
      Base::at(new_index).MakeEvacuationEntry(handle_location);
    } else {
      // No free entry is available below the threshold, so the evacuation
      // area can no longer be emptied: give up on compacting this space.
      space->AbortCompacting(start_of_evacuation_area);
    }
  }
}
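// Sketch of how such an entry is consumed (summary, not code from this
// file): during sweeping, an evacuation entry found below the threshold
// tells the sweeper to move the entry currently referenced by the handle at
// handle_location down to new_index and to rewrite that handle accordingly,
// thereby emptying the evacuation area.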
template <typename Entry, size_t size>
void CompactibleExternalEntityTable<Entry, size>::Space::StartCompacting(
    uint32_t start_of_evacuation_area) {
  DCHECK_EQ(invalidated_fields_.size(), 0);
  start_of_evacuation_area_.store(start_of_evacuation_area,
                                  std::memory_order_relaxed);
}
template <typename Entry, size_t size>
void CompactibleExternalEntityTable<Entry, size>::Space::StopCompacting() {
  start_of_evacuation_area_.store(kNotCompactingMarker,
                                  std::memory_order_relaxed);
}
template <typename Entry, size_t size>
void CompactibleExternalEntityTable<Entry, size>::Space::AbortCompacting(
    uint32_t start_of_evacuation_area) {
  uint32_t compaction_aborted_marker =
      start_of_evacuation_area | kCompactionAbortedMarker;
  DCHECK_NE(compaction_aborted_marker, kNotCompactingMarker);
  start_of_evacuation_area_.store(compaction_aborted_marker,
                                  std::memory_order_relaxed);
}
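// Worked example with illustrative values (the real constants live in the
// corresponding header and may differ): if kCompactionAbortedMarker were
// 0xf0000000 and the threshold were 0x00040000, an abort would store
// 0xf0040000. CompactingWasAborted() then sees the marker bits, and
// FinishCompaction() recovers 0x00040000 via `& ~kCompactionAbortedMarker`.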
template <typename Entry, size_t size>
bool CompactibleExternalEntityTable<Entry, size>::Space::IsCompacting() {
  return start_of_evacuation_area_.load(std::memory_order_relaxed) !=
         kNotCompactingMarker;
}
template <typename Entry, size_t size>
bool CompactibleExternalEntityTable<Entry,
                                    size>::Space::CompactingWasAborted() {
  auto value = start_of_evacuation_area_.load(std::memory_order_relaxed);
  return (value & kCompactionAbortedMarker) == kCompactionAbortedMarker;
}
template <typename Entry, size_t size>
bool CompactibleExternalEntityTable<Entry, size>::Space::FieldWasInvalidated(
    Address field_address) const {
  invalidated_fields_mutex_.AssertHeld();
  return std::find(invalidated_fields_.begin(), invalidated_fields_.end(),
                   field_address) != invalidated_fields_.end();
}
template <typename Entry, size_t size>
void CompactibleExternalEntityTable<Entry,
                                    size>::Space::ClearInvalidatedFields() {
  invalidated_fields_mutex_.AssertHeld();
  invalidated_fields_.clear();
}
template <typename Entry, size_t size>
void CompactibleExternalEntityTable<Entry, size>::Space::AddInvalidatedField(
    Address field_address) {
  if (IsCompacting()) {
    MutexGuard guard(&invalidated_fields_mutex_);
    invalidated_fields_.push_back(field_address);
  }
}
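// Note (interpretation, not from the original source): invalidated fields
// only need tracking while compacting, since they matter only when the
// sweeper is about to resolve an evacuation entry whose recorded
// handle_location may no longer be a live slot; FieldWasInvalidated() lets
// the sweeper skip such stale locations.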
template <typename Entry, size_t size>
void CompactibleExternalEntityTable<Entry,
                                    size>::Space::StartCompactingIfNeeded() {
  uint32_t num_free_entries = this->freelist_length();
  uint32_t num_total_entries = this->capacity();

  // Heuristic: compact if the space is at least 1MB large, at least 10% of
  // its entries are free, and at least one segment would become free after
  // a successful compaction.
  double free_ratio = static_cast<double>(num_free_entries) /
                      static_cast<double>(num_total_entries);
  uint32_t num_segments_to_evacuate =
      (num_free_entries / 2) / Base::kEntriesPerSegment;
  uint32_t space_size = num_total_entries * Base::kEntrySize;
  bool should_compact = (space_size >= 1 * MB) && (free_ratio >= 0.10) &&
                        (num_segments_to_evacuate >= 1);

  // Under --stress-compaction, compact whenever there are at least two
  // segments: one to evacuate entries out of and one to evacuate them into.
  if (v8_flags.stress_compaction) {
    should_compact = this->num_segments() > 1;
    num_segments_to_evacuate = std::max(1u, num_segments_to_evacuate);
  }

  if (should_compact) {
    // Evacuate the last num_segments_to_evacuate segments: everything from
    // the first entry of the first such segment onwards.
    auto first_segment_to_evacuate =
        *std::prev(this->segments_.end(), num_segments_to_evacuate);
    uint32_t start_of_evacuation_area = first_segment_to_evacuate.first_entry();
    StartCompacting(start_of_evacuation_area);
  }
}
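// Worked example with illustrative constants (assuming 8-byte entries and
// 8192 entries per segment; the real values come from Base): a space with
// capacity 262144 (2MB) and 40000 free entries has free_ratio ~0.15 and
// num_segments_to_evacuate = 20000 / 8192 = 2, so all three conditions hold
// and the last two segments are chosen as the evacuation area.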
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPRESS_POINTERS

#endif  // V8_SANDBOX_COMPACTIBLE_EXTERNAL_ENTITY_TABLE_INL_H_