12#ifdef V8_COMPRESS_POINTERS
// Populates the read-only space of this table from the entries recorded in
// the ReadOnlyArtifacts' external pointer registry.
// NOTE(review): this extract is garbled — original-file line numbers are fused
// into the tokens and at least one interior line (the registered call taking
// the three arguments below) is missing; code reproduced verbatim.
17void ExternalPointerTable::SetUpFromReadOnlyArtifacts(
18 Space* read_only_space,
const ReadOnlyArtifacts* artifacts) {
// Read-only segments are normally sealed; unseal for the duration of this
// scope so the entries below can be written.
19 UnsealReadOnlySegmentScope unseal_scope(
this);
// Each registry entry carries a pointer value and its tag; both are handed
// to the (not visible here) per-entry setup call together with the space.
20 for (
const auto& registry_entry : artifacts->external_pointer_registry()) {
22 read_only_space, registry_entry.value, registry_entry.tag);
// Helper that merges several std::set<Segment> collections and iterates all
// of them in reverse (descending) set order, yielding each segment together
// with the per-collection Data it was registered with via AddSegments().
// NOTE(review): interior lines are missing from this extract (e.g. the
// selection of `min_stream` inside Next() and the `Stream` struct header);
// code reproduced verbatim.
39template <
typename Segment,
typename Data>
40class SegmentsIterator {
// Reverse iterators: traversal visits each set from its largest element
// down to its smallest.
41 using iterator =
typename std::set<Segment>::reverse_iterator;
42 using const_iterator =
typename std::set<Segment>::const_reverse_iterator;
45 SegmentsIterator() =
default;
// Registers one segment set plus the Data tag to attach to every segment
// yielded from it. `data` is copied into the stream.
47 void AddSegments(
const std::set<Segment>& segments, Data data) {
48 streams_.emplace_back(segments.rbegin(), segments.rend(), data);
// Returns the next (segment, data) pair across all registered streams, or
// an empty optional once every stream is exhausted. The stream whose
// current segment is chosen (presumably the maximum across streams — the
// selection logic is not visible here) is then advanced.
51 std::optional<std::pair<Segment, Data>> Next() {
54 std::optional<std::pair<Segment, Data>>
result;
55 for (
auto [iter,
end, data] : streams_) {
58 Segment segment = *iter;
61 result.emplace(segment, data);
// Advance the chosen stream past the segment just returned.
// NOTE(review): `min_stream` is declared on a line missing from this
// extract.
66 streams_[min_stream].iter++;
// One stream per AddSegments() call: a [iter, end) range plus its Data tag.
78 Stream(iterator iter, const_iterator
end, Data data)
79 : iter(iter),
end(
end), data(data) {}
82 std::vector<Stream> streams_;
// Sweeps `space` (and, if non-null, drains `from_space` into it), resolves
// any evacuation entries produced by table compaction, rebuilds the
// freelist, frees empty/evacuated segments, and returns the number of live
// entries remaining. Samples the result into `counters`.
// NOTE(review): this extract is garbled — original-file line numbers are
// fused into the tokens and many interior lines (parameter list tail,
// `handle_location` extraction, several closing braces) are missing; code
// reproduced verbatim.
85uint32_t ExternalPointerTable::EvacuateAndSweepAndCompact(Space* space,
88 DCHECK(space->BelongsTo(
this));
// The read-only space is swept separately and must never reach here.
89 DCHECK(!space->is_internal_read_only_space());
92 DCHECK_IMPLIES(from_space, !from_space->is_internal_read_only_space());
// Hold the invalidated-fields lock for the whole sweep so mutators cannot
// record new invalidations concurrently.
99 base::MutexGuard invalidated_fields_guard(&space->invalidated_fields_mutex_);
// Poison the freelist head with a sentinel so any allocation attempt during
// sweeping is detected (allocation is forbidden while the table is swept).
104 space->freelist_head_.store(kEntryAllocationIsForbiddenMarker,
105 std::memory_order_relaxed);
// Merge-iterate the segments of `space` and (optionally) `from_space`,
// tagging each segment with the compaction result of its owning space.
107 SegmentsIterator<Segment, CompactionResult> segments_iter;
108 Histogram* counter = counters->external_pointer_table_compaction_outcome();
109 CompactionResult space_compaction = FinishCompaction(space, counter);
110 segments_iter.AddSegments(space->segments_, space_compaction);
// If a from_space was given, steal its segments, freelist and invalidated
// fields so it ends up empty and everything is accounted to `space`.
115 std::set<Segment> from_space_segments;
119 &from_space->invalidated_fields_mutex_);
121 std::swap(from_space->segments_, from_space_segments);
122 DCHECK(from_space->segments_.empty());
124 CompactionResult from_space_compaction =
125 FinishCompaction(from_space, counter);
126 segments_iter.AddSegments(from_space_segments, from_space_compaction);
128 FreelistHead empty_freelist;
129 from_space->freelist_head_.store(empty_freelist, std::memory_order_relaxed);
// Transfer any invalidated-field records to the surviving space.
131 for (
Address field : from_space->invalidated_fields_)
132 space->invalidated_fields_.push_back(field);
133 from_space->ClearInvalidatedFields();
// Rebuild the freelist from scratch while sweeping. Entries are pushed at
// the head, so the order of the descending segment/entry walk below
// determines the final freelist order.
144 uint32_t current_freelist_head = 0;
145 uint32_t current_freelist_length = 0;
146 auto AddToFreelist = [&](uint32_t entry_index) {
147 at(entry_index).MakeFreelistEntry(current_freelist_head);
148 current_freelist_head = entry_index;
149 current_freelist_length++;
// Segments found to be fully free (or that live in the evacuation area)
// are collected here and deallocated after the walk.
152 std::vector<Segment> segments_to_deallocate;
153 while (
auto current = segments_iter.Next()) {
154 Segment segment = current->first;
155 CompactionResult compaction = current->second;
// A segment at/above the start of the evacuation area is dropped entirely
// when compaction succeeded: all its live entries were evacuated below it.
157 bool segment_will_be_evacuated =
158 compaction.success &&
159 segment.first_entry() >= compaction.start_of_evacuation_area;
// Snapshot the freelist so it can be rewound if this whole segment is
// deallocated below.
163 uint32_t previous_freelist_head = current_freelist_head;
164 uint32_t previous_freelist_length = current_freelist_length;
// Walk the segment's entries from last to first.
167 for (uint32_t
i = segment.last_entry();
i >= segment.first_entry();
i--) {
168 auto payload = at(
i).GetRawPayload();
169 if (payload.ContainsEvacuationEntry()) {
// Evacuation entries only exist below the evacuation area.
172 DCHECK(!segment_will_be_evacuated);
177 payload.ExtractEvacuationEntryHandleLocation();
// If the field holding the handle was invalidated (its object died or
// moved), the evacuation is abandoned; otherwise the entry is resolved.
185 if (space->FieldWasInvalidated(handle_location)) {
203 ResolveEvacuationEntryDuringSweeping(
205 compaction.start_of_evacuation_area);
// Resolved entries are installed unmarked.
212 DCHECK(!at(
i).GetRawPayload().HasMarkBitSet());
213 }
else if (!payload.HasMarkBitSet()) {
// Dead entry: release any managed resource it owns, then (in code not
// visible here, presumably) add it to the freelist.
214 FreeManagedResourceIfPresent(
i);
// Live entry: clear its mark bit for the next GC cycle.
217 auto new_payload = payload;
218 new_payload.ClearMarkBit();
219 at(
i).SetRawPayload(new_payload);
224 DCHECK(!at(
i).HasEvacuationEntry());
// If every entry in the segment went onto the freelist, or the segment is
// being evacuated, deallocate it and rewind the freelist to exclude its
// entries.
231 uint32_t free_entries = current_freelist_length - previous_freelist_length;
232 bool segment_is_empty = free_entries == kEntriesPerSegment;
233 if (segment_is_empty || segment_will_be_evacuated) {
234 segments_to_deallocate.push_back(segment);
236 current_freelist_head = previous_freelist_head;
237 current_freelist_length = previous_freelist_length;
// Adopt the surviving from_space segments, then release the dead ones.
241 space->segments_.merge(from_space_segments);
244 for (
auto segment : segments_to_deallocate) {
254 FreeTableSegment(segment);
255 space->segments_.erase(segment);
// All pending invalidations were consumed by the walk above.
258 space->ClearInvalidatedFields();
// Publish the rebuilt freelist; the release store also re-enables
// allocation (replacing the forbidden-marker stored earlier).
260 FreelistHead new_freelist(current_freelist_head, current_freelist_length);
261 space->freelist_head_.store(new_freelist, std::memory_order_release);
262 DCHECK_EQ(space->freelist_length(), current_freelist_length);
264 uint32_t num_live_entries = space->capacity() - current_freelist_length;
265 counters->external_pointers_count()->AddSample(num_live_entries);
266 return num_live_entries;
// Sweep-and-compact without an evacuation source: delegates to
// EvacuateAndSweepAndCompact with a null from_space.
// NOTE(review): closing brace is outside this garbled extract; code
// reproduced verbatim.
269uint32_t ExternalPointerTable::SweepAndCompact(Space* space,
270 Counters* counters) {
271 return EvacuateAndSweepAndCompact(space,
nullptr, counters);
// Plain sweep: only valid when no compaction is in progress for `space`;
// the compaction path inside SweepAndCompact then becomes a no-op.
// NOTE(review): closing brace is outside this garbled extract; code
// reproduced verbatim.
274uint32_t ExternalPointerTable::Sweep(Space* space, Counters* counters) {
275 DCHECK(!space->IsCompacting());
276 return SweepAndCompact(space, counters);
// Moves a live entry out of the evacuation area (index >= start_of_
// evacuation_area) into its pre-reserved slot below it, rewrites the
// on-heap handle to point at the new slot, and fixes up the back-pointer
// of any ManagedResource owned by the entry.
// NOTE(review): this extract is garbled — the declarations of `old_handle`,
// `new_handle`, `new_index` and `handle_location` are on lines missing from
// view; code reproduced verbatim.
279void ExternalPointerTable::ResolveEvacuationEntryDuringSweeping(
281 uint32_t start_of_evacuation_area) {
// The handle read from the heap is untrusted at this point — CHECK, not
// DCHECK.
286 CHECK(IsValidHandle(old_handle));
288 uint32_t old_index = HandleToIndex(old_handle);
// Evacuation always moves entries from at/above the boundary to below it.
293 DCHECK_GE(old_index, start_of_evacuation_area);
294 DCHECK_LT(new_index, start_of_evacuation_area);
295 auto& new_entry = at(new_index);
// Copy the entry to its new slot; the resolved entry is left unmarked.
296 at(old_index).Evacuate(new_entry, EvacuateMarkMode::kLeaveUnmarked);
// Point the on-heap field at the relocated entry.
297 *handle_location = new_handle;
// If the entry owns a ManagedResource, its stored table handle must track
// the move so later frees target the right entry.
301 if (
Address addr = at(new_index).ExtractManagedResourceOrNull()) {
302 ManagedResource* resource =
reinterpret_cast<ManagedResource*
>(addr);
303 DCHECK_EQ(resource->ept_entry_, old_handle);
304 resource->ept_entry_ = new_handle;
union v8::internal::@341::BuiltinMetadata::KindSpecificData data
ZoneVector< RpoNumber > & result
LockGuard< Mutex > MutexGuard
V8_INLINE IndirectHandle< T > handle(Tagged< T > object, Isolate *isolate)
constexpr ExternalPointerTagRange kAnyExternalPointerTagRange(kFirstExternalPointerTag, kLastExternalPointerTag)
uint32_t ExternalPointerHandle
static constexpr Address kNullAddress
#define DCHECK_IMPLIES(v1, v2)
#define DCHECK_NE(v1, v2)
#define DCHECK_GE(v1, v2)
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
#define DCHECK_LT(v1, v2)
#define DCHECK_EQ(v1, v2)