#ifndef V8_SANDBOX_EXTERNAL_ENTITY_TABLE_INL_H_
#define V8_SANDBOX_EXTERNAL_ENTITY_TABLE_INL_H_

#include "src/base/iterator.h"
#include "src/common/segmented-table-inl.h"
#include "src/sandbox/external-entity-table.h"

namespace v8 {
namespace internal {
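// Overview (a reconstructed sketch, inferred from the constants and helpers
// used below): an ExternalEntityTable manages a table of Entry values inside
// a VirtualAddressSpace reservation that is carved into fixed-size segments
// (kSegmentSize bytes, kEntriesPerSegment entries). Entries are grouped into
// Spaces, each owning a set of segments and a lock-free freelist of unused
// entries. When kUseContiguousMemory is enabled, segment 0 (at
// kInternalReadOnlySegmentOffset) is mapped read-only and its entry 0
// (kInternalNullEntryIndex) is reserved as the null entry.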
template <typename Entry, size_t size>
ExternalEntityTable<Entry, size>::Space::~Space() {
  // The segments belonging to this space must have been deallocated (via
  // TearDownSpace()) before the space is destroyed.
  DCHECK(segments_.empty());
}
template <typename Entry, size_t size>
uint32_t ExternalEntityTable<Entry, size>::Space::freelist_length() const {
  auto freelist = freelist_head_.load(std::memory_order_relaxed);
  return freelist.length();
}
template <typename Entry, size_t size>
uint32_t ExternalEntityTable<Entry, size>::Space::num_segments() {
  mutex_.AssertHeld();
  return static_cast<uint32_t>(segments_.size());
}
template <typename Entry, size_t size>
bool ExternalEntityTable<Entry, size>::Space::Contains(uint32_t index) {
  base::MutexGuard guard(&mutex_);
  Segment segment = Segment::Containing(index);
  return segments_.find(segment) != segments_.end();
}
template <typename Entry, size_t size>
void ExternalEntityTable<Entry, size>::Initialize() {
  Base::Initialize();

  if constexpr (kUseContiguousMemory) {
    // Allocate the read-only segment at the very beginning of the table so
    // that it occupies a fixed, known offset.
    Address first_segment = this->vas_->AllocatePages(
        this->vas_->base(), kSegmentSize, kSegmentSize,
        PagePermissions::kRead);
    if (first_segment != this->vas_->base()) {
      V8::FatalProcessOutOfMemory(
          nullptr,
          "ExternalEntityTable::InitializeTable (first segment allocation)");
    }
  }
}
template <typename Entry, size_t size>
void ExternalEntityTable<Entry, size>::TearDown() {
  if constexpr (kUseContiguousMemory) {
    // Free the read-only segment that was allocated in Initialize().
    this->vas_->FreePages(this->vas_->base(), kSegmentSize);
  }
  Base::TearDown();
}
template <typename Entry, size_t size>
void ExternalEntityTable<Entry, size>::InitializeSpace(Space* space) {
#ifdef DEBUG
  DCHECK_EQ(space->owning_table_, nullptr);
  space->owning_table_ = this;
#endif
}
template <typename Entry, size_t size>
void ExternalEntityTable<Entry, size>::TearDownSpace(Space* space) {
  DCHECK(is_initialized());
  DCHECK(space->BelongsTo(this));
  for (auto segment : space->segments_) {
    this->FreeTableSegment(segment);
  }
  space->segments_.clear();
}
template <typename Entry, size_t size>
void ExternalEntityTable<Entry, size>::AttachSpaceToReadOnlySegment(
    Space* space) {
  DCHECK(is_initialized());
  DCHECK(space->BelongsTo(this));

  DCHECK(!space->is_internal_read_only_space());
  space->is_internal_read_only_space_ = true;

  // The read-only segment is mapped read-only; temporarily unseal it so that
  // its freelist can be written.
  UnsealReadOnlySegment();
  {
    base::MutexGuard guard(&space->mutex_);
    DCHECK_EQ(space->segments_.size(), 0);

    // Index 0 of the read-only segment is reserved for the null entry, so
    // the freelist starts at index 1.
    Segment segment = Segment::At(kInternalReadOnlySegmentOffset);
    FreelistHead freelist = this->InitializeFreeList(segment, 1);
    Extend(space, segment, freelist);
  }
  SealReadOnlySegment();
}
template <typename Entry, size_t size>
void ExternalEntityTable<Entry, size>::DetachSpaceFromReadOnlySegment(
    Space* space) {
  DCHECK(space->BelongsTo(this));
  // Remove the read-only segment from the space's segment list without
  // freeing it: that segment is owned by the table itself, not by the space.
  CHECK_EQ(space->segments_.size(), 1);
  space->segments_.clear();
}
template <typename Entry, size_t size>
void ExternalEntityTable<Entry, size>::UnsealReadOnlySegment() {
  bool success = this->vas_->SetPagePermissions(
      this->vas_->base(), kSegmentSize, PagePermissions::kReadWrite);
  CHECK(success);
}
template <typename Entry, size_t size>
void ExternalEntityTable<Entry, size>::SealReadOnlySegment() {
  bool success = this->vas_->SetPagePermissions(
      this->vas_->base(), kSegmentSize, PagePermissions::kRead);
  CHECK(success);
}
template <typename Entry, size_t size>
uint32_t ExternalEntityTable<Entry, size>::AllocateEntry(Space* space) {
  std::optional<uint32_t> maybe_index = TryAllocateEntry(space);
  if (V8_UNLIKELY(!maybe_index)) {
    V8::FatalProcessOutOfMemory(nullptr, "ExternalEntityTable::AllocateEntry");
  }
  return *maybe_index;
}
template <typename Entry, size_t size>
std::optional<uint32_t> ExternalEntityTable<Entry, size>::TryAllocateEntry(
    Space* space) {
  DCHECK(is_initialized());
  DCHECK(space->BelongsTo(this));

  FreelistHead freelist;
  bool success = false;
  while (!success) {
    // The acquire load pairs with the release store in Extend() so that a
    // freelist entry is never read before it has been initialized.
    freelist = space->freelist_head_.load(std::memory_order_acquire);
    if (freelist.is_empty()) {
      // Freelist is empty: take the lock, then re-check in case another
      // thread refilled it in the meantime.
      base::MutexGuard guard(&space->mutex_);
      freelist = space->freelist_head_.load(std::memory_order_relaxed);
      if (freelist.is_empty()) {
        // Still empty, so try to extend this space by another segment.
        if (auto maybe_freelist = TryExtend(space)) {
          freelist = *maybe_freelist;
        } else {
          return {};
        }
      }
    }
    success = TryAllocateEntryFromFreelist(space, freelist);
  }

  uint32_t allocated_entry = freelist.next();
  DCHECK(space->Contains(allocated_entry));
  DCHECK_IMPLIES(!space->is_internal_read_only_space(), allocated_entry != 0);
  return allocated_entry;
}
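// Usage sketch (hypothetical; `table`, `space`, and the entry initialization
// are illustrative, not part of this file):
//
//   uint32_t index = table.AllocateEntry(&space);  // crashes on OOM
//   table.at(index) = ...;  // entry-type-specific initialization
//
//   if (std::optional<uint32_t> maybe = table.TryAllocateEntry(&space)) {
//     // use *maybe; an empty optional signals that the space could not be
//     // extended (e.g. the address space reservation is exhausted).
//   }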
template <typename Entry, size_t size>
uint32_t ExternalEntityTable<Entry, size>::AllocateEntryBelow(
    Space* space, uint32_t threshold_index) {
  DCHECK(is_initialized());

  FreelistHead freelist;
  bool success = false;
  while (!success) {
    freelist = space->freelist_head_.load(std::memory_order_acquire);
    // Check that the next free entry is below the threshold.
    if (freelist.is_empty() || freelist.next() >= threshold_index) return 0;

    success = TryAllocateEntryFromFreelist(space, freelist);
  }

  uint32_t allocated_entry = freelist.next();
  DCHECK(space->Contains(allocated_entry));
  DCHECK_NE(allocated_entry, 0);
  DCHECK_LT(allocated_entry, threshold_index);
  return allocated_entry;
}
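// TryAllocateEntryFromFreelist attempts to pop the current head entry off the
// freelist with a single compare-and-swap of the freelist head. A failed CAS
// means another thread popped (or refilled) the list first; callers handle
// that by reloading the head and retrying, so a false return never means the
// table is out of memory.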
template <typename Entry, size_t size>
bool ExternalEntityTable<Entry, size>::TryAllocateEntryFromFreelist(
    Space* space, FreelistHead freelist) {
  DCHECK(!freelist.is_empty());
  DCHECK(space->Contains(freelist.next()));

  Entry& freelist_entry = this->at(freelist.next());
  uint32_t next_freelist_entry = freelist_entry.GetNextFreelistEntryIndex();
  FreelistHead new_freelist(next_freelist_entry, freelist.length() - 1);
  bool success = space->freelist_head_.compare_exchange_strong(
      freelist, new_freelist, std::memory_order_relaxed);

  // When the CAS succeeded, the entry must have been a freelist entry.
  // Otherwise this is not guaranteed, as another thread may have allocated
  // and overwritten the same entry in the meantime.
  if (success) {
    DCHECK_IMPLIES(freelist.length() > 1, !new_freelist.is_empty());
    DCHECK_IMPLIES(freelist.length() == 1, new_freelist.is_empty());
  }
  return success;
}
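// Note: relaxed ordering suffices for the compare-exchange above because it
// only transfers ownership of an entry that is already initialized; the
// initialization of newly added freelist entries is published by the release
// store in Extend() and consumed by the acquire loads in the allocation
// paths above.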
template <typename Entry, size_t size>
std::optional<typename ExternalEntityTable<Entry, size>::FreelistHead>
ExternalEntityTable<Entry, size>::TryExtend(Space* space) {
  DCHECK_EQ(space->freelist_length(), 0);
  space->mutex_.AssertHeld();
  DCHECK(!space->is_internal_read_only_space());

  auto extended = this->TryAllocateAndInitializeSegment();
  if (!extended) return {};

  auto [segment, freelist_head] = *extended;
  Extend(space, segment, freelist_head);
  return freelist_head;
}
template <typename Entry, size_t size>
void ExternalEntityTable<Entry, size>::Extend(Space* space, Segment segment,
                                              FreelistHead freelist) {
  // The freelist should be empty when calling this method.
  DCHECK_EQ(space->freelist_length(), 0);
  space->mutex_.AssertHeld();

  space->segments_.insert(segment);
  // The read-only space is the only space that owns the first segment.
  DCHECK_EQ(space->is_internal_read_only_space(), segment.number() == 0);
  DCHECK_EQ(space->is_internal_read_only_space(),
            segment.first_entry() == kInternalNullEntryIndex);

  if (V8_UNLIKELY(space->is_internal_read_only_space())) {
    // For the internal read-only segment, index 0 is reserved for the null
    // entry, which must consist of all-zero bytes.
#ifdef DEBUG
    static constexpr uint8_t kNullBytes[kEntrySize] = {0};
    CHECK_EQ(0, memcmp(&this->at(kInternalNullEntryIndex), kNullBytes,
                       kEntrySize));
#endif  // DEBUG
  }

  // Publish the new freelist; see the memory-ordering note below.
  space->freelist_head_.store(freelist, std::memory_order_release);
}
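// Memory-ordering note: the release store at the end of Extend() pairs with
// the acquire loads of freelist_head_ in TryAllocateEntry() and
// AllocateEntryBelow(). A thread that observes the new freelist head is
// therefore guaranteed to also observe the initialized entries of the new
// segment, which is what makes the lock-free allocation fast path safe.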
template <typename Entry, size_t size>
uint32_t ExternalEntityTable<Entry, size>::GenericSweep(Space* space) {
  return GenericSweep(space, [](Entry&) {});
}
template <typename Entry, size_t size>
template <typename Callback>
uint32_t ExternalEntityTable<Entry, size>::GenericSweep(Space* space,
                                                        Callback callback) {
  DCHECK(space->BelongsTo(this));

  // Lock the space. Technically this is not necessary since no other thread
  // can allocate entries at this point, but some of the methods we call on
  // the space assert that the lock is held.
  base::MutexGuard guard(&space->mutex_);

  // Disable the freelist while the table is being modified.
  space->freelist_head_.store(kEntryAllocationIsForbiddenMarker,
                              std::memory_order_relaxed);

  // Here we can iterate over the segments collection without taking a lock
  // because no other thread can currently allocate entries in this space.
  uint32_t current_freelist_head = 0;
  uint32_t current_freelist_length = 0;
  std::vector<Segment> segments_to_deallocate;

  for (auto segment : base::Reversed(space->segments_)) {
    // Remember the state of the freelist before this segment.
    uint32_t previous_freelist_head = current_freelist_head;
    uint32_t previous_freelist_length = current_freelist_length;

    // Process every entry in this segment, again going top to bottom.
    for (auto it = this->iter_at(segment.last_entry());
         it.index() >= segment.first_entry(); --it) {
      if (!it->IsMarked()) {
        it->MakeFreelistEntry(current_freelist_head);
        current_freelist_head = it.index();
        current_freelist_length++;
      } else {
        callback(*it);
        it->Unmark();
      }
    }

    // If a segment is completely empty, free it.
    uint32_t free_entries = current_freelist_length - previous_freelist_length;
    bool segment_is_empty = free_entries == kEntriesPerSegment;
    if (segment_is_empty) {
      segments_to_deallocate.push_back(segment);
      // Restore the state of the freelist before this segment.
      current_freelist_head = previous_freelist_head;
      current_freelist_length = previous_freelist_length;
    }
  }

  // Segments cannot be removed while iterating over the segments set, so
  // defer that until now.
  for (auto segment : segments_to_deallocate) {
    this->FreeTableSegment(segment);
    space->segments_.erase(segment);
  }

  FreelistHead new_freelist(current_freelist_head, current_freelist_length);
  space->freelist_head_.store(new_freelist, std::memory_order_release);
  DCHECK_EQ(space->freelist_length(), current_freelist_length);

  uint32_t num_live_entries = space->capacity() - current_freelist_length;
  return num_live_entries;
}
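// Usage sketch (hypothetical; `table` and `space` are illustrative): a GC
// would run this after its marking phase has set the mark bit on all live
// entries.
//
//   uint32_t num_live = table.GenericSweep(&space, [](Entry& entry) {
//     // Invoked once per surviving (marked) entry, before its mark bit is
//     // cleared; e.g. update external memory accounting here.
//   });
//
// Afterwards all unmarked entries are back on the freelist and num_live is
// the number of entries that survived the sweep.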
template <typename Entry, size_t size>
template <typename Callback>
void ExternalEntityTable<Entry, size>::IterateEntriesIn(Space* space,
                                                        Callback callback) {
  DCHECK(space->BelongsTo(this));

  base::MutexGuard guard(&space->mutex_);
  for (auto segment : space->segments_) {
    for (uint32_t i = segment.first_entry(); i <= segment.last_entry(); i++) {
      callback(i);
    }
  }
}

}  // namespace internal
}  // namespace v8

#endif  // V8_SANDBOX_EXTERNAL_ENTITY_TABLE_INL_H_