static constexpr size_t kSegmentSize = Base::kSegmentSize;
static constexpr size_t kEntriesPerSegment = Base::kEntriesPerSegment;
static constexpr size_t kEntrySize = Base::kEntrySize;
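// These re-export the base table's layout parameters. Assuming the usual
// segmented-table layout, where a segment is a fixed-size block of
// fixed-size entries, the three constants relate as in this consistency
// sketch (not part of the original header):
//
//   static_assert(kEntriesPerSegment == kSegmentSize / kEntrySize,
//                 "a segment holds kSegmentSize / kEntrySize entries");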
uint32_t freelist_length() const;

uint32_t num_segments();

bool is_empty() { return num_segments() == 0; }

uint32_t capacity() { return num_segments() * kEntriesPerSegment; }

bool Contains(uint32_t index);
bool is_internal_read_only_space() const { return is_internal_read_only_space_; }
bool BelongsTo(const void* table) const { return owning_table_ == table; }

uint32_t NumSegmentsForTesting() { return num_segments(); }
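// Usage sketch for the accessors above (hypothetical names: `table` is a
// concrete table derived from this class, `space` one of its spaces):
//
//   if (!space.is_empty()) {
//     uint32_t segments = space.num_segments();
//     uint32_t max_entries = space.capacity();  // segments * kEntriesPerSegment
//     CHECK(space.BelongsTo(&table));
//   }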
std::atomic<void*> owning_table_ = nullptr;

bool is_internal_read_only_space_ = false;
void set_allocate_black(bool allocate_black) { allocate_black_ = allocate_black; }
bool allocate_black_ = false;
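// Sketch of how a GC might toggle black allocation around a marking phase
// (hypothetical usage; only the allocation path consults this flag):
//
//   space.set_allocate_black(true);   // entries allocated during marking
//                                     // start out marked ("black")
//   // ... marking runs ...
//   space.set_allocate_black(false);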
uint32_t AllocateEntry(Space* space);
std::optional<uint32_t> TryAllocateEntry(Space* space);

uint32_t AllocateEntryBelow(Space* space, uint32_t threshold_index);
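// Sketch of the three allocation entry points (hypothetical `table` and
// `space`; the failure conventions are assumptions based on the return
// types): AllocateEntry is expected to succeed, TryAllocateEntry reports
// exhaustion via an empty optional, and AllocateEntryBelow only hands out
// an entry whose index lies below the given threshold:
//
//   uint32_t a = table.AllocateEntry(&space);
//   if (std::optional<uint32_t> b = table.TryAllocateEntry(&space)) {
//     // *b is a fresh entry; an empty optional means the table is full.
//   }
//   uint32_t c = table.AllocateEntryBelow(&space, /*threshold_index=*/1024);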
std::optional<FreelistHead> TryExtend(Space* space);
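// TryExtend looks like the internal growth path: allocate one more segment
// for the space and hand back the freelist built from its entries. A hedged
// sketch of an allocation slow path on top of it:
//
//   if (std::optional<FreelistHead> freelist = TryExtend(space)) {
//     // A new segment was added; retry allocation from *freelist.
//   } else {
//     // The table is exhausted (or the segment could not be mapped).
//   }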
uint32_t GenericSweep(Space* space);

template <typename Callback>
uint32_t GenericSweep(Space* space, Callback marked);
template <typename Callback>
void IterateEntriesIn(Space* space, Callback callback);
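// Sketch of the callback-taking variants (hypothetical `table`/`space`;
// the callback contracts are inferred from the parameter names): the
// sweeping overload invokes `marked` for entries that survive the sweep,
// while IterateEntriesIn visits every entry currently in use:
//
//   uint32_t live = table.GenericSweep(&space, [](uint32_t index) {
//     // Entry at `index` was marked and survives the sweep.
//   });
//   table.IterateEntriesIn(&space, [](uint32_t index) {
//     // Entry at `index` is currently allocated.
//   });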
static constexpr bool kSupportsCompaction = false;
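// Derived tables that do support compaction presumably shadow this
// constant; clients could then branch at compile time, e.g.:
//
//   if constexpr (TableType::kSupportsCompaction) {
//     // Set up compaction bookkeeping for this space.
//   }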
void InitializeSpace(Space* space);

void TearDownSpace(Space* space);
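// A space must be initialized before its first use and torn down before it
// is destroyed; a minimal lifecycle sketch (hypothetical names):
//
//   MyTable::Space space;
//   table.InitializeSpace(&space);
//   uint32_t index = table.AllocateEntry(&space);
//   // ... use the entry ...
//   table.TearDownSpace(&space);  // deallocates the space's segments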
void AttachSpaceToReadOnlySegment(Space* space);
void DetachSpaceFromReadOnlySegment(Space* space);
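// Sketch for a space backed by the shared read-only segment (assumption:
// attach and detach bracket the space's lifetime, mirroring the
// Initialize/TearDown pair above):
//
//   table.AttachSpaceToReadOnlySegment(&ro_space);
//   // ro_space now serves entries from the internal read-only segment.
//   table.DetachSpaceFromReadOnlySegment(&ro_space);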
table_->UnsealReadOnlySegment();
static constexpr uint32_t kInternalReadOnlySegmentOffset = 0;
static constexpr uint32_t kInternalNullEntryIndex = 0;
static constexpr uint32_t kEndOfInternalReadOnlySegment = kEntriesPerSegment;
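// The constants above pin the internal read-only segment at the start of
// the table, with entry 0 serving as the null entry. A hedged sketch of the
// check an index-validation helper might perform (hypothetical helper, not
// part of this header):
//
//   bool IsInternalReadOnlyEntry(uint32_t index) {
//     return index < kEndOfInternalReadOnlySegment;
//   }
//   // index == kInternalNullEntryIndex identifies the null entry.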
void UnsealReadOnlySegment();
void SealReadOnlySegment();
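// The read-only segment is normally mapped read-only, so any write to it
// must be bracketed by an unseal/seal pair; the `table_->UnsealReadOnlySegment()`
// call above suggests a helper object automates this. A minimal sketch:
//
//   UnsealReadOnlySegment();
//   // ... initialize read-only entries ...
//   SealReadOnlySegment();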