// Maps a free block size to its power-of-two bucket index.
uint32_t BucketIndexForSize(uint32_t size) {
  return WhichPowerOfTwo(RoundDownToPowerOfTwo32(size));
}

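// Standalone illustrative sketch (its own translation unit, not part of
// free-list.cc): assuming RoundDownToPowerOfTwo32/WhichPowerOfTwo behave like
// std::bit_floor plus log2 of a power of two, BucketIndexForSize(size) is
// floor(log2(size)), so bucket N holds free blocks of size [2^N, 2^(N+1)).
#include <bit>
#include <cstdint>
#include <cstdio>

static uint32_t BucketIndexForSizeSketch(uint32_t size) {
  // std::bit_floor rounds down to a power of two; std::countr_zero of a
  // power of two yields its exponent. Requires size > 0.
  return static_cast<uint32_t>(std::countr_zero(std::bit_floor(size)));
}

int main() {
  for (uint32_t size : {32u, 48u, 64u, 100u, 1024u}) {
    std::printf("size %u -> bucket %u\n", size, BucketIndexForSizeSketch(size));
  }
  // Prints buckets 5, 5, 6, 6, 10.
}
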
// FreeList::Entry — the intrusive free-list node placed into free memory.
static Entry& CreateAt(void* memory, size_t size) {
  // Make sure the freelist header is writable before the placement-new below.
  ASAN_UNPOISON_MEMORY_REGION(memory, sizeof(Entry));
  return *new (memory) Entry(size);
}

void Link(Entry** previous_next) {
  next_ = *previous_next;
  *previous_next = this;
}

void Unlink(Entry** previous_next) {
  *previous_next = next_;
  next_ = nullptr;
}

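// Standalone illustrative sketch (its own translation unit, not part of
// free-list.cc): how Link()/Unlink() thread an entry through whatever pointer
// currently references the list head, shown with a minimal stand-in node type.
#include <cassert>

struct Node {
  Node* next = nullptr;
  void Link(Node** previous_next) {
    next = *previous_next;
    *previous_next = this;
  }
  void Unlink(Node** previous_next) {
    *previous_next = next;
    next = nullptr;
  }
};

int main() {
  Node a, b;
  Node* head = nullptr;
  a.Link(&head);    // head -> a
  b.Link(&head);    // head -> b -> a; new entries are pushed at the head.
  assert(head == &b && b.next == &a);
  b.Unlink(&head);  // head -> a
  assert(head == &a && b.next == nullptr);
}
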
FreeList::FreeList(FreeList&& other) V8_NOEXCEPT
    : free_list_heads_(std::move(other.free_list_heads_)),
      free_list_tails_(std::move(other.free_list_tails_)),
      biggest_free_list_index_(std::move(other.biggest_free_list_index_)) {
  other.Clear();
}

FreeList& FreeList::operator=(FreeList&& other) V8_NOEXCEPT {
  Clear();
  Append(std::move(other));
  return *this;
}

std::pair<Address, Address> FreeList::AddReturningUnusedBounds(Block block) {
  const size_t size = block.size;

  if (size < sizeof(Entry)) {
    // The block is too small to carry a free-list Entry; put a wasted Filler
    // object there instead of linking it into a bucket.
    auto& filler = Filler::CreateAt(block.address, size);
    return {reinterpret_cast<Address>(&filler + 1),
            reinterpret_cast<Address>(&filler + 1)};
  }

  Entry& entry = Entry::CreateAt(block.address, size);
  const size_t index = BucketIndexForSize(static_cast<uint32_t>(size));
  entry.Link(&free_list_heads_[index]);
  biggest_free_list_index_ = std::max(biggest_free_list_index_, index);
  if (!entry.Next()) {
    free_list_tails_[index] = &entry;
  }
  // Unused bytes span from just past the Entry header to the end of the block.
  return {reinterpret_cast<Address>(&entry + 1),
          reinterpret_cast<Address>(&entry) + size};
}

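// Standalone illustrative sketch (its own translation unit, not part of
// free-list.cc): the bounds returned above mix pointer and byte arithmetic.
// `&entry + 1` steps over one Entry header, while
// `reinterpret_cast<Address>(&entry) + size` is the byte-wise end of the
// block. The header size used here is hypothetical; the real one is
// sizeof(FreeList::Entry) (kFreeListEntrySize).
#include <cassert>
#include <cstddef>
#include <cstdint>

int main() {
  const std::uintptr_t address = 0x1000;  // hypothetical block start
  const std::size_t block_size = 128;     // hypothetical block size
  const std::size_t header_size = 16;     // hypothetical sizeof(Entry)
  const std::uintptr_t unused_begin = address + header_size;  // &entry + 1
  const std::uintptr_t unused_end = address + block_size;     // end of block
  // The two bounds bracket the part of the block the Entry header did not
  // overwrite.
  assert(unused_end - unused_begin == block_size - header_size);
}
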
void FreeList::Append(FreeList&& other) {
  DCHECK_NE(this, &other);
#if DEBUG
  const size_t expected_size = Size() + other.Size();
#endif
  // Newly created entries get added to the head, so splice each of other's
  // buckets in front of the corresponding bucket of this list.
  for (size_t index = 0; index < free_list_tails_.size(); ++index) {
    Entry* other_tail = other.free_list_tails_[index];
    Entry*& this_head = free_list_heads_[index];
    if (other_tail) {
      other_tail->SetNext(this_head);
      if (!this_head) {
        free_list_tails_[index] = other_tail;
      }
      this_head = other.free_list_heads_[index];
      other.free_list_heads_[index] = nullptr;
      other.free_list_tails_[index] = nullptr;
    }
  }

  biggest_free_list_index_ =
      std::max(biggest_free_list_index_, other.biggest_free_list_index_);
  other.biggest_free_list_index_ = 0;
#if DEBUG
  DCHECK_EQ(expected_size, Size());
#endif
}

FreeList::Block FreeList::Allocate(size_t allocation_size) {
  // Scan from the biggest bucket downwards to carve off as much memory as
  // possible per (slow) free-list allocation.
  size_t bucket_size = static_cast<size_t>(1) << biggest_free_list_index_;
  size_t index = biggest_free_list_index_;
  for (; index > 0; --index, bucket_size >>= 1) {
    Entry* entry = free_list_heads_[index];
    if (allocation_size > bucket_size) {
      // Final bucket candidate; check only the initial entry, as a linear
      // scan is considered too costly.
      if (!entry || entry->AllocatedSize() < allocation_size) break;
    }
    if (entry) {
      if (!entry->Next()) {
        free_list_tails_[index] = nullptr;
      }
      entry->Unlink(&free_list_heads_[index]);
      biggest_free_list_index_ = index;
      return {entry, entry->AllocatedSize()};
    }
  }

  biggest_free_list_index_ = index;
  return {nullptr, 0u};
}

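// Standalone illustrative sketch (its own translation unit, not part of
// free-list.cc): the invariant behind the scan above. Bucket N only holds
// entries of at least 2^N bytes, so while allocation_size <= bucket_size any
// entry in the bucket fits; only the final, smaller bucket needs the actual
// size of its head entry checked. The bucket mapping mirrors
// BucketIndexForSize under the same assumption as the first sketch.
#include <bit>
#include <cassert>
#include <cstdint>

int main() {
  for (uint32_t entry_size = 16; entry_size < (1u << 16); ++entry_size) {
    const uint32_t bucket =
        static_cast<uint32_t>(std::countr_zero(std::bit_floor(entry_size)));
    // Every entry filed under `bucket` is at least the bucket's nominal size.
    assert(entry_size >= (uint32_t{1} << bucket));
  }
}
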
size_t FreeList::Size() const {
  size_t size = 0;
  for (auto* entry : free_list_heads_) {
    while (entry) {
      size += entry->AllocatedSize();
      entry = entry->Next();
    }
  }
  return size;
}

bool FreeList::IsEmpty() const {
  return std::all_of(free_list_heads_.cbegin(), free_list_heads_.cend(),
                     [](const auto* entry) { return !entry; });
}

bool FreeList::ContainsForTesting(Block block) const {
  for (Entry* list : free_list_heads_) {
    for (Entry* entry = list; entry; entry = entry->Next()) {
      if (entry <= block.address &&
          (reinterpret_cast<Address>(block.address) + block.size <=
           reinterpret_cast<Address>(entry) + entry->AllocatedSize()))
        return true;
    }
  }
  return false;
}

void FreeList::CollectStatistics(
    HeapStatistics::FreeListStatistics& free_list_stats) {
  std::vector<size_t>& bucket_size = free_list_stats.bucket_size;
  std::vector<size_t>& free_count = free_list_stats.free_count;
  std::vector<size_t>& free_size = free_list_stats.free_size;
  DCHECK(bucket_size.empty());
  DCHECK(free_count.empty());
  DCHECK(free_size.empty());
  for (size_t i = 0; i < kPageSizeLog2; ++i) {
    size_t entry_count = 0;
    size_t entry_size = 0;
    for (Entry* entry = free_list_heads_[i]; entry; entry = entry->Next()) {
      ++entry_count;
      entry_size += entry->AllocatedSize();
    }
    bucket_size.push_back(static_cast<size_t>(1) << i);
    free_count.push_back(entry_count);
    free_size.push_back(entry_size);
  }
}

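// Standalone illustrative sketch (its own translation unit, not part of
// free-list.cc): the three parallel vectors filled above describe each bucket
// i as (nominal size 2^i, number of free entries, total free bytes). Toy
// model with hypothetical entry sizes already grouped by bucket index.
#include <cstddef>
#include <cstdio>
#include <vector>

int main() {
  const std::vector<std::vector<std::size_t>> entries_per_bucket = {
      {}, {}, {}, {}, {}, {40, 56}, {64}, {}};

  std::vector<std::size_t> bucket_size, free_count, free_size;
  for (std::size_t i = 0; i < entries_per_bucket.size(); ++i) {
    std::size_t entry_count = 0;
    std::size_t entry_size = 0;
    for (std::size_t s : entries_per_bucket[i]) {
      ++entry_count;
      entry_size += s;
    }
    bucket_size.push_back(static_cast<std::size_t>(1) << i);
    free_count.push_back(entry_count);
    free_size.push_back(entry_size);
  }

  for (std::size_t i = 0; i < bucket_size.size(); ++i) {
    std::printf("bucket size %zu: count=%zu bytes=%zu\n", bucket_size[i],
                free_count[i], free_size[i]);
  }
}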