#ifndef V8_ZONE_ZONE_CHUNK_LIST_H_
#define V8_ZONE_ZONE_CHUNK_LIST_H_
template <typename T, bool backwards, bool modifiable>
class ZoneChunkListIterator;
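// ZoneChunkList<T> stores its elements in a doubly-linked chain of chunks
// allocated from a Zone; each chunk keeps its items inline, directly behind
// the chunk header (see Chunk::items() below).
//
// Hedged usage sketch (illustration only, not taken from the original header;
// assumes `zone` is a live Zone*):
//
//   ZoneChunkList<int> list(zone);
//   list.push_back(1);
//   list.push_back(2);
//   list.push_front(0);
//   for (int value : list) {
//     // Forward iteration visits 0, 1, 2.
//   }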
    std::swap(size_, other.size_);
    std::swap(front_, other.front_);
  template <typename S, bool backwards, bool modifiable>
    T* items() { return reinterpret_cast<T*>(this + 1); }
    const T* items() const { return reinterpret_cast<const T*>(this + 1); }
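    // The items are stored inline, directly behind the Chunk header, which is
    // why items() simply reinterprets `this + 1`. Presumably NewChunk()
    // reserves the header and the item array in a single zone allocation,
    // roughly along these lines (sketch only, not taken from this header):
    //
    //   void* memory = zone_->Allocate(sizeof(Chunk) + capacity * sizeof(T));
    //   Chunk* chunk = new (memory) Chunk;
    //   chunk->capacity_ = capacity;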
  void Verify() const {

    } else if (empty()) {

      for (Chunk* chunk = front_; chunk != nullptr; chunk = chunk->next_) {

      size_t size_check = 0;
      bool in_empty_tail = false;
      for (Chunk* chunk = front_; chunk != nullptr; chunk = chunk->next_) {
        DCHECK_EQ(in_empty_tail, chunk->empty());
        size_check += chunk->size();

        in_empty_tail = true;
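        // Invariant being checked: every chunk up to and including
        // last_nonempty_ is non-empty, every chunk behind it is empty, and
        // the per-chunk sizes must add up to the list's recorded size.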
template <typename T, bool backwards, bool modifiable>

  template <typename S>
  using maybe_const =
      typename std::conditional<modifiable, S,
                                typename std::add_const<S>::type>::type;
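  // maybe_const<S> resolves to S for modifiable iterators and to const S
  // otherwise; this is how the const_iterator variants dereference to
  // const T& while the plain iterator yields T&.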
    static_assert(!backwards, "Advance only works on forward iterators");

    for (uint32_t i = 0; i < amount; ++i) {
    if (list->empty()) return End(list);

    DCHECK(!list->last_nonempty_->empty());
    return ZoneChunkListIterator(list->last_nonempty_,
                                 list->last_nonempty_->position_ - 1);

    if (list->empty()) return Begin(list);
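    // For backward (reverse) iterators, Begin() starts at the last stored
    // element, i.e. at position_ - 1 of last_nonempty_; on an empty list
    // Begin() and End() delegate to each other, so they compare equal and the
    // usual begin() == end() emptiness check still holds.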
  template <bool move_backward>
    if (last_nonempty_ == nullptr) {

      front_ = NewChunk(kInitialChunkCapacity);
      last_nonempty_ = front_;
    } else if (last_nonempty_->full()) {

      if (last_nonempty_->next_ == nullptr) {
        Chunk* chunk = NewChunk(NextChunkCapacity(last_nonempty_->capacity_));
        last_nonempty_->next_ = chunk;

      last_nonempty_ = last_nonempty_->next_;
      DCHECK(!last_nonempty_->full());

    last_nonempty_->items()[last_nonempty_->position_] = item;
    ++last_nonempty_->position_;

    DCHECK_LE(last_nonempty_->position_, last_nonempty_->capacity_);
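    // Growth policy: when the last chunk is full, push_back either reuses an
    // already allocated (previously rewound) successor chunk or allocates a
    // larger one via NewChunk(NextChunkCapacity(...)). Presumably the
    // capacity grows geometrically and is clamped at kMaxChunkCapacity,
    // roughly along the lines of this sketch (the real NextChunkCapacity body
    // is not part of this excerpt):
    //
    //   static uint32_t NextChunkCapacity(uint32_t previous_capacity) {
    //     return std::min(previous_capacity * 2, kMaxChunkCapacity);
    //   }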
    if (front_ == nullptr) {

      front_ = NewChunk(kInitialChunkCapacity);
      last_nonempty_ = front_;
    } else if (front_->full()) {

      Chunk* chunk = NewChunk(NextChunkCapacity(front_->capacity_));
      front_->previous_ = chunk;
      chunk->next_ = front_;

    T* end = front_->items() + front_->position_;
    std::move_backward(front_->items(), end, end + 1);
    front_->items()[0] = item;

    DCHECK_LE(front_->position_, front_->capacity_);
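    // Unlike push_back, push_front has to shift the elements already stored
    // in the front chunk one slot to the right (std::move_backward above)
    // before writing the new item into slot 0, so it costs O(size of the
    // front chunk) rather than amortized O(1).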
  SeekResult SeekIndex(size_t index) const {

    Chunk* current = front_;
    while (index >= current->capacity_) {
      index -= current->capacity_;
      current = current->next_;
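    // The while loop walks the chunk chain from front_, subtracting each
    // chunk's capacity_ until the remaining index falls inside `current`;
    // presumably the chunk and the in-chunk offset are then returned together
    // as a SeekResult.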
    if (limit >= size()) return;

    last_nonempty_ = seek_result.chunk_;

         current = current->next_) {
      current->position_ = 0;
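      // Rewinding does not free any chunk memory (it all lives in the Zone);
      // it only resets position_ of the chunks past the new limit so that
      // later push_back calls can reuse their storage.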
  const_iterator Find(const size_t index) const {
    if (split_begin == begin()) {

    T* chunk_split_begin = split_chunk->items() + split_begin.position_;
    T* chunk_split_end = split_chunk->items() + split_chunk->position_;
    uint32_t new_chunk_size =
        static_cast<uint32_t>(chunk_split_end - chunk_split_begin);
    uint32_t new_chunk_capacity = std::max(

    CHECK_LE(new_chunk_size, new_chunk_capacity);
    Chunk* new_chunk = NewChunk(new_chunk_capacity);
    std::copy(chunk_split_begin, chunk_split_end, new_chunk->items());

    result.front_ = new_chunk;
    result.last_nonempty_ =
        (last_nonempty_ == split_chunk) ? new_chunk : last_nonempty_;

    if (new_chunk->next_) {

    last_nonempty_ = split_chunk;
    split_chunk->next_ = nullptr;

    for (Chunk* chunk = front_; chunk != split_chunk; chunk = chunk->next_) {

      new_size += chunk->size();

    new_size += split_chunk->size();
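    // SplitAt copies the elements of split_chunk at or after the split point
    // into a freshly allocated chunk that becomes the front of the returned
    // list; chunks following split_chunk end up in the result as well (note
    // result.last_nonempty_ above), while the original list is truncated at
    // split_chunk.
    //
    // Hedged usage sketch (illustration only; assumes `list` currently holds
    // the values 0..4):
    //
    //   ZoneChunkList<int> tail = list.SplitAt(list.Find(2));
    //   // Afterwards `list` holds 0, 1 and `tail` holds 2, 3, 4.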
    if (other.front_ == nullptr) return;

    last_nonempty_->next_ = other.front_;
    other.front_->previous_ = last_nonempty_;

    last_nonempty_ = other.last_nonempty_;

    size_ += other.size_;

    other.front_ = nullptr;
    other.last_nonempty_ = nullptr;
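    // Append splices the other list's chunk chain onto this one in O(1): only
    // the boundary links, last_nonempty_ and size_ are updated, and `other`
    // is left empty (front_/last_nonempty_ cleared) instead of being copied
    // element by element.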
    for (Chunk* current = front_; current != nullptr;
         current = current->next_) {
      void* start = current->items();
      void* end = current->items() + current->position_;
      size_t bytes = static_cast<size_t>(reinterpret_cast<uintptr_t>(end) -
                                         reinterpret_cast<uintptr_t>(start));

      MemCopy(ptr, current->items(), bytes);
      ptr += current->position_;
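    // CopyTo assumes `ptr` points to a buffer with room for size() elements;
    // because it copies raw bytes with MemCopy, it is presumably only
    // intended for trivially copyable element types T.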