v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
zone-containers.h
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_ZONE_ZONE_CONTAINERS_H_
#define V8_ZONE_ZONE_CONTAINERS_H_

#include <deque>
#include <forward_list>
#include <initializer_list>
#include <iterator>
#include <list>
#include <map>
#include <queue>
#include <set>
#include <stack>
#include <unordered_map>
#include <unordered_set>

#include "absl/container/btree_map.h"
#include "absl/container/flat_hash_map.h"
#include "absl/container/flat_hash_set.h"
#include "src/base/hashing.h"
#include "src/base/intrusive-set.h"
#include "src/base/small-map.h"
#include "src/base/small-vector.h"
#include "src/zone/zone-allocator.h"

namespace v8 {
namespace internal {
// A drop-in replacement for std::vector that uses a Zone for its allocations,
// and (unlike a std::vector subclass with a custom allocator) gives us
// precise control over its implementation and performance characteristics.
//
// When working on this code, keep the following rules of thumb in mind:
// - Everything between {data_} and {end_} (exclusive) is a live instance of T.
//   When writing to these slots, use the {CopyingOverwrite} or
//   {MovingOverwrite} helpers.
// - Everything between {end_} (inclusive) and {capacity_} (exclusive) is
//   considered uninitialized memory. When writing to these slots, use the
//   {CopyToNewStorage} or {MoveToNewStorage} helpers. Obviously, also use
//   these helpers to initialize slots in newly allocated backing stores.
// - When shrinking, call ~T on all slots between the new and the old position
//   of {end_} to maintain the above invariant. Also call ~T on all slots in
//   discarded backing stores.
// - The interface offered by {ZoneVector} should be a subset of
//   {std::vector}'s API, so that calling code doesn't need to be aware of
//   ZoneVector's implementation details and can assume standard C++ behavior.
//   (It's okay if we don't support everything that std::vector supports; we
//   can fill such gaps when use cases arise.)
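//
// A minimal usage sketch (assuming {zone} points to a live Zone; the names
// below are illustrative, not part of this header):
//
//   ZoneVector<int> values(zone);  // empty vector backed by {zone}
//   values.push_back(1);           // grows the zone-allocated backing store
//   values.push_back(2);
//   for (int v : values) Use(v);   // iterates exactly like std::vector
//
// The backing store is allocated from {zone} and is reclaimed when the zone
// itself is deallocated.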
template <typename T>
class ZoneVector {
 public:
  using iterator = T*;
  using const_iterator = const T*;
  using reverse_iterator = std::reverse_iterator<T*>;
  using const_reverse_iterator = std::reverse_iterator<const T*>;
  using value_type = T;
  using reference = T&;
  using const_reference = const T&;
  using size_type = size_t;

  // Constructs an empty vector.
  explicit ZoneVector(Zone* zone) : zone_(zone) {}

  // Constructs a new vector and fills it with {size} elements, each
  // constructed via the default constructor.
  ZoneVector(size_t size, Zone* zone) : zone_(zone) {
    data_ = size > 0 ? zone->AllocateArray<T>(size) : nullptr;
    end_ = capacity_ = data_ + size;
    for (T* p = data_; p < end_; p++) emplace(p);
  }

  // Constructs a new vector and fills it with {size} elements, each
  // having the value {def}.
  ZoneVector(size_t size, T def, Zone* zone) : zone_(zone) {
    data_ = size > 0 ? zone->AllocateArray<T>(size) : nullptr;
    end_ = capacity_ = data_ + size;
    for (T* p = data_; p < end_; p++) emplace(p, def);
  }

  // Constructs a new vector and fills it with the contents of the given
  // initializer list.
  ZoneVector(std::initializer_list<T> list, Zone* zone) : zone_(zone) {
    size_t size = list.size();
    if (size > 0) {
      data_ = zone->AllocateArray<T>(size);
      CopyToNewStorage(data_, list.begin(), list.end());
    } else {
      data_ = nullptr;
    }
    end_ = capacity_ = data_ + size;
  }

  // Constructs a new vector and fills it with the contents of the range
  // [first, last).
  template <class It,
            typename = typename std::iterator_traits<It>::iterator_category>
  ZoneVector(It first, It last, Zone* zone) : zone_(zone) {
    if constexpr (std::is_base_of_v<
                      std::random_access_iterator_tag,
                      typename std::iterator_traits<It>::iterator_category>) {
      size_t size = last - first;
      data_ = size > 0 ? zone->AllocateArray<T>(size) : nullptr;
      end_ = capacity_ = data_ + size;
      for (T* p = data_; p < end_; p++) emplace(p, *first++);
    } else {
      while (first != last) push_back(*first++);
    }
    DCHECK_EQ(first, last);
  }

  ZoneVector(const ZoneVector& other) V8_NOEXCEPT : zone_(other.zone_) {
    *this = other;
  }

  ZoneVector(ZoneVector&& other) V8_NOEXCEPT { *this = std::move(other); }

  ~ZoneVector() {
    for (T* p = data_; p < end_; p++) p->~T();
    if (data_) zone_->DeleteArray(data_, capacity());
  }

  // Assignment operators.
  ZoneVector& operator=(const ZoneVector& other) V8_NOEXCEPT {
    // Self-assignment would cause undefined behavior in the !copy_assignable
    // branch, but likely indicates a bug in calling code anyway.
    DCHECK_NE(this, &other);
    T* src = other.data_;
    if (capacity() >= other.size() && zone_ == other.zone_) {
      T* dst = data_;
      if constexpr (std::is_trivially_copyable_v<T>) {
        size_t size = other.size();
        if (size) memcpy(dst, src, size * sizeof(T));
        end_ = dst + size;
      } else if constexpr (std::is_copy_assignable_v<T>) {
        while (dst < end_ && src < other.end_) *dst++ = *src++;
        while (src < other.end_) emplace(dst++, *src++);
        T* old_end = end_;
        end_ = dst;
        for (T* p = end_; p < old_end; p++) p->~T();
      } else {
        for (T* p = data_; p < end_; p++) p->~T();
        while (src < other.end_) emplace(dst++, *src++);
        end_ = dst;
      }
    } else {
      for (T* p = data_; p < end_; p++) p->~T();
      if (data_) zone_->DeleteArray(data_, capacity());
      size_t new_cap = other.capacity();
      if (new_cap > 0) {
        data_ = zone_->AllocateArray<T>(new_cap);
        CopyToNewStorage(data_, other.data_, other.end_);
      } else {
        data_ = nullptr;
      }
      capacity_ = data_ + new_cap;
      end_ = data_ + other.size();
    }
    return *this;
  }

  ZoneVector& operator=(ZoneVector&& other) V8_NOEXCEPT {
    // Self-assignment would cause undefined behavior, and is probably a bug.
    DCHECK_NE(this, &other);
    // Move-assigning vectors from different zones would have surprising
    // lifetime semantics regardless of how we choose to implement it (keep
    // the old zone? Take the new zone?).
    if (zone_ == nullptr) {
      zone_ = other.zone_;
    } else {
      DCHECK_EQ(zone_, other.zone_);
    }
    for (T* p = data_; p < end_; p++) p->~T();
    if (data_) zone_->DeleteArray(data_, capacity());
    data_ = other.data_;
    end_ = other.end_;
    capacity_ = other.capacity_;
    // {other.zone_} may stay.
    other.data_ = other.end_ = other.capacity_ = nullptr;
    return *this;
  }

  ZoneVector& operator=(std::initializer_list<T> ilist) {
    clear();
    EnsureCapacity(ilist.size());
    CopyToNewStorage(data_, ilist.begin(), ilist.end());
    end_ = data_ + ilist.size();
    return *this;
  }

  void swap(ZoneVector<T>& other) noexcept {
    DCHECK_EQ(zone_, other.zone_);
    std::swap(data_, other.data_);
    std::swap(end_, other.end_);
    std::swap(capacity_, other.capacity_);
  }

  void resize(size_t new_size) {
    EnsureCapacity(new_size);
    T* new_end = data_ + new_size;
    for (T* p = end_; p < new_end; p++) emplace(p);
    for (T* p = new_end; p < end_; p++) p->~T();
    end_ = new_end;
  }

  void resize(size_t new_size, const T& value) {
    EnsureCapacity(new_size);
    T* new_end = data_ + new_size;
    for (T* p = end_; p < new_end; p++) emplace(p, value);
    for (T* p = new_end; p < end_; p++) p->~T();
    end_ = new_end;
  }

  void assign(size_t new_size, const T& value) {
    if (capacity() >= new_size) {
      T* new_end = data_ + new_size;
      T* assignable = data_ + std::min(size(), new_size);
      for (T* p = data_; p < assignable; p++) CopyingOverwrite(p, &value);
      for (T* p = assignable; p < new_end; p++) CopyToNewStorage(p, &value);
      for (T* p = new_end; p < end_; p++) p->~T();
      end_ = new_end;
    } else {
      clear();
      EnsureCapacity(new_size);
      T* new_end = data_ + new_size;
      for (T* p = data_; p < new_end; p++) emplace(p, value);
      end_ = new_end;
    }
  }

  void clear() {
    for (T* p = data_; p < end_; p++) p->~T();
    end_ = data_;
  }

  size_t size() const { return end_ - data_; }
  bool empty() const { return end_ == data_; }
  size_t capacity() const { return capacity_ - data_; }
  void reserve(size_t new_cap) { EnsureCapacity(new_cap); }
  T* data() { return data_; }
  const T* data() const { return data_; }
  Zone* zone() const { return zone_; }

  T& at(size_t pos) {
    DCHECK_LT(pos, size());
    return data_[pos];
  }
  const T& at(size_t pos) const {
    DCHECK_LT(pos, size());
    return data_[pos];
  }

  T& operator[](size_t pos) { return at(pos); }
  const T& operator[](size_t pos) const { return at(pos); }

  T& front() {
    DCHECK_GT(end_, data_);
    return *data_;
  }
  const T& front() const {
    DCHECK_GT(end_, data_);
    return *data_;
  }

  T& back() {
    DCHECK_GT(end_, data_);
    return *(end_ - 1);
  }
  const T& back() const {
    DCHECK_GT(end_, data_);
    return *(end_ - 1);
  }

  T* begin() V8_NOEXCEPT { return data_; }
  const T* begin() const V8_NOEXCEPT { return data_; }
  const T* cbegin() const V8_NOEXCEPT { return data_; }

  T* end() V8_NOEXCEPT { return end_; }
  const T* end() const V8_NOEXCEPT { return end_; }
  const T* cend() const V8_NOEXCEPT { return end_; }

  reverse_iterator rbegin() V8_NOEXCEPT {
    return std::make_reverse_iterator(end());
  }
  const_reverse_iterator rbegin() const V8_NOEXCEPT {
    return std::make_reverse_iterator(end());
  }
  const_reverse_iterator crbegin() const V8_NOEXCEPT {
    return std::make_reverse_iterator(cend());
  }
  reverse_iterator rend() V8_NOEXCEPT {
    return std::make_reverse_iterator(begin());
  }
  const_reverse_iterator rend() const V8_NOEXCEPT {
    return std::make_reverse_iterator(begin());
  }
  const_reverse_iterator crend() const V8_NOEXCEPT {
    return std::make_reverse_iterator(cbegin());
  }

  void push_back(const T& value) {
    EnsureOneMoreCapacity();
    emplace(end_++, value);
  }
  void push_back(T&& value) { emplace_back(std::move(value)); }

  void pop_back() {
    DCHECK_GT(end_, data_);
    (--end_)->~T();
  }

  template <typename... Args>
  T& emplace_back(Args&&... args) {
    EnsureOneMoreCapacity();
    T* ptr = end_++;
    new (ptr) T(std::forward<Args>(args)...);
    return *ptr;
  }

  template <class It,
            typename = typename std::iterator_traits<It>::iterator_category>
  T* insert(const T* pos, It first, It last) {
    T* position;
    if constexpr (std::is_base_of_v<
                      std::random_access_iterator_tag,
                      typename std::iterator_traits<It>::iterator_category>) {
      DCHECK_LE(0, last - first);
      size_t count = last - first;
      size_t assignable;
      position = PrepareForInsertion(pos, count, &assignable);
      if constexpr (std::is_trivially_copyable_v<T>) {
        if (count > 0) memcpy(position, first, count * sizeof(T));
      } else {
        CopyingOverwrite(position, first, first + assignable);
        CopyToNewStorage(position + assignable, first + assignable, last);
      }
    } else if (pos == end()) {
      position = end_;
      while (first != last) {
        EnsureOneMoreCapacity();
        emplace(end_++, *first++);
      }
    } else {
      UNREACHABLE();
      // We currently have no users of this case.
      // It could be implemented inefficiently as a combination of the two
      // cases above: while (first != last) { PrepareForInsertion(_, 1, _); }.
      // A more efficient approach would be to accumulate the input iterator's
      // results into a temporary vector first, then grow {this} only once
      // (by calling PrepareForInsertion(_, count, _)), then copy over the
      // accumulated elements.
    }
    return position;
  }
  T* insert(const T* pos, size_t count, const T& value) {
    size_t assignable;
    T* position = PrepareForInsertion(pos, count, &assignable);
    T* dst = position;
    T* stop = dst + assignable;
    while (dst < stop) {
      CopyingOverwrite(dst++, &value);
    }
    stop = position + count;
    while (dst < stop) emplace(dst++, value);
    return position;
  }

  T* erase(const T* pos) {
    DCHECK(data_ <= pos && pos <= end());
    if (pos == end()) return const_cast<T*>(pos);
    return erase(pos, 1);
  }
  T* erase(const T* first, const T* last) {
    DCHECK(data_ <= first && first <= last && last <= end());
    if (first == last) return const_cast<T*>(first);
    return erase(first, last - first);
  }

 private:
  static constexpr size_t kMinCapacity = 2;
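
  // Growth policy (derived from NewCapacity below): an empty backing store
  // starts at kMinCapacity, and a full one doubles, e.g. 2 -> 4 -> 8 -> 16,
  // unless a single request (reserve, bulk insert) needs more than the next
  // doubling step, in which case {minimum} wins. Thus N consecutive
  // push_backs trigger O(log N) reallocations.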
  size_t NewCapacity(size_t minimum) {
    // We can ignore possible overflow here: on 32-bit platforms, if the
    // multiplication overflows, there's no better way to handle it than
    // relying on the "new_capacity < minimum" check; in particular, a
    // saturating multiplication would make no sense. On 64-bit platforms,
    // overflow is effectively impossible anyway.
    size_t new_capacity = data_ == capacity_ ? kMinCapacity : capacity() * 2;
    return new_capacity < minimum ? minimum : new_capacity;
  }

  V8_INLINE void EnsureOneMoreCapacity() {
    if (V8_LIKELY(end_ < capacity_)) return;
    Grow(capacity() + 1);
  }

  V8_INLINE void EnsureCapacity(size_t minimum) {
    if (V8_LIKELY(minimum <= capacity())) return;
    Grow(minimum);
  }

  V8_INLINE void CopyToNewStorage(T* dst, const T* src) { emplace(dst, *src); }

  V8_INLINE void MoveToNewStorage(T* dst, T* src) {
    if constexpr (std::is_move_constructible_v<T>) {
      emplace(dst, std::move(*src));
    } else {
      CopyToNewStorage(dst, src);
    }
  }

  V8_INLINE void CopyingOverwrite(T* dst, const T* src) {
    if constexpr (std::is_copy_assignable_v<T>) {
      *dst = *src;
    } else {
      dst->~T();
      CopyToNewStorage(dst, src);
    }
  }

  V8_INLINE void MovingOverwrite(T* dst, T* src) {
    if constexpr (std::is_move_assignable_v<T>) {
      *dst = std::move(*src);
    } else {
      CopyingOverwrite(dst, src);
    }
  }

#define EMIT_TRIVIAL_CASE(memcpy_function)                 \
  DCHECK_LE(src, src_end);                                 \
  if constexpr (std::is_trivially_copyable_v<T>) {         \
    size_t count = src_end - src;                          \
    /* Add V8_ASSUME to silence gcc null check warning. */ \
    V8_ASSUME(src != nullptr);                             \
    memcpy_function(dst, src, count * sizeof(T));          \
    return;                                                \
  }

  V8_INLINE void CopyToNewStorage(T* dst, const T* src, const T* src_end) {
    EMIT_TRIVIAL_CASE(memcpy)
    for (; src < src_end; dst++, src++) {
      CopyToNewStorage(dst, src);
    }
  }

  V8_INLINE void MoveToNewStorage(T* dst, T* src, const T* src_end) {
    EMIT_TRIVIAL_CASE(memcpy)
    for (; src < src_end; dst++, src++) {
      MoveToNewStorage(dst, src);
      src->~T();
    }
  }

  V8_INLINE void CopyingOverwrite(T* dst, const T* src, const T* src_end) {
    EMIT_TRIVIAL_CASE(memmove)
    for (; src < src_end; dst++, src++) {
      CopyingOverwrite(dst, src);
    }
  }

  V8_INLINE void MovingOverwrite(T* dst, T* src, const T* src_end) {
    EMIT_TRIVIAL_CASE(memmove)
    for (; src < src_end; dst++, src++) {
      MovingOverwrite(dst, src);
    }
  }

#undef EMIT_TRIVIAL_CASE

  V8_NOINLINE V8_PRESERVE_MOST void Grow(size_t minimum) {
    T* old_data = data_;
    T* old_end = end_;
    size_t old_size = size();
    size_t new_capacity = NewCapacity(minimum);
    data_ = zone_->AllocateArray<T>(new_capacity);
    end_ = data_ + old_size;
    if (old_data) {
      MoveToNewStorage(data_, old_data, old_end);
      zone_->DeleteArray(old_data, capacity_ - old_data);
    }
    capacity_ = data_ + new_capacity;
  }

  T* PrepareForInsertion(const T* pos, size_t count, size_t* assignable) {
    DCHECK(data_ <= pos && pos <= end_);
    CHECK(std::numeric_limits<size_t>::max() - size() >= count);
    size_t index = pos - data_;
    size_t to_shift = end() - pos;
    DCHECK_EQ(index + to_shift, size());
    if (capacity() < size() + count) {
      *assignable = 0;  // Fresh memory is not assignable (must be constructed).
      T* old_data = data_;
      T* old_end = end_;
      size_t old_size = size();
      size_t new_capacity = NewCapacity(old_size + count);
      data_ = zone_->AllocateArray<T>(new_capacity);
      end_ = data_ + old_size + count;
      if (old_data) {
        MoveToNewStorage(data_, old_data, pos);
        MoveToNewStorage(data_ + index + count, const_cast<T*>(pos), old_end);
        zone_->DeleteArray(old_data, capacity_ - old_data);
      }
      capacity_ = data_ + new_capacity;
    } else {
      // There are two interesting cases: we're inserting more elements
      // than we're shifting (top), or the other way round (bottom).
      //
      // Old:    [ABCDEFGHIJ___________]
      //          <--used--><--empty-->
      //
      // Case 1: index=7, count=8, to_shift=3
      // New:    [ABCDEFGaaacccccHIJ___]
      //                 <-><------>
      //                  ↑     ↑ to be in-place constructed
      //                  ↑
      //                  assignable_slots
      //
      // Case 2: index=3, count=3, to_shift=7
      // New:    [ABCaaaDEFGHIJ________]
      //             <-----><->
      //                ↑    ↑ to be in-place constructed
      //                ↑
      //                This range can be assigned. We report the first 3
      //                as {assignable_slots} to the caller, and use the
      //                other 4 in the loop below.
      // Observe that the number of old elements that are moved to the
      // new end by in-place construction always equals {assignable_slots}.
      size_t assignable_slots = std::min(to_shift, count);
      *assignable = assignable_slots;
      if constexpr (std::is_trivially_copyable_v<T>) {
        if (to_shift > 0) {
          // Add V8_ASSUME to silence gcc null check warning.
          V8_ASSUME(pos != nullptr);
          memmove(const_cast<T*>(pos + count), pos, to_shift * sizeof(T));
        }
        end_ += count;
        return data_ + index;
      }
      // Construct elements in previously-unused area ("HIJ" in the example
      // above). This frees up assignable slots.
      T* dst = end_ + count;
      T* src = end_;
      for (T* stop = dst - assignable_slots; dst > stop;) {
        MoveToNewStorage(--dst, --src);
      }
      // Move (by assignment) elements into previously used area. This is
      // "DEFG" in "case 2" in the example above.
      DCHECK_EQ(src > pos, to_shift > count);
      DCHECK_IMPLIES(src > pos, dst == end_);
      while (src > pos) MovingOverwrite(--dst, --src);
      // Not destructing {src} here because that'll happen either in a
      // future iteration (when that spot becomes {dst}) or in {insert()}.
      end_ += count;
    }
    return data_ + index;
  }

  T* erase(const T* first, size_t count) {
    DCHECK(data_ <= first && first <= end());
    DCHECK_LE(count, end() - first);
    T* position = const_cast<T*>(first);
    MovingOverwrite(position, position + count, end_);
    T* old_end = end();
    end_ -= count;
    for (T* p = end_; p < old_end; p++) p->~T();
    return position;
  }

  template <typename... Args>
  void emplace(T* target, Args&&... args) {
    new (target) T(std::forward<Args>(args)...);
  }

  Zone* zone_{nullptr};
  T* data_{nullptr};
  T* end_{nullptr};
  T* capacity_{nullptr};
};

template <class T>
bool operator==(const ZoneVector<T>& lhs, const ZoneVector<T>& rhs) {
  return std::equal(lhs.begin(), lhs.end(), rhs.begin(), rhs.end());
}

template <class T>
bool operator!=(const ZoneVector<T>& lhs, const ZoneVector<T>& rhs) {
  return !(lhs == rhs);
}

template <class T>
bool operator<(const ZoneVector<T>& lhs, const ZoneVector<T>& rhs) {
  return std::lexicographical_compare(lhs.begin(), lhs.end(), rhs.begin(),
                                      rhs.end());
}

template <class T, class GetIntrusiveSetIndex>
class ZoneIntrusiveSet
    : public base::IntrusiveSet<T, GetIntrusiveSetIndex, ZoneVector<T>> {
 public:
  explicit ZoneIntrusiveSet(Zone* zone, GetIntrusiveSetIndex index_functor = {})
      : base::IntrusiveSet<T, GetIntrusiveSetIndex, ZoneVector<T>>(
            ZoneVector<T>(zone), std::move(index_functor)) {}
};
using base::IntrusiveSetIndex;
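
// A hypothetical usage sketch (the names below are illustrative, not part of
// this header): elements embed an IntrusiveSetIndex, and the functor exposes
// it so the set can track each element's position in its backing ZoneVector.
//
//   struct Node {
//     IntrusiveSetIndex set_index;
//   };
//   struct GetNodeIndex {
//     IntrusiveSetIndex& operator()(Node* node) const {
//       return node->set_index;
//     }
//   };
//   ZoneIntrusiveSet<Node*, GetNodeIndex> nodes(zone);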

// A wrapper subclass for std::deque to make it easy to construct one
// that uses a zone allocator.
template <typename T>
class ZoneDeque : public std::deque<T, RecyclingZoneAllocator<T>> {
 public:
  // Constructs an empty deque.
  explicit ZoneDeque(Zone* zone)
      : std::deque<T, RecyclingZoneAllocator<T>>(
            RecyclingZoneAllocator<T>(zone)) {}
};

// A wrapper subclass for std::list to make it easy to construct one
// that uses a zone allocator.
// TODO(all): This should be renamed to ZoneList once we have gotten rid of
// our own home-grown ZoneList, which actually is a ZoneVector.
template <typename T>
class ZoneLinkedList : public std::list<T, ZoneAllocator<T>> {
 public:
  // Constructs an empty list.
  explicit ZoneLinkedList(Zone* zone)
      : std::list<T, ZoneAllocator<T>>(ZoneAllocator<T>(zone)) {}
};

// A wrapper subclass for std::forward_list to make it easy to construct one
// that uses a zone allocator.
template <typename T>
class ZoneForwardList : public std::forward_list<T, ZoneAllocator<T>> {
 public:
  // Constructs an empty list.
  explicit ZoneForwardList(Zone* zone)
      : std::forward_list<T, ZoneAllocator<T>>(ZoneAllocator<T>(zone)) {}
};

// A wrapper subclass for std::priority_queue to make it easy to construct one
// that uses a zone allocator.
template <typename T, typename Compare = std::less<T>>
class ZonePriorityQueue
    : public std::priority_queue<T, ZoneVector<T>, Compare> {
 public:
  // Constructs an empty priority queue.
  explicit ZonePriorityQueue(Zone* zone)
      : std::priority_queue<T, ZoneVector<T>, Compare>(Compare(),
                                                       ZoneVector<T>(zone)) {}
};

// A wrapper subclass for std::queue to make it easy to construct one
// that uses a zone allocator.
template <typename T>
class ZoneQueue : public std::queue<T, ZoneDeque<T>> {
 public:
  // Constructs an empty queue.
  explicit ZoneQueue(Zone* zone)
      : std::queue<T, ZoneDeque<T>>(ZoneDeque<T>(zone)) {}
};

// A wrapper subclass for std::stack to make it easy to construct one that uses
// a zone allocator.
template <typename T>
class ZoneStack : public std::stack<T, ZoneDeque<T>> {
 public:
  // Constructs an empty stack.
  explicit ZoneStack(Zone* zone)
      : std::stack<T, ZoneDeque<T>>(ZoneDeque<T>(zone)) {}
};

// A wrapper subclass for std::set to make it easy to construct one that uses
// a zone allocator.
template <typename K, typename Compare = std::less<K>>
class ZoneSet : public std::set<K, Compare, ZoneAllocator<K>> {
 public:
  // Constructs an empty set.
  explicit ZoneSet(Zone* zone)
      : std::set<K, Compare, ZoneAllocator<K>>(Compare(),
                                               ZoneAllocator<K>(zone)) {}
};

// A wrapper subclass for std::multiset to make it easy to construct one that
// uses a zone allocator.
template <typename K, typename Compare = std::less<K>>
class ZoneMultiset : public std::multiset<K, Compare, ZoneAllocator<K>> {
 public:
  // Constructs an empty multiset.
  explicit ZoneMultiset(Zone* zone)
      : std::multiset<K, Compare, ZoneAllocator<K>>(Compare(),
                                                    ZoneAllocator<K>(zone)) {}
};

// A wrapper subclass for std::map to make it easy to construct one that uses
// a zone allocator.
template <typename K, typename V, typename Compare = std::less<K>>
class ZoneMap
    : public std::map<K, V, Compare, ZoneAllocator<std::pair<const K, V>>> {
 public:
  // Constructs an empty map.
  explicit ZoneMap(Zone* zone)
      : std::map<K, V, Compare, ZoneAllocator<std::pair<const K, V>>>(
            Compare(), ZoneAllocator<std::pair<const K, V>>(zone)) {}
};

// A wrapper subclass for std::unordered_map to make it easy to construct one
// that uses a zone allocator.
template <typename K, typename V, typename Hash = base::hash<K>,
          typename KeyEqual = std::equal_to<K>>
class ZoneUnorderedMap
    : public std::unordered_map<K, V, Hash, KeyEqual,
                                ZoneAllocator<std::pair<const K, V>>> {
 public:
  // Constructs an empty map.
  explicit ZoneUnorderedMap(Zone* zone, size_t bucket_count = 0)
      : std::unordered_map<K, V, Hash, KeyEqual,
                           ZoneAllocator<std::pair<const K, V>>>(
            bucket_count, Hash(), KeyEqual(),
            ZoneAllocator<std::pair<const K, V>>(zone)) {}
};

// A wrapper subclass for std::unordered_set to make it easy to construct one
// that uses a zone allocator.
template <typename K, typename Hash = base::hash<K>,
          typename KeyEqual = std::equal_to<K>>
class ZoneUnorderedSet
    : public std::unordered_set<K, Hash, KeyEqual, ZoneAllocator<K>> {
 public:
  // Constructs an empty set.
  explicit ZoneUnorderedSet(Zone* zone, size_t bucket_count = 0)
      : std::unordered_set<K, Hash, KeyEqual, ZoneAllocator<K>>(
            bucket_count, Hash(), KeyEqual(), ZoneAllocator<K>(zone)) {}
};

// A wrapper subclass for std::multimap to make it easy to construct one that
// uses a zone allocator.
template <typename K, typename V, typename Compare = std::less<K>>
class ZoneMultimap
    : public std::multimap<K, V, Compare,
                           ZoneAllocator<std::pair<const K, V>>> {
 public:
  // Constructs an empty multimap.
  explicit ZoneMultimap(Zone* zone)
      : std::multimap<K, V, Compare, ZoneAllocator<std::pair<const K, V>>>(
            Compare(), ZoneAllocator<std::pair<const K, V>>(zone)) {}
};

// A wrapper subclass for base::SmallVector to make it easy to construct one
// that uses a zone allocator.
template <typename T, size_t kSize>
class SmallZoneVector : public base::SmallVector<T, kSize, ZoneAllocator<T>> {
 public:
  // Constructs an empty small vector.
  explicit SmallZoneVector(Zone* zone)
      : base::SmallVector<T, kSize, ZoneAllocator<T>>(ZoneAllocator<T>(zone)) {}

  explicit SmallZoneVector(size_t size, Zone* zone)
      : base::SmallVector<T, kSize, ZoneAllocator<T>>(
            size, ZoneAllocator<T>(zone)) {}
};

// Used by SmallZoneMap below. Essentially a closure around placement-new of
// the "full" fallback ZoneMap. Called once SmallMap grows beyond kArraySize.
template <typename ZoneMap>
class ZoneMapInit {
 public:
  explicit ZoneMapInit(Zone* zone) : zone_(zone) {}
  void operator()(ZoneMap* map) const { new (map) ZoneMap(zone_); }

 private:
  Zone* zone_;
};

// A wrapper subclass for base::SmallMap to make it easy to construct one that
// uses a zone-allocated std::map as the fallback once the SmallMap outgrows
// its inline storage.
template <typename K, typename V, size_t kArraySize,
          typename Compare = std::less<K>, typename KeyEqual = std::equal_to<K>>
class SmallZoneMap
    : public base::SmallMap<ZoneMap<K, V, Compare>, kArraySize, KeyEqual,
                            ZoneMapInit<ZoneMap<K, V, Compare>>> {
 public:
  explicit SmallZoneMap(Zone* zone)
      : base::SmallMap<ZoneMap<K, V, Compare>, kArraySize, KeyEqual,
                       ZoneMapInit<ZoneMap<K, V, Compare>>>(
            ZoneMapInit<ZoneMap<K, V, Compare>>(zone)) {}
};
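
// Usage sketch (hypothetical; assumes a live Zone* named {zone}): entries
// live in the inline array until the map holds more than kArraySize of them,
// after which ZoneMapInit placement-news the zone-backed fallback ZoneMap.
//
//   SmallZoneMap<int, int, 8> counters(zone);
//   counters[42] = 1;  // stored inline; no zone allocation yet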

// A wrapper subclass for absl::flat_hash_map to make it easy to construct one
// that uses a zone allocator. If you want to use a user-defined type as key
// (K), you'll need to define an AbslHashValue function for it (see
// https://abseil.io/docs/cpp/guides/hash).
template <typename K, typename V,
          typename Hash = typename absl::flat_hash_map<K, V>::hasher,
          typename KeyEqual =
              typename absl::flat_hash_map<K, V, Hash>::key_equal>
class ZoneAbslFlatHashMap
    : public absl::flat_hash_map<K, V, Hash, KeyEqual,
                                 ZoneAllocator<std::pair<const K, V>>> {
 public:
  // Constructs an empty map.
  explicit ZoneAbslFlatHashMap(Zone* zone, size_t bucket_count = 0)
      : absl::flat_hash_map<K, V, Hash, KeyEqual,
                            ZoneAllocator<std::pair<const K, V>>>(
            bucket_count, Hash(), KeyEqual(),
            ZoneAllocator<std::pair<const K, V>>(zone)) {}
};
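
// Sketch of the AbslHashValue hook mentioned above ({MyKey} is illustrative,
// not part of this header), following the pattern from the Abseil hash guide
// linked above:
//
//   struct MyKey {
//     int a;
//     int b;
//     bool operator==(const MyKey& other) const {
//       return a == other.a && b == other.b;
//     }
//     template <typename H>
//     friend H AbslHashValue(H h, const MyKey& key) {
//       return H::combine(std::move(h), key.a, key.b);
//     }
//   };
//   ZoneAbslFlatHashMap<MyKey, int> map(zone);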

// A wrapper subclass for absl::flat_hash_set to make it easy to construct one
// that uses a zone allocator. If you want to use a user-defined type as key
// (K), you'll need to define an AbslHashValue function for it (see
// https://abseil.io/docs/cpp/guides/hash).
template <typename K, typename Hash = typename absl::flat_hash_set<K>::hasher,
          typename KeyEqual = typename absl::flat_hash_set<K, Hash>::key_equal>
class ZoneAbslFlatHashSet
    : public absl::flat_hash_set<K, Hash, KeyEqual, ZoneAllocator<K>> {
 public:
  // Constructs an empty set.
  explicit ZoneAbslFlatHashSet(Zone* zone, size_t bucket_count = 0)
      : absl::flat_hash_set<K, Hash, KeyEqual, ZoneAllocator<K>>(
            bucket_count, Hash(), KeyEqual(), ZoneAllocator<K>(zone)) {}
};

// A wrapper subclass for absl::btree_map to make it easy to construct one
// that uses a zone allocator. If you want to use a user-defined type as key
// (K), you'll need to provide a Compare that can order it (the default
// std::less<K> requires operator<).
template <typename K, typename V, typename Compare = std::less<K>>
class ZoneAbslBTreeMap
    : public absl::btree_map<K, V, Compare,
                             ZoneAllocator<std::pair<const K, V>>> {
 public:
  // Constructs an empty map.
  explicit ZoneAbslBTreeMap(Zone* zone)
      : absl::btree_map<K, V, Compare, ZoneAllocator<std::pair<const K, V>>>(
            ZoneAllocator<std::pair<const K, V>>(zone)) {}
};

// Typedefs to shorten commonly used vectors.
using IntVector = ZoneVector<int>;

}  // namespace internal
}  // namespace v8

#endif  // V8_ZONE_ZONE_CONTAINERS_H_