// Expands to the name of the enclosing function; handy as a default Zone name.
#define ZONE_NAME __func__

// ----- class Zone (fragment) -----

  Zone(AccountingAllocator* allocator, const char* name,
       bool support_compression = false);
  // Allocates 'size' bytes of uninitialized memory in the Zone, expanding the
  // Zone with new segments on demand.
  template <typename TypeTag>
  void* Allocate(size_t size) {
#ifdef V8_USE_ADDRESS_SANITIZER
    return AsanNew(size);
#else
    size = RoundUp(size, kAlignmentInBytes);
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
    if (V8_UNLIKELY(TracingFlags::is_zone_stats_enabled())) {
      type_stats_.AddAllocated<TypeTag>(size);
    }
    allocation_size_for_tracing_ += size;
#endif
    // ... (bump-pointer allocation path elided) ...
#endif  // V8_USE_ADDRESS_SANITIZER
  }
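  // Usage sketch (illustrative; 'ZoneListTag' is a hypothetical tag type used
  // only to label the bytes in the per-type statistics):
  //
  //   void* backing = zone->Allocate<ZoneListTag>(capacity * sizeof(int));
  //
  // The TypeTag parameter does not change how memory is carved out; it only
  // attributes the allocation when V8_ENABLE_PRECISE_ZONE_STATS is enabled.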
  // Returns 'size' bytes of memory back to the Zone so they can be reused by
  // subsequent allocations.
  template <typename TypeTag = void>
  void Delete(void* pointer, size_t size) {
    size = RoundUp(size, kAlignmentInBytes);
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
    if (V8_UNLIKELY(TracingFlags::is_zone_stats_enabled())) {
      type_stats_.AddDeallocated<TypeTag>(size);
    }
    freed_size_for_tracing_ += size;
#endif
    // Zap the freed bytes so reads through stale pointers are easy to spot.
    static const unsigned char kZapDeadByte = 0xcd;
    memset(pointer, kZapDeadByte, size);
  }
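  // Usage sketch (illustrative): returning a buffer under the same tag it was
  // allocated with, so the per-type statistics stay balanced:
  //
  //   void* backing = zone->Allocate<ZoneListTag>(n);
  //   ...
  //   zone->Delete<ZoneListTag>(backing, n);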
  // Allocates memory for a T instance and constructs it in place, forwarding
  // 'args' to T's constructor.
  template <typename T, typename... Args>
  T* New(Args&&... args) {
    static_assert(alignof(T) <= kAlignmentInBytes);
    void* memory = Allocate<T>(sizeof(T));
    return new (memory) T(std::forward<Args>(args)...);
  }
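  // Usage sketch (illustrative; 'MyNode' is a hypothetical zone-allocated
  // type, not part of this header):
  //
  //   MyNode* node = zone->New<MyNode>(op, left, right);
  //
  // The object lives until the Zone itself is torn down; no destructor is run
  // for it, so T should not own resources outside the zone.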
  // Allocates uninitialized memory for 'length' instances of T.
  template <typename T, typename TypeTag = T[]>
  T* AllocateArray(size_t length) {
    static_assert(alignof(T) <= kAlignmentInBytes);
    DCHECK_LT(length, std::numeric_limits<size_t>::max() / sizeof(T));
    return static_cast<T*>(Allocate<TypeTag>(length * sizeof(T)));
  }
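  // Usage sketch (illustrative): raw storage for 'count' elements; the caller
  // is responsible for constructing them:
  //
  //   int* slots = zone->AllocateArray<int>(count);
  //
  // The DCHECK above guards against length * sizeof(T) overflowing size_t.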
  template <typename T, typename TypeTag = T[]>
  base::Vector<T> AllocateVector(size_t length) {
    T* new_array = AllocateArray<T, TypeTag>(length);
    return {new_array, length};
  }
  // Allocates a base::Vector with 'length' value-constructed entries.
  template <typename T, typename TypeTag = T[]>
  base::Vector<T> NewVector(size_t length) {
    T* new_array = AllocateArray<T, TypeTag>(length);
    std::uninitialized_value_construct_n(new_array, length);
    return {new_array, length};
  }
  // Allocates a base::Vector with 'length' entries, each initialized to
  // 'value'.
  template <typename T, typename TypeTag = T[]>
  base::Vector<T> NewVector(size_t length, T value) {
    T* new_array = AllocateArray<T, TypeTag>(length);
    std::uninitialized_fill_n(new_array, length, value);
    return {new_array, length};
  }
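  // Usage sketch (illustrative): unlike AllocateArray(), these return a
  // base::Vector and construct the elements:
  //
  //   base::Vector<int> zeroed = zone->NewVector<int>(16);      // value-initialized
  //   base::Vector<int> filled = zone->NewVector<int>(16, -1);  // all set to -1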
  template <typename T, typename TypeTag = std::remove_const_t<T>[]>
  base::Vector<std::remove_const_t<T>> CloneVector(base::Vector<T> v) {
    auto* new_array = AllocateArray<std::remove_const_t<T>, TypeTag>(v.size());
    std::uninitialized_copy(v.begin(), v.end(), new_array);
    return {new_array, v.size()};
  }
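  // Usage sketch (illustrative): copying a caller-owned vector into zone
  // memory; std::remove_const_t means the clone is mutable even if the source
  // elements are const:
  //
  //   base::Vector<const uint8_t> source = ...;
  //   base::Vector<uint8_t> copy = zone->CloneVector(source);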
  template <typename T, typename TypeTag = T[]>
  void DeleteArray(T* pointer, size_t length) {
    Delete<TypeTag>(pointer, length * sizeof(T));
  }
  // Seals the zone so that no further allocations can be made from it.
  void Seal() { sealed_ = true; }

  // Returns the number of bytes allocated in this zone so far, including the
  // part of the current segment that is already in use.
  size_t allocation_size() const {
    size_t extra = segment_head_ ? position_ - segment_head_->start() : 0;
    return allocation_size_ + extra;
  }
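  // Usage sketch (illustrative): freezing a zone that should not grow any
  // further and reporting how much it holds:
  //
  //   zone->Seal();
  //   size_t bytes = zone->allocation_size();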
  // When precise zone stats are enabled, returns the byte count tracked for
  // tracing; otherwise falls back to allocation_size().
  size_t allocation_size_for_tracing() const {
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
    return allocation_size_for_tracing_;
#else
    return allocation_size_;
#endif
  }

  // Bytes returned to this zone via Delete() and DeleteArray(); reported as 0
  // unless precise zone stats are enabled.
  size_t freed_size_for_tracing() const {
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
    return freed_size_for_tracing_;
#else
    return 0;
#endif
  }
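  // Usage sketch (illustrative): dumping per-zone numbers from a tracing hook;
  // without V8_ENABLE_PRECISE_ZONE_STATS the freed count is reported as 0.
  //
  //   PrintF("zone %s: allocated=%zu freed=%zu\n", zone->name(),
  //          zone->allocation_size_for_tracing(),
  //          zone->freed_size_for_tracing());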
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
  const TypeStats& type_stats() const { return type_stats_; }
#endif

  // Returns true if the given pointer lies inside memory owned by this zone.
  bool Contains(const void* ptr) const;
  void* AsanNew(size_t size);

  void ReleaseSegment(Segment* segment);

  // All pointers returned from the allocation functions are aligned to this
  // many bytes.
  static const size_t kAlignmentInBytes = 8;

  // Never allocate segments smaller than this size in bytes.
  static const size_t kMinimumSegmentSize = 8 * KB;

  // Never keep segments larger than this size in bytes around.
  static const size_t kMaximumSegmentSize = 32 * KB;
  // Bytes handed out by this zone so far.
  std::atomic<size_t> allocation_size_ = {0};

  // Bytes obtained from the allocator in the form of segments.
  std::atomic<size_t> segment_bytes_allocated_ = {0};

  bool sealed_ = false;

#ifdef V8_ENABLE_PRECISE_ZONE_STATS
  TypeStats type_stats_;
  std::atomic<size_t> allocation_size_for_tracing_ = {0};
  std::atomic<size_t> freed_size_for_tracing_ = {0};
#endif
// ----- class ZoneSnapshot (fragment) -----
// A snapshot of a Zone's accounting state that can later be restored.

#ifdef V8_ENABLE_PRECISE_ZONE_STATS
  const size_t allocation_size_for_tracing_;
  const size_t freed_size_for_tracing_;
#endif
// ----- class ZoneObject (fragment) -----

  // Placement new, so that Zone::New() can construct the object directly in
  // zone memory.
  void* operator new(size_t size, void* ptr) { return ptr; }

  // Individual objects are never handed back to the Zone, so the zone form of
  // operator delete is disabled.
  void operator delete(void* pointer, Zone* zone) = delete;
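  // Usage sketch (illustrative): zone-allocated objects are created through
  // Zone::New<T>() and reclaimed only when the whole zone goes away:
  //
  //   class MyZoneThing : public ZoneObject { ... };
  //   MyZoneThing* thing = zone->New<MyZoneThing>();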
// ----- class ZoneAllocationPolicy (fragment) -----
// An allocation policy that routes a container's backing-store allocations
// through a Zone.

  // Forwards to Zone::AllocateArray() on the policy's zone.
  template <typename T, typename TypeTag = T[]>
  V8_INLINE T* AllocateArray(size_t length);

  // Forwards to Zone::DeleteArray() on the policy's zone.
  template <typename T, typename TypeTag = T[]>
  V8_INLINE void DeleteArray(T* p, size_t length);
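  // Usage sketch (illustrative): pairing the policy with a zone so a container
  // can allocate and release its backing store from zone memory:
  //
  //   ZoneAllocationPolicy policy(zone);
  //   int* backing = policy.AllocateArray<int>(64);
  //   ...
  //   policy.DeleteArray<int>(backing, 64);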