v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
zone.h
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_ZONE_ZONE_H_
#define V8_ZONE_ZONE_H_

#include <limits>
#include <memory>
#include <type_traits>
#include <utility>

#include "src/base/logging.h"
#include "src/base/vector.h"
#include "src/common/globals.h"
#include "src/zone/accounting-allocator.h"
#include "src/zone/type-stats.h"
#include "src/zone/zone-segment.h"
#include "src/zone/zone-type-traits.h"

#ifndef ZONE_NAME
#define ZONE_NAME __func__
#endif
namespace v8 {
namespace internal {

// The Zone supports very fast allocation of small chunks of
// memory. The chunks cannot be deallocated individually, but instead
// the Zone supports deallocating all chunks in one fast
// operation. The Zone is used to hold temporary data structures like
// the abstract syntax tree, which is deallocated after compilation.
//
// Note: There is no need to initialize the Zone; the first time an
// allocation is attempted, a segment of memory will be requested
// through the allocator.
//
// Note: The implementation is inherently not thread safe. Do not use
// from multi-threaded code.
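// Example usage (illustrative sketch; assumes a live AccountingAllocator*
// named `allocator` and a hypothetical consumer UseScratchBuffer()):
//
//   {
//     Zone zone(allocator, ZONE_NAME);
//     int* scratch = zone.AllocateArray<int>(128);  // bump-pointer allocation
//     UseScratchBuffer(scratch, 128);
//   }  // ~Zone() releases all segments at once; no per-object frees needed.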
class ZoneSnapshot;

class V8_EXPORT_PRIVATE Zone final {
 public:
  Zone(AccountingAllocator* allocator, const char* name,
       bool support_compression = false);
  ~Zone();

  // Returns true if the zone supports zone pointer compression.
  bool supports_compression() const {
    return COMPRESS_ZONES_BOOL && supports_compression_;
  }

  // Allocate 'size' bytes of uninitialized memory in the Zone; expands the Zone
  // by allocating new segments of memory on demand using AccountingAllocator
  // (see AccountingAllocator::AllocateSegment()).
  //
  // When V8_ENABLE_PRECISE_ZONE_STATS is defined, the allocated bytes are
  // associated with the provided TypeTag type.
  template <typename TypeTag>
  void* Allocate(size_t size) {
#ifdef V8_USE_ADDRESS_SANITIZER
    return AsanNew(size);
#else
    size = RoundUp(size, kAlignmentInBytes);
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
    if (V8_UNLIKELY(TracingFlags::is_zone_stats_enabled())) {
      type_stats_.AddAllocated<TypeTag>(size);
    }
    allocation_size_for_tracing_ += size;
#endif
    if (V8_UNLIKELY(size > limit_ - position_)) {
      Expand(size);
    }

    DCHECK_LE(size, limit_ - position_);
    DCHECK_EQ(0, position_ % kAlignmentInBytes);
    void* result = reinterpret_cast<void*>(position_);
    position_ += size;
    return result;
#endif  // V8_USE_ADDRESS_SANITIZER
  }
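  // Example (illustrative sketch; assumes a Zone named `zone` and uses a
  // hypothetical stats tag): Allocate<TypeTag>() hands back uninitialized,
  // kAlignmentInBytes-aligned memory that lives until the Zone is destroyed or
  // Reset(); the tag is only used for V8_ENABLE_PRECISE_ZONE_STATS accounting.
  //
  //   struct ScratchTag {};
  //   void* raw = zone.Allocate<ScratchTag>(256);
  //   // No matching free call; the whole Zone is reclaimed in one operation.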
  // Returns 'size' bytes of memory back to the Zone. These bytes can be reused
  // by subsequent allocations.
  //
  // When V8_ENABLE_PRECISE_ZONE_STATS is defined, the deallocated bytes are
  // associated with the provided TypeTag type.
  template <typename TypeTag = void>
  void Delete(void* pointer, size_t size) {
    DCHECK_NOT_NULL(pointer);
    DCHECK_NE(size, 0);
    size = RoundUp(size, kAlignmentInBytes);
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
    if (V8_UNLIKELY(TracingFlags::is_zone_stats_enabled())) {
      type_stats_.AddDeallocated<TypeTag>(size);
    }
    freed_size_for_tracing_ += size;
#endif

#ifdef DEBUG
    static const unsigned char kZapDeadByte = 0xcd;
    memset(pointer, kZapDeadByte, size);
#endif
  }

  // Allocates memory for a T instance and constructs the object by calling the
  // matching Args... constructor.
  //
  // When V8_ENABLE_PRECISE_ZONE_STATS is defined, the allocated bytes are
  // associated with the T type.
  template <typename T, typename... Args>
  T* New(Args&&... args) {
    static_assert(alignof(T) <= kAlignmentInBytes);
    void* memory = Allocate<T>(sizeof(T));
    return new (memory) T(std::forward<Args>(args)...);
  }
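  // Example (illustrative sketch; `AstNode` and its arguments are
  // hypothetical): New<T>() is the intended way to place an object in the Zone.
  //
  //   AstNode* node = zone.New<AstNode>(op, left, right);
  //   // No delete: `node` stays valid for the lifetime of the Zone.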
  // Allocates uninitialized memory for 'length' number of T instances.
  //
  // When V8_ENABLE_PRECISE_ZONE_STATS is defined, the allocated bytes are
  // associated with the provided TypeTag type. It might be useful to tag
  // buffer allocations with meaningful names to make buffer allocation sites
  // distinguishable from each other.
  template <typename T, typename TypeTag = T[]>
  T* AllocateArray(size_t length) {
    static_assert(alignof(T) <= kAlignmentInBytes);
    DCHECK_IMPLIES(is_compressed_pointer<T>::value, supports_compression());
    DCHECK_LT(length, std::numeric_limits<size_t>::max() / sizeof(T));
    return static_cast<T*>(Allocate<TypeTag>(length * sizeof(T)));
  }

  // Allocates a Vector with 'length' uninitialized entries.
  template <typename T, typename TypeTag = T[]>
  base::Vector<T> AllocateVector(size_t length) {
    T* new_array = AllocateArray<T, TypeTag>(length);
    return {new_array, length};
  }

  // Allocates a Vector with 'length' elements and value-constructs them.
  template <typename T, typename TypeTag = T[]>
  base::Vector<T> NewVector(size_t length) {
    T* new_array = AllocateArray<T, TypeTag>(length);
    std::uninitialized_value_construct_n(new_array, length);
    return {new_array, length};
  }

  // Allocates a Vector with 'length' elements and initializes them with
  // 'value'.
  template <typename T, typename TypeTag = T[]>
  base::Vector<T> NewVector(size_t length, T value) {
    T* new_array = AllocateArray<T, TypeTag>(length);
    std::uninitialized_fill_n(new_array, length, value);
    return {new_array, length};
  }

  // Allocates a copy of the given vector's elements in this Zone.
  template <typename T, typename TypeTag = std::remove_const_t<T>[]>
  base::Vector<std::remove_const_t<T>> CloneVector(base::Vector<T> v) {
    auto* new_array = AllocateArray<std::remove_const_t<T>, TypeTag>(v.size());
    std::uninitialized_copy(v.begin(), v.end(), new_array);
    return {new_array, v.size()};
  }
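  // Example (illustrative sketch; assumes a Zone named `zone`): the vector
  // helpers differ only in how the new backing store is initialized.
  //
  //   base::Vector<int> raw    = zone.AllocateVector<int>(4);  // uninitialized
  //   base::Vector<int> zeroed = zone.NewVector<int>(4);       // value-constructed
  //   base::Vector<int> filled = zone.NewVector<int>(4, -1);   // every entry -1
  //   base::Vector<int> copy   = zone.CloneVector(filled);     // zone-allocated copy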
  // Returns an array of 'length' elements back to the Zone. These bytes can be
  // reused by subsequent allocations.
  //
  // When V8_ENABLE_PRECISE_ZONE_STATS is defined, the deallocated bytes are
  // associated with the provided TypeTag type.
  template <typename T, typename TypeTag = T[]>
  void DeleteArray(T* pointer, size_t length) {
    Delete<TypeTag>(pointer, length * sizeof(T));
  }

  // Seals the zone to prevent any further allocation.
  void Seal() { sealed_ = true; }

  // Allows the zone to be safely reused. Releases the memory except for the
  // last page, and fires zone destruction and creation events for the
  // accounting allocator.
  void Reset();
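  // Example (illustrative sketch): Reset() and Seal() mark phase changes of a
  // long-lived zone; pointers handed out before Reset() must not be used
  // afterwards.
  //
  //   zone.Reset();  // drop all allocations, keep the last page for reuse
  //   zone.Seal();   // from here on, any further allocation is forbidden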
  size_t segment_bytes_allocated() const { return segment_bytes_allocated_; }

  const char* name() const { return name_; }

  // Returns the precise amount of used zone memory; may only be called from
  // the thread owning the zone.
  size_t allocation_size() const {
    size_t extra = segment_head_ ? position_ - segment_head_->start() : 0;
    return allocation_size_ + extra;
  }

  // When V8_ENABLE_PRECISE_ZONE_STATS is not defined, returns used zone memory
  // not including the head segment.
  // Can be called from threads not owning the zone.
  size_t allocation_size_for_tracing() const {
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
    return allocation_size_for_tracing_;
#else
    return allocation_size_;
#endif
  }

  // Returns the number of bytes freed in this zone via Delete<T>()/
  // DeleteArray<T>() calls. Returns non-zero values only when
  // V8_ENABLE_PRECISE_ZONE_STATS is defined.
  size_t freed_size_for_tracing() const {
#ifdef V8_ENABLE_PRECISE_ZONE_STATS
    return freed_size_for_tracing_;
#else
    return 0;
#endif
  }

  AccountingAllocator* allocator() const { return allocator_; }

#ifdef V8_ENABLE_PRECISE_ZONE_STATS
  const TypeStats& type_stats() const { return type_stats_; }
#endif

#ifdef DEBUG
  bool Contains(const void* ptr) const;
#endif

  V8_WARN_UNUSED_RESULT ZoneSnapshot Snapshot() const;
 private:
  void* AsanNew(size_t size);

  // Deletes all objects and frees all memory allocated in the Zone.
  void DeleteAll();

  // Releases the current segment without performing any local bookkeeping
  // (e.g. tracking allocated bytes, maintaining linked lists, etc.).
  void ReleaseSegment(Segment* segment);

  // All pointers returned from New() are 8-byte aligned.
  // ASan requires 8-byte alignment. MIPS also requires 8-byte alignment.
  static const size_t kAlignmentInBytes = 8;

  // Never allocate segments smaller than this size in bytes.
  static const size_t kMinimumSegmentSize = 8 * KB;

  // Never allocate segments larger than this size in bytes.
  static const size_t kMaximumSegmentSize = 32 * KB;

  // The number of bytes allocated in this zone so far.
  std::atomic<size_t> allocation_size_ = {0};

  // The number of bytes allocated in segments. Note that this number
  // includes memory allocated from the OS but not yet allocated from
  // the zone.
  std::atomic<size_t> segment_bytes_allocated_ = {0};

  // Expands the Zone to hold at least 'size' more bytes.
  // Should only be called if there is not enough room in the Zone already.
  V8_NOINLINE V8_PRESERVE_MOST void Expand(size_t size);

  // The free region in the current (front) segment is represented as
  // the half-open interval [position, limit). The 'position' variable
  // is guaranteed to be aligned as dictated by kAlignmentInBytes.
  Address position_ = 0;
  Address limit_ = 0;

  AccountingAllocator* allocator_;

  Segment* segment_head_ = nullptr;
  const char* name_;
  const bool supports_compression_;
  bool sealed_ = false;

#ifdef V8_ENABLE_PRECISE_ZONE_STATS
  TypeStats type_stats_;
  std::atomic<size_t> allocation_size_for_tracing_ = {0};

  // The number of bytes freed in this zone so far.
  std::atomic<size_t> freed_size_for_tracing_ = {0};
#endif

  friend class ZoneSnapshot;
};

// A `ZoneSnapshot` stores the allocation state of a zone. The zone can later be
// reset to that state, effectively deleting all memory which was allocated in
// the zone after taking the snapshot.
// See `ZoneScope` for an example usage of `ZoneSnapshot`.
class ZoneSnapshot final {
 public:
  // Reset the `Zone` from which this snapshot was taken to the state stored in
  // this snapshot.
  void Restore(Zone* zone) const;

 private:
  explicit ZoneSnapshot(const Zone* zone);
  friend class Zone;

#ifdef V8_ENABLE_PRECISE_ZONE_STATS
  const size_t allocation_size_for_tracing_;
  const size_t freed_size_for_tracing_;
#endif
  const size_t allocation_size_;
  const size_t segment_bytes_allocated_;
  const Address position_;
  const Address limit_;
  Segment* const segment_head_;
};
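
// Example (illustrative sketch; TryParseSpeculatively() is hypothetical):
// a snapshot lets a caller roll back speculative allocations without
// destroying the zone.
//
//   ZoneSnapshot before = zone.Snapshot();
//   bool ok = TryParseSpeculatively(&zone);
//   if (!ok) before.Restore(&zone);  // frees everything allocated since Snapshot()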

// Similar to the HandleScope, the ZoneScope defines a region of validity for
// zone memory. All memory allocated in the given Zone during the scope's
// lifetime is freed when the scope is destructed, i.e. the Zone is reset to
// the state it was in when the scope was created.
class ZoneScope final {
 public:
  explicit ZoneScope(Zone* zone) : zone_(zone), snapshot_(zone->Snapshot()) {}

  ~ZoneScope() { snapshot_.Restore(zone_); }

 private:
  Zone* const zone_;
  const ZoneSnapshot snapshot_;
};
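
// Example (illustrative sketch; BuildThrowawayGraph() is hypothetical): the
// RAII form of the snapshot/restore pattern above.
//
//   {
//     ZoneScope scope(&zone);
//     BuildThrowawayGraph(&zone);  // allocates freely in the zone
//   }  // leaving the scope resets `zone` to its state at construction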

// ZoneObject is an abstraction that helps define classes of objects
// allocated in the Zone. Use it as a base class; see ast.h.
class ZoneObject {
 public:
  // The accidental old-style pattern
  //    new (zone) SomeObject(...)
  // now produces a compilation error. The proper way of allocating objects in
  // Zones looks like this:
  //    zone->New<SomeObject>(...)
  void* operator new(size_t, Zone*) = delete;  // See explanation above.
  // Allow non-allocating placement new.
  void* operator new(size_t size, void* ptr) {  // See explanation above.
    return ptr;
  }

  // Ideally, the delete operator should be private instead of
  // public, but unfortunately the compiler sometimes synthesizes
  // (unused) destructors for classes derived from ZoneObject, which
  // require the operator to be visible. MSVC requires the delete
  // operator to be public.

  // ZoneObjects should never be deleted individually; use
  // Zone::DeleteAll() to delete all zone objects in one go.
  // Note that destructors will not be called.
  void operator delete(void*, size_t) { UNREACHABLE(); }
  void operator delete(void* pointer, Zone* zone) = delete;
};
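
// Example (illustrative sketch; `Expression` is hypothetical): a typical
// zone-allocated class.
//
//   class Expression : public ZoneObject {
//    public:
//     explicit Expression(int position) : position_(position) {}
//
//    private:
//     int position_;
//   };
//
//   Expression* expr = zone.New<Expression>(42);
//   // `new (&zone) Expression(42)` would not compile (that operator new is
//   // deleted), and individual deletion is never needed: the Zone reclaims
//   // all of its objects at once.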

// The ZoneAllocationPolicy is used to specialize generic data
// structures to allocate themselves and their elements in the Zone.
class ZoneAllocationPolicy {
 public:
  // Creates an unusable allocation policy.
  ZoneAllocationPolicy() : zone_(nullptr) {}
  explicit ZoneAllocationPolicy(Zone* zone) : zone_(zone) {}

  template <typename T, typename TypeTag = T[]>
  V8_INLINE T* AllocateArray(size_t length) {
    return zone()->AllocateArray<T, TypeTag>(length);
  }
  template <typename T, typename TypeTag = T[]>
  V8_INLINE void DeleteArray(T* p, size_t length) {
    zone()->DeleteArray<T, TypeTag>(p, length);
  }

  Zone* zone() const { return zone_; }

 private:
  Zone* zone_;
};
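
// Example (illustrative sketch; `SimpleBuffer` is hypothetical, not a V8
// type): a generic container can take the policy as a template parameter and
// stay agnostic about where its storage lives.
//
//   template <typename T, typename Policy>
//   class SimpleBuffer {
//    public:
//     SimpleBuffer(size_t n, Policy policy)
//         : policy_(policy), data_(policy_.template AllocateArray<T>(n)) {}
//     T* data() const { return data_; }
//
//    private:
//     Policy policy_;
//     T* data_;
//   };
//
//   SimpleBuffer<int, ZoneAllocationPolicy> buf(16, ZoneAllocationPolicy(&zone));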

}  // namespace internal
}  // namespace v8

// The accidental old-style pattern
//    new (zone) SomeObject(...)
// now produces a compilation error. The proper way of allocating objects in
// Zones looks like this:
//    zone->New<SomeObject>(...)
void* operator new(size_t, v8::internal::Zone*) = delete;   // See explanation.
void operator delete(void*, v8::internal::Zone*) = delete;  // See explanation.

#endif  // V8_ZONE_ZONE_H_