v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
zone.cc
Go to the documentation of this file.
1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/zone/zone.h"
6
7#include <cstring>
8#include <memory>
9
11#include "src/init/v8.h"
12#include "src/utils/utils.h"
13#include "src/zone/type-stats.h"
14
15namespace v8 {
16namespace internal {
17
// File-local constant: number of red-zone bytes appended after each Zone
// allocation when building with AddressSanitizer; zero otherwise. AsanNew()
// below poisons this region so out-of-bounds accesses are reported by ASan.
// NOTE(review): this listing is a doc-extraction scrape — the digits fused to
// the start of each line are the original source line numbers.
18namespace {
19
20#ifdef V8_USE_ADDRESS_SANITIZER
21
22constexpr size_t kASanRedzoneBytes = 24; // Must be a multiple of 8.
23
24#else // !V8_USE_ADDRESS_SANITIZER
25
26constexpr size_t kASanRedzoneBytes = 0;
27
28#endif // V8_USE_ADDRESS_SANITIZER
29
30} // namespace
31
// Zone constructor: stores the backing AccountingAllocator, a debug name, and
// whether this zone supports pointer compression. All other members use their
// in-class initializers.
// NOTE(review): this listing is a doc-extraction scrape — the digits fused to
// each line are original source line numbers, and original line 37 (the
// constructor body) was dropped; its content cannot be determined from this
// view. Verify against upstream zone.cc.
32Zone::Zone(AccountingAllocator* allocator, const char* name,
33 bool support_compression)
34 : allocator_(allocator),
35 name_(name),
36 supports_compression_(support_compression) {
38}
39
44
45void* Zone::AsanNew(size_t size) {
46 CHECK(!sealed_);
47
48 // Round up the requested size to fit the alignment.
49 size = RoundUp(size, kAlignmentInBytes);
50
51 // Check if the requested size is available without expanding.
52 const size_t size_with_redzone = size + kASanRedzoneBytes;
54 if (V8_UNLIKELY(size_with_redzone > limit_ - position_)) {
55 Expand(size_with_redzone);
56 }
57 DCHECK_LE(size_with_redzone, limit_ - position_);
58
60 position_ += size_with_redzone;
61
62 Address redzone_position = result + size;
63 DCHECK_EQ(redzone_position + kASanRedzoneBytes, position_);
64 ASAN_POISON_MEMORY_REGION(reinterpret_cast<void*>(redzone_position),
65 kASanRedzoneBytes);
66
67 // Check that the result has the proper alignment and return it.
69 return reinterpret_cast<void*>(result);
70}
71
// Zone::Reset: keeps the current head segment for reuse, returns every other
// segment to the allocator via DeleteAll(), then re-installs the kept segment
// as the zone's sole segment with an un-poisoned, zapped payload and a fresh
// aligned bump position.
// NOTE(review): this listing is a doc-extraction scrape — the digits fused to
// each line are original source line numbers, and original lines 72 (the
// `void Zone::Reset() {` signature), 75, 79-80, 84 and 94 were dropped. Line
// 75 must detach segment_head_ from `keep` (otherwise DeleteAll() below would
// free the segment we intend to keep — see `keep->set_next(nullptr)` and the
// reinstall at line 91), and lines 79-80 presumably perform the position/
// accounting adjustment announced by the comment at 77-78. Verify against
// upstream zone.cc before relying on this listing.
73 if (!segment_head_) return;
74 Segment* keep = segment_head_;
76 if (segment_head_ != nullptr) {
77 // Reset the position to the end of the new head, and uncommit its
78 // allocation size (which will be re-committed in DeleteAll).
81 }
82 keep->set_next(nullptr);
83 DeleteAll();
85
86 // Un-poison the kept segment content so we can zap and reuse it.
87 ASAN_UNPOISON_MEMORY_REGION(reinterpret_cast<void*>(keep->start()),
88 keep->capacity());
89 keep->ZapContents();
90
91 segment_head_ = keep;
92 position_ = RoundUp(keep->start(), kAlignmentInBytes);
93 limit_ = keep->end();
95 DCHECK_EQ(segment_bytes_allocated_, keep->total_size());
96}
97
#ifdef DEBUG
// Debug-only membership test: returns true iff `ptr` points into memory owned
// by this zone, i.e. lies within [start, end) of some segment in the zone's
// segment list. Linear in the number of segments.
// NOTE(review): the scrape had the original line numbers fused into the code;
// they are stripped here. The logic is otherwise unchanged.
bool Zone::Contains(const void* ptr) const {
  Address address = reinterpret_cast<Address>(ptr);
  for (Segment* segment = segment_head_; segment != nullptr;
       segment = segment->next()) {
    if (address >= segment->start() && address < segment->end()) {
      return true;
    }
  }
  return false;
}
#endif
110
111ZoneSnapshot Zone::Snapshot() const { return ZoneSnapshot{this}; }
112
114 Segment* current = segment_head_;
115 if (current) {
116 // Commit the allocation_size_ of segment_head_ and disconnect the segments
117 // list from the zone in order to ensure that tracing accounting allocator
118 // will observe value including memory from the head segment.
120 segment_head_ = nullptr;
121 }
123
124 // Traverse the chained list of segments and return them all to the allocator.
125 while (current) {
126 Segment* next = current->next();
127 segment_bytes_allocated_ -= current->total_size();
128 ReleaseSegment(current);
129 current = next;
130 }
131
132 position_ = limit_ = 0;
134#ifdef V8_ENABLE_PRECISE_ZONE_STATS
135 allocation_size_for_tracing_ = 0;
136#endif
137}
138
140 // Un-poison the segment content so we can reuse or zap it later.
141 ASAN_UNPOISON_MEMORY_REGION(reinterpret_cast<void*>(segment->start()),
142 segment->capacity());
144}
145
// Zone::Expand: grows the zone by allocating a new head segment large enough
// for `size` bytes, roughly doubling the previous head's size (bounded by
// kMinimumSegmentSize / kMaximumSegmentSize) and dying via
// FatalProcessOutOfMemory on overflow or allocation failure.
// NOTE(review): this listing is a doc-extraction scrape — the digits fused to
// each line are original source line numbers, and original lines 149, 179,
// 185, 191, 193, 196 and 198 were dropped. Line 179 must be the initializer
// of `segment` (per the cross-reference, presumably
// allocator_->AllocateSegment(new_size, supports_compression())); line 196
// must recompute position_ from the new segment, as announced by the comment
// at line 195 and required by the DCHECK_LE at line 199. Verify against
// upstream zone.cc before relying on this listing.
146void Zone::Expand(size_t size) {
147 // Make sure the requested size is already properly aligned and that
148 // there isn't enough room in the Zone to satisfy the request.
150 DCHECK_LT(limit_ - position_, size);
151
152 // Compute the new segment size. We use a 'high water mark'
153 // strategy, where we increase the segment size every time we expand
154 // except that we employ a maximum segment size when we delete. This
155 // is to avoid excessive malloc() and free() overhead.
156 Segment* head = segment_head_;
157 const size_t old_size = head ? head->total_size() : 0;
158 static const size_t kSegmentOverhead = sizeof(Segment) + kAlignmentInBytes;
159 const size_t new_size_no_overhead = size + (old_size << 1);
160 size_t new_size = kSegmentOverhead + new_size_no_overhead;
161 const size_t min_new_size = kSegmentOverhead + size;
162 // Guard against integer overflow.
163 if (new_size_no_overhead < size || new_size < kSegmentOverhead) {
164 V8::FatalProcessOutOfMemory(nullptr, "Zone");
165 }
166 if (new_size < kMinimumSegmentSize) {
167 new_size = kMinimumSegmentSize;
168 } else if (new_size >= kMaximumSegmentSize) {
169 // Limit the size of new segments to avoid growing the segment size
170 // exponentially, thus putting pressure on contiguous virtual address space.
171 // All the while making sure to allocate a segment large enough to hold the
172 // requested size.
173 new_size = std::max({min_new_size, kMaximumSegmentSize});
174 }
175 if (new_size > INT_MAX) {
176 V8::FatalProcessOutOfMemory(nullptr, "Zone");
177 }
// NOTE(review): the initializer of `segment` (original line 179) was dropped
// by extraction — see the block comment above.
178 Segment* segment =
180 if (segment == nullptr) {
181 V8::FatalProcessOutOfMemory(nullptr, "Zone");
182 }
183
184 DCHECK_GE(segment->total_size(), new_size);
186 segment->set_zone(this);
187 segment->set_next(segment_head_);
188 // Commit the allocation_size_ of segment_head_ if any, in order to ensure
189 // that tracing accounting allocator will observe value including memory
190 // from the previous head segment.
// NOTE(review): the statement announced by the comment above (original line
// 191) was dropped by extraction.
192 segment_head_ = segment;
194
195 // Recompute 'top' and 'limit' based on the new segment.
// NOTE(review): the position_ update (original line 196) was dropped by
// extraction.
197 limit_ = segment->end();
199 DCHECK_LE(size, limit_ - position_);
200}
201
203 :
204#ifdef V8_ENABLE_PRECISE_ZONE_STATS
205 allocation_size_for_tracing_(zone->allocation_size_for_tracing_),
206 freed_size_for_tracing_(zone->freed_size_for_tracing_),
207#endif
208 allocation_size_(zone->allocation_size_),
209 segment_bytes_allocated_(zone->segment_bytes_allocated_),
210 position_(zone->position_),
211 limit_(zone->limit_),
212 segment_head_(zone->segment_head_) {
213}
214
215void ZoneSnapshot::Restore(Zone* zone) const {
216 // Release segments up to the stored segment_head_.
217 Segment* current = zone->segment_head_;
218 while (current != segment_head_) {
219 // If this check failed, then either you passed a wrong zone, or the zone
220 // was reset to an earlier snapshot already. We cannot move forward again.
221 CHECK_NOT_NULL(current);
222 Segment* next = current->next();
223 zone->ReleaseSegment(current);
224 current = next;
225 }
226
227 // Un-poison the trailing segment content so we can reuse or zap it later.
228 if (segment_head_ != nullptr) {
229 void* const start = reinterpret_cast<void*>(position_);
230 DCHECK_GE(start, reinterpret_cast<void*>(current->start()));
231 DCHECK_LE(start, reinterpret_cast<void*>(current->end()));
232 const size_t length = current->end() - reinterpret_cast<Address>(start);
234 }
235
236 // Reset the Zone to the stored state.
239 zone->position_ = position_;
240 zone->limit_ = limit_;
242#ifdef V8_ENABLE_PRECISE_ZONE_STATS
243 zone->allocation_size_for_tracing_ = allocation_size_for_tracing_;
244 zone->freed_size_for_tracing_ = freed_size_for_tracing_;
245#endif
246}
247
248} // namespace internal
249} // namespace v8
#define ASAN_UNPOISON_MEMORY_REGION(start, size)
Definition asan.h:71
#define ASAN_POISON_MEMORY_REGION(start, size)
Definition asan.h:64
RegisterAllocator * allocator_
void TraceZoneDestruction(const Zone *zone)
void TraceAllocateSegment(Segment *segment)
void ReturnSegment(Segment *memory, bool supports_compression)
Segment * AllocateSegment(size_t bytes, bool supports_compression)
Segment * next() const
Address start() const
size_t total_size() const
void set_next(Segment *const next)
void set_zone(Zone *const zone)
Address end() const
size_t capacity() const
static V8_EXPORT_PRIVATE void FatalProcessOutOfMemory(Isolate *isolate, const char *location, const OOMDetails &details=kNoOOMDetails)
const Address limit_
Definition zone.h:305
const Address position_
Definition zone.h:304
const size_t allocation_size_
Definition zone.h:302
const size_t segment_bytes_allocated_
Definition zone.h:303
Segment *const segment_head_
Definition zone.h:306
ZoneSnapshot(const Zone *zone)
Definition zone.cc:202
void Restore(Zone *zone) const
Definition zone.cc:215
V8_WARN_UNUSED_RESULT ZoneSnapshot Snapshot() const
Definition zone.cc:111
static const size_t kMinimumSegmentSize
Definition zone.h:243
static const size_t kMaximumSegmentSize
Definition zone.h:246
void * AsanNew(size_t size)
Definition zone.cc:45
Address position_
Definition zone.h:263
Zone(AccountingAllocator *allocator, const char *name, bool support_compression=false)
Definition zone.cc:32
bool supports_compression() const
Definition zone.h:50
size_t allocation_size() const
Definition zone.h:189
V8_NOINLINE V8_PRESERVE_MOST void Expand(size_t size)
Definition zone.cc:146
std::atomic< size_t > segment_bytes_allocated_
Definition zone.h:254
void ReleaseSegment(Segment *segment)
Definition zone.cc:139
std::atomic< size_t > allocation_size_
Definition zone.h:249
static const size_t kAlignmentInBytes
Definition zone.h:240
Address limit_
Definition zone.h:264
AccountingAllocator * allocator_
Definition zone.h:266
void DeleteAll()
Definition zone.cc:113
Segment * segment_head_
Definition zone.h:268
int start
int end
const int position_
const int limit_
Definition isolate.cc:1114
ZoneVector< RpoNumber > & result
const char * name_
#define DCHECK_LE(v1, v2)
Definition logging.h:490
#define CHECK(condition)
Definition logging.h:124
#define CHECK_NOT_NULL(val)
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
constexpr T RoundUp(T x, intptr_t m)
Definition macros.h:387
constexpr T RoundDown(T x, intptr_t m)
Definition macros.h:371
constexpr bool IsAligned(T value, U alignment)
Definition macros.h:403
#define V8_UNLIKELY(condition)
Definition v8config.h:660