v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
worklist.h
Go to the documentation of this file.
1// Copyright 2020 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_HEAP_BASE_WORKLIST_H_
6#define V8_HEAP_BASE_WORKLIST_H_
7
#include <atomic>
#include <cstddef>
#include <cstdint>
#include <utility>

#include "src/base/logging.h"
#include "src/base/macros.h"
#include "src/base/platform/memory.h"
#include "src/base/platform/mutex.h"

16namespace heap::base {
17namespace internal {
18
20 public:
21 static SegmentBase* GetSentinelSegmentAddress();
22
23 explicit constexpr SegmentBase(uint16_t capacity) : capacity_(capacity) {}
24
25 size_t Size() const { return index_; }
26 size_t Capacity() const { return capacity_; }
27 bool IsEmpty() const { return index_ == 0; }
28 bool IsFull() const { return index_ == capacity_; }
29 void Clear() { index_ = 0; }
30
31 protected:
32 const uint16_t capacity_;
33 uint16_t index_ = 0;
34};
35} // namespace internal
36
38 public:
39 // Enforces predictable order of push/pop sequences in single-threaded mode.
40 static void EnforcePredictableOrder();
41 static bool PredictableOrder() { return predictable_order_; }
42
43 private:
44 static bool predictable_order_;
45};
46
47// A global worklist based on segments which allows for a thread-local
48// producer/consumer pattern with global work stealing.
49//
50// - Entries in the worklist are of type `EntryType`.
51// - Segments have a capacity of at least `MinSegmentSize` but possibly more.
52//
53// All methods on the worklist itself are safe for concurrent usage but only
54// consider published segments. Unpublished work in views using `Local` is not
55// visible.
56template <typename EntryType, uint16_t MinSegmentSize>
57class Worklist final {
58 public:
59 // A thread-local view on the worklist. Any work that is not published from
60 // the local view is not visible to the global worklist.
61 class Local;
62 class Segment;
63
64 static constexpr int kMinSegmentSize = MinSegmentSize;
65
66 Worklist() = default;
68
69 Worklist(const Worklist&) = delete;
70 Worklist& operator=(const Worklist&) = delete;
71
72 // Returns true if the global worklist is empty and false otherwise. May be
73 // read concurrently for an approximation.
74 bool IsEmpty() const;
75 // Returns the number of segments in the global worklist. May be read
76 // concurrently for an approximation.
77 size_t Size() const;
78
79 // Moves the segments from `other` into this worklist, leaving behind `other`
80 // as empty.
82
83 // Removes all segments from the worklist.
84 void Clear();
85
86 // Invokes `callback` on each item. Callback is of type `bool(EntryType&)` and
87 // should return true if the entry should be kept and false if the entry
88 // should be removed.
89 template <typename Callback>
90 void Update(Callback callback);
91
92 // Invokes `callback` on each item. Callback is of type `void(EntryType&)`.
93 template <typename Callback>
94 void Iterate(Callback callback) const;
95
96 private:
97 void Push(Segment* segment);
98 bool Pop(Segment** segment);
99
101 Segment* top_ = nullptr;
102 std::atomic<size_t> size_{0};
103};
104
105template <typename EntryType, uint16_t MinSegmentSize>
107 DCHECK(!segment->IsEmpty());
108 v8::base::MutexGuard guard(&lock_);
109 segment->set_next(top_);
110 top_ = segment;
111 size_.fetch_add(1, std::memory_order_relaxed);
112}
113
114template <typename EntryType, uint16_t MinSegmentSize>
116 v8::base::MutexGuard guard(&lock_);
117 if (top_ == nullptr) return false;
118 DCHECK_LT(0U, size_);
119 size_.fetch_sub(1, std::memory_order_relaxed);
120 *segment = top_;
121 top_ = top_->next();
122 return true;
123}
124
125template <typename EntryType, uint16_t MinSegmentSize>
127 return Size() == 0;
128}
129
130template <typename EntryType, uint16_t MinSegmentSize>
132 // It is safe to read |size_| without a lock since this variable is
133 // atomic, keeping in mind that threads may not immediately see the new
134 // value when it is updated.
135 return size_.load(std::memory_order_relaxed);
136}
137
138template <typename EntryType, uint16_t MinSegmentSize>
140 v8::base::MutexGuard guard(&lock_);
141 size_.store(0, std::memory_order_relaxed);
142 Segment* current = top_;
143 while (current != nullptr) {
144 Segment* tmp = current;
145 current = current->next();
146 Segment::Delete(tmp);
147 }
148 top_ = nullptr;
149}
150
151template <typename EntryType, uint16_t MinSegmentSize>
152template <typename Callback>
154 v8::base::MutexGuard guard(&lock_);
155 Segment* prev = nullptr;
156 Segment* current = top_;
157 size_t num_deleted = 0;
158 while (current != nullptr) {
159 current->Update(callback);
160 if (current->IsEmpty()) {
161 DCHECK_LT(0U, size_);
162 ++num_deleted;
163 if (prev == nullptr) {
164 top_ = current->next();
165 } else {
166 prev->set_next(current->next());
167 }
168 Segment* tmp = current;
169 current = current->next();
170 Segment::Delete(tmp);
171 } else {
172 prev = current;
173 current = current->next();
174 }
175 }
176 size_.fetch_sub(num_deleted, std::memory_order_relaxed);
177}
178
179template <typename EntryType, uint16_t MinSegmentSize>
180template <typename Callback>
182 v8::base::MutexGuard guard(&lock_);
183 for (Segment* current = top_; current != nullptr; current = current->next()) {
184 current->Iterate(callback);
185 }
186}
187
188template <typename EntryType, uint16_t MinSegmentSize>
191 Segment* other_top;
192 size_t other_size;
193 {
194 v8::base::MutexGuard guard(&other.lock_);
195 if (!other.top_) return;
196
197 other_top = std::exchange(other.top_, nullptr);
198 other_size = other.size_.exchange(0, std::memory_order_relaxed);
199 }
200
201 // It's safe to iterate through these segments because the top was
202 // extracted from `other`.
203 Segment* end = other_top;
204 while (end->next()) end = end->next();
205
206 {
207 v8::base::MutexGuard guard(&lock_);
208 size_.fetch_add(other_size, std::memory_order_relaxed);
209 end->set_next(top_);
210 top_ = other_top;
211 }
212}
213
214template <typename EntryType, uint16_t MinSegmentSize>
215class Worklist<EntryType, MinSegmentSize>::Segment final
216 : public internal::SegmentBase {
217 public:
218 static Segment* Create(uint16_t min_segment_size) {
219 const auto wanted_bytes = MallocSizeForCapacity(min_segment_size);
222 result.ptr = static_cast<char*>(v8::base::Malloc(wanted_bytes));
223 result.count = wanted_bytes;
224 } else {
226 }
227 if (!result.ptr) {
229 "Worklist::Segment::Create");
230 }
231 return new (result.ptr)
232 Segment(CapacityForMallocSize(result.count * sizeof(char)));
233 }
234
235 static void Delete(Segment* segment) { v8::base::Free(segment); }
236
237 V8_INLINE void Push(EntryType entry);
238 V8_INLINE void Pop(EntryType* entry);
239
240 template <typename Callback>
241 void Update(Callback callback);
242 template <typename Callback>
243 void Iterate(Callback callback) const;
244
245 Segment* next() const { return next_; }
246 void set_next(Segment* segment) { next_ = segment; }
247
248 private:
249 static constexpr size_t MallocSizeForCapacity(size_t num_entries) {
250 return sizeof(Segment) + sizeof(EntryType) * num_entries;
251 }
252 static constexpr size_t CapacityForMallocSize(size_t malloc_size) {
253 return (malloc_size - sizeof(Segment)) / sizeof(EntryType);
254 }
255
256 constexpr explicit Segment(size_t capacity)
257 : internal::SegmentBase(capacity) {}
258
259 EntryType& entry(size_t index) {
260 return reinterpret_cast<EntryType*>(this + 1)[index];
261 }
262 const EntryType& entry(size_t index) const {
263 return reinterpret_cast<const EntryType*>(this + 1)[index];
264 }
265
266 Segment* next_ = nullptr;
267};
268
269template <typename EntryType, uint16_t MinSegmentSize>
274
275template <typename EntryType, uint16_t MinSegmentSize>
277 DCHECK(!IsEmpty());
278 *e = entry(--index_);
279}
280
281template <typename EntryType, uint16_t MinSegmentSize>
282template <typename Callback>
284 size_t new_index = 0;
285 for (size_t i = 0; i < index_; i++) {
286 if (callback(entry(i), &entry(new_index))) {
287 new_index++;
288 }
289 }
290 index_ = new_index;
291}
292
293template <typename EntryType, uint16_t MinSegmentSize>
294template <typename Callback>
296 Callback callback) const {
297 for (size_t i = 0; i < index_; i++) {
298 callback(entry(i));
299 }
300}
301
302// A thread-local on a given worklist.
303template <typename EntryType, uint16_t MinSegmentSize>
304class Worklist<EntryType, MinSegmentSize>::Local final {
305 public:
306 using ItemType = EntryType;
307
308 explicit Local(Worklist<EntryType, MinSegmentSize>& worklist);
309 ~Local();
310
311 // Moving needs to specify whether the `worklist_` pointer is preserved or
312 // not.
313 Local(Local&& other) V8_NOEXCEPT : worklist_(other.worklist_) {
314 std::swap(push_segment_, other.push_segment_);
315 std::swap(pop_segment_, other.pop_segment_);
316 }
318
319 // Having multiple copies of the same local view may be unsafe.
320 Local(const Local&) = delete;
321 Local& operator=(const Local& other) = delete;
322
323 V8_INLINE void Push(EntryType entry);
324 V8_INLINE bool Pop(EntryType* entry);
325
326 bool IsLocalAndGlobalEmpty() const;
327 bool IsLocalEmpty() const;
328 bool IsGlobalEmpty() const;
329
330 size_t PushSegmentSize() const { return push_segment_->Size(); }
331
332 void Publish();
333
335
336 void Clear();
337
338 private:
339 void PublishPushSegment();
340 void PublishPopSegment();
341 bool StealPopSegment();
342
344 // Bottleneck for filtering in crash dumps.
345 return Segment::Create(MinSegmentSize);
346 }
349 Segment::Delete(static_cast<Segment*>(segment));
350 }
351
354 push_segment_);
355 return static_cast<Segment*>(push_segment_);
356 }
357 inline const Segment* push_segment() const {
359 push_segment_);
360 return static_cast<const Segment*>(push_segment_);
361 }
362
365 return static_cast<Segment*>(pop_segment_);
366 }
367 inline const Segment* pop_segment() const {
369 return static_cast<const Segment*>(pop_segment_);
370 }
371
373 internal::SegmentBase* push_segment_ = nullptr;
374 internal::SegmentBase* pop_segment_ = nullptr;
375};
376
377template <typename EntryType, uint16_t MinSegmentSize>
380 : worklist_(worklist),
381 push_segment_(internal::SegmentBase::GetSentinelSegmentAddress()),
382 pop_segment_(internal::SegmentBase::GetSentinelSegmentAddress()) {}
383
384template <typename EntryType, uint16_t MinSegmentSize>
386 CHECK_IMPLIES(push_segment_, push_segment_->IsEmpty());
387 CHECK_IMPLIES(pop_segment_, pop_segment_->IsEmpty());
388 DeleteSegment(push_segment_);
389 DeleteSegment(pop_segment_);
390}
391
392template <typename EntryType, uint16_t MinSegmentSize>
394 if (V8_UNLIKELY(push_segment_->IsFull())) {
395 PublishPushSegment();
396 push_segment_ = NewSegment();
397 }
398 push_segment()->Push(entry);
399}
400
401template <typename EntryType, uint16_t MinSegmentSize>
403 if (pop_segment_->IsEmpty()) {
404 if (!push_segment_->IsEmpty()) {
405 std::swap(push_segment_, pop_segment_);
406 } else if (!StealPopSegment()) {
407 return false;
408 }
409 }
410 pop_segment()->Pop(entry);
411 return true;
412}
413
414template <typename EntryType, uint16_t MinSegmentSize>
416 return IsLocalEmpty() && IsGlobalEmpty();
417}
418
419template <typename EntryType, uint16_t MinSegmentSize>
421 return push_segment_->IsEmpty() && pop_segment_->IsEmpty();
422}
423
424template <typename EntryType, uint16_t MinSegmentSize>
426 return worklist_.IsEmpty();
427}
428
429template <typename EntryType, uint16_t MinSegmentSize>
431 if (!push_segment_->IsEmpty()) {
432 PublishPushSegment();
434 }
435 if (!pop_segment_->IsEmpty()) {
436 PublishPopSegment();
438 }
439}
440
441template <typename EntryType, uint16_t MinSegmentSize>
444 worklist_.Merge(other.worklist_);
445}
446
447template <typename EntryType, uint16_t MinSegmentSize>
450 worklist_.Push(push_segment());
451}
452
453template <typename EntryType, uint16_t MinSegmentSize>
456 worklist_.Push(pop_segment());
457}
458
459template <typename EntryType, uint16_t MinSegmentSize>
461 if (worklist_.IsEmpty()) return false;
462 Segment* new_segment = nullptr;
463 if (worklist_.Pop(&new_segment)) {
464 DeleteSegment(pop_segment_);
465 pop_segment_ = new_segment;
466 return true;
467 }
468 return false;
469}
470
471template <typename EntryType, uint16_t MinSegmentSize>
473 push_segment_->Clear();
474 pop_segment_->Clear();
475}
476
477} // namespace heap::base
478
479#endif // V8_HEAP_BASE_WORKLIST_H_
static bool predictable_order_
Definition worklist.h:44
static bool PredictableOrder()
Definition worklist.h:41
Local(Worklist< EntryType, MinSegmentSize > &worklist)
Definition worklist.h:378
void DeleteSegment(internal::SegmentBase *segment) const
Definition worklist.h:347
bool IsLocalAndGlobalEmpty() const
Definition worklist.h:415
void Merge(Worklist< EntryType, MinSegmentSize >::Local &other)
Definition worklist.h:442
Worklist< EntryType, MinSegmentSize > & worklist_
Definition worklist.h:372
Local & operator=(Local &&) V8_NOEXCEPT=delete
V8_INLINE void Push(EntryType entry)
Definition worklist.h:393
Segment * NewSegment() const
Definition worklist.h:343
Local(Local &&other) V8_NOEXCEPT
Definition worklist.h:313
const Segment * pop_segment() const
Definition worklist.h:367
V8_INLINE bool Pop(EntryType *entry)
Definition worklist.h:402
const Segment * push_segment() const
Definition worklist.h:357
void set_next(Segment *segment)
Definition worklist.h:246
static void Delete(Segment *segment)
Definition worklist.h:235
void Iterate(Callback callback) const
Definition worklist.h:295
static constexpr size_t CapacityForMallocSize(size_t malloc_size)
Definition worklist.h:252
constexpr Segment(size_t capacity)
Definition worklist.h:256
static Segment * Create(uint16_t min_segment_size)
Definition worklist.h:218
V8_INLINE void Pop(EntryType *entry)
Definition worklist.h:276
const EntryType & entry(size_t index) const
Definition worklist.h:262
Segment * next() const
Definition worklist.h:245
void Update(Callback callback)
Definition worklist.h:283
V8_INLINE void Push(EntryType entry)
Definition worklist.h:270
EntryType & entry(size_t index)
Definition worklist.h:259
static constexpr size_t MallocSizeForCapacity(size_t num_entries)
Definition worklist.h:249
static constexpr int kMinSegmentSize
Definition worklist.h:64
bool IsEmpty() const
Definition worklist.h:126
Worklist(const Worklist &)=delete
void Iterate(Callback callback) const
Definition worklist.h:181
Worklist & operator=(const Worklist &)=delete
v8::base::Mutex lock_
Definition worklist.h:100
std::atomic< size_t > size_
Definition worklist.h:102
void Update(Callback callback)
Definition worklist.h:153
size_t Size() const
Definition worklist.h:131
void Push(Segment *segment)
Definition worklist.h:106
void Merge(Worklist< EntryType, MinSegmentSize > &other)
Definition worklist.h:189
bool Pop(Segment **segment)
Definition worklist.h:115
constexpr SegmentBase(uint16_t capacity)
Definition worklist.h:23
static SegmentBase * GetSentinelSegmentAddress()
Definition worklist.cc:18
Register const index_
const int size_
Definition assembler.cc:132
int end
LineAndColumn current
TNode< Object > callback
ZoneVector< RpoNumber > & result
V8_NODISCARD AllocationResult< T * > AllocateAtLeast(size_t n)
Definition memory.h:144
void FatalOOM(OOMType type, const char *msg)
Definition logging.cc:80
void * Malloc(size_t size)
Definition memory.h:36
void Free(void *memory)
Definition memory.h:63
#define V8_NOEXCEPT
#define CHECK_IMPLIES(lhs, rhs)
#define CHECK(condition)
Definition logging.h:124
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define V8_EXPORT_PRIVATE
Definition macros.h:460
#define V8_INLINE
Definition v8config.h:500
#define V8_UNLIKELY(condition)
Definition v8config.h:660