v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
new-spaces.h
1// Copyright 2020 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_HEAP_NEW_SPACES_H_
6#define V8_HEAP_NEW_SPACES_H_
7
8#include <atomic>
9#include <memory>
10#include <numeric>
11#include <optional>
12
13#include "include/v8-internal.h"
15#include "src/base/logging.h"
16#include "src/base/macros.h"
18#include "src/common/globals.h"
21#include "src/heap/heap.h"
25#include "src/heap/spaces.h"
27
28namespace v8 {
29namespace internal {
30
31class Heap;
32class MutablePageMetadata;
33class SemiSpaceNewSpace;
34
36
37// -----------------------------------------------------------------------------
38// SemiSpace in young generation
39//
40// A SemiSpace is a contiguous chunk of memory holding page-like memory chunks.
41// The mark-compact collector uses the memory of the first page in the from
42// space as a marking stack when tracing live objects.
43class SemiSpace final : public Space {
44 public:
45 using iterator = PageIterator;
46 using const_iterator = ConstPageIterator;
47
48 static void Swap(SemiSpace* from, SemiSpace* to);
49
50 SemiSpace(Heap* heap, SemiSpaceId semispace);
51 V8_EXPORT_PRIVATE ~SemiSpace();
52
53 inline bool Contains(Tagged<HeapObject> o) const;
54 inline bool Contains(Tagged<Object> o) const;
55 template <typename T>
56 inline bool Contains(Tagged<T> o) const;
57 inline bool ContainsSlow(Address a) const;
58
59 void Uncommit();
60 bool IsCommitted() const { return !memory_chunk_list_.Empty(); }
61
62 // Returns the start address of the first page of the space.
63 Address space_start() const {
64 const PageMetadata* page = first_page();
65 return page ? page->area_start() : kNullAddress;
66 }
67
68 PageMetadata* current_page() const { return current_page_; }
69
70 // Returns the start address of the current page of the space.
71 Address page_low() const { return current_page_->area_start(); }
72
73 // Returns one past the end address of the current page of the space.
74 Address page_high() const { return current_page_->area_end(); }
75
76 bool AdvancePage(size_t target_capacity);
77
78 void RemovePage(PageMetadata* page);
80
82
83 // Returns the current capacity of the semispace.
84 size_t current_capacity() const { return current_capacity_; }
85 // Returns the current capacity of the semispace using an atomic load.
89
90 SemiSpaceId id() const { return id_; }
91
92 // Approximate amount of physical memory committed for this space.
93 size_t CommittedPhysicalMemory() const final;
94
95 // If we don't have these here then SemiSpace will be abstract. However,
96 // they should never be called:
97
98 size_t Size() const final { UNREACHABLE(); }
99
100 size_t SizeOfObjects() const final { return Size(); }
101
102 size_t Available() const final { UNREACHABLE(); }
103
104 PageMetadata* first_page() final {
105 return PageMetadata::cast(memory_chunk_list_.front());
106 }
107 PageMetadata* last_page() final {
108 return PageMetadata::cast(memory_chunk_list_.back());
109 }
110
111 const PageMetadata* first_page() const final {
112 return reinterpret_cast<const PageMetadata*>(memory_chunk_list_.front());
113 }
114 const PageMetadata* last_page() const final {
115 return reinterpret_cast<const PageMetadata*>(memory_chunk_list_.back());
116 }
117
119 iterator end() { return iterator(nullptr); }
120
122 const_iterator end() const { return const_iterator(nullptr); }
123
124 std::unique_ptr<ObjectIterator> GetObjectIterator(Heap* heap) final;
125
126#ifdef DEBUG
127 V8_EXPORT_PRIVATE void Print() final;
128 // Validate a range of addresses in a SemiSpace.
129 // The "from" address must be on a page prior to the "to" address,
130 // in the linked page order, or it must be earlier on the same page.
131 static void AssertValidRange(Address from, Address to);
132#else
133 // Do nothing.
134 inline static void AssertValidRange(Address from, Address to) {}
135#endif
136
137#ifdef VERIFY_HEAP
138 void Verify(Isolate* isolate, SpaceVerificationVisitor* visitor) const final {
139 UNREACHABLE();
140 }
141 void VerifyPageMetadata() const;
142#endif
143
145
146 void MoveQuarantinedPage(MemoryChunk* chunk);
147
148 private:
149 bool AllocateFreshPage();
150
151 void RewindPages(int num_pages);
152
153 // Iterates all pages and properly initializes page flags for this space.
154 void FixPagesFlags();
155
156 void IncrementCommittedPhysicalMemory(size_t increment_value);
157 void DecrementCommittedPhysicalMemory(size_t decrement_value);
158
159 void ShrinkCapacityTo(size_t capacity);
160 void Reset();
161
162 // The currently committed space capacity.
166 PageMetadata* current_page_ = nullptr;
167
168 // The number of quarantined pages in this space. Those pages are at the front
169 // of memory_chunk_list_.
170 size_t quarantined_pages_count_ = 0;
171
172 friend class SemiSpaceNewSpace;
174};
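The following is a minimal usage sketch of the SemiSpace interface declared above; it is not taken from V8's sources, and the free function and its parameters are illustrative assumptions only.

// Hedged sketch: exercising the public SemiSpace interface from above.
void ExampleSemiSpaceUsage(SemiSpace* from, SemiSpace* to, Address addr) {
  // Exchange the roles of the two semispaces, as is done when a GC starts.
  SemiSpace::Swap(from, to);
  // A slow containment check walks the page list of the space.
  if (to->ContainsSlow(addr)) {
    // `addr` lies on a page currently owned by `to`.
  }
  // A committed semispace can release its backing memory when not needed.
  if (from->IsCommitted()) from->Uncommit();
}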
175
176// A SemiSpaceObjectIterator is an ObjectIterator that iterates over the active
177// semispace of the heap's new space.
178 class SemiSpaceObjectIterator : public ObjectIterator {
179 public:
180 // Create an iterator over the objects in the given to-space.
181 inline explicit SemiSpaceObjectIterator(const SemiSpaceNewSpace* space);
182
183 inline Tagged<HeapObject> Next() final;
184
185 private:
186 // The current iteration point.
188};
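A hedged sketch of how the iterator above is typically driven; ProcessObject is a hypothetical callback and not part of this header.

// Illustrative only: walk all live objects in the active semispace.
void VisitNewSpaceObjects(const SemiSpaceNewSpace* space,
                          void (*ProcessObject)(Tagged<HeapObject>)) {
  SemiSpaceObjectIterator it(space);
  // Next() returns a null object once the active semispace is exhausted.
  for (Tagged<HeapObject> obj = it.Next(); !obj.is_null(); obj = it.Next()) {
    ProcessObject(obj);
  }
}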
189
190class NewSpace : NON_EXPORTED_BASE(public SpaceWithLinearArea) {
191 public:
194
195 explicit NewSpace(Heap* heap);
196
197 base::Mutex* mutex() { return &mutex_; }
198
199 inline bool Contains(Tagged<Object> o) const;
200 inline bool Contains(Tagged<HeapObject> o) const;
201 virtual bool ContainsSlow(Address a) const = 0;
202
203 size_t ExternalBackingStoreOverallBytes() const {
204 size_t result = 0;
206 [this, &result](ExternalBackingStoreType type, int index) {
207 result += ExternalBackingStoreBytes(type);
208 });
209 return result;
210 }
211
212 void PromotePageToOldSpace(PageMetadata* page, FreeMode free_mode);
213
214 virtual size_t Capacity() const = 0;
215 virtual size_t TotalCapacity() const = 0;
216 virtual size_t MinimumCapacity() const = 0;
217 virtual size_t MaximumCapacity() const = 0;
218 virtual size_t AllocatedSinceLastGC() const = 0;
219
220 // Grow the capacity of the space.
221 virtual void Grow(size_t new_capacity) = 0;
222
223 virtual void MakeIterable() = 0;
224
225 virtual iterator begin() = 0;
226 virtual iterator end() = 0;
227
228 virtual const_iterator begin() const = 0;
229 virtual const_iterator end() const = 0;
230
232
234 virtual void GarbageCollectionEpilogue() = 0;
235
236 virtual bool IsPromotionCandidate(const MutablePageMetadata* page) const = 0;
237
239
240 protected:
241 static const int kAllocationBufferParkingThreshold = 4 * KB;
242
244
245 virtual void RemovePage(PageMetadata* page) = 0;
246};
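Because NewSpace is an abstract interface, callers can stay agnostic of which concrete young-generation implementation is active. A minimal sketch; the helper below is illustrative and not part of V8.

// Illustrative only: both calls dispatch virtually to SemiSpaceNewSpace or
// PagedNewSpace.
size_t NewSpaceSlackBytes(const NewSpace* new_space) {
  const size_t capacity = new_space->TotalCapacity();
  const size_t size = new_space->Size();
  return capacity > size ? capacity - size : 0;
}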
247
248// -----------------------------------------------------------------------------
249// The young generation space.
250//
251// The new space consists of a contiguous pair of semispaces. It simply
252// forwards most functions to the appropriate semispace.
253
254 class V8_EXPORT_PRIVATE SemiSpaceNewSpace final : public NewSpace {
255 using ParkedAllocationBuffer = std::pair<int, Address>;
256 using ParkedAllocationBuffersVector = std::vector<ParkedAllocationBuffer>;
257
258 public:
259 static SemiSpaceNewSpace* From(NewSpace* space) {
260 DCHECK(!v8_flags.minor_ms);
261 return static_cast<SemiSpaceNewSpace*>(space);
262 }
263
264 SemiSpaceNewSpace(Heap* heap, size_t initial_semispace_capacity,
265 size_t min_semispace_capacity,
266 size_t max_semispace_capacity);
267
268 ~SemiSpaceNewSpace() final = default;
269
270 bool ContainsSlow(Address a) const final;
271
272 // Grow the capacity of the semispaces. Assumes that they are not at
273 // their maximum capacity.
274 void Grow(size_t new_capacity) final;
275
276 // Shrink the capacity of the semispaces.
277 void Shrink(size_t new_capacity);
278
279 // Return the allocated bytes in the active semispace.
280 size_t Size() const final;
281
282 size_t SizeOfObjects() const final { return Size(); }
283
284 // Return the allocatable capacity of a semispace.
285 size_t Capacity() const final {
286 size_t actual_capacity =
287 std::max(to_space_.current_capacity(), target_capacity_);
288 return (actual_capacity / PageMetadata::kPageSize) *
289 MemoryChunkLayout::AllocatableMemoryInDataPage();
290 }
291
292 // Return the capacity of pages currently used for allocations. This is
293 // a capped overapproximation of the size of objects.
294 size_t CurrentCapacitySafe() const {
295 return (to_space_.current_capacity_safe() / PageMetadata::kPageSize) *
296 MemoryChunkLayout::AllocatableMemoryInDataPage();
297 }
298
299 // Return the current size of a semispace, allocatable and non-allocatable
300 // memory.
301 size_t TotalCapacity() const final { return target_capacity_; }
302
303 // Committed memory for NewSpace is the committed memory of both semi-spaces
304 // combined.
305 size_t CommittedMemory() const final {
306 return from_space_.CommittedMemory() + to_space_.CommittedMemory();
307 }
308
309 size_t MaximumCommittedMemory() const final {
310 return from_space_.MaximumCommittedMemory() +
311 to_space_.MaximumCommittedMemory();
312 }
313
314 // Approximate amount of physical memory committed for this space.
315 size_t CommittedPhysicalMemory() const final;
316
317 // Return the available bytes without growing.
318 size_t Available() const final;
319
320 bool ReachedTargetCapacity() const {
321 return to_space_.current_capacity_ >= target_capacity_;
322 }
323
324 size_t ExternalBackingStoreBytes(ExternalBackingStoreType type) const final {
325 if (type == ExternalBackingStoreType::kArrayBuffer)
326 return heap()->YoungArrayBufferBytes();
327 DCHECK_EQ(0, from_space_.ExternalBackingStoreBytes(type));
328 return to_space_.ExternalBackingStoreBytes(type);
329 }
330
331 size_t AllocatedSinceLastGC() const final;
332
333 void GrowToMaximumCapacityForTesting() final;
334
335 // Return the maximum capacity of a semispace.
336 size_t MaximumCapacity() const final { return maximum_capacity_; }
337
338 // Returns the minimum capacity of a semispace.
339 size_t MinimumCapacity() const final { return minimum_capacity_; }
340
341 // Return the address of the first allocatable address in the active
342 // semispace. This may be the address where the first object resides.
343 Address first_allocatable_address() const final {
344 return to_space_.space_start();
345 }
346
347 // Get the age mark of the inactive semispace.
348 Address age_mark() const { return age_mark_; }
349
350 // Sets the age mark to the current top pointer. It also sets proper page
351 // flags for all pages before the age mark.
352 void SetAgeMarkAndBelowAgeMarkPageFlags();
353
354 // Try to switch the active semispace to a new, empty page.
355 // Returns false if this isn't possible or reasonable (i.e., there
356 // are no pages, or the current page is already empty), or true
357 // if successful.
358 bool AddFreshPage();
359
360 bool AddParkedAllocationBuffer(int size_in_bytes,
361 AllocationAlignment alignment);
362
363 void ResetParkedAllocationBuffers();
364
365#ifdef VERIFY_HEAP
366 // Verify the active semispace.
367 void Verify(Isolate* isolate, SpaceVerificationVisitor* visitor) const final;
368
369 // VerifyObjects verifies all objects in the active semi space.
370 void VerifyObjects(Isolate* isolate, SpaceVerificationVisitor* visitor) const;
371#endif
372
373#ifdef DEBUG
374 // Print the active semispace.
375 void Print() override { to_space_.Print(); }
376#endif
377
378 void MakeIterable() override;
379
380 void MakeAllPagesInFromSpaceIterable();
381 void MakeUnusedPagesInToSpaceIterable();
382
383 PageMetadata* first_page() final { return to_space_.first_page(); }
384 PageMetadata* last_page() final { return to_space_.last_page(); }
385
386 const PageMetadata* first_page() const final {
387 return to_space_.first_page();
388 }
389 const PageMetadata* last_page() const final { return to_space_.last_page(); }
390
391 iterator begin() final { return to_space_.begin(); }
392 iterator end() final { return to_space_.end(); }
393
394 const_iterator begin() const final { return to_space_.begin(); }
395 const_iterator end() const final { return to_space_.end(); }
396
397 std::unique_ptr<ObjectIterator> GetObjectIterator(Heap* heap) final;
398
399 SemiSpace& from_space() { return from_space_; }
400 const SemiSpace& from_space() const { return from_space_; }
401 SemiSpace& to_space() { return to_space_; }
402 const SemiSpace& to_space() const { return to_space_; }
403
404 // Used for conservative stack scanning to determine if a page with pinned
405 // objects should remain in new space or move to old space.
406 bool ShouldPageBePromoted(const MemoryChunk* chunk) const;
407
408 V8_INLINE bool ShouldBePromoted(Address object) const;
409
410 // Invoked at the beginning of a GC to swap from- and to-space.
411 void SwapSemiSpaces();
412
413 void GarbageCollectionPrologue() final;
414 void GarbageCollectionEpilogue() final;
415
416 void ZapUnusedMemory();
417
418 bool IsPromotionCandidate(const MutablePageMetadata* page) const final;
419
420 AllocatorPolicy* CreateAllocatorPolicy(MainAllocator* allocator) final;
421
422 int GetSpaceRemainingOnCurrentPageForTesting();
423 void FillCurrentPageForTesting();
424
425 void MoveQuarantinedPage(MemoryChunk* chunk);
426 size_t QuarantinedSize() const { return quarantined_size_; }
427 size_t QuarantinedPageCount() const {
428 return to_space_.quarantined_pages_count_;
429 }
430 void SetQuarantinedSize(size_t quarantined_size) {
431 quarantined_size_ = quarantined_size;
432 }
433
434 V8_INLINE bool IsAddressBelowAgeMark(Address address) const;
435
436 private:
437 bool IsFromSpaceCommitted() const { return from_space_.IsCommitted(); }
438
439 SemiSpace* active_space() { return &to_space_; }
440
441 std::optional<std::pair<Address, Address>> Allocate(
442 int size_in_bytes, AllocationAlignment alignment);
443
444 std::optional<std::pair<Address, Address>> AllocateOnNewPageBeyondCapacity(
445 int size_in_bytes, AllocationAlignment alignment);
446
447 // Removes a page from the space. Assumes the page is in the `from_space` semi
448 // space.
449 void RemovePage(PageMetadata* page) final;
450
451 // Frees the given memory region. The memory will be reusable for allocation
452 // if this was the last allocation.
453 void Free(Address start, Address end);
454
456 allocation_top_ =
457 to_space_.current_page() ? to_space_.page_low() : kNullAddress;
458 }
459
460 void SetAllocationTop(Address top) { allocation_top_ = top; }
461
462 V8_INLINE void IncrementAllocationTop(Address new_top);
463
464 V8_INLINE void DecrementAllocationTop(Address new_top);
465
466 Address allocation_top() const { return allocation_top_; }
467
468 // The semispaces.
469 SemiSpace to_space_;
470 SemiSpace from_space_;
471
472 // Bump pointer for allocation. to_space_.page_low() <= allocation_top_ <=
473 // to_space_.page_high() always holds.
474 Address allocation_top_ = kNullAddress;
475
476 ParkedAllocationBuffersVector parked_allocation_buffers_;
477
478 // Current overall size of objects that were quarantined in the last GC.
479 size_t quarantined_size_ = 0;
480
481 // Size right after the last GC. Used for computing `AllocatedSinceLastGC()`.
482 size_t size_after_last_gc_ = 0;
483
484 // The minimum semi space capacity. A semi space cannot shrink below this
485 // size.
486 const size_t minimum_capacity_ = 0;
487
488 // The maximum capacity of a semi space. A space cannot grow beyond that size.
489 const size_t maximum_capacity_ = 0;
490
491 // Used to govern object promotion during mark-compact collection.
492 Address age_mark_ = kNullAddress;
493
494 // The target capacity of a semi space.
495 size_t target_capacity_ = 0;
496
499};
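The capacity accessors above scale a byte capacity down to whole pages and multiply by the allocatable bytes per page. A standalone arithmetic sketch of that shape; the constants are assumptions for illustration only, not V8's real page-layout values.

// Illustrative only: mirrors the shape of Capacity()/CurrentCapacitySafe().
size_t ExampleAllocatableCapacity(size_t capacity_bytes) {
  constexpr size_t kExamplePageSize = 256 * 1024;            // assumed value
  constexpr size_t kExampleAllocatablePerPage = 253 * 1024;  // assumed value
  return (capacity_bytes / kExamplePageSize) * kExampleAllocatablePerPage;
}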
500
501// -----------------------------------------------------------------------------
502// PagedNewSpace
503
504 class V8_EXPORT_PRIVATE PagedSpaceForNewSpace final : public PagedSpaceBase {
505 public:
506 // Creates a paged space for the young generation. The constructor does not
507 // allocate pages from the OS.
508 explicit PagedSpaceForNewSpace(Heap* heap, size_t initial_capacity,
509 size_t min_capacity, size_t max_capacity);
510
511 void TearDown() { PagedSpaceBase::TearDown(); }
512
513 // Grow the capacity of the space.
514 void Grow(size_t new_capacity);
515 void GrowToMaximumCapacityForTesting();
516
517 // Shrink the capacity of the space.
518 bool StartShrinking(size_t new_target_capacity);
519 void FinishShrinking();
520
521 size_t AllocatedSinceLastGC() const;
522
523 // Return the minimum capacity of the space.
524 size_t MinimumCapacity() const { return min_capacity_; }
525
526 // Return the maximum capacity of the space.
527 size_t MaximumCapacity() const { return max_capacity_; }
528
529 size_t TotalCapacity() const { return target_capacity_; }
530
531 // Return the address of the first allocatable address in the active
532 // semispace. This may be the address where the first object resides.
533 Address first_allocatable_address() const {
534 return first_page()->area_start();
535 }
536
537 // Reset the allocation pointer.
539 size_at_last_gc_ = Size();
540 last_lab_page_ = nullptr;
541 }
542
543 PageMetadata* InitializePage(MutablePageMetadata* chunk) final;
544
545 size_t AddPage(PageMetadata* page) final;
546 void RemovePage(PageMetadata* page) final;
547 void ReleasePage(PageMetadata* page) final;
548
549 size_t ExternalBackingStoreBytes(ExternalBackingStoreType type) const final {
550 if (type == ExternalBackingStoreType::kArrayBuffer)
551 return heap()->YoungArrayBufferBytes();
552 return external_backing_store_bytes_[static_cast<int>(type)];
553 }
554
555#ifdef VERIFY_HEAP
556 void Verify(Isolate* isolate, SpaceVerificationVisitor* visitor) const final;
557#endif // VERIFY_HEAP
558
559 void MakeIterable() { free_list()->RepairLists(heap()); }
560
561 bool ShouldReleaseEmptyPage() const;
562
563 // Allocates pages as long as current capacity is below the target capacity.
564 void AllocatePageUpToCapacityForTesting();
565
566 bool IsPromotionCandidate(const MutablePageMetadata* page) const;
567
568 // Return the available bytes without growing.
569 size_t Available() const final;
570
571 size_t UsableCapacity() const {
572 DCHECK_LE(free_list_->wasted_bytes(), current_capacity_);
573 return current_capacity_ - free_list_->wasted_bytes();
574 }
575
579
580 private:
581 bool AllocatePage();
582
583 const size_t min_capacity_;
584 const size_t max_capacity_;
585 size_t target_capacity_ = 0;
586 size_t current_capacity_ = 0;
587
588 PageMetadata* last_lab_page_ = nullptr;
589
591};
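A hedged sketch of the UsableCapacity() computation declared above: free-list waste is subtracted from the committed capacity. The helper name and values are illustrative only.

size_t ExampleUsableCapacity(size_t current_capacity, size_t wasted_bytes) {
  // Mirrors the DCHECK_LE in UsableCapacity(): waste never exceeds capacity.
  return wasted_bytes <= current_capacity ? current_capacity - wasted_bytes : 0;
}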
592
593// TODO(v8:12612): PagedNewSpace is a bridge between the NewSpace interface and
594// the PagedSpaceForNewSpace implementation. Once we settle on a single new
595// space implementation, we can merge these 3 classes into 1.
596 class V8_EXPORT_PRIVATE PagedNewSpace final : public NewSpace {
597 public:
598 static PagedNewSpace* From(NewSpace* space) {
599 DCHECK(v8_flags.minor_ms);
600 return static_cast<PagedNewSpace*>(space);
601 }
602
603 PagedNewSpace(Heap* heap, size_t initial_capacity, size_t min_capacity,
604 size_t max_capacity);
605
606 ~PagedNewSpace() final;
607
608 bool ContainsSlow(Address a) const final {
609 return paged_space_.ContainsSlow(a);
610 }
611
612 // Grow the capacity of the space.
613 void Grow(size_t new_capacity) final { paged_space_.Grow(new_capacity); }
614
615 // Shrink the capacity of the space.
616 bool StartShrinking(size_t new_target_capacity) {
617 return paged_space_.StartShrinking(new_target_capacity);
618 }
619 void FinishShrinking() { paged_space_.FinishShrinking(); }
620
621 // Return the allocated bytes in the active space.
622 size_t Size() const final { return paged_space_.Size(); }
623
624 size_t SizeOfObjects() const final { return paged_space_.SizeOfObjects(); }
625
626 // Return the allocatable capacity of the space.
627 size_t Capacity() const final { return paged_space_.Capacity(); }
628
629 // Return the current size of the space, allocatable and non-allocatable
630 // memory.
631 size_t TotalCapacity() const final { return paged_space_.TotalCapacity(); }
632
633 // Committed memory for PagedNewSpace.
634 size_t CommittedMemory() const final {
635 return paged_space_.CommittedMemory();
636 }
637
638 size_t MaximumCommittedMemory() const final {
639 return paged_space_.MaximumCommittedMemory();
640 }
641
642 // Approximate amount of physical memory committed for this space.
643 size_t CommittedPhysicalMemory() const final {
644 return paged_space_.CommittedPhysicalMemory();
645 }
646
647 // Return the available bytes without growing.
648 size_t Available() const final { return paged_space_.Available(); }
649
650 size_t ExternalBackingStoreBytes(ExternalBackingStoreType type) const final {
651 return paged_space_.ExternalBackingStoreBytes(type);
652 }
653
654 size_t AllocatedSinceLastGC() const final {
655 return paged_space_.AllocatedSinceLastGC();
656 }
657
658 // Return the minimum capacity of the space.
659 size_t MinimumCapacity() const final {
660 return paged_space_.MinimumCapacity();
661 }
662
663 // Return the maximum capacity of the space.
664 size_t MaximumCapacity() const final {
665 return paged_space_.MaximumCapacity();
666 }
667
668 // Return the address of the first allocatable address in the active
669 // semispace. This may be the address where the first object resides.
670 Address first_allocatable_address() const final {
671 return paged_space_.first_allocatable_address();
672 }
673
674#ifdef VERIFY_HEAP
675 // Verify the active semispace.
676 void Verify(Isolate* isolate, SpaceVerificationVisitor* visitor) const final {
677 paged_space_.Verify(isolate, visitor);
678 }
679#endif
680
681#ifdef DEBUG
682 // Print the active semispace.
683 void Print() final { paged_space_.Print(); }
684#endif
685
686 PageMetadata* first_page() final { return paged_space_.first_page(); }
687 PageMetadata* last_page() final { return paged_space_.last_page(); }
688
689 const PageMetadata* first_page() const final {
690 return paged_space_.first_page();
691 }
692 const PageMetadata* last_page() const final {
693 return paged_space_.last_page();
694 }
695
696 iterator begin() final { return paged_space_.begin(); }
697 iterator end() final { return paged_space_.end(); }
698
699 const_iterator begin() const final { return paged_space_.begin(); }
700 const_iterator end() const final { return paged_space_.end(); }
701
702 std::unique_ptr<ObjectIterator> GetObjectIterator(Heap* heap) final {
703 return paged_space_.GetObjectIterator(heap);
704 }
705
706 void GarbageCollectionEpilogue() final {
707 paged_space_.GarbageCollectionEpilogue();
708 }
709
710 bool IsPromotionCandidate(const MutablePageMetadata* page) const final {
711 return paged_space_.IsPromotionCandidate(page);
712 }
713
714 void GrowToMaximumCapacityForTesting() final {
715 paged_space_.GrowToMaximumCapacityForTesting();
716 }
717
718 PagedSpaceForNewSpace* paged_space() { return &paged_space_; }
719 const PagedSpaceForNewSpace* paged_space() const { return &paged_space_; }
720
721 void MakeIterable() override { paged_space_.MakeIterable(); }
722
723 // All operations on `memory_chunk_list_` should go through `paged_space_`.
725
726 bool ShouldReleaseEmptyPage() const {
727 return paged_space_.ShouldReleaseEmptyPage();
728 }
729 void ReleasePage(PageMetadata* page) { paged_space_.ReleasePage(page); }
730
731 AllocatorPolicy* CreateAllocatorPolicy(MainAllocator* allocator) final;
732
733 private:
734 void RemovePage(PageMetadata* page) final { paged_space_.RemovePage(page); }
736 PagedSpaceForNewSpace paged_space_;
737};
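Which concrete new-space implementation is active is governed by v8_flags.minor_ms, mirrored by the From() helpers of both classes. A hedged usage sketch; the function below is illustrative and not part of V8.

void WithConcreteNewSpace(NewSpace* space) {
  if (v8_flags.minor_ms) {
    // MinorMS configuration: the young generation is a paged space.
    PagedNewSpace* paged = PagedNewSpace::From(space);
    USE(paged);
  } else {
    // Scavenger configuration: the young generation is a pair of semispaces.
    SemiSpaceNewSpace* semi = SemiSpaceNewSpace::From(space);
    USE(semi);
  }
}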
738
739// For contiguous spaces, top should be in the space (or at the end) and limit
740// should be the end of the space.
741#define DCHECK_SEMISPACE_ALLOCATION_TOP(top, space) \
742 SLOW_DCHECK((space).page_low() <= (top) && (top) <= (space).page_high())
743
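A hedged usage sketch for the macro above: it asserts, in debug builds only, that a candidate bump pointer stays within the current page of a semispace. The function is illustrative.

void ExampleCheckAllocationTop(const SemiSpace& to_space, Address candidate_top) {
  // Expands to SLOW_DCHECK(to_space.page_low() <= candidate_top &&
  //                         candidate_top <= to_space.page_high()).
  DCHECK_SEMISPACE_ALLOCATION_TOP(candidate_top, to_space);
}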
744} // namespace internal
745} // namespace v8
746
747#endif // V8_HEAP_NEW_SPACES_H_