V8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
explicit-management.cc
Implementation of cppgc's explicit management API: eager freeing (subtle::FreeUnreferencedObject) and in-place resizing (subtle::Resize) of garbage-collected objects.
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "include/cppgc/explicit-management.h"

#include <algorithm>
#include <tuple>

#include "src/heap/cppgc/heap-base.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/memory.h"
#include "src/heap/cppgc/object-view.h"

namespace cppgc {
namespace internal {

namespace {

bool InGC(HeapHandle& heap_handle) {
  const auto& heap = HeapBase::From(heap_handle);
  // Whenever the GC is active, avoid modifying the object as it may mess with
  // state that the GC needs.
  return heap.in_atomic_pause() || heap.marker() ||
         heap.sweeper().IsSweepingInProgress();
}

}  // namespace

void ExplicitManagementImpl::FreeUnreferencedObject(HeapHandle& heap_handle,
                                                    void* object) {
  if (InGC(heap_handle)) {
    return;
  }

  auto& header = HeapObjectHeader::FromObject(object);
  header.Finalize();

  // `object` is guaranteed to be of type GarbageCollected, so getting the
  // BasePage is okay for regular and large objects.
  BasePage* base_page = BasePage::FromPayload(object);

#if defined(CPPGC_YOUNG_GENERATION)
  const size_t object_size = ObjectView<>(header).Size();

  if (auto& heap_base = HeapBase::From(heap_handle);
      heap_base.generational_gc_supported()) {
    heap_base.remembered_set().InvalidateRememberedSlotsInRange(
        object, reinterpret_cast<uint8_t*>(object) + object_size);
    // If this object was registered as remembered, remove it. Do that before
    // the page gets destroyed.
    heap_base.remembered_set().InvalidateRememberedSourceObject(header);
    if (header.IsMarked()) {
      base_page->DecrementMarkedBytes(
          header.IsLargeObject<AccessMode::kNonAtomic>()
              ? reinterpret_cast<const LargePage*>(
                    BasePage::FromPayload(&header))
                    ->PayloadSize()
              : header.AllocatedSize<AccessMode::kNonAtomic>());
    }
  }
#endif  // defined(CPPGC_YOUNG_GENERATION)

  if (base_page->is_large()) {  // Large object.
    base_page->space().RemovePage(base_page);
    base_page->heap().stats_collector()->NotifyExplicitFree(
        LargePage::From(base_page)->PayloadSize());
    LargePage::Destroy(LargePage::From(base_page));
  } else {  // Regular object.
    const size_t header_size = header.AllocatedSize();
    auto* normal_page = NormalPage::From(base_page);
    auto& normal_space = *static_cast<NormalPageSpace*>(&base_page->space());
    auto& lab = normal_space.linear_allocation_buffer();
    ConstAddress payload_end = header.ObjectEnd();
    SetMemoryInaccessible(&header, header_size);
    if (payload_end == lab.start()) {  // Returning to LAB.
      lab.Set(reinterpret_cast<Address>(&header), lab.size() + header_size);
      normal_page->object_start_bitmap().ClearBit(lab.start());
    } else {  // Returning to free list.
      base_page->heap().stats_collector()->NotifyExplicitFree(header_size);
      normal_space.free_list().Add({&header, header_size});
      // No need to update the bitmap as the same bit is reused for the free
      // list entry.
    }
  }
}

namespace {

bool Grow(HeapObjectHeader& header, BasePage& base_page, size_t new_size,
          size_t size_delta) {
  DCHECK_GE(new_size, header.AllocatedSize() + kAllocationGranularity);
  DCHECK_GE(size_delta, kAllocationGranularity);
  DCHECK(!base_page.is_large());

  auto& normal_space = *static_cast<NormalPageSpace*>(&base_page.space());
  auto& lab = normal_space.linear_allocation_buffer();
  if (lab.start() == header.ObjectEnd() && lab.size() >= size_delta) {
    // LABs are considered used memory which means that no allocated size
    // adjustments are needed.
    Address delta_start = lab.Allocate(size_delta);
    SetMemoryAccessible(delta_start, size_delta);
    header.SetAllocatedSize(new_size);
#if defined(CPPGC_YOUNG_GENERATION)
    if (auto& heap_base = *normal_space.raw_heap()->heap();
        heap_base.generational_gc_supported()) {
      if (header.IsMarked()) {
        base_page.IncrementMarkedBytes(
            header.AllocatedSize<AccessMode::kNonAtomic>());
      }
    }
#endif  // defined(CPPGC_YOUNG_GENERATION)
    return true;
  }
  return false;
}

bool Shrink(HeapObjectHeader& header, BasePage& base_page, size_t new_size,
            size_t size_delta) {
  DCHECK_GE(header.AllocatedSize(), new_size + kAllocationGranularity);
  DCHECK_GE(size_delta, kAllocationGranularity);
  DCHECK(!base_page.is_large());

  auto& normal_space = *static_cast<NormalPageSpace*>(&base_page.space());
  auto& lab = normal_space.linear_allocation_buffer();
  Address free_start = header.ObjectEnd() - size_delta;
  if (lab.start() == header.ObjectEnd()) {
    DCHECK_EQ(free_start, lab.start() - size_delta);
    // LABs are considered used memory which means that no allocated size
    // adjustments are needed.
    lab.Set(free_start, lab.size() + size_delta);
    SetMemoryInaccessible(lab.start(), size_delta);
    header.SetAllocatedSize(new_size);
  } else if (size_delta >= ObjectAllocator::kSmallestSpaceSize) {
    // Heuristic: Only return memory to the free list if the block is larger
    // than the smallest size class.
    SetMemoryInaccessible(free_start, size_delta);
    base_page.heap().stats_collector()->NotifyExplicitFree(size_delta);
    normal_space.free_list().Add({free_start, size_delta});
    NormalPage::From(&base_page)->object_start_bitmap().SetBit(free_start);
    header.SetAllocatedSize(new_size);
  }
#if defined(CPPGC_YOUNG_GENERATION)
  auto& heap = base_page.heap();
  if (heap.generational_gc_supported()) {
    heap.remembered_set().InvalidateRememberedSlotsInRange(
        free_start, free_start + size_delta);
    if (header.IsMarked()) {
      base_page.DecrementMarkedBytes(
          header.AllocatedSize<AccessMode::kNonAtomic>());
    }
  }
#endif  // defined(CPPGC_YOUNG_GENERATION)
  // Return success in any case, as we want to avoid that embedders start
  // copying memory because of small deltas.
  return true;
}

}  // namespace

bool ExplicitManagementImpl::Resize(void* object, size_t new_object_size) {
  // `object` is guaranteed to be of type GarbageCollected, so getting the
  // BasePage is okay for regular and large objects.
  BasePage* base_page = BasePage::FromPayload(object);

  if (InGC(base_page->heap())) {
    return false;
  }

  // TODO(chromium:1056170): Consider supporting large objects within certain
  // restrictions.
  if (base_page->is_large()) {
    return false;
  }

  const size_t new_size = RoundUp<kAllocationGranularity>(
      sizeof(HeapObjectHeader) + new_object_size);
  auto& header = HeapObjectHeader::FromObject(object);
  const size_t old_size = header.AllocatedSize();

  if (new_size > old_size) {
    return Grow(header, *base_page, new_size, new_size - old_size);
  } else if (old_size > new_size) {
    return Shrink(header, *base_page, new_size, old_size - new_size);
  }
  // Same size considering internal restrictions, e.g. alignment.
  return true;
}

}  // namespace internal
}  // namespace cppgc
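
The two implementations above back the public API declared in include/cppgc/explicit-management.h: subtle::FreeUnreferencedObject eagerly reclaims an object the embedder knows to be unreferenced, and subtle::Resize grows or shrinks an object's trailing storage in place. The following is a minimal usage sketch, not part of the file above, assuming an already-initialized cppgc heap; the Buffer type, the byte counts, and the UseExplicitManagement helper are hypothetical.

#include "cppgc/allocation.h"
#include "cppgc/explicit-management.h"
#include "cppgc/garbage-collected.h"
#include "cppgc/heap.h"

// Hypothetical type whose trailing inline storage is requested via
// cppgc::AdditionalBytes at allocation time.
class Buffer final : public cppgc::GarbageCollected<Buffer> {
 public:
  void Trace(cppgc::Visitor*) const {}
};

void UseExplicitManagement(cppgc::Heap* heap) {
  // Reserve 64 trailing bytes behind the object.
  Buffer* buffer = cppgc::MakeGarbageCollected<Buffer>(
      heap->GetAllocationHandle(), cppgc::AdditionalBytes(64));

  // Try to grow the trailing storage in place. Resize() returns false when
  // the request cannot be served, e.g. a GC is active, the object is large,
  // or (for growing) the linear allocation buffer does not directly follow
  // the object, so callers must be prepared to fall back to copying.
  if (!cppgc::subtle::Resize(*buffer, cppgc::AdditionalBytes(128))) {
    // Fall back to allocating a larger object and copying the contents.
  }

  // Eagerly reclaim the object once it is known to be unreferenced. This is
  // only a hint: ExplicitManagementImpl::FreeUnreferencedObject returns
  // early while a GC is in progress (see InGC() above).
  cppgc::subtle::FreeUnreferencedObject(heap->GetHeapHandle(), *buffer);
}

Note the fast path both operations share: when the object sits directly below the linear allocation buffer, freeing and resizing reduce to bump-pointer adjustments of the LAB. Otherwise FreeUnreferencedObject hands the block to the free list, and Shrink does so only when the freed tail is at least ObjectAllocator::kSmallestSpaceSize.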