v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
marking-inl.h
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_MARKING_INL_H_
#define V8_HEAP_MARKING_INL_H_

#include "src/heap/marking.h"
// Include the non-inl header before the rest of the headers.

#include "src/base/macros.h"
#include "src/heap/heap-inl.h"
#include "src/heap/spaces.h"

namespace v8::internal {

template <>
inline void MarkingBitmap::SetBitsInCell<AccessMode::NON_ATOMIC>(
    uint32_t cell_index, MarkBit::CellType mask) {
  cells()[cell_index] |= mask;
}

template <>
inline void MarkingBitmap::SetBitsInCell<AccessMode::ATOMIC>(
    uint32_t cell_index, MarkBit::CellType mask) {
  base::AsAtomicWord::Relaxed_SetBits(cells() + cell_index, mask, mask);
}

template <>
inline void MarkingBitmap::ClearBitsInCell<AccessMode::NON_ATOMIC>(
    uint32_t cell_index, MarkBit::CellType mask) {
  cells()[cell_index] &= ~mask;
}

template <>
inline void MarkingBitmap::ClearBitsInCell<AccessMode::ATOMIC>(
    uint32_t cell_index, MarkBit::CellType mask) {
  base::AsAtomicWord::Relaxed_SetBits(cells() + cell_index,
                                      static_cast<MarkBit::CellType>(0u), mask);
}

template <>
inline void MarkingBitmap::ClearCellRangeRelaxed<AccessMode::ATOMIC>(
    uint32_t start_cell_index, uint32_t end_cell_index) {
  base::AtomicWord* cell_base = reinterpret_cast<base::AtomicWord*>(cells());
  for (uint32_t i = start_cell_index; i < end_cell_index; i++) {
    base::Relaxed_Store(cell_base + i, 0);
  }
}

template <>
inline void MarkingBitmap::ClearCellRangeRelaxed<AccessMode::NON_ATOMIC>(
    uint32_t start_cell_index, uint32_t end_cell_index) {
  for (uint32_t i = start_cell_index; i < end_cell_index; i++) {
    cells()[i] = 0;
  }
}

template <>
inline void MarkingBitmap::SetCellRangeRelaxed<AccessMode::ATOMIC>(
    uint32_t start_cell_index, uint32_t end_cell_index) {
  base::AtomicWord* cell_base = reinterpret_cast<base::AtomicWord*>(cells());
  for (uint32_t i = start_cell_index; i < end_cell_index; i++) {
    base::Relaxed_Store(cell_base + i,
                        std::numeric_limits<MarkBit::CellType>::max());
  }
}

template <>
inline void MarkingBitmap::SetCellRangeRelaxed<AccessMode::NON_ATOMIC>(
    uint32_t start_cell_index, uint32_t end_cell_index) {
  for (uint32_t i = start_cell_index; i < end_cell_index; i++) {
    cells()[i] = std::numeric_limits<MarkBit::CellType>::max();
  }
}

template <AccessMode mode>
inline void MarkingBitmap::Clear() {
  ClearCellRangeRelaxed<mode>(0, kCellsCount);
  if constexpr (mode == AccessMode::ATOMIC) {
    // This fence prevents re-ordering of publishing stores with the mark-bit
    // setting stores.
    base::SeqCst_MemoryFence();
  }
}

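// Note on SetRange and ClearRange below: the ranges are half-open,
// [start_index, end_index), and IndexInCellMask(i) is the single-bit mask for
// bit i within its cell. After end_index is decremented, the in-cell
// expression end_mask | (end_mask - start_mask) covers every bit from
// start_index through the last bit of the range. For example, with the start
// bit at 3 and the last bit at 6 in the same cell:
//   0b01000000 | (0b01000000 - 0b00001000) == 0b01111000.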
template <AccessMode mode>
inline void MarkingBitmap::SetRange(MarkBitIndex start_index,
                                    MarkBitIndex end_index) {
  if (start_index >= end_index) return;
  end_index--;

  const CellIndex start_cell_index = IndexToCell(start_index);
  const MarkBit::CellType start_index_mask = IndexInCellMask(start_index);
  const CellIndex end_cell_index = IndexToCell(end_index);
  const MarkBit::CellType end_index_mask = IndexInCellMask(end_index);

  if (start_cell_index != end_cell_index) {
    // Firstly, fill all bits from the start address to the end of the first
    // cell with 1s.
    SetBitsInCell<mode>(start_cell_index, ~(start_index_mask - 1));
    // Then fill all in between cells with 1s.
    SetCellRangeRelaxed<mode>(start_cell_index + 1, end_cell_index);
    // Finally, fill all bits until the end address in the last cell with 1s.
    SetBitsInCell<mode>(end_cell_index, end_index_mask | (end_index_mask - 1));
  } else {
    SetBitsInCell<mode>(start_cell_index,
                        end_index_mask | (end_index_mask - start_index_mask));
  }
  if (mode == AccessMode::ATOMIC) {
    // This fence prevents re-ordering of publishing stores with the mark-bit
    // setting stores.
    base::SeqCst_MemoryFence();
  }
}

template <AccessMode mode>
inline void MarkingBitmap::ClearRange(MarkBitIndex start_index,
                                      MarkBitIndex end_index) {
  if (start_index >= end_index) return;
  end_index--;

  const CellIndex start_cell_index = IndexToCell(start_index);
  const MarkBit::CellType start_index_mask = IndexInCellMask(start_index);
  const CellIndex end_cell_index = IndexToCell(end_index);
  const MarkBit::CellType end_index_mask = IndexInCellMask(end_index);

  if (start_cell_index != end_cell_index) {
    // Firstly, clear all bits from the start address to the end of the first
    // cell.
    ClearBitsInCell<mode>(start_cell_index, ~(start_index_mask - 1));
    // Then clear all in between cells.
    ClearCellRangeRelaxed<mode>(start_cell_index + 1, end_cell_index);
    // Finally, clear all bits until the end address in the last cell.
    ClearBitsInCell<mode>(end_cell_index,
                          end_index_mask | (end_index_mask - 1));
  } else {
    ClearBitsInCell<mode>(start_cell_index,
                          end_index_mask | (end_index_mask - start_index_mask));
  }
  if (mode == AccessMode::ATOMIC) {
    // This fence prevents re-ordering of publishing stores with the mark-bit
    // clearing stores.
    base::SeqCst_MemoryFence();
  }
}

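// The marking bitmap lives at a fixed offset inside the page metadata, so it
// can be recovered from any address on a page via the MutablePageMetadata
// lookup below.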
// static
MarkingBitmap* MarkingBitmap::FromAddress(Address address) {
  Address metadata_address =
      reinterpret_cast<Address>(MutablePageMetadata::FromAddress(address));
  return Cast(metadata_address + MutablePageMetadata::MarkingBitmapOffset());
}

// static
MarkBit MarkingBitmap::MarkBitFromAddress(Address address) {
  return MarkBitFromAddress(FromAddress(address), address);
}

// static
MarkBit MarkingBitmap::MarkBitFromAddress(MarkingBitmap* bitmap,
                                          Address address) {
  DCHECK_EQ(bitmap, FromAddress(address));
  const auto index = AddressToIndex(address);
  const auto mask = IndexInCellMask(index);
  MarkBit::CellType* cell = bitmap->cells() + IndexToCell(index);
  return MarkBit(cell, mask);
}

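// Addresses map to mark-bit indices at tagged-word granularity: the offset of
// an address within its chunk, shifted right by kTaggedSizeLog2, selects the
// bit, so every tagged word on the page has exactly one mark bit.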
// static
constexpr MarkBitIndex MarkingBitmap::AddressToIndex(Address address) {
  return MemoryChunk::AddressToOffset(address) >> kTaggedSizeLog2;
}

// static
constexpr MarkBitIndex MarkingBitmap::LimitAddressToIndex(
    Address address) {
  if (MemoryChunk::IsAligned(address)) return kLength;
  return AddressToIndex(address);
}

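// Scans the marking bitmap backwards from maybe_inner_ptr and returns the
// address of the closest preceding mark bit that is set while its predecessor
// is clear, i.e. the base of the nearest marked object at or before the given
// pointer; falls back to page->area_start() if no such bit exists.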
// static
Address MarkingBitmap::FindPreviousValidObject(const PageMetadata* page,
                                               Address maybe_inner_ptr) {
  DCHECK(page->Contains(maybe_inner_ptr));
  const auto* bitmap = page->marking_bitmap();
  const MarkBit::CellType* cells = bitmap->cells();

  // The first actual bit of the bitmap, corresponding to page->area_start(),
  // is at start_index which is somewhere in (not necessarily at the start of)
  // start_cell_index.
  const auto start_index = MarkingBitmap::AddressToIndex(page->area_start());
  const auto start_cell_index = MarkingBitmap::IndexToCell(start_index);
  // We assume that all markbits before start_index are clear:
  // SLOW_DCHECK(bitmap->AllBitsClearInRange(0, start_index));
  // This has already been checked for the entire bitmap before starting marking
  // by MarkCompactCollector::VerifyMarkbitsAreClean.

  const auto index = MarkingBitmap::AddressToIndex(maybe_inner_ptr);
  auto cell_index = MarkingBitmap::IndexToCell(index);
  const auto index_in_cell = MarkingBitmap::IndexInCell(index);
  DCHECK_GT(MarkingBitmap::kBitsPerCell, index_in_cell);
  auto cell = cells[cell_index];

  // Clear the bits corresponding to higher addresses in the cell.
  cell &= ((~static_cast<MarkBit::CellType>(0)) >>
           (MarkingBitmap::kBitsPerCell - index_in_cell - 1));

  // Traverse the bitmap backwards, until we find a markbit that is set and
  // whose previous markbit (if it exists) is unset.
  // First, iterate backwards to find a cell with any set markbit.
  while (cell == 0 && cell_index > start_cell_index) cell = cells[--cell_index];
  if (cell == 0) {
    DCHECK_EQ(start_cell_index, cell_index);
    // We have reached the start of the page.
    return page->area_start();
  }

  // We have found such a cell.
  const auto leading_zeros = base::bits::CountLeadingZeros(cell);
  const auto leftmost_ones =
      base::bits::CountLeadingZeros(~(cell << leading_zeros));
  const auto index_of_last_leftmost_one =
      MarkingBitmap::kBitsPerCell - leading_zeros - leftmost_ones;
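  // Illustration with 8-bit cells for brevity (the real cell is wider): for
  // cell == 0b00110100, CountLeadingZeros(cell) == 2, cell << 2 == 0b11010000,
  // and CountLeadingZeros(~(cell << 2)) == 2 == leftmost_ones, so
  // index_of_last_leftmost_one == 8 - 2 - 2 == 4, the lowest bit of the
  // leftmost run of set bits.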

  const MemoryChunk* chunk = page->Chunk();

  // If the leftmost sequence of set bits does not reach the start of the cell,
  // we found it.
  if (index_of_last_leftmost_one > 0) {
    return chunk->address() + MarkingBitmap::IndexToAddressOffset(
                                  cell_index * MarkingBitmap::kBitsPerCell +
                                  index_of_last_leftmost_one);
  }

  // The leftmost sequence of set bits reaches the start of the cell. We must
  // keep traversing backwards until we find the first unset markbit.
  if (cell_index == start_cell_index) {
    // We have reached the start of the page.
    return page->area_start();
  }

  // Iterate backwards to find a cell with any unset markbit.
  do {
    cell = cells[--cell_index];
  } while (~cell == 0 && cell_index > start_cell_index);
  if (~cell == 0) {
    DCHECK_EQ(start_cell_index, cell_index);
    // We have reached the start of the page.
    return page->area_start();
  }

  // We have found such a cell.
  const auto leading_ones = base::bits::CountLeadingZeros(~cell);
  const auto index_of_last_leading_one =
      MarkingBitmap::kBitsPerCell - leading_ones;
  DCHECK_LT(0, index_of_last_leading_one);
  return chunk->address() + MarkingBitmap::IndexToAddressOffset(
                                cell_index * MarkingBitmap::kBitsPerCell +
                                index_of_last_leading_one);
}

// static
MarkBit MarkBit::From(Address address) {
  return MarkingBitmap::MarkBitFromAddress(address);
}

// static
MarkBit MarkBit::From(Tagged<HeapObject> heap_object) {
  return MarkingBitmap::MarkBitFromAddress(heap_object.ptr());
}

// static
std::optional<MarkingHelper::WorklistTarget> MarkingHelper::ShouldMarkObject(
    Heap* heap, Tagged<HeapObject> object) {
  const auto* chunk = MemoryChunk::FromHeapObject(object);
  const auto flags = chunk->GetFlags();
  if (flags & MemoryChunk::READ_ONLY_HEAP) {
    return {};
  }
  if (v8_flags.black_allocated_pages &&
      V8_UNLIKELY(flags & MemoryChunk::BLACK_ALLOCATED)) {
    DCHECK(!(flags & MemoryChunk::kIsInYoungGenerationMask));
    return {};
  }
  if (V8_LIKELY(!(flags & MemoryChunk::IN_WRITABLE_SHARED_SPACE))) {
    return {MarkingHelper::WorklistTarget::kRegular};
  }
  // Object in shared writable space. Only mark it if the Isolate owns the
  // shared space.
  //
  // TODO(340989496): Speed up check here by keeping the flag on Heap.
  if (heap->isolate()->is_shared_space_isolate()) {
    return {MarkingHelper::WorklistTarget::kRegular};
  }
  return {};
}

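// GetLivenessMode performs the same page-flag classification as
// ShouldMarkObject above, but expresses the result as a LivenessMode
// (always-live versus tracked by mark bits) instead of a worklist target.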
// static
MarkingHelper::LivenessMode MarkingHelper::GetLivenessMode(
    Heap* heap, Tagged<HeapObject> object) {
  const auto* chunk = MemoryChunk::FromHeapObject(object);
  const auto flags = chunk->GetFlags();
  if (flags & MemoryChunk::READ_ONLY_HEAP) {
    return MarkingHelper::LivenessMode::kAlwaysLive;
  }
  if (v8_flags.black_allocated_pages &&
      V8_UNLIKELY(flags & MemoryChunk::BLACK_ALLOCATED)) {
    return MarkingHelper::LivenessMode::kAlwaysLive;
  }
  if (V8_LIKELY(!(flags & MemoryChunk::IN_WRITABLE_SHARED_SPACE))) {
    return MarkingHelper::LivenessMode::kMarkbit;
  }
  // Object in shared writable space. Only mark it if the Isolate owns the
  // shared space.
  //
  // TODO(340989496): Speed up check here by keeping the flag on Heap.
  if (heap->isolate()->is_shared_space_isolate()) {
    return MarkingHelper::LivenessMode::kMarkbit;
  }
  return MarkingHelper::LivenessMode::kAlwaysLive;
}

// static
template <typename MarkingStateT>
bool MarkingHelper::IsMarkedOrAlwaysLive(Heap* heap,
                                         MarkingStateT* marking_state,
                                         Tagged<HeapObject> object) {
  return (MarkingHelper::GetLivenessMode(heap, object) ==
          MarkingHelper::LivenessMode::kAlwaysLive) ||
         marking_state->IsMarked(object);
}

// static
template <typename MarkingStateT>
bool MarkingHelper::IsUnmarkedAndNotAlwaysLive(Heap* heap,
                                               MarkingStateT* marking_state,
                                               Tagged<HeapObject> object) {
  return (MarkingHelper::GetLivenessMode(heap, object) !=
          MarkingHelper::LivenessMode::kAlwaysLive) &&
         marking_state->IsUnmarked(object);
}

// static
template <typename MarkingState>
bool MarkingHelper::TryMarkAndPush(Heap* heap,
                                   MarkingWorklists::Local* marking_worklist,
                                   MarkingState* marking_state,
                                   WorklistTarget target_worklist,
                                   Tagged<HeapObject> object) {
  DCHECK(heap->Contains(object));
  if (marking_state->TryMark(object)) {
    if (V8_LIKELY(target_worklist == WorklistTarget::kRegular)) {
      marking_worklist->Push(object);
    }
    return true;
  }
  return false;
}

}  // namespace v8::internal

#endif  // V8_HEAP_MARKING_INL_H_
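For orientation, a minimal sketch of how the helpers above fit together during marking. This is an illustration only: `local_worklist`, `marking_state`, and `object` stand in for state a marker obtains elsewhere and are not names defined in this header.

  // Sketch, not part of marking-inl.h: mark an object and queue it for tracing.
  if (auto target = MarkingHelper::ShouldMarkObject(heap, object)) {
    // TryMarkAndPush sets the mark bit; regular targets are additionally
    // pushed onto the local worklist for later tracing.
    MarkingHelper::TryMarkAndPush(heap, local_worklist, marking_state, *target,
                                  object);
  }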