v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
string-forwarding-table.cc
Go to the documentation of this file.
1// Copyright 2022 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
12#include "src/objects/slots.h"
15
16namespace v8 {
17namespace internal {
18
19StringForwardingTable::Block::Block(int capacity) : capacity_(capacity) {
20 static_assert(unused_element().ptr() == 0);
21 static_assert(kNullAddress == 0);
22 static_assert(sizeof(Record) % sizeof(Address) == 0);
23 static_assert(offsetof(Record, original_string_) == 0);
24 constexpr int kRecordPointerSize = sizeof(Record) / sizeof(Address);
25 MemsetPointer(reinterpret_cast<Address*>(&elements_[0]), 0,
26 capacity_ * kRecordPointerSize);
27}
28
// Class-specific placement-argument operator new: allocates one aligned chunk
// large enough for the Block header plus `capacity` trailing Records (the
// flexible-array-style elements_ member).
// NOTE(review): this Doxygen listing omits several source lines here (the size
// DCHECK, part of the alignment static_assert, and the aligned allocation and
// return statements) — consult the original file before editing.
29void* StringForwardingTable::Block::operator new(size_t size, int capacity) {
30 // Make sure the size given is the size of the Block structure.
32 // Make sure the Record class is trivial and has standard layout.
33 static_assert(std::is_trivial_v<Record>);
34 static_assert(std::is_standard_layout_v<Record>);
35 // Make sure that the elements_ array is at the end of Block, with no padding,
36 // so that subsequent elements can be accessed as offsets from elements_.
37 static_assert(offsetof(StringForwardingTable::Block, elements_) ==
38 sizeof(StringForwardingTable::Block) - sizeof(Record));
39 // Make sure that elements_ is aligned when StringTable::Block is aligned.
40 static_assert((alignof(StringForwardingTable::Block) +
43 0);
44
45 const size_t elements_size = capacity * sizeof(Record);
46 // Storage for the first element is already supplied by elements_, so subtract
47 // sizeof(Record).
48 const size_t new_size = size + elements_size - sizeof(Record);
51}
52
53void StringForwardingTable::Block::operator delete(void* block) {
54 AlignedFree(block);
55}
56
57std::unique_ptr<StringForwardingTable::Block> StringForwardingTable::Block::New(
58 int capacity) {
59 return std::unique_ptr<Block>(new (capacity) Block(capacity));
60}
61
66
71
72namespace {
73
74bool UpdateForwardedSlot(Tagged<HeapObject> object, OffHeapObjectSlot slot) {
75 MapWord map_word = object->map_word(kRelaxedLoad);
76 if (map_word.IsForwardingAddress()) {
77 Tagged<HeapObject> forwarded_object = map_word.ToForwardingAddress(object);
78 slot.Release_Store(forwarded_object);
79 return true;
80 }
81 return false;
82}
83
84bool UpdateForwardedSlot(Tagged<Object> object, OffHeapObjectSlot slot) {
85 if (!IsHeapObject(object)) return false;
86 return UpdateForwardedSlot(Cast<HeapObject>(object), slot);
87}
88
89} // namespace
90
// Presumably Block::UpdateAfterYoungEvacuation (the signature line is missing
// from this listing — TODO confirm against the original file). Walks the first
// `up_to_index` records and re-points original-string slots at objects that
// were evacuated out of the young-generation from-page.
// NOTE(review): several source lines are omitted by this listing (inside the
// InFromPage branch, the died-in-young-space handling, and the DEBUG check
// body); do not edit this region without the full source.
92 PtrComprCageBase cage_base, int up_to_index) {
93 for (int index = 0; index < up_to_index; ++index) {
94 OffHeapObjectSlot slot = record(index)->OriginalStringSlot();
95 Tagged<Object> original = slot.Acquire_Load(cage_base);
// Skip records whose original-string slot holds a non-heap value.
96 if (!IsHeapObject(original)) continue;
97 Tagged<HeapObject> object = Cast<HeapObject>(original);
98 if (Heap::InFromPage(object)) {
100 const bool was_forwarded = UpdateForwardedSlot(object, slot);
101 if (!was_forwarded) {
102 // The object died in young space.
104 }
105 } else {
// Objects outside the from-page must not carry a forwarding map word.
106 DCHECK(!object->map_word(kRelaxedLoad).IsForwardingAddress());
107 }
108// No need to update forwarded (internalized) strings as they are never
109// in young space.
110#ifdef DEBUG
111 Tagged<Object> forward =
112 record(index)->ForwardStringObjectOrHash(cage_base);
113 if (IsHeapObject(forward)) {
115 }
116#endif
117 }
118}
119
// Presumably Block::UpdateAfterFullEvacuation (signature line missing from
// this listing — TODO confirm). After a full GC, both the original string and
// the forwarded (internalized) string may have been evacuated, so both slots
// of each record are re-pointed via their forwarding addresses.
121 PtrComprCageBase cage_base, int up_to_index) {
122 for (int index = 0; index < up_to_index; ++index) {
123 OffHeapObjectSlot original_slot = record(index)->OriginalStringSlot();
124 Tagged<Object> original = original_slot.Acquire_Load(cage_base);
125 if (!IsHeapObject(original)) continue;
126 UpdateForwardedSlot(Cast<HeapObject>(original), original_slot);
127 // During mark compact the forwarded (internalized) string may have been
128 // evacuated.
129 OffHeapObjectSlot forward_slot = record(index)->ForwardStringOrHashSlot();
130 Tagged<Object> forward = forward_slot.Acquire_Load(cage_base);
131 UpdateForwardedSlot(forward, forward_slot);
132 }
133}
134
// BlockVector constructor and destructor (both signature lines are missing
// from this listing — TODO confirm against the original file). The vector
// owns a raw allocator-provided array of Block pointers.
136 : allocator_(Allocator()), capacity_(capacity), size_(0) {
137 begin_ = allocator_.allocate(capacity);
138}
139
// Destructor: returns the pointer array to the allocator. The Blocks
// themselves are deleted by the owning StringForwardingTable, not here.
141 allocator_.deallocate(begin_, capacity());
142}
143
144// static
// BlockVector::Grow (the function-name line is missing from this listing —
// TODO confirm). Builds a larger vector and copies the existing Block
// pointers over; the caller must hold `mutex` so no writer races the copy.
// Readers may still use the old vector concurrently, which is why the old
// storage is kept alive by the caller rather than freed here.
145std::unique_ptr<StringForwardingTable::BlockVector>
147 StringForwardingTable::BlockVector* data, size_t capacity,
148 const base::Mutex& mutex) {
149 mutex.AssertHeld();
150 std::unique_ptr<BlockVector> new_data =
151 std::make_unique<BlockVector>(capacity);
152 // Copy pointers to blocks from the old to the new vector.
153 for (size_t i = 0; i < data->size(); i++) {
154 new_data->begin_[i] = data->LoadBlock(i);
155 }
156 new_data->size_ = data->size();
157 return new_data;
158}
159
164
// StringForwardingTable destructor (signature line missing from this listing
// — TODO confirm): deletes every Block still referenced by the current
// BlockVector. Relaxed load is sufficient; no other thread runs at teardown.
166 BlockVector* blocks = blocks_.load(std::memory_order_relaxed);
167 for (uint32_t block_index = 0; block_index < blocks->size(); block_index++) {
168 delete blocks->LoadBlock(block_index);
169 }
170}
171
// Presumably InitializeBlockVector (several lines are omitted by this
// listing, including the signature and the emplace_back receiver) — it
// installs the initial BlockVector into blocks_. TODO confirm.
174 .emplace_back(std::make_unique<BlockVector>(
176 .get();
178 blocks_.store(blocks, std::memory_order_relaxed);
179}
180
// EnsureCapacity (the signature's first line is missing from this listing —
// per the cross-reference it returns BlockVector* and takes the block index).
// Lock-free fast path: if `block_index` already exists, return the current
// vector. Slow path: take grow_mutex_ and use double-checked locking to grow
// the vector (doubling capacity) and append new Blocks as needed.
182 uint32_t block_index) {
183 BlockVector* blocks = blocks_.load(std::memory_order_acquire);
184 if (V8_UNLIKELY(block_index >= blocks->size())) {
185 base::MutexGuard table_grow_guard(&grow_mutex_);
186 // Reload the vector, as another thread could have grown it.
// Relaxed is fine here: the mutex acquisition already synchronizes with any
// prior release-store of blocks_ under the same mutex.
187 blocks = blocks_.load(std::memory_order_relaxed);
188 // Check again if we need to grow under lock.
189 if (block_index >= blocks->size()) {
190 // Grow the vector if the block to insert is greater than the vectors
191 // capacity.
192 if (block_index >= blocks->capacity()) {
193 std::unique_ptr<BlockVector> new_blocks =
194 BlockVector::Grow(blocks, blocks->capacity() * 2, grow_mutex_);
// Old vectors stay alive in block_vector_storage_ so concurrent readers that
// still hold the previous pointer remain valid.
195 block_vector_storage_.push_back(std::move(new_blocks));
196 blocks = block_vector_storage_.back().get();
197 blocks_.store(blocks, std::memory_order_release);
198 }
199 const uint32_t capacity = CapacityForBlock(block_index);
200 std::unique_ptr<Block> new_block = Block::New(capacity);
201 blocks->AddBlock(std::move(new_block));
202 }
203 }
204 return blocks;
205}
206
// AddForwardString (signature line missing from this listing — TODO confirm):
// appends a (string -> internalized forward_to) record and returns its index.
// NOTE(review): the second argument of the first DCHECK_IMPLIES is also
// omitted by this listing.
208 Tagged<String> forward_to) {
209 DCHECK_IMPLIES(!v8_flags.always_use_string_forwarding_table,
211 DCHECK_IMPLIES(!v8_flags.always_use_string_forwarding_table,
212 HeapLayout::InAnySharedSpace(forward_to));
// Atomically claim the next free slot; concurrent adders each get a unique
// index.
213 int index = next_free_index_++;
214 uint32_t index_in_block;
215 const uint32_t block_index = BlockForIndex(index, &index_in_block);
216
217 BlockVector* blocks = EnsureCapacity(block_index);
218 Block* block = blocks->LoadBlock(block_index, kAcquireLoad);
219 block->record(index_in_block)->SetInternalized(string, forward_to);
220 return index;
221}
222
// UpdateForwardString (signature line missing from this listing — TODO
// confirm): overwrites the forward-string slot of an existing record.
224 Tagged<String> forward_to) {
225 CHECK_LT(index, size());
226 uint32_t index_in_block;
227 const uint32_t block_index = BlockForIndex(index, &index_in_block);
228 Block* block = blocks_.load(std::memory_order_acquire)
229 ->LoadBlock(block_index, kAcquireLoad);
230 block->record(index_in_block)->set_forward_string(forward_to);
231}
232
template <typename T>
// AddExternalResourceAndHash (the signature line is missing from this listing
// — TODO confirm). Appends a record pointing `string` at an external resource
// plus its raw hash, returning the new index. Whether the resource is
// one-byte is derived statically from T.
235 T* resource,
236 uint32_t raw_hash) {
237 constexpr bool is_one_byte =
238 std::is_base_of_v<v8::String::ExternalOneByteStringResource, T>;
239
// NOTE(review): the second argument of this DCHECK_IMPLIES is omitted by the
// listing.
240 DCHECK_IMPLIES(!v8_flags.always_use_string_forwarding_table,
242 int index = next_free_index_++;
243 uint32_t index_in_block;
244 const uint32_t block_index = BlockForIndex(index, &index_in_block);
245
246 BlockVector* blocks = EnsureCapacity(block_index);
247 Block* block = blocks->LoadBlock(block_index, kAcquireLoad);
248 block->record(index_in_block)
249 ->SetExternal(string, resource, is_one_byte, raw_hash);
250 return index;
251}
252
// Explicit template instantiations for the one-byte and two-byte external
// resource types (most of each EXPORT_TEMPLATE_DEFINE statement is omitted by
// this listing).
256 uint32_t raw_hash);
260 uint32_t raw_hash);
261
262template <typename T>
263bool StringForwardingTable::TryUpdateExternalResource(int index, T* resource) {
264 constexpr bool is_one_byte =
265 std::is_base_of_v<v8::String::ExternalOneByteStringResource, T>;
266
267 CHECK_LT(index, size());
268 uint32_t index_in_block;
269 const uint32_t block_index = BlockForIndex(index, &index_in_block);
270 Block* block = blocks_.load(std::memory_order_acquire)
271 ->LoadBlock(block_index, kAcquireLoad);
272 return block->record(index_in_block)
273 ->TryUpdateExternalResource(resource, is_one_byte);
274}
275
// Explicit instantiations of TryUpdateExternalResource (most of each
// statement is omitted by this listing).
278 int index, v8::String::ExternalOneByteStringResource* resource);
282
// GetForwardString (the return-type/name line is missing from this listing —
// TODO confirm): reads the forward-string slot of the record at `index`.
284 PtrComprCageBase cage_base, int index) const {
285 CHECK_LT(index, size());
286 uint32_t index_in_block;
287 const uint32_t block_index = BlockForIndex(index, &index_in_block);
288 Block* block = blocks_.load(std::memory_order_acquire)
289 ->LoadBlock(block_index, kAcquireLoad);
290 return block->record(index_in_block)->forward_string(cage_base);
291}
292
293// static
// GetForwardStringAddress (signature line missing from this listing — TODO
// confirm): static convenience wrapper returning the forward string's raw
// tagged pointer value, e.g. for use from generated code.
295 int index) {
296 return isolate->string_forwarding_table()
297 ->GetForwardString(isolate, index)
298 .ptr();
299}
300
// GetRawHash (signature line missing from this listing — TODO confirm):
// reads the raw hash stored in the record at `index`.
302 int index) const {
303 CHECK_LT(index, size());
304 uint32_t index_in_block;
305 const uint32_t block_index = BlockForIndex(index, &index_in_block);
306 Block* block = blocks_.load(std::memory_order_acquire)
307 ->LoadBlock(block_index, kAcquireLoad);
308 return block->record(index_in_block)->raw_hash(cage_base);
309}
310
311// static
// GetRawHashStatic (signature line missing — TODO confirm): static wrapper
// over GetRawHash for callers that only have an Isolate*.
313 return isolate->string_forwarding_table()->GetRawHash(isolate, index);
314}
315
// Returns the external resource stored in the record at `index`, setting
// *is_one_byte accordingly. (The return-type line of the signature is missing
// from this listing; per the cross-reference it returns
// v8::String::ExternalStringResourceBase*.)
317StringForwardingTable::GetExternalResource(int index, bool* is_one_byte) const {
318 CHECK_LT(index, size());
319 uint32_t index_in_block;
320 const uint32_t block_index = BlockForIndex(index, &index_in_block);
321 Block* block = blocks_.load(std::memory_order_acquire)
322 ->LoadBlock(block_index, kAcquireLoad);
323 return block->record(index_in_block)->external_resource(is_one_byte);
324}
325
// Presumably TearDown (the signature line is missing from this listing —
// TODO confirm): disposes every live external resource exactly once (the
// disposed_resources set deduplicates resources shared by multiple records),
// then resets the table.
327 std::unordered_set<Address> disposed_resources;
328 IterateElements([this, &disposed_resources](Record* record) {
// Skip records whose original string was marked deleted.
329 if (record->OriginalStringObject(isolate_) != deleted_element()) {
330 Address resource = record->ExternalResourceAddress();
331 if (resource != kNullAddress && disposed_resources.count(resource) == 0) {
332 record->DisposeExternalResource();
333 disposed_resources.insert(resource);
334 }
335 }
336 });
337 Reset();
338}
339
// Presumably Reset (the signature line and the trailing re-initialization
// lines are missing from this listing — TODO confirm): deletes all Blocks and
// clears the retained BlockVector storage. Only legal during a GC pause, as
// the DCHECK below asserts.
342 DCHECK_NE(isolate_->heap()->gc_state(), Heap::NOT_IN_GC);
343
344 BlockVector* blocks = blocks_.load(std::memory_order_relaxed);
345 for (uint32_t block_index = 0; block_index < blocks->size(); ++block_index) {
346 delete blocks->LoadBlock(block_index);
347 }
348
349 block_vector_storage_.clear();
352}
353
// Table-level UpdateAfterYoungEvacuation (the signature line and the
// per-block delegation calls are missing from this listing — TODO confirm):
// visits every block, delegating to Block::UpdateAfterYoungEvacuation; full
// blocks are processed to capacity, the last block only up to the live size.
355 // This is only used for the Scavenger.
356 DCHECK(!v8_flags.minor_ms);
357 DCHECK(v8_flags.always_use_string_forwarding_table);
358
359 if (empty()) return;
360
361 BlockVector* blocks = blocks_.load(std::memory_order_relaxed);
362 const unsigned int last_block_index =
363 static_cast<unsigned int>(blocks->size() - 1);
364 for (unsigned int block_index = 0; block_index < last_block_index;
365 ++block_index) {
366 Block* block = blocks->LoadBlock(block_index, kAcquireLoad);
368 }
369 // Handle last block separately, as it is not filled to capacity.
370 const int max_index = IndexInBlock(size() - 1, last_block_index) + 1;
371 blocks->LoadBlock(last_block_index, kAcquireLoad)
373}
374
// Table-level UpdateAfterFullEvacuation (the signature line and the
// per-block delegation calls are missing from this listing — TODO confirm):
// same traversal shape as UpdateAfterYoungEvacuation, delegating to
// Block::UpdateAfterFullEvacuation for each block.
376 if (empty()) return;
377
378 BlockVector* blocks = blocks_.load(std::memory_order_relaxed);
379 const unsigned int last_block_index =
380 static_cast<unsigned int>(blocks->size() - 1);
381 for (unsigned int block_index = 0; block_index < last_block_index;
382 ++block_index) {
383 Block* block = blocks->LoadBlock(block_index, kAcquireLoad);
385 }
386 // Handle last block separately, as it is not filled to capacity.
387 const int max_index = IndexInBlock(size() - 1, last_block_index) + 1;
388 blocks->LoadBlock(last_block_index, kAcquireLoad)
390}
391
392} // namespace internal
393} // namespace v8
RegisterAllocator * allocator_
Tagged< Object > Acquire_Load() const
Definition slots-inl.h:74
void Release_Store(Tagged< Object > value) const
Definition slots-inl.h:104
static V8_INLINE bool InYoungGeneration(Tagged< Object > object)
static V8_INLINE bool InWritableSharedSpace(Tagged< HeapObject > object)
static V8_INLINE bool InAnySharedSpace(Tagged< HeapObject > object)
HeapState gc_state() const
Definition heap.h:521
IsolateSafepoint * safepoint()
Definition heap.h:579
bool IsForwardingAddress() const
Tagged< HeapObject > ToForwardingAddress(Tagged< HeapObject > map_word_host)
static std::unique_ptr< BlockVector > Grow(BlockVector *data, size_t capacity, const base::Mutex &mutex)
void UpdateAfterFullEvacuation(PtrComprCageBase cage_base)
static std::unique_ptr< Block > New(int capacity)
void UpdateAfterYoungEvacuation(PtrComprCageBase cage_base)
void SetInternalized(Tagged< String > string, Tagged< String > forward_to)
uint32_t raw_hash(PtrComprCageBase cage_base) const
void SetExternal(Tagged< String > string, v8::String::ExternalStringResourceBase *, bool is_one_byte, uint32_t raw_hash)
v8::String::ExternalStringResourceBase * external_resource(bool *is_one_byte) const
static uint32_t IndexInBlock(int index, uint32_t block)
bool TryUpdateExternalResource(int index, T *resource)
static constexpr Tagged< Smi > unused_element()
int AddExternalResourceAndHash(Tagged< String > string, T *resource, uint32_t raw_hash)
static Address GetForwardStringAddress(Isolate *isolate, int index)
void UpdateForwardString(int index, Tagged< String > forward_to)
V8_EXPORT_PRIVATE uint32_t GetRawHash(PtrComprCageBase cage_base, int index) const
v8::String::ExternalStringResourceBase * GetExternalResource(int index, bool *is_one_byte) const
static uint32_t GetRawHashStatic(Isolate *isolate, int index)
static uint32_t CapacityForBlock(uint32_t block)
static uint32_t BlockForIndex(int index, uint32_t *index_in_block_out)
V8_INLINE void IterateElements(Func &&callback)
static constexpr Tagged< Smi > deleted_element()
BlockVector * EnsureCapacity(uint32_t block)
Tagged< String > GetForwardString(PtrComprCageBase cage_base, int index) const
std::vector< std::unique_ptr< BlockVector > > block_vector_storage_
int AddForwardString(Tagged< String > string, Tagged< String > forward_to)
V8_INLINE constexpr StorageType ptr() const
const int size_
Definition assembler.cc:132
#define EXPORT_TEMPLATE_DEFINE(export)
Handle< FixedArray > elements_
Definition isolate.cc:1119
DurationRecord record
RpoNumber block
base::Mutex mutex
constexpr int kTaggedSize
Definition globals.h:542
Tagged(T object) -> Tagged< T >
void * AlignedAllocWithRetry(size_t size, size_t alignment)
constexpr int kSystemPointerSize
Definition globals.h:410
V8_INLINE constexpr bool IsHeapObject(TaggedImpl< kRefType, StorageType > obj)
Definition objects.h:669
V8_EXPORT_PRIVATE FlagValues v8_flags
SlotTraits::TOffHeapObjectSlot OffHeapObjectSlot
Definition globals.h:1258
void AlignedFree(void *ptr)
static constexpr Address kNullAddress
Definition v8-internal.h:53
void MemsetPointer(FullObjectSlot start, Tagged< Object > value, size_t counter)
Definition slots-inl.h:507
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
static constexpr RelaxedLoadTag kRelaxedLoad
Definition globals.h:2909
static constexpr AcquireLoadTag kAcquireLoad
Definition globals.h:2908
#define DCHECK_LE(v1, v2)
Definition logging.h:490
#define CHECK_LT(lhs, rhs)
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define V8_EXPORT_PRIVATE
Definition macros.h:460
#define V8_UNLIKELY(condition)
Definition v8config.h:660