v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
js-array-buffer.cc
Go to the documentation of this file.
1// Copyright 2018 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
11
12namespace v8 {
13namespace internal {
14
15namespace {
16
17// ES#sec-canonicalnumericindexstring
18// Returns true if the lookup_key represents a valid index string.
19bool CanonicalNumericIndexString(Isolate* isolate,
20 const PropertyKey& lookup_key,
21 bool* is_minus_zero) {
22 // 1. Assert: Type(argument) is String.
23 DCHECK(lookup_key.is_element() || IsString(*lookup_key.name()));
24 *is_minus_zero = false;
25 if (lookup_key.is_element()) return true;
26
27 DirectHandle<String> key = Cast<String>(lookup_key.name());
28
29 // 3. Let n be ! ToNumber(argument).
30 DirectHandle<Object> result = String::ToNumber(isolate, key);
31 if (IsMinusZero(*result)) {
32 // 2. If argument is "-0", return -0𝔽.
33 // We are not performing SaveValue check for -0 because it'll be rejected
34 // anyway.
35 *is_minus_zero = true;
36 } else {
37 // 4. If SameValue(! ToString(n), argument) is false, return undefined.
38 DirectHandle<String> str =
39 Object::ToString(isolate, result).ToHandleChecked();
40 // Avoid treating strings like "2E1" and "20" as the same key.
41 if (!Object::SameValue(*str, *key)) return false;
42 }
43 return true;
44}
45} // anonymous namespace
46
// JSArrayBuffer::Setup (body; the declaration line is absent from this
// extraction): initializes the detach key, bit fields, embedder slots, and —
// when a BackingStore is supplied — attaches it and records byte lengths.
// NOTE(review): lossy extraction — original lines 47, 56, 59-60, 72 and 82
// are missing below. Remaining tokens kept byte-identical.
48 std::shared_ptr<BackingStore> backing_store,
49 Isolate* isolate) {
50 auto finish_setup = [shared, isolate]() {
51 // Count usage may lead to a blink allocation, through the callback, which
52 // may trigger a GC. It is important to delay this, until the array buffer
53 // is properly initialized.
54 if (shared == SharedFlag::kShared) {
55 isolate->CountUsage(
// NOTE(review): the use-counter argument (line 56, presumably
// kSharedArrayBufferConstructed per the reference index) is missing here.
57 }
58 };
// NOTE(review): original lines 59-60 are missing here — TODO confirm
// against upstream js-array-buffer.cc.
61 set_detach_key(ReadOnlyRoots(isolate).undefined_value());
62 set_bit_field(0);
63 set_is_shared(shared == SharedFlag::kShared);
64 set_is_resizable_by_js(resizable == ResizableFlag::kResizable);
// Only non-shared buffers are detachable.
65 set_is_detachable(shared != SharedFlag::kShared);
66 SetupLazilyInitializedCppHeapPointerField(
67 JSAPIObjectWithEmbedderSlots::kCppHeapWrappableOffset);
// Zero out all embedder fields so no stale data is observable.
68 for (int i = 0; i < v8::ArrayBuffer::kEmbedderFieldCount; i++) {
69 SetEmbedderField(i, Smi::zero());
70 }
// No BackingStore: set up an empty buffer and finish early.
71 if (!backing_store) {
// NOTE(review): original line 72 is missing here.
73 set_byte_length(0);
74 set_max_byte_length(0);
75 finish_setup();
76 return;
77 }
78 // Rest of the code here deals with attaching the BackingStore.
79 DCHECK_EQ(is_shared(), backing_store->is_shared());
80 DCHECK((is_resizable_by_js() == backing_store->is_resizable_by_js()) ||
81 (backing_store->is_wasm_memory() && is_shared()));
// NOTE(review): line 82 (the head of a DCHECK_IMPLIES over the two
// conditions below) is missing here.
83 !backing_store->is_wasm_memory() && !backing_store->is_resizable_by_js(),
84 backing_store->byte_length() == backing_store->max_byte_length());
85
86 void* backing_store_buffer = backing_store->buffer_start();
87 // Wasm memory always needs a backing store; this is guaranteed by reserving
88 // at least one page for the BackingStore (so {IsEmpty()} is always false).
89 DCHECK_IMPLIES(backing_store->is_wasm_memory(), !backing_store->IsEmpty());
90 // Non-empty backing stores must start at a non-null pointer.
91 DCHECK_IMPLIES(backing_store_buffer == EmptyBackingStoreBuffer(),
92 backing_store->IsEmpty());
93 // Empty backing stores can be backed by an empty buffer pointer or by an
94 // externally provided pointer: Either is acceptable. However, the pointer
95 // must always point into the sandbox, so nullptr is not acceptable if the
96 // sandbox is enabled.
97 DCHECK_IMPLIES(V8_ENABLE_SANDBOX_BOOL, backing_store_buffer != nullptr);
98 set_backing_store(isolate, backing_store_buffer);
99
100 // GSABs need to read their byte_length from the BackingStore. Maintain the
101 // invariant that their byte_length field is always 0.
102 auto byte_len =
103 (is_shared() && is_resizable_by_js()) ? 0 : backing_store->byte_length();
104 CHECK_LE(backing_store->byte_length(), kMaxByteLength);
105 set_byte_length(byte_len);
106
107 // For Wasm memories, it is possible for the backing store maximum to be
108 // different from the JSArrayBuffer maximum. The maximum pages allowed on a
109 // Wasm memory are tracked on the Wasm memory object, and not the
110 // JSArrayBuffer associated with it.
111 auto max_byte_len = is_resizable_by_js() ? backing_store->max_byte_length()
112 : backing_store->byte_length();
113 set_max_byte_length(max_byte_len);
114
// Wasm memories must never be detached from JS.
115 if (backing_store->is_wasm_memory()) {
116 set_is_detachable(false);
117 }
118
119 CreateExtension(isolate, std::move(backing_store));
// Deferred usage counting runs only after the buffer is fully initialized.
120 finish_setup();
121}
122
// JSArrayBuffer::Detach (body; declaration lines 122-123 missing from this
// extraction): validates the optional detach key, then detaches the buffer.
// Returns Just(true) on success (including no-op cases), or Nothing<bool>()
// after throwing when the detach key does not match.
124 bool force_for_wasm_memory,
125 DirectHandle<Object> maybe_key) {
126 Isolate* const isolate = buffer->GetIsolate();
127
128 DirectHandle<Object> detach_key(buffer->detach_key(), isolate);
129
130 bool key_mismatch = false;
131
// A non-undefined detach key must be supplied and strictly equal.
132 if (!IsUndefined(*detach_key, isolate)) {
133 key_mismatch =
134 maybe_key.is_null() || !Object::StrictEquals(*maybe_key, *detach_key);
135 } else {
136 // Detach key is undefined; allow not passing maybe_key but disallow passing
137 // something else than undefined.
138 key_mismatch =
139 !maybe_key.is_null() && !Object::StrictEquals(*maybe_key, *detach_key);
140 }
141 if (key_mismatch) {
// NOTE(review): line 142 (the THROW_NEW_ERROR_RETURN_VALUE( head, per the
// reference index) is missing here.
143 isolate,
144 NewTypeError(MessageTemplate::kArrayBufferDetachKeyDoesntMatch),
145 Nothing<bool>());
146 }
147
// Detaching twice is a no-op.
148 if (buffer->was_detached()) return Just(true);
149
150 if (force_for_wasm_memory) {
151 // Skip the is_detachable() check.
152 } else if (!buffer->is_detachable()) {
153 // Not detachable, do nothing.
154 return Just(true);
155 }
156
157 buffer->DetachInternal(force_for_wasm_memory, isolate);
158 return Just(true);
159}
160
// Performs the actual detach: releases the ArrayBufferExtension (and with it
// the BackingStore), invalidates the detach protector, and zeroes the length.
// NOTE(review): lossy extraction — lines 163 (presumably fetching the
// extension into a local; see the `extension` uses below) and 177 are
// missing. Tokens below are kept byte-identical.
161void JSArrayBuffer::DetachInternal(bool force_for_wasm_memory,
162 Isolate* isolate) {
164
165 if (extension) {
166 DisallowGarbageCollection disallow_gc;
167 isolate->heap()->DetachArrayBufferExtension(extension);
168 std::shared_ptr<BackingStore> backing_store = RemoveExtension();
// Only Wasm memories may be force-detached.
169 CHECK_IMPLIES(force_for_wasm_memory, backing_store->is_wasm_memory());
170 }
171
// Invalidate the protector so optimized code re-checks detachment.
172 if (Protectors::IsArrayBufferDetachingIntact(isolate)) {
173 Protectors::InvalidateArrayBufferDetaching(isolate);
174 }
175
// Shared buffers are never detached.
176 DCHECK(!is_shared());
178 set_byte_length(0);
179 set_was_detached(true);
180}
181
// JSArrayBuffer::GsabByteLength (body; declaration lines 181-182 missing from
// this extraction): reads the current byte length of a growable shared
// ArrayBuffer directly from its BackingStore with seq_cst ordering, for use
// from raw-address callers (e.g. generated code).
183 Address raw_array_buffer) {
184 // TODO(v8:11111): Cache the last seen length in JSArrayBuffer and use it
185 // in bounds checks to minimize the need for calling this function.
// NOTE(review): line 186 is missing here — TODO confirm against upstream.
187 DisallowJavascriptExecution no_js(isolate);
188 Tagged<JSArrayBuffer> buffer =
189 Cast<JSArrayBuffer>(Tagged<Object>(raw_array_buffer));
// Only valid for growable shared ArrayBuffers (GSABs).
190 CHECK(buffer->is_resizable_by_js());
191 CHECK(buffer->is_shared());
192 return buffer->GetBackingStore()->byte_length(std::memory_order_seq_cst);
193}
194
// Computes page_size/initial_pages/max_pages for a resizable backing store,
// rounding both byte lengths up to whole pages. Throws a RangeError (when
// should_throw != kDontThrow) and returns Nothing<bool>() on overflow;
// returns Just(true) on success.
// NOTE(review): lossy extraction — lines 196 (the function's qualified name),
// 209 and 217 (THROW_NEW_ERROR_RETURN_VALUE heads) are missing below.
195// static
197 Isolate* isolate, size_t byte_length, size_t max_byte_length,
198 ShouldThrow should_throw, size_t* page_size, size_t* initial_pages,
199 size_t* max_pages) {
200 DCHECK_NOT_NULL(page_size);
201 DCHECK_NOT_NULL(initial_pages);
202 DCHECK_NOT_NULL(max_pages);
203
204 *page_size = AllocatePageSize();
205
// Round the requested byte length up to pages; fails if it would exceed
// JSArrayBuffer::kMaxByteLength.
206 if (!RoundUpToPageSize(byte_length, *page_size, JSArrayBuffer::kMaxByteLength,
207 initial_pages)) {
208 if (should_throw == kDontThrow) return Nothing<bool>();
210 isolate, NewRangeError(MessageTemplate::kInvalidArrayBufferLength),
211 Nothing<bool>());
212 }
213
// Same for the maximum byte length.
214 if (!RoundUpToPageSize(max_byte_length, *page_size,
215 JSArrayBuffer::kMaxByteLength, max_pages)) {
216 if (should_throw == kDontThrow) return Nothing<bool>();
218 isolate, NewRangeError(MessageTemplate::kInvalidArrayBufferMaxLength),
219 Nothing<bool>());
220 }
221
222 return Just(true);
223}
224
// Non-throwing variant of GetResizableBackingStorePageConfiguration: returns
// the MessageTemplate describing the failure instead of throwing, or an empty
// optional on success.
// NOTE(review): line 227 (the function's qualified name) is missing from this
// extraction.
225// static
226std::optional<MessageTemplate>
228 Isolate* isolate, size_t byte_length, size_t max_byte_length,
229 size_t* page_size, size_t* initial_pages, size_t* max_pages) {
230 DCHECK_NOT_NULL(page_size);
231 DCHECK_NOT_NULL(initial_pages);
232 DCHECK_NOT_NULL(max_pages);
233
234 *page_size = AllocatePageSize();
235
236 if (!RoundUpToPageSize(byte_length, *page_size, JSArrayBuffer::kMaxByteLength,
237 initial_pages)) {
238 return MessageTemplate::kInvalidArrayBufferLength;
239 }
240
241 if (!RoundUpToPageSize(max_byte_length, *page_size,
242 JSArrayBuffer::kMaxByteLength, max_pages)) {
243 return MessageTemplate::kInvalidArrayBufferMaxLength;
244 }
// Success: both lengths fit in whole pages under kMaxByteLength.
245 return {};
246}
247
// JSArrayBuffer::CreateExtension (body; declaration line 248 missing from
// this extraction): allocates an ArrayBufferExtension owning |backing_store|,
// links it to this buffer, and registers it with the heap so the GC tracks it.
// NOTE(review): lines 255-258 (the age expression and the local holding the
// new extension) are missing below.
249 Isolate* isolate, std::shared_ptr<BackingStore> backing_store) {
250 // `Heap::InYoungGeneration` during full GC with sticky markbits is generally
251 // inaccurate. However, a full GC will sweep both lists and promote all to
252 // old, so it doesn't matter which list initially holds the extension in this
253 // case.
254 const auto age =
259 new ArrayBufferExtension(std::move(backing_store), age);
260 set_extension(extension);
261 isolate->heap()->AppendArrayBufferExtension(extension);
262 return extension;
263}
264
// Detaches the BackingStore from this buffer's extension and unlinks the
// extension so the next GC reclaims it. Returns the (sole) shared_ptr keeping
// the BackingStore alive.
// NOTE(review): line 266 (presumably fetching the extension into a local; see
// the `extension` uses below) is missing from this extraction.
265std::shared_ptr<BackingStore> JSArrayBuffer::RemoveExtension() {
267 DCHECK_NOT_NULL(extension);
268 auto result = extension->RemoveBackingStore();
269 // Remove pointer to extension such that the next GC will free it
270 // automatically.
271 set_extension(nullptr);
272 return result;
273}
274
// JSArrayBuffer::MarkExtension (body; signature lines 275-276 missing from
// this extraction): marks the extension live during GC, if one exists.
277 if (extension) {
278 extension->Mark();
279 }
280}
281
// JSArrayBuffer::YoungMarkExtension (body; lines 281-283 and 285 missing from
// this extraction): young-generation GC marking of the extension, if any.
284 if (extension) {
286 extension->YoungMark();
287 }
288}
289
// JSArrayBuffer::YoungMarkPromotedExtension (body; lines 289-291 missing from
// this extraction): marks the extension as promoted out of the young
// generation, if one exists.
292 if (extension) {
293 extension->YoungMarkPromoted();
294 }
295}
296
// JSTypedArray::GetBuffer (body; declaration lines 296-297 missing from this
// extraction): materializes an off-heap JSArrayBuffer for an on-heap typed
// array — allocates a BackingStore, copies the elements into it, attaches it
// to the existing (empty) buffer, and repoints the typed array off-heap.
// NOTE(review): lines 315-316 (the BackingStore::Allocate call arguments) and
// 328 (the Setup call head) are missing below.
298 Isolate* isolate = GetIsolate();
299 DirectHandle<JSTypedArray> self(*this, isolate);
300 DCHECK(IsTypedArrayOrRabGsabTypedArrayElementsKind(self->GetElementsKind()));
301 Handle<JSArrayBuffer> array_buffer(Cast<JSArrayBuffer>(self->buffer()),
302 isolate);
303 if (!is_on_heap()) {
304 // Already is off heap, so return the existing buffer.
305 return array_buffer;
306 }
// On-heap typed arrays are never backed by resizable buffers.
307 DCHECK(!array_buffer->is_resizable_by_js());
308
309 // The existing array buffer should be empty.
310 DCHECK(array_buffer->IsEmpty());
311
312 // Allocate a new backing store and attach it to the existing array buffer.
313 size_t byte_length = self->byte_length();
314 auto backing_store =
317
// Allocation failure here is fatal: the caller cannot recover.
318 if (!backing_store) {
319 isolate->heap()->FatalProcessOutOfMemory("JSTypedArray::GetBuffer");
320 }
321
322 // Copy the elements into the backing store of the array buffer.
323 if (byte_length > 0) {
324 memcpy(backing_store->buffer_start(), self->DataPtr(), byte_length);
325 }
326
327 // Attach the backing store to the array buffer.
329 std::move(backing_store), isolate);
330
331 // Clear the elements of the typed array.
332 self->set_elements(ReadOnlyRoots(isolate).empty_byte_array());
333 self->SetOffHeapDataPtr(isolate, array_buffer->backing_store(), 0);
334 DCHECK(!self->is_on_heap());
335
336 return array_buffer;
337}
338
// JSTypedArray::DefineOwnProperty (body; declaration lines 341-343 missing
// from this extraction): spec-compliant [[DefineOwnProperty]] for typed
// arrays. Canonical numeric index keys are validated against the array's
// bounds and descriptor restrictions; everything else falls through to
// OrdinaryDefineOwnProperty.
// NOTE(review): lines 378 (presumably an accessor-descriptor check; the
// reference index lists PropertyDescriptor::IsAccessorDescriptor) and 398
// (a RETURN_ON_EXCEPTION_VALUE head) are missing below.
339// ES#sec-integer-indexed-exotic-objects-defineownproperty-p-desc
340// static
344 PropertyDescriptor* desc,
345 Maybe<ShouldThrow> should_throw) {
346 DCHECK(IsName(*key) || IsNumber(*key));
347 // 1. If Type(P) is String, then
348 PropertyKey lookup_key(isolate, key);
349 if (lookup_key.is_element() || IsSmi(*key) || IsString(*key)) {
350 // 1a. Let numericIndex be ! CanonicalNumericIndexString(P)
351 // 1b. If numericIndex is not undefined, then
352 bool is_minus_zero = false;
353 if (IsSmi(*key) || // Smi keys are definitely canonical
354 CanonicalNumericIndexString(isolate, lookup_key, &is_minus_zero)) {
355 // 1b i. If IsValidIntegerIndex(O, numericIndex) is false, return false.
356
357 // IsValidIntegerIndex:
358 size_t index = lookup_key.index();
359 bool out_of_bounds = false;
360 size_t length = o->GetLengthOrOutOfBounds(out_of_bounds);
// Detached or out-of-bounds views reject all numeric-index definitions.
361 if (o->WasDetached() || out_of_bounds || index >= length) {
362 RETURN_FAILURE(isolate, GetShouldThrow(isolate, should_throw),
363 NewTypeError(MessageTemplate::kInvalidTypedArrayIndex));
364 }
// Canonical-but-non-element keys (e.g. "-0", fractional, too large) are
// invalid typed-array indices.
365 if (!lookup_key.is_element() || is_minus_zero) {
366 RETURN_FAILURE(isolate, GetShouldThrow(isolate, should_throw),
367 NewTypeError(MessageTemplate::kInvalidTypedArrayIndex));
368 }
369
370 // 1b ii. If Desc has a [[Configurable]] field and if
371 // Desc.[[Configurable]] is false, return false.
372 // 1b iii. If Desc has an [[Enumerable]] field and if Desc.[[Enumerable]]
373 // is false, return false.
374 // 1b iv. If IsAccessorDescriptor(Desc) is true, return false.
375 // 1b v. If Desc has a [[Writable]] field and if Desc.[[Writable]] is
376 // false, return false.
377
379 RETURN_FAILURE(isolate, GetShouldThrow(isolate, should_throw),
380 NewTypeError(MessageTemplate::kRedefineDisallowed, key));
381 }
382
383 if ((desc->has_configurable() && !desc->configurable()) ||
384 (desc->has_enumerable() && !desc->enumerable()) ||
385 (desc->has_writable() && !desc->writable())) {
386 RETURN_FAILURE(isolate, GetShouldThrow(isolate, should_throw),
387 NewTypeError(MessageTemplate::kRedefineDisallowed, key));
388 }
389
390 // 1b vi. If Desc has a [[Value]] field, perform
391 // ? IntegerIndexedElementSet(O, numericIndex, Desc.[[Value]]).
392 if (desc->has_value()) {
// Absent attributes default to true for typed-array elements.
393 if (!desc->has_configurable()) desc->set_configurable(true);
394 if (!desc->has_enumerable()) desc->set_enumerable(true);
395 if (!desc->has_writable()) desc->set_writable(true);
396 DirectHandle<Object> value = desc->value();
397 LookupIterator it(isolate, o, index, LookupIterator::OWN);
399 isolate,
400 DefineOwnPropertyIgnoreAttributes(&it, value, desc->ToAttributes()),
401 Nothing<bool>());
402 }
403 // 1b vii. Return true.
404 return Just(true);
405 }
406 }
407 // 4. Return ! OrdinaryDefineOwnProperty(O, P, Desc).
408 return OrdinaryDefineOwnProperty(isolate, o, lookup_key, desc, should_throw);
409}
410
// Maps this typed array's elements kind to its ExternalArrayType (body; the
// signature lines 410-411 are missing from this extraction).
// NOTE(review): lines 417-418 — presumably the TYPED_ARRAYS /
// RAB_GSAB_TYPED_ARRAYS_WITH_TYPED_ARRAY_TYPE macro invocations that expand
// the case labels — are missing below; TODO confirm against upstream.
412 switch (map()->elements_kind()) {
413#define ELEMENTS_KIND_TO_ARRAY_TYPE(Type, type, TYPE, ctype) \
414 case TYPE##_ELEMENTS: \
415 return kExternal##Type##Array;
416
419#undef ELEMENTS_KIND_TO_ARRAY_TYPE
420
421 default:
422 UNREACHABLE();
423 }
424}
425
// Returns the per-element byte size (sizeof the element's C type) for this
// typed array's elements kind (body; signature lines 425-426 missing from
// this extraction).
// NOTE(review): lines 432-433 — presumably the macro invocations expanding
// the case labels — are missing below; TODO confirm against upstream.
427 switch (map()->elements_kind()) {
428#define ELEMENTS_KIND_TO_ELEMENT_SIZE(Type, type, TYPE, ctype) \
429 case TYPE##_ELEMENTS: \
430 return sizeof(ctype);
431
434#undef ELEMENTS_KIND_TO_ELEMENT_SIZE
435
436 default:
437 UNREACHABLE();
438 }
439}
440
// JSTypedArray::LengthTrackingGsabBackedTypedArrayLength (body; declaration
// lines 440-441 missing from this extraction): computes the current element
// count of a length-tracking typed array backed by a growable shared
// ArrayBuffer, reading the BackingStore length with seq_cst ordering.
// NOTE(review): lines 445 and 447 (presumably a DisallowGarbageCollection
// scope and the cast of raw_array into `array`) are missing below.
442 Isolate* isolate, Address raw_array) {
443 // TODO(v8:11111): Cache the last seen length in JSArrayBuffer and use it
444 // in bounds checks to minimize the need for calling this function.
446 DisallowJavascriptExecution no_js(isolate);
448 CHECK(array->is_length_tracking());
449 Tagged<JSArrayBuffer> buffer = array->buffer();
450 CHECK(buffer->is_resizable_by_js());
451 CHECK(buffer->is_shared());
452 size_t backing_byte_length =
453 buffer->GetBackingStore()->byte_length(std::memory_order_seq_cst);
// The view's offset can never exceed the (grow-only) GSAB length.
454 CHECK_GE(backing_byte_length, array->byte_offset());
455 auto element_byte_size = ElementsKindToByteSize(array->GetElementsKind());
456 return (backing_byte_length - array->byte_offset()) / element_byte_size;
457}
458
// JSTypedArray::GetVariableByteLengthOrOutOfBounds (body; declaration line
// 459 missing from this extraction): returns the view's current byte length.
// Length-tracking views derive it from the buffer; fixed-length views return
// their own byte_length after a bounds check. Sets |out_of_bounds| and
// returns 0 when a RAB-backed view no longer fits its buffer.
// NOTE(review): line 478 is missing below — TODO confirm against upstream.
460 bool& out_of_bounds) const {
461 DCHECK(!WasDetached());
462 size_t own_byte_offset = byte_offset();
463 if (is_length_tracking()) {
464 if (is_backed_by_rab()) {
// RAB can shrink, so the offset may now exceed the buffer.
465 size_t buffer_byte_length = buffer()->byte_length();
466 if (own_byte_offset > buffer_byte_length) {
467 out_of_bounds = true;
468 return 0;
469 }
470 return (buffer_byte_length - own_byte_offset);
471 }
472 // GSAB-backed TypedArrays can't be out of bounds.
473 size_t buffer_byte_length =
474 buffer()->GetBackingStore()->byte_length(std::memory_order_seq_cst);
// Sandbox-hardened check: a GSAB only grows, so the offset must fit.
475 SBXCHECK(own_byte_offset <= buffer_byte_length);
476 return buffer_byte_length - own_byte_offset;
477 }
// Fixed-length view: verify [offset, offset + length) still fits the
// buffer; the subtraction order avoids unsigned overflow.
479 size_t own_byte_length = byte_length();
480 size_t buffer_byte_length = buffer()->byte_length();
481 if (own_byte_length > buffer_byte_length ||
482 own_byte_offset > buffer_byte_length - own_byte_length) {
483 out_of_bounds = true;
484 return 0;
485 }
486 return own_byte_length;
487}
488
489size_t JSTypedArray::GetVariableLengthOrOutOfBounds(bool& out_of_bounds) const {
490 return GetVariableByteLengthOrOutOfBounds(out_of_bounds) / element_size();
491}
492
493} // namespace internal
494} // namespace v8
#define SBXCHECK(condition)
Definition check.h:61
static constexpr int kEmbedderFieldCount
@ kSharedArrayBufferConstructed
Definition v8-isolate.h:551
static std::unique_ptr< BackingStore > Allocate(Isolate *isolate, size_t byte_length, SharedFlag shared, InitializedFlag initialized)
V8_INLINE bool is_null() const
Definition handles.h:693
static V8_INLINE bool InYoungGeneration(Tagged< Object > object)
std::shared_ptr< BackingStore > RemoveExtension()
V8_EXPORT_PRIVATE ArrayBufferExtension * CreateExtension(Isolate *isolate, std::shared_ptr< BackingStore > backing_store)
V8_EXPORT_PRIVATE void Setup(SharedFlag shared, ResizableFlag resizable, std::shared_ptr< BackingStore > backing_store, Isolate *isolate)
void DetachInternal(bool force_for_wasm_memory, Isolate *isolate)
static size_t GsabByteLength(Isolate *isolate, Address raw_array_buffer)
V8_EXPORT_PRIVATE static V8_WARN_UNUSED_RESULT Maybe< bool > Detach(DirectHandle< JSArrayBuffer > buffer, bool force_for_wasm_memory=false, DirectHandle< Object > key={})
static constexpr size_t kMaxByteLength
static Maybe< bool > GetResizableBackingStorePageConfiguration(Isolate *isolate, size_t byte_length, size_t max_byte_length, ShouldThrow should_throw, size_t *page_size, size_t *initial_pages, size_t *max_pages)
void set_backing_store(Isolate *isolate, void *value)
static std::optional< MessageTemplate > GetResizableBackingStorePageConfigurationImpl(Isolate *isolate, size_t byte_length, size_t max_byte_length, size_t *page_size, size_t *initial_pages, size_t *max_pages)
size_t GetVariableByteLengthOrOutOfBounds(bool &out_of_bounds) const
V8_EXPORT_PRIVATE Handle< JSArrayBuffer > GetBuffer()
size_t GetVariableLengthOrOutOfBounds(bool &out_of_bounds) const
V8_EXPORT_PRIVATE size_t element_size() const
static V8_WARN_UNUSED_RESULT Maybe< bool > DefineOwnProperty(Isolate *isolate, DirectHandle< JSTypedArray > o, DirectHandle< Object > key, PropertyDescriptor *desc, Maybe< ShouldThrow > should_throw)
static size_t LengthTrackingGsabBackedTypedArrayLength(Isolate *isolate, Address raw_array)
static V8_WARN_UNUSED_RESULT HandleType< String >::MaybeType ToString(Isolate *isolate, HandleType< T > input)
static V8_EXPORT_PRIVATE bool SameValue(Tagged< Object > obj, Tagged< Object > other)
Definition objects.cc:1706
static V8_EXPORT_PRIVATE bool StrictEquals(Tagged< Object > obj, Tagged< Object > that)
Definition objects.cc:986
static bool IsAccessorDescriptor(PropertyDescriptor *desc)
size_t index() const
Definition lookup.h:42
static constexpr Tagged< Smi > zero()
Definition smi.h:99
static HandleType< Number > ToNumber(Isolate *isolate, HandleType< String > subject)
Definition string.cc:661
#define V8_ENABLE_SANDBOX_BOOL
Definition globals.h:160
#define RAB_GSAB_TYPED_ARRAYS(V)
#define TYPED_ARRAYS(V)
#define RAB_GSAB_TYPED_ARRAYS_WITH_TYPED_ARRAY_TYPE(V)
#define THROW_NEW_ERROR_RETURN_VALUE(isolate, call, value)
Definition isolate.h:300
#define RETURN_FAILURE(isolate, should_throw, call)
Definition isolate.h:398
#define RETURN_ON_EXCEPTION_VALUE(isolate, call, value)
Definition isolate.h:340
Isolate * isolate
std::string extension
#define ELEMENTS_KIND_TO_ELEMENT_SIZE(Type, type, TYPE, ctype)
#define ELEMENTS_KIND_TO_ARRAY_TYPE(Type, type, TYPE, ctype)
SharedFunctionInfoRef shared
std::map< const std::string, const std::string > map
ZoneVector< RpoNumber > & result
bool IsNumber(Tagged< Object > obj)
V8_INLINE constexpr bool IsSmi(TaggedImpl< kRefType, StorageType > obj)
Definition objects.h:665
bool RoundUpToPageSize(size_t byte_length, size_t page_size, size_t max_allowed_byte_length, size_t *pages)
Definition utils.h:844
Handle< To > UncheckedCast(Handle< From > value)
Definition handles-inl.h:55
ShouldThrow GetShouldThrow(Isolate *isolate, Maybe< ShouldThrow > should_throw)
Definition objects.cc:140
V8_INLINE void * EmptyBackingStoreBuffer()
Definition sandbox.h:345
size_t AllocatePageSize()
static bool IsMinusZero(double value)
bool IsTypedArrayOrRabGsabTypedArrayElementsKind(ElementsKind kind)
JSArrayBuffer::IsDetachableBit is_shared
constexpr int ElementsKindToByteSize(ElementsKind elements_kind)
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
Maybe< T > Nothing()
Definition v8-maybe.h:112
Maybe< T > Just(const T &t)
Definition v8-maybe.h:117
#define CHECK_GE(lhs, rhs)
#define CHECK_IMPLIES(lhs, rhs)
#define CHECK(condition)
Definition logging.h:124
#define CHECK_LE(lhs, rhs)
#define DCHECK_NOT_NULL(val)
Definition logging.h:492
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_EQ(v1, v2)
Definition logging.h:485