v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
elements.cc
1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
14#include "src/heap/factory.h"
15#include "src/heap/heap-inl.h" // For MaxNumberToStringCacheSize.
23#include "src/objects/keys.h"
26#include "src/objects/slots.h"
27#include "src/utils/utils.h"
28#include "third_party/fp16/src/include/fp16.h"
29
30// Each concrete ElementsAccessor can handle exactly one ElementsKind;
31// several abstract ElementsAccessor classes are used to allow sharing
32// common code. (A standalone sketch of this scheme follows the hierarchy.)
33//
34// Inheritance hierarchy:
35// - ElementsAccessorBase (abstract)
36// - FastElementsAccessor (abstract)
37// - FastSmiOrObjectElementsAccessor
38// - FastPackedSmiElementsAccessor
39// - FastHoleySmiElementsAccessor
40// - FastPackedObjectElementsAccessor
41// - FastNonextensibleObjectElementsAccessor: template
42// - FastPackedNonextensibleObjectElementsAccessor
43// - FastHoleyNonextensibleObjectElementsAccessor
44// - FastSealedObjectElementsAccessor: template
45// - FastPackedSealedObjectElementsAccessor
46// - FastHoleySealedObjectElementsAccessor
47// - FastFrozenObjectElementsAccessor: template
48// - FastPackedFrozenObjectElementsAccessor
49// - FastHoleyFrozenObjectElementsAccessor
50// - FastHoleyObjectElementsAccessor
51// - FastDoubleElementsAccessor
52// - FastPackedDoubleElementsAccessor
53// - FastHoleyDoubleElementsAccessor
54// - TypedElementsAccessor: template, with instantiations:
55// - Uint8ElementsAccessor
56// - Int8ElementsAccessor
57// - Uint16ElementsAccessor
58// - Int16ElementsAccessor
59// - Uint32ElementsAccessor
60// - Int32ElementsAccessor
61// - Float32ElementsAccessor
62// - Float64ElementsAccessor
63// - Uint8ClampedElementsAccessor
64// - BigUint64ElementsAccessor
65// - BigInt64ElementsAccessor
66// - RabGsabUint8ElementsAccessor
67// - RabGsabInt8ElementsAccessor
68// - RabGsabUint16ElementsAccessor
69// - RabGsabInt16ElementsAccessor
70// - RabGsabUint32ElementsAccessor
71// - RabGsabInt32ElementsAccessor
72// - RabGsabFloat32ElementsAccessor
73// - RabGsabFloat64ElementsAccessor
74// - RabGsabUint8ClampedElementsAccessor
75// - RabGsabBigUint64ElementsAccessor
76// - RabGsabBigInt64ElementsAccessor
77// - DictionaryElementsAccessor
78// - SloppyArgumentsElementsAccessor
79// - FastSloppyArgumentsElementsAccessor
80// - SlowSloppyArgumentsElementsAccessor
81// - StringWrapperElementsAccessor
82// - FastStringWrapperElementsAccessor
83// - SlowStringWrapperElementsAccessor
84
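// ---------------------------------------------------------------------------
// Illustrative sketch, not part of elements.cc: the hierarchy above exists so
// that each ElementsKind has exactly one concrete accessor, selected through a
// table indexed by the kind (see ELEMENTS_LIST further down). A minimal
// standalone analog of that dispatch scheme, with invented names, could look
// like this:

namespace elements_dispatch_sketch {  // hypothetical, for illustration only

enum SketchKind { kPackedSmi = 0, kPackedDouble = 1, kNumKinds = 2 };

class SketchAccessor {
 public:
  virtual ~SketchAccessor() = default;
  virtual const char* Name() const = 0;
};

class PackedSmiAccessor final : public SketchAccessor {
 public:
  const char* Name() const override { return "PackedSmiAccessor"; }
};

class PackedDoubleAccessor final : public SketchAccessor {
 public:
  const char* Name() const override { return "PackedDoubleAccessor"; }
};

// One table slot per kind; slot order must match the enum, just as the real
// accessor array must match the ElementsKind enum.
inline SketchAccessor* ForKind(SketchKind kind) {
  static PackedSmiAccessor smi_accessor;
  static PackedDoubleAccessor double_accessor;
  static SketchAccessor* const table[kNumKinds] = {&smi_accessor,
                                                   &double_accessor};
  return table[kind];
}

}  // namespace elements_dispatch_sketch
// ---------------------------------------------------------------------------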
85namespace v8 {
86namespace internal {
87
88namespace {
89
90#define RETURN_NOTHING_IF_NOT_SUCCESSFUL(call) \
91 do { \
92 if (!(call)) return Nothing<bool>(); \
93 } while (false)
94
95#define RETURN_FAILURE_IF_NOT_SUCCESSFUL(call) \
96 do { \
97 ExceptionStatus status_enum_result = (call); \
98 if (!status_enum_result) return status_enum_result; \
99 } while (false)
100
101static const int kPackedSizeNotKnown = -1;
102
103enum Where { AT_START, AT_END };
104
105// First argument in list is the accessor class, the second argument is the
106// accessor ElementsKind, and the third is the backing store class. Use the
107// fast element handler for smi-only arrays. The implementation is currently
108// identical. Note that the order must match that of the ElementsKind enum for
109// the |accessor_array[]| below to work (see the X-macro sketch after the list).
110#define ELEMENTS_LIST(V) \
111 V(FastPackedSmiElementsAccessor, PACKED_SMI_ELEMENTS, FixedArray) \
112 V(FastHoleySmiElementsAccessor, HOLEY_SMI_ELEMENTS, FixedArray) \
113 V(FastPackedObjectElementsAccessor, PACKED_ELEMENTS, FixedArray) \
114 V(FastHoleyObjectElementsAccessor, HOLEY_ELEMENTS, FixedArray) \
115 V(FastPackedDoubleElementsAccessor, PACKED_DOUBLE_ELEMENTS, \
116 FixedDoubleArray) \
117 V(FastHoleyDoubleElementsAccessor, HOLEY_DOUBLE_ELEMENTS, FixedDoubleArray) \
118 V(FastPackedNonextensibleObjectElementsAccessor, \
119 PACKED_NONEXTENSIBLE_ELEMENTS, FixedArray) \
120 V(FastHoleyNonextensibleObjectElementsAccessor, \
121 HOLEY_NONEXTENSIBLE_ELEMENTS, FixedArray) \
122 V(FastPackedSealedObjectElementsAccessor, PACKED_SEALED_ELEMENTS, \
123 FixedArray) \
124 V(FastHoleySealedObjectElementsAccessor, HOLEY_SEALED_ELEMENTS, FixedArray) \
125 V(FastPackedFrozenObjectElementsAccessor, PACKED_FROZEN_ELEMENTS, \
126 FixedArray) \
127 V(FastHoleyFrozenObjectElementsAccessor, HOLEY_FROZEN_ELEMENTS, FixedArray) \
128 V(SharedArrayElementsAccessor, SHARED_ARRAY_ELEMENTS, FixedArray) \
129 V(DictionaryElementsAccessor, DICTIONARY_ELEMENTS, NumberDictionary) \
130 V(FastSloppyArgumentsElementsAccessor, FAST_SLOPPY_ARGUMENTS_ELEMENTS, \
131 FixedArray) \
132 V(SlowSloppyArgumentsElementsAccessor, SLOW_SLOPPY_ARGUMENTS_ELEMENTS, \
133 FixedArray) \
134 V(FastStringWrapperElementsAccessor, FAST_STRING_WRAPPER_ELEMENTS, \
135 FixedArray) \
136 V(SlowStringWrapperElementsAccessor, SLOW_STRING_WRAPPER_ELEMENTS, \
137 FixedArray) \
138 V(Uint8ElementsAccessor, UINT8_ELEMENTS, ByteArray) \
139 V(Int8ElementsAccessor, INT8_ELEMENTS, ByteArray) \
140 V(Uint16ElementsAccessor, UINT16_ELEMENTS, ByteArray) \
141 V(Int16ElementsAccessor, INT16_ELEMENTS, ByteArray) \
142 V(Uint32ElementsAccessor, UINT32_ELEMENTS, ByteArray) \
143 V(Int32ElementsAccessor, INT32_ELEMENTS, ByteArray) \
144 V(BigUint64ElementsAccessor, BIGUINT64_ELEMENTS, ByteArray) \
145 V(BigInt64ElementsAccessor, BIGINT64_ELEMENTS, ByteArray) \
146 V(Uint8ClampedElementsAccessor, UINT8_CLAMPED_ELEMENTS, ByteArray) \
147 V(Float32ElementsAccessor, FLOAT32_ELEMENTS, ByteArray) \
148 V(Float64ElementsAccessor, FLOAT64_ELEMENTS, ByteArray) \
149 V(Float16ElementsAccessor, FLOAT16_ELEMENTS, ByteArray) \
150 V(RabGsabUint8ElementsAccessor, RAB_GSAB_UINT8_ELEMENTS, ByteArray) \
151 V(RabGsabInt8ElementsAccessor, RAB_GSAB_INT8_ELEMENTS, ByteArray) \
152 V(RabGsabUint16ElementsAccessor, RAB_GSAB_UINT16_ELEMENTS, ByteArray) \
153 V(RabGsabInt16ElementsAccessor, RAB_GSAB_INT16_ELEMENTS, ByteArray) \
154 V(RabGsabUint32ElementsAccessor, RAB_GSAB_UINT32_ELEMENTS, ByteArray) \
155 V(RabGsabInt32ElementsAccessor, RAB_GSAB_INT32_ELEMENTS, ByteArray) \
156 V(RabGsabBigUint64ElementsAccessor, RAB_GSAB_BIGUINT64_ELEMENTS, ByteArray) \
157 V(RabGsabBigInt64ElementsAccessor, RAB_GSAB_BIGINT64_ELEMENTS, ByteArray) \
158 V(RabGsabUint8ClampedElementsAccessor, RAB_GSAB_UINT8_CLAMPED_ELEMENTS, \
159 ByteArray) \
160 V(RabGsabFloat32ElementsAccessor, RAB_GSAB_FLOAT32_ELEMENTS, ByteArray) \
161 V(RabGsabFloat64ElementsAccessor, RAB_GSAB_FLOAT64_ELEMENTS, ByteArray) \
162 V(RabGsabFloat16ElementsAccessor, RAB_GSAB_FLOAT16_ELEMENTS, ByteArray)
163
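// ---------------------------------------------------------------------------
// Illustrative sketch, not part of elements.cc: ELEMENTS_LIST above is an
// "X-macro". Every client supplies its own definition of V and expands the
// list once, generating one artifact per entry; because the entries appear in
// ElementsKind order, tables built this way can be indexed by the kind. A tiny
// standalone analog with invented names:

namespace xmacro_sketch {  // hypothetical, for illustration only

#define SKETCH_KIND_LIST(V) \
  V(PackedSmi)              \
  V(PackedDouble)           \
  V(Dictionary)

// Expansion 1: an enum with one entry per list element, in list order.
enum SketchKind {
#define DEFINE_SKETCH_ENUM(Name) k##Name,
  SKETCH_KIND_LIST(DEFINE_SKETCH_ENUM)
#undef DEFINE_SKETCH_ENUM
};

// Expansion 2: a name table whose indices line up with the enum above.
inline const char* SketchKindName(SketchKind kind) {
  static const char* const names[] = {
#define DEFINE_SKETCH_NAME(Name) #Name,
      SKETCH_KIND_LIST(DEFINE_SKETCH_NAME)
#undef DEFINE_SKETCH_NAME
  };
  return names[kind];  // e.g. SketchKindName(kDictionary) == "Dictionary"
}

}  // namespace xmacro_sketch
// ---------------------------------------------------------------------------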
164template <ElementsKind Kind>
165class ElementsKindTraits {
166 public:
167 using BackingStore = FixedArrayBase;
168};
169
170#define ELEMENTS_TRAITS(Class, KindParam, Store) \
171 template <> \
172 class ElementsKindTraits<KindParam> { \
173 public: /* NOLINT */ \
174 static constexpr ElementsKind Kind = KindParam; \
175 using BackingStore = Store; \
176 }; \
177 constexpr ElementsKind ElementsKindTraits<KindParam>::Kind;
179#undef ELEMENTS_TRAITS
180
182MaybeDirectHandle<Object> ThrowArrayLengthRangeError(Isolate* isolate) {
183 THROW_NEW_ERROR(isolate, NewRangeError(MessageTemplate::kInvalidArrayLength));
184}
185
186WriteBarrierMode GetWriteBarrierMode(Tagged<FixedArrayBase> elements,
188 const DisallowGarbageCollection& promise) {
191 return elements->GetWriteBarrierMode(promise);
192}
193
194// If kCopyToEndAndInitializeToHole is specified as the copy_size to
195// CopyElements, it copies all elements from the source array starting at
196// source_start into the destination array, padding any remaining
197// uninitialized elements in the destination array with the hole.
198constexpr int kCopyToEndAndInitializeToHole = -1;
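// ---------------------------------------------------------------------------
// Illustrative sketch, not part of elements.cc: the copy helpers below treat a
// negative copy_size (kCopyToEndAndInitializeToHole) as "copy as much as fits
// from source_start onward, then pad the rest of the destination with the
// hole". The same convention modeled on plain vectors, with a sentinel value
// standing in for the hole (the sentinel choice is purely illustrative):

#include <algorithm>
#include <vector>

namespace copy_convention_sketch {  // hypothetical, for illustration only

constexpr int kCopyToEnd = -1;          // analog of kCopyToEndAndInitializeToHole
constexpr double kHoleSentinel = -1.0;  // stand-in for the hole value

inline void CopyWithConvention(const std::vector<double>& from, int from_start,
                               std::vector<double>& to, int to_start,
                               int raw_copy_size) {
  int copy_size = raw_copy_size;
  if (raw_copy_size < 0) {  // kCopyToEnd: derive the size, then pad with holes.
    copy_size = std::min(static_cast<int>(from.size()) - from_start,
                         static_cast<int>(to.size()) - to_start);
    std::fill(to.begin() + to_start + copy_size, to.end(), kHoleSentinel);
  }
  std::copy_n(from.begin() + from_start, copy_size, to.begin() + to_start);
}

}  // namespace copy_convention_sketch
// ---------------------------------------------------------------------------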
199
200void CopyObjectToObjectElements(Isolate* isolate,
201 Tagged<FixedArrayBase> from_base,
202 ElementsKind from_kind, uint32_t from_start,
204 ElementsKind to_kind, uint32_t to_start,
205 int raw_copy_size) {
206 ReadOnlyRoots roots(isolate);
207 DCHECK(to_base->map() != roots.fixed_cow_array_map());
209 int copy_size = raw_copy_size;
210 if (raw_copy_size < 0) {
211 DCHECK_EQ(kCopyToEndAndInitializeToHole, raw_copy_size);
212 copy_size = std::min(from_base->length() - from_start,
213 to_base->length() - to_start);
214 int start = to_start + copy_size;
215 int length = to_base->length() - start;
216 if (length > 0) {
217 MemsetTagged(Cast<FixedArray>(to_base)->RawFieldOfElementAt(start),
218 roots.the_hole_value(), length);
219 }
220 }
221 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() &&
222 (copy_size + static_cast<int>(from_start)) <= from_base->length());
223 if (copy_size == 0) return;
224 Tagged<FixedArray> from = Cast<FixedArray>(from_base);
228
229 WriteBarrierMode write_barrier_mode =
230 (IsObjectElementsKind(from_kind) && IsObjectElementsKind(to_kind))
233 to->CopyElements(isolate, to_start, from, from_start, copy_size,
234 write_barrier_mode);
235}
236
237void CopyDictionaryToObjectElements(Isolate* isolate,
238 Tagged<FixedArrayBase> from_base,
239 uint32_t from_start,
241 ElementsKind to_kind, uint32_t to_start,
242 int raw_copy_size) {
245 int copy_size = raw_copy_size;
246 if (raw_copy_size < 0) {
247 DCHECK_EQ(kCopyToEndAndInitializeToHole, raw_copy_size);
248 copy_size = from->max_number_key() + 1 - from_start;
249 int start = to_start + copy_size;
250 int length = to_base->length() - start;
251 if (length > 0) {
252 MemsetTagged(Cast<FixedArray>(to_base)->RawFieldOfElementAt(start),
253 ReadOnlyRoots(isolate).the_hole_value(), length);
254 }
255 }
256 DCHECK(to_base != from_base);
258 if (copy_size == 0) return;
260 uint32_t to_length = to->length();
261 if (to_start + copy_size > to_length) {
262 copy_size = to_length - to_start;
263 }
264 WriteBarrierMode write_barrier_mode = GetWriteBarrierMode(to, to_kind, no_gc);
265 for (int i = 0; i < copy_size; i++) {
266 InternalIndex entry = from->FindEntry(isolate, i + from_start);
267 if (entry.is_found()) {
268 Tagged<Object> value = from->ValueAt(entry);
269 DCHECK(!IsTheHole(value, isolate));
270 to->set(i + to_start, value, write_barrier_mode);
271 } else {
272 to->set_the_hole(isolate, i + to_start);
273 }
274 }
275}
276
277// NOTE: this method violates the handlified function signature convention:
278// raw pointer parameters in the function that allocates.
279// See ElementsAccessorBase::CopyElements() for details.
280void CopyDoubleToObjectElements(Isolate* isolate,
281 Tagged<FixedArrayBase> from_base,
282 uint32_t from_start,
284 uint32_t to_start, int raw_copy_size) {
285 int copy_size = raw_copy_size;
286 if (raw_copy_size < 0) {
288 DCHECK_EQ(kCopyToEndAndInitializeToHole, raw_copy_size);
289 copy_size = std::min(from_base->length() - from_start,
290 to_base->length() - to_start);
291 // Also initialize the area that will be copied over since HeapNumber
292 // allocation below can cause an incremental marking step, requiring all
293 // existing heap objects to be properly initialized.
294 int start = to_start;
295 int length = to_base->length() - start;
296 if (length > 0) {
297 MemsetTagged(Cast<FixedArray>(to_base)->RawFieldOfElementAt(start),
298 ReadOnlyRoots(isolate).the_hole_value(), length);
299 }
300 }
301
302 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() &&
303 (copy_size + static_cast<int>(from_start)) <= from_base->length());
304 if (copy_size == 0) return;
305
306 // From here on, the code below could actually allocate. Therefore the raw
307 // values are wrapped into handles.
308 DirectHandle<FixedDoubleArray> from(Cast<FixedDoubleArray>(from_base),
309 isolate);
310 DirectHandle<FixedArray> to(Cast<FixedArray>(to_base), isolate);
311
312 // Use an outer loop so we don't waste too much time creating HandleScopes;
313 // on the other hand, a single handle scope might overflow depending on the
314 // copy_size. (A standalone sketch of this batching follows the function.)
315 int offset = 0;
316 while (offset < copy_size) {
317 HandleScope scope(isolate);
318 offset += 100;
319 for (int i = offset - 100; i < offset && i < copy_size; ++i) {
320 DirectHandle<Object> value =
321 FixedDoubleArray::get(*from, i + from_start, isolate);
322 to->set(i + to_start, *value, UPDATE_WRITE_BARRIER);
323 }
324 }
325}
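// ---------------------------------------------------------------------------
// Illustrative sketch, not part of elements.cc: the loop above opens one
// HandleScope per batch of 100 elements. One scope per element would be
// wasteful, while a single scope around the whole copy could accumulate an
// unbounded number of handles. The same loop shape over a generic RAII scope
// type (all names invented):

namespace batched_scope_sketch {  // hypothetical, for illustration only

struct BatchScope {  // stand-in for HandleScope: frees per-batch resources.
  ~BatchScope() { /* release everything allocated within this batch */ }
};

template <typename Fn>
void ProcessInBatches(int total, Fn per_element) {
  constexpr int kBatchSize = 100;
  int offset = 0;
  while (offset < total) {
    BatchScope scope;  // one scope per batch, not per element or per range
    offset += kBatchSize;
    for (int i = offset - kBatchSize; i < offset && i < total; ++i) {
      per_element(i);
    }
  }
}

}  // namespace batched_scope_sketch
// ---------------------------------------------------------------------------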
326
327void CopyDoubleToDoubleElements(Tagged<FixedArrayBase> from_base,
328 uint32_t from_start,
330 uint32_t to_start, int raw_copy_size) {
332 int copy_size = raw_copy_size;
333 if (raw_copy_size < 0) {
334 DCHECK_EQ(kCopyToEndAndInitializeToHole, raw_copy_size);
335 copy_size = std::min(from_base->length() - from_start,
336 to_base->length() - to_start);
337 for (int i = to_start + copy_size; i < to_base->length(); ++i) {
338 Cast<FixedDoubleArray>(to_base)->set_the_hole(i);
339 }
340 }
341 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() &&
342 (copy_size + static_cast<int>(from_start)) <= from_base->length());
343 if (copy_size == 0) return;
346 Address to_address = reinterpret_cast<Address>(to->begin());
347 Address from_address = reinterpret_cast<Address>(from->begin());
348 to_address += kDoubleSize * to_start;
349 from_address += kDoubleSize * from_start;
350#ifdef V8_COMPRESS_POINTERS
351 // TODO(ishell, v8:8875): we use CopyTagged() in order to avoid unaligned
352 // access to double values in the arrays. This will no longer be necessary
353 // once the allocation alignment issue is fixed.
354 int words_per_double = (kDoubleSize / kTaggedSize);
355 CopyTagged(to_address, from_address,
356 static_cast<size_t>(words_per_double * copy_size));
357#else
358 int words_per_double = (kDoubleSize / kSystemPointerSize);
359 CopyWords(to_address, from_address,
360 static_cast<size_t>(words_per_double * copy_size));
361#endif
362}
363
364void CopySmiToDoubleElements(Tagged<FixedArrayBase> from_base,
365 uint32_t from_start,
366 Tagged<FixedArrayBase> to_base, uint32_t to_start,
367 int raw_copy_size) {
369 int copy_size = raw_copy_size;
370 if (raw_copy_size < 0) {
371 DCHECK_EQ(kCopyToEndAndInitializeToHole, raw_copy_size);
372 copy_size = from_base->length() - from_start;
373 for (int i = to_start + copy_size; i < to_base->length(); ++i) {
374 Cast<FixedDoubleArray>(to_base)->set_the_hole(i);
375 }
376 }
377 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() &&
378 (copy_size + static_cast<int>(from_start)) <= from_base->length());
379 if (copy_size == 0) return;
380 Tagged<FixedArray> from = Cast<FixedArray>(from_base);
382 Tagged<Object> the_hole = GetReadOnlyRoots().the_hole_value();
383 for (uint32_t from_end = from_start + static_cast<uint32_t>(copy_size);
384 from_start < from_end; from_start++, to_start++) {
385 Tagged<Object> hole_or_smi = from->get(from_start);
386 if (hole_or_smi == the_hole) {
387 to->set_the_hole(to_start);
388 } else {
389 to->set(to_start, Smi::ToInt(hole_or_smi));
390 }
391 }
392}
393
394void CopyPackedSmiToDoubleElements(Tagged<FixedArrayBase> from_base,
395 uint32_t from_start,
397 uint32_t to_start, int packed_size,
398 int raw_copy_size) {
400 int copy_size = raw_copy_size;
401 uint32_t to_end;
402 if (raw_copy_size < 0) {
403 DCHECK_EQ(kCopyToEndAndInitializeToHole, raw_copy_size);
404 copy_size = packed_size - from_start;
405 to_end = to_base->length();
406 for (uint32_t i = to_start + copy_size; i < to_end; ++i) {
407 Cast<FixedDoubleArray>(to_base)->set_the_hole(i);
408 }
409 } else {
410 to_end = to_start + static_cast<uint32_t>(copy_size);
411 }
412 DCHECK(static_cast<int>(to_end) <= to_base->length());
413 DCHECK(packed_size >= 0 && packed_size <= copy_size);
414 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() &&
415 (copy_size + static_cast<int>(from_start)) <= from_base->length());
416 if (copy_size == 0) return;
417 Tagged<FixedArray> from = Cast<FixedArray>(from_base);
419 for (uint32_t from_end = from_start + static_cast<uint32_t>(packed_size);
420 from_start < from_end; from_start++, to_start++) {
421 Tagged<Object> smi = from->get(from_start);
422 DCHECK(!IsTheHole(smi));
423 to->set(to_start, Smi::ToInt(smi));
424 }
425}
426
427void CopyObjectToDoubleElements(Tagged<FixedArrayBase> from_base,
428 uint32_t from_start,
430 uint32_t to_start, int raw_copy_size) {
432 int copy_size = raw_copy_size;
433 if (raw_copy_size < 0) {
434 DCHECK_EQ(kCopyToEndAndInitializeToHole, raw_copy_size);
435 copy_size = from_base->length() - from_start;
436 for (int i = to_start + copy_size; i < to_base->length(); ++i) {
437 Cast<FixedDoubleArray>(to_base)->set_the_hole(i);
438 }
439 }
440 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base->length() &&
441 (copy_size + static_cast<int>(from_start)) <= from_base->length());
442 if (copy_size == 0) return;
443 Tagged<FixedArray> from = Cast<FixedArray>(from_base);
445 Tagged<Hole> the_hole = GetReadOnlyRoots().the_hole_value();
446 for (uint32_t from_end = from_start + copy_size; from_start < from_end;
447 from_start++, to_start++) {
448 Tagged<Object> hole_or_object = from->get(from_start);
449 if (hole_or_object == the_hole) {
450 to->set_the_hole(to_start);
451 } else {
452 to->set(to_start, Object::NumberValue(Cast<Number>(hole_or_object)));
453 }
454 }
455}
456
457void CopyDictionaryToDoubleElements(Isolate* isolate,
458 Tagged<FixedArrayBase> from_base,
459 uint32_t from_start,
461 uint32_t to_start, int raw_copy_size) {
464 int copy_size = raw_copy_size;
465 if (copy_size < 0) {
466 DCHECK_EQ(kCopyToEndAndInitializeToHole, copy_size);
467 copy_size = from->max_number_key() + 1 - from_start;
468 for (int i = to_start + copy_size; i < to_base->length(); ++i) {
469 Cast<FixedDoubleArray>(to_base)->set_the_hole(i);
470 }
471 }
472 if (copy_size == 0) return;
474 uint32_t to_length = to->length();
475 if (to_start + copy_size > to_length) {
476 copy_size = to_length - to_start;
477 }
478 for (int i = 0; i < copy_size; i++) {
479 InternalIndex entry = from->FindEntry(isolate, i + from_start);
480 if (entry.is_found()) {
481 to->set(i + to_start,
482 Object::NumberValue(Cast<Number>(from->ValueAt(entry))));
483 } else {
484 to->set_the_hole(i + to_start);
485 }
486 }
487}
488
489void SortIndices(Isolate* isolate, DirectHandle<FixedArray> indices,
490 uint32_t sort_size) {
491 if (sort_size == 0) return;
492
493 // Use AtomicSlot wrapper to ensure that std::sort uses atomic load and
494 // store operations that are safe for concurrent marking.
495 AtomicSlot start(indices->RawFieldOfFirstElement());
496 AtomicSlot end(start + sort_size);
497 std::sort(start, end, [isolate](Tagged_t elementA, Tagged_t elementB) {
498#ifdef V8_COMPRESS_POINTERS
503#else
504 Tagged<Object> a(elementA);
505 Tagged<Object> b(elementB);
506#endif
507 if (IsSmi(a) || !IsUndefined(a, isolate)) {
508 if (!IsSmi(b) && IsUndefined(b, isolate)) {
509 return true;
510 }
513 }
514 return !IsSmi(b) && IsUndefined(b, isolate);
515 });
516 WriteBarrier::ForRange(isolate->heap(), *indices, ObjectSlot(start),
517 ObjectSlot(end));
518}
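// ---------------------------------------------------------------------------
// Illustrative sketch, not part of elements.cc: the comparator in SortIndices
// above essentially orders the collected index values numerically while moving
// any `undefined` fillers toward the end of the range. The same ordering
// expressed over std::optional, where an empty optional plays the role of
// undefined:

#include <algorithm>
#include <optional>
#include <vector>

namespace sort_indices_sketch {  // hypothetical, for illustration only

inline void SortNumbersUndefinedLast(std::vector<std::optional<double>>& v) {
  std::sort(v.begin(), v.end(),
            [](const std::optional<double>& a, const std::optional<double>& b) {
              if (a && b) return *a < *b;              // both numbers: numeric order
              return a.has_value() && !b.has_value();  // numbers before "undefined"
            });
}

}  // namespace sort_indices_sketch
// ---------------------------------------------------------------------------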
519
520Maybe<bool> IncludesValueSlowPath(Isolate* isolate,
521 DirectHandle<JSObject> receiver,
522 DirectHandle<Object> value, size_t start_from,
523 size_t length) {
524 bool search_for_hole = IsUndefined(*value, isolate);
525 for (size_t k = start_from; k < length; ++k) {
526 LookupIterator it(isolate, receiver, k);
527 if (!it.IsFound()) {
528 if (search_for_hole) return Just(true);
529 continue;
530 }
531 DirectHandle<Object> element_k;
532 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, element_k,
534
535 if (Object::SameValueZero(*value, *element_k)) return Just(true);
536 }
537
538 return Just(false);
539}
540
541Maybe<int64_t> IndexOfValueSlowPath(Isolate* isolate,
542 DirectHandle<JSObject> receiver,
543 DirectHandle<Object> value,
544 size_t start_from, size_t length) {
545 for (size_t k = start_from; k < length; ++k) {
546 LookupIterator it(isolate, receiver, k);
547 if (!it.IsFound()) {
548 continue;
549 }
550 DirectHandle<Object> element_k;
552 isolate, element_k, Object::GetProperty(&it), Nothing<int64_t>());
553
554 if (Object::StrictEquals(*value, *element_k)) return Just<int64_t>(k);
555 }
556
557 return Just<int64_t>(-1);
558}
559
560// The InternalElementsAccessor is a helper class to expose otherwise protected
561// methods to its subclasses. Namely, we don't want to publicly expose methods
562// that take an entry (instead of an index) as an argument.
563class InternalElementsAccessor : public ElementsAccessor {
564 public:
565 InternalIndex GetEntryForIndex(Isolate* isolate, Tagged<JSObject> holder,
566 Tagged<FixedArrayBase> backing_store,
567 size_t index) override = 0;
568
569 PropertyDetails GetDetails(Tagged<JSObject> holder,
570 InternalIndex entry) override = 0;
571};
572
573// Base class for element handler implementations. Contains the common
574// logic for objects with different ElementsKinds.
575// Subclasses must specialize methods for which the element
576// implementation differs from the base class implementation.
577//
578// This class is intended to be used in the following way:
579//
580// class SomeElementsAccessor :
581// public ElementsAccessorBase<SomeElementsAccessor,
582// BackingStoreClass> {
583// ...
584// }
585//
586// This is an example of the Curiously Recurring Template Pattern (see
587// http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern). We use
588// CRTP to guarantee aggressive compile time optimizations (i.e. inlining and
589// specialization of SomeElementsAccessor methods).
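// ---------------------------------------------------------------------------
// Illustrative sketch, not part of elements.cc: in the CRTP arrangement
// described above, the base template calls statically-known Subclass::...Impl
// functions, so the compiler can inline and specialize them per accessor with
// no virtual dispatch on those paths. A minimal standalone example with
// invented names:

namespace crtp_sketch {  // hypothetical, for illustration only

template <typename Subclass>
class AccessorBase {
 public:
  // Public entry point; the per-kind behavior is bound at compile time.
  int Get(int index) const { return Subclass::GetImpl(index); }

  // Default implementation; a subclass may shadow it with its own GetImpl.
  static int GetImpl(int index) { return index; }
};

class DoublingAccessor : public AccessorBase<DoublingAccessor> {
 public:
  static int GetImpl(int index) { return 2 * index; }
};

// Usage: DoublingAccessor().Get(3) == 6 -- the call resolves statically to
// DoublingAccessor::GetImpl, mirroring how ElementsAccessorBase forwards to
// Subclass::GetImpl, Subclass::SetImpl, and friends.

}  // namespace crtp_sketch

// The real ElementsAccessorBase, which the sketch loosely mirrors, follows.
// ---------------------------------------------------------------------------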
590template <typename Subclass, typename ElementsTraitsParam>
591class ElementsAccessorBase : public InternalElementsAccessor {
592 public:
593 ElementsAccessorBase() = default;
594 ElementsAccessorBase(const ElementsAccessorBase&) = delete;
595 ElementsAccessorBase& operator=(const ElementsAccessorBase&) = delete;
596
597 using ElementsTraits = ElementsTraitsParam;
598 using BackingStore = typename ElementsTraitsParam::BackingStore;
599
600 static ElementsKind kind() { return ElementsTraits::Kind; }
601
602 static void ValidateContents(Tagged<JSObject> holder, size_t length) {}
603
604 static void ValidateImpl(Tagged<JSObject> holder) {
605 Tagged<FixedArrayBase> fixed_array_base = holder->elements();
606 if (!IsHeapObject(fixed_array_base)) return;
607 // Arrays that have been shifted in place can't be verified.
608 if (IsFreeSpaceOrFiller(fixed_array_base)) return;
609 size_t length = 0;
610 if (IsJSArray(holder)) {
611 Tagged<Object> length_obj = Cast<JSArray>(holder)->length();
612 if (IsSmi(length_obj)) {
613 length = Smi::ToInt(length_obj);
614 }
615 } else if (IsJSTypedArray(holder)) {
616 length = Cast<JSTypedArray>(holder)->length();
617 } else {
618 length = fixed_array_base->length();
619 }
620 Subclass::ValidateContents(holder, length);
621 }
622
623 void Validate(Tagged<JSObject> holder) final {
625 Subclass::ValidateImpl(holder);
626 }
627
628 bool HasElement(Tagged<JSObject> holder, uint32_t index,
629 Tagged<FixedArrayBase> backing_store,
630 PropertyFilter filter) final {
631 return Subclass::HasElementImpl(holder->GetIsolate(), holder, index,
632 backing_store, filter);
633 }
634
635 static bool HasElementImpl(Isolate* isolate, Tagged<JSObject> holder,
636 size_t index, Tagged<FixedArrayBase> backing_store,
638 return Subclass::GetEntryForIndexImpl(isolate, holder, backing_store, index,
639 filter)
640 .is_found();
641 }
642
643 bool HasEntry(Tagged<JSObject> holder, InternalIndex entry) final {
644 return Subclass::HasEntryImpl(holder->GetIsolate(), holder->elements(),
645 entry);
646 }
647
648 static bool HasEntryImpl(Isolate* isolate,
649 Tagged<FixedArrayBase> backing_store,
650 InternalIndex entry) {
652 }
653
654 bool HasAccessors(Tagged<JSObject> holder) final {
655 return Subclass::HasAccessorsImpl(holder, holder->elements());
656 }
657
658 static bool HasAccessorsImpl(Tagged<JSObject> holder,
659 Tagged<FixedArrayBase> backing_store) {
660 return false;
661 }
662
663 Handle<Object> Get(Isolate* isolate, DirectHandle<JSObject> holder,
664 InternalIndex entry) final {
665 return Subclass::GetInternalImpl(isolate, holder, entry);
666 }
667
668 static Handle<Object> GetInternalImpl(Isolate* isolate,
669 DirectHandle<JSObject> holder,
670 InternalIndex entry) {
671 return Subclass::GetImpl(isolate, holder->elements(), entry);
672 }
673
674 static Handle<Object> GetImpl(Isolate* isolate,
675 Tagged<FixedArrayBase> backing_store,
676 InternalIndex entry) {
677 return handle(Cast<BackingStore>(backing_store)->get(entry.as_int()),
678 isolate);
679 }
680
681 Handle<Object> GetAtomic(Isolate* isolate, DirectHandle<JSObject> holder,
682 InternalIndex entry, SeqCstAccessTag tag) final {
683 return Subclass::GetAtomicInternalImpl(isolate, holder->elements(), entry,
684 tag);
685 }
686
687 static Handle<Object> GetAtomicInternalImpl(
688 Isolate* isolate, Tagged<FixedArrayBase> backing_store,
689 InternalIndex entry, SeqCstAccessTag tag) {
690 UNREACHABLE();
691 }
692
693 void SetAtomic(DirectHandle<JSObject> holder, InternalIndex entry,
694 Tagged<Object> value, SeqCstAccessTag tag) final {
695 Subclass::SetAtomicInternalImpl(holder->elements(), entry, value, tag);
696 }
697
698 static void SetAtomicInternalImpl(Tagged<FixedArrayBase> backing_store,
699 InternalIndex entry, Tagged<Object> value,
700 SeqCstAccessTag tag) {
701 UNREACHABLE();
702 }
703
704 Handle<Object> SwapAtomic(Isolate* isolate, DirectHandle<JSObject> holder,
705 InternalIndex entry, Tagged<Object> value,
706 SeqCstAccessTag tag) final {
707 return Subclass::SwapAtomicInternalImpl(isolate, holder->elements(), entry,
708 value, tag);
709 }
710
711 static Handle<Object> SwapAtomicInternalImpl(
712 Isolate* isolate, Tagged<FixedArrayBase> backing_store,
713 InternalIndex entry, Tagged<Object> value, SeqCstAccessTag tag) {
714 UNREACHABLE();
715 }
716
717 Handle<Object> CompareAndSwapAtomic(Isolate* isolate,
718 DirectHandle<JSObject> holder,
719 InternalIndex entry,
720 Tagged<Object> expected,
721 Tagged<Object> value,
722 SeqCstAccessTag tag) final {
724 expected, value,
725 [=](Tagged<Object> expected_value,
726 Tagged<Object> new_value) {
727 return Subclass::CompareAndSwapAtomicInternalImpl(
728 holder->elements(), entry, expected_value,
729 new_value, tag);
730 }),
731 isolate);
732 }
733
734 static Tagged<Object> CompareAndSwapAtomicInternalImpl(
735 Tagged<FixedArrayBase> backing_store, InternalIndex entry,
736 Tagged<Object> expected, Tagged<Object> value, SeqCstAccessTag tag) {
737 UNREACHABLE();
738 }
739
740 void Set(DirectHandle<JSObject> holder, InternalIndex entry,
741 Tagged<Object> value) final {
742 Subclass::SetImpl(holder, entry, value);
743 }
744
745 void Reconfigure(DirectHandle<JSObject> object,
746 DirectHandle<FixedArrayBase> store, InternalIndex entry,
747 DirectHandle<Object> value,
748 PropertyAttributes attributes) final {
749 Subclass::ReconfigureImpl(object, store, entry, value, attributes);
750 }
751
752 static void ReconfigureImpl(DirectHandle<JSObject> object,
753 DirectHandle<FixedArrayBase> store,
754 InternalIndex entry, DirectHandle<Object> value,
755 PropertyAttributes attributes) {
756 UNREACHABLE();
757 }
758
759 Maybe<bool> Add(DirectHandle<JSObject> object, uint32_t index,
760 DirectHandle<Object> value, PropertyAttributes attributes,
761 uint32_t new_capacity) final {
762 return Subclass::AddImpl(object, index, value, attributes, new_capacity);
763 }
764
765 static Maybe<bool> AddImpl(DirectHandle<JSObject> object, uint32_t index,
766 DirectHandle<Object> value,
767 PropertyAttributes attributes,
768 uint32_t new_capacity) {
769 UNREACHABLE();
770 }
771
772 Maybe<uint32_t> Push(DirectHandle<JSArray> receiver, BuiltinArguments* args,
773 uint32_t push_size) final {
774 return Subclass::PushImpl(receiver, args, push_size);
775 }
776
777 static Maybe<uint32_t> PushImpl(DirectHandle<JSArray> receiver,
778 BuiltinArguments* args, uint32_t push_sized) {
779 UNREACHABLE();
780 }
781
782 Maybe<uint32_t> Unshift(DirectHandle<JSArray> receiver,
783 BuiltinArguments* args, uint32_t unshift_size) final {
784 return Subclass::UnshiftImpl(receiver, args, unshift_size);
785 }
786
787 static Maybe<uint32_t> UnshiftImpl(DirectHandle<JSArray> receiver,
788 BuiltinArguments* args,
789 uint32_t unshift_size) {
790 UNREACHABLE();
791 }
792
793 MaybeDirectHandle<Object> Pop(DirectHandle<JSArray> receiver) final {
794 return Subclass::PopImpl(receiver);
795 }
796
797 static MaybeDirectHandle<Object> PopImpl(DirectHandle<JSArray> receiver) {
798 UNREACHABLE();
799 }
800
801 MaybeDirectHandle<Object> Shift(DirectHandle<JSArray> receiver) final {
802 return Subclass::ShiftImpl(receiver);
803 }
804
805 static MaybeDirectHandle<Object> ShiftImpl(DirectHandle<JSArray> receiver) {
806 UNREACHABLE();
807 }
808
809 Maybe<bool> SetLength(DirectHandle<JSArray> array, uint32_t length) final {
810 return Subclass::SetLengthImpl(
811 array->GetIsolate(), array, length,
812 direct_handle(array->elements(), array->GetIsolate()));
813 }
814
815 static Maybe<bool> SetLengthImpl(Isolate* isolate,
816 DirectHandle<JSArray> array, uint32_t length,
817 DirectHandle<FixedArrayBase> backing_store) {
818 DCHECK(!array->SetLengthWouldNormalize(length));
819 DCHECK(IsFastElementsKind(array->GetElementsKind()));
820 uint32_t old_length = 0;
821 CHECK(Object::ToArrayIndex(array->length(), &old_length));
822
823 if (old_length < length) {
824 ElementsKind kind = array->GetElementsKind();
828 }
829 }
830
831 // Check whether the backing store should be shrunk or grown.
832 uint32_t capacity = backing_store->length();
833 old_length = std::min(old_length, capacity);
834 if (length == 0) {
835 array->initialize_elements();
836 } else if (length <= capacity) {
839 if (array->elements() != *backing_store) {
840 backing_store = direct_handle(array->elements(), isolate);
841 }
842 }
843 if (2 * length + JSObject::kMinAddedElementsCapacity <= capacity) {
844 // If more than half the elements won't be used, trim the array.
845 // Do not trim from short arrays to prevent frequent trimming on
846 // repeated pop operations.
847 // Leave some space for subsequent pushes (worked example after this method).
848 uint32_t new_capacity =
849 length + 1 == old_length ? (capacity + length) / 2 : length;
850 DCHECK_LT(new_capacity, capacity);
851 isolate->heap()->RightTrimArray(Cast<BackingStore>(*backing_store),
852 new_capacity, capacity);
853 // Fill the non-trimmed elements with holes.
854 Cast<BackingStore>(*backing_store)
855 ->FillWithHoles(length, std::min(old_length, new_capacity));
856 } else {
857 // Otherwise, fill the unused tail with holes.
858 Cast<BackingStore>(*backing_store)->FillWithHoles(length, old_length);
859 }
860 } else {
861 // Calculate a new capacity for the array.
862 uint32_t new_capacity;
863 if (capacity == 0) {
864 // If the existing capacity is zero, assume we are setting the length to
865 // presize to the exact size we want.
866 new_capacity = length;
867 } else {
868 // Otherwise, assume we want exponential growth semantics and grow as if
869 // we were pushing. We might not grow enough for the length, so take the
870 // max of the two values.
871 new_capacity = std::max(length, JSArray::NewElementsCapacity(capacity));
872 }
873 // Grow the array to the new capacity. Note that this code can create
874 // backing stores that consist almost entirely of holes, for which
875 // `JSObject::ShouldConvertToSlowElements` would return "true". This is
876 // intentional, because we are assuming the user is setting a length to
877 // pre-size an array to then write to it within bounds. A subsequent
878 // resizing operation, like Array.p.push, might still trigger a transition
879 // to dictionary elements because of sparseness.
880 MAYBE_RETURN(Subclass::GrowCapacityAndConvertImpl(array, new_capacity),
881 Nothing<bool>());
882 }
883
884 array->set_length(Smi::FromInt(length));
886 return Just(true);
887 }
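// ---------------------------------------------------------------------------
// Illustrative sketch, not part of elements.cc: the shrink branch of
// SetLengthImpl above trims the backing store only when less than roughly half
// of it would stay in use, and a pop-by-one shrink keeps extra slack instead
// of trimming to the exact length. A standalone model of that sizing decision
// (the constant 16 stands in for JSObject::kMinAddedElementsCapacity and is an
// assumption of this sketch):

namespace set_length_sizing_sketch {  // hypothetical, for illustration only

constexpr unsigned kMinAddedElementsCapacitySketch = 16;  // assumed value

// Returns the capacity to keep after setting the length of an array backed by
// `capacity` slots, or `capacity` itself when no trimming should happen.
inline unsigned CapacityAfterSetLength(unsigned length, unsigned old_length,
                                       unsigned capacity) {
  if (2 * length + kMinAddedElementsCapacitySketch <= capacity) {
    // A single pop (length + 1 == old_length) keeps about half the capacity
    // as slack for future pushes; a larger explicit shrink trims to length.
    return length + 1 == old_length ? (capacity + length) / 2 : length;
  }
  return capacity;  // keep the store; only the unused tail is holed out
}

// Example: with capacity 100, shrinking from old_length 41 to length 40 (one
// pop) trims to (100 + 40) / 2 = 70 slots, while shrinking from 100 straight
// to length 10 trims all the way down to 10 slots.

}  // namespace set_length_sizing_sketch
// ---------------------------------------------------------------------------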
888
889 size_t NumberOfElements(Isolate* isolate, Tagged<JSObject> receiver) final {
890 return Subclass::NumberOfElementsImpl(isolate, receiver,
891 receiver->elements());
892 }
893
894 static uint32_t NumberOfElementsImpl(Isolate* isolate,
896 Tagged<FixedArrayBase> backing_store) {
897 UNREACHABLE();
898 }
899
900 static size_t GetMaxIndex(Tagged<JSObject> receiver,
901 Tagged<FixedArrayBase> elements) {
902 if (IsJSArray(receiver)) {
904 return static_cast<uint32_t>(
906 }
907 return Subclass::GetCapacityImpl(receiver, elements);
908 }
909
910 static size_t GetMaxNumberOfEntries(Isolate* isolate,
912 Tagged<FixedArrayBase> elements) {
913 return Subclass::GetMaxIndex(receiver, elements);
914 }
915
916 static MaybeDirectHandle<FixedArrayBase> ConvertElementsWithCapacity(
917 DirectHandle<JSObject> object, DirectHandle<FixedArrayBase> old_elements,
918 ElementsKind from_kind, uint32_t capacity) {
919 return ConvertElementsWithCapacity(object, old_elements, from_kind,
920 capacity, 0, 0);
921 }
922
923 static MaybeDirectHandle<FixedArrayBase> ConvertElementsWithCapacity(
924 DirectHandle<JSObject> object, DirectHandle<FixedArrayBase> old_elements,
925 ElementsKind from_kind, uint32_t capacity, uint32_t src_index,
926 uint32_t dst_index) {
927 Isolate* isolate = object->GetIsolate();
928 DirectHandle<FixedArrayBase> new_elements;
929 // TODO(victorgomes): Retrieve native context in optimized code
930 // and remove the check isolate->context().is_null().
931 if (IsDoubleElementsKind(kind())) {
932 if (!isolate->context().is_null() &&
934 THROW_NEW_ERROR(isolate,
935 NewRangeError(MessageTemplate::kInvalidArrayLength));
936 }
937 new_elements = isolate->factory()->NewFixedDoubleArray(capacity);
938 } else {
939 if (!isolate->context().is_null() &&
941 THROW_NEW_ERROR(isolate,
942 NewRangeError(MessageTemplate::kInvalidArrayLength));
943 }
944 new_elements = isolate->factory()->NewFixedArray(capacity);
945 }
946
947 int packed_size = kPackedSizeNotKnown;
948 if (IsFastPackedElementsKind(from_kind) && IsJSArray(*object)) {
949 packed_size = Smi::ToInt(Cast<JSArray>(*object)->length());
950 }
951
952 Subclass::CopyElementsImpl(isolate, *old_elements, src_index, *new_elements,
953 from_kind, dst_index, packed_size,
954 kCopyToEndAndInitializeToHole);
955
956 return MaybeDirectHandle<FixedArrayBase>(new_elements);
957 }
958
959 static void TransitionElementsKindImpl(DirectHandle<JSObject> object,
960 DirectHandle<Map> to_map) {
961 Isolate* isolate = object->GetIsolate();
962 DirectHandle<Map> from_map(object->map(), isolate);
963 ElementsKind from_kind = from_map->elements_kind();
964 ElementsKind to_kind = to_map->elements_kind();
965 if (IsHoleyElementsKind(from_kind)) {
966 to_kind = GetHoleyElementsKind(to_kind);
967 }
968 if (from_kind != to_kind) {
969 // This method should never be called for any other case.
970 DCHECK(IsFastElementsKind(from_kind));
971 DCHECK(IsFastElementsKind(to_kind));
973
974 DirectHandle<FixedArrayBase> from_elements(object->elements(), isolate);
975 if (object->elements() == ReadOnlyRoots(isolate).empty_fixed_array() ||
976 IsDoubleElementsKind(from_kind) == IsDoubleElementsKind(to_kind)) {
977 // No change is needed to the elements() buffer, the transition
978 // only requires a map change.
979 JSObject::MigrateToMap(isolate, object, to_map);
980 } else {
981 DCHECK(
982 (IsSmiElementsKind(from_kind) && IsDoubleElementsKind(to_kind)) ||
983 (IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind)));
984 uint32_t capacity = static_cast<uint32_t>(object->elements()->length());
985 // Since the max length of FixedArray and FixedDoubleArray is the same,
986 // we can safely assume that element conversion with the same capacity
987 // will succeed.
990 DirectHandle<FixedArrayBase> elements =
991 ConvertElementsWithCapacity(object, from_elements, from_kind,
992 capacity)
993 .ToHandleChecked();
994 JSObject::SetMapAndElements(object, to_map, elements);
995 }
996 if (v8_flags.trace_elements_transitions) {
998 stdout, object, from_kind, from_elements, to_kind,
999 direct_handle(object->elements(), isolate));
1000 }
1001 }
1002 }
1003
1004 static Maybe<bool> GrowCapacityAndConvertImpl(DirectHandle<JSObject> object,
1005 uint32_t capacity) {
1006 ElementsKind from_kind = object->GetElementsKind();
1007 if (IsSmiOrObjectElementsKind(from_kind)) {
1008 // Array optimizations rely on the prototype lookups of Array objects
1009 // always returning undefined. If there is a store to the initial
1010 // prototype object, make sure all of these optimizations are invalidated.
1011 object->GetIsolate()->UpdateNoElementsProtectorOnSetLength(object);
1012 }
1013 DirectHandle<FixedArrayBase> old_elements(object->elements(),
1014 object->GetIsolate());
1015 // This method should only be called if there's a reason to update the
1016 // elements.
1018 IsDictionaryElementsKind(from_kind) ||
1019 static_cast<uint32_t>(old_elements->length()) < capacity);
1020 return Subclass::BasicGrowCapacityAndConvertImpl(
1021 object, old_elements, from_kind, kind(), capacity);
1022 }
1023
1024 static Maybe<bool> BasicGrowCapacityAndConvertImpl(
1025 DirectHandle<JSObject> object, DirectHandle<FixedArrayBase> old_elements,
1026 ElementsKind from_kind, ElementsKind to_kind, uint32_t capacity) {
1027 DirectHandle<FixedArrayBase> elements;
1029 object->GetIsolate(), elements,
1030 ConvertElementsWithCapacity(object, old_elements, from_kind, capacity),
1031 Nothing<bool>());
1032
1033 if (IsHoleyElementsKind(from_kind)) {
1034 to_kind = GetHoleyElementsKind(to_kind);
1035 }
1036 DirectHandle<Map> new_map =
1037 JSObject::GetElementsTransitionMap(object, to_kind);
1038 JSObject::SetMapAndElements(object, new_map, elements);
1039
1040 // Transition through the allocation site as well if present.
1041 JSObject::UpdateAllocationSite(object, to_kind);
1042
1043 if (v8_flags.trace_elements_transitions) {
1044 JSObject::PrintElementsTransition(stdout, object, from_kind, old_elements,
1045 to_kind, elements);
1046 }
1047 return Just(true);
1048 }
1049
1050 void TransitionElementsKind(DirectHandle<JSObject> object,
1051 DirectHandle<Map> map) final {
1052 Subclass::TransitionElementsKindImpl(object, map);
1053 }
1054
1055 Maybe<bool> GrowCapacityAndConvert(DirectHandle<JSObject> object,
1056 uint32_t capacity) final {
1057 return Subclass::GrowCapacityAndConvertImpl(object, capacity);
1058 }
1059
1060 Maybe<bool> GrowCapacity(DirectHandle<JSObject> object,
1061 uint32_t index) final {
1062 // This function is intended to be called from optimized code. We don't
1063 // want to trigger lazy deopts there, so refuse to handle cases that would.
1064 if (object->map()->is_prototype_map() ||
1065 object->WouldConvertToSlowElements(index)) {
1066 return Just(false);
1067 }
1068 DirectHandle<FixedArrayBase> old_elements(object->elements(),
1069 object->GetIsolate());
1070 uint32_t new_capacity = JSObject::NewElementsCapacity(index + 1);
1071 DCHECK(static_cast<uint32_t>(old_elements->length()) < new_capacity);
1073 constexpr uint32_t kMaxLength = FixedArray::kMaxLength;
1074
1075 if (new_capacity > kMaxLength) {
1076 return Just(false);
1077 }
1078 DirectHandle<FixedArrayBase> elements;
1080 object->GetIsolate(), elements,
1081 ConvertElementsWithCapacity(object, old_elements, kind(), new_capacity),
1082 Nothing<bool>());
1083
1084 DCHECK_EQ(object->GetElementsKind(), kind());
1085 // Transition through the allocation site as well if present.
1086 if (JSObject::UpdateAllocationSite<AllocationSiteUpdateMode::kCheckOnly>(
1087 object, kind())) {
1088 return Just(false);
1089 }
1090
1091 object->set_elements(*elements);
1092 return Just(true);
1093 }
1094
1095 void Delete(DirectHandle<JSObject> obj, InternalIndex entry) final {
1096 Subclass::DeleteImpl(obj, entry);
1097 }
1098
1099 static void CopyElementsImpl(Isolate* isolate, Tagged<FixedArrayBase> from,
1100 uint32_t from_start, Tagged<FixedArrayBase> to,
1101 ElementsKind from_kind, uint32_t to_start,
1102 int packed_size, int copy_size) {
1103 UNREACHABLE();
1104 }
1105
1106 void CopyElements(Isolate* isolate, Tagged<JSObject> from_holder,
1107 uint32_t from_start, ElementsKind from_kind,
1108 DirectHandle<FixedArrayBase> to, uint32_t to_start,
1109 int copy_size) final {
1110 int packed_size = kPackedSizeNotKnown;
1111 bool is_packed =
1112 IsFastPackedElementsKind(from_kind) && IsJSArray(from_holder);
1113 if (is_packed) {
1114 packed_size = Smi::ToInt(Cast<JSArray>(from_holder)->length());
1115 if (copy_size >= 0 && packed_size > copy_size) {
1116 packed_size = copy_size;
1117 }
1118 }
1119 Tagged<FixedArrayBase> from = from_holder->elements();
1120 // NOTE: the Subclass::CopyElementsImpl() methods
1121 // violate the handlified function signature convention:
1122 // raw pointer parameters in the function that allocates. This is done
1123 // intentionally to avoid ArrayConcat() builtin performance degradation.
1124 //
1125 // Details: allocations actually happen only when copying from an object
1126 // with fast double elements to an object with object elements. In all
1127 // other cases no allocations are performed, and handle creation would
1128 // cause noticeable performance degradation of the builtin.
1129 Subclass::CopyElementsImpl(isolate, from, from_start, *to, from_kind,
1130 to_start, packed_size, copy_size);
1131 }
1132
1133 void CopyElements(Isolate* isolate, DirectHandle<FixedArrayBase> source,
1134 ElementsKind source_kind,
1135 DirectHandle<FixedArrayBase> destination,
1136 int size) override {
1137 Subclass::CopyElementsImpl(isolate, *source, 0, *destination, source_kind,
1138 0, kPackedSizeNotKnown, size);
1139 }
1140
1143 size_t start, size_t end) override {
1144 Subclass::CopyTypedArrayElementsSliceImpl(source, destination, start, end);
1145 }
1146
1147 static void CopyTypedArrayElementsSliceImpl(Tagged<JSTypedArray> source,
1149 size_t start, size_t end) {
1150 UNREACHABLE();
1151 }
1152
1153 Tagged<Object> CopyElements(DirectHandle<JSAny> source,
1154 DirectHandle<JSObject> destination, size_t length,
1155 size_t offset) final {
1156 return Subclass::CopyElementsHandleImpl(source, destination, length,
1157 offset);
1158 }
1159
1160 static Tagged<Object> CopyElementsHandleImpl(
1161 DirectHandle<Object> source, DirectHandle<JSObject> destination,
1162 size_t length, size_t offset) {
1163 UNREACHABLE();
1164 }
1165
1166 DirectHandle<NumberDictionary> Normalize(
1167 DirectHandle<JSObject> object) final {
1168 return Subclass::NormalizeImpl(
1169 object, direct_handle(object->elements(), object->GetIsolate()));
1170 }
1171
1172 static DirectHandle<NumberDictionary> NormalizeImpl(
1173 DirectHandle<JSObject> object, DirectHandle<FixedArrayBase> elements) {
1174 UNREACHABLE();
1175 }
1176
1177 Maybe<bool> CollectValuesOrEntries(Isolate* isolate,
1178 DirectHandle<JSObject> object,
1179 DirectHandle<FixedArray> values_or_entries,
1180 bool get_entries, int* nof_items,
1181 PropertyFilter filter) override {
1182 return Subclass::CollectValuesOrEntriesImpl(
1183 isolate, object, values_or_entries, get_entries, nof_items, filter);
1184 }
1185
1186 static Maybe<bool> CollectValuesOrEntriesImpl(
1187 Isolate* isolate, DirectHandle<JSObject> object,
1188 DirectHandle<FixedArray> values_or_entries, bool get_entries,
1189 int* nof_items, PropertyFilter filter) {
1190 DCHECK_EQ(*nof_items, 0);
1191 KeyAccumulator accumulator(isolate, KeyCollectionMode::kOwnOnly,
1193 RETURN_NOTHING_IF_NOT_SUCCESSFUL(Subclass::CollectElementIndicesImpl(
1194 object, direct_handle(object->elements(), isolate), &accumulator));
1195 DirectHandle<FixedArray> keys = accumulator.GetKeys();
1196
1197 int count = 0;
1198 int i = 0;
1199 ElementsKind original_elements_kind = object->GetElementsKind();
1200
1201 for (; i < keys->length(); ++i) {
1202 DirectHandle<Object> key(keys->get(i), isolate);
1203 uint32_t index;
1204 if (!Object::ToUint32(*key, &index)) continue;
1205
1206 DCHECK_EQ(object->GetElementsKind(), original_elements_kind);
1207 InternalIndex entry = Subclass::GetEntryForIndexImpl(
1208 isolate, *object, object->elements(), index, filter);
1209 if (entry.is_not_found()) continue;
1210 PropertyDetails details = Subclass::GetDetailsImpl(*object, entry);
1211
1212 DirectHandle<Object> value;
1213 if (details.kind() == PropertyKind::kData) {
1214 value = Subclass::GetInternalImpl(isolate, object, entry);
1215 } else {
1216 // This might modify the elements and/or change the elements kind.
1217 LookupIterator it(isolate, object, index, LookupIterator::OWN);
1219 isolate, value, Object::GetProperty(&it), Nothing<bool>());
1220 }
1221 if (get_entries) value = MakeEntryPair(isolate, index, value);
1222 values_or_entries->set(count++, *value);
1223 if (object->GetElementsKind() != original_elements_kind) break;
1224 }
1225
1226 // Slow path caused by changes in elements kind during iteration.
1227 for (; i < keys->length(); i++) {
1228 DirectHandle<Object> key(keys->get(i), isolate);
1229 uint32_t index;
1230 if (!Object::ToUint32(*key, &index)) continue;
1231
1232 if (filter & ONLY_ENUMERABLE) {
1233 InternalElementsAccessor* accessor =
1234 reinterpret_cast<InternalElementsAccessor*>(
1235 object->GetElementsAccessor());
1236 InternalIndex entry = accessor->GetEntryForIndex(
1237 isolate, *object, object->elements(), index);
1238 if (entry.is_not_found()) continue;
1239 PropertyDetails details = accessor->GetDetails(*object, entry);
1240 if (!details.IsEnumerable()) continue;
1241 }
1242
1243 DirectHandle<Object> value;
1244 LookupIterator it(isolate, object, index, LookupIterator::OWN);
1246 Nothing<bool>());
1247
1248 if (get_entries) value = MakeEntryPair(isolate, index, value);
1249 values_or_entries->set(count++, *value);
1250 }
1251
1252 *nof_items = count;
1253 return Just(true);
1254 }
1255
1256 V8_WARN_UNUSED_RESULT ExceptionStatus CollectElementIndices(
1257 DirectHandle<JSObject> object, DirectHandle<FixedArrayBase> backing_store,
1258 KeyAccumulator* keys) final {
1259 return Subclass::CollectElementIndicesImpl(object, backing_store, keys);
1260 }
1261
1262 V8_WARN_UNUSED_RESULT static ExceptionStatus CollectElementIndicesImpl(
1263 DirectHandle<JSObject> object, DirectHandle<FixedArrayBase> backing_store,
1264 KeyAccumulator* keys) {
1266 // Non-dictionary elements can't have all-can-read accessors.
1267 size_t length = Subclass::GetMaxIndex(*object, *backing_store);
1268 PropertyFilter filter = keys->filter();
1269 Isolate* isolate = keys->isolate();
1270 Factory* factory = isolate->factory();
1271 for (size_t i = 0; i < length; i++) {
1272 if (Subclass::HasElementImpl(isolate, *object, i, *backing_store,
1273 filter)) {
1275 keys->AddKey(factory->NewNumberFromSize(i)));
1276 }
1277 }
1279 }
1280
1281 static Handle<FixedArray> DirectCollectElementIndicesImpl(
1282 Isolate* isolate, DirectHandle<JSObject> object,
1283 DirectHandle<FixedArrayBase> backing_store, GetKeysConversion convert,
1284 PropertyFilter filter, Handle<FixedArray> list, uint32_t* nof_indices,
1285 uint32_t insertion_index = 0) {
1286 size_t length = Subclass::GetMaxIndex(*object, *backing_store);
1287 uint32_t const kMaxStringTableEntries =
1288 isolate->heap()->MaxNumberToStringCacheSize();
1289 for (size_t i = 0; i < length; i++) {
1290 if (Subclass::HasElementImpl(isolate, *object, i, *backing_store,
1291 filter)) {
1292 if (convert == GetKeysConversion::kConvertToString) {
1293 bool use_cache = i < kMaxStringTableEntries;
1294 DirectHandle<String> index_string =
1295 isolate->factory()->SizeToString(i, use_cache);
1296 list->set(insertion_index, *index_string);
1297 } else {
1298 DirectHandle<Object> number =
1299 isolate->factory()->NewNumberFromSize(i);
1300 list->set(insertion_index, *number);
1301 }
1302 insertion_index++;
1303 }
1304 }
1305 *nof_indices = insertion_index;
1306 return list;
1307 }
1308
1309 MaybeHandle<FixedArray> PrependElementIndices(
1310 Isolate* isolate, DirectHandle<JSObject> object,
1311 DirectHandle<FixedArrayBase> backing_store, DirectHandle<FixedArray> keys,
1312 GetKeysConversion convert, PropertyFilter filter) final {
1313 return Subclass::PrependElementIndicesImpl(isolate, object, backing_store,
1314 keys, convert, filter);
1315 }
1316
1317 static MaybeHandle<FixedArray> PrependElementIndicesImpl(
1318 Isolate* isolate, DirectHandle<JSObject> object,
1319 DirectHandle<FixedArrayBase> backing_store, DirectHandle<FixedArray> keys,
1320 GetKeysConversion convert, PropertyFilter filter) {
1321 uint32_t nof_property_keys = keys->length();
1322 size_t initial_list_length =
1323 Subclass::GetMaxNumberOfEntries(isolate, *object, *backing_store);
1324
1325 if (initial_list_length > FixedArray::kMaxLength - nof_property_keys) {
1326 THROW_NEW_ERROR(isolate,
1327 NewRangeError(MessageTemplate::kInvalidArrayLength));
1328 }
1329 initial_list_length += nof_property_keys;
1330
1331 // Collect the element indices into a new list.
1332 DCHECK_LE(initial_list_length, std::numeric_limits<int>::max());
1333 MaybeHandle<FixedArray> raw_array = isolate->factory()->TryNewFixedArray(
1334 static_cast<int>(initial_list_length));
1335 Handle<FixedArray> combined_keys;
1336
1337 // If we have a holey backing store, try to precisely estimate the backing
1338 // store size as a last-resort measure when we cannot allocate the big
1339 // array.
1340 if (!raw_array.ToHandle(&combined_keys)) {
1342 // If we overestimate the result list size we might end up in the
1343 // large-object space which doesn't free memory on shrinking the list.
1344 // Hence we try to estimate the final size for holey backing stores more
1345 // precisely here.
1346 initial_list_length =
1347 Subclass::NumberOfElementsImpl(isolate, *object, *backing_store);
1348 initial_list_length += nof_property_keys;
1349 }
1350 DCHECK_LE(initial_list_length, std::numeric_limits<int>::max());
1351 combined_keys = isolate->factory()->NewFixedArray(
1352 static_cast<int>(initial_list_length));
1353 }
1354
1355 uint32_t nof_indices = 0;
1356 bool needs_sorting = IsDictionaryElementsKind(kind()) ||
1358 combined_keys = Subclass::DirectCollectElementIndicesImpl(
1359 isolate, object, backing_store,
1360 needs_sorting ? GetKeysConversion::kKeepNumbers : convert, filter,
1361 combined_keys, &nof_indices);
1362
1363 if (needs_sorting) {
1364 SortIndices(isolate, combined_keys, nof_indices);
1365 // Indices from dictionary elements should only be converted after
1366 // sorting.
1367 if (convert == GetKeysConversion::kConvertToString) {
1368 for (uint32_t i = 0; i < nof_indices; i++) {
1369 DirectHandle<Object> index_string =
1370 isolate->factory()->Uint32ToString(
1371 Object::NumberValue(combined_keys->get(i)));
1372 combined_keys->set(i, *index_string);
1373 }
1374 }
1375 }
1376
1377 // Copy over the passed-in property keys.
1378 CopyObjectToObjectElements(isolate, *keys, PACKED_ELEMENTS, 0,
1379 *combined_keys, PACKED_ELEMENTS, nof_indices,
1380 nof_property_keys);
1381
1382 // For holey elements and arguments we might have to shrink the collected
1383 // keys since the estimates might be off.
1386 // Shrink combined_keys to the final size.
1387 int final_size = nof_indices + nof_property_keys;
1388 DCHECK_LE(final_size, combined_keys->length());
1389 return FixedArray::RightTrimOrEmpty(isolate, combined_keys, final_size);
1390 }
1391
1392 return combined_keys;
1393 }
1394
1395 V8_WARN_UNUSED_RESULT ExceptionStatus AddElementsToKeyAccumulator(
1396 DirectHandle<JSObject> receiver, KeyAccumulator* accumulator,
1397 AddKeyConversion convert) final {
1398 return Subclass::AddElementsToKeyAccumulatorImpl(receiver, accumulator,
1399 convert);
1400 }
1401
1402 static uint32_t GetCapacityImpl(Tagged<JSObject> holder,
1403 Tagged<FixedArrayBase> backing_store) {
1404 return backing_store->length();
1405 }
1406
1407 size_t GetCapacity(Tagged<JSObject> holder,
1408 Tagged<FixedArrayBase> backing_store) final {
1409 return Subclass::GetCapacityImpl(holder, backing_store);
1410 }
1411
1412 static MaybeDirectHandle<Object> FillImpl(DirectHandle<JSObject> receiver,
1413 DirectHandle<Object> obj_value,
1414 size_t start, size_t end) {
1415 UNREACHABLE();
1416 }
1417
1418 MaybeDirectHandle<Object> Fill(DirectHandle<JSObject> receiver,
1419 DirectHandle<Object> obj_value, size_t start,
1420 size_t end) override {
1421 return Subclass::FillImpl(receiver, obj_value, start, end);
1422 }
1423
1424 static Maybe<bool> IncludesValueImpl(Isolate* isolate,
1425 DirectHandle<JSObject> receiver,
1426 DirectHandle<Object> value,
1427 size_t start_from, size_t length) {
1428 return IncludesValueSlowPath(isolate, receiver, value, start_from, length);
1429 }
1430
1431 Maybe<bool> IncludesValue(Isolate* isolate, DirectHandle<JSObject> receiver,
1432 DirectHandle<Object> value, size_t start_from,
1433 size_t length) final {
1434 return Subclass::IncludesValueImpl(isolate, receiver, value, start_from,
1435 length);
1436 }
1437
1438 static Maybe<int64_t> IndexOfValueImpl(Isolate* isolate,
1439 DirectHandle<JSObject> receiver,
1440 DirectHandle<Object> value,
1441 size_t start_from, size_t length) {
1442 return IndexOfValueSlowPath(isolate, receiver, value, start_from, length);
1443 }
1444
1445 Maybe<int64_t> IndexOfValue(Isolate* isolate, DirectHandle<JSObject> receiver,
1446 DirectHandle<Object> value, size_t start_from,
1447 size_t length) final {
1448 return Subclass::IndexOfValueImpl(isolate, receiver, value, start_from,
1449 length);
1450 }
1451
1452 static Maybe<int64_t> LastIndexOfValueImpl(DirectHandle<JSObject> receiver,
1453 DirectHandle<Object> value,
1454 size_t start_from) {
1455 UNREACHABLE();
1456 }
1457
1458 Maybe<int64_t> LastIndexOfValue(DirectHandle<JSObject> receiver,
1459 DirectHandle<Object> value,
1460 size_t start_from) final {
1461 return Subclass::LastIndexOfValueImpl(receiver, value, start_from);
1462 }
1463
1464 static void ReverseImpl(Tagged<JSObject> receiver) { UNREACHABLE(); }
1465
1466 void Reverse(Tagged<JSObject> receiver) final {
1467 Subclass::ReverseImpl(receiver);
1468 }
1469
1470 static InternalIndex GetEntryForIndexImpl(
1471 Isolate* isolate, Tagged<JSObject> holder,
1472 Tagged<FixedArrayBase> backing_store, size_t index,
1473 PropertyFilter filter) {
1476 size_t length = Subclass::GetMaxIndex(holder, backing_store);
1479 index < length,
1480 index <= static_cast<size_t>(std::numeric_limits<int>::max()));
1481 return index < length &&
1482 !Cast<BackingStore>(backing_store)
1483 ->is_the_hole(isolate, static_cast<int>(index))
1484 ? InternalIndex(index)
1485 : InternalIndex::NotFound();
1486 } else {
1487 return index < length ? InternalIndex(index) : InternalIndex::NotFound();
1488 }
1489 }
1490
1491 InternalIndex GetEntryForIndex(Isolate* isolate, Tagged<JSObject> holder,
1492 Tagged<FixedArrayBase> backing_store,
1493 size_t index) final {
1494 return Subclass::GetEntryForIndexImpl(isolate, holder, backing_store, index,
1496 }
1497
1498 static PropertyDetails GetDetailsImpl(Tagged<FixedArrayBase> backing_store,
1499 InternalIndex entry) {
1500 return PropertyDetails(PropertyKind::kData, NONE,
1502 }
1503
1504 static PropertyDetails GetDetailsImpl(Tagged<JSObject> holder,
1505 InternalIndex entry) {
1506 return PropertyDetails(PropertyKind::kData, NONE,
1508 }
1509
1510 PropertyDetails GetDetails(Tagged<JSObject> holder,
1511 InternalIndex entry) final {
1512 return Subclass::GetDetailsImpl(holder, entry);
1513 }
1514
1515 Handle<FixedArray> CreateListFromArrayLike(Isolate* isolate,
1516 DirectHandle<JSObject> object,
1517 uint32_t length) final {
1518 return Subclass::CreateListFromArrayLikeImpl(isolate, object, length);
1519 }
1520
1521 static Handle<FixedArray> CreateListFromArrayLikeImpl(
1522 Isolate* isolate, DirectHandle<JSObject> object, uint32_t length) {
1523 UNREACHABLE();
1524 }
1525};
1526
1527class DictionaryElementsAccessor
1528 : public ElementsAccessorBase<DictionaryElementsAccessor,
1529 ElementsKindTraits<DICTIONARY_ELEMENTS>> {
1530 public:
1531 static uint32_t GetMaxIndex(Tagged<JSObject> receiver,
1532 Tagged<FixedArrayBase> elements) {
1533 // We cannot properly estimate this for dictionaries.
1534 UNREACHABLE();
1535 }
1536
1537 static uint32_t GetMaxNumberOfEntries(Isolate* isolate,
1539 Tagged<FixedArrayBase> backing_store) {
1540 return NumberOfElementsImpl(isolate, receiver, backing_store);
1541 }
1542
1543 static uint32_t NumberOfElementsImpl(Isolate* isolate,
1545 Tagged<FixedArrayBase> backing_store) {
1547 return dict->NumberOfElements();
1548 }
1549
1550 static Maybe<bool> SetLengthImpl(Isolate* isolate,
1551 DirectHandle<JSArray> array, uint32_t length,
1552 DirectHandle<FixedArrayBase> backing_store) {
1553 auto dict = Cast<NumberDictionary>(backing_store);
1554 uint32_t old_length = 0;
1555 CHECK(Object::ToArrayLength(array->length(), &old_length));
1556 {
1558 ReadOnlyRoots roots(isolate);
1559 if (length < old_length) {
1560 if (dict->requires_slow_elements()) {
1561 // Find last non-deletable element in range of elements to be
1562 // deleted and adjust range accordingly.
1563 for (InternalIndex entry : dict->IterateEntries()) {
1564 Tagged<Object> index = dict->KeyAt(isolate, entry);
1565 if (dict->IsKey(roots, index)) {
1566 uint32_t number =
1567 static_cast<uint32_t>(Object::NumberValue(index));
1568 if (length <= number && number < old_length) {
1569 PropertyDetails details = dict->DetailsAt(entry);
1570 if (!details.IsConfigurable()) length = number + 1;
1571 }
1572 }
1573 }
1574 }
1575
1576 if (length == 0) {
1577 // Flush the backing store.
1578 array->initialize_elements();
1579 } else {
1580 // Remove elements that should be deleted.
1581 int removed_entries = 0;
1582 for (InternalIndex entry : dict->IterateEntries()) {
1583 Tagged<Object> index = dict->KeyAt(isolate, entry);
1584 if (dict->IsKey(roots, index)) {
1585 uint32_t number =
1586 static_cast<uint32_t>(Object::NumberValue(index));
1587 if (length <= number && number < old_length) {
1588 dict->ClearEntry(entry);
1589 removed_entries++;
1590 }
1591 }
1592 }
1593
1594 if (removed_entries > 0) {
1595 // Update the number of elements.
1596 dict->ElementsRemoved(removed_entries);
1597 }
1598 }
1599 }
1600 }
1601
1602 DirectHandle<Number> length_obj =
1603 isolate->factory()->NewNumberFromUint(length);
1604 array->set_length(*length_obj);
1605 return Just(true);
1606 }
1607
1608 static void CopyElementsImpl(Isolate* isolate, Tagged<FixedArrayBase> from,
1609 uint32_t from_start, Tagged<FixedArrayBase> to,
1610 ElementsKind from_kind, uint32_t to_start,
1611 int packed_size, int copy_size) {
1612 UNREACHABLE();
1613 }
1614
1615 static void DeleteImpl(DirectHandle<JSObject> obj, InternalIndex entry) {
1616 DirectHandle<NumberDictionary> dict(Cast<NumberDictionary>(obj->elements()),
1617 obj->GetIsolate());
1618 dict = NumberDictionary::DeleteEntry(obj->GetIsolate(), dict, entry);
1619 obj->set_elements(*dict);
1620 }
1621
1622 static bool HasAccessorsImpl(Tagged<JSObject> holder,
1623 Tagged<FixedArrayBase> backing_store) {
1624 DisallowGarbageCollection no_gc;
1625 Tagged<NumberDictionary> dict = Cast<NumberDictionary>(backing_store);
1626 if (!dict->requires_slow_elements()) return false;
1627 PtrComprCageBase cage_base = GetPtrComprCageBase(holder);
1628 ReadOnlyRoots roots = GetReadOnlyRoots();
1629 for (InternalIndex i : dict->IterateEntries()) {
1630 Tagged<Object> key = dict->KeyAt(cage_base, i);
1631 if (!dict->IsKey(roots, key)) continue;
1632 PropertyDetails details = dict->DetailsAt(i);
1633 if (details.kind() == PropertyKind::kAccessor) return true;
1634 }
1635 return false;
1636 }
1637
1638 static Tagged<Object> GetRaw(Tagged<FixedArrayBase> store,
1639 InternalIndex entry) {
1640 Tagged<NumberDictionary> backing_store = Cast<NumberDictionary>(store);
1641 return backing_store->ValueAt(entry);
1642 }
1643
1644 static Handle<Object> GetImpl(Isolate* isolate,
1645 Tagged<FixedArrayBase> backing_store,
1646 InternalIndex entry) {
1647 return handle(GetRaw(backing_store, entry), isolate);
1648 }
1649
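// The *AtomicInternalImpl helpers below perform sequentially consistent
// (SeqCst) loads, stores, swaps and compare-and-swaps on dictionary values,
// for callers that require atomic element access.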
1650 static Handle<Object> GetAtomicInternalImpl(
1651 Isolate* isolate, Tagged<FixedArrayBase> backing_store,
1652 InternalIndex entry, SeqCstAccessTag tag) {
1653 return handle(Cast<NumberDictionary>(backing_store)->ValueAt(entry, tag),
1654 isolate);
1655 }
1656
1657 static inline void SetImpl(DirectHandle<JSObject> holder, InternalIndex entry,
1658 Tagged<Object> value) {
1659 SetImpl(holder->elements(), entry, value);
1660 }
1661
1662 static inline void SetImpl(Tagged<FixedArrayBase> backing_store,
1663 InternalIndex entry, Tagged<Object> value) {
1664 Cast<NumberDictionary>(backing_store)->ValueAtPut(entry, value);
1665 }
1666
1667 static void SetAtomicInternalImpl(Tagged<FixedArrayBase> backing_store,
1668 InternalIndex entry, Tagged<Object> value,
1669 SeqCstAccessTag tag) {
1670 Cast<NumberDictionary>(backing_store)->ValueAtPut(entry, value, tag);
1671 }
1672
1673 static Handle<Object> SwapAtomicInternalImpl(
1674 Isolate* isolate, Tagged<FixedArrayBase> backing_store,
1675 InternalIndex entry, Tagged<Object> value, SeqCstAccessTag tag) {
1676 return handle(
1677 Cast<NumberDictionary>(backing_store)->ValueAtSwap(entry, value, tag),
1678 isolate);
1679 }
1680
1681 static Tagged<Object> CompareAndSwapAtomicInternalImpl(
1682 Tagged<FixedArrayBase> backing_store, InternalIndex entry,
1683 Tagged<Object> expected, Tagged<Object> value, SeqCstAccessTag tag) {
1684 return Cast<NumberDictionary>(backing_store)
1685 ->ValueAtCompareAndSwap(entry, expected, value, tag);
1686 }
1687
1688 static void ReconfigureImpl(DirectHandle<JSObject> object,
1689 DirectHandle<FixedArrayBase> store,
1690 InternalIndex entry, DirectHandle<Object> value,
1691 PropertyAttributes attributes) {
1692 Tagged<NumberDictionary> dictionary = Cast<NumberDictionary>(*store);
1693 if (attributes != NONE) object->RequireSlowElements(dictionary);
1694 dictionary->ValueAtPut(entry, *value);
1695 PropertyDetails details = dictionary->DetailsAt(entry);
1696 details =
1697 PropertyDetails(PropertyKind::kData, attributes,
1698 PropertyCellType::kNoCell, details.dictionary_index());
1699
1700 dictionary->DetailsAtPut(entry, details);
1701 }
1702
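// Adds an element to a dictionary-mode object: a fast (or fast string
// wrapper) backing store is normalized first, the (index, value, details)
// triple is inserted, and the object is marked as requiring slow elements
// whenever the attributes are anything other than NONE.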
1703 static Maybe<bool> AddImpl(DirectHandle<JSObject> object, uint32_t index,
1704 DirectHandle<Object> value,
1705 PropertyAttributes attributes,
1706 uint32_t new_capacity) {
1707 PropertyDetails details(PropertyKind::kData, attributes,
1708 PropertyCellType::kNoCell);
1709 DirectHandle<NumberDictionary> dictionary =
1710 object->HasFastElements() || object->HasFastStringWrapperElements()
1711 ? JSObject::NormalizeElements(object)
1712 : direct_handle(Cast<NumberDictionary>(object->elements()),
1713 object->GetIsolate());
1714 DirectHandle<NumberDictionary> new_dictionary = NumberDictionary::Add(
1715 object->GetIsolate(), dictionary, index, value, details);
1716 new_dictionary->UpdateMaxNumberKey(index, object);
1717 if (attributes != NONE) object->RequireSlowElements(*new_dictionary);
1718 if (dictionary.is_identical_to(new_dictionary)) return Just(true);
1719 object->set_elements(*new_dictionary);
1720 return Just(true);
1721 }
1722
1723 static bool HasEntryImpl(Isolate* isolate, Tagged<FixedArrayBase> store,
1724 InternalIndex entry) {
1725 DisallowGarbageCollection no_gc;
1726 Tagged<NumberDictionary> dict = Cast<NumberDictionary>(store);
1727 Tagged<Object> index = dict->KeyAt(isolate, entry);
1728 return !IsTheHole(index, isolate);
1729 }
1730
1731 static InternalIndex GetEntryForIndexImpl(Isolate* isolate,
1732 Tagged<JSObject> holder,
1733 Tagged<FixedArrayBase> backing_store,
1734 size_t index,
1735 PropertyFilter filter) {
1736 DisallowGarbageCollection no_gc;
1737 Tagged<NumberDictionary> dictionary = Cast<NumberDictionary>(backing_store);
1738 DCHECK_LE(index, std::numeric_limits<uint32_t>::max());
1739 InternalIndex entry =
1740 dictionary->FindEntry(isolate, static_cast<uint32_t>(index));
1741 if (entry.is_not_found()) return entry;
1742
1743 if (filter != ALL_PROPERTIES) {
1744 PropertyDetails details = dictionary->DetailsAt(entry);
1745 PropertyAttributes attr = details.attributes();
1746 if ((int{attr} & filter) != 0) return InternalIndex::NotFound();
1747 }
1748 return entry;
1749 }
1750
1751 static PropertyDetails GetDetailsImpl(Tagged<JSObject> holder,
1752 InternalIndex entry) {
1753 return GetDetailsImpl(holder->elements(), entry);
1754 }
1755
1756 static PropertyDetails GetDetailsImpl(Tagged<FixedArrayBase> backing_store,
1757 InternalIndex entry) {
1758 return Cast<NumberDictionary>(backing_store)->DetailsAt(entry);
1759 }
1760
1761 static uint32_t FilterKey(DirectHandle<NumberDictionary> dictionary,
1762 InternalIndex entry, Tagged<Object> raw_key,
1763 PropertyFilter filter) {
1764 DCHECK(IsNumber(raw_key));
1765 DCHECK_LE(Object::NumberValue(raw_key), kMaxUInt32);
1766 PropertyDetails details = dictionary->DetailsAt(entry);
1767 PropertyAttributes attr = details.attributes();
1768 if ((int{attr} & filter) != 0) return kMaxUInt32;
1769 return static_cast<uint32_t>(Object::NumberValue(raw_key));
1770 }
1771
1772 static uint32_t GetKeyForEntryImpl(Isolate* isolate,
1773 DirectHandle<NumberDictionary> dictionary,
1774 InternalIndex entry,
1775 PropertyFilter filter) {
1776 DisallowGarbageCollection no_gc;
1777 Tagged<Object> raw_key = dictionary->KeyAt(isolate, entry);
1778 if (!dictionary->IsKey(ReadOnlyRoots(isolate), raw_key)) return kMaxUInt32;
1779 return FilterKey(dictionary, entry, raw_key, filter);
1780 }
1781
1782 V8_WARN_UNUSED_RESULT static ExceptionStatus CollectElementIndicesImpl(
1783 DirectHandle<JSObject> object, DirectHandle<FixedArrayBase> backing_store,
1784 KeyAccumulator* keys) {
1785 if (keys->filter() & SKIP_STRINGS) return ExceptionStatus::kSuccess;
1786 Isolate* isolate = keys->isolate();
1787 auto dictionary = Cast<NumberDictionary>(backing_store);
1788 DirectHandle<FixedArray> elements = isolate->factory()->NewFixedArray(
1789 GetMaxNumberOfEntries(isolate, *object, *backing_store));
1790 int insertion_index = 0;
1791 PropertyFilter filter = keys->filter();
1792 ReadOnlyRoots roots(isolate);
1793 for (InternalIndex i : dictionary->IterateEntries()) {
1794 AllowGarbageCollection allow_gc;
1795 Tagged<Object> raw_key = dictionary->KeyAt(isolate, i);
1796 if (!dictionary->IsKey(roots, raw_key)) continue;
1797 uint32_t key = FilterKey(dictionary, i, raw_key, filter);
1798 if (key == kMaxUInt32) {
1799 // This might allocate, but {raw_key} is not used afterwards.
1800 keys->AddShadowingKey(raw_key, &allow_gc);
1801 continue;
1802 }
1803 elements->set(insertion_index, raw_key);
1804 insertion_index++;
1805 }
1806 SortIndices(isolate, elements, insertion_index);
1807 for (int i = 0; i < insertion_index; i++) {
1808 RETURN_FAILURE_IF_NOT_SUCCESSFUL(keys->AddKey(elements->get(i)));
1809 }
1810 return ExceptionStatus::kSuccess;
1811 }
1812
1813 static Handle<FixedArray> DirectCollectElementIndicesImpl(
1814 Isolate* isolate, DirectHandle<JSObject> object,
1815 DirectHandle<FixedArrayBase> backing_store, GetKeysConversion convert,
1816 PropertyFilter filter, Handle<FixedArray> list, uint32_t* nof_indices,
1817 uint32_t insertion_index = 0) {
1818 if (filter & SKIP_STRINGS) return list;
1819
1820 auto dictionary = Cast<NumberDictionary>(backing_store);
1821 for (InternalIndex i : dictionary->IterateEntries()) {
1822 uint32_t key = GetKeyForEntryImpl(isolate, dictionary, i, filter);
1823 if (key == kMaxUInt32) continue;
1824 DirectHandle<Object> index = isolate->factory()->NewNumberFromUint(key);
1825 list->set(insertion_index, *index);
1826 insertion_index++;
1827 }
1828 *nof_indices = insertion_index;
1829 return list;
1830 }
1831
1832 V8_WARN_UNUSED_RESULT static ExceptionStatus AddElementsToKeyAccumulatorImpl(
1833 DirectHandle<JSObject> receiver, KeyAccumulator* accumulator,
1834 AddKeyConversion convert) {
1835 Isolate* isolate = accumulator->isolate();
1836 DirectHandle<NumberDictionary> dictionary(
1837 Cast<NumberDictionary>(receiver->elements()), isolate);
1838 ReadOnlyRoots roots(isolate);
1839 for (InternalIndex i : dictionary->IterateEntries()) {
1840 Tagged<Object> k = dictionary->KeyAt(isolate, i);
1841 if (!dictionary->IsKey(roots, k)) continue;
1842 Tagged<Object> value = dictionary->ValueAt(isolate, i);
1843 DCHECK(!IsTheHole(value, isolate));
1844 DCHECK(!IsAccessorPair(value));
1845 DCHECK(!IsAccessorInfo(value));
1846 RETURN_FAILURE_IF_NOT_SUCCESSFUL(accumulator->AddKey(value, convert));
1847 }
1848 return ExceptionStatus::kSuccess;
1849 }
1850
1851 static bool IncludesValueFastPath(Isolate* isolate,
1852 DirectHandle<JSObject> receiver,
1853 DirectHandle<Object> value,
1854 size_t start_from, size_t length,
1855 Maybe<bool>* result) {
1856 DisallowGarbageCollection no_gc;
1857 Tagged<NumberDictionary> dictionary =
1858 Cast<NumberDictionary>(receiver->elements());
1859 Tagged<Object> the_hole = ReadOnlyRoots(isolate).the_hole_value();
1860 Tagged<Object> undefined = ReadOnlyRoots(isolate).undefined_value();
1861
1862 // Scan for accessor properties. If accessors are present, then elements
1863 // must be accessed in order via the slow path.
1864 bool found = false;
1865 for (InternalIndex i : dictionary->IterateEntries()) {
1866 Tagged<Object> k = dictionary->KeyAt(isolate, i);
1867 if (k == the_hole) continue;
1868 if (k == undefined) continue;
1869
1870 uint32_t index;
1871 if (!Object::ToArrayIndex(k, &index) || index < start_from ||
1872 index >= length) {
1873 continue;
1874 }
1875
1876 if (dictionary->DetailsAt(i).kind() == PropertyKind::kAccessor) {
1877 // Restart from beginning in slow path, otherwise we may observably
1878 // access getters out of order
1879 return false;
1880 } else if (!found) {
1881 Tagged<Object> element_k = dictionary->ValueAt(isolate, i);
1882 if (Object::SameValueZero(*value, element_k)) found = true;
1883 }
1884 }
1885
1886 *result = Just(found);
1887 return true;
1888 }
1889
1890 static Maybe<bool> IncludesValueImpl(Isolate* isolate,
1891 DirectHandle<JSObject> receiver,
1892 DirectHandle<Object> value,
1893 size_t start_from, size_t length) {
1894 DCHECK(JSObject::PrototypeHasNoElements(isolate, *receiver));
1895 bool search_for_hole = IsUndefined(*value, isolate);
1896
1897 if (!search_for_hole) {
1898 Maybe<bool> result = Nothing<bool>();
1899 if (DictionaryElementsAccessor::IncludesValueFastPath(
1900 isolate, receiver, value, start_from, length, &result)) {
1901 return result;
1902 }
1903 }
1904 ElementsKind original_elements_kind = receiver->GetElementsKind();
1905 USE(original_elements_kind);
1906 DirectHandle<NumberDictionary> dictionary(
1907 Cast<NumberDictionary>(receiver->elements()), isolate);
1908 // Iterate through the entire range, as accessing elements out of order is
1909 // observable.
1910 for (size_t k = start_from; k < length; ++k) {
1911 DCHECK_EQ(receiver->GetElementsKind(), original_elements_kind);
1912 InternalIndex entry =
1913 dictionary->FindEntry(isolate, static_cast<uint32_t>(k));
1914 if (entry.is_not_found()) {
1915 if (search_for_hole) return Just(true);
1916 continue;
1917 }
1918
1919 PropertyDetails details = GetDetailsImpl(*dictionary, entry);
1920 switch (details.kind()) {
1921 case PropertyKind::kData: {
1922 Tagged<Object> element_k = dictionary->ValueAt(entry);
1923 if (Object::SameValueZero(*value, element_k)) return Just(true);
1924 break;
1925 }
1926 case PropertyKind::kAccessor: {
1927 LookupIterator it(isolate, receiver, k,
1928 LookupIterator::OWN_SKIP_INTERCEPTOR);
1929 DCHECK(it.IsFound());
1930 DCHECK_EQ(it.state(), LookupIterator::ACCESSOR);
1931 DirectHandle<Object> element_k;
1932
1933 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, element_k,
1934 Object::GetPropertyWithAccessor(&it),
1935 Nothing<bool>());
1936
1937 if (Object::SameValueZero(*value, *element_k)) return Just(true);
1938
1939 // Bailout to slow path if elements on prototype changed
1940 if (!JSObject::PrototypeHasNoElements(isolate, *receiver)) {
1941 return IncludesValueSlowPath(isolate, receiver, value, k + 1,
1942 length);
1943 }
1944
1945 // Continue if elements unchanged
1946 if (*dictionary == receiver->elements()) continue;
1947
1948 // Otherwise, bailout or update elements
1949
1950 // If switched to initial elements, return true if searching for
1951 // undefined, and false otherwise.
1952 if (receiver->map()->GetInitialElements() == receiver->elements()) {
1953 return Just(search_for_hole);
1954 }
1955
1956 // If switched to fast elements, continue with the correct accessor.
1957 if (receiver->GetElementsKind() != DICTIONARY_ELEMENTS) {
1958 ElementsAccessor* accessor = receiver->GetElementsAccessor();
1959 return accessor->IncludesValue(isolate, receiver, value, k + 1,
1960 length);
1961 }
1962 dictionary = direct_handle(
1963 Cast<NumberDictionary>(receiver->elements()), isolate);
1964 break;
1965 }
1966 }
1967 }
1968 return Just(false);
1969 }
1970
1971 static Maybe<int64_t> IndexOfValueImpl(Isolate* isolate,
1972 DirectHandle<JSObject> receiver,
1973 DirectHandle<Object> value,
1974 size_t start_from, size_t length) {
1975 DCHECK(JSObject::PrototypeHasNoElements(isolate, *receiver));
1976
1977 ElementsKind original_elements_kind = receiver->GetElementsKind();
1978 USE(original_elements_kind);
1979 DirectHandle<NumberDictionary> dictionary(
1980 Cast<NumberDictionary>(receiver->elements()), isolate);
1981 // Iterate through entire range, as accessing elements out of order is
1982 // observable.
1983 for (size_t k = start_from; k < length; ++k) {
1984 DCHECK_EQ(receiver->GetElementsKind(), original_elements_kind);
1985 DCHECK_LE(k, std::numeric_limits<uint32_t>::max());
1986 InternalIndex entry =
1987 dictionary->FindEntry(isolate, static_cast<uint32_t>(k));
1988 if (entry.is_not_found()) continue;
1989
1990 PropertyDetails details =
1991 GetDetailsImpl(*dictionary, InternalIndex(entry));
1992 switch (details.kind()) {
1993 case PropertyKind::kData: {
1994 Tagged<Object> element_k = dictionary->ValueAt(entry);
1995 if (Object::StrictEquals(*value, element_k)) {
1996 return Just<int64_t>(k);
1997 }
1998 break;
1999 }
2000 case PropertyKind::kAccessor: {
2001 LookupIterator it(isolate, receiver, k,
2002 LookupIterator::OWN_SKIP_INTERCEPTOR);
2003 DCHECK(it.IsFound());
2004 DCHECK_EQ(it.state(), LookupIterator::ACCESSOR);
2005 DirectHandle<Object> element_k;
2006
2007 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, element_k,
2008 Object::GetPropertyWithAccessor(&it),
2009 Nothing<int64_t>());
2010
2011 if (Object::StrictEquals(*value, *element_k)) return Just<int64_t>(k);
2012
2013 // Bailout to slow path if elements on prototype changed.
2014 if (!JSObject::PrototypeHasNoElements(isolate, *receiver)) {
2015 return IndexOfValueSlowPath(isolate, receiver, value, k + 1,
2016 length);
2017 }
2018
2019 // Continue if elements unchanged.
2020 if (*dictionary == receiver->elements()) continue;
2021
2022 // Otherwise, bailout or update elements.
2023 if (receiver->GetElementsKind() != DICTIONARY_ELEMENTS) {
2024 // Otherwise, switch to slow path.
2025 return IndexOfValueSlowPath(isolate, receiver, value, k + 1,
2026 length);
2027 }
2028 dictionary = direct_handle(
2029 Cast<NumberDictionary>(receiver->elements()), isolate);
2030 break;
2031 }
2032 }
2033 }
2034 return Just<int64_t>(-1);
2035 }
2036
2037 static void ValidateContents(Tagged<JSObject> holder, size_t length) {
2038 DisallowGarbageCollection no_gc;
2039#if DEBUG
2040 DCHECK_EQ(holder->map()->elements_kind(), DICTIONARY_ELEMENTS);
2041 if (!v8_flags.enable_slow_asserts) return;
2042 ReadOnlyRoots roots = GetReadOnlyRoots();
2043 Tagged<NumberDictionary> dictionary =
2044 Cast<NumberDictionary>(holder->elements());
2045 // Validate the requires_slow_elements and max_number_key values.
2046 bool requires_slow_elements = false;
2047 int max_key = 0;
2048 for (InternalIndex i : dictionary->IterateEntries()) {
2049 Tagged<Object> k;
2050 if (!dictionary->ToKey(roots, i, &k)) continue;
2051 DCHECK_LE(0.0, Object::NumberValue(k));
2052 if (Object::NumberValue(k) >
2053 NumberDictionary::kRequiresSlowElementsLimit) {
2054 requires_slow_elements = true;
2055 } else {
2056 max_key = std::max(max_key, Smi::ToInt(k));
2057 }
2058 }
2059 if (requires_slow_elements) {
2060 DCHECK(dictionary->requires_slow_elements());
2061 } else if (!dictionary->requires_slow_elements()) {
2062 DCHECK_LE(max_key, dictionary->max_number_key());
2063 }
2064#endif
2065 }
2066};
2067
2068// Super class for all fast element arrays.
2069template <typename Subclass, typename KindTraits>
2070class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> {
2071 public:
2072 using BackingStore = typename KindTraits::BackingStore;
2073
2074 static DirectHandle<NumberDictionary> NormalizeImpl(
2075 DirectHandle<JSObject> object, DirectHandle<FixedArrayBase> store) {
2076 Isolate* isolate = object->GetIsolate();
2077 ElementsKind kind = Subclass::kind();
2078
2079 // Ensure that notifications fire if the array or object prototypes are
2080 // normalizing.
2081 if (IsSmiOrObjectElementsKind(kind) ||
2082 kind == FAST_STRING_WRAPPER_ELEMENTS) {
2083 isolate->UpdateNoElementsProtectorOnNormalizeElements(object);
2084 }
2085
2086 int capacity = object->GetFastElementsUsage();
2087 DirectHandle<NumberDictionary> dictionary =
2088 NumberDictionary::New(isolate, capacity);
2089
2090 PropertyDetails details = PropertyDetails::Empty();
2091 int j = 0;
2092 int max_number_key = -1;
2093 for (int i = 0; j < capacity; i++) {
2094 if (IsHoleyElementsKindForRead(kind)) {
2095 if (Cast<BackingStore>(*store)->is_the_hole(isolate, i)) continue;
2096 }
2097 max_number_key = i;
2098 DirectHandle<Object> value =
2099 Subclass::GetImpl(isolate, *store, InternalIndex(i));
2100 dictionary =
2101 NumberDictionary::Add(isolate, dictionary, i, value, details);
2102 j++;
2103 }
2104
2105 if (max_number_key > 0) {
2106 dictionary->UpdateMaxNumberKey(static_cast<uint32_t>(max_number_key),
2107 object);
2108 }
2109 return dictionary;
2110 }
2111
2112 static void DeleteAtEnd(DirectHandle<JSObject> obj,
2113 DirectHandle<BackingStore> backing_store,
2114 uint32_t entry) {
2115 uint32_t length = static_cast<uint32_t>(backing_store->length());
2116 DCHECK_LT(entry, length);
2117 Isolate* isolate = obj->GetIsolate();
2118 for (; entry > 0; entry--) {
2119 if (!backing_store->is_the_hole(isolate, entry - 1)) break;
2120 }
2121 if (entry == 0) {
2122 Tagged<FixedArray> empty = ReadOnlyRoots(isolate).empty_fixed_array();
2123 // Dynamically ask for the elements kind here since we manually redirect
2124 // the operations for argument backing stores.
2125 if (obj->GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS) {
2126 Cast<SloppyArgumentsElements>(obj->elements())->set_arguments(empty);
2127 } else {
2128 obj->set_elements(empty);
2129 }
2130 return;
2131 }
2132
2133 isolate->heap()->RightTrimArray(*backing_store, entry, length);
2134 }
2135
2136 static void DeleteCommon(DirectHandle<JSObject> obj, uint32_t entry,
2137 DirectHandle<FixedArrayBase> store) {
2138 DCHECK(obj->HasSmiOrObjectElements() || obj->HasDoubleElements() ||
2139 obj->HasNonextensibleElements() || obj->HasFastArgumentsElements() ||
2140 obj->HasFastStringWrapperElements());
2141 DirectHandle<BackingStore> backing_store = Cast<BackingStore>(store);
2142 if (!IsJSArray(*obj) &&
2143 entry == static_cast<uint32_t>(store->length()) - 1) {
2144 DeleteAtEnd(obj, backing_store, entry);
2145 return;
2146 }
2147
2148 Isolate* isolate = obj->GetIsolate();
2149 backing_store->set_the_hole(isolate, entry);
2150
2151 // TODO(verwaest): Move this out of elements.cc.
2152 // If the backing store is larger than a certain size and
2153 // has too few used values, normalize it.
2154 const int kMinLengthForSparsenessCheck = 64;
2155 if (backing_store->length() < kMinLengthForSparsenessCheck) return;
2156 uint32_t length = 0;
2157 if (IsJSArray(*obj)) {
2158 Object::ToArrayLength(Cast<JSArray>(*obj)->length(), &length);
2159 } else {
2160 length = static_cast<uint32_t>(store->length());
2161 }
2162
2163 // To avoid doing the check on every delete, use a counter-based heuristic.
2164 const int kLengthFraction = 16;
2165 // The above constant must be large enough to ensure that we check for
2166 // normalization frequently enough. At a minimum, it should be large
2167 // enough to reliably hit the "window" of remaining elements count where
2168 // normalization would be beneficial.
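// Illustrative example: with a backing store of length 1024 and
// kLengthFraction == 16, the full sparseness scan below only runs once 64
// deletions have been counted since the previous scan.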
2169 static_assert(kLengthFraction >=
2170 NumberDictionary::kEntrySize *
2171 NumberDictionary::kPreferFastElementsSizeFactor);
2172 size_t current_counter = isolate->elements_deletion_counter();
2173 if (current_counter < length / kLengthFraction) {
2174 isolate->set_elements_deletion_counter(current_counter + 1);
2175 return;
2176 }
2177 // Reset the counter whenever the full check is performed.
2178 isolate->set_elements_deletion_counter(0);
2179
2180 if (!IsJSArray(*obj)) {
2181 uint32_t i;
2182 for (i = entry + 1; i < length; i++) {
2183 if (!backing_store->is_the_hole(isolate, i)) break;
2184 }
2185 if (i == length) {
2186 DeleteAtEnd(obj, backing_store, entry);
2187 return;
2188 }
2189 }
2190 int num_used = 0;
2191 for (int i = 0; i < backing_store->length(); ++i) {
2192 if (!backing_store->is_the_hole(isolate, i)) {
2193 ++num_used;
2194 // Bail out if a number dictionary wouldn't be able to save much space.
2195 if (NumberDictionary::kPreferFastElementsSizeFactor *
2196 NumberDictionary::ComputeCapacity(num_used) *
2197 NumberDictionary::kEntrySize >
2198 static_cast<uint32_t>(backing_store->length())) {
2199 return;
2200 }
2201 }
2202 }
2203 JSObject::NormalizeElements(obj);
2204 }
2205
2206 static void ReconfigureImpl(DirectHandle<JSObject> object,
2207 DirectHandle<FixedArrayBase> store,
2208 InternalIndex entry, DirectHandle<Object> value,
2209 PropertyAttributes attributes) {
2210 DirectHandle<NumberDictionary> dictionary =
2211 JSObject::NormalizeElements(object);
2212 entry = InternalIndex(
2213 dictionary->FindEntry(object->GetIsolate(), entry.as_uint32()));
2214 DictionaryElementsAccessor::ReconfigureImpl(
2215 object, Cast<FixedArrayBase>(dictionary), entry, value, attributes);
2216 }
2217
2218 static Maybe<bool> AddImpl(DirectHandle<JSObject> object, uint32_t index,
2219 DirectHandle<Object> value,
2220 PropertyAttributes attributes,
2221 uint32_t new_capacity) {
2222 DCHECK_EQ(NONE, attributes);
2223 ElementsKind from_kind = object->GetElementsKind();
2224 ElementsKind to_kind = Subclass::kind();
2225 if (IsDictionaryElementsKind(from_kind) ||
2226 IsDoubleElementsKind(from_kind) != IsDoubleElementsKind(to_kind) ||
2227 Subclass::GetCapacityImpl(*object, object->elements()) !=
2228 new_capacity) {
2229 MAYBE_RETURN(Subclass::GrowCapacityAndConvertImpl(object, new_capacity),
2230 Nothing<bool>());
2231 } else {
2232 if (IsFastElementsKind(from_kind) && from_kind != to_kind) {
2233 JSObject::TransitionElementsKind(object, to_kind);
2234 }
2235 if (IsSmiOrObjectElementsKind(from_kind)) {
2236 DCHECK(IsSmiOrObjectElementsKind(to_kind));
2237 JSObject::EnsureWritableFastElements(object);
2238 }
2239 }
2240 Subclass::SetImpl(object, InternalIndex(index), *value);
2241 return Just(true);
2242 }
2243
2244 static void DeleteImpl(DirectHandle<JSObject> obj, InternalIndex entry) {
2245 ElementsKind kind = KindTraits::Kind;
2246 if (IsFastPackedElementsKind(kind) ||
2247 kind == PACKED_NONEXTENSIBLE_ELEMENTS) {
2248 JSObject::TransitionElementsKind(obj, GetHoleyElementsKind(kind));
2249 }
2250 if (IsSmiOrObjectElementsKind(KindTraits::Kind) ||
2251 IsNonextensibleElementsKind(kind)) {
2252 JSObject::EnsureWritableFastElements(obj);
2253 }
2254 DeleteCommon(obj, entry.as_uint32(),
2255 direct_handle(obj->elements(), obj->GetIsolate()));
2256 }
2257
2258 static bool HasEntryImpl(Isolate* isolate,
2259 Tagged<FixedArrayBase> backing_store,
2260 InternalIndex entry) {
2261 return !Cast<BackingStore>(backing_store)
2262 ->is_the_hole(isolate, entry.as_int());
2263 }
2264
2265 static uint32_t NumberOfElementsImpl(Isolate* isolate,
2266 Tagged<JSObject> receiver,
2267 Tagged<FixedArrayBase> backing_store) {
2268 size_t max_index = Subclass::GetMaxIndex(receiver, backing_store);
2269 DCHECK_LE(max_index, std::numeric_limits<uint32_t>::max());
2270 if (IsFastPackedElementsKind(Subclass::kind())) {
2271 return static_cast<uint32_t>(max_index);
2272 }
2273 uint32_t count = 0;
2274 for (size_t i = 0; i < max_index; i++) {
2275 if (Subclass::HasEntryImpl(isolate, backing_store, InternalIndex(i))) {
2276 count++;
2277 }
2278 }
2279 return count;
2280 }
2281
2282 V8_WARN_UNUSED_RESULT static ExceptionStatus AddElementsToKeyAccumulatorImpl(
2283 DirectHandle<JSObject> receiver, KeyAccumulator* accumulator,
2284 AddKeyConversion convert) {
2285 Isolate* isolate = accumulator->isolate();
2286 DirectHandle<FixedArrayBase> elements(receiver->elements(), isolate);
2287 size_t length =
2288 Subclass::GetMaxNumberOfEntries(isolate, *receiver, *elements);
2289 for (size_t i = 0; i < length; i++) {
2290 if (IsFastPackedElementsKind(KindTraits::Kind) ||
2291 HasEntryImpl(isolate, *elements, InternalIndex(i))) {
2292 RETURN_FAILURE_IF_NOT_SUCCESSFUL(accumulator->AddKey(
2293 Subclass::GetImpl(isolate, *elements, InternalIndex(i)), convert));
2294 }
2295 }
2296 return ExceptionStatus::kSuccess;
2297 }
2298
2299 static void ValidateContents(Tagged<JSObject> holder, size_t length) {
2300#if DEBUG
2301 Isolate* isolate = holder->GetIsolate();
2302 Heap* heap = isolate->heap();
2303 Tagged<FixedArrayBase> elements = holder->elements();
2304 Tagged<Map> map = elements->map();
2305 if (IsSmiOrObjectElementsKind(KindTraits::Kind)) {
2306 DCHECK_NE(map, ReadOnlyRoots(heap).fixed_double_array_map());
2307 } else if (IsDoubleElementsKind(KindTraits::Kind)) {
2308 DCHECK_NE(map, ReadOnlyRoots(heap).fixed_cow_array_map());
2309 if (map == ReadOnlyRoots(heap).fixed_array_map()) DCHECK_EQ(0u, length);
2310 } else {
2311 UNREACHABLE();
2312 }
2313 if (length == 0u) return; // nothing to do!
2314#if ENABLE_SLOW_DCHECKS
2315 DisallowGarbageCollection no_gc;
2316 Tagged<BackingStore> backing_store = Cast<BackingStore>(elements);
2317 DCHECK(length <= std::numeric_limits<int>::max());
2318 int length_int = static_cast<int>(length);
2319 if (IsSmiElementsKind(KindTraits::Kind)) {
2320 HandleScope scope(isolate);
2321 for (int i = 0; i < length_int; i++) {
2322 Tagged<Object> element = Cast<FixedArray>(backing_store)->get(i);
2323 DCHECK(IsSmi(element) || (IsHoleyElementsKind(KindTraits::Kind) &&
2324 IsTheHole(element, isolate)));
2325 }
2326 } else if (KindTraits::Kind == PACKED_ELEMENTS ||
2327 KindTraits::Kind == PACKED_DOUBLE_ELEMENTS) {
2328 for (int i = 0; i < length_int; i++) {
2329 DCHECK(!backing_store->is_the_hole(isolate, i));
2330 }
2331 } else {
2332 DCHECK(IsHoleyElementsKind(KindTraits::Kind));
2333 }
2334#endif
2335#endif
2336 }
2337
2338 static MaybeDirectHandle<Object> PopImpl(DirectHandle<JSArray> receiver) {
2339 return Subclass::RemoveElement(receiver, AT_END);
2340 }
2341
2342 static MaybeDirectHandle<Object> ShiftImpl(DirectHandle<JSArray> receiver) {
2343 return Subclass::RemoveElement(receiver, AT_START);
2344 }
2345
2346 static Maybe<uint32_t> PushImpl(DirectHandle<JSArray> receiver,
2347 BuiltinArguments* args, uint32_t push_size) {
2348 DirectHandle<FixedArrayBase> backing_store(receiver->elements(),
2349 receiver->GetIsolate());
2350 return Subclass::AddArguments(receiver, backing_store, args, push_size,
2351 AT_END);
2352 }
2353
2354 static Maybe<uint32_t> UnshiftImpl(DirectHandle<JSArray> receiver,
2355 BuiltinArguments* args,
2356 uint32_t unshift_size) {
2357 DirectHandle<FixedArrayBase> backing_store(receiver->elements(),
2358 receiver->GetIsolate());
2359 return Subclass::AddArguments(receiver, backing_store, args, unshift_size,
2360 AT_START);
2361 }
2362
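// Moves a range of elements within the backing store. When a long array is
// shifted towards index 0, the store is left-trimmed in place (if the heap
// can move the object start) instead of copied; otherwise the elements are
// moved with the appropriate write barrier and any vacated range is refilled
// with holes.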
2363 static DirectHandle<FixedArrayBase> MoveElements(
2364 Isolate* isolate, DirectHandle<JSArray> receiver,
2365 DirectHandle<FixedArrayBase> backing_store, int dst_index, int src_index,
2366 int len, int hole_start, int hole_end) {
2367 DisallowGarbageCollection no_gc;
2368 Tagged<BackingStore> dst_elms = Cast<BackingStore>(*backing_store);
2369 if (len > JSArray::kMaxCopyElements && dst_index == 0 &&
2370 isolate->heap()->CanMoveObjectStart(dst_elms)) {
2371 dst_elms = Cast<BackingStore>(
2372 isolate->heap()->LeftTrimFixedArray(dst_elms, src_index));
2373 // Updates this backing_store handle.
2374 backing_store.SetValue(dst_elms);
2375 receiver->set_elements(dst_elms);
2376 // Adjust the hole offset as the array has been shrunk.
2377 hole_end -= src_index;
2378 DCHECK_LE(hole_start, backing_store->length());
2379 DCHECK_LE(hole_end, backing_store->length());
2380 } else if (len != 0) {
2381 WriteBarrierMode mode =
2382 GetWriteBarrierMode(dst_elms, KindTraits::Kind, no_gc);
2383 dst_elms->MoveElements(isolate, dst_index, src_index, len, mode);
2384 }
2385 if (hole_start != hole_end) {
2386 dst_elms->FillWithHoles(hole_start, hole_end);
2387 }
2388 return backing_store;
2389 }
2390
2391 static MaybeDirectHandle<Object> FillImpl(DirectHandle<JSObject> receiver,
2392 DirectHandle<Object> obj_value,
2393 size_t start, size_t end) {
2394 // Ensure indexes are within array bounds
2395 DCHECK_LE(0, start);
2396 DCHECK_LE(start, end);
2397
2398 // Make sure COW arrays are copied.
2399 if (IsSmiOrObjectElementsKind(Subclass::kind())) {
2400 JSObject::EnsureWritableFastElements(receiver);
2401 }
2402
2403 // Make sure we have enough space.
2404 DCHECK_LE(end, std::numeric_limits<uint32_t>::max());
2405 if (end > Subclass::GetCapacityImpl(*receiver, receiver->elements())) {
2406 MAYBE_RETURN_NULL(Subclass::GrowCapacityAndConvertImpl(
2407 receiver, static_cast<uint32_t>(end)));
2408 CHECK_EQ(Subclass::kind(), receiver->GetElementsKind());
2409 }
2410 DCHECK_LE(end, Subclass::GetCapacityImpl(*receiver, receiver->elements()));
2411
2412 for (size_t index = start; index < end; ++index) {
2413 Subclass::SetImpl(receiver, InternalIndex(index), *obj_value);
2414 }
2415 return MaybeDirectHandle<Object>(receiver);
2416 }
2417
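// Array.prototype.includes fast path: the search is dispatched on the kind of
// the search value (undefined, other non-number, non-NaN number, or NaN) so
// that each loop only performs comparisons that can possibly succeed for the
// receiver's elements kind.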
2418 static Maybe<bool> IncludesValueImpl(Isolate* isolate,
2419 DirectHandle<JSObject> receiver,
2420 DirectHandle<Object> search_value,
2421 size_t start_from, size_t length) {
2422 DCHECK(JSObject::PrototypeHasNoElements(isolate, *receiver));
2423 DisallowGarbageCollection no_gc;
2424 Tagged<FixedArrayBase> elements_base = receiver->elements();
2425 Tagged<Object> the_hole = ReadOnlyRoots(isolate).the_hole_value();
2426 Tagged<Object> undefined = ReadOnlyRoots(isolate).undefined_value();
2427 Tagged<Object> value = *search_value;
2428
2429 if (start_from >= length) return Just(false);
2430
2431 // Elements beyond the capacity of the backing store are treated as undefined.
2432 size_t elements_length = static_cast<size_t>(elements_base->length());
2433 if (value == undefined && elements_length < length) return Just(true);
2434 if (elements_length == 0) {
2435 DCHECK_NE(value, undefined);
2436 return Just(false);
2437 }
2438
2439 length = std::min(elements_length, length);
2440 DCHECK_LE(length, std::numeric_limits<int>::max());
2441
2442 if (!IsNumber(value)) {
2443 if (value == undefined) {
2444 // Search for `undefined` or The Hole. Even in the case of
2445 // PACKED_DOUBLE_ELEMENTS or PACKED_SMI_ELEMENTS, we might encounter The
2446 // Hole here, since the {length} used here can be larger than
2447 // JSArray::length.
2448 if (IsSmiOrObjectElementsKind(Subclass::kind()) ||
2449 IsAnyNonextensibleElementsKind(Subclass::kind())) {
2450 Tagged<FixedArray> elements = Cast<FixedArray>(receiver->elements());
2451
2452 for (size_t k = start_from; k < length; ++k) {
2453 Tagged<Object> element_k = elements->get(static_cast<int>(k));
2454
2455 if (element_k == the_hole || element_k == undefined) {
2456 return Just(true);
2457 }
2458 }
2459 return Just(false);
2460 } else {
2461 // Search for The Hole in HOLEY_DOUBLE_ELEMENTS or
2462 // PACKED_DOUBLE_ELEMENTS.
2463 DCHECK(IsDoubleElementsKind(Subclass::kind()));
2464 Tagged<FixedDoubleArray> elements =
2465 Cast<FixedDoubleArray>(receiver->elements());
2466
2467 for (size_t k = start_from; k < length; ++k) {
2468 if (elements->is_the_hole(static_cast<int>(k))) return Just(true);
2469 }
2470 return Just(false);
2471 }
2472 } else if (!IsObjectElementsKind(Subclass::kind()) &&
2473 !IsAnyNonextensibleElementsKind(Subclass::kind())) {
2474 // Search for non-number, non-Undefined value, with either
2475 // PACKED_SMI_ELEMENTS, PACKED_DOUBLE_ELEMENTS, HOLEY_SMI_ELEMENTS or
2476 // HOLEY_DOUBLE_ELEMENTS. Guaranteed to return false, since these
2477 // elements kinds can only contain Number values or undefined.
2478 return Just(false);
2479 } else {
2480 // Search for non-number, non-Undefined value with either
2481 // PACKED_ELEMENTS or HOLEY_ELEMENTS.
2482 DCHECK(IsObjectElementsKind(Subclass::kind()) ||
2483 IsAnyNonextensibleElementsKind(Subclass::kind()));
2484 Tagged<FixedArray> elements = Cast<FixedArray>(receiver->elements());
2485
2486 for (size_t k = start_from; k < length; ++k) {
2487 Tagged<Object> element_k = elements->get(static_cast<int>(k));
2488 if (element_k == the_hole) continue;
2489 if (Object::SameValueZero(value, element_k)) return Just(true);
2490 }
2491 return Just(false);
2492 }
2493 } else {
2494 if (!IsNaN(value)) {
2495 double search_number = Object::NumberValue(value);
2496 if (IsDoubleElementsKind(Subclass::kind())) {
2497 // Search for non-NaN Number in PACKED_DOUBLE_ELEMENTS or
2498 // HOLEY_DOUBLE_ELEMENTS --- Skip TheHole, and trust UCOMISD or
2499 // similar operation for result.
2500 Tagged<FixedDoubleArray> elements =
2501 Cast<FixedDoubleArray>(receiver->elements());
2502
2503 for (size_t k = start_from; k < length; ++k) {
2504 if (elements->is_the_hole(static_cast<int>(k))) continue;
2505 if (elements->get_scalar(static_cast<int>(k)) == search_number) {
2506 return Just(true);
2507 }
2508 }
2509 return Just(false);
2510 } else {
2511 // Search for non-NaN Number in PACKED_ELEMENTS, HOLEY_ELEMENTS,
2512 // PACKED_SMI_ELEMENTS or HOLEY_SMI_ELEMENTS --- Skip non-Numbers,
2513 // and trust UCOMISD or similar operation for result
2514 Tagged<FixedArray> elements = Cast<FixedArray>(receiver->elements());
2515
2516 for (size_t k = start_from; k < length; ++k) {
2517 Tagged<Object> element_k = elements->get(static_cast<int>(k));
2518 if (IsNumber(element_k) &&
2519 Object::NumberValue(element_k) == search_number) {
2520 return Just(true);
2521 }
2522 }
2523 return Just(false);
2524 }
2525 } else {
2526 // Search for NaN --- NaN cannot be represented with Smi elements, so
2527 // abort if ElementsKind is PACKED_SMI_ELEMENTS or HOLEY_SMI_ELEMENTS
2528 if (IsSmiElementsKind(Subclass::kind())) return Just(false);
2529
2530 if (IsDoubleElementsKind(Subclass::kind())) {
2531 // Search for NaN in PACKED_DOUBLE_ELEMENTS or
2532 // HOLEY_DOUBLE_ELEMENTS --- Skip The Hole and trust
2533 // std::isnan(elementK) for result
2534 Tagged<FixedDoubleArray> elements =
2535 Cast<FixedDoubleArray>(receiver->elements());
2536
2537 for (size_t k = start_from; k < length; ++k) {
2538 if (elements->is_the_hole(static_cast<int>(k))) continue;
2539 if (std::isnan(elements->get_scalar(static_cast<int>(k)))) {
2540 return Just(true);
2541 }
2542 }
2543 return Just(false);
2544 } else {
2545 // Search for NaN in PACKED_ELEMENTS or HOLEY_ELEMENTS. Return true
2546 // if elementK->IsHeapNumber() && std::isnan(elementK->Number())
2547 DCHECK(IsObjectElementsKind(Subclass::kind()) ||
2548 IsAnyNonextensibleElementsKind(Subclass::kind()));
2549 Tagged<FixedArray> elements = Cast<FixedArray>(receiver->elements());
2550
2551 for (size_t k = start_from; k < length; ++k) {
2552 if (IsNaN(elements->get(static_cast<int>(k)))) return Just(true);
2553 }
2554 return Just(false);
2555 }
2556 }
2557 }
2558 }
2559
2560 static Handle<FixedArray> CreateListFromArrayLikeImpl(
2561 Isolate* isolate, DirectHandle<JSObject> object, uint32_t length) {
2562 Handle<FixedArray> result = isolate->factory()->NewFixedArray(length);
2563 DirectHandle<FixedArrayBase> elements(object->elements(), isolate);
2564 for (uint32_t i = 0; i < length; i++) {
2565 InternalIndex entry(i);
2566 if (!Subclass::HasEntryImpl(isolate, *elements, entry)) continue;
2567 DirectHandle<Object> value;
2568 value = Subclass::GetImpl(isolate, *elements, entry);
2569 if (IsName(*value)) {
2570 value = isolate->factory()->InternalizeName(Cast<Name>(value));
2571 }
2572 result->set(i, *value);
2573 }
2574 return result;
2575 }
2576
2577 static MaybeDirectHandle<Object> RemoveElement(DirectHandle<JSArray> receiver,
2578 Where remove_position) {
2579 Isolate* isolate = receiver->GetIsolate();
2580 ElementsKind kind = KindTraits::Kind;
2581 if (IsSmiOrObjectElementsKind(kind)) {
2582 HandleScope scope(isolate);
2583 JSObject::EnsureWritableFastElements(receiver);
2584 }
2585 DirectHandle<FixedArrayBase> backing_store(receiver->elements(), isolate);
2586 uint32_t length = static_cast<uint32_t>(Smi::ToInt(receiver->length()));
2587 DCHECK_GT(length, 0);
2588 int new_length = length - 1;
2589 int remove_index = remove_position == AT_START ? 0 : new_length;
2590 DirectHandle<Object> result =
2591 Subclass::GetImpl(isolate, *backing_store, InternalIndex(remove_index));
2592 if (remove_position == AT_START) {
2593 backing_store = Subclass::MoveElements(isolate, receiver, backing_store,
2594 0, 1, new_length, 0, 0);
2595 }
2596 MAYBE_RETURN_NULL(
2597 Subclass::SetLengthImpl(isolate, receiver, new_length, backing_store));
2598
2599 if (IsHoleyElementsKind(kind) && IsTheHole(*result, isolate)) {
2600 return isolate->factory()->undefined_value();
2601 }
2602 return MaybeDirectHandle<Object>(result);
2603 }
2604
2605 static Maybe<uint32_t> AddArguments(
2606 DirectHandle<JSArray> receiver,
2607 DirectHandle<FixedArrayBase> backing_store, BuiltinArguments* args,
2608 uint32_t add_size, Where add_position) {
2609 uint32_t length = Smi::ToInt(receiver->length());
2610 DCHECK_LT(0, add_size);
2611 uint32_t elms_len = backing_store->length();
2612 // Check we do not overflow the new_length.
2613 DCHECK(add_size <= static_cast<uint32_t>(Smi::kMaxValue - length));
2614 uint32_t new_length = length + add_size;
2615 Isolate* isolate = receiver->GetIsolate();
2616
2617 if (new_length > elms_len) {
2618 // New backing storage is needed.
2619 uint32_t capacity = JSObject::NewElementsCapacity(new_length);
2620 // If we add arguments to the start we have to shift the existing objects.
2621 int copy_dst_index = add_position == AT_START ? add_size : 0;
2622 // Copy over all objects to a new backing_store.
2623 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
2624 isolate, backing_store,
2625 Subclass::ConvertElementsWithCapacity(receiver, backing_store,
2626 KindTraits::Kind, capacity, 0,
2627 copy_dst_index),
2628 Nothing<uint32_t>());
2629 receiver->set_elements(*backing_store);
2630 } else if (add_position == AT_START) {
2631 // If the backing store has enough capacity and we add elements to the
2632 // start we have to shift the existing objects.
2633 backing_store = Subclass::MoveElements(isolate, receiver, backing_store,
2634 add_size, 0, length, 0, 0);
2635 }
2636
2637 int insertion_index = add_position == AT_START ? 0 : length;
2638 // Copy the arguments to the start.
2639 Subclass::CopyArguments(args, backing_store, add_size, 1, insertion_index);
2640 // Set the length.
2641 receiver->set_length(Smi::FromInt(new_length));
2642 return Just(new_length);
2643 }
2644
2645 static void CopyArguments(BuiltinArguments* args,
2646 DirectHandle<FixedArrayBase> dst_store,
2647 uint32_t copy_size, uint32_t src_index,
2648 uint32_t dst_index) {
2649 // Add the provided values.
2650 DisallowGarbageCollection no_gc;
2651 Tagged<FixedArrayBase> raw_backing_store = *dst_store;
2652 WriteBarrierMode mode = raw_backing_store->GetWriteBarrierMode(no_gc);
2653 for (uint32_t i = 0; i < copy_size; i++) {
2654 Tagged<Object> argument = (*args)[src_index + i];
2655 DCHECK(!IsTheHole(argument));
2656 Subclass::SetImpl(raw_backing_store, InternalIndex(dst_index + i),
2657 argument, mode);
2658 }
2659 }
2660};
2661
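// Shared implementation for PACKED/HOLEY_SMI_ELEMENTS and PACKED/HOLEY
// object elements (plus the nonextensible, sealed and frozen variants derived
// below): elements live in a plain FixedArray and are read and written as
// tagged values.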
2662template <typename Subclass, typename KindTraits>
2663class FastSmiOrObjectElementsAccessor
2664 : public FastElementsAccessor<Subclass, KindTraits> {
2665 public:
2666 static inline void SetImpl(DirectHandle<JSObject> holder, InternalIndex entry,
2667 Tagged<Object> value) {
2668 SetImpl(holder->elements(), entry, value);
2669 }
2670
2671 static inline void SetImpl(Tagged<FixedArrayBase> backing_store,
2672 InternalIndex entry, Tagged<Object> value) {
2673 Cast<FixedArray>(backing_store)->set(entry.as_int(), value);
2674 }
2675
2676 static inline void SetImpl(Tagged<FixedArrayBase> backing_store,
2677 InternalIndex entry, Tagged<Object> value,
2678 WriteBarrierMode mode) {
2679 Cast<FixedArray>(backing_store)->set(entry.as_int(), value, mode);
2680 }
2681
2682 static Tagged<Object> GetRaw(Tagged<FixedArray> backing_store,
2683 InternalIndex entry) {
2684 return backing_store->get(entry.as_int());
2685 }
2686
2687 // NOTE: this method violates the handlified function signature convention:
2688 // raw pointer parameters in the function that allocates.
2689 // See ElementsAccessor::CopyElements() for details.
2690 // This method could actually allocate if copying from double elements to
2691 // object elements.
2692 static void CopyElementsImpl(Isolate* isolate, Tagged<FixedArrayBase> from,
2693 uint32_t from_start, Tagged<FixedArrayBase> to,
2694 ElementsKind from_kind, uint32_t to_start,
2695 int packed_size, int copy_size) {
2696 DisallowGarbageCollection no_gc;
2697 ElementsKind to_kind = KindTraits::Kind;
2698 switch (from_kind) {
2699 case PACKED_SMI_ELEMENTS:
2700 case HOLEY_SMI_ELEMENTS:
2701 case PACKED_ELEMENTS:
2702 case PACKED_FROZEN_ELEMENTS:
2703 case PACKED_SEALED_ELEMENTS:
2704 case PACKED_NONEXTENSIBLE_ELEMENTS:
2705 case HOLEY_ELEMENTS:
2706 case HOLEY_FROZEN_ELEMENTS:
2707 case HOLEY_SEALED_ELEMENTS:
2708 case HOLEY_NONEXTENSIBLE_ELEMENTS:
2709 case SHARED_ARRAY_ELEMENTS:
2710 CopyObjectToObjectElements(isolate, from, from_kind, from_start, to,
2711 to_kind, to_start, copy_size);
2712 break;
2713 case PACKED_DOUBLE_ELEMENTS:
2714 case HOLEY_DOUBLE_ELEMENTS: {
2715 AllowGarbageCollection allow_allocation;
2716 DCHECK(IsObjectElementsKind(to_kind));
2717 CopyDoubleToObjectElements(isolate, from, from_start, to, to_start,
2718 copy_size);
2719 break;
2720 }
2721 case DICTIONARY_ELEMENTS:
2722 CopyDictionaryToObjectElements(isolate, from, from_start, to, to_kind,
2723 to_start, copy_size);
2724 break;
2725 case FAST_SLOPPY_ARGUMENTS_ELEMENTS:
2726 case SLOW_SLOPPY_ARGUMENTS_ELEMENTS:
2727 case FAST_STRING_WRAPPER_ELEMENTS:
2728 case SLOW_STRING_WRAPPER_ELEMENTS:
2729#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) case TYPE##_ELEMENTS:
2730 TYPED_ARRAYS(TYPED_ARRAY_CASE)
2731 RAB_GSAB_TYPED_ARRAYS(TYPED_ARRAY_CASE)
2732#undef TYPED_ARRAY_CASE
2733 case WASM_ARRAY_ELEMENTS:
2734 // This function is currently only used for JSArrays with non-zero
2735 // length.
2736 UNREACHABLE();
2737 case NO_ELEMENTS:
2738 break; // Nothing to do.
2739 }
2740 }
2741
2742 static Maybe<bool> CollectValuesOrEntriesImpl(
2743 Isolate* isolate, DirectHandle<JSObject> object,
2744 DirectHandle<FixedArray> values_or_entries, bool get_entries,
2745 int* nof_items, PropertyFilter filter) {
2746 int count = 0;
2747 if (get_entries) {
2748 // Collecting entries needs to allocate, so this code must be handlified.
2749 DirectHandle<FixedArray> elements(Cast<FixedArray>(object->elements()),
2750 isolate);
2751 uint32_t length = elements->length();
2752 for (uint32_t index = 0; index < length; ++index) {
2753 InternalIndex entry(index);
2754 if (!Subclass::HasEntryImpl(isolate, *elements, entry)) continue;
2755 DirectHandle<Object> value =
2756 Subclass::GetImpl(isolate, *elements, entry);
2757 value = MakeEntryPair(isolate, index, value);
2758 values_or_entries->set(count++, *value);
2759 }
2760 } else {
2761 // No allocations here, so we can avoid handlification overhead.
2762 DisallowGarbageCollection no_gc;
2763 Tagged<FixedArray> elements = Cast<FixedArray>(object->elements());
2764 uint32_t length = elements->length();
2765 for (uint32_t index = 0; index < length; ++index) {
2766 InternalIndex entry(index);
2767 if (!Subclass::HasEntryImpl(isolate, elements, entry)) continue;
2768 Tagged<Object> value = GetRaw(elements, entry);
2769 values_or_entries->set(count++, value);
2770 }
2771 }
2772 *nof_items = count;
2773 return Just(true);
2774 }
2775
2776 static Maybe<int64_t> IndexOfValueImpl(Isolate* isolate,
2777 DirectHandle<JSObject> receiver,
2778 DirectHandle<Object> search_value,
2779 size_t start_from, size_t length) {
2780 DCHECK(JSObject::PrototypeHasNoElements(isolate, *receiver));
2781 DisallowGarbageCollection no_gc;
2782 Tagged<FixedArrayBase> elements_base = receiver->elements();
2783 Tagged<Object> value = *search_value;
2784
2785 if (start_from >= length) return Just<int64_t>(-1);
2786
2787 length = std::min(static_cast<size_t>(elements_base->length()), length);
2788
2789 // Only FAST_{,HOLEY_}ELEMENTS can store non-numbers.
2790 if (!IsNumber(value) && !IsObjectElementsKind(Subclass::kind()) &&
2791 !IsAnyNonextensibleElementsKind(Subclass::kind())) {
2792 return Just<int64_t>(-1);
2793 }
2794 // NaN can never be found by strict equality.
2795 if (IsNaN(value)) return Just<int64_t>(-1);
2796
2797 // k can be greater than receiver->length() below, but it is bounded by
2798 // elements_base->length() so we never read out of bounds. This means that
2799 // elements->get(k) can return the hole, for which the StrictEquals will
2800 // always fail.
2801 Tagged<FixedArray> elements = Cast<FixedArray>(receiver->elements());
2802 static_assert(FixedArray::kMaxLength <=
2803 std::numeric_limits<uint32_t>::max());
2804 for (size_t k = start_from; k < length; ++k) {
2805 if (Object::StrictEquals(value,
2806 elements->get(static_cast<uint32_t>(k)))) {
2807 return Just<int64_t>(k);
2808 }
2809 }
2810 return Just<int64_t>(-1);
2811 }
2812};
2813
2814class FastPackedSmiElementsAccessor
2815 : public FastSmiOrObjectElementsAccessor<
2816 FastPackedSmiElementsAccessor,
2817 ElementsKindTraits<PACKED_SMI_ELEMENTS>> {};
2818
2819class FastHoleySmiElementsAccessor
2820 : public FastSmiOrObjectElementsAccessor<
2821 FastHoleySmiElementsAccessor,
2822 ElementsKindTraits<HOLEY_SMI_ELEMENTS>> {};
2823
2824class FastPackedObjectElementsAccessor
2825 : public FastSmiOrObjectElementsAccessor<
2826 FastPackedObjectElementsAccessor,
2827 ElementsKindTraits<PACKED_ELEMENTS>> {};
2828
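// Non-extensible objects cannot gain new elements, so Push and Add are
// unreachable here; changing the array length instead transitions the object
// to DICTIONARY_ELEMENTS (see SetLengthImpl below).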
2829template <typename Subclass, typename KindTraits>
2830class FastNonextensibleObjectElementsAccessor
2831 : public FastSmiOrObjectElementsAccessor<Subclass, KindTraits> {
2832 public:
2833 using BackingStore = typename KindTraits::BackingStore;
2834
2835 static Maybe<uint32_t> PushImpl(DirectHandle<JSArray> receiver,
2836 BuiltinArguments* args, uint32_t push_size) {
2837 UNREACHABLE();
2838 }
2839
2840 static Maybe<bool> AddImpl(DirectHandle<JSObject> object, uint32_t index,
2841 DirectHandle<Object> value,
2842 PropertyAttributes attributes,
2843 uint32_t new_capacity) {
2844 UNREACHABLE();
2845 }
2846
2847 // TODO(duongn): refactor this due to code duplication of sealed version.
2848 // Consider using JSObject::NormalizeElements(). Also consider following the fast
2849 // element logic instead of changing to dictionary mode.
2850 static Maybe<bool> SetLengthImpl(Isolate* isolate,
2851 DirectHandle<JSArray> array, uint32_t length,
2852 DirectHandle<FixedArrayBase> backing_store) {
2853 uint32_t old_length = 0;
2854 CHECK(Object::ToArrayIndex(array->length(), &old_length));
2855 if (length == old_length) {
2856 // Do nothing.
2857 return Just(true);
2858 }
2859
2860 // Transition to DICTIONARY_ELEMENTS.
2861 // Convert to dictionary mode.
2862 DirectHandle<NumberDictionary> new_element_dictionary =
2863 old_length == 0 ? isolate->factory()->empty_slow_element_dictionary()
2864 : array->GetElementsAccessor()->Normalize(array);
2865
2866 // Migrate map.
2867 DirectHandle<Map> new_map =
2868 Map::Copy(isolate, direct_handle(array->map(), isolate),
2869 "SlowCopyForSetLengthImpl");
2870 new_map->set_is_extensible(false);
2871 new_map->set_elements_kind(DICTIONARY_ELEMENTS);
2872 JSObject::MigrateToMap(isolate, array, new_map);
2873
2874 if (!new_element_dictionary.is_null()) {
2875 array->set_elements(*new_element_dictionary);
2876 }
2877
2878 if (array->elements() !=
2879 ReadOnlyRoots(isolate).empty_slow_element_dictionary()) {
2880 DirectHandle<NumberDictionary> dictionary(array->element_dictionary(),
2881 isolate);
2882 // Make sure we never go back to the fast case
2883 array->RequireSlowElements(*dictionary);
2884 JSObject::ApplyAttributesToDictionary(isolate, ReadOnlyRoots(isolate),
2885 dictionary,
2886 PropertyAttributes::NONE);
2887 }
2888
2889 // Set length.
2890 DirectHandle<FixedArrayBase> new_backing_store(array->elements(), isolate);
2891 return DictionaryElementsAccessor::SetLengthImpl(isolate, array, length,
2892 new_backing_store);
2893 }
2894};
2895
2896class FastPackedNonextensibleObjectElementsAccessor
2897 : public FastNonextensibleObjectElementsAccessor<
2898 FastPackedNonextensibleObjectElementsAccessor,
2899 ElementsKindTraits<PACKED_NONEXTENSIBLE_ELEMENTS>> {};
2900
2901class FastHoleyNonextensibleObjectElementsAccessor
2902 : public FastNonextensibleObjectElementsAccessor<
2903 FastHoleyNonextensibleObjectElementsAccessor,
2904 ElementsKindTraits<HOLEY_NONEXTENSIBLE_ELEMENTS>> {};
2905
2906template <typename Subclass, typename KindTraits>
2907class FastSealedObjectElementsAccessor
2908 : public FastSmiOrObjectElementsAccessor<Subclass, KindTraits> {
2909 public:
2910 using BackingStore = typename KindTraits::BackingStore;
2911
2912 static DirectHandle<Object> RemoveElement(DirectHandle<JSArray> receiver,
2913 Where remove_position) {
2914 UNREACHABLE();
2915 }
2916
2917 static void DeleteImpl(DirectHandle<JSObject> obj, InternalIndex entry) {
2918 UNREACHABLE();
2919 }
2920
2921 static void DeleteAtEnd(DirectHandle<JSObject> obj,
2922 DirectHandle<BackingStore> backing_store,
2923 uint32_t entry) {
2924 UNREACHABLE();
2925 }
2926
2927 static void DeleteCommon(DirectHandle<JSObject> obj, uint32_t entry,
2928 DirectHandle<FixedArrayBase> store) {
2929 UNREACHABLE();
2930 }
2931
2932 static MaybeDirectHandle<Object> PopImpl(DirectHandle<JSArray> receiver) {
2933 UNREACHABLE();
2934 }
2935
2936 static Maybe<uint32_t> PushImpl(DirectHandle<JSArray> receiver,
2937 BuiltinArguments* args, uint32_t push_size) {
2938 UNREACHABLE();
2939 }
2940
2941 static Maybe<bool> AddImpl(DirectHandle<JSObject> object, uint32_t index,
2942 DirectHandle<Object> value,
2943 PropertyAttributes attributes,
2944 uint32_t new_capacity) {
2945 UNREACHABLE();
2946 }
2947
2948 // TODO(duongn): refactor this due to code duplication of nonextensible
2949 // version. Consider using JSObject::NormalizeElements(). Also consider following
2950 // the fast element logic instead of changing to dictionary mode.
2951 static Maybe<bool> SetLengthImpl(Isolate* isolate,
2952 DirectHandle<JSArray> array, uint32_t length,
2953 DirectHandle<FixedArrayBase> backing_store) {
2954 uint32_t old_length = 0;
2955 CHECK(Object::ToArrayIndex(array->length(), &old_length));
2956 if (length == old_length) {
2957 // Do nothing.
2958 return Just(true);
2959 }
2960
2961 // Transition to DICTIONARY_ELEMENTS.
2962 // Convert to dictionary mode
2963 DirectHandle<NumberDictionary> new_element_dictionary =
2964 old_length == 0 ? isolate->factory()->empty_slow_element_dictionary()
2965 : array->GetElementsAccessor()->Normalize(array);
2966
2967 // Migrate map.
2968 DirectHandle<Map> new_map =
2969 Map::Copy(isolate, direct_handle(array->map(), isolate),
2970 "SlowCopyForSetLengthImpl");
2971 new_map->set_is_extensible(false);
2972 new_map->set_elements_kind(DICTIONARY_ELEMENTS);
2973 JSObject::MigrateToMap(isolate, array, new_map);
2974
2975 if (!new_element_dictionary.is_null()) {
2976 array->set_elements(*new_element_dictionary);
2977 }
2978
2979 if (array->elements() !=
2980 ReadOnlyRoots(isolate).empty_slow_element_dictionary()) {
2981 DirectHandle<NumberDictionary> dictionary(array->element_dictionary(),
2982 isolate);
2983 // Make sure we never go back to the fast case
2984 array->RequireSlowElements(*dictionary);
2985 JSObject::ApplyAttributesToDictionary(isolate, ReadOnlyRoots(isolate),
2986 dictionary,
2987 PropertyAttributes::SEALED);
2988 }
2989
2990 // Set length
2991 DirectHandle<FixedArrayBase> new_backing_store(array->elements(), isolate);
2992 return DictionaryElementsAccessor::SetLengthImpl(isolate, array, length,
2993 new_backing_store);
2994 }
2995};
2996
2997class FastPackedSealedObjectElementsAccessor
2998 : public FastSealedObjectElementsAccessor<
2999 FastPackedSealedObjectElementsAccessor,
3000 ElementsKindTraits<PACKED_SEALED_ELEMENTS>> {};
3001
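// Accessor for SHARED_ARRAY_ELEMENTS. Element reads, writes, swaps and
// compare-and-swaps go through the SeqCst tagged accessors of the backing
// store, which keeps concurrent accesses sequentially consistent when the
// array is shared between threads.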
3002class SharedArrayElementsAccessor
3003 : public FastSealedObjectElementsAccessor<
3004 SharedArrayElementsAccessor,
3005 ElementsKindTraits<SHARED_ARRAY_ELEMENTS>> {
3006 public:
3007 static Handle<Object> GetAtomicInternalImpl(
3008 Isolate* isolate, Tagged<FixedArrayBase> backing_store,
3009 InternalIndex entry, SeqCstAccessTag tag) {
3010 return handle(Cast<BackingStore>(backing_store)->get(entry.as_int(), tag),
3011 isolate);
3012 }
3013
3014 static void SetAtomicInternalImpl(Tagged<FixedArrayBase> backing_store,
3015 InternalIndex entry, Tagged<Object> value,
3016 SeqCstAccessTag tag) {
3017 Cast<BackingStore>(backing_store)->set(entry.as_int(), value, tag);
3018 }
3019
3020 static Handle<Object> SwapAtomicInternalImpl(
3021 Isolate* isolate, Tagged<FixedArrayBase> backing_store,
3022 InternalIndex entry, Tagged<Object> value, SeqCstAccessTag tag) {
3023 return handle(
3024 Cast<BackingStore>(backing_store)->swap(entry.as_int(), value, tag),
3025 isolate);
3026 }
3027
3028 static Tagged<Object> CompareAndSwapAtomicInternalImpl(
3029 Tagged<FixedArrayBase> backing_store, InternalIndex entry,
3030 Tagged<Object> expected, Tagged<Object> value, SeqCstAccessTag tag) {
3031 return Cast<BackingStore>(backing_store)
3032 ->compare_and_swap(entry.as_int(), expected, value, tag);
3033 }
3034};
3035
3036class FastHoleySealedObjectElementsAccessor
3037 : public FastSealedObjectElementsAccessor<
3038 FastHoleySealedObjectElementsAccessor,
3039 ElementsKindTraits<HOLEY_SEALED_ELEMENTS>> {};
3040
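// Frozen objects reject every element mutation, so all mutating operations
// below are UNREACHABLE; callers are expected to have rejected the write
// before dispatching to this accessor.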
3041template <typename Subclass, typename KindTraits>
3042class FastFrozenObjectElementsAccessor
3043 : public FastSmiOrObjectElementsAccessor<Subclass, KindTraits> {
3044 public:
3045 using BackingStore = typename KindTraits::BackingStore;
3046
3047 static inline void SetImpl(DirectHandle<JSObject> holder, InternalIndex entry,
3048 Tagged<Object> value) {
3049 UNREACHABLE();
3050 }
3051
3052 static inline void SetImpl(Tagged<FixedArrayBase> backing_store,
3053 InternalIndex entry, Tagged<Object> value) {
3054 UNREACHABLE();
3055 }
3056
3057 static inline void SetImpl(Tagged<FixedArrayBase> backing_store,
3058 InternalIndex entry, Tagged<Object> value,
3059 WriteBarrierMode mode) {
3060 UNREACHABLE();
3061 }
3062
3063 static DirectHandle<Object> RemoveElement(DirectHandle<JSArray> receiver,
3064 Where remove_position) {
3065 UNREACHABLE();
3066 }
3067
3068 static void DeleteImpl(DirectHandle<JSObject> obj, InternalIndex entry) {
3069 UNREACHABLE();
3070 }
3071
3072 static void DeleteAtEnd(DirectHandle<JSObject> obj,
3073 DirectHandle<BackingStore> backing_store,
3074 uint32_t entry) {
3075 UNREACHABLE();
3076 }
3077
3078 static void DeleteCommon(DirectHandle<JSObject> obj, uint32_t entry,
3079 DirectHandle<FixedArrayBase> store) {
3080 UNREACHABLE();
3081 }
3082
3083 static MaybeDirectHandle<Object> PopImpl(DirectHandle<JSArray> receiver) {
3084 UNREACHABLE();
3085 }
3086
3087 static Maybe<uint32_t> PushImpl(DirectHandle<JSArray> receiver,
3088 BuiltinArguments* args, uint32_t push_size) {
3089 UNREACHABLE();
3090 }
3091
3092 static Maybe<bool> AddImpl(DirectHandle<JSObject> object, uint32_t index,
3093 DirectHandle<Object> value,
3094 PropertyAttributes attributes,
3095 uint32_t new_capacity) {
3096 UNREACHABLE();
3097 }
3098
3099 static Maybe<bool> SetLengthImpl(Isolate* isolate,
3100 DirectHandle<JSArray> array, uint32_t length,
3101 DirectHandle<FixedArrayBase> backing_store) {
3102 UNREACHABLE();
3103 }
3104
3105 static void ReconfigureImpl(DirectHandle<JSObject> object,
3106 DirectHandle<FixedArrayBase> store,
3107 InternalIndex entry, DirectHandle<Object> value,
3108 PropertyAttributes attributes) {
3109 UNREACHABLE();
3110 }
3111};
3112
3113class FastPackedFrozenObjectElementsAccessor
3114 : public FastFrozenObjectElementsAccessor<
3115 FastPackedFrozenObjectElementsAccessor,
3116 ElementsKindTraits<PACKED_FROZEN_ELEMENTS>> {};
3117
3118class FastHoleyFrozenObjectElementsAccessor
3119 : public FastFrozenObjectElementsAccessor<
3120 FastHoleyFrozenObjectElementsAccessor,
3121 ElementsKindTraits<HOLEY_FROZEN_ELEMENTS>> {};
3122
3123class FastHoleyObjectElementsAccessor
3124 : public FastSmiOrObjectElementsAccessor<
3125 FastHoleyObjectElementsAccessor, ElementsKindTraits<HOLEY_ELEMENTS>> {
3126};
3127
3128// Helper templates to statically determine if our destination type can contain
3129// the source type.
3130template <ElementsKind Kind, typename ElementType, ElementsKind SourceKind,
3131 typename SourceElementType>
3132struct CopyBetweenBackingStoresImpl;
3133
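// Shared implementation for PACKED_DOUBLE_ELEMENTS and HOLEY_DOUBLE_ELEMENTS:
// elements are stored unboxed in a FixedDoubleArray, so reads materialize a
// Number object while writes store the raw double value.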
3134template <typename Subclass, typename KindTraits>
3135class FastDoubleElementsAccessor
3136 : public FastElementsAccessor<Subclass, KindTraits> {
3137 public:
3138 static Handle<Object> GetImpl(Isolate* isolate,
3139 Tagged<FixedArrayBase> backing_store,
3140 InternalIndex entry) {
3141 return FixedDoubleArray::get(Cast<FixedDoubleArray>(backing_store),
3142 entry.as_int(), isolate);
3143 }
3144
3145 static inline void SetImpl(DirectHandle<JSObject> holder, InternalIndex entry,
3146 Tagged<Object> value) {
3147 SetImpl(holder->elements(), entry, value);
3148 }
3149
3150 static inline void SetImpl(Tagged<FixedArrayBase> backing_store,
3151 InternalIndex entry, Tagged<Object> value) {
3152#ifdef V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
3153 if (IsUndefined(value)) {
3154 Cast<FixedDoubleArray>(backing_store)->set_undefined(entry.as_int());
3155 return;
3156 }
3157#endif // V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
3158 Cast<FixedDoubleArray>(backing_store)
3159 ->set(entry.as_int(), Object::NumberValue(value));
3160 }
3161
3162 static inline void SetImpl(Tagged<FixedArrayBase> backing_store,
3163 InternalIndex entry, Tagged<Object> value,
3164 WriteBarrierMode mode) {
3165 Cast<FixedDoubleArray>(backing_store)
3166 ->set(entry.as_int(), Object::NumberValue(value));
3167 }
3168
3169 static void CopyElementsImpl(Isolate* isolate, Tagged<FixedArrayBase> from,
3170 uint32_t from_start, Tagged<FixedArrayBase> to,
3171 ElementsKind from_kind, uint32_t to_start,
3172 int packed_size, int copy_size) {
3173    DisallowGarbageCollection no_gc;
3174    switch (from_kind) {
3175      case PACKED_SMI_ELEMENTS:
3176        CopyPackedSmiToDoubleElements(from, from_start, to, to_start,
3177                                      packed_size, copy_size);
3178        break;
3179      case HOLEY_SMI_ELEMENTS:
3180        CopySmiToDoubleElements(from, from_start, to, to_start, copy_size);
3181        break;
3182      case PACKED_DOUBLE_ELEMENTS:
3183      case HOLEY_DOUBLE_ELEMENTS:
3184        CopyDoubleToDoubleElements(from, from_start, to, to_start, copy_size);
3185        break;
3186      case PACKED_ELEMENTS:
3187      case PACKED_FROZEN_ELEMENTS:
3188      case PACKED_SEALED_ELEMENTS:
3189      case PACKED_NONEXTENSIBLE_ELEMENTS:
3190      case HOLEY_ELEMENTS:
3191      case HOLEY_FROZEN_ELEMENTS:
3192      case HOLEY_SEALED_ELEMENTS:
3193      case HOLEY_NONEXTENSIBLE_ELEMENTS:
3194      case SHARED_ARRAY_ELEMENTS:
3195        CopyObjectToDoubleElements(from, from_start, to, to_start, copy_size);
3196        break;
3197      case DICTIONARY_ELEMENTS:
3198        CopyDictionaryToDoubleElements(isolate, from, from_start, to, to_start,
3199                                       copy_size);
3200        break;
3201      case FAST_SLOPPY_ARGUMENTS_ELEMENTS:
3202      case SLOW_SLOPPY_ARGUMENTS_ELEMENTS:
3203      case FAST_STRING_WRAPPER_ELEMENTS:
3204      case SLOW_STRING_WRAPPER_ELEMENTS:
3205      case WASM_ARRAY_ELEMENTS:
3206      case NO_ELEMENTS:
3207#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) case TYPE##_ELEMENTS:
3208        TYPED_ARRAYS(TYPED_ARRAY_CASE)
3209        RAB_GSAB_TYPED_ARRAYS(TYPED_ARRAY_CASE)
3210#undef TYPED_ARRAY_CASE
3211        // This function is currently only used for JSArrays with non-zero
3212        // length.
3213        UNREACHABLE();
3214    }
3215 }
3216
3217 static Maybe<bool> CollectValuesOrEntriesImpl(
3218 Isolate* isolate, DirectHandle<JSObject> object,
3219 DirectHandle<FixedArray> values_or_entries, bool get_entries,
3220 int* nof_items, PropertyFilter filter) {
3221 DirectHandle<FixedDoubleArray> elements(
3222 Cast<FixedDoubleArray>(object->elements()), isolate);
3223 int count = 0;
3224 uint32_t length = elements->length();
3225 for (uint32_t index = 0; index < length; ++index) {
3226 InternalIndex entry(index);
3227 if (!Subclass::HasEntryImpl(isolate, *elements, entry)) continue;
3228 DirectHandle<Object> value = Subclass::GetImpl(isolate, *elements, entry);
3229 if (get_entries) {
3230 value = MakeEntryPair(isolate, index, value);
3231 }
3232 values_or_entries->set(count++, *value);
3233 }
3234 *nof_items = count;
3235 return Just(true);
3236 }
3237
3238 static Maybe<int64_t> IndexOfValueImpl(Isolate* isolate,
3239 DirectHandle<JSObject> receiver,
3240 DirectHandle<Object> search_value,
3241 size_t start_from, size_t length) {
3242    DCHECK(JSObject::PrototypeHasNoElements(isolate, *receiver));
3243    DisallowGarbageCollection no_gc;
3244    Tagged<FixedArrayBase> elements_base = receiver->elements();
3245 Tagged<Object> value = *search_value;
3246
3247 length = std::min(static_cast<size_t>(elements_base->length()), length);
3248
3249 if (start_from >= length) return Just<int64_t>(-1);
3250
3251 if (!IsNumber(value)) {
3252 return Just<int64_t>(-1);
3253 }
3254 if (IsNaN(value)) {
3255 return Just<int64_t>(-1);
3256 }
3257 double numeric_search_value = Object::NumberValue(value);
3258 Tagged<FixedDoubleArray> elements =
3259 Cast<FixedDoubleArray>(receiver->elements());
3260
3261 static_assert(FixedDoubleArray::kMaxLength <=
3262 std::numeric_limits<int>::max());
3263 for (size_t k = start_from; k < length; ++k) {
3264 int k_int = static_cast<int>(k);
3265 if (elements->is_the_hole(k_int)) {
3266 continue;
3267 }
3268 if (elements->get_scalar(k_int) == numeric_search_value) {
3269 return Just<int64_t>(k);
3270 }
3271 }
3272 return Just<int64_t>(-1);
3273 }
3274};
3275
3276class FastPackedDoubleElementsAccessor
3277 : public FastDoubleElementsAccessor<
3278 FastPackedDoubleElementsAccessor,
3279 ElementsKindTraits<PACKED_DOUBLE_ELEMENTS>> {};
3280
3281class FastHoleyDoubleElementsAccessor
3282 : public FastDoubleElementsAccessor<
3283 FastHoleyDoubleElementsAccessor,
3284 ElementsKindTraits<HOLEY_DOUBLE_ELEMENTS>> {};
3285
3286enum IsSharedBuffer : bool { kShared = true, kUnshared = false };
3287
3288constexpr bool IsFloat16RawBitsZero(uint16_t x) {
3289 // IEEE754 comparison returns true for 0 == -0, even though they are two
3290 // different bit patterns.
3291 return (x & ~0x8000) == 0;
3292}
3293
3294// Super class for all external element arrays.
3295template <ElementsKind Kind, typename ElementType>
3296class TypedElementsAccessor
3297 : public ElementsAccessorBase<TypedElementsAccessor<Kind, ElementType>,
3298 ElementsKindTraits<Kind>> {
3299 public:
3300 using BackingStore = typename ElementsKindTraits<Kind>::BackingStore;
3301 using AccessorClass = TypedElementsAccessor<Kind, ElementType>;
3302
3303 // Conversions from (other) scalar values.
3304 static ElementType FromScalar(int value) {
3305 return static_cast<ElementType>(value);
3306 }
3307 static ElementType FromScalar(uint32_t value) {
3308 return static_cast<ElementType>(value);
3309 }
3310 static ElementType FromScalar(double value) {
3311 return FromScalar(DoubleToInt32(value));
3312 }
3313 static ElementType FromScalar(int64_t value) { UNREACHABLE(); }
3314 static ElementType FromScalar(uint64_t value) { UNREACHABLE(); }
3315
3316 // Conversions from objects / handles.
3317 static ElementType FromObject(Tagged<Object> value,
3318 bool* lossless = nullptr) {
3319 if (IsSmi(value)) {
3320 return FromScalar(Smi::ToInt(value));
3321 } else if (IsHeapNumber(value)) {
3322 return FromScalar(Cast<HeapNumber>(value)->value());
3323 } else {
3324 // Clamp undefined here as well. All other types have been
3325 // converted to a number type further up in the call chain.
3326 DCHECK(IsUndefined(value));
3327 return FromScalar(Cast<Oddball>(value)->to_number_raw());
3328 }
3329 }
3330 static ElementType FromHandle(DirectHandle<Object> value,
3331 bool* lossless = nullptr) {
3332 return FromObject(*value, lossless);
3333 }
3334
3335 // Conversion of scalar value to handlified object.
3336 static Handle<Object> ToHandle(Isolate* isolate, ElementType value);
3337
3338 static void SetImpl(DirectHandle<JSObject> holder, InternalIndex entry,
3339 Tagged<Object> value) {
3340 auto typed_array = Cast<JSTypedArray>(holder);
3341 DCHECK_LE(entry.raw_value(), typed_array->GetLength());
3342 auto* entry_ptr =
3343 static_cast<ElementType*>(typed_array->DataPtr()) + entry.raw_value();
3344 auto is_shared = typed_array->buffer()->is_shared() ? kShared : kUnshared;
3345 SetImpl(entry_ptr, FromObject(value), is_shared);
3346 }
3347
3348 static void SetImpl(ElementType* data_ptr, ElementType value,
3349 IsSharedBuffer is_shared) {
3350 // TODO(ishell, v8:8875): Independent of pointer compression, 8-byte size
3351 // fields (external pointers, doubles and BigInt data) are not always 8-byte
3352 // aligned. This is relying on undefined behaviour in C++, since {data_ptr}
3353 // is not aligned to {alignof(ElementType)}.
3354 if (!is_shared) {
3355 base::WriteUnalignedValue(reinterpret_cast<Address>(data_ptr), value);
3356 return;
3357 }
3358
3359 // The JavaScript memory model allows for racy reads and writes to a
3360 // SharedArrayBuffer's backing store. Using relaxed atomics is not strictly
3361 // required for JavaScript, but will avoid undefined behaviour in C++ and is
3362 // unlikely to introduce noticeable overhead.
3363 if (IsAligned(reinterpret_cast<uintptr_t>(data_ptr),
3364 alignof(std::atomic<ElementType>))) {
3365 // Use a single relaxed atomic store.
3366 static_assert(sizeof(std::atomic<ElementType>) == sizeof(ElementType));
3367 reinterpret_cast<std::atomic<ElementType>*>(data_ptr)->store(
3368 value, std::memory_order_relaxed);
3369 return;
3370 }
3371
3372 // Some static CHECKs (are optimized out if succeeding) to ensure that
3373 // {data_ptr} is at least four byte aligned, and {std::atomic<uint32_t>}
3374 // has size and alignment of four bytes, such that we can cast the
3375 // {data_ptr} to it.
3376 CHECK_LE(kInt32Size, alignof(ElementType));
3377 CHECK_EQ(kInt32Size, alignof(std::atomic<uint32_t>));
3378 CHECK_EQ(kInt32Size, sizeof(std::atomic<uint32_t>));
3379 // And dynamically check that we indeed have at least four byte alignment.
3380 DCHECK(IsAligned(reinterpret_cast<uintptr_t>(data_ptr), kInt32Size));
3381 // Store as multiple 32-bit words. Make {kNumWords} >= 1 to avoid compiler
3382 // warnings for the empty array or memcpy to an empty object.
3383 constexpr size_t kNumWords =
3384 std::max(size_t{1}, sizeof(ElementType) / kInt32Size);
3385 uint32_t words[kNumWords];
3386 CHECK_EQ(sizeof(words), sizeof(value));
3387 memcpy(words, &value, sizeof(value));
3388 for (size_t word = 0; word < kNumWords; ++word) {
3389 static_assert(sizeof(std::atomic<uint32_t>) == sizeof(uint32_t));
3390 reinterpret_cast<std::atomic<uint32_t>*>(data_ptr)[word].store(
3391 words[word], std::memory_order_relaxed);
3392 }
3393 }
3394
3395 static Handle<Object> GetInternalImpl(Isolate* isolate,
3396 DirectHandle<JSObject> holder,
3397 InternalIndex entry) {
3398 auto typed_array = Cast<JSTypedArray>(holder);
3399 DCHECK_LT(entry.raw_value(), typed_array->GetLength());
3400 DCHECK(!typed_array->IsDetachedOrOutOfBounds());
3401 auto* element_ptr =
3402 static_cast<ElementType*>(typed_array->DataPtr()) + entry.raw_value();
3403 auto is_shared = typed_array->buffer()->is_shared() ? kShared : kUnshared;
3404 ElementType elem = GetImpl(element_ptr, is_shared);
3405 return ToHandle(isolate, elem);
3406 }
3407
3408 static DirectHandle<Object> GetImpl(Isolate* isolate,
3409 Tagged<FixedArrayBase> backing_store,
3410 InternalIndex entry) {
3411 UNREACHABLE();
3412 }
3413
3414 static ElementType GetImpl(ElementType* data_ptr, IsSharedBuffer is_shared) {
3415 // TODO(ishell, v8:8875): Independent of pointer compression, 8-byte size
3416 // fields (external pointers, doubles and BigInt data) are not always
3417 // 8-byte aligned.
3418 if (!is_shared) {
3419      return base::ReadUnalignedValue<ElementType>(
3420          reinterpret_cast<Address>(data_ptr));
3421 }
3422
3423 // The JavaScript memory model allows for racy reads and writes to a
3424 // SharedArrayBuffer's backing store. Using relaxed atomics is not strictly
3425 // required for JavaScript, but will avoid undefined behaviour in C++ and is
3426 // unlikely to introduce noticeable overhead.
3427 if (IsAligned(reinterpret_cast<uintptr_t>(data_ptr),
3428 alignof(std::atomic<ElementType>))) {
3429 // Use a single relaxed atomic load.
3430 static_assert(sizeof(std::atomic<ElementType>) == sizeof(ElementType));
3431 // Note: acquire semantics are not needed here, but clang seems to merge
3432 // this atomic load with the non-atomic load above if we use relaxed
3433 // semantics. This will result in TSan failures.
3434 return reinterpret_cast<std::atomic<ElementType>*>(data_ptr)->load(
3435 std::memory_order_acquire);
3436 }
3437
3438 // Some static CHECKs (are optimized out if succeeding) to ensure that
3439 // {data_ptr} is at least four byte aligned, and {std::atomic<uint32_t>}
3440 // has size and alignment of four bytes, such that we can cast the
3441 // {data_ptr} to it.
3442 CHECK_LE(kInt32Size, alignof(ElementType));
3443 CHECK_EQ(kInt32Size, alignof(std::atomic<uint32_t>));
3444 CHECK_EQ(kInt32Size, sizeof(std::atomic<uint32_t>));
3445 // And dynamically check that we indeed have at least four byte alignment.
3446 DCHECK(IsAligned(reinterpret_cast<uintptr_t>(data_ptr), kInt32Size));
3447 // Load in multiple 32-bit words. Make {kNumWords} >= 1 to avoid compiler
3448 // warnings for the empty array or memcpy to an empty object.
3449 constexpr size_t kNumWords =
3450 std::max(size_t{1}, sizeof(ElementType) / kInt32Size);
3451 uint32_t words[kNumWords];
3452 for (size_t word = 0; word < kNumWords; ++word) {
3453 static_assert(sizeof(std::atomic<uint32_t>) == sizeof(uint32_t));
3454 words[word] =
3455 reinterpret_cast<std::atomic<uint32_t>*>(data_ptr)[word].load(
3456 std::memory_order_relaxed);
3457 }
3458 ElementType result;
3459 CHECK_EQ(sizeof(words), sizeof(result));
3460 memcpy(&result, words, sizeof(result));
3461 return result;
3462 }
3463
3464 static PropertyDetails GetDetailsImpl(Tagged<JSObject> holder,
3465 InternalIndex entry) {
3466 return PropertyDetails(PropertyKind::kData, NONE,
3467                           PropertyCellType::kNoCell);
3468  }
3469
3470 static PropertyDetails GetDetailsImpl(Tagged<FixedArrayBase> backing_store,
3471 InternalIndex entry) {
3472 return PropertyDetails(PropertyKind::kData, NONE,
3473                           PropertyCellType::kNoCell);
3474  }
3475
3476 static bool HasElementImpl(Isolate* isolate, Tagged<JSObject> holder,
3477 size_t index, Tagged<FixedArrayBase> backing_store,
3478 PropertyFilter filter) {
3479 return index < AccessorClass::GetCapacityImpl(holder, backing_store);
3480 }
3481
3482 static bool HasAccessorsImpl(Tagged<JSObject> holder,
3483 Tagged<FixedArrayBase> backing_store) {
3484 return false;
3485 }
3486
3487 static Maybe<bool> SetLengthImpl(Isolate* isolate,
3488 DirectHandle<JSArray> array, uint32_t length,
3489 DirectHandle<FixedArrayBase> backing_store) {
3490 // External arrays do not support changing their length.
3491 UNREACHABLE();
3492 }
3493
3494 static void DeleteImpl(DirectHandle<JSObject> obj, InternalIndex entry) {
3495 // Do nothing.
3496 //
3497 // TypedArray elements are configurable to explain detaching, but cannot be
3498 // deleted otherwise.
3499 }
3500
3501 static InternalIndex GetEntryForIndexImpl(
3502 Isolate* isolate, Tagged<JSObject> holder,
3503 Tagged<FixedArrayBase> backing_store, size_t index,
3504 PropertyFilter filter) {
3505 return index < AccessorClass::GetCapacityImpl(holder, backing_store)
3506 ? InternalIndex(index)
3507 : InternalIndex::NotFound();
3508 }
3509
3510 static size_t GetCapacityImpl(Tagged<JSObject> holder,
3511 Tagged<FixedArrayBase> backing_store) {
3512 Tagged<JSTypedArray> typed_array = Cast<JSTypedArray>(holder);
3513 return typed_array->GetLength();
3514 }
3515
3516 static size_t NumberOfElementsImpl(Isolate* isolate,
3517                                     Tagged<JSObject> receiver,
3518                                     Tagged<FixedArrayBase> backing_store) {
3519 return AccessorClass::GetCapacityImpl(receiver, backing_store);
3520 }
3521
3522 V8_WARN_UNUSED_RESULT static ExceptionStatus AddElementsToKeyAccumulatorImpl(
3523 DirectHandle<JSObject> receiver, KeyAccumulator* accumulator,
3524 AddKeyConversion convert) {
3525 Isolate* isolate = receiver->GetIsolate();
3526 DirectHandle<FixedArrayBase> elements(receiver->elements(), isolate);
3527 size_t length = AccessorClass::GetCapacityImpl(*receiver, *elements);
3528 for (size_t i = 0; i < length; i++) {
3529 DirectHandle<Object> value =
3530 AccessorClass::GetInternalImpl(isolate, receiver, InternalIndex(i));
3531 RETURN_FAILURE_IF_NOT_SUCCESSFUL(accumulator->AddKey(value, convert));
3532 }
3533    return ExceptionStatus::kSuccess;
3534  }
3535
3536 static Maybe<bool> CollectValuesOrEntriesImpl(
3537 Isolate* isolate, DirectHandle<JSObject> object,
3538 DirectHandle<FixedArray> values_or_entries, bool get_entries,
3539 int* nof_items, PropertyFilter filter) {
3540 int count = 0;
3541 if ((filter & ONLY_CONFIGURABLE) == 0) {
3542 DirectHandle<FixedArrayBase> elements(object->elements(), isolate);
3543 size_t length = AccessorClass::GetCapacityImpl(*object, *elements);
3544 for (size_t index = 0; index < length; ++index) {
3545 DirectHandle<Object> value = AccessorClass::GetInternalImpl(
3546 isolate, object, InternalIndex(index));
3547 if (get_entries) {
3548 value = MakeEntryPair(isolate, index, value);
3549 }
3550 values_or_entries->set(count++, *value);
3551 }
3552 }
3553 *nof_items = count;
3554 return Just(true);
3555 }
3556
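  // Attempts to convert |search_value| to ElementType, storing the result in
  // *|typed_search_value|. Returns true when no element could compare equal to
  // the search value (finite but out of range, or the conversion loses
  // precision), letting the search helpers below answer "not found" without
  // scanning the backing store.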
3557 static bool ToTypedSearchValue(double search_value,
3558 ElementType* typed_search_value) {
3559 if (!base::IsValueInRangeForNumericType<ElementType>(search_value) &&
3560 std::isfinite(search_value)) {
3561 // Return true if value can't be represented in this space.
3562 return true;
3563 }
3564 ElementType typed_value;
3565    if constexpr (IsFloat16TypedArrayElementsKind(Kind)) {
3566      typed_value = fp16_ieee_from_fp32_value(static_cast<float>(search_value));
3567 *typed_search_value = typed_value;
3568 return (static_cast<double>(fp16_ieee_to_fp32_value(typed_value)) !=
3569 search_value); // Loss of precision.
3570 }
3571 typed_value = static_cast<ElementType>(search_value);
3572 *typed_search_value = typed_value;
3573 return static_cast<double>(typed_value) !=
3574 search_value; // Loss of precision.
3575 }
3576
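  // Element fill used by %TypedArray%.prototype.fill. Shared buffers are
  // filled with per-element atomic stores via SetImpl; unshared buffers use
  // memset when every byte of the value is 0x00 or 0xFF (e.g. 0, or -1 for
  // integral types), and std::fill (with UnalignedSlot under pointer
  // compression) otherwise.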
3577 static MaybeDirectHandle<Object> FillImpl(DirectHandle<JSObject> receiver,
3578 DirectHandle<Object> value,
3579 size_t start, size_t end) {
3580 DirectHandle<JSTypedArray> typed_array = Cast<JSTypedArray>(receiver);
3581    DCHECK(!typed_array->IsDetachedOrOutOfBounds());
3582    DCHECK_LE(start, end);
3583    DCHECK_LE(end, typed_array->GetLength());
3584    DisallowGarbageCollection no_gc;
3585    ElementType scalar = FromHandle(value);
3586 ElementType* data = static_cast<ElementType*>(typed_array->DataPtr());
3587 ElementType* first = data + start;
3588 ElementType* last = data + end;
3589 if (typed_array->buffer()->is_shared()) {
3590 // TypedArrays backed by shared buffers need to be filled using atomic
3591 // operations. Since 8-byte data are not currently always 8-byte aligned,
3592 // manually fill using SetImpl, which abstracts over alignment and atomic
3593 // complexities.
3594 for (; first != last; ++first) {
3595 AccessorClass::SetImpl(first, scalar, kShared);
3596 }
3597 } else if ((scalar == 0 && !(std::is_floating_point_v<ElementType> &&
3598 IsMinusZero(scalar))) ||
3599 (std::is_integral_v<ElementType> &&
3600 scalar == static_cast<ElementType>(-1))) {
3601 // As of 2022-06, this is faster than {std::fill}.
3602 // We could extend this to any {scalar} that's a pattern of repeating
3603 // bytes, but patterns other than 0 and -1 are probably rare.
3604 size_t num_bytes = static_cast<size_t>(reinterpret_cast<int8_t*>(last) -
3605 reinterpret_cast<int8_t*>(first));
3606 memset(first, static_cast<int8_t>(scalar), num_bytes);
3607 } else if (COMPRESS_POINTERS_BOOL && alignof(ElementType) > kTaggedSize) {
3608 // TODO(ishell, v8:8875): See UnalignedSlot<T> for details.
3609 std::fill(UnalignedSlot<ElementType>(first),
3610 UnalignedSlot<ElementType>(last), scalar);
3611 } else {
3612 std::fill(first, last, scalar);
3613 }
3614 return MaybeDirectHandle<Object>(typed_array);
3615 }
3616
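  // %TypedArray%.prototype.includes semantics (SameValueZero): a NaN search
  // value can match NaN elements of float arrays, +0 and -0 match each other
  // (for Float16 via the raw-bit check above), and searching for undefined
  // only succeeds when the array has been detached or shrunk out of bounds.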
3617 static Maybe<bool> IncludesValueImpl(Isolate* isolate,
3618 DirectHandle<JSObject> receiver,
3619 DirectHandle<Object> value,
3620 size_t start_from, size_t length) {
3621    DisallowGarbageCollection no_gc;
3622    Tagged<JSTypedArray> typed_array = Cast<JSTypedArray>(*receiver);
3623
3624 bool out_of_bounds = false;
3625 size_t new_length = typed_array->GetLengthOrOutOfBounds(out_of_bounds);
3626 if (V8_UNLIKELY(out_of_bounds)) {
3627 return Just(IsUndefined(*value, isolate) && length > start_from);
3628 }
3629
3630 // Prototype has no elements, and not searching for the hole --- limit
3631 // search to backing store length.
3632 if (new_length < length) {
3633 if (IsUndefined(*value, isolate) && length > start_from) {
3634 return Just(true);
3635 }
3636 length = new_length;
3637 }
3638
3639 ElementType typed_search_value;
3640 ElementType* data_ptr =
3641 reinterpret_cast<ElementType*>(typed_array->DataPtr());
3642 auto is_shared = typed_array->buffer()->is_shared() ? kShared : kUnshared;
3643 if (Kind == BIGINT64_ELEMENTS || Kind == BIGUINT64_ELEMENTS ||
3644 Kind == RAB_GSAB_BIGINT64_ELEMENTS ||
3645 Kind == RAB_GSAB_BIGUINT64_ELEMENTS) {
3646 if (!IsBigInt(*value)) return Just(false);
3647 bool lossless;
3648 typed_search_value = FromHandle(value, &lossless);
3649 if (!lossless) return Just(false);
3650 } else {
3651 if (!IsNumber(*value)) return Just(false);
3652 double search_value = Object::NumberValue(*value);
3653 if (!std::isfinite(search_value)) {
3654 // Integral types cannot represent +Inf or NaN.
3655 if (!IsFloatTypedArrayElementsKind(Kind)) {
3656 return Just(false);
3657 }
3658 if (std::isnan(search_value)) {
3659 for (size_t k = start_from; k < length; ++k) {
3660            if constexpr (IsFloat16TypedArrayElementsKind(Kind)) {
3661              float elem_k = fp16_ieee_to_fp32_value(
3662 AccessorClass::GetImpl(data_ptr + k, is_shared));
3663 if (std::isnan(elem_k)) return Just(true);
3664 } else {
3665 double elem_k = static_cast<double>(
3666 AccessorClass::GetImpl(data_ptr + k, is_shared));
3667 if (std::isnan(elem_k)) return Just(true);
3668 }
3669 }
3670 return Just(false);
3671 }
3672 } else if (IsFloat16TypedArrayElementsKind(Kind) && search_value == 0) {
3673 for (size_t k = start_from; k < length; ++k) {
3674 ElementType elem_k = AccessorClass::GetImpl(data_ptr + k, is_shared);
3675 if (IsFloat16RawBitsZero(elem_k)) return Just(true);
3676 }
3677 return Just(false);
3678 }
3679
3680 if (AccessorClass::ToTypedSearchValue(search_value,
3681 &typed_search_value)) {
3682 return Just(false);
3683 }
3684 }
3685
3686 for (size_t k = start_from; k < length; ++k) {
3687 ElementType elem_k = AccessorClass::GetImpl(data_ptr + k, is_shared);
3688 if (elem_k == typed_search_value) return Just(true);
3689 }
3690 return Just(false);
3691 }
3692
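  // %TypedArray%.prototype.indexOf semantics (strict equality): NaN never
  // matches, a detached or out-of-bounds array yields -1, and the search is
  // clamped to the current backing-store length.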
3693 static Maybe<int64_t> IndexOfValueImpl(Isolate* isolate,
3694 DirectHandle<JSObject> receiver,
3695 DirectHandle<Object> value,
3696 size_t start_from, size_t length) {
3697    DisallowGarbageCollection no_gc;
3698    Tagged<JSTypedArray> typed_array = Cast<JSTypedArray>(*receiver);
3699
3700 // If this is called via Array.prototype.indexOf (not
3701 // TypedArray.prototype.indexOf), it's possible that the TypedArray is
3702 // detached / out of bounds here.
3703 if (V8_UNLIKELY(typed_array->WasDetached())) return Just<int64_t>(-1);
3704 bool out_of_bounds = false;
3705 size_t typed_array_length =
3706 typed_array->GetLengthOrOutOfBounds(out_of_bounds);
3707 if (V8_UNLIKELY(out_of_bounds)) {
3708 return Just<int64_t>(-1);
3709 }
3710
3711 // Prototype has no elements, and not searching for the hole --- limit
3712 // search to backing store length.
3713 if (typed_array_length < length) {
3714 length = typed_array_length;
3715 }
3716
3717 auto is_shared = typed_array->buffer()->is_shared() ? kShared : kUnshared;
3718 ElementType typed_search_value;
3719
3720 ElementType* data_ptr =
3721 reinterpret_cast<ElementType*>(typed_array->DataPtr());
3722    if (IsBigIntTypedArrayElementsKind(Kind)) {
3723      if (!IsBigInt(*value)) return Just<int64_t>(-1);
3724 bool lossless;
3725 typed_search_value = FromHandle(value, &lossless);
3726 if (!lossless) return Just<int64_t>(-1);
3727 } else {
3728 if (!IsNumber(*value)) return Just<int64_t>(-1);
3729 double search_value = Object::NumberValue(*value);
3730 if (!std::isfinite(search_value)) {
3731 // Integral types cannot represent +Inf or NaN.
3732 if (!IsFloatTypedArrayElementsKind(Kind)) {
3733 return Just<int64_t>(-1);
3734 }
3735 if (std::isnan(search_value)) {
3736 return Just<int64_t>(-1);
3737 }
3738 } else if (IsFloat16TypedArrayElementsKind(Kind) && search_value == 0) {
3739 for (size_t k = start_from; k < length; ++k) {
3740 ElementType elem_k = AccessorClass::GetImpl(data_ptr + k, is_shared);
3741 if (IsFloat16RawBitsZero(elem_k)) return Just<int64_t>(k);
3742 }
3743 return Just<int64_t>(-1);
3744 }
3745 if (AccessorClass::ToTypedSearchValue(search_value,
3746 &typed_search_value)) {
3747 return Just<int64_t>(-1);
3748 }
3749 }
3750
3751 for (size_t k = start_from; k < length; ++k) {
3752 ElementType elem_k = AccessorClass::GetImpl(data_ptr + k, is_shared);
3753 if (elem_k == typed_search_value) return Just<int64_t>(k);
3754 }
3755 return Just<int64_t>(-1);
3756 }
3757
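  // %TypedArray%.prototype.lastIndexOf: scans backwards from |start_from|,
  // clamping it first in case a resizable buffer shrank while the fromIndex
  // argument was being converted.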
3758 static Maybe<int64_t> LastIndexOfValueImpl(DirectHandle<JSObject> receiver,
3759 DirectHandle<Object> value,
3760 size_t start_from) {
3761    DisallowGarbageCollection no_gc;
3762    Tagged<JSTypedArray> typed_array = Cast<JSTypedArray>(*receiver);
3763    auto is_shared = typed_array->buffer()->is_shared() ? kShared : kUnshared;
3764
3765 DCHECK(!typed_array->IsDetachedOrOutOfBounds());
3766
3767 ElementType typed_search_value;
3768
3769 ElementType* data_ptr =
3770 reinterpret_cast<ElementType*>(typed_array->DataPtr());
3771    if (IsBigIntTypedArrayElementsKind(Kind)) {
3772      if (!IsBigInt(*value)) return Just<int64_t>(-1);
3773 bool lossless;
3774 typed_search_value = FromHandle(value, &lossless);
3775 if (!lossless) return Just<int64_t>(-1);
3776 } else {
3777 if (!IsNumber(*value)) return Just<int64_t>(-1);
3778 double search_value = Object::NumberValue(*value);
3779 if (!std::isfinite(search_value)) {
3780        if (!IsFloat16TypedArrayElementsKind(Kind) &&
3781            std::is_integral_v<ElementType>) {
3782 // Integral types cannot represent +Inf or NaN.
3783 return Just<int64_t>(-1);
3784 } else if (std::isnan(search_value)) {
3785 // Strict Equality Comparison of NaN is always false.
3786 return Just<int64_t>(-1);
3787 }
3788 }
3789 if (AccessorClass::ToTypedSearchValue(search_value,
3790 &typed_search_value)) {
3791 return Just<int64_t>(-1);
3792 }
3793 }
3794
3795 size_t typed_array_length = typed_array->GetLength();
3796 if (V8_UNLIKELY(start_from >= typed_array_length)) {
3797 // This can happen if the TypedArray got resized when we did ToInteger
3798 // on the last parameter of lastIndexOf.
3799 DCHECK(typed_array->IsVariableLength());
3800 if (typed_array_length == 0) {
3801 return Just<int64_t>(-1);
3802 }
3803 start_from = typed_array_length - 1;
3804 }
3805
3806 size_t k = start_from;
3807 do {
3808 ElementType elem_k = AccessorClass::GetImpl(data_ptr + k, is_shared);
3809 if constexpr (IsFloat16TypedArrayElementsKind(Kind)) {
3810 if (IsFloat16RawBitsZero(typed_search_value) &&
3811 IsFloat16RawBitsZero(elem_k)) {
3812 return Just<int64_t>(k);
3813 }
3814 }
3815 if (elem_k == typed_search_value) return Just<int64_t>(k);
3816 } while (k-- != 0);
3817 return Just<int64_t>(-1);
3818 }
3819
3820 static void ReverseImpl(Tagged<JSObject> receiver) {
3821    DisallowGarbageCollection no_gc;
3822    Tagged<JSTypedArray> typed_array = Cast<JSTypedArray>(receiver);
3823
3824 DCHECK(!typed_array->IsDetachedOrOutOfBounds());
3825
3826 size_t len = typed_array->GetLength();
3827 if (len == 0) return;
3828
3829 ElementType* data = static_cast<ElementType*>(typed_array->DataPtr());
3830 if (typed_array->buffer()->is_shared()) {
3831 // TypedArrays backed by shared buffers need to be reversed using atomic
3832 // operations. Since 8-byte data are not currently always 8-byte aligned,
3833 // manually reverse using GetImpl and SetImpl, which abstract over
3834 // alignment and atomic complexities.
3835 for (ElementType *first = data, *last = data + len - 1; first < last;
3836 ++first, --last) {
3837 ElementType first_value = AccessorClass::GetImpl(first, kShared);
3838 ElementType last_value = AccessorClass::GetImpl(last, kShared);
3839 AccessorClass::SetImpl(first, last_value, kShared);
3840 AccessorClass::SetImpl(last, first_value, kShared);
3841 }
3842 } else if (COMPRESS_POINTERS_BOOL && alignof(ElementType) > kTaggedSize) {
3843 // TODO(ishell, v8:8875): See UnalignedSlot<T> for details.
3844 std::reverse(UnalignedSlot<ElementType>(data),
3845 UnalignedSlot<ElementType>(data + len));
3846 } else {
3847 std::reverse(data, data + len);
3848 }
3849 }
3850
3851 static Handle<FixedArray> CreateListFromArrayLikeImpl(
3852 Isolate* isolate, DirectHandle<JSObject> object, uint32_t length) {
3853 DirectHandle<JSTypedArray> typed_array = Cast<JSTypedArray>(object);
3854 Handle<FixedArray> result = isolate->factory()->NewFixedArray(length);
3855 for (uint32_t i = 0; i < length; i++) {
3856 DirectHandle<Object> value = AccessorClass::GetInternalImpl(
3857 isolate, typed_array, InternalIndex(i));
3858 result->set(i, *value);
3859 }
3860 return result;
3861 }
3862
3863 static void CopyTypedArrayElementsSliceImpl(Tagged<JSTypedArray> source,
3864                                              Tagged<JSTypedArray> destination,
3865                                              size_t start, size_t end) {
3866    DisallowGarbageCollection no_gc;
3867    DCHECK_EQ(destination->GetElementsKind(), AccessorClass::kind());
3868    CHECK(!source->IsDetachedOrOutOfBounds());
3869    CHECK(!destination->IsDetachedOrOutOfBounds());
3870    DCHECK_LE(start, end);
3871    DCHECK_LE(end, source->GetLength());
3872 size_t count = end - start;
3873 DCHECK_LE(count, destination->GetLength());
3874 ElementType* dest_data = static_cast<ElementType*>(destination->DataPtr());
3875 auto is_shared =
3876 source->buffer()->is_shared() || destination->buffer()->is_shared()
3877 ? kShared
3878 : kUnshared;
3879 switch (source->GetElementsKind()) {
3880#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
3881 case TYPE##_ELEMENTS: { \
3882 ctype* source_data = reinterpret_cast<ctype*>(source->DataPtr()) + start; \
3883 CopyBetweenBackingStores<TYPE##_ELEMENTS, ctype>(source_data, dest_data, \
3884 count, is_shared); \
3885 break; \
3886 }
3887      TYPED_ARRAYS(TYPED_ARRAY_CASE)
3888#undef TYPED_ARRAY_CASE
3889
3890#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, NON_RAB_GSAB_TYPE) \
3891 case TYPE##_ELEMENTS: { \
3892 ctype* source_data = reinterpret_cast<ctype*>(source->DataPtr()) + start; \
3893 CopyBetweenBackingStores<NON_RAB_GSAB_TYPE##_ELEMENTS, ctype>( \
3894 source_data, dest_data, count, is_shared); \
3895 break; \
3896 }
3898#undef TYPED_ARRAY_CASE
3899 default:
3900 UNREACHABLE();
3901 break;
3902 }
3903 }
3904
3905 // TODO(v8:11111): Update this once we have external RAB / GSAB array types.
3906 static bool HasSimpleRepresentation(ExternalArrayType type) {
3907 return !(type == kExternalFloat32Array || type == kExternalFloat64Array ||
3908           type == kExternalUint8ClampedArray ||
3909           type == kExternalFloat16Array);
3910 }
3911
3912 template <ElementsKind SourceKind, typename SourceElementType>
3913 static void CopyBetweenBackingStores(SourceElementType* source_data_ptr,
3914 ElementType* dest_data_ptr,
3915 size_t length,
3916 IsSharedBuffer is_shared) {
3917 CopyBetweenBackingStoresImpl<Kind, ElementType, SourceKind,
3918 SourceElementType>::Copy(source_data_ptr,
3919 dest_data_ptr, length,
3920 is_shared);
3921 }
3922
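  // Bulk copy between two JSTypedArrays with no observable side effects. If
  // the element representations are bit-compatible, the backing stores are
  // copied bytewise (atomically when either buffer is shared); otherwise
  // elements are converted one at a time, cloning the source first if the
  // buffers overlap.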
3923 static void CopyElementsFromTypedArray(Tagged<JSTypedArray> source,
3924                                         Tagged<JSTypedArray> destination,
3925                                         size_t length, size_t offset) {
3926    // The source is a typed array, so we know we don't need to do ToNumber
3927    // side-effects, as the source elements will always be a number.
3928    DisallowGarbageCollection no_gc;
3929
3930 CHECK(!source->IsDetachedOrOutOfBounds());
3931 CHECK(!destination->IsDetachedOrOutOfBounds());
3932
3933 DCHECK_LE(offset, destination->GetLength());
3934 DCHECK_LE(length, destination->GetLength() - offset);
3935 DCHECK_LE(length, source->GetLength());
3936
3937 ExternalArrayType source_type = source->type();
3938 ExternalArrayType destination_type = destination->type();
3939
3940 bool same_type = source_type == destination_type;
3941 bool same_size = source->element_size() == destination->element_size();
3942 bool both_are_simple = HasSimpleRepresentation(source_type) &&
3943 HasSimpleRepresentation(destination_type);
3944
3945 uint8_t* source_data = static_cast<uint8_t*>(source->DataPtr());
3946 uint8_t* dest_data = static_cast<uint8_t*>(destination->DataPtr());
3947 size_t source_byte_length = source->GetByteLength();
3948 size_t dest_byte_length = destination->GetByteLength();
3949
3950 bool source_shared = source->buffer()->is_shared();
3951 bool destination_shared = destination->buffer()->is_shared();
3952
3953 // We can simply copy the backing store if the types are the same, or if
3954 // we are converting e.g. Uint8 <-> Int8, as the binary representation
3955 // will be the same. This is not the case for floats or clamped Uint8,
3956 // which have special conversion operations.
3957 if (same_type || (same_size && both_are_simple)) {
3958 size_t element_size = source->element_size();
3959 if (source_shared || destination_shared) {
3960        base::Relaxed_Memcpy(
3961            reinterpret_cast<base::Atomic8*>(dest_data + offset * element_size),
3962 reinterpret_cast<base::Atomic8*>(source_data),
3963 length * element_size);
3964 } else {
3965 std::memmove(dest_data + offset * element_size, source_data,
3966 length * element_size);
3967 }
3968 } else {
3969 std::unique_ptr<uint8_t[]> cloned_source_elements;
3970
3971 // If the typedarrays are overlapped, clone the source.
3972 if (dest_data + dest_byte_length > source_data &&
3973 source_data + source_byte_length > dest_data) {
3974 cloned_source_elements.reset(new uint8_t[source_byte_length]);
3975 if (source_shared) {
3976          base::Relaxed_Memcpy(
3977              reinterpret_cast<base::Atomic8*>(cloned_source_elements.get()),
3978 reinterpret_cast<base::Atomic8*>(source_data),
3979 source_byte_length);
3980 } else {
3981 std::memcpy(cloned_source_elements.get(), source_data,
3982 source_byte_length);
3983 }
3984 source_data = cloned_source_elements.get();
3985 }
3986
3987 switch (source->GetElementsKind()) {
3988#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
3989 case TYPE##_ELEMENTS: \
3990 CopyBetweenBackingStores<TYPE##_ELEMENTS, ctype>( \
3991 reinterpret_cast<ctype*>(source_data), \
3992 reinterpret_cast<ElementType*>(dest_data) + offset, length, \
3993 source_shared || destination_shared ? kShared : kUnshared); \
3994 break;
3995        TYPED_ARRAYS(TYPED_ARRAY_CASE)
3996        RAB_GSAB_TYPED_ARRAYS(TYPED_ARRAY_CASE)
3997        default:
3998 UNREACHABLE();
3999 break;
4000 }
4001#undef TYPED_ARRAY_CASE
4002 }
4003 }
4004
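  // Returns true if reading a hole out of |source| could observably consult
  // the prototype chain (a proxy or non-initial Array prototype, or the
  // NoElements protector having been invalidated), in which case
  // TryCopyElementsFastNumber must bail out instead of converting holes to
  // undefined.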
4005 static bool HoleyPrototypeLookupRequired(Isolate* isolate,
4006 Tagged<Context> context,
4007 Tagged<JSArray> source) {
4008    DisallowGarbageCollection no_gc;
4009    DisallowJavascriptExecution no_js(isolate);
4010
4011#ifdef V8_ENABLE_FORCE_SLOW_PATH
4012 if (isolate->force_slow_path()) return true;
4013#endif
4014
4015 Tagged<Object> source_proto = source->map()->prototype();
4016
4017 // Null prototypes are OK - we don't need to do prototype chain lookups on
4018 // them.
4019 if (IsNull(source_proto, isolate)) return false;
4020 if (IsJSProxy(source_proto)) return true;
4021 if (IsJSObject(source_proto) &&
4022 !context->native_context()->is_initial_array_prototype(
4023 Cast<JSObject>(source_proto))) {
4024 return true;
4025 }
4026
4027 return !Protectors::IsNoElementsIntact(isolate);
4028 }
4029
4030 static bool TryCopyElementsFastNumber(Tagged<Context> context,
4031 Tagged<JSArray> source,
4032                                        Tagged<JSTypedArray> destination,
4033                                        size_t length, size_t offset) {
4034    if (IsBigIntTypedArrayElementsKind(Kind)) return false;
4035    Isolate* isolate = source->GetIsolate();
4036    DisallowGarbageCollection no_gc;
4037    DisallowJavascriptExecution no_js(isolate);
4038
4039 CHECK(!destination->WasDetached());
4040 bool out_of_bounds = false;
4041 CHECK_GE(destination->GetLengthOrOutOfBounds(out_of_bounds), length);
4042 CHECK(!out_of_bounds);
4043
4044 size_t current_length;
4045 DCHECK(IsNumber(source->length()) &&
4046 TryNumberToSize(source->length(), &current_length) &&
4047 length <= current_length);
4048 USE(current_length);
4049
4050 size_t dest_length = destination->GetLength();
4051 DCHECK(length + offset <= dest_length);
4052 USE(dest_length);
4053
4054 ElementsKind kind = source->GetElementsKind();
4055
4056 auto destination_shared =
4057 destination->buffer()->is_shared() ? kShared : kUnshared;
4058
4059 // When we find the hole, we normally have to look up the element on the
4060 // prototype chain, which is not handled here and we return false instead.
4061 // When the array has the original array prototype, and that prototype has
4062 // not been changed in a way that would affect lookups, we can just convert
4063 // the hole into undefined.
4064 if (HoleyPrototypeLookupRequired(isolate, context, source)) return false;
4065
4066 Tagged<Oddball> undefined = ReadOnlyRoots(isolate).undefined_value();
4067 ElementType* dest_data =
4068 reinterpret_cast<ElementType*>(destination->DataPtr()) + offset;
4069
4070 // Fast-path for packed Smi kind.
4071 if (kind == PACKED_SMI_ELEMENTS) {
4072 Tagged<FixedArray> source_store = Cast<FixedArray>(source->elements());
4073
4074 for (size_t i = 0; i < length; i++) {
4075 Tagged<Object> elem = source_store->get(static_cast<int>(i));
4076 ElementType elem_k;
4077        if constexpr (IsFloat16TypedArrayElementsKind(Kind)) {
4078          elem_k = fp16_ieee_from_fp32_value(Smi::ToInt(elem));
4079 } else {
4080 elem_k = FromScalar(Smi::ToInt(elem));
4081 }
4082 SetImpl(dest_data + i, elem_k, destination_shared);
4083 }
4084 return true;
4085 } else if (kind == HOLEY_SMI_ELEMENTS) {
4086 Tagged<FixedArray> source_store = Cast<FixedArray>(source->elements());
4087 for (size_t i = 0; i < length; i++) {
4088 if (source_store->is_the_hole(isolate, static_cast<int>(i))) {
4089 SetImpl(dest_data + i, FromObject(undefined), destination_shared);
4090 } else {
4091 Tagged<Object> elem = source_store->get(static_cast<int>(i));
4092 ElementType elem_k;
4093          if constexpr (IsFloat16TypedArrayElementsKind(Kind)) {
4094            elem_k = fp16_ieee_from_fp32_value(Smi::ToInt(elem));
4095 } else {
4096 elem_k = FromScalar(Smi::ToInt(elem));
4097 }
4098 SetImpl(dest_data + i, elem_k, destination_shared);
4099 }
4100 }
4101 return true;
4102 } else if (kind == PACKED_DOUBLE_ELEMENTS) {
4103 // Fast-path for packed double kind. We avoid boxing and then immediately
4104 // unboxing the double here by using get_scalar.
4105 Tagged<FixedDoubleArray> source_store =
4106 Cast<FixedDoubleArray>(source->elements());
4107
4108 for (size_t i = 0; i < length; i++) {
4109 // Use the from_double conversion for this specific TypedArray type,
4110 // rather than relying on C++ to convert elem.
4111 double elem = source_store->get_scalar(static_cast<int>(i));
4112 SetImpl(dest_data + i, FromScalar(elem), destination_shared);
4113 }
4114 return true;
4115 } else if (kind == HOLEY_DOUBLE_ELEMENTS) {
4116 Tagged<FixedDoubleArray> source_store =
4117 Cast<FixedDoubleArray>(source->elements());
4118 for (size_t i = 0; i < length; i++) {
4119 if (source_store->is_the_hole(static_cast<int>(i))) {
4120 SetImpl(dest_data + i, FromObject(undefined), destination_shared);
4121 } else {
4122 double elem = source_store->get_scalar(static_cast<int>(i));
4123 SetImpl(dest_data + i, FromScalar(elem), destination_shared);
4124 }
4125 }
4126 return true;
4127 }
4128 return false;
4129 }
4130
4131 // ES#sec-settypedarrayfromarraylike
4132 static Tagged<Object> CopyElementsHandleSlow(
4133 DirectHandle<JSAny> source, DirectHandle<JSTypedArray> destination,
4134 size_t length, size_t offset) {
4135 Isolate* isolate = destination->GetIsolate();
4136 // 8. Let k be 0.
4137 // 9. Repeat, while k < srcLength,
4138 for (size_t i = 0; i < length; i++) {
4139 DirectHandle<Object> elem;
4140 // a. Let Pk be ! ToString(đť”˝(k)).
4141 // b. Let value be ? Get(src, Pk).
4142 LookupIterator it(isolate, source, i);
4143      ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, elem,
4144                                         Object::GetProperty(&it));
4145 // c. Let targetIndex be đť”˝(targetOffset + k).
4146 // d. Perform ? IntegerIndexedElementSet(target, targetIndex, value).
4147 //
4148 // Rest of loop body inlines ES#IntegerIndexedElementSet
4149      if (IsBigIntTypedArrayElementsKind(Kind)) {
4150        // 1. If O.[[ContentType]] is BigInt, let numValue be ? ToBigInt(value).
4151        ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, elem,
4152                                           BigInt::FromObject(isolate, elem));
4153      } else {
4154        // 2. Otherwise, let numValue be ? ToNumber(value).
4155        ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, elem,
4156                                           Object::ToNumber(isolate, elem));
4157 }
4158 // 3. If IsValidIntegerIndex(O, index) is true, then
4159 // a. Let offset be O.[[ByteOffset]].
4160 // b. Let elementSize be TypedArrayElementSize(O).
4161 // c. Let indexedPosition be (ℝ(index) × elementSize) + offset.
4162 // d. Let elementType be TypedArrayElementType(O).
4163 // e. Perform SetValueInBuffer(O.[[ViewedArrayBuffer]],
4164 // indexedPosition, elementType, numValue, true, Unordered).
4165 bool out_of_bounds = false;
4166 size_t new_length = destination->GetLengthOrOutOfBounds(out_of_bounds);
4167 if (V8_UNLIKELY(out_of_bounds || destination->WasDetached() ||
4168 new_length <= offset + i)) {
4169 // Proceed with the loop so that we call get getters for the source even
4170 // though we don't set the values in the target.
4171 continue;
4172 }
4173 SetImpl(destination, InternalIndex(offset + i), *elem);
4174 // e. Set k to k + 1.
4175 }
4176 // 10. Return unused.
4177 return *isolate->factory()->undefined_value();
4178 }
4179
4180 // This doesn't guarantee that the destination array will be completely
4181 // filled. The caller must do this by passing a source with equal length, if
4182 // that is required.
4183 static Tagged<Object> CopyElementsHandleImpl(
4184 DirectHandle<JSAny> source, DirectHandle<JSObject> destination,
4185 size_t length, size_t offset) {
4186 Isolate* isolate = destination->GetIsolate();
4187 if (length == 0) return *isolate->factory()->undefined_value();
4188
4189 DirectHandle<JSTypedArray> destination_ta = Cast<JSTypedArray>(destination);
4190
4191 // All conversions from TypedArrays can be done without allocation.
4192 if (IsJSTypedArray(*source)) {
4193 CHECK(!destination_ta->WasDetached());
4194 bool out_of_bounds = false;
4195 CHECK_LE(offset + length,
4196 destination_ta->GetLengthOrOutOfBounds(out_of_bounds));
4197 CHECK(!out_of_bounds);
4198 auto source_ta = Cast<JSTypedArray>(source);
4199 ElementsKind source_kind = source_ta->GetElementsKind();
4200 bool source_is_bigint = IsBigIntTypedArrayElementsKind(source_kind);
4201 bool target_is_bigint = IsBigIntTypedArrayElementsKind(Kind);
4202 // If we have to copy more elements than we have in the source, we need to
4203 // do special handling and conversion; that happens in the slow case.
4204 if (source_is_bigint == target_is_bigint && !source_ta->WasDetached() &&
4205 length + offset <= source_ta->GetLength()) {
4206 CopyElementsFromTypedArray(*source_ta, *destination_ta, length, offset);
4207 return *isolate->factory()->undefined_value();
4208 }
4209 } else if (IsJSArray(*source)) {
4210 CHECK(!destination_ta->WasDetached());
4211 bool out_of_bounds = false;
4212 CHECK_LE(offset + length,
4213 destination_ta->GetLengthOrOutOfBounds(out_of_bounds));
4214 CHECK(!out_of_bounds);
4215 // Fast cases for packed numbers kinds where we don't need to allocate.
4216 auto source_js_array = Cast<JSArray>(source);
4217 size_t current_length;
4218 DCHECK(IsNumber(source_js_array->length()));
4219 if (TryNumberToSize(source_js_array->length(), &current_length) &&
4220 length <= current_length) {
4221 auto source_array = Cast<JSArray>(source);
4222 if (TryCopyElementsFastNumber(isolate->context(), *source_array,
4223 *destination_ta, length, offset)) {
4224 return *isolate->factory()->undefined_value();
4225 }
4226 }
4227 }
4228 // Final generic case that handles prototype chain lookups, getters, proxies
4229 // and observable side effects via valueOf, etc. In this case, it's possible
4230 // that the length getter detached / resized the underlying buffer.
4231 return CopyElementsHandleSlow(source, destination_ta, length, offset);
4232 }
4233};
4234
4235template <ElementsKind Kind, typename ElementType, ElementsKind SourceKind,
4236 typename SourceElementType>
4237struct CopyBetweenBackingStoresImpl {
4238 static void Copy(SourceElementType* source_data_ptr,
4239 ElementType* dest_data_ptr, size_t length,
4240 IsSharedBuffer is_shared) {
4241 for (; length > 0; --length, ++source_data_ptr, ++dest_data_ptr) {
4242 // We use scalar accessors to avoid boxing/unboxing, so there are no
4243 // allocations.
4244 SourceElementType source_elem =
4245 TypedElementsAccessor<SourceKind, SourceElementType>::GetImpl(
4246 source_data_ptr, is_shared);
4247 ElementType dest_elem =
4248 TypedElementsAccessor<Kind, ElementType>::FromScalar(source_elem);
4249
4250 TypedElementsAccessor<Kind, ElementType>::SetImpl(dest_data_ptr,
4251 dest_elem, is_shared);
4252 }
4253 }
4254};
4255
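// Specializations for Float16 sources: the raw uint16_t payload is widened to
// float with fp16_ieee_to_fp32_value before FromScalar converts it to the
// destination element type.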
4256template <ElementsKind Kind, typename ElementType>
4257struct CopyBetweenBackingStoresImpl<Kind, ElementType, FLOAT16_ELEMENTS,
4258 uint16_t> {
4259 static void Copy(uint16_t* source_data_ptr, ElementType* dest_data_ptr,
4260 size_t length, IsSharedBuffer is_shared) {
4261 for (; length > 0; --length, ++source_data_ptr, ++dest_data_ptr) {
4262 // We use scalar accessors to avoid boxing/unboxing, so there are no
4263 // allocations.
4264 uint16_t source_elem =
4265 TypedElementsAccessor<FLOAT16_ELEMENTS, uint16_t>::GetImpl(
4266 source_data_ptr, is_shared);
4267 ElementType dest_elem =
4268 TypedElementsAccessor<Kind, ElementType>::FromScalar(
4269 fp16_ieee_to_fp32_value(source_elem));
4270
4271 TypedElementsAccessor<Kind, ElementType>::SetImpl(dest_data_ptr,
4272 dest_elem, is_shared);
4273 }
4274 }
4275};
4276
4277template <ElementsKind Kind, typename ElementType>
4278struct CopyBetweenBackingStoresImpl<Kind, ElementType,
4279 RAB_GSAB_FLOAT16_ELEMENTS, uint16_t> {
4280 static void Copy(uint16_t* source_data_ptr, ElementType* dest_data_ptr,
4281 size_t length, IsSharedBuffer is_shared) {
4282 for (; length > 0; --length, ++source_data_ptr, ++dest_data_ptr) {
4283 // We use scalar accessors to avoid boxing/unboxing, so there are no
4284 // allocations.
4285 uint16_t source_elem =
4286 TypedElementsAccessor<RAB_GSAB_FLOAT16_ELEMENTS, uint16_t>::GetImpl(
4287 source_data_ptr, is_shared);
4288 ElementType dest_elem =
4289 TypedElementsAccessor<Kind, ElementType>::FromScalar(
4290 fp16_ieee_to_fp32_value(source_elem));
4291
4292 TypedElementsAccessor<Kind, ElementType>::SetImpl(dest_data_ptr,
4293 dest_elem, is_shared);
4294 }
4295 }
4296};
4297
4298// static
4299template <>
4300Handle<Object> TypedElementsAccessor<INT8_ELEMENTS, int8_t>::ToHandle(
4301 Isolate* isolate, int8_t value) {
4302 return handle(Smi::FromInt(value), isolate);
4303}
4304
4305// static
4306template <>
4307Handle<Object> TypedElementsAccessor<UINT8_ELEMENTS, uint8_t>::ToHandle(
4308 Isolate* isolate, uint8_t value) {
4309 return handle(Smi::FromInt(value), isolate);
4310}
4311
4312// static
4313template <>
4314Handle<Object> TypedElementsAccessor<INT16_ELEMENTS, int16_t>::ToHandle(
4315 Isolate* isolate, int16_t value) {
4316 return handle(Smi::FromInt(value), isolate);
4317}
4318
4319// static
4320template <>
4321Handle<Object> TypedElementsAccessor<UINT16_ELEMENTS, uint16_t>::ToHandle(
4322 Isolate* isolate, uint16_t value) {
4323 return handle(Smi::FromInt(value), isolate);
4324}
4325
4326// static
4327template <>
4328Handle<Object> TypedElementsAccessor<INT32_ELEMENTS, int32_t>::ToHandle(
4329 Isolate* isolate, int32_t value) {
4330 return isolate->factory()->NewNumberFromInt(value);
4331}
4332
4333// static
4334template <>
4335Handle<Object> TypedElementsAccessor<UINT32_ELEMENTS, uint32_t>::ToHandle(
4336 Isolate* isolate, uint32_t value) {
4337 return isolate->factory()->NewNumberFromUint(value);
4338}
4339
4340// static
4341template <>
4342uint16_t TypedElementsAccessor<FLOAT16_ELEMENTS, uint16_t>::FromScalar(
4343 double value) {
4344 return DoubleToFloat16(value);
4345}
4346
4347// static
4348template <>
4349float TypedElementsAccessor<FLOAT32_ELEMENTS, float>::FromScalar(double value) {
4350 return DoubleToFloat32(value);
4351}
4352
4353// static
4354template <>
4355uint16_t TypedElementsAccessor<FLOAT16_ELEMENTS, uint16_t>::FromScalar(
4356 int value) {
4357 return fp16_ieee_from_fp32_value(value);
4358}
4359
4360// static
4361template <>
4362uint16_t TypedElementsAccessor<FLOAT16_ELEMENTS, uint16_t>::FromScalar(
4363 uint32_t value) {
4364 return fp16_ieee_from_fp32_value(value);
4365}
4366
4367// static
4368template <>
4369Handle<Object> TypedElementsAccessor<FLOAT16_ELEMENTS, uint16_t>::ToHandle(
4370 Isolate* isolate, uint16_t value) {
4371 return isolate->factory()->NewNumber(fp16_ieee_to_fp32_value(value));
4372}
4373
4374// static
4375template <>
4376Handle<Object> TypedElementsAccessor<FLOAT32_ELEMENTS, float>::ToHandle(
4377 Isolate* isolate, float value) {
4378 return isolate->factory()->NewNumber(value);
4379}
4380
4381// static
4382template <>
4383double TypedElementsAccessor<FLOAT64_ELEMENTS, double>::FromScalar(
4384 double value) {
4385 return value;
4386}
4387
4388// static
4389template <>
4390Handle<Object> TypedElementsAccessor<FLOAT64_ELEMENTS, double>::ToHandle(
4391 Isolate* isolate, double value) {
4392 return isolate->factory()->NewNumber(value);
4393}
4394
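// The Uint8Clamped conversions below implement the spec's ToUint8Clamp
// semantics, e.g. new Uint8ClampedArray([-1, 1.5, 300]) yields [0, 2, 255]:
// negative and NaN inputs clamp to 0, values above 255 clamp to 255, and
// fractional values round to the nearest integer with ties to even (lrint).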
4395// static
4396template <>
4397uint8_t TypedElementsAccessor<UINT8_CLAMPED_ELEMENTS, uint8_t>::FromScalar(
4398 int value) {
4399 if (value < 0x00) return 0x00;
4400 if (value > 0xFF) return 0xFF;
4401 return static_cast<uint8_t>(value);
4402}
4403
4404// static
4405template <>
4406uint8_t TypedElementsAccessor<UINT8_CLAMPED_ELEMENTS, uint8_t>::FromScalar(
4407 uint32_t value) {
4408 // We need this special case for Uint32 -> Uint8Clamped, because the highest
4409 // Uint32 values will be negative as an int, clamping to 0, rather than 255.
4410 if (value > 0xFF) return 0xFF;
4411 return static_cast<uint8_t>(value);
4412}
4413
4414// static
4415template <>
4416uint8_t TypedElementsAccessor<UINT8_CLAMPED_ELEMENTS, uint8_t>::FromScalar(
4417 double value) {
4418 // Handle NaNs and less than zero values which clamp to zero.
4419 if (!(value > 0)) return 0;
4420 if (value > 0xFF) return 0xFF;
4421 return static_cast<uint8_t>(lrint(value));
4422}
4423
4424// static
4425template <>
4426Handle<Object> TypedElementsAccessor<UINT8_CLAMPED_ELEMENTS, uint8_t>::ToHandle(
4427 Isolate* isolate, uint8_t value) {
4428 return handle(Smi::FromInt(value), isolate);
4429}
4430
4431// static
4432template <>
4433int64_t TypedElementsAccessor<BIGINT64_ELEMENTS, int64_t>::FromScalar(
4434 int value) {
4435 UNREACHABLE();
4436}
4437
4438// static
4439template <>
4440int64_t TypedElementsAccessor<BIGINT64_ELEMENTS, int64_t>::FromScalar(
4441 uint32_t value) {
4442 UNREACHABLE();
4443}
4444
4445// static
4446template <>
4447int64_t TypedElementsAccessor<BIGINT64_ELEMENTS, int64_t>::FromScalar(
4448 double value) {
4449 UNREACHABLE();
4450}
4451
4452// static
4453template <>
4454int64_t TypedElementsAccessor<BIGINT64_ELEMENTS, int64_t>::FromScalar(
4455 int64_t value) {
4456 return value;
4457}
4458
4459// static
4460template <>
4461int64_t TypedElementsAccessor<BIGINT64_ELEMENTS, int64_t>::FromScalar(
4462 uint64_t value) {
4463 return static_cast<int64_t>(value);
4464}
4465
4466// static
4467template <>
4468int64_t TypedElementsAccessor<BIGINT64_ELEMENTS, int64_t>::FromObject(
4469 Tagged<Object> value, bool* lossless) {
4470 return Cast<BigInt>(value)->AsInt64(lossless);
4471}
4472
4473// static
4474template <>
4475Handle<Object> TypedElementsAccessor<BIGINT64_ELEMENTS, int64_t>::ToHandle(
4476 Isolate* isolate, int64_t value) {
4477 return BigInt::FromInt64(isolate, value);
4478}
4479
4480// static
4481template <>
4482uint64_t TypedElementsAccessor<BIGUINT64_ELEMENTS, uint64_t>::FromScalar(
4483 int value) {
4484 UNREACHABLE();
4485}
4486
4487// static
4488template <>
4489uint64_t TypedElementsAccessor<BIGUINT64_ELEMENTS, uint64_t>::FromScalar(
4490 uint32_t value) {
4491 UNREACHABLE();
4492}
4493
4494// static
4495template <>
4496uint64_t TypedElementsAccessor<BIGUINT64_ELEMENTS, uint64_t>::FromScalar(
4497 double value) {
4498 UNREACHABLE();
4499}
4500
4501// static
4502template <>
4503uint64_t TypedElementsAccessor<BIGUINT64_ELEMENTS, uint64_t>::FromScalar(
4504 int64_t value) {
4505 return static_cast<uint64_t>(value);
4506}
4507
4508// static
4509template <>
4510uint64_t TypedElementsAccessor<BIGUINT64_ELEMENTS, uint64_t>::FromScalar(
4511 uint64_t value) {
4512 return value;
4513}
4514
4515// static
4516template <>
4517uint64_t TypedElementsAccessor<BIGUINT64_ELEMENTS, uint64_t>::FromObject(
4518 Tagged<Object> value, bool* lossless) {
4519 return Cast<BigInt>(value)->AsUint64(lossless);
4520}
4521
4522// static
4523template <>
4524Handle<Object> TypedElementsAccessor<BIGUINT64_ELEMENTS, uint64_t>::ToHandle(
4525 Isolate* isolate, uint64_t value) {
4526 return BigInt::FromUint64(isolate, value);
4527}
4528
4529// static
4530template <>
4531Handle<Object> TypedElementsAccessor<RAB_GSAB_INT8_ELEMENTS, int8_t>::ToHandle(
4532 Isolate* isolate, int8_t value) {
4533 return handle(Smi::FromInt(value), isolate);
4534}
4535
4536// static
4537template <>
4538Handle<Object> TypedElementsAccessor<RAB_GSAB_UINT8_ELEMENTS,
4539 uint8_t>::ToHandle(Isolate* isolate,
4540 uint8_t value) {
4541 return handle(Smi::FromInt(value), isolate);
4542}
4543
4544// static
4545template <>
4546Handle<Object> TypedElementsAccessor<RAB_GSAB_INT16_ELEMENTS,
4547 int16_t>::ToHandle(Isolate* isolate,
4548 int16_t value) {
4549 return handle(Smi::FromInt(value), isolate);
4550}
4551
4552// static
4553template <>
4554Handle<Object> TypedElementsAccessor<RAB_GSAB_UINT16_ELEMENTS,
4555 uint16_t>::ToHandle(Isolate* isolate,
4556 uint16_t value) {
4557 return handle(Smi::FromInt(value), isolate);
4558}
4559
4560// static
4561template <>
4562Handle<Object> TypedElementsAccessor<RAB_GSAB_INT32_ELEMENTS,
4563 int32_t>::ToHandle(Isolate* isolate,
4564 int32_t value) {
4565 return isolate->factory()->NewNumberFromInt(value);
4566}
4567
4568// static
4569template <>
4570Handle<Object> TypedElementsAccessor<RAB_GSAB_UINT32_ELEMENTS,
4571 uint32_t>::ToHandle(Isolate* isolate,
4572 uint32_t value) {
4573 return isolate->factory()->NewNumberFromUint(value);
4574}
4575
4576// static
4577template <>
4578uint16_t TypedElementsAccessor<RAB_GSAB_FLOAT16_ELEMENTS, uint16_t>::FromScalar(
4579 double value) {
4580 return DoubleToFloat16(value);
4581}
4582
4583// static
4584template <>
4585uint16_t TypedElementsAccessor<RAB_GSAB_FLOAT16_ELEMENTS, uint16_t>::FromScalar(
4586 int value) {
4587 return fp16_ieee_from_fp32_value(value);
4588}
4589
4590// static
4591template <>
4592uint16_t TypedElementsAccessor<RAB_GSAB_FLOAT16_ELEMENTS, uint16_t>::FromScalar(
4593 uint32_t value) {
4594 return fp16_ieee_from_fp32_value(value);
4595}
4596
4597// static
4598template <>
4599Handle<Object> TypedElementsAccessor<RAB_GSAB_FLOAT16_ELEMENTS,
4600 uint16_t>::ToHandle(Isolate* isolate,
4601 uint16_t value) {
4602 return isolate->factory()->NewHeapNumber(fp16_ieee_to_fp32_value(value));
4603}
4604
4605// static
4606template <>
4607float TypedElementsAccessor<RAB_GSAB_FLOAT32_ELEMENTS, float>::FromScalar(
4608 double value) {
4609 return DoubleToFloat32(value);
4610}
4611
4612// static
4613template <>
4614Handle<Object> TypedElementsAccessor<RAB_GSAB_FLOAT32_ELEMENTS,
4615 float>::ToHandle(Isolate* isolate,
4616 float value) {
4617 return isolate->factory()->NewNumber(value);
4618}
4619
4620// static
4621template <>
4622double TypedElementsAccessor<RAB_GSAB_FLOAT64_ELEMENTS, double>::FromScalar(
4623 double value) {
4624 return value;
4625}
4626
4627// static
4628template <>
4629Handle<Object> TypedElementsAccessor<RAB_GSAB_FLOAT64_ELEMENTS,
4630 double>::ToHandle(Isolate* isolate,
4631 double value) {
4632 return isolate->factory()->NewNumber(value);
4633}
4634
4635// static
4636template <>
4637uint8_t TypedElementsAccessor<RAB_GSAB_UINT8_CLAMPED_ELEMENTS,
4638 uint8_t>::FromScalar(int value) {
4639 if (value < 0x00) return 0x00;
4640 if (value > 0xFF) return 0xFF;
4641 return static_cast<uint8_t>(value);
4642}
4643
4644// static
4645template <>
4646uint8_t TypedElementsAccessor<RAB_GSAB_UINT8_CLAMPED_ELEMENTS,
4647 uint8_t>::FromScalar(uint32_t value) {
4648 // We need this special case for Uint32 -> Uint8Clamped, because the highest
4649 // Uint32 values will be negative as an int, clamping to 0, rather than 255.
4650 if (value > 0xFF) return 0xFF;
4651 return static_cast<uint8_t>(value);
4652}
4653
4654// static
4655template <>
4656uint8_t TypedElementsAccessor<RAB_GSAB_UINT8_CLAMPED_ELEMENTS,
4657 uint8_t>::FromScalar(double value) {
4658 // Handle NaNs and less than zero values which clamp to zero.
4659 if (!(value > 0)) return 0;
4660 if (value > 0xFF) return 0xFF;
4661 return static_cast<uint8_t>(lrint(value));
4662}
4663
4664// static
4665template <>
4666Handle<Object> TypedElementsAccessor<RAB_GSAB_UINT8_CLAMPED_ELEMENTS,
4667 uint8_t>::ToHandle(Isolate* isolate,
4668 uint8_t value) {
4669 return handle(Smi::FromInt(value), isolate);
4670}
4671
4672// static
4673template <>
4674int64_t TypedElementsAccessor<RAB_GSAB_BIGINT64_ELEMENTS, int64_t>::FromScalar(
4675 int value) {
4676 UNREACHABLE();
4677}
4678
4679// static
4680template <>
4681int64_t TypedElementsAccessor<RAB_GSAB_BIGINT64_ELEMENTS, int64_t>::FromScalar(
4682 uint32_t value) {
4683 UNREACHABLE();
4684}
4685
4686// static
4687template <>
4688int64_t TypedElementsAccessor<RAB_GSAB_BIGINT64_ELEMENTS, int64_t>::FromScalar(
4689 double value) {
4690 UNREACHABLE();
4691}
4692
4693// static
4694template <>
4695int64_t TypedElementsAccessor<RAB_GSAB_BIGINT64_ELEMENTS, int64_t>::FromScalar(
4696 int64_t value) {
4697 return value;
4698}
4699
4700// static
4701template <>
4702int64_t TypedElementsAccessor<RAB_GSAB_BIGINT64_ELEMENTS, int64_t>::FromScalar(
4703 uint64_t value) {
4704 return static_cast<int64_t>(value);
4705}
4706
4707// static
4708template <>
4709int64_t TypedElementsAccessor<RAB_GSAB_BIGINT64_ELEMENTS, int64_t>::FromObject(
4710 Tagged<Object> value, bool* lossless) {
4711 return Cast<BigInt>(value)->AsInt64(lossless);
4712}
4713
4714// static
4715template <>
4716Handle<Object> TypedElementsAccessor<RAB_GSAB_BIGINT64_ELEMENTS,
4717 int64_t>::ToHandle(Isolate* isolate,
4718 int64_t value) {
4719 return BigInt::FromInt64(isolate, value);
4720}
4721
4722// static
4723template <>
4724uint64_t TypedElementsAccessor<RAB_GSAB_BIGUINT64_ELEMENTS,
4725 uint64_t>::FromScalar(int value) {
4726 UNREACHABLE();
4727}
4728
4729// static
4730template <>
4731uint64_t TypedElementsAccessor<RAB_GSAB_BIGUINT64_ELEMENTS,
4732 uint64_t>::FromScalar(uint32_t value) {
4733 UNREACHABLE();
4734}
4735
4736// static
4737template <>
4738uint64_t TypedElementsAccessor<RAB_GSAB_BIGUINT64_ELEMENTS,
4739 uint64_t>::FromScalar(double value) {
4740 UNREACHABLE();
4741}
4742
4743// static
4744template <>
4745uint64_t TypedElementsAccessor<RAB_GSAB_BIGUINT64_ELEMENTS,
4746 uint64_t>::FromScalar(int64_t value) {
4747 return static_cast<uint64_t>(value);
4748}
4749
4750// static
4751template <>
4752uint64_t TypedElementsAccessor<RAB_GSAB_BIGUINT64_ELEMENTS,
4753 uint64_t>::FromScalar(uint64_t value) {
4754 return value;
4755}
4756
4757// static
4758template <>
4759uint64_t TypedElementsAccessor<RAB_GSAB_BIGUINT64_ELEMENTS,
4760 uint64_t>::FromObject(Tagged<Object> value,
4761 bool* lossless) {
4762 return Cast<BigInt>(value)->AsUint64(lossless);
4763}
4764
4765// static
4766template <>
4767Handle<Object> TypedElementsAccessor<RAB_GSAB_BIGUINT64_ELEMENTS,
4768 uint64_t>::ToHandle(Isolate* isolate,
4769 uint64_t value) {
4770 return BigInt::FromUint64(isolate, value);
4771}
4772
4773#define FIXED_ELEMENTS_ACCESSOR(Type, type, TYPE, ctype) \
4774 using Type##ElementsAccessor = TypedElementsAccessor<TYPE##_ELEMENTS, ctype>;
4777#undef FIXED_ELEMENTS_ACCESSOR
4778
4779template <typename Subclass, typename ArgumentsAccessor, typename KindTraits>
4780class SloppyArgumentsElementsAccessor
4781 : public ElementsAccessorBase<Subclass, KindTraits> {
4782 public:
4783 static void ConvertArgumentsStoreResult(
4784 DirectHandle<SloppyArgumentsElements> elements,
4785 DirectHandle<Object> result) {
4786 UNREACHABLE();
4787 }
4788
4789 static Handle<Object> GetImpl(Isolate* isolate,
4790 Tagged<FixedArrayBase> parameters,
4791 InternalIndex entry) {
4792 DirectHandle<SloppyArgumentsElements> elements(
4793 Cast<SloppyArgumentsElements>(parameters), isolate);
4794 uint32_t length = elements->length();
4795 if (entry.as_uint32() < length) {
4796 // Read context mapped entry.
4798 Tagged<Object> probe =
4799 elements->mapped_entries(entry.as_uint32(), kRelaxedLoad);
4800 DCHECK(!IsTheHole(probe, isolate));
4801 Tagged<Context> context = elements->context();
4802 int context_entry = Smi::ToInt(probe);
4803 DCHECK(!IsTheHole(context->get(context_entry), isolate));
4804 return handle(context->get(context_entry), isolate);
4805 } else {
4806 // Entry is not context mapped, defer to the arguments.
4807 Handle<Object> result = ArgumentsAccessor::GetImpl(
4808 isolate, elements->arguments(), entry.adjust_down(length));
4809 return Subclass::ConvertArgumentsStoreResult(isolate, elements, result);
4810 }
4811 }
4812
4813 static void TransitionElementsKindImpl(DirectHandle<JSObject> object,
4814 DirectHandle<Map> map) {
4815 UNREACHABLE();
4816 }
4817
4818 static Maybe<bool> GrowCapacityAndConvertImpl(DirectHandle<JSObject> object,
4819 uint32_t capacity) {
4820 UNREACHABLE();
4821 }
4822
4823 static inline void SetImpl(DirectHandle<JSObject> holder, InternalIndex entry,
4824 Tagged<Object> value) {
4825 SetImpl(holder->elements(), entry, value);
4826 }
4827
4828 static inline void SetImpl(Tagged<FixedArrayBase> store, InternalIndex entry,
4829 Tagged<Object> value) {
4832 uint32_t length = elements->length();
4833 if (entry.as_uint32() < length) {
4834 // Store context mapped entry.
4836 Tagged<Object> probe =
4837 elements->mapped_entries(entry.as_uint32(), kRelaxedLoad);
4838 DCHECK(!IsTheHole(probe));
4839 Tagged<Context> context = Cast<Context>(elements->context());
4840 int context_entry = Smi::ToInt(probe);
4841 DCHECK(!IsTheHole(context->get(context_entry)));
4842 context->set(context_entry, value);
4843 } else {
4844 // Entry is not context mapped, defer to arguments.
4845 Tagged<FixedArray> arguments = elements->arguments();
4846 Tagged<Object> current =
4847 ArgumentsAccessor::GetRaw(arguments, entry.adjust_down(length));
4848 if (IsAliasedArgumentsEntry(current)) {
4851 Tagged<Context> context = Cast<Context>(elements->context());
4852 int context_entry = alias->aliased_context_slot();
4853 DCHECK(!IsTheHole(context->get(context_entry)));
4854 context->set(context_entry, value);
4855 } else {
4856 ArgumentsAccessor::SetImpl(arguments, entry.adjust_down(length), value);
4857 }
4858 }
4859 }
4860
4861 static Maybe<bool> SetLengthImpl(Isolate* isolate,
4862 DirectHandle<JSArray> array, uint32_t length,
4863 DirectHandle<FixedArrayBase> parameter_map) {
4864 // Sloppy arguments objects are not arrays.
4865 UNREACHABLE();
4866 }
4867
4868 static uint32_t GetCapacityImpl(Tagged<JSObject> holder,
4869 Tagged<FixedArrayBase> store) {
4872 Tagged<FixedArray> arguments = elements->arguments();
4873 return elements->length() +
4874 ArgumentsAccessor::GetCapacityImpl(holder, arguments);
4875 }
4876
4877 static uint32_t GetMaxNumberOfEntries(Isolate* isolate,
4878 Tagged<JSObject> holder,
4879 Tagged<FixedArrayBase> backing_store) {
4881 Cast<SloppyArgumentsElements>(backing_store);
4882 Tagged<FixedArrayBase> arguments = elements->arguments();
4883 size_t max_entries =
4884 ArgumentsAccessor::GetMaxNumberOfEntries(isolate, holder, arguments);
4885 DCHECK_LE(max_entries, std::numeric_limits<uint32_t>::max());
4886 return elements->length() + static_cast<uint32_t>(max_entries);
4887 }
4888
4889 static uint32_t NumberOfElementsImpl(Isolate* isolate,
4891 Tagged<FixedArrayBase> backing_store) {
4893 Cast<SloppyArgumentsElements>(backing_store);
4894 Tagged<FixedArrayBase> arguments = elements->arguments();
4895 uint32_t nof_elements = 0;
4896 uint32_t length = elements->length();
4897 for (uint32_t index = 0; index < length; index++) {
4898 if (HasParameterMapArg(isolate, elements, index)) nof_elements++;
4899 }
4900 return nof_elements + ArgumentsAccessor::NumberOfElementsImpl(
4901 isolate, receiver, arguments);
4902 }
4903
4904 V8_WARN_UNUSED_RESULT static ExceptionStatus AddElementsToKeyAccumulatorImpl(
4905 DirectHandle<JSObject> receiver, KeyAccumulator* accumulator,
4906 AddKeyConversion convert) {
4907 Isolate* isolate = accumulator->isolate();
4908 DirectHandle<FixedArrayBase> elements(receiver->elements(), isolate);
4909 uint32_t length = GetCapacityImpl(*receiver, *elements);
4910 for (uint32_t index = 0; index < length; index++) {
4911 InternalIndex entry(index);
4912 if (!HasEntryImpl(isolate, *elements, entry)) continue;
4913 DirectHandle<Object> value = GetImpl(isolate, *elements, entry);
4914 RETURN_FAILURE_IF_NOT_SUCCESSFUL(accumulator->AddKey(value, convert));
4915 }
4917 }
4918
4919 static bool HasEntryImpl(Isolate* isolate, Tagged<FixedArrayBase> parameters,
4920 InternalIndex entry) {
4923 uint32_t length = elements->length();
4924 if (entry.raw_value() < length) {
4925 return HasParameterMapArg(isolate, elements, entry.raw_value());
4926 }
4927 Tagged<FixedArrayBase> arguments = elements->arguments();
4928 return ArgumentsAccessor::HasEntryImpl(isolate, arguments,
4929 entry.adjust_down(length));
4930 }
4931
4932 static bool HasAccessorsImpl(Tagged<JSObject> holder,
4933 Tagged<FixedArrayBase> backing_store) {
4935 Cast<SloppyArgumentsElements>(backing_store);
4936 Tagged<FixedArray> arguments = elements->arguments();
4937 return ArgumentsAccessor::HasAccessorsImpl(holder, arguments);
4938 }
4939
4940 static InternalIndex GetEntryForIndexImpl(Isolate* isolate,
4941 Tagged<JSObject> holder,
4942 Tagged<FixedArrayBase> parameters,
4943 size_t index,
4944 PropertyFilter filter) {
4947 if (HasParameterMapArg(isolate, elements, index)) {
4948 return InternalIndex(index);
4949 }
4950 Tagged<FixedArray> arguments = elements->arguments();
4951 InternalIndex entry = ArgumentsAccessor::GetEntryForIndexImpl(
4952 isolate, holder, arguments, index, filter);
4953 if (entry.is_not_found()) return entry;
4954 // Arguments entries could overlap with the dictionary entries, hence offset
4955 // them by the number of context mapped entries.
4956 return entry.adjust_up(elements->length());
4957 }
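// Entry layout sketch for sloppy arguments objects (illustrative): with
// |length| context mapped parameters, entries [0, length) resolve through
// mapped_entries() to context slots, and every higher entry is shifted down
// by |length| into elements->arguments(). For a function with two mapped
// parameters:
//
//   entry 0, 1       -> context->get(Smi::ToInt(mapped_entries(entry)))
//   entry length + i -> ArgumentsAccessor lookup at index i
//
// which is why backing store hits above are returned via adjust_up(length).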
4958
4959 static PropertyDetails GetDetailsImpl(Tagged<JSObject> holder,
4960 InternalIndex entry) {
4962 Cast<SloppyArgumentsElements>(holder->elements());
4963 uint32_t length = elements->length();
4964 if (entry.as_uint32() < length) {
4965 return PropertyDetails(PropertyKind::kData, NONE,
4967 }
4968 Tagged<FixedArray> arguments = elements->arguments();
4969 return ArgumentsAccessor::GetDetailsImpl(arguments,
4970 entry.adjust_down(length));
4971 }
4972
4973 static bool HasParameterMapArg(Isolate* isolate,
4975 size_t index) {
4976 uint32_t length = elements->length();
4977 if (index >= length) return false;
4978 return !IsTheHole(
4979 elements->mapped_entries(static_cast<uint32_t>(index), kRelaxedLoad),
4980 isolate);
4981 }
4982
4983 static void DeleteImpl(DirectHandle<JSObject> obj, InternalIndex entry) {
4984 DirectHandle<SloppyArgumentsElements> elements(
4985 Cast<SloppyArgumentsElements>(obj->elements()), obj->GetIsolate());
4986 uint32_t length = elements->length();
4987 InternalIndex delete_or_entry = entry;
4988 if (entry.as_uint32() < length) {
4989 delete_or_entry = InternalIndex::NotFound();
4990 }
4991 Subclass::SloppyDeleteImpl(obj, elements, delete_or_entry);
4992 // SloppyDeleteImpl allocates a new dictionary elements store. To keep heap
4993 // verification happy, we postpone clearing out the mapped entry.
4994 if (entry.as_uint32() < length) {
4995 elements->set_mapped_entries(entry.as_uint32(),
4996 GetReadOnlyRoots().the_hole_value());
4997 }
4998 }
4999
5000 static void SloppyDeleteImpl(DirectHandle<JSObject> obj,
5001 DirectHandle<SloppyArgumentsElements> elements,
5002 InternalIndex entry) {
5003 // Implemented in subclasses.
5004 UNREACHABLE();
5005 }
5006
5007 V8_WARN_UNUSED_RESULT static ExceptionStatus CollectElementIndicesImpl(
5008 DirectHandle<JSObject> object, DirectHandle<FixedArrayBase> backing_store,
5009 KeyAccumulator* keys) {
5010 Isolate* isolate = keys->isolate();
5011 uint32_t nof_indices = 0;
5012 Handle<FixedArray> indices = isolate->factory()->NewFixedArray(
5013 GetCapacityImpl(*object, *backing_store));
5014 DirectCollectElementIndicesImpl(isolate, object, backing_store,
5016 ENUMERABLE_STRINGS, indices, &nof_indices);
5017 SortIndices(isolate, indices, nof_indices);
5018 for (uint32_t i = 0; i < nof_indices; i++) {
5019 RETURN_FAILURE_IF_NOT_SUCCESSFUL(keys->AddKey(indices->get(i)));
5020 }
5022 }
5023
5024 static Handle<FixedArray> DirectCollectElementIndicesImpl(
5025 Isolate* isolate, DirectHandle<JSObject> object,
5026 DirectHandle<FixedArrayBase> backing_store, GetKeysConversion convert,
5027 PropertyFilter filter, Handle<FixedArray> list, uint32_t* nof_indices,
5028 uint32_t insertion_index = 0) {
5029 auto elements = Cast<SloppyArgumentsElements>(backing_store);
5030 uint32_t length = elements->length();
5031
5032 for (uint32_t i = 0; i < length; ++i) {
5033 if (IsTheHole(elements->mapped_entries(i, kRelaxedLoad), isolate))
5034 continue;
5035 if (convert == GetKeysConversion::kConvertToString) {
5036 DirectHandle<String> index_string =
5037 isolate->factory()->Uint32ToString(i);
5038 list->set(insertion_index, *index_string);
5039 } else {
5040 list->set(insertion_index, Smi::FromInt(i));
5041 }
5042 insertion_index++;
5043 }
5044
5045 DirectHandle<FixedArray> store(elements->arguments(), isolate);
5046 return ArgumentsAccessor::DirectCollectElementIndicesImpl(
5047 isolate, object, store, convert, filter, list, nof_indices,
5048 insertion_index);
5049 }
5050
5051 static Maybe<bool> IncludesValueImpl(Isolate* isolate,
5052 DirectHandle<JSObject> object,
5053 DirectHandle<Object> value,
5054 size_t start_from, size_t length) {
5055 DCHECK(JSObject::PrototypeHasNoElements(isolate, *object));
5056 DirectHandle<Map> original_map(object->map(), isolate);
5057 DirectHandle<SloppyArgumentsElements> elements(
5058 Cast<SloppyArgumentsElements>(object->elements()), isolate);
5059 bool search_for_hole = IsUndefined(*value, isolate);
5060
5061 for (size_t k = start_from; k < length; ++k) {
5062 DCHECK_EQ(object->map(), *original_map);
5063 InternalIndex entry =
5064 GetEntryForIndexImpl(isolate, *object, *elements, k, ALL_PROPERTIES);
5065 if (entry.is_not_found()) {
5066 if (search_for_hole) return Just(true);
5067 continue;
5068 }
5069
5070 DirectHandle<Object> element_k =
5071 Subclass::GetImpl(isolate, *elements, entry);
5072
5073 if (IsAccessorPair(*element_k)) {
5074 LookupIterator it(isolate, object, k, LookupIterator::OWN);
5075 DCHECK(it.IsFound());
5077 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, element_k,
5079 Nothing<bool>());
5080
5081 if (Object::SameValueZero(*value, *element_k)) return Just(true);
5082
5083 if (object->map() != *original_map) {
5084 // Some mutation occurred in the accessor. Abort the "fast" path.
5085 return IncludesValueSlowPath(isolate, object, value, k + 1, length);
5086 }
5087 } else if (Object::SameValueZero(*value, *element_k)) {
5088 return Just(true);
5089 }
5090 }
5091 return Just(false);
5092 }
5093
5094 static Maybe<int64_t> IndexOfValueImpl(Isolate* isolate,
5095 DirectHandle<JSObject> object,
5096 DirectHandle<Object> value,
5097 size_t start_from, size_t length) {
5098 DCHECK(JSObject::PrototypeHasNoElements(isolate, *object));
5099 DirectHandle<Map> original_map(object->map(), isolate);
5100 DirectHandle<SloppyArgumentsElements> elements(
5101 Cast<SloppyArgumentsElements>(object->elements()), isolate);
5102
5103 for (size_t k = start_from; k < length; ++k) {
5104 DCHECK_EQ(object->map(), *original_map);
5105 InternalIndex entry =
5106 GetEntryForIndexImpl(isolate, *object, *elements, k, ALL_PROPERTIES);
5107 if (entry.is_not_found()) {
5108 continue;
5109 }
5110
5111 DirectHandle<Object> element_k =
5112 Subclass::GetImpl(isolate, *elements, entry);
5113
5114 if (IsAccessorPair(*element_k)) {
5115 LookupIterator it(isolate, object, k, LookupIterator::OWN);
5116 DCHECK(it.IsFound());
5118 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, element_k,
5121
5122 if (Object::StrictEquals(*value, *element_k)) {
5123 return Just<int64_t>(k);
5124 }
5125
5126 if (object->map() != *original_map) {
5127 // Some mutation occurred in the accessor. Abort the "fast" path.
5128 return IndexOfValueSlowPath(isolate, object, value, k + 1, length);
5129 }
5130 } else if (Object::StrictEquals(*value, *element_k)) {
5131 return Just<int64_t>(k);
5132 }
5133 }
5134 return Just<int64_t>(-1);
5135 }
5136};
5137
5138class SlowSloppyArgumentsElementsAccessor
5139 : public SloppyArgumentsElementsAccessor<
5140 SlowSloppyArgumentsElementsAccessor, DictionaryElementsAccessor,
5141 ElementsKindTraits<SLOW_SLOPPY_ARGUMENTS_ELEMENTS>> {
5142 public:
5143 static Handle<Object> ConvertArgumentsStoreResult(
5144 Isolate* isolate, DirectHandle<SloppyArgumentsElements> elements,
5145 Handle<Object> result) {
5146 // Elements of the arguments object in slow mode might be slow aliases.
5147 if (IsAliasedArgumentsEntry(*result)) {
5151 Tagged<Context> context = elements->context();
5152 int context_entry = alias->aliased_context_slot();
5153 DCHECK(!IsTheHole(context->get(context_entry), isolate));
5154 return handle(context->get(context_entry), isolate);
5155 }
5156 return result;
5157 }
5158 static void SloppyDeleteImpl(DirectHandle<JSObject> obj,
5159 DirectHandle<SloppyArgumentsElements> elements,
5160 InternalIndex entry) {
5161 // No need to delete a context mapped entry from the arguments elements.
5162 if (entry.is_not_found()) return;
5163 Isolate* isolate = obj->GetIsolate();
5164 DirectHandle<NumberDictionary> dict(
5165 Cast<NumberDictionary>(elements->arguments()), isolate);
5166 uint32_t length = elements->length();
5167 dict =
5168 NumberDictionary::DeleteEntry(isolate, dict, entry.adjust_down(length));
5169 elements->set_arguments(*dict);
5170 }
5171 static Maybe<bool> AddImpl(DirectHandle<JSObject> object, uint32_t index,
5172 DirectHandle<Object> value,
5173 PropertyAttributes attributes,
5174 uint32_t new_capacity) {
5175 Isolate* isolate = object->GetIsolate();
5176 DirectHandle<SloppyArgumentsElements> elements(
5177 Cast<SloppyArgumentsElements>(object->elements()), isolate);
5178 DirectHandle<FixedArrayBase> old_arguments(elements->arguments(), isolate);
5179 DirectHandle<NumberDictionary> dictionary =
5180 IsNumberDictionary(*old_arguments)
5181 ? Cast<NumberDictionary>(old_arguments)
5182 : JSObject::NormalizeElements(object);
5183 PropertyDetails details(PropertyKind::kData, attributes,
5185 DirectHandle<NumberDictionary> new_dictionary =
5186 NumberDictionary::Add(isolate, dictionary, index, value, details);
5187 if (attributes != NONE) object->RequireSlowElements(*new_dictionary);
5188 if (*dictionary != *new_dictionary) {
5189 elements->set_arguments(*new_dictionary);
5190 }
5191 return Just(true);
5192 }
5193
5194 static void ReconfigureImpl(DirectHandle<JSObject> object,
5195 DirectHandle<FixedArrayBase> store,
5196 InternalIndex entry, DirectHandle<Object> value,
5197 PropertyAttributes attributes) {
5198 Isolate* isolate = object->GetIsolate();
5199 auto elements = Cast<SloppyArgumentsElements>(store);
5200 uint32_t length = elements->length();
5201 if (entry.as_uint32() < length) {
5202 Tagged<Object> probe =
5203 elements->mapped_entries(entry.as_uint32(), kRelaxedLoad);
5204 DCHECK(!IsTheHole(probe, isolate));
5205 Tagged<Context> context = elements->context();
5206 int context_entry = Smi::ToInt(probe);
5207 DCHECK(!IsTheHole(context->get(context_entry), isolate));
5208 context->set(context_entry, *value);
5209
5210 // Redefining attributes of an aliased element destroys fast aliasing.
5211 elements->set_mapped_entries(entry.as_uint32(),
5212 ReadOnlyRoots(isolate).the_hole_value());
5213 // For elements that are still writable we re-establish slow aliasing.
5214 if ((attributes & READ_ONLY) == 0) {
5215 value = isolate->factory()->NewAliasedArgumentsEntry(context_entry);
5216 }
5217
5218 PropertyDetails details(PropertyKind::kData, attributes,
5220 DirectHandle<NumberDictionary> arguments(
5221 Cast<NumberDictionary>(elements->arguments()), isolate);
5222 arguments = NumberDictionary::Add(isolate, arguments, entry.as_uint32(),
5223 value, details);
5224 // If the attributes were NONE, we would have called set rather than
5225 // reconfigure.
5226 DCHECK_NE(NONE, attributes);
5227 object->RequireSlowElements(*arguments);
5228 elements->set_arguments(*arguments);
5229 } else {
5230 DirectHandle<FixedArrayBase> arguments(elements->arguments(), isolate);
5231 DictionaryElementsAccessor::ReconfigureImpl(
5232 object, arguments, entry.adjust_down(length), value, attributes);
5233 }
5234 }
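  // Illustrative note on the fast/slow aliasing hand-off above: redefining a
  // context mapped element (e.g. through Object.defineProperty on the
  // arguments object) clears its mapped_entries slot, so fast aliasing is
  // gone; if the element stays writable, the AliasedArgumentsEntry stored in
  // the dictionary keeps routing reads and writes to the same context slot,
  // i.e. to the parameter variable, via the slow path.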
5235};
5236
5237class FastSloppyArgumentsElementsAccessor
5238 : public SloppyArgumentsElementsAccessor<
5239 FastSloppyArgumentsElementsAccessor, FastHoleyObjectElementsAccessor,
5240 ElementsKindTraits<FAST_SLOPPY_ARGUMENTS_ELEMENTS>> {
5241 public:
5242 static Handle<Object> ConvertArgumentsStoreResult(
5243 Isolate* isolate, DirectHandle<SloppyArgumentsElements> parameter_map,
5244 Handle<Object> result) {
5245 DCHECK(!IsAliasedArgumentsEntry(*result));
5246 return result;
5247 }
5248
5249 static DirectHandle<FixedArray> GetArguments(Isolate* isolate,
5250 Tagged<FixedArrayBase> store) {
5253 return DirectHandle<FixedArray>(elements->arguments(), isolate);
5254 }
5255
5256 static DirectHandle<NumberDictionary> NormalizeImpl(
5257 DirectHandle<JSObject> object, DirectHandle<FixedArrayBase> elements) {
5258 DirectHandle<FixedArray> arguments =
5259 GetArguments(object->GetIsolate(), *elements);
5260 return FastHoleyObjectElementsAccessor::NormalizeImpl(object, arguments);
5261 }
5262
5263 static DirectHandle<NumberDictionary> NormalizeArgumentsElements(
5264 DirectHandle<JSObject> object,
5265 DirectHandle<SloppyArgumentsElements> elements, InternalIndex* entry) {
5266 DirectHandle<NumberDictionary> dictionary =
5268 elements->set_arguments(*dictionary);
5269 // kMaxUInt32 indicates that a context mapped element got deleted. In this
5270 // case we only normalize the elements (i.e. migrate to SLOW_SLOPPY).
5271 if (entry->is_not_found()) return dictionary;
5272 uint32_t length = elements->length();
5273 if (entry->as_uint32() >= length) {
5274 *entry =
5275 dictionary
5276 ->FindEntry(object->GetIsolate(), entry->as_uint32() - length)
5277 .adjust_up(length);
5278 }
5279 return dictionary;
5280 }
5281
5282 static void SloppyDeleteImpl(DirectHandle<JSObject> obj,
5283 DirectHandle<SloppyArgumentsElements> elements,
5284 InternalIndex entry) {
5285 // Always normalize elements when deleting an entry.
5286 NormalizeArgumentsElements(obj, elements, &entry);
5287 SlowSloppyArgumentsElementsAccessor::SloppyDeleteImpl(obj, elements, entry);
5288 }
5289
5290 static Maybe<bool> AddImpl(DirectHandle<JSObject> object, uint32_t index,
5291 DirectHandle<Object> value,
5292 PropertyAttributes attributes,
5293 uint32_t new_capacity) {
5294 DCHECK_EQ(NONE, attributes);
5295 Isolate* isolate = object->GetIsolate();
5296 DirectHandle<SloppyArgumentsElements> elements(
5297 Cast<SloppyArgumentsElements>(object->elements()), isolate);
5298 DirectHandle<FixedArray> old_arguments(elements->arguments(), isolate);
5299 if (IsNumberDictionary(*old_arguments) ||
5300 static_cast<uint32_t>(old_arguments->length()) < new_capacity) {
5301 MAYBE_RETURN(GrowCapacityAndConvertImpl(object, new_capacity),
5302 Nothing<bool>());
5303 }
5304 Tagged<FixedArray> arguments = elements->arguments();
5305 // For fast holey objects, the entry equals the index. The code above made
5306 // sure that there's enough space to store the value. We cannot convert
5307 // index to entry explicitly since the slot still contains the hole, so the
5308 // current EntryForIndex would indicate that it is "absent" by returning
5309 // kMaxUInt32.
5310 FastHoleyObjectElementsAccessor::SetImpl(arguments, InternalIndex(index),
5311 *value);
5312 return Just(true);
5313 }
5314
5315 static void ReconfigureImpl(DirectHandle<JSObject> object,
5316 DirectHandle<FixedArrayBase> store,
5317 InternalIndex entry, DirectHandle<Object> value,
5318 PropertyAttributes attributes) {
5319 DCHECK_EQ(object->elements(), *store);
5320 DirectHandle<SloppyArgumentsElements> elements(
5321 Cast<SloppyArgumentsElements>(*store), object->GetIsolate());
5322 NormalizeArgumentsElements(object, elements, &entry);
5323 SlowSloppyArgumentsElementsAccessor::ReconfigureImpl(object, store, entry,
5324 value, attributes);
5325 }
5326
5327 static void CopyElementsImpl(Isolate* isolate, Tagged<FixedArrayBase> from,
5328 uint32_t from_start, Tagged<FixedArrayBase> to,
5329 ElementsKind from_kind, uint32_t to_start,
5330 int packed_size, int copy_size) {
5331 DCHECK(!IsNumberDictionary(to));
5332 if (from_kind == SLOW_SLOPPY_ARGUMENTS_ELEMENTS) {
5333 CopyDictionaryToObjectElements(isolate, from, from_start, to,
5334 HOLEY_ELEMENTS, to_start, copy_size);
5335 } else {
5337 CopyObjectToObjectElements(isolate, from, HOLEY_ELEMENTS, from_start, to,
5338 HOLEY_ELEMENTS, to_start, copy_size);
5339 }
5340 }
5341
5342 static Maybe<bool> GrowCapacityAndConvertImpl(DirectHandle<JSObject> object,
5343 uint32_t capacity) {
5344 Isolate* isolate = object->GetIsolate();
5345 DirectHandle<SloppyArgumentsElements> elements(
5346 Cast<SloppyArgumentsElements>(object->elements()), isolate);
5347 DirectHandle<FixedArray> old_arguments(
5348 Cast<FixedArray>(elements->arguments()), isolate);
5349 ElementsKind from_kind = object->GetElementsKind();
5350 // This method should only be called if there's a reason to update the
5351 // elements.
5353 static_cast<uint32_t>(old_arguments->length()) < capacity);
5354 DirectHandle<FixedArrayBase> arguments;
5356 isolate, arguments,
5357 ConvertElementsWithCapacity(object, old_arguments, from_kind, capacity),
5358 Nothing<bool>());
5359 DirectHandle<Map> new_map = JSObject::GetElementsTransitionMap(
5361 JSObject::MigrateToMap(isolate, object, new_map);
5362 elements->set_arguments(Cast<FixedArray>(*arguments));
5364 return Just(true);
5365 }
5366};
5367
5368template <typename Subclass, typename BackingStoreAccessor, typename KindTraits>
5369class StringWrapperElementsAccessor
5370 : public ElementsAccessorBase<Subclass, KindTraits> {
5371 public:
5372 static Handle<Object> GetInternalImpl(Isolate* isolate,
5373 DirectHandle<JSObject> holder,
5374 InternalIndex entry) {
5375 return GetImpl(holder, entry);
5376 }
5377
5378 static Handle<Object> GetImpl(DirectHandle<JSObject> holder,
5379 InternalIndex entry) {
5380 Isolate* isolate = holder->GetIsolate();
5381 DirectHandle<String> string(GetString(*holder), isolate);
5382 uint32_t length = static_cast<uint32_t>(string->length());
5383 if (entry.as_uint32() < length) {
5384 return isolate->factory()->LookupSingleCharacterStringFromCode(
5385 String::Flatten(isolate, string)->Get(entry.as_int()));
5386 }
5387 return BackingStoreAccessor::GetImpl(isolate, holder->elements(),
5388 entry.adjust_down(length));
5389 }
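  // Lookup sketch for string wrapper elements (illustrative): for a wrapper
  // around "ab" with one extra element in its backing store, entries 0 and 1
  // resolve to the single-character strings "a" and "b" taken from the
  // wrapped string, while any higher entry is forwarded to
  // BackingStoreAccessor::GetImpl after adjust_down(length), where length is
  // the string length (2 here).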
5390
5391 static DirectHandle<Object> GetImpl(Isolate* isolate,
5392 Tagged<FixedArrayBase> elements,
5393 InternalIndex entry) {
5394 UNREACHABLE();
5395 }
5396
5397 static PropertyDetails GetDetailsImpl(Tagged<JSObject> holder,
5398 InternalIndex entry) {
5399 uint32_t length = static_cast<uint32_t>(GetString(holder)->length());
5400 if (entry.as_uint32() < length) {
5401 PropertyAttributes attributes =
5403 return PropertyDetails(PropertyKind::kData, attributes,
5405 }
5406 return BackingStoreAccessor::GetDetailsImpl(holder,
5407 entry.adjust_down(length));
5408 }
5409
5410 static InternalIndex GetEntryForIndexImpl(
5411 Isolate* isolate, Tagged<JSObject> holder,
5412 Tagged<FixedArrayBase> backing_store, size_t index,
5413 PropertyFilter filter) {
5414 uint32_t length = static_cast<uint32_t>(GetString(holder)->length());
5415 if (index < length) return InternalIndex(index);
5416 InternalIndex backing_store_entry =
5417 BackingStoreAccessor::GetEntryForIndexImpl(
5418 isolate, holder, backing_store, index, filter);
5419 if (backing_store_entry.is_not_found()) return backing_store_entry;
5420 return backing_store_entry.adjust_up(length);
5421 }
5422
5423 static void DeleteImpl(DirectHandle<JSObject> holder, InternalIndex entry) {
5424 uint32_t length = static_cast<uint32_t>(GetString(*holder)->length());
5425 if (entry.as_uint32() < length) {
5426 return; // String contents can't be deleted.
5427 }
5428 BackingStoreAccessor::DeleteImpl(holder, entry.adjust_down(length));
5429 }
5430
5431 static void SetImpl(DirectHandle<JSObject> holder, InternalIndex entry,
5432 Tagged<Object> value) {
5433 uint32_t length = static_cast<uint32_t>(GetString(*holder)->length());
5434 if (entry.as_uint32() < length) {
5435 return; // String contents are read-only.
5436 }
5437 BackingStoreAccessor::SetImpl(holder->elements(), entry.adjust_down(length),
5438 value);
5439 }
5440
5441 static Maybe<bool> AddImpl(DirectHandle<JSObject> object, uint32_t index,
5442 DirectHandle<Object> value,
5443 PropertyAttributes attributes,
5444 uint32_t new_capacity) {
5445 DCHECK(index >= static_cast<uint32_t>(GetString(*object)->length()));
5446 // Explicitly grow fast backing stores if needed. Dictionaries know how to
5447 // extend their capacity themselves.
5448 if (KindTraits::Kind == FAST_STRING_WRAPPER_ELEMENTS &&
5449 (object->GetElementsKind() == SLOW_STRING_WRAPPER_ELEMENTS ||
5450 BackingStoreAccessor::GetCapacityImpl(*object, object->elements()) !=
5451 new_capacity)) {
5452 MAYBE_RETURN(GrowCapacityAndConvertImpl(object, new_capacity),
5453 Nothing<bool>());
5454 }
5455 BackingStoreAccessor::AddImpl(object, index, value, attributes,
5456 new_capacity);
5457 return Just(true);
5458 }
5459
5460 static void ReconfigureImpl(DirectHandle<JSObject> object,
5461 DirectHandle<FixedArrayBase> store,
5462 InternalIndex entry, DirectHandle<Object> value,
5463 PropertyAttributes attributes) {
5464 uint32_t length = static_cast<uint32_t>(GetString(*object)->length());
5465 if (entry.as_uint32() < length) {
5466 return; // String contents can't be reconfigured.
5467 }
5468 BackingStoreAccessor::ReconfigureImpl(
5469 object, store, entry.adjust_down(length), value, attributes);
5470 }
5471
5472 V8_WARN_UNUSED_RESULT static ExceptionStatus AddElementsToKeyAccumulatorImpl(
5473 DirectHandle<JSObject> receiver, KeyAccumulator* accumulator,
5474 AddKeyConversion convert) {
5475 Isolate* isolate = receiver->GetIsolate();
5476 DirectHandle<String> string(GetString(*receiver), isolate);
5477 string = String::Flatten(isolate, string);
5478 uint32_t length = static_cast<uint32_t>(string->length());
5479 for (uint32_t i = 0; i < length; i++) {
5480 DirectHandle<String> key =
5481 isolate->factory()->LookupSingleCharacterStringFromCode(
5482 string->Get(i));
5483 RETURN_FAILURE_IF_NOT_SUCCESSFUL(accumulator->AddKey(key, convert));
5484 }
5485 return BackingStoreAccessor::AddElementsToKeyAccumulatorImpl(
5486 receiver, accumulator, convert);
5487 }
5488
5489 V8_WARN_UNUSED_RESULT static ExceptionStatus CollectElementIndicesImpl(
5490 DirectHandle<JSObject> object, DirectHandle<FixedArrayBase> backing_store,
5491 KeyAccumulator* keys) {
5492 uint32_t length = GetString(*object)->length();
5493 Factory* factory = keys->isolate()->factory();
5494 for (uint32_t i = 0; i < length; i++) {
5496 keys->AddKey(factory->NewNumberFromUint(i)));
5497 }
5498 return BackingStoreAccessor::CollectElementIndicesImpl(object,
5499 backing_store, keys);
5500 }
5501
5502 static Maybe<bool> GrowCapacityAndConvertImpl(DirectHandle<JSObject> object,
5503 uint32_t capacity) {
5504 DirectHandle<FixedArrayBase> old_elements(object->elements(),
5505 object->GetIsolate());
5506 ElementsKind from_kind = object->GetElementsKind();
5507 if (from_kind == FAST_STRING_WRAPPER_ELEMENTS) {
5508 // The optimizing compiler relies on the prototype lookups of String
5509 // objects always returning undefined. If there's a store to the
5510 // initial String.prototype object, make sure all the optimizations
5511 // are invalidated.
5512 object->GetIsolate()->UpdateNoElementsProtectorOnSetLength(object);
5513 }
5514 // This method should only be called if there's a reason to update the
5515 // elements.
5516 DCHECK(from_kind == SLOW_STRING_WRAPPER_ELEMENTS ||
5517 static_cast<uint32_t>(old_elements->length()) < capacity);
5518 return Subclass::BasicGrowCapacityAndConvertImpl(
5519 object, old_elements, from_kind, FAST_STRING_WRAPPER_ELEMENTS,
5520 capacity);
5521 }
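  // Illustrative consequence of the protector update above: an indexed store
  // such as String.prototype[0] = "x" on the initial String.prototype wrapper
  // takes this path, and UpdateNoElementsProtectorOnSetLength invalidates code
  // compiled under the assumption that element lookups on the String prototype
  // chain always miss.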
5522
5523 static void CopyElementsImpl(Isolate* isolate, Tagged<FixedArrayBase> from,
5524 uint32_t from_start, Tagged<FixedArrayBase> to,
5525 ElementsKind from_kind, uint32_t to_start,
5526 int packed_size, int copy_size) {
5527 DCHECK(!IsNumberDictionary(to));
5528 if (from_kind == SLOW_STRING_WRAPPER_ELEMENTS) {
5529 CopyDictionaryToObjectElements(isolate, from, from_start, to,
5530 HOLEY_ELEMENTS, to_start, copy_size);
5531 } else {
5533 CopyObjectToObjectElements(isolate, from, HOLEY_ELEMENTS, from_start, to,
5534 HOLEY_ELEMENTS, to_start, copy_size);
5535 }
5536 }
5537
5538 static uint32_t NumberOfElementsImpl(Isolate* isolate,
5539 Tagged<JSObject> object,
5540 Tagged<FixedArrayBase> backing_store) {
5541 uint32_t length = GetString(object)->length();
5542 return length + BackingStoreAccessor::NumberOfElementsImpl(isolate, object,
5543 backing_store);
5544 }
5545
5546 private:
5547 static Tagged<String> GetString(Tagged<JSObject> holder) {
5548 DCHECK(IsJSPrimitiveWrapper(holder));
5550 DCHECK(IsString(js_value->value()));
5551 return Cast<String>(js_value->value());
5552 }
5553};
5554
5555class FastStringWrapperElementsAccessor
5556 : public StringWrapperElementsAccessor<
5557 FastStringWrapperElementsAccessor, FastHoleyObjectElementsAccessor,
5558 ElementsKindTraits<FAST_STRING_WRAPPER_ELEMENTS>> {
5559 public:
5560 static DirectHandle<NumberDictionary> NormalizeImpl(
5561 DirectHandle<JSObject> object, DirectHandle<FixedArrayBase> elements) {
5562 return FastHoleyObjectElementsAccessor::NormalizeImpl(object, elements);
5563 }
5564};
5565
5566class SlowStringWrapperElementsAccessor
5567 : public StringWrapperElementsAccessor<
5568 SlowStringWrapperElementsAccessor, DictionaryElementsAccessor,
5569 ElementsKindTraits<SLOW_STRING_WRAPPER_ELEMENTS>> {
5570 public:
5571 static bool HasAccessorsImpl(Tagged<JSObject> holder,
5572 Tagged<FixedArrayBase> backing_store) {
5573 return DictionaryElementsAccessor::HasAccessorsImpl(holder, backing_store);
5574 }
5575};
5576
5577} // namespace
5578
5579MaybeDirectHandle<Object> ArrayConstructInitializeElements(
5580    DirectHandle<JSArray> array, JavaScriptArguments* args) {
5581 if (args->length() == 0) {
5582 // Optimize the case where there are no parameters passed.
5584 return array;
5585
5586 } else if (args->length() == 1 && IsNumber(*args->at(0))) {
5587 uint32_t length;
5588 if (!Object::ToArrayLength(*args->at(0), &length)) {
5589 return ThrowArrayLengthRangeError(array->GetIsolate());
5590 }
5591
5592 // Optimize the case where there is one argument and the argument is a small
5593 // smi.
5594 if (length > 0 && length < JSArray::kInitialMaxFastElementArray) {
5595 ElementsKind elements_kind = array->GetElementsKind();
5596 JSArray::Initialize(array, length, length);
5597
5598 if (!IsHoleyElementsKind(elements_kind)) {
5599 elements_kind = GetHoleyElementsKind(elements_kind);
5600 JSObject::TransitionElementsKind(array, elements_kind);
5601 }
5602 } else if (length == 0) {
5604 } else {
5605 // Take the argument as the length.
5606 JSArray::Initialize(array, 0);
5607 MAYBE_RETURN_NULL(JSArray::SetLength(array, length));
5608 }
5609 return array;
5610 }
5611
5612 Factory* factory = array->GetIsolate()->factory();
5613
5614 // Set length and elements on the array.
5615 int number_of_elements = args->length();
5616 JSObject::EnsureCanContainElements(array, args, number_of_elements,
5618
5619 // Allocate an appropriately typed elements array.
5620 ElementsKind elements_kind = array->GetElementsKind();
5622 if (IsDoubleElementsKind(elements_kind)) {
5623 elms =
5624 Cast<FixedArrayBase>(factory->NewFixedDoubleArray(number_of_elements));
5625 } else {
5626 elms = Cast<FixedArrayBase>(
5627 factory->NewFixedArrayWithHoles(number_of_elements));
5628 }
5629
5630 // Fill in the content
5631 switch (elements_kind) {
5632 case HOLEY_SMI_ELEMENTS:
5633 case PACKED_SMI_ELEMENTS: {
5634 auto smi_elms = Cast<FixedArray>(elms);
5635 for (int entry = 0; entry < number_of_elements; entry++) {
5636 smi_elms->set(entry, (*args)[entry], SKIP_WRITE_BARRIER);
5637 }
5638 break;
5639 }
5640 case HOLEY_ELEMENTS:
5641 case PACKED_ELEMENTS: {
5643 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
5644 auto object_elms = Cast<FixedArray>(elms);
5645 for (int entry = 0; entry < number_of_elements; entry++) {
5646 object_elms->set(entry, (*args)[entry], mode);
5647 }
5648 break;
5649 }
5652 auto double_elms = Cast<FixedDoubleArray>(elms);
5653 for (int entry = 0; entry < number_of_elements; entry++) {
5654 double_elms->set(entry, Object::NumberValue((*args)[entry]));
5655 }
5656 break;
5657 }
5658 default:
5659 UNREACHABLE();
5660 }
5661
5662 array->set_elements(*elms);
5663 array->set_length(Smi::FromInt(number_of_elements));
5664 return array;
5665}
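// Summary of the constructor forms handled above (illustrative):
//   new Array()          -> the no-argument fast path at the top
//   new Array(len)       -> single numeric argument treated as the length,
//                           validated through Object::ToArrayLength
//   new Array(a, b, ...) -> remaining path; elements are copied from the
//                           arguments into a freshly allocated backing store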
5666
5667void CopyFastNumberJSArrayElementsToTypedArray(Address raw_context,
5668                                              Address raw_source,
5669 Address raw_destination,
5670 uintptr_t length,
5671 uintptr_t offset) {
5673 Tagged<JSArray> source = Cast<JSArray>(Tagged<Object>(raw_source));
5675 Cast<JSTypedArray>(Tagged<Object>(raw_destination));
5676
5677 switch (destination->GetElementsKind()) {
5678#define TYPED_ARRAYS_CASE(Type, type, TYPE, ctype) \
5679 case TYPE##_ELEMENTS: \
5680 CHECK(Type##ElementsAccessor::TryCopyElementsFastNumber( \
5681 context, source, destination, length, offset)); \
5682 break;
5685#undef TYPED_ARRAYS_CASE
5686 default:
5687 UNREACHABLE();
5688 }
5689}
5690
5691void CopyTypedArrayElementsToTypedArray(Address raw_source,
5692                                        Address raw_destination,
5693 uintptr_t length, uintptr_t offset) {
5696 Cast<JSTypedArray>(Tagged<Object>(raw_destination));
5697
5698 switch (destination->GetElementsKind()) {
5699#define TYPED_ARRAYS_CASE(Type, type, TYPE, ctype) \
5700 case TYPE##_ELEMENTS: \
5701 Type##ElementsAccessor::CopyElementsFromTypedArray(source, destination, \
5702 length, offset); \
5703 break;
5706#undef TYPED_ARRAYS_CASE
5707 default:
5708 UNREACHABLE();
5709 }
5710}
5711
5712void CopyTypedArrayElementsSlice(Address raw_source, Address raw_destination,
5713 uintptr_t start, uintptr_t end) {
5716 Cast<JSTypedArray>(Tagged<Object>(raw_destination));
5717
5718 destination->GetElementsAccessor()->CopyTypedArrayElementsSlice(
5719 source, destination, start, end);
5720}
5721
5722template <typename Mapping>
5723constexpr bool IsIdentityMapping(const Mapping& mapping, size_t index) {
5724 return (index >= std::size(mapping)) ||
5725 (mapping[index] == index && IsIdentityMapping(mapping, index + 1));
5726}
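// Usage sketch (illustrative): IsIdentityMapping(m, 0) is true iff
// m[i] == i holds for every entry, so e.g.
//
//   constexpr int m[] = {0, 1, 2};
//   static_assert(IsIdentityMapping(m, 0), "");
//
// compiles, while swapping any two entries breaks the assertion. The
// static_assert over elements_kinds_from_macro below relies on exactly this
// to keep ELEMENTS_LIST in the same order as the ElementsKind enum.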
5727
5728void ElementsAccessor::InitializeOncePerProcess() {
5729 // Here we create an array with more entries than element kinds.
5730 // This is due to the sandbox: this array is indexed with an ElementsKind
5731 // read directly from within the sandbox, which must therefore be considered
5732 // attacker-controlled. An ElementsKind is a uint8_t under the hood, so we
5733 // can either use an array with 256 entries or have an explicit bounds-check
5734 // on access. The latter is probably more expensive.
5735 static_assert(std::is_same_v<std::underlying_type_t<ElementsKind>, uint8_t>);
5736 static ElementsAccessor* accessor_array[256] = {
5737#define ACCESSOR_ARRAY(Class, Kind, Store) new Class(),
5739#undef ACCESSOR_ARRAY
5740 };
5741
5742 static_assert((sizeof(accessor_array) / sizeof(*accessor_array)) >=
5744
5745 // Check that the ELEMENTS_LIST macro is in the same order as the ElementsKind
5746 // enum.
5747 constexpr ElementsKind elements_kinds_from_macro[] = {
5748#define ACCESSOR_KIND(Class, Kind, Store) Kind,
5750#undef ACCESSOR_KIND
5751 };
5752 static_assert(IsIdentityMapping(elements_kinds_from_macro, 0));
5753
5754 elements_accessors_ = accessor_array;
5755}
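// Note on the table above (illustrative): the initializer list only fills
// the leading slots, one per ELEMENTS_LIST entry; the remaining slots of the
// 256-entry static array are value-initialized to null, so an out-of-range
// uint8_t kind read from the sandbox indexes a null accessor instead of
// reading out of bounds.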
5756
5758 if (elements_accessors_ == nullptr) return;
5759#define ACCESSOR_DELETE(Class, Kind, Store) delete elements_accessors_[Kind];
5761#undef ACCESSOR_DELETE
5762 elements_accessors_ = nullptr;
5763}
5764
5765DirectHandle<JSArray> ElementsAccessor::Concat(Isolate* isolate,
5766                                               BuiltinArguments* args,
5767                                               uint32_t concat_size,
5768 uint32_t result_len) {
5769 ElementsKind result_elements_kind = GetInitialFastElementsKind();
5770 bool has_raw_doubles = false;
5771 {
5773 bool is_holey = false;
5774 for (uint32_t i = 0; i < concat_size; i++) {
5775 Tagged<Object> arg = (*args)[i];
5776 ElementsKind arg_kind = Cast<JSArray>(arg)->GetElementsKind();
5777 has_raw_doubles = has_raw_doubles || IsDoubleElementsKind(arg_kind);
5778 is_holey = is_holey || IsHoleyElementsKind(arg_kind);
5779 result_elements_kind =
5780 GetMoreGeneralElementsKind(result_elements_kind, arg_kind);
5781 }
5782 if (is_holey) {
5783 result_elements_kind = GetHoleyElementsKind(result_elements_kind);
5784 }
5785 }
5786
5787 // If a double array is concatenated into a fast elements array, the fast
5788 // elements array needs to be initialized to contain proper holes, since
5789 // boxing doubles may cause incremental marking.
5790 bool requires_double_boxing =
5791 has_raw_doubles && !IsDoubleElementsKind(result_elements_kind);
5792 auto mode =
5793 requires_double_boxing
5796 DirectHandle<JSArray> result_array = isolate->factory()->NewJSArray(
5797 result_elements_kind, result_len, result_len, mode);
5798 if (result_len == 0) return result_array;
5799
5800 uint32_t insertion_index = 0;
5801 DirectHandle<FixedArrayBase> storage(result_array->elements(), isolate);
5802 ElementsAccessor* accessor = ElementsAccessor::ForKind(result_elements_kind);
5803 for (uint32_t i = 0; i < concat_size; i++) {
5804 // It is crucial to keep |array| in a raw pointer form to avoid
5805 // performance degradation.
5806 Tagged<JSArray> array = Cast<JSArray>((*args)[i]);
5807 uint32_t len = 0;
5808 Object::ToArrayLength(array->length(), &len);
5809 if (len == 0) continue;
5810 ElementsKind from_kind = array->GetElementsKind();
5811 accessor->CopyElements(isolate, array, 0, from_kind, storage,
5812 insertion_index, len);
5813 insertion_index += len;
5814 }
5815
5816 DCHECK_EQ(insertion_index, result_len);
5817 return result_array;
5818}
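// Example of the kind generalization above (illustrative): concatenating a
// PACKED_SMI_ELEMENTS array with a HOLEY_DOUBLE_ELEMENTS array yields a
// HOLEY_DOUBLE_ELEMENTS result, while concatenating it with a
// PACKED_ELEMENTS array yields PACKED_ELEMENTS. requires_double_boxing is
// only set when raw doubles must be boxed into a non-double result kind,
// which is why the result array is then pre-initialized with holes.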
5819
5821
5822#undef ELEMENTS_LIST
5823#undef RETURN_NOTHING_IF_NOT_SUCCESSFUL
5824#undef RETURN_FAILURE_IF_NOT_SUCCESSFUL
5825} // namespace internal
5826} // namespace v8