#include "third_party/fp16/src/include/fp16.h"

RUNTIME_FUNCTION(Runtime_ArrayBufferDetach) {
  HandleScope scope(isolate);
  // Exposed to fuzzers, so it must tolerate arbitrary arguments.
  if (args.length() < 1 || !IsJSArrayBuffer(*args.at(0))) {
    THROW_NEW_ERROR_RETURN_FAILURE(
        isolate, NewTypeError(MessageTemplate::kNotTypedArray));
  }
  auto array_buffer = Cast<JSArrayBuffer>(args.at(0));
  constexpr bool kForceForWasmMemory = false;
  MAYBE_RETURN(JSArrayBuffer::Detach(array_buffer, kForceForWasmMemory,
                                     args.atOrUndefined(isolate, 1)),
               ReadOnlyRoots(isolate).exception());
  return ReadOnlyRoots(isolate).undefined_value();
}

RUNTIME_FUNCTION(Runtime_ArrayBufferSetDetachKey) {
  DirectHandle<Object> argument = args.at(0), key = args.at(1);
  if (!IsJSArrayBuffer(*argument)) {
    THROW_NEW_ERROR_RETURN_FAILURE(
        isolate, NewTypeError(MessageTemplate::kNotTypedArray));
  }
  Cast<JSArrayBuffer>(argument)->set_detach_key(*key);
  return ReadOnlyRoots(isolate).undefined_value();
}
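
// Illustrative helper (an assumption, not in the original file): detaching a
// key-protected buffer requires presenting the same key that was stored via
// set_detach_key(); Detach reports failure through its Maybe<bool> result.
[[maybe_unused]] static Maybe<bool> DetachWithKeySketch(
    DirectHandle<JSArrayBuffer> buffer, DirectHandle<Object> key) {
  // The regular, script-visible path never forces detach of wasm memory.
  return JSArrayBuffer::Detach(buffer, /*force_for_wasm_memory=*/false, key);
}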

  // From Runtime_TypedArrayCopyElements: dispatch through the target's
  // ElementsAccessor.
  return accessor->CopyElements(source, target, length, 0);

  // From Runtime_TypedArrayGetBuffer: return the underlying JSArrayBuffer.
  return *holder->GetBuffer();

  // From Runtime_GrowableSharedArrayBufferByteLength: for growable shared
  // buffers the wrapper's own byte_length stays 0; the live length is
  // tracked on the BackingStore.
  CHECK_EQ(0, array_buffer->byte_length_unchecked());
  size_t byte_length = array_buffer->GetBackingStore()->byte_length();
  return *isolate->factory()->NewNumberFromSize(byte_length);

template <typename T>
bool CompareNum(T x, T y) {
  if (x < y) {
    return true;
  } else if (x > y) {
    return false;
  } else if (!std::is_integral_v<T>) {
    double _x = x, _y = y;
    if (x == 0 && x == y) {
      // Both are zero, possibly with different signs: -0 sorts before +0.
      return std::signbit(_x) && !std::signbit(_y);
    } else if (!std::isnan(_x) && std::isnan(_y)) {
      // Any non-NaN value sorts before NaN.
      return true;
    }
  }
  return false;
}
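
// Illustrative sketch (not part of the original file): CompareNum<double>
// induces the %TypedArray%.prototype.sort order on doubles, with -0.0
// ordered before +0.0 and all NaNs at the end. Assumes <algorithm>, <cmath>,
// and <iterator> are available.
[[maybe_unused]] static void CompareNumOrderSketch() {
  double v[] = {NAN, 0.0, -0.0, 2.0, -1.0};
  std::sort(std::begin(v), std::end(v), CompareNum<double>);
  // v is now {-1.0, -0.0, 0.0, 2.0, NaN}.
}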

bool LessThanFloat16RawBits(uint16_t x, uint16_t y) {
  return CompareNum(fp16_ieee_to_fp32_value(x), fp16_ieee_to_fp32_value(y));
}
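
// Illustrative check (not part of the original file): raw fp16 bit patterns
// are compared by widening to fp32 first. 0xBC00 is -1.0, 0x3C00 is +1.0,
// and 0x7E00 is a quiet NaN, which never sorts before a number.
[[maybe_unused]] static void Float16OrderSketch() {
  CHECK(LessThanFloat16RawBits(0xBC00, 0x3C00));   // -1.0 < 1.0
  CHECK(!LessThanFloat16RawBits(0x7E00, 0x3C00));  // NaN is not < 1.0
}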

RUNTIME_FUNCTION(Runtime_TypedArraySortFast) {
  DirectHandle<JSTypedArray> array = args.at<JSTypedArray>(0);
  DCHECK(!array->WasDetached());
  DCHECK(!array->IsOutOfBounds());

  if (v8_flags.multi_mapped_mock_allocator) {
    // With the mock allocator the mappings alias each other, so sorting is
    // meaningless; return without touching the data.
    return *array;
  }

  const size_t byte_length = array->GetByteLength();

  // For shared buffers, sort a temporary copy: std::sort may misbehave if
  // the data is concurrently modified while sorting.
  CHECK(IsJSArrayBuffer(array->buffer()));
  DirectHandle<JSArrayBuffer> buffer(Cast<JSArrayBuffer>(array->buffer()),
                                     isolate);
  const bool copy_data = buffer->is_shared();

  DirectHandle<ByteArray> array_copy;
  std::vector<uint8_t> offheap_copy;
  void* data_copy_ptr = nullptr;
  if (copy_data) {
    if (byte_length <= static_cast<unsigned>(
                           ByteArray::LengthFor(kMaxRegularHeapObjectSize))) {
      array_copy =
          isolate->factory()->NewByteArray(static_cast<int>(byte_length));
      data_copy_ptr = array_copy->begin();
    } else {
      // Too large for a regular on-heap ByteArray; use the C++ heap.
      offheap_copy.resize(byte_length);
      data_copy_ptr = &offheap_copy[0];
    }
    base::Relaxed_Memcpy(static_cast<base::Atomic8*>(data_copy_ptr),
                         static_cast<base::Atomic8*>(array->DataPtr()),
                         byte_length);
  }

  size_t length = array->GetLength();

  switch (array->type()) {
#define TYPED_ARRAY_SORT(Type, type, TYPE, ctype)                            \
  case kExternal##Type##Array: {                                             \
    ctype* data = copy_data ? reinterpret_cast<ctype*>(data_copy_ptr)        \
                            : static_cast<ctype*>(array->DataPtr());         \
    SBXCHECK(length * sizeof(ctype) == byte_length);                         \
    if (kExternal##Type##Array == kExternalFloat64Array ||                   \
        kExternal##Type##Array == kExternalFloat32Array) {                   \
      if (COMPRESS_POINTERS_BOOL && alignof(ctype) > kTaggedSize) {          \
        /* On-heap data may only be kTaggedSize-aligned. */                  \
        std::sort(UnalignedSlot<ctype>(data),                                \
                  UnalignedSlot<ctype>(data + length), CompareNum<ctype>);   \
      } else {                                                               \
        std::sort(data, data + length, CompareNum<ctype>);                   \
      }                                                                      \
    } else if (kExternal##Type##Array == kExternalFloat16Array) {            \
      DCHECK_IMPLIES(COMPRESS_POINTERS_BOOL, alignof(ctype) <= kTaggedSize); \
      std::sort(data, data + length, LessThanFloat16RawBits);                \
    } else {                                                                 \
      if (COMPRESS_POINTERS_BOOL && alignof(ctype) > kTaggedSize) {          \
        std::sort(UnalignedSlot<ctype>(data),                                \
                  UnalignedSlot<ctype>(data + length));                      \
      } else {                                                               \
        std::sort(data, data + length);                                      \
      }                                                                      \
    }                                                                        \
    break;                                                                   \
  }

    TYPED_ARRAYS(TYPED_ARRAY_SORT)
#undef TYPED_ARRAY_SORT
  }

  if (copy_data) {
    // Copy the sorted data back into the (shared) backing store.
    DCHECK_NOT_NULL(data_copy_ptr);
    base::Relaxed_Memcpy(static_cast<base::Atomic8*>(array->DataPtr()),
                         static_cast<base::Atomic8*>(data_copy_ptr),
                         byte_length);
  }
  return *array;
}
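
// Context sketch (an assumption, not V8's UnalignedSlot): with pointer
// compression, on-heap backing stores are only guaranteed kTaggedSize
// (4-byte) alignment, so an 8-byte element may straddle that boundary and
// plain ctype* iterators would make std::sort perform misaligned loads.
// The portable workaround is a bytewise copy into an aligned temporary;
// assumes <cstring>.
[[maybe_unused]] static double LoadUnalignedDoubleSketch(const void* p) {
  double d;
  std::memcpy(&d, p, sizeof(d));  // memcpy legitimizes the unaligned access.
  return d;
}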

RUNTIME_FUNCTION(Runtime_ArrayBufferMaxByteLength) {
  size_t heap_max = isolate->array_buffer_allocator()->MaxAllocationSize();
  return *isolate->factory()->NewNumber(heap_max);
}