#if V8_TARGET_ARCH_MIPS64 || V8_TARGET_ARCH_PPC64 || V8_TARGET_ARCH_S390X || \
    V8_TARGET_ARCH_LOONG64

#if defined(V8_OS_STARBOARD)

// No atomics implementation on Starboard yet; these stubs abort if reached.
template <typename T>
inline T ExchangeSeqCst(T* p, T value) { UNIMPLEMENTED(); }
template <typename T>
inline T CompareExchangeSeqCst(T* p, T oldval, T newval) { UNIMPLEMENTED(); }
template <typename T>
inline T AddSeqCst(T* p, T value) { UNIMPLEMENTED(); }
template <typename T>
inline T SubSeqCst(T* p, T value) { UNIMPLEMENTED(); }
template <typename T>
inline T AndSeqCst(T* p, T value) { UNIMPLEMENTED(); }
template <typename T>
inline T OrSeqCst(T* p, T value) { UNIMPLEMENTED(); }
template <typename T>
inline T XorSeqCst(T* p, T value) { UNIMPLEMENTED(); }
#elif V8_CC_GNU

// GCC/Clang warn that 64-bit atomics can be slow on 32-bit platforms; there is
// no alternative here, so silence the warning.
#ifdef V8_TARGET_ARCH_32_BIT
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpragmas"
#pragma GCC diagnostic ignored "-Watomic-alignment"
#endif  // V8_TARGET_ARCH_32_BIT

template <typename T>
inline T LoadSeqCst(T* p) {
  return __atomic_load_n(p, __ATOMIC_SEQ_CST);
}

template <typename T>
inline void StoreSeqCst(T* p, T value) {
  __atomic_store_n(p, value, __ATOMIC_SEQ_CST);
}

template <typename T>
inline T ExchangeSeqCst(T* p, T value) {
  return __atomic_exchange_n(p, value, __ATOMIC_SEQ_CST);
}

// On failure the builtin writes the observed value back into |oldval|, so this
// always returns whatever was in memory before the operation.
template <typename T>
inline T CompareExchangeSeqCst(T* p, T oldval, T newval) {
  (void)__atomic_compare_exchange_n(p, &oldval, newval, 0, __ATOMIC_SEQ_CST,
                                    __ATOMIC_SEQ_CST);
  return oldval;
}

template <typename T>
inline T AddSeqCst(T* p, T value) {
  return __atomic_fetch_add(p, value, __ATOMIC_SEQ_CST);
}
template <typename T>
inline T SubSeqCst(T* p, T value) {
  return __atomic_fetch_sub(p, value, __ATOMIC_SEQ_CST);
}
template <typename T>
inline T AndSeqCst(T* p, T value) {
  return __atomic_fetch_and(p, value, __ATOMIC_SEQ_CST);
}
template <typename T>
inline T OrSeqCst(T* p, T value) {
  return __atomic_fetch_or(p, value, __ATOMIC_SEQ_CST);
}
template <typename T>
inline T XorSeqCst(T* p, T value) {
  return __atomic_fetch_xor(p, value, __ATOMIC_SEQ_CST);
}

#ifdef V8_TARGET_ARCH_32_BIT
#pragma GCC diagnostic pop
#endif  // V8_TARGET_ARCH_32_BIT
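// Illustrative sketch of the helper semantics above; the values follow
// directly from the builtins:
//
//   int32_t cell = 5;
//   int32_t prev = AddSeqCst(&cell, 2);         // prev == 5, cell == 7
//   prev = CompareExchangeSeqCst(&cell, 7, 9);  // prev == 7, cell == 9
//   prev = CompareExchangeSeqCst(&cell, 7, 1);  // prev == 9, cell still 9
//
// Each read-modify-write helper returns the value that was in memory before
// the operation, which is what the Atomics builtins hand back to JavaScript.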
#elif V8_CC_MSVC

#define InterlockedExchange32 _InterlockedExchange
#define InterlockedCompareExchange32 _InterlockedCompareExchange
#define InterlockedCompareExchange8 _InterlockedCompareExchange8
#define InterlockedExchangeAdd32 _InterlockedExchangeAdd
#define InterlockedExchangeAdd16 _InterlockedExchangeAdd16
#define InterlockedExchangeAdd8 _InterlockedExchangeAdd8
#define InterlockedAnd32 _InterlockedAnd
#define InterlockedOr64 _InterlockedOr64
#define InterlockedOr32 _InterlockedOr
#define InterlockedXor32 _InterlockedXor

#if defined(V8_HOST_ARCH_ARM64)
#define InterlockedExchange8 _InterlockedExchange8
#endif
#define ATOMIC_OPS(type, suffix, vctype)                                      \
  inline type ExchangeSeqCst(type* p, type value) {                          \
    return InterlockedExchange##suffix(reinterpret_cast<vctype*>(p),         \
                                       base::bit_cast<vctype>(value));       \
  }                                                                           \
  inline type CompareExchangeSeqCst(type* p, type oldval, type newval) {     \
    return InterlockedCompareExchange##suffix(reinterpret_cast<vctype*>(p),  \
                                              base::bit_cast<vctype>(newval),\
                                              base::bit_cast<vctype>(oldval));\
  }                                                                           \
  inline type AddSeqCst(type* p, type value) {                               \
    return InterlockedExchangeAdd##suffix(reinterpret_cast<vctype*>(p),      \
                                          base::bit_cast<vctype>(value));    \
  }                                                                           \
  inline type SubSeqCst(type* p, type value) {                               \
    return InterlockedExchangeAdd##suffix(reinterpret_cast<vctype*>(p),      \
                                          -base::bit_cast<vctype>(value));   \
  }                                                                           \
  inline type AndSeqCst(type* p, type value) {                               \
    return InterlockedAnd##suffix(reinterpret_cast<vctype*>(p),              \
                                  base::bit_cast<vctype>(value));            \
  }                                                                           \
  inline type OrSeqCst(type* p, type value) {                                \
    return InterlockedOr##suffix(reinterpret_cast<vctype*>(p),               \
                                 base::bit_cast<vctype>(value));             \
  }                                                                           \
  inline type XorSeqCst(type* p, type value) {                               \
    return InterlockedXor##suffix(reinterpret_cast<vctype*>(p),              \
                                  base::bit_cast<vctype>(value));            \
  }
ATOMIC_OPS(int8_t, 8, char)
ATOMIC_OPS(uint8_t, 8, char)
ATOMIC_OPS(int16_t, 16, short)
ATOMIC_OPS(uint16_t, 16, short)
ATOMIC_OPS(int32_t, 32, long)
ATOMIC_OPS(uint32_t, 32, long)
ATOMIC_OPS(int64_t, 64, __int64)
ATOMIC_OPS(uint64_t, 64, __int64)
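// For reference, ATOMIC_OPS(int32_t, 32, long) expands (in part) to
//
//   inline int32_t AddSeqCst(int32_t* p, int32_t value) {
//     return InterlockedExchangeAdd32(reinterpret_cast<long*>(p),
//                                     base::bit_cast<long>(value));
//   }
//
// where InterlockedExchangeAdd32 is the alias for _InterlockedExchangeAdd
// #defined above, so every helper bottoms out in an MSVC intrinsic.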
template <typename T>
inline T LoadSeqCst(T* p) { UNREACHABLE(); }

template <typename T>
inline void StoreSeqCst(T* p, T value) { UNREACHABLE(); }
#undef ATOMIC_OPS

#undef InterlockedExchange32
#undef InterlockedCompareExchange32
#undef InterlockedCompareExchange8
#undef InterlockedExchangeAdd32
#undef InterlockedExchangeAdd16
#undef InterlockedExchangeAdd8
#undef InterlockedAnd32
#undef InterlockedOr64
#undef InterlockedOr32
#undef InterlockedXor32

#if defined(V8_HOST_ARCH_ARM64)
#undef InterlockedExchange8
#endif

#else

#error Unsupported platform!

#endif
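// A rough portable sketch of the same helper surface, for illustration only
// (C++20 std::atomic_ref; V8 uses the compiler-specific paths above instead):
//
//   template <typename T>
//   T AddSeqCst(T* p, T value) {
//     return std::atomic_ref<T>(*p).fetch_add(value,
//                                             std::memory_order_seq_cst);
//   }
//   template <typename T>
//   T CompareExchangeSeqCst(T* p, T oldval, T newval) {
//     std::atomic_ref<T>(*p).compare_exchange_strong(
//         oldval, newval, std::memory_order_seq_cst);
//     return oldval;  // observed value; updated by the call on failure
//   }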
template <typename T>
T FromObject(Handle<Object> number);

template <>
inline uint8_t FromObject<uint8_t>(Handle<Object> number) {
  return NumberToUint32(*number);
}
template <>
inline int8_t FromObject<int8_t>(Handle<Object> number) {
  return NumberToInt32(*number);
}
template <>
inline uint16_t FromObject<uint16_t>(Handle<Object> number) {
  return NumberToUint32(*number);
}
template <>
inline int16_t FromObject<int16_t>(Handle<Object> number) {
  return NumberToInt32(*number);
}
template <>
inline uint32_t FromObject<uint32_t>(Handle<Object> number) {
  return NumberToUint32(*number);
}
template <>
inline int32_t FromObject<int32_t>(Handle<Object> number) {
  return NumberToInt32(*number);
}
template <>
inline uint64_t FromObject<uint64_t>(Handle<Object> bigint) {
  return Cast<BigInt>(*bigint)->AsUint64();
}
template <>
inline int64_t FromObject<int64_t>(Handle<Object> bigint) {
  return Cast<BigInt>(*bigint)->AsInt64();
}
inline Tagged<Object> ToObject(Isolate* isolate, uint32_t t) {
  return *isolate->factory()->NewNumber(t);
}

inline Tagged<Object> ToObject(Isolate* isolate, int32_t t) {
  return *isolate->factory()->NewNumber(t);
}
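// Sketch of the conversion contract assumed above: FromObject<T> receives a
// value that has already been through ToInteger (or BigInt::FromObject for the
// 64-bit element kinds) and truncates it to the element type, e.g. the Number
// 257 becomes uint8_t{1}; ToObject boxes a raw element back into a JS value,
// via NewNumber for the 32-bit kinds, Smi::FromInt for the smaller ones, and
// BigInt::FromInt64/FromUint64 for the 64-bit ones.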
template <typename T>
struct Load {
  static inline Tagged<Object> Do(Isolate* isolate, void* buffer,
                                  size_t index) {
    T result = LoadSeqCst(static_cast<T*>(buffer) + index);
    return ToObject(isolate, result);
  }
};
template <typename T>
struct Store {
  static inline void Do(Isolate* isolate, void* buffer, size_t index,
                        Handle<Object> obj) {
    T value = FromObject<T>(obj);
    StoreSeqCst(static_cast<T*>(buffer) + index, value);
  }
};
template <typename T>
struct Exchange {
  static inline Tagged<Object> Do(Isolate* isolate, void* buffer, size_t index,
                                  Handle<Object> obj) {
    T value = FromObject<T>(obj);
    T result = ExchangeSeqCst(static_cast<T*>(buffer) + index, value);
    return ToObject(isolate, result);
  }
};
template <typename T>
inline Tagged<Object> DoCompareExchange(Isolate* isolate, void* buffer,
                                        size_t index, Handle<Object> oldobj,
                                        Handle<Object> newobj) {
  T oldval = FromObject<T>(oldobj);
  T newval = FromObject<T>(newobj);
  T result =
      CompareExchangeSeqCst(static_cast<T*>(buffer) + index, oldval, newval);
  return ToObject(isolate, result);
}
template <typename T>
struct Add {
  static inline Tagged<Object> Do(Isolate* isolate, void* buffer, size_t index,
                                  Handle<Object> obj) {
    T value = FromObject<T>(obj);
    T result = AddSeqCst(static_cast<T*>(buffer) + index, value);
    return ToObject(isolate, result);
  }
};
template <typename T>
struct Sub {
  static inline Tagged<Object> Do(Isolate* isolate, void* buffer, size_t index,
                                  Handle<Object> obj) {
    T value = FromObject<T>(obj);
    T result = SubSeqCst(static_cast<T*>(buffer) + index, value);
    return ToObject(isolate, result);
  }
};
template <typename T>
struct And {
  static inline Tagged<Object> Do(Isolate* isolate, void* buffer, size_t index,
                                  Handle<Object> obj) {
    T value = FromObject<T>(obj);
    T result = AndSeqCst(static_cast<T*>(buffer) + index, value);
    return ToObject(isolate, result);
  }
};
template <typename T>
struct Or {
  static inline Tagged<Object> Do(Isolate* isolate, void* buffer, size_t index,
                                  Handle<Object> obj) {
    T value = FromObject<T>(obj);
    T result = OrSeqCst(static_cast<T*>(buffer) + index, value);
    return ToObject(isolate, result);
  }
};
template <typename T>
struct Xor {
  static inline Tagged<Object> Do(Isolate* isolate, void* buffer, size_t index,
                                  Handle<Object> obj) {
    T value = FromObject<T>(obj);
    T result = XorSeqCst(static_cast<T*>(buffer) + index, value);
    return ToObject(isolate, result);
  }
};
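// Worked example of how the building blocks compose (sketch): for an
// Int32Array, Exchange<int32_t>::Do converts the already-ToInteger'd operand
// with FromObject<int32_t>, swaps it into element |index| of the backing store
// with sequentially consistent ordering, and returns the previous element
// boxed by ToObject; that is the value Atomics.exchange(ta, index, value)
// hands back to JavaScript.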
#define INTEGER_TYPED_ARRAYS(V)       \
  V(Uint8, uint8, UINT8, uint8_t)     \
  V(Int8, int8, INT8, int8_t)         \
  V(Uint16, uint16, UINT16, uint16_t) \
  V(Int16, int16, INT16, int16_t)     \
  V(Uint32, uint32, UINT32, uint32_t) \
  V(Int32, int32, INT32, int32_t)
#define THROW_ERROR_RETURN_FAILURE_ON_DETACHED_OR_OUT_OF_BOUNDS(              \
    isolate, sta, index, method_name)                                         \
  do {                                                                        \
    bool out_of_bounds = false;                                               \
    auto length = sta->GetLengthOrOutOfBounds(out_of_bounds);                 \
    if (V8_UNLIKELY(sta->WasDetached() || out_of_bounds || index >= length)) {\
      THROW_NEW_ERROR_RETURN_FAILURE(                                         \
          isolate, NewTypeError(MessageTemplate::kDetachedOperation,          \
                                isolate->factory()->NewStringFromAsciiChecked(\
                                    method_name)));                           \
    }                                                                         \
  } while (false)
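// Callers invoke this macro only after converting their operands because
// BigInt::FromObject and Object::ToInteger can run arbitrary user code
// (valueOf / toPrimitive) that may detach the buffer or shrink a resizable
// one; GetLengthOrOutOfBounds re-reads the current length so that stale views
// are rejected before memory is touched.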
template <template <typename> class Op>
Tagged<Object> GetModifySetValueInBuffer(RuntimeArguments args,
                                         Isolate* isolate,
                                         const char* method_name) {
  HandleScope scope(isolate);
  Handle<JSTypedArray> sta = args.at<JSTypedArray>(0);
  size_t index = NumberToSize(args[1]);
  Handle<Object> value_obj = args.at(2);

  uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
                    sta->byte_offset();

  if (sta->type() >= kExternalBigInt64Array) {
    Handle<BigInt> bigint;
    ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, bigint,
                                       BigInt::FromObject(isolate, value_obj));

    THROW_ERROR_RETURN_FAILURE_ON_DETACHED_OR_OUT_OF_BOUNDS(isolate, sta, index,
                                                            method_name);

    CHECK_LT(index, sta->GetLength());
    if (sta->type() == kExternalBigInt64Array) {
      return Op<int64_t>::Do(isolate, source, index, bigint);
    }
    DCHECK_EQ(sta->type(), kExternalBigUint64Array);
    return Op<uint64_t>::Do(isolate, source, index, bigint);
  }

  Handle<Object> value;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value,
                                     Object::ToInteger(isolate, value_obj));

  THROW_ERROR_RETURN_FAILURE_ON_DETACHED_OR_OUT_OF_BOUNDS(isolate, sta, index,
                                                          method_name);

  CHECK_LT(index, sta->GetLength());
  switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype) \
  case kExternal##Type##Array:                        \
    return Op<ctype>::Do(isolate, source, index, value);
    INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    default:
      break;
  }
  UNREACHABLE();
}
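// For reference, INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE) in the switch above
// expands to one case per non-BigInt integer element kind, e.g.
//
//   case kExternalUint8Array:
//     return Op<uint8_t>::Do(isolate, source, index, value);
//   case kExternalInt8Array:
//     return Op<int8_t>::Do(isolate, source, index, value);
//   ...
//
// so the array's element type alone selects which instantiation of Op runs.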
RUNTIME_FUNCTION(Runtime_AtomicsLoad64) {
  HandleScope scope(isolate);
  Handle<JSTypedArray> sta = args.at<JSTypedArray>(0);
  size_t index = NumberToSize(args[1]);

  uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
                    sta->byte_offset();

  DCHECK(!sta->IsDetachedOrOutOfBounds());
  CHECK_LT(index, sta->GetLength());
  if (sta->type() == kExternalBigInt64Array) {
    return Load<int64_t>::Do(isolate, source, index);
  }
  DCHECK_EQ(sta->type(), kExternalBigUint64Array);
  return Load<uint64_t>::Do(isolate, source, index);
}
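// JS-level behaviour implemented here (sketch):
//
//   const ta = new BigInt64Array(new SharedArrayBuffer(16));
//   Atomics.load(ta, 1);  // seq-cst read of element 1, returned as a BigInt
//
// On the architectures named at the top of this file that read goes through
// Load<int64_t>::Do above.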
RUNTIME_FUNCTION(Runtime_AtomicsStore64) {
  HandleScope scope(isolate);
  Handle<JSTypedArray> sta = args.at<JSTypedArray>(0);
  size_t index = NumberToSize(args[1]);
  Handle<Object> value_obj = args.at(2);

  uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
                    sta->byte_offset();

  Handle<BigInt> bigint;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, bigint,
                                     BigInt::FromObject(isolate, value_obj));

  THROW_ERROR_RETURN_FAILURE_ON_DETACHED_OR_OUT_OF_BOUNDS(isolate, sta, index,
                                                          "Atomics.store");

  CHECK_LT(index, sta->GetLength());
  if (sta->type() == kExternalBigInt64Array) {
    Store<int64_t>::Do(isolate, source, index, bigint);
    return *bigint;
  }
  DCHECK_EQ(sta->type(), kExternalBigUint64Array);
  Store<uint64_t>::Do(isolate, source, index, bigint);
  return *bigint;
}
RUNTIME_FUNCTION(Runtime_AtomicsExchange) {
  return GetModifySetValueInBuffer<Exchange>(args, isolate,
                                             "Atomics.exchange");
}
RUNTIME_FUNCTION(Runtime_AtomicsCompareExchange) {
  HandleScope scope(isolate);
  Handle<JSTypedArray> sta = args.at<JSTypedArray>(0);
  size_t index = NumberToSize(args[1]);
  Handle<Object> old_value_obj = args.at(2);
  Handle<Object> new_value_obj = args.at(3);

  uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
                    sta->byte_offset();

  if (sta->type() >= kExternalBigInt64Array) {
    Handle<BigInt> old_bigint;
    Handle<BigInt> new_bigint;
    ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
        isolate, old_bigint, BigInt::FromObject(isolate, old_value_obj));
    ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
        isolate, new_bigint, BigInt::FromObject(isolate, new_value_obj));

    THROW_ERROR_RETURN_FAILURE_ON_DETACHED_OR_OUT_OF_BOUNDS(
        isolate, sta, index, "Atomics.compareExchange");

    CHECK_LT(index, sta->GetLength());
    if (sta->type() == kExternalBigInt64Array) {
      return DoCompareExchange<int64_t>(isolate, source, index, old_bigint,
                                        new_bigint);
    }
    DCHECK_EQ(sta->type(), kExternalBigUint64Array);
    return DoCompareExchange<uint64_t>(isolate, source, index, old_bigint,
                                       new_bigint);
  }

  Handle<Object> old_value;
  Handle<Object> new_value;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, old_value, Object::ToInteger(isolate, old_value_obj));
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, new_value, Object::ToInteger(isolate, new_value_obj));

  THROW_ERROR_RETURN_FAILURE_ON_DETACHED_OR_OUT_OF_BOUNDS(
      isolate, sta, index, "Atomics.compareExchange");

  switch (sta->type()) {
#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype)                  \
  case kExternal##Type##Array:                                         \
    return DoCompareExchange<ctype>(isolate, source, index, old_value, \
                                    new_value);
    INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    default:
      break;
  }
  UNREACHABLE();
}
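// Semantics implemented above (sketch): Atomics.compareExchange(ta, i,
// expected, replacement) always returns the element value observed at ta[i];
// the store happens only if that value equals |expected| after conversion to
// the element type, so e.g. an expected value of 257 matches a stored 1 in a
// Uint8Array.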
RUNTIME_FUNCTION(Runtime_AtomicsAdd) {
  return GetModifySetValueInBuffer<Add>(args, isolate, "Atomics.add");
}

RUNTIME_FUNCTION(Runtime_AtomicsSub) {
  return GetModifySetValueInBuffer<Sub>(args, isolate, "Atomics.sub");
}

RUNTIME_FUNCTION(Runtime_AtomicsAnd) {
  return GetModifySetValueInBuffer<And>(args, isolate, "Atomics.and");
}

RUNTIME_FUNCTION(Runtime_AtomicsOr) {
  return GetModifySetValueInBuffer<Or>(args, isolate, "Atomics.or");
}

RUNTIME_FUNCTION(Runtime_AtomicsXor) {
  return GetModifySetValueInBuffer<Xor>(args, isolate, "Atomics.xor");
}
#undef INTEGER_TYPED_ARRAYS
template <typename WriteOperation>
Tagged<Object> AtomicFieldWrite(Isolate* isolate, Handle<JSObject> object,
                                Handle<Name> field_name,
                                DirectHandle<Object> value,
                                WriteOperation write_operation) {
  LookupIterator it(isolate, object, PropertyKey(isolate, field_name),
                    LookupIterator::OWN);
  Maybe<bool> result = Nothing<bool>();
  if (it.IsFound()) {
    if (!it.IsReadOnly()) {
      return write_operation(it);
    }
    // Read-only fields on shared structs/arrays: reuse the generic error path.
    result = Object::WriteToReadOnlyProperty(&it, value, Just(kThrowOnError));
  } else {
    // Shared structs and arrays are non-extensible, so adding a property is an
    // error; reuse the generic error path.
    result = Object::AddDataProperty(&it, value, NONE, Just(kThrowOnError),
                                     StoreOrigin::kMaybeKeyed);
  }
  // Both error paths throw, so only the exception sentinel escapes here.
  DCHECK(result.IsNothing());
  USE(result);
  return ReadOnlyRoots(isolate).exception();
}
RUNTIME_FUNCTION(Runtime_AtomicsStoreSharedStructOrArray) {
  HandleScope scope(isolate);
  Handle<JSObject> shared_struct_or_shared_array = args.at<JSObject>(0);
  Handle<Name> field_name;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, field_name,
                                     Object::ToName(isolate, args.at(1)));
  Handle<Object> shared_value;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, shared_value, Object::Share(isolate, args.at(2), kThrowOnError));

  return AtomicFieldWrite(isolate, shared_struct_or_shared_array, field_name,
                          shared_value, [=](LookupIterator it) {
                            it.WriteDataValue(shared_value, kSeqCstAccess);
                            return *shared_value;
                          });
}
RUNTIME_FUNCTION(Runtime_AtomicsExchangeSharedStructOrArray) {
  HandleScope scope(isolate);
  Handle<JSObject> shared_struct_or_shared_array = args.at<JSObject>(0);
  Handle<Name> field_name;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, field_name,
                                     Object::ToName(isolate, args.at(1)));
  Handle<Object> shared_value;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, shared_value, Object::Share(isolate, args.at(2), kThrowOnError));

  return AtomicFieldWrite(isolate, shared_struct_or_shared_array, field_name,
                          shared_value, [=](LookupIterator it) {
                            return *it.SwapDataValue(shared_value,
                                                     kSeqCstAccess);
                          });
}
RUNTIME_FUNCTION(Runtime_AtomicsCompareExchangeSharedStructOrArray) {
  HandleScope scope(isolate);
  Handle<JSObject> shared_struct_or_shared_array = args.at<JSObject>(0);
  Handle<Name> field_name;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, field_name,
                                     Object::ToName(isolate, args.at(1)));
  Handle<Object> shared_expected;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, shared_expected,
      Object::Share(isolate, args.at(2), kThrowOnError));
  Handle<Object> shared_value;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, shared_value, Object::Share(isolate, args.at(3), kThrowOnError));

  return AtomicFieldWrite(isolate, shared_struct_or_shared_array, field_name,
                          shared_value, [=](LookupIterator it) {
                            return *it.CompareAndSwapDataValue(
                                shared_expected, shared_value, kSeqCstAccess);
                          });
}
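// Sketch of the intended surface (assuming the JS shared-struct proposal these
// runtime functions back): Atomics.compareExchange(sharedObj, "field",
// expected, value) shares both operands into the shared heap via
// Object::Share, then performs a sequentially consistent compare-and-swap on
// the named field and returns the field's previous value.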