v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
runtime-atomics.cc
1// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/base/macros.h"
8#include "src/heap/factory.h"
15
16// Implement Atomic accesses to ArrayBuffers and SharedArrayBuffers.
17// https://tc39.es/ecma262/#sec-atomics
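// The platform-specific helpers below all implement the same contract: a
// sequentially consistent load, store, or read-modify-write on raw
// backing-store memory, with the read-modify-write helpers returning the
// previous value (which is what Atomics.add and friends hand back to
// JavaScript). As a minimal sketch, assuming C++20 std::atomic_ref were used
// instead of the compiler-specific intrinsics, AddSeqCst is equivalent to:
//
//   #include <atomic>
//
//   template <typename T>  // T is an integer lane type
//   T AddSeqCstSketch(T* p, T value) {
//     // fetch_add with seq_cst ordering returns the value *p held before
//     // the addition.
//     return std::atomic_ref<T>(*p).fetch_add(value,
//                                             std::memory_order_seq_cst);
//   }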
18
19namespace v8 {
20namespace internal {
21
22// Other platforms have CSA support, see builtins-sharedarraybuffer-gen.h.
23#if V8_TARGET_ARCH_MIPS64 || V8_TARGET_ARCH_PPC64 || V8_TARGET_ARCH_S390X || \
24 V8_TARGET_ARCH_LOONG64
25
26namespace {
27
28#if defined(V8_OS_STARBOARD)
29
30template <typename T>
31inline T ExchangeSeqCst(T* p, T value) {
33}
34
35template <typename T>
36inline T CompareExchangeSeqCst(T* p, T oldval, T newval) {
38}
39
40template <typename T>
41inline T AddSeqCst(T* p, T value) {
43}
44
45template <typename T>
46inline T SubSeqCst(T* p, T value) {
48}
49
50template <typename T>
51inline T AndSeqCst(T* p, T value) {
53}
54
55template <typename T>
56inline T OrSeqCst(T* p, T value) {
58}
59
60template <typename T>
61inline T XorSeqCst(T* p, T value) {
63}
64
65#elif V8_CC_GNU
66
67// GCC/Clang helpfully warn us that using 64-bit atomics on 32-bit platforms
68// can be slow. Good to know, but we don't have a choice.
69#ifdef V8_TARGET_ARCH_32_BIT
70#pragma GCC diagnostic push
71#pragma GCC diagnostic ignored "-Wpragmas"
72#pragma GCC diagnostic ignored "-Watomic-alignment"
73#endif // V8_TARGET_ARCH_32_BIT
74
75template <typename T>
76inline T LoadSeqCst(T* p) {
77 return __atomic_load_n(p, __ATOMIC_SEQ_CST);
78}
79
80template <typename T>
81inline void StoreSeqCst(T* p, T value) {
82 __atomic_store_n(p, value, __ATOMIC_SEQ_CST);
83}
84
85template <typename T>
86inline T ExchangeSeqCst(T* p, T value) {
87 return __atomic_exchange_n(p, value, __ATOMIC_SEQ_CST);
88}
89
90template <typename T>
91inline T CompareExchangeSeqCst(T* p, T oldval, T newval) {
92 (void)__atomic_compare_exchange_n(p, &oldval, newval, 0, __ATOMIC_SEQ_CST,
93 __ATOMIC_SEQ_CST);
94 return oldval;
95}
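// Note on the CAS helper above: on failure, __atomic_compare_exchange_n
// writes the value it actually observed into |oldval|, so returning |oldval|
// yields the previous contents of *p on both success and failure, which is
// exactly the value Atomics.compareExchange must return. The boolean success
// flag is intentionally discarded via the (void) cast.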
96
97template <typename T>
98inline T AddSeqCst(T* p, T value) {
99 return __atomic_fetch_add(p, value, __ATOMIC_SEQ_CST);
100}
101
102template <typename T>
103inline T SubSeqCst(T* p, T value) {
104 return __atomic_fetch_sub(p, value, __ATOMIC_SEQ_CST);
105}
106
107template <typename T>
108inline T AndSeqCst(T* p, T value) {
109 return __atomic_fetch_and(p, value, __ATOMIC_SEQ_CST);
110}
111
112template <typename T>
113inline T OrSeqCst(T* p, T value) {
114 return __atomic_fetch_or(p, value, __ATOMIC_SEQ_CST);
115}
116
117template <typename T>
118inline T XorSeqCst(T* p, T value) {
119 return __atomic_fetch_xor(p, value, __ATOMIC_SEQ_CST);
120}
121
122#ifdef V8_TARGET_ARCH_32_BIT
123#pragma GCC diagnostic pop
124#endif // V8_TARGET_ARCH_32_BIT
125
126#elif V8_CC_MSVC
127
128#define InterlockedExchange32 _InterlockedExchange
129#define InterlockedCompareExchange32 _InterlockedCompareExchange
130#define InterlockedCompareExchange8 _InterlockedCompareExchange8
131#define InterlockedExchangeAdd32 _InterlockedExchangeAdd
132#define InterlockedExchangeAdd16 _InterlockedExchangeAdd16
133#define InterlockedExchangeAdd8 _InterlockedExchangeAdd8
134#define InterlockedAnd32 _InterlockedAnd
135#define InterlockedOr64 _InterlockedOr64
136#define InterlockedOr32 _InterlockedOr
137#define InterlockedXor32 _InterlockedXor
138
139#if defined(V8_HOST_ARCH_ARM64)
140#define InterlockedExchange8 _InterlockedExchange8
141#endif
142
143#define ATOMIC_OPS(type, suffix, vctype) \
144 inline type ExchangeSeqCst(type* p, type value) { \
145 return InterlockedExchange##suffix(reinterpret_cast<vctype*>(p), \
146 base::bit_cast<vctype>(value)); \
147 } \
148 inline type CompareExchangeSeqCst(type* p, type oldval, type newval) { \
149 return InterlockedCompareExchange##suffix(reinterpret_cast<vctype*>(p), \
150 base::bit_cast<vctype>(newval), \
151 base::bit_cast<vctype>(oldval)); \
152 } \
153 inline type AddSeqCst(type* p, type value) { \
154 return InterlockedExchangeAdd##suffix(reinterpret_cast<vctype*>(p), \
155 base::bit_cast<vctype>(value)); \
156 } \
157 inline type SubSeqCst(type* p, type value) { \
158 return InterlockedExchangeAdd##suffix(reinterpret_cast<vctype*>(p), \
159 -base::bit_cast<vctype>(value)); \
160 } \
161 inline type AndSeqCst(type* p, type value) { \
162 return InterlockedAnd##suffix(reinterpret_cast<vctype*>(p), \
163 base::bit_cast<vctype>(value)); \
164 } \
165 inline type OrSeqCst(type* p, type value) { \
166 return InterlockedOr##suffix(reinterpret_cast<vctype*>(p), \
167 base::bit_cast<vctype>(value)); \
168 } \
169 inline type XorSeqCst(type* p, type value) { \
170 return InterlockedXor##suffix(reinterpret_cast<vctype*>(p), \
171 base::bit_cast<vctype>(value)); \
172 }
173
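// For reference, a single instantiation such as ATOMIC_OPS(int32_t, 32, long)
// expands AddSeqCst to (roughly):
//
//   inline int32_t AddSeqCst(int32_t* p, int32_t value) {
//     return InterlockedExchangeAdd32(reinterpret_cast<long*>(p),
//                                     base::bit_cast<long>(value));
//   }
//
// There is no Interlocked subtract intrinsic, so SubSeqCst reuses the add
// intrinsic with the negated operand; two's-complement wraparound gives the
// correct result for these integer lane types.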
174ATOMIC_OPS(int8_t, 8, char)
175ATOMIC_OPS(uint8_t, 8, char)
176ATOMIC_OPS(int16_t, 16, short) /* NOLINT(runtime/int) */
177ATOMIC_OPS(uint16_t, 16, short) /* NOLINT(runtime/int) */
178ATOMIC_OPS(int32_t, 32, long) /* NOLINT(runtime/int) */
179ATOMIC_OPS(uint32_t, 32, long) /* NOLINT(runtime/int) */
180ATOMIC_OPS(int64_t, 64, __int64)
181ATOMIC_OPS(uint64_t, 64, __int64)
182
183template <typename T>
184inline T LoadSeqCst(T* p) {
185 UNREACHABLE();
186}
187
188template <typename T>
189inline void StoreSeqCst(T* p, T value) {
190 UNREACHABLE();
191}
192
193#undef ATOMIC_OPS
194
195#undef InterlockedExchange32
196#undef InterlockedCompareExchange32
197#undef InterlockedCompareExchange8
198#undef InterlockedExchangeAdd32
199#undef InterlockedExchangeAdd16
200#undef InterlockedExchangeAdd8
201#undef InterlockedAnd32
202#undef InterlockedOr64
203#undef InterlockedOr32
204#undef InterlockedXor32
205
206#if defined(V8_HOST_ARCH_ARM64)
207#undef InterlockedExchange8
208#endif
209
210#else
211
212#error Unsupported platform!
213
214#endif
215
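// FromObject extracts the raw lane value from a JS value that has already
// gone through ToInteger (Numbers) or ToBigInt (BigInts): the 8/16/32-bit
// specializations rely on the modulo-2^32 wrapping of NumberToInt32 /
// NumberToUint32, matching the ToInt32/ToUint32 conversions the integer
// element types require, and the 64-bit specializations take the low 64 bits
// of the BigInt.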
216template <typename T>
217T FromObject(Handle<Object> number);
218
219template <>
220inline uint8_t FromObject<uint8_t>(Handle<Object> number) {
221 return NumberToUint32(*number);
222}
223
224template <>
225inline int8_t FromObject<int8_t>(Handle<Object> number) {
226 return NumberToInt32(*number);
227}
228
229template <>
230inline uint16_t FromObject<uint16_t>(Handle<Object> number) {
231 return NumberToUint32(*number);
232}
233
234template <>
235inline int16_t FromObject<int16_t>(Handle<Object> number) {
236 return NumberToInt32(*number);
237}
238
239template <>
240inline uint32_t FromObject<uint32_t>(Handle<Object> number) {
241 return NumberToUint32(*number);
242}
243
244template <>
245inline int32_t FromObject<int32_t>(Handle<Object> number) {
246 return NumberToInt32(*number);
247}
248
249template <>
250inline uint64_t FromObject<uint64_t>(Handle<Object> bigint) {
251 return Cast<BigInt>(bigint)->AsUint64();
252}
253
254template <>
255inline int64_t FromObject<int64_t>(Handle<Object> bigint) {
256 return Cast<BigInt>(bigint)->AsInt64();
257}
258
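// ToObject converts a raw lane value back into a JS value: 8- and 16-bit
// results always fit in a Smi, 32-bit results go through NewNumber because
// they can fall outside Smi range, and 64-bit results become BigInts as
// required for the BigInt64/BigUint64 element types.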
259inline Tagged<Object> ToObject(Isolate* isolate, int8_t t) {
260 return Smi::FromInt(t);
261}
262
263inline Tagged<Object> ToObject(Isolate* isolate, uint8_t t) {
264 return Smi::FromInt(t);
265}
266
267inline Tagged<Object> ToObject(Isolate* isolate, int16_t t) {
268 return Smi::FromInt(t);
269}
270
271inline Tagged<Object> ToObject(Isolate* isolate, uint16_t t) {
272 return Smi::FromInt(t);
273}
274
275inline Tagged<Object> ToObject(Isolate* isolate, int32_t t) {
276 return *isolate->factory()->NewNumber(t);
277}
278
279inline Tagged<Object> ToObject(Isolate* isolate, uint32_t t) {
280 return *isolate->factory()->NewNumber(t);
281}
282
283inline Tagged<Object> ToObject(Isolate* isolate, int64_t t) {
284 return *BigInt::FromInt64(isolate, t);
285}
286
287inline Tagged<Object> ToObject(Isolate* isolate, uint64_t t) {
288 return *BigInt::FromUint64(isolate, t);
289}
290
291template <typename T>
292struct Load {
293 static inline Tagged<Object> Do(Isolate* isolate, void* buffer,
294 size_t index) {
295 T result = LoadSeqCst(static_cast<T*>(buffer) + index);
296 return ToObject(isolate, result);
297 }
298};
299
300template <typename T>
301struct Store {
302 static inline void Do(Isolate* isolate, void* buffer, size_t index,
303 Handle<Object> obj) {
304 T value = FromObject<T>(obj);
305 StoreSeqCst(static_cast<T*>(buffer) + index, value);
306 }
307};
308
309template <typename T>
310struct Exchange {
311 static inline Tagged<Object> Do(Isolate* isolate, void* buffer, size_t index,
312 Handle<Object> obj) {
313 T value = FromObject<T>(obj);
314 T result = ExchangeSeqCst(static_cast<T*>(buffer) + index, value);
315 return ToObject(isolate, result);
316 }
317};
318
319template <typename T>
320inline Tagged<Object> DoCompareExchange(Isolate* isolate, void* buffer,
321 size_t index, Handle<Object> oldobj,
322 Handle<Object> newobj) {
323 T oldval = FromObject<T>(oldobj);
324 T newval = FromObject<T>(newobj);
325 T result =
326 CompareExchangeSeqCst(static_cast<T*>(buffer) + index, oldval, newval);
327 return ToObject(isolate, result);
328}
329
330template <typename T>
331struct Add {
332 static inline Tagged<Object> Do(Isolate* isolate, void* buffer, size_t index,
333 Handle<Object> obj) {
334 T value = FromObject<T>(obj);
335 T result = AddSeqCst(static_cast<T*>(buffer) + index, value);
336 return ToObject(isolate, result);
337 }
338};
339
340template <typename T>
341struct Sub {
342 static inline Tagged<Object> Do(Isolate* isolate, void* buffer, size_t index,
343 Handle<Object> obj) {
344 T value = FromObject<T>(obj);
345 T result = SubSeqCst(static_cast<T*>(buffer) + index, value);
346 return ToObject(isolate, result);
347 }
348};
349
350template <typename T>
351struct And {
352 static inline Tagged<Object> Do(Isolate* isolate, void* buffer, size_t index,
353 Handle<Object> obj) {
354 T value = FromObject<T>(obj);
355 T result = AndSeqCst(static_cast<T*>(buffer) + index, value);
356 return ToObject(isolate, result);
357 }
358};
359
360template <typename T>
361struct Or {
362 static inline Tagged<Object> Do(Isolate* isolate, void* buffer, size_t index,
363 Handle<Object> obj) {
364 T value = FromObject<T>(obj);
365 T result = OrSeqCst(static_cast<T*>(buffer) + index, value);
366 return ToObject(isolate, result);
367 }
368};
369
370template <typename T>
371struct Xor {
372 static inline Tagged<Object> Do(Isolate* isolate, void* buffer, size_t index,
373 Handle<Object> obj) {
374 T value = FromObject<T>(obj);
375 T result = XorSeqCst(static_cast<T*>(buffer) + index, value);
376 return ToObject(isolate, result);
377 }
378};
379
380} // anonymous namespace
381
382// Duplicated from objects.h
383// V has parameters (Type, type, TYPE, C type)
384#define INTEGER_TYPED_ARRAYS(V) \
385 V(Uint8, uint8, UINT8, uint8_t) \
386 V(Int8, int8, INT8, int8_t) \
387 V(Uint16, uint16, UINT16, uint16_t) \
388 V(Int16, int16, INT16, int16_t) \
389 V(Uint32, uint32, UINT32, uint32_t) \
390 V(Int32, int32, INT32, int32_t)
391
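// Together with the TYPED_ARRAY_CASE helper defined at each use site below,
// every V(...) row expands to one switch case; for example
// V(Int32, int32, INT32, int32_t) becomes (roughly):
//
//   case kExternalInt32Array:
//     return Op<int32_t>::Do(isolate, source, index, value);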
392#define THROW_ERROR_RETURN_FAILURE_ON_DETACHED_OR_OUT_OF_BOUNDS( \
393 isolate, sta, index, method_name) \
394 do { \
395 bool out_of_bounds = false; \
396 auto length = sta->GetLengthOrOutOfBounds(out_of_bounds); \
397 if (V8_UNLIKELY(sta->WasDetached() || out_of_bounds || index >= length)) { \
398 THROW_NEW_ERROR_RETURN_FAILURE( \
399 isolate, NewTypeError(MessageTemplate::kDetachedOperation, \
400 isolate->factory()->NewStringFromAsciiChecked( \
401 method_name))); \
402 } \
403 } while (false)
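// The user-visible ToInteger/ToBigInt conversions below can run arbitrary
// JavaScript (e.g. via valueOf), which may detach the ArrayBuffer or shrink a
// resizable one. The macro above is therefore invoked after each conversion,
// before any memory is touched, to re-validate the index against the current
// length.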
404
405// This is https://tc39.github.io/ecma262/#sec-getmodifysetvalueinbuffer
406// but also includes the ToInteger/ToBigInt conversion that's part of
407// https://tc39.github.io/ecma262/#sec-atomicreadmodifywrite
408template <template <typename> class Op>
409Tagged<Object> GetModifySetValueInBuffer(RuntimeArguments args,
410 Isolate* isolate,
411 const char* method_name) {
412 HandleScope scope(isolate);
413 DCHECK_EQ(3, args.length());
414 Handle<JSTypedArray> sta = args.at<JSTypedArray>(0);
415 size_t index = NumberToSize(args[1]);
416 Handle<Object> value_obj = args.at(2);
417
418 uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
419 sta->byte_offset();
420
421 if (sta->type() >= kExternalBigInt64Array) {
422 Handle<BigInt> bigint;
423 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, bigint,
424 BigInt::FromObject(isolate, value_obj));
425
426 THROW_ERROR_RETURN_FAILURE_ON_DETACHED_OR_OUT_OF_BOUNDS(isolate, sta, index,
427 method_name);
428
429 CHECK_LT(index, sta->GetLength());
430 if (sta->type() == kExternalBigInt64Array) {
431 return Op<int64_t>::Do(isolate, source, index, bigint);
432 }
433 DCHECK(sta->type() == kExternalBigUint64Array);
434 return Op<uint64_t>::Do(isolate, source, index, bigint);
435 }
436
437 Handle<Object> value;
438 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value,
439 Object::ToInteger(isolate, value_obj));
440
441 THROW_ERROR_RETURN_FAILURE_ON_DETACHED_OR_OUT_OF_BOUNDS(isolate, sta, index,
442 method_name);
443
444 CHECK_LT(index, sta->GetLength());
445
446 switch (sta->type()) {
447#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype) \
448 case kExternal##Type##Array: \
449 return Op<ctype>::Do(isolate, source, index, value);
450
451 INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
452#undef TYPED_ARRAY_CASE
453
454 default:
455 break;
456 }
457
458 UNREACHABLE();
459}
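// Example flow (sketch): on one of the architectures listed above,
// Atomics.add(ta, 0, 5) on an Int32Array ta reaches Runtime_AtomicsAdd, which
// calls GetModifySetValueInBuffer<Add>. After ToInteger converts the operand
// and the access is re-validated, the switch dispatches to Add<int32_t>::Do,
// which runs AddSeqCst on backing_store + byte_offset at index 0 and returns
// the lane's previous value as a Number.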
460
461RUNTIME_FUNCTION(Runtime_AtomicsLoad64) {
462 HandleScope scope(isolate);
463 DCHECK_EQ(2, args.length());
464 Handle<JSTypedArray> sta = args.at<JSTypedArray>(0);
465 size_t index = NumberToSize(args[1]);
466
467 uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
468 sta->byte_offset();
469
470 DCHECK(sta->type() == kExternalBigInt64Array ||
471 sta->type() == kExternalBigUint64Array);
472 DCHECK(!sta->IsDetachedOrOutOfBounds());
473 CHECK_LT(index, sta->GetLength());
474 if (sta->type() == kExternalBigInt64Array) {
475 return Load<int64_t>::Do(isolate, source, index);
476 }
477 DCHECK(sta->type() == kExternalBigUint64Array);
478 return Load<uint64_t>::Do(isolate, source, index);
479}
480
481RUNTIME_FUNCTION(Runtime_AtomicsStore64) {
482 HandleScope scope(isolate);
483 DCHECK_EQ(3, args.length());
484 Handle<JSTypedArray> sta = args.at<JSTypedArray>(0);
485 size_t index = NumberToSize(args[1]);
486 Handle<Object> value_obj = args.at(2);
487
488 uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
489 sta->byte_offset();
490
491 Handle<BigInt> bigint;
492 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, bigint,
493 BigInt::FromObject(isolate, value_obj));
494
495 THROW_ERROR_RETURN_FAILURE_ON_DETACHED_OR_OUT_OF_BOUNDS(isolate, sta, index,
496 "Atomics.store");
497
498 DCHECK(sta->type() == kExternalBigInt64Array ||
499 sta->type() == kExternalBigUint64Array);
500 CHECK_LT(index, sta->GetLength());
501 if (sta->type() == kExternalBigInt64Array) {
502 Store<int64_t>::Do(isolate, source, index, bigint);
503 return *bigint;
504 }
505 DCHECK(sta->type() == kExternalBigUint64Array);
506 Store<uint64_t>::Do(isolate, source, index, bigint);
507 return *bigint;
508}
509
510RUNTIME_FUNCTION(Runtime_AtomicsExchange) {
511 return GetModifySetValueInBuffer<Exchange>(args, isolate, "Atomics.exchange");
512}
513
514RUNTIME_FUNCTION(Runtime_AtomicsCompareExchange) {
515 HandleScope scope(isolate);
516 DCHECK_EQ(4, args.length());
517 Handle<JSTypedArray> sta = args.at<JSTypedArray>(0);
518 size_t index = NumberToSize(args[1]);
519 Handle<Object> old_value_obj = args.at(2);
520 Handle<Object> new_value_obj = args.at(3);
521
522 uint8_t* source = static_cast<uint8_t*>(sta->GetBuffer()->backing_store()) +
523 sta->byte_offset();
524
525 if (sta->type() >= kExternalBigInt64Array) {
526 Handle<BigInt> old_bigint;
527 Handle<BigInt> new_bigint;
528 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
529 isolate, old_bigint, BigInt::FromObject(isolate, old_value_obj));
530 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
531 isolate, new_bigint, BigInt::FromObject(isolate, new_value_obj));
532
533 THROW_ERROR_RETURN_FAILURE_ON_DETACHED_OR_OUT_OF_BOUNDS(
534 isolate, sta, index, "Atomics.compareExchange");
535
536 CHECK_LT(index, sta->GetLength());
537 if (sta->type() == kExternalBigInt64Array) {
538 return DoCompareExchange<int64_t>(isolate, source, index, old_bigint,
539 new_bigint);
540 }
541 DCHECK(sta->type() == kExternalBigUint64Array);
542 return DoCompareExchange<uint64_t>(isolate, source, index, old_bigint,
543 new_bigint);
544 }
545
546 Handle<Object> old_value;
547 Handle<Object> new_value;
548 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, old_value,
549 Object::ToInteger(isolate, old_value_obj));
550 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, new_value,
551 Object::ToInteger(isolate, new_value_obj));
552
553 THROW_ERROR_RETURN_FAILURE_ON_DETACHED_OR_OUT_OF_BOUNDS(
554 isolate, sta, index, "Atomics.compareExchange");
555
556 switch (sta->type()) {
557#define TYPED_ARRAY_CASE(Type, typeName, TYPE, ctype) \
558 case kExternal##Type##Array: \
559 return DoCompareExchange<ctype>(isolate, source, index, old_value, \
560 new_value);
561
562 INTEGER_TYPED_ARRAYS(TYPED_ARRAY_CASE)
563#undef TYPED_ARRAY_CASE
564
565 default:
566 break;
567 }
568
569 UNREACHABLE();
570}
571
572// ES #sec-atomics.add
573// Atomics.add( typedArray, index, value )
574RUNTIME_FUNCTION(Runtime_AtomicsAdd) {
575 return GetModifySetValueInBuffer<Add>(args, isolate, "Atomics.add");
576}
577
578// ES #sec-atomics.sub
579// Atomics.sub( typedArray, index, value )
580RUNTIME_FUNCTION(Runtime_AtomicsSub) {
581 return GetModifySetValueInBuffer<Sub>(args, isolate, "Atomics.sub");
582}
583
584// ES #sec-atomics.and
585// Atomics.and( typedArray, index, value )
586RUNTIME_FUNCTION(Runtime_AtomicsAnd) {
587 return GetModifySetValueInBuffer<And>(args, isolate, "Atomics.and");
588}
589
590// ES #sec-atomics.or
591// Atomics.or( typedArray, index, value )
592RUNTIME_FUNCTION(Runtime_AtomicsOr) {
593 return GetModifySetValueInBuffer<Or>(args, isolate, "Atomics.or");
594}
595
596// ES #sec-atomics.xor
597// Atomics.xor( typedArray, index, value )
598RUNTIME_FUNCTION(Runtime_AtomicsXor) {
599 return GetModifySetValueInBuffer<Xor>(args, isolate, "Atomics.xor");
600}
601
602#undef INTEGER_TYPED_ARRAYS
603
604#else
605
606RUNTIME_FUNCTION(Runtime_AtomicsLoad64) { UNREACHABLE(); }
607
608RUNTIME_FUNCTION(Runtime_AtomicsStore64) { UNREACHABLE(); }
609
610RUNTIME_FUNCTION(Runtime_AtomicsExchange) { UNREACHABLE(); }
611
612RUNTIME_FUNCTION(Runtime_AtomicsCompareExchange) { UNREACHABLE(); }
613
614RUNTIME_FUNCTION(Runtime_AtomicsAdd) { UNREACHABLE(); }
615
616RUNTIME_FUNCTION(Runtime_AtomicsSub) { UNREACHABLE(); }
617
618RUNTIME_FUNCTION(Runtime_AtomicsAnd) { UNREACHABLE(); }
619
620RUNTIME_FUNCTION(Runtime_AtomicsOr) { UNREACHABLE(); }
621
622RUNTIME_FUNCTION(Runtime_AtomicsXor) { UNREACHABLE(); }
623
624#endif // V8_TARGET_ARCH_MIPS64 || V8_TARGET_ARCH_PPC64 ||
625 // V8_TARGET_ARCH_S390X || V8_TARGET_ARCH_LOONG64
627
628RUNTIME_FUNCTION(Runtime_AtomicsLoadSharedStructOrArray) {
629 HandleScope scope(isolate);
630 DCHECK_EQ(2, args.length());
631 DirectHandle<JSObject> shared_struct_or_shared_array = args.at<JSObject>(0);
632 Handle<Name> field_name;
633 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, field_name,
634 Object::ToName(isolate, args.at(1)));
635 // Shared structs are prototypeless.
636 LookupIterator it(isolate, shared_struct_or_shared_array,
637 PropertyKey(isolate, field_name), LookupIterator::OWN);
638 if (it.IsFound()) return *it.GetDataValue(kSeqCstAccess);
639 return ReadOnlyRoots(isolate).undefined_value();
640}
641
642namespace {
643
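// Shared helper for the struct/array store, exchange and compareExchange
// runtime functions below: |write_operation| receives a LookupIterator
// positioned at the found property and performs the actual seq-cst access,
// while this function centralizes the error handling for read-only
// properties (e.g. a SharedArray's "length") and for properties that do not
// exist on the non-extensible receiver.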
644template <typename WriteOperation>
645Tagged<Object> AtomicFieldWrite(Isolate* isolate, DirectHandle<JSObject> object,
646 Handle<Name> field_name,
647 DirectHandle<Object> value,
648 WriteOperation write_operation) {
649 LookupIterator it(isolate, object, PropertyKey(isolate, field_name),
650 LookupIterator::OWN);
651 Maybe<bool> result = Nothing<bool>();
652 if (it.IsFound()) {
653 if (!it.IsReadOnly()) {
654 return write_operation(it);
655 }
656 // Shared structs and arrays are non-extensible and have non-configurable,
657 // writable, enumerable properties. The only exception is SharedArrays'
658 // "length" property, which is non-writable.
659 result = Object::WriteToReadOnlyProperty(&it, value, Just(kThrowOnError));
660 } else {
661 // Shared structs are non-extensible. Instead of duplicating logic, call
662 // Object::AddDataProperty to handle the error case.
663 result = Object::AddDataProperty(&it, value, NONE, Just(kThrowOnError),
664 StoreOrigin::kMaybeKeyed);
665 }
666 // Treat as strict code and always throw an error.
667 DCHECK(result.IsNothing());
668 USE(result);
669 return ReadOnlyRoots(isolate).exception();
670}
671} // namespace
672
673RUNTIME_FUNCTION(Runtime_AtomicsStoreSharedStructOrArray) {
674 HandleScope scope(isolate);
675 DCHECK_EQ(3, args.length());
676 DirectHandle<JSObject> shared_struct_or_shared_array = args.at<JSObject>(0);
677 Handle<Name> field_name;
678 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, field_name,
679 Object::ToName(isolate, args.at(1)));
680 Handle<Object> shared_value;
681 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
682 isolate, shared_value, Object::Share(isolate, args.at(2), kThrowOnError));
683
684 return AtomicFieldWrite(isolate, shared_struct_or_shared_array, field_name,
685 shared_value, [=](LookupIterator it) {
686 it.WriteDataValue(shared_value, kSeqCstAccess);
687 return *shared_value;
688 });
689}
690
691RUNTIME_FUNCTION(Runtime_AtomicsExchangeSharedStructOrArray) {
692 HandleScope scope(isolate);
693 DCHECK_EQ(3, args.length());
694 DirectHandle<JSObject> shared_struct_or_shared_array = args.at<JSObject>(0);
695 Handle<Name> field_name;
696 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, field_name,
697 Object::ToName(isolate, args.at(1)));
698 Handle<Object> shared_value;
699 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
700 isolate, shared_value, Object::Share(isolate, args.at(2), kThrowOnError));
701
702 return AtomicFieldWrite(isolate, shared_struct_or_shared_array, field_name,
703 shared_value, [=](LookupIterator it) {
704 return *it.SwapDataValue(shared_value,
705 kSeqCstAccess);
706 });
707}
708
709RUNTIME_FUNCTION(Runtime_AtomicsCompareExchangeSharedStructOrArray) {
710 HandleScope scope(isolate);
711 DCHECK_EQ(4, args.length());
712 DirectHandle<JSObject> shared_struct_or_shared_array = args.at<JSObject>(0);
713 Handle<Name> field_name;
714 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, field_name,
715 Object::ToName(isolate, args.at(1)));
716 Handle<Object> shared_expected;
717 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
718 isolate, shared_expected,
719 Object::Share(isolate, args.at(2), kThrowOnError));
720 Handle<Object> shared_value;
721 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
722 isolate, shared_value, Object::Share(isolate, args.at(3), kThrowOnError));
723
724 return AtomicFieldWrite(isolate, shared_struct_or_shared_array, field_name,
725 shared_value, [=](LookupIterator it) {
726 return *it.CompareAndSwapDataValue(
727 shared_expected, shared_value, kSeqCstAccess);
728 });
729}
730
731} // namespace internal
732} // namespace v8