5#ifndef V8_BASE_ATOMICOPS_H_
6#define V8_BASE_ATOMICOPS_H_
42#if defined(V8_OS_STARBOARD)
43#include "starboard/atomic.h"
54using Atomic64 = SbAtomic64;
60#if defined(V8_HOST_ARCH_64_BIT)
64using Atomic64 = int64_t;
66using Atomic64 = intptr_t;
73#if defined(V8_HOST_ARCH_64_BIT)
78static_assert(
sizeof(
void*) ==
sizeof(
AtomicWord));
namespace helpers {

// Reinterprets a raw (volatile) object pointer as a pointer to the
// corresponding std::atomic<T>, so the free-function std::atomic_* API can be
// applied to plain Atomic8/16/32/64 storage.
// NOTE(review): relies on std::atomic<T> being layout-compatible with T
// (guaranteed in practice for lock-free integral atomics on supported
// toolchains) — reconstructed from garbled extraction; verify against the
// upstream file.
template <typename T>
volatile std::atomic<T>* to_std_atomic(volatile T* ptr) {
  return reinterpret_cast<volatile std::atomic<T>*>(ptr);
}

// Const overload of to_std_atomic for read-only (load) access.
template <typename T>
volatile const std::atomic<T>* to_std_atomic_const(volatile const T* ptr) {
  return reinterpret_cast<volatile const std::atomic<T>*>(ptr);
}

}  // namespace helpers
92 std::atomic_thread_fence(std::memory_order_seq_cst);
106 std::atomic_compare_exchange_strong_explicit(
108 std::memory_order_relaxed, std::memory_order_relaxed);
114 std::atomic_compare_exchange_strong_explicit(
116 std::memory_order_relaxed, std::memory_order_relaxed);
122 std::atomic_compare_exchange_strong_explicit(
124 std::memory_order_relaxed, std::memory_order_relaxed);
131 std::memory_order_relaxed);
137 std::memory_order_seq_cst);
144 std::memory_order_relaxed);
149 atomic_compare_exchange_strong_explicit(
151 std::memory_order_acquire, std::memory_order_acquire);
157 bool result = atomic_compare_exchange_strong_explicit(
159 std::memory_order_release, std::memory_order_relaxed);
166 atomic_compare_exchange_strong_explicit(
168 std::memory_order_release, std::memory_order_relaxed);
175 atomic_compare_exchange_strong_explicit(
177 std::memory_order_acq_rel, std::memory_order_acquire);
183 atomic_compare_exchange_strong_explicit(
185 std::memory_order_seq_cst, std::memory_order_seq_cst);
191 std::memory_order_relaxed);
196 std::memory_order_relaxed);
201 std::memory_order_relaxed);
206 std::memory_order_release);
211 std::memory_order_release);
216 std::memory_order_release);
221 std::memory_order_seq_cst);
226 std::memory_order_seq_cst);
231 std::memory_order_seq_cst);
236 std::memory_order_relaxed);
241 std::memory_order_relaxed);
246 std::memory_order_relaxed);
251 std::memory_order_acquire);
256 std::memory_order_acquire);
261 std::memory_order_seq_cst);
266 std::memory_order_seq_cst);
269#if defined(V8_HOST_ARCH_64_BIT)
272 Atomic64 old_value, Atomic64 new_value) {
273 std::atomic_compare_exchange_strong_explicit(
275 std::memory_order_relaxed, std::memory_order_relaxed);
280 Atomic64 new_value) {
282 std::memory_order_relaxed);
286 Atomic64 new_value) {
288 std::memory_order_seq_cst);
295 std::memory_order_relaxed);
299 Atomic64 old_value, Atomic64 new_value) {
300 std::atomic_compare_exchange_strong_explicit(
302 std::memory_order_acquire, std::memory_order_acquire);
307 Atomic64 old_value, Atomic64 new_value) {
308 std::atomic_compare_exchange_strong_explicit(
310 std::memory_order_release, std::memory_order_relaxed);
316 Atomic64 new_value) {
317 std::atomic_compare_exchange_strong_explicit(
319 std::memory_order_acq_rel, std::memory_order_acquire);
324 Atomic64 old_value, Atomic64 new_value) {
325 std::atomic_compare_exchange_strong_explicit(
327 std::memory_order_seq_cst, std::memory_order_seq_cst);
331inline void Relaxed_Store(
volatile Atomic64* ptr, Atomic64 value) {
333 std::memory_order_relaxed);
336inline void Release_Store(
volatile Atomic64* ptr, Atomic64 value) {
338 std::memory_order_release);
341inline void SeqCst_Store(
volatile Atomic64* ptr, Atomic64 value) {
343 std::memory_order_seq_cst);
346inline Atomic64
Relaxed_Load(
volatile const Atomic64* ptr) {
348 std::memory_order_relaxed);
351inline Atomic64
Acquire_Load(
volatile const Atomic64* ptr) {
353 std::memory_order_acquire);
356inline Atomic64
SeqCst_Load(
volatile const Atomic64* ptr) {
358 std::memory_order_seq_cst);
365 constexpr size_t kAtomicWordSize =
sizeof(
AtomicWord);
367 !
IsAligned(
reinterpret_cast<uintptr_t
>(dst), kAtomicWordSize)) {
371 if (
IsAligned(
reinterpret_cast<uintptr_t
>(src), kAtomicWordSize) &&
372 IsAligned(
reinterpret_cast<uintptr_t
>(dst), kAtomicWordSize)) {
373 while (bytes >= kAtomicWordSize) {
377 dst += kAtomicWordSize;
378 src += kAtomicWordSize;
379 bytes -= kAtomicWordSize;
393 if (
reinterpret_cast<uintptr_t
>(dst) -
reinterpret_cast<uintptr_t
>(src) >=
402 constexpr size_t kAtomicWordSize =
sizeof(
AtomicWord);
404 !
IsAligned(
reinterpret_cast<uintptr_t
>(dst), kAtomicWordSize)) {
408 if (
IsAligned(
reinterpret_cast<uintptr_t
>(src), kAtomicWordSize) &&
409 IsAligned(
reinterpret_cast<uintptr_t
>(dst), kAtomicWordSize)) {
410 while (bytes >= kAtomicWordSize) {
411 dst -= kAtomicWordSize;
412 src -= kAtomicWordSize;
413 bytes -= kAtomicWordSize;
428 return u1 < u2 ? -1 : 1;
432#if defined(V8_TARGET_BIG_ENDIAN)
433 return u1 < u2 ? -1 : 1;
436 uint8_t byte1 = u1 & 0xFF;
437 uint8_t byte2 = u2 & 0xFF;
438 if (byte1 != byte2)
return byte1 < byte2 ? -1 : 1;
448 volatile const Atomic8* s2,
size_t len) {
449 constexpr size_t kAtomicWordSize =
sizeof(
AtomicWord);
451 !(
IsAligned(
reinterpret_cast<uintptr_t
>(s1), kAtomicWordSize) &&
452 IsAligned(
reinterpret_cast<uintptr_t
>(s2), kAtomicWordSize))) {
459 if (
IsAligned(
reinterpret_cast<uintptr_t
>(s1), kAtomicWordSize) &&
460 IsAligned(
reinterpret_cast<uintptr_t
>(s2), kAtomicWordSize)) {
461 while (len >= kAtomicWordSize) {
467 s1 += kAtomicWordSize;
468 s2 += kAtomicWordSize;
469 len -= kAtomicWordSize;
ZoneVector< RpoNumber > & result
volatile const std::atomic< T > * to_std_atomic_const(volatile const T *ptr)
volatile std::atomic< T > * to_std_atomic(volatile T *ptr)
int MemcmpNotEqualFundamental(Atomic8 u1, Atomic8 u2)
void Relaxed_Store(volatile Atomic8 *ptr, Atomic8 value)
Atomic8 Release_CompareAndSwap(volatile Atomic8 *ptr, Atomic8 old_value, Atomic8 new_value)
Atomic8 Relaxed_CompareAndSwap(volatile Atomic8 *ptr, Atomic8 old_value, Atomic8 new_value)
Atomic32 Relaxed_AtomicExchange(volatile Atomic32 *ptr, Atomic32 new_value)
int Relaxed_Memcmp(volatile const Atomic8 *s1, volatile const Atomic8 *s2, size_t len)
Atomic8 Relaxed_Load(volatile const Atomic8 *ptr)
void Relaxed_Memmove(volatile Atomic8 *dst, volatile const Atomic8 *src, size_t bytes)
Atomic32 SeqCst_CompareAndSwap(volatile Atomic32 *ptr, Atomic32 old_value, Atomic32 new_value)
Atomic32 Relaxed_AtomicIncrement(volatile Atomic32 *ptr, Atomic32 increment)
Atomic8 SeqCst_Load(volatile const Atomic8 *ptr)
void Relaxed_Memcpy(volatile Atomic8 *dst, volatile const Atomic8 *src, size_t bytes)
Atomic8 Acquire_Load(volatile const Atomic8 *ptr)
Atomic32 SeqCst_AtomicExchange(volatile Atomic32 *ptr, Atomic32 new_value)
void SeqCst_MemoryFence()
void Release_Store(volatile Atomic8 *ptr, Atomic8 value)
void SeqCst_Store(volatile Atomic8 *ptr, Atomic8 value)
Atomic32 AcquireRelease_CompareAndSwap(volatile Atomic32 *ptr, Atomic32 old_value, Atomic32 new_value)
Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr, Atomic32 old_value, Atomic32 new_value)
#define DCHECK_NE(v1, v2)
constexpr bool IsAligned(T value, U alignment)