#ifndef V8_OBJECTS_JS_ATOMICS_SYNCHRONIZATION_INL_H_
#define V8_OBJECTS_JS_ATOMICS_SYNCHRONIZATION_INL_H_

#include "torque-generated/src/objects/js-atomics-synchronization-tq-inl.inc"
std::atomic<JSSynchronizationPrimitive::StateT>*
JSSynchronizationPrimitive::AtomicStatePtr() {
  StateT* state_ptr =
      reinterpret_cast<StateT*>(field_address(kStateOffset));
  DCHECK(IsAligned(reinterpret_cast<uintptr_t>(state_ptr), sizeof(StateT)));
  return base::AsAtomicPtr(state_ptr);
}
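// AtomicStatePtr() above reinterprets the raw in-object state slot as a
// std::atomic (via base::AsAtomicPtr) so the lock/waiter state can be CAS'd
// in place. As a rough standalone illustration of the same idea, C++20's
// std::atomic_ref performs atomic operations on ordinary storage. This is a
// sketch only; ToyObject and TryAcquireToyLockBit are invented names and are
// not part of V8.
#include <atomic>
#include <cstdint>

struct ToyObject {
  uint32_t state = 0;  // A plain, non-atomic field, like the in-object slot.
};

// Try to set a toy "locked" bit with an acquire CAS on ordinary storage.
inline bool TryAcquireToyLockBit(ToyObject& object) {
  std::atomic_ref<uint32_t> state(object.state);
  uint32_t expected = 0;
  return state.compare_exchange_strong(expected, 1,
                                       std::memory_order_acquire,
                                       std::memory_order_relaxed);
}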
#if V8_COMPRESS_POINTERS
ExternalPointerHandle*
JSSynchronizationPrimitive::waiter_queue_head_handle_location() const {
  Address location = field_address(kWaiterQueueHeadOffset);
  return reinterpret_cast<ExternalPointerHandle*>(location);
}
#else
WaiterQueueNode** JSSynchronizationPrimitive::waiter_queue_head_location() const {
  Address location = field_address(kWaiterQueueHeadOffset);
  return reinterpret_cast<WaiterQueueNode**>(location);
}
#endif  // V8_COMPRESS_POINTERS

// Excerpts from the waiter-queue-head accessors
// (DestructivelyGetWaiterQueueHead(), SetWaiterQueueHead(),
// SetNullWaiterQueueHead()): with pointer compression, the head is stored
// behind an entry in the shared external pointer table.
#if V8_COMPRESS_POINTERS
  ExternalPointerTable& table = requester->shared_external_pointer_table();
  // ...
  handle = table.AllocateAndInitializeEntry(
      requester->shared_external_pointer_space(), /* ... */);
  // ...
  Address old = requester->shared_external_pointer_table().Exchange(/* ... */);
  // ...
  requester->shared_external_pointer_table().Set(/* ... */);
#endif  // V8_COMPRESS_POINTERS
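// The pointer-compression excerpts above boil down to one idea: the object
// does not store a raw WaiterQueueNode* in its waiter-queue-head slot, it
// stores a 32-bit handle that indexes a process-wide external pointer table,
// and reads/updates of the head go through that table. The sketch below is a
// minimal standalone analogue of that indirection; ToyExternalPointerTable,
// its methods, and ToyWaiterNode are invented names, not V8's
// ExternalPointerTable API.
#include <cassert>
#include <cstdint>
#include <vector>

class ToyExternalPointerTable {
 public:
  // Reserve a slot for `value` and hand back a compact 32-bit handle
  // (0 is reserved as the null handle).
  uint32_t AllocateAndInitializeEntry(void* value) {
    entries_.push_back(value);
    return static_cast<uint32_t>(entries_.size());
  }
  void Set(uint32_t handle, void* value) { entries_[handle - 1] = value; }
  // Swap the stored pointer and return the previous one, as a destructive
  // "take the waiter queue head" operation would.
  void* Exchange(uint32_t handle, void* value) {
    void* old = entries_[handle - 1];
    entries_[handle - 1] = value;
    return old;
  }
  void* Get(uint32_t handle) const { return entries_[handle - 1]; }

 private:
  std::vector<void*> entries_;
};

struct ToyWaiterNode {
  ToyWaiterNode* next = nullptr;
};

inline void DemoWaiterQueueHeadIndirection() {
  ToyExternalPointerTable table;
  ToyWaiterNode head;

  // The "in-object field" is only 32 bits wide: a handle, not a pointer.
  uint32_t waiter_queue_head_handle = table.AllocateAndInitializeEntry(&head);

  // Reads of the head go through the table...
  assert(table.Get(waiter_queue_head_handle) == &head);

  // ...and destructively taking the head swaps in null via Exchange().
  void* taken = table.Exchange(waiter_queue_head_handle, nullptr);
  assert(taken == &head);
  assert(table.Get(waiter_queue_head_handle) == nullptr);
}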
template <typename LockSlowPathWrapper, typename>
bool JSAtomicsMutex::LockImpl(Isolate* requester,
                              DirectHandle<JSAtomicsMutex> mutex,
                              std::optional<base::TimeDelta> timeout,
                              LockSlowPathWrapper slow_path_wrapper) {
  bool locked;
  std::atomic<StateT>* state = mutex->AtomicStatePtr();
  StateT expected = kUnlockedUncontended;
  // Fast path: take the uncontended lock with a single acquire CAS.
  if (V8_LIKELY(state->compare_exchange_weak(expected, kLockedUncontended,
                                             std::memory_order_acquire,
                                             std::memory_order_relaxed))) {
    locked = true;
  } else {
    // Contended: defer to the caller-supplied slow path, which may block
    // (bounded by `timeout` when one is given).
    locked = slow_path_wrapper(state);
  }
  if (V8_LIKELY(locked)) mutex->SetCurrentThreadAsOwner();
  return locked;
}
bool JSAtomicsMutex::Lock(Isolate* requester, DirectHandle<JSAtomicsMutex> mutex,
                          std::optional<base::TimeDelta> timeout) {
  return LockImpl(requester, mutex, timeout, [=](std::atomic<StateT>* state) {
    return LockSlowPath(requester, mutex, state, timeout);
  });
}
// JSAtomicsMutex::TryLock() (excerpt): a single strong acquire CAS.
  if (V8_LIKELY(AtomicStatePtr()->compare_exchange_strong(
          expected, kLockedUncontended, std::memory_order_acquire,
          std::memory_order_relaxed))) { /* take ownership, return true */ }

// JSAtomicsMutex::Unlock() (excerpt): release CAS; on failure there are
// queued waiters and UnlockSlowPath(requester, state) takes over.
  std::atomic<StateT>* state = AtomicStatePtr();
  if (V8_LIKELY(state->compare_exchange_strong(expected, kUnlockedUncontended,
                                               std::memory_order_release,
                                               std::memory_order_relaxed))) { /* done */ }
// JSAtomicsMutex::IsCurrentThreadOwner() compares the owner slot against
// ThreadId::Current().ToInteger(); a DCHECK cross-checks the result against
// AtomicStatePtr()->load(std::memory_order_relaxed). The owner slot is set to
// ThreadId::Current().ToInteger() when the lock is acquired and reset to
// ThreadId::Invalid().ToInteger() on unlock, both with relaxed stores.

std::atomic<int32_t>* JSAtomicsMutex::AtomicOwnerThreadIdPtr() {
  int32_t* owner_thread_id_ptr =
      reinterpret_cast<int32_t*>(field_address(kOwnerThreadIdOffset));
  return base::AsAtomicPtr(owner_thread_id_ptr);
}
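// Taken together, the JSAtomicsMutex excerpts describe a fast-path/slow-path
// lock: an acquire CAS from the unlocked state to the locked-uncontended
// state, a relaxed store of the owning thread id, and a release CAS on
// unlock, with slow paths (LockSlowPath/UnlockSlowPath) handling contention
// through the waiter queue. The sketch below mirrors only that fast-path
// shape with plain std::atomic; ToyAtomicsMutex and its two-value state are
// invented for illustration, and the "slow path" here just spins instead of
// parking the thread on a waiter queue as V8 does.
#include <atomic>
#include <cassert>
#include <cstdint>
#include <thread>
#include <vector>

class ToyAtomicsMutex {
 public:
  bool TryLock() {
    uint32_t expected = kUnlocked;
    // One strong acquire CAS, mirroring the TryLock() excerpt.
    if (state_.compare_exchange_strong(expected, kLockedUncontended,
                                       std::memory_order_acquire,
                                       std::memory_order_relaxed)) {
      SetCurrentThreadAsOwner();
      return true;
    }
    return false;
  }

  void Lock() {
    uint32_t expected = kUnlocked;
    // Fast path, as in LockImpl(); the real slow path parks the thread.
    while (!state_.compare_exchange_weak(expected, kLockedUncontended,
                                         std::memory_order_acquire,
                                         std::memory_order_relaxed)) {
      expected = kUnlocked;  // Spin: retry until the mutex is free again.
    }
    SetCurrentThreadAsOwner();
  }

  void Unlock() {
    // Clear the owner, then a release CAS back to unlocked; V8 instead falls
    // through to UnlockSlowPath() when the state says waiters are queued.
    owner_.store(std::thread::id(), std::memory_order_relaxed);
    uint32_t expected = kLockedUncontended;
    state_.compare_exchange_strong(expected, kUnlocked,
                                   std::memory_order_release,
                                   std::memory_order_relaxed);
  }

  bool IsCurrentThreadOwner() const {
    return owner_.load(std::memory_order_relaxed) ==
           std::this_thread::get_id();
  }

 private:
  static constexpr uint32_t kUnlocked = 0;
  static constexpr uint32_t kLockedUncontended = 1;

  void SetCurrentThreadAsOwner() {
    owner_.store(std::this_thread::get_id(), std::memory_order_relaxed);
  }

  std::atomic<uint32_t> state_{kUnlocked};
  std::atomic<std::thread::id> owner_{std::thread::id()};
};

int main() {
  ToyAtomicsMutex mutex;
  int counter = 0;
  std::vector<std::thread> threads;
  for (int t = 0; t < 4; ++t) {
    threads.emplace_back([&] {
      for (int i = 0; i < 10000; ++i) {
        mutex.Lock();
        assert(mutex.IsCurrentThreadOwner());
        ++counter;
        mutex.Unlock();
      }
    });
  }
  for (std::thread& thread : threads) thread.join();
  assert(counter == 40000);
  return 0;
}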