Mirror of https://github.com/LadybirdBrowser/ladybird.git
AK: Add AtomicRef, for atomically accessing a reference to a variable
This is distinctly different from Atomic<T*>, because we want to atomically access a variable that the atomic object itself does not own.
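A minimal usage sketch of that distinction (hypothetical names; assumes the header below is included as <AK/Atomic.h>): an Atomic<T*> owns and atomically swaps a pointer value, while an AtomicRef<T> performs atomic operations on a variable that lives elsewhere.

#include <AK/Atomic.h>

static int g_counter = 0; // plain variable, owned by this translation unit

void bump_counter()
{
    // The AtomicRef does not own g_counter; it only refers to it.
    AtomicRef<int> counter(g_counter);
    counter.fetch_add(1); // atomically increments g_counter itself
}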
Parent: bde7bc3472
Commit: aff594f1e7
Notes:
sideshowbarker
2024-07-19 06:20:32 +09:00
Author: https://github.com/ADKaster
Commit: https://github.com/SerenityOS/serenity/commit/aff594f1e79
Pull-request: https://github.com/SerenityOS/serenity/pull/2242
Reviewed-by: https://github.com/bugaevc
1 changed file with 102 additions and 0 deletions
AK/Atomic.h (+102)
@@ -257,6 +257,108 @@ public:
    }
};

template<typename T>
class AtomicRef {
    // Pointer to a value owned elsewhere; the AtomicRef never owns it.
    T* m_value { nullptr };

public:
    AtomicRef() noexcept = delete;
    AtomicRef(const AtomicRef&) = delete;
    AtomicRef& operator=(const AtomicRef&) volatile = delete;

    AtomicRef(T* val) noexcept
        : m_value(val)
    {
    }

    AtomicRef(T& val) noexcept
        : m_value(&val)
    {
    }

    T exchange(T desired, MemoryOrder order = memory_order_seq_cst) volatile noexcept
    {
        return __atomic_exchange_n(m_value, desired, order);
    }

    bool compare_exchange_strong(T& expected, T desired, MemoryOrder order = memory_order_seq_cst) volatile noexcept
    {
        // A release (or acq_rel) order is invalid for the failure load,
        // so downgrade the failure order to acquire in those cases.
        if (order == memory_order_acq_rel || order == memory_order_release)
            return __atomic_compare_exchange_n(m_value, &expected, desired, false, memory_order_acq_rel, memory_order_acquire);
        return __atomic_compare_exchange_n(m_value, &expected, desired, false, order, order);
    }

    T operator++() volatile noexcept
    {
        return fetch_add(1) + 1;
    }

    T operator++(int) volatile noexcept
    {
        return fetch_add(1);
    }

    T operator+=(T val) volatile noexcept
    {
        return fetch_add(val) + val;
    }

    T fetch_add(T val, MemoryOrder order = memory_order_seq_cst) volatile noexcept
    {
        return __atomic_fetch_add(m_value, val, order);
    }

    T operator--() volatile noexcept
    {
        return fetch_sub(1) - 1;
    }

    T operator--(int) volatile noexcept
    {
        return fetch_sub(1);
    }

    T operator-=(T val) volatile noexcept
    {
        return fetch_sub(val) - val;
    }

    T fetch_sub(T val, MemoryOrder order = memory_order_seq_cst) volatile noexcept
    {
        return __atomic_fetch_sub(m_value, val, order);
    }

    // Convert to the referenced value, not the pointer.
    operator T() const volatile noexcept
    {
        return load();
    }

    T load(MemoryOrder order = memory_order_seq_cst) const volatile noexcept
    {
        return __atomic_load_n(m_value, order);
    }

    T operator=(T desired) volatile noexcept
    {
        store(desired);
        return desired;
    }

    void store(T desired, MemoryOrder order = memory_order_seq_cst) volatile noexcept
    {
        // Store through the pointer so the referenced variable is updated,
        // rather than overwriting the pointer itself.
        __atomic_store_n(m_value, desired, order);
    }

    bool is_lock_free() const volatile noexcept
    {
        return __atomic_is_lock_free(sizeof(*m_value), m_value);
    }
};

}

using AK::Atomic;
using AK::AtomicRef;
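As a follow-up, a sketch of a compare-and-swap retry loop over a variable the AtomicRef does not own (hypothetical names; u32 is assumed to come from <AK/Types.h>). It relies on compare_exchange_strong writing the current value back into expected on failure, so each retry starts from fresh data.

#include <AK/Atomic.h>
#include <AK/Types.h>

static u32 g_flags = 0; // plain variable, owned elsewhere

void set_flag_bit(u32 bit)
{
    AtomicRef<u32> flags(g_flags);
    u32 expected = flags.load();
    // On failure, `expected` is refreshed with the current value of g_flags,
    // so the next attempt computes the new mask from up-to-date state.
    while (!flags.compare_exchange_strong(expected, expected | (1u << bit)))
        ;
}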