2013-10-28 12:48:13 -07:00
|
|
|
#ifndef AL_ATOMIC_H
|
|
|
|
#define AL_ATOMIC_H
|
|
|
|
|
2018-11-17 18:56:00 -08:00
|
|
|
#include <atomic>
|
|
|
|
|
2018-01-11 10:03:26 -08:00
|
|
|
#ifdef __GNUC__
/* Strips const qualification from a pointer while keeping the pointer type
 * itself unchanged. Needed because Clang refuses to apply atomic_load to a
 * const _Atomic variable; the statement expression keeps the conversion
 * type-checked against T before the cast.
 */
#define CONST_CAST(T, V) __extension__({ \
    const T _val = (V);                  \
    (T)_val;                             \
})
#else
/* Without GNU statement expressions, fall back to a plain cast. */
#define CONST_CAST(T, V) ((T)(V))
#endif
|
|
|
|
|
2018-10-29 20:50:57 -07:00
|
|
|
/* Map C11's _Atomic type specifier onto std::atomic so the C-style atomic
 * code in this header compiles as C++. */
#define _Atomic(T) std::atomic<T>

/* Bring the memory-order constants into the global namespace. */
using std::memory_order;
using std::memory_order_relaxed;
using std::memory_order_consume;
using std::memory_order_acquire;
using std::memory_order_release;
using std::memory_order_acq_rel;
using std::memory_order_seq_cst;

/* Bring the C-compatible free-function atomic operations into the global
 * namespace (listed alphabetically). */
using std::atomic_compare_exchange_strong_explicit;
using std::atomic_compare_exchange_weak_explicit;
using std::atomic_exchange_explicit;
using std::atomic_fetch_add_explicit;
using std::atomic_fetch_sub_explicit;
using std::atomic_init;
using std::atomic_load_explicit;
using std::atomic_store_explicit;
using std::atomic_thread_fence;
|
|
|
|
|
2014-09-07 00:42:50 -07:00
|
|
|
/* Project-local aliases for the standard memory orderings; call sites use
 * these instead of the std:: names. */
#define almemory_order memory_order
#define almemory_order_relaxed memory_order_relaxed
#define almemory_order_consume memory_order_consume
#define almemory_order_acquire memory_order_acquire
#define almemory_order_release memory_order_release
#define almemory_order_acq_rel memory_order_acq_rel
#define almemory_order_seq_cst memory_order_seq_cst
|
2014-07-23 06:36:34 -07:00
|
|
|
|
2018-10-29 20:50:57 -07:00
|
|
|
/* C11-flavored wrappers over the atomic operations. The _explicit forms are
 * aliased so every call site must name its memory ordering. */
#define ATOMIC(T) _Atomic(T)

/* Run-time init of an atomic object, and static/aggregate initialization. */
#define ATOMIC_INIT atomic_init
#define ATOMIC_INIT_STATIC ATOMIC_VAR_INIT

#define ATOMIC_LOAD atomic_load_explicit
#define ATOMIC_STORE atomic_store_explicit

/* Fetch-and-modify; both return the value held *before* the operation. */
#define ATOMIC_ADD atomic_fetch_add_explicit
#define ATOMIC_SUB atomic_fetch_sub_explicit

#define ATOMIC_EXCHANGE atomic_exchange_explicit
#define ATOMIC_COMPARE_EXCHANGE_STRONG atomic_compare_exchange_strong_explicit
#define ATOMIC_COMPARE_EXCHANGE_WEAK atomic_compare_exchange_weak_explicit

#define ATOMIC_THREAD_FENCE atomic_thread_fence
|
|
|
|
|
2013-10-28 12:48:13 -07:00
|
|
|
|
2016-12-20 20:49:37 -08:00
|
|
|
/* Sequentially-consistent convenience forms of the operations above, for
 * call sites that don't need a weaker ordering. */
#define ATOMIC_LOAD_SEQ(_val) ATOMIC_LOAD(_val, almemory_order_seq_cst)
#define ATOMIC_STORE_SEQ(_val, _newval) ATOMIC_STORE(_val, _newval, almemory_order_seq_cst)

#define ATOMIC_ADD_SEQ(_val, _incr) ATOMIC_ADD(_val, _incr, almemory_order_seq_cst)
#define ATOMIC_SUB_SEQ(_val, _decr) ATOMIC_SUB(_val, _decr, almemory_order_seq_cst)

#define ATOMIC_EXCHANGE_SEQ(_val, _newval) ATOMIC_EXCHANGE(_val, _newval, almemory_order_seq_cst)
/* Both success and failure orderings are seq_cst here. */
#define ATOMIC_COMPARE_EXCHANGE_STRONG_SEQ(_val, _oldval, _newval) \
    ATOMIC_COMPARE_EXCHANGE_STRONG(_val, _oldval, _newval, almemory_order_seq_cst, almemory_order_seq_cst)
#define ATOMIC_COMPARE_EXCHANGE_WEAK_SEQ(_val, _oldval, _newval) \
    ATOMIC_COMPARE_EXCHANGE_WEAK(_val, _oldval, _newval, almemory_order_seq_cst, almemory_order_seq_cst)
|
|
|
|
|
2016-12-20 20:49:37 -08:00
|
|
|
|
2014-07-23 06:36:34 -07:00
|
|
|
typedef unsigned int uint;
|
2014-07-22 18:18:14 -07:00
|
|
|
typedef ATOMIC(uint) RefCount;
|
|
|
|
|
|
|
|
/* Initializes the reference counter to the given value. Maps to atomic_init,
 * so this is (non-atomic) initialization rather than an atomic store. */
inline void InitRef(RefCount *ptr, uint value)
{ ATOMIC_INIT(ptr, value); }
|
2014-07-22 18:18:14 -07:00
|
|
|
/* Returns the counter's current value, using acquire ordering. */
inline uint ReadRef(RefCount *ptr)
{
    uint const count{ATOMIC_LOAD(ptr, almemory_order_acquire)};
    return count;
}
|
2014-07-22 18:18:14 -07:00
|
|
|
/* Atomically increments the counter, returning its new (post-increment)
 * value. */
inline uint IncrementRef(RefCount *ptr)
{
    uint const oldval{ATOMIC_ADD(ptr, 1u, almemory_order_acq_rel)};
    return oldval + 1u;
}
|
2014-07-22 18:18:14 -07:00
|
|
|
/* Atomically decrements the counter, returning its new (post-decrement)
 * value. A return of 0 means this was the last reference. */
inline uint DecrementRef(RefCount *ptr)
{
    uint const oldval{ATOMIC_SUB(ptr, 1u, almemory_order_acq_rel)};
    return oldval - 1u;
}
|
2014-07-22 18:18:14 -07:00
|
|
|
|
2014-09-03 17:37:07 -07:00
|
|
|
|
2016-12-21 19:58:03 -08:00
|
|
|
/* Pushes _entry onto the front of the singly-linked list whose head pointer
 * is the atomic *_head: _entry's next is pointed at the old head, then a CAS
 * loop swaps _entry in as the new head.
 *
 * WARNING: A livelock is theoretically possible if another thread keeps
 * changing the head without giving this a chance to actually swap in the new
 * one (practically impossible with this little code, but...).
 */
#define ATOMIC_REPLACE_HEAD(T, _head, _entry) do {                            \
    T _first = ATOMIC_LOAD(_head, almemory_order_acquire);                    \
    do {                                                                      \
        /* On CAS failure _first is reloaded with the current head, so the  */\
        /* entry's next pointer must be refreshed before retrying.          */\
        ATOMIC_STORE(&(_entry)->next, _first, almemory_order_relaxed);        \
    } while(!ATOMIC_COMPARE_EXCHANGE_WEAK(_head, &_first, _entry,             \
        almemory_order_acq_rel, almemory_order_acquire));                     \
} while(0)
|
|
|
|
|
2013-10-28 12:48:13 -07:00
|
|
|
#endif /* AL_ATOMIC_H */
|