author	Sebastian Huber <sebastian.huber@embedded-brains.de>	2015-02-13 16:30:47 +0100
committer	Sebastian Huber <sebastian.huber@embedded-brains.de>	2015-03-06 14:58:13 +0100
commit	e5a017550a1d496db3df54901b4cb7c94be2dc09 (patch)
tree	6f774ae1342f85b1293e7242f0a19476a279f7a0
parent	atomic.h: Format changes (diff)
download	rtems-libbsd-e5a017550a1d496db3df54901b4cb7c94be2dc09.tar.bz2
atomic.h: Use <stdatomic.h> or <atomic> for SMP
-rw-r--r--	rtemsbsd/include/machine/atomic.h	1075
1 file changed, 1041 insertions, 34 deletions
diff --git a/rtemsbsd/include/machine/atomic.h b/rtemsbsd/include/machine/atomic.h
index 32c15e0e..d8ee3663 100644
--- a/rtemsbsd/include/machine/atomic.h
+++ b/rtemsbsd/include/machine/atomic.h
@@ -7,7 +7,7 @@
*/
/*
- * Copyright (c) 2009, 2010 embedded brains GmbH. All rights reserved.
+ * Copyright (c) 2009, 2015 embedded brains GmbH. All rights reserved.
*
* embedded brains GmbH
* Dornierstr. 4
@@ -46,66 +46,329 @@
#include <rtems.h>
-#define mb() RTEMS_COMPILER_MEMORY_BARRIER()
-#define wmb() RTEMS_COMPILER_MEMORY_BARRIER()
-#define rmb() RTEMS_COMPILER_MEMORY_BARRIER()
+#ifdef RTEMS_SMP
+ #if defined(__cplusplus) && __GNUC__ >= 4 && __GNUC_MINOR__ >= 9
+ /*
+ * GCC 4.9 ships its own <stdatomic.h>, which is not C++ compatible. The
+ * suggested solution is to include <atomic> instead when C++ is used. This
+ * works at least with GCC 4.9. See also:
+ *
+ * http://gcc.gnu.org/bugzilla/show_bug.cgi?id=60932
+ * http://gcc.gnu.org/bugzilla/show_bug.cgi?id=60940
+ */
+ #include <atomic>
+ #define _RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC
+ #else
+ #include <stdatomic.h>
+ #define _RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC
+ #endif
+#endif
+
+static inline void
+mb(void)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_thread_fence(std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_thread_fence(memory_order_seq_cst);
+#else
+ RTEMS_COMPILER_MEMORY_BARRIER();
+#endif
+}
+
+static inline void
+wmb(void)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_thread_fence(std::memory_order_release);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_thread_fence(memory_order_release);
+#else
+ RTEMS_COMPILER_MEMORY_BARRIER();
+#endif
+}
+
+static inline void
+rmb(void)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_thread_fence(std::memory_order_acquire);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_thread_fence(memory_order_acquire);
+#else
+ RTEMS_COMPILER_MEMORY_BARRIER();
+#endif
+}
static inline void
atomic_add_int(volatile int *p, int v)
{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ q->fetch_add(v, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
*p += v;
rtems_interrupt_enable(level);
+#endif
+}
+
+static inline void
+atomic_add_acq_int(volatile int *p, int v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ q->fetch_add(v, std::memory_order_acquire);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ atomic_fetch_add_explicit(q, v, memory_order_acquire);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p += v;
+ rtems_interrupt_enable(level);
+#endif
}
-#define atomic_add_acq_int atomic_add_int
-#define atomic_add_rel_int atomic_add_int
+static inline void
+atomic_add_rel_int(volatile int *p, int v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ q->fetch_add(v, std::memory_order_release);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ atomic_fetch_add_explicit(q, v, memory_order_release);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p += v;
+ rtems_interrupt_enable(level);
+#endif
+}
static inline void
atomic_subtract_int(volatile int *p, int v)
{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ q->fetch_sub(v, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ atomic_fetch_sub_explicit(q, v, memory_order_seq_cst);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
*p -= v;
rtems_interrupt_enable(level);
+#endif
}
-#define atomic_subtract_acq_int atomic_subtract_int
-#define atomic_subtract_rel_int atomic_subtract_int
+static inline void
+atomic_subtract_acq_int(volatile int *p, int v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ q->fetch_sub(v, std::memory_order_acquire);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ atomic_fetch_sub_explicit(q, v, memory_order_acquire);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p -= v;
+ rtems_interrupt_enable(level);
+#endif
+}
+
+static inline void
+atomic_subtract_rel_int(volatile int *p, int v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ q->fetch_sub(v, std::memory_order_release);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ atomic_fetch_sub_explicit(q, v, memory_order_release);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p -= v;
+ rtems_interrupt_enable(level);
+#endif
+}
static inline void
atomic_set_int(volatile int *p, int v)
{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ q->fetch_or(v, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ atomic_fetch_or_explicit(q, v, memory_order_seq_cst);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
*p |= v;
rtems_interrupt_enable(level);
+#endif
}
-#define atomic_set_acq_int atomic_set_int
-#define atomic_set_rel_int atomic_set_int
+static inline void
+atomic_set_acq_int(volatile int *p, int v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ q->fetch_or(v, std::memory_order_acquire);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ atomic_fetch_or_explicit(q, v, memory_order_acquire);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p |= v;
+ rtems_interrupt_enable(level);
+#endif
+}
+
+static inline void
+atomic_set_rel_int(volatile int *p, int v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ q->fetch_or(v, std::memory_order_release);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ atomic_fetch_or_explicit(q, v, memory_order_release);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p |= v;
+ rtems_interrupt_enable(level);
+#endif
+}
static inline void
atomic_clear_int(volatile int *p, int v)
{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ q->fetch_and(~v, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ atomic_fetch_and_explicit(q, ~v, memory_order_seq_cst);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
*p &= ~v;
rtems_interrupt_enable(level);
+#endif
}
-#define atomic_clear_acq_int atomic_clear_int
-#define atomic_clear_rel_int atomic_clear_int
+static inline void
+atomic_clear_acq_int(volatile int *p, int v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ q->fetch_and(~v, std::memory_order_acquire);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ atomic_fetch_and_explicit(q, ~v, memory_order_acquire);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p &= ~v;
+ rtems_interrupt_enable(level);
+#endif
+}
+
+static inline void
+atomic_clear_rel_int(volatile int *p, int v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ q->fetch_and(~v, std::memory_order_release);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ atomic_fetch_and_explicit(q, ~v, memory_order_release);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p &= ~v;
+ rtems_interrupt_enable(level);
+#endif
+}
static inline int
atomic_cmpset_int(volatile int *p, int cmp, int set)
{
int rv;
+
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ rv = q->compare_exchange_strong(cmp, set, std::memory_order_seq_cst,
+ std::memory_order_relaxed);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
+ memory_order_seq_cst, memory_order_relaxed);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
@@ -114,23 +377,93 @@ atomic_cmpset_int(volatile int *p, int cmp, int set)
*p = set;
}
rtems_interrupt_enable(level);
+#endif
return (rv);
}
-#define atomic_cmpset_acq_int atomic_cmpset_int
-#define atomic_cmpset_rel_int atomic_cmpset_int
+static inline int
+atomic_cmpset_acq_int(volatile int *p, int cmp, int set)
+{
+ int rv;
+
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ rv = q->compare_exchange_strong(cmp, set, std::memory_order_acquire,
+ std::memory_order_relaxed);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
+ memory_order_acquire, memory_order_relaxed);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ rv = *p == cmp;
+ if (rv) {
+ *p = set;
+ }
+ rtems_interrupt_enable(level);
+#endif
+
+ return (rv);
+}
+
+static inline int
+atomic_cmpset_rel_int(volatile int *p, int cmp, int set)
+{
+ int rv;
+
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ rv = q->compare_exchange_strong(cmp, set, std::memory_order_release,
+ std::memory_order_relaxed);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
+ memory_order_release, memory_order_relaxed);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ rv = *p == cmp;
+ if (rv) {
+ *p = set;
+ }
+ rtems_interrupt_enable(level);
+#endif
+
+ return (rv);
+}
static inline int
atomic_fetchadd_int(volatile int *p, int v)
{
int tmp;
+
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ tmp = q->fetch_add(v, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ tmp = atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
tmp = *p;
*p += v;
rtems_interrupt_enable(level);
+#endif
return (tmp);
}
@@ -139,12 +472,24 @@ static inline int
atomic_readandclear_int(volatile int *p)
{
int tmp;
+
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ tmp = q->exchange(0, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ tmp = atomic_exchange_explicit(q, 0, memory_order_seq_cst);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
tmp = *p;
*p = 0;
rtems_interrupt_enable(level);
+#endif
return (tmp);
}
@@ -154,7 +499,19 @@ atomic_load_acq_int(volatile int *p)
{
int tmp;
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ tmp = q->load(std::memory_order_acquire);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ tmp = atomic_load_explicit(q, memory_order_acquire);
+#else
+ RTEMS_COMPILER_MEMORY_BARRIER();
tmp = *p;
+#endif
return (tmp);
}
@@ -162,65 +519,290 @@ atomic_load_acq_int(volatile int *p)
static inline void
atomic_store_rel_int(volatile int *p, int v)
{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_int *q =
+ reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+
+ q->store(v, std::memory_order_release);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+
+ atomic_store_explicit(q, v, memory_order_release);
+#else
*p = v;
+ RTEMS_COMPILER_MEMORY_BARRIER();
+#endif
}
static inline void
atomic_add_32(volatile uint32_t *p, uint32_t v)
{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ q->fetch_add(v, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
*p += v;
rtems_interrupt_enable(level);
+#endif
}
-#define atomic_add_acq_32 atomic_add_32
-#define atomic_add_rel_32 atomic_add_32
+static inline void
+atomic_add_acq_32(volatile uint32_t *p, uint32_t v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ q->fetch_add(v, std::memory_order_acquire);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ atomic_fetch_add_explicit(q, v, memory_order_acquire);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p += v;
+ rtems_interrupt_enable(level);
+#endif
+}
+
+static inline void
+atomic_add_rel_32(volatile uint32_t *p, uint32_t v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ q->fetch_add(v, std::memory_order_release);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ atomic_fetch_add_explicit(q, v, memory_order_release);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p += v;
+ rtems_interrupt_enable(level);
+#endif
+}
static inline void
atomic_subtract_32(volatile uint32_t *p, uint32_t v)
{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ q->fetch_sub(v, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ atomic_fetch_sub_explicit(q, v, memory_order_seq_cst);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
*p -= v;
rtems_interrupt_enable(level);
+#endif
}
-#define atomic_subtract_acq_32 atomic_subtract_32
-#define atomic_subtract_rel_32 atomic_subtract_32
+static inline void
+atomic_subtract_acq_32(volatile uint32_t *p, uint32_t v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ q->fetch_sub(v, std::memory_order_acquire);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ atomic_fetch_sub_explicit(q, v, memory_order_acquire);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p -= v;
+ rtems_interrupt_enable(level);
+#endif
+}
+
+static inline void
+atomic_subtract_rel_32(volatile uint32_t *p, uint32_t v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ q->fetch_sub(v, std::memory_order_release);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ atomic_fetch_sub_explicit(q, v, memory_order_release);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p -= v;
+ rtems_interrupt_enable(level);
+#endif
+}
static inline void
atomic_set_32(volatile uint32_t *p, uint32_t v)
{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ q->fetch_or(v, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ atomic_fetch_or_explicit(q, v, memory_order_seq_cst);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
*p |= v;
rtems_interrupt_enable(level);
+#endif
}
-#define atomic_set_acq_32 atomic_set_32
-#define atomic_set_rel_32 atomic_set_32
+static inline void
+atomic_set_acq_32(volatile uint32_t *p, uint32_t v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ q->fetch_or(v, std::memory_order_acquire);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ atomic_fetch_or_explicit(q, v, memory_order_acquire);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p |= v;
+ rtems_interrupt_enable(level);
+#endif
+}
+
+static inline void
+atomic_set_rel_32(volatile uint32_t *p, uint32_t v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ q->fetch_or(v, std::memory_order_release);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ atomic_fetch_or_explicit(q, v, memory_order_release);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p |= v;
+ rtems_interrupt_enable(level);
+#endif
+}
static inline void
atomic_clear_32(volatile uint32_t *p, uint32_t v)
{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ q->fetch_and(~v, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ atomic_fetch_and_explicit(q, ~v, memory_order_seq_cst);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
*p &= ~v;
rtems_interrupt_enable(level);
+#endif
}
-#define atomic_clear_acq_32 atomic_clear_32
-#define atomic_clear_rel_32 atomic_clear_32
+static inline void
+atomic_clear_acq_32(volatile uint32_t *p, uint32_t v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ q->fetch_and(~v, std::memory_order_acquire);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ atomic_fetch_and_explicit(q, ~v, memory_order_acquire);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p &= ~v;
+ rtems_interrupt_enable(level);
+#endif
+}
+
+static inline void
+atomic_clear_rel_32(volatile uint32_t *p, uint32_t v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ q->fetch_and(~v, std::memory_order_release);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ atomic_fetch_and_explicit(q, ~v, memory_order_release);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p &= ~v;
+ rtems_interrupt_enable(level);
+#endif
+}
static inline int
atomic_cmpset_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
{
int rv;
+
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ rv = q->compare_exchange_strong(cmp, set, std::memory_order_seq_cst,
+ std::memory_order_relaxed);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
+ memory_order_seq_cst, memory_order_relaxed);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
@@ -229,23 +811,93 @@ atomic_cmpset_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
*p = set;
}
rtems_interrupt_enable(level);
+#endif
return (rv);
}
-#define atomic_cmpset_acq_32 atomic_cmpset_32
-#define atomic_cmpset_rel_32 atomic_cmpset_32
+static inline int
+atomic_cmpset_acq_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
+{
+ int rv;
+
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ rv = q->compare_exchange_strong(cmp, set, std::memory_order_acquire,
+ std::memory_order_relaxed);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
+ memory_order_acquire, memory_order_relaxed);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ rv = *p == cmp;
+ if (rv) {
+ *p = set;
+ }
+ rtems_interrupt_enable(level);
+#endif
+
+ return (rv);
+}
+
+static inline int
+atomic_cmpset_rel_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
+{
+ int rv;
+
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ rv = q->compare_exchange_strong(cmp, set, std::memory_order_release,
+ std::memory_order_relaxed);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
+ memory_order_release, memory_order_relaxed);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ rv = *p == cmp;
+ if (rv) {
+ *p = set;
+ }
+ rtems_interrupt_enable(level);
+#endif
+
+ return (rv);
+}
static inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
uint32_t tmp;
+
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ tmp = q->fetch_add(v, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ tmp = atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
tmp = *p;
*p += v;
rtems_interrupt_enable(level);
+#endif
return (tmp);
}
@@ -254,12 +906,24 @@ static inline uint32_t
atomic_readandclear_32(volatile uint32_t *p)
{
uint32_t tmp;
+
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ tmp = q->exchange(0, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ tmp = atomic_exchange_explicit(q, 0, memory_order_seq_cst);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
tmp = *p;
*p = 0;
rtems_interrupt_enable(level);
+#endif
return (tmp);
}
@@ -269,7 +933,19 @@ atomic_load_acq_32(volatile uint32_t *p)
{
uint32_t tmp;
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ tmp = q->load(std::memory_order_acquire);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ tmp = atomic_load_explicit(q, memory_order_acquire);
+#else
+ RTEMS_COMPILER_MEMORY_BARRIER();
tmp = *p;
+#endif
return (tmp);
}
@@ -277,65 +953,290 @@ atomic_load_acq_32(volatile uint32_t *p)
static inline void
atomic_store_rel_32(volatile uint32_t *p, uint32_t v)
{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_uint_least32_t *q =
+ reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+
+ q->store(v, std::memory_order_release);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+
+ atomic_store_explicit(q, v, memory_order_release);
+#else
*p = v;
+ RTEMS_COMPILER_MEMORY_BARRIER();
+#endif
}
static inline void
atomic_add_long(volatile long *p, long v)
{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ q->fetch_add(v, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
*p += v;
rtems_interrupt_enable(level);
+#endif
}
-#define atomic_add_acq_long atomic_add_long
-#define atomic_add_rel_long atomic_add_long
+static inline void
+atomic_add_acq_long(volatile long *p, long v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ q->fetch_add(v, std::memory_order_acquire);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ atomic_fetch_add_explicit(q, v, memory_order_acquire);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p += v;
+ rtems_interrupt_enable(level);
+#endif
+}
+
+static inline void
+atomic_add_rel_long(volatile long *p, long v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ q->fetch_add(v, std::memory_order_release);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ atomic_fetch_add_explicit(q, v, memory_order_release);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p += v;
+ rtems_interrupt_enable(level);
+#endif
+}
static inline void
atomic_subtract_long(volatile long *p, long v)
{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ q->fetch_sub(v, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ atomic_fetch_sub_explicit(q, v, memory_order_seq_cst);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p -= v;
+ rtems_interrupt_enable(level);
+#endif
+}
+
+static inline void
+atomic_subtract_acq_long(volatile long *p, long v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ q->fetch_sub(v, std::memory_order_acquire);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ atomic_fetch_sub_explicit(q, v, memory_order_acquire);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
*p -= v;
rtems_interrupt_enable(level);
+#endif
}
-#define atomic_subtract_acq_long atomic_subtract_long
-#define atomic_subtract_rel_long atomic_subtract_long
+static inline void
+atomic_subtract_rel_long(volatile long *p, long v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ q->fetch_sub(v, std::memory_order_release);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ atomic_fetch_sub_explicit(q, v, memory_order_release);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p -= v;
+ rtems_interrupt_enable(level);
+#endif
+}
static inline void
atomic_set_long(volatile long *p, long v)
{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ q->fetch_or(v, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ atomic_fetch_or_explicit(q, v, memory_order_seq_cst);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
*p |= v;
rtems_interrupt_enable(level);
+#endif
+}
+
+static inline void
+atomic_set_acq_long(volatile long *p, long v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ q->fetch_or(v, std::memory_order_acquire);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ atomic_fetch_or_explicit(q, v, memory_order_acquire);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p |= v;
+ rtems_interrupt_enable(level);
+#endif
}
-#define atomic_set_acq_long atomic_set_long
-#define atomic_set_rel_long atomic_set_long
+static inline void
+atomic_set_rel_long(volatile long *p, long v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ q->fetch_or(v, std::memory_order_release);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ atomic_fetch_or_explicit(q, v, memory_order_release);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p |= v;
+ rtems_interrupt_enable(level);
+#endif
+}
static inline void
atomic_clear_long(volatile long *p, long v)
{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ q->fetch_and(~v, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ atomic_fetch_and_explicit(q, ~v, memory_order_seq_cst);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
*p &= ~v;
rtems_interrupt_enable(level);
+#endif
}
-#define atomic_clear_acq_long atomic_clear_long
-#define atomic_clear_rel_long atomic_clear_long
+static inline void
+atomic_clear_acq_long(volatile long *p, long v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ q->fetch_and(~v, std::memory_order_acquire);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ atomic_fetch_and_explicit(q, ~v, memory_order_acquire);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p &= ~v;
+ rtems_interrupt_enable(level);
+#endif
+}
+
+static inline void
+atomic_clear_rel_long(volatile long *p, long v)
+{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ q->fetch_and(~v, std::memory_order_release);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ atomic_fetch_and_explicit(q, ~v, memory_order_release);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ *p &= ~v;
+ rtems_interrupt_enable(level);
+#endif
+}
static inline int
atomic_cmpset_long(volatile long *p, long cmp, long set)
{
int rv;
+
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ rv = q->compare_exchange_strong(cmp, set, std::memory_order_seq_cst,
+ std::memory_order_relaxed);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
+ memory_order_seq_cst, memory_order_relaxed);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
@@ -344,23 +1245,93 @@ atomic_cmpset_long(volatile long *p, long cmp, long set)
*p = set;
}
rtems_interrupt_enable(level);
+#endif
return (rv);
}
-#define atomic_cmpset_acq_long atomic_cmpset_long
-#define atomic_cmpset_rel_long atomic_cmpset_long
+static inline int
+atomic_cmpset_acq_long(volatile long *p, long cmp, long set)
+{
+ int rv;
+
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ rv = q->compare_exchange_strong(cmp, set, std::memory_order_acquire,
+ std::memory_order_relaxed);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
+ memory_order_acquire, memory_order_relaxed);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ rv = *p == cmp;
+ if (rv) {
+ *p = set;
+ }
+ rtems_interrupt_enable(level);
+#endif
+
+ return (rv);
+}
+
+static inline int
+atomic_cmpset_rel_long(volatile long *p, long cmp, long set)
+{
+ int rv;
+
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ rv = q->compare_exchange_strong(cmp, set, std::memory_order_release,
+ std::memory_order_relaxed);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
+ memory_order_release, memory_order_relaxed);
+#else
+ rtems_interrupt_level level;
+
+ rtems_interrupt_disable(level);
+ rv = *p == cmp;
+ if (rv) {
+ *p = set;
+ }
+ rtems_interrupt_enable(level);
+#endif
+
+ return (rv);
+}
static inline long
atomic_fetchadd_long(volatile long *p, long v)
{
long tmp;
+
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ tmp = q->fetch_add(v, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ tmp = atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
tmp = *p;
*p += v;
rtems_interrupt_enable(level);
+#endif
return (tmp);
}
@@ -369,12 +1340,24 @@ static inline long
atomic_readandclear_long(volatile long *p)
{
long tmp;
+
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ tmp = q->exchange(0, std::memory_order_seq_cst);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ tmp = atomic_exchange_explicit(q, 0, memory_order_seq_cst);
+#else
rtems_interrupt_level level;
rtems_interrupt_disable(level);
tmp = *p;
*p = 0;
rtems_interrupt_enable(level);
+#endif
return (tmp);
}
@@ -384,7 +1367,19 @@ atomic_load_acq_long(volatile long *p)
{
long tmp;
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ tmp = q->load(std::memory_order_acquire);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ tmp = atomic_load_explicit(q, memory_order_acquire);
+#else
+ RTEMS_COMPILER_MEMORY_BARRIER();
tmp = *p;
+#endif
return (tmp);
}
@@ -392,7 +1387,19 @@ atomic_load_acq_long(volatile long *p)
static inline void
atomic_store_rel_long(volatile long *p, long v)
{
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+ std::atomic_long *q =
+ reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
+
+ q->store(v, std::memory_order_release);
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+ atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
+
+ atomic_store_explicit(q, v, memory_order_release);
+#else
*p = v;
+ RTEMS_COMPILER_MEMORY_BARRIER();
+#endif
}
#endif /* _RTEMS_BSD_MACHINE_ATOMIC_H_ */
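
Usage sketches (not part of the patch), assuming the patched header above is installed as <machine/atomic.h>; the structures and functions below are hypothetical examples that call only operations defined in this header.

A FreeBSD-style reference count built on the int operations (atomic_add_int, atomic_fetchadd_int, atomic_cmpset_acq_int):

#include <machine/atomic.h>

struct obj {
	volatile int refcount;	/* manipulated only through atomic_*_int */
};

static inline void
obj_ref(struct obj *o)
{
	/* Atomic increment: lock-free on SMP builds, interrupt-disabled otherwise. */
	atomic_add_int(&o->refcount, 1);
}

static inline int
obj_unref(struct obj *o)
{
	/*
	 * atomic_fetchadd_int() returns the previous value, so a result of 1
	 * means this call dropped the last reference.
	 */
	return (atomic_fetchadd_int(&o->refcount, -1) == 1);
}

static inline int
obj_try_claim(struct obj *o)
{
	/*
	 * Compare-and-set with acquire semantics: claim the object only if
	 * no reference is held yet.  Returns non-zero on success.
	 */
	return (atomic_cmpset_acq_int(&o->refcount, 0, 1));
}

A producer/consumer handshake using the release store and acquire load defined above (atomic_store_rel_int, atomic_load_acq_int):

static int payload;		/* plain data published by the producer */
static volatile int ready;	/* handshake flag */

static void
producer(void)
{
	payload = 42;
	/* Release store: the write to payload is ordered before ready becomes 1. */
	atomic_store_rel_int(&ready, 1);
}

static int
consumer(void)
{
	/* Acquire load: once ready reads as 1, the payload write is visible. */
	while (atomic_load_acq_int(&ready) == 0) {
		/* spin */
	}
	return (payload);
}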