summaryrefslogtreecommitdiffstats
path: root/rtemsbsd/include/machine/atomic.h
diff options
context:
space:
mode:
Diffstat (limited to 'rtemsbsd/include/machine/atomic.h')
-rw-r--r--rtemsbsd/include/machine/atomic.h1621
1 files changed, 476 insertions, 1145 deletions
diff --git a/rtemsbsd/include/machine/atomic.h b/rtemsbsd/include/machine/atomic.h
index a4b6b488..9465fefd 100644
--- a/rtemsbsd/include/machine/atomic.h
+++ b/rtemsbsd/include/machine/atomic.h
@@ -7,6 +7,7 @@
*/
/*
+ * Copyright (c) 2020 Chris Johns All rights reserved.
* Copyright (c) 2009, 2015 embedded brains GmbH. All rights reserved.
*
* embedded brains GmbH
@@ -97,1446 +98,776 @@ rmb(void)
#endif
}
-static inline void
-atomic_add_int(volatile int *p, int v)
-{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+/*
+ * Load
+ */
- q->fetch_add(v, std::memory_order_seq_cst);
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+#define _ATOMIC_LOAD(T, p, mo, barrier) \
+ std::atomic_##T *q = \
+ reinterpret_cast<std::atomic_##T *>(const_cast<T *>(p)); \
+ tmp = q->load(std::mo)
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
+#define _ATOMIC_LOAD(T, p, mo, barrier) \
+ atomic_##T *q = (atomic_##T *)RTEMS_DEVOLATILE(T *, p); \
+ tmp = atomic_load_explicit(q, mo)
#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p += v;
- rtems_interrupt_enable(level);
+#define _ATOMIC_LOAD(T, p, mo, barrier) \
+ if (barrier) \
+ RTEMS_COMPILER_MEMORY_BARRIER(); \
+ tmp = *p
#endif
+
+static inline int
+atomic_load_int(volatile int *p)
+{
+ int tmp;
+ _ATOMIC_LOAD(int, p, memory_order_relaxed, false);
+ return (tmp);
}
-static inline void
-atomic_add_acq_int(volatile int *p, int v)
+static inline int
+atomic_load_acq_int(volatile int *p)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+ int tmp;
+ _ATOMIC_LOAD(int, p, memory_order_acquire, true);
+ return (tmp);
+}
- q->fetch_add(v, std::memory_order_acquire);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+static inline uint32_t
+atomic_load_32(volatile uint32_t *p)
+{
+ uint32_t tmp;
+ _ATOMIC_LOAD(uint_fast32_t, p, memory_order_relaxed, false);
+ return (tmp);
+}
- atomic_fetch_add_explicit(q, v, memory_order_acquire);
-#else
- rtems_interrupt_level level;
+static inline uint32_t
+atomic_load_acq_32(volatile uint32_t *p)
+{
+ uint32_t tmp;
+ _ATOMIC_LOAD(uint_fast32_t, p, memory_order_acquire, true);
+ return (tmp);
+}
- rtems_interrupt_disable(level);
- *p += v;
- rtems_interrupt_enable(level);
-#endif
+static inline long
+atomic_load_long(volatile long *p)
+{
+ long tmp;
+ _ATOMIC_LOAD(long, p, memory_order_relaxed, false);
+ return (tmp);
}
-static inline void
-atomic_add_rel_int(volatile int *p, int v)
+static inline long
+atomic_load_acq_long(volatile long *p)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+ long tmp;
+ _ATOMIC_LOAD(long, p, memory_order_acquire, true);
+ return (tmp);
+}
- q->fetch_add(v, std::memory_order_release);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+/*
+ * Store
+ */
- atomic_fetch_add_explicit(q, v, memory_order_release);
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+#define _ATOMIC_STORE(T, p, v, mo, barrier) \
+ std::atomic_##T *q = \
+ reinterpret_cast<std::atomic_##T *>(const_cast<T *>(p)); \
+	q->store(v, std::mo)
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+#define _ATOMIC_STORE(T, p, v, mo, barrier) \
+ atomic_##T *q = (atomic_##T *)RTEMS_DEVOLATILE(T *, p); \
+ atomic_store_explicit(q, v, mo)
#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p += v;
- rtems_interrupt_enable(level);
+#define _ATOMIC_STORE(T, p, v, mo, barrier) \
+ *p = v; \
+ if (barrier) \
+ RTEMS_COMPILER_MEMORY_BARRIER()
#endif
-}
static inline void
-atomic_subtract_int(volatile int *p, int v)
+atomic_store_int(volatile int *p, int v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
-
- q->fetch_sub(v, std::memory_order_seq_cst);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- atomic_fetch_sub_explicit(q, v, memory_order_seq_cst);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p -= v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_STORE(int, p, v, memory_order_relaxed, false);
}
static inline void
-atomic_subtract_acq_int(volatile int *p, int v)
+atomic_store_rel_int(volatile int *p, int v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
-
- q->fetch_sub(v, std::memory_order_acquire);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- atomic_fetch_sub_explicit(q, v, memory_order_acquire);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p -= v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_STORE(int, p, v, memory_order_release, true);
}
static inline void
-atomic_subtract_rel_int(volatile int *p, int v)
+atomic_store_rel_32(volatile uint32_t *p, uint32_t v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
-
- q->fetch_sub(v, std::memory_order_release);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- atomic_fetch_sub_explicit(q, v, memory_order_release);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p -= v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_STORE(uint_fast32_t, p, v, memory_order_release, true);
}
static inline void
-atomic_set_int(volatile int *p, int v)
+atomic_store_rel_long(volatile long *p, long v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
-
- q->fetch_or(v, std::memory_order_seq_cst);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- atomic_fetch_or_explicit(q, v, memory_order_seq_cst);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p |= v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_STORE(long, p, v, memory_order_release, true);
}
static inline void
-atomic_set_acq_int(volatile int *p, int v)
+atomic_store_rel_ptr(volatile uintptr_t *p, uintptr_t v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+ /* XXX IPL32 ok with this? */
+ _ATOMIC_STORE(uintptr_t, p, v, memory_order_release, true);
+}
- q->fetch_or(v, std::memory_order_acquire);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+/*
+ * Add
+ */
- atomic_fetch_or_explicit(q, v, memory_order_acquire);
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+#define _ATOMIC_ADD(T, p, v, mo) \
+ std::atomic_##T *q = \
+ reinterpret_cast<std::atomic_##T *>(const_cast<T *>(p)); \
+ q->fetch_add(v, std::mo)
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+#define _ATOMIC_ADD(T, p, v, mo) \
+ atomic_##T *q = (atomic_##T *)RTEMS_DEVOLATILE(T *, p); \
+ atomic_fetch_add_explicit(q, v, mo)
#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p |= v;
- rtems_interrupt_enable(level);
+#define _ATOMIC_ADD(T, p, v, mo) \
+ rtems_interrupt_level level; \
+ rtems_interrupt_disable(level); \
+ *p += v; \
+ rtems_interrupt_enable(level)
#endif
-}
static inline void
-atomic_set_rel_int(volatile int *p, int v)
+atomic_add_int(volatile int *p, int v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
-
- q->fetch_or(v, std::memory_order_release);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- atomic_fetch_or_explicit(q, v, memory_order_release);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p |= v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_ADD(int, p, v, memory_order_seq_cst);
}
static inline void
-atomic_clear_int(volatile int *p, int v)
+atomic_add_acq_int(volatile int *p, int v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
-
- q->fetch_and(~v, std::memory_order_seq_cst);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- atomic_fetch_and_explicit(q, ~v, memory_order_seq_cst);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p &= ~v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_ADD(int, p, v, memory_order_acquire);
}
static inline void
-atomic_clear_acq_int(volatile int *p, int v)
+atomic_add_rel_int(volatile int *p, int v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
-
- q->fetch_and(~v, std::memory_order_acquire);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- atomic_fetch_and_explicit(q, ~v, memory_order_acquire);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p &= ~v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_ADD(int, p, v, memory_order_release);
}
static inline void
-atomic_clear_rel_int(volatile int *p, int v)
+atomic_add_32(volatile uint32_t *p, uint32_t v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
-
- q->fetch_and(~v, std::memory_order_release);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- atomic_fetch_and_explicit(q, ~v, memory_order_release);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p &= ~v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_ADD(uint_fast32_t, p, v, memory_order_seq_cst);
}
-static inline int
-atomic_cmpset_int(volatile int *p, int cmp, int set)
+static inline void
+atomic_add_acq_32(volatile uint32_t *p, uint32_t v)
{
- int rv;
-
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
-
- rv = q->compare_exchange_strong(cmp, set, std::memory_order_seq_cst,
- std::memory_order_relaxed);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
- memory_order_seq_cst, memory_order_relaxed);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- rv = *p == cmp;
- if (rv) {
- *p = set;
- }
- rtems_interrupt_enable(level);
-#endif
-
- return (rv);
+ _ATOMIC_ADD(uint_fast32_t, p, v, memory_order_acquire);
}
-static inline int
-atomic_fcmpset_int(volatile int *p, int *cmp, int set)
+static inline void
+atomic_add_rel_32(volatile uint32_t *p, uint32_t v)
{
- int rv;
-
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
-
- rv = q->compare_exchange_strong(*cmp, set, std::memory_order_seq_cst,
- std::memory_order_relaxed);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- rv = atomic_compare_exchange_strong_explicit(q, cmp, set,
- memory_order_seq_cst, memory_order_relaxed);
-#else
- rtems_interrupt_level level;
- int actual;
-
- rtems_interrupt_disable(level);
- actual = *p;
- rv = actual == *cmp;
- *cmp = actual;
- if (rv) {
- *p = set;
- }
- rtems_interrupt_enable(level);
-#endif
-
- return (rv);
+ _ATOMIC_ADD(uint_fast32_t, p, v, memory_order_release);
}
-static inline int
-atomic_cmpset_acq_int(volatile int *p, int cmp, int set)
+static inline void
+atomic_add_long(volatile long *p, long v)
{
- int rv;
-
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
-
- rv = q->compare_exchange_strong(cmp, set, std::memory_order_acquire,
- std::memory_order_relaxed);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
- memory_order_acquire, memory_order_relaxed);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- rv = *p == cmp;
- if (rv) {
- *p = set;
- }
- rtems_interrupt_enable(level);
-#endif
-
- return (rv);
+ _ATOMIC_ADD(long, p, v, memory_order_seq_cst);
}
-static inline int
-atomic_cmpset_rel_int(volatile int *p, int cmp, int set)
+static inline void
+atomic_add_acq_long(volatile long *p, long v)
{
- int rv;
-
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
-
- rv = q->compare_exchange_strong(cmp, set, std::memory_order_release,
- std::memory_order_relaxed);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
- memory_order_release, memory_order_relaxed);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- rv = *p == cmp;
- if (rv) {
- *p = set;
- }
- rtems_interrupt_enable(level);
-#endif
-
- return (rv);
+ _ATOMIC_ADD(long, p, v, memory_order_acquire);
}
-static inline int
-atomic_fetchadd_int(volatile int *p, int v)
+static inline void
+atomic_add_rel_long(volatile long *p, long v)
{
- int tmp;
+ _ATOMIC_ADD(long, p, v, memory_order_release);
+}
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+/*
+ * Subtract
+ */
- tmp = q->fetch_add(v, std::memory_order_seq_cst);
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+#define _ATOMIC_SUB(T, p, v, mo) \
+ std::atomic_##T *q = \
+ reinterpret_cast<std::atomic_##T *>(const_cast<T *>(p)); \
+ q->fetch_sub(v, std::mo)
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- tmp = atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
+#define _ATOMIC_SUB(T, p, v, mo) \
+ atomic_##T *q = (atomic_##T *)RTEMS_DEVOLATILE(T *, p); \
+ atomic_fetch_sub_explicit(q, v, mo)
#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- tmp = *p;
- *p += v;
- rtems_interrupt_enable(level);
+#define _ATOMIC_SUB(T, p, v, mo) \
+ rtems_interrupt_level level; \
+ rtems_interrupt_disable(level); \
+ *p -= v; \
+ rtems_interrupt_enable(level)
#endif
- return (tmp);
+static inline void
+atomic_subtract_int(volatile int *p, int v)
+{
+ _ATOMIC_SUB(int, p, v, memory_order_seq_cst);
}
-static inline int
-atomic_readandclear_int(volatile int *p)
+static inline void
+atomic_subtract_acq_int(volatile int *p, int v)
{
- int tmp;
-
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
-
- tmp = q->exchange(0, std::memory_order_seq_cst);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- tmp = atomic_exchange_explicit(q, 0, memory_order_seq_cst);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- tmp = *p;
- *p = 0;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_SUB(int, p, v, memory_order_acquire);
+}
- return (tmp);
+static inline void
+atomic_subtract_rel_int(volatile int *p, int v)
+{
+ _ATOMIC_SUB(int, p, v, memory_order_release);
}
-static inline int
-atomic_load_int(volatile int *p)
+static inline void
+atomic_subtract_32(volatile uint32_t *p, uint32_t v)
{
- int tmp;
+ _ATOMIC_SUB(uint_fast32_t, p, v, memory_order_seq_cst);
+}
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+static inline void
+atomic_subtract_acq_32(volatile uint32_t *p, uint32_t v)
+{
+ _ATOMIC_SUB(uint_fast32_t, p, v, memory_order_acquire);
+}
- tmp = q->load(std::memory_order_relaxed);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
+static inline void
+atomic_subtract_rel_32(volatile uint32_t *p, uint32_t v)
+{
+ _ATOMIC_SUB(uint_fast32_t, p, v, memory_order_release);
+}
- tmp = atomic_load_explicit(q, memory_order_relaxed);
-#else
- tmp = *p;
-#endif
+static inline void
+atomic_subtract_long(volatile long *p, long v)
+{
+ _ATOMIC_SUB(long, p, v, memory_order_seq_cst);
+}
- return (tmp);
+static inline void
+atomic_subtract_acq_long(volatile long *p, long v)
+{
+ _ATOMIC_SUB(long, p, v, memory_order_acquire);
}
-static inline int
-atomic_load_acq_int(volatile int *p)
+static inline void
+atomic_subtract_rel_long(volatile long *p, long v)
{
- int tmp;
+ _ATOMIC_SUB(long, p, v, memory_order_release);
+}
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
+/*
+ * Set
+ */
- tmp = q->load(std::memory_order_acquire);
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+#define _ATOMIC_SET(T, p, v, mo) \
+ std::atomic_##T *q = \
+ reinterpret_cast<std::atomic_##T *>(const_cast<T *>(p)); \
+ q->fetch_or(v, std::mo)
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- tmp = atomic_load_explicit(q, memory_order_acquire);
+#define _ATOMIC_SET(T, p, v, mo) \
+ atomic_##T *q = (atomic_##T *)RTEMS_DEVOLATILE(T *, p); \
+ atomic_fetch_or_explicit(q, v, mo)
#else
- RTEMS_COMPILER_MEMORY_BARRIER();
- tmp = *p;
+#define _ATOMIC_SET(T, p, v, mo) \
+ rtems_interrupt_level level; \
+ rtems_interrupt_disable(level); \
+ *p |= v; \
+ rtems_interrupt_enable(level)
#endif
- return (tmp);
+static inline void
+atomic_set_int(volatile int *p, int v)
+{
+ _ATOMIC_SET(int, p, v, memory_order_seq_cst);
}
static inline void
-atomic_store_int(volatile int *p, int v)
+atomic_set_acq_int(volatile int *p, int v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
-
- q->store(v, std::memory_order_relaxed);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- atomic_store_explicit(q, v, memory_order_relaxed);
-#else
- *p = v;
-#endif
+ _ATOMIC_SET(int, p, v, memory_order_acquire);
}
static inline void
-atomic_store_rel_int(volatile int *p, int v)
+atomic_set_rel_int(volatile int *p, int v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_int *q =
- reinterpret_cast<std::atomic_int *>(const_cast<int *>(p));
-
- q->store(v, std::memory_order_release);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_int *q = (atomic_int *)RTEMS_DEVOLATILE(int *, p);
-
- atomic_store_explicit(q, v, memory_order_release);
-#else
- *p = v;
- RTEMS_COMPILER_MEMORY_BARRIER();
-#endif
+ _ATOMIC_SET(int, p, v, memory_order_release);
}
static inline void
-atomic_add_32(volatile uint32_t *p, uint32_t v)
+atomic_set_32(volatile uint32_t *p, uint32_t v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_uint_least32_t *q =
- reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
-
- q->fetch_add(v, std::memory_order_seq_cst);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
-
- atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p += v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_SET(uint_fast32_t, p, v, memory_order_seq_cst);
}
static inline void
-atomic_add_acq_32(volatile uint32_t *p, uint32_t v)
+atomic_set_acq_32(volatile uint32_t *p, uint32_t v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_uint_least32_t *q =
- reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
-
- q->fetch_add(v, std::memory_order_acquire);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
-
- atomic_fetch_add_explicit(q, v, memory_order_acquire);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p += v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_SET(uint_fast32_t, p, v, memory_order_acquire);
}
static inline void
-atomic_add_rel_32(volatile uint32_t *p, uint32_t v)
+atomic_set_rel_32(volatile uint32_t *p, uint32_t v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_uint_least32_t *q =
- reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
-
- q->fetch_add(v, std::memory_order_release);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
-
- atomic_fetch_add_explicit(q, v, memory_order_release);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p += v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_SET(uint_fast32_t, p, v, memory_order_release);
}
static inline void
-atomic_subtract_32(volatile uint32_t *p, uint32_t v)
+atomic_set_long(volatile long *p, long v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_uint_least32_t *q =
- reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
-
- q->fetch_sub(v, std::memory_order_seq_cst);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
-
- atomic_fetch_sub_explicit(q, v, memory_order_seq_cst);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p -= v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_SET(long, p, v, memory_order_seq_cst);
}
static inline void
-atomic_subtract_acq_32(volatile uint32_t *p, uint32_t v)
+atomic_set_acq_long(volatile long *p, long v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_uint_least32_t *q =
- reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
-
- q->fetch_sub(v, std::memory_order_acquire);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
-
- atomic_fetch_sub_explicit(q, v, memory_order_acquire);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p -= v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_SET(long, p, v, memory_order_acquire);
}
static inline void
-atomic_subtract_rel_32(volatile uint32_t *p, uint32_t v)
+atomic_set_rel_long(volatile long *p, long v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_uint_least32_t *q =
- reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+ _ATOMIC_SET(long, p, v, memory_order_release);
+}
- q->fetch_sub(v, std::memory_order_release);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+/*
+ * Clear
+ */
- atomic_fetch_sub_explicit(q, v, memory_order_release);
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+#define _ATOMIC_CLEAR(T, p, v, mo) \
+ std::atomic_##T *q = \
+ reinterpret_cast<std::atomic_##T *>(const_cast<T *>(p)); \
+ q->fetch_and(~v, std::mo)
+#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
+#define _ATOMIC_CLEAR(T, p, v, mo) \
+ atomic_##T *q = (atomic_##T *)RTEMS_DEVOLATILE(T *, p); \
+ atomic_fetch_and_explicit(q, ~v, mo)
#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p -= v;
- rtems_interrupt_enable(level);
+#define _ATOMIC_CLEAR(T, p, v, mo) \
+ rtems_interrupt_level level; \
+ rtems_interrupt_disable(level); \
+ *p &= ~v; \
+ rtems_interrupt_enable(level)
#endif
-}
static inline void
-atomic_set_32(volatile uint32_t *p, uint32_t v)
+atomic_clear_int(volatile int *p, int v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_uint_least32_t *q =
- reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
-
- q->fetch_or(v, std::memory_order_seq_cst);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
-
- atomic_fetch_or_explicit(q, v, memory_order_seq_cst);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p |= v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_CLEAR(int, p, v, memory_order_seq_cst);
}
static inline void
-atomic_set_acq_32(volatile uint32_t *p, uint32_t v)
+atomic_clear_acq_int(volatile int *p, int v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_uint_least32_t *q =
- reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
-
- q->fetch_or(v, std::memory_order_acquire);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
-
- atomic_fetch_or_explicit(q, v, memory_order_acquire);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p |= v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_CLEAR(int, p, v, memory_order_acquire);
}
static inline void
-atomic_set_rel_32(volatile uint32_t *p, uint32_t v)
+atomic_clear_rel_int(volatile int *p, int v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_uint_least32_t *q =
- reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
-
- q->fetch_or(v, std::memory_order_release);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
-
- atomic_fetch_or_explicit(q, v, memory_order_release);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p |= v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_CLEAR(int, p, v, memory_order_release);
}
static inline void
atomic_clear_32(volatile uint32_t *p, uint32_t v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_uint_least32_t *q =
- reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
-
- q->fetch_and(~v, std::memory_order_seq_cst);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
-
- atomic_fetch_and_explicit(q, ~v, memory_order_seq_cst);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p &= ~v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_CLEAR(uint_fast32_t, p, v, memory_order_seq_cst);
}
static inline void
atomic_clear_acq_32(volatile uint32_t *p, uint32_t v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_uint_least32_t *q =
- reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
-
- q->fetch_and(~v, std::memory_order_acquire);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
-
- atomic_fetch_and_explicit(q, ~v, memory_order_acquire);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- *p &= ~v;
- rtems_interrupt_enable(level);
-#endif
+ _ATOMIC_CLEAR(uint_fast32_t, p, v, memory_order_acquire);
}
static inline void
atomic_clear_rel_32(volatile uint32_t *p, uint32_t v)
{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_uint_least32_t *q =
- reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
-
- q->fetch_and(~v, std::memory_order_release);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
+ _ATOMIC_CLEAR(uint_fast32_t, p, v, memory_order_release);
+}
- atomic_fetch_and_explicit(q, ~v, memory_order_release);
-#else
- rtems_interrupt_level level;
+static inline void
+atomic_clear_long(volatile long *p, long v)
+{
+	_ATOMIC_CLEAR(long, p, v, memory_order_seq_cst);
+}
- rtems_interrupt_disable(level);
- *p &= ~v;
- rtems_interrupt_enable(level);
-#endif
+static inline void
+atomic_clear_acq_long(volatile long *p, long v)
+{
+ _ATOMIC_CLEAR(long, p, v, memory_order_acquire);
}
-static inline int
-atomic_cmpset_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
+static inline void
+atomic_clear_rel_long(volatile long *p, long v)
{
- int rv;
+ _ATOMIC_CLEAR(long, p, v, memory_order_release);
+}
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_uint_least32_t *q =
- reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
+/*
+ * Compare and set
+ */
- rv = q->compare_exchange_strong(cmp, set, std::memory_order_seq_cst,
- std::memory_order_relaxed);
+#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
+#define _ATOMIC_CMPSET(T, p, cmp, set, mo) \
+ std::atomic_##T *q = \
+ reinterpret_cast<std::atomic_##T *>(const_cast<T *>(p)); \
+ rv = q->compare_exchange_strong(cmp, set, \
+ std::mo, std::memory_order_relaxed)
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
-
- rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
- memory_order_seq_cst, memory_order_relaxed);
+#define _ATOMIC_CMPSET(T, p, cmp, set, mo) \
+ atomic_##T *q = (atomic_##T *)RTEMS_DEVOLATILE(T *, p); \
+	rv = atomic_compare_exchange_strong_explicit(q, &cmp, set, \
+ mo, memory_order_relaxed)
#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- rv = *p == cmp;
- if (rv) {
- *p = set;
- }
+#define _ATOMIC_CMPSET(T, p, cmp, set, mo) \
+ rtems_interrupt_level level; \
+ rtems_interrupt_disable(level); \
+ rv = *p == cmp; \
+ if (rv) { \
+ *p = set; \
+ } \
rtems_interrupt_enable(level);
#endif
+static inline int
+atomic_cmpset_int(volatile int *p, int cmp, int set)
+{
+ int rv;
+ _ATOMIC_CMPSET(int, p, cmp, set, memory_order_seq_cst);
return (rv);
}
static inline int
-atomic_cmpset_acq_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
+atomic_cmpset_acq_int(volatile int *p, int cmp, int set)
{
int rv;
-
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_uint_least32_t *q =
- reinterpret_cast<std::atomic_uint_least32_t *>(const_cast<uint32_t *>(p));
-
- rv = q->compare_exchange_strong(cmp, set, std::memory_order_acquire,
- std::memory_order_relaxed);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_uint_least32_t *q = (atomic_uint_least32_t *)RTEMS_DEVOLATILE(uint32_t *, p);
-
- rv = atomic_compare_exchange_strong_explicit(q, &cmp, set,
- memory_order_acquire, memory_order_relaxed);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- rv = *p == cmp;
- if (rv) {
- *p = set;
- }
- rtems_interrupt_enable(level);
-#endif
-
+ _ATOMIC_CMPSET(int, p, cmp, set, memory_order_acquire);
return (rv);
}
/*
 * Atomic compare-and-set: if *p == cmp, store 'set' in *p with
 * release ordering.  Returns non-zero iff the store happened.
 */
static inline int
atomic_cmpset_rel_int(volatile int *p, int cmp, int set)
{
	int rv;	/* assigned by the _ATOMIC_CMPSET() expansion */
	_ATOMIC_CMPSET(int, p, cmp, set, memory_order_release);
	return (rv);
}
/*
 * Atomic compare-and-set on a 32-bit value (accessed via the
 * uint_least32_t atomic type), sequentially consistent.  Returns
 * non-zero iff the store happened.
 */
static inline int
atomic_cmpset_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
{
	int rv;	/* assigned by the _ATOMIC_CMPSET() expansion */
	_ATOMIC_CMPSET(uint_least32_t, p, cmp, set, memory_order_seq_cst);
	return (rv);
}
/*
 * Atomic compare-and-set on a 32-bit value, acquire ordering.
 * Returns non-zero iff the store happened.
 */
static inline int
atomic_cmpset_acq_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
{
	int rv;	/* assigned by the _ATOMIC_CMPSET() expansion */
	_ATOMIC_CMPSET(uint_least32_t, p, cmp, set, memory_order_acquire);
	return (rv);
}
/*
 * Atomic compare-and-set on a 32-bit value, release ordering.
 * Returns non-zero iff the store happened.
 */
static inline int
atomic_cmpset_rel_32(volatile uint32_t *p, uint32_t cmp, uint32_t set)
{
	int rv;	/* assigned by the _ATOMIC_CMPSET() expansion */
	_ATOMIC_CMPSET(uint_least32_t, p, cmp, set, memory_order_release);
	return (rv);
}
/*
 * Atomic compare-and-set on a 64-bit value (accessed via the
 * uint_least64_t atomic type), sequentially consistent.  Returns
 * non-zero iff the store happened.
 */
static inline int
atomic_cmpset_64(volatile uint64_t *p, uint64_t cmp, uint64_t set)
{
	int rv;	/* assigned by the _ATOMIC_CMPSET() expansion */
	_ATOMIC_CMPSET(uint_least64_t, p, cmp, set, memory_order_seq_cst);
	return (rv);
}
/*
 * Atomic compare-and-set on a 64-bit value, acquire ordering.
 * Returns non-zero iff the store happened.
 */
static inline int
atomic_cmpset_acq_64(volatile uint64_t *p, uint64_t cmp, uint64_t set)
{
	int rv;	/* assigned by the _ATOMIC_CMPSET() expansion */
	_ATOMIC_CMPSET(uint_least64_t, p, cmp, set, memory_order_acquire);
	return (rv);
}
/*
 * Atomic compare-and-set on a 64-bit value, release ordering.
 * Returns non-zero iff the store happened.
 */
static inline int
atomic_cmpset_rel_64(volatile uint64_t *p, uint64_t cmp, uint64_t set)
{
	int rv;	/* assigned by the _ATOMIC_CMPSET() expansion */
	_ATOMIC_CMPSET(uint_least64_t, p, cmp, set, memory_order_release);
	return (rv);
}
/*
 * Atomic compare-and-set on a long, sequentially consistent.
 * Returns non-zero iff the store happened.
 */
static inline int
atomic_cmpset_long(volatile long *p, long cmp, long set)
{
	int rv;	/* assigned by the _ATOMIC_CMPSET() expansion */
	_ATOMIC_CMPSET(long, p, cmp, set, memory_order_seq_cst);
	return (rv);
}
/*
 * Atomic compare-and-set on a long, acquire ordering.
 * Returns non-zero iff the store happened.
 */
static inline int
atomic_cmpset_acq_long(volatile long *p, long cmp, long set)
{
	int rv;	/* assigned by the _ATOMIC_CMPSET() expansion */
	_ATOMIC_CMPSET(long, p, cmp, set, memory_order_acquire);
	return (rv);
}
/*
 * Atomic compare-and-set on a long, release ordering.
 * Returns non-zero iff the store happened.
 */
static inline int
atomic_cmpset_rel_long(volatile long *p, long cmp, long set)
{
	int rv;	/* assigned by the _ATOMIC_CMPSET() expansion */
	_ATOMIC_CMPSET(long, p, cmp, set, memory_order_release);
	return (rv);
}
/*
 * Atomic compare-and-set on a pointer-sized value, sequentially
 * consistent.  Returns non-zero iff the store happened.
 */
static inline int
atomic_cmpset_ptr(volatile uintptr_t *p, uintptr_t cmp, uintptr_t set)
{
	int rv;	/* assigned by the _ATOMIC_CMPSET() expansion */
	_ATOMIC_CMPSET(uintptr_t, p, cmp, set, memory_order_seq_cst);
	return (rv);
}
/*
 * Atomic compare-and-set on a pointer-sized value, acquire ordering.
 * Returns non-zero iff the store happened.
 */
static inline int
atomic_cmpset_acq_ptr(volatile uintptr_t *p, uintptr_t cmp, uintptr_t set)
{
	int rv;	/* assigned by the _ATOMIC_CMPSET() expansion */
	_ATOMIC_CMPSET(uintptr_t, p, cmp, set, memory_order_acquire);
	return (rv);
}
/*
 * Atomic compare-and-set on a pointer-sized value, release ordering.
 * Returns non-zero iff the store happened.
 */
static inline int
atomic_cmpset_rel_ptr(volatile uintptr_t *p, uintptr_t cmp, uintptr_t set)
{
	int rv;	/* assigned by the _ATOMIC_CMPSET() expansion */
	_ATOMIC_CMPSET(uintptr_t, p, cmp, set, memory_order_release);
	return (rv);
}
/*
 * Fetch compare and set
 *
 * Each expansion compares *p with *cmp and, if equal, stores 'set' in
 * *p.  On failure the value observed in *p is written back to *cmp.
 * The success flag (non-zero on success) is assigned to a local 'rv'
 * declared by the calling wrapper.
 */

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
#define _ATOMIC_FCMPSET(T, p, cmp, set, mo) \
	std::atomic_##T *q = \
	    reinterpret_cast<std::atomic_##T *>(const_cast<T *>(p)); \
	rv = q->compare_exchange_strong(*cmp, set, \
	    std::mo, std::memory_order_relaxed)
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
/*
 * Fix: capture the result of the exchange.  It was previously
 * discarded, leaving the caller's 'rv' uninitialized (undefined
 * behavior) in C11 stdatomic builds; the C++ branch above already
 * assigned it.
 */
#define _ATOMIC_FCMPSET(T, p, cmp, set, mo) \
	atomic_##T *q = (atomic_##T *)RTEMS_DEVOLATILE(T *, p); \
	rv = atomic_compare_exchange_strong_explicit(q, cmp, set, \
	    mo, memory_order_relaxed)
#else
/* Fallback: interrupt-disabled critical section; 'mo' is unused. */
#define _ATOMIC_FCMPSET(T, p, cmp, set, mo) \
	rtems_interrupt_level level; \
	T actual; \
	rtems_interrupt_disable(level); \
	actual = *p; \
	rv = actual == *cmp; \
	*cmp = actual; \
	if (rv) { \
		*p = set; \
	} \
	rtems_interrupt_enable(level);
#endif
/*
 * Atomic fetch-compare-and-set: if *p == *cmp, store 'set' in *p;
 * otherwise the observed value of *p is written back to *cmp.
 * Sequentially consistent.  Returns non-zero iff the store happened.
 */
static inline int
atomic_fcmpset_int(volatile int *p, int *cmp, int set)
{
	int rv;	/* assigned by the _ATOMIC_FCMPSET() expansion */
	_ATOMIC_FCMPSET(int, p, cmp, set, memory_order_seq_cst);
	return (rv);
}
/*
 * Atomic fetch-compare-and-set with acquire ordering; on failure the
 * observed value of *p is written back to *cmp.  Returns non-zero iff
 * the store happened.
 */
static inline int
atomic_fcmpset_acq_int(volatile int *p, int *cmp, int set)
{
	int rv;	/* assigned by the _ATOMIC_FCMPSET() expansion */
	_ATOMIC_FCMPSET(int, p, cmp, set, memory_order_acquire);
	return (rv);
}
/*
 * Atomic fetch-compare-and-set with release ordering; on failure the
 * observed value of *p is written back to *cmp.  Returns non-zero iff
 * the store happened.
 */
static inline int
atomic_fcmpset_rel_int(volatile int *p, int *cmp, int set)
{
	int rv;	/* assigned by the _ATOMIC_FCMPSET() expansion */
	_ATOMIC_FCMPSET(int, p, cmp, set, memory_order_release);
	return (rv);
}
/*
 * 64-bit atomic fetch-compare-and-set, sequentially consistent; on
 * failure the observed value of *p is written back to *cmp.  Returns
 * non-zero iff the store happened.
 */
static inline int
atomic_fcmpset_64(volatile uint64_t *p, uint64_t *cmp, uint64_t set)
{
	int rv;	/* assigned by the _ATOMIC_FCMPSET() expansion */
	_ATOMIC_FCMPSET(uint_least64_t, p, cmp, set, memory_order_seq_cst);
	return (rv);
}
/*
 * 64-bit atomic fetch-compare-and-set, acquire ordering; on failure
 * the observed value of *p is written back to *cmp.  Returns non-zero
 * iff the store happened.
 */
static inline int
atomic_fcmpset_acq_64(volatile uint64_t *p, uint64_t *cmp, uint64_t set)
{
	int rv;	/* assigned by the _ATOMIC_FCMPSET() expansion */
	_ATOMIC_FCMPSET(uint_least64_t, p, cmp, set, memory_order_acquire);
	return (rv);
}
/*
 * 64-bit atomic fetch-compare-and-set, release ordering; on failure
 * the observed value of *p is written back to *cmp.  Returns non-zero
 * iff the store happened.
 */
static inline int
atomic_fcmpset_rel_64(volatile uint64_t *p, uint64_t *cmp, uint64_t set)
{
	int rv;	/* assigned by the _ATOMIC_FCMPSET() expansion */
	_ATOMIC_FCMPSET(uint_least64_t, p, cmp, set, memory_order_release);
	return (rv);
}
/* XXX Is ILP32 ok with the .*_ptr versions? */
/*
 * Pointer-sized atomic fetch-compare-and-set, sequentially
 * consistent; on failure the observed value of *p is written back to
 * *cmp.  Returns non-zero iff the store happened.
 */
static inline int
atomic_fcmpset_ptr(volatile uintptr_t *p, uintptr_t *cmp, uintptr_t set)
{
	int rv;	/* assigned by the _ATOMIC_FCMPSET() expansion */
	_ATOMIC_FCMPSET(uintptr_t, p, cmp, set, memory_order_seq_cst);
	return (rv);
}
/*
 * Pointer-sized atomic fetch-compare-and-set, acquire ordering; on
 * failure the observed value of *p is written back to *cmp.  Returns
 * non-zero iff the store happened.
 */
static inline int
atomic_fcmpset_acq_ptr(volatile uintptr_t *p, uintptr_t *cmp, uintptr_t set)
{
	int rv;	/* assigned by the _ATOMIC_FCMPSET() expansion */
	_ATOMIC_FCMPSET(uintptr_t, p, cmp, set, memory_order_acquire);
	return (rv);
}
/*
 * Pointer-sized atomic fetch-compare-and-set, release ordering; on
 * failure the observed value of *p is written back to *cmp.  Returns
 * non-zero iff the store happened.
 */
static inline int
atomic_fcmpset_rel_ptr(volatile uintptr_t *p, uintptr_t *cmp, uintptr_t set)
{
	int rv;	/* assigned by the _ATOMIC_FCMPSET() expansion */
	_ATOMIC_FCMPSET(uintptr_t, p, cmp, set, memory_order_release);
	return (rv);
}
/*
 * Fetch add
 *
 * Each expansion atomically adds 'v' to *p and leaves the previous
 * value in a local 'tmp' declared by the calling wrapper.  T selects
 * the std::atomic/C11 atomic type the object is accessed through.
 */

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
#define _ATOMIC_FETCHADD(T, p, v, mo) \
	std::atomic_##T *q = \
	    reinterpret_cast<std::atomic_##T *>(const_cast<T *>(p)); \
	tmp = q->fetch_add(v, std::mo)
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
#define _ATOMIC_FETCHADD(T, p, v, mo) \
	atomic_##T *q = (atomic_##T *)RTEMS_DEVOLATILE(T *, p); \
	tmp = atomic_fetch_add_explicit(q, v, mo)
#else
/* Fallback: interrupt-disabled critical section; 'mo' is unused. */
#define _ATOMIC_FETCHADD(T, p, v, mo) \
	rtems_interrupt_level level; \
	rtems_interrupt_disable(level); \
	tmp = *p; \
	*p += v; \
	rtems_interrupt_enable(level);
#endif
/*
 * Atomically add 'v' to *p; returns the value *p held beforehand
 * (sequentially consistent).
 */
static inline int
atomic_fetchadd_int(volatile int *p, int v)
{
	int tmp;	/* assigned by the _ATOMIC_FETCHADD() expansion */
	_ATOMIC_FETCHADD(int, p, v, memory_order_seq_cst);
	return (tmp);
}
-static inline long
-atomic_fetchadd_long(volatile long *p, long v)
+static inline uint32_t
+atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
- long tmp;
-
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_long *q =
- reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
-
- tmp = q->fetch_add(v, std::memory_order_seq_cst);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
-
- tmp = atomic_fetch_add_explicit(q, v, memory_order_seq_cst);
-#else
- rtems_interrupt_level level;
-
- rtems_interrupt_disable(level);
- tmp = *p;
- *p += v;
- rtems_interrupt_enable(level);
-#endif
+ uint32_t tmp;
+ _ATOMIC_FETCHADD(uint_fast32_t, p, v, memory_order_seq_cst);
+ return (tmp);
+}
+static inline uint64_t
+atomic_fetchadd_64(volatile uint64_t *p, uint64_t v)
+{
+ uint64_t tmp;
+ _ATOMIC_FETCHADD(uint_fast64_t, p, v, memory_order_seq_cst);
return (tmp);
}
/*
 * Atomically add 'v' to the long *p; returns the value *p held
 * beforehand (sequentially consistent).
 */
static inline long
atomic_fetchadd_long(volatile long *p, long v)
{
	long tmp;	/* assigned by the _ATOMIC_FETCHADD() expansion */
	_ATOMIC_FETCHADD(long, p, v, memory_order_seq_cst);
	return (tmp);
}
/*
 * Read and clear
 *
 * Each expansion atomically exchanges *p with 0 and leaves the old
 * value in a local 'tmp' declared by the calling wrapper.  T selects
 * the std::atomic/C11 atomic type the object is accessed through.
 */

#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
#define _ATOMIC_READANDCLEAR(T, p, mo) \
	std::atomic_##T *q = \
	    reinterpret_cast<std::atomic_##T *>(const_cast<T *>(p)); \
	tmp = q->exchange(0, std::mo)
#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
#define _ATOMIC_READANDCLEAR(T, p, mo) \
	atomic_##T *q = (atomic_##T *)RTEMS_DEVOLATILE(T *, p); \
	tmp = atomic_exchange_explicit(q, 0, mo)
#else
/* Fallback: interrupt-disabled critical section; 'mo' is unused. */
#define _ATOMIC_READANDCLEAR(T, p, mo) \
	rtems_interrupt_level level; \
	rtems_interrupt_disable(level); \
	tmp = *p; \
	*p = 0; \
	rtems_interrupt_enable(level);
#endif
/*
 * Atomically exchange *p with 0; returns the old value
 * (sequentially consistent).
 */
static inline int
atomic_readandclear_int(volatile int *p)
{
	int tmp;	/* assigned by the _ATOMIC_READANDCLEAR() expansion */
	_ATOMIC_READANDCLEAR(int, p, memory_order_seq_cst);
	return (tmp);
}
/*
 * Atomically exchange the 32-bit object *p with 0; returns the old
 * value (sequentially consistent).
 */
static inline uint32_t
atomic_readandclear_32(volatile uint32_t *p)
{
	uint32_t tmp;	/* assigned by the _ATOMIC_READANDCLEAR() expansion */
	_ATOMIC_READANDCLEAR(uint_least32_t, p, memory_order_seq_cst);
	return (tmp);
}
/*
 * Atomically exchange the long *p with 0; returns the old value
 * (sequentially consistent).
 */
static inline long
atomic_readandclear_long(volatile long *p)
{
	long tmp;	/* assigned by the _ATOMIC_READANDCLEAR() expansion */
	_ATOMIC_READANDCLEAR(long, p, memory_order_seq_cst);
	return (tmp);
}
-static inline void
-atomic_store_rel_long(volatile long *p, long v)
-{
-#if defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_ATOMIC)
- std::atomic_long *q =
- reinterpret_cast<std::atomic_long *>(const_cast<long *>(p));
-
- q->store(v, std::memory_order_release);
-#elif defined(_RTEMS_BSD_MACHINE_ATOMIC_USE_STDATOMIC)
- atomic_long *q = (atomic_long *)RTEMS_DEVOLATILE(long *, p);
-
- atomic_store_explicit(q, v, memory_order_release);
-#else
- *p = v;
- RTEMS_COMPILER_MEMORY_BARRIER();
-#endif
-}
+/*
+ * Thread fence
+ */
static inline void
atomic_thread_fence_acq(void)