author     WeiY <wei.a.yang@gmail.com>  2013-07-15 23:31:08 +0800
committer  Sebastian Huber <sebastian.huber@embedded-brains.de>  2013-07-17 13:07:32 +0200
commit     1180099ed95e99ad31b33a87923286fd6c523d04 (patch)
tree       50a1653a8bf915f8e456eaf5b9399ff8ee3d27ef
parent     bsps/arm: Fix for top of the address space (diff)
download   rtems-1180099ed95e99ad31b33a87923286fd6c523d04.tar.bz2
New atomic API definition based on C11 atomic
-rw-r--r--  cpukit/score/include/rtems/score/atomic.h | 445
1 files changed, 232 insertions, 213 deletions
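
For orientation before the diff, here is a minimal usage sketch of the new C11-style API. It is not part of the patch: the counter object and helper functions are hypothetical names introduced for illustration, and ATOMIC_ORDER_RELAXED is assumed to be a member of the Atomic_Order enumeration (only ATOMIC_ORDER_ACQUIRE and ATOMIC_ORDER_RELEASE are referenced in the hunk below).

    #include <rtems/score/atomic.h>

    /* Hypothetical shared counter; Atomic_Uint is the atomic object type
     * used by the new API. */
    static volatile Atomic_Uint request_count;

    void request_count_init( void )
    {
      /* ATOMIC_ORDER_RELAXED is assumed to exist in Atomic_Order. */
      _Atomic_Store_uint( &request_count, 0, ATOMIC_ORDER_RELAXED );
    }

    uint_fast32_t request_count_bump( void )
    {
      /* The fetch operation returns the value before the addition. */
      return _Atomic_Fetch_add_uint( &request_count, 1, ATOMIC_ORDER_RELAXED ) + 1;
    }

    uint_fast32_t request_count_peek( void )
    {
      return _Atomic_Load_uint( &request_count, ATOMIC_ORDER_ACQUIRE );
    }
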
diff --git a/cpukit/score/include/rtems/score/atomic.h b/cpukit/score/include/rtems/score/atomic.h
index 3b62cb10cb..80b9ea7302 100644
--- a/cpukit/score/include/rtems/score/atomic.h
+++ b/cpukit/score/include/rtems/score/atomic.h
@@ -8,7 +8,7 @@
*/
/*
- * COPYRIGHT (c) 2012 Deng Hengyi.
+ * COPYRIGHT (c) 2012-2013 Deng Hengyi.
*
* The license and distribution terms for this file may be
* found in the file LICENSE in this distribution or at
@@ -32,241 +32,260 @@ extern "C" {
/**@{*/
/**
- * @brief the enumeration Atomic_Memory_barrier specifies the detailed regular
- * memory synchronization operations used in the atomic operation API
- * definitions.
+ * @brief Atomically load a value from an atomic object.
+ *
+ * @param object a pointer to the atomic object.
+ * @param order the memory order of type Atomic_Order.
+ *
+ * The order shall not be ATOMIC_ORDER_RELEASE.
*/
-typedef enum {
- /** no operation orders memory. */
- ATOMIC_RELAXED_BARRIER,
- /** a load operation performs an acquire operation on the affected memory
- * location. This flag guarantees that the effects of load operation are
- * completed before the effects of any later data accesses.
- */
- ATOMIC_ACQUIRE_BARRIER,
- /** a store operation performs a release operation on the affected memory
- * location. This flag guarantee that all effects of all previous data
- * accesses are completed before the store operation takes place.
- */
- ATOMIC_RELEASE_BARRIER
-} Atomic_Memory_barrier;
+RTEMS_INLINE_ROUTINE uint_fast32_t _Atomic_Load_uint(
+ volatile Atomic_Uint *object,
+ Atomic_Order order
+)
+{
+ return _CPU_atomic_Load_uint( object, order );
+}
+
+RTEMS_INLINE_ROUTINE uintptr_t _Atomic_Load_ptr(
+ volatile Atomic_Pointer *object,
+ Atomic_Order order
+)
+{
+ return _CPU_atomic_Load_ptr( object, order );
+}
/**
- * @brief Atomically load an atomic type value from address @a address with
- * a type of Atomic_Memory_barrier @a memory_barrier. The @a memory_barrier
- * shall not be ATOMIC_RELEASE_BARRIER.
+ * @brief Atomically store a value into an atomic object.
+ *
+ * @param object a pointer to the atomic object.
+ * @param value the value to be stored into the object.
+ * @param order the memory order of type Atomic_Order.
+ *
+ * The order shall not be ATOMIC_ORDER_ACQUIRE.
*/
-RTEMS_INLINE_ROUTINE Atomic_Int _Atomic_Load_int(
- volatile Atomic_Int *address,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE Atomic_Long _Atomic_Load_long(
- volatile Atomic_Long *address,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE Atomic_Pointer _Atomic_Load_ptr(
- volatile Atomic_Pointer *address,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE Atomic_Int32 _Atomic_Load_32(
- volatile Atomic_Int32 *address,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE Atomic_Int64 _Atomic_Load_64(
- volatile Atomic_Int64 *address,
- Atomic_Memory_barrier memory_barrier
-);
+RTEMS_INLINE_ROUTINE void _Atomic_Store_uint(
+ volatile Atomic_Uint *object,
+ uint_fast32_t value,
+ Atomic_Order order
+)
+{
+ _CPU_atomic_Store_uint( object, value, order );
+}
+
+RTEMS_INLINE_ROUTINE void _Atomic_Store_ptr(
+ volatile Atomic_Pointer *object,
+ uintptr_t value,
+ Atomic_Order order
+)
+{
+ _CPU_atomic_Store_ptr( object, value, order );
+}
/**
- * @brief Atomically store an atomic type value @a value into address @a
- * address with a type of Atomic_Memory_barrier @a memory_barrier. The @a
- * memory_barrier shall not be ATOMIC_ACQUIRE_BARRIER.
+ * @brief Atomically fetch and add a value to an atomic object.
+ *
+ * @param object a pointer to the atomic object.
+ * @param value the value to be added to the object.
+ * @param order the memory order of type Atomic_Order.
+ *
+ * @retval The value of the object before the addition.
*/
-RTEMS_INLINE_ROUTINE void _Atomic_Store_int(
- volatile Atomic_Int *address,
- Atomic_Int value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Store_long(
- volatile Atomic_Long *address,
- Atomic_Long value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Store_ptr(
- volatile Atomic_Pointer *address,
- Atomic_Pointer value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Store_32(
- volatile Atomic_Int32 *address,
- Atomic_Int32 value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Store_64(
- volatile Atomic_Int64 *address,
- Atomic_Int64 value,
- Atomic_Memory_barrier memory_barrier
-);
+RTEMS_INLINE_ROUTINE uint_fast32_t _Atomic_Fetch_add_uint(
+ volatile Atomic_Uint *object,
+ uint_fast32_t value,
+ Atomic_Order order
+)
+{
+ return _CPU_atomic_Fetch_add_uint( object, value, order );
+}
+
+RTEMS_INLINE_ROUTINE uintptr_t _Atomic_Fetch_add_ptr(
+ volatile Atomic_Pointer *object,
+ uintptr_t value,
+ Atomic_Order order
+)
+{
+ return _CPU_atomic_Fetch_add_ptr( object, value, order );
+}
/**
- * @brief Atomically load-add-store an atomic type value @a value into address
- * @a address with a type of Atomic_Memory_barrier @a memory_barrier.
+ * @brief Atomically fetch and subtract a value from an atomic object.
+ *
+ * @param object a pointer to the atomic object.
+ * @param value the value to be subtracted from the object.
+ * @param order the memory order of type Atomic_Order.
+ *
+ * @retval The value of the object before the subtraction.
*/
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_int(
- volatile Atomic_Int *address,
- Atomic_Int value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_long(
- volatile Atomic_Long *address,
- Atomic_Long value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_ptr(
- volatile Atomic_Pointer *address,
- Atomic_Pointer value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_32(
- volatile Atomic_Int32 *address,
- Atomic_Int32 value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_64(
- volatile Atomic_Int64 *address,
- Atomic_Int64 value,
- Atomic_Memory_barrier memory_barrier
-);
+RTEMS_INLINE_ROUTINE uint_fast32_t _Atomic_Fetch_sub_uint(
+ volatile Atomic_Uint *object,
+ uint_fast32_t value,
+ Atomic_Order order
+)
+{
+ return _CPU_atomic_Fetch_sub_uint( object, value, order );
+}
+
+RTEMS_INLINE_ROUTINE uintptr_t _Atomic_Fetch_sub_ptr(
+ volatile Atomic_Pointer *object,
+ uintptr_t value,
+ Atomic_Order order
+)
+{
+ return _CPU_atomic_Fetch_sub_ptr( object, value, order );
+}
/**
- * @brief Atomically load-sub-store an atomic type value @a value into address
- * @a address with a type of Atomic_Memory_barrier @a memory_barrier.
+ * @brief Atomically fetch and bitwise OR a value into an atomic object.
+ *
+ * @param object a pointer to the atomic object.
+ * @param value the value to be ORed into the object.
+ * @param order the memory order of type Atomic_Order.
+ *
+ * @retval The value of the object before the OR operation.
*/
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_int(
- volatile Atomic_Int *address,
- Atomic_Int value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_long(
- volatile Atomic_Long *address,
- Atomic_Long value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_ptr(
- volatile Atomic_Pointer *address,
- Atomic_Pointer value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_32(
- volatile Atomic_Int32 *address,
- Atomic_Int32 value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_64(
- volatile Atomic_Int64 *address,
- Atomic_Int64 value,
- Atomic_Memory_barrier memory_barrier
-);
+RTEMS_INLINE_ROUTINE uint_fast32_t _Atomic_Fetch_or_uint(
+ volatile Atomic_Uint *object,
+ uint_fast32_t value,
+ Atomic_Order order
+)
+{
+ return _CPU_atomic_Fetch_or_uint( object, value, order );
+}
+
+RTEMS_INLINE_ROUTINE uintptr_t _Atomic_Fetch_or_ptr(
+ volatile Atomic_Pointer *object,
+ uintptr_t value,
+ Atomic_Order order
+)
+{
+ return _CPU_atomic_Fetch_or_ptr( object, value, order );
+}
/**
- * @brief Atomically load-or-store an atomic type value @a value into address
- * @a address with a type of Atomic_Memory_barrier @a memory_barrier.
+ * @brief Atomically fetch and bitwise AND a value into an atomic object.
+ *
+ * @param object a pointer to the atomic object.
+ * @param value the value to be ANDed into the object.
+ * @param order the memory order of type Atomic_Order.
+ *
+ * @retval The value of the object before the AND operation.
*/
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_int(
- volatile Atomic_Int *address,
- Atomic_Int value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_long(
- volatile Atomic_Long *address,
- Atomic_Long value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_ptr(
- volatile Atomic_Pointer *address,
- Atomic_Pointer value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_32(
- volatile Atomic_Int32 *address,
- Atomic_Int32 value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_64(
- volatile Atomic_Int64 *address,
- Atomic_Int64 value,
- Atomic_Memory_barrier memory_barrier
-);
+RTEMS_INLINE_ROUTINE uint_fast32_t _Atomic_Fetch_and_uint(
+ volatile Atomic_Uint *object,
+ uint_fast32_t value,
+ Atomic_Order order
+)
+{
+ return _CPU_atomic_Fetch_and_uint( object, value, order );
+}
+
+RTEMS_INLINE_ROUTINE uintptr_t _Atomic_Fetch_and_ptr(
+ volatile Atomic_Pointer *object,
+ uintptr_t value,
+ Atomic_Order order
+)
+{
+ return _CPU_atomic_Fetch_and_ptr( object, value, order );
+}
+
+/**
+ * @brief Atomically exchange the value of an atomic object.
+ *
+ * @param object a pointer to the atomic object.
+ * @param value the new value to store into the object.
+ * @param order the memory order of type Atomic_Order.
+ *
+ * @retval The value of the object before the exchange.
+ */
+RTEMS_INLINE_ROUTINE uint_fast32_t _Atomic_Exchange_uint(
+ volatile Atomic_Uint *object,
+ uint_fast32_t value,
+ Atomic_Order order
+)
+{
+ return _CPU_atomic_Exchange_uint( object, value, order );
+}
+
+RTEMS_INLINE_ROUTINE uintptr_t _Atomic_Exchange_ptr(
+ volatile Atomic_Pointer *object,
+ uintptr_t value,
+ Atomic_Order order
+)
+{
+ return _CPU_atomic_Exchange_ptr( object, value, order );
+}
/**
- * @brief Atomically load-and-store an atomic type value @a value into address
- * @a address with a type of Atomic_Memory_barrier @a memory_barrier.
+ * @brief Atomically compare the value stored in the object with an
+ * expected old value and, if the two values are equal, store the new
+ * value into the object.
+ *
+ * @param object a pointer to the atomic object.
+ * @param old_value a pointer to the expected value of the object.
+ * @param new_value the value to store on a successful comparison.
+ * @param order_succ the memory order of type Atomic_Order for a successful exchange.
+ * @param order_fail the memory order of type Atomic_Order for a failed exchange.
+ *
+ * @retval true if the compare and exchange succeeded.
+ * @retval false if the compare and exchange failed.
*/
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_int(
- volatile Atomic_Int *address,
- Atomic_Int value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_long(
- volatile Atomic_Long *address,
- Atomic_Long value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_ptr(
- volatile Atomic_Pointer *address,
- Atomic_Pointer value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_32(
- volatile Atomic_Int32 *address,
- Atomic_Int32 value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_64(
- volatile Atomic_Int64 *address,
- Atomic_Int64 value,
- Atomic_Memory_barrier memory_barrier
-);
+RTEMS_INLINE_ROUTINE bool _Atomic_Compare_exchange_uint(
+ volatile Atomic_Uint *object,
+ uint_fast32_t *old_value,
+ uint_fast32_t new_value,
+ Atomic_Order order_succ,
+ Atomic_Order order_fail
+)
+{
+ return _CPU_atomic_Compare_exchange_uint( object, old_value, new_value,
+ order_succ, order_fail );
+}
+
+RTEMS_INLINE_ROUTINE bool _Atomic_Compare_exchange_ptr(
+ volatile Atomic_Pointer *object,
+ uintptr_t *old_value,
+ uintptr_t new_value,
+ Atomic_Order order_succ,
+ Atomic_Order order_fail
+)
+{
+ return _CPU_atomic_Compare_exchange_ptr( object, old_value, new_value,
+ order_succ, order_fail );
+}
/**
- * @brief Atomically compare the value stored at @a address with @a
- * old_value and if the two values are equal, update the value of @a
- * address with @a new_value. Returns zero if the compare failed,
- * nonzero otherwise. The operation uses a type of Atomic_Memory_barrier
- * @a memory_barrier.
+ * @brief Atomically clear an atomic flag object.
+ *
+ * @param[in, out] object a pointer to the atomic flag object.
+ * @param order the memory order of type Atomic_Order.
+ *
*/
-RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_int(
- volatile Atomic_Int *address,
- Atomic_Int old_value,
- Atomic_Int new_value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_long(
- volatile Atomic_Long *address,
- Atomic_Long old_value,
- Atomic_Long new_value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_ptr(
- volatile Atomic_Pointer *address,
- Atomic_Pointer old_value,
- Atomic_Pointer new_value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_32(
- volatile Atomic_Int32 *address,
- Atomic_Int32 old_value,
- Atomic_Int32 new_value,
- Atomic_Memory_barrier memory_barrier
-);
-RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_64(
- volatile Atomic_Int64 *address,
- Atomic_Int64 old_value,
- Atomic_Int64 new_value,
- Atomic_Memory_barrier memory_barrier
-);
+RTEMS_INLINE_ROUTINE void _Atomic_Clear_flag(
+ volatile Atomic_Flag *object,
+ Atomic_Order order
+)
+{
+ _CPU_atomic_Clear_flag( object, order );
+}
-#include <rtems/score/atomic.inl>
+/**
+ * @brief Atomically test and set an atomic flag object.
+ *
+ * @param[in, out] object a pointer to the atomic flag object.
+ * @param order the memory order of type Atomic_Order.
+ *
+ * @retval true if the flag was already set before the call.
+ * @retval false if the flag was previously clear.
+ */
+RTEMS_INLINE_ROUTINE bool _Atomic_Test_set_flag(
+ volatile Atomic_Flag *object,
+ Atomic_Order order
+)
+{
+ return _CPU_atomic_Test_set_flag( object, order );
+}
#ifdef __cplusplus
}
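
As a closing illustration of the compare-and-exchange and flag operations added above, here is a short sketch. It is not part of the patch: the names are hypothetical, ATOMIC_ORDER_RELAXED is assumed to be a member of Atomic_Order, the flag is assumed to start cleared, and the expected value is assumed to be updated on a failed compare, as in the C11 atomic_compare_exchange functions this API is modelled on.

    #include <stdint.h>
    #include <rtems/score/atomic.h>

    /* Lock-free saturating increment built on _Atomic_Compare_exchange_uint();
     * returns the value observed before the attempted increment. */
    uint_fast32_t saturating_increment( volatile Atomic_Uint *counter )
    {
      uint_fast32_t expected = _Atomic_Load_uint( counter, ATOMIC_ORDER_RELAXED );

      while ( expected != UINT_FAST32_MAX ) {
        /* On failure, expected is assumed to receive the current value of
         * the object, so the loop retries with the fresh value. */
        if (
          _Atomic_Compare_exchange_uint(
            counter,
            &expected,
            expected + 1,
            ATOMIC_ORDER_RELEASE,
            ATOMIC_ORDER_RELAXED
          )
        ) {
          break;
        }
      }

      return expected;
    }

    /* Spin-lock style use of the flag operations; assumes the test-and-set
     * returns true while the flag is already held by another owner. */
    static volatile Atomic_Flag lock_flag;

    void lock_acquire( void )
    {
      while ( _Atomic_Test_set_flag( &lock_flag, ATOMIC_ORDER_ACQUIRE ) ) {
        /* Busy wait until the owner clears the flag. */
      }
    }

    void lock_release( void )
    {
      _Atomic_Clear_flag( &lock_flag, ATOMIC_ORDER_RELEASE );
    }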