From a04182c608d31470d3ea3ec8649e1dcab83376f1 Mon Sep 17 00:00:00 2001 From: WeiY Date: Mon, 15 Jul 2013 23:31:14 +0800 Subject: clean up old atomic related files --- cpukit/score/Makefile.am | 3 - .../score/include/rtems/score/genericatomicops.h | 274 ------------ .../score/include/rtems/score/genericcpuatomic.h | 62 --- cpukit/score/inline/rtems/score/atomic.inl | 496 --------------------- cpukit/score/preinstall.am | 12 - 5 files changed, 847 deletions(-) delete mode 100644 cpukit/score/include/rtems/score/genericatomicops.h delete mode 100644 cpukit/score/include/rtems/score/genericcpuatomic.h delete mode 100644 cpukit/score/inline/rtems/score/atomic.inl (limited to 'cpukit/score') diff --git a/cpukit/score/Makefile.am b/cpukit/score/Makefile.am index 338c797a30..793d840c25 100644 --- a/cpukit/score/Makefile.am +++ b/cpukit/score/Makefile.am @@ -62,8 +62,6 @@ include_rtems_score_HEADERS += include/rtems/score/cpuopts.h include_rtems_score_HEADERS += include/rtems/score/basedefs.h if ATOMIC include_rtems_score_HEADERS += include/rtems/score/atomic.h -include_rtems_score_HEADERS += include/rtems/score/genericcpuatomic.h -include_rtems_score_HEADERS += include/rtems/score/genericatomicops.h include_rtems_score_HEADERS += include/rtems/score/cpustdatomic.h endif @@ -109,7 +107,6 @@ include_rtems_score_HEADERS += inline/rtems/score/tod.inl include_rtems_score_HEADERS += inline/rtems/score/tqdata.inl include_rtems_score_HEADERS += inline/rtems/score/watchdog.inl include_rtems_score_HEADERS += inline/rtems/score/wkspace.inl -include_rtems_score_HEADERS += inline/rtems/score/atomic.inl if HAS_PTHREADS include_rtems_score_HEADERS += inline/rtems/score/corespinlock.inl diff --git a/cpukit/score/include/rtems/score/genericatomicops.h b/cpukit/score/include/rtems/score/genericatomicops.h deleted file mode 100644 index a6c09d32cb..0000000000 --- a/cpukit/score/include/rtems/score/genericatomicops.h +++ /dev/null @@ -1,274 +0,0 @@ -/** - * @file rtems/score/genericatomicops.h - * - * This include file includes the general atomic functions - * for all the uniprocessors or working in UP mode architectures. - * If the architecture is working at the UP mode, the atomic - * operations will be simulated by disable/enable-irq functions. - * Supposing that if the macro "RTEMS_SMP" is defined it works - * in SMP mode, otherwise UP mode. - */ - -/* - * COPYRIGHT (c) 2013 Deng Hengyi. - * - * The license and distribution terms for this file may be - * found in the file LICENSE in this distribution or at - */ - -#ifndef _RTEMS_SCORE_GENERAL_ATOMIC_OPS_H -#define _RTEMS_SCORE_GEMERAL_ATOMIC_OPS_H - -#include -#include - -#ifdef __cplusplus -extern "C" { -#endif - -/** - * @defgroup RTEMS generic UP atomic implementation - * - */ - -/**@{*/ - -/** - * @brief Atomically load an atomic type value from address @a address. - */ -#define ATOMIC_LOAD(NAME, TYPE) \ -RTEMS_INLINE_ROUTINE Atomic_##TYPE _CPU_Atomic_Load_##NAME( \ - volatile Atomic_##TYPE *address \ -) \ -{ \ - Atomic_##TYPE tmp; \ - ISR_Level level; \ - \ - _ISR_Disable( level ); \ - tmp = *address; \ - _ISR_Enable( level ); \ - return tmp; \ -} \ - -ATOMIC_LOAD(int, Int); -ATOMIC_LOAD(acq_int, Int); -ATOMIC_LOAD(long, Long); -ATOMIC_LOAD(acq_long, Long); -ATOMIC_LOAD(ptr, Pointer); -ATOMIC_LOAD(acq_ptr, Pointer); -ATOMIC_LOAD(32, Int32); -ATOMIC_LOAD(acq_32, Int32); -ATOMIC_LOAD(64, Int64); -ATOMIC_LOAD(acq_64, Int64); - -/** - * @brief Atomically store an atomic type value @a value into address @a - * address. 
- */ -#define ATOMIC_STORE(NAME, TYPE) \ -RTEMS_INLINE_ROUTINE void _CPU_Atomic_Store_##NAME( \ - volatile Atomic_##TYPE *address, \ - Atomic_##TYPE value \ -) \ -{ \ - ISR_Level level; \ - \ - _ISR_Disable( level ); \ - *address = value; \ - _ISR_Enable( level ); \ -} \ - -ATOMIC_STORE(int, Int); -ATOMIC_STORE(rel_int, Int); -ATOMIC_STORE(long, Long); -ATOMIC_STORE(rel_long, Long); -ATOMIC_STORE(ptr, Pointer); -ATOMIC_STORE(rel_ptr, Pointer); -ATOMIC_STORE(32, Int32); -ATOMIC_STORE(rel_32, Int32); -ATOMIC_STORE(64, Int64); -ATOMIC_STORE(rel_64, Int64); - -/** - * @brief Atomically load-add-store an atomic type value @a value into address - * @a address. - */ -#define ATOMIC_FETCH_ADD(NAME, TYPE) \ -RTEMS_INLINE_ROUTINE void _CPU_Atomic_Fetch_add_##NAME( \ - volatile Atomic_##TYPE *address, \ - Atomic_##TYPE value \ -) \ -{ \ - ISR_Level level; \ - \ - _ISR_Disable( level ); \ - *address += value; \ - _ISR_Enable( level ); \ -} \ - -ATOMIC_FETCH_ADD(int, Int); -ATOMIC_FETCH_ADD(acq_int, Int); -ATOMIC_FETCH_ADD(rel_int, Int); -ATOMIC_FETCH_ADD(long, Long); -ATOMIC_FETCH_ADD(acq_long, Long); -ATOMIC_FETCH_ADD(rel_long, Long); -ATOMIC_FETCH_ADD(ptr, Pointer); -ATOMIC_FETCH_ADD(acq_ptr, Pointer); -ATOMIC_FETCH_ADD(rel_ptr, Pointer); -ATOMIC_FETCH_ADD(32, Int32); -ATOMIC_FETCH_ADD(acq_32, Int32); -ATOMIC_FETCH_ADD(rel_32, Int32); -ATOMIC_FETCH_ADD(64, Int64); -ATOMIC_FETCH_ADD(acq_64, Int64); -ATOMIC_FETCH_ADD(rel_64, Int64); - -/** - * @brief Atomically load-sub-store an atomic type value @a value into address - * @a address. - */ -#define ATOMIC_FETCH_SUB(NAME, TYPE) \ -RTEMS_INLINE_ROUTINE void _CPU_Atomic_Fetch_sub_##NAME( \ - volatile Atomic_##TYPE *address, \ - Atomic_##TYPE value \ -) \ -{ \ - ISR_Level level; \ - \ - _ISR_Disable( level ); \ - *address -= value; \ - _ISR_Enable( level ); \ -} \ - -ATOMIC_FETCH_SUB(int, Int); -ATOMIC_FETCH_SUB(acq_int, Int); -ATOMIC_FETCH_SUB(rel_int, Int); -ATOMIC_FETCH_SUB(long, Long); -ATOMIC_FETCH_SUB(acq_long, Long); -ATOMIC_FETCH_SUB(rel_long, Long); -ATOMIC_FETCH_SUB(ptr, Pointer); -ATOMIC_FETCH_SUB(acq_ptr, Pointer); -ATOMIC_FETCH_SUB(rel_ptr, Pointer); -ATOMIC_FETCH_SUB(32, Int32); -ATOMIC_FETCH_SUB(acq_32, Int32); -ATOMIC_FETCH_SUB(rel_32, Int32); -ATOMIC_FETCH_SUB(64, Int64); -ATOMIC_FETCH_SUB(acq_64, Int64); -ATOMIC_FETCH_SUB(rel_64, Int64); - -/** - * @brief Atomically load-or-store an atomic type value @a value into address - * @a address. - */ -#define ATOMIC_FETCH_OR(NAME, TYPE) \ -RTEMS_INLINE_ROUTINE void _CPU_Atomic_Fetch_or_##NAME( \ - volatile Atomic_##TYPE *address, \ - Atomic_##TYPE value \ -) \ -{ \ - ISR_Level level; \ - \ - _ISR_Disable( level ); \ - *address |= value; \ - _ISR_Enable( level ); \ -} \ - -ATOMIC_FETCH_OR(int, Int); -ATOMIC_FETCH_OR(acq_int, Int); -ATOMIC_FETCH_OR(rel_int, Int); -ATOMIC_FETCH_OR(long, Long); -ATOMIC_FETCH_OR(acq_long, Long); -ATOMIC_FETCH_OR(rel_long, Long); -ATOMIC_FETCH_OR(ptr, Pointer); -ATOMIC_FETCH_OR(acq_ptr, Pointer); -ATOMIC_FETCH_OR(rel_ptr, Pointer); -ATOMIC_FETCH_OR(32, Int32); -ATOMIC_FETCH_OR(acq_32, Int32); -ATOMIC_FETCH_OR(rel_32, Int32); -ATOMIC_FETCH_OR(64, Int64); -ATOMIC_FETCH_OR(acq_64, Int64); -ATOMIC_FETCH_OR(rel_64, Int64); - -/** - * @brief Atomically load-and-store an atomic type value @a value into address - * @a address. 
- */ -#define ATOMIC_FETCH_AND(NAME, TYPE) \ -RTEMS_INLINE_ROUTINE void _CPU_Atomic_Fetch_and_##NAME( \ - volatile Atomic_##TYPE *address, \ - Atomic_##TYPE value \ -) \ -{ \ - ISR_Level level; \ - \ - _ISR_Disable( level ); \ - *address &= value; \ - _ISR_Enable( level ); \ -} \ - -ATOMIC_FETCH_AND(int, Int); -ATOMIC_FETCH_AND(acq_int, Int); -ATOMIC_FETCH_AND(rel_int, Int); -ATOMIC_FETCH_AND(long, Long); -ATOMIC_FETCH_AND(acq_long, Long); -ATOMIC_FETCH_AND(rel_long, Long); -ATOMIC_FETCH_AND(ptr, Pointer); -ATOMIC_FETCH_AND(acq_ptr, Pointer); -ATOMIC_FETCH_AND(rel_ptr, Pointer); -ATOMIC_FETCH_AND(32, Int32); -ATOMIC_FETCH_AND(acq_32, Int32); -ATOMIC_FETCH_AND(rel_32, Int32); -ATOMIC_FETCH_AND(64, Int64); -ATOMIC_FETCH_AND(acq_64, Int64); -ATOMIC_FETCH_AND(rel_64, Int64); - -/** - * @brief Atomically compare the value stored at @a address with @a - * old_value and if the two values are equal, update the value of @a - * address with @a new_value. Returns zero if the compare failed, - * nonzero otherwise. - */ -#define ATOMIC_COMPARE_EXCHANGE(NAME, TYPE) \ -RTEMS_INLINE_ROUTINE int _CPU_Atomic_Compare_exchange_##NAME( \ - volatile Atomic_##TYPE *address, \ - Atomic_##TYPE old_value, \ - Atomic_##TYPE new_value \ -) \ -{ \ - ISR_Level level; \ - int ret; \ - \ - _ISR_Disable( level ); \ - if (*address == old_value) { \ - *address = new_value; \ - ret = TRUE; \ - } else { \ - ret = FALSE; \ - } \ - _ISR_Enable( level ); \ - \ - return ret; \ -} \ - -ATOMIC_COMPARE_EXCHANGE(int, Int); -ATOMIC_COMPARE_EXCHANGE(acq_int, Int); -ATOMIC_COMPARE_EXCHANGE(rel_int, Int); -ATOMIC_COMPARE_EXCHANGE(long, Long); -ATOMIC_COMPARE_EXCHANGE(acq_long, Long); -ATOMIC_COMPARE_EXCHANGE(rel_long, Long); -ATOMIC_COMPARE_EXCHANGE(ptr, Pointer); -ATOMIC_COMPARE_EXCHANGE(acq_ptr, Pointer); -ATOMIC_COMPARE_EXCHANGE(rel_ptr, Pointer); -ATOMIC_COMPARE_EXCHANGE(32, Int32); -ATOMIC_COMPARE_EXCHANGE(acq_32, Int32); -ATOMIC_COMPARE_EXCHANGE(rel_32, Int32); -ATOMIC_COMPARE_EXCHANGE(64, Int64); -ATOMIC_COMPARE_EXCHANGE(acq_64, Int64); -ATOMIC_COMPARE_EXCHANGE(rel_64, Int64); - -#ifdef __cplusplus -} -#endif - -/**@}*/ -#endif -/* end of include file */ diff --git a/cpukit/score/include/rtems/score/genericcpuatomic.h b/cpukit/score/include/rtems/score/genericcpuatomic.h deleted file mode 100644 index 2599d0d30d..0000000000 --- a/cpukit/score/include/rtems/score/genericcpuatomic.h +++ /dev/null @@ -1,62 +0,0 @@ -/** - * @file rtems/score/genericcpuatomic.h - * - * This include file includes the general atomic data type - * for all the architecture. - */ - -/* - * COPYRIGHT (c) 2012 Deng Hengyi. 
- * - * The license and distribution terms for this file may be - * found in the file LICENSE in this distribution or at - */ - -#ifndef _RTEMS_SCORE_GENERAL_ATOMIC_CPU_H -#define _RTEMS_SCORE_GEMERAL_ATOMIC_CPU_H - -#include - -#ifdef __cplusplus -extern "C" { -#endif - -/** - * @defgroup RTEMS general atomic data type - * - */ - -/**@{*/ - -/** - * @brief atomic operation unsigned integer type - */ -typedef unsigned int Atomic_Int; - -/** - * @brief atomic operation unsigned long integer type - */ -typedef unsigned long Atomic_Long; - -/** - * @brief atomic operation unsigned 32-bit integer type - */ -typedef uint32_t Atomic_Int32; - -/** - * @brief atomic operation unsigned 64-bit integer type - */ -typedef uint64_t Atomic_Int64; - -/** - * @brief atomic operation unsigned integer the size of a pointer type - */ -typedef uintptr_t Atomic_Pointer; - -#ifdef __cplusplus -} -#endif - -/**@}*/ -#endif -/* end of include file */ diff --git a/cpukit/score/inline/rtems/score/atomic.inl b/cpukit/score/inline/rtems/score/atomic.inl deleted file mode 100644 index 313366c79f..0000000000 --- a/cpukit/score/inline/rtems/score/atomic.inl +++ /dev/null @@ -1,496 +0,0 @@ -/* - * Atomic Manager - * - * COPYRIGHT (c) 2012 Deng Hengyi. - * - * The license and distribution terms for this file may be - * found in the file LICENSE in this distribution or at - * http://www.rtems.com/license/LICENSE. - */ - -/* - * - * The functions in this file implement the API to the RTEMS Atomic Manager and - * The API is designed to be compatable with C1X atomic definition as far as - * possible. And its implementation reuses the FreeBSD kernel atomic operation. - * The functions below are implemented with CPU dependent inline routines - * found in the path - * - * rtems/cpukit/score/cpu/xxx/rtems/score/cpuatomic.h - * - * In the event that a CPU does not support a specific atomic function it has, - * the CPU dependent routine does nothing (but does exist). - */ - -#ifndef _RTEMS_SCORE_ATOMIC_H -# error "Never use directly; include instead." 
-#endif - -#include - -#ifndef _RTEMS_SCORE_ATOMIC_INL -#define _RTEMS_SCORE_ATOMIC_INL - -RTEMS_INLINE_ROUTINE Atomic_Int _Atomic_Load_int( - volatile Atomic_Int *address, - Atomic_Memory_barrier memory_barrier -) -{ - if(ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Load_acq_int(address); - return _CPU_Atomic_Load_int(address); -} - -RTEMS_INLINE_ROUTINE Atomic_Long _Atomic_Load_long( - volatile Atomic_Long *address, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Load_acq_long(address); - return _CPU_Atomic_Load_long(address); -} - -RTEMS_INLINE_ROUTINE Atomic_Pointer _Atomic_Load_ptr( - volatile Atomic_Pointer *address, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Load_acq_ptr(address); - return _CPU_Atomic_Load_ptr(address); -} - -RTEMS_INLINE_ROUTINE Atomic_Int32 _Atomic_Load_32( - volatile Atomic_Int32 *address, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Load_acq_32(address); - return _CPU_Atomic_Load_32(address); -} - -RTEMS_INLINE_ROUTINE Atomic_Int64 _Atomic_Load_64( - volatile Atomic_Int64 *address, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Load_acq_64(address); - return _CPU_Atomic_Load_64(address); -} - - -RTEMS_INLINE_ROUTINE void _Atomic_Store_int( - volatile Atomic_Int *address, - Atomic_Int value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Store_rel_int(address, value); - return _CPU_Atomic_Store_int(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Store_long( - volatile Atomic_Long *address, - Atomic_Long value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Store_rel_long(address, value); - return _CPU_Atomic_Store_long(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Store_ptr( - volatile Atomic_Pointer *address, - Atomic_Pointer value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Store_rel_ptr(address, value); - return _CPU_Atomic_Store_ptr(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Store_32( - volatile Atomic_Int32 *address, - Atomic_Int32 value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Store_rel_32(address, value); - return _CPU_Atomic_Store_32(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Store_64( - volatile Atomic_Int64 *address, - Atomic_Int64 value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Store_rel_64(address, value); - return _CPU_Atomic_Store_64(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_int( - volatile Atomic_Int *address, - Atomic_Int value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_add_acq_int(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_add_rel_int(address, value); - else - return _CPU_Atomic_Fetch_add_int(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_long( - volatile Atomic_Long *address, - Atomic_Long value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - 
return _CPU_Atomic_Fetch_add_acq_long(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_add_rel_long(address, value); - else - return _CPU_Atomic_Fetch_add_long(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_ptr( - volatile Atomic_Pointer *address, - Atomic_Pointer value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_add_acq_ptr(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_add_rel_ptr(address, value); - else - return _CPU_Atomic_Fetch_add_ptr(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_32( - volatile Atomic_Int32 *address, - Atomic_Int32 value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_add_acq_32(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_add_rel_32(address, value); - else - return _CPU_Atomic_Fetch_add_32(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_add_64( - volatile Atomic_Int64 *address, - Atomic_Int64 value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_add_acq_64(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_add_rel_64(address, value); - else - return _CPU_Atomic_Fetch_add_64(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_int( - volatile Atomic_Int *address, - Atomic_Int value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_sub_acq_int(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_sub_rel_int(address, value); - else - return _CPU_Atomic_Fetch_sub_int(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_long( - volatile Atomic_Long *address, - Atomic_Long value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_sub_acq_long(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_sub_rel_long(address, value); - else - return _CPU_Atomic_Fetch_sub_long(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_ptr( - volatile Atomic_Pointer *address, - Atomic_Pointer value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_sub_acq_ptr(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_sub_rel_ptr(address, value); - else - return _CPU_Atomic_Fetch_sub_ptr(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_32( - volatile Atomic_Int32 *address, - Atomic_Int32 value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_sub_acq_32(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_sub_rel_32(address, value); - else - return _CPU_Atomic_Fetch_sub_32(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_sub_64( - volatile Atomic_Int64 *address, - Atomic_Int64 value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_sub_acq_64(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return 
_CPU_Atomic_Fetch_sub_rel_64(address, value); - else - return _CPU_Atomic_Fetch_sub_64(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_int( - volatile Atomic_Int *address, - Atomic_Int value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_or_acq_int(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_or_rel_int(address, value); - else - return _CPU_Atomic_Fetch_or_int(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_long( - volatile Atomic_Long *address, - Atomic_Long value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_or_acq_long(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_or_rel_long(address, value); - else - return _CPU_Atomic_Fetch_or_long(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_ptr( - volatile Atomic_Pointer *address, - Atomic_Pointer value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_or_acq_ptr(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_or_rel_ptr(address, value); - else - return _CPU_Atomic_Fetch_or_ptr(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_32( - volatile Atomic_Int32 *address, - Atomic_Int32 value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_or_acq_32(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_or_rel_32(address, value); - else - return _CPU_Atomic_Fetch_or_32(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_or_64( - volatile Atomic_Int64 *address, - Atomic_Int64 value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_or_acq_64(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_or_rel_64(address, value); - else - return _CPU_Atomic_Fetch_or_64(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_int( - volatile Atomic_Int *address, - Atomic_Int value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_and_acq_int(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_and_rel_int(address, value); - else - return _CPU_Atomic_Fetch_and_int(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_long( - volatile Atomic_Long *address, - Atomic_Long value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_and_acq_long(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_and_rel_long(address, value); - else - return _CPU_Atomic_Fetch_and_long(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_ptr( - volatile Atomic_Pointer *address, - Atomic_Pointer value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_and_acq_ptr(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_and_rel_ptr(address, value); - else - return _CPU_Atomic_Fetch_and_ptr(address, value); -} - -RTEMS_INLINE_ROUTINE void 
_Atomic_Fetch_and_32( - volatile Atomic_Int32 *address, - Atomic_Int32 value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_and_acq_32(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_and_rel_32(address, value); - else - return _CPU_Atomic_Fetch_and_32(address, value); -} - -RTEMS_INLINE_ROUTINE void _Atomic_Fetch_and_64( - volatile Atomic_Int64 *address, - Atomic_Int64 value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_and_acq_64(address, value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Fetch_and_rel_64(address, value); - else - return _CPU_Atomic_Fetch_and_64(address, value); -} - -RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_int( - volatile Atomic_Int *address, - Atomic_Int old_value, - Atomic_Int new_value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Compare_exchange_acq_int(address, old_value, new_value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Compare_exchange_rel_int(address, old_value, new_value); - else - return _CPU_Atomic_Compare_exchange_int(address, old_value, new_value); -} - -RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_long( - volatile Atomic_Long *address, - Atomic_Long old_value, - Atomic_Long new_value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Compare_exchange_acq_long(address, old_value, new_value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Compare_exchange_rel_long(address, old_value, new_value); - else - return _CPU_Atomic_Compare_exchange_long(address, old_value, new_value); -} - -RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_ptr( - volatile Atomic_Pointer *address, - Atomic_Pointer old_value, - Atomic_Pointer new_value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Compare_exchange_acq_ptr(address, old_value, new_value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Compare_exchange_rel_ptr(address, old_value, new_value); - else - return _CPU_Atomic_Compare_exchange_ptr(address, old_value, new_value); -} - -RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_32( - volatile Atomic_Int32 *address, - Atomic_Int32 old_value, - Atomic_Int32 new_value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Compare_exchange_acq_32(address, old_value, new_value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Compare_exchange_rel_32(address, old_value, new_value); - else - return _CPU_Atomic_Compare_exchange_32(address, old_value, new_value); -} - -RTEMS_INLINE_ROUTINE int _Atomic_Compare_exchange_64( - volatile Atomic_Int64 *address, - Atomic_Int64 old_value, - Atomic_Int64 new_value, - Atomic_Memory_barrier memory_barrier -) -{ - if (ATOMIC_ACQUIRE_BARRIER == memory_barrier) - return _CPU_Atomic_Compare_exchange_acq_64(address, old_value, new_value); - else if (ATOMIC_RELEASE_BARRIER == memory_barrier) - return _CPU_Atomic_Compare_exchange_rel_64(address, old_value, new_value); - else - return _CPU_Atomic_Compare_exchange_64(address, old_value, new_value); -} - -#endif -/* end of include file */ diff --git a/cpukit/score/preinstall.am 
b/cpukit/score/preinstall.am
index a48d21eec6..130be84d99 100644
--- a/cpukit/score/preinstall.am
+++ b/cpukit/score/preinstall.am
@@ -228,14 +228,6 @@ $(PROJECT_INCLUDE)/rtems/score/atomic.h: include/rtems/score/atomic.h $(PROJECT_
 	$(INSTALL_DATA) $< $(PROJECT_INCLUDE)/rtems/score/atomic.h
 PREINSTALL_FILES += $(PROJECT_INCLUDE)/rtems/score/atomic.h
 
-$(PROJECT_INCLUDE)/rtems/score/genericcpuatomic.h: include/rtems/score/genericcpuatomic.h $(PROJECT_INCLUDE)/rtems/score/$(dirstamp)
-	$(INSTALL_DATA) $< $(PROJECT_INCLUDE)/rtems/score/genericcpuatomic.h
-PREINSTALL_FILES += $(PROJECT_INCLUDE)/rtems/score/genericcpuatomic.h
-
-$(PROJECT_INCLUDE)/rtems/score/genericatomicops.h: include/rtems/score/genericatomicops.h $(PROJECT_INCLUDE)/rtems/score/$(dirstamp)
-	$(INSTALL_DATA) $< $(PROJECT_INCLUDE)/rtems/score/genericatomicops.h
-PREINSTALL_FILES += $(PROJECT_INCLUDE)/rtems/score/genericatomicops.h
-
 $(PROJECT_INCLUDE)/rtems/score/cpustdatomic.h: include/rtems/score/cpustdatomic.h $(PROJECT_INCLUDE)/rtems/score/$(dirstamp)
 	$(INSTALL_DATA) $< $(PROJECT_INCLUDE)/rtems/score/cpustdatomic.h
 PREINSTALL_FILES += $(PROJECT_INCLUDE)/rtems/score/cpustdatomic.h
@@ -367,10 +359,6 @@ $(PROJECT_INCLUDE)/rtems/score/wkspace.inl: inline/rtems/score/wkspace.inl $(PRO
 	$(INSTALL_DATA) $< $(PROJECT_INCLUDE)/rtems/score/wkspace.inl
 PREINSTALL_FILES += $(PROJECT_INCLUDE)/rtems/score/wkspace.inl
 
-$(PROJECT_INCLUDE)/rtems/score/atomic.inl: inline/rtems/score/atomic.inl $(PROJECT_INCLUDE)/rtems/score/$(dirstamp)
-	$(INSTALL_DATA) $< $(PROJECT_INCLUDE)/rtems/score/atomic.inl
-PREINSTALL_FILES += $(PROJECT_INCLUDE)/rtems/score/atomic.inl
-
 if HAS_PTHREADS
 $(PROJECT_INCLUDE)/rtems/score/corespinlock.inl: inline/rtems/score/corespinlock.inl $(PROJECT_INCLUDE)/rtems/score/$(dirstamp)
 	$(INSTALL_DATA) $< $(PROJECT_INCLUDE)/rtems/score/corespinlock.inl
--
cgit v1.2.3
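
Note for readers of this cleanup: the deleted genericatomicops.h provided the uniprocessor fallback described in its own header comment, simulating each atomic operation by bracketing a plain read-modify-write with _ISR_Disable()/_ISR_Enable(), while the retained cpustdatomic.h header is the layer that the deleted atomic.inl comment describes as aiming for C1X (C11) compatibility. The sketch below contrasts the two patterns for a single 32-bit fetch-add. It is illustrative only and not part of the RTEMS tree: local_irq_disable/local_irq_enable, up_fetch_add_32, and c11_fetch_add_32 are hypothetical stand-ins invented here so the example compiles on a hosted C11 system; only the <stdatomic.h> calls are standard API.

/*
 * Illustrative sketch only -- not RTEMS code.  Assumes a hosted C11 compiler.
 */
#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-ins for the _ISR_Disable()/_ISR_Enable() pair used by
 * the deleted genericatomicops.h; on a hosted system they do nothing, on a
 * real uniprocessor target they would mask interrupts around the access. */
typedef unsigned int isr_level;
#define local_irq_disable( level ) do { (void) (level); } while ( 0 )
#define local_irq_enable( level )  do { (void) (level); } while ( 0 )

/*
 * Style 1 -- the deleted uniprocessor fallback: a plain read-modify-write
 * inside an interrupt-disabled section, matching the void-returning
 * ATOMIC_FETCH_ADD() macro above (no previous value is returned).
 */
static void up_fetch_add_32( volatile uint32_t *address, uint32_t value )
{
  isr_level level = 0;

  local_irq_disable( level );   /* nothing else can run on a UP system */
  *address += value;
  local_irq_enable( level );
}

/*
 * Style 2 -- the C11 <stdatomic.h> form that the retained cpustdatomic.h
 * layer builds on: a true fetch-and-add with an explicit memory order,
 * standing in for the old ATOMIC_ACQUIRE_BARRIER / ATOMIC_RELEASE_BARRIER
 * argument of the removed _Atomic_Fetch_add_32() wrapper.
 */
static uint32_t c11_fetch_add_32(
  atomic_uint_fast32_t *address,
  uint32_t              value,
  memory_order          order
)
{
  return (uint32_t) atomic_fetch_add_explicit( address, value, order );
}

int main( void )
{
  volatile uint32_t    plain = 5;
  atomic_uint_fast32_t counter;

  atomic_init( &counter, 5 );

  up_fetch_add_32( &plain, 3 );
  (void) c11_fetch_add_32( &counter, 3, memory_order_release );

  printf(
    "plain = %u, atomic = %u\n",
    (unsigned) plain,
    (unsigned) atomic_load_explicit( &counter, memory_order_acquire )
  );

  return 0;
}

The interrupt-masking trick in style 1 is only sound on a single processor, which is why the removed header restricts it to non-SMP (UP) configurations; the <stdatomic.h>-backed path kept by this commit works for both cases.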