Index: sys/arm/include/atomic.h
===================================================================
--- sys/arm/include/atomic.h
+++ sys/arm/include/atomic.h
@@ -55,6 +55,13 @@
 #include <machine/atomic-v4.h>
 #endif /* Arch >= v6 */
 
+static __inline u_long
+atomic_swap_long(volatile u_long *p, u_long v)
+{
+
+	return (atomic_swap_32((volatile uint32_t *)p, v));
+}
+
 #define	atomic_clear_ptr	atomic_clear_32
 #define	atomic_clear_acq_ptr	atomic_clear_acq_32
 #define	atomic_clear_rel_ptr	atomic_clear_rel_32
Index: sys/compat/linuxkpi/common/include/asm/atomic-long.h
===================================================================
--- sys/compat/linuxkpi/common/include/asm/atomic-long.h
+++ sys/compat/linuxkpi/common/include/asm/atomic-long.h
@@ -78,15 +78,7 @@
 static inline long
 atomic_long_xchg(atomic_long_t *v, long val)
 {
-#if defined(__i386__) || defined(__amd64__) || defined(__aarch64__)
 	return atomic_swap_long(&v->counter, val);
-#else
-	long ret = atomic_long_read(v);
-
-	while (!atomic_fcmpset_long(&v->counter, &ret, val))
-		;
-	return (ret);
-#endif
 }
 
 static inline long
Index: sys/compat/linuxkpi/common/include/asm/atomic.h
===================================================================
--- sys/compat/linuxkpi/common/include/asm/atomic.h
+++ sys/compat/linuxkpi/common/include/asm/atomic.h
@@ -128,15 +128,7 @@
 static inline int
 atomic_xchg(atomic_t *v, int i)
 {
-#if !defined(__mips__)
 	return (atomic_swap_int(&v->counter, i));
-#else
-	int ret = atomic_read(v);
-
-	while (!atomic_fcmpset_int(&v->counter, &ret, i))
-		;
-	return (ret);
-#endif
 }
 
 static inline int
Index: sys/mips/include/atomic.h
===================================================================
--- sys/mips/include/atomic.h
+++ sys/mips/include/atomic.h
@@ -755,4 +755,68 @@
 #define	atomic_store_rel_ptr	atomic_store_rel_long
 #define	atomic_readandclear_ptr	atomic_readandclear_long
 
+static __inline unsigned int
+atomic_swap_int(volatile unsigned int *ptr, const unsigned int value)
+{
+	unsigned int retval;
+
+	retval = *ptr;
+
+	while (!atomic_fcmpset_int(ptr, &retval, value))
+		;
+	return (retval);
+}
+
+static __inline uint32_t
+atomic_swap_32(volatile uint32_t *ptr, const uint32_t value)
+{
+	uint32_t retval;
+
+	retval = *ptr;
+
+	while (!atomic_fcmpset_32(ptr, &retval, value))
+		;
+	return (retval);
+}
+
+#if defined(__mips_n64) || defined(__mips_n32)
+static __inline uint64_t
+atomic_swap_64(volatile uint64_t *ptr, const uint64_t value)
+{
+	uint64_t retval;
+
+	retval = *ptr;
+
+	while (!atomic_fcmpset_64(ptr, &retval, value))
+		;
+	return (retval);
+}
+#endif
+
+static __inline unsigned long
+atomic_swap_long(volatile unsigned long *ptr, const unsigned long value)
+{
+	unsigned long retval;
+
+	retval = *ptr;
+
+	while (!atomic_fcmpset_32((volatile uint32_t *)ptr,
+	    (uint32_t *)&retval, value))
+		;
+	return (retval);
+}
+
+static __inline uintptr_t
+atomic_swap_ptr(volatile uintptr_t *ptr, const uintptr_t value)
+{
+	uintptr_t retval;
+
+	retval = *ptr;
+
+	while (!atomic_fcmpset_32((volatile uint32_t *)ptr,
+	    (uint32_t *)&retval, value))
+		;
+	return (retval);
+}
+
 #endif /* ! _MACHINE_ATOMIC_H_ */
Index: sys/powerpc/include/atomic.h
===================================================================
--- sys/powerpc/include/atomic.h
+++ sys/powerpc/include/atomic.h
@@ -852,6 +852,9 @@
 #define	atomic_fetchadd_64	atomic_fetchadd_long
 #define	atomic_swap_long	atomic_swap_64
 #define	atomic_swap_ptr		atomic_swap_64
+#else
+#define	atomic_swap_long(p,v)	atomic_swap_32((volatile u_int *)(p), v)
+#define	atomic_swap_ptr(p,v)	atomic_swap_32((volatile u_int *)(p), v)
 #endif
 
 #undef __ATOMIC_REL
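For context (not part of the patch): the mips additions above emulate a swap with an atomic_fcmpset_*() retry loop. The current value is read once, a compare-and-set from that observed value to the new value is attempted, and on failure atomic_fcmpset_*() writes the freshly observed value back into the compare operand, so the loop retries until it succeeds and then returns the last observed value as the old contents. Below is a minimal stand-alone sketch of that same pattern; the function name emulated_swap_32 is hypothetical and is not defined anywhere in this change.

#include <sys/types.h>
#include <machine/atomic.h>

/*
 * Illustrative sketch only: exchange *p with "newval" using the
 * atomic_fcmpset_32() retry loop and return the previous contents.
 * This mirrors the atomic_swap_32() added to sys/mips/include/atomic.h;
 * the name emulated_swap_32 is hypothetical.
 */
static __inline uint32_t
emulated_swap_32(volatile uint32_t *p, uint32_t newval)
{
	uint32_t old;

	old = *p;	/* seed the expected value */
	while (!atomic_fcmpset_32(p, &old, newval))
		;	/* on failure, "old" now holds the current value */
	return (old);
}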