Index: sys/arm/include/atomic-v4.h
===================================================================
--- sys/arm/include/atomic-v4.h
+++ sys/arm/include/atomic-v4.h
@@ -543,6 +543,13 @@
 	atomic_subtract_32((volatile uint32_t *)p, v);
 }
 
+static __inline u_long
+atomic_swap_long(volatile u_long *p, u_long v)
+{
+
+	return (atomic_swap_32((volatile uint32_t *)p, v));
+}
+
 /*
  * ARMv5 does not support SMP.  For both kernel and user modes, only a
  * compiler barrier is needed for fences, since CPU is always
Index: sys/arm/include/atomic-v6.h
===================================================================
--- sys/arm/include/atomic-v6.h
+++ sys/arm/include/atomic-v6.h
@@ -768,6 +768,13 @@
 	return (ret);
 }
 
+static __inline u_long
+atomic_swap_long(volatile u_long *p, u_long v)
+{
+
+	return (atomic_swap_32((volatile uint32_t *)p, v));
+}
+
 static __inline uint64_t
 atomic_swap_64(volatile uint64_t *p, uint64_t v)
 {
Index: sys/compat/linuxkpi/common/include/asm/atomic-long.h
===================================================================
--- sys/compat/linuxkpi/common/include/asm/atomic-long.h
+++ sys/compat/linuxkpi/common/include/asm/atomic-long.h
@@ -78,15 +78,7 @@
 static inline long
 atomic_long_xchg(atomic_long_t *v, long val)
 {
-#if defined(__i386__) || defined(__amd64__) || defined(__aarch64__)
 	return atomic_swap_long(&v->counter, val);
-#else
-	long ret = atomic_long_read(v);
-
-	while (!atomic_fcmpset_long(&v->counter, &ret, val))
-		;
-	return (ret);
-#endif
 }
 
 static inline long
Index: sys/compat/linuxkpi/common/include/asm/atomic.h
===================================================================
--- sys/compat/linuxkpi/common/include/asm/atomic.h
+++ sys/compat/linuxkpi/common/include/asm/atomic.h
@@ -128,15 +128,7 @@
 static inline int
 atomic_xchg(atomic_t *v, int i)
 {
-#if !defined(__mips__)
 	return (atomic_swap_int(&v->counter, i));
-#else
-	int ret = atomic_read(v);
-
-	while (!atomic_fcmpset_int(&v->counter, &ret, i))
-		;
-	return (ret);
-#endif
 }
 
 static inline int
Index: sys/mips/include/atomic.h
===================================================================
--- sys/mips/include/atomic.h
+++ sys/mips/include/atomic.h
@@ -755,4 +755,46 @@
 #define	atomic_store_rel_ptr	atomic_store_rel_long
 #define	atomic_readandclear_ptr	atomic_readandclear_long
 
+/*
+ * Emulate atomic swap with an atomic_fcmpset() loop: the "before"
+ * and "after" arguments select release and acquire fences around it.
+ */
+#define	ATOMIC_SWAP(sem, suf, dtype, before, after)		\
+static __inline dtype						\
+atomic_swap##sem##suf(volatile dtype *ptr, const dtype value)	\
+{								\
+	dtype retval;						\
+								\
+	if (before)						\
+		atomic_thread_fence_rel();			\
+	retval = *ptr;						\
+	while (!atomic_fcmpset##suf(ptr, &retval, value))	\
+		;						\
+	if (after)						\
+		atomic_thread_fence_acq();			\
+	return (retval);					\
+}
+
+ATOMIC_SWAP(, _int, u_int, 0, 0)
+ATOMIC_SWAP(_rel, _int, u_int, 1, 0)
+ATOMIC_SWAP(_acq, _int, u_int, 0, 1)
+
+ATOMIC_SWAP(, _32, uint32_t, 0, 0)
+ATOMIC_SWAP(_rel, _32, uint32_t, 1, 0)
+ATOMIC_SWAP(_acq, _32, uint32_t, 0, 1)
+
+#if defined(__mips_n64) || defined(__mips_n32)
+ATOMIC_SWAP(, _64, uint64_t, 0, 0)
+ATOMIC_SWAP(_rel, _64, uint64_t, 1, 0)
+ATOMIC_SWAP(_acq, _64, uint64_t, 0, 1)
+#endif
+
+ATOMIC_SWAP(, _long, u_long, 0, 0)
+ATOMIC_SWAP(_rel, _long, u_long, 1, 0)
+ATOMIC_SWAP(_acq, _long, u_long, 0, 1)
+
+ATOMIC_SWAP(, _ptr, uintptr_t, 0, 0)
+ATOMIC_SWAP(_rel, _ptr, uintptr_t, 1, 0)
+ATOMIC_SWAP(_acq, _ptr, uintptr_t, 0, 1)
+
 #endif /* ! _MACHINE_ATOMIC_H_ */
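Note on the MIPS hunk above: atomic_fcmpset() stores the currently observed
value back into the caller's "expected" slot on failure, so the loop always
retries with fresh data until the store succeeds. Below is a minimal
standalone sketch of the same pattern using C11 <stdatomic.h> in place of the
kernel's atomic(9) primitives; the name swap_32 and the relaxed memory
orderings are illustrative only and are not part of this patch.

	#include <stdatomic.h>
	#include <stdint.h>
	#include <stdio.h>

	static uint32_t
	swap_32(_Atomic uint32_t *ptr, uint32_t value)
	{
		/* Seed "expected" with whatever is currently stored. */
		uint32_t expected = atomic_load_explicit(ptr,
		    memory_order_relaxed);

		/*
		 * Like atomic_fcmpset_32(), a failed weak CAS rewrites
		 * "expected" with the current contents, so the empty loop
		 * body is sufficient.
		 */
		while (!atomic_compare_exchange_weak_explicit(ptr, &expected,
		    value, memory_order_relaxed, memory_order_relaxed))
			;
		return (expected);	/* the value that was swapped out */
	}

	int
	main(void)
	{
		_Atomic uint32_t word = 0xdeadbeef;
		uint32_t old = swap_32(&word, 0x1234);

		printf("old %#x, new %#x\n", old,
		    (unsigned)atomic_load_explicit(&word,
		    memory_order_relaxed));
		return (0);
	}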
Index: sys/powerpc/include/atomic.h
===================================================================
--- sys/powerpc/include/atomic.h
+++ sys/powerpc/include/atomic.h
@@ -852,6 +852,9 @@
 #define	atomic_fetchadd_64	atomic_fetchadd_long
 #define	atomic_swap_long	atomic_swap_64
 #define	atomic_swap_ptr		atomic_swap_64
+#else
+#define	atomic_swap_long	atomic_swap_32
+#define	atomic_swap_ptr		atomic_swap_32
 #endif
 
 #undef __ATOMIC_REL
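The new #else branch, like the ARM wrappers above, funnels the _long and _ptr
swaps into the 32-bit primitive. That is sound only because u_long and
pointers are 32 bits wide on these ILP32 targets. A hypothetical compile-time
check of that assumption, not part of the patch and written so that it only
compiles on an ILP32 target:

	#include <sys/types.h>	/* u_long */
	#include <stdint.h>

	/* Hypothetical ILP32 sanity check, illustration only. */
	_Static_assert(sizeof(u_long) == sizeof(uint32_t),
	    "atomic_swap_long can alias atomic_swap_32 only on ILP32");
	_Static_assert(sizeof(void *) == sizeof(uint32_t),
	    "atomic_swap_ptr can alias atomic_swap_32 only on ILP32");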