Index: sys/arm/include/atomic-v4.h
===================================================================
--- sys/arm/include/atomic-v4.h
+++ sys/arm/include/atomic-v4.h
@@ -543,6 +543,13 @@
 	atomic_subtract_32((volatile uint32_t *)p, v);
 }
 
+static __inline u_long
+atomic_swap_long(volatile u_long *p, u_long v)
+{
+
+	return (atomic_swap_32((volatile uint32_t *)p, v));
+}
+
 /*
  * ARMv5 does not support SMP. For both kernel and user modes, only a
  * compiler barrier is needed for fences, since CPU is always
Index: sys/arm/include/atomic-v6.h
===================================================================
--- sys/arm/include/atomic-v6.h
+++ sys/arm/include/atomic-v6.h
@@ -768,6 +768,13 @@
 	return (ret);
 }
 
+static __inline u_long
+atomic_swap_long(volatile u_long *p, u_long v)
+{
+
+	return (atomic_swap_32((volatile uint32_t *)p, v));
+}
+
 static __inline uint64_t
 atomic_swap_64(volatile uint64_t *p, uint64_t v)
 {
Index: sys/compat/linuxkpi/common/include/asm/atomic-long.h
===================================================================
--- sys/compat/linuxkpi/common/include/asm/atomic-long.h
+++ sys/compat/linuxkpi/common/include/asm/atomic-long.h
@@ -78,15 +78,7 @@
 static inline long
 atomic_long_xchg(atomic_long_t *v, long val)
 {
-#if defined(__i386__) || defined(__amd64__) || defined(__aarch64__)
 	return atomic_swap_long(&v->counter, val);
-#else
-	long ret = atomic_long_read(v);
-
-	while (!atomic_fcmpset_long(&v->counter, &ret, val))
-		;
-	return (ret);
-#endif
 }
 
 static inline long
Index: sys/compat/linuxkpi/common/include/asm/atomic.h
===================================================================
--- sys/compat/linuxkpi/common/include/asm/atomic.h
+++ sys/compat/linuxkpi/common/include/asm/atomic.h
@@ -128,15 +128,7 @@
 static inline int
 atomic_xchg(atomic_t *v, int i)
 {
-#if !defined(__mips__)
 	return (atomic_swap_int(&v->counter, i));
-#else
-	int ret = atomic_read(v);
-
-	while (!atomic_fcmpset_int(&v->counter, &ret, i))
-		;
-	return (ret);
-#endif
 }
 
 static inline int
Index: sys/mips/include/atomic.h
===================================================================
--- sys/mips/include/atomic.h
+++ sys/mips/include/atomic.h
@@ -755,4 +755,56 @@
 #define	atomic_store_rel_ptr	atomic_store_rel_long
 #define	atomic_readandclear_ptr	atomic_readandclear_long
 
+static __inline u_int
+atomic_swap_int(volatile u_int *ptr, const u_int value)
+{
+	u_int retval = *ptr;
+
+	while (!atomic_fcmpset_int(ptr, &retval, value))
+		;
+	return (retval);
+}
+
+static __inline uint32_t
+atomic_swap_32(volatile uint32_t *ptr, const uint32_t value)
+{
+	uint32_t retval = *ptr;
+
+	while (!atomic_fcmpset_32(ptr, &retval, value))
+		;
+	return (retval);
+}
+
+#if defined(__mips_n64) || defined(__mips_n32)
+static __inline uint64_t
+atomic_swap_64(volatile uint64_t *ptr, const uint64_t value)
+{
+	uint64_t retval = *ptr;
+
+	while (!atomic_fcmpset_64(ptr, &retval, value))
+		;
+	return (retval);
+}
+#endif
+
+static __inline u_long
+atomic_swap_long(volatile u_long *ptr, const u_long value)
+{
+	u_long retval = *ptr;
+
+	while (!atomic_fcmpset_long(ptr, &retval, value))
+		;
+	return (retval);
+}
+
+static __inline uintptr_t
+atomic_swap_ptr(volatile uintptr_t *ptr, const uintptr_t value)
+{
+	uintptr_t retval = *ptr;
+
+	while (!atomic_fcmpset_ptr(ptr, &retval, value))
+		;
+	return (retval);
+}
+
 #endif /* ! _MACHINE_ATOMIC_H_ */
Index: sys/powerpc/include/atomic.h
===================================================================
--- sys/powerpc/include/atomic.h
+++ sys/powerpc/include/atomic.h
@@ -852,6 +852,9 @@
 #define	atomic_fetchadd_64	atomic_fetchadd_long
 #define	atomic_swap_long	atomic_swap_64
 #define	atomic_swap_ptr		atomic_swap_64
+#else
+#define	atomic_swap_long	atomic_swap_32
+#define	atomic_swap_ptr		atomic_swap_32
 #endif
 
 #undef __ATOMIC_REL
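
Note: the MIPS hunk above emulates swap with an atomic_fcmpset_*() retry loop
rather than a native exchange instruction. The stand-alone userland sketch
below (not part of the patch; it substitutes C11 <stdatomic.h> for the kernel
atomic(9) API, and the names swap_int_emulated and word are made up for
illustration) shows the same pattern and why the loop needs no explicit
re-read: a failed compare-exchange refreshes the expected value.

#include <assert.h>
#include <stdatomic.h>
#include <stdio.h>

/*
 * Userland stand-in for the MIPS atomic_swap_int() above: emulate an
 * atomic swap with a compare-and-swap retry loop.  C11's
 * atomic_compare_exchange_weak() plays the role of FreeBSD's
 * atomic_fcmpset_int(): on failure it reloads the current value into
 * "expected", so the loop body stays empty.
 */
static int
swap_int_emulated(_Atomic int *ptr, int value)
{
	int expected = atomic_load(ptr);

	while (!atomic_compare_exchange_weak(ptr, &expected, value))
		;			/* "expected" now holds a fresh value */
	return (expected);		/* old value, as swap semantics require */
}

int
main(void)
{
	_Atomic int word = 42;

	assert(swap_int_emulated(&word, 7) == 42);	/* returns old value */
	assert(atomic_load(&word) == 7);		/* new value stored */
	printf("swap ok\n");
	return (0);
}

The weak form of compare-exchange may fail spuriously, which is harmless
here and makes it the natural analogue of atomic_fcmpset_int().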
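
Similarly, the arm and powerpc hunks service atomic_swap_long() and
atomic_swap_ptr() with the 32-bit primitive; that is sound only because
u_long and pointers are exactly 32 bits wide on those ILP32 targets. A
hypothetical compile-time guard (illustrative only, not in the patch) would
look like:

#include <sys/types.h>
#include <stdint.h>

/*
 * Illustrative only: the 32-bit fallbacks are valid only where u_long
 * and uintptr_t are exactly 32 bits wide.
 */
#if !defined(__LP64__)
_Static_assert(sizeof(u_long) == sizeof(uint32_t),
    "atomic_swap_long() may alias atomic_swap_32() only on ILP32");
_Static_assert(sizeof(uintptr_t) == sizeof(uint32_t),
    "atomic_swap_ptr() may alias atomic_swap_32() only on ILP32");
#endif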