Index: sys/powerpc/include/atomic.h
===================================================================
--- sys/powerpc/include/atomic.h
+++ sys/powerpc/include/atomic.h
@@ -560,6 +560,120 @@
  * two values are equal, update the value of *p with newval. Returns
  * zero if the compare failed, nonzero otherwise.
  */
+static __inline int
+atomic_cmpset_masked(volatile u_int *p, u_int cmpval, u_int newval,
+    u_int mask)
+{
+	int	ret;
+	uint32_t tmp;
+
+	__asm __volatile (
+		"1:\tlwarx %2, 0, %3\n\t"	/* load old value */
+		"and %0, %2, %7\n\t"		/* isolate the lane */
+		"cmplw %4, %0\n\t"		/* compare */
+		"bne- 2f\n\t"			/* exit if not equal */
+		"andc %2, %2, %7\n\t"		/* clear the lane */
+		"or %2, %2, %5\n\t"		/* insert newval */
+		"stwcx. %2, 0, %3\n\t"		/* attempt to store */
+		"bne- 1b\n\t"			/* spin if failed */
+		"li %0, 1\n\t"			/* success - retval = 1 */
+		"b 3f\n\t"			/* we've succeeded */
+		"2:\n\t"
+		"stwcx. %2, 0, %3\n\t"		/* clear reservation (74xx) */
+		"li %0, 0\n\t"			/* failure - retval = 0 */
+		"3:\n\t"
+		: "=&r" (ret), "=m" (*p), "=&r" (tmp)
+		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p),
+		  "r" (mask)
+		: "cr0", "memory");
+
+	return (ret);
+}
+
+static __inline int
+atomic_cmpset_char(volatile u_char *p, u_char cmpval, u_char newval)
+{
+	int	ret;
+#ifdef ISA_206_ATOMICS
+	__asm __volatile (
+		"1:\tlbarx %0, 0, %2\n\t"	/* load old value */
+		"cmplw %3, %0\n\t"		/* compare */
+		"bne- 2f\n\t"			/* exit if not equal */
+		"stbcx. %4, 0, %2\n\t"		/* attempt to store */
+		"bne- 1b\n\t"			/* spin if failed */
+		"li %0, 1\n\t"			/* success - retval = 1 */
+		"b 3f\n\t"			/* we've succeeded */
+		"2:\n\t"
+		"stbcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
+		"li %0, 0\n\t"			/* failure - retval = 0 */
+		"3:\n\t"
+		: "=&r" (ret), "=m" (*p)
+		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
+		: "cr0", "memory");
+#else
+	u_int *p_adj;
+	uint32_t alignment, cmp_val, mask, new_val, shift;
+
+	alignment = (uintptr_t)p & 0x3;
+	p_adj = (void *)((uintptr_t)p & ~0x3);
+
+	/* Big-endian: byte 0 sits in the most-significant lane. */
+	shift = (3 - alignment) * 8;
+
+	mask = 0xff;
+	cmp_val = cmpval;
+	new_val = newval;
+	mask <<= shift;
+	cmp_val <<= shift;
+	new_val <<= shift;
+
+	ret = atomic_cmpset_masked(p_adj, cmp_val, new_val, mask);
+#endif
+
+	return (ret);
+}
+
+static __inline int
+atomic_cmpset_short(volatile u_short *p, u_short cmpval, u_short newval)
+{
+	int	ret;
+#ifdef ISA_206_ATOMICS
+	__asm __volatile (
+		"1:\tlharx %0, 0, %2\n\t"	/* load old value */
+		"cmplw %3, %0\n\t"		/* compare */
+		"bne- 2f\n\t"			/* exit if not equal */
+		"sthcx. %4, 0, %2\n\t"		/* attempt to store */
+		"bne- 1b\n\t"			/* spin if failed */
+		"li %0, 1\n\t"			/* success - retval = 1 */
+		"b 3f\n\t"			/* we've succeeded */
+		"2:\n\t"
+		"sthcx. %0, 0, %2\n\t"		/* clear reservation (74xx) */
+		"li %0, 0\n\t"			/* failure - retval = 0 */
+		"3:\n\t"
+		: "=&r" (ret), "=m" (*p)
+		: "r" (p), "r" (cmpval), "r" (newval), "m" (*p)
+		: "cr0", "memory");
+#else
+	u_int *p_adj;
+	uint32_t cmp_val, mask, new_val;
+	bool is_aligned;
+
+	is_aligned = ((uintptr_t)p & 0x2) == 0;
+	p_adj = (void *)((uintptr_t)p & ~0x3);
+
+	mask = 0xffff;
+	cmp_val = cmpval;
+	new_val = newval;
+	if (is_aligned) {
+		/* Big-endian: halfword 0 is the high half of the word. */
+		mask <<= 16;
+		cmp_val <<= 16;
+		new_val <<= 16;
+	}
+
+	ret = atomic_cmpset_masked(p_adj, cmp_val, new_val, mask);
+#endif
+
+	return (ret);
+}
+
 static __inline int
 atomic_cmpset_int(volatile u_int* p, u_int cmpval, u_int newval)
 {
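The #else paths above emulate a sub-word CAS by widening to the containing aligned word and doing a masked CAS on one lane of it. A minimal standalone sketch of that lane arithmetic (not part of the diff; plain C, big-endian byte order assumed, as on FreeBSD/powerpc):

#include <stdint.h>
#include <stdio.h>

/*
 * On a big-endian 32-bit word, the byte at offset a occupies bits
 * [(3 - a) * 8, (3 - a) * 8 + 7], so mask, cmpval and newval are all
 * positioned with the same shift.
 */
int
main(void)
{
	unsigned a;

	for (a = 0; a < 4; a++)
		printf("byte offset %u -> shift %2u, mask 0x%08x\n",
		    a, (3 - a) * 8, 0xffu << ((3 - a) * 8));
	return (0);
}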
%0, 0, %2\n\t" /* clear reservation (74xx) */ + "li %0, 0\n\t" /* failure - retval = 0 */ + "3:\n\t" + : "=&r" (ret), "=m" (*p) + : "r" (p), "r" (cmpval), "r" (newval), "m" (*p) + : "cr0", "memory"); +#else + u_int *p_adj; + uint32_t cmp_val, mask, new_val; + bool is_aligned; + + is_aligned = ((((uintptr_t)p) & 0x2) == 0); + p_adj = (void *)(((uintptr_t)p & ~0x3)); + + mask = 0xffff; + new_val = newval; + cmp_val = cmpval; + if (is_aligned) { + mask <<= 16; + cmp_val <<= 16; + new_val <<= 16; + } + + ret = atomic_cmpset_masked(p_adj, cmp_val, new_val, mask); +#endif + + return (ret); +} + static __inline int atomic_cmpset_int(volatile u_int* p, u_int cmpval, u_int newval) { @@ -618,39 +732,38 @@ return (ret); } -static __inline int -atomic_cmpset_acq_int(volatile u_int *p, u_int cmpval, u_int newval) -{ - int retval; - - retval = atomic_cmpset_int(p, cmpval, newval); - __ATOMIC_ACQ(); - return (retval); -} - -static __inline int -atomic_cmpset_rel_int(volatile u_int *p, u_int cmpval, u_int newval) -{ - __ATOMIC_REL(); - return (atomic_cmpset_int(p, cmpval, newval)); -} - -static __inline int -atomic_cmpset_acq_long(volatile u_long *p, u_long cmpval, u_long newval) -{ - u_long retval; - - retval = atomic_cmpset_long(p, cmpval, newval); - __ATOMIC_ACQ(); - return (retval); -} - -static __inline int -atomic_cmpset_rel_long(volatile u_long *p, u_long cmpval, u_long newval) -{ - __ATOMIC_REL(); - return (atomic_cmpset_long(p, cmpval, newval)); -} +#define ATOMIC_CMPSET_ACQ_REL(type) \ + static __inline int \ + atomic_cmpset_acq_##type(volatile u_##type *p, \ + u_##type cmpval, u_##type newval)\ + {\ + u_##type retval; \ + retval = atomic_cmpset_##type(p, cmpval, newval);\ + __ATOMIC_ACQ();\ + return (retval);\ + }\ + static __inline int \ + atomic_cmpset_rel_##type(volatile u_##type *p, \ + u_##type cmpval, u_##type newval)\ + {\ + __ATOMIC_ACQ();\ + return (atomic_cmpset_##type(p, cmpval, newval));\ + }\ + struct hack + +ATOMIC_CMPSET_ACQ_REL(char); +ATOMIC_CMPSET_ACQ_REL(short); +ATOMIC_CMPSET_ACQ_REL(int); +ATOMIC_CMPSET_ACQ_REL(long); + + +#define atomic_cmpset_8 atomic_cmpset_char +#define atomic_cmpset_acq_8 atomic_cmpset_acq_char +#define atomic_cmpset_rel_8 atomic_cmpset_rel_char + +#define atomic_cmpset_16 atomic_cmpset_short +#define atomic_cmpset_acq_16 atomic_cmpset_acq_short +#define atomic_cmpset_rel_16 atomic_cmpset_rel_short #define atomic_cmpset_32 atomic_cmpset_int #define atomic_cmpset_acq_32 atomic_cmpset_acq_int @@ -677,12 +790,130 @@ * nonzero otherwise. */ static __inline int +atomic_fcmpset_masked(volatile u_int *p, u_int *cmpval, u_int newval, u_int mask) +{ + int ret; + uint32_t tmp; + + __asm __volatile ( + "lwarx %2, 0, %4\n\t" /* load old value */ + "and %0, %2, %9\n\t" + "cmplw %4, %0\n\t" /* compare */ + "bne- 1f\n\t" /* exit if not equal */ + "andc %2, %2, %8\n\t" + "or %2, %2, %6\n\t" + "stwcx. %2, 0, %4\n\t" /* attempt to store */ + "li %0, 1\n\t" /* success - retval = 1 */ + "b 2f\n\t" /* we've succeeded */ + "1:\n\t" + "stwcx. 
%2, 0, %4\n\t" /* clear reservation (74xx) */ + "li %0, 0\n\t" /* failure - retval = 0 */ + "2:\n\t" + : "=&r" (ret), "=m" (*p), "+&r" (tmp), "=m" (*cmpval) + : "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval), + "r" (mask) + : "cr0", "memory"); + + return (ret); +} + +static __inline int +atomic_fcmpset_char(volatile u_char *p, u_char *cmpval, u_char newval) +{ + int ret; +#ifdef ISA_206_ATOMICS + __asm __volatile ( + "lbarx %0, 0, %3\n\t" /* load old value */ + "cmplw %4, %0\n\t" /* compare */ + "bne- 1f\n\t" /* exit if not equal */ + "stbcx. %5, 0, %3\n\t" /* attempt to store */ + "bne- 1f\n\t" /* exit if failed */ + "li %0, 1\n\t" /* success - retval = 1 */ + "b 2f\n\t" /* we've succeeded */ + "1:\n\t" + "stbcx. %0, 0, %3\n\t" /* clear reservation (74xx) */ + "stwx %0, 0, %7\n\t" + "li %0, 0\n\t" /* failure - retval = 0 */ + "2:\n\t" + : "=&r" (ret), "=m" (*p), "=m" (*cmpval) + : "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval) + : "cr0", "memory"); +#else + u_int *p_adj; + uint32_t alignment, mask, new_val, shift, tmp; + + alignment = ((((uintptr_t)p) & 0x3)); + p_adj = (void *)(((uintptr_t)p & ~0x3)); + + shift = (3 - alignment) * 8; + tmp = *cmpval; + new_val = newval; + + mask = 0xff; + mask <<= shift; + tmp <<= shift; + newval <<= shift; + + ret = atomic_fcmpset_masked(p_adj, &tmp, newval, mask); + if (!ret) + *cmpval = tmp >> shift; +#endif + + return (ret); +} + +static __inline int +atomic_fcmpset_short(volatile u_short *p, u_short *cmpval, u_short newval) +{ + int ret; +#ifdef ISA_206_ATOMICS + __asm __volatile ( + "lharx %0, 0, %3\n\t" /* load old value */ + "cmplw %4, %0\n\t" /* compare */ + "bne- 1f\n\t" /* exit if not equal */ + "sthcx. %5, 0, %3\n\t" /* attempt to store */ + "bne- 1f\n\t" /* exit if failed */ + "li %0, 1\n\t" /* success - retval = 1 */ + "b 2f\n\t" /* we've succeeded */ + "1:\n\t" + "sthcx. %0, 0, %3\n\t" /* clear reservation (74xx) */ + "stwx %0, 0, %7\n\t" + "li %0, 0\n\t" /* failure - retval = 0 */ + "2:\n\t" + : "=&r" (ret), "=m" (*p), "=m" (*cmpval) + : "r" (p), "r" (*cmpval), "r" (newval), "m" (*p), "r"(cmpval) + : "cr0", "memory"); +#else + u_int *p_adj; + uint32_t mask, new_val, tmp; + bool is_aligned; + + is_aligned = ((((uintptr_t)p) & 0x2) == 0); + p_adj = (void *)(((uintptr_t)p & ~0x3)); + + tmp = *cmpval; + new_val = newval; + mask = 0xffff; + if (is_aligned) { + mask <<= 16; + newval <<= 16; + tmp <<= 16; + } + + ret = atomic_fcmpset_masked(p_adj, &tmp, newval, mask); + if (!ret) + *cmpval = tmp >> (is_aligned ? 16 : 0); +#endif + + return (ret); +} +static __inline int atomic_fcmpset_int(volatile u_int *p, u_int *cmpval, u_int newval) { int ret; __asm __volatile ( - "lwarx %0, 0, %3\n\t" /* load old value */ + "lwarx %0, 0, %3\n\t" /* load old value */ "cmplw %4, %0\n\t" /* compare */ "bne- 1f\n\t" /* exit if not equal */ "stwcx. %5, 0, %3\n\t" /* attempt to store */ @@ -707,12 +938,12 @@ __asm __volatile ( #ifdef __powerpc64__ - "ldarx %0, 0, %3\n\t" /* load old value */ + "ldarx %0, 0, %3\n\t" /* load old value */ "cmpld %4, %0\n\t" /* compare */ "bne- 1f\n\t" /* exit if not equal */ "stdcx. %5, 0, %3\n\t" /* attempt to store */ #else - "lwarx %0, 0, %3\n\t" /* load old value */ + "lwarx %0, 0, %3\n\t" /* load old value */ "cmplw %4, %0\n\t" /* compare */ "bne- 1f\n\t" /* exit if not equal */ "stwcx. 
%5, 0, %3\n\t" /* attempt to store */ @@ -737,39 +968,29 @@ return (ret); } -static __inline int -atomic_fcmpset_acq_int(volatile u_int *p, u_int *cmpval, u_int newval) -{ - int retval; - - retval = atomic_fcmpset_int(p, cmpval, newval); - __ATOMIC_ACQ(); - return (retval); -} - -static __inline int -atomic_fcmpset_rel_int(volatile u_int *p, u_int *cmpval, u_int newval) -{ - __ATOMIC_REL(); - return (atomic_fcmpset_int(p, cmpval, newval)); -} - -static __inline int -atomic_fcmpset_acq_long(volatile u_long *p, u_long *cmpval, u_long newval) -{ - u_long retval; - - retval = atomic_fcmpset_long(p, cmpval, newval); - __ATOMIC_ACQ(); - return (retval); -} - -static __inline int -atomic_fcmpset_rel_long(volatile u_long *p, u_long *cmpval, u_long newval) -{ - __ATOMIC_REL(); - return (atomic_fcmpset_long(p, cmpval, newval)); -} +#define ATOMIC_FCMPSET_ACQ_REL(type) \ + static __inline int \ + atomic_fcmpset_acq_##type(volatile u_##type *p, \ + u_##type *cmpval, u_##type newval)\ + {\ + u_##type retval; \ + retval = atomic_fcmpset_##type(p, cmpval, newval);\ + __ATOMIC_ACQ();\ + return (retval);\ + }\ + static __inline int \ + atomic_fcmpset_rel_##type(volatile u_##type *p, \ + u_##type *cmpval, u_##type newval)\ + {\ + __ATOMIC_REL();\ + return (atomic_fcmpset_##type(p, cmpval, newval));\ + }\ + struct hack + +ATOMIC_FCMPSET_ACQ_REL(char); +ATOMIC_FCMPSET_ACQ_REL(short); +ATOMIC_FCMPSET_ACQ_REL(int); +ATOMIC_FCMPSET_ACQ_REL(long); #define atomic_fcmpset_32 atomic_fcmpset_int #define atomic_fcmpset_acq_32 atomic_fcmpset_acq_int