Index: sys/amd64/amd64/pmap.c
===================================================================
--- sys/amd64/amd64/pmap.c
+++ sys/amd64/amd64/pmap.c
@@ -705,6 +705,15 @@
 	invl_gen->next = (void *)PMAP_INVL_GEN_NEXT_INVALID;
 }
 
+/* Copied from atomic.h. */
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+#define	CC_FLAG_OUT_INST(cc, loc)
+#define	CC_FLAG_OUT_CONS(cc)	"=@cc" #cc
+#else
+#define	CC_FLAG_OUT_INST(cc, loc)	"set" #cc " " loc " ; "
+#define	CC_FLAG_OUT_CONS(cc)	"=q"
+#endif
+
 static bool
 pmap_di_load_invl(struct pmap_invl_gen *ptr, struct pmap_invl_gen *out)
 {
@@ -714,8 +723,9 @@
 
 	old_low = new_low = 0;
 	old_high = new_high = (uintptr_t)0;
-	__asm volatile("lock;cmpxchg16b\t%1"
-	    : "=@cce" (res), "+m" (*ptr), "+a" (old_low), "+d" (old_high)
+	__asm volatile("lock;cmpxchg16b\t%1;" CC_FLAG_OUT_INST(e, "%0")
+	    : CC_FLAG_OUT_CONS(e) (res), "+m" (*ptr), "+a" (old_low),
+	    "+d" (old_high)
 	    : "b"(new_low), "c" (new_high)
 	    : "memory", "cc");
 	if (res == 0) {
@@ -742,8 +752,9 @@
 
 	old_low = old_val->gen;
 	old_high = (uintptr_t)old_val->next;
-	__asm volatile("lock;cmpxchg16b\t%1"
-	    : "=@cce" (res), "+m" (*ptr), "+a" (old_low), "+d" (old_high)
+	__asm volatile("lock;cmpxchg16b\t%1;" CC_FLAG_OUT_INST(e, "%0")
+	    : CC_FLAG_OUT_CONS(e) (res), "+m" (*ptr), "+a" (old_low),
+	    "+d" (old_high)
 	    : "b"(new_low), "c" (new_high)
 	    : "memory", "cc");
 	return (res);
Index: sys/amd64/include/atomic.h
===================================================================
--- sys/amd64/include/atomic.h
+++ sys/amd64/include/atomic.h
@@ -152,6 +152,19 @@
 #define	MPLOCKED
 #endif
 
+/*
+ * Newer versions of clang and gcc, including the in-tree compiler, support
+ * outputting status flags directly from asm blocks.  Provide support for
+ * older cross compilers, where the flag is extracted via setcc.
+ */
+#ifdef __GCC_ASM_FLAG_OUTPUTS__
+#define	CC_FLAG_OUT_INST(cc, loc)
+#define	CC_FLAG_OUT_CONS(cc)	"=@cc" #cc
+#else
+#define	CC_FLAG_OUT_INST(cc, loc)	"set" #cc " " loc " ; "
+#define	CC_FLAG_OUT_CONS(cc)	"=q"
+#endif
+
 /*
  * The assembly is volatilized to avoid code chunk removal by the compiler.
  * GCC aggressively reorders operations and memory clobbering is necessary
@@ -201,8 +214,9 @@
 	__asm __volatile(					\
 	"	" MPLOCKED "		"			\
 	"	cmpxchg %3,%1 ;	"				\
+	CC_FLAG_OUT_INST(e, "%0")				\
 	"# atomic_cmpset_" #TYPE "	"			\
-	: "=@cce" (res),		/* 0 */			\
+	: CC_FLAG_OUT_CONS(e) (res),	/* 0 */			\
 	  "+m" (*dst),			/* 1 */			\
 	  "+a" (expect)			/* 2 */			\
 	: "r" (src)			/* 3 */			\
@@ -218,8 +232,9 @@
 	__asm __volatile(					\
 	"	" MPLOCKED "		"			\
 	"	cmpxchg %3,%1 ;	"				\
+	CC_FLAG_OUT_INST(e, "%0")				\
 	"# atomic_fcmpset_" #TYPE "	"			\
-	: "=@cce" (res),		/* 0 */			\
+	: CC_FLAG_OUT_CONS(e) (res),	/* 0 */			\
 	  "+m" (*dst),			/* 1 */			\
 	  "+a" (*expect)		/* 2 */			\
 	: "r" (src)			/* 3 */			\
@@ -276,8 +291,9 @@
 	__asm __volatile(
 	"	" MPLOCKED "		"
 	"	btsl	%2,%1 ;		"
+	CC_FLAG_OUT_INST(c, "%0")
 	"# atomic_testandset_int"
-	: "=@ccc" (res),		/* 0 */
+	: CC_FLAG_OUT_CONS(c) (res),	/* 0 */
 	  "+m" (*p)			/* 1 */
 	: "Ir" (v & 0x1f)		/* 2 */
 	: "cc");
@@ -292,8 +308,9 @@
 	__asm __volatile(
 	"	" MPLOCKED "		"
 	"	btsq	%2,%1 ;		"
+	CC_FLAG_OUT_INST(c, "%0")
 	"# atomic_testandset_long"
-	: "=@ccc" (res),		/* 0 */
+	: CC_FLAG_OUT_CONS(c) (res),	/* 0 */
 	  "+m" (*p)			/* 1 */
 	: "Jr" ((u_long)(v & 0x3f))	/* 2 */
 	: "cc");
@@ -308,8 +325,9 @@
 	__asm __volatile(
 	"	" MPLOCKED "		"
 	"	btrl	%2,%1 ;		"
+	CC_FLAG_OUT_INST(c, "%0")
 	"# atomic_testandclear_int"
-	: "=@ccc" (res),		/* 0 */
+	: CC_FLAG_OUT_CONS(c) (res),	/* 0 */
 	  "+m" (*p)			/* 1 */
 	: "Ir" (v & 0x1f)		/* 2 */
 	: "cc");
@@ -324,8 +342,9 @@
 	__asm __volatile(
 	"	" MPLOCKED "		"
 	"	btrq	%2,%1 ;		"
+	CC_FLAG_OUT_INST(c, "%0")
 	"# atomic_testandclear_long"
-	: "=@ccc" (res),		/* 0 */
+	: CC_FLAG_OUT_CONS(c) (res),	/* 0 */
 	  "+m" (*p)			/* 1 */
 	: "Jr" ((u_long)(v & 0x3f))	/* 2 */
 	: "cc");
@@ -463,6 +482,8 @@
 #undef ATOMIC_LOAD
 #undef ATOMIC_STORE
 #undef ATOMIC_LOADSTORE
+#undef CC_FLAG_OUT_INST
+#undef CC_FLAG_OUT_CONS
 
 #ifndef WANT_FUNCTIONS
 /* Read the current value and store a new value in the destination. */
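For reference, a minimal standalone sketch of how the two CC_FLAG_OUT_*
expansions behave.  The cmpset_int() and main() below are hypothetical
illustrations, not part of the patch or of the FreeBSD sources; they follow
the same shape as the ATOMIC_CMPSET expansion above.  Built as-is it takes
the flag-output path on a compiler that predefines __GCC_ASM_FLAG_OUTPUTS__;
adding -U__GCC_ASM_FLAG_OUTPUTS__ to the compiler flags forces the setcc
fallback on such a compiler.

#include <stdio.h>

#ifdef __GCC_ASM_FLAG_OUTPUTS__
#define	CC_FLAG_OUT_INST(cc, loc)
#define	CC_FLAG_OUT_CONS(cc)	"=@cc" #cc
#else
#define	CC_FLAG_OUT_INST(cc, loc)	"set" #cc " " loc " ; "
#define	CC_FLAG_OUT_CONS(cc)	"=q"
#endif

/* Compare-and-set *dst from expect to src; returns 1 on success (ZF set). */
static int
cmpset_int(volatile int *dst, int expect, int src)
{
	char res;

	__asm__ __volatile__(
	"	lock ; cmpxchg %3,%1 ;	"
	CC_FLAG_OUT_INST(e, "%0")	/* empty when ZF is a direct output */
	: CC_FLAG_OUT_CONS(e) (res),	/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+a" (expect)			/* 2 */
	: "r" (src)			/* 3 */
	: "memory", "cc");
	return (res);
}

int
main(void)
{
	volatile int x = 5;

	printf("%d x=%d\n", cmpset_int(&x, 5, 7), x);	/* 1 x=7 */
	printf("%d x=%d\n", cmpset_int(&x, 5, 9), x);	/* 0 x=7 */
	return (0);
}

Both builds print the same results; the only difference is whether the
compiler consumes ZF directly or materializes it through an extra sete
instruction into a byte register.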