Index: sys/arm64/arm64/identcpu.c
===================================================================
--- sys/arm64/arm64/identcpu.c
+++ sys/arm64/arm64/identcpu.c
@@ -114,8 +114,8 @@
 	uint64_t	id_aa64pfr1;
 };
 
-struct cpu_desc cpu_desc[MAXCPU];
-struct cpu_desc user_cpu_desc;
+static struct cpu_desc cpu_desc[MAXCPU];
+static struct cpu_desc user_cpu_desc;
 static u_int cpu_print_regs;
 #define	PRINT_ID_AA64_AFR0	0x00000001
 #define	PRINT_ID_AA64_AFR1	0x00000002
@@ -961,6 +961,7 @@
 
 /* HWCAP */
 extern u_long elf_hwcap;
+bool __read_frequently lse_supported = false;
 
 static void
 identify_cpu_sysinit(void *dummy __unused)
@@ -975,7 +976,6 @@
 	    ID_AA64PFR0_FP_NONE | ID_AA64PFR0_EL1_64 | ID_AA64PFR0_EL0_64;
 	user_cpu_desc.id_aa64dfr0 = ID_AA64DFR0_DebugVer_8;
 
-
 	CPU_FOREACH(cpu) {
 		print_cpu_features(cpu);
 		hwcap = parse_cpu_features_hwcap(cpu);
@@ -986,6 +986,12 @@
 		update_user_regs(cpu);
 	}
 
+	if ((elf_hwcap & HWCAP_ATOMICS) != 0) {
+		lse_supported = true;
+		if (bootverbose)
+			printf("Enabling LSE atomics in the kernel\n");
+	}
+
 	install_undef_handler(true, user_mrs_handler);
 }
 SYSINIT(idenrity_cpu, SI_SUB_SMP, SI_ORDER_ANY, identify_cpu_sysinit, NULL);
Index: sys/arm64/include/atomic.h
===================================================================
--- sys/arm64/include/atomic.h
+++ sys/arm64/include/atomic.h
@@ -59,6 +59,13 @@
 
 #include <sys/atomic_common.h>
 
+#ifdef _KERNEL
+extern bool lse_supported;
+#define	_ATOMIC_LSE_SUPPORTED	lse_supported
+#else
+#define	_ATOMIC_LSE_SUPPORTED	0
+#endif
+
 #define	_ATOMIC_OP_PROTO(t, op, bar, flav)				\
 static __inline void							\
 atomic_##op##_##bar##t##flav(volatile uint##t##_t *p, uint##t##_t val)
@@ -98,7 +105,10 @@
 									\
 _ATOMIC_OP_PROTO(t, op, bar, )						\
 {									\
-	atomic_##op##_##bar##t##_llsc(p, val);				\
+	if (_ATOMIC_LSE_SUPPORTED)					\
+		atomic_##op##_##bar##t##_lse(p, val);			\
+	else								\
+		atomic_##op##_##bar##t##_llsc(p, val);			\
 }
 
 #define	__ATOMIC_OP(op, llsc_asm_op, lse_asm_op, pre, bar, a, l)	\
@@ -175,7 +185,12 @@
 									\
 _ATOMIC_CMPSET_PROTO(t, bar, )						\
 {									\
-	return (atomic_cmpset_##bar##t##_llsc(p, cmpval, newval));	\
+	if (_ATOMIC_LSE_SUPPORTED)					\
+		return (atomic_cmpset_##bar##t##_lse(p, cmpval,		\
+		    newval));						\
+	else								\
+		return (atomic_cmpset_##bar##t##_llsc(p, cmpval,	\
+		    newval));						\
 }									\
 									\
 _ATOMIC_FCMPSET_PROTO(t, bar, _llsc)					\
@@ -223,7 +238,12 @@
 									\
 _ATOMIC_FCMPSET_PROTO(t, bar, )						\
 {									\
-	return (atomic_fcmpset_##bar##t##_llsc(p, cmpval, newval));	\
+	if (_ATOMIC_LSE_SUPPORTED)					\
+		return (atomic_fcmpset_##bar##t##_lse(p, cmpval,	\
+		    newval));						\
+	else								\
+		return (atomic_fcmpset_##bar##t##_llsc(p, cmpval,	\
+		    newval));						\
 }
 
 #define	_ATOMIC_CMPSET(bar, a, l)					\
@@ -277,7 +297,10 @@
 									\
 _ATOMIC_FETCHADD_PROTO(t, )						\
 {									\
-	return (atomic_fetchadd_##t##_llsc(p, val));			\
+	if (_ATOMIC_LSE_SUPPORTED)					\
+		return (atomic_fetchadd_##t##_lse(p, val));		\
+	else								\
+		return (atomic_fetchadd_##t##_llsc(p, val));		\
 }
 
 _ATOMIC_FETCHADD_IMPL(32, w)
@@ -327,7 +350,10 @@
 									\
 _ATOMIC_SWAP_PROTO(t, )							\
 {									\
-	return (atomic_swap_##t##_llsc(p, val));			\
+	if (_ATOMIC_LSE_SUPPORTED)					\
+		return (atomic_swap_##t##_lse(p, val));			\
+	else								\
+		return (atomic_swap_##t##_llsc(p, val));		\
 }									\
 									\
 _ATOMIC_READANDCLEAR_PROTO(t, _llsc)					\
@@ -354,7 +380,10 @@
 									\
 _ATOMIC_READANDCLEAR_PROTO(t, )						\
 {									\
-	return (atomic_readandclear_##t##_llsc(p));			\
+	if (_ATOMIC_LSE_SUPPORTED)					\
+		return (atomic_readandclear_##t##_lse(p));		\
+	else								\
+		return (atomic_readandclear_##t##_llsc(p));		\
 }
 
 _ATOMIC_SWAP_IMPL(32, w, wzr)
@@ -403,7 +432,10 @@
 									\
 _ATOMIC_TEST_OP_PROTO(t, op, )						\
 {									\
-	return (atomic_testand##op##_##t##_llsc(p, val));		\
+	if (_ATOMIC_LSE_SUPPORTED)					\
+		return (atomic_testand##op##_##t##_lse(p, val));	\
+	else								\
+		return (atomic_testand##op##_##t##_llsc(p, val));	\
 }
 
 #define	_ATOMIC_TEST_OP(op, llsc_asm_op, lse_asm_op)			\
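For reference, every wrapper generated by the macros above has the same shape: test a boolean that the kernel sets once at boot (from HWCAP_ATOMICS), then call either the LL/SC or the LSE variant. The following standalone C sketch only approximates that shape; the _llsc/_lse bodies use plain __atomic builtins as stand-ins for the real ldxr/stxr and LSE ldadd inline assembly, and main() is a hypothetical test harness, not FreeBSD code.

/*
 * Standalone sketch of the run-time dispatch pattern.  lse_supported
 * mirrors the kernel flag set from HWCAP_ATOMICS; the _llsc/_lse
 * bodies are plain C stand-ins, not the real inline asm.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

bool lse_supported = false;	/* set once during CPU identification */

static inline void
atomic_add_32_llsc(volatile uint32_t *p, uint32_t val)
{
	/* Kernel version: ldxr/stxr retry loop. */
	__atomic_fetch_add(p, val, __ATOMIC_RELAXED);
}

static inline void
atomic_add_32_lse(volatile uint32_t *p, uint32_t val)
{
	/* Kernel version: a single ldadd instruction (ARMv8.1 LSE). */
	__atomic_fetch_add(p, val, __ATOMIC_RELAXED);
}

static inline void
atomic_add_32(volatile uint32_t *p, uint32_t val)
{
	/* The same cheap branch appears in every generated wrapper. */
	if (lse_supported)
		atomic_add_32_lse(p, val);
	else
		atomic_add_32_llsc(p, val);
}

int
main(void)
{
	volatile uint32_t counter = 0;

	lse_supported = true;	/* pretend identify_cpu_sysinit() saw HWCAP_ATOMICS */
	atomic_add_32(&counter, 5);
	printf("counter = %u\n", (unsigned)counter);
	return (0);
}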