Index: share/man/man9/atomic.9
===================================================================
--- share/man/man9/atomic.9
+++ share/man/man9/atomic.9
@@ -22,7 +22,7 @@
 .\"
 .\" $FreeBSD$
 .\"
-.Dd March 6, 2021
+.Dd April 7, 2022
 .Dt ATOMIC 9
 .Os
 .Sh NAME
@@ -63,7 +63,7 @@
 .Ft void
 .Fn atomic_interrupt_fence "void"
 .Ft <type>
-.Fn atomic_load_[acq_]<type> "volatile <type> *p"
+.Fn atomic_load_[acq_]<type> "const volatile <type> *p"
 .Ft <type>
 .Fn atomic_readandclear_<type> "volatile <type> *p"
 .Ft void
@@ -496,6 +496,7 @@
 and
 .Tn powerpc
 architectures.
+Additionally, some may not fully conform to the specified prototypes.
 .Sh RETURN VALUES
 The
 .Fn atomic_cmpset
Index: sys/amd64/include/atomic.h
===================================================================
--- sys/amd64/include/atomic.h
+++ sys/amd64/include/atomic.h
@@ -128,7 +128,7 @@
 void	atomic_thread_fence_seq_cst(void);
 
 #define	ATOMIC_LOAD(TYPE) \
-u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p)
+u_##TYPE	atomic_load_acq_##TYPE(const volatile u_##TYPE *const p)
 #define	ATOMIC_STORE(TYPE) \
 void	atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
 
@@ -337,7 +337,7 @@
 
 #define	ATOMIC_LOAD(TYPE) \
 static __inline u_##TYPE \
-atomic_load_acq_##TYPE(volatile u_##TYPE *p) \
+atomic_load_acq_##TYPE(const volatile u_##TYPE *const p) \
 { \
 	u_##TYPE res; \
 \
Index: sys/arm64/include/atomic.h
===================================================================
--- sys/arm64/include/atomic.h
+++ sys/arm64/include/atomic.h
@@ -459,7 +459,7 @@
 
 #define	_ATOMIC_LOAD_ACQ_IMPL(t, w, s) \
 static __inline uint##t##_t \
-atomic_load_acq_##t(volatile uint##t##_t *p) \
+atomic_load_acq_##t(const volatile uint##t##_t *const p) \
 { \
 	uint##t##_t ret; \
 \
Index: sys/i386/include/atomic.h
===================================================================
--- sys/i386/include/atomic.h
+++ sys/i386/include/atomic.h
@@ -117,13 +117,13 @@
 void	atomic_thread_fence_seq_cst(void);
 
 #define	ATOMIC_LOAD(TYPE) \
-u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p)
+u_##TYPE	atomic_load_acq_##TYPE(const volatile u_##TYPE *const p)
 #define	ATOMIC_STORE(TYPE) \
 void	atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
 
 int	atomic_cmpset_64(volatile uint64_t *, uint64_t, uint64_t);
 int	atomic_fcmpset_64(volatile uint64_t *, uint64_t *, uint64_t);
-uint64_t	atomic_load_acq_64(volatile uint64_t *);
+uint64_t	atomic_load_acq_64(volatile uint64_t *const);
 void	atomic_store_rel_64(volatile uint64_t *, uint64_t);
 uint64_t	atomic_swap_64(volatile uint64_t *, uint64_t);
 uint64_t	atomic_fetchadd_64(volatile uint64_t *, uint64_t);
@@ -291,7 +291,7 @@
 
 #define	ATOMIC_LOAD(TYPE) \
 static __inline u_##TYPE \
-atomic_load_acq_##TYPE(volatile u_##TYPE *p) \
+atomic_load_acq_##TYPE(const volatile u_##TYPE *const p) \
 { \
 	u_##TYPE res; \
 \
@@ -344,8 +344,8 @@
 #ifdef WANT_FUNCTIONS
 int	atomic_cmpset_64_i386(volatile uint64_t *, uint64_t, uint64_t);
 int	atomic_cmpset_64_i586(volatile uint64_t *, uint64_t, uint64_t);
-uint64_t	atomic_load_acq_64_i386(volatile uint64_t *);
-uint64_t	atomic_load_acq_64_i586(volatile uint64_t *);
+uint64_t	atomic_load_acq_64_i386(const volatile uint64_t *const);
+uint64_t	atomic_load_acq_64_i586(volatile uint64_t *const);
 void	atomic_store_rel_64_i386(volatile uint64_t *, uint64_t);
 void	atomic_store_rel_64_i586(volatile uint64_t *, uint64_t);
 uint64_t	atomic_swap_64_i386(volatile uint64_t *, uint64_t);
@@ -395,12 +395,12 @@
 }
 
 static __inline uint64_t
-atomic_load_acq_64_i386(volatile uint64_t *p)
+atomic_load_acq_64_i386(const volatile uint64_t *const p)
 {
-	volatile uint32_t *q;
+	const volatile uint32_t *q;
 	uint64_t res;
 
-	q = (volatile uint32_t *)p;
+	q = (const volatile uint32_t *)p;
 	__asm __volatile(
 	"	pushfl ;		"
 	"	cli ;			"
@@ -490,7 +490,7 @@
 }
 
 static __inline uint64_t
-atomic_load_acq_64_i586(volatile uint64_t *p)
+atomic_load_acq_64_i586(volatile uint64_t *const p)
 {
 	uint64_t res;
 
@@ -556,7 +556,7 @@
 }
 
 static __inline uint64_t
-atomic_load_acq_64(volatile uint64_t *p)
+atomic_load_acq_64(volatile uint64_t *const p)
 {
 
 	if ((cpu_feature & CPUID_CX8) == 0)
@@ -895,7 +895,7 @@
 #define	atomic_subtract_rel_ptr(p, v) \
 	atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
 #define	atomic_load_acq_ptr(p) \
-	atomic_load_acq_int((volatile u_int *)(p))
+	atomic_load_acq_int((const volatile u_int *const)(p))
 #define	atomic_store_rel_ptr(p, v) \
 	atomic_store_rel_int((volatile u_int *)(p), (v))
 #define	atomic_cmpset_ptr(dst, old, new) \
Index: sys/powerpc/include/atomic.h
===================================================================
--- sys/powerpc/include/atomic.h
+++ sys/powerpc/include/atomic.h
@@ -508,7 +508,7 @@
  */
 #define	ATOMIC_STORE_LOAD(TYPE) \
 static __inline u_##TYPE \
-atomic_load_acq_##TYPE(volatile u_##TYPE *p) \
+atomic_load_acq_##TYPE(const volatile u_##TYPE *const p) \
 { \
 	u_##TYPE v; \
 \
@@ -540,10 +540,10 @@
 #define	atomic_store_rel_ptr	atomic_store_rel_long
 #else
 static __inline u_long
-atomic_load_acq_long(volatile u_long *addr)
+atomic_load_acq_long(const volatile u_long *const addr)
 {
 
-	return ((u_long)atomic_load_acq_int((volatile u_int *)addr));
+	return ((u_long)atomic_load_acq_int((const volatile u_int *const)addr));
 }
 
 static __inline void
Index: sys/riscv/include/atomic.h
===================================================================
--- sys/riscv/include/atomic.h
+++ sys/riscv/include/atomic.h
@@ -259,7 +259,7 @@
 ATOMIC_FCMPSET_ACQ_REL(32);
 
 static __inline uint32_t
-atomic_load_acq_32(volatile uint32_t *p)
+atomic_load_acq_32(const volatile uint32_t *const p)
 {
 	uint32_t ret;
 
@@ -469,7 +469,7 @@
 ATOMIC_FCMPSET_ACQ_REL(64);
 
 static __inline uint64_t
-atomic_load_acq_64(volatile uint64_t *p)
+atomic_load_acq_64(const volatile uint64_t *const p)
 {
 	uint64_t ret;
 
Index: sys/sys/atomic_common.h
===================================================================
--- sys/sys/atomic_common.h
+++ sys/sys/atomic_common.h
@@ -36,16 +36,16 @@
 #error do not include this header, use machine/atomic.h
 #endif
 
-#define	atomic_load_char(p)	(*(volatile u_char *)(p))
-#define	atomic_load_short(p)	(*(volatile u_short *)(p))
-#define	atomic_load_int(p)	(*(volatile u_int *)(p))
-#define	atomic_load_long(p)	(*(volatile u_long *)(p))
-#define	atomic_load_ptr(p)	(*(volatile __typeof(*p) *)(p))
-#define	atomic_load_8(p)	(*(volatile uint8_t *)(p))
-#define	atomic_load_16(p)	(*(volatile uint16_t *)(p))
-#define	atomic_load_32(p)	(*(volatile uint32_t *)(p))
+#define	atomic_load_char(p)	(*(const volatile u_char *const)(p))
+#define	atomic_load_short(p)	(*(const volatile u_short *const)(p))
+#define	atomic_load_int(p)	(*(const volatile u_int *const)(p))
+#define	atomic_load_long(p)	(*(const volatile u_long *const)(p))
+#define	atomic_load_ptr(p)	(*(volatile __typeof(*p) *const)(p))
+#define	atomic_load_8(p)	(*(const volatile uint8_t *const)(p))
+#define	atomic_load_16(p)	(*(const volatile uint16_t *const)(p))
+#define	atomic_load_32(p)	(*(const volatile uint32_t *const)(p))
 #ifdef _LP64
-#define	atomic_load_64(p)	(*(volatile uint64_t *)(p))
+#define	atomic_load_64(p)	(*(const volatile uint64_t *const)(p))
 #endif
 
 #define	atomic_store_char(p, v) \
@@ -75,7 +75,7 @@
  * openly resorting to the stronger acquire fence, to be sorted out.
  */
 #define	atomic_load_consume_ptr(p) \
-	((__typeof(*p)) atomic_load_acq_ptr((uintptr_t *)p))
+	((__typeof(*p)) atomic_load_acq_ptr((const uintptr_t *const)p))
 
 #define	atomic_interrupt_fence()	__compiler_membar()
 
Index: sys/sys/atomic_san.h
===================================================================
--- sys/sys/atomic_san.h
+++ sys/sys/atomic_san.h
@@ -65,8 +65,8 @@
 	type sp##_atomic_readandclear_##name(volatile type *)
 
 #define	ATOMIC_SAN_LOAD(sp, name, type) \
-	type sp##_atomic_load_##name(volatile type *); \
-	type sp##_atomic_load_acq_##name(volatile type *)
+	type sp##_atomic_load_##name(const volatile type *const); \
+	type sp##_atomic_load_acq_##name(const volatile type *const)
 
 #define	ATOMIC_SAN_STORE(sp, name, type) \
 	void sp##_atomic_store_##name(volatile type *, type); \
@@ -253,13 +253,13 @@
 #define	atomic_fetchadd_ptr		ATOMIC_SAN(fetchadd_ptr)
 #define	atomic_load_ptr(x) ({ \
 	__typeof(*x) __retptr; \
-	__retptr = (void *)ATOMIC_SAN(load_ptr)((volatile uintptr_t *)(x)); \
+	__retptr = (void *)ATOMIC_SAN(load_ptr)((const volatile uintptr_t *const)(x));\
 	__retptr; \
 })
 #define	atomic_load_acq_ptr		ATOMIC_SAN(load_acq_ptr)
 #define	atomic_load_consume_ptr(x) ({ \
 	__typeof(*x) __retptr; \
-	__retptr = (void *)atomic_load_acq_ptr((volatile uintptr_t *)(x));\
+	__retptr = (void *)atomic_load_acq_ptr((const volatile uintptr_t *const)(x));\
 	__retptr; \
 })
 #define	atomic_readandclear_ptr		ATOMIC_SAN(readandclear_ptr)