diff --git a/sys/powerpc/include/atomic.h b/sys/powerpc/include/atomic.h
index 710a50f5aba..1c492bbf861 100644
--- a/sys/powerpc/include/atomic.h
+++ b/sys/powerpc/include/atomic.h
@@ -65,6 +65,15 @@
 #define __ATOMIC_ACQ()	__asm __volatile("isync" : : : "memory")
 #endif
 
+#if !defined(__powerpc64__) && defined(_KERNEL)
+/*
+ * 64-bit atomic emulation using a global lock.
+ * Restricted to kernel use.
+ */
+#include <sys/mutex.h>
+extern struct mtx ppc32_atomic64_mtx;
+#endif
+
 static __inline void
 powerpc_lwsync(void)
 {
@@ -114,25 +123,33 @@ powerpc_lwsync(void)
 	: "r" (p), "r" (v), "m" (*p) \
 	: "cr0", "memory") \
 	/* __atomic_add_long */
+
+#ifdef _KERNEL
+#define __atomic_add_int64_t(p, v, t) \
+	mtx_lock_spin(&ppc32_atomic64_mtx); \
+	*(p) += (v); \
+	mtx_unlock_spin(&ppc32_atomic64_mtx); \
+	/* __atomic_add_int64_t */
+#endif
 #endif
 
 #define _ATOMIC_ADD(type) \
     static __inline void \
     atomic_add_##type(volatile u_##type *p, u_##type v) { \
-	u_##type t; \
+	u_##type t __unused; \
 	__atomic_add_##type(p, v, t); \
     } \
 \
     static __inline void \
     atomic_add_acq_##type(volatile u_##type *p, u_##type v) { \
-	u_##type t; \
+	u_##type t __unused; \
 	__atomic_add_##type(p, v, t); \
 	__ATOMIC_ACQ(); \
     } \
 \
     static __inline void \
     atomic_add_rel_##type(volatile u_##type *p, u_##type v) { \
-	u_##type t; \
+	u_##type t __unused; \
 	__ATOMIC_REL(); \
 	__atomic_add_##type(p, v, t); \
     } \
@@ -154,6 +171,14 @@ _ATOMIC_ADD(long)
 #define atomic_add_acq_ptr	atomic_add_acq_long
 #define atomic_add_rel_ptr	atomic_add_rel_long
 #else
+#ifdef _KERNEL
+_ATOMIC_ADD(int64_t)
+
+#define atomic_add_64		atomic_add_int64_t
+#define atomic_add_acq_64	atomic_add_acq_int64_t
+#define atomic_add_rel_64	atomic_add_rel_int64_t
+#endif
+
 #define atomic_add_ptr		atomic_add_int
 #define atomic_add_acq_ptr	atomic_add_acq_int
 #define atomic_add_rel_ptr	atomic_add_rel_int
@@ -200,25 +225,33 @@ _ATOMIC_ADD(long)
 	: "r" (p), "r" (v), "m" (*p) \
 	: "cr0", "memory") \
 	/* __atomic_clear_long */
+
+#ifdef _KERNEL
+#define \
__atomic_clear_int64_t(p, v, t) \
+	mtx_lock_spin(&ppc32_atomic64_mtx); \
+	*(p) &= ~(v); \
+	mtx_unlock_spin(&ppc32_atomic64_mtx); \
+	/* __atomic_clear_int64_t */
+#endif
 #endif
 
 #define _ATOMIC_CLEAR(type) \
     static __inline void \
     atomic_clear_##type(volatile u_##type *p, u_##type v) { \
-	u_##type t; \
+	u_##type t __unused; \
 	__atomic_clear_##type(p, v, t); \
     } \
 \
     static __inline void \
     atomic_clear_acq_##type(volatile u_##type *p, u_##type v) { \
-	u_##type t; \
+	u_##type t __unused; \
 	__atomic_clear_##type(p, v, t); \
 	__ATOMIC_ACQ(); \
     } \
 \
     static __inline void \
     atomic_clear_rel_##type(volatile u_##type *p, u_##type v) { \
-	u_##type t; \
+	u_##type t __unused; \
 	__ATOMIC_REL(); \
 	__atomic_clear_##type(p, v, t); \
     } \
@@ -241,6 +274,14 @@ _ATOMIC_CLEAR(long)
 #define atomic_clear_acq_ptr	atomic_clear_acq_long
 #define atomic_clear_rel_ptr	atomic_clear_rel_long
 #else
+#ifdef _KERNEL
+_ATOMIC_CLEAR(int64_t)
+
+#define atomic_clear_64		atomic_clear_int64_t
+#define atomic_clear_acq_64	atomic_clear_acq_int64_t
+#define atomic_clear_rel_64	atomic_clear_rel_int64_t
+#endif
+
 #define atomic_clear_ptr	atomic_clear_int
 #define atomic_clear_acq_ptr	atomic_clear_acq_int
 #define atomic_clear_rel_ptr	atomic_clear_rel_int
@@ -302,25 +343,33 @@ _ATOMIC_CLEAR(long)
 	: "r" (p), "r" (v), "m" (*p) \
 	: "cr0", "memory") \
 	/* __atomic_set_long */
+
+#ifdef _KERNEL
+#define __atomic_set_int64_t(p, v, t) \
+	mtx_lock_spin(&ppc32_atomic64_mtx); \
+	*(p) |= (v); \
+	mtx_unlock_spin(&ppc32_atomic64_mtx); \
+	/* __atomic_set_int64_t */
+#endif
 #endif
 
 #define _ATOMIC_SET(type) \
     static __inline void \
     atomic_set_##type(volatile u_##type *p, u_##type v) { \
-	u_##type t; \
+	u_##type t __unused; \
 	__atomic_set_##type(p, v, t); \
     } \
 \
     static __inline void \
     atomic_set_acq_##type(volatile u_##type *p, u_##type v) { \
-	u_##type t; \
+	u_##type t __unused; \
 	__atomic_set_##type(p, v, t); \
 	__ATOMIC_ACQ(); \
     } \
 \
     static __inline void \
     atomic_set_rel_##type(volatile u_##type *p, u_##type v) { \
-
u_##type t; \ + u_##type t __unused; \ __ATOMIC_REL(); \ __atomic_set_##type(p, v, t); \ } \ @@ -342,6 +391,14 @@ _ATOMIC_SET(long) #define atomic_set_acq_ptr atomic_set_acq_long #define atomic_set_rel_ptr atomic_set_rel_long #else +#ifdef _KERNEL +_ATOMIC_SET(int64_t) + +#define atomic_set_64 atomic_set_int64_t +#define atomic_set_acq_64 atomic_set_acq_int64_t +#define atomic_set_rel_64 atomic_set_rel_int64_t +#endif + #define atomic_set_ptr atomic_set_int #define atomic_set_acq_ptr atomic_set_acq_int #define atomic_set_rel_ptr atomic_set_rel_int @@ -388,25 +445,32 @@ _ATOMIC_SET(long) : "r" (p), "r" (v), "m" (*p) \ : "cr0", "memory") \ /* __atomic_subtract_long */ +#ifdef _KERNEL +#define __atomic_subtract_int64_t(p, v, t) \ + mtx_lock_spin(&ppc32_atomic64_mtx); \ + *p -= v; \ + mtx_unlock_spin(&ppc32_atomic64_mtx); \ + /* __atomic_subtract_int64_t */ +#endif #endif #define _ATOMIC_SUBTRACT(type) \ static __inline void \ atomic_subtract_##type(volatile u_##type *p, u_##type v) { \ - u_##type t; \ + u_##type t __unused; \ __atomic_subtract_##type(p, v, t); \ } \ \ static __inline void \ atomic_subtract_acq_##type(volatile u_##type *p, u_##type v) { \ - u_##type t; \ + u_##type t __unused; \ __atomic_subtract_##type(p, v, t); \ __ATOMIC_ACQ(); \ } \ \ static __inline void \ atomic_subtract_rel_##type(volatile u_##type *p, u_##type v) { \ - u_##type t; \ + u_##type t __unused; \ __ATOMIC_REL(); \ __atomic_subtract_##type(p, v, t); \ } \ @@ -428,6 +492,14 @@ _ATOMIC_SUBTRACT(long) #define atomic_subtract_acq_ptr atomic_subtract_acq_long #define atomic_subtract_rel_ptr atomic_subtract_rel_long #else +#ifdef _KERNEL +_ATOMIC_SUBTRACT(int64_t) + +#define atomic_subtract_64 atomic_subtract_int64_t +#define atomic_subtract_acq_64 atomic_subtract_acq_int64_t +#define atomic_subtract_rel_64 atomic_subtract_rel_int64_t +#endif + #define atomic_subtract_ptr atomic_subtract_int #define atomic_subtract_acq_ptr atomic_subtract_acq_int #define atomic_subtract_rel_ptr 
atomic_subtract_rel_int
@@ -481,6 +553,19 @@ atomic_readandclear_long(volatile u_long *addr)
 
 	return (result);
 }
+#else
+#ifdef _KERNEL
+static __inline uint64_t
+atomic_readandclear_64(volatile uint64_t *addr)
+{
+	uint64_t result;
+	mtx_lock_spin(&ppc32_atomic64_mtx);
+	result = *addr;
+	*addr = 0;
+	mtx_unlock_spin(&ppc32_atomic64_mtx);
+	return (result);
+}
+#endif
 #endif
 
 #define atomic_readandclear_32		atomic_readandclear_int
@@ -699,6 +784,20 @@ atomic_cmpset_long(volatile u_long* p, u_long cmpval, u_long newval)
 
 	return (ret);
 }
+#if defined(_KERNEL) && !defined(__powerpc64__)
+static __inline int
+atomic_cmpset_int64_t(volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
+{
+	int ret = 0;
+	mtx_lock_spin(&ppc32_atomic64_mtx);
+	if (*p == cmpval) {
+		*p = newval;
+		ret = 1;
+	}
+	mtx_unlock_spin(&ppc32_atomic64_mtx);
+	return (ret);
+}
+#endif
 
 #define ATOMIC_CMPSET_ACQ_REL(type) \
     static __inline int \
@@ -744,6 +843,13 @@ ATOMIC_CMPSET_ACQ_REL(long);
 #define atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
 #define atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
 #else
+#ifdef _KERNEL
+ATOMIC_CMPSET_ACQ_REL(int64_t);
+#define atomic_cmpset_64	atomic_cmpset_int64_t
+#define atomic_cmpset_acq_64	atomic_cmpset_acq_int64_t
+#define atomic_cmpset_rel_64	atomic_cmpset_rel_int64_t
+#endif
+
 #define atomic_cmpset_ptr	atomic_cmpset_int
 #define atomic_cmpset_acq_ptr	atomic_cmpset_acq_int
 #define atomic_cmpset_rel_ptr	atomic_cmpset_rel_int
@@ -868,6 +974,25 @@ atomic_fcmpset_long(volatile u_long *p, u_long *cmpval, u_long newval)
 
 	return (ret);
 }
+#if defined(_KERNEL) && !defined(__powerpc64__)
+static __inline int
+atomic_fcmpset_int64_t(volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
+{
+	int ret;
+	mtx_lock_spin(&ppc32_atomic64_mtx);
+	if (*p == *cmpval) {
+		*p = newval;
+		ret = 1;
+	}
+	else {
+		*cmpval = *p;
+		ret = 0;
+	}
+	mtx_unlock_spin(&ppc32_atomic64_mtx);
+	return (ret);
+}
+#endif
+
 #define ATOMIC_FCMPSET_ACQ_REL(type) \
     static __inline int \
atomic_fcmpset_acq_##type(volatile u_##type *p, \ @@ -911,6 +1036,13 @@ ATOMIC_FCMPSET_ACQ_REL(long); #define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_long #define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_long #else +#ifdef _KERNEL +ATOMIC_FCMPSET_ACQ_REL(int64_t); +#define atomic_fcmpset_64 atomic_fcmpset_int64_t; +#define atomic_fcmpset_acq_64 atomic_fcmpset_acq_int64_t; +#define atomic_fcmpset_rel_64 atomic_fcmpset_rel_int64_t; +#endif + #define atomic_fcmpset_ptr atomic_fcmpset_int #define atomic_fcmpset_acq_ptr atomic_fcmpset_acq_int #define atomic_fcmpset_rel_ptr atomic_fcmpset_rel_int @@ -970,6 +1102,19 @@ atomic_swap_64(volatile u_long *p, u_long v) return (prev); } +#else +#ifdef _KERNEL +static __inline uint64_t +atomic_swap_64(volatile uint64_t *p, uint64_t v) +{ + uint64_t prev; + mtx_lock_spin(&ppc32_atomic64_mtx); + prev = *p; + *p = v; + mtx_unlock_spin(&ppc32_atomic64_mtx); + return (prev); +} +#endif #endif #define atomic_fetchadd_32 atomic_fetchadd_int diff --git a/sys/powerpc/powerpc/machdep.c b/sys/powerpc/powerpc/machdep.c index 16e440b96b4..6fc805c1f7e 100644 --- a/sys/powerpc/powerpc/machdep.c +++ b/sys/powerpc/powerpc/machdep.c @@ -139,6 +139,10 @@ int cacheline_size = 32; #endif int hw_direct_map = 1; +#ifndef __powerpc64__ +struct mtx ppc32_atomic64_mtx; +#endif + #ifdef BOOKE extern vm_paddr_t kernload; #endif @@ -401,6 +405,10 @@ powerpc_init(vm_offset_t fdt, vm_offset_t toc, vm_offset_t ofentry, void *mdp, */ mutex_init(); +#ifndef __powerpc64__ + mtx_init(&ppc32_atomic64_mtx, "atomic64", NULL, MTX_SPIN); +#endif + /* * Install the OF client interface */