powerpc: Make {cmp}xchg* and their atomic_ versions fully ordered
According to memory-barriers.txt, xchg*, cmpxchg* and their atomic_
versions all need to be fully ordered. However, on powerpc they are
currently only RELEASE+ACQUIRE, which is not fully ordered.
Therefore, replace PPC_RELEASE_BARRIER and PPC_ACQUIRE_BARRIER with
PPC_ATOMIC_ENTRY_BARRIER and PPC_ATOMIC_EXIT_BARRIER, respectively, in
__{cmp,}xchg_{u32,u64} to guarantee fully ordered semantics for
atomic{,64}_{cmp,}xchg() and {cmp,}xchg(). This complements commit
b97021f855 ("powerpc: Fix atomic_xxx_return barrier semantics").
This patch depends on the patch "powerpc: Make value-returning atomics
fully ordered" for the PPC_ATOMIC_ENTRY_BARRIER definition.
Cc: stable@vger.kernel.org # 3.2+
Signed-off-by: Boqun Feng <boqun.feng@gmail.com>
Reviewed-by: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
commit 81d7a3294d
parent 49e9cf3f0c
@@ -18,12 +18,12 @@ __xchg_u32(volatile void *p, unsigned long val)
 	unsigned long prev;
 
 	__asm__ __volatile__(
-	PPC_RELEASE_BARRIER
+	PPC_ATOMIC_ENTRY_BARRIER
 "1:	lwarx	%0,0,%2 \n"
 	PPC405_ERR77(0,%2)
 "	stwcx.	%3,0,%2 \n\
 	bne-	1b"
-	PPC_ACQUIRE_BARRIER
+	PPC_ATOMIC_EXIT_BARRIER
 	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
 	: "r" (p), "r" (val)
 	: "cc", "memory");
@@ -61,12 +61,12 @@ __xchg_u64(volatile void *p, unsigned long val)
 	unsigned long prev;
 
 	__asm__ __volatile__(
-	PPC_RELEASE_BARRIER
+	PPC_ATOMIC_ENTRY_BARRIER
 "1:	ldarx	%0,0,%2 \n"
 	PPC405_ERR77(0,%2)
 "	stdcx.	%3,0,%2 \n\
 	bne-	1b"
-	PPC_ACQUIRE_BARRIER
+	PPC_ATOMIC_EXIT_BARRIER
 	: "=&r" (prev), "+m" (*(volatile unsigned long *)p)
 	: "r" (p), "r" (val)
 	: "cc", "memory");
@@ -151,14 +151,14 @@ __cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
 	unsigned int prev;
 
 	__asm__ __volatile__ (
-	PPC_RELEASE_BARRIER
+	PPC_ATOMIC_ENTRY_BARRIER
 "1:	lwarx	%0,0,%2		# __cmpxchg_u32\n\
 	cmpw	0,%0,%3\n\
 	bne-	2f\n"
 	PPC405_ERR77(0,%2)
 "	stwcx.	%4,0,%2\n\
 	bne-	1b"
-	PPC_ACQUIRE_BARRIER
+	PPC_ATOMIC_EXIT_BARRIER
 	"\n\
 2:"
 	: "=&r" (prev), "+m" (*p)
@@ -197,13 +197,13 @@ __cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
 	unsigned long prev;
 
 	__asm__ __volatile__ (
-	PPC_RELEASE_BARRIER
+	PPC_ATOMIC_ENTRY_BARRIER
 "1:	ldarx	%0,0,%2		# __cmpxchg_u64\n\
 	cmpd	0,%0,%3\n\
 	bne-	2f\n\
 	stdcx.	%4,0,%2\n\
 	bne-	1b"
-	PPC_ACQUIRE_BARRIER
+	PPC_ATOMIC_EXIT_BARRIER
 	"\n\
 2:"
 	: "=&r" (prev), "+m" (*p)
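
For illustration only (not part of the patch), a store-buffering style
sketch of the ordering callers are entitled to rely on; x, y, dummy, r0
and r1 are hypothetical:

int x, y, dummy;
int r0, r1;

/* CPU 0 */
void cpu0(void)
{
	WRITE_ONCE(x, 1);
	(void)xchg(&dummy, 1);	/* fully ordered: acts as a full barrier */
	r0 = READ_ONCE(y);
}

/* CPU 1 */
void cpu1(void)
{
	WRITE_ONCE(y, 1);
	(void)xchg(&dummy, 1);	/* fully ordered: acts as a full barrier */
	r1 = READ_ONCE(x);
}

With fully ordered xchg() the outcome r0 == 0 && r1 == 0 is forbidden.
With only lwsync before and isync after the ll/sc loop, the earlier store
and the later load can still be reordered, so both CPUs may read 0.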