Diffstat (limited to 'arch/mips/include/asm/atomic.h')
-rw-r--r-- | arch/mips/include/asm/atomic.h | 44
1 file changed, 22 insertions, 22 deletions
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index dd75d673447..c63c56bfd18 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -29,7 +29,7 @@
  *
  * Atomically reads the value of @v.
  */
-#define atomic_read(v)		((v)->counter)
+#define atomic_read(v)		(*(volatile int *)&(v)->counter)
 
 /*
  * atomic_set - set atomic variable
@@ -137,7 +137,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -189,7 +189,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -249,7 +249,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 {
 	int result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
@@ -410,7 +410,7 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
  * @v: pointer of type atomic64_t
  *
  */
-#define atomic64_read(v)	((v)->counter)
+#define atomic64_read(v)	(*(volatile long *)&(v)->counter)
 
 /*
  * atomic64_set - set atomic variable
@@ -434,7 +434,7 @@ static __inline__ void atomic64_add(long i, atomic64_t * v)
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
 		"1:	lld	%0, %1		# atomic64_add		\n"
-		"	addu	%0, %2					\n"
+		"	daddu	%0, %2					\n"
 		"	scd	%0, %1					\n"
 		"	beqzl	%0, 1b					\n"
 		"	.set	mips0					\n"
@@ -446,7 +446,7 @@ static __inline__ void atomic64_add(long i, atomic64_t * v)
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
 		"1:	lld	%0, %1		# atomic64_add		\n"
-		"	addu	%0, %2					\n"
+		"	daddu	%0, %2					\n"
 		"	scd	%0, %1					\n"
 		"	beqz	%0, 2f					\n"
 		"	.subsection 2					\n"
@@ -479,7 +479,7 @@ static __inline__ void atomic64_sub(long i, atomic64_t * v)
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
 		"1:	lld	%0, %1		# atomic64_sub		\n"
-		"	subu	%0, %2					\n"
+		"	dsubu	%0, %2					\n"
 		"	scd	%0, %1					\n"
 		"	beqzl	%0, 1b					\n"
 		"	.set	mips0					\n"
@@ -491,7 +491,7 @@ static __inline__ void atomic64_sub(long i, atomic64_t * v)
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
 		"1:	lld	%0, %1		# atomic64_sub		\n"
-		"	subu	%0, %2					\n"
+		"	dsubu	%0, %2					\n"
 		"	scd	%0, %1					\n"
 		"	beqz	%0, 2f					\n"
 		"	.subsection 2					\n"
@@ -516,7 +516,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -524,10 +524,10 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
 		"1:	lld	%1, %2		# atomic64_add_return	\n"
-		"	addu	%0, %1, %3				\n"
+		"	daddu	%0, %1, %3				\n"
 		"	scd	%0, %2					\n"
 		"	beqzl	%0, 1b					\n"
-		"	addu	%0, %1, %3				\n"
+		"	daddu	%0, %1, %3				\n"
 		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
@@ -538,10 +538,10 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
 		"1:	lld	%1, %2		# atomic64_add_return	\n"
-		"	addu	%0, %1, %3				\n"
+		"	daddu	%0, %1, %3				\n"
 		"	scd	%0, %2					\n"
 		"	beqz	%0, 2f					\n"
-		"	addu	%0, %1, %3				\n"
+		"	daddu	%0, %1, %3				\n"
 		"	.subsection 2					\n"
 		"2:	b	1b					\n"
 		"	.previous					\n"
@@ -568,7 +568,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -576,10 +576,10 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
 		"1:	lld	%1, %2		# atomic64_sub_return	\n"
-		"	subu	%0, %1, %3				\n"
+		"	dsubu	%0, %1, %3				\n"
 		"	scd	%0, %2					\n"
 		"	beqzl	%0, 1b					\n"
-		"	subu	%0, %1, %3				\n"
+		"	dsubu	%0, %1, %3				\n"
 		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
@@ -590,10 +590,10 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
 		"1:	lld	%1, %2		# atomic64_sub_return	\n"
-		"	subu	%0, %1, %3				\n"
+		"	dsubu	%0, %1, %3				\n"
 		"	scd	%0, %2					\n"
 		"	beqz	%0, 2f					\n"
-		"	subu	%0, %1, %3				\n"
+		"	dsubu	%0, %1, %3				\n"
 		"	.subsection 2					\n"
 		"2:	b	1b					\n"
 		"	.previous					\n"
@@ -628,7 +628,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 {
 	long result;
 
-	smp_llsc_mb();
+	smp_mb__before_llsc();
 
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
@@ -788,9 +788,9 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  * atomic*_return operations are serializing but not the non-*_return
  * versions.
  */
-#define smp_mb__before_atomic_dec()	smp_llsc_mb()
+#define smp_mb__before_atomic_dec()	smp_mb__before_llsc()
 #define smp_mb__after_atomic_dec()	smp_llsc_mb()
-#define smp_mb__before_atomic_inc()	smp_llsc_mb()
+#define smp_mb__before_atomic_inc()	smp_mb__before_llsc()
 #define smp_mb__after_atomic_inc()	smp_llsc_mb()
 
 #include <asm-generic/atomic-long.h>