author    David S. Miller <davem@davemloft.net>  2008-11-15 13:33:25 -0800
committer David S. Miller <davem@davemloft.net>  2008-12-04 09:16:47 -0800
commit    293666b7a17cb7a389fc274980439212386a19c4
tree      075cc7661d2113cf04da7130b3383979d8024206 /arch/sparc64/lib
parent    64f2dde3f743c8a1ad8c0a1aa74166c1034afd92
sparc64: Stop using memory barriers for atomics and locks.
The kernel always executes in the TSO memory model now, so none of
this stuff is necessary any more.

With helpful feedback from Nick Piggin.

Signed-off-by: David S. Miller <davem@davemloft.net>
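The reasoning, in short: every membar removed by this patch sits next to a
cas/casx retry loop. Under TSO, loads are ordered against loads, stores
against stores, and the atomic cas itself is effectively fully ordered, so
the loop alone already provides the ordering the membars were spelling out.
A minimal userspace sketch of that loop shape, with a GCC __sync builtin
standing in for the cas instruction (names here are illustrative, not
kernel code):

/* sketch.c -- illustrative only; approximates the shape of
 * atomic_add_ret's cas loop, with a GCC builtin in place of raw cas. */
#include <stdio.h>

static int atomic_add_ret_sketch(int incr, int *ptr)
{
	int old, new;

	do {
		old = *ptr;		/* lduw [%o1], %g1    */
		new = old + incr;	/* add  %g1, %o0, %g7 */
		/* cas [%o1], %g1, %g7 -- store 'new' iff *ptr still
		 * equals 'old'.  On TSO hardware the cas is already
		 * fully ordered, which is why the surrounding membars
		 * could be dropped. */
	} while (!__sync_bool_compare_and_swap(ptr, old, new));

	return new;			/* value after the add */
}

int main(void)
{
	int v = 40;
	printf("%d\n", atomic_add_ret_sketch(2, &v));	/* prints 42 */
	return 0;
}

Concurrent callers then rely only on cas's atomicity and TSO's ordering,
which is exactly what the patch leaves in place.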
Diffstat (limited to 'arch/sparc64/lib')
-rw-r--r--  arch/sparc64/lib/atomic.S  26
-rw-r--r--  arch/sparc64/lib/bitops.S  24
-rw-r--r--  arch/sparc64/lib/rwsem.S    7
3 files changed, 0 insertions(+), 57 deletions(-)
diff --git a/arch/sparc64/lib/atomic.S b/arch/sparc64/lib/atomic.S
index 70ac4186f62..0268210ca16 100644
--- a/arch/sparc64/lib/atomic.S
+++ b/arch/sparc64/lib/atomic.S
@@ -43,29 +43,10 @@ atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
2: BACKOFF_SPIN(%o2, %o3, 1b)
.size atomic_sub, .-atomic_sub
- /* On SMP we need to use memory barriers to ensure
- * correct memory operation ordering, nop these out
- * for uniprocessor.
- */
-#ifdef CONFIG_SMP
-
-#define ATOMIC_PRE_BARRIER membar #StoreLoad | #LoadLoad;
-#define ATOMIC_POST_BARRIER \
- ba,pt %xcc, 80b; \
- membar #StoreLoad | #StoreStore
-
-80: retl
- nop
-#else
-#define ATOMIC_PRE_BARRIER
-#define ATOMIC_POST_BARRIER
-#endif
-
.globl atomic_add_ret
.type atomic_add_ret,#function
atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
BACKOFF_SETUP(%o2)
- ATOMIC_PRE_BARRIER
1: lduw [%o1], %g1
add %g1, %o0, %g7
cas [%o1], %g1, %g7
@@ -73,7 +54,6 @@ atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
bne,pn %icc, 2f
add %g7, %o0, %g7
sra %g7, 0, %o0
- ATOMIC_POST_BARRIER
retl
nop
2: BACKOFF_SPIN(%o2, %o3, 1b)
@@ -83,7 +63,6 @@ atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
.type atomic_sub_ret,#function
atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
BACKOFF_SETUP(%o2)
- ATOMIC_PRE_BARRIER
1: lduw [%o1], %g1
sub %g1, %o0, %g7
cas [%o1], %g1, %g7
@@ -91,7 +70,6 @@ atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
bne,pn %icc, 2f
sub %g7, %o0, %g7
sra %g7, 0, %o0
- ATOMIC_POST_BARRIER
retl
nop
2: BACKOFF_SPIN(%o2, %o3, 1b)
@@ -131,7 +109,6 @@ atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
.type atomic64_add_ret,#function
atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
BACKOFF_SETUP(%o2)
- ATOMIC_PRE_BARRIER
1: ldx [%o1], %g1
add %g1, %o0, %g7
casx [%o1], %g1, %g7
@@ -139,7 +116,6 @@ atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
bne,pn %xcc, 2f
add %g7, %o0, %g7
mov %g7, %o0
- ATOMIC_POST_BARRIER
retl
nop
2: BACKOFF_SPIN(%o2, %o3, 1b)
@@ -149,7 +125,6 @@ atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
.type atomic64_sub_ret,#function
atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
BACKOFF_SETUP(%o2)
- ATOMIC_PRE_BARRIER
1: ldx [%o1], %g1
sub %g1, %o0, %g7
casx [%o1], %g1, %g7
@@ -157,7 +132,6 @@ atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
bne,pn %xcc, 2f
sub %g7, %o0, %g7
mov %g7, %o0
- ATOMIC_POST_BARRIER
retl
nop
2: BACKOFF_SPIN(%o2, %o3, 1b)
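Stitching the atomic_add_ret hunks above back together, the post-patch
function reduces to the bare backoff loop; a reconstruction for
illustration only (whitespace and the trailing .size directive are
approximated from context):

	.globl	atomic_add_ret
	.type	atomic_add_ret,#function
atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 2f
	 add	%g7, %o0, %g7
	sra	%g7, 0, %o0
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_add_ret, .-atomic_add_ret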
diff --git a/arch/sparc64/lib/bitops.S b/arch/sparc64/lib/bitops.S
index 6b015a6eefb..2b7228cb8c2 100644
--- a/arch/sparc64/lib/bitops.S
+++ b/arch/sparc64/lib/bitops.S
@@ -8,29 +8,10 @@
.text
- /* On SMP we need to use memory barriers to ensure
- * correct memory operation ordering, nop these out
- * for uniprocessor.
- */
-
-#ifdef CONFIG_SMP
-#define BITOP_PRE_BARRIER membar #StoreLoad | #LoadLoad
-#define BITOP_POST_BARRIER \
- ba,pt %xcc, 80b; \
- membar #StoreLoad | #StoreStore
-
-80: retl
- nop
-#else
-#define BITOP_PRE_BARRIER
-#define BITOP_POST_BARRIER
-#endif
-
.globl test_and_set_bit
.type test_and_set_bit,#function
test_and_set_bit: /* %o0=nr, %o1=addr */
BACKOFF_SETUP(%o3)
- BITOP_PRE_BARRIER
srlx %o0, 6, %g1
mov 1, %o2
sllx %g1, 3, %g3
@@ -45,7 +26,6 @@ test_and_set_bit: /* %o0=nr, %o1=addr */
and %g7, %o2, %g2
clr %o0
movrne %g2, 1, %o0
- BITOP_POST_BARRIER
retl
nop
2: BACKOFF_SPIN(%o3, %o4, 1b)
@@ -55,7 +35,6 @@ test_and_set_bit: /* %o0=nr, %o1=addr */
.type test_and_clear_bit,#function
test_and_clear_bit: /* %o0=nr, %o1=addr */
BACKOFF_SETUP(%o3)
- BITOP_PRE_BARRIER
srlx %o0, 6, %g1
mov 1, %o2
sllx %g1, 3, %g3
@@ -70,7 +49,6 @@ test_and_clear_bit: /* %o0=nr, %o1=addr */
and %g7, %o2, %g2
clr %o0
movrne %g2, 1, %o0
- BITOP_POST_BARRIER
retl
nop
2: BACKOFF_SPIN(%o3, %o4, 1b)
@@ -80,7 +58,6 @@ test_and_clear_bit: /* %o0=nr, %o1=addr */
.type test_and_change_bit,#function
test_and_change_bit: /* %o0=nr, %o1=addr */
BACKOFF_SETUP(%o3)
- BITOP_PRE_BARRIER
srlx %o0, 6, %g1
mov 1, %o2
sllx %g1, 3, %g3
@@ -95,7 +72,6 @@ test_and_change_bit: /* %o0=nr, %o1=addr */
and %g7, %o2, %g2
clr %o0
movrne %g2, 1, %o0
- BITOP_POST_BARRIER
retl
nop
2: BACKOFF_SPIN(%o3, %o4, 1b)
diff --git a/arch/sparc64/lib/rwsem.S b/arch/sparc64/lib/rwsem.S
index 1a4cc5654de..91a7d29a79d 100644
--- a/arch/sparc64/lib/rwsem.S
+++ b/arch/sparc64/lib/rwsem.S
@@ -17,7 +17,6 @@ __down_read:
bne,pn %icc, 1b
add %g7, 1, %g7
cmp %g7, 0
- membar #StoreLoad | #StoreStore
bl,pn %icc, 3f
nop
2:
@@ -42,7 +41,6 @@ __down_read_trylock:
cmp %g1, %g7
bne,pn %icc, 1b
mov 1, %o1
- membar #StoreLoad | #StoreStore
2: retl
mov %o1, %o0
.size __down_read_trylock, .-__down_read_trylock
@@ -58,7 +56,6 @@ __down_write:
cmp %g3, %g7
bne,pn %icc, 1b
cmp %g7, 0
- membar #StoreLoad | #StoreStore
bne,pn %icc, 3f
nop
2: retl
@@ -85,7 +82,6 @@ __down_write_trylock:
cmp %g3, %g7
bne,pn %icc, 1b
mov 1, %o1
- membar #StoreLoad | #StoreStore
2: retl
mov %o1, %o0
.size __down_write_trylock, .-__down_write_trylock
@@ -99,7 +95,6 @@ __up_read:
cmp %g1, %g7
bne,pn %icc, 1b
cmp %g7, 0
- membar #StoreLoad | #StoreStore
bl,pn %icc, 3f
nop
2: retl
@@ -129,7 +124,6 @@ __up_write:
bne,pn %icc, 1b
sub %g7, %g1, %g7
cmp %g7, 0
- membar #StoreLoad | #StoreStore
bl,pn %icc, 3f
nop
2:
@@ -155,7 +149,6 @@ __downgrade_write:
bne,pn %icc, 1b
sub %g7, %g1, %g7
cmp %g7, 0
- membar #StoreLoad | #StoreStore
bl,pn %icc, 3f
nop
2: