path: root/arch/x86
author	Eric Dumazet <eric.dumazet@gmail.com>	2012-01-03 17:35:40 +0100
committer	Ingo Molnar <mingo@elte.hu>	2012-01-04 15:01:56 +0100
commit	ceb7b40b65539a771d1bfaf47660ac0ee57e0c4f (patch)
tree	77bae0c2bf43b9f415d99d1e3a8dc0667404f965 /arch/x86
parent	cdcd629869fabcd38ebd24a03b0a05ec1cbcafb0 (diff)
x86: Fix atomic64_xxx_cx8() functions
It appears about all functions in arch/x86/lib/atomic64_cx8_32.S are wrong
in case cmpxchg8b must be restarted, because LOCK_PREFIX macro defines a
label "1" clashing with other local labels :

1:	some_instructions
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b	/ jumps to beginning of LOCK_PREFIX !

A possible fix is to use a magic label "672" in LOCK_PREFIX asm definition,
similar to the "671" one we defined in LOCK_PREFIX_HERE.

Signed-off-by: Eric Dumazet <eric.dumazet@gmail.com>
Acked-by: Jan Beulich <JBeulich@suse.com>
Cc: Christoph Lameter <cl@linux.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Andrew Morton <akpm@linux-foundation.org>
Link: http://lkml.kernel.org/r/1325608540.2320.103.camel@edumazet-HP-Compaq-6005-Pro-SFF-PC
Signed-off-by: Ingo Molnar <mingo@elte.hu>
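For illustration, a hand-expanded sketch of the clash; the instructions around
the retry label are hypothetical stand-ins rather than the actual
atomic64_cx8_32.S code, but the macro body is the pre-fix LOCK_PREFIX from the
diff below. GAS resolves "1b" to the nearest preceding "1:" definition, which
after expansion is the one inside the macro instead of the caller's retry label:

1:				# caller's retry label
	movl %eax, %ebx		# hypothetical: rebuild the new 64-bit value
	movl %edx, %ecx		# from the current old value in %edx:%eax
	addl %esi, %ebx
	adcl %edi, %ecx
1:	lock			# expanded from the old LOCK_PREFIX: a second "1:"
	.section .smp_locks,"a"
	.balign 4
	.long 1b - .
	.previous
	cmpxchg8b (%ebp)
	jne 1b			# binds to the macro's "1:", so only the cmpxchg8b
				# is retried and the new value is never recomputed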
Diffstat (limited to 'arch/x86')
-rw-r--r--	arch/x86/include/asm/alternative-asm.h	4
1 files changed, 2 insertions, 2 deletions
diff --git a/arch/x86/include/asm/alternative-asm.h b/arch/x86/include/asm/alternative-asm.h
index 091508b533b..952bd0100c5 100644
--- a/arch/x86/include/asm/alternative-asm.h
+++ b/arch/x86/include/asm/alternative-asm.h
@@ -4,10 +4,10 @@
 
 #ifdef CONFIG_SMP
 	.macro LOCK_PREFIX
-1:	lock
+672:	lock
 	.section .smp_locks,"a"
 	.balign 4
-	.long 1b - .
+	.long 672b - .
 	.previous
 	.endm
 #else
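After the patch, the same hypothetical call site expands without the clash, so
"jne 1b" restarts the whole sequence as intended; a sketch under the same
assumptions as above:

1:				# caller's retry label
	movl %eax, %ebx		# hypothetical: rebuild the new value from %edx:%eax
	movl %edx, %ecx
	addl %esi, %ebx
	adcl %edi, %ecx
672:	lock			# macro-internal label no longer shadows "1:"
	.section .smp_locks,"a"
	.balign 4
	.long 672b - .
	.previous
	cmpxchg8b (%ebp)
	jne 1b			# correctly jumps back to the caller's retry label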