diff options
author | Thomas Gleixner <tglx@linutronix.de> | 2008-01-30 13:30:34 +0100 |
---|---|---|
committer | Ingo Molnar <mingo@elte.hu> | 2008-01-30 13:30:34 +0100 |
commit | a33fff3a033f2e8a930067ad608c21e1f86bffab (patch) | |
tree | cf689c288fd27991af739c4af6a5e30368ea4348 /include/asm-x86 | |
parent | 2fed0c507cf0101d511366f36e8573f403dbfea5 (diff) |
x86: fix asm constraints in spinlock_32/64.h
Use the correct constraints for the spinlock assembler functions.
Read-modify-write functions need "+m" instead of "=m".
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Diffstat (limited to 'include/asm-x86')
-rw-r--r-- | include/asm-x86/spinlock_32.h | 2 | ||||
-rw-r--r-- | include/asm-x86/spinlock_64.h | 10 |
2 files changed, 6 insertions, 6 deletions
diff --git a/include/asm-x86/spinlock_32.h b/include/asm-x86/spinlock_32.h index c42c3f12d7c..fca124a1103 100644 --- a/include/asm-x86/spinlock_32.h +++ b/include/asm-x86/spinlock_32.h @@ -99,7 +99,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock) static inline void __raw_spin_unlock(raw_spinlock_t *lock) { - asm volatile("movb $1,%0" : "+m" (lock->slock) :: "memory"); + asm volatile("movb $1,%0" : "=m" (lock->slock) :: "memory"); } #else diff --git a/include/asm-x86/spinlock_64.h b/include/asm-x86/spinlock_64.h index 3b5adf92ad0..e81f6c18d87 100644 --- a/include/asm-x86/spinlock_64.h +++ b/include/asm-x86/spinlock_64.h @@ -34,7 +34,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock) "jle 3b\n\t" "jmp 1b\n" "2:\t" - : "=m" (lock->slock) : : "memory"); + : "+m" (lock->slock) : : "memory"); } /* @@ -80,7 +80,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock) asm volatile( "xchgl %0,%1" - :"=q" (oldval), "=m" (lock->slock) + :"=q" (oldval), "+m" (lock->slock) :"0" (0) : "memory"); return oldval > 0; @@ -162,13 +162,13 @@ static inline int __raw_write_trylock(raw_rwlock_t *lock) static inline void __raw_read_unlock(raw_rwlock_t *rw) { - asm volatile(LOCK_PREFIX "incl %0" :"=m" (rw->lock) : : "memory"); + asm volatile(LOCK_PREFIX "incl %0" :"+m" (rw->lock) : : "memory"); } static inline void __raw_write_unlock(raw_rwlock_t *rw) { - asm volatile(LOCK_PREFIX "addl $" RW_LOCK_BIAS_STR ",%0" - : "=m" (rw->lock) : : "memory"); + asm volatile(LOCK_PREFIX "addl $" RW_LOCK_BIAS_STR ", %0" + : "+m" (rw->lock) : : "memory"); } #define _raw_spin_relax(lock) cpu_relax() |