author    Thomas Gleixner <tglx@linutronix.de>    2008-01-30 13:30:34 +0100
committer Ingo Molnar <mingo@elte.hu>             2008-01-30 13:30:34 +0100
commit    6514f93a2ce643ef5914eae7ce49b978e1d356aa (patch)
tree      048b77dbd62677bdcaba692e626dfc070ef0d28f /include/asm-x86
parent    a33fff3a033f2e8a930067ad608c21e1f86bffab (diff)
x86: use immediates instead of RW_LOCK_BIAS_STR
Use an immediate operand instead of RW_LOCK_BIAS_STR. This makes the
code more readable and gets rid of the string constant.

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
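For illustration, a minimal user-space sketch of the substitution this patch applies (not kernel code: LOCK_PREFIX is spelled out as "lock; " and the function names are made up). With an "i" constraint the compiler substitutes the constant as an immediate operand, so the stringified RW_LOCK_BIAS_STR macro is no longer needed.

/*
 * Illustrative sketch only, not the kernel implementation (x86, GCC).
 * Both functions compile to the same locked addl with a 0x01000000
 * immediate; only the way the constant reaches the asm template differs.
 */
#define RW_LOCK_BIAS            0x01000000
#define RW_LOCK_BIAS_STR        "0x01000000"    /* old helper, now redundant */

/* Old style: the constant is pasted into the asm template as a string,
 * so the '$' immediate prefix has to be written by hand. */
static inline void write_unlock_old(int *lock)
{
        asm volatile("lock; addl $" RW_LOCK_BIAS_STR ", %0"
                     : "+m" (*lock) : : "memory");
}

/* New style: the "i" constraint hands the numeric constant to the
 * compiler, which prints it as an immediate (including the '$'),
 * so the template just references operand %1. */
static inline void write_unlock_new(int *lock)
{
        asm volatile("lock; addl %1, %0"
                     : "+m" (*lock) : "i" (RW_LOCK_BIAS) : "memory");
}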
Diffstat (limited to 'include/asm-x86')
-rw-r--r--  include/asm-x86/rwlock.h      | 1 -
-rw-r--r--  include/asm-x86/spinlock_32.h | 8 ++++----
-rw-r--r--  include/asm-x86/spinlock_64.h | 6 +++---
3 files changed, 7 insertions(+), 8 deletions(-)
diff --git a/include/asm-x86/rwlock.h b/include/asm-x86/rwlock.h
index f2b64a429e6..6a8c0d64510 100644
--- a/include/asm-x86/rwlock.h
+++ b/include/asm-x86/rwlock.h
@@ -2,7 +2,6 @@
 #define _ASM_X86_RWLOCK_H
 
 #define RW_LOCK_BIAS             0x01000000
-#define RW_LOCK_BIAS_STR        "0x01000000"
 
 /* Actual code is in asm/spinlock.h or in arch/x86/lib/rwlock.S */
 
diff --git a/include/asm-x86/spinlock_32.h b/include/asm-x86/spinlock_32.h
index fca124a1103..e7a14ab906e 100644
--- a/include/asm-x86/spinlock_32.h
+++ b/include/asm-x86/spinlock_32.h
@@ -156,11 +156,11 @@ static inline void __raw_read_lock(raw_rwlock_t *rw)
 
 static inline void __raw_write_lock(raw_rwlock_t *rw)
 {
-        asm volatile(LOCK_PREFIX " subl $" RW_LOCK_BIAS_STR ",(%0)\n\t"
+        asm volatile(LOCK_PREFIX " subl %1,(%0)\n\t"
                      "jz 1f\n"
                      "call __write_lock_failed\n\t"
                      "1:\n"
-                     ::"a" (rw) : "memory");
+                     ::"a" (rw), "i" (RW_LOCK_BIAS) : "memory");
 }
 
 static inline int __raw_read_trylock(raw_rwlock_t *lock)
@@ -191,8 +191,8 @@ static inline void __raw_read_unlock(raw_rwlock_t *rw)
 
 static inline void __raw_write_unlock(raw_rwlock_t *rw)
 {
-        asm volatile(LOCK_PREFIX "addl $" RW_LOCK_BIAS_STR ", %0"
-                     : "+m" (rw->lock) : : "memory");
+        asm volatile(LOCK_PREFIX "addl %1, %0"
+                     : "+m" (rw->lock) : "i" (RW_LOCK_BIAS) : "memory");
 }
 
 #define _raw_spin_relax(lock)   cpu_relax()
diff --git a/include/asm-x86/spinlock_64.h b/include/asm-x86/spinlock_64.h
index e81f6c18d87..ab50e7f5105 100644
--- a/include/asm-x86/spinlock_64.h
+++ b/include/asm-x86/spinlock_64.h
@@ -127,7 +127,7 @@ static inline void __raw_read_lock(raw_rwlock_t *rw)
                      "jns 1f\n"
                      "call __read_lock_failed\n\t"
                      "1:\n"
-                     ::"D" (rw), "i" (RW_LOCK_BIAS) : "memory");
+                     ::"D" (rw) : "memory");
 }
 
 static inline void __raw_write_lock(raw_rwlock_t *rw)
@@ -167,8 +167,8 @@ static inline void __raw_read_unlock(raw_rwlock_t *rw)
 
 static inline void __raw_write_unlock(raw_rwlock_t *rw)
 {
-        asm volatile(LOCK_PREFIX "addl $" RW_LOCK_BIAS_STR ", %0"
-                     : "+m" (rw->lock) : : "memory");
+        asm volatile(LOCK_PREFIX "addl %1, %0"
+                     : "+m" (rw->lock) : "i" (RW_LOCK_BIAS) : "memory");
 }
 
 #define _raw_spin_relax(lock)   cpu_relax()