author		Scott Wood <scottwood@freescale.com>	2011-07-25 11:02:11 +0000
committer	Benjamin Herrenschmidt <benh@kernel.crashing.org>	2011-08-05 14:47:54 +1000
commit		326ed6a9bcf8d451a6d714d10c8b0f40941a3ed3 (patch)
tree		d57cbea021dc1866dbe68e1d32a42fd508c869a9
parent		53d1e658df6e26d62500410719aaee2b82067c03 (diff)
powerpc: mtspr/mtmsr should take an unsigned long
Add a cast in case the caller passes in a different type, as it would if mtspr/mtmsr were functions.

Previously, if a 64-bit type was passed in on 32-bit, GCC would bind the constraint to a pair of registers, and would substitute the first register in the pair in the asm code. This corresponds to the upper half of the 64-bit register, which is generally not the desired behavior.

Signed-off-by: Scott Wood <scottwood@freescale.com>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
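[Editorial illustration, not part of the commit: a minimal sketch of the behavior the message describes, assuming a 32-bit powerpc build. The macro names mtspr_old/mtspr_new, the u64 variable, and the SPR number 0x110 are hypothetical; only the cast itself comes from the patch.]

/*
 * Illustrative sketch only -- not from the patch.  Assumes a 32-bit
 * powerpc kernel build; macro names and values are hypothetical.
 */
#include <linux/stringify.h>
#include <linux/types.h>

/* Before the patch: the operand keeps whatever type the caller passes. */
#define mtspr_old(rn, v)	asm volatile("mtspr " __stringify(rn) ",%0" \
					     : : "r" (v) : "memory")

/* After the patch: the cast forces a single 32-bit register operand. */
#define mtspr_new(rn, v)	asm volatile("mtspr " __stringify(rn) ",%0" \
					     : : "r" ((unsigned long)(v)) \
					     : "memory")

static void example(void)
{
	u64 val = 0x12345678ULL;	/* 64-bit type on a 32-bit kernel */

	/*
	 * GCC binds the 64-bit operand to a register pair and %0 expands
	 * to the first register of the pair, i.e. the upper 32 bits, so
	 * the SPR is written with 0 instead of the intended value.
	 */
	mtspr_old(0x110, val);

	/*
	 * The cast truncates the argument to unsigned long first, so %0
	 * names a single register holding 0x12345678, as intended.
	 */
	mtspr_new(0x110, val);
}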
-rw-r--r--	arch/powerpc/include/asm/reg.h	7
1 file changed, 5 insertions, 2 deletions
diff --git a/arch/powerpc/include/asm/reg.h b/arch/powerpc/include/asm/reg.h
index e8aaf6fce38..9561e6016ec 100644
--- a/arch/powerpc/include/asm/reg.h
+++ b/arch/powerpc/include/asm/reg.h
@@ -1024,13 +1024,16 @@
#define mtmsrd(v) __mtmsrd((v), 0)
#define mtmsr(v) mtmsrd(v)
#else
-#define mtmsr(v) asm volatile("mtmsr %0" : : "r" (v) : "memory")
+#define mtmsr(v) asm volatile("mtmsr %0" : \
+ : "r" ((unsigned long)(v)) \
+ : "memory")
#endif
#define mfspr(rn) ({unsigned long rval; \
asm volatile("mfspr %0," __stringify(rn) \
: "=r" (rval)); rval;})
-#define mtspr(rn, v) asm volatile("mtspr " __stringify(rn) ",%0" : : "r" (v)\
+#define mtspr(rn, v) asm volatile("mtspr " __stringify(rn) ",%0" : \
+ : "r" ((unsigned long)(v)) \
: "memory")
#ifdef __powerpc64__