author    Kirill Korotaev <dev@openvz.org>  2007-10-17 18:04:33 +0200
committer Thomas Gleixner <tglx@inhelltoy.tec.linutronix.de>  2007-10-17 20:15:31 +0200
commit    c1217a75ea102d4e69321f210fab60bc47b9a48e (patch)
tree      fffa575916b27bac17dfb751300ed2e1f9107f0c
parent    f891dd18c107d582c3ab98da5209c930e16baf78 (diff)
x86: mark read_crX() asm code as volatile
Some gcc versions (I checked at least 4.1.1 from RHEL5 and 4.1.2 from Gentoo) can generate incorrect code when read_crX() and write_crX() calls are mixed, because the result of an earlier read_crX() is cached and reused. A small test application for x86_64 compiled with -O2 demonstrates this (i686 shows the same behaviour).
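The original test program is not reproduced in this excerpt. The sketch below is a hypothetical user-space stand-in: it uses rdtsc instead of a privileged %cr3 read (control registers cannot be read from ring 3), but it relies on the same compiler property the commit message describes, namely that an asm statement with outputs but no volatile qualifier may be treated as a pure expression and merged with an identical earlier one at -O2.

	#include <stdio.h>

	/*
	 * Mirrors the pre-patch read_crX(): an asm statement with an output
	 * operand but no "volatile", so gcc may assume two identical
	 * statements yield the same value and merge them (rdtsc stands in
	 * for the privileged %cr3 read used in the kernel).
	 */
	static inline unsigned long read_counter(void)
	{
		unsigned long lo;
		asm("rdtsc" : "=a" (lo) : : "edx");
		return lo;
	}

	int main(void)
	{
		unsigned long a = read_counter();
		unsigned long b = read_counter();

		/*
		 * With "asm volatile" both reads must be emitted and will
		 * normally differ; without it, -O2 is allowed to emit a
		 * single rdtsc and print identical values (whether it does
		 * depends on the gcc version, as noted above).
		 */
		printf("a=%lu b=%lu\n", a, b);
		return 0;
	}

Adding volatile, as the patch below does, forces gcc to emit every read instead of reusing a cached result.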
-rw-r--r--  include/asm-x86/system_32.h  2
-rw-r--r--  include/asm-x86/system_64.h  8
2 files changed, 5 insertions, 5 deletions
diff --git a/include/asm-x86/system_32.h b/include/asm-x86/system_32.h
index d84e593b7df..1d6fb3afa53 100644
--- a/include/asm-x86/system_32.h
+++ b/include/asm-x86/system_32.h
@@ -142,7 +142,7 @@ static inline unsigned long native_read_cr4_safe(void)
{
unsigned long val;
/* This could fault if %cr4 does not exist */
- asm("1: movl %%cr4, %0 \n"
+ asm volatile("1: movl %%cr4, %0 \n"
"2: \n"
".section __ex_table,\"a\" \n"
".long 1b,2b \n"
diff --git a/include/asm-x86/system_64.h b/include/asm-x86/system_64.h
index 5022aecc333..fb4bcf99e66 100644
--- a/include/asm-x86/system_64.h
+++ b/include/asm-x86/system_64.h
@@ -85,7 +85,7 @@ static inline void write_cr0(unsigned long val)
static inline unsigned long read_cr2(void)
{
unsigned long cr2;
- asm("movq %%cr2,%0" : "=r" (cr2));
+ asm volatile("movq %%cr2,%0" : "=r" (cr2));
return cr2;
}
@@ -97,7 +97,7 @@ static inline void write_cr2(unsigned long val)
static inline unsigned long read_cr3(void)
{
unsigned long cr3;
- asm("movq %%cr3,%0" : "=r" (cr3));
+ asm volatile("movq %%cr3,%0" : "=r" (cr3));
return cr3;
}
@@ -109,7 +109,7 @@ static inline void write_cr3(unsigned long val)
static inline unsigned long read_cr4(void)
{
unsigned long cr4;
- asm("movq %%cr4,%0" : "=r" (cr4));
+ asm volatile("movq %%cr4,%0" : "=r" (cr4));
return cr4;
}
@@ -121,7 +121,7 @@ static inline void write_cr4(unsigned long val)
static inline unsigned long read_cr8(void)
{
unsigned long cr8;
- asm("movq %%cr8,%0" : "=r" (cr8));
+ asm volatile("movq %%cr8,%0" : "=r" (cr8));
return cr8;
}