Diffstat (limited to 'include/asm-x86_64/system.h')
-rw-r--r-- | include/asm-x86_64/system.h | 45 |
1 file changed, 30 insertions(+), 15 deletions(-)
diff --git a/include/asm-x86_64/system.h b/include/asm-x86_64/system.h
index ead9f9a5623..02175aa1d16 100644
--- a/include/asm-x86_64/system.h
+++ b/include/asm-x86_64/system.h
@@ -75,19 +75,31 @@ static inline unsigned long read_cr0(void)
 	unsigned long cr0;
 	asm volatile("movq %%cr0,%0" : "=r" (cr0));
 	return cr0;
-} 
+}
 
 static inline void write_cr0(unsigned long val)
 {
 	asm volatile("movq %0,%%cr0" :: "r" (val));
-} 
+}
+
+static inline unsigned long read_cr2(void)
+{
+	unsigned long cr2;
+	asm("movq %%cr2,%0" : "=r" (cr2));
+	return cr2;
+}
+
+static inline void write_cr2(unsigned long val)
+{
+	asm volatile("movq %0,%%cr2" :: "r" (val));
+}
 
 static inline unsigned long read_cr3(void)
 {
 	unsigned long cr3;
 	asm("movq %%cr3,%0" : "=r" (cr3));
 	return cr3;
-} 
+}
 
 static inline void write_cr3(unsigned long val)
 {
@@ -99,27 +111,30 @@ static inline unsigned long read_cr4(void)
 	unsigned long cr4;
 	asm("movq %%cr4,%0" : "=r" (cr4));
 	return cr4;
-} 
+}
 
 static inline void write_cr4(unsigned long val)
 {
 	asm volatile("movq %0,%%cr4" :: "r" (val) : "memory");
-} 
-
-#define stts() write_cr0(8 | read_cr0())
+}
 
-#define wbinvd() \
-	__asm__ __volatile__ ("wbinvd": : :"memory");
+static inline unsigned long read_cr8(void)
+{
+	unsigned long cr8;
+	asm("movq %%cr8,%0" : "=r" (cr8));
+	return cr8;
+}
 
-/*
- * On SMP systems, when the scheduler does migration-cost autodetection,
- * it needs a way to flush as much of the CPU's caches as possible.
- */
-static inline void sched_cacheflush(void)
+static inline void write_cr8(unsigned long val)
 {
-	wbinvd();
+	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
 }
 
+#define stts() write_cr0(8 | read_cr0())
+
+#define wbinvd() \
+	__asm__ __volatile__ ("wbinvd": : :"memory")
+
 #endif /* __KERNEL__ */
 
 #define nop() __asm__ __volatile__ ("nop")
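
For context, a minimal usage sketch (not part of the patch) of the CR8 accessors added above. On x86-64, CR8 mirrors the local APIC task-priority register, so raising it blocks delivery of lower-priority external interrupts; the function name below is hypothetical and only illustrates the new read_cr8()/write_cr8() interface.

/* Hypothetical caller illustrating the new read_cr8()/write_cr8() accessors.
 * CR8 holds the task-priority register (TPR): raising it blocks delivery of
 * lower-priority external interrupts (NMIs are unaffected).
 */
static inline unsigned long mask_low_priority_irqs(unsigned long new_tpr)
{
	unsigned long old_tpr = read_cr8();	/* save the current priority */

	write_cr8(new_tpr);			/* raise the priority threshold */
	return old_tpr;				/* caller restores with write_cr8(old_tpr) */
}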