Diffstat (limited to 'include')
-rw-r--r--  include/asm-i386/atomic.h   | 12
-rw-r--r--  include/asm-i386/edac.h     | 18
-rw-r--r--  include/asm-x86_64/atomic.h | 12
-rw-r--r--  include/asm-x86_64/edac.h   | 18
4 files changed, 36 insertions, 24 deletions
diff --git a/include/asm-i386/atomic.h b/include/asm-i386/atomic.h
index e2c00c95a5e..de649d3aa2d 100644
--- a/include/asm-i386/atomic.h
+++ b/include/asm-i386/atomic.h
@@ -255,17 +255,5 @@ __asm__ __volatile__(LOCK "orl %0,%1" \
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
-/* ECC atomic, DMA, SMP and interrupt safe scrub function */
-
-static __inline__ void atomic_scrub(unsigned long *virt_addr, u32 size)
-{
-	u32 i;
-	for (i = 0; i < size / 4; i++, virt_addr++)
-		/* Very carefully read and write to memory atomically
-		 * so we are interrupt, DMA and SMP safe.
-		 */
-		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
-}
-
 #include <asm-generic/atomic.h>
 #endif
diff --git a/include/asm-i386/edac.h b/include/asm-i386/edac.h
new file mode 100644
index 00000000000..3e7dd0ab68c
--- /dev/null
+++ b/include/asm-i386/edac.h
@@ -0,0 +1,18 @@
+#ifndef ASM_EDAC_H
+#define ASM_EDAC_H
+
+/* ECC atomic, DMA, SMP and interrupt safe scrub function */
+
+static __inline__ void atomic_scrub(void *va, u32 size)
+{
+	unsigned long *virt_addr = va;
+	u32 i;
+
+	for (i = 0; i < size / 4; i++, virt_addr++)
+		/* Very carefully read and write to memory atomically
+		 * so we are interrupt, DMA and SMP safe.
+		 */
+		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
+}
+
+#endif
diff --git a/include/asm-x86_64/atomic.h b/include/asm-x86_64/atomic.h
index 4048508c4f4..4b5cd553e77 100644
--- a/include/asm-x86_64/atomic.h
+++ b/include/asm-x86_64/atomic.h
@@ -426,17 +426,5 @@ __asm__ __volatile__(LOCK "orl %0,%1" \
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
-/* ECC atomic, DMA, SMP and interrupt safe scrub function */
-
-static __inline__ void atomic_scrub(u32 *virt_addr, u32 size)
-{
-	u32 i;
-	for (i = 0; i < size / 4; i++, virt_addr++)
-		/* Very carefully read and write to memory atomically
-		 * so we are interrupt, DMA and SMP safe.
-		 */
-		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
-}
-
 #include <asm-generic/atomic.h>
 #endif
diff --git a/include/asm-x86_64/edac.h b/include/asm-x86_64/edac.h
new file mode 100644
index 00000000000..cad1cd42b4e
--- /dev/null
+++ b/include/asm-x86_64/edac.h
@@ -0,0 +1,18 @@
+#ifndef ASM_EDAC_H
+#define ASM_EDAC_H
+
+/* ECC atomic, DMA, SMP and interrupt safe scrub function */
+
+static __inline__ void atomic_scrub(void *va, u32 size)
+{
+	unsigned int *virt_addr = va;
+	u32 i;
+
+	for (i = 0; i < size / 4; i++, virt_addr++)
+		/* Very carefully read and write to memory atomically
+		 * so we are interrupt, DMA and SMP safe.
+		 */
+		__asm__ __volatile__("lock; addl $0, %0"::"m"(*virt_addr));
+}
+
+#endif
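As a hedged illustration of how the helper moved by this patch gets used (not part of the commit itself), the sketch below shows roughly how an EDAC memory-controller driver of that era might rewrite a page that reported a correctable ECC error. The function name example_scrub_page(), the KM_BOUNCE_READ mapping slot, and the offset/size parameters are assumptions modeled loosely on the 2.6-era EDAC core, not code taken from this diff.

/*
 * Illustrative sketch only -- not part of this commit.  Rewrites a
 * region of a page that reported a correctable ECC error so the memory
 * controller regenerates clean check bits.
 */
#include <linux/mm.h>
#include <linux/highmem.h>
#include <asm/edac.h>

static void example_scrub_page(struct page *pg, u32 offset, u32 size)
{
	void *virt_addr;

	/* Get a kernel virtual address for the affected page. */
	virt_addr = kmap_atomic(pg, KM_BOUNCE_READ);

	/*
	 * atomic_scrub() walks the region one 32-bit word at a time and
	 * issues "lock; addl $0" on each word: an atomic read-modify-write
	 * that stores the data back unchanged, forcing the memory
	 * controller to recompute and write fresh ECC for that word.
	 */
	atomic_scrub(virt_addr + offset, size);

	kunmap_atomic(virt_addr, KM_BOUNCE_READ);
}

The reason for the locked no-op add, rather than a plain load/store pair, is that the read and the write-back happen as a single bus-locked operation, so another CPU, an interrupt handler, or a DMA master cannot store to the word in between and have its update overwritten with stale data.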