author     Ingo Molnar <mingo@elte.hu>                        2006-01-14 13:21:30 -0800
committer  Linus Torvalds <torvalds@g5.osdl.org>              2006-01-14 18:27:15 -0800
commit     652050aec936fdd70ed9cbce1cd1ef30a7c9d117 (patch)
tree       a2859991fd1c71d918c85c5a8b4bc63bb2c64808 /include/asm-i386/string.h
parent     9ab34fe76114b9538bfcaf3a9d112dee0feb5f17 (diff)
[PATCH] mark several functions __always_inline
From: Arjan van de Ven <arjan@infradead.org>
Mark a number of functions as 'must inline'. The functions affected by this
patch need to be inlined because they use knowledge that their arguments are
constant, so that most of the function body optimizes away. At this point the
patch does not change behavior; it is for documentation only (and for future
patches in the inline series).
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Arjan van de Ven <arjan@infradead.org>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
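
The commit message hinges on constant propagation: when the size argument is a
compile-time constant and the helper is genuinely inlined, the compiler can
collapse its switch down to a couple of instructions. The snippet below is a
minimal, self-contained sketch of that pattern, not the kernel's actual code;
the names tiny_copy and tiny_memcpy are hypothetical, and it assumes GCC's
always_inline attribute and __builtin_constant_p/__builtin_memcpy builtins.

#include <stddef.h>

/* Hypothetical helper in the spirit of __constant_memcpy: if 'n' is a
 * compile-time constant and the function is really inlined, the switch
 * folds down to a single load/store pair.  If the compiler declined to
 * inline it, that optimization would be lost, which is why the kernel
 * forces inlining rather than relying on a plain 'inline' hint. */
static inline __attribute__((always_inline))
void *tiny_copy(void *to, const void *from, size_t n)
{
	switch (n) {
	case 1:
		*(char *)to = *(const char *)from;
		return to;
	case 2:
		*(short *)to = *(const short *)from;
		return to;
	case 4:
		*(int *)to = *(const int *)from;
		return to;
	default:
		__builtin_memcpy(to, from, n);
		return to;
	}
}

/* Hypothetical dispatch macro: take the constant-size path only when the
 * compiler can prove the size is a compile-time constant. */
#define tiny_memcpy(t, f, n)				\
	(__builtin_constant_p(n) ?			\
	 tiny_copy((t), (f), (n)) :			\
	 __builtin_memcpy((t), (f), (n)))

With a constant size, e.g. tiny_memcpy(dst, src, 4), GCC can emit a single
32-bit move; with a variable size it falls back to the generic path.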
Diffstat (limited to 'include/asm-i386/string.h')
-rw-r--r--  include/asm-i386/string.h  8
1 files changed, 4 insertions, 4 deletions
diff --git a/include/asm-i386/string.h b/include/asm-i386/string.h
index 02c8f5d2206..bb5f88a27f7 100644
--- a/include/asm-i386/string.h
+++ b/include/asm-i386/string.h
@@ -201,7 +201,7 @@ __asm__ __volatile__(
 return __res;
 }
 
-static inline void * __memcpy(void * to, const void * from, size_t n)
+static __always_inline void * __memcpy(void * to, const void * from, size_t n)
 {
 int d0, d1, d2;
 __asm__ __volatile__(
@@ -223,7 +223,7 @@ return (to);
  * This looks ugly, but the compiler can optimize it totally,
  * as the count is constant.
  */
-static inline void * __constant_memcpy(void * to, const void * from, size_t n)
+static __always_inline void * __constant_memcpy(void * to, const void * from, size_t n)
 {
 long esi, edi;
 if (!n) return to;
@@ -367,7 +367,7 @@ return s;
  * things 32 bits at a time even when we don't know the size of the
  * area at compile-time..
  */
-static inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
+static __always_inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
 {
 int d0, d1;
 __asm__ __volatile__(
@@ -416,7 +416,7 @@ extern char *strstr(const char *cs, const char *ct);
  * This looks horribly ugly, but the compiler can optimize it totally,
  * as we by now know that both pattern and count is constant..
  */
-static inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
+static __always_inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
 {
 switch (count) {
 	case 0:
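
For context, __always_inline itself is just a thin macro over GCC's
always_inline function attribute; the line below is an approximation of the
definition in the kernel's compiler headers (include/linux/compiler-gcc.h),
reproduced from memory rather than from this tree.

/* Approximate definition from include/linux/compiler-gcc.h */
#define __always_inline		inline __attribute__((always_inline))

Unlike plain 'inline', which is only a hint the optimizer's heuristics may
ignore, always_inline forces the body to be inlined at every call site, which
is what the constant folding described in the commit message depends on.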