author		Ingo Molnar <mingo@elte.hu>	2009-06-17 12:52:15 +0200
committer	Ingo Molnar <mingo@elte.hu>	2009-06-17 12:56:49 +0200
commit		eadb8a091b27a840de7450f84ecff5ef13476424 (patch)
tree		58c3782d40def63baa8167f3d31e3048cb4c7660 /arch/s390/kernel/mcount.S
parent		73874005cd8800440be4299bd095387fff4b90ac (diff)
parent		65795efbd380a832ae508b04dba8f8e53f0b84d9 (diff)
Merge branch 'linus' into tracing/hw-breakpoints
Conflicts:
	arch/x86/Kconfig
	arch/x86/kernel/traps.c
	arch/x86/power/cpu.c
	arch/x86/power/cpu_32.c
	kernel/Makefile

Semantic conflict:
	arch/x86/kernel/hw_breakpoint.c
Merge reason: Resolve the conflicts, move from put_cpu_no_sched() to
put_cpu() in arch/x86/kernel/hw_breakpoint.c.
Signed-off-by: Ingo Molnar <mingo@elte.hu>
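[Editor's note] The "Merge reason" above describes a semantic rather than textual conflict: the tracing branch still called put_cpu_no_sched(), which the upstream side had removed, so the resolution switches the call site to put_cpu(). A minimal C sketch of the pattern, assuming a simplified call site (example_touch_per_cpu_state() is hypothetical; only get_cpu()/put_cpu() are the real kernel primitives):

	#include <linux/smp.h>		/* get_cpu() / put_cpu() */

	/*
	 * Hypothetical call site illustrating the resolution: the tracing
	 * branch still used put_cpu_no_sched(), which was removed upstream,
	 * so the merge re-pairs get_cpu() with put_cpu().
	 */
	static void example_touch_per_cpu_state(void)
	{
		int cpu = get_cpu();	/* disables preemption, returns CPU id */

		/* ... update per-CPU breakpoint state for 'cpu' ... */
		(void)cpu;

		put_cpu();		/* was put_cpu_no_sched() on the branch */
	}

Unlike put_cpu_no_sched(), put_cpu() re-enables preemption normally and may reschedule, which is the behavior upstream settled on.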
Diffstat (limited to 'arch/s390/kernel/mcount.S')
-rw-r--r--	arch/s390/kernel/mcount.S	| 212
1 file changed, 185 insertions(+), 27 deletions(-)
diff --git a/arch/s390/kernel/mcount.S b/arch/s390/kernel/mcount.S
index 80641224a09..2a0a5e97ba8 100644
--- a/arch/s390/kernel/mcount.S
+++ b/arch/s390/kernel/mcount.S
@@ -1,5 +1,5 @@
 /*
- * Copyright IBM Corp. 2008
+ * Copyright IBM Corp. 2008,2009
  *
  * Author(s): Heiko Carstens <heiko.carstens@de.ibm.com>,
  *
@@ -7,36 +7,64 @@
 #include <asm/asm-offsets.h>
 
-#ifndef CONFIG_64BIT
-.globl _mcount
+	.globl ftrace_stub
+ftrace_stub:
+	br	%r14
+
+#ifdef CONFIG_64BIT
+
+#ifdef CONFIG_DYNAMIC_FTRACE
+
+	.globl _mcount
 _mcount:
-	stm	%r0,%r5,8(%r15)
-	st	%r14,56(%r15)
-	lr	%r1,%r15
-	ahi	%r15,-96
-	l	%r3,100(%r15)
-	la	%r2,0(%r14)
-	st	%r1,__SF_BACKCHAIN(%r15)
-	la	%r3,0(%r3)
-	bras	%r14,0f
-	.long	ftrace_trace_function
-0:	l	%r14,0(%r14)
-	l	%r14,0(%r14)
-	basr	%r14,%r14
-	ahi	%r15,96
-	lm	%r0,%r5,8(%r15)
-	l	%r14,56(%r15)
 	br	%r14
-.globl ftrace_stub
-ftrace_stub:
+	.globl ftrace_caller
+ftrace_caller:
+	larl	%r1,function_trace_stop
+	icm	%r1,0xf,0(%r1)
+	bnzr	%r14
+	stmg	%r2,%r5,32(%r15)
+	stg	%r14,112(%r15)
+	lgr	%r1,%r15
+	aghi	%r15,-160
+	stg	%r1,__SF_BACKCHAIN(%r15)
+	lgr	%r2,%r14
+	lg	%r3,168(%r15)
+	larl	%r14,ftrace_dyn_func
+	lg	%r14,0(%r14)
+	basr	%r14,%r14
+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+	.globl ftrace_graph_caller
+ftrace_graph_caller:
+	# This unconditional branch gets runtime patched. Change only if
+	# you know what you are doing. See ftrace_enable_graph_caller().
+	j	0f
+	lg	%r2,272(%r15)
+	lg	%r3,168(%r15)
+	brasl	%r14,prepare_ftrace_return
+	stg	%r2,168(%r15)
+0:
+#endif
+	aghi	%r15,160
+	lmg	%r2,%r5,32(%r15)
+	lg	%r14,112(%r15)
 	br	%r14
-#else /* CONFIG_64BIT */
+	.data
+	.globl ftrace_dyn_func
+ftrace_dyn_func:
+	.quad	ftrace_stub
+	.previous
+
+#else /* CONFIG_DYNAMIC_FTRACE */
 
-.globl _mcount
+	.globl _mcount
 _mcount:
-	stmg	%r0,%r5,16(%r15)
+	larl	%r1,function_trace_stop
+	icm	%r1,0xf,0(%r1)
+	bnzr	%r14
+	stmg	%r2,%r5,32(%r15)
 	stg	%r14,112(%r15)
 	lgr	%r1,%r15
 	aghi	%r15,-160
@@ -46,13 +74,143 @@ _mcount:
 	larl	%r14,ftrace_trace_function
 	lg	%r14,0(%r14)
 	basr	%r14,%r14
+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+	lg	%r2,272(%r15)
+	lg	%r3,168(%r15)
+	brasl	%r14,prepare_ftrace_return
+	stg	%r2,168(%r15)
+#endif
 	aghi	%r15,160
-	lmg	%r0,%r5,16(%r15)
+	lmg	%r2,%r5,32(%r15)
 	lg	%r14,112(%r15)
 	br	%r14
-.globl ftrace_stub
-ftrace_stub:
+#endif /* CONFIG_DYNAMIC_FTRACE */
+
+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+
+	.globl return_to_handler
+return_to_handler:
+	stmg	%r2,%r5,32(%r15)
+	lgr	%r1,%r15
+	aghi	%r15,-160
+	stg	%r1,__SF_BACKCHAIN(%r15)
+	brasl	%r14,ftrace_return_to_handler
+	aghi	%r15,160
+	lgr	%r14,%r2
+	lmg	%r2,%r5,32(%r15)
+	br	%r14
+
+#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
+
+#else /* CONFIG_64BIT */
+
+#ifdef CONFIG_DYNAMIC_FTRACE
+
+	.globl _mcount
+_mcount:
+	br	%r14
+
+	.globl ftrace_caller
+ftrace_caller:
+	stm	%r2,%r5,16(%r15)
+	bras	%r1,2f
+0:	.long	ftrace_trace_function
+1:	.long	function_trace_stop
+2:	l	%r2,1b-0b(%r1)
+	icm	%r2,0xf,0(%r2)
+	jnz	3f
+	st	%r14,56(%r15)
+	lr	%r0,%r15
+	ahi	%r15,-96
+	l	%r3,100(%r15)
+	la	%r2,0(%r14)
+	st	%r0,__SF_BACKCHAIN(%r15)
+	la	%r3,0(%r3)
+	l	%r14,0b-0b(%r1)
+	l	%r14,0(%r14)
+	basr	%r14,%r14
+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+	.globl ftrace_graph_caller
+ftrace_graph_caller:
+	# This unconditional branch gets runtime patched. Change only if
+	# you know what you are doing. See ftrace_enable_graph_caller().
+	j	1f
+	bras	%r1,0f
+	.long	prepare_ftrace_return
+0:	l	%r2,152(%r15)
+	l	%r4,0(%r1)
+	l	%r3,100(%r15)
+	basr	%r14,%r4
+	st	%r2,100(%r15)
+1:
+#endif
+	ahi	%r15,96
+	l	%r14,56(%r15)
+3:	lm	%r2,%r5,16(%r15)
 	br	%r14
+	.data
+	.globl ftrace_dyn_func
+ftrace_dyn_func:
+	.long	ftrace_stub
+	.previous
+
+#else /* CONFIG_DYNAMIC_FTRACE */
+
+	.globl _mcount
+_mcount:
+	stm	%r2,%r5,16(%r15)
+	bras	%r1,2f
+0:	.long	ftrace_trace_function
+1:	.long	function_trace_stop
+2:	l	%r2,1b-0b(%r1)
+	icm	%r2,0xf,0(%r2)
+	jnz	3f
+	st	%r14,56(%r15)
+	lr	%r0,%r15
+	ahi	%r15,-96
+	l	%r3,100(%r15)
+	la	%r2,0(%r14)
+	st	%r0,__SF_BACKCHAIN(%r15)
+	la	%r3,0(%r3)
+	l	%r14,0b-0b(%r1)
+	l	%r14,0(%r14)
+	basr	%r14,%r14
+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+	bras	%r1,0f
+	.long	prepare_ftrace_return
+0:	l	%r2,152(%r15)
+	l	%r4,0(%r1)
+	l	%r3,100(%r15)
+	basr	%r14,%r4
+	st	%r2,100(%r15)
+#endif
+	ahi	%r15,96
+	l	%r14,56(%r15)
+3:	lm	%r2,%r5,16(%r15)
+	br	%r14
+
+#endif /* CONFIG_DYNAMIC_FTRACE */
+
+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+
+	.globl return_to_handler
+return_to_handler:
+	stm	%r2,%r5,16(%r15)
+	st	%r14,56(%r15)
+	lr	%r0,%r15
+	ahi	%r15,-96
+	st	%r0,__SF_BACKCHAIN(%r15)
+	bras	%r1,0f
+	.long	ftrace_return_to_handler
+0:	l	%r2,0b-0b(%r1)
+	basr	%r14,%r2
+	lr	%r14,%r2
+	ahi	%r15,96
+	lm	%r2,%r5,16(%r15)
+	br	%r14
+
+#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
+
 #endif /* CONFIG_64BIT */
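[Editor's note] For readers following the new ftrace_graph_caller / return_to_handler paths above: on function entry the patched code calls prepare_ftrace_return, which records the real return address and substitutes the address of return_to_handler; when the traced function returns, the trampoline calls ftrace_return_to_handler to recover the original address. The user-space C sketch below models only that bookkeeping; the names (prepare_return, handle_return, TRAMPOLINE, the fixed-size shadow stack) are illustrative stand-ins, not the kernel's per-task implementation.

	#include <stdio.h>

	/* Illustrative shadow stack; the kernel keeps one per task. */
	#define DEPTH 64
	static unsigned long ret_stack[DEPTH];
	static int ret_top;

	/* Stands in for return_to_handler's address; any marker works here. */
	#define TRAMPOLINE 0xdeadbeefUL

	/*
	 * Models what prepare_ftrace_return does for ftrace_graph_caller:
	 * remember the real return address, hand back the trampoline instead.
	 */
	static unsigned long prepare_return(unsigned long real_ret, unsigned long ip)
	{
		if (ret_top == DEPTH)
			return real_ret;	/* shadow stack full: don't divert */
		ret_stack[ret_top++] = real_ret;
		printf("enter %#lx (return diverted to trampoline)\n", ip);
		return TRAMPOLINE;
	}

	/* Models ftrace_return_to_handler: pop the real return address back. */
	static unsigned long handle_return(void)
	{
		unsigned long real_ret = ret_stack[--ret_top];
		printf("exit, resuming at %#lx\n", real_ret);
		return real_ret;
	}

	int main(void)
	{
		/* Simulate one traced call/return pair. */
		unsigned long ret = prepare_return(0x1000UL, 0x2000UL);
		if (ret == TRAMPOLINE)
			ret = handle_return();
		printf("final return target: %#lx\n", ret);
		return 0;
	}

In the assembly, the same push/pop pairing is visible as the brasl to prepare_ftrace_return on entry and the brasl to ftrace_return_to_handler inside return_to_handler on exit.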