/*
 * arch/i386/kernel/stacktrace.c
 *
 * Stack trace management functions
 *
 *  Copyright (C) 2006 Red Hat, Inc., Ingo Molnar <mingo@redhat.com>
 */
#include <linux/sched.h>
#include <linux/stacktrace.h>

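/*
 * A pointer is a usable stack address if it lies above the thread_info
 * at the bottom of the task's stack and leaves room to read a full
 * word before the THREAD_SIZE boundary:
 */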
static inline int valid_stack_ptr(struct thread_info *tinfo, void *p)
{
	return	p > (void *)tinfo &&
		p < (void *)tinfo + THREAD_SIZE - 3;
}

/*
 * Save stack-backtrace addresses into a stack_trace buffer:
 */
static inline unsigned long
save_context_stack(struct stack_trace *trace, unsigned int skip,
		   struct thread_info *tinfo, unsigned long *stack,
		   unsigned long ebp)
{
	unsigned long addr;

#ifdef CONFIG_FRAME_POINTER
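	/*
	 * With frame pointers, each frame holds the caller's saved %ebp
	 * at (%ebp) and the return address at 4(%ebp), so the frame
	 * chain can be followed directly:
	 */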
	while (valid_stack_ptr(tinfo, (void *)ebp)) {
		addr = *(unsigned long *)(ebp + 4);
		if (!skip)
			trace->entries[trace->nr_entries++] = addr;
		else
			skip--;
		if (trace->nr_entries >= trace->max_entries)
			break;
		/*
		 * break out of recursive entries (such as
		 * end_of_stack_stop_unwind_function):
		 */
		if (ebp == *(unsigned long *)ebp)
			break;

		ebp = *(unsigned long *)ebp;
	}
#else
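	/*
	 * Without frame pointers, scan every word on the stack and
	 * record anything that looks like a kernel text address:
	 */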
	while (valid_stack_ptr(tinfo, stack)) {
		addr = *stack++;
		if (__kernel_text_address(addr)) {
			if (!skip)
				trace->entries[trace->nr_entries++] = addr;
			else
				skip--;
			if (trace->nr_entries >= trace->max_entries)
				break;
		}
	}
#endif

	return ebp;
}

/*
 * Save stack-backtrace addresses into a stack_trace buffer.
 * If all_contexts is set, all contexts (hardirq, softirq and process)
 * are saved. If not set then only the current context is saved.
 */
void save_stack_trace(struct stack_trace *trace,
		      struct task_struct *task, int all_contexts,
		      unsigned int skip)
{
	unsigned long ebp;
	unsigned long *stack = &ebp;

	WARN_ON(trace->nr_entries || !trace->max_entries);

	if (!task || task == current) {
		/* Grab ebp right from our regs: */
		asm ("movl %%ebp, %0" : "=r" (ebp));
	} else {
		/* ebp is the last reg pushed by switch_to(): */
		ebp = *(unsigned long *) task->thread.esp;
	}

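	/*
	 * Walk the trace context by context: save_context_stack() covers
	 * the current stack, then thread_info->previous_esp (set when
	 * switching to an interrupt stack) leads back to the interrupted
	 * context.  Contexts are separated by a ULONG_MAX entry:
	 */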
	while (1) {
		struct thread_info *context = (struct thread_info *)
				((unsigned long)stack & (~(THREAD_SIZE - 1)));

		ebp = save_context_stack(trace, skip, context, stack, ebp);
		stack = (unsigned long *)context->previous_esp;
		if (!all_contexts || !stack ||
				trace->nr_entries >= trace->max_entries)
			break;
		trace->entries[trace->nr_entries++] = ULONG_MAX;
		if (trace->nr_entries >= trace->max_entries)
			break;
	}
}
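
/*
 * Illustrative sketch (not compiled, not part of this file): a minimal
 * caller that captures and prints a trace of the current task.  The
 * function name example_print_trace and the 16-entry buffer size are
 * made up for the example; struct stack_trace and save_stack_trace()
 * are declared in <linux/stacktrace.h>.
 */
#if 0
#include <linux/kernel.h>
#include <linux/stacktrace.h>

static void example_print_trace(void)
{
	unsigned long entries[16];
	struct stack_trace trace = {
		.nr_entries	= 0,
		.max_entries	= ARRAY_SIZE(entries),
		.entries	= entries,
	};
	unsigned int i;

	/* Current task, current context only, skip no entries: */
	save_stack_trace(&trace, current, 0, 0);

	for (i = 0; i < trace.nr_entries; i++)
		printk("  [<%08lx>]\n", trace.entries[i]);
}
#endif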