Diffstat (limited to 'include/asm-generic/vmlinux.lds.h')

 include/asm-generic/vmlinux.lds.h | 15 +++++++++++++++
 1 file changed, 15 insertions(+), 0 deletions(-)
diff --git a/include/asm-generic/vmlinux.lds.h b/include/asm-generic/vmlinux.lds.h
index b3bb326ae5b..6f857be2b64 100644
--- a/include/asm-generic/vmlinux.lds.h
+++ b/include/asm-generic/vmlinux.lds.h
@@ -6,6 +6,9 @@
#define VMLINUX_SYMBOL(_sym_) _sym_
#endif
+/* Align . to an 8-byte boundary, which equals the maximum function alignment. */
+#define ALIGN_FUNCTION() . = ALIGN(8)
+
#define RODATA \
.rodata : AT(ADDR(.rodata) - LOAD_OFFSET) { \
*(.rodata) *(.rodata.*) \
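For reference, ". = ALIGN(8)" rounds the linker's location counter up to the
next multiple of 8 before the following input sections are placed. A minimal
C sketch of the same computation (the helper name align_up is hypothetical,
for illustration only):

    #include <stdint.h>
    #include <stdio.h>

    /* Round addr up to the next multiple of align (a power of two),
     * mirroring what ". = ALIGN(8)" does to the location counter. */
    static uint64_t align_up(uint64_t addr, uint64_t align)
    {
            return (addr + align - 1) & ~(align - 1);
    }

    int main(void)
    {
            /* 0x1003 rounds up to 0x1008; 0x1008 is already aligned. */
            printf("0x%llx\n", (unsigned long long)align_up(0x1003, 8));
            printf("0x%llx\n", (unsigned long long)align_up(0x1008, 8));
            return 0;
    }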
@@ -79,12 +82,24 @@
VMLINUX_SYMBOL(__security_initcall_end) = .; \
}
+/* .sched.text is aligned to function alignment to ensure we get the same
+ * address even on the second ld pass when generating System.map. */
#define SCHED_TEXT \
+ ALIGN_FUNCTION(); \
VMLINUX_SYMBOL(__sched_text_start) = .; \
*(.sched.text) \
VMLINUX_SYMBOL(__sched_text_end) = .;
+/* .spinlock.text is aligned to function alignment to ensure we get the same
+ * address even on the second ld pass when generating System.map. */
#define LOCK_TEXT \
+ ALIGN_FUNCTION(); \
VMLINUX_SYMBOL(__lock_text_start) = .; \
*(.spinlock.text) \
VMLINUX_SYMBOL(__lock_text_end) = .;
+
+#define KPROBES_TEXT \
+ ALIGN_FUNCTION(); \
+ VMLINUX_SYMBOL(__kprobes_text_start) = .; \
+ *(.kprobes.text) \
+ VMLINUX_SYMBOL(__kprobes_text_end) = .;
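These __*_text_start/__*_text_end pairs exist to be consumed from C, where
linker-provided symbols are conventionally declared as extern char arrays
whose addresses (not contents) delimit the section. A minimal sketch of that
idiom (the helper addr_in_section and the function in_kprobes_text are
hypothetical names for illustration, not the kernel's own):

    /* Section boundaries emitted by the linker script above. */
    extern char __sched_text_start[], __sched_text_end[];
    extern char __lock_text_start[], __lock_text_end[];
    extern char __kprobes_text_start[], __kprobes_text_end[];

    /* True if addr falls inside [start, end). */
    static int addr_in_section(unsigned long addr, char *start, char *end)
    {
            return addr >= (unsigned long)start &&
                   addr < (unsigned long)end;
    }

    /* Example: refuse to place a probe inside .kprobes.text itself. */
    static int in_kprobes_text(unsigned long addr)
    {
            return addr_in_section(addr, __kprobes_text_start,
                                   __kprobes_text_end);
    }

Because ALIGN_FUNCTION() forces each boundary onto the maximum function
alignment, these symbols resolve to the same addresses on both ld passes,
so the entries recorded in System.map match the final image.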