Diffstat (limited to 'include/asm-generic/vmlinux.lds.h')
-rw-r--r--	include/asm-generic/vmlinux.lds.h	17
1 file changed, 16 insertions, 1 deletion
diff --git a/include/asm-generic/vmlinux.lds.h b/include/asm-generic/vmlinux.lds.h
index 99cef06..6f857be 100644
--- a/include/asm-generic/vmlinux.lds.h
+++ b/include/asm-generic/vmlinux.lds.h
@@ -6,6 +6,9 @@
 #define VMLINUX_SYMBOL(_sym_) _sym_
 #endif
 
+/* Align . to an 8 byte boundary, which equals the maximum function alignment. */
+#define ALIGN_FUNCTION() . = ALIGN(8)
+
 #define RODATA								\
 	.rodata : AT(ADDR(.rodata) - LOAD_OFFSET) {			\
 		*(.rodata) *(.rodata.*)					\
@@ -73,18 +76,30 @@
 	}
 
 #define SECURITY_INIT							\
-	.security_initcall.init : {					\
+	.security_initcall.init : AT(ADDR(.security_initcall.init) - LOAD_OFFSET) { \
 		VMLINUX_SYMBOL(__security_initcall_start) = .;		\
 		*(.security_initcall.init)				\
 		VMLINUX_SYMBOL(__security_initcall_end) = .;		\
 	}
 
+/* sched.text is aligned to the function alignment to ensure we get the same
+ * address even on the second ld pass when generating System.map. */
 #define SCHED_TEXT							\
+		ALIGN_FUNCTION();					\
 		VMLINUX_SYMBOL(__sched_text_start) = .;			\
 		*(.sched.text)						\
 		VMLINUX_SYMBOL(__sched_text_end) = .;
 
+/* spinlock.text is aligned to the function alignment to ensure we get the same
+ * address even on the second ld pass when generating System.map. */
 #define LOCK_TEXT							\
+		ALIGN_FUNCTION();					\
 		VMLINUX_SYMBOL(__lock_text_start) = .;			\
 		*(.spinlock.text)					\
 		VMLINUX_SYMBOL(__lock_text_end) = .;
+
+#define KPROBES_TEXT							\
+		ALIGN_FUNCTION();					\
+		VMLINUX_SYMBOL(__kprobes_text_start) = .;		\
+		*(.kprobes.text)					\
+		VMLINUX_SYMBOL(__kprobes_text_end) = .;
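
For context, a short usage sketch (not part of the patch): an architecture's vmlinux.lds.S includes asm-generic/vmlinux.lds.h and expands these helper macros inside its .text output section, which is what makes the __sched_text_*, __lock_text_* and __kprobes_text_* boundary symbols appear in the final image. The layout below is illustrative only; real architecture scripts add their own output format, entry point and further sections.

    /* Hypothetical excerpt from an arch vmlinux.lds.S (illustrative only) */
    #include <asm-generic/vmlinux.lds.h>

    SECTIONS
    {
    	.text : {
    		*(.text)
    		SCHED_TEXT	/* emits __sched_text_start / __sched_text_end     */
    		LOCK_TEXT	/* emits __lock_text_start / __lock_text_end       */
    		KPROBES_TEXT	/* emits __kprobes_text_start / __kprobes_text_end */
    	}
    }

On the C side (outside this patch), functions are placed into .kprobes.text by tagging them with a section attribute (the kernel's __kprobes marker), and the kprobes core can then compare a requested probe address against __kprobes_text_start/__kprobes_text_end to refuse probes on its own implementation. The ALIGN_FUNCTION() padding keeps these boundary symbols at the same addresses on the second ld pass, so System.map stays consistent between passes.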