# This is a BitKeeper generated diff -Nru style patch. # # ChangeSet # 2005/03/14 20:56:01+01:00 sam@mars.ravnborg.org # kbuild: Avoid inconsistent kallsyms data # # Several reports on inconsistent kallsyms data have been caused by the aliased symbols # __sched_text_start and __down shifting places in the output of nm. # The root cause was that on the second pass ld aligned __sched_text_start to a 4 byte boundary # which is the function alignment on i386. # sched.text and spinlock.text are now aligned to an 8 byte boundary to make sure they # are aligned to a function alignment on most (all?) archs. # # Tested by: Paulo Marques # Tested by: Alexander Stohr # # Signed-off-by: Sam Ravnborg # # include/asm-generic/vmlinux.lds.h # 2005/03/14 20:55:39+01:00 sam@mars.ravnborg.org +9 -0 # Align sched.text and spinlock.text to an 8 byte boundary # diff -Nru a/include/asm-generic/vmlinux.lds.h b/include/asm-generic/vmlinux.lds.h --- a/include/asm-generic/vmlinux.lds.h 2005-03-26 09:07:42 +01:00 +++ b/include/asm-generic/vmlinux.lds.h 2005-03-26 09:07:42 +01:00 @@ -6,6 +6,9 @@ #define VMLINUX_SYMBOL(_sym_) _sym_ #endif +/* Align . to an 8 byte boundary, which equals the maximum function alignment. */ +#define ALIGN_FUNCTION() . = ALIGN(8) + #define RODATA \ .rodata : AT(ADDR(.rodata) - LOAD_OFFSET) { \ *(.rodata) *(.rodata.*) \ @@ -79,12 +82,18 @@ VMLINUX_SYMBOL(__security_initcall_end) = .; \ } +/* sched.text is aligned to function alignment to ensure we have the same + * address even at the second ld pass when generating System.map */ #define SCHED_TEXT \ + ALIGN_FUNCTION(); \ VMLINUX_SYMBOL(__sched_text_start) = .; \ *(.sched.text) \ VMLINUX_SYMBOL(__sched_text_end) = .; +/* spinlock.text is aligned to function alignment to ensure we have the same + * address even at the second ld pass when generating System.map */ #define LOCK_TEXT \ + ALIGN_FUNCTION(); \ VMLINUX_SYMBOL(__lock_text_start) = .; \ *(.spinlock.text) \ VMLINUX_SYMBOL(__lock_text_end) = .;