Diffstat (limited to 'target/linux/generic/patches-4.4/220-gc_sections.patch')
-rw-r--r--  target/linux/generic/patches-4.4/220-gc_sections.patch  |  536
1 file changed, 536 insertions(+), 0 deletions(-)
diff --git a/target/linux/generic/patches-4.4/220-gc_sections.patch b/target/linux/generic/patches-4.4/220-gc_sections.patch
new file mode 100644
index 0000000..e89ef77
--- /dev/null
+++ b/target/linux/generic/patches-4.4/220-gc_sections.patch
@@ -0,0 +1,536 @@
+From: Felix Fietkau <nbd@openwrt.org>
+
+use -ffunction-sections, -fdata-sections and --gc-sections
+
+In combination with kernel symbol export stripping, this significantly reduces
+the kernel image size. Used on both ARM and MIPS architectures.
+
+Signed-off-by: Felix Fietkau <nbd@openwrt.org>
+Signed-off-by: Jonas Gorski <jogo@openwrt.org>
+Signed-off-by: Gabor Juhos <juhosg@openwrt.org>
+---
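+Under --gc-sections the linker discards every input section that is not
+reachable from the entry point or from a section marked with KEEP().
+Tables that are referenced only through their __start_*/__stop_* boundary
+symbols (initcalls, exported symbols, exception tables, ...) look
+unreferenced to the linker, so the linker script rules below wrap them in
+KEEP(). A minimal sketch of that pattern follows; the section and symbol
+names .example.table / __start_example_table are made up for illustration
+and are not part of this patch:
+
+	. = ALIGN(8);
+	VMLINUX_SYMBOL(__start_example_table) = .;
+	KEEP(*(.example.table))		/* never garbage-collected */
+	VMLINUX_SYMBOL(__stop_example_table) = .;
+
+	*(.text .text.*)		/* per-function sections from -ffunction-sections;
+					   unreferenced ones are dropped by --gc-sections */
+
+Plain *(...) rules such as the .text one above only collect whatever
+survives garbage collection, which is exactly what is wanted for the
+per-function and per-object sections created by -ffunction-sections and
+-fdata-sections.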
+
+--- a/arch/mips/Makefile
++++ b/arch/mips/Makefile
+@@ -89,10 +89,14 @@ all-$(CONFIG_SYS_SUPPORTS_ZBOOT)+= vmlin
+ #
+ cflags-y += -G 0 -mno-abicalls -fno-pic -pipe
+ cflags-y += -msoft-float
+-LDFLAGS_vmlinux += -G 0 -static -n -nostdlib
++LDFLAGS_vmlinux += -G 0 -static -n -nostdlib --gc-sections
+ KBUILD_AFLAGS_MODULE += -mlong-calls
+ KBUILD_CFLAGS_MODULE += -mlong-calls
+
++ifndef CONFIG_FUNCTION_TRACER
++KBUILD_CFLAGS_KERNEL += -ffunction-sections -fdata-sections
++endif
++
+ #
+ # pass -msoft-float to GAS if it supports it. However on newer binutils
+ # (specifically newer than 2.24.51.20140728) we then also need to explicitly
+--- a/arch/mips/kernel/vmlinux.lds.S
++++ b/arch/mips/kernel/vmlinux.lds.S
+@@ -69,7 +69,7 @@ SECTIONS
+ /* Exception table for data bus errors */
+ __dbe_table : {
+ __start___dbe_table = .;
+- *(__dbe_table)
++ KEEP(*(__dbe_table))
+ __stop___dbe_table = .;
+ }
+
+@@ -119,7 +119,7 @@ SECTIONS
+ . = ALIGN(4);
+ .mips.machines.init : AT(ADDR(.mips.machines.init) - LOAD_OFFSET) {
+ __mips_machines_start = .;
+- *(.mips.machines.init)
++ KEEP(*(.mips.machines.init))
+ __mips_machines_end = .;
+ }
+
+--- a/include/asm-generic/vmlinux.lds.h
++++ b/include/asm-generic/vmlinux.lds.h
+@@ -89,7 +89,7 @@
+ #ifdef CONFIG_FTRACE_MCOUNT_RECORD
+ #define MCOUNT_REC() . = ALIGN(8); \
+ VMLINUX_SYMBOL(__start_mcount_loc) = .; \
+- *(__mcount_loc) \
++ KEEP(*(__mcount_loc)) \
+ VMLINUX_SYMBOL(__stop_mcount_loc) = .;
+ #else
+ #define MCOUNT_REC()
+@@ -97,7 +97,7 @@
+
+ #ifdef CONFIG_TRACE_BRANCH_PROFILING
+ #define LIKELY_PROFILE() VMLINUX_SYMBOL(__start_annotated_branch_profile) = .; \
+- *(_ftrace_annotated_branch) \
++ KEEP(*(_ftrace_annotated_branch)) \
+ VMLINUX_SYMBOL(__stop_annotated_branch_profile) = .;
+ #else
+ #define LIKELY_PROFILE()
+@@ -105,7 +105,7 @@
+
+ #ifdef CONFIG_PROFILE_ALL_BRANCHES
+ #define BRANCH_PROFILE() VMLINUX_SYMBOL(__start_branch_profile) = .; \
+- *(_ftrace_branch) \
++ KEEP(*(_ftrace_branch)) \
+ VMLINUX_SYMBOL(__stop_branch_profile) = .;
+ #else
+ #define BRANCH_PROFILE()
+@@ -114,7 +114,7 @@
+ #ifdef CONFIG_KPROBES
+ #define KPROBE_BLACKLIST() . = ALIGN(8); \
+ VMLINUX_SYMBOL(__start_kprobe_blacklist) = .; \
+- *(_kprobe_blacklist) \
++ KEEP(*(_kprobe_blacklist)) \
+ VMLINUX_SYMBOL(__stop_kprobe_blacklist) = .;
+ #else
+ #define KPROBE_BLACKLIST()
+@@ -123,10 +123,10 @@
+ #ifdef CONFIG_EVENT_TRACING
+ #define FTRACE_EVENTS() . = ALIGN(8); \
+ VMLINUX_SYMBOL(__start_ftrace_events) = .; \
+- *(_ftrace_events) \
++ KEEP(*(_ftrace_events)) \
+ VMLINUX_SYMBOL(__stop_ftrace_events) = .; \
+ VMLINUX_SYMBOL(__start_ftrace_enum_maps) = .; \
+- *(_ftrace_enum_map) \
++ KEEP(*(_ftrace_enum_map)) \
+ VMLINUX_SYMBOL(__stop_ftrace_enum_maps) = .;
+ #else
+ #define FTRACE_EVENTS()
+@@ -134,7 +134,7 @@
+
+ #ifdef CONFIG_TRACING
+ #define TRACE_PRINTKS() VMLINUX_SYMBOL(__start___trace_bprintk_fmt) = .; \
+- *(__trace_printk_fmt) /* Trace_printk fmt' pointer */ \
++ KEEP(*(__trace_printk_fmt)) /* Trace_printk fmt' pointer */ \
+ VMLINUX_SYMBOL(__stop___trace_bprintk_fmt) = .;
+ #define TRACEPOINT_STR() VMLINUX_SYMBOL(__start___tracepoint_str) = .; \
+ *(__tracepoint_str) /* Trace_printk fmt' pointer */ \
+@@ -147,7 +147,7 @@
+ #ifdef CONFIG_FTRACE_SYSCALLS
+ #define TRACE_SYSCALLS() . = ALIGN(8); \
+ VMLINUX_SYMBOL(__start_syscalls_metadata) = .; \
+- *(__syscalls_metadata) \
++ KEEP(*(__syscalls_metadata)) \
+ VMLINUX_SYMBOL(__stop_syscalls_metadata) = .;
+ #else
+ #define TRACE_SYSCALLS()
+@@ -169,8 +169,8 @@
+ #define _OF_TABLE_1(name) \
+ . = ALIGN(8); \
+ VMLINUX_SYMBOL(__##name##_of_table) = .; \
+- *(__##name##_of_table) \
+- *(__##name##_of_table_end)
++ KEEP(*(__##name##_of_table)) \
++ KEEP(*(__##name##_of_table_end))
+
+ #define CLKSRC_OF_TABLES() OF_TABLE(CONFIG_CLKSRC_OF, clksrc)
+ #define IRQCHIP_OF_MATCH_TABLE() OF_TABLE(CONFIG_IRQCHIP, irqchip)
+@@ -194,7 +194,7 @@
+ #define KERNEL_DTB() \
+ STRUCT_ALIGN(); \
+ VMLINUX_SYMBOL(__dtb_start) = .; \
+- *(.dtb.init.rodata) \
++ KEEP(*(.dtb.init.rodata)) \
+ VMLINUX_SYMBOL(__dtb_end) = .;
+
+ /* .data section */
+@@ -210,16 +210,17 @@
+ /* implement dynamic printk debug */ \
+ . = ALIGN(8); \
+ VMLINUX_SYMBOL(__start___jump_table) = .; \
+- *(__jump_table) \
++ KEEP(*(__jump_table)) \
+ VMLINUX_SYMBOL(__stop___jump_table) = .; \
+ . = ALIGN(8); \
+ VMLINUX_SYMBOL(__start___verbose) = .; \
+- *(__verbose) \
++ KEEP(*(__verbose)) \
+ VMLINUX_SYMBOL(__stop___verbose) = .; \
+ LIKELY_PROFILE() \
+ BRANCH_PROFILE() \
+ TRACE_PRINTKS() \
+- TRACEPOINT_STR()
++ TRACEPOINT_STR() \
++ *(.data.[a-zA-Z_]*)
+
+ /*
+ * Data section helpers
+@@ -273,35 +274,35 @@
+ /* PCI quirks */ \
+ .pci_fixup : AT(ADDR(.pci_fixup) - LOAD_OFFSET) { \
+ VMLINUX_SYMBOL(__start_pci_fixups_early) = .; \
+- *(.pci_fixup_early) \
++ KEEP(*(.pci_fixup_early)) \
+ VMLINUX_SYMBOL(__end_pci_fixups_early) = .; \
+ VMLINUX_SYMBOL(__start_pci_fixups_header) = .; \
+- *(.pci_fixup_header) \
++ KEEP(*(.pci_fixup_header)) \
+ VMLINUX_SYMBOL(__end_pci_fixups_header) = .; \
+ VMLINUX_SYMBOL(__start_pci_fixups_final) = .; \
+- *(.pci_fixup_final) \
++ KEEP(*(.pci_fixup_final)) \
+ VMLINUX_SYMBOL(__end_pci_fixups_final) = .; \
+ VMLINUX_SYMBOL(__start_pci_fixups_enable) = .; \
+- *(.pci_fixup_enable) \
++ KEEP(*(.pci_fixup_enable)) \
+ VMLINUX_SYMBOL(__end_pci_fixups_enable) = .; \
+ VMLINUX_SYMBOL(__start_pci_fixups_resume) = .; \
+- *(.pci_fixup_resume) \
++ KEEP(*(.pci_fixup_resume)) \
+ VMLINUX_SYMBOL(__end_pci_fixups_resume) = .; \
+ VMLINUX_SYMBOL(__start_pci_fixups_resume_early) = .; \
+- *(.pci_fixup_resume_early) \
++ KEEP(*(.pci_fixup_resume_early)) \
+ VMLINUX_SYMBOL(__end_pci_fixups_resume_early) = .; \
+ VMLINUX_SYMBOL(__start_pci_fixups_suspend) = .; \
+- *(.pci_fixup_suspend) \
++ KEEP(*(.pci_fixup_suspend)) \
+ VMLINUX_SYMBOL(__end_pci_fixups_suspend) = .; \
+ VMLINUX_SYMBOL(__start_pci_fixups_suspend_late) = .; \
+- *(.pci_fixup_suspend_late) \
++ KEEP(*(.pci_fixup_suspend_late)) \
+ VMLINUX_SYMBOL(__end_pci_fixups_suspend_late) = .; \
+ } \
+ \
+ /* Built-in firmware blobs */ \
+ .builtin_fw : AT(ADDR(.builtin_fw) - LOAD_OFFSET) { \
+ VMLINUX_SYMBOL(__start_builtin_fw) = .; \
+- *(.builtin_fw) \
++ KEEP(*(.builtin_fw)) \
+ VMLINUX_SYMBOL(__end_builtin_fw) = .; \
+ } \
+ \
+@@ -310,49 +311,49 @@
+ /* Kernel symbol table: Normal symbols */ \
+ __ksymtab : AT(ADDR(__ksymtab) - LOAD_OFFSET) { \
+ VMLINUX_SYMBOL(__start___ksymtab) = .; \
+- *(SORT(___ksymtab+*)) \
++ KEEP(*(SORT(___ksymtab+*))) \
+ VMLINUX_SYMBOL(__stop___ksymtab) = .; \
+ } \
+ \
+ /* Kernel symbol table: GPL-only symbols */ \
+ __ksymtab_gpl : AT(ADDR(__ksymtab_gpl) - LOAD_OFFSET) { \
+ VMLINUX_SYMBOL(__start___ksymtab_gpl) = .; \
+- *(SORT(___ksymtab_gpl+*)) \
++ KEEP(*(SORT(___ksymtab_gpl+*))) \
+ VMLINUX_SYMBOL(__stop___ksymtab_gpl) = .; \
+ } \
+ \
+ /* Kernel symbol table: Normal unused symbols */ \
+ __ksymtab_unused : AT(ADDR(__ksymtab_unused) - LOAD_OFFSET) { \
+ VMLINUX_SYMBOL(__start___ksymtab_unused) = .; \
+- *(SORT(___ksymtab_unused+*)) \
++ KEEP(*(SORT(___ksymtab_unused+*))) \
+ VMLINUX_SYMBOL(__stop___ksymtab_unused) = .; \
+ } \
+ \
+ /* Kernel symbol table: GPL-only unused symbols */ \
+ __ksymtab_unused_gpl : AT(ADDR(__ksymtab_unused_gpl) - LOAD_OFFSET) { \
+ VMLINUX_SYMBOL(__start___ksymtab_unused_gpl) = .; \
+- *(SORT(___ksymtab_unused_gpl+*)) \
++ KEEP(*(SORT(___ksymtab_unused_gpl+*))) \
+ VMLINUX_SYMBOL(__stop___ksymtab_unused_gpl) = .; \
+ } \
+ \
+ /* Kernel symbol table: GPL-future-only symbols */ \
+ __ksymtab_gpl_future : AT(ADDR(__ksymtab_gpl_future) - LOAD_OFFSET) { \
+ VMLINUX_SYMBOL(__start___ksymtab_gpl_future) = .; \
+- *(SORT(___ksymtab_gpl_future+*)) \
++ KEEP(*(SORT(___ksymtab_gpl_future+*))) \
+ VMLINUX_SYMBOL(__stop___ksymtab_gpl_future) = .; \
+ } \
+ \
+ /* Kernel symbol table: Normal symbols */ \
+ __kcrctab : AT(ADDR(__kcrctab) - LOAD_OFFSET) { \
+ VMLINUX_SYMBOL(__start___kcrctab) = .; \
+- *(SORT(___kcrctab+*)) \
++ KEEP(*(SORT(___kcrctab+*))) \
+ VMLINUX_SYMBOL(__stop___kcrctab) = .; \
+ } \
+ \
+ /* Kernel symbol table: GPL-only symbols */ \
+ __kcrctab_gpl : AT(ADDR(__kcrctab_gpl) - LOAD_OFFSET) { \
+ VMLINUX_SYMBOL(__start___kcrctab_gpl) = .; \
+- *(SORT(___kcrctab_gpl+*)) \
++ KEEP(*(SORT(___kcrctab_gpl+*))) \
+ VMLINUX_SYMBOL(__stop___kcrctab_gpl) = .; \
+ } \
+ \
+@@ -366,14 +367,14 @@
+ /* Kernel symbol table: GPL-only unused symbols */ \
+ __kcrctab_unused_gpl : AT(ADDR(__kcrctab_unused_gpl) - LOAD_OFFSET) { \
+ VMLINUX_SYMBOL(__start___kcrctab_unused_gpl) = .; \
+- *(SORT(___kcrctab_unused_gpl+*)) \
++ KEEP(*(SORT(___kcrctab_unused_gpl+*))) \
+ VMLINUX_SYMBOL(__stop___kcrctab_unused_gpl) = .; \
+ } \
+ \
+ /* Kernel symbol table: GPL-future-only symbols */ \
+ __kcrctab_gpl_future : AT(ADDR(__kcrctab_gpl_future) - LOAD_OFFSET) { \
+ VMLINUX_SYMBOL(__start___kcrctab_gpl_future) = .; \
+- *(SORT(___kcrctab_gpl_future+*)) \
++ KEEP(*(SORT(___kcrctab_gpl_future+*))) \
+ VMLINUX_SYMBOL(__stop___kcrctab_gpl_future) = .; \
+ } \
+ \
+@@ -392,14 +393,14 @@
+ /* Built-in module parameters. */ \
+ __param : AT(ADDR(__param) - LOAD_OFFSET) { \
+ VMLINUX_SYMBOL(__start___param) = .; \
+- *(__param) \
++ KEEP(*(__param)) \
+ VMLINUX_SYMBOL(__stop___param) = .; \
+ } \
+ \
+ /* Built-in module versions. */ \
+ __modver : AT(ADDR(__modver) - LOAD_OFFSET) { \
+ VMLINUX_SYMBOL(__start___modver) = .; \
+- *(__modver) \
++ KEEP(*(__modver)) \
+ VMLINUX_SYMBOL(__stop___modver) = .; \
+ . = ALIGN((align)); \
+ VMLINUX_SYMBOL(__end_rodata) = .; \
+@@ -453,7 +454,7 @@
+ #define ENTRY_TEXT \
+ ALIGN_FUNCTION(); \
+ VMLINUX_SYMBOL(__entry_text_start) = .; \
+- *(.entry.text) \
++ KEEP(*(.entry.text)) \
+ VMLINUX_SYMBOL(__entry_text_end) = .;
+
+ #ifdef CONFIG_FUNCTION_GRAPH_TRACER
+@@ -481,7 +482,7 @@
+ . = ALIGN(align); \
+ __ex_table : AT(ADDR(__ex_table) - LOAD_OFFSET) { \
+ VMLINUX_SYMBOL(__start___ex_table) = .; \
+- *(__ex_table) \
++ KEEP(*(__ex_table)) \
+ VMLINUX_SYMBOL(__stop___ex_table) = .; \
+ }
+
+@@ -497,9 +498,9 @@
+ #ifdef CONFIG_CONSTRUCTORS
+ #define KERNEL_CTORS() . = ALIGN(8); \
+ VMLINUX_SYMBOL(__ctors_start) = .; \
+- *(.ctors) \
++ KEEP(*(.ctors)) \
+ *(SORT(.init_array.*)) \
+- *(.init_array) \
++ KEEP(*(.init_array)) \
+ VMLINUX_SYMBOL(__ctors_end) = .;
+ #else
+ #define KERNEL_CTORS()
+@@ -552,7 +553,7 @@
+ #define SBSS(sbss_align) \
+ . = ALIGN(sbss_align); \
+ .sbss : AT(ADDR(.sbss) - LOAD_OFFSET) { \
+- *(.sbss) \
++ *(.sbss .sbss.*) \
+ *(.scommon) \
+ }
+
+@@ -570,7 +571,7 @@
+ BSS_FIRST_SECTIONS \
+ *(.bss..page_aligned) \
+ *(.dynbss) \
+- *(.bss) \
++ *(.bss .bss.*) \
+ *(COMMON) \
+ }
+
+@@ -619,7 +620,7 @@
+ . = ALIGN(8); \
+ __bug_table : AT(ADDR(__bug_table) - LOAD_OFFSET) { \
+ VMLINUX_SYMBOL(__start___bug_table) = .; \
+- *(__bug_table) \
++ KEEP(*(__bug_table)) \
+ VMLINUX_SYMBOL(__stop___bug_table) = .; \
+ }
+ #else
+@@ -631,7 +632,7 @@
+ . = ALIGN(4); \
+ .tracedata : AT(ADDR(.tracedata) - LOAD_OFFSET) { \
+ VMLINUX_SYMBOL(__tracedata_start) = .; \
+- *(.tracedata) \
++ KEEP(*(.tracedata)) \
+ VMLINUX_SYMBOL(__tracedata_end) = .; \
+ }
+ #else
+@@ -648,17 +649,17 @@
+ #define INIT_SETUP(initsetup_align) \
+ . = ALIGN(initsetup_align); \
+ VMLINUX_SYMBOL(__setup_start) = .; \
+- *(.init.setup) \
++ KEEP(*(.init.setup)) \
+ VMLINUX_SYMBOL(__setup_end) = .;
+
+ #define INIT_CALLS_LEVEL(level) \
+ VMLINUX_SYMBOL(__initcall##level##_start) = .; \
+- *(.initcall##level##.init) \
+- *(.initcall##level##s.init) \
++ KEEP(*(.initcall##level##.init)) \
++ KEEP(*(.initcall##level##s.init)) \
+
+ #define INIT_CALLS \
+ VMLINUX_SYMBOL(__initcall_start) = .; \
+- *(.initcallearly.init) \
++ KEEP(*(.initcallearly.init)) \
+ INIT_CALLS_LEVEL(0) \
+ INIT_CALLS_LEVEL(1) \
+ INIT_CALLS_LEVEL(2) \
+@@ -672,21 +673,21 @@
+
+ #define CON_INITCALL \
+ VMLINUX_SYMBOL(__con_initcall_start) = .; \
+- *(.con_initcall.init) \
++ KEEP(*(.con_initcall.init)) \
+ VMLINUX_SYMBOL(__con_initcall_end) = .;
+
+ #define SECURITY_INITCALL \
+ VMLINUX_SYMBOL(__security_initcall_start) = .; \
+- *(.security_initcall.init) \
++ KEEP(*(.security_initcall.init)) \
+ VMLINUX_SYMBOL(__security_initcall_end) = .;
+
+ #ifdef CONFIG_BLK_DEV_INITRD
+ #define INIT_RAM_FS \
+ . = ALIGN(4); \
+ VMLINUX_SYMBOL(__initramfs_start) = .; \
+- *(.init.ramfs) \
++ KEEP(*(.init.ramfs)) \
+ . = ALIGN(8); \
+- *(.init.ramfs.info)
++ KEEP(*(.init.ramfs.info))
+ #else
+ #define INIT_RAM_FS
+ #endif
+--- a/arch/arm/Makefile
++++ b/arch/arm/Makefile
+@@ -22,11 +22,16 @@ endif
+ ifeq ($(CONFIG_ARM_MODULE_PLTS),y)
+ LDFLAGS_MODULE += -T $(srctree)/arch/arm/kernel/module.lds
+ endif
++LDFLAGS_vmlinux += --gc-sections
+
+ OBJCOPYFLAGS :=-O binary -R .comment -S
+ GZFLAGS :=-9
+ #KBUILD_CFLAGS +=-pipe
+
++ifndef CONFIG_FUNCTION_TRACER
++KBUILD_CFLAGS_KERNEL += -ffunction-sections -fdata-sections
++endif
++
+ # Never generate .eh_frame
+ KBUILD_CFLAGS += $(call cc-option,-fno-dwarf2-cfi-asm)
+
+--- a/arch/arm/kernel/vmlinux.lds.S
++++ b/arch/arm/kernel/vmlinux.lds.S
+@@ -15,13 +15,13 @@
+ #define PROC_INFO \
+ . = ALIGN(4); \
+ VMLINUX_SYMBOL(__proc_info_begin) = .; \
+- *(.proc.info.init) \
++ KEEP(*(.proc.info.init)) \
+ VMLINUX_SYMBOL(__proc_info_end) = .;
+
+ #define IDMAP_TEXT \
+ ALIGN_FUNCTION(); \
+ VMLINUX_SYMBOL(__idmap_text_start) = .; \
+- *(.idmap.text) \
++ KEEP(*(.idmap.text)) \
+ VMLINUX_SYMBOL(__idmap_text_end) = .; \
+ . = ALIGN(PAGE_SIZE); \
+ VMLINUX_SYMBOL(__hyp_idmap_text_start) = .; \
+@@ -102,7 +102,7 @@ SECTIONS
+ _stext = .; /* Text and read-only data */
+ IDMAP_TEXT
+ __exception_text_start = .;
+- *(.exception.text)
++ KEEP(*(.exception.text))
+ __exception_text_end = .;
+ IRQENTRY_TEXT
+ TEXT_TEXT
+@@ -126,7 +126,7 @@ SECTIONS
+ __ex_table : AT(ADDR(__ex_table) - LOAD_OFFSET) {
+ __start___ex_table = .;
+ #ifdef CONFIG_MMU
+- *(__ex_table)
++ KEEP(*(__ex_table))
+ #endif
+ __stop___ex_table = .;
+ }
+@@ -138,12 +138,12 @@ SECTIONS
+ . = ALIGN(8);
+ .ARM.unwind_idx : {
+ __start_unwind_idx = .;
+- *(.ARM.exidx*)
++ KEEP(*(.ARM.exidx*))
+ __stop_unwind_idx = .;
+ }
+ .ARM.unwind_tab : {
+ __start_unwind_tab = .;
+- *(.ARM.extab*)
++ KEEP(*(.ARM.extab*))
+ __stop_unwind_tab = .;
+ }
+ #endif
+@@ -166,14 +166,14 @@ SECTIONS
+ */
+ __vectors_start = .;
+ .vectors 0 : AT(__vectors_start) {
+- *(.vectors)
++ KEEP(*(.vectors))
+ }
+ . = __vectors_start + SIZEOF(.vectors);
+ __vectors_end = .;
+
+ __stubs_start = .;
+ .stubs 0x1000 : AT(__stubs_start) {
+- *(.stubs)
++ KEEP(*(.stubs))
+ }
+ . = __stubs_start + SIZEOF(.stubs);
+ __stubs_end = .;
+@@ -187,24 +187,24 @@ SECTIONS
+ }
+ .init.arch.info : {
+ __arch_info_begin = .;
+- *(.arch.info.init)
++ KEEP(*(.arch.info.init))
+ __arch_info_end = .;
+ }
+ .init.tagtable : {
+ __tagtable_begin = .;
+- *(.taglist.init)
++ KEEP(*(.taglist.init))
+ __tagtable_end = .;
+ }
+ #ifdef CONFIG_SMP_ON_UP
+ .init.smpalt : {
+ __smpalt_begin = .;
+- *(.alt.smp.init)
++ KEEP(*(.alt.smp.init))
+ __smpalt_end = .;
+ }
+ #endif
+ .init.pv_table : {
+ __pv_table_begin = .;
+- *(.pv_table)
++ KEEP(*(.pv_table))
+ __pv_table_end = .;
+ }
+ .init.data : {
+--- a/arch/arm/boot/compressed/Makefile
++++ b/arch/arm/boot/compressed/Makefile
+@@ -105,6 +105,7 @@ ifeq ($(CONFIG_FUNCTION_TRACER),y)
+ ORIG_CFLAGS := $(KBUILD_CFLAGS)
+ KBUILD_CFLAGS = $(subst -pg, , $(ORIG_CFLAGS))
+ endif
++KBUILD_CFLAGS_KERNEL := $(patsubst -f%-sections,,$(KBUILD_CFLAGS_KERNEL))
+
+ ccflags-y := -fpic -mno-single-pic-base -fno-builtin -I$(obj)
+ asflags-y := -DZIMAGE