Diffstat (limited to 'target/linux/generic/hack-4.19/220-gc_sections.patch')
-rw-r--r-- | target/linux/generic/hack-4.19/220-gc_sections.patch | 258
1 file changed, 258 insertions, 0 deletions
diff --git a/target/linux/generic/hack-4.19/220-gc_sections.patch b/target/linux/generic/hack-4.19/220-gc_sections.patch
new file mode 100644
index 0000000000..d257232403
--- /dev/null
+++ b/target/linux/generic/hack-4.19/220-gc_sections.patch
@@ -0,0 +1,258 @@
+From e3d8676f5722b7622685581e06e8f53e6138e3ab Mon Sep 17 00:00:00 2001
+From: Felix Fietkau <nbd@nbd.name>
+Date: Sat, 15 Jul 2017 23:42:36 +0200
+Subject: use -ffunction-sections, -fdata-sections and --gc-sections
+
+In combination with kernel symbol export stripping this significantly reduces
+the kernel image size. Used on both ARM and MIPS architectures.
+
+Signed-off-by: Felix Fietkau <nbd@nbd.name>
+Signed-off-by: Jonas Gorski <jogo@openwrt.org>
+Signed-off-by: Gabor Juhos <juhosg@openwrt.org>
+---
+ Makefile                          | 10 +++----
+ arch/arm/Kconfig                  |  1 +
+ arch/arm/boot/compressed/Makefile |  1 +
+ arch/arm/kernel/vmlinux.lds.S     | 26 ++++++++--------
+ arch/mips/Kconfig                 |  1 +
+ arch/mips/kernel/vmlinux.lds.S    |  4 +--
+ include/asm-generic/vmlinux.lds.h | 63 ++++++++++++++++++++-------------------
+ 7 files changed, 55 insertions(+), 51 deletions(-)
+
+--- a/Makefile
++++ b/Makefile
+@@ -272,6 +272,11 @@ else
+ scripts/Kbuild.include: ;
+ include scripts/Kbuild.include
+ 
++ifdef CONFIG_LD_DEAD_CODE_DATA_ELIMINATION
++KBUILD_CFLAGS_KERNEL += $(call cc-option,-ffunction-sections,)
++KBUILD_CFLAGS_KERNEL += $(call cc-option,-fdata-sections,)
++endif
++
+ # Read KERNELRELEASE from include/config/kernel.release (if it exists)
+ KERNELRELEASE = $(shell cat include/config/kernel.release 2> /dev/null)
+ KERNELVERSION = $(VERSION)$(if $(PATCHLEVEL),.$(PATCHLEVEL)$(if $(SUBLEVEL),.$(SUBLEVEL)))$(EXTRAVERSION)
+@@ -788,11 +793,6 @@ ifdef CONFIG_DEBUG_SECTION_MISMATCH
+ KBUILD_CFLAGS += $(call cc-option, -fno-inline-functions-called-once)
+ endif
+ 
+-ifdef CONFIG_LD_DEAD_CODE_DATA_ELIMINATION
+-KBUILD_CFLAGS += $(call cc-option,-ffunction-sections,)
+-KBUILD_CFLAGS += $(call cc-option,-fdata-sections,)
+-endif
+-
+ # arch Makefile may override CC so keep this after arch Makefile is included
+ NOSTDINC_FLAGS += -nostdinc -isystem $(shell $(CC) -print-file-name=include)
+ CHECKFLAGS += $(NOSTDINC_FLAGS)
+--- a/arch/arm/Kconfig
++++ b/arch/arm/Kconfig
+@@ -91,6 +91,7 @@ config ARM
+ 	select HAVE_UID16
+ 	select HAVE_VIRT_CPU_ACCOUNTING_GEN
+ 	select IRQ_FORCED_THREADING
++	select LD_DEAD_CODE_DATA_ELIMINATION
+ 	select MODULES_USE_ELF_REL
+ 	select NO_BOOTMEM
+ 	select OF_EARLY_FLATTREE if OF
+--- a/arch/arm/boot/compressed/Makefile
++++ b/arch/arm/boot/compressed/Makefile
+@@ -103,6 +103,7 @@ ifeq ($(CONFIG_FUNCTION_TRACER),y)
+ ORIG_CFLAGS := $(KBUILD_CFLAGS)
+ KBUILD_CFLAGS = $(subst -pg, , $(ORIG_CFLAGS))
+ endif
++KBUILD_CFLAGS_KERNEL := $(patsubst -f%-sections,,$(KBUILD_CFLAGS_KERNEL))
+ 
+ # -fstack-protector-strong triggers protection checks in this code,
+ # but it is being used too early to link to meaningful stack_chk logic.
+--- a/arch/arm/kernel/vmlinux.lds.S
++++ b/arch/arm/kernel/vmlinux.lds.S
+@@ -18,7 +18,7 @@
+ #define PROC_INFO							\
+ 	. = ALIGN(4);							\
+ 	VMLINUX_SYMBOL(__proc_info_begin) = .;				\
+-	*(.proc.info.init)						\
++	KEEP(*(.proc.info.init))					\
+ 	VMLINUX_SYMBOL(__proc_info_end) = .;
+ 
+ #define HYPERVISOR_TEXT							\
+@@ -29,11 +29,11 @@
+ #define IDMAP_TEXT							\
+ 	ALIGN_FUNCTION();						\
+ 	VMLINUX_SYMBOL(__idmap_text_start) = .;				\
+-	*(.idmap.text)							\
++	KEEP(*(.idmap.text))						\
+ 	VMLINUX_SYMBOL(__idmap_text_end) = .;				\
+ 	. = ALIGN(PAGE_SIZE);						\
+ 	VMLINUX_SYMBOL(__hyp_idmap_text_start) = .;			\
+-	*(.hyp.idmap.text)						\
++	KEEP(*(.hyp.idmap.text))					\
+ 	VMLINUX_SYMBOL(__hyp_idmap_text_end) = .;
+ 
+ #ifdef CONFIG_HOTPLUG_CPU
+@@ -106,7 +106,7 @@ SECTIONS
+ 		_stext = .;		/* Text and read-only data	*/
+ 			IDMAP_TEXT
+ 			__exception_text_start = .;
+-			*(.exception.text)
++			KEEP(*(.exception.text))
+ 			__exception_text_end = .;
+ 			IRQENTRY_TEXT
+ 			SOFTIRQENTRY_TEXT
+@@ -135,7 +135,7 @@ SECTIONS
+ 	__ex_table : AT(ADDR(__ex_table) - LOAD_OFFSET) {
+ 		__start___ex_table = .;
+ #ifdef CONFIG_MMU
+-		*(__ex_table)
++		KEEP(*(__ex_table))
+ #endif
+ 		__stop___ex_table = .;
+ 	}
+@@ -147,12 +147,12 @@ SECTIONS
+ 	. = ALIGN(8);
+ 	.ARM.unwind_idx : {
+ 		__start_unwind_idx = .;
+-		*(.ARM.exidx*)
++		KEEP(*(.ARM.exidx*))
+ 		__stop_unwind_idx = .;
+ 	}
+ 	.ARM.unwind_tab : {
+ 		__start_unwind_tab = .;
+-		*(.ARM.extab*)
++		KEEP(*(.ARM.extab*))
+ 		__stop_unwind_tab = .;
+ 	}
+ #endif
+@@ -172,14 +172,14 @@ SECTIONS
+ 	 */
+ 	__vectors_start = .;
+ 	.vectors 0xffff0000 : AT(__vectors_start) {
+-		*(.vectors)
++		KEEP(*(.vectors))
+ 	}
+ 	. = __vectors_start + SIZEOF(.vectors);
+ 	__vectors_end = .;
+ 
+ 	__stubs_start = .;
+ 	.stubs ADDR(.vectors) + 0x1000 : AT(__stubs_start) {
+-		*(.stubs)
++		KEEP(*(.stubs))
+ 	}
+ 	. = __stubs_start + SIZEOF(.stubs);
+ 	__stubs_end = .;
+@@ -195,24 +195,24 @@ SECTIONS
+ 	}
+ 	.init.arch.info : {
+ 		__arch_info_begin = .;
+-		*(.arch.info.init)
++		KEEP(*(.arch.info.init))
+ 		__arch_info_end = .;
+ 	}
+ 	.init.tagtable : {
+ 		__tagtable_begin = .;
+-		*(.taglist.init)
++		KEEP(*(.taglist.init))
+ 		__tagtable_end = .;
+ 	}
+ #ifdef CONFIG_SMP_ON_UP
+ 	.init.smpalt : {
+ 		__smpalt_begin = .;
+-		*(.alt.smp.init)
++		KEEP(*(.alt.smp.init))
+ 		__smpalt_end = .;
+ 	}
+ #endif
+ 	.init.pv_table : {
+ 		__pv_table_begin = .;
+-		*(.pv_table)
++		KEEP(*(.pv_table))
+ 		__pv_table_end = .;
+ 	}
+ 	.init.data : {
+--- a/arch/mips/Kconfig
++++ b/arch/mips/Kconfig
+@@ -40,6 +40,7 @@ config MIPS
+ 	select HAVE_CBPF_JIT if (!64BIT && !CPU_MICROMIPS)
+ 	select HAVE_EBPF_JIT if (64BIT && !CPU_MICROMIPS)
+ 	select HAVE_CC_STACKPROTECTOR
++	select LD_DEAD_CODE_DATA_ELIMINATION
+ 	select HAVE_CONTEXT_TRACKING
+ 	select HAVE_COPY_THREAD_TLS
+ 	select HAVE_C_RECORDMCOUNT
+--- a/arch/mips/kernel/vmlinux.lds.S
++++ b/arch/mips/kernel/vmlinux.lds.S
+@@ -72,7 +72,7 @@ SECTIONS
+ 	/* Exception table for data bus errors */
+ 	__dbe_table : {
+ 		__start___dbe_table = .;
+-		*(__dbe_table)
++		KEEP(*(__dbe_table))
+ 		__stop___dbe_table = .;
+ 	}
+ 
+@@ -123,7 +123,7 @@ SECTIONS
+ 	. = ALIGN(4);
+ 	.mips.machines.init : AT(ADDR(.mips.machines.init) - LOAD_OFFSET) {
+ 		__mips_machines_start = .;
+-		*(.mips.machines.init)
++		KEEP(*(.mips.machines.init))
+ 		__mips_machines_end = .;
+ 	}
+ 
+--- a/include/asm-generic/vmlinux.lds.h
++++ b/include/asm-generic/vmlinux.lds.h
+@@ -105,7 +105,7 @@
+ #ifdef CONFIG_FTRACE_MCOUNT_RECORD
+ #define MCOUNT_REC()	. = ALIGN(8);				\
+ 			VMLINUX_SYMBOL(__start_mcount_loc) = .;	\
+-			*(__mcount_loc)				\
++			KEEP(*(__mcount_loc))			\
+ 			VMLINUX_SYMBOL(__stop_mcount_loc) = .;
+ #else
+ #define MCOUNT_REC()
+@@ -113,7 +113,7 @@
+ 
+ #ifdef CONFIG_TRACE_BRANCH_PROFILING
+ #define LIKELY_PROFILE()	VMLINUX_SYMBOL(__start_annotated_branch_profile) = .;	\
+-				*(_ftrace_annotated_branch)				\
++				KEEP(*(_ftrace_annotated_branch))			\
+ 				VMLINUX_SYMBOL(__stop_annotated_branch_profile) = .;
+ #else
+ #define LIKELY_PROFILE()
+@@ -121,7 +121,7 @@
+ 
+ #ifdef CONFIG_PROFILE_ALL_BRANCHES
+ #define BRANCH_PROFILE()	VMLINUX_SYMBOL(__start_branch_profile) = .;	\
+-				*(_ftrace_branch)				\
++				KEEP(*(_ftrace_branch))				\
+ 				VMLINUX_SYMBOL(__stop_branch_profile) = .;
+ #else
+ #define BRANCH_PROFILE()
+@@ -237,7 +237,8 @@
+ 	LIKELY_PROFILE()						\
+ 	BRANCH_PROFILE()						\
+ 	TRACE_PRINTKS()							\
+-	TRACEPOINT_STR()
++	TRACEPOINT_STR()						\
++	*(.data.[a-zA-Z_]*)
+ 
+ /*
+  * Data section helpers
+@@ -496,7 +497,7 @@
+ #define ENTRY_TEXT							\
+ 		ALIGN_FUNCTION();					\
+ 		VMLINUX_SYMBOL(__entry_text_start) = .;			\
+-		*(.entry.text)						\
++		KEEP(*(.entry.text))					\
+ 		VMLINUX_SYMBOL(__entry_text_end) = .;
+ 
+ #define IRQENTRY_TEXT							\
+@@ -603,7 +604,7 @@
+ 	. = ALIGN(sbss_align);						\
+ 	.sbss : AT(ADDR(.sbss) - LOAD_OFFSET) {				\
+ 		*(.dynsbss)						\
+-		*(.sbss)						\
++		*(.sbss .sbss.*)					\
+ 		*(.scommon)						\
+ 	}
+ 
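Background note: the effect this patch relies on can be reproduced with a plain GCC/binutils toolchain outside the kernel tree. The following minimal sketch (an assumed example file, gc_demo.c, not part of the patch) shows how -ffunction-sections and -fdata-sections give every function and object its own section so that the linker's --gc-sections pass can discard whatever nothing references.

/* gc_demo.c - minimal sketch (assumed example, not taken from the patch).
 *
 * Build and link with a GNU toolchain:
 *   gcc -Os -ffunction-sections -fdata-sections -c gc_demo.c
 *   gcc -Wl,--gc-sections -Wl,--print-gc-sections -o gc_demo gc_demo.o
 *
 * -ffunction-sections emits unused_helper() into its own .text.unused_helper
 * section and -fdata-sections gives unused_table its own .rodata section;
 * --gc-sections then discards both, since nothing reachable from main()
 * refers to them, and --print-gc-sections reports each removed section.
 */
#include <stdio.h>

const int unused_table[4] = { 1, 2, 3, 4 };	/* only referenced by unused_helper() */

int unused_helper(int x)
{
	return unused_table[x & 3];	/* never called: section gets garbage-collected */
}

int main(void)
{
	printf("only code reachable from main() survives --gc-sections\n");
	return 0;
}

The KEEP(*(...)) wrappers added to the linker scripts address the flip side of the same mechanism: tables such as __ex_table, .proc.info.init or __mcount_loc are reached only through linker-script symbols rather than ordinary code references, so without KEEP() the garbage-collection pass would treat those sections as unreachable and drop them.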