220-gc_sections.patch (7.4 KB)
  1. From e3d8676f5722b7622685581e06e8f53e6138e3ab Mon Sep 17 00:00:00 2001
  2. From: Felix Fietkau <nbd@nbd.name>
  3. Date: Sat, 15 Jul 2017 23:42:36 +0200
  4. Subject: use -ffunction-sections, -fdata-sections and --gc-sections
  5. In combination with kernel symbol export stripping this significantly reduces
  6. the kernel image size. Used on both ARM and MIPS architectures.
  7. Signed-off-by: Felix Fietkau <nbd@nbd.name>
  8. Signed-off-by: Jonas Gorski <jogo@openwrt.org>
  9. Signed-off-by: Gabor Juhos <juhosg@openwrt.org>
  10. ---
  11. Makefile | 10 +++----
  12. arch/arm/Kconfig | 1 +
  13. arch/arm/boot/compressed/Makefile | 1 +
  14. arch/arm/kernel/vmlinux.lds.S | 26 ++++++++--------
  15. arch/mips/Kconfig | 1 +
  16. arch/mips/kernel/vmlinux.lds.S | 4 +--
  17. include/asm-generic/vmlinux.lds.h | 63 ++++++++++++++++++++-------------------
  18. 7 files changed, 55 insertions(+), 51 deletions(-)
  19. --- a/Makefile
  20. +++ b/Makefile
  21. @@ -272,6 +272,11 @@ else
  22. scripts/Kbuild.include: ;
  23. include scripts/Kbuild.include
  24. +ifdef CONFIG_LD_DEAD_CODE_DATA_ELIMINATION
  25. +KBUILD_CFLAGS_KERNEL += $(call cc-option,-ffunction-sections,)
  26. +KBUILD_CFLAGS_KERNEL += $(call cc-option,-fdata-sections,)
  27. +endif
  28. +
  29. # Read KERNELRELEASE from include/config/kernel.release (if it exists)
  30. KERNELRELEASE = $(shell cat include/config/kernel.release 2> /dev/null)
  31. KERNELVERSION = $(VERSION)$(if $(PATCHLEVEL),.$(PATCHLEVEL)$(if $(SUBLEVEL),.$(SUBLEVEL)))$(EXTRAVERSION)
  32. @@ -779,11 +784,6 @@ ifdef CONFIG_DEBUG_SECTION_MISMATCH
  33. KBUILD_CFLAGS += $(call cc-option, -fno-inline-functions-called-once)
  34. endif
  35. -ifdef CONFIG_LD_DEAD_CODE_DATA_ELIMINATION
  36. -KBUILD_CFLAGS += $(call cc-option,-ffunction-sections,)
  37. -KBUILD_CFLAGS += $(call cc-option,-fdata-sections,)
  38. -endif
  39. -
  40. # arch Makefile may override CC so keep this after arch Makefile is included
  41. NOSTDINC_FLAGS += -nostdinc -isystem $(shell $(CC) -print-file-name=include)
  42. CHECKFLAGS += $(NOSTDINC_FLAGS)
  43. --- a/arch/arm/Kconfig
  44. +++ b/arch/arm/Kconfig
  45. @@ -91,6 +91,7 @@ config ARM
  46. select HAVE_UID16
  47. select HAVE_VIRT_CPU_ACCOUNTING_GEN
  48. select IRQ_FORCED_THREADING
  49. + select LD_DEAD_CODE_DATA_ELIMINATION
  50. select MODULES_USE_ELF_REL
  51. select NO_BOOTMEM
  52. select OF_EARLY_FLATTREE if OF
  53. --- a/arch/arm/boot/compressed/Makefile
  54. +++ b/arch/arm/boot/compressed/Makefile
  55. @@ -103,6 +103,7 @@ ifeq ($(CONFIG_FUNCTION_TRACER),y)
  56. ORIG_CFLAGS := $(KBUILD_CFLAGS)
  57. KBUILD_CFLAGS = $(subst -pg, , $(ORIG_CFLAGS))
  58. endif
  59. +KBUILD_CFLAGS_KERNEL := $(patsubst -f%-sections,,$(KBUILD_CFLAGS_KERNEL))
  60. # -fstack-protector-strong triggers protection checks in this code,
  61. # but it is being used too early to link to meaningful stack_chk logic.
  62. --- a/arch/arm/kernel/vmlinux.lds.S
  63. +++ b/arch/arm/kernel/vmlinux.lds.S
  64. @@ -18,7 +18,7 @@
  65. #define PROC_INFO \
  66. . = ALIGN(4); \
  67. VMLINUX_SYMBOL(__proc_info_begin) = .; \
  68. - *(.proc.info.init) \
  69. + KEEP(*(.proc.info.init)) \
  70. VMLINUX_SYMBOL(__proc_info_end) = .;
  71. #define HYPERVISOR_TEXT \
  72. @@ -29,11 +29,11 @@
  73. #define IDMAP_TEXT \
  74. ALIGN_FUNCTION(); \
  75. VMLINUX_SYMBOL(__idmap_text_start) = .; \
  76. - *(.idmap.text) \
  77. + KEEP(*(.idmap.text)) \
  78. VMLINUX_SYMBOL(__idmap_text_end) = .; \
  79. . = ALIGN(PAGE_SIZE); \
  80. VMLINUX_SYMBOL(__hyp_idmap_text_start) = .; \
  81. - *(.hyp.idmap.text) \
  82. + KEEP(*(.hyp.idmap.text)) \
  83. VMLINUX_SYMBOL(__hyp_idmap_text_end) = .;
  84. #ifdef CONFIG_HOTPLUG_CPU
  85. @@ -106,7 +106,7 @@ SECTIONS
  86. _stext = .; /* Text and read-only data */
  87. IDMAP_TEXT
  88. __exception_text_start = .;
  89. - *(.exception.text)
  90. + KEEP(*(.exception.text))
  91. __exception_text_end = .;
  92. IRQENTRY_TEXT
  93. SOFTIRQENTRY_TEXT
  94. @@ -135,7 +135,7 @@ SECTIONS
  95. __ex_table : AT(ADDR(__ex_table) - LOAD_OFFSET) {
  96. __start___ex_table = .;
  97. #ifdef CONFIG_MMU
  98. - *(__ex_table)
  99. + KEEP(*(__ex_table))
  100. #endif
  101. __stop___ex_table = .;
  102. }
  103. @@ -147,12 +147,12 @@ SECTIONS
  104. . = ALIGN(8);
  105. .ARM.unwind_idx : {
  106. __start_unwind_idx = .;
  107. - *(.ARM.exidx*)
  108. + KEEP(*(.ARM.exidx*))
  109. __stop_unwind_idx = .;
  110. }
  111. .ARM.unwind_tab : {
  112. __start_unwind_tab = .;
  113. - *(.ARM.extab*)
  114. + KEEP(*(.ARM.extab*))
  115. __stop_unwind_tab = .;
  116. }
  117. #endif
  118. @@ -172,14 +172,14 @@ SECTIONS
  119. */
  120. __vectors_start = .;
  121. .vectors 0xffff0000 : AT(__vectors_start) {
  122. - *(.vectors)
  123. + KEEP(*(.vectors))
  124. }
  125. . = __vectors_start + SIZEOF(.vectors);
  126. __vectors_end = .;
  127. __stubs_start = .;
  128. .stubs ADDR(.vectors) + 0x1000 : AT(__stubs_start) {
  129. - *(.stubs)
  130. + KEEP(*(.stubs))
  131. }
  132. . = __stubs_start + SIZEOF(.stubs);
  133. __stubs_end = .;
  134. @@ -195,24 +195,24 @@ SECTIONS
  135. }
  136. .init.arch.info : {
  137. __arch_info_begin = .;
  138. - *(.arch.info.init)
  139. + KEEP(*(.arch.info.init))
  140. __arch_info_end = .;
  141. }
  142. .init.tagtable : {
  143. __tagtable_begin = .;
  144. - *(.taglist.init)
  145. + KEEP(*(.taglist.init))
  146. __tagtable_end = .;
  147. }
  148. #ifdef CONFIG_SMP_ON_UP
  149. .init.smpalt : {
  150. __smpalt_begin = .;
  151. - *(.alt.smp.init)
  152. + KEEP(*(.alt.smp.init))
  153. __smpalt_end = .;
  154. }
  155. #endif
  156. .init.pv_table : {
  157. __pv_table_begin = .;
  158. - *(.pv_table)
  159. + KEEP(*(.pv_table))
  160. __pv_table_end = .;
  161. }
  162. .init.data : {
  163. --- a/arch/mips/Kconfig
  164. +++ b/arch/mips/Kconfig
  165. @@ -40,6 +40,7 @@ config MIPS
  166. select HAVE_CBPF_JIT if (!64BIT && !CPU_MICROMIPS)
  167. select HAVE_EBPF_JIT if (64BIT && !CPU_MICROMIPS)
  168. select HAVE_CC_STACKPROTECTOR
  169. + select LD_DEAD_CODE_DATA_ELIMINATION
  170. select HAVE_CONTEXT_TRACKING
  171. select HAVE_COPY_THREAD_TLS
  172. select HAVE_C_RECORDMCOUNT
  173. --- a/arch/mips/kernel/vmlinux.lds.S
  174. +++ b/arch/mips/kernel/vmlinux.lds.S
  175. @@ -72,7 +72,7 @@ SECTIONS
  176. /* Exception table for data bus errors */
  177. __dbe_table : {
  178. __start___dbe_table = .;
  179. - *(__dbe_table)
  180. + KEEP(*(__dbe_table))
  181. __stop___dbe_table = .;
  182. }
  183. @@ -123,7 +123,7 @@ SECTIONS
  184. . = ALIGN(4);
  185. .mips.machines.init : AT(ADDR(.mips.machines.init) - LOAD_OFFSET) {
  186. __mips_machines_start = .;
  187. - *(.mips.machines.init)
  188. + KEEP(*(.mips.machines.init))
  189. __mips_machines_end = .;
  190. }
  191. --- a/include/asm-generic/vmlinux.lds.h
  192. +++ b/include/asm-generic/vmlinux.lds.h
  193. @@ -105,7 +105,7 @@
  194. #ifdef CONFIG_FTRACE_MCOUNT_RECORD
  195. #define MCOUNT_REC() . = ALIGN(8); \
  196. VMLINUX_SYMBOL(__start_mcount_loc) = .; \
  197. - *(__mcount_loc) \
  198. + KEEP(*(__mcount_loc)) \
  199. VMLINUX_SYMBOL(__stop_mcount_loc) = .;
  200. #else
  201. #define MCOUNT_REC()
  202. @@ -113,7 +113,7 @@
  203. #ifdef CONFIG_TRACE_BRANCH_PROFILING
  204. #define LIKELY_PROFILE() VMLINUX_SYMBOL(__start_annotated_branch_profile) = .; \
  205. - *(_ftrace_annotated_branch) \
  206. + KEEP(*(_ftrace_annotated_branch)) \
  207. VMLINUX_SYMBOL(__stop_annotated_branch_profile) = .;
  208. #else
  209. #define LIKELY_PROFILE()
  210. @@ -121,7 +121,7 @@
  211. #ifdef CONFIG_PROFILE_ALL_BRANCHES
  212. #define BRANCH_PROFILE() VMLINUX_SYMBOL(__start_branch_profile) = .; \
  213. - *(_ftrace_branch) \
  214. + KEEP(*(_ftrace_branch)) \
  215. VMLINUX_SYMBOL(__stop_branch_profile) = .;
  216. #else
  217. #define BRANCH_PROFILE()
  218. @@ -237,7 +237,8 @@
  219. LIKELY_PROFILE() \
  220. BRANCH_PROFILE() \
  221. TRACE_PRINTKS() \
  222. - TRACEPOINT_STR()
  223. + TRACEPOINT_STR() \
  224. + *(.data.[a-zA-Z_]*)
  225. /*
  226. * Data section helpers
  227. @@ -496,7 +497,7 @@
  228. #define ENTRY_TEXT \
  229. ALIGN_FUNCTION(); \
  230. VMLINUX_SYMBOL(__entry_text_start) = .; \
  231. - *(.entry.text) \
  232. + KEEP(*(.entry.text)) \
  233. VMLINUX_SYMBOL(__entry_text_end) = .;
  234. #define IRQENTRY_TEXT \
  235. @@ -603,7 +604,7 @@
  236. . = ALIGN(sbss_align); \
  237. .sbss : AT(ADDR(.sbss) - LOAD_OFFSET) { \
  238. *(.dynsbss) \
  239. - *(.sbss) \
  240. + *(.sbss .sbss.*) \
  241. *(.scommon) \
  242. }