x86/asm: Make some functions local labels

Boris suggests turning these functions into local labels (prepending
".L" to their names) to eliminate them from the symbol table. These are
functions with very local names and they really should not be visible
anywhere.
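
(For illustration, a minimal standalone example that is not part of
this patch: assemble with "as -o demo.o demo.S" and compare "nm demo.o".
The plain label shows up as a local "t" symbol; the ".L" one gets no
symbol table entry at all.)

	.text
	.globl	outer
outer:
	jmp	helper		/* "helper" gets a symbol table entry */
	jmp	.Lhelper	/* ".Lhelper" is assembler-internal: no entry */
helper:
	ret
.Lhelper:
	ret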

Note that objtool won't see these functions anymore (and so won't
generate ORC debug info for them). But none of these functions is
annotated with ENDPROC, so they wouldn't get objtool's attention anyway.
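
(For reference, the annotation in question: ENDPROC marks its argument
as an ELF function via ".type name, @function", which is what draws
objtool's attention. A sketch with a hypothetical function name:)

#include <linux/linkage.h>

ENTRY(my_func)			/* hypothetical name; expands to .globl + label */
	ret
ENDPROC(my_func)		/* sets .type/.size; objtool would inspect this */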

Signed-off-by: Jiri Slaby <jslaby@suse.cz>
Signed-off-by: Borislav Petkov <bp@suse.de>
Cc: Andy Lutomirski <luto@kernel.org>
Cc: Cao jin <caoj.fnst@cn.fujitsu.com>
Cc: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
Cc: "H. Peter Anvin" <hpa@zytor.com>
Cc: Ingo Molnar <mingo@redhat.com>
Cc: Josh Poimboeuf <jpoimboe@redhat.com>
Cc: "Kirill A. Shutemov" <kirill.shutemov@linux.intel.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Steve Winslow <swinslow@gmail.com>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Wei Huang <wei@redhat.com>
Cc: x86-ml <x86@kernel.org>
Cc: Xiaoyao Li <xiaoyao.li@linux.intel.com>
Link: https://lkml.kernel.org/r/20190906075550.23435-2-jslaby@suse.cz
Author: Jiri Slaby, 2019-09-06 09:55:50 +02:00
Committed-by: Borislav Petkov
parent 559ceeed62
commit 98ededb61f
6 changed files with 39 additions and 39 deletions


@@ -140,7 +140,7 @@ ENTRY(startup_32)
/*
* Jump to the relocated address.
*/
-	leal	relocated(%ebx), %eax
+	leal	.Lrelocated(%ebx), %eax
jmp *%eax
ENDPROC(startup_32)
@@ -209,7 +209,7 @@ ENDPROC(efi32_stub_entry)
#endif
.text
-relocated:
+.Lrelocated:
/*
* Clear BSS (stack is currently empty)
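
(Why the leal/jmp pair above: the decompressor has just copied itself
to a new address held in %ebx, so a direct "jmp relocated" would land
in the old copy. The position-independent pattern, annotated:)

	leal	.Lrelocated(%ebx), %eax	/* run-time address in the moved copy */
	jmp	*%eax			/* indirect jump to the relocated code */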


@@ -87,7 +87,7 @@ ENTRY(startup_32)
call verify_cpu
testl %eax, %eax
-	jnz	no_longmode
+	jnz	.Lno_longmode
/*
* Compute the delta between where we were compiled to run at
@@ -322,7 +322,7 @@ ENTRY(startup_64)
1: popq %rdi
subq $1b, %rdi
-	call	adjust_got
+	call	.Ladjust_got
/*
* At this point we are in long mode with 4-level paging enabled,
@@ -421,7 +421,7 @@ trampoline_return:
/* The new adjustment is the relocation address */
movq %rbx, %rdi
-	call	adjust_got
+	call	.Ladjust_got
/*
* Copy the compressed kernel to the end of our buffer
@@ -440,7 +440,7 @@ trampoline_return:
/*
* Jump to the relocated address.
*/
-	leaq	relocated(%rbx), %rax
+	leaq	.Lrelocated(%rbx), %rax
jmp *%rax
#ifdef CONFIG_EFI_STUB
@@ -511,7 +511,7 @@ ENDPROC(efi64_stub_entry)
#endif
.text
-relocated:
+.Lrelocated:
/*
* Clear BSS (stack is currently empty)
@@ -548,7 +548,7 @@ relocated:
* first time we touch GOT).
* RDI is the new adjustment to apply.
*/
-adjust_got:
+.Ladjust_got:
/* Walk through the GOT adding the address to the entries */
leaq _got(%rip), %rdx
leaq _egot(%rip), %rcx
@@ -622,7 +622,7 @@ ENTRY(trampoline_32bit_src)
movl %eax, %cr4
/* Calculate address of paging_enabled() once we are executing in the trampoline */
-	leal	paging_enabled - trampoline_32bit_src + TRAMPOLINE_32BIT_CODE_OFFSET(%ecx), %eax
+	leal	.Lpaging_enabled - trampoline_32bit_src + TRAMPOLINE_32BIT_CODE_OFFSET(%ecx), %eax
/* Prepare the stack for far return to Long Mode */
pushl $__KERNEL_CS
@@ -635,7 +635,7 @@ ENTRY(trampoline_32bit_src)
lret
.code64
-paging_enabled:
+.Lpaging_enabled:
/* Return from the trampoline */
jmp *%rdi
@@ -647,7 +647,7 @@ paging_enabled:
.org trampoline_32bit_src + TRAMPOLINE_32BIT_CODE_SIZE
.code32
-no_longmode:
+.Lno_longmode:
/* This isn't an x86-64 CPU, so hang intentionally, we cannot continue */
1:
hlt
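
(For context, .Ladjust_got walks the GOT between _got and _egot, both
linker-script symbols, adding the delta passed in RDI to every 8-byte
entry; the loop body itself is elided from the hunk above. A sketch
along the lines of the surrounding head_64.S code:)

.Ladjust_got:
	leaq	_got(%rip), %rdx	/* first GOT entry */
	leaq	_egot(%rip), %rcx	/* end of the GOT */
1:	cmpq	%rcx, %rdx
	jae	2f
	addq	%rdi, (%rdx)		/* apply the adjustment */
	addq	$8, %rdx
	jmp	1b
2:	ret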


@@ -1058,10 +1058,10 @@ ENTRY(native_load_gs_index)
ENDPROC(native_load_gs_index)
EXPORT_SYMBOL(native_load_gs_index)
-	_ASM_EXTABLE(.Lgs_change, bad_gs)
+	_ASM_EXTABLE(.Lgs_change, .Lbad_gs)
.section .fixup, "ax"
/* running with kernelgs */
-bad_gs:
+.Lbad_gs:
SWAPGS /* switch back to user gs */
.macro ZAP_GS
/* This can't be a string because the preprocessor needs to see it. */


@@ -33,7 +33,7 @@
102:
.section .fixup,"ax"
103: addl %ecx,%edx /* ecx is zerorest also */
-	jmp	copy_user_handle_tail
+	jmp	.Lcopy_user_handle_tail
.previous
_ASM_EXTABLE_UA(100b, 103b)
@@ -113,7 +113,7 @@ ENTRY(copy_user_generic_unrolled)
40: leal (%rdx,%rcx,8),%edx
jmp 60f
50: movl %ecx,%edx
-60:	jmp	copy_user_handle_tail /* ecx is zerorest also */
+60:	jmp	.Lcopy_user_handle_tail /* ecx is zerorest also */
.previous
_ASM_EXTABLE_UA(1b, 30b)
@@ -177,7 +177,7 @@ ENTRY(copy_user_generic_string)
.section .fixup,"ax"
11: leal (%rdx,%rcx,8),%ecx
12: movl %ecx,%edx /* ecx is zerorest also */
-	jmp	copy_user_handle_tail
+	jmp	.Lcopy_user_handle_tail
.previous
_ASM_EXTABLE_UA(1b, 11b)
@@ -210,7 +210,7 @@ ENTRY(copy_user_enhanced_fast_string)
.section .fixup,"ax"
12: movl %ecx,%edx /* ecx is zerorest also */
-	jmp	copy_user_handle_tail
+	jmp	.Lcopy_user_handle_tail
.previous
_ASM_EXTABLE_UA(1b, 12b)
@@ -231,7 +231,7 @@ EXPORT_SYMBOL(copy_user_enhanced_fast_string)
* eax uncopied bytes or 0 if successful.
*/
ALIGN;
-copy_user_handle_tail:
+.Lcopy_user_handle_tail:
movl %edx,%ecx
1: rep movsb
2: mov %ecx,%eax
@@ -239,7 +239,7 @@ copy_user_handle_tail:
ret
_ASM_EXTABLE_UA(1b, 2b)
-END(copy_user_handle_tail)
+END(.Lcopy_user_handle_tail)
/*
* copy_user_nocache - Uncached memory copy with exception handling
@@ -364,7 +364,7 @@ ENTRY(__copy_user_nocache)
movl %ecx,%edx
.L_fixup_handle_tail:
sfence
-	jmp	copy_user_handle_tail
+	jmp	.Lcopy_user_handle_tail
.previous
_ASM_EXTABLE_UA(1b, .L_fixup_4x8b_copy)


@@ -115,7 +115,7 @@ ENDPROC(__get_user_8)
EXPORT_SYMBOL(__get_user_8)
-bad_get_user_clac:
+.Lbad_get_user_clac:
ASM_CLAC
bad_get_user:
xor %edx,%edx
@@ -123,7 +123,7 @@ bad_get_user:
ret
#ifdef CONFIG_X86_32
-bad_get_user_8_clac:
+.Lbad_get_user_8_clac:
ASM_CLAC
bad_get_user_8:
xor %edx,%edx
@@ -132,12 +132,12 @@ bad_get_user_8:
ret
#endif
-	_ASM_EXTABLE_UA(1b, bad_get_user_clac)
-	_ASM_EXTABLE_UA(2b, bad_get_user_clac)
-	_ASM_EXTABLE_UA(3b, bad_get_user_clac)
+	_ASM_EXTABLE_UA(1b, .Lbad_get_user_clac)
+	_ASM_EXTABLE_UA(2b, .Lbad_get_user_clac)
+	_ASM_EXTABLE_UA(3b, .Lbad_get_user_clac)
#ifdef CONFIG_X86_64
-	_ASM_EXTABLE_UA(4b, bad_get_user_clac)
+	_ASM_EXTABLE_UA(4b, .Lbad_get_user_clac)
#else
-	_ASM_EXTABLE_UA(4b, bad_get_user_8_clac)
-	_ASM_EXTABLE_UA(5b, bad_get_user_8_clac)
+	_ASM_EXTABLE_UA(4b, .Lbad_get_user_8_clac)
+	_ASM_EXTABLE_UA(5b, .Lbad_get_user_8_clac)
#endif
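
(Making these fixup labels local is safe for the exception table:
_ASM_EXTABLE_UA emits relative offsets into __ex_table that are resolved
at assembly/link time, so no runtime symbol lookup is involved. A
minimal sketch of the pattern, with a hypothetical ".Lfixup" label and
assuming the usual asm/asm.h and asm/smap.h includes:)

1:	movzbl	(%_ASM_AX),%edx		/* user access that may fault */
	xor	%eax,%eax
	ret
.Lfixup:				/* hypothetical local fixup label */
	ASM_CLAC
	mov	$(-EFAULT),%_ASM_AX
	ret
	_ASM_EXTABLE_UA(1b, .Lfixup)	/* a fault at 1: resumes at .Lfixup */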


@@ -37,7 +37,7 @@
ENTRY(__put_user_1)
ENTER
cmp TASK_addr_limit(%_ASM_BX),%_ASM_CX
-	jae	bad_put_user
+	jae	.Lbad_put_user
ASM_STAC
1: movb %al,(%_ASM_CX)
xor %eax,%eax
@@ -51,7 +51,7 @@ ENTRY(__put_user_2)
mov TASK_addr_limit(%_ASM_BX),%_ASM_BX
sub $1,%_ASM_BX
cmp %_ASM_BX,%_ASM_CX
-	jae	bad_put_user
+	jae	.Lbad_put_user
ASM_STAC
2: movw %ax,(%_ASM_CX)
xor %eax,%eax
@@ -65,7 +65,7 @@ ENTRY(__put_user_4)
mov TASK_addr_limit(%_ASM_BX),%_ASM_BX
sub $3,%_ASM_BX
cmp %_ASM_BX,%_ASM_CX
-	jae	bad_put_user
+	jae	.Lbad_put_user
ASM_STAC
3: movl %eax,(%_ASM_CX)
xor %eax,%eax
@@ -79,7 +79,7 @@ ENTRY(__put_user_8)
mov TASK_addr_limit(%_ASM_BX),%_ASM_BX
sub $7,%_ASM_BX
cmp %_ASM_BX,%_ASM_CX
-	jae	bad_put_user
+	jae	.Lbad_put_user
ASM_STAC
4: mov %_ASM_AX,(%_ASM_CX)
#ifdef CONFIG_X86_32
@@ -91,16 +91,16 @@ ENTRY(__put_user_8)
ENDPROC(__put_user_8)
EXPORT_SYMBOL(__put_user_8)
-bad_put_user_clac:
+.Lbad_put_user_clac:
ASM_CLAC
-bad_put_user:
+.Lbad_put_user:
movl $-EFAULT,%eax
RET
-	_ASM_EXTABLE_UA(1b, bad_put_user_clac)
-	_ASM_EXTABLE_UA(2b, bad_put_user_clac)
-	_ASM_EXTABLE_UA(3b, bad_put_user_clac)
-	_ASM_EXTABLE_UA(4b, bad_put_user_clac)
+	_ASM_EXTABLE_UA(1b, .Lbad_put_user_clac)
+	_ASM_EXTABLE_UA(2b, .Lbad_put_user_clac)
+	_ASM_EXTABLE_UA(3b, .Lbad_put_user_clac)
+	_ASM_EXTABLE_UA(4b, .Lbad_put_user_clac)
#ifdef CONFIG_X86_32
-	_ASM_EXTABLE_UA(5b, bad_put_user_clac)
+	_ASM_EXTABLE_UA(5b, .Lbad_put_user_clac)
#endif