tools arch: Update arch/x86/lib/mem{cpy,set}_64.S copies used in 'perf bench mem memcpy'

To bring in the change made in this cset:

  4d6ffa27b8 ("x86/lib: Change .weak to SYM_FUNC_START_WEAK for arch/x86/lib/mem*_64.S")
  6dcc5627f6 ("x86/asm: Change all ENTRY+ENDPROC to SYM_FUNC_*")

I needed to redefine SYM_FUNC_START_LOCAL() to use SYM_L_GLOBAL linkage,
as mem{cpy,set}_{orig,erms} are used by 'perf bench'.

This silences these perf tools build warnings:

  Warning: Kernel ABI header at 'tools/arch/x86/lib/memcpy_64.S' differs from latest version at 'arch/x86/lib/memcpy_64.S'
  diff -u tools/arch/x86/lib/memcpy_64.S arch/x86/lib/memcpy_64.S
  Warning: Kernel ABI header at 'tools/arch/x86/lib/memset_64.S' differs from latest version at 'arch/x86/lib/memset_64.S'
  diff -u tools/arch/x86/lib/memset_64.S arch/x86/lib/memset_64.S

Cc: Adrian Hunter <adrian.hunter@intel.com>
Cc: Borislav Petkov <bp@suse.de>
Cc: Fangrui Song <maskray@google.com>
Cc: Ian Rogers <irogers@google.com>
Cc: Jiri Olsa <jolsa@kernel.org>
Cc: Jiri Slaby <jirislaby@kernel.org>
Cc: Namhyung Kim <namhyung@kernel.org>
Signed-off-by: Arnaldo Carvalho de Melo <acme@redhat.com>
This commit is contained in:
Arnaldo Carvalho de Melo 2020-11-09 13:59:15 -03:00
parent b0e5a05cc9
commit db1a8b97a0
5 changed files with 22 additions and 10 deletions

tools/arch/x86/lib/memcpy_64.S

@@ -16,8 +16,6 @@
  * to a jmp to memcpy_erms which does the REP; MOVSB mem copy.
  */
 
-.weak memcpy
-
 /*
  * memcpy - Copy a memory block.
  *
@@ -30,7 +28,7 @@
  * rax original destination
  */
 SYM_FUNC_START_ALIAS(__memcpy)
-SYM_FUNC_START_LOCAL(memcpy)
+SYM_FUNC_START_WEAK(memcpy)
 	ALTERNATIVE_2 "jmp memcpy_orig", "", X86_FEATURE_REP_GOOD, \
 		      "jmp memcpy_erms", X86_FEATURE_ERMS
@@ -51,14 +49,14 @@ EXPORT_SYMBOL(__memcpy)
  * memcpy_erms() - enhanced fast string memcpy. This is faster and
  * simpler than memcpy. Use memcpy_erms when possible.
  */
-SYM_FUNC_START(memcpy_erms)
+SYM_FUNC_START_LOCAL(memcpy_erms)
 	movq %rdi, %rax
 	movq %rdx, %rcx
 	rep movsb
 	ret
 SYM_FUNC_END(memcpy_erms)
 
-SYM_FUNC_START(memcpy_orig)
+SYM_FUNC_START_LOCAL(memcpy_orig)
 	movq %rdi, %rax
 
 	cmpq $0x20, %rdx

tools/arch/x86/lib/memset_64.S

@@ -4,8 +4,7 @@
 #include <linux/linkage.h>
 #include <asm/cpufeatures.h>
 #include <asm/alternative-asm.h>
 #include <asm/export.h>
 
-.weak memset
 
 /*
  * ISO C memset - set a memory block to a byte value. This function uses fast
@@ -18,7 +17,7 @@
  *
  * rax original destination
  */
-SYM_FUNC_START_ALIAS(memset)
+SYM_FUNC_START_WEAK(memset)
 SYM_FUNC_START(__memset)
 /*
  * Some CPUs support enhanced REP MOVSB/STOSB feature. It is recommended
@@ -44,6 +43,8 @@ SYM_FUNC_START(__memset)
 	ret
 SYM_FUNC_END(__memset)
 SYM_FUNC_END_ALIAS(memset)
+EXPORT_SYMBOL(memset)
+EXPORT_SYMBOL(__memset)
 
 /*
  * ISO C memset - set a memory block to a byte value. This function uses
@@ -56,7 +57,7 @@ SYM_FUNC_END_ALIAS(memset)
  *
  * rax original destination
  */
-SYM_FUNC_START(memset_erms)
+SYM_FUNC_START_LOCAL(memset_erms)
 	movq %rdi,%r9
 	movb %sil,%al
 	movq %rdx,%rcx
@@ -65,7 +66,7 @@ SYM_FUNC_START_LOCAL(memset_erms)
 	ret
 SYM_FUNC_END(memset_erms)
 
-SYM_FUNC_START(memset_orig)
+SYM_FUNC_START_LOCAL(memset_orig)
 	movq %rdi,%r10
 
 	/* expand byte value */

tools/perf/bench/mem-memcpy-x86-64-asm.S

@@ -2,6 +2,9 @@
 /* Various wrappers to make the kernel .S file build in user-space: */
 
+// memcpy_orig and memcpy_erms are being defined as SYM_L_LOCAL but we need it
+#define SYM_FUNC_START_LOCAL(name) \
+	SYM_START(name, SYM_L_GLOBAL, SYM_A_ALIGN)
 #define memcpy MEMCPY /* don't hide glibc's memcpy() */
 #define altinstr_replacement text
 #define globl p2align 4; .globl

tools/perf/bench/mem-memset-x86-64-asm.S

@@ -1,4 +1,7 @@
 /* SPDX-License-Identifier: GPL-2.0 */
+// memset_orig and memset_erms are being defined as SYM_L_LOCAL but we need it
+#define SYM_FUNC_START_LOCAL(name) \
+	SYM_START(name, SYM_L_GLOBAL, SYM_A_ALIGN)
 #define memset MEMSET /* don't hide glibc's memset() */
 #define altinstr_replacement text
 #define globl p2align 4; .globl

tools/perf/util/include/linux/linkage.h

@@ -25,6 +25,7 @@
 /* SYM_L_* -- linkage of symbols */
 #define SYM_L_GLOBAL(name) .globl name
+#define SYM_L_WEAK(name) .weak name
 #define SYM_L_LOCAL(name) /* nothing */
 
 #define ALIGN __ALIGN
@@ -84,6 +85,12 @@
 	SYM_END(name, SYM_T_FUNC)
 #endif
 
+/* SYM_FUNC_START_WEAK -- use for weak functions */
+#ifndef SYM_FUNC_START_WEAK
+#define SYM_FUNC_START_WEAK(name) \
+	SYM_START(name, SYM_L_WEAK, SYM_A_ALIGN)
+#endif
+
 /*
  * SYM_FUNC_END -- the end of SYM_FUNC_START_LOCAL, SYM_FUNC_START,
  * SYM_FUNC_START_WEAK, ...