Commit aa3d4803 authored by Peter Zijlstra, committed by Borislav Petkov

x86: Use return-thunk in asm code

Use the return thunk in asm code. If the thunk isn't needed, it will
get patched into a RET instruction during boot by apply_returns().

Since alternatives can't handle relocations outside of the first
instruction, putting a 'jmp __x86_return_thunk' in one is not valid;
therefore, carve out the memmove ERMS path into a separate label and
jump to it.
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Borislav Petkov <bp@suse.de>
Reviewed-by: Josh Poimboeuf <jpoimboe@kernel.org>
Signed-off-by: Borislav Petkov <bp@suse.de>
parent 0ee90730
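A minimal sketch of the constraint and the resulting pattern (not from the commit itself; .Lfast_path is a made-up label standing in for the real .Lmemmove_erms in the hunk below):

	/*
	 * Not valid: RET expands to 'jmp __x86_return_thunk', a relocation
	 * that sits after the first instruction of the replacement and thus
	 * cannot be fixed up by the alternatives patching code.
	 */
	ALTERNATIVE "", __stringify(movq %rdx, %rcx; rep movsb; RET), X86_FEATURE_ERMS

	/*
	 * Valid: the only relocation is the jmp, and it is the first (and
	 * only) instruction of the replacement; the thunked RET lives at a
	 * local label outside the alternative.
	 */
	ALTERNATIVE "", "jmp .Lfast_path", X86_FEATURE_ERMS
	/* ... non-ERMS copy code would continue here ... */

.Lfast_path:
	movq	%rdx, %rcx
	rep movsb
	RET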
arch/x86/entry/vdso/Makefile

@@ -92,6 +92,7 @@ endif
 endif
 
 $(vobjs): KBUILD_CFLAGS := $(filter-out $(CC_FLAGS_LTO) $(RANDSTRUCT_CFLAGS) $(GCC_PLUGINS_CFLAGS) $(RETPOLINE_CFLAGS),$(KBUILD_CFLAGS)) $(CFL)
+$(vobjs): KBUILD_AFLAGS += -DBUILD_VDSO
 
 #
 # vDSO code runs in userspace and -pg doesn't help with profiling anyway.
arch/x86/include/asm/linkage.h

@@ -19,19 +19,27 @@
 #define __ALIGN_STR	__stringify(__ALIGN)
 #endif
 
+#if defined(CONFIG_RETPOLINE) && !defined(__DISABLE_EXPORTS) && !defined(BUILD_VDSO)
+#define RET	jmp __x86_return_thunk
+#else /* CONFIG_RETPOLINE */
 #ifdef CONFIG_SLS
 #define RET	ret; int3
 #else
 #define RET	ret
 #endif
+#endif /* CONFIG_RETPOLINE */
 
 #else /* __ASSEMBLY__ */
 
+#if defined(CONFIG_RETPOLINE) && !defined(__DISABLE_EXPORTS) && !defined(BUILD_VDSO)
+#define ASM_RET	"jmp __x86_return_thunk\n\t"
+#else /* CONFIG_RETPOLINE */
 #ifdef CONFIG_SLS
 #define ASM_RET	"ret; int3\n\t"
 #else
 #define ASM_RET	"ret\n\t"
 #endif
+#endif /* CONFIG_RETPOLINE */
 
 #endif /* __ASSEMBLY__ */
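For context, a hypothetical .S snippet (my_return_zero is an invented example function) showing the effect of the new RET definition: with CONFIG_RETPOLINE=y and neither __DISABLE_EXPORTS nor BUILD_VDSO defined, RET assembles to a jump to the return thunk, which apply_returns() patches back into a plain RET instruction during boot if the thunk isn't needed.

#include <linux/linkage.h>

/* Hypothetical example function, not part of the commit. */
SYM_FUNC_START(my_return_zero)
	xorl	%eax, %eax	/* return 0 */
	RET			/* emits: jmp __x86_return_thunk */
SYM_FUNC_END(my_return_zero)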
arch/x86/lib/memmove_64.S

@@ -39,7 +39,7 @@ SYM_FUNC_START(__memmove)
 	/* FSRM implies ERMS => no length checks, do the copy directly */
 .Lmemmove_begin_forward:
 	ALTERNATIVE "cmp $0x20, %rdx; jb 1f", "", X86_FEATURE_FSRM
-	ALTERNATIVE "", __stringify(movq %rdx, %rcx; rep movsb; RET), X86_FEATURE_ERMS
+	ALTERNATIVE "", "jmp .Lmemmove_erms", X86_FEATURE_ERMS
 
 	/*
 	 * movsq instruction have many startup latency
@@ -205,6 +205,11 @@ SYM_FUNC_START(__memmove)
 	movb %r11b, (%rdi)
 13:
 	RET
+.Lmemmove_erms:
+	movq %rdx, %rcx
+	rep movsb
+	RET
 SYM_FUNC_END(__memmove)
 EXPORT_SYMBOL(__memmove)