[PATCH 5.10 063/130] x86: Prepare inline-asm for straight-line-speculation

From: Greg Kroah-Hartman
Date: Tue Jul 12 2022 - 14:49:04 EST


From: Peter Zijlstra <peterz@xxxxxxxxxxxxx>

commit b17c2baa305cccbd16bafa289fd743cc2db77966 upstream.

Replace all ret/retq instructions with ASM_RET in preparation for
making it more than a single instruction.
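
In this backport ASM_RET still expands to a bare "ret" (see the linkage.h
hunk below); the macro only gives the later straight-line-speculation
patches a single place to change. A rough sketch of that later shape,
assuming the upstream CONFIG_SLS option and not something this patch adds:

	/* arch/x86/include/asm/linkage.h, inline-asm (!__ASSEMBLY__) side */
	#ifdef CONFIG_SLS
	#define ASM_RET	"ret; int3\n\t"	/* speculation past the return hits INT3 */
	#else
	#define ASM_RET	"ret\n\t"	/* what this patch introduces */
	#endif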

Signed-off-by: Peter Zijlstra (Intel) <peterz@xxxxxxxxxxxxx>
Signed-off-by: Borislav Petkov <bp@xxxxxxx>
Link: https://lore.kernel.org/r/20211204134907.964635458@xxxxxxxxxxxxx
Signed-off-by: Sasha Levin <sashal@xxxxxxxxxx>
Signed-off-by: Greg Kroah-Hartman <gregkh@xxxxxxxxxxxxxxxxxxx>
[bwh: Backported to 5.10: adjust context]
Signed-off-by: Ben Hutchings <ben@xxxxxxxxxxxxxxx>
Signed-off-by: Greg Kroah-Hartman <gregkh@xxxxxxxxxxxxxxxxxxx>
---
arch/x86/include/asm/linkage.h            | 4 ++++
arch/x86/include/asm/paravirt.h           | 2 +-
arch/x86/include/asm/qspinlock_paravirt.h | 4 ++--
arch/x86/kernel/alternative.c             | 2 +-
arch/x86/kernel/kprobes/core.c            | 2 +-
arch/x86/kernel/paravirt.c                | 2 +-
arch/x86/kvm/emulate.c                    | 4 ++--
arch/x86/lib/error-inject.c               | 3 ++-
samples/ftrace/ftrace-direct-modify.c     | 4 ++--
samples/ftrace/ftrace-direct-too.c        | 2 +-
samples/ftrace/ftrace-direct.c            | 2 +-
11 files changed, 18 insertions(+), 13 deletions(-)

--- a/arch/x86/include/asm/linkage.h
+++ b/arch/x86/include/asm/linkage.h
@@ -18,6 +18,10 @@
#define __ALIGN_STR __stringify(__ALIGN)
#endif

+#else /* __ASSEMBLY__ */
+
+#define ASM_RET "ret\n\t"
+
#endif /* __ASSEMBLY__ */

#endif /* _ASM_X86_LINKAGE_H */
--- a/arch/x86/include/asm/paravirt.h
+++ b/arch/x86/include/asm/paravirt.h
@@ -630,7 +630,7 @@ bool __raw_callee_save___native_vcpu_is_
"call " #func ";" \
PV_RESTORE_ALL_CALLER_REGS \
FRAME_END \
- "ret;" \
+ ASM_RET \
".size " PV_THUNK_NAME(func) ", .-" PV_THUNK_NAME(func) ";" \
".popsection")

--- a/arch/x86/include/asm/qspinlock_paravirt.h
+++ b/arch/x86/include/asm/qspinlock_paravirt.h
@@ -48,7 +48,7 @@ asm (".pushsection .text;"
"jne .slowpath;"
"pop %rdx;"
FRAME_END
- "ret;"
+ ASM_RET
".slowpath: "
"push %rsi;"
"movzbl %al,%esi;"
@@ -56,7 +56,7 @@ asm (".pushsection .text;"
"pop %rsi;"
"pop %rdx;"
FRAME_END
- "ret;"
+ ASM_RET
".size " PV_UNLOCK ", .-" PV_UNLOCK ";"
".popsection");

--- a/arch/x86/kernel/alternative.c
+++ b/arch/x86/kernel/alternative.c
@@ -869,7 +869,7 @@ asm (
" .type int3_magic, @function\n"
"int3_magic:\n"
" movl $1, (%" _ASM_ARG1 ")\n"
-" ret\n"
+ ASM_RET
" .size int3_magic, .-int3_magic\n"
" .popsection\n"
);
--- a/arch/x86/kernel/kprobes/core.c
+++ b/arch/x86/kernel/kprobes/core.c
@@ -768,7 +768,7 @@ asm(
RESTORE_REGS_STRING
" popfl\n"
#endif
- " ret\n"
+ ASM_RET
".size kretprobe_trampoline, .-kretprobe_trampoline\n"
);
NOKPROBE_SYMBOL(kretprobe_trampoline);
--- a/arch/x86/kernel/paravirt.c
+++ b/arch/x86/kernel/paravirt.c
@@ -40,7 +40,7 @@ extern void _paravirt_nop(void);
asm (".pushsection .entry.text, \"ax\"\n"
".global _paravirt_nop\n"
"_paravirt_nop:\n\t"
- "ret\n\t"
+ ASM_RET
".size _paravirt_nop, . - _paravirt_nop\n\t"
".type _paravirt_nop, @function\n\t"
".popsection");
--- a/arch/x86/kvm/emulate.c
+++ b/arch/x86/kvm/emulate.c
@@ -316,7 +316,7 @@ static int fastop(struct x86_emulate_ctx
__FOP_FUNC(#name)

#define __FOP_RET(name) \
- "ret \n\t" \
+ ASM_RET \
".size " name ", .-" name "\n\t"

#define FOP_RET(name) \
@@ -437,7 +437,7 @@ static int fastop(struct x86_emulate_ctx

asm(".pushsection .fixup, \"ax\"\n"
".global kvm_fastop_exception \n"
- "kvm_fastop_exception: xor %esi, %esi; ret\n"
+ "kvm_fastop_exception: xor %esi, %esi; " ASM_RET
".popsection");

FOP_START(setcc)
--- a/arch/x86/lib/error-inject.c
+++ b/arch/x86/lib/error-inject.c
@@ -1,5 +1,6 @@
// SPDX-License-Identifier: GPL-2.0

+#include <linux/linkage.h>
#include <linux/error-injection.h>
#include <linux/kprobes.h>

@@ -10,7 +11,7 @@ asm(
".type just_return_func, @function\n"
".globl just_return_func\n"
"just_return_func:\n"
- " ret\n"
+ ASM_RET
".size just_return_func, .-just_return_func\n"
);

--- a/samples/ftrace/ftrace-direct-modify.c
+++ b/samples/ftrace/ftrace-direct-modify.c
@@ -31,7 +31,7 @@ asm (
" call my_direct_func1\n"
" leave\n"
" .size my_tramp1, .-my_tramp1\n"
-" ret\n"
+ ASM_RET
" .type my_tramp2, @function\n"
" .globl my_tramp2\n"
" my_tramp2:"
@@ -39,7 +39,7 @@ asm (
" movq %rsp, %rbp\n"
" call my_direct_func2\n"
" leave\n"
-" ret\n"
+ ASM_RET
" .size my_tramp2, .-my_tramp2\n"
" .popsection\n"
);
--- a/samples/ftrace/ftrace-direct-too.c
+++ b/samples/ftrace/ftrace-direct-too.c
@@ -31,7 +31,7 @@ asm (
" popq %rsi\n"
" popq %rdi\n"
" leave\n"
-" ret\n"
+ ASM_RET
" .size my_tramp, .-my_tramp\n"
" .popsection\n"
);
--- a/samples/ftrace/ftrace-direct.c
+++ b/samples/ftrace/ftrace-direct.c
@@ -24,7 +24,7 @@ asm (
" call my_direct_func\n"
" popq %rdi\n"
" leave\n"
-" ret\n"
+ ASM_RET
" .size my_tramp, .-my_tramp\n"
" .popsection\n"
);
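
The conversion itself is mechanical: every out-of-line asm() blob in a .c
file swaps its literal "ret" for ASM_RET and, where needed, includes
<linux/linkage.h> the way the error-inject.c hunk above does. A minimal
sketch of the pattern, with a hypothetical symbol name not taken from this
patch:

	#include <linux/linkage.h>	/* pulls in ASM_RET on x86 */

	asm (".pushsection .text, \"ax\"\n"
	     ".globl my_ret_stub\n"
	     ".type my_ret_stub, @function\n"
	     "my_ret_stub:\n\t"
	     ASM_RET			/* was: "ret\n\t" */
	     ".size my_ret_stub, .-my_ret_stub\n"
	     ".popsection");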