[PATCH V6 13/49] x86/entry: Expose the address of .Lgs_change to entry64.c

From: Lai Jiangshan
Date: Fri Nov 26 2021 - 05:24:19 EST


From: Lai Jiangshan <laijs@xxxxxxxxxxxxxxxxx>

The address of .Lgs_change will be used in entry64.c in a later patch when
some of the entry code is implemented in C. A local assembler label cannot
be referenced from C, so convert .Lgs_change into the global inner label
asm_load_gs_index_gs_change and declare it in entry64.c in preparation.
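
For context, a minimal sketch of how entry64.c might later consume the
exposed symbol once the corresponding error-entry check moves to C. The
helper name below is illustrative only and not part of this patch:

extern unsigned char asm_load_gs_index_gs_change[];

/*
 * Illustrative only: mirrors the "cmpq $asm_load_gs_index_gs_change"
 * test in error_entry, i.e. did the fault hit the protected GS load
 * in asm_load_gs_index()?
 */
static __always_inline bool fault_in_gs_change(unsigned long ip)
{
	return ip == (unsigned long)asm_load_gs_index_gs_change;
}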

Signed-off-by: Lai Jiangshan <laijs@xxxxxxxxxxxxxxxxx>
---
arch/x86/entry/entry64.c | 2 ++
arch/x86/entry/entry_64.S | 6 +++---
2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/arch/x86/entry/entry64.c b/arch/x86/entry/entry64.c
index 762595603ce7..9813a30dbadb 100644
--- a/arch/x86/entry/entry64.c
+++ b/arch/x86/entry/entry64.c
@@ -12,3 +12,5 @@
* is PTI user CR3 or both.
*/
#include <asm/traps.h>
+
+extern unsigned char asm_load_gs_index_gs_change[];
diff --git a/arch/x86/entry/entry_64.S b/arch/x86/entry/entry_64.S
index 44774cc5bcc9..5db0196835cd 100644
--- a/arch/x86/entry/entry_64.S
+++ b/arch/x86/entry/entry_64.S
@@ -733,7 +733,7 @@ _ASM_NOKPROBE(common_interrupt_return)
SYM_FUNC_START(asm_load_gs_index)
FRAME_BEGIN
swapgs
-.Lgs_change:
+SYM_INNER_LABEL(asm_load_gs_index_gs_change, SYM_L_GLOBAL)
movl %edi, %gs
2: ALTERNATIVE "", "mfence", X86_BUG_SWAPGS_FENCE
swapgs
@@ -742,7 +742,7 @@ SYM_FUNC_START(asm_load_gs_index)
SYM_FUNC_END(asm_load_gs_index)
EXPORT_SYMBOL(asm_load_gs_index)

- _ASM_EXTABLE(.Lgs_change, .Lbad_gs)
+ _ASM_EXTABLE(asm_load_gs_index_gs_change, .Lbad_gs)
.section .fixup, "ax"
/* running with kernelgs */
SYM_CODE_START_LOCAL_NOALIGN(.Lbad_gs)
@@ -1008,7 +1008,7 @@ SYM_CODE_START_LOCAL(error_entry)
movl %ecx, %eax /* zero extend */
cmpq %rax, RIP+8(%rsp)
je .Lbstep_iret
- cmpq $.Lgs_change, RIP+8(%rsp)
+ cmpq $asm_load_gs_index_gs_change, RIP+8(%rsp)
jne .Lerror_entry_done_lfence

/*
--
2.19.1.6.gb485710b