[PATCH 3.10 145/180] ARC: use ASL assembler mnemonic

From: Willy Tarreau
Date: Sun Aug 21 2016 - 11:41:35 EST


From: Vineet Gupta <vgupta@xxxxxxxxxxxx>

commit a6416f57ce57fb390b6ee30b12c01c29032a26af upstream.

ARCompact and ARCv2 only have ASL, while binutils used to support LSL as
an alias mnemonic.

Newer binutils (upstream) no longer wants to carry that alias, so replace the LSL uses with ASL.
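
To illustrate, a minimal sketch of what the rename amounts to (operand taken
from the hunks below); for a left shift the arithmetic and logical forms
produce the same result, so ASL is the one real mnemonic on ARC:

	; old spelling, only accepted by older binutils as an alias:
	;   lsl r0, r0, L1_CACHE_SHIFT
	; new spelling, the actual ARC instruction (r0 <<= L1_CACHE_SHIFT):
	asl	r0, r0, L1_CACHE_SHIFT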

Signed-off-by: Vineet Gupta <vgupta@xxxxxxxxxxxx>
Signed-off-by: Willy Tarreau <w@xxxxxx>
---
arch/arc/mm/tlbex.S | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/arch/arc/mm/tlbex.S b/arch/arc/mm/tlbex.S
index 3357d26..74691e6 100644
--- a/arch/arc/mm/tlbex.S
+++ b/arch/arc/mm/tlbex.S
@@ -219,7 +219,7 @@ ex_saved_reg1:
#ifdef CONFIG_SMP
sr r0, [ARC_REG_SCRATCH_DATA0] ; freeup r0 to code with
GET_CPU_ID r0 ; get to per cpu scratch mem,
- lsl r0, r0, L1_CACHE_SHIFT ; cache line wide per cpu
+ asl r0, r0, L1_CACHE_SHIFT ; cache line wide per cpu
add r0, @ex_saved_reg1, r0
#else
st r0, [@ex_saved_reg1]
@@ -239,7 +239,7 @@ ex_saved_reg1:
.macro TLBMISS_RESTORE_REGS
#ifdef CONFIG_SMP
GET_CPU_ID r0 ; get to per cpu scratch mem
- lsl r0, r0, L1_CACHE_SHIFT ; each is cache line wide
+ asl r0, r0, L1_CACHE_SHIFT ; each is cache line wide
add r0, @ex_saved_reg1, r0
ld_s r3, [r0,12]
ld_s r2, [r0, 8]
--
2.8.0.rc2.1.gbe9624a