Commit 46d597e2 authored by Vineet Gupta, committed by Willy Tarreau

ARC: use ASL assembler mnemonic

commit a6416f57 upstream.

ARCompact and ARCv2 only have ASL, while binutils used to support LSL as
an alias mnemonic.

Newer (upstream) binutils no longer accept that alias, so replace it.
Signed-off-by: Vineet Gupta <vgupta@synopsys.com>
Signed-off-by: Willy Tarreau <w@1wt.eu>
parent e5201134
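For context, a minimal sketch (not part of the commit): on ARC, a constant left shift is written with the architectural ASL mnemonic, which older binutils also accepted under the LSL alias for the same encoding. The shift amount 5 below is purely illustrative.

	asl	r0, r0, 5	; r0 = r0 << 5 (older binutils also accepted "lsl r0, r0, 5")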
@@ -219,7 +219,7 @@ ex_saved_reg1:
 #ifdef CONFIG_SMP
 	sr  r0, [ARC_REG_SCRATCH_DATA0]	; freeup r0 to code with
 	GET_CPU_ID  r0			; get to per cpu scratch mem,
-	lsl r0, r0, L1_CACHE_SHIFT	; cache line wide per cpu
+	asl r0, r0, L1_CACHE_SHIFT	; cache line wide per cpu
 	add r0, @ex_saved_reg1, r0
 #else
 	st  r0, [@ex_saved_reg1]
@@ -239,7 +239,7 @@ ex_saved_reg1:
 .macro TLBMISS_RESTORE_REGS
 #ifdef CONFIG_SMP
 	GET_CPU_ID  r0			; get to per cpu scratch mem
-	lsl r0, r0, L1_CACHE_SHIFT	; each is cache line wide
+	asl r0, r0, L1_CACHE_SHIFT	; each is cache line wide
 	add r0, @ex_saved_reg1, r0
 	ld_s  r3, [r0,12]
 	ld_s  r2, [r0, 8]