Commit cceb0183 authored by Heiko Carstens, committed by Vasily Gorbik

s390/alternatives: make use of asm_inline

This is the s390 version of commit 40576e5e ("x86: alternative.h:
use asm_inline for all alternative variants").

See commit eb111869 ("compiler-types.h: add asm_inline
definition") for more details.

With this change the compiler will not generate many out-of-line
versions of the three-instruction-sized arch_spin_unlock() function
anymore. Due to this, gcc seems to change a lot of other inlining
decisions, which results in a net 6k text size growth according to
bloat-o-meter (gcc 9.2 with defconfig).
But that's still better than having many out-of-line versions of
arch_spin_unlock().
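
The text size delta can be reproduced with the in-tree bloat-o-meter
script by comparing vmlinux binaries built before and after this
change, for example (hypothetical file names):

	./scripts/bloat-o-meter vmlinux.before vmlinux.after
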
Signed-off-by: Heiko Carstens <heiko.carstens@de.ibm.com>
Signed-off-by: Vasily Gorbik <gor@linux.ibm.com>
parent 6a3035da
@@ -139,10 +139,10 @@ void apply_alternatives(struct alt_instr *start, struct alt_instr *end);
  * without volatile and memory clobber.
  */
 #define alternative(oldinstr, altinstr, facility)			\
-	asm volatile(ALTERNATIVE(oldinstr, altinstr, facility) : : : "memory")
+	asm_inline volatile(ALTERNATIVE(oldinstr, altinstr, facility) : : : "memory")
 
 #define alternative_2(oldinstr, altinstr1, facility1, altinstr2, facility2) \
-	asm volatile(ALTERNATIVE_2(oldinstr, altinstr1, facility1,	\
+	asm_inline volatile(ALTERNATIVE_2(oldinstr, altinstr1, facility1,	\
 				   altinstr2, facility2) ::: "memory")
 
 #endif /* __ASSEMBLY__ */
@@ -85,7 +85,7 @@ static inline int arch_spin_trylock(arch_spinlock_t *lp)
 static inline void arch_spin_unlock(arch_spinlock_t *lp)
 {
 	typecheck(int, lp->lock);
-	asm volatile(
+	asm_inline volatile(
 		ALTERNATIVE("", ".long 0xb2fa0070", 49)	/* NIAI 7 */
 		"	sth	%1,%0\n"
 		: "=Q" (((unsigned short *) &lp->lock)[1])
@@ -74,7 +74,7 @@ static inline int arch_load_niai4(int *lock)
 {
 	int owner;
 
-	asm volatile(
+	asm_inline volatile(
 		ALTERNATIVE("", ".long 0xb2fa0040", 49)	/* NIAI 4 */
 		"	l	%0,%1\n"
 		: "=d" (owner) : "Q" (*lock) : "memory");
@@ -85,7 +85,7 @@ static inline int arch_cmpxchg_niai8(int *lock, int old, int new)
 {
 	int expected = old;
 
-	asm volatile(
+	asm_inline volatile(
 		ALTERNATIVE("", ".long 0xb2fa0080", 49)	/* NIAI 8 */
 		"	cs	%0,%3,%1\n"
 		: "=d" (old), "=Q" (*lock)