Commit 740d63b5 authored by Heiko Carstens

Merge branch 'cmpxchg_user_key' into features

Signed-off-by: Heiko Carstens <hca@linux.ibm.com>
parents 20e6ce48 b33d59fb
@@ -12,6 +12,7 @@
#define EX_TYPE_UA_STORE 3
#define EX_TYPE_UA_LOAD_MEM 4
#define EX_TYPE_UA_LOAD_REG 5
#define EX_TYPE_UA_LOAD_REGPAIR 6
#define EX_DATA_REG_ERR_SHIFT 0
#define EX_DATA_REG_ERR GENMASK(3, 0)
@@ -85,4 +86,7 @@
#define EX_TABLE_UA_LOAD_REG(_fault, _target, _regerr, _regzero) \
__EX_TABLE_UA(__ex_table, _fault, _target, EX_TYPE_UA_LOAD_REG, _regerr, _regzero, 0)
#define EX_TABLE_UA_LOAD_REGPAIR(_fault, _target, _regerr, _regzero) \
__EX_TABLE_UA(__ex_table, _fault, _target, EX_TYPE_UA_LOAD_REGPAIR, _regerr, _regzero, 0)
#endif /* __ASM_EXTABLE_H */
@@ -88,67 +88,90 @@ static __always_inline unsigned long __cmpxchg(unsigned long address,
unsigned long old,
unsigned long new, int size)
{
-unsigned long prev, tmp;
-int shift;
switch (size) {
-case 1:
+case 1: {
+unsigned int prev, shift, mask;
shift = (3 ^ (address & 3)) << 3;
address ^= address & 3;
+old = (old & 0xff) << shift;
+new = (new & 0xff) << shift;
+mask = ~(0xff << shift);
asm volatile(
-" l %0,%2\n"
-"0: nr %0,%5\n"
-" lr %1,%0\n"
-" or %0,%3\n"
-" or %1,%4\n"
-" cs %0,%1,%2\n"
-" jnl 1f\n"
-" xr %1,%0\n"
-" nr %1,%5\n"
-" jnz 0b\n"
+" l %[prev],%[address]\n"
+" nr %[prev],%[mask]\n"
+" xilf %[mask],0xffffffff\n"
+" or %[new],%[prev]\n"
+" or %[prev],%[tmp]\n"
+"0: lr %[tmp],%[prev]\n"
+" cs %[prev],%[new],%[address]\n"
+" jnl 1f\n"
+" xr %[tmp],%[prev]\n"
+" xr %[new],%[tmp]\n"
+" nr %[tmp],%[mask]\n"
+" jz 0b\n"
"1:"
-: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) address)
-: "d" ((old & 0xff) << shift),
-"d" ((new & 0xff) << shift),
-"d" (~(0xff << shift))
-: "memory", "cc");
+: [prev] "=&d" (prev),
+[address] "+Q" (*(int *)address),
+[tmp] "+&d" (old),
+[new] "+&d" (new),
+[mask] "+&d" (mask)
+:: "memory", "cc");
return prev >> shift;
-case 2:
+}
+case 2: {
+unsigned int prev, shift, mask;
shift = (2 ^ (address & 2)) << 3;
address ^= address & 2;
+old = (old & 0xffff) << shift;
+new = (new & 0xffff) << shift;
+mask = ~(0xffff << shift);
asm volatile(
-" l %0,%2\n"
-"0: nr %0,%5\n"
-" lr %1,%0\n"
-" or %0,%3\n"
-" or %1,%4\n"
-" cs %0,%1,%2\n"
-" jnl 1f\n"
-" xr %1,%0\n"
-" nr %1,%5\n"
-" jnz 0b\n"
+" l %[prev],%[address]\n"
+" nr %[prev],%[mask]\n"
+" xilf %[mask],0xffffffff\n"
+" or %[new],%[prev]\n"
+" or %[prev],%[tmp]\n"
+"0: lr %[tmp],%[prev]\n"
+" cs %[prev],%[new],%[address]\n"
+" jnl 1f\n"
+" xr %[tmp],%[prev]\n"
+" xr %[new],%[tmp]\n"
+" nr %[tmp],%[mask]\n"
+" jz 0b\n"
"1:"
-: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) address)
-: "d" ((old & 0xffff) << shift),
-"d" ((new & 0xffff) << shift),
-"d" (~(0xffff << shift))
-: "memory", "cc");
+: [prev] "=&d" (prev),
+[address] "+Q" (*(int *)address),
+[tmp] "+&d" (old),
+[new] "+&d" (new),
+[mask] "+&d" (mask)
+:: "memory", "cc");
return prev >> shift;
-case 4:
+}
+case 4: {
+unsigned int prev = old;
asm volatile(
-" cs %0,%3,%1\n"
-: "=&d" (prev), "+Q" (*(int *) address)
-: "0" (old), "d" (new)
+" cs %[prev],%[new],%[address]\n"
+: [prev] "+&d" (prev),
+[address] "+Q" (*(int *)address)
+: [new] "d" (new)
: "memory", "cc");
return prev;
-case 8:
+}
+case 8: {
+unsigned long prev = old;
asm volatile(
-" csg %0,%3,%1\n"
-: "=&d" (prev), "+QS" (*(long *) address)
-: "0" (old), "d" (new)
+" csg %[prev],%[new],%[address]\n"
+: [prev] "+&d" (prev),
+[address] "+QS" (*(long *)address)
+: [new] "d" (new)
: "memory", "cc");
return prev;
+}
}
__cmpxchg_called_with_bad_pointer();
return old;
}
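The 1- and 2-byte cases above emulate a sub-word cmpxchg with the word-sized cs instruction: the compare-and-swap is done on the containing aligned 4-byte word, only the target byte or halfword is compared and replaced, the neighbouring bytes are carried over unchanged, and the loop retries when cs fails solely because those neighbouring bytes changed concurrently. As a reading aid, here is a rough plain-C sketch of that idea; it is not part of the patch, word_cmpxchg() and byte_cmpxchg_sketch() are made-up names, and the shift math mirrors the big-endian s390 byte numbering.

/*
 * Illustration only, not part of the patch: build a one-byte cmpxchg
 * from a four-byte compare-and-swap on the containing aligned word.
 */
#include <stdint.h>

static uint32_t word_cmpxchg(uint32_t *addr, uint32_t old, uint32_t new)
{
	/* compiler builtin stands in for the cs instruction */
	return __sync_val_compare_and_swap(addr, old, new);
}

static uint8_t byte_cmpxchg_sketch(uint8_t *addr, uint8_t old, uint8_t new)
{
	unsigned long a = (unsigned long)addr;
	unsigned int shift = (3 ^ (a & 3)) << 3;	/* bit offset of the byte (big-endian) */
	uint32_t *word = (uint32_t *)(a ^ (a & 3));	/* aligned word containing the byte */
	uint32_t mask = ~(0xffU << shift);		/* bits of the bytes that must be preserved */
	uint32_t o = (uint32_t)old << shift;
	uint32_t n = (uint32_t)new << shift;
	uint32_t prev = *word;

	for (;;) {
		uint32_t expected = (prev & mask) | o;	/* neighbours unchanged, target equals old */
		uint32_t cur = word_cmpxchg(word, expected, (prev & mask) | n);

		if (cur == expected)
			return old;			/* exchange done */
		if ((cur ^ o) & ~mask)
			return (uint8_t)(cur >> shift);	/* target byte differs: compare failed */
		prev = cur;				/* only neighbouring bytes raced: retry */
	}
}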
@@ -390,4 +390,212 @@ do { \
goto err_label; \
} while (0)
void __cmpxchg_user_key_called_with_bad_pointer(void);
#define CMPXCHG_USER_KEY_MAX_LOOPS 128
static __always_inline int __cmpxchg_user_key(unsigned long address, void *uval,
__uint128_t old, __uint128_t new,
unsigned long key, int size)
{
int rc = 0;
switch (size) {
case 1: {
unsigned int prev, shift, mask, _old, _new;
unsigned long count;
shift = (3 ^ (address & 3)) << 3;
address ^= address & 3;
_old = ((unsigned int)old & 0xff) << shift;
_new = ((unsigned int)new & 0xff) << shift;
mask = ~(0xff << shift);
asm volatile(
" spka 0(%[key])\n"
" sacf 256\n"
" llill %[count],%[max_loops]\n"
"0: l %[prev],%[address]\n"
"1: nr %[prev],%[mask]\n"
" xilf %[mask],0xffffffff\n"
" or %[new],%[prev]\n"
" or %[prev],%[tmp]\n"
"2: lr %[tmp],%[prev]\n"
"3: cs %[prev],%[new],%[address]\n"
"4: jnl 5f\n"
" xr %[tmp],%[prev]\n"
" xr %[new],%[tmp]\n"
" nr %[tmp],%[mask]\n"
" jnz 5f\n"
" brct %[count],2b\n"
"5: sacf 768\n"
" spka %[default_key]\n"
EX_TABLE_UA_LOAD_REG(0b, 5b, %[rc], %[prev])
EX_TABLE_UA_LOAD_REG(1b, 5b, %[rc], %[prev])
EX_TABLE_UA_LOAD_REG(3b, 5b, %[rc], %[prev])
EX_TABLE_UA_LOAD_REG(4b, 5b, %[rc], %[prev])
: [rc] "+&d" (rc),
[prev] "=&d" (prev),
[address] "+Q" (*(int *)address),
[tmp] "+&d" (_old),
[new] "+&d" (_new),
[mask] "+&d" (mask),
[count] "=a" (count)
: [key] "%[count]" (key << 4),
[default_key] "J" (PAGE_DEFAULT_KEY),
[max_loops] "J" (CMPXCHG_USER_KEY_MAX_LOOPS)
: "memory", "cc");
*(unsigned char *)uval = prev >> shift;
if (!count)
rc = -EAGAIN;
return rc;
}
case 2: {
unsigned int prev, shift, mask, _old, _new;
unsigned long count;
shift = (2 ^ (address & 2)) << 3;
address ^= address & 2;
_old = ((unsigned int)old & 0xffff) << shift;
_new = ((unsigned int)new & 0xffff) << shift;
mask = ~(0xffff << shift);
asm volatile(
" spka 0(%[key])\n"
" sacf 256\n"
" llill %[count],%[max_loops]\n"
"0: l %[prev],%[address]\n"
"1: nr %[prev],%[mask]\n"
" xilf %[mask],0xffffffff\n"
" or %[new],%[prev]\n"
" or %[prev],%[tmp]\n"
"2: lr %[tmp],%[prev]\n"
"3: cs %[prev],%[new],%[address]\n"
"4: jnl 5f\n"
" xr %[tmp],%[prev]\n"
" xr %[new],%[tmp]\n"
" nr %[tmp],%[mask]\n"
" jnz 5f\n"
" brct %[count],2b\n"
"5: sacf 768\n"
" spka %[default_key]\n"
EX_TABLE_UA_LOAD_REG(0b, 5b, %[rc], %[prev])
EX_TABLE_UA_LOAD_REG(1b, 5b, %[rc], %[prev])
EX_TABLE_UA_LOAD_REG(3b, 5b, %[rc], %[prev])
EX_TABLE_UA_LOAD_REG(4b, 5b, %[rc], %[prev])
: [rc] "+&d" (rc),
[prev] "=&d" (prev),
[address] "+Q" (*(int *)address),
[tmp] "+&d" (_old),
[new] "+&d" (_new),
[mask] "+&d" (mask),
[count] "=a" (count)
: [key] "%[count]" (key << 4),
[default_key] "J" (PAGE_DEFAULT_KEY),
[max_loops] "J" (CMPXCHG_USER_KEY_MAX_LOOPS)
: "memory", "cc");
*(unsigned short *)uval = prev >> shift;
if (!count)
rc = -EAGAIN;
return rc;
}
case 4: {
unsigned int prev = old;
asm volatile(
" spka 0(%[key])\n"
" sacf 256\n"
"0: cs %[prev],%[new],%[address]\n"
"1: sacf 768\n"
" spka %[default_key]\n"
EX_TABLE_UA_LOAD_REG(0b, 1b, %[rc], %[prev])
EX_TABLE_UA_LOAD_REG(1b, 1b, %[rc], %[prev])
: [rc] "+&d" (rc),
[prev] "+&d" (prev),
[address] "+Q" (*(int *)address)
: [new] "d" ((unsigned int)new),
[key] "a" (key << 4),
[default_key] "J" (PAGE_DEFAULT_KEY)
: "memory", "cc");
*(unsigned int *)uval = prev;
return rc;
}
case 8: {
unsigned long prev = old;
asm volatile(
" spka 0(%[key])\n"
" sacf 256\n"
"0: csg %[prev],%[new],%[address]\n"
"1: sacf 768\n"
" spka %[default_key]\n"
EX_TABLE_UA_LOAD_REG(0b, 1b, %[rc], %[prev])
EX_TABLE_UA_LOAD_REG(1b, 1b, %[rc], %[prev])
: [rc] "+&d" (rc),
[prev] "+&d" (prev),
[address] "+QS" (*(long *)address)
: [new] "d" ((unsigned long)new),
[key] "a" (key << 4),
[default_key] "J" (PAGE_DEFAULT_KEY)
: "memory", "cc");
*(unsigned long *)uval = prev;
return rc;
}
case 16: {
__uint128_t prev = old;
asm volatile(
" spka 0(%[key])\n"
" sacf 256\n"
"0: cdsg %[prev],%[new],%[address]\n"
"1: sacf 768\n"
" spka %[default_key]\n"
EX_TABLE_UA_LOAD_REGPAIR(0b, 1b, %[rc], %[prev])
EX_TABLE_UA_LOAD_REGPAIR(1b, 1b, %[rc], %[prev])
: [rc] "+&d" (rc),
[prev] "+&d" (prev),
[address] "+QS" (*(__int128_t *)address)
: [new] "d" (new),
[key] "a" (key << 4),
[default_key] "J" (PAGE_DEFAULT_KEY)
: "memory", "cc");
*(__uint128_t *)uval = prev;
return rc;
}
}
__cmpxchg_user_key_called_with_bad_pointer();
return rc;
}
/**
* cmpxchg_user_key() - cmpxchg with user space target, honoring storage keys
* @ptr: User space address of value to compare to @old and exchange with
* @new. Must be aligned to sizeof(*@ptr).
* @uval: Address where the old value of *@ptr is written to.
* @old: Old value. Compared to the content pointed to by @ptr in order to
* determine if the exchange occurs. The old value read from *@ptr is
* written to *@uval.
* @new: New value to place at *@ptr.
* @key: Access key to use for checking storage key protection.
*
* Perform a cmpxchg on a user space target, honoring storage key protection.
* @key alone determines how key checking is performed; neither
* storage-protection-override nor fetch-protection-override applies.
* The caller must compare *@uval and @old to determine if values have been
* exchanged. In case of an exception *@uval is set to zero.
*
* Return: 0: cmpxchg executed
* -EFAULT: an exception happened when trying to access *@ptr
* -EAGAIN: maximum number of retries exceeded (1- and 2-byte cases only)
*/
#define cmpxchg_user_key(ptr, uval, old, new, key) \
({ \
__typeof__(ptr) __ptr = (ptr); \
__typeof__(uval) __uval = (uval); \
\
BUILD_BUG_ON(sizeof(*(__ptr)) != sizeof(*(__uval))); \
might_fault(); \
__chk_user_ptr(__ptr); \
__cmpxchg_user_key((unsigned long)(__ptr), (void *)(__uval), \
(old), (new), (key), sizeof(*(__ptr))); \
})
#endif /* __S390_UACCESS_H */
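As a usage illustration (not part of this patch), a caller might wrap the new interface as sketched below; the helper name and the -EBUSY convention are made up for the example, and -EAGAIN can only be returned for the 1- and 2-byte variants.

/* Hypothetical helper, for illustration only. */
static int try_update_byte_key(unsigned char __user *ptr, unsigned char old,
			       unsigned char new, unsigned long key)
{
	unsigned char uval;
	int rc;

	rc = cmpxchg_user_key(ptr, &uval, old, new, key);
	if (rc == -EFAULT)
		return rc;		/* access exception; uval was set to zero */
	if (rc == -EAGAIN)
		return rc;		/* retry limit hit, nothing was exchanged */
	return uval == old ? 0 : -EBUSY;	/* compare failed: report contention */
}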
@@ -47,13 +47,16 @@ static bool ex_handler_ua_load_mem(const struct exception_table_entry *ex, struc
return true;
}
-static bool ex_handler_ua_load_reg(const struct exception_table_entry *ex, struct pt_regs *regs)
+static bool ex_handler_ua_load_reg(const struct exception_table_entry *ex,
+bool pair, struct pt_regs *regs)
{
unsigned int reg_zero = FIELD_GET(EX_DATA_REG_ADDR, ex->data);
unsigned int reg_err = FIELD_GET(EX_DATA_REG_ERR, ex->data);
regs->gprs[reg_err] = -EFAULT;
regs->gprs[reg_zero] = 0;
+if (pair)
+regs->gprs[reg_zero + 1] = 0;
regs->psw.addr = extable_fixup(ex);
return true;
}
@@ -75,7 +78,9 @@ bool fixup_exception(struct pt_regs *regs)
case EX_TYPE_UA_LOAD_MEM:
return ex_handler_ua_load_mem(ex, regs);
case EX_TYPE_UA_LOAD_REG:
-return ex_handler_ua_load_reg(ex, regs);
+return ex_handler_ua_load_reg(ex, false, regs);
+case EX_TYPE_UA_LOAD_REGPAIR:
+return ex_handler_ua_load_reg(ex, true, regs);
}
panic("invalid exception table entry");
}