Commit ad32fd74 authored by Linus Torvalds

Merge tag 'xtensa-20191017' of git://github.com/jcmvbkbc/linux-xtensa

Pull Xtensa fixes from Max Filippov:

 - fix {get,put}_user() for 64bit values

 - fix warning about static EXPORT_SYMBOL from modpost

 - fix PCI IO ports mapping for the virt board

 - fix pasto in change_bit for exclusive access option

* tag 'xtensa-20191017' of git://github.com/jcmvbkbc/linux-xtensa:
  xtensa: fix change_bit in exclusive access option
  xtensa: virt: fix PCI IO ports mapping
  xtensa: drop EXPORT_SYMBOL for outs*/ins*
  xtensa: fix type conversion in __get_user_[no]check
  xtensa: clean up assembly arguments in uaccess macros
  xtensa: fix {get,put}_user() for 64bit values
parents 6e8ba009 775fd6bf
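
For context: the {get,put}_user() fixes in this pull matter to any code that moves a 64-bit value across the user/kernel boundary. A minimal sketch of that usage pattern follows (illustrative only, not part of the commit; the handler name is hypothetical):

    #include <linux/uaccess.h>
    #include <linux/errno.h>
    #include <linux/types.h>

    /* get_user()/put_user() must return 0 on success or -EFAULT on failure,
     * even when the access is 8 bytes wide on a 32-bit architecture. */
    static long example_xfer_u64(u64 __user *argp)
    {
            u64 val;

            if (get_user(val, argp))        /* 8-byte load from user space */
                    return -EFAULT;

            val += 1;                       /* arbitrary transformation */

            if (put_user(val, argp))        /* 8-byte store back to user space */
                    return -EFAULT;

            return 0;
    }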
@@ -56,7 +56,7 @@ pci {
reg = <0xf0100000 0x03f00000>;
// BUS_ADDRESS(3) CPU_PHYSICAL(1) SIZE(2)
-ranges = <0x01000000 0x0 0xf0000000 0xf0000000 0x0 0x00010000>,
+ranges = <0x01000000 0x0 0x00000000 0xf0000000 0x0 0x00010000>,
<0x02000000 0x0 0xf4000000 0xf4000000 0x0 0x08000000>;
// PCI_DEVICE(3) INT#(1) CONTROLLER(PHANDLE) CONTROLLER_DATA(2)
@@ -148,7 +148,7 @@ static inline void change_bit(unsigned int bit, volatile unsigned long *p)
" getex %0\n"
" beqz %0, 1b\n"
: "=&a" (tmp)
: "a" (~mask), "a" (p)
: "a" (mask), "a" (p)
: "memory");
}
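
The fix above replaces ~mask with mask: change_bit() toggles a bit with XOR, so it needs the plain mask, while the ~mask form belongs to clear_bit() and was carried over by the paste error. A simplified, non-atomic C model of the intended semantics (illustrative only; the real routine does the XOR inside an exclusive l32ex/s32ex retry loop):

    /* Non-atomic model of change_bit() with 32-bit longs, as on xtensa. */
    static inline void change_bit_model(unsigned int bit, volatile unsigned long *p)
    {
            unsigned long mask = 1UL << (bit & 31);

            p += bit >> 5;
            *p ^= mask;             /* toggle: XOR with the plain mask */
            /* clear_bit() is the variant that uses: *p &= ~mask; */
    }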
@@ -100,7 +100,7 @@ do { \
case 4: __put_user_asm(x, ptr, retval, 4, "s32i", __cb); break; \
case 8: { \
__typeof__(*ptr) __v64 = x; \
-retval = __copy_to_user(ptr, &__v64, 8); \
+retval = __copy_to_user(ptr, &__v64, 8) ? -EFAULT : 0; \
break; \
} \
default: __put_user_bad(); \
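
The '? -EFAULT : 0' added above is needed because __copy_to_user() returns the number of bytes it failed to copy, while __put_user() callers expect 0 or -EFAULT. The same convention written out as a plain helper (illustrative only; the function is made up):

    #include <linux/uaccess.h>
    #include <linux/errno.h>
    #include <linux/types.h>

    /* Translate __copy_to_user()'s bytes-not-copied count into 0 / -EFAULT. */
    static int example_put_u64(u64 __user *uptr, u64 val)
    {
            if (__copy_to_user(uptr, &val, sizeof(val)))
                    return -EFAULT;
            return 0;
    }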
@@ -132,14 +132,14 @@ do { \
#define __check_align_1 ""
#define __check_align_2 \
" _bbci.l %3, 0, 1f \n" \
" movi %0, %4 \n" \
" _bbci.l %[addr], 0, 1f \n" \
" movi %[err], %[efault] \n" \
" _j 2f \n"
#define __check_align_4 \
" _bbsi.l %3, 0, 0f \n" \
" _bbci.l %3, 1, 1f \n" \
"0: movi %0, %4 \n" \
" _bbsi.l %[addr], 0, 0f \n" \
" _bbci.l %[addr], 1, 1f \n" \
"0: movi %[err], %[efault] \n" \
" _j 2f \n"
@@ -151,40 +151,40 @@ do { \
* WARNING: If you modify this macro at all, verify that the
* __check_align_* macros still work.
*/
-#define __put_user_asm(x, addr, err, align, insn, cb) \
+#define __put_user_asm(x_, addr_, err_, align, insn, cb)\
__asm__ __volatile__( \
__check_align_##align \
"1: "insn" %2, %3, 0 \n" \
"1: "insn" %[x], %[addr], 0 \n" \
"2: \n" \
" .section .fixup,\"ax\" \n" \
" .align 4 \n" \
" .literal_position \n" \
"5: \n" \
" movi %1, 2b \n" \
" movi %0, %4 \n" \
" jx %1 \n" \
" movi %[tmp], 2b \n" \
" movi %[err], %[efault] \n" \
" jx %[tmp] \n" \
" .previous \n" \
" .section __ex_table,\"a\" \n" \
" .long 1b, 5b \n" \
" .previous" \
:"=r" (err), "=r" (cb) \
:"r" ((int)(x)), "r" (addr), "i" (-EFAULT), "0" (err))
:[err] "+r"(err_), [tmp] "=r"(cb) \
:[x] "r"(x_), [addr] "r"(addr_), [efault] "i"(-EFAULT))
#define __get_user_nocheck(x, ptr, size) \
({ \
-long __gu_err, __gu_val; \
-__get_user_size(__gu_val, (ptr), (size), __gu_err); \
-(x) = (__force __typeof__(*(ptr)))__gu_val; \
+long __gu_err; \
+__get_user_size((x), (ptr), (size), __gu_err); \
__gu_err; \
})
#define __get_user_check(x, ptr, size) \
({ \
-long __gu_err = -EFAULT, __gu_val = 0; \
+long __gu_err = -EFAULT; \
const __typeof__(*(ptr)) *__gu_addr = (ptr); \
-if (access_ok(__gu_addr, size)) \
-__get_user_size(__gu_val, __gu_addr, (size), __gu_err); \
-(x) = (__force __typeof__(*(ptr)))__gu_val; \
+if (access_ok(__gu_addr, size)) \
+__get_user_size((x), __gu_addr, (size), __gu_err); \
+else \
+(x) = 0; \
__gu_err; \
})
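
The __get_user_[no]check changes exist because the old code funnelled every result through a long __gu_val intermediate; with 32-bit longs on xtensa that truncated 64-bit reads and let __copy_from_user() write 8 bytes into a 4-byte stack slot. A stand-alone demo of the truncation half (int32_t stands in for xtensa's 32-bit long so the behaviour is the same on any host):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint64_t fetched = 0x1122334455667788ULL;       /* value read from user space   */
            int32_t gu_val = (int32_t)fetched;              /* old 'long __gu_val' stand-in */
            uint64_t x = (uint64_t)gu_val;                  /* what the caller received     */

            printf("fetched   = 0x%016llx\n", (unsigned long long)fetched);
            printf("delivered = 0x%016llx\n", (unsigned long long)x);
            return 0;
    }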
@@ -198,8 +198,17 @@ do { \
case 1: __get_user_asm(x, ptr, retval, 1, "l8ui", __cb); break;\
case 2: __get_user_asm(x, ptr, retval, 2, "l16ui", __cb); break;\
case 4: __get_user_asm(x, ptr, retval, 4, "l32i", __cb); break;\
-case 8: retval = __copy_from_user(&x, ptr, 8); break; \
-default: (x) = __get_user_bad(); \
+case 8: { \
+u64 __x; \
+if (unlikely(__copy_from_user(&__x, ptr, 8))) { \
+retval = -EFAULT; \
+(x) = 0; \
+} else { \
+(x) = *(__force __typeof__((ptr)))&__x; \
+} \
+break; \
+} \
+default: (x) = 0; __get_user_bad(); \
} \
} while (0)
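
In the new case 8 above, a failed __copy_from_user() sets retval to -EFAULT rather than the bytes-not-copied count, and zeroes (x) so a caller that ignores the error never sees stale stack contents. The same contract as a plain helper (illustrative only; the name is made up):

    #include <linux/uaccess.h>
    #include <linux/compiler.h>
    #include <linux/errno.h>
    #include <linux/types.h>

    static int example_get_u64(u64 *dst, const u64 __user *uptr)
    {
            u64 tmp;

            if (unlikely(__copy_from_user(&tmp, uptr, sizeof(tmp)))) {
                    *dst = 0;               /* don't hand back uninitialized data */
                    return -EFAULT;         /* report the fault as an errno       */
            }
            *dst = tmp;
            return 0;
    }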
@@ -208,25 +217,28 @@ do { \
* WARNING: If you modify this macro at all, verify that the
* __check_align_* macros still work.
*/
-#define __get_user_asm(x, addr, err, align, insn, cb) \
-__asm__ __volatile__( \
-__check_align_##align \
-"1: "insn" %2, %3, 0 \n" \
-"2: \n" \
-" .section .fixup,\"ax\" \n" \
-" .align 4 \n" \
-" .literal_position \n" \
-"5: \n" \
-" movi %1, 2b \n" \
-" movi %2, 0 \n" \
-" movi %0, %4 \n" \
-" jx %1 \n" \
-" .previous \n" \
-" .section __ex_table,\"a\" \n" \
-" .long 1b, 5b \n" \
-" .previous" \
-:"=r" (err), "=r" (cb), "=r" (x) \
-:"r" (addr), "i" (-EFAULT), "0" (err))
+#define __get_user_asm(x_, addr_, err_, align, insn, cb) \
+do { \
+u32 __x = 0; \
+__asm__ __volatile__( \
+__check_align_##align \
+"1: "insn" %[x], %[addr], 0 \n" \
+"2: \n" \
+" .section .fixup,\"ax\" \n" \
+" .align 4 \n" \
+" .literal_position \n" \
+"5: \n" \
+" movi %[tmp], 2b \n" \
+" movi %[err], %[efault] \n" \
+" jx %[tmp] \n" \
+" .previous \n" \
+" .section __ex_table,\"a\" \n" \
+" .long 1b, 5b \n" \
+" .previous" \
+:[err] "+r"(err_), [tmp] "=r"(cb), [x] "+r"(__x) \
+:[addr] "r"(addr_), [efault] "i"(-EFAULT)); \
+(x_) = (__force __typeof__(*(addr_)))__x; \
+} while (0)
/*
@@ -119,13 +119,6 @@ EXPORT_SYMBOL(__invalidate_icache_range);
// FIXME EXPORT_SYMBOL(screen_info);
#endif
-EXPORT_SYMBOL(outsb);
-EXPORT_SYMBOL(outsw);
-EXPORT_SYMBOL(outsl);
-EXPORT_SYMBOL(insb);
-EXPORT_SYMBOL(insw);
-EXPORT_SYMBOL(insl);
extern long common_exception_return;
EXPORT_SYMBOL(common_exception_return);