Commit 256ec489 authored by Thomas Bogendoerfer's avatar Thomas Bogendoerfer

MIPS: Convert R10000_LLSC_WAR into a config option

Use a new config option to enable the R10000_LLSC workaround and remove
the define from the different war.h files.
Signed-off-by: Thomas Bogendoerfer <tsbogend@alpha.franken.de>
parent 886ee136
...@@ -669,6 +669,7 @@ config SGI_IP27 ...@@ -669,6 +669,7 @@ config SGI_IP27
select SYS_SUPPORTS_BIG_ENDIAN select SYS_SUPPORTS_BIG_ENDIAN
select SYS_SUPPORTS_NUMA select SYS_SUPPORTS_NUMA
select SYS_SUPPORTS_SMP select SYS_SUPPORTS_SMP
select WAR_R10000_LLSC
select MIPS_L1_CACHE_SHIFT_7 select MIPS_L1_CACHE_SHIFT_7
select NUMA select NUMA
help help
...@@ -704,6 +705,7 @@ config SGI_IP28 ...@@ -704,6 +705,7 @@ config SGI_IP28
select SYS_HAS_EARLY_PRINTK select SYS_HAS_EARLY_PRINTK
select SYS_SUPPORTS_64BIT_KERNEL select SYS_SUPPORTS_64BIT_KERNEL
select SYS_SUPPORTS_BIG_ENDIAN select SYS_SUPPORTS_BIG_ENDIAN
select WAR_R10000_LLSC
select MIPS_L1_CACHE_SHIFT_7 select MIPS_L1_CACHE_SHIFT_7
help help
This is the SGI Indigo2 with R10000 processor. To compile a Linux This is the SGI Indigo2 with R10000 processor. To compile a Linux
...@@ -730,6 +732,7 @@ config SGI_IP30 ...@@ -730,6 +732,7 @@ config SGI_IP30
select SYS_SUPPORTS_64BIT_KERNEL select SYS_SUPPORTS_64BIT_KERNEL
select SYS_SUPPORTS_BIG_ENDIAN select SYS_SUPPORTS_BIG_ENDIAN
select SYS_SUPPORTS_SMP select SYS_SUPPORTS_SMP
select WAR_R10000_LLSC
select MIPS_L1_CACHE_SHIFT_7 select MIPS_L1_CACHE_SHIFT_7
select ARC_MEMORY select ARC_MEMORY
help help
...@@ -2675,6 +2678,11 @@ config WAR_TX49XX_ICACHE_INDEX_INV ...@@ -2675,6 +2678,11 @@ config WAR_TX49XX_ICACHE_INDEX_INV
config WAR_ICACHE_REFILLS config WAR_ICACHE_REFILLS
bool bool
# On the R10000 up to version 2.6 (not sure about 2.7) there is a bug that
# may cause ll / sc and lld / scd sequences to execute non-atomically.
config WAR_R10000_LLSC
bool
# #
# - Highmem only makes sense for the 32-bit kernel. # - Highmem only makes sense for the 32-bit kernel.
# - The current highmem code will only work properly on physically indexed # - The current highmem code will only work properly on physically indexed
......
...@@ -21,7 +21,7 @@ ...@@ -21,7 +21,7 @@
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \ #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
{ \ { \
if (cpu_has_llsc && R10000_LLSC_WAR) { \ if (cpu_has_llsc && IS_ENABLED(CONFIG_WAR_R10000_LLSC)) { \
__asm__ __volatile__( \ __asm__ __volatile__( \
" .set push \n" \ " .set push \n" \
" .set noat \n" \ " .set noat \n" \
...@@ -133,7 +133,7 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, ...@@ -133,7 +133,7 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
if (!access_ok(uaddr, sizeof(u32))) if (!access_ok(uaddr, sizeof(u32)))
return -EFAULT; return -EFAULT;
if (cpu_has_llsc && R10000_LLSC_WAR) { if (cpu_has_llsc && IS_ENABLED(CONFIG_WAR_R10000_LLSC)) {
__asm__ __volatile__( __asm__ __volatile__(
"# futex_atomic_cmpxchg_inatomic \n" "# futex_atomic_cmpxchg_inatomic \n"
" .set push \n" " .set push \n"
......
...@@ -28,7 +28,7 @@ ...@@ -28,7 +28,7 @@
* works around a bug present in R10000 CPUs prior to revision 3.0 that could * works around a bug present in R10000 CPUs prior to revision 3.0 that could
* cause ll-sc sequences to execute non-atomically. * cause ll-sc sequences to execute non-atomically.
*/ */
#if R10000_LLSC_WAR #ifdef CONFIG_WAR_R10000_LLSC
# define __SC_BEQZ "beqzl " # define __SC_BEQZ "beqzl "
#elif MIPS_ISA_REV >= 6 #elif MIPS_ISA_REV >= 6
# define __SC_BEQZ "beqzc " # define __SC_BEQZ "beqzc "
......
...@@ -31,7 +31,7 @@ static __inline__ long local_add_return(long i, local_t * l) ...@@ -31,7 +31,7 @@ static __inline__ long local_add_return(long i, local_t * l)
{ {
unsigned long result; unsigned long result;
if (kernel_uses_llsc && R10000_LLSC_WAR) { if (kernel_uses_llsc && IS_ENABLED(CONFIG_WAR_R10000_LLSC)) {
unsigned long temp; unsigned long temp;
__asm__ __volatile__( __asm__ __volatile__(
...@@ -80,7 +80,7 @@ static __inline__ long local_sub_return(long i, local_t * l) ...@@ -80,7 +80,7 @@ static __inline__ long local_sub_return(long i, local_t * l)
{ {
unsigned long result; unsigned long result;
if (kernel_uses_llsc && R10000_LLSC_WAR) { if (kernel_uses_llsc && IS_ENABLED(CONFIG_WAR_R10000_LLSC)) {
unsigned long temp; unsigned long temp;
__asm__ __volatile__( __asm__ __volatile__(
......
...@@ -11,7 +11,6 @@ ...@@ -11,7 +11,6 @@
#define BCM1250_M3_WAR 0 #define BCM1250_M3_WAR 0
#define SIBYTE_1956_WAR 0 #define SIBYTE_1956_WAR 0
#define R10000_LLSC_WAR 0
#define MIPS34K_MISSED_ITLB_WAR 0 #define MIPS34K_MISSED_ITLB_WAR 0
#define CAVIUM_OCTEON_DCACHE_PREFETCH_WAR \ #define CAVIUM_OCTEON_DCACHE_PREFETCH_WAR \
......
...@@ -10,7 +10,6 @@ ...@@ -10,7 +10,6 @@
#define BCM1250_M3_WAR 0 #define BCM1250_M3_WAR 0
#define SIBYTE_1956_WAR 0 #define SIBYTE_1956_WAR 0
#define R10000_LLSC_WAR 0
#define MIPS34K_MISSED_ITLB_WAR 0 #define MIPS34K_MISSED_ITLB_WAR 0
#endif /* __ASM_MACH_GENERIC_WAR_H */ #endif /* __ASM_MACH_GENERIC_WAR_H */
...@@ -10,7 +10,6 @@ ...@@ -10,7 +10,6 @@
#define BCM1250_M3_WAR 0 #define BCM1250_M3_WAR 0
#define SIBYTE_1956_WAR 0 #define SIBYTE_1956_WAR 0
#define R10000_LLSC_WAR 0
#define MIPS34K_MISSED_ITLB_WAR 0 #define MIPS34K_MISSED_ITLB_WAR 0
#endif /* __ASM_MIPS_MACH_IP22_WAR_H */ #endif /* __ASM_MIPS_MACH_IP22_WAR_H */
...@@ -10,7 +10,6 @@ ...@@ -10,7 +10,6 @@
#define BCM1250_M3_WAR 0 #define BCM1250_M3_WAR 0
#define SIBYTE_1956_WAR 0 #define SIBYTE_1956_WAR 0
#define R10000_LLSC_WAR 1
#define MIPS34K_MISSED_ITLB_WAR 0 #define MIPS34K_MISSED_ITLB_WAR 0
#endif /* __ASM_MIPS_MACH_IP27_WAR_H */ #endif /* __ASM_MIPS_MACH_IP27_WAR_H */
...@@ -10,7 +10,6 @@ ...@@ -10,7 +10,6 @@
#define BCM1250_M3_WAR 0 #define BCM1250_M3_WAR 0
#define SIBYTE_1956_WAR 0 #define SIBYTE_1956_WAR 0
#define R10000_LLSC_WAR 1
#define MIPS34K_MISSED_ITLB_WAR 0 #define MIPS34K_MISSED_ITLB_WAR 0
#endif /* __ASM_MIPS_MACH_IP28_WAR_H */ #endif /* __ASM_MIPS_MACH_IP28_WAR_H */
...@@ -7,11 +7,6 @@ ...@@ -7,11 +7,6 @@
#define BCM1250_M3_WAR 0 #define BCM1250_M3_WAR 0
#define SIBYTE_1956_WAR 0 #define SIBYTE_1956_WAR 0
#ifdef CONFIG_CPU_R10000
#define R10000_LLSC_WAR 1
#else
#define R10000_LLSC_WAR 0
#endif
#define MIPS34K_MISSED_ITLB_WAR 0 #define MIPS34K_MISSED_ITLB_WAR 0
#endif /* __ASM_MIPS_MACH_IP30_WAR_H */ #endif /* __ASM_MIPS_MACH_IP30_WAR_H */
...@@ -10,7 +10,6 @@ ...@@ -10,7 +10,6 @@
#define BCM1250_M3_WAR 0 #define BCM1250_M3_WAR 0
#define SIBYTE_1956_WAR 0 #define SIBYTE_1956_WAR 0
#define R10000_LLSC_WAR 0
#define MIPS34K_MISSED_ITLB_WAR 0 #define MIPS34K_MISSED_ITLB_WAR 0
#endif /* __ASM_MIPS_MACH_IP32_WAR_H */ #endif /* __ASM_MIPS_MACH_IP32_WAR_H */
...@@ -10,7 +10,6 @@ ...@@ -10,7 +10,6 @@
#define BCM1250_M3_WAR 0 #define BCM1250_M3_WAR 0
#define SIBYTE_1956_WAR 0 #define SIBYTE_1956_WAR 0
#define R10000_LLSC_WAR 0
#define MIPS34K_MISSED_ITLB_WAR 0 #define MIPS34K_MISSED_ITLB_WAR 0
#endif /* __ASM_MIPS_MACH_MIPS_WAR_H */ #endif /* __ASM_MIPS_MACH_MIPS_WAR_H */
...@@ -10,7 +10,6 @@ ...@@ -10,7 +10,6 @@
#define BCM1250_M3_WAR 0 #define BCM1250_M3_WAR 0
#define SIBYTE_1956_WAR 0 #define SIBYTE_1956_WAR 0
#define R10000_LLSC_WAR 0
#define MIPS34K_MISSED_ITLB_WAR 0 #define MIPS34K_MISSED_ITLB_WAR 0
#endif /* __ASM_MIPS_MACH_MIPS_WAR_H */ #endif /* __ASM_MIPS_MACH_MIPS_WAR_H */
...@@ -10,7 +10,6 @@ ...@@ -10,7 +10,6 @@
#define BCM1250_M3_WAR 0 #define BCM1250_M3_WAR 0
#define SIBYTE_1956_WAR 0 #define SIBYTE_1956_WAR 0
#define R10000_LLSC_WAR 0
#define MIPS34K_MISSED_ITLB_WAR 0 #define MIPS34K_MISSED_ITLB_WAR 0
#endif /* __ASM_MIPS_MACH_RM_WAR_H */ #endif /* __ASM_MIPS_MACH_RM_WAR_H */
...@@ -24,7 +24,6 @@ extern int sb1250_m3_workaround_needed(void); ...@@ -24,7 +24,6 @@ extern int sb1250_m3_workaround_needed(void);
#endif #endif
#define R10000_LLSC_WAR 0
#define MIPS34K_MISSED_ITLB_WAR 0 #define MIPS34K_MISSED_ITLB_WAR 0
#endif /* __ASM_MIPS_MACH_SIBYTE_WAR_H */ #endif /* __ASM_MIPS_MACH_SIBYTE_WAR_H */
...@@ -10,7 +10,6 @@ ...@@ -10,7 +10,6 @@
#define BCM1250_M3_WAR 0 #define BCM1250_M3_WAR 0
#define SIBYTE_1956_WAR 0 #define SIBYTE_1956_WAR 0
#define R10000_LLSC_WAR 0
#define MIPS34K_MISSED_ITLB_WAR 0 #define MIPS34K_MISSED_ITLB_WAR 0
#endif /* __ASM_MIPS_MACH_TX49XX_WAR_H */ #endif /* __ASM_MIPS_MACH_TX49XX_WAR_H */
...@@ -93,14 +93,6 @@ ...@@ -93,14 +93,6 @@
#error Check setting of SIBYTE_1956_WAR for your platform #error Check setting of SIBYTE_1956_WAR for your platform
#endif #endif
/*
* On the R10000 up to version 2.6 (not sure about 2.7) there is a bug that
* may cause ll / sc and lld / scd sequences to execute non-atomically.
*/
#ifndef R10000_LLSC_WAR
#error Check setting of R10000_LLSC_WAR for your platform
#endif
/* /*
* 34K core erratum: "Problems Executing the TLBR Instruction" * 34K core erratum: "Problems Executing the TLBR Instruction"
*/ */
......
...@@ -106,7 +106,7 @@ static inline int mips_atomic_set(unsigned long addr, unsigned long new) ...@@ -106,7 +106,7 @@ static inline int mips_atomic_set(unsigned long addr, unsigned long new)
if (unlikely(!access_ok((const void __user *)addr, 4))) if (unlikely(!access_ok((const void __user *)addr, 4)))
return -EINVAL; return -EINVAL;
if (cpu_has_llsc && R10000_LLSC_WAR) { if (cpu_has_llsc && IS_ENABLED(CONFIG_WAR_R10000_LLSC)) {
__asm__ __volatile__ ( __asm__ __volatile__ (
" .set push \n" " .set push \n"
" .set arch=r4000 \n" " .set arch=r4000 \n"
......
...@@ -90,7 +90,7 @@ static inline int __maybe_unused bcm1250_m3_war(void) ...@@ -90,7 +90,7 @@ static inline int __maybe_unused bcm1250_m3_war(void)
static inline int __maybe_unused r10000_llsc_war(void) static inline int __maybe_unused r10000_llsc_war(void)
{ {
return R10000_LLSC_WAR; return IS_ENABLED(CONFIG_WAR_R10000_LLSC);
} }
static int use_bbit_insns(void) static int use_bbit_insns(void)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment