Commit d7d86aa8 authored by Ralf Baechle

[MIPS] Cleanup hazard handling.

Mostly based on a patch by Chris Dearman and cleanups from Yoichi.

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
parent da79e827
@@ -11,103 +11,96 @@
 #define _ASM_HAZARDS_H
 #ifdef __ASSEMBLY__
-	.macro	_ssnop
-	sll	$0, $0, 1
-	.endm
-	.macro	_ehb
-	sll	$0, $0, 3
-	.endm
-/*
- * RM9000 hazards.  When the JTLB is updated by tlbwi or tlbwr, a subsequent
- * use of the JTLB for instructions should not occur for 4 cpu cycles and use
- * for data translations should not occur for 3 cpu cycles.
- */
-#ifdef CONFIG_CPU_RM9000
-	.macro	mtc0_tlbw_hazard
-	.set	push
-	.set	mips32
-	_ssnop; _ssnop; _ssnop; _ssnop
-	.set	pop
-	.endm
-	.macro	tlbw_eret_hazard
-	.set	push
-	.set	mips32
-	_ssnop; _ssnop; _ssnop; _ssnop
-	.set	pop
-	.endm
+#define ASMMACRO(name, code...) .macro name; code; .endm
 #else
-/*
- * The taken branch will result in a two cycle penalty for the two killed
- * instructions on R4000 / R4400.  Other processors only have a single cycle
- * hazard so this is a nice trick to have optimal code for a range of
- * processors.
- */
-	.macro	mtc0_tlbw_hazard
-	b	. + 8
-	.endm
-	.macro	tlbw_eret_hazard
-	.endm
+#define ASMMACRO(name, code...)					\
+__asm__(".macro " #name "; " #code "; .endm");			\
+								\
+static inline void name(void)					\
+{								\
+	__asm__ __volatile__ (#name);				\
+}
 #endif
+ASMMACRO(_ssnop,
+	 sll	$0, $0, 1
+	)
+ASMMACRO(_ehb,
+	 sll	$0, $0, 3
+	)
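The ASMMACRO() helper added above is the heart of this cleanup: one definition yields a gas macro when the header is pulled into .S files, and a same-named C inline function (plus the gas macro) otherwise. Hand-expanding ASMMACRO(_ehb, sll $0, $0, 3) on the C side gives roughly the following (a hand-expanded sketch for illustration, not literal patch output):

	/* The file-scope asm teaches gas an "_ehb" macro once per object... */
	__asm__(".macro _ehb; sll $0, $0, 3; .endm");

	/* ...and the inline function emits one use of it, so C code calls
	 * _ehb() while assembly code writes plain _ehb. */
	static inline void _ehb(void)
	{
		__asm__ __volatile__ ("_ehb");
	}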
 /*
- * mtc0->mfc0 hazard
- * The 24K has a 2 cycle mtc0/mfc0 execution hazard.
- * It is a MIPS32R2 processor so ehb will clear the hazard.
+ * TLB hazards
  */
-#ifdef CONFIG_CPU_MIPSR2
+#if defined(CONFIG_CPU_MIPSR2)
 /*
- * Use a macro for ehb unless explicit support for MIPSR2 is enabled
+ * MIPSR2 defines ehb for hazard avoidance
  */
-#define irq_enable_hazard					\
-	_ehb
-#define irq_disable_hazard					\
-	_ehb
+ASMMACRO(mtc0_tlbw_hazard,
+	 _ehb
+	)
+ASMMACRO(tlbw_use_hazard,
+	 _ehb
+	)
+ASMMACRO(tlb_probe_hazard,
+	 _ehb
+	)
+ASMMACRO(irq_enable_hazard,
+	)
+ASMMACRO(irq_disable_hazard,
+	 _ehb
+	)
+ASMMACRO(back_to_back_c0_hazard,
+	 _ehb
+	)
+/*
+ * gcc has a tradition of miscompiling the previous construct using the
+ * address of a label as argument to inline assembler.  Gas otoh has the
+ * annoying difference between la and dla which are only usable for 32-bit
+ * resp. 64-bit code, so can't be used without conditional compilation.
+ * The alternative is switching the assembler to 64-bit code which happens
+ * to work right even for 32-bit code ...
+ */
+#define instruction_hazard()					\
+do {								\
+	unsigned long tmp;					\
+								\
+	__asm__ __volatile__(					\
+	"	.set	mips64r2			\n"	\
+	"	dla	%0, 1f				\n"	\
+	"	jr.hb	%0				\n"	\
+	"	.set	mips0				\n"	\
+	"1:						\n"	\
+	: "=r" (tmp));						\
+} while (0)
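The comment above refers to an earlier construct that handed the address of a C label to the inline asm as the jr.hb target. A reconstruction of that fragile pattern (for illustration only; instruction_hazard_fragile is a made-up name, and this is not the historical code):

	/* gcc may clone or move the label during optimization, so the
	 * jump target can end up wrong - hence the dla inside the asm
	 * in the version above. */
	#define instruction_hazard_fragile()			\
	do {							\
		__label__ __ihb_next;				\
		__asm__ __volatile__(				\
		"	jr.hb	%0\n"				\
		: : "r" (&&__ihb_next));			\
	__ihb_next:						\
		;						\
	} while (0)

Computing the target with dla inside the asm avoids the label problem, and switching the assembler to mips64r2 sidesteps the la/dla split between 32-bit and 64-bit code.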
-#elif defined(CONFIG_CPU_R10000) || defined(CONFIG_CPU_RM9000)
-/*
- * R10000 rocks - all hazards handled in hardware, so this becomes a nobrainer.
- */
-#define irq_enable_hazard
-#define irq_disable_hazard
-#else
-/*
- * Classic MIPS needs 1 - 3 nops or ssnops
- */
-#define irq_enable_hazard
-#define irq_disable_hazard					\
-	_ssnop; _ssnop; _ssnop
-#endif
-#else /* __ASSEMBLY__ */
-__asm__(
-	"	.macro	_ssnop				\n"
-	"	sll	$0, $0, 1			\n"
-	"	.endm					\n"
-	"						\n"
-	"	.macro	_ehb				\n"
-	"	sll	$0, $0, 3			\n"
-	"	.endm					\n");
-#ifdef CONFIG_CPU_RM9000
+#elif defined(CONFIG_CPU_R10000)
+/*
+ * R10000 rocks - all hazards handled in hardware, so this becomes a nobrainer.
+ */
+ASMMACRO(mtc0_tlbw_hazard,
+	)
+ASMMACRO(tlbw_use_hazard,
+	)
+ASMMACRO(tlb_probe_hazard,
+	)
+ASMMACRO(irq_enable_hazard,
+	)
+ASMMACRO(irq_disable_hazard,
+	)
+ASMMACRO(back_to_back_c0_hazard,
+	)
+#define instruction_hazard() do { } while (0)
+#elif defined(CONFIG_CPU_RM9000)
 /*
  * RM9000 hazards.  When the JTLB is updated by tlbwi or tlbwr, a subsequent
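Between the two hunks, a word on how these macros are consumed: thanks to ASMMACRO() the same names now work from C as well as from assembler. A typical caller looks roughly like this sketch, loosely modeled on the kernel's R4k TLB handling (simplified; not part of this patch):

	#include <asm/mipsregs.h>	/* c0 accessors and tlbp/tlbwi wrappers */

	/* Update a TLB entry with every hazard point bracketed. */
	static void example_update_tlb(unsigned long hi)
	{
		write_c0_entryhi(hi);
		mtc0_tlbw_hazard();	/* let the mtc0 settle before tlbp */
		tlb_probe();
		tlb_probe_hazard();	/* c0_index is not valid immediately */
		if ((long)read_c0_index() >= 0) {
			tlb_write_indexed();
			tlbw_use_hazard();	/* before the mapping is used */
		}
	}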
@@ -115,176 +108,73 @@ __asm__(
  * for data translations should not occur for 3 cpu cycles.
  */
-#define mtc0_tlbw_hazard()					\
-	__asm__ __volatile__(					\
-	"	.set	mips32				\n"	\
-	"	_ssnop					\n"	\
-	"	_ssnop					\n"	\
-	"	_ssnop					\n"	\
-	"	_ssnop					\n"	\
-	"	.set	mips0				\n")
-#define tlbw_use_hazard()					\
-	__asm__ __volatile__(					\
-	"	.set	mips32				\n"	\
-	"	_ssnop					\n"	\
-	"	_ssnop					\n"	\
-	"	_ssnop					\n"	\
-	"	_ssnop					\n"	\
-	"	.set	mips0				\n")
-#else
-/*
- * Overkill warning ...
- */
-#define mtc0_tlbw_hazard()					\
-	__asm__ __volatile__(					\
-	"	.set	noreorder			\n"	\
-	"	nop					\n"	\
-	"	nop					\n"	\
-	"	nop					\n"	\
-	"	nop					\n"	\
-	"	nop					\n"	\
-	"	nop					\n"	\
-	"	.set	reorder				\n")
-#define tlbw_use_hazard()					\
-	__asm__ __volatile__(					\
-	"	.set	noreorder			\n"	\
-	"	nop					\n"	\
-	"	nop					\n"	\
-	"	nop					\n"	\
-	"	nop					\n"	\
-	"	nop					\n"	\
-	"	nop					\n"	\
-	"	.set	reorder				\n")
-#endif
-/*
- * Interrupt enable/disable hazards
- * Some processors have hazards when modifying
- * the status register to change the interrupt state
- */
-#ifdef CONFIG_CPU_MIPSR2
-__asm__("	.macro	irq_enable_hazard		\n"
-	"	_ehb					\n"
-	"	.endm					\n"
-	"						\n"
-	"	.macro	irq_disable_hazard		\n"
-	"	_ehb					\n"
-	"	.endm					\n");
-#elif defined(CONFIG_CPU_R10000) || defined(CONFIG_CPU_RM9000)
-/*
- * R10000 rocks - all hazards handled in hardware, so this becomes a nobrainer.
- */
-__asm__(
-	"	.macro	irq_enable_hazard		\n"
-	"	.endm					\n"
-	"						\n"
-	"	.macro	irq_disable_hazard		\n"
-	"	.endm					\n");
+ASMMACRO(mtc0_tlbw_hazard,
+	 _ssnop; _ssnop; _ssnop; _ssnop
+	)
+ASMMACRO(tlbw_use_hazard,
+	 _ssnop; _ssnop; _ssnop; _ssnop
+	)
+ASMMACRO(tlb_probe_hazard,
+	 _ssnop; _ssnop; _ssnop; _ssnop
+	)
+ASMMACRO(irq_enable_hazard,
+	)
+ASMMACRO(irq_disable_hazard,
+	)
+ASMMACRO(back_to_back_c0_hazard,
+	)
+#define instruction_hazard() do { } while (0)
+#elif defined(CONFIG_CPU_SB1)
+/*
+ * Mostly like R4000 for historic reasons
+ */
+ASMMACRO(mtc0_tlbw_hazard,
+	)
+ASMMACRO(tlbw_use_hazard,
+	)
+ASMMACRO(tlb_probe_hazard,
+	)
+ASMMACRO(irq_enable_hazard,
+	)
+ASMMACRO(irq_disable_hazard,
+	 _ssnop; _ssnop; _ssnop
+	)
+ASMMACRO(back_to_back_c0_hazard,
+	)
+#define instruction_hazard() do { } while (0)
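Note that the SB1 block keeps a real irq_disable_hazard: clearing Status.IE with mtc0 does not take effect instantly on such cores, so a few following instructions could still be interrupted. The idiom, as a sketch (example_irq_off is a hypothetical helper, not from this patch):

	#include <asm/mipsregs.h>	/* read_c0_status, write_c0_status, ST0_IE */

	static inline void example_irq_off(void)
	{
		write_c0_status(read_c0_status() & ~ST0_IE);
		irq_disable_hazard();	/* code after this runs with IRQs masked */
	}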
 #else
-/*
- * Default for classic MIPS processors.  Assume worst case hazards but don't
- * care about the irq_enable_hazard - sooner or later the hardware will
- * enable it and we don't care when exactly.
- */
-__asm__(
-	"	#					\n"
-	"	# There is a hazard but we do not care	\n"
-	"	#					\n"
-	"	.macro\tirq_enable_hazard		\n"
-	"	.endm					\n"
-	"						\n"
-	"	.macro\tirq_disable_hazard		\n"
-	"	_ssnop					\n"
-	"	_ssnop					\n"
-	"	_ssnop					\n"
-	"	.endm					\n");
-#endif
-#define irq_enable_hazard()					\
-	__asm__ __volatile__("irq_enable_hazard")
-#define irq_disable_hazard()					\
-	__asm__ __volatile__("irq_disable_hazard")
-/*
- * Back-to-back hazards -
- *
- * What is needed to separate a move to cp0 from a subsequent read from the
- * same cp0 register?
- */
-#ifdef CONFIG_CPU_MIPSR2
-__asm__("	.macro	back_to_back_c0_hazard		\n"
-	"	_ehb					\n"
-	"	.endm					\n");
-#elif defined(CONFIG_CPU_R10000) || defined(CONFIG_CPU_RM9000) || \
-	defined(CONFIG_CPU_SB1)
-__asm__("	.macro	back_to_back_c0_hazard		\n"
-	"	.endm					\n");
-#else
-__asm__("	.macro	back_to_back_c0_hazard		\n"
-	"	.set	noreorder			\n"
-	"	_ssnop					\n"
-	"	_ssnop					\n"
-	"	_ssnop					\n"
-	"	.set	reorder				\n"
-	"	.endm");
-#endif
-#define back_to_back_c0_hazard()				\
-	__asm__ __volatile__("back_to_back_c0_hazard")
-/*
- * Instruction execution hazard
- */
-#ifdef CONFIG_CPU_MIPSR2
-/*
- * gcc has a tradition of miscompiling the previous construct using the
- * address of a label as argument to inline assembler.  Gas otoh has the
- * annoying difference between la and dla which are only usable for 32-bit
- * resp. 64-bit code, so can't be used without conditional compilation.
- * The alternative is switching the assembler to 64-bit code which happens
- * to work right even for 32-bit code ...
- */
-#define instruction_hazard()					\
-do {								\
-	unsigned long tmp;					\
-								\
-	__asm__ __volatile__(					\
-	"	.set	mips64r2			\n"	\
-	"	dla	%0, 1f				\n"	\
-	"	jr.hb	%0				\n"	\
-	"	.set	mips0				\n"	\
-	"1:						\n"	\
-	: "=r" (tmp));						\
-} while (0)
-#else
+/*
+ * Finally the catchall case for all other processors including R4000, R4400,
+ * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
+ *
+ * The taken branch will result in a two cycle penalty for the two killed
+ * instructions on R4000 / R4400.  Other processors only have a single cycle
+ * hazard so this is a nice trick to have optimal code for a range of
+ * processors.
+ */
+ASMMACRO(mtc0_tlbw_hazard,
+	 nop
+	)
+ASMMACRO(tlbw_use_hazard,
+	 nop; nop; nop
+	)
+ASMMACRO(tlb_probe_hazard,
+	 nop; nop; nop
+	)
+ASMMACRO(irq_enable_hazard,
+	)
+ASMMACRO(irq_disable_hazard,
+	 nop; nop; nop
+	)
+ASMMACRO(back_to_back_c0_hazard,
+	 _ssnop; _ssnop; _ssnop;
+	)
 #define instruction_hazard() do { } while (0)
-#endif
-#endif /* __ASSEMBLY__ */
+#endif
+extern void mips_ihb(void);
 #endif /* _ASM_HAZARDS_H */
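Finally, back_to_back_c0_hazard() separates an mtc0 from a closely following read of CP0 state. A sketch loosely modeled on how MIPS timer code programs the Compare register (names simplified; not part of this patch):

	#include <asm/mipsregs.h>	/* read_c0_count, write_c0_compare */

	/* Returns nonzero if the programmed tick was already missed. */
	static int example_program_timer(unsigned int delta)
	{
		unsigned int cnt = read_c0_count() + delta;

		write_c0_compare(cnt);
		back_to_back_c0_hazard();	/* let the mtc0 settle first */
		return (int)(read_c0_count() - cnt) >= 0;
	}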