Commit 4c59e294 authored by Paul Mundt

sh: Move lookup_exception_vector() out to asm/system_32.h.

There are other places where we want to have access to the trap/exception
number, so move out the lookup_exception_vector() helper. While we're at
it, refactor it slightly to return the vector instead.
Signed-off-by: Paul Mundt <lethal@linux-sh.org>
parent 347cd34f
@@ -97,6 +97,31 @@ do { \
: "=&r" (__dummy)); \ : "=&r" (__dummy)); \
} while (0) } while (0)
#ifdef CONFIG_CPU_HAS_SR_RB
/*
 * lookup_exception_vector() - return the current trap/exception vector
 * number as the value of the expression.
 *
 * On CPUs with a second register bank (SR.RB), the exception entry path
 * leaves the vector in r2 of the banked register set, so it is fetched
 * with "stc r2_bank".
 */
#define lookup_exception_vector() \
({ \
unsigned long _vec; \
\
__asm__ __volatile__ ( \
"stc r2_bank, %0\n\t" \
: "=r" (_vec) \
); \
\
_vec; \
})
#else
/*
 * No banked registers: read the vector from r4.
 * NOTE(review): this assumes r4 still holds the vector passed in by the
 * exception entry code and has not been clobbered before this macro is
 * invoked -- confirm all callers run early enough in the handler.
 */
#define lookup_exception_vector() \
({ \
unsigned long _vec; \
__asm__ __volatile__ ( \
"mov r4, %0\n\t" \
: "=r" (_vec) \
); \
\
_vec; \
})
#endif
int handle_unaligned_access(opcode_t instruction, struct pt_regs *regs, int handle_unaligned_access(opcode_t instruction, struct pt_regs *regs,
struct mem_access *ma); struct mem_access *ma);
......
...@@ -514,14 +514,6 @@ int handle_unaligned_access(opcode_t instruction, struct pt_regs *regs, ...@@ -514,14 +514,6 @@ int handle_unaligned_access(opcode_t instruction, struct pt_regs *regs,
return ret; return ret;
} }
#ifdef CONFIG_CPU_HAS_SR_RB
/*
 * lookup_exception_vector(x) - store the current trap/exception vector
 * number into lvalue x.  Banked-register CPUs (SR.RB) keep the vector
 * in r2 of the alternate bank.
 *
 * (Statement-style legacy form; superseded by the expression-style
 * lookup_exception_vector() helper in asm/system_32.h.)
 */
#define lookup_exception_vector(x) \
__asm__ __volatile__ ("stc r2_bank, %0\n\t" : "=r" ((x)))
#else
/* Non-banked CPUs: the entry code passed the vector in r4. */
#define lookup_exception_vector(x) \
__asm__ __volatile__ ("mov r4, %0\n\t" : "=r" ((x)))
#endif
/* /*
* Handle various address error exceptions: * Handle various address error exceptions:
* - instruction address error: * - instruction address error:
...@@ -545,7 +537,7 @@ asmlinkage void do_address_error(struct pt_regs *regs, ...@@ -545,7 +537,7 @@ asmlinkage void do_address_error(struct pt_regs *regs,
/* Intentional ifdef */ /* Intentional ifdef */
#ifdef CONFIG_CPU_HAS_SR_RB #ifdef CONFIG_CPU_HAS_SR_RB
-	lookup_exception_vector(error_code);
+	error_code = lookup_exception_vector();
#endif #endif
oldfs = get_fs(); oldfs = get_fs();
...@@ -686,7 +678,7 @@ asmlinkage void do_reserved_inst(unsigned long r4, unsigned long r5, ...@@ -686,7 +678,7 @@ asmlinkage void do_reserved_inst(unsigned long r4, unsigned long r5,
} }
#endif #endif
-	lookup_exception_vector(error_code);
+	error_code = lookup_exception_vector();
local_irq_enable(); local_irq_enable();
CHK_REMOTE_DEBUG(regs); CHK_REMOTE_DEBUG(regs);
...@@ -759,7 +751,7 @@ asmlinkage void do_illegal_slot_inst(unsigned long r4, unsigned long r5, ...@@ -759,7 +751,7 @@ asmlinkage void do_illegal_slot_inst(unsigned long r4, unsigned long r5,
/* not a FPU inst. */ /* not a FPU inst. */
#endif #endif
-	lookup_exception_vector(inst);
+	inst = lookup_exception_vector();
local_irq_enable(); local_irq_enable();
CHK_REMOTE_DEBUG(regs); CHK_REMOTE_DEBUG(regs);
...@@ -774,7 +766,7 @@ asmlinkage void do_exception_error(unsigned long r4, unsigned long r5, ...@@ -774,7 +766,7 @@ asmlinkage void do_exception_error(unsigned long r4, unsigned long r5,
struct pt_regs *regs = RELOC_HIDE(&__regs, 0); struct pt_regs *regs = RELOC_HIDE(&__regs, 0);
long ex; long ex;
-	lookup_exception_vector(ex);
+	ex = lookup_exception_vector();
die_if_kernel("exception", regs, ex); die_if_kernel("exception", regs, ex);
} }
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment