Commit f4e3de75 authored by Heiko Carstens

s390/fpu: provide and use lfpc, sfpc, and stfpc inline assemblies

Instead of open-coding lfpc, sfpc, and stfpc inline assemblies at
several locations, provide an fpu_* function for each instruction, and
use these instead of the open-coded variants.
Reviewed-by: Claudio Imbrenda <imbrenda@linux.ibm.com>
Signed-off-by: Heiko Carstens <hca@linux.ibm.com>
parent 88d8136a
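To illustrate the change described above (a sketch only, not part of the diff below): an open-coded inline assembly such as

	asm volatile("stfpc %0" : "=Q" (fpc));	/* open-coded: store FPC to memory */

is replaced by a call to the corresponding new helper:

	fpu_stfpc(&fpc);	/* same effect, via the new inline function */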
@@ -45,6 +45,15 @@ static __always_inline void fpu_ld(unsigned short fpr, freg_t *reg)
 		     : "memory");
 }
 
+static __always_inline void fpu_lfpc(unsigned int *fpc)
+{
+	instrument_read(fpc, sizeof(*fpc));
+	asm volatile("lfpc %[fpc]"
+		     :
+		     : [fpc] "Q" (*fpc)
+		     : "memory");
+}
+
 /**
  * fpu_lfpc_safe - Load floating point control register safely.
  * @fpc: new value for floating point control register
@@ -82,5 +91,22 @@ static __always_inline void fpu_std(unsigned short fpr, freg_t *reg)
 		     : "memory");
 }
 
+static __always_inline void fpu_sfpc(unsigned int fpc)
+{
+	asm volatile("sfpc %[fpc]"
+		     :
+		     : [fpc] "d" (fpc)
+		     : "memory");
+}
+
+static __always_inline void fpu_stfpc(unsigned int *fpc)
+{
+	instrument_write(fpc, sizeof(*fpc));
+	asm volatile("stfpc %[fpc]"
+		     : [fpc] "=Q" (*fpc)
+		     :
+		     : "memory");
+}
+
 #endif /* __ASSEMBLY__ */
 #endif /* __ASM_S390_FPU_INSN_H */
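A minimal usage sketch of the three new helpers (illustrative only, not part of this commit; the variable name saved_fpc is hypothetical):

	unsigned int saved_fpc;

	fpu_stfpc(&saved_fpc);	/* store the current floating point control register */
	fpu_sfpc(0);		/* load zero into the floating point control register */
	/* ... code that runs with the cleared FPC ... */
	fpu_lfpc(&saved_fpc);	/* restore the previously saved value */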
@@ -17,10 +17,8 @@ void __kernel_fpu_begin(struct kernel_fpu *state, u32 flags)
 	 * in use by the previous context.
 	 */
 	flags &= state->mask;
-	if (flags & KERNEL_FPC) {
-		/* Save floating point control */
-		asm volatile("stfpc %0" : "=Q" (state->fpc));
-	}
+	if (flags & KERNEL_FPC)
+		fpu_stfpc(&state->fpc);
 	if (!cpu_has_vx()) {
 		if (flags & KERNEL_VXR_LOW)
 			save_fp_regs(state->fprs);
@@ -80,10 +78,8 @@ void __kernel_fpu_end(struct kernel_fpu *state, u32 flags)
 	 * current context.
 	 */
 	flags &= state->mask;
-	if (flags & KERNEL_FPC) {
-		/* Restore floating-point controls */
-		asm volatile("lfpc %0" : : "Q" (state->fpc));
-	}
+	if (flags & KERNEL_FPC)
+		fpu_lfpc(&state->fpc);
 	if (!cpu_has_vx()) {
 		if (flags & KERNEL_VXR_LOW)
 			load_fp_regs(state->fprs);
@@ -176,7 +172,7 @@ void save_fpu_regs(void)
 	state = &current->thread.fpu;
 	regs = current->thread.fpu.regs;
-	asm volatile("stfpc %0" : "=Q" (state->fpc));
+	fpu_stfpc(&state->fpc);
 	if (likely(cpu_has_vx())) {
 		asm volatile("lgr 1,%0\n"
 			     "VSTM 0,15,0,1\n"
@@ -191,7 +191,7 @@ int copy_thread(struct task_struct *p, const struct kernel_clone_args *args)
 void execve_tail(void)
 {
 	current->thread.fpu.fpc = 0;
-	asm volatile("sfpc %0" : : "d" (0));
+	fpu_sfpc(0);
 }
 
 struct task_struct *__switch_to(struct task_struct *prev, struct task_struct *next)