Commit 613036d9 authored by Cyril Bur, committed by Greg Kroah-Hartman

powerpc: Always restore FPU/VEC/VSX if hardware transactional memory in use

commit dc16b553 upstream.

Comment from arch/powerpc/kernel/process.c:967:
 If userspace is inside a transaction (whether active or
 suspended) and FP/VMX/VSX instructions have ever been enabled
 inside that transaction, then we have to keep them enabled
 and keep the FP/VMX/VSX state loaded while ever the transaction
 continues.  The reason is that if we didn't, and subsequently
 got a FP/VMX/VSX unavailable interrupt inside a transaction,
 we don't know whether it's the same transaction, and thus we
 don't know which of the checkpointed state and the transactional
 state to use.
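
For context, "active" here means the MSR transaction-state (TS) bits mark the thread as either transactional or suspended. A minimal sketch of that test, paraphrasing the MSR_TM_ACTIVE() macro from arch/powerpc/include/asm/reg.h (mask values abbreviated for a 64-bit MSR and illustrative only; the kernel gets bool from <linux/types.h>):

#include <stdbool.h>

/* Illustrative only: the real definitions live in asm/reg.h. */
#define MSR_TS_S	(1UL << 33)	/* transaction suspended */
#define MSR_TS_T	(1UL << 34)	/* transaction transactional */
#define MSR_TS_MASK	(MSR_TS_T | MSR_TS_S)
#define MSR_TM_ACTIVE(x)	(((x) & MSR_TS_MASK) != 0)

/* Helper introduced by this patch: true if the MSR says a transaction
 * is active (transactional) or suspended. */
static inline bool msr_tm_active(unsigned long msr)
{
	return MSR_TM_ACTIVE(msr);
}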

restore_math(), restore_fp() and restore_altivec() currently may not
restore the registers. It doesn't appear that this is more serious
than a performance penalty. If the math registers aren't restored the
userspace thread will still run with the facility disabled.
Userspace will not be able to read invalid values. On the first access
it will take a facility unavailable exception and the kernel will
detect an active transaction, at which point it will abort the
transaction. There is the possibility of a pathological case
preventing any progress by transactions; however, transactions
are never guaranteed to make progress.
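
To make the failure mode concrete, here is a hypothetical userspace sketch (not part of the patch) using GCC's PowerPC HTM builtins; build with gcc -O2 -mhtm on a TM-capable POWER8 or later. If the kernel returns to this thread with the FP facility disabled, the first FP instruction inside the transaction takes a facility unavailable interrupt and the transaction is doomed; the retry limit and fallback below are illustrative:

/* Hypothetical illustration, not from the patch. */
static double fp_work(double x)
{
	for (int tries = 0; tries < 100; tries++) {
		if (__builtin_tbegin(0)) {
			/* First FP access inside the transaction: if MSR[FP]
			 * was left disabled on return to userspace, this traps
			 * and the kernel aborts the transaction rather than
			 * guess between checkpointed and transactional state. */
			x = x * 2.0 + 1.0;
			__builtin_tend(0);
			return x;		/* transaction committed */
		}
		/* Transaction failed; retry.  Forward progress is never
		 * guaranteed, so real code needs a fallback path. */
	}
	return x * 2.0 + 1.0;			/* non-transactional fallback */
}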

Fixes: 70fe3d98 ("powerpc: Restore FPU/VEC/VSX if previously used")
Signed-off-by: Cyril Bur <cyrilbur@gmail.com>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
parent d7baac24
@@ -88,7 +88,13 @@ static void check_if_tm_restore_required(struct task_struct *tsk)
 		set_thread_flag(TIF_RESTORE_TM);
 	}
 }
+
+static inline bool msr_tm_active(unsigned long msr)
+{
+	return MSR_TM_ACTIVE(msr);
+}
 #else
+static inline bool msr_tm_active(unsigned long msr) { return false; }
 static inline void check_if_tm_restore_required(struct task_struct *tsk) { }
 #endif /* CONFIG_PPC_TRANSACTIONAL_MEM */
@@ -208,7 +214,7 @@ void enable_kernel_fp(void)
 EXPORT_SYMBOL(enable_kernel_fp);
 
 static int restore_fp(struct task_struct *tsk)
 {
-	if (tsk->thread.load_fp) {
+	if (tsk->thread.load_fp || msr_tm_active(tsk->thread.regs->msr)) {
 		load_fp_state(&current->thread.fp_state);
 		current->thread.load_fp++;
 		return 1;
@@ -278,7 +284,8 @@ EXPORT_SYMBOL_GPL(flush_altivec_to_thread);
 
 static int restore_altivec(struct task_struct *tsk)
 {
-	if (cpu_has_feature(CPU_FTR_ALTIVEC) && tsk->thread.load_vec) {
+	if (cpu_has_feature(CPU_FTR_ALTIVEC) &&
+		(tsk->thread.load_vec || msr_tm_active(tsk->thread.regs->msr))) {
 		load_vr_state(&tsk->thread.vr_state);
 		tsk->thread.used_vr = 1;
 		tsk->thread.load_vec++;
@@ -464,7 +471,8 @@ void restore_math(struct pt_regs *regs)
 {
 	unsigned long msr;
 
-	if (!current->thread.load_fp && !loadvec(current->thread))
+	if (!msr_tm_active(regs->msr) &&
+		!current->thread.load_fp && !loadvec(current->thread))
 		return;
 
 	msr = regs->msr;
@@ -983,6 +991,13 @@ void restore_tm_state(struct pt_regs *regs)
 	msr_diff = current->thread.ckpt_regs.msr & ~regs->msr;
 	msr_diff &= MSR_FP | MSR_VEC | MSR_VSX;
 
+	/* Ensure that restore_math() will restore */
+	if (msr_diff & MSR_FP)
+		current->thread.load_fp = 1;
+#ifdef CONFIG_ALTIVEC
+	if (cpu_has_feature(CPU_FTR_ALTIVEC) && msr_diff & MSR_VEC)
+		current->thread.load_vec = 1;
+#endif
 	restore_math(regs);
 
 	regs->msr |= msr_diff;