Commit a6fa5ed0 authored by Andy Whitcroft's avatar Andy Whitcroft Committed by Kleber Sacilotto de Souza

Revert "x86/entry: Stuff RSB for entry to kernel for non-SMEP platform"

CVE-2017-5753 (revert embargoed)
CVE-2017-5715 (revert embargoed)

This reverts commit 47a07600.
Signed-off-by: default avatarAndy Whitcroft <apw@canonical.com>
Signed-off-by: default avatarKleber Sacilotto de Souza <kleber.souza@canonical.com>
parent 30fd5229
......@@ -175,7 +175,6 @@ GLOBAL(entry_SYSCALL_64_after_swapgs)
sub $(6*8), %rsp /* pt_regs->bp, bx, r12-15 not saved */
ENABLE_IBRS
STUFF_RSB
testl $_TIF_WORK_SYSCALL_ENTRY, ASM_THREAD_INFO(TI_flags, %rsp, SIZEOF_PTREGS)
jnz tracesys
......@@ -536,12 +535,6 @@ END(irq_entries_start)
ALLOC_PT_GPREGS_ON_STACK
SAVE_C_REGS
SAVE_EXTRA_REGS
/*
* Have to do stuffing before encoding frame pointer.
* Could add some unnecessary RSB clearing if coming
* from kernel for non-SMEP platform.
*/
STUFF_RSB
testb $3, CS(%rsp)
jz 1f
......@@ -1054,10 +1047,6 @@ ENTRY(paranoid_entry)
cld
SAVE_C_REGS 8
SAVE_EXTRA_REGS 8
/*
* Do the stuffing unconditionally from user/kernel to be safe
*/
STUFF_RSB
movl $1, %ebx
movl $MSR_GS_BASE, %ecx
rdmsr
......@@ -1141,7 +1130,6 @@ ENTRY(error_entry)
* the kernel CR3 here.
*/
SWITCH_KERNEL_CR3
STUFF_RSB
xorl %ebx, %ebx
testb $3, CS+8(%rsp)
jz .Lerror_kernelspace
......
......@@ -101,7 +101,6 @@ ENTRY(entry_SYSENTER_compat)
cld
ENABLE_IBRS
STUFF_RSB
/*
* Sysenter doesn't filter flags, so we need to clear NT
......@@ -202,7 +201,6 @@ ENTRY(entry_SYSCALL_compat)
pushq %r8 /* pt_regs->r15 = 0 */
ENABLE_IBRS
STUFF_RSB
/*
* User mode is traced as though IRQs are on, and SYSENTER
......@@ -314,7 +312,6 @@ ENTRY(entry_INT80_compat)
cld
ENABLE_IBRS
STUFF_RSB
/*
* User mode is traced as though IRQs are on, and the interrupt
......
......@@ -35,73 +35,6 @@
popq %rdx; \
popq %rcx; \
popq %rax
/*
 * __ASM_STUFF_RSB: fill the CPU's Return Stack Buffer (RSB) with 32
 * benign entries by executing a chain of 32 CALLs that never RET.
 * Each CALL pushes a return address both into the hardware RSB and
 * onto the stack; since none of the matching RETs ever execute, the
 * RSB is left populated with these harmless targets.  The final
 * "add $(32*8), %rsp" reclaims the 32 stacked 8-byte return addresses
 * in one step, restoring %rsp to its value on entry.
 *
 * NOTE(review): the PAUSE between calls is presumably a speculation
 * trap so that any mispredicted return from the RSB lands on a benign
 * instruction — confirm against the upstream RSB-stuffing mitigation
 * patches (Spectre v2 / CVE-2017-5715 era).
 *
 * Clobbers: nothing but %rsp transiently; flags are modified by the
 * trailing add.
 */
#define __ASM_STUFF_RSB \
call 1f; \
pause; \
1: call 2f; \
pause; \
2: call 3f; \
pause; \
3: call 4f; \
pause; \
4: call 5f; \
pause; \
5: call 6f; \
pause; \
6: call 7f; \
pause; \
7: call 8f; \
pause; \
8: call 9f; \
pause; \
9: call 10f; \
pause; \
10: call 11f; \
pause; \
11: call 12f; \
pause; \
12: call 13f; \
pause; \
13: call 14f; \
pause; \
14: call 15f; \
pause; \
15: call 16f; \
pause; \
16: call 17f; \
pause; \
17: call 18f; \
pause; \
18: call 19f; \
pause; \
19: call 20f; \
pause; \
20: call 21f; \
pause; \
21: call 22f; \
pause; \
22: call 23f; \
pause; \
23: call 24f; \
pause; \
24: call 25f; \
pause; \
25: call 26f; \
pause; \
26: call 27f; \
pause; \
27: call 28f; \
pause; \
28: call 29f; \
pause; \
29: call 30f; \
pause; \
30: call 31f; \
pause; \
31: call 32f; \
pause; \
32: \
add $(32*8), %rsp;
.macro ENABLE_IBRS
ALTERNATIVE "", __stringify(__ASM_ENABLE_IBRS), X86_FEATURE_SPEC_CTRL
......@@ -115,9 +48,5 @@ ALTERNATIVE "", __stringify(__ASM_ENABLE_IBRS_CLOBBER), X86_FEATURE_SPEC_CTRL
ALTERNATIVE "", __stringify(__ASM_DISABLE_IBRS), X86_FEATURE_SPEC_CTRL
.endm
/*
 * STUFF_RSB: stuff the Return Stack Buffer on kernel entry, but only
 * on CPUs that lack SMEP.  ALTERNATIVE(old, new, feature) runs the old
 * sequence by default and patches in the new one when the feature bit
 * is set; here the default is __ASM_STUFF_RSB and the replacement is
 * empty, so the stuffing is skipped on X86_FEATURE_SMEP hardware
 * (matching the original commit title: "Stuff RSB for entry to kernel
 * for non-SMEP platform").
 */
.macro STUFF_RSB
ALTERNATIVE __stringify(__ASM_STUFF_RSB), "", X86_FEATURE_SMEP
.endm
#endif /* __ASSEMBLY__ */
#endif /* _ASM_X86_SPEC_CTRL_H */
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment