Commit 1d293758 authored by Thomas Gleixner, committed by Peter Zijlstra

x86/paravirt: Properly align PV functions

Ensure inline asm functions are consistently aligned with compiler-generated
and SYM_FUNC_START*() functions.
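
As an illustration of the idea (a minimal sketch; the macro bodies, the
16-byte boundary and the my_aligned_stub name are assumptions for this
example, not the kernel's actual definitions), ASM_FUNC_ALIGN is taken to
stringify the same padding directive the compiler emits for function
entries, so a function defined in inline asm starts on the same boundary:

  /* Illustrative only: the real boundary comes from the arch headers. */
  #define __stringify_1(x...)  #x
  #define __stringify(x...)    __stringify_1(x)

  /* Assumed expansion: same alignment/padding a compiled function gets. */
  #define ASM_FUNC_ALIGN  __stringify(.balign 16, 0x90;)

  /* Hypothetical stub showing where the directive slots in. */
  asm(".pushsection .text, \"ax\"\n"
      ".globl my_aligned_stub\n"
      ".type my_aligned_stub, @function\n"
      ASM_FUNC_ALIGN
      "my_aligned_stub:\n\t"
      "ret\n\t"
      ".size my_aligned_stub, . - my_aligned_stub\n"
      ".popsection");

The hunks below apply that pattern to the PV thunk macro, the queued-spinlock
unlock slow path, the KVM vcpu_is_preempted thunk and the paravirt nop/ret0
stubs.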
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Juergen Gross <jgross@suse.com>
Link: https://lore.kernel.org/r/20220915111144.038540008@infradead.org
parent 1934dc9a
@@ -665,6 +665,7 @@ bool __raw_callee_save___native_vcpu_is_preempted(long cpu);
 asm(".pushsection " section ", \"ax\";" \
 ".globl " PV_THUNK_NAME(func) ";" \
 ".type " PV_THUNK_NAME(func) ", @function;" \
+ASM_FUNC_ALIGN \
 PV_THUNK_NAME(func) ":" \
 ASM_ENDBR \
 FRAME_BEGIN \
@@ -40,7 +40,7 @@ __PV_CALLEE_SAVE_REGS_THUNK(__pv_queued_spin_unlock_slowpath, ".spinlock.text");
 asm (".pushsection .spinlock.text;"
 ".globl " PV_UNLOCK ";"
 ".type " PV_UNLOCK ", @function;"
-".align 4,0x90;"
+ASM_FUNC_ALIGN
 PV_UNLOCK ": "
 ASM_ENDBR
 FRAME_BEGIN
@@ -802,6 +802,7 @@ asm(
 ".pushsection .text;"
 ".global __raw_callee_save___kvm_vcpu_is_preempted;"
 ".type __raw_callee_save___kvm_vcpu_is_preempted, @function;"
+ASM_FUNC_ALIGN
 "__raw_callee_save___kvm_vcpu_is_preempted:"
 ASM_ENDBR
 "movq __per_cpu_offset(,%rdi,8), %rax;"
@@ -40,6 +40,7 @@
 extern void _paravirt_nop(void);
 asm (".pushsection .entry.text, \"ax\"\n"
 ".global _paravirt_nop\n"
+ASM_FUNC_ALIGN
 "_paravirt_nop:\n\t"
 ASM_ENDBR
 ASM_RET
@@ -50,6 +51,7 @@ asm (".pushsection .entry.text, \"ax\"\n"
 /* stub always returning 0. */
 asm (".pushsection .entry.text, \"ax\"\n"
 ".global paravirt_ret0\n"
+ASM_FUNC_ALIGN
 "paravirt_ret0:\n\t"
 ASM_ENDBR
 "xor %" _ASM_AX ", %" _ASM_AX ";\n\t"