Commit 7b84543c authored by Benjamin Berg, committed by Richard Weinberger

um: Always inline stub functions

The stub executable page is remapped to a different location in the
userland process. As these functions may be used by the stub, they
really need to be always inlined rather than permitting the compiler to
emit a function.
Signed-off-by: Benjamin Berg <benjamin@sipsolutions.net>
Signed-off-by: Richard Weinberger <richard@nod.at>
parent 6d64095e
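
For background (not part of the commit message): "static inline" is only a hint, so the compiler may still emit an out-of-line copy of a function, for example at -O0 or when it simply decides not to inline. The stub text is executed from a different mapping in the target process, so a call into such an out-of-line copy would jump to an address that is only valid in the original mapping. Below is a minimal sketch of the difference, using hypothetical helper names and the usual expansion of __always_inline:

/*
 * Sketch only: not taken from this commit.  __always_inline in the
 * kernel expands to roughly the following attribute form.
 */
#ifndef __always_inline
#define __always_inline inline __attribute__((__always_inline__))
#endif

/*
 * Hypothetical helper: with a plain inline hint the compiler MAY emit
 * this out of line, and a call to that copy from the relocated stub
 * page would land on an address that is unmapped there.
 */
static inline long stub_helper_hint(long x)
{
	return x + 1;
}

/*
 * Forced inlining: the body is expanded at every call site, so the
 * relocated stub code carries no call back into the original mapping.
 */
static __always_inline long stub_helper_forced(long x)
{
	return x + 1;
}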
arch/x86/um/shared/sysdep/stub_32.h

@@ -12,7 +12,7 @@
 #define STUB_MMAP_NR __NR_mmap2
 #define MMAP_OFFSET(o) ((o) >> UM_KERN_PAGE_SHIFT)
 
-static inline long stub_syscall0(long syscall)
+static __always_inline long stub_syscall0(long syscall)
 {
 	long ret;
 
@@ -21,7 +21,7 @@ static inline long stub_syscall0(long syscall)
 	return ret;
 }
 
-static inline long stub_syscall1(long syscall, long arg1)
+static __always_inline long stub_syscall1(long syscall, long arg1)
 {
 	long ret;
 
@@ -30,7 +30,7 @@ static inline long stub_syscall1(long syscall, long arg1)
 	return ret;
 }
 
-static inline long stub_syscall2(long syscall, long arg1, long arg2)
+static __always_inline long stub_syscall2(long syscall, long arg1, long arg2)
 {
 	long ret;
 
@@ -40,7 +40,8 @@ static inline long stub_syscall2(long syscall, long arg1, long arg2)
 	return ret;
 }
 
-static inline long stub_syscall3(long syscall, long arg1, long arg2, long arg3)
+static __always_inline long stub_syscall3(long syscall, long arg1, long arg2,
+					   long arg3)
 {
 	long ret;
 
@@ -50,8 +51,8 @@ static inline long stub_syscall3(long syscall, long arg1, long arg2, long arg3)
 	return ret;
 }
 
-static inline long stub_syscall4(long syscall, long arg1, long arg2, long arg3,
-				 long arg4)
+static __always_inline long stub_syscall4(long syscall, long arg1, long arg2,
+					   long arg3, long arg4)
 {
 	long ret;
 
@@ -61,8 +62,8 @@ static inline long stub_syscall4(long syscall, long arg1, long arg2, long arg3,
 	return ret;
 }
 
-static inline long stub_syscall5(long syscall, long arg1, long arg2, long arg3,
-				 long arg4, long arg5)
+static __always_inline long stub_syscall5(long syscall, long arg1, long arg2,
+					   long arg3, long arg4, long arg5)
 {
 	long ret;
 
@@ -72,12 +73,12 @@ static inline long stub_syscall5(long syscall, long arg1, long arg2, long arg3,
 	return ret;
 }
 
-static inline void trap_myself(void)
+static __always_inline void trap_myself(void)
 {
 	__asm("int3");
 }
 
-static inline void remap_stack_and_trap(void)
+static __always_inline void remap_stack_and_trap(void)
 {
 	__asm__ volatile (
 		"movl %%esp,%%ebx ;"
...
arch/x86/um/shared/sysdep/stub_64.h

@@ -16,7 +16,7 @@
 #define __syscall_clobber "r11","rcx","memory"
 #define __syscall "syscall"
 
-static inline long stub_syscall0(long syscall)
+static __always_inline long stub_syscall0(long syscall)
 {
 	long ret;
 
@@ -27,7 +27,7 @@ static inline long stub_syscall0(long syscall)
 	return ret;
 }
 
-static inline long stub_syscall2(long syscall, long arg1, long arg2)
+static __always_inline long stub_syscall2(long syscall, long arg1, long arg2)
 {
 	long ret;
 
@@ -38,7 +38,8 @@ static inline long stub_syscall2(long syscall, long arg1, long arg2)
 	return ret;
 }
 
-static inline long stub_syscall3(long syscall, long arg1, long arg2, long arg3)
+static __always_inline long stub_syscall3(long syscall, long arg1, long arg2,
+					   long arg3)
 {
 	long ret;
 
@@ -50,7 +51,7 @@ static inline long stub_syscall3(long syscall, long arg1, long arg2, long arg3)
 	return ret;
 }
 
-static inline long stub_syscall4(long syscall, long arg1, long arg2, long arg3,
-				 long arg4)
+static __always_inline long stub_syscall4(long syscall, long arg1, long arg2, long arg3,
+					   long arg4)
 {
 	long ret;
 
@@ -64,8 +65,8 @@ static inline long stub_syscall4(long syscall, long arg1, long arg2, long arg3,
 	return ret;
 }
 
-static inline long stub_syscall5(long syscall, long arg1, long arg2, long arg3,
-				 long arg4, long arg5)
+static __always_inline long stub_syscall5(long syscall, long arg1, long arg2,
+					   long arg3, long arg4, long arg5)
 {
 	long ret;
 
@@ -78,12 +79,12 @@ static inline long stub_syscall5(long syscall, long arg1, long arg2, long arg3,
 	return ret;
 }
 
-static inline void trap_myself(void)
+static __always_inline void trap_myself(void)
 {
 	__asm("int3");
 }
 
-static inline void remap_stack_and_trap(void)
+static __always_inline void remap_stack_and_trap(void)
 {
 	__asm__ volatile (
 		"movq %0,%%rax ;"
...
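
The function bodies are collapsed in the view above. Purely as an illustration (a sketch of the general pattern, not the exact lines of these headers, and with a made-up helper name), a 64-bit wrapper of this kind issues the syscall instruction through inline assembly, passing the syscall number in %rax and the first two arguments in %rdi and %rsi, and declaring the registers named by __syscall_clobber as clobbered:

/* Sketch only; assumes __always_inline as defined in the earlier example. */
static __always_inline long example_stub_syscall2(long nr, long arg1, long arg2)
{
	long ret;

	__asm__ volatile ("syscall"
		: "=a" (ret)				/* result comes back in %rax */
		: "0" (nr), "D" (arg1), "S" (arg2)	/* nr in %rax, args in %rdi/%rsi */
		: "r11", "rcx", "memory");		/* syscall clobbers %rcx and %r11 */

	return ret;
}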