Commit c5386c20 authored by Joe Perches, committed by Ingo Molnar

include/asm-x86/system.h: checkpatch cleanups - formatting only

Signed-off-by: Joe Perches <joe@perches.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
parent 26b7fcc4
...@@ -38,35 +38,33 @@ do { \ ...@@ -38,35 +38,33 @@ do { \
*/ \ */ \
unsigned long ebx, ecx, edx, esi, edi; \ unsigned long ebx, ecx, edx, esi, edi; \
\ \
asm volatile( \ asm volatile("pushfl\n\t" /* save flags */ \
"pushfl \n\t" /* save flags */ \ "pushl %%ebp\n\t" /* save EBP */ \
"pushl %%ebp \n\t" /* save EBP */ \ "movl %%esp,%[prev_sp]\n\t" /* save ESP */ \
"movl %%esp,%[prev_sp] \n\t" /* save ESP */ \ "movl %[next_sp],%%esp\n\t" /* restore ESP */ \
"movl %[next_sp],%%esp \n\t" /* restore ESP */ \ "movl $1f,%[prev_ip]\n\t" /* save EIP */ \
"movl $1f,%[prev_ip] \n\t" /* save EIP */ \ "pushl %[next_ip]\n\t" /* restore EIP */ \
"pushl %[next_ip] \n\t" /* restore EIP */ \ "jmp __switch_to\n" /* regparm call */ \
"jmp __switch_to \n" /* regparm call */ \ "1:\t" \
"1: \t" \ "popl %%ebp\n\t" /* restore EBP */ \
"popl %%ebp \n\t" /* restore EBP */ \ "popfl\n" /* restore flags */ \
"popfl \n" /* restore flags */ \
\ \
/* output parameters */ \ /* output parameters */ \
: [prev_sp] "=m" (prev->thread.sp), \ : [prev_sp] "=m" (prev->thread.sp), \
[prev_ip] "=m" (prev->thread.ip), \ [prev_ip] "=m" (prev->thread.ip), \
"=a" (last), \ "=a" (last), \
\ \
/* clobbered output registers: */ \ /* clobbered output registers: */ \
"=b" (ebx), "=c" (ecx), "=d" (edx), \ "=b" (ebx), "=c" (ecx), "=d" (edx), \
"=S" (esi), "=D" (edi) \ "=S" (esi), "=D" (edi) \
\ \
/* input parameters: */ \ /* input parameters: */ \
: [next_sp] "m" (next->thread.sp), \ : [next_sp] "m" (next->thread.sp), \
[next_ip] "m" (next->thread.ip), \ [next_ip] "m" (next->thread.ip), \
\ \
/* regparm parameters for __switch_to(): */ \ /* regparm parameters for __switch_to(): */ \
[prev] "a" (prev), \ [prev] "a" (prev), \
[next] "d" (next) \ [next] "d" (next)); \
); \
} while (0) } while (0)
/* /*
...@@ -146,35 +144,34 @@ extern void load_gs_index(unsigned); ...@@ -146,35 +144,34 @@ extern void load_gs_index(unsigned);
*/ */
#define loadsegment(seg, value) \ #define loadsegment(seg, value) \
asm volatile("\n" \ asm volatile("\n" \
"1:\t" \ "1:\t" \
"movl %k0,%%" #seg "\n" \ "movl %k0,%%" #seg "\n" \
"2:\n" \ "2:\n" \
".section .fixup,\"ax\"\n" \ ".section .fixup,\"ax\"\n" \
"3:\t" \ "3:\t" \
"movl %k1, %%" #seg "\n\t" \ "movl %k1, %%" #seg "\n\t" \
"jmp 2b\n" \ "jmp 2b\n" \
".previous\n" \ ".previous\n" \
_ASM_EXTABLE(1b,3b) \ _ASM_EXTABLE(1b,3b) \
: :"r" (value), "r" (0)) : :"r" (value), "r" (0))
/* /*
* Save a segment register away * Save a segment register away
*/ */
#define savesegment(seg, value) \ #define savesegment(seg, value) \
asm volatile("mov %%" #seg ",%0":"=rm" (value)) asm volatile("mov %%" #seg ",%0":"=rm" (value))
static inline unsigned long get_limit(unsigned long segment) static inline unsigned long get_limit(unsigned long segment)
{ {
unsigned long __limit; unsigned long __limit;
__asm__("lsll %1,%0" asm("lsll %1,%0" : "=r" (__limit) : "r" (segment));
:"=r" (__limit):"r" (segment)); return __limit + 1;
return __limit+1;
} }
static inline void native_clts(void) static inline void native_clts(void)
{ {
asm volatile ("clts"); asm volatile("clts");
} }
/* /*
...@@ -189,43 +186,43 @@ static unsigned long __force_order; ...@@ -189,43 +186,43 @@ static unsigned long __force_order;
static inline unsigned long native_read_cr0(void) static inline unsigned long native_read_cr0(void)
{ {
unsigned long val; unsigned long val;
asm volatile("mov %%cr0,%0\n\t" :"=r" (val), "=m" (__force_order)); asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
return val; return val;
} }
static inline void native_write_cr0(unsigned long val) static inline void native_write_cr0(unsigned long val)
{ {
asm volatile("mov %0,%%cr0": :"r" (val), "m" (__force_order)); asm volatile("mov %0,%%cr0": : "r" (val), "m" (__force_order));
} }
static inline unsigned long native_read_cr2(void) static inline unsigned long native_read_cr2(void)
{ {
unsigned long val; unsigned long val;
asm volatile("mov %%cr2,%0\n\t" :"=r" (val), "=m" (__force_order)); asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
return val; return val;
} }
static inline void native_write_cr2(unsigned long val) static inline void native_write_cr2(unsigned long val)
{ {
asm volatile("mov %0,%%cr2": :"r" (val), "m" (__force_order)); asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
} }
static inline unsigned long native_read_cr3(void) static inline unsigned long native_read_cr3(void)
{ {
unsigned long val; unsigned long val;
asm volatile("mov %%cr3,%0\n\t" :"=r" (val), "=m" (__force_order)); asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
return val; return val;
} }
static inline void native_write_cr3(unsigned long val) static inline void native_write_cr3(unsigned long val)
{ {
asm volatile("mov %0,%%cr3": :"r" (val), "m" (__force_order)); asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
} }
static inline unsigned long native_read_cr4(void) static inline unsigned long native_read_cr4(void)
{ {
unsigned long val; unsigned long val;
asm volatile("mov %%cr4,%0\n\t" :"=r" (val), "=m" (__force_order)); asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
return val; return val;
} }
...@@ -237,7 +234,7 @@ static inline unsigned long native_read_cr4_safe(void) ...@@ -237,7 +234,7 @@ static inline unsigned long native_read_cr4_safe(void)
#ifdef CONFIG_X86_32 #ifdef CONFIG_X86_32
asm volatile("1: mov %%cr4, %0\n" asm volatile("1: mov %%cr4, %0\n"
"2:\n" "2:\n"
_ASM_EXTABLE(1b,2b) _ASM_EXTABLE(1b, 2b)
: "=r" (val), "=m" (__force_order) : "0" (0)); : "=r" (val), "=m" (__force_order) : "0" (0));
#else #else
val = native_read_cr4(); val = native_read_cr4();
...@@ -247,7 +244,7 @@ static inline unsigned long native_read_cr4_safe(void) ...@@ -247,7 +244,7 @@ static inline unsigned long native_read_cr4_safe(void)
static inline void native_write_cr4(unsigned long val) static inline void native_write_cr4(unsigned long val)
{ {
asm volatile("mov %0,%%cr4": :"r" (val), "m" (__force_order)); asm volatile("mov %0,%%cr4": : "r" (val), "m" (__force_order));
} }
#ifdef CONFIG_X86_64 #ifdef CONFIG_X86_64
...@@ -268,6 +265,7 @@ static inline void native_wbinvd(void) ...@@ -268,6 +265,7 @@ static inline void native_wbinvd(void)
{ {
asm volatile("wbinvd": : :"memory"); asm volatile("wbinvd": : :"memory");
} }
#ifdef CONFIG_PARAVIRT #ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h> #include <asm/paravirt.h>
#else #else
...@@ -300,7 +298,7 @@ static inline void clflush(volatile void *__p) ...@@ -300,7 +298,7 @@ static inline void clflush(volatile void *__p)
asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p)); asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
} }
#define nop() __asm__ __volatile__ ("nop") #define nop() asm volatile ("nop")
void disable_hlt(void); void disable_hlt(void);
void enable_hlt(void); void enable_hlt(void);
...@@ -399,7 +397,7 @@ void default_idle(void); ...@@ -399,7 +397,7 @@ void default_idle(void);
# define smp_wmb() barrier() # define smp_wmb() barrier()
#endif #endif
#define smp_read_barrier_depends() read_barrier_depends() #define smp_read_barrier_depends() read_barrier_depends()
#define set_mb(var, value) do { (void) xchg(&var, value); } while (0) #define set_mb(var, value) do { (void)xchg(&var, value); } while (0)
#else #else
#define smp_mb() barrier() #define smp_mb() barrier()
#define smp_rmb() barrier() #define smp_rmb() barrier()
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment