Commit 7bce586f authored by Russell King

[ARM] Ensure gcc does not assume asm() is conditional.

Prevent all gcc versions from assuming that assembly within asm() may
be made conditional.
parent 8b0147ab
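On ARM, most instructions can be conditionally executed, and gcc is allowed to predicate the instruction inside an asm() when it folds a surrounding branch into conditional execution. An asm() that clobbers the condition codes cannot be predicated, so adding "cc" to the clobber list forces gcc to emit the assembly unconditionally. A minimal sketch of the pattern applied throughout the patch (the helper name is illustrative, not from the patch):

	/* Illustrative only: read the CP15 translation table base.
	 * Without the "cc" clobber an older ARM gcc could predicate the
	 * mrc (e.g. emit "mrcne") when it turns an if () around the
	 * caller into conditional execution. */
	static inline unsigned long read_ttb(void)
	{
		unsigned long pg;

		__asm__("mrc	p15, 0, %0, c2, c0, 0"
			: "=r" (pg) : : "cc");	/* "cc": never conditionalise */
		return pg;
	}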
@@ -135,7 +135,7 @@ extern const struct processor sa110_processor_functions;
 ({ \
 	unsigned long pg; \
 	__asm__("mrc p15, 0, %0, c2, c0, 0" \
-		: "=r" (pg)); \
+		: "=r" (pg) : : "cc"); \
 	pg &= ~0x3fff; \
 	(pgd_t *)phys_to_virt(pg); \
 })
@@ -76,7 +76,7 @@ extern volatile void cpu_reset(unsigned long addr);
 ({ \
 	unsigned long pg; \
 	__asm__("mrc p15, 0, %0, c2, c0, 0" \
-		: "=r" (pg)); \
+		: "=r" (pg) : : "cc"); \
 	pg &= ~0x3fff; \
 	(pgd_t *)phys_to_virt(pg); \
 })
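The two hunks above patch the same pgd-fetch macro in two processor headers: the mrc reads the CP15 translation table base, the low 14 bits are masked off (the ARM level-1 page table is 16K aligned), and phys_to_virt() converts the physical base into a kernel pointer. Only the clobber list changes, so a caller of the macro (name assumed from context, shown for illustration) is unaffected:

	pgd_t *base = cpu_get_pgd();	/* statement expression still yields the pgd_t * */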
@@ -77,11 +77,14 @@ typedef struct {
 #endif

 #ifndef CONFIG_SMP
+/*
+ * Some compilers get the use of "%?" wrong in the asm below.
+ */
 #define irq_exit() \
 	do { \
 		preempt_count() -= IRQ_EXIT_OFFSET; \
 		if (!in_interrupt() && softirq_pending(smp_processor_id())) \
-			__asm__("bl%? __do_softirq": : : "lr");	/* out of line */\
+			__asm__("bl __do_softirq": : : "lr", "cc");	/* out of line */\
 		preempt_enable_no_resched(); \
 	} while (0)
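Besides the new clobbers, the irq_exit() hunk drops the `%?` from the branch. In ARM gcc, `%?` in an asm template expands to the condition code under which the compiler has predicated the instruction; the added comment records that some compilers get this wrong, so the call becomes an unconditional `bl`, and the asm clobbers both `lr` (overwritten with the return address by `bl`) and `cc`. A sketch of the difference in generated code (illustrative, not from the patch):

	@ old template: gcc could predicate the call, e.g.
	@	blne	__do_softirq
	@ new template: always
	@	bl	__do_softirq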
@@ -15,12 +15,16 @@
 #define set_cr(x) \
 	__asm__ __volatile__( \
 	"mcr p15, 0, %0, c1, c0, 0 @ set CR" \
-	: : "r" (x))
+	: : "r" (x) : "cc")

-#define get_cr(x) \
+#define get_cr() \
+	({ \
+	unsigned int __val; \
 	__asm__ __volatile__( \
 	"mrc p15, 0, %0, c1, c0, 0 @ get CR" \
-	: "=r" (x))
+	: "=r" (__val) : : "cc"); \
+	__val; \
+	})

 #define CR_M (1 << 0) /* MMU enable */
 #define CR_A (1 << 1) /* Alignment abort enable */
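get_cr() also changes shape: the old macro wrote the register through its argument, while the new one is a GCC statement expression whose final expression, __val, becomes the macro's value. Callers must change accordingly; a hypothetical before/after (caller code assumed, not part of this patch):

	unsigned int cr;
	get_cr(cr);	/* old interface: out-parameter */
	cr = get_cr();	/* new interface: the expression yields __val */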
@@ -47,16 +51,6 @@ extern unsigned long cr_alignment; /* defined in entry-armv.S */
 #define vectors_base() (0)
 #endif

-/*
- * Save the current interrupt enable state.
- */
-#define local_save_flags(x) \
-	({ \
-	__asm__ __volatile__( \
-	"mrs %0, cpsr @ local_save_flags" \
-	: "=r" (x) : : "memory"); \
-	})
-
 /*
  * Save the current interrupt enable state & disable IRQs
  */
@@ -70,7 +64,7 @@ extern unsigned long cr_alignment; /* defined in entry-armv.S */
 "	msr cpsr_c, %1" \
 	: "=r" (x), "=r" (temp) \
 	: \
-	: "memory"); \
+	: "memory", "cc"); \
 	})

 /*
@@ -85,7 +79,7 @@ extern unsigned long cr_alignment; /* defined in entry-armv.S */
 "	msr cpsr_c, %0" \
 	: "=r" (temp) \
 	: \
-	: "memory"); \
+	: "memory", "cc"); \
 	})

 /*
@@ -100,7 +94,7 @@ extern unsigned long cr_alignment; /* defined in entry-armv.S */
 "	msr cpsr_c, %0" \
 	: "=r" (temp) \
 	: \
-	: "memory"); \
+	: "memory", "cc"); \
 	})

 /*
@@ -115,7 +109,7 @@ extern unsigned long cr_alignment; /* defined in entry-armv.S */
 "	msr cpsr_c, %0" \
 	: "=r" (temp) \
 	: \
-	: "memory"); \
+	: "memory", "cc"); \
 	})

 /*
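Judging by the context lines, the four preceding hunks apply the same one-line change to local_irq_save() and the IRQ/FIQ enable/disable macros: each reads the CPSR, modifies it in temp, and writes it back with msr cpsr_c. Since msr cpsr_c touches only the control field and not the flag bits, the "cc" clobber is there solely to keep gcc from predicating the sequence, in line with the commit message.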
@@ -130,7 +124,17 @@ extern unsigned long cr_alignment; /* defined in entry-armv.S */
 "	msr cpsr_c, %0" \
 	: "=r" (temp) \
 	: \
-	: "memory"); \
+	: "memory", "cc"); \
 	})

+/*
+ * Save the current interrupt enable state.
+ */
+#define local_save_flags(x) \
+	({ \
+	__asm__ __volatile__( \
+	"mrs %0, cpsr @ local_save_flags" \
+	: "=r" (x) : : "memory", "cc"); \
+	})
+
 /*
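The earlier removal at @@ -47,16 +51,6 @@ and this addition are a move, not a deletion: local_save_flags() reappears lower in the file with "cc" joining "memory" in its clobber list, matching the other CPSR accessors. Its usage is unchanged (illustrative snippet, not from the patch):

	unsigned long flags;
	local_save_flags(flags);	/* flags now holds the current CPSR */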
@@ -141,7 +145,7 @@ extern unsigned long cr_alignment; /* defined in entry-armv.S */
 	"msr cpsr_c, %0 @ local_irq_restore\n" \
 	: \
 	: "r" (x) \
-	: "memory")
+	: "memory", "cc")

 #if defined(CONFIG_CPU_SA1100) || defined(CONFIG_CPU_SA110)
 /*
@@ -186,12 +190,12 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
 	case 1:	__asm__ __volatile__ ("swpb %0, %1, [%2]"
 			: "=&r" (ret)
 			: "r" (x), "r" (ptr)
-			: "memory");
+			: "memory", "cc");
 		break;
 	case 4:	__asm__ __volatile__ ("swp %0, %1, [%2]"
 			: "=&r" (ret)
 			: "r" (x), "r" (ptr)
-			: "memory");
+			: "memory", "cc");
 		break;
 #endif
 	default: __bad_xchg(ptr, size), ret = 0;
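Both arms of the switch in __xchg() use the ARM swpb and swp instructions, which atomically exchange a register with a memory location (byte and word wide respectively); the "cc" clobber again prevents gcc from predicating them. A hypothetical call through the usual xchg() wrapper (variable name illustrative):

	unsigned long old = xchg(&word, 1);	/* atomically store 1, return the previous value */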