Commit 6b60f95b authored by Richard Henderson

[ALPHA] Update readb and friends for __iomem.

parent d34bd869
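For context on the shape of the change: every chipset accessor that used to take a raw `unsigned long` address now takes a `const volatile void __iomem *` (writes take `volatile void __iomem *`) and converts back to an integer internally, and the ioremap helpers return `(void __iomem *)` cookies instead of numbers. A minimal userspace sketch of that pattern, as a normal (non-sparse) build sees it; the names `chip_readb`, `chip_ioremap`, and `fake_mmio` are made up for the demo:

    #include <stdint.h>
    #include <stdio.h>

    /* Outside of a sparse run the kernel's annotations expand to nothing. */
    #define __iomem
    #define __force

    /* A local buffer standing in for a dense-mapped PCI window. */
    static volatile uint8_t fake_mmio[16] = { 0xab };

    /* Old style: the accessor took a raw integer address. */
    static uint8_t chip_readb_old(unsigned long addr)
    {
            return *(volatile uint8_t *)addr;
    }

    /* New style: the accessor takes an __iomem cookie and converts back
       internally, as apecs_readb/cia_readb now do with their xaddr argument. */
    static uint8_t chip_readb(const volatile void __iomem *xaddr)
    {
            unsigned long addr = (unsigned long)xaddr;
            return *(volatile uint8_t __force *)addr;
    }

    /* The ioremap-style helpers now hand out a cookie rather than a number. */
    static void __iomem *chip_ioremap(unsigned long phys, unsigned long size)
    {
            (void)size;
            return (void __iomem *)phys;    /* dense space: identity offset */
    }

    int main(void)
    {
            void __iomem *base = chip_ioremap((unsigned long)fake_mmio,
                                              sizeof fake_mmio);
            printf("old: %#x  new: %#x\n",
                   chip_readb_old((unsigned long)fake_mmio), chip_readb(base));
            return 0;
    }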
......@@ -282,7 +282,7 @@ void
cia_pci_tbi_try2(struct pci_controller *hose,
dma_addr_t start, dma_addr_t end)
{
unsigned long bus_addr;
void __iomem *bus_addr;
int ctrl;
/* Put the chip into PCI loopback mode. */
......@@ -351,7 +351,7 @@ verify_tb_operation(void)
struct pci_iommu_arena *arena = pci_isa_hose->sg_isa;
int ctrl, addr0, tag0, pte0, data0;
int temp, use_tbia_try2 = 0;
unsigned long bus_addr;
void __iomem *bus_addr;
/* pyxis -- tbia is broken */
if (pci_isa_hose->dense_io_base)
......
......@@ -310,7 +310,7 @@ irongate_init_arch(void)
#define GET_GATT_OFF(addr) ((addr & 0x003ff000) >> 12)
#define GET_GATT(addr) (gatt_pages[GET_PAGE_DIR_IDX(addr)])
unsigned long
void __iomem *
irongate_ioremap(unsigned long addr, unsigned long size)
{
struct vm_struct *area;
......@@ -320,7 +320,7 @@ irongate_ioremap(unsigned long addr, unsigned long size)
unsigned long gart_bus_addr;
if (!alpha_agpgart_size)
return addr + IRONGATE_MEM;
return (void __iomem *)(addr + IRONGATE_MEM);
gart_bus_addr = (unsigned long)IRONGATE0->bar0 &
PCI_BASE_ADDRESS_MEM_MASK;
......@@ -339,7 +339,7 @@ irongate_ioremap(unsigned long addr, unsigned long size)
/*
* Not found - assume legacy ioremap
*/
return addr + IRONGATE_MEM;
return (void __iomem *)(addr + IRONGATE_MEM);
} while(0);
mmio_regs = (u32 *)(((unsigned long)IRONGATE0->bar1 &
......@@ -353,7 +353,7 @@ irongate_ioremap(unsigned long addr, unsigned long size)
if (addr & ~PAGE_MASK) {
printk("AGP ioremap failed... addr not page aligned (0x%lx)\n",
addr);
return addr + IRONGATE_MEM;
return (void __iomem *)(addr + IRONGATE_MEM);
}
last = addr + size - 1;
size = PAGE_ALIGN(last) - addr;
......@@ -378,7 +378,7 @@ irongate_ioremap(unsigned long addr, unsigned long size)
* Map it
*/
area = get_vm_area(size, VM_IOREMAP);
if (!area) return (unsigned long)NULL;
if (!area) return NULL;
for(baddr = addr, vaddr = (unsigned long)area->addr;
baddr <= last;
......@@ -391,7 +391,7 @@ irongate_ioremap(unsigned long addr, unsigned long size)
pte, PAGE_SIZE, 0)) {
printk("AGP ioremap: FAILED to map...\n");
vfree(area->addr);
return (unsigned long)NULL;
return NULL;
}
}
......@@ -402,13 +402,15 @@ irongate_ioremap(unsigned long addr, unsigned long size)
printk("irongate_ioremap(0x%lx, 0x%lx) returning 0x%lx\n",
addr, size, vaddr);
#endif
return vaddr;
return (void __iomem *)vaddr;
}
void
irongate_iounmap(unsigned long addr)
irongate_iounmap(volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
if (((long)addr >> 41) == -2)
return; /* kseg map, nothing to do */
if (addr) return vfree((void *)(PAGE_MASK & addr));
if (addr)
return vfree((void *)(PAGE_MASK & addr));
}
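About the `((long)addr >> 41) == -2` test kept in `irongate_iounmap`: mappings produced by the legacy `addr + IRONGATE_MEM` fallback are KSEG direct-map addresses (the "kseg map, nothing to do" comment), so there is no vmalloc area for `vfree` to release. The test treats everything in [0xfffffc0000000000, 0xfffffe0000000000) - the range an arithmetic right shift by 41 maps to -2 - as KSEG, and 0xfffffc0000000000 is the Alpha PAGE_OFFSET. A small demo of that arithmetic (assumes 64-bit long, as on Alpha; the second address is made up, just something outside the window):

    #include <stdio.h>

    int main(void)
    {
            long kseg  = (long)0xfffffc0000000000UL + 0x12345678;  /* direct-mapped */
            long other = (long)0xfffffe0000010000UL;               /* not KSEG */

            printf("kseg  >> 41 = %ld\n", kseg >> 41);   /* -2: skip the vfree */
            printf("other >> 41 = %ld\n", other >> 41);  /* not -2 */
            return 0;
    }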
......@@ -40,93 +40,93 @@ void _outl(u32 b, unsigned long addr)
__outl(b, addr);
}
u8 ___raw_readb(unsigned long addr)
u8 ___raw_readb(const volatile void __iomem *addr)
{
return __readb(addr);
}
u16 ___raw_readw(unsigned long addr)
u16 ___raw_readw(const volatile void __iomem *addr)
{
return __readw(addr);
}
u32 ___raw_readl(unsigned long addr)
u32 ___raw_readl(const volatile void __iomem *addr)
{
return __readl(addr);
}
u64 ___raw_readq(unsigned long addr)
u64 ___raw_readq(const volatile void __iomem *addr)
{
return __readq(addr);
}
u8 _readb(unsigned long addr)
u8 _readb(const volatile void __iomem *addr)
{
unsigned long r = __readb(addr);
mb();
return r;
}
u16 _readw(unsigned long addr)
u16 _readw(const volatile void __iomem *addr)
{
unsigned long r = __readw(addr);
mb();
return r;
}
u32 _readl(unsigned long addr)
u32 _readl(const volatile void __iomem *addr)
{
unsigned long r = __readl(addr);
mb();
return r;
}
u64 _readq(unsigned long addr)
u64 _readq(const volatile void __iomem *addr)
{
unsigned long r = __readq(addr);
mb();
return r;
}
void ___raw_writeb(u8 b, unsigned long addr)
void ___raw_writeb(u8 b, volatile void __iomem *addr)
{
__writeb(b, addr);
}
void ___raw_writew(u16 b, unsigned long addr)
void ___raw_writew(u16 b, volatile void __iomem *addr)
{
__writew(b, addr);
}
void ___raw_writel(u32 b, unsigned long addr)
void ___raw_writel(u32 b, volatile void __iomem *addr)
{
__writel(b, addr);
}
void ___raw_writeq(u64 b, unsigned long addr)
void ___raw_writeq(u64 b, volatile void __iomem *addr)
{
__writeq(b, addr);
}
void _writeb(u8 b, unsigned long addr)
void _writeb(u8 b, volatile void __iomem *addr)
{
__writeb(b, addr);
mb();
}
void _writew(u16 b, unsigned long addr)
void _writew(u16 b, volatile void __iomem *addr)
{
__writew(b, addr);
mb();
}
void _writel(u32 b, unsigned long addr)
void _writel(u32 b, volatile void __iomem *addr)
{
__writel(b, addr);
mb();
}
void _writeq(u64 b, unsigned long addr)
void _writeq(u64 b, volatile void __iomem *addr)
{
__writeq(b, addr);
mb();
......@@ -411,12 +411,12 @@ void outsl (unsigned long port, const void *src, unsigned long count)
* Copy data from IO memory space to "real" memory space.
* This needs to be optimized.
*/
void _memcpy_fromio(void * to, unsigned long from, long count)
void _memcpy_fromio(void * to, const volatile void __iomem *from, long count)
{
/* Optimize co-aligned transfers. Everything else gets handled
a byte at a time. */
if (count >= 8 && ((unsigned long)to & 7) == (from & 7)) {
if (count >= 8 && ((u64)to & 7) == ((u64)from & 7)) {
count -= 8;
do {
*(u64 *)to = __raw_readq(from);
......@@ -427,7 +427,7 @@ void _memcpy_fromio(void * to, unsigned long from, long count)
count += 8;
}
if (count >= 4 && ((unsigned long)to & 3) == (from & 3)) {
if (count >= 4 && ((u64)to & 3) == ((u64)from & 3)) {
count -= 4;
do {
*(u32 *)to = __raw_readl(from);
......@@ -438,7 +438,7 @@ void _memcpy_fromio(void * to, unsigned long from, long count)
count += 4;
}
if (count >= 2 && ((unsigned long)to & 1) == (from & 1)) {
if (count >= 2 && ((u64)to & 1) == ((u64)from & 1)) {
count -= 2;
do {
*(u16 *)to = __raw_readw(from);
......@@ -455,19 +455,20 @@ void _memcpy_fromio(void * to, unsigned long from, long count)
to++;
from++;
}
mb();
}
/*
* Copy data from "real" memory space to IO memory space.
* This needs to be optimized.
*/
void _memcpy_toio(unsigned long to, const void * from, long count)
void _memcpy_toio(volatile void __iomem *to, const void * from, long count)
{
/* Optimize co-aligned transfers. Everything else gets handled
a byte at a time. */
/* FIXME -- align FROM. */
if (count >= 8 && (to & 7) == ((unsigned long)from & 7)) {
if (count >= 8 && ((u64)to & 7) == ((u64)from & 7)) {
count -= 8;
do {
__raw_writeq(*(const u64 *)from, to);
......@@ -478,7 +479,7 @@ void _memcpy_toio(unsigned long to, const void * from, long count)
count += 8;
}
if (count >= 4 && (to & 3) == ((unsigned long)from & 3)) {
if (count >= 4 && ((u64)to & 3) == ((u64)from & 3)) {
count -= 4;
do {
__raw_writel(*(const u32 *)from, to);
......@@ -489,7 +490,7 @@ void _memcpy_toio(unsigned long to, const void * from, long count)
count += 4;
}
if (count >= 2 && (to & 1) == ((unsigned long)from & 1)) {
if (count >= 2 && ((u64)to & 1) == ((u64)from & 1)) {
count -= 2;
do {
__raw_writew(*(const u16 *)from, to);
......@@ -512,24 +513,24 @@ void _memcpy_toio(unsigned long to, const void * from, long count)
/*
* "memset" on IO memory space.
*/
void _memset_c_io(unsigned long to, unsigned long c, long count)
void _memset_c_io(volatile void __iomem *to, unsigned long c, long count)
{
/* Handle any initial odd byte */
if (count > 0 && (to & 1)) {
if (count > 0 && ((u64)to & 1)) {
__raw_writeb(c, to);
to++;
count--;
}
/* Handle any initial odd halfword */
if (count >= 2 && (to & 2)) {
if (count >= 2 && ((u64)to & 2)) {
__raw_writew(c, to);
to += 2;
count -= 2;
}
/* Handle any initial odd word */
if (count >= 4 && (to & 4)) {
if (count >= 4 && ((u64)to & 4)) {
__raw_writel(c, to);
to += 4;
count -= 4;
......@@ -571,24 +572,28 @@ void _memset_c_io(unsigned long to, unsigned long c, long count)
void
scr_memcpyw(u16 *d, const u16 *s, unsigned int count)
{
if (! __is_ioaddr((unsigned long) s)) {
/* Source is memory. */
if (! __is_ioaddr((unsigned long) d))
memcpy(d, s, count);
else
memcpy_toio(d, s, count);
} else {
/* Source is screen. */
if (! __is_ioaddr((unsigned long) d))
memcpy_fromio(d, s, count);
else {
const u16 __iomem *ios = (const u16 __iomem *) s;
u16 __iomem *iod = (u16 __iomem *) d;
int s_isio = __is_ioaddr(s);
int d_isio = __is_ioaddr(d);
if (s_isio) {
if (d_isio) {
/* FIXME: Should handle unaligned ops and
operation widening. */
count /= 2;
while (count--) {
u16 tmp = __raw_readw((unsigned long)(s++));
__raw_writew(tmp, (unsigned long)(d++));
u16 tmp = __raw_readw(ios++);
__raw_writew(tmp, iod++);
}
}
else
memcpy_fromio(d, ios, count);
} else {
if (d_isio)
memcpy_toio(iod, s, count);
else
memcpy(d, s, count);
}
}
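The `_memcpy_fromio` hunks above keep their co-aligned fast path, only rephrasing the test as `((u64)to & 7) == ((u64)from & 7)` now that `from` is an `__iomem` pointer rather than an integer. A standalone sketch of the same idea on ordinary memory; the pre-alignment loop is added here only so the sketch never performs a misaligned 64-bit access:

    #include <stdint.h>
    #include <stdio.h>

    /* Copy count bytes, using 64-bit chunks only when src and dst agree
       modulo 8 (the co-alignment test), and bytes otherwise. */
    static void copy_coaligned(void *to, const void *from, long count)
    {
            uint8_t *d = to;
            const uint8_t *s = from;

            if (count >= 8 && ((uintptr_t)d & 7) == ((uintptr_t)s & 7)) {
                    while (((uintptr_t)d & 7) && count > 0) {   /* pre-align */
                            *d++ = *s++;
                            count--;
                    }
                    while (count >= 8) {                        /* bulk: quadwords */
                            *(uint64_t *)d = *(const uint64_t *)s;
                            d += 8;
                            s += 8;
                            count -= 8;
                    }
            }
            while (count-- > 0)                                 /* tail / misaligned */
                    *d++ = *s++;
    }

    int main(void)
    {
            char src[] = "co-aligned copy example";
            char dst[sizeof src] = { 0 };

            copy_coaligned(dst, src, sizeof src);
            puts(dst);
            return 0;
    }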
......@@ -370,9 +370,9 @@ struct el_apecs_procdata
* data to/from the right byte-lanes.
*/
#define vip volatile int *
#define vuip volatile unsigned int *
#define vulp volatile unsigned long *
#define vip volatile int __force *
#define vuip volatile unsigned int __force *
#define vulp volatile unsigned long __force *
__EXTERN_INLINE u8 apecs_inb(unsigned long addr)
{
......@@ -421,8 +421,9 @@ __EXTERN_INLINE void apecs_outl(u32 b, unsigned long addr)
* dense memory space, everything else through sparse space.
*/
__EXTERN_INLINE u8 apecs_readb(unsigned long addr)
__EXTERN_INLINE u8 apecs_readb(const volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long result, msb;
addr -= APECS_DENSE_MEM;
......@@ -435,8 +436,9 @@ __EXTERN_INLINE u8 apecs_readb(unsigned long addr)
return __kernel_extbl(result, addr & 3);
}
__EXTERN_INLINE u16 apecs_readw(unsigned long addr)
__EXTERN_INLINE u16 apecs_readw(const volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long result, msb;
addr -= APECS_DENSE_MEM;
......@@ -449,18 +451,19 @@ __EXTERN_INLINE u16 apecs_readw(unsigned long addr)
return __kernel_extwl(result, addr & 3);
}
__EXTERN_INLINE u32 apecs_readl(unsigned long addr)
__EXTERN_INLINE u32 apecs_readl(const volatile void __iomem *addr)
{
return (*(vuip)addr) & 0xffffffff;
return *(vuip)addr;
}
__EXTERN_INLINE u64 apecs_readq(unsigned long addr)
__EXTERN_INLINE u64 apecs_readq(const volatile void __iomem *addr)
{
return *(vulp)addr;
}
__EXTERN_INLINE void apecs_writeb(u8 b, unsigned long addr)
__EXTERN_INLINE void apecs_writeb(u8 b, volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long msb;
addr -= APECS_DENSE_MEM;
......@@ -472,8 +475,9 @@ __EXTERN_INLINE void apecs_writeb(u8 b, unsigned long addr)
*(vuip) ((addr << 5) + APECS_SPARSE_MEM + 0x00) = b * 0x01010101;
}
__EXTERN_INLINE void apecs_writew(u16 b, unsigned long addr)
__EXTERN_INLINE void apecs_writew(u16 b, volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long msb;
addr -= APECS_DENSE_MEM;
......@@ -485,24 +489,24 @@ __EXTERN_INLINE void apecs_writew(u16 b, unsigned long addr)
*(vuip) ((addr << 5) + APECS_SPARSE_MEM + 0x08) = b * 0x00010001;
}
__EXTERN_INLINE void apecs_writel(u32 b, unsigned long addr)
__EXTERN_INLINE void apecs_writel(u32 b, volatile void __iomem *addr)
{
*(vuip)addr = b;
}
__EXTERN_INLINE void apecs_writeq(u64 b, unsigned long addr)
__EXTERN_INLINE void apecs_writeq(u64 b, volatile void __iomem *addr)
{
*(vulp)addr = b;
}
__EXTERN_INLINE unsigned long apecs_ioremap(unsigned long addr,
__EXTERN_INLINE void __iomem *apecs_ioremap(unsigned long addr,
unsigned long size
__attribute__((unused)))
{
return addr + APECS_DENSE_MEM;
return (void __iomem *)(addr + APECS_DENSE_MEM);
}
__EXTERN_INLINE void apecs_iounmap(unsigned long addr)
__EXTERN_INLINE void apecs_iounmap(volatile void __iomem *addr)
{
return;
}
......@@ -521,25 +525,25 @@ __EXTERN_INLINE int apecs_is_ioaddr(unsigned long addr)
#define __inb(p) apecs_inb((unsigned long)(p))
#define __inw(p) apecs_inw((unsigned long)(p))
#define __inl(p) apecs_inl((unsigned long)(p))
#define __outb(x,p) apecs_outb((x),(unsigned long)(p))
#define __outw(x,p) apecs_outw((x),(unsigned long)(p))
#define __outl(x,p) apecs_outl((x),(unsigned long)(p))
#define __readb(a) apecs_readb((unsigned long)(a))
#define __readw(a) apecs_readw((unsigned long)(a))
#define __readl(a) apecs_readl((unsigned long)(a))
#define __readq(a) apecs_readq((unsigned long)(a))
#define __writeb(x,a) apecs_writeb((x),(unsigned long)(a))
#define __writew(x,a) apecs_writew((x),(unsigned long)(a))
#define __writel(x,a) apecs_writel((x),(unsigned long)(a))
#define __writeq(x,a) apecs_writeq((x),(unsigned long)(a))
#define __ioremap(a,s) apecs_ioremap((unsigned long)(a),(s))
#define __iounmap(a) apecs_iounmap((unsigned long)(a))
#define __outb(x,p) apecs_outb(x,(unsigned long)(p))
#define __outw(x,p) apecs_outw(x,(unsigned long)(p))
#define __outl(x,p) apecs_outl(x,(unsigned long)(p))
#define __readb(a) apecs_readb(a)
#define __readw(a) apecs_readw(a)
#define __readl(a) apecs_readl(a)
#define __readq(a) apecs_readq(a)
#define __writeb(x,a) apecs_writeb(x,a)
#define __writew(x,a) apecs_writew(x,a)
#define __writel(x,a) apecs_writel(x,a)
#define __writeq(x,a) apecs_writeq(x,a)
#define __ioremap(a,s) apecs_ioremap(a,s)
#define __iounmap(a) apecs_iounmap(a)
#define __is_ioaddr(a) apecs_is_ioaddr((unsigned long)(a))
#define __raw_readl(a) __readl(a)
#define __raw_readq(a) __readq(a)
#define __raw_writel(v,a) __writel((v),(a))
#define __raw_writeq(v,a) __writeq((v),(a))
#define __raw_writel(v,a) __writel(v,a)
#define __raw_writeq(v,a) __writeq(v,a)
#endif /* __WANT_IO_DEF */
......
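APECS (and LCA, in a later file) reach bytes and words through sparse space: the accessor casts the `__iomem` cookie back to an integer, subtracts the dense base, shifts the offset left by 5, adds the sparse base plus a size code, and picks the byte lane with `__kernel_extbl(result, addr & 3)`. A sketch of just that address arithmetic, with made-up base constants and the HAE handling for large offsets left out:

    #include <stdint.h>
    #include <stdio.h>

    #define DENSE_MEM   0x300000000ULL    /* made-up dense-space base */
    #define SPARSE_MEM  0x200000000ULL    /* made-up sparse-space base */

    /* Each byte of PCI memory occupies 32 bytes of sparse space; the low
       two offset bits select the lane inside the returned longword. */
    static uint64_t sparse_byte_address(uint64_t cookie)
    {
            uint64_t offset = cookie - DENSE_MEM;       /* back to a PCI offset */
            return (offset << 5) + SPARSE_MEM + 0x00;   /* +0x00 = byte-sized access */
    }

    static uint8_t extract_byte_lane(uint32_t longword, uint64_t cookie)
    {
            unsigned lane = (cookie - DENSE_MEM) & 3;   /* what EXTBL does in hardware */
            return (uint8_t)(longword >> (8 * lane));
    }

    int main(void)
    {
            uint64_t cookie   = DENSE_MEM + 0x1234567;  /* some byte in PCI memory */
            uint32_t bus_data = 0xddccbbaa;             /* pretend sparse read result */

            printf("sparse address: %#llx\n",
                   (unsigned long long)sparse_byte_address(cookie));
            printf("lane %u -> %#x\n",
                   (unsigned)((cookie - DENSE_MEM) & 3),
                   extract_byte_lane(bus_data, cookie));
            return 0;
    }

The write side visible above is the mirror image: the byte is replicated across the longword (`b * 0x01010101`) and stored at the same sparse address.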
......@@ -306,11 +306,11 @@ struct el_CIA_sysdata_mcheck {
* get at PCI memory and I/O.
*/
#define vucp volatile unsigned char *
#define vusp volatile unsigned short *
#define vip volatile int *
#define vuip volatile unsigned int *
#define vulp volatile unsigned long *
#define vucp volatile unsigned char __force *
#define vusp volatile unsigned short __force *
#define vip volatile int __force *
#define vuip volatile unsigned int __force *
#define vulp volatile unsigned long __force *
__EXTERN_INLINE u8 cia_inb(unsigned long addr)
{
......@@ -422,8 +422,9 @@ __EXTERN_INLINE void cia_bwx_outl(u32 b, unsigned long addr)
*
*/
__EXTERN_INLINE u8 cia_readb(unsigned long addr)
__EXTERN_INLINE u8 cia_readb(const volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long result;
addr &= CIA_MEM_R1_MASK;
......@@ -431,8 +432,9 @@ __EXTERN_INLINE u8 cia_readb(unsigned long addr)
return __kernel_extbl(result, addr & 3);
}
__EXTERN_INLINE u16 cia_readw(unsigned long addr)
__EXTERN_INLINE u16 cia_readw(const volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long result;
addr &= CIA_MEM_R1_MASK;
......@@ -440,8 +442,9 @@ __EXTERN_INLINE u16 cia_readw(unsigned long addr)
return __kernel_extwl(result, addr & 3);
}
__EXTERN_INLINE void cia_writeb(u8 b, unsigned long addr)
__EXTERN_INLINE void cia_writeb(u8 b, volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long w;
addr &= CIA_MEM_R1_MASK;
......@@ -449,8 +452,9 @@ __EXTERN_INLINE void cia_writeb(u8 b, unsigned long addr)
*(vuip) ((addr << 5) + CIA_SPARSE_MEM + 0x00) = w;
}
__EXTERN_INLINE void cia_writew(u16 b, unsigned long addr)
__EXTERN_INLINE void cia_writew(u16 b, volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long w;
addr &= CIA_MEM_R1_MASK;
......@@ -458,85 +462,85 @@ __EXTERN_INLINE void cia_writew(u16 b, unsigned long addr)
*(vuip) ((addr << 5) + CIA_SPARSE_MEM + 0x08) = w;
}
__EXTERN_INLINE u32 cia_readl(unsigned long addr)
__EXTERN_INLINE u32 cia_readl(const volatile void __iomem *addr)
{
return *(vuip)addr;
}
__EXTERN_INLINE u64 cia_readq(unsigned long addr)
__EXTERN_INLINE u64 cia_readq(const volatile void __iomem *addr)
{
return *(vulp)addr;
}
__EXTERN_INLINE void cia_writel(u32 b, unsigned long addr)
__EXTERN_INLINE void cia_writel(u32 b, volatile void __iomem *addr)
{
*(vuip)addr = b;
}
__EXTERN_INLINE void cia_writeq(u64 b, unsigned long addr)
__EXTERN_INLINE void cia_writeq(u64 b, volatile void __iomem *addr)
{
*(vulp)addr = b;
}
__EXTERN_INLINE unsigned long cia_ioremap(unsigned long addr,
__EXTERN_INLINE void __iomem *cia_ioremap(unsigned long addr,
unsigned long size
__attribute__((unused)))
{
return addr + CIA_DENSE_MEM;
return (void __iomem *)(addr + CIA_DENSE_MEM);
}
__EXTERN_INLINE void cia_iounmap(unsigned long addr)
__EXTERN_INLINE void cia_iounmap(volatile void __iomem *addr)
{
return;
}
__EXTERN_INLINE u8 cia_bwx_readb(unsigned long addr)
__EXTERN_INLINE u8 cia_bwx_readb(const volatile void __iomem *addr)
{
return __kernel_ldbu(*(vucp)addr);
}
__EXTERN_INLINE u16 cia_bwx_readw(unsigned long addr)
__EXTERN_INLINE u16 cia_bwx_readw(const volatile void __iomem *addr)
{
return __kernel_ldwu(*(vusp)addr);
}
__EXTERN_INLINE u32 cia_bwx_readl(unsigned long addr)
__EXTERN_INLINE u32 cia_bwx_readl(const volatile void __iomem *addr)
{
return *(vuip)addr;
}
__EXTERN_INLINE u64 cia_bwx_readq(unsigned long addr)
__EXTERN_INLINE u64 cia_bwx_readq(const volatile void __iomem *addr)
{
return *(vulp)addr;
}
__EXTERN_INLINE void cia_bwx_writeb(u8 b, unsigned long addr)
__EXTERN_INLINE void cia_bwx_writeb(u8 b, volatile void __iomem *addr)
{
__kernel_stb(b, *(vucp)addr);
}
__EXTERN_INLINE void cia_bwx_writew(u16 b, unsigned long addr)
__EXTERN_INLINE void cia_bwx_writew(u16 b, volatile void __iomem *addr)
{
__kernel_stw(b, *(vusp)addr);
}
__EXTERN_INLINE void cia_bwx_writel(u32 b, unsigned long addr)
__EXTERN_INLINE void cia_bwx_writel(u32 b, volatile void __iomem *addr)
{
*(vuip)addr = b;
}
__EXTERN_INLINE void cia_bwx_writeq(u64 b, unsigned long addr)
__EXTERN_INLINE void cia_bwx_writeq(u64 b, volatile void __iomem *addr)
{
*(vulp)addr = b;
}
__EXTERN_INLINE unsigned long cia_bwx_ioremap(unsigned long addr,
__EXTERN_INLINE void __iomem *cia_bwx_ioremap(unsigned long addr,
unsigned long size)
{
return addr + CIA_BW_MEM;
return (void __iomem *)(addr + CIA_BW_MEM);
}
__EXTERN_INLINE void cia_bwx_iounmap(unsigned long addr)
__EXTERN_INLINE void cia_bwx_iounmap(volatile void __iomem *addr)
{
return;
}
......@@ -558,54 +562,54 @@ __EXTERN_INLINE int cia_is_ioaddr(unsigned long addr)
# define __inb(p) cia_bwx_inb((unsigned long)(p))
# define __inw(p) cia_bwx_inw((unsigned long)(p))
# define __inl(p) cia_bwx_inl((unsigned long)(p))
# define __outb(x,p) cia_bwx_outb((x),(unsigned long)(p))
# define __outw(x,p) cia_bwx_outw((x),(unsigned long)(p))
# define __outl(x,p) cia_bwx_outl((x),(unsigned long)(p))
# define __readb(a) cia_bwx_readb((unsigned long)(a))
# define __readw(a) cia_bwx_readw((unsigned long)(a))
# define __readl(a) cia_bwx_readl((unsigned long)(a))
# define __readq(a) cia_bwx_readq((unsigned long)(a))
# define __writeb(x,a) cia_bwx_writeb((x),(unsigned long)(a))
# define __writew(x,a) cia_bwx_writew((x),(unsigned long)(a))
# define __writel(x,a) cia_bwx_writel((x),(unsigned long)(a))
# define __writeq(x,a) cia_bwx_writeq((x),(unsigned long)(a))
# define __ioremap(a,s) cia_bwx_ioremap((unsigned long)(a),(s))
# define __iounmap(a) cia_bwx_iounmap((unsigned long)(a))
# define __outb(x,p) cia_bwx_outb(x,(unsigned long)(p))
# define __outw(x,p) cia_bwx_outw(x,(unsigned long)(p))
# define __outl(x,p) cia_bwx_outl(x,(unsigned long)(p))
# define __readb(a) cia_bwx_readb(a)
# define __readw(a) cia_bwx_readw(a)
# define __readl(a) cia_bwx_readl(a)
# define __readq(a) cia_bwx_readq(a)
# define __writeb(x,a) cia_bwx_writeb(x,a)
# define __writew(x,a) cia_bwx_writew(x,a)
# define __writel(x,a) cia_bwx_writel(x,a)
# define __writeq(x,a) cia_bwx_writeq(x,a)
# define __ioremap(a,s) cia_bwx_ioremap(a,s)
# define __iounmap(a) cia_bwx_iounmap(a)
# define inb(p) __inb(p)
# define inw(p) __inw(p)
# define inl(p) __inl(p)
# define outb(x,p) __outb((x),(p))
# define outw(x,p) __outw((x),(p))
# define outl(x,p) __outl((x),(p))
# define outb(x,p) __outb(x,p)
# define outw(x,p) __outw(x,p)
# define outl(x,p) __outl(x,p)
# define __raw_readb(a) __readb(a)
# define __raw_readw(a) __readw(a)
# define __raw_readl(a) __readl(a)
# define __raw_readq(a) __readq(a)
# define __raw_writeb(x,a) __writeb((x),(a))
# define __raw_writew(x,a) __writew((x),(a))
# define __raw_writel(x,a) __writel((x),(a))
# define __raw_writeq(x,a) __writeq((x),(a))
# define __raw_writeb(x,a) __writeb(x,a)
# define __raw_writew(x,a) __writew(x,a)
# define __raw_writel(x,a) __writel(x,a)
# define __raw_writeq(x,a) __writeq(x,a)
#else
# define __inb(p) cia_inb((unsigned long)(p))
# define __inw(p) cia_inw((unsigned long)(p))
# define __inl(p) cia_inl((unsigned long)(p))
# define __outb(x,p) cia_outb((x),(unsigned long)(p))
# define __outw(x,p) cia_outw((x),(unsigned long)(p))
# define __outl(x,p) cia_outl((x),(unsigned long)(p))
# define __readb(a) cia_readb((unsigned long)(a))
# define __readw(a) cia_readw((unsigned long)(a))
# define __readl(a) cia_readl((unsigned long)(a))
# define __readq(a) cia_readq((unsigned long)(a))
# define __writeb(x,a) cia_writeb((x),(unsigned long)(a))
# define __writew(x,a) cia_writew((x),(unsigned long)(a))
# define __writel(x,a) cia_writel((x),(unsigned long)(a))
# define __writeq(x,a) cia_writeq((x),(unsigned long)(a))
# define __ioremap(a,s) cia_ioremap((unsigned long)(a),(s))
# define __iounmap(a) cia_iounmap((unsigned long)(a))
# define __outb(x,p) cia_outb(x,(unsigned long)(p))
# define __outw(x,p) cia_outw(x,(unsigned long)(p))
# define __outl(x,p) cia_outl(x,(unsigned long)(p))
# define __readb(a) cia_readb(a)
# define __readw(a) cia_readw(a)
# define __readl(a) cia_readl(a)
# define __readq(a) cia_readq(a)
# define __writeb(x,a) cia_writeb(x,a)
# define __writew(x,a) cia_writew(x,a)
# define __writel(x,a) cia_writel(x,a)
# define __writeq(x,a) cia_writeq(x,a)
# define __ioremap(a,s) cia_ioremap(a,s)
# define __iounmap(a) cia_iounmap(a)
# define __raw_readl(a) __readl(a)
# define __raw_readq(a) __readq(a)
# define __raw_writel(v,a) __writel((v),(a))
# define __raw_writeq(v,a) __writeq((v),(a))
# define __raw_writel(v,a) __writel(v,a)
# define __raw_writeq(v,a) __writeq(v,a)
#endif /* PYXIS */
#define __is_ioaddr(a) cia_is_ioaddr((unsigned long)(a))
......
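The `vucp`/`vuip`/`vulp` helper macros in this header (and the others below) gain `__force` so the inline accessors can cast an `__iomem` cookie to a dereferenceable pointer without a sparse address-space warning. A sketch of why, assuming roughly the `__CHECKER__` definitions from `linux/compiler.h`; a normal compile sees empty macros, while under sparse the cast would be rejected without the `__force`:

    #include <stdint.h>

    #ifdef __CHECKER__
    # define __iomem __attribute__((noderef, address_space(2)))
    # define __force __attribute__((force))
    #else
    # define __iomem
    # define __force
    #endif

    #define vuip volatile unsigned int __force *    /* as in the chipset headers */

    static uint32_t chip_readl(const volatile void __iomem *addr)
    {
            /* Without __force in vuip, sparse would warn that this cast
               drops the __iomem address space. */
            return *(vuip)addr;
    }

    int main(void)
    {
            uint32_t reg = 0x12345678;
            /* Force a stack variable into the "I/O" address space purely
               for the demo's sake. */
            const volatile void __iomem *p =
                    (const volatile void __force __iomem *)&reg;

            return chip_readl(p) == 0x12345678 ? 0 : 1;
    }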
......@@ -190,10 +190,10 @@ struct el_IRONGATE_sysdata_mcheck {
* K7 can only use linear accesses to get at PCI memory and I/O spaces.
*/
#define vucp volatile unsigned char *
#define vusp volatile unsigned short *
#define vuip volatile unsigned int *
#define vulp volatile unsigned long *
#define vucp volatile unsigned char __force *
#define vusp volatile unsigned short __force *
#define vuip volatile unsigned int __force *
#define vulp volatile unsigned long __force *
__EXTERN_INLINE u8 irongate_inb(unsigned long addr)
{
......@@ -232,48 +232,48 @@ __EXTERN_INLINE void irongate_outl(u32 b, unsigned long addr)
* Memory functions. All accesses are done through linear space.
*/
__EXTERN_INLINE u8 irongate_readb(unsigned long addr)
__EXTERN_INLINE u8 irongate_readb(const volatile void __iomem *addr)
{
return __kernel_ldbu(*(vucp)addr);
}
__EXTERN_INLINE u16 irongate_readw(unsigned long addr)
__EXTERN_INLINE u16 irongate_readw(const volatile void __iomem *addr)
{
return __kernel_ldwu(*(vusp)addr);
}
__EXTERN_INLINE u32 irongate_readl(unsigned long addr)
__EXTERN_INLINE u32 irongate_readl(const volatile void __iomem *addr)
{
return (*(vuip)addr) & 0xffffffff;
return *(vuip)addr;
}
__EXTERN_INLINE u64 irongate_readq(unsigned long addr)
__EXTERN_INLINE u64 irongate_readq(const volatile void __iomem *addr)
{
return *(vulp)addr;
}
__EXTERN_INLINE void irongate_writeb(u8 b, unsigned long addr)
__EXTERN_INLINE void irongate_writeb(u8 b, volatile void __iomem *addr)
{
__kernel_stb(b, *(vucp)addr);
}
__EXTERN_INLINE void irongate_writew(u16 b, unsigned long addr)
__EXTERN_INLINE void irongate_writew(u16 b, volatile void __iomem *addr)
{
__kernel_stw(b, *(vusp)addr);
}
__EXTERN_INLINE void irongate_writel(u32 b, unsigned long addr)
__EXTERN_INLINE void irongate_writel(u32 b, volatile void __iomem *addr)
{
*(vuip)addr = b;
}
__EXTERN_INLINE void irongate_writeq(u64 b, unsigned long addr)
__EXTERN_INLINE void irongate_writeq(u64 b, volatile void __iomem *addr)
{
*(vulp)addr = b;
}
extern unsigned long irongate_ioremap(unsigned long addr, unsigned long size);
extern void irongate_iounmap(unsigned long addr);
extern void __iomem *irongate_ioremap(unsigned long addr, unsigned long size);
extern void irongate_iounmap(volatile void __iomem *addr);
__EXTERN_INLINE int irongate_is_ioaddr(unsigned long addr)
{
......@@ -290,35 +290,35 @@ __EXTERN_INLINE int irongate_is_ioaddr(unsigned long addr)
#define __inb(p) irongate_inb((unsigned long)(p))
#define __inw(p) irongate_inw((unsigned long)(p))
#define __inl(p) irongate_inl((unsigned long)(p))
#define __outb(x,p) irongate_outb((x),(unsigned long)(p))
#define __outw(x,p) irongate_outw((x),(unsigned long)(p))
#define __outl(x,p) irongate_outl((x),(unsigned long)(p))
#define __readb(a) irongate_readb((unsigned long)(a))
#define __readw(a) irongate_readw((unsigned long)(a))
#define __readl(a) irongate_readl((unsigned long)(a))
#define __readq(a) irongate_readq((unsigned long)(a))
#define __writeb(x,a) irongate_writeb((x),(unsigned long)(a))
#define __writew(x,a) irongate_writew((x),(unsigned long)(a))
#define __writel(x,a) irongate_writel((x),(unsigned long)(a))
#define __writeq(x,a) irongate_writeq((x),(unsigned long)(a))
#define __ioremap(a,s) irongate_ioremap((unsigned long)(a),(s))
#define __iounmap(a) irongate_iounmap((unsigned long)(a))
#define __outb(x,p) irongate_outb(x,(unsigned long)(p))
#define __outw(x,p) irongate_outw(x,(unsigned long)(p))
#define __outl(x,p) irongate_outl(x,(unsigned long)(p))
#define __readb(a) irongate_readb(a)
#define __readw(a) irongate_readw(a)
#define __readl(a) irongate_readl(a)
#define __readq(a) irongate_readq(a)
#define __writeb(x,a) irongate_writeb(x,a)
#define __writew(x,a) irongate_writew(x,a)
#define __writel(x,a) irongate_writel(x,a)
#define __writeq(x,a) irongate_writeq(x,a)
#define __ioremap(a,s) irongate_ioremap(a,s)
#define __iounmap(a) irongate_iounmap(a)
#define __is_ioaddr(a) irongate_is_ioaddr((unsigned long)(a))
#define inb(p) __inb(p)
#define inw(p) __inw(p)
#define inl(p) __inl(p)
#define outb(x,p) __outb((x),(p))
#define outw(x,p) __outw((x),(p))
#define outl(x,p) __outl((x),(p))
#define outb(x,p) __outb(x,p)
#define outw(x,p) __outw(x,p)
#define outl(x,p) __outl(x,p)
#define __raw_readb(a) __readb(a)
#define __raw_readw(a) __readw(a)
#define __raw_readl(a) __readl(a)
#define __raw_readq(a) __readq(a)
#define __raw_writeb(v,a) __writeb((v),(a))
#define __raw_writew(v,a) __writew((v),(a))
#define __raw_writel(v,a) __writel((v),(a))
#define __raw_writeq(v,a) __writeq((v),(a))
#define __raw_writeb(v,a) __writeb(v,a)
#define __raw_writew(v,a) __writew(v,a)
#define __raw_writel(v,a) __writel(v,a)
#define __raw_writeq(v,a) __writeq(v,a)
#endif /* __WANT_IO_DEF */
......
......@@ -215,9 +215,9 @@ union el_lca {
* data to/from the right byte-lanes.
*/
#define vip volatile int *
#define vuip volatile unsigned int *
#define vulp volatile unsigned long *
#define vip volatile int __force *
#define vuip volatile unsigned int __force *
#define vulp volatile unsigned long __force *
__EXTERN_INLINE u8 lca_inb(unsigned long addr)
{
......@@ -266,8 +266,9 @@ __EXTERN_INLINE void lca_outl(u32 b, unsigned long addr)
* dense memory space, everything else through sparse space.
*/
__EXTERN_INLINE u8 lca_readb(unsigned long addr)
__EXTERN_INLINE u8 lca_readb(const volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long result, msb;
addr -= LCA_DENSE_MEM;
......@@ -280,8 +281,9 @@ __EXTERN_INLINE u8 lca_readb(unsigned long addr)
return __kernel_extbl(result, addr & 3);
}
__EXTERN_INLINE u16 lca_readw(unsigned long addr)
__EXTERN_INLINE u16 lca_readw(const volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long result, msb;
addr -= LCA_DENSE_MEM;
......@@ -294,18 +296,19 @@ __EXTERN_INLINE u16 lca_readw(unsigned long addr)
return __kernel_extwl(result, addr & 3);
}
__EXTERN_INLINE u32 lca_readl(unsigned long addr)
__EXTERN_INLINE u32 lca_readl(const volatile void __iomem *addr)
{
return (*(vuip)addr) & 0xffffffff;
return *(vuip)addr;
}
__EXTERN_INLINE u64 lca_readq(unsigned long addr)
__EXTERN_INLINE u64 lca_readq(const volatile void __iomem *addr)
{
return *(vulp)addr;
}
__EXTERN_INLINE void lca_writeb(u8 b, unsigned long addr)
__EXTERN_INLINE void lca_writeb(u8 b, volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long msb;
unsigned long w;
......@@ -319,8 +322,9 @@ __EXTERN_INLINE void lca_writeb(u8 b, unsigned long addr)
*(vuip) ((addr << 5) + LCA_SPARSE_MEM + 0x00) = w;
}
__EXTERN_INLINE void lca_writew(u16 b, unsigned long addr)
__EXTERN_INLINE void lca_writew(u16 b, volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long msb;
unsigned long w;
......@@ -334,24 +338,24 @@ __EXTERN_INLINE void lca_writew(u16 b, unsigned long addr)
*(vuip) ((addr << 5) + LCA_SPARSE_MEM + 0x08) = w;
}
__EXTERN_INLINE void lca_writel(u32 b, unsigned long addr)
__EXTERN_INLINE void lca_writel(u32 b, volatile void __iomem *addr)
{
*(vuip)addr = b;
}
__EXTERN_INLINE void lca_writeq(u64 b, unsigned long addr)
__EXTERN_INLINE void lca_writeq(u64 b, volatile void __iomem *addr)
{
*(vulp)addr = b;
}
__EXTERN_INLINE unsigned long lca_ioremap(unsigned long addr,
__EXTERN_INLINE void __iomem *lca_ioremap(unsigned long addr,
unsigned long size
__attribute__((unused)))
{
return addr + LCA_DENSE_MEM;
return (void __iomem *)(addr + LCA_DENSE_MEM);
}
__EXTERN_INLINE void lca_iounmap(unsigned long addr)
__EXTERN_INLINE void lca_iounmap(volatile void __iomem *addr)
{
return;
}
......@@ -370,25 +374,25 @@ __EXTERN_INLINE int lca_is_ioaddr(unsigned long addr)
#define __inb(p) lca_inb((unsigned long)(p))
#define __inw(p) lca_inw((unsigned long)(p))
#define __inl(p) lca_inl((unsigned long)(p))
#define __outb(x,p) lca_outb((x),(unsigned long)(p))
#define __outw(x,p) lca_outw((x),(unsigned long)(p))
#define __outl(x,p) lca_outl((x),(unsigned long)(p))
#define __readb(a) lca_readb((unsigned long)(a))
#define __readw(a) lca_readw((unsigned long)(a))
#define __readl(a) lca_readl((unsigned long)(a))
#define __readq(a) lca_readq((unsigned long)(a))
#define __writeb(x,a) lca_writeb((x),(unsigned long)(a))
#define __writew(x,a) lca_writew((x),(unsigned long)(a))
#define __writel(x,a) lca_writel((x),(unsigned long)(a))
#define __writeq(x,a) lca_writeq((x),(unsigned long)(a))
#define __ioremap(a,s) lca_ioremap((unsigned long)(a),(s))
#define __iounmap(a) lca_iounmap((unsigned long)(a))
#define __outb(x,p) lca_outb(x,(unsigned long)(p))
#define __outw(x,p) lca_outw(x,(unsigned long)(p))
#define __outl(x,p) lca_outl(x,(unsigned long)(p))
#define __readb(a) lca_readb(a)
#define __readw(a) lca_readw(a)
#define __readl(a) lca_readl(a)
#define __readq(a) lca_readq(a)
#define __writeb(x,a) lca_writeb(x,a)
#define __writew(x,a) lca_writew(x,a)
#define __writel(x,a) lca_writel(x,a)
#define __writeq(x,a) lca_writeq(x,a)
#define __ioremap(a,s) lca_ioremap(a,s)
#define __iounmap(a) lca_iounmap(a)
#define __is_ioaddr(a) lca_is_ioaddr((unsigned long)(a))
#define __raw_readl(a) __readl(a)
#define __raw_readq(a) __readq(a)
#define __raw_writel(v,a) __writel((v),(a))
#define __raw_writeq(v,a) __writeq((v),(a))
#define __raw_writel(v,a) __writel(v,a)
#define __raw_writeq(v,a) __writeq(v,a)
#endif /* __WANT_IO_DEF */
......
......@@ -451,78 +451,46 @@ __EXTERN_INLINE void marvel_outl(u32 l, unsigned long addr)
* Memory functions. All accesses through linear space.
*/
extern unsigned long marvel_ioremap(unsigned long addr, unsigned long size);
extern void marvel_iounmap(unsigned long addr);
extern void __iomem *marvel_ioremap(unsigned long addr, unsigned long size);
extern void marvel_iounmap(volatile void __iomem *addr);
__EXTERN_INLINE u8 marvel_readb(unsigned long addr)
__EXTERN_INLINE u8 marvel_readb(const volatile void __iomem *addr)
{
if (!marvel_is_ioaddr(addr)) {
IOBUG(("Bad MEM addr %lx - reading -1\n", addr));
return (u8)-1;
}
return __kernel_ldbu(*(vucp)addr);
}
__EXTERN_INLINE u16 marvel_readw(unsigned long addr)
__EXTERN_INLINE u16 marvel_readw(const volatile void __iomem *addr)
{
if (!marvel_is_ioaddr(addr)) {
IOBUG(("Bad MEM addr %lx - reading -1\n", addr));
return (u16)-1;
}
return __kernel_ldwu(*(vusp)addr);
}
__EXTERN_INLINE u32 marvel_readl(unsigned long addr)
__EXTERN_INLINE u32 marvel_readl(const volatile void __iomem *addr)
{
if (!marvel_is_ioaddr(addr)) {
IOBUG(("Bad MEM addr %lx - reading -1\n", addr));
return (u32)-1;
}
return *(vuip)addr;
}
__EXTERN_INLINE u64 marvel_readq(unsigned long addr)
__EXTERN_INLINE u64 marvel_readq(const volatile void __iomem *addr)
{
if (!marvel_is_ioaddr(addr)) {
IOBUG(("Bad MEM addr %lx - reading -1\n", addr));
return (u64)-1;
}
return *(vulp)addr;
}
__EXTERN_INLINE void marvel_writeb(u8 b, unsigned long addr)
__EXTERN_INLINE void marvel_writeb(u8 b, volatile void __iomem *addr)
{
if (!marvel_is_ioaddr(addr)) {
IOBUG(("Bad MEM addr %lx - dropping store\n", addr));
return;
}
__kernel_stb(b, *(vucp)addr);
}
__EXTERN_INLINE void marvel_writew(u16 w, unsigned long addr)
__EXTERN_INLINE void marvel_writew(u16 w, volatile void __iomem *addr)
{
if (!marvel_is_ioaddr(addr)) {
IOBUG(("Bad MEM addr %lx - dropping store\n", addr));
return;
}
__kernel_stw(w, *(vusp)addr);
}
__EXTERN_INLINE void marvel_writel(u32 l, unsigned long addr)
__EXTERN_INLINE void marvel_writel(u32 l, volatile void __iomem *addr)
{
if (!marvel_is_ioaddr(addr)) {
IOBUG(("Bad MEM addr %lx - dropping store\n", addr));
return;
}
*(vuip)addr = l;
}
__EXTERN_INLINE void marvel_writeq(u64 q, unsigned long addr)
__EXTERN_INLINE void marvel_writeq(u64 q, volatile void __iomem *addr)
{
if (!marvel_is_ioaddr(addr)) {
IOBUG(("Bad MEM addr %lx - dropping store\n", addr));
return;
}
*(vulp)addr = q;
}
......@@ -540,19 +508,19 @@ __EXTERN_INLINE void marvel_writeq(u64 q, unsigned long addr)
#define __inb(p) marvel_inb((unsigned long)(p))
#define __inw(p) marvel_inw((unsigned long)(p))
#define __inl(p) marvel_inl((unsigned long)(p))
#define __outb(x,p) marvel_outb((x),(unsigned long)(p))
#define __outw(x,p) marvel_outw((x),(unsigned long)(p))
#define __outl(x,p) marvel_outl((x),(unsigned long)(p))
#define __readb(a) marvel_readb((unsigned long)(a))
#define __readw(a) marvel_readw((unsigned long)(a))
#define __readl(a) marvel_readl((unsigned long)(a))
#define __readq(a) marvel_readq((unsigned long)(a))
#define __writeb(x,a) marvel_writeb((x),(unsigned long)(a))
#define __writew(x,a) marvel_writew((x),(unsigned long)(a))
#define __writel(x,a) marvel_writel((x),(unsigned long)(a))
#define __writeq(x,a) marvel_writeq((x),(unsigned long)(a))
#define __ioremap(a,s) marvel_ioremap((unsigned long)(a),(s))
#define __iounmap(a) marvel_iounmap((unsigned long)(a))
#define __outb(x,p) marvel_outb(x,(unsigned long)(p))
#define __outw(x,p) marvel_outw(x,(unsigned long)(p))
#define __outl(x,p) marvel_outl(x,(unsigned long)(p))
#define __readb(a) marvel_readb(a)
#define __readw(a) marvel_readw(a)
#define __readl(a) marvel_readl(a)
#define __readq(a) marvel_readq(a)
#define __writeb(x,a) marvel_writeb(x,a)
#define __writew(x,a) marvel_writew(x,a)
#define __writel(x,a) marvel_writel(x,a)
#define __writeq(x,a) marvel_writeq(x,a)
#define __ioremap(a,s) marvel_ioremap(a,s)
#define __iounmap(a) marvel_iounmap(a)
#define __is_ioaddr(a) marvel_is_ioaddr((unsigned long)(a))
/* Disable direct inlining of these calls with the debug checks present. */
......
......@@ -211,11 +211,11 @@ struct el_MCPCIA_uncorrected_frame_mcheck {
* Unfortunately, we can't use BWIO with EV5, so for now, we always use SPARSE.
*/
#define vucp volatile unsigned char *
#define vusp volatile unsigned short *
#define vip volatile int *
#define vuip volatile unsigned int *
#define vulp volatile unsigned long *
#define vucp volatile unsigned char __force *
#define vusp volatile unsigned short __force *
#define vip volatile int __force *
#define vuip volatile unsigned int __force *
#define vulp volatile unsigned long __force *
__EXTERN_INLINE u8 mcpcia_inb(unsigned long in_addr)
{
......@@ -328,14 +328,14 @@ __EXTERN_INLINE void mcpcia_outl(u32 b, unsigned long in_addr)
*
*/
__EXTERN_INLINE unsigned long mcpcia_ioremap(unsigned long addr,
__EXTERN_INLINE void __iomem *mcpcia_ioremap(unsigned long addr,
unsigned long size
__attribute__((unused)))
{
return addr + MCPCIA_MEM_BIAS;
return (void __iomem *)(addr + MCPCIA_MEM_BIAS);
}
__EXTERN_INLINE void mcpcia_iounmap(unsigned long addr)
__EXTERN_INLINE void mcpcia_iounmap(volatile void __iomem *addr)
{
return;
}
......@@ -345,10 +345,10 @@ __EXTERN_INLINE int mcpcia_is_ioaddr(unsigned long addr)
return addr >= MCPCIA_SPARSE(0);
}
__EXTERN_INLINE u8 mcpcia_readb(unsigned long in_addr)
__EXTERN_INLINE u8 mcpcia_readb(const volatile void __iomem *xaddr)
{
unsigned long addr = in_addr & 0xffffffffUL;
unsigned long hose = in_addr & ~0xffffffffUL;
unsigned long addr = (unsigned long)xaddr & 0xffffffffUL;
unsigned long hose = (unsigned long)xaddr & ~0xffffffffUL;
unsigned long result, work;
#ifndef MCPCIA_ONE_HAE_WINDOW
......@@ -364,10 +364,10 @@ __EXTERN_INLINE u8 mcpcia_readb(unsigned long in_addr)
return __kernel_extbl(result, addr & 3);
}
__EXTERN_INLINE u16 mcpcia_readw(unsigned long in_addr)
__EXTERN_INLINE u16 mcpcia_readw(const volatile void __iomem *xaddr)
{
unsigned long addr = in_addr & 0xffffffffUL;
unsigned long hose = in_addr & ~0xffffffffUL;
unsigned long addr = (unsigned long)xaddr & 0xffffffffUL;
unsigned long hose = (unsigned long)xaddr & ~0xffffffffUL;
unsigned long result, work;
#ifndef MCPCIA_ONE_HAE_WINDOW
......@@ -383,10 +383,10 @@ __EXTERN_INLINE u16 mcpcia_readw(unsigned long in_addr)
return __kernel_extwl(result, addr & 3);
}
__EXTERN_INLINE void mcpcia_writeb(u8 b, unsigned long in_addr)
__EXTERN_INLINE void mcpcia_writeb(u8 b, volatile void __iomem *xaddr)
{
unsigned long addr = in_addr & 0xffffffffUL;
unsigned long hose = in_addr & ~0xffffffffUL;
unsigned long addr = (unsigned long)xaddr & 0xffffffffUL;
unsigned long hose = (unsigned long)xaddr & ~0xffffffffUL;
unsigned long w;
#ifndef MCPCIA_ONE_HAE_WINDOW
......@@ -396,15 +396,15 @@ __EXTERN_INLINE void mcpcia_writeb(u8 b, unsigned long in_addr)
#endif
addr = addr & MCPCIA_MEM_MASK;
w = __kernel_insbl(b, in_addr & 3);
w = __kernel_insbl(b, addr & 3);
hose = hose - MCPCIA_DENSE(4) + MCPCIA_SPARSE(4);
*(vuip) ((addr << 5) + hose + 0x00) = w;
}
__EXTERN_INLINE void mcpcia_writew(u16 b, unsigned long in_addr)
__EXTERN_INLINE void mcpcia_writew(u16 b, volatile void __iomem *xaddr)
{
unsigned long addr = in_addr & 0xffffffffUL;
unsigned long hose = in_addr & ~0xffffffffUL;
unsigned long addr = (unsigned long)xaddr & 0xffffffffUL;
unsigned long hose = (unsigned long)xaddr & ~0xffffffffUL;
unsigned long w;
#ifndef MCPCIA_ONE_HAE_WINDOW
......@@ -414,27 +414,27 @@ __EXTERN_INLINE void mcpcia_writew(u16 b, unsigned long in_addr)
#endif
addr = addr & MCPCIA_MEM_MASK;
w = __kernel_inswl(b, in_addr & 3);
w = __kernel_inswl(b, addr & 3);
hose = hose - MCPCIA_DENSE(4) + MCPCIA_SPARSE(4);
*(vuip) ((addr << 5) + hose + 0x08) = w;
}
__EXTERN_INLINE u32 mcpcia_readl(unsigned long addr)
__EXTERN_INLINE u32 mcpcia_readl(const volatile void __iomem *addr)
{
return (*(vuip)addr) & 0xffffffff;
return *(vuip)addr;
}
__EXTERN_INLINE u64 mcpcia_readq(unsigned long addr)
__EXTERN_INLINE u64 mcpcia_readq(const volatile void __iomem *addr)
{
return *(vulp)addr;
}
__EXTERN_INLINE void mcpcia_writel(u32 b, unsigned long addr)
__EXTERN_INLINE void mcpcia_writel(u32 b, volatile void __iomem *addr)
{
*(vuip)addr = b;
}
__EXTERN_INLINE void mcpcia_writeq(u64 b, unsigned long addr)
__EXTERN_INLINE void mcpcia_writeq(u64 b, volatile void __iomem *addr)
{
*(vulp)addr = b;
}
......@@ -450,25 +450,25 @@ __EXTERN_INLINE void mcpcia_writeq(u64 b, unsigned long addr)
#define __inb(p) mcpcia_inb((unsigned long)(p))
#define __inw(p) mcpcia_inw((unsigned long)(p))
#define __inl(p) mcpcia_inl((unsigned long)(p))
#define __outb(x,p) mcpcia_outb((x),(unsigned long)(p))
#define __outw(x,p) mcpcia_outw((x),(unsigned long)(p))
#define __outl(x,p) mcpcia_outl((x),(unsigned long)(p))
#define __readb(a) mcpcia_readb((unsigned long)(a))
#define __readw(a) mcpcia_readw((unsigned long)(a))
#define __readl(a) mcpcia_readl((unsigned long)(a))
#define __readq(a) mcpcia_readq((unsigned long)(a))
#define __writeb(x,a) mcpcia_writeb((x),(unsigned long)(a))
#define __writew(x,a) mcpcia_writew((x),(unsigned long)(a))
#define __writel(x,a) mcpcia_writel((x),(unsigned long)(a))
#define __writeq(x,a) mcpcia_writeq((x),(unsigned long)(a))
#define __ioremap(a,s) mcpcia_ioremap((unsigned long)(a),(s))
#define __iounmap(a) mcpcia_iounmap((unsigned long)(a))
#define __outb(x,p) mcpcia_outb(x,(unsigned long)(p))
#define __outw(x,p) mcpcia_outw(x,(unsigned long)(p))
#define __outl(x,p) mcpcia_outl(x,(unsigned long)(p))
#define __readb(a) mcpcia_readb(a)
#define __readw(a) mcpcia_readw(a)
#define __readl(a) mcpcia_readl(a)
#define __readq(a) mcpcia_readq(a)
#define __writeb(x,a) mcpcia_writeb(x,a)
#define __writew(x,a) mcpcia_writew(x,a)
#define __writel(x,a) mcpcia_writel(x,a)
#define __writeq(x,a) mcpcia_writeq(x,a)
#define __ioremap(a,s) mcpcia_ioremap(a,s)
#define __iounmap(a) mcpcia_iounmap(a)
#define __is_ioaddr(a) mcpcia_is_ioaddr((unsigned long)(a))
#define __raw_readl(a) __readl(a)
#define __raw_readq(a) __readq(a)
#define __raw_writel(v,a) __writel((v),(a))
#define __raw_writeq(v,a) __writeq((v),(a))
#define __raw_writel(v,a) __writel(v,a)
#define __raw_writeq(v,a) __writeq(v,a)
#endif /* __WANT_IO_DEF */
......
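MCPCIA keeps the hose in the bits above the low 32 of the cookie: `mcpcia_readb` and friends split it with `& 0xffffffffUL` and `& ~0xffffffffUL` before doing the sparse arithmetic, as the hunks above show. A trivial demo of that unpacking, with a made-up hose base:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint64_t hose_base = 0xf100000000000000ULL;  /* made up */
            uint64_t cookie    = hose_base | 0x00123456; /* hose | offset */

            uint64_t offset = cookie &  0xffffffffULL;   /* low 32 bits */
            uint64_t hose   = cookie & ~0xffffffffULL;   /* everything above */

            printf("hose   = %#llx\n", (unsigned long long)hose);
            printf("offset = %#llx\n", (unsigned long long)offset);
            return 0;
    }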
......@@ -63,10 +63,10 @@ struct el_POLARIS_sysdata_mcheck {
* However, we will support only the BWX form.
*/
#define vucp volatile unsigned char *
#define vusp volatile unsigned short *
#define vuip volatile unsigned int *
#define vulp volatile unsigned long *
#define vucp volatile unsigned char __force *
#define vusp volatile unsigned short __force *
#define vuip volatile unsigned int __force *
#define vulp volatile unsigned long __force *
__EXTERN_INLINE u8 polaris_inb(unsigned long addr)
{
......@@ -113,54 +113,54 @@ __EXTERN_INLINE void polaris_outl(u32 b, unsigned long addr)
* We will only support DENSE access via BWX insns.
*/
__EXTERN_INLINE u8 polaris_readb(unsigned long addr)
__EXTERN_INLINE u8 polaris_readb(const volatile void __iomem *addr)
{
return __kernel_ldbu(*(vucp)addr);
}
__EXTERN_INLINE u16 polaris_readw(unsigned long addr)
__EXTERN_INLINE u16 polaris_readw(const volatile void __iomem *addr)
{
return __kernel_ldwu(*(vusp)addr);
}
__EXTERN_INLINE u32 polaris_readl(unsigned long addr)
__EXTERN_INLINE u32 polaris_readl(const volatile void __iomem *addr)
{
return (*(vuip)addr) & 0xffffffff;
return *(vuip)addr;
}
__EXTERN_INLINE u64 polaris_readq(unsigned long addr)
__EXTERN_INLINE u64 polaris_readq(const volatile void __iomem *addr)
{
return *(vulp)addr;
}
__EXTERN_INLINE void polaris_writeb(u8 b, unsigned long addr)
__EXTERN_INLINE void polaris_writeb(u8 b, volatile void __iomem *addr)
{
__kernel_stb(b, *(vucp)addr);
}
__EXTERN_INLINE void polaris_writew(u16 b, unsigned long addr)
__EXTERN_INLINE void polaris_writew(u16 b, volatile void __iomem *addr)
{
__kernel_stw(b, *(vusp)addr);
}
__EXTERN_INLINE void polaris_writel(u32 b, unsigned long addr)
__EXTERN_INLINE void polaris_writel(u32 b, volatile void __iomem *addr)
{
*(vuip)addr = b;
}
__EXTERN_INLINE void polaris_writeq(u64 b, unsigned long addr)
__EXTERN_INLINE void polaris_writeq(u64 b, volatile void __iomem *addr)
{
*(vulp)addr = b;
}
__EXTERN_INLINE unsigned long polaris_ioremap(unsigned long addr,
__EXTERN_INLINE void __iomem *polaris_ioremap(unsigned long addr,
unsigned long size
__attribute__((unused)))
{
return addr + POLARIS_DENSE_MEM_BASE;
return (void __iomem *)(addr + POLARIS_DENSE_MEM_BASE);
}
__EXTERN_INLINE void polaris_iounmap(unsigned long addr)
__EXTERN_INLINE void polaris_iounmap(volatile void __iomem *addr)
{
return;
}
......@@ -180,35 +180,35 @@ __EXTERN_INLINE int polaris_is_ioaddr(unsigned long addr)
#define __inb(p) polaris_inb((unsigned long)(p))
#define __inw(p) polaris_inw((unsigned long)(p))
#define __inl(p) polaris_inl((unsigned long)(p))
#define __outb(x,p) polaris_outb((x),(unsigned long)(p))
#define __outw(x,p) polaris_outw((x),(unsigned long)(p))
#define __outl(x,p) polaris_outl((x),(unsigned long)(p))
#define __readb(a) polaris_readb((unsigned long)(a))
#define __readw(a) polaris_readw((unsigned long)(a))
#define __readl(a) polaris_readl((unsigned long)(a))
#define __readq(a) polaris_readq((unsigned long)(a))
#define __writeb(x,a) polaris_writeb((x),(unsigned long)(a))
#define __writew(x,a) polaris_writew((x),(unsigned long)(a))
#define __writel(x,a) polaris_writel((x),(unsigned long)(a))
#define __writeq(x,a) polaris_writeq((x),(unsigned long)(a))
#define __ioremap(a,s) polaris_ioremap((unsigned long)(a),(s))
#define __iounmap(a) polaris_iounmap((unsigned long)(a))
#define __outb(x,p) polaris_outb(x,(unsigned long)(p))
#define __outw(x,p) polaris_outw(x,(unsigned long)(p))
#define __outl(x,p) polaris_outl(x,(unsigned long)(p))
#define __readb(a) polaris_readb(a)
#define __readw(a) polaris_readw(a)
#define __readl(a) polaris_readl(a)
#define __readq(a) polaris_readq(a)
#define __writeb(x,a) polaris_writeb(x,a)
#define __writew(x,a) polaris_writew(x,a)
#define __writel(x,a) polaris_writel(x,a)
#define __writeq(x,a) polaris_writeq(x,a)
#define __ioremap(a,s) polaris_ioremap(a,s)
#define __iounmap(a) polaris_iounmap(a)
#define __is_ioaddr(a) polaris_is_ioaddr((unsigned long)(a))
#define inb(p) __inb(p)
#define inw(p) __inw(p)
#define inl(p) __inl(p)
#define outb(x,p) __outb((x),(p))
#define outw(x,p) __outw((x),(p))
#define outl(x,p) __outl((x),(p))
#define outb(x,p) __outb(x,p)
#define outw(x,p) __outw(x,p)
#define outl(x,p) __outl(x,p)
#define __raw_readb(a) __readb(a)
#define __raw_readw(a) __readw(a)
#define __raw_readl(a) __readl(a)
#define __raw_readq(a) __readq(a)
#define __raw_writeb(v,a) __writeb((v),(a))
#define __raw_writew(v,a) __writew((v),(a))
#define __raw_writel(v,a) __writel((v),(a))
#define __raw_writeq(v,a) __writeq((v),(a))
#define __raw_writeb(v,a) __writeb(v,a)
#define __raw_writew(v,a) __writew(v,a)
#define __raw_writel(v,a) __writel(v,a)
#define __raw_writeq(v,a) __writeq(v,a)
#endif /* __WANT_IO_DEF */
......
......@@ -199,8 +199,8 @@ struct el_t2_procdata_mcheck {
struct el_t2_logout_header {
unsigned int elfl_size; /* size in bytes of logout area. */
int elfl_sbz1:31; /* Should be zero. */
char elfl_retry:1; /* Retry flag. */
unsigned int elfl_sbz1:31; /* Should be zero. */
unsigned int elfl_retry:1; /* Retry flag. */
unsigned int elfl_procoffset; /* Processor-specific offset. */
unsigned int elfl_sysoffset; /* Offset of system-specific. */
unsigned int elfl_error_type; /* PAL error type code. */
......@@ -438,8 +438,9 @@ __EXTERN_INLINE void t2_outl(u32 b, unsigned long addr)
static spinlock_t t2_hae_lock = SPIN_LOCK_UNLOCKED;
__EXTERN_INLINE u8 t2_readb(unsigned long addr)
__EXTERN_INLINE u8 t2_readb(const volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long result, msb;
unsigned long flags;
spin_lock_irqsave(&t2_hae_lock, flags);
......@@ -451,8 +452,9 @@ __EXTERN_INLINE u8 t2_readb(unsigned long addr)
return __kernel_extbl(result, addr & 3);
}
__EXTERN_INLINE u16 t2_readw(unsigned long addr)
__EXTERN_INLINE u16 t2_readw(const volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long result, msb;
unsigned long flags;
spin_lock_irqsave(&t2_hae_lock, flags);
......@@ -468,8 +470,9 @@ __EXTERN_INLINE u16 t2_readw(unsigned long addr)
* On SABLE with T2, we must use SPARSE memory even for 32-bit access,
* because we cannot access all of DENSE without changing its HAE.
*/
__EXTERN_INLINE u32 t2_readl(unsigned long addr)
__EXTERN_INLINE u32 t2_readl(const volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long result, msb;
unsigned long flags;
spin_lock_irqsave(&t2_hae_lock, flags);
......@@ -481,8 +484,9 @@ __EXTERN_INLINE u32 t2_readl(unsigned long addr)
return result & 0xffffffffUL;
}
__EXTERN_INLINE u64 t2_readq(unsigned long addr)
__EXTERN_INLINE u64 t2_readq(const volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long r0, r1, work, msb;
unsigned long flags;
spin_lock_irqsave(&t2_hae_lock, flags);
......@@ -496,8 +500,9 @@ __EXTERN_INLINE u64 t2_readq(unsigned long addr)
return r1 << 32 | r0;
}
__EXTERN_INLINE void t2_writeb(u8 b, unsigned long addr)
__EXTERN_INLINE void t2_writeb(u8 b, volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long msb, w;
unsigned long flags;
spin_lock_irqsave(&t2_hae_lock, flags);
......@@ -509,8 +514,9 @@ __EXTERN_INLINE void t2_writeb(u8 b, unsigned long addr)
spin_unlock_irqrestore(&t2_hae_lock, flags);
}
__EXTERN_INLINE void t2_writew(u16 b, unsigned long addr)
__EXTERN_INLINE void t2_writew(u16 b, volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long msb, w;
unsigned long flags;
spin_lock_irqsave(&t2_hae_lock, flags);
......@@ -526,8 +532,9 @@ __EXTERN_INLINE void t2_writew(u16 b, unsigned long addr)
* On SABLE with T2, we must use SPARSE memory even for 32-bit access,
* because we cannot access all of DENSE without changing its HAE.
*/
__EXTERN_INLINE void t2_writel(u32 b, unsigned long addr)
__EXTERN_INLINE void t2_writel(u32 b, volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long msb;
unsigned long flags;
spin_lock_irqsave(&t2_hae_lock, flags);
......@@ -538,8 +545,9 @@ __EXTERN_INLINE void t2_writel(u32 b, unsigned long addr)
spin_unlock_irqrestore(&t2_hae_lock, flags);
}
__EXTERN_INLINE void t2_writeq(u64 b, unsigned long addr)
__EXTERN_INLINE void t2_writeq(u64 b, volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long msb, work;
unsigned long flags;
spin_lock_irqsave(&t2_hae_lock, flags);
......@@ -552,14 +560,14 @@ __EXTERN_INLINE void t2_writeq(u64 b, unsigned long addr)
spin_unlock_irqrestore(&t2_hae_lock, flags);
}
__EXTERN_INLINE unsigned long t2_ioremap(unsigned long addr,
__EXTERN_INLINE void __iomem *t2_ioremap(unsigned long addr,
unsigned long size
__attribute__((unused)))
{
return addr;
return (void __iomem *)addr;
}
__EXTERN_INLINE void t2_iounmap(unsigned long addr)
__EXTERN_INLINE void t2_iounmap(volatile void __iomem *addr)
{
return;
}
......@@ -577,19 +585,19 @@ __EXTERN_INLINE int t2_is_ioaddr(unsigned long addr)
#define __inb(p) t2_inb((unsigned long)(p))
#define __inw(p) t2_inw((unsigned long)(p))
#define __inl(p) t2_inl((unsigned long)(p))
#define __outb(x,p) t2_outb((x),(unsigned long)(p))
#define __outw(x,p) t2_outw((x),(unsigned long)(p))
#define __outl(x,p) t2_outl((x),(unsigned long)(p))
#define __readb(a) t2_readb((unsigned long)(a))
#define __readw(a) t2_readw((unsigned long)(a))
#define __readl(a) t2_readl((unsigned long)(a))
#define __readq(a) t2_readq((unsigned long)(a))
#define __writeb(x,a) t2_writeb((x),(unsigned long)(a))
#define __writew(x,a) t2_writew((x),(unsigned long)(a))
#define __writel(x,a) t2_writel((x),(unsigned long)(a))
#define __writeq(x,a) t2_writeq((x),(unsigned long)(a))
#define __ioremap(a,s) t2_ioremap((unsigned long)(a),(s))
#define __iounmap(a) t2_iounmap((unsigned long)(a))
#define __outb(x,p) t2_outb(x,(unsigned long)(p))
#define __outw(x,p) t2_outw(x,(unsigned long)(p))
#define __outl(x,p) t2_outl(x,(unsigned long)(p))
#define __readb(a) t2_readb(a)
#define __readw(a) t2_readw(a)
#define __readl(a) t2_readl(a)
#define __readq(a) t2_readq(a)
#define __writeb(x,a) t2_writeb(x,a)
#define __writew(x,a) t2_writew(x,a)
#define __writel(x,a) t2_writel(x,a)
#define __writeq(x,a) t2_writeq(x,a)
#define __ioremap(a,s) t2_ioremap(a,s)
#define __iounmap(a) t2_iounmap(a)
#define __is_ioaddr(a) t2_is_ioaddr((unsigned long)(a))
#endif /* __WANT_IO_DEF */
......
......@@ -430,50 +430,50 @@ __EXTERN_INLINE void titan_outl(u32 b, unsigned long addr)
* Memory functions. all accesses are done through linear space.
*/
extern unsigned long titan_ioremap(unsigned long addr, unsigned long size);
extern void titan_iounmap(unsigned long addr);
extern void __iomem *titan_ioremap(unsigned long addr, unsigned long size);
extern void titan_iounmap(volatile void __iomem *addr);
__EXTERN_INLINE int titan_is_ioaddr(unsigned long addr)
{
return addr >= TITAN_BASE;
}
__EXTERN_INLINE u8 titan_readb(unsigned long addr)
__EXTERN_INLINE u8 titan_readb(const volatile void __iomem *addr)
{
return __kernel_ldbu(*(vucp)addr);
}
__EXTERN_INLINE u16 titan_readw(unsigned long addr)
__EXTERN_INLINE u16 titan_readw(const volatile void __iomem *addr)
{
return __kernel_ldwu(*(vusp)addr);
}
__EXTERN_INLINE u32 titan_readl(unsigned long addr)
__EXTERN_INLINE u32 titan_readl(const volatile void __iomem *addr)
{
return (*(vuip)addr) & 0xffffffff;
}
__EXTERN_INLINE u64 titan_readq(unsigned long addr)
__EXTERN_INLINE u64 titan_readq(const volatile void __iomem *addr)
{
return *(vulp)addr;
}
__EXTERN_INLINE void titan_writeb(u8 b, unsigned long addr)
__EXTERN_INLINE void titan_writeb(u8 b, volatile void __iomem *addr)
{
__kernel_stb(b, *(vucp)addr);
}
__EXTERN_INLINE void titan_writew(u16 b, unsigned long addr)
__EXTERN_INLINE void titan_writew(u16 b, volatile void __iomem *addr)
{
__kernel_stw(b, *(vusp)addr);
}
__EXTERN_INLINE void titan_writel(u32 b, unsigned long addr)
__EXTERN_INLINE void titan_writel(u32 b, volatile void __iomem *addr)
{
*(vuip)addr = b;
}
__EXTERN_INLINE void titan_writeq(u64 b, unsigned long addr)
__EXTERN_INLINE void titan_writeq(u64 b, volatile void __iomem *addr)
{
*(vulp)addr = b;
}
......@@ -488,36 +488,36 @@ __EXTERN_INLINE void titan_writeq(u64 b, unsigned long addr)
#define __inb(p) titan_inb((unsigned long)(p))
#define __inw(p) titan_inw((unsigned long)(p))
#define __inl(p) titan_inl((unsigned long)(p))
#define __outb(x,p) titan_outb((x),(unsigned long)(p))
#define __outw(x,p) titan_outw((x),(unsigned long)(p))
#define __outl(x,p) titan_outl((x),(unsigned long)(p))
#define __readb(a) titan_readb((unsigned long)(a))
#define __readw(a) titan_readw((unsigned long)(a))
#define __readl(a) titan_readl((unsigned long)(a))
#define __readq(a) titan_readq((unsigned long)(a))
#define __writeb(x,a) titan_writeb((x),(unsigned long)(a))
#define __writew(x,a) titan_writew((x),(unsigned long)(a))
#define __writel(x,a) titan_writel((x),(unsigned long)(a))
#define __writeq(x,a) titan_writeq((x),(unsigned long)(a))
#define __ioremap(a,s) titan_ioremap((unsigned long)(a),(s))
#define __iounmap(a) titan_iounmap((unsigned long)(a))
#define __outb(x,p) titan_outb(x,(unsigned long)(p))
#define __outw(x,p) titan_outw(x,(unsigned long)(p))
#define __outl(x,p) titan_outl(x,(unsigned long)(p))
#define __readb(a) titan_readb(a)
#define __readw(a) titan_readw(a)
#define __readl(a) titan_readl(a)
#define __readq(a) titan_readq(a)
#define __writeb(x,a) titan_writeb(x,a)
#define __writew(x,a) titan_writew(x,a)
#define __writel(x,a) titan_writel(x,a)
#define __writeq(x,a) titan_writeq(x,a)
#define __ioremap(a,s) titan_ioremap(a,s)
#define __iounmap(a) titan_iounmap(a)
#define __is_ioaddr(a) titan_is_ioaddr((unsigned long)(a))
#define inb(port) __inb((port))
#define inw(port) __inw((port))
#define inl(port) __inl((port))
#define outb(v, port) __outb((v),(port))
#define outw(v, port) __outw((v),(port))
#define outl(v, port) __outl((v),(port))
#define __raw_readb(a) __readb((unsigned long)(a))
#define __raw_readw(a) __readw((unsigned long)(a))
#define __raw_readl(a) __readl((unsigned long)(a))
#define __raw_readq(a) __readq((unsigned long)(a))
#define __raw_writeb(v,a) __writeb((v),(unsigned long)(a))
#define __raw_writew(v,a) __writew((v),(unsigned long)(a))
#define __raw_writel(v,a) __writel((v),(unsigned long)(a))
#define __raw_writeq(v,a) __writeq((v),(unsigned long)(a))
#define inb(p) __inb(p)
#define inw(p) __inw(p)
#define inl(p) __inl(p)
#define outb(v,p) __outb(v,p)
#define outw(v,p) __outw(v,p)
#define outl(v,p) __outl(v,p)
#define __raw_readb(a) __readb(a)
#define __raw_readw(a) __readw(a)
#define __raw_readl(a) __readl(a)
#define __raw_readq(a) __readq(a)
#define __raw_writeb(v,a) __writeb(v,a)
#define __raw_writew(v,a) __writew(v,a)
#define __raw_writel(v,a) __writel(v,a)
#define __raw_writeq(v,a) __writeq(v,a)
#endif /* __WANT_IO_DEF */
......
......@@ -299,10 +299,10 @@ struct el_TSUNAMI_sysdata_mcheck {
* can only use linear accesses to get at PCI memory and I/O spaces.
*/
#define vucp volatile unsigned char *
#define vusp volatile unsigned short *
#define vuip volatile unsigned int *
#define vulp volatile unsigned long *
#define vucp volatile unsigned char __force *
#define vusp volatile unsigned short __force *
#define vuip volatile unsigned int __force *
#define vulp volatile unsigned long __force *
__EXTERN_INLINE u8 tsunami_inb(unsigned long addr)
{
@@ -352,14 +352,14 @@ __EXTERN_INLINE void tsunami_outl(u32 b, unsigned long addr)
* Memory functions. all accesses are done through linear space.
*/
__EXTERN_INLINE unsigned long tsunami_ioremap(unsigned long addr,
__EXTERN_INLINE void __iomem *tsunami_ioremap(unsigned long addr,
unsigned long size
__attribute__((unused)))
{
return addr + TSUNAMI_MEM_BIAS;
return (void __iomem *)(addr + TSUNAMI_MEM_BIAS);
}
__EXTERN_INLINE void tsunami_iounmap(unsigned long addr)
__EXTERN_INLINE void tsunami_iounmap(volatile void __iomem *addr)
{
return;
}
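On this direct-mapped chipset, ioremap is plain address arithmetic wrapped in an __iomem cast and iounmap has nothing to release. A sketch of the same shape, with DENSE_MEM_BIAS standing in for the chipset's real dense-space offset:

/* DENSE_MEM_BIAS is a placeholder constant, not the real TSUNAMI value. */
#define DENSE_MEM_BIAS	0x10000000000UL

static void __iomem *dense_ioremap(unsigned long bus_addr)
{
	/* No page tables are touched: the cookie is just bus_addr + bias. */
	return (void __iomem *)(bus_addr + DENSE_MEM_BIAS);
}

static void dense_iounmap(volatile void __iomem *addr)
{
	/* Nothing to undo for an arithmetic mapping. */
}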
@@ -369,42 +369,42 @@ __EXTERN_INLINE int tsunami_is_ioaddr(unsigned long addr)
return addr >= TSUNAMI_BASE;
}
__EXTERN_INLINE u8 tsunami_readb(unsigned long addr)
__EXTERN_INLINE u8 tsunami_readb(const volatile void __iomem *addr)
{
return __kernel_ldbu(*(vucp)addr);
}
__EXTERN_INLINE u16 tsunami_readw(unsigned long addr)
__EXTERN_INLINE u16 tsunami_readw(const volatile void __iomem *addr)
{
return __kernel_ldwu(*(vusp)addr);
}
__EXTERN_INLINE u32 tsunami_readl(unsigned long addr)
__EXTERN_INLINE u32 tsunami_readl(const volatile void __iomem *addr)
{
return *(vuip)addr;
}
__EXTERN_INLINE u64 tsunami_readq(unsigned long addr)
__EXTERN_INLINE u64 tsunami_readq(const volatile void __iomem *addr)
{
return *(vulp)addr;
}
__EXTERN_INLINE void tsunami_writeb(u8 b, unsigned long addr)
__EXTERN_INLINE void tsunami_writeb(u8 b, volatile void __iomem *addr)
{
__kernel_stb(b, *(vucp)addr);
}
__EXTERN_INLINE void tsunami_writew(u16 b, unsigned long addr)
__EXTERN_INLINE void tsunami_writew(u16 b, volatile void __iomem *addr)
{
__kernel_stw(b, *(vusp)addr);
}
__EXTERN_INLINE void tsunami_writel(u32 b, unsigned long addr)
__EXTERN_INLINE void tsunami_writel(u32 b, volatile void __iomem *addr)
{
*(vuip)addr = b;
}
__EXTERN_INLINE void tsunami_writeq(u64 b, unsigned long addr)
__EXTERN_INLINE void tsunami_writeq(u64 b, volatile void __iomem *addr)
{
*(vulp)addr = b;
}
@@ -419,35 +419,35 @@ __EXTERN_INLINE void tsunami_writeq(u64 b, unsigned long addr)
#define __inb(p) tsunami_inb((unsigned long)(p))
#define __inw(p) tsunami_inw((unsigned long)(p))
#define __inl(p) tsunami_inl((unsigned long)(p))
#define __outb(x,p) tsunami_outb((x),(unsigned long)(p))
#define __outw(x,p) tsunami_outw((x),(unsigned long)(p))
#define __outl(x,p) tsunami_outl((x),(unsigned long)(p))
#define __readb(a) tsunami_readb((unsigned long)(a))
#define __readw(a) tsunami_readw((unsigned long)(a))
#define __readl(a) tsunami_readl((unsigned long)(a))
#define __readq(a) tsunami_readq((unsigned long)(a))
#define __writeb(x,a) tsunami_writeb((x),(unsigned long)(a))
#define __writew(x,a) tsunami_writew((x),(unsigned long)(a))
#define __writel(x,a) tsunami_writel((x),(unsigned long)(a))
#define __writeq(x,a) tsunami_writeq((x),(unsigned long)(a))
#define __ioremap(a,s) tsunami_ioremap((unsigned long)(a),(s))
#define __iounmap(a) tsunami_iounmap((unsigned long)(a))
#define __outb(x,p) tsunami_outb(x,(unsigned long)(p))
#define __outw(x,p) tsunami_outw(x,(unsigned long)(p))
#define __outl(x,p) tsunami_outl(x,(unsigned long)(p))
#define __readb(a) tsunami_readb(a)
#define __readw(a) tsunami_readw(a)
#define __readl(a) tsunami_readl(a)
#define __readq(a) tsunami_readq(a)
#define __writeb(x,a) tsunami_writeb(x,a)
#define __writew(x,a) tsunami_writew(x,a)
#define __writel(x,a) tsunami_writel(x,a)
#define __writeq(x,a) tsunami_writeq(x,a)
#define __ioremap(a,s) tsunami_ioremap(a,s)
#define __iounmap(a) tsunami_iounmap(a)
#define __is_ioaddr(a) tsunami_is_ioaddr((unsigned long)(a))
#define inb(p) __inb(p)
#define inw(p) __inw(p)
#define inl(p) __inl(p)
#define outb(x,p) __outb((x),(p))
#define outw(x,p) __outw((x),(p))
#define outl(x,p) __outl((x),(p))
#define outb(x,p) __outb(x,p)
#define outw(x,p) __outw(x,p)
#define outl(x,p) __outl(x,p)
#define __raw_readb(a) __readb(a)
#define __raw_readw(a) __readw(a)
#define __raw_readl(a) __readl(a)
#define __raw_readq(a) __readq(a)
#define __raw_writeb(v,a) __writeb((v),(a))
#define __raw_writew(v,a) __writew((v),(a))
#define __raw_writel(v,a) __writel((v),(a))
#define __raw_writeq(v,a) __writeq((v),(a))
#define __raw_writeb(v,a) __writeb(v,a)
#define __raw_writew(v,a) __writew(v,a)
#define __raw_writel(v,a) __writel(v,a)
#define __raw_writeq(v,a) __writeq(v,a)
#endif /* __WANT_IO_DEF */
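The __raw_* forms above map straight onto the chipset accessors, while the plain readl/writel family carries the usual ordering guarantees. A hedged sketch of how a driver might mix the two; the register offsets are made up:

/* Illustrative only: raw store, explicit barrier, then a non-raw doorbell. */
static void post_buffer(void __iomem *regs, u32 desc)
{
	__raw_writel(desc, regs + 0x10);	/* raw store, no barrier implied here */
	mb();					/* order it before ringing the doorbell */
	writel(1, regs + 0x14);			/* non-raw accessor with the usual guarantees */
}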
......
@@ -326,14 +326,14 @@ __EXTERN_INLINE void wildfire_outl(u32 b, unsigned long addr)
* Memory functions. all accesses are done through linear space.
*/
__EXTERN_INLINE unsigned long wildfire_ioremap(unsigned long addr,
__EXTERN_INLINE void __iomem *wildfire_ioremap(unsigned long addr,
unsigned long size
__attribute__((unused)))
{
return addr + WILDFIRE_MEM_BIAS;
return (void __iomem *)(addr + WILDFIRE_MEM_BIAS);
}
__EXTERN_INLINE void wildfire_iounmap(unsigned long addr)
__EXTERN_INLINE void wildfire_iounmap(volatile void __iomem *addr)
{
return;
}
@@ -343,42 +343,42 @@ __EXTERN_INLINE int wildfire_is_ioaddr(unsigned long addr)
return addr >= WILDFIRE_BASE;
}
__EXTERN_INLINE u8 wildfire_readb(unsigned long addr)
__EXTERN_INLINE u8 wildfire_readb(const volatile void __iomem *addr)
{
return __kernel_ldbu(*(vucp)addr);
}
__EXTERN_INLINE u16 wildfire_readw(unsigned long addr)
__EXTERN_INLINE u16 wildfire_readw(const volatile void __iomem *addr)
{
return __kernel_ldwu(*(vusp)addr);
}
__EXTERN_INLINE u32 wildfire_readl(unsigned long addr)
__EXTERN_INLINE u32 wildfire_readl(const volatile void __iomem *addr)
{
return (*(vuip)addr) & 0xffffffff;
return *(vuip)addr;
}
__EXTERN_INLINE u64 wildfire_readq(unsigned long addr)
__EXTERN_INLINE u64 wildfire_readq(const volatile void __iomem *addr)
{
return *(vulp)addr;
}
__EXTERN_INLINE void wildfire_writeb(u8 b, unsigned long addr)
__EXTERN_INLINE void wildfire_writeb(u8 b, volatile void __iomem *addr)
{
__kernel_stb(b, *(vucp)addr);
}
__EXTERN_INLINE void wildfire_writew(u16 b, unsigned long addr)
__EXTERN_INLINE void wildfire_writew(u16 b, volatile void __iomem *addr)
{
__kernel_stw(b, *(vusp)addr);
}
__EXTERN_INLINE void wildfire_writel(u32 b, unsigned long addr)
__EXTERN_INLINE void wildfire_writel(u32 b, volatile void __iomem *addr)
{
*(vuip)addr = b;
}
__EXTERN_INLINE void wildfire_writeq(u64 b, unsigned long addr)
__EXTERN_INLINE void wildfire_writeq(u64 b, volatile void __iomem *addr)
{
*(vulp)addr = b;
}
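Dropping the explicit mask in wildfire_readl above changes nothing: a 32-bit load already yields exactly 32 bits, as the small sketch below illustrates.

/* Sketch: the mask was redundant for a 32-bit dereference. */
static u32 read32(const volatile unsigned int *p)
{
	return *p;		/* same value as (*p) & 0xffffffff */
}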
@@ -393,35 +393,35 @@ __EXTERN_INLINE void wildfire_writeq(u64 b, unsigned long addr)
#define __inb(p) wildfire_inb((unsigned long)(p))
#define __inw(p) wildfire_inw((unsigned long)(p))
#define __inl(p) wildfire_inl((unsigned long)(p))
#define __outb(x,p) wildfire_outb((x),(unsigned long)(p))
#define __outw(x,p) wildfire_outw((x),(unsigned long)(p))
#define __outl(x,p) wildfire_outl((x),(unsigned long)(p))
#define __readb(a) wildfire_readb((unsigned long)(a))
#define __readw(a) wildfire_readw((unsigned long)(a))
#define __readl(a) wildfire_readl((unsigned long)(a))
#define __readq(a) wildfire_readq((unsigned long)(a))
#define __writeb(x,a) wildfire_writeb((x),(unsigned long)(a))
#define __writew(x,a) wildfire_writew((x),(unsigned long)(a))
#define __writel(x,a) wildfire_writel((x),(unsigned long)(a))
#define __writeq(x,a) wildfire_writeq((x),(unsigned long)(a))
#define __ioremap(a,s) wildfire_ioremap((unsigned long)(a),(s))
#define __iounmap(a) wildfire_iounmap((unsigned long)(a))
#define __outb(x,p) wildfire_outb(x,(unsigned long)(p))
#define __outw(x,p) wildfire_outw(x,(unsigned long)(p))
#define __outl(x,p) wildfire_outl(x,(unsigned long)(p))
#define __readb(a) wildfire_readb(a)
#define __readw(a) wildfire_readw(a)
#define __readl(a) wildfire_readl(a)
#define __readq(a) wildfire_readq(a)
#define __writeb(x,a) wildfire_writeb(x,a)
#define __writew(x,a) wildfire_writew(x,a)
#define __writel(x,a) wildfire_writel(x,a)
#define __writeq(x,a) wildfire_writeq(x,a)
#define __ioremap(a,s) wildfire_ioremap(a,s)
#define __iounmap(a) wildfire_iounmap(a)
#define __is_ioaddr(a) wildfire_is_ioaddr((unsigned long)(a))
#define inb(p) __inb(p)
#define inw(p) __inw(p)
#define inl(p) __inl(p)
#define outb(x,p) __outb((x),(p))
#define outw(x,p) __outw((x),(p))
#define outl(x,p) __outl((x),(p))
#define outb(x,p) __outb(x,p)
#define outw(x,p) __outw(x,p)
#define outl(x,p) __outl(x,p)
#define __raw_readb(a) __readb(a)
#define __raw_readw(a) __readw(a)
#define __raw_readl(a) __readl(a)
#define __raw_readq(a) __readq(a)
#define __raw_writeb(v,a) __writeb((v),(a))
#define __raw_writew(v,a) __writew((v),(a))
#define __raw_writel(v,a) __writel((v),(a))
#define __raw_writeq(v,a) __writeq((v),(a))
#define __raw_writeb(v,a) __writeb(v,a)
#define __raw_writew(v,a) __writew(v,a)
#define __raw_writel(v,a) __writel(v,a)
#define __raw_writeq(v,a) __writeq(v,a)
#endif /* __WANT_IO_DEF */
......
@@ -200,8 +200,9 @@ __EXTERN_INLINE void jensen_outl(u32 b, unsigned long addr)
* Memory functions.
*/
__EXTERN_INLINE u8 jensen_readb(unsigned long addr)
__EXTERN_INLINE u8 jensen_readb(const volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
long result;
jensen_set_hae(addr);
@@ -211,8 +212,9 @@ __EXTERN_INLINE u8 jensen_readb(unsigned long addr)
return 0xffUL & result;
}
__EXTERN_INLINE u16 jensen_readw(unsigned long addr)
__EXTERN_INLINE u16 jensen_readw(const volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
long result;
jensen_set_hae(addr);
@@ -222,15 +224,17 @@ __EXTERN_INLINE u16 jensen_readw(unsigned long addr)
return 0xffffUL & result;
}
__EXTERN_INLINE u32 jensen_readl(unsigned long addr)
__EXTERN_INLINE u32 jensen_readl(const volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
jensen_set_hae(addr);
addr &= JENSEN_HAE_MASK;
return *(vuip) ((addr << 7) + EISA_MEM + 0x60);
}
__EXTERN_INLINE u64 jensen_readq(unsigned long addr)
__EXTERN_INLINE u64 jensen_readq(const volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
unsigned long r0, r1;
jensen_set_hae(addr);
@@ -241,29 +245,33 @@ __EXTERN_INLINE u64 jensen_readq(unsigned long addr)
return r1 << 32 | r0;
}
__EXTERN_INLINE void jensen_writeb(u8 b, unsigned long addr)
__EXTERN_INLINE void jensen_writeb(u8 b, volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
jensen_set_hae(addr);
addr &= JENSEN_HAE_MASK;
*(vuip) ((addr << 7) + EISA_MEM + 0x00) = b * 0x01010101;
}
__EXTERN_INLINE void jensen_writew(u16 b, unsigned long addr)
__EXTERN_INLINE void jensen_writew(u16 b, volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
jensen_set_hae(addr);
addr &= JENSEN_HAE_MASK;
*(vuip) ((addr << 7) + EISA_MEM + 0x20) = b * 0x00010001;
}
__EXTERN_INLINE void jensen_writel(u32 b, unsigned long addr)
__EXTERN_INLINE void jensen_writel(u32 b, volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
jensen_set_hae(addr);
addr &= JENSEN_HAE_MASK;
*(vuip) ((addr << 7) + EISA_MEM + 0x60) = b;
}
__EXTERN_INLINE void jensen_writeq(u64 b, unsigned long addr)
__EXTERN_INLINE void jensen_writeq(u64 b, volatile void __iomem *xaddr)
{
unsigned long addr = (unsigned long) xaddr;
jensen_set_hae(addr);
addr &= JENSEN_HAE_MASK;
addr = (addr << 7) + EISA_MEM + 0x60;
@@ -271,13 +279,13 @@ __EXTERN_INLINE void jensen_writeq(u64 b, unsigned long addr)
*(vuip) (addr + (4 << 7)) = b >> 32;
}
__EXTERN_INLINE unsigned long jensen_ioremap(unsigned long addr,
__EXTERN_INLINE void __iomem *jensen_ioremap(unsigned long addr,
unsigned long size)
{
return addr;
return (void __iomem *)addr;
}
__EXTERN_INLINE void jensen_iounmap(unsigned long addr)
__EXTERN_INLINE void jensen_iounmap(volatile void __iomem *addr)
{
return;
}
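The Jensen accessors above go through EISA sparse space: the HAE selects a window, the address is shifted left by 7 so each byte gets its own slot, and the small offset added to EISA_MEM encodes the transfer size (0x00 byte, 0x20 word, 0x60 longword). A sketch of that address transformation; the helper name is illustrative:

/* Sketch of the sparse-space slot calculation used by the accessors above. */
static unsigned long sparse_byte_slot(unsigned long addr)
{
	addr &= JENSEN_HAE_MASK;		/* offset within the window chosen by the HAE */
	return (addr << 7) + EISA_MEM + 0x00;	/* 128-byte slot per byte, byte-sized access */
}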
@@ -306,18 +314,18 @@ __EXTERN_INLINE int jensen_is_ioaddr(unsigned long addr)
#define __writel jensen_writel
#define __writeq jensen_writeq
#define __ioremap jensen_ioremap
#define __iounmap(a) jensen_iounmap((unsigned long)a)
#define __is_ioaddr jensen_is_ioaddr
#define __iounmap jensen_iounmap
#define __is_ioaddr(a) jensen_is_ioaddr((unsigned long)(a))
/*
* The above have so much overhead that it probably doesn't make
* sense to have them inlined (better icache behaviour).
*/
#define inb(port) \
(__builtin_constant_p((port))?__inb(port):_inb(port))
(__builtin_constant_p(port)?__inb(port):_inb(port))
#define outb(x, port) \
(__builtin_constant_p((port))?__outb((x),(port)):_outb((x),(port)))
(__builtin_constant_p(port)?__outb(x,port):_outb(x,port))
#endif /* __WANT_IO_DEF */
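The __builtin_constant_p test above keeps the heavyweight HAE juggling inline only when the port number is a compile-time constant. An illustrative sketch; the port number is arbitrary:

/* __builtin_constant_p folds to 1 for compile-time constants, so constant
   ports take the inline __inb path and variable ports call the out-of-line _inb. */
static u8 read_fixed_port(void)
{
	return inb(0x64);		/* constant port: expands to __inb */
}

static u8 read_any_port(unsigned long port)
{
	return inb(port);		/* variable port: falls back to _inb */
}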
......
@@ -53,18 +53,18 @@ struct alpha_machine_vector
void (*mv_outw)(u16, unsigned long);
void (*mv_outl)(u32, unsigned long);
u8 (*mv_readb)(unsigned long);
u16 (*mv_readw)(unsigned long);
u32 (*mv_readl)(unsigned long);
u64 (*mv_readq)(unsigned long);
void (*mv_writeb)(u8, unsigned long);
void (*mv_writew)(u16, unsigned long);
void (*mv_writel)(u32, unsigned long);
void (*mv_writeq)(u64, unsigned long);
unsigned long (*mv_ioremap)(unsigned long, unsigned long);
void (*mv_iounmap)(unsigned long);
u8 (*mv_readb)(const volatile void __iomem *);
u16 (*mv_readw)(const volatile void __iomem *);
u32 (*mv_readl)(const volatile void __iomem *);
u64 (*mv_readq)(const volatile void __iomem *);
void (*mv_writeb)(u8, volatile void __iomem *);
void (*mv_writew)(u16, volatile void __iomem *);
void (*mv_writel)(u32, volatile void __iomem *);
void (*mv_writeq)(u64, volatile void __iomem *);
void __iomem *(*mv_ioremap)(unsigned long, unsigned long);
void (*mv_iounmap)(volatile void __iomem *);
int (*mv_is_ioaddr)(unsigned long);
void (*mv_switch_mm)(struct mm_struct *, struct mm_struct *,
......
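With the machine-vector members typed this way, a generic accessor can dispatch through them without casts. A sketch of that dispatch, assuming the usual per-platform alpha_mv instance; the wrapper name is illustrative:

/* Sketch: generic byte read dispatching through the machine vector. */
static u8 machvec_readb(const volatile void __iomem *addr)
{
	return alpha_mv.mv_readb(addr);	/* resolves to e.g. tsunami_readb at runtime */
}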
@@ -95,7 +95,7 @@ static inline int _raw_spin_trylock(spinlock_t *lock)
/***********************************************************/
typedef struct {
volatile int write_lock:1, read_counter:31;
volatile unsigned int write_lock:1, read_counter:31;
} /*__attribute__((aligned(32)))*/ rwlock_t;
#define RW_LOCK_UNLOCKED (rwlock_t) { 0, 0 }
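The write_lock bitfield becomes unsigned because, with GCC, a plain int bitfield is signed, and a signed 1-bit field can only hold 0 and -1, so comparisons against 1 misbehave. A small illustration:

/* Illustration: signed vs. unsigned 1-bit bitfields. */
struct bitdemo {
	int signed_bit:1;
	unsigned int unsigned_bit:1;
};

static int bitdemo_check(void)
{
	struct bitdemo d;
	int one = 1;

	d.signed_bit = one;	/* reads back as -1 on GCC */
	d.unsigned_bit = one;	/* reads back as 1, as intended */
	return (d.signed_bit == 1) + (d.unsigned_bit == 1);	/* 0 + 1 */
}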
......
@@ -15,24 +15,24 @@
extern inline void scr_writew(u16 val, volatile u16 *addr)
{
if (__is_ioaddr((unsigned long) addr))
__raw_writew(val, (unsigned long) addr);
if (__is_ioaddr(addr))
__raw_writew(val, (volatile u16 __iomem *) addr);
else
*addr = val;
}
extern inline u16 scr_readw(volatile const u16 *addr)
{
if (__is_ioaddr((unsigned long) addr))
return __raw_readw((unsigned long) addr);
if (__is_ioaddr(addr))
return __raw_readw((volatile const u16 __iomem *) addr);
else
return *addr;
}
extern inline void scr_memsetw(u16 *s, u16 c, unsigned int count)
{
if (__is_ioaddr((unsigned long) s))
memsetw_io(s, c, count);
if (__is_ioaddr(s))
memsetw_io((u16 __iomem *) s, c, count);
else
memsetw(s, c, count);
}
@@ -43,9 +43,9 @@ extern void scr_memcpyw(u16 *d, const u16 *s, unsigned int count);
/* ??? These are currently only used for downloading character sets. As
such, they don't need memory barriers. Is this all they are intended
to be used for? */
#define vga_readb readb
#define vga_writeb writeb
#define vga_readb(a) readb((u8 __iomem *)(a))
#define vga_writeb(v,a) writeb(v, (u8 __iomem *)(a))
#define VGA_MAP_MEM(x) ((unsigned long) ioremap((x), 0))
#define VGA_MAP_MEM(x) ((unsigned long) ioremap(x, 0))
#endif