Commit 370b0b5f authored by Heiko Carstens, committed by Martin Schwidefsky

s390/bitops: remove CONFIG_SMP / simplify non-atomic bitops

Remove CONFIG_SMP from the bitops code. This reduces the C code significantly
and also generates better code for the SMP case.

This means that for !CONFIG_SMP, set_bit() and friends now also have
compare-and-swap semantics (read: more code). However, nobody really cares
about !CONFIG_SMP, and this is the trade-off that simplifies the SMP code,
which we do care about.
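
For reference, the compare-and-swap retry loop that __BITOPS_LOOP spells out
with l/lr/cs/jl (lg/lgr/csg on 64 bit) is equivalent to this portable C sketch
(editor's illustration using GCC's __atomic builtins; bitops_loop_or is a
hypothetical name, not part of the patch):

	/* Stand-in for __BITOPS_LOOP(addr, mask, __BITOPS_OR): load the
	 * old word, OR in the mask, and retry the compare-and-swap until
	 * no other CPU has modified the word in between. */
	static inline unsigned long bitops_loop_or(unsigned long *addr,
						   unsigned long mask)
	{
		unsigned long old = *addr;

		/* On failure, __atomic_compare_exchange_n reloads *addr
		 * into "old", so the loop retries with the fresh value. */
		while (!__atomic_compare_exchange_n(addr, &old, old | mask,
						    0, __ATOMIC_SEQ_CST,
						    __ATOMIC_SEQ_CST))
			;
		return old;
	}

The returned old value is what lets test_and_set_bit() and friends report the
previous bit state without a second memory access.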

The non-atomic bitops like __set_bit() now also generate better code, because
the old code did not have a __builtin_constant_p() check for the CONFIG_SMP
case and therefore always emitted the inline assembly variant. The inline
assemblies for the non-atomic case have been removed entirely, since gcc
produces better code that accesses fewer memory operands.
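
The address arithmetic that the new __bitops_word() and __bitops_byte()
helpers centralize is easy to check in isolation. A small host-side sketch
(editor's illustration: the two XOR expressions are taken from the patch; the
names word_offset/byte_offset and the test harness are not):

	#include <assert.h>
	#include <stdio.h>

	#define BITS_PER_LONG 64

	/* Byte offset of the word containing bit nr: the XOR clears the
	 * low six bits of nr, so the shift yields (nr / 64) * 8. */
	static unsigned long word_offset(unsigned long nr)
	{
		return (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
	}

	/* Byte offset of the byte containing bit nr: XOR-ing with 56
	 * flips the byte index within the word, because on big-endian
	 * s390 bit 0 of a long lives in the word's last byte. */
	static unsigned long byte_offset(unsigned long nr)
	{
		return (nr ^ (BITS_PER_LONG - 8)) >> 3;
	}

	int main(void)
	{
		assert(word_offset(0) == 0 && word_offset(63) == 0);
		assert(word_offset(64) == 8 && word_offset(127) == 8);
		assert(byte_offset(0) == 7);	/* last byte of word 0 */
		assert(byte_offset(63) == 0);	/* first byte of word 0 */
		assert(byte_offset(64) == 15);	/* last byte of word 1 */
		printf("offset checks passed\n");
		return 0;
	}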

test_bit() also got a bit simpler: it already had a __builtin_constant_p()
check, but the two code paths were functionally identical, just written
differently.
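
To see why the duplication was pointless, here is a condensed host-side check
(editor's illustration; the volatile qualifiers are dropped and the function
names are ad hoc) that the two old paths computed the same result for every
bit:

	#include <assert.h>
	#include <stdio.h>

	#define BITS_PER_LONG 64

	/* The old __test_bit(), condensed: one byte load plus a shift. */
	static int old_test_bit(unsigned long nr, const unsigned long *ptr)
	{
		unsigned char ch = *((const unsigned char *)ptr +
				     ((nr ^ (BITS_PER_LONG - 8)) >> 3));
		return (ch >> (nr & 7)) & 1;
	}

	/* The old __constant_test_bit(): the same byte, the same bit,
	 * just spelled as an array index and a mask test. */
	static int old_constant_test_bit(unsigned long nr,
					 const unsigned long *addr)
	{
		return (((const char *)addr)[(nr ^ (BITS_PER_LONG - 8)) >> 3]
			& (1 << (nr & 7))) != 0;
	}

	int main(void)
	{
		unsigned long map[2] = { 0x8000000000000001UL, 0xff };
		unsigned long nr;

		for (nr = 0; nr < 128; nr++)
			assert(old_test_bit(nr, map) ==
			       old_constant_test_bit(nr, map));
		printf("both paths agree\n");
		return 0;
	}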

The net result is mainly less code to maintain; it is not very relevant for
code generation, since there are few non-atomic bitops usages that we care
about.
(Code reduction for a defconfig kernel image, before/after: 560 bytes.)
Signed-off-by: Heiko Carstens <heiko.carstens@de.ibm.com>
Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
parent 9a70a428
arch/s390/include/asm/bitops.h

@@ -15,6 +15,7 @@
 #error only <linux/bitops.h> can be included directly
 #endif
 
+#include <linux/typecheck.h>
 #include <linux/compiler.h>
 
 /*
@@ -54,8 +55,6 @@
  */
 
 /* bitmap tables from arch/s390/kernel/bitmap.c */
-extern const char _oi_bitmap[];
-extern const char _ni_bitmap[];
 extern const char _zb_findmap[];
 extern const char _sb_findmap[];
 
@@ -69,15 +68,15 @@
 ({								\
 	unsigned long __old, __new;				\
 								\
+	typecheck(unsigned long *, (__addr));			\
 	asm volatile(						\
 		"	l	%0,%2\n"			\
 		"0:	lr	%1,%0\n"			\
 		__op_string "	%1,%3\n"			\
 		"	cs	%0,%1,%2\n"			\
 		"	jl	0b"				\
-		: "=&d" (__old), "=&d" (__new),			\
-		  "=Q" (*(unsigned long *) __addr)		\
-		: "d" (__val), "Q" (*(unsigned long *) __addr)	\
+		: "=&d" (__old), "=&d" (__new), "+Q" (*(__addr))\
+		: "d" (__val)					\
 		: "cc");					\
 	__old;							\
 })
@@ -94,9 +93,10 @@
 ({								\
 	unsigned long __old;					\
 								\
+	typecheck(unsigned long *, (__addr));			\
 	asm volatile(						\
 		__op_string "	%0,%2,%1\n"			\
-		: "=d" (__old), "+Q" (*(unsigned long *)__addr)	\
+		: "=d" (__old), "+Q" (*(__addr))		\
 		: "d" (__val)					\
 		: "cc");					\
 	__old;							\
@@ -112,15 +112,15 @@
 ({								\
 	unsigned long __old, __new;				\
 								\
+	typecheck(unsigned long *, (__addr));			\
 	asm volatile(						\
 		"	lg	%0,%2\n"			\
 		"0:	lgr	%1,%0\n"			\
 		__op_string "	%1,%3\n"			\
 		"	csg	%0,%1,%2\n"			\
 		"	jl	0b"				\
-		: "=&d" (__old), "=&d" (__new),			\
-		  "=Q" (*(unsigned long *) __addr)		\
-		: "d" (__val), "Q" (*(unsigned long *) __addr)	\
+		: "=&d" (__old), "=&d" (__new), "+Q" (*(__addr))\
+		: "d" (__val)					\
 		: "cc");					\
 	__old;							\
 })
@@ -131,294 +131,148 @@
 
 #define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)
 
-#ifdef CONFIG_SMP
-/*
- * SMP safe set_bit routine based on compare and swap (CS)
- */
-static inline void set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
+static inline unsigned long *
+__bitops_word(unsigned long nr, volatile unsigned long *ptr)
+{
+	unsigned long addr;
+
+	addr = (unsigned long)ptr + ((nr ^ (nr & (BITS_PER_LONG - 1))) >> 3);
+	return (unsigned long *)addr;
+}
+
+static inline unsigned char *
+__bitops_byte(unsigned long nr, volatile unsigned long *ptr)
+{
+	return ((unsigned char *)ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
+}
+
+static inline void set_bit(unsigned long nr, volatile unsigned long *ptr)
 {
-	unsigned long addr, mask;
+	unsigned long *addr = __bitops_word(nr, ptr);
+	unsigned long mask;
 
-	addr = (unsigned long) ptr;
-	/* calculate address for CS */
-	addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
-	/* make OR mask */
 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
-	/* Do the atomic update. */
 	__BITOPS_LOOP(addr, mask, __BITOPS_OR);
 }
 
-/*
- * SMP safe clear_bit routine based on compare and swap (CS)
- */
-static inline void clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
+static inline void clear_bit(unsigned long nr, volatile unsigned long *ptr)
 {
-	unsigned long addr, mask;
+	unsigned long *addr = __bitops_word(nr, ptr);
+	unsigned long mask;
 
-	addr = (unsigned long) ptr;
-	/* calculate address for CS */
-	addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
-	/* make AND mask */
 	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
-	/* Do the atomic update. */
 	__BITOPS_LOOP(addr, mask, __BITOPS_AND);
 }
 
-/*
- * SMP safe change_bit routine based on compare and swap (CS)
- */
-static inline void change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
+static inline void change_bit(unsigned long nr, volatile unsigned long *ptr)
 {
-	unsigned long addr, mask;
+	unsigned long *addr = __bitops_word(nr, ptr);
+	unsigned long mask;
 
-	addr = (unsigned long) ptr;
-	/* calculate address for CS */
-	addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
-	/* make XOR mask */
 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
-	/* Do the atomic update. */
 	__BITOPS_LOOP(addr, mask, __BITOPS_XOR);
 }
 
-/*
- * SMP safe test_and_set_bit routine based on compare and swap (CS)
- */
 static inline int
-test_and_set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
+test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
 {
-	unsigned long addr, old, mask;
+	unsigned long *addr = __bitops_word(nr, ptr);
+	unsigned long old, mask;
 
-	addr = (unsigned long) ptr;
-	/* calculate address for CS */
-	addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
-	/* make OR/test mask */
 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
-	/* Do the atomic update. */
 	old = __BITOPS_LOOP(addr, mask, __BITOPS_OR);
 	barrier();
 	return (old & mask) != 0;
 }
 
-/*
- * SMP safe test_and_clear_bit routine based on compare and swap (CS)
- */
 static inline int
-test_and_clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
+test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
 {
-	unsigned long addr, old, mask;
+	unsigned long *addr = __bitops_word(nr, ptr);
+	unsigned long old, mask;
 
-	addr = (unsigned long) ptr;
-	/* calculate address for CS */
-	addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
-	/* make AND/test mask */
 	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
-	/* Do the atomic update. */
 	old = __BITOPS_LOOP(addr, mask, __BITOPS_AND);
 	barrier();
 	return (old & ~mask) != 0;
 }
 
-/*
- * SMP safe test_and_change_bit routine based on compare and swap (CS)
- */
 static inline int
-test_and_change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
+test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
 {
-	unsigned long addr, old, mask;
+	unsigned long *addr = __bitops_word(nr, ptr);
+	unsigned long old, mask;
 
-	addr = (unsigned long) ptr;
-	/* calculate address for CS */
-	addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
-	/* make XOR/test mask */
 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
-	/* Do the atomic update. */
 	old = __BITOPS_LOOP(addr, mask, __BITOPS_XOR);
 	barrier();
 	return (old & mask) != 0;
 }
-#endif /* CONFIG_SMP */
 
-/*
- * fast, non-SMP set_bit routine
- */
 static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
 {
-	unsigned long addr;
-
-	addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
-	asm volatile(
-		"	oc	%O0(1,%R0),%1"
-		: "+Q" (*(char *) addr) : "Q" (_oi_bitmap[nr & 7]) : "cc");
-}
-
-static inline void
-__constant_set_bit(const unsigned long nr, volatile unsigned long *ptr)
-{
-	unsigned long addr;
+	unsigned char *addr = __bitops_byte(nr, ptr);
 
-	addr = ((unsigned long) ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
-	*(unsigned char *) addr |= 1 << (nr & 7);
+	*addr |= 1 << (nr & 7);
 }
 
-#define set_bit_simple(nr,addr) \
-(__builtin_constant_p((nr)) ? \
- __constant_set_bit((nr),(addr)) : \
- __set_bit((nr),(addr)) )
-
-/*
- * fast, non-SMP clear_bit routine
- */
 static inline void
 __clear_bit(unsigned long nr, volatile unsigned long *ptr)
 {
-	unsigned long addr;
+	unsigned char *addr = __bitops_byte(nr, ptr);
 
-	addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
-	asm volatile(
-		"	nc	%O0(1,%R0),%1"
-		: "+Q" (*(char *) addr) : "Q" (_ni_bitmap[nr & 7]) : "cc");
-}
-
-static inline void
-__constant_clear_bit(const unsigned long nr, volatile unsigned long *ptr)
-{
-	unsigned long addr;
-
-	addr = ((unsigned long) ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
-	*(unsigned char *) addr &= ~(1 << (nr & 7));
+	*addr &= ~(1 << (nr & 7));
 }
 
-#define clear_bit_simple(nr,addr) \
-(__builtin_constant_p((nr)) ? \
- __constant_clear_bit((nr),(addr)) : \
- __clear_bit((nr),(addr)) )
-
-/*
- * fast, non-SMP change_bit routine
- */
 static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
 {
-	unsigned long addr;
-
-	addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
-	asm volatile(
-		"	xc	%O0(1,%R0),%1"
-		: "+Q" (*(char *) addr) : "Q" (_oi_bitmap[nr & 7]) : "cc");
-}
-
-static inline void
-__constant_change_bit(const unsigned long nr, volatile unsigned long *ptr)
-{
-	unsigned long addr;
+	unsigned char *addr = __bitops_byte(nr, ptr);
 
-	addr = ((unsigned long) ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
-	*(unsigned char *) addr ^= 1 << (nr & 7);
+	*addr ^= 1 << (nr & 7);
 }
 
-#define change_bit_simple(nr,addr) \
-(__builtin_constant_p((nr)) ? \
- __constant_change_bit((nr),(addr)) : \
- __change_bit((nr),(addr)) )
-
-/*
- * fast, non-SMP test_and_set_bit routine
- */
 static inline int
-test_and_set_bit_simple(unsigned long nr, volatile unsigned long *ptr)
+__test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
 {
-	unsigned long addr;
+	unsigned char *addr = __bitops_byte(nr, ptr);
 	unsigned char ch;
 
-	addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
-	ch = *(unsigned char *) addr;
-	asm volatile(
-		"	oc	%O0(1,%R0),%1"
-		: "+Q" (*(char *) addr) : "Q" (_oi_bitmap[nr & 7])
-		: "cc", "memory");
+	ch = *addr;
+	*addr |= 1 << (nr & 7);
 	return (ch >> (nr & 7)) & 1;
 }
-#define __test_and_set_bit(X,Y)	test_and_set_bit_simple(X,Y)
 
-/*
- * fast, non-SMP test_and_clear_bit routine
- */
 static inline int
-test_and_clear_bit_simple(unsigned long nr, volatile unsigned long *ptr)
+__test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
 {
-	unsigned long addr;
+	unsigned char *addr = __bitops_byte(nr, ptr);
 	unsigned char ch;
 
-	addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
-	ch = *(unsigned char *) addr;
-	asm volatile(
-		"	nc	%O0(1,%R0),%1"
-		: "+Q" (*(char *) addr) : "Q" (_ni_bitmap[nr & 7])
-		: "cc", "memory");
+	ch = *addr;
+	*addr &= ~(1 << (nr & 7));
 	return (ch >> (nr & 7)) & 1;
 }
-#define __test_and_clear_bit(X,Y)	test_and_clear_bit_simple(X,Y)
 
-/*
- * fast, non-SMP test_and_change_bit routine
- */
 static inline int
-test_and_change_bit_simple(unsigned long nr, volatile unsigned long *ptr)
+__test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
 {
-	unsigned long addr;
+	unsigned char *addr = __bitops_byte(nr, ptr);
 	unsigned char ch;
 
-	addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
-	ch = *(unsigned char *) addr;
-	asm volatile(
-		"	xc	%O0(1,%R0),%1"
-		: "+Q" (*(char *) addr) : "Q" (_oi_bitmap[nr & 7])
-		: "cc", "memory");
+	ch = *addr;
+	*addr ^= 1 << (nr & 7);
 	return (ch >> (nr & 7)) & 1;
 }
-#define __test_and_change_bit(X,Y)	test_and_change_bit_simple(X,Y)
-
-#ifdef CONFIG_SMP
-#define set_bit			set_bit_cs
-#define clear_bit		clear_bit_cs
-#define change_bit		change_bit_cs
-#define test_and_set_bit	test_and_set_bit_cs
-#define test_and_clear_bit	test_and_clear_bit_cs
-#define test_and_change_bit	test_and_change_bit_cs
-#else
-#define set_bit			set_bit_simple
-#define clear_bit		clear_bit_simple
-#define change_bit		change_bit_simple
-#define test_and_set_bit	test_and_set_bit_simple
-#define test_and_clear_bit	test_and_clear_bit_simple
-#define test_and_change_bit	test_and_change_bit_simple
-#endif
 
-/*
- * This routine doesn't need to be atomic.
- */
-static inline int __test_bit(unsigned long nr, const volatile unsigned long *ptr)
+static inline int test_bit(unsigned long nr, const volatile unsigned long *ptr)
 {
-	unsigned long addr;
-	unsigned char ch;
+	const volatile unsigned char *addr;
 
-	addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
-	ch = *(volatile unsigned char *) addr;
-	return (ch >> (nr & 7)) & 1;
-}
-
-static inline int
-__constant_test_bit(unsigned long nr, const volatile unsigned long *addr) {
-	return (((volatile char *) addr)
-		[(nr^(BITS_PER_LONG-8))>>3] & (1<<(nr&7))) != 0;
+	addr = ((const volatile unsigned char *)ptr);
+	addr += (nr ^ (BITS_PER_LONG - 8)) >> 3;
+	return (*addr >> (nr & 7)) & 1;
 }
 
-#define test_bit(nr,addr) \
-(__builtin_constant_p((nr)) ? \
- __constant_test_bit((nr),(addr)) : \
- __test_bit((nr),(addr)) )
-
 /*
  * Optimized find bit helper functions.
  */
arch/s390/kernel/bitmap.c

@@ -9,12 +9,6 @@
 #include <linux/bitops.h>
 #include <linux/module.h>
 
-const char _oi_bitmap[] = { 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80 };
-EXPORT_SYMBOL(_oi_bitmap);
-
-const char _ni_bitmap[] = { 0xfe, 0xfd, 0xfb, 0xf7, 0xef, 0xdf, 0xbf, 0x7f };
-EXPORT_SYMBOL(_ni_bitmap);
-
 const char _zb_findmap[] = {
 	0,1,0,2,0,1,0,3,0,1,0,2,0,1,0,4,
 	0,1,0,2,0,1,0,3,0,1,0,2,0,1,0,5,