Commit 886ee55e authored by Ingo Molnar

locking/seqlock: Propagate 'const' pointers within read-only methods, remove forced type casts

Currently __seqprop_ptr() is an inline function that must choose to either
use 'const' or non-const seqcount-related pointers - but this results in
the undesirable loss of 'const' propagation via a forced type cast.

The easiest solution would be to turn the pointer wrappers into macros that
pass through whatever type is passed to them - but the clever maze of
seqlock API instantiation macros relies on CPP '##' token pasting, which
isn't recursive, so inline functions must be used here.
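
For illustration only (this is not the kernel code, and 'seqprop_ptr_as_a_macro'
is a made-up name), a pass-through macro would preserve const-ness for free,
because the type of the argument expression simply flows through:

  /* Hypothetical: const-ness of 's' propagates into the result type. */
  #define seqprop_ptr_as_a_macro(s)	(&(s)->seqcount)

But the preprocessor cannot define new macros from within the
SEQCOUNT_LOCKNAME() instantiation machinery, so the per-lock-type wrappers
have to remain inline functions.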

So create two wrapper variants instead: 'ptr' and 'const_ptr', and pick the
right one for the codepaths that are const: read_seqcount_begin() and
read_seqcount_retry().
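
As a standalone sketch of the resulting pattern (not the kernel code -
'seqcount_demo_t', 'demo_ptr()' and 'demo_const_ptr()' are made-up names
here), the read-only wrapper keeps 'const' from caller to callee while the
writer-side wrapper stays non-const, with no cast anywhere:

  #include <stdio.h>

  /* Simplified stand-ins for the kernel types. */
  typedef struct { unsigned sequence; } seqcount_t;
  typedef struct { seqcount_t seqcount; int lock; } seqcount_demo_t;

  /* Writer-side wrapper: non-const in, non-const out. */
  static inline seqcount_t *demo_ptr(seqcount_demo_t *s)
  {
  	return &s->seqcount;
  }

  /* Reader-side wrapper: const in, const out - no '(void *)' cast needed. */
  static inline const seqcount_t *demo_const_ptr(const seqcount_demo_t *s)
  {
  	return &s->seqcount;
  }

  int main(void)
  {
  	seqcount_demo_t sd = { { 0 }, 0 };
  	const seqcount_demo_t *reader = &sd;	/* readers get a const view */

  	demo_ptr(&sd)->sequence += 2;		/* writers may modify */
  	printf("%u\n", demo_const_ptr(reader)->sequence);
  	return 0;
  }

Passing a 'const seqcount_demo_t *' to demo_ptr(), or writing through the
result of demo_const_ptr(), now triggers a compiler diagnostic instead of
being silently permitted by a cast.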

This cleans up type handling and allows the removal of all type forcing.

No change in functionality.
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Reviewed-by: Oleg Nesterov <oleg@redhat.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Waiman Long <longman@redhat.com>
Cc: Will Deacon <will.deacon@arm.com>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Paul E. McKenney <paulmck@kernel.org>
parent ac8b60be
--- a/include/linux/seqlock.h
+++ b/include/linux/seqlock.h
@@ -200,9 +200,15 @@ typedef struct seqcount_##lockname {				\
 } seqcount_##lockname##_t;						\
 									\
 static __always_inline seqcount_t *					\
-__seqprop_##lockname##_ptr(const seqcount_##lockname##_t *s)		\
+__seqprop_##lockname##_ptr(seqcount_##lockname##_t *s)			\
 {									\
-	return (void *)&s->seqcount;	/* drop const */		\
+	return &s->seqcount;						\
+}									\
+									\
+static __always_inline const seqcount_t *				\
+__seqprop_##lockname##_const_ptr(const seqcount_##lockname##_t *s)	\
+{									\
+	return &s->seqcount;						\
 }									\
 									\
 static __always_inline unsigned						\
@@ -247,9 +253,14 @@ __seqprop_##lockname##_assert(const seqcount_##lockname##_t *s)	\
  * __seqprop() for seqcount_t
  */
 
-static inline seqcount_t *__seqprop_ptr(const seqcount_t *s)
+static inline seqcount_t *__seqprop_ptr(seqcount_t *s)
+{
+	return s;
+}
+
+static inline const seqcount_t *__seqprop_const_ptr(const seqcount_t *s)
 {
-	return (void *)s;	/* drop const */
+	return s;
 }
 
 static inline unsigned __seqprop_sequence(const seqcount_t *s)
@@ -302,6 +313,7 @@ SEQCOUNT_LOCKNAME(mutex,	struct mutex,	true,	mutex)
 	__seqprop_case((s),	mutex,		prop))
 
 #define seqprop_ptr(s)			__seqprop(s, ptr)(s)
+#define seqprop_const_ptr(s)		__seqprop(s, const_ptr)(s)
 #define seqprop_sequence(s)		__seqprop(s, sequence)(s)
 #define seqprop_preemptible(s)		__seqprop(s, preemptible)(s)
 #define seqprop_assert(s)		__seqprop(s, assert)(s)
@@ -353,7 +365,7 @@ SEQCOUNT_LOCKNAME(mutex,	struct mutex,	true,	mutex)
  */
 #define read_seqcount_begin(s)						\
 ({									\
-	seqcount_lockdep_reader_access(seqprop_ptr(s));			\
+	seqcount_lockdep_reader_access(seqprop_const_ptr(s));		\
 	raw_read_seqcount_begin(s);					\
 })
 
@@ -419,7 +431,7 @@ SEQCOUNT_LOCKNAME(mutex,	struct mutex,	true,	mutex)
  * Return: true if a read section retry is required, else false
  */
 #define __read_seqcount_retry(s, start)					\
-	do___read_seqcount_retry(seqprop_ptr(s), start)
+	do___read_seqcount_retry(seqprop_const_ptr(s), start)
 
 static inline int do___read_seqcount_retry(const seqcount_t *s, unsigned start)
 {
@@ -439,7 +451,7 @@ static inline int do___read_seqcount_retry(const seqcount_t *s, unsigned start)
  * Return: true if a read section retry is required, else false
  */
 #define read_seqcount_retry(s, start)					\
-	do_read_seqcount_retry(seqprop_ptr(s), start)
+	do_read_seqcount_retry(seqprop_const_ptr(s), start)
 
 static inline int do_read_seqcount_retry(const seqcount_t *s, unsigned start)
 {