Commit a435b9a1 authored by Peter Zijlstra

locking/refcount: Provide __refcount API to obtain the old value

David requested means to obtain the old/previous value from the
refcount API for tracing purposes.

Duplicate (most of) the API as __refcount*() with an additional
'int *' argument into which, if !NULL, the old value will be stored.
Requested-by: David Howells <dhowells@redhat.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Kees Cook <keescook@chromium.org>
Link: https://lkml.kernel.org/r/20200729111120.GA2638@hirez.programming.kicks-ass.net
parent 6eb6d059
--- a/include/linux/refcount.h
+++ b/include/linux/refcount.h
@@ -165,7 +165,7 @@ static inline unsigned int refcount_read(const refcount_t *r)
  *
  * Return: false if the passed refcount is 0, true otherwise
  */
-static inline __must_check bool refcount_add_not_zero(int i, refcount_t *r)
+static inline __must_check bool __refcount_add_not_zero(int i, refcount_t *r, int *oldp)
 {
 	int old = refcount_read(r);
 
@@ -174,12 +174,20 @@ static inline __must_check bool refcount_add_not_zero(int i, refcount_t *r)
 			break;
 	} while (!atomic_try_cmpxchg_relaxed(&r->refs, &old, old + i));
 
+	if (oldp)
+		*oldp = old;
+
 	if (unlikely(old < 0 || old + i < 0))
 		refcount_warn_saturate(r, REFCOUNT_ADD_NOT_ZERO_OVF);
 
 	return old;
 }
 
+static inline __must_check bool refcount_add_not_zero(int i, refcount_t *r)
+{
+	return __refcount_add_not_zero(i, r, NULL);
+}
+
 /**
  * refcount_add - add a value to a refcount
  * @i: the value to add to the refcount
@@ -196,16 +204,24 @@ static inline __must_check bool refcount_add_not_zero(int i, refcount_t *r)
  * cases, refcount_inc(), or one of its variants, should instead be used to
  * increment a reference count.
  */
-static inline void refcount_add(int i, refcount_t *r)
+static inline void __refcount_add(int i, refcount_t *r, int *oldp)
 {
 	int old = atomic_fetch_add_relaxed(i, &r->refs);
 
+	if (oldp)
+		*oldp = old;
+
 	if (unlikely(!old))
 		refcount_warn_saturate(r, REFCOUNT_ADD_UAF);
 	else if (unlikely(old < 0 || old + i < 0))
 		refcount_warn_saturate(r, REFCOUNT_ADD_OVF);
 }
 
+static inline void refcount_add(int i, refcount_t *r)
+{
+	__refcount_add(i, r, NULL);
+}
+
 /**
  * refcount_inc_not_zero - increment a refcount unless it is 0
  * @r: the refcount to increment
@@ -219,9 +235,14 @@ static inline void refcount_add(int i, refcount_t *r)
  *
  * Return: true if the increment was successful, false otherwise
  */
+static inline __must_check bool __refcount_inc_not_zero(refcount_t *r, int *oldp)
+{
+	return __refcount_add_not_zero(1, r, oldp);
+}
+
 static inline __must_check bool refcount_inc_not_zero(refcount_t *r)
 {
-	return refcount_add_not_zero(1, r);
+	return __refcount_inc_not_zero(r, NULL);
 }
 
 /**
@@ -236,9 +257,14 @@ static inline __must_check bool refcount_inc_not_zero(refcount_t *r)
  * Will WARN if the refcount is 0, as this represents a possible use-after-free
  * condition.
  */
+static inline void __refcount_inc(refcount_t *r, int *oldp)
+{
+	__refcount_add(1, r, oldp);
+}
+
 static inline void refcount_inc(refcount_t *r)
 {
-	refcount_add(1, r);
+	__refcount_inc(r, NULL);
 }
 
 /**
@@ -261,10 +287,13 @@ static inline void refcount_inc(refcount_t *r)
  *
  * Return: true if the resulting refcount is 0, false otherwise
  */
-static inline __must_check bool refcount_sub_and_test(int i, refcount_t *r)
+static inline __must_check bool __refcount_sub_and_test(int i, refcount_t *r, int *oldp)
 {
 	int old = atomic_fetch_sub_release(i, &r->refs);
 
+	if (oldp)
+		*oldp = old;
+
 	if (old == i) {
 		smp_acquire__after_ctrl_dep();
 		return true;
@@ -276,6 +305,11 @@ static inline __must_check bool refcount_sub_and_test(int i, refcount_t *r)
 	return false;
 }
 
+static inline __must_check bool refcount_sub_and_test(int i, refcount_t *r)
+{
+	return __refcount_sub_and_test(i, r, NULL);
+}
+
 /**
  * refcount_dec_and_test - decrement a refcount and test if it is 0
  * @r: the refcount
@@ -289,9 +323,14 @@ static inline __must_check bool refcount_sub_and_test(int i, refcount_t *r)
  *
  * Return: true if the resulting refcount is 0, false otherwise
  */
+static inline __must_check bool __refcount_dec_and_test(refcount_t *r, int *oldp)
+{
+	return __refcount_sub_and_test(1, r, oldp);
+}
+
 static inline __must_check bool refcount_dec_and_test(refcount_t *r)
 {
-	return refcount_sub_and_test(1, r);
+	return __refcount_dec_and_test(r, NULL);
 }
 
 /**
@@ -304,12 +343,22 @@ static inline __must_check bool refcount_dec_and_test(refcount_t *r)
  * Provides release memory ordering, such that prior loads and stores are done
  * before.
  */
-static inline void refcount_dec(refcount_t *r)
+static inline void __refcount_dec(refcount_t *r, int *oldp)
 {
-	if (unlikely(atomic_fetch_sub_release(1, &r->refs) <= 1))
+	int old = atomic_fetch_sub_release(1, &r->refs);
+
+	if (oldp)
+		*oldp = old;
+
+	if (unlikely(old <= 1))
 		refcount_warn_saturate(r, REFCOUNT_DEC_LEAK);
 }
 
+static inline void refcount_dec(refcount_t *r)
+{
+	__refcount_dec(r, NULL);
+}
+
 extern __must_check bool refcount_dec_if_one(refcount_t *r);
 extern __must_check bool refcount_dec_not_one(refcount_t *r);
 extern __must_check bool refcount_dec_and_mutex_lock(refcount_t *r, struct mutex *lock);
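
A minimal usage sketch (not part of the commit): it assumes a kernel build context where <linux/refcount.h> is available; struct foo, foo_get(), and the trace_printk() message are hypothetical, chosen only to show how the extra oldp argument exposes the pre-increment value for tracing without a separate, racy refcount_read().

#include <linux/refcount.h>
#include <linux/kernel.h>

struct foo {
	refcount_t ref;
	/* ... payload ... */
};

/*
 * Hypothetical helper: take a reference unless the object is already
 * dead, and report the counter value seen *before* the increment so a
 * tracepoint (approximated here with trace_printk()) can log the
 * old -> new transition.
 */
static bool foo_get(struct foo *f)
{
	int old;

	if (!__refcount_inc_not_zero(&f->ref, &old))
		return false;	/* refcount was 0; object is going away */

	trace_printk("foo %p: ref %d -> %d\n", f, old, old + 1);
	return true;
}

Because the old value is captured by the same atomic operation that performs the update, the reported transition is exact; that is the point of duplicating the API rather than asking callers to read the counter separately.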