Commit bf857ddd authored by Liam R. Howlett's avatar Liam R. Howlett Committed by Andrew Morton

maple_tree: move debug check to __mas_set_range()

__mas_set_range() was created to shortcut resetting the maple state and a
debug check was added to the caller (the vma iterator) to ensure the
internal maple state remains safe to use.  Move the debug check from the
vma iterator into the maple tree itself so other users do not incorrectly
use the advanced maple state modification.

Fallout from this change includes a large amount of debug setup that needed
to be moved earlier in the header, and the maple_tree.h radix-tree test
code needed to move its inclusion of the header to after the atomic
define.  None of these changes are functional.

Link: https://lkml.kernel.org/r/20231101171629.3612299-4-Liam.Howlett@oracle.com
Signed-off-by: Liam R. Howlett <Liam.Howlett@oracle.com>
Cc: Peng Zhang <zhangpeng.00@bytedance.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
parent f7a59018
......@@ -557,6 +557,131 @@ static inline void mas_reset(struct ma_state *mas)
*/
#define mas_for_each(__mas, __entry, __max) \
while (((__entry) = mas_find((__mas), (__max))) != NULL)
#ifdef CONFIG_DEBUG_MAPLE_TREE
/* Output radix used by the dump helpers below. */
enum mt_dump_format {
mt_dump_dec,
mt_dump_hex,
};
/*
 * Shared test counters: total number of debug checks executed and how
 * many of them passed.  Updated by every *_BUG_ON()/*_WARN_ON() below.
 */
extern atomic_t maple_tree_tests_run;
extern atomic_t maple_tree_tests_passed;
/* Debug helpers: dump a tree, a maple state, or a write state; validate
 * a tree; shrink the node cache.  Definitions live outside this header. */
void mt_dump(const struct maple_tree *mt, enum mt_dump_format format);
void mas_dump(const struct ma_state *mas);
void mas_wr_dump(const struct ma_wr_state *wr_mas);
void mt_validate(struct maple_tree *mt);
void mt_cache_shrink(void);
/*
 * MT_BUG_ON() - Check @__x; on failure dump @__tree, the pass/run
 * counters, and a stack trace.  Always increments maple_tree_tests_run;
 * increments maple_tree_tests_passed only when the check passes.
 * Unlike plain BUG_ON(), this does not halt — it reports and continues.
 */
#define MT_BUG_ON(__tree, __x) do { \
atomic_inc(&maple_tree_tests_run); \
if (__x) { \
pr_info("BUG at %s:%d (%u)\n", \
__func__, __LINE__, __x); \
mt_dump(__tree, mt_dump_hex); \
pr_info("Pass: %u Run:%u\n", \
atomic_read(&maple_tree_tests_passed), \
atomic_read(&maple_tree_tests_run)); \
dump_stack(); \
} else { \
atomic_inc(&maple_tree_tests_passed); \
} \
} while (0)
/*
 * MAS_BUG_ON() - As MT_BUG_ON(), but takes a maple state and also dumps
 * the state before dumping its tree.
 */
#define MAS_BUG_ON(__mas, __x) do { \
atomic_inc(&maple_tree_tests_run); \
if (__x) { \
pr_info("BUG at %s:%d (%u)\n", \
__func__, __LINE__, __x); \
mas_dump(__mas); \
mt_dump((__mas)->tree, mt_dump_hex); \
pr_info("Pass: %u Run:%u\n", \
atomic_read(&maple_tree_tests_passed), \
atomic_read(&maple_tree_tests_run)); \
dump_stack(); \
} else { \
atomic_inc(&maple_tree_tests_passed); \
} \
} while (0)
/*
 * MAS_WR_BUG_ON() - As MAS_BUG_ON(), but takes a write state and dumps
 * it along with its embedded maple state and tree.
 */
#define MAS_WR_BUG_ON(__wrmas, __x) do { \
atomic_inc(&maple_tree_tests_run); \
if (__x) { \
pr_info("BUG at %s:%d (%u)\n", \
__func__, __LINE__, __x); \
mas_wr_dump(__wrmas); \
mas_dump((__wrmas)->mas); \
mt_dump((__wrmas)->mas->tree, mt_dump_hex); \
pr_info("Pass: %u Run:%u\n", \
atomic_read(&maple_tree_tests_passed), \
atomic_read(&maple_tree_tests_run)); \
dump_stack(); \
} else { \
atomic_inc(&maple_tree_tests_passed); \
} \
} while (0)
/*
 * MT_WARN_ON() - Warning variant of MT_BUG_ON().  A GCC statement
 * expression: it evaluates to the (unlikely-annotated) truth of @__x so
 * callers can branch on the result, e.g. if (MT_WARN_ON(mt, cond)) ...
 */
#define MT_WARN_ON(__tree, __x) ({ \
int ret = !!(__x); \
atomic_inc(&maple_tree_tests_run); \
if (ret) { \
pr_info("WARN at %s:%d (%u)\n", \
__func__, __LINE__, __x); \
mt_dump(__tree, mt_dump_hex); \
pr_info("Pass: %u Run:%u\n", \
atomic_read(&maple_tree_tests_passed), \
atomic_read(&maple_tree_tests_run)); \
dump_stack(); \
} else { \
atomic_inc(&maple_tree_tests_passed); \
} \
unlikely(ret); \
})
/*
 * MAS_WARN_ON() - As MT_WARN_ON(), but takes a maple state and also
 * dumps the state before its tree.  Evaluates to the truth of @__x.
 */
#define MAS_WARN_ON(__mas, __x) ({ \
int ret = !!(__x); \
atomic_inc(&maple_tree_tests_run); \
if (ret) { \
pr_info("WARN at %s:%d (%u)\n", \
__func__, __LINE__, __x); \
mas_dump(__mas); \
mt_dump((__mas)->tree, mt_dump_hex); \
pr_info("Pass: %u Run:%u\n", \
atomic_read(&maple_tree_tests_passed), \
atomic_read(&maple_tree_tests_run)); \
dump_stack(); \
} else { \
atomic_inc(&maple_tree_tests_passed); \
} \
unlikely(ret); \
})
/*
 * MAS_WR_WARN_ON() - As MAS_WARN_ON(), but takes a write state and
 * dumps it, its maple state, and the tree.  Evaluates to the truth of
 * @__x.
 */
#define MAS_WR_WARN_ON(__wrmas, __x) ({ \
int ret = !!(__x); \
atomic_inc(&maple_tree_tests_run); \
if (ret) { \
pr_info("WARN at %s:%d (%u)\n", \
__func__, __LINE__, __x); \
mas_wr_dump(__wrmas); \
mas_dump((__wrmas)->mas); \
mt_dump((__wrmas)->mas->tree, mt_dump_hex); \
pr_info("Pass: %u Run:%u\n", \
atomic_read(&maple_tree_tests_passed), \
atomic_read(&maple_tree_tests_run)); \
dump_stack(); \
} else { \
atomic_inc(&maple_tree_tests_passed); \
} \
unlikely(ret); \
})
#else
/*
 * Without CONFIG_DEBUG_MAPLE_TREE the checks fall back to plain
 * BUG_ON()/WARN_ON(); the tree/state argument is ignored and no test
 * counters or dumps are involved.
 */
#define MT_BUG_ON(__tree, __x) BUG_ON(__x)
#define MAS_BUG_ON(__mas, __x) BUG_ON(__x)
#define MAS_WR_BUG_ON(__mas, __x) BUG_ON(__x)
#define MT_WARN_ON(__tree, __x) WARN_ON(__x)
#define MAS_WARN_ON(__mas, __x) WARN_ON(__x)
#define MAS_WR_WARN_ON(__mas, __x) WARN_ON(__x)
#endif /* CONFIG_DEBUG_MAPLE_TREE */
/**
* __mas_set_range() - Set up Maple Tree operation state to a sub-range of the
* current location.
......@@ -570,6 +695,9 @@ static inline void mas_reset(struct ma_state *mas)
static inline void __mas_set_range(struct ma_state *mas, unsigned long start,
unsigned long last)
{
/* Ensure the range starts within the current slot */
/*
 * Advanced API: unlike mas_set_range(), the node/state is NOT reset
 * here, so an active state must already cover @start.  MAS_WARN_ON
 * fires (debug builds only) when the state is active but the new
 * @start lies outside the current [index, last] slot — i.e. the
 * caller misused the shortcut.  Callers not keeping the state active
 * should use mas_set_range() instead.
 */
MAS_WARN_ON(mas, mas_is_active(mas) &&
(mas->index > start || mas->last < start));
mas->index = start;
mas->last = last;
}
......@@ -587,8 +715,8 @@ static inline void __mas_set_range(struct ma_state *mas, unsigned long start,
static inline
void mas_set_range(struct ma_state *mas, unsigned long start, unsigned long last)
{
	/*
	 * Reset the state to MAS_START *before* setting the range: once
	 * __mas_set_range() carries the active-state debug check, calling
	 * it on a stale (still-active) state would trip MAS_WARN_ON
	 * spuriously.  (The rendered diff showed both the old and the new
	 * call; only one call, after the reset, is correct.)
	 */
	mas->node = MAS_START;
	__mas_set_range(mas, start, last);
}
/**
......@@ -713,129 +841,4 @@ void *mt_next(struct maple_tree *mt, unsigned long index, unsigned long max);
for (__entry = mt_find(__tree, &(__index), __max); \
__entry; __entry = mt_find_after(__tree, &(__index), __max))
#ifdef CONFIG_DEBUG_MAPLE_TREE
enum mt_dump_format {
mt_dump_dec,
mt_dump_hex,
};
extern atomic_t maple_tree_tests_run;
extern atomic_t maple_tree_tests_passed;
void mt_dump(const struct maple_tree *mt, enum mt_dump_format format);
void mas_dump(const struct ma_state *mas);
void mas_wr_dump(const struct ma_wr_state *wr_mas);
void mt_validate(struct maple_tree *mt);
void mt_cache_shrink(void);
#define MT_BUG_ON(__tree, __x) do { \
atomic_inc(&maple_tree_tests_run); \
if (__x) { \
pr_info("BUG at %s:%d (%u)\n", \
__func__, __LINE__, __x); \
mt_dump(__tree, mt_dump_hex); \
pr_info("Pass: %u Run:%u\n", \
atomic_read(&maple_tree_tests_passed), \
atomic_read(&maple_tree_tests_run)); \
dump_stack(); \
} else { \
atomic_inc(&maple_tree_tests_passed); \
} \
} while (0)
#define MAS_BUG_ON(__mas, __x) do { \
atomic_inc(&maple_tree_tests_run); \
if (__x) { \
pr_info("BUG at %s:%d (%u)\n", \
__func__, __LINE__, __x); \
mas_dump(__mas); \
mt_dump((__mas)->tree, mt_dump_hex); \
pr_info("Pass: %u Run:%u\n", \
atomic_read(&maple_tree_tests_passed), \
atomic_read(&maple_tree_tests_run)); \
dump_stack(); \
} else { \
atomic_inc(&maple_tree_tests_passed); \
} \
} while (0)
#define MAS_WR_BUG_ON(__wrmas, __x) do { \
atomic_inc(&maple_tree_tests_run); \
if (__x) { \
pr_info("BUG at %s:%d (%u)\n", \
__func__, __LINE__, __x); \
mas_wr_dump(__wrmas); \
mas_dump((__wrmas)->mas); \
mt_dump((__wrmas)->mas->tree, mt_dump_hex); \
pr_info("Pass: %u Run:%u\n", \
atomic_read(&maple_tree_tests_passed), \
atomic_read(&maple_tree_tests_run)); \
dump_stack(); \
} else { \
atomic_inc(&maple_tree_tests_passed); \
} \
} while (0)
#define MT_WARN_ON(__tree, __x) ({ \
int ret = !!(__x); \
atomic_inc(&maple_tree_tests_run); \
if (ret) { \
pr_info("WARN at %s:%d (%u)\n", \
__func__, __LINE__, __x); \
mt_dump(__tree, mt_dump_hex); \
pr_info("Pass: %u Run:%u\n", \
atomic_read(&maple_tree_tests_passed), \
atomic_read(&maple_tree_tests_run)); \
dump_stack(); \
} else { \
atomic_inc(&maple_tree_tests_passed); \
} \
unlikely(ret); \
})
#define MAS_WARN_ON(__mas, __x) ({ \
int ret = !!(__x); \
atomic_inc(&maple_tree_tests_run); \
if (ret) { \
pr_info("WARN at %s:%d (%u)\n", \
__func__, __LINE__, __x); \
mas_dump(__mas); \
mt_dump((__mas)->tree, mt_dump_hex); \
pr_info("Pass: %u Run:%u\n", \
atomic_read(&maple_tree_tests_passed), \
atomic_read(&maple_tree_tests_run)); \
dump_stack(); \
} else { \
atomic_inc(&maple_tree_tests_passed); \
} \
unlikely(ret); \
})
#define MAS_WR_WARN_ON(__wrmas, __x) ({ \
int ret = !!(__x); \
atomic_inc(&maple_tree_tests_run); \
if (ret) { \
pr_info("WARN at %s:%d (%u)\n", \
__func__, __LINE__, __x); \
mas_wr_dump(__wrmas); \
mas_dump((__wrmas)->mas); \
mt_dump((__wrmas)->mas->tree, mt_dump_hex); \
pr_info("Pass: %u Run:%u\n", \
atomic_read(&maple_tree_tests_passed), \
atomic_read(&maple_tree_tests_run)); \
dump_stack(); \
} else { \
atomic_inc(&maple_tree_tests_passed); \
} \
unlikely(ret); \
})
#else
#define MT_BUG_ON(__tree, __x) BUG_ON(__x)
#define MAS_BUG_ON(__mas, __x) BUG_ON(__x)
#define MAS_WR_BUG_ON(__mas, __x) BUG_ON(__x)
#define MT_WARN_ON(__tree, __x) WARN_ON(__x)
#define MAS_WARN_ON(__mas, __x) WARN_ON(__x)
#define MAS_WR_WARN_ON(__mas, __x) WARN_ON(__x)
#endif /* CONFIG_DEBUG_MAPLE_TREE */
#endif /*_LINUX_MAPLE_TREE_H */
......@@ -1135,8 +1135,6 @@ static inline bool vma_soft_dirty_enabled(struct vm_area_struct *vma)
static inline void vma_iter_config(struct vma_iterator *vmi,
		unsigned long index, unsigned long last)
{
	/*
	 * Configure the VMA iterator for [index, last).  The VMA iterator
	 * uses an exclusive end while the maple state uses an inclusive
	 * one, hence last - 1.  The range-sanity debug check lives inside
	 * __mas_set_range() itself, so the old MAS_BUG_ON duplicate here
	 * is dropped — keeping it would run the same check twice and tie
	 * this caller to maple-tree internals (mas.node/MAS_START).
	 */
	__mas_set_range(&vmi->mas, index, last - 1);
}
......
/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * Userspace shim for building the kernel's maple_tree.h in the
 * radix-tree test harness: map the kernel atomic API onto liburcu's
 * uatomic equivalents.  The header must be included AFTER all of these
 * defines — in particular after atomic_t, which its extern declarations
 * use — so include it exactly once, at the end.
 */
#define atomic_t int32_t
#define atomic_inc(x) uatomic_inc(x)
#define atomic_read(x) uatomic_read(x)
#define atomic_set(x, y) do {} while (0)
#define U8_MAX UCHAR_MAX
#include "../../../../include/linux/maple_tree.h"
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment