Commit e9a28dc5 authored by Qu Wenruo's avatar Qu Wenruo Committed by David Sterba

btrfs: rename tree_entry to rb_simple_node and export it

Structure tree_entry provides a very simple rb_tree which only uses
bytenr as search index.

That tree_entry is used in 3 structures: backref_node, mapping_node and
tree_block.

Since we're going to make backref_node independent from relocation, it's
a good time to extract the tree_entry into rb_simple_node, and export it
into misc.h.
Signed-off-by: default avatarQu Wenruo <wqu@suse.com>
Reviewed-by: default avatarDavid Sterba <dsterba@suse.com>
Signed-off-by: default avatarDavid Sterba <dsterba@suse.com>
parent 70535441
...@@ -161,8 +161,10 @@ static inline void btrfs_backref_iter_release(struct btrfs_backref_iter *iter) ...@@ -161,8 +161,10 @@ static inline void btrfs_backref_iter_release(struct btrfs_backref_iter *iter)
* Represent a tree block in the backref cache * Represent a tree block in the backref cache
*/ */
struct btrfs_backref_node { struct btrfs_backref_node {
struct rb_node rb_node; struct {
u64 bytenr; struct rb_node rb_node;
u64 bytenr;
}; /* Use rb_simple_node for search/insert */
u64 new_bytenr; u64 new_bytenr;
/* Objectid of tree block owner, can be not uptodate */ /* Objectid of tree block owner, can be not uptodate */
......
...@@ -6,6 +6,7 @@ ...@@ -6,6 +6,7 @@
#include <linux/sched.h> #include <linux/sched.h>
#include <linux/wait.h> #include <linux/wait.h>
#include <asm/div64.h> #include <asm/div64.h>
#include <linux/rbtree.h>
#define in_range(b, first, len) ((b) >= (first) && (b) < (first) + (len)) #define in_range(b, first, len) ((b) >= (first) && (b) < (first) + (len))
...@@ -58,4 +59,57 @@ static inline bool has_single_bit_set(u64 n) ...@@ -58,4 +59,57 @@ static inline bool has_single_bit_set(u64 n)
return is_power_of_two_u64(n); return is_power_of_two_u64(n);
} }
/*
 * Simple bytenr-based rb_tree helper structure
 *
 * Any structure that wants to use bytenr as its single search index should
 * start with exactly these members (in this order), so its leading bytes can
 * be interpreted as an rb_simple_node by rb_simple_search()/rb_simple_insert().
 */
struct rb_simple_node {
	struct rb_node rb_node;	/* Linkage into the bytenr-indexed rb_tree */
	u64 bytenr;		/* The sole search/sort key */
};
/*
 * Look up the entry keyed by @bytenr in the simple rb_tree @root.
 *
 * Return the matching rb_node, or NULL if no entry has that bytenr.
 */
static inline struct rb_node *rb_simple_search(struct rb_root *root, u64 bytenr)
{
	struct rb_node *cur;

	for (cur = root->rb_node; cur; ) {
		struct rb_simple_node *entry;

		entry = rb_entry(cur, struct rb_simple_node, rb_node);
		if (bytenr == entry->bytenr)
			return cur;
		cur = (bytenr < entry->bytenr) ? cur->rb_left : cur->rb_right;
	}
	return NULL;
}
/*
 * Insert @node, keyed by @bytenr, into the simple rb_tree @root.
 *
 * Return NULL on success.  If an entry with the same bytenr already exists,
 * return its rb_node and leave the tree unmodified.
 */
static inline struct rb_node *rb_simple_insert(struct rb_root *root, u64 bytenr,
					       struct rb_node *node)
{
	struct rb_node **link = &root->rb_node;
	struct rb_node *parent = NULL;

	while (*link) {
		struct rb_simple_node *entry;

		parent = *link;
		entry = rb_entry(parent, struct rb_simple_node, rb_node);

		if (bytenr == entry->bytenr)
			return parent;
		if (bytenr < entry->bytenr)
			link = &parent->rb_left;
		else
			link = &parent->rb_right;
	}

	rb_link_node(node, parent, link);
	rb_insert_color(node, root);
	return NULL;
}
#endif #endif
...@@ -24,6 +24,7 @@ ...@@ -24,6 +24,7 @@
#include "delalloc-space.h" #include "delalloc-space.h"
#include "block-group.h" #include "block-group.h"
#include "backref.h" #include "backref.h"
#include "misc.h"
/* /*
* Relocation overview * Relocation overview
...@@ -72,21 +73,15 @@ ...@@ -72,21 +73,15 @@
* The entry point of relocation is relocate_block_group() function. * The entry point of relocation is relocate_block_group() function.
*/ */
/*
 * Common header shared by btrfs_backref_node, mapping_node and tree_block:
 * each of those structures starts with exactly these members, so their
 * rb_node can be looked up / linked by bytenr via tree_search()/tree_insert().
 */
struct tree_entry {
	struct rb_node rb_node;	/* Linkage into a bytenr-indexed rb_tree */
	u64 bytenr;		/* The sole search/sort key */
};
#define RELOCATION_RESERVED_NODES 256 #define RELOCATION_RESERVED_NODES 256
/* /*
* map address of tree root to tree * map address of tree root to tree
*/ */
struct mapping_node { struct mapping_node {
struct rb_node rb_node; struct {
u64 bytenr; struct rb_node rb_node;
u64 bytenr;
}; /* Use rb_simple_node for search/insert */
void *data; void *data;
}; };
...@@ -99,8 +94,10 @@ struct mapping_tree { ...@@ -99,8 +94,10 @@ struct mapping_tree {
* present a tree block to process * present a tree block to process
*/ */
struct tree_block { struct tree_block {
struct rb_node rb_node; struct {
u64 bytenr; struct rb_node rb_node;
u64 bytenr;
}; /* Use rb_simple_node for search/insert */
struct btrfs_key key; struct btrfs_key key;
unsigned int level:8; unsigned int level:8;
unsigned int key_ready:1; unsigned int key_ready:1;
...@@ -294,48 +291,6 @@ static void free_backref_edge(struct btrfs_backref_cache *cache, ...@@ -294,48 +291,6 @@ static void free_backref_edge(struct btrfs_backref_cache *cache,
} }
} }
/*
 * Insert @node, keyed by @bytenr, into the bytenr-indexed rb_tree @root.
 *
 * Returns NULL on success.  If an entry with the same bytenr already
 * exists, returns its rb_node and leaves the tree unmodified.
 */
static struct rb_node *tree_insert(struct rb_root *root, u64 bytenr,
				   struct rb_node *node)
{
	struct rb_node **p = &root->rb_node;
	struct rb_node *parent = NULL;
	struct tree_entry *entry;

	while (*p) {
		parent = *p;
		entry = rb_entry(parent, struct tree_entry, rb_node);

		if (bytenr < entry->bytenr)
			p = &(*p)->rb_left;
		else if (bytenr > entry->bytenr)
			p = &(*p)->rb_right;
		else
			return parent;	/* Duplicate key: report the conflict */
	}

	rb_link_node(node, parent, p);
	rb_insert_color(node, root);
	return NULL;
}
/*
 * Look up the entry keyed by @bytenr in the bytenr-indexed rb_tree @root.
 *
 * Returns the matching rb_node, or NULL if no entry has that bytenr.
 */
static struct rb_node *tree_search(struct rb_root *root, u64 bytenr)
{
	struct rb_node *n = root->rb_node;
	struct tree_entry *entry;

	while (n) {
		entry = rb_entry(n, struct tree_entry, rb_node);

		if (bytenr < entry->bytenr)
			n = n->rb_left;
		else if (bytenr > entry->bytenr)
			n = n->rb_right;
		else
			return n;
	}
	return NULL;
}
static void backref_tree_panic(struct rb_node *rb_node, int errno, u64 bytenr) static void backref_tree_panic(struct rb_node *rb_node, int errno, u64 bytenr)
{ {
...@@ -474,7 +429,7 @@ static void update_backref_node(struct btrfs_backref_cache *cache, ...@@ -474,7 +429,7 @@ static void update_backref_node(struct btrfs_backref_cache *cache,
struct rb_node *rb_node; struct rb_node *rb_node;
rb_erase(&node->rb_node, &cache->rb_root); rb_erase(&node->rb_node, &cache->rb_root);
node->bytenr = bytenr; node->bytenr = bytenr;
rb_node = tree_insert(&cache->rb_root, node->bytenr, &node->rb_node); rb_node = rb_simple_insert(&cache->rb_root, node->bytenr, &node->rb_node);
if (rb_node) if (rb_node)
backref_tree_panic(rb_node, -EEXIST, bytenr); backref_tree_panic(rb_node, -EEXIST, bytenr);
} }
...@@ -599,7 +554,7 @@ struct btrfs_root *find_reloc_root(struct btrfs_fs_info *fs_info, u64 bytenr) ...@@ -599,7 +554,7 @@ struct btrfs_root *find_reloc_root(struct btrfs_fs_info *fs_info, u64 bytenr)
ASSERT(rc); ASSERT(rc);
spin_lock(&rc->reloc_root_tree.lock); spin_lock(&rc->reloc_root_tree.lock);
rb_node = tree_search(&rc->reloc_root_tree.rb_root, bytenr); rb_node = rb_simple_search(&rc->reloc_root_tree.rb_root, bytenr);
if (rb_node) { if (rb_node) {
node = rb_entry(rb_node, struct mapping_node, rb_node); node = rb_entry(rb_node, struct mapping_node, rb_node);
root = (struct btrfs_root *)node->data; root = (struct btrfs_root *)node->data;
...@@ -667,7 +622,7 @@ static int handle_direct_tree_backref(struct btrfs_backref_cache *cache, ...@@ -667,7 +622,7 @@ static int handle_direct_tree_backref(struct btrfs_backref_cache *cache,
if (!edge) if (!edge)
return -ENOMEM; return -ENOMEM;
rb_node = tree_search(&cache->rb_root, ref_key->offset); rb_node = rb_simple_search(&cache->rb_root, ref_key->offset);
if (!rb_node) { if (!rb_node) {
/* Parent node not yet cached */ /* Parent node not yet cached */
upper = alloc_backref_node(cache, ref_key->offset, upper = alloc_backref_node(cache, ref_key->offset,
...@@ -788,7 +743,7 @@ static int handle_indirect_tree_backref(struct btrfs_backref_cache *cache, ...@@ -788,7 +743,7 @@ static int handle_indirect_tree_backref(struct btrfs_backref_cache *cache,
} }
eb = path->nodes[level]; eb = path->nodes[level];
rb_node = tree_search(&cache->rb_root, eb->start); rb_node = rb_simple_search(&cache->rb_root, eb->start);
if (!rb_node) { if (!rb_node) {
upper = alloc_backref_node(cache, eb->start, upper = alloc_backref_node(cache, eb->start,
lower->level + 1); lower->level + 1);
...@@ -994,8 +949,8 @@ static int finish_upper_links(struct btrfs_backref_cache *cache, ...@@ -994,8 +949,8 @@ static int finish_upper_links(struct btrfs_backref_cache *cache,
/* Insert this node to cache if it's not COW-only */ /* Insert this node to cache if it's not COW-only */
if (!start->cowonly) { if (!start->cowonly) {
rb_node = tree_insert(&cache->rb_root, start->bytenr, rb_node = rb_simple_insert(&cache->rb_root, start->bytenr,
&start->rb_node); &start->rb_node);
if (rb_node) if (rb_node)
backref_tree_panic(rb_node, -EEXIST, start->bytenr); backref_tree_panic(rb_node, -EEXIST, start->bytenr);
list_add_tail(&start->lower, &cache->leaves); list_add_tail(&start->lower, &cache->leaves);
...@@ -1062,8 +1017,8 @@ static int finish_upper_links(struct btrfs_backref_cache *cache, ...@@ -1062,8 +1017,8 @@ static int finish_upper_links(struct btrfs_backref_cache *cache,
/* Only cache non-COW-only (subvolume trees) tree blocks */ /* Only cache non-COW-only (subvolume trees) tree blocks */
if (!upper->cowonly) { if (!upper->cowonly) {
rb_node = tree_insert(&cache->rb_root, upper->bytenr, rb_node = rb_simple_insert(&cache->rb_root, upper->bytenr,
&upper->rb_node); &upper->rb_node);
if (rb_node) { if (rb_node) {
backref_tree_panic(rb_node, -EEXIST, backref_tree_panic(rb_node, -EEXIST,
upper->bytenr); upper->bytenr);
...@@ -1311,7 +1266,7 @@ static int clone_backref_node(struct btrfs_trans_handle *trans, ...@@ -1311,7 +1266,7 @@ static int clone_backref_node(struct btrfs_trans_handle *trans,
if (cache->last_trans > 0) if (cache->last_trans > 0)
update_backref_cache(trans, cache); update_backref_cache(trans, cache);
rb_node = tree_search(&cache->rb_root, src->commit_root->start); rb_node = rb_simple_search(&cache->rb_root, src->commit_root->start);
if (rb_node) { if (rb_node) {
node = rb_entry(rb_node, struct btrfs_backref_node, rb_node); node = rb_entry(rb_node, struct btrfs_backref_node, rb_node);
if (node->detached) if (node->detached)
...@@ -1321,8 +1276,8 @@ static int clone_backref_node(struct btrfs_trans_handle *trans, ...@@ -1321,8 +1276,8 @@ static int clone_backref_node(struct btrfs_trans_handle *trans,
} }
if (!node) { if (!node) {
rb_node = tree_search(&cache->rb_root, rb_node = rb_simple_search(&cache->rb_root,
reloc_root->commit_root->start); reloc_root->commit_root->start);
if (rb_node) { if (rb_node) {
node = rb_entry(rb_node, struct btrfs_backref_node, node = rb_entry(rb_node, struct btrfs_backref_node,
rb_node); rb_node);
...@@ -1355,8 +1310,8 @@ static int clone_backref_node(struct btrfs_trans_handle *trans, ...@@ -1355,8 +1310,8 @@ static int clone_backref_node(struct btrfs_trans_handle *trans,
list_add_tail(&new_node->lower, &cache->leaves); list_add_tail(&new_node->lower, &cache->leaves);
} }
rb_node = tree_insert(&cache->rb_root, new_node->bytenr, rb_node = rb_simple_insert(&cache->rb_root, new_node->bytenr,
&new_node->rb_node); &new_node->rb_node);
if (rb_node) if (rb_node)
backref_tree_panic(rb_node, -EEXIST, new_node->bytenr); backref_tree_panic(rb_node, -EEXIST, new_node->bytenr);
...@@ -1396,8 +1351,8 @@ static int __must_check __add_reloc_root(struct btrfs_root *root) ...@@ -1396,8 +1351,8 @@ static int __must_check __add_reloc_root(struct btrfs_root *root)
node->data = root; node->data = root;
spin_lock(&rc->reloc_root_tree.lock); spin_lock(&rc->reloc_root_tree.lock);
rb_node = tree_insert(&rc->reloc_root_tree.rb_root, rb_node = rb_simple_insert(&rc->reloc_root_tree.rb_root,
node->bytenr, &node->rb_node); node->bytenr, &node->rb_node);
spin_unlock(&rc->reloc_root_tree.lock); spin_unlock(&rc->reloc_root_tree.lock);
if (rb_node) { if (rb_node) {
btrfs_panic(fs_info, -EEXIST, btrfs_panic(fs_info, -EEXIST,
...@@ -1423,8 +1378,8 @@ static void __del_reloc_root(struct btrfs_root *root) ...@@ -1423,8 +1378,8 @@ static void __del_reloc_root(struct btrfs_root *root)
if (rc && root->node) { if (rc && root->node) {
spin_lock(&rc->reloc_root_tree.lock); spin_lock(&rc->reloc_root_tree.lock);
rb_node = tree_search(&rc->reloc_root_tree.rb_root, rb_node = rb_simple_search(&rc->reloc_root_tree.rb_root,
root->commit_root->start); root->commit_root->start);
if (rb_node) { if (rb_node) {
node = rb_entry(rb_node, struct mapping_node, rb_node); node = rb_entry(rb_node, struct mapping_node, rb_node);
rb_erase(&node->rb_node, &rc->reloc_root_tree.rb_root); rb_erase(&node->rb_node, &rc->reloc_root_tree.rb_root);
...@@ -1467,8 +1422,8 @@ static int __update_reloc_root(struct btrfs_root *root) ...@@ -1467,8 +1422,8 @@ static int __update_reloc_root(struct btrfs_root *root)
struct reloc_control *rc = fs_info->reloc_ctl; struct reloc_control *rc = fs_info->reloc_ctl;
spin_lock(&rc->reloc_root_tree.lock); spin_lock(&rc->reloc_root_tree.lock);
rb_node = tree_search(&rc->reloc_root_tree.rb_root, rb_node = rb_simple_search(&rc->reloc_root_tree.rb_root,
root->commit_root->start); root->commit_root->start);
if (rb_node) { if (rb_node) {
node = rb_entry(rb_node, struct mapping_node, rb_node); node = rb_entry(rb_node, struct mapping_node, rb_node);
rb_erase(&node->rb_node, &rc->reloc_root_tree.rb_root); rb_erase(&node->rb_node, &rc->reloc_root_tree.rb_root);
...@@ -1481,8 +1436,8 @@ static int __update_reloc_root(struct btrfs_root *root) ...@@ -1481,8 +1436,8 @@ static int __update_reloc_root(struct btrfs_root *root)
spin_lock(&rc->reloc_root_tree.lock); spin_lock(&rc->reloc_root_tree.lock);
node->bytenr = root->node->start; node->bytenr = root->node->start;
rb_node = tree_insert(&rc->reloc_root_tree.rb_root, rb_node = rb_simple_insert(&rc->reloc_root_tree.rb_root,
node->bytenr, &node->rb_node); node->bytenr, &node->rb_node);
spin_unlock(&rc->reloc_root_tree.lock); spin_unlock(&rc->reloc_root_tree.lock);
if (rb_node) if (rb_node)
backref_tree_panic(rb_node, -EEXIST, node->bytenr); backref_tree_panic(rb_node, -EEXIST, node->bytenr);
...@@ -3640,7 +3595,7 @@ static int add_tree_block(struct reloc_control *rc, ...@@ -3640,7 +3595,7 @@ static int add_tree_block(struct reloc_control *rc,
block->level = level; block->level = level;
block->key_ready = 0; block->key_ready = 0;
rb_node = tree_insert(blocks, block->bytenr, &block->rb_node); rb_node = rb_simple_insert(blocks, block->bytenr, &block->rb_node);
if (rb_node) if (rb_node)
backref_tree_panic(rb_node, -EEXIST, block->bytenr); backref_tree_panic(rb_node, -EEXIST, block->bytenr);
...@@ -3663,7 +3618,7 @@ static int __add_tree_block(struct reloc_control *rc, ...@@ -3663,7 +3618,7 @@ static int __add_tree_block(struct reloc_control *rc,
if (tree_block_processed(bytenr, rc)) if (tree_block_processed(bytenr, rc))
return 0; return 0;
if (tree_search(blocks, bytenr)) if (rb_simple_search(blocks, bytenr))
return 0; return 0;
path = btrfs_alloc_path(); path = btrfs_alloc_path();
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment