Commit 5a75034e authored by Josef Bacik's avatar Josef Bacik Committed by David Sterba

btrfs: do not panic if we can't allocate a prealloc extent state

We sometimes have to allocate new extent states when clearing or setting
new bits in an extent io tree.  Generally we preallocate this before
taking the tree spin lock, but we can use this preallocated extent state
sometimes and then need to try to do a GFP_ATOMIC allocation under the
lock.

Unfortunately sometimes this fails, and then we hit the BUG_ON() and
bring the box down.  This happens roughly 20 times a week in our fleet.

However the vast majority of callers use GFP_NOFS, which means that if
this GFP_ATOMIC allocation fails, we could simply drop the spin lock, go
back and allocate a new extent state with our given gfp mask, and begin
again from where we left off.

For the remaining callers that do not use GFP_NOFS, they are generally
using GFP_NOWAIT, which still allows for some reclaim.  So allow these
allocations to attempt to happen outside of the spin lock so we don't
need to rely on GFP_ATOMIC allocations.

This in essence creates an infinite loop for anything that isn't
GFP_NOFS.  To address this we may want to migrate to using mempools for
extent states so that we will always have emergency reserves in order to
make our allocations.
Signed-off-by: Josef Bacik <josef@toxicpanda.com>
Reviewed-by: David Sterba <dsterba@suse.com>
Signed-off-by: David Sterba <dsterba@suse.com>
parent da2a071b
...@@ -572,7 +572,7 @@ int __clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, ...@@ -572,7 +572,7 @@ int __clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
if (bits & (EXTENT_LOCKED | EXTENT_BOUNDARY)) if (bits & (EXTENT_LOCKED | EXTENT_BOUNDARY))
clear = 1; clear = 1;
again: again:
if (!prealloc && gfpflags_allow_blocking(mask)) { if (!prealloc) {
/* /*
* Don't care for allocation failure here because we might end * Don't care for allocation failure here because we might end
* up not needing the pre-allocated extent state at all, which * up not needing the pre-allocated extent state at all, which
...@@ -636,7 +636,8 @@ int __clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, ...@@ -636,7 +636,8 @@ int __clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
if (state->start < start) { if (state->start < start) {
prealloc = alloc_extent_state_atomic(prealloc); prealloc = alloc_extent_state_atomic(prealloc);
BUG_ON(!prealloc); if (!prealloc)
goto search_again;
err = split_state(tree, state, prealloc, start); err = split_state(tree, state, prealloc, start);
if (err) if (err)
extent_io_tree_panic(tree, err); extent_io_tree_panic(tree, err);
...@@ -657,7 +658,8 @@ int __clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, ...@@ -657,7 +658,8 @@ int __clear_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
*/ */
if (state->start <= end && state->end > end) { if (state->start <= end && state->end > end) {
prealloc = alloc_extent_state_atomic(prealloc); prealloc = alloc_extent_state_atomic(prealloc);
BUG_ON(!prealloc); if (!prealloc)
goto search_again;
err = split_state(tree, state, prealloc, end + 1); err = split_state(tree, state, prealloc, end + 1);
if (err) if (err)
extent_io_tree_panic(tree, err); extent_io_tree_panic(tree, err);
...@@ -987,7 +989,7 @@ static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, ...@@ -987,7 +989,7 @@ static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
else else
ASSERT(failed_start == NULL && failed_state == NULL); ASSERT(failed_start == NULL && failed_state == NULL);
again: again:
if (!prealloc && gfpflags_allow_blocking(mask)) { if (!prealloc) {
/* /*
* Don't care for allocation failure here because we might end * Don't care for allocation failure here because we might end
* up not needing the pre-allocated extent state at all, which * up not needing the pre-allocated extent state at all, which
...@@ -1012,7 +1014,8 @@ static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, ...@@ -1012,7 +1014,8 @@ static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
state = tree_search_for_insert(tree, start, &p, &parent); state = tree_search_for_insert(tree, start, &p, &parent);
if (!state) { if (!state) {
prealloc = alloc_extent_state_atomic(prealloc); prealloc = alloc_extent_state_atomic(prealloc);
BUG_ON(!prealloc); if (!prealloc)
goto search_again;
prealloc->start = start; prealloc->start = start;
prealloc->end = end; prealloc->end = end;
insert_state_fast(tree, prealloc, p, parent, bits, changeset); insert_state_fast(tree, prealloc, p, parent, bits, changeset);
...@@ -1085,7 +1088,8 @@ static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, ...@@ -1085,7 +1088,8 @@ static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
} }
prealloc = alloc_extent_state_atomic(prealloc); prealloc = alloc_extent_state_atomic(prealloc);
BUG_ON(!prealloc); if (!prealloc)
goto search_again;
err = split_state(tree, state, prealloc, start); err = split_state(tree, state, prealloc, start);
if (err) if (err)
extent_io_tree_panic(tree, err); extent_io_tree_panic(tree, err);
...@@ -1122,7 +1126,8 @@ static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, ...@@ -1122,7 +1126,8 @@ static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
this_end = last_start - 1; this_end = last_start - 1;
prealloc = alloc_extent_state_atomic(prealloc); prealloc = alloc_extent_state_atomic(prealloc);
BUG_ON(!prealloc); if (!prealloc)
goto search_again;
/* /*
* Avoid to free 'prealloc' if it can be merged with the later * Avoid to free 'prealloc' if it can be merged with the later
...@@ -1154,7 +1159,8 @@ static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end, ...@@ -1154,7 +1159,8 @@ static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
} }
prealloc = alloc_extent_state_atomic(prealloc); prealloc = alloc_extent_state_atomic(prealloc);
BUG_ON(!prealloc); if (!prealloc)
goto search_again;
err = split_state(tree, state, prealloc, end + 1); err = split_state(tree, state, prealloc, end + 1);
if (err) if (err)
extent_io_tree_panic(tree, err); extent_io_tree_panic(tree, err);
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment