Commit b2b3b886 authored by Liam R. Howlett, committed by Andrew Morton

mm: don't use __vma_adjust() in __split_vma()

Use the abstracted locking and maple tree operations.  Since __split_vma()
is the only user of the __vma_adjust() function to use the insert
argument, drop that argument.  Remove the NULL passed through from
fs/exec's shift_arg_pages() and mremap() at the same time.

Link: https://lkml.kernel.org/r/20230120162650.984577-44-Liam.Howlett@oracle.com
Signed-off-by: Liam R. Howlett <Liam.Howlett@oracle.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
parent 68cefec5
...@@ -699,7 +699,7 @@ static int shift_arg_pages(struct vm_area_struct *vma, unsigned long shift) ...@@ -699,7 +699,7 @@ static int shift_arg_pages(struct vm_area_struct *vma, unsigned long shift)
/* /*
* cover the whole range: [new_start, old_end) * cover the whole range: [new_start, old_end)
*/ */
if (vma_adjust(&vmi, vma, new_start, old_end, vma->vm_pgoff, NULL)) if (vma_adjust(&vmi, vma, new_start, old_end, vma->vm_pgoff))
return -ENOMEM; return -ENOMEM;
/* /*
...@@ -733,7 +733,7 @@ static int shift_arg_pages(struct vm_area_struct *vma, unsigned long shift) ...@@ -733,7 +733,7 @@ static int shift_arg_pages(struct vm_area_struct *vma, unsigned long shift)
vma_prev(&vmi); vma_prev(&vmi);
/* Shrink the vma to just the new range */ /* Shrink the vma to just the new range */
return vma_adjust(&vmi, vma, new_start, new_end, vma->vm_pgoff, NULL); return vma_adjust(&vmi, vma, new_start, new_end, vma->vm_pgoff);
} }
/* /*
......
...@@ -2832,13 +2832,12 @@ void anon_vma_interval_tree_verify(struct anon_vma_chain *node); ...@@ -2832,13 +2832,12 @@ void anon_vma_interval_tree_verify(struct anon_vma_chain *node);
/* mmap.c */ /* mmap.c */
extern int __vm_enough_memory(struct mm_struct *mm, long pages, int cap_sys_admin); extern int __vm_enough_memory(struct mm_struct *mm, long pages, int cap_sys_admin);
extern int __vma_adjust(struct vma_iterator *vmi, struct vm_area_struct *vma, unsigned long start, extern int __vma_adjust(struct vma_iterator *vmi, struct vm_area_struct *vma, unsigned long start,
unsigned long end, pgoff_t pgoff, struct vm_area_struct *insert, unsigned long end, pgoff_t pgoff, struct vm_area_struct *expand);
struct vm_area_struct *expand);
static inline int vma_adjust(struct vma_iterator *vmi, static inline int vma_adjust(struct vma_iterator *vmi,
struct vm_area_struct *vma, unsigned long start, unsigned long end, struct vm_area_struct *vma, unsigned long start, unsigned long end,
pgoff_t pgoff, struct vm_area_struct *insert) pgoff_t pgoff)
{ {
return __vma_adjust(vmi, vma, start, end, pgoff, insert, NULL); return __vma_adjust(vmi, vma, start, end, pgoff, NULL);
} }
extern struct vm_area_struct *vma_merge(struct vma_iterator *vmi, extern struct vm_area_struct *vma_merge(struct vma_iterator *vmi,
struct mm_struct *, struct vm_area_struct *prev, unsigned long addr, struct mm_struct *, struct vm_area_struct *prev, unsigned long addr,
......
...@@ -691,7 +691,7 @@ inline int vma_expand(struct vma_iterator *vmi, struct vm_area_struct *vma, ...@@ -691,7 +691,7 @@ inline int vma_expand(struct vma_iterator *vmi, struct vm_area_struct *vma,
*/ */
int __vma_adjust(struct vma_iterator *vmi, struct vm_area_struct *vma, int __vma_adjust(struct vma_iterator *vmi, struct vm_area_struct *vma,
unsigned long start, unsigned long end, pgoff_t pgoff, unsigned long start, unsigned long end, pgoff_t pgoff,
struct vm_area_struct *insert, struct vm_area_struct *expand) struct vm_area_struct *expand)
{ {
struct mm_struct *mm = vma->vm_mm; struct mm_struct *mm = vma->vm_mm;
struct vm_area_struct *remove2 = NULL; struct vm_area_struct *remove2 = NULL;
...@@ -704,7 +704,7 @@ int __vma_adjust(struct vma_iterator *vmi, struct vm_area_struct *vma, ...@@ -704,7 +704,7 @@ int __vma_adjust(struct vma_iterator *vmi, struct vm_area_struct *vma,
struct vm_area_struct *exporter = NULL, *importer = NULL; struct vm_area_struct *exporter = NULL, *importer = NULL;
struct vma_prepare vma_prep; struct vma_prepare vma_prep;
if (next && !insert) { if (next) {
if (end >= next->vm_end) { if (end >= next->vm_end) {
/* /*
* vma expands, overlapping all the next, and * vma expands, overlapping all the next, and
...@@ -795,39 +795,25 @@ int __vma_adjust(struct vma_iterator *vmi, struct vm_area_struct *vma, ...@@ -795,39 +795,25 @@ int __vma_adjust(struct vma_iterator *vmi, struct vm_area_struct *vma,
VM_WARN_ON(vma_prep.anon_vma && adjust_next && next->anon_vma && VM_WARN_ON(vma_prep.anon_vma && adjust_next && next->anon_vma &&
vma_prep.anon_vma != next->anon_vma); vma_prep.anon_vma != next->anon_vma);
vma_prep.insert = insert;
vma_prepare(&vma_prep); vma_prepare(&vma_prep);
if (start != vma->vm_start) { if (vma->vm_start < start)
if (vma->vm_start < start) {
if (!insert || (insert->vm_end != start)) {
vma_iter_clear(vmi, vma->vm_start, start); vma_iter_clear(vmi, vma->vm_start, start);
vma_iter_set(vmi, start); else if (start != vma->vm_start)
VM_WARN_ON(insert && insert->vm_start > vma->vm_start);
}
} else {
vma_changed = true; vma_changed = true;
}
vma->vm_start = start; if (vma->vm_end > end)
}
if (end != vma->vm_end) {
if (vma->vm_end > end) {
if (!insert || (insert->vm_start != end)) {
vma_iter_clear(vmi, end, vma->vm_end); vma_iter_clear(vmi, end, vma->vm_end);
vma_iter_set(vmi, vma->vm_end); else if (end != vma->vm_end)
VM_WARN_ON(insert &&
insert->vm_end < vma->vm_end);
}
} else {
vma_changed = true; vma_changed = true;
}
vma->vm_start = start;
vma->vm_end = end; vma->vm_end = end;
} vma->vm_pgoff = pgoff;
if (vma_changed) if (vma_changed)
vma_iter_store(vmi, vma); vma_iter_store(vmi, vma);
vma->vm_pgoff = pgoff;
if (adjust_next) { if (adjust_next) {
next->vm_start += adjust_next; next->vm_start += adjust_next;
next->vm_pgoff += adjust_next >> PAGE_SHIFT; next->vm_pgoff += adjust_next >> PAGE_SHIFT;
...@@ -1030,20 +1016,19 @@ struct vm_area_struct *vma_merge(struct vma_iterator *vmi, struct mm_struct *mm, ...@@ -1030,20 +1016,19 @@ struct vm_area_struct *vma_merge(struct vma_iterator *vmi, struct mm_struct *mm,
is_mergeable_anon_vma(prev->anon_vma, is_mergeable_anon_vma(prev->anon_vma,
next->anon_vma, NULL)) { /* cases 1, 6 */ next->anon_vma, NULL)) { /* cases 1, 6 */
err = __vma_adjust(vmi, prev, prev->vm_start, err = __vma_adjust(vmi, prev, prev->vm_start,
next->vm_end, prev->vm_pgoff, NULL, next->vm_end, prev->vm_pgoff, prev);
prev);
res = prev; res = prev;
} else if (merge_prev) { /* cases 2, 5, 7 */ } else if (merge_prev) { /* cases 2, 5, 7 */
err = __vma_adjust(vmi, prev, prev->vm_start, err = __vma_adjust(vmi, prev, prev->vm_start,
end, prev->vm_pgoff, NULL, prev); end, prev->vm_pgoff, prev);
res = prev; res = prev;
} else if (merge_next) { } else if (merge_next) {
if (prev && addr < prev->vm_end) /* case 4 */ if (prev && addr < prev->vm_end) /* case 4 */
err = __vma_adjust(vmi, prev, prev->vm_start, err = __vma_adjust(vmi, prev, prev->vm_start,
addr, prev->vm_pgoff, NULL, next); addr, prev->vm_pgoff, next);
else /* cases 3, 8 */ else /* cases 3, 8 */
err = __vma_adjust(vmi, mid, addr, next->vm_end, err = __vma_adjust(vmi, mid, addr, next->vm_end,
next->vm_pgoff - pglen, NULL, next); next->vm_pgoff - pglen, next);
res = next; res = next;
} }
...@@ -2187,11 +2172,15 @@ static void unmap_region(struct mm_struct *mm, struct maple_tree *mt, ...@@ -2187,11 +2172,15 @@ static void unmap_region(struct mm_struct *mm, struct maple_tree *mt,
int __split_vma(struct vma_iterator *vmi, struct vm_area_struct *vma, int __split_vma(struct vma_iterator *vmi, struct vm_area_struct *vma,
unsigned long addr, int new_below) unsigned long addr, int new_below)
{ {
struct vma_prepare vp;
struct vm_area_struct *new; struct vm_area_struct *new;
int err; int err;
validate_mm_mt(vma->vm_mm); validate_mm_mt(vma->vm_mm);
WARN_ON(vma->vm_start >= addr);
WARN_ON(vma->vm_end <= addr);
if (vma->vm_ops && vma->vm_ops->may_split) { if (vma->vm_ops && vma->vm_ops->may_split) {
err = vma->vm_ops->may_split(vma, addr); err = vma->vm_ops->may_split(vma, addr);
if (err) if (err)
...@@ -2202,16 +2191,20 @@ int __split_vma(struct vma_iterator *vmi, struct vm_area_struct *vma, ...@@ -2202,16 +2191,20 @@ int __split_vma(struct vma_iterator *vmi, struct vm_area_struct *vma,
if (!new) if (!new)
return -ENOMEM; return -ENOMEM;
if (new_below) err = -ENOMEM;
if (vma_iter_prealloc(vmi))
goto out_free_vma;
if (new_below) {
new->vm_end = addr; new->vm_end = addr;
else { } else {
new->vm_start = addr; new->vm_start = addr;
new->vm_pgoff += ((addr - vma->vm_start) >> PAGE_SHIFT); new->vm_pgoff += ((addr - vma->vm_start) >> PAGE_SHIFT);
} }
err = vma_dup_policy(vma, new); err = vma_dup_policy(vma, new);
if (err) if (err)
goto out_free_vma; goto out_free_vmi;
err = anon_vma_clone(new, vma); err = anon_vma_clone(new, vma);
if (err) if (err)
...@@ -2223,33 +2216,32 @@ int __split_vma(struct vma_iterator *vmi, struct vm_area_struct *vma, ...@@ -2223,33 +2216,32 @@ int __split_vma(struct vma_iterator *vmi, struct vm_area_struct *vma,
if (new->vm_ops && new->vm_ops->open) if (new->vm_ops && new->vm_ops->open)
new->vm_ops->open(new); new->vm_ops->open(new);
if (new_below) vma_adjust_trans_huge(vma, vma->vm_start, addr, 0);
err = vma_adjust(vmi, vma, addr, vma->vm_end, init_vma_prep(&vp, vma);
vma->vm_pgoff + ((addr - new->vm_start) >> PAGE_SHIFT), vp.insert = new;
new); vma_prepare(&vp);
else
err = vma_adjust(vmi, vma, vma->vm_start, addr, vma->vm_pgoff, if (new_below) {
new); vma->vm_start = addr;
vma->vm_pgoff += (addr - new->vm_start) >> PAGE_SHIFT;
} else {
vma->vm_end = addr;
}
/* vma_complete stores the new vma */
vma_complete(&vp, vmi, vma->vm_mm);
/* Success. */ /* Success. */
if (!err) {
if (new_below) if (new_below)
vma_next(vmi); vma_next(vmi);
validate_mm_mt(vma->vm_mm);
return 0; return 0;
}
/* Avoid vm accounting in close() operation */ out_free_mpol:
new->vm_start = new->vm_end;
new->vm_pgoff = 0;
/* Clean everything up if vma_adjust failed. */
if (new->vm_ops && new->vm_ops->close)
new->vm_ops->close(new);
if (new->vm_file)
fput(new->vm_file);
unlink_anon_vmas(new);
out_free_mpol:
mpol_put(vma_policy(new)); mpol_put(vma_policy(new));
out_free_vma: out_free_vmi:
vma_iter_free(vmi);
out_free_vma:
vm_area_free(new); vm_area_free(new);
validate_mm_mt(vma->vm_mm); validate_mm_mt(vma->vm_mm);
return err; return err;
......
...@@ -1054,7 +1054,7 @@ SYSCALL_DEFINE5(mremap, unsigned long, addr, unsigned long, old_len, ...@@ -1054,7 +1054,7 @@ SYSCALL_DEFINE5(mremap, unsigned long, addr, unsigned long, old_len,
vma->vm_file, extension_pgoff, vma_policy(vma), vma->vm_file, extension_pgoff, vma_policy(vma),
vma->vm_userfaultfd_ctx, anon_vma_name(vma)); vma->vm_userfaultfd_ctx, anon_vma_name(vma));
} else if (vma_adjust(&vmi, vma, vma->vm_start, } else if (vma_adjust(&vmi, vma, vma->vm_start,
addr + new_len, vma->vm_pgoff, NULL)) { addr + new_len, vma->vm_pgoff)) {
vma = NULL; vma = NULL;
} }
if (!vma) { if (!vma) {
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment