Commit 1c7f4bca authored by Chris Wilson, committed by Tvrtko Ursulin

drm/i915: Rename vma->*_list to *_link for consistency

Elsewhere we have adopted the convention of using '_link' to denote
elements in the list (and '_list' for the actual list_head itself), and
that the name should indicate which list the link belongs to (and
preferably not just where the link is being stored).

s/vma_link/obj_link/ (we iterate over obj->vma_list)
s/mm_list/vm_link/ (we iterate over vm->[in]active_list)
Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
Reviewed-by: Tvrtko Ursulin <tvrtko.ursulin@intel.com>
Signed-off-by: Tvrtko Ursulin <tvrtko.ursulin@intel.com>
parent 135dc79e
...@@ -117,9 +117,8 @@ static u64 i915_gem_obj_total_ggtt_size(struct drm_i915_gem_object *obj) ...@@ -117,9 +117,8 @@ static u64 i915_gem_obj_total_ggtt_size(struct drm_i915_gem_object *obj)
u64 size = 0; u64 size = 0;
struct i915_vma *vma; struct i915_vma *vma;
list_for_each_entry(vma, &obj->vma_list, vma_link) { list_for_each_entry(vma, &obj->vma_list, obj_link) {
if (i915_is_ggtt(vma->vm) && if (i915_is_ggtt(vma->vm) && drm_mm_node_allocated(&vma->node))
drm_mm_node_allocated(&vma->node))
size += vma->node.size; size += vma->node.size;
} }
...@@ -155,7 +154,7 @@ describe_obj(struct seq_file *m, struct drm_i915_gem_object *obj) ...@@ -155,7 +154,7 @@ describe_obj(struct seq_file *m, struct drm_i915_gem_object *obj)
obj->madv == I915_MADV_DONTNEED ? " purgeable" : ""); obj->madv == I915_MADV_DONTNEED ? " purgeable" : "");
if (obj->base.name) if (obj->base.name)
seq_printf(m, " (name: %d)", obj->base.name); seq_printf(m, " (name: %d)", obj->base.name);
list_for_each_entry(vma, &obj->vma_list, vma_link) { list_for_each_entry(vma, &obj->vma_list, obj_link) {
if (vma->pin_count > 0) if (vma->pin_count > 0)
pin_count++; pin_count++;
} }
...@@ -164,7 +163,7 @@ describe_obj(struct seq_file *m, struct drm_i915_gem_object *obj) ...@@ -164,7 +163,7 @@ describe_obj(struct seq_file *m, struct drm_i915_gem_object *obj)
seq_printf(m, " (display)"); seq_printf(m, " (display)");
if (obj->fence_reg != I915_FENCE_REG_NONE) if (obj->fence_reg != I915_FENCE_REG_NONE)
seq_printf(m, " (fence: %d)", obj->fence_reg); seq_printf(m, " (fence: %d)", obj->fence_reg);
list_for_each_entry(vma, &obj->vma_list, vma_link) { list_for_each_entry(vma, &obj->vma_list, obj_link) {
seq_printf(m, " (%sgtt offset: %08llx, size: %08llx", seq_printf(m, " (%sgtt offset: %08llx, size: %08llx",
i915_is_ggtt(vma->vm) ? "g" : "pp", i915_is_ggtt(vma->vm) ? "g" : "pp",
vma->node.start, vma->node.size); vma->node.start, vma->node.size);
...@@ -230,7 +229,7 @@ static int i915_gem_object_list_info(struct seq_file *m, void *data) ...@@ -230,7 +229,7 @@ static int i915_gem_object_list_info(struct seq_file *m, void *data)
} }
total_obj_size = total_gtt_size = count = 0; total_obj_size = total_gtt_size = count = 0;
list_for_each_entry(vma, head, mm_list) { list_for_each_entry(vma, head, vm_link) {
seq_printf(m, " "); seq_printf(m, " ");
describe_obj(m, vma->obj); describe_obj(m, vma->obj);
seq_printf(m, "\n"); seq_printf(m, "\n");
...@@ -342,7 +341,7 @@ static int per_file_stats(int id, void *ptr, void *data) ...@@ -342,7 +341,7 @@ static int per_file_stats(int id, void *ptr, void *data)
stats->shared += obj->base.size; stats->shared += obj->base.size;
if (USES_FULL_PPGTT(obj->base.dev)) { if (USES_FULL_PPGTT(obj->base.dev)) {
list_for_each_entry(vma, &obj->vma_list, vma_link) { list_for_each_entry(vma, &obj->vma_list, obj_link) {
struct i915_hw_ppgtt *ppgtt; struct i915_hw_ppgtt *ppgtt;
if (!drm_mm_node_allocated(&vma->node)) if (!drm_mm_node_allocated(&vma->node))
...@@ -454,12 +453,12 @@ static int i915_gem_object_info(struct seq_file *m, void* data) ...@@ -454,12 +453,12 @@ static int i915_gem_object_info(struct seq_file *m, void* data)
count, mappable_count, size, mappable_size); count, mappable_count, size, mappable_size);
size = count = mappable_size = mappable_count = 0; size = count = mappable_size = mappable_count = 0;
count_vmas(&vm->active_list, mm_list); count_vmas(&vm->active_list, vm_link);
seq_printf(m, " %u [%u] active objects, %llu [%llu] bytes\n", seq_printf(m, " %u [%u] active objects, %llu [%llu] bytes\n",
count, mappable_count, size, mappable_size); count, mappable_count, size, mappable_size);
size = count = mappable_size = mappable_count = 0; size = count = mappable_size = mappable_count = 0;
count_vmas(&vm->inactive_list, mm_list); count_vmas(&vm->inactive_list, vm_link);
seq_printf(m, " %u [%u] inactive objects, %llu [%llu] bytes\n", seq_printf(m, " %u [%u] inactive objects, %llu [%llu] bytes\n",
count, mappable_count, size, mappable_size); count, mappable_count, size, mappable_size);
......
...@@ -138,10 +138,10 @@ i915_gem_get_aperture_ioctl(struct drm_device *dev, void *data, ...@@ -138,10 +138,10 @@ i915_gem_get_aperture_ioctl(struct drm_device *dev, void *data,
pinned = 0; pinned = 0;
mutex_lock(&dev->struct_mutex); mutex_lock(&dev->struct_mutex);
list_for_each_entry(vma, &ggtt->base.active_list, mm_list) list_for_each_entry(vma, &ggtt->base.active_list, vm_link)
if (vma->pin_count) if (vma->pin_count)
pinned += vma->node.size; pinned += vma->node.size;
list_for_each_entry(vma, &ggtt->base.inactive_list, mm_list) list_for_each_entry(vma, &ggtt->base.inactive_list, vm_link)
if (vma->pin_count) if (vma->pin_count)
pinned += vma->node.size; pinned += vma->node.size;
mutex_unlock(&dev->struct_mutex); mutex_unlock(&dev->struct_mutex);
...@@ -272,7 +272,7 @@ drop_pages(struct drm_i915_gem_object *obj) ...@@ -272,7 +272,7 @@ drop_pages(struct drm_i915_gem_object *obj)
int ret; int ret;
drm_gem_object_reference(&obj->base); drm_gem_object_reference(&obj->base);
list_for_each_entry_safe(vma, next, &obj->vma_list, vma_link) list_for_each_entry_safe(vma, next, &obj->vma_list, obj_link)
if (i915_vma_unbind(vma)) if (i915_vma_unbind(vma))
break; break;
...@@ -2416,7 +2416,7 @@ void i915_vma_move_to_active(struct i915_vma *vma, ...@@ -2416,7 +2416,7 @@ void i915_vma_move_to_active(struct i915_vma *vma,
list_move_tail(&obj->ring_list[ring->id], &ring->active_list); list_move_tail(&obj->ring_list[ring->id], &ring->active_list);
i915_gem_request_assign(&obj->last_read_req[ring->id], req); i915_gem_request_assign(&obj->last_read_req[ring->id], req);
list_move_tail(&vma->mm_list, &vma->vm->active_list); list_move_tail(&vma->vm_link, &vma->vm->active_list);
} }
static void static void
...@@ -2454,9 +2454,9 @@ i915_gem_object_retire__read(struct drm_i915_gem_object *obj, int ring) ...@@ -2454,9 +2454,9 @@ i915_gem_object_retire__read(struct drm_i915_gem_object *obj, int ring)
list_move_tail(&obj->global_list, list_move_tail(&obj->global_list,
&to_i915(obj->base.dev)->mm.bound_list); &to_i915(obj->base.dev)->mm.bound_list);
list_for_each_entry(vma, &obj->vma_list, vma_link) { list_for_each_entry(vma, &obj->vma_list, obj_link) {
if (!list_empty(&vma->mm_list)) if (!list_empty(&vma->vm_link))
list_move_tail(&vma->mm_list, &vma->vm->inactive_list); list_move_tail(&vma->vm_link, &vma->vm->inactive_list);
} }
i915_gem_request_assign(&obj->last_fenced_req, NULL); i915_gem_request_assign(&obj->last_fenced_req, NULL);
...@@ -3317,7 +3317,7 @@ static int __i915_vma_unbind(struct i915_vma *vma, bool wait) ...@@ -3317,7 +3317,7 @@ static int __i915_vma_unbind(struct i915_vma *vma, bool wait)
struct drm_i915_private *dev_priv = obj->base.dev->dev_private; struct drm_i915_private *dev_priv = obj->base.dev->dev_private;
int ret; int ret;
if (list_empty(&vma->vma_link)) if (list_empty(&vma->obj_link))
return 0; return 0;
if (!drm_mm_node_allocated(&vma->node)) { if (!drm_mm_node_allocated(&vma->node)) {
...@@ -3351,7 +3351,7 @@ static int __i915_vma_unbind(struct i915_vma *vma, bool wait) ...@@ -3351,7 +3351,7 @@ static int __i915_vma_unbind(struct i915_vma *vma, bool wait)
vma->vm->unbind_vma(vma); vma->vm->unbind_vma(vma);
vma->bound = 0; vma->bound = 0;
list_del_init(&vma->mm_list); list_del_init(&vma->vm_link);
if (i915_is_ggtt(vma->vm)) { if (i915_is_ggtt(vma->vm)) {
if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL) { if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL) {
obj->map_and_fenceable = false; obj->map_and_fenceable = false;
...@@ -3609,7 +3609,7 @@ i915_gem_object_bind_to_vm(struct drm_i915_gem_object *obj, ...@@ -3609,7 +3609,7 @@ i915_gem_object_bind_to_vm(struct drm_i915_gem_object *obj,
goto err_remove_node; goto err_remove_node;
list_move_tail(&obj->global_list, &dev_priv->mm.bound_list); list_move_tail(&obj->global_list, &dev_priv->mm.bound_list);
list_add_tail(&vma->mm_list, &vm->inactive_list); list_add_tail(&vma->vm_link, &vm->inactive_list);
return vma; return vma;
...@@ -3774,7 +3774,7 @@ i915_gem_object_set_to_gtt_domain(struct drm_i915_gem_object *obj, bool write) ...@@ -3774,7 +3774,7 @@ i915_gem_object_set_to_gtt_domain(struct drm_i915_gem_object *obj, bool write)
/* And bump the LRU for this access */ /* And bump the LRU for this access */
vma = i915_gem_obj_to_ggtt(obj); vma = i915_gem_obj_to_ggtt(obj);
if (vma && drm_mm_node_allocated(&vma->node) && !obj->active) if (vma && drm_mm_node_allocated(&vma->node) && !obj->active)
list_move_tail(&vma->mm_list, list_move_tail(&vma->vm_link,
&to_i915(obj->base.dev)->gtt.base.inactive_list); &to_i915(obj->base.dev)->gtt.base.inactive_list);
return 0; return 0;
...@@ -3809,7 +3809,7 @@ int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj, ...@@ -3809,7 +3809,7 @@ int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj,
* catch the issue of the CS prefetch crossing page boundaries and * catch the issue of the CS prefetch crossing page boundaries and
* reading an invalid PTE on older architectures. * reading an invalid PTE on older architectures.
*/ */
list_for_each_entry_safe(vma, next, &obj->vma_list, vma_link) { list_for_each_entry_safe(vma, next, &obj->vma_list, obj_link) {
if (!drm_mm_node_allocated(&vma->node)) if (!drm_mm_node_allocated(&vma->node))
continue; continue;
...@@ -3872,7 +3872,7 @@ int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj, ...@@ -3872,7 +3872,7 @@ int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj,
*/ */
} }
list_for_each_entry(vma, &obj->vma_list, vma_link) { list_for_each_entry(vma, &obj->vma_list, obj_link) {
if (!drm_mm_node_allocated(&vma->node)) if (!drm_mm_node_allocated(&vma->node))
continue; continue;
...@@ -3882,7 +3882,7 @@ int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj, ...@@ -3882,7 +3882,7 @@ int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj,
} }
} }
list_for_each_entry(vma, &obj->vma_list, vma_link) list_for_each_entry(vma, &obj->vma_list, obj_link)
vma->node.color = cache_level; vma->node.color = cache_level;
obj->cache_level = cache_level; obj->cache_level = cache_level;
...@@ -4556,7 +4556,7 @@ void i915_gem_free_object(struct drm_gem_object *gem_obj) ...@@ -4556,7 +4556,7 @@ void i915_gem_free_object(struct drm_gem_object *gem_obj)
trace_i915_gem_object_destroy(obj); trace_i915_gem_object_destroy(obj);
list_for_each_entry_safe(vma, next, &obj->vma_list, vma_link) { list_for_each_entry_safe(vma, next, &obj->vma_list, obj_link) {
int ret; int ret;
vma->pin_count = 0; vma->pin_count = 0;
...@@ -4613,7 +4613,7 @@ struct i915_vma *i915_gem_obj_to_vma(struct drm_i915_gem_object *obj, ...@@ -4613,7 +4613,7 @@ struct i915_vma *i915_gem_obj_to_vma(struct drm_i915_gem_object *obj,
struct i915_address_space *vm) struct i915_address_space *vm)
{ {
struct i915_vma *vma; struct i915_vma *vma;
list_for_each_entry(vma, &obj->vma_list, vma_link) { list_for_each_entry(vma, &obj->vma_list, obj_link) {
if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL && if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL &&
vma->vm == vm) vma->vm == vm)
return vma; return vma;
...@@ -4630,7 +4630,7 @@ struct i915_vma *i915_gem_obj_to_ggtt_view(struct drm_i915_gem_object *obj, ...@@ -4630,7 +4630,7 @@ struct i915_vma *i915_gem_obj_to_ggtt_view(struct drm_i915_gem_object *obj,
if (WARN_ONCE(!view, "no view specified")) if (WARN_ONCE(!view, "no view specified"))
return ERR_PTR(-EINVAL); return ERR_PTR(-EINVAL);
list_for_each_entry(vma, &obj->vma_list, vma_link) list_for_each_entry(vma, &obj->vma_list, obj_link)
if (vma->vm == ggtt && if (vma->vm == ggtt &&
i915_ggtt_view_equal(&vma->ggtt_view, view)) i915_ggtt_view_equal(&vma->ggtt_view, view))
return vma; return vma;
...@@ -4651,7 +4651,7 @@ void i915_gem_vma_destroy(struct i915_vma *vma) ...@@ -4651,7 +4651,7 @@ void i915_gem_vma_destroy(struct i915_vma *vma)
if (!i915_is_ggtt(vm)) if (!i915_is_ggtt(vm))
i915_ppgtt_put(i915_vm_to_ppgtt(vm)); i915_ppgtt_put(i915_vm_to_ppgtt(vm));
list_del(&vma->vma_link); list_del(&vma->obj_link);
kmem_cache_free(to_i915(vma->obj->base.dev)->vmas, vma); kmem_cache_free(to_i915(vma->obj->base.dev)->vmas, vma);
} }
...@@ -5201,7 +5201,7 @@ u64 i915_gem_obj_offset(struct drm_i915_gem_object *o, ...@@ -5201,7 +5201,7 @@ u64 i915_gem_obj_offset(struct drm_i915_gem_object *o,
WARN_ON(vm == &dev_priv->mm.aliasing_ppgtt->base); WARN_ON(vm == &dev_priv->mm.aliasing_ppgtt->base);
list_for_each_entry(vma, &o->vma_list, vma_link) { list_for_each_entry(vma, &o->vma_list, obj_link) {
if (i915_is_ggtt(vma->vm) && if (i915_is_ggtt(vma->vm) &&
vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL)
continue; continue;
...@@ -5220,7 +5220,7 @@ u64 i915_gem_obj_ggtt_offset_view(struct drm_i915_gem_object *o, ...@@ -5220,7 +5220,7 @@ u64 i915_gem_obj_ggtt_offset_view(struct drm_i915_gem_object *o,
struct i915_address_space *ggtt = i915_obj_to_ggtt(o); struct i915_address_space *ggtt = i915_obj_to_ggtt(o);
struct i915_vma *vma; struct i915_vma *vma;
list_for_each_entry(vma, &o->vma_list, vma_link) list_for_each_entry(vma, &o->vma_list, obj_link)
if (vma->vm == ggtt && if (vma->vm == ggtt &&
i915_ggtt_view_equal(&vma->ggtt_view, view)) i915_ggtt_view_equal(&vma->ggtt_view, view))
return vma->node.start; return vma->node.start;
...@@ -5234,7 +5234,7 @@ bool i915_gem_obj_bound(struct drm_i915_gem_object *o, ...@@ -5234,7 +5234,7 @@ bool i915_gem_obj_bound(struct drm_i915_gem_object *o,
{ {
struct i915_vma *vma; struct i915_vma *vma;
list_for_each_entry(vma, &o->vma_list, vma_link) { list_for_each_entry(vma, &o->vma_list, obj_link) {
if (i915_is_ggtt(vma->vm) && if (i915_is_ggtt(vma->vm) &&
vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL)
continue; continue;
...@@ -5251,7 +5251,7 @@ bool i915_gem_obj_ggtt_bound_view(struct drm_i915_gem_object *o, ...@@ -5251,7 +5251,7 @@ bool i915_gem_obj_ggtt_bound_view(struct drm_i915_gem_object *o,
struct i915_address_space *ggtt = i915_obj_to_ggtt(o); struct i915_address_space *ggtt = i915_obj_to_ggtt(o);
struct i915_vma *vma; struct i915_vma *vma;
list_for_each_entry(vma, &o->vma_list, vma_link) list_for_each_entry(vma, &o->vma_list, obj_link)
if (vma->vm == ggtt && if (vma->vm == ggtt &&
i915_ggtt_view_equal(&vma->ggtt_view, view) && i915_ggtt_view_equal(&vma->ggtt_view, view) &&
drm_mm_node_allocated(&vma->node)) drm_mm_node_allocated(&vma->node))
...@@ -5264,7 +5264,7 @@ bool i915_gem_obj_bound_any(struct drm_i915_gem_object *o) ...@@ -5264,7 +5264,7 @@ bool i915_gem_obj_bound_any(struct drm_i915_gem_object *o)
{ {
struct i915_vma *vma; struct i915_vma *vma;
list_for_each_entry(vma, &o->vma_list, vma_link) list_for_each_entry(vma, &o->vma_list, obj_link)
if (drm_mm_node_allocated(&vma->node)) if (drm_mm_node_allocated(&vma->node))
return true; return true;
...@@ -5281,7 +5281,7 @@ unsigned long i915_gem_obj_size(struct drm_i915_gem_object *o, ...@@ -5281,7 +5281,7 @@ unsigned long i915_gem_obj_size(struct drm_i915_gem_object *o,
BUG_ON(list_empty(&o->vma_list)); BUG_ON(list_empty(&o->vma_list));
list_for_each_entry(vma, &o->vma_list, vma_link) { list_for_each_entry(vma, &o->vma_list, obj_link) {
if (i915_is_ggtt(vma->vm) && if (i915_is_ggtt(vma->vm) &&
vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL)
continue; continue;
...@@ -5294,7 +5294,7 @@ unsigned long i915_gem_obj_size(struct drm_i915_gem_object *o, ...@@ -5294,7 +5294,7 @@ unsigned long i915_gem_obj_size(struct drm_i915_gem_object *o,
bool i915_gem_obj_is_pinned(struct drm_i915_gem_object *obj) bool i915_gem_obj_is_pinned(struct drm_i915_gem_object *obj)
{ {
struct i915_vma *vma; struct i915_vma *vma;
list_for_each_entry(vma, &obj->vma_list, vma_link) list_for_each_entry(vma, &obj->vma_list, obj_link)
if (vma->pin_count > 0) if (vma->pin_count > 0)
return true; return true;
......
...@@ -142,7 +142,7 @@ static void i915_gem_context_clean(struct intel_context *ctx) ...@@ -142,7 +142,7 @@ static void i915_gem_context_clean(struct intel_context *ctx)
return; return;
list_for_each_entry_safe(vma, next, &ppgtt->base.inactive_list, list_for_each_entry_safe(vma, next, &ppgtt->base.inactive_list,
mm_list) { vm_link) {
if (WARN_ON(__i915_vma_unbind_no_wait(vma))) if (WARN_ON(__i915_vma_unbind_no_wait(vma)))
break; break;
} }
......
...@@ -116,7 +116,7 @@ i915_gem_evict_something(struct drm_device *dev, struct i915_address_space *vm, ...@@ -116,7 +116,7 @@ i915_gem_evict_something(struct drm_device *dev, struct i915_address_space *vm,
search_again: search_again:
/* First see if there is a large enough contiguous idle region... */ /* First see if there is a large enough contiguous idle region... */
list_for_each_entry(vma, &vm->inactive_list, mm_list) { list_for_each_entry(vma, &vm->inactive_list, vm_link) {
if (mark_free(vma, &unwind_list)) if (mark_free(vma, &unwind_list))
goto found; goto found;
} }
...@@ -125,7 +125,7 @@ i915_gem_evict_something(struct drm_device *dev, struct i915_address_space *vm, ...@@ -125,7 +125,7 @@ i915_gem_evict_something(struct drm_device *dev, struct i915_address_space *vm,
goto none; goto none;
/* Now merge in the soon-to-be-expired objects... */ /* Now merge in the soon-to-be-expired objects... */
list_for_each_entry(vma, &vm->active_list, mm_list) { list_for_each_entry(vma, &vm->active_list, vm_link) {
if (mark_free(vma, &unwind_list)) if (mark_free(vma, &unwind_list))
goto found; goto found;
} }
...@@ -270,7 +270,7 @@ int i915_gem_evict_vm(struct i915_address_space *vm, bool do_idle) ...@@ -270,7 +270,7 @@ int i915_gem_evict_vm(struct i915_address_space *vm, bool do_idle)
WARN_ON(!list_empty(&vm->active_list)); WARN_ON(!list_empty(&vm->active_list));
} }
list_for_each_entry_safe(vma, next, &vm->inactive_list, mm_list) list_for_each_entry_safe(vma, next, &vm->inactive_list, vm_link)
if (vma->pin_count == 0) if (vma->pin_count == 0)
WARN_ON(i915_vma_unbind(vma)); WARN_ON(i915_vma_unbind(vma));
......
...@@ -2758,7 +2758,7 @@ static int i915_gem_setup_global_gtt(struct drm_device *dev, ...@@ -2758,7 +2758,7 @@ static int i915_gem_setup_global_gtt(struct drm_device *dev,
} }
vma->bound |= GLOBAL_BIND; vma->bound |= GLOBAL_BIND;
__i915_vma_set_map_and_fenceable(vma); __i915_vma_set_map_and_fenceable(vma);
list_add_tail(&vma->mm_list, &ggtt_vm->inactive_list); list_add_tail(&vma->vm_link, &ggtt_vm->inactive_list);
} }
/* Clear any non-preallocated blocks */ /* Clear any non-preallocated blocks */
...@@ -3258,7 +3258,7 @@ void i915_gem_restore_gtt_mappings(struct drm_device *dev) ...@@ -3258,7 +3258,7 @@ void i915_gem_restore_gtt_mappings(struct drm_device *dev)
vm = &dev_priv->gtt.base; vm = &dev_priv->gtt.base;
list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list) { list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list) {
flush = false; flush = false;
list_for_each_entry(vma, &obj->vma_list, vma_link) { list_for_each_entry(vma, &obj->vma_list, obj_link) {
if (vma->vm != vm) if (vma->vm != vm)
continue; continue;
...@@ -3314,8 +3314,8 @@ __i915_gem_vma_create(struct drm_i915_gem_object *obj, ...@@ -3314,8 +3314,8 @@ __i915_gem_vma_create(struct drm_i915_gem_object *obj,
if (vma == NULL) if (vma == NULL)
return ERR_PTR(-ENOMEM); return ERR_PTR(-ENOMEM);
INIT_LIST_HEAD(&vma->vma_link); INIT_LIST_HEAD(&vma->vm_link);
INIT_LIST_HEAD(&vma->mm_list); INIT_LIST_HEAD(&vma->obj_link);
INIT_LIST_HEAD(&vma->exec_list); INIT_LIST_HEAD(&vma->exec_list);
vma->vm = vm; vma->vm = vm;
vma->obj = obj; vma->obj = obj;
...@@ -3323,7 +3323,7 @@ __i915_gem_vma_create(struct drm_i915_gem_object *obj, ...@@ -3323,7 +3323,7 @@ __i915_gem_vma_create(struct drm_i915_gem_object *obj,
if (i915_is_ggtt(vm)) if (i915_is_ggtt(vm))
vma->ggtt_view = *ggtt_view; vma->ggtt_view = *ggtt_view;
list_add_tail(&vma->vma_link, &obj->vma_list); list_add_tail(&vma->obj_link, &obj->vma_list);
if (!i915_is_ggtt(vm)) if (!i915_is_ggtt(vm))
i915_ppgtt_get(i915_vm_to_ppgtt(vm)); i915_ppgtt_get(i915_vm_to_ppgtt(vm));
......
...@@ -194,9 +194,9 @@ struct i915_vma { ...@@ -194,9 +194,9 @@ struct i915_vma {
struct i915_ggtt_view ggtt_view; struct i915_ggtt_view ggtt_view;
/** This object's place on the active/inactive lists */ /** This object's place on the active/inactive lists */
struct list_head mm_list; struct list_head vm_link;
struct list_head vma_link; /* Link in the object's VMA list */ struct list_head obj_link; /* Link in the object's VMA list */
/** This vma's place in the batchbuffer or on the eviction list */ /** This vma's place in the batchbuffer or on the eviction list */
struct list_head exec_list; struct list_head exec_list;
......
...@@ -52,7 +52,7 @@ static int num_vma_bound(struct drm_i915_gem_object *obj) ...@@ -52,7 +52,7 @@ static int num_vma_bound(struct drm_i915_gem_object *obj)
struct i915_vma *vma; struct i915_vma *vma;
int count = 0; int count = 0;
list_for_each_entry(vma, &obj->vma_list, vma_link) { list_for_each_entry(vma, &obj->vma_list, obj_link) {
if (drm_mm_node_allocated(&vma->node)) if (drm_mm_node_allocated(&vma->node))
count++; count++;
if (vma->pin_count) if (vma->pin_count)
...@@ -176,7 +176,7 @@ i915_gem_shrink(struct drm_i915_private *dev_priv, ...@@ -176,7 +176,7 @@ i915_gem_shrink(struct drm_i915_private *dev_priv,
/* For the unbound phase, this should be a no-op! */ /* For the unbound phase, this should be a no-op! */
list_for_each_entry_safe(vma, v, list_for_each_entry_safe(vma, v,
&obj->vma_list, vma_link) &obj->vma_list, obj_link)
if (i915_vma_unbind(vma)) if (i915_vma_unbind(vma))
break; break;
......
...@@ -697,7 +697,7 @@ i915_gem_object_create_stolen_for_preallocated(struct drm_device *dev, ...@@ -697,7 +697,7 @@ i915_gem_object_create_stolen_for_preallocated(struct drm_device *dev,
vma->bound |= GLOBAL_BIND; vma->bound |= GLOBAL_BIND;
__i915_vma_set_map_and_fenceable(vma); __i915_vma_set_map_and_fenceable(vma);
list_add_tail(&vma->mm_list, &ggtt->inactive_list); list_add_tail(&vma->vm_link, &ggtt->inactive_list);
} }
list_add_tail(&obj->global_list, &dev_priv->mm.bound_list); list_add_tail(&obj->global_list, &dev_priv->mm.bound_list);
......
...@@ -78,7 +78,7 @@ static void cancel_userptr(struct work_struct *work) ...@@ -78,7 +78,7 @@ static void cancel_userptr(struct work_struct *work)
was_interruptible = dev_priv->mm.interruptible; was_interruptible = dev_priv->mm.interruptible;
dev_priv->mm.interruptible = false; dev_priv->mm.interruptible = false;
list_for_each_entry_safe(vma, tmp, &obj->vma_list, vma_link) { list_for_each_entry_safe(vma, tmp, &obj->vma_list, obj_link) {
int ret = i915_vma_unbind(vma); int ret = i915_vma_unbind(vma);
WARN_ON(ret && ret != -EIO); WARN_ON(ret && ret != -EIO);
} }
......
...@@ -736,7 +736,7 @@ static u32 capture_active_bo(struct drm_i915_error_buffer *err, ...@@ -736,7 +736,7 @@ static u32 capture_active_bo(struct drm_i915_error_buffer *err,
struct i915_vma *vma; struct i915_vma *vma;
int i = 0; int i = 0;
list_for_each_entry(vma, head, mm_list) { list_for_each_entry(vma, head, vm_link) {
capture_bo(err++, vma); capture_bo(err++, vma);
if (++i == count) if (++i == count)
break; break;
...@@ -759,7 +759,7 @@ static u32 capture_pinned_bo(struct drm_i915_error_buffer *err, ...@@ -759,7 +759,7 @@ static u32 capture_pinned_bo(struct drm_i915_error_buffer *err,
if (err == last) if (err == last)
break; break;
list_for_each_entry(vma, &obj->vma_list, vma_link) list_for_each_entry(vma, &obj->vma_list, obj_link)
if (vma->vm == vm && vma->pin_count > 0) if (vma->vm == vm && vma->pin_count > 0)
capture_bo(err++, vma); capture_bo(err++, vma);
} }
...@@ -1127,12 +1127,12 @@ static void i915_gem_capture_vm(struct drm_i915_private *dev_priv, ...@@ -1127,12 +1127,12 @@ static void i915_gem_capture_vm(struct drm_i915_private *dev_priv,
int i; int i;
i = 0; i = 0;
list_for_each_entry(vma, &vm->active_list, mm_list) list_for_each_entry(vma, &vm->active_list, vm_link)
i++; i++;
error->active_bo_count[ndx] = i; error->active_bo_count[ndx] = i;
list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list) { list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list) {
list_for_each_entry(vma, &obj->vma_list, vma_link) list_for_each_entry(vma, &obj->vma_list, obj_link)
if (vma->vm == vm && vma->pin_count > 0) if (vma->vm == vm && vma->pin_count > 0)
i++; i++;
} }
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment