Commit ba7a5741 authored by Chris Wilson

drm/i915: Micro-optimise i915_get_ggtt_vma_pages()

The predominant VMA class is normal GTT, so allow gcc to emphasize that
path and avoid unnecessary stack movement.
Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
Reviewed-by: Mika Kuoppala <mika.kuoppala@intel.com>
Link: http://patchwork.freedesktop.org/patch/msgid/20170215084357.19977-1-chris@chris-wilson.co.uk
parent dfb65e71
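
The patch leans on two standard kernel idioms: unlikely() gives gcc a static branch hint so the page-lookup fallback is laid out off the hot path, and noinline keeps the bulky rotated/partial view constructors out of the caller, so the common normal-GTT bind does not inherit their stack frame. Below is a minimal userspace sketch of the same pattern, not the i915 code itself; the names (vma_sketch, get_pages_slow, bind_fast) are hypothetical, and the gcc/clang builtins stand in for the kernel's likely/unlikely macros.

	/* Illustrative sketch only -- hypothetical names, not from i915. */
	#include <stddef.h>

	/* Userspace stand-in for the kernel's unlikely() hint. */
	#define unlikely(x) __builtin_expect(!!(x), 0)

	struct vma_sketch {
		void *pages;	/* NULL until the backing pages are looked up */
	};

	/*
	 * Cold path: noinline keeps its spills and stack usage out of the
	 * caller's frame, mirroring the noinline added to the rotated and
	 * partial view constructors in the patch.
	 */
	static __attribute__((noinline)) int get_pages_slow(struct vma_sketch *vma)
	{
		vma->pages = &vma->pages;	/* stand-in for the real lookup */
		return 0;
	}

	/*
	 * Hot path: when pages are already present we fall straight through;
	 * the unlikely() hint pushes the slow call onto the cold edge.
	 */
	static int bind_fast(struct vma_sketch *vma)
	{
		if (unlikely(!vma->pages)) {
			int ret = get_pages_slow(vma);
			if (ret)
				return ret;
		}

		/* ... cheap, common-case bind work would go here ... */
		return 0;
	}

	int main(void)
	{
		struct vma_sketch vma = { .pages = NULL };
		return bind_fast(&vma);
	}

The switch rewrite in i915_get_ggtt_vma_pages() serves the same end: the I915_GGTT_VIEW_NORMAL case returns immediately, so only the rarer rotated and partial views go through the IS_ERR() error handling.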
@@ -2621,14 +2621,16 @@ static int ggtt_bind_vma(struct i915_vma *vma,
 {
 	struct drm_i915_private *i915 = vma->vm->i915;
 	struct drm_i915_gem_object *obj = vma->obj;
-	u32 pte_flags = 0;
-	int ret;
 
-	ret = i915_get_ggtt_vma_pages(vma);
-	if (ret)
-		return ret;
+	u32 pte_flags;
+
+	if (unlikely(!vma->pages)) {
+		int ret = i915_get_ggtt_vma_pages(vma);
+		if (ret)
+			return ret;
+	}
 
 	/* Currently applicable only to VLV */
+	pte_flags = 0;
 	if (obj->gt_ro)
 		pte_flags |= PTE_READ_ONLY;
 
@@ -2653,18 +2655,18 @@ static int aliasing_gtt_bind_vma(struct i915_vma *vma,
 {
 	struct drm_i915_private *i915 = vma->vm->i915;
 	u32 pte_flags;
-	int ret;
 
-	ret = i915_get_ggtt_vma_pages(vma);
-	if (ret)
-		return ret;
+	if (unlikely(!vma->pages)) {
+		int ret = i915_get_ggtt_vma_pages(vma);
+		if (ret)
+			return ret;
+	}
 
 	/* Currently applicable only to VLV */
 	pte_flags = 0;
 	if (vma->obj->gt_ro)
 		pte_flags |= PTE_READ_ONLY;
 
 	if (flags & I915_VMA_GLOBAL_BIND) {
 		intel_runtime_pm_get(i915);
 		vma->vm->insert_entries(vma->vm,
@@ -3430,9 +3432,9 @@ rotate_pages(const dma_addr_t *in, unsigned int offset,
 	return sg;
 }
 
-static struct sg_table *
-intel_rotate_fb_obj_pages(const struct intel_rotation_info *rot_info,
-			  struct drm_i915_gem_object *obj)
+static noinline struct sg_table *
+intel_rotate_pages(struct intel_rotation_info *rot_info,
+		   struct drm_i915_gem_object *obj)
 {
 	const size_t n_pages = obj->base.size / PAGE_SIZE;
 	unsigned int size = intel_rotation_info_size(rot_info);
@@ -3493,7 +3495,7 @@ intel_rotate_fb_obj_pages(const struct intel_rotation_info *rot_info,
 	return ERR_PTR(ret);
 }
 
-static struct sg_table *
+static noinline struct sg_table *
 intel_partial_pages(const struct i915_ggtt_view *view,
 		    struct drm_i915_gem_object *obj)
 {
@@ -3547,7 +3549,7 @@ intel_partial_pages(const struct i915_ggtt_view *view,
 static int
 i915_get_ggtt_vma_pages(struct i915_vma *vma)
 {
-	int ret = 0;
+	int ret;
 
 	/* The vma->pages are only valid within the lifespan of the borrowed
 	 * obj->mm.pages. When the obj->mm.pages sg_table is regenerated, so
@@ -3556,32 +3558,33 @@ i915_get_ggtt_vma_pages(struct i915_vma *vma)
 	 */
 	GEM_BUG_ON(!i915_gem_object_has_pinned_pages(vma->obj));
 
-	if (vma->pages)
-		return 0;
-
-	if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL)
+	switch (vma->ggtt_view.type) {
+	case I915_GGTT_VIEW_NORMAL:
 		vma->pages = vma->obj->mm.pages;
-	else if (vma->ggtt_view.type == I915_GGTT_VIEW_ROTATED)
+		return 0;
+
+	case I915_GGTT_VIEW_ROTATED:
 		vma->pages =
-			intel_rotate_fb_obj_pages(&vma->ggtt_view.rotated,
-						  vma->obj);
-	else if (vma->ggtt_view.type == I915_GGTT_VIEW_PARTIAL)
+			intel_rotate_pages(&vma->ggtt_view.rotated, vma->obj);
+		break;
+
+	case I915_GGTT_VIEW_PARTIAL:
 		vma->pages = intel_partial_pages(&vma->ggtt_view, vma->obj);
-	else
+		break;
+
+	default:
 		WARN_ONCE(1, "GGTT view %u not implemented!\n",
 			  vma->ggtt_view.type);
+		return -EINVAL;
+	}
 
-	if (!vma->pages) {
-		DRM_ERROR("Failed to get pages for GGTT view type %u!\n",
-			  vma->ggtt_view.type);
-		ret = -EINVAL;
-	} else if (IS_ERR(vma->pages)) {
+	ret = 0;
+	if (unlikely(IS_ERR(vma->pages))) {
 		ret = PTR_ERR(vma->pages);
 		vma->pages = NULL;
 		DRM_ERROR("Failed to get pages for VMA view type %u (%d)!\n",
 			  vma->ggtt_view.type, ret);
 	}
 
 	return ret;
 }