drm/i915: Set the map-and-fenceable flag for preallocated objects
As we mark the preallocated objects as bound, we should also flag them correctly as being map-and-fenceable (if appropriate!) so that later users do not get confused and try and rebind the pinned vma in order to get a map-and-fenceable binding.

Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
Cc: "Goel, Akash" <akash.goel@intel.com>
Cc: Daniel Vetter <daniel.vetter@ffwll.ch>
Cc: Jesse Barnes <jbarnes@virtuousgeek.org>
Cc: drm-intel-fixes@lists.freedesktop.org
Link: http://patchwork.freedesktop.org/patch/msgid/1448029000-10616-1-git-send-email-chris@chris-wilson.co.uk
Reviewed-by: Jesse Barnes <jbarnes@virtuousgeek.org>
Signed-off-by: Daniel Vetter <daniel.vetter@ffwll.ch>
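For context on the failure mode: when a caller later asks to pin a GGTT binding with PIN_MAPPABLE, the pin path treats a binding whose object is not flagged map-and-fenceable as misplaced and tries to unbind and rebind it, which cannot work for a pinned, preallocated vma. The following is a minimal standalone sketch of that decision, not driver code; the toy_obj/toy_vma/toy_pin names and TOY_PIN_MAPPABLE flag are invented stand-ins for the real structures:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Toy stand-ins for the driver structures; names are illustrative only. */
struct toy_obj {
	bool map_and_fenceable;
};

struct toy_vma {
	struct toy_obj *obj;
	bool pinned;            /* preallocated/pinned bindings cannot be rebound */
};

#define TOY_PIN_MAPPABLE 0x1

/* Mirrors the idea of the misplacement check: a MAPPABLE request against a
 * binding whose object is not flagged map-and-fenceable looks "misplaced". */
static bool toy_vma_misplaced(const struct toy_vma *vma, uint64_t flags)
{
	return (flags & TOY_PIN_MAPPABLE) && !vma->obj->map_and_fenceable;
}

static int toy_pin(struct toy_vma *vma, uint64_t flags)
{
	if (toy_vma_misplaced(vma, flags)) {
		if (vma->pinned) {
			/* The failure this patch avoids: a correct but
			 * unflagged preallocated binding cannot be rebound. */
			fprintf(stderr, "cannot rebind a pinned vma\n");
			return -1;
		}
		/* otherwise the binding would be unbound and rebound here */
	}
	return 0;
}

int main(void)
{
	struct toy_obj obj = { .map_and_fenceable = false };
	struct toy_vma vma = { .obj = &obj, .pinned = true };

	/* Without the flag recorded at preallocation time, the pin fails. */
	printf("unflagged: %d\n", toy_pin(&vma, TOY_PIN_MAPPABLE));

	/* With the flag set (as the patch does), the same request succeeds. */
	obj.map_and_fenceable = true;
	printf("flagged:   %d\n", toy_pin(&vma, TOY_PIN_MAPPABLE));
	return 0;
}

With the flag recorded up front, the later PIN_MAPPABLE request is satisfied without attempting a rebind.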
This commit is contained in:

parent 71a199bacb
commit d0710abbcd

4 changed files with 27 additions and 19 deletions
drivers/gpu/drm/i915/i915_drv.h
@@ -2894,6 +2894,7 @@ i915_gem_object_ggtt_pin(struct drm_i915_gem_object *obj,
 int i915_vma_bind(struct i915_vma *vma, enum i915_cache_level cache_level,
		  u32 flags);
+void __i915_vma_set_map_and_fenceable(struct i915_vma *vma);
 int __must_check i915_vma_unbind(struct i915_vma *vma);
 /*
  * BEWARE: Do not use the function below unless you can _absolutely_
drivers/gpu/drm/i915/i915_gem.c
@@ -4110,6 +4110,29 @@ i915_vma_misplaced(struct i915_vma *vma, uint32_t alignment, uint64_t flags)
 	return false;
 }
 
+void __i915_vma_set_map_and_fenceable(struct i915_vma *vma)
+{
+	struct drm_i915_gem_object *obj = vma->obj;
+	bool mappable, fenceable;
+	u32 fence_size, fence_alignment;
+
+	fence_size = i915_gem_get_gtt_size(obj->base.dev,
+					   obj->base.size,
+					   obj->tiling_mode);
+	fence_alignment = i915_gem_get_gtt_alignment(obj->base.dev,
+						     obj->base.size,
+						     obj->tiling_mode,
+						     true);
+
+	fenceable = (vma->node.size == fence_size &&
+		     (vma->node.start & (fence_alignment - 1)) == 0);
+
+	mappable = (vma->node.start + fence_size <=
+		    to_i915(obj->base.dev)->gtt.mappable_end);
+
+	obj->map_and_fenceable = mappable && fenceable;
+}
+
 static int
 i915_gem_object_do_pin(struct drm_i915_gem_object *obj,
		       struct i915_address_space *vm,
@@ -4177,25 +4200,7 @@ i915_gem_object_do_pin(struct drm_i915_gem_object *obj,
 
 	if (ggtt_view && ggtt_view->type == I915_GGTT_VIEW_NORMAL &&
	    (bound ^ vma->bound) & GLOBAL_BIND) {
-		bool mappable, fenceable;
-		u32 fence_size, fence_alignment;
-
-		fence_size = i915_gem_get_gtt_size(obj->base.dev,
-						   obj->base.size,
-						   obj->tiling_mode);
-		fence_alignment = i915_gem_get_gtt_alignment(obj->base.dev,
-							     obj->base.size,
-							     obj->tiling_mode,
-							     true);
-
-		fenceable = (vma->node.size == fence_size &&
-			     (vma->node.start & (fence_alignment - 1)) == 0);
-
-		mappable = (vma->node.start + fence_size <=
-			    dev_priv->gtt.mappable_end);
-
-		obj->map_and_fenceable = mappable && fenceable;
-
+		__i915_vma_set_map_and_fenceable(vma);
 		WARN_ON(flags & PIN_MAPPABLE && !obj->map_and_fenceable);
 	}
 
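The helper's two tests are plain arithmetic: the binding is fenceable when its node is exactly fence_size bytes and starts on a fence_alignment boundary (a power-of-two mask test), and mappable when that fence-sized footprint ends at or below the mappable end of the aperture. A small standalone sketch of the same checks; the sizes, alignment, and aperture end below are illustrative assumptions, not values taken from the driver:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* Illustrative values only; the real ones come from the GTT size and
	 * alignment helpers and from the device's mappable aperture. */
	uint64_t node_start = 2 * 1024 * 1024;   /* 2 MiB offset into the GGTT */
	uint64_t node_size = 1024 * 1024;        /* 1 MiB binding */
	uint32_t fence_size = 1024 * 1024;
	uint32_t fence_alignment = 1024 * 1024;  /* must be a power of two */
	uint64_t mappable_end = 256 * 1024 * 1024;

	/* Exact fence-sized node starting on a fence-aligned boundary. */
	bool fenceable = (node_size == fence_size) &&
			 ((node_start & (fence_alignment - 1)) == 0);

	/* The whole fence footprint must fit below the mappable aperture end. */
	bool mappable = (node_start + fence_size <= mappable_end);

	printf("fenceable=%d mappable=%d map_and_fenceable=%d\n",
	       fenceable, mappable, fenceable && mappable);
	return 0;
}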
drivers/gpu/drm/i915/i915_gem_gtt.c
@@ -2728,6 +2728,7 @@ static int i915_gem_setup_global_gtt(struct drm_device *dev,
 			return ret;
 		}
 		vma->bound |= GLOBAL_BIND;
+		__i915_vma_set_map_and_fenceable(vma);
 		list_add_tail(&vma->mm_list, &ggtt_vm->inactive_list);
 	}
 
drivers/gpu/drm/i915/i915_gem_stolen.c
@@ -688,6 +688,7 @@ i915_gem_object_create_stolen_for_preallocated(struct drm_device *dev,
 		}
 
 		vma->bound |= GLOBAL_BIND;
+		__i915_vma_set_map_and_fenceable(vma);
 		list_add_tail(&vma->mm_list, &ggtt->inactive_list);
 	}
 