On 11/20/2015 06:16 AM, Chris Wilson wrote:
> As we mark the preallocated objects as bound, we should also flag them
> correctly as being map-and-fenceable (if appropriate!) so that later
> users do not get confused and try and rebind the pinned vma in order to
> get a map-and-fenceable binding.
>
> Signed-off-by: Chris Wilson <chris@xxxxxxxxxxxxxxxxxx>
> Cc: "Goel, Akash" <akash.goel@xxxxxxxxx>
> Cc: Daniel Vetter <daniel.vetter@xxxxxxxx>
> Cc: Jesse Barnes <jbarnes@xxxxxxxxxxxxxxxx>
> Cc: stable@xxxxxxxxxxxxxxx
> ---
>  drivers/gpu/drm/i915/i915_drv.h        |  1 +
>  drivers/gpu/drm/i915/i915_gem.c        | 43 +++++++++++++++++++---------------
>  drivers/gpu/drm/i915/i915_gem_gtt.c    |  1 +
>  drivers/gpu/drm/i915/i915_gem_stolen.c |  1 +
>  4 files changed, 27 insertions(+), 19 deletions(-)
>
> diff --git a/drivers/gpu/drm/i915/i915_drv.h b/drivers/gpu/drm/i915/i915_drv.h
> index f2b65433ed7d..24143e5273b6 100644
> --- a/drivers/gpu/drm/i915/i915_drv.h
> +++ b/drivers/gpu/drm/i915/i915_drv.h
> @@ -2844,6 +2844,7 @@ i915_gem_object_ggtt_pin(struct drm_i915_gem_object *obj,
>
>  int i915_vma_bind(struct i915_vma *vma, enum i915_cache_level cache_level,
>  		  u32 flags);
> +void __i915_vma_set_map_and_fenceable(struct i915_vma *vma);
>  int __must_check i915_vma_unbind(struct i915_vma *vma);
>  /*
>   * BEWARE: Do not use the function below unless you can _absolutely_
> diff --git a/drivers/gpu/drm/i915/i915_gem.c b/drivers/gpu/drm/i915/i915_gem.c
> index 3ad198a41c4a..e6a8a52c8a6b 100644
> --- a/drivers/gpu/drm/i915/i915_gem.c
> +++ b/drivers/gpu/drm/i915/i915_gem.c
> @@ -4092,6 +4092,29 @@ i915_vma_misplaced(struct i915_vma *vma, uint32_t alignment, uint64_t flags)
>  	return false;
>  }
>
> +void __i915_vma_set_map_and_fenceable(struct i915_vma *vma)
> +{
> +	struct drm_i915_gem_object *obj = vma->obj;
> +	bool mappable, fenceable;
> +	u32 fence_size, fence_alignment;
> +
> +	fence_size = i915_gem_get_gtt_size(obj->base.dev,
> +					   obj->base.size,
> +					   obj->tiling_mode);
> +	fence_alignment = i915_gem_get_gtt_alignment(obj->base.dev,
> +						     obj->base.size,
> +						     obj->tiling_mode,
> +						     true);
> +
> +	fenceable = (vma->node.size == fence_size &&
> +		     (vma->node.start & (fence_alignment - 1)) == 0);
> +
> +	mappable = (vma->node.start + fence_size <=
> +		    to_i915(obj->base.dev)->gtt.mappable_end);
> +
> +	obj->map_and_fenceable = mappable && fenceable;
> +}
> +
>  static int
>  i915_gem_object_do_pin(struct drm_i915_gem_object *obj,
>  		       struct i915_address_space *vm,
> @@ -4159,25 +4182,7 @@ i915_gem_object_do_pin(struct drm_i915_gem_object *obj,
>
>  	if (ggtt_view && ggtt_view->type == I915_GGTT_VIEW_NORMAL &&
>  	    (bound ^ vma->bound) & GLOBAL_BIND) {
> -		bool mappable, fenceable;
> -		u32 fence_size, fence_alignment;
> -
> -		fence_size = i915_gem_get_gtt_size(obj->base.dev,
> -						   obj->base.size,
> -						   obj->tiling_mode);
> -		fence_alignment = i915_gem_get_gtt_alignment(obj->base.dev,
> -							     obj->base.size,
> -							     obj->tiling_mode,
> -							     true);
> -
> -		fenceable = (vma->node.size == fence_size &&
> -			     (vma->node.start & (fence_alignment - 1)) == 0);
> -
> -		mappable = (vma->node.start + fence_size <=
> -			    dev_priv->gtt.mappable_end);
> -
> -		obj->map_and_fenceable = mappable && fenceable;
> -
> +		__i915_vma_set_map_and_fenceable(vma);
>  		WARN_ON(flags & PIN_MAPPABLE && !obj->map_and_fenceable);
>  	}
>
> diff --git a/drivers/gpu/drm/i915/i915_gem_gtt.c b/drivers/gpu/drm/i915/i915_gem_gtt.c
> index a09f8f0510d5..74b26b2d0889 100644
> --- a/drivers/gpu/drm/i915/i915_gem_gtt.c
> +++ b/drivers/gpu/drm/i915/i915_gem_gtt.c
> @@ -2704,6 +2704,7 @@ static int i915_gem_setup_global_gtt(struct drm_device *dev,
>  			return ret;
>  		}
>  		vma->bound |= GLOBAL_BIND;
> +		__i915_vma_set_map_and_fenceable(vma);
>  		list_add_tail(&vma->mm_list, &ggtt_vm->inactive_list);
>  	}
>
> diff --git a/drivers/gpu/drm/i915/i915_gem_stolen.c b/drivers/gpu/drm/i915/i915_gem_stolen.c
> index 598ed2facf85..3476877fc0d6 100644
> --- a/drivers/gpu/drm/i915/i915_gem_stolen.c
> +++ b/drivers/gpu/drm/i915/i915_gem_stolen.c
> @@ -688,6 +688,7 @@ i915_gem_object_create_stolen_for_preallocated(struct drm_device *dev,
>  	}
>
>  	vma->bound |= GLOBAL_BIND;
> +	__i915_vma_set_map_and_fenceable(vma);
>  	list_add_tail(&vma->mm_list, &ggtt->inactive_list);
>  }
>

Looks important.

Reviewed-by: Jesse Barnes <jbarnes@xxxxxxxxxxxxxxxx>
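
For readers less familiar with the flag the patch is setting: the two conditions the
new helper computes can be tried out in isolation. Below is only a small standalone
sketch with made-up example values (plain C, simplified stand-ins for vma->node, the
GTT size/alignment helpers and the mappable aperture limit; it is not driver code) that
mirrors the fenceable/mappable tests in __i915_vma_set_map_and_fenceable() above:

  #include <stdbool.h>
  #include <stdint.h>
  #include <stdio.h>

  int main(void)
  {
          /* Made-up example values; in the driver they come from vma->node,
           * i915_gem_get_gtt_size(), i915_gem_get_gtt_alignment() and
           * dev_priv->gtt.mappable_end. */
          uint64_t node_start      = 0x00100000;
          uint64_t node_size       = 0x00100000;
          uint64_t fence_size      = 0x00100000;
          uint64_t fence_alignment = 0x00010000;
          uint64_t mappable_end    = 0x10000000;

          /* Same two tests as the helper: the node covers exactly one fence
           * region, starts at the required alignment, and ends inside the
           * CPU-mappable aperture. */
          bool fenceable = node_size == fence_size &&
                           (node_start & (fence_alignment - 1)) == 0;
          bool mappable  = node_start + fence_size <= mappable_end;

          printf("map_and_fenceable = %d\n", fenceable && mappable);
          return 0;
  }

With the patch, that flag is computed for the preallocated/stolen objects as soon as
they are marked GLOBAL_BIND, so a later PIN_MAPPABLE pin no longer concludes that the
already-pinned vma has to be rebound.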