Lines matching refs: vma. These are the references to vma in the i915 execbuffer code (drivers/gpu/drm/i915/i915_gem_execbuffer.c); each entry shows the file line number, the matching source line, and the enclosing function.
132 struct i915_vma *vma; in eb_lookup_vmas() local
146 vma = i915_gem_obj_lookup_or_create_vma(obj, vm); in eb_lookup_vmas()
147 if (IS_ERR(vma)) { in eb_lookup_vmas()
149 ret = PTR_ERR(vma); in eb_lookup_vmas()
154 list_add_tail(&vma->exec_list, &eb->vmas); in eb_lookup_vmas()
157 vma->exec_entry = &exec[i]; in eb_lookup_vmas()
159 eb->lut[i] = vma; in eb_lookup_vmas()
162 vma->exec_handle = handle; in eb_lookup_vmas()
163 hlist_add_head(&vma->exec_node, in eb_lookup_vmas()
200 struct i915_vma *vma; in eb_get_vma() local
202 vma = hlist_entry(node, struct i915_vma, exec_node); in eb_get_vma()
203 if (vma->exec_handle == handle) in eb_get_vma()
204 return vma; in eb_get_vma()
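Taken together, the eb_lookup_vmas() and eb_get_vma() fragments above show the execbuf lookup table being built (each vma gets its exec_handle recorded and is chained into a bucket with hlist_add_head(), with a flat eb->lut[] array as a fast path) and later queried by handle. The following is a minimal userspace sketch of that handle-to-vma hash only; the struct names, the mask field, and the singly linked bucket chain are invented stand-ins for the kernel's hlist, not the driver's real definitions.

#include <stdint.h>
#include <stdio.h>

struct fake_vma {
	uint32_t exec_handle;      /* GEM handle the vma was looked up by */
	struct fake_vma *next;     /* bucket chain, stands in for exec_node */
};

struct fake_eb {
	unsigned int mask;         /* bucket mask (number of buckets - 1) */
	struct fake_vma **buckets;
};

/* Record the handle and chain the vma at the head of its bucket,
 * mirroring what hlist_add_head() does in the fragments above. */
static void fake_eb_add(struct fake_eb *eb, struct fake_vma *vma, uint32_t handle)
{
	unsigned int b = handle & eb->mask;

	vma->exec_handle = handle;
	vma->next = eb->buckets[b];
	eb->buckets[b] = vma;
}

/* Walk one bucket until the handle matches, as eb_get_vma() does. */
static struct fake_vma *fake_eb_get(struct fake_eb *eb, uint32_t handle)
{
	struct fake_vma *vma;

	for (vma = eb->buckets[handle & eb->mask]; vma; vma = vma->next)
		if (vma->exec_handle == handle)
			return vma;
	return NULL;
}

int main(void)
{
	struct fake_vma *slots[4] = { 0 };
	struct fake_eb eb = { .mask = 3, .buckets = slots };
	struct fake_vma a, b;

	fake_eb_add(&eb, &a, 17);
	fake_eb_add(&eb, &b, 42);
	printf("handle 42 -> %p (expect %p)\n",
	       (void *)fake_eb_get(&eb, 42), (void *)&b);
	return 0;
}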
211 i915_gem_execbuffer_unreserve_vma(struct i915_vma *vma) in i915_gem_execbuffer_unreserve_vma() argument
214 struct drm_i915_gem_object *obj = vma->obj; in i915_gem_execbuffer_unreserve_vma()
216 if (!drm_mm_node_allocated(&vma->node)) in i915_gem_execbuffer_unreserve_vma()
219 entry = vma->exec_entry; in i915_gem_execbuffer_unreserve_vma()
225 vma->pin_count--; in i915_gem_execbuffer_unreserve_vma()
233 struct i915_vma *vma; in eb_destroy() local
235 vma = list_first_entry(&eb->vmas, in eb_destroy()
238 list_del_init(&vma->exec_list); in eb_destroy()
239 i915_gem_execbuffer_unreserve_vma(vma); in eb_destroy()
240 drm_gem_object_unreference(&vma->obj->base); in eb_destroy()
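The i915_gem_execbuffer_unreserve_vma() and eb_destroy() fragments show the teardown ordering: a vma's pin is dropped only if it actually holds an allocated node, and each vma is removed from eb->vmas and unreserved before its GEM object reference is released. A compressed sketch of that ordering follows; the td_* names, the boolean flags, and the bare refcount are placeholders assumed for illustration.

#include <stdbool.h>

struct td_obj {
	int refcount;
};

struct td_vma {
	struct td_obj *obj;
	bool has_node;		/* the vma has an allocated drm_mm node */
	bool has_pin;		/* plays the role of the HAS_PIN entry flag */
	int pin_count;
};

/* Drop the execbuf pin, but only if the vma is actually bound. */
static void td_unreserve(struct td_vma *vma)
{
	if (!vma->has_node)
		return;
	if (vma->has_pin) {
		vma->pin_count--;
		vma->has_pin = false;
	}
}

/* Unreserve first, then drop the object reference, per eb_destroy(). */
static void td_destroy(struct td_vma *vmas, int n)
{
	for (int i = 0; i < n; i++) {
		td_unreserve(&vmas[i]);
		vmas[i].obj->refcount--;
	}
}

int main(void)
{
	struct td_obj obj = { .refcount = 1 };
	struct td_vma vma = { .obj = &obj, .has_node = true,
			      .has_pin = true, .pin_count = 1 };

	td_destroy(&vma, 1);
	return (vma.pin_count == 0 && obj.refcount == 0) ? 0 : 1;
}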
486 i915_gem_execbuffer_relocate_vma(struct i915_vma *vma, in i915_gem_execbuffer_relocate_vma() argument
492 struct drm_i915_gem_exec_object2 *entry = vma->exec_entry; in i915_gem_execbuffer_relocate_vma()
511 ret = i915_gem_execbuffer_relocate_entry(vma->obj, eb, r); in i915_gem_execbuffer_relocate_vma()
532 i915_gem_execbuffer_relocate_vma_slow(struct i915_vma *vma, in i915_gem_execbuffer_relocate_vma_slow() argument
536 const struct drm_i915_gem_exec_object2 *entry = vma->exec_entry; in i915_gem_execbuffer_relocate_vma_slow()
540 ret = i915_gem_execbuffer_relocate_entry(vma->obj, eb, &relocs[i]); in i915_gem_execbuffer_relocate_vma_slow()
551 struct i915_vma *vma; in i915_gem_execbuffer_relocate() local
562 list_for_each_entry(vma, &eb->vmas, exec_list) { in i915_gem_execbuffer_relocate()
563 ret = i915_gem_execbuffer_relocate_vma(vma, eb); in i915_gem_execbuffer_relocate()
579 i915_gem_execbuffer_reserve_vma(struct i915_vma *vma, in i915_gem_execbuffer_reserve_vma() argument
583 struct drm_i915_gem_object *obj = vma->obj; in i915_gem_execbuffer_reserve_vma()
584 struct drm_i915_gem_exec_object2 *entry = vma->exec_entry; in i915_gem_execbuffer_reserve_vma()
592 if (!drm_mm_node_allocated(&vma->node)) { in i915_gem_execbuffer_reserve_vma()
606 ret = i915_gem_object_pin(obj, vma->vm, entry->alignment, flags); in i915_gem_execbuffer_reserve_vma()
609 ret = i915_gem_object_pin(obj, vma->vm, in i915_gem_execbuffer_reserve_vma()
626 if (entry->offset != vma->node.start) { in i915_gem_execbuffer_reserve_vma()
627 entry->offset = vma->node.start; in i915_gem_execbuffer_reserve_vma()
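The i915_gem_execbuffer_reserve_vma() fragments pin the object into vma->vm and then compare the resulting vma->node.start against the offset userspace presumed in its exec entry; if the two differ, the entry is updated and relocations have to be rewritten. A small sketch of just that presumed-offset check, with placeholder struct names rather than the driver's:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct sketch_exec_entry {
	uint64_t offset;	/* offset userspace assumed when emitting relocations */
};

struct sketch_vma {
	uint64_t node_start;	/* where the vma actually ended up after pinning */
};

/* Returns true when the object moved, i.e. relocations must be applied. */
static bool sketch_update_presumed_offset(struct sketch_vma *vma,
					  struct sketch_exec_entry *entry)
{
	if (entry->offset == vma->node_start)
		return false;

	entry->offset = vma->node_start;
	return true;
}

int main(void)
{
	struct sketch_vma vma = { .node_start = 0x20000 };
	struct sketch_exec_entry entry = { .offset = 0x10000 };

	printf("need_reloc = %d, new presumed offset = 0x%llx\n",
	       sketch_update_presumed_offset(&vma, &entry),
	       (unsigned long long)entry.offset);
	return 0;
}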
640 need_reloc_mappable(struct i915_vma *vma) in need_reloc_mappable() argument
642 struct drm_i915_gem_exec_object2 *entry = vma->exec_entry; in need_reloc_mappable()
647 if (!i915_is_ggtt(vma->vm)) in need_reloc_mappable()
651 if (HAS_LLC(vma->obj->base.dev)) in need_reloc_mappable()
654 if (vma->obj->base.write_domain == I915_GEM_DOMAIN_CPU) in need_reloc_mappable()
661 eb_vma_misplaced(struct i915_vma *vma) in eb_vma_misplaced() argument
663 struct drm_i915_gem_exec_object2 *entry = vma->exec_entry; in eb_vma_misplaced()
664 struct drm_i915_gem_object *obj = vma->obj; in eb_vma_misplaced()
667 !i915_is_ggtt(vma->vm)); in eb_vma_misplaced()
670 vma->node.start & (entry->alignment - 1)) in eb_vma_misplaced()
674 vma->node.start < BATCH_OFFSET_BIAS) in eb_vma_misplaced()
682 (vma->node.start + vma->node.size - 1) >> 32) in eb_vma_misplaced()
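eb_vma_misplaced() decides whether an already-bound vma may stay where it is. The fragments show three placement checks: the entry's alignment against vma->node.start, keeping biased objects (the batch) at or above BATCH_OFFSET_BIAS, and, for objects that cannot use 48-bit addresses, rejecting placements that reach beyond the first 4 GiB. A self-contained sketch of those checks with simplified flag and struct names; the bias value used here is illustrative.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define SKETCH_BATCH_OFFSET_BIAS	(256 * 1024)	/* illustrative value */
#define SKETCH_NEEDS_BIAS		(1u << 0)
#define SKETCH_SUPPORTS_48B		(1u << 1)

struct place_node {
	uint64_t start;
	uint64_t size;
};

struct place_entry {
	uint64_t alignment;	/* power of two, 0 means no constraint */
	unsigned int flags;
};

static bool place_vma_misplaced(const struct place_node *node,
				const struct place_entry *entry)
{
	/* Alignment constraint from the exec entry. */
	if (entry->alignment && (node->start & (entry->alignment - 1)))
		return true;

	/* The batch buffer must sit above the bias. */
	if ((entry->flags & SKETCH_NEEDS_BIAS) &&
	    node->start < SKETCH_BATCH_OFFSET_BIAS)
		return true;

	/* Objects limited to 32-bit addressing must end below 4 GiB. */
	if (!(entry->flags & SKETCH_SUPPORTS_48B) &&
	    ((node->start + node->size - 1) >> 32))
		return true;

	return false;
}

int main(void)
{
	struct place_node node = { .start = 4096, .size = 8192 };
	struct place_entry entry = { .alignment = 8192, .flags = 0 };

	printf("misplaced = %d\n", place_vma_misplaced(&node, &entry));
	return 0;
}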
695 struct i915_vma *vma; in i915_gem_execbuffer_reserve() local
710 vma = list_first_entry(vmas, struct i915_vma, exec_list); in i915_gem_execbuffer_reserve()
711 obj = vma->obj; in i915_gem_execbuffer_reserve()
712 entry = vma->exec_entry; in i915_gem_execbuffer_reserve()
722 need_mappable = need_fence || need_reloc_mappable(vma); in i915_gem_execbuffer_reserve()
726 list_move(&vma->exec_list, &ordered_vmas); in i915_gem_execbuffer_reserve()
728 list_move_tail(&vma->exec_list, &ordered_vmas); in i915_gem_execbuffer_reserve()
752 list_for_each_entry(vma, vmas, exec_list) { in i915_gem_execbuffer_reserve()
753 if (!drm_mm_node_allocated(&vma->node)) in i915_gem_execbuffer_reserve()
756 if (eb_vma_misplaced(vma)) in i915_gem_execbuffer_reserve()
757 ret = i915_vma_unbind(vma); in i915_gem_execbuffer_reserve()
759 ret = i915_gem_execbuffer_reserve_vma(vma, ring, need_relocs); in i915_gem_execbuffer_reserve()
765 list_for_each_entry(vma, vmas, exec_list) { in i915_gem_execbuffer_reserve()
766 if (drm_mm_node_allocated(&vma->node)) in i915_gem_execbuffer_reserve()
769 ret = i915_gem_execbuffer_reserve_vma(vma, ring, need_relocs); in i915_gem_execbuffer_reserve()
779 list_for_each_entry(vma, vmas, exec_list) in i915_gem_execbuffer_reserve()
780 i915_gem_execbuffer_unreserve_vma(vma); in i915_gem_execbuffer_reserve()
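The i915_gem_execbuffer_reserve() fragments outline the binding loop: vmas needing a fence or mappable placement are moved to the front of an ordered list, a first pass unbinds anything bound but misplaced (and re-reserves the well-placed vmas), a second pass binds whatever still has no node, and on failure every vma is unreserved before the attempt is retried; the eviction step between retries is not part of the matched lines and is assumed here. A rough sketch of that retry structure over a plain array, with placeholder bind/unbind/evict helpers:

#include <errno.h>
#include <stdbool.h>
#include <stddef.h>

struct resv_vma {
	bool bound;		/* has a node in the address space */
	bool misplaced;		/* bound, but violates its constraints */
	bool pinned;		/* reserved for this execbuf attempt */
};

/* Placeholder helpers; the real driver pins into vma->vm, unbinds the
 * node, and evicts other objects from the address space. */
static int resv_bind(struct resv_vma *v) { v->bound = true; v->pinned = true; return 0; }
static int resv_unbind(struct resv_vma *v) { v->bound = false; v->misplaced = false; return 0; }
static void resv_unreserve(struct resv_vma *v) { v->pinned = false; }
static int resv_evict_something(void) { return -ENOSPC; }

static int resv_reserve_all(struct resv_vma **vmas, size_t n)
{
	int ret;

	for (;;) {
		size_t i;

		/* Pass 1: fix up what is already bound. */
		for (i = 0, ret = 0; i < n && ret == 0; i++) {
			if (!vmas[i]->bound)
				continue;
			if (vmas[i]->misplaced)
				ret = resv_unbind(vmas[i]);
			else
				ret = resv_bind(vmas[i]);
		}

		/* Pass 2: bind whatever still has no node. */
		for (i = 0; i < n && ret == 0; i++) {
			if (!vmas[i]->bound)
				ret = resv_bind(vmas[i]);
		}

		if (ret != -ENOSPC)
			return ret;

		/* Out of space: drop every reservation, evict, try again. */
		for (i = 0; i < n; i++)
			resv_unreserve(vmas[i]);

		ret = resv_evict_something();
		if (ret)
			return ret;
	}
}

int main(void)
{
	struct resv_vma a = { .bound = true, .misplaced = true };
	struct resv_vma b = { 0 };
	struct resv_vma *vmas[] = { &a, &b };

	return resv_reserve_all(vmas, 2) ? 1 : 0;
}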
799 struct i915_vma *vma; in i915_gem_execbuffer_relocate_slow() local
809 vma = list_first_entry(&eb->vmas, struct i915_vma, exec_list); in i915_gem_execbuffer_relocate_slow()
810 list_del_init(&vma->exec_list); in i915_gem_execbuffer_relocate_slow()
811 i915_gem_execbuffer_unreserve_vma(vma); in i915_gem_execbuffer_relocate_slow()
812 drm_gem_object_unreference(&vma->obj->base); in i915_gem_execbuffer_relocate_slow()
885 list_for_each_entry(vma, &eb->vmas, exec_list) { in i915_gem_execbuffer_relocate_slow()
886 int offset = vma->exec_entry - exec; in i915_gem_execbuffer_relocate_slow()
887 ret = i915_gem_execbuffer_relocate_vma_slow(vma, eb, in i915_gem_execbuffer_relocate_slow()
910 struct i915_vma *vma; in i915_gem_execbuffer_move_to_gpu() local
915 list_for_each_entry(vma, vmas, exec_list) { in i915_gem_execbuffer_move_to_gpu()
916 struct drm_i915_gem_object *obj = vma->obj; in i915_gem_execbuffer_move_to_gpu()
1052 struct i915_vma *vma; in i915_gem_execbuffer_move_to_active() local
1054 list_for_each_entry(vma, vmas, exec_list) { in i915_gem_execbuffer_move_to_active()
1055 struct drm_i915_gem_exec_object2 *entry = vma->exec_entry; in i915_gem_execbuffer_move_to_active()
1056 struct drm_i915_gem_object *obj = vma->obj; in i915_gem_execbuffer_move_to_active()
1066 i915_vma_move_to_active(vma, req); in i915_gem_execbuffer_move_to_active()
1136 struct i915_vma *vma; in i915_gem_execbuffer_parse() local
1161 vma = i915_gem_obj_to_ggtt(shadow_batch_obj); in i915_gem_execbuffer_parse()
1162 vma->exec_entry = shadow_exec_entry; in i915_gem_execbuffer_parse()
1163 vma->exec_entry->flags = __EXEC_OBJECT_HAS_PIN; in i915_gem_execbuffer_parse()
1165 list_add_tail(&vma->exec_list, &eb->vmas); in i915_gem_execbuffer_parse()
1309 struct i915_vma *vma = list_entry(eb->vmas.prev, typeof(*vma), exec_list); in eb_get_batch() local
1320 vma->exec_entry->flags |= __EXEC_OBJECT_NEEDS_BIAS; in eb_get_batch()
1322 return vma->obj; in eb_get_batch()
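Finally, the eb_get_batch() fragments take the batch buffer as the last vma on eb->vmas (list_entry(eb->vmas.prev, ...)) and mark it with __EXEC_OBJECT_NEEDS_BIAS so the reserve step keeps it above BATCH_OFFSET_BIAS. A trivial array-based sketch of the same idea, with invented names standing in for the list and flag machinery:

#include <stddef.h>

#define BATCH_NEEDS_BIAS (1u << 0)

struct batch_vma {
	unsigned int flags;
};

/* The batch is the last buffer userspace passed in, so it is the last
 * entry of the exec list; flag it so it gets bound above the bias. */
static struct batch_vma *
batch_get(struct batch_vma *vmas, size_t count)
{
	struct batch_vma *batch = &vmas[count - 1];

	batch->flags |= BATCH_NEEDS_BIAS;
	return batch;
}

int main(void)
{
	struct batch_vma vmas[3] = { { 0 }, { 0 }, { 0 } };

	return batch_get(vmas, 3) == &vmas[2] ? 0 : 1;
}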