Lines Matching refs:vma

136 struct i915_vma *vma; in i915_gem_get_aperture_ioctl() local
141 list_for_each_entry(vma, &ggtt->base.active_list, mm_list) in i915_gem_get_aperture_ioctl()
142 if (vma->pin_count) in i915_gem_get_aperture_ioctl()
143 pinned += vma->node.size; in i915_gem_get_aperture_ioctl()
144 list_for_each_entry(vma, &ggtt->base.inactive_list, mm_list) in i915_gem_get_aperture_ioctl()
145 if (vma->pin_count) in i915_gem_get_aperture_ioctl()
146 pinned += vma->node.size; in i915_gem_get_aperture_ioctl()
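
The two loops in i915_gem_get_aperture_ioctl() above tally the GGTT space still held by pinned VMAs on the active and inactive lists. Below is a minimal, self-contained sketch of that accounting pattern; the struct, the singly linked list, and the values are hypothetical stand-ins for i915_vma and the GGTT lists, not the driver's types.

#include <stddef.h>
#include <stdio.h>

struct fake_vma {                       /* hypothetical stand-in for i915_vma */
	unsigned int pin_count;         /* mirrors vma->pin_count */
	size_t node_size;               /* mirrors vma->node.size */
	struct fake_vma *next;          /* simplified singly linked list */
};

static size_t sum_pinned(const struct fake_vma *head)
{
	const struct fake_vma *vma;
	size_t pinned = 0;

	for (vma = head; vma; vma = vma->next)
		if (vma->pin_count)
			pinned += vma->node_size;
	return pinned;
}

int main(void)
{
	struct fake_vma c = { 0, 4096, NULL };
	struct fake_vma b = { 2, 8192, &c };
	struct fake_vma a = { 1, 4096, &b };

	printf("pinned bytes: %zu\n", sum_pinned(&a));  /* prints 12288 */
	return 0;
}
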
271 struct i915_vma *vma, *next; in drop_pages() local
275 list_for_each_entry_safe(vma, next, &obj->vma_list, vma_link) in drop_pages()
276 if (i915_vma_unbind(vma)) in drop_pages()
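
drop_pages() walks obj->vma_list with the _safe iterator because i915_vma_unbind() can unlink the entry being visited. A reduced sketch of why the successor must be cached before the current node is touched; the node type and the use of free() are hypothetical stand-ins.

#include <stdlib.h>

struct node {
	struct node *next;
};

static void drop_all(struct node **head)
{
	struct node *cur = *head;
	struct node *next;

	while (cur) {
		next = cur->next;  /* cache the successor first ...          */
		free(cur);         /* ... because this invalidates cur->next */
		cur = next;
	}
	*head = NULL;
}

int main(void)
{
	struct node *a = malloc(sizeof(*a));
	struct node *b = malloc(sizeof(*b));

	a->next = b;
	b->next = NULL;
	drop_all(&a);
	return 0;
}
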
1753 struct vm_area_struct *vma; in i915_gem_mmap_ioctl() local
1756 vma = find_vma(mm, addr); in i915_gem_mmap_ioctl()
1757 if (vma) in i915_gem_mmap_ioctl()
1758 vma->vm_page_prot = in i915_gem_mmap_ioctl()
1759 pgprot_writecombine(vm_get_page_prot(vma->vm_flags)); in i915_gem_mmap_ioctl()
1789 int i915_gem_fault(struct vm_area_struct *vma, struct vm_fault *vmf) in i915_gem_fault() argument
1791 struct drm_i915_gem_object *obj = to_intel_bo(vma->vm_private_data); in i915_gem_fault()
1803 page_offset = ((unsigned long)vmf->virtual_address - vma->vm_start) >> in i915_gem_fault()
1838 (vma->vm_end - vma->vm_start)/PAGE_SIZE - in i915_gem_fault()
1866 unsigned long base = vma->vm_start + in i915_gem_fault()
1871 ret = vm_insert_pfn(vma, base + i * PAGE_SIZE, pfn + i); in i915_gem_fault()
1880 vma->vm_end - vma->vm_start, in i915_gem_fault()
1885 ret = vm_insert_pfn(vma, in i915_gem_fault()
1886 (unsigned long)vma->vm_start + i * PAGE_SIZE, in i915_gem_fault()
1894 ret = vm_insert_pfn(vma, in i915_gem_fault()
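
i915_gem_fault() turns the faulting user address into a page index relative to the start of the mapping before inserting PFNs with vm_insert_pfn(). A standalone sketch of that address arithmetic, assuming 4 KiB pages and made-up addresses:

#include <stdio.h>

#define PAGE_SHIFT 12UL
#define PAGE_SIZE  (1UL << PAGE_SHIFT)

int main(void)
{
	unsigned long vm_start   = 0x70000000UL;  /* hypothetical vma->vm_start */
	unsigned long fault_addr = vm_start + 5 * PAGE_SIZE + 123;

	unsigned long page_offset = (fault_addr - vm_start) >> PAGE_SHIFT;

	printf("faulting page index: %lu\n", page_offset);  /* prints 5 */
	return 0;
}
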
2401 void i915_vma_move_to_active(struct i915_vma *vma, in i915_vma_move_to_active() argument
2404 struct drm_i915_gem_object *obj = vma->obj; in i915_vma_move_to_active()
2417 list_move_tail(&vma->mm_list, &vma->vm->active_list); in i915_vma_move_to_active()
2433 struct i915_vma *vma; in i915_gem_object_retire__read() local
2455 list_for_each_entry(vma, &obj->vma_list, vma_link) { in i915_gem_object_retire__read()
2456 if (!list_empty(&vma->mm_list)) in i915_gem_object_retire__read()
2457 list_move_tail(&vma->mm_list, &vma->vm->inactive_list); in i915_gem_object_retire__read()
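
i915_vma_move_to_active() and i915_gem_object_retire__read() shuttle a VMA between the per-VM active and inactive lists with list_move_tail(). The sketch below re-creates that move on a minimal circular doubly linked list; the real driver uses <linux/list.h>, and the names here are local stand-ins.

#include <stdio.h>

struct list_head { struct list_head *prev, *next; };

static void list_init(struct list_head *h) { h->prev = h->next = h; }

static void list_del_entry(struct list_head *e)
{
	e->prev->next = e->next;
	e->next->prev = e->prev;
}

static void list_add_tail_entry(struct list_head *e, struct list_head *head)
{
	e->prev = head->prev;
	e->next = head;
	head->prev->next = e;
	head->prev = e;
}

/* equivalent of list_move_tail(): unlink, then append to the other list */
static void list_move_tail_entry(struct list_head *e, struct list_head *head)
{
	list_del_entry(e);
	list_add_tail_entry(e, head);
}

int main(void)
{
	struct list_head active, inactive, vma_node;

	list_init(&active);
	list_init(&inactive);
	list_add_tail_entry(&vma_node, &active);     /* VMA becomes active */
	list_move_tail_entry(&vma_node, &inactive);  /* request retired    */

	printf("inactive non-empty: %d\n", inactive.next != &inactive);  /* 1 */
	return 0;
}
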
3270 static int __i915_vma_unbind(struct i915_vma *vma, bool wait) in __i915_vma_unbind() argument
3272 struct drm_i915_gem_object *obj = vma->obj; in __i915_vma_unbind()
3276 if (list_empty(&vma->vma_link)) in __i915_vma_unbind()
3279 if (!drm_mm_node_allocated(&vma->node)) { in __i915_vma_unbind()
3280 i915_gem_vma_destroy(vma); in __i915_vma_unbind()
3284 if (vma->pin_count) in __i915_vma_unbind()
3295 if (i915_is_ggtt(vma->vm) && in __i915_vma_unbind()
3296 vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL) { in __i915_vma_unbind()
3305 trace_i915_vma_unbind(vma); in __i915_vma_unbind()
3307 vma->vm->unbind_vma(vma); in __i915_vma_unbind()
3308 vma->bound = 0; in __i915_vma_unbind()
3310 list_del_init(&vma->mm_list); in __i915_vma_unbind()
3311 if (i915_is_ggtt(vma->vm)) { in __i915_vma_unbind()
3312 if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL) { in __i915_vma_unbind()
3314 } else if (vma->ggtt_view.pages) { in __i915_vma_unbind()
3315 sg_free_table(vma->ggtt_view.pages); in __i915_vma_unbind()
3316 kfree(vma->ggtt_view.pages); in __i915_vma_unbind()
3318 vma->ggtt_view.pages = NULL; in __i915_vma_unbind()
3321 drm_mm_remove_node(&vma->node); in __i915_vma_unbind()
3322 i915_gem_vma_destroy(vma); in __i915_vma_unbind()
3338 int i915_vma_unbind(struct i915_vma *vma) in i915_vma_unbind() argument
3340 return __i915_vma_unbind(vma, true); in i915_vma_unbind()
3343 int __i915_vma_unbind_no_wait(struct i915_vma *vma) in __i915_vma_unbind_no_wait() argument
3345 return __i915_vma_unbind(vma, false); in __i915_vma_unbind_no_wait()
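
__i915_vma_unbind() is one internal helper parameterised by a wait flag, with i915_vma_unbind() and __i915_vma_unbind_no_wait() as thin wrappers around it. A generic sketch of that wrapper shape; the names and body are hypothetical and say nothing about the driver's actual teardown or locking.

#include <stdbool.h>

static int do_teardown(int handle, bool wait)
{
	/* placeholder body: optionally wait for outstanding work,
	 * then release the resource behind handle */
	(void)handle;
	(void)wait;
	return 0;
}

int teardown(int handle)         { return do_teardown(handle, true);  }
int teardown_no_wait(int handle) { return do_teardown(handle, false); }

int main(void)
{
	return teardown(1) | teardown_no_wait(2);
}
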
3381 static bool i915_gem_valid_gtt_space(struct i915_vma *vma, in i915_gem_valid_gtt_space() argument
3384 struct drm_mm_node *gtt_space = &vma->node; in i915_gem_valid_gtt_space()
3394 if (vma->vm->mm.color_adjust == NULL) in i915_gem_valid_gtt_space()
3431 struct i915_vma *vma; in i915_gem_object_bind_to_vm() local
3506 vma = ggtt_view ? i915_gem_obj_lookup_or_create_ggtt_vma(obj, ggtt_view) : in i915_gem_object_bind_to_vm()
3509 if (IS_ERR(vma)) in i915_gem_object_bind_to_vm()
3521 ret = drm_mm_insert_node_in_range_generic(&vm->mm, &vma->node, in i915_gem_object_bind_to_vm()
3537 if (WARN_ON(!i915_gem_valid_gtt_space(vma, obj->cache_level))) { in i915_gem_object_bind_to_vm()
3542 trace_i915_vma_bind(vma, flags); in i915_gem_object_bind_to_vm()
3543 ret = i915_vma_bind(vma, obj->cache_level, flags); in i915_gem_object_bind_to_vm()
3548 list_add_tail(&vma->mm_list, &vm->inactive_list); in i915_gem_object_bind_to_vm()
3550 return vma; in i915_gem_object_bind_to_vm()
3553 drm_mm_remove_node(&vma->node); in i915_gem_object_bind_to_vm()
3555 i915_gem_vma_destroy(vma); in i915_gem_object_bind_to_vm()
3556 vma = ERR_PTR(ret); in i915_gem_object_bind_to_vm()
3559 return vma; in i915_gem_object_bind_to_vm()
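
Several of the call sites above (the IS_ERR/PTR_ERR checks around the vma lookups and ERR_PTR(ret) on the error path) rely on the kernel's encoded-error-pointer idiom: small negative errno values are folded into the top of the pointer range. A self-contained userspace re-creation of that idiom, for illustration only:

#include <stdio.h>
#include <errno.h>

#define MAX_ERRNO 4095UL

static inline void *ERR_PTR(long error)      { return (void *)error; }
static inline long  PTR_ERR(const void *ptr) { return (long)ptr; }
static inline int   IS_ERR(const void *ptr)
{
	return (unsigned long)ptr >= (unsigned long)-MAX_ERRNO;
}

/* hypothetical lookup that fails with -ENOMEM on demand */
static void *lookup_or_create(int fail)
{
	static int object;

	return fail ? ERR_PTR(-ENOMEM) : &object;
}

int main(void)
{
	void *vma = lookup_or_create(1);

	if (IS_ERR(vma))
		printf("lookup failed: %ld\n", PTR_ERR(vma));  /* -12 (ENOMEM) */
	return 0;
}
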
3661 struct i915_vma *vma; in i915_gem_object_set_to_gtt_domain() local
3711 vma = i915_gem_obj_to_ggtt(obj); in i915_gem_object_set_to_gtt_domain()
3712 if (vma && drm_mm_node_allocated(&vma->node) && !obj->active) in i915_gem_object_set_to_gtt_domain()
3713 list_move_tail(&vma->mm_list, in i915_gem_object_set_to_gtt_domain()
3736 struct i915_vma *vma, *next; in i915_gem_object_set_cache_level() local
3748 list_for_each_entry_safe(vma, next, &obj->vma_list, vma_link) { in i915_gem_object_set_cache_level()
3749 if (!drm_mm_node_allocated(&vma->node)) in i915_gem_object_set_cache_level()
3752 if (vma->pin_count) { in i915_gem_object_set_cache_level()
3757 if (!i915_gem_valid_gtt_space(vma, cache_level)) { in i915_gem_object_set_cache_level()
3758 ret = i915_vma_unbind(vma); in i915_gem_object_set_cache_level()
3811 list_for_each_entry(vma, &obj->vma_list, vma_link) { in i915_gem_object_set_cache_level()
3812 if (!drm_mm_node_allocated(&vma->node)) in i915_gem_object_set_cache_level()
3815 ret = i915_vma_bind(vma, cache_level, PIN_UPDATE); in i915_gem_object_set_cache_level()
3821 list_for_each_entry(vma, &obj->vma_list, vma_link) in i915_gem_object_set_cache_level()
3822 vma->node.color = cache_level; in i915_gem_object_set_cache_level()
4117 i915_vma_misplaced(struct i915_vma *vma, uint32_t alignment, uint64_t flags) in i915_vma_misplaced() argument
4119 struct drm_i915_gem_object *obj = vma->obj; in i915_vma_misplaced()
4122 vma->node.start & (alignment - 1)) in i915_vma_misplaced()
4129 vma->node.start < (flags & PIN_OFFSET_MASK)) in i915_vma_misplaced()
4135 void __i915_vma_set_map_and_fenceable(struct i915_vma *vma) in __i915_vma_set_map_and_fenceable() argument
4137 struct drm_i915_gem_object *obj = vma->obj; in __i915_vma_set_map_and_fenceable()
4149 fenceable = (vma->node.size == fence_size && in __i915_vma_set_map_and_fenceable()
4150 (vma->node.start & (fence_alignment - 1)) == 0); in __i915_vma_set_map_and_fenceable()
4152 mappable = (vma->node.start + fence_size <= in __i915_vma_set_map_and_fenceable()
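
i915_vma_misplaced() and __i915_vma_set_map_and_fenceable() test placement with mask arithmetic: start & (alignment - 1) is non-zero exactly when start is not aligned to a power-of-two alignment. A small standalone check of that identity, with made-up values:

#include <assert.h>
#include <stdint.h>

static int is_aligned(uint64_t start, uint64_t alignment)
{
	/* valid only for power-of-two alignments, as in the GTT code */
	return (start & (alignment - 1)) == 0;
}

int main(void)
{
	assert(is_aligned(0x10000, 0x1000));
	assert(!is_aligned(0x10400, 0x1000));
	assert(is_aligned(0, 0x1000));
	return 0;
}
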
4166 struct i915_vma *vma; in i915_gem_object_do_pin() local
4182 vma = ggtt_view ? i915_gem_obj_to_ggtt_view(obj, ggtt_view) : in i915_gem_object_do_pin()
4185 if (IS_ERR(vma)) in i915_gem_object_do_pin()
4186 return PTR_ERR(vma); in i915_gem_object_do_pin()
4188 if (vma) { in i915_gem_object_do_pin()
4189 if (WARN_ON(vma->pin_count == DRM_I915_GEM_OBJECT_MAX_PIN_COUNT)) in i915_gem_object_do_pin()
4192 if (i915_vma_misplaced(vma, alignment, flags)) { in i915_gem_object_do_pin()
4193 WARN(vma->pin_count, in i915_gem_object_do_pin()
4198 upper_32_bits(vma->node.start), in i915_gem_object_do_pin()
4199 lower_32_bits(vma->node.start), in i915_gem_object_do_pin()
4203 ret = i915_vma_unbind(vma); in i915_gem_object_do_pin()
4207 vma = NULL; in i915_gem_object_do_pin()
4211 bound = vma ? vma->bound : 0; in i915_gem_object_do_pin()
4212 if (vma == NULL || !drm_mm_node_allocated(&vma->node)) { in i915_gem_object_do_pin()
4213 vma = i915_gem_object_bind_to_vm(obj, vm, ggtt_view, alignment, in i915_gem_object_do_pin()
4215 if (IS_ERR(vma)) in i915_gem_object_do_pin()
4216 return PTR_ERR(vma); in i915_gem_object_do_pin()
4218 ret = i915_vma_bind(vma, obj->cache_level, flags); in i915_gem_object_do_pin()
4224 (bound ^ vma->bound) & GLOBAL_BIND) { in i915_gem_object_do_pin()
4225 __i915_vma_set_map_and_fenceable(vma); in i915_gem_object_do_pin()
4229 vma->pin_count++; in i915_gem_object_do_pin()
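
i915_gem_object_do_pin() reuses an existing binding when it already satisfies the requested placement, unbinds and rebinds it when i915_vma_misplaced() says otherwise, and finally bumps pin_count. The sketch below captures only that control-flow shape; every type and helper in it is a hypothetical stand-in, not the driver's logic.

#include <stdbool.h>
#include <stdio.h>

struct binding {                             /* hypothetical stand-in for i915_vma */
	unsigned long start;
	unsigned int pin_count;
	bool bound;
};

static bool misplaced(const struct binding *b, unsigned long alignment)
{
	return b->start & (alignment - 1);   /* power-of-two alignment check */
}

static void bind(struct binding *b, unsigned long alignment)
{
	b->start = alignment;                /* pretend the allocator returned an aligned slot */
	b->bound = true;
}

static int pin(struct binding *b, unsigned long alignment)
{
	if (b->bound && misplaced(b, alignment))
		b->bound = false;            /* drop the unsuitable placement */

	if (!b->bound)
		bind(b, alignment);          /* (re)bind at a valid placement */

	b->pin_count++;                      /* the caller now holds a pin */
	return 0;
}

int main(void)
{
	struct binding b = { .start = 0x10400, .pin_count = 0, .bound = true };

	pin(&b, 0x1000);
	printf("start=0x%lx pins=%u\n", b.start, b.pin_count);  /* 0x1000, 1 */
	return 0;
}
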
4261 struct i915_vma *vma = i915_gem_obj_to_ggtt_view(obj, view); in i915_gem_object_ggtt_unpin_view() local
4263 BUG_ON(!vma); in i915_gem_object_ggtt_unpin_view()
4264 WARN_ON(vma->pin_count == 0); in i915_gem_object_ggtt_unpin_view()
4267 --vma->pin_count; in i915_gem_object_ggtt_unpin_view()
4480 struct i915_vma *vma, *next; in i915_gem_free_object() local
4486 list_for_each_entry_safe(vma, next, &obj->vma_list, vma_link) { in i915_gem_free_object()
4489 vma->pin_count = 0; in i915_gem_free_object()
4490 ret = i915_vma_unbind(vma); in i915_gem_free_object()
4497 WARN_ON(i915_vma_unbind(vma)); in i915_gem_free_object()
4542 struct i915_vma *vma; in i915_gem_obj_to_vma() local
4543 list_for_each_entry(vma, &obj->vma_list, vma_link) { in i915_gem_obj_to_vma()
4544 if (i915_is_ggtt(vma->vm) && in i915_gem_obj_to_vma()
4545 vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) in i915_gem_obj_to_vma()
4547 if (vma->vm == vm) in i915_gem_obj_to_vma()
4548 return vma; in i915_gem_obj_to_vma()
4557 struct i915_vma *vma; in i915_gem_obj_to_ggtt_view() local
4562 list_for_each_entry(vma, &obj->vma_list, vma_link) in i915_gem_obj_to_ggtt_view()
4563 if (vma->vm == ggtt && in i915_gem_obj_to_ggtt_view()
4564 i915_ggtt_view_equal(&vma->ggtt_view, view)) in i915_gem_obj_to_ggtt_view()
4565 return vma; in i915_gem_obj_to_ggtt_view()
4569 void i915_gem_vma_destroy(struct i915_vma *vma) in i915_gem_vma_destroy() argument
4572 WARN_ON(vma->node.allocated); in i915_gem_vma_destroy()
4575 if (!list_empty(&vma->exec_list)) in i915_gem_vma_destroy()
4578 vm = vma->vm; in i915_gem_vma_destroy()
4583 list_del(&vma->vma_link); in i915_gem_vma_destroy()
4585 kmem_cache_free(to_i915(vma->obj->base.dev)->vmas, vma); in i915_gem_vma_destroy()
5139 struct i915_vma *vma; in i915_gem_obj_offset() local
5143 list_for_each_entry(vma, &o->vma_list, vma_link) { in i915_gem_obj_offset()
5144 if (i915_is_ggtt(vma->vm) && in i915_gem_obj_offset()
5145 vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) in i915_gem_obj_offset()
5147 if (vma->vm == vm) in i915_gem_obj_offset()
5148 return vma->node.start; in i915_gem_obj_offset()
5160 struct i915_vma *vma; in i915_gem_obj_ggtt_offset_view() local
5162 list_for_each_entry(vma, &o->vma_list, vma_link) in i915_gem_obj_ggtt_offset_view()
5163 if (vma->vm == ggtt && in i915_gem_obj_ggtt_offset_view()
5164 i915_ggtt_view_equal(&vma->ggtt_view, view)) in i915_gem_obj_ggtt_offset_view()
5165 return vma->node.start; in i915_gem_obj_ggtt_offset_view()
5174 struct i915_vma *vma; in i915_gem_obj_bound() local
5176 list_for_each_entry(vma, &o->vma_list, vma_link) { in i915_gem_obj_bound()
5177 if (i915_is_ggtt(vma->vm) && in i915_gem_obj_bound()
5178 vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) in i915_gem_obj_bound()
5180 if (vma->vm == vm && drm_mm_node_allocated(&vma->node)) in i915_gem_obj_bound()
5191 struct i915_vma *vma; in i915_gem_obj_ggtt_bound_view() local
5193 list_for_each_entry(vma, &o->vma_list, vma_link) in i915_gem_obj_ggtt_bound_view()
5194 if (vma->vm == ggtt && in i915_gem_obj_ggtt_bound_view()
5195 i915_ggtt_view_equal(&vma->ggtt_view, view) && in i915_gem_obj_ggtt_bound_view()
5196 drm_mm_node_allocated(&vma->node)) in i915_gem_obj_ggtt_bound_view()
5204 struct i915_vma *vma; in i915_gem_obj_bound_any() local
5206 list_for_each_entry(vma, &o->vma_list, vma_link) in i915_gem_obj_bound_any()
5207 if (drm_mm_node_allocated(&vma->node)) in i915_gem_obj_bound_any()
5217 struct i915_vma *vma; in i915_gem_obj_size() local
5223 list_for_each_entry(vma, &o->vma_list, vma_link) { in i915_gem_obj_size()
5224 if (i915_is_ggtt(vma->vm) && in i915_gem_obj_size()
5225 vma->ggtt_view.type != I915_GGTT_VIEW_NORMAL) in i915_gem_obj_size()
5227 if (vma->vm == vm) in i915_gem_obj_size()
5228 return vma->node.size; in i915_gem_obj_size()
5235 struct i915_vma *vma; in i915_gem_obj_is_pinned() local
5236 list_for_each_entry(vma, &obj->vma_list, vma_link) in i915_gem_obj_is_pinned()
5237 if (vma->pin_count > 0) in i915_gem_obj_is_pinned()