Lines matching refs: dev_priv
(Identifier cross-reference for the i915 GEM core, drivers/gpu/drm/i915/i915_gem.c. Each entry gives the source line number, the matching line of code, and the enclosing function; the trailing "argument" or "local" marker records whether dev_priv enters that function as a parameter or as a local variable.)
82 static void i915_gem_info_add_obj(struct drm_i915_private *dev_priv, in i915_gem_info_add_obj() argument
85 spin_lock(&dev_priv->mm.object_stat_lock); in i915_gem_info_add_obj()
86 dev_priv->mm.object_count++; in i915_gem_info_add_obj()
87 dev_priv->mm.object_memory += size; in i915_gem_info_add_obj()
88 spin_unlock(&dev_priv->mm.object_stat_lock); in i915_gem_info_add_obj()
91 static void i915_gem_info_remove_obj(struct drm_i915_private *dev_priv, in i915_gem_info_remove_obj() argument
94 spin_lock(&dev_priv->mm.object_stat_lock); in i915_gem_info_remove_obj()
95 dev_priv->mm.object_count--; in i915_gem_info_remove_obj()
96 dev_priv->mm.object_memory -= size; in i915_gem_info_remove_obj()
97 spin_unlock(&dev_priv->mm.object_stat_lock); in i915_gem_info_remove_obj()
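
Entries 82-97 together cover the whole of the GEM object-statistics bookkeeping: a spinlock guards a pair of counters tracking how many objects exist and how much backing memory they hold. Reassembled from the fragments (the size_t parameter on the elided second signature line is an assumption based on the arithmetic):

    static void i915_gem_info_add_obj(struct drm_i915_private *dev_priv,
                                      size_t size)
    {
            spin_lock(&dev_priv->mm.object_stat_lock);
            dev_priv->mm.object_count++;
            dev_priv->mm.object_memory += size;
            spin_unlock(&dev_priv->mm.object_stat_lock);
    }

    static void i915_gem_info_remove_obj(struct drm_i915_private *dev_priv,
                                         size_t size)
    {
            spin_lock(&dev_priv->mm.object_stat_lock);
            dev_priv->mm.object_count--;
            dev_priv->mm.object_memory -= size;
            spin_unlock(&dev_priv->mm.object_stat_lock);
    }

A dedicated spinlock (rather than struct_mutex) keeps the counters consistent for readers such as debugfs without forcing them to take the heavyweight GEM lock.
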
131 struct drm_i915_private *dev_priv = dev->dev_private; in i915_mutex_lock_interruptible() local
134 ret = i915_gem_wait_for_error(&dev_priv->gpu_error); in i915_mutex_lock_interruptible()
150 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_get_aperture_ioctl() local
157 list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list) in i915_gem_get_aperture_ioctl()
162 args->aper_size = dev_priv->gtt.base.total; in i915_gem_get_aperture_ioctl()
380 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_object_alloc() local
381 return kmem_cache_zalloc(dev_priv->slab, GFP_KERNEL); in i915_gem_object_alloc()
386 struct drm_i915_private *dev_priv = obj->base.dev->dev_private; in i915_gem_object_free() local
387 kmem_cache_free(dev_priv->slab, obj); in i915_gem_object_free()
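
Entries 380-387 show that GEM objects come from a dedicated slab cache (dev_priv->slab, created in i915_gem_load(), entry 4955) rather than plain kmalloc(). A sketch of the pair as reconstructed from the fragments (the void * return type is an assumption):

    void *i915_gem_object_alloc(struct drm_device *dev)
    {
            struct drm_i915_private *dev_priv = dev->dev_private;
            /* zeroed allocation from the driver-private slab cache */
            return kmem_cache_zalloc(dev_priv->slab, GFP_KERNEL);
    }

    void i915_gem_object_free(struct drm_i915_gem_object *obj)
    {
            struct drm_i915_private *dev_priv = obj->base.dev->dev_private;
            kmem_cache_free(dev_priv->slab, obj);
    }
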
786 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_gtt_pwrite_fast() local
828 if (fast_user_write(dev_priv->gtt.mappable, page_base, in i915_gem_gtt_pwrite_fast()
1048 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_pwrite_ioctl() local
1068 intel_runtime_pm_get(dev_priv); in i915_gem_pwrite_ioctl()
1125 intel_runtime_pm_put(dev_priv); in i915_gem_pwrite_ioctl()
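
Entries 1068 and 1125 bracket the pwrite path in a runtime-PM reference: the device must be held awake while the CPU writes through the GTT mapping (entry 828), and the reference is dropped again on every exit path. The shape of the pattern, with the body condensed into a hypothetical helper:

    intel_runtime_pm_get(dev_priv);        /* block autosuspend */

    ret = perform_pwrite(dev, obj, args);  /* hypothetical stand-in for the
                                            * ioctl body between the two
                                            * fragments */

    intel_runtime_pm_put(dev_priv);        /* allow autosuspend again */

The same bracket appears around the pagefault handler (entries 1624 and 1733) and object teardown (entries 4478 and 4532), since both may touch hardware.
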
1178 static bool missed_irq(struct drm_i915_private *dev_priv, in missed_irq() argument
1181 return test_bit(ring->id, &dev_priv->gpu_error.missed_irq_rings); in missed_irq()
1217 struct drm_i915_private *dev_priv = dev->dev_private; in __i915_wait_request() local
1219 ACCESS_ONCE(dev_priv->gpu_error.test_irq_rings) & intel_ring_flag(ring); in __i915_wait_request()
1225 WARN(!intel_irqs_enabled(dev_priv), "IRQs disabled"); in __i915_wait_request()
1234 gen6_rps_boost(dev_priv); in __i915_wait_request()
1236 mod_delayed_work(dev_priv->wq, in __i915_wait_request()
1255 if (reset_counter != atomic_read(&dev_priv->gpu_error.reset_counter)) { in __i915_wait_request()
1258 ret = i915_gem_check_wedge(&dev_priv->gpu_error, interruptible); in __i915_wait_request()
1280 if (timeout || missed_irq(dev_priv, ring)) { in __i915_wait_request()
1284 expire = missed_irq(dev_priv, ring) ? jiffies + 1 : timeout_expire; in __i915_wait_request()
1330 struct drm_i915_private *dev_priv; in i915_wait_request() local
1338 dev_priv = dev->dev_private; in i915_wait_request()
1339 interruptible = dev_priv->mm.interruptible; in i915_wait_request()
1343 ret = i915_gem_check_wedge(&dev_priv->gpu_error, interruptible); in i915_wait_request()
1351 reset_counter = atomic_read(&dev_priv->gpu_error.reset_counter); in i915_wait_request()
1409 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_object_wait_rendering__nonblocking() local
1414 BUG_ON(!dev_priv->mm.interruptible); in i915_gem_object_wait_rendering__nonblocking()
1420 ret = i915_gem_check_wedge(&dev_priv->gpu_error, true); in i915_gem_object_wait_rendering__nonblocking()
1428 reset_counter = atomic_read(&dev_priv->gpu_error.reset_counter); in i915_gem_object_wait_rendering__nonblocking()
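
Entries 1255, 1343/1351 and 1420/1428 are all instances of one idiom: before dropping struct_mutex to sleep on a request, the caller checks that the GPU is not wedged and snapshots gpu_error.reset_counter; if the counter has moved by wake-up time, a reset happened mid-wait and the outcome cannot be trusted. A condensed sketch of the handshake (error handling simplified):

    ret = i915_gem_check_wedge(&dev_priv->gpu_error, interruptible);
    if (ret)
            return ret;

    reset_counter = atomic_read(&dev_priv->gpu_error.reset_counter);

    /* ... drop struct_mutex and sleep until the request completes ... */

    if (reset_counter != atomic_read(&dev_priv->gpu_error.reset_counter))
            return -EAGAIN;  /* a reset raced with the wait; caller must retry */
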
1618 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_fault() local
1624 intel_runtime_pm_get(dev_priv); in i915_gem_fault()
1665 pfn = dev_priv->gtt.mappable_base + i915_gem_obj_ggtt_offset(obj); in i915_gem_fault()
1700 if (!i915_terminally_wedged(&dev_priv->gpu_error)) { in i915_gem_fault()
1733 intel_runtime_pm_put(dev_priv); in i915_gem_fault()
1763 i915_gem_release_all_mmaps(struct drm_i915_private *dev_priv) in i915_gem_release_all_mmaps() argument
1767 list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list) in i915_gem_release_all_mmaps()
1820 struct drm_i915_private *dev_priv = obj->base.dev->dev_private; in i915_gem_object_create_mmap_offset() local
1826 dev_priv->mm.shrinker_no_lock_stealing = true; in i915_gem_object_create_mmap_offset()
1839 i915_gem_shrink(dev_priv, in i915_gem_object_create_mmap_offset()
1848 i915_gem_shrink_all(dev_priv); in i915_gem_object_create_mmap_offset()
1851 dev_priv->mm.shrinker_no_lock_stealing = false; in i915_gem_object_create_mmap_offset()
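
Entries 1826-1851 describe a three-step retry ladder for mmap-offset allocation: try, shrink only purgeable objects, try again, then shrink everything as a last resort; lock stealing is disabled throughout because the caller already holds the lock the shrinker would otherwise steal. Reconstructed approximately (the exact shrink flags are an assumption):

    dev_priv->mm.shrinker_no_lock_stealing = true;

    ret = drm_gem_create_mmap_offset(&obj->base);
    if (ret != -ENOSPC)
            goto out;

    /* purgeable objects are free to reclaim, so drop those first */
    i915_gem_shrink(dev_priv, obj->base.size >> PAGE_SHIFT,
                    I915_SHRINK_BOUND |
                    I915_SHRINK_UNBOUND |
                    I915_SHRINK_PURGEABLE);
    ret = drm_gem_create_mmap_offset(&obj->base);
    if (ret != -ENOSPC)
            goto out;

    /* last resort: evict everything evictable and try once more */
    i915_gem_shrink_all(dev_priv);
    ret = drm_gem_create_mmap_offset(&obj->base);
out:
    dev_priv->mm.shrinker_no_lock_stealing = false;

Entries 2078/2090 show the same escalation when allocating backing pages in i915_gem_object_get_pages_gtt().
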
1867 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_mmap_gtt() local
1881 if (obj->base.size > dev_priv->gtt.mappable_end) { in i915_gem_mmap_gtt()
2037 struct drm_i915_private *dev_priv = obj->base.dev->dev_private; in i915_gem_object_get_pages_gtt() local
2078 i915_gem_shrink(dev_priv, in i915_gem_object_get_pages_gtt()
2090 i915_gem_shrink_all(dev_priv); in i915_gem_object_get_pages_gtt()
2126 dev_priv->quirks & QUIRK_PIN_SWIZZLED_PAGES) in i915_gem_object_get_pages_gtt()
2162 struct drm_i915_private *dev_priv = obj->base.dev->dev_private; in i915_gem_object_get_pages() local
2180 list_add_tail(&obj->global_list, &dev_priv->mm.unbound_list); in i915_gem_object_get_pages()
2261 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_init_seqno() local
2266 for_each_ring(ring, dev_priv, i) { in i915_gem_init_seqno()
2274 for_each_ring(ring, dev_priv, i) { in i915_gem_init_seqno()
2286 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_set_seqno() local
2302 dev_priv->next_seqno = seqno; in i915_gem_set_seqno()
2303 dev_priv->last_seqno = seqno - 1; in i915_gem_set_seqno()
2304 if (dev_priv->last_seqno == 0) in i915_gem_set_seqno()
2305 dev_priv->last_seqno--; in i915_gem_set_seqno()
2313 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_get_seqno() local
2316 if (dev_priv->next_seqno == 0) { in i915_gem_get_seqno()
2321 dev_priv->next_seqno = 1; in i915_gem_get_seqno()
2324 *seqno = dev_priv->last_seqno = dev_priv->next_seqno++; in i915_gem_get_seqno()
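
Entries 2286-2324 show the seqno allocator reserving 0 as the "no request" value: set_seqno steers last_seqno away from 0 so wraparound comparisons stay sane, and get_seqno reinitialises the rings through i915_gem_init_seqno() (entry 2261) whenever next_seqno wraps to 0. Reassembled:

    int i915_gem_get_seqno(struct drm_device *dev, u32 *seqno)
    {
            struct drm_i915_private *dev_priv = dev->dev_private;

            /* reserve 0 for "no request" */
            if (dev_priv->next_seqno == 0) {
                    int ret = i915_gem_init_seqno(dev, 0);
                    if (ret)
                            return ret;
                    dev_priv->next_seqno = 1;
            }

            *seqno = dev_priv->last_seqno = dev_priv->next_seqno++;
            return 0;
    }
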
2332 struct drm_i915_private *dev_priv = ring->dev->dev_private; in __i915_add_request() local
2425 cancel_delayed_work_sync(&dev_priv->mm.idle_work); in __i915_add_request()
2426 queue_delayed_work(dev_priv->wq, in __i915_add_request()
2427 &dev_priv->mm.retire_work, in __i915_add_request()
2429 intel_mark_busy(dev_priv->dev); in __i915_add_request()
2448 static bool i915_context_is_banned(struct drm_i915_private *dev_priv, in i915_context_is_banned() argument
2463 } else if (i915_stop_ring_allow_ban(dev_priv)) { in i915_context_is_banned()
2464 if (i915_stop_ring_allow_warn(dev_priv)) in i915_context_is_banned()
2473 static void i915_set_reset_status(struct drm_i915_private *dev_priv, in i915_set_reset_status() argument
2485 hs->banned = i915_context_is_banned(dev_priv, ctx); in i915_set_reset_status()
2538 static void i915_gem_reset_ring_status(struct drm_i915_private *dev_priv, in i915_gem_reset_ring_status() argument
2551 i915_set_reset_status(dev_priv, request->ctx, ring_hung); in i915_gem_reset_ring_status()
2554 i915_set_reset_status(dev_priv, request->ctx, false); in i915_gem_reset_ring_status()
2557 static void i915_gem_reset_ring_cleanup(struct drm_i915_private *dev_priv, in i915_gem_reset_ring_cleanup() argument
2582 intel_runtime_pm_put(dev_priv); in i915_gem_reset_ring_cleanup()
2613 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_restore_fences() local
2616 for (i = 0; i < dev_priv->num_fence_regs; i++) { in i915_gem_restore_fences()
2617 struct drm_i915_fence_reg *reg = &dev_priv->fence_regs[i]; in i915_gem_restore_fences()
2634 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_reset() local
2643 for_each_ring(ring, dev_priv, i) in i915_gem_reset()
2644 i915_gem_reset_ring_status(dev_priv, ring); in i915_gem_reset()
2646 for_each_ring(ring, dev_priv, i) in i915_gem_reset()
2647 i915_gem_reset_ring_cleanup(dev_priv, ring); in i915_gem_reset()
2721 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_retire_requests() local
2726 for_each_ring(ring, dev_priv, i) { in i915_gem_retire_requests()
2741 mod_delayed_work(dev_priv->wq, in i915_gem_retire_requests()
2742 &dev_priv->mm.idle_work, in i915_gem_retire_requests()
2751 struct drm_i915_private *dev_priv = in i915_gem_retire_work_handler() local
2752 container_of(work, typeof(*dev_priv), mm.retire_work.work); in i915_gem_retire_work_handler()
2753 struct drm_device *dev = dev_priv->dev; in i915_gem_retire_work_handler()
2763 queue_delayed_work(dev_priv->wq, &dev_priv->mm.retire_work, in i915_gem_retire_work_handler()
2770 struct drm_i915_private *dev_priv = in i915_gem_idle_work_handler() local
2771 container_of(work, typeof(*dev_priv), mm.idle_work.work); in i915_gem_idle_work_handler()
2773 intel_mark_idle(dev_priv->dev); in i915_gem_idle_work_handler()
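
Entries 2751-2773 show both deferred-work handlers recovering dev_priv with container_of(): because the delayed_work items are embedded in drm_i915_private, a handler can walk back from its work pointer to the enclosing structure without stashing a context pointer anywhere. Entry 2763 adds the self-rearming half of the pattern. A close paraphrase of the retire handler (the trylock/retire body is condensed, not verbatim):

    static void i915_gem_retire_work_handler(struct work_struct *work)
    {
            /* walk back from the embedded work item to dev_priv */
            struct drm_i915_private *dev_priv =
                    container_of(work, typeof(*dev_priv), mm.retire_work.work);
            struct drm_device *dev = dev_priv->dev;
            bool idle = false;

            if (mutex_trylock(&dev->struct_mutex)) {
                    idle = i915_gem_retire_requests(dev);
                    mutex_unlock(&dev->struct_mutex);
            }

            /* re-arm ourselves while requests remain outstanding */
            if (!idle)
                    queue_delayed_work(dev_priv->wq, &dev_priv->mm.retire_work,
                                       round_jiffies_up_relative(HZ));
    }
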
2825 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_wait_ioctl() local
2864 reset_counter = atomic_read(&dev_priv->gpu_error.reset_counter); in i915_gem_wait_ioctl()
2962 struct drm_i915_private *dev_priv = obj->base.dev->dev_private; in i915_vma_unbind() local
3022 list_move_tail(&obj->global_list, &dev_priv->mm.unbound_list); in i915_vma_unbind()
3036 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gpu_idle() local
3041 for_each_ring(ring, dev_priv, i) { in i915_gpu_idle()
3059 struct drm_i915_private *dev_priv = dev->dev_private; in i965_write_fence_reg() local
3118 struct drm_i915_private *dev_priv = dev->dev_private; in i915_write_fence_reg() local
3162 struct drm_i915_private *dev_priv = dev->dev_private; in i830_write_fence_reg() local
3199 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_write_fence() local
3204 if (i915_gem_object_needs_mb(dev_priv->fence_regs[reg].obj)) in i915_gem_write_fence()
3225 static inline int fence_number(struct drm_i915_private *dev_priv, in fence_number() argument
3228 return fence - dev_priv->fence_regs; in fence_number()
3235 struct drm_i915_private *dev_priv = obj->base.dev->dev_private; in i915_gem_object_update_fence() local
3236 int reg = fence_number(dev_priv, fence); in i915_gem_object_update_fence()
3243 list_move_tail(&fence->lru_list, &dev_priv->mm.fence_list); in i915_gem_object_update_fence()
3269 struct drm_i915_private *dev_priv = obj->base.dev->dev_private; in i915_gem_object_put_fence() local
3280 fence = &dev_priv->fence_regs[obj->fence_reg]; in i915_gem_object_put_fence()
3294 struct drm_i915_private *dev_priv = dev->dev_private; in i915_find_fence_reg() local
3300 for (i = dev_priv->fence_reg_start; i < dev_priv->num_fence_regs; i++) { in i915_find_fence_reg()
3301 reg = &dev_priv->fence_regs[i]; in i915_find_fence_reg()
3313 list_for_each_entry(reg, &dev_priv->mm.fence_list, lru_list) { in i915_find_fence_reg()
3346 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_object_get_fence() local
3362 reg = &dev_priv->fence_regs[obj->fence_reg]; in i915_gem_object_get_fence()
3365 &dev_priv->mm.fence_list); in i915_gem_object_get_fence()
3437 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_object_bind_to_vm() local
3442 flags & PIN_MAPPABLE ? dev_priv->gtt.mappable_end : vm->total; in i915_gem_object_bind_to_vm()
3536 list_move_tail(&obj->global_list, &dev_priv->mm.bound_list); in i915_gem_object_bind_to_vm()
4042 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_ring_throttle() local
4049 ret = i915_gem_wait_for_error(&dev_priv->gpu_error); in i915_gem_ring_throttle()
4053 ret = i915_gem_check_wedge(&dev_priv->gpu_error, false); in i915_gem_ring_throttle()
4064 reset_counter = atomic_read(&dev_priv->gpu_error.reset_counter); in i915_gem_ring_throttle()
4074 queue_delayed_work(dev_priv->wq, &dev_priv->mm.retire_work, 0); in i915_gem_ring_throttle()
4109 struct drm_i915_private *dev_priv = obj->base.dev->dev_private; in i915_gem_object_do_pin() local
4114 if (WARN_ON(vm == &dev_priv->mm.aliasing_ppgtt->base)) in i915_gem_object_do_pin()
4191 dev_priv->gtt.mappable_end); in i915_gem_object_do_pin()
4247 struct drm_i915_private *dev_priv = obj->base.dev->dev_private; in i915_gem_object_pin_fence() local
4251 dev_priv->fence_regs[obj->fence_reg].pin_count > in i915_gem_object_pin_fence()
4253 dev_priv->fence_regs[obj->fence_reg].pin_count++; in i915_gem_object_pin_fence()
4263 struct drm_i915_private *dev_priv = obj->base.dev->dev_private; in i915_gem_object_unpin_fence() local
4264 WARN_ON(dev_priv->fence_regs[obj->fence_reg].pin_count <= 0); in i915_gem_object_unpin_fence()
4265 dev_priv->fence_regs[obj->fence_reg].pin_count--; in i915_gem_object_unpin_fence()
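
Entries 4247-4265 are the fence-register pin accounting: pinning bumps the per-register pin_count (entry 4251 is the truncated start of a sanity-check comparison against the VMA pin count), and unpinning warns if the count would go negative. A simplified sketch of the pair, omitting that truncated WARN condition:

    bool i915_gem_object_pin_fence(struct drm_i915_gem_object *obj)
    {
            if (obj->fence_reg != I915_FENCE_REG_NONE) {
                    struct drm_i915_private *dev_priv =
                            obj->base.dev->dev_private;
                    dev_priv->fence_regs[obj->fence_reg].pin_count++;
                    return true;
            }
            return false;
    }

    void i915_gem_object_unpin_fence(struct drm_i915_gem_object *obj)
    {
            if (obj->fence_reg != I915_FENCE_REG_NONE) {
                    struct drm_i915_private *dev_priv =
                            obj->base.dev->dev_private;
                    WARN_ON(dev_priv->fence_regs[obj->fence_reg].pin_count <= 0);
                    dev_priv->fence_regs[obj->fence_reg].pin_count--;
            }
    }
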
4319 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_madvise_ioctl() local
4349 dev_priv->quirks & QUIRK_PIN_SWIZZLED_PAGES) { in i915_gem_madvise_ioctl()
4475 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_free_object() local
4478 intel_runtime_pm_get(dev_priv); in i915_gem_free_object()
4490 was_interruptible = dev_priv->mm.interruptible; in i915_gem_free_object()
4491 dev_priv->mm.interruptible = false; in i915_gem_free_object()
4495 dev_priv->mm.interruptible = was_interruptible; in i915_gem_free_object()
4507 dev_priv->quirks & QUIRK_PIN_SWIZZLED_PAGES && in i915_gem_free_object()
4527 i915_gem_info_remove_obj(dev_priv, obj->base.size); in i915_gem_free_object()
4532 intel_runtime_pm_put(dev_priv); in i915_gem_free_object()
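
Entries 4490-4495 capture how object teardown copes with signals: if an unbind during free is interrupted, it is retried with mm.interruptible forced off so nothing can abort it a second time, and the flag is restored afterwards. The save/retry/restore pattern in isolation (vma is the mapping being torn down):

    was_interruptible = dev_priv->mm.interruptible;
    dev_priv->mm.interruptible = false;

    /* the retry must run to completion; failure here would leak the VMA */
    WARN_ON(i915_vma_unbind(vma));

    dev_priv->mm.interruptible = was_interruptible;
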
4587 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_stop_ringbuffers() local
4591 for_each_ring(ring, dev_priv, i) in i915_gem_stop_ringbuffers()
4592 dev_priv->gt.stop_ring(ring); in i915_gem_stop_ringbuffers()
4598 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_suspend() local
4611 cancel_delayed_work_sync(&dev_priv->gpu_error.hangcheck_work); in i915_gem_suspend()
4612 cancel_delayed_work_sync(&dev_priv->mm.retire_work); in i915_gem_suspend()
4613 flush_delayed_work(&dev_priv->mm.idle_work); in i915_gem_suspend()
4618 WARN_ON(dev_priv->mm.busy); in i915_gem_suspend()
4630 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_l3_remap() local
4632 u32 *remap_info = dev_priv->l3_parity.remap_info[slice]; in i915_gem_l3_remap()
4660 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_init_swizzling() local
4663 dev_priv->mm.bit_6_swizzle_x == I915_BIT_6_SWIZZLE_NONE) in i915_gem_init_swizzling()
4701 struct drm_i915_private *dev_priv = dev->dev_private; in init_unused_ring() local
4728 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_init_rings() local
4766 intel_cleanup_ring_buffer(&dev_priv->ring[VCS2]); in i915_gem_init_rings()
4768 intel_cleanup_ring_buffer(&dev_priv->ring[VECS]); in i915_gem_init_rings()
4770 intel_cleanup_ring_buffer(&dev_priv->ring[BCS]); in i915_gem_init_rings()
4772 intel_cleanup_ring_buffer(&dev_priv->ring[VCS]); in i915_gem_init_rings()
4774 intel_cleanup_ring_buffer(&dev_priv->ring[RCS]); in i915_gem_init_rings()
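
Entries 4766-4774 are the tail of a classic error-unwind ladder: each ring's init failure jumps to a label that tears down, in reverse order, only the rings that already came up. Sketched with assumed label names and one representative init call:

    ret = intel_init_render_ring_buffer(dev);   /* assumed init call */
    if (ret)
            return ret;
    /* ... init the BSD, BLT, VEBOX and BSD2 rings in turn, each failure
     * branching to the label just below its predecessors ... */

cleanup_bsd2_ring:
    intel_cleanup_ring_buffer(&dev_priv->ring[VCS2]);
cleanup_vebox_ring:
    intel_cleanup_ring_buffer(&dev_priv->ring[VECS]);
cleanup_blt_ring:
    intel_cleanup_ring_buffer(&dev_priv->ring[BCS]);
cleanup_bsd_ring:
    intel_cleanup_ring_buffer(&dev_priv->ring[VCS]);
cleanup_render_ring:
    intel_cleanup_ring_buffer(&dev_priv->ring[RCS]);
    return ret;
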
4782 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_init_hw() local
4790 intel_uncore_forcewake_get(dev_priv, FORCEWAKE_ALL); in i915_gem_init_hw()
4792 if (dev_priv->ellc_size) in i915_gem_init_hw()
4821 for_each_ring(ring, dev_priv, i) { in i915_gem_init_hw()
4828 i915_gem_l3_remap(&dev_priv->ring[RCS], i); in i915_gem_init_hw()
4836 ret = i915_gem_context_enable(dev_priv); in i915_gem_init_hw()
4845 intel_uncore_forcewake_put(dev_priv, FORCEWAKE_ALL); in i915_gem_init_hw()
4851 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_init() local
4868 dev_priv->gt.do_execbuf = i915_gem_ringbuffer_submission; in i915_gem_init()
4869 dev_priv->gt.init_rings = i915_gem_init_rings; in i915_gem_init()
4870 dev_priv->gt.cleanup_ring = intel_cleanup_ring_buffer; in i915_gem_init()
4871 dev_priv->gt.stop_ring = intel_stop_ring_buffer; in i915_gem_init()
4873 dev_priv->gt.do_execbuf = intel_execlists_submission; in i915_gem_init()
4874 dev_priv->gt.init_rings = intel_logical_rings_init; in i915_gem_init()
4875 dev_priv->gt.cleanup_ring = intel_logical_ring_cleanup; in i915_gem_init()
4876 dev_priv->gt.stop_ring = intel_logical_ring_stop; in i915_gem_init()
4885 intel_uncore_forcewake_get(dev_priv, FORCEWAKE_ALL); in i915_gem_init()
4897 ret = dev_priv->gt.init_rings(dev); in i915_gem_init()
4908 atomic_set_mask(I915_WEDGED, &dev_priv->gpu_error.reset_counter); in i915_gem_init()
4913 intel_uncore_forcewake_put(dev_priv, FORCEWAKE_ALL); in i915_gem_init()
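
Entries 4868-4876 fill a small per-device dispatch table exactly once at init: the same four dev_priv->gt hooks are pointed at either the legacy-ringbuffer or the execlists implementations, so the rest of the driver calls through dev_priv->gt.* (e.g. entries 4592, 4897 and 4927) without ever branching on the submission mode again. The selection, with the predicate assumed to be the execlists module parameter:

    if (!i915.enable_execlists) {
            dev_priv->gt.do_execbuf = i915_gem_ringbuffer_submission;
            dev_priv->gt.init_rings = i915_gem_init_rings;
            dev_priv->gt.cleanup_ring = intel_cleanup_ring_buffer;
            dev_priv->gt.stop_ring = intel_stop_ring_buffer;
    } else {
            dev_priv->gt.do_execbuf = intel_execlists_submission;
            dev_priv->gt.init_rings = intel_logical_rings_init;
            dev_priv->gt.cleanup_ring = intel_logical_ring_cleanup;
            dev_priv->gt.stop_ring = intel_logical_ring_stop;
    }
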
4922 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_cleanup_ringbuffer() local
4926 for_each_ring(ring, dev_priv, i) in i915_gem_cleanup_ringbuffer()
4927 dev_priv->gt.cleanup_ring(ring); in i915_gem_cleanup_ringbuffer()
4937 void i915_init_vm(struct drm_i915_private *dev_priv, in i915_init_vm() argument
4942 vm->dev = dev_priv->dev; in i915_init_vm()
4946 list_add_tail(&vm->global_link, &dev_priv->vm_list); in i915_init_vm()
4952 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_load() local
4955 dev_priv->slab = in i915_gem_load()
4961 INIT_LIST_HEAD(&dev_priv->vm_list); in i915_gem_load()
4962 i915_init_vm(dev_priv, &dev_priv->gtt.base); in i915_gem_load()
4964 INIT_LIST_HEAD(&dev_priv->context_list); in i915_gem_load()
4965 INIT_LIST_HEAD(&dev_priv->mm.unbound_list); in i915_gem_load()
4966 INIT_LIST_HEAD(&dev_priv->mm.bound_list); in i915_gem_load()
4967 INIT_LIST_HEAD(&dev_priv->mm.fence_list); in i915_gem_load()
4969 init_ring_lists(&dev_priv->ring[i]); in i915_gem_load()
4971 INIT_LIST_HEAD(&dev_priv->fence_regs[i].lru_list); in i915_gem_load()
4972 INIT_DELAYED_WORK(&dev_priv->mm.retire_work, in i915_gem_load()
4974 INIT_DELAYED_WORK(&dev_priv->mm.idle_work, in i915_gem_load()
4976 init_waitqueue_head(&dev_priv->gpu_error.reset_queue); in i915_gem_load()
4978 dev_priv->relative_constants_mode = I915_EXEC_CONSTANTS_REL_GENERAL; in i915_gem_load()
4981 dev_priv->num_fence_regs = 32; in i915_gem_load()
4983 dev_priv->num_fence_regs = 16; in i915_gem_load()
4985 dev_priv->num_fence_regs = 8; in i915_gem_load()
4988 dev_priv->num_fence_regs = in i915_gem_load()
4992 INIT_LIST_HEAD(&dev_priv->mm.fence_list); in i915_gem_load()
4996 init_waitqueue_head(&dev_priv->pending_flip_queue); in i915_gem_load()
4998 dev_priv->mm.interruptible = true; in i915_gem_load()
5000 i915_gem_shrinker_init(dev_priv); in i915_gem_load()
5002 i915_gem_batch_pool_init(dev, &dev_priv->mm.batch_pool); in i915_gem_load()
5004 mutex_init(&dev_priv->fb_tracking.lock); in i915_gem_load()
5051 file_priv->dev_priv = dev->dev_private; in i915_gem_open()
5097 struct drm_i915_private *dev_priv = o->base.dev->dev_private; in i915_gem_obj_offset() local
5100 WARN_ON(vm == &dev_priv->mm.aliasing_ppgtt->base); in i915_gem_obj_offset()
5176 struct drm_i915_private *dev_priv = o->base.dev->dev_private; in i915_gem_obj_size() local
5179 WARN_ON(vm == &dev_priv->mm.aliasing_ppgtt->base); in i915_gem_obj_size()