Lines matching refs: dev_priv (drivers/gpu/drm/i915/i915_gem.c)

65 static void i915_gem_info_add_obj(struct drm_i915_private *dev_priv,  in i915_gem_info_add_obj()  argument
68 spin_lock(&dev_priv->mm.object_stat_lock); in i915_gem_info_add_obj()
69 dev_priv->mm.object_count++; in i915_gem_info_add_obj()
70 dev_priv->mm.object_memory += size; in i915_gem_info_add_obj()
71 spin_unlock(&dev_priv->mm.object_stat_lock); in i915_gem_info_add_obj()
74 static void i915_gem_info_remove_obj(struct drm_i915_private *dev_priv, in i915_gem_info_remove_obj() argument
77 spin_lock(&dev_priv->mm.object_stat_lock); in i915_gem_info_remove_obj()
78 dev_priv->mm.object_count--; in i915_gem_info_remove_obj()
79 dev_priv->mm.object_memory -= size; in i915_gem_info_remove_obj()
80 spin_unlock(&dev_priv->mm.object_stat_lock); in i915_gem_info_remove_obj()
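
The pair above (lines 65-80) is symmetric by design: mm.object_stat_lock is a plain spinlock that keeps object_count and object_memory moving together, so anyone reading under the lock sees counters describing the same set of objects. A minimal sketch of how a create/destroy path is expected to pair them (the caller names are illustrative, not from this file):

	/* Illustrative callers; the real ones are the object alloc/free paths. */
	static void example_track(struct drm_i915_private *dev_priv,
				  struct drm_i915_gem_object *obj)
	{
		i915_gem_info_add_obj(dev_priv, obj->base.size);
	}

	static void example_untrack(struct drm_i915_private *dev_priv,
				    struct drm_i915_gem_object *obj)
	{
		/* Must pass the same size that was added, or the
		 * object_memory counter drifts. */
		i915_gem_info_remove_obj(dev_priv, obj->base.size);
	}
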
114 struct drm_i915_private *dev_priv = dev->dev_private; in i915_mutex_lock_interruptible() local
117 ret = i915_gem_wait_for_error(&dev_priv->gpu_error); in i915_mutex_lock_interruptible()
133 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_get_aperture_ioctl() local
135 struct i915_gtt *ggtt = &dev_priv->gtt; in i915_gem_get_aperture_ioctl()
149 args->aper_size = dev_priv->gtt.base.total; in i915_gem_get_aperture_ioctl()
364 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_object_alloc() local
365 return kmem_cache_zalloc(dev_priv->objects, GFP_KERNEL); in i915_gem_object_alloc()
370 struct drm_i915_private *dev_priv = obj->base.dev->dev_private; in i915_gem_object_free() local
371 kmem_cache_free(dev_priv->objects, obj); in i915_gem_object_free()
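
i915_gem_object_alloc()/free() (lines 364-371) back GEM object headers with a dedicated slab cache rather than plain kmalloc, keeping the hot alloc/free path cheap. A hedged sketch of the whole lifecycle, assuming the cache is created once at load time as the i915_gem_load() lines (4989 onwards) suggest; the exact slab flags are an assumption:

	/* Once, at driver load (cf. i915_gem_load(), line 4989): */
	dev_priv->objects =
		kmem_cache_create("i915_gem_object",
				  sizeof(struct drm_i915_gem_object),
				  0, SLAB_HWCACHE_ALIGN, NULL);

	/* Per object: zeroed allocation from, and release back to, that cache. */
	struct drm_i915_gem_object *obj =
		kmem_cache_zalloc(dev_priv->objects, GFP_KERNEL);
	/* ... use the object ... */
	kmem_cache_free(dev_priv->objects, obj);
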
768 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_gtt_pwrite_fast() local
810 if (fast_user_write(dev_priv->gtt.mappable, page_base, in i915_gem_gtt_pwrite_fast()
1030 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_pwrite_ioctl() local
1050 intel_runtime_pm_get(dev_priv); in i915_gem_pwrite_ioctl()
1107 intel_runtime_pm_put(dev_priv); in i915_gem_pwrite_ioctl()
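
The pwrite ioctl (lines 1030-1107) brackets the whole operation with a runtime-PM reference: the GTT fast path at line 810 pokes the mappable aperture, which requires the device to be awake. The shape, as a sketch with error paths trimmed:

	int example_pwrite(struct drm_i915_private *dev_priv)
	{
		int ret = 0;

		/* Take a wakeref before touching dev_priv->gtt.mappable ... */
		intel_runtime_pm_get(dev_priv);

		/* ... per-page copies via fast_user_write() go here ... */

		/* ... and release it on every exit path. */
		intel_runtime_pm_put(dev_priv);
		return ret;
	}
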
1143 static bool missed_irq(struct drm_i915_private *dev_priv, in missed_irq() argument
1146 return test_bit(ring->id, &dev_priv->gpu_error.missed_irq_rings); in missed_irq()
1248 struct drm_i915_private *dev_priv = dev->dev_private; in __i915_wait_request() local
1250 ACCESS_ONCE(dev_priv->gpu_error.test_irq_rings) & intel_ring_flag(ring); in __i915_wait_request()
1257 WARN(!intel_irqs_enabled(dev_priv), "IRQs disabled"); in __i915_wait_request()
1276 if (INTEL_INFO(dev_priv)->gen >= 6) in __i915_wait_request()
1277 gen6_rps_boost(dev_priv, rps, req->emitted_jiffies); in __i915_wait_request()
1300 if (reset_counter != atomic_read(&dev_priv->gpu_error.reset_counter)) { in __i915_wait_request()
1303 ret = i915_gem_check_wedge(&dev_priv->gpu_error, interruptible); in __i915_wait_request()
1325 if (timeout || missed_irq(dev_priv, ring)) { in __i915_wait_request()
1329 expire = missed_irq(dev_priv, ring) ? jiffies + 1 : timeout_expire; in __i915_wait_request()
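
missed_irq() (lines 1143-1146) tests a per-ring bit in gpu_error.missed_irq_rings; once that bit is set, __i915_wait_request() stops trusting the ring interrupt and polls, which is what the jiffies + 1 expiry at line 1329 implements. A heavily simplified sketch of the fallback, assuming a hypothetical request_complete() predicate (the real loop also handles signals, resets and the RPS boost seen above):

	DEFINE_WAIT(wait);

	for (;;) {
		prepare_to_wait(&ring->irq_queue, &wait, TASK_INTERRUPTIBLE);
		if (request_complete(req))		/* hypothetical */
			break;
		if (missed_irq(dev_priv, ring))
			/* IRQ known unreliable on this ring: wake after
			 * one jiffy and re-check the seqno by hand. */
			io_schedule_timeout(1);
		else
			io_schedule_timeout(timeout_expire - jiffies);
	}
	finish_wait(&ring->irq_queue, &wait);
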
1461 struct drm_i915_private *dev_priv; in i915_wait_request() local
1468 dev_priv = dev->dev_private; in i915_wait_request()
1469 interruptible = dev_priv->mm.interruptible; in i915_wait_request()
1473 ret = i915_gem_check_wedge(&dev_priv->gpu_error, interruptible); in i915_wait_request()
1478 atomic_read(&dev_priv->gpu_error.reset_counter), in i915_wait_request()
1552 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_object_wait_rendering__nonblocking() local
1558 BUG_ON(!dev_priv->mm.interruptible); in i915_gem_object_wait_rendering__nonblocking()
1563 ret = i915_gem_check_wedge(&dev_priv->gpu_error, true); in i915_gem_object_wait_rendering__nonblocking()
1567 reset_counter = atomic_read(&dev_priv->gpu_error.reset_counter); in i915_gem_object_wait_rendering__nonblocking()
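
The nonblocking wait (lines 1552-1567) has to drop struct_mutex, so it snapshots gpu_error.reset_counter first; __i915_wait_request() (line 1300) compares the snapshot against the live counter, and a mismatch means a GPU reset ran in between, so the wait result cannot be trusted. Sketched, with the unlocked wait itself elided:

	unsigned reset_counter;
	int ret;

	ret = i915_gem_check_wedge(&dev_priv->gpu_error, true);
	if (ret)
		return ret;

	reset_counter = atomic_read(&dev_priv->gpu_error.reset_counter);
	mutex_unlock(&dev->struct_mutex);
	/* ... wait for the request without holding struct_mutex ... */
	mutex_lock(&dev->struct_mutex);

	/* Inside the wait (cf. line 1300): */
	if (reset_counter != atomic_read(&dev_priv->gpu_error.reset_counter))
		ret = i915_gem_check_wedge(&dev_priv->gpu_error, interruptible);
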
1793 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_fault() local
1800 intel_runtime_pm_get(dev_priv); in i915_gem_fault()
1828 if (obj->base.size >= dev_priv->gtt.mappable_end && in i915_gem_fault()
1856 pfn = dev_priv->gtt.mappable_base + in i915_gem_fault()
1911 if (!i915_terminally_wedged(&dev_priv->gpu_error)) { in i915_gem_fault()
1944 intel_runtime_pm_put(dev_priv); in i915_gem_fault()
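
i915_gem_fault() (lines 1793-1944) likewise runs under a runtime-PM wakeref, refuses objects larger than the mappable aperture (line 1828), and resolves the fault by inserting a raw pfn computed from the aperture base plus the object's GTT offset. A sketch of that computation, assuming i915_gem_obj_ggtt_offset() and a single faulting page:

	unsigned long pfn;

	pfn = (dev_priv->gtt.mappable_base +
	       i915_gem_obj_ggtt_offset(obj)) >> PAGE_SHIFT;
	ret = vm_insert_pfn(vma, (unsigned long)vmf->virtual_address,
			    pfn + page_offset);
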
1974 i915_gem_release_all_mmaps(struct drm_i915_private *dev_priv) in i915_gem_release_all_mmaps() argument
1978 list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list) in i915_gem_release_all_mmaps()
2031 struct drm_i915_private *dev_priv = obj->base.dev->dev_private; in i915_gem_object_create_mmap_offset() local
2037 dev_priv->mm.shrinker_no_lock_stealing = true; in i915_gem_object_create_mmap_offset()
2050 i915_gem_shrink(dev_priv, in i915_gem_object_create_mmap_offset()
2059 i915_gem_shrink_all(dev_priv); in i915_gem_object_create_mmap_offset()
2062 dev_priv->mm.shrinker_no_lock_stealing = false; in i915_gem_object_create_mmap_offset()
2244 struct drm_i915_private *dev_priv = obj->base.dev->dev_private; in i915_gem_object_get_pages_gtt() local
2285 i915_gem_shrink(dev_priv, in i915_gem_object_get_pages_gtt()
2297 i915_gem_shrink_all(dev_priv); in i915_gem_object_get_pages_gtt()
2339 dev_priv->quirks & QUIRK_PIN_SWIZZLED_PAGES) in i915_gem_object_get_pages_gtt()
2375 struct drm_i915_private *dev_priv = obj->base.dev->dev_private; in i915_gem_object_get_pages() local
2393 list_add_tail(&obj->global_list, &dev_priv->mm.unbound_list); in i915_gem_object_get_pages()
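
Both the mmap-offset path (lines 2031-2062) and the backing-page path (lines 2244-2297) use the same pressure-relief ladder: attempt the allocation, on failure call i915_gem_shrink() to reclaim what is cheap to give up and retry, and only as a last resort fall back to i915_gem_shrink_all(). A hedged sketch, with try_alloc() standing in for drm_gem_create_mmap_offset() or the shmem page walk:

	ret = try_alloc(obj);				/* hypothetical */
	if (ret != -ENOSPC)
		return ret;

	/* First pass: only purgeable objects. */
	i915_gem_shrink(dev_priv,
			obj->base.size >> PAGE_SHIFT,
			I915_SHRINK_BOUND |
			I915_SHRINK_UNBOUND |
			I915_SHRINK_PURGEABLE);
	ret = try_alloc(obj);
	if (ret != -ENOSPC)
		return ret;

	/* Last resort: drop everything reclaimable, then try once more. */
	i915_gem_shrink_all(dev_priv);
	return try_alloc(obj);
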
2467 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_init_seqno() local
2472 for_each_ring(ring, dev_priv, i) { in i915_gem_init_seqno()
2480 for_each_ring(ring, dev_priv, i) { in i915_gem_init_seqno()
2492 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_set_seqno() local
2508 dev_priv->next_seqno = seqno; in i915_gem_set_seqno()
2509 dev_priv->last_seqno = seqno - 1; in i915_gem_set_seqno()
2510 if (dev_priv->last_seqno == 0) in i915_gem_set_seqno()
2511 dev_priv->last_seqno--; in i915_gem_set_seqno()
2519 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_get_seqno() local
2522 if (dev_priv->next_seqno == 0) { in i915_gem_get_seqno()
2527 dev_priv->next_seqno = 1; in i915_gem_get_seqno()
2530 *seqno = dev_priv->last_seqno = dev_priv->next_seqno++; in i915_gem_get_seqno()
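
Sequence numbers are handed out monotonically, and zero is reserved to mean "no request", so both the wraparound path and the set_seqno() bookkeeping (lines 2508-2511) skip it. i915_gem_get_seqno() (lines 2519-2530) is compact enough to sketch in full:

	static int example_get_seqno(struct drm_device *dev, u32 *seqno)
	{
		struct drm_i915_private *dev_priv = dev->dev_private;

		if (dev_priv->next_seqno == 0) {
			/* Wrapped: idle every ring, rebase, restart at 1. */
			int ret = i915_gem_init_seqno(dev, 0);
			if (ret)
				return ret;
			dev_priv->next_seqno = 1;
		}

		*seqno = dev_priv->last_seqno = dev_priv->next_seqno++;
		return 0;
	}

Note that i915_gem_load() (lines 5038-5039) deliberately presets next_seqno near the 32-bit wrap point, so this path is exercised soon after boot rather than only after ~4 billion requests.
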
2544 struct drm_i915_private *dev_priv; in __i915_add_request() local
2553 dev_priv = ring->dev->dev_private; in __i915_add_request()
2616 queue_delayed_work(dev_priv->wq, in __i915_add_request()
2617 &dev_priv->mm.retire_work, in __i915_add_request()
2619 intel_mark_busy(dev_priv->dev); in __i915_add_request()
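
Once a request has been emitted, __i915_add_request() (lines 2544-2619) arms deferred retirement on the driver's private workqueue and marks the device busy. The interval here is an assumption (the driver uses a HZ-based rounding), but the shape is:

	queue_delayed_work(dev_priv->wq,
			   &dev_priv->mm.retire_work,
			   round_jiffies_up_relative(HZ));
	intel_mark_busy(dev_priv->dev);
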
2625 static bool i915_context_is_banned(struct drm_i915_private *dev_priv, in i915_context_is_banned() argument
2640 } else if (i915_stop_ring_allow_ban(dev_priv)) { in i915_context_is_banned()
2641 if (i915_stop_ring_allow_warn(dev_priv)) in i915_context_is_banned()
2650 static void i915_set_reset_status(struct drm_i915_private *dev_priv, in i915_set_reset_status() argument
2662 hs->banned = i915_context_is_banned(dev_priv, ctx); in i915_set_reset_status()
2695 struct drm_i915_private *dev_priv = to_i915(ring->dev); in i915_gem_request_alloc() local
2704 req = kmem_cache_zalloc(dev_priv->requests, GFP_KERNEL); in i915_gem_request_alloc()
2713 req->i915 = dev_priv; in i915_gem_request_alloc()
2752 kmem_cache_free(dev_priv->requests, req); in i915_gem_request_alloc()
2778 static void i915_gem_reset_ring_status(struct drm_i915_private *dev_priv, in i915_gem_reset_ring_status() argument
2791 i915_set_reset_status(dev_priv, request->ctx, ring_hung); in i915_gem_reset_ring_status()
2794 i915_set_reset_status(dev_priv, request->ctx, false); in i915_gem_reset_ring_status()
2797 static void i915_gem_reset_ring_cleanup(struct drm_i915_private *dev_priv, in i915_gem_reset_ring_cleanup() argument
2849 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_reset() local
2858 for_each_ring(ring, dev_priv, i) in i915_gem_reset()
2859 i915_gem_reset_ring_status(dev_priv, ring); in i915_gem_reset()
2861 for_each_ring(ring, dev_priv, i) in i915_gem_reset()
2862 i915_gem_reset_ring_cleanup(dev_priv, ring); in i915_gem_reset()
2926 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_retire_requests() local
2931 for_each_ring(ring, dev_priv, i) { in i915_gem_retire_requests()
2946 mod_delayed_work(dev_priv->wq, in i915_gem_retire_requests()
2947 &dev_priv->mm.idle_work, in i915_gem_retire_requests()
2956 struct drm_i915_private *dev_priv = in i915_gem_retire_work_handler() local
2957 container_of(work, typeof(*dev_priv), mm.retire_work.work); in i915_gem_retire_work_handler()
2958 struct drm_device *dev = dev_priv->dev; in i915_gem_retire_work_handler()
2968 queue_delayed_work(dev_priv->wq, &dev_priv->mm.retire_work, in i915_gem_retire_work_handler()
2975 struct drm_i915_private *dev_priv = in i915_gem_idle_work_handler() local
2976 container_of(work, typeof(*dev_priv), mm.idle_work.work); in i915_gem_idle_work_handler()
2977 struct drm_device *dev = dev_priv->dev; in i915_gem_idle_work_handler()
2981 for_each_ring(ring, dev_priv, i) in i915_gem_idle_work_handler()
2991 for_each_ring(ring, dev_priv, i) in i915_gem_idle_work_handler()
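
The two handlers form a small busy/idle state machine: retire_work re-arms itself for as long as requests remain, i915_gem_retire_requests() (line 2946) nudges the device toward idle by scheduling idle_work, and the idle handler declares the GPU idle once every ring is empty. A sketch of the self-rearming retire side, with locking simplified (cf. lines 2956-2968):

	static void example_retire_work(struct work_struct *work)
	{
		struct drm_i915_private *dev_priv =
			container_of(work, typeof(*dev_priv), mm.retire_work.work);
		struct drm_device *dev = dev_priv->dev;
		bool idle = false;

		if (mutex_trylock(&dev->struct_mutex)) {
			idle = i915_gem_retire_requests(dev);
			mutex_unlock(&dev->struct_mutex);
		}
		if (!idle)
			/* Still busy (or couldn't check): poll again later. */
			queue_delayed_work(dev_priv->wq,
					   &dev_priv->mm.retire_work,
					   round_jiffies_up_relative(HZ));
	}
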
3056 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_wait_ioctl() local
3094 reset_counter = atomic_read(&dev_priv->gpu_error.reset_counter); in i915_gem_wait_ioctl()
3273 struct drm_i915_private *dev_priv = obj->base.dev->dev_private; in __i915_vma_unbind() local
3327 list_move_tail(&obj->global_list, &dev_priv->mm.unbound_list); in __i915_vma_unbind()
3350 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gpu_idle() local
3355 for_each_ring(ring, dev_priv, i) { in i915_gpu_idle()
3426 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_object_bind_to_vm() local
3473 end = min_t(u64, end, dev_priv->gtt.mappable_end); in i915_gem_object_bind_to_vm()
3547 list_move_tail(&obj->global_list, &dev_priv->mm.bound_list); in i915_gem_object_bind_to_vm()
3872 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_set_caching_ioctl() local
3901 intel_runtime_pm_get(dev_priv); in i915_gem_set_caching_ioctl()
3919 intel_runtime_pm_put(dev_priv); in i915_gem_set_caching_ioctl()
4070 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_ring_throttle() local
4077 ret = i915_gem_wait_for_error(&dev_priv->gpu_error); in i915_gem_ring_throttle()
4081 ret = i915_gem_check_wedge(&dev_priv->gpu_error, false); in i915_gem_ring_throttle()
4099 reset_counter = atomic_read(&dev_priv->gpu_error.reset_counter); in i915_gem_ring_throttle()
4109 queue_delayed_work(dev_priv->wq, &dev_priv->mm.retire_work, 0); in i915_gem_ring_throttle()
4165 struct drm_i915_private *dev_priv = obj->base.dev->dev_private; in i915_gem_object_do_pin() local
4170 if (WARN_ON(vm == &dev_priv->mm.aliasing_ppgtt->base)) in i915_gem_object_do_pin()
4320 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_madvise_ioctl() local
4350 dev_priv->quirks & QUIRK_PIN_SWIZZLED_PAGES) { in i915_gem_madvise_ioctl()
4479 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_free_object() local
4482 intel_runtime_pm_get(dev_priv); in i915_gem_free_object()
4494 was_interruptible = dev_priv->mm.interruptible; in i915_gem_free_object()
4495 dev_priv->mm.interruptible = false; in i915_gem_free_object()
4499 dev_priv->mm.interruptible = was_interruptible; in i915_gem_free_object()
4511 dev_priv->quirks & QUIRK_PIN_SWIZZLED_PAGES && in i915_gem_free_object()
4531 i915_gem_info_remove_obj(dev_priv, obj->base.size); in i915_gem_free_object()
4536 intel_runtime_pm_put(dev_priv); in i915_gem_free_object()
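
Freeing an object may have to unbind VMAs, and that unbind must not be aborted by a signal, so the destructor (lines 4479-4536) saves mm.interruptible, forces it off, and restores it afterwards, all while holding a runtime-PM wakeref. The idiom, sketched around an illustrative unbind:

	bool was_interruptible;

	intel_runtime_pm_get(dev_priv);

	was_interruptible = dev_priv->mm.interruptible;
	dev_priv->mm.interruptible = false;	/* waits below must not abort */

	WARN_ON(i915_vma_unbind(vma));		/* illustrative call */

	dev_priv->mm.interruptible = was_interruptible;

	intel_runtime_pm_put(dev_priv);
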
4591 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_stop_ringbuffers() local
4595 for_each_ring(ring, dev_priv, i) in i915_gem_stop_ringbuffers()
4596 dev_priv->gt.stop_ring(ring); in i915_gem_stop_ringbuffers()
4602 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_suspend() local
4615 cancel_delayed_work_sync(&dev_priv->gpu_error.hangcheck_work); in i915_gem_suspend()
4616 cancel_delayed_work_sync(&dev_priv->mm.retire_work); in i915_gem_suspend()
4617 flush_delayed_work(&dev_priv->mm.idle_work); in i915_gem_suspend()
4622 WARN_ON(dev_priv->mm.busy); in i915_gem_suspend()
4635 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_l3_remap() local
4637 u32 *remap_info = dev_priv->l3_parity.remap_info[slice]; in i915_gem_l3_remap()
4665 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_init_swizzling() local
4668 dev_priv->mm.bit_6_swizzle_x == I915_BIT_6_SWIZZLE_NONE) in i915_gem_init_swizzling()
4690 struct drm_i915_private *dev_priv = dev->dev_private; in init_unused_ring() local
4717 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_init_rings() local
4751 intel_cleanup_ring_buffer(&dev_priv->ring[VECS]); in i915_gem_init_rings()
4753 intel_cleanup_ring_buffer(&dev_priv->ring[BCS]); in i915_gem_init_rings()
4755 intel_cleanup_ring_buffer(&dev_priv->ring[VCS]); in i915_gem_init_rings()
4757 intel_cleanup_ring_buffer(&dev_priv->ring[RCS]); in i915_gem_init_rings()
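
The error path of i915_gem_init_rings() (lines 4751-4757) is a classic label ladder: each ring that fails to initialize unwinds exactly the rings brought up before it, newest first. Trimmed to three rings as a sketch (the real function also covers VECS and, where present, a second BSD ring):

	static int example_init_rings(struct drm_device *dev)
	{
		struct drm_i915_private *dev_priv = dev->dev_private;
		int ret;

		ret = intel_init_render_ring_buffer(dev);
		if (ret)
			return ret;

		ret = intel_init_bsd_ring_buffer(dev);
		if (ret)
			goto cleanup_render_ring;

		ret = intel_init_blt_ring_buffer(dev);
		if (ret)
			goto cleanup_bsd_ring;

		return 0;

	cleanup_bsd_ring:
		intel_cleanup_ring_buffer(&dev_priv->ring[VCS]);
	cleanup_render_ring:
		intel_cleanup_ring_buffer(&dev_priv->ring[RCS]);
		return ret;
	}
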
4765 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_init_hw() local
4773 intel_uncore_forcewake_get(dev_priv, FORCEWAKE_ALL); in i915_gem_init_hw()
4775 if (dev_priv->ellc_size) in i915_gem_init_hw()
4804 BUG_ON(!dev_priv->ring[RCS].default_context); in i915_gem_init_hw()
4813 for_each_ring(ring, dev_priv, i) { in i915_gem_init_hw()
4842 ret = i915_gem_set_seqno(dev, dev_priv->next_seqno+0x100); in i915_gem_init_hw()
4847 for_each_ring(ring, dev_priv, i) { in i915_gem_init_hw()
4883 intel_uncore_forcewake_put(dev_priv, FORCEWAKE_ALL); in i915_gem_init_hw()
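
i915_gem_init_hw() (lines 4765-4883) wraps the whole sequence in a forcewake bracket, so the many register accesses during init are done with the GT held awake once rather than waking it for each write:

	intel_uncore_forcewake_get(dev_priv, FORCEWAKE_ALL);
	/* ... swizzling, L3 remap, per-ring init, context setup ... */
	intel_uncore_forcewake_put(dev_priv, FORCEWAKE_ALL);
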
4889 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_init() local
4906 dev_priv->gt.execbuf_submit = i915_gem_ringbuffer_submission; in i915_gem_init()
4907 dev_priv->gt.init_rings = i915_gem_init_rings; in i915_gem_init()
4908 dev_priv->gt.cleanup_ring = intel_cleanup_ring_buffer; in i915_gem_init()
4909 dev_priv->gt.stop_ring = intel_stop_ring_buffer; in i915_gem_init()
4911 dev_priv->gt.execbuf_submit = intel_execlists_submission; in i915_gem_init()
4912 dev_priv->gt.init_rings = intel_logical_rings_init; in i915_gem_init()
4913 dev_priv->gt.cleanup_ring = intel_logical_ring_cleanup; in i915_gem_init()
4914 dev_priv->gt.stop_ring = intel_logical_ring_stop; in i915_gem_init()
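
i915_gem_init() (lines 4889-4914) chooses the submission backend exactly once by filling the dev_priv->gt function-pointer table; every later caller goes through the table (gt.init_rings at line 4935, gt.stop_ring at 4596, gt.cleanup_ring at 4965) without re-testing the mode. Assuming the i915.enable_execlists module parameter is the selector, the switch looks like:

	if (!i915.enable_execlists) {
		/* Legacy ringbuffer submission. */
		dev_priv->gt.execbuf_submit = i915_gem_ringbuffer_submission;
		dev_priv->gt.init_rings = i915_gem_init_rings;
		dev_priv->gt.cleanup_ring = intel_cleanup_ring_buffer;
		dev_priv->gt.stop_ring = intel_stop_ring_buffer;
	} else {
		/* Execlists (logical ring contexts). */
		dev_priv->gt.execbuf_submit = intel_execlists_submission;
		dev_priv->gt.init_rings = intel_logical_rings_init;
		dev_priv->gt.cleanup_ring = intel_logical_ring_cleanup;
		dev_priv->gt.stop_ring = intel_logical_ring_stop;
	}

	/* Later, e.g. at line 4935: */
	ret = dev_priv->gt.init_rings(dev);
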
4923 intel_uncore_forcewake_get(dev_priv, FORCEWAKE_ALL); in i915_gem_init()
4935 ret = dev_priv->gt.init_rings(dev); in i915_gem_init()
4946 atomic_or(I915_WEDGED, &dev_priv->gpu_error.reset_counter); in i915_gem_init()
4951 intel_uncore_forcewake_put(dev_priv, FORCEWAKE_ALL); in i915_gem_init()
4960 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_cleanup_ringbuffer() local
4964 for_each_ring(ring, dev_priv, i) in i915_gem_cleanup_ringbuffer()
4965 dev_priv->gt.cleanup_ring(ring); in i915_gem_cleanup_ringbuffer()
4986 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_load() local
4989 dev_priv->objects = in i915_gem_load()
4994 dev_priv->vmas = in i915_gem_load()
4999 dev_priv->requests = in i915_gem_load()
5005 INIT_LIST_HEAD(&dev_priv->vm_list); in i915_gem_load()
5006 INIT_LIST_HEAD(&dev_priv->context_list); in i915_gem_load()
5007 INIT_LIST_HEAD(&dev_priv->mm.unbound_list); in i915_gem_load()
5008 INIT_LIST_HEAD(&dev_priv->mm.bound_list); in i915_gem_load()
5009 INIT_LIST_HEAD(&dev_priv->mm.fence_list); in i915_gem_load()
5011 init_ring_lists(&dev_priv->ring[i]); in i915_gem_load()
5013 INIT_LIST_HEAD(&dev_priv->fence_regs[i].lru_list); in i915_gem_load()
5014 INIT_DELAYED_WORK(&dev_priv->mm.retire_work, in i915_gem_load()
5016 INIT_DELAYED_WORK(&dev_priv->mm.idle_work, in i915_gem_load()
5018 init_waitqueue_head(&dev_priv->gpu_error.reset_queue); in i915_gem_load()
5020 dev_priv->relative_constants_mode = I915_EXEC_CONSTANTS_REL_GENERAL; in i915_gem_load()
5023 dev_priv->num_fence_regs = 32; in i915_gem_load()
5025 dev_priv->num_fence_regs = 16; in i915_gem_load()
5027 dev_priv->num_fence_regs = 8; in i915_gem_load()
5030 dev_priv->num_fence_regs = in i915_gem_load()
5038 dev_priv->next_seqno = ((u32)~0 - 0x1100); in i915_gem_load()
5039 dev_priv->last_seqno = ((u32)~0 - 0x1101); in i915_gem_load()
5042 INIT_LIST_HEAD(&dev_priv->mm.fence_list); in i915_gem_load()
5046 init_waitqueue_head(&dev_priv->pending_flip_queue); in i915_gem_load()
5048 dev_priv->mm.interruptible = true; in i915_gem_load()
5050 i915_gem_shrinker_init(dev_priv); in i915_gem_load()
5052 mutex_init(&dev_priv->fb_tracking.lock); in i915_gem_load()
5094 file_priv->dev_priv = dev->dev_private; in i915_gem_open()
5138 struct drm_i915_private *dev_priv = o->base.dev->dev_private; in i915_gem_obj_offset() local
5141 WARN_ON(vm == &dev_priv->mm.aliasing_ppgtt->base); in i915_gem_obj_offset()
5216 struct drm_i915_private *dev_priv = o->base.dev->dev_private; in i915_gem_obj_size() local
5219 WARN_ON(vm == &dev_priv->mm.aliasing_ppgtt->base); in i915_gem_obj_size()