Lines matching refs: dev_priv

100 static void bdw_setup_private_ppat(struct drm_i915_private *dev_priv);
101 static void chv_setup_private_ppat(struct drm_i915_private *dev_priv);
861 struct drm_i915_private *dev_priv = ppgtt->base.dev->dev_private; in gen6_dump_ppgtt() local
870 pd_addr = (gen6_pte_t __iomem *)dev_priv->gtt.gsm + in gen6_dump_ppgtt()
933 static void gen6_write_page_range(struct drm_i915_private *dev_priv, in gen6_write_page_range() argument
945 readl(dev_priv->gtt.gsm); in gen6_write_page_range()
983 struct drm_i915_private *dev_priv = to_i915(ppgtt->base.dev); in vgpu_mm_switch() local
1026 struct drm_i915_private *dev_priv = dev->dev_private; in gen6_mm_switch() local
1039 struct drm_i915_private *dev_priv = dev->dev_private; in gen8_ppgtt_enable() local
1043 for_each_ring(ring, dev_priv, j) { in gen8_ppgtt_enable()
1051 struct drm_i915_private *dev_priv = dev->dev_private; in gen7_ppgtt_enable() local
1068 for_each_ring(ring, dev_priv, i) { in gen7_ppgtt_enable()
1077 struct drm_i915_private *dev_priv = dev->dev_private; in gen6_ppgtt_enable() local
1195 struct drm_i915_private *dev_priv = dev->dev_private; in gen6_alloc_va_range() local
1259 readl(dev_priv->gtt.gsm); in gen6_alloc_va_range()
1304 struct drm_i915_private *dev_priv = dev->dev_private; in gen6_ppgtt_allocate_page_directories() local
1312 BUG_ON(!drm_mm_initialized(&dev_priv->gtt.base.mm)); in gen6_ppgtt_allocate_page_directories()
1320 ret = drm_mm_insert_node_in_range_generic(&dev_priv->gtt.base.mm, in gen6_ppgtt_allocate_page_directories()
1323 0, dev_priv->gtt.base.total, in gen6_ppgtt_allocate_page_directories()
1326 ret = i915_gem_evict_something(dev, &dev_priv->gtt.base, in gen6_ppgtt_allocate_page_directories()
1329 0, dev_priv->gtt.base.total, in gen6_ppgtt_allocate_page_directories()
1342 if (ppgtt->node.start < dev_priv->gtt.mappable_end) in gen6_ppgtt_allocate_page_directories()
1371 struct drm_i915_private *dev_priv = dev->dev_private; in gen6_ppgtt_init() local
1374 ppgtt->base.pte_encode = dev_priv->gtt.base.pte_encode; in gen6_ppgtt_init()
1413 ppgtt->pd_addr = (gen6_pte_t __iomem *)dev_priv->gtt.gsm + in gen6_ppgtt_init()
1421 gen6_write_page_range(dev_priv, &ppgtt->pd, 0, ppgtt->base.total); in gen6_ppgtt_init()
1436 struct drm_i915_private *dev_priv = dev->dev_private; in __hw_ppgtt_init() local
1439 ppgtt->base.scratch = dev_priv->gtt.base.scratch; in __hw_ppgtt_init()
1444 return gen8_ppgtt_init(ppgtt, dev_priv->gtt.base.total); in __hw_ppgtt_init()
1448 struct drm_i915_private *dev_priv = dev->dev_private; in i915_ppgtt_init() local
1456 i915_init_vm(dev_priv, &ppgtt->base); in i915_ppgtt_init()
1464 struct drm_i915_private *dev_priv = dev->dev_private; in i915_ppgtt_init_hw() local
1466 struct i915_hw_ppgtt *ppgtt = dev_priv->mm.aliasing_ppgtt; in i915_ppgtt_init_hw()
1488 for_each_ring(ring, dev_priv, i) { in i915_ppgtt_init_hw()
1575 static bool do_idling(struct drm_i915_private *dev_priv) in do_idling() argument
1577 bool ret = dev_priv->mm.interruptible; in do_idling()
1579 if (unlikely(dev_priv->gtt.do_idle_maps)) { in do_idling()
1580 dev_priv->mm.interruptible = false; in do_idling()
1581 if (i915_gpu_idle(dev_priv->dev)) { in do_idling()
1591 static void undo_idling(struct drm_i915_private *dev_priv, bool interruptible) in undo_idling() argument
1593 if (unlikely(dev_priv->gtt.do_idle_maps)) in undo_idling()
1594 dev_priv->mm.interruptible = interruptible; in undo_idling()
1599 struct drm_i915_private *dev_priv = dev->dev_private; in i915_check_and_clear_faults() local
1606 for_each_ring(ring, dev_priv, i) { in i915_check_and_clear_faults()
1623 POSTING_READ(RING_FAULT_REG(&dev_priv->ring[RCS])); in i915_check_and_clear_faults()
1626 static void i915_ggtt_flush(struct drm_i915_private *dev_priv) in i915_ggtt_flush() argument
1628 if (INTEL_INFO(dev_priv->dev)->gen < 6) { in i915_ggtt_flush()
1638 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_suspend_gtt_mappings() local
1648 dev_priv->gtt.base.clear_range(&dev_priv->gtt.base, in i915_gem_suspend_gtt_mappings()
1649 dev_priv->gtt.base.start, in i915_gem_suspend_gtt_mappings()
1650 dev_priv->gtt.base.total, in i915_gem_suspend_gtt_mappings()
1653 i915_ggtt_flush(dev_priv); in i915_gem_suspend_gtt_mappings()
1658 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_restore_gtt_mappings() local
1665 dev_priv->gtt.base.clear_range(&dev_priv->gtt.base, in i915_gem_restore_gtt_mappings()
1666 dev_priv->gtt.base.start, in i915_gem_restore_gtt_mappings()
1667 dev_priv->gtt.base.total, in i915_gem_restore_gtt_mappings()
1670 list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list) { in i915_gem_restore_gtt_mappings()
1672 &dev_priv->gtt.base); in i915_gem_restore_gtt_mappings()
1691 chv_setup_private_ppat(dev_priv); in i915_gem_restore_gtt_mappings()
1693 bdw_setup_private_ppat(dev_priv); in i915_gem_restore_gtt_mappings()
1699 list_for_each_entry(vm, &dev_priv->vm_list, global_link) { in i915_gem_restore_gtt_mappings()
1707 ppgtt = dev_priv->mm.aliasing_ppgtt; in i915_gem_restore_gtt_mappings()
1709 gen6_write_page_range(dev_priv, &ppgtt->pd, in i915_gem_restore_gtt_mappings()
1714 i915_ggtt_flush(dev_priv); in i915_gem_restore_gtt_mappings()
1745 struct drm_i915_private *dev_priv = vm->dev->dev_private; in gen8_ggtt_insert_entries() local
1748 (gen8_pte_t __iomem *)dev_priv->gtt.gsm + first_entry; in gen8_ggtt_insert_entries()
1791 struct drm_i915_private *dev_priv = vm->dev->dev_private; in gen6_ggtt_insert_entries() local
1794 (gen6_pte_t __iomem *)dev_priv->gtt.gsm + first_entry; in gen6_ggtt_insert_entries()
1829 struct drm_i915_private *dev_priv = vm->dev->dev_private; in gen8_ggtt_clear_range() local
1833 (gen8_pte_t __iomem *) dev_priv->gtt.gsm + first_entry; in gen8_ggtt_clear_range()
1834 const int max_entries = gtt_total_entries(dev_priv->gtt) - first_entry; in gen8_ggtt_clear_range()
1855 struct drm_i915_private *dev_priv = vm->dev->dev_private; in gen6_ggtt_clear_range() local
1859 (gen6_pte_t __iomem *) dev_priv->gtt.gsm + first_entry; in gen6_ggtt_clear_range()
1860 const int max_entries = gtt_total_entries(dev_priv->gtt) - first_entry; in gen6_ggtt_clear_range()
1914 struct drm_i915_private *dev_priv = dev->dev_private; in ggtt_bind_vma() local
1936 if (!dev_priv->mm.aliasing_ppgtt || flags & GLOBAL_BIND) { in ggtt_bind_vma()
1946 if (dev_priv->mm.aliasing_ppgtt && in ggtt_bind_vma()
1949 struct i915_hw_ppgtt *appgtt = dev_priv->mm.aliasing_ppgtt; in ggtt_bind_vma()
1960 struct drm_i915_private *dev_priv = dev->dev_private; in ggtt_unbind_vma() local
1972 struct i915_hw_ppgtt *appgtt = dev_priv->mm.aliasing_ppgtt; in ggtt_unbind_vma()
1984 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_gtt_finish_object() local
1987 interruptible = do_idling(dev_priv); in i915_gem_gtt_finish_object()
1994 undo_idling(dev_priv, interruptible); in i915_gem_gtt_finish_object()
2028 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_setup_global_gtt() local
2029 struct i915_address_space *ggtt_vm = &dev_priv->gtt.base; in i915_gem_setup_global_gtt()
2040 dev_priv->gtt.base.start = start; in i915_gem_setup_global_gtt()
2041 dev_priv->gtt.base.total = end - start; in i915_gem_setup_global_gtt()
2050 dev_priv->gtt.base.mm.color_adjust = i915_gtt_color_adjust; in i915_gem_setup_global_gtt()
2053 list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list) { in i915_gem_setup_global_gtt()
2092 dev_priv->mm.aliasing_ppgtt = ppgtt; in i915_gem_setup_global_gtt()
2100 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_init_global_gtt() local
2103 gtt_size = dev_priv->gtt.base.total; in i915_gem_init_global_gtt()
2104 mappable_size = dev_priv->gtt.mappable_end; in i915_gem_init_global_gtt()
2111 struct drm_i915_private *dev_priv = dev->dev_private; in i915_global_gtt_cleanup() local
2112 struct i915_address_space *vm = &dev_priv->gtt.base; in i915_global_gtt_cleanup()
2114 if (dev_priv->mm.aliasing_ppgtt) { in i915_global_gtt_cleanup()
2115 struct i915_hw_ppgtt *ppgtt = dev_priv->mm.aliasing_ppgtt; in i915_global_gtt_cleanup()
2133 struct drm_i915_private *dev_priv = dev->dev_private; in setup_scratch_page() local
2150 dev_priv->gtt.base.scratch.page = page; in setup_scratch_page()
2151 dev_priv->gtt.base.scratch.addr = dma_addr; in setup_scratch_page()
2158 struct drm_i915_private *dev_priv = dev->dev_private; in teardown_scratch_page() local
2159 struct page *page = dev_priv->gtt.base.scratch.page; in teardown_scratch_page()
2162 pci_unmap_page(dev->pdev, dev_priv->gtt.base.scratch.addr, in teardown_scratch_page()
2248 struct drm_i915_private *dev_priv = dev->dev_private; in ggtt_probe_common() local
2256 dev_priv->gtt.gsm = ioremap_wc(gtt_phys_addr, gtt_size); in ggtt_probe_common()
2257 if (!dev_priv->gtt.gsm) { in ggtt_probe_common()
2266 iounmap(dev_priv->gtt.gsm); in ggtt_probe_common()
2275 static void bdw_setup_private_ppat(struct drm_i915_private *dev_priv) in bdw_setup_private_ppat() argument
2288 if (!USES_PPGTT(dev_priv->dev)) in bdw_setup_private_ppat()
2310 static void chv_setup_private_ppat(struct drm_i915_private *dev_priv) in chv_setup_private_ppat() argument
2351 struct drm_i915_private *dev_priv = dev->dev_private; in gen8_gmch_probe() local
2379 chv_setup_private_ppat(dev_priv); in gen8_gmch_probe()
2381 bdw_setup_private_ppat(dev_priv); in gen8_gmch_probe()
2385 dev_priv->gtt.base.clear_range = gen8_ggtt_clear_range; in gen8_gmch_probe()
2386 dev_priv->gtt.base.insert_entries = gen8_ggtt_insert_entries; in gen8_gmch_probe()
2397 struct drm_i915_private *dev_priv = dev->dev_private; in gen6_gmch_probe() local
2410 dev_priv->gtt.mappable_end); in gen6_gmch_probe()
2425 dev_priv->gtt.base.clear_range = gen6_ggtt_clear_range; in gen6_gmch_probe()
2426 dev_priv->gtt.base.insert_entries = gen6_ggtt_insert_entries; in gen6_gmch_probe()
2446 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gmch_probe() local
2449 ret = intel_gmch_probe(dev_priv->bridge_dev, dev_priv->dev->pdev, NULL); in i915_gmch_probe()
2457 dev_priv->gtt.do_idle_maps = needs_idle_maps(dev_priv->dev); in i915_gmch_probe()
2458 dev_priv->gtt.base.clear_range = i915_ggtt_clear_range; in i915_gmch_probe()
2460 if (unlikely(dev_priv->gtt.do_idle_maps)) in i915_gmch_probe()
2473 struct drm_i915_private *dev_priv = dev->dev_private; in i915_gem_gtt_init() local
2474 struct i915_gtt *gtt = &dev_priv->gtt; in i915_gem_gtt_init()
2483 if (IS_HASWELL(dev) && dev_priv->ellc_size) in i915_gem_gtt_init()
2494 dev_priv->gtt.gtt_probe = gen8_gmch_probe; in i915_gem_gtt_init()
2495 dev_priv->gtt.base.cleanup = gen6_gmch_remove; in i915_gem_gtt_init()
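Taken together, the matches above reduce to one idiom: dev_priv is recovered from a struct drm_device (dev->dev_private, or equivalently to_i915(dev)), and the global GTT state is then reached through dev_priv->gtt. A minimal sketch of that idiom follows; the helper itself is hypothetical (it is not part of i915_gem_gtt.c) and it assumes the driver headers and the i915 structure layout that these line numbers refer to.

/*
 * Hypothetical helper, not from i915_gem_gtt.c: illustrates the access
 * pattern shared by the matches above.  Assumes the drm/i915 headers and
 * the i915_gtt layout of this kernel era.
 */
static bool example_is_mappable(struct drm_device *dev, u64 offset)
{
	/* Same derivation used throughout the file; to_i915(dev) is equivalent. */
	struct drm_i915_private *dev_priv = dev->dev_private;

	/* Offsets below mappable_end sit in the CPU-visible GGTT aperture. */
	return offset < dev_priv->gtt.mappable_end;
}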