Lines matching references to ppgtt
(The leading number on each entry is the line number in the source file, "in func()" names the enclosing function, and the trailing "argument"/"local" annotation marks how ppgtt is used at that reference.)

361 #define kunmap_px(ppgtt, vaddr) kunmap_page_dma((ppgtt)->base.dev, (vaddr))  argument
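
The kunmap_px() macro above pairs with a kmap_px() throughout the gen8/gen6 paths below: the backing page of a page-table structure is mapped, entries are written, and the unmap takes care of any CPU cache flushing the platform needs. A minimal sketch of the pairing, essentially what gen8_setup_page_directory() (line 621) does, assuming the driver's internal types; the sketch_* name is hypothetical:

/* Illustrative sketch of the kmap_px()/kunmap_px() pairing; not verbatim
 * kernel source.
 */
static void sketch_write_pdpe(struct i915_hw_ppgtt *ppgtt,
			      struct i915_page_directory_pointer *pdp,
			      struct i915_page_directory *pd,
			      int index)
{
	gen8_ppgtt_pdpe_t *vaddr;

	vaddr = kmap_px(pdp);		/* kmap_atomic() of the backing page */
	vaddr[index] = gen8_pdpe_encode(px_dma(pd), I915_CACHE_LLC);
	kunmap_px(ppgtt, vaddr);	/* cache flush where needed, then kunmap_atomic() */
}
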
621 gen8_setup_page_directory(struct i915_hw_ppgtt *ppgtt, in gen8_setup_page_directory() argument
628 if (!USES_FULL_48BIT_PPGTT(ppgtt->base.dev)) in gen8_setup_page_directory()
633 kunmap_px(ppgtt, page_directorypo); in gen8_setup_page_directory()
637 gen8_setup_page_directory_pointer(struct i915_hw_ppgtt *ppgtt, in gen8_setup_page_directory_pointer() argument
644 WARN_ON(!USES_FULL_48BIT_PPGTT(ppgtt->base.dev)); in gen8_setup_page_directory_pointer()
646 kunmap_px(ppgtt, pagemap); in gen8_setup_page_directory_pointer()
674 static int gen8_legacy_mm_switch(struct i915_hw_ppgtt *ppgtt, in gen8_legacy_mm_switch() argument
680 const dma_addr_t pd_daddr = i915_page_dir_dma_addr(ppgtt, i); in gen8_legacy_mm_switch()
690 static int gen8_48b_mm_switch(struct i915_hw_ppgtt *ppgtt, in gen8_48b_mm_switch() argument
693 return gen8_write_pdp(req, 0, px_dma(&ppgtt->pml4)); in gen8_48b_mm_switch()
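
The two gen8 switch_mm callbacks above differ only in what they load: legacy 32-bit mode writes all four page-directory addresses into the ring's PDP registers, while 48-bit mode writes the single PML4 address (line 693). A sketch of the legacy loop, built on the gen8_write_pdp()/i915_page_dir_dma_addr() helpers it references; the sketch_* name is hypothetical and details are simplified:

static int sketch_gen8_legacy_mm_switch(struct i915_hw_ppgtt *ppgtt,
					struct drm_i915_gem_request *req)
{
	int i, ret;

	/* Load all four PDP entries, highest first; gen8_write_pdp() emits
	 * MI_LOAD_REGISTER_IMM writes into the request's ring. */
	for (i = 3; i >= 0; i--) {
		const dma_addr_t pd_daddr = i915_page_dir_dma_addr(ppgtt, i);

		ret = gen8_write_pdp(req, i, pd_daddr);
		if (ret)
			return ret;
	}

	return 0;
}
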
702 struct i915_hw_ppgtt *ppgtt = in gen8_ppgtt_clear_pte_range() local
742 kunmap_px(ppgtt, pt); in gen8_ppgtt_clear_pte_range()
758 struct i915_hw_ppgtt *ppgtt = in gen8_ppgtt_clear_range() local
764 gen8_ppgtt_clear_pte_range(vm, &ppgtt->pdp, start, length, in gen8_ppgtt_clear_range()
770 gen8_for_each_pml4e(pdp, &ppgtt->pml4, start, length, templ4, pml4e) { in gen8_ppgtt_clear_range()
784 struct i915_hw_ppgtt *ppgtt = in gen8_ppgtt_insert_pte_entries() local
804 kunmap_px(ppgtt, pt_vaddr); in gen8_ppgtt_insert_pte_entries()
816 kunmap_px(ppgtt, pt_vaddr); in gen8_ppgtt_insert_pte_entries()
825 struct i915_hw_ppgtt *ppgtt = in gen8_ppgtt_insert_entries() local
832 gen8_ppgtt_insert_pte_entries(vm, &ppgtt->pdp, &sg_iter, start, in gen8_ppgtt_insert_entries()
839 gen8_for_each_pml4e(pdp, &ppgtt->pml4, start, length, templ4, pml4e) { in gen8_ppgtt_insert_entries()
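
The insertion path above walks the object's sg list and fills PTEs one page table at a time, unmapping and stepping to the next PDE/PDPE whenever a table fills up. A simplified sketch of that loop, assuming the driver's gen8 index/encode helpers; bounds handling against I915_PDPES_PER_PDP is elided and the sketch_* name is hypothetical:

static void sketch_gen8_insert(struct i915_address_space *vm,
			       struct i915_page_directory_pointer *pdp,
			       struct sg_page_iter *sg_iter,
			       uint64_t start,
			       enum i915_cache_level cache_level)
{
	struct i915_hw_ppgtt *ppgtt =
		container_of(vm, struct i915_hw_ppgtt, base);
	gen8_pte_t *pt_vaddr = NULL;
	unsigned pdpe = gen8_pdpe_index(start);
	unsigned pde = gen8_pde_index(start);
	unsigned pte = gen8_pte_index(start);

	while (__sg_page_iter_next(sg_iter)) {
		if (pt_vaddr == NULL) {
			struct i915_page_directory *pd = pdp->page_directory[pdpe];
			struct i915_page_table *pt = pd->page_table[pde];

			pt_vaddr = kmap_px(pt);
		}

		pt_vaddr[pte] = gen8_pte_encode(sg_page_iter_dma_address(sg_iter),
						cache_level, true);

		/* On crossing a page-table boundary, unmap and step to the
		 * next PDE (and PDPE once the directory is exhausted). */
		if (++pte == GEN8_PTES) {
			kunmap_px(ppgtt, pt_vaddr);
			pt_vaddr = NULL;
			if (++pde == I915_PDES) {
				pdpe++;
				pde = 0;
			}
			pte = 0;
		}
	}

	if (pt_vaddr)
		kunmap_px(ppgtt, pt_vaddr);
}
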
902 static int gen8_ppgtt_notify_vgt(struct i915_hw_ppgtt *ppgtt, bool create) in gen8_ppgtt_notify_vgt() argument
905 struct drm_device *dev = ppgtt->base.dev; in gen8_ppgtt_notify_vgt()
911 u64 daddr = px_dma(&ppgtt->pml4); in gen8_ppgtt_notify_vgt()
920 u64 daddr = i915_page_dir_dma_addr(ppgtt, i); in gen8_ppgtt_notify_vgt()
964 static void gen8_ppgtt_cleanup_4lvl(struct i915_hw_ppgtt *ppgtt) in gen8_ppgtt_cleanup_4lvl() argument
968 for_each_set_bit(i, ppgtt->pml4.used_pml4es, GEN8_PML4ES_PER_PML4) { in gen8_ppgtt_cleanup_4lvl()
969 if (WARN_ON(!ppgtt->pml4.pdps[i])) in gen8_ppgtt_cleanup_4lvl()
972 gen8_ppgtt_cleanup_3lvl(ppgtt->base.dev, ppgtt->pml4.pdps[i]); in gen8_ppgtt_cleanup_4lvl()
975 cleanup_px(ppgtt->base.dev, &ppgtt->pml4); in gen8_ppgtt_cleanup_4lvl()
980 struct i915_hw_ppgtt *ppgtt = in gen8_ppgtt_cleanup() local
984 gen8_ppgtt_notify_vgt(ppgtt, false); in gen8_ppgtt_cleanup()
986 if (!USES_FULL_48BIT_PPGTT(ppgtt->base.dev)) in gen8_ppgtt_cleanup()
987 gen8_ppgtt_cleanup_3lvl(ppgtt->base.dev, &ppgtt->pdp); in gen8_ppgtt_cleanup()
989 gen8_ppgtt_cleanup_4lvl(ppgtt); in gen8_ppgtt_cleanup()
1208 static void mark_tlbs_dirty(struct i915_hw_ppgtt *ppgtt) in mark_tlbs_dirty() argument
1210 ppgtt->pd_dirty_rings = INTEL_INFO(ppgtt->base.dev)->ring_mask; in mark_tlbs_dirty()
1218 struct i915_hw_ppgtt *ppgtt = in gen8_alloc_va_range_3lvl() local
1291 trace_i915_page_table_entry_map(&ppgtt->base, pde, pt, in gen8_alloc_va_range_3lvl()
1300 kunmap_px(ppgtt, page_directory); in gen8_alloc_va_range_3lvl()
1302 gen8_setup_page_directory(ppgtt, pdp, pd, pdpe); in gen8_alloc_va_range_3lvl()
1306 mark_tlbs_dirty(ppgtt); in gen8_alloc_va_range_3lvl()
1320 mark_tlbs_dirty(ppgtt); in gen8_alloc_va_range_3lvl()
1330 struct i915_hw_ppgtt *ppgtt = in gen8_alloc_va_range_4lvl() local
1359 gen8_setup_page_directory_pointer(ppgtt, pml4, pdp, pml4e); in gen8_alloc_va_range_4lvl()
1377 struct i915_hw_ppgtt *ppgtt = in gen8_alloc_va_range() local
1381 return gen8_alloc_va_range_4lvl(vm, &ppgtt->pml4, start, length); in gen8_alloc_va_range()
1383 return gen8_alloc_va_range_3lvl(vm, &ppgtt->pdp, start, length); in gen8_alloc_va_range()
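
At the end of the 3-level allocation path above, freshly allocated page tables are written into their page directory and the directory is linked into the PDP. A sketch of that hook-up step, using the kmap_px()/gen8_pde_encode()/gen8_setup_page_directory() helpers referenced in this file; the real code iterates with the gen8_for_each_* macros and tracks used-entry bitmaps, and the sketch_* name is hypothetical:

static void sketch_map_new_pt(struct i915_hw_ppgtt *ppgtt,
			      struct i915_page_directory_pointer *pdp,
			      struct i915_page_directory *pd,
			      struct i915_page_table *pt,
			      unsigned pde, unsigned pdpe)
{
	gen8_pde_t *page_directory = kmap_px(pd);

	/* Point the PDE at the new page table, then publish the page
	 * directory itself into the PDP and flag the rings for reload. */
	page_directory[pde] = gen8_pde_encode(px_dma(pt), I915_CACHE_LLC);
	kunmap_px(ppgtt, page_directory);

	gen8_setup_page_directory(ppgtt, pdp, pd, pdpe);
	mark_tlbs_dirty(ppgtt);
}
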
1444 static void gen8_dump_ppgtt(struct i915_hw_ppgtt *ppgtt, struct seq_file *m) in gen8_dump_ppgtt() argument
1446 struct i915_address_space *vm = &ppgtt->base; in gen8_dump_ppgtt()
1447 uint64_t start = ppgtt->base.start; in gen8_dump_ppgtt()
1448 uint64_t length = ppgtt->base.total; in gen8_dump_ppgtt()
1453 gen8_dump_pdp(&ppgtt->pdp, start, length, scratch_pte, m); in gen8_dump_ppgtt()
1456 struct i915_pml4 *pml4 = &ppgtt->pml4; in gen8_dump_ppgtt()
1469 static int gen8_preallocate_top_level_pdps(struct i915_hw_ppgtt *ppgtt) in gen8_preallocate_top_level_pdps() argument
1485 ret = gen8_ppgtt_alloc_page_directories(&ppgtt->base, &ppgtt->pdp, in gen8_preallocate_top_level_pdps()
1489 *ppgtt->pdp.used_pdpes = *new_page_dirs; in gen8_preallocate_top_level_pdps()
1503 static int gen8_ppgtt_init(struct i915_hw_ppgtt *ppgtt) in gen8_ppgtt_init() argument
1507 ret = gen8_init_scratch(&ppgtt->base); in gen8_ppgtt_init()
1511 ppgtt->base.start = 0; in gen8_ppgtt_init()
1512 ppgtt->base.cleanup = gen8_ppgtt_cleanup; in gen8_ppgtt_init()
1513 ppgtt->base.allocate_va_range = gen8_alloc_va_range; in gen8_ppgtt_init()
1514 ppgtt->base.insert_entries = gen8_ppgtt_insert_entries; in gen8_ppgtt_init()
1515 ppgtt->base.clear_range = gen8_ppgtt_clear_range; in gen8_ppgtt_init()
1516 ppgtt->base.unbind_vma = ppgtt_unbind_vma; in gen8_ppgtt_init()
1517 ppgtt->base.bind_vma = ppgtt_bind_vma; in gen8_ppgtt_init()
1518 ppgtt->debug_dump = gen8_dump_ppgtt; in gen8_ppgtt_init()
1520 if (USES_FULL_48BIT_PPGTT(ppgtt->base.dev)) { in gen8_ppgtt_init()
1521 ret = setup_px(ppgtt->base.dev, &ppgtt->pml4); in gen8_ppgtt_init()
1525 gen8_initialize_pml4(&ppgtt->base, &ppgtt->pml4); in gen8_ppgtt_init()
1527 ppgtt->base.total = 1ULL << 48; in gen8_ppgtt_init()
1528 ppgtt->switch_mm = gen8_48b_mm_switch; in gen8_ppgtt_init()
1530 ret = __pdp_init(ppgtt->base.dev, &ppgtt->pdp); in gen8_ppgtt_init()
1534 ppgtt->base.total = 1ULL << 32; in gen8_ppgtt_init()
1535 ppgtt->switch_mm = gen8_legacy_mm_switch; in gen8_ppgtt_init()
1536 trace_i915_page_directory_pointer_entry_alloc(&ppgtt->base, in gen8_ppgtt_init()
1540 if (intel_vgpu_active(ppgtt->base.dev)) { in gen8_ppgtt_init()
1541 ret = gen8_preallocate_top_level_pdps(ppgtt); in gen8_ppgtt_init()
1547 if (intel_vgpu_active(ppgtt->base.dev)) in gen8_ppgtt_init()
1548 gen8_ppgtt_notify_vgt(ppgtt, true); in gen8_ppgtt_init()
1553 gen8_free_scratch(&ppgtt->base); in gen8_ppgtt_init()
1557 static void gen6_dump_ppgtt(struct i915_hw_ppgtt *ppgtt, struct seq_file *m) in gen6_dump_ppgtt() argument
1559 struct i915_address_space *vm = &ppgtt->base; in gen6_dump_ppgtt()
1564 uint32_t start = ppgtt->base.start, length = ppgtt->base.total; in gen6_dump_ppgtt()
1569 gen6_for_each_pde(unused, &ppgtt->pd, start, length, temp, pde) { in gen6_dump_ppgtt()
1572 const dma_addr_t pt_addr = px_dma(ppgtt->pd.page_table[pde]); in gen6_dump_ppgtt()
1573 pd_entry = readl(ppgtt->pd_addr + pde); in gen6_dump_ppgtt()
1583 pt_vaddr = kmap_px(ppgtt->pd.page_table[pde]); in gen6_dump_ppgtt()
1606 kunmap_px(ppgtt, pt_vaddr); in gen6_dump_ppgtt()
1615 struct i915_hw_ppgtt *ppgtt = in gen6_write_pde() local
1622 writel(pd_entry, ppgtt->pd_addr + pde); in gen6_write_pde()
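
The writel() above is the tail of gen6_write_pde(): a PDE is built from the page table's DMA address and written through the ioremapped pd_addr slot, because the gen6 page directory lives inside the global GTT. A sketch with the encoding step included; GEN6_PDE_ADDR_ENCODE/GEN6_PDE_VALID are assumed from the driver's headers and the sketch_* name is hypothetical:

static void sketch_gen6_write_pde(struct i915_hw_ppgtt *ppgtt,
				  const int pde,
				  struct i915_page_table *pt)
{
	u32 pd_entry;

	pd_entry = GEN6_PDE_ADDR_ENCODE(px_dma(pt));
	pd_entry |= GEN6_PDE_VALID;

	/* Caller is responsible for making the write visible (e.g. by a
	 * read-back from the GSM) before the GPU uses the new PDE. */
	writel(pd_entry, ppgtt->pd_addr + pde);
}
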
1642 static uint32_t get_pd_offset(struct i915_hw_ppgtt *ppgtt) in get_pd_offset() argument
1644 BUG_ON(ppgtt->pd.base.ggtt_offset & 0x3f); in get_pd_offset()
1646 return (ppgtt->pd.base.ggtt_offset / 64) << 16; in get_pd_offset()
1649 static int hsw_mm_switch(struct i915_hw_ppgtt *ppgtt, in hsw_mm_switch() argument
1668 intel_ring_emit(ring, get_pd_offset(ppgtt)); in hsw_mm_switch()
1675 static int vgpu_mm_switch(struct i915_hw_ppgtt *ppgtt, in vgpu_mm_switch() argument
1679 struct drm_i915_private *dev_priv = to_i915(ppgtt->base.dev); in vgpu_mm_switch()
1682 I915_WRITE(RING_PP_DIR_BASE(ring), get_pd_offset(ppgtt)); in vgpu_mm_switch()
1686 static int gen7_mm_switch(struct i915_hw_ppgtt *ppgtt, in gen7_mm_switch() argument
1705 intel_ring_emit(ring, get_pd_offset(ppgtt)); in gen7_mm_switch()
1719 static int gen6_mm_switch(struct i915_hw_ppgtt *ppgtt, in gen6_mm_switch() argument
1723 struct drm_device *dev = ppgtt->base.dev; in gen6_mm_switch()
1728 I915_WRITE(RING_PP_DIR_BASE(ring), get_pd_offset(ppgtt)); in gen6_mm_switch()
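
The gen6/gen7 switch_mm callbacks above reload PP_DIR_BASE (and PP_DIR_DCLV) for the ring after a TLB flush. A sketch of the gen7 variant; helper names and the exact way register writes are emitted are assumed from this era of the driver and may differ slightly between kernel versions:

static int sketch_gen7_mm_switch(struct i915_hw_ppgtt *ppgtt,
				 struct drm_i915_gem_request *req)
{
	struct intel_engine_cs *ring = req->ring;
	int ret;

	/* TLBs must be flushed and invalidated before switching the PD */
	ret = ring->flush(req, I915_GEM_GPU_DOMAINS, I915_GEM_GPU_DOMAINS);
	if (ret)
		return ret;

	ret = intel_ring_begin(req, 6);
	if (ret)
		return ret;

	intel_ring_emit(ring, MI_LOAD_REGISTER_IMM(2));
	intel_ring_emit(ring, RING_PP_DIR_DCLV(ring));
	intel_ring_emit(ring, PP_DIR_DCLV_2G);
	intel_ring_emit(ring, RING_PP_DIR_BASE(ring));
	intel_ring_emit(ring, get_pd_offset(ppgtt));
	intel_ring_emit(ring, MI_NOOP);
	intel_ring_advance(ring);

	return 0;
}
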
1798 struct i915_hw_ppgtt *ppgtt = in gen6_ppgtt_clear_range() local
1815 pt_vaddr = kmap_px(ppgtt->pd.page_table[act_pt]); in gen6_ppgtt_clear_range()
1820 kunmap_px(ppgtt, pt_vaddr); in gen6_ppgtt_clear_range()
1833 struct i915_hw_ppgtt *ppgtt = in gen6_ppgtt_insert_entries() local
1844 pt_vaddr = kmap_px(ppgtt->pd.page_table[act_pt]); in gen6_ppgtt_insert_entries()
1851 kunmap_px(ppgtt, pt_vaddr); in gen6_ppgtt_insert_entries()
1858 kunmap_px(ppgtt, pt_vaddr); in gen6_ppgtt_insert_entries()
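
The gen6 insertion path mirrors the gen8 one with a single page directory: walk the sg list, map one page table at a time, fill consecutive PTEs, and advance to the next table every GEN6_PTES entries. A simplified sketch based on gen6_ppgtt_insert_entries(); the sketch_* name is hypothetical:

static void sketch_gen6_insert(struct i915_address_space *vm,
			       struct sg_table *pages,
			       uint64_t start,
			       enum i915_cache_level cache_level, u32 flags)
{
	struct i915_hw_ppgtt *ppgtt =
		container_of(vm, struct i915_hw_ppgtt, base);
	unsigned first_entry = start >> PAGE_SHIFT;
	unsigned act_pt = first_entry / GEN6_PTES;
	unsigned act_pte = first_entry % GEN6_PTES;
	gen6_pte_t *pt_vaddr = NULL;
	struct sg_page_iter sg_iter;

	for_each_sg_page(pages->sgl, &sg_iter, pages->nents, 0) {
		if (pt_vaddr == NULL)
			pt_vaddr = kmap_px(ppgtt->pd.page_table[act_pt]);

		pt_vaddr[act_pte] =
			vm->pte_encode(sg_page_iter_dma_address(&sg_iter),
				       cache_level, true, flags);

		if (++act_pte == GEN6_PTES) {
			kunmap_px(ppgtt, pt_vaddr);
			pt_vaddr = NULL;
			act_pt++;
			act_pte = 0;
		}
	}

	if (pt_vaddr)
		kunmap_px(ppgtt, pt_vaddr);
}
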
1867 struct i915_hw_ppgtt *ppgtt = in gen6_alloc_va_range() local
1874 if (WARN_ON(start_in + length_in > ppgtt->base.total)) in gen6_alloc_va_range()
1887 gen6_for_each_pde(pt, &ppgtt->pd, start, length, temp, pde) { in gen6_alloc_va_range()
1904 ppgtt->pd.page_table[pde] = pt; in gen6_alloc_va_range()
1912 gen6_for_each_pde(pt, &ppgtt->pd, start, length, temp, pde) { in gen6_alloc_va_range()
1920 gen6_write_pde(&ppgtt->pd, pde, pt); in gen6_alloc_va_range()
1936 mark_tlbs_dirty(ppgtt); in gen6_alloc_va_range()
1941 struct i915_page_table *pt = ppgtt->pd.page_table[pde]; in gen6_alloc_va_range()
1943 ppgtt->pd.page_table[pde] = vm->scratch_pt; in gen6_alloc_va_range()
1947 mark_tlbs_dirty(ppgtt); in gen6_alloc_va_range()
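
Lines 1941-1947 above are the unwind path of gen6_alloc_va_range(): if allocation fails part-way, every page table created during the call is pointed back at the scratch page table and freed, and the rings are marked for a PD reload. A sketch of that path; the new_page_tables bitmap is the caller's bookkeeping and the sketch_* name is hypothetical:

static void sketch_gen6_unwind(struct i915_address_space *vm,
			       struct i915_hw_ppgtt *ppgtt,
			       unsigned long *new_page_tables)
{
	uint32_t pde;

	for_each_set_bit(pde, new_page_tables, I915_PDES) {
		struct i915_page_table *pt = ppgtt->pd.page_table[pde];

		/* Restore the scratch table and release the one allocated
		 * during this call. */
		ppgtt->pd.page_table[pde] = vm->scratch_pt;
		free_pt(vm->dev, pt);
	}

	mark_tlbs_dirty(ppgtt);
}
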
1980 struct i915_hw_ppgtt *ppgtt = in gen6_ppgtt_cleanup() local
1985 drm_mm_remove_node(&ppgtt->node); in gen6_ppgtt_cleanup()
1987 gen6_for_all_pdes(pt, ppgtt, pde) { in gen6_ppgtt_cleanup()
1989 free_pt(ppgtt->base.dev, pt); in gen6_ppgtt_cleanup()
1995 static int gen6_ppgtt_allocate_page_directories(struct i915_hw_ppgtt *ppgtt) in gen6_ppgtt_allocate_page_directories() argument
1997 struct i915_address_space *vm = &ppgtt->base; in gen6_ppgtt_allocate_page_directories()
1998 struct drm_device *dev = ppgtt->base.dev; in gen6_ppgtt_allocate_page_directories()
2015 &ppgtt->node, GEN6_PD_SIZE, in gen6_ppgtt_allocate_page_directories()
2036 if (ppgtt->node.start < dev_priv->gtt.mappable_end) in gen6_ppgtt_allocate_page_directories()
2046 static int gen6_ppgtt_alloc(struct i915_hw_ppgtt *ppgtt) in gen6_ppgtt_alloc() argument
2048 return gen6_ppgtt_allocate_page_directories(ppgtt); in gen6_ppgtt_alloc()
2051 static void gen6_scratch_va_range(struct i915_hw_ppgtt *ppgtt, in gen6_scratch_va_range() argument
2057 gen6_for_each_pde(unused, &ppgtt->pd, start, length, temp, pde) in gen6_scratch_va_range()
2058 ppgtt->pd.page_table[pde] = ppgtt->base.scratch_pt; in gen6_scratch_va_range()
2061 static int gen6_ppgtt_init(struct i915_hw_ppgtt *ppgtt) in gen6_ppgtt_init() argument
2063 struct drm_device *dev = ppgtt->base.dev; in gen6_ppgtt_init()
2067 ppgtt->base.pte_encode = dev_priv->gtt.base.pte_encode; in gen6_ppgtt_init()
2069 ppgtt->switch_mm = gen6_mm_switch; in gen6_ppgtt_init()
2071 ppgtt->switch_mm = hsw_mm_switch; in gen6_ppgtt_init()
2073 ppgtt->switch_mm = gen7_mm_switch; in gen6_ppgtt_init()
2078 ppgtt->switch_mm = vgpu_mm_switch; in gen6_ppgtt_init()
2080 ret = gen6_ppgtt_alloc(ppgtt); in gen6_ppgtt_init()
2084 ppgtt->base.allocate_va_range = gen6_alloc_va_range; in gen6_ppgtt_init()
2085 ppgtt->base.clear_range = gen6_ppgtt_clear_range; in gen6_ppgtt_init()
2086 ppgtt->base.insert_entries = gen6_ppgtt_insert_entries; in gen6_ppgtt_init()
2087 ppgtt->base.unbind_vma = ppgtt_unbind_vma; in gen6_ppgtt_init()
2088 ppgtt->base.bind_vma = ppgtt_bind_vma; in gen6_ppgtt_init()
2089 ppgtt->base.cleanup = gen6_ppgtt_cleanup; in gen6_ppgtt_init()
2090 ppgtt->base.start = 0; in gen6_ppgtt_init()
2091 ppgtt->base.total = I915_PDES * GEN6_PTES * PAGE_SIZE; in gen6_ppgtt_init()
2092 ppgtt->debug_dump = gen6_dump_ppgtt; in gen6_ppgtt_init()
2094 ppgtt->pd.base.ggtt_offset = in gen6_ppgtt_init()
2095 ppgtt->node.start / PAGE_SIZE * sizeof(gen6_pte_t); in gen6_ppgtt_init()
2097 ppgtt->pd_addr = (gen6_pte_t __iomem *)dev_priv->gtt.gsm + in gen6_ppgtt_init()
2098 ppgtt->pd.base.ggtt_offset / sizeof(gen6_pte_t); in gen6_ppgtt_init()
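
A worked example of the offset arithmetic on the two lines above, with a hypothetical GGTT placement of the page directory (sizeof(gen6_pte_t) == 4, PAGE_SIZE == 4096):

/*
 * Hypothetical numbers:
 *
 *   node.start          = 0x00800000                   (8 MiB into the GGTT)
 *   pd.base.ggtt_offset = 0x00800000 / 4096 * 4 = 0x2000
 *   pd_addr             = gsm + 0x2000 / 4             (gen6_pte_t slots, i.e.
 *                                                       byte offset 0x2000 into the GSM)
 *   get_pd_offset()     = (0x2000 / 64) << 16 = 0x00800000  (loaded via PP_DIR_BASE)
 *
 * Each PDE thus occupies one GGTT PTE slot in the GSM, one slot per page
 * of the drm_mm node reserved for the page directory.
 */
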
2100 gen6_scratch_va_range(ppgtt, 0, ppgtt->base.total); in gen6_ppgtt_init()
2102 gen6_write_page_range(dev_priv, &ppgtt->pd, 0, ppgtt->base.total); in gen6_ppgtt_init()
2105 ppgtt->node.size >> 20, in gen6_ppgtt_init()
2106 ppgtt->node.start / PAGE_SIZE); in gen6_ppgtt_init()
2109 ppgtt->pd.base.ggtt_offset << 10); in gen6_ppgtt_init()
2114 static int __hw_ppgtt_init(struct drm_device *dev, struct i915_hw_ppgtt *ppgtt) in __hw_ppgtt_init() argument
2116 ppgtt->base.dev = dev; in __hw_ppgtt_init()
2119 return gen6_ppgtt_init(ppgtt); in __hw_ppgtt_init()
2121 return gen8_ppgtt_init(ppgtt); in __hw_ppgtt_init()
2134 int i915_ppgtt_init(struct drm_device *dev, struct i915_hw_ppgtt *ppgtt) in i915_ppgtt_init() argument
2139 ret = __hw_ppgtt_init(dev, ppgtt); in i915_ppgtt_init()
2141 kref_init(&ppgtt->ref); in i915_ppgtt_init()
2142 i915_address_space_init(&ppgtt->base, dev_priv); in i915_ppgtt_init()
2174 struct i915_hw_ppgtt *ppgtt = dev_priv->mm.aliasing_ppgtt; in i915_ppgtt_init_ring() local
2179 if (!ppgtt) in i915_ppgtt_init_ring()
2182 return ppgtt->switch_mm(ppgtt, req); in i915_ppgtt_init_ring()
2188 struct i915_hw_ppgtt *ppgtt; in i915_ppgtt_create() local
2191 ppgtt = kzalloc(sizeof(*ppgtt), GFP_KERNEL); in i915_ppgtt_create()
2192 if (!ppgtt) in i915_ppgtt_create()
2195 ret = i915_ppgtt_init(dev, ppgtt); in i915_ppgtt_create()
2197 kfree(ppgtt); in i915_ppgtt_create()
2201 ppgtt->file_priv = fpriv; in i915_ppgtt_create()
2203 trace_i915_ppgtt_create(&ppgtt->base); in i915_ppgtt_create()
2205 return ppgtt; in i915_ppgtt_create()
2210 struct i915_hw_ppgtt *ppgtt = in i915_ppgtt_release() local
2213 trace_i915_ppgtt_release(&ppgtt->base); in i915_ppgtt_release()
2216 WARN_ON(!list_empty(&ppgtt->base.active_list)); in i915_ppgtt_release()
2217 WARN_ON(!list_empty(&ppgtt->base.inactive_list)); in i915_ppgtt_release()
2219 list_del(&ppgtt->base.global_link); in i915_ppgtt_release()
2220 drm_mm_takedown(&ppgtt->base.mm); in i915_ppgtt_release()
2222 ppgtt->base.cleanup(&ppgtt->base); in i915_ppgtt_release()
2223 kfree(ppgtt); in i915_ppgtt_release()
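
The create/release pair above is tied together by the kref taken in i915_ppgtt_init() (line 2141); i915_ppgtt_release() runs when the last reference is dropped. A sketch of the put-side helper, modeled on the i915_ppgtt_put() inline from this era's headers; the sketch_* name is hypothetical:

static inline void sketch_ppgtt_put(struct i915_hw_ppgtt *ppgtt)
{
	if (ppgtt)
		kref_put(&ppgtt->ref, i915_ppgtt_release);
}
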
2695 struct i915_hw_ppgtt *ppgtt; in i915_gem_setup_global_gtt() local
2697 ppgtt = kzalloc(sizeof(*ppgtt), GFP_KERNEL); in i915_gem_setup_global_gtt()
2698 if (!ppgtt) in i915_gem_setup_global_gtt()
2701 ret = __hw_ppgtt_init(dev, ppgtt); in i915_gem_setup_global_gtt()
2703 ppgtt->base.cleanup(&ppgtt->base); in i915_gem_setup_global_gtt()
2704 kfree(ppgtt); in i915_gem_setup_global_gtt()
2708 if (ppgtt->base.allocate_va_range) in i915_gem_setup_global_gtt()
2709 ret = ppgtt->base.allocate_va_range(&ppgtt->base, 0, in i915_gem_setup_global_gtt()
2710 ppgtt->base.total); in i915_gem_setup_global_gtt()
2712 ppgtt->base.cleanup(&ppgtt->base); in i915_gem_setup_global_gtt()
2713 kfree(ppgtt); in i915_gem_setup_global_gtt()
2717 ppgtt->base.clear_range(&ppgtt->base, in i915_gem_setup_global_gtt()
2718 ppgtt->base.start, in i915_gem_setup_global_gtt()
2719 ppgtt->base.total, in i915_gem_setup_global_gtt()
2722 dev_priv->mm.aliasing_ppgtt = ppgtt; in i915_gem_setup_global_gtt()
2747 struct i915_hw_ppgtt *ppgtt = dev_priv->mm.aliasing_ppgtt; in i915_global_gtt_cleanup() local
2749 ppgtt->base.cleanup(&ppgtt->base); in i915_global_gtt_cleanup()
3189 struct i915_hw_ppgtt *ppgtt = in i915_gem_restore_gtt_mappings() local
3194 ppgtt = dev_priv->mm.aliasing_ppgtt; in i915_gem_restore_gtt_mappings()
3196 gen6_write_page_range(dev_priv, &ppgtt->pd, in i915_gem_restore_gtt_mappings()
3197 0, ppgtt->base.total); in i915_gem_restore_gtt_mappings()