gart              868 drivers/gpu/drm/amd/amdgpu/amdgpu.h 	struct amdgpu_gart		gart;
gart             1867 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c 	memcpy(adev->reset_magic, adev->gart.ptr, AMDGPU_RESET_MAGIC_NUM);
gart             1882 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c 	return !!memcmp(adev->gart.ptr, adev->reset_magic,
gart              118 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	if (adev->gart.bo == NULL) {
gart              122 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		bp.size = adev->gart.table_size;
gart              129 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		r = amdgpu_bo_create(adev, &bp, &adev->gart.bo);
gart              151 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	r = amdgpu_bo_reserve(adev->gart.bo, false);
gart              154 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	r = amdgpu_bo_pin(adev->gart.bo, AMDGPU_GEM_DOMAIN_VRAM);
gart              156 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		amdgpu_bo_unreserve(adev->gart.bo);
gart              159 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	r = amdgpu_bo_kmap(adev->gart.bo, &adev->gart.ptr);
gart              161 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		amdgpu_bo_unpin(adev->gart.bo);
gart              162 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	amdgpu_bo_unreserve(adev->gart.bo);
gart              178 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	if (adev->gart.bo == NULL) {
gart              181 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	r = amdgpu_bo_reserve(adev->gart.bo, true);
gart              183 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		amdgpu_bo_kunmap(adev->gart.bo);
gart              184 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		amdgpu_bo_unpin(adev->gart.bo);
gart              185 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		amdgpu_bo_unreserve(adev->gart.bo);
gart              186 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		adev->gart.ptr = NULL;
gart              201 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	if (adev->gart.bo == NULL) {
gart              204 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	amdgpu_bo_unref(&adev->gart.bo);
gart              231 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	if (!adev->gart.ready) {
gart              240 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		adev->gart.pages[p] = NULL;
gart              243 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		if (!adev->gart.ptr)
gart              247 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 			amdgpu_gmc_set_pte_pde(adev, adev->gart.ptr,
gart              280 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	if (!adev->gart.ready) {
gart              319 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	if (!adev->gart.ready) {
gart              328 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		adev->gart.pages[p] = pagelist ? pagelist[i] : NULL;
gart              331 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	if (!adev->gart.ptr)
gart              335 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		    adev->gart.ptr);
gart              370 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	adev->gart.num_cpu_pages = adev->gmc.gart_size / PAGE_SIZE;
gart              371 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	adev->gart.num_gpu_pages = adev->gmc.gart_size / AMDGPU_GPU_PAGE_SIZE;
gart              373 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 		 adev->gart.num_cpu_pages, adev->gart.num_gpu_pages);
gart              377 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	adev->gart.pages = vzalloc(array_size(sizeof(void *),
gart              378 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 					      adev->gart.num_cpu_pages));
gart              379 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	if (adev->gart.pages == NULL)
gart              396 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	vfree(adev->gart.pages);
gart              397 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 	adev->gart.pages = NULL;
gart              191 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 		lpfn = adev->gart.num_cpu_pages;
gart             1456 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	flags |= adev->gart.gart_pte_flags;
gart             1934 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	dst_addr = amdgpu_bo_gpu_offset(adev->gart.bo);
gart             1995 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		job->vm_pd_addr = amdgpu_gmc_pd_addr(adev->gart.bo);
gart             2270 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		if (p >= adev->gart.num_cpu_pages)
gart             2273 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		page = adev->gart.pages[p];
gart             2279 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			kunmap(adev->gart.pages[p]);
gart               54 drivers/gpu/drm/amd/amdgpu/gfxhub_v1_0.c 	uint64_t pt_base = amdgpu_gmc_pd_addr(adev->gart.bo);
gart               51 drivers/gpu/drm/amd/amdgpu/gfxhub_v2_0.c 	uint64_t value = amdgpu_gmc_pd_addr(adev->gart.bo);
gart              345 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 	job->vm_pd_addr = amdgpu_gmc_pd_addr(adev->gart.bo);
gart              647 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 	if (adev->gart.bo) {
gart              657 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 	adev->gart.table_size = adev->gart.num_gpu_pages * 8;
gart              658 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 	adev->gart.gart_pte_flags = AMDGPU_PTE_MTYPE_NV10(MTYPE_UC) |
gart              829 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 	if (adev->gart.bo == NULL) {
gart              866 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 		 (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo));
gart              868 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 	adev->gart.ready = true;
gart              492 drivers/gpu/drm/amd/amdgpu/gmc_v6_0.c 	if (adev->gart.bo == NULL) {
gart              500 drivers/gpu/drm/amd/amdgpu/gmc_v6_0.c 	table_addr = amdgpu_bo_gpu_offset(adev->gart.bo);
gart              578 drivers/gpu/drm/amd/amdgpu/gmc_v6_0.c 	adev->gart.ready = true;
gart              586 drivers/gpu/drm/amd/amdgpu/gmc_v6_0.c 	if (adev->gart.bo) {
gart              593 drivers/gpu/drm/amd/amdgpu/gmc_v6_0.c 	adev->gart.table_size = adev->gart.num_gpu_pages * 8;
gart              594 drivers/gpu/drm/amd/amdgpu/gmc_v6_0.c 	adev->gart.gart_pte_flags = 0;
gart              588 drivers/gpu/drm/amd/amdgpu/gmc_v7_0.c 	if (adev->gart.bo == NULL) {
gart              596 drivers/gpu/drm/amd/amdgpu/gmc_v7_0.c 	table_addr = amdgpu_bo_gpu_offset(adev->gart.bo);
gart              684 drivers/gpu/drm/amd/amdgpu/gmc_v7_0.c 	adev->gart.ready = true;
gart              692 drivers/gpu/drm/amd/amdgpu/gmc_v7_0.c 	if (adev->gart.bo) {
gart              700 drivers/gpu/drm/amd/amdgpu/gmc_v7_0.c 	adev->gart.table_size = adev->gart.num_gpu_pages * 8;
gart              701 drivers/gpu/drm/amd/amdgpu/gmc_v7_0.c 	adev->gart.gart_pte_flags = 0;
gart              815 drivers/gpu/drm/amd/amdgpu/gmc_v8_0.c 	if (adev->gart.bo == NULL) {
gart              823 drivers/gpu/drm/amd/amdgpu/gmc_v8_0.c 	table_addr = amdgpu_bo_gpu_offset(adev->gart.bo);
gart              928 drivers/gpu/drm/amd/amdgpu/gmc_v8_0.c 	adev->gart.ready = true;
gart              936 drivers/gpu/drm/amd/amdgpu/gmc_v8_0.c 	if (adev->gart.bo) {
gart              944 drivers/gpu/drm/amd/amdgpu/gmc_v8_0.c 	adev->gart.table_size = adev->gart.num_gpu_pages * 8;
gart              945 drivers/gpu/drm/amd/amdgpu/gmc_v8_0.c 	adev->gart.gart_pte_flags = AMDGPU_PTE_EXECUTABLE;
gart             1122 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 	if (adev->gart.bo) {
gart             1130 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 	adev->gart.table_size = adev->gart.num_gpu_pages * 8;
gart             1131 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 	adev->gart.gart_pte_flags = AMDGPU_PTE_MTYPE_VG10(MTYPE_UC) |
gart             1413 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 	if (adev->gart.bo == NULL) {
gart             1468 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 		 (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo));
gart             1469 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 	adev->gart.ready = true;
gart               74 drivers/gpu/drm/amd/amdgpu/mmhub_v1_0.c 	uint64_t pt_base = amdgpu_gmc_pd_addr(adev->gart.bo);
gart               36 drivers/gpu/drm/amd/amdgpu/mmhub_v2_0.c 	uint64_t value = amdgpu_gmc_pd_addr(adev->gart.bo);
gart               80 drivers/gpu/drm/amd/amdgpu/mmhub_v9_4.c 	uint64_t pt_base = amdgpu_gmc_pd_addr(adev->gart.bo);
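The amdgpu hits above all revolve around the single struct amdgpu_gart embedded in the device: amdgpu_gart_init() sizes the aperture in CPU and GPU pages, each gmc_v*_sw_init() reserves 8 bytes of page table per GPU page (gart.table_size = num_gpu_pages * 8), and the gfxhub/mmhub code later programs the table's bo address into the VM hardware. Below is a minimal standalone sketch of that sizing arithmetic only; the 4 KiB page sizes and the 512 MiB aperture are assumptions for illustration, not values taken from the listing.

	/*
	 * Standalone sketch (not kernel code) of the GART sizing arithmetic
	 * seen in amdgpu_gart.c and the gmc_v*_sw_init() hits above: the
	 * aperture is split into CPU-sized and GPU-sized pages, and the page
	 * table reserves one 64-bit PTE per GPU page.
	 */
	#include <stdint.h>
	#include <stdio.h>

	#define CPU_PAGE_SIZE	4096ULL	/* assumed PAGE_SIZE */
	#define GPU_PAGE_SIZE	4096ULL	/* assumed AMDGPU_GPU_PAGE_SIZE */
	#define PTE_SIZE	8ULL	/* bytes per GART page table entry */

	int main(void)
	{
		uint64_t gart_size = 512ULL << 20;		/* example 512 MiB aperture */
		uint64_t num_cpu_pages = gart_size / CPU_PAGE_SIZE;
		uint64_t num_gpu_pages = gart_size / GPU_PAGE_SIZE;
		uint64_t table_size = num_gpu_pages * PTE_SIZE;	/* like adev->gart.table_size */

		printf("GART: %lluM, %llu CPU pages, %llu GPU pages, %lluK table\n",
		       (unsigned long long)(gart_size >> 20),
		       (unsigned long long)num_cpu_pages,
		       (unsigned long long)num_gpu_pages,
		       (unsigned long long)(table_size >> 10));
		return 0;
	}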
gart              104 drivers/gpu/drm/nouveau/nouveau_chan.c 		nvif_object_fini(&chan->gart);
gart              355 drivers/gpu/drm/nouveau/nouveau_chan.c nouveau_channel_init(struct nouveau_channel *chan, u32 vram, u32 gart)
gart              415 drivers/gpu/drm/nouveau/nouveau_chan.c 		ret = nvif_object_init(&chan->user, gart, NV_DMA_IN_MEMORY,
gart              416 drivers/gpu/drm/nouveau/nouveau_chan.c 				       &args, sizeof(args), &chan->gart);
gart               18 drivers/gpu/drm/nouveau/nouveau_chan.h 	struct nvif_object gart;
gart             5437 drivers/gpu/drm/radeon/cik.c 	if (rdev->gart.robj == NULL) {
gart             5466 drivers/gpu/drm/radeon/cik.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
gart             5538 drivers/gpu/drm/radeon/cik.c 		 (unsigned long long)rdev->gart.table_addr);
gart             5539 drivers/gpu/drm/radeon/cik.c 	rdev->gart.ready = true;
gart             2402 drivers/gpu/drm/radeon/evergreen.c 	if (rdev->gart.robj == NULL) {
gart             2440 drivers/gpu/drm/radeon/evergreen.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
gart             2450 drivers/gpu/drm/radeon/evergreen.c 		 (unsigned long long)rdev->gart.table_addr);
gart             2451 drivers/gpu/drm/radeon/evergreen.c 	rdev->gart.ready = true;
gart             1278 drivers/gpu/drm/radeon/ni.c 	if (rdev->gart.robj == NULL) {
gart             1307 drivers/gpu/drm/radeon/ni.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
gart             1353 drivers/gpu/drm/radeon/ni.c 		 (unsigned long long)rdev->gart.table_addr);
gart             1354 drivers/gpu/drm/radeon/ni.c 	rdev->gart.ready = true;
gart              645 drivers/gpu/drm/radeon/r100.c 	if (rdev->gart.ptr) {
gart              653 drivers/gpu/drm/radeon/r100.c 	rdev->gart.table_size = rdev->gart.num_gpu_pages * 4;
gart              654 drivers/gpu/drm/radeon/r100.c 	rdev->asic->gart.tlb_flush = &r100_pci_gart_tlb_flush;
gart              655 drivers/gpu/drm/radeon/r100.c 	rdev->asic->gart.get_page_entry = &r100_pci_gart_get_page_entry;
gart              656 drivers/gpu/drm/radeon/r100.c 	rdev->asic->gart.set_page = &r100_pci_gart_set_page;
gart              671 drivers/gpu/drm/radeon/r100.c 	WREG32(RADEON_AIC_PT_BASE, rdev->gart.table_addr);
gart              677 drivers/gpu/drm/radeon/r100.c 		 (unsigned long long)rdev->gart.table_addr);
gart              678 drivers/gpu/drm/radeon/r100.c 	rdev->gart.ready = true;
gart              701 drivers/gpu/drm/radeon/r100.c 	u32 *gtt = rdev->gart.ptr;
gart              123 drivers/gpu/drm/radeon/r300.c 	void __iomem *ptr = rdev->gart.ptr;
gart              135 drivers/gpu/drm/radeon/r300.c 	if (rdev->gart.robj) {
gart              146 drivers/gpu/drm/radeon/r300.c 	rdev->gart.table_size = rdev->gart.num_gpu_pages * 4;
gart              147 drivers/gpu/drm/radeon/r300.c 	rdev->asic->gart.tlb_flush = &rv370_pcie_gart_tlb_flush;
gart              148 drivers/gpu/drm/radeon/r300.c 	rdev->asic->gart.get_page_entry = &rv370_pcie_gart_get_page_entry;
gart              149 drivers/gpu/drm/radeon/r300.c 	rdev->asic->gart.set_page = &rv370_pcie_gart_set_page;
gart              159 drivers/gpu/drm/radeon/r300.c 	if (rdev->gart.robj == NULL) {
gart              174 drivers/gpu/drm/radeon/r300.c 	table_addr = rdev->gart.table_addr;
gart              189 drivers/gpu/drm/radeon/r300.c 	rdev->gart.ready = true;
gart             1082 drivers/gpu/drm/radeon/r600.c 		void __iomem *ptr = (void *)rdev->gart.ptr;
gart             1117 drivers/gpu/drm/radeon/r600.c 	if (rdev->gart.robj) {
gart             1125 drivers/gpu/drm/radeon/r600.c 	rdev->gart.table_size = rdev->gart.num_gpu_pages * 8;
gart             1134 drivers/gpu/drm/radeon/r600.c 	if (rdev->gart.robj == NULL) {
gart             1171 drivers/gpu/drm/radeon/r600.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
gart             1182 drivers/gpu/drm/radeon/r600.c 		 (unsigned long long)rdev->gart.table_addr);
gart             1183 drivers/gpu/drm/radeon/r600.c 	rdev->gart.ready = true;
gart             1866 drivers/gpu/drm/radeon/radeon.h 	} gart;
gart             2367 drivers/gpu/drm/radeon/radeon.h 	struct radeon_gart		gart;
gart             2706 drivers/gpu/drm/radeon/radeon.h #define radeon_gart_tlb_flush(rdev) (rdev)->asic->gart.tlb_flush((rdev))
gart             2707 drivers/gpu/drm/radeon/radeon.h #define radeon_gart_get_page_entry(a, f) (rdev)->asic->gart.get_page_entry((a), (f))
gart             2708 drivers/gpu/drm/radeon/radeon.h #define radeon_gart_set_page(rdev, i, e) (rdev)->asic->gart.set_page((rdev), (i), (e))
gart              167 drivers/gpu/drm/radeon/radeon_asic.c 		rdev->asic->gart.tlb_flush = &rv370_pcie_gart_tlb_flush;
gart              168 drivers/gpu/drm/radeon/radeon_asic.c 		rdev->asic->gart.get_page_entry = &rv370_pcie_gart_get_page_entry;
gart              169 drivers/gpu/drm/radeon/radeon_asic.c 		rdev->asic->gart.set_page = &rv370_pcie_gart_set_page;
gart              173 drivers/gpu/drm/radeon/radeon_asic.c 		rdev->asic->gart.tlb_flush = &r100_pci_gart_tlb_flush;
gart              174 drivers/gpu/drm/radeon/radeon_asic.c 		rdev->asic->gart.get_page_entry = &r100_pci_gart_get_page_entry;
gart              175 drivers/gpu/drm/radeon/radeon_asic.c 		rdev->asic->gart.set_page = &r100_pci_gart_set_page;
gart              209 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart              277 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart              373 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart              441 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart              509 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart              577 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart              645 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart              713 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart              781 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart              849 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart              945 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart             1030 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart             1123 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart             1229 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart             1349 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart             1443 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart             1536 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart             1673 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart             1791 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart             1929 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart             2099 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart             2212 drivers/gpu/drm/radeon/radeon_asic.c 	.gart = {
gart               75 drivers/gpu/drm/radeon/radeon_gart.c 	ptr = pci_alloc_consistent(rdev->pdev, rdev->gart.table_size,
gart               76 drivers/gpu/drm/radeon/radeon_gart.c 				   &rdev->gart.table_addr);
gart               84 drivers/gpu/drm/radeon/radeon_gart.c 			      rdev->gart.table_size >> PAGE_SHIFT);
gart               87 drivers/gpu/drm/radeon/radeon_gart.c 	rdev->gart.ptr = ptr;
gart               88 drivers/gpu/drm/radeon/radeon_gart.c 	memset((void *)rdev->gart.ptr, 0, rdev->gart.table_size);
gart              103 drivers/gpu/drm/radeon/radeon_gart.c 	if (rdev->gart.ptr == NULL) {
gart              109 drivers/gpu/drm/radeon/radeon_gart.c 		set_memory_wb((unsigned long)rdev->gart.ptr,
gart              110 drivers/gpu/drm/radeon/radeon_gart.c 			      rdev->gart.table_size >> PAGE_SHIFT);
gart              113 drivers/gpu/drm/radeon/radeon_gart.c 	pci_free_consistent(rdev->pdev, rdev->gart.table_size,
gart              114 drivers/gpu/drm/radeon/radeon_gart.c 			    (void *)rdev->gart.ptr,
gart              115 drivers/gpu/drm/radeon/radeon_gart.c 			    rdev->gart.table_addr);
gart              116 drivers/gpu/drm/radeon/radeon_gart.c 	rdev->gart.ptr = NULL;
gart              117 drivers/gpu/drm/radeon/radeon_gart.c 	rdev->gart.table_addr = 0;
gart              134 drivers/gpu/drm/radeon/radeon_gart.c 	if (rdev->gart.robj == NULL) {
gart              135 drivers/gpu/drm/radeon/radeon_gart.c 		r = radeon_bo_create(rdev, rdev->gart.table_size,
gart              137 drivers/gpu/drm/radeon/radeon_gart.c 				     0, NULL, NULL, &rdev->gart.robj);
gart              160 drivers/gpu/drm/radeon/radeon_gart.c 	r = radeon_bo_reserve(rdev->gart.robj, false);
gart              163 drivers/gpu/drm/radeon/radeon_gart.c 	r = radeon_bo_pin(rdev->gart.robj,
gart              166 drivers/gpu/drm/radeon/radeon_gart.c 		radeon_bo_unreserve(rdev->gart.robj);
gart              169 drivers/gpu/drm/radeon/radeon_gart.c 	r = radeon_bo_kmap(rdev->gart.robj, &rdev->gart.ptr);
gart              171 drivers/gpu/drm/radeon/radeon_gart.c 		radeon_bo_unpin(rdev->gart.robj);
gart              172 drivers/gpu/drm/radeon/radeon_gart.c 	radeon_bo_unreserve(rdev->gart.robj);
gart              173 drivers/gpu/drm/radeon/radeon_gart.c 	rdev->gart.table_addr = gpu_addr;
gart              181 drivers/gpu/drm/radeon/radeon_gart.c 		for (i = 0; i < rdev->gart.num_gpu_pages; i++)
gart              182 drivers/gpu/drm/radeon/radeon_gart.c 			radeon_gart_set_page(rdev, i, rdev->gart.pages_entry[i]);
gart              202 drivers/gpu/drm/radeon/radeon_gart.c 	if (rdev->gart.robj == NULL) {
gart              205 drivers/gpu/drm/radeon/radeon_gart.c 	r = radeon_bo_reserve(rdev->gart.robj, false);
gart              207 drivers/gpu/drm/radeon/radeon_gart.c 		radeon_bo_kunmap(rdev->gart.robj);
gart              208 drivers/gpu/drm/radeon/radeon_gart.c 		radeon_bo_unpin(rdev->gart.robj);
gart              209 drivers/gpu/drm/radeon/radeon_gart.c 		radeon_bo_unreserve(rdev->gart.robj);
gart              210 drivers/gpu/drm/radeon/radeon_gart.c 		rdev->gart.ptr = NULL;
gart              225 drivers/gpu/drm/radeon/radeon_gart.c 	if (rdev->gart.robj == NULL) {
gart              228 drivers/gpu/drm/radeon/radeon_gart.c 	radeon_bo_unref(&rdev->gart.robj);
gart              251 drivers/gpu/drm/radeon/radeon_gart.c 	if (!rdev->gart.ready) {
gart              258 drivers/gpu/drm/radeon/radeon_gart.c 		if (rdev->gart.pages[p]) {
gart              259 drivers/gpu/drm/radeon/radeon_gart.c 			rdev->gart.pages[p] = NULL;
gart              261 drivers/gpu/drm/radeon/radeon_gart.c 				rdev->gart.pages_entry[t] = rdev->dummy_page.entry;
gart              262 drivers/gpu/drm/radeon/radeon_gart.c 				if (rdev->gart.ptr) {
gart              269 drivers/gpu/drm/radeon/radeon_gart.c 	if (rdev->gart.ptr) {
gart              298 drivers/gpu/drm/radeon/radeon_gart.c 	if (!rdev->gart.ready) {
gart              306 drivers/gpu/drm/radeon/radeon_gart.c 		rdev->gart.pages[p] = pagelist[i];
gart              310 drivers/gpu/drm/radeon/radeon_gart.c 			rdev->gart.pages_entry[t] = page_entry;
gart              311 drivers/gpu/drm/radeon/radeon_gart.c 			if (rdev->gart.ptr) {
gart              317 drivers/gpu/drm/radeon/radeon_gart.c 	if (rdev->gart.ptr) {
gart              336 drivers/gpu/drm/radeon/radeon_gart.c 	if (rdev->gart.pages) {
gart              348 drivers/gpu/drm/radeon/radeon_gart.c 	rdev->gart.num_cpu_pages = rdev->mc.gtt_size / PAGE_SIZE;
gart              349 drivers/gpu/drm/radeon/radeon_gart.c 	rdev->gart.num_gpu_pages = rdev->mc.gtt_size / RADEON_GPU_PAGE_SIZE;
gart              351 drivers/gpu/drm/radeon/radeon_gart.c 		 rdev->gart.num_cpu_pages, rdev->gart.num_gpu_pages);
gart              353 drivers/gpu/drm/radeon/radeon_gart.c 	rdev->gart.pages = vzalloc(array_size(sizeof(void *),
gart              354 drivers/gpu/drm/radeon/radeon_gart.c 				   rdev->gart.num_cpu_pages));
gart              355 drivers/gpu/drm/radeon/radeon_gart.c 	if (rdev->gart.pages == NULL) {
gart              359 drivers/gpu/drm/radeon/radeon_gart.c 	rdev->gart.pages_entry = vmalloc(array_size(sizeof(uint64_t),
gart              360 drivers/gpu/drm/radeon/radeon_gart.c 						    rdev->gart.num_gpu_pages));
gart              361 drivers/gpu/drm/radeon/radeon_gart.c 	if (rdev->gart.pages_entry == NULL) {
gart              366 drivers/gpu/drm/radeon/radeon_gart.c 	for (i = 0; i < rdev->gart.num_gpu_pages; i++)
gart              367 drivers/gpu/drm/radeon/radeon_gart.c 		rdev->gart.pages_entry[i] = rdev->dummy_page.entry;
gart              380 drivers/gpu/drm/radeon/radeon_gart.c 	if (rdev->gart.ready) {
gart              382 drivers/gpu/drm/radeon/radeon_gart.c 		radeon_gart_unbind(rdev, 0, rdev->gart.num_cpu_pages);
gart              384 drivers/gpu/drm/radeon/radeon_gart.c 	rdev->gart.ready = false;
gart              385 drivers/gpu/drm/radeon/radeon_gart.c 	vfree(rdev->gart.pages);
gart              386 drivers/gpu/drm/radeon/radeon_gart.c 	vfree(rdev->gart.pages_entry);
gart              387 drivers/gpu/drm/radeon/radeon_gart.c 	rdev->gart.pages = NULL;
gart              388 drivers/gpu/drm/radeon/radeon_gart.c 	rdev->gart.pages_entry = NULL;
gart             1027 drivers/gpu/drm/radeon/radeon_ttm.c 		if (p >= rdev->gart.num_cpu_pages)
gart             1030 drivers/gpu/drm/radeon/radeon_ttm.c 		page = rdev->gart.pages[p];
gart             1036 drivers/gpu/drm/radeon/radeon_ttm.c 			kunmap(rdev->gart.pages[p]);
gart              368 drivers/gpu/drm/radeon/radeon_vm.c 		uint64_t src = rdev->gart.table_addr + (addr >> 12) * 8;
gart              600 drivers/gpu/drm/radeon/radeon_vm.c 	result = rdev->gart.pages_entry[addr >> RADEON_GPU_PAGE_SHIFT];
gart               85 drivers/gpu/drm/radeon/rs400.c 	if (rdev->gart.ptr) {
gart              108 drivers/gpu/drm/radeon/rs400.c 	rdev->gart.table_size = rdev->gart.num_gpu_pages * 4;
gart              166 drivers/gpu/drm/radeon/rs400.c 	tmp = (u32)rdev->gart.table_addr & 0xfffff000;
gart              167 drivers/gpu/drm/radeon/rs400.c 	tmp |= (upper_32_bits(rdev->gart.table_addr) & 0xff) << 4;
gart              194 drivers/gpu/drm/radeon/rs400.c 		 (unsigned long long)rdev->gart.table_addr);
gart              195 drivers/gpu/drm/radeon/rs400.c 	rdev->gart.ready = true;
gart              238 drivers/gpu/drm/radeon/rs400.c 	u32 *gtt = rdev->gart.ptr;
gart              543 drivers/gpu/drm/radeon/rs600.c 	if (rdev->gart.robj) {
gart              552 drivers/gpu/drm/radeon/rs600.c 	rdev->gart.table_size = rdev->gart.num_gpu_pages * 8;
gart              561 drivers/gpu/drm/radeon/rs600.c 	if (rdev->gart.robj == NULL) {
gart              598 drivers/gpu/drm/radeon/rs600.c 		  rdev->gart.table_addr);
gart              615 drivers/gpu/drm/radeon/rs600.c 		 (unsigned long long)rdev->gart.table_addr);
gart              616 drivers/gpu/drm/radeon/rs600.c 	rdev->gart.ready = true;
gart              656 drivers/gpu/drm/radeon/rs600.c 	void __iomem *ptr = (void *)rdev->gart.ptr;
gart              902 drivers/gpu/drm/radeon/rv770.c 	if (rdev->gart.robj == NULL) {
gart              931 drivers/gpu/drm/radeon/rv770.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
gart              942 drivers/gpu/drm/radeon/rv770.c 		 (unsigned long long)rdev->gart.table_addr);
gart              943 drivers/gpu/drm/radeon/rv770.c 	rdev->gart.ready = true;
gart             4289 drivers/gpu/drm/radeon/si.c 	if (rdev->gart.robj == NULL) {
gart             4318 drivers/gpu/drm/radeon/si.c 	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
gart             4368 drivers/gpu/drm/radeon/si.c 		 (unsigned long long)rdev->gart.table_addr);
gart             4369 drivers/gpu/drm/radeon/si.c 	rdev->gart.ready = true;
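The radeon hits follow the same shape but keep two arrays: gart.pages[] is indexed by CPU page and gart.pages_entry[] by GPU page, so radeon_gart_bind()/unbind() fan each CPU page out over PAGE_SIZE / RADEON_GPU_PAGE_SIZE consecutive entries and point unbound slots at the dummy page entry. Below is a rough userspace model of that index fan-out only; the 16 KiB CPU page size, the DMA addresses and the dummy value are assumptions chosen so the fan-out is visible, and the real driver additionally runs each address through radeon_gart_get_page_entry().

	/*
	 * Userspace model of the pages[] / pages_entry[] fan-out visible in
	 * the radeon_gart.c hits above. Not driver code.
	 */
	#include <stdint.h>
	#include <stdio.h>

	#define CPU_PAGE_SIZE	16384u		/* assumed PAGE_SIZE (e.g. a 16 KiB-page kernel) */
	#define GPU_PAGE_SIZE	4096u		/* RADEON_GPU_PAGE_SIZE */
	#define NUM_CPU_PAGES	4u
	#define NUM_GPU_PAGES	(NUM_CPU_PAGES * (CPU_PAGE_SIZE / GPU_PAGE_SIZE))
	#define DUMMY_ENTRY	0xdead0000ULL	/* stand-in for rdev->dummy_page.entry */

	/* Fill the GPU-page entries backing CPU pages [offset, offset + npages). */
	static void bind_pages(uint64_t *pages_entry, unsigned offset,
			       unsigned npages, const uint64_t *dma_addr)
	{
		unsigned per_cpu_page = CPU_PAGE_SIZE / GPU_PAGE_SIZE;
		unsigned t = offset * per_cpu_page;
		unsigned i, j;

		for (i = 0; i < npages; i++)
			for (j = 0; j < per_cpu_page; j++, t++)
				pages_entry[t] = dma_addr[i] + (uint64_t)j * GPU_PAGE_SIZE;
	}

	int main(void)
	{
		uint64_t pages_entry[NUM_GPU_PAGES];
		uint64_t dma_addr[2] = { 0x100000, 0x200000 };	/* example per-CPU-page DMA addresses */
		unsigned i;

		for (i = 0; i < NUM_GPU_PAGES; i++)
			pages_entry[i] = DUMMY_ENTRY;	/* unbound slots reference the dummy page */

		bind_pages(pages_entry, 1, 2, dma_addr);	/* bind CPU pages 1 and 2 */

		for (i = 0; i < NUM_GPU_PAGES; i++)
			printf("pages_entry[%2u] = 0x%llx\n", i,
			       (unsigned long long)pages_entry[i]);
		return 0;
	}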
gart               58 drivers/iommu/tegra-gart.c #define FLUSH_GART_REGS(gart)	readl_relaxed((gart)->regs + GART_CONFIG)
gart               60 drivers/iommu/tegra-gart.c #define for_each_gart_pte(gart, iova)					\
gart               61 drivers/iommu/tegra-gart.c 	for (iova = gart->iovmm_base;					\
gart               62 drivers/iommu/tegra-gart.c 	     iova < gart->iovmm_end;					\
gart               65 drivers/iommu/tegra-gart.c static inline void gart_set_pte(struct gart_device *gart,
gart               68 drivers/iommu/tegra-gart.c 	writel_relaxed(iova, gart->regs + GART_ENTRY_ADDR);
gart               69 drivers/iommu/tegra-gart.c 	writel_relaxed(pte, gart->regs + GART_ENTRY_DATA);
gart               72 drivers/iommu/tegra-gart.c static inline unsigned long gart_read_pte(struct gart_device *gart,
gart               77 drivers/iommu/tegra-gart.c 	writel_relaxed(iova, gart->regs + GART_ENTRY_ADDR);
gart               78 drivers/iommu/tegra-gart.c 	pte = readl_relaxed(gart->regs + GART_ENTRY_DATA);
gart               83 drivers/iommu/tegra-gart.c static void do_gart_setup(struct gart_device *gart, const u32 *data)
gart               87 drivers/iommu/tegra-gart.c 	for_each_gart_pte(gart, iova)
gart               88 drivers/iommu/tegra-gart.c 		gart_set_pte(gart, iova, data ? *(data++) : 0);
gart               90 drivers/iommu/tegra-gart.c 	writel_relaxed(1, gart->regs + GART_CONFIG);
gart               91 drivers/iommu/tegra-gart.c 	FLUSH_GART_REGS(gart);
gart               94 drivers/iommu/tegra-gart.c static inline bool gart_iova_range_invalid(struct gart_device *gart,
gart               97 drivers/iommu/tegra-gart.c 	return unlikely(iova < gart->iovmm_base || bytes != GART_PAGE_SIZE ||
gart               98 drivers/iommu/tegra-gart.c 			iova + bytes > gart->iovmm_end);
gart              101 drivers/iommu/tegra-gart.c static inline bool gart_pte_valid(struct gart_device *gart, unsigned long iova)
gart              103 drivers/iommu/tegra-gart.c 	return !!(gart_read_pte(gart, iova) & GART_ENTRY_PHYS_ADDR_VALID);
gart              109 drivers/iommu/tegra-gart.c 	struct gart_device *gart = gart_handle;
gart              112 drivers/iommu/tegra-gart.c 	spin_lock(&gart->dom_lock);
gart              114 drivers/iommu/tegra-gart.c 	if (gart->active_domain && gart->active_domain != domain) {
gart              118 drivers/iommu/tegra-gart.c 		gart->active_domain = domain;
gart              119 drivers/iommu/tegra-gart.c 		gart->active_devices++;
gart              122 drivers/iommu/tegra-gart.c 	spin_unlock(&gart->dom_lock);
gart              130 drivers/iommu/tegra-gart.c 	struct gart_device *gart = gart_handle;
gart              132 drivers/iommu/tegra-gart.c 	spin_lock(&gart->dom_lock);
gart              137 drivers/iommu/tegra-gart.c 		if (--gart->active_devices == 0)
gart              138 drivers/iommu/tegra-gart.c 			gart->active_domain = NULL;
gart              141 drivers/iommu/tegra-gart.c 	spin_unlock(&gart->dom_lock);
gart              167 drivers/iommu/tegra-gart.c static inline int __gart_iommu_map(struct gart_device *gart, unsigned long iova,
gart              170 drivers/iommu/tegra-gart.c 	if (unlikely(gart_debug && gart_pte_valid(gart, iova))) {
gart              171 drivers/iommu/tegra-gart.c 		dev_err(gart->dev, "Page entry is in-use\n");
gart              175 drivers/iommu/tegra-gart.c 	gart_set_pte(gart, iova, GART_ENTRY_PHYS_ADDR_VALID | pa);
gart              183 drivers/iommu/tegra-gart.c 	struct gart_device *gart = gart_handle;
gart              186 drivers/iommu/tegra-gart.c 	if (gart_iova_range_invalid(gart, iova, bytes))
gart              189 drivers/iommu/tegra-gart.c 	spin_lock(&gart->pte_lock);
gart              190 drivers/iommu/tegra-gart.c 	ret = __gart_iommu_map(gart, iova, (unsigned long)pa);
gart              191 drivers/iommu/tegra-gart.c 	spin_unlock(&gart->pte_lock);
gart              196 drivers/iommu/tegra-gart.c static inline int __gart_iommu_unmap(struct gart_device *gart,
gart              199 drivers/iommu/tegra-gart.c 	if (unlikely(gart_debug && !gart_pte_valid(gart, iova))) {
gart              200 drivers/iommu/tegra-gart.c 		dev_err(gart->dev, "Page entry is invalid\n");
gart              204 drivers/iommu/tegra-gart.c 	gart_set_pte(gart, iova, 0);
gart              212 drivers/iommu/tegra-gart.c 	struct gart_device *gart = gart_handle;
gart              215 drivers/iommu/tegra-gart.c 	if (gart_iova_range_invalid(gart, iova, bytes))
gart              218 drivers/iommu/tegra-gart.c 	spin_lock(&gart->pte_lock);
gart              219 drivers/iommu/tegra-gart.c 	err = __gart_iommu_unmap(gart, iova);
gart              220 drivers/iommu/tegra-gart.c 	spin_unlock(&gart->pte_lock);
gart              228 drivers/iommu/tegra-gart.c 	struct gart_device *gart = gart_handle;
gart              231 drivers/iommu/tegra-gart.c 	if (gart_iova_range_invalid(gart, iova, GART_PAGE_SIZE))
gart              234 drivers/iommu/tegra-gart.c 	spin_lock(&gart->pte_lock);
gart              235 drivers/iommu/tegra-gart.c 	pte = gart_read_pte(gart, iova);
gart              236 drivers/iommu/tegra-gart.c 	spin_unlock(&gart->pte_lock);
gart              305 drivers/iommu/tegra-gart.c int tegra_gart_suspend(struct gart_device *gart)
gart              307 drivers/iommu/tegra-gart.c 	u32 *data = gart->savedata;
gart              315 drivers/iommu/tegra-gart.c 	writel_relaxed(0, gart->regs + GART_CONFIG);
gart              316 drivers/iommu/tegra-gart.c 	FLUSH_GART_REGS(gart);
gart              318 drivers/iommu/tegra-gart.c 	for_each_gart_pte(gart, iova)
gart              319 drivers/iommu/tegra-gart.c 		*(data++) = gart_read_pte(gart, iova);
gart              324 drivers/iommu/tegra-gart.c int tegra_gart_resume(struct gart_device *gart)
gart              326 drivers/iommu/tegra-gart.c 	do_gart_setup(gart, gart->savedata);
gart              333 drivers/iommu/tegra-gart.c 	struct gart_device *gart;
gart              346 drivers/iommu/tegra-gart.c 	gart = kzalloc(sizeof(*gart), GFP_KERNEL);
gart              347 drivers/iommu/tegra-gart.c 	if (!gart)
gart              350 drivers/iommu/tegra-gart.c 	gart_handle = gart;
gart              352 drivers/iommu/tegra-gart.c 	gart->dev = dev;
gart              353 drivers/iommu/tegra-gart.c 	gart->regs = mc->regs + GART_REG_BASE;
gart              354 drivers/iommu/tegra-gart.c 	gart->iovmm_base = res->start;
gart              355 drivers/iommu/tegra-gart.c 	gart->iovmm_end = res->end + 1;
gart              356 drivers/iommu/tegra-gart.c 	spin_lock_init(&gart->pte_lock);
gart              357 drivers/iommu/tegra-gart.c 	spin_lock_init(&gart->dom_lock);
gart              359 drivers/iommu/tegra-gart.c 	do_gart_setup(gart, NULL);
gart              361 drivers/iommu/tegra-gart.c 	err = iommu_device_sysfs_add(&gart->iommu, dev, NULL, "gart");
gart              365 drivers/iommu/tegra-gart.c 	iommu_device_set_ops(&gart->iommu, &gart_iommu_ops);
gart              366 drivers/iommu/tegra-gart.c 	iommu_device_set_fwnode(&gart->iommu, dev->fwnode);
gart              368 drivers/iommu/tegra-gart.c 	err = iommu_device_register(&gart->iommu);
gart              372 drivers/iommu/tegra-gart.c 	gart->savedata = vmalloc(resource_size(res) / GART_PAGE_SIZE *
gart              374 drivers/iommu/tegra-gart.c 	if (!gart->savedata) {
gart              379 drivers/iommu/tegra-gart.c 	return gart;
gart              382 drivers/iommu/tegra-gart.c 	iommu_device_unregister(&gart->iommu);
gart              384 drivers/iommu/tegra-gart.c 	iommu_device_sysfs_remove(&gart->iommu);
gart              386 drivers/iommu/tegra-gart.c 	kfree(gart);
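The tegra-gart.c hits describe a much simpler device: a single-level table of 32-bit PTEs reached indirectly by writing the IOVA into GART_ENTRY_ADDR and then accessing GART_ENTRY_DATA, with GART_ENTRY_PHYS_ADDR_VALID marking a live entry. Below is a small userspace model of that indirect access path; the aperture base, the physical address and the array standing in for the MMIO window are illustrative assumptions.

	/*
	 * Userspace model (not driver code) of the indirect PTE access used by
	 * tegra-gart.c above: select an entry via the address register, then
	 * read or write it through the data register.
	 */
	#include <stdint.h>
	#include <stdio.h>

	#define GART_PAGE_SHIFT			12
	#define GART_PAGE_SIZE			(1u << GART_PAGE_SHIFT)
	#define GART_ENTRY_PHYS_ADDR_VALID	(1u << 31)	/* valid bit assumed to be bit 31 */

	struct fake_gart {
		uint32_t iovmm_base;	/* start of the remapped aperture */
		uint32_t entry_addr;	/* models the GART_ENTRY_ADDR register */
		uint32_t pte[256];	/* PTE storage reached through GART_ENTRY_DATA */
	};

	/* Write GART_ENTRY_ADDR, then GART_ENTRY_DATA, as gart_set_pte() does. */
	static void fake_gart_set_pte(struct fake_gart *g, uint32_t iova, uint32_t pte)
	{
		g->entry_addr = iova;
		g->pte[(g->entry_addr - g->iovmm_base) >> GART_PAGE_SHIFT] = pte;
	}

	/* Write GART_ENTRY_ADDR, then read GART_ENTRY_DATA, as gart_read_pte() does. */
	static uint32_t fake_gart_read_pte(struct fake_gart *g, uint32_t iova)
	{
		g->entry_addr = iova;
		return g->pte[(g->entry_addr - g->iovmm_base) >> GART_PAGE_SHIFT];
	}

	int main(void)
	{
		struct fake_gart g = { .iovmm_base = 0x58000000 };	/* assumed aperture base */
		uint32_t iova = g.iovmm_base + 3 * GART_PAGE_SIZE;
		uint32_t pa = 0x1f004000;				/* example physical page */
		uint32_t pte;

		fake_gart_set_pte(&g, iova, GART_ENTRY_PHYS_ADDR_VALID | pa);
		pte = fake_gart_read_pte(&g, iova);
		printf("pte @ 0x%08x = 0x%08x (valid=%u)\n", iova, pte,
		       !!(pte & GART_ENTRY_PHYS_ADDR_VALID));
		return 0;
	}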
gart              712 drivers/memory/tegra/mc.c 		mc->gart = tegra_gart_probe(&pdev->dev, mc);
gart              713 drivers/memory/tegra/mc.c 		if (IS_ERR(mc->gart)) {
gart              715 drivers/memory/tegra/mc.c 				PTR_ERR(mc->gart));
gart              716 drivers/memory/tegra/mc.c 			mc->gart = NULL;
gart              728 drivers/memory/tegra/mc.c 	if (IS_ENABLED(CONFIG_TEGRA_IOMMU_GART) && mc->gart) {
gart              729 drivers/memory/tegra/mc.c 		err = tegra_gart_suspend(mc->gart);
gart              742 drivers/memory/tegra/mc.c 	if (IS_ENABLED(CONFIG_TEGRA_IOMMU_GART) && mc->gart) {
gart              743 drivers/memory/tegra/mc.c 		err = tegra_gart_resume(mc->gart);
gart              100 include/soc/tegra/mc.h int tegra_gart_suspend(struct gart_device *gart);
gart              101 include/soc/tegra/mc.h int tegra_gart_resume(struct gart_device *gart);
gart              109 include/soc/tegra/mc.h static inline int tegra_gart_suspend(struct gart_device *gart)
gart              114 include/soc/tegra/mc.h static inline int tegra_gart_resume(struct gart_device *gart)
gart              168 include/soc/tegra/mc.h 	struct gart_device *gart;
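Finally, the memory/tegra/mc.c and include/soc/tegra/mc.h hits show how the memory controller drives the GART without hard build dependencies: real prototypes when CONFIG_TEGRA_IOMMU_GART is enabled, static inline stubs otherwise, plus an IS_ENABLED() guard at the call sites. The sketch below compresses that pattern into a standalone file; the Kconfig symbol, function name and struct are made-up stand-ins, not the real API.

	/*
	 * Sketch of the optional-driver stub pattern visible in mc.h/mc.c
	 * above. CONFIG_EXAMPLE_GART is a stand-in for CONFIG_TEGRA_IOMMU_GART.
	 */
	#include <errno.h>
	#include <stdio.h>

	struct fake_gart_device { int enabled; };	/* placeholder for struct gart_device */

	#ifdef CONFIG_EXAMPLE_GART
	int example_gart_suspend(struct fake_gart_device *gart);
	#else
	/* With the GART driver compiled out, the stub keeps callers ifdef-free. */
	static inline int example_gart_suspend(struct fake_gart_device *gart)
	{
		(void)gart;
		return -ENODEV;
	}
	#endif

	int main(void)
	{
		struct fake_gart_device gart = { 0 };

		/* The caller only checks the return value; mc.c additionally
		 * guards the call with IS_ENABLED() and a NULL check. */
		printf("example_gart_suspend() -> %d\n", example_gart_suspend(&gart));
		return 0;
	}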