/linux-4.4.14/drivers/gpu/drm/radeon/ |
D | r600_dma.c |
    144   upper_32_bits(rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFF);   in r600_dma_resume()
    146   ((rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET) & 0xFFFFFFFC));   in r600_dma_resume()
    151   WREG32(DMA_RB_BASE, ring->gpu_addr >> 8);   in r600_dma_resume()
    237   u64 gpu_addr;   in r600_dma_ring_test() local
    244   gpu_addr = rdev->wb.gpu_addr + index;   in r600_dma_ring_test()
    255   radeon_ring_write(ring, lower_32_bits(gpu_addr));   in r600_dma_ring_test()
    256   radeon_ring_write(ring, upper_32_bits(gpu_addr) & 0xff);   in r600_dma_ring_test()
    291   u64 addr = rdev->fence_drv[fence->ring].gpu_addr;   in r600_dma_fence_ring_emit()
    318   u64 addr = semaphore->gpu_addr;   in r600_dma_semaphore_ring_emit()
    344   u64 gpu_addr;   in r600_dma_ib_test() local
    [all …]
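The r600_dma_resume() fragments above all show the same idiom: a 64-bit GPU address is programmed into 32-bit registers in two halves, and the ring base is written 256-byte aligned. A minimal sketch of that idiom follows; the rptr register names are placeholders (the snippets truncate them), while DMA_RB_BASE and the masks are taken from the hits.

    /* Hedged sketch of the split-address programming seen in r600_dma_resume().
     * DMA_RB_RPTR_ADDR_HI/LO stand in for the truncated register names. */
    u64 rptr_addr = rdev->wb.gpu_addr + R600_WB_DMA_RPTR_OFFSET;

    WREG32(DMA_RB_RPTR_ADDR_HI, upper_32_bits(rptr_addr) & 0xFF);        /* address bits 39:32 */
    WREG32(DMA_RB_RPTR_ADDR_LO, lower_32_bits(rptr_addr) & 0xFFFFFFFC);  /* low 32 bits, dword aligned */
    WREG32(DMA_RB_BASE, ring->gpu_addr >> 8);                            /* ring base in 256-byte units */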
|
D | uvd_v2_2.c | 43 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v2_2_fence_emit() 77 uint64_t addr = semaphore->gpu_addr; in uvd_v2_2_semaphore_emit() 113 addr = rdev->uvd.gpu_addr >> 3; in uvd_v2_2_resume() 129 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v2_2_resume() 133 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v2_2_resume()
|
D | cik_sdma.c |
    155   radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */   in cik_sdma_ring_ib_execute()
    156   radeon_ring_write(ring, upper_32_bits(ib->gpu_addr));   in cik_sdma_ring_ib_execute()
    204   u64 addr = rdev->fence_drv[fence->ring].gpu_addr;   in cik_sdma_fence_ring_emit()
    233   u64 addr = semaphore->gpu_addr;   in cik_sdma_semaphore_ring_emit()
    401   upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);   in cik_sdma_gfx_resume()
    403   ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));   in cik_sdma_gfx_resume()
    408   WREG32(SDMA0_GFX_RB_BASE + reg_offset, ring->gpu_addr >> 8);   in cik_sdma_gfx_resume()
    409   WREG32(SDMA0_GFX_RB_BASE_HI + reg_offset, ring->gpu_addr >> 40);   in cik_sdma_gfx_resume()
    652   u64 gpu_addr;   in cik_sdma_ring_test() local
    659   gpu_addr = rdev->wb.gpu_addr + index;   in cik_sdma_ring_test()
    [all …]
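The *_ring_test() hits here and in r600_dma.c above follow one technique: pick a slot in the GPU writeback page, have the engine write a magic value to that slot's GPU address, then poll the CPU-visible copy to confirm the ring is alive. Only the address plumbing is sketched below; the packet header, magic values and poll loop are omitted.

    /* Address plumbing for a DMA ring test, condensed from the hits above. */
    u64 gpu_addr = rdev->wb.gpu_addr + index;                 /* GPU view of the writeback slot */

    radeon_ring_write(ring, lower_32_bits(gpu_addr));         /* destination, low 32 bits */
    radeon_ring_write(ring, upper_32_bits(gpu_addr) & 0xff);  /* destination, bits 39:32
                                                               * (the CIK SDMA variant keeps
                                                               * the full upper dword) */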
|
D | uvd_v4_2.c | 44 addr = rdev->uvd.gpu_addr >> 3; in uvd_v4_2_resume() 60 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v4_2_resume() 64 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v4_2_resume()
|
D | r600_blit.c |
    77    set_render_target(drm_radeon_private_t *dev_priv, int format, int w, int h, u64 gpu_addr)   in set_render_target() argument
    97    OUT_RING(gpu_addr >> 8);   in set_render_target()
    104   OUT_RING(gpu_addr >> 8);   in set_render_target()
    160   u64 gpu_addr;   in set_shaders() local
    178   gpu_addr = dev_priv->gart_buffers_offset + dev_priv->blit_vb->offset;   in set_shaders()
    187   OUT_RING(gpu_addr >> 8);   in set_shaders()
    200   OUT_RING((gpu_addr + 256) >> 8);   in set_shaders()
    216   R600_SH_ACTION_ENA, 512, gpu_addr);   in set_shaders()
    220   set_vtx_resource(drm_radeon_private_t *dev_priv, u64 gpu_addr)   in set_vtx_resource() argument
    226   sq_vtx_constant_word2 = (((gpu_addr >> 32) & 0xff) | (16 << 8));   in set_vtx_resource()
    [all …]
|
D | radeon_semaphore.c | 51 (*semaphore)->gpu_addr = radeon_sa_bo_gpu_addr((*semaphore)->sa_bo); in radeon_semaphore_create() 69 ring->last_semaphore_signal_addr = semaphore->gpu_addr; in radeon_semaphore_emit_signal() 86 ring->last_semaphore_wait_addr = semaphore->gpu_addr; in radeon_semaphore_emit_wait()
|
D | uvd_v1_0.c | 85 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in uvd_v1_0_fence_emit() 121 addr = (rdev->uvd.gpu_addr >> 3) + 16; in uvd_v1_0_resume() 137 addr = (rdev->uvd.gpu_addr >> 28) & 0xF; in uvd_v1_0_resume() 141 addr = (rdev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v1_0_resume() 363 WREG32(UVD_LMI_EXT40_ADDR, upper_32_bits(ring->gpu_addr) | in uvd_v1_0_start() 373 WREG32(UVD_RBC_RB_BASE, ring->gpu_addr); in uvd_v1_0_start() 486 radeon_ring_write(ring, ib->gpu_addr); in uvd_v1_0_ib_execute()
|
D | vce_v1_0.c | 218 uint64_t addr = rdev->vce.gpu_addr; in vce_v1_0_resume() 300 WREG32(VCE_RB_BASE_LO, ring->gpu_addr); in vce_v1_0_start() 301 WREG32(VCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr)); in vce_v1_0_start() 307 WREG32(VCE_RB_BASE_LO2, ring->gpu_addr); in vce_v1_0_start() 308 WREG32(VCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr)); in vce_v1_0_start()
|
D | evergreen_dma.c | 45 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in evergreen_dma_fence_ring_emit() 89 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in evergreen_dma_ring_ib_execute() 90 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in evergreen_dma_ring_ib_execute()
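These two writes encode the whole DMA indirect-buffer launch on evergreen/cayman: the low dword carries the 32-byte-aligned IB base, and the next dword packs the IB length together with the top 8 address bits. A commented restatement follows; the INDIRECT_BUFFER packet header emitted just before these writes is not shown in the hits and is omitted here.

    /* Commented restatement of the evergreen_dma_ring_ib_execute() hits (lines 89-90). */
    radeon_ring_write(ring, ib->gpu_addr & 0xFFFFFFE0);            /* IB base, 32-byte aligned */
    radeon_ring_write(ring, (ib->length_dw << 12) |                /* IB size in dwords ... */
                            (upper_32_bits(ib->gpu_addr) & 0xFF)); /* ... plus address bits 39:32 */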
|
D | radeon_trace.h | 176 __field(uint64_t, gpu_addr) 182 __entry->gpu_addr = sem->gpu_addr; 186 __entry->waiters, __entry->gpu_addr)
|
D | radeon_object.h | 135 extern int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr); 137 u64 max_offset, u64 *gpu_addr); 165 return sa_bo->manager->gpu_addr + sa_bo->soffset; in radeon_sa_bo_gpu_addr()
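A minimal usage sketch for the pin helper declared above, assuming the BO has already been created and reserved by the caller; the VRAM domain constant is the one visible in the radeon_gart.c hit further down.

    u64 gpu_addr;
    int r;

    r = radeon_bo_pin(bo, RADEON_GEM_DOMAIN_VRAM, &gpu_addr);
    if (r)
            return r;
    /* gpu_addr now holds the buffer's GPU-visible offset, e.g. for a ring or
     * scanout base register; release it later with radeon_bo_unpin(bo). */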
|
D | ni_dma.c | 145 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in cayman_dma_ring_ib_execute() 146 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in cayman_dma_ring_ib_execute() 223 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFF); in cayman_dma_resume() 225 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cayman_dma_resume() 230 WREG32(DMA_RB_BASE + reg_offset, ring->gpu_addr >> 8); in cayman_dma_resume()
|
D | radeon_fence.c | 812 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + in radeon_fence_driver_start_ring() 819 rdev->fence_drv[ring].gpu_addr = rdev->uvd.gpu_addr + index; in radeon_fence_driver_start_ring() 832 rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index; in radeon_fence_driver_start_ring() 837 ring, rdev->fence_drv[ring].gpu_addr, rdev->fence_drv[ring].cpu_addr); in radeon_fence_driver_start_ring() 857 rdev->fence_drv[ring].gpu_addr = 0; in radeon_fence_driver_init_ring()
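Condensed from the radeon_fence_driver_start_ring() hits: most rings take a slot in the shared writeback page, while the UVD ring places its fence inside the UVD firmware buffer instead. The driver's real branching (scratch-register fallback, event writeback) is more involved than this sketch.

    /* Hedged condensation of the fence-address selection shown above. */
    if (ring == R600_RING_TYPE_UVD_INDEX)
            rdev->fence_drv[ring].gpu_addr = rdev->uvd.gpu_addr + index;
    else
            rdev->fence_drv[ring].gpu_addr = rdev->wb.gpu_addr + index;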
|
D | radeon_vce.c | 158 &rdev->vce.gpu_addr); in radeon_vce_init() 360 dummy = ib.gpu_addr + 1024; in radeon_vce_get_create_msg() 427 dummy = ib.gpu_addr + 1024; in radeon_vce_get_destroy_msg() 700 uint64_t addr = semaphore->gpu_addr; in radeon_vce_semaphore_emit() 723 radeon_ring_write(ring, cpu_to_le32(ib->gpu_addr)); in radeon_vce_ib_execute() 724 radeon_ring_write(ring, cpu_to_le32(upper_32_bits(ib->gpu_addr))); in radeon_vce_ib_execute() 739 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr; in radeon_vce_fence_emit()
|
D | radeon_object.c | 330 u64 *gpu_addr) in radeon_bo_pin_restricted() argument 339 if (gpu_addr) in radeon_bo_pin_restricted() 340 *gpu_addr = radeon_bo_gpu_offset(bo); in radeon_bo_pin_restricted() 372 if (gpu_addr != NULL) in radeon_bo_pin_restricted() 373 *gpu_addr = radeon_bo_gpu_offset(bo); in radeon_bo_pin_restricted() 384 int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr) in radeon_bo_pin() argument 386 return radeon_bo_pin_restricted(bo, domain, 0, gpu_addr); in radeon_bo_pin()
|
D | radeon_kfd.c | 46 uint64_t gpu_addr; member 52 void **mem_obj, uint64_t *gpu_addr, 231 void **mem_obj, uint64_t *gpu_addr, in alloc_gtt_mem() argument 239 BUG_ON(gpu_addr == NULL); in alloc_gtt_mem() 262 &(*mem)->gpu_addr); in alloc_gtt_mem() 267 *gpu_addr = (*mem)->gpu_addr; in alloc_gtt_mem()
|
D | radeon_gart.c | 151 uint64_t gpu_addr; in radeon_gart_table_vram_pin() local 158 RADEON_GEM_DOMAIN_VRAM, &gpu_addr); in radeon_gart_table_vram_pin() 167 rdev->gart.table_addr = gpu_addr; in radeon_gart_table_vram_pin()
|
D | uvd_v3_1.c | 45 uint64_t addr = semaphore->gpu_addr; in uvd_v3_1_semaphore_emit()
|
D | radeon_ib.c | 77 ib->gpu_addr = ib->sa_bo->soffset + RADEON_VA_IB_OFFSET; in radeon_ib_get() 79 ib->gpu_addr = radeon_sa_bo_gpu_addr(ib->sa_bo); in radeon_ib_get()
|
D | radeon_sa.c | 112 r = radeon_bo_pin(sa_manager->bo, sa_manager->domain, &sa_manager->gpu_addr); in radeon_sa_bo_manager_start() 407 uint64_t soffset = i->soffset + sa_manager->gpu_addr; in radeon_sa_bo_dump_debug_info() 408 uint64_t eoffset = i->eoffset + sa_manager->gpu_addr; in radeon_sa_bo_dump_debug_info()
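Read together, the radeon_sa.c, radeon_ib.c and radeon_object.h hits describe how an indirect buffer gets its GPU address: the sub-allocator's backing BO is pinned once at start-up, and every sub-allocation's address is simply the manager base plus its start offset. A reconstruction of the helper follows (signature recalled, not quoted from the hits).

    /* radeon_sa_bo_gpu_addr(), as implied by the radeon_object.h hit at line 165. */
    static inline u64 radeon_sa_bo_gpu_addr(struct radeon_sa_bo *sa_bo)
    {
            return sa_bo->manager->gpu_addr + sa_bo->soffset;  /* pinned base + sub-block offset */
    }

radeon_ib_get() then stores this value in ib->gpu_addr (radeon_ib.c, line 79), which is what the various *_ring_ib_execute() hits feed to the hardware.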
|
D | radeon_uvd.c | 159 &rdev->uvd.gpu_addr); in radeon_uvd_init() 581 (start >> 28) != (p->rdev->uvd.gpu_addr >> 28)) { in radeon_uvd_cs_reloc() 736 uint64_t addr = rdev->uvd.gpu_addr + offs; in radeon_uvd_get_create_msg() 772 uint64_t addr = rdev->uvd.gpu_addr + offs; in radeon_uvd_get_destroy_msg()
|
D | vce_v2_0.c | 155 uint64_t addr = rdev->vce.gpu_addr; in vce_v2_0_resume()
|
D | radeon_ring.c | 396 &ring->gpu_addr); in radeon_ring_init() 414 ring->next_rptr_gpu_addr = rdev->wb.gpu_addr + index; in radeon_ring_init()
|
D | r600.c |
    1336   WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);   in r600_mc_program()
    1517   RADEON_GEM_DOMAIN_VRAM, &rdev->vram_scratch.gpu_addr);   in r600_vram_scratch_init()
    2746   ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));   in r600_cp_resume()
    2747   WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);   in r600_cp_resume()
    2748   WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);   in r600_cp_resume()
    2760   WREG32(CP_RB_BASE, ring->gpu_addr >> 8);   in r600_cp_resume()
    2875   u64 addr = rdev->fence_drv[fence->ring].gpu_addr;   in r600_fence_ring_emit()
    2928   uint64_t addr = semaphore->gpu_addr;   in r600_semaphore_ring_emit()
    3351   (ib->gpu_addr & 0xFFFFFFFC));   in r600_ring_ib_execute()
    3352   radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);   in r600_ring_ib_execute()
    [all …]
|
D | radeon.h | 357 uint64_t gpu_addr; member 546 uint64_t gpu_addr; member 592 uint64_t gpu_addr; member 838 uint64_t gpu_addr; member 861 uint64_t gpu_addr; member 988 uint64_t gpu_addr; member 1144 uint64_t gpu_addr; member 1679 uint64_t gpu_addr; member 1719 uint64_t gpu_addr; member 2254 u64 gpu_addr; member
|
D | si.c |
    3375   u64 addr = rdev->fence_drv[fence->ring].gpu_addr;   in si_fence_ring_emit()
    3438   (ib->gpu_addr & 0xFFFFFFFC));   in si_ring_ib_execute()
    3439   radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);   in si_ring_ib_execute()
    3661   WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);   in si_cp_resume()
    3679   WREG32(CP_RB0_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC);   in si_cp_resume()
    3680   WREG32(CP_RB0_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);   in si_cp_resume()
    3692   WREG32(CP_RB0_BASE, ring->gpu_addr >> 8);   in si_cp_resume()
    3710   WREG32(CP_RB1_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFFFFFFFC);   in si_cp_resume()
    3711   WREG32(CP_RB1_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFF);   in si_cp_resume()
    3716   WREG32(CP_RB1_BASE, ring->gpu_addr >> 8);   in si_cp_resume()
    [all …]
|
D | ni.c | 1412 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cayman_fence_ring_emit() 1455 (ib->gpu_addr & 0xFFFFFFFC)); in cayman_ring_ib_execute() 1456 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF); in cayman_ring_ib_execute() 1691 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF); in cayman_cp_resume() 1708 addr = rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET; in cayman_cp_resume() 1716 WREG32(cp_rb_base[i], ring->gpu_addr >> 8); in cayman_cp_resume()
|
D | cik.c |
    3966   u64 addr = rdev->fence_drv[fence->ring].gpu_addr;   in cik_fence_gfx_ring_emit()
    4007   u64 addr = rdev->fence_drv[fence->ring].gpu_addr;   in cik_fence_compute_ring_emit()
    4038   uint64_t addr = semaphore->gpu_addr;   in cik_semaphore_ring_emit()
    4176   radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFFC));   in cik_ring_ib_execute()
    4177   radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);   in cik_ring_ib_execute()
    4479   WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);   in cik_cp_gfx_resume()
    4497   WREG32(CP_RB0_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC);   in cik_cp_gfx_resume()
    4498   WREG32(CP_RB0_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);   in cik_cp_gfx_resume()
    4509   rb_addr = ring->gpu_addr >> 8;   in cik_cp_gfx_resume()
    5064   hqd_gpu_addr = rdev->ring[idx].gpu_addr >> 8;   in cik_cp_compute_resume()
    [all …]
|
D | evergreen.c | 2982 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12); in evergreen_mc_program() 3048 (ib->gpu_addr & 0xFFFFFFFC)); in evergreen_ring_ib_execute() 3049 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF); in evergreen_ring_ib_execute() 3192 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC)); in evergreen_cp_resume() 3193 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF); in evergreen_cp_resume() 3194 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF); in evergreen_cp_resume() 3206 WREG32(CP_RB_BASE, ring->gpu_addr >> 8); in evergreen_cp_resume()
|
D | r100.c | 1177 DRM_INFO("radeon: ring at 0x%016lX\n", (unsigned long)ring->gpu_addr); in r100_cp_init() 1178 WREG32(RADEON_CP_RB_BASE, ring->gpu_addr); in r100_cp_init() 1187 S_00070C_RB_RPTR_ADDR((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) >> 2)); in r100_cp_init() 1188 WREG32(R_000774_SCRATCH_ADDR, rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET); in r100_cp_init() 3697 radeon_ring_write(ring, ib->gpu_addr); in r100_ring_ib_execute()
|
D | radeon_device.c | 480 &rdev->wb.gpu_addr); in radeon_wb_init()
|
D | rv770.c | 1048 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12); in rv770_mc_program()
|
/linux-4.4.14/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_semaphore.c | 51 (*semaphore)->gpu_addr = amdgpu_sa_bo_gpu_addr((*semaphore)->sa_bo); in amdgpu_semaphore_create() 67 ring->last_semaphore_signal_addr = semaphore->gpu_addr; in amdgpu_semaphore_emit_signal() 82 ring->last_semaphore_wait_addr = semaphore->gpu_addr; in amdgpu_semaphore_emit_wait()
|
D | amdgpu_amdkfd.c | 170 void **mem_obj, uint64_t *gpu_addr, in alloc_gtt_mem() argument 178 BUG_ON(gpu_addr == NULL); in alloc_gtt_mem() 201 &(*mem)->gpu_addr); in alloc_gtt_mem() 206 *gpu_addr = (*mem)->gpu_addr; in alloc_gtt_mem()
|
D | amdgpu_amdkfd.h | 35 uint64_t gpu_addr; member 57 void **mem_obj, uint64_t *gpu_addr,
|
D | cik_sdma.c |
    232   amdgpu_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */   in cik_sdma_ring_emit_ib()
    233   amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xffffffff);   in cik_sdma_ring_emit_ib()
    311   u64 addr = semaphore->gpu_addr;   in cik_sdma_ring_emit_semaphore()
    438   upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);   in cik_sdma_gfx_resume()
    440   ((adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC));   in cik_sdma_gfx_resume()
    444   WREG32(mmSDMA0_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8);   in cik_sdma_gfx_resume()
    445   WREG32(mmSDMA0_GFX_RB_BASE_HI + sdma_offsets[i], ring->gpu_addr >> 40);   in cik_sdma_gfx_resume()
    575   u64 gpu_addr;   in cik_sdma_ring_test_ring() local
    583   gpu_addr = adev->wb.gpu_addr + (index * 4);   in cik_sdma_ring_test_ring()
    594   amdgpu_ring_write(ring, lower_32_bits(gpu_addr));   in cik_sdma_ring_test_ring()
    [all …]
|
D | sdma_v2_4.c |
    268   amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);   in sdma_v2_4_ring_emit_ib()
    269   amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));   in sdma_v2_4_ring_emit_ib()
    351   u64 addr = semaphore->gpu_addr;   in sdma_v2_4_ring_emit_semaphore()
    481   upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);   in sdma_v2_4_gfx_resume()
    483   lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);   in sdma_v2_4_gfx_resume()
    487   WREG32(mmSDMA0_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8);   in sdma_v2_4_gfx_resume()
    488   WREG32(mmSDMA0_GFX_RB_BASE_HI + sdma_offsets[i], ring->gpu_addr >> 40);   in sdma_v2_4_gfx_resume()
    627   u64 gpu_addr;   in sdma_v2_4_ring_test_ring() local
    635   gpu_addr = adev->wb.gpu_addr + (index * 4);   in sdma_v2_4_ring_test_ring()
    648   amdgpu_ring_write(ring, lower_32_bits(gpu_addr));   in sdma_v2_4_ring_test_ring()
    [all …]
|
D | vce_v3_0.c | 190 WREG32(mmVCE_RB_BASE_LO, ring->gpu_addr); in vce_v3_0_start() 191 WREG32(mmVCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr)); in vce_v3_0_start() 197 WREG32(mmVCE_RB_BASE_LO2, ring->gpu_addr); in vce_v3_0_start() 198 WREG32(mmVCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr)); in vce_v3_0_start() 400 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR0, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume() 401 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR1, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume() 402 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR2, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume() 404 WREG32(mmVCE_LMI_VCPU_CACHE_40BIT_BAR, (adev->vce.gpu_addr >> 8)); in vce_v3_0_mc_resume()
|
D | sdma_v3_0.c |
    378   amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);   in sdma_v3_0_ring_emit_ib()
    379   amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));   in sdma_v3_0_ring_emit_ib()
    462   u64 addr = semaphore->gpu_addr;   in sdma_v3_0_ring_emit_semaphore()
    618   upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF);   in sdma_v3_0_gfx_resume()
    620   lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC);   in sdma_v3_0_gfx_resume()
    624   WREG32(mmSDMA0_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8);   in sdma_v3_0_gfx_resume()
    625   WREG32(mmSDMA0_GFX_RB_BASE_HI + sdma_offsets[i], ring->gpu_addr >> 40);   in sdma_v3_0_gfx_resume()
    777   u64 gpu_addr;   in sdma_v3_0_ring_test_ring() local
    785   gpu_addr = adev->wb.gpu_addr + (index * 4);   in sdma_v3_0_ring_test_ring()
    798   amdgpu_ring_write(ring, lower_32_bits(gpu_addr));   in sdma_v3_0_ring_test_ring()
    [all …]
|
D | amdgpu_fence.c | 117 amdgpu_ring_emit_fence(ring, ring->fence_drv.gpu_addr, in amdgpu_fence_emit() 443 ring->fence_drv.gpu_addr = adev->wb.gpu_addr + (ring->fence_offs * 4); in amdgpu_fence_driver_start_ring() 448 ring->fence_drv.gpu_addr = adev->uvd.gpu_addr + index; in amdgpu_fence_driver_start_ring() 459 ring->fence_drv.gpu_addr, ring->fence_drv.cpu_addr); in amdgpu_fence_driver_start_ring() 477 ring->fence_drv.gpu_addr = 0; in amdgpu_fence_driver_init_ring()
|
D | amdgpu_object.h | 145 int amdgpu_bo_pin(struct amdgpu_bo *bo, u32 domain, u64 *gpu_addr); 148 u64 *gpu_addr); 175 return sa_bo->manager->gpu_addr + sa_bo->soffset; in amdgpu_sa_bo_gpu_addr()
|
D | uvd_v6_0.c | 262 lower_32_bits(adev->uvd.gpu_addr)); in uvd_v6_0_mc_resume() 264 upper_32_bits(adev->uvd.gpu_addr)); in uvd_v6_0_mc_resume() 409 WREG32(mmUVD_RBC_RB_RPTR_ADDR, (upper_32_bits(ring->gpu_addr) >> 2)); in uvd_v6_0_start() 413 lower_32_bits(ring->gpu_addr)); in uvd_v6_0_start() 415 upper_32_bits(ring->gpu_addr)); in uvd_v6_0_start() 498 uint64_t addr = semaphore->gpu_addr; in uvd_v6_0_ring_emit_semaphore() 566 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr)); in uvd_v6_0_ring_emit_ib() 568 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in uvd_v6_0_ring_emit_ib()
|
D | uvd_v5_0.c | 264 lower_32_bits(adev->uvd.gpu_addr)); in uvd_v5_0_mc_resume() 266 upper_32_bits(adev->uvd.gpu_addr)); in uvd_v5_0_mc_resume() 409 WREG32(mmUVD_RBC_RB_RPTR_ADDR, (upper_32_bits(ring->gpu_addr) >> 2)); in uvd_v5_0_start() 413 lower_32_bits(ring->gpu_addr)); in uvd_v5_0_start() 415 upper_32_bits(ring->gpu_addr)); in uvd_v5_0_start() 498 uint64_t addr = semaphore->gpu_addr; in uvd_v5_0_ring_emit_semaphore() 566 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr)); in uvd_v5_0_ring_emit_ib() 568 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in uvd_v5_0_ring_emit_ib()
|
D | uvd_v4_2.c | 364 WREG32(mmUVD_LMI_EXT40_ADDR, upper_32_bits(ring->gpu_addr) | in uvd_v4_2_start() 374 WREG32(mmUVD_RBC_RB_BASE, ring->gpu_addr); in uvd_v4_2_start() 454 uint64_t addr = semaphore->gpu_addr; in uvd_v4_2_ring_emit_semaphore() 522 amdgpu_ring_write(ring, ib->gpu_addr); in uvd_v4_2_ring_emit_ib() 583 addr = (adev->uvd.gpu_addr + AMDGPU_UVD_FIRMWARE_OFFSET) >> 3; in uvd_v4_2_mc_resume() 599 addr = (adev->uvd.gpu_addr >> 28) & 0xF; in uvd_v4_2_mc_resume() 603 addr = (adev->uvd.gpu_addr >> 32) & 0xFF; in uvd_v4_2_mc_resume()
|
D | amdgpu_object.c | 365 u64 *gpu_addr) in amdgpu_bo_pin_restricted() argument 378 if (gpu_addr) in amdgpu_bo_pin_restricted() 379 *gpu_addr = amdgpu_bo_gpu_offset(bo); in amdgpu_bo_pin_restricted() 419 if (gpu_addr != NULL) in amdgpu_bo_pin_restricted() 420 *gpu_addr = amdgpu_bo_gpu_offset(bo); in amdgpu_bo_pin_restricted() 431 int amdgpu_bo_pin(struct amdgpu_bo *bo, u32 domain, u64 *gpu_addr) in amdgpu_bo_pin() argument 433 return amdgpu_bo_pin_restricted(bo, domain, 0, 0, gpu_addr); in amdgpu_bo_pin()
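The hits at lines 431-433 show that the unrestricted amdgpu pin is only a thin wrapper: it calls the restricted variant with a zero min/max offset, i.e. no placement window.

    int amdgpu_bo_pin(struct amdgpu_bo *bo, u32 domain, u64 *gpu_addr)
    {
            /* 0/0 = no constraint on where in the domain the BO may land */
            return amdgpu_bo_pin_restricted(bo, domain, 0, 0, gpu_addr);
    }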
|
D | amdgpu_trace.h | 259 __field(uint64_t, gpu_addr) 265 __entry->gpu_addr = sem->gpu_addr; 269 __entry->waiters, __entry->gpu_addr)
|
D | vce_v2_0.c | 119 WREG32(mmVCE_RB_BASE_LO, ring->gpu_addr); in vce_v2_0_start() 120 WREG32(mmVCE_RB_BASE_HI, upper_32_bits(ring->gpu_addr)); in vce_v2_0_start() 126 WREG32(mmVCE_RB_BASE_LO2, ring->gpu_addr); in vce_v2_0_start() 127 WREG32(mmVCE_RB_BASE_HI2, upper_32_bits(ring->gpu_addr)); in vce_v2_0_start() 414 uint64_t addr = adev->vce.gpu_addr; in vce_v2_0_mc_resume()
|
D | amdgpu_gart.c | 150 uint64_t gpu_addr; in amdgpu_gart_table_vram_pin() local 157 AMDGPU_GEM_DOMAIN_VRAM, &gpu_addr); in amdgpu_gart_table_vram_pin() 166 adev->gart.table_addr = gpu_addr; in amdgpu_gart_table_vram_pin()
|
D | amdgpu_vce.c | 165 &adev->vce.gpu_addr); in amdgpu_vce_sw_init() 388 dummy = ib->gpu_addr + 1024; in amdgpu_vce_get_create_msg() 475 dummy = ib->gpu_addr + 1024; in amdgpu_vce_get_destroy_msg() 758 uint64_t addr = semaphore->gpu_addr; in amdgpu_vce_ring_emit_semaphore() 780 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr)); in amdgpu_vce_ring_emit_ib() 781 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in amdgpu_vce_ring_emit_ib()
|
D | amdgpu_ih.h | 37 uint64_t gpu_addr; member
|
D | fiji_smc.c | 393 uint64_t gpu_addr; in fiji_smu_populate_single_firmware_entry() local 398 gpu_addr = ucode->mc_addr; in fiji_smu_populate_single_firmware_entry() 404 gpu_addr += le32_to_cpu(header->jt_offset) << 2; in fiji_smu_populate_single_firmware_entry() 410 entry->image_addr_high = upper_32_bits(gpu_addr); in fiji_smu_populate_single_firmware_entry() 411 entry->image_addr_low = lower_32_bits(gpu_addr); in fiji_smu_populate_single_firmware_entry()
|
D | tonga_smc.c | 394 uint64_t gpu_addr; in tonga_smu_populate_single_firmware_entry() local 400 gpu_addr = ucode->mc_addr; in tonga_smu_populate_single_firmware_entry() 406 gpu_addr += le32_to_cpu(header->jt_offset) << 2; in tonga_smu_populate_single_firmware_entry() 412 entry->image_addr_high = upper_32_bits(gpu_addr); in tonga_smu_populate_single_firmware_entry() 413 entry->image_addr_low = lower_32_bits(gpu_addr); in tonga_smu_populate_single_firmware_entry()
|
D | iceland_smc.c | 453 uint64_t gpu_addr; in iceland_smu_populate_single_firmware_entry() local 459 gpu_addr = ucode->mc_addr; in iceland_smu_populate_single_firmware_entry() 465 entry->image_addr_high = upper_32_bits(gpu_addr); in iceland_smu_populate_single_firmware_entry() 466 entry->image_addr_low = lower_32_bits(gpu_addr); in iceland_smu_populate_single_firmware_entry()
|
D | amdgpu_sa.c | 112 r = amdgpu_bo_pin(sa_manager->bo, sa_manager->domain, &sa_manager->gpu_addr); in amdgpu_sa_bo_manager_start() 436 uint64_t soffset = i->soffset + sa_manager->gpu_addr; in amdgpu_sa_bo_dump_debug_info() 437 uint64_t eoffset = i->eoffset + sa_manager->gpu_addr; in amdgpu_sa_bo_dump_debug_info()
|
D | cz_smc.c | 453 uint64_t gpu_addr; in cz_smu_populate_single_firmware_entry() local 463 gpu_addr = ucode->mc_addr; in cz_smu_populate_single_firmware_entry() 469 gpu_addr += le32_to_cpu(header->jt_offset) << 2; in cz_smu_populate_single_firmware_entry() 473 entry->mc_addr_low = lower_32_bits(gpu_addr); in cz_smu_populate_single_firmware_entry() 474 entry->mc_addr_high = upper_32_bits(gpu_addr); in cz_smu_populate_single_firmware_entry()
|
D | amdgpu_cgs.c | 670 uint64_t gpu_addr; in amdgpu_cgs_get_firmware_info() local 681 gpu_addr = ucode->mc_addr; in amdgpu_cgs_get_firmware_info() 687 gpu_addr += le32_to_cpu(header->jt_offset) << 2; in amdgpu_cgs_get_firmware_info() 690 info->mc_addr = gpu_addr; in amdgpu_cgs_get_firmware_info()
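fiji_smc.c, tonga_smc.c, cz_smc.c and amdgpu_cgs.c all repeat the same firmware-table calculation: start from the ucode's MC address, optionally advance by the jump-table offset (stored in dwords in the common header), then split the result for the SMU entry. Condensed below, with the field names taken from the fiji/tonga hits.

    /* Condensed from the populate_single_firmware_entry() hits above. */
    uint64_t gpu_addr = ucode->mc_addr;

    gpu_addr += le32_to_cpu(header->jt_offset) << 2;   /* jump-table offset, dwords -> bytes */
    entry->image_addr_high = upper_32_bits(gpu_addr);
    entry->image_addr_low  = lower_32_bits(gpu_addr);

cz_smc.c stores the same split into mc_addr_high/low, and iceland_smc.c skips the jump-table adjustment.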
|
D | cik_ih.c | 125 WREG32(mmIH_RB_BASE, adev->irq.ih.gpu_addr >> 8); in cik_ih_irq_init() 135 wptr_off = adev->wb.gpu_addr + (adev->irq.ih.wptr_offs * 4); in cik_ih_irq_init()
|
D | cz_ih.c | 126 WREG32(mmIH_RB_BASE, adev->irq.ih.gpu_addr >> 8); in cz_ih_irq_init() 137 wptr_off = adev->wb.gpu_addr + (adev->irq.ih.wptr_offs * 4); in cz_ih_irq_init()
|
D | iceland_ih.c | 126 WREG32(mmIH_RB_BASE, adev->irq.ih.gpu_addr >> 8); in iceland_ih_irq_init() 137 wptr_off = adev->wb.gpu_addr + (adev->irq.ih.wptr_offs * 4); in iceland_ih_irq_init()
|
D | tonga_ih.c | 125 WREG32(mmIH_RB_BASE, adev->irq.ih.gpu_addr >> 8); in tonga_ih_irq_init() 143 wptr_off = adev->wb.gpu_addr + (adev->irq.ih.wptr_offs * 4); in tonga_ih_irq_init()
|
D | amdgpu.h | 392 uint64_t gpu_addr; member 601 uint64_t gpu_addr; member 645 uint64_t gpu_addr; member 808 uint64_t gpu_addr; member 860 uint64_t gpu_addr; member 1294 uint64_t gpu_addr; member 1675 uint64_t gpu_addr; member 1697 uint64_t gpu_addr; member 1878 u64 gpu_addr; member
|
D | amdgpu_ring.c | 346 ring->next_rptr_gpu_addr = adev->wb.gpu_addr + (ring->next_rptr_offs * 4); in amdgpu_ring_init() 377 &ring->gpu_addr); in amdgpu_ring_init()
|
D | amdgpu_ih.c | 56 &adev->irq.ih.gpu_addr); in amdgpu_ih_ring_alloc()
|
D | amdgpu_ib.c | 75 ib->gpu_addr = amdgpu_sa_bo_gpu_addr(ib->sa_bo); in amdgpu_ib_get()
|
D | gfx_v8_0.c |
    3328   rptr_addr = adev->wb.gpu_addr + (ring->rptr_offs * 4);   in gfx_v8_0_cp_gfx_resume()
    3335   rb_addr = ring->gpu_addr >> 8;   in gfx_v8_0_cp_gfx_resume()
    3866   hqd_gpu_addr = ring->gpu_addr >> 8;   in gfx_v8_0_cp_compute_resume()
    3889   wb_gpu_addr = adev->wb.gpu_addr + (ring->rptr_offs * 4);   in gfx_v8_0_cp_compute_resume()
    3899   wb_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4);   in gfx_v8_0_cp_compute_resume()
    4582   (ib->gpu_addr & 0xFFFFFFFC));   in gfx_v8_0_ring_emit_ib_gfx()
    4583   amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);   in gfx_v8_0_ring_emit_ib_gfx()
    4612   (ib->gpu_addr & 0xFFFFFFFC));   in gfx_v8_0_ring_emit_ib_compute()
    4613   amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);   in gfx_v8_0_ring_emit_ib_compute()
    4651   uint64_t addr = semaphore->gpu_addr;   in gfx_v8_0_ring_emit_semaphore()
    [all …]
|
D | gfx_v7_0.c |
    2533   uint64_t addr = semaphore->gpu_addr;   in gfx_v7_0_ring_emit_semaphore()
    2604   (ib->gpu_addr & 0xFFFFFFFC));   in gfx_v7_0_ring_emit_ib_gfx()
    2605   amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);   in gfx_v7_0_ring_emit_ib_gfx()
    2633   (ib->gpu_addr & 0xFFFFFFFC));   in gfx_v7_0_ring_emit_ib_compute()
    2634   amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);   in gfx_v7_0_ring_emit_ib_compute()
    2964   rptr_addr = adev->wb.gpu_addr + (ring->rptr_offs * 4);   in gfx_v7_0_cp_gfx_resume()
    2974   rb_addr = ring->gpu_addr >> 8;   in gfx_v7_0_cp_gfx_resume()
    3460   hqd_gpu_addr = ring->gpu_addr >> 8;   in gfx_v7_0_cp_compute_resume()
    3490   wb_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4);   in gfx_v7_0_cp_compute_resume()
    3498   wb_gpu_addr = adev->wb.gpu_addr + (ring->rptr_offs * 4);   in gfx_v7_0_cp_compute_resume()
    [all …]
|
D | amdgpu_uvd.c | 181 &adev->uvd.gpu_addr); in amdgpu_uvd_sw_init() 679 (start >> 28) != (ctx->parser->adev->uvd.gpu_addr >> 28)) { in amdgpu_uvd_cs_pass2()
|
D | amdgpu_device.c | 260 AMDGPU_GEM_DOMAIN_VRAM, &adev->vram_scratch.gpu_addr); in amdgpu_vram_scratch_init() 465 &adev->wb.gpu_addr); in amdgpu_wb_init()
|
D | amdgpu_cs.c | 686 ib->gpu_addr = chunk_ib->va_start; in amdgpu_cs_ib_fill()
|
D | gmc_v8_0.c | 364 adev->vram_scratch.gpu_addr >> 12); in gmc_v8_0_mc_program()
|
D | gmc_v7_0.c | 323 adev->vram_scratch.gpu_addr >> 12); in gmc_v7_0_mc_program()
|
/linux-4.4.14/drivers/gpu/drm/mgag200/ |
D | mgag200_cursor.c | 53 u64 gpu_addr; in mga_crtc_cursor_set() local 218 gpu_addr = mdev->cursor.pixels_1_gpu_addr; in mga_crtc_cursor_set() 220 gpu_addr = mdev->cursor.pixels_2_gpu_addr; in mga_crtc_cursor_set() 221 WREG_DAC(MGA1064_CURSOR_BASE_ADR_LOW, (u8)((gpu_addr>>10) & 0xff)); in mga_crtc_cursor_set() 222 WREG_DAC(MGA1064_CURSOR_BASE_ADR_HI, (u8)((gpu_addr>>18) & 0x3f)); in mga_crtc_cursor_set()
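The two WREG_DAC writes above imply that the MGA cursor base must be 1 KiB aligned: the LOW register takes address bits 17:10 and the HI register bits 23:18.

    /* Bit layout of the cursor base, restated from the hits above. */
    WREG_DAC(MGA1064_CURSOR_BASE_ADR_LOW, (u8)((gpu_addr >> 10) & 0xff));  /* bits 17:10 */
    WREG_DAC(MGA1064_CURSOR_BASE_ADR_HI,  (u8)((gpu_addr >> 18) & 0x3f));  /* bits 23:18 */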
|
D | mgag200_ttm.c | 355 int mgag200_bo_pin(struct mgag200_bo *bo, u32 pl_flag, u64 *gpu_addr) in mgag200_bo_pin() argument 361 if (gpu_addr) in mgag200_bo_pin() 362 *gpu_addr = mgag200_bo_gpu_offset(bo); in mgag200_bo_pin() 374 if (gpu_addr) in mgag200_bo_pin() 375 *gpu_addr = mgag200_bo_gpu_offset(bo); in mgag200_bo_pin()
|
D | mgag200_drv.h | 303 int mgag200_bo_pin(struct mgag200_bo *bo, u32 pl_flag, u64 *gpu_addr);
|
D | mgag200_mode.c | 832 u64 gpu_addr; in mga_crtc_do_set_base() local 854 ret = mgag200_bo_pin(bo, TTM_PL_FLAG_VRAM, &gpu_addr); in mga_crtc_do_set_base() 869 mga_set_start_address(crtc, (u32)gpu_addr); in mga_crtc_do_set_base()
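The scanout path in mga_crtc_do_set_base() is the typical small-driver pattern: pin the framebuffer BO into VRAM, take the returned GPU offset, and program it (truncated to 32 bits) as the CRTC start address. Sketch, with reservation and error handling omitted:

    u64 gpu_addr;
    int ret;

    ret = mgag200_bo_pin(bo, TTM_PL_FLAG_VRAM, &gpu_addr);   /* VRAM offset via out-parameter */
    if (ret)
            return ret;
    mga_set_start_address(crtc, (u32)gpu_addr);              /* program scanout base */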
|
/linux-4.4.14/drivers/gpu/drm/qxl/ |
D | qxl_object.c | 224 int qxl_bo_pin(struct qxl_bo *bo, u32 domain, u64 *gpu_addr) in qxl_bo_pin() argument 231 if (gpu_addr) in qxl_bo_pin() 232 *gpu_addr = qxl_bo_gpu_offset(bo); in qxl_bo_pin() 239 if (gpu_addr != NULL) in qxl_bo_pin() 240 *gpu_addr = qxl_bo_gpu_offset(bo); in qxl_bo_pin()
|
D | qxl_object.h | 98 extern int qxl_bo_pin(struct qxl_bo *bo, u32 domain, u64 *gpu_addr);
|
/linux-4.4.14/drivers/gpu/drm/ast/ |
D | ast_mode.c |
    517    u64 gpu_addr;   in ast_crtc_do_set_base() local
    539    ret = ast_bo_pin(bo, TTM_PL_FLAG_VRAM, &gpu_addr);   in ast_crtc_do_set_base()
    551    ast_fbdev_set_base(ast, gpu_addr);   in ast_crtc_do_set_base()
    555    ast_set_start_address_crt1(crtc, (u32)gpu_addr);   in ast_crtc_do_set_base()
    915    uint64_t gpu_addr;   in ast_cursor_init() local
    927    ret = ast_bo_pin(bo, TTM_PL_FLAG_VRAM, &gpu_addr);   in ast_cursor_init()
    938    ast->cursor_cache_gpu_addr = gpu_addr;   in ast_cursor_init()
    1146   uint64_t gpu_addr;   in ast_cursor_set() local
    1198   gpu_addr = ast->cursor_cache_gpu_addr;   in ast_cursor_set()
    1199   gpu_addr += (AST_HWC_SIZE + AST_HWC_SIGNATURE_SIZE)*ast->next_cursor;   in ast_cursor_set()
    [all …]
|
D | ast_ttm.c | 355 int ast_bo_pin(struct ast_bo *bo, u32 pl_flag, u64 *gpu_addr) in ast_bo_pin() argument 361 if (gpu_addr) in ast_bo_pin() 362 *gpu_addr = ast_bo_gpu_offset(bo); in ast_bo_pin() 373 if (gpu_addr) in ast_bo_pin() 374 *gpu_addr = ast_bo_gpu_offset(bo); in ast_bo_pin()
|
D | ast_fb.c | 369 void ast_fbdev_set_base(struct ast_private *ast, unsigned long gpu_addr) in ast_fbdev_set_base() argument 372 ast->fbdev->helper.fbdev->apertures->ranges[0].base + gpu_addr; in ast_fbdev_set_base() 373 ast->fbdev->helper.fbdev->fix.smem_len = ast->vram_size - gpu_addr; in ast_fbdev_set_base()
|
D | ast_drv.h | 318 void ast_fbdev_set_base(struct ast_private *ast, unsigned long gpu_addr); 364 int ast_bo_pin(struct ast_bo *bo, u32 pl_flag, u64 *gpu_addr);
|
/linux-4.4.14/drivers/gpu/drm/cirrus/ |
D | cirrus_ttm.c | 359 int cirrus_bo_pin(struct cirrus_bo *bo, u32 pl_flag, u64 *gpu_addr) in cirrus_bo_pin() argument 365 if (gpu_addr) in cirrus_bo_pin() 366 *gpu_addr = cirrus_bo_gpu_offset(bo); in cirrus_bo_pin() 377 if (gpu_addr) in cirrus_bo_pin() 378 *gpu_addr = cirrus_bo_gpu_offset(bo); in cirrus_bo_pin()
|
D | cirrus_mode.c | 139 u64 gpu_addr; in cirrus_crtc_do_set_base() local 161 ret = cirrus_bo_pin(bo, TTM_PL_FLAG_VRAM, &gpu_addr); in cirrus_crtc_do_set_base() 175 cirrus_set_start_address(crtc, (u32)gpu_addr); in cirrus_crtc_do_set_base()
|
D | cirrus_drv.h | 264 int cirrus_bo_pin(struct cirrus_bo *bo, u32 pl_flag, u64 *gpu_addr);
|
/linux-4.4.14/drivers/gpu/drm/amd/amdkfd/ |
D | kfd_kernel_queue.c | 84 kq->pq_gpu_addr = kq->pq->gpu_addr; in initialize() 97 kq->rptr_gpu_addr = kq->rptr_mem->gpu_addr; in initialize() 106 kq->wptr_gpu_addr = kq->wptr_mem->gpu_addr; in initialize() 154 kq->fence_gpu_addr = kq->fence_mem_obj->gpu_addr; in initialize()
|
D | kfd_kernel_queue_vi.c | 45 kq->eop_gpu_addr = kq->eop_mem->gpu_addr; in initialize_vi()
|
D | kfd_mqd_manager_cik.c | 56 addr = (*mqd_mem_obj)->gpu_addr; in init_mqd() 131 *gart_addr = (*mqd_mem_obj)->gpu_addr; in init_mqd_sdma() 313 addr = (*mqd_mem_obj)->gpu_addr; in init_mqd_hiq()
|
D | kfd_device.c | 491 (*mem_obj)->gpu_addr = kfd_gtt_sa_calc_gpu_addr( in kfd_gtt_sa_allocate() 501 (uint64_t *) (*mem_obj)->gpu_addr, (*mem_obj)->cpu_ptr); in kfd_gtt_sa_allocate()
|
D | kfd_dbgdev.c | 141 addr.quad_part = mem_obj->gpu_addr; in dbgdev_diq_submit_ib() 497 mem_obj->gpu_addr, in dbgdev_address_watch_diq() 718 mem_obj->gpu_addr, in dbgdev_wave_control_diq()
|
D | kfd_mqd_manager_vi.c | 53 addr = (*mqd_mem_obj)->gpu_addr; in init_mqd()
|
D | kfd_priv.h | 140 uint64_t gpu_addr; member
|
D | kfd_device_queue_manager.c | 501 dqm->pipelines_addr = dqm->pipeline_mem->gpu_addr; in init_pipelines() 753 dqm->fence_gpu_addr = dqm->fence_mem->gpu_addr; in start_cpsch()
|
D | kfd_packet_manager.c | 115 *rl_gpu_buffer = pm->ib_buffer_obj->gpu_addr; in pm_allocate_runlist_ib()
|
/linux-4.4.14/drivers/gpu/drm/bochs/ |
D | bochs_mm.c | 290 int bochs_bo_pin(struct bochs_bo *bo, u32 pl_flag, u64 *gpu_addr) in bochs_bo_pin() argument 296 if (gpu_addr) in bochs_bo_pin() 297 *gpu_addr = bochs_bo_gpu_offset(bo); in bochs_bo_pin() 309 if (gpu_addr) in bochs_bo_pin() 310 *gpu_addr = bochs_bo_gpu_offset(bo); in bochs_bo_pin()
|
D | bochs_kms.c | 47 u64 gpu_addr = 0; in bochs_crtc_mode_set_base() local 71 ret = bochs_bo_pin(bo, TTM_PL_FLAG_VRAM, &gpu_addr); in bochs_crtc_mode_set_base() 78 bochs_hw_setbase(bochs, x, y, gpu_addr); in bochs_crtc_mode_set_base()
|
D | bochs.h | 154 int bochs_bo_pin(struct bochs_bo *bo, u32 pl_flag, u64 *gpu_addr);
|
/linux-4.4.14/drivers/gpu/drm/amd/include/ |
D | kgd_kfd_interface.h | 127 void **mem_obj, uint64_t *gpu_addr,
|