Lines matching refs: rdev (cross-reference hits in drivers/gpu/drm/radeon/evergreen_dma.c)

Forward declaration at file scope:

   29  u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev);

In evergreen_dma_fence_ring_emit(), where rdev enters as a function argument:

   41  void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
   44          struct radeon_ring *ring = &rdev->ring[fence->ring];
   45          u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
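
For context, a minimal sketch of how these two lines sit inside the fence-emit path. Only lines 41, 44, and 45 come from the listing above; the radeon_fence parameter is inferred from the fence->ring uses, and the packet-writing body is an assumption left as a comment rather than quoted code.

    void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
                                       struct radeon_fence *fence)
    {
            struct radeon_ring *ring = &rdev->ring[fence->ring];   /* line 44 */
            u64 addr = rdev->fence_drv[fence->ring].gpu_addr;      /* line 45 */

            /* Assumed body: write fence->seq to the per-ring fence address
             * (addr) via a DMA fence packet, then emit a trap packet so the
             * engine raises an interrupt once the fence value lands. */
    }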

In evergreen_dma_ring_ib_execute(), where rdev enters as a function argument:

   67  void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
   70          struct radeon_ring *ring = &rdev->ring[ib->ring];
   72          if (rdev->wb.enabled) {
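
As a hedged sketch, the rdev->wb.enabled test at line 72 guards an optional writeback step taken before the indirect buffer is queued. Everything below other than lines 67, 70, and 72 is an assumption, kept as comments:

    void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
                                       struct radeon_ib *ib)
    {
            struct radeon_ring *ring = &rdev->ring[ib->ring];      /* line 70 */

            if (rdev->wb.enabled) {                                /* line 72 */
                    /* Assumed: emit a DMA write packet that updates the
                     * ring's next read pointer in the writeback buffer in
                     * system memory, so the CPU can poll progress without
                     * an MMIO register read. */
            }

            /* Assumed: emit the indirect-buffer packet referencing
             * ib->gpu_addr and ib->length_dw. */
    }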

In evergreen_copy_dma(), where rdev enters as a function argument:

  107  struct radeon_fence *evergreen_copy_dma(struct radeon_device *rdev,
  115          int ring_index = rdev->asic->copy.dma_ring_index;
  116          struct radeon_ring *ring = &rdev->ring[ring_index];
  125          r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);
  128                  radeon_sync_free(rdev, &sync, NULL);
  132          radeon_sync_resv(rdev, &sync, resv, false);
  133          radeon_sync_rings(rdev, &sync, ring->idx);
  149          r = radeon_fence_emit(rdev, &fence, ring->idx);
  151                  radeon_ring_unlock_undo(rdev, ring);
  152                  radeon_sync_free(rdev, &sync, NULL);
  156          radeon_ring_unlock_commit(rdev, ring, false);
  157          radeon_sync_free(rdev, &sync, fence);
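
The matched lines in evergreen_copy_dma() trace a complete lock/sync/emit/commit sequence, including both error paths. A sketch of that control flow, reconstructed from the lines above; the parameter list beyond rdev, the copy-packet loop, and the num_loops computation are assumptions and are elided or marked as such:

    /* Sketch only; parameters beyond rdev are assumed from the call pattern. */
    struct radeon_fence *evergreen_copy_dma(struct radeon_device *rdev,
                                            uint64_t src_offset, uint64_t dst_offset,
                                            unsigned num_gpu_pages,
                                            struct dma_resv *resv)
    {
            struct radeon_fence *fence;
            struct radeon_sync sync;
            int ring_index = rdev->asic->copy.dma_ring_index;      /* line 115 */
            struct radeon_ring *ring = &rdev->ring[ring_index];    /* line 116 */
            int num_loops = 1; /* assumed: really derived from the copy size */
            int r;

            radeon_sync_create(&sync); /* assumed init for the sync object */

            /* Reserve ring space: 5 dwords per copy packet plus fixed
             * overhead, matching the "num_loops * 5 + 11" at line 125. */
            r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);
            if (r) {
                    radeon_sync_free(rdev, &sync, NULL);           /* line 128 */
                    return ERR_PTR(r);
            }

            radeon_sync_resv(rdev, &sync, resv, false);            /* line 132 */
            radeon_sync_rings(rdev, &sync, ring->idx);             /* line 133 */

            /* Assumed: loop emitting the DMA copy packets (elided). */

            r = radeon_fence_emit(rdev, &fence, ring->idx);        /* line 149 */
            if (r) {
                    radeon_ring_unlock_undo(rdev, ring);           /* line 151 */
                    radeon_sync_free(rdev, &sync, NULL);           /* line 152 */
                    return ERR_PTR(r);
            }

            radeon_ring_unlock_commit(rdev, ring, false);          /* line 156 */
            radeon_sync_free(rdev, &sync, fence);                  /* line 157 */
            return fence;
    }

Note the symmetry in the matched lines: every exit path hands the sync object back through radeon_sync_free(), with the fence on success and NULL on failure, so the semaphore bookkeeping is released exactly once either way.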

In evergreen_dma_is_lockup(), where rdev enters as a function argument:

  171  bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
  173          u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
  176          radeon_ring_lockup_update(rdev, ring);
  179          return radeon_ring_test_lockup(rdev, ring);
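
Lines 171-179 already contain nearly the whole function; the only missing piece is the test on the reset mask that selects between the two calls. A sketch with that branch filled in; the RADEON_RESET_DMA bit in the test is an assumption about which engine flag this path checks:

    bool evergreen_dma_is_lockup(struct radeon_device *rdev,
                                 struct radeon_ring *ring)
    {
            u32 reset_mask = evergreen_gpu_check_soft_reset(rdev); /* line 173 */

            if (!(reset_mask & RADEON_RESET_DMA)) { /* assumed engine flag */
                    /* DMA engine not flagged as hung: refresh the lockup
                     * tracker and report no lockup. */
                    radeon_ring_lockup_update(rdev, ring);         /* line 176 */
                    return false;
            }
            /* Engine looks stuck: let the generic helper decide whether the
             * ring has actually stopped making progress. */
            return radeon_ring_test_lockup(rdev, ring);            /* line 179 */
    }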