Lines Matching refs: gpu
27 int adreno_get_param(struct msm_gpu *gpu, uint32_t param, uint64_t *value) in adreno_get_param() argument
29 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_get_param()
45 DBG("%s: invalid param: %u", gpu->name, param); in adreno_get_param()
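adreno_get_param() answers userspace queries for per-GPU constants: it switches on the parameter ID, writes the result through the value pointer, and falls through to the "invalid param" debug message at line 45 for IDs it does not recognize. A minimal sketch of that pattern; the parameter names and values here are illustrative, not the driver's own.

#include <errno.h>
#include <stdint.h>
#include <stdio.h>

enum { PARAM_GPU_ID = 1, PARAM_CHIP_ID = 2 };        /* hypothetical IDs */

int get_param_sketch(uint32_t param, uint64_t *value)
{
        switch (param) {
        case PARAM_GPU_ID:
                *value = 330;                        /* e.g. an A330 core */
                return 0;
        case PARAM_CHIP_ID:
                *value = 0x03030000;                 /* packed revision, made up for the sketch */
                return 0;
        default:
                fprintf(stderr, "invalid param: %u\n", param);
                return -EINVAL;
        }
}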
53 int adreno_hw_init(struct msm_gpu *gpu) in adreno_hw_init() argument
55 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_hw_init()
58 DBG("%s", gpu->name); in adreno_hw_init()
60 ret = msm_gem_get_iova(gpu->rb->bo, gpu->id, &gpu->rb_iova); in adreno_hw_init()
62 gpu->rb_iova = 0; in adreno_hw_init()
63 dev_err(gpu->dev->dev, "could not map ringbuffer: %d\n", ret); in adreno_hw_init()
70 AXXX_CP_RB_CNTL_BUFSZ(ilog2(gpu->rb->size / 8)) | in adreno_hw_init()
74 adreno_gpu_write(adreno_gpu, REG_ADRENO_CP_RB_BASE, gpu->rb_iova); in adreno_hw_init()
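adreno_hw_init() pins the ringbuffer object into the GPU address space (clearing rb_iova and logging an error if that fails), then programs the CP ring registers: line 70 encodes the ring size into the RB_CNTL BUFSZ field as ilog2(size / 8), i.e. log2 of the size in 8-byte units, and line 74 writes the ring's GPU address into CP_RB_BASE. A self-contained round-trip check of that size encoding, using a hypothetical 32 KiB ring:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Minimal ilog2 for the check below; the kernel's ilog2() computes the
 * same value for runtime inputs. */
static uint32_t ilog2_u32(uint32_t v)
{
        uint32_t r = 0;

        while (v >>= 1)
                r++;
        return r;
}

int main(void)
{
        uint32_t rb_size = 0x8000;                /* hypothetical 32 KiB ring */
        uint32_t bufsz = ilog2_u32(rb_size / 8);  /* value fed to AXXX_CP_RB_CNTL_BUFSZ() */

        assert((8u << bufsz) == rb_size);         /* exact for power-of-two ring sizes */
        printf("BUFSZ field = %u\n", bufsz);
        return 0;
}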
92 uint32_t adreno_last_fence(struct msm_gpu *gpu) in adreno_last_fence() argument
94 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_last_fence()
98 void adreno_recover(struct msm_gpu *gpu) in adreno_recover() argument
100 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_recover()
101 struct drm_device *dev = gpu->dev; in adreno_recover()
104 gpu->funcs->pm_suspend(gpu); in adreno_recover()
107 gpu->rb->cur = gpu->rb->start; in adreno_recover()
110 adreno_gpu->memptrs->fence = gpu->submitted_fence; in adreno_recover()
114 gpu->funcs->pm_resume(gpu); in adreno_recover()
115 ret = gpu->funcs->hw_init(gpu); in adreno_recover()
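adreno_recover() is the hang-recovery path: power the core down, rewind the ring write cursor to the start of the buffer, fast-forward the completed fence in memptrs to the last submitted fence so nothing waits on work the hang destroyed, then power back up and re-run hw_init(). A structural sketch of that ordering, with placeholder types that only mirror the field names in the listing:

#include <stdint.h>
#include <stdio.h>

struct ring  { uint32_t *start, *cur; };
struct state { struct ring rb; uint32_t completed_fence, submitted_fence; };

void recover_sketch(struct state *s)
{
        printf("suspend\n");                      /* gpu->funcs->pm_suspend(gpu) */
        s->rb.cur = s->rb.start;                  /* reset ringbuffer            */
        s->completed_fence = s->submitted_fence;  /* retire everything in flight */
        printf("resume + hw_init\n");             /* pm_resume(), then hw_init() */
}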
122 int adreno_submit(struct msm_gpu *gpu, struct msm_gem_submit *submit, in adreno_submit() argument
125 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_submit()
126 struct msm_drm_private *priv = gpu->dev->dev_private; in adreno_submit()
127 struct msm_ringbuffer *ring = gpu->rb; in adreno_submit()
199 gpu->funcs->flush(gpu); in adreno_submit()
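adreno_submit() copies the submit's command stream into the shared ringbuffer and ends, at line 199, by calling the flush hook so the hardware sees the new write pointer. A simplified sketch of that shape; the fence handling is a placeholder, not the driver's CP packet format.

#include <stdint.h>
#include <stdio.h>

#define RING_DWORDS 256                           /* toy ring size */

struct ring { uint32_t buf[RING_DWORDS]; uint32_t wcur; };

static void ring_emit(struct ring *r, uint32_t dword)
{
        r->buf[r->wcur++ % RING_DWORDS] = dword;  /* monotonic cursor, wrapped index */
}

void submit_sketch(struct ring *r, const uint32_t *cmds, uint32_t ndwords,
                   uint32_t fence)
{
        for (uint32_t i = 0; i < ndwords; i++)
                ring_emit(r, cmds[i]);            /* command-stream dwords */
        ring_emit(r, fence);                      /* placeholder for a CP fence write */
        printf("flush: wcur=%u\n", r->wcur);      /* gpu->funcs->flush(gpu) */
}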
204 void adreno_flush(struct msm_gpu *gpu) in adreno_flush() argument
206 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_flush()
207 uint32_t wptr = get_wptr(gpu->rb); in adreno_flush()
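adreno_flush() recomputes the software write pointer from the ring and (in code not captured by this listing) publishes it to the CP's write-pointer register so the prefetcher starts consuming the newly written dwords. A sketch of plausible get_wptr() semantics, assuming the write pointer is the cursor's dword offset from the ring start, wrapped to the ring size:

#include <stdint.h>

struct ring { uint32_t *start, *cur; uint32_t size; /* bytes */ };

uint32_t wptr_sketch(const struct ring *r)
{
        /* Cursor offset in dwords, modulo the ring length in dwords. */
        return (uint32_t)(r->cur - r->start) % (r->size / 4);
}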
215 void adreno_idle(struct msm_gpu *gpu) in adreno_idle() argument
217 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_idle()
218 uint32_t wptr = get_wptr(gpu->rb); in adreno_idle()
222 DRM_ERROR("%s: timeout waiting to drain ringbuffer!\n", gpu->name); in adreno_idle()
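adreno_idle() also snapshots the write pointer and then, as the error at line 222 implies, polls until the ring drains, giving up after a bounded wait. A sketch of that drain-with-timeout shape; the predicate and the iteration bound are assumptions, not the driver's spin_until() implementation.

#include <stdbool.h>
#include <stdio.h>

bool idle_sketch(bool (*ring_drained)(void *ctx), void *ctx)
{
        for (int i = 0; i < 1000000; i++) {       /* arbitrary bound standing in for a timeout */
                if (ring_drained(ctx))
                        return true;
        }
        fprintf(stderr, "timeout waiting to drain ringbuffer!\n");
        return false;
}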
228 void adreno_show(struct msm_gpu *gpu, struct seq_file *m) in adreno_show() argument
230 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_show()
239 gpu->submitted_fence); in adreno_show()
242 seq_printf(m, "rb wptr: %d\n", get_wptr(gpu->rb)); in adreno_show()
244 gpu->funcs->pm_resume(gpu); in adreno_show()
247 seq_printf(m, "IO:region %s 00000000 00020000\n", gpu->name); in adreno_show()
254 uint32_t val = gpu_read(gpu, addr); in adreno_show()
259 gpu->funcs->pm_suspend(gpu); in adreno_show()
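adreno_show() is the debugfs dump: it prints fence and write-pointer state, resumes the GPU, walks a set of register ranges reading each offset with gpu_read(), and suspends again so the dump does not leave the core powered. A sketch of that bracketed register walk; the range table, the read callback, and the per-register output format are illustrative.

#include <stdint.h>
#include <stdio.h>

struct reg_range { uint32_t start, end; };        /* dword offsets, inclusive */

void show_sketch(uint32_t (*read_reg)(uint32_t addr),
                 const struct reg_range *ranges, unsigned int nranges)
{
        printf("resume\n");                       /* gpu->funcs->pm_resume(gpu) */
        printf("IO:region gpu 00000000 00020000\n");
        for (unsigned int i = 0; i < nranges; i++)
                for (uint32_t a = ranges[i].start; a <= ranges[i].end; a++)
                        printf("%08x: %08x\n", a << 2, read_reg(a)); /* byte address, value */
        printf("suspend\n");                      /* gpu->funcs->pm_suspend(gpu) */
}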
269 void adreno_dump_info(struct msm_gpu *gpu) in adreno_dump_info() argument
271 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_dump_info()
280 gpu->submitted_fence); in adreno_dump_info()
283 printk("rb wptr: %d\n", get_wptr(gpu->rb)); in adreno_dump_info()
287 gpu_read(gpu, REG_AXXX_CP_SCRATCH_REG0 + i)); in adreno_dump_info()
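adreno_dump_info() prints the same fence/wptr summary to the kernel log and, at line 287, walks the CP scratch registers, which sit at consecutive offsets above REG_AXXX_CP_SCRATCH_REG0. A sketch of that indexed walk, with the base offset left as a parameter rather than the real register address:

#include <stdint.h>
#include <stdio.h>

void dump_scratch_sketch(uint32_t (*read_reg)(uint32_t addr),
                         uint32_t scratch_base, unsigned int count)
{
        for (unsigned int i = 0; i < count; i++)
                printf("CP_SCRATCH_REG%u: %08x\n", i,
                       read_reg(scratch_base + i));
}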
292 void adreno_dump(struct msm_gpu *gpu) in adreno_dump() argument
294 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_dump()
298 printk("IO:region %s 00000000 00020000\n", gpu->name); in adreno_dump()
305 uint32_t val = gpu_read(gpu, addr); in adreno_dump()
311 static uint32_t ring_freewords(struct msm_gpu *gpu) in ring_freewords() argument
313 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in ring_freewords()
314 uint32_t size = gpu->rb->size / 4; in ring_freewords()
315 uint32_t wptr = get_wptr(gpu->rb); in ring_freewords()
320 void adreno_wait_ring(struct msm_gpu *gpu, uint32_t ndwords) in adreno_wait_ring() argument
322 if (spin_until(ring_freewords(gpu) >= ndwords)) in adreno_wait_ring()
323 DRM_ERROR("%s: timeout waiting for ringbuffer space\n", gpu->name); in adreno_wait_ring()
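ring_freewords() turns the ring size and write pointer into a count of free dwords, and adreno_wait_ring() spins (via spin_until) until at least ndwords are available, logging the timeout at line 323 otherwise. The listing does not show how the free count is derived; a plausible reconstruction is the classic circular-buffer gap between the read pointer the CP reports back and the host write pointer, with one dword held in reserve so a full ring never looks empty:

#include <assert.h>
#include <stdint.h>

static uint32_t freewords_sketch(uint32_t size, uint32_t wptr, uint32_t rptr)
{
        /* size, wptr, rptr are all in dwords; wptr == rptr means empty. */
        return (rptr + (size - 1) - wptr) % size;
}

int main(void)
{
        assert(freewords_sketch(1024, 0, 0) == 1023);    /* empty: all but the reserved slot */
        assert(freewords_sketch(1024, 1023, 0) == 0);    /* writer one behind reader: full */
        assert(freewords_sketch(1024, 10, 20) == 9);     /* writer wrapped past the end */
        return 0;
}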
335 struct msm_gpu *gpu = &adreno_gpu->base; in adreno_gpu_init() local
345 gpu->fast_rate = config->fast_rate; in adreno_gpu_init()
346 gpu->slow_rate = config->slow_rate; in adreno_gpu_init()
347 gpu->bus_freq = config->bus_freq; in adreno_gpu_init()
349 gpu->bus_scale_table = config->bus_scale_table; in adreno_gpu_init()
353 gpu->fast_rate, gpu->slow_rate, gpu->bus_freq); in adreno_gpu_init()
375 mmu = gpu->mmu; in adreno_gpu_init()
400 ret = msm_gem_get_iova(adreno_gpu->memptrs_bo, gpu->id, in adreno_gpu_init()
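adreno_gpu_init() copies the platform clock and bus parameters from the config into the GPU object, logs them, picks up the MMU from the base object, and maps the memptrs buffer to obtain its GPU address. An illustrative mirror of the config hand-off at lines 345-353; the field names follow the listing, the struct layouts do not.

#include <stdint.h>
#include <stdio.h>

struct config { uint32_t fast_rate, slow_rate, bus_freq; };
struct gpu    { uint32_t fast_rate, slow_rate, bus_freq; };

void init_clocks_sketch(struct gpu *gpu, const struct config *config)
{
        gpu->fast_rate = config->fast_rate;
        gpu->slow_rate = config->slow_rate;
        gpu->bus_freq  = config->bus_freq;
        printf("fast_rate=%u, slow_rate=%u, bus_freq=%u\n",
               gpu->fast_rate, gpu->slow_rate, gpu->bus_freq);
}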
410 void adreno_gpu_cleanup(struct adreno_gpu *gpu) in adreno_gpu_cleanup() argument
412 if (gpu->memptrs_bo) { in adreno_gpu_cleanup()
413 if (gpu->memptrs_iova) in adreno_gpu_cleanup()
414 msm_gem_put_iova(gpu->memptrs_bo, gpu->base.id); in adreno_gpu_cleanup()
415 drm_gem_object_unreference_unlocked(gpu->memptrs_bo); in adreno_gpu_cleanup()
417 release_firmware(gpu->pm4); in adreno_gpu_cleanup()
418 release_firmware(gpu->pfp); in adreno_gpu_cleanup()
419 msm_gpu_cleanup(&gpu->base); in adreno_gpu_cleanup()
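adreno_gpu_cleanup() tears down in reverse order of setup: unmap the memptrs buffer only if it was actually mapped, drop the buffer object reference, release the PM4 and PFP firmware blobs, and finish with the base msm_gpu cleanup. A sketch of that guarded ordering, with free() standing in for release_firmware() and prints standing in for the GEM and base-class calls:

#include <stdio.h>
#include <stdlib.h>

struct cleanup_state {
        void *memptrs_bo;
        unsigned long memptrs_iova;
        void *pm4, *pfp;                          /* firmware blobs */
};

void cleanup_sketch(struct cleanup_state *s)
{
        if (s->memptrs_bo) {
                if (s->memptrs_iova)
                        printf("unmap iova %#lx\n", s->memptrs_iova);
                printf("drop memptrs_bo reference\n");
        }
        free(s->pm4);                             /* release_firmware() tolerates NULL as well */
        free(s->pfp);
        printf("base cleanup\n");                 /* msm_gpu_cleanup(&gpu->base) */
}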