Lines Matching refs:gpu (the functions below are from the msm/adreno DRM driver, drivers/gpu/drm/msm/adreno/adreno_gpu.c)

27 int adreno_get_param(struct msm_gpu *gpu, uint32_t param, uint64_t *value)  in adreno_get_param()  argument
29 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_get_param()
45 DBG("%s: invalid param: %u", gpu->name, param); in adreno_get_param()
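
For context, a sketch of the function these matches come from, assuming the msm UAPI of this era (the exact MSM_PARAM_* cases vary by kernel version, so treat the two shown here as illustrative; only the default branch is verbatim from the listing):

int adreno_get_param(struct msm_gpu *gpu, uint32_t param, uint64_t *value)
{
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);

        switch (param) {
        case MSM_PARAM_GPU_ID:
                *value = adreno_gpu->info->revn;
                return 0;
        case MSM_PARAM_GMEM_SIZE:
                *value = adreno_gpu->gmem;
                return 0;
        default:
                DBG("%s: invalid param: %u", gpu->name, param);
                return -EINVAL;
        }
}
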
53 int adreno_hw_init(struct msm_gpu *gpu) in adreno_hw_init() argument
55 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_hw_init()
58 DBG("%s", gpu->name); in adreno_hw_init()
60 ret = msm_gem_get_iova(gpu->rb->bo, gpu->id, &gpu->rb_iova); in adreno_hw_init()
62 gpu->rb_iova = 0; in adreno_hw_init()
63 dev_err(gpu->dev->dev, "could not map ringbuffer: %d\n", ret); in adreno_hw_init()
70 AXXX_CP_RB_CNTL_BUFSZ(ilog2(gpu->rb->size / 8)) | in adreno_hw_init()
74 adreno_gpu_write(adreno_gpu, REG_ADRENO_CP_RB_BASE, gpu->rb_iova); in adreno_hw_init()
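
The hw_init matches pin the ringbuffer BO into the GPU's address space and then program the CP ring registers. A sketch stitching them together; the early-return error handling and the BLKSZ field are assumptions. The size / 8 falls out of the register format: BUFSZ is log2 of the ring size in quad-words (8 bytes each):

int adreno_hw_init(struct msm_gpu *gpu)
{
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        int ret;

        DBG("%s", gpu->name);

        /* pin the ringbuffer BO into this GPU's address space: */
        ret = msm_gem_get_iova(gpu->rb->bo, gpu->id, &gpu->rb_iova);
        if (ret) {
                gpu->rb_iova = 0;
                dev_err(gpu->dev->dev, "could not map ringbuffer: %d\n", ret);
                return ret;
        }

        /* BUFSZ is log2 of the ring size in quad-words, hence size / 8: */
        adreno_gpu_write(adreno_gpu, REG_ADRENO_CP_RB_CNTL,
                        AXXX_CP_RB_CNTL_BUFSZ(ilog2(gpu->rb->size / 8)) |
                        AXXX_CP_RB_CNTL_BLKSZ(ilog2(RB_BLKSIZE)));

        adreno_gpu_write(adreno_gpu, REG_ADRENO_CP_RB_BASE, gpu->rb_iova);
        /* ... rptr writeback address setup elided ... */

        return 0;
}
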
92 uint32_t adreno_last_fence(struct msm_gpu *gpu) in adreno_last_fence() argument
94 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_last_fence()
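
adreno_last_fence() is plausibly just an accessor: the CP writes the seqno of the last completed submit into shared memory (the memptrs page), and this reads it back:

uint32_t adreno_last_fence(struct msm_gpu *gpu)
{
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);

        /* the CP writes the completed seqno here, see the
         * CP_EVENT_WRITE in the submit sketch below: */
        return adreno_gpu->memptrs->fence;
}
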
98 void adreno_recover(struct msm_gpu *gpu) in adreno_recover() argument
100 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_recover()
101 struct drm_device *dev = gpu->dev; in adreno_recover()
104 gpu->funcs->pm_suspend(gpu); in adreno_recover()
107 gpu->rb->cur = gpu->rb->start; in adreno_recover()
110 adreno_gpu->memptrs->fence = gpu->submitted_fence; in adreno_recover()
114 gpu->funcs->pm_resume(gpu); in adreno_recover()
115 ret = gpu->funcs->hw_init(gpu); in adreno_recover()
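
The recover path, reconstructed from the matches above (only the final error check is an assumption): power the GPU down, rewind the ring to empty, declare every submitted fence completed so waiters are not stuck on work the hung GPU will never finish, then power back up and re-run hw_init():

void adreno_recover(struct msm_gpu *gpu)
{
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct drm_device *dev = gpu->dev;
        int ret;

        gpu->funcs->pm_suspend(gpu);

        /* reset ringbuffer to empty: */
        gpu->rb->cur = gpu->rb->start;

        /* mark everything submitted so far as completed, discarding
         * whatever the hung GPU never finished: */
        adreno_gpu->memptrs->fence = gpu->submitted_fence;

        gpu->funcs->pm_resume(gpu);
        ret = gpu->funcs->hw_init(gpu);
        if (ret)
                dev_err(dev->dev, "gpu hw init failed: %d\n", ret);
}
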
122 int adreno_submit(struct msm_gpu *gpu, struct msm_gem_submit *submit, in adreno_submit() argument
125 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_submit()
126 struct msm_drm_private *priv = gpu->dev->dev_private; in adreno_submit()
127 struct msm_ringbuffer *ring = gpu->rb; in adreno_submit()
188 gpu->funcs->flush(gpu); in adreno_submit()
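
Only the entry and exit of adreno_submit() appear in the matches (lines 122 to 188). A condensed sketch of what sits between them, with the PM3 packet details (CP_INDIRECT_BUFFER_PFD, CP_EVENT_WRITE with CACHE_FLUSH_TS, and the rbmemptr() helper) taken from this era of the driver and hedged accordingly: each userspace cmdstream is chained into the ring as an indirect buffer, the CP is told to write the fence seqno back and raise an interrupt, and flush() (line 188) kicks the hardware:

int adreno_submit(struct msm_gpu *gpu, struct msm_gem_submit *submit,
                struct msm_file_private *ctx)
{
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct msm_drm_private *priv = gpu->dev->dev_private;
        struct msm_ringbuffer *ring = gpu->rb;
        unsigned i;

        for (i = 0; i < submit->nr_cmds; i++) {
                switch (submit->cmd[i].type) {
                case MSM_SUBMIT_CMD_IB_TARGET_BUF:
                        break;          /* no-op from the CP's view */
                case MSM_SUBMIT_CMD_CTX_RESTORE_BUF:
                        /* skip ctx-restore if last submit was same ctx: */
                        if (priv->lastctx == ctx)
                                break;
                        /* fall through */
                case MSM_SUBMIT_CMD_BUF:
                        /* chain the cmdstream as an indirect buffer: */
                        OUT_PKT3(ring, CP_INDIRECT_BUFFER_PFD, 2);
                        OUT_RING(ring, submit->cmd[i].iova);
                        OUT_RING(ring, submit->cmd[i].size);
                        break;
                }
        }

        /* have the CP write the fence seqno back to memptrs: */
        OUT_PKT3(ring, CP_EVENT_WRITE, 3);
        OUT_RING(ring, CACHE_FLUSH_TS);
        OUT_RING(ring, rbmemptr(adreno_gpu, fence));
        OUT_RING(ring, submit->fence);

        /* ... and raise an irq so retire can run: */
        OUT_PKT3(ring, CP_INTERRUPT, 1);
        OUT_RING(ring, 0x80000000);

        gpu->funcs->flush(gpu);

        return 0;
}
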
193 void adreno_flush(struct msm_gpu *gpu) in adreno_flush() argument
195 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_flush()
196 uint32_t wptr = get_wptr(gpu->rb); in adreno_flush()
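
Flush is the doorbell write: after a barrier to make sure the ring contents have hit memory, the new write pointer is handed to the CP. A sketch; the barrier is an assumption:

void adreno_flush(struct msm_gpu *gpu)
{
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        uint32_t wptr = get_wptr(gpu->rb);

        /* ensure ringbuffer writes are visible to the GPU before
         * the doorbell write below: */
        mb();

        adreno_gpu_write(adreno_gpu, REG_ADRENO_CP_RB_WPTR, wptr);
}
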
204 void adreno_idle(struct msm_gpu *gpu) in adreno_idle() argument
206 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_idle()
207 uint32_t wptr = get_wptr(gpu->rb); in adreno_idle()
211 DRM_ERROR("%s: timeout waiting to drain ringbuffer!\n", gpu->name); in adreno_idle()
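
Idle spins until the CP's read pointer, written back into memptrs, catches up with our write pointer; the spin_until() condition between lines 207 and 211 is an assumption:

void adreno_idle(struct msm_gpu *gpu)
{
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        uint32_t wptr = get_wptr(gpu->rb);

        /* drained once the CP has consumed everything we wrote: */
        if (spin_until(adreno_gpu->memptrs->rptr == wptr))
                DRM_ERROR("%s: timeout waiting to drain ringbuffer!\n", gpu->name);
}
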
217 void adreno_show(struct msm_gpu *gpu, struct seq_file *m) in adreno_show() argument
219 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_show()
228 gpu->submitted_fence); in adreno_show()
231 seq_printf(m, "rb wptr: %d\n", get_wptr(gpu->rb)); in adreno_show()
233 gpu->funcs->pm_resume(gpu); in adreno_show()
236 seq_printf(m, "IO:region %s 00000000 00020000\n", gpu->name); in adreno_show()
243 uint32_t val = gpu_read(gpu, addr); in adreno_show()
248 gpu->funcs->pm_suspend(gpu); in adreno_show()
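
The debugfs show hook prints fence and ring status, then dumps registers. Note the pm_resume()/pm_suspend() bracket (lines 233 and 248): GPU registers may only be touched while clocks and power are up. A partial sketch, with the register loop deferred to the adreno_dump notes below:

void adreno_show(struct msm_gpu *gpu, struct seq_file *m)
{
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);

        seq_printf(m, "fence:    %d/%d\n", adreno_gpu->memptrs->fence,
                        gpu->submitted_fence);
        seq_printf(m, "rptr:     %d\n", adreno_gpu->memptrs->rptr);
        seq_printf(m, "rb wptr:  %d\n", get_wptr(gpu->rb));

        /* registers can only be read while the GPU is powered: */
        gpu->funcs->pm_resume(gpu);
        /* ... register dump, see the loop sketched after adreno_dump ... */
        gpu->funcs->pm_suspend(gpu);
}
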
253 void adreno_dump(struct msm_gpu *gpu) in adreno_dump() argument
255 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in adreno_dump()
264 gpu->submitted_fence); in adreno_dump()
267 printk("rb wptr: %d\n", get_wptr(gpu->rb)); in adreno_dump()
270 printk("IO:region %s 00000000 00020000\n", gpu->name); in adreno_dump()
277 uint32_t val = gpu_read(gpu, addr); in adreno_dump()
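
adreno_dump() is the printk() twin of adreno_show(), for error paths where no seq_file exists; both end in the same register dump, emitted in an "IO:..." format meant for offline parsing. A sketch of that loop, assuming a table of start/end register pairs terminated by ~0 (a version-dependent detail):

        /* "IO:region <name> <base> <size>" header, then one "IO:R"
         * line per register: */
        printk("IO:region %s 00000000 00020000\n", gpu->name);
        for (i = 0; adreno_gpu->registers[i] != ~0; i += 2) {
                uint32_t start = adreno_gpu->registers[i];
                uint32_t end   = adreno_gpu->registers[i + 1];
                uint32_t addr;

                for (addr = start; addr <= end; addr++) {
                        uint32_t val = gpu_read(gpu, addr);
                        printk("IO:R %08x %08x\n", addr, val);
                }
        }
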
283 static uint32_t ring_freewords(struct msm_gpu *gpu) in ring_freewords() argument
285 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in ring_freewords()
286 uint32_t size = gpu->rb->size / 4; in ring_freewords()
287 uint32_t wptr = get_wptr(gpu->rb); in ring_freewords()
292 void adreno_wait_ring(struct msm_gpu *gpu, uint32_t ndwords) in adreno_wait_ring() argument
294 if (spin_until(ring_freewords(gpu) >= ndwords)) in adreno_wait_ring()
295 DRM_ERROR("%s: timeout waiting for ringbuffer space\n", gpu->name); in adreno_wait_ring()
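
The ring-space accounting: size is converted from bytes to dwords (size / 4), rptr comes from the CP's writeback in memptrs, and the modular arithmetic holds one word back so a completely full ring is distinguishable from an empty one (both would otherwise have rptr == wptr). Reconstructed around the matches, with the return expression being the standard circular-buffer formula rather than a verbatim quote:

static uint32_t ring_freewords(struct msm_gpu *gpu)
{
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        uint32_t size = gpu->rb->size / 4;      /* ring size in dwords */
        uint32_t wptr = get_wptr(gpu->rb);
        uint32_t rptr = adreno_gpu->memptrs->rptr;

        /* hold one word back so full (wptr one behind rptr) is
         * distinguishable from empty (wptr == rptr): */
        return (rptr + (size - 1) - wptr) % size;
}

void adreno_wait_ring(struct msm_gpu *gpu, uint32_t ndwords)
{
        if (spin_until(ring_freewords(gpu) >= ndwords))
                DRM_ERROR("%s: timeout waiting for ringbuffer space\n", gpu->name);
}
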
307 struct msm_gpu *gpu = &adreno_gpu->base; in adreno_gpu_init() local
317 gpu->fast_rate = config->fast_rate; in adreno_gpu_init()
318 gpu->slow_rate = config->slow_rate; in adreno_gpu_init()
319 gpu->bus_freq = config->bus_freq; in adreno_gpu_init()
321 gpu->bus_scale_table = config->bus_scale_table; in adreno_gpu_init()
325 gpu->fast_rate, gpu->slow_rate, gpu->bus_freq); in adreno_gpu_init()
347 mmu = gpu->mmu; in adreno_gpu_init()
372 ret = msm_gem_get_iova(adreno_gpu->memptrs_bo, gpu->id, in adreno_gpu_init()
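
adreno_gpu_init() copies clock and bus parameters out of platform data, then allocates the shared memptrs page that the fence and rptr writebacks above depend on. A heavily condensed sketch: msm_gpu_init(), firmware loading, the IOMMU attach via gpu->mmu (line 347), locking, and all error handling are elided, and msm_gem_new()/msm_gem_vaddr() are names from this era of the driver:

        struct msm_gpu *gpu = &adreno_gpu->base;

        /* clock/bus config comes from platform data: */
        gpu->fast_rate = config->fast_rate;
        gpu->slow_rate = config->slow_rate;
        gpu->bus_freq  = config->bus_freq;
        gpu->bus_scale_table = config->bus_scale_table;

        DBG("fast_rate=%u, slow_rate=%u, bus_freq=%u",
                        gpu->fast_rate, gpu->slow_rate, gpu->bus_freq);

        /* ... msm_gpu_init(), firmware load, IOMMU attach elided ... */

        /* the shared page the CP writes rptr/fence back into
         * (struct_mutex locking around the alloc elided): */
        adreno_gpu->memptrs_bo = msm_gem_new(drm,
                        sizeof(*adreno_gpu->memptrs), MSM_BO_UNCACHED);
        adreno_gpu->memptrs = msm_gem_vaddr(adreno_gpu->memptrs_bo);
        ret = msm_gem_get_iova(adreno_gpu->memptrs_bo, gpu->id,
                        &adreno_gpu->memptrs_iova);
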
382 void adreno_gpu_cleanup(struct adreno_gpu *gpu) in adreno_gpu_cleanup() argument
384 if (gpu->memptrs_bo) { in adreno_gpu_cleanup()
385 if (gpu->memptrs_iova) in adreno_gpu_cleanup()
386 msm_gem_put_iova(gpu->memptrs_bo, gpu->base.id); in adreno_gpu_cleanup()
387 drm_gem_object_unreference_unlocked(gpu->memptrs_bo); in adreno_gpu_cleanup()
389 release_firmware(gpu->pm4); in adreno_gpu_cleanup()
390 release_firmware(gpu->pfp); in adreno_gpu_cleanup()
391 msm_gpu_cleanup(&gpu->base); in adreno_gpu_cleanup()
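
The cleanup matches are nearly the whole function; stitched together they read as below. Note that here the adreno_gpu pointer itself is named gpu, so the base msm_gpu is gpu->base. release_firmware() is NULL-safe, so the microcode blobs (PM4 and PFP) need no guards:

void adreno_gpu_cleanup(struct adreno_gpu *gpu)
{
        if (gpu->memptrs_bo) {
                if (gpu->memptrs_iova)
                        msm_gem_put_iova(gpu->memptrs_bo, gpu->base.id);
                drm_gem_object_unreference_unlocked(gpu->memptrs_bo);
        }
        release_firmware(gpu->pm4);
        release_firmware(gpu->pfp);
        msm_gpu_cleanup(&gpu->base);
}
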