memptrs  102 drivers/gpu/drm/msm/adreno/a5xx_gpu.c      ring->memptrs->fence = submit->seqno;
memptrs   66 drivers/gpu/drm/msm/adreno/a5xx_preempt.c  empty = (get_wptr(ring) == ring->memptrs->rptr);
memptrs  353 drivers/gpu/drm/msm/adreno/adreno_gpu.c    ring->memptrs->fence = ring->seqno;
memptrs  354 drivers/gpu/drm/msm/adreno/adreno_gpu.c    ring->memptrs->rptr = 0;
memptrs  385 drivers/gpu/drm/msm/adreno/adreno_gpu.c    return ring->memptrs->rptr = adreno_gpu_read(
memptrs  388 drivers/gpu/drm/msm/adreno/adreno_gpu.c    return ring->memptrs->rptr;
memptrs  535 drivers/gpu/drm/msm/adreno/adreno_gpu.c    state->ring[i].fence = gpu->rb[i]->memptrs->fence;
memptrs  771 drivers/gpu/drm/msm/adreno/adreno_gpu.c    ring->memptrs->fence,
memptrs  433 drivers/gpu/drm/msm/msm_gpu.c              submit = find_submit(cur_ring, cur_ring->memptrs->fence + 1);
memptrs  474 drivers/gpu/drm/msm/msm_gpu.c              uint32_t fence = ring->memptrs->fence;
memptrs  524 drivers/gpu/drm/msm/msm_gpu.c              uint32_t fence = ring->memptrs->fence;
memptrs  660 drivers/gpu/drm/msm/msm_gpu.c              stats = &ring->memptrs->stats[index];
memptrs  713 drivers/gpu/drm/msm/msm_gpu.c              update_fences(gpu, gpu->rb[i], gpu->rb[i]->memptrs->fence);
memptrs  856 drivers/gpu/drm/msm/msm_gpu.c              void *memptrs;
memptrs  933 drivers/gpu/drm/msm/msm_gpu.c              memptrs = msm_gem_kernel_new(drm,
memptrs  938 drivers/gpu/drm/msm/msm_gpu.c              if (IS_ERR(memptrs)) {
memptrs  939 drivers/gpu/drm/msm/msm_gpu.c              ret = PTR_ERR(memptrs);
memptrs  954 drivers/gpu/drm/msm/msm_gpu.c              gpu->rb[i] = msm_ringbuffer_new(gpu, i, memptrs, memptrs_iova);
memptrs  963 drivers/gpu/drm/msm/msm_gpu.c              memptrs += sizeof(struct msm_rbmemptrs);
memptrs  151 drivers/gpu/drm/msm/msm_gpu.h              if (ring->seqno > ring->memptrs->fence)
memptrs   11 drivers/gpu/drm/msm/msm_ringbuffer.c       void *memptrs, uint64_t memptrs_iova)
memptrs   44 drivers/gpu/drm/msm/msm_ringbuffer.c       ring->memptrs = memptrs;
memptrs   45 drivers/gpu/drm/msm/msm_ringbuffer.h       struct msm_rbmemptrs *memptrs;
memptrs   52 drivers/gpu/drm/msm/msm_ringbuffer.h       void *memptrs, uint64_t memptrs_iova);
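
Taken together, these hits suggest a pattern: each msm_ringbuffer points at a small GPU-shared "memptrs" block holding the ring's read pointer, the last retired fence seqno, and per-submit stats; one kernel buffer allocated with msm_gem_kernel_new() is carved into per-ring slices of sizeof(struct msm_rbmemptrs) (msm_gpu.c:933..963); and a ring counts as busy while its CPU-side seqno is ahead of the fence the GPU has written back (msm_gpu.h:151). Below is a minimal sketch of that pattern; the struct layout and helper names are simplified and hypothetical, not the kernel's verbatim definitions.

/* Sketch only: simplified stand-in for struct msm_rbmemptrs. */
struct msm_rbmemptrs_sketch {
	volatile uint32_t rptr;   /* GPU-updated read pointer (a5xx_preempt.c:66)   */
	volatile uint32_t fence;  /* last completed submit seqno (adreno_gpu.c:353) */
	/* ... per-submit stats array follows, cf. msm_gpu.c:660 ... */
};

/*
 * Sketch of the per-ring slicing seen at msm_gpu.c:954/963: the shared
 * allocation is handed out slice by slice; advancing memptrs_iova in
 * lockstep is an assumption here, not shown in the hits above.
 */
static int sketch_create_rings(struct msm_gpu *gpu, void *memptrs,
			       uint64_t memptrs_iova, int nr_rings)
{
	int i;

	for (i = 0; i < nr_rings; i++) {
		gpu->rb[i] = msm_ringbuffer_new(gpu, i, memptrs, memptrs_iova);
		if (IS_ERR(gpu->rb[i]))
			return PTR_ERR(gpu->rb[i]);

		memptrs      += sizeof(struct msm_rbmemptrs);
		memptrs_iova += sizeof(struct msm_rbmemptrs);
	}

	return 0;
}

/*
 * Sketch of the busy test at msm_gpu.h:151: work is outstanding while the
 * CPU-side seqno is ahead of the fence value the GPU has written back.
 */
static inline bool sketch_ring_busy(struct msm_ringbuffer *ring)
{
	return ring->seqno > ring->memptrs->fence;
}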