v2_context  49 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
v2_context  55 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  if (v2_context->stlb_cpu[i])
v2_context  57 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  v2_context->stlb_cpu[i],
v2_context  58 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  v2_context->stlb_dma[i]);
v2_context  61 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  dma_free_wc(context->global->dev, SZ_4K, v2_context->mtlb_cpu,
v2_context  62 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  v2_context->mtlb_dma);
v2_context  64 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  clear_bit(v2_context->id, context->global->v2.pta_alloc);
v2_context  66 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  vfree(v2_context);
v2_context  69 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  etnaviv_iommuv2_ensure_stlb(struct etnaviv_iommuv2_context *v2_context,
v2_context  72 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  if (v2_context->stlb_cpu[stlb])
v2_context  75 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  v2_context->stlb_cpu[stlb] =
v2_context  76 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  dma_alloc_wc(v2_context->base.global->dev, SZ_4K,
v2_context  77 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  &v2_context->stlb_dma[stlb],
v2_context  80 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  if (!v2_context->stlb_cpu[stlb])
v2_context  83 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  memset32(v2_context->stlb_cpu[stlb], MMUv2_PTE_EXCEPTION,
v2_context  86 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  v2_context->mtlb_cpu[stlb] =
v2_context  87 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  v2_context->stlb_dma[stlb] | MMUv2_PTE_PRESENT;
v2_context  96 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
v2_context 112 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  ret = etnaviv_iommuv2_ensure_stlb(v2_context, mtlb_entry);
v2_context 116 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  v2_context->stlb_cpu[mtlb_entry][stlb_entry] = entry;
v2_context 140 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
v2_context 145 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  if (v2_context->mtlb_cpu[i] & MMUv2_PTE_PRESENT)
v2_context 153 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
v2_context 156 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  memcpy(buf, v2_context->mtlb_cpu, SZ_4K);
v2_context 159 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  if (v2_context->mtlb_cpu[i] & MMUv2_PTE_PRESENT) {
v2_context 160 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  memcpy(buf, v2_context->stlb_cpu[i], SZ_4K);
v2_context 168 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
v2_context 176 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  (u32)v2_context->mtlb_dma,
v2_context 188 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
v2_context 211 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  context->global->v2.pta_cpu[v2_context->id] = v2_context->mtlb_dma |
v2_context 215 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  prefetch = etnaviv_buffer_config_pta(gpu, v2_context->id);
v2_context 225 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
v2_context 227 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  return v2_context->mtlb_dma;
v2_context 232 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
v2_context 234 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  return v2_context->id;
v2_context 264 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  struct etnaviv_iommuv2_context *v2_context;
v2_context 267 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  v2_context = vzalloc(sizeof(*v2_context));
v2_context 268 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  if (!v2_context)
v2_context 272 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  v2_context->id = find_first_zero_bit(global->v2.pta_alloc,
v2_context 274 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  if (v2_context->id < ETNAVIV_PTA_ENTRIES) {
v2_context 275 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  set_bit(v2_context->id, global->v2.pta_alloc);
v2_context 282 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  v2_context->mtlb_cpu = dma_alloc_wc(global->dev, SZ_4K,
v2_context 283 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  &v2_context->mtlb_dma, GFP_KERNEL);
v2_context 284 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  if (!v2_context->mtlb_cpu)
v2_context 287 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  memset32(v2_context->mtlb_cpu, MMUv2_PTE_EXCEPTION,
v2_context 290 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  global->v2.pta_cpu[v2_context->id] = v2_context->mtlb_dma;
v2_context 292 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  context = &v2_context->base;
v2_context 302 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  clear_bit(v2_context->id, global->v2.pta_alloc);
v2_context 304 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c  vfree(v2_context);
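
All of the references above operate on a per-context MMUv2 page table object. The sketch below reconstructs that structure purely from the usage listed here (a 4K master page table, lazily allocated second-level tables indexed by stlb, and an id into the global PTA allocator); the exact field order, the base member, and the MMUv2_MAX_STLB_ENTRIES array bound are assumptions, not copied from the file.

/*
 * Assumed shape of the context these references use:
 *  - mtlb_cpu/mtlb_dma: master (first level) page table, one SZ_4K page,
 *    allocated with dma_alloc_wc() and initialised to MMUv2_PTE_EXCEPTION
 *  - stlb_cpu[]/stlb_dma[]: slave (second level) page tables, allocated on
 *    demand by etnaviv_iommuv2_ensure_stlb() and linked into the master
 *    table with MMUv2_PTE_PRESENT
 *  - id: slot in the global page table array, tracked via
 *    global->v2.pta_alloc with set_bit()/clear_bit()
 */
struct etnaviv_iommuv2_context {
	struct etnaviv_iommu_context base;
	unsigned short id;
	/* master TLB: first level page table */
	u32 *mtlb_cpu;
	dma_addr_t mtlb_dma;
	/* slave TLBs: second level page tables, allocated on demand */
	u32 *stlb_cpu[MMUv2_MAX_STLB_ENTRIES];	/* bound assumed, not taken from the file */
	dma_addr_t stlb_dma[MMUv2_MAX_STLB_ENTRIES];
};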