cma_obj 339 drivers/gpu/drm/arm/malidp_planes.c struct drm_gem_cma_object *cma_obj;
cma_obj 344 drivers/gpu/drm/arm/malidp_planes.c cma_obj = to_drm_gem_cma_obj(obj);
cma_obj 346 drivers/gpu/drm/arm/malidp_planes.c if (cma_obj->sgt)
cma_obj 347 drivers/gpu/drm/arm/malidp_planes.c sgt = cma_obj->sgt;
cma_obj 358 drivers/gpu/drm/arm/malidp_planes.c if (!cma_obj->sgt)
cma_obj 365 drivers/gpu/drm/arm/malidp_planes.c if (!cma_obj->sgt)
cma_obj 51 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_cma_object *cma_obj;
cma_obj 58 drivers/gpu/drm/drm_gem_cma_helper.c gem_obj = kzalloc(sizeof(*cma_obj), GFP_KERNEL);
cma_obj 61 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj = container_of(gem_obj, struct drm_gem_cma_object, base);
cma_obj 73 drivers/gpu/drm/drm_gem_cma_helper.c return cma_obj;
cma_obj 76 drivers/gpu/drm/drm_gem_cma_helper.c kfree(cma_obj);
cma_obj 96 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_cma_object *cma_obj;
cma_obj 101 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj = __drm_gem_cma_create(drm, size);
cma_obj 102 drivers/gpu/drm/drm_gem_cma_helper.c if (IS_ERR(cma_obj))
cma_obj 103 drivers/gpu/drm/drm_gem_cma_helper.c return cma_obj;
cma_obj 105 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj->vaddr = dma_alloc_wc(drm->dev, size, &cma_obj->paddr,
cma_obj 107 drivers/gpu/drm/drm_gem_cma_helper.c if (!cma_obj->vaddr) {
cma_obj 114 drivers/gpu/drm/drm_gem_cma_helper.c return cma_obj;
cma_obj 117 drivers/gpu/drm/drm_gem_cma_helper.c drm_gem_object_put_unlocked(&cma_obj->base);
cma_obj 143 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_cma_object *cma_obj;
cma_obj 147 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj = drm_gem_cma_create(drm, size);
cma_obj 148 drivers/gpu/drm/drm_gem_cma_helper.c if (IS_ERR(cma_obj))
cma_obj 149 drivers/gpu/drm/drm_gem_cma_helper.c return cma_obj;
cma_obj 151 drivers/gpu/drm/drm_gem_cma_helper.c gem_obj = &cma_obj->base;
cma_obj 163 drivers/gpu/drm/drm_gem_cma_helper.c return cma_obj;
cma_obj 178 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_cma_object *cma_obj;
cma_obj 180 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj = to_drm_gem_cma_obj(gem_obj);
cma_obj 183 drivers/gpu/drm/drm_gem_cma_helper.c if (cma_obj->vaddr)
cma_obj 184 drivers/gpu/drm/drm_gem_cma_helper.c dma_buf_vunmap(gem_obj->import_attach->dmabuf, cma_obj->vaddr);
cma_obj 185 drivers/gpu/drm/drm_gem_cma_helper.c drm_prime_gem_destroy(gem_obj, cma_obj->sgt);
cma_obj 186 drivers/gpu/drm/drm_gem_cma_helper.c } else if (cma_obj->vaddr) {
cma_obj 187 drivers/gpu/drm/drm_gem_cma_helper.c dma_free_wc(gem_obj->dev->dev, cma_obj->base.size,
cma_obj 188 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj->vaddr, cma_obj->paddr);
cma_obj 193 drivers/gpu/drm/drm_gem_cma_helper.c kfree(cma_obj);
cma_obj 216 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_cma_object *cma_obj;
cma_obj 224 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj = drm_gem_cma_create_with_handle(file_priv, drm, args->size,
cma_obj 226 drivers/gpu/drm/drm_gem_cma_helper.c return PTR_ERR_OR_ZERO(cma_obj);
cma_obj 252 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_cma_object *cma_obj;
cma_obj 257 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj = drm_gem_cma_create_with_handle(file_priv, drm, args->size,
cma_obj 259 drivers/gpu/drm/drm_gem_cma_helper.c return PTR_ERR_OR_ZERO(cma_obj);
cma_obj 269 drivers/gpu/drm/drm_gem_cma_helper.c static int drm_gem_cma_mmap_obj(struct drm_gem_cma_object *cma_obj,
cma_obj 282 drivers/gpu/drm/drm_gem_cma_helper.c ret = dma_mmap_wc(cma_obj->base.dev->dev, vma, cma_obj->vaddr,
cma_obj 283 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj->paddr, vma->vm_end - vma->vm_start);
cma_obj 310 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_cma_object *cma_obj;
cma_obj 319 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj = to_drm_gem_cma_obj(gem_obj);
cma_obj 321 drivers/gpu/drm/drm_gem_cma_helper.c return drm_gem_cma_mmap_obj(cma_obj, vma);
cma_obj 348 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_cma_object *cma_obj;
cma_obj 387 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj = to_drm_gem_cma_obj(obj);
cma_obj 391 drivers/gpu/drm/drm_gem_cma_helper.c return cma_obj->vaddr ? (unsigned long)cma_obj->vaddr : -EINVAL;
cma_obj 408 drivers/gpu/drm/drm_gem_cma_helper.c const struct drm_gem_cma_object *cma_obj = to_drm_gem_cma_obj(obj);
cma_obj 410 drivers/gpu/drm/drm_gem_cma_helper.c drm_printf_indent(p, indent, "paddr=%pad\n", &cma_obj->paddr);
cma_obj 411 drivers/gpu/drm/drm_gem_cma_helper.c drm_printf_indent(p, indent, "vaddr=%p\n", cma_obj->vaddr);
cma_obj 429 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_cma_object *cma_obj = to_drm_gem_cma_obj(obj);
cma_obj 437 drivers/gpu/drm/drm_gem_cma_helper.c ret = dma_get_sgtable(obj->dev->dev, sgt, cma_obj->vaddr,
cma_obj 438 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj->paddr, obj->size);
cma_obj 472 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_cma_object *cma_obj;
cma_obj 496 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj = __drm_gem_cma_create(dev, attach->dmabuf->size);
cma_obj 497 drivers/gpu/drm/drm_gem_cma_helper.c if (IS_ERR(cma_obj))
cma_obj 498 drivers/gpu/drm/drm_gem_cma_helper.c return ERR_CAST(cma_obj);
cma_obj 500 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj->paddr = sg_dma_address(sgt->sgl);
cma_obj 501 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj->sgt = sgt;
cma_obj 503 drivers/gpu/drm/drm_gem_cma_helper.c DRM_DEBUG_PRIME("dma_addr = %pad, size = %zu\n", &cma_obj->paddr, attach->dmabuf->size);
cma_obj 505 drivers/gpu/drm/drm_gem_cma_helper.c return &cma_obj->base;
cma_obj 524 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_cma_object *cma_obj;
cma_obj 531 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj = to_drm_gem_cma_obj(obj);
cma_obj 532 drivers/gpu/drm/drm_gem_cma_helper.c return drm_gem_cma_mmap_obj(cma_obj, vma);
cma_obj 552 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_cma_object *cma_obj = to_drm_gem_cma_obj(obj);
cma_obj 554 drivers/gpu/drm/drm_gem_cma_helper.c return cma_obj->vaddr;
cma_obj 598 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_cma_object *cma_obj;
cma_obj 600 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj = kzalloc(sizeof(*cma_obj), GFP_KERNEL);
cma_obj 601 drivers/gpu/drm/drm_gem_cma_helper.c if (!cma_obj)
cma_obj 604 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj->base.funcs = &drm_cma_gem_default_funcs;
cma_obj 606 drivers/gpu/drm/drm_gem_cma_helper.c return &cma_obj->base;
cma_obj 635 drivers/gpu/drm/drm_gem_cma_helper.c struct drm_gem_cma_object *cma_obj;
cma_obj 651 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj = to_drm_gem_cma_obj(obj);
cma_obj 652 drivers/gpu/drm/drm_gem_cma_helper.c cma_obj->vaddr = vaddr;
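The drm_gem_cma_helper.c hits above trace the allocator itself: __drm_gem_cma_create() builds the GEM object, drm_gem_cma_create() backs it with dma_alloc_wc() so that cma_obj->vaddr and cma_obj->paddr describe the same contiguous buffer, and the free path releases it through dma_free_wc(). A minimal caller-side sketch of that API follows; the example_alloc() wrapper and the debug print are illustrative assumptions, only drm_gem_cma_create() and the cma_obj fields come from the listing.

#include <linux/err.h>
#include <drm/drm_device.h>
#include <drm/drm_gem_cma_helper.h>
#include <drm/drm_print.h>

/* Sketch only: allocate a CMA-backed GEM object and return its base. */
static struct drm_gem_object *example_alloc(struct drm_device *drm, size_t size)
{
	struct drm_gem_cma_object *cma_obj;

	cma_obj = drm_gem_cma_create(drm, size);	/* dma_alloc_wc() inside */
	if (IS_ERR(cma_obj))
		return ERR_CAST(cma_obj);

	/* Same buffer, two views: kernel mapping and DMA address. */
	DRM_DEBUG_DRIVER("vaddr=%p paddr=%pad\n", cma_obj->vaddr, &cma_obj->paddr);

	return &cma_obj->base;
}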
cma_obj 204 drivers/gpu/drm/drm_mipi_dbi.c struct drm_gem_cma_object *cma_obj = to_drm_gem_cma_obj(gem);
cma_obj 207 drivers/gpu/drm/drm_mipi_dbi.c void *src = cma_obj->vaddr;
cma_obj 244 drivers/gpu/drm/drm_mipi_dbi.c struct drm_gem_cma_object *cma_obj = to_drm_gem_cma_obj(gem);
cma_obj 271 drivers/gpu/drm/drm_mipi_dbi.c tr = cma_obj->vaddr;
cma_obj 96 drivers/gpu/drm/imx/ipuv3-plane.c struct drm_gem_cma_object *cma_obj;
cma_obj 100 drivers/gpu/drm/imx/ipuv3-plane.c cma_obj = drm_fb_cma_get_gem_obj(fb, plane);
cma_obj 101 drivers/gpu/drm/imx/ipuv3-plane.c BUG_ON(!cma_obj);
cma_obj 103 drivers/gpu/drm/imx/ipuv3-plane.c return cma_obj->paddr + fb->offsets[plane] + fb->pitches[plane] * y +
cma_obj 111 drivers/gpu/drm/imx/ipuv3-plane.c struct drm_gem_cma_object *cma_obj;
cma_obj 116 drivers/gpu/drm/imx/ipuv3-plane.c cma_obj = drm_fb_cma_get_gem_obj(fb, 1);
cma_obj 117 drivers/gpu/drm/imx/ipuv3-plane.c BUG_ON(!cma_obj);
cma_obj 122 drivers/gpu/drm/imx/ipuv3-plane.c return cma_obj->paddr + fb->offsets[1] + fb->pitches[1] * y +
cma_obj 130 drivers/gpu/drm/imx/ipuv3-plane.c struct drm_gem_cma_object *cma_obj;
cma_obj 135 drivers/gpu/drm/imx/ipuv3-plane.c cma_obj = drm_fb_cma_get_gem_obj(fb, 2);
cma_obj 136 drivers/gpu/drm/imx/ipuv3-plane.c BUG_ON(!cma_obj);
cma_obj 141 drivers/gpu/drm/imx/ipuv3-plane.c return cma_obj->paddr + fb->offsets[2] + fb->pitches[2] * y +
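The imx/ipuv3-plane.c hits show the standard way a KMS driver derives a plane's scanout DMA address from a CMA-backed framebuffer: fetch the per-plane GEM object with drm_fb_cma_get_gem_obj() and offset cma_obj->paddr by the framebuffer's offsets[], pitches[] and cpp[] terms. A hedged sketch of that computation follows; example_fb_paddr() and the 16.16 fixed-point source coordinates are assumptions modeled on the ipuv3 pattern, not copied from it.

#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_cma_helper.h>
#include <drm/drm_plane.h>

/* Sketch only: DMA address of pixel (x, y) in the given framebuffer plane. */
static dma_addr_t example_fb_paddr(struct drm_framebuffer *fb,
				   struct drm_plane_state *state,
				   unsigned int plane)
{
	struct drm_gem_cma_object *cma_obj;
	int x = state->src.x1 >> 16;	/* src rectangle is 16.16 fixed point */
	int y = state->src.y1 >> 16;

	cma_obj = drm_fb_cma_get_gem_obj(fb, plane);
	if (!cma_obj)
		return 0;

	return cma_obj->paddr + fb->offsets[plane] +
	       fb->pitches[plane] * y + fb->format->cpp[plane] * x;
}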
cma_obj 264 drivers/gpu/drm/sti/sti_cursor.c struct drm_gem_cma_object *cma_obj;
cma_obj 275 drivers/gpu/drm/sti/sti_cursor.c cma_obj = drm_fb_cma_get_gem_obj(fb, 0);
cma_obj 278 drivers/gpu/drm/sti/sti_cursor.c sti_cursor_argb8888_to_clut8(cursor, (u32 *)cma_obj->vaddr);
cma_obj 707 drivers/gpu/drm/sti/sti_gdp.c struct drm_gem_cma_object *cma_obj;
cma_obj 771 drivers/gpu/drm/sti/sti_gdp.c cma_obj = drm_fb_cma_get_gem_obj(fb, 0);
cma_obj 775 drivers/gpu/drm/sti/sti_gdp.c (unsigned long)cma_obj->paddr);
cma_obj 779 drivers/gpu/drm/sti/sti_gdp.c top_field->gam_gdp_pml = (u32)cma_obj->paddr + fb->offsets[0];
cma_obj 824 drivers/gpu/drm/sti/sti_gdp.c (unsigned long)cma_obj->paddr,
cma_obj 1120 drivers/gpu/drm/sti/sti_hqvdp.c struct drm_gem_cma_object *cma_obj;
cma_obj 1174 drivers/gpu/drm/sti/sti_hqvdp.c cma_obj = drm_fb_cma_get_gem_obj(fb, 0);
cma_obj 1178 drivers/gpu/drm/sti/sti_hqvdp.c (unsigned long)cma_obj->paddr);
cma_obj 1181 drivers/gpu/drm/sti/sti_hqvdp.c cmd->top.current_luma = (u32)cma_obj->paddr + fb->offsets[0];
cma_obj 1182 drivers/gpu/drm/sti/sti_hqvdp.c cmd->top.current_chroma = (u32)cma_obj->paddr + fb->offsets[1];
cma_obj 80 drivers/gpu/drm/tiny/ili9225.c struct drm_gem_cma_object *cma_obj = drm_fb_cma_get_gem_obj(fb, 0);
cma_obj 109 drivers/gpu/drm/tiny/ili9225.c tr = cma_obj->vaddr;
cma_obj 533 drivers/gpu/drm/tiny/repaper.c struct drm_gem_cma_object *cma_obj = drm_fb_cma_get_gem_obj(fb, 0);
cma_obj 534 drivers/gpu/drm/tiny/repaper.c struct dma_buf_attachment *import_attach = cma_obj->base.import_attach;
cma_obj 570 drivers/gpu/drm/tiny/repaper.c drm_fb_xrgb8888_to_gray8(buf, cma_obj->vaddr, fb, &clip);
cma_obj 94 drivers/gpu/drm/tiny/st7586.c struct drm_gem_cma_object *cma_obj = drm_fb_cma_get_gem_obj(fb, 0);
cma_obj 95 drivers/gpu/drm/tiny/st7586.c struct dma_buf_attachment *import_attach = cma_obj->base.import_attach;
cma_obj 96 drivers/gpu/drm/tiny/st7586.c void *src = cma_obj->vaddr;
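The sti and tiny driver hits use the other half of the object: cma_obj->vaddr is read directly by the CPU as the source for format conversion (sti_cursor_argb8888_to_clut8(), drm_fb_xrgb8888_to_gray8(), the mipi-dbi copy path), and for imported dma-bufs the access is bracketed with dma_buf_begin/end_cpu_access() via cma_obj->base.import_attach, as repaper.c and st7586.c do. A sketch of that pattern follows, assuming the drm_rect-based drm_fb_xrgb8888_to_gray8() signature of this kernel tree; example_fb_to_gray8() itself is illustrative.

#include <linux/dma-buf.h>
#include <linux/dma-direction.h>
#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_format_helper.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_cma_helper.h>
#include <drm/drm_rect.h>

/* Sketch only: convert the clipped framebuffer region to 8-bit grayscale. */
static int example_fb_to_gray8(u8 *buf, struct drm_framebuffer *fb,
			       struct drm_rect *clip)
{
	struct drm_gem_cma_object *cma_obj = drm_fb_cma_get_gem_obj(fb, 0);
	struct dma_buf_attachment *import_attach = cma_obj->base.import_attach;
	int ret = 0;

	/* Imported buffers need explicit CPU-access bracketing. */
	if (import_attach) {
		ret = dma_buf_begin_cpu_access(import_attach->dmabuf,
					       DMA_FROM_DEVICE);
		if (ret)
			return ret;
	}

	drm_fb_xrgb8888_to_gray8(buf, cma_obj->vaddr, fb, clip);

	if (import_attach)
		ret = dma_buf_end_cpu_access(import_attach->dmabuf,
					     DMA_FROM_DEVICE);
	return ret;
}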
cma_obj 411 drivers/gpu/drm/vc4/vc4_bo.c struct drm_gem_cma_object *cma_obj;
cma_obj 425 drivers/gpu/drm/vc4/vc4_bo.c cma_obj = drm_gem_cma_create(dev, size);
cma_obj 426 drivers/gpu/drm/vc4/vc4_bo.c if (IS_ERR(cma_obj)) {
cma_obj 432 drivers/gpu/drm/vc4/vc4_bo.c cma_obj = drm_gem_cma_create(dev, size);
cma_obj 435 drivers/gpu/drm/vc4/vc4_bo.c if (IS_ERR(cma_obj)) {
cma_obj 448 drivers/gpu/drm/vc4/vc4_bo.c cma_obj = drm_gem_cma_create(dev, size);
cma_obj 451 drivers/gpu/drm/vc4/vc4_bo.c if (IS_ERR(cma_obj)) {
cma_obj 457 drivers/gpu/drm/vc4/vc4_bo.c bo = to_vc4_bo(&cma_obj->base);
cma_obj 466 drivers/gpu/drm/vc4/vc4_bo.c vc4_bo_set_label(&cma_obj->base, type);
cma_obj 189 drivers/gpu/drm/zte/zx_plane.c struct drm_gem_cma_object *cma_obj;
cma_obj 218 drivers/gpu/drm/zte/zx_plane.c cma_obj = drm_fb_cma_get_gem_obj(fb, i);
cma_obj 219 drivers/gpu/drm/zte/zx_plane.c paddr = cma_obj->paddr + fb->offsets[i];
cma_obj 354 drivers/gpu/drm/zte/zx_plane.c struct drm_gem_cma_object *cma_obj;
cma_obj 384 drivers/gpu/drm/zte/zx_plane.c cma_obj = drm_fb_cma_get_gem_obj(fb, 0);
cma_obj 385 drivers/gpu/drm/zte/zx_plane.c paddr = cma_obj->paddr + fb->offsets[0];