Lines matching refs: g2d_userptr
In g2d_userptr_put_dma_addr():
    364  struct g2d_cmdlist_userptr *g2d_userptr =    (local)
    373  atomic_dec(&g2d_userptr->refcount);
    375  if (atomic_read(&g2d_userptr->refcount) > 0)
    378  if (g2d_userptr->in_pool)
    382  exynos_gem_unmap_sgt_from_dma(drm_dev, g2d_userptr->sgt,
    385  exynos_gem_put_pages_to_userptr(g2d_userptr->pages,
    386  g2d_userptr->npages,
    387  g2d_userptr->vma);
    389  exynos_gem_put_vma(g2d_userptr->vma);
    391  if (!g2d_userptr->out_of_list)
    392  list_del_init(&g2d_userptr->list);
    394  sg_free_table(g2d_userptr->sgt);
    395  kfree(g2d_userptr->sgt);
    397  drm_free_large(g2d_userptr->pages);
    398  kfree(g2d_userptr);
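Read together, these references imply both the layout of struct g2d_cmdlist_userptr and the order in which a cached userptr mapping is torn down. Below is a minimal reconstruction for orientation; the field ordering and exact types, the function's parameter list, the !obj/force control flow, and the DMA direction are assumptions that the listing does not show.

struct g2d_cmdlist_userptr {
	struct list_head	list;		/* node on g2d_priv->userptr_list */
	dma_addr_t		dma_addr;	/* device address handed to the G2D */
	unsigned long		userptr;	/* userspace virtual address */
	unsigned long		size;
	struct page		**pages;	/* pinned user pages */
	unsigned int		npages;
	struct sg_table		*sgt;		/* scatter table over the pages */
	struct vm_area_struct	*vma;
	atomic_t		refcount;	/* concurrent users of this mapping */
	bool			in_pool;	/* counted against the userptr pool */
	bool			out_of_list;	/* already unlinked from the cache */
};

/* Release path pieced together from lines 364-398 above. */
static void g2d_userptr_put_dma_addr(struct drm_device *drm_dev,
				     unsigned long obj, bool force)
{
	struct g2d_cmdlist_userptr *g2d_userptr =
				(struct g2d_cmdlist_userptr *)obj;	/* 364 */

	if (!obj)
		return;

	if (force)			/* assumed: forced teardown (see free_all below) */
		goto out;

	atomic_dec(&g2d_userptr->refcount);				/* 373 */

	if (atomic_read(&g2d_userptr->refcount) > 0)			/* 375 */
		return;			/* still referenced by another cmdlist */

	if (g2d_userptr->in_pool)					/* 378 */
		return;			/* keep pooled mappings cached for reuse */

out:
	exynos_gem_unmap_sgt_from_dma(drm_dev, g2d_userptr->sgt,	/* 382 */
				      DMA_BIDIRECTIONAL);		/* direction assumed */

	exynos_gem_put_pages_to_userptr(g2d_userptr->pages,		/* 385-387 */
					g2d_userptr->npages,
					g2d_userptr->vma);
	exynos_gem_put_vma(g2d_userptr->vma);				/* 389 */

	if (!g2d_userptr->out_of_list)					/* 391-392 */
		list_del_init(&g2d_userptr->list);

	sg_free_table(g2d_userptr->sgt);				/* 394-395 */
	kfree(g2d_userptr->sgt);

	drm_free_large(g2d_userptr->pages);				/* 397 */
	kfree(g2d_userptr);						/* 398 */
}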
In g2d_userptr_get_dma_addr():
    409  struct g2d_cmdlist_userptr *g2d_userptr;    (local)
    426  list_for_each_entry(g2d_userptr, &g2d_priv->userptr_list, list) {
    427  if (g2d_userptr->userptr == userptr) {
    432  if (g2d_userptr->size == size) {
    433  atomic_inc(&g2d_userptr->refcount);
    434  *obj = (unsigned long)g2d_userptr;
    436  return &g2d_userptr->dma_addr;
    446  g2d_userptr->out_of_list = true;
    447  g2d_userptr->in_pool = false;
    448  list_del_init(&g2d_userptr->list);
    454  g2d_userptr = kzalloc(sizeof(*g2d_userptr), GFP_KERNEL);
    455  if (!g2d_userptr)
    458  atomic_set(&g2d_userptr->refcount, 1);
    464  g2d_userptr->npages = npages;
    489  g2d_userptr->vma = exynos_gem_get_vma(vma);
    490  if (!g2d_userptr->vma) {
    497  g2d_userptr->size = size;
    508  g2d_userptr->pages = pages;
    523  g2d_userptr->sgt = sgt;
    525  ret = exynos_gem_map_sgt_with_dma(drm_dev, g2d_userptr->sgt,
    532  g2d_userptr->dma_addr = sgt->sgl[0].dma_address;
    533  g2d_userptr->userptr = userptr;
    535  list_add_tail(&g2d_userptr->list, &g2d_priv->userptr_list);
    539  g2d_userptr->in_pool = true;
    542  *obj = (unsigned long)g2d_userptr;
    544  return &g2d_userptr->dma_addr;
    553  exynos_gem_put_pages_to_userptr(g2d_userptr->pages,
    554  g2d_userptr->npages,
    555  g2d_userptr->vma);
    558  exynos_gem_put_vma(g2d_userptr->vma);
    564  kfree(g2d_userptr);
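The matches in g2d_userptr_get_dma_addr() outline the companion lookup path: reuse a cached mapping when the same userptr and size come back, evict a stale entry when only the address matches, and otherwise pin the range and map it for the device. A condensed sketch follows; the parameter list, error returns, DMA direction and pool bookkeeping are assumptions, and sketch_pin_userptr() is a hypothetical stand-in for the page-pinning and sg-table steps that the listing only partially shows (lines 464-523).

/* Hypothetical stand-in for lines 464-523: compute npages, pin the VMA and
 * user pages, build an sg_table, and store them in the g2d_cmdlist_userptr.
 * Not a function of the driver itself. */
static int sketch_pin_userptr(unsigned long userptr, unsigned long size,
			      struct g2d_cmdlist_userptr *g2d_userptr);

static dma_addr_t *g2d_userptr_get_dma_addr(struct drm_device *drm_dev,
					    unsigned long userptr,
					    unsigned long size,
					    struct exynos_drm_g2d_private *g2d_priv,
					    unsigned long *obj)
{
	struct g2d_cmdlist_userptr *g2d_userptr;
	struct sg_table *sgt;
	int ret;

	/* Fast path (426-436): reuse a cached mapping of the same range. */
	list_for_each_entry(g2d_userptr, &g2d_priv->userptr_list, list) {
		if (g2d_userptr->userptr != userptr)
			continue;

		if (g2d_userptr->size == size) {
			atomic_inc(&g2d_userptr->refcount);
			*obj = (unsigned long)g2d_userptr;
			return &g2d_userptr->dma_addr;
		}

		/* Same address, different size (446-448): evict the stale
		 * entry so the put path really frees it once unused. */
		g2d_userptr->out_of_list = true;
		g2d_userptr->in_pool = false;
		list_del_init(&g2d_userptr->list);
		break;
	}

	/* Slow path (454-523): build a fresh mapping. */
	g2d_userptr = kzalloc(sizeof(*g2d_userptr), GFP_KERNEL);
	if (!g2d_userptr)
		return ERR_PTR(-ENOMEM);

	atomic_set(&g2d_userptr->refcount, 1);

	ret = sketch_pin_userptr(userptr, size, g2d_userptr);
	if (ret)
		goto err_free;

	g2d_userptr->size = size;
	sgt = g2d_userptr->sgt;

	/* Map for the device and publish the result (525-544). */
	ret = exynos_gem_map_sgt_with_dma(drm_dev, g2d_userptr->sgt,
					  DMA_BIDIRECTIONAL);	/* direction assumed */
	if (ret < 0)
		goto err_unpin;

	g2d_userptr->dma_addr = sgt->sgl[0].dma_address;
	g2d_userptr->userptr = userptr;

	list_add_tail(&g2d_userptr->list, &g2d_priv->userptr_list);
	g2d_userptr->in_pool = true;	/* subject to a pool-size limit (539 context) */

	*obj = (unsigned long)g2d_userptr;
	return &g2d_userptr->dma_addr;

err_unpin:
	exynos_gem_put_pages_to_userptr(g2d_userptr->pages,	/* 553-558 */
					g2d_userptr->npages,
					g2d_userptr->vma);
	exynos_gem_put_vma(g2d_userptr->vma);
err_free:
	kfree(g2d_userptr);					/* 564 */
	return ERR_PTR(ret);
}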
In g2d_userptr_free_all():
    575  struct g2d_cmdlist_userptr *g2d_userptr, *n;    (local)
    577  list_for_each_entry_safe(g2d_userptr, n, &g2d_priv->userptr_list, list)
    578  if (g2d_userptr->in_pool)
    580  (unsigned long)g2d_userptr,
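These two matches suggest the cleanup run when a client goes away: every mapping still accounted to the pool is force-released through the put helper. A sketch, assuming the g2d_priv parameter and the force flag used above:

/* Cleanup-on-close loop pieced together from lines 575-580; the parameter
 * list and the force flag are assumptions. */
static void g2d_userptr_free_all(struct drm_device *drm_dev,
				 struct exynos_drm_g2d_private *g2d_priv)
{
	struct g2d_cmdlist_userptr *g2d_userptr, *n;

	/* _safe iteration: the put helper unlinks entries as it frees them. */
	list_for_each_entry_safe(g2d_userptr, n, &g2d_priv->userptr_list, list)
		if (g2d_userptr->in_pool)
			g2d_userptr_put_dma_addr(drm_dev,
						 (unsigned long)g2d_userptr,
						 true);	/* force, bypassing the refcount */
}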
In g2d_map_cmdlist_gem():
    740  struct drm_exynos_g2d_userptr g2d_userptr;    (local)
    742  if (copy_from_user(&g2d_userptr, (void __user *)handle,
    749  g2d_userptr.size)) {
    755  g2d_userptr.userptr,
    756  g2d_userptr.size,
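Finally, the matches in g2d_map_cmdlist_gem() show how a mapping is requested from the IOCTL side: the command-list handle is reinterpreted as a user pointer to a struct drm_exynos_g2d_userptr, copied in, size-checked, and handed to g2d_userptr_get_dma_addr(). The helper below is a hypothetical extraction of that branch for illustration; the size-check details and the trailing call arguments are assumptions.

/* Hypothetical extraction of the userptr branch of g2d_map_cmdlist_gem(),
 * for illustration only. */
static int g2d_map_one_userptr(struct drm_device *drm_dev,
			       struct exynos_drm_g2d_private *g2d_priv,
			       unsigned long handle,
			       dma_addr_t **addr_out, unsigned long *obj)
{
	struct drm_exynos_g2d_userptr g2d_userptr;	/* UAPI: userptr + size */
	dma_addr_t *addr;

	/* 742: the cmdlist "handle" is really a pointer into userspace. */
	if (copy_from_user(&g2d_userptr, (void __user *)handle,
			   sizeof(g2d_userptr)))
		return -EFAULT;

	/* 749 closes a validity check on g2d_userptr.size (details not shown). */

	/* 755-756: request (or reuse) a DMA mapping for the user range. */
	addr = g2d_userptr_get_dma_addr(drm_dev,
					g2d_userptr.userptr,
					g2d_userptr.size,
					g2d_priv, obj);	/* trailing args as in the sketch above */
	if (IS_ERR(addr))
		return PTR_ERR(addr);

	*addr_out = addr;
	return 0;
}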