vmw_tt            361 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static void vmw_ttm_unmap_from_dma(struct vmw_ttm_tt *vmw_tt)
vmw_tt            363 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	struct device *dev = vmw_tt->dev_priv->dev->dev;
vmw_tt            365 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	dma_unmap_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.nents,
vmw_tt            367 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vmw_tt->sgt.nents = vmw_tt->sgt.orig_nents;
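The hits at lines 361-367 belong to vmw_ttm_unmap_from_dma(). Below is a minimal sketch of the whole function reconstructed around the listed fragments; it assumes the driver's own headers and the TTM/DMA-API of that kernel generation, and the DMA direction argument is not visible in the listing, so DMA_BIDIRECTIONAL is an assumption.

static void vmw_ttm_unmap_from_dma(struct vmw_ttm_tt *vmw_tt)
{
	struct device *dev = vmw_tt->dev_priv->dev->dev;

	/* Tear down the streaming mapping created by dma_map_sg(). */
	dma_unmap_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.nents,
		     DMA_BIDIRECTIONAL);

	/* dma_map_sg() may have coalesced entries; restore the original
	 * entry count so the table can be remapped or freed cleanly. */
	vmw_tt->sgt.nents = vmw_tt->sgt.orig_nents;
}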
vmw_tt            383 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static int vmw_ttm_map_for_dma(struct vmw_ttm_tt *vmw_tt)
vmw_tt            385 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	struct device *dev = vmw_tt->dev_priv->dev->dev;
vmw_tt            388 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	ret = dma_map_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.orig_nents,
vmw_tt            393 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vmw_tt->sgt.nents = ret;
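vmw_ttm_map_for_dma() (lines 383-393) is the counterpart that hands the already-built scatter-gather table to the DMA API. A sketch assembled around the listed fragments; the error check and the direction argument are not part of the listing and are filled in as assumptions.

static int vmw_ttm_map_for_dma(struct vmw_ttm_tt *vmw_tt)
{
	struct device *dev = vmw_tt->dev_priv->dev->dev;
	int ret;

	/* Map all orig_nents entries; dma_map_sg() returns the number of
	 * DMA segments actually used, or 0 on failure. */
	ret = dma_map_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.orig_nents,
			 DMA_BIDIRECTIONAL);
	if (unlikely(ret == 0))
		return -ENOMEM;

	/* Remember the possibly-coalesced segment count for iteration and
	 * for the later dma_unmap_sg(). */
	vmw_tt->sgt.nents = ret;

	return 0;
}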
vmw_tt            408 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static int vmw_ttm_map_dma(struct vmw_ttm_tt *vmw_tt)
vmw_tt            410 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	struct vmw_private *dev_priv = vmw_tt->dev_priv;
vmw_tt            412 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	struct vmw_sg_table *vsgt = &vmw_tt->vsgt;
vmw_tt            423 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	if (vmw_tt->mapped)
vmw_tt            427 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vsgt->pages = vmw_tt->dma_ttm.ttm.pages;
vmw_tt            428 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vsgt->num_pages = vmw_tt->dma_ttm.ttm.num_pages;
vmw_tt            429 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vsgt->addrs = vmw_tt->dma_ttm.dma_address;
vmw_tt            430 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vsgt->sgt = &vmw_tt->sgt;
vmw_tt            439 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		vmw_tt->sg_alloc_size = sgt_size + sgl_size * vsgt->num_pages;
vmw_tt            440 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		ret = ttm_mem_global_alloc(glob, vmw_tt->sg_alloc_size, &ctx);
vmw_tt            445 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 			(&vmw_tt->sgt, vsgt->pages, vsgt->num_pages, 0,
vmw_tt            452 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		if (vsgt->num_pages > vmw_tt->sgt.nents) {
vmw_tt            455 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 					    vmw_tt->sgt.nents);
vmw_tt            458 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 			vmw_tt->sg_alloc_size -= over_alloc;
vmw_tt            461 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		ret = vmw_ttm_map_for_dma(vmw_tt);
vmw_tt            471 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vmw_tt->vsgt.num_regions = 0;
vmw_tt            476 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 			vmw_tt->vsgt.num_regions++;
vmw_tt            480 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vmw_tt->mapped = true;
vmw_tt            484 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	sg_free_table(vmw_tt->vsgt.sgt);
vmw_tt            485 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vmw_tt->vsgt.sgt = NULL;
vmw_tt            487 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	ttm_mem_global_free(glob, vmw_tt->sg_alloc_size);
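The longest run of hits, lines 408-487, is vmw_ttm_map_dma(), which builds the driver's vmw_sg_table view on top of the TTM page array. Read in order, the fragments suggest this flow for the scatter-gather map modes: account a worst-case sg table against the TTM memory global, build the table from the page array, return any over-allocation once entries have been coalesced, DMA-map it via vmw_ttm_map_for_dma(), then count contiguous DMA regions with the vmw_piter iterator. The condensed sketch below fills in the control flow, the map-mode check and the remaining __sg_alloc_table_from_pages() arguments as assumptions; only the lines containing vmw_tt are actually part of the listing.

static int vmw_ttm_map_dma(struct vmw_ttm_tt *vmw_tt)
{
	struct vmw_private *dev_priv = vmw_tt->dev_priv;
	struct ttm_mem_global *glob = vmw_mem_glob(dev_priv);
	struct vmw_sg_table *vsgt = &vmw_tt->vsgt;
	struct ttm_operation_ctx ctx = { .interruptible = true };
	const size_t sgl_size = ttm_round_pot(sizeof(struct scatterlist));
	const size_t sgt_size = ttm_round_pot(sizeof(struct sg_table));
	struct vmw_piter iter;
	dma_addr_t old;
	int ret = 0;

	if (vmw_tt->mapped)
		return 0;

	/* Point the driver-side view at TTM's page and DMA address arrays. */
	vsgt->mode = dev_priv->map_mode;
	vsgt->pages = vmw_tt->dma_ttm.ttm.pages;
	vsgt->num_pages = vmw_tt->dma_ttm.ttm.num_pages;
	vsgt->addrs = vmw_tt->dma_ttm.dma_address;
	vsgt->sgt = &vmw_tt->sgt;

	if (dev_priv->map_mode == vmw_dma_map_bind ||
	    dev_priv->map_mode == vmw_dma_map_populate) {
		/* Account a worst-case sg table (one entry per page). */
		vmw_tt->sg_alloc_size = sgt_size + sgl_size * vsgt->num_pages;
		ret = ttm_mem_global_alloc(glob, vmw_tt->sg_alloc_size, &ctx);
		if (unlikely(ret != 0))
			return ret;

		ret = __sg_alloc_table_from_pages
			(&vmw_tt->sgt, vsgt->pages, vsgt->num_pages, 0,
			 (unsigned long)vsgt->num_pages << PAGE_SHIFT,
			 dma_get_max_seg_size(dev_priv->dev->dev), GFP_KERNEL);
		if (unlikely(ret != 0))
			goto out_sg_alloc_fail;

		/* Contiguous pages were merged into fewer entries; hand the
		 * accounting for the unused part back. */
		if (vsgt->num_pages > vmw_tt->sgt.nents) {
			uint64_t over_alloc = sgl_size *
				(vsgt->num_pages - vmw_tt->sgt.nents);

			ttm_mem_global_free(glob, over_alloc);
			vmw_tt->sg_alloc_size -= over_alloc;
		}

		ret = vmw_ttm_map_for_dma(vmw_tt);
		if (unlikely(ret != 0))
			goto out_map_fail;
	}

	/* Count runs of PAGE_SIZE-contiguous DMA addresses; num_regions is
	 * later used when sizing MOB page tables. */
	old = ~((dma_addr_t)0);
	vmw_tt->vsgt.num_regions = 0;
	for (vmw_piter_start(&iter, vsgt, 0); vmw_piter_next(&iter);) {
		dma_addr_t cur = vmw_piter_dma_addr(&iter);

		if (cur != old + PAGE_SIZE)
			vmw_tt->vsgt.num_regions++;
		old = cur;
	}

	vmw_tt->mapped = true;
	return 0;

out_map_fail:
	sg_free_table(vmw_tt->vsgt.sgt);
	vmw_tt->vsgt.sgt = NULL;
out_sg_alloc_fail:
	ttm_mem_global_free(glob, vmw_tt->sg_alloc_size);
	return ret;
}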
vmw_tt            500 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static void vmw_ttm_unmap_dma(struct vmw_ttm_tt *vmw_tt)
vmw_tt            502 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	struct vmw_private *dev_priv = vmw_tt->dev_priv;
vmw_tt            504 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	if (!vmw_tt->vsgt.sgt)
vmw_tt            510 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		vmw_ttm_unmap_from_dma(vmw_tt);
vmw_tt            511 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		sg_free_table(vmw_tt->vsgt.sgt);
vmw_tt            512 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		vmw_tt->vsgt.sgt = NULL;
vmw_tt            514 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 				    vmw_tt->sg_alloc_size);
vmw_tt            519 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vmw_tt->mapped = false;
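vmw_ttm_unmap_dma() (lines 500-519) reverses that work and is a no-op when no scatter-gather table was ever built. A sketch; the map-mode check wrapped around the listed fragments is an assumption.

static void vmw_ttm_unmap_dma(struct vmw_ttm_tt *vmw_tt)
{
	struct vmw_private *dev_priv = vmw_tt->dev_priv;

	if (!vmw_tt->vsgt.sgt)
		return;

	if (dev_priv->map_mode == vmw_dma_map_bind ||
	    dev_priv->map_mode == vmw_dma_map_populate) {
		vmw_ttm_unmap_from_dma(vmw_tt);
		sg_free_table(vmw_tt->vsgt.sgt);
		vmw_tt->vsgt.sgt = NULL;
		/* Return the accounting taken in vmw_ttm_map_dma(). */
		ttm_mem_global_free(vmw_mem_glob(dev_priv),
				    vmw_tt->sg_alloc_size);
	}

	vmw_tt->mapped = false;
}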
vmw_tt            535 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	struct vmw_ttm_tt *vmw_tt =
vmw_tt            538 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	return vmw_ttm_map_dma(vmw_tt);
vmw_tt            552 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	struct vmw_ttm_tt *vmw_tt =
vmw_tt            555 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vmw_ttm_unmap_dma(vmw_tt);
vmw_tt            572 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	struct vmw_ttm_tt *vmw_tt =
vmw_tt            575 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	return &vmw_tt->vsgt;
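The three short hits at lines 535-538, 552-555 and 572-575 are buffer-object-level wrappers: each recovers the vmw_ttm_tt from a ttm_buffer_object's ttm pointer with container_of() and then calls the map/unmap helpers or exposes the vmw_sg_table. The enclosing function names and container_of() arguments are not part of the listing; in the mainline driver they appear to be vmw_bo_map_dma(), vmw_bo_unmap_dma() and vmw_bo_sg_table(), sketched here under that assumption.

int vmw_bo_map_dma(struct ttm_buffer_object *bo)
{
	struct vmw_ttm_tt *vmw_tt =
		container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm);

	return vmw_ttm_map_dma(vmw_tt);
}

void vmw_bo_unmap_dma(struct ttm_buffer_object *bo)
{
	struct vmw_ttm_tt *vmw_tt =
		container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm);

	vmw_ttm_unmap_dma(vmw_tt);
}

const struct vmw_sg_table *vmw_bo_sg_table(struct ttm_buffer_object *bo)
{
	struct vmw_ttm_tt *vmw_tt =
		container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm);

	return &vmw_tt->vsgt;
}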
vmw_tt            656 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	struct vmw_ttm_tt *vmw_tt =
vmw_tt            658 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	struct vmw_private *dev_priv = vmw_tt->dev_priv;
vmw_tt            672 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		ret = ttm_dma_populate(&vmw_tt->dma_ttm, dev_priv->dev->dev,
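Lines 656-672 fall inside the ttm_tt populate callback, which allocates the backing pages before any mapping happens. When the device needs coherent memory (the vmw_dma_alloc_coherent map mode), the DMA-aware TTM pool is used. The accounting of the per-page dma_addr_t array, the state check and the else branch below are assumptions filled in around the single listed ttm_dma_populate() fragment.

static int vmw_ttm_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx)
{
	struct vmw_ttm_tt *vmw_tt =
		container_of(ttm, struct vmw_ttm_tt, dma_ttm.ttm);
	struct vmw_private *dev_priv = vmw_tt->dev_priv;
	struct ttm_mem_global *glob = vmw_mem_glob(dev_priv);
	int ret;

	if (ttm->state != tt_unpopulated)
		return 0;

	if (dev_priv->map_mode == vmw_dma_alloc_coherent) {
		/* Account the per-page dma_addr_t array, then let the DMA
		 * pool allocate coherent pages. */
		size_t size =
			ttm_round_pot(ttm->num_pages * sizeof(dma_addr_t));

		ret = ttm_mem_global_alloc(glob, size, ctx);
		if (unlikely(ret != 0))
			return ret;

		ret = ttm_dma_populate(&vmw_tt->dma_ttm, dev_priv->dev->dev,
				       ctx);
		if (unlikely(ret != 0))
			ttm_mem_global_free(glob, size);
	} else
		ret = ttm_pool_populate(ttm, ctx);

	return ret;
}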
vmw_tt            684 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	struct vmw_ttm_tt *vmw_tt = container_of(ttm, struct vmw_ttm_tt,
vmw_tt            686 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	struct vmw_private *dev_priv = vmw_tt->dev_priv;
vmw_tt            690 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	if (vmw_tt->mob) {
vmw_tt            691 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		vmw_mob_destroy(vmw_tt->mob);
vmw_tt            692 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		vmw_tt->mob = NULL;
vmw_tt            695 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vmw_ttm_unmap_dma(vmw_tt);
vmw_tt            700 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		ttm_dma_unpopulate(&vmw_tt->dma_ttm, dev_priv->dev->dev);
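The final block, lines 684-700, is the unpopulate callback. It destroys any MOB (GPU page-table object) still attached, tears down the DMA mapping via vmw_ttm_unmap_dma(), and returns the pages to whichever pool they came from. The coherent-mode accounting and the else branch are assumptions wrapped around the listed fragments.

static void vmw_ttm_unpopulate(struct ttm_tt *ttm)
{
	struct vmw_ttm_tt *vmw_tt = container_of(ttm, struct vmw_ttm_tt,
						 dma_ttm.ttm);
	struct vmw_private *dev_priv = vmw_tt->dev_priv;
	struct ttm_mem_global *glob = vmw_mem_glob(dev_priv);

	/* Drop the MOB backing this TT, if one was created at bind time. */
	if (vmw_tt->mob) {
		vmw_mob_destroy(vmw_tt->mob);
		vmw_tt->mob = NULL;
	}

	vmw_ttm_unmap_dma(vmw_tt);

	if (dev_priv->map_mode == vmw_dma_alloc_coherent) {
		size_t size =
			ttm_round_pot(ttm->num_pages * sizeof(dma_addr_t));

		ttm_dma_unpopulate(&vmw_tt->dma_ttm, dev_priv->dev->dev);
		ttm_mem_global_free(glob, size);
	} else
		ttm_pool_unpopulate(ttm);
}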