Lines matching refs: hwdev (drivers/xen/swiotlb-xen.c)

295 xen_swiotlb_alloc_coherent(struct device *hwdev, size_t size,  in xen_swiotlb_alloc_coherent()  argument
318 ret = xen_alloc_coherent_pages(hwdev, size, dma_handle, flags, attrs); in xen_swiotlb_alloc_coherent()
323 if (hwdev && hwdev->coherent_dma_mask) in xen_swiotlb_alloc_coherent()
324 dma_mask = dma_alloc_coherent_mask(hwdev, flags); in xen_swiotlb_alloc_coherent()
338 xen_free_coherent_pages(hwdev, size, ret, (dma_addr_t)phys, attrs); in xen_swiotlb_alloc_coherent()
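
The allocation path above (lines 318-338) validates the returned buffer against a DMA mask chosen at lines 323-324: the device's coherent_dma_mask when one is set, otherwise a fallback. A minimal standalone sketch of that mask-selection shape, assuming a made-up fake_device type (not struct device) and mirroring the kernel's DMA_BIT_MASK definition:

/* Sketch, not the kernel code: picking the mask used to validate a
 * coherent allocation.  fake_device is a hypothetical stand-in. */
#include <stdint.h>
#include <stdio.h>

#define DMA_BIT_MASK(n) (((n) == 64) ? ~0ULL : ((1ULL << (n)) - 1))

struct fake_device {
    uint64_t coherent_dma_mask;   /* 0 means "not set" */
};

static uint64_t pick_coherent_mask(const struct fake_device *hwdev)
{
    /* Same shape as lines 323-324: honour the device's coherent mask
     * when present, otherwise assume a 32-bit-capable device. */
    if (hwdev && hwdev->coherent_dma_mask)
        return hwdev->coherent_dma_mask;
    return DMA_BIT_MASK(32);
}

int main(void)
{
    struct fake_device d = { .coherent_dma_mask = DMA_BIT_MASK(64) };
    printf("mask = %#llx\n", (unsigned long long)pick_coherent_mask(&d));
    return 0;
}

The xen_free_coherent_pages call at line 338 appears to be the bail-out path taken when the allocated buffer cannot be made reachable under that mask.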
348 xen_swiotlb_free_coherent(struct device *hwdev, size_t size, void *vaddr, in xen_swiotlb_free_coherent() argument
355 if (hwdev && hwdev->coherent_dma_mask) in xen_swiotlb_free_coherent()
356 dma_mask = hwdev->coherent_dma_mask; in xen_swiotlb_free_coherent()
366 xen_free_coherent_pages(hwdev, size, vaddr, (dma_addr_t)phys, attrs); in xen_swiotlb_free_coherent()
435 static void xen_unmap_single(struct device *hwdev, dma_addr_t dev_addr, in xen_unmap_single() argument
443 xen_dma_unmap_page(hwdev, dev_addr, size, dir, attrs); in xen_unmap_single()
447 swiotlb_tbl_unmap_single(hwdev, paddr, size, dir); in xen_unmap_single()
463 void xen_swiotlb_unmap_page(struct device *hwdev, dma_addr_t dev_addr, in xen_swiotlb_unmap_page() argument
467 xen_unmap_single(hwdev, dev_addr, size, dir, attrs); in xen_swiotlb_unmap_page()
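
xen_unmap_single (lines 435-447) is also the backend for xen_swiotlb_unmap_page (line 467). Its key branch is that only addresses that were bounced into the swiotlb aperture have a slot to release. A self-contained sketch of that shape, with made-up aperture bounds and a release_bounce_slot stub standing in for swiotlb_tbl_unmap_single:

/* Sketch (assumed shape, not the kernel code) of the unmap path at
 * lines 435-447.  Aperture bounds and helpers are hypothetical. */
#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

typedef uint64_t phys_addr_t;

static const phys_addr_t io_tlb_start = 0x40000000ULL; /* illustrative */
static const phys_addr_t io_tlb_end   = 0x44000000ULL; /* illustrative */

static bool is_bounce_addr(phys_addr_t paddr)
{
    return paddr >= io_tlb_start && paddr < io_tlb_end;
}

/* Stands in for swiotlb_tbl_unmap_single() at line 447. */
static void release_bounce_slot(phys_addr_t paddr)
{
    printf("releasing bounce slot at %#llx\n", (unsigned long long)paddr);
}

static void unmap_single_sketch(phys_addr_t paddr)
{
    /* xen_dma_unmap_page() (line 443) would run first for cache
     * maintenance; omitted here.  Only bounced addresses own a slot. */
    if (is_bounce_addr(paddr))
        release_bounce_slot(paddr);
}

int main(void)
{
    unmap_single_sketch(0x41000000ULL); /* bounced */
    unmap_single_sketch(0x10000000ULL); /* direct-mapped: nothing to do */
    return 0;
}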
482 xen_swiotlb_sync_single(struct device *hwdev, dma_addr_t dev_addr, in xen_swiotlb_sync_single() argument
491 xen_dma_sync_single_for_cpu(hwdev, dev_addr, size, dir); in xen_swiotlb_sync_single()
495 swiotlb_tbl_sync_single(hwdev, paddr, size, dir, target); in xen_swiotlb_sync_single()
498 xen_dma_sync_single_for_device(hwdev, dev_addr, size, dir); in xen_swiotlb_sync_single()
507 xen_swiotlb_sync_single_for_cpu(struct device *hwdev, dma_addr_t dev_addr, in xen_swiotlb_sync_single_for_cpu() argument
510 xen_swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_CPU); in xen_swiotlb_sync_single_for_cpu()
515 xen_swiotlb_sync_single_for_device(struct device *hwdev, dma_addr_t dev_addr, in xen_swiotlb_sync_single_for_device() argument
518 xen_swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_DEVICE); in xen_swiotlb_sync_single_for_device()
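
Lines 482-518 show a common wrapper pattern: one internal helper parameterised by a sync target, plus two thin exported entry points that pin SYNC_FOR_CPU or SYNC_FOR_DEVICE. A compilable miniature of the same pattern:

/* Sketch of the dispatch pattern at lines 482-518; the printf bodies
 * stand in for the cache-maintenance and bounce-copy work. */
#include <stdio.h>

enum dma_sync_target { SYNC_FOR_CPU, SYNC_FOR_DEVICE };

static void sync_single(unsigned long addr, enum dma_sync_target target)
{
    if (target == SYNC_FOR_CPU)
        printf("make %#lx visible to the CPU\n", addr);
    else
        printf("flush %#lx out to the device\n", addr);
}

void sync_single_for_cpu(unsigned long addr)    { sync_single(addr, SYNC_FOR_CPU); }
void sync_single_for_device(unsigned long addr) { sync_single(addr, SYNC_FOR_DEVICE); }

int main(void)
{
    sync_single_for_cpu(0x1000UL);
    sync_single_for_device(0x1000UL);
    return 0;
}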
539 xen_swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, in xen_swiotlb_map_sg_attrs() argument
553 xen_arch_need_swiotlb(hwdev, paddr, dev_addr) || in xen_swiotlb_map_sg_attrs()
554 !dma_capable(hwdev, dev_addr, sg->length) || in xen_swiotlb_map_sg_attrs()
556 phys_addr_t map = swiotlb_tbl_map_single(hwdev, in xen_swiotlb_map_sg_attrs()
562 dev_warn(hwdev, "swiotlb buffer is full\n"); in xen_swiotlb_map_sg_attrs()
565 xen_swiotlb_unmap_sg_attrs(hwdev, sgl, i, dir, in xen_swiotlb_map_sg_attrs()
570 xen_dma_map_page(hwdev, pfn_to_page(map >> PAGE_SHIFT), in xen_swiotlb_map_sg_attrs()
581 xen_dma_map_page(hwdev, pfn_to_page(paddr >> PAGE_SHIFT), in xen_swiotlb_map_sg_attrs()
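
The heart of xen_swiotlb_map_sg_attrs is the per-segment test at lines 553-554: a segment is bounced through swiotlb_tbl_map_single when the architecture demands it (xen_arch_need_swiotlb) or when the device cannot reach it directly (!dma_capable); on pool exhaustion the driver warns and unwinds the segments already mapped (lines 562-565). A simplified standalone model of that decision, where dma_capable is reduced to a mask-reachability check (the real helper also accounts for bus/DMA offsets):

/* Sketch of the per-segment bounce decision at lines 553-554. */
#include <stdint.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

typedef uint64_t dma_addr_t;

struct seg { dma_addr_t dev_addr; size_t length; };

/* Simplified model: can the device address the whole segment? */
static bool dma_capable(uint64_t dma_mask, dma_addr_t addr, size_t size)
{
    return addr + size - 1 <= dma_mask;
}

/* True when the segment must go through the bounce buffer.
 * arch_forced stands in for the xen_arch_need_swiotlb() result. */
static bool needs_bounce(uint64_t dma_mask, bool arch_forced,
                         const struct seg *sg)
{
    return arch_forced || !dma_capable(dma_mask, sg->dev_addr, sg->length);
}

int main(void)
{
    struct seg high = { .dev_addr = 0x1ffffffffULL, .length = 4096 };
    printf("bounce: %d\n", needs_bounce(0xffffffffULL, false, &high));
    return 0;
}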
600 xen_swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl, in xen_swiotlb_unmap_sg_attrs() argument
610 xen_unmap_single(hwdev, sg->dma_address, sg_dma_len(sg), dir, attrs); in xen_swiotlb_unmap_sg_attrs()
623 xen_swiotlb_sync_sg(struct device *hwdev, struct scatterlist *sgl, in xen_swiotlb_sync_sg() argument
631 xen_swiotlb_sync_single(hwdev, sg->dma_address, in xen_swiotlb_sync_sg()
636 xen_swiotlb_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg, in xen_swiotlb_sync_sg_for_cpu() argument
639 xen_swiotlb_sync_sg(hwdev, sg, nelems, dir, SYNC_FOR_CPU); in xen_swiotlb_sync_sg_for_cpu()
644 xen_swiotlb_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg, in xen_swiotlb_sync_sg_for_device() argument
647 xen_swiotlb_sync_sg(hwdev, sg, nelems, dir, SYNC_FOR_DEVICE); in xen_swiotlb_sync_sg_for_device()
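
The scatterlist sync entry points (lines 623-647) reduce to a sweep that applies the single-segment sync to each entry. A tiny sketch, with an illustrative seg struct in place of struct scatterlist:

/* Sketch of the list sweep at lines 623-631. */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct seg { uint64_t dma_address; size_t dma_len; };

/* Stand-in for the single-segment sync (xen_swiotlb_sync_single). */
static void sync_one(uint64_t addr, size_t len)
{
    printf("sync %#llx (+%zu)\n", (unsigned long long)addr, len);
}

static void sync_list(const struct seg *sgl, int nelems)
{
    for (int i = 0; i < nelems; i++)
        sync_one(sgl[i].dma_address, sgl[i].dma_len);
}

int main(void)
{
    struct seg sgl[2] = { { 0x1000, 512 }, { 0x2000, 4096 } };
    sync_list(sgl, 2);
    return 0;
}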
652 xen_swiotlb_dma_mapping_error(struct device *hwdev, dma_addr_t dma_addr) in xen_swiotlb_dma_mapping_error() argument
665 xen_swiotlb_dma_supported(struct device *hwdev, u64 mask) in xen_swiotlb_dma_supported() argument
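
xen_swiotlb_dma_supported (line 665) answers whether a proposed mask is usable by the device. A natural shape for such a check, sketched here with a made-up pool_end_bus_addr, is that the highest bus address the bounce pool can hand out must fit under the mask:

/* Sketch of a dma_supported-style check; the pool bound is
 * illustrative, not the driver's actual value. */
#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

static const uint64_t pool_end_bus_addr = 0x43ffffffULL;

static bool dma_mask_supported(uint64_t mask)
{
    return pool_end_bus_addr <= mask;
}

int main(void)
{
    printf("32-bit mask ok: %d\n", dma_mask_supported(0xffffffffULL));
    return 0;
}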