Lines Matching refs:dev_addr

303 dma_addr_t dev_addr; in xen_swiotlb_alloc_coherent() local
331 dev_addr = xen_phys_to_bus(phys); in xen_swiotlb_alloc_coherent()
332 if (((dev_addr + size - 1 <= dma_mask)) && in xen_swiotlb_alloc_coherent()
334 *dma_handle = dev_addr; in xen_swiotlb_alloc_coherent()
349 dma_addr_t dev_addr, struct dma_attrs *attrs) in xen_swiotlb_free_coherent() argument
360 phys = xen_bus_to_phys(dev_addr); in xen_swiotlb_free_coherent()
362 if (((dev_addr + size - 1 > dma_mask)) || in xen_swiotlb_free_coherent()
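These first matches are the coherent alloc/free pair: the freshly allocated buffer's physical address is converted to a bus (machine) address, and if that address already fits under the device's coherent DMA mask and does not straddle a page boundary it is handed back unchanged; otherwise Xen is asked to exchange the pages for a machine-contiguous, addressable region, and the free path undoes that exchange under the same test. A condensed sketch of how the matched lines fit together; helpers not shown in the matches (xen_alloc_coherent_pages, range_straddles_page_boundary, xen_create_contiguous_region and friends) are assumed from kernels of this vintage and details are omitted:

void *xen_swiotlb_alloc_coherent(struct device *hwdev, size_t size,
				 dma_addr_t *dma_handle, gfp_t flags,
				 struct dma_attrs *attrs)
{
	int order = get_order(size);
	u64 dma_mask = DMA_BIT_MASK(32);
	phys_addr_t phys;
	dma_addr_t dev_addr;
	void *ret;

	ret = xen_alloc_coherent_pages(hwdev, size, dma_handle, flags, attrs);
	if (!ret)
		return NULL;

	if (hwdev && hwdev->coherent_dma_mask)
		dma_mask = dma_alloc_coherent_mask(hwdev, flags);

	/* At this point *dma_handle holds the physical address of the buffer. */
	phys = *dma_handle;
	dev_addr = xen_phys_to_bus(phys);
	if ((dev_addr + size - 1 <= dma_mask) &&
	    !range_straddles_page_boundary(phys, size)) {
		/* Already addressable and contiguous: hand back the bus address. */
		*dma_handle = dev_addr;
	} else {
		/* Ask Xen for a machine-contiguous region below the mask. */
		if (xen_create_contiguous_region(phys, order,
						 fls64(dma_mask), dma_handle)) {
			xen_free_coherent_pages(hwdev, size, ret,
						(dma_addr_t)phys, attrs);
			return NULL;
		}
	}
	memset(ret, 0, size);
	return ret;
}

void xen_swiotlb_free_coherent(struct device *hwdev, size_t size, void *vaddr,
			       dma_addr_t dev_addr, struct dma_attrs *attrs)
{
	int order = get_order(size);
	u64 dma_mask = DMA_BIT_MASK(32);
	phys_addr_t phys;

	if (hwdev && hwdev->coherent_dma_mask)
		dma_mask = hwdev->coherent_dma_mask;

	/* Translate back via the bus address; virt_to_phys() is not
	 * reliable for this on ARM. */
	phys = xen_bus_to_phys(dev_addr);

	/* Undo the contiguous-region exchange only if the allocation needed one. */
	if ((dev_addr + size - 1 > dma_mask) ||
	    range_straddles_page_boundary(phys, size))
		xen_destroy_contiguous_region(phys, order);

	xen_free_coherent_pages(hwdev, size, vaddr, (dma_addr_t)phys, attrs);
}
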
384 dma_addr_t dev_addr = xen_phys_to_bus(phys); in xen_swiotlb_map_page() local
392 if (dma_capable(dev, dev_addr, size) && in xen_swiotlb_map_page()
394 !xen_arch_need_swiotlb(dev, phys, dev_addr) && in xen_swiotlb_map_page()
399 xen_dma_map_page(dev, page, dev_addr, offset, size, dir, attrs); in xen_swiotlb_map_page()
400 return dev_addr; in xen_swiotlb_map_page()
406 trace_swiotlb_bounced(dev, dev_addr, size, swiotlb_force); in xen_swiotlb_map_page()
413 dev_addr, map & ~PAGE_MASK, size, dir, attrs); in xen_swiotlb_map_page()
414 dev_addr = xen_phys_to_bus(map); in xen_swiotlb_map_page()
419 if (!dma_capable(dev, dev_addr, size)) { in xen_swiotlb_map_page()
421 dev_addr = 0; in xen_swiotlb_map_page()
423 return dev_addr; in xen_swiotlb_map_page()
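The map_page matches show the usual fast-path/bounce split: if the bus address is already DMA-capable for the device and neither the architecture nor swiotlb_force demands bouncing, the bus address is returned directly; otherwise a bounce slot is allocated, mapped, and its bus address returned, with 0 signalling failure. A condensed reconstruction; start_dma_addr, SWIOTLB_MAP_ERROR and DMA_ERROR_CODE are assumed from kernels of this era and are not part of the matched lines:

dma_addr_t xen_swiotlb_map_page(struct device *dev, struct page *page,
				unsigned long offset, size_t size,
				enum dma_data_direction dir,
				struct dma_attrs *attrs)
{
	phys_addr_t map, phys = page_to_phys(page) + offset;
	dma_addr_t dev_addr = xen_phys_to_bus(phys);

	BUG_ON(dir == DMA_NONE);

	/* Fast path: the bus address is already usable by the device. */
	if (dma_capable(dev, dev_addr, size) &&
	    !range_straddles_page_boundary(phys, size) &&
	    !xen_arch_need_swiotlb(dev, phys, dev_addr) &&
	    !swiotlb_force) {
		/* Called only for its cache-maintenance side effects. */
		xen_dma_map_page(dev, page, dev_addr, offset, size, dir, attrs);
		return dev_addr;
	}

	/* Slow path: bounce through the swiotlb pool. */
	trace_swiotlb_bounced(dev, dev_addr, size, swiotlb_force);

	map = swiotlb_tbl_map_single(dev, start_dma_addr, phys, size, dir);
	if (map == SWIOTLB_MAP_ERROR)
		return DMA_ERROR_CODE;

	xen_dma_map_page(dev, pfn_to_page(map >> PAGE_SHIFT),
			 dev_addr, map & ~PAGE_MASK, size, dir, attrs);
	dev_addr = xen_phys_to_bus(map);

	/* Make sure the bounce buffer itself is addressable by the device. */
	if (!dma_capable(dev, dev_addr, size)) {
		swiotlb_tbl_unmap_single(dev, map, size, dir);
		dev_addr = 0;
	}
	return dev_addr;
}
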
435 static void xen_unmap_single(struct device *hwdev, dma_addr_t dev_addr, in xen_unmap_single() argument
439 phys_addr_t paddr = xen_bus_to_phys(dev_addr); in xen_unmap_single()
443 xen_dma_unmap_page(hwdev, dev_addr, size, dir, attrs); in xen_unmap_single()
446 if (is_xen_swiotlb_buffer(dev_addr)) { in xen_unmap_single()
463 void xen_swiotlb_unmap_page(struct device *hwdev, dma_addr_t dev_addr, in xen_swiotlb_unmap_page() argument
467 xen_unmap_single(hwdev, dev_addr, size, dir, attrs); in xen_swiotlb_unmap_page()
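Unmapping reverses that decision: dev_addr is translated back to a physical address for the swiotlb bookkeeping, cache maintenance runs against the bus address, and the bounce slot is released only when dev_addr actually points into the swiotlb pool. Roughly as below; the dma_mark_clean tail is assumed from this era of the code, it is not among the matches:

static void xen_unmap_single(struct device *hwdev, dma_addr_t dev_addr,
			     size_t size, enum dma_data_direction dir,
			     struct dma_attrs *attrs)
{
	phys_addr_t paddr = xen_bus_to_phys(dev_addr);

	BUG_ON(dir == DMA_NONE);

	xen_dma_unmap_page(hwdev, dev_addr, size, dir, attrs);

	/* NOTE: the bounce-pool test uses dev_addr, not paddr. */
	if (is_xen_swiotlb_buffer(dev_addr)) {
		swiotlb_tbl_unmap_single(hwdev, paddr, size, dir);
		return;
	}

	if (dir != DMA_FROM_DEVICE)
		return;

	/* Non-bounced DMA_FROM_DEVICE: let the arch mark the pages clean. */
	dma_mark_clean(phys_to_virt(paddr), size);
}

void xen_swiotlb_unmap_page(struct device *hwdev, dma_addr_t dev_addr,
			    size_t size, enum dma_data_direction dir,
			    struct dma_attrs *attrs)
{
	xen_unmap_single(hwdev, dev_addr, size, dir, attrs);
}
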
482 xen_swiotlb_sync_single(struct device *hwdev, dma_addr_t dev_addr, in xen_swiotlb_sync_single() argument
486 phys_addr_t paddr = xen_bus_to_phys(dev_addr); in xen_swiotlb_sync_single()
491 xen_dma_sync_single_for_cpu(hwdev, dev_addr, size, dir); in xen_swiotlb_sync_single()
494 if (is_xen_swiotlb_buffer(dev_addr)) in xen_swiotlb_sync_single()
498 xen_dma_sync_single_for_device(hwdev, dev_addr, size, dir); in xen_swiotlb_sync_single()
507 xen_swiotlb_sync_single_for_cpu(struct device *hwdev, dma_addr_t dev_addr, in xen_swiotlb_sync_single_for_cpu() argument
510 xen_swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_CPU); in xen_swiotlb_sync_single_for_cpu()
515 xen_swiotlb_sync_single_for_device(struct device *hwdev, dma_addr_t dev_addr, in xen_swiotlb_sync_single_for_device() argument
518 xen_swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_DEVICE); in xen_swiotlb_sync_single_for_device()
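The sync helpers funnel through one worker: the sync target decides whether the arch cache maintenance runs before (SYNC_FOR_CPU) or after (SYNC_FOR_DEVICE) the bounce-buffer copy, and the copy via swiotlb_tbl_sync_single only happens when dev_addr is a swiotlb slot. A sketch along those lines, with swiotlb_tbl_sync_single and the dma_mark_clean tail assumed rather than taken from the matches:

static void
xen_swiotlb_sync_single(struct device *hwdev, dma_addr_t dev_addr,
			size_t size, enum dma_data_direction dir,
			enum dma_sync_target target)
{
	phys_addr_t paddr = xen_bus_to_phys(dev_addr);

	BUG_ON(dir == DMA_NONE);

	if (target == SYNC_FOR_CPU)
		xen_dma_sync_single_for_cpu(hwdev, dev_addr, size, dir);

	/* Copy between bounce buffer and original pages if this was bounced. */
	if (is_xen_swiotlb_buffer(dev_addr))
		swiotlb_tbl_sync_single(hwdev, paddr, size, dir, target);

	if (target == SYNC_FOR_DEVICE)
		xen_dma_sync_single_for_device(hwdev, dev_addr, size, dir);

	if (dir != DMA_FROM_DEVICE)
		return;

	dma_mark_clean(phys_to_virt(paddr), size);
}

void
xen_swiotlb_sync_single_for_cpu(struct device *hwdev, dma_addr_t dev_addr,
				size_t size, enum dma_data_direction dir)
{
	xen_swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_CPU);
}

void
xen_swiotlb_sync_single_for_device(struct device *hwdev, dma_addr_t dev_addr,
				   size_t size, enum dma_data_direction dir)
{
	xen_swiotlb_sync_single(hwdev, dev_addr, size, dir, SYNC_FOR_DEVICE);
}
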
550 dma_addr_t dev_addr = xen_phys_to_bus(paddr); in xen_swiotlb_map_sg_attrs() local
553 xen_arch_need_swiotlb(hwdev, paddr, dev_addr) || in xen_swiotlb_map_sg_attrs()
554 !dma_capable(hwdev, dev_addr, sg->length) || in xen_swiotlb_map_sg_attrs()
571 dev_addr, in xen_swiotlb_map_sg_attrs()
582 dev_addr, in xen_swiotlb_map_sg_attrs()
587 sg->dma_address = dev_addr; in xen_swiotlb_map_sg_attrs()
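Finally, the scatter-gather path makes the same decision per segment: bounce when forced, when the architecture requires it, when the segment is not DMA-capable at its bus address, or when it straddles a page boundary; otherwise store dev_addr directly in sg->dma_address. A condensed sketch; the error-path call to xen_swiotlb_unmap_sg_attrs and the start_dma_addr argument are assumptions, they do not appear in the matched lines:

int
xen_swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
			 int nelems, enum dma_data_direction dir,
			 struct dma_attrs *attrs)
{
	struct scatterlist *sg;
	int i;

	BUG_ON(dir == DMA_NONE);

	for_each_sg(sgl, sg, nelems, i) {
		phys_addr_t paddr = sg_phys(sg);
		dma_addr_t dev_addr = xen_phys_to_bus(paddr);

		if (swiotlb_force ||
		    xen_arch_need_swiotlb(hwdev, paddr, dev_addr) ||
		    !dma_capable(hwdev, dev_addr, sg->length) ||
		    range_straddles_page_boundary(paddr, sg->length)) {
			/* Bounce this segment through the swiotlb pool. */
			phys_addr_t map = swiotlb_tbl_map_single(hwdev,
								 start_dma_addr,
								 paddr,
								 sg->length,
								 dir);
			if (map == SWIOTLB_MAP_ERROR) {
				/* Undo what was mapped so far and bail out. */
				xen_swiotlb_unmap_sg_attrs(hwdev, sgl, i,
							   dir, attrs);
				sg_dma_len(sgl) = 0;
				return 0;
			}
			xen_dma_map_page(hwdev, pfn_to_page(map >> PAGE_SHIFT),
					 dev_addr, map & ~PAGE_MASK,
					 sg->length, dir, attrs);
			sg->dma_address = xen_phys_to_bus(map);
		} else {
			/* Direct mapping; call is only for cache maintenance. */
			xen_dma_map_page(hwdev, pfn_to_page(paddr >> PAGE_SHIFT),
					 dev_addr, paddr & ~PAGE_MASK,
					 sg->length, dir, attrs);
			sg->dma_address = dev_addr;
		}
		sg_dma_len(sg) = sg->length;
	}
	return nelems;
}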