IO_TLB_SHIFT      239 arch/mips/cavium-octeon/dma-octeon.c 	swiotlb_nslabs = swiotlbsize >> IO_TLB_SHIFT;
IO_TLB_SHIFT      241 arch/mips/cavium-octeon/dma-octeon.c 	swiotlbsize = swiotlb_nslabs << IO_TLB_SHIFT;
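The two dma-octeon.c hits are the canonical bytes <-> slabs round-trip: shift right to count whole slabs (truncating), shift left to get back a slab-aligned byte size. A minimal standalone sketch, assuming IO_TLB_SHIFT == 11 (2 KiB slabs) as defined in include/linux/swiotlb.h:

#include <stdio.h>

#define IO_TLB_SHIFT 11

int main(void)
{
	unsigned long swiotlbsize = 64UL * 1024 * 1024 + 100;       /* not slab-aligned */
	unsigned long swiotlb_nslabs = swiotlbsize >> IO_TLB_SHIFT; /* truncating divide by 2048 */

	swiotlbsize = swiotlb_nslabs << IO_TLB_SHIFT; /* back to a whole number of slabs */
	printf("%lu slabs, %lu bytes\n", swiotlb_nslabs, swiotlbsize);
	return 0;
}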
IO_TLB_SHIFT     4195 drivers/mmc/host/sdhci.c 			unsigned int max_req_size = (1 << IO_TLB_SHIFT) *
IO_TLB_SHIFT      127 drivers/xen/swiotlb-xen.c 	dma_bits = get_order(IO_TLB_SEGSIZE << IO_TLB_SHIFT) + PAGE_SHIFT;
IO_TLB_SHIFT      135 drivers/xen/swiotlb-xen.c 				p + (i << IO_TLB_SHIFT),
IO_TLB_SHIFT      136 drivers/xen/swiotlb-xen.c 				get_order(slabs << IO_TLB_SHIFT),
IO_TLB_SHIFT      149 drivers/xen/swiotlb-xen.c 		xen_io_tlb_nslabs = (64 * 1024 * 1024 >> IO_TLB_SHIFT);
IO_TLB_SHIFT      154 drivers/xen/swiotlb-xen.c 	return xen_io_tlb_nslabs << IO_TLB_SHIFT;
IO_TLB_SHIFT      188 drivers/xen/swiotlb-xen.c 	order = get_order(xen_io_tlb_nslabs << IO_TLB_SHIFT);
IO_TLB_SHIFT      208 drivers/xen/swiotlb-xen.c #define SLABS_PER_PAGE (1 << (PAGE_SHIFT - IO_TLB_SHIFT))
IO_TLB_SHIFT      209 drivers/xen/swiotlb-xen.c #define IO_TLB_MIN_SLABS ((1<<20) >> IO_TLB_SHIFT)
IO_TLB_SHIFT      220 drivers/xen/swiotlb-xen.c 			bytes = xen_io_tlb_nslabs << IO_TLB_SHIFT;
IO_TLB_SHIFT      264 drivers/xen/swiotlb-xen.c 			(xen_io_tlb_nslabs << IO_TLB_SHIFT) >> 20);
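In the Xen path the bounce pool is handed to the hypervisor in segment-sized chunks, and dma_bits (line 127) seeds the address-width limit used for those exchanges. A sketch of that one computation, with get_order() re-implemented here purely for illustration, assuming IO_TLB_SHIFT == 11, IO_TLB_SEGSIZE == 128 and 4 KiB pages:

#include <stdio.h>

#define IO_TLB_SHIFT	11
#define IO_TLB_SEGSIZE	128
#define PAGE_SHIFT	12	/* assumption: 4 KiB pages */

/* illustrative get_order(): smallest order with (PAGE_SIZE << order) >= size */
static int get_order(unsigned long size)
{
	int order = 0;

	while ((1UL << (PAGE_SHIFT + order)) < size)
		order++;
	return order;
}

int main(void)
{
	int dma_bits = get_order(IO_TLB_SEGSIZE << IO_TLB_SHIFT) + PAGE_SHIFT;

	/* one segment = 128 * 2 KiB = 256 KiB = 2^18 -> order 6, dma_bits = 18 */
	printf("dma_bits = %d\n", dma_bits);
	return 0;
}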
IO_TLB_SHIFT       55 kernel/dma/swiotlb.c #define SLABS_PER_PAGE (1 << (PAGE_SHIFT - IO_TLB_SHIFT))
IO_TLB_SHIFT       62 kernel/dma/swiotlb.c #define IO_TLB_MIN_SLABS ((1<<20) >> IO_TLB_SHIFT)
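Both macros are pure arithmetic on the shift; evaluated under the usual assumptions (IO_TLB_SHIFT == 11, 4 KiB pages), a quick sketch:

#include <stdio.h>

#define IO_TLB_SHIFT	11
#define PAGE_SHIFT	12	/* assumption: 4 KiB pages */
#define SLABS_PER_PAGE	(1 << (PAGE_SHIFT - IO_TLB_SHIFT))
#define IO_TLB_MIN_SLABS ((1<<20) >> IO_TLB_SHIFT)

int main(void)
{
	printf("SLABS_PER_PAGE   = %d\n", SLABS_PER_PAGE);	/* 2 slabs per page */
	printf("IO_TLB_MIN_SLABS = %d\n", IO_TLB_MIN_SLABS);	/* 512 slabs = 1 MiB floor */
	return 0;
}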
IO_TLB_SHIFT      160 kernel/dma/swiotlb.c 	size = io_tlb_nslabs << IO_TLB_SHIFT;
IO_TLB_SHIFT      167 kernel/dma/swiotlb.c 	unsigned long bytes = io_tlb_nslabs << IO_TLB_SHIFT;
IO_TLB_SHIFT      195 kernel/dma/swiotlb.c 	bytes = PAGE_ALIGN(io_tlb_nslabs << IO_TLB_SHIFT);
IO_TLB_SHIFT      205 kernel/dma/swiotlb.c 	bytes = nslabs << IO_TLB_SHIFT;
IO_TLB_SHIFT      237 kernel/dma/swiotlb.c 	swiotlb_set_max_segment(io_tlb_nslabs << IO_TLB_SHIFT);
IO_TLB_SHIFT      253 kernel/dma/swiotlb.c 		io_tlb_nslabs = (default_size >> IO_TLB_SHIFT);
IO_TLB_SHIFT      257 kernel/dma/swiotlb.c 	bytes = io_tlb_nslabs << IO_TLB_SHIFT;
IO_TLB_SHIFT      266 kernel/dma/swiotlb.c 				    PAGE_ALIGN(io_tlb_nslabs << IO_TLB_SHIFT));
IO_TLB_SHIFT      285 kernel/dma/swiotlb.c 		io_tlb_nslabs = (default_size >> IO_TLB_SHIFT);
IO_TLB_SHIFT      292 kernel/dma/swiotlb.c 	order = get_order(io_tlb_nslabs << IO_TLB_SHIFT);
IO_TLB_SHIFT      294 kernel/dma/swiotlb.c 	bytes = io_tlb_nslabs << IO_TLB_SHIFT;
IO_TLB_SHIFT      333 kernel/dma/swiotlb.c 	bytes = nslabs << IO_TLB_SHIFT;
IO_TLB_SHIFT      369 kernel/dma/swiotlb.c 	swiotlb_set_max_segment(io_tlb_nslabs << IO_TLB_SHIFT);
IO_TLB_SHIFT      393 kernel/dma/swiotlb.c 			   get_order(io_tlb_nslabs << IO_TLB_SHIFT));
IO_TLB_SHIFT      400 kernel/dma/swiotlb.c 				   PAGE_ALIGN(io_tlb_nslabs << IO_TLB_SHIFT));
IO_TLB_SHIFT      478 kernel/dma/swiotlb.c 	offset_slots = ALIGN(tbl_dma_addr, 1 << IO_TLB_SHIFT) >> IO_TLB_SHIFT;
IO_TLB_SHIFT      484 kernel/dma/swiotlb.c 		    ? ALIGN(mask + 1, 1 << IO_TLB_SHIFT) >> IO_TLB_SHIFT
IO_TLB_SHIFT      485 kernel/dma/swiotlb.c 		    : 1UL << (BITS_PER_LONG - IO_TLB_SHIFT);
IO_TLB_SHIFT      491 kernel/dma/swiotlb.c 	nslots = ALIGN(alloc_size, 1 << IO_TLB_SHIFT) >> IO_TLB_SHIFT;
IO_TLB_SHIFT      493 kernel/dma/swiotlb.c 		stride = (1 << (PAGE_SHIFT - IO_TLB_SHIFT));
IO_TLB_SHIFT      535 kernel/dma/swiotlb.c 			tlb_addr = io_tlb_start + (index << IO_TLB_SHIFT);
IO_TLB_SHIFT      569 kernel/dma/swiotlb.c 		io_tlb_orig_addr[index+i] = orig_addr + (i << IO_TLB_SHIFT);
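swiotlb_tbl_map_single() (lines 478-569) works in slot units: alloc_size rounds up to slots, page-or-larger requests search on page-aligned strides, and the returned tlb_addr encodes the chosen slot index relative to io_tlb_start. A sketch of that arithmetic; the >= PAGE_SIZE test around line 493 is paraphrased from context, and io_tlb_start here is a made-up base address:

#include <stdio.h>

#define IO_TLB_SHIFT	11
#define PAGE_SHIFT	12
#define ALIGN(x, a)	(((x) + (a) - 1) & ~((unsigned long)(a) - 1))

int main(void)
{
	unsigned long io_tlb_start = 0x80000000UL;	/* hypothetical pool base */
	unsigned long alloc_size = 5000;		/* bytes to bounce */

	/* lines 491/493: 5000 B -> 3 slots; >= PAGE_SIZE, so stride = 2 */
	unsigned long nslots = ALIGN(alloc_size, 1 << IO_TLB_SHIFT) >> IO_TLB_SHIFT;
	unsigned long stride = alloc_size >= (1UL << PAGE_SHIFT) ?
			       1 << (PAGE_SHIFT - IO_TLB_SHIFT) : 1;

	/* line 535: a found index maps to an address inside the pool... */
	unsigned long index = 42;	/* pretend the search returned slot 42 */
	unsigned long tlb_addr = io_tlb_start + (index << IO_TLB_SHIFT);

	/* ...and lines 586/636 invert it: slot index plus intra-slab offset */
	unsigned long back = (tlb_addr - io_tlb_start) >> IO_TLB_SHIFT;
	unsigned long offset = tlb_addr & ((1UL << IO_TLB_SHIFT) - 1);

	printf("nslots=%lu stride=%lu index=%lu offset=%lu\n",
	       nslots, stride, back, offset);
	return 0;
}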
IO_TLB_SHIFT      585 kernel/dma/swiotlb.c 	int i, count, nslots = ALIGN(alloc_size, 1 << IO_TLB_SHIFT) >> IO_TLB_SHIFT;
IO_TLB_SHIFT      586 kernel/dma/swiotlb.c 	int index = (tlb_addr - io_tlb_start) >> IO_TLB_SHIFT;
IO_TLB_SHIFT      631 kernel/dma/swiotlb.c 	int index = (tlb_addr - io_tlb_start) >> IO_TLB_SHIFT;
IO_TLB_SHIFT      636 kernel/dma/swiotlb.c 	orig_addr += (unsigned long)tlb_addr & ((1 << IO_TLB_SHIFT) - 1);
IO_TLB_SHIFT      692 kernel/dma/swiotlb.c 	return ((size_t)1 << IO_TLB_SHIFT) * IO_TLB_SEGSIZE;
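swiotlb_max_mapping_size() is the same product that caps max_req_size in the truncated sdhci.c line above (that multiplication continues with IO_TLB_SEGSIZE on the next source line): one segment of IO_TLB_SEGSIZE contiguous slabs. Assuming IO_TLB_SHIFT == 11 and IO_TLB_SEGSIZE == 128, their values in include/linux/swiotlb.h:

#include <stdio.h>

#define IO_TLB_SHIFT	11
#define IO_TLB_SEGSIZE	128

int main(void)
{
	size_t max = ((size_t)1 << IO_TLB_SHIFT) * IO_TLB_SEGSIZE;

	/* 2 KiB * 128 = 256 KiB: the largest single bounce-buffer mapping */
	printf("%zu KiB\n", max / 1024);
	return 0;
}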