boundary_size 141 arch/alpha/kernel/pci_iommu.c unsigned long boundary_size;
boundary_size 145 arch/alpha/kernel/pci_iommu.c boundary_size = dma_get_seg_boundary(dev) + 1;
boundary_size 146 arch/alpha/kernel/pci_iommu.c boundary_size >>= PAGE_SHIFT;
boundary_size 148 arch/alpha/kernel/pci_iommu.c boundary_size = 1UL << (32 - PAGE_SHIFT);
boundary_size 159 arch/alpha/kernel/pci_iommu.c if (!i && iommu_is_span_boundary(p, n, base, boundary_size)) {
boundary_size 481 arch/ia64/hp/common/sba_iommu.c unsigned long boundary_size;
boundary_size 488 arch/ia64/hp/common/sba_iommu.c boundary_size = (unsigned long long)dma_get_seg_boundary(dev) + 1;
boundary_size 489 arch/ia64/hp/common/sba_iommu.c boundary_size = ALIGN(boundary_size, 1ULL << iovp_shift) >> iovp_shift;
boundary_size 550 arch/ia64/hp/common/sba_iommu.c boundary_size);
boundary_size 576 arch/ia64/hp/common/sba_iommu.c shift, boundary_size);
boundary_size 175 arch/powerpc/kernel/iommu.c unsigned long boundary_size;
boundary_size 240 arch/powerpc/kernel/iommu.c boundary_size = ALIGN(dma_get_seg_boundary(dev) + 1,
boundary_size 243 arch/powerpc/kernel/iommu.c boundary_size = ALIGN(1UL << 32, 1 << tbl->it_page_shift);
boundary_size 247 arch/powerpc/kernel/iommu.c boundary_size >> tbl->it_page_shift, align_mask);
boundary_size 264 arch/s390/pci/pci_dma.c unsigned long boundary_size;
boundary_size 266 arch/s390/pci/pci_dma.c boundary_size = ALIGN(dma_get_seg_boundary(dev) + 1,
boundary_size 270 arch/s390/pci/pci_dma.c boundary_size, 0);
boundary_size 105 arch/sparc/kernel/iommu-common.c unsigned long n, end, start, limit, boundary_size;
boundary_size 170 arch/sparc/kernel/iommu-common.c boundary_size = ALIGN(dma_get_seg_boundary(dev) + 1,
boundary_size 173 arch/sparc/kernel/iommu-common.c boundary_size = ALIGN(1ULL << 32, 1 << iommu->table_shift);
boundary_size 175 arch/sparc/kernel/iommu-common.c boundary_size = boundary_size >> iommu->table_shift;
boundary_size 183 arch/sparc/kernel/iommu-common.c boundary_size = iommu->poolsize * iommu->nr_pools;
boundary_size 186 arch/sparc/kernel/iommu-common.c boundary_size, align_mask);
boundary_size 40 arch/sparc/kernel/iommu_common.h unsigned long boundary_size,
boundary_size 48 arch/sparc/kernel/iommu_common.h return iommu_is_span_boundary(entry, nr, shift, boundary_size);
boundary_size 95 arch/x86/kernel/amd_gart_64.c unsigned long boundary_size;
boundary_size 100 arch/x86/kernel/amd_gart_64.c boundary_size = ALIGN((u64)dma_get_seg_boundary(dev) + 1,
boundary_size 105 arch/x86/kernel/amd_gart_64.c size, base_index, boundary_size, align_mask);
boundary_size 109 arch/x86/kernel/amd_gart_64.c size, base_index, boundary_size,
boundary_size 220 arch/x86/kernel/pci-calgary_64.c unsigned long boundary_size;
boundary_size 222 arch/x86/kernel/pci-calgary_64.c boundary_size = ALIGN(dma_get_seg_boundary(dev) + 1,
boundary_size 230 arch/x86/kernel/pci-calgary_64.c npages, 0, boundary_size, 0);
boundary_size 235 arch/x86/kernel/pci-calgary_64.c npages, 0, boundary_size, 0);
boundary_size 2549 drivers/iommu/amd_iommu.c unsigned long mask, boundary_size;
boundary_size 2554 drivers/iommu/amd_iommu.c boundary_size = mask + 1 ? ALIGN(mask + 1, PAGE_SIZE) >> PAGE_SHIFT :
boundary_size 2561 drivers/iommu/amd_iommu.c p = npages % boundary_size;
boundary_size 2563 drivers/iommu/amd_iommu.c if (p + n > boundary_size)
boundary_size 2564 drivers/iommu/amd_iommu.c npages += boundary_size - p;
boundary_size 299 drivers/parisc/ccio-dma.c ret = iommu_is_span_boundary(idx << 3, pages_needed, 0, boundary_size);\
boundary_size 343 drivers/parisc/ccio-dma.c unsigned long boundary_size;
boundary_size 359 drivers/parisc/ccio-dma.c boundary_size = ALIGN((unsigned long long)dma_get_seg_boundary(dev) + 1,
boundary_size 341 drivers/parisc/sba_iommu.c unsigned long boundary_size;
boundary_size 345 drivers/parisc/sba_iommu.c boundary_size = ALIGN((unsigned long long)dma_get_seg_boundary(dev) + 1,
boundary_size 361 drivers/parisc/sba_iommu.c boundary_size);
boundary_size 396 drivers/parisc/sba_iommu.c boundary_size);
boundary_size 19 include/linux/iommu-helper.h unsigned long shift, unsigned long boundary_size)
boundary_size 21 include/linux/iommu-helper.h BUG_ON(!is_power_of_2(boundary_size));
boundary_size 23 include/linux/iommu-helper.h shift = (shift + index) & (boundary_size - 1);
boundary_size 24 include/linux/iommu-helper.h return shift + nr > boundary_size;
boundary_size 30 include/linux/iommu-helper.h unsigned long boundary_size,
boundary_size 11 lib/iommu-helper.c unsigned long shift, unsigned long boundary_size,
boundary_size 21 lib/iommu-helper.c if (iommu_is_span_boundary(index, nr, shift, boundary_size)) {
boundary_size 22 lib/iommu-helper.c start = ALIGN(shift + index, boundary_size) - shift;
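
The call sites above share one pattern: each per-arch IOMMU allocator derives boundary_size from dma_get_seg_boundary(dev) + 1, rounds it up to the IOMMU page size, converts it to a page count, and passes it to iommu_is_span_boundary() (or iommu_area_alloc()) so that no allocation straddles a segment-boundary-sized region. Below is a minimal standalone sketch of that pattern, written as userspace C rather than kernel code; the helper name compute_boundary_pages() is invented for illustration, while the span check mirrors the iommu_is_span_boundary() lines from include/linux/iommu-helper.h shown in the listing.

/* Userspace illustration only; not kernel code. */
#include <assert.h>
#include <stdbool.h>
#include <stdio.h>

#define PAGE_SHIFT 12UL
#define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

/* Hypothetical stand-in for "dma_get_seg_boundary(dev) + 1 -> page count". */
static unsigned long compute_boundary_pages(unsigned long seg_boundary_mask,
                                            unsigned long page_shift)
{
        /* mask + 1 can wrap to 0 for a full-address-space boundary; the
         * listed callers fall back to e.g. 1UL << (32 - PAGE_SHIFT). */
        unsigned long size = seg_boundary_mask + 1;

        if (!size)
                return 1UL << (32 - page_shift);
        return ALIGN(size, 1UL << page_shift) >> page_shift;
}

/* Same logic as iommu_is_span_boundary(): true if the nr-page range starting
 * at (shift + index) crosses a boundary_size-aligned region. */
static bool is_span_boundary(unsigned int index, unsigned int nr,
                             unsigned long shift, unsigned long boundary_size)
{
        assert((boundary_size & (boundary_size - 1)) == 0); /* power of two */
        shift = (shift + index) & (boundary_size - 1);
        return shift + nr > boundary_size;
}

int main(void)
{
        /* A 64 KiB segment boundary (mask 0xffff) is 16 pages of 4 KiB. */
        unsigned long boundary = compute_boundary_pages(0xffffUL, PAGE_SHIFT);

        printf("boundary_size = %lu pages\n", boundary);                      /* 16 */
        printf("pages 14..17 cross: %d\n", is_span_boundary(14, 4, 0, boundary)); /* 1 */
        printf("pages 0..15  cross: %d\n", is_span_boundary(0, 16, 0, boundary)); /* 0 */
        return 0;
}

The power-of-two assertion matters because the check uses masking rather than division, which is why several of the callers above round boundary_size with ALIGN() before shifting it down to a page count.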