Lines Matching refs:page_size
166 #define NVME_INT_BYTES(dev) (NVME_INT_PAGES * (dev)->page_size)
176 unsigned nprps = DIV_ROUND_UP(size + dev->page_size, dev->page_size); in nvme_npages()
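The nvme_npages() line above budgets PRP entries for the worst case: the first data pointer may begin at any offset within a device page, so a transfer of `size` bytes can span one more page than size / page_size suggests, hence the `size + dev->page_size` numerator. (The NVME_INT_BYTES() macro above it appears to set the small-I/O threshold, NVME_INT_PAGES worth of device pages, under which the driver can use the iod's inline buffers instead of allocating.) A minimal userspace sketch of the arithmetic, with DIV_ROUND_UP re-defined locally:

#include <stdio.h>

/* Local stand-in for the kernel's DIV_ROUND_UP() macro. */
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

/* Worst-case PRP entries for a `size`-byte transfer: one extra page is
 * budgeted because the first segment may start mid-page, mirroring the
 * `size + dev->page_size` numerator in nvme_npages(). */
static unsigned worst_case_prps(unsigned size, unsigned page_size)
{
	return DIV_ROUND_UP(size + page_size, page_size);
}

int main(void)
{
	/* A 128 KiB I/O with 4 KiB device pages: 32 data pages plus one
	 * for a misaligned first segment = 33 PRP entries budgeted. */
	printf("%u\n", worst_case_prps(128 * 1024, 4096));
	return 0;
}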
468 const int last_prp = dev->page_size / 8 - 1; in nvme_free_iod()
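The `last_prp` constant above encodes the chaining scheme that nvme_free_iod() unwinds: PRP entries are 8 bytes, so a list page holds page_size / 8 slots, and the last slot of each full page stores the address of the next list page. A rough userspace model of the walk, with host pointers and free() standing in for the driver's little-endian DMA addresses and dma_pool_free():

#include <stdint.h>
#include <stdlib.h>

#define PAGE_SIZE	4096u
#define LAST_PRP	(PAGE_SIZE / 8 - 1)	/* index of the chain slot */

/* Free a chain of PRP list pages, reading each page's chain slot
 * before the page itself is released. */
static void free_prp_chain(unsigned long long *prp_list, int npages)
{
	while (npages-- > 0) {
		unsigned long long *next =
			(unsigned long long *)(uintptr_t)prp_list[LAST_PRP];

		free(prp_list);
		prp_list = next;	/* unused after the final page */
	}
}

int main(void)
{
	unsigned long long *p1 = malloc(PAGE_SIZE);
	unsigned long long *p2 = malloc(PAGE_SIZE);

	if (!p1 || !p2)
		return 1;
	p1[LAST_PRP] = (unsigned long long)(uintptr_t)p2;	/* chain p1 -> p2 */
	p2[LAST_PRP] = 0;					/* end of chain */
	free_prp_chain(p1, 2);
	return 0;
}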
659 u32 page_size = dev->page_size; in nvme_setup_prps() local
660 int offset = dma_addr & (page_size - 1); in nvme_setup_prps()
666 length -= (page_size - offset); in nvme_setup_prps()
670 dma_len -= (page_size - offset); in nvme_setup_prps()
672 dma_addr += (page_size - offset); in nvme_setup_prps()
679 if (length <= page_size) { in nvme_setup_prps()
684 nprps = DIV_ROUND_UP(length, page_size); in nvme_setup_prps()
697 return (total_len - length) + page_size; in nvme_setup_prps()
703 if (i == page_size >> 3) { in nvme_setup_prps()
714 dma_len -= page_size; in nvme_setup_prps()
715 dma_addr += page_size; in nvme_setup_prps()
716 length -= page_size; in nvme_setup_prps()
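The nvme_setup_prps() entries above trace the whole construction: the offset math peels off the partial first page (covered by PRP1), the `length <= page_size` test takes the no-list fast path where PRP2 holds a single data pointer, and otherwise entries are written into chained list pages, re-chaining whenever the index hits page_size >> 3, the number of 8-byte slots per page. The `(total_len - length) + page_size` return value is the partially-mapped length reported when a list-page allocation fails. Below is a simplified, single-segment model of the bookkeeping (the real code walks a scatterlist and allocates from a dma_pool); it returns how many data pointers are needed beyond PRP1, with 0 meaning PRP1 alone, 1 meaning a direct PRP2, and anything larger meaning a chained list:

#include <stdio.h>

#define PAGE_SIZE	4096u
#define PRPS_PER_PAGE	(PAGE_SIZE >> 3)	/* 512 8-byte slots per list page */

/* Count the PRP data pointers needed beyond PRP1 for one contiguous DMA
 * segment; *list_pages receives the list-page budget for them (the same
 * bound nvme_npages() uses: each full page gives up its last slot to
 * the chain pointer). */
static unsigned prp_entries(unsigned long long dma_addr, int length,
			    unsigned *list_pages)
{
	unsigned offset = dma_addr & (PAGE_SIZE - 1);
	unsigned nprps;

	*list_pages = 0;

	/* PRP1 covers the remainder of the first page past `offset`. */
	length -= (int)(PAGE_SIZE - offset);
	if (length <= 0)
		return 0;		/* PRP1 alone suffices */

	/* Fast path: at most one more page goes directly in PRP2. */
	if (length <= (int)PAGE_SIZE)
		return 1;

	/* Otherwise PRP2 points at a chained list; re-chaining happens
	 * when the write index reaches PRPS_PER_PAGE, i.e. page_size >> 3. */
	nprps = ((unsigned)length + PAGE_SIZE - 1) / PAGE_SIZE;
	*list_pages = (8 * nprps + (PAGE_SIZE - 8) - 1) / (PAGE_SIZE - 8);
	return nprps;
}

int main(void)
{
	unsigned pages;
	unsigned n = prp_entries(0x10000200ull, 1024 * 1024, &pages);

	/* 1 MiB starting 0x200 into a page: 256 entries in 1 list page. */
	printf("entries=%u list_pages=%u\n", n, pages);
	return 0;
}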
1474 unsigned q_size_aligned = roundup(q_depth * entry_size, dev->page_size); in nvme_cmb_qdepth()
1478 mem_per_q = round_down(mem_per_q, dev->page_size); in nvme_cmb_qdepth()
1498 roundup(SQ_SIZE(depth), dev->page_size); in nvme_alloc_sq_cmds()
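The two nvme_cmb_qdepth() lines and the nvme_alloc_sq_cmds() line above implement CMB sizing: each submission queue placed in the controller memory buffer is carved out at a page-granular offset (hence the roundup of SQ_SIZE(depth)), and if the requested queues collectively overflow the CMB, each queue's share is rounded down to a whole number of device pages and the depth recomputed from it. A userspace sketch of that sizing rule, with made-up CMB and entry sizes; the driver additionally rejects depths that shrink below a usefulness threshold, which the sketch omits:

#include <stdint.h>
#include <stdio.h>

#define PAGE 4096u

static unsigned roundup_u(unsigned x, unsigned a)   { return (x + a - 1) / a * a; }
static uint64_t round_down_u(uint64_t x, unsigned a) { return x / a * a; }

/* If nr_io_queues of the requested depth don't fit in the CMB, shrink
 * the depth so each queue's share is a whole number of device pages. */
static unsigned cmb_qdepth(unsigned q_depth, unsigned nr_io_queues,
			   unsigned entry_size, uint64_t cmb_size)
{
	unsigned q_size_aligned = roundup_u(q_depth * entry_size, PAGE);

	if ((uint64_t)q_size_aligned * nr_io_queues > cmb_size) {
		uint64_t mem_per_q = cmb_size / nr_io_queues;

		mem_per_q = round_down_u(mem_per_q, PAGE);
		q_depth = (unsigned)(mem_per_q / entry_size);
	}
	return q_depth;
}

int main(void)
{
	/* 8 queues of 1024 64-byte SQ entries against a 256 KiB CMB:
	 * 8 * 64 KiB = 512 KiB doesn't fit, so the depth drops to 512. */
	printf("depth=%u\n", cmb_qdepth(1024, 8, 64, 256 * 1024));
	return 0;
}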
1776 dev->page_size = 1 << page_shift; in nvme_configure_admin_queue()
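The assignment above is the end of a negotiation in nvme_configure_admin_queue(): the host's page shift is checked against the controller's supported range, CAP.MPSMIN (bits 51:48) and CAP.MPSMAX (bits 55:52), both encoded per the NVMe spec as powers of two relative to 4 KiB. A sketch of that derivation; the field extraction here is illustrative, the driver uses its own NVME_CAP_MPSMIN/MPSMAX macros and fails probe with -ENODEV when the controller's minimum exceeds the host page size:

#include <stdint.h>
#include <stdio.h>

static unsigned dev_page_size(unsigned host_page_shift, uint64_t cap)
{
	unsigned dev_page_min = (unsigned)((cap >> 48) & 0xf) + 12;	/* CAP.MPSMIN */
	unsigned dev_page_max = (unsigned)((cap >> 52) & 0xf) + 12;	/* CAP.MPSMAX */
	unsigned page_shift = host_page_shift;

	if (page_shift < dev_page_min)
		return 0;	/* the driver errors out here */
	if (page_shift > dev_page_max)
		page_shift = dev_page_max;	/* clamp to what the device supports */

	return 1u << page_shift;	/* dev->page_size = 1 << page_shift */
}

int main(void)
{
	/* A controller supporting only 4 KiB pages (MPSMIN = MPSMAX = 0)
	 * on a 64 KiB-page host: page_size clamps down to 4096. */
	printf("%u\n", dev_page_size(16, 0));
	return 0;
}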
2278 (dev->max_hw_sectors / (dev->page_size >> 9)) + 1); in nvme_alloc_ns()
2284 blk_queue_virt_boundary(ns->queue, dev->page_size - 1); in nvme_alloc_ns()
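The last two lines size the block layer's view of the device around PRP constraints: every PRP data pointer except the first must be page-aligned, so nvme_alloc_ns() caps the segment count at one per device page plus one for a misaligned head (`page_size >> 9` converts the page size to 512-byte sectors), and blk_queue_virt_boundary() tells the block layer not to merge segments across a device-page boundary. The arithmetic, worked for common values; note it lands on the same 33 as the nvme_npages() example earlier:

#include <stdio.h>

int main(void)
{
	unsigned page_size = 4096;	/* dev->page_size */
	unsigned max_hw_sectors = 256;	/* 512-byte sectors: a 128 KiB I/O */

	/* page_size >> 9 = sectors per device page (8 here); the +1 leaves
	 * room for a misaligned first segment, matching the extra PRP
	 * budgeted by nvme_npages(). */
	unsigned max_segments = max_hw_sectors / (page_size >> 9) + 1;

	/* Mask passed to blk_queue_virt_boundary(): segments whose
	 * addresses straddle this boundary cannot be merged. */
	unsigned boundary = page_size - 1;

	printf("max_segments=%u boundary=0x%x\n", max_segments, boundary);
	return 0;
}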