page_idx, all occurrences in the kernel source tree (path:line  code):

drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c:1012  uint64_t page_idx = 1;
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c:1014  r = amdgpu_gart_bind(adev, gtt->offset, page_idx,
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c:1024  gtt->offset + (page_idx << PAGE_SHIFT),
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c:1025  ttm->num_pages - page_idx,
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c:1026  &ttm->pages[page_idx],
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c:1027  &(gtt->ttm.dma_address[page_idx]), flags);
drivers/infiniband/hw/hns/hns_roce_hem.h:97  int page_idx;
drivers/infiniband/hw/hns/hns_roce_hem.h:157  iter->page_idx = 0;
drivers/infiniband/hw/hns/hns_roce_hem.h:167  if (++iter->page_idx >= iter->chunk->nsg) {
drivers/infiniband/hw/hns/hns_roce_hem.h:175  iter->page_idx = 0;
drivers/infiniband/hw/hns/hns_roce_hem.h:181  return sg_dma_address(&iter->chunk->mem[iter->page_idx]);
drivers/infiniband/hw/mlx5/cmd.c:97  u64 page_idx = 0;
drivers/infiniband/hw/mlx5/cmd.c:117  while (page_idx < num_memic_hw_pages) {
drivers/infiniband/hw/mlx5/cmd.c:119  page_idx = bitmap_find_next_zero_area(dm->memic_alloc_pages,
drivers/infiniband/hw/mlx5/cmd.c:121  page_idx,
drivers/infiniband/hw/mlx5/cmd.c:124  if (page_idx < num_memic_hw_pages)
drivers/infiniband/hw/mlx5/cmd.c:126  page_idx, num_pages);
drivers/infiniband/hw/mlx5/cmd.c:130  if (page_idx >= num_memic_hw_pages)
drivers/infiniband/hw/mlx5/cmd.c:134  hw_start_addr + (page_idx * PAGE_SIZE));
drivers/infiniband/hw/mlx5/cmd.c:140  page_idx, num_pages);
drivers/infiniband/hw/mlx5/cmd.c:144  page_idx++;
drivers/infiniband/hw/mlx5/main.c:2196  u16 page_idx = get_extended_index(vma->vm_pgoff);
drivers/infiniband/hw/mlx5/main.c:2201  if (find_next_zero_bit(mctx->dm_pages, page_idx + npages, page_idx) !=
drivers/infiniband/hw/mlx5/main.c:2202  page_idx + npages)
drivers/infiniband/hw/mlx5/main.c:2208  page_idx;
drivers/infiniband/hw/mlx5/main.c:2292  u32 page_idx;
drivers/infiniband/hw/mlx5/main.c:2302  page_idx = (dm->dev_addr - pci_resource_start(dm_db->dev->pdev, 0) -
drivers/infiniband/hw/mlx5/main.c:2308  &page_idx, sizeof(page_idx));
drivers/infiniband/hw/mlx5/main.c:2319  bitmap_set(to_mucontext(ctx)->dm_pages, page_idx,
drivers/infiniband/hw/mlx5/main.c:2429  u32 page_idx;
drivers/infiniband/hw/mlx5/main.c:2438  page_idx = (dm->dev_addr - pci_resource_start(dev->pdev, 0) -
drivers/infiniband/hw/mlx5/main.c:2441  bitmap_clear(ctx->dm_pages, page_idx,
drivers/infiniband/hw/mthca/mthca_memfree.h:77  int page_idx;
drivers/infiniband/hw/mthca/mthca_memfree.h:106  iter->page_idx = 0;
drivers/infiniband/hw/mthca/mthca_memfree.h:116  if (++iter->page_idx >= iter->chunk->nsg) {
drivers/infiniband/hw/mthca/mthca_memfree.h:124  iter->page_idx = 0;
drivers/infiniband/hw/mthca/mthca_memfree.h:130  return sg_dma_address(&iter->chunk->mem[iter->page_idx]);
drivers/infiniband/hw/mthca/mthca_memfree.h:135  return sg_dma_len(&iter->chunk->mem[iter->page_idx]);
drivers/infiniband/sw/siw/siw_mem.h:65  unsigned int page_idx = (addr - umem->fp_addr) >> PAGE_SHIFT,
drivers/infiniband/sw/siw/siw_mem.h:66  chunk_idx = page_idx >> CHUNK_SHIFT,
drivers/infiniband/sw/siw/siw_mem.h:67  page_in_chunk = page_idx & ~CHUNK_MASK;
drivers/infiniband/sw/siw/siw_mem.h:69  if (likely(page_idx < umem->num_pages))
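The hns_roce_hem.h and mthca_memfree.h hits above (and the mlx4/icm.h ones further down) share one chunk-iterator pattern: page_idx indexes the mapped scatterlist entries of the current chunk and resets to zero whenever the iterator advances to the next chunk in the list. A minimal sketch of that pattern follows; the struct names and the chunk list layout are simplified stand-ins, not the real mthca/hns definitions.

#include <linux/list.h>
#include <linux/scatterlist.h>
#include <linux/types.h>

struct demo_chunk {
	struct list_head list;          /* link in the owner's chunk list */
	int nsg;                        /* mapped sg entries in this chunk */
	struct scatterlist mem[8];      /* simplified fixed-size sg array */
};

struct demo_iter {
	struct list_head *chunk_list;   /* head of the whole chunk list */
	struct demo_chunk *chunk;       /* current chunk, NULL when done */
	int page_idx;                   /* sg entry within current chunk */
};

static void demo_iter_next(struct demo_iter *iter)
{
	if (++iter->page_idx >= iter->chunk->nsg) {
		/* current chunk exhausted: stop, or step to the next one */
		if (iter->chunk->list.next == iter->chunk_list) {
			iter->chunk = NULL;
			return;
		}
		iter->chunk = list_entry(iter->chunk->list.next,
					 struct demo_chunk, list);
		iter->page_idx = 0;
	}
}

static dma_addr_t demo_iter_addr(struct demo_iter *iter)
{
	return sg_dma_address(&iter->chunk->mem[iter->page_idx]);
}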
drivers/input/touchscreen/raydium_i2c_ts.c:574  u16 page_idx, const void *data, size_t len)
drivers/input/touchscreen/raydium_i2c_ts.c:585  buf[BL_PAGE_STR] = page_idx ? 0xff : 0;
drivers/input/touchscreen/raydium_i2c_ts.c:599  page_idx, i, error);
drivers/md/raid1.c:2647  int page_idx = 0;
drivers/md/raid1.c:2878  page = resync_fetch_page(rp, page_idx);
drivers/md/raid1.c:2890  } while (++page_idx < RESYNC_PAGES);
drivers/md/raid10.c:2914  int page_idx = 0;
drivers/md/raid10.c:3430  page = resync_fetch_page(rp, page_idx);
drivers/md/raid10.c:3439  } while (++page_idx < RESYNC_PAGES);
drivers/mmc/host/usdhi6rol0.c:179  int page_idx; /* page index within an SG segment */
drivers/mmc/host/usdhi6rol0.c:418  (host->page_idx << PAGE_SHIFT) + data->blksz - blk_head)
drivers/mmc/host/usdhi6rol0.c:442  host->page_idx++;
drivers/mmc/host/usdhi6rol0.c:452  host->page_idx++;
drivers/mmc/host/usdhi6rol0.c:462  done = (host->page_idx << PAGE_SHIFT) + host->offset;
drivers/mmc/host/usdhi6rol0.c:488  host->page_idx = 0;
drivers/mmc/host/usdhi6rol0.c:505  host->pg.page = nth_page(sg_page(host->sg), host->page_idx);
drivers/mmc/host/usdhi6rol0.c:1006  host->page_idx = 0;
drivers/mmc/host/usdhi6rol0.c:1708  data->flags & MMC_DATA_READ ? 'R' : 'W', host->page_idx,
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:45  ((void *)((wqs)->page_vaddr[(wq)->page_idx]) \
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:49  ((wqs)->page_paddr[(wq)->page_idx] \
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:53  ((void *)((wqs)->shadow_page_vaddr[(wq)->page_idx]) \
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:136  static int wqs_allocate_page(struct hinic_wqs *wqs, int page_idx)
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:138  return queue_alloc_page(wqs->hwif, &wqs->page_vaddr[page_idx],
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:139  &wqs->page_paddr[page_idx],
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:140  &wqs->shadow_page_vaddr[page_idx],
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:149  static void wqs_free_page(struct hinic_wqs *wqs, int page_idx)
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:155  wqs->page_vaddr[page_idx],
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:156  (dma_addr_t)wqs->page_paddr[page_idx]);
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:157  vfree(wqs->shadow_page_vaddr[page_idx]);
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:232  static int wqs_next_block(struct hinic_wqs *wqs, int *page_idx,
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:250  *page_idx = wqs->free_blocks[pos].page_idx;
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:253  wqs->free_blocks[pos].page_idx = -1;
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:260  static void wqs_return_block(struct hinic_wqs *wqs, int page_idx,
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:270  wqs->free_blocks[pos].page_idx = page_idx;
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:280  int page_idx, blk_idx, pos = 0;
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:282  for (page_idx = 0; page_idx < wqs->num_pages; page_idx++) {
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:284  wqs->free_blocks[pos].page_idx = page_idx;
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:309  int err, i, page_idx;
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:326  for (page_idx = 0; page_idx < wqs->num_pages; page_idx++) {
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:327  err = wqs_allocate_page(wqs, page_idx);
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:346  for (i = 0; i < page_idx; i++)
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:361  int page_idx;
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:365  for (page_idx = 0; page_idx < wqs->num_pages; page_idx++)
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:366  wqs_free_page(wqs, page_idx);
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:541  err = wqs_next_block(wqs, &wq->page_idx, &wq->block_idx);
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:572  wqs_return_block(wqs, wq->page_idx, wq->block_idx);
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:585  wqs_return_block(wqs, wq->page_idx, wq->block_idx);
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c:647  wq[i].page_idx = 0;
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.h:18  int page_idx;
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.h:25  int page_idx;
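In the hinic_hw_wq.c hits above, page_idx is one half of a {page_idx, block_idx} handle: the work-queue set carves each page into blocks and keeps the free pairs in a small ring, where wqs_next_block() pops the next pair (poisoning the slot with -1) and wqs_return_block() pushes one back. The sketch below restates that scheme with a hypothetical ring capacity and without the semaphore serialization the real driver uses.

#include <linux/errno.h>

#define DEMO_MAX_BLOCKS 128             /* assumed power-of-two capacity */

struct demo_free_block {
	int page_idx;
	int block_idx;
};

struct demo_wqs {
	int num_free_blks;
	int alloc_blk_pos;              /* consumer cursor */
	int return_blk_pos;             /* producer cursor */
	struct demo_free_block free_blocks[DEMO_MAX_BLOCKS];
};

static int demo_next_block(struct demo_wqs *wqs, int *page_idx, int *block_idx)
{
	int pos;

	if (wqs->num_free_blks <= 0)
		return -ENOMEM;         /* every block is handed out */
	wqs->num_free_blks--;

	pos = wqs->alloc_blk_pos++ & (DEMO_MAX_BLOCKS - 1);
	*page_idx = wqs->free_blocks[pos].page_idx;
	*block_idx = wqs->free_blocks[pos].block_idx;

	/* poison the slot so accidental reuse is easy to spot */
	wqs->free_blocks[pos].page_idx = -1;
	wqs->free_blocks[pos].block_idx = -1;
	return 0;
}

static void demo_return_block(struct demo_wqs *wqs, int page_idx, int block_idx)
{
	int pos = wqs->return_blk_pos++ & (DEMO_MAX_BLOCKS - 1);

	wqs->free_blocks[pos].page_idx = page_idx;
	wqs->free_blocks[pos].block_idx = block_idx;
	wqs->num_free_blks++;
}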
drivers/net/ethernet/mellanox/mlx4/icm.h:75  int page_idx;
drivers/net/ethernet/mellanox/mlx4/icm.h:103  iter->page_idx = 0;
drivers/net/ethernet/mellanox/mlx4/icm.h:113  if (++iter->page_idx >= iter->chunk->nsg) {
drivers/net/ethernet/mellanox/mlx4/icm.h:121  iter->page_idx = 0;
drivers/net/ethernet/mellanox/mlx4/icm.h:128  return iter->chunk->buf[iter->page_idx].dma_addr;
drivers/net/ethernet/mellanox/mlx4/icm.h:130  return sg_dma_address(&iter->chunk->sg[iter->page_idx]);
drivers/net/ethernet/mellanox/mlx4/icm.h:136  return iter->chunk->buf[iter->page_idx].size;
drivers/net/ethernet/mellanox/mlx4/icm.h:138  return sg_dma_len(&iter->chunk->sg[iter->page_idx]);
drivers/net/ethernet/mellanox/mlx5/core/en.h:958  u16 cqe_bcnt, u32 head_offset, u32 page_idx);
drivers/net/ethernet/mellanox/mlx5/core/en.h:961  u16 cqe_bcnt, u32 head_offset, u32 page_idx);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/rx.c:91  u32 page_idx)
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/rx.c:93  struct mlx5e_dma_info *di = &wi->umr.dma_info[page_idx];
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/rx.c:141  __set_bit(page_idx, wi->xdp_xmit_bitmap); /* non-atomic */
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/rx.h:22  u32 page_idx);
drivers/net/ethernet/mellanox/mlx5/core/en_rx.c:1238  u16 cqe_bcnt, u32 head_offset, u32 page_idx)
drivers/net/ethernet/mellanox/mlx5/core/en_rx.c:1241  struct mlx5e_dma_info *di = &wi->umr.dma_info[page_idx];
drivers/net/ethernet/mellanox/mlx5/core/en_rx.c:1284  u16 cqe_bcnt, u32 head_offset, u32 page_idx)
drivers/net/ethernet/mellanox/mlx5/core/en_rx.c:1286  struct mlx5e_dma_info *di = &wi->umr.dma_info[page_idx];
drivers/net/ethernet/mellanox/mlx5/core/en_rx.c:1314  __set_bit(page_idx, wi->xdp_xmit_bitmap); /* non-atomic */
drivers/net/ethernet/mellanox/mlx5/core/en_rx.c:1336  u32 page_idx = wqe_offset >> PAGE_SHIFT;
drivers/net/ethernet/mellanox/mlx5/core/en_rx.c:1363  rq, wi, cqe_bcnt, head_offset, page_idx);
drivers/scsi/lpfc/lpfc_sli.c:15036  int cnt, idx, numcq, page_idx = 0;
drivers/scsi/lpfc/lpfc_sli.c:15213  cnt = page_idx + dmabuf->buffer_tag;
drivers/scsi/lpfc/lpfc_sli.c:15220  page_idx += rc;
drivers/scsi/lpfc/lpfc_sli.c:16055  int cnt, idx, numrq, page_idx = 0;
drivers/scsi/lpfc/lpfc_sli.c:16141  cnt = page_idx + dmabuf->buffer_tag;
drivers/scsi/lpfc/lpfc_sli.c:16148  page_idx += rc;
drivers/scsi/lpfc/lpfc_sli.c:16153  cnt = page_idx + dmabuf->buffer_tag;
drivers/scsi/lpfc/lpfc_sli.c:16160  page_idx += rc;
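en_rx.c:1336 above is the standard split of a byte offset into a page index and an in-page offset: the high bits (wqe_offset >> PAGE_SHIFT) select the page, the low bits locate the data within it. A minimal sketch, with a hypothetical multi-page buffer standing in for the mlx5 receive-queue types:

#include <linux/mm.h>                   /* PAGE_SHIFT, PAGE_SIZE, page_address() */
#include <linux/types.h>

struct demo_buf {
	struct page *pages[16];         /* simplified backing pages */
};

static void *demo_buf_ptr(struct demo_buf *buf, u32 wqe_offset)
{
	u32 page_idx = wqe_offset >> PAGE_SHIFT;         /* which page */
	u32 head_offset = wqe_offset & (PAGE_SIZE - 1);  /* where inside it */

	return page_address(buf->pages[page_idx]) + head_offset;
}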
fs/mpage.c:392  unsigned page_idx;
fs/mpage.c:394  for (page_idx = 0; page_idx < nr_pages; page_idx++) {
fs/mpage.c:403  args.nr_pages = nr_pages - page_idx;
fs/ubifs/file.c:724  int err, page_idx, page_cnt, ret = 0, n = 0;
fs/ubifs/file.c:783  for (page_idx = 1; page_idx < page_cnt; page_idx++) {
fs/ubifs/file.c:784  pgoff_t page_offset = offset + page_idx;
fs/ubifs/file.c:802  ui->last_page_read = offset + page_idx - 1;
mm/percpu-vm.c:13  unsigned int cpu, int page_idx)
mm/percpu-vm.c:18  return vmalloc_to_page((void *)pcpu_chunk_addr(chunk, cpu, page_idx));
mm/percpu.c:256  static int __maybe_unused pcpu_page_idx(unsigned int cpu, int page_idx)
mm/percpu.c:258  return pcpu_unit_map[cpu] * pcpu_unit_pages + page_idx;
mm/percpu.c:261  static unsigned long pcpu_unit_page_offset(unsigned int cpu, int page_idx)
mm/percpu.c:263  return pcpu_unit_offsets[cpu] + (page_idx << PAGE_SHIFT);
mm/percpu.c:267  unsigned int cpu, int page_idx)
mm/percpu.c:270  pcpu_unit_page_offset(cpu, page_idx);
mm/readahead.c:120  unsigned page_idx;
mm/readahead.c:132  for (page_idx = 0; page_idx < nr_pages; page_idx++) {
mm/readahead.c:163  int page_idx;
mm/readahead.c:176  for (page_idx = 0; page_idx < nr_to_read; page_idx++) {
mm/readahead.c:177  pgoff_t page_offset = offset + page_idx;
mm/readahead.c:201  if (page_idx == nr_to_read - lookahead_size)
sound/soc/amd/acp-pcm-dma.c:301  u16 page_idx;
sound/soc/amd/acp-pcm-dma.c:307  for (page_idx = 0; page_idx < (num_of_pages); page_idx++) {
sound/soc/amd/acp-pcm-dma.c:309  acp_reg_write((offset + (page_idx * 8)),
sound/soc/amd/acp-pcm-dma.c:318  acp_reg_write((offset + (page_idx * 8) + 4),
sound/soc/amd/raven/acp3x-pcm-dma.c:213  u16 page_idx;
sound/soc/amd/raven/acp3x-pcm-dma.c:229  for (page_idx = 0; page_idx < rtd->num_pages; page_idx++) {
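The mm/percpu.c hits above do a two-step translation from (cpu, page_idx) to an address: first linearize into a global page index via the unit map, then add the page's byte offset to that unit's base offset. The sketch below restates the arithmetic of pcpu_page_idx() and pcpu_unit_page_offset(), with the globals written as explicit extern stand-ins; their real definitions live elsewhere in mm/percpu.c.

#include <asm/page.h>                   /* PAGE_SHIFT */

extern const int pcpu_unit_map[];       /* stand-in: cpu -> unit number */
extern int pcpu_unit_pages;             /* stand-in: pages per unit */
extern const unsigned long pcpu_unit_offsets[]; /* stand-in: unit base byte offsets */

static int demo_pcpu_page_idx(unsigned int cpu, int page_idx)
{
	/* each CPU's unit occupies pcpu_unit_pages consecutive pages */
	return pcpu_unit_map[cpu] * pcpu_unit_pages + page_idx;
}

static unsigned long demo_pcpu_unit_page_offset(unsigned int cpu, int page_idx)
{
	/* byte offset of this page relative to the chunk base address */
	return pcpu_unit_offsets[cpu] + (page_idx << PAGE_SHIFT);
}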