Lines Matching refs:npages
139 int npages = nvkm_memory_size(memory) >> 12; in gk20a_instobj_cpu_map_dma() local
140 struct page *pages[npages]; in gk20a_instobj_cpu_map_dma()
146 for (i = 1; i < npages; i++) in gk20a_instobj_cpu_map_dma()
149 return vmap(pages, npages, VM_MAP, pgprot_writecombine(PAGE_KERNEL)); in gk20a_instobj_cpu_map_dma()
160 int npages = nvkm_memory_size(memory) >> 12; in gk20a_instobj_cpu_map_iommu() local
162 return vmap(node->pages, npages, VM_MAP, in gk20a_instobj_cpu_map_iommu()
390 gk20a_instobj_ctor_dma(struct gk20a_instmem *imem, u32 npages, u32 align, in gk20a_instobj_ctor_dma() argument
403 node->cpuaddr = dma_alloc_attrs(dev, npages << PAGE_SHIFT, in gk20a_instobj_ctor_dma()
420 node->r.length = (npages << PAGE_SHIFT) >> 12; in gk20a_instobj_ctor_dma()
431 gk20a_instobj_ctor_iommu(struct gk20a_instmem *imem, u32 npages, u32 align, in gk20a_instobj_ctor_iommu() argument
446 sizeof(*node->dma_addrs)) * npages), GFP_KERNEL))) in gk20a_instobj_ctor_iommu()
449 node->dma_addrs = (void *)(node->pages + npages); in gk20a_instobj_ctor_iommu()
454 for (i = 0; i < npages; i++) { in gk20a_instobj_ctor_iommu()
474 ret = nvkm_mm_head(imem->mm, 0, 1, npages, npages, in gk20a_instobj_ctor_iommu()
483 for (i = 0; i < npages; i++) { in gk20a_instobj_ctor_iommu()
515 for (i = 0; i < npages && node->pages[i] != NULL; i++) { in gk20a_instobj_ctor_iommu()
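
To make the trail above easier to follow, here is a minimal sketch of the CPU-mapping pattern the gk20a_instobj_cpu_map_dma() hits (lines 139, 140, 146, 149) trace: derive the 4 KiB page count from the object size, point a page array at the physically contiguous DMA buffer, and vmap it write-combined. The helper name map_contig_wc() and the kmalloc'd page array are illustrative assumptions of this sketch (the listed code sizes its array on the stack); only vmap(), virt_to_page() and pgprot_writecombine(PAGE_KERNEL) are taken from the hits themselves.

/*
 * Illustrative sketch, not the driver's verbatim code.
 */
#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/vmalloc.h>

static void *map_contig_wc(void *cpuaddr, u64 size)
{
	int npages = size >> PAGE_SHIFT;	/* same as size >> 12 with 4 KiB pages */
	struct page **pages;
	void *vaddr;
	int i;

	pages = kmalloc_array(npages, sizeof(*pages), GFP_KERNEL);
	if (!pages)
		return NULL;

	/* the backing DMA allocation is contiguous, so its pages are consecutive */
	pages[0] = virt_to_page(cpuaddr);
	for (i = 1; i < npages; i++)
		pages[i] = pages[0] + i;

	/* map the pages write-combined into the kernel's virtual address space */
	vaddr = vmap(pages, npages, VM_MAP, pgprot_writecombine(PAGE_KERNEL));
	kfree(pages);	/* vmap() does not need the array once the mapping exists */
	return vaddr;
}

The same size arithmetic explains the constructor hits: with 4 KiB pages, (npages << PAGE_SHIFT) >> 12 at line 420 simply recovers npages, expressing the region length in 4 KiB units.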