Lines matching refs: vma

579 static int uio_find_mem_index(struct vm_area_struct *vma)  in uio_find_mem_index()  argument
581 struct uio_device *idev = vma->vm_private_data; in uio_find_mem_index()
583 if (vma->vm_pgoff < MAX_UIO_MAPS) { in uio_find_mem_index()
584 if (idev->info->mem[vma->vm_pgoff].size == 0) in uio_find_mem_index()
586 return (int)vma->vm_pgoff; in uio_find_mem_index()
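The matches above fall inside uio_find_mem_index(), the helper that turns the vma's page offset into an index into the device's mem[] table (these functions appear to be from the UIO core driver, drivers/uio/uio.c). A minimal sketch of the whole helper, reconstructed around the matched lines; the MAX_UIO_MAPS bound is visible above, while the -1 failure returns are an assumption based on how the callers below test for mi < 0:

static int uio_find_mem_index(struct vm_area_struct *vma)
{
        struct uio_device *idev = vma->vm_private_data;

        /* Userspace selects mapping N by calling mmap() with
         * offset N * page size, so vm_pgoff is a map index,
         * not a byte offset. */
        if (vma->vm_pgoff < MAX_UIO_MAPS) {
                if (idev->info->mem[vma->vm_pgoff].size == 0)
                        return -1;              /* slot exists but is empty */
                return (int)vma->vm_pgoff;      /* valid mapping index */
        }
        return -1;                              /* beyond the mem[] table */
}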
591 static int uio_vma_fault(struct vm_area_struct *vma, struct vm_fault *vmf) in uio_vma_fault() argument
593 struct uio_device *idev = vma->vm_private_data; in uio_vma_fault()
598 int mi = uio_find_mem_index(vma); in uio_vma_fault()
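uio_vma_fault() is the fault handler installed for logical and virtual memory regions; in this version of the driver it still receives the vma explicitly, as the matched prototype shows. A sketch of the handler around the matched lines, assuming the usual split between virt_to_page() for UIO_MEM_LOGICAL regions and vmalloc_to_page() for UIO_MEM_VIRTUAL ones; exact details may differ between kernel versions:

static int uio_vma_fault(struct vm_area_struct *vma, struct vm_fault *vmf)
{
        struct uio_device *idev = vma->vm_private_data;
        struct page *page;
        unsigned long offset;
        void *addr;

        int mi = uio_find_mem_index(vma);
        if (mi < 0)
                return VM_FAULT_SIGBUS;

        /* vm_pgoff carries the map index, so subtract it before
         * computing the offset into this memory region. */
        offset = (vmf->pgoff - mi) << PAGE_SHIFT;

        addr = (void *)(unsigned long)(idev->info->mem[mi].addr + offset);
        if (idev->info->mem[mi].memtype == UIO_MEM_LOGICAL)
                page = virt_to_page(addr);      /* lowmem/kmalloc'd region */
        else
                page = vmalloc_to_page(addr);   /* vmalloc'd region */
        get_page(page);
        vmf->page = page;
        return 0;
}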
622 static int uio_mmap_logical(struct vm_area_struct *vma) in uio_mmap_logical() argument
624 vma->vm_flags |= VM_DONTEXPAND | VM_DONTDUMP; in uio_mmap_logical()
625 vma->vm_ops = &uio_logical_vm_ops; in uio_mmap_logical()
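uio_mmap_logical() maps nothing up front: it marks the vma as non-expandable and excluded from core dumps and installs the fault handler above, so pages are inserted lazily on first access. A sketch, including the vm_operations_struct that ties in uio_vma_fault():

static const struct vm_operations_struct uio_logical_vm_ops = {
        .fault = uio_vma_fault,         /* populate pages on demand */
};

static int uio_mmap_logical(struct vm_area_struct *vma)
{
        vma->vm_flags |= VM_DONTEXPAND | VM_DONTDUMP;
        vma->vm_ops = &uio_logical_vm_ops;
        return 0;
}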
635 static int uio_mmap_physical(struct vm_area_struct *vma) in uio_mmap_physical() argument
637 struct uio_device *idev = vma->vm_private_data; in uio_mmap_physical()
638 int mi = uio_find_mem_index(vma); in uio_mmap_physical()
646 if (vma->vm_end - vma->vm_start > mem->size) in uio_mmap_physical()
649 vma->vm_ops = &uio_physical_vm_ops; in uio_mmap_physical()
650 vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot); in uio_mmap_physical()
661 return remap_pfn_range(vma, in uio_mmap_physical()
662 vma->vm_start, in uio_mmap_physical()
664 vma->vm_end - vma->vm_start, in uio_mmap_physical()
665 vma->vm_page_prot); in uio_mmap_physical()
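uio_mmap_physical() validates the region, forces an uncached protection, and maps the whole region eagerly with remap_pfn_range(). A sketch around the matched lines; the page-alignment check on mem->addr, the -EINVAL/-ENODEV error codes, and the uio_physical_vm_ops definition are assumptions based on the UIO driver of this era:

static const struct vm_operations_struct uio_physical_vm_ops = {
#ifdef CONFIG_HAVE_IOREMAP_PROT
        .access = generic_access_phys,
#endif
};

static int uio_mmap_physical(struct vm_area_struct *vma)
{
        struct uio_device *idev = vma->vm_private_data;
        int mi = uio_find_mem_index(vma);
        struct uio_mem *mem;

        if (mi < 0)
                return -EINVAL;
        mem = idev->info->mem + mi;

        if (mem->addr & ~PAGE_MASK)
                return -ENODEV;         /* region must start page aligned */
        if (vma->vm_end - vma->vm_start > mem->size)
                return -EINVAL;         /* request larger than the region */

        vma->vm_ops = &uio_physical_vm_ops;
        vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot);

        /* vm_pgoff is the map index, not a page offset into the
         * region, so the mapping always starts at mem->addr. */
        return remap_pfn_range(vma,
                               vma->vm_start,
                               mem->addr >> PAGE_SHIFT,
                               vma->vm_end - vma->vm_start,
                               vma->vm_page_prot);
}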
668 static int uio_mmap(struct file *filep, struct vm_area_struct *vma) in uio_mmap() argument
676 if (vma->vm_end < vma->vm_start) in uio_mmap()
679 vma->vm_private_data = idev; in uio_mmap()
681 mi = uio_find_mem_index(vma); in uio_mmap()
685 requested_pages = vma_pages(vma); in uio_mmap()
692 ret = idev->info->mmap(idev->info, vma); in uio_mmap()
698 return uio_mmap_physical(vma); in uio_mmap()
701 return uio_mmap_logical(vma); in uio_mmap()
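uio_mmap() is the file_operations entry point that ties the pieces together: it sanity-checks the vma, stashes the uio_device in vm_private_data (which the helpers above read back), resolves the map index, bounds-checks the requested size, and then either calls a driver-supplied mmap hook or dispatches on the region's memtype. A sketch of the flow around the matched lines; the uio_listener plumbing and the memtype switch are assumptions based on the driver of this era:

static int uio_mmap(struct file *filep, struct vm_area_struct *vma)
{
        struct uio_listener *listener = filep->private_data;
        struct uio_device *idev = listener->dev;
        unsigned long requested_pages, actual_pages;
        int mi, ret;

        if (vma->vm_end < vma->vm_start)
                return -EINVAL;

        vma->vm_private_data = idev;    /* read back by uio_find_mem_index() */

        mi = uio_find_mem_index(vma);
        if (mi < 0)
                return -EINVAL;

        requested_pages = vma_pages(vma);
        actual_pages = ((idev->info->mem[mi].addr & ~PAGE_MASK)
                        + idev->info->mem[mi].size + PAGE_SIZE - 1) >> PAGE_SHIFT;
        if (requested_pages > actual_pages)
                return -EINVAL;

        if (idev->info->mmap) {
                /* the device driver supplied its own mmap hook */
                ret = idev->info->mmap(idev->info, vma);
                return ret;
        }

        switch (idev->info->mem[mi].memtype) {
        case UIO_MEM_PHYS:
                return uio_mmap_physical(vma);
        case UIO_MEM_LOGICAL:
        case UIO_MEM_VIRTUAL:
                return uio_mmap_logical(vma);
        default:
                return -EINVAL;
        }
}

From userspace, mapping region N of a UIO device then looks roughly like this (illustrative only; /dev/uio0, length, and N are placeholders):

        int fd = open("/dev/uio0", O_RDWR);
        void *p = mmap(NULL, length, PROT_READ | PROT_WRITE, MAP_SHARED,
                       fd, N * sysconf(_SC_PAGESIZE));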