Lines matching refs: va_end
302 else if (addr >= va->va_end) in __find_vmap_area()
322 if (va->va_start < tmp_va->va_end) in __insert_vmap_area()
324 else if (va->va_end > tmp_va->va_start) in __insert_vmap_area()
401 addr = ALIGN(first->va_end, align); in alloc_vmap_area()
418 if (tmp->va_end >= addr) { in alloc_vmap_area()
435 addr = ALIGN(first->va_end, align); in alloc_vmap_area()
451 va->va_end = addr + size; in alloc_vmap_area()
459 BUG_ON(va->va_end > vend); in alloc_vmap_area()
482 if (va->va_end < cached_vstart) { in __free_vmap_area()
506 if (va->va_end > VMALLOC_START && va->va_end <= VMALLOC_END) in __free_vmap_area()
507 vmap_area_pcpu_hole = max(vmap_area_pcpu_hole, va->va_end); in __free_vmap_area()
527 vunmap_page_range(va->va_start, va->va_end); in unmap_vmap_area()
628 if (va->va_end > *end) in __purge_vmap_area_lazy()
629 *end = va->va_end; in __purge_vmap_area_lazy()
630 nr += (va->va_end - va->va_start) >> PAGE_SHIFT; in __purge_vmap_area_lazy()
682 atomic_add((va->va_end - va->va_start) >> PAGE_SHIFT, &vmap_lazy_nr); in free_vmap_area_noflush()
702 flush_cache_vunmap(va->va_start, va->va_end); in free_unmap_vmap_area()
1219 va->va_end = va->va_start + tmp->size; in vmalloc_init()
1310 vm->size = va->va_end - va->va_start; in setup_vmalloc_vm()
1441 vmap_debug_free_range(va->va_start, va->va_end); in remove_vm_area()
2301 if (end < va->va_end) in pvm_find_next_prev()
2303 else if (end > va->va_end) in pvm_find_next_prev()
2312 if (va->va_end > end) { in pvm_find_next_prev()
2350 while (*pprev && (*pprev)->va_end > addr) { in pvm_determine_end()
2452 BUG_ON(next && next->va_end <= base + end); in pcpu_get_vm_areas()
2453 BUG_ON(prev && prev->va_end > base + end); in pcpu_get_vm_areas()
2484 if (prev && prev->va_end > base + start) { in pcpu_get_vm_areas()
2509 va->va_end = va->va_start + sizes[area]; in pcpu_get_vm_areas()
2723 vmi->used += (va->va_end - va->va_start); in get_vmalloc_info()
2729 prev_end = va->va_end; in get_vmalloc_info()
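
Every hit above touches the pair of fields that delimit a vmap_area's virtual address range: va_start (inclusive) and va_end (exclusive). That is why the range size in pages is computed as (va->va_end - va->va_start) >> PAGE_SHIFT at lines 630, 682 and 2723, and why boundary tests compare an address against va_end with strict or non-strict operators depending on which side of the half-open interval is being probed. The following is a minimal standalone sketch of that convention, not the kernel's actual definition: the real struct vmap_area carries additional fields (rb-tree node, list linkage, flags, backing vm_struct), and PAGE_SHIFT is assumed here to be 12.

    /*
     * Userspace sketch of the [va_start, va_end) convention seen in the
     * listing above.  Only the range bookkeeping is modelled; field names
     * follow the listing, everything else is illustrative.
     */
    #include <stdio.h>

    #define PAGE_SHIFT 12   /* assumption: 4 KiB pages */

    struct vmap_area {
            unsigned long va_start; /* first byte of the mapped range */
            unsigned long va_end;   /* one past the last byte (exclusive) */
    };

    /* Range size in pages, mirroring (va_end - va_start) >> PAGE_SHIFT. */
    static unsigned long va_size_pages(const struct vmap_area *va)
    {
            return (va->va_end - va->va_start) >> PAGE_SHIFT;
    }

    /* Does addr fall inside the half-open interval [va_start, va_end)? */
    static int va_contains(const struct vmap_area *va, unsigned long addr)
    {
            return addr >= va->va_start && addr < va->va_end;
    }

    int main(void)
    {
            struct vmap_area va = {
                    .va_start = 0xffffc90000000000UL, /* illustrative addresses */
                    .va_end   = 0xffffc90000004000UL,
            };

            printf("pages: %lu\n", va_size_pages(&va));                 /* 4 */
            printf("contains: %d\n", va_contains(&va, va.va_start + 0x1000)); /* 1 */
            return 0;
    }

Because va_end is exclusive, adjacent areas can share a boundary without overlapping, which is what the insertion checks at lines 322-324 and the hole accounting at line 2729 rely on.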