Lines matching refs: va_end (mm/vmalloc.c)

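Every hit below is against the va_end field of struct vmap_area in mm/vmalloc.c. For orientation, a minimal sketch of the structure as it looked in kernels of this vintage (member list approximate, reconstructed from memory):

    struct vmap_area {
            unsigned long va_start;        /* first address of the range */
            unsigned long va_end;          /* exclusive end: [va_start, va_end) */
            unsigned long flags;
            struct rb_node rb_node;        /* address-sorted rbtree */
            struct list_head list;         /* address-sorted list */
            struct list_head purge_list;   /* "lazy purge" list */
            struct vm_struct *vm;
            struct rcu_head rcu_head;
    };

That va_end is exclusive follows from the hits themselves: sizes are computed as va_end - va_start (lines 632, 1312).
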
304 else if (addr >= va->va_end)  in __find_vmap_area()
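
Line 304 is the right-descend branch of the rbtree lookup; a hedged reconstruction of the surrounding loop in __find_vmap_area():

    while (n) {
            struct vmap_area *va = rb_entry(n, struct vmap_area, rb_node);

            if (addr < va->va_start)
                    n = n->rb_left;
            else if (addr >= va->va_end)   /* line 304: addr past this area */
                    n = n->rb_right;
            else
                    return va;             /* va_start <= addr < va_end */
    }
    return NULL;
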
324 if (va->va_start < tmp_va->va_end) in __insert_vmap_area()
326 else if (va->va_end > tmp_va->va_start) in __insert_vmap_area()
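
Lines 324 and 326 pick the subtree during insertion. Because areas never overlap, comparing the new start against an existing end (and vice versa) is enough to order them; approximate context:

    tmp_va = rb_entry(parent, struct vmap_area, rb_node);
    if (va->va_start < tmp_va->va_end)        /* line 324: entirely left of tmp_va */
            p = &(*p)->rb_left;
    else if (va->va_end > tmp_va->va_start)   /* line 326: entirely right of tmp_va */
            p = &(*p)->rb_right;
    else
            BUG();                            /* only a zero-size area remains */
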
403 addr = ALIGN(first->va_end, align); in alloc_vmap_area()
420 if (tmp->va_end >= addr) { in alloc_vmap_area()
437 addr = ALIGN(first->va_end, align); in alloc_vmap_area()
453 va->va_end = addr + size; in alloc_vmap_area()
461 BUG_ON(va->va_end > vend); in alloc_vmap_area()
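
In alloc_vmap_area() the hole search is driven by va_end: the candidate address is bumped to the aligned end of each area in the way (lines 403 and 437), line 420 steers the rbtree walk to the first area ending at or after the cached address, and lines 453 and 461 record and sanity-check the new extent. A sketch of the forward scan, reconstructed approximately:

    /* walk areas until [addr, addr + size) fits below vend */
    while (addr + size > first->va_start && addr + size <= vend) {
            if (addr + cached_hole_size < first->va_start)
                    cached_hole_size = first->va_start - addr;
            addr = ALIGN(first->va_end, align);     /* line 437 */
            if (addr + size - 1 < addr)             /* address wrap */
                    goto overflow;
            if (list_is_last(&first->list, &vmap_area_list))
                    goto found;
            first = list_entry(first->list.next, struct vmap_area, list);
    }
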
484 if (va->va_end < cached_vstart) { in __free_vmap_area()
508 if (va->va_end > VMALLOC_START && va->va_end <= VMALLOC_END) in __free_vmap_area()
509 vmap_area_pcpu_hole = max(vmap_area_pcpu_hole, va->va_end); in __free_vmap_area()
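
On free, line 484 drops the free-area cache when the removed area ends below the cached scan start, and lines 508-509 advance vmap_area_pcpu_hole, the highest freed end inside the vmalloc range, which pcpu_get_vm_areas() later uses as its downward search start. Approximate context:

    if (free_vmap_cache) {
            if (va->va_end < cached_vstart) {       /* line 484 */
                    free_vmap_cache = NULL;
            } else {
                    /* otherwise trim the cache back to the previous area */
            }
    }

    if (va->va_end > VMALLOC_START && va->va_end <= VMALLOC_END)
            vmap_area_pcpu_hole = max(vmap_area_pcpu_hole, va->va_end);
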
529 vunmap_page_range(va->va_start, va->va_end); in unmap_vmap_area()
630 if (va->va_end > *end) in __purge_vmap_area_lazy()
631 *end = va->va_end; in __purge_vmap_area_lazy()
632 nr += (va->va_end - va->va_start) >> PAGE_SHIFT; in __purge_vmap_area_lazy()
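
Lines 630-632 widen the TLB-flush window to cover every lazily freed area and total up the pages being purged; a rough reconstruction of the RCU walk in __purge_vmap_area_lazy() (valist is the local list of areas queued for purging):

    list_for_each_entry_rcu(va, &vmap_area_list, list) {
            if (va->flags & VM_LAZY_FREE) {
                    if (va->va_start < *start)
                            *start = va->va_start;
                    if (va->va_end > *end)                  /* line 630 */
                            *end = va->va_end;              /* line 631 */
                    nr += (va->va_end - va->va_start) >> PAGE_SHIFT;
                    list_add_tail(&va->purge_list, &valist);
            }
    }
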
684 atomic_add((va->va_end - va->va_start) >> PAGE_SHIFT, &vmap_lazy_nr); in free_vmap_area_noflush()
704 flush_cache_vunmap(va->va_start, va->va_end); in free_unmap_vmap_area()
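
Line 684 converts the byte span to pages when charging vmap_lazy_nr, and line 704 flushes the cache over the whole range before the unmap; the latter function is short enough to reconstruct in full (approximate):

    static void free_unmap_vmap_area(struct vmap_area *va)
    {
            flush_cache_vunmap(va->va_start, va->va_end);   /* line 704 */
            free_unmap_vmap_area_noflush(va);
    }
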
1221 va->va_end = va->va_start + tmp->size; in vmalloc_init()
1312 vm->size = va->va_end - va->va_start; in setup_vmalloc_vm()
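
Lines 1221 and 1312 are the two directions of the same identity: vmalloc_init() derives va_end from a boot-time vm_struct's size, while setup_vmalloc_vm() derives a vm_struct's size back from the interval. A sketch of the latter, with signature details hedged (they vary across versions):

    static void setup_vmalloc_vm(struct vm_struct *vm, struct vmap_area *va,
                                 unsigned long flags, const void *caller)
    {
            vm->flags = flags;
            vm->addr = (void *)va->va_start;
            vm->size = va->va_end - va->va_start;   /* line 1312 */
            vm->caller = caller;
            va->vm = vm;
            va->flags |= VM_VM_AREA;
    }
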
1443 vmap_debug_free_range(va->va_start, va->va_end); in remove_vm_area()
2302 if (end < va->va_end) in pvm_find_next_prev()
2304 else if (end > va->va_end) in pvm_find_next_prev()
2313 if (va->va_end > end) { in pvm_find_next_prev()
2351 while (*pprev && (*pprev)->va_end > addr) { in pvm_determine_end()
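
The pvm_* helpers back pcpu_get_vm_areas(): pvm_find_next_prev() binary-searches the rbtree by end address (lines 2302-2313), and pvm_determine_end() at line 2351 slides the next/prev pair leftwards until the hole below the candidate address is clear. The latter, reconstructed approximately:

    static unsigned long pvm_determine_end(struct vmap_area **pnext,
                                           struct vmap_area **pprev,
                                           unsigned long align)
    {
            const unsigned long vmalloc_end = VMALLOC_END & ~(align - 1);
            unsigned long addr;

            if (*pnext)
                    addr = min((*pnext)->va_start & ~(align - 1), vmalloc_end);
            else
                    addr = vmalloc_end;

            /* line 2351: back up while the area below still reaches addr */
            while (*pprev && (*pprev)->va_end > addr) {
                    *pnext = *pprev;
                    *pprev = node_to_va(rb_prev(&(*pnext)->rb_node));
            }

            return addr;
    }
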
2453 BUG_ON(next && next->va_end <= base + end); in pcpu_get_vm_areas()
2454 BUG_ON(prev && prev->va_end > base + end); in pcpu_get_vm_areas()
2485 if (prev && prev->va_end > base + start) { in pcpu_get_vm_areas()
2510 va->va_end = va->va_start + sizes[area]; in pcpu_get_vm_areas()
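
In pcpu_get_vm_areas() itself, lines 2453-2454 assert that the candidate block ends inside the hole between prev and next, line 2485 detects prev intruding into the block and restarts the search below it, and line 2510 finally stamps the extent of each carved-out area. The retry branch, approximately:

    /* if prev overlaps the candidate block, drop below it and retry */
    if (prev && prev->va_end > base + start) {      /* line 2485 */
            next = prev;
            prev = node_to_va(rb_prev(&next->rb_node));
            base = pvm_determine_end(&next, &prev, align) - end;
            term_area = area;
            continue;
    }
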