Lines matching references to "size" in mm/memblock.c

74 static inline phys_addr_t memblock_cap_size(phys_addr_t base, phys_addr_t *size) in memblock_cap_size() argument
76 return *size = min(*size, (phys_addr_t)ULLONG_MAX - base); in memblock_cap_size()
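
The clamp on line 76 keeps base + size from overflowing phys_addr_t: the size is trimmed so the region can never extend past the top of the physical address space. A minimal standalone sketch of the same arithmetic, using a plain uint64_t in place of phys_addr_t and ULLONG_MAX (an illustration, not the kernel code):

#include <stdint.h>
#include <stdio.h>

/* Standalone model of the clamp on line 76: cap *size so that
 * base + *size cannot exceed UINT64_MAX (stand-in for ULLONG_MAX). */
static uint64_t cap_size(uint64_t base, uint64_t *size)
{
        uint64_t max = UINT64_MAX - base;
        if (*size > max)
                *size = max;
        return *size;
}

int main(void)
{
        uint64_t size = 0x2000;
        /* A region starting 4 KiB below the top of the address space
         * is capped to 4 KiB instead of wrapping around. */
        printf("capped size: %#llx\n",
               (unsigned long long)cap_size(UINT64_MAX - 0x1000, &size));
        return 0;
}
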
89 phys_addr_t base, phys_addr_t size) in memblock_overlaps_region() argument
95 phys_addr_t rgnsize = type->regions[i].size; in memblock_overlaps_region()
96 if (memblock_addrs_overlap(base, size, rgnbase, rgnsize)) in memblock_overlaps_region()
118 phys_addr_t size, phys_addr_t align, int nid) in __memblock_find_range_bottom_up() argument
128 if (cand < this_end && this_end - cand >= size) in __memblock_find_range_bottom_up()
150 phys_addr_t size, phys_addr_t align, int nid) in __memblock_find_range_top_down() argument
159 if (this_end < size) in __memblock_find_range_top_down()
162 cand = round_down(this_end - size, align); in __memblock_find_range_top_down()
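
Lines 118-162 are the two search directions used by the range finder: bottom-up rounds the start of a free window up to the alignment and checks that at least size bytes remain before the window's end (line 128), while top-down subtracts size from the window's end and rounds down (lines 159-162). A standalone sketch of both candidate computations, assuming power-of-two alignment as the kernel's round_up()/round_down() macros do:

#include <stdint.h>
#include <stdio.h>

/* Power-of-two rounding helpers, mirroring the kernel macros. */
#define round_down(x, a)  ((x) & ~((uint64_t)(a) - 1))
#define round_up(x, a)    round_down((x) + (a) - 1, (a))

/* Bottom-up candidate (cf. line 128): first aligned address in [start, end)
 * that still leaves at least 'size' bytes before 'end'; 0 when nothing fits. */
static uint64_t cand_bottom_up(uint64_t start, uint64_t end,
                               uint64_t size, uint64_t align)
{
        uint64_t cand = round_up(start, align);
        return (cand < end && end - cand >= size) ? cand : 0;
}

/* Top-down candidate (cf. lines 159-162): highest aligned address whose
 * 'size' bytes still fit below 'end'; 0 when the window is too small. */
static uint64_t cand_top_down(uint64_t start, uint64_t end,
                              uint64_t size, uint64_t align)
{
        if (end < size)
                return 0;
        uint64_t cand = round_down(end - size, align);
        return cand >= start ? cand : 0;
}

int main(void)
{
        printf("bottom-up: %#llx\n",
               (unsigned long long)cand_bottom_up(0x1234, 0x10000, 0x1000, 0x1000));
        printf("top-down:  %#llx\n",
               (unsigned long long)cand_top_down(0x1234, 0x10000, 0x1000, 0x1000));
        return 0;
}
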
191 phys_addr_t __init_memblock memblock_find_in_range_node(phys_addr_t size, in memblock_find_in_range_node() argument
218 size, align, nid); in memblock_find_in_range_node()
236 return __memblock_find_range_top_down(start, end, size, align, nid); in memblock_find_in_range_node()
252 phys_addr_t end, phys_addr_t size, in memblock_find_in_range() argument
255 return memblock_find_in_range_node(size, align, start, end, in memblock_find_in_range()
261 type->total_size -= type->regions[r].size; in memblock_remove_region()
271 type->regions[0].size = 0; in memblock_remove_region()
437 if (this->base + this->size != next->base || in memblock_merge_regions()
441 BUG_ON(this->base + this->size > next->base); in memblock_merge_regions()
446 this->size += next->size; in memblock_merge_regions()
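
Lines 437-446 are the adjacency test in memblock_merge_regions(): a region is merged into its predecessor only when the predecessor ends exactly at the region's base (in the full source the node id and flags must match as well), after which the sizes are simply added. A tiny standalone model of that check:

#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

struct region {          /* stripped-down stand-in for struct memblock_region */
        uint64_t base;
        uint64_t size;
};

/* Merge 'next' into 'this' when they are exactly adjacent (cf. lines 437-446). */
static bool try_merge(struct region *this, const struct region *next)
{
        if (this->base + this->size != next->base)
                return false;              /* gap or overlap: leave both alone */
        this->size += next->size;          /* absorb the neighbour */
        return true;
}

int main(void)
{
        struct region a = { 0x1000, 0x1000 }, b = { 0x2000, 0x3000 };
        if (try_merge(&a, &b))
                printf("merged: base=%#llx size=%#llx\n",
                       (unsigned long long)a.base, (unsigned long long)a.size);
        return 0;
}
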
467 phys_addr_t size, in memblock_insert_region() argument
475 rgn->size = size; in memblock_insert_region()
479 type->total_size += size; in memblock_insert_region()
499 phys_addr_t base, phys_addr_t size, in memblock_add_range() argument
504 phys_addr_t end = base + memblock_cap_size(base, &size); in memblock_add_range()
507 if (!size) in memblock_add_range()
511 if (type->regions[0].size == 0) { in memblock_add_range()
514 type->regions[0].size = size; in memblock_add_range()
517 type->total_size = size; in memblock_add_range()
532 phys_addr_t rend = rbase + rgn->size; in memblock_add_range()
567 if (memblock_double_array(type, obase, size) < 0) in memblock_add_range()
577 int __init_memblock memblock_add_node(phys_addr_t base, phys_addr_t size, in memblock_add_node() argument
580 return memblock_add_range(&memblock.memory, base, size, nid, 0); in memblock_add_node()
584 phys_addr_t size, in memblock_add_region() argument
592 (unsigned long long)base + size - 1, in memblock_add_region()
595 return memblock_add_range(_rgn, base, size, nid, flags); in memblock_add_region()
598 int __init_memblock memblock_add(phys_addr_t base, phys_addr_t size) in memblock_add() argument
600 return memblock_add_region(base, size, MAX_NUMNODES, 0); in memblock_add()
620 phys_addr_t base, phys_addr_t size, in memblock_isolate_range() argument
623 phys_addr_t end = base + memblock_cap_size(base, &size); in memblock_isolate_range()
628 if (!size) in memblock_isolate_range()
633 if (memblock_double_array(type, base, size) < 0) in memblock_isolate_range()
639 phys_addr_t rend = rbase + rgn->size; in memblock_isolate_range()
652 rgn->size -= base - rbase; in memblock_isolate_range()
663 rgn->size -= end - rbase; in memblock_isolate_range()
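
Lines 639-663 handle a region that straddles one end of the range being isolated: the existing entry is shrunk by the part that lies outside the boundary, and the cut-off piece is re-inserted as a separate region so the range ends up covered by whole regions only. A standalone sketch of splitting one region at a boundary address (boundary assumed to lie strictly inside the region):

#include <stdint.h>
#include <stdio.h>

struct region { uint64_t base, size; };

/* Split 'rgn' at 'boundary': 'rgn' keeps the upper part, 'lower' receives
 * the part below the boundary, mirroring the size adjustments on
 * lines 652 and 663. */
static void split_at(struct region *rgn, uint64_t boundary, struct region *lower)
{
        lower->base = rgn->base;
        lower->size = boundary - rgn->base;
        rgn->size  -= boundary - rgn->base;
        rgn->base   = boundary;
}

int main(void)
{
        struct region r = { 0x1000, 0x4000 }, low;
        split_at(&r, 0x2000, &low);
        printf("low : %#llx+%#llx\n",
               (unsigned long long)low.base, (unsigned long long)low.size);
        printf("high: %#llx+%#llx\n",
               (unsigned long long)r.base, (unsigned long long)r.size);
        return 0;
}
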
680 phys_addr_t base, phys_addr_t size) in memblock_remove_range() argument
685 ret = memblock_isolate_range(type, base, size, &start_rgn, &end_rgn); in memblock_remove_range()
694 int __init_memblock memblock_remove(phys_addr_t base, phys_addr_t size) in memblock_remove() argument
696 return memblock_remove_range(&memblock.memory, base, size); in memblock_remove()
700 int __init_memblock memblock_free(phys_addr_t base, phys_addr_t size) in memblock_free() argument
704 (unsigned long long)base + size - 1, in memblock_free()
707 kmemleak_free_part(__va(base), size); in memblock_free()
708 return memblock_remove_range(&memblock.reserved, base, size); in memblock_free()
712 phys_addr_t size, in memblock_reserve_region() argument
720 (unsigned long long)base + size - 1, in memblock_reserve_region()
723 return memblock_add_range(type, base, size, nid, flags); in memblock_reserve_region()
726 int __init_memblock memblock_reserve(phys_addr_t base, phys_addr_t size) in memblock_reserve() argument
728 return memblock_reserve_region(base, size, MAX_NUMNODES, 0); in memblock_reserve()
738 phys_addr_t size, int set, int flag) in memblock_setclr_flag() argument
743 ret = memblock_isolate_range(type, base, size, &start_rgn, &end_rgn); in memblock_setclr_flag()
764 int __init_memblock memblock_mark_hotplug(phys_addr_t base, phys_addr_t size) in memblock_mark_hotplug() argument
766 return memblock_setclr_flag(base, size, 1, MEMBLOCK_HOTPLUG); in memblock_mark_hotplug()
776 int __init_memblock memblock_clear_hotplug(phys_addr_t base, phys_addr_t size) in memblock_clear_hotplug() argument
778 return memblock_setclr_flag(base, size, 0, MEMBLOCK_HOTPLUG); in memblock_clear_hotplug()
823 phys_addr_t m_end = m->base + m->size; in __next_mem_range()
853 r_start = idx_b ? r[-1].base + r[-1].size : 0; in __next_mem_range()
927 phys_addr_t m_end = m->base + m->size; in __next_mem_range_rev()
957 r_start = idx_b ? r[-1].base + r[-1].size : 0; in __next_mem_range_rev()
1002 if (PFN_UP(r->base) >= PFN_DOWN(r->base + r->size)) in __next_mem_pfn_range()
1015 *out_end_pfn = PFN_DOWN(r->base + r->size); in __next_mem_pfn_range()
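
Lines 1002-1015 convert a region to page frame numbers: the start is rounded up and the end rounded down to page boundaries, and a region that does not cover at least one whole page is skipped. A standalone sketch of that conversion, assuming 4 KiB pages:

#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

#define PAGE_SHIFT 12                              /* assume 4 KiB pages */
#define PFN_DOWN(x) ((x) >> PAGE_SHIFT)
#define PFN_UP(x)   (((x) + (1ULL << PAGE_SHIFT) - 1) >> PAGE_SHIFT)

/* Mirror the check and conversion on lines 1002 and 1015: report the range
 * of whole page frames covered by [base, base + size), or false if none. */
static bool region_pfn_range(uint64_t base, uint64_t size,
                             uint64_t *start_pfn, uint64_t *end_pfn)
{
        if (PFN_UP(base) >= PFN_DOWN(base + size))
                return false;          /* region covers no complete page */
        *start_pfn = PFN_UP(base);
        *end_pfn   = PFN_DOWN(base + size);
        return true;
}

int main(void)
{
        uint64_t s, e;
        if (region_pfn_range(0x1800, 0x3000, &s, &e))   /* 0x1800..0x4800 */
                printf("pfns [%llu, %llu)\n",
                       (unsigned long long)s, (unsigned long long)e);
        return 0;
}
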
1033 int __init_memblock memblock_set_node(phys_addr_t base, phys_addr_t size, in memblock_set_node() argument
1039 ret = memblock_isolate_range(type, base, size, &start_rgn, &end_rgn); in memblock_set_node()
1051 static phys_addr_t __init memblock_alloc_range_nid(phys_addr_t size, in memblock_alloc_range_nid() argument
1060 found = memblock_find_in_range_node(size, align, start, end, nid); in memblock_alloc_range_nid()
1061 if (found && !memblock_reserve(found, size)) { in memblock_alloc_range_nid()
1066 kmemleak_alloc(__va(found), size, 0, 0); in memblock_alloc_range_nid()
1072 phys_addr_t __init memblock_alloc_range(phys_addr_t size, phys_addr_t align, in memblock_alloc_range() argument
1075 return memblock_alloc_range_nid(size, align, start, end, NUMA_NO_NODE); in memblock_alloc_range()
1078 static phys_addr_t __init memblock_alloc_base_nid(phys_addr_t size, in memblock_alloc_base_nid() argument
1082 return memblock_alloc_range_nid(size, align, 0, max_addr, nid); in memblock_alloc_base_nid()
1085 phys_addr_t __init memblock_alloc_nid(phys_addr_t size, phys_addr_t align, int nid) in memblock_alloc_nid() argument
1087 return memblock_alloc_base_nid(size, align, MEMBLOCK_ALLOC_ACCESSIBLE, nid); in memblock_alloc_nid()
1090 phys_addr_t __init __memblock_alloc_base(phys_addr_t size, phys_addr_t align, phys_addr_t max_addr) in __memblock_alloc_base() argument
1092 return memblock_alloc_base_nid(size, align, max_addr, NUMA_NO_NODE); in __memblock_alloc_base()
1095 phys_addr_t __init memblock_alloc_base(phys_addr_t size, phys_addr_t align, phys_addr_t max_addr) in memblock_alloc_base() argument
1099 alloc = __memblock_alloc_base(size, align, max_addr); in memblock_alloc_base()
1103 (unsigned long long) size, (unsigned long long) max_addr); in memblock_alloc_base()
1108 phys_addr_t __init memblock_alloc(phys_addr_t size, phys_addr_t align) in memblock_alloc() argument
1110 return memblock_alloc_base(size, align, MEMBLOCK_ALLOC_ACCESSIBLE); in memblock_alloc()
1113 phys_addr_t __init memblock_alloc_try_nid(phys_addr_t size, phys_addr_t align, int nid) in memblock_alloc_try_nid() argument
1115 phys_addr_t res = memblock_alloc_nid(size, align, nid); in memblock_alloc_try_nid()
1119 return memblock_alloc_base(size, align, MEMBLOCK_ALLOC_ACCESSIBLE); in memblock_alloc_try_nid()
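
Lines 1051-1119 form the physical allocation path: find a free range, reserve it, and, in memblock_alloc_try_nid(), retry without the node restriction when the node-local attempt returns nothing. A sketch of that try-then-fallback pattern; the helpers below are made-up stand-ins, not kernel functions:

#include <stdint.h>
#include <stdio.h>

/* Fake node-local allocator: pretend node 1 has no memory left.
 * Stand-in for the node-aware attempt on line 1115 (assumption, not kernel code). */
static uint64_t alloc_on_node(uint64_t size, uint64_t align, int nid)
{
        (void)size; (void)align;
        return nid == 1 ? 0 : 0x100000;     /* 0 signals failure, as in memblock */
}

/* Fake any-node allocator, standing in for the fallback on line 1119. */
static uint64_t alloc_any_node(uint64_t size, uint64_t align)
{
        (void)size; (void)align;
        return 0x200000;
}

/* Try-then-fallback pattern of memblock_alloc_try_nid(): prefer 'nid',
 * but fall back to any node instead of failing outright. */
static uint64_t alloc_try_nid(uint64_t size, uint64_t align, int nid)
{
        uint64_t res = alloc_on_node(size, align, nid);
        return res ? res : alloc_any_node(size, align);
}

int main(void)
{
        printf("node 0: %#llx\n", (unsigned long long)alloc_try_nid(0x1000, 0x1000, 0));
        printf("node 1: %#llx\n", (unsigned long long)alloc_try_nid(0x1000, 0x1000, 1));
        return 0;
}
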
1150 phys_addr_t size, phys_addr_t align, in memblock_virt_alloc_internal() argument
1166 return kzalloc_node(size, GFP_NOWAIT, nid); in memblock_virt_alloc_internal()
1175 alloc = memblock_find_in_range_node(size, align, min_addr, max_addr, in memblock_virt_alloc_internal()
1181 alloc = memblock_find_in_range_node(size, align, min_addr, in memblock_virt_alloc_internal()
1195 memblock_reserve(alloc, size); in memblock_virt_alloc_internal()
1197 memset(ptr, 0, size); in memblock_virt_alloc_internal()
1205 kmemleak_alloc(ptr, size, 0, 0); in memblock_virt_alloc_internal()
1231 phys_addr_t size, phys_addr_t align, in memblock_virt_alloc_try_nid_nopanic() argument
1236 __func__, (u64)size, (u64)align, nid, (u64)min_addr, in memblock_virt_alloc_try_nid_nopanic()
1238 return memblock_virt_alloc_internal(size, align, min_addr, in memblock_virt_alloc_try_nid_nopanic()
1261 phys_addr_t size, phys_addr_t align, in memblock_virt_alloc_try_nid() argument
1268 __func__, (u64)size, (u64)align, nid, (u64)min_addr, in memblock_virt_alloc_try_nid()
1270 ptr = memblock_virt_alloc_internal(size, align, in memblock_virt_alloc_try_nid()
1276 __func__, (u64)size, (u64)align, nid, (u64)min_addr, in memblock_virt_alloc_try_nid()
1289 void __init __memblock_free_early(phys_addr_t base, phys_addr_t size) in __memblock_free_early() argument
1292 __func__, (u64)base, (u64)base + size - 1, in __memblock_free_early()
1294 kmemleak_free_part(__va(base), size); in __memblock_free_early()
1295 memblock_remove_range(&memblock.reserved, base, size); in __memblock_free_early()
1307 void __init __memblock_free_late(phys_addr_t base, phys_addr_t size) in __memblock_free_late() argument
1312 __func__, (u64)base, (u64)base + size - 1, in __memblock_free_late()
1314 kmemleak_free_part(__va(base), size); in __memblock_free_late()
1316 end = PFN_DOWN(base + size); in __memblock_free_late()
1360 return (memblock.memory.regions[idx].base + memblock.memory.regions[idx].size); in memblock_end_of_DRAM()
1373 if (limit <= r->size) { in memblock_enforce_memory_limit()
1377 limit -= r->size; in memblock_enforce_memory_limit()
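
Lines 1373-1377 are from memblock_enforce_memory_limit(): the memory regions are walked in address order, consuming the limit region by region until the region containing the cutoff is reached, and everything above that address is then removed. A standalone sketch of the cutoff computation:

#include <stdint.h>
#include <stdio.h>

struct region { uint64_t base, size; };

/* Walk regions in ascending order and return the physical address at which
 * 'limit' bytes of memory have been accounted for (cf. lines 1373-1377).
 * Returns UINT64_MAX when the limit exceeds the total amount of memory. */
static uint64_t limit_cutoff(const struct region *r, int nr, uint64_t limit)
{
        for (int i = 0; i < nr; i++) {
                if (limit <= r[i].size)
                        return r[i].base + limit;
                limit -= r[i].size;
        }
        return UINT64_MAX;
}

int main(void)
{
        struct region mem[] = { { 0x0, 0x8000 }, { 0x10000, 0x8000 } };
        /* 40 KiB limit: 32 KiB from the first region, 8 KiB into the second. */
        printf("cutoff: %#llx\n", (unsigned long long)limit_cutoff(mem, 2, 0xA000));
        return 0;
}
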
1397 type->regions[mid].size)) in memblock_search()
1426 *end_pfn = PFN_DOWN(type->regions[mid].base + type->regions[mid].size); in memblock_search_pfn_nid()
1442 int __init_memblock memblock_is_region_memory(phys_addr_t base, phys_addr_t size) in memblock_is_region_memory() argument
1445 phys_addr_t end = base + memblock_cap_size(base, &size); in memblock_is_region_memory()
1451 memblock.memory.regions[idx].size) >= end; in memblock_is_region_memory()
1464 int __init_memblock memblock_is_region_reserved(phys_addr_t base, phys_addr_t size) in memblock_is_region_reserved() argument
1466 memblock_cap_size(base, &size); in memblock_is_region_reserved()
1467 return memblock_overlaps_region(&memblock.reserved, base, size) >= 0; in memblock_is_region_reserved()
1477 orig_end = r->base + r->size; in memblock_trim_memory()
1486 r->size = end - start; in memblock_trim_memory()
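
Lines 1477-1486 are from memblock_trim_memory(): each region's start is rounded up and its end rounded down to the requested alignment, and the region is shrunk to that aligned window (or dropped when nothing aligned remains). A standalone sketch of the per-region trim, again assuming power-of-two alignment:

#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

struct region { uint64_t base, size; };

#define round_down(x, a)  ((x) & ~((uint64_t)(a) - 1))
#define round_up(x, a)    round_down((x) + (a) - 1, (a))

/* Shrink 'r' to the largest 'align'-aligned window inside it (cf. lines
 * 1477-1486); returns false when no aligned byte survives, i.e. the
 * region would be removed instead. */
static bool trim_region(struct region *r, uint64_t align)
{
        uint64_t start = round_up(r->base, align);
        uint64_t end   = round_down(r->base + r->size, align);

        if (start >= end)
                return false;
        r->base = start;
        r->size = end - start;
        return true;
}

int main(void)
{
        struct region r = { 0x1234, 0x5000 };
        if (trim_region(&r, 0x1000))
                printf("trimmed: %#llx+%#llx\n",
                       (unsigned long long)r.base, (unsigned long long)r.size);
        return 0;
}
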
1507 unsigned long long base, size; in memblock_dump() local
1518 size = rgn->size; in memblock_dump()
1526 name, i, base, base + size - 1, size, nid_buf, flags); in memblock_dump()
1568 (unsigned long)(reg->base + reg->size - 1)); in memblock_debug_show()
1572 (unsigned long long)(reg->base + reg->size - 1)); in memblock_debug_show()