Lines Matching refs:ai
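All of the hits below reference ai, a pointer to struct pcpu_alloc_info, the descriptor that tells the percpu allocator how to lay out the first chunk. For orientation, here is a sketch of that structure and the embedded struct pcpu_group_info. Every field name appears in the hits; the types and comments are assumptions about include/linux/percpu.h of this kernel generation rather than part of the listing.

struct pcpu_group_info {
        int                     nr_units;       /* aligned number of units in this group */
        unsigned long           base_offset;    /* offset of the group base from the chunk base */
        unsigned int            *cpu_map;       /* unit -> cpu map; unused slots hold NR_CPUS */
};

struct pcpu_alloc_info {
        size_t                  static_size;    /* size of the static percpu area */
        size_t                  reserved_size;  /* reserve for module static percpu variables */
        size_t                  dyn_size;       /* room left for dynamic allocation */
        size_t                  unit_size;      /* size of one per-cpu unit */
        size_t                  atom_size;      /* allocation atom, e.g. page or huge page */
        size_t                  alloc_size;     /* size of one allocation */
        size_t                  __ai_size;      /* size of this descriptor's own allocation */
        int                     nr_groups;      /* number of cpu groups */
        struct pcpu_group_info  groups[];       /* flexible array of group descriptors */
};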
824 static int __init pcpu_verify_alloc_info(const struct pcpu_alloc_info *ai);
1378 struct pcpu_alloc_info *ai; in pcpu_alloc_alloc_info() local
1383 base_size = ALIGN(sizeof(*ai) + nr_groups * sizeof(ai->groups[0]), in pcpu_alloc_alloc_info()
1384 __alignof__(ai->groups[0].cpu_map[0])); in pcpu_alloc_alloc_info()
1385 ai_size = base_size + nr_units * sizeof(ai->groups[0].cpu_map[0]); in pcpu_alloc_alloc_info()
1390 ai = ptr; in pcpu_alloc_alloc_info()
1393 ai->groups[0].cpu_map = ptr; in pcpu_alloc_alloc_info()
1396 ai->groups[0].cpu_map[unit] = NR_CPUS; in pcpu_alloc_alloc_info()
1398 ai->nr_groups = nr_groups; in pcpu_alloc_alloc_info()
1399 ai->__ai_size = PFN_ALIGN(ai_size); in pcpu_alloc_alloc_info()
1401 return ai; in pcpu_alloc_alloc_info()
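pcpu_alloc_alloc_info() (hits 1378-1401) carves the descriptor, the group array, and one flat cpu_map covering every unit out of a single allocation. The sketch below mirrors only that layout arithmetic, using calloc() and the structure sketch above so it can be compiled outside the kernel; it is an illustration of the technique, not the kernel function, which allocates PFN_ALIGN(ai_size) bytes from memblock.

#include <stdlib.h>
#include <stddef.h>

#define ALIGN_UP(x, a)  (((x) + (a) - 1) & ~((size_t)(a) - 1))
#define NR_CPUS         64                      /* stand-in for the kernel constant */

/* One flat block: [pcpu_alloc_info + groups[]][cpu_map[] for all units] */
struct pcpu_alloc_info *alloc_info_sketch(int nr_groups, int nr_units)
{
        struct pcpu_alloc_info *ai;
        size_t base_size, ai_size;
        char *ptr;
        int unit;

        /* header plus group array, padded so the cpu_map tail is aligned */
        base_size = ALIGN_UP(sizeof(*ai) + nr_groups * sizeof(ai->groups[0]),
                             _Alignof(unsigned int));
        ai_size = base_size + nr_units * sizeof(ai->groups[0].cpu_map[0]);

        ptr = calloc(1, ai_size);               /* kernel: memblock, page aligned */
        if (!ptr)
                return NULL;
        ai = (struct pcpu_alloc_info *)ptr;

        /* group 0 owns the whole flat cpu_map; later groups alias into it */
        ai->groups[0].cpu_map = (unsigned int *)(ptr + base_size);
        for (unit = 0; unit < nr_units; unit++)
                ai->groups[0].cpu_map[unit] = NR_CPUS;  /* mark every slot unused */

        ai->nr_groups = nr_groups;
        ai->__ai_size = ai_size;                /* kernel: PFN_ALIGN(ai_size) */
        return ai;
}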
1410 void __init pcpu_free_alloc_info(struct pcpu_alloc_info *ai) in pcpu_free_alloc_info() argument
1412 memblock_free_early(__pa(ai), ai->__ai_size); in pcpu_free_alloc_info()
1423 const struct pcpu_alloc_info *ai) in pcpu_dump_alloc_info() argument
1431 v = ai->nr_groups; in pcpu_dump_alloc_info()
1440 upa = ai->alloc_size / ai->unit_size; in pcpu_dump_alloc_info()
1445 lvl, ai->static_size, ai->reserved_size, ai->dyn_size, in pcpu_dump_alloc_info()
1446 ai->unit_size, ai->alloc_size / ai->atom_size, ai->atom_size); in pcpu_dump_alloc_info()
1448 for (group = 0; group < ai->nr_groups; group++) { in pcpu_dump_alloc_info()
1449 const struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_dump_alloc_info()
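The upa computed at 1440 is units per allocation, i.e. how many per-cpu units fit in one allocation block: for example, an alloc_size of 2 MiB (one huge-page atom) with a unit_size of 512 KiB gives upa = 4. These sizes are illustrative, not taken from the listing.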
1528 int __init pcpu_setup_first_chunk(const struct pcpu_alloc_info *ai, in pcpu_setup_first_chunk() argument
1533 size_t dyn_size = ai->dyn_size; in pcpu_setup_first_chunk()
1534 size_t size_sum = ai->static_size + ai->reserved_size + dyn_size; in pcpu_setup_first_chunk()
1548 pcpu_dump_alloc_info(KERN_EMERG, ai); \ in pcpu_setup_first_chunk()
1554 PCPU_SETUP_BUG_ON(ai->nr_groups <= 0); in pcpu_setup_first_chunk()
1556 PCPU_SETUP_BUG_ON(!ai->static_size); in pcpu_setup_first_chunk()
1561 PCPU_SETUP_BUG_ON(ai->unit_size < size_sum); in pcpu_setup_first_chunk()
1562 PCPU_SETUP_BUG_ON(ai->unit_size & ~PAGE_MASK); in pcpu_setup_first_chunk()
1563 PCPU_SETUP_BUG_ON(ai->unit_size < PCPU_MIN_UNIT_SIZE); in pcpu_setup_first_chunk()
1564 PCPU_SETUP_BUG_ON(ai->dyn_size < PERCPU_DYNAMIC_EARLY_SIZE); in pcpu_setup_first_chunk()
1565 PCPU_SETUP_BUG_ON(pcpu_verify_alloc_info(ai) < 0); in pcpu_setup_first_chunk()
1568 group_offsets = memblock_virt_alloc(ai->nr_groups * in pcpu_setup_first_chunk()
1570 group_sizes = memblock_virt_alloc(ai->nr_groups * in pcpu_setup_first_chunk()
1581 for (group = 0, unit = 0; group < ai->nr_groups; group++, unit += i) { in pcpu_setup_first_chunk()
1582 const struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_setup_first_chunk()
1585 group_sizes[group] = gi->nr_units * ai->unit_size; in pcpu_setup_first_chunk()
1597 unit_off[cpu] = gi->base_offset + i * ai->unit_size; in pcpu_setup_first_chunk()
1615 pcpu_dump_alloc_info(KERN_DEBUG, ai); in pcpu_setup_first_chunk()
1617 pcpu_nr_groups = ai->nr_groups; in pcpu_setup_first_chunk()
1624 pcpu_unit_pages = ai->unit_size >> PAGE_SHIFT; in pcpu_setup_first_chunk()
1626 pcpu_atom_size = ai->atom_size; in pcpu_setup_first_chunk()
1657 if (ai->reserved_size) { in pcpu_setup_first_chunk()
1658 schunk->free_size = ai->reserved_size; in pcpu_setup_first_chunk()
1660 pcpu_reserved_chunk_limit = ai->static_size + ai->reserved_size; in pcpu_setup_first_chunk()
1668 schunk->map[1] = ai->static_size; in pcpu_setup_first_chunk()
1671 schunk->map[++schunk->map_used] = 1 | (ai->static_size + schunk->free_size); in pcpu_setup_first_chunk()
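The PCPU_SETUP_BUG_ON checks at 1554-1565 pin down what a valid descriptor must satisfy before the first chunk is built: at least one group, a non-empty static area, a page-aligned unit_size of at least PCPU_MIN_UNIT_SIZE that covers static + reserved + dynamic, and a dynamic area of at least PERCPU_DYNAMIC_EARLY_SIZE. A small standalone check with made-up sizes, just to make the size arithmetic concrete:

#include <assert.h>
#include <stddef.h>

int main(void)
{
        const size_t page_size     = 4096;      /* assumed 4 KiB pages */
        const size_t static_size   = 45056;     /* hypothetical .data..percpu size */
        const size_t reserved_size = 8192;      /* hypothetical module reserve */
        const size_t dyn_size      = 28672;     /* hypothetical dynamic area */
        const size_t size_sum      = static_size + reserved_size + dyn_size;
        /* round the unit up to whole pages, roughly what the sizing code does */
        const size_t unit_size     = (size_sum + page_size - 1) / page_size * page_size;

        assert(unit_size >= size_sum);          /* otherwise the check at 1561 fires */
        assert(unit_size % page_size == 0);     /* otherwise the check at 1562 fires */
        return 0;
}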
1788 struct pcpu_alloc_info *ai; in pcpu_build_alloc_info() local
1871 ai = pcpu_alloc_alloc_info(nr_groups, nr_units); in pcpu_build_alloc_info()
1872 if (!ai) in pcpu_build_alloc_info()
1874 cpu_map = ai->groups[0].cpu_map; in pcpu_build_alloc_info()
1877 ai->groups[group].cpu_map = cpu_map; in pcpu_build_alloc_info()
1881 ai->static_size = static_size; in pcpu_build_alloc_info()
1882 ai->reserved_size = reserved_size; in pcpu_build_alloc_info()
1883 ai->dyn_size = dyn_size; in pcpu_build_alloc_info()
1884 ai->unit_size = alloc_size / upa; in pcpu_build_alloc_info()
1885 ai->atom_size = atom_size; in pcpu_build_alloc_info()
1886 ai->alloc_size = alloc_size; in pcpu_build_alloc_info()
1889 struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_build_alloc_info()
1896 gi->base_offset = unit * ai->unit_size; in pcpu_build_alloc_info()
1906 return ai; in pcpu_build_alloc_info()
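The hits at 1874-1877 show pcpu_build_alloc_info() pointing every group's cpu_map into the flat array owned by group 0, and 1896 places each group at unit * unit_size. A consumer therefore walks the result the same way the loop at 1581-1597 does: per group, per unit, skipping slots still marked NR_CPUS. The helper below is a hedged sketch built on the structure sketch above, with invented names (walk_units, visit); it is not a kernel API.

/* Report, for every populated unit, which cpu owns it and the offset of
 * that unit from the chunk base; NR_CPUS in cpu_map marks a padding unit. */
static void walk_units(const struct pcpu_alloc_info *ai,
                       void (*visit)(unsigned int cpu, size_t unit_off))
{
        int group, i;

        for (group = 0; group < ai->nr_groups; group++) {
                const struct pcpu_group_info *gi = &ai->groups[group];

                for (i = 0; i < gi->nr_units; i++) {
                        unsigned int cpu = gi->cpu_map[i];

                        if (cpu == NR_CPUS)     /* unit exists only for alignment */
                                continue;
                        visit(cpu, gi->base_offset + i * ai->unit_size);
                }
        }
}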
1951 struct pcpu_alloc_info *ai; in pcpu_embed_first_chunk() local
1955 ai = pcpu_build_alloc_info(reserved_size, dyn_size, atom_size, in pcpu_embed_first_chunk()
1957 if (IS_ERR(ai)) in pcpu_embed_first_chunk()
1958 return PTR_ERR(ai); in pcpu_embed_first_chunk()
1960 size_sum = ai->static_size + ai->reserved_size + ai->dyn_size; in pcpu_embed_first_chunk()
1961 areas_size = PFN_ALIGN(ai->nr_groups * sizeof(void *)); in pcpu_embed_first_chunk()
1970 for (group = 0; group < ai->nr_groups; group++) { in pcpu_embed_first_chunk()
1971 struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_embed_first_chunk()
1980 ptr = alloc_fn(cpu, gi->nr_units * ai->unit_size, atom_size); in pcpu_embed_first_chunk()
1997 for (group = 0; group < ai->nr_groups; group++) { in pcpu_embed_first_chunk()
1998 struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_embed_first_chunk()
2001 for (i = 0; i < gi->nr_units; i++, ptr += ai->unit_size) { in pcpu_embed_first_chunk()
2004 free_fn(ptr, ai->unit_size); in pcpu_embed_first_chunk()
2008 memcpy(ptr, __per_cpu_load, ai->static_size); in pcpu_embed_first_chunk()
2009 free_fn(ptr + size_sum, ai->unit_size - size_sum); in pcpu_embed_first_chunk()
2015 for (group = 0; group < ai->nr_groups; group++) { in pcpu_embed_first_chunk()
2016 ai->groups[group].base_offset = areas[group] - base; in pcpu_embed_first_chunk()
2018 ai->groups[group].base_offset); in pcpu_embed_first_chunk()
2020 max_distance += ai->unit_size; in pcpu_embed_first_chunk()
2035 PFN_DOWN(size_sum), base, ai->static_size, ai->reserved_size, in pcpu_embed_first_chunk()
2036 ai->dyn_size, ai->unit_size); in pcpu_embed_first_chunk()
2038 rc = pcpu_setup_first_chunk(ai, base); in pcpu_embed_first_chunk()
2042 for (group = 0; group < ai->nr_groups; group++) in pcpu_embed_first_chunk()
2045 ai->groups[group].nr_units * ai->unit_size); in pcpu_embed_first_chunk()
2047 pcpu_free_alloc_info(ai); in pcpu_embed_first_chunk()
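For context on how pcpu_embed_first_chunk() is driven, the sketch below loosely mirrors the generic SMP fallback of setup_per_cpu_areas(): the caller passes the reserved and dynamic sizes, the allocation atom, an optional cpu-distance callback used for NUMA grouping, and alloc/free callbacks whose shapes match the calls visible at 1980 and 2004 (alloc takes cpu, size, align; free takes ptr, size). Treat this as an approximation of such a caller, not a quote of it.

#include <linux/memblock.h>
#include <linux/percpu.h>
#include <asm/dma.h>                            /* MAX_DMA_ADDRESS */

static void * __init pcpu_fc_alloc(unsigned int cpu, size_t size, size_t align)
{
        /* cpu is available for NUMA-aware placement; this sketch ignores it */
        return memblock_virt_alloc_from_nopanic(size, align,
                                                __pa(MAX_DMA_ADDRESS));
}

static void __init pcpu_fc_free(void *ptr, size_t size)
{
        memblock_free_early(__pa(ptr), size);
}

void __init setup_per_cpu_areas(void)
{
        int rc;

        rc = pcpu_embed_first_chunk(PERCPU_MODULE_RESERVE,
                                    PERCPU_DYNAMIC_RESERVE, PAGE_SIZE,
                                    NULL,       /* cpu_distance_fn */
                                    pcpu_fc_alloc, pcpu_fc_free);
        if (rc < 0)
                panic("Failed to initialize percpu areas (err=%d)", rc);
        /* __per_cpu_offset[] fixup from pcpu_unit_offsets[] is omitted here */
}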
2077 struct pcpu_alloc_info *ai; in pcpu_page_first_chunk() local
2086 ai = pcpu_build_alloc_info(reserved_size, 0, PAGE_SIZE, NULL); in pcpu_page_first_chunk()
2087 if (IS_ERR(ai)) in pcpu_page_first_chunk()
2088 return PTR_ERR(ai); in pcpu_page_first_chunk()
2089 BUG_ON(ai->nr_groups != 1); in pcpu_page_first_chunk()
2090 BUG_ON(ai->groups[0].nr_units != num_possible_cpus()); in pcpu_page_first_chunk()
2092 unit_pages = ai->unit_size >> PAGE_SHIFT; in pcpu_page_first_chunk()
2103 unsigned int cpu = ai->groups[0].cpu_map[unit]; in pcpu_page_first_chunk()
2119 vm.size = num_possible_cpus() * ai->unit_size; in pcpu_page_first_chunk()
2124 (unsigned long)vm.addr + unit * ai->unit_size; in pcpu_page_first_chunk()
2144 memcpy((void *)unit_addr, __per_cpu_load, ai->static_size); in pcpu_page_first_chunk()
2149 unit_pages, psize_str, vm.addr, ai->static_size, in pcpu_page_first_chunk()
2150 ai->reserved_size, ai->dyn_size); in pcpu_page_first_chunk()
2152 rc = pcpu_setup_first_chunk(ai, vm.addr); in pcpu_page_first_chunk()
2161 pcpu_free_alloc_info(ai); in pcpu_page_first_chunk()
2230 struct pcpu_alloc_info *ai; in setup_per_cpu_areas() local
2233 ai = pcpu_alloc_alloc_info(1, 1); in setup_per_cpu_areas()
2237 if (!ai || !fc) in setup_per_cpu_areas()
2242 ai->dyn_size = unit_size; in setup_per_cpu_areas()
2243 ai->unit_size = unit_size; in setup_per_cpu_areas()
2244 ai->atom_size = unit_size; in setup_per_cpu_areas()
2245 ai->alloc_size = unit_size; in setup_per_cpu_areas()
2246 ai->groups[0].nr_units = 1; in setup_per_cpu_areas()
2247 ai->groups[0].cpu_map[0] = 0; in setup_per_cpu_areas()
2249 if (pcpu_setup_first_chunk(ai, fc) < 0) in setup_per_cpu_areas()