Lines Matching full:ai

1583 static int __init pcpu_verify_alloc_info(const struct pcpu_alloc_info *ai);
2392 * Allocate ai which is large enough for @nr_groups groups containing
2393 * @nr_units units. The returned ai's groups[0].cpu_map points to the
2405 struct pcpu_alloc_info *ai; in pcpu_alloc_alloc_info() local
2410 base_size = ALIGN(struct_size(ai, groups, nr_groups), in pcpu_alloc_alloc_info()
2411 __alignof__(ai->groups[0].cpu_map[0])); in pcpu_alloc_alloc_info()
2412 ai_size = base_size + nr_units * sizeof(ai->groups[0].cpu_map[0]); in pcpu_alloc_alloc_info()
2417 ai = ptr; in pcpu_alloc_alloc_info()
2420 ai->groups[0].cpu_map = ptr; in pcpu_alloc_alloc_info()
2423 ai->groups[0].cpu_map[unit] = NR_CPUS; in pcpu_alloc_alloc_info()
2425 ai->nr_groups = nr_groups; in pcpu_alloc_alloc_info()
2426 ai->__ai_size = PFN_ALIGN(ai_size); in pcpu_alloc_alloc_info()
2428 return ai; in pcpu_alloc_alloc_info()
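
The fragments above show pcpu_alloc_alloc_info() placing the alloc_info struct, its flexible groups[] array, and the shared cpu_map[] in one allocation, with groups[0].cpu_map pointing just past the aligned struct and every slot pre-marked with NR_CPUS. Below is a minimal userspace sketch of the same single-allocation layout; malloc() stands in for memblock_alloc(), the struct definitions are trimmed to the fields visible here, NR_CPUS is a placeholder value, and the PFN alignment of __ai_size is only noted in a comment.

#include <stdio.h>
#include <stdlib.h>
#include <stddef.h>

#define NR_CPUS 64                    /* placeholder for the kernel constant */
#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((size_t)(a) - 1))

struct pcpu_group_info {
        int             nr_units;
        size_t          base_offset;
        unsigned int    *cpu_map;     /* points into the trailing array */
};

struct pcpu_alloc_info {
        size_t                  __ai_size;
        int                     nr_groups;
        struct pcpu_group_info  groups[];   /* flexible array member */
};

static struct pcpu_alloc_info *alloc_info(int nr_groups, int nr_units)
{
        /* struct + groups[], padded so the cpu_map entries are aligned */
        size_t base_size = ALIGN_UP(sizeof(struct pcpu_alloc_info) +
                                    nr_groups * sizeof(struct pcpu_group_info),
                                    __alignof__(unsigned int));
        size_t ai_size = base_size + nr_units * sizeof(unsigned int);
        void *ptr = calloc(1, ai_size);      /* memblock_alloc() in the kernel */
        struct pcpu_alloc_info *ai = ptr;

        if (!ai)
                return NULL;

        /* the shared cpu_map starts right after the aligned struct */
        ai->groups[0].cpu_map = (unsigned int *)((char *)ptr + base_size);
        for (int unit = 0; unit < nr_units; unit++)
                ai->groups[0].cpu_map[unit] = NR_CPUS;   /* "unmapped" marker */

        ai->nr_groups = nr_groups;
        ai->__ai_size = ai_size;      /* the kernel additionally PFN-aligns this */
        return ai;
}

int main(void)
{
        struct pcpu_alloc_info *ai = alloc_info(2, 8);

        if (!ai)
                return 1;
        printf("one allocation of %zu bytes, cpu_map at offset %td\n",
               ai->__ai_size, (char *)ai->groups[0].cpu_map - (char *)ai);
        free(ai);      /* kernel: memblock_free(ai, ai->__ai_size) */
        return 0;
}
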
2433 * @ai: pcpu_alloc_info to free
2435 * Free @ai which was allocated by pcpu_alloc_alloc_info().
2437 void __init pcpu_free_alloc_info(struct pcpu_alloc_info *ai) in pcpu_free_alloc_info() argument
2439 memblock_free(ai, ai->__ai_size); in pcpu_free_alloc_info()
2445 * @ai: allocation info to dump
2447 * Print out information about @ai using loglevel @lvl.
2450 const struct pcpu_alloc_info *ai) in pcpu_dump_alloc_info() argument
2458 v = ai->nr_groups; in pcpu_dump_alloc_info()
2467 upa = ai->alloc_size / ai->unit_size; in pcpu_dump_alloc_info()
2472 lvl, ai->static_size, ai->reserved_size, ai->dyn_size, in pcpu_dump_alloc_info()
2473 ai->unit_size, ai->alloc_size / ai->atom_size, ai->atom_size); in pcpu_dump_alloc_info()
2475 for (group = 0; group < ai->nr_groups; group++) { in pcpu_dump_alloc_info()
2476 const struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_dump_alloc_info()
2501 * @ai: pcpu_alloc_info describing how the percpu area is shaped
2508 * @ai contains all information necessary to initialize the first
2511 * @ai->static_size is the size of static percpu area.
2513 * @ai->reserved_size, if non-zero, specifies the amount of bytes to
2521 * @ai->dyn_size determines the number of bytes available for dynamic
2522 * allocation in the first chunk. The area between @ai->static_size +
2523 * @ai->reserved_size + @ai->dyn_size and @ai->unit_size is unused.
2525 * @ai->unit_size specifies unit size and must be aligned to PAGE_SIZE
2526 * and equal to or larger than @ai->static_size + @ai->reserved_size +
2527 * @ai->dyn_size.
2529 * @ai->atom_size is the allocation atom size and used as alignment
2532 * @ai->alloc_size is the allocation size and always multiple of
2533 * @ai->atom_size. This is larger than @ai->atom_size if
2534 * @ai->unit_size is larger than @ai->atom_size.
2536 * @ai->nr_groups and @ai->groups describe virtual memory layout of
2539 * groupings. If @ai->nr_groups is zero, a single group containing
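
Read together, the comment fragments above pin down a set of size relationships between the pcpu_alloc_info fields: unit_size is page aligned and at least static_size + reserved_size + dyn_size, the leftover space in each unit is unused, and alloc_size is a multiple of atom_size. The sketch below checks those invariants on invented example numbers; the helper name check_ai_layout() and all sizes are illustrative, not kernel code.

#include <assert.h>
#include <stdio.h>
#include <stddef.h>

#define PAGE_SIZE 4096UL

struct ai_sizes {                 /* trimmed stand-in for pcpu_alloc_info */
        size_t static_size, reserved_size, dyn_size;
        size_t unit_size, atom_size, alloc_size;
};

static void check_ai_layout(const struct ai_sizes *ai)
{
        size_t size_sum = ai->static_size + ai->reserved_size + ai->dyn_size;

        /* unit_size is page aligned and covers static + reserved + dynamic */
        assert(ai->unit_size % PAGE_SIZE == 0);
        assert(ai->unit_size >= size_sum);

        /* alloc_size is a multiple of atom_size and never smaller than a unit */
        assert(ai->alloc_size % ai->atom_size == 0);
        assert(ai->alloc_size >= ai->unit_size);

        printf("unit: %zu bytes used, %zu unused, %zu units per allocation\n",
               size_sum, ai->unit_size - size_sum,
               ai->alloc_size / ai->unit_size);
}

int main(void)
{
        /* example only: 200 KiB static, 8 KiB reserved, 28 KiB dynamic,
         * packed into 256 KiB units carved out of 2 MiB atoms */
        struct ai_sizes ai = {
                .static_size   = 200 * 1024,
                .reserved_size = 8 * 1024,
                .dyn_size      = 28 * 1024,
                .unit_size     = 256 * 1024,
                .atom_size     = 2 * 1024 * 1024,
                .alloc_size    = 2 * 1024 * 1024,
        };

        check_ai_layout(&ai);
        return 0;
}
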
2553 void __init pcpu_setup_first_chunk(const struct pcpu_alloc_info *ai, in pcpu_setup_first_chunk() argument
2556 size_t size_sum = ai->static_size + ai->reserved_size + ai->dyn_size; in pcpu_setup_first_chunk()
2572 pcpu_dump_alloc_info(KERN_EMERG, ai); \ in pcpu_setup_first_chunk()
2578 PCPU_SETUP_BUG_ON(ai->nr_groups <= 0); in pcpu_setup_first_chunk()
2580 PCPU_SETUP_BUG_ON(!ai->static_size); in pcpu_setup_first_chunk()
2585 PCPU_SETUP_BUG_ON(ai->unit_size < size_sum); in pcpu_setup_first_chunk()
2586 PCPU_SETUP_BUG_ON(offset_in_page(ai->unit_size)); in pcpu_setup_first_chunk()
2587 PCPU_SETUP_BUG_ON(ai->unit_size < PCPU_MIN_UNIT_SIZE); in pcpu_setup_first_chunk()
2588 PCPU_SETUP_BUG_ON(!IS_ALIGNED(ai->unit_size, PCPU_BITMAP_BLOCK_SIZE)); in pcpu_setup_first_chunk()
2589 PCPU_SETUP_BUG_ON(ai->dyn_size < PERCPU_DYNAMIC_EARLY_SIZE); in pcpu_setup_first_chunk()
2590 PCPU_SETUP_BUG_ON(!IS_ALIGNED(ai->reserved_size, PCPU_MIN_ALLOC_SIZE)); in pcpu_setup_first_chunk()
2593 PCPU_SETUP_BUG_ON(pcpu_verify_alloc_info(ai) < 0); in pcpu_setup_first_chunk()
2596 alloc_size = ai->nr_groups * sizeof(group_offsets[0]); in pcpu_setup_first_chunk()
2602 alloc_size = ai->nr_groups * sizeof(group_sizes[0]); in pcpu_setup_first_chunk()
2626 for (group = 0, unit = 0; group < ai->nr_groups; group++, unit += i) { in pcpu_setup_first_chunk()
2627 const struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_setup_first_chunk()
2630 group_sizes[group] = gi->nr_units * ai->unit_size; in pcpu_setup_first_chunk()
2642 unit_off[cpu] = gi->base_offset + i * ai->unit_size; in pcpu_setup_first_chunk()
2660 pcpu_dump_alloc_info(KERN_DEBUG, ai); in pcpu_setup_first_chunk()
2662 pcpu_nr_groups = ai->nr_groups; in pcpu_setup_first_chunk()
2669 pcpu_unit_pages = ai->unit_size >> PAGE_SHIFT; in pcpu_setup_first_chunk()
2671 pcpu_atom_size = ai->atom_size; in pcpu_setup_first_chunk()
2675 pcpu_stats_save_ai(ai); in pcpu_setup_first_chunk()
2705 static_size = ALIGN(ai->static_size, PCPU_MIN_ALLOC_SIZE); in pcpu_setup_first_chunk()
2706 dyn_size = ai->dyn_size - (static_size - ai->static_size); in pcpu_setup_first_chunk()
2720 if (ai->reserved_size) in pcpu_setup_first_chunk()
2722 ai->reserved_size); in pcpu_setup_first_chunk()
2723 tmp_addr = (unsigned long)base_addr + static_size + ai->reserved_size; in pcpu_setup_first_chunk()
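
The pcpu_setup_first_chunk() fragments above boil down to simple address arithmetic: each group spans nr_units * unit_size bytes, each mapped CPU lands at base_offset + i * unit_size, the static area is rounded up to PCPU_MIN_ALLOC_SIZE with the slack deducted from dyn_size, and the dynamic region starts right after the static and reserved areas. A hedged sketch of that arithmetic on made-up numbers follows; the PCPU_MIN_ALLOC_SIZE value, base address, and all sizes are placeholders.

#include <stdio.h>
#include <stddef.h>

#define PCPU_MIN_ALLOC_SIZE 4UL       /* placeholder, not the kernel header */
#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((a) - 1))

int main(void)
{
        unsigned long unit_size = 256 * 1024;     /* example ai->unit_size */
        unsigned long static_sz = 198 * 1024 + 3; /* example ai->static_size */
        unsigned long reserved  = 8 * 1024;       /* example ai->reserved_size */
        unsigned long dyn       = 28 * 1024;      /* example ai->dyn_size */
        unsigned long base_addr = 0x1000000;      /* pretend first-chunk base */

        /* one group of 4 units starting at group base offset 0 */
        unsigned long base_offset = 0;
        for (int i = 0; i < 4; i++) {
                unsigned long unit_off = base_offset + i * unit_size;
                printf("cpu%d percpu unit at base + 0x%lx\n", i, unit_off);
        }

        /* static area is rounded up; the slack comes out of dyn_size */
        unsigned long static_aligned = ALIGN_UP(static_sz, PCPU_MIN_ALLOC_SIZE);
        unsigned long dyn_adjusted = dyn - (static_aligned - static_sz);
        unsigned long dyn_start = base_addr + static_aligned + reserved;

        printf("dynamic region: %lu bytes starting at 0x%lx\n",
               dyn_adjusted, dyn_start);
        return 0;
}
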
2823 struct pcpu_alloc_info *ai; in pcpu_build_alloc_info() local
2913 ai = pcpu_alloc_alloc_info(nr_groups, nr_units); in pcpu_build_alloc_info()
2914 if (!ai) in pcpu_build_alloc_info()
2916 cpu_map = ai->groups[0].cpu_map; in pcpu_build_alloc_info()
2919 ai->groups[group].cpu_map = cpu_map; in pcpu_build_alloc_info()
2923 ai->static_size = static_size; in pcpu_build_alloc_info()
2924 ai->reserved_size = reserved_size; in pcpu_build_alloc_info()
2925 ai->dyn_size = dyn_size; in pcpu_build_alloc_info()
2926 ai->unit_size = alloc_size / upa; in pcpu_build_alloc_info()
2927 ai->atom_size = atom_size; in pcpu_build_alloc_info()
2928 ai->alloc_size = alloc_size; in pcpu_build_alloc_info()
2931 struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_build_alloc_info()
2938 gi->base_offset = unit * ai->unit_size; in pcpu_build_alloc_info()
2948 return ai; in pcpu_build_alloc_info()
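
The pcpu_build_alloc_info() fragments show the single cpu_map[] allocated earlier being handed out in slices, one per group, while base_offset advances by unit_size for every unit assigned so far. The sketch below mimics that bookkeeping with two invented groups of four units each; group sizes and the CPU numbering are made up for illustration.

#include <stdio.h>

int main(void)
{
        unsigned int cpu_map[8];                /* the one shared array */
        int group_units[2] = { 4, 4 };          /* two invented groups */
        unsigned long unit_size = 256 * 1024;
        unsigned int *map = cpu_map;
        int unit = 0, next_cpu = 0;

        for (int g = 0; g < 2; g++) {
                unsigned long base_offset = unit * unit_size;

                printf("group %d: cpu_map slice at index %td, base_offset 0x%lx\n",
                       g, map - cpu_map, base_offset);

                for (int i = 0; i < group_units[g]; i++)
                        map[i] = next_cpu++;    /* map this group's units to CPUs */

                map += group_units[g];          /* next group's slice */
                unit += group_units[g];
        }

        return 0;
}
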
3027 struct pcpu_alloc_info *ai; in pcpu_embed_first_chunk() local
3032 ai = pcpu_build_alloc_info(reserved_size, dyn_size, atom_size, in pcpu_embed_first_chunk()
3034 if (IS_ERR(ai)) in pcpu_embed_first_chunk()
3035 return PTR_ERR(ai); in pcpu_embed_first_chunk()
3037 size_sum = ai->static_size + ai->reserved_size + ai->dyn_size; in pcpu_embed_first_chunk()
3038 areas_size = PFN_ALIGN(ai->nr_groups * sizeof(void *)); in pcpu_embed_first_chunk()
3048 for (group = 0; group < ai->nr_groups; group++) { in pcpu_embed_first_chunk()
3049 struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_embed_first_chunk()
3058 ptr = pcpu_fc_alloc(cpu, gi->nr_units * ai->unit_size, atom_size, cpu_to_nd_fn); in pcpu_embed_first_chunk()
3072 max_distance += ai->unit_size * ai->groups[highest_group].nr_units; in pcpu_embed_first_chunk()
3090 for (group = 0; group < ai->nr_groups; group++) { in pcpu_embed_first_chunk()
3091 struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_embed_first_chunk()
3094 for (i = 0; i < gi->nr_units; i++, ptr += ai->unit_size) { in pcpu_embed_first_chunk()
3097 pcpu_fc_free(ptr, ai->unit_size); in pcpu_embed_first_chunk()
3101 memcpy(ptr, __per_cpu_load, ai->static_size); in pcpu_embed_first_chunk()
3102 pcpu_fc_free(ptr + size_sum, ai->unit_size - size_sum); in pcpu_embed_first_chunk()
3107 for (group = 0; group < ai->nr_groups; group++) { in pcpu_embed_first_chunk()
3108 ai->groups[group].base_offset = areas[group] - base; in pcpu_embed_first_chunk()
3112 PFN_DOWN(size_sum), ai->static_size, ai->reserved_size, in pcpu_embed_first_chunk()
3113 ai->dyn_size, ai->unit_size); in pcpu_embed_first_chunk()
3115 pcpu_setup_first_chunk(ai, base); in pcpu_embed_first_chunk()
3119 for (group = 0; group < ai->nr_groups; group++) in pcpu_embed_first_chunk()
3122 ai->groups[group].nr_units * ai->unit_size); in pcpu_embed_first_chunk()
3124 pcpu_free_alloc_info(ai); in pcpu_embed_first_chunk()
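
In the pcpu_embed_first_chunk() fragments, each group gets one contiguous area of nr_units * unit_size bytes, every mapped unit receives a copy of the static percpu image starting at __per_cpu_load, and the tail of each unit beyond static_size + reserved_size + dyn_size is handed back via pcpu_fc_free(). The sketch below walks that copy-and-trim step with invented sizes; malloc() stands in for pcpu_fc_alloc(), fake_static_data[] for __per_cpu_load, and the trim is only reported rather than freed.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(void)
{
        size_t static_size = 4096, reserved = 1024, dyn = 2048;
        size_t unit_size = 16384;                 /* example ai->unit_size */
        size_t size_sum = static_size + reserved + dyn;
        int nr_units = 4;                         /* units in this group */

        char fake_static_data[4096];              /* stand-in for __per_cpu_load */
        memset(fake_static_data, 0x5a, sizeof(fake_static_data));

        /* one contiguous area for the whole group, as pcpu_fc_alloc() returns */
        char *area = malloc(nr_units * unit_size);
        if (!area)
                return 1;

        for (int i = 0; i < nr_units; i++) {
                char *ptr = area + i * unit_size;

                memcpy(ptr, fake_static_data, static_size);
                /* the kernel returns [ptr + size_sum, ptr + unit_size) via
                 * pcpu_fc_free(); here the trimmed span is only reported */
                printf("unit %d: %zu bytes used, %zu bytes trimmed\n",
                       i, size_sum, unit_size - size_sum);
        }

        free(area);
        return 0;
}
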
3212 struct pcpu_alloc_info *ai; in pcpu_page_first_chunk() local
3223 ai = pcpu_build_alloc_info(reserved_size, 0, PAGE_SIZE, NULL); in pcpu_page_first_chunk()
3224 if (IS_ERR(ai)) in pcpu_page_first_chunk()
3225 return PTR_ERR(ai); in pcpu_page_first_chunk()
3226 BUG_ON(ai->nr_groups != 1); in pcpu_page_first_chunk()
3227 upa = ai->alloc_size/ai->unit_size; in pcpu_page_first_chunk()
3229 if (WARN_ON(ai->groups[0].nr_units != nr_g0_units)) { in pcpu_page_first_chunk()
3230 pcpu_free_alloc_info(ai); in pcpu_page_first_chunk()
3234 unit_pages = ai->unit_size >> PAGE_SHIFT; in pcpu_page_first_chunk()
3247 unsigned int cpu = ai->groups[0].cpu_map[unit]; in pcpu_page_first_chunk()
3265 vm.size = num_possible_cpus() * ai->unit_size; in pcpu_page_first_chunk()
3270 (unsigned long)vm.addr + unit * ai->unit_size; in pcpu_page_first_chunk()
3281 flush_cache_vmap_early(unit_addr, unit_addr + ai->unit_size); in pcpu_page_first_chunk()
3284 memcpy((void *)unit_addr, __per_cpu_load, ai->static_size); in pcpu_page_first_chunk()
3289 unit_pages, psize_str, ai->static_size, in pcpu_page_first_chunk()
3290 ai->reserved_size, ai->dyn_size); in pcpu_page_first_chunk()
3292 pcpu_setup_first_chunk(ai, vm.addr); in pcpu_page_first_chunk()
3301 pcpu_free_alloc_info(ai); in pcpu_page_first_chunk()
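
The pcpu_page_first_chunk() fragments rely on a single group covering all units, with units-per-allocation taken from alloc_size / unit_size, pages per unit from unit_size >> PAGE_SHIFT, and one virtual area of num_possible_cpus() * unit_size bytes carved into per-unit addresses. A small sketch of that sizing arithmetic follows; the CPU count, sizes, and base address are invented.

#include <stdio.h>

#define PAGE_SHIFT 12
#define PAGE_SIZE  (1UL << PAGE_SHIFT)

int main(void)
{
        unsigned long alloc_size = 64 * 1024;   /* example ai->alloc_size */
        unsigned long unit_size  = 16 * 1024;   /* example ai->unit_size */
        int nr_cpus = 6;                        /* example num_possible_cpus() */
        unsigned long vm_addr = 0xc9000000UL;   /* pretend vm.addr */

        int upa = alloc_size / unit_size;               /* units per alloc */
        int unit_pages = unit_size >> PAGE_SHIFT;       /* pages per unit */
        unsigned long vm_size = (unsigned long)nr_cpus * unit_size;

        printf("upa=%d, %d pages per unit, %lu bytes of virtual space\n",
               upa, unit_pages, vm_size);

        for (int unit = 0; unit < nr_cpus; unit++)
                printf("cpu%d unit at 0x%lx\n",
                       unit, vm_addr + unit * unit_size);

        return 0;
}
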
3357 struct pcpu_alloc_info *ai; in setup_per_cpu_areas() local
3360 ai = pcpu_alloc_alloc_info(1, 1); in setup_per_cpu_areas()
3362 if (!ai || !fc) in setup_per_cpu_areas()
3367 ai->dyn_size = unit_size; in setup_per_cpu_areas()
3368 ai->unit_size = unit_size; in setup_per_cpu_areas()
3369 ai->atom_size = unit_size; in setup_per_cpu_areas()
3370 ai->alloc_size = unit_size; in setup_per_cpu_areas()
3371 ai->groups[0].nr_units = 1; in setup_per_cpu_areas()
3372 ai->groups[0].cpu_map[0] = 0; in setup_per_cpu_areas()
3374 pcpu_setup_first_chunk(ai, fc); in setup_per_cpu_areas()
3375 pcpu_free_alloc_info(ai); in setup_per_cpu_areas()
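
Finally, the setup_per_cpu_areas() fallback builds the simplest possible allocation info: one group, one unit mapped to CPU 0, and dyn_size, unit_size, atom_size, and alloc_size all set to the same value, so the whole unit is dynamic space. A trimmed sketch of that degenerate ai is shown below; the struct layout and the unit_size value are stand-ins, not the kernel definitions.

#include <stdio.h>
#include <stddef.h>

struct mini_group { int nr_units; unsigned int cpu_map[1]; };
struct mini_alloc_info {
        size_t dyn_size, unit_size, atom_size, alloc_size;
        int nr_groups;
        struct mini_group groups[1];
};

int main(void)
{
        size_t unit_size = 32 * 1024;      /* stand-in for the kernel's choice */
        struct mini_alloc_info ai = {
                .dyn_size   = unit_size,   /* no static or reserved area here */
                .unit_size  = unit_size,
                .atom_size  = unit_size,
                .alloc_size = unit_size,
                .nr_groups  = 1,
        };

        ai.groups[0].nr_units = 1;
        ai.groups[0].cpu_map[0] = 0;       /* CPU 0 is the only unit */

        printf("single-unit first chunk: %zu bytes, all dynamic\n",
               ai.dyn_size);
        return 0;
}
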