Lines matching full:masks in lib/group_cpus.c
Each match below shows the source line number, the matching line, and the enclosing function; the trailing "local"/"argument" marks whether "masks" is a local variable or a function parameter at that site.
47 cpumask_var_t *masks; in alloc_node_to_cpumask() local
50 masks = kcalloc(nr_node_ids, sizeof(cpumask_var_t), GFP_KERNEL); in alloc_node_to_cpumask()
51 if (!masks) in alloc_node_to_cpumask()
55 if (!zalloc_cpumask_var(&masks[node], GFP_KERNEL)) in alloc_node_to_cpumask()
59 return masks; in alloc_node_to_cpumask()
63 free_cpumask_var(masks[node]); in alloc_node_to_cpumask()
64 kfree(masks); in alloc_node_to_cpumask()
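The matches at lines 47-64 trace a per-NUMA-node cpumask table: one cpumask_var_t per node, with a partial-unwind path when a later allocation fails. A minimal reconstruction sketch of the whole allocator, assuming the conventional nr_node_ids loop bounds and an unwind label of my own naming:

static cpumask_var_t *alloc_node_to_cpumask(void)
{
        cpumask_var_t *masks;
        int node;

        /* One mask per possible NUMA node (line 50). */
        masks = kcalloc(nr_node_ids, sizeof(cpumask_var_t), GFP_KERNEL);
        if (!masks)
                return NULL;

        for (node = 0; node < nr_node_ids; node++) {
                if (!zalloc_cpumask_var(&masks[node], GFP_KERNEL))
                        goto out_unwind;
        }

        return masks;

out_unwind:
        /* Free only the masks allocated so far (lines 63-64). */
        while (--node >= 0)
                free_cpumask_var(masks[node]);
        kfree(masks);
        return NULL;
}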
68 static void free_node_to_cpumask(cpumask_var_t *masks) in free_node_to_cpumask() argument
73 free_cpumask_var(masks[node]); in free_node_to_cpumask()
74 kfree(masks); in free_node_to_cpumask()
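The teardown helper at lines 68-74 is the unconditional counterpart of the unwind path above. A sketch, assuming the same nr_node_ids bound:

static void free_node_to_cpumask(cpumask_var_t *masks)
{
        int node;

        for (node = 0; node < nr_node_ids; node++)
                free_cpumask_var(masks[node]);
        kfree(masks);
}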
77 static void build_node_to_cpumask(cpumask_var_t *masks) in build_node_to_cpumask() argument
82 cpumask_set_cpu(cpu, masks[cpu_to_node(cpu)]); in build_node_to_cpumask()
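Line 82 shows the table being populated by mapping each CPU to its home node. A sketch, assuming the usual for_each_possible_cpu() iteration:

static void build_node_to_cpumask(cpumask_var_t *masks)
{
        int cpu;

        /* Record each possible CPU in the mask of the node it lives on. */
        for_each_possible_cpu(cpu)
                cpumask_set_cpu(cpu, masks[cpu_to_node(cpu)]);
}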
252 struct cpumask *nmsk, struct cpumask *masks) in __group_cpus_evenly() argument
271 /* Ensure that only CPUs which are in both masks are set */ in __group_cpus_evenly()
273 cpumask_or(&masks[curgrp], &masks[curgrp], nmsk); in __group_cpus_evenly()
323 grp_spread_init_one(&masks[curgrp], nmsk, in __group_cpus_evenly()
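Lines 252-323 fall inside the core spreading routine. The fragments suggest two cases: when there are at least as many NUMA nodes as output groups, a node's eligible CPUs are OR-ed wholesale into the current group (lines 271-273); when groups outnumber nodes, a node's CPUs are carved into per-group chunks via grp_spread_init_one() (line 323). A sketch of those two steps only; the enclosing loops, the nodemsk/cpu_mask/curgrp names, and the cpus_per_grp argument (cut off in the match at line 323) are my own guesses:

/* Case 1: nodes >= groups: merge one whole node per group. */
for_each_node_mask(n, nodemsk) {
        /* Ensure that only CPUs which are in both masks are set (line 271). */
        cpumask_and(nmsk, cpu_mask, node_to_cpumask[n]);
        cpumask_or(&masks[curgrp], &masks[curgrp], nmsk);
        if (++curgrp == numgrps)
                curgrp = 0;
}

/*
 * Case 2: groups > nodes: split one node's CPUs across several groups.
 * cpus_per_grp is an assumed name for this group's share of the node's
 * CPU count; its computation is elided in the matches above.
 */
grp_spread_init_one(&masks[curgrp], nmsk, cpus_per_grp);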
353 struct cpumask *masks = NULL; in group_cpus_evenly() local
365 masks = kcalloc(numgrps, sizeof(*masks), GFP_KERNEL); in group_cpus_evenly()
366 if (!masks) in group_cpus_evenly()
387 npresmsk, nmsk, masks); in group_cpus_evenly()
404 npresmsk, nmsk, masks); in group_cpus_evenly()
421 kfree(masks); in group_cpus_evenly()
424 return masks; in group_cpus_evenly()
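The matches at lines 353-424 outline the top-level entry point: allocate numgrps output masks, run the spreading routine twice over the same masks array (the call sites at lines 387 and 404 share the npresmsk/nmsk/masks arguments), and free the array on failure. A condensed control-flow sketch; the two-stage present/non-present split, the label names, and the leading arguments of the two calls are assumptions based on the fragments:

struct cpumask *group_cpus_evenly(unsigned int numgrps)
{
        unsigned int curgrp = 0;
        cpumask_var_t *node_to_cpumask;
        cpumask_var_t nmsk, npresmsk;
        struct cpumask *masks = NULL;
        int ret = -ENOMEM;

        if (!zalloc_cpumask_var(&nmsk, GFP_KERNEL))
                return NULL;
        if (!zalloc_cpumask_var(&npresmsk, GFP_KERNEL))
                goto fail_nmsk;
        node_to_cpumask = alloc_node_to_cpumask();
        if (!node_to_cpumask)
                goto fail_npresmsk;

        masks = kcalloc(numgrps, sizeof(*masks), GFP_KERNEL);   /* line 365 */
        if (!masks)
                goto fail_node_to_cpumask;

        build_node_to_cpumask(node_to_cpumask);

        /* Stage 1: spread the present CPUs first (call site at line 387). */
        cpumask_copy(npresmsk, cpu_present_mask);
        ret = __group_cpus_evenly(curgrp, numgrps, node_to_cpumask,
                                  npresmsk, nmsk, masks);

        if (ret >= 0) {
                /* Stage 2: spread the remaining possible CPUs (line 404). */
                curgrp = ret < numgrps ? ret : 0;
                cpumask_andnot(npresmsk, cpu_possible_mask, npresmsk);
                ret = __group_cpus_evenly(curgrp, numgrps, node_to_cpumask,
                                          npresmsk, nmsk, masks);
        }

fail_node_to_cpumask:
        free_node_to_cpumask(node_to_cpumask);
fail_npresmsk:
        free_cpumask_var(npresmsk);
fail_nmsk:
        free_cpumask_var(nmsk);

        if (ret < 0) {
                kfree(masks);                                    /* line 421 */
                masks = NULL;
        }
        return masks;                                            /* line 424 */
}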
429 struct cpumask *masks = kcalloc(numgrps, sizeof(*masks), GFP_KERNEL); in group_cpus_evenly() local
431 if (!masks) in group_cpus_evenly()
435 cpumask_copy(&masks[0], cpu_possible_mask); in group_cpus_evenly()
436 return masks; in group_cpus_evenly()
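The final matches (lines 429-436) look like a uniprocessor fallback of the same entry point, where no spreading is needed. A sketch completing the function around the fragments; the !CONFIG_SMP guard is an assumption:

/* Assumed !CONFIG_SMP variant: no NUMA spreading to do. */
struct cpumask *group_cpus_evenly(unsigned int numgrps)
{
        struct cpumask *masks = kcalloc(numgrps, sizeof(*masks), GFP_KERNEL);

        if (!masks)
                return NULL;

        /* Put every possible CPU into the first group (line 435). */
        cpumask_copy(&masks[0], cpu_possible_mask);
        return masks;
}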