Lines Matching refs:cmap

85 struct bpf_cpu_map *cmap; in cpu_map_alloc() local
98 cmap = bpf_map_area_alloc(sizeof(*cmap), NUMA_NO_NODE); in cpu_map_alloc()
99 if (!cmap) in cpu_map_alloc()
102 bpf_map_init_from_attr(&cmap->map, attr); in cpu_map_alloc()
105 cmap->cpu_map = bpf_map_area_alloc(cmap->map.max_entries * in cpu_map_alloc()
107 cmap->map.numa_node); in cpu_map_alloc()
108 if (!cmap->cpu_map) { in cpu_map_alloc()
109 bpf_map_area_free(cmap); in cpu_map_alloc()
113 return &cmap->map; in cpu_map_alloc()
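
The cpu_map_alloc() hits above show the map's two-level layout: the outer struct bpf_cpu_map is allocated first and initialized from the user-supplied attr, then cmap->cpu_map, a flat array with one slot per possible key, is sized by map.max_entries and placed on the map's NUMA node; if the array allocation fails, the outer struct is freed again before returning. Below is a minimal userspace sketch of the same pattern, not the kernel code itself: every name in it (toy_entry, toy_cpu_map, toy_cpu_map_alloc) is invented for illustration, and calloc() stands in for bpf_map_area_alloc().

#include <stdlib.h>
#include <stdatomic.h>

struct toy_entry {                         /* stand-in for struct bpf_cpu_map_entry */
    unsigned int cpu;
    unsigned int qsize;
};

struct toy_cpu_map {                       /* stand-in for struct bpf_cpu_map */
    unsigned int max_entries;
    _Atomic(struct toy_entry *) *cpu_map;  /* one slot per possible key */
};

static struct toy_cpu_map *toy_cpu_map_alloc(unsigned int max_entries)
{
    struct toy_cpu_map *cmap = calloc(1, sizeof(*cmap));

    if (!cmap)
        return NULL;
    cmap->max_entries = max_entries;

    /* Slot array sized by max_entries; calloc's zeroing plays the role of
     * the zeroed map area, so every slot starts out empty (NULL). */
    cmap->cpu_map = calloc(max_entries, sizeof(*cmap->cpu_map));
    if (!cmap->cpu_map) {
        free(cmap);                        /* unwind, like bpf_map_area_free(cmap) */
        return NULL;
    }
    return cmap;
}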
506 static void __cpu_map_entry_replace(struct bpf_cpu_map *cmap, in __cpu_map_entry_replace() argument
511 old_rcpu = unrcu_pointer(xchg(&cmap->cpu_map[key_cpu], RCU_INITIALIZER(rcpu))); in __cpu_map_entry_replace()
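
__cpu_map_entry_replace() is the single point where a slot changes: xchg() installs the new entry pointer and hands back the old one atomically, with RCU_INITIALIZER()/unrcu_pointer() only converting to and from the __rcu-annotated type. Continuing the toy model from the sketch above, this collapses to one atomic_exchange(); the immediate free() is a simplification, since the real code must defer teardown of the old entry until readers have left their RCU read-side sections.

static void toy_entry_replace(struct toy_cpu_map *cmap, unsigned int key_cpu,
                              struct toy_entry *rcpu)
{
    struct toy_entry *old_rcpu;

    /* Atomically swap in the new entry (or NULL) and take ownership of
     * whatever was installed before. */
    old_rcpu = atomic_exchange(&cmap->cpu_map[key_cpu], rcpu);

    /* The kernel defers this via RCU; freeing right away is only safe in
     * this single-threaded toy. free(NULL) is a no-op. */
    free(old_rcpu);
}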
520 struct bpf_cpu_map *cmap = container_of(map, struct bpf_cpu_map, map); in cpu_map_delete_elem() local
527 __cpu_map_entry_replace(cmap, key_cpu, NULL); in cpu_map_delete_elem()
534 struct bpf_cpu_map *cmap = container_of(map, struct bpf_cpu_map, map); in cpu_map_update_elem() local
544 if (unlikely(key_cpu >= cmap->map.max_entries)) in cpu_map_update_elem()
564 __cpu_map_entry_replace(cmap, key_cpu, rcpu); in cpu_map_update_elem()
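
cpu_map_delete_elem() and cpu_map_update_elem() are thin wrappers around that replace helper: both reject a key at or beyond map.max_entries, delete installs NULL, and update installs a freshly built entry for the target CPU. A sketch under the same toy model, with -1 standing in for the kernel's specific error codes:

static int toy_cpu_map_delete(struct toy_cpu_map *cmap, unsigned int key_cpu)
{
    if (key_cpu >= cmap->max_entries)
        return -1;
    toy_entry_replace(cmap, key_cpu, NULL);    /* empty the slot */
    return 0;
}

static int toy_cpu_map_update(struct toy_cpu_map *cmap, unsigned int key_cpu,
                              struct toy_entry *rcpu)
{
    if (key_cpu >= cmap->max_entries)
        return -1;
    toy_entry_replace(cmap, key_cpu, rcpu);    /* publish the new entry */
    return 0;
}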
571 struct bpf_cpu_map *cmap = container_of(map, struct bpf_cpu_map, map); in cpu_map_free() local
587 for (i = 0; i < cmap->map.max_entries; i++) { in cpu_map_free()
590 rcpu = rcu_dereference_raw(cmap->cpu_map[i]); in cpu_map_free()
597 bpf_map_area_free(cmap->cpu_map); in cpu_map_free()
598 bpf_map_area_free(cmap); in cpu_map_free()
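
cpu_map_free() runs once the map can no longer be reached, which is why a raw rcu_dereference_raw() of each slot is enough: it walks all map.max_entries slots, tears down any entry still installed, then releases the slot array and finally the outer struct, the reverse of the allocation order seen in cpu_map_alloc(). In the toy model:

static void toy_cpu_map_free(struct toy_cpu_map *cmap)
{
    unsigned int i;

    for (i = 0; i < cmap->max_entries; i++)
        free(atomic_load(&cmap->cpu_map[i]));  /* free(NULL) is a no-op */

    free(cmap->cpu_map);    /* like bpf_map_area_free(cmap->cpu_map) */
    free(cmap);             /* like bpf_map_area_free(cmap) */
}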
607 struct bpf_cpu_map *cmap = container_of(map, struct bpf_cpu_map, map); in __cpu_map_lookup_elem() local
613 rcpu = rcu_dereference_check(cmap->cpu_map[key], in __cpu_map_lookup_elem()
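
__cpu_map_lookup_elem() is the read path: bounds-check the key, then rcu_dereference_check() the slot so the single pointer load is valid even while an update or delete races with it. The toy equivalent is a bounds check plus an atomic_load():

static struct toy_entry *toy_cpu_map_lookup(struct toy_cpu_map *cmap,
                                            unsigned int key)
{
    if (key >= cmap->max_entries)
        return NULL;
    return atomic_load(&cmap->cpu_map[key]);
}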
628 struct bpf_cpu_map *cmap = container_of(map, struct bpf_cpu_map, map); in cpu_map_get_next_key() local
632 if (index >= cmap->map.max_entries) { in cpu_map_get_next_key()
637 if (index == cmap->map.max_entries - 1) in cpu_map_get_next_key()
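
cpu_map_get_next_key() gives userspace iteration the usual array-map semantics: an out-of-range (or absent) key restarts at index 0, the last valid index reports that there are no further keys, and anything else yields key + 1. A sketch, again with -1 standing in for the kernel's -ENOENT:

static int toy_cpu_map_get_next_key(struct toy_cpu_map *cmap,
                                    const unsigned int *key,
                                    unsigned int *next_key)
{
    unsigned int index = key ? *key : cmap->max_entries;

    if (index >= cmap->max_entries) {
        *next_key = 0;                    /* (re)start iteration at slot 0 */
        return 0;
    }
    if (index == cmap->max_entries - 1)
        return -1;                        /* no more keys */
    *next_key = index + 1;
    return 0;
}

The array-style key handling throughout follows from the map's design: the key is simply the destination CPU id, so every index from 0 to max_entries - 1 is a valid key whether or not an entry is currently installed there.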