
Searched for refs:bulk_len (Results 1 – 9 of 9), sorted by relevance

/linux-6.12.1/drivers/net/ethernet/mellanox/mlx5/core/
fs_counters.c
187 int bulk_len; in mlx5_fc_stats_query_counter_range() local
195 bulk_len = min_t(int, cur_bulk_len, in mlx5_fc_stats_query_counter_range()
198 err = mlx5_cmd_fc_bulk_query(dev, bulk_base_id, bulk_len, in mlx5_fc_stats_query_counter_range()
210 if (counter->id >= bulk_base_id + bulk_len) { in mlx5_fc_stats_query_counter_range()
540 int bulk_len; member
542 struct mlx5_fc fcs[] __counted_by(bulk_len);
554 return bitmap_weight(bulk->bitmask, bulk->bulk_len); in mlx5_fc_bulk_get_free_fcs_amount()
562 int bulk_len; in mlx5_fc_bulk_create() local
567 bulk_len = alloc_bitmask > 0 ? MLX5_FC_BULK_NUM_FCS(alloc_bitmask) : 1; in mlx5_fc_bulk_create()
569 bulk = kvzalloc(struct_size(bulk, fcs, bulk_len), GFP_KERNEL); in mlx5_fc_bulk_create()
[all …]
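
The fs_counters.c hits center on a bulk object whose flexible fcs[] tail is sized by bulk_len (kvzalloc(struct_size(bulk, fcs, bulk_len)) together with __counted_by(bulk_len)). The following is a minimal userspace C sketch of that allocation pattern only; struct fc_entry, fc_bulk_create() and the field layout are invented for illustration and do not reproduce the mlx5 driver.

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-in for one counter slot (struct mlx5_fc in the driver). */
struct fc_entry {
    unsigned int id;
    unsigned long long packets;
    unsigned long long bytes;
};

/* Hypothetical stand-in for the bulk object: bulk_len records how many
 * entries the flexible array holds, as __counted_by(bulk_len) annotates. */
struct fc_bulk {
    int bulk_len;
    struct fc_entry fcs[];
};

static struct fc_bulk *fc_bulk_create(int bulk_len)
{
    /* Userspace analog of kvzalloc(struct_size(bulk, fcs, bulk_len), GFP_KERNEL). */
    struct fc_bulk *bulk = calloc(1, sizeof(*bulk) + bulk_len * sizeof(struct fc_entry));

    if (!bulk)
        return NULL;
    bulk->bulk_len = bulk_len;
    return bulk;
}

int main(void)
{
    struct fc_bulk *bulk = fc_bulk_create(4);

    if (!bulk)
        return 1;
    for (int i = 0; i < bulk->bulk_len; i++)
        bulk->fcs[i].id = 100 + i;
    printf("bulk_len=%d, last id=%u\n", bulk->bulk_len, bulk->fcs[bulk->bulk_len - 1].id);
    free(bulk);
    return 0;
}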
fs_cmd.h
118 int mlx5_cmd_fc_get_bulk_query_out_len(int bulk_len);
119 int mlx5_cmd_fc_bulk_query(struct mlx5_core_dev *dev, u32 base_id, int bulk_len,
fs_cmd.c
852 int mlx5_cmd_fc_get_bulk_query_out_len(int bulk_len) in mlx5_cmd_fc_get_bulk_query_out_len() argument
855 MLX5_ST_SZ_BYTES(traffic_counter) * bulk_len; in mlx5_cmd_fc_get_bulk_query_out_len()
858 int mlx5_cmd_fc_bulk_query(struct mlx5_core_dev *dev, u32 base_id, int bulk_len, in mlx5_cmd_fc_bulk_query() argument
861 int outlen = mlx5_cmd_fc_get_bulk_query_out_len(bulk_len); in mlx5_cmd_fc_bulk_query()
867 MLX5_SET(query_flow_counter_in, in, num_of_counters, bulk_len); in mlx5_cmd_fc_bulk_query()
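
In fs_cmd.c the bulk-query output length is a fixed command header plus one traffic_counter record per counter, scaled by bulk_len. A rough userspace sketch of that arithmetic follows; the byte sizes are made-up stand-ins for MLX5_ST_SZ_BYTES() and not the real firmware layout.

#include <stdio.h>

/* Assumed sizes standing in for MLX5_ST_SZ_BYTES(...) in the real driver. */
#define QUERY_OUT_HDR_BYTES   16   /* assumed fixed header size */
#define TRAFFIC_COUNTER_BYTES 16   /* assumed per-counter record size */

/* Mirrors the shape of mlx5_cmd_fc_get_bulk_query_out_len(): header + N records. */
static int bulk_query_out_len(int bulk_len)
{
    return QUERY_OUT_HDR_BYTES + TRAFFIC_COUNTER_BYTES * bulk_len;
}

int main(void)
{
    for (int bulk_len = 1; bulk_len <= 4; bulk_len *= 2)
        printf("bulk_len=%d -> outlen=%d bytes\n", bulk_len, bulk_query_out_len(bulk_len));
    return 0;
}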
/linux-6.12.1/crypto/
adiantum.c
224 const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; in adiantum_hash_header() local
229 .message_bits = cpu_to_le64((u64)bulk_len * 8) in adiantum_hash_header()
252 const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; in adiantum_hash_message() local
263 for (i = 0; i < bulk_len; i += n) { in adiantum_hash_message()
265 n = min_t(unsigned int, miter.length, bulk_len - i); in adiantum_hash_message()
283 const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; in adiantum_finish() local
306 err = crypto_shash_digest(&rctx->u.hash_desc, virt, bulk_len, in adiantum_finish()
313 memcpy(virt + bulk_len, &rctx->rbuf.bignum, sizeof(le128)); in adiantum_finish()
323 bulk_len, sizeof(le128), 1); in adiantum_finish()
343 const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; in adiantum_crypt() local
[all …]
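
adiantum.c treats everything except the final 16-byte block as the "bulk" part (bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE) and hashes it in whatever chunk sizes the scatterlist iterator yields. Below is a small standalone C sketch of that chunked walk; hash_bulk() and chunk_len are hypothetical stand-ins for the sg_miter loop, not the kernel crypto API.

#include <stdio.h>

#define BLOCKCIPHER_BLOCK_SIZE 16   /* as in adiantum.c */

/* Walk the bulk region in pieces, as adiantum_hash_message() does with
 * the chunks a scatterlist iterator hands out. */
static void hash_bulk(const unsigned char *msg, unsigned int cryptlen,
                      unsigned int chunk_len)
{
    const unsigned int bulk_len = cryptlen - BLOCKCIPHER_BLOCK_SIZE;
    unsigned int i, n;

    (void)msg;  /* a real implementation would feed msg[i..i+n) to the hash */
    for (i = 0; i < bulk_len; i += n) {
        n = chunk_len < bulk_len - i ? chunk_len : bulk_len - i;
        printf("hash bytes [%u, %u)\n", i, i + n);
    }
}

int main(void)
{
    unsigned char msg[64] = { 0 };

    hash_bulk(msg, sizeof(msg), 24);   /* 48 bulk bytes walked in chunks of <= 24 */
    return 0;
}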
hctr2.c
199 const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; in hctr2_hash_message() local
201 unsigned int remainder = bulk_len % BLOCKCIPHER_BLOCK_SIZE; in hctr2_hash_message()
208 for (i = 0; i < bulk_len; i += n) { in hctr2_hash_message()
210 n = min_t(unsigned int, miter.length, bulk_len - i); in hctr2_hash_message()
271 int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE; in hctr2_crypt() local
315 rctx->bulk_part_dst, bulk_len, in hctr2_crypt()
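
hctr2.c splits the same bulk region into whole cipher blocks plus a partial tail (remainder = bulk_len % BLOCKCIPHER_BLOCK_SIZE). A tiny sketch of that arithmetic, with an assumed request length:

#include <stdio.h>

#define BLOCKCIPHER_BLOCK_SIZE 16   /* as in hctr2.c */

int main(void)
{
    /* Reproduces the block/remainder split from hctr2_hash_message()
     * for a made-up request length. */
    unsigned int cryptlen = 100;                                  /* assumed request length */
    unsigned int bulk_len = cryptlen - BLOCKCIPHER_BLOCK_SIZE;    /* 84 */
    unsigned int remainder = bulk_len % BLOCKCIPHER_BLOCK_SIZE;   /* 4 */

    printf("bulk_len=%u: %u full blocks + %u-byte tail\n",
           bulk_len, bulk_len / BLOCKCIPHER_BLOCK_SIZE, remainder);
    return 0;
}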
/linux-6.12.1/drivers/gpu/drm/gud/
gud_drv.c
332 string_get_size(gdrm->bulk_len, 1, STRING_UNITS_2, buf, sizeof(buf)); in gud_stats_debugfs()
394 gdrm->bulk_buf = vmalloc_32(gdrm->bulk_len); in gud_alloc_bulk_buffer()
398 num_pages = DIV_ROUND_UP(gdrm->bulk_len, PAGE_SIZE); in gud_alloc_bulk_buffer()
407 0, gdrm->bulk_len, GFP_KERNEL); in gud_alloc_bulk_buffer()
568 gdrm->bulk_len = max_buffer_size; in gud_probe()
579 gdrm->compress_buf = vmalloc(gdrm->bulk_len); in gud_probe()
gud_pipe.c
166 if (len > gdrm->bulk_len) in gud_prep_flush()
338 if (gdrm->bulk_len < lines * pitch) in gud_flush_damage()
339 lines = gdrm->bulk_len / pitch; in gud_flush_damage()
gud_internal.h
29 size_t bulk_len; member
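
In the gud driver, bulk_len is the size of the preallocated USB bulk transfer buffer, and gud_flush_damage() shrinks the number of scanlines sent per flush so lines * pitch fits in it. A minimal sketch of that clamp, with invented buffer and pitch values:

#include <stdio.h>

/* Sketch of the clamp in gud_flush_damage(): if a damage region of `lines`
 * rows at `pitch` bytes per row does not fit in the bulk buffer, send fewer
 * lines per transfer. All concrete values below are made up. */
static unsigned int clamp_lines(size_t bulk_len, unsigned int lines, unsigned int pitch)
{
    if (bulk_len < (size_t)lines * pitch)
        lines = bulk_len / pitch;
    return lines;
}

int main(void)
{
    size_t bulk_len = 64 * 1024;    /* assumed max bulk transfer size */
    unsigned int pitch = 1920 * 4;  /* assumed bytes per scanline */

    printf("can flush %u lines per transfer\n", clamp_lines(bulk_len, 1080, pitch));
    return 0;
}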
/linux-6.12.1/net/core/
page_pool.c
860 int i, bulk_len = 0; in page_pool_put_page_bulk() local
876 data[bulk_len++] = (__force void *)netmem; in page_pool_put_page_bulk()
879 if (!bulk_len) in page_pool_put_page_bulk()
884 for (i = 0; i < bulk_len; i++) { in page_pool_put_page_bulk()
895 if (likely(i == bulk_len)) in page_pool_put_page_bulk()
901 for (; i < bulk_len; i++) in page_pool_put_page_bulk()
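
page_pool_put_page_bulk() gathers the entries that pass its checks into a local array, counting them with bulk_len, returns early when nothing was collected, and then processes the batch. The sketch below reproduces that gather-then-flush idiom in plain C; the filter and BULK_MAX are invented and unrelated to the page pool's actual recycling logic.

#include <stdio.h>

#define BULK_MAX 16   /* made-up batch capacity */

/* Gather items that pass a filter into data[], counting them with bulk_len,
 * bail out early when the batch is empty, then handle the batch as a whole. */
static void put_bulk(int *items, int count)
{
    void *data[BULK_MAX];
    int i, bulk_len = 0;

    for (i = 0; i < count && bulk_len < BULK_MAX; i++) {
        if (items[i] % 2 == 0)          /* stand-in for the recyclability check */
            data[bulk_len++] = &items[i];
    }

    if (!bulk_len)                      /* nothing to flush */
        return;

    for (i = 0; i < bulk_len; i++)
        printf("recycling item %d\n", *(int *)data[i]);
}

int main(void)
{
    int items[] = { 1, 2, 3, 4, 5, 6 };

    put_bulk(items, 6);
    return 0;
}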