
Searched refs:mlx5_wqe_data_seg (Results 1 – 19 of 19) sorted by relevance

/linux-6.12.1/drivers/infiniband/hw/mlx5/
srq.c
203 sizeof(struct mlx5_wqe_data_seg); in mlx5_ib_create_srq()
229 srq->msrq.max_gs * sizeof(struct mlx5_wqe_data_seg); in mlx5_ib_create_srq()
239 sizeof(struct mlx5_wqe_data_seg); in mlx5_ib_create_srq()
414 struct mlx5_wqe_data_seg *scat; in mlx5_ib_post_srq_recv()
447 scat = (struct mlx5_wqe_data_seg *)(next + 1); in mlx5_ib_post_srq_recv()
wr.c
111 static void set_data_ptr_seg(struct mlx5_wqe_data_seg *dseg, struct ib_sge *sg) in set_data_ptr_seg()
217 static void set_reg_data_seg(struct mlx5_wqe_data_seg *dseg, in set_reg_data_seg()
686 *seg += sizeof(struct mlx5_wqe_data_seg); in set_reg_wr()
687 *size += (sizeof(struct mlx5_wqe_data_seg) / 16); in set_reg_wr()
1177 (struct mlx5_wqe_data_seg *)seg, in mlx5_ib_post_send()
1179 size += sizeof(struct mlx5_wqe_data_seg) / 16; in mlx5_ib_post_send()
1180 seg += sizeof(struct mlx5_wqe_data_seg); in mlx5_ib_post_send()
1210 struct mlx5_wqe_data_seg *scat; in mlx5_ib_post_recv()
umr.h
91 struct mlx5_wqe_data_seg data_seg;
umr.c
265 sizeof(struct mlx5_wqe_data_seg); in mlx5r_umr_post_send()
587 mlx5r_umr_set_update_xlt_data_seg(struct mlx5_wqe_data_seg *data_seg, in mlx5r_umr_set_update_xlt_data_seg()
qp.c
458 sizeof(struct mlx5_wqe_data_seg) < in set_rq_size()
463 sizeof(struct mlx5_wqe_data_seg) - in set_rq_size()
470 wqe_size += cap->max_recv_sge * sizeof(struct mlx5_wqe_data_seg); in set_rq_size()
485 sizeof(struct mlx5_wqe_data_seg) - in set_rq_size()
560 size += attr->cap.max_send_sge * sizeof(struct mlx5_wqe_data_seg); in calc_send_wqe()
576 sizeof(struct mlx5_wqe_data_seg); in get_send_sge()
582 sizeof(struct mlx5_wqe_data_seg); in get_send_sge()
585 sizeof(struct mlx5_wqe_data_seg); in get_send_sge()
588 sizeof(struct mlx5_wqe_data_seg)); in get_send_sge()
odp.c
1134 struct mlx5_wqe_data_seg *dseg = wqe; in pagefault_data_segments()
main.c
1028 sizeof(struct mlx5_wqe_data_seg); in mlx5_ib_query_device()
1032 sizeof(struct mlx5_wqe_data_seg); in mlx5_ib_query_device()
/linux-6.12.1/drivers/net/ethernet/mellanox/mlx5/core/en/
xdp.h
205 struct mlx5_wqe_data_seg *dseg = in mlx5e_xdp_mpwqe_add_dseg()
206 (struct mlx5_wqe_data_seg *)session->wqe + session->ds_count; in mlx5e_xdp_mpwqe_add_dseg()
xdp.c
536 struct mlx5_wqe_data_seg *dseg; in mlx5e_xmit_xdp_frame()
params.c
803 int sz = sizeof(struct mlx5_wqe_data_seg) * ndsegs; in mlx5e_get_rqwq_log_stride()
/linux-6.12.1/drivers/net/ethernet/mellanox/mlx5/core/ipoib/
ipoib.h
114 struct mlx5_wqe_data_seg data[];
/linux-6.12.1/drivers/net/ethernet/mellanox/mlx5/core/
en_tx.c
183 struct mlx5_wqe_data_seg *dseg) in mlx5e_txwqe_build_dsegs()
433 struct mlx5_wqe_data_seg *dseg; in mlx5e_sq_xmit_wqe()
555 struct mlx5_wqe_data_seg *dseg; in mlx5e_tx_mpwqe_add_dseg()
557 dseg = (struct mlx5_wqe_data_seg *)session->wqe + session->ds_count; in mlx5e_tx_mpwqe_add_dseg()
987 struct mlx5_wqe_data_seg *dseg; in mlx5i_sq_xmit()
en.h
222 struct mlx5_wqe_data_seg data[];
227 struct mlx5_wqe_data_seg data[];
231 DECLARE_FLEX_ARRAY(struct mlx5_wqe_data_seg, data);
/linux-6.12.1/include/linux/mlx5/
qp.h
364 struct mlx5_wqe_data_seg {
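
The qp.h hit above is the definition itself: a 16-byte scatter/gather element that the send, receive, SRQ, XDP, and UMR work-queue entries in the other hits on this page embed or append. As a point of reference, a sketch of the layout, with field names as they appear in include/linux/mlx5/qp.h and comments added here for orientation (all three fields are big-endian because the HCA reads them directly):

    #include <linux/types.h>        /* __be32, __be64 */

    struct mlx5_wqe_data_seg {
            __be32  byte_count;     /* length of the buffer, in bytes */
            __be32  lkey;           /* local memory key covering the buffer */
            __be64  addr;           /* 64-bit DMA address of the buffer */
    };
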
/linux-6.12.1/drivers/net/ethernet/mellanox/mlx5/core/en_accel/
ktls_tx.c
11 struct mlx5_wqe_data_seg data;
718 struct mlx5_wqe_data_seg *dseg; in tx_post_resync_dump()
/linux-6.12.1/drivers/net/ethernet/mellanox/mlx5/core/fpga/
conn.c
102 struct mlx5_wqe_data_seg *data; in mlx5_fpga_conn_post_recv()
145 struct mlx5_wqe_data_seg *data; in mlx5_fpga_conn_post_send()
/linux-6.12.1/drivers/net/ethernet/mellanox/mlx5/core/steering/
dr_send.c
406 struct mlx5_wqe_data_seg *wq_dseg; in dr_rdma_handle_icm_write_segments()
/linux-6.12.1/drivers/vfio/pci/mlx5/
cmd.c
1249 struct mlx5_wqe_data_seg *data; in mlx5vf_post_recv()
/linux-6.12.1/drivers/vdpa/mlx5/net/
mlx5_vnet.c
410 num_ent * sizeof(struct mlx5_wqe_data_seg), &vqp->frag_buf, in rq_buf_alloc()
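
Taken together, most of the hits above follow the same pattern: sizeof(struct mlx5_wqe_data_seg) is used to size work queues and strides (qp.c, srq.c, params.c, mlx5_vnet.c), while the post-send and post-receive paths walk a scatter/gather list and fill one segment per element, as set_data_ptr_seg() in wr.c does. A minimal sketch of that fill step, assuming the layout shown earlier (fill_data_seg is an illustrative name, not a kernel symbol):

    #include <asm/byteorder.h>      /* cpu_to_be32, cpu_to_be64 */
    #include <rdma/ib_verbs.h>      /* struct ib_sge */

    /* Fill one WQE data segment from a verbs scatter/gather entry.
     * The device expects all three fields in big-endian byte order,
     * mirroring what set_data_ptr_seg() does in wr.c. */
    static void fill_data_seg(struct mlx5_wqe_data_seg *dseg,
                              const struct ib_sge *sg)
    {
            dseg->byte_count = cpu_to_be32(sg->length);
            dseg->lkey       = cpu_to_be32(sg->lkey);
            dseg->addr       = cpu_to_be64(sg->addr);
    }
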