Searched refs:mlx5_wqe_data_seg (Results 1 – 19 of 19) sorted by relevance
203 sizeof(struct mlx5_wqe_data_seg); in mlx5_ib_create_srq()
229 srq->msrq.max_gs * sizeof(struct mlx5_wqe_data_seg); in mlx5_ib_create_srq()
239 sizeof(struct mlx5_wqe_data_seg); in mlx5_ib_create_srq()
414 struct mlx5_wqe_data_seg *scat; in mlx5_ib_post_srq_recv()
447 scat = (struct mlx5_wqe_data_seg *)(next + 1); in mlx5_ib_post_srq_recv()
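The mlx5_ib_post_srq_recv() hits above take the scatter list from (next + 1), and the mlx5_ib_create_srq() hits size each WQE as a next segment plus max_gs data segments. A minimal sketch of that layout follows; the struct names and the 16-byte next-segment size are assumptions, not the kernel definitions.

#include <stdint.h>

/* Hypothetical stand-ins: a leading "next" segment, then the scatter list. */
struct srq_next_seg_sketch { uint8_t raw[16]; };
struct data_seg_sketch     { uint32_t byte_count; uint32_t lkey; uint64_t addr; };

static struct data_seg_sketch *srq_wqe_scatter_list(void *wqe)
{
	struct srq_next_seg_sketch *next = wqe;

	/* scatter entries start immediately after the next segment */
	return (struct data_seg_sketch *)(next + 1);
}

int main(void)
{
	uint8_t wqe[64] = { 0 };

	/* the first scatter entry sits 16 bytes into the WQE */
	return srq_wqe_scatter_list(wqe) == (void *)(wqe + 16) ? 0 : 1;
}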
111 static void set_data_ptr_seg(struct mlx5_wqe_data_seg *dseg, struct ib_sge *sg) in set_data_ptr_seg()
217 static void set_reg_data_seg(struct mlx5_wqe_data_seg *dseg, in set_reg_data_seg()
686 *seg += sizeof(struct mlx5_wqe_data_seg); in set_reg_wr()
687 *size += (sizeof(struct mlx5_wqe_data_seg) / 16); in set_reg_wr()
1177 (struct mlx5_wqe_data_seg *)seg, in mlx5_ib_post_send()
1179 size += sizeof(struct mlx5_wqe_data_seg) / 16; in mlx5_ib_post_send()
1180 seg += sizeof(struct mlx5_wqe_data_seg); in mlx5_ib_post_send()
1210 struct mlx5_wqe_data_seg *scat; in mlx5_ib_post_recv()
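Based on the set_data_ptr_seg() hit above, a send data segment is filled from one ib_sge by converting its three fields to big endian. A compilable sketch with hypothetical stand-in structs (not the kernel definitions); htobe32()/htobe64() stand in for the kernel's cpu_to_be32()/cpu_to_be64().

#include <stdint.h>
#include <endian.h>

struct sge_sketch      { uint64_t addr; uint32_t length; uint32_t lkey; };
struct data_seg_sketch { uint32_t byte_count; uint32_t lkey; uint64_t addr; };

/* copy one SGE (addr / length / lkey) into one 16-byte data segment */
static void set_data_ptr_seg_sketch(struct data_seg_sketch *dseg,
				    const struct sge_sketch *sg)
{
	dseg->byte_count = htobe32(sg->length);
	dseg->lkey       = htobe32(sg->lkey);
	dseg->addr       = htobe64(sg->addr);
}

int main(void)
{
	struct sge_sketch sg = { .addr = 0x1000, .length = 256, .lkey = 0x42 };
	struct data_seg_sketch dseg;

	set_data_ptr_seg_sketch(&dseg, &sg);
	return dseg.byte_count ? 0 : 1;
}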
91 struct mlx5_wqe_data_seg data_seg;
265 sizeof(struct mlx5_wqe_data_seg); in mlx5r_umr_post_send()
587 mlx5r_umr_set_update_xlt_data_seg(struct mlx5_wqe_data_seg *data_seg, in mlx5r_umr_set_update_xlt_data_seg()
458 sizeof(struct mlx5_wqe_data_seg) < in set_rq_size()
463 sizeof(struct mlx5_wqe_data_seg) - in set_rq_size()
470 wqe_size += cap->max_recv_sge * sizeof(struct mlx5_wqe_data_seg); in set_rq_size()
485 sizeof(struct mlx5_wqe_data_seg) - in set_rq_size()
560 size += attr->cap.max_send_sge * sizeof(struct mlx5_wqe_data_seg); in calc_send_wqe()
576 sizeof(struct mlx5_wqe_data_seg); in get_send_sge()
582 sizeof(struct mlx5_wqe_data_seg); in get_send_sge()
585 sizeof(struct mlx5_wqe_data_seg); in get_send_sge()
588 sizeof(struct mlx5_wqe_data_seg)); in get_send_sge()
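The set_rq_size()/calc_send_wqe()/get_send_sge() hits above all account WQE space in 16-byte data-segment units: the maximum SGE count is the WQE size minus the fixed header segments, divided by sizeof(struct mlx5_wqe_data_seg). A toy illustration of that arithmetic; the names and the 32-byte header overhead are hypothetical example values, not kernel constants.

#include <stdio.h>

enum { DATA_SEG_SIZE = 16, WQE_HEADER_BYTES = 32 };

/* how many scatter/gather entries fit once the header segments are paid for */
static int max_sge_for_wqe_size(int wqe_size)
{
	return (wqe_size - WQE_HEADER_BYTES) / DATA_SEG_SIZE;
}

int main(void)
{
	/* e.g. a 128-byte WQE with 32 bytes of header segments holds 6 SGEs */
	printf("max_sge = %d\n", max_sge_for_wqe_size(128));
	return 0;
}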
1134 struct mlx5_wqe_data_seg *dseg = wqe; in pagefault_data_segments()
1028 sizeof(struct mlx5_wqe_data_seg); in mlx5_ib_query_device()
1032 sizeof(struct mlx5_wqe_data_seg); in mlx5_ib_query_device()
205 struct mlx5_wqe_data_seg *dseg = in mlx5e_xdp_mpwqe_add_dseg()
206 (struct mlx5_wqe_data_seg *)session->wqe + session->ds_count; in mlx5e_xdp_mpwqe_add_dseg()
536 struct mlx5_wqe_data_seg *dseg; in mlx5e_xmit_xdp_frame()
803 int sz = sizeof(struct mlx5_wqe_data_seg) * ndsegs; in mlx5e_get_rqwq_log_stride()
114 struct mlx5_wqe_data_seg data[];
183 struct mlx5_wqe_data_seg *dseg) in mlx5e_txwqe_build_dsegs()
433 struct mlx5_wqe_data_seg *dseg; in mlx5e_sq_xmit_wqe()
555 struct mlx5_wqe_data_seg *dseg; in mlx5e_tx_mpwqe_add_dseg()
557 dseg = (struct mlx5_wqe_data_seg *)session->wqe + session->ds_count; in mlx5e_tx_mpwqe_add_dseg()
987 struct mlx5_wqe_data_seg *dseg; in mlx5i_sq_xmit()
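The mlx5e_tx_mpwqe_add_dseg() hit above (like mlx5e_xdp_mpwqe_add_dseg() earlier) locates the next data segment by indexing from the WQE base by the session's running ds_count of 16-byte units. A sketch of that pattern with hypothetical stand-in types and names:

#include <stdint.h>

struct data_seg_sketch { uint32_t byte_count; uint32_t lkey; uint64_t addr; };

struct mpwqe_session_sketch {
	void *wqe;	/* start of the in-flight multi-packet WQE */
	int   ds_count;	/* 16-byte units already used (ctrl, eth, data segs) */
};

/* return the slot for the next data segment and account for it */
static struct data_seg_sketch *
mpwqe_add_dseg_sketch(struct mpwqe_session_sketch *s)
{
	struct data_seg_sketch *dseg =
		(struct data_seg_sketch *)s->wqe + s->ds_count;

	s->ds_count++;
	return dseg;
}

int main(void)
{
	uint8_t wqe[256] = { 0 };
	struct mpwqe_session_sketch s = { .wqe = wqe, .ds_count = 2 };

	/* with two 16-byte units consumed, the next segment starts at byte 32 */
	return mpwqe_add_dseg_sketch(&s) == (void *)(wqe + 32) ? 0 : 1;
}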
222 struct mlx5_wqe_data_seg data[];
227 struct mlx5_wqe_data_seg data[];
231 DECLARE_FLEX_ARRAY(struct mlx5_wqe_data_seg, data);
364 struct mlx5_wqe_data_seg {
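This hit points at the definition itself: the 16-byte scatter/gather element that the sizing and ds_count arithmetic in the other results is built around. A sketch of the layout as I recall it from include/linux/mlx5/qp.h, reproduced with plain integer types; verify the field list against the tree.

#include <stdint.h>

struct mlx5_wqe_data_seg_sketch {
	uint32_t byte_count;	/* __be32 buffer length */
	uint32_t lkey;		/* __be32 local memory key */
	uint64_t addr;		/* __be64 buffer DMA address */
};

int main(void)
{
	_Static_assert(sizeof(struct mlx5_wqe_data_seg_sketch) == 16,
		       "one data segment is one 16-byte WQE unit");
	return 0;
}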
11 struct mlx5_wqe_data_seg data;
718 struct mlx5_wqe_data_seg *dseg; in tx_post_resync_dump()
102 struct mlx5_wqe_data_seg *data; in mlx5_fpga_conn_post_recv()
145 struct mlx5_wqe_data_seg *data; in mlx5_fpga_conn_post_send()
406 struct mlx5_wqe_data_seg *wq_dseg; in dr_rdma_handle_icm_write_segments()
1249 struct mlx5_wqe_data_seg *data; in mlx5vf_post_recv()
410 num_ent * sizeof(struct mlx5_wqe_data_seg), &vqp->frag_buf, in rq_buf_alloc()