Lines Matching refs:ctlq_msg

171 struct idpf_ctlq_msg *ctlq_msg)  in idpf_recv_event_msg()  argument
173 int payload_size = ctlq_msg->ctx.indirect.payload->size; in idpf_recv_event_msg()
179 ctlq_msg->cookie.mbx.chnl_opcode, in idpf_recv_event_msg()
184 v2e = (struct virtchnl2_event *)ctlq_msg->ctx.indirect.payload->va; in idpf_recv_event_msg()
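
Read together, the idpf_recv_event_msg() hits show the event path: the handler takes the size of the indirect payload attached to the received control-queue message, reports the channel opcode if the payload is too small, and otherwise treats the payload as a virtchnl2 event. A condensed sketch of that pattern follows; struct virtchnl2_event and dev_err_ratelimited() are assumed from the driver's headers and are not part of the hits above, and the event dispatch itself is elided.

/* Condensed sketch only, not the full handler. */
static void idpf_recv_event_msg(struct idpf_adapter *adapter,
				struct idpf_ctlq_msg *ctlq_msg)
{
	int payload_size = ctlq_msg->ctx.indirect.payload->size;
	struct virtchnl2_event *v2e;

	if (payload_size < sizeof(*v2e)) {
		/* Bad payload: report which channel opcode carried it */
		dev_err_ratelimited(&adapter->pdev->dev,
				    "invalid event payload (op %d len %d)\n",
				    ctlq_msg->cookie.mbx.chnl_opcode,
				    payload_size);
		return;
	}

	/* The DMA buffer attached to the message holds the event itself */
	v2e = (struct virtchnl2_event *)ctlq_msg->ctx.indirect.payload->va;
	/* ... switch on le32_to_cpu(v2e->event), e.g. a link change ... */
}
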
253 struct idpf_ctlq_msg *ctlq_msg; in idpf_send_mb_msg() local
269 ctlq_msg = kzalloc(sizeof(*ctlq_msg), GFP_ATOMIC); in idpf_send_mb_msg()
270 if (!ctlq_msg) in idpf_send_mb_msg()
279 ctlq_msg->opcode = idpf_mbq_opc_send_msg_to_cp; in idpf_send_mb_msg()
280 ctlq_msg->func_id = 0; in idpf_send_mb_msg()
281 ctlq_msg->data_len = msg_size; in idpf_send_mb_msg()
282 ctlq_msg->cookie.mbx.chnl_opcode = op; in idpf_send_mb_msg()
283 ctlq_msg->cookie.mbx.chnl_retval = 0; in idpf_send_mb_msg()
295 ctlq_msg->ctx.indirect.payload = dma_mem; in idpf_send_mb_msg()
296 ctlq_msg->ctx.sw_cookie.data = cookie; in idpf_send_mb_msg()
298 err = idpf_ctlq_send(&adapter->hw, adapter->hw.asq, 1, ctlq_msg); in idpf_send_mb_msg()
310 kfree(ctlq_msg); in idpf_send_mb_msg()
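
The idpf_send_mb_msg() hits trace the send path: a control-queue message is allocated, its mailbox opcode and channel cookie are filled in, the virtchnl payload is placed in a DMA buffer hung off ctx.indirect.payload, a software cookie is stashed so the eventual reply can be matched back, and the message is posted to the send queue; the kfree() at line 310 is the error-unwind path. A condensed sketch of that sequence follows. The parameter names (op, msg_size, msg, cookie), IDPF_CTLQ_MAX_BUF_LEN, the struct idpf_dma_mem fields, and the dma_alloc_coherent() usage are drawn from a reading of the driver rather than from the hits, so treat them as assumptions.

/* Condensed sketch of the send path; error unwinding abbreviated. */
int idpf_send_mb_msg(struct idpf_adapter *adapter, u32 op, u16 msg_size,
		     u8 *msg, u16 cookie)
{
	struct idpf_ctlq_msg *ctlq_msg;
	struct idpf_dma_mem *dma_mem;
	int err;

	ctlq_msg = kzalloc(sizeof(*ctlq_msg), GFP_ATOMIC);
	if (!ctlq_msg)
		return -ENOMEM;

	dma_mem = kzalloc(sizeof(*dma_mem), GFP_ATOMIC);
	if (!dma_mem) {
		err = -ENOMEM;
		goto err_free_msg;
	}

	/* Mailbox opcode plus the virtchnl opcode carried in the cookie */
	ctlq_msg->opcode = idpf_mbq_opc_send_msg_to_cp;
	ctlq_msg->func_id = 0;
	ctlq_msg->data_len = msg_size;
	ctlq_msg->cookie.mbx.chnl_opcode = op;
	ctlq_msg->cookie.mbx.chnl_retval = 0;

	/* The payload travels out-of-band in a DMA buffer */
	dma_mem->size = IDPF_CTLQ_MAX_BUF_LEN;
	dma_mem->va = dma_alloc_coherent(&adapter->pdev->dev, dma_mem->size,
					 &dma_mem->pa, GFP_ATOMIC);
	if (!dma_mem->va) {
		err = -ENOMEM;
		goto err_free_dma_mem;
	}
	if (msg && msg_size)
		memcpy(dma_mem->va, msg, msg_size);

	ctlq_msg->ctx.indirect.payload = dma_mem;
	/* sw_cookie lets the reply be matched back to this transaction */
	ctlq_msg->ctx.sw_cookie.data = cookie;

	err = idpf_ctlq_send(&adapter->hw, adapter->hw.asq, 1, ctlq_msg);
	if (!err)
		return 0;

	dma_free_coherent(&adapter->pdev->dev, dma_mem->size, dma_mem->va,
			  dma_mem->pa);
err_free_dma_mem:
	kfree(dma_mem);
err_free_msg:
	kfree(ctlq_msg);	/* the kfree() hit at line 310 */

	return err;
}
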
560 const struct idpf_ctlq_msg *ctlq_msg) in idpf_vc_xn_forward_async() argument
564 if (ctlq_msg->cookie.mbx.chnl_opcode != xn->vc_op) { in idpf_vc_xn_forward_async()
566 ctlq_msg->cookie.mbx.chnl_opcode, xn->vc_op); in idpf_vc_xn_forward_async()
573 err = xn->async_handler(adapter, xn, ctlq_msg); in idpf_vc_xn_forward_async()
577 if (ctlq_msg->cookie.mbx.chnl_retval) { in idpf_vc_xn_forward_async()
580 ctlq_msg->cookie.mbx.chnl_opcode); in idpf_vc_xn_forward_async()
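
For fire-and-forget transactions, idpf_vc_xn_forward_async() only sanity-checks the reply: the channel opcode in the message cookie must match the transaction's vc_op, an optional per-transaction async_handler is invoked if one was installed, and otherwise a non-zero chnl_retval is reported as a failure. A condensed sketch follows; returning the transaction to its free pool is elided, and the exact error strings are illustrative rather than copied from the driver.

/* Condensed sketch: validate an asynchronously completed transaction. */
static int idpf_vc_xn_forward_async(struct idpf_adapter *adapter,
				    struct idpf_vc_xn *xn,
				    const struct idpf_ctlq_msg *ctlq_msg)
{
	/* The reply's channel opcode must match the transaction it answers */
	if (ctlq_msg->cookie.mbx.chnl_opcode != xn->vc_op) {
		dev_err_ratelimited(&adapter->pdev->dev,
				    "async opcode mismatch (msg %d, xn %d)\n",
				    ctlq_msg->cookie.mbx.chnl_opcode,
				    xn->vc_op);
		return -EINVAL;
	}

	/* Opcode-specific cleanup, e.g. the MAC filter handler further down */
	if (xn->async_handler)
		return xn->async_handler(adapter, xn, ctlq_msg);

	/* No handler: a non-zero retval is simply reported as a failure */
	if (ctlq_msg->cookie.mbx.chnl_retval) {
		dev_err_ratelimited(&adapter->pdev->dev,
				    "async message failure (op %d)\n",
				    ctlq_msg->cookie.mbx.chnl_opcode);
		return -EINVAL;
	}

	return 0;
}
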
597 const struct idpf_ctlq_msg *ctlq_msg) in idpf_vc_xn_forward_reply() argument
607 msg_info = ctlq_msg->ctx.sw_cookie.data; in idpf_vc_xn_forward_reply()
629 ctlq_msg->cookie.mbx.chnl_opcode); in idpf_vc_xn_forward_reply()
641 err = idpf_vc_xn_forward_async(adapter, xn, ctlq_msg); in idpf_vc_xn_forward_reply()
646 ctlq_msg->cookie.mbx.chnl_opcode); in idpf_vc_xn_forward_reply()
651 if (ctlq_msg->cookie.mbx.chnl_opcode != xn->vc_op) { in idpf_vc_xn_forward_reply()
653 ctlq_msg->cookie.mbx.chnl_opcode, xn->vc_op); in idpf_vc_xn_forward_reply()
660 if (ctlq_msg->cookie.mbx.chnl_retval) { in idpf_vc_xn_forward_reply()
667 if (ctlq_msg->data_len) { in idpf_vc_xn_forward_reply()
668 payload = ctlq_msg->ctx.indirect.payload->va; in idpf_vc_xn_forward_reply()
669 payload_size = ctlq_msg->data_len; in idpf_vc_xn_forward_reply()
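
The idpf_vc_xn_forward_reply() hits cover the demultiplexing path: the sw_cookie echoed back in the reply identifies the originating transaction, the channel opcode and retval are checked against it, and for a synchronous transaction the indirect payload (data_len bytes) is copied into the waiter's reply buffer before the waiter is woken. A heavily condensed sketch follows; idpf_vc_xn_lookup() is a hypothetical stand-in for the driver's transaction-ring lookup, and the kvec reply buffer and completion inside struct idpf_vc_xn are assumptions based on a reading of the driver, not shown in the hits.

/* Condensed sketch; idpf_vc_xn_lookup() is a hypothetical stand-in for the
 * driver's transaction-ring lookup (index plus salt decoded from sw_cookie).
 */
static int idpf_vc_xn_forward_reply(struct idpf_adapter *adapter,
				    const struct idpf_ctlq_msg *ctlq_msg)
{
	size_t payload_size = 0;
	const void *payload = NULL;
	struct idpf_vc_xn *xn;
	int err = 0;

	xn = idpf_vc_xn_lookup(adapter, ctlq_msg->ctx.sw_cookie.data);
	if (!xn)
		return -ENXIO;

	/* Async transactions are finished by idpf_vc_xn_forward_async()
	 * instead of waking a waiter (the hit at line 641).
	 */

	if (ctlq_msg->cookie.mbx.chnl_opcode != xn->vc_op) {
		dev_err_ratelimited(&adapter->pdev->dev,
				    "reply opcode mismatch (msg %d, xn %d)\n",
				    ctlq_msg->cookie.mbx.chnl_opcode,
				    xn->vc_op);
		err = -EINVAL;
		goto out_complete;
	}

	if (ctlq_msg->cookie.mbx.chnl_retval) {
		err = -EINVAL;
		goto out_complete;
	}

	/* Copy the indirect payload into the caller's reply buffer */
	if (ctlq_msg->data_len) {
		payload = ctlq_msg->ctx.indirect.payload->va;
		payload_size = ctlq_msg->data_len;
	}
	xn->reply_sz = payload_size;
	if (xn->reply.iov_base && payload)
		memcpy(xn->reply.iov_base, payload,
		       min_t(size_t, payload_size, xn->reply.iov_len));

out_complete:
	/* Wake the thread blocked waiting for this reply */
	complete(&xn->completed);

	return err;
}
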
696 struct idpf_ctlq_msg ctlq_msg; in idpf_recv_mb_msg() local
706 err = idpf_ctlq_recv(adapter->hw.arq, &num_recv, &ctlq_msg); in idpf_recv_mb_msg()
710 if (ctlq_msg.data_len) { in idpf_recv_mb_msg()
711 dma_mem = ctlq_msg.ctx.indirect.payload; in idpf_recv_mb_msg()
717 if (ctlq_msg.cookie.mbx.chnl_opcode == VIRTCHNL2_OP_EVENT) in idpf_recv_mb_msg()
718 idpf_recv_event_msg(adapter, &ctlq_msg); in idpf_recv_mb_msg()
720 err = idpf_vc_xn_forward_reply(adapter, &ctlq_msg); in idpf_recv_mb_msg()
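
idpf_recv_mb_msg() is the receive loop that produces the ctlq_msg consumed above: each message pulled off the mailbox receive queue is either dispatched as an event (VIRTCHNL2_OP_EVENT) or forwarded as a reply to a waiting transaction, and the DMA buffer it arrived in is then given back to the queue. A condensed sketch follows; idpf_ctlq_post_rx_buffs() and its arguments are assumed from a reading of the control-queue code, not from the hits.

/* Condensed sketch of the mailbox receive loop. */
int idpf_recv_mb_msg(struct idpf_adapter *adapter)
{
	int err = 0;

	while (1) {
		struct idpf_ctlq_msg ctlq_msg;
		struct idpf_dma_mem *dma_mem;
		u16 num_recv = 1;

		/* Pull at most one message off the mailbox receive queue */
		err = idpf_ctlq_recv(adapter->hw.arq, &num_recv, &ctlq_msg);
		if (err || !num_recv)
			break;

		/* Indirect messages arrive with a DMA buffer attached */
		if (ctlq_msg.data_len) {
			dma_mem = ctlq_msg.ctx.indirect.payload;
		} else {
			dma_mem = NULL;
			num_recv = 0;
		}

		/* Unsolicited events vs. replies to outstanding transactions */
		if (ctlq_msg.cookie.mbx.chnl_opcode == VIRTCHNL2_OP_EVENT)
			idpf_recv_event_msg(adapter, &ctlq_msg);
		else
			err = idpf_vc_xn_forward_reply(adapter, &ctlq_msg);

		/* Hand the receive buffer back to the control queue */
		idpf_ctlq_post_rx_buffs(&adapter->hw, adapter->hw.arq,
					&num_recv, &dma_mem);
	}

	return err;
}
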
3520 const struct idpf_ctlq_msg *ctlq_msg) in idpf_mac_filter_async_handler() argument
3532 if (!ctlq_msg->cookie.mbx.chnl_retval) in idpf_mac_filter_async_handler()
3539 ma_list = ctlq_msg->ctx.indirect.payload->va; in idpf_mac_filter_async_handler()
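
Finally, idpf_mac_filter_async_handler() is one concrete async_handler: it only has work to do when the reply reports a failure (non-zero chnl_retval), in which case it reads the virtchnl2 MAC address list from the indirect payload and unwinds the corresponding filters. A condensed sketch follows; struct virtchnl2_mac_addr_list and its num_mac_addr field are assumed from the virtchnl2 headers, and the filter-list walk itself is elided.

/* Condensed sketch: async handler for MAC filter add/del replies. */
static int idpf_mac_filter_async_handler(struct idpf_adapter *adapter,
					 struct idpf_vc_xn *xn,
					 const struct idpf_ctlq_msg *ctlq_msg)
{
	struct virtchnl2_mac_addr_list *ma_list;
	u16 num_entries;

	/* On success there is nothing to unwind */
	if (!ctlq_msg->cookie.mbx.chnl_retval)
		return 0;

	/* Make sure the payload can at least hold the list header */
	if (ctlq_msg->data_len < sizeof(*ma_list))
		return -EINVAL;

	ma_list = ctlq_msg->ctx.indirect.payload->va;
	num_entries = le16_to_cpu(ma_list->num_mac_addr);
	/* ... drop the num_entries filters the device rejected from the
	 *     vport's filter list ...
	 */

	return 0;
}
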