/linux-6.12.1/drivers/net/ethernet/mellanox/mlx5/core/steering/hws/ |
D | mlx5hws_debug.c |
    245  struct mlx5hws_send_engine *send_queue;  in hws_debug_dump_context_send_engine() local
    252  send_queue = &ctx->send_queue[i];  in hws_debug_dump_context_send_engine()
    257  send_queue->used_entries,  in hws_debug_dump_context_send_engine()
    258  send_queue->num_entries,  in hws_debug_dump_context_send_engine()
    260  send_queue->num_entries,  in hws_debug_dump_context_send_engine()
    261  send_queue->err,  in hws_debug_dump_context_send_engine()
    262  send_queue->completed.ci,  in hws_debug_dump_context_send_engine()
    263  send_queue->completed.pi,  in hws_debug_dump_context_send_engine()
    264  send_queue->completed.mask);  in hws_debug_dump_context_send_engine()
    266  send_ring = &send_queue->send_ring;  in hws_debug_dump_context_send_engine()
    [all …]
|
D | mlx5hws_send.c |
    554   return hws_send_engine_poll(&ctx->send_queue[queue_id], res, res_nb);  in mlx5hws_send_queue_poll()
    939   mlx5hws_send_queue_close(&ctx->send_queue[queues]);  in __hws_send_queues_close()
    963   kfree(ctx->send_queue);  in mlx5hws_send_queues_close()
    1017  ctx->send_queue = kcalloc(ctx->queues, sizeof(*ctx->send_queue), GFP_KERNEL);  in mlx5hws_send_queues_open()
    1018  if (!ctx->send_queue) {  in mlx5hws_send_queues_open()
    1024  err = mlx5hws_send_queue_open(ctx, &ctx->send_queue[i], queue_size);  in mlx5hws_send_queues_open()
    1034  kfree(ctx->send_queue);  in mlx5hws_send_queues_open()
    1051  queue = &ctx->send_queue[queue_id];  in mlx5hws_send_queue_action()
    1173  queue_id = queue - ctx->send_queue;  in mlx5hws_send_stes_fw()
|
D | mlx5hws_rule.c |
    375  queue = &ctx->send_queue[attr->queue_id];  in hws_rule_create_hws()
    477  queue = &ctx->send_queue[attr->queue_id];  in hws_rule_destroy_failed_hws()
    512  queue = &ctx->send_queue[attr->queue_id];  in hws_rule_destroy_hws()
    584  if (unlikely(mlx5hws_send_engine_full(&ctx->send_queue[attr->queue_id])))  in hws_rule_enqueue_precheck()
    678  queue = &ctx->send_queue[attr->queue_id];  in mlx5hws_rule_move_hws_add()
|
D | mlx5hws_context.h | 46 struct mlx5hws_send_engine *send_queue; member
|
D | mlx5hws_bwc.c |
    15   return min(ctx->send_queue[queue_id].num_entries / 2,  in hws_bwc_get_burst_th()
    230  queue_full = mlx5hws_send_engine_full(&ctx->send_queue[queue_id]);  in hws_bwc_queue_poll()
    629  mlx5hws_send_engine_flush_queue(&ctx->send_queue[queue_id]);  in hws_bwc_matcher_move_all_simple()
|
D | mlx5hws_pat_arg.c | 377 queue = &ctx->send_queue[ctx->queues - 1]; in mlx5hws_arg_write_inline_arg_data()
|
D | mlx5hws_action.c | 1714 queue = &ctx->send_queue[ctx->queues - 1]; in hws_action_create_dest_match_range_fill_table()
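
The mlx5hws matches above all share one shape: ctx->send_queue is a kcalloc'd array of send engines, opened once, indexed by queue id, and freed as a whole on close, with pointer arithmetic recovering an engine's id (mlx5hws_send_stes_fw, line 1173). A minimal sketch of that pattern follows; hws_ctx, hws_engine, and the helper names are simplified stand-ins, not the driver's actual definitions.

    #include <linux/slab.h>
    #include <linux/types.h>
    #include <linux/errno.h>

    /* Hypothetical, trimmed-down stand-ins for the driver's types. */
    struct hws_engine {
    	u16 used_entries;
    	u16 num_entries;
    };

    struct hws_ctx {
    	struct hws_engine *send_queue;	/* one engine per queue */
    	size_t queues;
    };

    /* Allocate the per-queue engine array, as mlx5hws_send_queues_open() does. */
    static int hws_queues_open(struct hws_ctx *ctx, size_t nqueues)
    {
    	ctx->queues = nqueues;
    	ctx->send_queue = kcalloc(nqueues, sizeof(*ctx->send_queue),
    				  GFP_KERNEL);
    	if (!ctx->send_queue)
    		return -ENOMEM;
    	return 0;
    }

    /* Fast paths address an engine by index... */
    static struct hws_engine *hws_queue_get(struct hws_ctx *ctx, u16 queue_id)
    {
    	return &ctx->send_queue[queue_id];
    }

    /* ...and the array offset recovers the id from an engine pointer. */
    static u16 hws_queue_id(struct hws_ctx *ctx, struct hws_engine *queue)
    {
    	return queue - ctx->send_queue;
    }

    /* Tear down by freeing the whole array, as mlx5hws_send_queues_close() does. */
    static void hws_queues_close(struct hws_ctx *ctx)
    {
    	kfree(ctx->send_queue);
    	ctx->send_queue = NULL;
    }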
|
/linux-6.12.1/drivers/net/wireless/ath/ath6kl/ |
D | htc_pipe.c |
    303  struct list_head send_queue; /* temp queue to hold packets */  in htc_try_send() local
    315  INIT_LIST_HEAD(&send_queue);  in htc_try_send()
    357  list_splice_tail_init(txq, &send_queue);  in htc_try_send()
    373  list_move_tail(&packet->list, &send_queue);  in htc_try_send()
    398  &send_queue);  in htc_try_send()
    402  if (list_empty(&send_queue)) {  in htc_try_send()
    418  if (!list_empty(&send_queue)) {  in htc_try_send()
    420  list_splice_tail_init(&send_queue, &ep->txq);  in htc_try_send()
    421  if (!list_empty(&send_queue)) {  in htc_try_send()
    426  INIT_LIST_HEAD(&send_queue);  in htc_try_send()
    [all …]
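
Here send_queue is an on-stack temporary list: htc_try_send() splices pending packets off the endpoint queue, works through them, and splices any leftovers back. A minimal sketch of that temp-queue idiom, assuming hypothetical pkt, hw_can_send(), and hw_send() stand-ins (ath6kl's real packet type is struct htc_packet):

    #include <linux/list.h>
    #include <linux/types.h>

    struct pkt {
    	struct list_head list;
    };

    static bool hw_can_send(void) { return true; }	/* stub for the sketch */
    static void hw_send(struct pkt *p) { (void)p; }	/* stub for the sketch */

    static void try_send(struct list_head *txq)
    {
    	struct list_head send_queue;	/* temp queue to hold packets */
    	struct pkt *packet, *tmp;

    	INIT_LIST_HEAD(&send_queue);
    	list_splice_tail_init(txq, &send_queue);	/* txq is now empty */

    	list_for_each_entry_safe(packet, tmp, &send_queue, list) {
    		if (!hw_can_send())
    			break;
    		list_del(&packet->list);
    		hw_send(packet);
    	}

    	/* Return anything unsent to the endpoint queue, as line 420 does. */
    	if (!list_empty(&send_queue))
    		list_splice_tail_init(&send_queue, txq);
    }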
|
/linux-6.12.1/drivers/infiniband/core/ |
D | mad.c |
    1004  mad_send_wr->mad_list.mad_queue = &qp_info->send_queue;  in ib_send_mad()
    1031  spin_lock_irqsave(&qp_info->send_queue.lock, flags);  in ib_send_mad()
    1032  if (qp_info->send_queue.count < qp_info->send_queue.max_active) {  in ib_send_mad()
    1036  list = &qp_info->send_queue.list;  in ib_send_mad()
    1043  qp_info->send_queue.count++;  in ib_send_mad()
    1046  spin_unlock_irqrestore(&qp_info->send_queue.lock, flags);  in ib_send_mad()
    2273  struct ib_mad_queue *send_queue;  in ib_mad_send_done() local
    2288  send_queue = mad_list->mad_queue;  in ib_mad_send_done()
    2289  qp_info = send_queue->qp_info;  in ib_mad_send_done()
    2302  spin_lock_irqsave(&send_queue->lock, flags);  in ib_mad_send_done()
    [all …]
|
D | mad_priv.h | 189 struct ib_mad_queue send_queue; member
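
The MAD layer's send_queue is a spinlock-protected, depth-limited queue: ib_send_mad() only posts while count stays below max_active. A condensed sketch of that bounded-enqueue shape, with mad_queue/mad_item as simplified stand-ins for struct ib_mad_queue and its list entries, and post_one() standing in for the real ib_post_send() path (the real code parks overflow sends on a secondary list rather than failing):

    #include <linux/spinlock.h>
    #include <linux/list.h>
    #include <linux/errno.h>

    struct mad_queue {
    	spinlock_t lock;
    	struct list_head list;		/* sends owned by this queue */
    	unsigned long count;		/* currently posted */
    	unsigned long max_active;	/* send queue depth limit */
    };

    struct mad_item {
    	struct list_head entry;
    };

    static int mad_enqueue(struct mad_queue *q, struct mad_item *item,
    		       int (*post_one)(struct mad_item *))
    {
    	unsigned long flags;
    	int ret;

    	spin_lock_irqsave(&q->lock, flags);
    	if (q->count < q->max_active) {
    		/* Below the depth limit: post and account for it. */
    		ret = post_one(item);
    		if (!ret) {
    			list_add_tail(&item->entry, &q->list);
    			q->count++;
    		}
    	} else {
    		ret = -EBUSY;	/* sketch only; mad.c queues overflow instead */
    	}
    	spin_unlock_irqrestore(&q->lock, flags);
    	return ret;
    }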
|
/linux-6.12.1/fs/dlm/ |
D | midcomms.c |
    164  struct list_head send_queue;  member
    310  list_for_each_entry_rcu(mh, &node->send_queue, list) {  in dlm_send_queue_flush()
    362  INIT_LIST_HEAD(&node->send_queue);  in dlm_midcomms_addr()
    455  list_for_each_entry_rcu(mh, &node->send_queue, list) {  in dlm_receive_ack()
    466  list_for_each_entry_rcu(mh, &node->send_queue, list) {  in dlm_receive_ack()
    941  list_for_each_entry_rcu(mh, &node->send_queue, list) {  in dlm_midcomms_unack_msg_resend()
    971  list_add_tail_rcu(&mh->list, &mh->node->send_queue);  in midcomms_new_msg_cb()
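
dlm's per-node send_queue is an RCU list: writers add with list_add_tail_rcu() and readers walk it with list_for_each_entry_rcu(), as in dlm_receive_ack(). A minimal sketch of that pairing, with mc_node/mc_msg as hypothetical stand-ins for struct midcomms_node and its message handles:

    #include <linux/rculist.h>
    #include <linux/rcupdate.h>
    #include <linux/spinlock.h>
    #include <linux/types.h>

    struct mc_node {
    	spinlock_t lock;
    	struct list_head send_queue;
    };

    struct mc_msg {
    	struct list_head list;
    	u32 seq;
    };

    /* Writers add under a lock with the RCU-aware list primitive... */
    static void msg_queue(struct mc_node *node, struct mc_msg *mh)
    {
    	spin_lock(&node->lock);
    	list_add_tail_rcu(&mh->list, &node->send_queue);
    	spin_unlock(&node->lock);
    }

    /* ...so readers can scan locklessly inside an RCU read section. */
    static bool msg_is_pending(struct mc_node *node, u32 seq)
    {
    	struct mc_msg *mh;
    	bool found = false;

    	rcu_read_lock();
    	list_for_each_entry_rcu(mh, &node->send_queue, list) {
    		if (mh->seq == seq) {
    			found = true;
    			break;
    		}
    	}
    	rcu_read_unlock();
    	return found;
    }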
|
/linux-6.12.1/drivers/net/hamradio/ |
D | yam.c |
    125   struct sk_buff_head send_queue; /* Packets awaiting transmission */  member
    588   skb_queue_tail(&yp->send_queue, skb);  in yam_send_packet()
    608   skb_queue_empty(&yp->send_queue))  in yam_arbitrate()
    658   if (!(skb = skb_dequeue(&yp->send_queue))) {  in yam_tx_byte()
    702   if (skb_queue_empty(&yp->send_queue)) {  in yam_tx_byte()
    913   while ((skb = skb_dequeue(&yp->send_queue)))  in yam_close()
    1098  skb_queue_head_init(&yp->send_queue);  in yam_setup()
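
yam.c uses the stock sk_buff_head queue, whose operations take an internal lock: init at setup, tail-enqueue from the xmit path, dequeue from the tx byte pump, and drain on close. A short sketch of that lifecycle, with yam_priv as a trimmed-down stand-in for the driver's port struct:

    #include <linux/skbuff.h>

    struct yam_priv {
    	struct sk_buff_head send_queue;	/* packets awaiting transmission */
    };

    static void yam_init(struct yam_priv *yp)
    {
    	skb_queue_head_init(&yp->send_queue);	/* also inits its spinlock */
    }

    /* xmit path: park the skb; the tx interrupt drains it later. */
    static void yam_queue_tx(struct yam_priv *yp, struct sk_buff *skb)
    {
    	skb_queue_tail(&yp->send_queue, skb);
    }

    /* tx-byte pump: pull the next frame, if any (compare yam_tx_byte()). */
    static struct sk_buff *yam_next_tx(struct yam_priv *yp)
    {
    	return skb_dequeue(&yp->send_queue);	/* NULL when empty */
    }

    /* On close, free anything still queued, as yam_close() does. */
    static void yam_drain(struct yam_priv *yp)
    {
    	struct sk_buff *skb;

    	while ((skb = skb_dequeue(&yp->send_queue)))
    		kfree_skb(skb);
    }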
|
/linux-6.12.1/drivers/net/ |
D | virtio_net.c |
    283   struct send_queue {  struct
    393   struct send_queue *sq;
    565   static void __free_old_xmit(struct send_queue *sq, struct netdev_queue *txq,  in __free_old_xmit()
    1019  static void free_old_xmit(struct send_queue *sq, struct netdev_queue *txq,  in free_old_xmit()
    1050  struct send_queue *sq)  in check_sq_full_and_disable()
    1394  struct send_queue *sq;  in virtnet_xsk_wakeup()
    1415  struct send_queue *sq,  in __virtnet_xdp_xmit_one()
    1507  struct send_queue *sq;  in virtnet_xdp_xmit()
    2782  struct send_queue *sq = &vi->sq[index];  in virtnet_poll_cleantx()
    2836  struct send_queue *sq;  in virtnet_poll()
    [all …]
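
In virtio_net, send_queue is a per-tx-queue struct type rather than a list: the device holds an array vi->sq, and per-queue paths index into it (line 2782). A reduced sketch of that layout; the real struct also carries napi state, stats, and more, and virtnet_info_sketch is a hypothetical name:

    #include <linux/virtio.h>
    #include <linux/slab.h>
    #include <linux/errno.h>

    /* Reduced version of virtio_net's per-tx-queue struct. */
    struct send_queue {
    	struct virtqueue *vq;	/* virtqueue backing this tx queue */
    };

    struct virtnet_info_sketch {
    	struct send_queue *sq;	/* one entry per tx queue */
    	int max_queue_pairs;
    };

    /* Allocate the array once at setup time... */
    static int alloc_sqs(struct virtnet_info_sketch *vi, int pairs)
    {
    	vi->max_queue_pairs = pairs;
    	vi->sq = kcalloc(pairs, sizeof(*vi->sq), GFP_KERNEL);
    	return vi->sq ? 0 : -ENOMEM;
    }

    /* ...then per-queue paths index it, as virtnet_poll_cleantx() does. */
    static struct send_queue *get_sq(struct virtnet_info_sketch *vi, int index)
    {
    	return &vi->sq[index];
    }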
|