Searched refs:txqs (Results 1 – 25 of 59) sorted by relevance

/linux-6.12.1/drivers/net/wireless/intel/iwlwifi/pcie/
tx.c
94 txq_id != trans_pcie->txqs.cmd.q_id && in iwl_pcie_txq_inc_wr_ptr()
129 struct iwl_txq *txq = trans_pcie->txqs.txq[i]; in iwl_pcie_txq_check_wrptrs()
131 if (!test_bit(i, trans_pcie->txqs.queue_used)) in iwl_pcie_txq_check_wrptrs()
169 tfd = (u8 *)txq->tfds + trans_pcie->txqs.tfd.size * txq->write_ptr; in iwl_pcie_txq_build_tfd()
172 memset(tfd, 0, trans_pcie->txqs.tfd.size); in iwl_pcie_txq_build_tfd()
177 if (num_tbs >= trans_pcie->txqs.tfd.max_tbs) { in iwl_pcie_txq_build_tfd()
179 trans_pcie->txqs.tfd.max_tbs); in iwl_pcie_txq_build_tfd()
233 page_ptr = (void *)((u8 *)skb->cb + trans_pcie->txqs.page_offs); in iwl_pcie_free_tso_pages()
300 if (num_tbs > trans_pcie->txqs.tfd.max_tbs) { in iwl_txq_gen1_tfd_unmap()
384 struct iwl_txq *txq = trans_pcie->txqs.txq[txq_id]; in iwl_pcie_txq_unmap()
[all …]
tx-gen2.c
27 page_ptr = (void *)((u8 *)skb->cb + trans_pcie->txqs.page_offs); in get_workaround_page()
587 WARN_ON(trans_pcie->txqs.bc_table_dword); in iwl_pcie_gen2_update_byte_tbl()
595 WARN_ON(!trans_pcie->txqs.bc_table_dword); in iwl_pcie_gen2_update_byte_tbl()
630 if (le16_to_cpu(tfd->num_tbs) >= trans_pcie->txqs.tfd.max_tbs) { in iwl_txq_gen2_set_tb()
632 trans_pcie->txqs.tfd.max_tbs); in iwl_txq_gen2_set_tb()
654 if (num_tbs > trans_pcie->txqs.tfd.max_tbs) { in iwl_txq_gen2_tfd_unmap()
729 struct iwl_txq *txq = trans_pcie->txqs.txq[txq_id]; in iwl_txq_gen2_tx()
738 if (WARN_ONCE(!test_bit(txq_id, trans_pcie->txqs.queue_used), in iwl_txq_gen2_tx()
757 trans_pcie->txqs.dev_cmd_offs); in iwl_txq_gen2_tx()
825 struct iwl_txq *txq = trans_pcie->txqs.txq[txq_id]; in iwl_txq_gen2_unmap()
[all …]
trans.c
1968 trans_pcie->txqs.cmd.q_id = trans_cfg->cmd_queue; in iwl_trans_pcie_configure()
1969 trans_pcie->txqs.cmd.fifo = trans_cfg->cmd_fifo; in iwl_trans_pcie_configure()
1970 trans_pcie->txqs.page_offs = trans_cfg->cb_data_offs; in iwl_trans_pcie_configure()
1971 trans_pcie->txqs.dev_cmd_offs = trans_cfg->cb_data_offs + sizeof(void *); in iwl_trans_pcie_configure()
1972 trans_pcie->txqs.queue_alloc_cmd_ver = trans_cfg->queue_alloc_cmd_ver; in iwl_trans_pcie_configure()
1991 trans_pcie->txqs.bc_table_dword = trans_cfg->bc_table_dword; in iwl_trans_pcie_configure()
2091 if (trans_pcie->txqs.tso_hdr_page) { in iwl_trans_pcie_free()
2094 per_cpu_ptr(trans_pcie->txqs.tso_hdr_page, i); in iwl_trans_pcie_free()
2100 free_percpu(trans_pcie->txqs.tso_hdr_page); in iwl_trans_pcie_free()
2418 if (!test_bit(txq_idx, trans_pcie->txqs.queue_used)) in iwl_trans_pcie_wait_txq_empty()
[all …]
trans-gen2.c
250 if (iwl_txq_gen2_init(trans, trans_pcie->txqs.cmd.q_id, queue_size)) in iwl_pcie_gen2_nic_init()
349 memset(trans_pcie->txqs.queue_stopped, 0, in iwl_trans_pcie_gen2_fw_alive()
350 sizeof(trans_pcie->txqs.queue_stopped)); in iwl_trans_pcie_gen2_fw_alive()
351 memset(trans_pcie->txqs.queue_used, 0, in iwl_trans_pcie_gen2_fw_alive()
352 sizeof(trans_pcie->txqs.queue_used)); in iwl_trans_pcie_gen2_fw_alive()
internal.h
520 struct iwl_pcie_txqs txqs; member
599 #define IWL_TRANS_PCIE_MAX_FRAGS(trans_pcie) ((trans_pcie)->txqs.tfd.max_tbs - 3)
681 return (u8 *)txq->tfds + trans_pcie->txqs.tfd.size * idx; in iwl_txq_get_tfd()
700 if (!test_and_set_bit(txq->id, trans_pcie->txqs.queue_stopped)) { in iwl_txq_stop()
738 if (test_and_clear_bit(txq->id, trans_pcie->txqs.queue_stopped)) { in iwl_trans_pcie_wake_queue()
798 unsigned long txqs, bool freeze);
ctxt-info.c
221 cpu_to_le64(trans_pcie->txqs.txq[trans_pcie->txqs.cmd.q_id]->dma_addr); in iwl_pcie_ctxt_info_init()
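
A note on the internal.h hits above (source lines 700 and 738): they show the bitmap handshake the PCIe transport uses so each queue is stopped and woken exactly once — test_and_set_bit() and test_and_clear_bit() make the transitions idempotent under concurrency. A stripped-down sketch of that pattern, with illustrative names rather than iwlwifi's own:

#include <linux/bitops.h>
#include <linux/netdevice.h>

#define MY_MAX_TXQS 32

struct my_trans {
	/* one bit per TX queue, set while the queue is stopped */
	unsigned long queue_stopped[BITS_TO_LONGS(MY_MAX_TXQS)];
};

/* Stop a full queue; the bitmap guarantees the stack hears it once. */
static void my_txq_stop(struct my_trans *t, struct net_device *dev, int qid)
{
	if (!test_and_set_bit(qid, t->queue_stopped))
		netif_stop_subqueue(dev, qid);
}

/* Wake it when space frees up; skipped if it was never stopped. */
static void my_txq_wake(struct my_trans *t, struct net_device *dev, int qid)
{
	if (test_and_clear_bit(qid, t->queue_stopped))
		netif_wake_subqueue(dev, qid);
}
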
/linux-6.12.1/drivers/net/wireless/ath/ath5k/
mac80211-ops.c
69 ath5k_tx_queue(hw, skb, &ah->txqs[qnum], control); in ath5k_tx()
745 *tx = ah->txqs[AR5K_TX_QUEUE_ID_DATA_MIN].txq_max; in ath5k_get_ringparam()
765 for (qnum = 0; qnum < ARRAY_SIZE(ah->txqs); qnum++) { in ath5k_set_ringparam()
766 if (!ah->txqs[qnum].setup) in ath5k_set_ringparam()
768 if (ah->txqs[qnum].qnum < AR5K_TX_QUEUE_ID_DATA_MIN || in ath5k_set_ringparam()
769 ah->txqs[qnum].qnum > AR5K_TX_QUEUE_ID_DATA_MAX) in ath5k_set_ringparam()
772 ah->txqs[qnum].txq_max = tx; in ath5k_set_ringparam()
773 if (ah->txqs[qnum].txq_len >= ah->txqs[qnum].txq_max) in ath5k_set_ringparam()
774 ieee80211_stop_queue(hw, ah->txqs[qnum].qnum); in ath5k_set_ringparam()
base.c
1055 txq = &ah->txqs[qnum]; in ath5k_txq_setup()
1067 return &ah->txqs[qnum]; in ath5k_txq_setup()
1161 for (i = 0; i < ARRAY_SIZE(ah->txqs); i++) { in ath5k_drain_tx_buffs()
1162 if (ah->txqs[i].setup) { in ath5k_drain_tx_buffs()
1163 txq = &ah->txqs[i]; in ath5k_drain_tx_buffs()
1186 struct ath5k_txq *txq = ah->txqs; in ath5k_txq_release()
1189 for (i = 0; i < ARRAY_SIZE(ah->txqs); i++, txq++) in ath5k_txq_release()
1838 if (ah->txqs[i].setup && (ah->ah_txq_isr_txok_all & BIT(i))) in ath5k_tasklet_tx()
1839 ath5k_tx_processq(ah, &ah->txqs[i]); in ath5k_tasklet_tx()
2054 trace_ath5k_tx(ah, bf->skb, &ah->txqs[ah->bhalq]); in ath5k_beacon_send()
[all …]
/linux-6.12.1/drivers/net/ethernet/fungible/funeth/
funeth_main.c
350 static void free_txqs(struct funeth_txq **txqs, unsigned int nqs, in free_txqs() argument
355 for (i = start; i < nqs && txqs[i]; i++) in free_txqs()
356 txqs[i] = funeth_txq_free(txqs[i], state); in free_txqs()
359 static int alloc_txqs(struct net_device *dev, struct funeth_txq **txqs, in alloc_txqs() argument
369 state, &txqs[i]); in alloc_txqs()
371 free_txqs(txqs, nqs, start, FUN_QSTATE_DESTROYED); in alloc_txqs()
453 qset->txqs = fp->txqs; in fun_free_rings()
465 fp->txqs = NULL; in fun_free_rings()
469 free_txqs(qset->txqs, qset->ntxqs, qset->txq_start, qset->state); in fun_free_rings()
481 struct funeth_txq **xdpqs = NULL, **txqs; in fun_alloc_rings() local
[all …]
funeth.h
67 struct funeth_txq **txqs; member
88 struct funeth_txq **txqs; member
/linux-6.12.1/drivers/net/ethernet/huawei/hinic/
hinic_main.c
120 gather_tx_stats(nic_tx_stats, &nic_dev->txqs[i]); in gather_nic_stats()
134 if (nic_dev->txqs) in create_txqs()
137 nic_dev->txqs = devm_kcalloc(&netdev->dev, num_txqs, in create_txqs()
138 sizeof(*nic_dev->txqs), GFP_KERNEL); in create_txqs()
139 if (!nic_dev->txqs) in create_txqs()
147 err = hinic_init_txq(&nic_dev->txqs[i], sq, netdev); in create_txqs()
165 hinic_clean_txq(&nic_dev->txqs[i]); in create_txqs()
168 hinic_sq_debug_rem(nic_dev->txqs[j].sq); in create_txqs()
169 hinic_clean_txq(&nic_dev->txqs[j]); in create_txqs()
174 devm_kfree(&netdev->dev, nic_dev->txqs); in create_txqs()
[all …]
hinic_dev.h
97 struct hinic_txq *txqs; member
/linux-6.12.1/drivers/infiniband/hw/hfi1/
ipoib_tx.c
593 txp.txq = &priv->txqs[skb_get_queue_mapping(skb)]; in hfi1_ipoib_send()
697 priv->txqs = kcalloc_node(dev->num_tx_queues, in hfi1_ipoib_txreq_init()
701 if (!priv->txqs) in hfi1_ipoib_txreq_init()
705 struct hfi1_ipoib_txq *txq = &priv->txqs[i]; in hfi1_ipoib_txreq_init()
756 struct hfi1_ipoib_txq *txq = &priv->txqs[i]; in hfi1_ipoib_txreq_init()
765 kfree(priv->txqs); in hfi1_ipoib_txreq_init()
766 priv->txqs = NULL; in hfi1_ipoib_txreq_init()
799 struct hfi1_ipoib_txq *txq = &priv->txqs[i]; in hfi1_ipoib_txreq_deinit()
812 kfree(priv->txqs); in hfi1_ipoib_txreq_deinit()
813 priv->txqs = NULL; in hfi1_ipoib_txreq_deinit()
[all …]
ipoib.h
125 struct hfi1_ipoib_txq *txqs; member
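
The hfi1 hits show the allocation/lookup pattern common to most drivers in this list: txqs is a kcalloc'd array with one element per netdev TX queue, and the hot path indexes it with skb_get_queue_mapping(). A condensed sketch of that pattern (struct and function names are hypothetical, and assume the priv struct was allocated with the netdev):

#include <linux/netdevice.h>
#include <linux/skbuff.h>
#include <linux/slab.h>

struct my_txq {
	/* per-queue ring state, stats, ... */
	unsigned int id;
};

struct my_priv {
	struct my_txq *txqs;	/* one entry per dev->num_tx_queues */
};

static int my_txqs_init(struct net_device *dev)
{
	struct my_priv *priv = netdev_priv(dev);

	priv->txqs = kcalloc(dev->num_tx_queues, sizeof(*priv->txqs),
			     GFP_KERNEL);
	return priv->txqs ? 0 : -ENOMEM;
}

static netdev_tx_t my_start_xmit(struct sk_buff *skb, struct net_device *dev)
{
	struct my_priv *priv = netdev_priv(dev);
	/* use the queue the stack already selected for this skb */
	struct my_txq *txq = &priv->txqs[skb_get_queue_mapping(skb)];

	/* ... post skb to txq's ring here; txq->id selects the doorbell ... */
	(void)txq;
	return NETDEV_TX_OK;
}
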
/linux-6.12.1/net/
devres.c
22 unsigned int txqs, unsigned int rxqs) in devm_alloc_etherdev_mqs() argument
30 dr->ndev = alloc_etherdev_mqs(sizeof_priv, txqs, rxqs); in devm_alloc_etherdev_mqs()
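
devm_alloc_etherdev_mqs() (defined here in net/devres.c) is the device-managed wrapper: the net_device is freed automatically when the owning struct device detaches, so the error and remove paths need no explicit free_netdev(). A minimal probe-path sketch, assuming a hypothetical platform driver and private struct:

#include <linux/etherdevice.h>
#include <linux/platform_device.h>

struct myeth_priv {		/* hypothetical driver private data */
	void __iomem *regs;
};

static int myeth_probe(struct platform_device *pdev)
{
	struct net_device *ndev;

	/* 4 TX queues, 4 RX queues; freed automatically on detach */
	ndev = devm_alloc_etherdev_mqs(&pdev->dev, sizeof(struct myeth_priv),
				       4, 4);
	if (!ndev)
		return -ENOMEM;

	SET_NETDEV_DEV(ndev, &pdev->dev);

	/* devm_register_netdev() pairs the unregister the same way */
	return devm_register_netdev(&pdev->dev, ndev);
}
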
/linux-6.12.1/drivers/net/wireless/intel/iwlwifi/
iwl-trans.c
463 int iwl_trans_wait_tx_queues_empty(struct iwl_trans *trans, u32 txqs) in iwl_trans_wait_tx_queues_empty() argument
469 return iwl_trans_pcie_wait_txqs_empty(trans, txqs); in iwl_trans_wait_tx_queues_empty()
474 unsigned long txqs, bool freeze) in iwl_trans_freeze_txq_timer() argument
480 iwl_pcie_freeze_txq_timer(trans, txqs, freeze); in iwl_trans_freeze_txq_timer()
/linux-6.12.1/drivers/net/ethernet/netronome/nfp/abm/
main.c
85 unsigned int txqs; in nfp_abm_spawn_repr() local
90 txqs = 1; in nfp_abm_spawn_repr()
93 txqs = alink->vnic->max_rx_rings; in nfp_abm_spawn_repr()
96 netdev = nfp_repr_alloc_mqs(app, txqs, 1); in nfp_abm_spawn_repr()
/linux-6.12.1/drivers/net/ethernet/intel/idpf/
idpf_txrx.c
182 idpf_tx_desc_rel(txq_grp->txqs[j]); in idpf_tx_desc_rel_all()
318 struct idpf_tx_queue *txq = vport->txq_grps[i].txqs[j]; in idpf_tx_desc_alloc_all()
997 kfree(txq_grp->txqs[j]); in idpf_txq_group_rel()
998 txq_grp->txqs[j] = NULL; in idpf_txq_group_rel()
1094 kfree(vport->txqs); in idpf_vport_queues_rel()
1095 vport->txqs = NULL; in idpf_vport_queues_rel()
1112 vport->txqs = kcalloc(vport->num_txq, sizeof(*vport->txqs), in idpf_vport_init_fast_path_txqs()
1115 if (!vport->txqs) in idpf_vport_init_fast_path_txqs()
1122 vport->txqs[k] = tx_grp->txqs[j]; in idpf_vport_init_fast_path_txqs()
1123 vport->txqs[k]->idx = k; in idpf_vport_init_fast_path_txqs()
[all …]
idpf_virtchnl.c
755 idpf_queue_set(SW_MARKER, vport->txqs[i]); in idpf_wait_for_marker_event()
763 idpf_queue_clear(POLL_MODE, vport->txqs[i]); in idpf_wait_for_marker_event()
1105 tx_qgrp->txqs[j]->tail = in __idpf_queue_reg_init()
1456 cpu_to_le32(tx_qgrp->txqs[j]->q_id); in idpf_send_config_tx_queues_msg()
1462 cpu_to_le16(tx_qgrp->txqs[j]->desc_count); in idpf_send_config_tx_queues_msg()
1464 cpu_to_le64(tx_qgrp->txqs[j]->dma); in idpf_send_config_tx_queues_msg()
1466 struct idpf_tx_queue *q = tx_qgrp->txqs[j]; in idpf_send_config_tx_queues_msg()
1731 qc[k].start_queue_id = cpu_to_le32(tx_qgrp->txqs[j]->q_id); in idpf_send_ena_dis_queues_msg()
1876 vqv[k].queue_id = cpu_to_le32(tx_qgrp->txqs[j]->q_id); in idpf_send_map_unmap_queue_vector_msg()
1885 cpu_to_le16(tx_qgrp->txqs[j]->q_vector->v_idx); in idpf_send_map_unmap_queue_vector_msg()
[all …]
/linux-6.12.1/drivers/net/ethernet/intel/ice/
ice_base.c
891 u8 buf_len = struct_size(qg_buf, txqs, 1); in ice_vsi_cfg_txq()
907 qg_buf->txqs[0].txq_id = cpu_to_le16(pf_q); in ice_vsi_cfg_txq()
908 ice_set_ctx(hw, (u8 *)&tlan_ctx, qg_buf->txqs[0].txq_ctx, in ice_vsi_cfg_txq()
944 txq = &qg_buf->txqs[0]; in ice_vsi_cfg_txq()
954 DEFINE_RAW_FLEX(struct ice_aqc_add_tx_qgrp, qg_buf, txqs, 1); in ice_vsi_cfg_single_txq()
976 DEFINE_RAW_FLEX(struct ice_aqc_add_tx_qgrp, qg_buf, txqs, 1); in ice_vsi_cfg_txqs()
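
The ice hits lean on the flexible-array helpers from <linux/overflow.h>: struct_size() computes an overflow-checked allocation size for a struct ending in a flex array, and DEFINE_RAW_FLEX() declares an on-stack instance with a fixed number of trailing elements, which is how ice builds its one-queue TX-queue-group buffers above. A generic sketch with a made-up struct:

#include <linux/overflow.h>
#include <linux/slab.h>
#include <linux/types.h>

struct my_txq_ent {
	__le16 txq_id;
};

struct my_qgrp {
	u16 num_txqs;
	struct my_txq_ent txqs[];	/* flexible array member */
};

static struct my_qgrp *my_qgrp_alloc(u16 n)
{
	/* sizeof(*g) + n * sizeof(g->txqs[0]), with overflow checking */
	struct my_qgrp *g = kzalloc(struct_size(g, txqs, n), GFP_KERNEL);

	if (g)
		g->num_txqs = n;
	return g;
}

static void my_cfg_one_txq(void)
{
	/* on-stack equivalent for exactly one trailing element */
	DEFINE_RAW_FLEX(struct my_qgrp, qg, txqs, 1);

	qg->num_txqs = 1;
	qg->txqs[0].txq_id = cpu_to_le16(0);
}
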
/linux-6.12.1/drivers/net/ethernet/netronome/nfp/
nfp_net_repr.h
102 nfp_repr_alloc_mqs(struct nfp_app *app, unsigned int txqs, unsigned int rxqs);
/linux-6.12.1/include/linux/
etherdevice.h
56 struct net_device *alloc_etherdev_mqs(int sizeof_priv, unsigned int txqs,
62 unsigned int txqs,
/linux-6.12.1/net/ethernet/
eth.c
379 struct net_device *alloc_etherdev_mqs(int sizeof_priv, unsigned int txqs, in alloc_etherdev_mqs() argument
383 ether_setup, txqs, rxqs); in alloc_etherdev_mqs()
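
As the eth.c hit shows, alloc_etherdev_mqs() is a thin wrapper that passes ether_setup() and the two queue counts through to alloc_netdev_mqs(); the familiar alloc_etherdev(sizeof_priv) is the single-queue special case of the same call. A minimal unmanaged sketch (names are illustrative; the caller owns the free):

#include <linux/etherdevice.h>

struct myeth_priv {
	int dummy;	/* hypothetical private state */
};

static struct net_device *myeth_create(unsigned int ntxq, unsigned int nrxq)
{
	struct net_device *ndev;

	ndev = alloc_etherdev_mqs(sizeof(struct myeth_priv), ntxq, nrxq);
	if (!ndev)
		return NULL;

	/* unmanaged allocation: pair with free_netdev() (after
	 * unregister_netdev() if the device got registered) */
	return ndev;
}
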
/linux-6.12.1/drivers/net/can/dev/
dev.c
250 unsigned int txqs, unsigned int rxqs) in alloc_candev_mqs() argument
277 txqs, rxqs); in alloc_candev_mqs()
/linux-6.12.1/include/linux/can/
dev.h
174 unsigned int txqs, unsigned int rxqs);
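
alloc_candev_mqs() follows the same shape for CAN devices, with one extra parameter: the maximum number of echo skbs the device may hold in flight. The CAN core requires struct can_priv to be the first member of the driver's private struct. A hedged sketch with illustrative queue counts:

#include <linux/can/dev.h>

struct mycan_priv {
	struct can_priv can;	/* must come first for the CAN core */
	/* driver-specific state ... */
};

static struct net_device *mycan_create(void)
{
	/* up to 4 echo skbs in flight, 2 TX queues, 1 RX queue */
	return alloc_candev_mqs(sizeof(struct mycan_priv), 4, 2, 1);
}
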
