Lines matching +full:1 +full:q — an OpenGrok-style full-text search over the ionic Ethernet driver's TX/RX datapath (drivers/net/ethernet/pensando/ionic/ionic_txrx.c); each hit shows the source line number, the matched line, and its enclosing function.
15 static dma_addr_t ionic_tx_map_single(struct ionic_queue *q,
18 static dma_addr_t ionic_tx_map_frag(struct ionic_queue *q,
22 static void ionic_tx_desc_unmap_bufs(struct ionic_queue *q,
25 static void ionic_tx_clean(struct ionic_queue *q,
30 static inline void ionic_txq_post(struct ionic_queue *q, bool ring_dbell) in ionic_txq_post() argument
32 ionic_q_post(q, ring_dbell); in ionic_txq_post()
35 static inline void ionic_rxq_post(struct ionic_queue *q, bool ring_dbell) in ionic_rxq_post() argument
37 ionic_q_post(q, ring_dbell); in ionic_rxq_post()
40 bool ionic_txq_poke_doorbell(struct ionic_queue *q) in ionic_txq_poke_doorbell() argument
46 netdev = q->lif->netdev; in ionic_txq_poke_doorbell()
47 netdev_txq = netdev_get_tx_queue(netdev, q->index); in ionic_txq_poke_doorbell()
51 if (q->tail_idx == q->head_idx) { in ionic_txq_poke_doorbell()
57 then = q->dbell_jiffies; in ionic_txq_poke_doorbell()
60 if (dif > q->dbell_deadline) { in ionic_txq_poke_doorbell()
61 ionic_dbell_ring(q->lif->kern_dbpage, q->hw_type, in ionic_txq_poke_doorbell()
62 q->dbval | q->head_idx); in ionic_txq_poke_doorbell()
64 q->dbell_jiffies = now; in ionic_txq_poke_doorbell()
72 bool ionic_rxq_poke_doorbell(struct ionic_queue *q) in ionic_rxq_poke_doorbell() argument
78 if (q->tail_idx == q->head_idx) in ionic_rxq_poke_doorbell()
82 then = q->dbell_jiffies; in ionic_rxq_poke_doorbell()
85 if (dif > q->dbell_deadline) { in ionic_rxq_poke_doorbell()
86 ionic_dbell_ring(q->lif->kern_dbpage, q->hw_type, in ionic_rxq_poke_doorbell()
87 q->dbval | q->head_idx); in ionic_rxq_poke_doorbell()
89 q->dbell_jiffies = now; in ionic_rxq_poke_doorbell()
91 dif = 2 * q->dbell_deadline; in ionic_rxq_poke_doorbell()
95 q->dbell_deadline = dif; in ionic_rxq_poke_doorbell()
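
The two poke helpers above implement deadline-based doorbell coalescing: if descriptors are outstanding (tail != head) and more than dbell_deadline jiffies have elapsed since the last ring, the doorbell is re-rung with the current head index; the RX variant then doubles the deadline up to a cap, so a slow queue gets poked progressively less often. A minimal standalone sketch of that policy, with ring_doorbell() and the struct fields as hypothetical stand-ins for the driver's real state:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Stand-in for the driver's per-queue doorbell state; field names mirror
 * the fragments above, but this is a sketch, not the real struct. */
struct dbell_state {
	uint16_t head_idx, tail_idx;      /* producer / consumer indexes  */
	unsigned long dbell_jiffies;      /* when we last rang            */
	unsigned long dbell_deadline;     /* max tolerated silence, ticks */
	unsigned long max_deadline;       /* backoff cap                  */
};

static void ring_doorbell(uint16_t idx)   /* hypothetical MMIO write */
{
	printf("doorbell: head_idx=%u\n", idx);
}

/* Returns true if work is still outstanding (caller re-arms its timer). */
static bool poke_doorbell(struct dbell_state *q, unsigned long now)
{
	unsigned long dif = now - q->dbell_jiffies; /* wrap-safe unsigned math */

	if (q->tail_idx == q->head_idx)             /* nothing posted, idle */
		return false;

	if (dif > q->dbell_deadline) {
		ring_doorbell(q->head_idx);         /* re-notify hardware */
		q->dbell_jiffies = now;

		dif = 2 * q->dbell_deadline;        /* exponential backoff */
		if (dif > q->max_deadline)
			dif = q->max_deadline;
		q->dbell_deadline = dif;
	}
	return true;
}

int main(void)
{
	struct dbell_state q = { .head_idx = 3, .tail_idx = 0,
				 .dbell_jiffies = 0, .dbell_deadline = 4,
				 .max_deadline = 64 };

	for (unsigned long t = 0; t < 40; t++)
		poke_doorbell(&q, t);               /* rings at t=5, 14, 31 */
	return 0;
}

The unsigned subtraction now - then stays correct across jiffies wraparound, which is why the driver compares a difference against the deadline rather than comparing absolute times.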
101 static inline struct ionic_txq_sg_elem *ionic_tx_sg_elems(struct ionic_queue *q) in ionic_tx_sg_elems() argument
103 if (likely(q->sg_desc_size == sizeof(struct ionic_txq_sg_desc_v1))) in ionic_tx_sg_elems()
104 return q->txq_sgl_v1[q->head_idx].elems; in ionic_tx_sg_elems()
106 return q->txq_sgl[q->head_idx].elems; in ionic_tx_sg_elems()
110 struct ionic_queue *q) in q_to_ndq() argument
112 return netdev_get_tx_queue(netdev, q->index); in q_to_ndq()
125 static void __ionic_rx_put_buf(struct ionic_queue *q, in __ionic_rx_put_buf() argument
132 page_pool_put_full_page(q->page_pool, buf_info->page, recycle_direct); in __ionic_rx_put_buf()
139 static void ionic_rx_put_buf(struct ionic_queue *q, in ionic_rx_put_buf() argument
142 __ionic_rx_put_buf(q, buf_info, false); in ionic_rx_put_buf()
145 static void ionic_rx_put_buf_direct(struct ionic_queue *q, in ionic_rx_put_buf_direct() argument
148 __ionic_rx_put_buf(q, buf_info, true); in ionic_rx_put_buf_direct()
151 static void ionic_rx_add_skb_frag(struct ionic_queue *q, in ionic_rx_add_skb_frag() argument
158 page_pool_dma_sync_for_cpu(q->page_pool, in ionic_rx_add_skb_frag()
175 static struct sk_buff *ionic_rx_build_skb(struct ionic_queue *q, in ionic_rx_build_skb() argument
190 skb = napi_get_frags(&q_to_qcq(q)->napi); in ionic_rx_build_skb()
193 dev_name(q->dev), q->name); in ionic_rx_build_skb()
194 q_to_rx_stats(q)->alloc_err++; in ionic_rx_build_skb()
207 ionic_rx_add_skb_frag(q, skb, buf_info, headroom, frag_len, synced); in ionic_rx_build_skb()
215 ionic_rx_add_skb_frag(q, skb, buf_info, 0, frag_len, synced); in ionic_rx_build_skb()
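
ionic_rx_build_skb() assembles a multi-buffer packet by attaching page_pool pages as skb frags: the first fragment is offset by any headroom (nonzero when XDP reserved space in front of the packet), later fragments start at offset zero, and each page is DMA-synced for the CPU unless XDP already synced it. A hedged sketch of that assembly pattern; rx_build_skb_sketch and its parameters are hypothetical:

/* Sketch of frag-based RX skb assembly: take a frag-list skb from NAPI,
 * attach each page_pool page as a frag (only the first carries headroom),
 * and let napi_gro_frags() recycle the pages when the skb is consumed. */
static struct sk_buff *rx_build_skb_sketch(struct napi_struct *napi,
					   struct page_pool *pool,
					   struct page **pages, int npages,
					   unsigned int headroom,
					   const unsigned int *lens)
{
	struct sk_buff *skb = napi_get_frags(napi);
	int i;

	if (unlikely(!skb))
		return NULL;

	for (i = 0; i < npages; i++) {
		unsigned int off = i == 0 ? headroom : 0;

		page_pool_dma_sync_for_cpu(pool, pages[i], off, lens[i]);
		skb_add_rx_frag(skb, i, pages[i], off, lens[i],
				PAGE_SIZE); /* truesize; the driver passes
					     * the buffer's actual size */
	}
	return skb;	/* caller finishes with napi_gro_frags(napi) */
}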
227 struct ionic_queue *q, in ionic_rx_copybreak() argument
235 struct device *dev = q->dev; in ionic_rx_copybreak()
241 skb = napi_alloc_skb(&q_to_qcq(q)->napi, len); in ionic_rx_copybreak()
244 dev_name(dev), q->name); in ionic_rx_copybreak()
245 q_to_rx_stats(q)->alloc_err++; in ionic_rx_copybreak()
251 page_pool_dma_sync_for_cpu(q->page_pool, in ionic_rx_copybreak()
262 ionic_rx_put_buf_direct(q, buf_info); in ionic_rx_copybreak()
265 ionic_rx_put_buf_direct(q, buf_info); in ionic_rx_copybreak()
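
ionic_rx_copybreak() handles the small-packet case: rather than attaching the page as a frag and pinning it until the skb dies, it copies the payload into a fresh linear skb and recycles the page immediately ("direct" recycle is safe because this runs in NAPI softirq context). A sketch of the idea; rx_copybreak_sketch is hypothetical:

/* Copybreak sketch: for small packets it is cheaper to memcpy into a
 * fresh linear skb and recycle the page at once than to hold the page
 * as a frag for the skb's whole lifetime. */
static struct sk_buff *rx_copybreak_sketch(struct napi_struct *napi,
					   struct page_pool *pool,
					   struct page *page,
					   unsigned int offset,
					   unsigned int len)
{
	struct sk_buff *skb = napi_alloc_skb(napi, len);

	if (unlikely(!skb))
		return NULL;

	page_pool_dma_sync_for_cpu(pool, page, offset, len);
	skb_put_data(skb, page_address(page) + offset, len);

	/* in softirq context, so direct (lockless) recycle is allowed */
	page_pool_put_full_page(pool, page, true);
	return skb;
}

The threshold that selects this path appears further down in the listing: use_copybreak = len <= q->lif->rx_copybreak. The real function also copes with multi-buffer packets; the sketch shows only the single-page fast path.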
270 static void ionic_xdp_tx_desc_clean(struct ionic_queue *q, in ionic_xdp_tx_desc_clean() argument
288 ionic_tx_desc_unmap_bufs(q, desc_info); in ionic_xdp_tx_desc_clean()
300 static int ionic_xdp_post_frame(struct ionic_queue *q, struct xdp_frame *frame, in ionic_xdp_post_frame() argument
312 desc_info = &q->tx_info[q->head_idx]; in ionic_xdp_post_frame()
313 desc = &q->txq[q->head_idx]; in ionic_xdp_post_frame()
315 stats = q_to_tx_stats(q); in ionic_xdp_post_frame()
320 dma_sync_single_for_device(q->dev, dma_addr, in ionic_xdp_post_frame()
323 dma_addr = ionic_tx_map_single(q, frame->data, len); in ionic_xdp_post_frame()
333 desc_info->nbufs = 1; in ionic_xdp_post_frame()
344 bi = &buf_info[1]; in ionic_xdp_post_frame()
347 elem = ionic_tx_sg_elems(q); in ionic_xdp_post_frame()
354 dma_sync_single_for_device(q->dev, dma_addr, in ionic_xdp_post_frame()
358 dma_addr = ionic_tx_map_frag(q, frag, 0, in ionic_xdp_post_frame()
360 if (dma_mapping_error(q->dev, dma_addr)) { in ionic_xdp_post_frame()
361 ionic_tx_desc_unmap_bufs(q, desc_info); in ionic_xdp_post_frame()
378 0, (desc_info->nbufs - 1), buf_info->dma_addr); in ionic_xdp_post_frame()
388 ionic_txq_post(q, ring_doorbell); in ionic_xdp_post_frame()
411 * TxRx queue pairs 0..n-1 on cpus 1..n. We try to keep with that in ionic_xdp_xmit()
416 qi = cpu ? (cpu - 1) % lif->nxqs : cpu; in ionic_xdp_xmit()
418 txq = &lif->txqcqs[qi]->q; in ionic_xdp_xmit()
426 1, 1)) { in ionic_xdp_xmit()
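
ionic_xdp_xmit() picks its TX queue from the executing CPU with qi = cpu ? (cpu - 1) % lif->nxqs : cpu, matching the affinity scheme described in the comment fragment above (TxRx pair i serviced by cpu i+1, with cpu 0 falling back to queue 0). A self-contained demo of the mapping:

#include <stdio.h>

/* CPU-to-TX-queue mapping used by the XDP transmit path above. */
static unsigned int cpu_to_txq(unsigned int cpu, unsigned int nxqs)
{
	return cpu ? (cpu - 1) % nxqs : cpu;	/* cpu 0 -> queue 0 */
}

int main(void)
{
	unsigned int nxqs = 4;

	for (unsigned int cpu = 0; cpu < 10; cpu++)
		printf("cpu %u -> txq %u\n", cpu, cpu_to_txq(cpu, nxqs));
	/* cpus 1..4 map to queues 0..3; cpu 5 wraps back to queue 0 */
	return 0;
}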
454 static void ionic_xdp_rx_unlink_bufs(struct ionic_queue *q, in ionic_xdp_rx_unlink_bufs() argument
479 int nbufs = 1; in ionic_run_xdp()
558 1, 1)) { in ionic_run_xdp()
603 static void ionic_rx_clean(struct ionic_queue *q, in ionic_rx_clean() argument
608 struct net_device *netdev = q->lif->netdev; in ionic_rx_clean()
609 struct ionic_qcq *qcq = q_to_qcq(q); in ionic_rx_clean()
617 stats = q_to_rx_stats(q); in ionic_rx_clean()
624 dev_dbg(q->dev, "q%d drop comp->status %d comp->len %d desc->len %d\n", in ionic_rx_clean()
625 q->index, comp->status, comp->len, q->rxq[q->head_idx].len); in ionic_rx_clean()
636 if (ionic_run_xdp(stats, netdev, xdp_prog, q, desc_info->bufs, len)) in ionic_rx_clean()
642 use_copybreak = len <= q->lif->rx_copybreak; in ionic_rx_clean()
644 skb = ionic_rx_copybreak(netdev, q, desc_info, in ionic_rx_clean()
648 skb = ionic_rx_build_skb(q, desc_info, headroom, len, in ionic_rx_clean()
656 skb_record_rx_queue(skb, q->index); in ionic_rx_clean()
696 if (unlikely(q->features & IONIC_RXQ_F_HWSTAMP)) { in ionic_rx_clean()
709 skb_hwtstamps(skb)->hwtstamp = ionic_lif_phc_ktime(q->lif, hwstamp); in ionic_rx_clean()
725 struct ionic_queue *q = cq->bound_q; in __ionic_rx_service() local
734 if (q->tail_idx == q->head_idx) in __ionic_rx_service()
737 if (q->tail_idx != le16_to_cpu(comp->comp_index)) in __ionic_rx_service()
740 desc_info = &q->rx_info[q->tail_idx]; in __ionic_rx_service()
741 q->tail_idx = (q->tail_idx + 1) & (q->num_descs - 1); in __ionic_rx_service()
743 /* clean the related q entry, only one per qc completion */ in __ionic_rx_service()
744 ionic_rx_clean(q, desc_info, comp, xdp_prog); in __ionic_rx_service()
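
Every ring in this file advances its indexes with (idx + 1) & (num_descs - 1), a branch-free modulo that is valid only because num_descs is a power of two. A compilable illustration:

#include <stdint.h>
#include <stdio.h>

struct ring {
	uint16_t head_idx, tail_idx;
	uint16_t num_descs;	/* must be a power of two */
};

/* Masked increment: equivalent to (idx + 1) % num_descs, without the
 * divide, when num_descs is a power of two. */
static uint16_t ring_next(const struct ring *r, uint16_t idx)
{
	return (idx + 1) & (r->num_descs - 1);
}

int main(void)
{
	struct ring r = { .num_descs = 8 };
	uint16_t idx = 6;

	for (int i = 0; i < 4; i++) {
		printf("%u -> ", idx);
		idx = ring_next(&r, idx);
	}
	printf("%u\n", idx);	/* prints: 6 -> 7 -> 0 -> 1 -> 2 */
	return 0;
}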
754 static inline void ionic_write_cmb_desc(struct ionic_queue *q, in ionic_write_cmb_desc() argument
760 if (unlikely(q_to_qcq(q)->flags & IONIC_QCQ_F_CMB_RINGS)) in ionic_write_cmb_desc()
761 memcpy_toio(&q->cmb_txq[q->head_idx], desc, sizeof(q->cmb_txq[0])); in ionic_write_cmb_desc()
764 void ionic_rx_fill(struct ionic_queue *q, struct bpf_prog *xdp_prog) in ionic_rx_fill() argument
766 struct net_device *netdev = q->lif->netdev; in ionic_rx_fill()
783 n_fill = ionic_q_space_avail(q); in ionic_rx_fill()
786 q->num_descs / IONIC_RX_FILL_DIV); in ionic_rx_fill()
811 desc = &q->rxq[q->head_idx]; in ionic_rx_fill()
812 desc_info = &q->rx_info[q->head_idx]; in ionic_rx_fill()
820 buf_info->page = page_pool_alloc(q->page_pool, in ionic_rx_fill()
835 /* fill sg descriptors - buf[1..n] */ in ionic_rx_fill()
836 sg_elem = q->rxq_sgl[q->head_idx].elems; in ionic_rx_fill()
837 for (j = 0; remain_len > 0 && j < q->max_sg_elems; j++, sg_elem++) { in ionic_rx_fill()
842 ionic_rx_put_buf_direct(q, buf_info); in ionic_rx_fill()
847 buf_info->page = page_pool_alloc(q->page_pool, in ionic_rx_fill()
865 if (j < q->max_sg_elems) in ionic_rx_fill()
868 desc->opcode = (nfrags > 1) ? IONIC_RXQ_DESC_OPCODE_SG : in ionic_rx_fill()
872 ionic_write_cmb_desc(q, desc); in ionic_rx_fill()
874 ionic_rxq_post(q, false); in ionic_rx_fill()
877 ionic_dbell_ring(q->lif->kern_dbpage, q->hw_type, in ionic_rx_fill()
878 q->dbval | q->head_idx); in ionic_rx_fill()
880 q->dbell_deadline = IONIC_RX_MIN_DOORBELL_DEADLINE; in ionic_rx_fill()
881 q->dbell_jiffies = jiffies; in ionic_rx_fill()
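
ionic_rx_fill() refills in batches: it computes the free space, bails out when less than a threshold derived from the ring size is available (the q->num_descs / IONIC_RX_FILL_DIV term above), posts everything it can, and only then rings the doorbell once and re-arms the deadline. A sketch of the batching decision; FILL_DIV = 8 is an assumption standing in for IONIC_RX_FILL_DIV:

#include <stdio.h>

#define FILL_DIV 8	/* assumption standing in for IONIC_RX_FILL_DIV */

/* Returns how many RX descriptors to post: none if less than 1/FILL_DIV
 * of the ring is free (so descriptors and the doorbell are batched),
 * otherwise everything available. */
static unsigned int rx_fill_budget(unsigned int space_avail,
				   unsigned int num_descs)
{
	if (space_avail < num_descs / FILL_DIV)
		return 0;	/* not worth a doorbell yet */
	return space_avail;
}

int main(void)
{
	printf("%u\n", rx_fill_budget(3, 64));	/* 0: below the threshold */
	printf("%u\n", rx_fill_budget(17, 64));	/* 17: post the whole batch */
	return 0;
}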
884 void ionic_rx_empty(struct ionic_queue *q) in ionic_rx_empty() argument
889 for (i = 0; i < q->num_descs; i++) { in ionic_rx_empty()
890 desc_info = &q->rx_info[i]; in ionic_rx_empty()
892 ionic_rx_put_buf(q, &desc_info->bufs[j]); in ionic_rx_empty()
896 q->head_idx = 0; in ionic_rx_empty()
897 q->tail_idx = 0; in ionic_rx_empty()
910 lif = qcq->q.lif; in ionic_dim_update()
960 ionic_txq_poke_doorbell(&qcq->q); in ionic_tx_napi()
976 struct ionic_queue *q = cq->bound_q; in ionic_rx_cq_service() local
983 xdp_prog = READ_ONCE(q->xdp_prog); in ionic_rx_cq_service()
985 if (cq->tail_idx == cq->num_descs - 1) in ionic_rx_cq_service()
988 cq->tail_idx = (cq->tail_idx + 1) & (cq->num_descs - 1); in ionic_rx_cq_service()
993 ionic_rx_fill(q, xdp_prog); in ionic_rx_cq_service()
1025 ionic_rxq_poke_doorbell(&qcq->q); in ionic_rx_napi()
1067 ionic_rxq_poke_doorbell(&rxqcq->q); in ionic_txrx_napi()
1069 ionic_txq_poke_doorbell(&txqcq->q); in ionic_txrx_napi()
1075 static dma_addr_t ionic_tx_map_single(struct ionic_queue *q, in ionic_tx_map_single() argument
1078 struct device *dev = q->dev; in ionic_tx_map_single()
1084 dev_name(dev), q->name); in ionic_tx_map_single()
1085 q_to_tx_stats(q)->dma_map_err++; in ionic_tx_map_single()
1091 static dma_addr_t ionic_tx_map_frag(struct ionic_queue *q, in ionic_tx_map_frag() argument
1095 struct device *dev = q->dev; in ionic_tx_map_frag()
1101 dev_name(dev), q->name); in ionic_tx_map_frag()
1102 q_to_tx_stats(q)->dma_map_err++; in ionic_tx_map_frag()
1108 static int ionic_tx_map_skb(struct ionic_queue *q, struct sk_buff *skb, in ionic_tx_map_skb() argument
1112 struct device *dev = q->dev; in ionic_tx_map_skb()
1118 dma_addr = ionic_tx_map_single(q, skb->data, skb_headlen(skb)); in ionic_tx_map_skb()
1128 dma_addr = ionic_tx_map_frag(q, frag, 0, skb_frag_size(frag)); in ionic_tx_map_skb()
1136 desc_info->nbufs = 1 + nfrags; in ionic_tx_map_skb()
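
ionic_tx_map_skb() DMA-maps the linear head plus every page fragment, records nbufs = 1 + nfrags, and unwinds all earlier mappings if any single map fails; the unmap helper above it walks the same list in reverse (dma_unmap_single for buf 0, dma_unmap_page for bufs 1..n). A hedged sketch of map-with-unwind; buf_sketch stands in for the driver's ionic_buf_info:

struct buf_sketch { dma_addr_t dma_addr; unsigned int len; };

/* Map skb head with dma_map_single() and each frag with
 * skb_frag_dma_map(); on failure, undo everything mapped so far. */
static int tx_map_skb_sketch(struct device *dev, struct sk_buff *skb,
			     struct buf_sketch *bufs)
{
	unsigned int nfrags = skb_shinfo(skb)->nr_frags;
	skb_frag_t *frag;
	int i;

	bufs[0].len = skb_headlen(skb);
	bufs[0].dma_addr = dma_map_single(dev, skb->data,
					  bufs[0].len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, bufs[0].dma_addr))
		return -EIO;

	for (i = 0; i < nfrags; i++) {
		frag = &skb_shinfo(skb)->frags[i];
		bufs[i + 1].len = skb_frag_size(frag);
		bufs[i + 1].dma_addr = skb_frag_dma_map(dev, frag, 0,
							bufs[i + 1].len,
							DMA_TO_DEVICE);
		if (dma_mapping_error(dev, bufs[i + 1].dma_addr))
			goto unwind;
	}
	return 0;	/* caller records nbufs = 1 + nfrags */

unwind:
	while (--i >= 0)	/* unmap frags already mapped, in reverse */
		dma_unmap_page(dev, bufs[i + 1].dma_addr,
			       bufs[i + 1].len, DMA_TO_DEVICE);
	dma_unmap_single(dev, bufs[0].dma_addr, bufs[0].len, DMA_TO_DEVICE);
	return -EIO;
}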
1153 static void ionic_tx_desc_unmap_bufs(struct ionic_queue *q, in ionic_tx_desc_unmap_bufs() argument
1157 struct device *dev = q->dev; in ionic_tx_desc_unmap_bufs()
1166 for (i = 1; i < desc_info->nbufs; i++, buf_info++) in ionic_tx_desc_unmap_bufs()
1173 static void ionic_tx_clean(struct ionic_queue *q, in ionic_tx_clean() argument
1178 struct ionic_tx_stats *stats = q_to_tx_stats(q); in ionic_tx_clean()
1179 struct ionic_qcq *qcq = q_to_qcq(q); in ionic_tx_clean()
1183 ionic_xdp_tx_desc_clean(q->partner, desc_info, in_napi); in ionic_tx_clean()
1186 if (unlikely(__netif_subqueue_stopped(q->lif->netdev, q->index))) in ionic_tx_clean()
1187 netif_wake_subqueue(q->lif->netdev, q->index); in ionic_tx_clean()
1192 ionic_tx_desc_unmap_bufs(q, desc_info); in ionic_tx_clean()
1198 if (unlikely(ionic_txq_hwstamp_enabled(q))) { in ionic_tx_clean()
1213 hwts.hwtstamp = ionic_lif_phc_ktime(q->lif, hwstamp); in ionic_tx_clean()
1228 napi_consume_skb(skb, likely(in_napi) ? 1 : 0); in ionic_tx_clean()
1237 struct ionic_queue *q = cq->bound_q; in ionic_tx_service() local
1248 /* clean the related q entries, there could be in ionic_tx_service()
1249 * several q entries completed for each cq completion in ionic_tx_service()
1252 desc_info = &q->tx_info[q->tail_idx]; in ionic_tx_service()
1254 index = q->tail_idx; in ionic_tx_service()
1255 q->tail_idx = (q->tail_idx + 1) & (q->num_descs - 1); in ionic_tx_service()
1256 ionic_tx_clean(q, desc_info, comp, in_napi); in ionic_tx_service()
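
As the comment fragment above notes, one TX completion can retire several descriptors: the service loop keeps advancing tail_idx and cleaning until it has consumed the descriptor named by comp->comp_index. A minimal model of that walk:

#include <stdint.h>
#include <stdio.h>

/* Clean descriptors from tail_idx up to and including comp_index,
 * returning how many were retired by this one completion. */
static unsigned int tx_service_sketch(uint16_t *tail_idx,
				      uint16_t comp_index,
				      uint16_t num_descs)
{
	unsigned int cleaned = 0;
	uint16_t index;

	do {
		index = *tail_idx;
		*tail_idx = (*tail_idx + 1) & (num_descs - 1);
		cleaned++;		/* ionic_tx_clean() runs here */
	} while (index != comp_index);

	return cleaned;
}

int main(void)
{
	uint16_t tail = 5;

	/* completion says everything through descriptor 9 is done */
	printf("cleaned %u, tail now %u\n",
	       tx_service_sketch(&tail, 9, 16), tail);	/* cleaned 5, tail 10 */
	return 0;
}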
1282 if (cq->tail_idx == cq->num_descs - 1) in ionic_tx_cq_service()
1284 cq->tail_idx = (cq->tail_idx + 1) & (cq->num_descs - 1); in ionic_tx_cq_service()
1291 struct ionic_queue *q = cq->bound_q; in ionic_tx_cq_service() local
1293 if (likely(!ionic_txq_hwstamp_enabled(q))) in ionic_tx_cq_service()
1294 netif_txq_completed_wake(q_to_ndq(q->lif->netdev, q), in ionic_tx_cq_service()
1296 ionic_q_space_avail(q), in ionic_tx_cq_service()
1313 void ionic_tx_empty(struct ionic_queue *q) in ionic_tx_empty() argument
1320 while (q->head_idx != q->tail_idx) { in ionic_tx_empty()
1321 desc_info = &q->tx_info[q->tail_idx]; in ionic_tx_empty()
1323 q->tail_idx = (q->tail_idx + 1) & (q->num_descs - 1); in ionic_tx_empty()
1324 ionic_tx_clean(q, desc_info, NULL, false); in ionic_tx_empty()
1332 if (likely(!ionic_txq_hwstamp_enabled(q))) { in ionic_tx_empty()
1333 struct netdev_queue *ndq = q_to_ndq(q->lif->netdev, q); in ionic_tx_empty()
1385 static void ionic_tx_tso_post(struct net_device *netdev, struct ionic_queue *q, in ionic_tx_tso_post() argument
1409 ionic_write_cmb_desc(q, desc); in ionic_tx_tso_post()
1413 if (likely(!ionic_txq_hwstamp_enabled(q))) in ionic_tx_tso_post()
1414 netdev_tx_sent_queue(q_to_ndq(netdev, q), skb->len); in ionic_tx_tso_post()
1415 ionic_txq_post(q, false); in ionic_tx_tso_post()
1417 ionic_txq_post(q, done); in ionic_tx_tso_post()
1421 static int ionic_tx_tso(struct net_device *netdev, struct ionic_queue *q, in ionic_tx_tso() argument
1424 struct ionic_tx_stats *stats = q_to_tx_stats(q); in ionic_tx_tso()
1447 desc_info = &q->tx_info[q->head_idx]; in ionic_tx_tso()
1449 if (unlikely(ionic_tx_map_skb(q, skb, desc_info))) in ionic_tx_tso()
1475 ionic_tx_desc_unmap_bufs(q, desc_info); in ionic_tx_tso()
1512 desc = &q->txq[q->head_idx]; in ionic_tx_tso()
1513 elem = ionic_tx_sg_elems(q); in ionic_tx_tso()
1531 ionic_tx_tso_post(netdev, q, desc, skb, desc_addr, desc_nsge, in ionic_tx_tso()
1536 desc_info = &q->tx_info[q->head_idx]; in ionic_tx_tso()
1548 static void ionic_tx_calc_csum(struct ionic_queue *q, struct sk_buff *skb, in ionic_tx_calc_csum() argument
1551 struct ionic_txq_desc *desc = &q->txq[q->head_idx]; in ionic_tx_calc_csum()
1553 struct ionic_tx_stats *stats = q_to_tx_stats(q); in ionic_tx_calc_csum()
1579 ionic_write_cmb_desc(q, desc); in ionic_tx_calc_csum()
1587 static void ionic_tx_calc_no_csum(struct ionic_queue *q, struct sk_buff *skb, in ionic_tx_calc_no_csum() argument
1590 struct ionic_txq_desc *desc = &q->txq[q->head_idx]; in ionic_tx_calc_no_csum()
1592 struct ionic_tx_stats *stats = q_to_tx_stats(q); in ionic_tx_calc_no_csum()
1618 ionic_write_cmb_desc(q, desc); in ionic_tx_calc_no_csum()
1623 static void ionic_tx_skb_frags(struct ionic_queue *q, struct sk_buff *skb, in ionic_tx_skb_frags() argument
1626 struct ionic_buf_info *buf_info = &desc_info->bufs[1]; in ionic_tx_skb_frags()
1627 struct ionic_tx_stats *stats = q_to_tx_stats(q); in ionic_tx_skb_frags()
1631 elem = ionic_tx_sg_elems(q); in ionic_tx_skb_frags()
1640 static int ionic_tx(struct net_device *netdev, struct ionic_queue *q, in ionic_tx() argument
1643 struct ionic_tx_desc_info *desc_info = &q->tx_info[q->head_idx]; in ionic_tx()
1644 struct ionic_tx_stats *stats = q_to_tx_stats(q); in ionic_tx()
1647 if (unlikely(ionic_tx_map_skb(q, skb, desc_info))) in ionic_tx()
1654 ionic_tx_calc_csum(q, skb, desc_info); in ionic_tx()
1656 ionic_tx_calc_no_csum(q, skb, desc_info); in ionic_tx()
1659 ionic_tx_skb_frags(q, skb, desc_info); in ionic_tx()
1665 if (likely(!ionic_txq_hwstamp_enabled(q))) { in ionic_tx()
1666 struct netdev_queue *ndq = q_to_ndq(netdev, q); in ionic_tx()
1668 if (unlikely(!ionic_q_has_space(q, MAX_SKB_FRAGS + 1))) in ionic_tx()
1673 ionic_txq_post(q, ring_dbell); in ionic_tx()
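
ionic_tx() defers the doorbell under xmit_more batching: __netdev_tx_sent_queue() does the BQL byte accounting and returns true only when the doorbell actually needs to be rung (end of a burst, or the queue just stopped). Sketch of the posting tail, with ring_doorbell_sketch() as a hypothetical MMIO kick:

static void ring_doorbell_sketch(void) { }	/* hypothetical MMIO write */

static netdev_tx_t tx_post_sketch(struct netdev_queue *ndq,
				  struct sk_buff *skb)
{
	/* ...TX descriptor for skb already written at this point... */

	/* __netdev_tx_sent_queue() accounts skb->len to BQL and returns
	 * true when the doorbell must be rung now: either no packet is
	 * queued behind this one, or nobody else will ring it for us */
	if (__netdev_tx_sent_queue(ndq, skb->len, netdev_xmit_more()))
		ring_doorbell_sketch();

	return NETDEV_TX_OK;
}

Deferring the MMIO doorbell write to the last skb of a burst is the point of the ring_dbell flag threaded through ionic_txq_post() in the fragments above.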
1678 static int ionic_tx_descs_needed(struct ionic_queue *q, struct sk_buff *skb) in ionic_tx_descs_needed() argument
1699 ndescs = 1; in ionic_tx_descs_needed()
1703 if (unlikely(nr_frags > q->max_sg_elems)) { in ionic_tx_descs_needed()
1733 /* We add the +1 because we can take buffers for one in ionic_tx_descs_needed()
1737 if (desc_bufs > q->max_sg_elems + 1) { in ionic_tx_descs_needed()
1760 q_to_tx_stats(q)->linearize++; in ionic_tx_descs_needed()
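
ionic_tx_descs_needed() also decides when to give up on scatter-gather: if the skb carries more buffers than one descriptor's SG list can address (q->max_sg_elems, plus one for the head), it flattens the skb and bumps the linearize counter so the cost stays visible in stats. A hedged sketch of that fallback:

/* Linearize fallback: trade a copy (skb_linearize pulls all frags into
 * the linear head) for a send that fits a single descriptor. */
static int tx_descs_needed_sketch(struct sk_buff *skb,
				  unsigned int max_sg_elems,
				  unsigned long *linearize_cnt)
{
	if (skb_shinfo(skb)->nr_frags > max_sg_elems) {
		int err = skb_linearize(skb);	/* may fail with -ENOMEM */

		if (err)
			return err;
		(*linearize_cnt)++;
	}
	return 1;	/* non-TSO case: one descriptor */
}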
1770 struct ionic_queue *q; in ionic_start_hwstamp_xmit() local
1778 q = &lif->hwstamp_txq->q; in ionic_start_hwstamp_xmit()
1779 ndescs = ionic_tx_descs_needed(q, skb); in ionic_start_hwstamp_xmit()
1783 if (unlikely(!ionic_q_has_space(q, ndescs))) in ionic_start_hwstamp_xmit()
1788 err = ionic_tx_tso(netdev, q, skb); in ionic_start_hwstamp_xmit()
1790 err = ionic_tx(netdev, q, skb); in ionic_start_hwstamp_xmit()
1798 q->drop++; in ionic_start_hwstamp_xmit()
1807 struct ionic_queue *q; in ionic_start_xmit() local
1822 q = &lif->txqcqs[queue_index]->q; in ionic_start_xmit()
1824 ndescs = ionic_tx_descs_needed(q, skb); in ionic_start_xmit()
1828 if (!netif_txq_maybe_stop(q_to_ndq(netdev, q), in ionic_start_xmit()
1829 ionic_q_space_avail(q), in ionic_start_xmit()
1834 err = ionic_tx_tso(netdev, q, skb); in ionic_start_xmit()
1836 err = ionic_tx(netdev, q, skb); in ionic_start_xmit()
1844 q->drop++; in ionic_start_xmit()
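
The transmit path closes the flow-control loop: ionic_start_xmit() stops the subqueue when descriptor space runs short (the netif_txq_maybe_stop call above), and the completion side re-wakes it once space returns (netif_txq_completed_wake, earlier in the listing). A standalone model of the stop/wake thresholds:

#include <stdbool.h>
#include <stdio.h>

struct txq_model {
	unsigned int space;	/* free descriptors */
	bool stopped;
};

/* Producer side: refuse the packet and stop the queue when there is not
 * enough room for a worst-case skb. */
static bool maybe_stop(struct txq_model *q, unsigned int needed)
{
	if (q->space < needed) {
		q->stopped = true;	/* stack holds further packets */
		return false;
	}
	return true;
}

/* Completion side: reclaim space and wake once above the threshold. */
static void completed_wake(struct txq_model *q, unsigned int reclaimed,
			   unsigned int wake_thresh)
{
	q->space += reclaimed;
	if (q->stopped && q->space >= wake_thresh)
		q->stopped = false;	/* stack may transmit again */
}

int main(void)
{
	struct txq_model q = { .space = 2 };

	if (!maybe_stop(&q, 18))	/* worst-case skb demand */
		printf("stopped\n");
	completed_wake(&q, 32, 18);
	printf("stopped=%d space=%u\n", q.stopped, q.space);
	return 0;
}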