Home
last modified time | relevance | path

Searched refs: desc_ring (Results 1 – 25 of 31) sorted by relevance

Page: 1 2

/linux-6.12.1/kernel/printk/
Dprintk_ringbuffer.c315 #define DESCS_COUNT(desc_ring) _DESCS_COUNT((desc_ring)->count_bits) argument
316 #define DESCS_COUNT_MASK(desc_ring) (DESCS_COUNT(desc_ring) - 1) argument
322 #define DESC_INDEX(desc_ring, n) ((n) & DESCS_COUNT_MASK(desc_ring)) argument
337 #define DESC_ID_PREV_WRAP(desc_ring, id) \ argument
338 DESC_ID((id) - DESCS_COUNT(desc_ring))
359 static struct prb_desc *to_desc(struct prb_desc_ring *desc_ring, u64 n) in to_desc() argument
361 return &desc_ring->descs[DESC_INDEX(desc_ring, n)]; in to_desc()
368 static struct printk_info *to_info(struct prb_desc_ring *desc_ring, u64 n) in to_info() argument
370 return &desc_ring->infos[DESC_INDEX(desc_ring, n)]; in to_info()
436 static enum desc_state desc_read(struct prb_desc_ring *desc_ring, in desc_read() argument
[all …]
Dprintk_ringbuffer.h91 struct prb_desc_ring desc_ring; member
273 .desc_ring = { \
/linux-6.12.1/drivers/net/ethernet/marvell/octeon_ep_vf/
Doctep_vf_rx.c34 struct octep_vf_oq_desc_hw *desc_ring = oq->desc_ring; in octep_vf_oq_fill_ring_buffers() local
44 desc_ring[i].buffer_ptr = dma_map_page(oq->dev, page, 0, in octep_vf_oq_fill_ring_buffers()
47 if (dma_mapping_error(oq->dev, desc_ring[i].buffer_ptr)) { in octep_vf_oq_fill_ring_buffers()
63 dma_unmap_page(oq->dev, desc_ring[i].buffer_ptr, PAGE_SIZE, DMA_FROM_DEVICE); in octep_vf_oq_fill_ring_buffers()
81 struct octep_vf_oq_desc_hw *desc_ring = oq->desc_ring; in octep_vf_oq_refill() local
94 desc_ring[refill_idx].buffer_ptr = dma_map_page(oq->dev, page, 0, in octep_vf_oq_refill()
96 if (dma_mapping_error(oq->dev, desc_ring[refill_idx].buffer_ptr)) { in octep_vf_oq_refill()
152 oq->desc_ring = dma_alloc_coherent(oq->dev, desc_ring_size, in octep_vf_setup_oq()
155 if (unlikely(!oq->desc_ring)) { in octep_vf_setup_oq()
183 oq->desc_ring, oq->desc_ring_dma); in octep_vf_setup_oq()
[all …]
Doctep_vf_tx.c196 iq->desc_ring = dma_alloc_coherent(iq->dev, desc_ring_size, in octep_vf_setup_iq()
198 if (unlikely(!iq->desc_ring)) { in octep_vf_setup_iq()
246 iq->desc_ring, iq->desc_ring_dma); in octep_vf_setup_iq()
271 if (iq->desc_ring) in octep_vf_free_iq()
273 iq->desc_ring, iq->desc_ring_dma); in octep_vf_free_iq()
Doctep_vf_rx.h217 struct octep_vf_oq_desc_hw *desc_ring; member
Doctep_vf_tx.h135 struct octep_vf_tx_desc_hw *desc_ring; member
/linux-6.12.1/drivers/net/ethernet/marvell/octeon_ep/
Doctep_rx.c34 struct octep_oq_desc_hw *desc_ring = oq->desc_ring; in octep_oq_fill_ring_buffers() local
44 desc_ring[i].buffer_ptr = dma_map_page(oq->dev, page, 0, in octep_oq_fill_ring_buffers()
47 if (dma_mapping_error(oq->dev, desc_ring[i].buffer_ptr)) { in octep_oq_fill_ring_buffers()
63 dma_unmap_page(oq->dev, desc_ring[i].buffer_ptr, PAGE_SIZE, DMA_FROM_DEVICE); in octep_oq_fill_ring_buffers()
81 struct octep_oq_desc_hw *desc_ring = oq->desc_ring; in octep_oq_refill() local
94 desc_ring[refill_idx].buffer_ptr = dma_map_page(oq->dev, page, 0, in octep_oq_refill()
96 if (dma_mapping_error(oq->dev, desc_ring[refill_idx].buffer_ptr)) { in octep_oq_refill()
152 oq->desc_ring = dma_alloc_coherent(oq->dev, desc_ring_size, in octep_setup_oq()
155 if (unlikely(!oq->desc_ring)) { in octep_setup_oq()
182 oq->desc_ring, oq->desc_ring_dma); in octep_setup_oq()
[all …]
Doctep_tx.c197 iq->desc_ring = dma_alloc_coherent(iq->dev, desc_ring_size, in octep_setup_iq()
199 if (unlikely(!iq->desc_ring)) { in octep_setup_iq()
247 iq->desc_ring, iq->desc_ring_dma); in octep_setup_iq()
272 if (iq->desc_ring) in octep_free_iq()
274 iq->desc_ring, iq->desc_ring_dma); in octep_free_iq()
Doctep_rx.h217 struct octep_oq_desc_hw *desc_ring; member
Doctep_tx.h177 struct octep_tx_desc_hw *desc_ring; member
/linux-6.12.1/drivers/net/ethernet/intel/idpf/
Didpf_controlq_setup.c16 cq->desc_ring.va = idpf_alloc_dma_mem(hw, &cq->desc_ring, size); in idpf_ctlq_alloc_desc_ring()
17 if (!cq->desc_ring.va) in idpf_ctlq_alloc_desc_ring()
93 idpf_free_dma_mem(hw, &cq->desc_ring); in idpf_ctlq_free_desc_ring()
168 idpf_free_dma_mem(hw, &cq->desc_ring); in idpf_ctlq_alloc_ring_res()
Didpf_controlq.h15 (&(((struct idpf_ctlq_desc *)((R)->desc_ring.va))[i]))
Didpf_controlq_api.h110 struct idpf_dma_mem desc_ring; /* descriptor ring memory member
Didpf_controlq.c49 wr32(hw, cq->reg.bal, lower_32_bits(cq->desc_ring.pa)); in idpf_ctlq_init_regs()
50 wr32(hw, cq->reg.bah, upper_32_bits(cq->desc_ring.pa)); in idpf_ctlq_init_regs()
Didpf_txrx.c138 if (!txq->desc_ring) in idpf_tx_desc_rel()
141 dmam_free_coherent(txq->dev, txq->size, txq->desc_ring, txq->dma); in idpf_tx_desc_rel()
142 txq->desc_ring = NULL; in idpf_tx_desc_rel()
256 tx_q->desc_ring = dmam_alloc_coherent(dev, tx_q->size, &tx_q->dma, in idpf_tx_desc_alloc()
258 if (!tx_q->desc_ring) { in idpf_tx_desc_alloc()
487 if (!rxq->desc_ring) in idpf_rx_desc_rel()
490 dmam_free_coherent(dev, rxq->size, rxq->desc_ring, rxq->dma); in idpf_rx_desc_rel()
491 rxq->desc_ring = NULL; in idpf_rx_desc_rel()
871 rxq->desc_ring = dmam_alloc_coherent(dev, rxq->size, in idpf_rx_desc_alloc()
873 if (!rxq->desc_ring) { in idpf_rx_desc_alloc()
/linux-6.12.1/drivers/net/ethernet/google/gve/
Dgve_rx_dqo.c259 if (rx->dqo.bufq.desc_ring) { in gve_rx_reset_ring_dqo()
260 size = sizeof(rx->dqo.bufq.desc_ring[0]) * in gve_rx_reset_ring_dqo()
262 memset(rx->dqo.bufq.desc_ring, 0, size); in gve_rx_reset_ring_dqo()
266 if (rx->dqo.complq.desc_ring) { in gve_rx_reset_ring_dqo()
267 size = sizeof(rx->dqo.complq.desc_ring[0]) * in gve_rx_reset_ring_dqo()
269 memset(rx->dqo.complq.desc_ring, 0, size); in gve_rx_reset_ring_dqo()
335 if (rx->dqo.bufq.desc_ring) { in gve_rx_free_ring_dqo()
336 size = sizeof(rx->dqo.bufq.desc_ring[0]) * buffer_queue_slots; in gve_rx_free_ring_dqo()
337 dma_free_coherent(hdev, size, rx->dqo.bufq.desc_ring, in gve_rx_free_ring_dqo()
339 rx->dqo.bufq.desc_ring = NULL; in gve_rx_free_ring_dqo()
[all …]
Dgve_rx.c78 if (rx->desc.desc_ring) { in gve_rx_reset_ring_gqi()
79 size = slots * sizeof(rx->desc.desc_ring[0]); in gve_rx_reset_ring_gqi()
80 memset(rx->desc.desc_ring, 0, size); in gve_rx_reset_ring_gqi()
111 if (rx->desc.desc_ring) { in gve_rx_free_ring_gqi()
113 dma_free_coherent(dev, bytes, rx->desc.desc_ring, rx->desc.bus); in gve_rx_free_ring_gqi()
114 rx->desc.desc_ring = NULL; in gve_rx_free_ring_gqi()
345 rx->desc.desc_ring = dma_alloc_coherent(hdev, bytes, &rx->desc.bus, in gve_rx_alloc_ring_gqi()
347 if (!rx->desc.desc_ring) { in gve_rx_alloc_ring_gqi()
938 desc = rx->desc.desc_ring + next_idx; in gve_rx_work_pending()
1010 struct gve_rx_desc *desc = &rx->desc.desc_ring[idx]; in gve_clean_rx_done()
[all …]
Dgve.h95 struct gve_rx_desc *desc_ring; /* the descriptor ring */ member
135 struct gve_rx_desc_dqo *desc_ring; member
144 struct gve_rx_compl_desc_dqo *desc_ring; member
/linux-6.12.1/scripts/gdb/linux/
Ddmesg.py44 desc_ring = utils.read_memoryview(inf, addr, sz).tobytes()
48 desc_ring_count = 1 << utils.read_u32(desc_ring, off)
51 desc_addr = utils.read_ulong(desc_ring, off)
56 info_addr = utils.read_ulong(desc_ring, off)
89 tail_id = utils.read_atomic_long(desc_ring, off)
91 head_id = utils.read_atomic_long(desc_ring, off)
/linux-6.12.1/drivers/net/ethernet/cavium/liquidio/
Docteon_droq.c168 struct octeon_droq_desc *desc_ring = droq->desc_ring; in octeon_droq_setup_ring_buffers() local
182 desc_ring[i].info_ptr = 0; in octeon_droq_setup_ring_buffers()
183 desc_ring[i].buffer_ptr = in octeon_droq_setup_ring_buffers()
203 if (droq->desc_ring) in octeon_delete_droq()
205 droq->desc_ring, droq->desc_ring_dma); in octeon_delete_droq()
266 droq->desc_ring = lio_dma_alloc(oct, desc_ring_size, in octeon_init_droq()
269 if (!droq->desc_ring) { in octeon_init_droq()
276 q_no, droq->desc_ring, droq->desc_ring_dma); in octeon_init_droq()
387 struct octeon_droq_desc *desc_ring) in octeon_droq_refill_pullup_descs() argument
399 desc_ring[droq->refill_idx].buffer_ptr = in octeon_droq_refill_pullup_descs()
[all …]
Docteon_droq.h257 struct octeon_droq_desc *desc_ring; member
/linux-6.12.1/drivers/dma/
Dplx_dma.c116 struct plx_dma_desc **desc_ring; member
131 return plxdev->desc_ring[i & (PLX_DMA_RING_COUNT - 1)]; in plx_dma_get_desc()
381 plxdev->desc_ring = kcalloc(PLX_DMA_RING_COUNT, in plx_dma_alloc_desc()
382 sizeof(*plxdev->desc_ring), GFP_KERNEL); in plx_dma_alloc_desc()
383 if (!plxdev->desc_ring) in plx_dma_alloc_desc()
395 plxdev->desc_ring[i] = desc; in plx_dma_alloc_desc()
402 kfree(plxdev->desc_ring[i]); in plx_dma_alloc_desc()
403 kfree(plxdev->desc_ring); in plx_dma_alloc_desc()
480 kfree(plxdev->desc_ring[i]); in plx_dma_free_chan_resources()
482 kfree(plxdev->desc_ring); in plx_dma_free_chan_resources()
/linux-6.12.1/drivers/net/ethernet/socionext/
Dnetsec.c292 struct netsec_desc_ring desc_ring[NETSEC_RING_MAX]; member
637 struct netsec_desc_ring *dring = &priv->desc_ring[NETSEC_RING_TX]; in netsec_clean_tx_dring()
739 struct netsec_desc_ring *dring = &priv->desc_ring[NETSEC_RING_RX]; in netsec_alloc_rx_data()
761 struct netsec_desc_ring *dring = &priv->desc_ring[NETSEC_RING_RX]; in netsec_rx_fill()
832 struct netsec_desc_ring *tx_ring = &priv->desc_ring[NETSEC_RING_TX]; in netsec_xdp_queue_one()
861 &priv->desc_ring[NETSEC_RING_RX]; in netsec_xdp_queue_one()
884 struct netsec_desc_ring *tx_ring = &priv->desc_ring[NETSEC_RING_TX]; in netsec_xdp_xmit_back()
901 struct netsec_desc_ring *dring = &priv->desc_ring[NETSEC_RING_RX]; in netsec_run_xdp()
953 struct netsec_desc_ring *dring = &priv->desc_ring[NETSEC_RING_RX]; in netsec_process_rx()
1115 struct netsec_desc_ring *dring = &priv->desc_ring[NETSEC_RING_TX]; in netsec_check_stop_tx()
[all …]
/linux-6.12.1/Documentation/admin-guide/kdump/
Dgdbmacros.txt298 set var $desc_count = 1U << prb->desc_ring.count_bits
301 set var $id = prb->desc_ring.tail_id.counter
302 set var $end_id = prb->desc_ring.head_id.counter
305 set var $desc = &prb->desc_ring.descs[$id % $desc_count]
306 set var $info = &prb->desc_ring.infos[$id % $desc_count]
/linux-6.12.1/drivers/net/ethernet/cortina/
Dgemini.c547 struct gmac_txdesc *desc_ring; in gmac_setup_txqs() local
560 desc_ring = dma_alloc_coherent(geth->dev, len * sizeof(*desc_ring), in gmac_setup_txqs()
563 if (!desc_ring) { in gmac_setup_txqs()
570 dma_free_coherent(geth->dev, len * sizeof(*desc_ring), in gmac_setup_txqs()
571 desc_ring, port->txq_dma_base); in gmac_setup_txqs()
580 txq->ring = desc_ring; in gmac_setup_txqs()
591 desc_ring += entries; in gmac_setup_txqs()

Page: 1 2