
Searched refs:rx_buff (Results 1 – 25 of 25) sorted by relevance

/linux-6.12.1/drivers/net/ethernet/intel/idpf/
idpf_controlq_setup.c
43 cq->bi.rx_buff = kcalloc(cq->ring_size, sizeof(struct idpf_dma_mem *), in idpf_ctlq_alloc_bufs()
45 if (!cq->bi.rx_buff) in idpf_ctlq_alloc_bufs()
53 cq->bi.rx_buff[i] = kcalloc(num, sizeof(struct idpf_dma_mem), in idpf_ctlq_alloc_bufs()
55 if (!cq->bi.rx_buff[i]) in idpf_ctlq_alloc_bufs()
58 bi = cq->bi.rx_buff[i]; in idpf_ctlq_alloc_bufs()
63 kfree(cq->bi.rx_buff[i]); in idpf_ctlq_alloc_bufs()
74 idpf_free_dma_mem(hw, cq->bi.rx_buff[i]); in idpf_ctlq_alloc_bufs()
75 kfree(cq->bi.rx_buff[i]); in idpf_ctlq_alloc_bufs()
77 kfree(cq->bi.rx_buff); in idpf_ctlq_alloc_bufs()
113 if (cq->bi.rx_buff[i]) { in idpf_ctlq_free_bufs()
[all …]
idpf_controlq.c
67 struct idpf_dma_mem *bi = cq->bi.rx_buff[i]; in idpf_ctlq_init_rxq_bufs()
448 if (cq->bi.rx_buff[ntp]) in idpf_ctlq_post_rx_buffs()
463 if (cq->bi.rx_buff[tbp]) { in idpf_ctlq_post_rx_buffs()
464 cq->bi.rx_buff[ntp] = in idpf_ctlq_post_rx_buffs()
465 cq->bi.rx_buff[tbp]; in idpf_ctlq_post_rx_buffs()
466 cq->bi.rx_buff[tbp] = NULL; in idpf_ctlq_post_rx_buffs()
484 cq->bi.rx_buff[ntp] = buffs[i]; in idpf_ctlq_post_rx_buffs()
496 desc->datalen = cpu_to_le16(cq->bi.rx_buff[ntp]->size); in idpf_ctlq_post_rx_buffs()
498 cpu_to_le32(upper_32_bits(cq->bi.rx_buff[ntp]->pa)); in idpf_ctlq_post_rx_buffs()
500 cpu_to_le32(lower_32_bits(cq->bi.rx_buff[ntp]->pa)); in idpf_ctlq_post_rx_buffs()
[all …]
idpf_controlq_api.h
114 struct idpf_dma_mem **rx_buff; member
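
Taken together, the idpf control-queue hits above show a two-level allocation: an array of per-descriptor pointers is kcalloc'd first, each slot then gets its own idpf_dma_mem tracking struct, and any failure unwinds whatever was already allocated before freeing the array itself. Below is a minimal userspace sketch of that pattern, assuming illustrative struct and field names (dma_mem, ring, va) rather than the driver's, with calloc/malloc/free standing in for the kernel allocators and DMA setup elided.

#include <stdlib.h>

struct dma_mem {                    /* stand-in for struct idpf_dma_mem */
	void   *va;
	size_t  size;
};

struct ring {
	size_t           ring_size;
	struct dma_mem **rx_buff;   /* one tracking struct per descriptor */
};

static int ring_alloc_bufs(struct ring *rq, size_t buf_size)
{
	size_t i;

	/* Level 1: array of pointers, one slot per ring entry. */
	rq->rx_buff = calloc(rq->ring_size, sizeof(*rq->rx_buff));
	if (!rq->rx_buff)
		return -1;

	/* Level 2: a tracking struct plus a data buffer for each slot. */
	for (i = 0; i < rq->ring_size; i++) {
		rq->rx_buff[i] = calloc(1, sizeof(*rq->rx_buff[i]));
		if (!rq->rx_buff[i])
			goto unwind;

		rq->rx_buff[i]->va = malloc(buf_size);
		if (!rq->rx_buff[i]->va) {
			free(rq->rx_buff[i]);
			goto unwind;
		}
		rq->rx_buff[i]->size = buf_size;
	}
	return 0;

unwind:
	/* Free only the entries allocated before the failure, then the array. */
	while (i--) {
		free(rq->rx_buff[i]->va);
		free(rq->rx_buff[i]);
	}
	free(rq->rx_buff);
	rq->rx_buff = NULL;
	return -1;
}

The while (i--) unwind mirrors the driver's cleanup loop in the hits above, which frees the entries allocated before the failing index and then kfree()s the rx_buff pointer array.
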
/linux-6.12.1/drivers/net/ethernet/arc/
emac_main.c
184 struct buffer_state *rx_buff = &priv->rx_buff[*last_rx_bd]; in arc_emac_rx() local
242 dma_unmap_single(dev, dma_unmap_addr(rx_buff, addr), in arc_emac_rx()
243 dma_unmap_len(rx_buff, len), DMA_FROM_DEVICE); in arc_emac_rx()
248 skb_put(rx_buff->skb, pktlen); in arc_emac_rx()
249 rx_buff->skb->dev = ndev; in arc_emac_rx()
250 rx_buff->skb->protocol = eth_type_trans(rx_buff->skb, ndev); in arc_emac_rx()
252 netif_receive_skb(rx_buff->skb); in arc_emac_rx()
254 rx_buff->skb = skb; in arc_emac_rx()
255 dma_unmap_addr_set(rx_buff, addr, addr); in arc_emac_rx()
256 dma_unmap_len_set(rx_buff, len, EMAC_BUFFER_SIZE); in arc_emac_rx()
[all …]
emac.h
151 struct buffer_state rx_buff[RX_BD_NUM]; member
/linux-6.12.1/drivers/net/ethernet/
lantiq_xrx200.c
69 void *rx_buff[LTQ_DESC_NUM]; member
189 void *buf = ch->rx_buff[ch->dma.desc]; in xrx200_alloc_buf()
194 ch->rx_buff[ch->dma.desc] = alloc(priv->rx_skb_size); in xrx200_alloc_buf()
195 if (!ch->rx_buff[ch->dma.desc]) { in xrx200_alloc_buf()
196 ch->rx_buff[ch->dma.desc] = buf; in xrx200_alloc_buf()
201 mapping = dma_map_single(priv->dev, ch->rx_buff[ch->dma.desc], in xrx200_alloc_buf()
204 skb_free_frag(ch->rx_buff[ch->dma.desc]); in xrx200_alloc_buf()
205 ch->rx_buff[ch->dma.desc] = buf; in xrx200_alloc_buf()
224 void *buf = ch->rx_buff[ch->dma.desc]; in xrx200_hw_receive()
440 buff = ch_rx->rx_buff[ch_rx->dma.desc]; in xrx200_change_mtu()
[all …]
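
The xrx200_alloc_buf() hits illustrate a defensive refill idiom: the old buffer pointer is saved before a replacement is allocated, and if the allocation (or the later DMA mapping) fails, the old buffer is put back into rx_buff[desc] so the ring never loses a slot under memory pressure. A rough userspace sketch of that idea follows; the rx_channel struct, the rx_refill() name, and the ring size are hypothetical, malloc/free stand in for the frag allocator, and the DMA-mapping step is omitted.

#include <stdlib.h>

#define DESC_NUM 64                     /* illustrative ring size */

struct rx_channel {
	void  *rx_buff[DESC_NUM];       /* one buffer per descriptor */
	size_t desc;                    /* current descriptor index */
	size_t buf_size;
};

/*
 * Swap a fresh buffer into the current slot and hand the old (filled) one
 * back to the caller.  On allocation failure the old buffer stays in the
 * ring and NULL is returned, so the caller simply drops the packet.
 */
static void *rx_refill(struct rx_channel *ch)
{
	void *old = ch->rx_buff[ch->desc];

	ch->rx_buff[ch->desc] = malloc(ch->buf_size);
	if (!ch->rx_buff[ch->desc]) {
		ch->rx_buff[ch->desc] = old;    /* keep the old buffer */
		return NULL;
	}
	return old;     /* caller processes the old, now-full buffer */
}
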
/linux-6.12.1/drivers/net/ethernet/amd/
ariadne.c
91 volatile u_short *rx_buff[RX_RING_SIZE]; member
103 u_short rx_buff[RX_RING_SIZE][PKT_BUF_SIZE / sizeof(u_short)]; member
148 offsetof(struct lancedata, rx_buff[i])); in ariadne_init_ring()
150 offsetof(struct lancedata, rx_buff[i])) | in ariadne_init_ring()
155 priv->rx_buff[i] = lancedata->rx_buff[i]; in ariadne_init_ring()
157 i, &lancedata->rx_ring[i], lancedata->rx_buff[i]); in ariadne_init_ring()
213 (const void *)priv->rx_buff[entry], in ariadne_rx()
lance.c
877 void *rx_buff; in lance_init_ring() local
882 rx_buff = skb->data; in lance_init_ring()
884 rx_buff = kmalloc(PKT_BUF_SZ, GFP_DMA | gfp); in lance_init_ring()
885 if (!rx_buff) in lance_init_ring()
888 lp->rx_ring[i].base = (u32)isa_virt_to_bus(rx_buff) | 0x80000000; in lance_init_ring()
/linux-6.12.1/drivers/mailbox/
ti-msgmgr.c
117 u32 *rx_buff; member
211 message.buf = (u8 *)qinst->rx_buff; in ti_msgmgr_queue_rx_data()
227 for (data_reg = qinst->queue_buff_start, word_data = qinst->rx_buff, in ti_msgmgr_queue_rx_data()
537 qinst->rx_buff = kzalloc(d->max_message_size, GFP_KERNEL); in ti_msgmgr_queue_startup()
538 if (!qinst->rx_buff) in ti_msgmgr_queue_startup()
543 kfree(qinst->rx_buff); in ti_msgmgr_queue_startup()
561 kfree(qinst->rx_buff); in ti_msgmgr_queue_shutdown()
/linux-6.12.1/drivers/tty/serial/8250/
8250_pci1xxxx.c
329 unsigned char *rx_buff, u32 *buff_index, in pci1xxxx_process_read_data() argument
344 burst_buf = (u32 *)&rx_buff[*buff_index]; in pci1xxxx_process_read_data()
353 rx_buff[*buff_index] = readb(port->membase + in pci1xxxx_process_read_data()
364 unsigned char rx_buff[RX_BUF_SIZE]; in pci1xxxx_rx_burst() local
370 pci1xxxx_process_read_data(port, rx_buff, &buff_index, in pci1xxxx_rx_burst()
373 copied_len = (u32)tty_insert_flip_string(tty_port, rx_buff, in pci1xxxx_rx_burst()
/linux-6.12.1/drivers/net/ethernet/ibm/
ibmvnic.c
795 skb = pool->rx_buff[bufidx].skb; in replenish_rx_pool()
816 pool->rx_buff[bufidx].data = dst; in replenish_rx_pool()
817 pool->rx_buff[bufidx].dma = dma_addr; in replenish_rx_pool()
818 pool->rx_buff[bufidx].skb = skb; in replenish_rx_pool()
819 pool->rx_buff[bufidx].pool_index = pool->index; in replenish_rx_pool()
820 pool->rx_buff[bufidx].size = pool->buff_size; in replenish_rx_pool()
827 cpu_to_be64((u64)&pool->rx_buff[bufidx]); in replenish_rx_pool()
862 struct ibmvnic_rx_buff *rx_buff; in replenish_rx_pool() local
867 rx_buff = (struct ibmvnic_rx_buff *) in replenish_rx_pool()
869 bufidx = (int)(rx_buff - pool->rx_buff); in replenish_rx_pool()
[all …]
ibmvnic.h
869 struct ibmvnic_rx_buff *rx_buff; member
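
In the ibmvnic hits, pool->rx_buff is a contiguous array of buffer-tracking structs, the descriptor handed to the hypervisor carries the address of one entry, and the completion path recovers the array index by pointer subtraction (rx_buff - pool->rx_buff). A small sketch of that cookie/index round trip; the struct layout and the post_cookie()/cookie_to_index() names here are illustrative, not the driver's.

#include <stddef.h>
#include <stdint.h>

struct rx_buff_entry {          /* illustrative stand-in for struct ibmvnic_rx_buff */
	void    *data;
	uint64_t dma;
	int      pool_index;
};

struct rx_pool {
	struct rx_buff_entry *rx_buff;  /* contiguous array of tracking entries */
	size_t                size;
};

/* When posting: stash the entry's address in the descriptor as an opaque cookie. */
static uint64_t post_cookie(struct rx_pool *pool, size_t bufidx)
{
	return (uint64_t)(uintptr_t)&pool->rx_buff[bufidx];
}

/* On completion: turn the cookie back into an index into pool->rx_buff. */
static size_t cookie_to_index(struct rx_pool *pool, uint64_t cookie)
{
	struct rx_buff_entry *entry = (struct rx_buff_entry *)(uintptr_t)cookie;

	return (size_t)(entry - pool->rx_buff);
}
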
/linux-6.12.1/drivers/spi/
spi-tle62x0.c
33 unsigned char rx_buff[4]; member
62 .rx_buf = st->rx_buff, in tle62x0_read()
101 unsigned char *buff = st->rx_buff; in tle62x0_status_show()
/linux-6.12.1/drivers/net/ethernet/renesas/
ravb_main.c
300 struct ravb_rx_buffer *rx_buff; in ravb_alloc_rx_buffer() local
304 rx_buff = &priv->rx_buffers[q][entry]; in ravb_alloc_rx_buffer()
306 rx_buff->page = page_pool_alloc(priv->rx_pool[q], &rx_buff->offset, in ravb_alloc_rx_buffer()
308 if (unlikely(!rx_buff->page)) { in ravb_alloc_rx_buffer()
316 dma_addr = page_pool_get_dma_addr(rx_buff->page) + rx_buff->offset; in ravb_alloc_rx_buffer()
851 struct ravb_rx_buffer *rx_buff; in ravb_rx_gbeth() local
854 rx_buff = &priv->rx_buffers[q][entry]; in ravb_rx_gbeth()
855 rx_addr = page_address(rx_buff->page) + rx_buff->offset; in ravb_rx_gbeth()
870 rx_buff->page, 0, in ravb_rx_gbeth()
897 rx_buff->page, 0, in ravb_rx_gbeth()
[all …]
/linux-6.12.1/drivers/isdn/mISDN/
dsp_cmx.c
236 memset(dsp->rx_buff, dsp_silence, sizeof(dsp->rx_buff)); in dsp_cmx_add_conf_member()
1244 memset(dsp->rx_buff, dsp_silence, sizeof(dsp->rx_buff)); in dsp_cmx_receive()
1265 memset(dsp->rx_buff, dsp_silence, sizeof(dsp->rx_buff)); in dsp_cmx_receive()
1277 d = dsp->rx_buff; in dsp_cmx_receive()
1358 q = dsp->rx_buff; /* received data */
1454 o_q = other->rx_buff; /* received data */
1704 q = dsp->rx_buff;
1727 p = dsp->rx_buff;
dsp.h
205 u8 rx_buff[CMX_BUFF_SIZE]; member
dsp_core.c
843 memset(dsp->rx_buff, 0, sizeof(dsp->rx_buff)); in dsp_function()
/linux-6.12.1/drivers/s390/net/
netiucv.c
142 struct sk_buff *rx_buff; member
652 conn->rx_buff->data = conn->rx_buff->head; in conn_action_rx()
653 skb_reset_tail_pointer(conn->rx_buff); in conn_action_rx()
654 conn->rx_buff->len = 0; in conn_action_rx()
655 rc = iucv_message_receive(conn->path, msg, 0, conn->rx_buff->data, in conn_action_rx()
662 netiucv_unpack_skb(conn, conn->rx_buff); in conn_action_rx()
1744 conn->rx_buff = alloc_skb(conn->max_buffsize, GFP_KERNEL | GFP_DMA); in netiucv_new_connection()
1745 if (!conn->rx_buff) in netiucv_new_connection()
1775 kfree_skb(conn->rx_buff); in netiucv_new_connection()
1802 kfree_skb(conn->rx_buff); in netiucv_remove_connection()
/linux-6.12.1/include/linux/
scc.h
75 struct sk_buff *rx_buff; /* pointer to frame currently received */ member
/linux-6.12.1/drivers/net/hamradio/
scc.c
350 if(scc->rx_buff != NULL) /* did we receive something? */ in flush_rx_FIFO()
353 dev_kfree_skb_irq(scc->rx_buff); in flush_rx_FIFO()
354 scc->rx_buff = NULL; in flush_rx_FIFO()
531 skb = scc->rx_buff; in scc_rxint()
545 scc->rx_buff = skb; in scc_rxint()
555 scc->rx_buff = NULL; in scc_rxint()
576 skb = scc->rx_buff; in scc_spint()
585 scc->rx_buff = skb = NULL; in scc_spint()
597 scc->rx_buff = NULL; in scc_spint()
601 scc->rx_buff = NULL; in scc_spint()
/linux-6.12.1/drivers/net/wireless/ath/wil6210/
txrx_edma.c
160 struct wil_rx_buff *rx_buff; in wil_ring_alloc_skb_edma() local
191 rx_buff = list_first_entry(free, struct wil_rx_buff, list); in wil_ring_alloc_skb_edma()
192 buff_id = rx_buff->id; in wil_ring_alloc_skb_edma()
195 list_move(&rx_buff->list, active); in wil_ring_alloc_skb_edma()
274 struct wil_rx_buff *rx_buff = in wil_move_all_rx_buff_to_free_list() local
276 struct sk_buff *skb = rx_buff->skb; in wil_move_all_rx_buff_to_free_list()
279 wil_err(wil, "No Rx skb at buff_id %d\n", rx_buff->id); in wil_move_all_rx_buff_to_free_list()
281 rx_buff->skb = NULL; in wil_move_all_rx_buff_to_free_list()
289 list_move(&rx_buff->list, &wil->rx_buff_mgmt.free); in wil_move_all_rx_buff_to_free_list()
/linux-6.12.1/drivers/net/ethernet/freescale/
gianfar.c
1103 struct gfar_rx_buff *rxb = &rx_queue->rx_buff[i]; in free_skb_rx_queue()
1119 kfree(rx_queue->rx_buff); in free_skb_rx_queue()
1120 rx_queue->rx_buff = NULL; in free_skb_rx_queue()
1145 if (rx_queue->rx_buff) in free_skb_resources()
1254 rxb = &rx_queue->rx_buff[i]; in gfar_alloc_rx_buffs()
1276 rxb = rx_queue->rx_buff; in gfar_alloc_rx_buffs()
1400 rx_queue->rx_buff = kcalloc(rx_queue->rx_ring_size, in gfar_alloc_skb_resources()
1401 sizeof(*rx_queue->rx_buff), in gfar_alloc_skb_resources()
1403 if (!rx_queue->rx_buff) in gfar_alloc_skb_resources()
2391 new_rxb = &rxq->rx_buff[nta]; in gfar_reuse_rx_page()
[all …]
gianfar.h
1051 struct gfar_rx_buff *rx_buff __aligned(SMP_CACHE_BYTES);
/linux-6.12.1/drivers/ntb/
ntb_transport.c
167 void *rx_buff; member
503 "rx_buff - \t0x%p\n", qp->rx_buff); in debugfs_read()
642 qp->rx_buff = mw->virt_addr + rx_size * (qp_num / mw_count); in ntb_transport_setup_qp_mw()
645 qp->remote_rx_info = qp->rx_buff + rx_size; in ntb_transport_setup_qp_mw()
673 void *offset = (qp->rx_buff + qp->rx_max_frame * (i + 1) - in ntb_transport_setup_qp_mw()
1510 void *offset = qp->rx_buff + qp->rx_max_frame * in ntb_rx_copy_callback()
1642 offset = qp->rx_buff + qp->rx_max_frame * qp->rx_index; in ntb_process_rxc()
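
Unlike the drivers above, ntb_transport never allocates rx_buff itself: each queue pair's rx_buff is an offset into a memory window shared by several queues, and individual receive frames are further fixed-size offsets of rx_max_frame within that region. A sketch of the layout arithmetic under a simplified, hypothetical queue struct; in the driver the slot comes from qp_num and mw_count, and the remote_rx_info block sits at the end of the region.

#include <stddef.h>

struct ntb_queue {
	void  *rx_buff;         /* this queue's slice of the shared memory window */
	size_t rx_max_frame;    /* fixed size of each receive frame slot */
};

/* Carve one queue's receive region out of a window shared by several queues
 * (each queue gets a slice of rx_size bytes at index 'slot'). */
static void setup_qp_rx(struct ntb_queue *qp, void *mw_virt_addr,
			size_t rx_size, unsigned int slot, size_t rx_max_frame)
{
	qp->rx_buff = (char *)mw_virt_addr + rx_size * slot;
	qp->rx_max_frame = rx_max_frame;
}

/* Address of receive frame slot i inside this queue's region. */
static void *qp_rx_frame(const struct ntb_queue *qp, size_t i)
{
	return (char *)qp->rx_buff + qp->rx_max_frame * i;
}
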
/linux-6.12.1/arch/mips/include/asm/ip32/
mace.h
106 volatile unsigned long rx_buff; member