
Searched refs:txd (Results 1 – 25 of 141) sorted by relevance


/linux-6.12.1/drivers/dma/
sa11x0-dma.c
148 static void sa11x0_dma_start_desc(struct sa11x0_dma_phy *p, struct sa11x0_dma_desc *txd) in sa11x0_dma_start_desc() argument
150 list_del(&txd->vd.node); in sa11x0_dma_start_desc()
151 p->txd_load = txd; in sa11x0_dma_start_desc()
155 p->num, &txd->vd, txd->vd.tx.cookie, txd->ddar); in sa11x0_dma_start_desc()
161 struct sa11x0_dma_desc *txd = p->txd_load; in sa11x0_dma_start_sg() local
167 if (!txd) in sa11x0_dma_start_sg()
176 if (p->sg_load == txd->sglen) { in sa11x0_dma_start_sg()
177 if (!txd->cyclic) { in sa11x0_dma_start_sg()
185 if (txn && txn->ddar == txd->ddar) { in sa11x0_dma_start_sg()
186 txd = txn; in sa11x0_dma_start_sg()
[all …]
owl-dma.c
206 struct owl_dma_txd *txd; member
364 static struct owl_dma_lli *owl_dma_add_lli(struct owl_dma_txd *txd, in owl_dma_add_lli() argument
370 list_add_tail(&next->node, &txd->lli_list); in owl_dma_add_lli()
540 struct owl_dma_txd *txd = to_owl_txd(&vd->tx); in owl_dma_start_next_txd() local
547 vchan->txd = txd; in owl_dma_start_next_txd()
553 lli = list_first_entry(&txd->lli_list, in owl_dma_start_next_txd()
556 if (txd->cyclic) in owl_dma_start_next_txd()
641 struct owl_dma_txd *txd; in owl_dma_interrupt() local
654 txd = vchan->txd; in owl_dma_interrupt()
655 if (txd) { in owl_dma_interrupt()
[all …]
timb_dma.c
64 struct dma_async_tx_descriptor txd; member
199 iowrite32(td_desc->txd.phys, td_chan->membase + in __td_start_dma()
209 iowrite32(td_desc->txd.phys, td_chan->membase + in __td_start_dma()
222 struct dma_async_tx_descriptor *txd; in __td_finish() local
231 txd = &td_desc->txd; in __td_finish()
234 txd->cookie); in __td_finish()
243 dma_cookie_complete(txd); in __td_finish()
246 dmaengine_desc_get_callback(txd, &cb); in __td_finish()
250 dma_descriptor_unmap(txd); in __td_finish()
288 __func__, td_desc->txd.cookie); in __td_start_next()
[all …]
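
For context, the timb_dma hits above show the usual dmaengine completion sequence: __td_finish() completes the descriptor's cookie, fetches the registered callback, unmaps the descriptor, and invokes the callback. A minimal user-space sketch of that flow, using illustrative stand-in types rather than the real dmaengine API:

#include <stdio.h>

typedef int dma_cookie_t;

struct tx_descriptor {                  /* stand-in for dma_async_tx_descriptor */
	dma_cookie_t cookie;
	void (*callback)(void *param);
	void *callback_param;
};

static dma_cookie_t last_completed;     /* what a status query would report */

static void complete_descriptor(struct tx_descriptor *txd)
{
	last_completed = txd->cookie;                 /* mark the cookie as completed */
	if (txd->callback)
		txd->callback(txd->callback_param);   /* notify the submitter */
}

static void done(void *param)
{
	printf("descriptor %s finished\n", (const char *)param);
}

int main(void)
{
	struct tx_descriptor txd = {
		.cookie = 7, .callback = done, .callback_param = "memcpy#7",
	};

	complete_descriptor(&txd);
	printf("last completed cookie = %d\n", last_completed);
	return 0;
}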
amba-pl08x.c
532 struct pl08x_txd *txd = to_pl08x_txd(&vd->tx); in pl08x_start_next_txd() local
535 list_del(&txd->vd.node); in pl08x_start_next_txd()
537 plchan->at = txd; in pl08x_start_next_txd()
543 pl08x_write_lli(pl08x, phychan, &txd->llis_va[0], txd->ccfg); in pl08x_start_next_txd()
759 struct pl08x_txd *txd; in pl08x_getbytes_chan() local
765 txd = plchan->at; in pl08x_getbytes_chan()
767 if (!ch || !txd) in pl08x_getbytes_chan()
782 llis_va = txd->llis_va; in pl08x_getbytes_chan()
783 llis_bus = txd->llis_bus; in pl08x_getbytes_chan()
1107 struct pl08x_txd *txd; member
[all …]
txx9dmac.c
139 txd_to_txx9dmac_desc(struct dma_async_tx_descriptor *txd) in txd_to_txx9dmac_desc() argument
141 return container_of(txd, struct txx9dmac_desc, txd); in txd_to_txx9dmac_desc()
199 dma_async_tx_descriptor_init(&desc->txd, &dc->chan); in txx9dmac_desc_alloc()
200 desc->txd.tx_submit = txx9dmac_tx_submit; in txx9dmac_desc_alloc()
202 desc->txd.flags = DMA_CTRL_ACK; in txx9dmac_desc_alloc()
203 desc->txd.phys = dma_map_single(chan2parent(&dc->chan), &desc->hwdesc, in txx9dmac_desc_alloc()
216 if (async_tx_test_ack(&desc->txd)) { in txx9dmac_desc_get()
249 child->txd.phys, ddev->descsize, in txx9dmac_sync_desc_for_cpu()
252 desc->txd.phys, ddev->descsize, in txx9dmac_sync_desc_for_cpu()
337 first->txd.cookie, first); in txx9dmac_dostart()
[all …]
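
The txx9dmac.c result above is a common idiom across these drivers: the generic dma_async_tx_descriptor is embedded in a driver-private descriptor, and container_of() recovers the outer structure from a pointer to the embedded member. A small user-space sketch of that pattern (struct names here are illustrative, not the kernel types):

#include <stddef.h>
#include <stdio.h>

/* User-space equivalent of the kernel's container_of() helper. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct tx_descriptor {          /* stand-in for dma_async_tx_descriptor */
	int cookie;
};

struct driver_desc {            /* stand-in for struct txx9dmac_desc */
	unsigned long hw_addr;
	struct tx_descriptor txd;   /* embedded generic descriptor */
};

/* Same shape as txd_to_txx9dmac_desc(): outer struct from the embedded member. */
static struct driver_desc *to_driver_desc(struct tx_descriptor *txd)
{
	return container_of(txd, struct driver_desc, txd);
}

int main(void)
{
	struct driver_desc d = { .hw_addr = 0x1000, .txd = { .cookie = 42 } };
	struct tx_descriptor *txd = &d.txd;

	printf("recovered hw_addr = 0x%lx, cookie = %d\n",
	       to_driver_desc(txd)->hw_addr, to_driver_desc(txd)->cookie);
	return 0;
}

The same recovery trick appears below in plx_dma.c (to_plx_desc), pch_dma.c (to_pd_desc), and tsi721_dma.c (to_tsi721_desc).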
sun6i-dma.c
351 struct sun6i_desc *txd = pchan->desc; in sun6i_get_chan_size() local
362 for (lli = txd->v_lli; lli; lli = lli->v_lli_next) { in sun6i_get_chan_size()
376 struct sun6i_desc *txd) in sun6i_dma_lli_add() argument
378 if ((!prev && !txd) || !next) in sun6i_dma_lli_add()
382 txd->p_lli = next_phy; in sun6i_dma_lli_add()
383 txd->v_lli = next; in sun6i_dma_lli_add()
410 struct sun6i_desc *txd = to_sun6i_desc(&vd->tx); in sun6i_dma_free_desc() local
415 if (unlikely(!txd)) in sun6i_dma_free_desc()
418 p_lli = txd->p_lli; in sun6i_dma_free_desc()
419 v_lli = txd->v_lli; in sun6i_dma_free_desc()
[all …]
plx_dma.c
98 struct dma_async_tx_descriptor txd; member
124 static struct plx_dma_desc *to_plx_desc(struct dma_async_tx_descriptor *txd) in to_plx_desc() argument
126 return container_of(txd, struct plx_dma_desc, txd); in to_plx_desc()
159 dma_cookie_complete(&desc->txd); in plx_dma_process_desc()
160 dma_descriptor_unmap(&desc->txd); in plx_dma_process_desc()
161 dmaengine_desc_get_callback_invoke(&desc->txd, &res); in plx_dma_process_desc()
162 desc->txd.callback = NULL; in plx_dma_process_desc()
163 desc->txd.callback_result = NULL; in plx_dma_process_desc()
186 dma_cookie_complete(&desc->txd); in plx_dma_abort_desc()
187 dma_descriptor_unmap(&desc->txd); in plx_dma_abort_desc()
[all …]
ep93xx_dma.c
153 struct dma_async_tx_descriptor txd; member
318 d->txd.callback = desc->txd.callback; in ep93xx_dma_set_active()
319 d->txd.callback_param = desc->txd.callback_param; in ep93xx_dma_set_active()
362 return !desc->txd.cookie; in ep93xx_dma_advance_active()
489 desc->txd.cookie, desc->src_addr, desc->dst_addr, in m2p_hw_interrupt()
678 last_done = !desc || desc->txd.cookie; in m2m_hw_interrupt()
742 if (async_tx_test_ack(&desc->txd)) { in ep93xx_dma_desc_get()
750 desc->txd.cookie = 0; in ep93xx_dma_desc_get()
751 desc->txd.callback = NULL; in ep93xx_dma_desc_get()
752 desc->txd.callback_param = NULL; in ep93xx_dma_desc_get()
[all …]
pch_dma.c
85 struct dma_async_tx_descriptor txd; member
138 struct pch_dma_desc *to_pd_desc(struct dma_async_tx_descriptor *txd) in to_pd_desc() argument
140 return container_of(txd, struct pch_dma_desc, txd); in to_pd_desc()
338 channel_writel(pd_chan, NEXT, desc->txd.phys); in pdc_dostart()
346 struct dma_async_tx_descriptor *txd = &desc->txd; in pdc_chain_complete() local
349 dmaengine_desc_get_callback(txd, &cb); in pdc_chain_complete()
387 bad_desc->txd.cookie); in pdc_handle_error()
403 static dma_cookie_t pd_tx_submit(struct dma_async_tx_descriptor *txd) in pd_tx_submit() argument
405 struct pch_dma_desc *desc = to_pd_desc(txd); in pd_tx_submit()
406 struct pch_dma_chan *pd_chan = to_pd_chan(txd->chan); in pd_tx_submit()
[all …]
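
The pd_tx_submit() hit above illustrates the submit half of the descriptor life cycle: the driver converts the generic descriptor back to its own type, assigns the next cookie, and queues the work. A hedged stand-alone sketch of that step (names and list handling are illustrative, not the pch_dma internals):

#include <stdio.h>

typedef int dma_cookie_t;

struct descriptor {
	dma_cookie_t cookie;
	struct descriptor *next;
};

struct channel {
	dma_cookie_t last_cookie;
	struct descriptor *active;   /* head of the active list */
};

/* Assign the next cookie and queue the descriptor, the shape of a tx_submit hook. */
static dma_cookie_t tx_submit(struct channel *chan, struct descriptor *desc)
{
	desc->cookie = ++chan->last_cookie;   /* cookies grow monotonically */
	desc->next = chan->active;
	chan->active = desc;
	return desc->cookie;
}

int main(void)
{
	struct channel chan = { 0, NULL };
	struct descriptor d1 = { 0, NULL }, d2 = { 0, NULL };

	printf("cookies: %d, %d\n", tx_submit(&chan, &d1), tx_submit(&chan, &d2));
	return 0;
}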
/linux-6.12.1/drivers/net/ethernet/sfc/
ef100_tx.c
26 return efx_nic_alloc_buffer(tx_queue->efx, &tx_queue->txd, in ef100_tx_probe()
104 if (likely(tx_queue->txd.addr)) in ef100_tx_desc()
105 return ((efx_oword_t *)tx_queue->txd.addr) + index; in ef100_tx_desc()
135 struct efx_tx_buffer *buffer, efx_oword_t *txd) in ef100_set_tx_csum_partial() argument
153 EFX_OR_OWORD(*txd, *txd, csum); in ef100_set_tx_csum_partial()
156 static void ef100_set_tx_hw_vlan(const struct sk_buff *skb, efx_oword_t *txd) in ef100_set_tx_hw_vlan() argument
164 EFX_OR_OWORD(*txd, *txd, vlan); in ef100_set_tx_hw_vlan()
169 struct efx_tx_buffer *buffer, efx_oword_t *txd, in ef100_make_send_desc() argument
173 EFX_POPULATE_OWORD_3(*txd, in ef100_make_send_desc()
179 ef100_set_tx_csum_partial(skb, buffer, txd); in ef100_make_send_desc()
[all …]
/linux-6.12.1/drivers/net/ethernet/netronome/nfp/nfd3/
dp.c
75 struct nfp_nfd3_tx_desc *txd, struct sk_buff *skb, u32 md_bytes) in nfp_nfd3_tx_tso() argument
100 txd->l3_offset = l3_offset - md_bytes; in nfp_nfd3_tx_tso()
101 txd->l4_offset = l4_offset - md_bytes; in nfp_nfd3_tx_tso()
102 txd->lso_hdrlen = hdrlen - md_bytes; in nfp_nfd3_tx_tso()
103 txd->mss = cpu_to_le16(mss); in nfp_nfd3_tx_tso()
104 txd->flags |= NFD3_DESC_TX_LSO; in nfp_nfd3_tx_tso()
124 struct nfp_nfd3_tx_buf *txbuf, struct nfp_nfd3_tx_desc *txd, in nfp_nfd3_tx_csum() argument
137 txd->flags |= NFD3_DESC_TX_CSUM; in nfp_nfd3_tx_csum()
139 txd->flags |= NFD3_DESC_TX_ENCAP; in nfp_nfd3_tx_csum()
145 txd->flags |= NFD3_DESC_TX_IP4_CSUM; in nfp_nfd3_tx_csum()
[all …]
ipsec.c
10 void nfp_nfd3_ipsec_tx(struct nfp_nfd3_tx_desc *txd, struct sk_buff *skb) in nfp_nfd3_ipsec_tx() argument
18 txd->flags |= NFD3_DESC_TX_CSUM; in nfp_nfd3_ipsec_tx()
21 txd->flags |= NFD3_DESC_TX_IP4_CSUM; in nfp_nfd3_ipsec_tx()
32 txd->flags |= NFD3_DESC_TX_UDP_CSUM; in nfp_nfd3_ipsec_tx()
35 txd->flags |= NFD3_DESC_TX_TCP_CSUM; in nfp_nfd3_ipsec_tx()
xsk.c
23 struct nfp_nfd3_tx_desc *txd; in nfp_nfd3_xsk_tx_xdp() local
40 txd = &tx_ring->txds[wr_idx]; in nfp_nfd3_xsk_tx_xdp()
41 txd->offset_eop = NFD3_DESC_TX_EOP; in nfp_nfd3_xsk_tx_xdp()
42 txd->dma_len = cpu_to_le16(pkt_len); in nfp_nfd3_xsk_tx_xdp()
43 nfp_desc_set_dma_addr_40b(txd, xrxbuf->dma_addr + pkt_off); in nfp_nfd3_xsk_tx_xdp()
44 txd->data_len = cpu_to_le16(pkt_len); in nfp_nfd3_xsk_tx_xdp()
46 txd->flags = 0; in nfp_nfd3_xsk_tx_xdp()
47 txd->mss = 0; in nfp_nfd3_xsk_tx_xdp()
48 txd->lso_hdrlen = 0; in nfp_nfd3_xsk_tx_xdp()
338 struct nfp_nfd3_tx_desc *txd; in nfp_nfd3_xsk_tx() local
[all …]
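
The nfd3 xsk.c hits show a TX descriptor being filled field by field, with multi-byte values converted to the device's little-endian layout via cpu_to_le16() before the ring pointer is advanced. A user-space sketch of the same idea, using htole16()/htole64() and a purely illustrative field layout (not the real struct nfp_nfd3_tx_desc):

#include <endian.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Illustrative field layout, not the real struct nfp_nfd3_tx_desc. */
struct tx_desc {
	uint64_t dma_addr;    /* bus address of the packet buffer */
	uint16_t dma_len;     /* fragment length, little-endian on the wire */
	uint16_t data_len;    /* total packet length, little-endian */
	uint8_t  offset_eop;  /* end-of-packet marker bits */
	uint8_t  flags;
	uint16_t mss;
	uint8_t  lso_hdrlen;
};

static void fill_tx_desc(struct tx_desc *txd, uint64_t dma_addr, uint16_t pkt_len)
{
	memset(txd, 0, sizeof(*txd));      /* flags, mss, lso_hdrlen = 0, as in the driver */
	txd->dma_addr = htole64(dma_addr);
	txd->dma_len = htole16(pkt_len);   /* single-fragment packet */
	txd->data_len = htole16(pkt_len);
	txd->offset_eop = 0x80;            /* hypothetical EOP bit */
}

int main(void)
{
	struct tx_desc txd;

	fill_tx_desc(&txd, 0x12340000ULL, 1514);
	printf("dma_len stored as 0x%04x\n", txd.dma_len);
	return 0;
}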
/linux-6.12.1/drivers/net/ethernet/netronome/nfp/nfdk/
dp.c
44 struct nfp_nfdk_tx_desc txd; in nfp_nfdk_tx_tso() local
63 txd.l3_offset = l3_offset; in nfp_nfdk_tx_tso()
64 txd.l4_offset = l4_offset; in nfp_nfdk_tx_tso()
65 txd.lso_meta_res = 0; in nfp_nfdk_tx_tso()
66 txd.mss = cpu_to_le16(mss); in nfp_nfdk_tx_tso()
67 txd.lso_hdrlen = hdrlen; in nfp_nfdk_tx_tso()
68 txd.lso_totsegs = segs; in nfp_nfdk_tx_tso()
77 return txd.raw; in nfp_nfdk_tx_tso()
124 struct nfp_nfdk_tx_desc *txd; in nfp_nfdk_tx_maybe_close_block() local
166 txd = &tx_ring->ktxds[wr_idx]; in nfp_nfdk_tx_maybe_close_block()
[all …]
/linux-6.12.1/Documentation/devicetree/bindings/pinctrl/
marvell,armada-39x-pinctrl.txt
19 mpp1 1 gpio, ua0(txd)
22 mpp4 4 gpio, ua1(txd), ua0(rts), smi(mdc)
32 mpp14 14 gpio, dram(vttctrl), dev(we1), ua1(txd)
36 mpp18 18 gpio, ua1(txd), spi0(cs0), i2c2(sck)
38 mpp20 20 gpio, sata0(prsnt) [1], ua0(rts), ua1(txd), smi(mdc)
44 mpp25 25 gpio, spi0(cs0), ua0(rts), ua1(txd), sd0(d5), dev(cs0)
61 mpp42 42 gpio, ua1(txd), ua0(rts), dev(ad7)
66 mpp46 46 gpio, ref(clk), pcie0(rstout), ua1(txd), led(stb)
69 …, sata0(prsnt) [1], dram(vttctrl), tdm(pclk) [2], audio(mclk) [2], sd0(d4), pcie0(clkreq), ua1(txd)
73 mpp51 51 gpio, tdm(dtx) [2], audio(sdo) [2], dram(deccerr), ua2(txd)
[all …]
marvell,kirkwood-pinctrl.txt
29 mpp5 5 gpo, nand(io7), uart0(txd), ptp(trig)
36 mpp10 10 gpo, spi(sck), uart0(txd), ptp(trig)
40 mpp13 13 gpio, sdio(cmd), uart1(txd)
42 mpp15 15 gpio, sdio(d1), uart0(rts), uart1(txd)
67 mpp5 5 gpo, nand(io7), uart0(txd), ptp(trig), sata0(act)
74 mpp10 10 gpo, spi(sck), uart0(txd), ptp(trig)
78 mpp13 13 gpio, sdio(cmd), uart1(txd)
80 mpp15 15 gpio, sdio(d1), uart0(rts), uart1(txd), sata0(act)
111 mpp5 5 gpo, nand(io7), uart0(txd), ptp(trig), sata0(act)
118 mpp10 10 gpo, spi(sck), uart0(txd), ptp(trig), sata1(act)
[all …]
/linux-6.12.1/drivers/net/wireless/mediatek/mt76/mt7603/
mcu.c
43 struct mt7603_mcu_txd *txd; in mt7603_mcu_skb_send_msg() local
52 txd = (struct mt7603_mcu_txd *)skb_push(skb, hdrlen); in mt7603_mcu_skb_send_msg()
54 txd->len = cpu_to_le16(skb->len); in mt7603_mcu_skb_send_msg()
56 txd->pq_id = cpu_to_le16(MCU_PORT_QUEUE_FW); in mt7603_mcu_skb_send_msg()
58 txd->pq_id = cpu_to_le16(MCU_PORT_QUEUE); in mt7603_mcu_skb_send_msg()
59 txd->pkt_type = MCU_PKT_ID; in mt7603_mcu_skb_send_msg()
60 txd->seq = seq; in mt7603_mcu_skb_send_msg()
63 txd->cid = -cmd; in mt7603_mcu_skb_send_msg()
64 txd->set_query = MCU_Q_NA; in mt7603_mcu_skb_send_msg()
66 txd->cid = MCU_CMD_EXT_CID; in mt7603_mcu_skb_send_msg()
[all …]
/linux-6.12.1/drivers/rapidio/devices/
tsi721_dma.c
58 struct tsi721_tx_desc *to_tsi721_desc(struct dma_async_tx_descriptor *txd) in to_tsi721_desc() argument
60 return container_of(txd, struct tsi721_tx_desc, txd); in to_tsi721_desc()
377 struct dma_async_tx_descriptor *txd = &desc->txd; in tsi721_dma_tx_err() local
378 dma_async_tx_callback callback = txd->callback; in tsi721_dma_tx_err()
379 void *param = txd->callback_param; in tsi721_dma_tx_err()
413 struct dma_chan *dchan = desc->txd.chan; in tsi721_submit_sg()
643 dma_cookie_complete(&desc->txd); in tsi721_dma_tasklet()
669 dma_cookie_complete(&desc->txd); in tsi721_dma_tasklet()
670 if (desc->txd.flags & DMA_PREP_INTERRUPT) { in tsi721_dma_tasklet()
671 callback = desc->txd.callback; in tsi721_dma_tasklet()
[all …]
/linux-6.12.1/drivers/net/ethernet/intel/fm10k/
fm10k_debugfs.c
60 struct fm10k_tx_desc *txd = FM10K_TX_DESC(ring, i); in fm10k_dbg_tx_desc_seq_show() local
63 i, txd->buffer_addr, txd->buflen, txd->vlan, in fm10k_dbg_tx_desc_seq_show()
64 txd->mss, txd->hdrlen, txd->flags); in fm10k_dbg_tx_desc_seq_show()
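
fm10k_dbg_tx_desc_seq_show() walks the TX ring and prints one descriptor per line through debugfs. A plain user-space sketch of that dump loop, with an illustrative descriptor layout standing in for struct fm10k_tx_desc:

#include <stdint.h>
#include <stdio.h>

/* Illustrative layout only; the real struct fm10k_tx_desc differs. */
struct tx_desc {
	uint64_t buffer_addr;
	uint16_t buflen;
	uint16_t vlan;
	uint16_t mss;
	uint8_t  hdrlen;
	uint8_t  flags;
};

int main(void)
{
	struct tx_desc ring[4] = {
		{ 0x1000, 1514, 0, 0, 14, 0x01 },
		{ 0x2000,  512, 0, 0, 14, 0x01 },
	};

	printf("DES  BUFFER_ADDRESS     LENGTH VLAN MSS  HDRLEN FLAGS\n");
	for (unsigned int i = 0; i < 4; i++)
		printf("%03u  0x%016llx %-6u %04x %04x %02x     %02x\n",
		       i, (unsigned long long)ring[i].buffer_addr,
		       ring[i].buflen, ring[i].vlan, ring[i].mss,
		       ring[i].hdrlen, ring[i].flags);
	return 0;
}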
/linux-6.12.1/include/linux/
dmaengine.h
666 static inline void txd_lock(struct dma_async_tx_descriptor *txd) in txd_lock() argument
669 static inline void txd_unlock(struct dma_async_tx_descriptor *txd) in txd_unlock() argument
672 static inline void txd_chain(struct dma_async_tx_descriptor *txd, struct dma_async_tx_descriptor *n… in txd_chain() argument
676 static inline void txd_clear_parent(struct dma_async_tx_descriptor *txd) in txd_clear_parent() argument
679 static inline void txd_clear_next(struct dma_async_tx_descriptor *txd) in txd_clear_next() argument
682 static inline struct dma_async_tx_descriptor *txd_next(struct dma_async_tx_descriptor *txd) in txd_next() argument
686 static inline struct dma_async_tx_descriptor *txd_parent(struct dma_async_tx_descriptor *txd) in txd_parent() argument
692 static inline void txd_lock(struct dma_async_tx_descriptor *txd) in txd_lock() argument
694 spin_lock_bh(&txd->lock); in txd_lock()
696 static inline void txd_unlock(struct dma_async_tx_descriptor *txd) in txd_unlock() argument
[all …]
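
The dmaengine.h hits show two definitions of txd_lock()/txd_unlock(): empty inline stubs in one configuration, and spin_lock_bh()/spin_unlock_bh() wrappers in the other, selected at compile time by a kernel config option. A user-space sketch of that compile-time-selectable locking idiom, with a pthread mutex standing in for the spinlock and ENABLE_TXD_LOCK standing in for the config guard:

#include <pthread.h>
#include <stdio.h>

struct tx_descriptor {               /* stand-in for dma_async_tx_descriptor */
	pthread_mutex_t lock;        /* stand-in for the kernel spinlock */
	int cookie;
};

#ifndef ENABLE_TXD_LOCK
/* One configuration: the helpers compile away to nothing. */
static inline void txd_lock(struct tx_descriptor *txd)   { (void)txd; }
static inline void txd_unlock(struct tx_descriptor *txd) { (void)txd; }
#else
/* The other configuration: real locking around descriptor access. */
static inline void txd_lock(struct tx_descriptor *txd)
{
	pthread_mutex_lock(&txd->lock);
}
static inline void txd_unlock(struct tx_descriptor *txd)
{
	pthread_mutex_unlock(&txd->lock);
}
#endif

int main(void)
{
	struct tx_descriptor txd = {
		.lock = PTHREAD_MUTEX_INITIALIZER, .cookie = 1,
	};

	txd_lock(&txd);
	printf("cookie = %d\n", txd.cookie);
	txd_unlock(&txd);
	return 0;
}

Built with -DENABLE_TXD_LOCK -pthread the helpers actually lock; without the define they are no-ops, mirroring the two blocks in dmaengine.h.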
/linux-6.12.1/drivers/media/pci/mantis/
mantis_i2c.c
75 u32 txd = 0, stat, trials; in mantis_i2c_write() local
82 txd = (msg->addr << 25) | (msg->buf[i] << 8) in mantis_i2c_write()
88 txd &= ~MANTIS_I2C_STOP; in mantis_i2c_write()
91 mmwrite(txd, MANTIS_I2CDATA_CTL); in mantis_i2c_write()
119 u32 stat, data, txd; in mantis_i2c_xfer() local
142 txd = msgs[i].addr << 25 | (0x1 << 24) in mantis_i2c_xfer()
146 mmwrite(txd, MANTIS_I2CDATA_CTL); in mantis_i2c_xfer()
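
In mantis_i2c.c, txd is not a DMA descriptor but the 32-bit I2C data/control word written to MANTIS_I2CDATA_CTL: the slave address goes in the top bits (addr << 25), what appears to be a read/direction flag sits at bit 24, the data byte occupies bits 8-15, and MANTIS_I2C_STOP can be masked off when no stop condition is wanted. A small sketch packing such a word, using only the shifts visible in the snippets above (other control bits are hypothetical):

#include <stdint.h>
#include <stdio.h>

#define MANTIS_I2C_READ_BIT  (1u << 24)   /* the (0x1 << 24) seen in mantis_i2c_xfer() */

/* Pack a write word: slave address in the top bits, data byte at bits 8-15. */
static uint32_t mantis_pack_write(uint8_t addr, uint8_t data)
{
	return ((uint32_t)addr << 25) | ((uint32_t)data << 8);
}

/* Pack a read word: address plus the bit-24 flag used for reads. */
static uint32_t mantis_pack_read(uint8_t addr)
{
	return ((uint32_t)addr << 25) | MANTIS_I2C_READ_BIT;
}

int main(void)
{
	printf("write word: 0x%08x\n", mantis_pack_write(0x50, 0xAB));
	printf("read word:  0x%08x\n", mantis_pack_read(0x50));
	return 0;
}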
/linux-6.12.1/Documentation/devicetree/bindings/arm/marvell/
cp110-system-controller.txt
92 mpp1 1 gpio, dev(ale0), au(i2sdo_spdifo), ge0(rxd2), tdm(drx), ptp(clk), mss_i2c(sck), uart0(txd), …
94 …ev(ad14), au(i2slrclk), ge0(rxd0), tdm(fsync), mss_uart(txd), pcie(rstoutn), i2c1(sda), uart1(txd)…
96 … dev(ad12), au(i2sdi), ge0(rxclk), tdm(intn), mss_uart(txd), uart1(rts), pcie1(clkreq), uart3(txd)…
98 mpp7 7 gpio, dev(ad10), ge0(txd2), spi0(csn1), spi1(csn1), sata0(present_act), led(data), uart0(txd
102 mpp11 11 gpio, dev(wen1), ge0(txclkout), spi0(clk), spi1(clk), uart0(rts), led(clk), uart2(txd), sa…
121 …), spi0(csn7), pcie0(clkreq), ptp(pclk_out), mss_i2c(sck), sata1(present_act), uart0(txd), led(clk)
129 mpp38 38 gpio, uart2(txd), i2c0(sda), ptp(pulse), tdm(rstn), mss_i2c(sda), sata0(present_act), ge(m…
131 …nce1(clk), mss_i2c(sda), au(i2sdo_spdifo), ptp(pclk_out), spi0(clk), uart1(txd), ge(mdio), sata0(p…
133 mpp42 42 gpio, sdio(v18_en), sdio(wr_protect), synce2(clk), au(i2smclk), mss_uart(txd), spi0(miso),…
136 mpp45 45 gpio, ge1(txd3), uart0(txd), pcie(rstoutn)
[all …]
/linux-6.12.1/drivers/net/ethernet/aquantia/atlantic/hw_atl/
hw_atl_a0.c
442 struct hw_atl_txd_s *txd = NULL; in hw_atl_a0_hw_ring_tx_xmit() local
452 txd = (struct hw_atl_txd_s *)&ring->dx_ring[ring->sw_tail * in hw_atl_a0_hw_ring_tx_xmit()
454 txd->ctl = 0; in hw_atl_a0_hw_ring_tx_xmit()
455 txd->ctl2 = 0; in hw_atl_a0_hw_ring_tx_xmit()
456 txd->buf_addr = 0; in hw_atl_a0_hw_ring_tx_xmit()
461 txd->ctl |= (buff->len_l3 << 31) | in hw_atl_a0_hw_ring_tx_xmit()
465 txd->ctl2 |= (buff->mss << 16) | in hw_atl_a0_hw_ring_tx_xmit()
475 txd->ctl |= HW_ATL_A0_TXD_CTL_CMD_IPV6; in hw_atl_a0_hw_ring_tx_xmit()
479 txd->buf_addr = buff->pa; in hw_atl_a0_hw_ring_tx_xmit()
480 txd->ctl |= (HW_ATL_A0_TXD_CTL_BLEN & in hw_atl_a0_hw_ring_tx_xmit()
[all …]
/linux-6.12.1/drivers/dma/dw/
core.c
73 __func__, desc->txd.cookie); in dwc_tx_submit()
90 dma_async_tx_descriptor_init(&desc->txd, &dwc->chan); in dwc_desc_get()
91 desc->txd.tx_submit = dwc_tx_submit; in dwc_desc_get()
92 desc->txd.flags = DMA_CTRL_ACK; in dwc_desc_get()
93 desc->txd.phys = phys; in dwc_desc_get()
107 dma_pool_free(dw->desc_pool, child, child->txd.phys); in dwc_desc_put()
111 dma_pool_free(dw->desc_pool, desc, desc->txd.phys); in dwc_desc_put()
211 channel_writel(dwc, LLP, first->txd.phys | lms); in dwc_dostart()
226 dev_vdbg(chan2dev(&dwc->chan), "%s: started %u\n", __func__, desc->txd.cookie); in dwc_dostart_first_queued()
236 struct dma_async_tx_descriptor *txd = &desc->txd; in dwc_descriptor_complete() local
[all …]
/linux-6.12.1/drivers/dma/ioat/
prep.c
139 desc->txd.flags = flags; in ioat_dma_prep_memcpy_lock()
147 return &desc->txd; in ioat_dma_prep_memcpy_lock()
223 desc->txd.flags = flags; in __ioat_prep_xor_lock()
231 compl_desc->txd.flags = flags & DMA_PREP_INTERRUPT; in __ioat_prep_xor_lock()
241 return &compl_desc->txd; in __ioat_prep_xor_lock()
289 desc_id(desc), (unsigned long long) desc->txd.phys, in dump_pq_desc_dbg()
291 desc->txd.flags, pq->size, pq->ctl, pq->ctl_f.op, in dump_pq_desc_dbg()
322 desc_id(desc), (unsigned long long) desc->txd.phys, in dump_pq16_desc_dbg()
324 desc->txd.flags, pq->size, pq->ctl, in dump_pq16_desc_dbg()
433 desc->txd.flags = flags; in __ioat_prep_pq_lock()
[all …]
