/linux-6.12.1/include/linux/ |
D | dmaengine.h |
    575  struct dma_async_tx_descriptor;
    578  int (*attach)(struct dma_async_tx_descriptor *desc, void *data,
    581  void *(*get_ptr)(struct dma_async_tx_descriptor *desc,
    583  int (*set_len)(struct dma_async_tx_descriptor *desc,
    610  struct dma_async_tx_descriptor {   struct
    615  dma_cookie_t (*tx_submit)(struct dma_async_tx_descriptor *tx);   argument
    616  int (*desc_free)(struct dma_async_tx_descriptor *tx);   argument
    624  struct dma_async_tx_descriptor *next;   argument
    625  struct dma_async_tx_descriptor *parent;   argument
    631  static inline void dma_set_unmap(struct dma_async_tx_descriptor *tx,   in dma_set_unmap() argument
    [all …]
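The include/linux/dmaengine.h hits above cover both struct dma_async_tx_descriptor itself (lines 610-631) and the optional per-descriptor metadata ops (lines 578-583). As a rough, hedged sketch of how a client drives the attach path in client-pointer metadata mode: the channel, scatterlist and metadata buffer below are placeholders, and whether a channel supports metadata at all is provider dependent.

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

/* Sketch only: assumes "chan" came from dma_request_chan() and that the
 * provider advertises client-pointer (DESC_METADATA_CLIENT) metadata support.
 */
static int example_tx_with_metadata(struct dma_chan *chan,
				    struct scatterlist *sgl, unsigned int sg_len,
				    void *md, size_t md_len)
{
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;
	int ret;

	desc = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT);
	if (!desc)
		return -ENOMEM;

	/* Hand the caller-owned metadata buffer to the descriptor before it
	 * is submitted (the MEM_TO_DEV rule for client-pointer mode).
	 */
	ret = dmaengine_desc_attach_metadata(desc, md, md_len);
	if (ret)
		return ret;

	cookie = dmaengine_submit(desc);
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);
	return 0;
}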
|
D | async_tx.h |
    71   struct dma_async_tx_descriptor *depend_tx;
    88   static inline void async_tx_issue_pending(struct dma_async_tx_descriptor *tx)   in async_tx_issue_pending()
    112  static inline void async_tx_issue_pending(struct dma_async_tx_descriptor *tx)   in async_tx_issue_pending()
    147  struct dma_async_tx_descriptor *tx,   in init_async_submit()
    158  void async_tx_submit(struct dma_chan *chan, struct dma_async_tx_descriptor *tx,
    161  struct dma_async_tx_descriptor *
    165  struct dma_async_tx_descriptor *
    170  struct dma_async_tx_descriptor *
    175  struct dma_async_tx_descriptor *
    181  struct dma_async_tx_descriptor *
    [all …]
|
/linux-6.12.1/crypto/async_tx/ |
D | async_tx.c |
    46   struct dma_async_tx_descriptor *depend_tx = submit->depend_tx;   in __async_tx_find_channel()
    65   async_tx_channel_switch(struct dma_async_tx_descriptor *depend_tx,   in async_tx_channel_switch()
    66   struct dma_async_tx_descriptor *tx)   in async_tx_channel_switch()
    70   struct dma_async_tx_descriptor *intr_tx = (void *) ~0;   in async_tx_channel_switch()
    143  async_tx_submit(struct dma_chan *chan, struct dma_async_tx_descriptor *tx,   in async_tx_submit()
    146  struct dma_async_tx_descriptor *depend_tx = submit->depend_tx;   in async_tx_submit()
    220  struct dma_async_tx_descriptor *
    225  struct dma_async_tx_descriptor *tx;   in async_trigger_callback()
    226  struct dma_async_tx_descriptor *depend_tx = submit->depend_tx;   in async_trigger_callback()
    263  void async_tx_quiesce(struct dma_async_tx_descriptor **tx)   in async_tx_quiesce()
|
D | async_xor.c |
    22   static __async_inline struct dma_async_tx_descriptor *
    27   struct dma_async_tx_descriptor *tx = NULL;   in do_async_xor()
    181  struct dma_async_tx_descriptor *
    199  struct dma_async_tx_descriptor *tx;   in async_xor_offs()
    272  struct dma_async_tx_descriptor *
    314  struct dma_async_tx_descriptor *
    322  struct dma_async_tx_descriptor *tx = NULL;   in async_xor_val_offs()
    408  struct dma_async_tx_descriptor *
|
D | async_raid6_recov.c |
    17   static struct dma_async_tx_descriptor *
    36   struct dma_async_tx_descriptor *tx;   in async_sum_product()
    86   static struct dma_async_tx_descriptor *
    104  struct dma_async_tx_descriptor *tx;   in async_mult()
    152  static struct dma_async_tx_descriptor *
    157  struct dma_async_tx_descriptor *tx = NULL;   in __2data_recov_4()
    202  static struct dma_async_tx_descriptor *
    207  struct dma_async_tx_descriptor *tx = NULL;   in __2data_recov_5()
    293  static struct dma_async_tx_descriptor *
    298  struct dma_async_tx_descriptor *tx = NULL;   in __2data_recov_n()
    [all …]
|
D | async_pq.c |
    34   static __async_inline struct dma_async_tx_descriptor *
    41   struct dma_async_tx_descriptor *tx = NULL;   in do_async_gen_syndrome()
    176  struct dma_async_tx_descriptor *
    197  struct dma_async_tx_descriptor *tx;   in async_gen_syndrome()
    297  struct dma_async_tx_descriptor *
    304  struct dma_async_tx_descriptor *tx;   in async_syndrome_val()
|
D | async_memcpy.c |
    31   struct dma_async_tx_descriptor *
    39   struct dma_async_tx_descriptor *tx = NULL;   in async_memcpy()
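The crypto/async_tx files above implement the offload API declared in async_tx.h. A minimal sketch (not taken from the kernel) of chaining two operations through descriptor dependencies: page pointers, source count and the scribble size are placeholders, and without a capable DMA channel the calls simply fall back to synchronous CPU work.

#include <linux/async_tx.h>

static void example_copy_then_xor(struct page *dest, struct page *copy_src,
				  struct page **xor_srcs, int src_cnt,
				  size_t len)
{
	struct dma_async_tx_descriptor *tx;
	struct async_submit_ctl submit;
	addr_conv_t addr_conv[8];	/* scribble space; assumes src_cnt <= 8 */

	/* First operation: plain copy, no dependency, no callback. */
	init_async_submit(&submit, 0, NULL, NULL, NULL, addr_conv);
	tx = async_memcpy(dest, copy_src, 0, 0, len, &submit);

	/* Second operation depends on the first; zero dest before the xor. */
	init_async_submit(&submit, ASYNC_TX_XOR_ZERO_DST, tx, NULL, NULL,
			  addr_conv);
	tx = async_xor(dest, xor_srcs, 0, src_cnt, len, &submit);

	/* Wait for the chain if it was offloaded; a no-op on the sync path. */
	async_tx_quiesce(&tx);
}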
|
/linux-6.12.1/drivers/dma/ioat/ |
D | dma.h |
    188  struct dma_async_tx_descriptor txd;
    220  struct dma_async_tx_descriptor *tx, int id)   in __dump_desc_dbg()
    356  struct dma_async_tx_descriptor *
    359  struct dma_async_tx_descriptor *
    361  struct dma_async_tx_descriptor *
    364  struct dma_async_tx_descriptor *
    368  struct dma_async_tx_descriptor *
    372  struct dma_async_tx_descriptor *
    376  struct dma_async_tx_descriptor *
    379  struct dma_async_tx_descriptor *
|
D | prep.c |
    100  struct dma_async_tx_descriptor *
    151  static struct dma_async_tx_descriptor *
    244  struct dma_async_tx_descriptor *
    256  struct dma_async_tx_descriptor *
    338  static struct dma_async_tx_descriptor *
    462  static struct dma_async_tx_descriptor *
    573  struct dma_async_tx_descriptor *
    618  struct dma_async_tx_descriptor *
    646  struct dma_async_tx_descriptor *
    672  struct dma_async_tx_descriptor *
    [all …]
|
/linux-6.12.1/drivers/dma/ |
D | dmaengine.h |
    29   static inline dma_cookie_t dma_cookie_assign(struct dma_async_tx_descriptor *tx)   in dma_cookie_assign()
    52   static inline void dma_cookie_complete(struct dma_async_tx_descriptor *tx)   in dma_cookie_complete()
    114  dmaengine_desc_get_callback(struct dma_async_tx_descriptor *tx,   in dmaengine_desc_get_callback()
    160  dmaengine_desc_get_callback_invoke(struct dma_async_tx_descriptor *tx,   in dmaengine_desc_get_callback_invoke()
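These helpers live in the private drivers/dma/dmaengine.h and are what providers typically call around their tx_submit and completion paths. A hedged sketch of that pattern; my_tx_submit() and my_complete_tx() are hypothetical driver functions, and locking is left to the real driver.

#include <linux/dmaengine.h>
#include "dmaengine.h"	/* private helpers; only visible under drivers/dma/ */

/* .tx_submit callback, reached through dmaengine_submit(). */
static dma_cookie_t my_tx_submit(struct dma_async_tx_descriptor *tx)
{
	dma_cookie_t cookie;

	/* A real driver takes its channel lock around this section. */
	cookie = dma_cookie_assign(tx);		/* hands out the next cookie */
	/* ... move the descriptor onto the channel's pending list ... */
	return cookie;
}

/* Completion path, typically from the driver's tasklet or IRQ thread. */
static void my_complete_tx(struct dma_async_tx_descriptor *tx)
{
	dma_cookie_complete(tx);			/* cookie is now complete */
	dmaengine_desc_get_callback_invoke(tx, NULL);	/* client callback, if any */
}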
|
D | virt-dma.h |
    16   struct dma_async_tx_descriptor tx;
    47   extern dma_cookie_t vchan_tx_submit(struct dma_async_tx_descriptor *);
    48   extern int vchan_tx_desc_free(struct dma_async_tx_descriptor *);
    56   static inline struct dma_async_tx_descriptor *vchan_tx_prep(struct virt_dma_chan *vc,   in vchan_tx_prep()
|
D | virt-dma.c |
    14   static struct virt_dma_desc *to_virt_desc(struct dma_async_tx_descriptor *tx)   in to_virt_desc()
    19   dma_cookie_t vchan_tx_submit(struct dma_async_tx_descriptor *tx)   in vchan_tx_submit()
    49   int vchan_tx_desc_free(struct dma_async_tx_descriptor *tx)   in vchan_tx_desc_free()
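virt-dma wraps the bookkeeping shown above: the driver descriptor embeds struct virt_dma_desc (whose tx member is the dma_async_tx_descriptor), and vchan_tx_prep() installs vchan_tx_submit()/vchan_tx_desc_free(). A rough sketch of a prep callback built on it; my_vchan_desc and my_prep_dma_memcpy() are hypothetical, and the channel is assumed to have been set up with vchan_init() and given a desc_free handler.

#include <linux/dmaengine.h>
#include <linux/slab.h>
#include "virt-dma.h"	/* private header under drivers/dma/ */

struct my_vchan_desc {
	struct virt_dma_desc vd;	/* embeds the generic descriptor as vd.tx */
	dma_addr_t src;
	dma_addr_t dst;
	size_t len;
};

static struct dma_async_tx_descriptor *
my_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
		   size_t len, unsigned long flags)
{
	struct virt_dma_chan *vc = to_virt_chan(chan);
	struct my_vchan_desc *d;

	d = kzalloc(sizeof(*d), GFP_NOWAIT);	/* prep may run in atomic context */
	if (!d)
		return NULL;

	d->src = src;
	d->dst = dst;
	d->len = len;

	/* Returns &d->vd.tx with tx_submit/desc_free already wired up. */
	return vchan_tx_prep(vc, &d->vd, flags);
}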
|
D | dmaengine.c |
    1443  void dma_async_tx_descriptor_init(struct dma_async_tx_descriptor *tx,   in dma_async_tx_descriptor_init()
    1454  struct dma_async_tx_descriptor *desc, enum dma_desc_metadata_mode mode)   in desc_check_and_set_metadata_mode()
    1469  int dmaengine_desc_attach_metadata(struct dma_async_tx_descriptor *desc,   in dmaengine_desc_attach_metadata()
    1488  void *dmaengine_desc_get_metadata_ptr(struct dma_async_tx_descriptor *desc,   in dmaengine_desc_get_metadata_ptr()
    1507  int dmaengine_desc_set_metadata_len(struct dma_async_tx_descriptor *desc,   in dmaengine_desc_set_metadata_len()
    1531  dma_wait_for_async_tx(struct dma_async_tx_descriptor *tx)   in dma_wait_for_async_tx()
    1558  void dma_run_dependencies(struct dma_async_tx_descriptor *tx)   in dma_run_dependencies()
    1560  struct dma_async_tx_descriptor *dep = txd_next(tx);   in dma_run_dependencies()
    1561  struct dma_async_tx_descriptor *dep_next;   in dma_run_dependencies()
|
D | mv_xor_v2.c |
    183  struct dma_async_tx_descriptor async_tx;
    303  mv_xor_v2_tx_submit(struct dma_async_tx_descriptor *tx)   in mv_xor_v2_tx_submit()
    377  static struct dma_async_tx_descriptor *
    430  static struct dma_async_tx_descriptor *
    489  static struct dma_async_tx_descriptor *
|
D | fsl_raid.c |
    87   static dma_cookie_t fsl_re_tx_submit(struct dma_async_tx_descriptor *tx)   in fsl_re_tx_submit()
    315  static struct dma_async_tx_descriptor *fsl_re_prep_dma_genq(   in fsl_re_prep_dma_genq()
    390  static struct dma_async_tx_descriptor *fsl_re_prep_dma_xor(   in fsl_re_prep_dma_xor()
    402  static struct dma_async_tx_descriptor *fsl_re_prep_dma_pq(   in fsl_re_prep_dma_pq()
    429  struct dma_async_tx_descriptor *tx;   in fsl_re_prep_dma_pq()
    525  static struct dma_async_tx_descriptor *fsl_re_prep_dma_memcpy(   in fsl_re_prep_dma_memcpy()
|
D | mmp_pdma.c |
    83   struct dma_async_tx_descriptor async_tx;
    91   struct dma_async_tx_descriptor desc;
    339  static dma_cookie_t mmp_pdma_tx_submit(struct dma_async_tx_descriptor *tx)   in mmp_pdma_tx_submit()
    442  static struct dma_async_tx_descriptor *
    523  static struct dma_async_tx_descriptor *
    603  static struct dma_async_tx_descriptor *
    922  struct dma_async_tx_descriptor *txd = &desc->async_tx;   in dma_do_tasklet()
|
D | plx_dma.c |
    98   struct dma_async_tx_descriptor txd;
    124  static struct plx_dma_desc *to_plx_desc(struct dma_async_tx_descriptor *txd)   in to_plx_desc()
    251  static struct dma_async_tx_descriptor *plx_dma_prep_memcpy(struct dma_chan *c,   in plx_dma_prep_memcpy()
    300  static dma_cookie_t plx_dma_tx_submit(struct dma_async_tx_descriptor *desc)   in plx_dma_tx_submit()
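plx_dma.c, like mmp_pdma.c above, shows the usual embedding pattern: the hardware descriptor carries a struct dma_async_tx_descriptor and recovers itself with container_of(). A hedged sketch with a hypothetical my_hw_desc; dma_async_tx_descriptor_init() is the core helper from dmaengine.c, and the cookie handling matches the drivers/dma/dmaengine.h sketch earlier.

#include <linux/dmaengine.h>
#include <linux/container_of.h>
#include "dmaengine.h"	/* private drivers/dma/ helpers (dma_cookie_assign) */

struct my_hw_desc {
	struct dma_async_tx_descriptor txd;	/* generic part handed to clients */
	void *hw;				/* hardware-specific descriptor */
};

static struct my_hw_desc *to_my_hw_desc(struct dma_async_tx_descriptor *txd)
{
	return container_of(txd, struct my_hw_desc, txd);
}

static dma_cookie_t my_tx_submit(struct dma_async_tx_descriptor *txd)
{
	struct my_hw_desc *d = to_my_hw_desc(txd);

	/* ... queue d->hw to the engine (under the channel lock) ... */
	(void)d;
	return dma_cookie_assign(txd);
}

/* Typically run once per descriptor in .device_alloc_chan_resources(). */
static void my_init_desc(struct my_hw_desc *d, struct dma_chan *chan)
{
	dma_async_tx_descriptor_init(&d->txd, chan);
	d->txd.tx_submit = my_tx_submit;
}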
|
/linux-6.12.1/drivers/dma/idxd/ |
D | dma.c |
    28   struct dma_async_tx_descriptor *tx;   in idxd_dma_complete_txd()
    85   static struct dma_async_tx_descriptor *
    106  static struct dma_async_tx_descriptor *
    171  static dma_cookie_t idxd_dma_tx_submit(struct dma_async_tx_descriptor *tx)   in idxd_dma_tx_submit()
|
/linux-6.12.1/Documentation/driver-api/dmaengine/ |
D | client.rst |
    104  struct dma_async_tx_descriptor *dmaengine_prep_slave_sg(
    109  struct dma_async_tx_descriptor *dmaengine_prep_peripheral_dma_vec(
    114  struct dma_async_tx_descriptor *dmaengine_prep_dma_cyclic(
    118  struct dma_async_tx_descriptor *dmaengine_prep_interleaved_dma(
    178  int dmaengine_desc_attach_metadata(struct dma_async_tx_descriptor *desc,
    197  void *dmaengine_desc_get_metadata_ptr(struct dma_async_tx_descriptor *desc,
    200  int dmaengine_desc_set_metadata_len(struct dma_async_tx_descriptor *desc,
    269  dma_cookie_t dmaengine_submit(struct dma_async_tx_descriptor *desc)
    280  (``struct dma_async_tx_descriptor``) belongs to the DMA engine.
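client.rst documents the consumer side of the descriptor: prepare, set the completion callback, submit, then start the channel. A condensed sketch of that flow; the channel, scatterlist and completion object are placeholders owned by the real client.

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>
#include <linux/completion.h>

static void my_dma_done(void *param)
{
	complete(param);	/* runs from the provider's completion context */
}

static int example_slave_rx(struct dma_chan *chan, struct scatterlist *sgl,
			    unsigned int sg_len, struct completion *done)
{
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;

	desc = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_DEV_TO_MEM,
				       DMA_PREP_INTERRUPT);
	if (!desc)
		return -ENOMEM;

	desc->callback = my_dma_done;
	desc->callback_param = done;

	cookie = dmaengine_submit(desc);	/* descriptor now belongs to the engine */
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);		/* nothing starts until this call */
	return 0;
}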
|
/linux-6.12.1/drivers/dma/sh/ |
D | shdma-base.c |
    70   static dma_cookie_t shdma_tx_submit(struct dma_async_tx_descriptor *tx)   in shdma_tx_submit()
    337  struct dma_async_tx_descriptor *tx = &desc->async_tx;   in __ld_cleanup()
    560  static struct dma_async_tx_descriptor *shdma_prep_sg(struct shdma_chan *schan,   in shdma_prep_sg()
    639  static struct dma_async_tx_descriptor *shdma_prep_memcpy(   in shdma_prep_memcpy()
    661  static struct dma_async_tx_descriptor *shdma_prep_slave_sg(   in shdma_prep_slave_sg()
    691  static struct dma_async_tx_descriptor *shdma_prep_dma_cyclic(   in shdma_prep_dma_cyclic()
    698  struct dma_async_tx_descriptor *desc;   in shdma_prep_dma_cyclic()
    876  struct dma_async_tx_descriptor *tx = &sdesc->async_tx;   in shdma_reset()
|
/linux-6.12.1/include/linux/dma/ |
D | mxs-dma.h |
    16   static inline struct dma_async_tx_descriptor *mxs_dmaengine_prep_pio(   in mxs_dmaengine_prep_pio()
|
/linux-6.12.1/drivers/md/ |
D | raid5-log.h |
    34   struct dma_async_tx_descriptor *
    36   struct dma_async_tx_descriptor *tx);
|
/linux-6.12.1/drivers/spi/ |
D | spi-pxa2xx-dma.c |
    66   static struct dma_async_tx_descriptor *
    139  struct dma_async_tx_descriptor *tx_desc, *rx_desc;   in pxa2xx_spi_dma_prepare()
|
/linux-6.12.1/drivers/dma/ptdma/ |
D | ptdma-dmaengine.c |
    81   struct dma_async_tx_descriptor *tx_desc;   in pt_handle_active_desc()
    211  static struct dma_async_tx_descriptor *
    224  static struct dma_async_tx_descriptor *
|
/linux-6.12.1/drivers/mmc/host/ |
D | mxs-mmc.c |
    213  static struct dma_async_tx_descriptor *mxs_mmc_prep_dma(   in mxs_mmc_prep_dma()
    217  struct dma_async_tx_descriptor *desc;   in mxs_mmc_prep_dma()
    252  struct dma_async_tx_descriptor *desc;   in mxs_mmc_bc()
    286  struct dma_async_tx_descriptor *desc;   in mxs_mmc_ac()
    347  struct dma_async_tx_descriptor *desc;   in mxs_mmc_adtc()
|