
Search results for refs:sg_len (1 – 25 of 177), sorted by relevance

/linux-6.12.1/drivers/dma/sh/
usb-dmac.c
55 unsigned int sg_len; member
263 static int usb_dmac_desc_alloc(struct usb_dmac_chan *chan, unsigned int sg_len, in usb_dmac_desc_alloc() argument
269 desc = kzalloc(struct_size(desc, sg, sg_len), gfp); in usb_dmac_desc_alloc()
273 desc->sg_allocated_len = sg_len; in usb_dmac_desc_alloc()
299 unsigned int sg_len, gfp_t gfp) in usb_dmac_desc_get() argument
307 if (sg_len <= desc->sg_allocated_len) { in usb_dmac_desc_get()
316 if (!usb_dmac_desc_alloc(chan, sg_len, gfp)) { in usb_dmac_desc_get()
417 unsigned int sg_len, enum dma_transfer_direction dir, in usb_dmac_prep_slave_sg() argument
425 if (!sg_len) { in usb_dmac_prep_slave_sg()
427 "%s: bad parameter: len=%d\n", __func__, sg_len); in usb_dmac_prep_slave_sg()
[all …]
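
The usb-dmac.c hits above show the flexible-array-member idiom: the descriptor ends in an sg[] array sized at allocation time from sg_len, and the allocated count is cached so a later request can reuse the descriptor. A minimal sketch of that pattern, with illustrative names (my_desc, my_desc_alloc) rather than the driver's own:

#include <linux/overflow.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

struct my_desc {
	unsigned int sg_allocated_len;	/* sg slots allocated below */
	struct scatterlist sg[];	/* flexible array, sized per request */
};

static struct my_desc *my_desc_alloc(unsigned int sg_len, gfp_t gfp)
{
	struct my_desc *desc;

	/* one kzalloc covers the header plus sg_len trailing entries */
	desc = kzalloc(struct_size(desc, sg, sg_len), gfp);
	if (desc)
		desc->sg_allocated_len = sg_len;
	return desc;
}

The usb_dmac_desc_get() hit then reuses any free descriptor whose sg_allocated_len already covers the request's sg_len, allocating a new one only when none fits.
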
shdma-base.c
561 struct scatterlist *sgl, unsigned int sg_len, dma_addr_t *addr, in shdma_prep_sg() argument
571 for_each_sg(sgl, sg, sg_len, i) in shdma_prep_sg()
588 for_each_sg(sgl, sg, sg_len, i) { in shdma_prep_sg()
662 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, in shdma_prep_slave_sg() argument
677 if (slave_id < 0 || !sg_len) { in shdma_prep_slave_sg()
679 __func__, sg_len, slave_id); in shdma_prep_slave_sg()
685 return shdma_prep_sg(schan, sgl, sg_len, &slave_addr, in shdma_prep_slave_sg()
700 unsigned int sg_len = buf_len / period_len; in shdma_prep_dma_cyclic() local
711 if (sg_len > SHDMA_MAX_SG_LEN) { in shdma_prep_dma_cyclic()
713 sg_len, SHDMA_MAX_SG_LEN); in shdma_prep_dma_cyclic()
[all …]
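
The shdma_prep_dma_cyclic() hit shows the standard cyclic-DMA geometry: the ring needs one scatterlist entry per period, so sg_len = buf_len / period_len, rejected when it exceeds the controller's SHDMA_MAX_SG_LEN. A hedged sketch of that check; the divisibility guard is a common companion but is not visible in the truncated hit:

#include <linux/types.h>

static unsigned int cyclic_sg_len(size_t buf_len, size_t period_len,
				  unsigned int max_sg_len)
{
	unsigned int sg_len;

	if (!period_len || buf_len % period_len)
		return 0;	/* periods must tile the buffer exactly */

	sg_len = buf_len / period_len;	/* one entry per period */
	return sg_len <= max_sg_len ? sg_len : 0;
}

For example, a 64 KiB audio ring with 4 KiB periods yields sg_len = 16.
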
/linux-6.12.1/drivers/mmc/core/
mmc_test.c
82 unsigned int sg_len; member
162 unsigned int *sg_len; member
210 struct mmc_request *mrq, struct scatterlist *sg, unsigned sg_len, in mmc_test_prepare_mrq() argument
242 mrq->data->sg_len = sg_len; in mmc_test_prepare_mrq()
411 unsigned int *sg_len, int min_sg_len) in mmc_test_map_sg() argument
421 *sg_len = 0; in mmc_test_map_sg()
440 *sg_len += 1; in mmc_test_map_sg()
464 unsigned int *sg_len) in mmc_test_map_sg_max_scatter() argument
473 *sg_len = 0; in mmc_test_map_sg_max_scatter()
495 *sg_len += 1; in mmc_test_map_sg_max_scatter()
[all …]
sdio_ops.c
158 data.sg_len = nents; in mmc_io_rw_extended()
160 for_each_sg(data.sg, sg_ptr, data.sg_len, i) { in mmc_io_rw_extended()
167 data.sg_len = 1; in mmc_io_rw_extended()
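
For context on the sdio_ops.c hit: an MMC data transfer hands the core a scatterlist plus sg_len, the number of valid entries, and mmc_io_rw_extended() falls back to sg_len = 1 for a single contiguous buffer. A hedged sketch of that single-buffer case (fill_mmc_data is an illustrative name, not a core helper):

#include <linux/mmc/core.h>
#include <linux/scatterlist.h>

static void fill_mmc_data(struct mmc_data *data, struct scatterlist *sg,
			  void *buf, unsigned int len)
{
	sg_init_one(sg, buf, len);	/* one contiguous segment */
	data->sg = sg;
	data->sg_len = 1;		/* mirrors the fallback above */
}
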
/linux-6.12.1/drivers/mmc/host/
uniphier-sd.c
109 dma_unmap_sg(mmc_dev(host->mmc), host->sg_ptr, host->sg_len, in uniphier_sd_external_dma_callback()
138 int sg_len; in uniphier_sd_external_dma_start() local
151 sg_len = dma_map_sg(mmc_dev(host->mmc), host->sg_ptr, host->sg_len, in uniphier_sd_external_dma_start()
153 if (sg_len == 0) in uniphier_sd_external_dma_start()
156 desc = dmaengine_prep_slave_sg(priv->chan, host->sg_ptr, sg_len, in uniphier_sd_external_dma_start()
173 dma_unmap_sg(mmc_dev(host->mmc), host->sg_ptr, host->sg_len, in uniphier_sd_external_dma_start()
260 int sg_len; in uniphier_sd_internal_dma_start() local
265 if (WARN_ON(host->sg_len != 1)) in uniphier_sd_internal_dma_start()
279 sg_len = dma_map_sg(mmc_dev(host->mmc), sg, 1, priv->dma_dir); in uniphier_sd_internal_dma_start()
280 if (sg_len == 0) in uniphier_sd_internal_dma_start()
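
The uniphier-sd.c hits trace the canonical dmaengine slave flow: map the scatterlist, hand the mapped count to dmaengine_prep_slave_sg(), and unmap on failure. Note that dma_map_sg() may coalesce entries, so the prep call takes its return value while dma_unmap_sg() must get the original nents. A minimal sketch under those assumptions (start_slave_dma is an illustrative name):

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/errno.h>

static int start_slave_dma(struct dma_chan *chan, struct device *dev,
			   struct scatterlist *sg, unsigned int nents,
			   enum dma_transfer_direction dir)
{
	enum dma_data_direction map_dir =
		dir == DMA_MEM_TO_DEV ? DMA_TO_DEVICE : DMA_FROM_DEVICE;
	struct dma_async_tx_descriptor *desc;
	int sg_len;

	sg_len = dma_map_sg(dev, sg, nents, map_dir);
	if (sg_len == 0)
		return -EINVAL;

	/* the mapped count goes to the prep; entries may have merged */
	desc = dmaengine_prep_slave_sg(chan, sg, sg_len, dir,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc) {
		/* unmap with the original nents, never the mapped count */
		dma_unmap_sg(dev, sg, nents, map_dir);
		return -EBUSY;
	}

	dmaengine_submit(desc);
	dma_async_issue_pending(chan);
	return 0;
}
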
au1xmmc.c
337 dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, host->dma.dir); in au1xmmc_data_complete()
380 int sg_len, max, count; in au1xmmc_send_pio() local
395 sg_len = data->sg[host->pio.index].length - host->pio.offset; in au1xmmc_send_pio()
398 max = (sg_len > host->pio.len) ? host->pio.len : sg_len; in au1xmmc_send_pio()
418 if (count == sg_len) { in au1xmmc_send_pio()
436 int max, count, sg_len = 0; in au1xmmc_receive_pio() local
453 sg_len = sg_dma_len(&data->sg[host->pio.index]) - host->pio.offset; in au1xmmc_receive_pio()
456 if (sg_len < max) in au1xmmc_receive_pio()
457 max = sg_len; in au1xmmc_receive_pio()
497 if (sg_len && count == sg_len) { in au1xmmc_receive_pio()
[all …]
mxs-mmc.c
147 data->sg_len, ssp->dma_dir); in mxs_mmc_request_done()
220 unsigned int sg_len; in mxs_mmc_prep_dma() local
225 data->sg_len, ssp->dma_dir); in mxs_mmc_prep_dma()
227 sg_len = data->sg_len; in mxs_mmc_prep_dma()
231 sg_len = SSP_PIO_NUM; in mxs_mmc_prep_dma()
235 sgl, sg_len, ssp->slave_dirn, flags); in mxs_mmc_prep_dma()
242 data->sg_len, ssp->dma_dir); in mxs_mmc_prep_dma()
349 unsigned int sg_len = data->sg_len; in mxs_mmc_adtc() local
394 for_each_sg(sgl, sg, sg_len, i) in mxs_mmc_adtc()
renesas_sdhi_sys_dmac.c
139 host->sg_ptr, host->sg_len, in renesas_sdhi_sys_dmac_dma_callback()
143 host->sg_ptr, host->sg_len, in renesas_sdhi_sys_dmac_dma_callback()
167 for_each_sg(sg, sg_tmp, host->sg_len, i) { in renesas_sdhi_sys_dmac_start_dma_rx()
176 if ((!aligned && (host->sg_len > 1 || sg->length > PAGE_SIZE || in renesas_sdhi_sys_dmac_start_dma_rx()
192 ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_FROM_DEVICE); in renesas_sdhi_sys_dmac_start_dma_rx()
239 for_each_sg(sg, sg_tmp, host->sg_len, i) { in renesas_sdhi_sys_dmac_start_dma_tx()
248 if ((!aligned && (host->sg_len > 1 || sg->length > PAGE_SIZE || in renesas_sdhi_sys_dmac_start_dma_tx()
268 ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_TO_DEVICE); in renesas_sdhi_sys_dmac_start_dma_tx()
mmci_stm32_sdmmc.c
94 for_each_sg(data->sg, sg, data->sg_len - 1, i) { in sdmmc_idma_validate_data()
140 sg_copy_to_buffer(data->sg, data->sg_len, in _sdmmc_idma_prep_data()
149 data->sg_len, in _sdmmc_idma_prep_data()
179 sg_copy_from_buffer(data->sg, data->sg_len, in sdmmc_idma_unprep_data()
183 dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in sdmmc_idma_unprep_data()
231 if (!host->variant->dma_lli || data->sg_len == 1 || in sdmmc_idma_start()
247 for_each_sg(data->sg, sg, data->sg_len, i) { in sdmmc_idma_start()
256 desc[data->sg_len - 1].idmalar &= ~MMCI_STM32_ULA; in sdmmc_idma_start()
282 dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in sdmmc_idma_error()
tifm_sd.c
105 int sg_len; member
174 if (host->sg_pos == host->sg_len) in tifm_sd_transfer_data()
181 if (host->sg_pos == host->sg_len) { in tifm_sd_transfer_data()
237 if (host->sg_pos == host->sg_len) in tifm_sd_bounce_block()
269 if (host->sg_pos == host->sg_len) in tifm_sd_set_dma_data()
275 if (host->sg_pos == host->sg_len) in tifm_sd_set_dma_data()
283 if (host->sg_pos == host->sg_len) in tifm_sd_set_dma_data()
658 host->sg_len = r_data->sg_len; in tifm_sd_request()
672 host->sg_len = tifm_map_sg(sock, r_data->sg, in tifm_sd_request()
673 r_data->sg_len, in tifm_sd_request()
[all …]
omap.c
153 unsigned int sg_len; member
436 dma_unmap_sg(dev, data->sg, host->sg_len, dma_data_dir); in mmc_omap_release_dma()
462 host->sg_len = 0; in mmc_omap_xfer_done()
519 host->sg_len = 0; in mmc_omap_abort_xfer()
955 unsigned sg_len; in mmc_omap_prepare_data() local
976 sg_len = (data->blocks == 1) ? 1 : data->sg_len; in mmc_omap_prepare_data()
979 for_each_sg(data->sg, sg, sg_len, i) { in mmc_omap_prepare_data()
1039 host->sg_len = dma_map_sg(c->device->dev, data->sg, sg_len, in mmc_omap_prepare_data()
1041 if (host->sg_len == 0) in mmc_omap_prepare_data()
1044 tx = dmaengine_prep_slave_sg(c, data->sg, host->sg_len, in mmc_omap_prepare_data()
[all …]
cavium.c
377 dma_unmap_sg(host->dev, data->sg, data->sg_len, get_dma_dir(data)); in finish_dma_single()
397 dma_unmap_sg(host->dev, data->sg, data->sg_len, get_dma_dir(data)); in finish_dma_sg()
403 if (host->use_sg && data->sg_len > 1) in finish_dma()
521 count = dma_map_sg(host->dev, data->sg, data->sg_len, in prepare_dma_single()
558 count = dma_map_sg(host->dev, data->sg, data->sg_len, in prepare_dma_sg()
611 dma_unmap_sg(host->dev, data->sg, data->sg_len, get_dma_dir(data)); in prepare_dma_sg()
619 if (host->use_sg && data->sg_len > 1) in prepare_dma()
657 if (!mrq->data || !mrq->data->sg || !mrq->data->sg_len || in cvm_mmc_dma_request()
709 sg_miter_start(&host->smi, mrq->data->sg, mrq->data->sg_len, in do_read_request()
722 sg_miter_start(smi, mrq->data->sg, mrq->data->sg_len, SG_MITER_FROM_SG); in do_write_request()
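
The cavium.c PIO hits use the sg_miter helpers, which map one scatterlist segment at a time so the code never assumes the buffer is contiguous; sg_len bounds the walk. A hedged sketch of the write-side pattern (copy_words_to_sg and its parameters are illustrative):

#include <linux/minmax.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/types.h>

static void copy_words_to_sg(struct scatterlist *sg, unsigned int sg_len,
			     const u32 *src, size_t words)
{
	struct sg_mapping_iter smi;

	sg_miter_start(&smi, sg, sg_len, SG_MITER_TO_SG);
	while (words && sg_miter_next(&smi)) {
		/* smi.addr/smi.length describe the currently mapped chunk */
		size_t n = min_t(size_t, words, smi.length / sizeof(u32));

		memcpy(smi.addr, src, n * sizeof(u32));
		src += n;
		words -= n;
	}
	sg_miter_stop(&smi);
}
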
davinci_mmc.c
194 unsigned int sg_len; member
410 host->sg_len, in mmc_davinci_send_dma_request()
432 host->sg_len, in mmc_davinci_send_dma_request()
457 host->sg_len = dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in mmc_davinci_start_dma_transfer()
461 for (i = 0; i < host->sg_len; i++) { in mmc_davinci_start_dma_transfer()
464 data->sg, data->sg_len, in mmc_davinci_start_dma_transfer()
571 host->sg_len = data->sg_len; in mmc_davinci_prepare_data()
572 sg_miter_start(&host->sg_miter, data->sg, data->sg_len, flags); in mmc_davinci_prepare_data()
777 dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in mmc_davinci_xfer_done()
/linux-6.12.1/drivers/dma/
loongson1-apb-dma.c
217 struct scatterlist *sgl, unsigned int sg_len, in ls1x_dma_prep_lli() argument
244 for_each_sg(sgl, sg, sg_len, i) { in ls1x_dma_prep_lli()
297 unsigned int sg_len, enum dma_transfer_direction dir, in ls1x_dma_prep_slave_sg() argument
303 sg_len, flags, dmaengine_get_direction_text(dir)); in ls1x_dma_prep_slave_sg()
309 if (ls1x_dma_prep_lli(dchan, desc, sgl, sg_len, dir, false)) { in ls1x_dma_prep_slave_sg()
324 unsigned int sg_len; in ls1x_dma_prep_dma_cyclic() local
337 sg_len = buf_len / period_len; in ls1x_dma_prep_dma_cyclic()
338 sgl = kmalloc_array(sg_len, sizeof(*sgl), GFP_NOWAIT); in ls1x_dma_prep_dma_cyclic()
342 sg_init_table(sgl, sg_len); in ls1x_dma_prep_dma_cyclic()
343 for (i = 0; i < sg_len; ++i) { in ls1x_dma_prep_dma_cyclic()
[all …]
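
The loongson1-apb-dma cyclic hit builds a throwaway scatterlist purely to reuse its slave-sg path, one entry per period. A hedged sketch of that construction; buf_addr is assumed to be an already-mapped DMA address, so only the DMA fields are filled:

#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct scatterlist *make_period_sgl(dma_addr_t buf_addr,
					   size_t period_len,
					   unsigned int sg_len)
{
	struct scatterlist *sgl;
	unsigned int i;

	sgl = kmalloc_array(sg_len, sizeof(*sgl), GFP_NOWAIT);
	if (!sgl)
		return NULL;

	sg_init_table(sgl, sg_len);
	for (i = 0; i < sg_len; i++) {
		/* entry i covers period i of the ring buffer */
		sg_dma_address(&sgl[i]) = buf_addr + i * period_len;
		sg_dma_len(&sgl[i]) = period_len;
	}
	return sgl;
}
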
st_fdma.c
239 int sg_len) in st_fdma_alloc_desc() argument
244 fdesc = kzalloc(struct_size(fdesc, node, sg_len), GFP_NOWAIT); in st_fdma_alloc_desc()
249 fdesc->n_nodes = sg_len; in st_fdma_alloc_desc()
250 for (i = 0; i < sg_len; i++) { in st_fdma_alloc_desc()
455 int sg_len, i; in st_fdma_prep_dma_cyclic() local
475 sg_len = len / period_len; in st_fdma_prep_dma_cyclic()
476 fdesc = st_fdma_alloc_desc(fchan, sg_len); in st_fdma_prep_dma_cyclic()
484 for (i = 0; i < sg_len; i++) { in st_fdma_prep_dma_cyclic()
487 hw_node->next = fdesc->node[(i + 1) % sg_len].pdesc; in st_fdma_prep_dma_cyclic()
509 unsigned int sg_len, enum dma_transfer_direction direction, in st_fdma_prep_slave_sg() argument
[all …]
milbeaut-hdmac.c
62 unsigned int sg_len; member
193 if (md->sg_cur >= md->sg_len) { in milbeaut_hdmac_interrupt()
257 unsigned int sg_len, in milbeaut_hdmac_prep_slave_sg() argument
272 md->sgl = kcalloc(sg_len, sizeof(*sgl), GFP_NOWAIT); in milbeaut_hdmac_prep_slave_sg()
278 for (i = 0; i < sg_len; i++) in milbeaut_hdmac_prep_slave_sg()
281 md->sg_len = sg_len; in milbeaut_hdmac_prep_slave_sg()
371 for (i = md->sg_cur; i < md->sg_len; i++) in milbeaut_hdmac_tx_status()
fsl-edma-common.c
545 int sg_len) in fsl_edma_alloc_desc() argument
550 fsl_desc = kzalloc(struct_size(fsl_desc, tcd, sg_len), GFP_NOWAIT); in fsl_edma_alloc_desc()
555 fsl_desc->n_tcds = sg_len; in fsl_edma_alloc_desc()
556 for (i = 0; i < sg_len; i++) { in fsl_edma_alloc_desc()
581 int sg_len, i; in fsl_edma_prep_dma_cyclic() local
592 sg_len = buf_len / period_len; in fsl_edma_prep_dma_cyclic()
593 fsl_desc = fsl_edma_alloc_desc(fsl_chan, sg_len); in fsl_edma_prep_dma_cyclic()
614 for (i = 0; i < sg_len; i++) { in fsl_edma_prep_dma_cyclic()
619 last_sg = fsl_desc->tcd[(i + 1) % sg_len].ptcd; in fsl_edma_prep_dma_cyclic()
650 unsigned int sg_len, enum dma_transfer_direction direction, in fsl_edma_prep_slave_sg() argument
[all …]
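
Both the st_fdma and fsl-edma cyclic hits close their descriptor chain with the same modulo trick: element i links to element (i + 1) % sg_len, so the last node points back at the first and the controller loops forever. A minimal sketch with illustrative node/next/phys names:

#include <linux/types.h>

struct ring_node {
	dma_addr_t phys;	/* bus address the hardware fetches from */
	dma_addr_t next;	/* link the controller follows */
};

static void link_ring(struct ring_node *node, unsigned int sg_len)
{
	unsigned int i;

	for (i = 0; i < sg_len; i++)
		node[i].next = node[(i + 1) % sg_len].phys; /* wraps to node[0] */
}
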
uniphier-mdmac.c
54 unsigned int sg_len; member
209 if (md->sg_cur >= md->sg_len) { in uniphier_mdmac_interrupt()
231 unsigned int sg_len, in uniphier_mdmac_prep_slave_sg() argument
246 md->sg_len = sg_len; in uniphier_mdmac_prep_slave_sg()
319 for (i = md->sg_cur; i < md->sg_len; i++) in uniphier_mdmac_tx_status()
ste_dma40_ll.c
267 int sg_len, in d40_phy_sg_to_lli() argument
285 for_each_sg(sg, current_sg, sg_len, i) { in d40_phy_sg_to_lli()
292 if (i == sg_len - 1) in d40_phy_sg_to_lli()
419 int sg_len, in d40_log_sg_to_lli() argument
434 for_each_sg(sg, current_sg, sg_len, i) { in d40_log_sg_to_lli()
at_hdmac.c
979 size_t sg_len; in atc_prep_dma_memcpy() local
994 sg_len = DIV_ROUND_UP(len, ATC_BTSIZE_MAX); in atc_prep_dma_memcpy()
995 desc = kzalloc(struct_size(desc, sg, sg_len), GFP_ATOMIC); in atc_prep_dma_memcpy()
998 desc->sglen = sg_len; in atc_prep_dma_memcpy()
1164 unsigned int sg_len, int value, in atc_prep_dma_memset_sg() argument
1178 value, sg_len, flags); in atc_prep_dma_memset_sg()
1180 if (unlikely(!sgl || !sg_len)) { in atc_prep_dma_memset_sg()
1194 desc = kzalloc(struct_size(desc, sg, sg_len), GFP_ATOMIC); in atc_prep_dma_memset_sg()
1197 desc->sglen = sg_len; in atc_prep_dma_memset_sg()
1199 for_each_sg(sgl, sg, sg_len, i) { in atc_prep_dma_memset_sg()
[all …]
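
The atc_prep_dma_memcpy() hit sizes its descriptor by how many maximum-burst chunks the copy needs: sg_len = DIV_ROUND_UP(len, ATC_BTSIZE_MAX). With a hypothetical 64 KiB per-chunk ceiling (the real ATC_BTSIZE_MAX is hardware-defined), a 100 KiB copy needs DIV_ROUND_UP(102400, 65536) = 2 entries: one full chunk plus a 36 KiB remainder.
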
ls2x-apb-dma.c
322 u32 sg_len, enum dma_transfer_direction direction, in ls2x_dma_prep_slave_sg() argument
331 if (unlikely(!sg_len || !is_slave_direction(direction))) in ls2x_dma_prep_slave_sg()
338 desc = kzalloc(struct_size(desc, sg, sg_len), GFP_NOWAIT); in ls2x_dma_prep_slave_sg()
342 desc->desc_num = sg_len; in ls2x_dma_prep_slave_sg()
346 for_each_sg(sgl, sg, sg_len, i) { in ls2x_dma_prep_slave_sg()
364 desc->sg[sg_len - 1].hw->ndesc_addr &= ~LDMA_DESC_EN; in ls2x_dma_prep_slave_sg()
/linux-6.12.1/net/rds/
ib_frmr.c
114 atomic_add(ibmr->sg_len, &pool->free_pinned); in rds_ib_free_frmr()
190 struct scatterlist *sg, unsigned int sg_len) in rds_ib_map_frmr() argument
204 ibmr->sg_len = sg_len; in rds_ib_map_frmr()
208 ibmr->sg_dma_len = ib_dma_map_sg(dev, ibmr->sg, ibmr->sg_len, in rds_ib_map_frmr()
260 ib_dma_unmap_sg(rds_ibdev->dev, ibmr->sg, ibmr->sg_len, in rds_ib_map_frmr()
379 *unpinned += ibmr->sg_len; in rds_ib_unreg_frmr()
ib_rdma.c
244 ibmr->sg, ibmr->sg_len, in __rds_ib_teardown_mr()
250 if (ibmr->sg_len) { in __rds_ib_teardown_mr()
253 for (i = 0; i < ibmr->sg_len; ++i) { in __rds_ib_teardown_mr()
265 ibmr->sg_len = 0; in __rds_ib_teardown_mr()
271 unsigned int pinned = ibmr->sg_len; in rds_ib_teardown_mr()
490 rdsdebug("RDS/IB: free_mr nents %u\n", ibmr->sg_len); in rds_ib_free_mr()
505 atomic_add(ibmr->sg_len, &pool->free_pinned); in rds_ib_free_mr()
/linux-6.12.1/drivers/crypto/nx/
nx.c
83 unsigned int sg_len = 0; in nx_build_sg_list() local
108 for (sg = sg_head; sg_len < *len; sg++) { in nx_build_sg_list()
117 sg_len += sg->len; in nx_build_sg_list()
120 is_vmalloc_addr(start_addr + sg_len)) { in nx_build_sg_list()
122 start_addr + sg_len)); in nx_build_sg_list()
123 end_addr = sg_addr + *len - sg_len; in nx_build_sg_list()
133 *len = sg_len; in nx_build_sg_list()
/linux-6.12.1/drivers/crypto/chelsio/
chcr_algo.c
1582 param->sg_len) <= SGE_MAX_WR_LEN; in create_hash_wr()
1583 nents = sg_nents_xlen(req_ctx->hctx_wr.srcsg, param->sg_len, in create_hash_wr()
1587 param->sg_len, 16) : (sgl_len(nents) * 8); in create_hash_wr()
1598 chcr_req->sec_cpl.pldlen = htonl(param->bfr_len + param->sg_len); in create_hash_wr()
1644 (param->sg_len + param->bfr_len) : 0); in create_hash_wr()
1707 params.sg_len = chcr_hash_ent_in_wr(req->src, !!req_ctx->reqlen, in chcr_ahash_update()
1709 if (params.sg_len > req->nbytes) in chcr_ahash_update()
1710 params.sg_len = req->nbytes; in chcr_ahash_update()
1711 params.sg_len = rounddown(params.sg_len + req_ctx->reqlen, bs) - in chcr_ahash_update()
1721 req_ctx->data_len += params.sg_len + params.bfr_len; in chcr_ahash_update()
[all …]
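
The chcr_ahash_update() hits show partial-hash bookkeeping: the driver rounds the bytes handed to hardware down to the digest block size bs, carrying the remainder in a backlog buffer (reqlen). In params.sg_len = rounddown(params.sg_len + reqlen, bs) - reqlen, the buffered and fresh bytes together always form whole blocks: for bs = 64, 200 fresh bytes plus 13 buffered gives rounddown(213, 64) - 13 = 179, and 179 + 13 = 192 is exactly three blocks; the leftover 21 bytes wait for the next update.
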
