
Searched refs:in_sg (Results 1 – 25 of 27) sorted by relevance


/linux-6.12.1/drivers/crypto/nx/
nx-aes-xcbc.c
63 struct nx_sg *in_sg, *out_sg; in nx_xcbc_empty() local
81 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys, &len, in nx_xcbc_empty()
93 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
108 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys[1], &len, in nx_xcbc_empty()
121 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
172 struct nx_sg *in_sg; in nx_xcbc_update() local
195 in_sg = nx_ctx->in_sg; in nx_xcbc_update()
230 in_sg = nx_build_sg_list(nx_ctx->in_sg, in nx_xcbc_update()
241 in_sg = nx_build_sg_list(in_sg, in nx_xcbc_update()
251 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * in nx_xcbc_update()
[all …]
nx-sha256.c
109 struct nx_sg *in_sg = nx_ctx->in_sg; in nx_sha256_update() local
113 in_sg = nx_build_sg_list(in_sg, in nx_sha256_update()
122 used_sgs = in_sg - nx_ctx->in_sg; in nx_sha256_update()
137 in_sg = nx_build_sg_list(in_sg, (u8 *) data, in nx_sha256_update()
140 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha256_update()
186 struct nx_sg *in_sg, *out_sg; in nx_sha256_final() local
215 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) sctx->buf, in nx_sha256_final()
231 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha256_final()
nx-sha512.c
109 struct nx_sg *in_sg = nx_ctx->in_sg; in nx_sha512_update() local
113 in_sg = nx_build_sg_list(in_sg, in nx_sha512_update()
121 used_sgs = in_sg - nx_ctx->in_sg; in nx_sha512_update()
136 in_sg = nx_build_sg_list(in_sg, (u8 *) data, in nx_sha512_update()
139 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha512_update()
189 struct nx_sg *in_sg, *out_sg; in nx_sha512_final() local
224 in_sg = nx_build_sg_list(nx_ctx->in_sg, sctx->buf, &len, in nx_sha512_final()
236 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha512_final()
nx-aes-gcm.c
107 struct nx_sg *nx_sg = nx_ctx->in_sg; in nx_gca()
137 nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len, in nx_gca()
145 nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_sg) in nx_gca()
204 nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len, in gmac()
212 nx_ctx->op.inlen = (nx_ctx->in_sg - nx_sg) in gmac()
249 struct nx_sg *in_sg, *out_sg; in gcm_empty() local
269 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) iv, in gcm_empty()
282 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in gcm_empty()
nx.c
269 struct nx_sg *nx_insg = nx_ctx->in_sg; in nx_build_sg_lists()
294 nx_ctx->op.inlen = trim_sg_list(nx_ctx->in_sg, nx_insg, delta, nbytes); in nx_build_sg_lists()
314 nx_ctx->op.in = __pa(nx_ctx->in_sg); in nx_ctx_init()
322 nx_ctx->op_aead.in = __pa(nx_ctx->in_sg); in nx_ctx_init()
677 nx_ctx->in_sg = (struct nx_sg *)((u8 *)nx_ctx->csbcpb + NX_PAGE_SIZE); in nx_crypto_ctx_init()
678 nx_ctx->out_sg = (struct nx_sg *)((u8 *)nx_ctx->in_sg + NX_PAGE_SIZE); in nx_crypto_ctx_init()
753 nx_ctx->in_sg = NULL; in nx_crypto_ctx_exit()
nx-aes-ccm.c
161 struct nx_sg *nx_insg = nx_ctx->in_sg; in generate_pat()
249 nx_ctx->op.inlen = (nx_ctx->in_sg - nx_insg) * in generate_pat()
282 nx_insg = nx_walk_and_build(nx_ctx->in_sg, in generate_pat()
296 nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_insg) * in generate_pat()
nx.h
126 struct nx_sg *in_sg; /* aligned pointer into kmem to an sg list */ member
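
The nx/ hits above share one idiom: nx_build_sg_list() fills hardware sg entries and returns a cursor one entry past the last one used, and op.inlen is then derived from how far that cursor advanced from the head of the list. A minimal sketch of the idiom, assuming the nx.h declarations shown above; the max_sg_len parameter and error policy are illustrative, not the driver's exact code:

    #include <linux/errno.h>
    #include "nx.h"

    /* Build the hardware sg list for one flat buffer and record the
     * sg-list byte length in op.inlen, as the nx hits above do. */
    static int example_nx_fill_in_sg(struct nx_crypto_ctx *nx_ctx, u8 *buf,
                                     unsigned int buflen, u32 max_sg_len)
    {
            struct nx_sg *cursor;
            unsigned int len = buflen;

            /* returns a pointer one entry past the last sg entry used;
             * *len is updated to the number of bytes that actually fit */
            cursor = nx_build_sg_list(nx_ctx->in_sg, buf, &len, max_sg_len);
            if (len != buflen)
                    return -EINVAL;

            /* sg-list byte length = entries used * entry size */
            nx_ctx->op.inlen = (cursor - nx_ctx->in_sg) * sizeof(struct nx_sg);
            return 0;
    }
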
/linux-6.12.1/lib/
sg_split.c
80 struct scatterlist *in_sg, *out_sg; in sg_split_phys() local
84 in_sg = split->in_sg0; in sg_split_phys()
87 *out_sg = *in_sg; in sg_split_phys()
96 in_sg = sg_next(in_sg); in sg_split_phys()
106 struct scatterlist *in_sg, *out_sg; in sg_split_mapped() local
110 in_sg = split->in_sg0; in sg_split_mapped()
113 sg_dma_address(out_sg) = sg_dma_address(in_sg); in sg_split_mapped()
114 sg_dma_len(out_sg) = sg_dma_len(in_sg); in sg_split_mapped()
119 in_sg = sg_next(in_sg); in sg_split_mapped()
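
sg_split() (implemented here, declared in include/linux/scatterlist.h) clones one scatterlist into several smaller ones; sg_split_phys() copies the page/offset/length view and sg_split_mapped() the DMA view, as the hits show. A hedged sketch of a caller, assuming an unmapped input list and two split sizes that exactly cover it:

    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    /* Split @in into a head of @head_len bytes and a tail of @tail_len
     * bytes. sg_split() allocates the two output lists with the given
     * gfp mask; the caller owns and eventually kfree()s them. */
    static int example_split(struct scatterlist *in,
                             size_t head_len, size_t tail_len)
    {
            size_t sizes[2] = { head_len, tail_len };
            struct scatterlist *out[2];
            int out_mapped[2];
            int ret;

            ret = sg_split(in, 0 /* not DMA-mapped */, 0 /* skip */,
                           2, sizes, out, out_mapped, GFP_KERNEL);
            if (ret)
                    return ret;

            /* ... hand out[0] and out[1] to two separate consumers ... */
            kfree(out[0]);
            kfree(out[1]);
            return 0;
    }
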
/linux-6.12.1/drivers/crypto/
omap-des.c
147 struct scatterlist *in_sg; member
372 struct scatterlist *in_sg, struct scatterlist *out_sg, in omap_des_crypt_dma() argument
382 scatterwalk_start(&dd->in_walk, dd->in_sg); in omap_des_crypt_dma()
391 dma_sync_sg_for_device(dd->dev, dd->in_sg, in_sg_len, DMA_TO_DEVICE); in omap_des_crypt_dma()
410 tx_in = dmaengine_prep_slave_sg(dd->dma_lch_in, in_sg, in_sg_len, in omap_des_crypt_dma()
461 err = dma_map_sg(dd->dev, dd->in_sg, dd->in_sg_len, in omap_des_crypt_dma_start()
476 err = omap_des_crypt_dma(tfm, dd->in_sg, dd->out_sg, dd->in_sg_len, in omap_des_crypt_dma_start()
479 dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE); in omap_des_crypt_dma_start()
533 dd->in_sg = req->src; in omap_des_prepare_req()
541 ret = omap_crypto_align_sg(&dd->in_sg, dd->total, DES_BLOCK_SIZE, in omap_des_prepare_req()
[all …]
omap-aes.c
266 struct scatterlist *in_sg, in omap_aes_crypt_dma() argument
275 scatterwalk_start(&dd->in_walk, dd->in_sg); in omap_aes_crypt_dma()
285 dma_sync_sg_for_device(dd->dev, dd->in_sg, in_sg_len, DMA_TO_DEVICE); in omap_aes_crypt_dma()
304 tx_in = dmaengine_prep_slave_sg(dd->dma_lch_in, in_sg, in_sg_len, in omap_aes_crypt_dma()
367 err = dma_map_sg(dd->dev, dd->in_sg, dd->in_sg_len, in omap_aes_crypt_dma_start()
384 err = omap_aes_crypt_dma(dd, dd->in_sg, dd->out_sg, dd->in_sg_len, in omap_aes_crypt_dma_start()
387 dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE); in omap_aes_crypt_dma_start()
440 dd->in_sg = req->src; in omap_aes_prepare_req()
448 ret = omap_crypto_align_sg(&dd->in_sg, dd->total, AES_BLOCK_SIZE, in omap_aes_prepare_req()
460 dd->in_sg_len = sg_nents_for_len(dd->in_sg, dd->total); in omap_aes_prepare_req()
[all …]
atmel-tdes.c
105 struct scatterlist *in_sg; member
295 dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_tdes_crypt_pdc_stop()
460 in = IS_ALIGNED((u32)dd->in_sg->offset, sizeof(u32)) && in atmel_tdes_crypt_start()
461 IS_ALIGNED(dd->in_sg->length, dd->ctx->block_size); in atmel_tdes_crypt_start()
466 if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg)) in atmel_tdes_crypt_start()
472 count = min_t(size_t, dd->total, sg_dma_len(dd->in_sg)); in atmel_tdes_crypt_start()
475 err = dma_map_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_tdes_crypt_start()
485 dma_unmap_sg(dd->dev, dd->in_sg, 1, in atmel_tdes_crypt_start()
490 addr_in = sg_dma_address(dd->in_sg); in atmel_tdes_crypt_start()
497 count = atmel_tdes_sg_copy(&dd->in_sg, &dd->in_offset, in atmel_tdes_crypt_start()
[all …]
sahara.c
178 struct scatterlist *in_sg; member
212 struct scatterlist *in_sg; member
458 dev->nb_in_sg = sg_nents_for_len(dev->in_sg, dev->total); in sahara_hw_descriptor_create()
474 ret = dma_map_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_hw_descriptor_create()
490 sg = dev->in_sg; in sahara_hw_descriptor_create()
534 dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_hw_descriptor_create()
571 dev->in_sg = req->src; in sahara_aes_process()
605 dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_aes_process()
765 dev->in_sg = rctx->in_sg; in sahara_sha_hw_links_create()
767 dev->nb_in_sg = sg_nents_for_len(dev->in_sg, rctx->total); in sahara_sha_hw_links_create()
[all …]
omap-aes-gcm.c
36 dd->in_sg = NULL; in omap_aes_gcm_finish_req()
59 dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE); in omap_aes_gcm_done_task()
148 dd->in_sg = dd->in_sgl; in omap_aes_gcm_copy_buffers()
171 dd->in_sg_len = sg_nents_for_len(dd->in_sg, alen + clen); in omap_aes_gcm_copy_buffers()
omap-aes.h
181 struct scatterlist *in_sg; member
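
The omap-des, omap-aes, atmel-tdes and sahara hits all follow the standard slave-DMA shape: dma_map_sg() the in_sg list, dmaengine_prep_slave_sg() a mem-to-device transfer, dma_unmap_sg() on error or completion. A generic sketch of that flow (driver-specific channel configuration omitted; names are hypothetical):

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Map @in_sg and queue it on a slave DMA channel. On success the
     * completion callback (not shown) is expected to unmap, as in the
     * omap_aes_gcm_done_task() hit above. */
    static int example_dma_in(struct device *dev, struct dma_chan *chan,
                              struct scatterlist *in_sg, int in_nents)
    {
            struct dma_async_tx_descriptor *tx;
            int mapped;

            mapped = dma_map_sg(dev, in_sg, in_nents, DMA_TO_DEVICE);
            if (!mapped)
                    return -ENOMEM;

            tx = dmaengine_prep_slave_sg(chan, in_sg, mapped, DMA_MEM_TO_DEV,
                                         DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
            if (!tx) {
                    dma_unmap_sg(dev, in_sg, in_nents, DMA_TO_DEVICE);
                    return -EINVAL;
            }

            dmaengine_submit(tx);
            dma_async_issue_pending(chan);
            return 0;
    }
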
/linux-6.12.1/drivers/crypto/stm32/
stm32-cryp.c
213 struct scatterlist *in_sg; member
926 if (cryp->in_sg != cryp->out_sg) in stm32_cryp_dma_callback()
927 dma_unmap_sg(cryp->dev, cryp->in_sg, cryp->in_sg_len, DMA_TO_DEVICE); in stm32_cryp_dma_callback()
937 kfree(cryp->in_sg); in stm32_cryp_dma_callback()
940 if (cryp->in_sg != cryp->req->src) in stm32_cryp_dma_callback()
941 kfree(cryp->in_sg); in stm32_cryp_dma_callback()
1019 if (cryp->in_sg != cryp->out_sg) { in stm32_cryp_dma_start()
1020 ret = dma_map_sg(cryp->dev, cryp->in_sg, cryp->in_sg_len, DMA_TO_DEVICE); in stm32_cryp_dma_start()
1033 dma_sync_sg_for_device(cryp->dev, cryp->in_sg, cryp->in_sg_len, DMA_TO_DEVICE); in stm32_cryp_dma_start()
1035 tx_in = dmaengine_prep_slave_sg(cryp->dma_lch_in, cryp->in_sg, cryp->in_sg_len, in stm32_cryp_dma_start()
[all …]
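
stm32-cryp treats in_sg == out_sg as an in-place request: the input list is mapped, unmapped and freed only when it is a distinct list, as the guards above show. A sketch of that pattern; the DMA_BIDIRECTIONAL choice for the shared case is an assumption here, not taken from the driver:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    static int example_map_in_out(struct device *dev,
                                  struct scatterlist *in_sg, int in_nents,
                                  struct scatterlist *out_sg, int out_nents)
    {
            if (in_sg == out_sg)    /* in-place: one mapping serves both */
                    return dma_map_sg(dev, in_sg, in_nents,
                                      DMA_BIDIRECTIONAL) ? 0 : -ENOMEM;

            if (!dma_map_sg(dev, in_sg, in_nents, DMA_TO_DEVICE))
                    return -ENOMEM;
            if (!dma_map_sg(dev, out_sg, out_nents, DMA_FROM_DEVICE)) {
                    dma_unmap_sg(dev, in_sg, in_nents, DMA_TO_DEVICE);
                    return -ENOMEM;
            }
            return 0;
    }
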
/linux-6.12.1/drivers/crypto/gemini/
sl3516-ce-cipher.c
30 struct scatterlist *in_sg; in sl3516_ce_need_fallback() local
87 in_sg = areq->src; in sl3516_ce_need_fallback()
89 while (in_sg && out_sg) { in sl3516_ce_need_fallback()
90 if (in_sg->length != out_sg->length) { in sl3516_ce_need_fallback()
94 in_sg = sg_next(in_sg); in sl3516_ce_need_fallback()
97 if (in_sg || out_sg) in sl3516_ce_need_fallback()
/linux-6.12.1/drivers/crypto/allwinner/sun8i-ss/
sun8i-ss-cipher.c
28 struct scatterlist *in_sg = areq->src; in sun8i_ss_need_fallback() local
76 in_sg = areq->src; in sun8i_ss_need_fallback()
78 while (in_sg && out_sg) { in sun8i_ss_need_fallback()
79 if (in_sg->length != out_sg->length) in sun8i_ss_need_fallback()
81 in_sg = sg_next(in_sg); in sun8i_ss_need_fallback()
84 if (in_sg || out_sg) in sun8i_ss_need_fallback()
/linux-6.12.1/drivers/crypto/allwinner/sun4i-ss/
sun4i-ss-hash.c
198 struct scatterlist *in_sg = areq->src; in sun4i_hash() local
263 while (in_sg && i == 1) { in sun4i_hash()
264 if (in_sg->length % 4) in sun4i_hash()
266 in_sg = sg_next(in_sg); in sun4i_hash()
sun4i-ss-cipher.c
182 struct scatterlist *in_sg = areq->src; in sun4i_ss_cipher_poll() local
225 while (in_sg && no_chunk == 1) { in sun4i_ss_cipher_poll()
226 if ((in_sg->length | in_sg->offset) & 3u) in sun4i_ss_cipher_poll()
228 in_sg = sg_next(in_sg); in sun4i_ss_cipher_poll()
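
The gemini, sun8i-ss and sun4i-ss hits are variations of one check: walk src and dst in lockstep with sg_next() and fall back to a software implementation if any chunk pair has unequal lengths, fails word alignment, or one list ends before the other. A combined sketch of that walk (alignment rule per the sun4i hit):

    #include <linux/scatterlist.h>
    #include <linux/types.h>

    /* Return true if the hardware cannot process this src/dst pair
     * and a software fallback is needed. */
    static bool example_need_fallback(struct scatterlist *in_sg,
                                      struct scatterlist *out_sg)
    {
            while (in_sg && out_sg) {
                    if (in_sg->length != out_sg->length)
                            return true;
                    if ((in_sg->length | in_sg->offset) & 3u)
                            return true;
                    in_sg = sg_next(in_sg);
                    out_sg = sg_next(out_sg);
            }
            /* lists of different lengths cannot be processed either */
            return in_sg || out_sg;
    }
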
/linux-6.12.1/crypto/
rsa-pkcs1pad.c
125 struct scatterlist in_sg[2], out_sg[1]; member
279 pkcs1pad_sg_set_buf(req_ctx->in_sg, req_ctx->in_buf, in pkcs1pad_encrypt()
287 akcipher_request_set_crypt(&req_ctx->child_req, req_ctx->in_sg, in pkcs1pad_encrypt()
436 pkcs1pad_sg_set_buf(req_ctx->in_sg, req_ctx->in_buf, in pkcs1pad_sign()
444 akcipher_request_set_crypt(&req_ctx->child_req, req_ctx->in_sg, in pkcs1pad_sign()
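
rsa-pkcs1pad keeps a two-entry in_sg[] in its request context so a padded buffer that straddles a page boundary can still be described, then points the child akcipher request at it. A simplified sketch of the wiring, assuming a kmalloc'd buffer that fits one entry (the page-crossing case is what pkcs1pad_sg_set_buf() handles):

    #include <crypto/akcipher.h>
    #include <linux/scatterlist.h>

    static void example_set_child_crypt(struct akcipher_request *child,
                                        struct scatterlist in_sg[2],
                                        u8 *in_buf, unsigned int in_len,
                                        struct scatterlist *out_sg,
                                        unsigned int out_len)
    {
            sg_init_table(in_sg, 1);
            sg_set_buf(in_sg, in_buf, in_len);  /* in_buf: kmalloc'd memory */
            akcipher_request_set_crypt(child, in_sg, out_sg, in_len, out_len);
    }
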
/linux-6.12.1/drivers/crypto/starfive/
jh7110-aes.c
584 rctx->in_sg = req->src; in starfive_aes_do_one_req()
598 ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg); in starfive_aes_do_one_req()
652 rctx->in_sg = scatterwalk_ffwd(_src, req->src, cryp->assoclen); in starfive_aes_aead_do_one_req()
654 rctx->out_sg = rctx->in_sg; in starfive_aes_aead_do_one_req()
681 sg_zero_buffer(rctx->in_sg, sg_nents(rctx->in_sg), in starfive_aes_aead_do_one_req()
682 sg_dma_len(rctx->in_sg) - cryp->total_in, in starfive_aes_aead_do_one_req()
710 ret = starfive_aes_map_sg(cryp, rctx->in_sg, rctx->out_sg); in starfive_aes_aead_do_one_req()
jh7110-rsa.c
262 rctx->total = sg_copy_to_buffer(rctx->in_sg, sg_nents(rctx->in_sg), in starfive_rsa_enc_core()
309 rctx->in_sg = req->src; in starfive_rsa_enc()
340 rctx->in_sg = req->src; in starfive_rsa_dec()
jh7110-cryp.h
217 struct scatterlist *in_sg; member
jh7110-hash.c
229 for_each_sg(rctx->in_sg, tsg, rctx->in_sg_len, i) { in starfive_hash_one_request()
334 rctx->in_sg = req->src; in starfive_hash_digest()
337 rctx->in_sg_len = sg_nents_for_len(rctx->in_sg, rctx->total); in starfive_hash_digest()
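
The jh7110-rsa.c hit linearizes the whole in_sg chain with sg_copy_to_buffer() before the RSA computation, a common fallback when hardware cannot walk an sg list itself. A small sketch of that idiom (helper name hypothetical):

    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    /* Copy an sg chain of @len bytes into one flat buffer; the caller
     * kfree()s the result. */
    static void *example_linearize(struct scatterlist *in_sg, size_t len)
    {
            void *buf = kmalloc(len, GFP_KERNEL);

            if (!buf)
                    return NULL;
            sg_copy_to_buffer(in_sg, sg_nents(in_sg), buf, len);
            return buf;
    }
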
/linux-6.12.1/arch/um/drivers/
virt-pci.c
77 struct scatterlist out_sg, extra_sg, in_sg; in um_pci_send_cmd() local
80 [1] = extra ? &extra_sg : &in_sg, in um_pci_send_cmd()
81 [2] = extra ? &in_sg : NULL, in um_pci_send_cmd()
132 sg_init_one(&in_sg, out, out_size); in um_pci_send_cmd()
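
um_pci_send_cmd() builds per-call scatterlists on the stack and queues them with the out entries (device-readable) ordered before the in entries (device-writable), as virtio requires. A reduced sketch with one command and one response buffer (the optional extra_sg from the hit above is dropped here):

    #include <linux/virtio.h>
    #include <linux/scatterlist.h>

    static int example_send_cmd(struct virtqueue *vq,
                                void *cmd, size_t cmd_size,
                                void *resp, size_t resp_size)
    {
            struct scatterlist out_sg, in_sg;
            struct scatterlist *sgs[2] = { &out_sg, &in_sg };

            sg_init_one(&out_sg, cmd, cmd_size);
            sg_init_one(&in_sg, resp, resp_size);

            /* 1 out (readable) sg, then 1 in (writable) sg */
            return virtqueue_add_sgs(vq, sgs, 1, 1, cmd, GFP_ATOMIC);
    }
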
