Home
last modified time | relevance | path

Searched refs: src_sg (Results 1–22 of 22) sorted by relevance

/linux-6.12.1/drivers/crypto/amlogic/
Damlogic-gxl-cipher.c27 struct scatterlist *src_sg = areq->src; in meson_cipher_need_fallback() local
33 if (sg_nents(src_sg) != sg_nents(dst_sg)) in meson_cipher_need_fallback()
37 if (sg_nents(src_sg) > MAXDESC - 3 || sg_nents(dst_sg) > MAXDESC - 3) in meson_cipher_need_fallback()
40 while (src_sg && dst_sg) { in meson_cipher_need_fallback()
41 if ((src_sg->length % 16) != 0) in meson_cipher_need_fallback()
45 if (src_sg->length != dst_sg->length) in meson_cipher_need_fallback()
47 if (!IS_ALIGNED(src_sg->offset, sizeof(u32))) in meson_cipher_need_fallback()
51 src_sg = sg_next(src_sg); in meson_cipher_need_fallback()
94 struct scatterlist *src_sg = areq->src; in meson_cipher() local
203 src_sg = areq->src; in meson_cipher()
[all …]
/linux-6.12.1/drivers/crypto/ccp/
Dccp-dmaengine.c355 struct scatterlist *src_sg, in ccp_create_desc() argument
372 if (!dst_sg || !src_sg) in ccp_create_desc()
384 src_len = sg_dma_len(src_sg); in ccp_create_desc()
396 src_sg = sg_next(src_sg); in ccp_create_desc()
397 if (!src_sg) in ccp_create_desc()
400 src_len = sg_dma_len(src_sg); in ccp_create_desc()
433 ccp_pt->src_dma = sg_dma_address(src_sg) + src_offset; in ccp_create_desc()
485 struct scatterlist dst_sg, src_sg; in ccp_prep_dma_memcpy() local
495 sg_init_table(&src_sg, 1); in ccp_prep_dma_memcpy()
496 sg_dma_address(&src_sg) = src; in ccp_prep_dma_memcpy()
[all …]
/linux-6.12.1/security/keys/trusted-keys/
Dtrusted_dcp.c80 struct scatterlist src_sg, dst_sg; in do_dcp_crypto() local
112 sg_init_one(&src_sg, in, AES_KEYSIZE_128); in do_dcp_crypto()
114 skcipher_request_set_crypt(req, &src_sg, &dst_sg, AES_KEYSIZE_128, in do_dcp_crypto()
133 struct scatterlist src_sg, dst_sg; in do_aead_crypto() local
156 sg_init_one(&src_sg, in, len); in do_aead_crypto()
166 aead_request_set_crypt(aead_req, &src_sg, &dst_sg, len, nonce); in do_aead_crypto()
/linux-6.12.1/drivers/scsi/arm/
Darm_scsi.h45 struct scatterlist *src_sg; in copy_SCp_to_sg() local
48 for_each_sg(sg_next(SCp->buffer), src_sg, bufs, i) in copy_SCp_to_sg()
49 *(++sg) = *src_sg; in copy_SCp_to_sg()
/linux-6.12.1/drivers/mailbox/
Dbcm-flexrm-mailbox.c596 struct scatterlist *src_sg = msg->spu.src, *dst_sg = msg->spu.dst; in flexrm_spu_estimate_nonheader_desc_count() local
598 while (src_sg || dst_sg) { in flexrm_spu_estimate_nonheader_desc_count()
599 if (src_sg) { in flexrm_spu_estimate_nonheader_desc_count()
601 dst_target = src_sg->length; in flexrm_spu_estimate_nonheader_desc_count()
602 src_sg = sg_next(src_sg); in flexrm_spu_estimate_nonheader_desc_count()
655 struct scatterlist *src_sg = msg->spu.src, *dst_sg = msg->spu.dst; in flexrm_spu_write_descs() local
657 while (src_sg || dst_sg) { in flexrm_spu_write_descs()
658 if (src_sg) { in flexrm_spu_write_descs()
659 if (sg_dma_len(src_sg) & 0xf) in flexrm_spu_write_descs()
660 d = flexrm_src_desc(sg_dma_address(src_sg), in flexrm_spu_write_descs()
[all …]
Dbcm-pdc-mailbox.c392 struct scatterlist *src_sg[PDC_RING_ENTRIES]; member
607 dma_unmap_sg(dev, pdcs->src_sg[pdcs->txin], in pdc_receive_one()
608 sg_nents(pdcs->src_sg[pdcs->txin]), DMA_TO_DEVICE); in pdc_receive_one()
731 pdcs->src_sg[pdcs->txout] = sg; in pdc_tx_list_sg_add()
/linux-6.12.1/drivers/crypto/qce/
Daead.c49 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src); in qce_aead_done()
277 rctx->src_sg = rctx->src_tbl.sgl; in qce_aead_ccm_prepare_buf_assoclen()
279 rctx->src_nents = sg_nents_for_len(rctx->src_sg, totallen); in qce_aead_ccm_prepare_buf_assoclen()
292 rctx->dst_sg = rctx->src_sg; in qce_aead_ccm_prepare_buf_assoclen()
321 rctx->src_sg = req->src; in qce_aead_prepare_buf()
324 rctx->src_sg = rctx->dst_sg; in qce_aead_prepare_buf()
347 rctx->src_sg = req->src; in qce_aead_ccm_prepare_buf()
353 rctx->src_sg = req->src; in qce_aead_ccm_prepare_buf()
355 rctx->dst_sg = rctx->src_sg; in qce_aead_ccm_prepare_buf()
459 src_nents = dma_map_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src); in qce_aead_async_req_handle()
[all …]
Dcipher.h43 struct scatterlist *src_sg; member
Dskcipher.c50 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src); in qce_skcipher_done()
138 rctx->src_sg = req->src; in qce_skcipher_async_req_handle()
140 rctx->src_sg = rctx->dst_sg; in qce_skcipher_async_req_handle()
144 ret = qce_dma_prep_sgs(&qce->dma, rctx->src_sg, src_nents, in qce_skcipher_async_req_handle()
Daead.h37 struct scatterlist *src_sg; member
/linux-6.12.1/drivers/dma/
Dnbpfaxi.c920 struct scatterlist *src_sg, struct scatterlist *dst_sg, in nbpf_prep_sg() argument
939 mem_sg = src_sg; in nbpf_prep_sg()
946 mem_sg = src_sg; in nbpf_prep_sg()
965 sg_dma_address(src_sg), in nbpf_prep_sg()
975 src_sg = sg_next(src_sg); in nbpf_prep_sg()
978 mem_sg = direction == DMA_DEV_TO_MEM ? dst_sg : src_sg; in nbpf_prep_sg()
994 struct scatterlist src_sg; in nbpf_prep_memcpy() local
997 sg_init_table(&src_sg, 1); in nbpf_prep_memcpy()
1000 sg_dma_address(&src_sg) = src; in nbpf_prep_memcpy()
1003 sg_dma_len(&src_sg) = len; in nbpf_prep_memcpy()
[all …]
Dste_dma40.c2503 struct scatterlist src_sg; in d40_prep_memcpy() local
2506 sg_init_table(&src_sg, 1); in d40_prep_memcpy()
2509 sg_dma_address(&src_sg) = src; in d40_prep_memcpy()
2512 sg_dma_len(&src_sg) = size; in d40_prep_memcpy()
2514 return d40_prep_sg(chan, &src_sg, &dst_sg, 1, in d40_prep_memcpy()
/linux-6.12.1/fs/crypto/
Dkeysetup_v1.c54 struct scatterlist src_sg, dst_sg; in derive_key_aes() local
75 sg_init_one(&src_sg, master_key, derived_keysize); in derive_key_aes()
77 skcipher_request_set_crypt(req, &src_sg, &dst_sg, derived_keysize, in derive_key_aes()
Dfname.c153 struct scatterlist src_sg, dst_sg; in fname_decrypt() local
171 sg_init_one(&src_sg, iname->name, iname->len); in fname_decrypt()
173 skcipher_request_set_crypt(req, &src_sg, &dst_sg, iname->len, &iv); in fname_decrypt()
/linux-6.12.1/drivers/crypto/aspeed/
Daspeed-hace-hash.c132 rctx->bufcnt, rctx->src_sg, in aspeed_ahash_dma_prepare()
141 scatterwalk_map_and_copy(rctx->buffer, rctx->src_sg, in aspeed_ahash_dma_prepare()
181 sg_len = dma_map_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents, in aspeed_ahash_dma_prepare_sg()
227 for_each_sg(rctx->src_sg, s, sg_len, i) { in aspeed_ahash_dma_prepare_sg()
265 dma_unmap_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents, in aspeed_ahash_dma_prepare_sg()
461 dma_unmap_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents, in aspeed_ahash_update_resume_sg()
472 scatterwalk_map_and_copy(rctx->buffer, rctx->src_sg, rctx->offset, in aspeed_ahash_update_resume_sg()
591 rctx->src_sg = req->src; in aspeed_sham_update()
602 rctx->src_sg, rctx->offset, in aspeed_sham_update()
Daspeed-hace.h183 struct scatterlist *src_sg; member
/linux-6.12.1/drivers/crypto/tegra/
Dtegra-se-aes.c53 struct scatterlist *src_sg; member
74 struct scatterlist *src_sg; member
714 rctx->src_sg, 0, rctx->assoclen, 0); in tegra_gcm_do_gmac()
730 scatterwalk_map_and_copy(rctx->inbuf.buf, rctx->src_sg, in tegra_gcm_do_crypt()
782 scatterwalk_map_and_copy(mac, rctx->src_sg, offset, rctx->authsize, 0); in tegra_gcm_do_verify()
990 rctx->src_sg, 0, rctx->assoclen, 0); in tegra_ccm_format_blocks()
1050 sg = rctx->encrypt ? rctx->src_sg : rctx->dst_sg; in tegra_ccm_compute_auth()
1071 struct scatterlist *sg = rctx->src_sg; in tegra_ccm_do_ctr()
1115 rctx->src_sg = req->src; in tegra_ccm_crypt_init()
1235 rctx->src_sg = req->src; in tegra_gcm_do_one_req()
[all …]
Dtegra-se-hash.c33 struct scatterlist *src_sg; member
318 rctx->src_sg = req->src; in tegra_sha_do_update()
328 rctx->src_sg, 0, req->nbytes, 0); in tegra_sha_do_update()
339 rctx->src_sg, 0, req->nbytes - nresidue, 0); in tegra_sha_do_update()
341 scatterwalk_map_and_copy(rctx->residue.buf, rctx->src_sg, in tegra_sha_do_update()
/linux-6.12.1/fs/ecryptfs/
Dkeystore.c599 struct scatterlist src_sg[2]; member
795 s->block_aligned_filename_size, s->src_sg, 2); in ecryptfs_write_tag_70_packet()
830 skcipher_request_set_crypt(s->skcipher_req, s->src_sg, s->dst_sg, in ecryptfs_write_tag_70_packet()
869 struct scatterlist src_sg[2]; member
983 s->block_aligned_filename_size, s->src_sg, 2); in ecryptfs_parse_tag_70_packet()
1046 skcipher_request_set_crypt(s->skcipher_req, s->src_sg, s->dst_sg, in ecryptfs_parse_tag_70_packet()
1652 struct scatterlist src_sg[2]; in decrypt_passphrase_encrypted_session_key() local
1676 src_sg, 2); in decrypt_passphrase_encrypted_session_key()
1718 skcipher_request_set_crypt(req, src_sg, dst_sg, in decrypt_passphrase_encrypted_session_key()
2185 struct scatterlist src_sg[2]; in write_tag_3_packet() local
[all …]
Dcrypto.c276 struct scatterlist *src_sg, int size, in crypt_scatterlist() argument
316 skcipher_request_set_crypt(req, src_sg, dst_sg, size, iv); in crypt_scatterlist()
358 struct scatterlist src_sg, dst_sg; in crypt_extent() local
372 sg_init_table(&src_sg, 1); in crypt_extent()
375 sg_set_page(&src_sg, src_page, extent_size, in crypt_extent()
380 rc = crypt_scatterlist(crypt_stat, &dst_sg, &src_sg, extent_size, in crypt_extent()
/linux-6.12.1/drivers/crypto/bcm/
Dcipher.c224 datalen = spu_msg_sg_add(&sg, &rctx->src_sg, &rctx->src_skip, in spu_skcipher_tx_sg_create()
347 rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize); in handle_skcipher_req()
421 dump_sg(rctx->src_sg, rctx->src_skip, chunksize); in handle_skcipher_req()
605 datalen = spu_msg_sg_add(&sg, &rctx->src_sg, &rctx->src_skip, in spu_ahash_tx_sg_create()
768 rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, in handle_ahash_req()
853 dump_sg(rctx->src_sg, rctx->src_skip, new_data_len); in handle_ahash_req()
1216 written = spu_msg_sg_add(&sg, &rctx->src_sg, &rctx->src_skip, in spu_aead_tx_sg_create()
1343 rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize); in handle_aead_req()
1460 dump_sg(rctx->src_sg, rctx->src_skip, chunksize); in handle_aead_req()
1733 rctx->src_sg = req->src; in skcipher_enqueue()
[all …]
Dcipher.h286 struct scatterlist *src_sg; member