/linux-6.12.1/drivers/crypto/caam/ |
D | caamalg_qi2.h |
      111  dma_addr_t iv_dma;    member
      131  dma_addr_t iv_dma;    member
|
D | caamalg_qi.c |
      805  dma_addr_t iv_dma;    member
      827  dma_addr_t iv_dma;    member
      874  int dst_nents, dma_addr_t iv_dma, int ivsize,    in caam_unmap() argument
      887  if (iv_dma)    in caam_unmap()
      888  dma_unmap_single(dev, iv_dma, ivsize, iv_dir);    in caam_unmap()
      901  edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma,    in aead_unmap()
      913  edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma,    in skcipher_unmap()
      954  dma_addr_t qm_sg_dma, iv_dma = 0;    in aead_edesc_alloc() local
     1082  iv_dma = dma_map_single(qidev, iv, ivsize, DMA_TO_DEVICE);    in aead_edesc_alloc()
     1083  if (dma_mapping_error(qidev, iv_dma)) {    in aead_edesc_alloc()
      [all …]
|
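The caamalg_qi.c hits above (and the caamalg_qi2.c hits below) show the recurring CAAM lifecycle: the IV is DMA-mapped with DMA_TO_DEVICE while the extended descriptor is built, the mapping is error-checked, and caam_unmap() later unmaps it only when iv_dma is non-zero. A minimal sketch of that pattern, using hypothetical demo_* names rather than the driver's own structures:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>

struct demo_edesc {                /* hypothetical stand-in for the driver's edesc */
        dma_addr_t iv_dma;         /* bus address of the mapped IV, 0 if unmapped  */
};

static int demo_map_iv(struct device *qidev, struct demo_edesc *edesc,
                       void *iv, int ivsize)
{
        /* The device only reads the IV on the AEAD path, hence DMA_TO_DEVICE. */
        edesc->iv_dma = dma_map_single(qidev, iv, ivsize, DMA_TO_DEVICE);
        if (dma_mapping_error(qidev, edesc->iv_dma)) {
                dev_err(qidev, "unable to map IV\n");
                return -ENOMEM;
        }
        return 0;
}

static void demo_unmap_iv(struct device *qidev, struct demo_edesc *edesc,
                          int ivsize, enum dma_data_direction iv_dir)
{
        /* Mirror of caam_unmap(): only unmap when an IV mapping exists. */
        if (edesc->iv_dma)
                dma_unmap_single(qidev, edesc->iv_dma, ivsize, iv_dir);
}

caam_unmap() in the listing takes the direction as an iv_dir argument because the AEAD and skcipher paths map the IV differently (DMA_TO_DEVICE versus DMA_BIDIRECTIONAL, as seen in aead_unmap() and skcipher_unmap() above).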
D | caamalg_qi2.c |
      152  int dst_nents, dma_addr_t iv_dma, int ivsize,    in caam_unmap() argument
      165  if (iv_dma)    in caam_unmap()
      166  dma_unmap_single(dev, iv_dma, ivsize, iv_dir);    in caam_unmap()
      367  dma_addr_t qm_sg_dma, iv_dma = 0;    in aead_edesc_alloc() local
      490  iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);    in aead_edesc_alloc()
      491  if (dma_mapping_error(dev, iv_dma)) {    in aead_edesc_alloc()
      502  edesc->iv_dma = iv_dma;    in aead_edesc_alloc()
      518  iv_dma, ivsize, DMA_TO_DEVICE, 0, 0);    in aead_edesc_alloc()
      526  dma_to_qm_sg_one(sg_table + qm_sg_index, iv_dma, ivsize, 0);    in aead_edesc_alloc()
      540  iv_dma, ivsize, DMA_TO_DEVICE, 0, 0);    in aead_edesc_alloc()
      [all …]
|
D | caamalg.c |
      928  dma_addr_t iv_dma;    member
      939  dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,    in caam_unmap() argument
      951  if (iv_dma)    in caam_unmap()
      952  dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);    in caam_unmap()
      975  edesc->iv_dma, ivsize,    in skcipher_unmap()
     1623  dma_addr_t iv_dma = 0;    in skcipher_edesc_alloc() local
     1728  iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);    in skcipher_edesc_alloc()
     1729  if (dma_mapping_error(jrdev, iv_dma)) {    in skcipher_edesc_alloc()
     1737  dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);    in skcipher_edesc_alloc()
     1749  mapped_dst_nents, iv_dma, ivsize, 0);    in skcipher_edesc_alloc()
      [all …]
|
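caamalg.c's skcipher path differs in direction: the IV is mapped DMA_BIDIRECTIONAL (the accelerator is expected to write the chaining IV back into the same buffer) and its bus address is placed into the leading sec4 scatter-gather entry via dma_to_sec4_sg_one(). A rough sketch under those assumptions, with demo_sg_entry/demo_sg_set as illustrative stand-ins for the driver's S/G types:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/types.h>

struct demo_sg_entry {             /* illustrative stand-in for a sec4 S/G entry */
        u64 addr;
        u32 len;
        u32 offset;
};

/* Illustrative equivalent of dma_to_sec4_sg_one(): record one bus address. */
static void demo_sg_set(struct demo_sg_entry *e, dma_addr_t dma, u32 len,
                        u16 offset)
{
        e->addr = dma;
        e->len = len;
        e->offset = offset;
}

static int demo_link_skcipher_iv(struct device *jrdev, void *iv, int ivsize,
                                 struct demo_sg_entry *sg_table,
                                 dma_addr_t *iv_dma)
{
        /* Bidirectional: the device reads the IV and may write the
         * next-chaining IV back into the same buffer. */
        *iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
        if (dma_mapping_error(jrdev, *iv_dma))
                return -ENOMEM;

        /* The IV becomes the first entry of the S/G table, ahead of the data. */
        demo_sg_set(&sg_table[0], *iv_dma, ivsize, 0);
        return 0;
}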
/linux-6.12.1/drivers/crypto/ |
D | talitos.h |
       63  dma_addr_t iv_dma;    member
|
D | talitos.c |
     1035  dma_unmap_single(dev, edesc->iv_dma, ivsize, DMA_TO_DEVICE);    in ipsec_esp_encrypt_done()
     1239  to_talitos_ptr(civ_ptr, edesc->iv_dma, ivsize, is_sec1);    in ipsec_esp()
     1333  dma_addr_t iv_dma = 0;    in talitos_edesc_alloc() local
     1401  iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);    in talitos_edesc_alloc()
     1407  edesc->iv_dma = iv_dma;    in talitos_edesc_alloc()
     1588  to_talitos_ptr(&desc->ptr[1], edesc->iv_dma, ivsize, is_sec1);    in common_nonsnoop()
|
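talitos follows the same map/use/unmap lifecycle with its own descriptor format: talitos_edesc_alloc() maps the IV with DMA_TO_DEVICE and stores the handle in edesc->iv_dma, ipsec_esp() and common_nonsnoop() write that bus address into a descriptor pointer via to_talitos_ptr(), and the completion callback unmaps it. A compressed sketch, with demo_write_desc_ptr() standing in for to_talitos_ptr() and all other names hypothetical:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/types.h>

struct demo_desc_ptr {             /* illustrative stand-in for a talitos_ptr */
        u64 ptr;
        u32 len;
};

/* Illustrative equivalent of to_talitos_ptr(): point a descriptor slot
 * at a bus address of a given length. */
static void demo_write_desc_ptr(struct demo_desc_ptr *p, dma_addr_t dma, u32 len)
{
        p->ptr = dma;
        p->len = len;
}

static int demo_prepare_iv(struct device *dev, void *iv, int ivsize,
                           struct demo_desc_ptr *civ_ptr, dma_addr_t *iv_dma)
{
        *iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);
        if (dma_mapping_error(dev, *iv_dma))
                return -ENOMEM;

        /* The descriptor carries only the bus address; the mapping must stay
         * valid until the completion callback releases it. */
        demo_write_desc_ptr(civ_ptr, *iv_dma, ivsize);
        return 0;
}

static void demo_complete_iv(struct device *dev, dma_addr_t iv_dma, int ivsize)
{
        /* As in ipsec_esp_encrypt_done(): unmap the IV on completion. */
        dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
}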
/linux-6.12.1/drivers/crypto/chelsio/ |
D | chcr_crypto.h |
      184  dma_addr_t iv_dma;    member
|
D | chcr_algo.c |
     2607  reqctx->iv_dma = dma_map_single(dev, reqctx->iv, (IV + reqctx->b0_len),    in chcr_aead_dma_map()
     2609  if (dma_mapping_error(dev, reqctx->iv_dma))    in chcr_aead_dma_map()
     2612  reqctx->b0_dma = reqctx->iv_dma + IV;    in chcr_aead_dma_map()
     2640  dma_unmap_single(dev, reqctx->iv_dma, IV, DMA_BIDIRECTIONAL);    in chcr_aead_dma_map()
     2669  dma_unmap_single(dev, reqctx->iv_dma, (IV + reqctx->b0_len),    in chcr_aead_dma_unmap()
     2726  dsgl_walk_add_page(&dsgl_walk, IV + reqctx->b0_len, reqctx->iv_dma);    in chcr_add_aead_dst_ent()
|
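The Chelsio driver maps the IV and the CCM B0 block as a single region: chcr_aead_dma_map() maps IV + b0_len bytes in one dma_map_single() call, derives b0_dma by offsetting iv_dma, and chcr_aead_dma_unmap() releases the region with the same combined length. A sketch of that offset scheme with hypothetical names (DEMO_IV_LEN and DEMO_B0_LEN stand in for the driver's IV constant and B0 block size):

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/types.h>

#define DEMO_IV_LEN 16             /* stand-in for the driver's IV constant */
#define DEMO_B0_LEN 16             /* the CCM B0 block is one AES block     */

struct demo_reqctx {               /* hypothetical request context */
        u8 iv[DEMO_IV_LEN + DEMO_B0_LEN];  /* IV with the B0 block right behind it */
        unsigned int b0_len;               /* 0 when no B0 block is needed         */
        dma_addr_t iv_dma;
        dma_addr_t b0_dma;
};

static int demo_map_iv_b0(struct device *dev, struct demo_reqctx *reqctx)
{
        /* One mapping covers both the IV and the trailing B0 block. */
        reqctx->iv_dma = dma_map_single(dev, reqctx->iv,
                                        DEMO_IV_LEN + reqctx->b0_len,
                                        DMA_BIDIRECTIONAL);
        if (dma_mapping_error(dev, reqctx->iv_dma))
                return -ENOMEM;

        /* B0 sits right after the IV, so its bus address is just an offset. */
        reqctx->b0_dma = reqctx->iv_dma + DEMO_IV_LEN;
        return 0;
}

static void demo_unmap_iv_b0(struct device *dev, struct demo_reqctx *reqctx)
{
        /* Unmap with the same combined length that was used at map time. */
        dma_unmap_single(dev, reqctx->iv_dma, DEMO_IV_LEN + reqctx->b0_len,
                         DMA_BIDIRECTIONAL);
}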