/linux-6.12.1/crypto/
D | rsa-pkcs1pad.c
    201  struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req);  [in pkcs1pad_encrypt_sign_complete(), local]
    209  len = req_ctx->child_req.dst_len;  [in pkcs1pad_encrypt_sign_complete()]
    231  kfree(req_ctx->in_buf);  [in pkcs1pad_encrypt_sign_complete()]
    253  struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req);  [in pkcs1pad_encrypt(), local]
    268  req_ctx->in_buf = kmalloc(ctx->key_size - 1 - req->src_len,  [in pkcs1pad_encrypt()]
    270  if (!req_ctx->in_buf)  [in pkcs1pad_encrypt()]
    274  req_ctx->in_buf[0] = 0x02;  [in pkcs1pad_encrypt()]
    276  req_ctx->in_buf[i] = get_random_u32_inclusive(1, 255);  [in pkcs1pad_encrypt()]
    277  req_ctx->in_buf[ps_end] = 0x00;  [in pkcs1pad_encrypt()]
    279  pkcs1pad_sg_set_buf(req_ctx->in_sg, req_ctx->in_buf,  [in pkcs1pad_encrypt()]
    [all …]
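The pkcs1pad_encrypt() hits show EME-PKCS1-v1_5 (type 2) padding being assembled in req_ctx->in_buf: a 0x02 block-type marker, a run of nonzero random filler bytes, and a 0x00 separator, after which the buffer is chained with the source scatterlist (the leading 0x00 octet of the encoded message is implicit, since in_buf holds only key_size - 1 - src_len bytes). A minimal user-space sketch of the same layout over a flat buffer, with getrandom() standing in for get_random_u32_inclusive(1, 255); the function name is illustrative, not a kernel API:

#include <stdint.h>
#include <string.h>
#include <sys/random.h>

/* Build EM = 0x00 || 0x02 || PS || 0x00 || msg into em[0..k-1], where k is
 * the RSA modulus size and PS is at least 8 nonzero random bytes.
 * Returns 0 on success, -1 if the message is too long or randomness fails. */
static int pkcs1_v15_pad_type2(uint8_t *em, size_t k,
                               const uint8_t *msg, size_t msg_len)
{
    size_t ps_len, i;

    if (msg_len + 11 > k)           /* need >= 8 bytes of PS plus 3 markers */
        return -1;
    ps_len = k - msg_len - 3;

    em[0] = 0x00;
    em[1] = 0x02;                   /* block type 2: encryption padding */
    for (i = 2; i < 2 + ps_len; i++) {
        uint8_t b = 0;
        while (b == 0)              /* PS bytes must be nonzero */
            if (getrandom(&b, 1, 0) != 1)
                return -1;
        em[i] = b;
    }
    em[2 + ps_len] = 0x00;          /* separator, pkcs1pad's in_buf[ps_end] */
    memcpy(em + k - msg_len, msg, msg_len);
    return 0;
}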
/linux-6.12.1/drivers/crypto/axis/
D | artpec6_crypto.c
    1090  struct artpec6_crypto_request_context *req_ctx = NULL;  [in artpec6_crypto_encrypt(), local]
    1094  req_ctx = skcipher_request_ctx(req);  [in artpec6_crypto_encrypt()]
    1100  req_ctx->decrypt = 0;  [in artpec6_crypto_encrypt()]
    1115  ret = artpec6_crypto_common_init(&req_ctx->common,  [in artpec6_crypto_encrypt()]
    1124  artpec6_crypto_common_destroy(&req_ctx->common);  [in artpec6_crypto_encrypt()]
    1128  return artpec6_crypto_submit(&req_ctx->common);  [in artpec6_crypto_encrypt()]
    1136  struct artpec6_crypto_request_context *req_ctx = NULL;  [in artpec6_crypto_decrypt(), local]
    1139  req_ctx = skcipher_request_ctx(req);  [in artpec6_crypto_decrypt()]
    1145  req_ctx->decrypt = 1;  [in artpec6_crypto_decrypt()]
    1161  ret = artpec6_crypto_common_init(&req_ctx->common, &req->base,  [in artpec6_crypto_decrypt()]
    [all …]
/linux-6.12.1/drivers/crypto/
D | talitos.c
    1704  struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);  [in common_nonsnoop_hash_unmap(), local]
    1716  if (req_ctx->last)  [in common_nonsnoop_hash_unmap()]
    1717  memcpy(areq->result, req_ctx->hw_context,  [in common_nonsnoop_hash_unmap()]
    1720  if (req_ctx->psrc)  [in common_nonsnoop_hash_unmap()]
    1721  talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);  [in common_nonsnoop_hash_unmap()]
    1731  if (is_sec1 && req_ctx->nbuf)  [in common_nonsnoop_hash_unmap()]
    1751  struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);  [in ahash_done(), local]
    1753  if (!req_ctx->last && req_ctx->to_hash_later) {  [in ahash_done()]
    1755  req_ctx->buf_idx = (req_ctx->buf_idx + 1) & 1;  [in ahash_done()]
    1756  req_ctx->nbuf = req_ctx->to_hash_later;  [in ahash_done()]
    [all …]
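The ahash_done() hits show how leftovers are carried between hardware passes: when data remains to hash later, the staging-buffer index flips with (buf_idx + 1) & 1 so successive rounds ping-pong between two buffers. A standalone sketch of that double-buffering idiom; the struct layout and names are illustrative, not the talitos ones:

#include <stddef.h>
#include <string.h>

#define STAGING_SZ 128

struct hash_req_ctx {
    unsigned char buf[2][STAGING_SZ];  /* two staging buffers */
    int buf_idx;                       /* which buffer holds the leftover */
    size_t nbuf;                       /* bytes currently staged */
};

/* Stash bytes deferred to the next hashing round in the *other* buffer,
 * then flip the index, mirroring the update in ahash_done(). */
static int carry_leftover(struct hash_req_ctx *rctx,
                          const unsigned char *tail, size_t to_hash_later)
{
    int next = (rctx->buf_idx + 1) & 1;    /* toggle between 0 and 1 */

    if (to_hash_later > STAGING_SZ)
        return -1;
    memcpy(rctx->buf[next], tail, to_hash_later);
    rctx->buf_idx = next;
    rctx->nbuf = to_hash_later;
    return 0;
}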
D | mxs-dcp.c
    120  struct dcp_sha_req_ctx req_ctx;  [member]
    873  memcpy(rctx, &export->req_ctx, sizeof(struct dcp_sha_req_ctx));  [in dcp_sha_import()]
    886  memcpy(&export->req_ctx, rctx_state, sizeof(struct dcp_sha_req_ctx));  [in dcp_sha_export()]
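dcp_sha_import()/dcp_sha_export() above persist the hash state by copying the whole request context into a flat export blob and back. A trimmed sketch of the same pattern; both struct layouts are placeholders, not the mxs-dcp ones:

#include <string.h>

struct sha_state  { unsigned char digest[32]; unsigned int fill; };
struct sha_export { struct sha_state req_ctx; };  /* flat, copyable blob */

static void sha_export_state(struct sha_export *out, const struct sha_state *rctx)
{
    memcpy(&out->req_ctx, rctx, sizeof(*rctx));   /* as in dcp_sha_export() */
}

static void sha_import_state(struct sha_state *rctx, const struct sha_export *in)
{
    memcpy(rctx, &in->req_ctx, sizeof(*rctx));    /* as in dcp_sha_import() */
}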
/linux-6.12.1/drivers/crypto/ccree/
D | cc_cipher.c
    505  struct cipher_req_ctx *req_ctx,  [in cc_setup_readiv_desc(), argument]
    513  int direction = req_ctx->gen_ctx.op_type;  [in cc_setup_readiv_desc()]
    514  dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;  [in cc_setup_readiv_desc()]
    561  struct cipher_req_ctx *req_ctx,  [in cc_setup_state_desc(), argument]
    570  int direction = req_ctx->gen_ctx.op_type;  [in cc_setup_state_desc()]
    571  dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;  [in cc_setup_state_desc()]
    605  struct cipher_req_ctx *req_ctx,  [in cc_setup_xex_state_desc(), argument]
    614  int direction = req_ctx->gen_ctx.op_type;  [in cc_setup_xex_state_desc()]
    617  dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;  [in cc_setup_xex_state_desc()]
    683  struct cipher_req_ctx *req_ctx,  [in cc_setup_key_desc(), argument]
    [all …]
D | cc_aead.c
    876  struct aead_req_ctx *req_ctx = aead_request_ctx_dma(req);  [in cc_proc_digest_desc(), local]
    880  int direct = req_ctx->gen_ctx.op_type;  [in cc_proc_digest_desc()]
    887  set_dout_dlli(&desc[idx], req_ctx->icv_dma_addr, ctx->authsize,  [in cc_proc_digest_desc()]
    903  set_dout_dlli(&desc[idx], req_ctx->mac_buf_dma_addr,  [in cc_proc_digest_desc()]
    926  struct aead_req_ctx *req_ctx = aead_request_ctx_dma(req);  [in cc_set_cipher_desc(), local]
    927  unsigned int hw_iv_size = req_ctx->hw_iv_size;  [in cc_set_cipher_desc()]
    929  int direct = req_ctx->gen_ctx.op_type;  [in cc_set_cipher_desc()]
    935  set_din_type(&desc[idx], DMA_DLLI, req_ctx->gen_ctx.iv_dma_addr,  [in cc_set_cipher_desc()]
    968  struct aead_req_ctx *req_ctx = aead_request_ctx_dma(req);  [in cc_proc_cipher(), local]
    969  int direct = req_ctx->gen_ctx.op_type;  [in cc_proc_cipher()]
    [all …]
D | cc_buffer_mgr.c
    343  struct cipher_req_ctx *req_ctx = (struct cipher_req_ctx *)ctx;  [in cc_unmap_cipher_request(), local]
    345  if (req_ctx->gen_ctx.iv_dma_addr) {  [in cc_unmap_cipher_request()]
    347  &req_ctx->gen_ctx.iv_dma_addr, ivsize);  [in cc_unmap_cipher_request()]
    348  dma_unmap_single(dev, req_ctx->gen_ctx.iv_dma_addr,  [in cc_unmap_cipher_request()]
    352  if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI &&  [in cc_unmap_cipher_request()]
    353  req_ctx->mlli_params.mlli_virt_addr) {  [in cc_unmap_cipher_request()]
    354  dma_pool_free(req_ctx->mlli_params.curr_pool,  [in cc_unmap_cipher_request()]
    355  req_ctx->mlli_params.mlli_virt_addr,  [in cc_unmap_cipher_request()]
    356  req_ctx->mlli_params.mlli_dma_addr);  [in cc_unmap_cipher_request()]
    360  dma_unmap_sg(dev, src, req_ctx->in_nents, DMA_TO_DEVICE);  [in cc_unmap_cipher_request()]
    [all …]
/linux-6.12.1/arch/x86/crypto/
D | aria_gfni_avx512_glue.c
    77  struct aria_avx512_request_ctx *req_ctx = skcipher_request_ctx(req);  [in aria_avx512_ctr_encrypt(), local]
    93  &req_ctx->keystream[0],  [in aria_avx512_ctr_encrypt()]
    104  &req_ctx->keystream[0],  [in aria_avx512_ctr_encrypt()]
    115  &req_ctx->keystream[0],  [in aria_avx512_ctr_encrypt()]
    124  memcpy(&req_ctx->keystream[0], walk.iv,  [in aria_avx512_ctr_encrypt()]
    128  aria_encrypt(ctx, &req_ctx->keystream[0],  [in aria_avx512_ctr_encrypt()]
    129  &req_ctx->keystream[0]);  [in aria_avx512_ctr_encrypt()]
    131  crypto_xor_cpy(dst, src, &req_ctx->keystream[0],  [in aria_avx512_ctr_encrypt()]
    139  memcpy(&req_ctx->keystream[0], walk.iv,  [in aria_avx512_ctr_encrypt()]
    143  aria_encrypt(ctx, &req_ctx->keystream[0],  [in aria_avx512_ctr_encrypt()]
    [all …]
D | aria_aesni_avx_glue.c
    88  struct aria_avx_request_ctx *req_ctx = skcipher_request_ctx(req);  [in aria_avx_ctr_encrypt(), local]
    104  &req_ctx->keystream[0],  [in aria_avx_ctr_encrypt()]
    113  memcpy(&req_ctx->keystream[0], walk.iv, ARIA_BLOCK_SIZE);  [in aria_avx_ctr_encrypt()]
    116  aria_encrypt(ctx, &req_ctx->keystream[0],  [in aria_avx_ctr_encrypt()]
    117  &req_ctx->keystream[0]);  [in aria_avx_ctr_encrypt()]
    119  crypto_xor_cpy(dst, src, &req_ctx->keystream[0],  [in aria_avx_ctr_encrypt()]
    127  memcpy(&req_ctx->keystream[0], walk.iv,  [in aria_avx_ctr_encrypt()]
    131  aria_encrypt(ctx, &req_ctx->keystream[0],  [in aria_avx_ctr_encrypt()]
    132  &req_ctx->keystream[0]);  [in aria_avx_ctr_encrypt()]
    134  crypto_xor_cpy(dst, src, &req_ctx->keystream[0],  [in aria_avx_ctr_encrypt()]
D | aria_aesni_avx2_glue.c
    90  struct aria_avx2_request_ctx *req_ctx = skcipher_request_ctx(req);  [in aria_avx2_ctr_encrypt(), local]
    106  &req_ctx->keystream[0],  [in aria_avx2_ctr_encrypt()]
    117  &req_ctx->keystream[0],  [in aria_avx2_ctr_encrypt()]
    126  memcpy(&req_ctx->keystream[0], walk.iv, ARIA_BLOCK_SIZE);  [in aria_avx2_ctr_encrypt()]
    129  aria_encrypt(ctx, &req_ctx->keystream[0],  [in aria_avx2_ctr_encrypt()]
    130  &req_ctx->keystream[0]);  [in aria_avx2_ctr_encrypt()]
    132  crypto_xor_cpy(dst, src, &req_ctx->keystream[0],  [in aria_avx2_ctr_encrypt()]
    140  memcpy(&req_ctx->keystream[0], walk.iv,  [in aria_avx2_ctr_encrypt()]
    144  aria_encrypt(ctx, &req_ctx->keystream[0],  [in aria_avx2_ctr_encrypt()]
    145  &req_ctx->keystream[0]);  [in aria_avx2_ctr_encrypt()]
    [all …]
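All three ARIA glue files finish their CTR walk the same way for a final partial block: walk.iv is copied into req_ctx->keystream, encrypted in place with aria_encrypt(), and XORed against the remaining bytes with crypto_xor_cpy(). A self-contained user-space sketch of that tail path; block_encrypt() is a dummy placeholder for the real block cipher, every name is illustrative, and the closing counter bump is generic CTR bookkeeping rather than something visible in the hits above:

#include <stddef.h>
#include <stdint.h>

#define BLOCK_SIZE 16  /* ARIA_BLOCK_SIZE */

/* Placeholder so the sketch compiles; NOT a real cipher. The kernel code
 * calls aria_encrypt() here. */
static void block_encrypt(const uint8_t key[BLOCK_SIZE],
                          uint8_t out[BLOCK_SIZE],
                          const uint8_t in[BLOCK_SIZE])
{
    for (int i = 0; i < BLOCK_SIZE; i++)
        out[i] = in[i] ^ key[i];
}

/* Big-endian increment of the counter block. */
static void ctr_inc(uint8_t ctr[BLOCK_SIZE])
{
    for (int i = BLOCK_SIZE - 1; i >= 0 && ++ctr[i] == 0; i--)
        ;
}

/* Final partial block: keystream = E_k(counter); dst = src ^ keystream
 * (open-coded crypto_xor_cpy); then advance the counter for completeness. */
static void ctr_encrypt_tail(const uint8_t key[BLOCK_SIZE],
                             uint8_t *dst, const uint8_t *src,
                             size_t nbytes, uint8_t iv[BLOCK_SIZE])
{
    uint8_t keystream[BLOCK_SIZE];

    block_encrypt(key, keystream, iv);
    for (size_t i = 0; i < nbytes; i++)
        dst[i] = src[i] ^ keystream[i];
    ctr_inc(iv);
}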
/linux-6.12.1/drivers/crypto/caam/
D | caampkc.c
    52  struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);  [in rsa_io_unmap(), local]
    55  dma_unmap_sg(dev, req_ctx->fixup_src, edesc->src_nents, DMA_TO_DEVICE);  [in rsa_io_unmap()]
    126  struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);  [in rsa_pub_done(), local]
    135  edesc = req_ctx->edesc;  [in rsa_pub_done()]
    160  struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);  [in rsa_priv_f_done(), local]
    168  edesc = req_ctx->edesc;  [in rsa_priv_f_done()]
    255  struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req);  [in rsa_edesc_alloc(), local]
    277  req_ctx->fixup_src = scatterwalk_ffwd(req_ctx->src, req->src,  [in rsa_edesc_alloc()]
    279  req_ctx->fixup_src_len = req->src_len - lzeros;  [in rsa_edesc_alloc()]
    286  req_ctx->fixup_src = req->src;  [in rsa_edesc_alloc()]
    [all …]
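In rsa_edesc_alloc(), the input is normalized before reaching the accelerator: leading zero bytes are skipped by fast-forwarding the scatterlist with scatterwalk_ffwd() and the trimmed length is kept in fixup_src_len, with fixup_src falling back to req->src when there is nothing to strip. Over a flat buffer the same fixup is plain pointer arithmetic, as in this sketch with illustrative names:

#include <stddef.h>
#include <stdint.h>

/* Skip leading zero bytes of an RSA input, analogous to what
 * rsa_edesc_alloc() does with scatterwalk_ffwd(); src is untouched and
 * fixup_* describe the trimmed view of it. */
static void rsa_fixup_src(const uint8_t *src, size_t src_len,
                          const uint8_t **fixup_src, size_t *fixup_len)
{
    size_t lzeros = 0;

    while (lzeros < src_len && src[lzeros] == 0)
        lzeros++;                 /* count leading zero bytes */

    *fixup_src = src + lzeros;    /* skip them */
    *fixup_len = src_len - lzeros;
}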
D | caamalg_qi2.c
    355  struct caam_request *req_ctx = aead_request_ctx_dma(req);  [in aead_edesc_alloc(), local]
    356  struct dpaa2_fl_entry *in_fle = &req_ctx->fd_flt[1];  [in aead_edesc_alloc()]
    357  struct dpaa2_fl_entry *out_fle = &req_ctx->fd_flt[0];  [in aead_edesc_alloc()]
    552  memset(&req_ctx->fd_flt, 0, sizeof(req_ctx->fd_flt));  [in aead_edesc_alloc()]
    1115  struct caam_request *req_ctx = skcipher_request_ctx_dma(req);  [in skcipher_edesc_alloc(), local]
    1116  struct dpaa2_fl_entry *in_fle = &req_ctx->fd_flt[1];  [in skcipher_edesc_alloc()]
    1117  struct dpaa2_fl_entry *out_fle = &req_ctx->fd_flt[0];  [in skcipher_edesc_alloc()]
    1241  memset(&req_ctx->fd_flt, 0, sizeof(req_ctx->fd_flt));  [in skcipher_edesc_alloc()]
    1289  struct caam_request *req_ctx = to_caam_req(areq);  [in aead_encrypt_done(), local]
    1290  struct aead_edesc *edesc = req_ctx->edesc;  [in aead_encrypt_done()]
    [all …]
/linux-6.12.1/drivers/crypto/chelsio/
D | chcr_algo.c
    1563  struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);  [in create_hash_wr(), local]
    1577  unsigned int rx_channel_id = req_ctx->rxqidx / ctx->rxq_perchan;  [in create_hash_wr()]
    1581  req_ctx->hctx_wr.imm = (transhdr_len + param->bfr_len +  [in create_hash_wr()]
    1583  nents = sg_nents_xlen(req_ctx->hctx_wr.srcsg, param->sg_len,  [in create_hash_wr()]
    1584  CHCR_SRC_SG_SIZE, req_ctx->hctx_wr.src_ofst);  [in create_hash_wr()]
    1586  transhdr_len += req_ctx->hctx_wr.imm ? roundup(param->bfr_len +  [in create_hash_wr()]
    1611  memcpy(chcr_req->key_ctx.key, req_ctx->partial_hash,  [in create_hash_wr()]
    1629  req_ctx->hctx_wr.dma_addr =  [in create_hash_wr()]
    1630  dma_map_single(&u_ctx->lldi.pdev->dev, req_ctx->reqbfr,  [in create_hash_wr()]
    1633  req_ctx->hctx_wr. dma_addr)) {  [in create_hash_wr()]
    [all …]
/linux-6.12.1/drivers/crypto/intel/ixp4xx/
D | ixp4xx_crypto.c
    354  struct aead_ctx *req_ctx = aead_request_ctx(req);  [in finish_scattered_hmac(), local]
    359  if (req_ctx->encrypt) {  [in finish_scattered_hmac()]
    360  scatterwalk_map_and_copy(req_ctx->hmac_virt, req->dst,  [in finish_scattered_hmac()]
    363  dma_pool_free(buffer_pool, req_ctx->hmac_virt, crypt->icv_rev_aes);  [in finish_scattered_hmac()]
    380  struct aead_ctx *req_ctx = aead_request_ctx(req);  [in one_packet(), local]
    382  free_buf_chain(dev, req_ctx->src, crypt->src_buf);  [in one_packet()]
    383  free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);  [in one_packet()]
    384  if (req_ctx->hmac_virt)  [in one_packet()]
    392  struct ablk_ctx *req_ctx = skcipher_request_ctx(req);  [in one_packet(), local]
    399  if (req_ctx->encrypt) {  [in one_packet()]
    [all …]
/linux-6.12.1/drivers/mtd/nand/
D | ecc-sw-bch.c
    234  ret = nand_ecc_init_req_tweaking(&engine_conf->req_ctx, nand);  [in nand_ecc_sw_bch_init_ctx()]
    267  nand_ecc_cleanup_req_tweaking(&engine_conf->req_ctx);  [in nand_ecc_sw_bch_init_ctx()]
    283  nand_ecc_cleanup_req_tweaking(&engine_conf->req_ctx);  [in nand_ecc_sw_bch_cleanup_ctx()]
    312  nand_ecc_tweak_req(&engine_conf->req_ctx, req);  [in nand_ecc_sw_bch_prepare_io_req()]
    353  nand_ecc_restore_req(&engine_conf->req_ctx, req);  [in nand_ecc_sw_bch_finish_io_req()]
    382  nand_ecc_restore_req(&engine_conf->req_ctx, req);  [in nand_ecc_sw_bch_finish_io_req()]
D | ecc-mxic.c
    107  struct nand_ecc_req_tweak_ctx req_ctx;  [member]
    309  ctx->req_ctx.oob_buffer_size = nanddev_per_page_oobsize(nand) +  [in mxic_ecc_init_ctx()]
    311  ret = nand_ecc_init_req_tweaking(&ctx->req_ctx, nand);  [in mxic_ecc_init_ctx()]
    355  nand_ecc_cleanup_req_tweaking(&ctx->req_ctx);  [in mxic_ecc_init_ctx()]
    417  nand_ecc_cleanup_req_tweaking(&ctx->req_ctx);  [in mxic_ecc_cleanup_ctx()]
    563  nand_ecc_tweak_req(&ctx->req_ctx, req);  [in mxic_ecc_prepare_io_req_external()]
    623  nand_ecc_restore_req(&ctx->req_ctx, req);  [in mxic_ecc_finish_io_req_external()]
    654  nand_ecc_restore_req(&ctx->req_ctx, req);  [in mxic_ecc_finish_io_req_external()]
    662  nand_ecc_restore_req(&ctx->req_ctx, req);  [in mxic_ecc_finish_io_req_external()]
    678  nand_ecc_tweak_req(&ctx->req_ctx, req);  [in mxic_ecc_prepare_io_req_pipelined()]
    [all …]
D | ecc-sw-hamming.c
    503  ret = nand_ecc_init_req_tweaking(&engine_conf->req_ctx, nand);  [in nand_ecc_sw_hamming_init_ctx()]
    522  nand_ecc_cleanup_req_tweaking(&engine_conf->req_ctx);  [in nand_ecc_sw_hamming_init_ctx()]
    537  nand_ecc_cleanup_req_tweaking(&engine_conf->req_ctx);  [in nand_ecc_sw_hamming_cleanup_ctx()]
    566  nand_ecc_tweak_req(&engine_conf->req_ctx, req);  [in nand_ecc_sw_hamming_prepare_io_req()]
    607  nand_ecc_restore_req(&engine_conf->req_ctx, req);  [in nand_ecc_sw_hamming_finish_io_req()]
    636  nand_ecc_restore_req(&engine_conf->req_ctx, req);  [in nand_ecc_sw_hamming_finish_io_req()]
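ecc-sw-bch.c, ecc-mxic.c, and ecc-sw-hamming.c all pair the request-tweaking helpers identically: nand_ecc_init_req_tweaking() once at context setup (with nand_ecc_cleanup_req_tweaking() on the init error path and again at teardown), nand_ecc_tweak_req() in prepare_io_req() to point the request at bounce buffers, and nand_ecc_restore_req() on every exit of finish_io_req(). A skeleton of that pairing; the my_ecc_* wrappers are illustrative, and only the four nand_ecc_* calls come from the hits above:

#include <linux/mtd/nand.h>

/* One tweak context per device, set up once; pairs with the cleanup below. */
static int my_ecc_init_ctx(struct nand_device *nand,
                           struct nand_ecc_req_tweak_ctx *req_ctx)
{
    return nand_ecc_init_req_tweaking(req_ctx, nand);
}

static void my_ecc_cleanup_ctx(struct nand_ecc_req_tweak_ctx *req_ctx)
{
    nand_ecc_cleanup_req_tweaking(req_ctx);
}

/* Every tweaked request must later be restored, even on error paths. */
static int my_ecc_prepare_io_req(struct nand_ecc_req_tweak_ctx *req_ctx,
                                 struct nand_page_io_req *req)
{
    nand_ecc_tweak_req(req_ctx, req);   /* swap in bounce buffers */
    /* ... compute or program ECC bytes here ... */
    return 0;
}

static int my_ecc_finish_io_req(struct nand_ecc_req_tweak_ctx *req_ctx,
                                struct nand_page_io_req *req)
{
    int ret = 0;

    /* ... verify/correct data here, setting ret on failure ... */
    nand_ecc_restore_req(req_ctx, req); /* copy back + restore the request */
    return ret;
}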
/linux-6.12.1/include/linux/mtd/
D | nand-ecc-sw-bch.h
    26  struct nand_ecc_req_tweak_ctx req_ctx;  [member]
D | nand-ecc-sw-hamming.h
    25  struct nand_ecc_req_tweak_ctx req_ctx;  [member]