Identifier search hits for "async_req" in linux-6.12.1. Each hit shows the file line number, the matching source line, and the enclosing function or declaration kind in brackets.

/linux-6.12.1/drivers/crypto/qce/
core.c
      66  static int qce_handle_request(struct crypto_async_request *async_req)  [in qce_handle_request(), argument]
      70  u32 type = crypto_tfm_alg_type(async_req->tfm);  [in qce_handle_request()]
      76  ret = ops->async_req_handle(async_req);  [in qce_handle_request()]
      86  struct crypto_async_request *async_req, *backlog;  [in qce_handle_queue(), local]
     102  async_req = crypto_dequeue_request(&qce->queue);  [in qce_handle_queue()]
     103  if (async_req)  [in qce_handle_queue()]
     104  qce->req = async_req;  [in qce_handle_queue()]
     108  if (!async_req)  [in qce_handle_queue()]
     117  err = qce_handle_request(async_req);  [in qce_handle_queue()]
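The hits at lines 86 through 117 trace the driver's queue pump. Below is a reconstruction sketch of that flow, not the verbatim kernel source: the qce_device fields (lock, queue, req, result, done_tasklet) and the locking and error paths are assumptions filled in around the listed lines.

    static int qce_handle_queue(struct qce_device *qce,
                                struct crypto_async_request *req)
    {
            struct crypto_async_request *async_req, *backlog;
            unsigned long flags;
            int ret = 0, err;

            spin_lock_irqsave(&qce->lock, flags);

            /* enqueue the new request; it may sit behind earlier ones */
            if (req)
                    ret = crypto_enqueue_request(&qce->queue, req);

            /* hardware busy: leave everything queued for the next pass */
            if (qce->req) {
                    spin_unlock_irqrestore(&qce->lock, flags);
                    return ret;
            }

            backlog = crypto_get_backlog(&qce->queue);
            async_req = crypto_dequeue_request(&qce->queue);
            if (async_req)
                    qce->req = async_req;

            spin_unlock_irqrestore(&qce->lock, flags);

            if (!async_req)
                    return ret;

            /* a backlogged submitter may now treat its request as in flight */
            if (backlog)
                    crypto_request_complete(backlog, -EINPROGRESS);

            err = qce_handle_request(async_req);
            if (err) {
                    qce->result = err;
                    tasklet_schedule(&qce->done_tasklet);
            }

            return ret;
    }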
common.c
     147  static int qce_setup_regs_ahash(struct crypto_async_request *async_req)  [in qce_setup_regs_ahash(), argument]
     149  struct ahash_request *req = ahash_request_cast(async_req);  [in qce_setup_regs_ahash()]
     150  struct crypto_ahash *ahash = __crypto_ahash_cast(async_req->tfm);  [in qce_setup_regs_ahash()]
     152  struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);  [in qce_setup_regs_ahash()]
     155  unsigned int blocksize = crypto_tfm_alg_blocksize(async_req->tfm);  [in qce_setup_regs_ahash()]
     314  static int qce_setup_regs_skcipher(struct crypto_async_request *async_req)  [in qce_setup_regs_skcipher(), argument]
     316  struct skcipher_request *req = skcipher_request_cast(async_req);  [in qce_setup_regs_skcipher()]
     318  struct qce_cipher_ctx *ctx = crypto_tfm_ctx(async_req->tfm);  [in qce_setup_regs_skcipher()]
     420  static int qce_setup_regs_aead(struct crypto_async_request *async_req)  [in qce_setup_regs_aead(), argument]
     422  struct aead_request *req = aead_request_cast(async_req);  [in qce_setup_regs_aead()]
    [all …]
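Each setup routine takes the type-erased crypto_async_request and immediately downcasts it. The *_request_cast() helpers are thin container_of() wrappers: every concrete request type embeds the generic request as its .base member, so the cast is just pointer arithmetic. The ahash variant, essentially as the crypto headers define it:

    static inline struct ahash_request *
    ahash_request_cast(struct crypto_async_request *req)
    {
            /* struct ahash_request embeds the generic request as .base */
            return container_of(req, struct ahash_request, base);
    }

skcipher_request_cast() and aead_request_cast() follow the same shape.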
sha.c
      38  struct crypto_async_request *async_req = data;  [in qce_ahash_done(), local]
      39  struct ahash_request *req = ahash_request_cast(async_req);  [in qce_ahash_done()]
      42  struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);  [in qce_ahash_done()]
      75  static int qce_ahash_async_req_handle(struct crypto_async_request *async_req)  [in qce_ahash_async_req_handle(), argument]
      77  struct ahash_request *req = ahash_request_cast(async_req);  [in qce_ahash_async_req_handle()]
      79  struct qce_sha_ctx *ctx = crypto_tfm_ctx(async_req->tfm);  [in qce_ahash_async_req_handle()]
      80  struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);  [in qce_ahash_async_req_handle()]
     112  &rctx->result_sg, 1, qce_ahash_done, async_req);  [in qce_ahash_async_req_handle()]
     118  ret = qce_start(async_req, tmpl->crypto_alg_type);  [in qce_ahash_async_req_handle()]
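Line 112 hands the generic request to the DMA layer as the opaque callback argument, and qce_ahash_done() (lines 38 through 42) casts it back. A sketch of that callback shape, with the DMA unmap and digest copy-out elided and the tmpl->qce, result, and done_tasklet fields assumed from the driver's style:

    static void qce_ahash_done_sketch(void *data)
    {
            struct crypto_async_request *async_req = data;
            struct ahash_request *req = ahash_request_cast(async_req);
            struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);
            struct qce_device *qce = tmpl->qce;     /* assumed field */
            int error = 0;

            /* ... terminate DMA, unmap scatterlists, copy digest to req->result ... */
            (void)req;

            qce->result = error;                    /* assumed bookkeeping */
            tasklet_schedule(&qce->done_tasklet);   /* completes, then pumps the queue again */
    }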
skcipher.c
      29  struct crypto_async_request *async_req = data;  [in qce_skcipher_done(), local]
      30  struct skcipher_request *req = skcipher_request_cast(async_req);  [in qce_skcipher_done()]
      64  qce_skcipher_async_req_handle(struct crypto_async_request *async_req)  [in qce_skcipher_async_req_handle(), argument]
      66  struct skcipher_request *req = skcipher_request_cast(async_req);  [in qce_skcipher_async_req_handle()]
     146  qce_skcipher_done, async_req);  [in qce_skcipher_async_req_handle()]
     152  ret = qce_start(async_req, tmpl->crypto_alg_type);  [in qce_skcipher_async_req_handle()]
aead.c
      25  struct crypto_async_request *async_req = data;  [in qce_aead_done(), local]
      26  struct aead_request *req = aead_request_cast(async_req);  [in qce_aead_done()]
      28  struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm);  [in qce_aead_done()]
     408  qce_aead_async_req_handle(struct crypto_async_request *async_req)  [in qce_aead_async_req_handle(), argument]
     410  struct aead_request *req = aead_request_cast(async_req);  [in qce_aead_async_req_handle()]
     413  struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm);  [in qce_aead_async_req_handle()]
     472  qce_aead_done, async_req);  [in qce_aead_async_req_handle()]
     478  ret = qce_start(async_req, tmpl->crypto_alg_type);  [in qce_aead_async_req_handle()]
core.h
      58  int (*async_req_handle)(struct crypto_async_request *async_req);
common.h
     102  int qce_start(struct crypto_async_request *async_req, u32 type);
/linux-6.12.1/crypto/
crypto_engine.c
      76  struct crypto_async_request *async_req, *backlog;  [in crypto_pump_requests(), local]
     123  async_req = crypto_dequeue_request(&engine->queue);  [in crypto_pump_requests()]
     124  if (!async_req)  [in crypto_pump_requests()]
     133  engine->cur_req = async_req;  [in crypto_pump_requests()]
     151  if (async_req->tfm->__crt_alg->cra_flags & CRYPTO_ALG_ENGINE) {  [in crypto_pump_requests()]
     152  alg = container_of(async_req->tfm->__crt_alg,  [in crypto_pump_requests()]
     161  ret = op->do_one_request(engine, async_req);  [in crypto_pump_requests()]
     183  crypto_enqueue_request_head(&engine->queue, async_req);  [in crypto_pump_requests()]
     192  crypto_request_complete(async_req, ret);  [in crypto_pump_requests()]
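crypto_pump_requests() is the generic form of the per-driver pumps seen above: dequeue, signal the backlog, then call the driver's do_one_request() op, located through the CRYPTO_ALG_ENGINE flag tested at line 151. A sketch of the driver side under the 6.x engine API; my_dev, my_encrypt(), and the stubbed hardware start are illustrative names, not from the listing:

    #include <crypto/engine.h>
    #include <crypto/internal/skcipher.h>

    struct my_dev {
            struct crypto_engine *engine;
    };

    static int my_do_one_request(struct crypto_engine *engine, void *areq)
    {
            struct skcipher_request *req =
                    container_of(areq, struct skcipher_request, base);

            /* program the hardware here; the interrupt path later calls
             * crypto_finalize_skcipher_request(engine, req, err)
             */
            (void)req;
            return 0;
    }

    static int my_encrypt(struct skcipher_request *req)
    {
            struct my_dev *dev =
                    *(struct my_dev **)crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));

            /* queue it; crypto_pump_requests() dispatches from the engine thread */
            return crypto_transfer_skcipher_request_to_engine(dev->engine, req);
    }

    static struct skcipher_engine_alg my_alg = {
            .base = { .encrypt = my_encrypt },   /* plus the usual skcipher_alg fields */
            .op   = { .do_one_request = my_do_one_request },
    };

Registering with crypto_engine_register_skcipher(&my_alg) sets CRYPTO_ALG_ENGINE, the flag crypto_pump_requests() checks at line 151 before trusting the op table.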
/linux-6.12.1/drivers/crypto/ccp/
ccp-crypto-aes.c
      22  static int ccp_aes_complete(struct crypto_async_request *async_req, int ret)  [in ccp_aes_complete(), argument]
      24  struct skcipher_request *req = skcipher_request_cast(async_req);  [in ccp_aes_complete()]
     132  static int ccp_aes_rfc3686_complete(struct crypto_async_request *async_req,  [in ccp_aes_rfc3686_complete(), argument]
     135  struct skcipher_request *req = skcipher_request_cast(async_req);  [in ccp_aes_rfc3686_complete()]
     141  return ccp_aes_complete(async_req, ret);  [in ccp_aes_rfc3686_complete()]
ccp-crypto-des3.c
      21  static int ccp_des3_complete(struct crypto_async_request *async_req, int ret)  [in ccp_des3_complete(), argument]
      23  struct skcipher_request *req = skcipher_request_cast(async_req);  [in ccp_des3_complete()]
ccp-crypto-aes-xts.c
      62  static int ccp_aes_xts_complete(struct crypto_async_request *async_req, int ret)  [in ccp_aes_xts_complete(), argument]
      64  struct skcipher_request *req = skcipher_request_cast(async_req);  [in ccp_aes_xts_complete()]
ccp-crypto-rsa.c
      44  static int ccp_rsa_complete(struct crypto_async_request *async_req, int ret)  [in ccp_rsa_complete(), argument]
      46  struct akcipher_request *req = akcipher_request_cast(async_req);  [in ccp_rsa_complete()]
ccp-crypto-aes-cmac.c
      23  static int ccp_aes_cmac_complete(struct crypto_async_request *async_req,  [in ccp_aes_cmac_complete(), argument]
      26  struct ahash_request *req = ahash_request_cast(async_req);  [in ccp_aes_cmac_complete()]
ccp-crypto-sha.c
      27  static int ccp_sha_complete(struct crypto_async_request *async_req, int ret)  [in ccp_sha_complete(), argument]
      29  struct ahash_request *req = ahash_request_cast(async_req);  [in ccp_sha_complete()]
ccp-crypto-aes-galois.c
      24  static int ccp_aes_gcm_complete(struct crypto_async_request *async_req, int ret)  [in ccp_aes_gcm_complete(), argument]
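Every ccp completion handler above shares one shape, int (*)(struct crypto_async_request *, int ret): downcast, patch up request state, pass the status on. Mode wrappers chain to the base handler, as ccp_aes_rfc3686_complete() does at line 141. A sketch with the fix-up bodies reduced to comments; the per-mode details are assumptions:

    static int aes_complete_sketch(struct crypto_async_request *async_req, int ret)
    {
            struct skcipher_request *req = skcipher_request_cast(async_req);

            if (ret)
                    return ret;

            /* e.g. copy the next IV produced by the CCP back into req->iv */
            (void)req;
            return 0;
    }

    static int rfc3686_complete_sketch(struct crypto_async_request *async_req, int ret)
    {
            /* undo the rfc3686 IV/counter rewriting first, then chain */
            return aes_complete_sketch(async_req, ret);
    }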
/linux-6.12.1/drivers/crypto/allwinner/sun8i-ce/
sun8i-ce-cipher.c
     120  static int sun8i_ce_cipher_prepare(struct crypto_engine *engine, void *async_req)  [in sun8i_ce_cipher_prepare(), argument]
     122  struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);  [in sun8i_ce_cipher_prepare()]
     303  void *async_req)  [in sun8i_ce_cipher_unprepare(), argument]
     305  struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);  [in sun8i_ce_cipher_unprepare()]
/linux-6.12.1/drivers/net/usb/
rtl8150.c
     141  struct async_req {  [struct]
     169  struct async_req *req = (struct async_req *)urb->context;  [in async_set_reg_cb()]
     182  struct async_req *req;  [in async_set_registers(), local]
     184  req = kmalloc(sizeof(struct async_req), GFP_ATOMIC);  [in async_set_registers()]
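This async_req is unrelated to the crypto API. rtl8150 bundles a control-URB setup packet and its payload into one heap allocation so a register write can be fired from atomic context and freed in the completion callback. A sketch of the pattern: only the names visible in the hits come from the source; the struct layout, the vendor request code 0x05, and the field handling are reconstructions, not quotations.

    #include <linux/usb.h>
    #include <linux/slab.h>

    struct async_req {
            struct usb_ctrlrequest dr;      /* setup packet, must outlive submit */
            __le16 rx_creg;                 /* register value being written */
    };

    static void async_set_reg_cb(struct urb *urb)
    {
            struct async_req *req = (struct async_req *)urb->context;

            kfree(req);                     /* fire-and-forget: free both pieces */
            usb_free_urb(urb);
    }

    static int async_set_registers(struct usb_device *udev, u16 indx, u16 size, u16 reg)
    {
            struct async_req *req;
            struct urb *urb;
            int ret;

            req = kmalloc(sizeof(struct async_req), GFP_ATOMIC);
            if (!req)
                    return -ENOMEM;
            urb = usb_alloc_urb(0, GFP_ATOMIC);
            if (!urb) {
                    kfree(req);
                    return -ENOMEM;
            }

            req->rx_creg = cpu_to_le16(reg);
            req->dr.bRequestType = USB_DIR_OUT | USB_TYPE_VENDOR | USB_RECIP_DEVICE;
            req->dr.bRequest = 0x05;        /* vendor "set registers", assumed */
            req->dr.wValue = cpu_to_le16(indx);
            req->dr.wIndex = 0;
            req->dr.wLength = cpu_to_le16(size);

            usb_fill_control_urb(urb, udev, usb_sndctrlpipe(udev, 0),
                                 (void *)&req->dr, &req->rx_creg, size,
                                 async_set_reg_cb, req);
            ret = usb_submit_urb(urb, GFP_ATOMIC);
            if (ret) {
                    usb_free_urb(urb);
                    kfree(req);
            }
            return ret;
    }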
/linux-6.12.1/drivers/crypto/
s5p-sss.c
    1370  struct crypto_async_request *async_req, *backlog;  [in s5p_hash_handle_queue(), local]
    1386  async_req = crypto_dequeue_request(&dd->hash_queue);  [in s5p_hash_handle_queue()]
    1387  if (async_req)  [in s5p_hash_handle_queue()]
    1392  if (!async_req)  [in s5p_hash_handle_queue()]
    1398  req = ahash_request_cast(async_req);  [in s5p_hash_handle_queue()]
    1971  struct crypto_async_request *async_req, *backlog;  [in s5p_tasklet_cb(), local]
    1977  async_req = crypto_dequeue_request(&dev->queue);  [in s5p_tasklet_cb()]
    1979  if (!async_req) {  [in s5p_tasklet_cb()]
    1989  dev->req = skcipher_request_cast(async_req);  [in s5p_tasklet_cb()]
img-hash.c
     503  struct crypto_async_request *async_req, *backlog;  [in img_hash_handle_queue(), local]
     519  async_req = crypto_dequeue_request(&hdev->queue);  [in img_hash_handle_queue()]
     520  if (async_req)  [in img_hash_handle_queue()]
     525  if (!async_req)  [in img_hash_handle_queue()]
     531  req = ahash_request_cast(async_req);  [in img_hash_handle_queue()]
atmel-tdes.c
     558  struct crypto_async_request *async_req, *backlog;  [in atmel_tdes_handle_queue(), local]
     572  async_req = crypto_dequeue_request(&dd->queue);  [in atmel_tdes_handle_queue()]
     573  if (async_req)  [in atmel_tdes_handle_queue()]
     577  if (!async_req)  [in atmel_tdes_handle_queue()]
     583  req = skcipher_request_cast(async_req);  [in atmel_tdes_handle_queue()]
sahara.c
     988  struct crypto_async_request *async_req = areq;  [in sahara_do_one_request(), local]
     991  if (crypto_tfm_alg_type(async_req->tfm) == CRYPTO_ALG_TYPE_AHASH) {  [in sahara_do_one_request()]
     992  struct ahash_request *req = ahash_request_cast(async_req);  [in sahara_do_one_request()]
     999  struct skcipher_request *req = skcipher_request_cast(async_req);  [in sahara_do_one_request()]
    1001  err = sahara_aes_process(skcipher_request_cast(async_req));  [in sahara_do_one_request()]
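sahara_do_one_request() shows the dispatch idiom for a driver that serves two algorithm types through a single engine op: test crypto_tfm_alg_type() and downcast accordingly. A condensed sketch in which sha_process() and aes_process() are hypothetical stand-ins for the driver's helpers:

    static int do_one_request_sketch(struct crypto_engine *engine, void *areq)
    {
            struct crypto_async_request *async_req = areq;
            int err;

            if (crypto_tfm_alg_type(async_req->tfm) == CRYPTO_ALG_TYPE_AHASH) {
                    struct ahash_request *req = ahash_request_cast(async_req);

                    err = sha_process(req);         /* hypothetical hash path */
            } else {
                    struct skcipher_request *req = skcipher_request_cast(async_req);

                    err = aes_process(req);         /* hypothetical cipher path */
            }

            return err;
    }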
hifn_795x.c
    1898  struct crypto_async_request *async_req;  [in hifn_flush(), local]
    1914  while ((async_req = crypto_dequeue_request(&dev->queue))) {  [in hifn_flush()]
    1915  req = skcipher_request_cast(async_req);  [in hifn_flush()]
    2025  struct crypto_async_request *async_req, *backlog;  [in hifn_process_queue(), local]
    2033  async_req = crypto_dequeue_request(&dev->queue);  [in hifn_process_queue()]
    2036  if (!async_req)  [in hifn_process_queue()]
    2042  req = skcipher_request_cast(async_req);  [in hifn_process_queue()]
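hifn_process_queue() is one more instance of the pump pattern, while hifn_flush() drains the queue on reset or teardown, failing every pending request. The drain loop reduces to a few lines; completing with -ENODEV is an assumed choice of error code:

    static void flush_queue_sketch(struct crypto_queue *queue)
    {
            struct crypto_async_request *async_req;

            /* dequeue until empty, completing each request with an error */
            while ((async_req = crypto_dequeue_request(queue)))
                    crypto_request_complete(async_req, -ENODEV);
    }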
/linux-6.12.1/drivers/nvme/host/
tcp.c
     196  struct nvme_tcp_request async_req;  [member]
     279  return req == &req->queue->ctrl->async_req;  [in nvme_tcp_async_req()]
    1353  struct nvme_tcp_request *async = &ctrl->async_req;  [in nvme_tcp_free_async_req()]
    1361  struct nvme_tcp_request *async = &ctrl->async_req;  [in nvme_tcp_alloc_async_req()]
    1899  if (to_tcp_ctrl(ctrl)->async_req.pdu) {  [in nvme_tcp_free_admin_queue()]
    1902  to_tcp_ctrl(ctrl)->async_req.pdu = NULL;  [in nvme_tcp_free_admin_queue()]
    2438  struct nvme_tcp_cmd_pdu *pdu = ctrl->async_req.pdu;  [in nvme_tcp_submit_async_event()]
    2454  ctrl->async_req.state = NVME_TCP_SEND_CMD_PDU;  [in nvme_tcp_submit_async_event()]
    2455  ctrl->async_req.offset = 0;  [in nvme_tcp_submit_async_event()]
    2456  ctrl->async_req.curr_bio = NULL;  [in nvme_tcp_submit_async_event()]
    [all …]
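Here async_req is a struct member rather than a local: nvme-tcp embeds one preallocated nvme_tcp_request in the controller for the Asynchronous Event Request, which has no block-layer request behind it, and recognizes it by address. A sketch of that identity test, paraphrasing line 279 with the listing's field names; the surrounding driver-private types are elided:

    static inline bool nvme_tcp_async_req_sketch(struct nvme_tcp_request *req)
    {
            /* the AER slot is the single request embedded in the ctrl itself */
            return req == &req->queue->ctrl->async_req;
    }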
/linux-6.12.1/drivers/crypto/amcc/
crypto4xx_core.h
      71  struct crypto_async_request *async_req;  /* base crypto request  [member]
crypto4xx_core.c
     526  req = skcipher_request_cast(pd_uinfo->async_req);  [in crypto4xx_cipher_done()]
     558  ahash_req = ahash_request_cast(pd_uinfo->async_req);  [in crypto4xx_ahash_done()]
     573  struct aead_request *aead_req = container_of(pd_uinfo->async_req,  [in crypto4xx_aead_done()]
     638  switch (crypto_tfm_alg_type(pd_uinfo->async_req->tfm)) {  [in crypto4xx_pd_done()]
     814  pd_uinfo->async_req = req;  [in crypto4xx_build_pd()]
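crypto4xx stores the generic request in its per-descriptor bookkeeping when the packet descriptor is built (line 814), so the completion interrupt can route by algorithm type (line 638). A sketch of that demultiplexer; the *_done() helper names and the driver-private struct layouts are simplified from the hits:

    static void pd_done_sketch(struct crypto4xx_device *dev,
                               struct pd_uinfo *pd_uinfo)
    {
            switch (crypto_tfm_alg_type(pd_uinfo->async_req->tfm)) {
            case CRYPTO_ALG_TYPE_SKCIPHER:
                    cipher_done(dev, pd_uinfo);   /* skcipher_request_cast() inside */
                    break;
            case CRYPTO_ALG_TYPE_AEAD:
                    aead_done(dev, pd_uinfo);     /* container_of(..., base) inside */
                    break;
            case CRYPTO_ALG_TYPE_AHASH:
                    ahash_done(dev, pd_uinfo);    /* ahash_request_cast() inside */
                    break;
            }
    }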
|