Matching lines from drivers/crypto/ccp/ccp-crypto-main.c (Linux kernel, AMD Cryptographic Coprocessor crypto API support)

// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) 2013,2017 Advanced Micro Devices, Inc.
 */

#include "ccp-crypto.h"

MODULE_PARM_DESC(aes_disable, "Disable use of AES - any non-zero value");
MODULE_PARM_DESC(sha_disable, "Disable use of SHA - any non-zero value");
MODULE_PARM_DESC(des3_disable, "Disable use of 3DES - any non-zero value");
MODULE_PARM_DESC(rsa_disable, "Disable use of RSA - any non-zero value");
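Each MODULE_PARM_DESC above pairs with a module_param() declaration earlier in the file. A minimal sketch of the AES knob, assuming the usual static-variable pattern (the variable type and the 0444 permissions are assumptions, not shown in the matched lines):

        static unsigned int aes_disable;
        module_param(aes_disable, uint, 0444);
        MODULE_PARM_DESC(aes_disable, "Disable use of AES - any non-zero value");

Setting aes_disable=1 when loading the module (or ccp_crypto.aes_disable=1 on the kernel command line, module name assumed) would then keep the driver from registering its AES algorithms.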
In ccp_crypto_success():

        if (err && (err != -EINPROGRESS) && (err != -EBUSY))
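Read in context, the check treats -EINPROGRESS and -EBUSY as success codes: the command was accepted even if it has not yet finished. A hedged reconstruction of the whole helper around the matched line:

        /* Sketch: "accepted for processing" counts as success. */
        static inline bool ccp_crypto_success(int err)
        {
                if (err && (err != -EINPROGRESS) && (err != -EBUSY))
                        return false;

                return true;
        }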
In ccp_crypto_cmd_complete():

                if (crypto_cmd->tfm != tmp->tfm)
        ...
                if (req_queue.backlog == &crypto_cmd->entry)
                        req_queue.backlog = crypto_cmd->entry.next;
        ...
                req_queue.backlog = req_queue.backlog->next;
        ...
                if (req_queue.backlog == &crypto_cmd->entry)
                        req_queue.backlog = crypto_cmd->entry.next;
        ...
        req_queue.cmd_count--;
        list_del(&crypto_cmd->entry);
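The backlog pointer advanced here is a cursor into a module-global command list; when it equals the list head there is no backlog. A sketch of the queue state these fragments manipulate, with field names taken from the fragments but the struct layout itself assumed:

        struct ccp_crypto_queue {
                struct list_head cmds;          /* all queued ccp_crypto_cmds */
                struct list_head *backlog;      /* first backlogged entry, or &cmds */
                unsigned int cmd_count;
        };

        static struct ccp_crypto_queue req_queue;

The two identical "skip over this cmd" checks then make sense: the cursor must never be left pointing at the entry that is about to be removed by list_del().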
In ccp_crypto_complete():

        struct crypto_async_request *req = crypto_cmd->req;
        struct ccp_ctx *ctx = crypto_tfm_ctx_dma(req->tfm);
        ...
        if (err == -EINPROGRESS) {
                /* Only propagate the -EINPROGRESS if necessary */
                if (crypto_cmd->ret == -EBUSY) {
                        crypto_cmd->ret = -EINPROGRESS;
                        crypto_request_complete(req, -EINPROGRESS);
                }
        ...
        /* Operation has completed - update the queue before invoking
        ...
                backlog->ret = -EINPROGRESS;
                crypto_request_complete(backlog->req, -EINPROGRESS);
        ...
        /* Transition the state from -EBUSY to -EINPROGRESS first */
        if (crypto_cmd->ret == -EBUSY)
                crypto_request_complete(req, -EINPROGRESS);
        ...
        if (ctx->complete)
                ret = ctx->complete(req, ret);
        ...
                held->cmd->flags |= CCP_CMD_MAY_BACKLOG;
                ret = ccp_enqueue_cmd(held->cmd);
        ...
                ctx = crypto_tfm_ctx_dma(held->req->tfm);
                if (ctx->complete)
                        ret = ctx->complete(held->req, ret);
                crypto_request_complete(held->req, ret);
        ...
                        backlog->ret = -EINPROGRESS;
                        crypto_request_complete(backlog->req, -EINPROGRESS);
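The -EBUSY to -EINPROGRESS transition follows the generic crypto API backlog contract: a request that was queued with -EBUSY must see one -EINPROGRESS completion before its final status is reported. For illustration only (this is the generic API, not part of this file), a synchronous caller usually absorbs both codes with the crypto_wait_req() helpers; req and the surrounding setup are hypothetical:

        DECLARE_CRYPTO_WAIT(wait);

        skcipher_request_set_callback(req,
                                      CRYPTO_TFM_REQ_MAY_BACKLOG |
                                      CRYPTO_TFM_REQ_MAY_SLEEP,
                                      crypto_req_done, &wait);

        /* crypto_wait_req() converts -EINPROGRESS/-EBUSY into a sleep
         * and returns the request's final completion status.
         */
        err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);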
In ccp_crypto_enqueue_cmd():

                if (!(crypto_cmd->cmd->flags & CCP_CMD_MAY_BACKLOG)) {
                        ret = -ENOSPC;
        ...
                if (crypto_cmd->tfm != tmp->tfm)
        ...
        ret = -EINPROGRESS;
        ...
                ret = ccp_enqueue_cmd(crypto_cmd->cmd);
        ...
                ret = -EBUSY;
        ...
                        req_queue.backlog = &crypto_cmd->entry;
        ...
        crypto_cmd->ret = ret;
        ...
        list_add_tail(&crypto_cmd->entry, &req_queue.cmds);
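Pulled together, the fragments give this decision flow: refuse with -ENOSPC when the queue is full and the command may not backlog; submit to the CCP only when no other command for the same tfm is already queued; and report -EBUSY, remembering the entry as the backlog head, when the queue is over its cap. A hedged sketch under those assumptions (the lock name, the e_lock label and the CCP_CRYPTO_MAX_QLEN cap are assumed, not in the matched lines):

        spin_lock_irqsave(&req_queue_lock, flags);

        if (req_queue.cmd_count >= CCP_CRYPTO_MAX_QLEN &&
            !(crypto_cmd->cmd->flags & CCP_CMD_MAY_BACKLOG)) {
                ret = -ENOSPC;                  /* full, caller won't wait */
                goto e_lock;
        }

        ret = -EINPROGRESS;
        if (!active) {                          /* no queued cmd for this tfm */
                ret = ccp_enqueue_cmd(crypto_cmd->cmd);
                if (!ccp_crypto_success(ret))
                        goto e_lock;            /* real error, don't queue */
        }

        if (req_queue.cmd_count >= CCP_CRYPTO_MAX_QLEN) {
                ret = -EBUSY;
                if (req_queue.backlog == &req_queue.cmds)
                        req_queue.backlog = &crypto_cmd->entry;
        }
        crypto_cmd->ret = ret;

        req_queue.cmd_count++;
        list_add_tail(&crypto_cmd->entry, &req_queue.cmds);

e_lock:
        spin_unlock_irqrestore(&req_queue_lock, flags);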
In ccp_crypto_enqueue_request():

 * ccp_crypto_enqueue_request - queue a crypto async request for processing
        ...
        gfp = req->flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL : GFP_ATOMIC;
        ...
                return -ENOMEM;
        ...
        crypto_cmd->cmd = cmd;
        crypto_cmd->req = req;
        crypto_cmd->tfm = req->tfm;
        ...
        cmd->callback = ccp_crypto_complete;
        cmd->data = crypto_cmd;
        ...
        if (req->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
                cmd->flags |= CCP_CMD_MAY_BACKLOG;
        else
                cmd->flags &= ~CCP_CMD_MAY_BACKLOG;
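From the algorithm side, each request handler fills in a ccp_cmd and hands it, together with the request's base, to this entry point. A hedged sketch modeled on the driver's AES path; struct ccp_aes_req_ctx, its cmd member and the engine setup are assumptions for illustration:

        /* Sketch: submit an skcipher request to the CCP queue.
         * The request-context layout here is assumed, not from this file.
         */
        static int ccp_aes_encrypt_sketch(struct skcipher_request *req)
        {
                struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);

                memset(&rctx->cmd, 0, sizeof(rctx->cmd));
                rctx->cmd.engine = CCP_ENGINE_AES;
                /* ... fill in key, IV and src/dst scatterlists ... */

                return ccp_crypto_enqueue_request(&req->base, &rctx->cmd);
        }

The tfm pointer is stashed in crypto_cmd->tfm rather than re-read from req later, because req may no longer be valid once the request's completion callback has run.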
In ccp_crypto_sg_table_add():

        struct scatterlist *sg, *sg_last = NULL;

        for (sg = table->sgl; sg; sg = sg_next(sg))
                if (!sg_page(sg))
                        break;
        if (WARN_ON(!sg))
                return NULL;

        for (; sg && sg_add; sg = sg_next(sg), sg_add = sg_next(sg_add)) {
                sg_set_page(sg, sg_page(sg_add), sg_add->length,
                            sg_add->offset);
                sg_last = sg;
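The helper copies sg_add's entries into the first unused slots of a pre-allocated table and returns the last slot written (NULL if the table runs out of room). A hedged usage sketch; the table sizing and the two source lists are hypothetical:

        struct sg_table table;
        struct scatterlist *sg;

        /* Reserve slots for both pieces up front. */
        if (sg_alloc_table(&table, buf_nents + src_nents, GFP_KERNEL))
                return -ENOMEM;

        /* Splice the buffered block in first, then the request data. */
        sg = ccp_crypto_sg_table_add(&table, &buf_sg);
        if (sg)
                sg = ccp_crypto_sg_table_add(&table, req_sg);
        if (!sg)
                return -EINVAL;         /* table was sized too small */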
In ccp_unregister_algs():

                crypto_unregister_ahash(&ahash_alg->alg);
                list_del(&ahash_alg->entry);
        ...
                crypto_unregister_skcipher(&ablk_alg->alg);
                list_del(&ablk_alg->entry);
        ...
                crypto_unregister_aead(&aead_alg->alg);
                list_del(&aead_alg->entry);
        ...
                crypto_unregister_akcipher(&akc_alg->alg);
                list_del(&akc_alg->entry);
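Each unregister/list_del pair sits inside a safe walk over the per-type registration list, freeing the wrapper as it goes. A sketch of one such loop, with the list name (hash_algs) and the kfree() assumed from the pattern:

        struct ccp_crypto_ahash_alg *ahash_alg, *ahash_tmp;

        list_for_each_entry_safe(ahash_alg, ahash_tmp, &hash_algs, entry) {
                crypto_unregister_ahash(&ahash_alg->alg);
                list_del(&ahash_alg->entry);
                kfree(ahash_alg);       /* wrapper allocated at register time */
        }

list_for_each_entry_safe() is required here because the loop deletes the entry it is currently standing on.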