Lines Matching +full:sg +full:- +full:micro
// SPDX-License-Identifier: GPL-2.0-only
 * Copyright (C) 2013,2018 Advanced Micro Devices, Inc.
#include "ccp-crypto.h"
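These matches appear to come from the AMD Cryptographic Coprocessor (CCP) SHA glue code, presumably drivers/crypto/ccp/ccp-crypto-sha.c in the kernel tree. Only the lines hit by the query are listed, grouped below by the function they fall in.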
In ccp_sha_complete():

        if (rctx->hash_rem) {
                unsigned int offset = rctx->nbytes - rctx->hash_rem;

                scatterwalk_map_and_copy(rctx->buf, rctx->src,
                                         offset, rctx->hash_rem, 0);
                rctx->buf_count = rctx->hash_rem;
        } else {
                rctx->buf_count = 0;
        }

        if (req->result && rctx->final)
                memcpy(req->result, rctx->ctx, digest_size);

        sg_free_table(&rctx->data_sg);
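The completion handler carries any unhashed tail of the request back into rctx->buf (via scatterwalk_map_and_copy() over the source scatterlist) so the next update can prepend it, and copies the finished digest out only on the final pass. A minimal user-space sketch of the carry step, assuming a flat source buffer; save_tail() and its parameters are illustrative names, not driver API:

#include <stddef.h>
#include <string.h>

/* Sketch: keep the last `rem` unhashed bytes of `src` for the next update;
 * the driver does the equivalent over a scatterlist. */
static size_t save_tail(unsigned char *carry, const unsigned char *src,
                        size_t nbytes, size_t rem)
{
        if (rem)
                memcpy(carry, src + (nbytes - rem), rem);
        return rem;     /* the new buf_count */
}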
In ccp_do_sha_update():

        struct scatterlist *sg;

        len = (u64)rctx->buf_count + (u64)nbytes;

                scatterwalk_map_and_copy(rctx->buf + rctx->buf_count, req->src,
                rctx->buf_count += nbytes;

        rctx->src = req->src;
        rctx->nbytes = nbytes;

        rctx->final = final;
        rctx->hash_rem = final ? 0 : len & (block_size - 1);
        rctx->hash_cnt = len - rctx->hash_rem;
        if (!final && !rctx->hash_rem) {
                rctx->hash_cnt -= block_size;
                rctx->hash_rem = block_size;
        }

        sg_init_one(&rctx->ctx_sg, rctx->ctx, sizeof(rctx->ctx));

        sg = NULL;
        if (rctx->buf_count && nbytes) {
                /* Build the data scatterlist table - allocate enough entries ... */
                gfp = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
                sg_count = sg_nents(req->src) + 1;
                ret = sg_alloc_table(&rctx->data_sg, sg_count, gfp);

                sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count);
                sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->buf_sg);
                if (!sg) {
                        ret = -EINVAL;

                sg = ccp_crypto_sg_table_add(&rctx->data_sg, req->src);
                if (!sg) {
                        ret = -EINVAL;

                sg_mark_end(sg);

                sg = rctx->data_sg.sgl;
        } else if (rctx->buf_count) {
                sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count);

                sg = &rctx->buf_sg;

                sg = req->src;

        rctx->msg_bits += (rctx->hash_cnt << 3);        /* Total in bits */

        memset(&rctx->cmd, 0, sizeof(rctx->cmd));
        INIT_LIST_HEAD(&rctx->cmd.entry);
        rctx->cmd.engine = CCP_ENGINE_SHA;
        rctx->cmd.u.sha.type = rctx->type;
        rctx->cmd.u.sha.ctx = &rctx->ctx_sg;

        switch (rctx->type) {
                rctx->cmd.u.sha.ctx_len = SHA1_DIGEST_SIZE;
                rctx->cmd.u.sha.ctx_len = SHA224_DIGEST_SIZE;
                rctx->cmd.u.sha.ctx_len = SHA256_DIGEST_SIZE;
                rctx->cmd.u.sha.ctx_len = SHA384_DIGEST_SIZE;
                rctx->cmd.u.sha.ctx_len = SHA512_DIGEST_SIZE;

        rctx->cmd.u.sha.src = sg;
        rctx->cmd.u.sha.src_len = rctx->hash_cnt;
        rctx->cmd.u.sha.opad = ctx->u.sha.key_len ?
                &ctx->u.sha.opad_sg : NULL;
        rctx->cmd.u.sha.opad_len = ctx->u.sha.key_len ?
                ctx->u.sha.opad_count : 0;
        rctx->cmd.u.sha.first = rctx->first;
        rctx->cmd.u.sha.final = rctx->final;
        rctx->cmd.u.sha.msg_bits = rctx->msg_bits;

        rctx->first = 0;

        ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);

        sg_free_table(&rctx->data_sg);
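The key arithmetic above is the split of the buffered-plus-new data into a chunk handed to the hardware (hash_cnt) and a remainder to carry (hash_rem): the remainder is len modulo the block size, except that a non-final update landing exactly on a block boundary holds one full block back so a later final/finup always has data to process. A stand-alone sketch of just that computation; split_update() and its parameters are illustrative names, not the driver's API:

#include <stdint.h>
#include <stdio.h>

/* Sketch: mirror the hash_cnt/hash_rem split from ccp_do_sha_update().
 * block_size must be a power of two, as SHA block sizes are. */
static void split_update(uint64_t buf_count, uint64_t nbytes,
                         unsigned int block_size, int final,
                         uint64_t *hash_cnt, unsigned int *hash_rem)
{
        uint64_t len = buf_count + nbytes;

        *hash_rem = final ? 0 : len & (block_size - 1);
        *hash_cnt = len - *hash_rem;
        if (!final && !*hash_rem) {
                /* Exact multiple of the block size: keep one block back. */
                *hash_cnt -= block_size;
                *hash_rem = block_size;
        }
}

int main(void)
{
        uint64_t cnt;
        unsigned int rem;

        split_update(0, 128, 64, 0, &cnt, &rem);
        printf("hash_cnt=%llu hash_rem=%u\n", (unsigned long long)cnt, rem);
        /* prints hash_cnt=64 hash_rem=64 */
        return 0;
}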
In ccp_sha_init():

        rctx->type = alg->type;
        rctx->first = 1;

        if (ctx->u.sha.key_len) {
                memcpy(rctx->buf, ctx->u.sha.ipad, block_size);
                rctx->buf_count = block_size;
In ccp_sha_update():

        return ccp_do_sha_update(req, req->nbytes, 0);

In ccp_sha_finup():

        return ccp_do_sha_update(req, req->nbytes, 1);
In ccp_sha_export():

        state.type = rctx->type;
        state.msg_bits = rctx->msg_bits;
        state.first = rctx->first;
        memcpy(state.ctx, rctx->ctx, sizeof(state.ctx));
        state.buf_count = rctx->buf_count;
        memcpy(state.buf, rctx->buf, sizeof(state.buf));
In ccp_sha_import():

        rctx->type = state.type;
        rctx->msg_bits = state.msg_bits;
        rctx->first = state.first;
        memcpy(rctx->ctx, state.ctx, sizeof(rctx->ctx));
        rctx->buf_count = state.buf_count;
        memcpy(rctx->buf, state.buf, sizeof(rctx->buf));
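ccp_sha_export()/ccp_sha_import() serialize the partial-hash state (algorithm type, running message bit count, the first flag, the intermediate digest and the buffered partial block) through a flat structure so a partially hashed request can be saved and later resumed. A rough user-space analogue of that round trip; sha_exp_sketch and its field sizes are illustrative, not the layout of struct ccp_sha_exp_ctx:

#include <string.h>

/* Illustrative export blob; field sizes are examples only. */
struct sha_exp_sketch {
        unsigned int type;
        unsigned long long msg_bits;
        unsigned int first;
        unsigned char ctx[64];          /* intermediate digest */
        unsigned int buf_count;
        unsigned char buf[128];         /* unhashed partial block */
};

/* Sketch: export copies the live state out, import copies it back,
 * so hashing resumes exactly where it stopped. */
static void sha_export(const struct sha_exp_sketch *live, void *out)
{
        memcpy(out, live, sizeof(*live));
}

static void sha_import(struct sha_exp_sketch *live, const void *in)
{
        memcpy(live, in, sizeof(*live));
}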
In ccp_sha_setkey():

        struct crypto_shash *shash = ctx->u.sha.hmac_tfm;

        ctx->u.sha.key_len = 0;

        memset(ctx->u.sha.key, 0, sizeof(ctx->u.sha.key));

                                       ctx->u.sha.key);
                        return -EINVAL;

                memcpy(ctx->u.sha.key, key, key_len);

                ctx->u.sha.ipad[i] = ctx->u.sha.key[i] ^ HMAC_IPAD_VALUE;
                ctx->u.sha.opad[i] = ctx->u.sha.key[i] ^ HMAC_OPAD_VALUE;

        sg_init_one(&ctx->u.sha.opad_sg, ctx->u.sha.opad, block_size);
        ctx->u.sha.opad_count = block_size;

        ctx->u.sha.key_len = key_len;
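ccp_sha_setkey() prepares the standard HMAC key schedule: the key buffer is zeroed, a key longer than the block size is first digested down to fit (the dangling ctx->u.sha.key); line above is the tail of that shash call), and ipad/opad are then the zero-padded key XORed with 0x36 and 0x5c (HMAC_IPAD_VALUE / HMAC_OPAD_VALUE). A self-contained sketch of the pad derivation, assuming the key already fits in one block; hmac_pads() is an illustrative name:

#include <stddef.h>
#include <string.h>

#define IPAD_BYTE 0x36  /* HMAC_IPAD_VALUE */
#define OPAD_BYTE 0x5c  /* HMAC_OPAD_VALUE */

/* Sketch: derive the HMAC inner/outer pad blocks from a key of at most
 * block_size bytes (longer keys must be hashed down first). */
static void hmac_pads(const unsigned char *key, size_t key_len,
                      unsigned char *ipad, unsigned char *opad,
                      size_t block_size)
{
        unsigned char k[256] = { 0 };   /* zero-padded key; block_size <= 256 */
        size_t i;

        memcpy(k, key, key_len);
        for (i = 0; i < block_size; i++) {
                ipad[i] = k[i] ^ IPAD_BYTE;
                opad[i] = k[i] ^ OPAD_BYTE;
        }
}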
In ccp_sha_cra_init():

        ctx->complete = ccp_sha_complete;
        ctx->u.sha.key_len = 0;
In ccp_hmac_sha_cra_init():

        hmac_tfm = crypto_alloc_shash(alg->child_alg, 0, 0);

                        alg->child_alg);

        ctx->u.sha.hmac_tfm = hmac_tfm;
In ccp_hmac_sha_cra_exit():

        if (ctx->u.sha.hmac_tfm)
                crypto_free_shash(ctx->u.sha.hmac_tfm);
In the per-algorithm definition table (one entry per supported SHA variant):

                .drv_name       = "sha1-ccp",
                .drv_name       = "sha224-ccp",
                .drv_name       = "sha256-ccp",
                .drv_name       = "sha384-ccp",
                .drv_name       = "sha512-ccp",
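Each .drv_name above belongs to one definition entry; the registration code below only consumes its name, drv_name, digest_size, block_size and type fields. A guess at the shape of the sha256 entry, using an illustrative struct rather than the driver's real definition struct:

/* Illustrative shape only -- not the driver's actual definition struct. */
struct sha_def_sketch {
        const char *name;
        const char *drv_name;
        unsigned int digest_size;
        unsigned int block_size;
        int type;
};

static const struct sha_def_sketch sha256_def = {
        .name           = "sha256",
        .drv_name       = "sha256-ccp",
        .digest_size    = 32,   /* SHA256_DIGEST_SIZE */
        .block_size     = 64,   /* SHA256_BLOCK_SIZE */
        .type           = 2,    /* stands in for the driver's SHA-256 type enum */
};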
In ccp_register_hmac_alg():

                return -ENOMEM;

        INIT_LIST_HEAD(&ccp_alg->entry);

        strscpy(ccp_alg->child_alg, def->name, CRYPTO_MAX_ALG_NAME);

        alg = &ccp_alg->alg;
        alg->setkey = ccp_sha_setkey;

        halg = &alg->halg;

        base = &halg->base;
        snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME, "hmac(%s)", def->name);
        snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME, "hmac-%s",
                 def->drv_name);
        base->cra_init = ccp_hmac_sha_cra_init;
        base->cra_exit = ccp_hmac_sha_cra_exit;

                       base->cra_name, ret);

        list_add(&ccp_alg->entry, head);
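The two snprintf() formats decide how the HMAC variant is visible to the crypto API; a quick user-space demonstration using the strings of the sha256 entry:

#include <stdio.h>

int main(void)
{
        char cra_name[128], cra_driver_name[128];

        /* Same formats as ccp_register_hmac_alg(), for def->name "sha256"
         * and def->drv_name "sha256-ccp". */
        snprintf(cra_name, sizeof(cra_name), "hmac(%s)", "sha256");
        snprintf(cra_driver_name, sizeof(cra_driver_name), "hmac-%s",
                 "sha256-ccp");
        printf("%s / %s\n", cra_name, cra_driver_name);
        /* prints: hmac(sha256) / hmac-sha256-ccp */
        return 0;
}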
In ccp_register_sha_alg():

                return -ENOMEM;

        INIT_LIST_HEAD(&ccp_alg->entry);

        ccp_alg->type = def->type;

        alg = &ccp_alg->alg;
        alg->init = ccp_sha_init;
        alg->update = ccp_sha_update;
        alg->final = ccp_sha_final;
        alg->finup = ccp_sha_finup;
        alg->digest = ccp_sha_digest;
        alg->export = ccp_sha_export;
        alg->import = ccp_sha_import;

        halg = &alg->halg;
        halg->digestsize = def->digest_size;
        halg->statesize = sizeof(struct ccp_sha_exp_ctx);

        base = &halg->base;
        snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME, "%s", def->name);
        snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
                 def->drv_name);
        base->cra_flags = CRYPTO_ALG_ASYNC |
        base->cra_blocksize = def->block_size;
        base->cra_ctxsize = sizeof(struct ccp_ctx) + crypto_dma_padding();
        base->cra_priority = CCP_CRA_PRIORITY;
        base->cra_init = ccp_sha_cra_init;
        base->cra_exit = ccp_sha_cra_exit;
        base->cra_module = THIS_MODULE;

                       base->cra_name, ret);

        list_add(&ccp_alg->entry, head);
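On success the new algorithm is added to the caller's list so it can be unregistered at module unload; the dangling base->cra_name, ret fragments here and in ccp_register_hmac_alg() above are the tails of the error messages printed when registering the hash with the crypto API fails.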