Lines matching refs: rctx (struct tegra_sha_reqctx, the per-request hash context in the Tegra Security Engine SHA driver)

114 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_fallback_init() local
118 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in tegra_sha_fallback_init()
119 rctx->fallback_req.base.flags = req->base.flags & in tegra_sha_fallback_init()
122 return crypto_ahash_init(&rctx->fallback_req); in tegra_sha_fallback_init()
127 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_fallback_update() local
131 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in tegra_sha_fallback_update()
132 rctx->fallback_req.base.flags = req->base.flags & in tegra_sha_fallback_update()
134 rctx->fallback_req.nbytes = req->nbytes; in tegra_sha_fallback_update()
135 rctx->fallback_req.src = req->src; in tegra_sha_fallback_update()
137 return crypto_ahash_update(&rctx->fallback_req); in tegra_sha_fallback_update()
142 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_fallback_final() local
146 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in tegra_sha_fallback_final()
147 rctx->fallback_req.base.flags = req->base.flags & in tegra_sha_fallback_final()
149 rctx->fallback_req.result = req->result; in tegra_sha_fallback_final()
151 return crypto_ahash_final(&rctx->fallback_req); in tegra_sha_fallback_final()
156 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_fallback_finup() local
160 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in tegra_sha_fallback_finup()
161 rctx->fallback_req.base.flags = req->base.flags & in tegra_sha_fallback_finup()
164 rctx->fallback_req.nbytes = req->nbytes; in tegra_sha_fallback_finup()
165 rctx->fallback_req.src = req->src; in tegra_sha_fallback_finup()
166 rctx->fallback_req.result = req->result; in tegra_sha_fallback_finup()
168 return crypto_ahash_finup(&rctx->fallback_req); in tegra_sha_fallback_finup()
173 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_fallback_digest() local
177 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in tegra_sha_fallback_digest()
178 rctx->fallback_req.base.flags = req->base.flags & in tegra_sha_fallback_digest()
181 rctx->fallback_req.nbytes = req->nbytes; in tegra_sha_fallback_digest()
182 rctx->fallback_req.src = req->src; in tegra_sha_fallback_digest()
183 rctx->fallback_req.result = req->result; in tegra_sha_fallback_digest()
185 return crypto_ahash_digest(&rctx->fallback_req); in tegra_sha_fallback_digest()
190 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_fallback_import() local
194 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in tegra_sha_fallback_import()
195 rctx->fallback_req.base.flags = req->base.flags & in tegra_sha_fallback_import()
198 return crypto_ahash_import(&rctx->fallback_req, in); in tegra_sha_fallback_import()
203 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_fallback_export() local
207 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm); in tegra_sha_fallback_export()
208 rctx->fallback_req.base.flags = req->base.flags & in tegra_sha_fallback_export()
211 return crypto_ahash_export(&rctx->fallback_req, out); in tegra_sha_fallback_export()
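The six tegra_sha_fallback_*() helpers above share one delegation pattern: mirror the caller's request into rctx->fallback_req, retarget it at the software fallback transform held in the tfm context, then call the matching crypto_ahash_*() entry point. A minimal sketch of that pattern for the update step, with stand-in context types, and with the flag mask (elided by the truncated "req->base.flags &" lines) assumed to be CRYPTO_TFM_REQ_MAY_SLEEP:

#include <crypto/internal/hash.h>

/* Stand-ins for the driver-private contexts; field names follow the
 * listing, everything else is assumed. */
struct tegra_sha_ctx {
	struct crypto_ahash *fallback_tfm;	/* software fallback */
};

struct tegra_sha_reqctx {
	struct ahash_request fallback_req;	/* shadow request for the
						 * fallback transform */
};

static int tegra_sha_fallback_update_sketch(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));

	/* Retarget the shadow request at the software implementation. */
	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	/* Propagate only the may-sleep flag (assumed mask). */
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	/* Hand over the caller's data unchanged. */
	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;

	return crypto_ahash_update(&rctx->fallback_req);
}

The final(), finup() and digest() variants additionally forward req->result, while import() and export() pass the opaque state blob straight through.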
215 struct tegra_sha_reqctx *rctx) in tegra_sha_prep_cmd() argument
220 msg_len = rctx->total_len * 8; in tegra_sha_prep_cmd()
221 msg_left = rctx->datbuf.size * 8; in tegra_sha_prep_cmd()
229 if (rctx->task & SHA_UPDATE) { in tegra_sha_prep_cmd()
246 cpuvaddr[i++] = rctx->config; in tegra_sha_prep_cmd()
248 if (rctx->task & SHA_FIRST) { in tegra_sha_prep_cmd()
250 rctx->task &= ~SHA_FIRST; in tegra_sha_prep_cmd()
255 cpuvaddr[i++] = rctx->datbuf.addr; in tegra_sha_prep_cmd()
256 cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->datbuf.addr)) | in tegra_sha_prep_cmd()
257 SE_ADDR_HI_SZ(rctx->datbuf.size)); in tegra_sha_prep_cmd()
258 cpuvaddr[i++] = rctx->digest.addr; in tegra_sha_prep_cmd()
259 cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->digest.addr)) | in tegra_sha_prep_cmd()
260 SE_ADDR_HI_SZ(rctx->digest.size)); in tegra_sha_prep_cmd()
261 if (rctx->key_id) { in tegra_sha_prep_cmd()
264 cpuvaddr[i++] = SE_AES_KEY_INDEX(rctx->key_id); in tegra_sha_prep_cmd()
277 msg_len, msg_left, rctx->config); in tegra_sha_prep_cmd()
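tegra_sha_prep_cmd() assembles the command stream the Security Engine executes. Lines 220-221 convert byte counts to bit counts, since SHA length fields are expressed in bits, and lines 255-260 program each DMA buffer as an address/size pair: the low 32 address bits in one word, the top address bits plus the buffer size packed into the following ADDR_HI word. A sketch of that packing; the field layout of the SE_ADDR_HI_* macros is an assumption, as the listing does not show their definitions:

#include <linux/bitfield.h>
#include <linux/kernel.h>
#include <linux/types.h>

/* Assumed layout of the ADDR_HI word: top byte carries the address bits
 * above 32, the low 24 bits carry the buffer size. The real values live
 * in the driver's SE_ADDR_HI_* macros. */
#define SE_ADDR_HI_MSB(msb)	FIELD_PREP(GENMASK(31, 24), (msb))
#define SE_ADDR_HI_SZ(sz)	FIELD_PREP(GENMASK(23, 0), (sz))

/* Push one buffer descriptor (64-bit DMA address split across two
 * 32-bit words) into the command stream, as done for datbuf and the
 * digest buffer at lines 255-260. Returns the updated word index. */
static int se_push_buf(u32 *cpuvaddr, int i, dma_addr_t addr, u32 size)
{
	cpuvaddr[i++] = lower_32_bits(addr);
	cpuvaddr[i++] = SE_ADDR_HI_MSB(upper_32_bits(addr)) |
			SE_ADDR_HI_SZ(size);
	return i;
}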
282 static void tegra_sha_copy_hash_result(struct tegra_se *se, struct tegra_sha_reqctx *rctx) in tegra_sha_copy_hash_result() argument
287 rctx->result[i] = readl(se->base + se->hw->regs->result + (i * 4)); in tegra_sha_copy_hash_result()
290 static void tegra_sha_paste_hash_result(struct tegra_se *se, struct tegra_sha_reqctx *rctx) in tegra_sha_paste_hash_result() argument
295 writel(rctx->result[i], in tegra_sha_paste_hash_result()
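tegra_sha_copy_hash_result() and tegra_sha_paste_hash_result() save and restore the engine's intermediate digest registers, which is what lets one hardware engine be shared across interleaved requests: do_update() reloads the saved state before submitting (line 356) and spills it again afterwards unless this was the final pass (lines 367-368). A sketch of the pair, with the word count and register stride as assumptions (the listing elides the loop bounds):

#include <linux/io.h>
#include <linux/types.h>

/* Assumed intermediate-state bound; the driver derives the real size
 * from the algorithm. SHA-512 needs 16 32-bit words. */
#define SHA_STATE_WORDS	16

static void sha_save_state(void __iomem *result_regs, u32 *state)
{
	int i;

	/* Read the HASH_RESULT words out of the engine, 4 bytes apart. */
	for (i = 0; i < SHA_STATE_WORDS; i++)
		state[i] = readl(result_regs + i * 4);
}

static void sha_restore_state(void __iomem *result_regs, const u32 *state)
{
	int i;

	/* Write the saved words back before resuming this request. */
	for (i = 0; i < SHA_STATE_WORDS; i++)
		writel(state[i], result_regs + i * 4);
}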
302 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_do_update() local
306 nresidue = (req->nbytes + rctx->residue.size) % rctx->blk_size; in tegra_sha_do_update()
307 nblks = (req->nbytes + rctx->residue.size) / rctx->blk_size; in tegra_sha_do_update()
314 nresidue = rctx->blk_size; in tegra_sha_do_update()
318 rctx->src_sg = req->src; in tegra_sha_do_update()
319 rctx->datbuf.size = (req->nbytes + rctx->residue.size) - nresidue; in tegra_sha_do_update()
320 rctx->total_len += rctx->datbuf.size; in tegra_sha_do_update()
327 scatterwalk_map_and_copy(rctx->residue.buf + rctx->residue.size, in tegra_sha_do_update()
328 rctx->src_sg, 0, req->nbytes, 0); in tegra_sha_do_update()
330 rctx->residue.size += req->nbytes; in tegra_sha_do_update()
335 if (rctx->residue.size) in tegra_sha_do_update()
336 memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size); in tegra_sha_do_update()
338 scatterwalk_map_and_copy(rctx->datbuf.buf + rctx->residue.size, in tegra_sha_do_update()
339 rctx->src_sg, 0, req->nbytes - nresidue, 0); in tegra_sha_do_update()
341 scatterwalk_map_and_copy(rctx->residue.buf, rctx->src_sg, in tegra_sha_do_update()
345 rctx->residue.size = nresidue; in tegra_sha_do_update()
347 rctx->config = tegra_sha_get_config(rctx->alg) | in tegra_sha_do_update()
355 if (!(rctx->task & SHA_FIRST)) in tegra_sha_do_update()
356 tegra_sha_paste_hash_result(ctx->se, rctx); in tegra_sha_do_update()
358 size = tegra_sha_prep_cmd(ctx->se, cpuvaddr, rctx); in tegra_sha_do_update()
367 if (!(rctx->task & SHA_FINAL)) in tegra_sha_do_update()
368 tegra_sha_copy_hash_result(ctx->se, rctx); in tegra_sha_do_update()
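The block arithmetic at lines 306-314 decides how much of the accumulated data (carried residue plus the new req->nbytes) is sent to the engine now and how much is held back for the next call. When the total is an exact multiple of the block size, one full block is deliberately retained (nresidue = rctx->blk_size) so that tegra_sha_do_final() always has data to close the hash with. Two worked cases, assuming SHA-256's 64-byte block:

/*
 * Case 1: carried residue = 10, req->nbytes = 100 -> total = 110
 *   nresidue    = 110 % 64 = 46
 *   nblks       = 110 / 64 = 1
 *   datbuf.size = 110 - 46 = 64   (one block goes to the engine)
 *   The trailing 46 bytes are copied into residue.buf for next time.
 *
 * Case 2: carried residue = 28, req->nbytes = 100 -> total = 128
 *   128 % 64 == 0, so nresidue is forced to 64 (one whole block held
 *   back) and datbuf.size = 128 - 64 = 64: one block is hashed now,
 *   one full block stays in residue.buf so the FINAL pass is never
 *   issued with an empty buffer.
 */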
375 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_do_final() local
382 memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size); in tegra_sha_do_final()
383 rctx->datbuf.size = rctx->residue.size; in tegra_sha_do_final()
384 rctx->total_len += rctx->residue.size; in tegra_sha_do_final()
386 rctx->config = tegra_sha_get_config(rctx->alg) | in tegra_sha_do_final()
389 size = tegra_sha_prep_cmd(se, cpuvaddr, rctx); in tegra_sha_do_final()
396 memcpy(req->result, rctx->digest.buf, rctx->digest.size); in tegra_sha_do_final()
400 rctx->datbuf.buf, rctx->datbuf.addr); in tegra_sha_do_final()
402 rctx->residue.buf, rctx->residue.addr); in tegra_sha_do_final()
403 dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf, in tegra_sha_do_final()
404 rctx->digest.addr); in tegra_sha_do_final()
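Once the digest has been copied out to req->result (line 396), tegra_sha_do_final() releases the three per-request DMA buffers allocated in tegra_sha_init(). A sketch of that teardown as a hypothetical helper (the driver inlines the frees), with the buffer sizes on the elided lines 399 and 401 assumed to mirror the allocations:

#include <linux/dma-mapping.h>
#include <linux/sizes.h>
#include <linux/types.h>

#define SE_SHA_BUFLEN	SZ_16K	/* assumed value; defined in the driver */

/* Stand-ins for the driver's DMA buffer triple (assumed layout). */
struct tegra_se_datbuf {
	u8 *buf;
	dma_addr_t addr;
	u32 size;
};

struct tegra_sha_reqctx {
	struct tegra_se_datbuf datbuf, residue, digest;
	unsigned int blk_size;
	/* ... remaining fields as in the driver ... */
};

static void tegra_sha_free_bufs(struct device *dev,
				struct tegra_sha_reqctx *rctx)
{
	/* Mirror of the three allocations made in tegra_sha_init(). */
	dma_free_coherent(dev, SE_SHA_BUFLEN, rctx->datbuf.buf,
			  rctx->datbuf.addr);
	dma_free_coherent(dev, rctx->blk_size, rctx->residue.buf,
			  rctx->residue.addr);
	dma_free_coherent(dev, rctx->digest.size, rctx->digest.buf,
			  rctx->digest.addr);
}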
411 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_do_one_req() local
417 if (rctx->task & SHA_UPDATE) { in tegra_sha_do_one_req()
419 rctx->task &= ~SHA_UPDATE; in tegra_sha_do_one_req()
422 if (rctx->task & SHA_FINAL) { in tegra_sha_do_one_req()
424 rctx->task &= ~SHA_FINAL; in tegra_sha_do_one_req()
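tegra_sha_do_one_req() is the worker the crypto engine queue invokes: it checks rctx->task for SHA_UPDATE and SHA_FINAL, runs each stage through the functions above, and clears the bit once done. Because finup() and digest() set both bits (lines 608 and 623), a single queued request can cover update and final in one pass. A sketch of that dispatch; the flag values and the error short-circuit between the stages are assumptions, as the listing shows neither:

#include <crypto/internal/hash.h>
#include <linux/bits.h>

/* Assumed bit assignments; the listing shows only the names. */
#define SHA_FIRST	BIT(0)
#define SHA_UPDATE	BIT(1)
#define SHA_FINAL	BIT(2)

struct tegra_sha_reqctx {
	unsigned int task;	/* stand-in: only the field used here */
};

int tegra_sha_do_update(struct ahash_request *req);
int tegra_sha_do_final(struct ahash_request *req);

static int tegra_sha_do_one_req_sketch(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	int ret = 0;

	if (rctx->task & SHA_UPDATE) {
		ret = tegra_sha_do_update(req);
		rctx->task &= ~SHA_UPDATE;
	}

	/* Skip the final stage if the update stage failed (added guard). */
	if (!ret && (rctx->task & SHA_FINAL)) {
		ret = tegra_sha_do_final(req);
		rctx->task &= ~SHA_FINAL;
	}

	return ret;
}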
502 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_init() local
510 rctx->total_len = 0; in tegra_sha_init()
511 rctx->datbuf.size = 0; in tegra_sha_init()
512 rctx->residue.size = 0; in tegra_sha_init()
513 rctx->key_id = ctx->key_id; in tegra_sha_init()
514 rctx->task = SHA_FIRST; in tegra_sha_init()
515 rctx->alg = ctx->alg; in tegra_sha_init()
516 rctx->blk_size = crypto_ahash_blocksize(tfm); in tegra_sha_init()
517 rctx->digest.size = crypto_ahash_digestsize(tfm); in tegra_sha_init()
519 rctx->digest.buf = dma_alloc_coherent(se->dev, rctx->digest.size, in tegra_sha_init()
520 &rctx->digest.addr, GFP_KERNEL); in tegra_sha_init()
521 if (!rctx->digest.buf) in tegra_sha_init()
524 rctx->residue.buf = dma_alloc_coherent(se->dev, rctx->blk_size, in tegra_sha_init()
525 &rctx->residue.addr, GFP_KERNEL); in tegra_sha_init()
526 if (!rctx->residue.buf) in tegra_sha_init()
529 rctx->datbuf.buf = dma_alloc_coherent(se->dev, SE_SHA_BUFLEN, in tegra_sha_init()
530 &rctx->datbuf.addr, GFP_KERNEL); in tegra_sha_init()
531 if (!rctx->datbuf.buf) in tegra_sha_init()
537 dma_free_coherent(se->dev, rctx->blk_size, rctx->residue.buf, in tegra_sha_init()
538 rctx->residue.addr); in tegra_sha_init()
540 dma_free_coherent(se->dev, SE_SHA_BUFLEN, rctx->datbuf.buf, in tegra_sha_init()
541 rctx->datbuf.addr); in tegra_sha_init()
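tegra_sha_init() resets the running state (lines 510-517) and then allocates the three DMA-coherent buffers in sequence; the frees at lines 537-541 are the unwind path for a failed later allocation. A sketch of the allocate-or-unwind ladder, reusing the stand-in types from the teardown sketch above; the label names and the conventional reverse-order unwind shown here are assumptions:

static int tegra_sha_init_bufs(struct device *dev,
			       struct tegra_sha_reqctx *rctx)
{
	rctx->digest.buf = dma_alloc_coherent(dev, rctx->digest.size,
					      &rctx->digest.addr, GFP_KERNEL);
	if (!rctx->digest.buf)
		goto digbuf_fail;

	rctx->residue.buf = dma_alloc_coherent(dev, rctx->blk_size,
					       &rctx->residue.addr, GFP_KERNEL);
	if (!rctx->residue.buf)
		goto resbuf_fail;

	rctx->datbuf.buf = dma_alloc_coherent(dev, SE_SHA_BUFLEN,
					      &rctx->datbuf.addr, GFP_KERNEL);
	if (!rctx->datbuf.buf)
		goto datbuf_fail;

	return 0;

datbuf_fail:
	/* datbuf failed: undo the residue allocation ... */
	dma_free_coherent(dev, rctx->blk_size, rctx->residue.buf,
			  rctx->residue.addr);
resbuf_fail:
	/* ... and fall through to undo the digest allocation. */
	dma_free_coherent(dev, rctx->digest.size, rctx->digest.buf,
			  rctx->digest.addr);
digbuf_fail:
	return -ENOMEM;
}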
573 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_update() local
580 rctx->task |= SHA_UPDATE; in tegra_sha_update()
587 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_final() local
594 rctx->task |= SHA_FINAL; in tegra_sha_final()
601 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_finup() local
608 rctx->task |= SHA_UPDATE | SHA_FINAL; in tegra_sha_finup()
615 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_digest() local
623 rctx->task |= SHA_UPDATE | SHA_FINAL; in tegra_sha_digest()
630 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_export() local
637 memcpy(out, rctx, sizeof(*rctx)); in tegra_sha_export()
644 struct tegra_sha_reqctx *rctx = ahash_request_ctx(req); in tegra_sha_import() local
651 memcpy(rctx, in, sizeof(*rctx)); in tegra_sha_import()
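Finally, export() and import() snapshot the whole request context with a single memcpy(), which is what makes the partial state (total_len, residue, saved intermediate digest, task flags) serializable across requests, as the ahash API requires; the algorithm's statesize must therefore be sizeof(struct tegra_sha_reqctx). One caveat worth noting: the snapshot includes live DMA pointers, so an imported state presumably only works while the buffers allocated by the originating init() are still alive. A sketch of the pair:

#include <crypto/internal/hash.h>
#include <linux/string.h>

static int tegra_sha_export_sketch(struct ahash_request *req, void *out)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);

	/* Whole-struct snapshot into the caller's statesize buffer. */
	memcpy(out, rctx, sizeof(*rctx));
	return 0;
}

static int tegra_sha_import_sketch(struct ahash_request *req, const void *in)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);

	memcpy(rctx, in, sizeof(*rctx));
	return 0;
}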