Lines matching "aes-cmac" (query tokenized as "aes", "-", "cmac")
1 // SPDX-License-Identifier: GPL-2.0-only
3 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
5 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
11 #include <crypto/aes.h>
22 #include "aes-ce-setkey.h"
41 MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS/XCTR using ARMv8 Crypto Extensions");
58 MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS/XCTR using ARMv8 NEON");
61 MODULE_ALIAS_CRYPTO("ecb(aes)");
62 MODULE_ALIAS_CRYPTO("cbc(aes)");
63 MODULE_ALIAS_CRYPTO("ctr(aes)");
64 MODULE_ALIAS_CRYPTO("xts(aes)");
65 MODULE_ALIAS_CRYPTO("xctr(aes)");
67 MODULE_ALIAS_CRYPTO("cts(cbc(aes))");
68 MODULE_ALIAS_CRYPTO("essiv(cbc(aes),sha256)");
69 MODULE_ALIAS_CRYPTO("cmac(aes)");
70 MODULE_ALIAS_CRYPTO("xcbc(aes)");
71 MODULE_ALIAS_CRYPTO("cbcmac(aes)");
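These MODULE_ALIAS_CRYPTO entries let the module auto-load when any of the listed algorithm names is requested through the kernel crypto API. A minimal sketch of exercising the "cmac(aes)" alias from kernel code follows; the helper name cmac_digest is illustrative, not part of this file, and error paths are kept minimal:

    #include <crypto/hash.h>
    #include <linux/err.h>

    /* Sketch: one-shot cmac(aes) over a flat buffer. Requesting the
     * name "cmac(aes)" is what triggers the alias-based module load. */
    static int cmac_digest(const u8 *key, unsigned int keylen,
                           const u8 *data, unsigned int len, u8 *out)
    {
            struct crypto_shash *tfm;
            int err;

            tfm = crypto_alloc_shash("cmac(aes)", 0, 0);
            if (IS_ERR(tfm))
                    return PTR_ERR(tfm);

            err = crypto_shash_setkey(tfm, key, keylen);
            if (!err)
                    err = crypto_shash_tfm_digest(tfm, data, len, out);

            crypto_free_shash(tfm);
            return err;
    }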
76 /* defined in aes-modes.S */
155 ret = aes_expandkey(&ctx->key1, in_key, key_len / 2); in xts_set_key()
157 ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2], in xts_set_key()
170 ret = aes_expandkey(&ctx->key1, in_key, key_len); in essiv_cbc_set_key()
174 crypto_shash_tfm_digest(ctx->hash, in_key, key_len, digest); in essiv_cbc_set_key()
176 return aes_expandkey(&ctx->key2, digest, sizeof(digest)); in essiv_cbc_set_key()
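As the essiv_cbc_set_key() lines above show, ESSIV derives its second key by hashing the first key with SHA-256; the 32-byte digest then serves directly as an AES-256 key. A standalone userspace sketch of the same derivation, assuming OpenSSL is available (the function name essiv_derive_key2 is illustrative):

    #include <openssl/sha.h>

    /* ESSIV key derivation as in essiv_cbc_set_key(): the second key
     * is simply the SHA-256 digest of the first. */
    static void essiv_derive_key2(const unsigned char *key1, size_t key1_len,
                                  unsigned char key2[SHA256_DIGEST_LENGTH])
    {
            SHA256(key1, key1_len, key2); /* key2 is then used as an AES-256 key */
    }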
183 int err, rounds = 6 + ctx->key_length / 4; in ecb_encrypt()
192 ctx->key_enc, rounds, blocks); in ecb_encrypt()
203 int err, rounds = 6 + ctx->key_length / 4; in ecb_decrypt()
212 ctx->key_dec, rounds, blocks); in ecb_decrypt()
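The recurring expression rounds = 6 + key_length / 4 maps the AES key length in bytes to the standard round count. A quick standalone check:

    #include <assert.h>

    /* rounds = 6 + key_length / 4: 16 -> 10, 24 -> 12, 32 -> 14 */
    int main(void)
    {
            assert(6 + 16 / 4 == 10); /* AES-128 */
            assert(6 + 24 / 4 == 12); /* AES-192 */
            assert(6 + 32 / 4 == 14); /* AES-256 */
            return 0;
    }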
224 int err = 0, rounds = 6 + ctx->key_length / 4; in cbc_encrypt_walk()
227 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt_walk()
229 aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr, in cbc_encrypt_walk()
230 ctx->key_enc, rounds, blocks, walk->iv); in cbc_encrypt_walk()
232 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_encrypt_walk()
253 int err = 0, rounds = 6 + ctx->key_length / 4; in cbc_decrypt_walk()
256 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt_walk()
258 aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr, in cbc_decrypt_walk()
259 ctx->key_dec, rounds, blocks, walk->iv); in cbc_decrypt_walk()
261 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_decrypt_walk()
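Both CBC loops follow the usual skcipher_walk pattern: encrypt as many whole blocks as the current chunk holds, then pass the unprocessed remainder (walk->nbytes % AES_BLOCK_SIZE) to skcipher_walk_done(), whose second argument is the byte count left over for the next iteration. A standalone sketch of just that bookkeeping, with hypothetical chunk sizes:

    #include <stdio.h>

    #define AES_BLOCK_SIZE 16

    /* Walk arithmetic only: each chunk yields nbytes / 16 whole blocks;
     * the tail is reported back and carried into the next chunk. */
    int main(void)
    {
            unsigned int chunks[] = { 64, 40, 20 }; /* hypothetical walk sizes */

            for (unsigned int i = 0; i < 3; i++) {
                    unsigned int nbytes = chunks[i];

                    printf("chunk %u: %u block(s), %u byte(s) carried over\n",
                           nbytes, nbytes / AES_BLOCK_SIZE,
                           nbytes % AES_BLOCK_SIZE);
            }
            return 0;
    }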
281 int err, rounds = 6 + ctx->key_length / 4; in cts_cbc_encrypt()
282 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_encrypt()
283 struct scatterlist *src = req->src, *dst = req->dst; in cts_cbc_encrypt()
292 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_encrypt()
293 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_encrypt()
294 return -EINVAL; in cts_cbc_encrypt()
299 skcipher_request_set_crypt(&subreq, req->src, req->dst, in cts_cbc_encrypt()
301 req->iv); in cts_cbc_encrypt()
308 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_encrypt()
311 dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); in cts_cbc_encrypt()
312 if (req->dst != req->src) in cts_cbc_encrypt()
313 dst = scatterwalk_ffwd(sg_dst, req->dst, in cts_cbc_encrypt()
319 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_encrypt()
320 req->iv); in cts_cbc_encrypt()
328 ctx->key_enc, rounds, walk.nbytes, walk.iv); in cts_cbc_encrypt()
338 int err, rounds = 6 + ctx->key_length / 4; in cts_cbc_decrypt()
339 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_decrypt()
340 struct scatterlist *src = req->src, *dst = req->dst; in cts_cbc_decrypt()
349 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_decrypt()
350 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_decrypt()
351 return -EINVAL; in cts_cbc_decrypt()
356 skcipher_request_set_crypt(&subreq, req->src, req->dst, in cts_cbc_decrypt()
358 req->iv); in cts_cbc_decrypt()
365 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_decrypt()
368 dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); in cts_cbc_decrypt()
369 if (req->dst != req->src) in cts_cbc_decrypt()
370 dst = scatterwalk_ffwd(sg_dst, req->dst, in cts_cbc_decrypt()
376 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_decrypt()
377 req->iv); in cts_cbc_decrypt()
385 ctx->key_dec, rounds, walk.nbytes, walk.iv); in cts_cbc_decrypt()
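In both CTS paths, cbc_blocks counts the whole blocks handled by plain CBC before the ciphertext-stealing step, which always takes the last two (possibly partial) blocks. Worked arithmetic, with DIV_ROUND_UP defined as in the kernel:

    #include <stdio.h>

    #define AES_BLOCK_SIZE 16
    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    int main(void)
    {
            /* cryptlen -> blocks left for plain CBC before the CTS tail */
            unsigned int lens[] = { 16, 17, 32, 33, 64 };

            for (int i = 0; i < 5; i++)
                    printf("cryptlen %3u: cbc_blocks = %d\n", lens[i],
                           DIV_ROUND_UP(lens[i], AES_BLOCK_SIZE) - 2);
            return 0;
    }

For any cryptlen from 17 to 32 bytes this yields 0, i.e. the whole request goes through the CTS step; a single exact block (cryptlen == AES_BLOCK_SIZE) takes the early plain-CBC path seen above.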
395 ctx->hash = crypto_alloc_shash("sha256", 0, 0); in essiv_cbc_init_tfm()
397 return PTR_ERR_OR_ZERO(ctx->hash); in essiv_cbc_init_tfm()
404 crypto_free_shash(ctx->hash); in essiv_cbc_exit_tfm()
411 int err, rounds = 6 + ctx->key1.key_length / 4; in essiv_cbc_encrypt()
421 ctx->key1.key_enc, rounds, blocks, in essiv_cbc_encrypt()
422 req->iv, ctx->key2.key_enc); in essiv_cbc_encrypt()
433 int err, rounds = 6 + ctx->key1.key_length / 4; in essiv_cbc_decrypt()
443 ctx->key1.key_dec, rounds, blocks, in essiv_cbc_decrypt()
444 req->iv, ctx->key2.key_enc); in essiv_cbc_decrypt()
455 int err, rounds = 6 + ctx->key_length / 4; in xctr_encrypt()
476 src = dst = memcpy(buf + sizeof(buf) - nbytes, in xctr_encrypt()
479 nbytes &= ~(AES_BLOCK_SIZE - 1); in xctr_encrypt()
482 aes_xctr_encrypt(dst, src, ctx->key_enc, rounds, nbytes, in xctr_encrypt()
488 buf + sizeof(buf) - nbytes, nbytes); in xctr_encrypt()
491 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in xctr_encrypt()
501 int err, rounds = 6 + ctx->key_length / 4; in ctr_encrypt()
521 src = dst = memcpy(buf + sizeof(buf) - nbytes, in ctr_encrypt()
524 nbytes &= ~(AES_BLOCK_SIZE - 1); in ctr_encrypt()
527 aes_ctr_encrypt(dst, src, ctx->key_enc, rounds, nbytes, in ctr_encrypt()
533 buf + sizeof(buf) - nbytes, nbytes); in ctr_encrypt()
535 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in ctr_encrypt()
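For a trailing partial block, both xctr_encrypt() and ctr_encrypt() copy the tail flush against the end of a block-sized stack buffer, let the assembly routine finish on that buffer, and copy the result back from the same offset. A standalone sketch of the pointer arithmetic only (the encrypt step is stubbed out; the 5-byte tail is hypothetical):

    #include <stdio.h>
    #include <string.h>

    #define AES_BLOCK_SIZE 16

    int main(void)
    {
            unsigned char buf[AES_BLOCK_SIZE];
            const unsigned char tail[5] = "abcd"; /* hypothetical tail bytes */
            unsigned int nbytes = sizeof(tail);

            /* Place the tail at the end of the block, mirroring
             * src = dst = memcpy(buf + sizeof(buf) - nbytes, src, nbytes); */
            unsigned char *p = memcpy(buf + sizeof(buf) - nbytes, tail, nbytes);

            /* ... whole-block CTR encryption of buf would happen here ... */

            printf("tail sits at offset %td of the block\n", p - buf);
            return 0;
    }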
545 int err, first, rounds = 6 + ctx->key1.key_length / 4; in xts_encrypt()
546 int tail = req->cryptlen % AES_BLOCK_SIZE; in xts_encrypt()
552 if (req->cryptlen < AES_BLOCK_SIZE) in xts_encrypt()
553 return -EINVAL; in xts_encrypt()
558 int xts_blocks = DIV_ROUND_UP(req->cryptlen, in xts_encrypt()
559 AES_BLOCK_SIZE) - 2; in xts_encrypt()
567 skcipher_request_set_crypt(&subreq, req->src, req->dst, in xts_encrypt()
569 req->iv); in xts_encrypt()
580 nbytes &= ~(AES_BLOCK_SIZE - 1); in xts_encrypt()
584 ctx->key1.key_enc, rounds, nbytes, in xts_encrypt()
585 ctx->key2.key_enc, walk.iv, first); in xts_encrypt()
587 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in xts_encrypt()
593 dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen); in xts_encrypt()
594 if (req->dst != req->src) in xts_encrypt()
595 dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen); in xts_encrypt()
598 req->iv); in xts_encrypt()
606 ctx->key1.key_enc, rounds, walk.nbytes, in xts_encrypt()
607 ctx->key2.key_enc, walk.iv, first); in xts_encrypt()
617 int err, first, rounds = 6 + ctx->key1.key_length / 4; in xts_decrypt()
618 int tail = req->cryptlen % AES_BLOCK_SIZE; in xts_decrypt()
624 if (req->cryptlen < AES_BLOCK_SIZE) in xts_decrypt()
625 return -EINVAL; in xts_decrypt()
630 int xts_blocks = DIV_ROUND_UP(req->cryptlen, in xts_decrypt()
631 AES_BLOCK_SIZE) - 2; in xts_decrypt()
639 skcipher_request_set_crypt(&subreq, req->src, req->dst, in xts_decrypt()
641 req->iv); in xts_decrypt()
652 nbytes &= ~(AES_BLOCK_SIZE - 1); in xts_decrypt()
656 ctx->key1.key_dec, rounds, nbytes, in xts_decrypt()
657 ctx->key2.key_enc, walk.iv, first); in xts_decrypt()
659 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in xts_decrypt()
665 dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen); in xts_decrypt()
666 if (req->dst != req->src) in xts_decrypt()
667 dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen); in xts_decrypt()
670 req->iv); in xts_decrypt()
679 ctx->key1.key_dec, rounds, walk.nbytes, in xts_decrypt()
680 ctx->key2.key_enc, walk.iv, first); in xts_decrypt()
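When an XTS request is not block-aligned (tail != 0), both paths first push all but the last two blocks through the main loop (xts_blocks = DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE) - 2), then run a separate ciphertext-stealing pass over the remainder. The same arithmetic as in the CTS-CBC example above, for one hypothetical length:

    #include <stdio.h>

    #define AES_BLOCK_SIZE 16
    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    int main(void)
    {
            unsigned int cryptlen = 100; /* hypothetical unaligned request */
            int tail = cryptlen % AES_BLOCK_SIZE;
            int xts_blocks = DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE) - 2;

            /* prints: tail=4, main loop: 5 blocks, CTS pass: 20 bytes */
            printf("tail=%d, main loop: %d blocks, CTS pass: %u bytes\n",
                   tail, xts_blocks,
                   cryptlen - xts_blocks * AES_BLOCK_SIZE);
            return 0;
    }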
689 .cra_name = "ecb(aes)",
690 .cra_driver_name = "ecb-aes-" MODE,
703 .cra_name = "cbc(aes)",
704 .cra_driver_name = "cbc-aes-" MODE,
718 .cra_name = "ctr(aes)",
719 .cra_driver_name = "ctr-aes-" MODE,
734 .cra_name = "xctr(aes)",
735 .cra_driver_name = "xctr-aes-" MODE,
750 .cra_name = "xts(aes)",
751 .cra_driver_name = "xts-aes-" MODE,
767 .cra_name = "cts(cbc(aes))",
768 .cra_driver_name = "cts-cbc-aes-" MODE,
783 .cra_name = "essiv(cbc(aes),sha256)",
784 .cra_driver_name = "essiv-cbc-aes-sha256-" MODE,
805 return aes_expandkey(&ctx->key, in_key, key_len); in cbcmac_setkey()
810 u64 a = be64_to_cpu(x->a); in cmac_gf128_mul_by_x()
811 u64 b = be64_to_cpu(x->b); in cmac_gf128_mul_by_x()
813 y->a = cpu_to_be64((a << 1) | (b >> 63)); in cmac_gf128_mul_by_x()
814 y->b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0)); in cmac_gf128_mul_by_x()
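cmac_gf128_mul_by_x() doubles a value in GF(2^128): shift the 128-bit big-endian value left by one bit and, if the top bit was set, fold it back in with the reduction constant 0x87. A standalone byte-wise re-implementation, checked against the RFC 4493 subkey test vector (K1 is the doubling of L = AES-128(K, 0^128) for the RFC's sample key):

    #include <assert.h>
    #include <stdint.h>
    #include <string.h>

    /* Double a 128-bit big-endian value in GF(2^128), equivalent to
     * cmac_gf128_mul_by_x(): shift left one bit, reduce with 0x87. */
    static void gf128_mul_by_x(uint8_t y[16], const uint8_t x[16])
    {
            int carry = x[0] >> 7;

            for (int i = 0; i < 15; i++)
                    y[i] = (uint8_t)((x[i] << 1) | (x[i + 1] >> 7));
            y[15] = (uint8_t)((x[15] << 1) ^ (carry ? 0x87 : 0));
    }

    int main(void)
    {
            /* RFC 4493: L = AES-128(K, 0^128), K1 = double(L) */
            static const uint8_t L[16] = {
                    0x7d, 0xf7, 0x6b, 0x0c, 0x1a, 0xb8, 0x99, 0xb3,
                    0x3e, 0x42, 0xf0, 0x47, 0xb9, 0x1b, 0x54, 0x6f,
            };
            static const uint8_t K1[16] = {
                    0xfb, 0xee, 0xd6, 0x18, 0x35, 0x71, 0x33, 0x66,
                    0x7c, 0x85, 0xe0, 0x8f, 0x72, 0x36, 0xa8, 0xde,
            };
            uint8_t out[16];

            gf128_mul_by_x(out, L);
            assert(memcmp(out, K1, 16) == 0);
            return 0;
    }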
821 be128 *consts = (be128 *)ctx->consts; in cmac_setkey()
831 aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, ctx->key.key_enc, in cmac_setkey()
845 { [0 ... AES_BLOCK_SIZE - 1] = 0x1 }, in xcbc_setkey()
846 { [0 ... AES_BLOCK_SIZE - 1] = 0x2 }, in xcbc_setkey()
847 { [0 ... AES_BLOCK_SIZE - 1] = 0x3 }, in xcbc_setkey()
860 aes_ecb_encrypt(key, ks[0], ctx->key.key_enc, rounds, 1); in xcbc_setkey()
861 aes_ecb_encrypt(ctx->consts, ks[1], ctx->key.key_enc, rounds, 2); in xcbc_setkey()
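xcbc_setkey() derives its subkeys per RFC 3566 by encrypting the constant blocks 0x01...01, 0x02...02 and 0x03...03 under the user key: the 0x01 block encrypts to a fresh CBC-MAC key, the other two to the finalization constants kept in ctx->consts. A userspace sketch of the same derivation, assuming OpenSSL's EVP interface (the function name xcbc_subkeys is illustrative):

    #include <openssl/evp.h>
    #include <string.h>

    /* Derive the three RFC 3566 XCBC subkeys: ks[i] = AES-128(key, (i+1)^16).
     * Returns 1 on success, 0 on failure. */
    static int xcbc_subkeys(const unsigned char key[16], unsigned char ks[3][16])
    {
            EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
            int outl, ok = 0;

            if (!c)
                    return 0;
            if (EVP_EncryptInit_ex(c, EVP_aes_128_ecb(), NULL, key, NULL) == 1) {
                    unsigned char in[16];

                    EVP_CIPHER_CTX_set_padding(c, 0);
                    ok = 1;
                    for (int i = 0; i < 3 && ok; i++) {
                            memset(in, i + 1, sizeof(in));
                            ok = EVP_EncryptUpdate(c, ks[i], &outl, in, 16) == 1;
                    }
            }
            EVP_CIPHER_CTX_free(c);
            return ok;
    }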
871 memset(ctx->dg, 0, AES_BLOCK_SIZE); in mac_init()
872 ctx->len = 0; in mac_init()
880 int rounds = 6 + ctx->key_length / 4; in mac_do_update()
887 rem = aes_mac_update(in, ctx->key_enc, rounds, blocks, in mac_do_update()
890 in += (blocks - rem) * AES_BLOCK_SIZE; in mac_do_update()
898 while (blocks--) { in mac_do_update()
910 struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm); in mac_update()
916 if ((ctx->len % AES_BLOCK_SIZE) == 0 && in mac_update()
917 (ctx->len + len) > AES_BLOCK_SIZE) { in mac_update()
923 mac_do_update(&tctx->key, p, blocks, ctx->dg, in mac_update()
924 (ctx->len != 0), (len != 0)); in mac_update()
929 ctx->len = AES_BLOCK_SIZE; in mac_update()
932 ctx->len = 0; in mac_update()
935 l = min(len, AES_BLOCK_SIZE - ctx->len); in mac_update()
938 crypto_xor(ctx->dg + ctx->len, p, l); in mac_update()
939 ctx->len += l; in mac_update()
940 len -= l; in mac_update()
950 struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm); in cbcmac_final()
953 mac_do_update(&tctx->key, NULL, 0, ctx->dg, (ctx->len != 0), 0); in cbcmac_final()
955 memcpy(out, ctx->dg, AES_BLOCK_SIZE); in cbcmac_final()
962 struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm); in cmac_final()
964 u8 *consts = tctx->consts; in cmac_final()
966 if (ctx->len != AES_BLOCK_SIZE) { in cmac_final()
967 ctx->dg[ctx->len] ^= 0x80; in cmac_final()
971 mac_do_update(&tctx->key, consts, 1, ctx->dg, 0, 1); in cmac_final()
973 memcpy(out, ctx->dg, AES_BLOCK_SIZE); in cmac_final()
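cmac_final() applies the standard CMAC finalization: a full last block is masked with the first subkey, while a partial one gets the 10* padding marker (the 0x80 xor above) and, per the spec, the second subkey (the consts pointer is advanced by one block in the branch shown). A standalone sketch of just the padding and subkey selection, with the CBC-MAC step stubbed out and placeholder subkeys:

    #include <stdio.h>
    #include <string.h>

    #define AES_BLOCK_SIZE 16

    /* Sketch of the cmac_final() choice: full block -> K1, partial
     * block -> 10* padding marker plus K2. */
    int main(void)
    {
            unsigned char dg[AES_BLOCK_SIZE] = { 0 };  /* running MAC state */
            unsigned char consts[2][AES_BLOCK_SIZE];   /* K1, K2 placeholders */
            unsigned int len = 10;                     /* hypothetical buffered bytes */
            const unsigned char *k = consts[0];

            memset(consts, 0, sizeof(consts));

            if (len != AES_BLOCK_SIZE) {
                    dg[len] ^= 0x80;  /* start of the 10...0 padding */
                    k = consts[1];    /* padded block uses K2 */
            }

            /* ... final step would xor k into dg and run one AES block ... */
            printf("using %s\n", k == consts[0] ? "K1" : "K2");
            return 0;
    }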
979 .base.cra_name = "cmac(aes)",
980 .base.cra_driver_name = "cmac-aes-" MODE,
994 .base.cra_name = "xcbc(aes)",
995 .base.cra_driver_name = "xcbc-aes-" MODE,
1009 .base.cra_name = "cbcmac(aes)",
1010 .base.cra_driver_name = "cbcmac-aes-" MODE,
1050 module_cpu_feature_match(AES, aes_init);