// SPDX-License-Identifier: GPL-2.0-only
/*
 * Bit sliced AES using NEON instructions
 *
 * Copyright (C) 2016 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <linux/module.h>

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_DESCRIPTION("Bit sliced AES using NEON instructions");
MODULE_LICENSE("GPL v2");

MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");

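/*
 * Primitives implemented in assembly. The bit sliced aesbs_ routines
 * process their input in chunks of up to eight blocks in parallel.
 */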
asmlinkage void aesbs_convert_key(u8 out[], u32 const rk[], int rounds);

asmlinkage void aesbs_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks);
asmlinkage void aesbs_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks);

asmlinkage void aesbs_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);

asmlinkage void aesbs_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);

asmlinkage void aesbs_xts_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);
asmlinkage void aesbs_xts_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);

/* borrowed from aes-neon-blk.ko */
asmlinkage void neon_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
				     int rounds, int blocks);
asmlinkage void neon_aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
				     int rounds, int blocks, u8 iv[]);
asmlinkage void neon_aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
				     int rounds, int bytes, u8 ctr[]);
asmlinkage void neon_aes_xts_encrypt(u8 out[], u8 const in[],
				     u32 const rk1[], int rounds, int bytes,
				     u32 const rk2[], u8 iv[], int first);
asmlinkage void neon_aes_xts_decrypt(u8 out[], u8 const in[],
				     u32 const rk1[], int rounds, int bytes,
				     u32 const rk2[], u8 iv[], int first);

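/*
 * Key context for the bit sliced code: 'rk' holds the round keys in the
 * bit sliced format produced by aesbs_convert_key().
 */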
struct aesbs_ctx {
	u8	rk[13 * (8 * AES_BLOCK_SIZE) + 32];
	int	rounds;
} __aligned(AES_BLOCK_SIZE);

struct aesbs_cbc_ctr_ctx {
	struct aesbs_ctx	key;
	u32			enc[AES_MAX_KEYLENGTH_U32];
};

struct aesbs_xts_ctx {
	struct aesbs_ctx	key;
	u32			twkey[AES_MAX_KEYLENGTH_U32];
	struct crypto_aes_ctx	cts;
};

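/* expand the key and convert it into the bit sliced representation */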
static int aesbs_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			unsigned int key_len)
{
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_aes_ctx rk;
	int err;

	err = aes_expandkey(&rk, in_key, key_len);
	if (err)
		return err;

	ctx->rounds = 6 + key_len / 4;

	kernel_neon_begin();
	aesbs_convert_key(ctx->rk, rk.key_enc, ctx->rounds);
	kernel_neon_end();

	return 0;
}

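/*
 * Process as many blocks per iteration as the walk provides; while more
 * data is pending, round down to a multiple of the 8 block stride of the
 * bit sliced code and leave the remainder for the next step.
 */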
static int __ecb_crypt(struct skcipher_request *req,
		       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		kernel_neon_begin();
		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
		   ctx->rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}

static int ecb_encrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, aesbs_ecb_encrypt);
}

static int ecb_decrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, aesbs_ecb_decrypt);
}

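/*
 * CBC and CTR need the key in both formats: the bit sliced schedule for
 * the parallel fast path and the regular schedule for the plain NEON
 * fallback.
 */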
static int aesbs_cbc_ctr_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_aes_ctx rk;
	int err;

	err = aes_expandkey(&rk, in_key, key_len);
	if (err)
		return err;

	ctx->key.rounds = 6 + key_len / 4;

	memcpy(ctx->enc, rk.key_enc, sizeof(ctx->enc));

	kernel_neon_begin();
	aesbs_convert_key(ctx->key.rk, rk.key_enc, ctx->key.rounds);
	kernel_neon_end();
	memzero_explicit(&rk, sizeof(rk));

	return 0;
}

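/*
 * CBC encryption is inherently sequential (each block depends on the
 * previous ciphertext block), so the 8-way bit sliced code cannot help
 * here; use the plain NEON implementation throughout.
 */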
static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		/* fall back to the non-bitsliced NEON implementation */
		kernel_neon_begin();
		neon_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				     ctx->enc, ctx->key.rounds, blocks,
				     walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

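/*
 * CBC decryption has no dependency between blocks, so it can use the bit
 * sliced code to process up to 8 blocks at a time.
 */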
static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		kernel_neon_begin();
		aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				  ctx->key.rk, ctx->key.rounds, blocks,
				  walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}

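/*
 * CTR: encrypt full 8 block chunks with the bit sliced code, then hand
 * any remaining tail (including a final partial block, bounced through a
 * stack buffer) to the plain NEON implementation.
 */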
static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7;
		int nbytes = walk.nbytes % (8 * AES_BLOCK_SIZE);
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

		kernel_neon_begin();
		if (blocks >= 8) {
			aesbs_ctr_encrypt(dst, src, ctx->key.rk, ctx->key.rounds,
					  blocks, walk.iv);
			dst += blocks * AES_BLOCK_SIZE;
			src += blocks * AES_BLOCK_SIZE;
		}
		if (nbytes && walk.nbytes == walk.total) {
			u8 buf[AES_BLOCK_SIZE];
			u8 *d = dst;

			if (unlikely(nbytes < AES_BLOCK_SIZE))
				src = dst = memcpy(buf + sizeof(buf) - nbytes,
						   src, nbytes);

			neon_aes_ctr_encrypt(dst, src, ctx->enc, ctx->key.rounds,
					     nbytes, walk.iv);

			if (unlikely(nbytes < AES_BLOCK_SIZE))
				memcpy(d, dst, nbytes);

			nbytes = 0;
		}
		kernel_neon_end();
		err = skcipher_walk_done(&walk, nbytes);
	}
	return err;
}

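/*
 * XTS uses two AES keys: the first half of the input key encrypts the
 * data (bit sliced, with a regular schedule kept in 'cts' for the
 * ciphertext stealing tail), the second half encrypts the tweak.
 */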
static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_aes_ctx rk;
	int err;

	err = xts_verify_key(tfm, in_key, key_len);
	if (err)
		return err;

	key_len /= 2;
	err = aes_expandkey(&ctx->cts, in_key, key_len);
	if (err)
		return err;

	err = aes_expandkey(&rk, in_key + key_len, key_len);
	if (err)
		return err;

	memcpy(ctx->twkey, rk.key_enc, sizeof(ctx->twkey));

	return aesbs_setkey(tfm, in_key, key_len);
}

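/*
 * XTS: encrypt the initial tweak once, process full 8 block chunks with
 * the bit sliced code, and leave anything smaller, plus the ciphertext
 * stealing tail, to the plain NEON XTS helpers.
 */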
static int __xts_crypt(struct skcipher_request *req, bool encrypt,
		       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int tail = req->cryptlen % (8 * AES_BLOCK_SIZE);
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;
	int nbytes, err;
	int first = 1;
	u8 *out, *in;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	/* ensure that the cts tail is covered by a single step */
	if (unlikely(tail > 0 && tail < AES_BLOCK_SIZE)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
	} else {
		tail = 0;
	}

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7;
		out = walk.dst.virt.addr;
		in = walk.src.virt.addr;
		nbytes = walk.nbytes;

		kernel_neon_begin();
		if (blocks >= 8) {
			if (first == 1)
				neon_aes_ecb_encrypt(walk.iv, walk.iv,
						     ctx->twkey,
						     ctx->key.rounds, 1);
			first = 2;

			fn(out, in, ctx->key.rk, ctx->key.rounds, blocks,
			   walk.iv);

			out += blocks * AES_BLOCK_SIZE;
			in += blocks * AES_BLOCK_SIZE;
			nbytes -= blocks * AES_BLOCK_SIZE;
		}
		if (walk.nbytes == walk.total && nbytes > 0) {
			if (encrypt)
				neon_aes_xts_encrypt(out, in, ctx->cts.key_enc,
						     ctx->key.rounds, nbytes,
						     ctx->twkey, walk.iv, first);
			else
				neon_aes_xts_decrypt(out, in, ctx->cts.key_dec,
						     ctx->key.rounds, nbytes,
						     ctx->twkey, walk.iv, first);
			nbytes = first = 0;
		}
		kernel_neon_end();
		err = skcipher_walk_done(&walk, nbytes);
	}

	if (err || likely(!tail))
		return err;

	/* handle ciphertext stealing */
	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	out = walk.dst.virt.addr;
	in = walk.src.virt.addr;
	nbytes = walk.nbytes;

	kernel_neon_begin();
	if (encrypt)
		neon_aes_xts_encrypt(out, in, ctx->cts.key_enc, ctx->key.rounds,
				     nbytes, ctx->twkey, walk.iv, first);
	else
		neon_aes_xts_decrypt(out, in, ctx->cts.key_dec, ctx->key.rounds,
				     nbytes, ctx->twkey, walk.iv, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int xts_encrypt(struct skcipher_request *req)
{
	return __xts_crypt(req, true, aesbs_xts_encrypt);
}

static int xts_decrypt(struct skcipher_request *req)
{
	return __xts_crypt(req, false, aesbs_xts_decrypt);
}

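/*
 * All four modes advertise a walk size of 8 blocks so the walk hands the
 * bit sliced code chunks it can process at full width.
 */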
static struct skcipher_alg aes_algs[] = { {
	.base.cra_name		= "ecb(aes)",
	.base.cra_driver_name	= "ecb-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.setkey			= aesbs_setkey,
	.encrypt		= ecb_encrypt,
	.decrypt		= ecb_decrypt,
}, {
	.base.cra_name		= "cbc(aes)",
	.base.cra_driver_name	= "cbc-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_cbc_ctr_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_cbc_ctr_setkey,
	.encrypt		= cbc_encrypt,
	.decrypt		= cbc_decrypt,
}, {
	.base.cra_name		= "ctr(aes)",
	.base.cra_driver_name	= "ctr-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aesbs_cbc_ctr_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_cbc_ctr_setkey,
	.encrypt		= ctr_encrypt,
	.decrypt		= ctr_encrypt,
}, {
	.base.cra_name		= "xts(aes)",
	.base.cra_driver_name	= "xts-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_xts_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= 2 * AES_MIN_KEY_SIZE,
	.max_keysize		= 2 * AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_xts_setkey,
	.encrypt		= xts_encrypt,
	.decrypt		= xts_decrypt,
} };

static void aes_exit(void)
{
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

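/* the bit sliced implementation requires Advanced SIMD (NEON) support */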
static int __init aes_init(void)
{
	if (!cpu_have_named_feature(ASIMD))
		return -ENODEV;

	return crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

module_init(aes_init);
module_exit(aes_exit);