Lines matching refs:AES_BLOCK_SIZE

44 #define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE - 1))
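With AES_BLOCK_SIZE equal to 16, AES_BLOCK_MASK is ~15, so ANDing a byte count with it rounds the count down to a whole number of blocks, while ANDing with AES_BLOCK_SIZE - 1 keeps the remainder. A standalone sketch of the arithmetic (the demo program is mine, not from the file):

#include <stdio.h>

#define AES_BLOCK_SIZE	16
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE - 1))

int main(void)
{
	unsigned int nbytes = 100;

	/* 100 & ~15 == 96 (six full blocks); 100 & 15 == 4 (tail) */
	printf("%u full-block bytes, %u left over\n",
	       nbytes & AES_BLOCK_MASK, nbytes & (AES_BLOCK_SIZE - 1));
	return 0;
}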
193 nbytes &= AES_BLOCK_SIZE - 1; in ecb_encrypt()
215 nbytes &= AES_BLOCK_SIZE - 1; in ecb_decrypt()
237 nbytes &= AES_BLOCK_SIZE - 1; in cbc_encrypt()
259 nbytes &= AES_BLOCK_SIZE - 1; in cbc_decrypt()
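All four ECB/CBC matches above are the same idiom: inside the skcipher walk loop, the cipher consumes nbytes & AES_BLOCK_MASK bytes and the sub-block remainder is handed back to skcipher_walk_done() as unprocessed. An abridged sketch of that loop shape (variable setup omitted; aesni_ecb_enc() stands in for whichever bulk routine the mode uses):

while ((nbytes = walk.nbytes) > 0) {
	/* process every complete block in this walk step */
	aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
		      nbytes & AES_BLOCK_MASK);
	/* keep only the partial tail and report it as unprocessed */
	nbytes &= AES_BLOCK_SIZE - 1;
	err = skcipher_walk_done(&walk, nbytes);
}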
270 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_encrypt()
281 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_encrypt()
282 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_encrypt()
289 cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_encrypt()
296 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_encrypt()
307 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_encrypt()
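The cbc_blocks expression at line 270 splits a CTS request: every block except the last two runs as ordinary CBC (line 289), and the final two, the second possibly partial, get the ciphertext-stealing swap (line 307). A worked check of the arithmetic (hypothetical driver snippet):

#define AES_BLOCK_SIZE	16
#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))

/*
 * cryptlen = 100: DIV_ROUND_UP(100, 16) = 7, so cbc_blocks = 5.
 * 5 * 16 = 80 bytes run as plain CBC; the remaining 20 bytes
 * (one full block plus a 4-byte tail) take the CTS path.
 */
int cbc_blocks = DIV_ROUND_UP(100, AES_BLOCK_SIZE) - 2;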
326 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_decrypt()
337 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_decrypt()
338 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_decrypt()
345 cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_decrypt()
352 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_decrypt()
363 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_decrypt()
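The comparisons at lines 337/338 (and 281/282 on the encrypt side) cover the degenerate inputs: less than one block cannot be ciphertext-stolen at all, and exactly one block reduces to plain single-block CBC, which is why lines 296 and 352 return early after the CBC subrequest. A sketch of that guard, consistent with the lines above:

if (req->cryptlen <= AES_BLOCK_SIZE) {
	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;	/* CTS needs at least one full block */
	cbc_blocks = 1;		/* exactly one block: ordinary CBC */
}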
400 u8 keystream[AES_BLOCK_SIZE]; in ctr_crypt()
421 crypto_inc(walk.iv, AES_BLOCK_SIZE); in ctr_crypt()
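In ctr_crypt(), crypto_inc() treats the whole 16-byte IV as one big-endian integer and adds one with carry after each keystream block. A freestanding equivalent of that increment (my own sketch, not the kernel implementation, which is optimized):

#include <stddef.h>
#include <stdint.h>

/* Increment a big-endian counter of 'size' bytes by one, with carry. */
static void ctr_inc_be(uint8_t *ctr, size_t size)
{
	while (size--) {
		if (++ctr[size] != 0)	/* stop once a byte doesn't wrap */
			break;
	}
}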
449 u8 keystream[AES_BLOCK_SIZE]; in xctr_crypt()
454 __le32 block[AES_BLOCK_SIZE / sizeof(__le32)]; in xctr_crypt()
468 memcpy(block, walk.iv, AES_BLOCK_SIZE); in xctr_crypt()
469 block[0] ^= cpu_to_le32(1 + byte_ctr / AES_BLOCK_SIZE); in xctr_crypt()
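XCTR differs from CTR in how the counter enters the block: instead of incrementing the IV big-endian, it XORs a little-endian 32-bit block index, counting from 1, into the first word of the IV; 1 + byte_ctr / AES_BLOCK_SIZE at line 469 is simply the index of the block a given byte offset falls in. A standalone sketch of the derivation (function name hypothetical):

#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE	16

/*
 * Build the XCTR block-cipher input for the block containing byte
 * offset byte_ctr: IV XOR little-endian block index, counting from 1.
 */
static void xctr_block_input(uint8_t out[AES_BLOCK_SIZE],
			     const uint8_t iv[AES_BLOCK_SIZE],
			     uint64_t byte_ctr)
{
	uint32_t ctr = 1 + byte_ctr / AES_BLOCK_SIZE;

	memcpy(out, iv, AES_BLOCK_SIZE);
	out[0] ^= ctr & 0xff;	/* XOR the index in, little-endian */
	out[1] ^= (ctr >> 8) & 0xff;
	out[2] ^= (ctr >> 16) & 0xff;
	out[3] ^= (ctr >> 24) & 0xff;
}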
506 u8 iv[AES_BLOCK_SIZE]);
509 u8 tweak[AES_BLOCK_SIZE]);
517 int tail = req->cryptlen % AES_BLOCK_SIZE; in xts_crypt_slowpath()
536 req->cryptlen - tail - AES_BLOCK_SIZE, in xts_crypt_slowpath()
547 walk.nbytes & ~(AES_BLOCK_SIZE - 1), req->iv); in xts_crypt_slowpath()
550 walk.nbytes & (AES_BLOCK_SIZE - 1)); in xts_crypt_slowpath()
562 skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, in xts_crypt_slowpath()
588 if (unlikely(cryptlen < AES_BLOCK_SIZE)) in xts_crypt()
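Lines 517 through 588 together implement the XTS length handling: anything shorter than one block is rejected, a block-aligned request has no tail, and otherwise the last full block plus the partial tail (AES_BLOCK_SIZE + tail bytes, line 562) are peeled off for ciphertext stealing while the rest (req->cryptlen - tail - AES_BLOCK_SIZE, line 536) is processed as ordinary XTS. A worked sketch of that split (helper name hypothetical):

#define AES_BLOCK_SIZE	16

/*
 * Split cryptlen into the bulk XTS portion (returned) and the
 * ciphertext-stealing portion (*cts_len, 0 when block-aligned).
 * E.g. cryptlen = 100: tail = 4, bulk = 80, cts = 20.
 */
static int xts_split(unsigned int cryptlen, unsigned int *cts_len)
{
	unsigned int tail = cryptlen % AES_BLOCK_SIZE;

	if (cryptlen < AES_BLOCK_SIZE)
		return -1;		/* too short for XTS */
	if (!tail) {
		*cts_len = 0;
		return cryptlen;
	}
	*cts_len = AES_BLOCK_SIZE + tail;
	return cryptlen - *cts_len;
}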
621 u8 iv[AES_BLOCK_SIZE]) in aesni_xts_encrypt_iv() argument
628 u8 tweak[AES_BLOCK_SIZE]) in aesni_xts_encrypt() argument
635 u8 tweak[AES_BLOCK_SIZE]) in aesni_xts_decrypt() argument
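The iv/tweak naming split reflects XTS's structure: aesni_xts_encrypt_iv() converts the caller's IV into the initial tweak by encrypting it under the second (tweak) half of the key, as IEEE P1619 specifies, and the bulk routines then advance that tweak once per block by multiplying it by x in GF(2^128). A generic-C sketch of the per-block tweak update (assuming the assembly performs the standard XTS doubling):

#include <stdint.h>

/*
 * Multiply the 128-bit tweak by x in GF(2^128), XTS convention:
 * little-endian bit order, reduction polynomial x^128 + x^7 + x^2 + x + 1.
 */
static void xts_gf_double(uint8_t t[16])
{
	uint8_t carry = 0;
	int i;

	for (i = 0; i < 16; i++) {
		uint8_t next = t[i] >> 7;

		t[i] = (uint8_t)((t[i] << 1) | carry);
		carry = next;
	}
	if (carry)
		t[0] ^= 0x87;	/* fold the carry back in: 0x87 = x^7+x^2+x+1 */
}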
655 .cra_blocksize = AES_BLOCK_SIZE,
676 .cra_blocksize = AES_BLOCK_SIZE,
691 .cra_blocksize = AES_BLOCK_SIZE,
697 .ivsize = AES_BLOCK_SIZE,
707 .cra_blocksize = AES_BLOCK_SIZE,
713 .ivsize = AES_BLOCK_SIZE,
714 .walksize = 2 * AES_BLOCK_SIZE,
731 .ivsize = AES_BLOCK_SIZE,
732 .chunksize = AES_BLOCK_SIZE,
743 .cra_blocksize = AES_BLOCK_SIZE,
749 .ivsize = AES_BLOCK_SIZE,
750 .walksize = 2 * AES_BLOCK_SIZE,
777 .ivsize = AES_BLOCK_SIZE,
778 .chunksize = AES_BLOCK_SIZE,
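The registration fields from line 655 onward follow a consistent scheme: cra_blocksize is the cipher's own block size, ivsize is one block of per-request IV, chunksize marks the stream-cipher granularity for CTR and XCTR, and a walksize of two blocks guarantees the walker never splits the final block pair that ciphertext stealing must see in one step. A trimmed-down skcipher_alg showing just those members (the field names are the real struct members; the values around them are illustrative):

#include <crypto/internal/skcipher.h>

static struct skcipher_alg example_cts_alg = {
	.base = {
		.cra_name	= "cts(cbc(aes))",
		.cra_blocksize	= AES_BLOCK_SIZE,
	},
	.ivsize		= AES_BLOCK_SIZE,	/* one block of IV */
	/* never split the last two blocks across walk steps */
	.walksize	= 2 * AES_BLOCK_SIZE,
};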
787 u8 iv[AES_BLOCK_SIZE]);
793 u8 *dst, unsigned int len, u8 tweak[AES_BLOCK_SIZE]); \
796 u8 *dst, unsigned int len, u8 tweak[AES_BLOCK_SIZE]); \
814 .cra_blocksize = AES_BLOCK_SIZE, \
820 .ivsize = AES_BLOCK_SIZE, \
821 .walksize = 2 * AES_BLOCK_SIZE, \
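The trailing backslashes at lines 793-821 show that these prototypes and fields live inside a macro which stamps out one XTS variant per SIMD flavor. A minimal sketch of that shape (macro and suffix names hypothetical):

#define DEFINE_XTS_VARIANT(suffix)					      \
asmlinkage void aes_xts_encrypt_##suffix(const void *key, const u8 *src,     \
			u8 *dst, unsigned int len, u8 tweak[AES_BLOCK_SIZE]); \
asmlinkage void aes_xts_decrypt_##suffix(const void *key, const u8 *src,     \
			u8 *dst, unsigned int len, u8 tweak[AES_BLOCK_SIZE]);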
1394 nbytes = round_down(nbytes, AES_BLOCK_SIZE); in gcm_crypt()
1397 le_ctr[0] += nbytes / AES_BLOCK_SIZE; in gcm_crypt()
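gcm_crypt() rounds each walk step down to whole blocks and then bumps the first word of its little-endian counter copy by the number of blocks consumed, keeping the C side and the assembly in agreement about the 32-bit GCM counter. A standalone sketch of the bookkeeping (helper name hypothetical):

#include <stdint.h>

#define AES_BLOCK_SIZE	16

/* Advance the 32-bit block counter after processing nbytes bytes. */
static void gcm_advance_ctr(uint32_t *le_ctr, unsigned int nbytes)
{
	nbytes &= ~(AES_BLOCK_SIZE - 1);	/* round_down to blocks */
	le_ctr[0] += nbytes / AES_BLOCK_SIZE;	/* wraps mod 2^32, as GCM does */
}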
1485 .chunksize = AES_BLOCK_SIZE, \
1502 .chunksize = AES_BLOCK_SIZE, \
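In the AEAD registrations at the end of the file, chunksize serves the same purpose as in the CTR/XCTR skciphers above: GCM produces its keystream in AES-block units, so AES_BLOCK_SIZE is the granularity at which the mode can be advanced incrementally even though a final partial chunk is still legal.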