Lines matching refs:AES_BLOCK_SIZE (references to AES_BLOCK_SIZE in the s390 protected-key AES "paes" skcipher routines):
232 n = nbytes & ~(AES_BLOCK_SIZE - 1); in ecb_paes_crypt()
262 .base.cra_blocksize = AES_BLOCK_SIZE,
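These first hits are from the ECB path. The mask at line 232 is the usual power-of-two trick for rounding a request length down to whole cipher blocks; the cbc and xts paths below (lines 349 and 529) use the same idiom before handing data to the hardware. A minimal user-space sketch of the idiom, assuming AES_BLOCK_SIZE is 16 as in <crypto/aes.h>:

#include <stdio.h>

#define AES_BLOCK_SIZE 16        /* as defined in <crypto/aes.h> */

int main(void)
{
        unsigned int nbytes = 70;        /* arbitrary request length */
        /* clear the low four bits: keep only whole 16-byte blocks */
        unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);

        /* prints "70 -> 64": only the 64 full-block bytes would be
         * passed to the cipher in this step */
        printf("%u -> %u\n", nbytes, n);
        return 0;
}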
334 u8 iv[AES_BLOCK_SIZE]; in cbc_paes_crypt()
342 memcpy(param.iv, walk.iv, AES_BLOCK_SIZE); in cbc_paes_crypt()
349 n = nbytes & ~(AES_BLOCK_SIZE - 1); in cbc_paes_crypt()
353 memcpy(walk.iv, param.iv, AES_BLOCK_SIZE); in cbc_paes_crypt()
381 .base.cra_blocksize = AES_BLOCK_SIZE,
389 .ivsize = AES_BLOCK_SIZE,
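Lines 334-353 show the chaining-value handling in cbc_paes_crypt(): the IV is copied from the walk into the parameter block before the hardware call and copied back afterwards, so the next walk step continues the chain. A rough user-space sketch of that copy-in/copy-out pattern, with a trivial XOR transform standing in for the real AES engine; the function and variable names here are illustrative, not the driver's:

#include <string.h>
#include <stdint.h>
#include <stddef.h>

#define AES_BLOCK_SIZE 16

/* Stand-in block transform: XOR with the key, then chain the result into
 * the IV exactly as CBC does. The real driver lets the CPACF instruction
 * do this and update the IV in the parameter block itself. */
static void toy_cbc_encrypt(uint8_t iv[AES_BLOCK_SIZE], const uint8_t *key,
                            uint8_t *buf, size_t n)
{
        for (size_t off = 0; off < n; off += AES_BLOCK_SIZE) {
                for (int i = 0; i < AES_BLOCK_SIZE; i++)
                        buf[off + i] = (buf[off + i] ^ iv[i]) ^ key[i];
                memcpy(iv, buf + off, AES_BLOCK_SIZE);  /* new chaining value */
        }
}

static void cbc_walk_step(uint8_t *walk_iv, const uint8_t *key,
                          uint8_t *data, size_t nbytes)
{
        uint8_t param_iv[AES_BLOCK_SIZE];
        size_t n = nbytes & ~(AES_BLOCK_SIZE - 1);      /* cf. line 349 */

        memcpy(param_iv, walk_iv, AES_BLOCK_SIZE);      /* cf. line 342 */
        toy_cbc_encrypt(param_iv, key, data, n);
        memcpy(walk_iv, param_iv, AES_BLOCK_SIZE);      /* cf. line 353 */
}

int main(void)
{
        uint8_t key[AES_BLOCK_SIZE] = { 1 };
        uint8_t iv[AES_BLOCK_SIZE]  = { 0 };
        uint8_t data[4 * AES_BLOCK_SIZE] = { 0 };

        /* two walk steps: the copy-back keeps the chain intact across them */
        cbc_walk_step(iv, key, data, 2 * AES_BLOCK_SIZE);
        cbc_walk_step(iv, key, data + 2 * AES_BLOCK_SIZE, 2 * AES_BLOCK_SIZE);
        return 0;
}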
529 n = nbytes & ~(AES_BLOCK_SIZE - 1); in xts_paes_crypt()
561 .base.cra_blocksize = AES_BLOCK_SIZE,
569 .ivsize = AES_BLOCK_SIZE,
632 memcpy(ctrptr, iv, AES_BLOCK_SIZE); in __ctrblk_init()
633 n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1); in __ctrblk_init()
634 for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) { in __ctrblk_init()
635 memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE); in __ctrblk_init()
636 crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE); in __ctrblk_init()
637 ctrptr += AES_BLOCK_SIZE; in __ctrblk_init()
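Lines 632-637 are the body of __ctrblk_init(): the current IV is replicated across a page-sized buffer and each copy is incremented as one big-endian counter (which is what crypto_inc() does), so a single hardware call can consume many counter blocks at once. A stand-alone sketch of the same expansion, assuming a 4096-byte PAGE_SIZE:

#include <string.h>
#include <stdint.h>

#define AES_BLOCK_SIZE  16
#define PAGE_SIZE       4096    /* assumed; the kernel uses the arch value */

/* big-endian increment over the whole block, like the kernel's crypto_inc() */
static void block_inc(uint8_t *b, int size)
{
        for (int i = size - 1; i >= 0; i--)
                if (++b[i] != 0)
                        break;
}

/* Expand iv into ctrptr as consecutive counter values and return how many
 * bytes worth of counter blocks were prepared (whole blocks, at most one
 * page). Callers only use this for nbytes >= 2 * AES_BLOCK_SIZE, cf. the
 * check at line 666. */
static unsigned int ctrblk_init(uint8_t *ctrptr, const uint8_t *iv,
                                unsigned int nbytes)
{
        unsigned int i, n;

        memcpy(ctrptr, iv, AES_BLOCK_SIZE);
        n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
        for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
                memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
                block_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
                ctrptr += AES_BLOCK_SIZE;
        }
        return n;
}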
646 u8 buf[AES_BLOCK_SIZE], *ctrptr; in ctr_paes_crypt()
664 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) { in ctr_paes_crypt()
665 n = AES_BLOCK_SIZE; in ctr_paes_crypt()
666 if (nbytes >= 2*AES_BLOCK_SIZE && locked) in ctr_paes_crypt()
668 ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk.iv; in ctr_paes_crypt()
673 memcpy(walk.iv, ctrptr + k - AES_BLOCK_SIZE, in ctr_paes_crypt()
674 AES_BLOCK_SIZE); in ctr_paes_crypt()
675 crypto_inc(walk.iv, AES_BLOCK_SIZE); in ctr_paes_crypt()
695 memset(buf, 0, AES_BLOCK_SIZE); in ctr_paes_crypt()
699 buf, AES_BLOCK_SIZE, in ctr_paes_crypt()
700 walk.iv) == AES_BLOCK_SIZE) in ctr_paes_crypt()
709 crypto_inc(walk.iv, AES_BLOCK_SIZE); in ctr_paes_crypt()
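The ctr_paes_crypt() lines above cover the two halves of the CTR path. The main loop (664-675) feeds the hardware either a page of pre-expanded counters or walk.iv directly; after a successful call it restores walk.iv to the last counter value consumed (when the expanded buffer was used, lines 673-674) and advances it by one block (line 675). The tail (695-709) cannot go to the hardware directly, since only whole blocks are processed: it is padded into a zeroed 16-byte buffer, processed as one full block, and only the real nbytes are copied to the destination before the counter is bumped once. A sketch of that tail handling, with a hypothetical one-block keystream routine in place of the CPACF call and the driver's retry/key-reconversion details omitted:

#include <string.h>
#include <stdint.h>
#include <stddef.h>

#define AES_BLOCK_SIZE 16

/* Hypothetical stand-in for one counter-mode block: derive a keystream
 * byte from key and counter and XOR it into buf. A real implementation
 * would run AES over the counter here. */
static void toy_ctr_block(const uint8_t *key, const uint8_t *ctr, uint8_t *buf)
{
        for (int i = 0; i < AES_BLOCK_SIZE; i++)
                buf[i] ^= key[i] ^ ctr[i];
}

/* Handle a final partial block the way lines 695-709 do. */
static void ctr_final(const uint8_t *key, uint8_t *iv,
                      const uint8_t *src, uint8_t *dst, size_t nbytes)
{
        uint8_t buf[AES_BLOCK_SIZE];

        memset(buf, 0, AES_BLOCK_SIZE);         /* cf. line 695 */
        memcpy(buf, src, nbytes);               /* pad the tail to a full block */
        toy_ctr_block(key, iv, buf);            /* process one whole block */
        memcpy(dst, buf, nbytes);               /* emit only the real bytes */

        /* bump the counter once, big-endian, like crypto_inc() at line 709 */
        for (int i = AES_BLOCK_SIZE - 1; i >= 0 && ++iv[i] == 0; i--)
                ;
}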
728 .ivsize = AES_BLOCK_SIZE,
732 .chunksize = AES_BLOCK_SIZE,
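The remaining hits (262, 381/389, 561/569, 728/732) all sit in the skcipher_alg definitions: the block modes advertise cra_blocksize = AES_BLOCK_SIZE, cbc and xts additionally a 16-byte ivsize, and the ctr variant a 16-byte ivsize and chunksize (CTR behaves as a stream cipher, so its cra_blocksize is typically 1 rather than AES_BLOCK_SIZE, which is why it does not appear in this listing). A hedged sketch of how such a registration looks with the kernel's struct skcipher_alg; the keysize constants, the driver name string, and the setkey callback name are assumptions for illustration, not taken from the driver:

#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

/* callbacks: ctr_paes_crypt appears in the listing, ctr_paes_setkey is an
 * assumed name; bodies omitted in this sketch */
static int ctr_paes_setkey(struct crypto_skcipher *tfm, const u8 *key,
                           unsigned int keylen);
static int ctr_paes_crypt(struct skcipher_request *req);

static struct skcipher_alg ctr_paes_alg = {
        .base.cra_name          = "ctr(paes)",
        .base.cra_driver_name   = "ctr-paes-s390",     /* illustrative */
        .base.cra_blocksize     = 1,                    /* stream-cipher semantics */
        .min_keysize            = PAES_MIN_KEYSIZE,     /* assumed constants */
        .max_keysize            = PAES_MAX_KEYSIZE,
        .ivsize                 = AES_BLOCK_SIZE,       /* 16-byte counter, line 728 */
        .chunksize              = AES_BLOCK_SIZE,       /* walk granularity, line 732 */
        .setkey                 = ctr_paes_setkey,
        .encrypt                = ctr_paes_crypt,
        .decrypt                = ctr_paes_crypt,
};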