Searched refs:XTS_BLOCK_SIZE (Results 1 – 7 of 7) sorted by relevance
/linux-6.12.1/crypto/
    xts.c
         87  const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);                     in xts_xor_tweak()
         88  const int bs = XTS_BLOCK_SIZE;                                         in xts_xor_tweak()
        110  w.total - w.nbytes + avail < 2 * XTS_BLOCK_SIZE) {                     in xts_xor_tweak()
        151  scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);        in xts_cts_done()
        153  scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);        in xts_cts_done()
        164  int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1);                    in xts_cts_final()
        167  int tail = req->cryptlen % XTS_BLOCK_SIZE;                             in xts_cts_final()
        172  offset - XTS_BLOCK_SIZE);                                              in xts_cts_final()
        174  scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);         in xts_cts_final()
        180  scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE + tail, 1);  in xts_cts_final()
        [all …]
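The xts_cts_final() hits show how the generic xts template splits a request whose length is not a multiple of the block size: everything up to the last full 16-byte boundary is processed normally, and the 1 to 15 leftover bytes are handled by ciphertext stealing against the final full block (line 87 uses the same modulo test just to decide whether CTS is needed at all). A minimal standalone sketch of that split arithmetic, using a hypothetical cts_split() helper rather than the kernel's own code:

    #include <assert.h>
    #include <stddef.h>
    #include <stdio.h>

    #define XTS_BLOCK_SIZE 16  /* same value as include/crypto/xts.h */

    /* Hypothetical helper: split cryptlen the way the xts_cts_final() lines
     * above do. "offset" is the largest block-aligned prefix; "tail" is the
     * number of leftover bytes that ciphertext stealing has to cover. */
    static void cts_split(size_t cryptlen, size_t *offset, size_t *tail)
    {
        *offset = cryptlen & ~(size_t)(XTS_BLOCK_SIZE - 1);
        *tail = cryptlen % XTS_BLOCK_SIZE;
    }

    int main(void)
    {
        size_t offset, tail;

        cts_split(37, &offset, &tail);   /* 37 = 2 full blocks + 5 bytes */
        assert(offset == 32 && tail == 5);

        cts_split(48, &offset, &tail);   /* exact multiple: no CTS needed */
        assert(offset == 48 && tail == 0);

        printf("offset=%zu tail=%zu\n", offset, tail);
        return 0;
    }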
/linux-6.12.1/include/crypto/
    xts.h
          9  #define XTS_BLOCK_SIZE 16    macro
/linux-6.12.1/arch/powerpc/crypto/
    aes_xts.c
         90  if (!crypto_simd_usable() || (req->cryptlen % XTS_BLOCK_SIZE) != 0) {  in p8_aes_xts_crypt()
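The single hit in the POWER8 VMX driver is its fast-path gate: p8_aes_xts_crypt() only runs the accelerated code when SIMD registers are usable and the request length is an exact multiple of XTS_BLOCK_SIZE; anything else, including lengths that would need ciphertext stealing, is handed to the fallback implementation. A rough userspace sketch of that decision, where use_vmx_fast_path() is a hypothetical name and crypto_simd_usable() is replaced by a trivial stub standing in for the kernel helper:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    #define XTS_BLOCK_SIZE 16

    /* Stub standing in for the kernel's crypto_simd_usable(); in the real
     * driver it reports whether vector registers may be used right now. */
    static bool crypto_simd_usable(void) { return true; }

    /* Sketch of the gate on line 90: take the accelerated path only when
     * SIMD is available and the request is an exact multiple of the block
     * size; everything else goes to the fallback implementation. */
    static bool use_vmx_fast_path(size_t cryptlen)
    {
        if (!crypto_simd_usable() || (cryptlen % XTS_BLOCK_SIZE) != 0)
            return false;   /* fall back */
        return true;
    }

    int main(void)
    {
        printf("%d %d\n", use_vmx_fast_path(64), use_vmx_fast_path(70));
        return 0;
    }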
/linux-6.12.1/drivers/crypto/intel/qat/qat_common/
    qat_algs.c
       1072  if (req->cryptlen < XTS_BLOCK_SIZE)   in qat_alg_skcipher_xts_encrypt()
       1140  if (req->cryptlen < XTS_BLOCK_SIZE)   in qat_alg_skcipher_xts_decrypt()
/linux-6.12.1/drivers/crypto/inside-secure/
    safexcel_cipher.c
       2462  if (req->cryptlen < XTS_BLOCK_SIZE)   in safexcel_encrypt_xts()
       2470  if (req->cryptlen < XTS_BLOCK_SIZE)   in safexcel_decrypt_xts()
       2486  .ivsize = XTS_BLOCK_SIZE,
       2494  .cra_blocksize = XTS_BLOCK_SIZE,
/linux-6.12.1/drivers/crypto/
    atmel-aes.c
       1069  if (req->cryptlen < XTS_BLOCK_SIZE)              in atmel_aes_crypt()
       1072  if (!IS_ALIGNED(req->cryptlen, XTS_BLOCK_SIZE))  in atmel_aes_crypt()
/linux-6.12.1/drivers/crypto/tegra/
    tegra-se-aes.c
        438  } else if (req->cryptlen < XTS_BLOCK_SIZE) {   in tegra_aes_crypt()
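The remaining hits are request-length validation in hardware drivers: qat, safexcel, atmel and tegra all gate their XTS entry points on req->cryptlen being at least one block, and the atmel path additionally checks that the length is an exact multiple of XTS_BLOCK_SIZE before taking its main path. A sketch that only captures the shape of those conditions; the hypothetical xts_check_req_len() below is not any driver's actual function, and the real drivers differ in exactly where and how they bail out or divert to a fallback:

    #include <errno.h>
    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    #define XTS_BLOCK_SIZE 16

    /* Hypothetical summary of the checks seen above: every driver requires
     * at least one full block (XTS cannot encrypt less than 16 bytes), and
     * a path without ciphertext-stealing support must also require an exact
     * multiple of the block size. Returns 0 if the length is acceptable. */
    static int xts_check_req_len(size_t cryptlen, bool have_cts)
    {
        if (cryptlen < XTS_BLOCK_SIZE)
            return -EINVAL;
        if (!have_cts && (cryptlen % XTS_BLOCK_SIZE) != 0)
            return -EINVAL;
        return 0;
    }

    int main(void)
    {
        printf("%d\n", xts_check_req_len(15, true));   /* too short */
        printf("%d\n", xts_check_req_len(37, false));  /* would need CTS */
        printf("%d\n", xts_check_req_len(37, true));   /* ok with CTS */
        return 0;
    }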