Searched refs:ctr32 (Results 1 – 2 of 2) sorted by relevance
/linux-6.12.1/crypto/
xctr.c
    41  __le32 ctr32 = cpu_to_le32(byte_ctr / XCTR_BLOCKSIZE + 1);  in crypto_xctr_crypt_final() local
    43  crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));          in crypto_xctr_crypt_final()
    46  crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));          in crypto_xctr_crypt_final()
    57  __le32 ctr32 = cpu_to_le32(byte_ctr / XCTR_BLOCKSIZE + 1);  in crypto_xctr_crypt_segment() local
    60  crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));          in crypto_xctr_crypt_segment()
    63  crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));          in crypto_xctr_crypt_segment()
    65  le32_add_cpu(&ctr32, 1);                                    in crypto_xctr_crypt_segment()
    84  __le32 ctr32 = cpu_to_le32(byte_ctr / XCTR_BLOCKSIZE + 1);  in crypto_xctr_crypt_inplace() local
    87  crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));          in crypto_xctr_crypt_inplace()
    90  crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));          in crypto_xctr_crypt_inplace()
    [all …]
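The xctr.c hits all show one pattern: each of the three walk helpers (final, segment, in-place) derives a 32-bit little-endian block counter from the byte offset, XORs it into the IV before generating the keystream block, then XORs it back out so the IV is left unchanged for the next block, and the segment helper advances the counter with le32_add_cpu(). The sketch below is a minimal, self-contained restatement of that pattern, not the kernel code itself: BLOCKSIZE, the encrypt() callback, and xor_le32_into() are stand-ins for XCTR_BLOCKSIZE, the skcipher's underlying block cipher, and crypto_xor().

/*
 * Minimal sketch of the counter handling visible in the xctr.c hits above.
 * XCTR (used by HCTR2) differs from CTR in that the block counter is a
 * 32-bit little-endian value XORed into the IV, rather than a big-endian
 * 128-bit increment of the counter block.
 */
#include <stdint.h>
#include <stddef.h>

#define BLOCKSIZE 16  /* stand-in for XCTR_BLOCKSIZE */

static void xor_le32_into(uint8_t *iv, uint32_t ctr32)
{
	/* mirrors crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32)) */
	for (int i = 0; i < 4; i++)
		iv[i] ^= (uint8_t)(ctr32 >> (8 * i));
}

/* Encrypt nbytes of src into dst; keystream block i = E(iv ^ le32(i + 1)). */
static void xctr_crypt(void (*encrypt)(uint8_t out[16], const uint8_t in[16]),
		       uint8_t iv[16], const uint8_t *src, uint8_t *dst,
		       size_t nbytes, size_t byte_ctr)
{
	uint32_t ctr32 = byte_ctr / BLOCKSIZE + 1;   /* counter starts at 1 */
	uint8_t ks[16];

	while (nbytes) {
		size_t n = nbytes < BLOCKSIZE ? nbytes : BLOCKSIZE;

		xor_le32_into(iv, ctr32);            /* XOR counter in */
		encrypt(ks, iv);                     /* keystream block */
		xor_le32_into(iv, ctr32);            /* XOR counter back out */

		for (size_t i = 0; i < n; i++)
			dst[i] = src[i] ^ ks[i];

		src += n;
		dst += n;
		nbytes -= n;
		ctr32++;                             /* le32_add_cpu(&ctr32, 1) */
	}
}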
/linux-6.12.1/arch/riscv/crypto/
aes-riscv64-glue.c
    283  u32 ctr32, nblocks;                          in riscv64_aes_ctr_crypt() local
    287  ctr32 = get_unaligned_be32(req->iv + 12);    in riscv64_aes_ctr_crypt()
    299  ctr32 += nblocks;                            in riscv64_aes_ctr_crypt()
    302  if (ctr32 >= nblocks) {                      in riscv64_aes_ctr_crypt()
    315  (nblocks - ctr32) * AES_BLOCK_SIZE);         in riscv64_aes_ctr_crypt()
    321  if (ctr32) {                                 in riscv64_aes_ctr_crypt()
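The aes-riscv64-glue.c hits show the other common use of a 32-bit counter: riscv64_aes_ctr_crypt() reads the low 32 bits of the big-endian CTR counter (req->iv + 12), adds the number of blocks in the request, and checks whether those 32 bits would wrap so the request can be split at the wrap point. The following is a rough, hedged illustration of that split logic only, assuming the low-level routine increments just the low 32 bits of the counter; process_blocks() and bump_iv_high96() are hypothetical placeholders, not helpers from the file, and partial trailing blocks are ignored for brevity.

/*
 * Sketch of the 32-bit counter wraparound split seen in the
 * aes-riscv64-glue.c hits above (assumptions labelled in the lead-in).
 */
#include <stdint.h>
#include <stddef.h>

#define AES_BLOCK_SIZE 16

static uint32_t get_be32(const uint8_t *p)
{
	/* stand-in for get_unaligned_be32() */
	return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
	       ((uint32_t)p[2] << 8)  |  (uint32_t)p[3];
}

void ctr_crypt_split(uint8_t iv[16], size_t nbytes,
		     void (*process_blocks)(uint8_t iv[16], uint32_t nblocks),
		     void (*bump_iv_high96)(uint8_t iv[16]))
{
	/* Low 32 bits of the big-endian 128-bit counter, as in the
	 * get_unaligned_be32(req->iv + 12) hit above. */
	uint32_t ctr32 = get_be32(iv + 12);
	uint32_t nblocks = nbytes / AES_BLOCK_SIZE;

	ctr32 += nblocks;
	if (ctr32 >= nblocks) {
		/* No 32-bit wrap: one pass covers the whole request. */
		process_blocks(iv, nblocks);
	} else {
		/* The low 32 bits wrap mid-request: handle the blocks before
		 * the wrap, carry into the upper 96 bits of the counter, then
		 * handle the remaining ctr32 blocks (if any). */
		process_blocks(iv, nblocks - ctr32);
		bump_iv_high96(iv);
		if (ctr32)
			process_blocks(iv, ctr32);
	}
}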