Searched refs:rk (Results 1 – 25 of 45) sorted by relevance

/linux-6.12.1/crypto/
sm4.c
104 static inline u32 sm4_round(u32 x0, u32 x1, u32 x2, u32 x3, u32 rk) in sm4_round() argument
106 return x0 ^ sm4_enc_sub(x1 ^ x2 ^ x3 ^ rk); in sm4_round()
122 u32 rk[4]; in sm4_expandkey() local
129 rk[0] = get_unaligned_be32(&key[0]) ^ fk[0]; in sm4_expandkey()
130 rk[1] = get_unaligned_be32(&key[1]) ^ fk[1]; in sm4_expandkey()
131 rk[2] = get_unaligned_be32(&key[2]) ^ fk[2]; in sm4_expandkey()
132 rk[3] = get_unaligned_be32(&key[3]) ^ fk[3]; in sm4_expandkey()
135 rk[0] ^= sm4_key_sub(rk[1] ^ rk[2] ^ rk[3] ^ ck[i + 0]); in sm4_expandkey()
136 rk[1] ^= sm4_key_sub(rk[2] ^ rk[3] ^ rk[0] ^ ck[i + 1]); in sm4_expandkey()
137 rk[2] ^= sm4_key_sub(rk[3] ^ rk[0] ^ rk[1] ^ ck[i + 2]); in sm4_expandkey()
[all …]
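
The sm4.c hits above show SM4's sliding-window key schedule: the four newest key words live in rk[0..3], and each step replaces the oldest word with itself XORed against a substituted mix of the other three and a round constant. Below is a minimal user-space sketch of that loop shape only; the real kernel code loads the key big-endian with get_unaligned_be32(), uses the SM4 FK/CK constants, and applies the real sm4_key_sub() S-box transform, all of which are replaced by placeholders here.

    #include <stdint.h>
    #include <string.h>

    /* Placeholder for the kernel's sm4_key_sub(); the real function applies
     * the SM4 S-box and the key-schedule linear transform. */
    static uint32_t key_sub(uint32_t x)
    {
            return x ^ (x << 13 | x >> 19) ^ (x << 23 | x >> 9);
    }

    /* Derive 32 round keys from four key words, following the rotating
     * pattern visible in sm4_expandkey(): each new rk word is folded back
     * into the four-word window before the next one is computed. */
    static void expandkey_sketch(uint32_t rkey[32], const uint32_t mk[4],
                                 const uint32_t fk[4], const uint32_t ck[32])
    {
            uint32_t rk[4];
            int i;

            for (i = 0; i < 4; i++)
                    rk[i] = mk[i] ^ fk[i];

            for (i = 0; i < 32; i += 4) {
                    rk[0] ^= key_sub(rk[1] ^ rk[2] ^ rk[3] ^ ck[i + 0]);
                    rk[1] ^= key_sub(rk[2] ^ rk[3] ^ rk[0] ^ ck[i + 1]);
                    rk[2] ^= key_sub(rk[3] ^ rk[0] ^ rk[1] ^ ck[i + 2]);
                    rk[3] ^= key_sub(rk[0] ^ rk[1] ^ rk[2] ^ ck[i + 3]);
                    memcpy(&rkey[i], rk, sizeof(rk));
            }
    }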
/linux-6.12.1/arch/arm64/crypto/
aes-neonbs-glue.c
27 asmlinkage void aesbs_convert_key(u8 out[], u32 const rk[], int rounds);
29 asmlinkage void aesbs_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
31 asmlinkage void aesbs_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
34 asmlinkage void aesbs_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
37 asmlinkage void aesbs_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
40 asmlinkage void aesbs_xts_encrypt(u8 out[], u8 const in[], u8 const rk[],
42 asmlinkage void aesbs_xts_decrypt(u8 out[], u8 const in[], u8 const rk[],
46 asmlinkage void neon_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
48 asmlinkage void neon_aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
50 asmlinkage void neon_aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
[all …]
aes-ce.S
28 .macro load_round_keys, rk, nr, tmp
29 add \tmp, \rk, \nr, sxtw #4
31 ld1 {v17.4s-v20.4s}, [\rk]
38 .macro enc_prepare, rounds, rk, temp
39 load_round_keys \rk, \rounds, \temp
43 .macro enc_switch_key, rounds, rk, temp
44 load_round_keys \rk, \rounds, \temp
48 .macro dec_prepare, rounds, rk, temp
49 load_round_keys \rk, \rounds, \temp
aes-neon.S
98 .macro do_block, enc, in, rounds, rk, rkp, i
99 ld1 {v15.4s}, [\rk]
100 add \rkp, \rk, #16
114 .macro encrypt_block, in, rounds, rk, rkp, i
115 do_block 1, \in, \rounds, \rk, \rkp, \i
118 .macro decrypt_block, in, rounds, rk, rkp, i
119 do_block 0, \in, \rounds, \rk, \rkp, \i
205 .macro do_block_4x, enc, in0, in1, in2, in3, rounds, rk, rkp, i
206 ld1 {v15.4s}, [\rk]
207 add \rkp, \rk, #16
[all …]
aes-cipher-core.S
14 rk .req x0
57 ldp \out0, \out1, [rk], #8
87 ldp w8, w9, [rk], #16
88 ldp w10, w11, [rk, #-8]
aes-cipher-glue.c
12 asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
13 asmlinkage void __aes_arm64_decrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
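
In aes-cipher-glue.c the rk argument is the expanded AES key schedule and rounds follows the standard AES rule (10, 12 or 14 rounds for 16-, 24- or 32-byte keys). A hedged sketch of the caller side follows; the context struct is a stand-in for the kernel's struct crypto_aes_ctx, and the real wrapper in that file may differ in detail.

    #include <stdint.h>

    typedef uint8_t u8;
    typedef uint32_t u32;

    /* Stand-in for the kernel's struct crypto_aes_ctx from <crypto/aes.h>,
     * which holds both expanded schedules and the key length in bytes. */
    struct aes_ctx_sketch {
            u32 key_enc[60];        /* 4 * (14 + 1) words covers AES-256 */
            u32 key_dec[60];
            u32 key_length;         /* 16, 24 or 32 */
    };

    /* Asm entry point from the hit above (prototype only). */
    void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

    /* Illustrative single-block encrypt: pass the encryption schedule as rk
     * and derive the round count from the key length. */
    static void encrypt_one_block(struct aes_ctx_sketch *ctx, u8 *dst,
                                  const u8 *src)
    {
            int rounds = 6 + ctx->key_length / 4;

            __aes_arm64_encrypt(ctx->key_enc, dst, src, rounds);
    }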
aes-ce-ccm-glue.c
35 asmlinkage u32 ce_aes_mac_update(u8 const in[], u32 const rk[], int rounds,
40 u32 const rk[], u32 rounds, u8 mac[],
44 u32 const rk[], u32 rounds, u8 mac[],
101 u32 macp, u32 const rk[], u32 rounds) in ce_aes_ccm_auth_data() argument
109 u32 rem = ce_aes_mac_update(in, rk, rounds, blocks, mac, in ce_aes_ccm_auth_data()
aes-ce-ccm-core.S
17 .macro load_round_keys, rk, nr, tmp
19 add \tmp, \rk, w\tmp, sxtw #4
20 ld1 {v10.4s-v13.4s}, [\rk]
aes-ce-glue.c
27 asmlinkage void __aes_ce_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
28 asmlinkage void __aes_ce_decrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
aes-glue.c
77 asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
79 asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u32 const rk[],
82 asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
84 asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u32 const rk[],
87 asmlinkage void aes_cbc_cts_encrypt(u8 out[], u8 const in[], u32 const rk[],
89 asmlinkage void aes_cbc_cts_decrypt(u8 out[], u8 const in[], u32 const rk[],
92 asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
95 asmlinkage void aes_xctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
112 asmlinkage int aes_mac_update(u8 const in[], u32 const rk[], int rounds,
/linux-6.12.1/arch/arm/crypto/
aes-neonbs-glue.c
28 asmlinkage void aesbs_convert_key(u8 out[], u32 const rk[], int rounds);
30 asmlinkage void aesbs_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
32 asmlinkage void aesbs_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
35 asmlinkage void aesbs_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
38 asmlinkage void aesbs_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
41 asmlinkage void aesbs_xts_encrypt(u8 out[], u8 const in[], u8 const rk[],
43 asmlinkage void aesbs_xts_decrypt(u8 out[], u8 const in[], u8 const rk[],
48 u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32] __aligned(AES_BLOCK_SIZE);
71 struct crypto_aes_ctx rk; in aesbs_setkey() local
74 err = aes_expandkey(&rk, in_key, key_len); in aesbs_setkey()
[all …]
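
The arm glue hits above outline the setkey path for the bit-sliced NEON code: the key is first expanded with the generic library helper aes_expandkey() into a temporary struct crypto_aes_ctx, and aesbs_convert_key() then repacks that schedule into the bit-sliced layout kept in the context's rk[] buffer. A hedged reconstruction of that flow follows; everything not visible in the hits (the context struct layout, the NEON bracketing, the explicit wipe of the temporary) is an assumption, not a quote of the real aesbs_setkey().

    #include <linux/linkage.h>
    #include <linux/string.h>
    #include <crypto/aes.h>
    #include <crypto/internal/skcipher.h>
    #include <asm/neon.h>

    /* Sketch of the driver context; the real struct aesbs_ctx carries the
     * rk[] byte buffer sized as shown in the hit above. */
    struct aesbs_ctx_sketch {
            int rounds;
            u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32] __aligned(AES_BLOCK_SIZE);
    };

    asmlinkage void aesbs_convert_key(u8 out[], u32 const rk[], int rounds);

    static int aesbs_setkey_sketch(struct crypto_skcipher *tfm,
                                   const u8 *in_key, unsigned int key_len)
    {
            struct aesbs_ctx_sketch *ctx = crypto_skcipher_ctx(tfm);
            struct crypto_aes_ctx rk;       /* temporary standard schedule */
            int err;

            err = aes_expandkey(&rk, in_key, key_len);
            if (err)
                    return err;

            ctx->rounds = 6 + key_len / 4;  /* 10/12/14 rounds */

            kernel_neon_begin();
            aesbs_convert_key(ctx->rk, rk.key_enc, ctx->rounds);
            kernel_neon_end();

            memzero_explicit(&rk, sizeof(rk)); /* don't leave key material around */
            return 0;
    }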
sha2-ce-core.S
18 rk .req r3
35 vld1.32 {k\ev}, [rk, :128]!
90 adr rk, .Lsha256_rcon
91 vld1.32 {k0}, [rk, :128]!
aes-cipher.h
8 asmlinkage void __aes_arm_encrypt(const u32 rk[], int rounds,
10 asmlinkage void __aes_arm_decrypt(const u32 rk[], int rounds,
aes-cipher-core.S
16 rk .req r0
84 ldm rk!, {t1, t2}
106 ldm rk!, {r8-r11}
aes-ce-glue.c
29 asmlinkage void ce_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
31 asmlinkage void ce_aes_ecb_decrypt(u8 out[], u8 const in[], u32 const rk[],
34 asmlinkage void ce_aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
36 asmlinkage void ce_aes_cbc_decrypt(u8 out[], u8 const in[], u32 const rk[],
38 asmlinkage void ce_aes_cbc_cts_encrypt(u8 out[], u8 const in[], u32 const rk[],
40 asmlinkage void ce_aes_cbc_cts_decrypt(u8 out[], u8 const in[], u32 const rk[],
43 asmlinkage void ce_aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
/linux-6.12.1/include/crypto/
aria.h
407 static inline void aria_add_round_key(u32 *rk, u32 *t0, u32 *t1, u32 *t2, in aria_add_round_key() argument
410 *t0 ^= rk[0]; in aria_add_round_key()
411 *t1 ^= rk[1]; in aria_add_round_key()
412 *t2 ^= rk[2]; in aria_add_round_key()
413 *t3 ^= rk[3]; in aria_add_round_key()
434 static inline void aria_gsrk(u32 *rk, u32 *x, u32 *y, u32 n) in aria_gsrk() argument
439 rk[0] = (x[0]) ^ in aria_gsrk()
442 rk[1] = (x[1]) ^ in aria_gsrk()
445 rk[2] = (x[2]) ^ in aria_gsrk()
448 rk[3] = (x[3]) ^ in aria_gsrk()
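
The aria.h hits show both halves of ARIA's round-key handling: aria_add_round_key() simply XORs four 32-bit round-key words into the state, and aria_gsrk() forms each round key as x XOR (y rotated right by n bits), the 128-bit values being held as four words with word 0 most significant. Below is a self-contained sketch of the same two operations, with the rotation written as a loop instead of the unrolled index arithmetic in the header; it assumes n is one of ARIA's actual rotation amounts, so n % 32 is never zero.

    #include <stdint.h>

    /* Add-round-key: XOR four round-key words into the state words. */
    static void add_round_key(const uint32_t rk[4], uint32_t t[4])
    {
            t[0] ^= rk[0];
            t[1] ^= rk[1];
            t[2] ^= rk[2];
            t[3] ^= rk[3];
    }

    /* GSRK: rk = x ^ (y >>> n) over 128 bits. s is the whole-word part of
     * the rotation, r the remaining bit count (assumed nonzero here). */
    static void gsrk(uint32_t rk[4], const uint32_t x[4], const uint32_t y[4],
                     unsigned int n)
    {
            unsigned int s = (n / 32) % 4;
            unsigned int r = n % 32;
            unsigned int i;

            for (i = 0; i < 4; i++) {
                    uint32_t cur  = y[(i + 4 - s) % 4]; /* word rotating into slot i */
                    uint32_t prev = y[(i + 3 - s) % 4]; /* neighbour; its low bits spill in */

                    rk[i] = x[i] ^ (cur >> r) ^ (prev << (32 - r));
            }
    }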
sm4.h
46 void sm4_crypt_block(const u32 *rk, u8 *out, const u8 *in);
/linux-6.12.1/arch/x86/crypto/
aria-gfni-avx512-asm_64.S
272 t0, rk, round) \
274 vpbroadcastb ((round * 16) + 3)(rk), t0; \
276 vpbroadcastb ((round * 16) + 2)(rk), t0; \
278 vpbroadcastb ((round * 16) + 1)(rk), t0; \
280 vpbroadcastb ((round * 16) + 0)(rk), t0; \
282 vpbroadcastb ((round * 16) + 7)(rk), t0; \
284 vpbroadcastb ((round * 16) + 6)(rk), t0; \
286 vpbroadcastb ((round * 16) + 5)(rk), t0; \
288 vpbroadcastb ((round * 16) + 4)(rk), t0; \
290 vpbroadcastb ((round * 16) + 11)(rk), t0; \
[all …]
aria-aesni-avx2-asm_64.S
286 t0, rk, idx, round) \
288 vpbroadcastb ((round * 16) + idx + 3)(rk), t0; \
290 vpbroadcastb ((round * 16) + idx + 2)(rk), t0; \
292 vpbroadcastb ((round * 16) + idx + 1)(rk), t0; \
294 vpbroadcastb ((round * 16) + idx + 0)(rk), t0; \
296 vpbroadcastb ((round * 16) + idx + 7)(rk), t0; \
298 vpbroadcastb ((round * 16) + idx + 6)(rk), t0; \
300 vpbroadcastb ((round * 16) + idx + 5)(rk), t0; \
302 vpbroadcastb ((round * 16) + idx + 4)(rk), t0; \
464 mem_tmp, rk, round) \ argument
[all …]
aria-aesni-avx-asm_64.S
270 t0, t1, t2, rk, \
273 vbroadcastss ((round * 16) + idx + 0)(rk), t0; \
285 vbroadcastss ((round * 16) + idx + 4)(rk), t0; \
423 mem_tmp, rk, round) \ argument
426 y0, y7, y2, rk, 8, round); \
441 y0, y7, y2, rk, 0, round); \
478 mem_tmp, rk, round) \ argument
481 y0, y7, y2, rk, 8, round); \
496 y0, y7, y2, rk, 0, round); \
533 mem_tmp, rk, round, last_round) \ argument
[all …]
sm4_aesni_avx2_glue.c
22 asmlinkage void sm4_aesni_avx2_ctr_enc_blk16(const u32 *rk, u8 *dst,
24 asmlinkage void sm4_aesni_avx2_cbc_dec_blk16(const u32 *rk, u8 *dst,
sm4_aesni_avx_glue.c
22 asmlinkage void sm4_aesni_avx_crypt4(const u32 *rk, u8 *dst,
24 asmlinkage void sm4_aesni_avx_crypt8(const u32 *rk, u8 *dst,
26 asmlinkage void sm4_aesni_avx_ctr_enc_blk8(const u32 *rk, u8 *dst,
28 asmlinkage void sm4_aesni_avx_cbc_dec_blk8(const u32 *rk, u8 *dst,
sm4-avx.h
8 typedef void (*sm4_crypt_func)(const u32 *rk, u8 *dst, const u8 *src, u8 *iv);
/linux-6.12.1/arch/loongarch/include/asm/
inst.h
343 unsigned int rk : 5; member
350 unsigned int rk : 5; member
507 u32 larch_insn_gen_or(enum loongarch_gpr rd, enum loongarch_gpr rj, enum loongarch_gpr rk);
708 enum loongarch_gpr rk) \
713 insn->reg3_format.rk = rk; \
759 enum loongarch_gpr rk, \
766 insn->reg3sa2_format.rk = rk; \
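
The inst.h hits are unrelated to crypto: here rk is the third 5-bit register field of LoongArch's three-register instruction formats, and the emitter helpers in the later hits fill it through the reg3_format bitfield. In the 3R layout rd occupies bits 0-4, rj bits 5-9 and rk bits 10-14, with the opcode above them. A small sketch of packing such a word by hand, equivalent to what the bitfield assignments do; the opcode value is left to the caller rather than hard-coding any real instruction.

    #include <stdint.h>

    /* Pack a LoongArch 3R-format instruction word:
     *   | opcode (17 bits) | rk (5) | rj (5) | rd (5) |
     * This mirrors the reg3_format bitfield layout from inst.h. */
    static uint32_t emit_reg3(uint32_t op17, unsigned int rd, unsigned int rj,
                              unsigned int rk)
    {
            return (op17 << 15) |
                   ((rk & 0x1f) << 10) |
                   ((rj & 0x1f) << 5) |
                    (rd & 0x1f);
    }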
/linux-6.12.1/Documentation/userspace-api/media/v4l/
metafmt-rkisp1.rst
3 .. _v4l2-meta-fmt-rk-isp1-stat-3a:
21 .. _v4l2-meta-fmt-rk-isp1-params:
41 .. _v4l2-meta-fmt-rk-isp1-ext-params:
