/linux-6.12.1/arch/x86/crypto/

  crc32c-intel_glue.c
     97  u32 *crcp = shash_desc_ctx(desc);  in crc32c_intel_init()  local
     99  *crcp = *mctx;  in crc32c_intel_init()
    107  u32 *crcp = shash_desc_ctx(desc);  in crc32c_intel_update()  local
    109  *crcp = crc32c_intel_le_hw(*crcp, data, len);  in crc32c_intel_update()
    113  static int __crc32c_intel_finup(u32 *crcp, const u8 *data, unsigned int len,  in __crc32c_intel_finup()  argument
    116  *(__le32 *)out = ~cpu_to_le32(crc32c_intel_le_hw(*crcp, data, len));  in __crc32c_intel_finup()
    128  u32 *crcp = shash_desc_ctx(desc);  in crc32c_intel_final()  local
    130  *(__le32 *)out = ~cpu_to_le32p(crcp);  in crc32c_intel_final()
    154  u32 *crcp = shash_desc_ctx(desc);  in crc32c_pcl_intel_update()  local
    162  *crcp = crc_pcl(data, len, *crcp);  in crc32c_pcl_intel_update()
    [all …]

  crc32-pclmul_glue.c
    106  u32 *crcp = shash_desc_ctx(desc);  in crc32_pclmul_init()  local
    108  *crcp = *mctx;  in crc32_pclmul_init()
    116  u32 *crcp = shash_desc_ctx(desc);  in crc32_pclmul_update()  local
    118  *crcp = crc32_pclmul_le(*crcp, data, len);  in crc32_pclmul_update()
    123  static int __crc32_pclmul_finup(u32 *crcp, const u8 *data, unsigned int len,  in __crc32_pclmul_finup()  argument
    126  *(__le32 *)out = cpu_to_le32(crc32_pclmul_le(*crcp, data, len));  in __crc32_pclmul_finup()
    138  u32 *crcp = shash_desc_ctx(desc);  in crc32_pclmul_final()  local
    140  *(__le32 *)out = cpu_to_le32p(crcp);  in crc32_pclmul_final()
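Both x86 glue files above implement the same shash pattern: init() seeds a u32 CRC state held in the descriptor context (*crcp = *mctx), update() folds more bytes into it, and final()/finup() write the result out little-endian, bit-inverted in the CRC32C case. A minimal userspace sketch of that state threading, assuming a bit-by-bit CRC32C stand-in (crc32c_soft) in place of the accelerated helpers (crc32c_intel_le_hw, crc_pcl), and illustrative struct/function names rather than the kernel's shash API:

/* Sketch only: models the init/update/final state threading seen in the
 * glue files above, not the kernel's crypto API. */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct crc_desc {                     /* stands in for shash_desc_ctx(desc) */
    uint32_t crc;
};

/* Bit-by-bit reflected CRC32C fold; stand-in for crc32c_intel_le_hw()/crc_pcl(). */
static uint32_t crc32c_soft(uint32_t crc, const uint8_t *data, size_t len)
{
    while (len--) {
        crc ^= *data++;
        for (int i = 0; i < 8; i++)
            crc = (crc >> 1) ^ ((crc & 1) ? 0x82F63B38u : 0);
    }
    return crc;
}

static void crc32c_init(struct crc_desc *d, uint32_t key)      /* *crcp = *mctx */
{
    d->crc = key;
}

static void crc32c_update(struct crc_desc *d, const uint8_t *data, size_t len)
{
    d->crc = crc32c_soft(d->crc, data, len);                   /* *crcp = fold(*crcp, ...) */
}

static void crc32c_final(const struct crc_desc *d, uint8_t *out)
{
    uint32_t v = ~d->crc;                      /* ~cpu_to_le32p(crcp) */
    for (int i = 0; i < 4; i++)                /* store little-endian, byte by byte */
        out[i] = (uint8_t)(v >> (8 * i));
}

int main(void)
{
    struct crc_desc d;
    uint8_t out[4];
    const char *msg = "123456789";

    crc32c_init(&d, 0xFFFFFFFFu);              /* default crc32c key is ~0 */
    crc32c_update(&d, (const uint8_t *)msg, strlen(msg));
    crc32c_final(&d, out);

    /* CRC32C("123456789") is the well-known check value 0xe3069283,
     * printed here as its little-endian byte sequence. */
    printf("%02x %02x %02x %02x\n", out[0], out[1], out[2], out[3]);
    return 0;
}

The crc32-pclmul glue follows the same shape with the plain CRC-32 polynomial and, as its cpu_to_le32() lines show, without the final bit inversion.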
/linux-6.12.1/crypto/

  crc32_generic.c
     50  u32 *crcp = shash_desc_ctx(desc);  in crc32_init()  local
     52  *crcp = *mctx;  in crc32_init()
     60  u32 *crcp = shash_desc_ctx(desc);  in crc32_update()  local
     62  *crcp = crc32_le(*crcp, data, len);  in crc32_update()
     67  static int __crc32_finup(u32 *crcp, const u8 *data, unsigned int len,  in __crc32_finup()  argument
     70  put_unaligned_le32(crc32_le(*crcp, data, len), out);  in __crc32_finup()
     82  u32 *crcp = shash_desc_ctx(desc);  in crc32_final()  local
     84  put_unaligned_le32(*crcp, out);  in crc32_final()

  crc32c_generic.c
    100  static int __chksum_finup(u32 *crcp, const u8 *data, unsigned int len, u8 *out)  in __chksum_finup()  argument
    102  put_unaligned_le32(~__crc32c_le(*crcp, data, len), out);  in __chksum_finup()
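The generic drivers follow the same shape using the kernel's lib/crc32.c helpers: crc32_le() folds with the reflected IEEE polynomial 0xEDB88320 and crc32_final() stores the state as-is, while __crc32c_le() folds with the Castagnoli polynomial 0x82F63B38 and __chksum_finup() stores the bit-inverted state; put_unaligned_le32() makes the little-endian store safe for unaligned out pointers. A bit-by-bit sketch of the two folds (stand-ins for the kernel's table-driven implementations, not copies of them):

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Generic reflected (LSB-first) CRC fold over len bytes. */
static uint32_t crc_le_soft(uint32_t crc, const uint8_t *p, size_t len,
                            uint32_t poly)
{
    while (len--) {
        crc ^= *p++;
        for (int i = 0; i < 8; i++)
            crc = (crc >> 1) ^ ((crc & 1) ? poly : 0);
    }
    return crc;
}

/* crc32_le()-style fold: the shash driver applies no final inversion. */
static uint32_t crc32_le_soft(uint32_t crc, const uint8_t *p, size_t len)
{
    return crc_le_soft(crc, p, len, 0xEDB88320u);
}

/* __crc32c_le()-style fold: the caller (finup/final) applies the ~ inversion. */
static uint32_t crc32c_le_soft(uint32_t crc, const uint8_t *p, size_t len)
{
    return crc_le_soft(crc, p, len, 0x82F63B38u);
}

int main(void)
{
    const uint8_t *msg = (const uint8_t *)"123456789";

    /* Standard check values: CRC-32/IEEE 0xcbf43926, CRC-32C 0xe3069283
     * (both seeded with ~0 and bit-inverted at the end). */
    printf("crc32  %08x\n", (unsigned)~crc32_le_soft(0xFFFFFFFFu, msg, 9));
    printf("crc32c %08x\n", (unsigned)~crc32c_le_soft(0xFFFFFFFFu, msg, 9));
    return 0;
}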
/linux-6.12.1/arch/sparc/crypto/

  crc32c_glue.c
     46  u32 *crcp = shash_desc_ctx(desc);  in crc32c_sparc64_init()  local
     48  *crcp = *mctx;  in crc32c_sparc64_init()
     53  extern void crc32c_sparc64(u32 *crcp, const u64 *data, unsigned int len);
     80  u32 *crcp = shash_desc_ctx(desc);  in crc32c_sparc64_update()  local
     82  *crcp = crc32c_compute(*crcp, data, len);  in crc32c_sparc64_update()
     86  static int __crc32c_sparc64_finup(const u32 *crcp, const u8 *data,  in __crc32c_sparc64_finup()  argument
     89  put_unaligned_le32(~crc32c_compute(*crcp, data, len), out);  in __crc32c_sparc64_finup()
    101  u32 *crcp = shash_desc_ctx(desc);  in crc32c_sparc64_final()  local
    103  put_unaligned_le32(~*crcp, out);  in crc32c_sparc64_final()
/linux-6.12.1/arch/powerpc/crypto/

  crc32c-vpmsum_glue.c
     85  u32 *crcp = shash_desc_ctx(desc);  in crc32c_vpmsum_init()  local
     87  *crcp = *mctx;  in crc32c_vpmsum_init()
     95  u32 *crcp = shash_desc_ctx(desc);  in crc32c_vpmsum_update()  local
     97  *crcp = crc32c_vpmsum(*crcp, data, len);  in crc32c_vpmsum_update()
    102  static int __crc32c_vpmsum_finup(u32 *crcp, const u8 *data, unsigned int len,  in __crc32c_vpmsum_finup()  argument
    105  *(__le32 *)out = ~cpu_to_le32(crc32c_vpmsum(*crcp, data, len));  in __crc32c_vpmsum_finup()
    118  u32 *crcp = shash_desc_ctx(desc);  in crc32c_vpmsum_final()  local
    120  *(__le32 *)out = ~cpu_to_le32p(crcp);  in crc32c_vpmsum_final()

  crct10dif-vpmsum_glue.c
     85  u16 *crcp = shash_desc_ctx(desc);  in crct10dif_vpmsum_final()  local
     87  *(u16 *)out = *crcp;  in crct10dif_vpmsum_final()
/linux-6.12.1/net/ceph/

  messenger_v2.c
    487  void *crcp = p + CEPH_PREAMBLE_LEN - CEPH_CRC_LEN;  in encode_preamble()  local
    500  put_unaligned_le32(crc32c(0, start, crcp - start), crcp);  in encode_preamble()
    505  void *crcp = p + CEPH_PREAMBLE_LEN - CEPH_CRC_LEN;  in decode_preamble()  local
    509  crc = crc32c(0, p, crcp - p);  in decode_preamble()
    510  expected_crc = get_unaligned_le32(crcp);  in decode_preamble()
   1222  void *crcp = base + base_len - CEPH_CRC_LEN;  in prepare_head_plain()  local
   1228  put_unaligned_le32(crc, crcp);  in prepare_head_plain()
   1237  add_out_kvec(con, base, crcp - base);  in prepare_head_plain()
   1239  add_out_kvec(con, crcp, CEPH_CRC_LEN);  in prepare_head_plain()
   1241  add_out_sign_kvec(con, base, crcp - base);  in prepare_head_plain()
    [all …]
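In the ceph messenger, crcp is not a CRC state pointer but the address of the trailing CRC slot: the last CEPH_CRC_LEN bytes of the fixed-size preamble (or of the plain head) hold a little-endian CRC32C of everything before them. A self-contained sketch of that layout, with an illustrative PREAMBLE_LEN rather than the real CEPH_PREAMBLE_LEN, and with crc32c_soft() as a stand-in that may differ from the kernel's crc32c() in seeding details:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

#define PREAMBLE_LEN 32   /* illustrative size of the fixed preamble */
#define CRC_LEN      4    /* the CRC-32 occupies the last four bytes */

static uint32_t crc32c_soft(uint32_t crc, const uint8_t *p, size_t len)
{
    while (len--) {
        crc ^= *p++;
        for (int i = 0; i < 8; i++)
            crc = (crc >> 1) ^ ((crc & 1) ? 0x82F63B38u : 0);
    }
    return crc;
}

static void put_le32(uint8_t *out, uint32_t v)
{
    for (int i = 0; i < 4; i++)
        out[i] = (uint8_t)(v >> (8 * i));
}

static uint32_t get_le32(const uint8_t *in)
{
    return (uint32_t)in[0] | ((uint32_t)in[1] << 8) |
           ((uint32_t)in[2] << 16) | ((uint32_t)in[3] << 24);
}

/* encode_preamble()-style: CRC the bytes before the CRC slot, store it there. */
static void encode_preamble(uint8_t *p)
{
    uint8_t *crcp = p + PREAMBLE_LEN - CRC_LEN;

    put_le32(crcp, crc32c_soft(0, p, (size_t)(crcp - p)));
}

/* decode_preamble()-style: recompute and compare against the stored value. */
static bool decode_preamble(const uint8_t *p)
{
    const uint8_t *crcp = p + PREAMBLE_LEN - CRC_LEN;

    return crc32c_soft(0, p, (size_t)(crcp - p)) == get_le32(crcp);
}

int main(void)
{
    uint8_t preamble[PREAMBLE_LEN] = { 1, 2, 3 };   /* header fields would go here */

    encode_preamble(preamble);
    return decode_preamble(preamble) ? 0 : 1;
}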
/linux-6.12.1/kernel/locking/

  locktorture.c
   1063  struct call_rcu_chain *crcp = container_of(rhp, struct call_rcu_chain, crc_rh);  in call_rcu_chain_cb()  local
   1065  if (!smp_load_acquire(&crcp->crc_stop)) {  in call_rcu_chain_cb()
   1067  call_rcu(&crcp->crc_rh, call_rcu_chain_cb); // ... and later start another.  in call_rcu_chain_cb()
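This last hit is unrelated to CRCs: crcp here is a struct call_rcu_chain pointer, and the callback re-posts itself with call_rcu() until crc_stop, read with smp_load_acquire(), becomes true. A userspace analogue of that control flow (sketch only; a driving loop stands in for call_rcu()'s deferred re-posting, and the names are illustrative):

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

struct call_chain {
    atomic_bool stop;        /* plays the role of crc_stop: set to break the chain */
    unsigned long invoked;   /* how many times the callback has run */
};

/* One link of the chain: returns true if it would re-post itself. */
static bool call_chain_cb(struct call_chain *c)
{
    c->invoked++;
    /* Mirrors: if (!smp_load_acquire(&crcp->crc_stop)) call_rcu(...); */
    return !atomic_load_explicit(&c->stop, memory_order_acquire);
}

int main(void)
{
    struct call_chain chain = { .invoked = 0 };

    atomic_init(&chain.stop, false);

    /* Drive a few links, then ask the chain to stop (release pairs with acquire). */
    for (int i = 0; call_chain_cb(&chain); i++)
        if (i == 3)
            atomic_store_explicit(&chain.stop, true, memory_order_release);

    printf("callback ran %lu times\n", chain.invoked);
    return 0;
}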