// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for CRC32C optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/crc32c-intel.c
 *
 * Copyright (C) 2008 Intel Corporation
 * Authors: Austin Zhang <austin_zhang@linux.intel.com>
 *          Kent Liu <kent.liu@intel.com>
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <linux/init.h>
#include <linux/module.h>
#include <linux/string.h>
#include <linux/kernel.h>
#include <linux/crc32.h>

#include <crypto/internal/hash.h>

#include <asm/pstate.h>
#include <asm/elf.h>
#include <linux/unaligned.h>

#include "opcodes.h"

/*
 * Setting the seed allows arbitrary accumulators and a flexible XOR
 * policy.  If your algorithm starts with ~0, then XOR with ~0 before
 * you set the seed (a usage sketch follows the function below).
 */
static int crc32c_sparc64_setkey(struct crypto_shash *hash, const u8 *key,
				 unsigned int keylen)
{
	u32 *mctx = crypto_shash_ctx(hash);

	if (keylen != sizeof(u32))
		return -EINVAL;
	*mctx = get_unaligned_le32(key);
	return 0;
}
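
/*
 * A minimal usage sketch (hypothetical caller code, not part of this
 * module): the optional 4-byte key is the raw accumulator in
 * little-endian byte order, so explicitly seeding with the
 * conventional ~0 starting value (which cra_init below also installs
 * by default) could look like:
 *
 *	__le32 seed = cpu_to_le32(~0);
 *	int err = crypto_shash_setkey(tfm, (const u8 *)&seed, sizeof(seed));
 *
 * Any other key length is rejected with -EINVAL above.
 */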

static int crc32c_sparc64_init(struct shash_desc *desc)
{
	u32 *mctx = crypto_shash_ctx(desc->tfm);
	u32 *crcp = shash_desc_ctx(desc);

	*crcp = *mctx;

	return 0;
}

extern void crc32c_sparc64(u32 *crcp, const u64 *data, unsigned int len);

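/*
 * crc32c_sparc64() is the CRC32C-opcode inner loop supplied by the
 * assembly part of this driver (an assumption based on the extern
 * declaration above; it is not defined in this file).
 * crc32c_compute() only ever hands it 8-byte-aligned, multiple-of-8
 * chunks: an unaligned head and a sub-8-byte tail go through the
 * generic __crc32c_le() helper instead, and only the aligned middle
 * uses the opcode.  The head length "-(uintptr_t)data & 7" is the
 * distance to the next 8-byte boundary; e.g. a buffer starting at an
 * address ending in 0x5 gives n = 3.
 */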
static u32 crc32c_compute(u32 crc, const u8 *data, unsigned int len)
{
	unsigned int n = -(uintptr_t)data & 7;

	if (n) {
		/* Data isn't 8-byte aligned.  Align it. */
		n = min(n, len);
		crc = __crc32c_le(crc, data, n);
		data += n;
		len -= n;
	}
	n = len & ~7U;
	if (n) {
		crc32c_sparc64(&crc, (const u64 *)data, n);
		data += n;
		len -= n;
	}
	if (len)
		crc = __crc32c_le(crc, data, len);
	return crc;
}

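/*
 * shash plumbing below: update() folds more data into the 32-bit
 * accumulator kept in the descriptor context, final() and finup()
 * invert the accumulator and store it little-endian as the digest, and
 * digest() skips init() by seeding straight from the per-tfm context
 * (the key, or the ~0 default set up in cra_init).
 */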
static int crc32c_sparc64_update(struct shash_desc *desc, const u8 *data,
				 unsigned int len)
{
	u32 *crcp = shash_desc_ctx(desc);

	*crcp = crc32c_compute(*crcp, data, len);
	return 0;
}

static int __crc32c_sparc64_finup(const u32 *crcp, const u8 *data,
				  unsigned int len, u8 *out)
{
	put_unaligned_le32(~crc32c_compute(*crcp, data, len), out);
	return 0;
}

static int crc32c_sparc64_finup(struct shash_desc *desc, const u8 *data,
				unsigned int len, u8 *out)
{
	return __crc32c_sparc64_finup(shash_desc_ctx(desc), data, len, out);
}

static int crc32c_sparc64_final(struct shash_desc *desc, u8 *out)
{
	u32 *crcp = shash_desc_ctx(desc);

	put_unaligned_le32(~*crcp, out);
	return 0;
}

static int crc32c_sparc64_digest(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return __crc32c_sparc64_finup(crypto_shash_ctx(desc->tfm), data, len,
				      out);
}

static int crc32c_sparc64_cra_init(struct crypto_tfm *tfm)
{
	u32 *key = crypto_tfm_ctx(tfm);

	*key = ~0;

	return 0;
}

#define CHKSUM_BLOCK_SIZE	1
#define CHKSUM_DIGEST_SIZE	4

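/*
 * crc32c is normally used without a key, hence CRYPTO_ALG_OPTIONAL_KEY:
 * cra_init installs the ~0 default seed and setkey() merely overrides
 * it.  SPARC_CR_OPCODE_PRIORITY (from the local opcodes.h) ranks this
 * driver above the generic software crc32c when both are registered.
 */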
static struct shash_alg alg = {
	.setkey			= crc32c_sparc64_setkey,
	.init			= crc32c_sparc64_init,
	.update			= crc32c_sparc64_update,
	.final			= crc32c_sparc64_final,
	.finup			= crc32c_sparc64_finup,
	.digest			= crc32c_sparc64_digest,
	.descsize		= sizeof(u32),
	.digestsize		= CHKSUM_DIGEST_SIZE,
	.base			= {
		.cra_name		= "crc32c",
		.cra_driver_name	= "crc32c-sparc64",
		.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
		.cra_flags		= CRYPTO_ALG_OPTIONAL_KEY,
		.cra_blocksize		= CHKSUM_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(u32),
		.cra_module		= THIS_MODULE,
		.cra_init		= crc32c_sparc64_cra_init,
	}
};

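/*
 * %asr26 is the Crypto Function Register (CFR), which enumerates the
 * crypto opcodes this CPU implements.  The HWCAP_SPARC_CRYPTO test is
 * done first so the CFR is only read once we know the crypto unit is
 * present at all (inferred from the ordering of the checks below).
 */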
static bool __init sparc64_has_crc32c_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_CRC32C))
		return false;

	return true;
}

static int __init crc32c_sparc64_mod_init(void)
{
	if (sparc64_has_crc32c_opcode()) {
		pr_info("Using sparc64 crc32c opcode optimized CRC32C implementation\n");
		return crypto_register_shash(&alg);
	}
	pr_info("sparc64 crc32c opcode not available.\n");
	return -ENODEV;
}

static void __exit crc32c_sparc64_mod_fini(void)
{
	crypto_unregister_shash(&alg);
}

module_init(crc32c_sparc64_mod_init);
module_exit(crc32c_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CRC32c (Castagnoli), sparc64 crc32c opcode accelerated");

MODULE_ALIAS_CRYPTO("crc32c");
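
/*
 * crop_devid.c is shared by the sparc64 crypto-opcode drivers;
 * including it here presumably supplies the OF device-ID table used
 * for module autoloading (an assumption; its contents are not visible
 * in this file).
 */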
#include "crop_devid.c"