/*
 * Cryptographic API.
 *
 * Glue code for the SHA1 Secure Hash Algorithm assembler implementation using
 * Supplemental SSE3 instructions.
 *
 * This file is based on sha1_generic.c
 *
 * Copyright (c) Alan Smithee.
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) Mathias Krause <minipli@googlemail.com>
 * Copyright (c) Chandramouli Narayanan <mouli@linux.intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <crypto/sha1_base.h>
#include <asm/fpu/api.h>

typedef void (sha1_transform_fn)(u32 *digest, const char *data,
                                 unsigned int rounds);

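/*
 * Common update path: run the assembler block function with the FPU enabled
 * when that is safe, and fall back to the generic C implementation
 * (crypto_sha1_update) when the FPU is not usable in this context or when
 * the buffered data plus @len still does not complete a full block.
 */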
static int sha1_update(struct shash_desc *desc, const u8 *data,
                       unsigned int len, sha1_transform_fn *sha1_xform)
{
        struct sha1_state *sctx = shash_desc_ctx(desc);

        if (!irq_fpu_usable() ||
            (sctx->count % SHA1_BLOCK_SIZE) + len < SHA1_BLOCK_SIZE)
                return crypto_sha1_update(desc, data, len);

        /* make sure casting to sha1_block_fn() is safe */
        BUILD_BUG_ON(offsetof(struct sha1_state, state) != 0);

        kernel_fpu_begin();
        sha1_base_do_update(desc, data, len,
                            (sha1_block_fn *)sha1_xform);
        kernel_fpu_end();

        return 0;
}

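/*
 * Common finup path: process any remaining data and emit the final padding
 * inside a single kernel_fpu_begin()/kernel_fpu_end() section, falling back
 * to crypto_sha1_finup() when the FPU cannot be used in this context.
 */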
static int sha1_finup(struct shash_desc *desc, const u8 *data,
                      unsigned int len, u8 *out, sha1_transform_fn *sha1_xform)
{
        if (!irq_fpu_usable())
                return crypto_sha1_finup(desc, data, len, out);

        kernel_fpu_begin();
        if (len)
                sha1_base_do_update(desc, data, len,
                                    (sha1_block_fn *)sha1_xform);
        sha1_base_do_finalize(desc, (sha1_block_fn *)sha1_xform);
        kernel_fpu_end();

        return sha1_base_finish(desc, out);
}

asmlinkage void sha1_transform_ssse3(u32 *digest, const char *data,
                                     unsigned int rounds);

static int sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
                             unsigned int len)
{
        return sha1_update(desc, data, len,
                           (sha1_transform_fn *) sha1_transform_ssse3);
}

static int sha1_ssse3_finup(struct shash_desc *desc, const u8 *data,
                            unsigned int len, u8 *out)
{
        return sha1_finup(desc, data, len, out,
                          (sha1_transform_fn *) sha1_transform_ssse3);
}

/* Add padding and return the message digest. */
static int sha1_ssse3_final(struct shash_desc *desc, u8 *out)
{
        return sha1_ssse3_finup(desc, NULL, 0, out);
}

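/*
 * Each variant registers under the same algorithm name ("sha1") but with a
 * distinct driver name and an increasing cra_priority (SSSE3 150, AVX 160,
 * AVX2 170, SHA-NI 250), so the crypto core selects the fastest registered
 * implementation.
 */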
static struct shash_alg sha1_ssse3_alg = {
        .digestsize     = SHA1_DIGEST_SIZE,
        .init           = sha1_base_init,
        .update         = sha1_ssse3_update,
        .final          = sha1_ssse3_final,
        .finup          = sha1_ssse3_finup,
        .descsize       = sizeof(struct sha1_state),
        .base           = {
                .cra_name       = "sha1",
                .cra_driver_name = "sha1-ssse3",
                .cra_priority   = 150,
                .cra_flags      = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize  = SHA1_BLOCK_SIZE,
                .cra_module     = THIS_MODULE,
        }
};

static int register_sha1_ssse3(void)
{
        if (boot_cpu_has(X86_FEATURE_SSSE3))
                return crypto_register_shash(&sha1_ssse3_alg);
        return 0;
}

static void unregister_sha1_ssse3(void)
{
        if (boot_cpu_has(X86_FEATURE_SSSE3))
                crypto_unregister_shash(&sha1_ssse3_alg);
}

#ifdef CONFIG_AS_AVX
asmlinkage void sha1_transform_avx(u32 *digest, const char *data,
                                   unsigned int rounds);

static int sha1_avx_update(struct shash_desc *desc, const u8 *data,
                           unsigned int len)
{
        return sha1_update(desc, data, len,
                           (sha1_transform_fn *) sha1_transform_avx);
}

static int sha1_avx_finup(struct shash_desc *desc, const u8 *data,
                          unsigned int len, u8 *out)
{
        return sha1_finup(desc, data, len, out,
                          (sha1_transform_fn *) sha1_transform_avx);
}

static int sha1_avx_final(struct shash_desc *desc, u8 *out)
{
        return sha1_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx_alg = {
        .digestsize     = SHA1_DIGEST_SIZE,
        .init           = sha1_base_init,
        .update         = sha1_avx_update,
        .final          = sha1_avx_final,
        .finup          = sha1_avx_finup,
        .descsize       = sizeof(struct sha1_state),
        .base           = {
                .cra_name       = "sha1",
                .cra_driver_name = "sha1-avx",
                .cra_priority   = 160,
                .cra_flags      = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize  = SHA1_BLOCK_SIZE,
                .cra_module     = THIS_MODULE,
        }
};

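/*
 * AVX is only usable when the OS has enabled saving of both SSE and YMM
 * register state (XSAVE); a CPU can advertise AVX while the kernel or a
 * hypervisor has left YMM state disabled, hence the xfeatures check rather
 * than a plain CPUID feature test.
 */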
static bool avx_usable(void)
{
        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
                if (cpu_has_avx)
                        pr_info("AVX detected but unusable.\n");
                return false;
        }

        return true;
}

static int register_sha1_avx(void)
{
        if (avx_usable())
                return crypto_register_shash(&sha1_avx_alg);
        return 0;
}

static void unregister_sha1_avx(void)
{
        if (avx_usable())
                crypto_unregister_shash(&sha1_avx_alg);
}

#else /* CONFIG_AS_AVX */
static inline int register_sha1_avx(void) { return 0; }
static inline void unregister_sha1_avx(void) { }
#endif /* CONFIG_AS_AVX */

#if defined(CONFIG_AS_AVX2) && defined(CONFIG_AS_AVX)
#define SHA1_AVX2_BLOCK_OPTSIZE 4       /* optimal 4*64 bytes of SHA1 blocks */

asmlinkage void sha1_transform_avx2(u32 *digest, const char *data,
                                    unsigned int rounds);

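/*
 * The AVX2 assembler implementation also uses BMI1/BMI2 instructions, so
 * both feature bits must be present in addition to a usable AVX setup.
 */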
static bool avx2_usable(void)
{
        if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2)
                && boot_cpu_has(X86_FEATURE_BMI1)
                && boot_cpu_has(X86_FEATURE_BMI2))
                return true;

        return false;
}

static void sha1_apply_transform_avx2(u32 *digest, const char *data,
                                      unsigned int rounds)
{
        /* Select the optimal transform based on data block size */
        if (rounds >= SHA1_AVX2_BLOCK_OPTSIZE)
                sha1_transform_avx2(digest, data, rounds);
        else
                sha1_transform_avx(digest, data, rounds);
}

static int sha1_avx2_update(struct shash_desc *desc, const u8 *data,
                            unsigned int len)
{
        return sha1_update(desc, data, len,
                           (sha1_transform_fn *) sha1_apply_transform_avx2);
}

static int sha1_avx2_finup(struct shash_desc *desc, const u8 *data,
                           unsigned int len, u8 *out)
{
        return sha1_finup(desc, data, len, out,
                          (sha1_transform_fn *) sha1_apply_transform_avx2);
}

static int sha1_avx2_final(struct shash_desc *desc, u8 *out)
{
        return sha1_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx2_alg = {
        .digestsize     = SHA1_DIGEST_SIZE,
        .init           = sha1_base_init,
        .update         = sha1_avx2_update,
        .final          = sha1_avx2_final,
        .finup          = sha1_avx2_finup,
        .descsize       = sizeof(struct sha1_state),
        .base           = {
                .cra_name       = "sha1",
                .cra_driver_name = "sha1-avx2",
                .cra_priority   = 170,
                .cra_flags      = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize  = SHA1_BLOCK_SIZE,
                .cra_module     = THIS_MODULE,
        }
};

static int register_sha1_avx2(void)
{
        if (avx2_usable())
                return crypto_register_shash(&sha1_avx2_alg);
        return 0;
}

static void unregister_sha1_avx2(void)
{
        if (avx2_usable())
                crypto_unregister_shash(&sha1_avx2_alg);
}

#else
static inline int register_sha1_avx2(void) { return 0; }
static inline void unregister_sha1_avx2(void) { }
#endif

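/*
 * SHA-NI uses the dedicated SHA1 instruction-set extensions; unlike AVX,
 * no extra xfeature state check is needed beyond the CPU feature bit.
 */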
#ifdef CONFIG_AS_SHA1_NI
asmlinkage void sha1_ni_transform(u32 *digest, const char *data,
                                  unsigned int rounds);

static int sha1_ni_update(struct shash_desc *desc, const u8 *data,
                          unsigned int len)
{
        return sha1_update(desc, data, len,
                           (sha1_transform_fn *) sha1_ni_transform);
}

static int sha1_ni_finup(struct shash_desc *desc, const u8 *data,
                         unsigned int len, u8 *out)
{
        return sha1_finup(desc, data, len, out,
                          (sha1_transform_fn *) sha1_ni_transform);
}

static int sha1_ni_final(struct shash_desc *desc, u8 *out)
{
        return sha1_ni_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_ni_alg = {
        .digestsize     = SHA1_DIGEST_SIZE,
        .init           = sha1_base_init,
        .update         = sha1_ni_update,
        .final          = sha1_ni_final,
        .finup          = sha1_ni_finup,
        .descsize       = sizeof(struct sha1_state),
        .base           = {
                .cra_name       = "sha1",
                .cra_driver_name = "sha1-ni",
                .cra_priority   = 250,
                .cra_flags      = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize  = SHA1_BLOCK_SIZE,
                .cra_module     = THIS_MODULE,
        }
};

static int register_sha1_ni(void)
{
        if (boot_cpu_has(X86_FEATURE_SHA_NI))
                return crypto_register_shash(&sha1_ni_alg);
        return 0;
}

static void unregister_sha1_ni(void)
{
        if (boot_cpu_has(X86_FEATURE_SHA_NI))
                crypto_unregister_shash(&sha1_ni_alg);
}

#else
static inline int register_sha1_ni(void) { return 0; }
static inline void unregister_sha1_ni(void) { }
#endif

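/*
 * Register every variant the CPU and toolchain support; each register_*
 * helper is a no-op returning 0 when its feature is absent. On a real
 * registration failure, unwind whatever was registered before it.
 */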
static int __init sha1_ssse3_mod_init(void)
{
        if (register_sha1_ssse3())
                goto fail;

        if (register_sha1_avx()) {
                unregister_sha1_ssse3();
                goto fail;
        }

        if (register_sha1_avx2()) {
                unregister_sha1_avx();
                unregister_sha1_ssse3();
                goto fail;
        }

        if (register_sha1_ni()) {
                unregister_sha1_avx2();
                unregister_sha1_avx();
                unregister_sha1_ssse3();
                goto fail;
        }

        return 0;
fail:
        return -ENODEV;
}

static void __exit sha1_ssse3_mod_fini(void)
{
        unregister_sha1_ni();
        unregister_sha1_avx2();
        unregister_sha1_avx();
        unregister_sha1_ssse3();
}

module_init(sha1_ssse3_mod_init);
module_exit(sha1_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha1");