/*
 * Copyright (c) 2019 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef included_sha2_h
#define included_sha2_h

#include <vppinfra/clib.h>
#include <vppinfra/vector.h>

#define SHA224_DIGEST_SIZE 28
#define SHA224_BLOCK_SIZE  64

#define SHA256_DIGEST_SIZE 32
#define SHA256_BLOCK_SIZE  64
#define SHA256_ROTR(x, y)  ((x >> y) | (x << (32 - y)))
#define SHA256_CH(a, b, c) ((a & b) ^ (~a & c))
#define SHA256_MAJ(a, b, c) ((a & b) ^ (a & c) ^ (b & c))
#define SHA256_CSIGMA0(x) \
  (SHA256_ROTR (x, 2) ^ SHA256_ROTR (x, 13) ^ SHA256_ROTR (x, 22))
#define SHA256_CSIGMA1(x) \
  (SHA256_ROTR (x, 6) ^ SHA256_ROTR (x, 11) ^ SHA256_ROTR (x, 25))
#define SHA256_SSIGMA0(x) (SHA256_ROTR (x, 7) ^ SHA256_ROTR (x, 18) ^ (x >> 3))
#define SHA256_SSIGMA1(x) \
  (SHA256_ROTR (x, 17) ^ SHA256_ROTR (x, 19) ^ (x >> 10))

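/* message schedule per FIPS 180-4 section 6.2.2 step 1:
 * w[j] = SSIGMA1 (w[j - 2]) + w[j - 7] + SSIGMA0 (w[j - 15]) + w[j - 16] */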
#define SHA256_MSG_SCHED(w, j) \
  { \
    w[j] = w[j - 7] + w[j - 16]; \
    w[j] += SHA256_SSIGMA0 (w[j - 15]); \
    w[j] += SHA256_SSIGMA1 (w[j - 2]); \
  }

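/* one round of the compression function (FIPS 180-4 section 6.2.2 step 3):
 * s[] holds the working variables a..h; t1 = h + CSIGMA1 (e) + CH (e, f, g)
 * + K + W and t2 = CSIGMA0 (a) + MAJ (a, b, c), then the variables shift
 * down with e = d + t1 and a = t1 + t2 */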
#define SHA256_TRANSFORM(s, w, i, k) \
  { \
    __typeof__ (s[0]) t1, t2; \
    t1 = k + w[i] + s[7]; \
    t1 += SHA256_CSIGMA1 (s[4]); \
    t1 += SHA256_CH (s[4], s[5], s[6]); \
    t2 = SHA256_CSIGMA0 (s[0]); \
    t2 += SHA256_MAJ (s[0], s[1], s[2]); \
    s[7] = s[6]; \
    s[6] = s[5]; \
    s[5] = s[4]; \
    s[4] = s[3] + t1; \
    s[3] = s[2]; \
    s[2] = s[1]; \
    s[1] = s[0]; \
    s[0] = t1 + t2; \
  }

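/* the SHA-512 family shares this structure but uses 64-bit words, 128-byte
 * blocks and 80 rounds; SHA-384, SHA-512/224 and SHA-512/256 are SHA-512
 * with different initial hash values and a truncated digest */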
#define SHA512_224_DIGEST_SIZE 28
#define SHA512_224_BLOCK_SIZE  128

#define SHA512_256_DIGEST_SIZE 32
#define SHA512_256_BLOCK_SIZE  128

#define SHA384_DIGEST_SIZE 48
#define SHA384_BLOCK_SIZE  128

#define SHA512_DIGEST_SIZE 64
#define SHA512_BLOCK_SIZE  128
#define SHA512_ROTR(x, y)  ((x >> y) | (x << (64 - y)))
#define SHA512_CH(a, b, c) ((a & b) ^ (~a & c))
#define SHA512_MAJ(a, b, c) ((a & b) ^ (a & c) ^ (b & c))
#define SHA512_CSIGMA0(x) \
  (SHA512_ROTR (x, 28) ^ SHA512_ROTR (x, 34) ^ SHA512_ROTR (x, 39))
#define SHA512_CSIGMA1(x) \
  (SHA512_ROTR (x, 14) ^ SHA512_ROTR (x, 18) ^ SHA512_ROTR (x, 41))
#define SHA512_SSIGMA0(x) (SHA512_ROTR (x, 1) ^ SHA512_ROTR (x, 8) ^ (x >> 7))
#define SHA512_SSIGMA1(x) \
  (SHA512_ROTR (x, 19) ^ SHA512_ROTR (x, 61) ^ (x >> 6))

#define SHA512_MSG_SCHED(w, j) \
  { \
    w[j] = w[j - 7] + w[j - 16]; \
    w[j] += SHA512_SSIGMA0 (w[j - 15]); \
    w[j] += SHA512_SSIGMA1 (w[j - 2]); \
  }

#define SHA512_TRANSFORM(s, w, i, k) \
  { \
    __typeof__ (s[0]) t1, t2; \
    t1 = k + w[i] + s[7]; \
    t1 += SHA512_CSIGMA1 (s[4]); \
    t1 += SHA512_CH (s[4], s[5], s[6]); \
    t2 = SHA512_CSIGMA0 (s[0]); \
    t2 += SHA512_MAJ (s[0], s[1], s[2]); \
    s[7] = s[6]; \
    s[6] = s[5]; \
    s[5] = s[4]; \
    s[4] = s[3] + t1; \
    s[3] = s[2]; \
    s[2] = s[1]; \
    s[1] = s[0]; \
    s[0] = t1 + t2; \
  }

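/* hardware-assisted SHA-256: SHA-NI on x86-64 (__SHA__) or the ARMv8
 * crypto extensions (__ARM_FEATURE_SHA2); without either, the portable
 * scalar path in clib_sha256_block () below is used */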
#if defined(__SHA__) && defined(__x86_64__)
#define CLIB_SHA256_ISA_INTEL
#define CLIB_SHA256_ISA
#endif

#ifdef __ARM_FEATURE_SHA2
#define CLIB_SHA256_ISA_ARM
#define CLIB_SHA256_ISA
#endif

static const u32 sha224_h[8] = { 0xc1059ed8, 0x367cd507, 0x3070dd17,
                                 0xf70e5939, 0xffc00b31, 0x68581511,
                                 0x64f98fa7, 0xbefa4fa4 };

static const u32 sha256_h[8] = { 0x6a09e667, 0xbb67ae85, 0x3c6ef372,
                                 0xa54ff53a, 0x510e527f, 0x9b05688c,
                                 0x1f83d9ab, 0x5be0cd19 };

static const u32 sha256_k[64] = {
  0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1,
  0x923f82a4, 0xab1c5ed5, 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3,
  0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, 0xe49b69c1, 0xefbe4786,
  0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
  0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147,
  0x06ca6351, 0x14292967, 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13,
  0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, 0xa2bfe8a1, 0xa81a664b,
  0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
  0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a,
  0x5b9cca4f, 0x682e6ff3, 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208,
  0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
};

static const u64 sha384_h[8] = { 0xcbbb9d5dc1059ed8, 0x629a292a367cd507,
                                 0x9159015a3070dd17, 0x152fecd8f70e5939,
                                 0x67332667ffc00b31, 0x8eb44a8768581511,
                                 0xdb0c2e0d64f98fa7, 0x47b5481dbefa4fa4 };

static const u64 sha512_h[8] = { 0x6a09e667f3bcc908, 0xbb67ae8584caa73b,
                                 0x3c6ef372fe94f82b, 0xa54ff53a5f1d36f1,
                                 0x510e527fade682d1, 0x9b05688c2b3e6c1f,
                                 0x1f83d9abfb41bd6b, 0x5be0cd19137e2179 };

static const u64 sha512_224_h[8] = { 0x8c3d37c819544da2, 0x73e1996689dcd4d6,
                                     0x1dfab7ae32ff9c82, 0x679dd514582f9fcf,
                                     0x0f6d2b697bd44da8, 0x77e36f7304c48942,
                                     0x3f9d85a86a1d36c8, 0x1112e6ad91d692a1 };

static const u64 sha512_256_h[8] = { 0x22312194fc2bf72c, 0x9f555fa3c84c64c2,
                                     0x2393b86b6f53b151, 0x963877195940eabd,
                                     0x96283ee2a88effe3, 0xbe5e1e2553863992,
                                     0x2b0199fc2c85b8aa, 0x0eb72ddc81c52ca2 };

static const u64 sha512_k[80] = {
  0x428a2f98d728ae22, 0x7137449123ef65cd, 0xb5c0fbcfec4d3b2f,
  0xe9b5dba58189dbbc, 0x3956c25bf348b538, 0x59f111f1b605d019,
  0x923f82a4af194f9b, 0xab1c5ed5da6d8118, 0xd807aa98a3030242,
  0x12835b0145706fbe, 0x243185be4ee4b28c, 0x550c7dc3d5ffb4e2,
  0x72be5d74f27b896f, 0x80deb1fe3b1696b1, 0x9bdc06a725c71235,
  0xc19bf174cf692694, 0xe49b69c19ef14ad2, 0xefbe4786384f25e3,
  0x0fc19dc68b8cd5b5, 0x240ca1cc77ac9c65, 0x2de92c6f592b0275,
  0x4a7484aa6ea6e483, 0x5cb0a9dcbd41fbd4, 0x76f988da831153b5,
  0x983e5152ee66dfab, 0xa831c66d2db43210, 0xb00327c898fb213f,
  0xbf597fc7beef0ee4, 0xc6e00bf33da88fc2, 0xd5a79147930aa725,
  0x06ca6351e003826f, 0x142929670a0e6e70, 0x27b70a8546d22ffc,
  0x2e1b21385c26c926, 0x4d2c6dfc5ac42aed, 0x53380d139d95b3df,
  0x650a73548baf63de, 0x766a0abb3c77b2a8, 0x81c2c92e47edaee6,
  0x92722c851482353b, 0xa2bfe8a14cf10364, 0xa81a664bbc423001,
  0xc24b8b70d0f89791, 0xc76c51a30654be30, 0xd192e819d6ef5218,
  0xd69906245565a910, 0xf40e35855771202a, 0x106aa07032bbd1b8,
  0x19a4c116b8d2d0c8, 0x1e376c085141ab53, 0x2748774cdf8eeb99,
  0x34b0bcb5e19b48a8, 0x391c0cb3c5c95a63, 0x4ed8aa4ae3418acb,
  0x5b9cca4f7763e373, 0x682e6ff3d6b2b8a3, 0x748f82ee5defb2fc,
  0x78a5636f43172f60, 0x84c87814a1f0ab72, 0x8cc702081a6439ec,
  0x90befffa23631e28, 0xa4506cebde82bde9, 0xbef9a3f7b2c67915,
  0xc67178f2e372532b, 0xca273eceea26619c, 0xd186b8c721c0c207,
  0xeada7dd6cde0eb1e, 0xf57d4f7fee6ed178, 0x06f067aa72176fba,
  0x0a637dc5a2c898a6, 0x113f9804bef90dae, 0x1b710b35131c471b,
  0x28db77f523047d84, 0x32caab7b40c72493, 0x3c9ebe0a15c9bebc,
  0x431d67c49c100d4c, 0x4cc5d4becb3e42b6, 0x597f299cfc657e2a,
  0x5fcb6fab3ad6faec, 0x6c44198c4a475817
};

typedef enum
{
  CLIB_SHA2_224,
  CLIB_SHA2_256,
  CLIB_SHA2_384,
  CLIB_SHA2_512,
  CLIB_SHA2_512_224,
  CLIB_SHA2_512_256,
} clib_sha2_type_t;

#define SHA2_MAX_BLOCK_SIZE  SHA512_BLOCK_SIZE
#define SHA2_MAX_DIGEST_SIZE SHA512_DIGEST_SIZE

typedef struct
{
  u64 total_bytes;
  u16 n_pending;
  u8 block_size;
  u8 digest_size;
  union
  {
    u32 h32[8];
    u64 h64[8];
#ifdef CLIB_SHA256_ISA
    u32x4 h32x4[2];
#endif
  };
  union
  {
    u8 as_u8[SHA2_MAX_BLOCK_SIZE];
    u64 as_u64[SHA2_MAX_BLOCK_SIZE / sizeof (u64)];
    uword as_uword[SHA2_MAX_BLOCK_SIZE / sizeof (uword)];
  } pending;
} clib_sha2_ctx_t;

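/* streaming usage sketch (buffer names are illustrative):
 *
 *   clib_sha2_ctx_t ctx;
 *   u8 digest[SHA2_MAX_DIGEST_SIZE];
 *   clib_sha2_init (&ctx, CLIB_SHA2_256);
 *   clib_sha2_update (&ctx, part1, part1_len);
 *   clib_sha2_update (&ctx, part2, part2_len);
 *   clib_sha2_final (&ctx, digest);
 */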
static_always_inline void
clib_sha2_init (clib_sha2_ctx_t *ctx, clib_sha2_type_t type)
{
  const u32 *h32 = 0;
  const u64 *h64 = 0;

  ctx->total_bytes = 0;
  ctx->n_pending = 0;

  switch (type)
    {
    case CLIB_SHA2_224:
      h32 = sha224_h;
      ctx->block_size = SHA224_BLOCK_SIZE;
      ctx->digest_size = SHA224_DIGEST_SIZE;
      break;
    case CLIB_SHA2_256:
      h32 = sha256_h;
      ctx->block_size = SHA256_BLOCK_SIZE;
      ctx->digest_size = SHA256_DIGEST_SIZE;
      break;
    case CLIB_SHA2_384:
      h64 = sha384_h;
      ctx->block_size = SHA384_BLOCK_SIZE;
      ctx->digest_size = SHA384_DIGEST_SIZE;
      break;
    case CLIB_SHA2_512:
      h64 = sha512_h;
      ctx->block_size = SHA512_BLOCK_SIZE;
      ctx->digest_size = SHA512_DIGEST_SIZE;
      break;
    case CLIB_SHA2_512_224:
      h64 = sha512_224_h;
      ctx->block_size = SHA512_224_BLOCK_SIZE;
      ctx->digest_size = SHA512_224_DIGEST_SIZE;
      break;
    case CLIB_SHA2_512_256:
      h64 = sha512_256_h;
      ctx->block_size = SHA512_256_BLOCK_SIZE;
      ctx->digest_size = SHA512_256_DIGEST_SIZE;
      break;
    }
  if (h32)
    for (int i = 0; i < 8; i++)
      ctx->h32[i] = h32[i];

  if (h64)
    for (int i = 0; i < 8; i++)
      ctx->h64[i] = h64[i];
}

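/* vector helpers for the hardware-assisted path: w[] holds the message
 * schedule as four u32x4 chunks; clib_sha256_vec_cycle_w () advances the
 * schedule by four words in place and clib_sha256_vec_4_rounds () runs four
 * compression rounds against round constants sha256_k[4 * n .. 4 * n + 3] */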
#ifdef CLIB_SHA256_ISA
static inline void
clib_sha256_vec_cycle_w (u32x4 w[], u8 i)
{
  u8 j = (i + 1) % 4;
  u8 k = (i + 2) % 4;
  u8 l = (i + 3) % 4;
#ifdef CLIB_SHA256_ISA_INTEL
  w[i] = (u32x4) _mm_sha256msg1_epu32 ((__m128i) w[i], (__m128i) w[j]);
  w[i] += (u32x4) _mm_alignr_epi8 ((__m128i) w[l], (__m128i) w[k], 4);
  w[i] = (u32x4) _mm_sha256msg2_epu32 ((__m128i) w[i], (__m128i) w[l]);
#elif defined(CLIB_SHA256_ISA_ARM)
  w[i] = vsha256su1q_u32 (vsha256su0q_u32 (w[i], w[j]), w[k], w[l]);
#endif
}

static inline void
clib_sha256_vec_4_rounds (u32x4 w, u8 n, u32x4 s[])
{
#ifdef CLIB_SHA256_ISA_INTEL
  u32x4 r = *(u32x4 *) (sha256_k + 4 * n) + w;
  s[0] = (u32x4) _mm_sha256rnds2_epu32 ((__m128i) s[0], (__m128i) s[1],
                                        (__m128i) r);
  r = (u32x4) u64x2_interleave_hi ((u64x2) r, (u64x2) r);
  s[1] = (u32x4) _mm_sha256rnds2_epu32 ((__m128i) s[1], (__m128i) s[0],
                                        (__m128i) r);
#elif defined(CLIB_SHA256_ISA_ARM)
  u32x4 r0, s0;
  const u32x4u *k = (u32x4u *) sha256_k;

  r0 = w + k[n];
  s0 = s[0];
  s[0] = vsha256hq_u32 (s[0], s[1], r0);
  s[1] = vsha256h2q_u32 (s[1], s0, r0);
#endif
}
#endif

#if defined(CLIB_SHA256_ISA)
static inline u32x4
clib_sha256_vec_load (u32x4 r)
{
#if defined(CLIB_SHA256_ISA_INTEL)
  return u32x4_byte_swap (r);
#elif defined(CLIB_SHA256_ISA_ARM)
  return vreinterpretq_u32_u8 (vrev32q_u8 (vreinterpretq_u8_u32 (r)));
#endif
}

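/* Intel SHA-NI expects the eight state words split across two registers in
 * ABEF/CDGH order instead of the natural ABCD/EFGH, so the state is
 * shuffled into that order before the rounds and back afterwards; the ARM
 * instructions use the natural layout, making this a no-op there */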
static inline void
clib_sha256_vec_shuffle (u32x4 d[2])
{
#if defined(CLIB_SHA256_ISA_INTEL)
  /* {0, 1, 2, 3}, {4, 5, 6, 7} -> {7, 6, 3, 2}, {5, 4, 1, 0} */
  u32x4 r;
  r = (u32x4) _mm_shuffle_ps ((__m128) d[1], (__m128) d[0], 0xbb);
  d[1] = (u32x4) _mm_shuffle_ps ((__m128) d[1], (__m128) d[0], 0x11);
  d[0] = r;
#endif
}
#endif

static inline void
clib_sha256_block (clib_sha2_ctx_t *ctx, const u8 *msg, uword n_blocks)
{
#if defined(CLIB_SHA256_ISA)
  u32x4 h[2];
  u32x4u *m = (u32x4u *) msg;

  h[0] = ctx->h32x4[0];
  h[1] = ctx->h32x4[1];

  clib_sha256_vec_shuffle (h);

  for (; n_blocks; m += 4, n_blocks--)
    {
      u32x4 s[2], w[4];

      s[0] = h[0];
      s[1] = h[1];

      w[0] = clib_sha256_vec_load (m[0]);
      w[1] = clib_sha256_vec_load (m[1]);
      w[2] = clib_sha256_vec_load (m[2]);
      w[3] = clib_sha256_vec_load (m[3]);

      clib_sha256_vec_4_rounds (w[0], 0, s);
      clib_sha256_vec_4_rounds (w[1], 1, s);
      clib_sha256_vec_4_rounds (w[2], 2, s);
      clib_sha256_vec_4_rounds (w[3], 3, s);

      clib_sha256_vec_cycle_w (w, 0);
      clib_sha256_vec_4_rounds (w[0], 4, s);
      clib_sha256_vec_cycle_w (w, 1);
      clib_sha256_vec_4_rounds (w[1], 5, s);
      clib_sha256_vec_cycle_w (w, 2);
      clib_sha256_vec_4_rounds (w[2], 6, s);
      clib_sha256_vec_cycle_w (w, 3);
      clib_sha256_vec_4_rounds (w[3], 7, s);

      clib_sha256_vec_cycle_w (w, 0);
      clib_sha256_vec_4_rounds (w[0], 8, s);
      clib_sha256_vec_cycle_w (w, 1);
      clib_sha256_vec_4_rounds (w[1], 9, s);
      clib_sha256_vec_cycle_w (w, 2);
      clib_sha256_vec_4_rounds (w[2], 10, s);
      clib_sha256_vec_cycle_w (w, 3);
      clib_sha256_vec_4_rounds (w[3], 11, s);

      clib_sha256_vec_cycle_w (w, 0);
      clib_sha256_vec_4_rounds (w[0], 12, s);
      clib_sha256_vec_cycle_w (w, 1);
      clib_sha256_vec_4_rounds (w[1], 13, s);
      clib_sha256_vec_cycle_w (w, 2);
      clib_sha256_vec_4_rounds (w[2], 14, s);
      clib_sha256_vec_cycle_w (w, 3);
      clib_sha256_vec_4_rounds (w[3], 15, s);

      h[0] += s[0];
      h[1] += s[1];
    }

  clib_sha256_vec_shuffle (h);

  ctx->h32x4[0] = h[0];
  ctx->h32x4[1] = h[1];
#else
  u32 w[64], s[8], i;

  while (n_blocks)
    {
      for (i = 0; i < 8; i++)
        s[i] = ctx->h32[i];

      for (i = 0; i < 16; i++)
        {
          w[i] = clib_net_to_host_u32 (*((u32 *) msg + i));
          SHA256_TRANSFORM (s, w, i, sha256_k[i]);
        }

      for (i = 16; i < 64; i++)
        {
          SHA256_MSG_SCHED (w, i);
          SHA256_TRANSFORM (s, w, i, sha256_k[i]);
        }

      for (i = 0; i < 8; i++)
        ctx->h32[i] += s[i];

      /* next */
      msg += SHA256_BLOCK_SIZE;
      n_blocks--;
    }
#endif
}

static_always_inline void
clib_sha512_block (clib_sha2_ctx_t *ctx, const u8 *msg, uword n_blocks)
{
  u64 w[80], s[8], i;

  while (n_blocks)
    {
      for (i = 0; i < 8; i++)
        s[i] = ctx->h64[i];

      for (i = 0; i < 16; i++)
        {
          w[i] = clib_net_to_host_u64 (*((u64 *) msg + i));
          SHA512_TRANSFORM (s, w, i, sha512_k[i]);
        }

      for (i = 16; i < 80; i++)
        {
          SHA512_MSG_SCHED (w, i);
          SHA512_TRANSFORM (s, w, i, sha512_k[i]);
        }

      for (i = 0; i < 8; i++)
        ctx->h64[i] += s[i];

      /* next */
      msg += SHA512_BLOCK_SIZE;
      n_blocks--;
    }
}

static_always_inline void
clib_sha2_update (clib_sha2_ctx_t *ctx, const u8 *msg, uword n_bytes)
{
  uword n_blocks;
  if (ctx->n_pending)
    {
      uword n_left = ctx->block_size - ctx->n_pending;
      if (n_bytes < n_left)
        {
          clib_memcpy_fast (ctx->pending.as_u8 + ctx->n_pending, msg,
                            n_bytes);
          ctx->n_pending += n_bytes;
          return;
        }
      else
        {
          clib_memcpy_fast (ctx->pending.as_u8 + ctx->n_pending, msg, n_left);
          if (ctx->block_size == SHA512_BLOCK_SIZE)
            clib_sha512_block (ctx, ctx->pending.as_u8, 1);
          else
            clib_sha256_block (ctx, ctx->pending.as_u8, 1);
          ctx->n_pending = 0;
          ctx->total_bytes += ctx->block_size;
          n_bytes -= n_left;
          msg += n_left;
        }
    }

  if ((n_blocks = n_bytes / ctx->block_size))
    {
      if (ctx->block_size == SHA512_BLOCK_SIZE)
        clib_sha512_block (ctx, msg, n_blocks);
      else
        clib_sha256_block (ctx, msg, n_blocks);
      n_bytes -= n_blocks * ctx->block_size;
      msg += n_blocks * ctx->block_size;
      ctx->total_bytes += n_blocks * ctx->block_size;
    }

  if (n_bytes)
    {
      clib_memset_u8 (ctx->pending.as_u8, 0, ctx->block_size);
      clib_memcpy_fast (ctx->pending.as_u8, msg, n_bytes);
      ctx->n_pending = n_bytes;
    }
  else
    ctx->n_pending = 0;
}

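/* finalization appends FIPS 180-4 padding: a single 0x80 byte, zero fill,
 * and the message length in bits stored big-endian at the end of the last
 * block, then writes out the big-endian (possibly truncated) state */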
static_always_inline void
clib_sha2_final (clib_sha2_ctx_t *ctx, u8 *digest)
{
  int i;

  ctx->total_bytes += ctx->n_pending;
  if (ctx->n_pending == 0)
    {
      clib_memset (ctx->pending.as_u8, 0, ctx->block_size);
      ctx->pending.as_u8[0] = 0x80;
    }
  /* the SHA-512 family stores the length as 128 bits, the SHA-256 family as
     64 bits; if the 0x80 byte plus the length field do not fit, flush this
     block and pad an extra one */
  else if (ctx->n_pending + sizeof (u8) + sizeof (u64) +
             (ctx->block_size == SHA512_BLOCK_SIZE ? sizeof (u64) : 0) >
           ctx->block_size)
    {
      ctx->pending.as_u8[ctx->n_pending] = 0x80;
      if (ctx->block_size == SHA512_BLOCK_SIZE)
        clib_sha512_block (ctx, ctx->pending.as_u8, 1);
      else
        clib_sha256_block (ctx, ctx->pending.as_u8, 1);
      clib_memset (ctx->pending.as_u8, 0, ctx->block_size);
    }
  else
    ctx->pending.as_u8[ctx->n_pending] = 0x80;

  ctx->pending.as_u64[ctx->block_size / 8 - 1] =
    clib_net_to_host_u64 (ctx->total_bytes * 8);
  if (ctx->block_size == SHA512_BLOCK_SIZE)
    clib_sha512_block (ctx, ctx->pending.as_u8, 1);
  else
    clib_sha256_block (ctx, ctx->pending.as_u8, 1);

  if (ctx->block_size == SHA512_BLOCK_SIZE)
    {
      for (i = 0; i < ctx->digest_size / sizeof (u64); i++)
        *((u64 *) digest + i) = clib_net_to_host_u64 (ctx->h64[i]);

      /* sha512-224 case - write half of u64 */
      if (i * sizeof (u64) < ctx->digest_size)
        *((u32 *) digest + 2 * i) = clib_net_to_host_u32 (ctx->h64[i] >> 32);
    }
  else
    for (i = 0; i < ctx->digest_size / sizeof (u32); i++)
      *((u32 *) digest + i) = clib_net_to_host_u32 (ctx->h32[i]);
}

static_always_inline void
clib_sha2 (clib_sha2_type_t type, const u8 *msg, uword len, u8 *digest)
{
  clib_sha2_ctx_t ctx;
  clib_sha2_init (&ctx, type);
  clib_sha2_update (&ctx, msg, len);
  clib_sha2_final (&ctx, digest);
}

#define clib_sha224(...)     clib_sha2 (CLIB_SHA2_224, __VA_ARGS__)
#define clib_sha256(...)     clib_sha2 (CLIB_SHA2_256, __VA_ARGS__)
#define clib_sha384(...)     clib_sha2 (CLIB_SHA2_384, __VA_ARGS__)
#define clib_sha512(...)     clib_sha2 (CLIB_SHA2_512, __VA_ARGS__)
#define clib_sha512_224(...) clib_sha2 (CLIB_SHA2_512_224, __VA_ARGS__)
#define clib_sha512_256(...) clib_sha2 (CLIB_SHA2_512_256, __VA_ARGS__)
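
/* one-shot usage sketch (illustrative):
 *
 *   u8 digest[SHA256_DIGEST_SIZE];
 *   clib_sha256 ((const u8 *) "abc", 3, digest);
 */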

/* RFC 2104 HMAC: H ((key ^ opad) || H ((key ^ ipad) || msg)) */
static_always_inline void
clib_hmac_sha2 (clib_sha2_type_t type, const u8 *key, uword key_len,
                const u8 *msg, uword len, u8 *digest)
{
  clib_sha2_ctx_t _ctx, *ctx = &_ctx;
  uword key_data[SHA2_MAX_BLOCK_SIZE / sizeof (uword)];
  u8 i_digest[SHA2_MAX_DIGEST_SIZE];
  int i, n_words;

  clib_sha2_init (ctx, type);
  n_words = ctx->block_size / sizeof (uword);

  /* key */
  if (key_len > ctx->block_size)
    {
      /* key is longer than block - use the hash of the key instead */
      clib_sha2_update (ctx, key, key_len);
      /* zero the whole block; clib_sha2_final () below overwrites the
         leading digest_size bytes with the key hash */
      for (i = 0; i < n_words; i++)
        key_data[i] = 0;
      clib_sha2_final (ctx, (u8 *) key_data);
      clib_sha2_init (ctx, type);
    }
  else
    {
      for (i = 0; i < n_words; i++)
        key_data[i] = 0;
      clib_memcpy_fast (key_data, key, key_len);
    }

  /* ipad */
  for (i = 0; i < n_words; i++)
    ctx->pending.as_uword[i] = key_data[i] ^ (uword) 0x3636363636363636;
  if (ctx->block_size == SHA512_BLOCK_SIZE)
    clib_sha512_block (ctx, ctx->pending.as_u8, 1);
  else
    clib_sha256_block (ctx, ctx->pending.as_u8, 1);
  ctx->total_bytes += ctx->block_size;

  /* message */
  clib_sha2_update (ctx, msg, len);
  clib_sha2_final (ctx, i_digest);

  /* opad */
  clib_sha2_init (ctx, type);
  for (i = 0; i < n_words; i++)
    ctx->pending.as_uword[i] = key_data[i] ^ (uword) 0x5c5c5c5c5c5c5c5c;
  if (ctx->block_size == SHA512_BLOCK_SIZE)
    clib_sha512_block (ctx, ctx->pending.as_u8, 1);
  else
    clib_sha256_block (ctx, ctx->pending.as_u8, 1);
  ctx->total_bytes += ctx->block_size;

  /* digest */
  clib_sha2_update (ctx, i_digest, ctx->digest_size);
  clib_sha2_final (ctx, digest);
}

#define clib_hmac_sha224(...) clib_hmac_sha2 (CLIB_SHA2_224, __VA_ARGS__)
#define clib_hmac_sha256(...) clib_hmac_sha2 (CLIB_SHA2_256, __VA_ARGS__)
#define clib_hmac_sha384(...) clib_hmac_sha2 (CLIB_SHA2_384, __VA_ARGS__)
#define clib_hmac_sha512(...) clib_hmac_sha2 (CLIB_SHA2_512, __VA_ARGS__)
#define clib_hmac_sha512_224(...) \
  clib_hmac_sha2 (CLIB_SHA2_512_224, __VA_ARGS__)
#define clib_hmac_sha512_256(...) \
  clib_hmac_sha2 (CLIB_SHA2_512_256, __VA_ARGS__)
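
/* HMAC usage sketch (key/msg buffers are illustrative):
 *
 *   u8 mac[SHA256_DIGEST_SIZE];
 *   clib_hmac_sha256 (key, key_len, msg, msg_len, mac);
 */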

#endif /* included_sha2_h */