/*
 *------------------------------------------------------------------
 * Copyright (c) 2020 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *------------------------------------------------------------------
 */

#ifndef __aesni_h__
#define __aesni_h__

typedef enum
{
  AES_KEY_128 = 0,
  AES_KEY_192 = 1,
  AES_KEY_256 = 2,
} aes_key_size_t;

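/* AES-128/192/256 use 10/12/14 rounds and 16/24/32-byte keys, so the enum
   values 0/1/2 feed directly into the two formulas below. */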
#define AES_KEY_ROUNDS(x) (10 + (x) * 2)
#define AES_KEY_BYTES(x)  (16 + (x) * 8)

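/* Load one 16-byte AES state block; the u8x16u vector type allows p to be
   unaligned. */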
static_always_inline u8x16
aes_block_load (u8 * p)
{
  return *(u8x16u *) p;
}

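/* One full encryption round.  x86 AESENC performs ShiftRows, SubBytes,
   MixColumns and AddRoundKey in that order; Arm AESE instead starts with
   AddRoundKey, so it is issued with an all-zero key and the real round key
   is XORed in after AESMC to match the x86 semantics. */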
static_always_inline u8x16
aes_enc_round (u8x16 a, u8x16 k)
{
#if defined (__AES__)
  return (u8x16) _mm_aesenc_si128 ((__m128i) a, (__m128i) k);
#elif defined (__ARM_FEATURE_CRYPTO)
  return vaesmcq_u8 (vaeseq_u8 (a, u8x16_splat (0))) ^ k;
#endif
}

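/* With VAES, the _x4 (512-bit) and _x2 (256-bit) variants below run four or
   two independent 128-bit AES rounds in parallel, one per 128-bit lane. */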
#if defined(__VAES__) && defined(__AVX512F__)
static_always_inline u8x64
aes_enc_round_x4 (u8x64 a, u8x64 k)
{
  return (u8x64) _mm512_aesenc_epi128 ((__m512i) a, (__m512i) k);
}

static_always_inline u8x64
aes_enc_last_round_x4 (u8x64 a, u8x64 k)
{
  return (u8x64) _mm512_aesenclast_epi128 ((__m512i) a, (__m512i) k);
}

static_always_inline u8x64
aes_dec_round_x4 (u8x64 a, u8x64 k)
{
  return (u8x64) _mm512_aesdec_epi128 ((__m512i) a, (__m512i) k);
}

static_always_inline u8x64
aes_dec_last_round_x4 (u8x64 a, u8x64 k)
{
  return (u8x64) _mm512_aesdeclast_epi128 ((__m512i) a, (__m512i) k);
}
#endif

#ifdef __VAES__
static_always_inline u8x32
aes_enc_round_x2 (u8x32 a, u8x32 k)
{
  return (u8x32) _mm256_aesenc_epi128 ((__m256i) a, (__m256i) k);
}

static_always_inline u8x32
aes_enc_last_round_x2 (u8x32 a, u8x32 k)
{
  return (u8x32) _mm256_aesenclast_epi128 ((__m256i) a, (__m256i) k);
}

static_always_inline u8x32
aes_dec_round_x2 (u8x32 a, u8x32 k)
{
  return (u8x32) _mm256_aesdec_epi128 ((__m256i) a, (__m256i) k);
}

static_always_inline u8x32
aes_dec_last_round_x2 (u8x32 a, u8x32 k)
{
  return (u8x32) _mm256_aesdeclast_epi128 ((__m256i) a, (__m256i) k);
}
#endif

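/* Final encryption round: same as aes_enc_round but without MixColumns
   (AESENCLAST on x86, AESE followed by a plain round-key XOR on Arm). */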
static_always_inline u8x16
aes_enc_last_round (u8x16 a, u8x16 k)
{
#if defined (__AES__)
  return (u8x16) _mm_aesenclast_si128 ((__m128i) a, (__m128i) k);
#elif defined (__ARM_FEATURE_CRYPTO)
  return vaeseq_u8 (a, u8x16_splat (0)) ^ k;
#endif
}

#ifdef __x86_64__

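/* Decryption rounds (provided only for x86 here).  AESDEC / AESDECLAST
   implement the "equivalent inverse cipher", so they expect the round keys
   produced by aes_key_enc_to_dec below. */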
static_always_inline u8x16
aes_dec_round (u8x16 a, u8x16 k)
{
  return (u8x16) _mm_aesdec_si128 ((__m128i) a, (__m128i) k);
}

static_always_inline u8x16
aes_dec_last_round (u8x16 a, u8x16 k)
{
  return (u8x16) _mm_aesdeclast_si128 ((__m128i) a, (__m128i) k);
}
#endif

static_always_inline void
aes_block_store (u8 * p, u8x16 r)
{
  *(u8x16u *) p = r;
}

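/* Encrypt a single block: initial AddRoundKey, then rounds - 1 full rounds
   and one final round.  round_keys must hold AES_KEY_ROUNDS (ks) + 1
   entries. */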
static_always_inline u8x16
aes_encrypt_block (u8x16 block, const u8x16 * round_keys, aes_key_size_t ks)
{
  int rounds = AES_KEY_ROUNDS (ks);
  block ^= round_keys[0];
  for (int i = 1; i < rounds; i += 1)
    block = aes_enc_round (block, round_keys[i]);
  return aes_enc_last_round (block, round_keys[rounds]);
}

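/* InvMixColumns on a single round key (AESIMC / vaesimcq_u8), used when
   converting an encryption key schedule into a decryption one. */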
static_always_inline u8x16
aes_inv_mix_column (u8x16 a)
{
#if defined (__AES__)
  return (u8x16) _mm_aesimc_si128 ((__m128i) a);
#elif defined (__ARM_FEATURE_CRYPTO)
  return vaesimcq_u8 (a);
#endif
}

#ifdef __x86_64__
#define aes_keygen_assist(a, b) \
  (u8x16) _mm_aeskeygenassist_si128((__m128i) a, b)

/* AES-NI based AES key expansion based on code samples from
   Intel(r) Advanced Encryption Standard (AES) New Instructions White Paper
   (323641-001) */

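/* _mm_aeskeygenassist_si128 computes SubWord () and SubWord (RotWord ()) ^
   rcon of selected words of its input; the *_key_assist helpers pick the
   needed 32-bit lane with a shuffle and fold in the previous round key via
   three word shifts (a running XOR), implementing the FIPS-197 key schedule
   recurrence. */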
static_always_inline void
aes128_key_assist (u8x16 * rk, u8x16 r)
{
  u8x16 t = rk[-1];
  t ^= u8x16_word_shift_left (t, 4);
  t ^= u8x16_word_shift_left (t, 4);
  t ^= u8x16_word_shift_left (t, 4);
  rk[0] = t ^ (u8x16) u32x4_shuffle ((u32x4) r, 3, 3, 3, 3);
}

static_always_inline void
aes128_key_expand (u8x16 *rk, u8x16u const *k)
{
  rk[0] = k[0];
  aes128_key_assist (rk + 1, aes_keygen_assist (rk[0], 0x01));
  aes128_key_assist (rk + 2, aes_keygen_assist (rk[1], 0x02));
  aes128_key_assist (rk + 3, aes_keygen_assist (rk[2], 0x04));
  aes128_key_assist (rk + 4, aes_keygen_assist (rk[3], 0x08));
  aes128_key_assist (rk + 5, aes_keygen_assist (rk[4], 0x10));
  aes128_key_assist (rk + 6, aes_keygen_assist (rk[5], 0x20));
  aes128_key_assist (rk + 7, aes_keygen_assist (rk[6], 0x40));
  aes128_key_assist (rk + 8, aes_keygen_assist (rk[7], 0x80));
  aes128_key_assist (rk + 9, aes_keygen_assist (rk[8], 0x1b));
  aes128_key_assist (rk + 10, aes_keygen_assist (rk[9], 0x36));
}

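/* AES-192 expands six 32-bit words per iteration while round keys are four
   words wide, so new key material straddles 128-bit boundaries; the
   _mm_shuffle_pd calls below stitch 64-bit halves back onto round-key
   boundaries. */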
static_always_inline void
aes192_key_assist (u8x16 * r1, u8x16 * r2, u8x16 key_assist)
{
  u8x16 t;
  r1[0] ^= t = u8x16_word_shift_left (r1[0], 4);
  r1[0] ^= t = u8x16_word_shift_left (t, 4);
  r1[0] ^= u8x16_word_shift_left (t, 4);
  r1[0] ^= (u8x16) _mm_shuffle_epi32 ((__m128i) key_assist, 0x55);
  r2[0] ^= u8x16_word_shift_left (r2[0], 4);
  r2[0] ^= (u8x16) _mm_shuffle_epi32 ((__m128i) r1[0], 0xff);
}

static_always_inline void
aes192_key_expand (u8x16 * rk, u8x16u const *k)
{
  u8x16 r1, r2;

  rk[0] = r1 = k[0];
  /* *INDENT-OFF* */
  rk[1] = r2 = (u8x16) (u64x2) { *(u64 *) (k + 1), 0 };
  /* *INDENT-ON* */

  aes192_key_assist (&r1, &r2, aes_keygen_assist (r2, 0x1));
  rk[1] = (u8x16) _mm_shuffle_pd ((__m128d) rk[1], (__m128d) r1, 0);
  rk[2] = (u8x16) _mm_shuffle_pd ((__m128d) r1, (__m128d) r2, 1);

  aes192_key_assist (&r1, &r2, aes_keygen_assist (r2, 0x2));
  rk[3] = r1;
  rk[4] = r2;

  aes192_key_assist (&r1, &r2, aes_keygen_assist (r2, 0x4));
  rk[4] = (u8x16) _mm_shuffle_pd ((__m128d) rk[4], (__m128d) r1, 0);
  rk[5] = (u8x16) _mm_shuffle_pd ((__m128d) r1, (__m128d) r2, 1);

  aes192_key_assist (&r1, &r2, aes_keygen_assist (r2, 0x8));
  rk[6] = r1;
  rk[7] = r2;

  aes192_key_assist (&r1, &r2, aes_keygen_assist (r2, 0x10));
  rk[7] = (u8x16) _mm_shuffle_pd ((__m128d) rk[7], (__m128d) r1, 0);
  rk[8] = (u8x16) _mm_shuffle_pd ((__m128d) r1, (__m128d) r2, 1);

  aes192_key_assist (&r1, &r2, aes_keygen_assist (r2, 0x20));
  rk[9] = r1;
  rk[10] = r2;

  aes192_key_assist (&r1, &r2, aes_keygen_assist (r2, 0x40));
  rk[10] = (u8x16) _mm_shuffle_pd ((__m128d) rk[10], (__m128d) r1, 0);
  rk[11] = (u8x16) _mm_shuffle_pd ((__m128d) r1, (__m128d) r2, 1);

  aes192_key_assist (&r1, &r2, aes_keygen_assist (r2, 0x80));
  rk[12] = r1;
}

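/* AES-256 generates two round keys per assist step: the first uses
   SubWord (RotWord ()) plus the round constant, the second (skipped for the
   final step, i == 14) uses SubWord () only, per FIPS-197 for Nk = 8. */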
static_always_inline void
aes256_key_assist (u8x16 * rk, int i, u8x16 key_assist)
{
  u8x16 r, t;
  rk += i;
  r = rk[-2];
  r ^= t = u8x16_word_shift_left (r, 4);
  r ^= t = u8x16_word_shift_left (t, 4);
  r ^= u8x16_word_shift_left (t, 4);
  r ^= (u8x16) u32x4_shuffle ((u32x4) key_assist, 3, 3, 3, 3);
  rk[0] = r;

  if (i >= 14)
    return;

  key_assist = aes_keygen_assist (rk[0], 0x0);
  r = rk[-1];
  r ^= t = u8x16_word_shift_left (r, 4);
  r ^= t = u8x16_word_shift_left (t, 4);
  r ^= u8x16_word_shift_left (t, 4);
  r ^= (u8x16) u32x4_shuffle ((u32x4) key_assist, 2, 2, 2, 2);
  rk[1] = r;
}

static_always_inline void
aes256_key_expand (u8x16 * rk, u8x16u const *k)
{
  rk[0] = k[0];
  rk[1] = k[1];
  aes256_key_assist (rk, 2, aes_keygen_assist (rk[1], 0x01));
  aes256_key_assist (rk, 4, aes_keygen_assist (rk[3], 0x02));
  aes256_key_assist (rk, 6, aes_keygen_assist (rk[5], 0x04));
  aes256_key_assist (rk, 8, aes_keygen_assist (rk[7], 0x08));
  aes256_key_assist (rk, 10, aes_keygen_assist (rk[9], 0x10));
  aes256_key_assist (rk, 12, aes_keygen_assist (rk[11], 0x20));
  aes256_key_assist (rk, 14, aes_keygen_assist (rk[13], 0x40));
}
#endif

#ifdef __aarch64__

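/* Arm has no key-generation-assist instruction.  The masks below broadcast
   the (optionally rotated) last word of the previous round key to all four
   columns; with identical columns, the ShiftRows step inside AESE has no
   effect, so AESE with an all-zero key reduces to the SubWord () needed for
   key expansion. */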
static const u8x16 aese_prep_mask1 =
  { 13, 14, 15, 12, 13, 14, 15, 12, 13, 14, 15, 12, 13, 14, 15, 12 };
static const u8x16 aese_prep_mask2 =
  { 12, 13, 14, 15, 12, 13, 14, 15, 12, 13, 14, 15, 12, 13, 14, 15 };

static_always_inline void
aes128_key_expand_round_neon (u8x16 * rk, u32 rcon)
{
  u8x16 r, t, last_round = rk[-1], z = { };
  r = vqtbl1q_u8 (last_round, aese_prep_mask1);
  r = vaeseq_u8 (r, z);
  r ^= (u8x16) vdupq_n_u32 (rcon);
  r ^= last_round;
  r ^= t = vextq_u8 (z, last_round, 12);
  r ^= t = vextq_u8 (z, t, 12);
  r ^= vextq_u8 (z, t, 12);
  rk[0] = r;
}

static_always_inline void
aes128_key_expand (u8x16 *rk, u8x16u const *k)
{
  rk[0] = k[0];
  aes128_key_expand_round_neon (rk + 1, 0x01);
  aes128_key_expand_round_neon (rk + 2, 0x02);
  aes128_key_expand_round_neon (rk + 3, 0x04);
  aes128_key_expand_round_neon (rk + 4, 0x08);
  aes128_key_expand_round_neon (rk + 5, 0x10);
  aes128_key_expand_round_neon (rk + 6, 0x20);
  aes128_key_expand_round_neon (rk + 7, 0x40);
  aes128_key_expand_round_neon (rk + 8, 0x80);
  aes128_key_expand_round_neon (rk + 9, 0x1b);
  aes128_key_expand_round_neon (rk + 10, 0x36);
}

static_always_inline void
aes192_key_expand_round_neon (u8x8 * rk, u32 rcon)
{
  u8x8 r, last_round = rk[-1], z = { };
  u8x16 r2, z2 = { };

  r2 = (u8x16) vdupq_lane_u64 ((uint64x1_t) last_round, 0);
  r2 = vqtbl1q_u8 (r2, aese_prep_mask1);
  r2 = vaeseq_u8 (r2, z2);
  r2 ^= (u8x16) vdupq_n_u32 (rcon);

  r = (u8x8) vdup_laneq_u64 ((u64x2) r2, 0);
  r ^= rk[-3];
  r ^= vext_u8 (z, rk[-3], 4);
  rk[0] = r;

  r = rk[-2] ^ vext_u8 (r, z, 4);
  r ^= vext_u8 (z, r, 4);
  rk[1] = r;

  if (rcon == 0x80)
    return;

  r = rk[-1] ^ vext_u8 (r, z, 4);
  r ^= vext_u8 (z, r, 4);
  rk[2] = r;
}

static_always_inline void
aes192_key_expand (u8x16 * ek, const u8x16u * k)
{
  u8x8 *rk = (u8x8 *) ek;
  ek[0] = k[0];
  rk[2] = *(u8x8u *) (k + 1);
  aes192_key_expand_round_neon (rk + 3, 0x01);
  aes192_key_expand_round_neon (rk + 6, 0x02);
  aes192_key_expand_round_neon (rk + 9, 0x04);
  aes192_key_expand_round_neon (rk + 12, 0x08);
  aes192_key_expand_round_neon (rk + 15, 0x10);
  aes192_key_expand_round_neon (rk + 18, 0x20);
  aes192_key_expand_round_neon (rk + 21, 0x40);
  aes192_key_expand_round_neon (rk + 24, 0x80);
}

static_always_inline void
aes256_key_expand_round_neon (u8x16 * rk, u32 rcon)
{
  u8x16 r, t, z = { };

  r = vqtbl1q_u8 (rk[-1], rcon ? aese_prep_mask1 : aese_prep_mask2);
  r = vaeseq_u8 (r, z);
  if (rcon)
    r ^= (u8x16) vdupq_n_u32 (rcon);
  r ^= rk[-2];
  r ^= t = vextq_u8 (z, rk[-2], 12);
  r ^= t = vextq_u8 (z, t, 12);
  r ^= vextq_u8 (z, t, 12);
  rk[0] = r;
}

static_always_inline void
aes256_key_expand (u8x16 *rk, u8x16u const *k)
{
  rk[0] = k[0];
  rk[1] = k[1];
  aes256_key_expand_round_neon (rk + 2, 0x01);
  aes256_key_expand_round_neon (rk + 3, 0);
  aes256_key_expand_round_neon (rk + 4, 0x02);
  aes256_key_expand_round_neon (rk + 5, 0);
  aes256_key_expand_round_neon (rk + 6, 0x04);
  aes256_key_expand_round_neon (rk + 7, 0);
  aes256_key_expand_round_neon (rk + 8, 0x08);
  aes256_key_expand_round_neon (rk + 9, 0);
  aes256_key_expand_round_neon (rk + 10, 0x10);
  aes256_key_expand_round_neon (rk + 11, 0);
  aes256_key_expand_round_neon (rk + 12, 0x20);
  aes256_key_expand_round_neon (rk + 13, 0);
  aes256_key_expand_round_neon (rk + 14, 0x40);
}

#endif

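/* Expand a raw key into the per-round key schedule.  key_schedule must
   provide AES_KEY_ROUNDS (ks) + 1 entries (11, 13 or 15 u8x16 round keys).
   Illustrative use for one AES-128 block (key, src and dst are
   caller-provided buffers):

     u8x16 rk[11];
     aes_key_expand (rk, key, AES_KEY_128);
     aes_block_store (dst, aes_encrypt_block (aes_block_load (src),
                                              rk, AES_KEY_128));  */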
static_always_inline void
aes_key_expand (u8x16 * key_schedule, u8 const *key, aes_key_size_t ks)
{
  switch (ks)
    {
    case AES_KEY_128:
      aes128_key_expand (key_schedule, (u8x16u const *) key);
      break;
    case AES_KEY_192:
      aes192_key_expand (key_schedule, (u8x16u const *) key);
      break;
    case AES_KEY_256:
      aes256_key_expand (key_schedule, (u8x16u const *) key);
      break;
    }
}

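/* Build the decryption key schedule for the equivalent inverse cipher:
   reverse the order of the encryption round keys and apply InvMixColumns to
   every key except the first and the last. */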
static_always_inline void
aes_key_enc_to_dec (u8x16 * ke, u8x16 * kd, aes_key_size_t ks)
{
  int rounds = AES_KEY_ROUNDS (ks);

  kd[rounds] = ke[0];
  kd[0] = ke[rounds];

  for (int i = 1; i < (rounds / 2); i++)
    {
      kd[rounds - i] = aes_inv_mix_column (ke[i]);
      kd[i] = aes_inv_mix_column (ke[rounds - i]);
    }

  kd[rounds / 2] = aes_inv_mix_column (ke[rounds / 2]);
}

#endif /* __aesni_h__ */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */