/*
 *------------------------------------------------------------------
 * Copyright (c) 2020 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *------------------------------------------------------------------
 */

#ifndef __aesni_h__
#define __aesni_h__

typedef enum
{
  AES_KEY_128 = 0,
  AES_KEY_192 = 1,
  AES_KEY_256 = 2,
} aes_key_size_t;

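/* The macros below map aes_key_size_t (0/1/2) to 10/12/14 rounds and
   16/24/32 key bytes for AES-128/192/256. */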
#define AES_KEY_ROUNDS(x) (10 + (x) * 2)
#define AES_KEY_BYTES(x) (16 + (x) * 8)

static const u8x16 byte_mask_scale = {
  0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
};

static_always_inline u8x16
aes_block_load (u8 * p)
{
  return *(u8x16u *) p;
}

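/* One AES encryption round. x86 aesenc performs ShiftRows/SubBytes/
   MixColumns and XORs the round key last; NEON aese XORs its key first,
   so the NEON path feeds aese a zero key and XORs the real round key
   after the aesmc (MixColumns) step. */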
static_always_inline u8x16
aes_enc_round (u8x16 a, u8x16 k)
{
#if defined (__AES__)
  return (u8x16) _mm_aesenc_si128 ((__m128i) a, (__m128i) k);
#elif defined (__ARM_FEATURE_CRYPTO)
  return vaesmcq_u8 (vaeseq_u8 (a, u8x16_splat (0))) ^ k;
#endif
}

#if defined (__VAES__)
static_always_inline u8x64
aes_enc_round_x4 (u8x64 a, u8x64 k)
{
  return (u8x64) _mm512_aesenc_epi128 ((__m512i) a, (__m512i) k);
}

static_always_inline u8x64
aes_enc_last_round_x4 (u8x64 a, u8x64 k)
{
  return (u8x64) _mm512_aesenclast_epi128 ((__m512i) a, (__m512i) k);
}

static_always_inline u8x64
aes_dec_round_x4 (u8x64 a, u8x64 k)
{
  return (u8x64) _mm512_aesdec_epi128 ((__m512i) a, (__m512i) k);
}

static_always_inline u8x64
aes_dec_last_round_x4 (u8x64 a, u8x64 k)
{
  return (u8x64) _mm512_aesdeclast_epi128 ((__m512i) a, (__m512i) k);
}
#endif

static_always_inline u8x16
aes_enc_last_round (u8x16 a, u8x16 k)
{
#if defined (__AES__)
  return (u8x16) _mm_aesenclast_si128 ((__m128i) a, (__m128i) k);
#elif defined (__ARM_FEATURE_CRYPTO)
  return vaeseq_u8 (a, u8x16_splat (0)) ^ k;
#endif
}

#ifdef __x86_64__

static_always_inline u8x16
aes_dec_round (u8x16 a, u8x16 k)
{
  return (u8x16) _mm_aesdec_si128 ((__m128i) a, (__m128i) k);
}

static_always_inline u8x16
aes_dec_last_round (u8x16 a, u8x16 k)
{
  return (u8x16) _mm_aesdeclast_si128 ((__m128i) a, (__m128i) k);
}
#endif

static_always_inline void
aes_block_store (u8 * p, u8x16 r)
{
  *(u8x16u *) p = r;
}

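/* Zero all but the first n_bytes of x: byte i is kept when i < n_bytes
   (byte_mask_scale holds the constants 0..15). */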
static_always_inline u8x16
aes_byte_mask (u8x16 x, u8 n_bytes)
{
  return x & u8x16_is_greater (u8x16_splat (n_bytes), byte_mask_scale);
}

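/* Load fewer than 16 bytes: a masked load on AVX-512, otherwise a full
   16-byte load (flagged as a deliberate overflow read for the sanitizers)
   masked down afterwards. */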
static_always_inline u8x16
aes_load_partial (u8x16u * p, int n_bytes)
{
  ASSERT (n_bytes <= 16);
#ifdef __AVX512F__
  __m128i zero = { };
  return (u8x16) _mm_mask_loadu_epi8 (zero, (1 << n_bytes) - 1, p);
#else
  return aes_byte_mask (CLIB_MEM_OVERFLOW_LOAD (p), n_bytes);
#endif
}

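/* Store fewer than 16 bytes: plain memcpy on AArch64, a masked store on
   AVX-512, and the byte-masked maskmoveu store as the SSE fallback. */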
static_always_inline void
aes_store_partial (void *p, u8x16 r, int n_bytes)
{
#if __aarch64__
  clib_memcpy_fast (p, &r, n_bytes);
#else
#ifdef __AVX512F__
  _mm_mask_storeu_epi8 (p, (1 << n_bytes) - 1, (__m128i) r);
#else
  u8x16 mask = u8x16_is_greater (u8x16_splat (n_bytes), byte_mask_scale);
  _mm_maskmoveu_si128 ((__m128i) r, (__m128i) mask, p);
#endif
#endif
}

static_always_inline u8x16
aes_encrypt_block (u8x16 block, const u8x16 * round_keys, aes_key_size_t ks)
{
  int rounds = AES_KEY_ROUNDS (ks);
  block ^= round_keys[0];
  for (int i = 1; i < rounds; i += 1)
    block = aes_enc_round (block, round_keys[i]);
  return aes_enc_last_round (block, round_keys[rounds]);
}
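
/* Illustrative usage sketch (the call sequence follows this header's own
   API; the src/dst/key buffer names are hypothetical):

     u8x16 rk[AES_KEY_ROUNDS (AES_KEY_128) + 1];
     aes_key_expand (rk, key, AES_KEY_128);
     aes_block_store (dst, aes_encrypt_block (aes_block_load (src), rk,
					      AES_KEY_128));
 */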

static_always_inline u8x16
aes_inv_mix_column (u8x16 a)
{
#if defined (__AES__)
  return (u8x16) _mm_aesimc_si128 ((__m128i) a);
#elif defined (__ARM_FEATURE_CRYPTO)
  return vaesimcq_u8 (a);
#endif
}

#ifdef __x86_64__
#define aes_keygen_assist(a, b) \
  (u8x16) _mm_aeskeygenassist_si128((__m128i) a, b)

/* AES-NI based AES key expansion based on code samples from
   Intel(r) Advanced Encryption Standard (AES) New Instructions White Paper
   (323641-001) */

static_always_inline void
aes128_key_assist (u8x16 * rk, u8x16 r)
{
  u8x16 t = rk[-1];
  t ^= u8x16_word_shift_left (t, 4);
  t ^= u8x16_word_shift_left (t, 4);
  t ^= u8x16_word_shift_left (t, 4);
  rk[0] = t ^ (u8x16) u32x4_shuffle ((u32x4) r, 3, 3, 3, 3);
}

static_always_inline void
aes128_key_expand (u8x16 *rk, u8x16u const *k)
{
  rk[0] = k[0];
  aes128_key_assist (rk + 1, aes_keygen_assist (rk[0], 0x01));
  aes128_key_assist (rk + 2, aes_keygen_assist (rk[1], 0x02));
  aes128_key_assist (rk + 3, aes_keygen_assist (rk[2], 0x04));
  aes128_key_assist (rk + 4, aes_keygen_assist (rk[3], 0x08));
  aes128_key_assist (rk + 5, aes_keygen_assist (rk[4], 0x10));
  aes128_key_assist (rk + 6, aes_keygen_assist (rk[5], 0x20));
  aes128_key_assist (rk + 7, aes_keygen_assist (rk[6], 0x40));
  aes128_key_assist (rk + 8, aes_keygen_assist (rk[7], 0x80));
  aes128_key_assist (rk + 9, aes_keygen_assist (rk[8], 0x1b));
  aes128_key_assist (rk + 10, aes_keygen_assist (rk[9], 0x36));
}

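/* The AES-192 schedule grows in 192-bit (1.5 vector) steps, so some round
   keys below are stitched together from halves of r1/r2 with shuffles. */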
static_always_inline void
aes192_key_assist (u8x16 * r1, u8x16 * r2, u8x16 key_assist)
{
  u8x16 t;
  r1[0] ^= t = u8x16_word_shift_left (r1[0], 4);
  r1[0] ^= t = u8x16_word_shift_left (t, 4);
  r1[0] ^= u8x16_word_shift_left (t, 4);
  r1[0] ^= (u8x16) _mm_shuffle_epi32 ((__m128i) key_assist, 0x55);
  r2[0] ^= u8x16_word_shift_left (r2[0], 4);
  r2[0] ^= (u8x16) _mm_shuffle_epi32 ((__m128i) r1[0], 0xff);
}

static_always_inline void
aes192_key_expand (u8x16 * rk, u8x16u const *k)
{
  u8x16 r1, r2;

  rk[0] = r1 = k[0];
  /* *INDENT-OFF* */
  rk[1] = r2 = (u8x16) (u64x2) { *(u64 *) (k + 1), 0 };
  /* *INDENT-ON* */

  aes192_key_assist (&r1, &r2, aes_keygen_assist (r2, 0x1));
  rk[1] = (u8x16) _mm_shuffle_pd ((__m128d) rk[1], (__m128d) r1, 0);
  rk[2] = (u8x16) _mm_shuffle_pd ((__m128d) r1, (__m128d) r2, 1);

  aes192_key_assist (&r1, &r2, aes_keygen_assist (r2, 0x2));
  rk[3] = r1;
  rk[4] = r2;

  aes192_key_assist (&r1, &r2, aes_keygen_assist (r2, 0x4));
  rk[4] = (u8x16) _mm_shuffle_pd ((__m128d) rk[4], (__m128d) r1, 0);
  rk[5] = (u8x16) _mm_shuffle_pd ((__m128d) r1, (__m128d) r2, 1);

  aes192_key_assist (&r1, &r2, aes_keygen_assist (r2, 0x8));
  rk[6] = r1;
  rk[7] = r2;

  aes192_key_assist (&r1, &r2, aes_keygen_assist (r2, 0x10));
  rk[7] = (u8x16) _mm_shuffle_pd ((__m128d) rk[7], (__m128d) r1, 0);
  rk[8] = (u8x16) _mm_shuffle_pd ((__m128d) r1, (__m128d) r2, 1);

  aes192_key_assist (&r1, &r2, aes_keygen_assist (r2, 0x20));
  rk[9] = r1;
  rk[10] = r2;

  aes192_key_assist (&r1, &r2, aes_keygen_assist (r2, 0x40));
  rk[10] = (u8x16) _mm_shuffle_pd ((__m128d) rk[10], (__m128d) r1, 0);
  rk[11] = (u8x16) _mm_shuffle_pd ((__m128d) r1, (__m128d) r2, 1);

  aes192_key_assist (&r1, &r2, aes_keygen_assist (r2, 0x80));
  rk[12] = r1;
}

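/* AES-256 derives two round keys per assist step; the second one uses
   aeskeygenassist with rcon 0 and picks dword 2 of the result, i.e.
   SubWord without the rotation, as the key schedule specifies. */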
static_always_inline void
aes256_key_assist (u8x16 * rk, int i, u8x16 key_assist)
{
  u8x16 r, t;
  rk += i;
  r = rk[-2];
  r ^= t = u8x16_word_shift_left (r, 4);
  r ^= t = u8x16_word_shift_left (t, 4);
  r ^= u8x16_word_shift_left (t, 4);
  r ^= (u8x16) u32x4_shuffle ((u32x4) key_assist, 3, 3, 3, 3);
  rk[0] = r;

  if (i >= 14)
    return;

  key_assist = aes_keygen_assist (rk[0], 0x0);
  r = rk[-1];
  r ^= t = u8x16_word_shift_left (r, 4);
  r ^= t = u8x16_word_shift_left (t, 4);
  r ^= u8x16_word_shift_left (t, 4);
  r ^= (u8x16) u32x4_shuffle ((u32x4) key_assist, 2, 2, 2, 2);
  rk[1] = r;
}

static_always_inline void
aes256_key_expand (u8x16 * rk, u8x16u const *k)
{
  rk[0] = k[0];
  rk[1] = k[1];
  aes256_key_assist (rk, 2, aes_keygen_assist (rk[1], 0x01));
  aes256_key_assist (rk, 4, aes_keygen_assist (rk[3], 0x02));
  aes256_key_assist (rk, 6, aes_keygen_assist (rk[5], 0x04));
  aes256_key_assist (rk, 8, aes_keygen_assist (rk[7], 0x08));
  aes256_key_assist (rk, 10, aes_keygen_assist (rk[9], 0x10));
  aes256_key_assist (rk, 12, aes_keygen_assist (rk[11], 0x20));
  aes256_key_assist (rk, 14, aes_keygen_assist (rk[13], 0x40));
}
#endif

#ifdef __aarch64__

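/* NEON has no aeskeygenassist equivalent. These masks broadcast the top
   word of the previous round key (rotated for mask1, unrotated for mask2)
   so that vaese with a zero key yields the SubWord/RotWord values the key
   schedule needs. */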
static const u8x16 aese_prep_mask1 =
  { 13, 14, 15, 12, 13, 14, 15, 12, 13, 14, 15, 12, 13, 14, 15, 12 };
static const u8x16 aese_prep_mask2 =
  { 12, 13, 14, 15, 12, 13, 14, 15, 12, 13, 14, 15, 12, 13, 14, 15 };

static_always_inline void
aes128_key_expand_round_neon (u8x16 * rk, u32 rcon)
{
  u8x16 r, t, last_round = rk[-1], z = { };
  r = vqtbl1q_u8 (last_round, aese_prep_mask1);
  r = vaeseq_u8 (r, z);
  r ^= (u8x16) vdupq_n_u32 (rcon);
  r ^= last_round;
  r ^= t = vextq_u8 (z, last_round, 12);
  r ^= t = vextq_u8 (z, t, 12);
  r ^= vextq_u8 (z, t, 12);
  rk[0] = r;
}

static_always_inline void
aes128_key_expand (u8x16 * rk, u8x16u const *k)
{
  rk[0] = k[0];
  aes128_key_expand_round_neon (rk + 1, 0x01);
  aes128_key_expand_round_neon (rk + 2, 0x02);
  aes128_key_expand_round_neon (rk + 3, 0x04);
  aes128_key_expand_round_neon (rk + 4, 0x08);
  aes128_key_expand_round_neon (rk + 5, 0x10);
  aes128_key_expand_round_neon (rk + 6, 0x20);
  aes128_key_expand_round_neon (rk + 7, 0x40);
  aes128_key_expand_round_neon (rk + 8, 0x80);
  aes128_key_expand_round_neon (rk + 9, 0x1b);
  aes128_key_expand_round_neon (rk + 10, 0x36);
}

static_always_inline void
aes192_key_expand_round_neon (u8x8 * rk, u32 rcon)
{
  u8x8 r, last_round = rk[-1], z = { };
  u8x16 r2, z2 = { };

  r2 = (u8x16) vdupq_lane_u64 ((uint64x1_t) last_round, 0);
  r2 = vqtbl1q_u8 (r2, aese_prep_mask1);
  r2 = vaeseq_u8 (r2, z2);
  r2 ^= (u8x16) vdupq_n_u32 (rcon);

  r = (u8x8) vdup_laneq_u64 ((u64x2) r2, 0);
  r ^= rk[-3];
  r ^= vext_u8 (z, rk[-3], 4);
  rk[0] = r;

  r = rk[-2] ^ vext_u8 (r, z, 4);
  r ^= vext_u8 (z, r, 4);
  rk[1] = r;

  if (rcon == 0x80)
    return;

  r = rk[-1] ^ vext_u8 (r, z, 4);
  r ^= vext_u8 (z, r, 4);
  rk[2] = r;
}

static_always_inline void
aes192_key_expand (u8x16 * ek, const u8x16u * k)
{
  u8x8 *rk = (u8x8 *) ek;
  ek[0] = k[0];
  rk[2] = *(u8x8u *) (k + 1);
  aes192_key_expand_round_neon (rk + 3, 0x01);
  aes192_key_expand_round_neon (rk + 6, 0x02);
  aes192_key_expand_round_neon (rk + 9, 0x04);
  aes192_key_expand_round_neon (rk + 12, 0x08);
  aes192_key_expand_round_neon (rk + 15, 0x10);
  aes192_key_expand_round_neon (rk + 18, 0x20);
  aes192_key_expand_round_neon (rk + 21, 0x40);
  aes192_key_expand_round_neon (rk + 24, 0x80);
}

static_always_inline void
aes256_key_expand_round_neon (u8x16 * rk, u32 rcon)
{
  u8x16 r, t, z = { };

  r = vqtbl1q_u8 (rk[-1], rcon ? aese_prep_mask1 : aese_prep_mask2);
  r = vaeseq_u8 (r, z);
  if (rcon)
    r ^= (u8x16) vdupq_n_u32 (rcon);
  r ^= rk[-2];
  r ^= t = vextq_u8 (z, rk[-2], 12);
  r ^= t = vextq_u8 (z, t, 12);
  r ^= vextq_u8 (z, t, 12);
  rk[0] = r;
}

static_always_inline void
aes256_key_expand (u8x16 * rk, u8x16u const *k)
{
  rk[0] = k[0];
  rk[1] = k[1];
  aes256_key_expand_round_neon (rk + 2, 0x01);
  aes256_key_expand_round_neon (rk + 3, 0);
  aes256_key_expand_round_neon (rk + 4, 0x02);
  aes256_key_expand_round_neon (rk + 5, 0);
  aes256_key_expand_round_neon (rk + 6, 0x04);
  aes256_key_expand_round_neon (rk + 7, 0);
  aes256_key_expand_round_neon (rk + 8, 0x08);
  aes256_key_expand_round_neon (rk + 9, 0);
  aes256_key_expand_round_neon (rk + 10, 0x10);
  aes256_key_expand_round_neon (rk + 11, 0);
  aes256_key_expand_round_neon (rk + 12, 0x20);
  aes256_key_expand_round_neon (rk + 13, 0);
  aes256_key_expand_round_neon (rk + 14, 0x40);
}

#endif

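/* key_schedule must provide room for AES_KEY_ROUNDS (ks) + 1 vectors:
   11, 13 or 15 round keys for AES-128/192/256. */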
static_always_inline void
aes_key_expand (u8x16 * key_schedule, u8 const *key, aes_key_size_t ks)
{
  switch (ks)
    {
    case AES_KEY_128:
      aes128_key_expand (key_schedule, (u8x16u const *) key);
      break;
    case AES_KEY_192:
      aes192_key_expand (key_schedule, (u8x16u const *) key);
      break;
    case AES_KEY_256:
      aes256_key_expand (key_schedule, (u8x16u const *) key);
      break;
    }
}

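/* Build the decryption schedule per the equivalent inverse cipher
   (FIPS-197): reverse the encryption round keys and apply InvMixColumns
   to all but the first and last. */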
static_always_inline void
aes_key_enc_to_dec (u8x16 * ke, u8x16 * kd, aes_key_size_t ks)
{
  int rounds = AES_KEY_ROUNDS (ks);

  kd[rounds] = ke[0];
  kd[0] = ke[rounds];

  for (int i = 1; i < (rounds / 2); i++)
    {
      kd[rounds - i] = aes_inv_mix_column (ke[i]);
      kd[i] = aes_inv_mix_column (ke[rounds - i]);
    }

  kd[rounds / 2] = aes_inv_mix_column (ke[rounds / 2]);
}

#endif /* __aesni_h__ */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */