/*
 * Copyright (c) 2018 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <stdbool.h>
#include <vlib/vlib.h>
#include <vnet/crypto/crypto.h>

vnet_crypto_main_t crypto_main;

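/* Set the same completion status on every op in the array. */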
static_always_inline void
crypto_set_op_status (vnet_crypto_op_t * ops[], u32 n_ops, int status)
{
  while (n_ops--)
    {
      ops[0]->status = status;
      ops++;
    }
}

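/*
 * Hand a batch of ops that all share the same op id to the active handler
 * for that id, using the chained-ops handler when the ops carry scattered
 * data in 'chunks'. Ops are flagged FAIL_NO_HANDLER when no handler is
 * registered.
 */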
static_always_inline u32
vnet_crypto_process_ops_call_handler (vlib_main_t * vm,
                                      vnet_crypto_main_t * cm,
                                      vnet_crypto_op_id_t opt,
                                      vnet_crypto_op_t * ops[],
                                      vnet_crypto_op_chunk_t * chunks,
                                      u32 n_ops)
{
  u32 rv = 0;
  if (n_ops == 0)
    return 0;

  if (chunks)
    {
      if (cm->chained_ops_handlers[opt] == 0)
        crypto_set_op_status (ops, n_ops,
                              VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
      else
        rv = (cm->chained_ops_handlers[opt]) (vm, ops, chunks, n_ops);
    }
  else
    {
      if (cm->ops_handlers[opt] == 0)
        crypto_set_op_status (ops, n_ops,
                              VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
      else
        rv = (cm->ops_handlers[opt]) (vm, ops, n_ops);
    }
  return rv;
}

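/*
 * Process an array of ops by grouping runs of identical op ids into
 * op_queue (at most VLIB_FRAME_SIZE entries) and flushing each group to the
 * active handler. Returns the accumulated handler return value.
 */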
static_always_inline u32
vnet_crypto_process_ops_inline (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                vnet_crypto_op_chunk_t * chunks, u32 n_ops)
{
  vnet_crypto_main_t *cm = &crypto_main;
  const int op_q_size = VLIB_FRAME_SIZE;
  vnet_crypto_op_t *op_queue[op_q_size];
  vnet_crypto_op_id_t opt, current_op_type = ~0;
  u32 n_op_queue = 0;
  u32 rv = 0, i;

  ASSERT (n_ops >= 1);

  for (i = 0; i < n_ops; i++)
    {
      opt = ops[i].op;

      if (current_op_type != opt || n_op_queue >= op_q_size)
        {
          rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
                                                      op_queue, chunks,
                                                      n_op_queue);
          n_op_queue = 0;
          current_op_type = opt;
        }

      op_queue[n_op_queue++] = &ops[i];
    }

  rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
                                              op_queue, chunks, n_op_queue);
  return rv;
}

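/*
 * Public entry points: vnet_crypto_process_ops() handles single-buffer ops,
 * vnet_crypto_process_chained_ops() handles ops whose data is described by
 * vnet_crypto_op_chunk_t scatter-gather chunks.
 */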
u32
vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[], u32 n_ops)
{
  return vnet_crypto_process_ops_inline (vm, ops, 0, n_ops);
}

u32
vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                 vnet_crypto_op_chunk_t * chunks, u32 n_ops)
{
  return vnet_crypto_process_ops_inline (vm, ops, chunks, n_ops);
}

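/*
 * Register a crypto engine and return its index. Engines call this from
 * their init function and then register per-op handlers against the
 * returned index. A minimal sketch, assuming a hypothetical engine name and
 * handler my_aes_128_cbc_enc provided by that engine:
 *
 *   u32 eidx = vnet_crypto_register_engine (vm, "my-engine", 50, "sample");
 *   vnet_crypto_register_ops_handler (vm, eidx,
 *                                     VNET_CRYPTO_OP_AES_128_CBC_ENC,
 *                                     my_aes_128_cbc_enc);
 */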
u32
vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
                             char *desc)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *p;

  vec_add2 (cm->engines, p, 1);
  p->name = name;
  p->desc = desc;
  p->priority = prio;

  hash_set_mem (cm->engine_index_by_name, p->name, p - cm->engines);

  return p - cm->engines;
}

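/*
 * Make the given engine the active provider of the simple and/or chained
 * handler for an op id, but only for the handler kinds the engine actually
 * implements.
 */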
static_always_inline void
crypto_set_active_engine (vnet_crypto_op_data_t * od,
                          vnet_crypto_op_id_t id, u32 ei,
                          crypto_op_class_type_t oct)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);

  if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_CHAINED)
    {
      if (ce->chained_ops_handlers[id])
        {
          od->active_engine_index_chained = ei;
          cm->chained_ops_handlers[id] = ce->chained_ops_handlers[id];
        }
    }

  if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_SIMPLE)
    {
      if (ce->ops_handlers[id])
        {
          od->active_engine_index_simple = ei;
          cm->ops_handlers[id] = ce->ops_handlers[id];
        }
    }
}

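/*
 * Select a specific engine for every op id of the named algorithm
 * (e.g. "aes-128-gcm") for the given op class (simple, chained or both).
 * Returns -1 when either the algorithm or the engine name is unknown.
 */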
int
vnet_crypto_set_handler2 (char *alg_name, char *engine,
                          crypto_op_class_type_t oct)
{
  uword *p;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_alg_data_t *ad;
  int i;

  p = hash_get_mem (cm->alg_index_by_name, alg_name);
  if (!p)
    return -1;

  ad = vec_elt_at_index (cm->algs, p[0]);

  p = hash_get_mem (cm->engine_index_by_name, engine);
  if (!p)
    return -1;

  for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
    {
      vnet_crypto_op_data_t *od;
      vnet_crypto_op_id_t id = ad->op_by_type[i];
      if (id == 0)
        continue;

      od = cm->opt_data + id;
      crypto_set_active_engine (od, id, p[0], oct);
    }

  return 0;
}

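/*
 * Report whether a simple-op handler is installed for the first op id
 * defined for this algorithm.
 */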
int
vnet_crypto_is_set_handler (vnet_crypto_alg_t alg)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_op_id_t opt = 0;
  int i;

  if (alg > vec_len (cm->algs))
    return 0;

  for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
    if ((opt = cm->algs[alg].op_by_type[i]) != 0)
      break;

  return NULL != cm->ops_handlers[opt];
}

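/*
 * Record an engine's simple (fn) and/or chained (cfn) handler for an op id.
 * The handler becomes active when no engine is active yet or when this
 * engine carries a higher priority value than the currently active one.
 */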
void
vnet_crypto_register_ops_handler_inline (vlib_main_t * vm, u32 engine_index,
                                         vnet_crypto_op_id_t opt,
                                         vnet_crypto_ops_handler_t * fn,
                                         vnet_crypto_chained_ops_handler_t *
                                         cfn)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
  vnet_crypto_op_data_t *otd = cm->opt_data + opt;
  vec_validate_aligned (cm->ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);
  vec_validate_aligned (cm->chained_ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);

  if (fn)
    {
      e->ops_handlers[opt] = fn;
      if (otd->active_engine_index_simple == ~0)
        {
          otd->active_engine_index_simple = engine_index;
          cm->ops_handlers[opt] = fn;
        }

      ae = vec_elt_at_index (cm->engines, otd->active_engine_index_simple);
      if (ae->priority < e->priority)
        crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_SIMPLE);
    }

  if (cfn)
    {
      e->chained_ops_handlers[opt] = cfn;
      if (otd->active_engine_index_chained == ~0)
        {
          otd->active_engine_index_chained = engine_index;
          cm->chained_ops_handlers[opt] = cfn;
        }

      ae = vec_elt_at_index (cm->engines, otd->active_engine_index_chained);
      if (ae->priority < e->priority)
        crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_CHAINED);
    }

  return;
}

void
vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
                                  vnet_crypto_op_id_t opt,
                                  vnet_crypto_ops_handler_t * fn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, 0);
}

void
vnet_crypto_register_chained_ops_handler (vlib_main_t * vm, u32 engine_index,
                                          vnet_crypto_op_id_t opt,
                                          vnet_crypto_chained_ops_handler_t *
                                          fn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, 0, fn);
}

void
vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
                                   vnet_crypto_op_id_t opt,
                                   vnet_crypto_ops_handler_t * fn,
                                   vnet_crypto_chained_ops_handler_t * cfn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, cfn);
}

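/*
 * Record an engine's async frame enqueue handler for an async op id. The
 * engine with the highest priority value (ties going to the most recent
 * registration) becomes the active one for that op id.
 */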
void
vnet_crypto_register_enqueue_handler (vlib_main_t *vm, u32 engine_index,
                                      vnet_crypto_async_op_id_t opt,
                                      vnet_crypto_frame_enqueue_t *enqueue_hdl)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
  vnet_crypto_async_op_data_t *otd = cm->async_opt_data + opt;
  vec_validate_aligned (cm->enqueue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS,
                        CLIB_CACHE_LINE_BYTES);
  vec_validate_aligned (cm->dequeue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS,
                        CLIB_CACHE_LINE_BYTES);

  if (!enqueue_hdl)
    return;

  e->enqueue_handlers[opt] = enqueue_hdl;
  if (otd->active_engine_index_async == ~0)
    {
      otd->active_engine_index_async = engine_index;
      cm->enqueue_handlers[opt] = enqueue_hdl;
    }

  ae = vec_elt_at_index (cm->engines, otd->active_engine_index_async);
  if (ae->priority <= e->priority)
    {
      otd->active_engine_index_async = engine_index;
      cm->enqueue_handlers[opt] = enqueue_hdl;
    }

  return;
}

static int
engine_index_cmp (void *v1, void *v2)
{
  u32 *a1 = v1;
  u32 *a2 = v2;

  if (*a1 > *a2)
    return 1;
  if (*a1 < *a2)
    return -1;
  return 0;
}

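/*
 * Rebuild the global dequeue handler list from the engines that are
 * currently active for at least one async op id, deduplicating by engine
 * index so each engine's dequeue handler appears once.
 */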
static void
vnet_crypto_update_cm_dequeue_handlers (void)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_op_data_t *otd;
  vnet_crypto_engine_t *e;
  u32 *active_engines = 0, *ei, last_ei = ~0, i;

  vec_reset_length (cm->dequeue_handlers);

  for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_IDS; i++)
    {
      otd = cm->async_opt_data + i;
      if (otd->active_engine_index_async == ~0)
        continue;
      e = cm->engines + otd->active_engine_index_async;
      if (!e->dequeue_handler)
        continue;
      vec_add1 (active_engines, otd->active_engine_index_async);
    }

  vec_sort_with_function (active_engines, engine_index_cmp);

  vec_foreach (ei, active_engines)
    {
      if (ei[0] == last_ei)
        continue;
      if (ei[0] == ~0)
        continue;

      e = cm->engines + ei[0];
      vec_add1 (cm->dequeue_handlers, e->dequeue_handler);
      last_ei = ei[0];
    }

  vec_free (active_engines);
}

void
vnet_crypto_register_dequeue_handler (vlib_main_t *vm, u32 engine_index,
                                      vnet_crypto_frame_dequeue_t *deq_fn)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *e = vec_elt_at_index (cm->engines, engine_index);

  if (!deq_fn)
    return;

  e->dequeue_handler = deq_fn;

  return;
}

void
vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
                                  vnet_crypto_key_handler_t * key_handler)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *e = vec_elt_at_index (cm->engines, engine_index);
  e->key_op_handler = key_handler;
  return;
}

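/*
 * Validate the key length for an algorithm: ciphers and AEADs require an
 * exact match, HMAC and plain hash algorithms accept any length.
 */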
static int
vnet_crypto_key_len_check (vnet_crypto_alg_t alg, u16 length)
{
  switch (alg)
    {
    case VNET_CRYPTO_N_ALGS:
      return 0;
    case VNET_CRYPTO_ALG_NONE:
      return 1;

#define _(n, s, l) \
  case VNET_CRYPTO_ALG_##n: \
    if ((l) == length) \
      return 1;        \
    break;
      foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
      /* HMAC allows any key length */
#define _(n, s) \
  case VNET_CRYPTO_ALG_HMAC_##n: \
    return 1;
      foreach_crypto_hmac_alg
#undef _

#define _(n, s) \
  case VNET_CRYPTO_ALG_HASH_##n: \
    return 1;
      foreach_crypto_hash_alg
#undef _
    }

  return 0;
}

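/*
 * Allocate a key of the given algorithm from the key pool, copy the key
 * material and notify every engine that registered a key handler. Returns
 * the new key index, or ~0 when the length is invalid for the algorithm.
 * The worker barrier is taken only when the key pool has to grow.
 *
 * Illustrative use (sketch only, the key bytes are placeholders):
 *
 *   u8 kd[16] = { 0 };
 *   u32 ki = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_AES_128_GCM, kd, 16);
 *   if (ki != ~0)
 *     ;  // reference ki from vnet_crypto_op_t or async frames
 */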
u32
vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg, u8 * data,
                     u16 length)
{
  u32 index;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key;

  u8 need_barrier_sync = 0;

  if (!vnet_crypto_key_len_check (alg, length))
    return ~0;

  need_barrier_sync = pool_get_will_expand (cm->keys);
  /* If the cm->keys will expand, stop the parade. */
  if (need_barrier_sync)
    vlib_worker_thread_barrier_sync (vm);

  pool_get_zero (cm->keys, key);

  if (need_barrier_sync)
    vlib_worker_thread_barrier_release (vm);

  index = key - cm->keys;
  key->type = VNET_CRYPTO_KEY_TYPE_DATA;
  key->alg = alg;
  vec_validate_aligned (key->data, length - 1, CLIB_CACHE_LINE_BYTES);
  clib_memcpy (key->data, data, length);
  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
  /* *INDENT-ON* */
  return index;
}

void
vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key = pool_elt_at_index (cm->keys, index);

  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_DEL, index);
  /* *INDENT-ON* */

  if (key->type == VNET_CRYPTO_KEY_TYPE_DATA)
    {
      clib_memset (key->data, 0, vec_len (key->data));
      vec_free (key->data);
    }
  else if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
    {
      key->index_crypto = key->index_integ = 0;
    }

  pool_put (cm->keys, key);
}

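/*
 * Map a (cipher, HMAC) algorithm pair onto the corresponding linked async
 * algorithm, or ~0 when the combination is not supported. Linked keys pair
 * an existing crypto key with an existing integrity key for the async API.
 */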
vnet_crypto_async_alg_t
vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
                       vnet_crypto_alg_t integ_alg)
{
#define _(c, h, s, k ,d) \
  if (crypto_alg == VNET_CRYPTO_ALG_##c && \
      integ_alg == VNET_CRYPTO_ALG_HMAC_##h) \
    return VNET_CRYPTO_ALG_##c##_##h##_TAG##d;
  foreach_crypto_link_async_alg
#undef _
  return ~0;
}

u32
vnet_crypto_key_add_linked (vlib_main_t * vm,
                            vnet_crypto_key_index_t index_crypto,
                            vnet_crypto_key_index_t index_integ)
{
  u32 index;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key_crypto, *key_integ, *key;
  vnet_crypto_async_alg_t linked_alg;

  key_crypto = pool_elt_at_index (cm->keys, index_crypto);
  key_integ = pool_elt_at_index (cm->keys, index_integ);

  linked_alg = vnet_crypto_link_algs (key_crypto->alg, key_integ->alg);
  if (linked_alg == ~0)
    return ~0;

  pool_get_zero (cm->keys, key);
  index = key - cm->keys;
  key->type = VNET_CRYPTO_KEY_TYPE_LINK;
  key->index_crypto = index_crypto;
  key->index_integ = index_integ;
  key->async_alg = linked_alg;

  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
  /* *INDENT-ON* */

  return index;
}

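/*
 * Enable or disable the crypto-dispatch node on every worker thread,
 * picking polling or interrupt state from the configured dispatch mode and
 * the current async reference count.
 */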
clib_error_t *
crypto_dispatch_enable_disable (int is_enable)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
  u8 state_change = 0;

  CLIB_MEMORY_STORE_BARRIER ();
  if (is_enable && cm->async_refcnt > 0)
    {
      state_change = 1;
      state =
        cm->dispatch_mode ==
        VNET_CRYPTO_ASYNC_DISPATCH_POLLING ? VLIB_NODE_STATE_POLLING :
        VLIB_NODE_STATE_INTERRUPT;
    }

  if (!is_enable && cm->async_refcnt == 0)
    {
      state_change = 1;
      state = VLIB_NODE_STATE_DISABLED;
    }

  if (state_change)
    for (i = skip_master; i < tm->n_vlib_mains; i++)
      {
        vlib_main_t *ovm = vlib_get_main_by_index (i);
        if (state != vlib_node_get_state (ovm, cm->crypto_node_index))
          vlib_node_set_state (ovm, cm->crypto_node_index, state);
      }
  return 0;
}

static_always_inline void
crypto_set_active_async_engine (vnet_crypto_async_op_data_t * od,
                                vnet_crypto_async_op_id_t id, u32 ei)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);

  if (ce->enqueue_handlers[id] && ce->dequeue_handler)
    {
      od->active_engine_index_async = ei;
      cm->enqueue_handlers[id] = ce->enqueue_handlers[id];
      cm->dequeue_handlers[id] = ce->dequeue_handler;
    }
}

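/*
 * Select a specific engine for all async op ids of the named async
 * algorithm. Refused with -EBUSY while async mode is in use, and returns -1
 * for unknown algorithm or engine names.
 */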
int
vnet_crypto_set_async_handler2 (char *alg_name, char *engine)
{
  uword *p;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_alg_data_t *ad;
  int i;

  if (cm->async_refcnt)
    return -EBUSY;

  p = hash_get_mem (cm->async_alg_index_by_name, alg_name);
  if (!p)
    return -1;

  ad = vec_elt_at_index (cm->async_algs, p[0]);

  p = hash_get_mem (cm->engine_index_by_name, engine);
  if (!p)
    return -1;

  for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_TYPES; i++)
    {
      vnet_crypto_async_op_data_t *od;
      vnet_crypto_async_op_id_t id = ad->op_by_type[i];
      if (id == 0)
        continue;

      od = cm->async_opt_data + id;
      crypto_set_active_async_engine (od, id, p[0]);
    }

  vnet_crypto_update_cm_dequeue_handlers ();

  return 0;
}

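/*
 * Register (or look up) a post-processing node that completed async frames
 * are handed off to, and return its next index from the crypto-dispatch
 * node.
 */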
u32
vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_next_node_t *nn = 0;
  vlib_node_t *cc, *pn;
  uword index = vec_len (cm->next_nodes);

  pn = vlib_get_node_by_name (vm, (u8 *) post_node_name);
  if (!pn)
    return ~0;

  /* *INDENT-OFF* */
  /* vec_foreach takes (variable, vector); iterate existing next nodes so
     a node registered twice returns its existing next index */
  vec_foreach (nn, cm->next_nodes)
  {
    if (nn->node_idx == pn->index)
      return nn->next_idx;
  }
  /* *INDENT-ON* */

  vec_validate (cm->next_nodes, index);
  nn = vec_elt_at_index (cm->next_nodes, index);

  cc = vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch");
  nn->next_idx = vlib_node_add_named_next (vm, cc->index, post_node_name);
  nn->node_idx = pn->index;

  return nn->next_idx;
}

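/*
 * Reference-counted request to turn async crypto dispatch on or off. The
 * crypto-dispatch node state on workers only changes on the 0 -> 1 and
 * 1 -> 0 transitions of the reference count.
 */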
void
vnet_crypto_request_async_mode (int is_enable)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
  u8 state_change = 0;

  CLIB_MEMORY_STORE_BARRIER ();
  if (is_enable && cm->async_refcnt == 0)
    {
      state_change = 1;
      state =
        cm->dispatch_mode == VNET_CRYPTO_ASYNC_DISPATCH_POLLING ?
        VLIB_NODE_STATE_POLLING : VLIB_NODE_STATE_INTERRUPT;
    }
  if (!is_enable && cm->async_refcnt == 1)
    {
      state_change = 1;
      state = VLIB_NODE_STATE_DISABLED;
    }

  if (state_change)
    {
      for (i = skip_master; i < tm->n_vlib_mains; i++)
        {
          vlib_main_t *ovm = vlib_get_main_by_index (i);
          if (state != vlib_node_get_state (ovm, cm->crypto_node_index))
            vlib_node_set_state (ovm, cm->crypto_node_index, state);
        }

      if (is_enable)
        vnet_crypto_update_cm_dequeue_handlers ();
    }

  if (is_enable)
    cm->async_refcnt += 1;
  else if (cm->async_refcnt > 0)
    cm->async_refcnt -= 1;
}

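/*
 * Switch the crypto-dispatch node between polling and interrupt dispatch on
 * all workers; the node stays disabled while the async reference count is
 * zero.
 */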
void
vnet_crypto_set_async_dispatch_mode (u8 mode)
{
  vnet_crypto_main_t *cm = &crypto_main;
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;

  CLIB_MEMORY_STORE_BARRIER ();
  cm->dispatch_mode = mode;
  if (mode == VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT)
    {
      state =
        cm->async_refcnt == 0 ?
        VLIB_NODE_STATE_DISABLED : VLIB_NODE_STATE_INTERRUPT;
    }
  else if (mode == VNET_CRYPTO_ASYNC_DISPATCH_POLLING)
    {
      state =
        cm->async_refcnt == 0 ?
        VLIB_NODE_STATE_DISABLED : VLIB_NODE_STATE_POLLING;
    }

  for (i = skip_master; i < tm->n_vlib_mains; i++)
    {
      vlib_main_t *ovm = vlib_get_main_by_index (i);
      if (state != vlib_node_get_state (ovm, cm->crypto_node_index))
        vlib_node_set_state (ovm, cm->crypto_node_index, state);
    }
}

int
vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t op)
{
  vnet_crypto_main_t *cm = &crypto_main;

  return (op < vec_len (cm->enqueue_handlers) &&
          NULL != cm->enqueue_handlers[op]);
}

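/*
 * Initialization helpers: populate per-algorithm and per-op metadata
 * (names, op types, active engine indices) and the name -> index hashes
 * used by the "set crypto handler" style APIs.
 */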
static void
vnet_crypto_init_cipher_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t eid,
                              vnet_crypto_op_id_t did, char *name, u8 is_aead)
{
  vnet_crypto_op_type_t eopt, dopt;
  vnet_crypto_main_t *cm = &crypto_main;

  cm->algs[alg].name = name;
  cm->opt_data[eid].alg = cm->opt_data[did].alg = alg;
  cm->opt_data[eid].active_engine_index_simple = ~0;
  cm->opt_data[did].active_engine_index_simple = ~0;
  cm->opt_data[eid].active_engine_index_chained = ~0;
  cm->opt_data[did].active_engine_index_chained = ~0;
  if (is_aead)
    {
      eopt = VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT;
      dopt = VNET_CRYPTO_OP_TYPE_AEAD_DECRYPT;
    }
  else
    {
      eopt = VNET_CRYPTO_OP_TYPE_ENCRYPT;
      dopt = VNET_CRYPTO_OP_TYPE_DECRYPT;
    }
  cm->opt_data[eid].type = eopt;
  cm->opt_data[did].type = dopt;
  cm->algs[alg].op_by_type[eopt] = eid;
  cm->algs[alg].op_by_type[dopt] = did;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_hash_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t id,
                            char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  cm->algs[alg].name = name;
  cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HASH] = id;
  cm->opt_data[id].alg = alg;
  cm->opt_data[id].active_engine_index_simple = ~0;
  cm->opt_data[id].active_engine_index_chained = ~0;
  cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HASH;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_hmac_data (vnet_crypto_alg_t alg,
                            vnet_crypto_op_id_t id, char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  cm->algs[alg].name = name;
  cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HMAC] = id;
  cm->opt_data[id].alg = alg;
  cm->opt_data[id].active_engine_index_simple = ~0;
  cm->opt_data[id].active_engine_index_chained = ~0;
  cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HMAC;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_async_data (vnet_crypto_async_alg_t alg,
                             vnet_crypto_async_op_id_t eid,
                             vnet_crypto_async_op_id_t did, char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;

  cm->async_algs[alg].name = name;
  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT] = eid;
  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT] = did;
  cm->async_opt_data[eid].type = VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT;
  cm->async_opt_data[eid].alg = alg;
  cm->async_opt_data[eid].active_engine_index_async = ~0;
  cm->async_opt_data[did].type = VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT;
  cm->async_opt_data[did].alg = alg;
  cm->async_opt_data[did].active_engine_index_async = ~0;
  hash_set_mem (cm->async_alg_index_by_name, name, alg);
}

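/*
 * One-time initialization: create the name -> index hashes, per-thread
 * frame pools and algorithm tables, then expand the per-algorithm op
 * metadata from the foreach_* macros and cache the crypto-dispatch node
 * index.
 */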
clib_error_t *
vnet_crypto_init (vlib_main_t * vm)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vnet_crypto_thread_t *ct = 0;

  cm->dispatch_mode = VNET_CRYPTO_ASYNC_DISPATCH_POLLING;
  cm->engine_index_by_name = hash_create_string ( /* size */ 0,
                                                  sizeof (uword));
  cm->alg_index_by_name = hash_create_string (0, sizeof (uword));
  cm->async_alg_index_by_name = hash_create_string (0, sizeof (uword));
  vec_validate_aligned (cm->threads, tm->n_vlib_mains, CLIB_CACHE_LINE_BYTES);
  vec_foreach (ct, cm->threads)
    pool_alloc_aligned (ct->frame_pool, VNET_CRYPTO_FRAME_POOL_SIZE,
                        CLIB_CACHE_LINE_BYTES);
  vec_validate (cm->algs, VNET_CRYPTO_N_ALGS);
  vec_validate (cm->async_algs, VNET_CRYPTO_N_ASYNC_ALGS);

#define _(n, s, l) \
  vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
                                VNET_CRYPTO_OP_##n##_ENC, \
                                VNET_CRYPTO_OP_##n##_DEC, s, 0);
  foreach_crypto_cipher_alg;
#undef _
#define _(n, s, l) \
  vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
                                VNET_CRYPTO_OP_##n##_ENC, \
                                VNET_CRYPTO_OP_##n##_DEC, s, 1);
  foreach_crypto_aead_alg;
#undef _
#define _(n, s) \
  vnet_crypto_init_hmac_data (VNET_CRYPTO_ALG_HMAC_##n, \
                              VNET_CRYPTO_OP_##n##_HMAC, "hmac-" s);
  foreach_crypto_hmac_alg;
#undef _
#define _(n, s) \
  vnet_crypto_init_hash_data (VNET_CRYPTO_ALG_HASH_##n, \
                              VNET_CRYPTO_OP_##n##_HASH, s);
  foreach_crypto_hash_alg;
#undef _
#define _(n, s, k, t, a) \
  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a, \
                               VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
                               VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC, \
                               s);
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k ,d) \
  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##c##_##h##_TAG##d, \
                               VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
                               VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC, \
                               s);
  foreach_crypto_link_async_alg
#undef _
  cm->crypto_node_index =
    vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch")->index;

  return 0;
}

VLIB_INIT_FUNCTION (vnet_crypto_init);

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */