/*
 * Copyright (c) 2018 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <stdbool.h>
#include <vlib/vlib.h>
#include <vnet/crypto/crypto.h>

vnet_crypto_main_t crypto_main;

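/* Helper: set the same status on every op in an array, e.g. to flag
 * VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER when no engine is registered. */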
static_always_inline void
crypto_set_op_status (vnet_crypto_op_t * ops[], u32 n_ops, int status)
{
  while (n_ops--)
    {
      ops[0]->status = status;
      ops++;
    }
}

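/* Dispatch a batch of ops sharing a single op id to the currently active
 * handler.  Chained (multi-chunk) ops go to the chained handler; if no
 * handler is registered, every op in the batch is marked as failed. */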
static_always_inline u32
vnet_crypto_process_ops_call_handler (vlib_main_t * vm,
                                      vnet_crypto_main_t * cm,
                                      vnet_crypto_op_id_t opt,
                                      vnet_crypto_op_t * ops[],
                                      vnet_crypto_op_chunk_t * chunks,
                                      u32 n_ops)
{
  u32 rv = 0;
  if (n_ops == 0)
    return 0;

  if (chunks)
    {
      if (cm->chained_ops_handlers[opt] == 0)
        crypto_set_op_status (ops, n_ops,
                              VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
      else
        rv = (cm->chained_ops_handlers[opt]) (vm, ops, chunks, n_ops);
    }
  else
    {
      if (cm->ops_handlers[opt] == 0)
        crypto_set_op_status (ops, n_ops,
                              VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
      else
        rv = (cm->ops_handlers[opt]) (vm, ops, n_ops);
    }
  return rv;
}

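/* Walk the op array and group consecutive ops that share the same op id
 * into a temporary queue of at most VLIB_FRAME_SIZE entries, flushing the
 * queue to the handler whenever the op id changes or the queue fills up. */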
static_always_inline u32
vnet_crypto_process_ops_inline (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                vnet_crypto_op_chunk_t * chunks, u32 n_ops)
{
  vnet_crypto_main_t *cm = &crypto_main;
  const int op_q_size = VLIB_FRAME_SIZE;
  vnet_crypto_op_t *op_queue[op_q_size];
  vnet_crypto_op_id_t opt, current_op_type = ~0;
  u32 n_op_queue = 0;
  u32 rv = 0, i;

  ASSERT (n_ops >= 1);

  for (i = 0; i < n_ops; i++)
    {
      opt = ops[i].op;

      if (current_op_type != opt || n_op_queue >= op_q_size)
        {
          rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
                                                      op_queue, chunks,
                                                      n_op_queue);
          n_op_queue = 0;
          current_op_type = opt;
        }

      op_queue[n_op_queue++] = &ops[i];
    }

  rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
                                              op_queue, chunks, n_op_queue);
  return rv;
}

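/* Public entry points for the synchronous API.  A minimal usage sketch
 * (illustrative only: vnet_crypto_op_init (), the AES-128-CBC op id and
 * VNET_CRYPTO_OP_STATUS_COMPLETED come from crypto.h, and the exact fields
 * to fill depend on the op type):
 *
 *   vnet_crypto_op_t op;
 *   vnet_crypto_op_init (&op, VNET_CRYPTO_OP_AES_128_CBC_ENC);
 *   // fill in key index, iv, src/dst pointers and length here
 *   vnet_crypto_process_ops (vm, &op, 1);
 *   if (op.status != VNET_CRYPTO_OP_STATUS_COMPLETED)
 *     ; // handle the failure
 */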
u32
vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[], u32 n_ops)
{
  return vnet_crypto_process_ops_inline (vm, ops, 0, n_ops);
}

u32
vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                 vnet_crypto_op_chunk_t * chunks, u32 n_ops)
{
  return vnet_crypto_process_ops_inline (vm, ops, chunks, n_ops);
}

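/* Engine registration: adds the engine to cm->engines and indexes it by
 * name.  A typical plugin init registers the engine once and then attaches
 * per-op handlers, roughly as below (the handler name is a hypothetical
 * placeholder):
 *
 *   u32 eidx = vnet_crypto_register_engine (vm, "my-engine", 100, "demo");
 *   vnet_crypto_register_ops_handler (vm, eidx,
 *                                     VNET_CRYPTO_OP_AES_128_CBC_ENC,
 *                                     my_aes_128_cbc_enc_fn);
 */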
u32
vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
                             char *desc)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *p;

  vec_add2 (cm->engines, p, 1);
  p->name = name;
  p->desc = desc;
  p->priority = prio;

  hash_set_mem (cm->engine_index_by_name, p->name, p - cm->engines);

  return p - cm->engines;
}

static_always_inline void
crypto_set_active_engine (vnet_crypto_op_data_t * od,
                          vnet_crypto_op_id_t id, u32 ei,
                          crypto_op_class_type_t oct)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);

  if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_CHAINED)
    {
      if (ce->chained_ops_handlers[id])
        {
          od->active_engine_index_chained = ei;
          cm->chained_ops_handlers[id] = ce->chained_ops_handlers[id];
        }
    }

  if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_SIMPLE)
    {
      if (ce->ops_handlers[id])
        {
          od->active_engine_index_simple = ei;
          cm->ops_handlers[id] = ce->ops_handlers[id];
        }
    }
}

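/* Force a specific engine for every op id of the named algorithm, for the
 * simple and/or chained op class; typically driven from the crypto CLI. */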
int
vnet_crypto_set_handler2 (char *alg_name, char *engine,
                          crypto_op_class_type_t oct)
{
  uword *p;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_alg_data_t *ad;
  int i;

  p = hash_get_mem (cm->alg_index_by_name, alg_name);
  if (!p)
    return -1;

  ad = vec_elt_at_index (cm->algs, p[0]);

  p = hash_get_mem (cm->engine_index_by_name, engine);
  if (!p)
    return -1;

  for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
    {
      vnet_crypto_op_data_t *od;
      vnet_crypto_op_id_t id = ad->op_by_type[i];
      if (id == 0)
        continue;

      od = cm->opt_data + id;
      crypto_set_active_engine (od, id, p[0], oct);
    }

  return 0;
}

int
vnet_crypto_is_set_handler (vnet_crypto_alg_t alg)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_op_id_t opt = 0;
  int i;

  if (alg >= vec_len (cm->algs))
    return 0;

  for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
    if ((opt = cm->algs[alg].op_by_type[i]) != 0)
      break;

  return NULL != cm->ops_handlers[opt];
}

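/* Record an engine's simple and/or chained handler for one op id and elect
 * the active handler: the first registration wins by default, after which
 * an engine with a higher priority value takes over. */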
void
vnet_crypto_register_ops_handler_inline (vlib_main_t * vm, u32 engine_index,
                                         vnet_crypto_op_id_t opt,
                                         vnet_crypto_ops_handler_t * fn,
                                         vnet_crypto_chained_ops_handler_t *
                                         cfn)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
  vnet_crypto_op_data_t *otd = cm->opt_data + opt;
  vec_validate_aligned (cm->ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);
  vec_validate_aligned (cm->chained_ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);

  if (fn)
    {
      e->ops_handlers[opt] = fn;
      if (otd->active_engine_index_simple == ~0)
        {
          otd->active_engine_index_simple = engine_index;
          cm->ops_handlers[opt] = fn;
        }

      ae = vec_elt_at_index (cm->engines, otd->active_engine_index_simple);
      if (ae->priority < e->priority)
        crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_SIMPLE);
    }

  if (cfn)
    {
      e->chained_ops_handlers[opt] = cfn;
      if (otd->active_engine_index_chained == ~0)
        {
          otd->active_engine_index_chained = engine_index;
          cm->chained_ops_handlers[opt] = cfn;
        }

      ae = vec_elt_at_index (cm->engines, otd->active_engine_index_chained);
      if (ae->priority < e->priority)
        crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_CHAINED);
    }

  return;
}

void
vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
                                  vnet_crypto_op_id_t opt,
                                  vnet_crypto_ops_handler_t * fn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, 0);
}

void
vnet_crypto_register_chained_ops_handler (vlib_main_t * vm, u32 engine_index,
                                          vnet_crypto_op_id_t opt,
                                          vnet_crypto_chained_ops_handler_t *
                                          fn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, 0, fn);
}

void
vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
                                   vnet_crypto_op_id_t opt,
                                   vnet_crypto_ops_handler_t * fn,
                                   vnet_crypto_chained_ops_handler_t * cfn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, cfn);
}

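/* Async path: record an engine's frame enqueue handler for one async op id.
 * The active engine follows the same priority rules as the simple path,
 * except that an equal-priority engine registering later also wins
 * (ae->priority <= e->priority). */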
void
vnet_crypto_register_enqueue_handler (vlib_main_t *vm, u32 engine_index,
                                      vnet_crypto_async_op_id_t opt,
                                      vnet_crypto_frame_enqueue_t *enqueue_hdl)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
  vnet_crypto_async_op_data_t *otd = cm->async_opt_data + opt;
  vec_validate_aligned (cm->enqueue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS,
                        CLIB_CACHE_LINE_BYTES);
  vec_validate_aligned (cm->dequeue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS,
                        CLIB_CACHE_LINE_BYTES);

  if (!enqueue_hdl)
    return;

  e->enqueue_handlers[opt] = enqueue_hdl;
  if (otd->active_engine_index_async == ~0)
    {
      otd->active_engine_index_async = engine_index;
      cm->enqueue_handlers[opt] = enqueue_hdl;
    }

  ae = vec_elt_at_index (cm->engines, otd->active_engine_index_async);
  if (ae->priority <= e->priority)
    {
      otd->active_engine_index_async = engine_index;
      cm->enqueue_handlers[opt] = enqueue_hdl;
    }

  return;
}

static int
engine_index_cmp (void *v1, void *v2)
{
  u32 *a1 = v1;
  u32 *a2 = v2;

  if (*a1 > *a2)
    return 1;
  if (*a1 < *a2)
    return -1;
  return 0;
}

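/* Rebuild the compact list of dequeue handlers: collect the active engine
 * of every async op id, sort and de-duplicate the engine indices, and store
 * one dequeue handler per distinct engine for the dispatch node to poll. */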
static void
vnet_crypto_update_cm_dequeue_handlers (void)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_op_data_t *otd;
  vnet_crypto_engine_t *e;
  u32 *active_engines = 0, *ei, last_ei = ~0, i;

  vec_reset_length (cm->dequeue_handlers);

  for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_IDS; i++)
    {
      otd = cm->async_opt_data + i;
      if (otd->active_engine_index_async == ~0)
        continue;
      e = cm->engines + otd->active_engine_index_async;
      if (!e->dequeue_handler)
        continue;
      vec_add1 (active_engines, otd->active_engine_index_async);
    }

  vec_sort_with_function (active_engines, engine_index_cmp);

  vec_foreach (ei, active_engines)
    {
      if (ei[0] == last_ei)
        continue;
      if (ei[0] == ~0)
        continue;

      e = cm->engines + ei[0];
      vec_add1 (cm->dequeue_handlers, e->dequeue_handler);
      last_ei = ei[0];
    }

  vec_free (active_engines);
}

void
vnet_crypto_register_dequeue_handler (vlib_main_t *vm, u32 engine_index,
                                      vnet_crypto_frame_dequeue_t *deq_fn)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *e = vec_elt_at_index (cm->engines, engine_index);

  if (!deq_fn)
    return;

  e->dequeue_handler = deq_fn;

  return;
}

void
vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
                                  vnet_crypto_key_handler_t * key_handler)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *e = vec_elt_at_index (cm->engines, engine_index);
  e->key_op_handler = key_handler;
  return;
}

static int
vnet_crypto_key_len_check (vnet_crypto_alg_t alg, u16 length)
{
  switch (alg)
    {
    case VNET_CRYPTO_N_ALGS:
      return 0;
    case VNET_CRYPTO_ALG_NONE:
      return 1;

#define _(n, s, l) \
  case VNET_CRYPTO_ALG_##n: \
    if ((l) == length) \
      return 1; \
    break;
      foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
      /* HMAC allows any key length */
#define _(n, s) \
  case VNET_CRYPTO_ALG_HMAC_##n: \
    return 1;
      foreach_crypto_hmac_alg
#undef _

#define _(n, s) \
  case VNET_CRYPTO_ALG_HASH_##n: \
    return 1;
      foreach_crypto_hash_alg
#undef _
    }

  return 0;
}

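/* Add a key to the shared pool and notify every engine that registered a
 * key handler.  Returns the key index, or ~0 when the key length does not
 * match the algorithm.  A minimal sketch, assuming the caller owns the key
 * material (the algorithm choice is illustrative):
 *
 *   u8 key_data[16] = { 0 };
 *   u32 ki = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_AES_128_CBC,
 *                                 key_data, sizeof (key_data));
 *   if (ki == ~0)
 *     ; // unsupported algorithm or bad key length
 */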
u32
vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg, u8 * data,
                     u16 length)
{
  u32 index;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key;
  u8 need_barrier_sync = 0;

  if (!vnet_crypto_key_len_check (alg, length))
    return ~0;

  pool_get_aligned_will_expand (cm->keys, need_barrier_sync,
                                CLIB_CACHE_LINE_BYTES);
  /* If the key pool is about to expand, take the worker barrier so no
   * worker dereferences cm->keys while it is being reallocated. */
  if (need_barrier_sync)
    vlib_worker_thread_barrier_sync (vm);

  pool_get_zero (cm->keys, key);

  if (need_barrier_sync)
    vlib_worker_thread_barrier_release (vm);

  index = key - cm->keys;
  key->type = VNET_CRYPTO_KEY_TYPE_DATA;
  key->alg = alg;
  vec_validate_aligned (key->data, length - 1, CLIB_CACHE_LINE_BYTES);
  clib_memcpy (key->data, data, length);
  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
  /* *INDENT-ON* */
  return index;
}

void
vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key = pool_elt_at_index (cm->keys, index);

  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_DEL, index);
  /* *INDENT-ON* */

  if (key->type == VNET_CRYPTO_KEY_TYPE_DATA)
    {
      clib_memset (key->data, 0, vec_len (key->data));
      vec_free (key->data);
    }
  else if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
    {
      key->index_crypto = key->index_integ = 0;
    }

  pool_put (cm->keys, key);
}

vnet_crypto_async_alg_t
vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
                       vnet_crypto_alg_t integ_alg)
{
#define _(c, h, s, k ,d) \
  if (crypto_alg == VNET_CRYPTO_ALG_##c && \
      integ_alg == VNET_CRYPTO_ALG_HMAC_##h) \
    return VNET_CRYPTO_ALG_##c##_##h##_TAG##d;
  foreach_crypto_link_async_alg
#undef _
  return ~0;
}

u32
vnet_crypto_key_add_linked (vlib_main_t * vm,
                            vnet_crypto_key_index_t index_crypto,
                            vnet_crypto_key_index_t index_integ)
{
  u32 index;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key_crypto, *key_integ, *key;
  vnet_crypto_async_alg_t linked_alg;

  key_crypto = pool_elt_at_index (cm->keys, index_crypto);
  key_integ = pool_elt_at_index (cm->keys, index_integ);

  linked_alg = vnet_crypto_link_algs (key_crypto->alg, key_integ->alg);
  if (linked_alg == ~0)
    return ~0;

  pool_get_zero (cm->keys, key);
  index = key - cm->keys;
  key->type = VNET_CRYPTO_KEY_TYPE_LINK;
  key->index_crypto = index_crypto;
  key->index_integ = index_integ;
  key->async_alg = linked_alg;

  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
  /* *INDENT-ON* */

  return index;
}

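/* Enable or disable the crypto dispatch node on all worker threads,
 * choosing polling or interrupt mode based on the configured dispatch mode
 * and the current async reference count. */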
clib_error_t *
crypto_dispatch_enable_disable (int is_enable)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
  u8 state_change = 0;

  CLIB_MEMORY_STORE_BARRIER ();
  if (is_enable && cm->async_refcnt > 0)
    {
      state_change = 1;
      state = cm->dispatch_mode == VNET_CRYPTO_ASYNC_DISPATCH_POLLING ?
        VLIB_NODE_STATE_POLLING : VLIB_NODE_STATE_INTERRUPT;
    }

  if (!is_enable && cm->async_refcnt == 0)
    {
      state_change = 1;
      state = VLIB_NODE_STATE_DISABLED;
    }

  if (state_change)
    for (i = skip_master; i < tm->n_vlib_mains; i++)
      {
        vlib_main_t *ovm = vlib_get_main_by_index (i);
        if (state != vlib_node_get_state (ovm, cm->crypto_node_index))
          vlib_node_set_state (ovm, cm->crypto_node_index, state);
      }
  return 0;
}

static_always_inline void
crypto_set_active_async_engine (vnet_crypto_async_op_data_t * od,
                                vnet_crypto_async_op_id_t id, u32 ei)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);

  if (ce->enqueue_handlers[id] && ce->dequeue_handler)
    {
      od->active_engine_index_async = ei;
      cm->enqueue_handlers[id] = ce->enqueue_handlers[id];
      cm->dequeue_handlers[id] = ce->dequeue_handler;
    }
}

int
vnet_crypto_set_async_handler2 (char *alg_name, char *engine)
{
  uword *p;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_alg_data_t *ad;
  int i;

  if (cm->async_refcnt)
    return -EBUSY;

  p = hash_get_mem (cm->async_alg_index_by_name, alg_name);
  if (!p)
    return -1;

  ad = vec_elt_at_index (cm->async_algs, p[0]);

  p = hash_get_mem (cm->engine_index_by_name, engine);
  if (!p)
    return -1;

  for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_TYPES; i++)
    {
      vnet_crypto_async_op_data_t *od;
      vnet_crypto_async_op_id_t id = ad->op_by_type[i];
      if (id == 0)
        continue;

      od = cm->async_opt_data + id;
      crypto_set_active_async_engine (od, id, p[0]);
    }

  vnet_crypto_update_cm_dequeue_handlers ();

  return 0;
}

u32
vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_next_node_t *nn = 0;
  vlib_node_t *cc, *pn;
  uword index = vec_len (cm->next_nodes);

  pn = vlib_get_node_by_name (vm, (u8 *) post_node_name);
  if (!pn)
    return ~0;

  /* *INDENT-OFF* */
  vec_foreach (nn, cm->next_nodes)
    {
      if (nn->node_idx == pn->index)
        return nn->next_idx;
    }
  /* *INDENT-ON* */

  vec_validate (cm->next_nodes, index);
  nn = vec_elt_at_index (cm->next_nodes, index);

  cc = vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch");
  nn->next_idx = vlib_node_add_named_next (vm, cc->index, post_node_name);
  nn->node_idx = pn->index;

  return nn->next_idx;
}

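/* Reference-counted request to turn the async crypto path on or off.  The
 * first enable starts the crypto dispatch node on all workers (and rebuilds
 * the dequeue handler list); the last disable stops it. */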
void
vnet_crypto_request_async_mode (int is_enable)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
  u8 state_change = 0;

  CLIB_MEMORY_STORE_BARRIER ();
  if (is_enable && cm->async_refcnt == 0)
    {
      state_change = 1;
      state = cm->dispatch_mode == VNET_CRYPTO_ASYNC_DISPATCH_POLLING ?
        VLIB_NODE_STATE_POLLING : VLIB_NODE_STATE_INTERRUPT;
    }
  if (!is_enable && cm->async_refcnt == 1)
    {
      state_change = 1;
      state = VLIB_NODE_STATE_DISABLED;
    }

  if (state_change)
    {
      for (i = skip_master; i < tm->n_vlib_mains; i++)
        {
          vlib_main_t *ovm = vlib_get_main_by_index (i);
          if (state != vlib_node_get_state (ovm, cm->crypto_node_index))
            vlib_node_set_state (ovm, cm->crypto_node_index, state);
        }

      if (is_enable)
        vnet_crypto_update_cm_dequeue_handlers ();
    }

  if (is_enable)
    cm->async_refcnt += 1;
  else if (cm->async_refcnt > 0)
    cm->async_refcnt -= 1;
}

void
vnet_crypto_set_async_dispatch_mode (u8 mode)
{
  vnet_crypto_main_t *cm = &crypto_main;
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;

  CLIB_MEMORY_STORE_BARRIER ();
  cm->dispatch_mode = mode;
  if (mode == VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT)
    {
      state = cm->async_refcnt == 0 ?
        VLIB_NODE_STATE_DISABLED : VLIB_NODE_STATE_INTERRUPT;
    }
  else if (mode == VNET_CRYPTO_ASYNC_DISPATCH_POLLING)
    {
      state = cm->async_refcnt == 0 ?
        VLIB_NODE_STATE_DISABLED : VLIB_NODE_STATE_POLLING;
    }

  for (i = skip_master; i < tm->n_vlib_mains; i++)
    {
      vlib_main_t *ovm = vlib_get_main_by_index (i);
      if (state != vlib_node_get_state (ovm, cm->crypto_node_index))
        vlib_node_set_state (ovm, cm->crypto_node_index, state);
    }
}

int
vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t op)
{
  vnet_crypto_main_t *cm = &crypto_main;

  return (op < vec_len (cm->enqueue_handlers) &&
          NULL != cm->enqueue_handlers[op]);
}

static void
vnet_crypto_init_cipher_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t eid,
                              vnet_crypto_op_id_t did, char *name, u8 is_aead)
{
  vnet_crypto_op_type_t eopt, dopt;
  vnet_crypto_main_t *cm = &crypto_main;

  cm->algs[alg].name = name;
  cm->opt_data[eid].alg = cm->opt_data[did].alg = alg;
  cm->opt_data[eid].active_engine_index_simple = ~0;
  cm->opt_data[did].active_engine_index_simple = ~0;
  cm->opt_data[eid].active_engine_index_chained = ~0;
  cm->opt_data[did].active_engine_index_chained = ~0;
  if (is_aead)
    {
      eopt = VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT;
      dopt = VNET_CRYPTO_OP_TYPE_AEAD_DECRYPT;
    }
  else
    {
      eopt = VNET_CRYPTO_OP_TYPE_ENCRYPT;
      dopt = VNET_CRYPTO_OP_TYPE_DECRYPT;
    }
  cm->opt_data[eid].type = eopt;
  cm->opt_data[did].type = dopt;
  cm->algs[alg].op_by_type[eopt] = eid;
  cm->algs[alg].op_by_type[dopt] = did;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_hash_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t id,
                            char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  cm->algs[alg].name = name;
  cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HASH] = id;
  cm->opt_data[id].alg = alg;
  cm->opt_data[id].active_engine_index_simple = ~0;
  cm->opt_data[id].active_engine_index_chained = ~0;
  cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HASH;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_hmac_data (vnet_crypto_alg_t alg,
                            vnet_crypto_op_id_t id, char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  cm->algs[alg].name = name;
  cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HMAC] = id;
  cm->opt_data[id].alg = alg;
  cm->opt_data[id].active_engine_index_simple = ~0;
  cm->opt_data[id].active_engine_index_chained = ~0;
  cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HMAC;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_async_data (vnet_crypto_async_alg_t alg,
                             vnet_crypto_async_op_id_t eid,
                             vnet_crypto_async_op_id_t did, char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;

  cm->async_algs[alg].name = name;
  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT] = eid;
  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT] = did;
  cm->async_opt_data[eid].type = VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT;
  cm->async_opt_data[eid].alg = alg;
  cm->async_opt_data[eid].active_engine_index_async = ~0;
  cm->async_opt_data[did].type = VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT;
  cm->async_opt_data[did].alg = alg;
  cm->async_opt_data[did].active_engine_index_async = ~0;
  hash_set_mem (cm->async_alg_index_by_name, name, alg);
}

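/* Init: create the name-to-index hash tables, size the per-thread frame
 * pools, and populate algorithm/op data for every cipher, AEAD, HMAC, hash
 * and async algorithm via the foreach_* macros from crypto.h. */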
clib_error_t *
vnet_crypto_init (vlib_main_t * vm)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vnet_crypto_thread_t *ct = 0;

  cm->dispatch_mode = VNET_CRYPTO_ASYNC_DISPATCH_POLLING;
  cm->engine_index_by_name = hash_create_string ( /* size */ 0,
                                                 sizeof (uword));
  cm->alg_index_by_name = hash_create_string (0, sizeof (uword));
  cm->async_alg_index_by_name = hash_create_string (0, sizeof (uword));
  vec_validate_aligned (cm->threads, tm->n_vlib_mains, CLIB_CACHE_LINE_BYTES);
  vec_foreach (ct, cm->threads)
    pool_alloc_aligned (ct->frame_pool, VNET_CRYPTO_FRAME_POOL_SIZE,
                        CLIB_CACHE_LINE_BYTES);
  vec_validate (cm->algs, VNET_CRYPTO_N_ALGS);
  vec_validate (cm->async_algs, VNET_CRYPTO_N_ASYNC_ALGS);

#define _(n, s, l) \
  vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
                                VNET_CRYPTO_OP_##n##_ENC, \
                                VNET_CRYPTO_OP_##n##_DEC, s, 0);
  foreach_crypto_cipher_alg;
#undef _
#define _(n, s, l) \
  vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
                                VNET_CRYPTO_OP_##n##_ENC, \
                                VNET_CRYPTO_OP_##n##_DEC, s, 1);
  foreach_crypto_aead_alg;
#undef _
#define _(n, s) \
  vnet_crypto_init_hmac_data (VNET_CRYPTO_ALG_HMAC_##n, \
                              VNET_CRYPTO_OP_##n##_HMAC, "hmac-" s);
  foreach_crypto_hmac_alg;
#undef _
#define _(n, s) \
  vnet_crypto_init_hash_data (VNET_CRYPTO_ALG_HASH_##n, \
                              VNET_CRYPTO_OP_##n##_HASH, s);
  foreach_crypto_hash_alg;
#undef _
#define _(n, s, k, t, a) \
  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a, \
                               VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
                               VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC, \
                               s);
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k ,d) \
  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##c##_##h##_TAG##d, \
                               VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
                               VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC, \
                               s);
  foreach_crypto_link_async_alg
#undef _
  cm->crypto_node_index =
    vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch")->index;

  return 0;
}

VLIB_INIT_FUNCTION (vnet_crypto_init);

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */