/*
 * Copyright (c) 2018 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <stdbool.h>
#include <vlib/vlib.h>
#include <vnet/crypto/crypto.h>

vnet_crypto_main_t crypto_main;

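/*
 * Set the status of every op in the array, e.g. to
 * VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER when no engine handler exists.
 */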
static_always_inline void
crypto_set_op_status (vnet_crypto_op_t * ops[], u32 n_ops, int status)
{
  while (n_ops--)
    {
      ops[0]->status = status;
      ops++;
    }
}

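/*
 * Hand a batch of ops that all share the same op id to the currently
 * active handler: the chained-buffer handler when a chunk array is
 * supplied, the simple handler otherwise.  If no handler is registered,
 * every op is marked FAIL_NO_HANDLER.  Returns the handler's return
 * value (0 when nothing was processed).
 */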
static_always_inline u32
vnet_crypto_process_ops_call_handler (vlib_main_t * vm,
                                      vnet_crypto_main_t * cm,
                                      vnet_crypto_op_id_t opt,
                                      vnet_crypto_op_t * ops[],
                                      vnet_crypto_op_chunk_t * chunks,
                                      u32 n_ops)
{
  u32 rv = 0;
  if (n_ops == 0)
    return 0;

  if (chunks)
    {
      if (cm->chained_ops_handlers[opt] == 0)
        crypto_set_op_status (ops, n_ops,
                              VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
      else
        rv = (cm->chained_ops_handlers[opt]) (vm, ops, chunks, n_ops);
    }
  else
    {
      if (cm->ops_handlers[opt] == 0)
        crypto_set_op_status (ops, n_ops,
                              VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
      else
        rv = (cm->ops_handlers[opt]) (vm, ops, n_ops);
    }
  return rv;
}

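/*
 * Common body for the simple and chained entry points below: walk the
 * ops array, queue consecutive ops of the same op id (up to
 * VLIB_FRAME_SIZE at a time) and flush the queue to the active handler
 * whenever the op id changes or the queue fills up.
 */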
static_always_inline u32
vnet_crypto_process_ops_inline (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                vnet_crypto_op_chunk_t * chunks, u32 n_ops)
{
  vnet_crypto_main_t *cm = &crypto_main;
  const int op_q_size = VLIB_FRAME_SIZE;
  vnet_crypto_op_t *op_queue[op_q_size];
  vnet_crypto_op_id_t opt, current_op_type = ~0;
  u32 n_op_queue = 0;
  u32 rv = 0, i;

  ASSERT (n_ops >= 1);

  for (i = 0; i < n_ops; i++)
    {
      opt = ops[i].op;

      if (current_op_type != opt || n_op_queue >= op_q_size)
        {
          rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
                                                      op_queue, chunks,
                                                      n_op_queue);
          n_op_queue = 0;
          current_op_type = opt;
        }

      op_queue[n_op_queue++] = &ops[i];
    }

  rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
                                              op_queue, chunks, n_op_queue);
  return rv;
}

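/*
 * Public entry points: vnet_crypto_process_ops () handles single-buffer
 * ops, vnet_crypto_process_chained_ops () additionally takes the chunk
 * array referenced by chained ops.  A minimal caller sketch (op fields
 * and vnet_crypto_op_init () come from vnet/crypto/crypto.h; the key,
 * iv and buffer setup shown here is illustrative only):
 *
 *   vnet_crypto_op_t op;
 *   vnet_crypto_op_init (&op, VNET_CRYPTO_OP_AES_128_CBC_ENC);
 *   op.key_index = key_index;
 *   op.iv = iv;
 *   op.src = src;
 *   op.dst = dst;
 *   op.len = len;
 *   vnet_crypto_process_ops (vm, &op, 1);
 *   if (op.status != VNET_CRYPTO_OP_STATUS_COMPLETED)
 *     ... handle the failure ...
 */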
u32
vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[], u32 n_ops)
{
  return vnet_crypto_process_ops_inline (vm, ops, 0, n_ops);
}

u32
vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                 vnet_crypto_op_chunk_t * chunks, u32 n_ops)
{
  return vnet_crypto_process_ops_inline (vm, ops, chunks, n_ops);
}

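/*
 * Register a crypto engine.  The returned engine index is used by the
 * engine in subsequent handler registrations; a numerically higher
 * priority makes the engine preferred when several engines register
 * handlers for the same op.
 */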
u32
vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
                             char *desc)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *p;

  vec_add2 (cm->engines, p, 1);
  p->name = name;
  p->desc = desc;
  p->priority = prio;

  hash_set_mem (cm->engine_index_by_name, p->name, p - cm->engines);

  return p - cm->engines;
}

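/*
 * Make engine 'ei' the active provider for op id 'id', for the simple
 * and/or chained handler class, provided the engine actually registered
 * a handler of that class.
 */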
static_always_inline void
crypto_set_active_engine (vnet_crypto_op_data_t * od,
                          vnet_crypto_op_id_t id, u32 ei,
                          crypto_op_class_type_t oct)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);

  if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_CHAINED)
    {
      if (ce->chained_ops_handlers[id])
        {
          od->active_engine_index_chained = ei;
          cm->chained_ops_handlers[id] = ce->chained_ops_handlers[id];
        }
    }

  if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_SIMPLE)
    {
      if (ce->ops_handlers[id])
        {
          od->active_engine_index_simple = ei;
          cm->ops_handlers[id] = ce->ops_handlers[id];
        }
    }
}

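/*
 * Force the named engine to be the active handler for every op id of
 * the named algorithm.  Returns -1 when the algorithm or engine name is
 * unknown, 0 on success.
 */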
int
vnet_crypto_set_handler2 (char *alg_name, char *engine,
                          crypto_op_class_type_t oct)
{
  uword *p;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_alg_data_t *ad;
  int i;

  p = hash_get_mem (cm->alg_index_by_name, alg_name);
  if (!p)
    return -1;

  ad = vec_elt_at_index (cm->algs, p[0]);

  p = hash_get_mem (cm->engine_index_by_name, engine);
  if (!p)
    return -1;

  for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
    {
      vnet_crypto_op_data_t *od;
      vnet_crypto_op_id_t id = ad->op_by_type[i];
      if (id == 0)
        continue;

      od = cm->opt_data + id;
      crypto_set_active_engine (od, id, p[0], oct);
    }

  return 0;
}

int
vnet_crypto_is_set_handler (vnet_crypto_alg_t alg)
{
  vnet_crypto_main_t *cm = &crypto_main;

  return (alg < vec_len (cm->ops_handlers) && NULL != cm->ops_handlers[alg]);
}

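/*
 * Record an engine's simple and/or chained handler for an op id.  The
 * handler becomes active immediately if no engine is active yet for
 * that op id, or if the registering engine has a higher priority than
 * the currently active one.
 */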
void
vnet_crypto_register_ops_handler_inline (vlib_main_t * vm, u32 engine_index,
                                         vnet_crypto_op_id_t opt,
                                         vnet_crypto_ops_handler_t * fn,
                                         vnet_crypto_chained_ops_handler_t *
                                         cfn)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
  vnet_crypto_op_data_t *otd = cm->opt_data + opt;
  vec_validate_aligned (cm->ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);
  vec_validate_aligned (cm->chained_ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);

  if (fn)
    {
      e->ops_handlers[opt] = fn;
      if (otd->active_engine_index_simple == ~0)
        {
          otd->active_engine_index_simple = engine_index;
          cm->ops_handlers[opt] = fn;
        }

      ae = vec_elt_at_index (cm->engines, otd->active_engine_index_simple);
      if (ae->priority < e->priority)
        crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_SIMPLE);
    }

  if (cfn)
    {
      e->chained_ops_handlers[opt] = cfn;
      if (otd->active_engine_index_chained == ~0)
        {
          otd->active_engine_index_chained = engine_index;
          cm->chained_ops_handlers[opt] = cfn;
        }

      ae = vec_elt_at_index (cm->engines, otd->active_engine_index_chained);
      if (ae->priority < e->priority)
        crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_CHAINED);
    }

  return;
}

void
vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
                                  vnet_crypto_op_id_t opt,
                                  vnet_crypto_ops_handler_t * fn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, 0);
}

void
vnet_crypto_register_chained_ops_handler (vlib_main_t * vm, u32 engine_index,
                                          vnet_crypto_op_id_t opt,
                                          vnet_crypto_chained_ops_handler_t *
                                          fn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, 0, fn);
}

void
vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
                                   vnet_crypto_op_id_t opt,
                                   vnet_crypto_ops_handler_t * fn,
                                   vnet_crypto_chained_ops_handler_t * cfn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, cfn);
}

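/*
 * Async counterpart of the registration above: an engine provides a
 * frame enqueue and a frame dequeue handler per async op id, and the
 * highest-priority engine providing them becomes active.
 */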
void
vnet_crypto_register_async_handler (vlib_main_t * vm, u32 engine_index,
                                    vnet_crypto_async_op_id_t opt,
                                    vnet_crypto_frame_enqueue_t * enqueue_hdl,
                                    vnet_crypto_frame_dequeue_t * dequeue_hdl)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
  vnet_crypto_async_op_data_t *otd = cm->async_opt_data + opt;
  vec_validate_aligned (cm->enqueue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);
  vec_validate_aligned (cm->dequeue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);

  /* both the enqueue and the dequeue handler should be present */
  if (!enqueue_hdl && !dequeue_hdl)
    return;

  e->enqueue_handlers[opt] = enqueue_hdl;
  e->dequeue_handlers[opt] = dequeue_hdl;
  if (otd->active_engine_index_async == ~0)
    {
      otd->active_engine_index_async = engine_index;
      cm->enqueue_handlers[opt] = enqueue_hdl;
      cm->dequeue_handlers[opt] = dequeue_hdl;
    }

  ae = vec_elt_at_index (cm->engines, otd->active_engine_index_async);
  if (ae->priority < e->priority)
    {
      otd->active_engine_index_async = engine_index;
      cm->enqueue_handlers[opt] = enqueue_hdl;
      cm->dequeue_handlers[opt] = dequeue_hdl;
    }

  return;
}

void
vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
                                  vnet_crypto_key_handler_t * key_handler)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *e = vec_elt_at_index (cm->engines, engine_index);
  e->key_op_handler = key_handler;
  return;
}

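/*
 * Validate the key length for a given algorithm: ciphers and AEADs must
 * match the length from their descriptor macro, HMACs accept any length,
 * and VNET_CRYPTO_ALG_NONE accepts anything.
 */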
static int
vnet_crypto_key_len_check (vnet_crypto_alg_t alg, u16 length)
{
  switch (alg)
    {
    case VNET_CRYPTO_N_ALGS:
      return 0;
    case VNET_CRYPTO_ALG_NONE:
      return 1;

#define _(n, s, l) \
    case VNET_CRYPTO_ALG_##n: \
      if ((l) == length) \
        return 1; \
      break;
      foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
      /* HMAC allows any key length */
#define _(n, s) \
    case VNET_CRYPTO_ALG_HMAC_##n: \
      return 1;
      foreach_crypto_hmac_alg
#undef _
    }

  return 0;
}

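/*
 * Allocate a key slot, copy the key material and notify every engine
 * that registered a key handler so it can prepare per-key state.
 * Returns the key index, or ~0 if the key length is invalid for the
 * algorithm.
 */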
u32
vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg, u8 * data,
                     u16 length)
{
  u32 index;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key;

  if (!vnet_crypto_key_len_check (alg, length))
    return ~0;

  pool_get_zero (cm->keys, key);
  index = key - cm->keys;
  key->type = VNET_CRYPTO_KEY_TYPE_DATA;
  key->alg = alg;
  vec_validate_aligned (key->data, length - 1, CLIB_CACHE_LINE_BYTES);
  clib_memcpy (key->data, data, length);
  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
  /* *INDENT-ON* */
  return index;
}

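/*
 * Delete a key: notify engines first, then scrub and free the key
 * material (or clear the crypto/integ indices for a linked key) before
 * returning the slot to the pool.
 */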
void
vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key = pool_elt_at_index (cm->keys, index);

  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_DEL, index);
  /* *INDENT-ON* */

  if (key->type == VNET_CRYPTO_KEY_TYPE_DATA)
    {
      clib_memset (key->data, 0, vec_len (key->data));
      vec_free (key->data);
    }
  else if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
    {
      key->index_crypto = key->index_integ = 0;
    }

  pool_put (cm->keys, key);
}

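/*
 * Map a (cipher, HMAC) algorithm pair onto the corresponding linked
 * async algorithm; returns ~0 when the combination is not supported.
 */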
vnet_crypto_async_alg_t
vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
                       vnet_crypto_alg_t integ_alg)
{
#define _(c, h, s, k, d) \
  if (crypto_alg == VNET_CRYPTO_ALG_##c && \
      integ_alg == VNET_CRYPTO_ALG_HMAC_##h) \
    return VNET_CRYPTO_ALG_##c##_##h##_TAG##d;
  foreach_crypto_link_async_alg
#undef _
  return ~0;
}

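/*
 * Create a "linked" key that pairs an existing crypto key with an
 * existing integrity key for async cipher+auth operation.  Returns the
 * new key index, or ~0 if the two algorithms cannot be linked.
 */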
u32
vnet_crypto_key_add_linked (vlib_main_t * vm,
                            vnet_crypto_key_index_t index_crypto,
                            vnet_crypto_key_index_t index_integ)
{
  u32 index;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key_crypto, *key_integ, *key;
  vnet_crypto_async_alg_t linked_alg;

  key_crypto = pool_elt_at_index (cm->keys, index_crypto);
  key_integ = pool_elt_at_index (cm->keys, index_integ);

  if (!key_crypto || !key_integ)
    return ~0;

  linked_alg = vnet_crypto_link_algs (key_crypto->alg, key_integ->alg);
  if (linked_alg == ~0)
    return ~0;

  pool_get_zero (cm->keys, key);
  index = key - cm->keys;
  key->type = VNET_CRYPTO_KEY_TYPE_LINK;
  key->index_crypto = index_crypto;
  key->index_integ = index_integ;
  key->async_alg = linked_alg;

  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
  /* *INDENT-ON* */

  return index;
}

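/*
 * Switch the crypto-dispatch polling node on worker threads between
 * POLLING and DISABLED, driven by the async mode reference count.
 */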
clib_error_t *
crypto_dispatch_enable_disable (int is_enable)
{
  vlib_main_t *vm = vlib_get_main ();
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vlib_node_t *node = vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch");
  vnet_crypto_main_t *cm = &crypto_main;
  u32 skip_master = vlib_num_workers () > 0, i;
  u32 state_change = 0;
  vlib_node_state_t state;

  if (is_enable && cm->async_refcnt > 0)
    {
      state_change = 1;
      state = VLIB_NODE_STATE_POLLING;
    }

  if (!is_enable && cm->async_refcnt == 0)
    {
      state_change = 1;
      state = VLIB_NODE_STATE_DISABLED;
    }

  if (state_change)
    for (i = skip_master; i < tm->n_vlib_mains; i++)
      vlib_node_set_state (vlib_mains[i], node->index, state);

  return 0;
}

static_always_inline void
crypto_set_active_async_engine (vnet_crypto_async_op_data_t * od,
                                vnet_crypto_async_op_id_t id, u32 ei)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);

  if (ce->enqueue_handlers[id] && ce->dequeue_handlers[id])
    {
      od->active_engine_index_async = ei;
      cm->enqueue_handlers[id] = ce->enqueue_handlers[id];
      cm->dequeue_handlers[id] = ce->dequeue_handlers[id];
    }
}

int
vnet_crypto_set_async_handler2 (char *alg_name, char *engine)
{
  uword *p;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_alg_data_t *ad;
  int i;

  p = hash_get_mem (cm->async_alg_index_by_name, alg_name);
  if (!p)
    return -1;

  ad = vec_elt_at_index (cm->async_algs, p[0]);

  p = hash_get_mem (cm->engine_index_by_name, engine);
  if (!p)
    return -1;

  for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_TYPES; i++)
    {
      vnet_crypto_async_op_data_t *od;
      vnet_crypto_async_op_id_t id = ad->op_by_type[i];
      if (id == 0)
        continue;

      od = cm->async_opt_data + id;
      crypto_set_active_async_engine (od, id, p[0]);
    }

  return 0;
}

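/*
 * Register (or look up) the crypto-dispatch next index for a named
 * post-processing node, so async frames can be handed to that node once
 * they are dequeued.  Returns the next index, or ~0 if the node does
 * not exist.
 */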
u32
vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_next_node_t *nn = 0;
  vlib_node_t *cc, *pn;
  uword index = vec_len (cm->next_nodes);

  pn = vlib_get_node_by_name (vm, (u8 *) post_node_name);
  if (!pn)
    return ~0;

  /* *INDENT-OFF* */
  vec_foreach (nn, cm->next_nodes)
    {
      if (nn->node_idx == pn->index)
        return nn->next_idx;
    }
  /* *INDENT-ON* */

  vec_validate (cm->next_nodes, index);
  nn = vec_elt_at_index (cm->next_nodes, index);

  cc = vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch");
  nn->next_idx = vlib_node_add_named_next (vm, cc->index, post_node_name);
  nn->node_idx = pn->index;

  return nn->next_idx;
}

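/*
 * Reference-counted enable/disable of async crypto mode.  The worker
 * crypto-dispatch nodes are switched to POLLING on the first enable and
 * back to DISABLED when the last user disables async mode.
 */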
void
vnet_crypto_request_async_mode (int is_enable)
{
  vlib_main_t *vm = vlib_get_main ();
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vlib_node_t *node = vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch");
  vnet_crypto_main_t *cm = &crypto_main;
  u32 skip_master = vlib_num_workers () > 0, i;
  u32 state_change = 0;
  vlib_node_state_t state;

  if (is_enable && cm->async_refcnt == 0)
    {
      state_change = 1;
      state = VLIB_NODE_STATE_POLLING;
    }

  if (!is_enable && cm->async_refcnt == 1)
    {
      state_change = 1;
      state = VLIB_NODE_STATE_DISABLED;
    }

  if (state_change)
    for (i = skip_master; i < tm->n_vlib_mains; i++)
      vlib_node_set_state (vlib_mains[i], node->index, state);

  if (is_enable)
    cm->async_refcnt += 1;
  else if (cm->async_refcnt > 0)
    cm->async_refcnt -= 1;
}

int
vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t op)
{
  vnet_crypto_main_t *cm = &crypto_main;

  return (op < vec_len (cm->enqueue_handlers) &&
          NULL != cm->enqueue_handlers[op]);
}

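/*
 * Helpers used at init time to populate the per-algorithm and per-op
 * metadata tables (names, op-by-type mappings, active engine markers)
 * for ciphers/AEADs, HMACs and async algorithms respectively.
 */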
static void
vnet_crypto_init_cipher_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t eid,
                              vnet_crypto_op_id_t did, char *name, u8 is_aead)
{
  vnet_crypto_op_type_t eopt, dopt;
  vnet_crypto_main_t *cm = &crypto_main;

  cm->algs[alg].name = name;
  cm->opt_data[eid].alg = cm->opt_data[did].alg = alg;
  cm->opt_data[eid].active_engine_index_simple = ~0;
  cm->opt_data[did].active_engine_index_simple = ~0;
  cm->opt_data[eid].active_engine_index_chained = ~0;
  cm->opt_data[did].active_engine_index_chained = ~0;
  if (is_aead)
    {
      eopt = VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT;
      dopt = VNET_CRYPTO_OP_TYPE_AEAD_DECRYPT;
    }
  else
    {
      eopt = VNET_CRYPTO_OP_TYPE_ENCRYPT;
      dopt = VNET_CRYPTO_OP_TYPE_DECRYPT;
    }
  cm->opt_data[eid].type = eopt;
  cm->opt_data[did].type = dopt;
  cm->algs[alg].op_by_type[eopt] = eid;
  cm->algs[alg].op_by_type[dopt] = did;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_hmac_data (vnet_crypto_alg_t alg,
                            vnet_crypto_op_id_t id, char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  cm->algs[alg].name = name;
  cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HMAC] = id;
  cm->opt_data[id].alg = alg;
  cm->opt_data[id].active_engine_index_simple = ~0;
  cm->opt_data[id].active_engine_index_chained = ~0;
  cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HMAC;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_async_data (vnet_crypto_async_alg_t alg,
                             vnet_crypto_async_op_id_t eid,
                             vnet_crypto_async_op_id_t did, char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;

  cm->async_algs[alg].name = name;
  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT] = eid;
  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT] = did;
  cm->async_opt_data[eid].type = VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT;
  cm->async_opt_data[eid].alg = alg;
  cm->async_opt_data[eid].active_engine_index_async = ~0;
  cm->async_opt_data[did].type = VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT;
  cm->async_opt_data[did].alg = alg;
  cm->async_opt_data[did].active_engine_index_async = ~0;
  hash_set_mem (cm->async_alg_index_by_name, name, alg);
}

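/*
 * Module init: create the name->index hash tables, size the per-thread
 * frame pools and alg/op tables, and expand the per-algorithm
 * descriptor macros into the metadata tables above.
 */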
clib_error_t *
vnet_crypto_init (vlib_main_t * vm)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vnet_crypto_thread_t *ct = 0;
  cm->engine_index_by_name = hash_create_string ( /* size */ 0,
                                                  sizeof (uword));
  cm->alg_index_by_name = hash_create_string (0, sizeof (uword));
  cm->async_alg_index_by_name = hash_create_string (0, sizeof (uword));
  vec_validate_aligned (cm->threads, tm->n_vlib_mains, CLIB_CACHE_LINE_BYTES);
  vec_foreach (ct, cm->threads)
    pool_alloc_aligned (ct->frame_pool, 256, CLIB_CACHE_LINE_BYTES);
  vec_validate (cm->algs, VNET_CRYPTO_N_ALGS);
  vec_validate (cm->async_algs, VNET_CRYPTO_N_ASYNC_ALGS);
  clib_bitmap_validate (cm->async_active_ids, VNET_CRYPTO_ASYNC_OP_N_IDS - 1);

#define _(n, s, l) \
  vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
                                VNET_CRYPTO_OP_##n##_ENC, \
                                VNET_CRYPTO_OP_##n##_DEC, s, 0);
  foreach_crypto_cipher_alg;
#undef _
#define _(n, s, l) \
  vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
                                VNET_CRYPTO_OP_##n##_ENC, \
                                VNET_CRYPTO_OP_##n##_DEC, s, 1);
  foreach_crypto_aead_alg;
#undef _
#define _(n, s) \
  vnet_crypto_init_hmac_data (VNET_CRYPTO_ALG_HMAC_##n, \
                              VNET_CRYPTO_OP_##n##_HMAC, "hmac-" s);
  foreach_crypto_hmac_alg;
#undef _
#define _(n, s, k, t, a) \
  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a, \
                               VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
                               VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC, \
                               s);
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d) \
  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##c##_##h##_TAG##d, \
                               VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
                               VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC, \
                               s);
  foreach_crypto_link_async_alg
#undef _
  return 0;
}

VLIB_INIT_FUNCTION (vnet_crypto_init);

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */