/*
 * Copyright (c) 2018 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <stdbool.h>
#include <vlib/vlib.h>
#include <vnet/crypto/crypto.h>

vnet_crypto_main_t crypto_main;

static_always_inline void
crypto_set_op_status (vnet_crypto_op_t * ops[], u32 n_ops, int status)
{
  while (n_ops--)
    {
      ops[0]->status = status;
      ops++;
    }
}

static_always_inline u32
vnet_crypto_process_ops_call_handler (vlib_main_t * vm,
                                      vnet_crypto_main_t * cm,
                                      vnet_crypto_op_id_t opt,
                                      vnet_crypto_op_t * ops[],
                                      vnet_crypto_op_chunk_t * chunks,
                                      u32 n_ops)
{
  u32 rv = 0;
  if (n_ops == 0)
    return 0;

  if (chunks)
    {
      if (cm->chained_ops_handlers[opt] == 0)
        crypto_set_op_status (ops, n_ops,
                              VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
      else
        rv = (cm->chained_ops_handlers[opt]) (vm, ops, chunks, n_ops);
    }
  else
    {
      if (cm->ops_handlers[opt] == 0)
        crypto_set_op_status (ops, n_ops,
                              VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
      else
        rv = (cm->ops_handlers[opt]) (vm, ops, n_ops);
    }
  return rv;
}

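/*
 * Dispatch a batch of ops to the active engine handlers. Ops are grouped
 * into contiguous runs of the same op id (bounded by VLIB_FRAME_SIZE) so
 * each run can be handed to a single handler call.
 */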
static_always_inline u32
vnet_crypto_process_ops_inline (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                vnet_crypto_op_chunk_t * chunks, u32 n_ops)
{
  vnet_crypto_main_t *cm = &crypto_main;
  const int op_q_size = VLIB_FRAME_SIZE;
  vnet_crypto_op_t *op_queue[op_q_size];
  vnet_crypto_op_id_t opt, current_op_type = ~0;
  u32 n_op_queue = 0;
  u32 rv = 0, i;

  ASSERT (n_ops >= 1);

  for (i = 0; i < n_ops; i++)
    {
      opt = ops[i].op;

      if (current_op_type != opt || n_op_queue >= op_q_size)
        {
          rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
                                                      op_queue, chunks,
                                                      n_op_queue);
          n_op_queue = 0;
          current_op_type = opt;
        }

      op_queue[n_op_queue++] = &ops[i];
    }

  rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
                                              op_queue, chunks, n_op_queue);
  return rv;
}

u32
vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[], u32 n_ops)
{
  return vnet_crypto_process_ops_inline (vm, ops, 0, n_ops);
}

u32
vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                 vnet_crypto_op_chunk_t * chunks, u32 n_ops)
{
  return vnet_crypto_process_ops_inline (vm, ops, chunks, n_ops);
}

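/* Register a crypto engine and return its index in crypto_main.engines. */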
u32
vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
                             char *desc)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *p;

  vec_add2 (cm->engines, p, 1);
  p->name = name;
  p->desc = desc;
  p->priority = prio;

  hash_set_mem (cm->engine_index_by_name, p->name, p - cm->engines);

  return p - cm->engines;
}

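/*
 * Make engine 'ei' the active provider for op id 'id', for the simple
 * and/or chained handler class selected by 'oct', provided the engine
 * actually implements that handler.
 */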
static_always_inline void
crypto_set_active_engine (vnet_crypto_op_data_t * od,
                          vnet_crypto_op_id_t id, u32 ei,
                          crypto_op_class_type_t oct)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);

  if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_CHAINED)
    {
      if (ce->chained_ops_handlers[id])
        {
          od->active_engine_index_chained = ei;
          cm->chained_ops_handlers[id] = ce->chained_ops_handlers[id];
        }
    }

  if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_SIMPLE)
    {
      if (ce->ops_handlers[id])
        {
          od->active_engine_index_simple = ei;
          cm->ops_handlers[id] = ce->ops_handlers[id];
        }
    }
}

int
vnet_crypto_set_handler2 (char *alg_name, char *engine,
                          crypto_op_class_type_t oct)
{
  uword *p;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_alg_data_t *ad;
  int i;

  p = hash_get_mem (cm->alg_index_by_name, alg_name);
  if (!p)
    return -1;

  ad = vec_elt_at_index (cm->algs, p[0]);

  p = hash_get_mem (cm->engine_index_by_name, engine);
  if (!p)
    return -1;

  for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
    {
      vnet_crypto_op_data_t *od;
      vnet_crypto_op_id_t id = ad->op_by_type[i];
      if (id == 0)
        continue;

      od = cm->opt_data + id;
      crypto_set_active_engine (od, id, p[0], oct);
    }

  return 0;
}

int
vnet_crypto_is_set_handler (vnet_crypto_alg_t alg)
{
  vnet_crypto_main_t *cm = &crypto_main;

  return (alg < vec_len (cm->ops_handlers) && NULL != cm->ops_handlers[alg]);
}

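/*
 * Record an engine's simple and/or chained handler for an op id. The first
 * registration becomes active by default; a later registration takes over
 * when its engine's priority value exceeds that of the currently active
 * engine.
 */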
void
vnet_crypto_register_ops_handler_inline (vlib_main_t * vm, u32 engine_index,
                                         vnet_crypto_op_id_t opt,
                                         vnet_crypto_ops_handler_t * fn,
                                         vnet_crypto_chained_ops_handler_t *
                                         cfn)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
  vnet_crypto_op_data_t *otd = cm->opt_data + opt;
  vec_validate_aligned (cm->ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);
  vec_validate_aligned (cm->chained_ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);

  if (fn)
    {
      e->ops_handlers[opt] = fn;
      if (otd->active_engine_index_simple == ~0)
        {
          otd->active_engine_index_simple = engine_index;
          cm->ops_handlers[opt] = fn;
        }

      ae = vec_elt_at_index (cm->engines, otd->active_engine_index_simple);
      if (ae->priority < e->priority)
        crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_SIMPLE);
    }

  if (cfn)
    {
      e->chained_ops_handlers[opt] = cfn;
      if (otd->active_engine_index_chained == ~0)
        {
          otd->active_engine_index_chained = engine_index;
          cm->chained_ops_handlers[opt] = cfn;
        }

      ae = vec_elt_at_index (cm->engines, otd->active_engine_index_chained);
      if (ae->priority < e->priority)
        crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_CHAINED);
    }

  return;
}

void
vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
                                  vnet_crypto_op_id_t opt,
                                  vnet_crypto_ops_handler_t * fn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, 0);
}

void
vnet_crypto_register_chained_ops_handler (vlib_main_t * vm, u32 engine_index,
                                          vnet_crypto_op_id_t opt,
                                          vnet_crypto_chained_ops_handler_t *
                                          fn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, 0, fn);
}

void
vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
                                   vnet_crypto_op_id_t opt,
                                   vnet_crypto_ops_handler_t * fn,
                                   vnet_crypto_chained_ops_handler_t * cfn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, cfn);
}

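/*
 * Record an engine's async frame enqueue/dequeue handlers for an async op
 * id, following the same first-wins / higher-priority-takes-over policy as
 * the synchronous registration above.
 */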
void
vnet_crypto_register_async_handler (vlib_main_t * vm, u32 engine_index,
                                    vnet_crypto_async_op_id_t opt,
                                    vnet_crypto_frame_enqueue_t * enqueue_hdl,
                                    vnet_crypto_frame_dequeue_t * dequeue_hdl)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
  vnet_crypto_async_op_data_t *otd = cm->async_opt_data + opt;
  vec_validate_aligned (cm->enqueue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);
  vec_validate_aligned (cm->dequeue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);

  /* both the enqueue and the dequeue handler should be present */
  if (!enqueue_hdl && !dequeue_hdl)
    return;

  e->enqueue_handlers[opt] = enqueue_hdl;
  e->dequeue_handlers[opt] = dequeue_hdl;
  if (otd->active_engine_index_async == ~0)
    {
      otd->active_engine_index_async = engine_index;
      cm->enqueue_handlers[opt] = enqueue_hdl;
      cm->dequeue_handlers[opt] = dequeue_hdl;
    }

  ae = vec_elt_at_index (cm->engines, otd->active_engine_index_async);
  if (ae->priority < e->priority)
    {
      otd->active_engine_index_async = engine_index;
      cm->enqueue_handlers[opt] = enqueue_hdl;
      cm->dequeue_handlers[opt] = dequeue_hdl;
    }

  return;
}

void
vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
                                  vnet_crypto_key_handler_t * key_handler)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *e = vec_elt_at_index (cm->engines, engine_index);
  e->key_op_handler = key_handler;
  return;
}

static int
vnet_crypto_key_len_check (vnet_crypto_alg_t alg, u16 length)
{
  switch (alg)
    {
    case VNET_CRYPTO_N_ALGS:
      return 0;
    case VNET_CRYPTO_ALG_NONE:
      return 1;

#define _(n, s, l) \
  case VNET_CRYPTO_ALG_##n: \
    if ((l) == length) \
      return 1; \
    break;
      foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
      /* HMAC allows any key length */
#define _(n, s) \
  case VNET_CRYPTO_ALG_HMAC_##n: \
    return 1;
      foreach_crypto_hmac_alg
#undef _
    }

  return 0;
}

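/*
 * Add a key of the given algorithm. The key material is copied into a new
 * pool entry and every engine with a key handler is notified with
 * VNET_CRYPTO_KEY_OP_ADD. Returns the key index, or ~0 on a length mismatch.
 */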
u32
vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg, u8 * data,
                     u16 length)
{
  u32 index;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key;

  if (!vnet_crypto_key_len_check (alg, length))
    return ~0;

  pool_get_zero (cm->keys, key);
  index = key - cm->keys;
  key->type = VNET_CRYPTO_KEY_TYPE_DATA;
  key->alg = alg;
  vec_validate_aligned (key->data, length - 1, CLIB_CACHE_LINE_BYTES);
  clib_memcpy (key->data, data, length);
  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
  /* *INDENT-ON* */
  return index;
}

void
vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key = pool_elt_at_index (cm->keys, index);

  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_DEL, index);
  /* *INDENT-ON* */

  if (key->type == VNET_CRYPTO_KEY_TYPE_DATA)
    {
      clib_memset (key->data, 0, vec_len (key->data));
      vec_free (key->data);
    }
  else if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
    {
      key->index_crypto = key->index_integ = 0;
    }

  pool_put (cm->keys, key);
}

vnet_crypto_async_alg_t
vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
                       vnet_crypto_alg_t integ_alg)
{
#define _(c, h, s, k, d) \
  if (crypto_alg == VNET_CRYPTO_ALG_##c && \
      integ_alg == VNET_CRYPTO_ALG_HMAC_##h) \
    return VNET_CRYPTO_ALG_##c##_##h##_TAG##d;
  foreach_crypto_link_async_alg
#undef _
  return ~0;
}

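/*
 * Create a "linked" key that pairs an existing crypto key with an existing
 * integrity key, for async algorithms that combine a cipher with an HMAC.
 * Returns the new key index, or ~0 if the two algorithms cannot be linked.
 */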
u32
vnet_crypto_key_add_linked (vlib_main_t * vm,
                            vnet_crypto_key_index_t index_crypto,
                            vnet_crypto_key_index_t index_integ)
{
  u32 index;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key_crypto, *key_integ, *key;
  vnet_crypto_async_alg_t linked_alg;

  key_crypto = pool_elt_at_index (cm->keys, index_crypto);
  key_integ = pool_elt_at_index (cm->keys, index_integ);

  if (!key_crypto || !key_integ)
    return ~0;

  linked_alg = vnet_crypto_link_algs (key_crypto->alg, key_integ->alg);
  if (linked_alg == ~0)
    return ~0;

  pool_get_zero (cm->keys, key);
  index = key - cm->keys;
  key->type = VNET_CRYPTO_KEY_TYPE_LINK;
  key->index_crypto = index_crypto;
  key->index_integ = index_integ;
  key->async_alg = linked_alg;

  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
  /* *INDENT-ON* */

  return index;
}

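/*
 * Enable or disable the crypto-dispatch node on all worker threads,
 * choosing polling or interrupt mode based on the configured dispatch mode.
 */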
clib_error_t *
crypto_dispatch_enable_disable (int is_enable)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
  u8 state_change = 0;

  CLIB_MEMORY_STORE_BARRIER ();
  if (is_enable && cm->async_refcnt > 0)
    {
      state_change = 1;
      state =
        cm->dispatch_mode ==
        VNET_CRYPTO_ASYNC_DISPATCH_POLLING ? VLIB_NODE_STATE_POLLING :
        VLIB_NODE_STATE_INTERRUPT;
    }

  if (!is_enable && cm->async_refcnt == 0)
    {
      state_change = 1;
      state = VLIB_NODE_STATE_DISABLED;
    }

  if (state_change)
    for (i = skip_master; i < tm->n_vlib_mains; i++)
      {
        if (state !=
            vlib_node_get_state (vlib_mains[i], cm->crypto_node_index))
          vlib_node_set_state (vlib_mains[i], cm->crypto_node_index, state);
      }
  return 0;
}

static_always_inline void
crypto_set_active_async_engine (vnet_crypto_async_op_data_t * od,
                                vnet_crypto_async_op_id_t id, u32 ei)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);

  if (ce->enqueue_handlers[id] && ce->dequeue_handlers[id])
    {
      od->active_engine_index_async = ei;
      cm->enqueue_handlers[id] = ce->enqueue_handlers[id];
      cm->dequeue_handlers[id] = ce->dequeue_handlers[id];
    }
}

int
vnet_crypto_set_async_handler2 (char *alg_name, char *engine)
{
  uword *p;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_alg_data_t *ad;
  int i;

  p = hash_get_mem (cm->async_alg_index_by_name, alg_name);
  if (!p)
    return -1;

  ad = vec_elt_at_index (cm->async_algs, p[0]);

  p = hash_get_mem (cm->engine_index_by_name, engine);
  if (!p)
    return -1;

  for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_TYPES; i++)
    {
      vnet_crypto_async_op_data_t *od;
      vnet_crypto_async_op_id_t id = ad->op_by_type[i];
      if (id == 0)
        continue;

      od = cm->async_opt_data + id;
      crypto_set_active_async_engine (od, id, p[0]);
    }

  return 0;
}

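/*
 * Register a post-processing node that dequeued async crypto frames are
 * handed to; returns that node's next index on the crypto-dispatch node.
 */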
u32
vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_next_node_t *nn = 0;
  vlib_node_t *cc, *pn;
  uword index = vec_len (cm->next_nodes);

  pn = vlib_get_node_by_name (vm, (u8 *) post_node_name);
  if (!pn)
    return ~0;

  /* *INDENT-OFF* */
  vec_foreach (nn, cm->next_nodes)
  {
    if (nn->node_idx == pn->index)
      return nn->next_idx;
  }
  /* *INDENT-ON* */

  vec_validate (cm->next_nodes, index);
  nn = vec_elt_at_index (cm->next_nodes, index);

  cc = vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch");
  nn->next_idx = vlib_node_add_named_next (vm, cc->index, post_node_name);
  nn->node_idx = pn->index;

  return nn->next_idx;
}

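/*
 * Reference-counted request to turn async crypto on or off. The dispatch
 * node state is only changed on the first enable and the last disable.
 */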
void
vnet_crypto_request_async_mode (int is_enable)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
  u8 state_change = 0;

  CLIB_MEMORY_STORE_BARRIER ();
  if (is_enable && cm->async_refcnt == 0)
    {
      state_change = 1;
      state =
        cm->dispatch_mode == VNET_CRYPTO_ASYNC_DISPATCH_POLLING ?
        VLIB_NODE_STATE_POLLING : VLIB_NODE_STATE_INTERRUPT;
    }
  if (!is_enable && cm->async_refcnt == 1)
    {
      state_change = 1;
      state = VLIB_NODE_STATE_DISABLED;
    }

  if (state_change)
    for (i = skip_master; i < tm->n_vlib_mains; i++)
      {
        if (state !=
            vlib_node_get_state (vlib_mains[i], cm->crypto_node_index))
          vlib_node_set_state (vlib_mains[i], cm->crypto_node_index, state);
      }

  if (is_enable)
    cm->async_refcnt += 1;
  else if (cm->async_refcnt > 0)
    cm->async_refcnt -= 1;
}

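/*
 * Switch the crypto-dispatch node between polling and interrupt mode on all
 * worker threads, honouring the current async reference count.
 */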
void
vnet_crypto_set_async_dispatch_mode (u8 mode)
{
  vnet_crypto_main_t *cm = &crypto_main;
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;

  CLIB_MEMORY_STORE_BARRIER ();
  cm->dispatch_mode = mode;
  if (mode == VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT)
    {
      state =
        cm->async_refcnt == 0 ?
        VLIB_NODE_STATE_DISABLED : VLIB_NODE_STATE_INTERRUPT;
    }
  else if (mode == VNET_CRYPTO_ASYNC_DISPATCH_POLLING)
    {
      state =
        cm->async_refcnt == 0 ?
        VLIB_NODE_STATE_DISABLED : VLIB_NODE_STATE_POLLING;
    }

  for (i = skip_master; i < tm->n_vlib_mains; i++)
    {
      if (state != vlib_node_get_state (vlib_mains[i], cm->crypto_node_index))
        vlib_node_set_state (vlib_mains[i], cm->crypto_node_index, state);
    }
}

int
vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t op)
{
  vnet_crypto_main_t *cm = &crypto_main;

  return (op < vec_len (cm->enqueue_handlers) &&
          NULL != cm->enqueue_handlers[op]);
}

static void
vnet_crypto_init_cipher_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t eid,
                              vnet_crypto_op_id_t did, char *name, u8 is_aead)
{
  vnet_crypto_op_type_t eopt, dopt;
  vnet_crypto_main_t *cm = &crypto_main;

  cm->algs[alg].name = name;
  cm->opt_data[eid].alg = cm->opt_data[did].alg = alg;
  cm->opt_data[eid].active_engine_index_simple = ~0;
  cm->opt_data[did].active_engine_index_simple = ~0;
  cm->opt_data[eid].active_engine_index_chained = ~0;
  cm->opt_data[did].active_engine_index_chained = ~0;
  if (is_aead)
    {
      eopt = VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT;
      dopt = VNET_CRYPTO_OP_TYPE_AEAD_DECRYPT;
    }
  else
    {
      eopt = VNET_CRYPTO_OP_TYPE_ENCRYPT;
      dopt = VNET_CRYPTO_OP_TYPE_DECRYPT;
    }
  cm->opt_data[eid].type = eopt;
  cm->opt_data[did].type = dopt;
  cm->algs[alg].op_by_type[eopt] = eid;
  cm->algs[alg].op_by_type[dopt] = did;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_hmac_data (vnet_crypto_alg_t alg,
                            vnet_crypto_op_id_t id, char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  cm->algs[alg].name = name;
  cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HMAC] = id;
  cm->opt_data[id].alg = alg;
  cm->opt_data[id].active_engine_index_simple = ~0;
  cm->opt_data[id].active_engine_index_chained = ~0;
  cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HMAC;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_async_data (vnet_crypto_async_alg_t alg,
                             vnet_crypto_async_op_id_t eid,
                             vnet_crypto_async_op_id_t did, char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;

  cm->async_algs[alg].name = name;
  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT] = eid;
  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT] = did;
  cm->async_opt_data[eid].type = VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT;
  cm->async_opt_data[eid].alg = alg;
  cm->async_opt_data[eid].active_engine_index_async = ~0;
  cm->async_opt_data[did].type = VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT;
  cm->async_opt_data[did].alg = alg;
  cm->async_opt_data[did].active_engine_index_async = ~0;
  hash_set_mem (cm->async_alg_index_by_name, name, alg);
}

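/*
 * Init function: set up the name hashes, per-thread frame pools and the
 * per-algorithm op tables for cipher, AEAD, HMAC and async algorithms.
 */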
clib_error_t *
vnet_crypto_init (vlib_main_t * vm)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vnet_crypto_thread_t *ct = 0;

  cm->dispatch_mode = VNET_CRYPTO_ASYNC_DISPATCH_POLLING;
  cm->engine_index_by_name = hash_create_string ( /* size */ 0,
                                                  sizeof (uword));
  cm->alg_index_by_name = hash_create_string (0, sizeof (uword));
  cm->async_alg_index_by_name = hash_create_string (0, sizeof (uword));
  vec_validate_aligned (cm->threads, tm->n_vlib_mains, CLIB_CACHE_LINE_BYTES);
  vec_foreach (ct, cm->threads)
    pool_alloc_aligned (ct->frame_pool, 256, CLIB_CACHE_LINE_BYTES);
  vec_validate (cm->algs, VNET_CRYPTO_N_ALGS);
  vec_validate (cm->async_algs, VNET_CRYPTO_N_ASYNC_ALGS);
  clib_bitmap_validate (cm->async_active_ids, VNET_CRYPTO_ASYNC_OP_N_IDS - 1);

#define _(n, s, l) \
  vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
                                VNET_CRYPTO_OP_##n##_ENC, \
                                VNET_CRYPTO_OP_##n##_DEC, s, 0);
  foreach_crypto_cipher_alg;
#undef _
#define _(n, s, l) \
  vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
                                VNET_CRYPTO_OP_##n##_ENC, \
                                VNET_CRYPTO_OP_##n##_DEC, s, 1);
  foreach_crypto_aead_alg;
#undef _
#define _(n, s) \
  vnet_crypto_init_hmac_data (VNET_CRYPTO_ALG_HMAC_##n, \
                              VNET_CRYPTO_OP_##n##_HMAC, "hmac-" s);
  foreach_crypto_hmac_alg;
#undef _
#define _(n, s, k, t, a) \
  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a, \
                               VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
                               VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC, \
                               s);
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k, d) \
  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##c##_##h##_TAG##d, \
                               VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
                               VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC, \
                               s);
  foreach_crypto_link_async_alg
#undef _
  cm->crypto_node_index =
    vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch")->index;

  return 0;
}

VLIB_INIT_FUNCTION (vnet_crypto_init);

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */