/*
 * Copyright (c) 2018 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <stdbool.h>
#include <vlib/vlib.h>
#include <vnet/crypto/crypto.h>

vnet_crypto_main_t crypto_main;

static_always_inline void
crypto_set_op_status (vnet_crypto_op_t * ops[], u32 n_ops, int status)
{
  while (n_ops--)
    {
      ops[0]->status = status;
      ops++;
    }
}

static_always_inline u32
vnet_crypto_process_ops_call_handler (vlib_main_t * vm,
                                      vnet_crypto_main_t * cm,
                                      vnet_crypto_op_id_t opt,
                                      vnet_crypto_op_t * ops[],
                                      vnet_crypto_op_chunk_t * chunks,
                                      u32 n_ops)
{
  u32 rv = 0;
  if (n_ops == 0)
    return 0;

  if (chunks)
    {
      if (cm->chained_ops_handlers[opt] == 0)
        crypto_set_op_status (ops, n_ops,
                              VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
      else
        rv = (cm->chained_ops_handlers[opt]) (vm, ops, chunks, n_ops);
    }
  else
    {
      if (cm->ops_handlers[opt] == 0)
        crypto_set_op_status (ops, n_ops,
                              VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
      else
        rv = (cm->ops_handlers[opt]) (vm, ops, n_ops);
    }
  return rv;
}

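/*
 * Batching helper used by the two public entry points below: consecutive
 * ops that share the same op id are collected into op_queue (bounded by
 * VLIB_FRAME_SIZE) and flushed to the currently active handler as one
 * batch, so mixed-op arrays still reach engines in engine-friendly groups.
 */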
static_always_inline u32
vnet_crypto_process_ops_inline (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                vnet_crypto_op_chunk_t * chunks, u32 n_ops)
{
  vnet_crypto_main_t *cm = &crypto_main;
  const int op_q_size = VLIB_FRAME_SIZE;
  vnet_crypto_op_t *op_queue[op_q_size];
  vnet_crypto_op_id_t opt, current_op_type = ~0;
  u32 n_op_queue = 0;
  u32 rv = 0, i;

  ASSERT (n_ops >= 1);

  for (i = 0; i < n_ops; i++)
    {
      opt = ops[i].op;

      if (current_op_type != opt || n_op_queue >= op_q_size)
        {
          rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
                                                      op_queue, chunks,
                                                      n_op_queue);
          n_op_queue = 0;
          current_op_type = opt;
        }

      op_queue[n_op_queue++] = &ops[i];
    }

  rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
                                              op_queue, chunks, n_op_queue);
  return rv;
}

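/*
 * Usage sketch (illustrative only, not part of this file): a caller fills
 * one or more vnet_crypto_op_t records and submits them in a single call.
 * The key index and buffer pointers below are placeholders.
 *
 *   vnet_crypto_op_t op;
 *   vnet_crypto_op_init (&op, VNET_CRYPTO_OP_AES_128_CBC_ENC);
 *   op.key_index = key_index;   // from vnet_crypto_key_add()
 *   op.iv = iv;
 *   op.src = src;
 *   op.dst = dst;
 *   op.len = len;
 *   vnet_crypto_process_ops (vm, &op, 1);
 *   // on success op.status == VNET_CRYPTO_OP_STATUS_COMPLETED
 */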
98u32
Filip Tehlarefcad1a2020-02-04 09:36:04 +000099vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[], u32 n_ops)
100{
101 return vnet_crypto_process_ops_inline (vm, ops, 0, n_ops);
102}
103
104u32
105vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
106 vnet_crypto_op_chunk_t * chunks, u32 n_ops)
107{
108 return vnet_crypto_process_ops_inline (vm, ops, chunks, n_ops);
109}
110
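/*
 * Crypto engines (e.g. the openssl, native or ipsecmb plugins) call this
 * from their init to obtain an engine index; prio decides which engine
 * wins when several of them register a handler for the same op id.
 */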
u32
vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
                             char *desc)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *p;

  vec_add2 (cm->engines, p, 1);
  p->name = name;
  p->desc = desc;
  p->priority = prio;

  hash_set_mem (cm->engine_index_by_name, p->name, p - cm->engines);

  return p - cm->engines;
}

static_always_inline void
crypto_set_active_engine (vnet_crypto_op_data_t * od,
                          vnet_crypto_op_id_t id, u32 ei,
                          crypto_op_class_type_t oct)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);

  if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_CHAINED)
    {
      if (ce->chained_ops_handlers[id])
        {
          od->active_engine_index_chained = ei;
          cm->chained_ops_handlers[id] = ce->chained_ops_handlers[id];
        }
    }

  if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_SIMPLE)
    {
      if (ce->ops_handlers[id])
        {
          od->active_engine_index_simple = ei;
          cm->ops_handlers[id] = ce->ops_handlers[id];
        }
    }
}

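/*
 * Resolve an algorithm name and an engine name and make that engine the
 * active handler for every op id (encrypt/decrypt/HMAC) of the algorithm,
 * for the simple path, the chained path, or both. This is the backend the
 * "set crypto handler" debug CLI is expected to use.
 */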
int
vnet_crypto_set_handler2 (char *alg_name, char *engine,
                          crypto_op_class_type_t oct)
{
  uword *p;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_alg_data_t *ad;
  int i;

  p = hash_get_mem (cm->alg_index_by_name, alg_name);
  if (!p)
    return -1;

  ad = vec_elt_at_index (cm->algs, p[0]);

  p = hash_get_mem (cm->engine_index_by_name, engine);
  if (!p)
    return -1;

  for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
    {
      vnet_crypto_op_data_t *od;
      vnet_crypto_op_id_t id = ad->op_by_type[i];
      if (id == 0)
        continue;

      od = cm->opt_data + id;
      crypto_set_active_engine (od, id, p[0], oct);
    }

  return 0;
}

int
vnet_crypto_is_set_handler (vnet_crypto_alg_t alg)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_op_id_t opt = 0;
  int i;

  if (alg > vec_len (cm->algs))
    return 0;

  for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
    if ((opt = cm->algs[alg].op_by_type[i]) != 0)
      break;

  return NULL != cm->ops_handlers[opt];
}

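/*
 * Common body for the three registration wrappers below: record the
 * engine's handler(s) for one op id and, if the registering engine's
 * priority value is higher than the active engine's, make it the active
 * handler. A typical engine init would contain a call like the following
 * (hypothetical handler name and engine index):
 *
 *   vnet_crypto_register_ops_handler (vm, eidx,
 *                                     VNET_CRYPTO_OP_AES_128_CBC_ENC,
 *                                     my_aes_cbc_enc_fn);
 */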
void
vnet_crypto_register_ops_handler_inline (vlib_main_t * vm, u32 engine_index,
                                         vnet_crypto_op_id_t opt,
                                         vnet_crypto_ops_handler_t * fn,
                                         vnet_crypto_chained_ops_handler_t *
                                         cfn)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
  vnet_crypto_op_data_t *otd = cm->opt_data + opt;
  vec_validate_aligned (cm->ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);
  vec_validate_aligned (cm->chained_ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);

  if (fn)
    {
      e->ops_handlers[opt] = fn;
      if (otd->active_engine_index_simple == ~0)
        {
          otd->active_engine_index_simple = engine_index;
          cm->ops_handlers[opt] = fn;
        }

      ae = vec_elt_at_index (cm->engines, otd->active_engine_index_simple);
      if (ae->priority < e->priority)
        crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_SIMPLE);
    }

  if (cfn)
    {
      e->chained_ops_handlers[opt] = cfn;
      if (otd->active_engine_index_chained == ~0)
        {
          otd->active_engine_index_chained = engine_index;
          cm->chained_ops_handlers[opt] = cfn;
        }

      ae = vec_elt_at_index (cm->engines, otd->active_engine_index_chained);
      if (ae->priority < e->priority)
        crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_CHAINED);
    }

  return;
}

void
vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
                                  vnet_crypto_op_id_t opt,
                                  vnet_crypto_ops_handler_t * fn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, 0);
}

void
vnet_crypto_register_chained_ops_handler (vlib_main_t * vm, u32 engine_index,
                                          vnet_crypto_op_id_t opt,
                                          vnet_crypto_chained_ops_handler_t *
                                          fn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, 0, fn);
}

void
vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
                                   vnet_crypto_op_id_t opt,
                                   vnet_crypto_ops_handler_t * fn,
                                   vnet_crypto_chained_ops_handler_t * cfn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, cfn);
}

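/*
 * Async engines register an enqueue/dequeue pair per async op id: frames
 * are submitted through the enqueue handler and later collected by the
 * crypto-dispatch node via the dequeue handler. Engine selection follows
 * the same priority rule as the synchronous path above.
 */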
void
vnet_crypto_register_async_handler (vlib_main_t * vm, u32 engine_index,
                                    vnet_crypto_async_op_id_t opt,
                                    vnet_crypto_frame_enqueue_t * enqueue_hdl,
                                    vnet_crypto_frame_dequeue_t * dequeue_hdl)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
  vnet_crypto_async_op_data_t *otd = cm->async_opt_data + opt;
  vec_validate_aligned (cm->enqueue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);
  vec_validate_aligned (cm->dequeue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);

  /* both the enqueue and the dequeue handler must be present */
  if (!enqueue_hdl && !dequeue_hdl)
    return;

  e->enqueue_handlers[opt] = enqueue_hdl;
  e->dequeue_handlers[opt] = dequeue_hdl;
  if (otd->active_engine_index_async == ~0)
    {
      otd->active_engine_index_async = engine_index;
      cm->enqueue_handlers[opt] = enqueue_hdl;
      cm->dequeue_handlers[opt] = dequeue_hdl;
    }

  ae = vec_elt_at_index (cm->engines, otd->active_engine_index_async);
  if (ae->priority < e->priority)
    {
      otd->active_engine_index_async = engine_index;
      cm->enqueue_handlers[opt] = enqueue_hdl;
      cm->dequeue_handlers[opt] = dequeue_hdl;
    }

  return;
}

void
vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
                                  vnet_crypto_key_handler_t * key_handler)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *e = vec_elt_at_index (cm->engines, engine_index);
  e->key_op_handler = key_handler;
  return;
}

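/*
 * Reject keys whose length does not match the fixed key size of the
 * cipher/AEAD algorithm; HMAC keys may be of any length. The expected
 * key size comes from the third column of the foreach_crypto_*_alg
 * macro tables expanded below.
 */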
static int
vnet_crypto_key_len_check (vnet_crypto_alg_t alg, u16 length)
{
  switch (alg)
    {
    case VNET_CRYPTO_N_ALGS:
      return 0;
    case VNET_CRYPTO_ALG_NONE:
      return 1;

#define _(n, s, l) \
  case VNET_CRYPTO_ALG_##n: \
    if ((l) == length) \
      return 1; \
    break;
      foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
      /* HMAC allows any key length */
#define _(n, s) \
  case VNET_CRYPTO_ALG_HMAC_##n: \
    return 1;
      foreach_crypto_hmac_alg
#undef _
    }

  return 0;
}

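/*
 * Key handling: vnet_crypto_key_add() copies the key material into a pool
 * entry and notifies every engine that registered a key handler, so
 * engines can pre-expand key schedules. Illustrative sketch only, the
 * identifiers are placeholders:
 *
 *   u32 ki = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_AES_128_CBC,
 *                                 key_data, 16);
 *   ...
 *   vnet_crypto_key_del (vm, ki);
 */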
u32
vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg, u8 * data,
                     u16 length)
{
  u32 index;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key;

  if (!vnet_crypto_key_len_check (alg, length))
    return ~0;

  pool_get_zero (cm->keys, key);
  index = key - cm->keys;
  key->type = VNET_CRYPTO_KEY_TYPE_DATA;
  key->alg = alg;
  vec_validate_aligned (key->data, length - 1, CLIB_CACHE_LINE_BYTES);
  clib_memcpy (key->data, data, length);
  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
  /* *INDENT-ON* */
  return index;
}

void
vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key = pool_elt_at_index (cm->keys, index);

  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_DEL, index);
  /* *INDENT-ON* */

  if (key->type == VNET_CRYPTO_KEY_TYPE_DATA)
    {
      clib_memset (key->data, 0, vec_len (key->data));
      vec_free (key->data);
    }
  else if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
    {
      key->index_crypto = key->index_integ = 0;
    }

  pool_put (cm->keys, key);
}

vnet_crypto_async_alg_t
vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
                       vnet_crypto_alg_t integ_alg)
{
#define _(c, h, s, k ,d) \
  if (crypto_alg == VNET_CRYPTO_ALG_##c && \
      integ_alg == VNET_CRYPTO_ALG_HMAC_##h) \
    return VNET_CRYPTO_ALG_##c##_##h##_TAG##d;
  foreach_crypto_link_async_alg
#undef _
    return ~0;
}

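/*
 * A linked key pairs an existing crypto key with an existing integrity
 * key so the combination can be used by async engines as a single
 * cipher+HMAC algorithm (see foreach_crypto_link_async_alg).
 */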
u32
vnet_crypto_key_add_linked (vlib_main_t * vm,
                            vnet_crypto_key_index_t index_crypto,
                            vnet_crypto_key_index_t index_integ)
{
  u32 index;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key_crypto, *key_integ, *key;
  vnet_crypto_async_alg_t linked_alg;

  key_crypto = pool_elt_at_index (cm->keys, index_crypto);
  key_integ = pool_elt_at_index (cm->keys, index_integ);

  if (!key_crypto || !key_integ)
    return ~0;

  linked_alg = vnet_crypto_link_algs (key_crypto->alg, key_integ->alg);
  if (linked_alg == ~0)
    return ~0;

  pool_get_zero (cm->keys, key);
  index = key - cm->keys;
  key->type = VNET_CRYPTO_KEY_TYPE_LINK;
  key->index_crypto = index_crypto;
  key->index_integ = index_integ;
  key->async_alg = linked_alg;

  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
  /* *INDENT-ON* */

  return index;
}

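/*
 * Enable or disable the crypto-dispatch node on all worker threads. The
 * node is only scheduled (polling or interrupt, depending on the dispatch
 * mode) while at least one feature holds an async-mode reference.
 */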
clib_error_t *
crypto_dispatch_enable_disable (int is_enable)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
  u8 state_change = 0;

  CLIB_MEMORY_STORE_BARRIER ();
  if (is_enable && cm->async_refcnt > 0)
    {
      state_change = 1;
      state =
        cm->dispatch_mode ==
        VNET_CRYPTO_ASYNC_DISPATCH_POLLING ? VLIB_NODE_STATE_POLLING :
        VLIB_NODE_STATE_INTERRUPT;
    }

  if (!is_enable && cm->async_refcnt == 0)
    {
      state_change = 1;
      state = VLIB_NODE_STATE_DISABLED;
    }

  if (state_change)
    for (i = skip_master; i < tm->n_vlib_mains; i++)
      {
        if (state !=
            vlib_node_get_state (vlib_mains[i], cm->crypto_node_index))
          vlib_node_set_state (vlib_mains[i], cm->crypto_node_index, state);
      }
  return 0;
}

static_always_inline void
crypto_set_active_async_engine (vnet_crypto_async_op_data_t * od,
                                vnet_crypto_async_op_id_t id, u32 ei)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);

  if (ce->enqueue_handlers[id] && ce->dequeue_handlers[id])
    {
      od->active_engine_index_async = ei;
      cm->enqueue_handlers[id] = ce->enqueue_handlers[id];
      cm->dequeue_handlers[id] = ce->dequeue_handlers[id];
    }
}

int
vnet_crypto_set_async_handler2 (char *alg_name, char *engine)
{
  uword *p;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_alg_data_t *ad;
  int i;

  p = hash_get_mem (cm->async_alg_index_by_name, alg_name);
  if (!p)
    return -1;

  ad = vec_elt_at_index (cm->async_algs, p[0]);

  p = hash_get_mem (cm->engine_index_by_name, engine);
  if (!p)
    return -1;

  for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_TYPES; i++)
    {
      vnet_crypto_async_op_data_t *od;
      vnet_crypto_async_op_id_t id = ad->op_by_type[i];
      if (id == 0)
        continue;

      od = cm->async_opt_data + id;
      crypto_set_active_async_engine (od, id, p[0]);
    }

  return 0;
}

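/*
 * Register (or look up) a "post" node: the node that crypto-dispatch will
 * hand buffers to once an async frame completes. Returns the next-index
 * to store in the frame, or ~0 if the named node does not exist.
 */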
u32
vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_next_node_t *nn = 0;
  vlib_node_t *cc, *pn;
  uword index = vec_len (cm->next_nodes);

  pn = vlib_get_node_by_name (vm, (u8 *) post_node_name);
  if (!pn)
    return ~0;

  /* *INDENT-OFF* */
  vec_foreach (nn, cm->next_nodes)
  {
    if (nn->node_idx == pn->index)
      return nn->next_idx;
  }
  /* *INDENT-ON* */

  vec_validate (cm->next_nodes, index);
  nn = vec_elt_at_index (cm->next_nodes, index);

  cc = vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch");
  nn->next_idx = vlib_node_add_named_next (vm, cc->index, post_node_name);
  nn->node_idx = pn->index;

  return nn->next_idx;
}

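/*
 * Reference-counted switch between synchronous and asynchronous crypto:
 * each feature that wants async mode (e.g. an IPsec SA) bumps the
 * refcount, and the crypto-dispatch node stays enabled on all workers
 * while the refcount is non-zero.
 */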
void
vnet_crypto_request_async_mode (int is_enable)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
  u8 state_change = 0;

  CLIB_MEMORY_STORE_BARRIER ();
  if (is_enable && cm->async_refcnt == 0)
    {
      state_change = 1;
      state =
        cm->dispatch_mode == VNET_CRYPTO_ASYNC_DISPATCH_POLLING ?
        VLIB_NODE_STATE_POLLING : VLIB_NODE_STATE_INTERRUPT;
    }
  if (!is_enable && cm->async_refcnt == 1)
    {
      state_change = 1;
      state = VLIB_NODE_STATE_DISABLED;
    }

  if (state_change)
    for (i = skip_master; i < tm->n_vlib_mains; i++)
      {
        if (state !=
            vlib_node_get_state (vlib_mains[i], cm->crypto_node_index))
          vlib_node_set_state (vlib_mains[i], cm->crypto_node_index, state);
      }

  if (is_enable)
    cm->async_refcnt += 1;
  else if (cm->async_refcnt > 0)
    cm->async_refcnt -= 1;
}

void
vnet_crypto_set_async_dispatch_mode (u8 mode)
{
  vnet_crypto_main_t *cm = &crypto_main;
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;

  CLIB_MEMORY_STORE_BARRIER ();
  cm->dispatch_mode = mode;
  if (mode == VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT)
    {
      state =
        cm->async_refcnt == 0 ?
        VLIB_NODE_STATE_DISABLED : VLIB_NODE_STATE_INTERRUPT;
    }
  else if (mode == VNET_CRYPTO_ASYNC_DISPATCH_POLLING)
    {
      state =
        cm->async_refcnt == 0 ?
        VLIB_NODE_STATE_DISABLED : VLIB_NODE_STATE_POLLING;
    }

  for (i = skip_master; i < tm->n_vlib_mains; i++)
    {
      if (state != vlib_node_get_state (vlib_mains[i], cm->crypto_node_index))
        vlib_node_set_state (vlib_mains[i], cm->crypto_node_index, state);
    }
}

int
vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t op)
{
  vnet_crypto_main_t *cm = &crypto_main;

  return (op < vec_len (cm->enqueue_handlers) &&
          NULL != cm->enqueue_handlers[op]);
}

static void
vnet_crypto_init_cipher_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t eid,
                              vnet_crypto_op_id_t did, char *name, u8 is_aead)
{
  vnet_crypto_op_type_t eopt, dopt;
  vnet_crypto_main_t *cm = &crypto_main;

  cm->algs[alg].name = name;
  cm->opt_data[eid].alg = cm->opt_data[did].alg = alg;
  cm->opt_data[eid].active_engine_index_simple = ~0;
  cm->opt_data[did].active_engine_index_simple = ~0;
  cm->opt_data[eid].active_engine_index_chained = ~0;
  cm->opt_data[did].active_engine_index_chained = ~0;
  if (is_aead)
    {
      eopt = VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT;
      dopt = VNET_CRYPTO_OP_TYPE_AEAD_DECRYPT;
    }
  else
    {
      eopt = VNET_CRYPTO_OP_TYPE_ENCRYPT;
      dopt = VNET_CRYPTO_OP_TYPE_DECRYPT;
    }
  cm->opt_data[eid].type = eopt;
  cm->opt_data[did].type = dopt;
  cm->algs[alg].op_by_type[eopt] = eid;
  cm->algs[alg].op_by_type[dopt] = did;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_hmac_data (vnet_crypto_alg_t alg,
                            vnet_crypto_op_id_t id, char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  cm->algs[alg].name = name;
  cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HMAC] = id;
  cm->opt_data[id].alg = alg;
  cm->opt_data[id].active_engine_index_simple = ~0;
  cm->opt_data[id].active_engine_index_chained = ~0;
  cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HMAC;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_async_data (vnet_crypto_async_alg_t alg,
                             vnet_crypto_async_op_id_t eid,
                             vnet_crypto_async_op_id_t did, char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;

  cm->async_algs[alg].name = name;
  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT] = eid;
  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT] = did;
  cm->async_opt_data[eid].type = VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT;
  cm->async_opt_data[eid].alg = alg;
  cm->async_opt_data[eid].active_engine_index_async = ~0;
  cm->async_opt_data[did].type = VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT;
  cm->async_opt_data[did].alg = alg;
  cm->async_opt_data[did].active_engine_index_async = ~0;
  hash_set_mem (cm->async_alg_index_by_name, name, alg);
}

clib_error_t *
vnet_crypto_init (vlib_main_t * vm)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vnet_crypto_thread_t *ct = 0;

  cm->dispatch_mode = VNET_CRYPTO_ASYNC_DISPATCH_POLLING;
  cm->engine_index_by_name = hash_create_string ( /* size */ 0,
                                                  sizeof (uword));
  cm->alg_index_by_name = hash_create_string (0, sizeof (uword));
  cm->async_alg_index_by_name = hash_create_string (0, sizeof (uword));
  vec_validate_aligned (cm->threads, tm->n_vlib_mains, CLIB_CACHE_LINE_BYTES);
  vec_foreach (ct, cm->threads)
    pool_alloc_aligned (ct->frame_pool, 1024, CLIB_CACHE_LINE_BYTES);
  vec_validate (cm->algs, VNET_CRYPTO_N_ALGS);
  vec_validate (cm->async_algs, VNET_CRYPTO_N_ASYNC_ALGS);
  clib_bitmap_validate (cm->async_active_ids, VNET_CRYPTO_ASYNC_OP_N_IDS - 1);

#define _(n, s, l) \
  vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
                                VNET_CRYPTO_OP_##n##_ENC, \
                                VNET_CRYPTO_OP_##n##_DEC, s, 0);
  foreach_crypto_cipher_alg;
#undef _
#define _(n, s, l) \
  vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
                                VNET_CRYPTO_OP_##n##_ENC, \
                                VNET_CRYPTO_OP_##n##_DEC, s, 1);
  foreach_crypto_aead_alg;
#undef _
#define _(n, s) \
  vnet_crypto_init_hmac_data (VNET_CRYPTO_ALG_HMAC_##n, \
                              VNET_CRYPTO_OP_##n##_HMAC, "hmac-" s);
  foreach_crypto_hmac_alg;
#undef _
#define _(n, s, k, t, a) \
  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a, \
                               VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
                               VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC, \
                               s);
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k ,d) \
  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##c##_##h##_TAG##d, \
                               VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
                               VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC, \
                               s);
  foreach_crypto_link_async_alg
#undef _
  cm->crypto_node_index =
    vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch")->index;

  return 0;
}

VLIB_INIT_FUNCTION (vnet_crypto_init);

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */