/*
 * Copyright (c) 2018 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <stdbool.h>
#include <vlib/vlib.h>
#include <vnet/crypto/crypto.h>

vnet_crypto_main_t crypto_main;

static_always_inline void
crypto_set_op_status (vnet_crypto_op_t * ops[], u32 n_ops, int status)
{
  while (n_ops--)
    {
      ops[0]->status = status;
      ops++;
    }
}

static_always_inline u32
vnet_crypto_process_ops_call_handler (vlib_main_t * vm,
                                      vnet_crypto_main_t * cm,
                                      vnet_crypto_op_id_t opt,
                                      vnet_crypto_op_t * ops[],
                                      vnet_crypto_op_chunk_t * chunks,
                                      u32 n_ops)
{
  u32 rv = 0;
  if (n_ops == 0)
    return 0;

  if (chunks)
    {
      if (cm->chained_ops_handlers[opt] == 0)
        crypto_set_op_status (ops, n_ops,
                              VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
      else
        rv = (cm->chained_ops_handlers[opt]) (vm, ops, chunks, n_ops);
    }
  else
    {
      if (cm->ops_handlers[opt] == 0)
        crypto_set_op_status (ops, n_ops,
                              VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
      else
        rv = (cm->ops_handlers[opt]) (vm, ops, n_ops);
    }
  return rv;
}

static_always_inline u32
vnet_crypto_process_ops_inline (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                vnet_crypto_op_chunk_t * chunks, u32 n_ops)
{
  vnet_crypto_main_t *cm = &crypto_main;
  const int op_q_size = VLIB_FRAME_SIZE;
  vnet_crypto_op_t *op_queue[op_q_size];
  vnet_crypto_op_id_t opt, current_op_type = ~0;
  u32 n_op_queue = 0;
  u32 rv = 0, i;

  ASSERT (n_ops >= 1);

  for (i = 0; i < n_ops; i++)
    {
      opt = ops[i].op;

      if (current_op_type != opt || n_op_queue >= op_q_size)
        {
          rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
                                                      op_queue, chunks,
                                                      n_op_queue);
          n_op_queue = 0;
          current_op_type = opt;
        }

      op_queue[n_op_queue++] = &ops[i];
    }

  rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
                                              op_queue, chunks, n_op_queue);
  return rv;
}

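/*
 * Usage sketch (illustrative only, not part of this file): a caller fills
 * a batch of vnet_crypto_op_t records and hands them to
 * vnet_crypto_process_ops(). Field names follow <vnet/crypto/crypto.h>;
 * the surrounding key/IV/buffer handling is assumed.
 *
 *   vnet_crypto_op_t op;
 *   vnet_crypto_op_init (&op, VNET_CRYPTO_OP_AES_128_CBC_ENC);
 *   op.key_index = key_index;    // from vnet_crypto_key_add()
 *   op.iv = iv;                  // caller-provided IV buffer
 *   op.src = op.dst = payload;   // in-place encryption
 *   op.len = payload_len;
 *   if (vnet_crypto_process_ops (vm, &op, 1) != 1 ||
 *       op.status != VNET_CRYPTO_OP_STATUS_COMPLETED)
 *     ; // handle failure
 */
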
u32
vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[], u32 n_ops)
{
  return vnet_crypto_process_ops_inline (vm, ops, 0, n_ops);
}

u32
vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                 vnet_crypto_op_chunk_t * chunks, u32 n_ops)
{
  return vnet_crypto_process_ops_inline (vm, ops, chunks, n_ops);
}

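/*
 * Registration sketch (hypothetical engine): a crypto engine registers
 * itself once at init time and then attaches per-op handlers. "my_engine"
 * and my_aes_cbc_enc() are placeholder names, not symbols defined in VPP.
 *
 *   u32 eidx = vnet_crypto_register_engine (vm, "my_engine", 100,
 *                                           "example engine");
 *   vnet_crypto_register_ops_handler (vm, eidx,
 *                                     VNET_CRYPTO_OP_AES_128_CBC_ENC,
 *                                     my_aes_cbc_enc);
 */
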
u32
vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
                             char *desc)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *p;

  vec_add2 (cm->engines, p, 1);
  p->name = name;
  p->desc = desc;
  p->priority = prio;

  hash_set_mem (cm->engine_index_by_name, p->name, p - cm->engines);

  return p - cm->engines;
}

static_always_inline void
crypto_set_active_engine (vnet_crypto_op_data_t * od,
                          vnet_crypto_op_id_t id, u32 ei,
                          crypto_op_class_type_t oct)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);

  if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_CHAINED)
    {
      if (ce->chained_ops_handlers[id])
        {
          od->active_engine_index_chained = ei;
          cm->chained_ops_handlers[id] = ce->chained_ops_handlers[id];
        }
    }

  if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_SIMPLE)
    {
      if (ce->ops_handlers[id])
        {
          od->active_engine_index_simple = ei;
          cm->ops_handlers[id] = ce->ops_handlers[id];
        }
    }
}

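/*
 * Usage sketch (assumed names): force a specific engine for one algorithm,
 * for both simple and chained op flavours. The algorithm and engine strings
 * must match what was registered; "aes-128-gcm" and "openssl" are assumed
 * examples here.
 *
 *   if (vnet_crypto_set_handler2 ("aes-128-gcm", "openssl",
 *                                 CRYPTO_OP_BOTH) < 0)
 *     ; // unknown algorithm or engine name
 */
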
int
vnet_crypto_set_handler2 (char *alg_name, char *engine,
                          crypto_op_class_type_t oct)
{
  uword *p;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_alg_data_t *ad;
  int i;

  p = hash_get_mem (cm->alg_index_by_name, alg_name);
  if (!p)
    return -1;

  ad = vec_elt_at_index (cm->algs, p[0]);

  p = hash_get_mem (cm->engine_index_by_name, engine);
  if (!p)
    return -1;

  for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
    {
      vnet_crypto_op_data_t *od;
      vnet_crypto_op_id_t id = ad->op_by_type[i];
      if (id == 0)
        continue;

      od = cm->opt_data + id;
      crypto_set_active_engine (od, id, p[0], oct);
    }

  return 0;
}

int
vnet_crypto_is_set_handler (vnet_crypto_alg_t alg)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_op_id_t opt = 0;
  int i;

  /* reject out-of-range algs before indexing cm->algs below */
  if (alg >= vec_len (cm->algs))
    return 0;

  for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
    if ((opt = cm->algs[alg].op_by_type[i]) != 0)
      break;

  return NULL != cm->ops_handlers[opt];
}

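/*
 * Engine selection note (behaviour summarised from the code below; the
 * numbers are made-up examples): the first engine to register a handler
 * for an op id becomes active, and a later registration only takes over
 * if its priority is strictly higher. E.g. with a software engine at
 * priority 50 already active for AES-128-CBC-ENC, a later registration at
 * priority 80 would replace it, while another priority-50 engine would not.
 */
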
void
vnet_crypto_register_ops_handler_inline (vlib_main_t * vm, u32 engine_index,
                                         vnet_crypto_op_id_t opt,
                                         vnet_crypto_ops_handler_t * fn,
                                         vnet_crypto_chained_ops_handler_t * cfn)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
  vnet_crypto_op_data_t *otd = cm->opt_data + opt;
  vec_validate_aligned (cm->ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);
  vec_validate_aligned (cm->chained_ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);

  if (fn)
    {
      e->ops_handlers[opt] = fn;
      if (otd->active_engine_index_simple == ~0)
        {
          otd->active_engine_index_simple = engine_index;
          cm->ops_handlers[opt] = fn;
        }

      ae = vec_elt_at_index (cm->engines, otd->active_engine_index_simple);
      if (ae->priority < e->priority)
        crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_SIMPLE);
    }

  if (cfn)
    {
      e->chained_ops_handlers[opt] = cfn;
      if (otd->active_engine_index_chained == ~0)
        {
          otd->active_engine_index_chained = engine_index;
          cm->chained_ops_handlers[opt] = cfn;
        }

      ae = vec_elt_at_index (cm->engines, otd->active_engine_index_chained);
      if (ae->priority < e->priority)
        crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_CHAINED);
    }

  return;
}

void
vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
                                  vnet_crypto_op_id_t opt,
                                  vnet_crypto_ops_handler_t * fn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, 0);
}

void
vnet_crypto_register_chained_ops_handler (vlib_main_t * vm, u32 engine_index,
                                          vnet_crypto_op_id_t opt,
                                          vnet_crypto_chained_ops_handler_t * fn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, 0, fn);
}

void
vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
                                   vnet_crypto_op_id_t opt,
                                   vnet_crypto_ops_handler_t * fn,
                                   vnet_crypto_chained_ops_handler_t * cfn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, cfn);
}

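/*
 * Async registration sketch (hypothetical handlers): an async engine
 * provides an enqueue/dequeue pair per async op id. my_enq()/my_deq()
 * below are placeholders, not real VPP symbols.
 *
 *   vnet_crypto_register_async_handler (
 *     vm, eidx, VNET_CRYPTO_OP_AES_128_GCM_TAG16_AAD12_ENC,
 *     my_enq, my_deq);
 */
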
void
vnet_crypto_register_async_handler (vlib_main_t * vm, u32 engine_index,
                                    vnet_crypto_async_op_id_t opt,
                                    vnet_crypto_frame_enqueue_t * enqueue_hdl,
                                    vnet_crypto_frame_dequeue_t * dequeue_hdl)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
  vnet_crypto_async_op_data_t *otd = cm->async_opt_data + opt;
  vec_validate_aligned (cm->enqueue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);
  vec_validate_aligned (cm->dequeue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);

  /* both the enqueue and the dequeue handler should be present */
  if (!enqueue_hdl && !dequeue_hdl)
    return;

  e->enqueue_handlers[opt] = enqueue_hdl;
  e->dequeue_handlers[opt] = dequeue_hdl;
  if (otd->active_engine_index_async == ~0)
    {
      otd->active_engine_index_async = engine_index;
      cm->enqueue_handlers[opt] = enqueue_hdl;
      cm->dequeue_handlers[opt] = dequeue_hdl;
    }

  ae = vec_elt_at_index (cm->engines, otd->active_engine_index_async);
  if (ae->priority < e->priority)
    {
      otd->active_engine_index_async = engine_index;
      cm->enqueue_handlers[opt] = enqueue_hdl;
      cm->dequeue_handlers[opt] = dequeue_hdl;
    }

  return;
}

void
vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
                                  vnet_crypto_key_handler_t * key_handler)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *e = vec_elt_at_index (cm->engines, engine_index);
  e->key_op_handler = key_handler;
  return;
}

static int
vnet_crypto_key_len_check (vnet_crypto_alg_t alg, u16 length)
{
  switch (alg)
    {
    case VNET_CRYPTO_N_ALGS:
      return 0;
    case VNET_CRYPTO_ALG_NONE:
      return 1;

#define _(n, s, l) \
      case VNET_CRYPTO_ALG_##n: \
        if ((l) == length) \
          return 1; \
        break;
      foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
      /* HMAC allows any key length */
#define _(n, s) \
      case VNET_CRYPTO_ALG_HMAC_##n: \
        return 1;
      foreach_crypto_hmac_alg
#undef _
    }

  return 0;
}

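/*
 * Key management sketch (assumed key material): keys are added once,
 * referenced by index from ops, and wiped on deletion. The 16-byte length
 * below matches an AES-128 key; the data itself is a placeholder.
 *
 *   u8 key_data[16] = { 0 };   // would come from e.g. an SA exchange
 *   u32 ki = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_AES_128_CBC,
 *                                 key_data, sizeof (key_data));
 *   if (ki == ~0)
 *     ; // unsupported algorithm / key length
 *   ...
 *   vnet_crypto_key_del (vm, ki);
 */
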
u32
vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg, u8 * data,
                     u16 length)
{
  u32 index;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key;

  if (!vnet_crypto_key_len_check (alg, length))
    return ~0;

  pool_get_zero (cm->keys, key);
  index = key - cm->keys;
  key->type = VNET_CRYPTO_KEY_TYPE_DATA;
  key->alg = alg;
  vec_validate_aligned (key->data, length - 1, CLIB_CACHE_LINE_BYTES);
  clib_memcpy (key->data, data, length);
  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
  /* *INDENT-ON* */
  return index;
}

void
vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key = pool_elt_at_index (cm->keys, index);

  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_DEL, index);
  /* *INDENT-ON* */

  if (key->type == VNET_CRYPTO_KEY_TYPE_DATA)
    {
      clib_memset (key->data, 0, vec_len (key->data));
      vec_free (key->data);
    }
  else if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
    {
      key->index_crypto = key->index_integ = 0;
    }

  pool_put (cm->keys, key);
}

vnet_crypto_async_alg_t
vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
                       vnet_crypto_alg_t integ_alg)
{
#define _(c, h, s, k ,d) \
  if (crypto_alg == VNET_CRYPTO_ALG_##c && \
      integ_alg == VNET_CRYPTO_ALG_HMAC_##h) \
    return VNET_CRYPTO_ALG_##c##_##h##_TAG##d;
  foreach_crypto_link_async_alg
#undef _
  return ~0;
}

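/*
 * Linked-key sketch (indices assumed to come from earlier key_add calls):
 * async processing of a cipher + HMAC pair uses one "linked" key that
 * references the two simple keys.
 *
 *   u32 kc = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_AES_128_CBC,
 *                                 cipher_key, 16);
 *   u32 ki = vnet_crypto_key_add (vm, VNET_CRYPTO_ALG_HMAC_SHA1,
 *                                 integ_key, 20);
 *   u32 kl = vnet_crypto_key_add_linked (vm, kc, ki);
 *   if (kl == ~0)
 *     ; // no async alg defined for this cipher/integrity combination
 */
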
u32
vnet_crypto_key_add_linked (vlib_main_t * vm,
                            vnet_crypto_key_index_t index_crypto,
                            vnet_crypto_key_index_t index_integ)
{
  u32 index;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key_crypto, *key_integ, *key;
  vnet_crypto_async_alg_t linked_alg;

  key_crypto = pool_elt_at_index (cm->keys, index_crypto);
  key_integ = pool_elt_at_index (cm->keys, index_integ);

  linked_alg = vnet_crypto_link_algs (key_crypto->alg, key_integ->alg);
  if (linked_alg == ~0)
    return ~0;

  pool_get_zero (cm->keys, key);
  index = key - cm->keys;
  key->type = VNET_CRYPTO_KEY_TYPE_LINK;
  key->index_crypto = index_crypto;
  key->index_integ = index_integ;
  key->async_alg = linked_alg;

  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
  /* *INDENT-ON* */

  return index;
}

clib_error_t *
crypto_dispatch_enable_disable (int is_enable)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
  u8 state_change = 0;

  CLIB_MEMORY_STORE_BARRIER ();
  if (is_enable && cm->async_refcnt > 0)
    {
      state_change = 1;
      state =
        cm->dispatch_mode ==
        VNET_CRYPTO_ASYNC_DISPATCH_POLLING ? VLIB_NODE_STATE_POLLING :
        VLIB_NODE_STATE_INTERRUPT;
    }

  if (!is_enable && cm->async_refcnt == 0)
    {
      state_change = 1;
      state = VLIB_NODE_STATE_DISABLED;
    }

  if (state_change)
    for (i = skip_master; i < tm->n_vlib_mains; i++)
      {
        if (state !=
            vlib_node_get_state (vlib_mains[i], cm->crypto_node_index))
          vlib_node_set_state (vlib_mains[i], cm->crypto_node_index, state);
      }
  return 0;
}

static_always_inline void
crypto_set_active_async_engine (vnet_crypto_async_op_data_t * od,
                                vnet_crypto_async_op_id_t id, u32 ei)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);

  if (ce->enqueue_handlers[id] && ce->dequeue_handlers[id])
    {
      od->active_engine_index_async = ei;
      cm->enqueue_handlers[id] = ce->enqueue_handlers[id];
      cm->dequeue_handlers[id] = ce->dequeue_handlers[id];
    }
}

int
vnet_crypto_set_async_handler2 (char *alg_name, char *engine)
{
  uword *p;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_alg_data_t *ad;
  int i;

  p = hash_get_mem (cm->async_alg_index_by_name, alg_name);
  if (!p)
    return -1;

  ad = vec_elt_at_index (cm->async_algs, p[0]);

  p = hash_get_mem (cm->engine_index_by_name, engine);
  if (!p)
    return -1;

  for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_TYPES; i++)
    {
      vnet_crypto_async_op_data_t *od;
      vnet_crypto_async_op_id_t id = ad->op_by_type[i];
      if (id == 0)
        continue;

      od = cm->async_opt_data + id;
      crypto_set_active_async_engine (od, id, p[0]);
    }

  return 0;
}

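/*
 * Post-node sketch (assumed node name): a feature that consumes async
 * crypto results registers the graph node that crypto-dispatch should hand
 * finished frames to; "esp4-decrypt-post" is only an illustrative name.
 *
 *   u32 next = vnet_crypto_register_post_node (vm, "esp4-decrypt-post");
 *   if (next == ~0)
 *     ; // no such node
 *   // 'next' is then stored in the async frame at enqueue time
 */
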
u32
vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_next_node_t *nn = 0;
  vlib_node_t *cc, *pn;
  uword index = vec_len (cm->next_nodes);

  pn = vlib_get_node_by_name (vm, (u8 *) post_node_name);
  if (!pn)
    return ~0;

  /* *INDENT-OFF* */
  vec_foreach (nn, cm->next_nodes)
  {
    if (nn->node_idx == pn->index)
      return nn->next_idx;
  }
  /* *INDENT-ON* */

  vec_validate (cm->next_nodes, index);
  nn = vec_elt_at_index (cm->next_nodes, index);

  cc = vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch");
  nn->next_idx = vlib_node_add_named_next (vm, cc->index, post_node_name);
  nn->node_idx = pn->index;

  return nn->next_idx;
}

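/*
 * Async mode sketch: features that want async crypto bump a reference
 * count; the crypto-dispatch node runs on all workers while the count is
 * non-zero. A typical enable/disable pair, as assumed here, is simply:
 *
 *   vnet_crypto_request_async_mode (1);   // on feature enable
 *   ...
 *   vnet_crypto_request_async_mode (0);   // on feature disable
 */
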
void
vnet_crypto_request_async_mode (int is_enable)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
  u8 state_change = 0;

  CLIB_MEMORY_STORE_BARRIER ();
  if (is_enable && cm->async_refcnt == 0)
    {
      state_change = 1;
      state =
        cm->dispatch_mode == VNET_CRYPTO_ASYNC_DISPATCH_POLLING ?
        VLIB_NODE_STATE_POLLING : VLIB_NODE_STATE_INTERRUPT;
    }
  if (!is_enable && cm->async_refcnt == 1)
    {
      state_change = 1;
      state = VLIB_NODE_STATE_DISABLED;
    }

  if (state_change)
    for (i = skip_master; i < tm->n_vlib_mains; i++)
      {
        if (state !=
            vlib_node_get_state (vlib_mains[i], cm->crypto_node_index))
          vlib_node_set_state (vlib_mains[i], cm->crypto_node_index, state);
      }

  if (is_enable)
    cm->async_refcnt += 1;
  else if (cm->async_refcnt > 0)
    cm->async_refcnt -= 1;
}

void
vnet_crypto_set_async_dispatch_mode (u8 mode)
{
  vnet_crypto_main_t *cm = &crypto_main;
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;

  CLIB_MEMORY_STORE_BARRIER ();
  cm->dispatch_mode = mode;
  if (mode == VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT)
    {
      state =
        cm->async_refcnt == 0 ?
        VLIB_NODE_STATE_DISABLED : VLIB_NODE_STATE_INTERRUPT;
    }
  else if (mode == VNET_CRYPTO_ASYNC_DISPATCH_POLLING)
    {
      state =
        cm->async_refcnt == 0 ?
        VLIB_NODE_STATE_DISABLED : VLIB_NODE_STATE_POLLING;
    }

  for (i = skip_master; i < tm->n_vlib_mains; i++)
    {
      if (state != vlib_node_get_state (vlib_mains[i], cm->crypto_node_index))
        vlib_node_set_state (vlib_mains[i], cm->crypto_node_index, state);
    }
}

int
vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t op)
{
  vnet_crypto_main_t *cm = &crypto_main;

  return (op < vec_len (cm->enqueue_handlers) &&
          NULL != cm->enqueue_handlers[op]);
}

static void
vnet_crypto_init_cipher_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t eid,
                              vnet_crypto_op_id_t did, char *name, u8 is_aead)
{
  vnet_crypto_op_type_t eopt, dopt;
  vnet_crypto_main_t *cm = &crypto_main;

  cm->algs[alg].name = name;
  cm->opt_data[eid].alg = cm->opt_data[did].alg = alg;
  cm->opt_data[eid].active_engine_index_simple = ~0;
  cm->opt_data[did].active_engine_index_simple = ~0;
  cm->opt_data[eid].active_engine_index_chained = ~0;
  cm->opt_data[did].active_engine_index_chained = ~0;
  if (is_aead)
    {
      eopt = VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT;
      dopt = VNET_CRYPTO_OP_TYPE_AEAD_DECRYPT;
    }
  else
    {
      eopt = VNET_CRYPTO_OP_TYPE_ENCRYPT;
      dopt = VNET_CRYPTO_OP_TYPE_DECRYPT;
    }
  cm->opt_data[eid].type = eopt;
  cm->opt_data[did].type = dopt;
  cm->algs[alg].op_by_type[eopt] = eid;
  cm->algs[alg].op_by_type[dopt] = did;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_hmac_data (vnet_crypto_alg_t alg,
                            vnet_crypto_op_id_t id, char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  cm->algs[alg].name = name;
  cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HMAC] = id;
  cm->opt_data[id].alg = alg;
  cm->opt_data[id].active_engine_index_simple = ~0;
  cm->opt_data[id].active_engine_index_chained = ~0;
  cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HMAC;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_async_data (vnet_crypto_async_alg_t alg,
                             vnet_crypto_async_op_id_t eid,
                             vnet_crypto_async_op_id_t did, char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;

  cm->async_algs[alg].name = name;
  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT] = eid;
  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT] = did;
  cm->async_opt_data[eid].type = VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT;
  cm->async_opt_data[eid].alg = alg;
  cm->async_opt_data[eid].active_engine_index_async = ~0;
  cm->async_opt_data[did].type = VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT;
  cm->async_opt_data[did].alg = alg;
  cm->async_opt_data[did].active_engine_index_async = ~0;
  hash_set_mem (cm->async_alg_index_by_name, name, alg);
}

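/*
 * Init expansion sketch: each _() invocation below expands from the
 * foreach_* lists in crypto.h. For instance, assuming the cipher list
 * contains _(AES_128_CBC, "aes-128-cbc", 16), the first block expands to:
 *
 *   vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_AES_128_CBC,
 *                                 VNET_CRYPTO_OP_AES_128_CBC_ENC,
 *                                 VNET_CRYPTO_OP_AES_128_CBC_DEC,
 *                                 "aes-128-cbc", 0);
 */
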
clib_error_t *
vnet_crypto_init (vlib_main_t * vm)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vnet_crypto_thread_t *ct = 0;

  cm->dispatch_mode = VNET_CRYPTO_ASYNC_DISPATCH_POLLING;
  cm->engine_index_by_name = hash_create_string ( /* size */ 0,
                                                  sizeof (uword));
  cm->alg_index_by_name = hash_create_string (0, sizeof (uword));
  cm->async_alg_index_by_name = hash_create_string (0, sizeof (uword));
  vec_validate_aligned (cm->threads, tm->n_vlib_mains, CLIB_CACHE_LINE_BYTES);
  vec_foreach (ct, cm->threads)
    pool_alloc_aligned (ct->frame_pool, 1024, CLIB_CACHE_LINE_BYTES);
  vec_validate (cm->algs, VNET_CRYPTO_N_ALGS);
  vec_validate (cm->async_algs, VNET_CRYPTO_N_ASYNC_ALGS);
  clib_bitmap_validate (cm->async_active_ids, VNET_CRYPTO_ASYNC_OP_N_IDS - 1);

#define _(n, s, l) \
  vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
                                VNET_CRYPTO_OP_##n##_ENC, \
                                VNET_CRYPTO_OP_##n##_DEC, s, 0);
  foreach_crypto_cipher_alg;
#undef _
#define _(n, s, l) \
  vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
                                VNET_CRYPTO_OP_##n##_ENC, \
                                VNET_CRYPTO_OP_##n##_DEC, s, 1);
  foreach_crypto_aead_alg;
#undef _
#define _(n, s) \
  vnet_crypto_init_hmac_data (VNET_CRYPTO_ALG_HMAC_##n, \
                              VNET_CRYPTO_OP_##n##_HMAC, "hmac-" s);
  foreach_crypto_hmac_alg;
#undef _
#define _(n, s, k, t, a) \
  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a, \
                               VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
                               VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC, \
                               s);
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k ,d) \
  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##c##_##h##_TAG##d, \
                               VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
                               VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC, \
                               s);
  foreach_crypto_link_async_alg
#undef _

  cm->crypto_node_index =
    vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch")->index;

  return 0;
}

VLIB_INIT_FUNCTION (vnet_crypto_init);

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */