/*
 * Copyright (c) 2018 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <stdbool.h>
#include <vlib/vlib.h>
#include <vnet/crypto/crypto.h>

vnet_crypto_main_t crypto_main;

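/* Set the completion status on every op in a batch; used to fail an entire
 * batch when no handler is registered for its op id. */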
static_always_inline void
crypto_set_op_status (vnet_crypto_op_t * ops[], u32 n_ops, int status)
{
  while (n_ops--)
    {
      ops[0]->status = status;
      ops++;
    }
}

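/* Hand a batch of ops that all share one op id to the active handler:
 * chained (scatter-gather) ops go to the chained handler, plain ops to the
 * simple one. If no handler is registered, the whole batch is marked
 * VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER. Returns the handler's return
 * value, or 0 for an empty batch. */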
static_always_inline u32
vnet_crypto_process_ops_call_handler (vlib_main_t * vm,
                                      vnet_crypto_main_t * cm,
                                      vnet_crypto_op_id_t opt,
                                      vnet_crypto_op_t * ops[],
                                      vnet_crypto_op_chunk_t * chunks,
                                      u32 n_ops)
{
  u32 rv = 0;
  if (n_ops == 0)
    return 0;

  if (chunks)
    {
      if (cm->chained_ops_handlers[opt] == 0)
        crypto_set_op_status (ops, n_ops,
                              VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
      else
        rv = (cm->chained_ops_handlers[opt]) (vm, ops, chunks, n_ops);
    }
  else
    {
      if (cm->ops_handlers[opt] == 0)
        crypto_set_op_status (ops, n_ops,
                              VNET_CRYPTO_OP_STATUS_FAIL_NO_HANDLER);
      else
        rv = (cm->ops_handlers[opt]) (vm, ops, n_ops);
    }
  return rv;
}

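/* Core dispatch loop: walk the ops array and collect consecutive ops that
 * share the same op id into op_queue (at most VLIB_FRAME_SIZE entries),
 * flushing the queue to its handler whenever the op id changes, the queue
 * fills up, or the array ends. */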
static_always_inline u32
vnet_crypto_process_ops_inline (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                vnet_crypto_op_chunk_t * chunks, u32 n_ops)
{
  vnet_crypto_main_t *cm = &crypto_main;
  const int op_q_size = VLIB_FRAME_SIZE;
  vnet_crypto_op_t *op_queue[op_q_size];
  vnet_crypto_op_id_t opt, current_op_type = ~0;
  u32 n_op_queue = 0;
  u32 rv = 0, i;

  ASSERT (n_ops >= 1);

  for (i = 0; i < n_ops; i++)
    {
      opt = ops[i].op;

      if (current_op_type != opt || n_op_queue >= op_q_size)
        {
          rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
                                                      op_queue, chunks,
                                                      n_op_queue);
          n_op_queue = 0;
          current_op_type = opt;
        }

      op_queue[n_op_queue++] = &ops[i];
    }

  rv += vnet_crypto_process_ops_call_handler (vm, cm, current_op_type,
                                              op_queue, chunks, n_op_queue);
  return rv;
}

u32
vnet_crypto_process_ops (vlib_main_t * vm, vnet_crypto_op_t ops[], u32 n_ops)
{
  return vnet_crypto_process_ops_inline (vm, ops, 0, n_ops);
}

u32
vnet_crypto_process_chained_ops (vlib_main_t * vm, vnet_crypto_op_t ops[],
                                 vnet_crypto_op_chunk_t * chunks, u32 n_ops)
{
  return vnet_crypto_process_ops_inline (vm, ops, chunks, n_ops);
}

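/* Register a crypto engine and return its index; engines then attach
 * per-op handlers against that index. An illustrative sketch (engine name
 * and handler function are hypothetical, the op id follows the
 * VNET_CRYPTO_OP_<alg>_ENC pattern generated in this file):
 *
 *   u32 eidx = vnet_crypto_register_engine (vm, "my-engine", 100, "example");
 *   vnet_crypto_register_ops_handler (vm, eidx,
 *                                     VNET_CRYPTO_OP_AES_128_CBC_ENC,
 *                                     my_aes_128_cbc_enc_fn);
 */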
u32
vnet_crypto_register_engine (vlib_main_t * vm, char *name, int prio,
                             char *desc)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *p;

  vec_add2 (cm->engines, p, 1);
  p->name = name;
  p->desc = desc;
  p->priority = prio;

  hash_set_mem (cm->engine_index_by_name, p->name, p - cm->engines);

  return p - cm->engines;
}

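/* Point the global handler table for one op id at engine 'ei', for the
 * simple and/or chained flavour, provided the engine actually implements
 * that flavour. */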
static_always_inline void
crypto_set_active_engine (vnet_crypto_op_data_t * od,
                          vnet_crypto_op_id_t id, u32 ei,
                          crypto_op_class_type_t oct)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);

  if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_CHAINED)
    {
      if (ce->chained_ops_handlers[id])
        {
          od->active_engine_index_chained = ei;
          cm->chained_ops_handlers[id] = ce->chained_ops_handlers[id];
        }
    }

  if (oct == CRYPTO_OP_BOTH || oct == CRYPTO_OP_SIMPLE)
    {
      if (ce->ops_handlers[id])
        {
          od->active_engine_index_simple = ei;
          cm->ops_handlers[id] = ce->ops_handlers[id];
        }
    }
}

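/* Force a specific engine for every op id of a named algorithm (used by the
 * crypto handler CLI/API). Returns -1 when the algorithm or engine name is
 * unknown. */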
int
vnet_crypto_set_handler2 (char *alg_name, char *engine,
                          crypto_op_class_type_t oct)
{
  uword *p;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_alg_data_t *ad;
  int i;

  p = hash_get_mem (cm->alg_index_by_name, alg_name);
  if (!p)
    return -1;

  ad = vec_elt_at_index (cm->algs, p[0]);

  p = hash_get_mem (cm->engine_index_by_name, engine);
  if (!p)
    return -1;

  for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
    {
      vnet_crypto_op_data_t *od;
      vnet_crypto_op_id_t id = ad->op_by_type[i];
      if (id == 0)
        continue;

      od = cm->opt_data + id;
      crypto_set_active_engine (od, id, p[0], oct);
    }

  return 0;
}

int
vnet_crypto_is_set_handler (vnet_crypto_alg_t alg)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_op_id_t opt = 0;
  int i;

  if (alg > vec_len (cm->algs))
    return 0;

  for (i = 0; i < VNET_CRYPTO_OP_N_TYPES; i++)
    if ((opt = cm->algs[alg].op_by_type[i]) != 0)
      break;

  return NULL != cm->ops_handlers[opt];
}

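/* Record an engine's simple and/or chained handler for an op id and make it
 * the active one if no engine is active yet, or if its priority value is
 * numerically higher than the currently active engine's. */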
void
vnet_crypto_register_ops_handler_inline (vlib_main_t * vm, u32 engine_index,
                                         vnet_crypto_op_id_t opt,
                                         vnet_crypto_ops_handler_t * fn,
                                         vnet_crypto_chained_ops_handler_t *
                                         cfn)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
  vnet_crypto_op_data_t *otd = cm->opt_data + opt;
  vec_validate_aligned (cm->ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);
  vec_validate_aligned (cm->chained_ops_handlers, VNET_CRYPTO_N_OP_IDS - 1,
                        CLIB_CACHE_LINE_BYTES);

  if (fn)
    {
      e->ops_handlers[opt] = fn;
      if (otd->active_engine_index_simple == ~0)
        {
          otd->active_engine_index_simple = engine_index;
          cm->ops_handlers[opt] = fn;
        }

      ae = vec_elt_at_index (cm->engines, otd->active_engine_index_simple);
      if (ae->priority < e->priority)
        crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_SIMPLE);
    }

  if (cfn)
    {
      e->chained_ops_handlers[opt] = cfn;
      if (otd->active_engine_index_chained == ~0)
        {
          otd->active_engine_index_chained = engine_index;
          cm->chained_ops_handlers[opt] = cfn;
        }

      ae = vec_elt_at_index (cm->engines, otd->active_engine_index_chained);
      if (ae->priority < e->priority)
        crypto_set_active_engine (otd, opt, engine_index, CRYPTO_OP_CHAINED);
    }

  return;
}

void
vnet_crypto_register_ops_handler (vlib_main_t * vm, u32 engine_index,
                                  vnet_crypto_op_id_t opt,
                                  vnet_crypto_ops_handler_t * fn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, 0);
}

void
vnet_crypto_register_chained_ops_handler (vlib_main_t * vm, u32 engine_index,
                                          vnet_crypto_op_id_t opt,
                                          vnet_crypto_chained_ops_handler_t *
                                          fn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, 0, fn);
}

void
vnet_crypto_register_ops_handlers (vlib_main_t * vm, u32 engine_index,
                                   vnet_crypto_op_id_t opt,
                                   vnet_crypto_ops_handler_t * fn,
                                   vnet_crypto_chained_ops_handler_t * cfn)
{
  vnet_crypto_register_ops_handler_inline (vm, engine_index, opt, fn, cfn);
}

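/* Register an engine's async enqueue/dequeue handler pair for an async op
 * id. Active-engine selection mirrors the sync path, except that an engine
 * registering later with an equal priority also takes over. */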
void
vnet_crypto_register_async_handler (vlib_main_t * vm, u32 engine_index,
                                    vnet_crypto_async_op_id_t opt,
                                    vnet_crypto_frame_enqueue_t * enqueue_hdl,
                                    vnet_crypto_frame_dequeue_t * dequeue_hdl)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ae, *e = vec_elt_at_index (cm->engines, engine_index);
  vnet_crypto_async_op_data_t *otd = cm->async_opt_data + opt;
  vec_validate_aligned (cm->enqueue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS,
                        CLIB_CACHE_LINE_BYTES);
  vec_validate_aligned (cm->dequeue_handlers, VNET_CRYPTO_ASYNC_OP_N_IDS,
                        CLIB_CACHE_LINE_BYTES);

  /* both the enqueue and the dequeue handler should be present */
  if (!enqueue_hdl && !dequeue_hdl)
    return;

  e->enqueue_handlers[opt] = enqueue_hdl;
  e->dequeue_handlers[opt] = dequeue_hdl;
  if (otd->active_engine_index_async == ~0)
    {
      otd->active_engine_index_async = engine_index;
      cm->enqueue_handlers[opt] = enqueue_hdl;
      cm->dequeue_handlers[opt] = dequeue_hdl;
    }

  ae = vec_elt_at_index (cm->engines, otd->active_engine_index_async);
  if (ae->priority <= e->priority)
    {
      otd->active_engine_index_async = engine_index;
      cm->enqueue_handlers[opt] = enqueue_hdl;
      cm->dequeue_handlers[opt] = dequeue_hdl;
    }

  return;
}

void
vnet_crypto_register_key_handler (vlib_main_t * vm, u32 engine_index,
                                  vnet_crypto_key_handler_t * key_handler)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *e = vec_elt_at_index (cm->engines, engine_index);
  e->key_op_handler = key_handler;
  return;
}

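/* Validate a key length against the algorithm: ciphers and AEADs require
 * their fixed key size, while HMAC and plain hash algorithms accept any
 * length. */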
static int
vnet_crypto_key_len_check (vnet_crypto_alg_t alg, u16 length)
{
  switch (alg)
    {
    case VNET_CRYPTO_N_ALGS:
      return 0;
    case VNET_CRYPTO_ALG_NONE:
      return 1;

#define _(n, s, l) \
  case VNET_CRYPTO_ALG_##n: \
    if ((l) == length) \
      return 1; \
    break;
      foreach_crypto_cipher_alg foreach_crypto_aead_alg
#undef _
      /* HMAC allows any key length */
#define _(n, s) \
  case VNET_CRYPTO_ALG_HMAC_##n: \
    return 1;
      foreach_crypto_hmac_alg
#undef _

#define _(n, s) \
  case VNET_CRYPTO_ALG_HASH_##n: \
    return 1;
      foreach_crypto_hash_alg
#undef _
    }

  return 0;
}

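/* Allocate a key slot, copy the key material and notify every engine that
 * registered a key handler with VNET_CRYPTO_KEY_OP_ADD. The worker barrier
 * is taken only when the key pool has to grow, since growth reallocates
 * memory workers may be reading. Returns the key index, or ~0 on a bad key
 * length. */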
u32
vnet_crypto_key_add (vlib_main_t * vm, vnet_crypto_alg_t alg, u8 * data,
                     u16 length)
{
  u32 index;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key;

  u8 need_barrier_sync = 0;

  if (!vnet_crypto_key_len_check (alg, length))
    return ~0;

  pool_get_aligned_will_expand (cm->keys, need_barrier_sync,
                                CLIB_CACHE_LINE_BYTES);
  /* If cm->keys is about to expand, take the worker barrier so workers
   * never see the pool while it is being reallocated. */
  if (need_barrier_sync)
    vlib_worker_thread_barrier_sync (vm);

  pool_get_zero (cm->keys, key);

  if (need_barrier_sync)
    vlib_worker_thread_barrier_release (vm);

  index = key - cm->keys;
  key->type = VNET_CRYPTO_KEY_TYPE_DATA;
  key->alg = alg;
  vec_validate_aligned (key->data, length - 1, CLIB_CACHE_LINE_BYTES);
  clib_memcpy (key->data, data, length);
  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
  /* *INDENT-ON* */
  return index;
}

void
vnet_crypto_key_del (vlib_main_t * vm, vnet_crypto_key_index_t index)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key = pool_elt_at_index (cm->keys, index);

  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_DEL, index);
  /* *INDENT-ON* */

  if (key->type == VNET_CRYPTO_KEY_TYPE_DATA)
    {
      clib_memset (key->data, 0, vec_len (key->data));
      vec_free (key->data);
    }
  else if (key->type == VNET_CRYPTO_KEY_TYPE_LINK)
    {
      key->index_crypto = key->index_integ = 0;
    }

  pool_put (cm->keys, key);
}

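/* Map a (cipher, HMAC integrity) pair onto the corresponding linked async
 * algorithm; returns ~0 when the combination is not defined in
 * foreach_crypto_link_async_alg. */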
vnet_crypto_async_alg_t
vnet_crypto_link_algs (vnet_crypto_alg_t crypto_alg,
                       vnet_crypto_alg_t integ_alg)
{
#define _(c, h, s, k ,d) \
  if (crypto_alg == VNET_CRYPTO_ALG_##c && \
      integ_alg == VNET_CRYPTO_ALG_HMAC_##h) \
    return VNET_CRYPTO_ALG_##c##_##h##_TAG##d;
  foreach_crypto_link_async_alg
#undef _
  return ~0;
}

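/* Create a "linked" key that references an existing crypto key and an
 * existing integrity key, for async algorithms that consume both. Engines
 * are notified exactly as for a plain key add. */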
u32
vnet_crypto_key_add_linked (vlib_main_t * vm,
                            vnet_crypto_key_index_t index_crypto,
                            vnet_crypto_key_index_t index_integ)
{
  u32 index;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *engine;
  vnet_crypto_key_t *key_crypto, *key_integ, *key;
  vnet_crypto_async_alg_t linked_alg;

  key_crypto = pool_elt_at_index (cm->keys, index_crypto);
  key_integ = pool_elt_at_index (cm->keys, index_integ);

  linked_alg = vnet_crypto_link_algs (key_crypto->alg, key_integ->alg);
  if (linked_alg == ~0)
    return ~0;

  pool_get_zero (cm->keys, key);
  index = key - cm->keys;
  key->type = VNET_CRYPTO_KEY_TYPE_LINK;
  key->index_crypto = index_crypto;
  key->index_integ = index_integ;
  key->async_alg = linked_alg;

  /* *INDENT-OFF* */
  vec_foreach (engine, cm->engines)
    if (engine->key_op_handler)
      engine->key_op_handler (vm, VNET_CRYPTO_KEY_OP_ADD, index);
  /* *INDENT-ON* */

  return index;
}

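/* Enable or disable the crypto-dispatch node on all worker threads,
 * choosing polling or interrupt state from the configured async dispatch
 * mode and the current async reference count. */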
clib_error_t *
crypto_dispatch_enable_disable (int is_enable)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
  u8 state_change = 0;

  CLIB_MEMORY_STORE_BARRIER ();
  if (is_enable && cm->async_refcnt > 0)
    {
      state_change = 1;
      state =
        cm->dispatch_mode ==
        VNET_CRYPTO_ASYNC_DISPATCH_POLLING ? VLIB_NODE_STATE_POLLING :
        VLIB_NODE_STATE_INTERRUPT;
    }

  if (!is_enable && cm->async_refcnt == 0)
    {
      state_change = 1;
      state = VLIB_NODE_STATE_DISABLED;
    }

  if (state_change)
    for (i = skip_master; i < tm->n_vlib_mains; i++)
      {
        vlib_main_t *ovm = vlib_get_main_by_index (i);
        if (state != vlib_node_get_state (ovm, cm->crypto_node_index))
          vlib_node_set_state (ovm, cm->crypto_node_index, state);
      }
  return 0;
}

static_always_inline void
crypto_set_active_async_engine (vnet_crypto_async_op_data_t * od,
                                vnet_crypto_async_op_id_t id, u32 ei)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_engine_t *ce = vec_elt_at_index (cm->engines, ei);

  if (ce->enqueue_handlers[id] && ce->dequeue_handlers[id])
    {
      od->active_engine_index_async = ei;
      cm->enqueue_handlers[id] = ce->enqueue_handlers[id];
      cm->dequeue_handlers[id] = ce->dequeue_handlers[id];
    }
}

int
vnet_crypto_set_async_handler2 (char *alg_name, char *engine)
{
  uword *p;
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_alg_data_t *ad;
  int i;

  p = hash_get_mem (cm->async_alg_index_by_name, alg_name);
  if (!p)
    return -1;

  ad = vec_elt_at_index (cm->async_algs, p[0]);

  p = hash_get_mem (cm->engine_index_by_name, engine);
  if (!p)
    return -1;

  for (i = 0; i < VNET_CRYPTO_ASYNC_OP_N_TYPES; i++)
    {
      vnet_crypto_async_op_data_t *od;
      vnet_crypto_async_op_id_t id = ad->op_by_type[i];
      if (id == 0)
        continue;

      od = cm->async_opt_data + id;
      crypto_set_active_async_engine (od, id, p[0]);
    }

  return 0;
}

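/* Register (or look up) a next node of crypto-dispatch so completed async
 * frames can be handed to 'post_node_name'. Returns the next index, or ~0
 * if the named node does not exist. */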
u32
vnet_crypto_register_post_node (vlib_main_t * vm, char *post_node_name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_async_next_node_t *nn = 0;
  vlib_node_t *cc, *pn;
  uword index = vec_len (cm->next_nodes);

  pn = vlib_get_node_by_name (vm, (u8 *) post_node_name);
  if (!pn)
    return ~0;

  /* *INDENT-OFF* */
  vec_foreach (nn, cm->next_nodes)
  {
    if (nn->node_idx == pn->index)
      return nn->next_idx;
  }
  /* *INDENT-ON* */

  vec_validate (cm->next_nodes, index);
  nn = vec_elt_at_index (cm->next_nodes, index);

  cc = vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch");
  nn->next_idx = vlib_node_add_named_next (vm, cc->index, post_node_name);
  nn->node_idx = pn->index;

  return nn->next_idx;
}

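/* Reference-counted request to turn async crypto on or off. The dispatch
 * node state is only touched on the 0 -> 1 and 1 -> 0 transitions of
 * async_refcnt. */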
void
vnet_crypto_request_async_mode (int is_enable)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;
  u8 state_change = 0;

  CLIB_MEMORY_STORE_BARRIER ();
  if (is_enable && cm->async_refcnt == 0)
    {
      state_change = 1;
      state =
        cm->dispatch_mode == VNET_CRYPTO_ASYNC_DISPATCH_POLLING ?
        VLIB_NODE_STATE_POLLING : VLIB_NODE_STATE_INTERRUPT;
    }
  if (!is_enable && cm->async_refcnt == 1)
    {
      state_change = 1;
      state = VLIB_NODE_STATE_DISABLED;
    }

  if (state_change)
    for (i = skip_master; i < tm->n_vlib_mains; i++)
      {
        vlib_main_t *ovm = vlib_get_main_by_index (i);
        if (state != vlib_node_get_state (ovm, cm->crypto_node_index))
          vlib_node_set_state (ovm, cm->crypto_node_index, state);
      }

  if (is_enable)
    cm->async_refcnt += 1;
  else if (cm->async_refcnt > 0)
    cm->async_refcnt -= 1;
}

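/* Switch the async dispatch node between polling and interrupt mode on all
 * workers, respecting the current async reference count. */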
void
vnet_crypto_set_async_dispatch_mode (u8 mode)
{
  vnet_crypto_main_t *cm = &crypto_main;
  u32 skip_master = vlib_num_workers () > 0, i;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vlib_node_state_t state = VLIB_NODE_STATE_DISABLED;

  CLIB_MEMORY_STORE_BARRIER ();
  cm->dispatch_mode = mode;
  if (mode == VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT)
    {
      state =
        cm->async_refcnt == 0 ?
        VLIB_NODE_STATE_DISABLED : VLIB_NODE_STATE_INTERRUPT;
    }
  else if (mode == VNET_CRYPTO_ASYNC_DISPATCH_POLLING)
    {
      state =
        cm->async_refcnt == 0 ?
        VLIB_NODE_STATE_DISABLED : VLIB_NODE_STATE_POLLING;
    }

  for (i = skip_master; i < tm->n_vlib_mains; i++)
    {
      vlib_main_t *ovm = vlib_get_main_by_index (i);
      if (state != vlib_node_get_state (ovm, cm->crypto_node_index))
        vlib_node_set_state (ovm, cm->crypto_node_index, state);
    }
}

int
vnet_crypto_is_set_async_handler (vnet_crypto_async_op_id_t op)
{
  vnet_crypto_main_t *cm = &crypto_main;

  return (op < vec_len (cm->enqueue_handlers) &&
          NULL != cm->enqueue_handlers[op]);
}

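/* Per-algorithm initializers: record the algorithm name, wire its encrypt/
 * decrypt (or hash/HMAC) op ids into op_by_type, reset the active engine
 * indices and index the name for lookups by name. */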
static void
vnet_crypto_init_cipher_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t eid,
                              vnet_crypto_op_id_t did, char *name, u8 is_aead)
{
  vnet_crypto_op_type_t eopt, dopt;
  vnet_crypto_main_t *cm = &crypto_main;

  cm->algs[alg].name = name;
  cm->opt_data[eid].alg = cm->opt_data[did].alg = alg;
  cm->opt_data[eid].active_engine_index_simple = ~0;
  cm->opt_data[did].active_engine_index_simple = ~0;
  cm->opt_data[eid].active_engine_index_chained = ~0;
  cm->opt_data[did].active_engine_index_chained = ~0;
  if (is_aead)
    {
      eopt = VNET_CRYPTO_OP_TYPE_AEAD_ENCRYPT;
      dopt = VNET_CRYPTO_OP_TYPE_AEAD_DECRYPT;
    }
  else
    {
      eopt = VNET_CRYPTO_OP_TYPE_ENCRYPT;
      dopt = VNET_CRYPTO_OP_TYPE_DECRYPT;
    }
  cm->opt_data[eid].type = eopt;
  cm->opt_data[did].type = dopt;
  cm->algs[alg].op_by_type[eopt] = eid;
  cm->algs[alg].op_by_type[dopt] = did;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_hash_data (vnet_crypto_alg_t alg, vnet_crypto_op_id_t id,
                            char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  cm->algs[alg].name = name;
  cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HASH] = id;
  cm->opt_data[id].alg = alg;
  cm->opt_data[id].active_engine_index_simple = ~0;
  cm->opt_data[id].active_engine_index_chained = ~0;
  cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HASH;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_hmac_data (vnet_crypto_alg_t alg,
                            vnet_crypto_op_id_t id, char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;
  cm->algs[alg].name = name;
  cm->algs[alg].op_by_type[VNET_CRYPTO_OP_TYPE_HMAC] = id;
  cm->opt_data[id].alg = alg;
  cm->opt_data[id].active_engine_index_simple = ~0;
  cm->opt_data[id].active_engine_index_chained = ~0;
  cm->opt_data[id].type = VNET_CRYPTO_OP_TYPE_HMAC;
  hash_set_mem (cm->alg_index_by_name, name, alg);
}

static void
vnet_crypto_init_async_data (vnet_crypto_async_alg_t alg,
                             vnet_crypto_async_op_id_t eid,
                             vnet_crypto_async_op_id_t did, char *name)
{
  vnet_crypto_main_t *cm = &crypto_main;

  cm->async_algs[alg].name = name;
  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT] = eid;
  cm->async_algs[alg].op_by_type[VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT] = did;
  cm->async_opt_data[eid].type = VNET_CRYPTO_ASYNC_OP_TYPE_ENCRYPT;
  cm->async_opt_data[eid].alg = alg;
  cm->async_opt_data[eid].active_engine_index_async = ~0;
  cm->async_opt_data[did].type = VNET_CRYPTO_ASYNC_OP_TYPE_DECRYPT;
  cm->async_opt_data[did].alg = alg;
  cm->async_opt_data[did].active_engine_index_async = ~0;
  hash_set_mem (cm->async_alg_index_by_name, name, alg);
}

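/* Module init: create the name->index hashes, size the per-thread frame
 * pools and the alg/op tables, then expand the foreach_* lists to populate
 * data for every cipher, AEAD, HMAC, hash and async algorithm. */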
clib_error_t *
vnet_crypto_init (vlib_main_t * vm)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vnet_crypto_thread_t *ct = 0;

  cm->dispatch_mode = VNET_CRYPTO_ASYNC_DISPATCH_POLLING;
  cm->engine_index_by_name = hash_create_string ( /* size */ 0,
                                                 sizeof (uword));
  cm->alg_index_by_name = hash_create_string (0, sizeof (uword));
  cm->async_alg_index_by_name = hash_create_string (0, sizeof (uword));
  vec_validate_aligned (cm->threads, tm->n_vlib_mains, CLIB_CACHE_LINE_BYTES);
  vec_foreach (ct, cm->threads)
    pool_alloc_aligned (ct->frame_pool, VNET_CRYPTO_FRAME_POOL_SIZE,
                        CLIB_CACHE_LINE_BYTES);
  vec_validate (cm->algs, VNET_CRYPTO_N_ALGS);
  vec_validate (cm->async_algs, VNET_CRYPTO_N_ASYNC_ALGS);
  clib_bitmap_validate (cm->async_active_ids, VNET_CRYPTO_ASYNC_OP_N_IDS);

#define _(n, s, l) \
  vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
                                VNET_CRYPTO_OP_##n##_ENC, \
                                VNET_CRYPTO_OP_##n##_DEC, s, 0);
  foreach_crypto_cipher_alg;
#undef _
#define _(n, s, l) \
  vnet_crypto_init_cipher_data (VNET_CRYPTO_ALG_##n, \
                                VNET_CRYPTO_OP_##n##_ENC, \
                                VNET_CRYPTO_OP_##n##_DEC, s, 1);
  foreach_crypto_aead_alg;
#undef _
#define _(n, s) \
  vnet_crypto_init_hmac_data (VNET_CRYPTO_ALG_HMAC_##n, \
                              VNET_CRYPTO_OP_##n##_HMAC, "hmac-" s);
  foreach_crypto_hmac_alg;
#undef _
#define _(n, s) \
  vnet_crypto_init_hash_data (VNET_CRYPTO_ALG_HASH_##n, \
                              VNET_CRYPTO_OP_##n##_HASH, s);
  foreach_crypto_hash_alg;
#undef _
#define _(n, s, k, t, a) \
  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##n##_TAG##t##_AAD##a, \
                               VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_ENC, \
                               VNET_CRYPTO_OP_##n##_TAG##t##_AAD##a##_DEC, \
                               s);
  foreach_crypto_aead_async_alg
#undef _
#define _(c, h, s, k ,d) \
  vnet_crypto_init_async_data (VNET_CRYPTO_ALG_##c##_##h##_TAG##d, \
                               VNET_CRYPTO_OP_##c##_##h##_TAG##d##_ENC, \
                               VNET_CRYPTO_OP_##c##_##h##_TAG##d##_DEC, \
                               s);
  foreach_crypto_link_async_alg
#undef _
  cm->crypto_node_index =
    vlib_get_node_by_name (vm, (u8 *) "crypto-dispatch")->index;

  return 0;
}

VLIB_INIT_FUNCTION (vnet_crypto_init);

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */