blob: 216b924f96ebf33d6e1910ed77e13325d739c4f7 [file] [log] [blame]
/*
 * Copyright (c) 2020 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
15
16#include <stdbool.h>
17#include <vlib/vlib.h>
18#include <vnet/crypto/crypto.h>
19
/* Error counter indices for this node: one counter per crypto op status
 * (expanded from foreach_crypto_op_status), plus a trailing count
 * sentinel.  Failed frame elements bump the counter matching their
 * vnet_crypto_op_status_t value in crypto_dequeue_frame(). */
typedef enum
{
#define _(sym,str) VNET_CRYPTO_ASYNC_ERROR_##sym,
  foreach_crypto_op_status
#undef _
    VNET_CRYPTO_ASYNC_N_ERROR,
} vnet_crypto_async_error_t;
27
/* Human-readable strings for the error counters above; must stay in
 * lock-step with vnet_crypto_async_error_t (same foreach_ expansion). */
static char *vnet_crypto_async_error_strings[] = {
#define _(sym,string) string,
  foreach_crypto_op_status
#undef _
};
33
/* Next-node arcs registered for this node.  Only error-drop is fixed;
 * successfully processed buffers are enqueued directly to the next-node
 * index stored in the async frame (cf->next_node_index). */
#define foreach_crypto_dispatch_next \
  _(ERR_DROP, "error-drop")

typedef enum
{
#define _(n, s) CRYPTO_DISPATCH_NEXT_##n,
  foreach_crypto_dispatch_next
#undef _
    CRYPTO_DISPATCH_N_NEXT,
} crypto_dispatch_next_t;
44
/* Per-buffer packet-trace record: the async op that was executed and the
 * completion status reported by the crypto engine for this element. */
typedef struct
{
  vnet_crypto_op_status_t op_status;
  vnet_crypto_async_op_id_t op;
} crypto_dispatch_trace_t;
50
51static u8 *
52format_crypto_dispatch_trace (u8 * s, va_list * args)
53{
54 CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
55 CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
56 crypto_dispatch_trace_t *t = va_arg (*args, crypto_dispatch_trace_t *);
57
58 s = format (s, "%U: %U", format_vnet_crypto_async_op, t->op,
59 format_vnet_crypto_op_status, t->op_status);
60 return s;
61}
62
63static void
64vnet_crypto_async_add_trace (vlib_main_t * vm, vlib_node_runtime_t * node,
65 vlib_buffer_t * b,
66 vnet_crypto_async_op_id_t op_id,
67 vnet_crypto_op_status_t status)
68{
69 crypto_dispatch_trace_t *tr = vlib_add_trace (vm, node, b, sizeof (*tr));
70 tr->op_status = status;
71 tr->op = op_id;
72}
73
/* Repeatedly poll one engine dequeue handler (hdl) and stage the finished
 * buffers into the per-thread cache vectors (ct->buffer_indices /
 * ct->nexts), spilling a full batch to the graph whenever the cache
 * reaches VLIB_FRAME_SIZE.
 *
 * n_cache  - number of buffers already staged by earlier handlers this
 *            dispatch; returned updated so the caller can flush the rest.
 * n_total  - accumulates the total number of dequeued elements (the
 *            node's return value).
 *
 * NOTE(review): the loop also runs when cf == NULL but n_elts > 0 —
 * presumably the engine can report completed elements without returning
 * a frame yet, so we keep polling and signalling; confirm against the
 * engine dequeue-handler contract. */
static_always_inline u32
crypto_dequeue_frame (vlib_main_t * vm, vlib_node_runtime_t * node,
		      vnet_crypto_thread_t * ct,
		      vnet_crypto_frame_dequeue_t * hdl, u32 n_cache,
		      u32 * n_total)
{
  vnet_crypto_main_t *cm = &crypto_main;
  u32 n_elts = 0;
  u32 enqueue_thread_idx = ~0;
  vnet_crypto_async_frame_t *cf = (hdl) (vm, &n_elts, &enqueue_thread_idx);
  *n_total += n_elts;

  while (cf || n_elts)
    {
      if (cf)
	{
	  /* grow the staging vectors to hold this frame's elements */
	  vec_validate (ct->buffer_indices, n_cache + cf->n_elts);
	  vec_validate (ct->nexts, n_cache + cf->n_elts);
	  clib_memcpy_fast (ct->buffer_indices + n_cache, cf->buffer_indices,
			    sizeof (u32) * cf->n_elts);
	  if (cf->state == VNET_CRYPTO_FRAME_STATE_SUCCESS)
	    {
	      /* whole frame succeeded: bulk-copy the per-element next
	         nodes chosen at enqueue time */
	      clib_memcpy_fast (ct->nexts + n_cache, cf->next_node_index,
				sizeof (u16) * cf->n_elts);
	    }
	  else
	    {
	      /* mixed result: route failed elements to error-drop and
	         count the failure under its status' error counter */
	      u32 i;
	      for (i = 0; i < cf->n_elts; i++)
		{
		  if (cf->elts[i].status != VNET_CRYPTO_OP_STATUS_COMPLETED)
		    {
		      ct->nexts[i + n_cache] = CRYPTO_DISPATCH_NEXT_ERR_DROP;
		      vlib_node_increment_counter (vm, node->node_index,
						   cf->elts[i].status, 1);
		    }
		  else
		    ct->nexts[i + n_cache] = cf->next_node_index[i];
		}
	    }
	  n_cache += cf->n_elts;
	  /* spill a full batch to the graph; remaining partial batches
	     are flushed by the caller after all handlers are drained */
	  if (n_cache >= VLIB_FRAME_SIZE)
	    {
	      vlib_buffer_enqueue_to_next_vec (vm, node, &ct->buffer_indices,
					       &ct->nexts, n_cache);
	      n_cache = 0;
	    }

	  if (PREDICT_FALSE (node->flags & VLIB_NODE_FLAG_TRACE))
	    {
	      u32 i;

	      for (i = 0; i < cf->n_elts; i++)
		{
		  vlib_buffer_t *b = vlib_get_buffer (vm,
						      cf->buffer_indices[i]);
		  if (b->flags & VLIB_BUFFER_IS_TRACED)
		    vnet_crypto_async_add_trace (vm, node, b, cf->op,
						 cf->elts[i].status);
		}
	    }
	  /* frame contents fully copied out above; return it to the pool */
	  vnet_crypto_async_free_frame (vm, cf);
	}
      /* signal enqueue-thread to dequeue the processed frame (n_elts>0) */
      if (cm->dispatch_mode == VNET_CRYPTO_ASYNC_DISPATCH_INTERRUPT
	  && n_elts > 0)
	{
	  vlib_node_set_interrupt_pending (
	    vlib_get_main_by_index (enqueue_thread_idx),
	    cm->crypto_node_index);
	}

      /* poll the handler again until it has nothing more to report */
      n_elts = 0;
      enqueue_thread_idx = 0;
      cf = (hdl) (vm, &n_elts, &enqueue_thread_idx);
      *n_total += n_elts;
    }

  return n_cache;
}
154
155VLIB_NODE_FN (crypto_dispatch_node) (vlib_main_t * vm,
156 vlib_node_runtime_t * node,
157 vlib_frame_t * frame)
158{
159 vnet_crypto_main_t *cm = &crypto_main;
160 vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
Jakub Wysocki83b2bb82021-11-30 10:53:03 +0000161 u32 n_dispatched = 0, n_cache = 0, index;
162 vec_foreach_index (index, cm->dequeue_handlers)
Dastin Wilski440bf5f2022-01-12 09:28:45 +0100163 {
164 if (PREDICT_FALSE (cm->dequeue_handlers[index] == 0))
165 continue;
166 n_cache = crypto_dequeue_frame (
167 vm, node, ct, cm->dequeue_handlers[index], n_cache, &n_dispatched);
168 }
Fan Zhangf5395782020-04-29 14:00:03 +0100169 /* *INDENT-ON* */
170 if (n_cache)
Benoît Ganne10bb21f2021-09-08 16:26:52 +0200171 vlib_buffer_enqueue_to_next_vec (vm, node, &ct->buffer_indices, &ct->nexts,
172 n_cache);
Fan Zhangf5395782020-04-29 14:00:03 +0100173
174 return n_dispatched;
175}
176
/* Node registration: an input node, disabled by default (enabled when an
 * async crypto engine is activated); error counters and the error-drop
 * arc come from the foreach_ macros above. */
/* *INDENT-OFF* */
VLIB_REGISTER_NODE (crypto_dispatch_node) = {
  .name = "crypto-dispatch",
  .type = VLIB_NODE_TYPE_INPUT,
  .state = VLIB_NODE_STATE_DISABLED,
  .format_trace = format_crypto_dispatch_trace,

  .n_errors = ARRAY_LEN(vnet_crypto_async_error_strings),
  .error_strings = vnet_crypto_async_error_strings,

  .n_next_nodes = CRYPTO_DISPATCH_N_NEXT,
  .next_nodes = {
#define _(n, s) \
  [CRYPTO_DISPATCH_NEXT_##n] = s,
  foreach_crypto_dispatch_next
#undef _
  },
};
/* *INDENT-ON* */
196
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */