/*
 * Copyright (c) 2020 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <stdbool.h>
#include <vlib/vlib.h>
#include <vnet/crypto/crypto.h>

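/* Error counters for this node, one per crypto operation status,
 * generated from the foreach_crypto_op_status macro. */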
typedef enum
{
#define _(sym,str) VNET_CRYPTO_ASYNC_ERROR_##sym,
  foreach_crypto_op_status
#undef _
    VNET_CRYPTO_ASYNC_N_ERROR,
} vnet_crypto_async_error_t;

static char *vnet_crypto_async_error_strings[] = {
#define _(sym,string) string,
  foreach_crypto_op_status
#undef _
};

#define foreach_crypto_dispatch_next \
  _(ERR_DROP, "error-drop")

typedef enum
{
#define _(n, s) CRYPTO_DISPATCH_NEXT_##n,
  foreach_crypto_dispatch_next
#undef _
    CRYPTO_DISPATCH_N_NEXT,
} crypto_dispatch_next_t;

typedef struct
{
  vnet_crypto_op_status_t op_status;
  vnet_crypto_async_op_id_t op;
} crypto_dispatch_trace_t;

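/* Trace formatter: prints the async op id and its completion status
 * for a traced buffer. */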
static u8 *
format_crypto_dispatch_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  crypto_dispatch_trace_t *t = va_arg (*args, crypto_dispatch_trace_t *);

  s = format (s, "%U: %U", format_vnet_crypto_async_op, t->op,
              format_vnet_crypto_op_status, t->op_status);
  return s;
}

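/* Attach a trace record to buffer b, recording the async op id and the
 * status the crypto engine reported for that element. */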
static void
vnet_crypto_async_add_trace (vlib_main_t * vm, vlib_node_runtime_t * node,
                             vlib_buffer_t * b,
                             vnet_crypto_async_op_id_t op_id,
                             vnet_crypto_op_status_t status)
{
  crypto_dispatch_trace_t *tr = vlib_add_trace (vm, node, b, sizeof (*tr));
  tr->op_status = status;
  tr->op = op_id;
}

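/* Drain one engine dequeue handler: repeatedly call hdl to fetch completed
 * frames, copy their buffer and next-node indices into the per-thread
 * cache, and flush the cache to the graph whenever it reaches
 * VLIB_FRAME_SIZE.  Returns the number of buffers still cached. */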
static_always_inline u32
crypto_dequeue_frame (vlib_main_t * vm, vlib_node_runtime_t * node,
                      vnet_crypto_thread_t * ct,
                      vnet_crypto_frame_dequeue_t * hdl, u32 n_cache,
                      u32 * n_total)
{
  vnet_crypto_main_t *cm = &crypto_main;
  u32 n_elts = 0;
  u32 enqueue_thread_idx = ~0;
  vnet_crypto_async_frame_t *cf = (hdl) (vm, &n_elts, &enqueue_thread_idx);
  *n_total += n_elts;

  while (cf || n_elts)
    {
      if (cf)
        {
          vec_validate (ct->buffer_indices, n_cache + cf->n_elts);
          vec_validate (ct->nexts, n_cache + cf->n_elts);
          clib_memcpy_fast (ct->buffer_indices + n_cache, cf->buffer_indices,
                            sizeof (u32) * cf->n_elts);
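          /* a successful frame carries one next-node index per element;
             a failed frame is walked element by element below */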
          if (cf->state == VNET_CRYPTO_FRAME_STATE_SUCCESS)
            {
              clib_memcpy_fast (ct->nexts + n_cache, cf->next_node_index,
                                sizeof (u16) * cf->n_elts);
            }
          else
            {
              u32 i;
              for (i = 0; i < cf->n_elts; i++)
                {
                  if (cf->elts[i].status != VNET_CRYPTO_OP_STATUS_COMPLETED)
                    {
                      ct->nexts[i + n_cache] = CRYPTO_DISPATCH_NEXT_ERR_DROP;
                      vlib_node_increment_counter (vm, node->node_index,
                                                   cf->elts[i].status, 1);
                    }
                  else
                    ct->nexts[i + n_cache] = cf->next_node_index[i];
                }
            }
          n_cache += cf->n_elts;
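          /* flush the cached buffers to the graph once a full vector is
             accumulated */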
          if (n_cache >= VLIB_FRAME_SIZE)
            {
              vlib_buffer_enqueue_to_next_vec (vm, node, &ct->buffer_indices,
                                               &ct->nexts, n_cache);
              n_cache = 0;
            }

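          /* add per-buffer trace records if tracing is enabled */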
          if (PREDICT_FALSE (node->flags & VLIB_NODE_FLAG_TRACE))
            {
              u32 i;

              for (i = 0; i < cf->n_elts; i++)
                {
                  vlib_buffer_t *b = vlib_get_buffer (vm,
                                                      cf->buffer_indices[i]);
                  if (b->flags & VLIB_BUFFER_IS_TRACED)
                    vnet_crypto_async_add_trace (vm, node, b, cf->op,
                                                 cf->elts[i].status);
                }
            }
          vnet_crypto_async_free_frame (vm, cf);
        }
      /* signal the enqueue thread to dequeue the processed frame
         (n_elts > 0) */
      if (n_elts > 0 &&
          ((node->state == VLIB_NODE_STATE_POLLING &&
            (node->flags &
             VLIB_NODE_FLAG_SWITCH_FROM_POLLING_TO_INTERRUPT_MODE)) ||
           node->state == VLIB_NODE_STATE_INTERRUPT))
        {
          vlib_node_set_interrupt_pending (
            vlib_get_main_by_index (enqueue_thread_idx),
            cm->crypto_node_index);
        }

      n_elts = 0;
      enqueue_thread_idx = 0;
      cf = (hdl) (vm, &n_elts, &enqueue_thread_idx);
      *n_total += n_elts;
    }

  return n_cache;
}

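/* Input node: drains every registered engine dequeue handler, hands
 * completed buffers to their next nodes, and re-arms the interrupt if
 * frames are still in flight. */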
VLIB_NODE_FN (crypto_dispatch_node) (vlib_main_t * vm,
                                     vlib_node_runtime_t * node,
                                     vlib_frame_t * frame)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  u32 n_dispatched = 0, n_cache = 0, index;
  vec_foreach_index (index, cm->dequeue_handlers)
    {
      n_cache = crypto_dequeue_frame (
        vm, node, ct, cm->dequeue_handlers[index], n_cache, &n_dispatched);
    }
  if (n_cache)
    vlib_buffer_enqueue_to_next_vec (vm, node, &ct->buffer_indices, &ct->nexts,
                                     n_cache);

  /* if there are still pending tasks and the node runs in interrupt mode,
     signal the current thread to dequeue again on the next loop */
  if (pool_elts (ct->frame_pool) > 0 &&
      ((node->state == VLIB_NODE_STATE_POLLING &&
        (node->flags &
         VLIB_NODE_FLAG_SWITCH_FROM_POLLING_TO_INTERRUPT_MODE)) ||
       node->state == VLIB_NODE_STATE_INTERRUPT))
    {
      vlib_node_set_interrupt_pending (vm, node->node_index);
    }

  return n_dispatched;
}

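/* Registered as an input node in interrupt state with adaptive mode
 * allowed; it is woken via vlib_node_set_interrupt_pending when completed
 * frames are ready to dequeue. */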
/* *INDENT-OFF* */
VLIB_REGISTER_NODE (crypto_dispatch_node) = {
  .name = "crypto-dispatch",
  .type = VLIB_NODE_TYPE_INPUT,
  .flags = VLIB_NODE_FLAG_ADAPTIVE_MODE,
  .state = VLIB_NODE_STATE_INTERRUPT,
  .format_trace = format_crypto_dispatch_trace,

  .n_errors = ARRAY_LEN(vnet_crypto_async_error_strings),
  .error_strings = vnet_crypto_async_error_strings,

  .n_next_nodes = CRYPTO_DISPATCH_N_NEXT,
  .next_nodes = {
#define _(n, s) \
    [CRYPTO_DISPATCH_NEXT_##n] = s,
    foreach_crypto_dispatch_next
#undef _
  },
};
/* *INDENT-ON* */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */