/*
2 * Copyright (c) 2020 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16#include <stdbool.h>
17#include <vlib/vlib.h>
18#include <vnet/crypto/crypto.h>
19
/*
 * Per-node error counters: one counter per crypto operation status,
 * generated from foreach_crypto_op_status.  Counter indices therefore
 * track vnet_crypto_op_status_t values — crypto_dequeue_frame relies on
 * this when it bumps a counter using the raw element status.
 */
typedef enum
{
#define _(sym,str) VNET_CRYPTO_ASYNC_ERROR_##sym,
  foreach_crypto_op_status
#undef _
    VNET_CRYPTO_ASYNC_N_ERROR,
} vnet_crypto_async_error_t;
27
/* Human-readable counter names, kept in lockstep with the error enum above
   (both expand foreach_crypto_op_status in the same order).  */
static char *vnet_crypto_async_error_strings[] = {
#define _(sym,string) string,
  foreach_crypto_op_status
#undef _
};
33
/* Graph arcs registered for this node.  Only an error-drop edge is listed
   here; buffers that completed successfully are steered to the per-element
   next_node_index recorded in the async frame instead.  */
#define foreach_crypto_dispatch_next \
  _(ERR_DROP, "error-drop")

typedef enum
{
#define _(n, s) CRYPTO_DISPATCH_NEXT_##n,
  foreach_crypto_dispatch_next
#undef _
    CRYPTO_DISPATCH_N_NEXT,
} crypto_dispatch_next_t;
44
/* Per-buffer trace record: the async op id of the frame the buffer came
   from and that element's completion status.  */
typedef struct
{
  vnet_crypto_op_status_t op_status;
  vnet_crypto_async_op_id_t op;
} crypto_dispatch_trace_t;
50
51static u8 *
52format_crypto_dispatch_trace (u8 * s, va_list * args)
53{
54 CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
55 CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
56 crypto_dispatch_trace_t *t = va_arg (*args, crypto_dispatch_trace_t *);
57
58 s = format (s, "%U: %U", format_vnet_crypto_async_op, t->op,
59 format_vnet_crypto_op_status, t->op_status);
60 return s;
61}
62
63static void
64vnet_crypto_async_add_trace (vlib_main_t * vm, vlib_node_runtime_t * node,
65 vlib_buffer_t * b,
66 vnet_crypto_async_op_id_t op_id,
67 vnet_crypto_op_status_t status)
68{
69 crypto_dispatch_trace_t *tr = vlib_add_trace (vm, node, b, sizeof (*tr));
70 tr->op_status = status;
71 tr->op = op_id;
72}
73
/*
 * Poll one engine dequeue handler until it runs dry, accumulating finished
 * buffers and their next-node indices into the per-thread scratch vectors
 * (ct->buffer_indice / ct->nexts).  Whenever a full vector's worth of
 * buffers has been collected it is flushed into the graph.
 *
 * @param vm       vlib main for this worker thread
 * @param node     this node's runtime (used for tracing and error counters)
 * @param ct       per-thread crypto state holding the scratch vectors
 * @param hdl      engine-provided dequeue callback; returns NULL when empty
 * @param n_cache  buffers already cached by earlier handlers this dispatch
 * @param n_total  in/out: total buffers dequeued during this dispatch
 * @return         number of buffers still cached (not yet enqueued)
 */
static_always_inline u32
crypto_dequeue_frame (vlib_main_t * vm, vlib_node_runtime_t * node,
		      vnet_crypto_thread_t * ct,
		      vnet_crypto_frame_dequeue_t * hdl,
		      u32 n_cache, u32 * n_total)
{
  vnet_crypto_async_frame_t *cf = (hdl) (vm);

  while (cf)
    {
      /* Make room behind what is already cached.  NOTE(review): validating
         index n_cache + cf->n_elts grows the vectors one slot past the last
         index actually written (n_cache + cf->n_elts - 1); harmless
         over-allocation.  */
      vec_validate (ct->buffer_indice, n_cache + cf->n_elts);
      vec_validate (ct->nexts, n_cache + cf->n_elts);
      clib_memcpy_fast (ct->buffer_indice + n_cache, cf->buffer_indices,
			sizeof (u32) * cf->n_elts);
      if (cf->state == VNET_CRYPTO_FRAME_STATE_SUCCESS)
	{
	  /* Whole frame succeeded: bulk-copy the per-element next nodes.  */
	  clib_memcpy_fast (ct->nexts + n_cache, cf->next_node_index,
			    sizeof (u16) * cf->n_elts);
	}
      else
	{
	  /* Frame not fully successful: inspect each element.  Failures go
	     to error-drop; the raw element status doubles as the error
	     counter index because the error enum is generated from
	     foreach_crypto_op_status in the same order.  */
	  u32 i;
	  for (i = 0; i < cf->n_elts; i++)
	    {
	      if (cf->elts[i].status != VNET_CRYPTO_OP_STATUS_COMPLETED)
		{
		  ct->nexts[i + n_cache] = CRYPTO_DISPATCH_NEXT_ERR_DROP;
		  vlib_node_increment_counter (vm, node->node_index,
					       cf->elts[i].status, 1);
		}
	      else
		ct->nexts[i + n_cache] = cf->next_node_index[i];
	    }
	}
      n_cache += cf->n_elts;
      *n_total += cf->n_elts;
      /* Flush once at least a full vector has accumulated.  */
      if (n_cache >= VLIB_FRAME_SIZE)
	{
	  vlib_buffer_enqueue_to_next (vm, node, ct->buffer_indice, ct->nexts,
				       n_cache);
	  n_cache = 0;
	}

      if (PREDICT_FALSE (node->flags & VLIB_NODE_FLAG_TRACE))
	{
	  u32 i;

	  for (i = 0; i < cf->n_elts; i++)
	    {
	      vlib_buffer_t *b = vlib_get_buffer (vm, cf->buffer_indices[i]);
	      if (b->flags & VLIB_BUFFER_IS_TRACED)
		vnet_crypto_async_add_trace (vm, node, b, cf->op,
					     cf->elts[i].status);
	    }
	}
      /* Return the frame to the engine and poll for the next one.  */
      vnet_crypto_async_free_frame (vm, cf);
      cf = (hdl) (vm);
    }

  return n_cache;
}
135
/*
 * crypto-dispatch input node: polled each dispatch cycle.  Drains completed
 * frames from every active async crypto engine and hands finished buffers
 * to their recorded next nodes.  Returns the number of buffers dispatched.
 */
VLIB_NODE_FN (crypto_dispatch_node) (vlib_main_t * vm,
				     vlib_node_runtime_t * node,
				     vlib_frame_t * frame)
{
  vnet_crypto_main_t *cm = &crypto_main;
  vnet_crypto_thread_t *ct = cm->threads + vm->thread_index;
  u32 n_dispatched = 0, n_cache = 0;
  u32 index;

  /* *INDENT-OFF* */
  /* One dequeue handler per active async op id; n_cache carries partially
     filled vectors across handlers so buffers batch into full frames.  */
  clib_bitmap_foreach (index, cm->async_active_ids, ({
    n_cache = crypto_dequeue_frame (vm, node, ct, cm->dequeue_handlers[index],
				    n_cache, &n_dispatched);
  }));
  /* *INDENT-ON* */
  /* Flush any remainder that never filled a whole vector.  */
  if (n_cache)
    vlib_buffer_enqueue_to_next (vm, node, ct->buffer_indice, ct->nexts,
				 n_cache);

  return n_dispatched;
}
157
158/* *INDENT-OFF* */
/* *INDENT-OFF* */
/* Node registration.  Registered disabled; presumably switched to polling
   when an async crypto engine becomes active — confirm against the crypto
   engine enable path.  Error counters come straight from the op-status
   string table; the only static next node is error-drop.  */
VLIB_REGISTER_NODE (crypto_dispatch_node) = {
  .name = "crypto-dispatch",
  .type = VLIB_NODE_TYPE_INPUT,
  .state = VLIB_NODE_STATE_DISABLED,
  .format_trace = format_crypto_dispatch_trace,

  .n_errors = ARRAY_LEN(vnet_crypto_async_error_strings),
  .error_strings = vnet_crypto_async_error_strings,

  .n_next_nodes = CRYPTO_DISPATCH_N_NEXT,
  .next_nodes = {
#define _(n, s) \
  [CRYPTO_DISPATCH_NEXT_##n] = s,
  foreach_crypto_dispatch_next
#undef _
  },
};
/* *INDENT-ON* */
177
178/*
179 * fd.io coding-style-patch-verification: ON
180 *
181 * Local Variables:
182 * eval: (c-set-style "gnu")
183 * End:
184 */