blob: 1a9d1bae7240dc99a7fe83f31fda9d95711134c4 [file] [log] [blame]
Pavel Kotucekf6e3dc42016-11-04 09:58:01 +01001/*
2 * Copyright (c) 2016 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16#include <vlib/vlib.h>
17#include <vnet/vnet.h>
18#include <vppinfra/error.h>
19
20#include <vnet/span/span.h>
Eyal Bari001fd402017-07-16 09:34:53 +030021#include <vnet/l2/l2_input.h>
22#include <vnet/l2/l2_output.h>
23#include <vnet/l2/feat_bitmap.h>
Pavel Kotucekf6e3dc42016-11-04 09:58:01 +010024
25#include <vppinfra/error.h>
26#include <vppinfra/elog.h>
27
Pavel Kotucekf6e3dc42016-11-04 09:58:01 +010028/* packet trace format function */
Filip Tehlara79271f2019-03-05 03:46:40 -080029static u8 *
Pavel Kotucekf6e3dc42016-11-04 09:58:01 +010030format_span_trace (u8 * s, va_list * args)
31{
32 CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
33 CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
34 span_trace_t *t = va_arg (*args, span_trace_t *);
35
Igor Mikhailov (imichail)2d6fc6b2019-03-29 19:25:15 -070036 vnet_main_t *vnm = vnet_get_main ();
Pavel Kotucekf6e3dc42016-11-04 09:58:01 +010037 s = format (s, "SPAN: mirrored %U -> %U",
38 format_vnet_sw_if_index_name, vnm, t->src_sw_if_index,
39 format_vnet_sw_if_index_name, vnm, t->mirror_sw_if_index);
40
41 return s;
42}
43
/* Error/counter definitions for the SPAN nodes.  The X-macro below is
   expanded twice: once into SPAN_ERROR_* enum symbols and once into the
   parallel string table handed to vlib at node registration. */
#define foreach_span_error \
_(HITS, "SPAN incoming packets processed")

typedef enum
{
#define _(sym,str) SPAN_ERROR_##sym,
  foreach_span_error
#undef _
    SPAN_N_ERROR,
} span_error_t;

/* Human-readable strings, index-aligned with span_error_t. */
static char *span_error_strings[] = {
#define _(sym,string) string,
  foreach_span_error
#undef _
};
60
Pavel Kotucek3a2a1c42016-12-06 10:10:10 +010061static_always_inline void
Pavel Kotucek077d6ae2017-01-24 08:33:38 +010062span_mirror (vlib_main_t * vm, vlib_node_runtime_t * node, u32 sw_if_index0,
Eyal Bari001fd402017-07-16 09:34:53 +030063 vlib_buffer_t * b0, vlib_frame_t ** mirror_frames,
64 vlib_rx_or_tx_t rxtx, span_feat_t sf)
Pavel Kotucek3a2a1c42016-12-06 10:10:10 +010065{
66 vlib_buffer_t *c0;
Pavel Kotucek077d6ae2017-01-24 08:33:38 +010067 span_main_t *sm = &span_main;
Igor Mikhailov (imichail)2d6fc6b2019-03-29 19:25:15 -070068 vnet_main_t *vnm = vnet_get_main ();
Pavel Kotucek3a2a1c42016-12-06 10:10:10 +010069 u32 *to_mirror_next = 0;
70 u32 i;
Steven074883a2018-04-24 22:43:07 -070071 span_interface_t *si0;
72 span_mirror_t *sm0;
Pavel Kotucek3a2a1c42016-12-06 10:10:10 +010073
Steven074883a2018-04-24 22:43:07 -070074 if (sw_if_index0 >= vec_len (sm->interfaces))
75 return;
76
77 si0 = vec_elt_at_index (sm->interfaces, sw_if_index0);
78 sm0 = &si0->mirror_rxtx[sf][rxtx];
Pavel Kotucek077d6ae2017-01-24 08:33:38 +010079
Eyal Bari001fd402017-07-16 09:34:53 +030080 if (sm0->num_mirror_ports == 0)
Pavel Kotucek3a2a1c42016-12-06 10:10:10 +010081 return;
82
83 /* Don't do it again */
Damjan Marion213b5aa2017-07-13 21:19:27 +020084 if (PREDICT_FALSE (b0->flags & VNET_BUFFER_F_SPAN_CLONE))
Pavel Kotucek3a2a1c42016-12-06 10:10:10 +010085 return;
86
87 /* *INDENT-OFF* */
Eyal Bari001fd402017-07-16 09:34:53 +030088 clib_bitmap_foreach (i, sm0->mirror_ports, (
Pavel Kotucek3a2a1c42016-12-06 10:10:10 +010089 {
90 if (mirror_frames[i] == 0)
Eyal Bari001fd402017-07-16 09:34:53 +030091 {
92 if (sf == SPAN_FEAT_L2)
Igor Mikhailov (imichail)2d6fc6b2019-03-29 19:25:15 -070093 mirror_frames[i] = vlib_get_frame_to_node (vm, l2output_node.index);
Eyal Bari001fd402017-07-16 09:34:53 +030094 else
95 mirror_frames[i] = vnet_get_frame_to_sw_interface (vnm, i);
96 }
Pavel Kotucek3a2a1c42016-12-06 10:10:10 +010097 to_mirror_next = vlib_frame_vector_args (mirror_frames[i]);
98 to_mirror_next += mirror_frames[i]->n_vectors;
Dave Barach26cd8c12017-02-23 17:11:26 -050099 /* This can fail */
Pavel Kotucek3a2a1c42016-12-06 10:10:10 +0100100 c0 = vlib_buffer_copy (vm, b0);
Dave Barach26cd8c12017-02-23 17:11:26 -0500101 if (PREDICT_TRUE(c0 != 0))
102 {
103 vnet_buffer (c0)->sw_if_index[VLIB_TX] = i;
Damjan Marion213b5aa2017-07-13 21:19:27 +0200104 c0->flags |= VNET_BUFFER_F_SPAN_CLONE;
Eyal Bari001fd402017-07-16 09:34:53 +0300105 if (sf == SPAN_FEAT_L2)
106 vnet_buffer (c0)->l2.feature_bitmap = L2OUTPUT_FEAT_OUTPUT;
Dave Barach26cd8c12017-02-23 17:11:26 -0500107 to_mirror_next[0] = vlib_get_buffer_index (vm, c0);
108 mirror_frames[i]->n_vectors++;
Pavel Kotucek077d6ae2017-01-24 08:33:38 +0100109 if (PREDICT_FALSE (b0->flags & VLIB_BUFFER_IS_TRACED))
110 {
111 span_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t));
112 t->src_sw_if_index = sw_if_index0;
113 t->mirror_sw_if_index = i;
John Loa43ccae2018-02-13 17:15:23 -0500114#if 0
115 /* Enable this path to allow packet trace of SPAN packets.
116 Note that all SPAN packets will show up on the trace output
117 with the first SPAN packet (since they are in the same frame)
118 thus making trace output of the original packet confusing */
119 mirror_frames[i]->flags |= VLIB_FRAME_TRACE;
120 c0->flags |= VLIB_BUFFER_IS_TRACED;
121#endif
122 }
123 }
Pavel Kotucek3a2a1c42016-12-06 10:10:10 +0100124 }));
125 /* *INDENT-ON* */
126}
127
/*
 * Shared worker for all four SPAN graph nodes, specialized at compile
 * time by direction (rxtx) and feature layer (sf).
 *
 * For every buffer in the frame: mirror it via span_mirror(), then pass
 * the ORIGINAL buffer to its normal next feature node unchanged — device
 * SPAN uses vnet_feature_next(), L2 SPAN uses the l2 feature bitmap.
 * Mirror copies accumulate in per-destination frames that are flushed
 * after the main loop.  Returns the number of buffers processed.
 */
static_always_inline uword
span_node_inline_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
		     vlib_frame_t * frame, vlib_rx_or_tx_t rxtx,
		     span_feat_t sf)
{
  span_main_t *sm = &span_main;
  vnet_main_t *vnm = vnet_get_main ();
  u32 n_left_from, *from, *to_next;
  u32 next_index;
  u32 sw_if_index;
  /* Per-thread cache of pending mirror frames, indexed by destination
     sw_if_index; persists across calls so it is only grown, never freed. */
  static __thread vlib_frame_t **mirror_frames = 0;

  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;
  next_index = node->cached_next_index;

  vec_validate_aligned (mirror_frames, sm->max_sw_if_index,
			CLIB_CACHE_LINE_BYTES);

  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      /* Dual loop: process two buffers per iteration (the >= 4 guard is
         the usual vlib headroom convention for dual loops). */
      while (n_left_from >= 4 && n_left_to_next >= 2)
	{
	  u32 bi0;
	  u32 bi1;
	  vlib_buffer_t *b0;
	  vlib_buffer_t *b1;
	  u32 sw_if_index0;
	  u32 next0 = 0;
	  u32 sw_if_index1;
	  u32 next1 = 0;

	  /* speculatively enqueue b0, b1 to the current next frame */
	  to_next[0] = bi0 = from[0];
	  to_next[1] = bi1 = from[1];
	  to_next += 2;
	  n_left_to_next -= 2;
	  from += 2;
	  n_left_from -= 2;

	  b0 = vlib_get_buffer (vm, bi0);
	  b1 = vlib_get_buffer (vm, bi1);
	  sw_if_index0 = vnet_buffer (b0)->sw_if_index[rxtx];
	  sw_if_index1 = vnet_buffer (b1)->sw_if_index[rxtx];

	  span_mirror (vm, node, sw_if_index0, b0, mirror_frames, rxtx, sf);
	  span_mirror (vm, node, sw_if_index1, b1, mirror_frames, rxtx, sf);

	  /* Compute the next node for the ORIGINAL buffers. */
	  switch (sf)
	    {
	    case SPAN_FEAT_L2:
	      if (rxtx == VLIB_RX)
		{
		  next0 = vnet_l2_feature_next (b0, sm->l2_input_next,
						L2INPUT_FEAT_SPAN);
		  next1 = vnet_l2_feature_next (b1, sm->l2_input_next,
						L2INPUT_FEAT_SPAN);
		}
	      else
		{
		  next0 = vnet_l2_feature_next (b0, sm->l2_output_next,
						L2OUTPUT_FEAT_SPAN);
		  next1 = vnet_l2_feature_next (b1, sm->l2_output_next,
						L2OUTPUT_FEAT_SPAN);
		}
	      break;
	    case SPAN_FEAT_DEVICE:
	    default:
	      vnet_feature_next (&next0, b0);
	      vnet_feature_next (&next1, b1);
	      break;
	    }

	  /* verify speculative enqueue, maybe switch current next frame */
	  vlib_validate_buffer_enqueue_x2 (vm, node, next_index,
					   to_next, n_left_to_next,
					   bi0, bi1, next0, next1);
	}
      /* Single loop: handles the remainder one buffer at a time. */
      while (n_left_from > 0 && n_left_to_next > 0)
	{
	  u32 bi0;
	  vlib_buffer_t *b0;
	  u32 sw_if_index0;
	  u32 next0 = 0;

	  /* speculatively enqueue b0 to the current next frame */
	  to_next[0] = bi0 = from[0];
	  to_next += 1;
	  n_left_to_next -= 1;
	  from += 1;
	  n_left_from -= 1;

	  b0 = vlib_get_buffer (vm, bi0);
	  sw_if_index0 = vnet_buffer (b0)->sw_if_index[rxtx];

	  span_mirror (vm, node, sw_if_index0, b0, mirror_frames, rxtx, sf);

	  switch (sf)
	    {
	    case SPAN_FEAT_L2:
	      if (rxtx == VLIB_RX)
		next0 = vnet_l2_feature_next (b0, sm->l2_input_next,
					      L2INPUT_FEAT_SPAN);
	      else
		next0 = vnet_l2_feature_next (b0, sm->l2_output_next,
					      L2OUTPUT_FEAT_SPAN);
	      break;
	    case SPAN_FEAT_DEVICE:
	    default:
	      vnet_feature_next (&next0, b0);
	      break;
	    }

	  /* verify speculative enqueue, maybe switch current next frame */
	  vlib_validate_buffer_enqueue_x1 (vm, node, next_index, to_next,
					   n_left_to_next, bi0, next0);
	}

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  /* Flush every mirror frame populated by span_mirror() above, then clear
     the per-thread cache slot so the next call starts fresh. */
  for (sw_if_index = 0; sw_if_index < vec_len (mirror_frames); sw_if_index++)
    {
      vlib_frame_t *f = mirror_frames[sw_if_index];
      if (f == 0)
	continue;

      if (sf == SPAN_FEAT_L2)
	vlib_put_frame_to_node (vm, l2output_node.index, f);
      else
	vnet_put_frame_to_sw_interface (vnm, sw_if_index, f);
      mirror_frames[sw_if_index] = 0;
    }

  return frame->n_vectors;
}
269
Filip Tehlara79271f2019-03-05 03:46:40 -0800270VLIB_NODE_FN (span_input_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
271 vlib_frame_t * frame)
Pavel Kotucek3a2a1c42016-12-06 10:10:10 +0100272{
Eyal Bari001fd402017-07-16 09:34:53 +0300273 return span_node_inline_fn (vm, node, frame, VLIB_RX, SPAN_FEAT_DEVICE);
Pavel Kotucek3a2a1c42016-12-06 10:10:10 +0100274}
275
Filip Tehlara79271f2019-03-05 03:46:40 -0800276VLIB_NODE_FN (span_output_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
277 vlib_frame_t * frame)
Pavel Kotucek3a2a1c42016-12-06 10:10:10 +0100278{
Eyal Bari001fd402017-07-16 09:34:53 +0300279 return span_node_inline_fn (vm, node, frame, VLIB_TX, SPAN_FEAT_DEVICE);
Pavel Kotucek3a2a1c42016-12-06 10:10:10 +0100280}
281
Filip Tehlara79271f2019-03-05 03:46:40 -0800282VLIB_NODE_FN (span_l2_input_node) (vlib_main_t * vm,
283 vlib_node_runtime_t * node,
284 vlib_frame_t * frame)
Eyal Bari001fd402017-07-16 09:34:53 +0300285{
286 return span_node_inline_fn (vm, node, frame, VLIB_RX, SPAN_FEAT_L2);
287}
288
Filip Tehlara79271f2019-03-05 03:46:40 -0800289VLIB_NODE_FN (span_l2_output_node) (vlib_main_t * vm,
290 vlib_node_runtime_t * node,
291 vlib_frame_t * frame)
Eyal Bari001fd402017-07-16 09:34:53 +0300292{
293 return span_node_inline_fn (vm, node, frame, VLIB_TX, SPAN_FEAT_L2);
294}
295
/* Registration boilerplate shared by all four SPAN nodes: same trace
   formatter, error strings, and a single "error-drop" next node.  Each
   registration below only adds its graph-node name. */
#define span_node_defs \
  .vector_size = sizeof (u32), \
  .format_trace = format_span_trace, \
  .type = VLIB_NODE_TYPE_INTERNAL, \
  .n_errors = ARRAY_LEN(span_error_strings), \
  .error_strings = span_error_strings, \
  .n_next_nodes = 0, \
  .next_nodes = { \
    [0] = "error-drop" \
  }

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (span_input_node) = {
  span_node_defs,
  .name = "span-input",
};

VLIB_REGISTER_NODE (span_output_node) = {
  span_node_defs,
  .name = "span-output",
};

VLIB_REGISTER_NODE (span_l2_input_node) = {
  span_node_defs,
  .name = "span-l2-input",
};

VLIB_REGISTER_NODE (span_l2_output_node) = {
  span_node_defs,
  .name = "span-l2-output",
};
327
Filip Tehlara79271f2019-03-05 03:46:40 -0800328#ifndef CLIB_MARCH_VARIANT
Eyal Bari001fd402017-07-16 09:34:53 +0300329clib_error_t *span_init (vlib_main_t * vm)
330{
331 span_main_t *sm = &span_main;
332
333 sm->vlib_main = vm;
334 sm->vnet_main = vnet_get_main ();
335
336 /* Initialize the feature next-node indexes */
337 feat_bitmap_init_next_nodes (vm,
338 span_l2_input_node.index,
339 L2INPUT_N_FEAT,
340 l2input_get_feat_names (),
341 sm->l2_input_next);
342
343 feat_bitmap_init_next_nodes (vm,
344 span_l2_output_node.index,
345 L2OUTPUT_N_FEAT,
346 l2output_get_feat_names (),
347 sm->l2_output_next);
348 return 0;
349}
350
351VLIB_INIT_FUNCTION (span_init);
Pavel Kotucekf6e3dc42016-11-04 09:58:01 +0100352/* *INDENT-ON* */
Filip Tehlara79271f2019-03-05 03:46:40 -0800353#endif /* CLIB_MARCH_VARIANT */
Pavel Kotucekf6e3dc42016-11-04 09:58:01 +0100354
Eyal Bari001fd402017-07-16 09:34:53 +0300355#undef span_node_defs
Pavel Kotucekf6e3dc42016-11-04 09:58:01 +0100356/*
357 * fd.io coding-style-patch-verification: ON
358 *
359 * Local Variables:
360 * eval: (c-set-style "gnu")
361 * End:
362 */