blob: 56977b58dc26cd176559a56e794592fb6281797c [file] [log] [blame]
Pavel Kotucekf6e3dc42016-11-04 09:58:01 +01001/*
2 * Copyright (c) 2016 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16#include <vlib/vlib.h>
17#include <vnet/vnet.h>
18#include <vppinfra/error.h>
19
20#include <vnet/span/span.h>
Eyal Bari001fd402017-07-16 09:34:53 +030021#include <vnet/l2/l2_input.h>
22#include <vnet/l2/l2_output.h>
23#include <vnet/l2/feat_bitmap.h>
Pavel Kotucekf6e3dc42016-11-04 09:58:01 +010024
25#include <vppinfra/error.h>
26#include <vppinfra/elog.h>
27
Pavel Kotucekf6e3dc42016-11-04 09:58:01 +010028/* packet trace format function */
Filip Tehlara79271f2019-03-05 03:46:40 -080029static u8 *
Pavel Kotucekf6e3dc42016-11-04 09:58:01 +010030format_span_trace (u8 * s, va_list * args)
31{
32 CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
33 CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
34 span_trace_t *t = va_arg (*args, span_trace_t *);
35
Igor Mikhailov (imichail)2d6fc6b2019-03-29 19:25:15 -070036 vnet_main_t *vnm = vnet_get_main ();
Pavel Kotucekf6e3dc42016-11-04 09:58:01 +010037 s = format (s, "SPAN: mirrored %U -> %U",
38 format_vnet_sw_if_index_name, vnm, t->src_sw_if_index,
39 format_vnet_sw_if_index_name, vnm, t->mirror_sw_if_index);
40
41 return s;
42}
43
/* X-macro listing the SPAN error counters; the string is what
   "show errors" prints for each counter. */
#define foreach_span_error \
_(HITS, "SPAN incoming packets processed")

/* Error-code enum generated from the list above: SPAN_ERROR_HITS, ...,
   terminated by SPAN_N_ERROR (the counter count). */
typedef enum
{
#define _(sym,str) SPAN_ERROR_##sym,
  foreach_span_error
#undef _
  SPAN_N_ERROR,
} span_error_t;

/* Human-readable strings, indexed by span_error_t; referenced by the
   node registrations below via .error_strings. */
static char *span_error_strings[] = {
#define _(sym,string) string,
  foreach_span_error
#undef _
};
60
/*
 * Copy one buffer to every mirror port configured for (sw_if_index0, sf,
 * rxtx) and append the copies to the per-destination frames in
 * mirror_frames (indexed by mirror sw_if_index, allocated lazily).
 *
 * Returns early when the interface has no SPAN config, no mirror ports for
 * this feature/direction, or the buffer is itself a SPAN clone (so mirrored
 * traffic is never re-mirrored).  Frames filled here are flushed by the
 * caller (span_node_inline_fn).
 */
static_always_inline void
span_mirror (vlib_main_t * vm, vlib_node_runtime_t * node, u32 sw_if_index0,
	     vlib_buffer_t * b0, vlib_frame_t ** mirror_frames,
	     vlib_rx_or_tx_t rxtx, span_feat_t sf)
{
  vlib_buffer_t *c0;
  span_main_t *sm = &span_main;
  vnet_main_t *vnm = vnet_get_main ();
  u32 *to_mirror_next = 0;
  u32 i;
  span_interface_t *si0;
  span_mirror_t *sm0;

  /* Interface never had SPAN configured: interfaces vector is too short. */
  if (sw_if_index0 >= vec_len (sm->interfaces))
    return;

  si0 = vec_elt_at_index (sm->interfaces, sw_if_index0);
  sm0 = &si0->mirror_rxtx[sf][rxtx];

  if (sm0->num_mirror_ports == 0)
    return;

  /* Don't do it again */
  if (PREDICT_FALSE (b0->flags & VNET_BUFFER_F_SPAN_CLONE))
    return;

  /* Walk the set bits = mirror destination sw_if_indices. */
  clib_bitmap_foreach (i, sm0->mirror_ports)
    {
      if (mirror_frames[i] == 0)
	{
	  /* L2 clones go through l2-output; device-level clones go
	     straight to the destination interface's tx node. */
	  if (sf == SPAN_FEAT_L2)
	    mirror_frames[i] = vlib_get_frame_to_node (vm, l2output_node.index);
	  else
	    mirror_frames[i] = vnet_get_frame_to_sw_interface (vnm, i);
	}
      to_mirror_next = vlib_frame_vector_args (mirror_frames[i]);
      to_mirror_next += mirror_frames[i]->n_vectors;
      /* This can fail */
      c0 = vlib_buffer_copy (vm, b0);
      if (PREDICT_TRUE (c0 != 0))
	{
	  vnet_buffer (c0)->sw_if_index[VLIB_TX] = i;
	  /* Mark the copy so it is never mirrored again (see check above). */
	  c0->flags |= VNET_BUFFER_F_SPAN_CLONE;
	  if (sf == SPAN_FEAT_L2)
	    vnet_buffer (c0)->l2.feature_bitmap = L2OUTPUT_FEAT_OUTPUT;
	  to_mirror_next[0] = vlib_get_buffer_index (vm, c0);
	  mirror_frames[i]->n_vectors++;
	  if (PREDICT_FALSE (b0->flags & VLIB_BUFFER_IS_TRACED))
	    {
	      span_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t));
	      t->src_sw_if_index = sw_if_index0;
	      t->mirror_sw_if_index = i;
#if 0
	      /* Enable this path to allow packet trace of SPAN packets.
	         Note that all SPAN packets will show up on the trace output
	         with the first SPAN packet (since they are in the same frame)
	         thus making trace output of the original packet confusing */
	      mirror_frames[i]->flags |= VLIB_FRAME_TRACE;
	      c0->flags |= VLIB_BUFFER_IS_TRACED;
#endif
	    }
	}
    }
}
125
/*
 * Shared worker for all four SPAN graph nodes.
 *
 * For every buffer in the frame: read the source interface from the rxtx
 * direction, let span_mirror() clone it to any configured mirror ports,
 * then forward the original buffer to the next feature in its arc
 * (L2 feature bitmap for SPAN_FEAT_L2, device feature arc otherwise).
 * Accumulated mirror frames are flushed to their destinations at the end.
 *
 * Returns the number of vectors processed (frame->n_vectors).
 */
static_always_inline uword
span_node_inline_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
		     vlib_frame_t * frame, vlib_rx_or_tx_t rxtx,
		     span_feat_t sf)
{
  span_main_t *sm = &span_main;
  vnet_main_t *vnm = vnet_get_main ();
  u32 n_left_from, *from, *to_next;
  u32 next_index;
  u32 sw_if_index;
  /* Per-thread scratch array of pending mirror frames, indexed by
     destination sw_if_index; persists across calls to amortize the vec. */
  static __thread vlib_frame_t **mirror_frames = 0;

  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;
  next_index = node->cached_next_index;

  vec_validate_aligned (mirror_frames, sm->max_sw_if_index,
			CLIB_CACHE_LINE_BYTES);

  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      /* Dual-buffer loop. */
      while (n_left_from >= 4 && n_left_to_next >= 2)
	{
	  u32 bi0;
	  u32 bi1;
	  vlib_buffer_t *b0;
	  vlib_buffer_t *b1;
	  u32 sw_if_index0;
	  u32 next0 = 0;
	  u32 sw_if_index1;
	  u32 next1 = 0;

	  /* speculatively enqueue b0, b1 to the current next frame */
	  to_next[0] = bi0 = from[0];
	  to_next[1] = bi1 = from[1];
	  to_next += 2;
	  n_left_to_next -= 2;
	  from += 2;
	  n_left_from -= 2;

	  b0 = vlib_get_buffer (vm, bi0);
	  b1 = vlib_get_buffer (vm, bi1);
	  /* Source interface depends on direction: RX or TX metadata. */
	  sw_if_index0 = vnet_buffer (b0)->sw_if_index[rxtx];
	  sw_if_index1 = vnet_buffer (b1)->sw_if_index[rxtx];

	  span_mirror (vm, node, sw_if_index0, b0, mirror_frames, rxtx, sf);
	  span_mirror (vm, node, sw_if_index1, b1, mirror_frames, rxtx, sf);

	  /* Pick the next node for the ORIGINAL buffers. */
	  switch (sf)
	    {
	    case SPAN_FEAT_L2:
	      if (rxtx == VLIB_RX)
		{
		  next0 = vnet_l2_feature_next (b0, sm->l2_input_next,
						L2INPUT_FEAT_SPAN);
		  next1 = vnet_l2_feature_next (b1, sm->l2_input_next,
						L2INPUT_FEAT_SPAN);
		}
	      else
		{
		  next0 = vnet_l2_feature_next (b0, sm->l2_output_next,
						L2OUTPUT_FEAT_SPAN);
		  next1 = vnet_l2_feature_next (b1, sm->l2_output_next,
						L2OUTPUT_FEAT_SPAN);
		}
	      break;
	    case SPAN_FEAT_DEVICE:
	    default:
	      vnet_feature_next (&next0, b0);
	      vnet_feature_next (&next1, b1);
	      break;
	    }

	  /* verify speculative enqueue, maybe switch current next frame */
	  vlib_validate_buffer_enqueue_x2 (vm, node, next_index,
					   to_next, n_left_to_next,
					   bi0, bi1, next0, next1);
	}
      /* Single-buffer cleanup loop. */
      while (n_left_from > 0 && n_left_to_next > 0)
	{
	  u32 bi0;
	  vlib_buffer_t *b0;
	  u32 sw_if_index0;
	  u32 next0 = 0;

	  /* speculatively enqueue b0 to the current next frame */
	  to_next[0] = bi0 = from[0];
	  to_next += 1;
	  n_left_to_next -= 1;
	  from += 1;
	  n_left_from -= 1;

	  b0 = vlib_get_buffer (vm, bi0);
	  sw_if_index0 = vnet_buffer (b0)->sw_if_index[rxtx];

	  span_mirror (vm, node, sw_if_index0, b0, mirror_frames, rxtx, sf);

	  switch (sf)
	    {
	    case SPAN_FEAT_L2:
	      if (rxtx == VLIB_RX)
		next0 = vnet_l2_feature_next (b0, sm->l2_input_next,
					      L2INPUT_FEAT_SPAN);
	      else
		next0 = vnet_l2_feature_next (b0, sm->l2_output_next,
					      L2OUTPUT_FEAT_SPAN);
	      break;
	    case SPAN_FEAT_DEVICE:
	    default:
	      vnet_feature_next (&next0, b0);
	      break;
	    }

	  /* verify speculative enqueue, maybe switch current next frame */
	  vlib_validate_buffer_enqueue_x1 (vm, node, next_index, to_next,
					   n_left_to_next, bi0, next0);
	}

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }


  /* Flush every pending mirror frame to its destination and reset the
     per-thread scratch slots for the next invocation. */
  for (sw_if_index = 0; sw_if_index < vec_len (mirror_frames); sw_if_index++)
    {
      vlib_frame_t *f = mirror_frames[sw_if_index];
      if (f == 0)
	continue;

      if (sf == SPAN_FEAT_L2)
	vlib_put_frame_to_node (vm, l2output_node.index, f);
      else
	vnet_put_frame_to_sw_interface (vnm, sw_if_index, f);
      mirror_frames[sw_if_index] = 0;
    }

  return frame->n_vectors;
}
267
/* Device-level SPAN, RX direction: mirror packets received on an interface. */
VLIB_NODE_FN (span_input_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
				vlib_frame_t * frame)
{
  return span_node_inline_fn (vm, node, frame, VLIB_RX, SPAN_FEAT_DEVICE);
}
273
/* Device-level SPAN, TX direction: mirror packets sent on an interface. */
VLIB_NODE_FN (span_output_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
				 vlib_frame_t * frame)
{
  return span_node_inline_fn (vm, node, frame, VLIB_TX, SPAN_FEAT_DEVICE);
}
279
/* L2 feature-arc SPAN, RX direction (l2-input feature bitmap). */
VLIB_NODE_FN (span_l2_input_node) (vlib_main_t * vm,
				   vlib_node_runtime_t * node,
				   vlib_frame_t * frame)
{
  return span_node_inline_fn (vm, node, frame, VLIB_RX, SPAN_FEAT_L2);
}
286
/* L2 feature-arc SPAN, TX direction (l2-output feature bitmap). */
VLIB_NODE_FN (span_l2_output_node) (vlib_main_t * vm,
				    vlib_node_runtime_t * node,
				    vlib_frame_t * frame)
{
  return span_node_inline_fn (vm, node, frame, VLIB_TX, SPAN_FEAT_L2);
}
293
/* Registration fields common to all four SPAN nodes: u32 buffer-index
   vectors, shared trace formatter and error strings, and a single
   "error-drop" next node (real nexts come from the feature arcs). */
#define span_node_defs \
  .vector_size = sizeof (u32), \
  .format_trace = format_span_trace, \
  .type = VLIB_NODE_TYPE_INTERNAL, \
  .n_errors = ARRAY_LEN(span_error_strings), \
  .error_strings = span_error_strings, \
  .n_next_nodes = 0, \
  .next_nodes = { \
    [0] = "error-drop" \
  }

/* Device-level RX SPAN node. */
VLIB_REGISTER_NODE (span_input_node) = {
  span_node_defs,
  .name = "span-input",
};

/* Device-level TX SPAN node. */
VLIB_REGISTER_NODE (span_output_node) = {
  span_node_defs,
  .name = "span-output",
};

/* L2 feature-arc RX SPAN node. */
VLIB_REGISTER_NODE (span_l2_input_node) = {
  span_node_defs,
  .name = "span-l2-input",
};

/* L2 feature-arc TX SPAN node. */
VLIB_REGISTER_NODE (span_l2_output_node) = {
  span_node_defs,
  .name = "span-l2-output",
};
324
/* Built only once, not per CPU-march variant. */
#ifndef CLIB_MARCH_VARIANT
/*
 * Plugin init: cache the vlib/vnet mains in span_main and precompute the
 * l2-input / l2-output feature next-node index tables used by
 * vnet_l2_feature_next() in the L2 SPAN datapath.  Always returns 0.
 */
clib_error_t *span_init (vlib_main_t * vm)
{
  span_main_t *sm = &span_main;

  sm->vlib_main = vm;
  sm->vnet_main = vnet_get_main ();

  /* Initialize the feature next-node indexes */
  feat_bitmap_init_next_nodes (vm,
			       span_l2_input_node.index,
			       L2INPUT_N_FEAT,
			       l2input_get_feat_names (),
			       sm->l2_input_next);

  feat_bitmap_init_next_nodes (vm,
			       span_l2_output_node.index,
			       L2OUTPUT_N_FEAT,
			       l2output_get_feat_names (),
			       sm->l2_output_next);
  return 0;
}

VLIB_INIT_FUNCTION (span_init);
#endif /* CLIB_MARCH_VARIANT */
Pavel Kotucekf6e3dc42016-11-04 09:58:01 +0100350
Eyal Bari001fd402017-07-16 09:34:53 +0300351#undef span_node_defs
Pavel Kotucekf6e3dc42016-11-04 09:58:01 +0100352/*
353 * fd.io coding-style-patch-verification: ON
354 *
355 * Local Variables:
356 * eval: (c-set-style "gnu")
357 * End:
358 */