/*
 * Copyright (c) 2016 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vlib/vlib.h>
#include <vnet/vnet.h>
#include <vppinfra/error.h>

#include <vnet/span/span.h>
#include <vnet/l2/l2_input.h>
#include <vnet/l2/l2_output.h>
#include <vnet/l2/feat_bitmap.h>

#include <vppinfra/error.h>
#include <vppinfra/elog.h>

/* packet trace format function */
static u8 *
format_span_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  span_trace_t *t = va_arg (*args, span_trace_t *);

  vnet_main_t *vnm = &vnet_main;
  s = format (s, "SPAN: mirrored %U -> %U",
              format_vnet_sw_if_index_name, vnm, t->src_sw_if_index,
              format_vnet_sw_if_index_name, vnm, t->mirror_sw_if_index);

  return s;
}

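/* Node error counters: currently a single counter tracking packets
   processed by the SPAN feature. */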
44#define foreach_span_error \
Paul Vinciguerrabdc0e6b2018-09-22 05:32:50 -070045_(HITS, "SPAN incoming packets processed")
Pavel Kotucekf6e3dc42016-11-04 09:58:01 +010046
47typedef enum
48{
49#define _(sym,str) SPAN_ERROR_##sym,
50 foreach_span_error
51#undef _
52 SPAN_N_ERROR,
53} span_error_t;
54
55static char *span_error_strings[] = {
56#define _(sym,string) string,
57 foreach_span_error
58#undef _
59};
60
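/*
 * Mirror buffer b0, seen on sw_if_index0 in the given direction (rxtx)
 * for the given feature type (sf), onto every configured mirror port.
 * Copies are appended to per-destination frames in mirror_frames; the
 * caller is responsible for flushing those frames.
 */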
static_always_inline void
span_mirror (vlib_main_t * vm, vlib_node_runtime_t * node, u32 sw_if_index0,
             vlib_buffer_t * b0, vlib_frame_t ** mirror_frames,
             vlib_rx_or_tx_t rxtx, span_feat_t sf)
{
  vlib_buffer_t *c0;
  span_main_t *sm = &span_main;
  vnet_main_t *vnm = &vnet_main;
  u32 *to_mirror_next = 0;
  u32 i;
  span_interface_t *si0;
  span_mirror_t *sm0;

  if (sw_if_index0 >= vec_len (sm->interfaces))
    return;

  si0 = vec_elt_at_index (sm->interfaces, sw_if_index0);
  sm0 = &si0->mirror_rxtx[sf][rxtx];

  if (sm0->num_mirror_ports == 0)
    return;

  /* Don't mirror a buffer that is itself a SPAN clone */
  if (PREDICT_FALSE (b0->flags & VNET_BUFFER_F_SPAN_CLONE))
    return;

  /* *INDENT-OFF* */
  clib_bitmap_foreach (i, sm0->mirror_ports, (
    {
      if (mirror_frames[i] == 0)
        {
          if (sf == SPAN_FEAT_L2)
            mirror_frames[i] = vlib_get_frame_to_node (vnm->vlib_main,
                                                       l2output_node.index);
          else
            mirror_frames[i] = vnet_get_frame_to_sw_interface (vnm, i);
        }
      to_mirror_next = vlib_frame_vector_args (mirror_frames[i]);
      to_mirror_next += mirror_frames[i]->n_vectors;
      /* This can fail */
      c0 = vlib_buffer_copy (vm, b0);
      if (PREDICT_TRUE (c0 != 0))
        {
          vnet_buffer (c0)->sw_if_index[VLIB_TX] = i;
          c0->flags |= VNET_BUFFER_F_SPAN_CLONE;
          if (sf == SPAN_FEAT_L2)
            vnet_buffer (c0)->l2.feature_bitmap = L2OUTPUT_FEAT_OUTPUT;
          to_mirror_next[0] = vlib_get_buffer_index (vm, c0);
          mirror_frames[i]->n_vectors++;
          if (PREDICT_FALSE (b0->flags & VLIB_BUFFER_IS_TRACED))
            {
              span_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t));
              t->src_sw_if_index = sw_if_index0;
              t->mirror_sw_if_index = i;
#if 0
              /* Enable this path to allow packet trace of SPAN packets.
                 Note that all SPAN packets will show up on the trace output
                 with the first SPAN packet (since they are in the same frame)
                 thus making trace output of the original packet confusing */
              mirror_frames[i]->flags |= VLIB_FRAME_TRACE;
              c0->flags |= VLIB_BUFFER_IS_TRACED;
#endif
            }
        }
    }));
  /* *INDENT-ON* */
}

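/*
 * Common worker for all four SPAN graph nodes.  Walks the input frame,
 * mirrors each packet via span_mirror(), then forwards the original
 * packet to its next feature/node.  Mirror frames are accumulated per
 * destination interface in a thread-local vector and flushed after the
 * whole frame has been processed.
 */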
static_always_inline uword
span_node_inline_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
                     vlib_frame_t * frame, vlib_rx_or_tx_t rxtx,
                     span_feat_t sf)
{
  span_main_t *sm = &span_main;
  vnet_main_t *vnm = &vnet_main;
  u32 n_left_from, *from, *to_next;
  u32 next_index;
  u32 sw_if_index;
  static __thread vlib_frame_t **mirror_frames = 0;

  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;
  next_index = node->cached_next_index;

  vec_validate_aligned (mirror_frames, sm->max_sw_if_index,
                        CLIB_CACHE_LINE_BYTES);

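  /* Standard dual/single loop: speculatively enqueue packets to the
     previously used next index, mirror them, then compute the real next
     node and let vlib_validate_buffer_enqueue_x2/_x1 fix up any
     mis-speculation. */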
  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      while (n_left_from >= 4 && n_left_to_next >= 2)
        {
          u32 bi0;
          u32 bi1;
          vlib_buffer_t *b0;
          vlib_buffer_t *b1;
          u32 sw_if_index0;
          u32 next0 = 0;
          u32 sw_if_index1;
          u32 next1 = 0;

          /* speculatively enqueue b0, b1 to the current next frame */
          to_next[0] = bi0 = from[0];
          to_next[1] = bi1 = from[1];
          to_next += 2;
          n_left_to_next -= 2;
          from += 2;
          n_left_from -= 2;

          b0 = vlib_get_buffer (vm, bi0);
          b1 = vlib_get_buffer (vm, bi1);
          sw_if_index0 = vnet_buffer (b0)->sw_if_index[rxtx];
          sw_if_index1 = vnet_buffer (b1)->sw_if_index[rxtx];

          span_mirror (vm, node, sw_if_index0, b0, mirror_frames, rxtx, sf);
          span_mirror (vm, node, sw_if_index1, b1, mirror_frames, rxtx, sf);

          switch (sf)
            {
            case SPAN_FEAT_L2:
              if (rxtx == VLIB_RX)
                {
                  next0 = vnet_l2_feature_next (b0, sm->l2_input_next,
                                                L2INPUT_FEAT_SPAN);
                  next1 = vnet_l2_feature_next (b1, sm->l2_input_next,
                                                L2INPUT_FEAT_SPAN);
                }
              else
                {
                  next0 = vnet_l2_feature_next (b0, sm->l2_output_next,
                                                L2OUTPUT_FEAT_SPAN);
                  next1 = vnet_l2_feature_next (b1, sm->l2_output_next,
                                                L2OUTPUT_FEAT_SPAN);
                }
              break;
            case SPAN_FEAT_DEVICE:
            default:
              vnet_feature_next (&next0, b0);
              vnet_feature_next (&next1, b1);
              break;
            }

          /* verify speculative enqueue, maybe switch current next frame */
          vlib_validate_buffer_enqueue_x2 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, bi1, next0, next1);
        }
      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0;
          vlib_buffer_t *b0;
          u32 sw_if_index0;
          u32 next0 = 0;

          /* speculatively enqueue b0 to the current next frame */
          to_next[0] = bi0 = from[0];
          to_next += 1;
          n_left_to_next -= 1;
          from += 1;
          n_left_from -= 1;

          b0 = vlib_get_buffer (vm, bi0);
          sw_if_index0 = vnet_buffer (b0)->sw_if_index[rxtx];

          span_mirror (vm, node, sw_if_index0, b0, mirror_frames, rxtx, sf);

          switch (sf)
            {
            case SPAN_FEAT_L2:
              if (rxtx == VLIB_RX)
                next0 = vnet_l2_feature_next (b0, sm->l2_input_next,
                                              L2INPUT_FEAT_SPAN);
              else
                next0 = vnet_l2_feature_next (b0, sm->l2_output_next,
                                              L2OUTPUT_FEAT_SPAN);
              break;
            case SPAN_FEAT_DEVICE:
            default:
              vnet_feature_next (&next0, b0);
              break;
            }

          /* verify speculative enqueue, maybe switch current next frame */
          vlib_validate_buffer_enqueue_x1 (vm, node, next_index, to_next,
                                           n_left_to_next, bi0, next0);
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

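  /* Flush the accumulated mirror frames to their destinations. */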
  for (sw_if_index = 0; sw_if_index < vec_len (mirror_frames); sw_if_index++)
    {
      vlib_frame_t *f = mirror_frames[sw_if_index];
      if (f == 0)
        continue;

      if (sf == SPAN_FEAT_L2)
        vlib_put_frame_to_node (vnm->vlib_main, l2output_node.index, f);
      else
        vnet_put_frame_to_sw_interface (vnm, sw_if_index, f);
      mirror_frames[sw_if_index] = 0;
    }

  return frame->n_vectors;
}

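/*
 * Node entry points: span-input/span-output mirror on the device RX/TX
 * paths (SPAN_FEAT_DEVICE); span-l2-input/span-l2-output mirror on the
 * L2 input/output feature paths (SPAN_FEAT_L2).  All four share
 * span_node_inline_fn above.
 */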
VLIB_NODE_FN (span_input_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
                                vlib_frame_t * frame)
{
  return span_node_inline_fn (vm, node, frame, VLIB_RX, SPAN_FEAT_DEVICE);
}

VLIB_NODE_FN (span_output_node) (vlib_main_t * vm, vlib_node_runtime_t * node,
                                 vlib_frame_t * frame)
{
  return span_node_inline_fn (vm, node, frame, VLIB_TX, SPAN_FEAT_DEVICE);
}

VLIB_NODE_FN (span_l2_input_node) (vlib_main_t * vm,
                                   vlib_node_runtime_t * node,
                                   vlib_frame_t * frame)
{
  return span_node_inline_fn (vm, node, frame, VLIB_RX, SPAN_FEAT_L2);
}

VLIB_NODE_FN (span_l2_output_node) (vlib_main_t * vm,
                                    vlib_node_runtime_t * node,
                                    vlib_frame_t * frame)
{
  return span_node_inline_fn (vm, node, frame, VLIB_TX, SPAN_FEAT_L2);
}

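/* Registration boilerplate shared by the four SPAN graph nodes; each
   registration below only adds its node name. */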
#define span_node_defs                          \
  .vector_size = sizeof (u32),                  \
  .format_trace = format_span_trace,            \
  .type = VLIB_NODE_TYPE_INTERNAL,              \
  .n_errors = ARRAY_LEN(span_error_strings),    \
  .error_strings = span_error_strings,          \
  .n_next_nodes = 0,                            \
  .next_nodes = {                               \
    [0] = "error-drop"                          \
  }

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (span_input_node) = {
  span_node_defs,
  .name = "span-input",
};

VLIB_REGISTER_NODE (span_output_node) = {
  span_node_defs,
  .name = "span-output",
};

VLIB_REGISTER_NODE (span_l2_input_node) = {
  span_node_defs,
  .name = "span-l2-input",
};

VLIB_REGISTER_NODE (span_l2_output_node) = {
  span_node_defs,
  .name = "span-l2-output",
};

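/*
 * One-time initialization: record main pointers and build the L2 feature
 * next-node tables consumed by vnet_l2_feature_next() above.  Compiled
 * only into the default (non-march-variant) object.
 */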
#ifndef CLIB_MARCH_VARIANT
clib_error_t *
span_init (vlib_main_t * vm)
{
  span_main_t *sm = &span_main;

  sm->vlib_main = vm;
  sm->vnet_main = vnet_get_main ();

  /* Initialize the feature next-node indexes */
  feat_bitmap_init_next_nodes (vm,
                               span_l2_input_node.index,
                               L2INPUT_N_FEAT,
                               l2input_get_feat_names (),
                               sm->l2_input_next);

  feat_bitmap_init_next_nodes (vm,
                               span_l2_output_node.index,
                               L2OUTPUT_N_FEAT,
                               l2output_get_feat_names (),
                               sm->l2_output_next);
  return 0;
}

VLIB_INIT_FUNCTION (span_init);
/* *INDENT-ON* */
#endif /* CLIB_MARCH_VARIANT */

#undef span_node_defs
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */