/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * interface_output.c: interface output node
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include <vnet/vnet.h>
#include <vnet/ip/icmp46_packet.h>
#include <vnet/ethernet/packet.h>
#include <vnet/ip/format.h>
#include <vnet/ip/ip4.h>
#include <vnet/ip/ip6.h>
#include <vnet/udp/udp_packet.h>
#include <vnet/feature/feature.h>
#include <vnet/classify/trace_classify.h>
#include <vnet/interface_output.h>

typedef struct
{
  u32 sw_if_index;
  u32 flags;
  u8 data[128 - 2 * sizeof (u32)];
}
interface_output_trace_t;

#ifndef CLIB_MARCH_VARIANT
u8 *
format_vnet_interface_output_trace (u8 * s, va_list * va)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*va, vlib_main_t *);
  vlib_node_t *node = va_arg (*va, vlib_node_t *);
  interface_output_trace_t *t = va_arg (*va, interface_output_trace_t *);
  vnet_main_t *vnm = vnet_get_main ();
  vnet_sw_interface_t *si;
  u32 indent;

  if (t->sw_if_index != (u32) ~ 0)
    {
      indent = format_get_indent (s);

      if (pool_is_free_index
          (vnm->interface_main.sw_interfaces, t->sw_if_index))
        {
          /* the interface may have been deleted by the time the trace is printed */
          s = format (s, "sw_if_index: %d ", t->sw_if_index);
        }
      else
        {
          si = vnet_get_sw_interface (vnm, t->sw_if_index);
          s =
            format (s, "%U ", format_vnet_sw_interface_name, vnm, si,
                    t->flags);
        }
      s =
        format (s, "\n%U%U", format_white_space, indent,
                node->format_buffer ? node->format_buffer : format_hex_bytes,
                t->data, sizeof (t->data));
    }
  return s;
}
#endif /* CLIB_MARCH_VARIANT */

static void
vnet_interface_output_trace (vlib_main_t * vm,
                             vlib_node_runtime_t * node,
                             vlib_frame_t * frame, uword n_buffers)
{
  u32 n_left, *from;

  n_left = n_buffers;
  from = vlib_frame_vector_args (frame);

  while (n_left >= 4)
    {
      u32 bi0, bi1;
      vlib_buffer_t *b0, *b1;
      interface_output_trace_t *t0, *t1;

      /* Prefetch next iteration. */
      vlib_prefetch_buffer_with_index (vm, from[2], LOAD);
      vlib_prefetch_buffer_with_index (vm, from[3], LOAD);

      bi0 = from[0];
      bi1 = from[1];

      b0 = vlib_get_buffer (vm, bi0);
      b1 = vlib_get_buffer (vm, bi1);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
        {
          t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
          t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_TX];
          t0->flags = b0->flags;
          clib_memcpy_fast (t0->data, vlib_buffer_get_current (b0),
                            sizeof (t0->data));
        }
      if (b1->flags & VLIB_BUFFER_IS_TRACED)
        {
          t1 = vlib_add_trace (vm, node, b1, sizeof (t1[0]));
          t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_TX];
          t1->flags = b1->flags;
          clib_memcpy_fast (t1->data, vlib_buffer_get_current (b1),
                            sizeof (t1->data));
        }
      from += 2;
      n_left -= 2;
    }

  while (n_left >= 1)
    {
      u32 bi0;
      vlib_buffer_t *b0;
      interface_output_trace_t *t0;

      bi0 = from[0];

      b0 = vlib_get_buffer (vm, bi0);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
        {
          t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
          t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_TX];
          t0->flags = b0->flags;
          clib_memcpy_fast (t0->data, vlib_buffer_get_current (b0),
                            sizeof (t0->data));
        }
      from += 1;
      n_left -= 1;
    }
}

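/* Compute any requested checksum offloads in software before the buffer
   reaches the driver's tx function (used when the hw interface cannot
   offload tx checksums itself). */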
static_always_inline void
vnet_interface_output_handle_offload (vlib_main_t *vm, vlib_buffer_t *b)
{
  if (b->flags & VNET_BUFFER_F_OFFLOAD)
    vnet_calc_checksums_inline (vm, b, b->flags & VNET_BUFFER_F_IS_IP4,
                                b->flags & VNET_BUFFER_F_IS_IP6);
}

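/* Per-buffer tx bookkeeping: validate lengths, accumulate byte counts,
   bump sub-interface tx counters, apply the interface-output feature arc
   config when present and, if requested, compute checksum offloads in
   software.  Returns the total number of bytes handed to tx. */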
static_always_inline uword
vnet_interface_output_node_inline (vlib_main_t *vm, u32 sw_if_index,
                                   vlib_combined_counter_main_t *ccm,
                                   vlib_buffer_t **b, u32 config_index, u8 arc,
                                   u32 n_left, int do_tx_offloads)
{
  u32 n_bytes = 0;
  u32 n_bytes0, n_bytes1, n_bytes2, n_bytes3;
  u32 ti = vm->thread_index;

  while (n_left >= 8)
    {
      u32 tx_swif0, tx_swif1, tx_swif2, tx_swif3;
      u32 or_flags;

      /* Prefetch next iteration. */
      vlib_prefetch_buffer_header (b[4], LOAD);
      vlib_prefetch_buffer_header (b[5], LOAD);
      vlib_prefetch_buffer_header (b[6], LOAD);
      vlib_prefetch_buffer_header (b[7], LOAD);

      or_flags = b[0]->flags | b[1]->flags | b[2]->flags | b[3]->flags;

      /* Be grumpy about zero length buffers for benefit of
         driver tx function. */
      ASSERT (b[0]->current_length > 0);
      ASSERT (b[1]->current_length > 0);
      ASSERT (b[2]->current_length > 0);
      ASSERT (b[3]->current_length > 0);

      n_bytes += n_bytes0 = vlib_buffer_length_in_chain (vm, b[0]);
      n_bytes += n_bytes1 = vlib_buffer_length_in_chain (vm, b[1]);
      n_bytes += n_bytes2 = vlib_buffer_length_in_chain (vm, b[2]);
      n_bytes += n_bytes3 = vlib_buffer_length_in_chain (vm, b[3]);

      tx_swif0 = vnet_buffer (b[0])->sw_if_index[VLIB_TX];
      tx_swif1 = vnet_buffer (b[1])->sw_if_index[VLIB_TX];
      tx_swif2 = vnet_buffer (b[2])->sw_if_index[VLIB_TX];
      tx_swif3 = vnet_buffer (b[3])->sw_if_index[VLIB_TX];

      /* update vlan subif tx counts, if required */
      if (PREDICT_FALSE (tx_swif0 != sw_if_index))
        vlib_increment_combined_counter (ccm, ti, tx_swif0, 1, n_bytes0);

      if (PREDICT_FALSE (tx_swif1 != sw_if_index))
        vlib_increment_combined_counter (ccm, ti, tx_swif1, 1, n_bytes1);

      if (PREDICT_FALSE (tx_swif2 != sw_if_index))
        vlib_increment_combined_counter (ccm, ti, tx_swif2, 1, n_bytes2);

      if (PREDICT_FALSE (tx_swif3 != sw_if_index))
        vlib_increment_combined_counter (ccm, ti, tx_swif3, 1, n_bytes3);

      if (PREDICT_FALSE (config_index != ~0))
        {
          vnet_buffer (b[0])->feature_arc_index = arc;
          b[0]->current_config_index = config_index;
          vnet_buffer (b[1])->feature_arc_index = arc;
          b[1]->current_config_index = config_index;
          vnet_buffer (b[2])->feature_arc_index = arc;
          b[2]->current_config_index = config_index;
          vnet_buffer (b[3])->feature_arc_index = arc;
          b[3]->current_config_index = config_index;
        }

      if (do_tx_offloads && (or_flags & VNET_BUFFER_F_OFFLOAD))
        {
          vnet_interface_output_handle_offload (vm, b[0]);
          vnet_interface_output_handle_offload (vm, b[1]);
          vnet_interface_output_handle_offload (vm, b[2]);
          vnet_interface_output_handle_offload (vm, b[3]);
        }

      n_left -= 4;
      b += 4;
    }

  while (n_left)
    {
      u32 tx_swif0;

      /* Be grumpy about zero length buffers for benefit of
         driver tx function. */
      ASSERT (b[0]->current_length > 0);

      n_bytes += n_bytes0 = vlib_buffer_length_in_chain (vm, b[0]);
      tx_swif0 = vnet_buffer (b[0])->sw_if_index[VLIB_TX];

      if (PREDICT_FALSE (config_index != ~0))
        {
          vnet_buffer (b[0])->feature_arc_index = arc;
          b[0]->current_config_index = config_index;
        }

      if (PREDICT_FALSE (tx_swif0 != sw_if_index))
        vlib_increment_combined_counter (ccm, ti, tx_swif0, 1, n_bytes0);

      if (do_tx_offloads)
        vnet_interface_output_handle_offload (vm, b[0]);

      n_left -= 1;
      b += 1;
    }

  return n_bytes;
}

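/* Capture tx packets into the pcap trace file when tx capture is enabled,
   honoring the optional classifier filter and per-interface capture
   settings. */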
static_always_inline void vnet_interface_pcap_tx_trace
  (vlib_main_t * vm, vlib_node_runtime_t * node, vlib_frame_t * frame,
   int sw_if_index_from_buffer)
{
  vnet_main_t *vnm = vnet_get_main ();
  u32 n_left_from, *from;
  u32 sw_if_index;
  vnet_pcap_t *pp = &vnm->pcap;

  if (PREDICT_TRUE (pp->pcap_tx_enable == 0))
    return;

  if (sw_if_index_from_buffer == 0)
    {
      vnet_interface_output_runtime_t *rt = (void *) node->runtime_data;
      sw_if_index = rt->sw_if_index;
    }
  else
    sw_if_index = ~0;

  n_left_from = frame->n_vectors;
  from = vlib_frame_vector_args (frame);

  while (n_left_from > 0)
    {
      int classify_filter_result;
      u32 bi0 = from[0];
      vlib_buffer_t *b0 = vlib_get_buffer (vm, bi0);
      from++;
      n_left_from--;

      if (pp->filter_classify_table_index != ~0)
        {
          classify_filter_result =
            vnet_is_packet_traced_inline
            (b0, pp->filter_classify_table_index, 0 /* full classify */ );
          if (classify_filter_result)
            pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);
          continue;
        }

      if (sw_if_index_from_buffer)
        sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_TX];

      if (pp->pcap_sw_if_index == 0 || pp->pcap_sw_if_index == sw_if_index)
        {
          vnet_main_t *vnm = vnet_get_main ();
          vnet_hw_interface_t *hi =
            vnet_get_sup_hw_interface (vnm, sw_if_index);
          /* Capture pkt if not filtered, or if filter hits */
          if (hi->trace_classify_table_index == ~0 ||
              vnet_is_packet_traced_inline
              (b0, hi->trace_classify_table_index, 0 /* full classify */ ))
            pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);
        }
    }
}

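/* Per-interface output node: trace and pcap-capture as configured, drop
   everything if the interface is deleted or admin/link down, start the
   interface-output feature arc when features are enabled, update tx
   counters and hand the frame to the device tx node. */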
VLIB_NODE_FN (vnet_interface_output_node)
(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  vnet_interface_main_t *im = &vnm->interface_main;
  vlib_combined_counter_main_t *ccm;
  vnet_hw_interface_t *hi;
  vnet_sw_interface_t *si;
  vnet_interface_output_runtime_t *rt = (void *) node->runtime_data;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE];
  u32 n_bytes, n_buffers = frame->n_vectors;
  u32 config_index = ~0;
  u32 sw_if_index = rt->sw_if_index;
  u32 next_index = VNET_INTERFACE_OUTPUT_NEXT_TX;
  u32 ti = vm->thread_index;
  u8 arc = im->output_feature_arc_index;
  u32 *from;

  if (node->flags & VLIB_NODE_FLAG_TRACE)
    vnet_interface_output_trace (vm, node, frame, n_buffers);

  from = vlib_frame_vector_args (frame);

  if (rt->is_deleted)
    return vlib_error_drop_buffers (
      vm, node, from,
      /* buffer stride */ 1, n_buffers, VNET_INTERFACE_OUTPUT_NEXT_DROP,
      node->node_index, VNET_INTERFACE_OUTPUT_ERROR_INTERFACE_DELETED);

  vnet_interface_pcap_tx_trace (vm, node, frame,
                                0 /* sw_if_index_from_buffer */ );

  vlib_get_buffers (vm, from, bufs, n_buffers);

  si = vnet_get_sw_interface (vnm, sw_if_index);
  hi = vnet_get_sup_hw_interface (vnm, sw_if_index);

  if (!(si->flags & VNET_SW_INTERFACE_FLAG_ADMIN_UP) ||
      !(hi->flags & VNET_HW_INTERFACE_FLAG_LINK_UP))
    {
      vlib_simple_counter_main_t *cm;

      cm = vec_elt_at_index (vnm->interface_main.sw_if_counters,
                             VNET_INTERFACE_COUNTER_TX_ERROR);
      vlib_increment_simple_counter (cm, ti, sw_if_index, n_buffers);

      return vlib_error_drop_buffers (
        vm, node, from,
        /* buffer stride */ 1, n_buffers, VNET_INTERFACE_OUTPUT_NEXT_DROP,
        node->node_index, VNET_INTERFACE_OUTPUT_ERROR_INTERFACE_DOWN);
    }

  /* interface-output feature arc handling */
  if (PREDICT_FALSE (vnet_have_features (arc, sw_if_index)))
    {
      vnet_feature_config_main_t *fcm;
      fcm = vnet_feature_get_config_main (arc);
      config_index = vnet_get_feature_config_index (arc, sw_if_index);
      vnet_get_config_data (&fcm->config_main, &config_index, &next_index, 0);
    }

  ccm = im->combined_sw_if_counters + VNET_INTERFACE_COUNTER_TX;

  if (hi->caps & VNET_HW_INTERFACE_CAP_SUPPORTS_TX_CKSUM)
    n_bytes = vnet_interface_output_node_inline (vm, sw_if_index, ccm, bufs,
                                                 config_index, arc, n_buffers,
                                                 /* do_tx_offloads */ 0);
  else
    n_bytes = vnet_interface_output_node_inline (vm, sw_if_index, ccm, bufs,
                                                 config_index, arc, n_buffers,
                                                 /* do_tx_offloads */ 1);

  vlib_buffer_enqueue_to_single_next (vm, node, vlib_frame_vector_args (frame),
                                      next_index, frame->n_vectors);

  /* Update main interface stats. */
  vlib_increment_combined_counter (ccm, ti, sw_if_index, n_buffers, n_bytes);
  return n_buffers;
}

VLIB_REGISTER_NODE (vnet_interface_output_node) = {
  .name = "interface-output-template",
  .vector_size = sizeof (u32),
};

/* Use buffer's sw_if_index[VLIB_TX] to choose output interface. */
VLIB_NODE_FN (vnet_per_buffer_interface_output_node) (vlib_main_t * vm,
                                                      vlib_node_runtime_t *
                                                      node,
                                                      vlib_frame_t * frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  u32 n_left_to_next, *from, *to_next;
  u32 n_left_from, next_index;

  vnet_interface_pcap_tx_trace (vm, node, frame,
                                1 /* sw_if_index_from_buffer */ );

  n_left_from = frame->n_vectors;

  from = vlib_frame_vector_args (frame);
  next_index = node->cached_next_index;

  while (n_left_from > 0)
    {
      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      while (n_left_from >= 4 && n_left_to_next >= 2)
        {
          u32 bi0, bi1, next0, next1;
          vlib_buffer_t *b0, *b1;
          vnet_hw_interface_t *hi0, *hi1;

          /* Prefetch next iteration. */
          vlib_prefetch_buffer_with_index (vm, from[2], LOAD);
          vlib_prefetch_buffer_with_index (vm, from[3], LOAD);

          bi0 = from[0];
          bi1 = from[1];
          to_next[0] = bi0;
          to_next[1] = bi1;
          from += 2;
          to_next += 2;
          n_left_to_next -= 2;
          n_left_from -= 2;

          b0 = vlib_get_buffer (vm, bi0);
          b1 = vlib_get_buffer (vm, bi1);

          hi0 =
            vnet_get_sup_hw_interface (vnm,
                                       vnet_buffer (b0)->sw_if_index
                                       [VLIB_TX]);
          hi1 =
            vnet_get_sup_hw_interface (vnm,
                                       vnet_buffer (b1)->sw_if_index
                                       [VLIB_TX]);

          next0 = hi0->output_node_next_index;
          next1 = hi1->output_node_next_index;

          vlib_validate_buffer_enqueue_x2 (vm, node, next_index, to_next,
                                           n_left_to_next, bi0, bi1, next0,
                                           next1);
        }

      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0, next0;
          vlib_buffer_t *b0;
          vnet_hw_interface_t *hi0;

          bi0 = from[0];
          to_next[0] = bi0;
          from += 1;
          to_next += 1;
          n_left_to_next -= 1;
          n_left_from -= 1;

          b0 = vlib_get_buffer (vm, bi0);

          hi0 =
            vnet_get_sup_hw_interface (vnm,
                                       vnet_buffer (b0)->sw_if_index
                                       [VLIB_TX]);

          next0 = hi0->output_node_next_index;

          vlib_validate_buffer_enqueue_x1 (vm, node, next_index, to_next,
                                           n_left_to_next, bi0, next0);
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  return frame->n_vectors;
}

typedef struct vnet_error_trace_t_
{
  u32 sw_if_index;
  i8 details_valid;
  u8 is_ip6;
  u8 pad[2];
  u16 mactype;
  ip46_address_t src, dst;
} vnet_error_trace_t;

static u8 *
format_vnet_error_trace (u8 * s, va_list * va)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*va, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*va, vlib_node_t *);
  vnet_error_trace_t *t = va_arg (*va, vnet_error_trace_t *);

  /* Normal, non-catchup trace */
  if (t->details_valid == 0)
    {
      s = format (s, "rx:%U", format_vnet_sw_if_index_name,
                  vnet_get_main (), t->sw_if_index);
    }
  else if (t->details_valid == 1)
    {
      /* The trace capture code didn't understand the mactype */
      s = format (s, "mactype 0x%4x (not decoded)", t->mactype);
    }
  else if (t->details_valid == 2)
    {
      /* Dump the src/dst addresses */
      if (t->is_ip6 == 0)
        s = format (s, "IP4: %U -> %U",
                    format_ip4_address, &t->src.ip4,
                    format_ip4_address, &t->dst.ip4);
      else
        s = format (s, "IP6: %U -> %U",
                    format_ip6_address, &t->src.ip6,
                    format_ip6_address, &t->dst.ip6);
    }
  return s;
}

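/* Add a minimal (rx sw_if_index only) trace record for each traced buffer
   in an error-drop / error-punt frame. */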
static void
interface_trace_buffers (vlib_main_t * vm,
                         vlib_node_runtime_t * node, vlib_frame_t * frame)
{
  u32 n_left, *buffers;

  buffers = vlib_frame_vector_args (frame);
  n_left = frame->n_vectors;

  while (n_left >= 4)
    {
      u32 bi0, bi1;
      vlib_buffer_t *b0, *b1;
      vnet_error_trace_t *t0, *t1;

      /* Prefetch next iteration. */
      vlib_prefetch_buffer_with_index (vm, buffers[2], LOAD);
      vlib_prefetch_buffer_with_index (vm, buffers[3], LOAD);

      bi0 = buffers[0];
      bi1 = buffers[1];

      b0 = vlib_get_buffer (vm, bi0);
      b1 = vlib_get_buffer (vm, bi1);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
        {
          t0 = vlib_add_trace (vm, node, b0,
                               STRUCT_OFFSET_OF (vnet_error_trace_t, pad));
          t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
          t0->details_valid = 0;
        }
      if (b1->flags & VLIB_BUFFER_IS_TRACED)
        {
          t1 = vlib_add_trace (vm, node, b1,
                               STRUCT_OFFSET_OF (vnet_error_trace_t, pad));
          t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_RX];
          t1->details_valid = 0;
        }
      buffers += 2;
      n_left -= 2;
    }

  while (n_left >= 1)
    {
      u32 bi0;
      vlib_buffer_t *b0;
      vnet_error_trace_t *t0;

      bi0 = buffers[0];

      b0 = vlib_get_buffer (vm, bi0);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
        {
          t0 = vlib_add_trace (vm, node, b0,
                               STRUCT_OFFSET_OF (vnet_error_trace_t, pad));
          t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
          t0->details_valid = 0;
        }
      buffers += 1;
      n_left -= 1;
    }
}

typedef enum
{
  VNET_ERROR_DISPOSITION_DROP,
  VNET_ERROR_DISPOSITION_PUNT,
  VNET_ERROR_N_DISPOSITION,
} vnet_error_disposition_t;

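/* Best-effort trace record for a packet first traced at the drop node:
   rewind to the L2 header if possible and record the mactype and, for
   IP4/IP6 packets, the src/dst addresses. */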
static void
drop_catchup_trace (vlib_main_t * vm,
                    vlib_node_runtime_t * node, vlib_buffer_t * b)
{
  /* Can we safely rewind the buffer? If not, fagedaboudit */
  if (b->flags & VNET_BUFFER_F_L2_HDR_OFFSET_VALID)
    {
      vnet_error_trace_t *t;
      ip4_header_t *ip4;
      ip6_header_t *ip6;
      ethernet_header_t *eh;
      i16 delta;

      t = vlib_add_trace (vm, node, b, sizeof (*t));
      delta = vnet_buffer (b)->l2_hdr_offset - b->current_data;
      vlib_buffer_advance (b, delta);

      eh = vlib_buffer_get_current (b);
      /* Save mactype */
      t->mactype = clib_net_to_host_u16 (eh->type);
      t->details_valid = 1;
      switch (t->mactype)
        {
        case ETHERNET_TYPE_IP4:
          ip4 = (void *) (eh + 1);
          t->details_valid = 2;
          t->is_ip6 = 0;
          t->src.ip4.as_u32 = ip4->src_address.as_u32;
          t->dst.ip4.as_u32 = ip4->dst_address.as_u32;
          break;

        case ETHERNET_TYPE_IP6:
          ip6 = (void *) (eh + 1);
          t->details_valid = 2;
          t->is_ip6 = 1;
          clib_memcpy_fast (t->src.as_u8, ip6->src_address.as_u8,
                            sizeof (ip6_address_t));
          clib_memcpy_fast (t->dst.as_u8, ip6->dst_address.as_u8,
                            sizeof (ip6_address_t));
          break;

        default:
          /* Dunno, do nothing, leave details_valid alone */
          break;
        }
      /* Restore current data (probably unnecessary) */
      vlib_buffer_advance (b, -delta);
    }
}

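/* Shared worker for the error-drop and error-punt nodes: trace packets as
   needed, count drops/punts against each rx (sub-)interface and its
   super-interface, then send the whole frame to the drop or punt node. */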
static_always_inline uword
interface_drop_punt (vlib_main_t * vm,
                     vlib_node_runtime_t * node,
                     vlib_frame_t * frame,
                     vnet_error_disposition_t disposition)
{
  u32 *from, n_left, thread_index, *sw_if_index;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b;
  u32 sw_if_indices[VLIB_FRAME_SIZE];
  vlib_simple_counter_main_t *cm;
  u16 nexts[VLIB_FRAME_SIZE];
  u32 n_trace;
  vnet_main_t *vnm;

  vnm = vnet_get_main ();
  thread_index = vm->thread_index;
  from = vlib_frame_vector_args (frame);
  n_left = frame->n_vectors;
  b = bufs;
  sw_if_index = sw_if_indices;

  vlib_get_buffers (vm, from, bufs, n_left);

  /* "trace add error-drop NNN?" */
  if (PREDICT_FALSE ((n_trace = vlib_get_trace_count (vm, node))))
    {
      /* If pkts aren't otherwise traced... */
      if ((node->flags & VLIB_NODE_FLAG_TRACE) == 0)
        {
          /* Trace them from here */
          node->flags |= VLIB_NODE_FLAG_TRACE;
          while (n_trace && n_left)
            {
              if (PREDICT_TRUE
                  (vlib_trace_buffer (vm, node, 0 /* next_index */ , b[0],
                                      0 /* follow chain */ )))
                {
                  /*
                   * Here we have a wireshark dissector problem.
                   * Packets may be well-formed, or not. We
                   * must not blow chunks in any case.
                   *
                   * Try to produce trace records which will help
                   * folks understand what's going on.
                   */
                  drop_catchup_trace (vm, node, b[0]);
                  n_trace--;
                }
              n_left--;
              b++;
            }
        }

      vlib_set_trace_count (vm, node, n_trace);
      b = bufs;
      n_left = frame->n_vectors;
    }

  if (node->flags & VLIB_NODE_FLAG_TRACE)
    interface_trace_buffers (vm, node, frame);

  /* All going to drop regardless, this is just a counting exercise */
  clib_memset (nexts, 0, sizeof (nexts));

  cm = vec_elt_at_index (vnm->interface_main.sw_if_counters,
                         (disposition == VNET_ERROR_DISPOSITION_PUNT
                          ? VNET_INTERFACE_COUNTER_PUNT
                          : VNET_INTERFACE_COUNTER_DROP));

  /* collect the array of interfaces first ... */
  while (n_left >= 4)
    {
      if (n_left >= 12)
        {
          /* Prefetch 8 ahead - there's not much going on in each iteration */
          vlib_prefetch_buffer_header (b[4], LOAD);
          vlib_prefetch_buffer_header (b[5], LOAD);
          vlib_prefetch_buffer_header (b[6], LOAD);
          vlib_prefetch_buffer_header (b[7], LOAD);
        }
      sw_if_index[0] = vnet_buffer (b[0])->sw_if_index[VLIB_RX];
      sw_if_index[1] = vnet_buffer (b[1])->sw_if_index[VLIB_RX];
      sw_if_index[2] = vnet_buffer (b[2])->sw_if_index[VLIB_RX];
      sw_if_index[3] = vnet_buffer (b[3])->sw_if_index[VLIB_RX];

      sw_if_index += 4;
      n_left -= 4;
      b += 4;
    }
  while (n_left)
    {
      sw_if_index[0] = vnet_buffer (b[0])->sw_if_index[VLIB_RX];

      sw_if_index += 1;
      n_left -= 1;
      b += 1;
    }

  /* ... then count against them in blocks */
  n_left = frame->n_vectors;

  while (n_left)
    {
      vnet_sw_interface_t *sw_if0;
      u16 off, count;

      off = frame->n_vectors - n_left;

      sw_if_index = sw_if_indices + off;

      count = clib_count_equal_u32 (sw_if_index, n_left);
      n_left -= count;

      vlib_increment_simple_counter (cm, thread_index, sw_if_index[0], count);

      /* Increment super-interface drop/punt counters for
         sub-interfaces. */
      sw_if0 = vnet_get_sw_interface (vnm, sw_if_index[0]);
      if (sw_if0->sup_sw_if_index != sw_if_index[0])
        vlib_increment_simple_counter
          (cm, thread_index, sw_if0->sup_sw_if_index, count);
    }

  vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);

  return frame->n_vectors;
}

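/* Capture dropped packets into the drop pcap file.  Rewind each buffer to
   its L2 header (or buffer start) and, when there is room in the last
   buffer of the chain, append "<node-name>: <error-string>" so the capture
   records why the packet was dropped. */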
static inline void
pcap_drop_trace (vlib_main_t * vm,
                 vnet_interface_main_t * im,
                 vnet_pcap_t * pp, vlib_frame_t * f)
{
  u32 *from;
  u32 n_left = f->n_vectors;
  vlib_buffer_t *b0, *p1;
  u32 bi0;
  i16 save_current_data;
  u16 save_current_length;
  vlib_error_main_t *em = &vm->error_main;
  int do_trace = 0;

  from = vlib_frame_vector_args (f);

  while (n_left > 0)
    {
      if (PREDICT_TRUE (n_left > 1))
        {
          p1 = vlib_get_buffer (vm, from[1]);
          vlib_prefetch_buffer_header (p1, LOAD);
        }

      bi0 = from[0];
      b0 = vlib_get_buffer (vm, bi0);
      from++;
      n_left--;

      /* See if we're pointedly ignoring this specific error */
      if (im->pcap_drop_filter_hash
          && hash_get (im->pcap_drop_filter_hash, b0->error))
        continue;

      do_trace = (pp->pcap_sw_if_index == 0) ||
        pp->pcap_sw_if_index == vnet_buffer (b0)->sw_if_index[VLIB_RX];

      if (PREDICT_FALSE
          (do_trace == 0 && pp->filter_classify_table_index != ~0))
        {
          do_trace = vnet_is_packet_traced_inline
            (b0, pp->filter_classify_table_index, 0 /* full classify */ );
        }

      /* Trace all drops, or drops received on a specific interface */
      if (do_trace)
        {
          save_current_data = b0->current_data;
          save_current_length = b0->current_length;

          /*
           * Typically, we'll need to rewind the buffer
           * if l2_hdr_offset is valid, make sure to rewind to the start of
           * the L2 header. This may not be the buffer start in case we pop-ed
           * vlan tags.
           * Otherwise, rewind to buffer start and hope for the best.
           */
          if (b0->flags & VNET_BUFFER_F_L2_HDR_OFFSET_VALID)
            {
              if (b0->current_data > vnet_buffer (b0)->l2_hdr_offset)
                vlib_buffer_advance (b0,
                                     vnet_buffer (b0)->l2_hdr_offset -
                                     b0->current_data);
            }
          else if (b0->current_data > 0)
            vlib_buffer_advance (b0, (word) - b0->current_data);

          {
            vlib_buffer_t *last = b0;
            u32 error_node_index;
            int drop_string_len;
            vlib_node_t *n;
            /* Length of the error string */
            int error_string_len =
              clib_strnlen (em->counters_heap[b0->error].name, 128);

            /* Dig up the drop node */
            error_node_index = vm->node_main.node_by_error[b0->error];
            n = vlib_get_node (vm, error_node_index);

            /* Length of full drop string, w/ "nodename: " prepended */
            drop_string_len = error_string_len + vec_len (n->name) + 2;

            /* Find the last buffer in the chain */
            while (last->flags & VLIB_BUFFER_NEXT_PRESENT)
              last = vlib_get_buffer (vm, last->next_buffer);

            /*
             * Append <nodename>: <error-string> to the capture,
             * only if we can do that without allocating a new buffer.
             */
            if (PREDICT_TRUE ((last->current_data + last->current_length)
                              < (VLIB_BUFFER_DEFAULT_DATA_SIZE
                                 - drop_string_len)))
              {
                clib_memcpy_fast (last->data + last->current_data +
                                  last->current_length, n->name,
                                  vec_len (n->name));
                clib_memcpy_fast (last->data + last->current_data +
                                  last->current_length + vec_len (n->name),
                                  ": ", 2);
                clib_memcpy_fast (last->data + last->current_data +
                                  last->current_length + vec_len (n->name) +
                                  2, em->counters_heap[b0->error].name,
                                  error_string_len);
                last->current_length += drop_string_len;
                b0->flags &= ~(VLIB_BUFFER_TOTAL_LENGTH_VALID);
                pcap_add_buffer (&pp->pcap_main, vm, bi0,
                                 pp->max_bytes_per_pkt);
                last->current_length -= drop_string_len;
                b0->current_data = save_current_data;
                b0->current_length = save_current_length;
                continue;
              }
          }

          /*
           * Didn't have space in the last buffer, here's the dropped
           * packet as-is
           */
          pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);

          b0->current_data = save_current_data;
          b0->current_length = save_current_length;
        }
    }
}

#ifndef CLIB_MARCH_VARIANT
void
vnet_pcap_drop_trace_filter_add_del (u32 error_index, int is_add)
{
  vnet_interface_main_t *im = &vnet_get_main ()->interface_main;

  if (im->pcap_drop_filter_hash == 0)
    im->pcap_drop_filter_hash = hash_create (0, sizeof (uword));

  if (is_add)
    hash_set (im->pcap_drop_filter_hash, error_index, 1);
  else
    hash_unset (im->pcap_drop_filter_hash, error_index);
}
#endif /* CLIB_MARCH_VARIANT */

VLIB_NODE_FN (interface_drop) (vlib_main_t * vm,
                               vlib_node_runtime_t * node,
                               vlib_frame_t * frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  vnet_interface_main_t *im = &vnet_get_main ()->interface_main;
  vnet_pcap_t *pp = &vnm->pcap;

  if (PREDICT_FALSE (pp->pcap_drop_enable))
    pcap_drop_trace (vm, im, pp, frame);

  return interface_drop_punt (vm, node, frame, VNET_ERROR_DISPOSITION_DROP);
}

VLIB_NODE_FN (interface_punt) (vlib_main_t * vm,
                               vlib_node_runtime_t * node,
                               vlib_frame_t * frame)
{
  return interface_drop_punt (vm, node, frame, VNET_ERROR_DISPOSITION_PUNT);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (interface_drop) = {
  .name = "error-drop",
  .vector_size = sizeof (u32),
  .format_trace = format_vnet_error_trace,
  .flags = VLIB_NODE_FLAG_TRACE_SUPPORTED,
  .n_next_nodes = 1,
  .next_nodes = {
    [0] = "drop",
  },
};
/* *INDENT-ON* */

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (interface_punt) = {
  .name = "error-punt",
  .vector_size = sizeof (u32),
  .format_trace = format_vnet_error_trace,
  .flags = VLIB_NODE_FLAG_TRACE_SUPPORTED,
  .n_next_nodes = 1,
  .next_nodes = {
    [0] = "punt",
  },
};
/* *INDENT-ON* */

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (vnet_per_buffer_interface_output_node) = {
  .name = "interface-output",
  .vector_size = sizeof (u32),
};
/* *INDENT-ON* */

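/* "interface-tx" feature arc end node: regroup buffers by their tx
   sw_if_index and hand a frame directly to each hardware interface's
   tx node. */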
static uword
interface_tx_node_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
                      vlib_frame_t * from_frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  u32 last_sw_if_index = ~0;
  vlib_frame_t *to_frame = 0;
  vnet_hw_interface_t *hw = 0;
  u32 *from, *to_next = 0;
  u32 n_left_from;

  from = vlib_frame_vector_args (from_frame);
  n_left_from = from_frame->n_vectors;
  while (n_left_from > 0)
    {
      u32 bi0;
      vlib_buffer_t *b0;
      u32 sw_if_index0;

      bi0 = from[0];
      from++;
      n_left_from--;
      b0 = vlib_get_buffer (vm, bi0);
      sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_TX];

      if (PREDICT_FALSE ((last_sw_if_index != sw_if_index0) || to_frame == 0))
        {
          if (to_frame)
            {
              hw = vnet_get_sup_hw_interface (vnm, last_sw_if_index);
              vlib_put_frame_to_node (vm, hw->tx_node_index, to_frame);
            }
          last_sw_if_index = sw_if_index0;
          hw = vnet_get_sup_hw_interface (vnm, sw_if_index0);
          to_frame = vlib_get_frame_to_node (vm, hw->tx_node_index);
          to_next = vlib_frame_vector_args (to_frame);
        }

      to_next[0] = bi0;
      to_next++;
      to_frame->n_vectors++;
    }
  vlib_put_frame_to_node (vm, hw->tx_node_index, to_frame);
  return from_frame->n_vectors;
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (interface_tx) = {
  .function = interface_tx_node_fn,
  .name = "interface-tx",
  .vector_size = sizeof (u32),
  .n_next_nodes = 1,
  .next_nodes = {
    [0] = "error-drop",
  },
};

VNET_FEATURE_ARC_INIT (interface_output, static) =
{
  .arc_name = "interface-output",
  .start_nodes = VNET_FEATURES (0),
  .last_in_arc = "interface-tx",
  .arc_index_ptr = &vnet_main.interface_main.output_feature_arc_index,
};

VNET_FEATURE_INIT (span_tx, static) = {
  .arc_name = "interface-output",
  .node_name = "span-output",
  .runs_before = VNET_FEATURES ("interface-tx"),
};

VNET_FEATURE_INIT (ipsec_if_tx, static) = {
  .arc_name = "interface-output",
  .node_name = "ipsec-if-output",
  .runs_before = VNET_FEATURES ("interface-tx"),
};

VNET_FEATURE_INIT (interface_tx, static) = {
  .arc_name = "interface-output",
  .node_name = "interface-tx",
  .runs_before = 0,
};
/* *INDENT-ON* */

#ifndef CLIB_MARCH_VARIANT
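/* Hw interface add/del callback: if the interface already has an output
   node, wire it into the "interface-output" node's next-node list so
   per-buffer output can reach it. */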
clib_error_t *
vnet_per_buffer_interface_output_hw_interface_add_del (vnet_main_t * vnm,
                                                        u32 hw_if_index,
                                                        u32 is_create)
{
  vnet_hw_interface_t *hi = vnet_get_hw_interface (vnm, hw_if_index);
  u32 next_index;

  if (hi->output_node_index == 0)
    return 0;

  next_index = vlib_node_add_next
    (vnm->vlib_main, vnet_per_buffer_interface_output_node.index,
     hi->output_node_index);
  hi->output_node_next_index = next_index;

  return 0;
}

VNET_HW_INTERFACE_ADD_DEL_FUNCTION
  (vnet_per_buffer_interface_output_hw_interface_add_del);

1116vnet_set_interface_output_node (vnet_main_t * vnm,
1117 u32 hw_if_index, u32 node_index)
1118{
1119 ASSERT (node_index);
1120 vnet_hw_interface_t *hi = vnet_get_hw_interface (vnm, hw_if_index);
1121 u32 next_index = vlib_node_add_next
1122 (vnm->vlib_main, vnet_per_buffer_interface_output_node.index, node_index);
1123 hi->output_node_next_index = next_index;
1124 hi->output_node_index = node_index;
1125}
Filip Tehlar62668772019-03-04 03:33:32 -08001126#endif /* CLIB_MARCH_VARIANT */
John Loe5453d02018-01-23 19:21:34 -05001127
Dave Barachba868bb2016-08-08 09:51:21 -04001128/*
1129 * fd.io coding-style-patch-verification: ON
1130 *
1131 * Local Variables:
1132 * eval: (c-set-style "gnu")
1133 * End:
1134 */