/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * interface_output.c: interface output node
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include <vnet/vnet.h>
#include <vnet/ip/icmp46_packet.h>
#include <vnet/ethernet/packet.h>
#include <vnet/ip/format.h>
#include <vnet/ip/ip4.h>
#include <vnet/ip/ip6.h>
#include <vnet/udp/udp_packet.h>
#include <vnet/feature/feature.h>
#include <vnet/classify/pcap_classify.h>
#include <vnet/interface_output.h>

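/* Per-packet trace record for the interface output path: TX sw_if_index,
   buffer flags and the leading bytes of packet data. */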
typedef struct
{
  u32 sw_if_index;
  u32 flags;
  u8 data[128 - 2 * sizeof (u32)];
}
interface_output_trace_t;

#ifndef CLIB_MARCH_VARIANT
u8 *
format_vnet_interface_output_trace (u8 * s, va_list * va)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*va, vlib_main_t *);
  vlib_node_t *node = va_arg (*va, vlib_node_t *);
  interface_output_trace_t *t = va_arg (*va, interface_output_trace_t *);
  vnet_main_t *vnm = vnet_get_main ();
  vnet_sw_interface_t *si;
  u32 indent;

  if (t->sw_if_index != (u32) ~ 0)
    {
      indent = format_get_indent (s);

      if (pool_is_free_index
	  (vnm->interface_main.sw_interfaces, t->sw_if_index))
	{
	  /* the interface may have been deleted by the time the trace is printed */
	  s = format (s, "sw_if_index: %d ", t->sw_if_index);
	}
      else
	{
	  si = vnet_get_sw_interface (vnm, t->sw_if_index);
	  s =
	    format (s, "%U ", format_vnet_sw_interface_name, vnm, si,
		    t->flags);
	}
      s =
	format (s, "\n%U%U", format_white_space, indent,
		node->format_buffer ? node->format_buffer : format_hex_bytes,
		t->data, sizeof (t->data));
    }
  return s;
}
#endif /* CLIB_MARCH_VARIANT */

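/* Add trace records for buffers marked VLIB_BUFFER_IS_TRACED: record the
   TX sw_if_index, the buffer flags and the start of the packet data. */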
static void
vnet_interface_output_trace (vlib_main_t * vm,
			     vlib_node_runtime_t * node,
			     vlib_frame_t * frame, uword n_buffers)
{
  u32 n_left, *from;

  n_left = n_buffers;
  from = vlib_frame_vector_args (frame);

  while (n_left >= 4)
    {
      u32 bi0, bi1;
      vlib_buffer_t *b0, *b1;
      interface_output_trace_t *t0, *t1;

      /* Prefetch next iteration. */
      vlib_prefetch_buffer_with_index (vm, from[2], LOAD);
      vlib_prefetch_buffer_with_index (vm, from[3], LOAD);

      bi0 = from[0];
      bi1 = from[1];

      b0 = vlib_get_buffer (vm, bi0);
      b1 = vlib_get_buffer (vm, bi1);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
	{
	  t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
	  t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_TX];
	  t0->flags = b0->flags;
	  clib_memcpy_fast (t0->data, vlib_buffer_get_current (b0),
			    sizeof (t0->data));
	}
      if (b1->flags & VLIB_BUFFER_IS_TRACED)
	{
	  t1 = vlib_add_trace (vm, node, b1, sizeof (t1[0]));
	  t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_TX];
	  t1->flags = b1->flags;
	  clib_memcpy_fast (t1->data, vlib_buffer_get_current (b1),
			    sizeof (t1->data));
	}
      from += 2;
      n_left -= 2;
    }

  while (n_left >= 1)
    {
      u32 bi0;
      vlib_buffer_t *b0;
      interface_output_trace_t *t0;

      bi0 = from[0];

      b0 = vlib_get_buffer (vm, bi0);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
	{
	  t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
	  t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_TX];
	  t0->flags = b0->flags;
	  clib_memcpy_fast (t0->data, vlib_buffer_get_current (b0),
			    sizeof (t0->data));
	}
      from += 1;
      n_left -= 1;
    }
}

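/* Resolve a pending checksum offload request in software via
   vnet_calc_checksums_inline (), used when the interface cannot do the
   TX checksum work in hardware. */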
static_always_inline void
vnet_interface_output_handle_offload (vlib_main_t *vm, vlib_buffer_t *b)
{
  vnet_calc_checksums_inline (vm, b, b->flags & VNET_BUFFER_F_IS_IP4,
			      b->flags & VNET_BUFFER_F_IS_IP6);
}

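/*
 * Main output worker, specialized at compile time by do_tx_offloads and
 * arc_or_subif.  Sums per-buffer byte counts, bumps sub-interface TX
 * counters when a buffer's TX sw_if_index differs from the node's
 * interface, redirects buffers into the interface-output feature arc when
 * config_index is valid, and fixes up checksum offload in software when
 * requested.  Returns the total byte count for the frame.
 */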
static_always_inline uword
vnet_interface_output_node_inline (vlib_main_t *vm, u32 sw_if_index,
				   vlib_combined_counter_main_t *ccm,
				   vlib_buffer_t **b, u32 config_index, u8 arc,
				   u32 n_left, int do_tx_offloads,
				   int arc_or_subif)
{
  u32 n_bytes = 0;
  u32 n_bytes0, n_bytes1, n_bytes2, n_bytes3;
  u32 ti = vm->thread_index;

  while (n_left >= 8)
    {
      u32 or_flags;

      /* Prefetch next iteration. */
      vlib_prefetch_buffer_header (b[4], LOAD);
      vlib_prefetch_buffer_header (b[5], LOAD);
      vlib_prefetch_buffer_header (b[6], LOAD);
      vlib_prefetch_buffer_header (b[7], LOAD);

      if (do_tx_offloads)
	or_flags = b[0]->flags | b[1]->flags | b[2]->flags | b[3]->flags;

      /* Be grumpy about zero length buffers for benefit of
	 driver tx function. */
      ASSERT (b[0]->current_length > 0);
      ASSERT (b[1]->current_length > 0);
      ASSERT (b[2]->current_length > 0);
      ASSERT (b[3]->current_length > 0);

      n_bytes += n_bytes0 = vlib_buffer_length_in_chain (vm, b[0]);
      n_bytes += n_bytes1 = vlib_buffer_length_in_chain (vm, b[1]);
      n_bytes += n_bytes2 = vlib_buffer_length_in_chain (vm, b[2]);
      n_bytes += n_bytes3 = vlib_buffer_length_in_chain (vm, b[3]);

      if (arc_or_subif)
	{
	  u32 tx_swif0, tx_swif1, tx_swif2, tx_swif3;
	  tx_swif0 = vnet_buffer (b[0])->sw_if_index[VLIB_TX];
	  tx_swif1 = vnet_buffer (b[1])->sw_if_index[VLIB_TX];
	  tx_swif2 = vnet_buffer (b[2])->sw_if_index[VLIB_TX];
	  tx_swif3 = vnet_buffer (b[3])->sw_if_index[VLIB_TX];

	  /* update vlan subif tx counts, if required */
	  if (PREDICT_FALSE (tx_swif0 != sw_if_index))
	    vlib_increment_combined_counter (ccm, ti, tx_swif0, 1, n_bytes0);

	  if (PREDICT_FALSE (tx_swif1 != sw_if_index))
	    vlib_increment_combined_counter (ccm, ti, tx_swif1, 1, n_bytes1);

	  if (PREDICT_FALSE (tx_swif2 != sw_if_index))
	    vlib_increment_combined_counter (ccm, ti, tx_swif2, 1, n_bytes2);

	  if (PREDICT_FALSE (tx_swif3 != sw_if_index))
	    vlib_increment_combined_counter (ccm, ti, tx_swif3, 1, n_bytes3);

	  if (PREDICT_FALSE (config_index != ~0))
	    {
	      vnet_buffer (b[0])->feature_arc_index = arc;
	      b[0]->current_config_index = config_index;
	      vnet_buffer (b[1])->feature_arc_index = arc;
	      b[1]->current_config_index = config_index;
	      vnet_buffer (b[2])->feature_arc_index = arc;
	      b[2]->current_config_index = config_index;
	      vnet_buffer (b[3])->feature_arc_index = arc;
	      b[3]->current_config_index = config_index;
	    }
	}

      if (do_tx_offloads && (or_flags & VNET_BUFFER_F_OFFLOAD))
	{
	  vnet_interface_output_handle_offload (vm, b[0]);
	  vnet_interface_output_handle_offload (vm, b[1]);
	  vnet_interface_output_handle_offload (vm, b[2]);
	  vnet_interface_output_handle_offload (vm, b[3]);
	}

      n_left -= 4;
      b += 4;
    }

  while (n_left)
    {
      /* Be grumpy about zero length buffers for benefit of
	 driver tx function. */
      ASSERT (b[0]->current_length > 0);

      n_bytes += n_bytes0 = vlib_buffer_length_in_chain (vm, b[0]);

      if (arc_or_subif)
	{
	  u32 tx_swif0 = vnet_buffer (b[0])->sw_if_index[VLIB_TX];

	  if (PREDICT_FALSE (config_index != ~0))
	    {
	      vnet_buffer (b[0])->feature_arc_index = arc;
	      b[0]->current_config_index = config_index;
	    }

	  if (PREDICT_FALSE (tx_swif0 != sw_if_index))
	    vlib_increment_combined_counter (ccm, ti, tx_swif0, 1, n_bytes0);
	}

      if (do_tx_offloads)
	vnet_interface_output_handle_offload (vm, b[0]);

      n_left -= 1;
      b += 1;
    }

  return n_bytes;
}

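/* Append packets to the pcap capture when TX capture (pp->pcap_tx_enable)
   is on.  With sw_if_index_from_buffer set, each buffer's TX sw_if_index
   is used for filtering; otherwise the node's own interface is used. */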
static_always_inline void vnet_interface_pcap_tx_trace
  (vlib_main_t * vm, vlib_node_runtime_t * node, vlib_frame_t * frame,
   int sw_if_index_from_buffer)
{
  vnet_main_t *vnm = vnet_get_main ();
  u32 n_left_from, *from;
  u32 sw_if_index;
  vnet_pcap_t *pp = &vnm->pcap;

  if (PREDICT_TRUE (pp->pcap_tx_enable == 0))
    return;

  if (sw_if_index_from_buffer == 0)
    {
      vnet_interface_output_runtime_t *rt = (void *) node->runtime_data;
      sw_if_index = rt->sw_if_index;
    }
  else
    sw_if_index = ~0;

  n_left_from = frame->n_vectors;
  from = vlib_frame_vector_args (frame);

  while (n_left_from > 0)
    {
      u32 bi0 = from[0];
      vlib_buffer_t *b0 = vlib_get_buffer (vm, bi0);
      from++;
      n_left_from--;

      if (sw_if_index_from_buffer)
	sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_TX];

      if (vnet_is_packet_pcaped (pp, b0, sw_if_index))
	pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);
    }
}

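/*
 * Per-interface output node ("interface-output-template").  Drops frames
 * for deleted or admin/link-down interfaces, handles tracing and pcap TX
 * capture, selects the vnet_interface_output_node_inline () specialization
 * based on whether software checksum fixup and feature-arc/sub-interface
 * handling are needed, then hands the frame to the next node and updates
 * the interface TX counters.
 */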
VLIB_NODE_FN (vnet_interface_output_node)
(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  vnet_interface_main_t *im = &vnm->interface_main;
  vlib_combined_counter_main_t *ccm;
  vnet_hw_interface_t *hi;
  vnet_sw_interface_t *si;
  vnet_interface_output_runtime_t *rt = (void *) node->runtime_data;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE];
  u32 n_bytes, n_buffers = frame->n_vectors;
  u32 config_index = ~0;
  u32 sw_if_index = rt->sw_if_index;
  u32 next_index = VNET_INTERFACE_OUTPUT_NEXT_TX;
  u32 ti = vm->thread_index;
  u8 arc = im->output_feature_arc_index;
  int arc_or_subif = 0;
  int do_tx_offloads = 0;
  u32 *from;

  if (node->flags & VLIB_NODE_FLAG_TRACE)
    vnet_interface_output_trace (vm, node, frame, n_buffers);

  from = vlib_frame_vector_args (frame);

  if (rt->is_deleted)
    return vlib_error_drop_buffers (
      vm, node, from,
      /* buffer stride */ 1, n_buffers, VNET_INTERFACE_OUTPUT_NEXT_DROP,
      node->node_index, VNET_INTERFACE_OUTPUT_ERROR_INTERFACE_DELETED);

  vnet_interface_pcap_tx_trace (vm, node, frame,
				0 /* sw_if_index_from_buffer */ );

  vlib_get_buffers (vm, from, bufs, n_buffers);

  si = vnet_get_sw_interface (vnm, sw_if_index);
  hi = vnet_get_sup_hw_interface (vnm, sw_if_index);

  if (!(si->flags & VNET_SW_INTERFACE_FLAG_ADMIN_UP) ||
      !(hi->flags & VNET_HW_INTERFACE_FLAG_LINK_UP))
    {
      vlib_simple_counter_main_t *cm;

      cm = vec_elt_at_index (vnm->interface_main.sw_if_counters,
			     VNET_INTERFACE_COUNTER_TX_ERROR);
      vlib_increment_simple_counter (cm, ti, sw_if_index, n_buffers);

      return vlib_error_drop_buffers (
	vm, node, from,
	/* buffer stride */ 1, n_buffers, VNET_INTERFACE_OUTPUT_NEXT_DROP,
	node->node_index, VNET_INTERFACE_OUTPUT_ERROR_INTERFACE_DOWN);
    }

  /* interface-output feature arc handling */
  if (PREDICT_FALSE (vnet_have_features (arc, sw_if_index)))
    {
      vnet_feature_config_main_t *fcm;
      fcm = vnet_feature_get_config_main (arc);
      config_index = vnet_get_feature_config_index (arc, sw_if_index);
      vnet_get_config_data (&fcm->config_main, &config_index, &next_index, 0);
      arc_or_subif = 1;
    }
  else if (hash_elts (hi->sub_interface_sw_if_index_by_id))
    arc_or_subif = 1;

  ccm = im->combined_sw_if_counters + VNET_INTERFACE_COUNTER_TX;

  if ((hi->caps & VNET_HW_INTERFACE_CAP_SUPPORTS_TX_CKSUM) == 0)
    do_tx_offloads = 1;

  if (do_tx_offloads == 0 && arc_or_subif == 0)
    n_bytes = vnet_interface_output_node_inline (
      vm, sw_if_index, ccm, bufs, config_index, arc, n_buffers, 0, 0);
  else if (do_tx_offloads == 0 && arc_or_subif == 1)
    n_bytes = vnet_interface_output_node_inline (
      vm, sw_if_index, ccm, bufs, config_index, arc, n_buffers, 0, 1);
  else if (do_tx_offloads == 1 && arc_or_subif == 0)
    n_bytes = vnet_interface_output_node_inline (
      vm, sw_if_index, ccm, bufs, config_index, arc, n_buffers, 1, 0);
  else
    n_bytes = vnet_interface_output_node_inline (
      vm, sw_if_index, ccm, bufs, config_index, arc, n_buffers, 1, 1);

  vlib_buffer_enqueue_to_single_next (vm, node, vlib_frame_vector_args (frame),
				      next_index, frame->n_vectors);

  /* Update main interface stats. */
  vlib_increment_combined_counter (ccm, ti, sw_if_index, n_buffers, n_bytes);
  return n_buffers;
}

VLIB_REGISTER_NODE (vnet_interface_output_node) = {
  .name = "interface-output-template",
  .vector_size = sizeof (u32),
};

/* Use buffer's sw_if_index[VLIB_TX] to choose output interface. */
VLIB_NODE_FN (vnet_per_buffer_interface_output_node) (vlib_main_t * vm,
						      vlib_node_runtime_t *
						      node,
						      vlib_frame_t * frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  u32 n_left_to_next, *from, *to_next;
  u32 n_left_from, next_index;

  vnet_interface_pcap_tx_trace (vm, node, frame,
				1 /* sw_if_index_from_buffer */ );

  n_left_from = frame->n_vectors;

  from = vlib_frame_vector_args (frame);
  next_index = node->cached_next_index;

  while (n_left_from > 0)
    {
      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      while (n_left_from >= 4 && n_left_to_next >= 2)
	{
	  u32 bi0, bi1, next0, next1;
	  vlib_buffer_t *b0, *b1;
	  vnet_hw_interface_t *hi0, *hi1;

	  /* Prefetch next iteration. */
	  vlib_prefetch_buffer_with_index (vm, from[2], LOAD);
	  vlib_prefetch_buffer_with_index (vm, from[3], LOAD);

	  bi0 = from[0];
	  bi1 = from[1];
	  to_next[0] = bi0;
	  to_next[1] = bi1;
	  from += 2;
	  to_next += 2;
	  n_left_to_next -= 2;
	  n_left_from -= 2;

	  b0 = vlib_get_buffer (vm, bi0);
	  b1 = vlib_get_buffer (vm, bi1);

	  hi0 =
	    vnet_get_sup_hw_interface (vnm,
				       vnet_buffer (b0)->sw_if_index
				       [VLIB_TX]);
	  hi1 =
	    vnet_get_sup_hw_interface (vnm,
				       vnet_buffer (b1)->sw_if_index
				       [VLIB_TX]);

	  next0 = hi0->output_node_next_index;
	  next1 = hi1->output_node_next_index;

	  vlib_validate_buffer_enqueue_x2 (vm, node, next_index, to_next,
					   n_left_to_next, bi0, bi1, next0,
					   next1);
	}

      while (n_left_from > 0 && n_left_to_next > 0)
	{
	  u32 bi0, next0;
	  vlib_buffer_t *b0;
	  vnet_hw_interface_t *hi0;

	  bi0 = from[0];
	  to_next[0] = bi0;
	  from += 1;
	  to_next += 1;
	  n_left_to_next -= 1;
	  n_left_from -= 1;

	  b0 = vlib_get_buffer (vm, bi0);

	  hi0 =
	    vnet_get_sup_hw_interface (vnm,
				       vnet_buffer (b0)->sw_if_index
				       [VLIB_TX]);

	  next0 = hi0->output_node_next_index;

	  vlib_validate_buffer_enqueue_x1 (vm, node, next_index, to_next,
					   n_left_to_next, bi0, next0);
	}

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  return frame->n_vectors;
}

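/* Trace record for error-drop / error-punt: the RX sw_if_index plus,
   when the packet could be decoded, its mactype and IP src/dst
   addresses. */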
typedef struct vnet_error_trace_t_
{
  u32 sw_if_index;
  i8 details_valid;
  u8 is_ip6;
  u8 pad[2];
  u16 mactype;
  ip46_address_t src, dst;
} vnet_error_trace_t;

static u8 *
format_vnet_error_trace (u8 * s, va_list * va)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*va, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*va, vlib_node_t *);
  vnet_error_trace_t *t = va_arg (*va, vnet_error_trace_t *);

  /* Normal, non-catchup trace */
  if (t->details_valid == 0)
    {
      s = format (s, "rx:%U", format_vnet_sw_if_index_name,
		  vnet_get_main (), t->sw_if_index);
    }
  else if (t->details_valid == 1)
    {
      /* The trace capture code didn't understand the mactype */
      s = format (s, "mactype 0x%4x (not decoded)", t->mactype);
    }
  else if (t->details_valid == 2)
    {
      /* Dump the src/dst addresses */
      if (t->is_ip6 == 0)
	s = format (s, "IP4: %U -> %U",
		    format_ip4_address, &t->src.ip4,
		    format_ip4_address, &t->dst.ip4);
      else
	s = format (s, "IP6: %U -> %U",
		    format_ip6_address, &t->src.ip6,
		    format_ip6_address, &t->dst.ip6);
    }
  return s;
}

static void
interface_trace_buffers (vlib_main_t * vm,
			 vlib_node_runtime_t * node, vlib_frame_t * frame)
{
  u32 n_left, *buffers;

  buffers = vlib_frame_vector_args (frame);
  n_left = frame->n_vectors;

  while (n_left >= 4)
    {
      u32 bi0, bi1;
      vlib_buffer_t *b0, *b1;
      vnet_error_trace_t *t0, *t1;

      /* Prefetch next iteration. */
      vlib_prefetch_buffer_with_index (vm, buffers[2], LOAD);
      vlib_prefetch_buffer_with_index (vm, buffers[3], LOAD);

      bi0 = buffers[0];
      bi1 = buffers[1];

      b0 = vlib_get_buffer (vm, bi0);
      b1 = vlib_get_buffer (vm, bi1);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
	{
	  t0 = vlib_add_trace (vm, node, b0,
			       STRUCT_OFFSET_OF (vnet_error_trace_t, pad));
	  t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
	  t0->details_valid = 0;
	}
      if (b1->flags & VLIB_BUFFER_IS_TRACED)
	{
	  t1 = vlib_add_trace (vm, node, b1,
			       STRUCT_OFFSET_OF (vnet_error_trace_t, pad));
	  t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_RX];
	  t1->details_valid = 0;
	}
      buffers += 2;
      n_left -= 2;
    }

  while (n_left >= 1)
    {
      u32 bi0;
      vlib_buffer_t *b0;
      vnet_error_trace_t *t0;

      bi0 = buffers[0];

      b0 = vlib_get_buffer (vm, bi0);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
	{
	  t0 = vlib_add_trace (vm, node, b0,
			       STRUCT_OFFSET_OF (vnet_error_trace_t, pad));
	  t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
	  t0->details_valid = 0;
	}
      buffers += 1;
      n_left -= 1;
    }
}

typedef enum
{
  VNET_ERROR_DISPOSITION_DROP,
  VNET_ERROR_DISPOSITION_PUNT,
  VNET_ERROR_N_DISPOSITION,
} vnet_error_disposition_t;

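/* Build a catch-up trace record for a packet arriving at error-drop
   without an upstream trace: rewind to the L2 header when possible and
   record the mactype and the IP4/IP6 addresses. */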
static void
drop_catchup_trace (vlib_main_t * vm,
		    vlib_node_runtime_t * node, vlib_buffer_t * b)
{
  /* Can we safely rewind the buffer? If not, fagedaboudit */
  if (b->flags & VNET_BUFFER_F_L2_HDR_OFFSET_VALID)
    {
      vnet_error_trace_t *t;
      ip4_header_t *ip4;
      ip6_header_t *ip6;
      ethernet_header_t *eh;
      i16 delta;

      t = vlib_add_trace (vm, node, b, sizeof (*t));
      delta = vnet_buffer (b)->l2_hdr_offset - b->current_data;
      vlib_buffer_advance (b, delta);

      eh = vlib_buffer_get_current (b);
      /* Save mactype */
      t->mactype = clib_net_to_host_u16 (eh->type);
      t->details_valid = 1;
      switch (t->mactype)
	{
	case ETHERNET_TYPE_IP4:
	  ip4 = (void *) (eh + 1);
	  t->details_valid = 2;
	  t->is_ip6 = 0;
	  t->src.ip4.as_u32 = ip4->src_address.as_u32;
	  t->dst.ip4.as_u32 = ip4->dst_address.as_u32;
	  break;

	case ETHERNET_TYPE_IP6:
	  ip6 = (void *) (eh + 1);
	  t->details_valid = 2;
	  t->is_ip6 = 1;
	  clib_memcpy_fast (t->src.as_u8, ip6->src_address.as_u8,
			    sizeof (ip6_address_t));
	  clib_memcpy_fast (t->dst.as_u8, ip6->dst_address.as_u8,
			    sizeof (ip6_address_t));
	  break;

	default:
	  /* Dunno, do nothing, leave details_valid alone */
	  break;
	}
      /* Restore current data (probably unnecessary) */
      vlib_buffer_advance (b, -delta);
    }
}

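/*
 * Shared worker for the error-drop and error-punt nodes: optionally trace
 * packets, count drops/punts per RX interface (and per super-interface
 * for sub-interfaces) using runs of equal sw_if_indices, then enqueue
 * everything to next index 0 ("drop" or "punt").
 */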
static_always_inline uword
interface_drop_punt (vlib_main_t * vm,
		     vlib_node_runtime_t * node,
		     vlib_frame_t * frame,
		     vnet_error_disposition_t disposition)
{
  u32 *from, n_left, thread_index, *sw_if_index;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b;
  u32 sw_if_indices[VLIB_FRAME_SIZE];
  vlib_simple_counter_main_t *cm;
  u16 nexts[VLIB_FRAME_SIZE];
  u32 n_trace;
  vnet_main_t *vnm;

  vnm = vnet_get_main ();
  thread_index = vm->thread_index;
  from = vlib_frame_vector_args (frame);
  n_left = frame->n_vectors;
  b = bufs;
  sw_if_index = sw_if_indices;

  vlib_get_buffers (vm, from, bufs, n_left);

  /* "trace add error-drop NNN?" */
  if (PREDICT_FALSE ((n_trace = vlib_get_trace_count (vm, node))))
    {
      /* If pkts aren't otherwise traced... */
      if ((node->flags & VLIB_NODE_FLAG_TRACE) == 0)
	{
	  /* Trace them from here */
	  node->flags |= VLIB_NODE_FLAG_TRACE;
	  while (n_trace && n_left)
	    {
	      if (PREDICT_TRUE
		  (vlib_trace_buffer (vm, node, 0 /* next_index */ , b[0],
				      0 /* follow chain */ )))
		{
		  /*
		   * Here we have a wireshark dissector problem.
		   * Packets may be well-formed, or not. We
		   * must not blow chunks in any case.
		   *
		   * Try to produce trace records which will help
		   * folks understand what's going on.
		   */
		  drop_catchup_trace (vm, node, b[0]);
		  n_trace--;
		}
	      n_left--;
	      b++;
	    }
	}

      vlib_set_trace_count (vm, node, n_trace);
      b = bufs;
      n_left = frame->n_vectors;
    }

  if (node->flags & VLIB_NODE_FLAG_TRACE)
    interface_trace_buffers (vm, node, frame);

  /* All going to drop regardless, this is just a counting exercise */
  clib_memset (nexts, 0, sizeof (nexts));

  cm = vec_elt_at_index (vnm->interface_main.sw_if_counters,
			 (disposition == VNET_ERROR_DISPOSITION_PUNT
			  ? VNET_INTERFACE_COUNTER_PUNT
			  : VNET_INTERFACE_COUNTER_DROP));

  /* collect the array of interfaces first ... */
  while (n_left >= 4)
    {
      if (n_left >= 12)
	{
	  /* Prefetch 8 ahead - there's not much going on in each iteration */
	  vlib_prefetch_buffer_header (b[4], LOAD);
	  vlib_prefetch_buffer_header (b[5], LOAD);
	  vlib_prefetch_buffer_header (b[6], LOAD);
	  vlib_prefetch_buffer_header (b[7], LOAD);
	}
      sw_if_index[0] = vnet_buffer (b[0])->sw_if_index[VLIB_RX];
      sw_if_index[1] = vnet_buffer (b[1])->sw_if_index[VLIB_RX];
      sw_if_index[2] = vnet_buffer (b[2])->sw_if_index[VLIB_RX];
      sw_if_index[3] = vnet_buffer (b[3])->sw_if_index[VLIB_RX];

      sw_if_index += 4;
      n_left -= 4;
      b += 4;
    }
  while (n_left)
    {
      sw_if_index[0] = vnet_buffer (b[0])->sw_if_index[VLIB_RX];

      sw_if_index += 1;
      n_left -= 1;
      b += 1;
    }

  /* ... then count against them in blocks */
  n_left = frame->n_vectors;

  while (n_left)
    {
      vnet_sw_interface_t *sw_if0;
      u16 off, count;

      off = frame->n_vectors - n_left;

      sw_if_index = sw_if_indices + off;

      count = clib_count_equal_u32 (sw_if_index, n_left);
      n_left -= count;

      vlib_increment_simple_counter (cm, thread_index, sw_if_index[0], count);

      /* Increment super-interface drop/punt counters for
	 sub-interfaces. */
      sw_if0 = vnet_get_sw_interface (vnm, sw_if_index[0]);
      if (sw_if0->sup_sw_if_index != sw_if_index[0])
	vlib_increment_simple_counter
	  (cm, thread_index, sw_if0->sup_sw_if_index, count);
    }

  vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);

  return frame->n_vectors;
}

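/* When drop pcap capture is enabled, add each dropped packet to the
   capture, rewound to its L2 header where possible and, if the last
   buffer has room, annotated with "<node-name>: <error-string>". */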
static inline void
pcap_drop_trace (vlib_main_t * vm,
		 vnet_interface_main_t * im,
		 vnet_pcap_t * pp, vlib_frame_t * f)
{
  u32 *from;
  u32 n_left = f->n_vectors;
  vlib_buffer_t *b0, *p1;
  u32 bi0;
  i16 save_current_data;
  u16 save_current_length;
  vlib_error_main_t *em = &vm->error_main;

  from = vlib_frame_vector_args (f);

  while (n_left > 0)
    {
      if (PREDICT_TRUE (n_left > 1))
	{
	  p1 = vlib_get_buffer (vm, from[1]);
	  vlib_prefetch_buffer_header (p1, LOAD);
	}

      bi0 = from[0];
      b0 = vlib_get_buffer (vm, bi0);
      from++;
      n_left--;

      /* See if we're pointedly ignoring this specific error */
      if (im->pcap_drop_filter_hash
	  && hash_get (im->pcap_drop_filter_hash, b0->error))
	continue;

      if (!vnet_is_packet_pcaped (pp, b0, ~0))
	continue;		/* not matching, skip */

      /* Trace all drops, or drops received on a specific interface */
      save_current_data = b0->current_data;
      save_current_length = b0->current_length;

      /*
       * Typically we need to rewind the buffer: if l2_hdr_offset is
       * valid, rewind to the start of the L2 header (which may not be
       * the buffer start if we popped VLAN tags); otherwise rewind to
       * the buffer start and hope for the best.
       */
      if (b0->flags & VNET_BUFFER_F_L2_HDR_OFFSET_VALID)
	{
	  if (b0->current_data > vnet_buffer (b0)->l2_hdr_offset)
	    vlib_buffer_advance (b0, vnet_buffer (b0)->l2_hdr_offset -
				 b0->current_data);
	}
      else if (b0->current_data > 0)
	{
	  vlib_buffer_advance (b0, (word) -b0->current_data);
	}

      {
	vlib_buffer_t *last = b0;
	u32 error_node_index;
	int drop_string_len;
	vlib_node_t *n;
	/* Length of the error string */
	int error_string_len =
	  clib_strnlen (em->counters_heap[b0->error].name, 128);

	/* Dig up the drop node */
	error_node_index = vm->node_main.node_by_error[b0->error];
	n = vlib_get_node (vm, error_node_index);

	/* Length of full drop string, w/ "nodename: " prepended */
	drop_string_len = error_string_len + vec_len (n->name) + 2;

	/* Find the last buffer in the chain */
	while (last->flags & VLIB_BUFFER_NEXT_PRESENT)
	  last = vlib_get_buffer (vm, last->next_buffer);

	/*
	 * Append <nodename>: <error-string> to the capture,
	 * only if we can do that without allocating a new buffer.
	 */
	if (PREDICT_TRUE ((last->current_data + last->current_length) <
			  (VLIB_BUFFER_DEFAULT_DATA_SIZE - drop_string_len)))
	  {
	    clib_memcpy_fast (last->data + last->current_data +
			      last->current_length,
			      n->name, vec_len (n->name));
	    clib_memcpy_fast (last->data + last->current_data +
			      last->current_length + vec_len (n->name),
			      ": ", 2);
	    clib_memcpy_fast (last->data + last->current_data +
			      last->current_length + vec_len (n->name) + 2,
			      em->counters_heap[b0->error].name,
			      error_string_len);
	    last->current_length += drop_string_len;
	    b0->flags &= ~(VLIB_BUFFER_TOTAL_LENGTH_VALID);
	    pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);
	    last->current_length -= drop_string_len;
	    b0->current_data = save_current_data;
	    b0->current_length = save_current_length;
	    continue;
	  }
      }

      /*
       * Didn't have space in the last buffer, here's the dropped
       * packet as-is
       */
      pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);

      b0->current_data = save_current_data;
      b0->current_length = save_current_length;
    }
}

#ifndef CLIB_MARCH_VARIANT
void
vnet_pcap_drop_trace_filter_add_del (u32 error_index, int is_add)
{
  vnet_interface_main_t *im = &vnet_get_main ()->interface_main;

  if (im->pcap_drop_filter_hash == 0)
    im->pcap_drop_filter_hash = hash_create (0, sizeof (uword));

  if (is_add)
    hash_set (im->pcap_drop_filter_hash, error_index, 1);
  else
    hash_unset (im->pcap_drop_filter_hash, error_index);
}
#endif /* CLIB_MARCH_VARIANT */

VLIB_NODE_FN (interface_drop) (vlib_main_t * vm,
			       vlib_node_runtime_t * node,
			       vlib_frame_t * frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  vnet_interface_main_t *im = &vnet_get_main ()->interface_main;
  vnet_pcap_t *pp = &vnm->pcap;

  if (PREDICT_FALSE (pp->pcap_drop_enable))
    pcap_drop_trace (vm, im, pp, frame);

  return interface_drop_punt (vm, node, frame, VNET_ERROR_DISPOSITION_DROP);
}

VLIB_NODE_FN (interface_punt) (vlib_main_t * vm,
			       vlib_node_runtime_t * node,
			       vlib_frame_t * frame)
{
  return interface_drop_punt (vm, node, frame, VNET_ERROR_DISPOSITION_PUNT);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (interface_drop) = {
  .name = "error-drop",
  .vector_size = sizeof (u32),
  .format_trace = format_vnet_error_trace,
  .flags = VLIB_NODE_FLAG_TRACE_SUPPORTED,
  .n_next_nodes = 1,
  .next_nodes = {
    [0] = "drop",
  },
};
/* *INDENT-ON* */

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (interface_punt) = {
  .name = "error-punt",
  .vector_size = sizeof (u32),
  .format_trace = format_vnet_error_trace,
  .flags = VLIB_NODE_FLAG_TRACE_SUPPORTED,
  .n_next_nodes = 1,
  .next_nodes = {
    [0] = "punt",
  },
};
/* *INDENT-ON* */

VLIB_REGISTER_NODE (vnet_per_buffer_interface_output_node) = {
  .name = "interface-output",
  .vector_size = sizeof (u32),
};

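/* Last node in the interface-output feature arc: map each buffer's TX
   sw_if_index through im->if_out_arc_end_next_index_by_sw_if_index to
   pick its next node and enqueue the frame accordingly. */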
VLIB_NODE_FN (vnet_interface_output_arc_end_node)
(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  vnet_interface_main_t *im = &vnm->interface_main;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
  u16 nexts[VLIB_FRAME_SIZE], *next = nexts;
  u32 *from, n_left;
  u16 *lt = im->if_out_arc_end_next_index_by_sw_if_index;

  from = vlib_frame_vector_args (frame);
  n_left = frame->n_vectors;
  vlib_get_buffers (vm, from, bufs, n_left);

  while (n_left >= 8)
    {
      vlib_prefetch_buffer_header (b[4], LOAD);
      vlib_prefetch_buffer_header (b[5], LOAD);
      vlib_prefetch_buffer_header (b[6], LOAD);
      vlib_prefetch_buffer_header (b[7], LOAD);
      next[0] = vec_elt (lt, vnet_buffer (b[0])->sw_if_index[VLIB_TX]);
      next[1] = vec_elt (lt, vnet_buffer (b[1])->sw_if_index[VLIB_TX]);
      next[2] = vec_elt (lt, vnet_buffer (b[2])->sw_if_index[VLIB_TX]);
      next[3] = vec_elt (lt, vnet_buffer (b[3])->sw_if_index[VLIB_TX]);

      b += 4;
      next += 4;
      n_left -= 4;
    }

  while (n_left)
    {
      next[0] = vec_elt (lt, vnet_buffer (b[0])->sw_if_index[VLIB_TX]);
      b++;
      next++;
      n_left--;
    }

  vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);
  return frame->n_vectors;
}

VLIB_REGISTER_NODE (vnet_interface_output_arc_end_node) = {
  .name = "interface-output-arc-end",
  .vector_size = sizeof (u32),
  .n_next_nodes = 1,
  .next_nodes = {
    [0] = "error-drop",
  },
};

VNET_FEATURE_ARC_INIT (interface_output, static) = {
  .arc_name = "interface-output",
  .start_nodes = VNET_FEATURES (0),
  .last_in_arc = "interface-output-arc-end",
  .arc_index_ptr = &vnet_main.interface_main.output_feature_arc_index,
};

VNET_FEATURE_INIT (span_tx, static) = {
  .arc_name = "interface-output",
  .node_name = "span-output",
  .runs_before = VNET_FEATURES ("interface-output-arc-end"),
};

VNET_FEATURE_INIT (ipsec_if_tx, static) = {
  .arc_name = "interface-output",
  .node_name = "ipsec-if-output",
  .runs_before = VNET_FEATURES ("interface-output-arc-end"),
};

VNET_FEATURE_INIT (interface_output_arc_end, static) = {
  .arc_name = "interface-output",
  .node_name = "interface-output-arc-end",
  .runs_before = 0,
};

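/* On hardware interface creation, wire the interface's output node into
   "interface-output" so per-buffer dispatch via output_node_next_index
   works. */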
#ifndef CLIB_MARCH_VARIANT
clib_error_t *
vnet_per_buffer_interface_output_hw_interface_add_del (vnet_main_t * vnm,
							u32 hw_if_index,
							u32 is_create)
{
  vnet_hw_interface_t *hi = vnet_get_hw_interface (vnm, hw_if_index);
  u32 next_index;

  if (hi->output_node_index == 0)
    return 0;

  next_index = vlib_node_add_next
    (vnm->vlib_main, vnet_per_buffer_interface_output_node.index,
     hi->output_node_index);
  hi->output_node_next_index = next_index;

  return 0;
}

VNET_HW_INTERFACE_ADD_DEL_FUNCTION
  (vnet_per_buffer_interface_output_hw_interface_add_del);

void
vnet_set_interface_output_node (vnet_main_t * vnm,
				u32 hw_if_index, u32 node_index)
{
  ASSERT (node_index);
  vnet_hw_interface_t *hi = vnet_get_hw_interface (vnm, hw_if_index);
  u32 next_index = vlib_node_add_next
    (vnm->vlib_main, vnet_per_buffer_interface_output_node.index, node_index);
  hi->output_node_next_index = next_index;
  hi->output_node_index = node_index;
}
#endif /* CLIB_MARCH_VARIANT */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */