/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * interface_output.c: interface output node
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include <vnet/vnet.h>
#include <vnet/ip/icmp46_packet.h>
#include <vnet/ip/ip4.h>
#include <vnet/ip/ip6.h>
#include <vnet/udp/udp_packet.h>
#include <vnet/feature/feature.h>
#include <vnet/classify/trace_classify.h>
#include <vnet/interface_output.h>

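/*
 * Per-packet trace record captured by the interface output path: the TX
 * sw_if_index, the buffer flags, and the leading packet bytes, with the
 * data array sized so the whole record is 128 bytes.
 */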
typedef struct
{
  u32 sw_if_index;
  u32 flags;
  u8 data[128 - 2 * sizeof (u32)];
}
interface_output_trace_t;

#ifndef CLIB_MARCH_VARIANT
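/*
 * Trace formatter for the interface output nodes: prints the TX interface
 * name (or the raw sw_if_index if the interface has since been deleted),
 * then the captured packet data via the node's format_buffer hook, falling
 * back to a hex dump.
 */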
u8 *
format_vnet_interface_output_trace (u8 * s, va_list * va)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*va, vlib_main_t *);
  vlib_node_t *node = va_arg (*va, vlib_node_t *);
  interface_output_trace_t *t = va_arg (*va, interface_output_trace_t *);
  vnet_main_t *vnm = vnet_get_main ();
  vnet_sw_interface_t *si;
  u32 indent;

  if (t->sw_if_index != (u32) ~ 0)
    {
      indent = format_get_indent (s);

      if (pool_is_free_index
          (vnm->interface_main.sw_interfaces, t->sw_if_index))
        {
          /* the interface may have been deleted by the time the trace is printed */
          s = format (s, "sw_if_index: %d ", t->sw_if_index);
        }
      else
        {
          si = vnet_get_sw_interface (vnm, t->sw_if_index);
          s = format (s, "%U ", format_vnet_sw_interface_name, vnm, si,
                      t->flags);
        }
      s = format (s, "\n%U%U", format_white_space, indent,
                  node->format_buffer ? node->format_buffer : format_hex_bytes,
                  t->data, sizeof (t->data));
    }
  return s;
}

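/*
 * Walk the frame and add a trace record for every buffer that has
 * VLIB_BUFFER_IS_TRACED set, copying the TX sw_if_index, the buffer flags
 * and the leading packet bytes. Buffers are processed two at a time with
 * a prefetch of the next pair.
 */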
static void
vnet_interface_output_trace (vlib_main_t * vm,
                             vlib_node_runtime_t * node,
                             vlib_frame_t * frame, uword n_buffers)
{
  u32 n_left, *from;

  n_left = n_buffers;
  from = vlib_frame_vector_args (frame);

  while (n_left >= 4)
    {
      u32 bi0, bi1;
      vlib_buffer_t *b0, *b1;
      interface_output_trace_t *t0, *t1;

      /* Prefetch next iteration. */
      vlib_prefetch_buffer_with_index (vm, from[2], LOAD);
      vlib_prefetch_buffer_with_index (vm, from[3], LOAD);

      bi0 = from[0];
      bi1 = from[1];

      b0 = vlib_get_buffer (vm, bi0);
      b1 = vlib_get_buffer (vm, bi1);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
        {
          t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
          t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_TX];
          t0->flags = b0->flags;
          clib_memcpy_fast (t0->data, vlib_buffer_get_current (b0),
                            sizeof (t0->data));
        }
      if (b1->flags & VLIB_BUFFER_IS_TRACED)
        {
          t1 = vlib_add_trace (vm, node, b1, sizeof (t1[0]));
          t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_TX];
          t1->flags = b1->flags;
          clib_memcpy_fast (t1->data, vlib_buffer_get_current (b1),
                            sizeof (t1->data));
        }
      from += 2;
      n_left -= 2;
    }

  while (n_left >= 1)
    {
      u32 bi0;
      vlib_buffer_t *b0;
      interface_output_trace_t *t0;

      bi0 = from[0];

      b0 = vlib_get_buffer (vm, bi0);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
        {
          t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
          t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_TX];
          t0->flags = b0->flags;
          clib_memcpy_fast (t0->data, vlib_buffer_get_current (b0),
                            sizeof (t0->data));
        }
      from += 1;
      n_left -= 1;
    }
}

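/*
 * Common body of the per-interface output node. It drops the whole frame
 * if the interface has been deleted or is admin/link down, redirects to the
 * first feature node when the interface-output feature arc is enabled,
 * maintains per-interface (and per-subinterface) TX packet/byte counters,
 * and, when do_tx_offloads is set, computes IP/TCP/UDP checksums for
 * buffers that requested checksum offload before handing the frame to the
 * device TX node.
 */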
static_always_inline uword
vnet_interface_output_node_inline (vlib_main_t * vm,
                                   vlib_node_runtime_t * node,
                                   vlib_frame_t * frame,
                                   vnet_main_t * vnm,
                                   vnet_hw_interface_t * hi,
                                   int do_tx_offloads)
{
  vnet_interface_output_runtime_t *rt = (void *) node->runtime_data;
  vnet_sw_interface_t *si;
  u32 n_left_to_tx, *from, *from_end, *to_tx;
  u32 n_bytes, n_buffers, n_packets;
  u32 n_bytes_b0, n_bytes_b1, n_bytes_b2, n_bytes_b3;
  u32 thread_index = vm->thread_index;
  vnet_interface_main_t *im = &vnm->interface_main;
  u32 next_index = VNET_INTERFACE_OUTPUT_NEXT_TX;
  u32 current_config_index = ~0;
  u8 arc = im->output_feature_arc_index;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;

  n_buffers = frame->n_vectors;

  if (node->flags & VLIB_NODE_FLAG_TRACE)
    vnet_interface_output_trace (vm, node, frame, n_buffers);

  from = vlib_frame_vector_args (frame);
  vlib_get_buffers (vm, from, b, n_buffers);

  if (rt->is_deleted)
    return vlib_error_drop_buffers (vm, node, from,
                                    /* buffer stride */ 1,
                                    n_buffers,
                                    VNET_INTERFACE_OUTPUT_NEXT_DROP,
                                    node->node_index,
                                    VNET_INTERFACE_OUTPUT_ERROR_INTERFACE_DELETED);

  si = vnet_get_sw_interface (vnm, rt->sw_if_index);
  hi = vnet_get_sup_hw_interface (vnm, rt->sw_if_index);
  if (!(si->flags & VNET_SW_INTERFACE_FLAG_ADMIN_UP) ||
      !(hi->flags & VNET_HW_INTERFACE_FLAG_LINK_UP))
    {
      vlib_simple_counter_main_t *cm;

      cm = vec_elt_at_index (vnm->interface_main.sw_if_counters,
                             VNET_INTERFACE_COUNTER_TX_ERROR);
      vlib_increment_simple_counter (cm, thread_index,
                                     rt->sw_if_index, n_buffers);

      return vlib_error_drop_buffers (vm, node, from,
                                      /* buffer stride */ 1,
                                      n_buffers,
                                      VNET_INTERFACE_OUTPUT_NEXT_DROP,
                                      node->node_index,
                                      VNET_INTERFACE_OUTPUT_ERROR_INTERFACE_DOWN);
    }

  from_end = from + n_buffers;

  /* Total byte count of all buffers. */
  n_bytes = 0;
  n_packets = 0;

  /* interface-output feature arc handling */
  if (PREDICT_FALSE (vnet_have_features (arc, rt->sw_if_index)))
    {
      vnet_feature_config_main_t *fcm;
      fcm = vnet_feature_get_config_main (arc);
      current_config_index = vnet_get_feature_config_index (arc,
                                                            rt->sw_if_index);
      vnet_get_config_data (&fcm->config_main, &current_config_index,
                            &next_index, 0);
    }

  while (from < from_end)
    {
      /* Get new next frame since previous incomplete frame may have less
         than VNET_FRAME_SIZE vectors in it. */
      vlib_get_new_next_frame (vm, node, next_index, to_tx, n_left_to_tx);

      while (from + 8 <= from_end && n_left_to_tx >= 4)
        {
          u32 bi0, bi1, bi2, bi3;
          u32 tx_swif0, tx_swif1, tx_swif2, tx_swif3;
          u32 or_flags;

          /* Prefetch next iteration. */
          vlib_prefetch_buffer_header (b[4], LOAD);
          vlib_prefetch_buffer_header (b[5], LOAD);
          vlib_prefetch_buffer_header (b[6], LOAD);
          vlib_prefetch_buffer_header (b[7], LOAD);

          bi0 = from[0];
          bi1 = from[1];
          bi2 = from[2];
          bi3 = from[3];
          to_tx[0] = bi0;
          to_tx[1] = bi1;
          to_tx[2] = bi2;
          to_tx[3] = bi3;

          or_flags = b[0]->flags | b[1]->flags | b[2]->flags | b[3]->flags;

          from += 4;
          to_tx += 4;
          n_left_to_tx -= 4;

          /* Be grumpy about zero length buffers for benefit of
             driver tx function. */
          ASSERT (b[0]->current_length > 0);
          ASSERT (b[1]->current_length > 0);
          ASSERT (b[2]->current_length > 0);
          ASSERT (b[3]->current_length > 0);

          n_bytes_b0 = vlib_buffer_length_in_chain (vm, b[0]);
          n_bytes_b1 = vlib_buffer_length_in_chain (vm, b[1]);
          n_bytes_b2 = vlib_buffer_length_in_chain (vm, b[2]);
          n_bytes_b3 = vlib_buffer_length_in_chain (vm, b[3]);
          tx_swif0 = vnet_buffer (b[0])->sw_if_index[VLIB_TX];
          tx_swif1 = vnet_buffer (b[1])->sw_if_index[VLIB_TX];
          tx_swif2 = vnet_buffer (b[2])->sw_if_index[VLIB_TX];
          tx_swif3 = vnet_buffer (b[3])->sw_if_index[VLIB_TX];

          n_bytes += n_bytes_b0 + n_bytes_b1;
          n_bytes += n_bytes_b2 + n_bytes_b3;
          n_packets += 4;

          if (PREDICT_FALSE (current_config_index != ~0))
            {
              vnet_buffer (b[0])->feature_arc_index = arc;
              vnet_buffer (b[1])->feature_arc_index = arc;
              vnet_buffer (b[2])->feature_arc_index = arc;
              vnet_buffer (b[3])->feature_arc_index = arc;
              b[0]->current_config_index = current_config_index;
              b[1]->current_config_index = current_config_index;
              b[2]->current_config_index = current_config_index;
              b[3]->current_config_index = current_config_index;
            }

          /* update vlan subif tx counts, if required */
          if (PREDICT_FALSE (tx_swif0 != rt->sw_if_index))
            {
              vlib_increment_combined_counter (im->combined_sw_if_counters +
                                               VNET_INTERFACE_COUNTER_TX,
                                               thread_index, tx_swif0, 1,
                                               n_bytes_b0);
            }

          if (PREDICT_FALSE (tx_swif1 != rt->sw_if_index))
            {
              vlib_increment_combined_counter (im->combined_sw_if_counters +
                                               VNET_INTERFACE_COUNTER_TX,
                                               thread_index, tx_swif1, 1,
                                               n_bytes_b1);
            }

          if (PREDICT_FALSE (tx_swif2 != rt->sw_if_index))
            {
              vlib_increment_combined_counter (im->combined_sw_if_counters +
                                               VNET_INTERFACE_COUNTER_TX,
                                               thread_index, tx_swif2, 1,
                                               n_bytes_b2);
            }
          if (PREDICT_FALSE (tx_swif3 != rt->sw_if_index))
            {
              vlib_increment_combined_counter (im->combined_sw_if_counters +
                                               VNET_INTERFACE_COUNTER_TX,
                                               thread_index, tx_swif3, 1,
                                               n_bytes_b3);
            }

          if (do_tx_offloads)
            {
              if (or_flags &
                  (VNET_BUFFER_F_OFFLOAD_TCP_CKSUM |
                   VNET_BUFFER_F_OFFLOAD_UDP_CKSUM |
                   VNET_BUFFER_F_OFFLOAD_IP_CKSUM))
                {
                  vnet_calc_checksums_inline
                    (vm, b[0],
                     b[0]->flags & VNET_BUFFER_F_IS_IP4,
                     b[0]->flags & VNET_BUFFER_F_IS_IP6, 1 /* with gso */ );
                  vnet_calc_checksums_inline
                    (vm, b[1],
                     b[1]->flags & VNET_BUFFER_F_IS_IP4,
                     b[1]->flags & VNET_BUFFER_F_IS_IP6, 1 /* with gso */ );
                  vnet_calc_checksums_inline
                    (vm, b[2],
                     b[2]->flags & VNET_BUFFER_F_IS_IP4,
                     b[2]->flags & VNET_BUFFER_F_IS_IP6, 1 /* with gso */ );
                  vnet_calc_checksums_inline
                    (vm, b[3],
                     b[3]->flags & VNET_BUFFER_F_IS_IP4,
                     b[3]->flags & VNET_BUFFER_F_IS_IP6, 1 /* with gso */ );
                }
            }
          b += 4;
        }

      while (from + 1 <= from_end && n_left_to_tx >= 1)
        {
          u32 bi0;
          u32 tx_swif0;

          bi0 = from[0];
          to_tx[0] = bi0;
          from += 1;
          to_tx += 1;
          n_left_to_tx -= 1;

          /* Be grumpy about zero length buffers for benefit of
             driver tx function. */
          ASSERT (b[0]->current_length > 0);

          n_bytes_b0 = vlib_buffer_length_in_chain (vm, b[0]);
          tx_swif0 = vnet_buffer (b[0])->sw_if_index[VLIB_TX];
          n_bytes += n_bytes_b0;
          n_packets += 1;

          if (PREDICT_FALSE (current_config_index != ~0))
            {
              vnet_buffer (b[0])->feature_arc_index = arc;
              b[0]->current_config_index = current_config_index;
            }

          if (PREDICT_FALSE (tx_swif0 != rt->sw_if_index))
            {
              vlib_increment_combined_counter (im->combined_sw_if_counters +
                                               VNET_INTERFACE_COUNTER_TX,
                                               thread_index, tx_swif0, 1,
                                               n_bytes_b0);
            }

          if (do_tx_offloads)
            {
              if (b[0]->flags &
                  (VNET_BUFFER_F_OFFLOAD_TCP_CKSUM |
                   VNET_BUFFER_F_OFFLOAD_UDP_CKSUM |
                   VNET_BUFFER_F_OFFLOAD_IP_CKSUM))
                vnet_calc_checksums_inline
                  (vm, b[0],
                   b[0]->flags & VNET_BUFFER_F_IS_IP4,
                   b[0]->flags & VNET_BUFFER_F_IS_IP6, 1 /* with gso */ );
            }
          b += 1;
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_tx);
    }

  /* Update main interface stats. */
  vlib_increment_combined_counter (im->combined_sw_if_counters
                                   + VNET_INTERFACE_COUNTER_TX,
                                   thread_index,
                                   rt->sw_if_index, n_packets, n_bytes);
  return n_buffers;
}
#endif /* CLIB_MARCH_VARIANT */

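/*
 * TX pcap capture support: when "pcap tx trace" is enabled, add packets
 * from this frame to the capture. With sw_if_index_from_buffer == 0 the
 * capture interface comes from the node runtime; otherwise it is read from
 * each buffer's TX sw_if_index. Packets may be filtered by the global
 * classifier filter table or by the interface's trace classify table.
 */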
static_always_inline void vnet_interface_pcap_tx_trace
  (vlib_main_t * vm, vlib_node_runtime_t * node, vlib_frame_t * frame,
   int sw_if_index_from_buffer)
{
  u32 n_left_from, *from;
  u32 sw_if_index;
  vnet_pcap_t *pp = &vlib_global_main.pcap;

  if (PREDICT_TRUE (pp->pcap_tx_enable == 0))
    return;

  if (sw_if_index_from_buffer == 0)
    {
      vnet_interface_output_runtime_t *rt = (void *) node->runtime_data;
      sw_if_index = rt->sw_if_index;
    }
  else
    sw_if_index = ~0;

  n_left_from = frame->n_vectors;
  from = vlib_frame_vector_args (frame);

  while (n_left_from > 0)
    {
      int classify_filter_result;
      u32 bi0 = from[0];
      vlib_buffer_t *b0 = vlib_get_buffer (vm, bi0);
      from++;
      n_left_from--;

      if (pp->filter_classify_table_index != ~0)
        {
          classify_filter_result =
            vnet_is_packet_traced_inline
            (b0, pp->filter_classify_table_index, 0 /* full classify */ );
          if (classify_filter_result)
            pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);
          continue;
        }

      if (sw_if_index_from_buffer)
        sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_TX];

      if (pp->pcap_sw_if_index == 0 || pp->pcap_sw_if_index == sw_if_index)
        {
          vnet_main_t *vnm = vnet_get_main ();
          vnet_hw_interface_t *hi =
            vnet_get_sup_hw_interface (vnm, sw_if_index);
          /* Capture pkt if not filtered, or if filter hits */
          if (hi->trace_classify_table_index == ~0 ||
              vnet_is_packet_traced_inline
              (b0, hi->trace_classify_table_index, 0 /* full classify */ ))
            pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);
        }
    }
}

#ifndef CLIB_MARCH_VARIANT

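/*
 * Default output node function used by interfaces that do not supply their
 * own. Runs the TX pcap trace, then selects the inline variant: software
 * checksum calculation is skipped when the hardware advertises
 * VNET_HW_INTERFACE_FLAG_SUPPORTS_TX_L4_CKSUM_OFFLOAD.
 */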
uword
vnet_interface_output_node (vlib_main_t * vm, vlib_node_runtime_t * node,
                            vlib_frame_t * frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  vnet_hw_interface_t *hi;
  vnet_interface_output_runtime_t *rt = (void *) node->runtime_data;
  hi = vnet_get_sup_hw_interface (vnm, rt->sw_if_index);

  vnet_interface_pcap_tx_trace (vm, node, frame,
                                0 /* sw_if_index_from_buffer */ );

  if (hi->flags & VNET_HW_INTERFACE_FLAG_SUPPORTS_TX_L4_CKSUM_OFFLOAD)
    return vnet_interface_output_node_inline (vm, node, frame, vnm, hi,
                                              /* do_tx_offloads */ 0);
  else
    return vnet_interface_output_node_inline (vm, node, frame, vnm, hi,
                                              /* do_tx_offloads */ 1);
}
#endif /* CLIB_MARCH_VARIANT */

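/*
 * The per-interface next index used below is established by
 * vnet_set_interface_output_node() and by the hardware interface add/del
 * callback at the bottom of this file.
 */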
/* Use buffer's sw_if_index[VLIB_TX] to choose output interface. */
VLIB_NODE_FN (vnet_per_buffer_interface_output_node) (vlib_main_t * vm,
                                                      vlib_node_runtime_t * node,
                                                      vlib_frame_t * frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  u32 n_left_to_next, *from, *to_next;
  u32 n_left_from, next_index;

  vnet_interface_pcap_tx_trace (vm, node, frame,
                                1 /* sw_if_index_from_buffer */ );

  n_left_from = frame->n_vectors;

  from = vlib_frame_vector_args (frame);
  next_index = node->cached_next_index;

  while (n_left_from > 0)
    {
      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      while (n_left_from >= 4 && n_left_to_next >= 2)
        {
          u32 bi0, bi1, next0, next1;
          vlib_buffer_t *b0, *b1;
          vnet_hw_interface_t *hi0, *hi1;

          /* Prefetch next iteration. */
          vlib_prefetch_buffer_with_index (vm, from[2], LOAD);
          vlib_prefetch_buffer_with_index (vm, from[3], LOAD);

          bi0 = from[0];
          bi1 = from[1];
          to_next[0] = bi0;
          to_next[1] = bi1;
          from += 2;
          to_next += 2;
          n_left_to_next -= 2;
          n_left_from -= 2;

          b0 = vlib_get_buffer (vm, bi0);
          b1 = vlib_get_buffer (vm, bi1);

          hi0 =
            vnet_get_sup_hw_interface (vnm,
                                       vnet_buffer (b0)->sw_if_index
                                       [VLIB_TX]);
          hi1 =
            vnet_get_sup_hw_interface (vnm,
                                       vnet_buffer (b1)->sw_if_index
                                       [VLIB_TX]);

          next0 = hi0->output_node_next_index;
          next1 = hi1->output_node_next_index;

          vlib_validate_buffer_enqueue_x2 (vm, node, next_index, to_next,
                                           n_left_to_next, bi0, bi1, next0,
                                           next1);
        }

      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0, next0;
          vlib_buffer_t *b0;
          vnet_hw_interface_t *hi0;

          bi0 = from[0];
          to_next[0] = bi0;
          from += 1;
          to_next += 1;
          n_left_to_next -= 1;
          n_left_from -= 1;

          b0 = vlib_get_buffer (vm, bi0);

          hi0 =
            vnet_get_sup_hw_interface (vnm,
                                       vnet_buffer (b0)->sw_if_index
                                       [VLIB_TX]);

          next0 = hi0->output_node_next_index;

          vlib_validate_buffer_enqueue_x1 (vm, node, next_index, to_next,
                                           n_left_to_next, bi0, next0);
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  return frame->n_vectors;
}

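/*
 * Error (drop/punt) handling. The trace for error-drop/error-punt records
 * only the RX sw_if_index of the offending packet.
 */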
typedef struct vnet_error_trace_t_
{
  u32 sw_if_index;
} vnet_error_trace_t;

static u8 *
format_vnet_error_trace (u8 * s, va_list * va)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*va, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*va, vlib_node_t *);
  vnet_error_trace_t *t = va_arg (*va, vnet_error_trace_t *);

  s = format (s, "rx:%U", format_vnet_sw_if_index_name,
              vnet_get_main (), t->sw_if_index);

  return s;
}

static void
interface_trace_buffers (vlib_main_t * vm,
                         vlib_node_runtime_t * node, vlib_frame_t * frame)
{
  u32 n_left, *buffers;

  buffers = vlib_frame_vector_args (frame);
  n_left = frame->n_vectors;

  while (n_left >= 4)
    {
      u32 bi0, bi1;
      vlib_buffer_t *b0, *b1;
      vnet_error_trace_t *t0, *t1;

      /* Prefetch next iteration. */
      vlib_prefetch_buffer_with_index (vm, buffers[2], LOAD);
      vlib_prefetch_buffer_with_index (vm, buffers[3], LOAD);

      bi0 = buffers[0];
      bi1 = buffers[1];

      b0 = vlib_get_buffer (vm, bi0);
      b1 = vlib_get_buffer (vm, bi1);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
        {
          t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
          t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
        }
      if (b1->flags & VLIB_BUFFER_IS_TRACED)
        {
          t1 = vlib_add_trace (vm, node, b1, sizeof (t1[0]));
          t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_RX];
        }
      buffers += 2;
      n_left -= 2;
    }

  while (n_left >= 1)
    {
      u32 bi0;
      vlib_buffer_t *b0;
      vnet_error_trace_t *t0;

      bi0 = buffers[0];

      b0 = vlib_get_buffer (vm, bi0);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
        {
          t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
          t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
        }
      buffers += 1;
      n_left -= 1;
    }
}

typedef enum
{
  VNET_ERROR_DISPOSITION_DROP,
  VNET_ERROR_DISPOSITION_PUNT,
  VNET_ERROR_N_DISPOSITION,
} vnet_error_disposition_t;

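/*
 * Shared worker for the error-drop and error-punt nodes: every buffer in
 * the frame goes to next index 0 ("drop" or "punt"), so the rest of the
 * function is a counting exercise. RX interface indices are gathered
 * first, then counted in runs of equal values via clib_count_equal_u32 so
 * each counter is bumped once per run; sub-interface drops/punts are also
 * charged to the super-interface.
 */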
static_always_inline uword
interface_drop_punt (vlib_main_t * vm,
                     vlib_node_runtime_t * node,
                     vlib_frame_t * frame,
                     vnet_error_disposition_t disposition)
{
  u32 *from, n_left, thread_index, *sw_if_index;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b;
  u32 sw_if_indices[VLIB_FRAME_SIZE];
  vlib_simple_counter_main_t *cm;
  u16 nexts[VLIB_FRAME_SIZE];
  vnet_main_t *vnm;

  vnm = vnet_get_main ();
  thread_index = vm->thread_index;
  from = vlib_frame_vector_args (frame);
  n_left = frame->n_vectors;
  b = bufs;
  sw_if_index = sw_if_indices;

  vlib_get_buffers (vm, from, bufs, n_left);

  if (node->flags & VLIB_NODE_FLAG_TRACE)
    interface_trace_buffers (vm, node, frame);

  /* All going to drop regardless, this is just a counting exercise */
  clib_memset (nexts, 0, sizeof (nexts));

  cm = vec_elt_at_index (vnm->interface_main.sw_if_counters,
                         (disposition == VNET_ERROR_DISPOSITION_PUNT
                          ? VNET_INTERFACE_COUNTER_PUNT
                          : VNET_INTERFACE_COUNTER_DROP));

  /* collect the array of interfaces first ... */
  while (n_left >= 4)
    {
      if (n_left >= 12)
        {
          /* Prefetch 8 ahead - there's not much going on in each iteration */
          vlib_prefetch_buffer_header (b[4], LOAD);
          vlib_prefetch_buffer_header (b[5], LOAD);
          vlib_prefetch_buffer_header (b[6], LOAD);
          vlib_prefetch_buffer_header (b[7], LOAD);
        }
      sw_if_index[0] = vnet_buffer (b[0])->sw_if_index[VLIB_RX];
      sw_if_index[1] = vnet_buffer (b[1])->sw_if_index[VLIB_RX];
      sw_if_index[2] = vnet_buffer (b[2])->sw_if_index[VLIB_RX];
      sw_if_index[3] = vnet_buffer (b[3])->sw_if_index[VLIB_RX];

      sw_if_index += 4;
      n_left -= 4;
      b += 4;
    }
  while (n_left)
    {
      sw_if_index[0] = vnet_buffer (b[0])->sw_if_index[VLIB_RX];

      sw_if_index += 1;
      n_left -= 1;
      b += 1;
    }

  /* ... then count against them in blocks */
  n_left = frame->n_vectors;

  while (n_left)
    {
      vnet_sw_interface_t *sw_if0;
      u16 off, count;

      off = frame->n_vectors - n_left;

      sw_if_index = sw_if_indices + off;

      count = clib_count_equal_u32 (sw_if_index, n_left);
      n_left -= count;

      vlib_increment_simple_counter (cm, thread_index, sw_if_index[0], count);

      /* Increment super-interface drop/punt counters for
         sub-interfaces. */
      sw_if0 = vnet_get_sw_interface (vnm, sw_if_index[0]);
      if (sw_if0->sup_sw_if_index != sw_if_index[0])
        vlib_increment_simple_counter
          (cm, thread_index, sw_if0->sup_sw_if_index, count);
    }

  vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);

  return frame->n_vectors;
}

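/*
 * Drop pcap capture: on drop, optionally add the packet to the pcap
 * capture. Errors present in pcap_drop_filter_hash are skipped. The buffer
 * is rewound to the L2 header (or to the buffer start) so the capture
 * contains the full packet, and "<node-name>: <error-string>" is appended
 * to the end of the last buffer in the chain when it fits, so the drop
 * reason is visible in the capture.
 */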
static inline void
pcap_drop_trace (vlib_main_t * vm,
                 vnet_interface_main_t * im,
                 vnet_pcap_t * pp, vlib_frame_t * f)
{
  u32 *from;
  u32 n_left = f->n_vectors;
  vlib_buffer_t *b0, *p1;
  u32 bi0;
  i16 save_current_data;
  u16 save_current_length;
  vlib_error_main_t *em = &vm->error_main;
  int do_trace = 0;

  from = vlib_frame_vector_args (f);

  while (n_left > 0)
    {
      if (PREDICT_TRUE (n_left > 1))
        {
          p1 = vlib_get_buffer (vm, from[1]);
          vlib_prefetch_buffer_header (p1, LOAD);
        }

      bi0 = from[0];
      b0 = vlib_get_buffer (vm, bi0);
      from++;
      n_left--;

      /* See if we're pointedly ignoring this specific error */
      if (im->pcap_drop_filter_hash
          && hash_get (im->pcap_drop_filter_hash, b0->error))
        continue;

      do_trace = (pp->pcap_sw_if_index == 0) ||
        pp->pcap_sw_if_index == vnet_buffer (b0)->sw_if_index[VLIB_RX];

      if (PREDICT_FALSE
          (do_trace == 0 && pp->filter_classify_table_index != ~0))
        {
          do_trace = vnet_is_packet_traced_inline
            (b0, pp->filter_classify_table_index, 0 /* full classify */ );
        }

      /* Trace all drops, or drops received on a specific interface */
      if (do_trace)
        {
          save_current_data = b0->current_data;
          save_current_length = b0->current_length;

          /*
           * Typically, we'll need to rewind the buffer.
           * If l2_hdr_offset is valid, make sure to rewind to the start of
           * the L2 header. This may not be the buffer start in case we
           * popped vlan tags.
           * Otherwise, rewind to buffer start and hope for the best.
           */
          if (b0->flags & VNET_BUFFER_F_L2_HDR_OFFSET_VALID)
            {
              if (b0->current_data > vnet_buffer (b0)->l2_hdr_offset)
                vlib_buffer_advance (b0,
                                     vnet_buffer (b0)->l2_hdr_offset -
                                     b0->current_data);
            }
          else if (b0->current_data > 0)
            vlib_buffer_advance (b0, (word) - b0->current_data);

          {
            vlib_buffer_t *last = b0;
            u32 error_node_index;
            int drop_string_len;
            vlib_node_t *n;
            /* Length of the error string */
            int error_string_len =
              clib_strnlen (em->error_strings_heap[b0->error], 128);

            /* Dig up the drop node */
            error_node_index = vm->node_main.node_by_error[b0->error];
            n = vlib_get_node (vm, error_node_index);

            /* Length of full drop string, w/ "nodename: " prepended */
            drop_string_len = error_string_len + vec_len (n->name) + 2;

            /* Find the last buffer in the chain */
            while (last->flags & VLIB_BUFFER_NEXT_PRESENT)
              last = vlib_get_buffer (vm, last->next_buffer);

            /*
             * Append <nodename>: <error-string> to the capture,
             * only if we can do that without allocating a new buffer.
             */
            if (PREDICT_TRUE ((last->current_data + last->current_length)
                              < (VLIB_BUFFER_DEFAULT_DATA_SIZE
                                 - drop_string_len)))
              {
                clib_memcpy_fast (last->data + last->current_data +
                                  last->current_length, n->name,
                                  vec_len (n->name));
                clib_memcpy_fast (last->data + last->current_data +
                                  last->current_length + vec_len (n->name),
                                  ": ", 2);
                clib_memcpy_fast (last->data + last->current_data +
                                  last->current_length + vec_len (n->name) +
                                  2, em->error_strings_heap[b0->error],
                                  error_string_len);
                last->current_length += drop_string_len;
                b0->flags &= ~(VLIB_BUFFER_TOTAL_LENGTH_VALID);
                pcap_add_buffer (&pp->pcap_main, vm, bi0,
                                 pp->max_bytes_per_pkt);
                last->current_length -= drop_string_len;
                b0->current_data = save_current_data;
                b0->current_length = save_current_length;
                continue;
              }
          }

          /*
           * Didn't have space in the last buffer, here's the dropped
           * packet as-is
           */
          pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);

          b0->current_data = save_current_data;
          b0->current_length = save_current_length;
        }
    }
}

#ifndef CLIB_MARCH_VARIANT
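/*
 * Add or remove an error index from the drop-trace filter hash; errors
 * present in the hash are excluded from the pcap drop capture above.
 */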
void
vnet_pcap_drop_trace_filter_add_del (u32 error_index, int is_add)
{
  vnet_interface_main_t *im = &vnet_get_main ()->interface_main;

  if (im->pcap_drop_filter_hash == 0)
    im->pcap_drop_filter_hash = hash_create (0, sizeof (uword));

  if (is_add)
    hash_set (im->pcap_drop_filter_hash, error_index, 1);
  else
    hash_unset (im->pcap_drop_filter_hash, error_index);
}
#endif /* CLIB_MARCH_VARIANT */

VLIB_NODE_FN (interface_drop) (vlib_main_t * vm,
                               vlib_node_runtime_t * node,
                               vlib_frame_t * frame)
{
  vnet_interface_main_t *im = &vnet_get_main ()->interface_main;
  vnet_pcap_t *pp = &vlib_global_main.pcap;

  if (PREDICT_FALSE (pp->pcap_drop_enable))
    pcap_drop_trace (vm, im, pp, frame);

  return interface_drop_punt (vm, node, frame, VNET_ERROR_DISPOSITION_DROP);
}

VLIB_NODE_FN (interface_punt) (vlib_main_t * vm,
                               vlib_node_runtime_t * node,
                               vlib_frame_t * frame)
{
  return interface_drop_punt (vm, node, frame, VNET_ERROR_DISPOSITION_PUNT);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (interface_drop) = {
  .name = "error-drop",
  .vector_size = sizeof (u32),
  .format_trace = format_vnet_error_trace,
  .n_next_nodes = 1,
  .next_nodes = {
    [0] = "drop",
  },
};
/* *INDENT-ON* */

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (interface_punt) = {
  .name = "error-punt",
  .vector_size = sizeof (u32),
  .format_trace = format_vnet_error_trace,
  .n_next_nodes = 1,
  .next_nodes = {
    [0] = "punt",
  },
};
/* *INDENT-ON* */

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (vnet_per_buffer_interface_output_node) = {
  .name = "interface-output",
  .vector_size = sizeof (u32),
};
/* *INDENT-ON* */

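/*
 * "interface-tx" node, the last node in the interface-output feature arc:
 * coalesces consecutive buffers destined to the same TX interface into a
 * frame and hands that frame directly to the device TX node.
 */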
static uword
interface_tx_node_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
                      vlib_frame_t * from_frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  u32 last_sw_if_index = ~0;
  vlib_frame_t *to_frame = 0;
  vnet_hw_interface_t *hw = 0;
  u32 *from, *to_next = 0;
  u32 n_left_from;

  from = vlib_frame_vector_args (from_frame);
  n_left_from = from_frame->n_vectors;
  while (n_left_from > 0)
    {
      u32 bi0;
      vlib_buffer_t *b0;
      u32 sw_if_index0;

      bi0 = from[0];
      from++;
      n_left_from--;
      b0 = vlib_get_buffer (vm, bi0);
      sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_TX];

      if (PREDICT_FALSE ((last_sw_if_index != sw_if_index0) || to_frame == 0))
        {
          if (to_frame)
            {
              hw = vnet_get_sup_hw_interface (vnm, last_sw_if_index);
              vlib_put_frame_to_node (vm, hw->tx_node_index, to_frame);
            }
          last_sw_if_index = sw_if_index0;
          hw = vnet_get_sup_hw_interface (vnm, sw_if_index0);
          to_frame = vlib_get_frame_to_node (vm, hw->tx_node_index);
          to_next = vlib_frame_vector_args (to_frame);
        }

      to_next[0] = bi0;
      to_next++;
      to_frame->n_vectors++;
    }
  vlib_put_frame_to_node (vm, hw->tx_node_index, to_frame);
  return from_frame->n_vectors;
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (interface_tx) = {
  .function = interface_tx_node_fn,
  .name = "interface-tx",
  .vector_size = sizeof (u32),
  .n_next_nodes = 1,
  .next_nodes = {
    [0] = "error-drop",
  },
};

VNET_FEATURE_ARC_INIT (interface_output, static) =
{
  .arc_name = "interface-output",
  .start_nodes = VNET_FEATURES (0),
  .last_in_arc = "interface-tx",
  .arc_index_ptr = &vnet_main.interface_main.output_feature_arc_index,
};

VNET_FEATURE_INIT (span_tx, static) = {
  .arc_name = "interface-output",
  .node_name = "span-output",
  .runs_before = VNET_FEATURES ("interface-tx"),
};

VNET_FEATURE_INIT (ipsec_if_tx, static) = {
  .arc_name = "interface-output",
  .node_name = "ipsec-if-output",
  .runs_before = VNET_FEATURES ("interface-tx"),
};

VNET_FEATURE_INIT (interface_tx, static) = {
  .arc_name = "interface-output",
  .node_name = "interface-tx",
  .runs_before = 0,
};
/* *INDENT-ON* */

#ifndef CLIB_MARCH_VARIANT
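/*
 * Hardware interface add/del callback: wire the interface's output node
 * into the "interface-output" node's next-index table so the per-buffer
 * dispatch above can reach it.
 */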
clib_error_t *
vnet_per_buffer_interface_output_hw_interface_add_del (vnet_main_t * vnm,
                                                        u32 hw_if_index,
                                                        u32 is_create)
{
  vnet_hw_interface_t *hi = vnet_get_hw_interface (vnm, hw_if_index);
  u32 next_index;

  if (hi->output_node_index == 0)
    return 0;

  next_index = vlib_node_add_next
    (vnm->vlib_main, vnet_per_buffer_interface_output_node.index,
     hi->output_node_index);
  hi->output_node_next_index = next_index;

  return 0;
}

VNET_HW_INTERFACE_ADD_DEL_FUNCTION
  (vnet_per_buffer_interface_output_hw_interface_add_del);

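/*
 * Explicitly set (or override) the output node for a hardware interface and
 * record the corresponding next index on the "interface-output" node.
 */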
void
vnet_set_interface_output_node (vnet_main_t * vnm,
                                u32 hw_if_index, u32 node_index)
{
  ASSERT (node_index);
  vnet_hw_interface_t *hi = vnet_get_hw_interface (vnm, hw_if_index);
  u32 next_index = vlib_node_add_next
    (vnm->vlib_main, vnet_per_buffer_interface_output_node.index, node_index);
  hi->output_node_next_index = next_index;
  hi->output_node_index = node_index;
}
#endif /* CLIB_MARCH_VARIANT */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */