/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * interface_output.c: interface output node
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include <vnet/vnet.h>
#include <vnet/ip/icmp46_packet.h>
#include <vnet/ip/ip4.h>
#include <vnet/ip/ip6.h>
#include <vnet/udp/udp_packet.h>
#include <vnet/feature/feature.h>
#include <vnet/classify/trace_classify.h>

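/* Per-packet trace record captured on the interface output path: the egress
   (TX) sw_if_index, the buffer flags, and the leading bytes of packet data. */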
typedef struct
{
  u32 sw_if_index;
  u32 flags;
  u8 data[128 - 2 * sizeof (u32)];
}
interface_output_trace_t;

#ifndef CLIB_MARCH_VARIANT
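/* Format an interface_output_trace_t for "show trace": print the egress
   interface name (or just the sw_if_index if the interface has since been
   deleted), then the captured data using the node's buffer formatter when
   one is registered, otherwise as hex bytes. */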
u8 *
format_vnet_interface_output_trace (u8 * s, va_list * va)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*va, vlib_main_t *);
  vlib_node_t *node = va_arg (*va, vlib_node_t *);
  interface_output_trace_t *t = va_arg (*va, interface_output_trace_t *);
  vnet_main_t *vnm = vnet_get_main ();
  vnet_sw_interface_t *si;
  u32 indent;

  if (t->sw_if_index != (u32) ~ 0)
    {
      indent = format_get_indent (s);

      if (pool_is_free_index
          (vnm->interface_main.sw_interfaces, t->sw_if_index))
        {
          /* the interface may have been deleted by the time the trace is printed */
          s = format (s, "sw_if_index: %d ", t->sw_if_index);
        }
      else
        {
          si = vnet_get_sw_interface (vnm, t->sw_if_index);
          s =
            format (s, "%U ", format_vnet_sw_interface_name, vnm, si,
                    t->flags);
        }
      s =
        format (s, "\n%U%U", format_white_space, indent,
                node->format_buffer ? node->format_buffer : format_hex_bytes,
                t->data, sizeof (t->data));
    }
  return s;
}

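/* Add an interface_output_trace_t record for every buffer in the frame that
   has VLIB_BUFFER_IS_TRACED set, handling two buffers per iteration. */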
static void
vnet_interface_output_trace (vlib_main_t * vm,
                             vlib_node_runtime_t * node,
                             vlib_frame_t * frame, uword n_buffers)
{
  u32 n_left, *from;

  n_left = n_buffers;
  from = vlib_frame_vector_args (frame);

  while (n_left >= 4)
    {
      u32 bi0, bi1;
      vlib_buffer_t *b0, *b1;
      interface_output_trace_t *t0, *t1;

      /* Prefetch next iteration. */
      vlib_prefetch_buffer_with_index (vm, from[2], LOAD);
      vlib_prefetch_buffer_with_index (vm, from[3], LOAD);

      bi0 = from[0];
      bi1 = from[1];

      b0 = vlib_get_buffer (vm, bi0);
      b1 = vlib_get_buffer (vm, bi1);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
        {
          t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
          t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_TX];
          t0->flags = b0->flags;
          clib_memcpy_fast (t0->data, vlib_buffer_get_current (b0),
                            sizeof (t0->data));
        }
      if (b1->flags & VLIB_BUFFER_IS_TRACED)
        {
          t1 = vlib_add_trace (vm, node, b1, sizeof (t1[0]));
          t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_TX];
          t1->flags = b1->flags;
          clib_memcpy_fast (t1->data, vlib_buffer_get_current (b1),
                            sizeof (t1->data));
        }
      from += 2;
      n_left -= 2;
    }

  while (n_left >= 1)
    {
      u32 bi0;
      vlib_buffer_t *b0;
      interface_output_trace_t *t0;

      bi0 = from[0];

      b0 = vlib_get_buffer (vm, bi0);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
        {
          t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
          t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_TX];
          t0->flags = b0->flags;
          clib_memcpy_fast (t0->data, vlib_buffer_get_current (b0),
                            sizeof (t0->data));
        }
      from += 1;
      n_left -= 1;
    }
}

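/* Compute the IPv4 header checksum and/or TCP/UDP checksums (IPv4 or IPv6)
   in software for a buffer that requested checksum offload, then clear the
   offload flags so the work is not repeated downstream.  Used when the
   egress interface cannot offload checksums. */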
static_always_inline void
calc_checksums (vlib_main_t * vm, vlib_buffer_t * b)
{
  tcp_header_t *th;
  udp_header_t *uh;

  int is_ip4 = (b->flags & VNET_BUFFER_F_IS_IP4) != 0;
  int is_ip6 = (b->flags & VNET_BUFFER_F_IS_IP6) != 0;

  ASSERT (!(is_ip4 && is_ip6));

  th = (tcp_header_t *) (b->data + vnet_buffer (b)->l4_hdr_offset);
  uh = (udp_header_t *) (b->data + vnet_buffer (b)->l4_hdr_offset);

  if (is_ip4)
    {
      ip4_header_t *ip4;

      ip4 = (ip4_header_t *) (b->data + vnet_buffer (b)->l3_hdr_offset);
      if (b->flags & VNET_BUFFER_F_OFFLOAD_IP_CKSUM)
        ip4->checksum = ip4_header_checksum (ip4);
      if (b->flags & VNET_BUFFER_F_OFFLOAD_TCP_CKSUM)
        {
          th->checksum = 0;
          th->checksum = ip4_tcp_udp_compute_checksum (vm, b, ip4);
        }
      else if (b->flags & VNET_BUFFER_F_OFFLOAD_UDP_CKSUM)
        uh->checksum = ip4_tcp_udp_compute_checksum (vm, b, ip4);
    }
  else if (is_ip6)
    {
      int bogus;
      ip6_header_t *ip6;

      ip6 = (ip6_header_t *) (b->data + vnet_buffer (b)->l3_hdr_offset);
      if (b->flags & VNET_BUFFER_F_OFFLOAD_TCP_CKSUM)
        {
          th->checksum = 0;
          th->checksum =
            ip6_tcp_udp_icmp_compute_checksum (vm, b, ip6, &bogus);
        }
      else if (b->flags & VNET_BUFFER_F_OFFLOAD_UDP_CKSUM)
        {
          uh->checksum = 0;
          uh->checksum =
            ip6_tcp_udp_icmp_compute_checksum (vm, b, ip6, &bogus);
        }
    }
  b->flags &= ~VNET_BUFFER_F_OFFLOAD_TCP_CKSUM;
  b->flags &= ~VNET_BUFFER_F_OFFLOAD_UDP_CKSUM;
  b->flags &= ~VNET_BUFFER_F_OFFLOAD_IP_CKSUM;
}

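/* Main TX path for one interface: drop everything if the interface is
   deleted or admin/link down, run the interface-output feature arc when
   configured, maintain per-interface and sub-interface TX counters, and
   optionally compute checksums in software before the driver's tx node. */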
static_always_inline uword
vnet_interface_output_node_inline (vlib_main_t * vm,
                                   vlib_node_runtime_t * node,
                                   vlib_frame_t * frame,
                                   vnet_main_t * vnm,
                                   vnet_hw_interface_t * hi,
                                   int do_tx_offloads)
{
  vnet_interface_output_runtime_t *rt = (void *) node->runtime_data;
  vnet_sw_interface_t *si;
  u32 n_left_to_tx, *from, *from_end, *to_tx;
  u32 n_bytes, n_buffers, n_packets;
  u32 n_bytes_b0, n_bytes_b1, n_bytes_b2, n_bytes_b3;
  u32 thread_index = vm->thread_index;
  vnet_interface_main_t *im = &vnm->interface_main;
  u32 next_index = VNET_INTERFACE_OUTPUT_NEXT_TX;
  u32 current_config_index = ~0;
  u8 arc = im->output_feature_arc_index;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;

  n_buffers = frame->n_vectors;

  if (node->flags & VLIB_NODE_FLAG_TRACE)
    vnet_interface_output_trace (vm, node, frame, n_buffers);

  from = vlib_frame_vector_args (frame);
  vlib_get_buffers (vm, from, b, n_buffers);

  if (rt->is_deleted)
    return vlib_error_drop_buffers (vm, node, from,
                                    /* buffer stride */ 1,
                                    n_buffers,
                                    VNET_INTERFACE_OUTPUT_NEXT_DROP,
                                    node->node_index,
                                    VNET_INTERFACE_OUTPUT_ERROR_INTERFACE_DELETED);

  si = vnet_get_sw_interface (vnm, rt->sw_if_index);
  hi = vnet_get_sup_hw_interface (vnm, rt->sw_if_index);
  if (!(si->flags & VNET_SW_INTERFACE_FLAG_ADMIN_UP) ||
      !(hi->flags & VNET_HW_INTERFACE_FLAG_LINK_UP))
    {
      vlib_simple_counter_main_t *cm;

      cm = vec_elt_at_index (vnm->interface_main.sw_if_counters,
                             VNET_INTERFACE_COUNTER_TX_ERROR);
      vlib_increment_simple_counter (cm, thread_index,
                                     rt->sw_if_index, n_buffers);

      return vlib_error_drop_buffers (vm, node, from,
                                      /* buffer stride */ 1,
                                      n_buffers,
                                      VNET_INTERFACE_OUTPUT_NEXT_DROP,
                                      node->node_index,
                                      VNET_INTERFACE_OUTPUT_ERROR_INTERFACE_DOWN);
    }

  from_end = from + n_buffers;

  /* Total byte count of all buffers. */
  n_bytes = 0;
  n_packets = 0;

  /* interface-output feature arc handling */
  if (PREDICT_FALSE (vnet_have_features (arc, rt->sw_if_index)))
    {
      vnet_feature_config_main_t *fcm;
      fcm = vnet_feature_get_config_main (arc);
      current_config_index = vnet_get_feature_config_index (arc,
                                                            rt->sw_if_index);
      vnet_get_config_data (&fcm->config_main, &current_config_index,
                            &next_index, 0);
    }

  while (from < from_end)
    {
      /* Get a new next frame, since the previous incomplete frame may have
         fewer than VLIB_FRAME_SIZE vectors in it. */
      vlib_get_new_next_frame (vm, node, next_index, to_tx, n_left_to_tx);

      while (from + 8 <= from_end && n_left_to_tx >= 4)
        {
          u32 bi0, bi1, bi2, bi3;
          u32 tx_swif0, tx_swif1, tx_swif2, tx_swif3;
          u32 or_flags;

          /* Prefetch next iteration. */
          vlib_prefetch_buffer_header (b[4], LOAD);
          vlib_prefetch_buffer_header (b[5], LOAD);
          vlib_prefetch_buffer_header (b[6], LOAD);
          vlib_prefetch_buffer_header (b[7], LOAD);

          bi0 = from[0];
          bi1 = from[1];
          bi2 = from[2];
          bi3 = from[3];
          to_tx[0] = bi0;
          to_tx[1] = bi1;
          to_tx[2] = bi2;
          to_tx[3] = bi3;

          or_flags = b[0]->flags | b[1]->flags | b[2]->flags | b[3]->flags;

          from += 4;
          to_tx += 4;
          n_left_to_tx -= 4;

          /* Be grumpy about zero length buffers for benefit of
             driver tx function. */
          ASSERT (b[0]->current_length > 0);
          ASSERT (b[1]->current_length > 0);
          ASSERT (b[2]->current_length > 0);
          ASSERT (b[3]->current_length > 0);

          n_bytes_b0 = vlib_buffer_length_in_chain (vm, b[0]);
          n_bytes_b1 = vlib_buffer_length_in_chain (vm, b[1]);
          n_bytes_b2 = vlib_buffer_length_in_chain (vm, b[2]);
          n_bytes_b3 = vlib_buffer_length_in_chain (vm, b[3]);
          tx_swif0 = vnet_buffer (b[0])->sw_if_index[VLIB_TX];
          tx_swif1 = vnet_buffer (b[1])->sw_if_index[VLIB_TX];
          tx_swif2 = vnet_buffer (b[2])->sw_if_index[VLIB_TX];
          tx_swif3 = vnet_buffer (b[3])->sw_if_index[VLIB_TX];

          n_bytes += n_bytes_b0 + n_bytes_b1;
          n_bytes += n_bytes_b2 + n_bytes_b3;
          n_packets += 4;

          if (PREDICT_FALSE (current_config_index != ~0))
            {
              vnet_buffer (b[0])->feature_arc_index = arc;
              vnet_buffer (b[1])->feature_arc_index = arc;
              vnet_buffer (b[2])->feature_arc_index = arc;
              vnet_buffer (b[3])->feature_arc_index = arc;
              b[0]->current_config_index = current_config_index;
              b[1]->current_config_index = current_config_index;
              b[2]->current_config_index = current_config_index;
              b[3]->current_config_index = current_config_index;
            }

          /* update vlan subif tx counts, if required */
          if (PREDICT_FALSE (tx_swif0 != rt->sw_if_index))
            {
              vlib_increment_combined_counter (im->combined_sw_if_counters +
                                               VNET_INTERFACE_COUNTER_TX,
                                               thread_index, tx_swif0, 1,
                                               n_bytes_b0);
            }

          if (PREDICT_FALSE (tx_swif1 != rt->sw_if_index))
            {
              vlib_increment_combined_counter (im->combined_sw_if_counters +
                                               VNET_INTERFACE_COUNTER_TX,
                                               thread_index, tx_swif1, 1,
                                               n_bytes_b1);
            }

          if (PREDICT_FALSE (tx_swif2 != rt->sw_if_index))
            {
              vlib_increment_combined_counter (im->combined_sw_if_counters +
                                               VNET_INTERFACE_COUNTER_TX,
                                               thread_index, tx_swif2, 1,
                                               n_bytes_b2);
            }
          if (PREDICT_FALSE (tx_swif3 != rt->sw_if_index))
            {
              vlib_increment_combined_counter (im->combined_sw_if_counters +
                                               VNET_INTERFACE_COUNTER_TX,
                                               thread_index, tx_swif3, 1,
                                               n_bytes_b3);
            }

          if (do_tx_offloads)
            {
              if (or_flags &
                  (VNET_BUFFER_F_OFFLOAD_TCP_CKSUM |
                   VNET_BUFFER_F_OFFLOAD_UDP_CKSUM |
                   VNET_BUFFER_F_OFFLOAD_IP_CKSUM))
                {
                  calc_checksums (vm, b[0]);
                  calc_checksums (vm, b[1]);
                  calc_checksums (vm, b[2]);
                  calc_checksums (vm, b[3]);
                }
            }
          b += 4;
        }

      while (from + 1 <= from_end && n_left_to_tx >= 1)
        {
          u32 bi0;
          u32 tx_swif0;

          bi0 = from[0];
          to_tx[0] = bi0;
          from += 1;
          to_tx += 1;
          n_left_to_tx -= 1;

          /* Be grumpy about zero length buffers for benefit of
             driver tx function. */
          ASSERT (b[0]->current_length > 0);

          n_bytes_b0 = vlib_buffer_length_in_chain (vm, b[0]);
          tx_swif0 = vnet_buffer (b[0])->sw_if_index[VLIB_TX];
          n_bytes += n_bytes_b0;
          n_packets += 1;

          if (PREDICT_FALSE (current_config_index != ~0))
            {
              vnet_buffer (b[0])->feature_arc_index = arc;
              b[0]->current_config_index = current_config_index;
            }

          if (PREDICT_FALSE (tx_swif0 != rt->sw_if_index))
            {
              vlib_increment_combined_counter (im->combined_sw_if_counters +
                                               VNET_INTERFACE_COUNTER_TX,
                                               thread_index, tx_swif0, 1,
                                               n_bytes_b0);
            }

          if (do_tx_offloads)
            {
              if (b[0]->flags &
                  (VNET_BUFFER_F_OFFLOAD_TCP_CKSUM |
                   VNET_BUFFER_F_OFFLOAD_UDP_CKSUM |
                   VNET_BUFFER_F_OFFLOAD_IP_CKSUM))
                calc_checksums (vm, b[0]);
            }
          b += 1;
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_tx);
    }

  /* Update main interface stats. */
  vlib_increment_combined_counter (im->combined_sw_if_counters
                                   + VNET_INTERFACE_COUNTER_TX,
                                   thread_index,
                                   rt->sw_if_index, n_packets, n_bytes);
  return n_buffers;
}
#endif /* CLIB_MARCH_VARIANT */

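/* If "pcap trace tx" is enabled, append packets from this frame to the
   capture file, subject to the optional classifier filter and the optional
   capture-interface filter. */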
static_always_inline void vnet_interface_pcap_tx_trace
  (vlib_main_t * vm, vlib_node_runtime_t * node, vlib_frame_t * frame,
   int sw_if_index_from_buffer)
{
  u32 n_left_from, *from;
  u32 sw_if_index;
  vnet_pcap_t *pp = &vlib_global_main.pcap;

  if (PREDICT_TRUE (pp->pcap_tx_enable == 0))
    return;

  if (sw_if_index_from_buffer == 0)
    {
      vnet_interface_output_runtime_t *rt = (void *) node->runtime_data;
      sw_if_index = rt->sw_if_index;
    }
  else
    sw_if_index = ~0;

  n_left_from = frame->n_vectors;
  from = vlib_frame_vector_args (frame);

  while (n_left_from > 0)
    {
      int classify_filter_result;
      u32 bi0 = from[0];
      vlib_buffer_t *b0 = vlib_get_buffer (vm, bi0);
      from++;
      n_left_from--;

      if (pp->filter_classify_table_index != ~0)
        {
          classify_filter_result =
            vnet_is_packet_traced_inline
            (b0, pp->filter_classify_table_index, 0 /* full classify */ );
          if (classify_filter_result)
            pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);
          continue;
        }

      if (sw_if_index_from_buffer)
        sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_TX];

      if (pp->pcap_sw_if_index == 0 || pp->pcap_sw_if_index == sw_if_index)
        pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);
    }
}

#ifndef CLIB_MARCH_VARIANT

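/* Interface output node function: run the pcap TX trace hook, then dispatch
   to the inline worker, enabling software checksum computation only when the
   hardware does not support TX L4 checksum offload. */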
uword
vnet_interface_output_node (vlib_main_t * vm, vlib_node_runtime_t * node,
                            vlib_frame_t * frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  vnet_hw_interface_t *hi;
  vnet_interface_output_runtime_t *rt = (void *) node->runtime_data;
  hi = vnet_get_sup_hw_interface (vnm, rt->sw_if_index);

  vnet_interface_pcap_tx_trace (vm, node, frame,
                                0 /* sw_if_index_from_buffer */ );

  if (hi->flags & VNET_HW_INTERFACE_FLAG_SUPPORTS_TX_L4_CKSUM_OFFLOAD)
    return vnet_interface_output_node_inline (vm, node, frame, vnm, hi,
                                              /* do_tx_offloads */ 0);
  else
    return vnet_interface_output_node_inline (vm, node, frame, vnm, hi,
                                              /* do_tx_offloads */ 1);
}
#endif /* CLIB_MARCH_VARIANT */

/* Use buffer's sw_if_index[VLIB_TX] to choose output interface. */
VLIB_NODE_FN (vnet_per_buffer_interface_output_node) (vlib_main_t * vm,
                                                      vlib_node_runtime_t *
                                                      node,
                                                      vlib_frame_t * frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  u32 n_left_to_next, *from, *to_next;
  u32 n_left_from, next_index;

  vnet_interface_pcap_tx_trace (vm, node, frame,
                                1 /* sw_if_index_from_buffer */ );

  n_left_from = frame->n_vectors;

  from = vlib_frame_vector_args (frame);
  next_index = node->cached_next_index;

  while (n_left_from > 0)
    {
      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      while (n_left_from >= 4 && n_left_to_next >= 2)
        {
          u32 bi0, bi1, next0, next1;
          vlib_buffer_t *b0, *b1;
          vnet_hw_interface_t *hi0, *hi1;

          /* Prefetch next iteration. */
          vlib_prefetch_buffer_with_index (vm, from[2], LOAD);
          vlib_prefetch_buffer_with_index (vm, from[3], LOAD);

          bi0 = from[0];
          bi1 = from[1];
          to_next[0] = bi0;
          to_next[1] = bi1;
          from += 2;
          to_next += 2;
          n_left_to_next -= 2;
          n_left_from -= 2;

          b0 = vlib_get_buffer (vm, bi0);
          b1 = vlib_get_buffer (vm, bi1);

          hi0 =
            vnet_get_sup_hw_interface (vnm,
                                       vnet_buffer (b0)->sw_if_index
                                       [VLIB_TX]);
          hi1 =
            vnet_get_sup_hw_interface (vnm,
                                       vnet_buffer (b1)->sw_if_index
                                       [VLIB_TX]);

          next0 = hi0->output_node_next_index;
          next1 = hi1->output_node_next_index;

          vlib_validate_buffer_enqueue_x2 (vm, node, next_index, to_next,
                                           n_left_to_next, bi0, bi1, next0,
                                           next1);
        }

      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0, next0;
          vlib_buffer_t *b0;
          vnet_hw_interface_t *hi0;

          bi0 = from[0];
          to_next[0] = bi0;
          from += 1;
          to_next += 1;
          n_left_to_next -= 1;
          n_left_from -= 1;

          b0 = vlib_get_buffer (vm, bi0);

          hi0 =
            vnet_get_sup_hw_interface (vnm,
                                       vnet_buffer (b0)->sw_if_index
                                       [VLIB_TX]);

          next0 = hi0->output_node_next_index;

          vlib_validate_buffer_enqueue_x1 (vm, node, next_index, to_next,
                                           n_left_to_next, bi0, next0);
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  return frame->n_vectors;
}

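/* Trace record for the error-drop / error-punt nodes: only the RX
   sw_if_index is recorded. */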
typedef struct vnet_error_trace_t_
{
  u32 sw_if_index;
} vnet_error_trace_t;

static u8 *
format_vnet_error_trace (u8 * s, va_list * va)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*va, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*va, vlib_node_t *);
  vnet_error_trace_t *t = va_arg (*va, vnet_error_trace_t *);

  s = format (s, "rx:%U", format_vnet_sw_if_index_name,
              vnet_get_main (), t->sw_if_index);

  return s;
}

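/* Add a vnet_error_trace_t record for each traced buffer in the frame. */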
static void
interface_trace_buffers (vlib_main_t * vm,
                         vlib_node_runtime_t * node, vlib_frame_t * frame)
{
  u32 n_left, *buffers;

  buffers = vlib_frame_vector_args (frame);
  n_left = frame->n_vectors;

  while (n_left >= 4)
    {
      u32 bi0, bi1;
      vlib_buffer_t *b0, *b1;
      vnet_error_trace_t *t0, *t1;

      /* Prefetch next iteration. */
      vlib_prefetch_buffer_with_index (vm, buffers[2], LOAD);
      vlib_prefetch_buffer_with_index (vm, buffers[3], LOAD);

      bi0 = buffers[0];
      bi1 = buffers[1];

      b0 = vlib_get_buffer (vm, bi0);
      b1 = vlib_get_buffer (vm, bi1);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
        {
          t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
          t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
        }
      if (b1->flags & VLIB_BUFFER_IS_TRACED)
        {
          t1 = vlib_add_trace (vm, node, b1, sizeof (t1[0]));
          t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_RX];
        }
      buffers += 2;
      n_left -= 2;
    }

  while (n_left >= 1)
    {
      u32 bi0;
      vlib_buffer_t *b0;
      vnet_error_trace_t *t0;

      bi0 = buffers[0];

      b0 = vlib_get_buffer (vm, bi0);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
        {
          t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
          t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
        }
      buffers += 1;
      n_left -= 1;
    }
}

typedef enum
{
  VNET_ERROR_DISPOSITION_DROP,
  VNET_ERROR_DISPOSITION_PUNT,
  VNET_ERROR_N_DISPOSITION,
} vnet_error_disposition_t;

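/* Shared worker for the error-drop and error-punt nodes: count each buffer
   against the per-interface (and super-interface) drop or punt counter, then
   enqueue the whole frame to next index 0 ("drop" or "punt" respectively). */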
static_always_inline uword
interface_drop_punt (vlib_main_t * vm,
                     vlib_node_runtime_t * node,
                     vlib_frame_t * frame,
                     vnet_error_disposition_t disposition)
{
  u32 *from, n_left, thread_index, *sw_if_index;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b;
  u32 sw_if_indices[VLIB_FRAME_SIZE];
  vlib_simple_counter_main_t *cm;
  u16 nexts[VLIB_FRAME_SIZE];
  vnet_main_t *vnm;

  vnm = vnet_get_main ();
  thread_index = vm->thread_index;
  from = vlib_frame_vector_args (frame);
  n_left = frame->n_vectors;
  b = bufs;
  sw_if_index = sw_if_indices;

  vlib_get_buffers (vm, from, bufs, n_left);

  if (node->flags & VLIB_NODE_FLAG_TRACE)
    interface_trace_buffers (vm, node, frame);

  /* All going to drop regardless, this is just a counting exercise */
  clib_memset (nexts, 0, sizeof (nexts));

  cm = vec_elt_at_index (vnm->interface_main.sw_if_counters,
                         (disposition == VNET_ERROR_DISPOSITION_PUNT
                          ? VNET_INTERFACE_COUNTER_PUNT
                          : VNET_INTERFACE_COUNTER_DROP));

  /* collect the array of interfaces first ... */
  while (n_left >= 4)
    {
      if (n_left >= 12)
        {
          /* Prefetch 8 ahead - there's not much going on in each iteration */
          vlib_prefetch_buffer_header (b[4], LOAD);
          vlib_prefetch_buffer_header (b[5], LOAD);
          vlib_prefetch_buffer_header (b[6], LOAD);
          vlib_prefetch_buffer_header (b[7], LOAD);
        }
      sw_if_index[0] = vnet_buffer (b[0])->sw_if_index[VLIB_RX];
      sw_if_index[1] = vnet_buffer (b[1])->sw_if_index[VLIB_RX];
      sw_if_index[2] = vnet_buffer (b[2])->sw_if_index[VLIB_RX];
      sw_if_index[3] = vnet_buffer (b[3])->sw_if_index[VLIB_RX];

      sw_if_index += 4;
      n_left -= 4;
      b += 4;
    }
  while (n_left)
    {
      sw_if_index[0] = vnet_buffer (b[0])->sw_if_index[VLIB_RX];

      sw_if_index += 1;
      n_left -= 1;
      b += 1;
    }

  /* ... then count against them in blocks */
  n_left = frame->n_vectors;

  while (n_left)
    {
      vnet_sw_interface_t *sw_if0;
      u16 off, count;

      off = frame->n_vectors - n_left;

      sw_if_index = sw_if_indices + off;

      count = clib_count_equal_u32 (sw_if_index, n_left);
      n_left -= count;

      vlib_increment_simple_counter (cm, thread_index, sw_if_index[0], count);

      /* Increment super-interface drop/punt counters for
         sub-interfaces. */
      sw_if0 = vnet_get_sw_interface (vnm, sw_if_index[0]);
      if (sw_if0->sup_sw_if_index != sw_if_index[0])
        vlib_increment_simple_counter
          (cm, thread_index, sw_if0->sup_sw_if_index, count);
    }

  vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);

  return frame->n_vectors;
}

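/* When "pcap trace drop" is enabled, write dropped packets to the capture
   file; when there is room in the last buffer of the chain, the
   "<node-name>: <error-string>" drop reason is appended to the captured
   packet data. */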
static inline void
pcap_drop_trace (vlib_main_t * vm,
                 vnet_interface_main_t * im,
                 vnet_pcap_t * pp, vlib_frame_t * f)
{
  u32 *from;
  u32 n_left = f->n_vectors;
  vlib_buffer_t *b0, *p1;
  u32 bi0;
  i16 save_current_data;
  u16 save_current_length;
  vlib_error_main_t *em = &vm->error_main;
  int do_trace = 0;

  from = vlib_frame_vector_args (f);

  while (n_left > 0)
    {
      if (PREDICT_TRUE (n_left > 1))
        {
          p1 = vlib_get_buffer (vm, from[1]);
          vlib_prefetch_buffer_header (p1, LOAD);
        }

      bi0 = from[0];
      b0 = vlib_get_buffer (vm, bi0);
      from++;
      n_left--;

      /* See if we're pointedly ignoring this specific error */
      if (im->pcap_drop_filter_hash
          && hash_get (im->pcap_drop_filter_hash, b0->error))
        continue;

      do_trace = (pp->pcap_sw_if_index == 0) ||
        pp->pcap_sw_if_index == vnet_buffer (b0)->sw_if_index[VLIB_RX];

      if (PREDICT_FALSE
          (do_trace == 0 && pp->filter_classify_table_index != ~0))
        {
          do_trace = vnet_is_packet_traced_inline
            (b0, pp->filter_classify_table_index, 0 /* full classify */ );
        }

      /* Trace all drops, or drops received on a specific interface */
      if (do_trace)
        {
          save_current_data = b0->current_data;
          save_current_length = b0->current_length;

          /*
           * Typically we need to rewind the buffer.
           * If l2_hdr_offset is valid, rewind to the start of the L2 header;
           * this may not be the buffer start if VLAN tags have been popped.
           * Otherwise, rewind to the buffer start and hope for the best.
           */
          if (b0->flags & VNET_BUFFER_F_L2_HDR_OFFSET_VALID)
            {
              if (b0->current_data > vnet_buffer (b0)->l2_hdr_offset)
                vlib_buffer_advance (b0,
                                     vnet_buffer (b0)->l2_hdr_offset -
                                     b0->current_data);
            }
          else if (b0->current_data > 0)
            vlib_buffer_advance (b0, (word) - b0->current_data);

          {
            vlib_buffer_t *last = b0;
            u32 error_node_index;
            int drop_string_len;
            vlib_node_t *n;
            /* Length of the error string */
            int error_string_len =
              clib_strnlen (em->error_strings_heap[b0->error], 128);

            /* Dig up the drop node */
            error_node_index = vm->node_main.node_by_error[b0->error];
            n = vlib_get_node (vm, error_node_index);

            /* Length of full drop string, w/ "nodename: " prepended */
            drop_string_len = error_string_len + vec_len (n->name) + 2;

            /* Find the last buffer in the chain */
            while (last->flags & VLIB_BUFFER_NEXT_PRESENT)
              last = vlib_get_buffer (vm, last->next_buffer);

            /*
             * Append <nodename>: <error-string> to the capture,
             * only if we can do that without allocating a new buffer.
             */
            if (PREDICT_TRUE ((last->current_data + last->current_length)
                              < (VLIB_BUFFER_DEFAULT_DATA_SIZE
                                 - drop_string_len)))
              {
                clib_memcpy_fast (last->data + last->current_data +
                                  last->current_length, n->name,
                                  vec_len (n->name));
                clib_memcpy_fast (last->data + last->current_data +
                                  last->current_length + vec_len (n->name),
                                  ": ", 2);
                clib_memcpy_fast (last->data + last->current_data +
                                  last->current_length + vec_len (n->name) +
                                  2, em->error_strings_heap[b0->error],
                                  error_string_len);
                last->current_length += drop_string_len;
                b0->flags &= ~(VLIB_BUFFER_TOTAL_LENGTH_VALID);
                pcap_add_buffer (&pp->pcap_main, vm, bi0,
                                 pp->max_bytes_per_pkt);
                last->current_length -= drop_string_len;
                b0->current_data = save_current_data;
                b0->current_length = save_current_length;
                continue;
              }
          }

          /*
           * Didn't have space in the last buffer, here's the dropped
           * packet as-is
           */
          pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);

          b0->current_data = save_current_data;
          b0->current_length = save_current_length;
        }
    }
}

#ifndef CLIB_MARCH_VARIANT
void
vnet_pcap_drop_trace_filter_add_del (u32 error_index, int is_add)
{
  vnet_interface_main_t *im = &vnet_get_main ()->interface_main;

  if (im->pcap_drop_filter_hash == 0)
    im->pcap_drop_filter_hash = hash_create (0, sizeof (uword));

  if (is_add)
    hash_set (im->pcap_drop_filter_hash, error_index, 1);
  else
    hash_unset (im->pcap_drop_filter_hash, error_index);
}
#endif /* CLIB_MARCH_VARIANT */

VLIB_NODE_FN (interface_drop) (vlib_main_t * vm,
                               vlib_node_runtime_t * node,
                               vlib_frame_t * frame)
{
  vnet_interface_main_t *im = &vnet_get_main ()->interface_main;
  vnet_pcap_t *pp = &vlib_global_main.pcap;

  if (PREDICT_FALSE (pp->pcap_drop_enable))
    pcap_drop_trace (vm, im, pp, frame);

  return interface_drop_punt (vm, node, frame, VNET_ERROR_DISPOSITION_DROP);
}

VLIB_NODE_FN (interface_punt) (vlib_main_t * vm,
                               vlib_node_runtime_t * node,
                               vlib_frame_t * frame)
{
  return interface_drop_punt (vm, node, frame, VNET_ERROR_DISPOSITION_PUNT);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (interface_drop) = {
  .name = "error-drop",
  .vector_size = sizeof (u32),
  .format_trace = format_vnet_error_trace,
  .n_next_nodes = 1,
  .next_nodes = {
    [0] = "drop",
  },
};
/* *INDENT-ON* */

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (interface_punt) = {
  .name = "error-punt",
  .vector_size = sizeof (u32),
  .format_trace = format_vnet_error_trace,
  .n_next_nodes = 1,
  .next_nodes = {
    [0] = "punt",
  },
};
/* *INDENT-ON* */

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (vnet_per_buffer_interface_output_node) = {
  .name = "interface-output",
  .vector_size = sizeof (u32),
};
/* *INDENT-ON* */

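/* interface-tx feature node: batch consecutive buffers destined to the same
   TX sw_if_index into frames for that interface's device tx node. */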
static uword
interface_tx_node_fn (vlib_main_t * vm, vlib_node_runtime_t * node,
                      vlib_frame_t * from_frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  u32 last_sw_if_index = ~0;
  vlib_frame_t *to_frame = 0;
  vnet_hw_interface_t *hw = 0;
  u32 *from, *to_next = 0;
  u32 n_left_from;

  from = vlib_frame_vector_args (from_frame);
  n_left_from = from_frame->n_vectors;
  while (n_left_from > 0)
    {
      u32 bi0;
      vlib_buffer_t *b0;
      u32 sw_if_index0;

      bi0 = from[0];
      from++;
      n_left_from--;
      b0 = vlib_get_buffer (vm, bi0);
      sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_TX];

      if (PREDICT_FALSE ((last_sw_if_index != sw_if_index0) || to_frame == 0))
        {
          if (to_frame)
            {
              hw = vnet_get_sup_hw_interface (vnm, last_sw_if_index);
              vlib_put_frame_to_node (vm, hw->tx_node_index, to_frame);
            }
          last_sw_if_index = sw_if_index0;
          hw = vnet_get_sup_hw_interface (vnm, sw_if_index0);
          to_frame = vlib_get_frame_to_node (vm, hw->tx_node_index);
          to_next = vlib_frame_vector_args (to_frame);
        }

      to_next[0] = bi0;
      to_next++;
      to_frame->n_vectors++;
    }
  vlib_put_frame_to_node (vm, hw->tx_node_index, to_frame);
  return from_frame->n_vectors;
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (interface_tx) = {
  .function = interface_tx_node_fn,
  .name = "interface-tx",
  .vector_size = sizeof (u32),
  .n_next_nodes = 1,
  .next_nodes = {
    [0] = "error-drop",
  },
};

VNET_FEATURE_ARC_INIT (interface_output, static) =
{
  .arc_name = "interface-output",
  .start_nodes = VNET_FEATURES (0),
  .last_in_arc = "interface-tx",
  .arc_index_ptr = &vnet_main.interface_main.output_feature_arc_index,
};

VNET_FEATURE_INIT (span_tx, static) = {
  .arc_name = "interface-output",
  .node_name = "span-output",
  .runs_before = VNET_FEATURES ("interface-tx"),
};

VNET_FEATURE_INIT (ipsec_if_tx, static) = {
  .arc_name = "interface-output",
  .node_name = "ipsec-if-output",
  .runs_before = VNET_FEATURES ("interface-tx"),
};

VNET_FEATURE_INIT (interface_tx, static) = {
  .arc_name = "interface-output",
  .node_name = "interface-tx",
  .runs_before = 0,
};
/* *INDENT-ON* */

#ifndef CLIB_MARCH_VARIANT
clib_error_t *
vnet_per_buffer_interface_output_hw_interface_add_del (vnet_main_t * vnm,
                                                        u32 hw_if_index,
                                                        u32 is_create)
{
  vnet_hw_interface_t *hi = vnet_get_hw_interface (vnm, hw_if_index);
  u32 next_index;

  if (hi->output_node_index == 0)
    return 0;

  next_index = vlib_node_add_next
    (vnm->vlib_main, vnet_per_buffer_interface_output_node.index,
     hi->output_node_index);
  hi->output_node_next_index = next_index;

  return 0;
}

VNET_HW_INTERFACE_ADD_DEL_FUNCTION
  (vnet_per_buffer_interface_output_hw_interface_add_del);

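/* Set the node that receives a hardware interface's packets after
   "interface-output", and wire it as a next node of the per-buffer output
   node.  A device driver would typically call this when the interface is
   created, e.g. (with a hypothetical driver tx node):

     vnet_set_interface_output_node (vnm, hw_if_index, my_driver_tx_node.index);
 */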
void
vnet_set_interface_output_node (vnet_main_t * vnm,
                                u32 hw_if_index, u32 node_index)
{
  ASSERT (node_index);
  vnet_hw_interface_t *hi = vnet_get_hw_interface (vnm, hw_if_index);
  u32 next_index = vlib_node_add_next
    (vnm->vlib_main, vnet_per_buffer_interface_output_node.index, node_index);
  hi->output_node_next_index = next_index;
  hi->output_node_index = node_index;
}
#endif /* CLIB_MARCH_VARIANT */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */