/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * interface_output.c: interface output node
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include <vnet/vnet.h>
#include <vnet/ip/icmp46_packet.h>
#include <vnet/ethernet/packet.h>
#include <vnet/ip/format.h>
#include <vnet/ip/ip4.h>
#include <vnet/ip/ip6.h>
#include <vnet/udp/udp_packet.h>
#include <vnet/feature/feature.h>
#include <vnet/classify/pcap_classify.h>
#include <vnet/interface_output.h>
#include <vppinfra/vector/mask_compare.h>
#include <vppinfra/vector/compress.h>

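/*
 * Per-packet trace record used by the interface output path: the tx
 * sw_if_index, the buffer flags at capture time, and the leading bytes
 * of packet data (sized so the record fits in 128 bytes).
 */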
typedef struct
{
  u32 sw_if_index;
  u32 flags;
  u8 data[128 - 2 * sizeof (u32)];
}
interface_output_trace_t;

#ifndef CLIB_MARCH_VARIANT
u8 *
format_vnet_interface_output_trace (u8 * s, va_list * va)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*va, vlib_main_t *);
  vlib_node_t *node = va_arg (*va, vlib_node_t *);
  interface_output_trace_t *t = va_arg (*va, interface_output_trace_t *);
  vnet_main_t *vnm = vnet_get_main ();
  vnet_sw_interface_t *si;
  u32 indent;

  if (t->sw_if_index != (u32) ~ 0)
    {
      indent = format_get_indent (s);

      if (pool_is_free_index
          (vnm->interface_main.sw_interfaces, t->sw_if_index))
        {
          /* the interface may have been deleted by the time the trace is printed */
          s = format (s, "sw_if_index: %d ", t->sw_if_index);
        }
      else
        {
          si = vnet_get_sw_interface (vnm, t->sw_if_index);
          s =
            format (s, "%U ", format_vnet_sw_interface_name, vnm, si,
                    t->flags);
        }
      s =
        format (s, "\n%U%U", format_white_space, indent,
                node->format_buffer ? node->format_buffer : format_hex_bytes,
                t->data, sizeof (t->data));
    }
  return s;
}
#endif /* CLIB_MARCH_VARIANT */

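/*
 * Add a trace record for every traced buffer in the frame, recording the
 * tx sw_if_index, the buffer flags and the start of the packet data.
 */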
static void
vnet_interface_output_trace (vlib_main_t * vm,
                             vlib_node_runtime_t * node,
                             vlib_frame_t * frame, uword n_buffers)
{
  u32 n_left, *from;

  n_left = n_buffers;
  from = vlib_frame_vector_args (frame);

  while (n_left >= 4)
    {
      u32 bi0, bi1;
      vlib_buffer_t *b0, *b1;
      interface_output_trace_t *t0, *t1;

      /* Prefetch next iteration. */
      vlib_prefetch_buffer_with_index (vm, from[2], LOAD);
      vlib_prefetch_buffer_with_index (vm, from[3], LOAD);

      bi0 = from[0];
      bi1 = from[1];

      b0 = vlib_get_buffer (vm, bi0);
      b1 = vlib_get_buffer (vm, bi1);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
        {
          t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
          t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_TX];
          t0->flags = b0->flags;
          clib_memcpy_fast (t0->data, vlib_buffer_get_current (b0),
                            sizeof (t0->data));
        }
      if (b1->flags & VLIB_BUFFER_IS_TRACED)
        {
          t1 = vlib_add_trace (vm, node, b1, sizeof (t1[0]));
          t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_TX];
          t1->flags = b1->flags;
          clib_memcpy_fast (t1->data, vlib_buffer_get_current (b1),
                            sizeof (t1->data));
        }
      from += 2;
      n_left -= 2;
    }

  while (n_left >= 1)
    {
      u32 bi0;
      vlib_buffer_t *b0;
      interface_output_trace_t *t0;

      bi0 = from[0];

      b0 = vlib_get_buffer (vm, bi0);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
        {
          t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
          t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_TX];
          t0->flags = b0->flags;
          clib_memcpy_fast (t0->data, vlib_buffer_get_current (b0),
                            sizeof (t0->data));
        }
      from += 1;
      n_left -= 1;
    }
}

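/*
 * Resolve checksum offload requests in software: compute the IPv4/IPv6
 * (and TCP/UDP) checksums for a buffer whose egress interface cannot do
 * so in hardware.
 */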
static_always_inline void
vnet_interface_output_handle_offload (vlib_main_t *vm, vlib_buffer_t *b)
{
  vnet_calc_checksums_inline (vm, b, b->flags & VNET_BUFFER_F_IS_IP4,
                              b->flags & VNET_BUFFER_F_IS_IP6);
}

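/*
 * Frame worker shared by the interface output node variants.
 * processing_level selects how much work is done per buffer: 0 only
 * counts bytes, 1 also resolves checksum offloads in software, 2
 * additionally handles the feature arc redirect and per-subinterface
 * tx counters. Returns the total byte count of the frame.
 */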
static_always_inline uword
vnet_interface_output_node_inline (vlib_main_t *vm, u32 sw_if_index,
                                   vlib_combined_counter_main_t *ccm,
                                   vlib_buffer_t **b, u32 config_index, u8 arc,
                                   u32 n_left, int processing_level)
{
  u32 n_bytes = 0;
  u32 n_bytes0, n_bytes1, n_bytes2, n_bytes3;
  u32 ti = vm->thread_index;

  while (n_left >= 8)
    {
      u32 or_flags;

      /* Prefetch next iteration. */
      vlib_prefetch_buffer_header (b[4], LOAD);
      vlib_prefetch_buffer_header (b[5], LOAD);
      vlib_prefetch_buffer_header (b[6], LOAD);
      vlib_prefetch_buffer_header (b[7], LOAD);

      if (processing_level >= 1)
        or_flags = b[0]->flags | b[1]->flags | b[2]->flags | b[3]->flags;

      /* Be grumpy about zero length buffers for benefit of
         driver tx function. */
      ASSERT (b[0]->current_length > 0);
      ASSERT (b[1]->current_length > 0);
      ASSERT (b[2]->current_length > 0);
      ASSERT (b[3]->current_length > 0);

      n_bytes += n_bytes0 = vlib_buffer_length_in_chain (vm, b[0]);
      n_bytes += n_bytes1 = vlib_buffer_length_in_chain (vm, b[1]);
      n_bytes += n_bytes2 = vlib_buffer_length_in_chain (vm, b[2]);
      n_bytes += n_bytes3 = vlib_buffer_length_in_chain (vm, b[3]);

      if (processing_level >= 2)
        {
          u32 tx_swif0, tx_swif1, tx_swif2, tx_swif3;
          tx_swif0 = vnet_buffer (b[0])->sw_if_index[VLIB_TX];
          tx_swif1 = vnet_buffer (b[1])->sw_if_index[VLIB_TX];
          tx_swif2 = vnet_buffer (b[2])->sw_if_index[VLIB_TX];
          tx_swif3 = vnet_buffer (b[3])->sw_if_index[VLIB_TX];

          /* update vlan subif tx counts, if required */
          if (PREDICT_FALSE (tx_swif0 != sw_if_index))
            vlib_increment_combined_counter (ccm, ti, tx_swif0, 1, n_bytes0);

          if (PREDICT_FALSE (tx_swif1 != sw_if_index))
            vlib_increment_combined_counter (ccm, ti, tx_swif1, 1, n_bytes1);

          if (PREDICT_FALSE (tx_swif2 != sw_if_index))
            vlib_increment_combined_counter (ccm, ti, tx_swif2, 1, n_bytes2);

          if (PREDICT_FALSE (tx_swif3 != sw_if_index))
            vlib_increment_combined_counter (ccm, ti, tx_swif3, 1, n_bytes3);

          if (PREDICT_FALSE (config_index != ~0))
            {
              vnet_buffer (b[0])->feature_arc_index = arc;
              b[0]->current_config_index = config_index;
              vnet_buffer (b[1])->feature_arc_index = arc;
              b[1]->current_config_index = config_index;
              vnet_buffer (b[2])->feature_arc_index = arc;
              b[2]->current_config_index = config_index;
              vnet_buffer (b[3])->feature_arc_index = arc;
              b[3]->current_config_index = config_index;
            }
        }

      if (processing_level >= 1 && (or_flags & VNET_BUFFER_F_OFFLOAD))
        {
          vnet_interface_output_handle_offload (vm, b[0]);
          vnet_interface_output_handle_offload (vm, b[1]);
          vnet_interface_output_handle_offload (vm, b[2]);
          vnet_interface_output_handle_offload (vm, b[3]);
        }

      n_left -= 4;
      b += 4;
    }

  while (n_left)
    {
      /* Be grumpy about zero length buffers for benefit of
         driver tx function. */
      ASSERT (b[0]->current_length > 0);

      n_bytes += n_bytes0 = vlib_buffer_length_in_chain (vm, b[0]);

      if (processing_level >= 2)
        {
          u32 tx_swif0 = vnet_buffer (b[0])->sw_if_index[VLIB_TX];

          if (PREDICT_FALSE (config_index != ~0))
            {
              vnet_buffer (b[0])->feature_arc_index = arc;
              b[0]->current_config_index = config_index;
            }

          if (PREDICT_FALSE (tx_swif0 != sw_if_index))
            vlib_increment_combined_counter (ccm, ti, tx_swif0, 1, n_bytes0);
        }

      if (processing_level >= 1)
        vnet_interface_output_handle_offload (vm, b[0]);

      n_left -= 1;
      b += 1;
    }

  return n_bytes;
}

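/*
 * Capture outgoing packets for the tx pcap trace, if enabled. When
 * called from interface-output (in_interface_output != 0) only
 * sub-interface packets matching a per-interface filter are captured
 * here; everything else is left to interface-output-template.
 */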
static_always_inline void
vnet_interface_pcap_tx_trace (vlib_main_t *vm, vlib_node_runtime_t *node,
                              vlib_frame_t *frame, int in_interface_output)
{
  vnet_main_t *vnm = vnet_get_main ();
  u32 n_left_from, *from;
  u32 sw_if_index = ~0, hw_if_index = ~0;
  vnet_pcap_t *pp = &vnm->pcap;

  if (PREDICT_TRUE (pp->pcap_tx_enable == 0))
    return;

  if (in_interface_output)
    {
      /* interface-output is called right before interface-output-template.
       * We only want to capture packets here if there is a per-interface
       * filter, in case it matches the sub-interface sw_if_index.
       * If there is no per-interface filter configured, let the
       * interface-output-template node deal with it */
      if (pp->pcap_sw_if_index == 0)
        return;
    }
  else
    {
      vnet_interface_output_runtime_t *rt = (void *) node->runtime_data;
      sw_if_index = rt->sw_if_index;
    }

  n_left_from = frame->n_vectors;
  from = vlib_frame_vector_args (frame);

  while (n_left_from > 0)
    {
      u32 bi0 = from[0];
      vlib_buffer_t *b0 = vlib_get_buffer (vm, bi0);
      from++;
      n_left_from--;

      if (in_interface_output)
        {
          const u32 sii = vnet_buffer (b0)->sw_if_index[VLIB_TX];
          if (PREDICT_FALSE (sii != sw_if_index))
            {
              const vnet_hw_interface_t *hi =
                vnet_get_sup_hw_interface (vnm, sii);
              hw_if_index = hi->sw_if_index;
              sw_if_index = sii;
            }
          if (hw_if_index == sw_if_index)
            continue; /* defer to interface-output-template */
        }

      if (vnet_is_packet_pcaped (pp, b0, sw_if_index))
        pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);
    }
}

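/*
 * Copy per-queue scalar data (e.g. the tx queue id) from the hw
 * interface output node runtime into a next frame, when such a runtime
 * exists for this thread.
 */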
static_always_inline void
store_tx_frame_scalar_data (vnet_hw_if_output_node_runtime_t *r,
                            vnet_hw_if_tx_frame_t *tf)
{
  if (r)
    clib_memcpy_fast (tf, &r->frame, sizeof (vnet_hw_if_tx_frame_t));
}

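/*
 * Enqueue buffer indices towards the interface tx node: append to the
 * pending next frame when its queue id matches, otherwise start a fresh
 * frame and store the scalar data, spilling into a second frame if the
 * indices do not fit.
 */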
static_always_inline void
enqueue_to_tx_node (vlib_main_t *vm, vlib_node_runtime_t *node,
                    vnet_hw_interface_t *hi, u32 *from, u32 n_vectors)
{
  u32 next_index = VNET_INTERFACE_OUTPUT_NEXT_TX;
  vnet_hw_if_output_node_runtime_t *r = 0;
  u32 n_free, n_copy, *to;
  vnet_hw_if_tx_frame_t *tf;
  vlib_frame_t *f;

  ASSERT (n_vectors <= VLIB_FRAME_SIZE);

  if (hi->output_node_thread_runtimes)
    r = vec_elt_at_index (hi->output_node_thread_runtimes, vm->thread_index);

  f = vlib_get_next_frame_internal (vm, node, next_index, 0);
  tf = vlib_frame_scalar_args (f);

  if (f->n_vectors > 0 && (r == 0 || tf->queue_id == r->frame.queue_id))
    {
      /* append current next frame */
      n_free = VLIB_FRAME_SIZE - f->n_vectors;
      n_copy = clib_min (n_vectors, n_free);
      n_vectors -= n_copy;
      to = vlib_frame_vector_args (f);
      to += f->n_vectors;
    }
  else
    {
      if (f->n_vectors > 0)
        {
          /* current frame doesn't fit - grab empty one */
          f = vlib_get_next_frame_internal (vm, node, next_index, 1);
          tf = vlib_frame_scalar_args (f);
        }

      /* empty frame - store scalar data */
      store_tx_frame_scalar_data (r, tf);
      to = vlib_frame_vector_args (f);
      n_free = VLIB_FRAME_SIZE;
      n_copy = n_vectors;
      n_vectors = 0;
    }

  vlib_buffer_copy_indices (to, from, n_copy);
  vlib_put_next_frame (vm, node, next_index, n_free - n_copy);

  if (n_vectors == 0)
    return;

  /* we have more indices to store, take empty frame */
  from += n_copy;
  f = vlib_get_next_frame_internal (vm, node, next_index, 1);
  store_tx_frame_scalar_data (r, vlib_frame_scalar_args (f));
  vlib_buffer_copy_indices (vlib_frame_vector_args (f), from, n_vectors);
  vlib_put_next_frame (vm, node, next_index, VLIB_FRAME_SIZE - n_vectors);
}

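/*
 * interface-output-template: per-interface output node. Counts tx
 * packets and bytes, computes checksums in software when the interface
 * lacks full tx checksum offload, honours the interface-output feature
 * arc, and hands the frame to the interface tx node (or the first
 * feature node).
 */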
VLIB_NODE_FN (vnet_interface_output_node)
(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  vnet_interface_main_t *im = &vnm->interface_main;
  vlib_combined_counter_main_t *ccm;
  vnet_hw_interface_t *hi;
  vnet_sw_interface_t *si;
  vnet_interface_output_runtime_t *rt = (void *) node->runtime_data;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE];
  u32 n_bytes, n_buffers = frame->n_vectors;
  u32 config_index = ~0;
  u32 sw_if_index = rt->sw_if_index;
  u32 next_index = VNET_INTERFACE_OUTPUT_NEXT_TX;
  u32 ti = vm->thread_index;
  u8 arc = im->output_feature_arc_index;
  int arc_or_subif = 0;
  int do_tx_offloads = 0;
  u32 *from;

  if (node->flags & VLIB_NODE_FLAG_TRACE)
    vnet_interface_output_trace (vm, node, frame, n_buffers);

  from = vlib_frame_vector_args (frame);

  if (rt->is_deleted)
    return vlib_error_drop_buffers (
      vm, node, from,
      /* buffer stride */ 1, n_buffers, VNET_INTERFACE_OUTPUT_NEXT_DROP,
      node->node_index, VNET_INTERFACE_OUTPUT_ERROR_INTERFACE_DELETED);

  vnet_interface_pcap_tx_trace (vm, node, frame, 0 /* in_interface_output */);

  vlib_get_buffers (vm, from, bufs, n_buffers);

  si = vnet_get_sw_interface (vnm, sw_if_index);
  hi = vnet_get_sup_hw_interface (vnm, sw_if_index);

  if (!(si->flags & VNET_SW_INTERFACE_FLAG_ADMIN_UP) ||
      !(hi->flags & VNET_HW_INTERFACE_FLAG_LINK_UP))
    {
      vlib_simple_counter_main_t *cm;

      cm = vec_elt_at_index (vnm->interface_main.sw_if_counters,
                             VNET_INTERFACE_COUNTER_TX_ERROR);
      vlib_increment_simple_counter (cm, ti, sw_if_index, n_buffers);

      return vlib_error_drop_buffers (
        vm, node, from,
        /* buffer stride */ 1, n_buffers, VNET_INTERFACE_OUTPUT_NEXT_DROP,
        node->node_index, VNET_INTERFACE_OUTPUT_ERROR_INTERFACE_DOWN);
    }

  /* interface-output feature arc handling */
  if (PREDICT_FALSE (vnet_have_features (arc, sw_if_index)))
    {
      vnet_feature_config_main_t *fcm;
      fcm = vnet_feature_get_config_main (arc);
      config_index = vnet_get_feature_config_index (arc, sw_if_index);
      vnet_get_config_data (&fcm->config_main, &config_index, &next_index, 0);
      arc_or_subif = 1;
    }
  else if (hash_elts (hi->sub_interface_sw_if_index_by_id))
    arc_or_subif = 1;

  ccm = im->combined_sw_if_counters + VNET_INTERFACE_COUNTER_TX;

  /* if the interface does not support all three IP4_, TCP_ and UDP_CKSUM
   * offloads, compute the checksums here before handing to the interface */
  if ((hi->caps & VNET_HW_INTERFACE_CAP_SUPPORTS_TX_CKSUM) !=
      VNET_HW_INTERFACE_CAP_SUPPORTS_TX_CKSUM)
    do_tx_offloads = 1;

  if (do_tx_offloads == 0 && arc_or_subif == 0)
    n_bytes = vnet_interface_output_node_inline (
      vm, sw_if_index, ccm, bufs, config_index, arc, n_buffers, 0);
  else if (do_tx_offloads == 1 && arc_or_subif == 0)
    n_bytes = vnet_interface_output_node_inline (
      vm, sw_if_index, ccm, bufs, config_index, arc, n_buffers, 1);
  else
    n_bytes = vnet_interface_output_node_inline (
      vm, sw_if_index, ccm, bufs, config_index, arc, n_buffers, 2);

  from = vlib_frame_vector_args (frame);
  if (PREDICT_TRUE (next_index == VNET_INTERFACE_OUTPUT_NEXT_TX))
    {
      enqueue_to_tx_node (vm, node, hi, from, frame->n_vectors);
    }
  else
    {
      vlib_buffer_enqueue_to_single_next (vm, node, from, next_index,
                                          frame->n_vectors);
    }

  /* Update main interface stats. */
  vlib_increment_combined_counter (ccm, ti, sw_if_index, n_buffers, n_bytes);
  return n_buffers;
}

VLIB_REGISTER_NODE (vnet_interface_output_node) = {
  .name = "interface-output-template",
  .vector_size = sizeof (u32),
};

/* Use buffer's sw_if_index[VLIB_TX] to choose output interface. */
VLIB_NODE_FN (vnet_per_buffer_interface_output_node) (vlib_main_t * vm,
                                                      vlib_node_runtime_t *
                                                      node,
                                                      vlib_frame_t * frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  u32 n_left_to_next, *from, *to_next;
  u32 n_left_from, next_index;

  vnet_interface_pcap_tx_trace (vm, node, frame, 1 /* in_interface_output */);

  n_left_from = frame->n_vectors;

  from = vlib_frame_vector_args (frame);
  next_index = node->cached_next_index;

  while (n_left_from > 0)
    {
      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      while (n_left_from >= 4 && n_left_to_next >= 2)
        {
          u32 bi0, bi1, next0, next1;
          vlib_buffer_t *b0, *b1;
          vnet_hw_interface_t *hi0, *hi1;

          /* Prefetch next iteration. */
          vlib_prefetch_buffer_with_index (vm, from[2], LOAD);
          vlib_prefetch_buffer_with_index (vm, from[3], LOAD);

          bi0 = from[0];
          bi1 = from[1];
          to_next[0] = bi0;
          to_next[1] = bi1;
          from += 2;
          to_next += 2;
          n_left_to_next -= 2;
          n_left_from -= 2;

          b0 = vlib_get_buffer (vm, bi0);
          b1 = vlib_get_buffer (vm, bi1);

          hi0 =
            vnet_get_sup_hw_interface (vnm,
                                       vnet_buffer (b0)->sw_if_index
                                       [VLIB_TX]);
          hi1 =
            vnet_get_sup_hw_interface (vnm,
                                       vnet_buffer (b1)->sw_if_index
                                       [VLIB_TX]);

          next0 = hi0->output_node_next_index;
          next1 = hi1->output_node_next_index;

          vlib_validate_buffer_enqueue_x2 (vm, node, next_index, to_next,
                                           n_left_to_next, bi0, bi1, next0,
                                           next1);
        }

      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0, next0;
          vlib_buffer_t *b0;
          vnet_hw_interface_t *hi0;

          bi0 = from[0];
          to_next[0] = bi0;
          from += 1;
          to_next += 1;
          n_left_to_next -= 1;
          n_left_from -= 1;

          b0 = vlib_get_buffer (vm, bi0);

          hi0 =
            vnet_get_sup_hw_interface (vnm,
                                       vnet_buffer (b0)->sw_if_index
                                       [VLIB_TX]);

          next0 = hi0->output_node_next_index;

          vlib_validate_buffer_enqueue_x1 (vm, node, next_index, to_next,
                                           n_left_to_next, bi0, next0);
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  return frame->n_vectors;
}

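/*
 * Trace record shared by the error-drop and error-punt nodes.
 * details_valid selects the output: 0 prints only the rx interface,
 * 1 an undecoded mactype, 2 the decoded IPv4/IPv6 src/dst addresses.
 */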
typedef struct vnet_error_trace_t_
{
  u32 sw_if_index;
  i8 details_valid;
  u8 is_ip6;
  u8 pad[2];
  u16 mactype;
  ip46_address_t src, dst;
} vnet_error_trace_t;

static u8 *
format_vnet_error_trace (u8 * s, va_list * va)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*va, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*va, vlib_node_t *);
  vnet_error_trace_t *t = va_arg (*va, vnet_error_trace_t *);

  /* Normal, non-catchup trace */
  if (t->details_valid == 0)
    {
      s = format (s, "rx:%U", format_vnet_sw_if_index_name,
                  vnet_get_main (), t->sw_if_index);
    }
  else if (t->details_valid == 1)
    {
      /* The trace capture code didn't understand the mactype */
      s = format (s, "mactype 0x%4x (not decoded)", t->mactype);
    }
  else if (t->details_valid == 2)
    {
      /* Dump the src/dst addresses */
      if (t->is_ip6 == 0)
        s = format (s, "IP4: %U -> %U",
                    format_ip4_address, &t->src.ip4,
                    format_ip4_address, &t->dst.ip4);
      else
        s = format (s, "IP6: %U -> %U",
                    format_ip6_address, &t->src.ip6,
                    format_ip6_address, &t->dst.ip6);
    }
  return s;
}

static void
interface_trace_buffers (vlib_main_t * vm,
                         vlib_node_runtime_t * node, vlib_frame_t * frame)
{
  u32 n_left, *buffers;

  buffers = vlib_frame_vector_args (frame);
  n_left = frame->n_vectors;

  while (n_left >= 4)
    {
      u32 bi0, bi1;
      vlib_buffer_t *b0, *b1;
      vnet_error_trace_t *t0, *t1;

      /* Prefetch next iteration. */
      vlib_prefetch_buffer_with_index (vm, buffers[2], LOAD);
      vlib_prefetch_buffer_with_index (vm, buffers[3], LOAD);

      bi0 = buffers[0];
      bi1 = buffers[1];

      b0 = vlib_get_buffer (vm, bi0);
      b1 = vlib_get_buffer (vm, bi1);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
        {
          t0 = vlib_add_trace (vm, node, b0,
                               STRUCT_OFFSET_OF (vnet_error_trace_t, pad));
          t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
          t0->details_valid = 0;
        }
      if (b1->flags & VLIB_BUFFER_IS_TRACED)
        {
          t1 = vlib_add_trace (vm, node, b1,
                               STRUCT_OFFSET_OF (vnet_error_trace_t, pad));
          t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_RX];
          t1->details_valid = 0;
        }
      buffers += 2;
      n_left -= 2;
    }

  while (n_left >= 1)
    {
      u32 bi0;
      vlib_buffer_t *b0;
      vnet_error_trace_t *t0;

      bi0 = buffers[0];

      b0 = vlib_get_buffer (vm, bi0);

      if (b0->flags & VLIB_BUFFER_IS_TRACED)
        {
          t0 = vlib_add_trace (vm, node, b0,
                               STRUCT_OFFSET_OF (vnet_error_trace_t, pad));
          t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
          t0->details_valid = 0;
        }
      buffers += 1;
      n_left -= 1;
    }
}

typedef enum
{
  VNET_ERROR_DISPOSITION_DROP,
  VNET_ERROR_DISPOSITION_PUNT,
  VNET_ERROR_N_DISPOSITION,
} vnet_error_disposition_t;

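/*
 * Best-effort trace for packets that were not traced upstream: rewind to
 * the L2 header when its offset is known to be valid, record the
 * ethertype and, for IPv4/IPv6, the source and destination addresses.
 */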
static void
drop_catchup_trace (vlib_main_t * vm,
                    vlib_node_runtime_t * node, vlib_buffer_t * b)
{
  /* Can we safely rewind the buffer? If not, fagedaboudit */
  if (b->flags & VNET_BUFFER_F_L2_HDR_OFFSET_VALID)
    {
      vnet_error_trace_t *t;
      ip4_header_t *ip4;
      ip6_header_t *ip6;
      ethernet_header_t *eh;
      i16 delta;

      t = vlib_add_trace (vm, node, b, sizeof (*t));
      delta = vnet_buffer (b)->l2_hdr_offset - b->current_data;
      vlib_buffer_advance (b, delta);

      eh = vlib_buffer_get_current (b);
      /* Save mactype */
      t->mactype = clib_net_to_host_u16 (eh->type);
      t->details_valid = 1;
      switch (t->mactype)
        {
        case ETHERNET_TYPE_IP4:
          ip4 = (void *) (eh + 1);
          t->details_valid = 2;
          t->is_ip6 = 0;
          t->src.ip4.as_u32 = ip4->src_address.as_u32;
          t->dst.ip4.as_u32 = ip4->dst_address.as_u32;
          break;

        case ETHERNET_TYPE_IP6:
          ip6 = (void *) (eh + 1);
          t->details_valid = 2;
          t->is_ip6 = 1;
          clib_memcpy_fast (t->src.as_u8, ip6->src_address.as_u8,
                            sizeof (ip6_address_t));
          clib_memcpy_fast (t->dst.as_u8, ip6->dst_address.as_u8,
                            sizeof (ip6_address_t));
          break;

        default:
          /* Dunno, do nothing, leave details_valid alone */
          break;
        }
      /* Restore current data (probably unnecessary) */
      vlib_buffer_advance (b, -delta);
    }
}

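/*
 * Shared worker for the error-drop and error-punt nodes: optionally
 * starts tracing here, bumps the per-interface (and super-interface)
 * drop/punt counters in runs of equal rx sw_if_index, then sends all
 * buffers to the single "drop" or "punt" next node.
 */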
static_always_inline uword
interface_drop_punt (vlib_main_t * vm,
                     vlib_node_runtime_t * node,
                     vlib_frame_t * frame,
                     vnet_error_disposition_t disposition)
{
  u32 *from, n_left, thread_index, *sw_if_index;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b;
  u32 sw_if_indices[VLIB_FRAME_SIZE];
  vlib_simple_counter_main_t *cm;
  u16 nexts[VLIB_FRAME_SIZE];
  u32 n_trace;
  vnet_main_t *vnm;

  vnm = vnet_get_main ();
  thread_index = vm->thread_index;
  from = vlib_frame_vector_args (frame);
  n_left = frame->n_vectors;
  b = bufs;
  sw_if_index = sw_if_indices;

  vlib_get_buffers (vm, from, bufs, n_left);

  /* "trace add error-drop NNN?" */
  if (PREDICT_FALSE ((n_trace = vlib_get_trace_count (vm, node))))
    {
      /* If pkts aren't otherwise traced... */
      if ((node->flags & VLIB_NODE_FLAG_TRACE) == 0)
        {
          /* Trace them from here */
          node->flags |= VLIB_NODE_FLAG_TRACE;
          while (n_trace && n_left)
            {
              if (PREDICT_TRUE
                  (vlib_trace_buffer (vm, node, 0 /* next_index */ , b[0],
                                      0 /* follow chain */ )))
                {
                  /*
                   * Here we have a wireshark dissector problem.
                   * Packets may be well-formed, or not. We
                   * must not blow chunks in any case.
                   *
                   * Try to produce trace records which will help
                   * folks understand what's going on.
                   */
                  drop_catchup_trace (vm, node, b[0]);
                  n_trace--;
                }
              n_left--;
              b++;
            }
        }

      vlib_set_trace_count (vm, node, n_trace);
      b = bufs;
      n_left = frame->n_vectors;
    }

  if (node->flags & VLIB_NODE_FLAG_TRACE)
    interface_trace_buffers (vm, node, frame);

  /* All going to drop regardless, this is just a counting exercise */
  clib_memset (nexts, 0, sizeof (nexts));

  cm = vec_elt_at_index (vnm->interface_main.sw_if_counters,
                         (disposition == VNET_ERROR_DISPOSITION_PUNT
                          ? VNET_INTERFACE_COUNTER_PUNT
                          : VNET_INTERFACE_COUNTER_DROP));

  /* collect the array of interfaces first ... */
  while (n_left >= 4)
    {
      if (n_left >= 12)
        {
          /* Prefetch 8 ahead - there's not much going on in each iteration */
          vlib_prefetch_buffer_header (b[4], LOAD);
          vlib_prefetch_buffer_header (b[5], LOAD);
          vlib_prefetch_buffer_header (b[6], LOAD);
          vlib_prefetch_buffer_header (b[7], LOAD);
        }
      sw_if_index[0] = vnet_buffer (b[0])->sw_if_index[VLIB_RX];
      sw_if_index[1] = vnet_buffer (b[1])->sw_if_index[VLIB_RX];
      sw_if_index[2] = vnet_buffer (b[2])->sw_if_index[VLIB_RX];
      sw_if_index[3] = vnet_buffer (b[3])->sw_if_index[VLIB_RX];

      sw_if_index += 4;
      n_left -= 4;
      b += 4;
    }
  while (n_left)
    {
      sw_if_index[0] = vnet_buffer (b[0])->sw_if_index[VLIB_RX];

      sw_if_index += 1;
      n_left -= 1;
      b += 1;
    }

  /* ... then count against them in blocks */
  n_left = frame->n_vectors;

  while (n_left)
    {
      vnet_sw_interface_t *sw_if0;
      u16 off, count;

      off = frame->n_vectors - n_left;

      sw_if_index = sw_if_indices + off;

      count = clib_count_equal_u32 (sw_if_index, n_left);
      n_left -= count;

      vlib_increment_simple_counter (cm, thread_index, sw_if_index[0], count);

      /* Increment super-interface drop/punt counters for
         sub-interfaces. */
      sw_if0 = vnet_get_sw_interface (vnm, sw_if_index[0]);
      if (sw_if0->sup_sw_if_index != sw_if_index[0])
        vlib_increment_simple_counter
          (cm, thread_index, sw_if0->sup_sw_if_index, count);
    }

  vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);

  return frame->n_vectors;
}

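/*
 * Add dropped packets to the drop pcap capture. Where possible the
 * packet is rewound to its L2 header and "<node-name>: <error-string>"
 * is appended in place (and removed again afterwards) so the drop
 * reason is visible in the capture.
 */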
static inline void
pcap_drop_trace (vlib_main_t * vm,
                 vnet_interface_main_t * im,
                 vnet_pcap_t * pp, vlib_frame_t * f)
{
  u32 *from;
  u32 n_left = f->n_vectors;
  vlib_buffer_t *b0, *p1;
  u32 bi0;
  i16 save_current_data;
  u16 save_current_length;
  vlib_error_main_t *em = &vm->error_main;

  from = vlib_frame_vector_args (f);

  while (n_left > 0)
    {
      if (PREDICT_TRUE (n_left > 1))
        {
          p1 = vlib_get_buffer (vm, from[1]);
          vlib_prefetch_buffer_header (p1, LOAD);
        }

      bi0 = from[0];
      b0 = vlib_get_buffer (vm, bi0);
      from++;
      n_left--;

      /* See if we're pointedly ignoring this specific error */
      if (im->pcap_drop_filter_hash
          && hash_get (im->pcap_drop_filter_hash, b0->error))
        continue;

      if (!vnet_is_packet_pcaped (pp, b0, ~0))
        continue;               /* not matching, skip */

      /* Trace all drops, or drops received on a specific interface */
      save_current_data = b0->current_data;
      save_current_length = b0->current_length;

      /*
       * Typically, we'll need to rewind the buffer.
       * If l2_hdr_offset is valid, make sure to rewind to the start of
       * the L2 header. This may not be the buffer start in case we pop-ed
       * vlan tags.
       * Otherwise, rewind to buffer start and hope for the best.
       */
      if (b0->flags & VNET_BUFFER_F_L2_HDR_OFFSET_VALID)
        {
          if (b0->current_data > vnet_buffer (b0)->l2_hdr_offset)
            vlib_buffer_advance (b0, vnet_buffer (b0)->l2_hdr_offset -
                                       b0->current_data);
        }
      else if (b0->current_data > 0)
        {
          vlib_buffer_advance (b0, (word) -b0->current_data);
        }

      {
        vlib_buffer_t *last = b0;
        u32 error_node_index;
        int drop_string_len;
        vlib_node_t *n;
        /* Length of the error string */
        int error_string_len =
          clib_strnlen (em->counters_heap[b0->error].name, 128);

        /* Dig up the drop node */
        error_node_index = vm->node_main.node_by_error[b0->error];
        n = vlib_get_node (vm, error_node_index);

        /* Length of full drop string, w/ "nodename: " prepended */
        drop_string_len = error_string_len + vec_len (n->name) + 2;

        /* Find the last buffer in the chain */
        while (last->flags & VLIB_BUFFER_NEXT_PRESENT)
          last = vlib_get_buffer (vm, last->next_buffer);

        /*
         * Append <nodename>: <error-string> to the capture,
         * only if we can do that without allocating a new buffer.
         */
        if (PREDICT_TRUE ((last->current_data + last->current_length) <
                          (VLIB_BUFFER_DEFAULT_DATA_SIZE - drop_string_len)))
          {
            clib_memcpy_fast (last->data + last->current_data +
                              last->current_length,
                              n->name, vec_len (n->name));
            clib_memcpy_fast (last->data + last->current_data +
                              last->current_length + vec_len (n->name),
                              ": ", 2);
            clib_memcpy_fast (last->data + last->current_data +
                              last->current_length + vec_len (n->name) + 2,
                              em->counters_heap[b0->error].name,
                              error_string_len);
            last->current_length += drop_string_len;
            b0->flags &= ~(VLIB_BUFFER_TOTAL_LENGTH_VALID);
            pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);
            last->current_length -= drop_string_len;
            b0->current_data = save_current_data;
            b0->current_length = save_current_length;
            continue;
          }
      }

      /*
       * Didn't have space in the last buffer, here's the dropped
       * packet as-is
       */
      pcap_add_buffer (&pp->pcap_main, vm, bi0, pp->max_bytes_per_pkt);

      b0->current_data = save_current_data;
      b0->current_length = save_current_length;
    }
}

#ifndef CLIB_MARCH_VARIANT
void
vnet_pcap_drop_trace_filter_add_del (u32 error_index, int is_add)
{
  vnet_interface_main_t *im = &vnet_get_main ()->interface_main;

  if (im->pcap_drop_filter_hash == 0)
    im->pcap_drop_filter_hash = hash_create (0, sizeof (uword));

  if (is_add)
    hash_set (im->pcap_drop_filter_hash, error_index, 1);
  else
    hash_unset (im->pcap_drop_filter_hash, error_index);
}
#endif /* CLIB_MARCH_VARIANT */

VLIB_NODE_FN (interface_drop) (vlib_main_t * vm,
                               vlib_node_runtime_t * node,
                               vlib_frame_t * frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  vnet_interface_main_t *im = &vnet_get_main ()->interface_main;
  vnet_pcap_t *pp = &vnm->pcap;

  if (PREDICT_FALSE (pp->pcap_drop_enable))
    pcap_drop_trace (vm, im, pp, frame);

  return interface_drop_punt (vm, node, frame, VNET_ERROR_DISPOSITION_DROP);
}

VLIB_NODE_FN (interface_punt) (vlib_main_t * vm,
                               vlib_node_runtime_t * node,
                               vlib_frame_t * frame)
{
  return interface_drop_punt (vm, node, frame, VNET_ERROR_DISPOSITION_PUNT);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (interface_drop) = {
  .name = "error-drop",
  .vector_size = sizeof (u32),
  .format_trace = format_vnet_error_trace,
  .flags = VLIB_NODE_FLAG_TRACE_SUPPORTED,
  .n_next_nodes = 1,
  .next_nodes = {
    [0] = "drop",
  },
};
/* *INDENT-ON* */

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (interface_punt) = {
  .name = "error-punt",
  .vector_size = sizeof (u32),
  .format_trace = format_vnet_error_trace,
  .flags = VLIB_NODE_FLAG_TRACE_SUPPORTED,
  .n_next_nodes = 1,
  .next_nodes = {
    [0] = "punt",
  },
};
/* *INDENT-ON* */

VLIB_REGISTER_NODE (vnet_per_buffer_interface_output_node) = {
  .name = "interface-output",
  .vector_size = sizeof (u32),
};

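/*
 * interface-output-arc-end: last node on the interface-output feature
 * arc. Buffers in one frame may be headed to different interfaces, so
 * they are grouped by tx sw_if_index with a mask-compare/compress pass
 * and each group is enqueued to its interface tx node with the proper
 * per-queue scalar data.
 */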
VLIB_NODE_FN (vnet_interface_output_arc_end_node)
(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *frame)
{
  vnet_main_t *vnm = vnet_get_main ();
  vnet_interface_main_t *im = &vnm->interface_main;
  vnet_hw_if_output_node_runtime_t *r = 0;
  vnet_hw_interface_t *hi;
  vnet_hw_if_tx_frame_t *tf;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
  u32 sw_if_indices[VLIB_FRAME_SIZE], *sw_if_index = sw_if_indices;
  u64 used_elts[VLIB_FRAME_SIZE / 64] = {};
  u64 mask[VLIB_FRAME_SIZE / 64] = {};
  u32 *tmp, *from, n_left, n_free, n_comp, *to, swif, off;
  u16 next_index;
  vlib_frame_t *f;

  from = vlib_frame_vector_args (frame);
  n_left = frame->n_vectors;
  vlib_get_buffers (vm, from, bufs, n_left);

  while (n_left >= 8)
    {
      vlib_prefetch_buffer_header (b[4], LOAD);
      vlib_prefetch_buffer_header (b[5], LOAD);
      vlib_prefetch_buffer_header (b[6], LOAD);
      vlib_prefetch_buffer_header (b[7], LOAD);
      sw_if_index[0] = vnet_buffer (b[0])->sw_if_index[VLIB_TX];
      sw_if_index[1] = vnet_buffer (b[1])->sw_if_index[VLIB_TX];
      sw_if_index[2] = vnet_buffer (b[2])->sw_if_index[VLIB_TX];
      sw_if_index[3] = vnet_buffer (b[3])->sw_if_index[VLIB_TX];

      b += 4;
      sw_if_index += 4;
      n_left -= 4;
    }

  while (n_left)
    {
      sw_if_index[0] = vnet_buffer (b[0])->sw_if_index[VLIB_TX];
      b++;
      sw_if_index++;
      n_left--;
    }

  n_left = frame->n_vectors;
  swif = sw_if_indices[0];
  off = 0;

  /* a bit ugly but it allows us to reuse stack space for temporary store
   * which may also improve memory latency */
  tmp = (u32 *) bufs;

more:
  next_index = vec_elt (im->if_out_arc_end_next_index_by_sw_if_index, swif);
  hi = vnet_get_sup_hw_interface (vnm, swif);
  if (hi->output_node_thread_runtimes)
    r = vec_elt_at_index (hi->output_node_thread_runtimes, vm->thread_index);
  f = vlib_get_next_frame_internal (vm, node, next_index, 0);
  tf = vlib_frame_scalar_args (f);

  if (f->n_vectors > 0 && (r == 0 || r->frame.queue_id == tf->queue_id))
    {
      /* append frame */
      n_free = VLIB_FRAME_SIZE - f->n_vectors;
      if (n_free >= f->n_vectors)
        to = (u32 *) vlib_frame_vector_args (f) + f->n_vectors;
      else
        to = tmp;
    }
  else
    {
      if (f->n_vectors > 0)
        {
          /* current frame doesn't fit - grab empty one */
          f = vlib_get_next_frame_internal (vm, node, next_index, 1);
          tf = vlib_frame_scalar_args (f);
        }

      /* empty frame - store scalar data */
      store_tx_frame_scalar_data (r, tf);
      n_free = VLIB_FRAME_SIZE;
      to = vlib_frame_vector_args (f);
    }

  /* compare and compress based on comparison mask */
  clib_mask_compare_u32 (swif, sw_if_indices, mask, frame->n_vectors);
  n_comp = clib_compress_u32 (to, from, mask, frame->n_vectors);

  if (tmp != to)
    {
      /* indices already written to frame, just close it */
      vlib_put_next_frame (vm, node, next_index, n_free - n_comp);
    }
  else if (n_free >= n_comp)
    {
      /* enough space in the existing frame */
      to = (u32 *) vlib_frame_vector_args (f) + f->n_vectors;
      vlib_buffer_copy_indices (to, tmp, n_comp);
      vlib_put_next_frame (vm, node, next_index, n_free - n_comp);
    }
  else
    {
      /* full frame */
      to = (u32 *) vlib_frame_vector_args (f) + f->n_vectors;
      vlib_buffer_copy_indices (to, tmp, n_free);
      vlib_put_next_frame (vm, node, next_index, 0);

      /* second frame */
      u32 n_frame2 = n_comp - n_free;
      f = vlib_get_next_frame_internal (vm, node, next_index, 1);
      to = vlib_frame_vector_args (f);
      vlib_buffer_copy_indices (to, tmp + n_free, n_frame2);
      tf = vlib_frame_scalar_args (f);
      store_tx_frame_scalar_data (r, tf);
      vlib_put_next_frame (vm, node, next_index, VLIB_FRAME_SIZE - n_frame2);
    }

  n_left -= n_comp;
  if (n_left)
    {
      /* store comparison mask so we can find next unused element */
      for (int i = 0; i < ARRAY_LEN (used_elts); i++)
        used_elts[i] |= mask[i];

      /* find the first unused sw_if_index by scanning through the
       * used_elts bitmap */
      while (PREDICT_FALSE (used_elts[off] == ~0))
        off++;

      swif =
        sw_if_indices[(off << 6) + count_trailing_zeros (~used_elts[off])];
      goto more;
    }

  return frame->n_vectors;
}

VLIB_REGISTER_NODE (vnet_interface_output_arc_end_node) = {
  .name = "interface-output-arc-end",
  .vector_size = sizeof (u32),
  .n_next_nodes = 1,
  .next_nodes = {
    [0] = "error-drop",
  },
};

VNET_FEATURE_ARC_INIT (interface_output, static) = {
  .arc_name = "interface-output",
  .start_nodes = VNET_FEATURES (0),
  .last_in_arc = "interface-output-arc-end",
  .arc_index_ptr = &vnet_main.interface_main.output_feature_arc_index,
};

VNET_FEATURE_INIT (span_tx, static) = {
  .arc_name = "interface-output",
  .node_name = "span-output",
  .runs_before = VNET_FEATURES ("interface-output-arc-end"),
};

VNET_FEATURE_INIT (ipsec_if_tx, static) = {
  .arc_name = "interface-output",
  .node_name = "ipsec-if-output",
  .runs_before = VNET_FEATURES ("interface-output-arc-end"),
};

VNET_FEATURE_INIT (interface_output_arc_end, static) = {
  .arc_name = "interface-output",
  .node_name = "interface-output-arc-end",
  .runs_before = 0,
};

#ifndef CLIB_MARCH_VARIANT
clib_error_t *
vnet_per_buffer_interface_output_hw_interface_add_del (vnet_main_t * vnm,
                                                        u32 hw_if_index,
                                                        u32 is_create)
{
  vnet_hw_interface_t *hi = vnet_get_hw_interface (vnm, hw_if_index);
  u32 next_index;

  if (hi->output_node_index == 0)
    return 0;

  next_index = vlib_node_add_next
    (vnm->vlib_main, vnet_per_buffer_interface_output_node.index,
     hi->output_node_index);
  hi->output_node_next_index = next_index;

  return 0;
}

VNET_HW_INTERFACE_ADD_DEL_FUNCTION
  (vnet_per_buffer_interface_output_hw_interface_add_del);

void
vnet_set_interface_output_node (vnet_main_t * vnm,
                                u32 hw_if_index, u32 node_index)
{
  ASSERT (node_index);
  vnet_hw_interface_t *hi = vnet_get_hw_interface (vnm, hw_if_index);
  u32 next_index = vlib_node_add_next
    (vnm->vlib_main, vnet_per_buffer_interface_output_node.index, node_index);
  hi->output_node_next_index = next_index;
  hi->output_node_index = node_index;
}
#endif /* CLIB_MARCH_VARIANT */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */