/*
 *------------------------------------------------------------------
 * Copyright (c) 2017 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *------------------------------------------------------------------
 */

#define _GNU_SOURCE
#include <stdint.h>
#include <vnet/llc/llc.h>
#include <vnet/snap/snap.h>
#include <vnet/bonding/node.h>

#ifndef CLIB_MARCH_VARIANT
bond_main_t bond_main;
#endif /* CLIB_MARCH_VARIANT */

#define foreach_bond_input_error                          \
  _(NONE, "no error")                                     \
  _(IF_DOWN, "interface down")                            \
  _(PASSIVE_IF, "traffic received on passive interface")  \
  _(PASS_THRU, "pass through (CDP, LLDP, slow protocols)")

typedef enum
{
#define _(f,s) BOND_INPUT_ERROR_##f,
  foreach_bond_input_error
#undef _
  BOND_INPUT_N_ERROR,
} bond_input_error_t;

static char *bond_input_error_strings[] = {
#define _(n,s) s,
  foreach_bond_input_error
#undef _
};

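/* Packet trace formatter: prints source/destination MAC addresses and the
 * member -> bond sw_if_index rewrite recorded for the packet. */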
static u8 *
format_bond_input_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  bond_packet_trace_t *t = va_arg (*args, bond_packet_trace_t *);

  s = format (s, "src %U, dst %U, %U -> %U",
              format_ethernet_address, t->ethernet.src_address,
              format_ethernet_address, t->ethernet.dst_address,
              format_vnet_sw_if_index_name, vnet_get_main (),
              t->sw_if_index,
              format_vnet_sw_if_index_name, vnet_get_main (),
              t->bond_sw_if_index);

  return s;
}

typedef enum
{
  BOND_INPUT_NEXT_DROP,
  BOND_INPUT_N_NEXT,
} bond_output_next_t;

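/* A frame is treated as CDP if it carries the CDP ethertype or an LLC/SNAP
 * encapsulation with SAP 0xAA, control 0x03, the Cisco OUI 00-00-0C and
 * SNAP protocol 0x2000. */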
static_always_inline u8
packet_is_cdp (ethernet_header_t * eth)
{
  llc_header_t *llc;
  snap_header_t *snap;

  llc = (llc_header_t *) (eth + 1);
  snap = (snap_header_t *) (llc + 1);

  return ((eth->type == htons (ETHERNET_TYPE_CDP)) ||
          ((llc->src_sap == 0xAA) && (llc->control == 0x03) &&
           (snap->protocol == htons (0x2000)) &&
           (snap->oui[0] == 0) && (snap->oui[1] == 0) &&
           (snap->oui[2] == 0x0C)));
}

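/* Count the packet for the bond interface and rewrite the buffer's RX
 * sw_if_index from the member to the bond interface. Slow-protocol (LACP),
 * CDP and LLDP frames are left on the member interface and counted as
 * pass-through instead. */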
static inline void
bond_sw_if_idx_rewrite (vlib_main_t * vm, vlib_node_runtime_t * node,
                        vlib_buffer_t * b, u32 bond_sw_if_index,
                        u32 * n_rx_packets, u32 * n_rx_bytes)
{
  u16 *ethertype_p, ethertype;
  ethernet_vlan_header_t *vlan;
  ethernet_header_t *eth = (ethernet_header_t *) vlib_buffer_get_current (b);

  (*n_rx_packets)++;
  *n_rx_bytes += b->current_length;
  ethertype = clib_mem_unaligned (&eth->type, u16);
  if (!ethernet_frame_is_tagged (ntohs (ethertype)))
    {
      // Let some layer2 packets pass through.
      if (PREDICT_TRUE ((ethertype != htons (ETHERNET_TYPE_SLOW_PROTOCOLS))
                        && !packet_is_cdp (eth)
                        && (ethertype != htons (ETHERNET_TYPE_802_1_LLDP))))
        {
          /* Change the physical interface to bond interface */
          vnet_buffer (b)->sw_if_index[VLIB_RX] = bond_sw_if_index;
          return;
        }
    }
  else
    {
      vlan = (void *) (eth + 1);
      ethertype_p = &vlan->type;
      ethertype = clib_mem_unaligned (ethertype_p, u16);
      if (ethertype == ntohs (ETHERNET_TYPE_VLAN))
        {
          vlan++;
          ethertype_p = &vlan->type;
        }
      ethertype = clib_mem_unaligned (ethertype_p, u16);
      if (PREDICT_TRUE ((ethertype != htons (ETHERNET_TYPE_SLOW_PROTOCOLS))
                        && (ethertype != htons (ETHERNET_TYPE_CDP))
                        && (ethertype != htons (ETHERNET_TYPE_802_1_LLDP))))
        {
          /* Change the physical interface to bond interface */
          vnet_buffer (b)->sw_if_index[VLIB_RX] = bond_sw_if_index;
          return;
        }
    }

  vlib_error_count (vm, node->node_index, BOND_INPUT_ERROR_PASS_THRU, 1);
  return;
}

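/* Resolve the next node index, bond sw_if_index and error for a packet
 * received on member_sw_if_index. The result is cached through
 * last_member_sw_if_index so consecutive packets from the same member skip
 * the lookup. Traffic on a bond that is admin down, or on a non-active
 * member in active-backup mode, is flagged with an error. */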
static inline void
bond_update_next (vlib_main_t * vm, vlib_node_runtime_t * node,
                  u32 * last_member_sw_if_index, u32 member_sw_if_index,
                  u32 * bond_sw_if_index, vlib_buffer_t * b,
                  u32 * next_index, vlib_error_t * error)
{
  member_if_t *mif;
  bond_if_t *bif;

  *next_index = BOND_INPUT_NEXT_DROP;
  *error = 0;

  if (PREDICT_TRUE (*last_member_sw_if_index == member_sw_if_index))
    goto next;

  *last_member_sw_if_index = member_sw_if_index;

  mif = bond_get_member_by_sw_if_index (member_sw_if_index);
  ALWAYS_ASSERT (mif);

  bif = bond_get_bond_if_by_dev_instance (mif->bif_dev_instance);

  ALWAYS_ASSERT (bif);
  ASSERT (vec_len (bif->members));

  if (PREDICT_FALSE (bif->admin_up == 0))
    {
      *bond_sw_if_index = member_sw_if_index;
      *error = node->errors[BOND_INPUT_ERROR_IF_DOWN];
    }

  if (PREDICT_FALSE ((bif->mode == BOND_MODE_ACTIVE_BACKUP) &&
                     vec_len (bif->active_members) &&
                     (member_sw_if_index != bif->active_members[0])))
    {
      *bond_sw_if_index = member_sw_if_index;
      *error = node->errors[BOND_INPUT_ERROR_PASSIVE_IF];
      return;
    }

  *bond_sw_if_index = bif->sw_if_index;

next:
  vnet_feature_next (next_index, b);
}

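/* Advance the feature arc for four buffers at once; used when the cached
 * next index is already valid but additional features follow bond-input. */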
static_always_inline void
bond_update_next_x4 (vlib_buffer_t * b0, vlib_buffer_t * b1,
                     vlib_buffer_t * b2, vlib_buffer_t * b3)
{
  u32 tmp0, tmp1, tmp2, tmp3;

  tmp0 = tmp1 = tmp2 = tmp3 = BOND_INPUT_NEXT_DROP;
  vnet_feature_next (&tmp0, b0);
  vnet_feature_next (&tmp1, b1);
  vnet_feature_next (&tmp2, b2);
  vnet_feature_next (&tmp3, b3);
}

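/* bond-input node: runs on the device-input feature arc of member
 * interfaces and rewrites the RX sw_if_index to the bond interface before
 * the packets reach ethernet-input. */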
VLIB_NODE_FN (bond_input_node) (vlib_main_t * vm,
                                vlib_node_runtime_t * node,
                                vlib_frame_t * frame)
{
  u16 thread_index = vm->thread_index;
  u32 *from, n_left;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b;
  u32 sw_if_indices[VLIB_FRAME_SIZE], *sw_if_index;
  u16 nexts[VLIB_FRAME_SIZE], *next;
  u32 last_member_sw_if_index = ~0;
  u32 bond_sw_if_index = 0;
  vlib_error_t error = 0;
  u32 next_index = 0;
  u32 n_rx_bytes = 0, n_rx_packets = 0;

  /* Vector of buffer / pkt indices we're supposed to process */
  from = vlib_frame_vector_args (frame);

  /* Number of buffers / pkts */
  n_left = frame->n_vectors;

  vlib_get_buffers (vm, from, bufs, n_left);

  b = bufs;
  next = nexts;
  sw_if_index = sw_if_indices;

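  /* Quad loop: x accumulates the XOR of each packet's RX sw_if_index
   * against the last member seen; x == 0 means all four packets arrived on
   * the same member, so the cached next_index / bond_sw_if_index can be
   * reused without calling bond_update_next. */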
  while (n_left >= 4)
    {
      u32 x = 0;
      /* Prefetch next iteration */
      if (PREDICT_TRUE (n_left >= 16))
        {
          vlib_prefetch_buffer_data (b[8], LOAD);
          vlib_prefetch_buffer_data (b[9], LOAD);
          vlib_prefetch_buffer_data (b[10], LOAD);
          vlib_prefetch_buffer_data (b[11], LOAD);

          vlib_prefetch_buffer_header (b[12], LOAD);
          vlib_prefetch_buffer_header (b[13], LOAD);
          vlib_prefetch_buffer_header (b[14], LOAD);
          vlib_prefetch_buffer_header (b[15], LOAD);
        }

      sw_if_index[0] = vnet_buffer (b[0])->sw_if_index[VLIB_RX];
      sw_if_index[1] = vnet_buffer (b[1])->sw_if_index[VLIB_RX];
      sw_if_index[2] = vnet_buffer (b[2])->sw_if_index[VLIB_RX];
      sw_if_index[3] = vnet_buffer (b[3])->sw_if_index[VLIB_RX];

      x |= sw_if_index[0] ^ last_member_sw_if_index;
      x |= sw_if_index[1] ^ last_member_sw_if_index;
      x |= sw_if_index[2] ^ last_member_sw_if_index;
      x |= sw_if_index[3] ^ last_member_sw_if_index;

      if (PREDICT_TRUE (x == 0))
        {
          /*
           * Optimize to call update_next only if there is a feature arc
           * after bond-input. Test feature count greater than 1 because
           * bond-input itself is a feature arc for this member interface.
           */
          ASSERT ((vnet_buffer (b[0])->feature_arc_index ==
                   vnet_buffer (b[1])->feature_arc_index) &&
                  (vnet_buffer (b[0])->feature_arc_index ==
                   vnet_buffer (b[2])->feature_arc_index) &&
                  (vnet_buffer (b[0])->feature_arc_index ==
                   vnet_buffer (b[3])->feature_arc_index));
          if (PREDICT_FALSE (vnet_get_feature_count
                             (vnet_buffer (b[0])->feature_arc_index,
                              last_member_sw_if_index) > 1))
            bond_update_next_x4 (b[0], b[1], b[2], b[3]);

          next[0] = next[1] = next[2] = next[3] = next_index;
          if (next_index == BOND_INPUT_NEXT_DROP)
            {
              b[0]->error = error;
              b[1]->error = error;
              b[2]->error = error;
              b[3]->error = error;
            }
          else
            {
              bond_sw_if_idx_rewrite (vm, node, b[0], bond_sw_if_index,
                                      &n_rx_packets, &n_rx_bytes);
              bond_sw_if_idx_rewrite (vm, node, b[1], bond_sw_if_index,
                                      &n_rx_packets, &n_rx_bytes);
              bond_sw_if_idx_rewrite (vm, node, b[2], bond_sw_if_index,
                                      &n_rx_packets, &n_rx_bytes);
              bond_sw_if_idx_rewrite (vm, node, b[3], bond_sw_if_index,
                                      &n_rx_packets, &n_rx_bytes);
            }
        }
      else
        {
          bond_update_next (vm, node, &last_member_sw_if_index,
                            sw_if_index[0], &bond_sw_if_index, b[0],
                            &next_index, &error);
          next[0] = next_index;
          if (next_index == BOND_INPUT_NEXT_DROP)
            b[0]->error = error;
          else
            bond_sw_if_idx_rewrite (vm, node, b[0], bond_sw_if_index,
                                    &n_rx_packets, &n_rx_bytes);

          bond_update_next (vm, node, &last_member_sw_if_index,
                            sw_if_index[1], &bond_sw_if_index, b[1],
                            &next_index, &error);
          next[1] = next_index;
          if (next_index == BOND_INPUT_NEXT_DROP)
            b[1]->error = error;
          else
            bond_sw_if_idx_rewrite (vm, node, b[1], bond_sw_if_index,
                                    &n_rx_packets, &n_rx_bytes);

          bond_update_next (vm, node, &last_member_sw_if_index,
                            sw_if_index[2], &bond_sw_if_index, b[2],
                            &next_index, &error);
          next[2] = next_index;
          if (next_index == BOND_INPUT_NEXT_DROP)
            b[2]->error = error;
          else
            bond_sw_if_idx_rewrite (vm, node, b[2], bond_sw_if_index,
                                    &n_rx_packets, &n_rx_bytes);

          bond_update_next (vm, node, &last_member_sw_if_index,
                            sw_if_index[3], &bond_sw_if_index, b[3],
                            &next_index, &error);
          next[3] = next_index;
          if (next_index == BOND_INPUT_NEXT_DROP)
            b[3]->error = error;
          else
            bond_sw_if_idx_rewrite (vm, node, b[3], bond_sw_if_index,
                                    &n_rx_packets, &n_rx_bytes);
        }

      /* next */
      n_left -= 4;
      b += 4;
      sw_if_index += 4;
      next += 4;
    }

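  /* Single loop for any remaining buffers. */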
  while (n_left)
    {
      sw_if_index[0] = vnet_buffer (b[0])->sw_if_index[VLIB_RX];
      bond_update_next (vm, node, &last_member_sw_if_index, sw_if_index[0],
                        &bond_sw_if_index, b[0], &next_index, &error);
      next[0] = next_index;
      if (next_index == BOND_INPUT_NEXT_DROP)
        b[0]->error = error;
      else
        bond_sw_if_idx_rewrite (vm, node, b[0], bond_sw_if_index,
                                &n_rx_packets, &n_rx_bytes);

      /* next */
      n_left -= 1;
      b += 1;
      sw_if_index += 1;
      next += 1;
    }

  if (PREDICT_FALSE ((node->flags & VLIB_NODE_FLAG_TRACE)))
    {
      n_left = frame->n_vectors;	/* number of packets to process */
      b = bufs;
      sw_if_index = sw_if_indices;
      bond_packet_trace_t *t0;

      while (n_left)
        {
          if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
            {
              t0 = vlib_add_trace (vm, node, b[0], sizeof (*t0));
              t0->sw_if_index = sw_if_index[0];
              clib_memcpy_fast (&t0->ethernet, vlib_buffer_get_current (b[0]),
                                sizeof (ethernet_header_t));
              t0->bond_sw_if_index = vnet_buffer (b[0])->sw_if_index[VLIB_RX];
            }
          /* next */
          n_left--;
          b++;
          sw_if_index++;
        }
    }

  /* increase rx counters */
  vlib_increment_combined_counter
    (vnet_main.interface_main.combined_sw_if_counters +
     VNET_INTERFACE_COUNTER_RX, thread_index, bond_sw_if_index, n_rx_packets,
     n_rx_bytes);

  vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);
  vlib_node_increment_counter (vm, bond_input_node.index,
                               BOND_INPUT_ERROR_NONE, frame->n_vectors);

  return frame->n_vectors;
}

static clib_error_t *
bond_input_init (vlib_main_t * vm)
{
  return 0;
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (bond_input_node) = {
  .name = "bond-input",
  .vector_size = sizeof (u32),
  .format_buffer = format_ethernet_header_with_length,
  .format_trace = format_bond_input_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_errors = BOND_INPUT_N_ERROR,
  .error_strings = bond_input_error_strings,
  .n_next_nodes = BOND_INPUT_N_NEXT,
  .next_nodes =
  {
    [BOND_INPUT_NEXT_DROP] = "error-drop"
  }
};

VLIB_INIT_FUNCTION (bond_input_init);

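/* Register bond-input on the device-input feature arc so it runs on member
 * interfaces ahead of ethernet-input. */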
VNET_FEATURE_INIT (bond_input, static) =
{
  .arc_name = "device-input",
  .node_name = "bond-input",
  .runs_before = VNET_FEATURES ("ethernet-input"),
};
/* *INDENT-ON* */

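/* Admin up/down callback for member interfaces. Unless LACP manages the
 * member, recompute port_enabled (admin up and link up) and enable or
 * disable collecting/distributing on the member accordingly. */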
static clib_error_t *
bond_sw_interface_up_down (vnet_main_t * vnm, u32 sw_if_index, u32 flags)
{
  bond_main_t *bm = &bond_main;
  member_if_t *mif;
  vlib_main_t *vm = bm->vlib_main;

  mif = bond_get_member_by_sw_if_index (sw_if_index);
  if (mif)
    {
      if (mif->lacp_enabled)
        return 0;

      /* port_enabled is both admin up and hw link up */
      mif->port_enabled = ((flags & VNET_SW_INTERFACE_FLAG_ADMIN_UP) &&
                           vnet_sw_interface_is_link_up (vnm, sw_if_index));
      if (mif->port_enabled == 0)
        bond_disable_collecting_distributing (vm, mif);
      else
        bond_enable_collecting_distributing (vm, mif);
    }

  return 0;
}

VNET_SW_INTERFACE_ADMIN_UP_DOWN_FUNCTION (bond_sw_interface_up_down);

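/* Hardware link up/down callback for member interfaces, mirroring the
 * admin up/down handling above. */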
static clib_error_t *
bond_hw_interface_up_down (vnet_main_t * vnm, u32 hw_if_index, u32 flags)
{
  bond_main_t *bm = &bond_main;
  member_if_t *mif;
  vnet_sw_interface_t *sw;
  vlib_main_t *vm = bm->vlib_main;

  sw = vnet_get_hw_sw_interface (vnm, hw_if_index);
  mif = bond_get_member_by_sw_if_index (sw->sw_if_index);
  if (mif)
    {
      if (mif->lacp_enabled)
        return 0;

      /* port_enabled is both admin up and hw link up */
      mif->port_enabled = ((flags & VNET_HW_INTERFACE_FLAG_LINK_UP) &&
                           vnet_sw_interface_is_admin_up (vnm,
                                                          sw->sw_if_index));
      if (mif->port_enabled == 0)
        bond_disable_collecting_distributing (vm, mif);
      else
        bond_enable_collecting_distributing (vm, mif);
    }

  return 0;
}

VNET_HW_INTERFACE_LINK_UP_DOWN_FUNCTION (bond_hw_interface_up_down);

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */