blob: e14d08a2f9eb735a4fefc7c9ce21f815905e4d61 [file] [log] [blame]
Steven9cd2d7a2017-12-20 12:43:01 -08001/*
2 *------------------------------------------------------------------
3 * Copyright (c) 2017 Cisco and/or its affiliates.
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at:
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 *------------------------------------------------------------------
16 */
17
18#define _GNU_SOURCE
19#include <stdint.h>
20#include <vnet/ethernet/ethernet.h>
21#include <vnet/ip/ip4_packet.h>
22#include <vnet/ip/ip6_packet.h>
23#include <vnet/ip/ip6_hop_by_hop_packet.h>
24#include <vnet/bonding/node.h>
Steven0d883012018-05-11 11:06:23 -070025#include <vppinfra/lb_hash_hash.h>
Steven9f781d82018-06-05 11:09:32 -070026#include <vnet/ip/ip.h>
Neale Rannscbe25aa2019-09-30 10:53:31 +000027#include <vnet/ip-neighbor/ip_neighbor.h>
Neale Rannsdc617b82020-08-20 08:22:56 +000028#include <vnet/ip-neighbor/ip4_neighbor.h>
29#include <vnet/ip-neighbor/ip6_neighbor.h>
Steven9cd2d7a2017-12-20 12:43:01 -080030
31#define foreach_bond_tx_error \
32 _(NONE, "no error") \
33 _(IF_DOWN, "interface down") \
Steven Luong4c4223e2020-07-15 08:44:54 -070034 _(NO_MEMBER, "no member")
Steven9cd2d7a2017-12-20 12:43:01 -080035
36typedef enum
37{
38#define _(f,s) BOND_TX_ERROR_##f,
39 foreach_bond_tx_error
40#undef _
41 BOND_TX_N_ERROR,
42} bond_tx_error_t;
43
44static char *bond_tx_error_strings[] = {
45#define _(n,s) s,
46 foreach_bond_tx_error
47#undef _
48};
49
50static u8 *
51format_bond_tx_trace (u8 * s, va_list * args)
52{
53 CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
54 CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
55 bond_packet_trace_t *t = va_arg (*args, bond_packet_trace_t *);
56 vnet_hw_interface_t *hw, *hw1;
57 vnet_main_t *vnm = vnet_get_main ();
58
59 hw = vnet_get_sup_hw_interface (vnm, t->sw_if_index);
60 hw1 = vnet_get_sup_hw_interface (vnm, t->bond_sw_if_index);
61 s = format (s, "src %U, dst %U, %s -> %s",
62 format_ethernet_address, t->ethernet.src_address,
63 format_ethernet_address, t->ethernet.dst_address,
64 hw->name, hw1->name);
65
66 return s;
67}
68
Damjan Marioncefe1342018-09-21 18:11:33 +020069#ifndef CLIB_MARCH_VARIANT
Steven9cd2d7a2017-12-20 12:43:01 -080070u8 *
71format_bond_interface_name (u8 * s, va_list * args)
72{
73 u32 dev_instance = va_arg (*args, u32);
74 bond_main_t *bm = &bond_main;
75 bond_if_t *bif = pool_elt_at_index (bm->interfaces, dev_instance);
76
Alexander Chernavinad9d5282018-12-13 09:08:09 -050077 s = format (s, "BondEthernet%lu", bif->id);
Steven9cd2d7a2017-12-20 12:43:01 -080078
79 return s;
80}
Damjan Marioncefe1342018-09-21 18:11:33 +020081#endif
Steven9cd2d7a2017-12-20 12:43:01 -080082
83static __clib_unused clib_error_t *
Steven4f8863b2018-04-12 19:36:19 -070084bond_set_l2_mode_function (vnet_main_t * vnm,
85 struct vnet_hw_interface_t *bif_hw,
86 i32 l2_if_adjust)
87{
88 bond_if_t *bif;
89 u32 *sw_if_index;
Steven Luong4c4223e2020-07-15 08:44:54 -070090 struct vnet_hw_interface_t *mif_hw;
Steven4f8863b2018-04-12 19:36:19 -070091
Steven Luong4c4223e2020-07-15 08:44:54 -070092 bif = bond_get_bond_if_by_sw_if_index (bif_hw->sw_if_index);
Steven4f8863b2018-04-12 19:36:19 -070093 if (!bif)
94 return 0;
95
96 if ((bif_hw->l2_if_count == 1) && (l2_if_adjust == 1))
97 {
98 /* Just added first L2 interface on this port */
Steven Luong4c4223e2020-07-15 08:44:54 -070099 vec_foreach (sw_if_index, bif->members)
Steven4f8863b2018-04-12 19:36:19 -0700100 {
Steven Luong4c4223e2020-07-15 08:44:54 -0700101 mif_hw = vnet_get_sup_hw_interface (vnm, *sw_if_index);
102 ethernet_set_flags (vnm, mif_hw->hw_if_index,
Steven4f8863b2018-04-12 19:36:19 -0700103 ETHERNET_INTERFACE_FLAG_ACCEPT_ALL);
Steven4f8863b2018-04-12 19:36:19 -0700104 }
105 }
Steven Luonga1f9ee82019-04-09 12:18:46 -0700106 else if ((bif_hw->l2_if_count == 0) && (l2_if_adjust == -1))
107 {
108 /* Just removed last L2 subinterface on this port */
Steven Luong4c4223e2020-07-15 08:44:54 -0700109 vec_foreach (sw_if_index, bif->members)
Steven Luonga1f9ee82019-04-09 12:18:46 -0700110 {
Steven Luong4c4223e2020-07-15 08:44:54 -0700111 mif_hw = vnet_get_sup_hw_interface (vnm, *sw_if_index);
112 ethernet_set_flags (vnm, mif_hw->hw_if_index,
John Lof415a3b2020-05-14 15:02:16 -0400113 /*ETHERNET_INTERFACE_FLAG_DEFAULT_L3 */ 0);
Steven Luonga1f9ee82019-04-09 12:18:46 -0700114 }
115 }
Steven4f8863b2018-04-12 19:36:19 -0700116
117 return 0;
118}
119
120static __clib_unused clib_error_t *
Steven9cd2d7a2017-12-20 12:43:01 -0800121bond_subif_add_del_function (vnet_main_t * vnm, u32 hw_if_index,
122 struct vnet_sw_interface_t *st, int is_add)
123{
124 /* Nothing for now */
125 return 0;
126}
127
128static clib_error_t *
129bond_interface_admin_up_down (vnet_main_t * vnm, u32 hw_if_index, u32 flags)
130{
131 vnet_hw_interface_t *hif = vnet_get_hw_interface (vnm, hw_if_index);
132 uword is_up = (flags & VNET_SW_INTERFACE_FLAG_ADMIN_UP) != 0;
133 bond_main_t *bm = &bond_main;
134 bond_if_t *bif = pool_elt_at_index (bm->interfaces, hif->dev_instance);
135
136 bif->admin_up = is_up;
Steven Luongdc2abbe2020-07-28 12:28:03 -0700137 if (is_up)
Steven9cd2d7a2017-12-20 12:43:01 -0800138 vnet_hw_interface_set_flags (vnm, bif->hw_if_index,
139 VNET_HW_INTERFACE_FLAG_LINK_UP);
140 return 0;
141}
142
/**
 * Add or delete a secondary MAC address on the bond interface.
 *
 * The bond itself owns no hardware, so the address is programmed on
 * every member's hardware interface.  If programming any member fails,
 * the members already updated are rolled back (by applying the inverse
 * operation) so all members stay consistent, and the error is returned.
 *
 * @param hi      bond hardware interface
 * @param address MAC address to add or delete
 * @param is_add  non-zero to add the address, zero to delete it
 * @return 0 on success, clib error on lookup or programming failure
 */
static clib_error_t *
bond_add_del_mac_address (vnet_hw_interface_t * hi, const u8 * address,
			  u8 is_add)
{
  vnet_main_t *vnm = vnet_get_main ();
  bond_if_t *bif;
  clib_error_t *error = 0;
  vnet_hw_interface_t *s_hi;
  int i;


  bif = bond_get_bond_if_by_sw_if_index (hi->sw_if_index);
  if (!bif)
    {
      return clib_error_return (0,
				"No bond interface found for sw_if_index %u",
				hi->sw_if_index);
    }

  /* Add/del address on each member hw intf, they control the hardware */
  vec_foreach_index (i, bif->members)
  {
    s_hi = vnet_get_sup_hw_interface (vnm, vec_elt (bif->members, i));
    error = vnet_hw_interface_add_del_mac_address (vnm, s_hi->hw_if_index,
						   address, is_add);

    if (error)
      {
	int j;

	/* undo any that were completed before the failure */
	for (j = i - 1; j > -1; j--)
	  {
	    s_hi = vnet_get_sup_hw_interface (vnm, vec_elt (bif->members, j));
	    vnet_hw_interface_add_del_mac_address (vnm, s_hi->hw_if_index,
						   address, !(is_add));
	  }

	return error;
      }
  }

  return 0;
}
187
Damjan Marion69fdfee2018-10-06 14:33:18 +0200188static_always_inline void
189bond_tx_add_to_queue (bond_per_thread_data_t * ptd, u32 port, u32 bi)
190{
191 u32 idx = ptd->per_port_queue[port].n_buffers++;
192 ptd->per_port_queue[port].buffers[idx] = bi;
193}
194
/**
 * Broadcast "load balancing": replicate the packet to every active
 * member.
 *
 * The loop starts at port 1 because the caller transmits the original
 * buffer through member slot 0 (this function's return value).  Each
 * remaining member gets its own copy of b0, queued on this thread's
 * per-port TX queue.  If a buffer copy fails, that member is silently
 * skipped.
 *
 * @return always 0 — the member slot for the original buffer
 */
static_always_inline u32
bond_lb_broadcast (vlib_main_t * vm,
		   bond_if_t * bif, vlib_buffer_t * b0, uword n_members)
{
  bond_main_t *bm = &bond_main;
  vlib_buffer_t *c0;
  int port;
  u32 sw_if_index;
  u16 thread_index = vm->thread_index;
  bond_per_thread_data_t *ptd = vec_elt_at_index (bm->per_thread_data,
						  thread_index);

  for (port = 1; port < n_members; port++)
    {
      sw_if_index = *vec_elt_at_index (bif->active_members, port);
      c0 = vlib_buffer_copy (vm, b0);
      if (PREDICT_TRUE (c0 != 0))
	{
	  vnet_buffer (c0)->sw_if_index[VLIB_TX] = sw_if_index;
	  bond_tx_add_to_queue (ptd, port, vlib_get_buffer_index (vm, c0));
	}
    }

  return 0;
}
220
Steven0d883012018-05-11 11:06:23 -0700221static_always_inline u32
Zhiyong Yangb388e1a2019-05-08 22:57:53 -0400222bond_lb_l2 (vlib_buffer_t * b0)
Steven9cd2d7a2017-12-20 12:43:01 -0800223{
Zhiyong Yangb388e1a2019-05-08 22:57:53 -0400224 ethernet_header_t *eth = vlib_buffer_get_current (b0);
Steven0d883012018-05-11 11:06:23 -0700225 u64 *dst = (u64 *) & eth->dst_address[0];
226 u64 a = clib_mem_unaligned (dst, u64);
227 u32 *src = (u32 *) & eth->src_address[2];
228 u32 b = clib_mem_unaligned (src, u32);
Steven9cd2d7a2017-12-20 12:43:01 -0800229
Damjan Marion69fdfee2018-10-06 14:33:18 +0200230 return lb_hash_hash_2_tuples (a, b);
Steven9cd2d7a2017-12-20 12:43:01 -0800231}
232
Steven0d883012018-05-11 11:06:23 -0700233static_always_inline u16 *
Steven9cd2d7a2017-12-20 12:43:01 -0800234bond_locate_ethertype (ethernet_header_t * eth)
235{
236 u16 *ethertype_p;
237 ethernet_vlan_header_t *vlan;
238
239 if (!ethernet_frame_is_tagged (clib_net_to_host_u16 (eth->type)))
240 {
241 ethertype_p = &eth->type;
242 }
243 else
244 {
245 vlan = (void *) (eth + 1);
246 ethertype_p = &vlan->type;
247 if (*ethertype_p == ntohs (ETHERNET_TYPE_VLAN))
248 {
249 vlan++;
250 ethertype_p = &vlan->type;
251 }
252 }
253 return ethertype_p;
254}
255
/**
 * L2+L3 load-balance hash.
 *
 * For IP4/IP6 payloads, mixes the XOR-folded MAC addresses with the IP
 * source/destination addresses; for any other ethertype falls back to
 * the pure L2 hash.  The ethertype is located behind up to two VLAN
 * tags first.
 */
static_always_inline u32
bond_lb_l23 (vlib_buffer_t * b0)
{
  ethernet_header_t *eth = vlib_buffer_get_current (b0);
  u8 ip_version;
  ip4_header_t *ip4;
  u16 ethertype, *ethertype_p;
  u32 *mac1, *mac2, *mac3;

  ethertype_p = bond_locate_ethertype (eth);
  ethertype = clib_mem_unaligned (ethertype_p, u16);

  if ((ethertype != htons (ETHERNET_TYPE_IP4)) &&
      (ethertype != htons (ETHERNET_TYPE_IP6)))
    return bond_lb_l2 (b0);

  /* IP header follows immediately after the located ethertype */
  ip4 = (ip4_header_t *) (ethertype_p + 1);
  ip_version = (ip4->ip_version_and_header_length >> 4);

  if (ip_version == 0x4)
    {
      u32 a, c;

      /* fold the 12 MAC-address bytes into one u32 */
      mac1 = (u32 *) & eth->dst_address[0];
      mac2 = (u32 *) & eth->dst_address[4];
      mac3 = (u32 *) & eth->src_address[2];

      a = clib_mem_unaligned (mac1, u32) ^ clib_mem_unaligned (mac2, u32) ^
	clib_mem_unaligned (mac3, u32);
      c =
	lb_hash_hash_2_tuples (clib_mem_unaligned (&ip4->address_pair, u64),
			       a);
      return c;
    }
  else if (ip_version == 0x6)
    {
      u64 a;
      u32 c;
      /* NOTE(review): ip6 header taken at (eth + 1), i.e. this branch
         assumes an untagged frame, while the ip4 branch uses
         (ethertype_p + 1) — confirm intended for VLAN-tagged traffic */
      ip6_header_t *ip6 = (ip6_header_t *) (eth + 1);

      mac1 = (u32 *) & eth->dst_address[0];
      mac2 = (u32 *) & eth->dst_address[4];
      mac3 = (u32 *) & eth->src_address[2];

      a = clib_mem_unaligned (mac1, u32) ^ clib_mem_unaligned (mac2, u32) ^
	clib_mem_unaligned (mac3, u32);
      c =
	lb_hash_hash (clib_mem_unaligned
		      (&ip6->src_address.as_uword[0], uword),
		      clib_mem_unaligned (&ip6->src_address.as_uword[1],
					  uword),
		      clib_mem_unaligned (&ip6->dst_address.as_uword[0],
					  uword),
		      clib_mem_unaligned (&ip6->dst_address.as_uword[1],
					  uword), a);
      return c;
    }
  return bond_lb_l2 (b0);
}
315
/**
 * L3+L4 load-balance hash.
 *
 * For IP4/IP6 payloads, mixes the IP source/destination addresses with
 * the XOR of the TCP/UDP source and destination ports (0 when the
 * payload is not TCP/UDP); other ethertypes fall back to the L2 hash.
 */
static_always_inline u32
bond_lb_l34 (vlib_buffer_t * b0)
{
  ethernet_header_t *eth = vlib_buffer_get_current (b0);
  u8 ip_version;
  uword is_tcp_udp;
  ip4_header_t *ip4;
  u16 ethertype, *ethertype_p;

  ethertype_p = bond_locate_ethertype (eth);
  ethertype = clib_mem_unaligned (ethertype_p, u16);

  if ((ethertype != htons (ETHERNET_TYPE_IP4)) &&
      (ethertype != htons (ETHERNET_TYPE_IP6)))
    return (bond_lb_l2 (b0));

  ip4 = (ip4_header_t *) (ethertype_p + 1);
  ip_version = (ip4->ip_version_and_header_length >> 4);

  if (ip_version == 0x4)
    {
      u32 a, t1, t2;
      /* NOTE(review): L4 header taken at (ip4 + 1) — assumes no IPv4
         options (IHL == 5); confirm acceptable for a hash */
      tcp_header_t *tcp = (void *) (ip4 + 1);

      is_tcp_udp = (ip4->protocol == IP_PROTOCOL_TCP) ||
	(ip4->protocol == IP_PROTOCOL_UDP);
      t1 = is_tcp_udp ? clib_mem_unaligned (&tcp->src, u16) : 0;
      t2 = is_tcp_udp ? clib_mem_unaligned (&tcp->dst, u16) : 0;
      a = t1 ^ t2;
      return
	lb_hash_hash_2_tuples (clib_mem_unaligned (&ip4->address_pair, u64),
			       a);
    }
  else if (ip_version == 0x6)
    {
      u64 a;
      u32 c, t1, t2;
      /* NOTE(review): ip6 header taken at (eth + 1), unlike the ip4
         branch which uses (ethertype_p + 1) — confirm for VLAN frames */
      ip6_header_t *ip6 = (ip6_header_t *) (eth + 1);
      tcp_header_t *tcp = (void *) (ip6 + 1);

      is_tcp_udp = 0;
      if (PREDICT_TRUE ((ip6->protocol == IP_PROTOCOL_TCP) ||
			(ip6->protocol == IP_PROTOCOL_UDP)))
	{
	  is_tcp_udp = 1;
	  tcp = (void *) (ip6 + 1);
	}
      else if (ip6->protocol == IP_PROTOCOL_IP6_HOP_BY_HOP_OPTIONS)
	{
	  /* hop-by-hop header length field is in units of 8 octets,
	     not counting the first 8 */
	  ip6_hop_by_hop_header_t *hbh =
	    (ip6_hop_by_hop_header_t *) (ip6 + 1);
	  if ((hbh->protocol == IP_PROTOCOL_TCP)
	      || (hbh->protocol == IP_PROTOCOL_UDP))
	    {
	      is_tcp_udp = 1;
	      tcp = (tcp_header_t *) ((u8 *) hbh + ((hbh->length + 1) << 3));
	    }
	}
      t1 = is_tcp_udp ? clib_mem_unaligned (&tcp->src, u16) : 0;
      t2 = is_tcp_udp ? clib_mem_unaligned (&tcp->dst, u16) : 0;
      a = t1 ^ t2;
      c =
	lb_hash_hash (clib_mem_unaligned
		      (&ip6->src_address.as_uword[0], uword),
		      clib_mem_unaligned (&ip6->src_address.as_uword[1],
					  uword),
		      clib_mem_unaligned (&ip6->dst_address.as_uword[0],
					  uword),
		      clib_mem_unaligned (&ip6->dst_address.as_uword[1],
					  uword), a);
      return c;
    }

  return bond_lb_l2 (b0);
}
391
Steven0d883012018-05-11 11:06:23 -0700392static_always_inline u32
Steven Luong4c4223e2020-07-15 08:44:54 -0700393bond_lb_round_robin (bond_if_t * bif, vlib_buffer_t * b0, uword n_members)
Steven9cd2d7a2017-12-20 12:43:01 -0800394{
395 bif->lb_rr_last_index++;
Steven Luong4c4223e2020-07-15 08:44:54 -0700396 if (bif->lb_rr_last_index >= n_members)
Damjan Marion69fdfee2018-10-06 14:33:18 +0200397 bif->lb_rr_last_index = 0;
Steven9cd2d7a2017-12-20 12:43:01 -0800398
399 return bif->lb_rr_last_index;
400}
401
Damjan Marion16de39e2018-09-26 10:15:41 +0200402static_always_inline void
Zhiyong Yangb388e1a2019-05-08 22:57:53 -0400403bond_tx_inline (vlib_main_t * vm, bond_if_t * bif, vlib_buffer_t ** b,
Steven Luong4c4223e2020-07-15 08:44:54 -0700404 u32 * h, u32 n_left, uword n_members, u32 lb_alg)
Steven9cd2d7a2017-12-20 12:43:01 -0800405{
Damjan Marioncefe1342018-09-21 18:11:33 +0200406 while (n_left >= 4)
Steven9cd2d7a2017-12-20 12:43:01 -0800407 {
Damjan Marioncefe1342018-09-21 18:11:33 +0200408 // Prefetch next iteration
409 if (n_left >= 8)
Steven9cd2d7a2017-12-20 12:43:01 -0800410 {
Damjan Marioncefe1342018-09-21 18:11:33 +0200411 vlib_buffer_t **pb = b + 4;
Steven0d883012018-05-11 11:06:23 -0700412
Damjan Marioncefe1342018-09-21 18:11:33 +0200413 vlib_prefetch_buffer_header (pb[0], LOAD);
414 vlib_prefetch_buffer_header (pb[1], LOAD);
415 vlib_prefetch_buffer_header (pb[2], LOAD);
416 vlib_prefetch_buffer_header (pb[3], LOAD);
417
418 CLIB_PREFETCH (pb[0]->data, CLIB_CACHE_LINE_BYTES, LOAD);
419 CLIB_PREFETCH (pb[1]->data, CLIB_CACHE_LINE_BYTES, LOAD);
420 CLIB_PREFETCH (pb[2]->data, CLIB_CACHE_LINE_BYTES, LOAD);
421 CLIB_PREFETCH (pb[3]->data, CLIB_CACHE_LINE_BYTES, LOAD);
422 }
423
424 VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);
425 VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[1]);
426 VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[2]);
427 VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[3]);
428
Damjan Marion69fdfee2018-10-06 14:33:18 +0200429 if (lb_alg == BOND_LB_L2)
Damjan Marioncefe1342018-09-21 18:11:33 +0200430 {
Zhiyong Yangb388e1a2019-05-08 22:57:53 -0400431 h[0] = bond_lb_l2 (b[0]);
432 h[1] = bond_lb_l2 (b[1]);
433 h[2] = bond_lb_l2 (b[2]);
434 h[3] = bond_lb_l2 (b[3]);
Damjan Marioncefe1342018-09-21 18:11:33 +0200435 }
Damjan Marion69fdfee2018-10-06 14:33:18 +0200436 else if (lb_alg == BOND_LB_L34)
Damjan Marioncefe1342018-09-21 18:11:33 +0200437 {
Zhiyong Yangb388e1a2019-05-08 22:57:53 -0400438 h[0] = bond_lb_l34 (b[0]);
439 h[1] = bond_lb_l34 (b[1]);
440 h[2] = bond_lb_l34 (b[2]);
441 h[3] = bond_lb_l34 (b[3]);
Steven9cd2d7a2017-12-20 12:43:01 -0800442 }
Damjan Marion69fdfee2018-10-06 14:33:18 +0200443 else if (lb_alg == BOND_LB_L23)
444 {
Zhiyong Yangb388e1a2019-05-08 22:57:53 -0400445 h[0] = bond_lb_l23 (b[0]);
446 h[1] = bond_lb_l23 (b[1]);
447 h[2] = bond_lb_l23 (b[2]);
448 h[3] = bond_lb_l23 (b[3]);
Damjan Marion69fdfee2018-10-06 14:33:18 +0200449 }
450 else if (lb_alg == BOND_LB_RR)
451 {
Steven Luong4c4223e2020-07-15 08:44:54 -0700452 h[0] = bond_lb_round_robin (bif, b[0], n_members);
453 h[1] = bond_lb_round_robin (bif, b[1], n_members);
454 h[2] = bond_lb_round_robin (bif, b[2], n_members);
455 h[3] = bond_lb_round_robin (bif, b[3], n_members);
Damjan Marion69fdfee2018-10-06 14:33:18 +0200456 }
457 else if (lb_alg == BOND_LB_BC)
458 {
Steven Luong4c4223e2020-07-15 08:44:54 -0700459 h[0] = bond_lb_broadcast (vm, bif, b[0], n_members);
460 h[1] = bond_lb_broadcast (vm, bif, b[1], n_members);
461 h[2] = bond_lb_broadcast (vm, bif, b[2], n_members);
462 h[3] = bond_lb_broadcast (vm, bif, b[3], n_members);
Damjan Marion69fdfee2018-10-06 14:33:18 +0200463 }
464 else
465 {
466 ASSERT (0);
467 }
Stevenc4e99c52018-09-27 20:06:26 -0700468
Damjan Marioncefe1342018-09-21 18:11:33 +0200469 n_left -= 4;
470 b += 4;
Damjan Marion69fdfee2018-10-06 14:33:18 +0200471 h += 4;
Damjan Marioncefe1342018-09-21 18:11:33 +0200472 }
Steven9cd2d7a2017-12-20 12:43:01 -0800473
Damjan Marioncefe1342018-09-21 18:11:33 +0200474 while (n_left > 0)
475 {
Damjan Marioncefe1342018-09-21 18:11:33 +0200476 VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);
477
Damjan Marion69fdfee2018-10-06 14:33:18 +0200478 if (bif->lb == BOND_LB_L2)
Zhiyong Yangb388e1a2019-05-08 22:57:53 -0400479 h[0] = bond_lb_l2 (b[0]);
Damjan Marion69fdfee2018-10-06 14:33:18 +0200480 else if (bif->lb == BOND_LB_L34)
Zhiyong Yangb388e1a2019-05-08 22:57:53 -0400481 h[0] = bond_lb_l34 (b[0]);
Damjan Marion69fdfee2018-10-06 14:33:18 +0200482 else if (bif->lb == BOND_LB_L23)
Zhiyong Yangb388e1a2019-05-08 22:57:53 -0400483 h[0] = bond_lb_l23 (b[0]);
Damjan Marion69fdfee2018-10-06 14:33:18 +0200484 else if (bif->lb == BOND_LB_RR)
Steven Luong4c4223e2020-07-15 08:44:54 -0700485 h[0] = bond_lb_round_robin (bif, b[0], n_members);
Damjan Marion69fdfee2018-10-06 14:33:18 +0200486 else if (bif->lb == BOND_LB_BC)
Steven Luong4c4223e2020-07-15 08:44:54 -0700487 h[0] = bond_lb_broadcast (vm, bif, b[0], n_members);
Damjan Marion69fdfee2018-10-06 14:33:18 +0200488 else
Damjan Marion16de39e2018-09-26 10:15:41 +0200489 {
Damjan Marion69fdfee2018-10-06 14:33:18 +0200490 ASSERT (0);
Damjan Marion16de39e2018-09-26 10:15:41 +0200491 }
492
Damjan Marioncefe1342018-09-21 18:11:33 +0200493 n_left -= 1;
494 b += 1;
Steven Luongde0302c2019-10-04 14:18:37 -0700495 h += 1;
Steven9cd2d7a2017-12-20 12:43:01 -0800496 }
Damjan Marion69fdfee2018-10-06 14:33:18 +0200497}
Steven9cd2d7a2017-12-20 12:43:01 -0800498
/**
 * Reduce each hash in h[] (in place) to a member slot in
 * [0, n_members).
 *
 * When use_modulo_shortcut is set, n_members is a power of two and the
 * reduction is a simple AND with (n_members - 1).  Otherwise a true
 * modulo is taken; the AVX2 path computes it with single-precision
 * float reciprocal math (h - n * floor(h / n)), which is why only the
 * lower 16 bits of each hash are kept — larger values would lose
 * precision in f32.
 */
static_always_inline void
bond_hash_to_port (u32 * h, u32 n_left, u32 n_members,
		   int use_modulo_shortcut)
{
  u32 mask = n_members - 1;

#ifdef CLIB_HAVE_VEC256
  /* only lower 16 bits of hash due to single precision fp arithmetic */
  u32x8 mask8, sc8u, h8a, h8b;
  f32x8 sc8f;

  if (use_modulo_shortcut)
    {
      mask8 = u32x8_splat (mask);
    }
  else
    {
      mask8 = u32x8_splat (0xffff);
      sc8u = u32x8_splat (n_members);
      sc8f = f32x8_from_u32x8 (sc8u);
    }

  /* 16 hashes per iteration in two 8-lane registers */
  while (n_left > 16)
    {
      h8a = u32x8_load_unaligned (h) & mask8;
      h8b = u32x8_load_unaligned (h + 8) & mask8;

      if (use_modulo_shortcut == 0)
	{
	  /* h %= n_members via float reciprocal: h - n * (h / n) */
	  h8a -= sc8u * u32x8_from_f32x8 (f32x8_from_u32x8 (h8a) / sc8f);
	  h8b -= sc8u * u32x8_from_f32x8 (f32x8_from_u32x8 (h8b) / sc8f);
	}

      u32x8_store_unaligned (h8a, h);
      u32x8_store_unaligned (h8b, h + 8);
      n_left -= 16;
      h += 16;
    }
#endif

  /* scalar path, 4 at a time */
  while (n_left > 4)
    {
      if (use_modulo_shortcut)
	{
	  h[0] &= mask;
	  h[1] &= mask;
	  h[2] &= mask;
	  h[3] &= mask;
	}
      else
	{
	  h[0] %= n_members;
	  h[1] %= n_members;
	  h[2] %= n_members;
	  h[3] %= n_members;
	}
      n_left -= 4;
      h += 4;
    }
  while (n_left)
    {
      if (use_modulo_shortcut)
	h[0] &= mask;
      else
	h[0] %= n_members;
      n_left -= 1;
      h += 1;
    }
}
568
569static_always_inline void
570bond_update_sw_if_index (bond_per_thread_data_t * ptd, bond_if_t * bif,
571 u32 * bi, vlib_buffer_t ** b, u32 * data, u32 n_left,
572 int single_sw_if_index)
573{
574 u32 sw_if_index = data[0];
575 u32 *h = data;
576
577 while (n_left >= 4)
578 {
579 // Prefetch next iteration
580 if (n_left >= 8)
581 {
582 vlib_buffer_t **pb = b + 4;
583 vlib_prefetch_buffer_header (pb[0], LOAD);
584 vlib_prefetch_buffer_header (pb[1], LOAD);
585 vlib_prefetch_buffer_header (pb[2], LOAD);
586 vlib_prefetch_buffer_header (pb[3], LOAD);
587 }
588
589 if (PREDICT_FALSE (single_sw_if_index))
590 {
591 vnet_buffer (b[0])->sw_if_index[VLIB_TX] = sw_if_index;
592 vnet_buffer (b[1])->sw_if_index[VLIB_TX] = sw_if_index;
593 vnet_buffer (b[2])->sw_if_index[VLIB_TX] = sw_if_index;
594 vnet_buffer (b[3])->sw_if_index[VLIB_TX] = sw_if_index;
595
596 bond_tx_add_to_queue (ptd, 0, bi[0]);
597 bond_tx_add_to_queue (ptd, 0, bi[1]);
598 bond_tx_add_to_queue (ptd, 0, bi[2]);
599 bond_tx_add_to_queue (ptd, 0, bi[3]);
600 }
601 else
602 {
603 u32 sw_if_index[4];
604
Steven Luong4c4223e2020-07-15 08:44:54 -0700605 sw_if_index[0] = *vec_elt_at_index (bif->active_members, h[0]);
606 sw_if_index[1] = *vec_elt_at_index (bif->active_members, h[1]);
607 sw_if_index[2] = *vec_elt_at_index (bif->active_members, h[2]);
608 sw_if_index[3] = *vec_elt_at_index (bif->active_members, h[3]);
Damjan Marion69fdfee2018-10-06 14:33:18 +0200609
610 vnet_buffer (b[0])->sw_if_index[VLIB_TX] = sw_if_index[0];
611 vnet_buffer (b[1])->sw_if_index[VLIB_TX] = sw_if_index[1];
612 vnet_buffer (b[2])->sw_if_index[VLIB_TX] = sw_if_index[2];
613 vnet_buffer (b[3])->sw_if_index[VLIB_TX] = sw_if_index[3];
614
615 bond_tx_add_to_queue (ptd, h[0], bi[0]);
616 bond_tx_add_to_queue (ptd, h[1], bi[1]);
617 bond_tx_add_to_queue (ptd, h[2], bi[2]);
618 bond_tx_add_to_queue (ptd, h[3], bi[3]);
619 }
620
621 bi += 4;
622 h += 4;
623 b += 4;
624 n_left -= 4;
625 }
626 while (n_left)
627 {
628 if (PREDICT_FALSE (single_sw_if_index))
629 {
630 vnet_buffer (b[0])->sw_if_index[VLIB_TX] = sw_if_index;
631 bond_tx_add_to_queue (ptd, 0, bi[0]);
632 }
633 else
634 {
Steven Luong4c4223e2020-07-15 08:44:54 -0700635 u32 sw_if_index0 = *vec_elt_at_index (bif->active_members, h[0]);
Damjan Marion69fdfee2018-10-06 14:33:18 +0200636
637 vnet_buffer (b[0])->sw_if_index[VLIB_TX] = sw_if_index0;
638 bond_tx_add_to_queue (ptd, h[0], bi[0]);
639 }
640
641 bi += 1;
642 h += 1;
643 b += 1;
644 n_left -= 1;
645 }
646}
647
648static_always_inline void
649bond_tx_trace (vlib_main_t * vm, vlib_node_runtime_t * node, bond_if_t * bif,
650 vlib_buffer_t ** b, u32 n_left, u32 * h)
651{
652 uword n_trace = vlib_get_trace_count (vm, node);
653
654 while (n_trace > 0 && n_left > 0)
655 {
Benoît Ganne9a3973e2020-10-02 19:36:57 +0200656 if (PREDICT_TRUE
657 (vlib_trace_buffer (vm, node, 0, b[0], 0 /* follow_chain */ )))
658 {
659 bond_packet_trace_t *t0;
660 ethernet_header_t *eth;
Damjan Marion69fdfee2018-10-06 14:33:18 +0200661
Benoît Ganne9a3973e2020-10-02 19:36:57 +0200662 vlib_set_trace_count (vm, node, --n_trace);
663 t0 = vlib_add_trace (vm, node, b[0], sizeof (*t0));
664 eth = vlib_buffer_get_current (b[0]);
665 t0->ethernet = *eth;
666 t0->sw_if_index = vnet_buffer (b[0])->sw_if_index[VLIB_TX];
667 if (!h)
668 {
669 t0->bond_sw_if_index =
670 *vec_elt_at_index (bif->active_members, 0);
671 }
672 else
673 {
674 t0->bond_sw_if_index =
675 *vec_elt_at_index (bif->active_members, h[0]);
676 h++;
677 }
Damjan Marion69fdfee2018-10-06 14:33:18 +0200678 }
679 b++;
680 n_left--;
681 }
Damjan Marion16de39e2018-09-26 10:15:41 +0200682}
683
684VNET_DEVICE_CLASS_TX_FN (bond_dev_class) (vlib_main_t * vm,
685 vlib_node_runtime_t * node,
686 vlib_frame_t * frame)
687{
688 vnet_interface_output_runtime_t *rund = (void *) node->runtime_data;
689 bond_main_t *bm = &bond_main;
690 u16 thread_index = vm->thread_index;
691 bond_if_t *bif = pool_elt_at_index (bm->interfaces, rund->dev_instance);
Steven Luong4c4223e2020-07-15 08:44:54 -0700692 uword n_members;
Damjan Marion69fdfee2018-10-06 14:33:18 +0200693 vlib_buffer_t *bufs[VLIB_FRAME_SIZE];
694 u32 *from = vlib_frame_vector_args (frame);
695 u32 n_left = frame->n_vectors;
696 u32 hashes[VLIB_FRAME_SIZE], *h;
697 vnet_main_t *vnm = vnet_get_main ();
698 bond_per_thread_data_t *ptd = vec_elt_at_index (bm->per_thread_data,
699 thread_index);
700 u32 p, sw_if_index;
Damjan Marion16de39e2018-09-26 10:15:41 +0200701
702 if (PREDICT_FALSE (bif->admin_up == 0))
703 {
Damjan Mariona3d59862018-11-10 10:23:00 +0100704 vlib_buffer_free (vm, vlib_frame_vector_args (frame), frame->n_vectors);
Damjan Marion16de39e2018-09-26 10:15:41 +0200705 vlib_increment_simple_counter (vnet_main.interface_main.sw_if_counters +
706 VNET_INTERFACE_COUNTER_DROP,
707 thread_index, bif->sw_if_index,
708 frame->n_vectors);
709 vlib_error_count (vm, node->node_index, BOND_TX_ERROR_IF_DOWN,
710 frame->n_vectors);
711 return frame->n_vectors;
712 }
713
Steven Luong4c4223e2020-07-15 08:44:54 -0700714 n_members = vec_len (bif->active_members);
715 if (PREDICT_FALSE (n_members == 0))
Damjan Marion16de39e2018-09-26 10:15:41 +0200716 {
Damjan Mariona3d59862018-11-10 10:23:00 +0100717 vlib_buffer_free (vm, vlib_frame_vector_args (frame), frame->n_vectors);
Damjan Marion16de39e2018-09-26 10:15:41 +0200718 vlib_increment_simple_counter (vnet_main.interface_main.sw_if_counters +
719 VNET_INTERFACE_COUNTER_DROP,
720 thread_index, bif->sw_if_index,
721 frame->n_vectors);
Steven Luong4c4223e2020-07-15 08:44:54 -0700722 vlib_error_count (vm, node->node_index, BOND_TX_ERROR_NO_MEMBER,
Damjan Marion16de39e2018-09-26 10:15:41 +0200723 frame->n_vectors);
724 return frame->n_vectors;
725 }
726
Damjan Marion69fdfee2018-10-06 14:33:18 +0200727 vlib_get_buffers (vm, from, bufs, n_left);
728
Paul Vinciguerra8feeaff2019-03-27 11:25:48 -0700729 /* active-backup mode, ship everything to first sw if index */
Steven Luong4c4223e2020-07-15 08:44:54 -0700730 if ((bif->lb == BOND_LB_AB) || PREDICT_FALSE (n_members == 1))
Damjan Marion69fdfee2018-10-06 14:33:18 +0200731 {
Steven Luong4c4223e2020-07-15 08:44:54 -0700732 sw_if_index = *vec_elt_at_index (bif->active_members, 0);
Damjan Marion69fdfee2018-10-06 14:33:18 +0200733
734 bond_tx_trace (vm, node, bif, bufs, frame->n_vectors, 0);
735 bond_update_sw_if_index (ptd, bif, from, bufs, &sw_if_index, n_left,
736 /* single_sw_if_index */ 1);
737 goto done;
738 }
739
740 if (bif->lb == BOND_LB_BC)
741 {
Steven Luong4c4223e2020-07-15 08:44:54 -0700742 sw_if_index = *vec_elt_at_index (bif->active_members, 0);
Damjan Marion69fdfee2018-10-06 14:33:18 +0200743
Steven Luong4c4223e2020-07-15 08:44:54 -0700744 bond_tx_inline (vm, bif, bufs, hashes, n_left, n_members, BOND_LB_BC);
Damjan Marion69fdfee2018-10-06 14:33:18 +0200745 bond_tx_trace (vm, node, bif, bufs, frame->n_vectors, 0);
746 bond_update_sw_if_index (ptd, bif, from, bufs, &sw_if_index, n_left,
747 /* single_sw_if_index */ 1);
748 goto done;
749 }
750
Steven Luong4c4223e2020-07-15 08:44:54 -0700751 /* if have at least one member on local numa node, only members on local numa
Zhiyong Yang751e3f32019-06-26 05:49:14 -0400752 node will transmit pkts when bif->local_numa_only is enabled */
Steven Luong4c4223e2020-07-15 08:44:54 -0700753 if (bif->n_numa_members >= 1)
754 n_members = bif->n_numa_members;
Zhiyong Yang751e3f32019-06-26 05:49:14 -0400755
Damjan Marion16de39e2018-09-26 10:15:41 +0200756 if (bif->lb == BOND_LB_L2)
Steven Luong4c4223e2020-07-15 08:44:54 -0700757 bond_tx_inline (vm, bif, bufs, hashes, n_left, n_members, BOND_LB_L2);
Damjan Marion16de39e2018-09-26 10:15:41 +0200758 else if (bif->lb == BOND_LB_L34)
Steven Luong4c4223e2020-07-15 08:44:54 -0700759 bond_tx_inline (vm, bif, bufs, hashes, n_left, n_members, BOND_LB_L34);
Damjan Marion16de39e2018-09-26 10:15:41 +0200760 else if (bif->lb == BOND_LB_L23)
Steven Luong4c4223e2020-07-15 08:44:54 -0700761 bond_tx_inline (vm, bif, bufs, hashes, n_left, n_members, BOND_LB_L23);
Damjan Marion16de39e2018-09-26 10:15:41 +0200762 else if (bif->lb == BOND_LB_RR)
Steven Luong4c4223e2020-07-15 08:44:54 -0700763 bond_tx_inline (vm, bif, bufs, hashes, n_left, n_members, BOND_LB_RR);
Damjan Marion16de39e2018-09-26 10:15:41 +0200764 else
765 ASSERT (0);
Steven9cd2d7a2017-12-20 12:43:01 -0800766
Damjan Marion69fdfee2018-10-06 14:33:18 +0200767 /* calculate port out of hash */
768 h = hashes;
Steven Luong4c4223e2020-07-15 08:44:54 -0700769 if (BOND_MODULO_SHORTCUT (n_members))
770 bond_hash_to_port (h, frame->n_vectors, n_members, 1);
Damjan Marion69fdfee2018-10-06 14:33:18 +0200771 else
Steven Luong4c4223e2020-07-15 08:44:54 -0700772 bond_hash_to_port (h, frame->n_vectors, n_members, 0);
Damjan Marion69fdfee2018-10-06 14:33:18 +0200773
774 bond_tx_trace (vm, node, bif, bufs, frame->n_vectors, h);
775
776 bond_update_sw_if_index (ptd, bif, from, bufs, hashes, frame->n_vectors,
777 /* single_sw_if_index */ 0);
778
779done:
Steven Luong4c4223e2020-07-15 08:44:54 -0700780 for (p = 0; p < n_members; p++)
Damjan Marion69fdfee2018-10-06 14:33:18 +0200781 {
782 vlib_frame_t *f;
783 u32 *to_next;
784
Steven Luong4c4223e2020-07-15 08:44:54 -0700785 sw_if_index = *vec_elt_at_index (bif->active_members, p);
Damjan Marion69fdfee2018-10-06 14:33:18 +0200786 if (PREDICT_TRUE (ptd->per_port_queue[p].n_buffers))
787 {
788 f = vnet_get_frame_to_sw_interface (vnm, sw_if_index);
789 f->n_vectors = ptd->per_port_queue[p].n_buffers;
790 to_next = vlib_frame_vector_args (f);
Dave Barach178cf492018-11-13 16:34:13 -0500791 clib_memcpy_fast (to_next, ptd->per_port_queue[p].buffers,
792 f->n_vectors * sizeof (u32));
Damjan Marion69fdfee2018-10-06 14:33:18 +0200793 vnet_put_frame_to_sw_interface (vnm, sw_if_index, f);
794 ptd->per_port_queue[p].n_buffers = 0;
795 }
796 }
Steven9cd2d7a2017-12-20 12:43:01 -0800797 return frame->n_vectors;
798}
799
Steven9f781d82018-06-05 11:09:32 -0700800static walk_rc_t
801bond_active_interface_switch_cb (vnet_main_t * vnm, u32 sw_if_index,
802 void *arg)
803{
804 bond_main_t *bm = &bond_main;
805
Neale Rannsdc617b82020-08-20 08:22:56 +0000806 ip4_neighbor_advertise (bm->vlib_main, bm->vnet_main, sw_if_index, NULL);
807 ip6_neighbor_advertise (bm->vlib_main, bm->vnet_main, sw_if_index, NULL);
Steven9f781d82018-06-05 11:09:32 -0700808
809 return (WALK_CONTINUE);
810}
811
812static uword
813bond_process (vlib_main_t * vm, vlib_node_runtime_t * rt, vlib_frame_t * f)
814{
815 vnet_main_t *vnm = vnet_get_main ();
816 uword event_type, *event_data = 0;
817
818 while (1)
819 {
820 u32 i;
821 u32 hw_if_index;
822
823 vlib_process_wait_for_event (vm);
824 event_type = vlib_process_get_events (vm, &event_data);
825 ASSERT (event_type == BOND_SEND_GARP_NA);
826 for (i = 0; i < vec_len (event_data); i++)
827 {
828 hw_if_index = event_data[i];
Steven Luongbac326c2019-08-05 09:47:58 -0700829 if (vnet_get_hw_interface_or_null (vnm, hw_if_index))
830 /* walk hw interface to process all subinterfaces */
831 vnet_hw_interface_walk_sw (vnm, hw_if_index,
832 bond_active_interface_switch_cb, 0);
Steven9f781d82018-06-05 11:09:32 -0700833 }
834 vec_reset_length (event_data);
835 }
836 return 0;
837}
838
/* *INDENT-OFF* */
/* Process node running bond_process: dispatches BOND_SEND_GARP_NA events
   to advertise addresses when the active member set changes. */
VLIB_REGISTER_NODE (bond_process_node) = {
  .function = bond_process,
  .flags = VLIB_NODE_FLAG_TRACE_SUPPORTED,
  .type = VLIB_NODE_TYPE_PROCESS,
  .name = "bond-process",
};
/* *INDENT-ON* */
847
Steven9cd2d7a2017-12-20 12:43:01 -0800848/* *INDENT-OFF* */
849VNET_DEVICE_CLASS (bond_dev_class) = {
850 .name = "bond",
Steven9cd2d7a2017-12-20 12:43:01 -0800851 .tx_function_n_errors = BOND_TX_N_ERROR,
852 .tx_function_error_strings = bond_tx_error_strings,
853 .format_device_name = format_bond_interface_name,
Steven4f8863b2018-04-12 19:36:19 -0700854 .set_l2_mode_function = bond_set_l2_mode_function,
Steven9cd2d7a2017-12-20 12:43:01 -0800855 .admin_up_down_function = bond_interface_admin_up_down,
856 .subif_add_del_function = bond_subif_add_del_function,
857 .format_tx_trace = format_bond_tx_trace,
Matthew Smithe83aa452019-11-14 10:36:02 -0600858 .mac_addr_add_del_function = bond_add_del_mac_address,
Steven9cd2d7a2017-12-20 12:43:01 -0800859};
860
Steven9cd2d7a2017-12-20 12:43:01 -0800861/* *INDENT-ON* */
862
Steven Luongbac326c2019-08-05 09:47:58 -0700863static clib_error_t *
Steven Luong4c4223e2020-07-15 08:44:54 -0700864bond_member_interface_add_del (vnet_main_t * vnm, u32 sw_if_index, u32 is_add)
Steven Luongbac326c2019-08-05 09:47:58 -0700865{
866 bond_main_t *bm = &bond_main;
Steven Luong4c4223e2020-07-15 08:44:54 -0700867 member_if_t *mif;
868 bond_detach_member_args_t args = { 0 };
Steven Luongbac326c2019-08-05 09:47:58 -0700869
870 if (is_add)
871 return 0;
Steven Luong4c4223e2020-07-15 08:44:54 -0700872 mif = bond_get_member_by_sw_if_index (sw_if_index);
873 if (!mif)
Steven Luongbac326c2019-08-05 09:47:58 -0700874 return 0;
Steven Luong4c4223e2020-07-15 08:44:54 -0700875 args.member = sw_if_index;
876 bond_detach_member (bm->vlib_main, &args);
Steven Luongbac326c2019-08-05 09:47:58 -0700877 return args.error;
878}
879
Steven Luong4c4223e2020-07-15 08:44:54 -0700880VNET_SW_INTERFACE_ADD_DEL_FUNCTION (bond_member_interface_add_del);
Steven Luongbac326c2019-08-05 09:47:58 -0700881
Steven9cd2d7a2017-12-20 12:43:01 -0800882/*
883 * fd.io coding-style-patch-verification: ON
884 *
885 * Local Variables:
886 * eval: (c-set-style "gnu")
887 * End:
888 */