/*
 *------------------------------------------------------------------
 * Copyright (c) 2017 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *------------------------------------------------------------------
 */

#define _GNU_SOURCE
#include <stdint.h>
#include <vnet/ethernet/ethernet.h>
#include <vnet/ip/ip4_packet.h>
#include <vnet/ip/ip6_packet.h>
#include <vnet/ip/ip6_hop_by_hop_packet.h>
#include <vnet/bonding/node.h>
#include <vppinfra/lb_hash_hash.h>
#include <vnet/ip/ip.h>
#include <vnet/ip-neighbor/ip_neighbor.h>

#define foreach_bond_tx_error        \
  _(NONE, "no error")                \
  _(IF_DOWN, "interface down")       \
  _(NO_MEMBER, "no member")

typedef enum
{
#define _(f,s) BOND_TX_ERROR_##f,
  foreach_bond_tx_error
#undef _
    BOND_TX_N_ERROR,
} bond_tx_error_t;

static char *bond_tx_error_strings[] = {
#define _(n,s) s,
  foreach_bond_tx_error
#undef _
};

static u8 *
format_bond_tx_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  bond_packet_trace_t *t = va_arg (*args, bond_packet_trace_t *);
  vnet_hw_interface_t *hw, *hw1;
  vnet_main_t *vnm = vnet_get_main ();

  hw = vnet_get_sup_hw_interface (vnm, t->sw_if_index);
  hw1 = vnet_get_sup_hw_interface (vnm, t->bond_sw_if_index);
  s = format (s, "src %U, dst %U, %s -> %s",
              format_ethernet_address, t->ethernet.src_address,
              format_ethernet_address, t->ethernet.dst_address,
              hw->name, hw1->name);

  return s;
}

#ifndef CLIB_MARCH_VARIANT
u8 *
format_bond_interface_name (u8 * s, va_list * args)
{
  u32 dev_instance = va_arg (*args, u32);
  bond_main_t *bm = &bond_main;
  bond_if_t *bif = pool_elt_at_index (bm->interfaces, dev_instance);

  s = format (s, "BondEthernet%lu", bif->id);

  return s;
}
#endif

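/*
 * Device-class callback invoked when L2 (sub)interfaces are added to or
 * removed from the bond. When the first L2 interface appears, every member
 * interface is put into accept-all (promiscuous) mode; when the last one is
 * removed, the members revert to their default flags.
 */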
static __clib_unused clib_error_t *
bond_set_l2_mode_function (vnet_main_t * vnm,
                           struct vnet_hw_interface_t *bif_hw,
                           i32 l2_if_adjust)
{
  bond_if_t *bif;
  u32 *sw_if_index;
  struct vnet_hw_interface_t *mif_hw;

  bif = bond_get_bond_if_by_sw_if_index (bif_hw->sw_if_index);
  if (!bif)
    return 0;

  if ((bif_hw->l2_if_count == 1) && (l2_if_adjust == 1))
    {
      /* Just added first L2 interface on this port */
      vec_foreach (sw_if_index, bif->members)
      {
        mif_hw = vnet_get_sup_hw_interface (vnm, *sw_if_index);
        ethernet_set_flags (vnm, mif_hw->hw_if_index,
                            ETHERNET_INTERFACE_FLAG_ACCEPT_ALL);
      }
    }
  else if ((bif_hw->l2_if_count == 0) && (l2_if_adjust == -1))
    {
      /* Just removed last L2 subinterface on this port */
      vec_foreach (sw_if_index, bif->members)
      {
        mif_hw = vnet_get_sup_hw_interface (vnm, *sw_if_index);
        ethernet_set_flags (vnm, mif_hw->hw_if_index,
                            /*ETHERNET_INTERFACE_FLAG_DEFAULT_L3 */ 0);
      }
    }

  return 0;
}

static __clib_unused clib_error_t *
bond_subif_add_del_function (vnet_main_t * vnm, u32 hw_if_index,
                             struct vnet_sw_interface_t *st, int is_add)
{
  /* Nothing for now */
  return 0;
}

static clib_error_t *
bond_interface_admin_up_down (vnet_main_t * vnm, u32 hw_if_index, u32 flags)
{
  vnet_hw_interface_t *hif = vnet_get_hw_interface (vnm, hw_if_index);
  uword is_up = (flags & VNET_SW_INTERFACE_FLAG_ADMIN_UP) != 0;
  bond_main_t *bm = &bond_main;
  bond_if_t *bif = pool_elt_at_index (bm->interfaces, hif->dev_instance);

  bif->admin_up = is_up;
  if (is_up)
    vnet_hw_interface_set_flags (vnm, bif->hw_if_index,
                                 VNET_HW_INTERFACE_FLAG_LINK_UP);
  return 0;
}

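/*
 * Secondary MAC address add/del handler. The bond interface itself owns no
 * hardware, so the request is replicated to every member interface; if any
 * member fails, the change is rolled back on the members already updated and
 * the error is returned to the caller.
 */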
static clib_error_t *
bond_add_del_mac_address (vnet_hw_interface_t * hi, const u8 * address,
                          u8 is_add)
{
  vnet_main_t *vnm = vnet_get_main ();
  bond_if_t *bif;
  clib_error_t *error = 0;
  vnet_hw_interface_t *s_hi;
  int i;

  bif = bond_get_bond_if_by_sw_if_index (hi->sw_if_index);
  if (!bif)
    {
      return clib_error_return (0,
                                "No bond interface found for sw_if_index %u",
                                hi->sw_if_index);
    }

  /* Add/del the address on each member hw interface; the members control
   * the hardware */
  vec_foreach_index (i, bif->members)
  {
    s_hi = vnet_get_sup_hw_interface (vnm, vec_elt (bif->members, i));
    error = vnet_hw_interface_add_del_mac_address (vnm, s_hi->hw_if_index,
                                                   address, is_add);

    if (error)
      {
        int j;

        /* undo any that were completed before the failure */
        for (j = i - 1; j > -1; j--)
          {
            s_hi = vnet_get_sup_hw_interface (vnm, vec_elt (bif->members, j));
            vnet_hw_interface_add_del_mac_address (vnm, s_hi->hw_if_index,
                                                   address, !(is_add));
          }

        return error;
      }
  }

  return 0;
}

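/* Append a buffer index to the calling thread's per-member output queue. */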
static_always_inline void
bond_tx_add_to_queue (bond_per_thread_data_t * ptd, u32 port, u32 bi)
{
  u32 idx = ptd->per_port_queue[port].n_buffers++;
  ptd->per_port_queue[port].buffers[idx] = bi;
}

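/*
 * Broadcast "load balancing": clone the packet to every active member except
 * member 0 and queue the clones directly. The original buffer is left to the
 * caller, and the returned value of 0 means it is sent on the first member.
 */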
static_always_inline u32
bond_lb_broadcast (vlib_main_t * vm,
                   bond_if_t * bif, vlib_buffer_t * b0, uword n_members)
{
  bond_main_t *bm = &bond_main;
  vlib_buffer_t *c0;
  int port;
  u32 sw_if_index;
  u16 thread_index = vm->thread_index;
  bond_per_thread_data_t *ptd = vec_elt_at_index (bm->per_thread_data,
                                                  thread_index);

  for (port = 1; port < n_members; port++)
    {
      sw_if_index = *vec_elt_at_index (bif->active_members, port);
      c0 = vlib_buffer_copy (vm, b0);
      if (PREDICT_TRUE (c0 != 0))
        {
          vnet_buffer (c0)->sw_if_index[VLIB_TX] = sw_if_index;
          bond_tx_add_to_queue (ptd, port, vlib_get_buffer_index (vm, c0));
        }
    }

  return 0;
}

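/*
 * L2 load-balance hash: a 2-tuple hash over a 64-bit load at the destination
 * MAC (which also covers the first two source MAC bytes) and a 32-bit load of
 * the last four source MAC bytes.
 */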
static_always_inline u32
bond_lb_l2 (vlib_buffer_t * b0)
{
  ethernet_header_t *eth = vlib_buffer_get_current (b0);
  u64 *dst = (u64 *) & eth->dst_address[0];
  u64 a = clib_mem_unaligned (dst, u64);
  u32 *src = (u32 *) & eth->src_address[2];
  u32 b = clib_mem_unaligned (src, u32);

  return lb_hash_hash_2_tuples (a, b);
}

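/*
 * Return a pointer to the ethertype field, skipping up to two VLAN tags
 * (single-tagged or QinQ frames) so callers can locate the L3 header.
 */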
static_always_inline u16 *
bond_locate_ethertype (ethernet_header_t * eth)
{
  u16 *ethertype_p;
  ethernet_vlan_header_t *vlan;

  if (!ethernet_frame_is_tagged (clib_net_to_host_u16 (eth->type)))
    {
      ethertype_p = &eth->type;
    }
  else
    {
      vlan = (void *) (eth + 1);
      ethertype_p = &vlan->type;
      if (*ethertype_p == ntohs (ETHERNET_TYPE_VLAN))
        {
          vlan++;
          ethertype_p = &vlan->type;
        }
    }
  return ethertype_p;
}

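/*
 * L2+L3 load-balance hash: XOR of the MAC addresses combined with the IPv4
 * address pair or the IPv6 source/destination addresses. Non-IP frames fall
 * back to the plain L2 hash.
 */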
static_always_inline u32
bond_lb_l23 (vlib_buffer_t * b0)
{
  ethernet_header_t *eth = vlib_buffer_get_current (b0);
  u8 ip_version;
  ip4_header_t *ip4;
  u16 ethertype, *ethertype_p;
  u32 *mac1, *mac2, *mac3;

  ethertype_p = bond_locate_ethertype (eth);
  ethertype = clib_mem_unaligned (ethertype_p, u16);

  if ((ethertype != htons (ETHERNET_TYPE_IP4)) &&
      (ethertype != htons (ETHERNET_TYPE_IP6)))
    return bond_lb_l2 (b0);

  ip4 = (ip4_header_t *) (ethertype_p + 1);
  ip_version = (ip4->ip_version_and_header_length >> 4);

  if (ip_version == 0x4)
    {
      u32 a, c;

      mac1 = (u32 *) & eth->dst_address[0];
      mac2 = (u32 *) & eth->dst_address[4];
      mac3 = (u32 *) & eth->src_address[2];

      a = clib_mem_unaligned (mac1, u32) ^ clib_mem_unaligned (mac2, u32) ^
        clib_mem_unaligned (mac3, u32);
      c =
        lb_hash_hash_2_tuples (clib_mem_unaligned (&ip4->address_pair, u64),
                               a);
      return c;
    }
  else if (ip_version == 0x6)
    {
      u64 a;
      u32 c;
      ip6_header_t *ip6 = (ip6_header_t *) (eth + 1);

      mac1 = (u32 *) & eth->dst_address[0];
      mac2 = (u32 *) & eth->dst_address[4];
      mac3 = (u32 *) & eth->src_address[2];

      a = clib_mem_unaligned (mac1, u32) ^ clib_mem_unaligned (mac2, u32) ^
        clib_mem_unaligned (mac3, u32);
      c =
        lb_hash_hash (clib_mem_unaligned
                      (&ip6->src_address.as_uword[0], uword),
                      clib_mem_unaligned (&ip6->src_address.as_uword[1],
                                          uword),
                      clib_mem_unaligned (&ip6->dst_address.as_uword[0],
                                          uword),
                      clib_mem_unaligned (&ip6->dst_address.as_uword[1],
                                          uword), a);
      return c;
    }
  return bond_lb_l2 (b0);
}

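/*
 * L3+L4 load-balance hash: hashes the IP address pair together with the
 * TCP/UDP source and destination ports (treated as 0 for other protocols).
 * For IPv6, a leading hop-by-hop options header is skipped when locating the
 * ports. Non-IP frames fall back to the L2 hash.
 */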
static_always_inline u32
bond_lb_l34 (vlib_buffer_t * b0)
{
  ethernet_header_t *eth = vlib_buffer_get_current (b0);
  u8 ip_version;
  uword is_tcp_udp;
  ip4_header_t *ip4;
  u16 ethertype, *ethertype_p;

  ethertype_p = bond_locate_ethertype (eth);
  ethertype = clib_mem_unaligned (ethertype_p, u16);

  if ((ethertype != htons (ETHERNET_TYPE_IP4)) &&
      (ethertype != htons (ETHERNET_TYPE_IP6)))
    return (bond_lb_l2 (b0));

  ip4 = (ip4_header_t *) (ethertype_p + 1);
  ip_version = (ip4->ip_version_and_header_length >> 4);

  if (ip_version == 0x4)
    {
      u32 a, t1, t2;
      tcp_header_t *tcp = (void *) (ip4 + 1);

      is_tcp_udp = (ip4->protocol == IP_PROTOCOL_TCP) ||
        (ip4->protocol == IP_PROTOCOL_UDP);
      t1 = is_tcp_udp ? clib_mem_unaligned (&tcp->src, u16) : 0;
      t2 = is_tcp_udp ? clib_mem_unaligned (&tcp->dst, u16) : 0;
      a = t1 ^ t2;
      return
        lb_hash_hash_2_tuples (clib_mem_unaligned (&ip4->address_pair, u64),
                               a);
    }
  else if (ip_version == 0x6)
    {
      u64 a;
      u32 c, t1, t2;
      ip6_header_t *ip6 = (ip6_header_t *) (eth + 1);
      tcp_header_t *tcp = (void *) (ip6 + 1);

      is_tcp_udp = 0;
      if (PREDICT_TRUE ((ip6->protocol == IP_PROTOCOL_TCP) ||
                        (ip6->protocol == IP_PROTOCOL_UDP)))
        {
          is_tcp_udp = 1;
          tcp = (void *) (ip6 + 1);
        }
      else if (ip6->protocol == IP_PROTOCOL_IP6_HOP_BY_HOP_OPTIONS)
        {
          ip6_hop_by_hop_header_t *hbh =
            (ip6_hop_by_hop_header_t *) (ip6 + 1);
          if ((hbh->protocol == IP_PROTOCOL_TCP)
              || (hbh->protocol == IP_PROTOCOL_UDP))
            {
              is_tcp_udp = 1;
              tcp = (tcp_header_t *) ((u8 *) hbh + ((hbh->length + 1) << 3));
            }
        }
      t1 = is_tcp_udp ? clib_mem_unaligned (&tcp->src, u16) : 0;
      t2 = is_tcp_udp ? clib_mem_unaligned (&tcp->dst, u16) : 0;
      a = t1 ^ t2;
      c =
        lb_hash_hash (clib_mem_unaligned
                      (&ip6->src_address.as_uword[0], uword),
                      clib_mem_unaligned (&ip6->src_address.as_uword[1],
                                          uword),
                      clib_mem_unaligned (&ip6->dst_address.as_uword[0],
                                          uword),
                      clib_mem_unaligned (&ip6->dst_address.as_uword[1],
                                          uword), a);
      return c;
    }

  return bond_lb_l2 (b0);
}

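/*
 * Round-robin selection: advances the bond's lb_rr_last_index and wraps at
 * n_members. Note the index is kept in the bond_if_t, so it is shared across
 * worker threads rather than being per thread.
 */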
static_always_inline u32
bond_lb_round_robin (bond_if_t * bif, vlib_buffer_t * b0, uword n_members)
{
  bif->lb_rr_last_index++;
  if (bif->lb_rr_last_index >= n_members)
    bif->lb_rr_last_index = 0;

  return bif->lb_rr_last_index;
}

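/*
 * Compute a load-balance hash for every buffer in the frame using the
 * selected algorithm. The main loop handles four buffers at a time with
 * prefetching; the results are written to the parallel hash array h and are
 * later folded into a member index by bond_hash_to_port().
 */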
static_always_inline void
bond_tx_inline (vlib_main_t * vm, bond_if_t * bif, vlib_buffer_t ** b,
                u32 * h, u32 n_left, uword n_members, u32 lb_alg)
{
  while (n_left >= 4)
    {
      // Prefetch next iteration
      if (n_left >= 8)
        {
          vlib_buffer_t **pb = b + 4;

          vlib_prefetch_buffer_header (pb[0], LOAD);
          vlib_prefetch_buffer_header (pb[1], LOAD);
          vlib_prefetch_buffer_header (pb[2], LOAD);
          vlib_prefetch_buffer_header (pb[3], LOAD);

          CLIB_PREFETCH (pb[0]->data, CLIB_CACHE_LINE_BYTES, LOAD);
          CLIB_PREFETCH (pb[1]->data, CLIB_CACHE_LINE_BYTES, LOAD);
          CLIB_PREFETCH (pb[2]->data, CLIB_CACHE_LINE_BYTES, LOAD);
          CLIB_PREFETCH (pb[3]->data, CLIB_CACHE_LINE_BYTES, LOAD);
        }

      VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);
      VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[1]);
      VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[2]);
      VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[3]);

      if (lb_alg == BOND_LB_L2)
        {
          h[0] = bond_lb_l2 (b[0]);
          h[1] = bond_lb_l2 (b[1]);
          h[2] = bond_lb_l2 (b[2]);
          h[3] = bond_lb_l2 (b[3]);
        }
      else if (lb_alg == BOND_LB_L34)
        {
          h[0] = bond_lb_l34 (b[0]);
          h[1] = bond_lb_l34 (b[1]);
          h[2] = bond_lb_l34 (b[2]);
          h[3] = bond_lb_l34 (b[3]);
        }
      else if (lb_alg == BOND_LB_L23)
        {
          h[0] = bond_lb_l23 (b[0]);
          h[1] = bond_lb_l23 (b[1]);
          h[2] = bond_lb_l23 (b[2]);
          h[3] = bond_lb_l23 (b[3]);
        }
      else if (lb_alg == BOND_LB_RR)
        {
          h[0] = bond_lb_round_robin (bif, b[0], n_members);
          h[1] = bond_lb_round_robin (bif, b[1], n_members);
          h[2] = bond_lb_round_robin (bif, b[2], n_members);
          h[3] = bond_lb_round_robin (bif, b[3], n_members);
        }
      else if (lb_alg == BOND_LB_BC)
        {
          h[0] = bond_lb_broadcast (vm, bif, b[0], n_members);
          h[1] = bond_lb_broadcast (vm, bif, b[1], n_members);
          h[2] = bond_lb_broadcast (vm, bif, b[2], n_members);
          h[3] = bond_lb_broadcast (vm, bif, b[3], n_members);
        }
      else
        {
          ASSERT (0);
        }

      n_left -= 4;
      b += 4;
      h += 4;
    }

  while (n_left > 0)
    {
      VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b[0]);

      if (bif->lb == BOND_LB_L2)
        h[0] = bond_lb_l2 (b[0]);
      else if (bif->lb == BOND_LB_L34)
        h[0] = bond_lb_l34 (b[0]);
      else if (bif->lb == BOND_LB_L23)
        h[0] = bond_lb_l23 (b[0]);
      else if (bif->lb == BOND_LB_RR)
        h[0] = bond_lb_round_robin (bif, b[0], n_members);
      else if (bif->lb == BOND_LB_BC)
        h[0] = bond_lb_broadcast (vm, bif, b[0], n_members);
      else
        {
          ASSERT (0);
        }

      n_left -= 1;
      b += 1;
      h += 1;
    }
}

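/*
 * Reduce each raw hash to a member index in [0, n_members). With the modulo
 * shortcut (valid when n_members is a power of two) the reduction is a simple
 * mask; otherwise a true modulo is used, with an AVX2 path that approximates
 * the division in single-precision floating point on the low 16 hash bits.
 */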
static_always_inline void
bond_hash_to_port (u32 * h, u32 n_left, u32 n_members,
                   int use_modulo_shortcut)
{
  u32 mask = n_members - 1;

#ifdef CLIB_HAVE_VEC256
  /* only lower 16 bits of hash due to single precision fp arithmetic */
  u32x8 mask8, sc8u, h8a, h8b;
  f32x8 sc8f;

  if (use_modulo_shortcut)
    {
      mask8 = u32x8_splat (mask);
    }
  else
    {
      mask8 = u32x8_splat (0xffff);
      sc8u = u32x8_splat (n_members);
      sc8f = f32x8_from_u32x8 (sc8u);
    }

  while (n_left > 16)
    {
      h8a = u32x8_load_unaligned (h) & mask8;
      h8b = u32x8_load_unaligned (h + 8) & mask8;

      if (use_modulo_shortcut == 0)
        {
          h8a -= sc8u * u32x8_from_f32x8 (f32x8_from_u32x8 (h8a) / sc8f);
          h8b -= sc8u * u32x8_from_f32x8 (f32x8_from_u32x8 (h8b) / sc8f);
        }

      u32x8_store_unaligned (h8a, h);
      u32x8_store_unaligned (h8b, h + 8);
      n_left -= 16;
      h += 16;
    }
#endif

  while (n_left > 4)
    {
      if (use_modulo_shortcut)
        {
          h[0] &= mask;
          h[1] &= mask;
          h[2] &= mask;
          h[3] &= mask;
        }
      else
        {
          h[0] %= n_members;
          h[1] %= n_members;
          h[2] %= n_members;
          h[3] %= n_members;
        }
      n_left -= 4;
      h += 4;
    }
  while (n_left)
    {
      if (use_modulo_shortcut)
        h[0] &= mask;
      else
        h[0] %= n_members;
      n_left -= 1;
      h += 1;
    }
}

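/*
 * Stamp the TX sw_if_index on each buffer and enqueue it on the per-thread
 * queue of the chosen member. With single_sw_if_index set, all buffers go to
 * the interface in data[0]; otherwise data holds the per-buffer member index
 * produced by bond_hash_to_port().
 */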
static_always_inline void
bond_update_sw_if_index (bond_per_thread_data_t * ptd, bond_if_t * bif,
                         u32 * bi, vlib_buffer_t ** b, u32 * data, u32 n_left,
                         int single_sw_if_index)
{
  u32 sw_if_index = data[0];
  u32 *h = data;

  while (n_left >= 4)
    {
      // Prefetch next iteration
      if (n_left >= 8)
        {
          vlib_buffer_t **pb = b + 4;
          vlib_prefetch_buffer_header (pb[0], LOAD);
          vlib_prefetch_buffer_header (pb[1], LOAD);
          vlib_prefetch_buffer_header (pb[2], LOAD);
          vlib_prefetch_buffer_header (pb[3], LOAD);
        }

      if (PREDICT_FALSE (single_sw_if_index))
        {
          vnet_buffer (b[0])->sw_if_index[VLIB_TX] = sw_if_index;
          vnet_buffer (b[1])->sw_if_index[VLIB_TX] = sw_if_index;
          vnet_buffer (b[2])->sw_if_index[VLIB_TX] = sw_if_index;
          vnet_buffer (b[3])->sw_if_index[VLIB_TX] = sw_if_index;

          bond_tx_add_to_queue (ptd, 0, bi[0]);
          bond_tx_add_to_queue (ptd, 0, bi[1]);
          bond_tx_add_to_queue (ptd, 0, bi[2]);
          bond_tx_add_to_queue (ptd, 0, bi[3]);
        }
      else
        {
          u32 sw_if_index[4];

          sw_if_index[0] = *vec_elt_at_index (bif->active_members, h[0]);
          sw_if_index[1] = *vec_elt_at_index (bif->active_members, h[1]);
          sw_if_index[2] = *vec_elt_at_index (bif->active_members, h[2]);
          sw_if_index[3] = *vec_elt_at_index (bif->active_members, h[3]);

          vnet_buffer (b[0])->sw_if_index[VLIB_TX] = sw_if_index[0];
          vnet_buffer (b[1])->sw_if_index[VLIB_TX] = sw_if_index[1];
          vnet_buffer (b[2])->sw_if_index[VLIB_TX] = sw_if_index[2];
          vnet_buffer (b[3])->sw_if_index[VLIB_TX] = sw_if_index[3];

          bond_tx_add_to_queue (ptd, h[0], bi[0]);
          bond_tx_add_to_queue (ptd, h[1], bi[1]);
          bond_tx_add_to_queue (ptd, h[2], bi[2]);
          bond_tx_add_to_queue (ptd, h[3], bi[3]);
        }

      bi += 4;
      h += 4;
      b += 4;
      n_left -= 4;
    }
  while (n_left)
    {
      if (PREDICT_FALSE (single_sw_if_index))
        {
          vnet_buffer (b[0])->sw_if_index[VLIB_TX] = sw_if_index;
          bond_tx_add_to_queue (ptd, 0, bi[0]);
        }
      else
        {
          u32 sw_if_index0 = *vec_elt_at_index (bif->active_members, h[0]);

          vnet_buffer (b[0])->sw_if_index[VLIB_TX] = sw_if_index0;
          bond_tx_add_to_queue (ptd, h[0], bi[0]);
        }

      bi += 1;
      h += 1;
      b += 1;
      n_left -= 1;
    }
}

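/*
 * Add packet trace records for as many buffers as tracing requests. Each
 * record captures the ethernet header, the sw_if_index currently in the
 * buffer and the member interface chosen for the packet (h[0], or the first
 * active member when h is NULL).
 */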
static_always_inline void
bond_tx_trace (vlib_main_t * vm, vlib_node_runtime_t * node, bond_if_t * bif,
               vlib_buffer_t ** b, u32 n_left, u32 * h)
{
  uword n_trace = vlib_get_trace_count (vm, node);

  while (n_trace > 0 && n_left > 0)
    {
      bond_packet_trace_t *t0;
      ethernet_header_t *eth;
      u32 next0 = 0;

      vlib_trace_buffer (vm, node, next0, b[0], 0 /* follow_chain */ );
      vlib_set_trace_count (vm, node, --n_trace);
      t0 = vlib_add_trace (vm, node, b[0], sizeof (*t0));
      eth = vlib_buffer_get_current (b[0]);
      t0->ethernet = *eth;
      t0->sw_if_index = vnet_buffer (b[0])->sw_if_index[VLIB_TX];
      if (!h)
        {
          t0->bond_sw_if_index = *vec_elt_at_index (bif->active_members, 0);
        }
      else
        {
          t0->bond_sw_if_index =
            *vec_elt_at_index (bif->active_members, h[0]);
          h++;
        }
      b++;
      n_left--;
    }
}

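/*
 * Bond device TX function. Drops the frame if the bond is admin-down or has
 * no active members; otherwise it picks members (active-backup and broadcast
 * are special-cased, the hash-based modes go through bond_tx_inline and
 * bond_hash_to_port), rewrites each buffer's TX sw_if_index, and finally
 * hands one frame per member to the member's interface output path.
 */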
VNET_DEVICE_CLASS_TX_FN (bond_dev_class) (vlib_main_t * vm,
                                          vlib_node_runtime_t * node,
                                          vlib_frame_t * frame)
{
  vnet_interface_output_runtime_t *rund = (void *) node->runtime_data;
  bond_main_t *bm = &bond_main;
  u16 thread_index = vm->thread_index;
  bond_if_t *bif = pool_elt_at_index (bm->interfaces, rund->dev_instance);
  uword n_members;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE];
  u32 *from = vlib_frame_vector_args (frame);
  u32 n_left = frame->n_vectors;
  u32 hashes[VLIB_FRAME_SIZE], *h;
  vnet_main_t *vnm = vnet_get_main ();
  bond_per_thread_data_t *ptd = vec_elt_at_index (bm->per_thread_data,
                                                  thread_index);
  u32 p, sw_if_index;

  if (PREDICT_FALSE (bif->admin_up == 0))
    {
      vlib_buffer_free (vm, vlib_frame_vector_args (frame), frame->n_vectors);
      vlib_increment_simple_counter (vnet_main.interface_main.sw_if_counters +
                                     VNET_INTERFACE_COUNTER_DROP,
                                     thread_index, bif->sw_if_index,
                                     frame->n_vectors);
      vlib_error_count (vm, node->node_index, BOND_TX_ERROR_IF_DOWN,
                        frame->n_vectors);
      return frame->n_vectors;
    }

  n_members = vec_len (bif->active_members);
  if (PREDICT_FALSE (n_members == 0))
    {
      vlib_buffer_free (vm, vlib_frame_vector_args (frame), frame->n_vectors);
      vlib_increment_simple_counter (vnet_main.interface_main.sw_if_counters +
                                     VNET_INTERFACE_COUNTER_DROP,
                                     thread_index, bif->sw_if_index,
                                     frame->n_vectors);
      vlib_error_count (vm, node->node_index, BOND_TX_ERROR_NO_MEMBER,
                        frame->n_vectors);
      return frame->n_vectors;
    }

  vlib_get_buffers (vm, from, bufs, n_left);

  /* active-backup mode, ship everything to first sw if index */
  if ((bif->lb == BOND_LB_AB) || PREDICT_FALSE (n_members == 1))
    {
      sw_if_index = *vec_elt_at_index (bif->active_members, 0);

      bond_tx_trace (vm, node, bif, bufs, frame->n_vectors, 0);
      bond_update_sw_if_index (ptd, bif, from, bufs, &sw_if_index, n_left,
                               /* single_sw_if_index */ 1);
      goto done;
    }

  if (bif->lb == BOND_LB_BC)
    {
      sw_if_index = *vec_elt_at_index (bif->active_members, 0);

      bond_tx_inline (vm, bif, bufs, hashes, n_left, n_members, BOND_LB_BC);
      bond_tx_trace (vm, node, bif, bufs, frame->n_vectors, 0);
      bond_update_sw_if_index (ptd, bif, from, bufs, &sw_if_index, n_left,
                               /* single_sw_if_index */ 1);
      goto done;
    }

  /* If there is at least one member on the local numa node, only members on
     the local numa node will transmit packets when bif->local_numa_only is
     enabled */
  if (bif->n_numa_members >= 1)
    n_members = bif->n_numa_members;

  if (bif->lb == BOND_LB_L2)
    bond_tx_inline (vm, bif, bufs, hashes, n_left, n_members, BOND_LB_L2);
  else if (bif->lb == BOND_LB_L34)
    bond_tx_inline (vm, bif, bufs, hashes, n_left, n_members, BOND_LB_L34);
  else if (bif->lb == BOND_LB_L23)
    bond_tx_inline (vm, bif, bufs, hashes, n_left, n_members, BOND_LB_L23);
  else if (bif->lb == BOND_LB_RR)
    bond_tx_inline (vm, bif, bufs, hashes, n_left, n_members, BOND_LB_RR);
  else
    ASSERT (0);

  /* calculate port out of hash */
  h = hashes;
  if (BOND_MODULO_SHORTCUT (n_members))
    bond_hash_to_port (h, frame->n_vectors, n_members, 1);
  else
    bond_hash_to_port (h, frame->n_vectors, n_members, 0);

  bond_tx_trace (vm, node, bif, bufs, frame->n_vectors, h);

  bond_update_sw_if_index (ptd, bif, from, bufs, hashes, frame->n_vectors,
                           /* single_sw_if_index */ 0);

done:
  for (p = 0; p < n_members; p++)
    {
      vlib_frame_t *f;
      u32 *to_next;

      sw_if_index = *vec_elt_at_index (bif->active_members, p);
      if (PREDICT_TRUE (ptd->per_port_queue[p].n_buffers))
        {
          f = vnet_get_frame_to_sw_interface (vnm, sw_if_index);
          f->n_vectors = ptd->per_port_queue[p].n_buffers;
          to_next = vlib_frame_vector_args (f);
          clib_memcpy_fast (to_next, ptd->per_port_queue[p].buffers,
                            f->n_vectors * sizeof (u32));
          vnet_put_frame_to_sw_interface (vnm, sw_if_index, f);
          ptd->per_port_queue[p].n_buffers = 0;
        }
    }
  return frame->n_vectors;
}

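/*
 * Per-sw-interface walk callback used when the active member changes: sends
 * gratuitous ARP / unsolicited neighbor advertisements on the interface so
 * neighbors learn the new path.
 */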
static walk_rc_t
bond_active_interface_switch_cb (vnet_main_t * vnm, u32 sw_if_index,
                                 void *arg)
{
  bond_main_t *bm = &bond_main;

  ip_neighbor_advertise (bm->vlib_main, IP46_TYPE_BOTH, NULL, sw_if_index);

  return (WALK_CONTINUE);
}

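/*
 * Background process node: waits for BOND_SEND_GARP_NA events carrying a hw
 * interface index and walks all of its software interfaces, advertising via
 * the callback above.
 */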
static uword
bond_process (vlib_main_t * vm, vlib_node_runtime_t * rt, vlib_frame_t * f)
{
  vnet_main_t *vnm = vnet_get_main ();
  uword event_type, *event_data = 0;

  while (1)
    {
      u32 i;
      u32 hw_if_index;

      vlib_process_wait_for_event (vm);
      event_type = vlib_process_get_events (vm, &event_data);
      ASSERT (event_type == BOND_SEND_GARP_NA);
      for (i = 0; i < vec_len (event_data); i++)
        {
          hw_if_index = event_data[i];
          if (vnet_get_hw_interface_or_null (vnm, hw_if_index))
            /* walk hw interface to process all subinterfaces */
            vnet_hw_interface_walk_sw (vnm, hw_if_index,
                                       bond_active_interface_switch_cb, 0);
        }
      vec_reset_length (event_data);
    }
  return 0;
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (bond_process_node) = {
  .function = bond_process,
  .flags = VLIB_NODE_FLAG_TRACE_SUPPORTED,
  .type = VLIB_NODE_TYPE_PROCESS,
  .name = "bond-process",
};
/* *INDENT-ON* */

/* *INDENT-OFF* */
VNET_DEVICE_CLASS (bond_dev_class) = {
  .name = "bond",
  .tx_function_n_errors = BOND_TX_N_ERROR,
  .tx_function_error_strings = bond_tx_error_strings,
  .format_device_name = format_bond_interface_name,
  .set_l2_mode_function = bond_set_l2_mode_function,
  .admin_up_down_function = bond_interface_admin_up_down,
  .subif_add_del_function = bond_subif_add_del_function,
  .format_tx_trace = format_bond_tx_trace,
  .mac_addr_add_del_function = bond_add_del_mac_address,
};

/* *INDENT-ON* */

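/*
 * Interface add/del hook: when a software interface that is currently a bond
 * member is deleted, detach it from its bond so the bond does not keep a
 * stale reference.
 */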
static clib_error_t *
bond_member_interface_add_del (vnet_main_t * vnm, u32 sw_if_index, u32 is_add)
{
  bond_main_t *bm = &bond_main;
  member_if_t *mif;
  bond_detach_member_args_t args = { 0 };

  if (is_add)
    return 0;
  mif = bond_get_member_by_sw_if_index (sw_if_index);
  if (!mif)
    return 0;
  args.member = sw_if_index;
  bond_detach_member (bm->vlib_main, &args);
  return args.error;
}

VNET_SW_INTERFACE_ADD_DEL_FUNCTION (bond_member_interface_add_del);

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */