/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vlib/vlib.h>
#include <vnet/l2/feat_bitmap.h>
#include <vnet/l2/l2_rw.h>

/**
 * @file
 * @brief Layer 2 Rewrite.
 *
 * The Layer 2 Rewrite node uses classify tables to match packets. Then,
 * using the provisioned mask and value, it modifies the packet header.
 */


#ifndef CLIB_MARCH_VARIANT
l2_rw_main_t l2_rw_main;
#endif /* CLIB_MARCH_VARIANT */

typedef struct
{
  u32 sw_if_index;
  u32 classify_table_index;
  u32 rewrite_entry_index;
} l2_rw_trace_t;

static u8 *
format_l2_rw_entry (u8 * s, va_list * args)
{
  l2_rw_entry_t *e = va_arg (*args, l2_rw_entry_t *);
  l2_rw_main_t *rw = &l2_rw_main;
  s = format (s, "%d - mask:%U value:%U\n",
              e - rw->entries,
              format_hex_bytes, e->mask,
              e->rewrite_n_vectors * sizeof (u32x4), format_hex_bytes,
              e->value, e->rewrite_n_vectors * sizeof (u32x4));
  s =
    format (s, "  hits:%d skip_bytes:%d", e->hit_count,
            e->skip_n_vectors * sizeof (u32x4));
  return s;
}

static u8 *
format_l2_rw_config (u8 * s, va_list * args)
{
  l2_rw_config_t *c = va_arg (*args, l2_rw_config_t *);
  return format (s, "table-index:%d miss-index:%d",
                 c->table_index, c->miss_index);
}

/* packet trace format function */
static u8 *
format_l2_rw_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  l2_rw_trace_t *t = va_arg (*args, l2_rw_trace_t *);
  return format (s, "l2-rw: sw_if_index %d, table %d, entry %d",
                 t->sw_if_index, t->classify_table_index,
                 t->rewrite_entry_index);
}

always_inline l2_rw_config_t *
l2_rw_get_config (u32 sw_if_index)
{
  l2_rw_main_t *rw = &l2_rw_main;
  if (PREDICT_FALSE (!clib_bitmap_get (rw->configs_bitmap, sw_if_index)))
    {
      vec_validate (rw->configs, sw_if_index);
      rw->configs[sw_if_index].table_index = ~0;
      rw->configs[sw_if_index].miss_index = ~0;
      rw->configs_bitmap =
        clib_bitmap_set (rw->configs_bitmap, sw_if_index, 1);
    }
  return &rw->configs[sw_if_index];
}

static_always_inline void
l2_rw_rewrite (l2_rw_entry_t * rwe, u8 * h)
{
  if (U32X4_ALIGNED (h))
    {
      u32x4 *d = ((u32x4 *) h) + rwe->skip_n_vectors;
      switch (rwe->rewrite_n_vectors)
        {
        case 5:
          d[4] = (d[4] & ~rwe->mask[4]) | rwe->value[4];
          /* FALLTHROUGH */
        case 4:
          d[3] = (d[3] & ~rwe->mask[3]) | rwe->value[3];
          /* FALLTHROUGH */
        case 3:
          d[2] = (d[2] & ~rwe->mask[2]) | rwe->value[2];
          /* FALLTHROUGH */
        case 2:
          d[1] = (d[1] & ~rwe->mask[1]) | rwe->value[1];
          /* FALLTHROUGH */
        case 1:
          d[0] = (d[0] & ~rwe->mask[0]) | rwe->value[0];
          break;
        default:
          abort ();
        }
    }
  else
    {
      u64 *d = ((u64 *) h) + rwe->skip_n_vectors * 2;
      switch (rwe->rewrite_n_vectors)
        {
        case 5:
          d[8] =
            (d[8] & ~(((u64 *) rwe->mask)[8])) | (((u64 *) rwe->value)[8]);
          d[9] =
            (d[9] & ~(((u64 *) rwe->mask)[9])) | (((u64 *) rwe->value)[9]);
          /* FALLTHROUGH */
        case 4:
          d[6] =
            (d[6] & ~(((u64 *) rwe->mask)[6])) | (((u64 *) rwe->value)[6]);
          d[7] =
            (d[7] & ~(((u64 *) rwe->mask)[7])) | (((u64 *) rwe->value)[7]);
          /* FALLTHROUGH */
        case 3:
          d[4] =
            (d[4] & ~(((u64 *) rwe->mask)[4])) | (((u64 *) rwe->value)[4]);
          d[5] =
            (d[5] & ~(((u64 *) rwe->mask)[5])) | (((u64 *) rwe->value)[5]);
          /* FALLTHROUGH */
        case 2:
          d[2] =
            (d[2] & ~(((u64 *) rwe->mask)[2])) | (((u64 *) rwe->value)[2]);
          d[3] =
            (d[3] & ~(((u64 *) rwe->mask)[3])) | (((u64 *) rwe->value)[3]);
          /* FALLTHROUGH */
        case 1:
          d[0] =
            (d[0] & ~(((u64 *) rwe->mask)[0])) | (((u64 *) rwe->value)[0]);
          d[1] =
            (d[1] & ~(((u64 *) rwe->mask)[1])) | (((u64 *) rwe->value)[1]);
          break;
        default:
          abort ();
        }
    }
}
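
/*
 * Worked example (illustrative values only): with
 *   mask  = ff ff ff ff ff ff 00 00 ... 00
 *   value = 01 02 03 04 05 06 00 00 ... 00
 * each 16-byte vector of the header is updated as
 *
 *   d[i] = (d[i] & ~mask[i]) | value[i];
 *
 * so the first 6 bytes of the packet (the destination MAC of an Ethernet
 * frame) become 01:02:03:04:05:06 while every unmasked byte is preserved.
 */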

VLIB_NODE_FN (l2_rw_node) (vlib_main_t * vm,
                           vlib_node_runtime_t * node, vlib_frame_t * frame)
{
  l2_rw_main_t *rw = &l2_rw_main;
  u32 n_left_from, *from, *to_next, next_index;
  vnet_classify_main_t *vcm = &vnet_classify_main;
  f64 now = vlib_time_now (vlib_get_main ());

  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;	/* number of packets to process */
  next_index = node->cached_next_index;

  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      /* get space to enqueue frame to graph node "next_index" */
      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      while (n_left_from >= 6 && n_left_to_next >= 2)
        {
          u32 bi0, next0, sw_if_index0, rwe_index0;
          u32 bi1, next1, sw_if_index1, rwe_index1;
          vlib_buffer_t *b0, *b1;
          ethernet_header_t *h0, *h1;
          l2_rw_config_t *config0, *config1;
          u64 hash0, hash1;
          vnet_classify_table_t *t0, *t1;
          vnet_classify_entry_t *e0, *e1;
          l2_rw_entry_t *rwe0, *rwe1;

          {
            vlib_buffer_t *p2, *p3, *p4, *p5;
            p2 = vlib_get_buffer (vm, from[2]);
            p3 = vlib_get_buffer (vm, from[3]);
            p4 = vlib_get_buffer (vm, from[4]);
            p5 = vlib_get_buffer (vm, from[5]);

            vlib_prefetch_buffer_header (p4, LOAD);
            vlib_prefetch_buffer_header (p5, LOAD);
            vlib_prefetch_buffer_data (p2, LOAD);
            vlib_prefetch_buffer_data (p3, LOAD);
          }

          bi0 = from[0];
          bi1 = from[1];
          to_next[0] = bi0;
          to_next[1] = bi1;
          from += 2;
          to_next += 2;
          n_left_from -= 2;
          n_left_to_next -= 2;

          b0 = vlib_get_buffer (vm, bi0);
          b1 = vlib_get_buffer (vm, bi1);
          h0 = vlib_buffer_get_current (b0);
          h1 = vlib_buffer_get_current (b1);

          sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
          sw_if_index1 = vnet_buffer (b1)->sw_if_index[VLIB_RX];
          config0 = l2_rw_get_config (sw_if_index0);	/*TODO: check sw_if_index0 value */
          config1 = l2_rw_get_config (sw_if_index1);	/*TODO: check sw_if_index1 value */
          t0 = pool_elt_at_index (vcm->tables, config0->table_index);
          t1 = pool_elt_at_index (vcm->tables, config1->table_index);

          hash0 = vnet_classify_hash_packet (t0, (u8 *) h0);
          hash1 = vnet_classify_hash_packet (t1, (u8 *) h1);
          e0 = vnet_classify_find_entry (t0, (u8 *) h0, hash0, now);
          e1 = vnet_classify_find_entry (t1, (u8 *) h1, hash1, now);

          while (!e0 && (t0->next_table_index != ~0))
            {
              t0 = pool_elt_at_index (vcm->tables, t0->next_table_index);
              hash0 = vnet_classify_hash_packet (t0, (u8 *) h0);
              e0 = vnet_classify_find_entry (t0, (u8 *) h0, hash0, now);
            }

          while (!e1 && (t1->next_table_index != ~0))
            {
              t1 = pool_elt_at_index (vcm->tables, t1->next_table_index);
              hash1 = vnet_classify_hash_packet (t1, (u8 *) h1);
              e1 = vnet_classify_find_entry (t1, (u8 *) h1, hash1, now);
            }

          rwe_index0 = e0 ? e0->opaque_index : config0->miss_index;
          rwe_index1 = e1 ? e1->opaque_index : config1->miss_index;

          if (rwe_index0 != ~0)
            {
              rwe0 = pool_elt_at_index (rw->entries, rwe_index0);
              l2_rw_rewrite (rwe0, (u8 *) h0);
            }
          if (rwe_index1 != ~0)
            {
              rwe1 = pool_elt_at_index (rw->entries, rwe_index1);
              l2_rw_rewrite (rwe1, (u8 *) h1);
            }

          if (PREDICT_FALSE ((b0->flags & VLIB_BUFFER_IS_TRACED)))
            {
              l2_rw_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t));
              t->sw_if_index = sw_if_index0;
              t->classify_table_index = config0->table_index;
              t->rewrite_entry_index = rwe_index0;
            }

          if (PREDICT_FALSE ((b1->flags & VLIB_BUFFER_IS_TRACED)))
            {
              l2_rw_trace_t *t = vlib_add_trace (vm, node, b1, sizeof (*t));
              t->sw_if_index = sw_if_index1;
              t->classify_table_index = config1->table_index;
              t->rewrite_entry_index = rwe_index1;
            }

          /* Update feature bitmap and get next feature index */
          next0 = vnet_l2_feature_next (b0, rw->feat_next_node_index,
                                        L2INPUT_FEAT_RW);
          next1 = vnet_l2_feature_next (b1, rw->feat_next_node_index,
                                        L2INPUT_FEAT_RW);

          vlib_validate_buffer_enqueue_x2 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, bi1, next0, next1);
        }

      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0, next0, sw_if_index0, rwe_index0;
          vlib_buffer_t *b0;
          ethernet_header_t *h0;
          l2_rw_config_t *config0;
          u64 hash0;
          vnet_classify_table_t *t0;
          vnet_classify_entry_t *e0;
          l2_rw_entry_t *rwe0;

          bi0 = from[0];
          to_next[0] = bi0;
          from += 1;
          to_next += 1;
          n_left_from -= 1;
          n_left_to_next -= 1;

          b0 = vlib_get_buffer (vm, bi0);
          h0 = vlib_buffer_get_current (b0);

          sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
          config0 = l2_rw_get_config (sw_if_index0);	/*TODO: check sw_if_index0 value */
          t0 = pool_elt_at_index (vcm->tables, config0->table_index);

          hash0 = vnet_classify_hash_packet (t0, (u8 *) h0);
          e0 = vnet_classify_find_entry (t0, (u8 *) h0, hash0, now);

          while (!e0 && (t0->next_table_index != ~0))
            {
              t0 = pool_elt_at_index (vcm->tables, t0->next_table_index);
              hash0 = vnet_classify_hash_packet (t0, (u8 *) h0);
              e0 = vnet_classify_find_entry (t0, (u8 *) h0, hash0, now);
            }

          rwe_index0 = e0 ? e0->opaque_index : config0->miss_index;

          if (rwe_index0 != ~0)
            {
              rwe0 = pool_elt_at_index (rw->entries, rwe_index0);
              l2_rw_rewrite (rwe0, (u8 *) h0);
            }

          if (PREDICT_FALSE ((b0->flags & VLIB_BUFFER_IS_TRACED)))
            {
              l2_rw_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t));
              t->sw_if_index = sw_if_index0;
              t->classify_table_index = config0->table_index;
              t->rewrite_entry_index = rwe_index0;
            }

          /* Update feature bitmap and get next feature index */
          next0 = vnet_l2_feature_next (b0, rw->feat_next_node_index,
                                        L2INPUT_FEAT_RW);

          vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, next0);
        }
      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  return frame->n_vectors;
}

#ifndef CLIB_MARCH_VARIANT
int
l2_rw_mod_entry (u32 * index,
                 u8 * mask, u8 * value, u32 len, u32 skip, u8 is_del)
{
  l2_rw_main_t *rw = &l2_rw_main;
  l2_rw_entry_t *e = 0;
  if (*index != ~0)
    {
      if (pool_is_free_index (rw->entries, *index))
        {
          return -1;
        }
      e = pool_elt_at_index (rw->entries, *index);
    }
  else
    {
      pool_get (rw->entries, e);
      *index = e - rw->entries;
    }

  if (!e)
    return -1;

  if (is_del)
    {
      pool_put (rw->entries, e);
      return 0;
    }

  e->skip_n_vectors = skip / sizeof (u32x4);
  skip -= e->skip_n_vectors * sizeof (u32x4);
  e->rewrite_n_vectors = (skip + len - 1) / sizeof (u32x4) + 1;
  vec_alloc_aligned (e->mask, e->rewrite_n_vectors, sizeof (u32x4));
  clib_memset (e->mask, 0, e->rewrite_n_vectors * sizeof (u32x4));
  vec_alloc_aligned (e->value, e->rewrite_n_vectors, sizeof (u32x4));
  clib_memset (e->value, 0, e->rewrite_n_vectors * sizeof (u32x4));

  clib_memcpy (((u8 *) e->value) + skip, value, len);
  clib_memcpy (((u8 *) e->mask) + skip, mask, len);

  int i;
  for (i = 0; i < e->rewrite_n_vectors; i++)
    {
      e->value[i] &= e->mask[i];
    }

  return 0;
}
#endif /* CLIB_MARCH_VARIANT */
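
/*
 * Usage sketch (hypothetical values, for illustration): program an entry
 * that overwrites the first 6 bytes of matching packets (the destination
 * MAC of an Ethernet frame). On success, 'index' holds the allocated
 * entry index, suitable for use as a classify entry's opaque_index or as
 * an interface miss_index.
 *
 *   u32 index = ~0;
 *   u8 mask[6] = { 0xff, 0xff, 0xff, 0xff, 0xff, 0xff };
 *   u8 value[6] = { 0x01, 0x02, 0x03, 0x04, 0x05, 0x06 };
 *   if (l2_rw_mod_entry (&index, mask, value, 6, 0, 0))
 *     clib_warning ("l2_rw_mod_entry failed");
 */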

static clib_error_t *
l2_rw_entry_cli_fn (vlib_main_t * vm,
                    unformat_input_t * input, vlib_cli_command_t * cmd)
{
  u32 index = ~0;
  u8 *mask = 0;
  u8 *value = 0;
  u32 skip = 0;
  u8 del = 0;

  while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
    {
      if (unformat (input, "index %d", &index))
        ;
      else if (unformat (input, "mask %U", unformat_hex_string, &mask))
        ;
      else if (unformat (input, "value %U", unformat_hex_string, &value))
        ;
      else if (unformat (input, "skip %d", &skip))
        ;
      else if (unformat (input, "del"))
        del = 1;
      else
        break;
    }

  if (!mask || !value)
    return clib_error_return (0, "Unspecified mask or value");

  if (vec_len (mask) != vec_len (value))
    return clib_error_return (0, "Mask and value lengths must be identical");

  int ret;
  if ((ret =
       l2_rw_mod_entry (&index, mask, value, vec_len (mask), skip, del)))
    return clib_error_return (0, "Could not add entry");

  return 0;
}

/*?
 * The Layer 2 Rewrite node uses classify tables to match packets. Then,
 * using the provisioned mask and value, it modifies the packet header.
 *
 * @cliexpar
 * @todo This is incomplete. This needs a detailed description and a
 * practical example.
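 *
 * A tentative example with illustrative values, rewriting the first
 * 6 bytes of matching packets (the destination MAC of an Ethernet frame):
 * @cliexcmd{l2 rewrite entry mask ffffffffffff value 010203040506}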
?*/
/* *INDENT-OFF* */
VLIB_CLI_COMMAND (l2_rw_entry_cli, static) = {
  .path = "l2 rewrite entry",
  .short_help =
  "l2 rewrite entry [index <index>] [mask <hex-mask>] [value <hex-value>] [skip <n_bytes>] [del]",
  .function = l2_rw_entry_cli_fn,
};
/* *INDENT-ON* */

#ifndef CLIB_MARCH_VARIANT
int
l2_rw_interface_set_table (u32 sw_if_index, u32 table_index, u32 miss_index)
{
  l2_rw_config_t *c = l2_rw_get_config (sw_if_index);
  l2_rw_main_t *rw = &l2_rw_main;

  c->table_index = table_index;
  c->miss_index = miss_index;
  u32 feature_bitmap = (table_index == ~0) ? 0 : L2INPUT_FEAT_RW;

  l2input_intf_bitmap_enable (sw_if_index, L2INPUT_FEAT_RW, feature_bitmap);

  if (c->table_index == ~0)
    clib_bitmap_set (rw->configs_bitmap, sw_if_index, 0);

  return 0;
}
#endif /* CLIB_MARCH_VARIANT */
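
/*
 * Usage sketch (hypothetical): bind classify table 0 to an interface and
 * fall back to rewrite entry 'miss_index' when no classify entry matches.
 * Passing table_index == ~0 detaches the interface instead.
 *
 *   l2_rw_interface_set_table (sw_if_index, 0, miss_index);
 */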

static clib_error_t *
l2_rw_interface_cli_fn (vlib_main_t * vm,
                        unformat_input_t * input, vlib_cli_command_t * cmd)
{
  vnet_main_t *vnm = vnet_get_main ();
  u32 table_index = ~0;
  u32 sw_if_index = ~0;
  u32 miss_index = ~0;

  if (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
    {
      unformat (input, "%U", unformat_vnet_sw_interface, vnm, &sw_if_index);
    }

  while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
    {
      if (unformat (input, "table %d", &table_index))
        ;
      else if (unformat (input, "miss-index %d", &miss_index))
        ;
      else
        break;
    }

  if (sw_if_index == ~0)
    return clib_error_return (0,
                              "You must specify an interface 'iface <interface>'",
                              format_unformat_error, input);
  int ret;
  if ((ret =
       l2_rw_interface_set_table (sw_if_index, table_index, miss_index)))
    return clib_error_return (0, "l2_rw_interface_set_table returned %d",
                              ret);

  return 0;
}

/*?
 * The Layer 2 Rewrite node uses classify tables to match packets. Then,
 * using the provisioned mask and value, it modifies the packet header.
 *
 * @cliexpar
 * @todo This is incomplete. This needs a detailed description and a
 * practical example.
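 *
 * A tentative example (the interface name and indices are illustrative):
 * @cliexcmd{set interface l2 rewrite GigabitEthernet0/8/0 table 0 miss-index 0}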
?*/
/* *INDENT-OFF* */
VLIB_CLI_COMMAND (l2_rw_interface_cli, static) = {
  .path = "set interface l2 rewrite",
  .short_help =
  "set interface l2 rewrite <interface> [table <table index>] [miss-index <entry-index>]",
  .function = l2_rw_interface_cli_fn,
};
/* *INDENT-ON* */

static clib_error_t *
l2_rw_show_interfaces_cli_fn (vlib_main_t * vm,
                              unformat_input_t * input,
                              vlib_cli_command_t * cmd)
{
  l2_rw_main_t *rw = &l2_rw_main;
  if (clib_bitmap_count_set_bits (rw->configs_bitmap) == 0)
    vlib_cli_output (vm, "No interface is currently using l2 rewrite\n");

  uword i;
  /* *INDENT-OFF* */
  clib_bitmap_foreach(i, rw->configs_bitmap, {
    vlib_cli_output (vm, "sw_if_index:%d %U\n", i, format_l2_rw_config, &rw->configs[i]);
  });
  /* *INDENT-ON* */
  return 0;
}

/*?
 * The Layer 2 Rewrite node uses classify tables to match packets. Then,
 * using the provisioned mask and value, it modifies the packet header.
 *
 * @cliexpar
 * @todo This is incomplete. This needs a detailed description and a
 * practical example.
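 *
 * A minimal example (the command takes no arguments):
 * @cliexcmd{show l2 rewrite interfaces}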
?*/
/* *INDENT-OFF* */
VLIB_CLI_COMMAND (l2_rw_show_interfaces_cli, static) = {
  .path = "show l2 rewrite interfaces",
  .short_help =
  "show l2 rewrite interfaces",
  .function = l2_rw_show_interfaces_cli_fn,
};
/* *INDENT-ON* */

static clib_error_t *
l2_rw_show_entries_cli_fn (vlib_main_t * vm,
                           unformat_input_t * input, vlib_cli_command_t * cmd)
{
  l2_rw_main_t *rw = &l2_rw_main;
  l2_rw_entry_t *e;
  if (pool_elts (rw->entries) == 0)
    vlib_cli_output (vm, "No entries\n");

  /* *INDENT-OFF* */
  pool_foreach(e, rw->entries, {
    vlib_cli_output (vm, "%U\n", format_l2_rw_entry, e);
  });
  /* *INDENT-ON* */
  return 0;
}

/*?
 * The Layer 2 Rewrite node uses classify tables to match packets. Then,
 * using the provisioned mask and value, it modifies the packet header.
 *
 * @cliexpar
 * @todo This is incomplete. This needs a detailed description and a
 * practical example.
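 *
 * A minimal example (the command takes no arguments):
 * @cliexcmd{show l2 rewrite entries}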
?*/
/* *INDENT-OFF* */
VLIB_CLI_COMMAND (l2_rw_show_entries_cli, static) = {
  .path = "show l2 rewrite entries",
  .short_help =
  "show l2 rewrite entries",
  .function = l2_rw_show_entries_cli_fn,
};
/* *INDENT-ON* */

static int
l2_rw_enable_disable (u32 bridge_domain, u8 disable)
{
  u32 mask = L2INPUT_FEAT_RW;
  l2input_set_bridge_features (bridge_domain, mask, disable ? 0 : mask);
  return 0;
}

static clib_error_t *
l2_rw_set_cli_fn (vlib_main_t * vm,
                  unformat_input_t * input, vlib_cli_command_t * cmd)
{
  u32 bridge_domain;
  u8 disable = 0;

  if (unformat_check_input (input) == UNFORMAT_END_OF_INPUT ||
      !unformat (input, "%d", &bridge_domain))
    {
      return clib_error_return (0, "You must specify a bridge domain");
    }

  if (unformat_check_input (input) != UNFORMAT_END_OF_INPUT &&
      unformat (input, "disable"))
    {
      disable = 1;
    }

  if (l2_rw_enable_disable (bridge_domain, disable))
    return clib_error_return (0, "Could not enable or disable rewrite");

  return 0;
}

/*?
 * The Layer 2 Rewrite node uses classify tables to match packets. Then,
 * using the provisioned mask and value, it modifies the packet header.
 *
 * @cliexpar
 * @todo This is incomplete. This needs a detailed description and a
 * practical example.
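 *
 * A tentative example (the bridge-domain id 200 is illustrative):
 * @cliexcmd{set bridge-domain rewrite 200}
 * To disable the feature on the same bridge domain:
 * @cliexcmd{set bridge-domain rewrite 200 disable}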
?*/
/* *INDENT-OFF* */
VLIB_CLI_COMMAND (l2_rw_set_cli, static) = {
  .path = "set bridge-domain rewrite",
  .short_help =
  "set bridge-domain rewrite <bridge-domain> [disable]",
  .function = l2_rw_set_cli_fn,
};
/* *INDENT-ON* */

static clib_error_t *
l2_rw_init (vlib_main_t * vm)
{
  l2_rw_main_t *rw = &l2_rw_main;
  rw->configs = 0;
  rw->entries = 0;
  clib_bitmap_alloc (rw->configs_bitmap, 1);
  feat_bitmap_init_next_nodes (vm,
                               l2_rw_node.index,
                               L2INPUT_N_FEAT,
                               l2input_get_feat_names (),
                               rw->feat_next_node_index);
  return 0;
}

VLIB_INIT_FUNCTION (l2_rw_init);

enum
{
  L2_RW_NEXT_DROP,
  L2_RW_N_NEXT,
};

#define foreach_l2_rw_error \
_(UNKNOWN, "Unknown error")

typedef enum
{
#define _(sym,str) L2_RW_ERROR_##sym,
  foreach_l2_rw_error
#undef _
  L2_RW_N_ERROR,
} l2_rw_error_t;

static char *l2_rw_error_strings[] = {
#define _(sym,string) string,
  foreach_l2_rw_error
#undef _
};

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (l2_rw_node) = {
  .name = "l2-rw",
  .vector_size = sizeof (u32),
  .format_trace = format_l2_rw_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_errors = ARRAY_LEN(l2_rw_error_strings),
  .error_strings = l2_rw_error_strings,
  .runtime_data_bytes = 0,
  .n_next_nodes = L2_RW_N_NEXT,
  .next_nodes = { [L2_RW_NEXT_DROP] = "error-drop"},
};
/* *INDENT-ON* */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */