/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vlib/vlib.h>
#include <vnet/l2/feat_bitmap.h>
#include <vnet/l2/l2_rw.h>

/**
 * @file
 * @brief Layer 2 Rewrite.
 *
 * Layer 2-Rewrite node uses classify tables to match packets. Then, using
 * the provisioned mask and value, modifies the packet header.
 */
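
/*
 * Typical configuration sequence (a sketch; table and entry indices are
 * whatever the classifier and "l2 rewrite entry" actually return):
 *   1. Create a classify table matching the traffic of interest.
 *   2. Add a rewrite entry with "l2 rewrite entry mask <hex> value <hex>".
 *   3. Attach the table with "set interface l2 rewrite <interface> table <n>".
 */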


l2_rw_main_t l2_rw_main;

vlib_node_registration_t l2_rw_node;

typedef struct
{
  u32 sw_if_index;
  u32 classify_table_index;
  u32 rewrite_entry_index;
} l2_rw_trace_t;

static u8 *
format_l2_rw_entry (u8 * s, va_list * args)
{
  l2_rw_entry_t *e = va_arg (*args, l2_rw_entry_t *);
  l2_rw_main_t *rw = &l2_rw_main;
  s = format (s, "%d - mask:%U value:%U\n",
              e - rw->entries,
              format_hex_bytes, e->mask,
              e->rewrite_n_vectors * sizeof (u32x4), format_hex_bytes,
              e->value, e->rewrite_n_vectors * sizeof (u32x4));
  s =
    format (s, " hits:%d skip_bytes:%d", e->hit_count,
            e->skip_n_vectors * sizeof (u32x4));
  return s;
}

static u8 *
format_l2_rw_config (u8 * s, va_list * args)
{
  l2_rw_config_t *c = va_arg (*args, l2_rw_config_t *);
  return format (s, "table-index:%d miss-index:%d",
                 c->table_index, c->miss_index);
}

/* packet trace format function */
static u8 *
format_l2_rw_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  l2_rw_trace_t *t = va_arg (*args, l2_rw_trace_t *);
  return format (s, "l2-rw: sw_if_index %d, table %d, entry %d",
                 t->sw_if_index, t->classify_table_index,
                 t->rewrite_entry_index);
}

always_inline l2_rw_config_t *
l2_rw_get_config (u32 sw_if_index)
{
  l2_rw_main_t *rw = &l2_rw_main;
  if (PREDICT_FALSE (!clib_bitmap_get (rw->configs_bitmap, sw_if_index)))
    {
      vec_validate (rw->configs, sw_if_index);
      rw->configs[sw_if_index].table_index = ~0;
      rw->configs[sw_if_index].miss_index = ~0;
      rw->configs_bitmap =
        clib_bitmap_set (rw->configs_bitmap, sw_if_index, 1);
    }
  return &rw->configs[sw_if_index];
}

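/*
 * Apply a rewrite entry to packet data 'h': each 16-byte vector i becomes
 * (d[i] & ~mask[i]) | value[i], starting skip_n_vectors vectors into the
 * packet. A u32x4 SIMD path is used when 'h' is 16-byte aligned, an
 * equivalent u64 path otherwise.
 */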
static_always_inline void
l2_rw_rewrite (l2_rw_entry_t * rwe, u8 * h)
{
  if (U32X4_ALIGNED (h))
    {
      u32x4 *d = ((u32x4 *) h) + rwe->skip_n_vectors;
      switch (rwe->rewrite_n_vectors)
        {
        case 5:
          d[4] = (d[4] & ~rwe->mask[4]) | rwe->value[4];
          /* FALLTHROUGH */
        case 4:
          d[3] = (d[3] & ~rwe->mask[3]) | rwe->value[3];
          /* FALLTHROUGH */
        case 3:
          d[2] = (d[2] & ~rwe->mask[2]) | rwe->value[2];
          /* FALLTHROUGH */
        case 2:
          d[1] = (d[1] & ~rwe->mask[1]) | rwe->value[1];
          /* FALLTHROUGH */
        case 1:
          d[0] = (d[0] & ~rwe->mask[0]) | rwe->value[0];
          break;
        default:
          abort ();
        }
    }
  else
    {
      u64 *d = ((u64 *) h) + rwe->skip_n_vectors * 2;
      switch (rwe->rewrite_n_vectors)
        {
        case 5:
          d[8] =
            (d[8] & ~(((u64 *) rwe->mask)[8])) | (((u64 *) rwe->value)[8]);
          d[9] =
            (d[9] & ~(((u64 *) rwe->mask)[9])) | (((u64 *) rwe->value)[9]);
          /* FALLTHROUGH */
        case 4:
          d[6] =
            (d[6] & ~(((u64 *) rwe->mask)[6])) | (((u64 *) rwe->value)[6]);
          d[7] =
            (d[7] & ~(((u64 *) rwe->mask)[7])) | (((u64 *) rwe->value)[7]);
          /* FALLTHROUGH */
        case 3:
          d[4] =
            (d[4] & ~(((u64 *) rwe->mask)[4])) | (((u64 *) rwe->value)[4]);
          d[5] =
            (d[5] & ~(((u64 *) rwe->mask)[5])) | (((u64 *) rwe->value)[5]);
          /* FALLTHROUGH */
        case 2:
          d[2] =
            (d[2] & ~(((u64 *) rwe->mask)[2])) | (((u64 *) rwe->value)[2]);
          d[3] =
            (d[3] & ~(((u64 *) rwe->mask)[3])) | (((u64 *) rwe->value)[3]);
          /* FALLTHROUGH */
        case 1:
          d[0] =
            (d[0] & ~(((u64 *) rwe->mask)[0])) | (((u64 *) rwe->value)[0]);
          d[1] =
            (d[1] & ~(((u64 *) rwe->mask)[1])) | (((u64 *) rwe->value)[1]);
          break;
        default:
          abort ();
        }
    }
}

static uword
l2_rw_node_fn (vlib_main_t * vm,
               vlib_node_runtime_t * node, vlib_frame_t * frame)
{
  l2_rw_main_t *rw = &l2_rw_main;
  u32 n_left_from, *from, *to_next, next_index;
  vnet_classify_main_t *vcm = &vnet_classify_main;
  f64 now = vlib_time_now (vlib_get_main ());
  u32 prefetch_size = 0;

  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;	/* number of packets to process */
  next_index = node->cached_next_index;

  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      /* get space to enqueue frame to graph node "next_index" */
      vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

      while (n_left_from >= 4 && n_left_to_next >= 2)
        {
          u32 bi0, next0, sw_if_index0, rwe_index0;
          u32 bi1, next1, sw_if_index1, rwe_index1;
          vlib_buffer_t *b0, *b1;
          ethernet_header_t *h0, *h1;
          l2_rw_config_t *config0, *config1;
          u64 hash0, hash1;
          vnet_classify_table_t *t0, *t1;
          vnet_classify_entry_t *e0, *e1;
          l2_rw_entry_t *rwe0, *rwe1;

          {
            vlib_buffer_t *p2, *p3;
            p2 = vlib_get_buffer (vm, from[2]);
            p3 = vlib_get_buffer (vm, from[3]);

            vlib_prefetch_buffer_header (p2, LOAD);
            vlib_prefetch_buffer_header (p3, LOAD);
            CLIB_PREFETCH (vlib_buffer_get_current (p2), prefetch_size, LOAD);
            CLIB_PREFETCH (vlib_buffer_get_current (p3), prefetch_size, LOAD);
          }

          bi0 = from[0];
          bi1 = from[1];
          to_next[0] = bi0;
          to_next[1] = bi1;
          from += 2;
          to_next += 2;
          n_left_from -= 2;
          n_left_to_next -= 2;

          b0 = vlib_get_buffer (vm, bi0);
          b1 = vlib_get_buffer (vm, bi1);
          h0 = vlib_buffer_get_current (b0);
          h1 = vlib_buffer_get_current (b1);

          sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
          sw_if_index1 = vnet_buffer (b1)->sw_if_index[VLIB_RX];
          config0 = l2_rw_get_config (sw_if_index0);	/*TODO: check sw_if_index0 value */
          config1 = l2_rw_get_config (sw_if_index1);	/*TODO: check sw_if_index1 value */
          t0 = pool_elt_at_index (vcm->tables, config0->table_index);
          t1 = pool_elt_at_index (vcm->tables, config1->table_index);
          prefetch_size =
            (t1->skip_n_vectors + t1->match_n_vectors) * sizeof (u32x4);

          hash0 = vnet_classify_hash_packet (t0, (u8 *) h0);
          hash1 = vnet_classify_hash_packet (t1, (u8 *) h1);
          e0 = vnet_classify_find_entry (t0, (u8 *) h0, hash0, now);
          e1 = vnet_classify_find_entry (t1, (u8 *) h1, hash1, now);

          while (!e0 && (t0->next_table_index != ~0))
            {
              t0 = pool_elt_at_index (vcm->tables, t0->next_table_index);
              hash0 = vnet_classify_hash_packet (t0, (u8 *) h0);
              e0 = vnet_classify_find_entry (t0, (u8 *) h0, hash0, now);
            }

          while (!e1 && (t1->next_table_index != ~0))
            {
              t1 = pool_elt_at_index (vcm->tables, t1->next_table_index);
              hash1 = vnet_classify_hash_packet (t1, (u8 *) h1);
              e1 = vnet_classify_find_entry (t1, (u8 *) h1, hash1, now);
            }

          rwe_index0 = e0 ? e0->opaque_index : config0->miss_index;
          rwe_index1 = e1 ? e1->opaque_index : config1->miss_index;

          if (rwe_index0 != ~0)
            {
              rwe0 = pool_elt_at_index (rw->entries, rwe_index0);
              l2_rw_rewrite (rwe0, (u8 *) h0);
            }
          if (rwe_index1 != ~0)
            {
              rwe1 = pool_elt_at_index (rw->entries, rwe_index1);
              l2_rw_rewrite (rwe1, (u8 *) h1);
            }

          if (PREDICT_FALSE ((b0->flags & VLIB_BUFFER_IS_TRACED)))
            {
              l2_rw_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t));
              t->sw_if_index = sw_if_index0;
              t->classify_table_index = config0->table_index;
              t->rewrite_entry_index = rwe_index0;
            }

          if (PREDICT_FALSE ((b1->flags & VLIB_BUFFER_IS_TRACED)))
            {
              l2_rw_trace_t *t = vlib_add_trace (vm, node, b1, sizeof (*t));
              t->sw_if_index = sw_if_index1;
              t->classify_table_index = config1->table_index;
              t->rewrite_entry_index = rwe_index1;
            }

          /* Update feature bitmap and get next feature index */
          next0 = vnet_l2_feature_next (b0, rw->feat_next_node_index,
                                        L2INPUT_FEAT_RW);
          next1 = vnet_l2_feature_next (b1, rw->feat_next_node_index,
                                        L2INPUT_FEAT_RW);

          vlib_validate_buffer_enqueue_x2 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, bi1, next0, next1);
        }

      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0, next0, sw_if_index0, rwe_index0;
          vlib_buffer_t *b0;
          ethernet_header_t *h0;
          l2_rw_config_t *config0;
          u64 hash0;
          vnet_classify_table_t *t0;
          vnet_classify_entry_t *e0;
          l2_rw_entry_t *rwe0;

          bi0 = from[0];
          to_next[0] = bi0;
          from += 1;
          to_next += 1;
          n_left_from -= 1;
          n_left_to_next -= 1;

          b0 = vlib_get_buffer (vm, bi0);
          h0 = vlib_buffer_get_current (b0);

          sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_RX];
          config0 = l2_rw_get_config (sw_if_index0);	/*TODO: check sw_if_index0 value */
          t0 = pool_elt_at_index (vcm->tables, config0->table_index);

          hash0 = vnet_classify_hash_packet (t0, (u8 *) h0);
          e0 = vnet_classify_find_entry (t0, (u8 *) h0, hash0, now);

          while (!e0 && (t0->next_table_index != ~0))
            {
              t0 = pool_elt_at_index (vcm->tables, t0->next_table_index);
              hash0 = vnet_classify_hash_packet (t0, (u8 *) h0);
              e0 = vnet_classify_find_entry (t0, (u8 *) h0, hash0, now);
            }

          rwe_index0 = e0 ? e0->opaque_index : config0->miss_index;

          if (rwe_index0 != ~0)
            {
              rwe0 = pool_elt_at_index (rw->entries, rwe_index0);
              l2_rw_rewrite (rwe0, (u8 *) h0);
            }

          if (PREDICT_FALSE ((b0->flags & VLIB_BUFFER_IS_TRACED)))
            {
              l2_rw_trace_t *t = vlib_add_trace (vm, node, b0, sizeof (*t));
              t->sw_if_index = sw_if_index0;
              t->classify_table_index = config0->table_index;
              t->rewrite_entry_index = rwe_index0;
            }

          /* Update feature bitmap and get next feature index */
          next0 = vnet_l2_feature_next (b0, rw->feat_next_node_index,
                                        L2INPUT_FEAT_RW);

          vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, next0);
        }
      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

  return frame->n_vectors;
}

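/*
 * Create, update or delete a rewrite entry. Pass *index == ~0 to allocate
 * a new entry; the chosen index is returned through *index. 'skip' is a
 * byte offset from the packet start: whole 16-byte vectors are skipped and
 * the remainder becomes a byte offset into the first rewritten vector.
 * Values are pre-masked so the datapath can apply (d & ~mask) | value
 * directly.
 */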
int
l2_rw_mod_entry (u32 * index,
                 u8 * mask, u8 * value, u32 len, u32 skip, u8 is_del)
{
  l2_rw_main_t *rw = &l2_rw_main;
  l2_rw_entry_t *e = 0;
  if (*index != ~0)
    {
      if (pool_is_free_index (rw->entries, *index))
        {
          return -1;
        }
      e = pool_elt_at_index (rw->entries, *index);
    }
  else
    {
      pool_get (rw->entries, e);
      *index = e - rw->entries;
    }

  if (!e)
    return -1;

  if (is_del)
    {
      pool_put (rw->entries, e);
      return 0;
    }

  e->skip_n_vectors = skip / sizeof (u32x4);
  skip -= e->skip_n_vectors * sizeof (u32x4);
  e->rewrite_n_vectors = (skip + len - 1) / sizeof (u32x4) + 1;
  vec_alloc_aligned (e->mask, e->rewrite_n_vectors, sizeof (u32x4));
  clib_memset (e->mask, 0, e->rewrite_n_vectors * sizeof (u32x4));
  vec_alloc_aligned (e->value, e->rewrite_n_vectors, sizeof (u32x4));
  clib_memset (e->value, 0, e->rewrite_n_vectors * sizeof (u32x4));

  clib_memcpy (((u8 *) e->value) + skip, value, len);
  clib_memcpy (((u8 *) e->mask) + skip, mask, len);

  int i;
  for (i = 0; i < e->rewrite_n_vectors; i++)
    {
      e->value[i] &= e->mask[i];
    }

  return 0;
}

static clib_error_t *
l2_rw_entry_cli_fn (vlib_main_t * vm,
                    unformat_input_t * input, vlib_cli_command_t * cmd)
{
  u32 index = ~0;
  u8 *mask = 0;
  u8 *value = 0;
  u32 skip = 0;
  u8 del = 0;

  while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
    {
      if (unformat (input, "index %d", &index))
        ;
      else if (unformat (input, "mask %U", unformat_hex_string, &mask))
        ;
      else if (unformat (input, "value %U", unformat_hex_string, &value))
        ;
      else if (unformat (input, "skip %d", &skip))
        ;
      else if (unformat (input, "del"))
        del = 1;
      else
        break;
    }

  if (!mask || !value)
    return clib_error_return (0, "Unspecified mask or value");

  if (vec_len (mask) != vec_len (value))
    return clib_error_return (0, "Mask and value lengths must be identical");

  int ret;
  if ((ret =
       l2_rw_mod_entry (&index, mask, value, vec_len (mask), skip, del)))
    return clib_error_return (0, "Could not add entry");

  return 0;
}

/*?
 * Layer 2-Rewrite node uses classify tables to match packets. Then, using
 * the provisioned mask and value, modifies the packet header.
 *
 * @cliexpar
 * @todo This is incomplete. This needs a detailed description and a
 * practical example.
?*/
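/*
 * Illustrative example only (hex strings are hypothetical; mask and value
 * must have the same length). With skip 0 this rewrites the first 6 bytes
 * of the Ethernet header, i.e. the destination MAC:
 *   l2 rewrite entry mask ffffffffffff value 010203040506
 */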
/* *INDENT-OFF* */
VLIB_CLI_COMMAND (l2_rw_entry_cli, static) = {
  .path = "l2 rewrite entry",
  .short_help =
  "l2 rewrite entry [index <index>] [mask <hex-mask>] [value <hex-value>] [skip <n_bytes>] [del]",
  .function = l2_rw_entry_cli_fn,
};
/* *INDENT-ON* */

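/*
 * Bind a classify table (and optional miss rewrite entry) to an interface
 * and enable the rewrite feature on it; table_index == ~0 disables the
 * feature for that interface.
 */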
int
l2_rw_interface_set_table (u32 sw_if_index, u32 table_index, u32 miss_index)
{
  l2_rw_config_t *c = l2_rw_get_config (sw_if_index);
  l2_rw_main_t *rw = &l2_rw_main;

  c->table_index = table_index;
  c->miss_index = miss_index;
  u32 feature_bitmap = (table_index == ~0) ? 0 : L2INPUT_FEAT_RW;

  l2input_intf_bitmap_enable (sw_if_index, L2INPUT_FEAT_RW, feature_bitmap);

  if (c->table_index == ~0)
    clib_bitmap_set (rw->configs_bitmap, sw_if_index, 0);

  return 0;
}

static clib_error_t *
l2_rw_interface_cli_fn (vlib_main_t * vm,
                        unformat_input_t * input, vlib_cli_command_t * cmd)
{
  vnet_main_t *vnm = vnet_get_main ();
  u32 table_index = ~0;
  u32 sw_if_index = ~0;
  u32 miss_index = ~0;

  if (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
    {
      unformat (input, "%U", unformat_vnet_sw_interface, vnm, &sw_if_index);
    }

  while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
    {
      if (unformat (input, "table %d", &table_index))
        ;
      else if (unformat (input, "miss-index %d", &miss_index))
        ;
      else
        break;
    }

  if (sw_if_index == ~0)
    return clib_error_return (0,
                              "You must specify an interface 'iface <interface>'",
                              format_unformat_error, input);
  int ret;
  if ((ret =
       l2_rw_interface_set_table (sw_if_index, table_index, miss_index)))
    return clib_error_return (0, "l2_rw_interface_set_table returned %d",
                              ret);

  return 0;
}

/*?
 * Layer 2-Rewrite node uses classify tables to match packets. Then, using
 * the provisioned mask and value, modifies the packet header.
 *
 * @cliexpar
 * @todo This is incomplete. This needs a detailed description and a
 * practical example.
?*/
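/*
 * Illustrative example only (interface name and indices are hypothetical):
 *   set interface l2 rewrite GigabitEthernet0/8/0 table 5 miss-index 12
 * Omitting 'table' leaves the table index at ~0, which disables the
 * feature on the interface.
 */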
/* *INDENT-OFF* */
VLIB_CLI_COMMAND (l2_rw_interface_cli, static) = {
  .path = "set interface l2 rewrite",
  .short_help =
  "set interface l2 rewrite <interface> [table <table index>] [miss-index <entry-index>]",
  .function = l2_rw_interface_cli_fn,
};
/* *INDENT-ON* */

static clib_error_t *
l2_rw_show_interfaces_cli_fn (vlib_main_t * vm,
                              unformat_input_t * input,
                              vlib_cli_command_t * cmd)
{
  l2_rw_main_t *rw = &l2_rw_main;
  if (clib_bitmap_count_set_bits (rw->configs_bitmap) == 0)
    vlib_cli_output (vm, "No interface is currently using l2 rewrite\n");

  uword i;
  /* *INDENT-OFF* */
  clib_bitmap_foreach(i, rw->configs_bitmap, {
    vlib_cli_output (vm, "sw_if_index:%d %U\n", i, format_l2_rw_config, &rw->configs[i]);
  });
  /* *INDENT-ON* */
  return 0;
}

/*?
 * Layer 2-Rewrite node uses classify tables to match packets. Then, using
 * the provisioned mask and value, modifies the packet header.
 *
 * @cliexpar
 * @todo This is incomplete. This needs a detailed description and a
 * practical example.
?*/
/* *INDENT-OFF* */
VLIB_CLI_COMMAND (l2_rw_show_interfaces_cli, static) = {
  .path = "show l2 rewrite interfaces",
  .short_help =
  "show l2 rewrite interfaces",
  .function = l2_rw_show_interfaces_cli_fn,
};
/* *INDENT-ON* */

static clib_error_t *
l2_rw_show_entries_cli_fn (vlib_main_t * vm,
                           unformat_input_t * input, vlib_cli_command_t * cmd)
{
  l2_rw_main_t *rw = &l2_rw_main;
  l2_rw_entry_t *e;
  if (pool_elts (rw->entries) == 0)
    vlib_cli_output (vm, "No entries\n");

  /* *INDENT-OFF* */
  pool_foreach(e, rw->entries, {
    vlib_cli_output (vm, "%U\n", format_l2_rw_entry, e);
  });
  /* *INDENT-ON* */
  return 0;
}

/*?
 * Layer 2-Rewrite node uses classify tables to match packets. Then, using
 * the provisioned mask and value, modifies the packet header.
 *
 * @cliexpar
 * @todo This is incomplete. This needs a detailed description and a
 * practical example.
?*/
/* *INDENT-OFF* */
VLIB_CLI_COMMAND (l2_rw_show_entries_cli, static) = {
  .path = "show l2 rewrite entries",
  .short_help =
  "show l2 rewrite entries",
  .function = l2_rw_show_entries_cli_fn,
};
/* *INDENT-ON* */

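/*
 * Enable or disable the L2 rewrite feature for all members of a bridge
 * domain.
 */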
int
l2_rw_enable_disable (u32 bridge_domain, u8 disable)
{
  u32 mask = L2INPUT_FEAT_RW;
  l2input_set_bridge_features (bridge_domain, mask, disable ? 0 : mask);
  return 0;
}

static clib_error_t *
l2_rw_set_cli_fn (vlib_main_t * vm,
                  unformat_input_t * input, vlib_cli_command_t * cmd)
{
  u32 bridge_domain;
  u8 disable = 0;

  if (unformat_check_input (input) == UNFORMAT_END_OF_INPUT ||
      !unformat (input, "%d", &bridge_domain))
    {
      return clib_error_return (0, "You must specify a bridge domain");
    }

  if (unformat_check_input (input) != UNFORMAT_END_OF_INPUT &&
      unformat (input, "disable"))
    {
      disable = 1;
    }

  if (l2_rw_enable_disable (bridge_domain, disable))
    return clib_error_return (0, "Could not enable or disable rewrite");

  return 0;
}

/*?
 * Layer 2-Rewrite node uses classify tables to match packets. Then, using
 * the provisioned mask and value, modifies the packet header.
 *
 * @cliexpar
 * @todo This is incomplete. This needs a detailed description and a
 * practical example.
?*/
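/*
 * Illustrative example only (the bridge-domain id is hypothetical):
 *   set bridge-domain rewrite 200
 *   set bridge-domain rewrite 200 disable
 */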
/* *INDENT-OFF* */
VLIB_CLI_COMMAND (l2_rw_set_cli, static) = {
  .path = "set bridge-domain rewrite",
  .short_help =
  "set bridge-domain rewrite <bridge-domain> [disable]",
  .function = l2_rw_set_cli_fn,
};
/* *INDENT-ON* */

static clib_error_t *
l2_rw_init (vlib_main_t * vm)
{
  l2_rw_main_t *rw = &l2_rw_main;
  rw->configs = 0;
  rw->entries = 0;
  clib_bitmap_alloc (rw->configs_bitmap, 1);
  feat_bitmap_init_next_nodes (vm,
                               l2_rw_node.index,
                               L2INPUT_N_FEAT,
                               l2input_get_feat_names (),
                               rw->feat_next_node_index);
  return 0;
}

VLIB_INIT_FUNCTION (l2_rw_init);

enum
{
  L2_RW_NEXT_DROP,
  L2_RW_N_NEXT,
};

#define foreach_l2_rw_error \
_(UNKNOWN, "Unknown error")

typedef enum
{
#define _(sym,str) L2_RW_ERROR_##sym,
  foreach_l2_rw_error
#undef _
    L2_RW_N_ERROR,
} l2_rw_error_t;

static char *l2_rw_error_strings[] = {
#define _(sym,string) string,
  foreach_l2_rw_error
#undef _
};

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (l2_rw_node) = {
  .function = l2_rw_node_fn,
  .name = "l2-rw",
  .vector_size = sizeof (u32),
  .format_trace = format_l2_rw_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_errors = ARRAY_LEN(l2_rw_error_strings),
  .error_strings = l2_rw_error_strings,
  .runtime_data_bytes = 0,
  .n_next_nodes = L2_RW_N_NEXT,
  .next_nodes = { [L2_RW_NEXT_DROP] = "error-drop"},
};
/* *INDENT-ON* */

VLIB_NODE_FUNCTION_MULTIARCH (l2_rw_node, l2_rw_node_fn)
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */