/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * pg_input.c: buffer generator input
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

/*
 * To be honest, the packet generator needs an extreme
 * makeover. Two key assumptions which drove the current implementation
 * are no longer true. First, that buffer managers implement a
 * post-TX recycle list. Second, that packet generator performance
 * is first-order important.
 */

#include <vlib/vlib.h>
#include <vnet/pg/pg.h>
#include <vnet/vnet.h>
#include <vnet/ethernet/ethernet.h>
#include <vnet/feature/feature.h>
#include <vnet/ip/ip4_packet.h>
#include <vnet/ip/ip6_packet.h>
#include <vnet/udp/udp_packet.h>
#include <vnet/devices/devices.h>
#include <vnet/gso/gro_func.h>

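/*
 * Validate that a generated buffer still matches the stream's fixed
 * packet data wherever the fixed-data mask has bits set; on mismatch,
 * log the buffer, the bytes seen, the mask and the expected bytes, and
 * return 0.
 */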
static int
validate_buffer_data2 (vlib_buffer_t * b, pg_stream_t * s,
                       u32 data_offset, u32 n_bytes)
{
  u8 *bd, *pd, *pm;
  u32 i;

  bd = b->data;
  pd = s->fixed_packet_data + data_offset;
  pm = s->fixed_packet_data_mask + data_offset;

  if (pd + n_bytes >= vec_end (s->fixed_packet_data))
    n_bytes = (pd < vec_end (s->fixed_packet_data)
               ? vec_end (s->fixed_packet_data) - pd : 0);

  for (i = 0; i < n_bytes; i++)
    if ((bd[i] & pm[i]) != pd[i])
      break;

  if (i >= n_bytes)
    return 1;

  clib_warning ("buffer %U", format_vnet_buffer, b);
  clib_warning ("differ at index %d", i);
  clib_warning ("is %U", format_hex_bytes, bd, n_bytes);
  clib_warning ("mask %U", format_hex_bytes, pm, n_bytes);
  clib_warning ("expect %U", format_hex_bytes, pd, n_bytes);
  return 0;
}

static int
validate_buffer_data (vlib_buffer_t * b, pg_stream_t * s)
{
  return validate_buffer_data2 (b, s, 0, s->buffer_bytes);
}

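/*
 * Store a 1-, 2-, 4- or 8-byte edit value at a possibly unaligned
 * address, optionally converting to network byte order.  set_2 is the
 * two-buffer unrolled variant used by the quad loops below.
 */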
always_inline void
set_1 (void *a0,
       u64 v0, u64 v_min, u64 v_max, u32 n_bits, u32 is_net_byte_order)
{
  ASSERT (v0 >= v_min && v0 <= v_max);
  if (n_bits == BITS (u8))
    {
      ((u8 *) a0)[0] = v0;
    }
  else if (n_bits == BITS (u16))
    {
      if (is_net_byte_order)
        v0 = clib_host_to_net_u16 (v0);
      clib_mem_unaligned (a0, u16) = v0;
    }
  else if (n_bits == BITS (u32))
    {
      if (is_net_byte_order)
        v0 = clib_host_to_net_u32 (v0);
      clib_mem_unaligned (a0, u32) = v0;
    }
  else if (n_bits == BITS (u64))
    {
      if (is_net_byte_order)
        v0 = clib_host_to_net_u64 (v0);
      clib_mem_unaligned (a0, u64) = v0;
    }
}

always_inline void
set_2 (void *a0, void *a1,
       u64 v0, u64 v1,
       u64 v_min, u64 v_max,
       u32 n_bits, u32 is_net_byte_order, u32 is_increment)
{
  ASSERT (v0 >= v_min && v0 <= v_max);
  ASSERT (v1 >= v_min && v1 <= (v_max + is_increment));
  if (n_bits == BITS (u8))
    {
      ((u8 *) a0)[0] = v0;
      ((u8 *) a1)[0] = v1;
    }
  else if (n_bits == BITS (u16))
    {
      if (is_net_byte_order)
        {
          v0 = clib_host_to_net_u16 (v0);
          v1 = clib_host_to_net_u16 (v1);
        }
      clib_mem_unaligned (a0, u16) = v0;
      clib_mem_unaligned (a1, u16) = v1;
    }
  else if (n_bits == BITS (u32))
    {
      if (is_net_byte_order)
        {
          v0 = clib_host_to_net_u32 (v0);
          v1 = clib_host_to_net_u32 (v1);
        }
      clib_mem_unaligned (a0, u32) = v0;
      clib_mem_unaligned (a1, u32) = v1;
    }
  else if (n_bits == BITS (u64))
    {
      if (is_net_byte_order)
        {
          v0 = clib_host_to_net_u64 (v0);
          v1 = clib_host_to_net_u64 (v1);
        }
      clib_mem_unaligned (a0, u64) = v0;
      clib_mem_unaligned (a1, u64) = v1;
    }
}

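/*
 * Write a fixed value at byte_offset into every buffer, two buffers per
 * iteration with a write prefetch of the next pair.
 */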
static_always_inline void
do_set_fixed (pg_main_t * pg,
              pg_stream_t * s,
              u32 * buffers,
              u32 n_buffers,
              u32 n_bits,
              u32 byte_offset, u32 is_net_byte_order, u64 v_min, u64 v_max)
{
  vlib_main_t *vm = vlib_get_main ();

  while (n_buffers >= 4)
    {
      vlib_buffer_t *b0, *b1, *b2, *b3;
      void *a0, *a1;

      b0 = vlib_get_buffer (vm, buffers[0]);
      b1 = vlib_get_buffer (vm, buffers[1]);
      b2 = vlib_get_buffer (vm, buffers[2]);
      b3 = vlib_get_buffer (vm, buffers[3]);
      buffers += 2;
      n_buffers -= 2;

      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      set_2 (a0, a1, v_min, v_min, v_min, v_max, n_bits, is_net_byte_order,
             /* is_increment */ 0);

      ASSERT (validate_buffer_data (b0, s));
      ASSERT (validate_buffer_data (b1, s));
    }

  while (n_buffers > 0)
    {
      vlib_buffer_t *b0;
      void *a0;

      b0 = vlib_get_buffer (vm, buffers[0]);
      buffers += 1;
      n_buffers -= 1;

      a0 = (void *) b0 + byte_offset;

      set_1 (a0, v_min, v_min, v_max, n_bits, is_net_byte_order);

      ASSERT (validate_buffer_data (b0, s));
    }
}

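/*
 * Write an incrementing value (wrapping from v_max back to v_min) at
 * byte_offset in each buffer; optionally accumulate the sum of the
 * values written.  Returns the next value to use.
 */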
static_always_inline u64
do_set_increment (pg_main_t * pg,
                  pg_stream_t * s,
                  u32 * buffers,
                  u32 n_buffers,
                  u32 n_bits,
                  u32 byte_offset,
                  u32 is_net_byte_order,
                  u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max, u64 v)
{
  vlib_main_t *vm = vlib_get_main ();
  u64 sum = 0;

  ASSERT (v >= v_min && v <= v_max);

  while (n_buffers >= 4)
    {
      vlib_buffer_t *b0, *b1, *b2, *b3;
      void *a0, *a1;
      u64 v_old;

      b0 = vlib_get_buffer (vm, buffers[0]);
      b1 = vlib_get_buffer (vm, buffers[1]);
      b2 = vlib_get_buffer (vm, buffers[2]);
      b3 = vlib_get_buffer (vm, buffers[3]);
      buffers += 2;
      n_buffers -= 2;

      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      v_old = v;
      v = v_old + 2;
      v = v > v_max ? v_min : v;
      set_2 (a0, a1,
             v_old + 0, v_old + 1, v_min, v_max, n_bits, is_net_byte_order,
             /* is_increment */ 1);

      if (want_sum)
        sum += 2 * v_old + 1;

      if (PREDICT_FALSE (v_old + 1 > v_max))
        {
          if (want_sum)
            sum -= 2 * v_old + 1;

          v = v_old;
          set_1 (a0, v + 0, v_min, v_max, n_bits, is_net_byte_order);
          if (want_sum)
            sum += v;
          v += 1;

          v = v > v_max ? v_min : v;
          set_1 (a1, v + 0, v_min, v_max, n_bits, is_net_byte_order);
          if (want_sum)
            sum += v;
          v += 1;
        }

      ASSERT (validate_buffer_data (b0, s));
      ASSERT (validate_buffer_data (b1, s));
    }

  while (n_buffers > 0)
    {
      vlib_buffer_t *b0;
      void *a0;
      u64 v_old;

      b0 = vlib_get_buffer (vm, buffers[0]);
      buffers += 1;
      n_buffers -= 1;

      a0 = (void *) b0 + byte_offset;

      v_old = v;
      if (want_sum)
        sum += v_old;
      v += 1;
      v = v > v_max ? v_min : v;

      ASSERT (v_old >= v_min && v_old <= v_max);
      set_1 (a0, v_old, v_min, v_max, n_bits, is_net_byte_order);

      ASSERT (validate_buffer_data (b0, s));
    }

  if (want_sum)
    *sum_result = sum;

  return v;
}

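/*
 * Write pseudo-random values in [v_min, v_max] at byte_offset in each
 * buffer, drawing from the vlib random buffer; optionally accumulate the
 * sum of the values written.
 */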
static_always_inline void
do_set_random (pg_main_t * pg,
               pg_stream_t * s,
               u32 * buffers,
               u32 n_buffers,
               u32 n_bits,
               u32 byte_offset,
               u32 is_net_byte_order,
               u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max)
{
  vlib_main_t *vm = vlib_get_main ();
  u64 v_diff = v_max - v_min + 1;
  u64 r_mask = max_pow2 (v_diff) - 1;
  u64 v0, v1;
  u64 sum = 0;
  void *random_data;

  random_data = clib_random_buffer_get_data
    (&vm->random_buffer, n_buffers * n_bits / BITS (u8));

  v0 = v1 = v_min;

  while (n_buffers >= 4)
    {
      vlib_buffer_t *b0, *b1, *b2, *b3;
      void *a0, *a1;
      u64 r0 = 0, r1 = 0;	/* warnings be gone */

      b0 = vlib_get_buffer (vm, buffers[0]);
      b1 = vlib_get_buffer (vm, buffers[1]);
      b2 = vlib_get_buffer (vm, buffers[2]);
      b3 = vlib_get_buffer (vm, buffers[3]);
      buffers += 2;
      n_buffers -= 2;

      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      switch (n_bits)
        {
#define _(n)                                    \
  case BITS (u##n):                             \
    {                                           \
      u##n * r = random_data;                   \
      r0 = r[0];                                \
      r1 = r[1];                                \
      random_data = r + 2;                      \
    }                                           \
    break;

          _(8);
          _(16);
          _(32);
          _(64);

#undef _
        }

      /* Add power of 2 sized random number which may be out of range. */
      v0 += r0 & r_mask;
      v1 += r1 & r_mask;

      /* Twice should be enough to reduce to v_min .. v_max range. */
      v0 = v0 > v_max ? v0 - v_diff : v0;
      v1 = v1 > v_max ? v1 - v_diff : v1;
      v0 = v0 > v_max ? v0 - v_diff : v0;
      v1 = v1 > v_max ? v1 - v_diff : v1;

      if (want_sum)
        sum += v0 + v1;

      set_2 (a0, a1, v0, v1, v_min, v_max, n_bits, is_net_byte_order,
             /* is_increment */ 0);

      ASSERT (validate_buffer_data (b0, s));
      ASSERT (validate_buffer_data (b1, s));
    }

  while (n_buffers > 0)
    {
      vlib_buffer_t *b0;
      void *a0;
      u64 r0 = 0;		/* warnings be gone */

      b0 = vlib_get_buffer (vm, buffers[0]);
      buffers += 1;
      n_buffers -= 1;

      a0 = (void *) b0 + byte_offset;

      switch (n_bits)
        {
#define _(n)                                    \
  case BITS (u##n):                             \
    {                                           \
      u##n * r = random_data;                   \
      r0 = r[0];                                \
      random_data = r + 1;                      \
    }                                           \
    break;

          _(8);
          _(16);
          _(32);
          _(64);

#undef _
        }

      /* Add power of 2 sized random number which may be out of range. */
      v0 += r0 & r_mask;

      /* Twice should be enough to reduce to v_min .. v_max range. */
      v0 = v0 > v_max ? v0 - v_diff : v0;
      v0 = v0 > v_max ? v0 - v_diff : v0;

      if (want_sum)
        sum += v0;

      set_1 (a0, v0, v_min, v_max, n_bits, is_net_byte_order);

      ASSERT (validate_buffer_data (b0, s));
    }

  if (want_sum)
    *sum_result = sum;
}

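/*
 * The setbits_* helpers are the sub-byte counterparts of set_*: they
 * read-modify-write only the masked bit field at the target address,
 * leaving the neighboring bits untouched.
 */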
#define _(i,t)                                                          \
  clib_mem_unaligned (a##i, t) =                                        \
    clib_host_to_net_##t ((clib_net_to_host_mem_##t (a##i) &~ mask)     \
                          | (v##i << shift))

always_inline void
setbits_1 (void *a0,
           u64 v0,
           u64 v_min, u64 v_max,
           u32 max_bits, u32 n_bits, u64 mask, u32 shift)
{
  ASSERT (v0 >= v_min && v0 <= v_max);
  if (max_bits == BITS (u8))
    ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);

  else if (max_bits == BITS (u16))
    {
      _(0, u16);
    }
  else if (max_bits == BITS (u32))
    {
      _(0, u32);
    }
  else if (max_bits == BITS (u64))
    {
      _(0, u64);
    }
}

always_inline void
setbits_2 (void *a0, void *a1,
           u64 v0, u64 v1,
           u64 v_min, u64 v_max,
           u32 max_bits, u32 n_bits, u64 mask, u32 shift, u32 is_increment)
{
  ASSERT (v0 >= v_min && v0 <= v_max);
  ASSERT (v1 >= v_min && v1 <= v_max + is_increment);
  if (max_bits == BITS (u8))
    {
      ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
      ((u8 *) a1)[0] = (((u8 *) a1)[0] & ~mask) | (v1 << shift);
    }

  else if (max_bits == BITS (u16))
    {
      _(0, u16);
      _(1, u16);
    }
  else if (max_bits == BITS (u32))
    {
      _(0, u32);
      _(1, u32);
    }
  else if (max_bits == BITS (u64))
    {
      _(0, u64);
      _(1, u64);
    }
}

#undef _

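/*
 * do_setbits_{fixed,increment,random} mirror do_set_{fixed,increment,
 * random} for edits that are not byte-aligned or not a power-of-two
 * number of bits wide.
 */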
static_always_inline void
do_setbits_fixed (pg_main_t * pg,
                  pg_stream_t * s,
                  u32 * buffers,
                  u32 n_buffers,
                  u32 max_bits,
                  u32 n_bits,
                  u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
{
  vlib_main_t *vm = vlib_get_main ();

  while (n_buffers >= 4)
    {
      vlib_buffer_t *b0, *b1, *b2, *b3;
      void *a0, *a1;

      b0 = vlib_get_buffer (vm, buffers[0]);
      b1 = vlib_get_buffer (vm, buffers[1]);
      b2 = vlib_get_buffer (vm, buffers[2]);
      b3 = vlib_get_buffer (vm, buffers[3]);
      buffers += 2;
      n_buffers -= 2;

      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      setbits_2 (a0, a1,
                 v_min, v_min, v_min, v_max, max_bits, n_bits, mask, shift,
                 /* is_increment */ 0);

      ASSERT (validate_buffer_data (b0, s));
      ASSERT (validate_buffer_data (b1, s));
    }

  while (n_buffers > 0)
    {
      vlib_buffer_t *b0;
      void *a0;

      b0 = vlib_get_buffer (vm, buffers[0]);
      buffers += 1;
      n_buffers -= 1;

      a0 = (void *) b0 + byte_offset;

      setbits_1 (a0, v_min, v_min, v_max, max_bits, n_bits, mask, shift);
      ASSERT (validate_buffer_data (b0, s));
    }
}

static_always_inline u64
do_setbits_increment (pg_main_t * pg,
                      pg_stream_t * s,
                      u32 * buffers,
                      u32 n_buffers,
                      u32 max_bits,
                      u32 n_bits,
                      u32 byte_offset,
                      u64 v_min, u64 v_max, u64 v, u64 mask, u32 shift)
{
  vlib_main_t *vm = vlib_get_main ();

  ASSERT (v >= v_min && v <= v_max);

  while (n_buffers >= 4)
    {
      vlib_buffer_t *b0, *b1, *b2, *b3;
      void *a0, *a1;
      u64 v_old;

      b0 = vlib_get_buffer (vm, buffers[0]);
      b1 = vlib_get_buffer (vm, buffers[1]);
      b2 = vlib_get_buffer (vm, buffers[2]);
      b3 = vlib_get_buffer (vm, buffers[3]);
      buffers += 2;
      n_buffers -= 2;

      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      v_old = v;
      v = v_old + 2;
      v = v > v_max ? v_min : v;
      setbits_2 (a0, a1,
                 v_old + 0, v_old + 1,
                 v_min, v_max, max_bits, n_bits, mask, shift,
                 /* is_increment */ 1);

      if (PREDICT_FALSE (v_old + 1 > v_max))
        {
          v = v_old;
          setbits_1 (a0, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
          v += 1;

          v = v > v_max ? v_min : v;
          setbits_1 (a1, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
          v += 1;
        }
      ASSERT (validate_buffer_data (b0, s));
      ASSERT (validate_buffer_data (b1, s));
    }

  while (n_buffers > 0)
    {
      vlib_buffer_t *b0;
      void *a0;
      u64 v_old;

      b0 = vlib_get_buffer (vm, buffers[0]);
      buffers += 1;
      n_buffers -= 1;

      a0 = (void *) b0 + byte_offset;

      v_old = v;
      v = v_old + 1;
      v = v > v_max ? v_min : v;

      ASSERT (v_old >= v_min && v_old <= v_max);
      setbits_1 (a0, v_old, v_min, v_max, max_bits, n_bits, mask, shift);

      ASSERT (validate_buffer_data (b0, s));
    }

  return v;
}

static_always_inline void
do_setbits_random (pg_main_t * pg,
                   pg_stream_t * s,
                   u32 * buffers,
                   u32 n_buffers,
                   u32 max_bits,
                   u32 n_bits,
                   u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
{
  vlib_main_t *vm = vlib_get_main ();
  u64 v_diff = v_max - v_min + 1;
  u64 r_mask = max_pow2 (v_diff) - 1;
  u64 v0, v1;
  void *random_data;

  random_data = clib_random_buffer_get_data
    (&vm->random_buffer, n_buffers * max_bits / BITS (u8));
  v0 = v1 = v_min;

  while (n_buffers >= 4)
    {
      vlib_buffer_t *b0, *b1, *b2, *b3;
      void *a0, *a1;
      u64 r0 = 0, r1 = 0;	/* warnings be gone */

      b0 = vlib_get_buffer (vm, buffers[0]);
      b1 = vlib_get_buffer (vm, buffers[1]);
      b2 = vlib_get_buffer (vm, buffers[2]);
      b3 = vlib_get_buffer (vm, buffers[3]);
      buffers += 2;
      n_buffers -= 2;

      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      switch (max_bits)
        {
#define _(n)                                    \
  case BITS (u##n):                             \
    {                                           \
      u##n * r = random_data;                   \
      r0 = r[0];                                \
      r1 = r[1];                                \
      random_data = r + 2;                      \
    }                                           \
    break;

          _(8);
          _(16);
          _(32);
          _(64);

#undef _
        }

      /* Add power of 2 sized random number which may be out of range. */
      v0 += r0 & r_mask;
      v1 += r1 & r_mask;

      /* Twice should be enough to reduce to v_min .. v_max range. */
      v0 = v0 > v_max ? v0 - v_diff : v0;
      v1 = v1 > v_max ? v1 - v_diff : v1;
      v0 = v0 > v_max ? v0 - v_diff : v0;
      v1 = v1 > v_max ? v1 - v_diff : v1;

      setbits_2 (a0, a1, v0, v1, v_min, v_max, max_bits, n_bits, mask, shift,
                 /* is_increment */ 0);

      ASSERT (validate_buffer_data (b0, s));
      ASSERT (validate_buffer_data (b1, s));
    }

  while (n_buffers > 0)
    {
      vlib_buffer_t *b0;
      void *a0;
      u64 r0 = 0;		/* warnings be gone */

      b0 = vlib_get_buffer (vm, buffers[0]);
      buffers += 1;
      n_buffers -= 1;

      a0 = (void *) b0 + byte_offset;

      switch (max_bits)
        {
#define _(n)                                    \
  case BITS (u##n):                             \
    {                                           \
      u##n * r = random_data;                   \
      r0 = r[0];                                \
      random_data = r + 1;                      \
    }                                           \
    break;

          _(8);
          _(16);
          _(32);
          _(64);

#undef _
        }

      /* Add power of 2 sized random number which may be out of range. */
      v0 += r0 & r_mask;

      /* Twice should be enough to reduce to v_min .. v_max range. */
      v0 = v0 > v_max ? v0 - v_diff : v0;
      v0 = v0 > v_max ? v0 - v_diff : v0;

      setbits_1 (a0, v0, v_min, v_max, max_bits, n_bits, mask, shift);

      ASSERT (validate_buffer_data (b0, s));
    }
}

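/*
 * Dispatch a single non-fixed edit over a range of buffers: take the
 * byte-aligned fast path when the edit starts and ends on byte
 * boundaries, otherwise fall back to the masked setbits path.
 */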
static u64
do_it (pg_main_t * pg,
       pg_stream_t * s,
       u32 * buffers,
       u32 n_buffers,
       u32 lo_bit, u32 hi_bit,
       u64 v_min, u64 v_max, u64 v, pg_edit_type_t edit_type)
{
  u32 max_bits, l0, l1, h1, start_bit;

  if (v_min == v_max)
    edit_type = PG_EDIT_FIXED;

  l0 = lo_bit / BITS (u8);
  l1 = lo_bit % BITS (u8);
  h1 = hi_bit % BITS (u8);

  start_bit = l0 * BITS (u8);

  max_bits = hi_bit - start_bit;
  ASSERT (max_bits <= 64);

#define _(n)                                            \
  case (n):                                             \
    if (edit_type == PG_EDIT_INCREMENT)                 \
      v = do_set_increment (pg, s, buffers, n_buffers,  \
                            BITS (u##n),                \
                            l0,                         \
                            /* is_net_byte_order */ 1,  \
                            /* want sum */ 0, 0,        \
                            v_min, v_max,               \
                            v);                         \
    else if (edit_type == PG_EDIT_RANDOM)               \
      do_set_random (pg, s, buffers, n_buffers,         \
                     BITS (u##n),                       \
                     l0,                                \
                     /* is_net_byte_order */ 1,         \
                     /* want sum */ 0, 0,               \
                     v_min, v_max);                     \
    else /* edit_type == PG_EDIT_FIXED */               \
      do_set_fixed (pg, s, buffers, n_buffers,          \
                    BITS (u##n),                        \
                    l0,                                 \
                    /* is_net_byte_order */ 1,          \
                    v_min, v_max);                      \
    goto done;

  if (l1 == 0 && h1 == 0)
    {
      switch (max_bits)
        {
          _(8);
          _(16);
          _(32);
          _(64);
        }
    }

#undef _

  {
    u64 mask;
    u32 shift = l1;
    u32 n_bits = max_bits;

    max_bits = clib_max (max_pow2 (n_bits), 8);

    mask = ((u64) 1 << (u64) n_bits) - 1;
    mask &= ~(((u64) 1 << (u64) shift) - 1);

    mask <<= max_bits - n_bits;
    shift += max_bits - n_bits;

    switch (max_bits)
      {
#define _(n)                                                    \
      case (n):                                                 \
        if (edit_type == PG_EDIT_INCREMENT)                     \
          v = do_setbits_increment (pg, s, buffers, n_buffers,  \
                                    BITS (u##n), n_bits,        \
                                    l0, v_min, v_max, v,        \
                                    mask, shift);               \
        else if (edit_type == PG_EDIT_RANDOM)                   \
          do_setbits_random (pg, s, buffers, n_buffers,         \
                             BITS (u##n), n_bits,               \
                             l0, v_min, v_max,                  \
                             mask, shift);                      \
        else /* edit_type == PG_EDIT_FIXED */                   \
          do_setbits_fixed (pg, s, buffers, n_buffers,          \
                            BITS (u##n), n_bits,                \
                            l0, v_min, v_max,                   \
                            mask, shift);                       \
        goto done;

        _(8);
        _(16);
        _(32);
        _(64);

#undef _
      }
  }

done:
  return v;
}

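/*
 * Apply the stream's packet-size edit (fixed, increment or random) to
 * each buffer's current_length and credit the generated packets and
 * bytes to the RX interface counters.
 */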
static void
pg_generate_set_lengths (pg_main_t * pg,
                         pg_stream_t * s, u32 * buffers, u32 n_buffers)
{
  u64 v_min, v_max, length_sum;
  pg_edit_type_t edit_type;

  v_min = s->min_packet_bytes;
  v_max = s->max_packet_bytes;
  edit_type = s->packet_size_edit_type;

  if (edit_type == PG_EDIT_INCREMENT)
    s->last_increment_packet_size
      = do_set_increment (pg, s, buffers, n_buffers,
                          8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
                          STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
                          /* is_net_byte_order */ 0,
                          /* want sum */ 1, &length_sum,
                          v_min, v_max, s->last_increment_packet_size);

  else if (edit_type == PG_EDIT_RANDOM)
    do_set_random (pg, s, buffers, n_buffers,
                   8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
                   STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
                   /* is_net_byte_order */ 0,
                   /* want sum */ 1, &length_sum,
                   v_min, v_max);

  else				/* edit_type == PG_EDIT_FIXED */
    {
      do_set_fixed (pg, s, buffers, n_buffers,
                    8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
                    STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
                    /* is_net_byte_order */ 0,
                    v_min, v_max);
      length_sum = v_min * n_buffers;
    }

  {
    vnet_main_t *vnm = vnet_get_main ();
    vnet_interface_main_t *im = &vnm->interface_main;
    vnet_sw_interface_t *si =
      vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);

    vlib_increment_combined_counter (im->combined_sw_if_counters
                                     + VNET_INTERFACE_COUNTER_RX,
                                     vlib_get_thread_index (),
                                     si->sw_if_index, n_buffers, length_sum);
  }

}

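/*
 * For multi-buffer packets, walk each chain and distribute the packet
 * length across the chained buffers, returning any buffers that end up
 * carrying zero bytes.
 */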
static void
pg_generate_fix_multi_buffer_lengths (pg_main_t * pg,
                                      pg_stream_t * s,
                                      u32 * buffers, u32 n_buffers)
{
  vlib_main_t *vm = vlib_get_main ();
  pg_buffer_index_t *pbi;
  uword n_bytes_left;
  static u32 *unused_buffers = 0;

  while (n_buffers > 0)
    {
      vlib_buffer_t *b;
      u32 bi;

      bi = buffers[0];
      b = vlib_get_buffer (vm, bi);

      /* Current length here is length of whole packet. */
      n_bytes_left = b->current_length;

      pbi = s->buffer_indices;
      while (1)
        {
          uword n = clib_min (n_bytes_left, s->buffer_bytes);

          b->current_length = n;
          n_bytes_left -= n;
          if (n_bytes_left > 0)
            b->flags |= VLIB_BUFFER_NEXT_PRESENT;
          else
            b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;

          /* Return unused buffers to fifos. */
          if (n == 0)
            vec_add1 (unused_buffers, bi);

          pbi++;
          if (pbi >= vec_end (s->buffer_indices))
            break;

          bi = b->next_buffer;
          b = vlib_get_buffer (vm, bi);
        }
      ASSERT (n_bytes_left == 0);

      buffers += 1;
      n_buffers -= 1;
    }

  if (vec_len (unused_buffers) > 0)
    {
      vlib_buffer_free_no_next (vm, unused_buffers, vec_len (unused_buffers));
      _vec_len (unused_buffers) = 0;
    }
}

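/*
 * Apply the stream's non-fixed edits (increment/random fields) to the
 * freshly filled buffers, then run any per-group edit functions.
 */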
static void
pg_generate_edit (pg_main_t * pg,
                  pg_stream_t * s, u32 * buffers, u32 n_buffers)
{
  pg_edit_t *e;

  vec_foreach (e, s->non_fixed_edits)
  {
    switch (e->type)
      {
      case PG_EDIT_RANDOM:
      case PG_EDIT_INCREMENT:
        {
          u32 lo_bit, hi_bit;
          u64 v_min, v_max;

          v_min = pg_edit_get_value (e, PG_EDIT_LO);
          v_max = pg_edit_get_value (e, PG_EDIT_HI);

          hi_bit = (BITS (u8) * STRUCT_OFFSET_OF (vlib_buffer_t, data)
                    + BITS (u8) + e->lsb_bit_offset);
          lo_bit = hi_bit - e->n_bits;

          e->last_increment_value
            = do_it (pg, s, buffers, n_buffers, lo_bit, hi_bit, v_min, v_max,
                     e->last_increment_value, e->type);
        }
        break;

      case PG_EDIT_UNSPECIFIED:
        break;

      default:
        /* Should not be any fixed edits left. */
        ASSERT (0);
        break;
      }
  }

  /* Call any edit functions to e.g. complete IP lengths, checksums, ... */
  {
    int i;
    for (i = vec_len (s->edit_groups) - 1; i >= 0; i--)
      {
        pg_edit_group_t *g = s->edit_groups + i;
        if (g->edit_function)
          g->edit_function (pg, s, g, buffers, n_buffers);
      }
  }
}

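/*
 * Chain each buffer to its corresponding next buffer and set
 * VLIB_BUFFER_NEXT_PRESENT, two buffers per iteration.
 */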
static void
pg_set_next_buffer_pointers (pg_main_t * pg,
                             pg_stream_t * s,
                             u32 * buffers, u32 * next_buffers, u32 n_buffers)
{
  vlib_main_t *vm = vlib_get_main ();

  while (n_buffers >= 4)
    {
      u32 ni0, ni1;
      vlib_buffer_t *b0, *b1;

      b0 = vlib_get_buffer (vm, buffers[0]);
      b1 = vlib_get_buffer (vm, buffers[1]);
      ni0 = next_buffers[0];
      ni1 = next_buffers[1];

      vlib_prefetch_buffer_with_index (vm, buffers[2], WRITE);
      vlib_prefetch_buffer_with_index (vm, buffers[3], WRITE);

      b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
      b1->flags |= VLIB_BUFFER_NEXT_PRESENT;
      b0->next_buffer = ni0;
      b1->next_buffer = ni1;

      buffers += 2;
      next_buffers += 2;
      n_buffers -= 2;
    }

  while (n_buffers > 0)
    {
      u32 ni0;
      vlib_buffer_t *b0;

      b0 = vlib_get_buffer (vm, buffers[0]);
      ni0 = next_buffers[0];
      buffers += 1;
      next_buffers += 1;
      n_buffers -= 1;

      b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
      b0->next_buffer = ni0;
    }
}

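/*
 * (Re)initialize freshly allocated buffers: apply the stream's buffer
 * flags and RX/TX sw_if_index, then either copy in the fixed packet
 * data or, when set_data is 0, re-validate it.
 */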
static_always_inline void
init_buffers_inline (vlib_main_t * vm,
                     pg_stream_t * s,
                     u32 * buffers,
                     u32 n_buffers, u32 data_offset, u32 n_data, u32 set_data)
{
  u32 n_left, *b;
  u8 *data, *mask;

  ASSERT (s->replay_packet_templates == 0);

  data = s->fixed_packet_data + data_offset;
  mask = s->fixed_packet_data_mask + data_offset;
  if (data + n_data >= vec_end (s->fixed_packet_data))
    n_data = (data < vec_end (s->fixed_packet_data)
              ? vec_end (s->fixed_packet_data) - data : 0);
  if (n_data > 0)
    {
      ASSERT (data + n_data <= vec_end (s->fixed_packet_data));
      ASSERT (mask + n_data <= vec_end (s->fixed_packet_data_mask));
    }

  n_left = n_buffers;
  b = buffers;

  while (n_left >= 4)
    {
      u32 bi0, bi1;
      vlib_buffer_t *b0, *b1;

      /* Prefetch next iteration. */
      vlib_prefetch_buffer_with_index (vm, b[2], STORE);
      vlib_prefetch_buffer_with_index (vm, b[3], STORE);

      bi0 = b[0];
      bi1 = b[1];
      b += 2;
      n_left -= 2;

      b0 = vlib_get_buffer (vm, bi0);
      b1 = vlib_get_buffer (vm, bi1);
      b0->flags |= s->buffer_flags;
      b1->flags |= s->buffer_flags;

      vnet_buffer (b0)->sw_if_index[VLIB_RX] =
        vnet_buffer (b1)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];

      vnet_buffer (b0)->sw_if_index[VLIB_TX] =
        vnet_buffer (b1)->sw_if_index[VLIB_TX] = s->sw_if_index[VLIB_TX];

      if (set_data)
        {
          clib_memcpy_fast (b0->data, data, n_data);
          clib_memcpy_fast (b1->data, data, n_data);
        }
      else
        {
          ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
          ASSERT (validate_buffer_data2 (b1, s, data_offset, n_data));
        }
    }

  while (n_left >= 1)
    {
      u32 bi0;
      vlib_buffer_t *b0;

      bi0 = b[0];
      b += 1;
      n_left -= 1;

      b0 = vlib_get_buffer (vm, bi0);
      b0->flags |= s->buffer_flags;
      vnet_buffer (b0)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
      vnet_buffer (b0)->sw_if_index[VLIB_TX] = s->sw_if_index[VLIB_TX];

      if (set_data)
        clib_memcpy_fast (b0->data, data, n_data);
      else
        ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
    }
}

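/*
 * Allocate and initialize one FIFO's worth of buffers for a stream.
 * For the head buffer of each packet, also apply length edits, the
 * multi-buffer length fix-up and the non-fixed edits.
 */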
static u32
pg_stream_fill_helper (pg_main_t * pg,
                       pg_stream_t * s,
                       pg_buffer_index_t * bi,
                       u32 * buffers, u32 * next_buffers, u32 n_alloc)
{
  vlib_main_t *vm = vlib_get_main ();
  uword is_start_of_packet = bi == s->buffer_indices;
  u32 n_allocated;

  ASSERT (vec_len (s->replay_packet_templates) == 0);

  n_allocated = vlib_buffer_alloc (vm, buffers, n_alloc);
  if (n_allocated == 0)
    return 0;

  /*
   * We can't assume we got all the buffers we asked for...
   * This never worked until recently.
   */
  n_alloc = n_allocated;

  /* Reinitialize buffers */
  init_buffers_inline
    (vm, s,
     buffers,
     n_alloc, (bi - s->buffer_indices) * s->buffer_bytes /* data offset */ ,
     s->buffer_bytes,
     /* set_data */ 1);

  if (next_buffers)
    pg_set_next_buffer_pointers (pg, s, buffers, next_buffers, n_alloc);

  if (is_start_of_packet)
    {
      pg_generate_set_lengths (pg, s, buffers, n_alloc);
      if (vec_len (s->buffer_indices) > 1)
        pg_generate_fix_multi_buffer_lengths (pg, s, buffers, n_alloc);

      pg_generate_edit (pg, s, buffers, n_alloc);
    }

  return n_alloc;
}

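/*
 * Pcap replay path: copy the next n_alloc replay packet templates into
 * freshly allocated buffer chains, queue the head buffers on the
 * stream's FIFO and update the RX interface counters.
 */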
static u32
pg_stream_fill_replay (pg_main_t * pg, pg_stream_t * s, u32 n_alloc)
{
  pg_buffer_index_t *bi;
  u32 n_left, i, l;
  u32 buffer_alloc_request = 0;
  u32 buffer_alloc_result;
  u32 current_buffer_index;
  u32 *buffers;
  vlib_main_t *vm = vlib_get_main ();
  vnet_main_t *vnm = vnet_get_main ();
  u32 buf_sz = vlib_buffer_get_default_data_size (vm);
  vnet_interface_main_t *im = &vnm->interface_main;
  vnet_sw_interface_t *si;

  buffers = pg->replay_buffers_by_thread[vm->thread_index];
  vec_reset_length (buffers);
  bi = s->buffer_indices;

  n_left = n_alloc;
  i = s->current_replay_packet_index;
  l = vec_len (s->replay_packet_templates);

  /* Figure out how many buffers we need */
  while (n_left > 0)
    {
      u8 *d0;

      d0 = vec_elt (s->replay_packet_templates, i);
      buffer_alloc_request += (vec_len (d0) + (buf_sz - 1)) / buf_sz;

      i = ((i + 1) == l) ? 0 : i + 1;
      n_left--;
    }

  ASSERT (buffer_alloc_request > 0);
  vec_validate (buffers, buffer_alloc_request - 1);

  /* Allocate that many buffers */
  buffer_alloc_result = vlib_buffer_alloc (vm, buffers, buffer_alloc_request);
  if (buffer_alloc_result < buffer_alloc_request)
    {
      clib_warning ("alloc failure, got %d not %d", buffer_alloc_result,
                    buffer_alloc_request);
      vlib_buffer_free_no_next (vm, buffers, buffer_alloc_result);
      pg->replay_buffers_by_thread[vm->thread_index] = buffers;
      return 0;
    }

  /* Now go generate the buffers, and add them to the FIFO */
  n_left = n_alloc;

  current_buffer_index = 0;
  i = s->current_replay_packet_index;
  l = vec_len (s->replay_packet_templates);
  while (n_left > 0)
    {
      u8 *d0;
      int not_last;
      u32 data_offset;
      u32 bytes_to_copy, bytes_this_chunk;
      vlib_buffer_t *b;

      d0 = vec_elt (s->replay_packet_templates, i);
      data_offset = 0;
      bytes_to_copy = vec_len (d0);

      /* Add head chunk to pg fifo */
      clib_fifo_add1 (bi->buffer_fifo, buffers[current_buffer_index]);

      /* Copy the data */
      while (bytes_to_copy)
        {
          bytes_this_chunk = clib_min (bytes_to_copy, buf_sz);
          ASSERT (current_buffer_index < vec_len (buffers));
          b = vlib_get_buffer (vm, buffers[current_buffer_index]);
          clib_memcpy_fast (b->data, d0 + data_offset, bytes_this_chunk);
          vnet_buffer (b)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
          vnet_buffer (b)->sw_if_index[VLIB_TX] = s->sw_if_index[VLIB_TX];
          b->flags = s->buffer_flags;
          b->next_buffer = 0;
          b->current_data = 0;
          b->current_length = bytes_this_chunk;

          not_last = bytes_this_chunk < bytes_to_copy;
          if (not_last)
            {
              ASSERT (current_buffer_index < (vec_len (buffers) - 1));
              b->flags |= VLIB_BUFFER_NEXT_PRESENT;
              b->next_buffer = buffers[current_buffer_index + 1];
            }
          bytes_to_copy -= bytes_this_chunk;
          data_offset += bytes_this_chunk;
          current_buffer_index++;
        }

      i = ((i + 1) == l) ? 0 : i + 1;
      n_left--;
    }

  /* Update the interface counters */
  si = vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
  l = 0;
  for (i = 0; i < n_alloc; i++)
    l += vlib_buffer_index_length_in_chain (vm, buffers[i]);
  vlib_increment_combined_counter (im->combined_sw_if_counters
                                   + VNET_INTERFACE_COUNTER_RX,
                                   vlib_get_thread_index (),
                                   si->sw_if_index, n_alloc, l);

  s->current_replay_packet_index += n_alloc;
  s->current_replay_packet_index %= vec_len (s->replay_packet_templates);

  pg->replay_buffers_by_thread[vm->thread_index] = buffers;
  return n_alloc;
}


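/*
 * Ensure at least n_buffers packets are queued on the stream's buffer
 * FIFOs, topping them up in VLIB_FRAME_SIZE batches and honoring the
 * stream's packet-count limit.  Returns the number of packets now
 * available in the head FIFO.
 */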
static u32
pg_stream_fill (pg_main_t * pg, pg_stream_t * s, u32 n_buffers)
{
  pg_buffer_index_t *bi;
  word i, n_in_fifo, n_alloc, n_free, n_added;
  u32 *tail, *start, *end, *last_tail, *last_start;

  bi = s->buffer_indices;

  n_in_fifo = clib_fifo_elts (bi->buffer_fifo);
  if (n_in_fifo >= n_buffers)
    return n_in_fifo;

  n_alloc = n_buffers - n_in_fifo;

  /* Round up, but never generate more than limit. */
  n_alloc = clib_max (VLIB_FRAME_SIZE, n_alloc);

  if (s->n_packets_limit > 0
      && s->n_packets_generated + n_in_fifo + n_alloc >= s->n_packets_limit)
    {
      n_alloc = s->n_packets_limit - s->n_packets_generated - n_in_fifo;
      if (n_alloc < 0)
        n_alloc = 0;
    }

  /*
   * Handle pcap replay directly
   */
  if (s->replay_packet_templates)
    return pg_stream_fill_replay (pg, s, n_alloc);

  /* All buffer fifos should have the same size. */
  if (CLIB_DEBUG > 0)
    {
      uword l = ~0, e;
      vec_foreach (bi, s->buffer_indices)
      {
        e = clib_fifo_elts (bi->buffer_fifo);
        if (bi == s->buffer_indices)
          l = e;
        ASSERT (l == e);
      }
    }

  last_tail = last_start = 0;
  n_added = n_alloc;

  for (i = vec_len (s->buffer_indices) - 1; i >= 0; i--)
    {
      bi = vec_elt_at_index (s->buffer_indices, i);

      n_free = clib_fifo_free_elts (bi->buffer_fifo);
      if (n_free < n_alloc)
        clib_fifo_resize (bi->buffer_fifo, n_alloc - n_free);

      tail = clib_fifo_advance_tail (bi->buffer_fifo, n_alloc);
      start = bi->buffer_fifo;
      end = clib_fifo_end (bi->buffer_fifo);

      if (tail + n_alloc <= end)
        {
          n_added =
            pg_stream_fill_helper (pg, s, bi, tail, last_tail, n_alloc);
        }
      else
        {
          u32 n = clib_min (end - tail, n_alloc);
          n_added = pg_stream_fill_helper (pg, s, bi, tail, last_tail, n);

          if (n_added == n && n_alloc > n_added)
            {
              n_added += pg_stream_fill_helper
                (pg, s, bi, start, last_start, n_alloc - n_added);
            }
        }

      if (PREDICT_FALSE (n_added < n_alloc))
        tail = clib_fifo_advance_tail (bi->buffer_fifo, n_added - n_alloc);

      last_tail = tail;
      last_start = start;

      /* Verify that pkts in the fifo are properly allocated */
    }

  return n_in_fifo + n_added;
}

typedef struct
{
  u32 stream_index;

  u32 packet_length;
  u32 sw_if_index;

  /* Use pre data for packet data. */
  vlib_buffer_t buffer;
} pg_input_trace_t;

static u8 *
format_pg_input_trace (u8 * s, va_list * va)
{
  vlib_main_t *vm = va_arg (*va, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*va, vlib_node_t *);
  pg_input_trace_t *t = va_arg (*va, pg_input_trace_t *);
  pg_main_t *pg = &pg_main;
  pg_stream_t *stream;
  vlib_node_t *n;
  u32 indent = format_get_indent (s);

  stream = 0;
  if (!pool_is_free_index (pg->streams, t->stream_index))
    stream = pool_elt_at_index (pg->streams, t->stream_index);

  if (stream)
    s = format (s, "stream %v", pg->streams[t->stream_index].name);
  else
    s = format (s, "stream %d", t->stream_index);

  s = format (s, ", %d bytes", t->packet_length);
  s = format (s, ", sw_if_index %d", t->sw_if_index);

  s = format (s, "\n%U%U",
              format_white_space, indent, format_vnet_buffer, &t->buffer);

  s = format (s, "\n%U", format_white_space, indent);

  n = 0;
  if (stream)
    n = vlib_get_node (vm, stream->node_index);

  if (n && n->format_buffer)
    s = format (s, "%U", n->format_buffer,
                t->buffer.pre_data, sizeof (t->buffer.pre_data));
  else
    s = format (s, "%U",
                format_hex_bytes, t->buffer.pre_data,
                ARRAY_LEN (t->buffer.pre_data));
  return s;
}

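/*
 * Add packet-generator trace records for up to n_trace of the generated
 * buffers; returns how many trace slots remain unused.
 */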
static int
pg_input_trace (pg_main_t * pg,
                vlib_node_runtime_t * node, u32 stream_index, u32 next_index,
                u32 * buffers, const u32 n_buffers, const u32 n_trace)
{
  vlib_main_t *vm = vlib_get_main ();
  u32 *b, n_left;
  u32 n_trace0 = 0, n_trace1 = 0;

  n_left = clib_min (n_buffers, n_trace);
  b = buffers;

  while (n_left >= 2)
    {
      u32 bi0, bi1;
      vlib_buffer_t *b0, *b1;
      pg_input_trace_t *t0, *t1;

      bi0 = b[0];
      bi1 = b[1];
      b += 2;
      n_left -= 2;

      b0 = vlib_get_buffer (vm, bi0);
      b1 = vlib_get_buffer (vm, bi1);

      n_trace0 +=
        vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
      n_trace1 +=
        vlib_trace_buffer (vm, node, next_index, b1, /* follow_chain */ 1);

      t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
      t1 = vlib_add_trace (vm, node, b1, sizeof (t1[0]));

      t0->stream_index = stream_index;
      t1->stream_index = stream_index;

      t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
      t1->packet_length = vlib_buffer_length_in_chain (vm, b1);

      t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
      t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_RX];

      clib_memcpy_fast (&t0->buffer, b0,
                        sizeof (b0[0]) - sizeof (b0->pre_data));
      clib_memcpy_fast (&t1->buffer, b1,
                        sizeof (b1[0]) - sizeof (b1->pre_data));

      clib_memcpy_fast (t0->buffer.pre_data, b0->data,
                        sizeof (t0->buffer.pre_data));
      clib_memcpy_fast (t1->buffer.pre_data, b1->data,
                        sizeof (t1->buffer.pre_data));
    }

  while (n_left >= 1)
    {
      u32 bi0;
      vlib_buffer_t *b0;
      pg_input_trace_t *t0;

      bi0 = b[0];
      b += 1;
      n_left -= 1;

      b0 = vlib_get_buffer (vm, bi0);

      n_trace0 +=
        vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
      t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));

      t0->stream_index = stream_index;
      t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
      t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
      clib_memcpy_fast (&t0->buffer, b0,
                        sizeof (b0[0]) - sizeof (b0->pre_data));
      clib_memcpy_fast (t0->buffer.pre_data, b0->data,
                        sizeof (t0->buffer.pre_data));
    }

  return n_trace - n_trace0 - n_trace1;
}

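/*
 * Parse the Ethernet/IP/L4 headers of each generated packet and set the
 * matching checksum-offload flags and header offsets; for chained TCP
 * packets, optionally mark them for GSO with the given gso_size.
 */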
static_always_inline void
fill_buffer_offload_flags (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
                           int gso_enabled, u32 gso_size)
{
  for (int i = 0; i < n_buffers; i++)
    {
      vlib_buffer_t *b0 = vlib_get_buffer (vm, buffers[i]);
      u8 l4_proto = 0;

      ethernet_header_t *eh =
        (ethernet_header_t *) vlib_buffer_get_current (b0);
      u16 ethertype = clib_net_to_host_u16 (eh->type);
      u16 l2hdr_sz = sizeof (ethernet_header_t);

      if (ethernet_frame_is_tagged (ethertype))
        {
          ethernet_vlan_header_t *vlan = (ethernet_vlan_header_t *) (eh + 1);

          ethertype = clib_net_to_host_u16 (vlan->type);
          l2hdr_sz += sizeof (*vlan);
          if (ethertype == ETHERNET_TYPE_VLAN)
            {
              vlan++;
              ethertype = clib_net_to_host_u16 (vlan->type);
              l2hdr_sz += sizeof (*vlan);
            }
        }

      vnet_buffer (b0)->l2_hdr_offset = 0;
      vnet_buffer (b0)->l3_hdr_offset = l2hdr_sz;

      if (PREDICT_TRUE (ethertype == ETHERNET_TYPE_IP4))
        {
          ip4_header_t *ip4 =
            (ip4_header_t *) (vlib_buffer_get_current (b0) + l2hdr_sz);
          vnet_buffer (b0)->l4_hdr_offset = l2hdr_sz + ip4_header_bytes (ip4);
          l4_proto = ip4->protocol;
          b0->flags |=
            (VNET_BUFFER_F_IS_IP4 | VNET_BUFFER_F_OFFLOAD_IP_CKSUM);
          b0->flags |= (VNET_BUFFER_F_L2_HDR_OFFSET_VALID
                        | VNET_BUFFER_F_L3_HDR_OFFSET_VALID |
                        VNET_BUFFER_F_L4_HDR_OFFSET_VALID);
        }
      else if (PREDICT_TRUE (ethertype == ETHERNET_TYPE_IP6))
        {
          ip6_header_t *ip6 =
            (ip6_header_t *) (vlib_buffer_get_current (b0) + l2hdr_sz);
          vnet_buffer (b0)->l4_hdr_offset = l2hdr_sz + sizeof (ip6_header_t);
          /* FIXME IPv6 EH traversal */
          l4_proto = ip6->protocol;
          b0->flags |=
            (VNET_BUFFER_F_IS_IP6 | VNET_BUFFER_F_L2_HDR_OFFSET_VALID |
             VNET_BUFFER_F_L3_HDR_OFFSET_VALID |
             VNET_BUFFER_F_L4_HDR_OFFSET_VALID);
        }

      if (l4_proto == IP_PROTOCOL_TCP)
        {
          b0->flags |= VNET_BUFFER_F_OFFLOAD_TCP_CKSUM;

          /* only set GSO flag for chained buffers */
          if (gso_enabled && (b0->flags & VLIB_BUFFER_NEXT_PRESENT))
            {
              b0->flags |= VNET_BUFFER_F_GSO;
              tcp_header_t *tcp =
                (tcp_header_t *) (vlib_buffer_get_current (b0) +
                                  vnet_buffer (b0)->l4_hdr_offset);
              vnet_buffer2 (b0)->gso_l4_hdr_sz = tcp_header_bytes (tcp);
              vnet_buffer2 (b0)->gso_size = gso_size;
            }
        }
      else if (l4_proto == IP_PROTOCOL_UDP)
        {
          b0->flags |= VNET_BUFFER_F_OFFLOAD_UDP_CKSUM;
        }
    }
}

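/*
 * Generate up to n_packets_to_generate packets for one stream and hand
 * them to the stream's next node, honoring device-input features and,
 * when enabled on the interface, GRO coalescing.
 */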
static uword
pg_generate_packets (vlib_node_runtime_t * node,
                     pg_main_t * pg,
                     pg_stream_t * s, uword n_packets_to_generate)
{
  vlib_main_t *vm = vlib_get_main ();
  u32 *to_next, n_this_frame, n_left, n_trace, n_packets_in_fifo;
  uword n_packets_generated;
  pg_buffer_index_t *bi, *bi0;
  u32 next_index = s->next_index;
  vnet_feature_main_t *fm = &feature_main;
  vnet_feature_config_main_t *cm;
  u8 feature_arc_index = fm->device_input_feature_arc_index;
  cm = &fm->feature_config_mains[feature_arc_index];
  u32 current_config_index = ~(u32) 0;
  pg_interface_t *pi;
  int i;

  pi = pool_elt_at_index (pg->interfaces,
                          pg->if_id_by_sw_if_index[s->sw_if_index[VLIB_RX]]);
  bi0 = s->buffer_indices;

  n_packets_in_fifo = pg_stream_fill (pg, s, n_packets_to_generate);
  n_packets_to_generate = clib_min (n_packets_in_fifo, n_packets_to_generate);
  n_packets_generated = 0;

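  /* If device-input features are enabled on the stream's RX interface,
     divert the generated packets to the first feature node on that arc
     instead of the stream's configured next node. */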
  if (PREDICT_FALSE
      (vnet_have_features (feature_arc_index, s->sw_if_index[VLIB_RX])))
    {
      current_config_index =
        vec_elt (cm->config_index_by_sw_if_index, s->sw_if_index[VLIB_RX]);
      vnet_get_config_data (&cm->config_main, &current_config_index,
                            &next_index, 0);
    }

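  /* When coalescing (GRO) is enabled on the interface, schedule its
     flow-table node on the current dispatcher. */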
  if (PREDICT_FALSE (pi->coalesce_enabled))
    vnet_gro_flow_table_schedule_node_on_dispatcher (vm, pi->flow_table);

  while (n_packets_to_generate > 0)
    {
      u32 *head, *start, *end;

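      /* Fast path: when feeding ethernet-input directly, mark the frame
         as coming from a single interface so ethernet-input can take its
         optimized code path, and keep the frame from being appended to. */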
      if (PREDICT_TRUE (next_index == VNET_DEVICE_INPUT_NEXT_ETHERNET_INPUT))
        {
          vlib_next_frame_t *nf;
          vlib_frame_t *f;
          ethernet_input_frame_t *ef;
          vlib_get_new_next_frame (vm, node, next_index, to_next, n_left);
          nf = vlib_node_runtime_get_next_frame (vm, node, next_index);
          f = vlib_get_frame (vm, nf->frame);
          f->flags = ETH_INPUT_FRAME_F_SINGLE_SW_IF_IDX;

          ef = vlib_frame_scalar_args (f);
          ef->sw_if_index = pi->sw_if_index;
          ef->hw_if_index = pi->hw_if_index;
          vlib_frame_no_append (f);
        }
      else
        vlib_get_next_frame (vm, node, next_index, to_next, n_left);

      n_this_frame = n_packets_to_generate;
      if (n_this_frame > n_left)
        n_this_frame = n_left;

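      /* The stream's buffer indices live in a circular FIFO; copy them
         into the frame in one or two chunks depending on wrap-around. */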
      start = bi0->buffer_fifo;
      end = clib_fifo_end (bi0->buffer_fifo);
      head = clib_fifo_head (bi0->buffer_fifo);

      if (head + n_this_frame <= end)
        vlib_buffer_copy_indices (to_next, head, n_this_frame);
      else
        {
          u32 n = end - head;
          vlib_buffer_copy_indices (to_next + 0, head, n);
          vlib_buffer_copy_indices (to_next + n, start, n_this_frame - n);
        }

      if (s->replay_packet_templates == 0)
        {
          vec_foreach (bi, s->buffer_indices)
            clib_fifo_advance_head (bi->buffer_fifo, n_this_frame);
        }
      else
        {
          clib_fifo_advance_head (bi0->buffer_fifo, n_this_frame);
        }

      if (current_config_index != ~(u32) 0)
        for (i = 0; i < n_this_frame; i++)
          {
            vlib_buffer_t *b;
            b = vlib_get_buffer (vm, to_next[i]);
            b->current_config_index = current_config_index;
            vnet_buffer (b)->feature_arc_index = feature_arc_index;
          }

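      /* Parse the L2/L3/L4 headers and set per-buffer offload/GSO flags
         when the interface has GSO enabled or the stream requested
         checksum offload. */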
      if (pi->gso_enabled ||
          (s->buffer_flags & (VNET_BUFFER_F_OFFLOAD_TCP_CKSUM |
                              VNET_BUFFER_F_OFFLOAD_UDP_CKSUM |
                              VNET_BUFFER_F_OFFLOAD_IP_CKSUM)))
        {
          fill_buffer_offload_flags (vm, to_next, n_this_frame,
                                     pi->gso_enabled, pi->gso_size);
        }

      n_trace = vlib_get_trace_count (vm, node);
      if (PREDICT_FALSE (n_trace > 0))
        {
          n_trace =
            pg_input_trace (pg, node, s - pg->streams, next_index, to_next,
                            n_this_frame, n_trace);
          vlib_set_trace_count (vm, node, n_trace);
        }
      n_packets_to_generate -= n_this_frame;
      n_packets_generated += n_this_frame;
      n_left -= n_this_frame;
      if (CLIB_DEBUG > 0)
        {
          int i;
          vlib_buffer_t *b;

          for (i = 0; i < n_this_frame; i++)
            {
              b = vlib_get_buffer (vm, to_next[i]);
              ASSERT ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0 ||
                      b->current_length >= VLIB_BUFFER_MIN_CHAIN_SEG_SIZE);
            }
        }
      vlib_put_next_frame (vm, node, next_index, n_left);
    }

  return n_packets_generated;
}

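/*
 * Generate packets for one stream, subject to its optional packet-count
 * limit and packets-per-second rate.  The rate limiter is a credit
 * accumulator: each call adds dt * rate, the integer part becomes this
 * call's budget and only the fractional part is carried forward, so the
 * generator never builds up a backlog when it falls behind.
 * Illustrative arithmetic (not from the source): at 1e6 pps and
 * dt = 50.5e-6 s the accumulator gains 50.5, up to 50 packets are
 * budgeted and 0.5 is kept for the next call.
 */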
static uword
pg_input_stream (vlib_node_runtime_t * node, pg_main_t * pg, pg_stream_t * s)
{
  vlib_main_t *vm = vlib_get_main ();
  uword n_packets;
  f64 time_now, dt;

  if (s->n_packets_limit > 0 && s->n_packets_generated >= s->n_packets_limit)
    {
      pg_stream_enable_disable (pg, s, /* want_enabled */ 0);
      return 0;
    }

  /* Apply rate limit. */
  time_now = vlib_time_now (vm);
  if (s->time_last_generate == 0)
    s->time_last_generate = time_now;

  dt = time_now - s->time_last_generate;
  s->time_last_generate = time_now;

  n_packets = VLIB_FRAME_SIZE;
  if (s->rate_packets_per_second > 0)
    {
      s->packet_accumulator += dt * s->rate_packets_per_second;
      n_packets = s->packet_accumulator;

      /* Never allow accumulator to grow if we get behind. */
      s->packet_accumulator -= n_packets;
    }

  /* Apply fixed limit. */
  if (s->n_packets_limit > 0
      && s->n_packets_generated + n_packets > s->n_packets_limit)
    n_packets = s->n_packets_limit - s->n_packets_generated;

  /* Generate up to one frame's worth of packets. */
  if (n_packets > s->n_max_frame)
    n_packets = s->n_max_frame;

  if (n_packets > 0)
    n_packets = pg_generate_packets (node, pg, s, n_packets);

  s->n_packets_generated += n_packets;

  return n_packets;
}

uword
pg_input (vlib_main_t * vm, vlib_node_runtime_t * node, vlib_frame_t * frame)
{
  uword i;
  pg_main_t *pg = &pg_main;
  uword n_packets = 0;
  u32 worker_index = 0;

  if (vlib_num_workers ())
    worker_index = vlib_get_current_worker_index ();

  /* *INDENT-OFF* */
  clib_bitmap_foreach (i, pg->enabled_streams[worker_index]) {
    pg_stream_t *s = vec_elt_at_index (pg->streams, i);
    n_packets += pg_input_stream (node, pg, s);
  }
  /* *INDENT-ON* */

  return n_packets;
}

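/* The pg-input node is registered as a sibling of device-input, so it
   shares device-input's next nodes; it stays disabled until a stream is
   enabled on some worker. */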
/* *INDENT-OFF* */
VLIB_REGISTER_NODE (pg_input_node) = {
  .function = pg_input,
  .flags = VLIB_NODE_FLAG_TRACE_SUPPORTED,
  .name = "pg-input",
  .sibling_of = "device-input",
  .type = VLIB_NODE_TYPE_INPUT,

  .format_trace = format_pg_input_trace,

  /* Input node will be left disabled until a stream is active. */
  .state = VLIB_NODE_STATE_DISABLED,
};
/* *INDENT-ON* */

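/*
 * "pg-input-mac-filter" device-input feature node: next[] defaults to
 * error-drop; a packet is handed to the next feature node only when its
 * destination MAC is a multicast/broadcast address that clears the
 * interface's allowed-multicast list check.
 */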
VLIB_NODE_FN (pg_input_mac_filter) (vlib_main_t * vm,
                                    vlib_node_runtime_t * node,
                                    vlib_frame_t * frame)
{
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
  u16 nexts[VLIB_FRAME_SIZE], *next;
  pg_main_t *pg = &pg_main;
  u32 n_left, *from;

  from = vlib_frame_vector_args (frame);
  n_left = frame->n_vectors;
  next = nexts;

  clib_memset_u16 (next, 0, VLIB_FRAME_SIZE);

  vlib_get_buffers (vm, from, bufs, n_left);

  while (n_left)
    {
      const ethernet_header_t *eth;
      pg_interface_t *pi;
      mac_address_t in;

      pi = pool_elt_at_index
        (pg->interfaces,
         pg->if_id_by_sw_if_index[vnet_buffer (b[0])->sw_if_index[VLIB_RX]]);
      eth = vlib_buffer_get_current (b[0]);

      mac_address_from_bytes (&in, eth->dst_address);

      if (PREDICT_FALSE (ethernet_address_cast (in.bytes)))
        {
          mac_address_t *allowed;

          if (0 != vec_len (pi->allowed_mcast_macs))
            {
              vec_foreach (allowed, pi->allowed_mcast_macs)
                {
                  if (0 != mac_address_cmp (allowed, &in))
                    break;
                }

              if (vec_is_member (allowed, pi->allowed_mcast_macs))
                vnet_feature_next_u16 (&next[0], b[0]);
            }
        }

      b += 1;
      next += 1;
      n_left -= 1;
    }

  vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);

  return (frame->n_vectors);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (pg_input_mac_filter) = {
  .name = "pg-input-mac-filter",
  .vector_size = sizeof (u32),
  .format_trace = format_pg_input_trace,
  .n_next_nodes = 1,
  .next_nodes = {
    [0] = "error-drop",
  },
};
VNET_FEATURE_INIT (pg_input_mac_filter_feat, static) = {
  .arc_name = "device-input",
  .node_name = "pg-input-mac-filter",
};
/* *INDENT-ON* */

static clib_error_t *
pg_input_mac_filter_cfg (vlib_main_t * vm,
                         unformat_input_t * input, vlib_cli_command_t * cmd)
{
  unformat_input_t _line_input, *line_input = &_line_input;
  u32 sw_if_index = ~0;
  int is_enable = 1;

  if (!unformat_user (input, unformat_line_input, line_input))
    return 0;

  while (unformat_check_input (line_input) != UNFORMAT_END_OF_INPUT)
    {
      if (unformat (line_input, "%U",
                    unformat_vnet_sw_interface,
                    vnet_get_main (), &sw_if_index))
        ;
      else if (unformat (line_input, "%U",
                         unformat_vlib_enable_disable, &is_enable))
        ;
      else
        return clib_error_create ("unknown input `%U'",
                                  format_unformat_error, line_input);
    }
  unformat_free (line_input);

  if (~0 == sw_if_index)
    return clib_error_create ("specify interface");

  vnet_feature_enable_disable ("device-input",
                               "pg-input-mac-filter",
                               sw_if_index, is_enable, 0, 0);

  return NULL;
}

/* *INDENT-OFF* */
VLIB_CLI_COMMAND (enable_streams_cli, static) = {
  .path = "packet-generator mac-filter",
  .short_help = "packet-generator mac-filter <INTERFACE> <on|off>",
  .function = pg_input_mac_filter_cfg,
};
/* *INDENT-ON* */
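/*
 * Example CLI usage (the interface name below is illustrative):
 *   packet-generator mac-filter pg0 on
 * enables the pg-input-mac-filter feature on the device-input arc for
 * interface pg0; "off" removes it again.
 */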
1951
1952
Calvin71e97c62016-08-19 16:23:14 -04001953/*
1954 * fd.io coding-style-patch-verification: ON
1955 *
1956 * Local Variables:
1957 * eval: (c-set-style "gnu")
1958 * End:
1959 */