/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * pg_input.c: buffer generator input
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

/*
 * To be honest, the packet generator needs an extreme makeover.
 * Two key assumptions which drove the current implementation are no
 * longer true: first, that buffer managers implement a post-TX recycle
 * list; second, that packet generator performance is first-order
 * important.
 */

#include <vlib/vlib.h>
#include <vnet/pg/pg.h>
#include <vnet/vnet.h>
#include <vnet/ethernet/ethernet.h>
#include <vnet/feature/feature.h>
#include <vnet/devices/devices.h>

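/*
 * Debug helper: check that a generated buffer still matches the stream's
 * fixed packet data, starting at data_offset, under the per-byte mask.
 * Returns 1 on match; on mismatch, logs the offending buffer contents,
 * mask, and expected data, and returns 0.
 */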
Ed Warnickecb9cada2015-12-08 15:45:58 -070055static int
56validate_buffer_data2 (vlib_buffer_t * b, pg_stream_t * s,
57 u32 data_offset, u32 n_bytes)
58{
Calvin71e97c62016-08-19 16:23:14 -040059 u8 *bd, *pd, *pm;
Ed Warnickecb9cada2015-12-08 15:45:58 -070060 u32 i;
61
62 bd = b->data;
63 pd = s->fixed_packet_data + data_offset;
64 pm = s->fixed_packet_data_mask + data_offset;
65
66 if (pd + n_bytes >= vec_end (s->fixed_packet_data))
67 n_bytes = (pd < vec_end (s->fixed_packet_data)
Calvin71e97c62016-08-19 16:23:14 -040068 ? vec_end (s->fixed_packet_data) - pd : 0);
Ed Warnickecb9cada2015-12-08 15:45:58 -070069
70 for (i = 0; i < n_bytes; i++)
71 if ((bd[i] & pm[i]) != pd[i])
72 break;
73
74 if (i >= n_bytes)
75 return 1;
76
Damjan Marionbd846cd2017-11-21 13:12:41 +010077 clib_warning ("buffer %U", format_vnet_buffer, b);
Ed Warnickecb9cada2015-12-08 15:45:58 -070078 clib_warning ("differ at index %d", i);
79 clib_warning ("is %U", format_hex_bytes, bd, n_bytes);
80 clib_warning ("mask %U", format_hex_bytes, pm, n_bytes);
81 clib_warning ("expect %U", format_hex_bytes, pd, n_bytes);
82 return 0;
83}
84
85static int
86validate_buffer_data (vlib_buffer_t * b, pg_stream_t * s)
Calvin71e97c62016-08-19 16:23:14 -040087{
88 return validate_buffer_data2 (b, s, 0, s->buffer_bytes);
89}
Ed Warnickecb9cada2015-12-08 15:45:58 -070090
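/*
 * set_1 / set_2 write an 8/16/32/64-bit value (optionally converted to
 * network byte order) to one or two unaligned destinations inside buffer
 * data.  set_2 is the two-buffer variant used by the unrolled loops below.
 */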
91always_inline void
Calvin71e97c62016-08-19 16:23:14 -040092set_1 (void *a0,
93 u64 v0, u64 v_min, u64 v_max, u32 n_bits, u32 is_net_byte_order)
Ed Warnickecb9cada2015-12-08 15:45:58 -070094{
95 ASSERT (v0 >= v_min && v0 <= v_max);
96 if (n_bits == BITS (u8))
97 {
98 ((u8 *) a0)[0] = v0;
99 }
100 else if (n_bits == BITS (u16))
101 {
102 if (is_net_byte_order)
103 v0 = clib_host_to_net_u16 (v0);
104 clib_mem_unaligned (a0, u16) = v0;
105 }
106 else if (n_bits == BITS (u32))
107 {
108 if (is_net_byte_order)
109 v0 = clib_host_to_net_u32 (v0);
110 clib_mem_unaligned (a0, u32) = v0;
111 }
112 else if (n_bits == BITS (u64))
113 {
114 if (is_net_byte_order)
115 v0 = clib_host_to_net_u64 (v0);
116 clib_mem_unaligned (a0, u64) = v0;
117 }
118}
119
120always_inline void
Calvin71e97c62016-08-19 16:23:14 -0400121set_2 (void *a0, void *a1,
Ed Warnickecb9cada2015-12-08 15:45:58 -0700122 u64 v0, u64 v1,
123 u64 v_min, u64 v_max,
Calvin71e97c62016-08-19 16:23:14 -0400124 u32 n_bits, u32 is_net_byte_order, u32 is_increment)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700125{
126 ASSERT (v0 >= v_min && v0 <= v_max);
127 ASSERT (v1 >= v_min && v1 <= (v_max + is_increment));
128 if (n_bits == BITS (u8))
129 {
130 ((u8 *) a0)[0] = v0;
131 ((u8 *) a1)[0] = v1;
132 }
133 else if (n_bits == BITS (u16))
134 {
135 if (is_net_byte_order)
136 {
137 v0 = clib_host_to_net_u16 (v0);
138 v1 = clib_host_to_net_u16 (v1);
139 }
140 clib_mem_unaligned (a0, u16) = v0;
141 clib_mem_unaligned (a1, u16) = v1;
142 }
143 else if (n_bits == BITS (u32))
144 {
145 if (is_net_byte_order)
146 {
147 v0 = clib_host_to_net_u32 (v0);
148 v1 = clib_host_to_net_u32 (v1);
149 }
150 clib_mem_unaligned (a0, u32) = v0;
151 clib_mem_unaligned (a1, u32) = v1;
152 }
153 else if (n_bits == BITS (u64))
154 {
155 if (is_net_byte_order)
156 {
157 v0 = clib_host_to_net_u64 (v0);
158 v1 = clib_host_to_net_u64 (v1);
159 }
160 clib_mem_unaligned (a0, u64) = v0;
161 clib_mem_unaligned (a1, u64) = v1;
162 }
163}
164
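/*
 * Write the fixed value v_min at byte_offset into every buffer in the
 * vector, two buffers per iteration with a write prefetch of the next pair.
 */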
165static_always_inline void
166do_set_fixed (pg_main_t * pg,
167 pg_stream_t * s,
168 u32 * buffers,
169 u32 n_buffers,
170 u32 n_bits,
Calvin71e97c62016-08-19 16:23:14 -0400171 u32 byte_offset, u32 is_net_byte_order, u64 v_min, u64 v_max)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700172{
Damjan Marion64034362016-11-07 22:19:55 +0100173 vlib_main_t *vm = vlib_get_main ();
Ed Warnickecb9cada2015-12-08 15:45:58 -0700174
175 while (n_buffers >= 4)
176 {
Calvin71e97c62016-08-19 16:23:14 -0400177 vlib_buffer_t *b0, *b1, *b2, *b3;
178 void *a0, *a1;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700179
180 b0 = vlib_get_buffer (vm, buffers[0]);
181 b1 = vlib_get_buffer (vm, buffers[1]);
182 b2 = vlib_get_buffer (vm, buffers[2]);
183 b3 = vlib_get_buffer (vm, buffers[3]);
184 buffers += 2;
185 n_buffers -= 2;
186
187 a0 = (void *) b0 + byte_offset;
188 a1 = (void *) b1 + byte_offset;
189 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
190 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
191
Calvin71e97c62016-08-19 16:23:14 -0400192 set_2 (a0, a1, v_min, v_min, v_min, v_max, n_bits, is_net_byte_order,
Ed Warnickecb9cada2015-12-08 15:45:58 -0700193 /* is_increment */ 0);
194
195 ASSERT (validate_buffer_data (b0, s));
196 ASSERT (validate_buffer_data (b1, s));
197 }
198
199 while (n_buffers > 0)
200 {
Calvin71e97c62016-08-19 16:23:14 -0400201 vlib_buffer_t *b0;
202 void *a0;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700203
204 b0 = vlib_get_buffer (vm, buffers[0]);
205 buffers += 1;
206 n_buffers -= 1;
207
208 a0 = (void *) b0 + byte_offset;
209
Calvin71e97c62016-08-19 16:23:14 -0400210 set_1 (a0, v_min, v_min, v_max, n_bits, is_net_byte_order);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700211
212 ASSERT (validate_buffer_data (b0, s));
213 }
214}
215
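/*
 * Write an incrementing value, wrapping from v_max back to v_min, at
 * byte_offset into each buffer.  Optionally accumulates the sum of the
 * values written (the caller uses this for packet-length accounting) and
 * returns the next value to use.
 */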
216static_always_inline u64
217do_set_increment (pg_main_t * pg,
218 pg_stream_t * s,
219 u32 * buffers,
220 u32 n_buffers,
221 u32 n_bits,
222 u32 byte_offset,
223 u32 is_net_byte_order,
Calvin71e97c62016-08-19 16:23:14 -0400224 u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max, u64 v)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700225{
Damjan Marion64034362016-11-07 22:19:55 +0100226 vlib_main_t *vm = vlib_get_main ();
Ed Warnickecb9cada2015-12-08 15:45:58 -0700227 u64 sum = 0;
228
229 ASSERT (v >= v_min && v <= v_max);
230
231 while (n_buffers >= 4)
232 {
Calvin71e97c62016-08-19 16:23:14 -0400233 vlib_buffer_t *b0, *b1, *b2, *b3;
234 void *a0, *a1;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700235 u64 v_old;
236
237 b0 = vlib_get_buffer (vm, buffers[0]);
238 b1 = vlib_get_buffer (vm, buffers[1]);
239 b2 = vlib_get_buffer (vm, buffers[2]);
240 b3 = vlib_get_buffer (vm, buffers[3]);
241 buffers += 2;
242 n_buffers -= 2;
243
244 a0 = (void *) b0 + byte_offset;
245 a1 = (void *) b1 + byte_offset;
246 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
247 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
248
249 v_old = v;
250 v = v_old + 2;
251 v = v > v_max ? v_min : v;
252 set_2 (a0, a1,
Calvin71e97c62016-08-19 16:23:14 -0400253 v_old + 0, v_old + 1, v_min, v_max, n_bits, is_net_byte_order,
Ed Warnickecb9cada2015-12-08 15:45:58 -0700254 /* is_increment */ 1);
255
256 if (want_sum)
Calvin71e97c62016-08-19 16:23:14 -0400257 sum += 2 * v_old + 1;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700258
259 if (PREDICT_FALSE (v_old + 1 > v_max))
260 {
261 if (want_sum)
Calvin71e97c62016-08-19 16:23:14 -0400262 sum -= 2 * v_old + 1;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700263
264 v = v_old;
265 set_1 (a0, v + 0, v_min, v_max, n_bits, is_net_byte_order);
266 if (want_sum)
267 sum += v;
268 v += 1;
269
270 v = v > v_max ? v_min : v;
271 set_1 (a1, v + 0, v_min, v_max, n_bits, is_net_byte_order);
272 if (want_sum)
273 sum += v;
274 v += 1;
275 }
276
277 ASSERT (validate_buffer_data (b0, s));
278 ASSERT (validate_buffer_data (b1, s));
279 }
280
281 while (n_buffers > 0)
282 {
Calvin71e97c62016-08-19 16:23:14 -0400283 vlib_buffer_t *b0;
284 void *a0;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700285 u64 v_old;
286
287 b0 = vlib_get_buffer (vm, buffers[0]);
288 buffers += 1;
289 n_buffers -= 1;
290
291 a0 = (void *) b0 + byte_offset;
292
293 v_old = v;
294 if (want_sum)
295 sum += v_old;
296 v += 1;
297 v = v > v_max ? v_min : v;
298
299 ASSERT (v_old >= v_min && v_old <= v_max);
300 set_1 (a0, v_old, v_min, v_max, n_bits, is_net_byte_order);
301
302 ASSERT (validate_buffer_data (b0, s));
303 }
304
305 if (want_sum)
306 *sum_result = sum;
307
308 return v;
309}
310
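/*
 * Write a pseudo-random value in [v_min, v_max] at byte_offset into each
 * buffer, drawing from the vlib random buffer and folding the power-of-2
 * sized random number back into range.  Optionally accumulates the sum of
 * the values written.
 */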
311static_always_inline void
312do_set_random (pg_main_t * pg,
313 pg_stream_t * s,
314 u32 * buffers,
315 u32 n_buffers,
316 u32 n_bits,
317 u32 byte_offset,
318 u32 is_net_byte_order,
Calvin71e97c62016-08-19 16:23:14 -0400319 u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700320{
Damjan Marion64034362016-11-07 22:19:55 +0100321 vlib_main_t *vm = vlib_get_main ();
Ed Warnickecb9cada2015-12-08 15:45:58 -0700322 u64 v_diff = v_max - v_min + 1;
323 u64 r_mask = max_pow2 (v_diff) - 1;
324 u64 v0, v1;
325 u64 sum = 0;
Calvin71e97c62016-08-19 16:23:14 -0400326 void *random_data;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700327
328 random_data = clib_random_buffer_get_data
329 (&vm->random_buffer, n_buffers * n_bits / BITS (u8));
330
331 v0 = v1 = v_min;
332
333 while (n_buffers >= 4)
334 {
Calvin71e97c62016-08-19 16:23:14 -0400335 vlib_buffer_t *b0, *b1, *b2, *b3;
336 void *a0, *a1;
337 u64 r0 = 0, r1 = 0; /* warnings be gone */
Ed Warnickecb9cada2015-12-08 15:45:58 -0700338
339 b0 = vlib_get_buffer (vm, buffers[0]);
340 b1 = vlib_get_buffer (vm, buffers[1]);
341 b2 = vlib_get_buffer (vm, buffers[2]);
342 b3 = vlib_get_buffer (vm, buffers[3]);
343 buffers += 2;
344 n_buffers -= 2;
345
346 a0 = (void *) b0 + byte_offset;
347 a1 = (void *) b1 + byte_offset;
348 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
349 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
350
351 switch (n_bits)
352 {
353#define _(n) \
354 case BITS (u##n): \
355 { \
356 u##n * r = random_data; \
357 r0 = r[0]; \
358 r1 = r[1]; \
359 random_data = r + 2; \
360 } \
361 break;
362
Calvin71e97c62016-08-19 16:23:14 -0400363 _(8);
364 _(16);
365 _(32);
366 _(64);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700367
368#undef _
369 }
370
371 /* Add power of 2 sized random number which may be out of range. */
372 v0 += r0 & r_mask;
373 v1 += r1 & r_mask;
374
375 /* Twice should be enough to reduce to v_min .. v_max range. */
376 v0 = v0 > v_max ? v0 - v_diff : v0;
377 v1 = v1 > v_max ? v1 - v_diff : v1;
378 v0 = v0 > v_max ? v0 - v_diff : v0;
379 v1 = v1 > v_max ? v1 - v_diff : v1;
380
381 if (want_sum)
382 sum += v0 + v1;
383
Calvin71e97c62016-08-19 16:23:14 -0400384 set_2 (a0, a1, v0, v1, v_min, v_max, n_bits, is_net_byte_order,
Ed Warnickecb9cada2015-12-08 15:45:58 -0700385 /* is_increment */ 0);
386
387 ASSERT (validate_buffer_data (b0, s));
388 ASSERT (validate_buffer_data (b1, s));
389 }
390
391 while (n_buffers > 0)
392 {
Calvin71e97c62016-08-19 16:23:14 -0400393 vlib_buffer_t *b0;
394 void *a0;
395 u64 r0 = 0; /* warnings be gone */
Ed Warnickecb9cada2015-12-08 15:45:58 -0700396
397 b0 = vlib_get_buffer (vm, buffers[0]);
398 buffers += 1;
399 n_buffers -= 1;
400
401 a0 = (void *) b0 + byte_offset;
402
403 switch (n_bits)
404 {
405#define _(n) \
406 case BITS (u##n): \
407 { \
408 u##n * r = random_data; \
409 r0 = r[0]; \
410 random_data = r + 1; \
411 } \
412 break;
413
Calvin71e97c62016-08-19 16:23:14 -0400414 _(8);
415 _(16);
416 _(32);
417 _(64);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700418
419#undef _
420 }
421
422 /* Add power of 2 sized random number which may be out of range. */
423 v0 += r0 & r_mask;
424
425 /* Twice should be enough to reduce to v_min .. v_max range. */
426 v0 = v0 > v_max ? v0 - v_diff : v0;
427 v0 = v0 > v_max ? v0 - v_diff : v0;
428
429 if (want_sum)
430 sum += v0;
431
432 set_1 (a0, v0, v_min, v_max, n_bits, is_net_byte_order);
433
434 ASSERT (validate_buffer_data (b0, s));
435 }
436
437 if (want_sum)
438 *sum_result = sum;
439}
440
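/*
 * The setbits_* variants below edit a bit field narrower than its
 * containing 8/16/32/64-bit word: a read-modify-write in network byte
 * order that clears the field with 'mask' and inserts the value at
 * 'shift'.
 */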
441#define _(i,t) \
442 clib_mem_unaligned (a##i, t) = \
443 clib_host_to_net_##t ((clib_net_to_host_mem_##t (a##i) &~ mask) \
444 | (v##i << shift))
Calvin71e97c62016-08-19 16:23:14 -0400445
Ed Warnickecb9cada2015-12-08 15:45:58 -0700446always_inline void
Calvin71e97c62016-08-19 16:23:14 -0400447setbits_1 (void *a0,
Ed Warnickecb9cada2015-12-08 15:45:58 -0700448 u64 v0,
449 u64 v_min, u64 v_max,
Calvin71e97c62016-08-19 16:23:14 -0400450 u32 max_bits, u32 n_bits, u64 mask, u32 shift)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700451{
452 ASSERT (v0 >= v_min && v0 <= v_max);
453 if (max_bits == BITS (u8))
Calvin71e97c62016-08-19 16:23:14 -0400454 ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700455
456 else if (max_bits == BITS (u16))
457 {
Calvin71e97c62016-08-19 16:23:14 -0400458 _(0, u16);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700459 }
460 else if (max_bits == BITS (u32))
461 {
Calvin71e97c62016-08-19 16:23:14 -0400462 _(0, u32);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700463 }
464 else if (max_bits == BITS (u64))
465 {
Calvin71e97c62016-08-19 16:23:14 -0400466 _(0, u64);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700467 }
468}
469
470always_inline void
Calvin71e97c62016-08-19 16:23:14 -0400471setbits_2 (void *a0, void *a1,
Ed Warnickecb9cada2015-12-08 15:45:58 -0700472 u64 v0, u64 v1,
473 u64 v_min, u64 v_max,
Calvin71e97c62016-08-19 16:23:14 -0400474 u32 max_bits, u32 n_bits, u64 mask, u32 shift, u32 is_increment)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700475{
476 ASSERT (v0 >= v_min && v0 <= v_max);
477 ASSERT (v1 >= v_min && v1 <= v_max + is_increment);
478 if (max_bits == BITS (u8))
479 {
Calvin71e97c62016-08-19 16:23:14 -0400480 ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
481 ((u8 *) a1)[0] = (((u8 *) a1)[0] & ~mask) | (v1 << shift);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700482 }
483
484 else if (max_bits == BITS (u16))
485 {
Calvin71e97c62016-08-19 16:23:14 -0400486 _(0, u16);
487 _(1, u16);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700488 }
489 else if (max_bits == BITS (u32))
490 {
Calvin71e97c62016-08-19 16:23:14 -0400491 _(0, u32);
492 _(1, u32);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700493 }
494 else if (max_bits == BITS (u64))
495 {
Calvin71e97c62016-08-19 16:23:14 -0400496 _(0, u64);
497 _(1, u64);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700498 }
499}
500
501#undef _
502
503static_always_inline void
504do_setbits_fixed (pg_main_t * pg,
505 pg_stream_t * s,
506 u32 * buffers,
507 u32 n_buffers,
508 u32 max_bits,
509 u32 n_bits,
Calvin71e97c62016-08-19 16:23:14 -0400510 u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700511{
Damjan Marion64034362016-11-07 22:19:55 +0100512 vlib_main_t *vm = vlib_get_main ();
Ed Warnickecb9cada2015-12-08 15:45:58 -0700513
514 while (n_buffers >= 4)
515 {
Calvin71e97c62016-08-19 16:23:14 -0400516 vlib_buffer_t *b0, *b1, *b2, *b3;
517 void *a0, *a1;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700518
519 b0 = vlib_get_buffer (vm, buffers[0]);
520 b1 = vlib_get_buffer (vm, buffers[1]);
521 b2 = vlib_get_buffer (vm, buffers[2]);
522 b3 = vlib_get_buffer (vm, buffers[3]);
523 buffers += 2;
524 n_buffers -= 2;
525
526 a0 = (void *) b0 + byte_offset;
527 a1 = (void *) b1 + byte_offset;
528 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
529 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
530
531 setbits_2 (a0, a1,
Calvin71e97c62016-08-19 16:23:14 -0400532 v_min, v_min, v_min, v_max, max_bits, n_bits, mask, shift,
Ed Warnickecb9cada2015-12-08 15:45:58 -0700533 /* is_increment */ 0);
534
535 ASSERT (validate_buffer_data (b0, s));
536 ASSERT (validate_buffer_data (b1, s));
537 }
538
539 while (n_buffers > 0)
540 {
Calvin71e97c62016-08-19 16:23:14 -0400541 vlib_buffer_t *b0;
542 void *a0;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700543
544 b0 = vlib_get_buffer (vm, buffers[0]);
545 buffers += 1;
546 n_buffers -= 1;
547
548 a0 = (void *) b0 + byte_offset;
549
550 setbits_1 (a0, v_min, v_min, v_max, max_bits, n_bits, mask, shift);
551 ASSERT (validate_buffer_data (b0, s));
552 }
553}
554
555static_always_inline u64
556do_setbits_increment (pg_main_t * pg,
557 pg_stream_t * s,
558 u32 * buffers,
559 u32 n_buffers,
560 u32 max_bits,
561 u32 n_bits,
562 u32 byte_offset,
Calvin71e97c62016-08-19 16:23:14 -0400563 u64 v_min, u64 v_max, u64 v, u64 mask, u32 shift)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700564{
Damjan Marion64034362016-11-07 22:19:55 +0100565 vlib_main_t *vm = vlib_get_main ();
Ed Warnickecb9cada2015-12-08 15:45:58 -0700566
567 ASSERT (v >= v_min && v <= v_max);
568
569 while (n_buffers >= 4)
570 {
Calvin71e97c62016-08-19 16:23:14 -0400571 vlib_buffer_t *b0, *b1, *b2, *b3;
572 void *a0, *a1;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700573 u64 v_old;
574
575 b0 = vlib_get_buffer (vm, buffers[0]);
576 b1 = vlib_get_buffer (vm, buffers[1]);
577 b2 = vlib_get_buffer (vm, buffers[2]);
578 b3 = vlib_get_buffer (vm, buffers[3]);
579 buffers += 2;
580 n_buffers -= 2;
581
582 a0 = (void *) b0 + byte_offset;
583 a1 = (void *) b1 + byte_offset;
584 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
585 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
586
587 v_old = v;
588 v = v_old + 2;
589 v = v > v_max ? v_min : v;
590 setbits_2 (a0, a1,
591 v_old + 0, v_old + 1,
Calvin71e97c62016-08-19 16:23:14 -0400592 v_min, v_max, max_bits, n_bits, mask, shift,
Ed Warnickecb9cada2015-12-08 15:45:58 -0700593 /* is_increment */ 1);
594
595 if (PREDICT_FALSE (v_old + 1 > v_max))
596 {
597 v = v_old;
598 setbits_1 (a0, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
599 v += 1;
600
601 v = v > v_max ? v_min : v;
602 setbits_1 (a1, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
603 v += 1;
604 }
605 ASSERT (validate_buffer_data (b0, s));
606 ASSERT (validate_buffer_data (b1, s));
607 }
608
609 while (n_buffers > 0)
610 {
Calvin71e97c62016-08-19 16:23:14 -0400611 vlib_buffer_t *b0;
612 void *a0;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700613 u64 v_old;
614
615 b0 = vlib_get_buffer (vm, buffers[0]);
616 buffers += 1;
617 n_buffers -= 1;
618
619 a0 = (void *) b0 + byte_offset;
620
621 v_old = v;
622 v = v_old + 1;
623 v = v > v_max ? v_min : v;
624
625 ASSERT (v_old >= v_min && v_old <= v_max);
626 setbits_1 (a0, v_old, v_min, v_max, max_bits, n_bits, mask, shift);
627
628 ASSERT (validate_buffer_data (b0, s));
629 }
630
631 return v;
632}
633
634static_always_inline void
635do_setbits_random (pg_main_t * pg,
636 pg_stream_t * s,
637 u32 * buffers,
638 u32 n_buffers,
639 u32 max_bits,
640 u32 n_bits,
Calvin71e97c62016-08-19 16:23:14 -0400641 u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700642{
Damjan Marion64034362016-11-07 22:19:55 +0100643 vlib_main_t *vm = vlib_get_main ();
Ed Warnickecb9cada2015-12-08 15:45:58 -0700644 u64 v_diff = v_max - v_min + 1;
645 u64 r_mask = max_pow2 (v_diff) - 1;
646 u64 v0, v1;
Calvin71e97c62016-08-19 16:23:14 -0400647 void *random_data;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700648
649 random_data = clib_random_buffer_get_data
650 (&vm->random_buffer, n_buffers * max_bits / BITS (u8));
651 v0 = v1 = v_min;
652
653 while (n_buffers >= 4)
654 {
Calvin71e97c62016-08-19 16:23:14 -0400655 vlib_buffer_t *b0, *b1, *b2, *b3;
656 void *a0, *a1;
657 u64 r0 = 0, r1 = 0; /* warnings be gone */
Ed Warnickecb9cada2015-12-08 15:45:58 -0700658
659 b0 = vlib_get_buffer (vm, buffers[0]);
660 b1 = vlib_get_buffer (vm, buffers[1]);
661 b2 = vlib_get_buffer (vm, buffers[2]);
662 b3 = vlib_get_buffer (vm, buffers[3]);
663 buffers += 2;
664 n_buffers -= 2;
665
666 a0 = (void *) b0 + byte_offset;
667 a1 = (void *) b1 + byte_offset;
668 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
669 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
670
671 switch (max_bits)
672 {
673#define _(n) \
674 case BITS (u##n): \
675 { \
676 u##n * r = random_data; \
677 r0 = r[0]; \
678 r1 = r[1]; \
679 random_data = r + 2; \
680 } \
681 break;
682
Calvin71e97c62016-08-19 16:23:14 -0400683 _(8);
684 _(16);
685 _(32);
686 _(64);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700687
688#undef _
689 }
690
691 /* Add power of 2 sized random number which may be out of range. */
692 v0 += r0 & r_mask;
693 v1 += r1 & r_mask;
694
695 /* Twice should be enough to reduce to v_min .. v_max range. */
696 v0 = v0 > v_max ? v0 - v_diff : v0;
697 v1 = v1 > v_max ? v1 - v_diff : v1;
698 v0 = v0 > v_max ? v0 - v_diff : v0;
699 v1 = v1 > v_max ? v1 - v_diff : v1;
700
Calvin71e97c62016-08-19 16:23:14 -0400701 setbits_2 (a0, a1, v0, v1, v_min, v_max, max_bits, n_bits, mask, shift,
Ed Warnickecb9cada2015-12-08 15:45:58 -0700702 /* is_increment */ 0);
703
704 ASSERT (validate_buffer_data (b0, s));
705 ASSERT (validate_buffer_data (b1, s));
706 }
707
708 while (n_buffers > 0)
709 {
Calvin71e97c62016-08-19 16:23:14 -0400710 vlib_buffer_t *b0;
711 void *a0;
712 u64 r0 = 0; /* warnings be gone */
Ed Warnickecb9cada2015-12-08 15:45:58 -0700713
714 b0 = vlib_get_buffer (vm, buffers[0]);
715 buffers += 1;
716 n_buffers -= 1;
717
718 a0 = (void *) b0 + byte_offset;
719
720 switch (max_bits)
721 {
722#define _(n) \
723 case BITS (u##n): \
724 { \
725 u##n * r = random_data; \
726 r0 = r[0]; \
727 random_data = r + 1; \
728 } \
729 break;
730
Calvin71e97c62016-08-19 16:23:14 -0400731 _(8);
732 _(16);
733 _(32);
734 _(64);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700735
736#undef _
737 }
738
739 /* Add power of 2 sized random number which may be out of range. */
740 v0 += r0 & r_mask;
741
742 /* Twice should be enough to reduce to v_min .. v_max range. */
743 v0 = v0 > v_max ? v0 - v_diff : v0;
744 v0 = v0 > v_max ? v0 - v_diff : v0;
745
746 setbits_1 (a0, v0, v_min, v_max, max_bits, n_bits, mask, shift);
747
748 ASSERT (validate_buffer_data (b0, s));
749 }
750}
751
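/*
 * Dispatch a single non-fixed edit across a vector of buffers: pick the
 * word size covering [lo_bit, hi_bit), then apply the fixed, incrementing,
 * or random setter, falling back to the setbits_* path when the field is
 * not byte-aligned.  Returns the edit's next increment value.
 */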
Calvin71e97c62016-08-19 16:23:14 -0400752static u64
753do_it (pg_main_t * pg,
754 pg_stream_t * s,
755 u32 * buffers,
756 u32 n_buffers,
757 u32 lo_bit, u32 hi_bit,
758 u64 v_min, u64 v_max, u64 v, pg_edit_type_t edit_type)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700759{
760 u32 max_bits, l0, l1, h1, start_bit;
761
762 if (v_min == v_max)
763 edit_type = PG_EDIT_FIXED;
764
765 l0 = lo_bit / BITS (u8);
766 l1 = lo_bit % BITS (u8);
767 h1 = hi_bit % BITS (u8);
768
769 start_bit = l0 * BITS (u8);
770
771 max_bits = hi_bit - start_bit;
772 ASSERT (max_bits <= 64);
773
774#define _(n) \
775 case (n): \
776 if (edit_type == PG_EDIT_INCREMENT) \
777 v = do_set_increment (pg, s, buffers, n_buffers, \
778 BITS (u##n), \
779 l0, \
780 /* is_net_byte_order */ 1, \
781 /* want sum */ 0, 0, \
782 v_min, v_max, \
783 v); \
784 else if (edit_type == PG_EDIT_RANDOM) \
785 do_set_random (pg, s, buffers, n_buffers, \
786 BITS (u##n), \
787 l0, \
788 /* is_net_byte_order */ 1, \
789 /* want sum */ 0, 0, \
790 v_min, v_max); \
791 else /* edit_type == PG_EDIT_FIXED */ \
792 do_set_fixed (pg, s, buffers, n_buffers, \
793 BITS (u##n), \
794 l0, \
795 /* is_net_byte_order */ 1, \
796 v_min, v_max); \
797 goto done;
798
799 if (l1 == 0 && h1 == 0)
800 {
801 switch (max_bits)
802 {
Calvin71e97c62016-08-19 16:23:14 -0400803 _(8);
804 _(16);
805 _(32);
806 _(64);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700807 }
808 }
809
810#undef _
811
812 {
813 u64 mask;
814 u32 shift = l1;
Calvin71e97c62016-08-19 16:23:14 -0400815 u32 n_bits = max_bits;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700816
817 max_bits = clib_max (max_pow2 (n_bits), 8);
818
819 mask = ((u64) 1 << (u64) n_bits) - 1;
820 mask &= ~(((u64) 1 << (u64) shift) - 1);
821
822 mask <<= max_bits - n_bits;
823 shift += max_bits - n_bits;
824
825 switch (max_bits)
826 {
827#define _(n) \
828 case (n): \
829 if (edit_type == PG_EDIT_INCREMENT) \
830 v = do_setbits_increment (pg, s, buffers, n_buffers, \
831 BITS (u##n), n_bits, \
832 l0, v_min, v_max, v, \
833 mask, shift); \
834 else if (edit_type == PG_EDIT_RANDOM) \
835 do_setbits_random (pg, s, buffers, n_buffers, \
836 BITS (u##n), n_bits, \
837 l0, v_min, v_max, \
838 mask, shift); \
839 else /* edit_type == PG_EDIT_FIXED */ \
840 do_setbits_fixed (pg, s, buffers, n_buffers, \
841 BITS (u##n), n_bits, \
842 l0, v_min, v_max, \
843 mask, shift); \
844 goto done;
845
Calvin71e97c62016-08-19 16:23:14 -0400846 _(8);
847 _(16);
848 _(32);
849 _(64);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700850
851#undef _
852 }
853 }
854
Calvin71e97c62016-08-19 16:23:14 -0400855done:
Ed Warnickecb9cada2015-12-08 15:45:58 -0700856 return v;
857}
858
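/*
 * Set vlib_buffer_t current_length for each generated packet according to
 * the stream's packet-size edit type (fixed, increment, or random), and
 * credit the stream's RX interface counters with the packets and bytes
 * generated.
 */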
859static void
860pg_generate_set_lengths (pg_main_t * pg,
Calvin71e97c62016-08-19 16:23:14 -0400861 pg_stream_t * s, u32 * buffers, u32 n_buffers)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700862{
863 u64 v_min, v_max, length_sum;
864 pg_edit_type_t edit_type;
865
866 v_min = s->min_packet_bytes;
867 v_max = s->max_packet_bytes;
868 edit_type = s->packet_size_edit_type;
869
870 if (edit_type == PG_EDIT_INCREMENT)
871 s->last_increment_packet_size
872 = do_set_increment (pg, s, buffers, n_buffers,
873 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
874 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
875 /* is_net_byte_order */ 0,
876 /* want sum */ 1, &length_sum,
Calvin71e97c62016-08-19 16:23:14 -0400877 v_min, v_max, s->last_increment_packet_size);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700878
879 else if (edit_type == PG_EDIT_RANDOM)
880 do_set_random (pg, s, buffers, n_buffers,
881 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
882 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
883 /* is_net_byte_order */ 0,
884 /* want sum */ 1, &length_sum,
885 v_min, v_max);
886
Calvin71e97c62016-08-19 16:23:14 -0400887 else /* edit_type == PG_EDIT_FIXED */
Ed Warnickecb9cada2015-12-08 15:45:58 -0700888 {
889 do_set_fixed (pg, s, buffers, n_buffers,
890 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
891 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
892 /* is_net_byte_order */ 0,
893 v_min, v_max);
894 length_sum = v_min * n_buffers;
895 }
896
897 {
Calvin71e97c62016-08-19 16:23:14 -0400898 vnet_main_t *vnm = vnet_get_main ();
899 vnet_interface_main_t *im = &vnm->interface_main;
900 vnet_sw_interface_t *si =
901 vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700902
903 vlib_increment_combined_counter (im->combined_sw_if_counters
904 + VNET_INTERFACE_COUNTER_RX,
Damjan Marion586afd72017-04-05 19:18:20 +0200905 vlib_get_thread_index (),
Calvin71e97c62016-08-19 16:23:14 -0400906 si->sw_if_index, n_buffers, length_sum);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700907 }
908
Ed Warnickecb9cada2015-12-08 15:45:58 -0700909}
910
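/*
 * For multi-buffer packets, walk each chain and split the total packet
 * length across the per-buffer current_length fields, setting or clearing
 * VLIB_BUFFER_NEXT_PRESENT and freeing any wholly unused tail buffers.
 */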
911static void
912pg_generate_fix_multi_buffer_lengths (pg_main_t * pg,
913 pg_stream_t * s,
Calvin71e97c62016-08-19 16:23:14 -0400914 u32 * buffers, u32 n_buffers)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700915{
Damjan Marion64034362016-11-07 22:19:55 +0100916 vlib_main_t *vm = vlib_get_main ();
Calvin71e97c62016-08-19 16:23:14 -0400917 pg_buffer_index_t *pbi;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700918 uword n_bytes_left;
Calvin71e97c62016-08-19 16:23:14 -0400919 static u32 *unused_buffers = 0;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700920
921 while (n_buffers > 0)
922 {
Calvin71e97c62016-08-19 16:23:14 -0400923 vlib_buffer_t *b;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700924 u32 bi;
925
926 bi = buffers[0];
927 b = vlib_get_buffer (vm, bi);
928
929 /* Current length here is length of whole packet. */
930 n_bytes_left = b->current_length;
931
932 pbi = s->buffer_indices;
933 while (1)
934 {
935 uword n = clib_min (n_bytes_left, s->buffer_bytes);
936
937 b->current_length = n;
938 n_bytes_left -= n;
939 if (n_bytes_left > 0)
940 b->flags |= VLIB_BUFFER_NEXT_PRESENT;
941 else
942 b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
943
944 /* Return unused buffers to fifos. */
945 if (n == 0)
946 vec_add1 (unused_buffers, bi);
947
948 pbi++;
949 if (pbi >= vec_end (s->buffer_indices))
950 break;
951
952 bi = b->next_buffer;
953 b = vlib_get_buffer (vm, bi);
954 }
955 ASSERT (n_bytes_left == 0);
956
957 buffers += 1;
958 n_buffers -= 1;
959 }
960
961 if (vec_len (unused_buffers) > 0)
962 {
Calvin71e97c62016-08-19 16:23:14 -0400963 vlib_buffer_free_no_next (vm, unused_buffers, vec_len (unused_buffers));
Ed Warnickecb9cada2015-12-08 15:45:58 -0700964 _vec_len (unused_buffers) = 0;
965 }
966}
967
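/*
 * Apply the stream's non-fixed edits (incrementing and random fields) to
 * the freshly generated buffers, then run any per-group edit functions,
 * e.g. to compute IP lengths and checksums.
 */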
968static void
969pg_generate_edit (pg_main_t * pg,
Calvin71e97c62016-08-19 16:23:14 -0400970 pg_stream_t * s, u32 * buffers, u32 n_buffers)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700971{
Calvin71e97c62016-08-19 16:23:14 -0400972 pg_edit_t *e;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700973
974 vec_foreach (e, s->non_fixed_edits)
Calvin71e97c62016-08-19 16:23:14 -0400975 {
976 switch (e->type)
977 {
978 case PG_EDIT_RANDOM:
979 case PG_EDIT_INCREMENT:
Ed Warnickecb9cada2015-12-08 15:45:58 -0700980 {
Calvin71e97c62016-08-19 16:23:14 -0400981 u32 lo_bit, hi_bit;
982 u64 v_min, v_max;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700983
Calvin71e97c62016-08-19 16:23:14 -0400984 v_min = pg_edit_get_value (e, PG_EDIT_LO);
985 v_max = pg_edit_get_value (e, PG_EDIT_HI);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700986
Calvin71e97c62016-08-19 16:23:14 -0400987 hi_bit = (BITS (u8) * STRUCT_OFFSET_OF (vlib_buffer_t, data)
988 + BITS (u8) + e->lsb_bit_offset);
989 lo_bit = hi_bit - e->n_bits;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700990
Calvin71e97c62016-08-19 16:23:14 -0400991 e->last_increment_value
992 = do_it (pg, s, buffers, n_buffers, lo_bit, hi_bit, v_min, v_max,
993 e->last_increment_value, e->type);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700994 }
Calvin71e97c62016-08-19 16:23:14 -0400995 break;
996
997 case PG_EDIT_UNSPECIFIED:
998 break;
999
1000 default:
1001 /* Should not be any fixed edits left. */
1002 ASSERT (0);
1003 break;
1004 }
1005 }
Ed Warnickecb9cada2015-12-08 15:45:58 -07001006
  /* Call any edit functions to e.g. complete IP lengths, checksums, ... */
1008 {
1009 int i;
1010 for (i = vec_len (s->edit_groups) - 1; i >= 0; i--)
1011 {
Calvin71e97c62016-08-19 16:23:14 -04001012 pg_edit_group_t *g = s->edit_groups + i;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001013 if (g->edit_function)
1014 g->edit_function (pg, s, g, buffers, n_buffers);
1015 }
1016 }
1017}
1018
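/*
 * Chain each buffer to the corresponding buffer in next_buffers by setting
 * next_buffer and VLIB_BUFFER_NEXT_PRESENT, two buffers per iteration.
 */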
1019static void
1020pg_set_next_buffer_pointers (pg_main_t * pg,
1021 pg_stream_t * s,
Calvin71e97c62016-08-19 16:23:14 -04001022 u32 * buffers, u32 * next_buffers, u32 n_buffers)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001023{
Damjan Marion64034362016-11-07 22:19:55 +01001024 vlib_main_t *vm = vlib_get_main ();
Ed Warnickecb9cada2015-12-08 15:45:58 -07001025
1026 while (n_buffers >= 4)
1027 {
1028 u32 ni0, ni1;
Calvin71e97c62016-08-19 16:23:14 -04001029 vlib_buffer_t *b0, *b1;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001030
1031 b0 = vlib_get_buffer (vm, buffers[0]);
1032 b1 = vlib_get_buffer (vm, buffers[1]);
1033 ni0 = next_buffers[0];
1034 ni1 = next_buffers[1];
1035
1036 vlib_prefetch_buffer_with_index (vm, buffers[2], WRITE);
1037 vlib_prefetch_buffer_with_index (vm, buffers[3], WRITE);
1038
1039 b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1040 b1->flags |= VLIB_BUFFER_NEXT_PRESENT;
1041 b0->next_buffer = ni0;
1042 b1->next_buffer = ni1;
1043
1044 buffers += 2;
1045 next_buffers += 2;
1046 n_buffers -= 2;
1047 }
1048
1049 while (n_buffers > 0)
1050 {
1051 u32 ni0;
Calvin71e97c62016-08-19 16:23:14 -04001052 vlib_buffer_t *b0;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001053
1054 b0 = vlib_get_buffer (vm, buffers[0]);
1055 ni0 = next_buffers[0];
1056 buffers += 1;
1057 next_buffers += 1;
1058 n_buffers -= 1;
1059
1060 b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1061 b0->next_buffer = ni0;
1062 }
1063}
1064
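/*
 * (Re)initialize freshly allocated buffers: set the RX/TX sw_if_index from
 * the stream and either copy the stream's fixed packet data into b->data
 * (set_data) or assert that the data is already present.
 */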
1065static_always_inline void
Ed Warnickecb9cada2015-12-08 15:45:58 -07001066init_buffers_inline (vlib_main_t * vm,
1067 pg_stream_t * s,
1068 u32 * buffers,
Calvin71e97c62016-08-19 16:23:14 -04001069 u32 n_buffers, u32 data_offset, u32 n_data, u32 set_data)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001070{
Calvin71e97c62016-08-19 16:23:14 -04001071 u32 n_left, *b;
1072 u8 *data, *mask;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001073
Dave Barach3c8e1462019-01-05 16:51:41 -05001074 ASSERT (s->replay_packet_templates == 0);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001075
1076 data = s->fixed_packet_data + data_offset;
1077 mask = s->fixed_packet_data_mask + data_offset;
1078 if (data + n_data >= vec_end (s->fixed_packet_data))
1079 n_data = (data < vec_end (s->fixed_packet_data)
Calvin71e97c62016-08-19 16:23:14 -04001080 ? vec_end (s->fixed_packet_data) - data : 0);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001081 if (n_data > 0)
1082 {
1083 ASSERT (data + n_data <= vec_end (s->fixed_packet_data));
1084 ASSERT (mask + n_data <= vec_end (s->fixed_packet_data_mask));
1085 }
1086
1087 n_left = n_buffers;
1088 b = buffers;
1089
1090 while (n_left >= 4)
1091 {
1092 u32 bi0, bi1;
Calvin71e97c62016-08-19 16:23:14 -04001093 vlib_buffer_t *b0, *b1;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001094
1095 /* Prefetch next iteration. */
1096 vlib_prefetch_buffer_with_index (vm, b[2], STORE);
1097 vlib_prefetch_buffer_with_index (vm, b[3], STORE);
1098
1099 bi0 = b[0];
1100 bi1 = b[1];
1101 b += 2;
1102 n_left -= 2;
1103
1104 b0 = vlib_get_buffer (vm, bi0);
1105 b1 = vlib_get_buffer (vm, bi1);
1106
1107 vnet_buffer (b0)->sw_if_index[VLIB_RX] =
1108 vnet_buffer (b1)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1109
1110 vnet_buffer (b0)->sw_if_index[VLIB_TX] =
Dave Barach7d31ab22019-05-08 19:18:18 -04001111 vnet_buffer (b1)->sw_if_index[VLIB_TX] = s->sw_if_index[VLIB_TX];
Ed Warnickecb9cada2015-12-08 15:45:58 -07001112
1113 if (set_data)
1114 {
Dave Barach178cf492018-11-13 16:34:13 -05001115 clib_memcpy_fast (b0->data, data, n_data);
1116 clib_memcpy_fast (b1->data, data, n_data);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001117 }
1118 else
1119 {
1120 ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
1121 ASSERT (validate_buffer_data2 (b1, s, data_offset, n_data));
1122 }
1123 }
1124
1125 while (n_left >= 1)
1126 {
1127 u32 bi0;
Calvin71e97c62016-08-19 16:23:14 -04001128 vlib_buffer_t *b0;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001129
1130 bi0 = b[0];
1131 b += 1;
1132 n_left -= 1;
1133
1134 b0 = vlib_get_buffer (vm, bi0);
1135 vnet_buffer (b0)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
Dave Barach7d31ab22019-05-08 19:18:18 -04001136 vnet_buffer (b0)->sw_if_index[VLIB_TX] = s->sw_if_index[VLIB_TX];
Ed Warnickecb9cada2015-12-08 15:45:58 -07001137
1138 if (set_data)
Dave Barach178cf492018-11-13 16:34:13 -05001139 clib_memcpy_fast (b0->data, data, n_data);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001140 else
1141 ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
1142 }
1143}
1144
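/*
 * Allocate and initialize one fifo's worth of buffers for a non-replay
 * stream.  For the first buffer of each packet, also set packet lengths,
 * fix up multi-buffer chains, and run the stream's edits.  Returns the
 * number of buffers actually allocated.
 */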
Ed Warnickecb9cada2015-12-08 15:45:58 -07001145static u32
1146pg_stream_fill_helper (pg_main_t * pg,
1147 pg_stream_t * s,
1148 pg_buffer_index_t * bi,
Calvin71e97c62016-08-19 16:23:14 -04001149 u32 * buffers, u32 * next_buffers, u32 n_alloc)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001150{
Damjan Marion64034362016-11-07 22:19:55 +01001151 vlib_main_t *vm = vlib_get_main ();
Ed Warnickecb9cada2015-12-08 15:45:58 -07001152 uword is_start_of_packet = bi == s->buffer_indices;
1153 u32 n_allocated;
1154
Dave Barach3c8e1462019-01-05 16:51:41 -05001155 ASSERT (vec_len (s->replay_packet_templates) == 0);
1156
Damjan Marion671e60e2018-12-30 18:09:59 +01001157 n_allocated = vlib_buffer_alloc (vm, buffers, n_alloc);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001158 if (n_allocated == 0)
1159 return 0;
1160
Calvin71e97c62016-08-19 16:23:14 -04001161 /*
1162 * We can't assume we got all the buffers we asked for...
Ed Warnickecb9cada2015-12-08 15:45:58 -07001163 * This never worked until recently.
1164 */
1165 n_alloc = n_allocated;
1166
1167 /* Reinitialize buffers */
Damjan Marionef2e5842018-03-07 13:21:04 +01001168 init_buffers_inline
1169 (vm, s,
1170 buffers,
1171 n_alloc, (bi - s->buffer_indices) * s->buffer_bytes /* data offset */ ,
1172 s->buffer_bytes,
1173 /* set_data */ 1);
Calvin71e97c62016-08-19 16:23:14 -04001174
Ed Warnickecb9cada2015-12-08 15:45:58 -07001175 if (next_buffers)
1176 pg_set_next_buffer_pointers (pg, s, buffers, next_buffers, n_alloc);
1177
1178 if (is_start_of_packet)
1179 {
Dave Barach3c8e1462019-01-05 16:51:41 -05001180 pg_generate_set_lengths (pg, s, buffers, n_alloc);
1181 if (vec_len (s->buffer_indices) > 1)
1182 pg_generate_fix_multi_buffer_lengths (pg, s, buffers, n_alloc);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001183
Dave Barach3c8e1462019-01-05 16:51:41 -05001184 pg_generate_edit (pg, s, buffers, n_alloc);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001185 }
1186
1187 return n_alloc;
1188}
1189
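/*
 * Fill the stream fifo from pcap replay templates: compute how many
 * buffers the next n_alloc template packets need, allocate them, copy the
 * template data into (possibly chained) buffers, update the interface RX
 * counters, and advance the replay index.  Returns the number of packets
 * queued, or 0 on allocation failure.
 */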
1190static u32
Dave Barach3c8e1462019-01-05 16:51:41 -05001191pg_stream_fill_replay (pg_main_t * pg, pg_stream_t * s, u32 n_alloc)
1192{
1193 pg_buffer_index_t *bi;
1194 u32 n_left, i, l;
1195 u32 buffer_alloc_request = 0;
1196 u32 buffer_alloc_result;
1197 u32 current_buffer_index;
1198 u32 *buffers;
1199 vlib_main_t *vm = vlib_get_main ();
1200 vnet_main_t *vnm = vnet_get_main ();
Damjan Marion8934a042019-02-09 23:29:26 +01001201 u32 buf_sz = vlib_buffer_get_default_data_size (vm);
Dave Barach3c8e1462019-01-05 16:51:41 -05001202 vnet_interface_main_t *im = &vnm->interface_main;
1203 vnet_sw_interface_t *si;
1204
1205 buffers = pg->replay_buffers_by_thread[vm->thread_index];
1206 vec_reset_length (buffers);
1207 bi = s->buffer_indices;
1208
1209 n_left = n_alloc;
1210 i = s->current_replay_packet_index;
1211 l = vec_len (s->replay_packet_templates);
1212
1213 /* Figure out how many buffers we need */
1214 while (n_left > 0)
1215 {
1216 u8 *d0;
1217
1218 d0 = vec_elt (s->replay_packet_templates, i);
Damjan Marion5de3fec2019-02-06 14:22:32 +01001219 buffer_alloc_request += (vec_len (d0) + (buf_sz - 1)) / buf_sz;
Dave Barach3c8e1462019-01-05 16:51:41 -05001220
1221 i = ((i + 1) == l) ? 0 : i + 1;
1222 n_left--;
1223 }
1224
1225 ASSERT (buffer_alloc_request > 0);
1226 vec_validate (buffers, buffer_alloc_request - 1);
1227
1228 /* Allocate that many buffers */
1229 buffer_alloc_result = vlib_buffer_alloc (vm, buffers, buffer_alloc_request);
1230 if (buffer_alloc_result < buffer_alloc_request)
1231 {
1232 clib_warning ("alloc failure, got %d not %d", buffer_alloc_result,
1233 buffer_alloc_request);
1234 vlib_buffer_free_no_next (vm, buffers, buffer_alloc_result);
1235 pg->replay_buffers_by_thread[vm->thread_index] = buffers;
1236 return 0;
1237 }
1238
1239 /* Now go generate the buffers, and add them to the FIFO */
1240 n_left = n_alloc;
1241
1242 current_buffer_index = 0;
1243 i = s->current_replay_packet_index;
1244 l = vec_len (s->replay_packet_templates);
1245 while (n_left > 0)
1246 {
1247 u8 *d0;
1248 int not_last;
1249 u32 data_offset;
1250 u32 bytes_to_copy, bytes_this_chunk;
1251 vlib_buffer_t *b;
1252
1253 d0 = vec_elt (s->replay_packet_templates, i);
1254 data_offset = 0;
1255 bytes_to_copy = vec_len (d0);
1256
1257 /* Add head chunk to pg fifo */
1258 clib_fifo_add1 (bi->buffer_fifo, buffers[current_buffer_index]);
1259
1260 /* Copy the data */
1261 while (bytes_to_copy)
1262 {
Damjan Marion5de3fec2019-02-06 14:22:32 +01001263 bytes_this_chunk = clib_min (bytes_to_copy, buf_sz);
Dave Barach3c8e1462019-01-05 16:51:41 -05001264 ASSERT (current_buffer_index < vec_len (buffers));
1265 b = vlib_get_buffer (vm, buffers[current_buffer_index]);
1266 clib_memcpy_fast (b->data, d0 + data_offset, bytes_this_chunk);
1267 vnet_buffer (b)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
Dave Barach7d31ab22019-05-08 19:18:18 -04001268 vnet_buffer (b)->sw_if_index[VLIB_TX] = s->sw_if_index[VLIB_TX];
Dave Barach3c8e1462019-01-05 16:51:41 -05001269 b->flags = 0;
1270 b->next_buffer = 0;
1271 b->current_data = 0;
1272 b->current_length = bytes_this_chunk;
1273
1274 not_last = bytes_this_chunk < bytes_to_copy;
1275 if (not_last)
1276 {
1277 ASSERT (current_buffer_index < (vec_len (buffers) - 1));
1278 b->flags |= VLIB_BUFFER_NEXT_PRESENT;
1279 b->next_buffer = buffers[current_buffer_index + 1];
1280 }
1281 bytes_to_copy -= bytes_this_chunk;
1282 data_offset += bytes_this_chunk;
1283 current_buffer_index++;
1284 }
1285
1286 i = ((i + 1) == l) ? 0 : i + 1;
1287 n_left--;
1288 }
1289
1290 /* Update the interface counters */
1291 si = vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
1292 l = 0;
1293 for (i = 0; i < n_alloc; i++)
1294 l += vlib_buffer_index_length_in_chain (vm, buffers[i]);
1295 vlib_increment_combined_counter (im->combined_sw_if_counters
1296 + VNET_INTERFACE_COUNTER_RX,
1297 vlib_get_thread_index (),
1298 si->sw_if_index, n_alloc, l);
1299
1300 s->current_replay_packet_index += n_alloc;
1301 s->current_replay_packet_index %= vec_len (s->replay_packet_templates);
1302
1303 pg->replay_buffers_by_thread[vm->thread_index] = buffers;
1304 return n_alloc;
1305}
1306
1307
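/*
 * Top up the stream's buffer fifos so at least n_buffers packets are
 * ready, honoring the stream's packet-count limit.  Replay streams are
 * handled by pg_stream_fill_replay; otherwise each buffer-index fifo is
 * resized and refilled via pg_stream_fill_helper.  Returns the number of
 * packets now available in the head fifo.
 */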
1308static u32
Ed Warnickecb9cada2015-12-08 15:45:58 -07001309pg_stream_fill (pg_main_t * pg, pg_stream_t * s, u32 n_buffers)
1310{
Calvin71e97c62016-08-19 16:23:14 -04001311 pg_buffer_index_t *bi;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001312 word i, n_in_fifo, n_alloc, n_free, n_added;
Calvin71e97c62016-08-19 16:23:14 -04001313 u32 *tail, *start, *end, *last_tail, *last_start;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001314
1315 bi = s->buffer_indices;
1316
1317 n_in_fifo = clib_fifo_elts (bi->buffer_fifo);
1318 if (n_in_fifo >= n_buffers)
1319 return n_in_fifo;
1320
1321 n_alloc = n_buffers - n_in_fifo;
1322
1323 /* Round up, but never generate more than limit. */
1324 n_alloc = clib_max (VLIB_FRAME_SIZE, n_alloc);
1325
1326 if (s->n_packets_limit > 0
1327 && s->n_packets_generated + n_in_fifo + n_alloc >= s->n_packets_limit)
1328 {
1329 n_alloc = s->n_packets_limit - s->n_packets_generated - n_in_fifo;
1330 if (n_alloc < 0)
1331 n_alloc = 0;
1332 }
1333
Dave Barach3c8e1462019-01-05 16:51:41 -05001334 /*
1335 * Handle pcap replay directly
1336 */
1337 if (s->replay_packet_templates)
1338 return pg_stream_fill_replay (pg, s, n_alloc);
1339
Ed Warnickecb9cada2015-12-08 15:45:58 -07001340 /* All buffer fifos should have the same size. */
1341 if (CLIB_DEBUG > 0)
1342 {
1343 uword l = ~0, e;
1344 vec_foreach (bi, s->buffer_indices)
Calvin71e97c62016-08-19 16:23:14 -04001345 {
1346 e = clib_fifo_elts (bi->buffer_fifo);
1347 if (bi == s->buffer_indices)
1348 l = e;
1349 ASSERT (l == e);
1350 }
Ed Warnickecb9cada2015-12-08 15:45:58 -07001351 }
1352
1353 last_tail = last_start = 0;
1354 n_added = n_alloc;
1355
1356 for (i = vec_len (s->buffer_indices) - 1; i >= 0; i--)
1357 {
1358 bi = vec_elt_at_index (s->buffer_indices, i);
1359
1360 n_free = clib_fifo_free_elts (bi->buffer_fifo);
1361 if (n_free < n_alloc)
1362 clib_fifo_resize (bi->buffer_fifo, n_alloc - n_free);
1363
1364 tail = clib_fifo_advance_tail (bi->buffer_fifo, n_alloc);
1365 start = bi->buffer_fifo;
1366 end = clib_fifo_end (bi->buffer_fifo);
1367
1368 if (tail + n_alloc <= end)
Calvin71e97c62016-08-19 16:23:14 -04001369 {
1370 n_added =
1371 pg_stream_fill_helper (pg, s, bi, tail, last_tail, n_alloc);
1372 }
Ed Warnickecb9cada2015-12-08 15:45:58 -07001373 else
1374 {
1375 u32 n = clib_min (end - tail, n_alloc);
1376 n_added = pg_stream_fill_helper (pg, s, bi, tail, last_tail, n);
1377
1378 if (n_added == n && n_alloc > n_added)
Calvin71e97c62016-08-19 16:23:14 -04001379 {
1380 n_added += pg_stream_fill_helper
1381 (pg, s, bi, start, last_start, n_alloc - n_added);
1382 }
Ed Warnickecb9cada2015-12-08 15:45:58 -07001383 }
1384
1385 if (PREDICT_FALSE (n_added < n_alloc))
1386 tail = clib_fifo_advance_tail (bi->buffer_fifo, n_added - n_alloc);
1387
1388 last_tail = tail;
1389 last_start = start;
1390
1391 /* Verify that pkts in the fifo are properly allocated */
Ed Warnickecb9cada2015-12-08 15:45:58 -07001392 }
Calvin71e97c62016-08-19 16:23:14 -04001393
Ed Warnickecb9cada2015-12-08 15:45:58 -07001394 return n_in_fifo + n_added;
1395}
1396
Calvin71e97c62016-08-19 16:23:14 -04001397typedef struct
1398{
Ed Warnickecb9cada2015-12-08 15:45:58 -07001399 u32 stream_index;
1400
1401 u32 packet_length;
Neale Ranns3466c302017-02-16 07:45:03 -08001402 u32 sw_if_index;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001403
1404 /* Use pre data for packet data. */
1405 vlib_buffer_t buffer;
1406} pg_input_trace_t;
1407
Calvin71e97c62016-08-19 16:23:14 -04001408static u8 *
1409format_pg_input_trace (u8 * s, va_list * va)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001410{
Calvin71e97c62016-08-19 16:23:14 -04001411 vlib_main_t *vm = va_arg (*va, vlib_main_t *);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001412 CLIB_UNUSED (vlib_node_t * node) = va_arg (*va, vlib_node_t *);
Calvin71e97c62016-08-19 16:23:14 -04001413 pg_input_trace_t *t = va_arg (*va, pg_input_trace_t *);
1414 pg_main_t *pg = &pg_main;
1415 pg_stream_t *stream;
1416 vlib_node_t *n;
Christophe Fontained3c008d2017-10-02 18:10:54 +02001417 u32 indent = format_get_indent (s);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001418
1419 stream = 0;
Calvin71e97c62016-08-19 16:23:14 -04001420 if (!pool_is_free_index (pg->streams, t->stream_index))
Ed Warnickecb9cada2015-12-08 15:45:58 -07001421 stream = pool_elt_at_index (pg->streams, t->stream_index);
1422
1423 if (stream)
1424 s = format (s, "stream %v", pg->streams[t->stream_index].name);
1425 else
1426 s = format (s, "stream %d", t->stream_index);
1427
1428 s = format (s, ", %d bytes", t->packet_length);
Paul Vinciguerra1671d3b2019-06-25 21:02:40 -04001429 s = format (s, ", sw_if_index %d", t->sw_if_index);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001430
1431 s = format (s, "\n%U%U",
Damjan Marionbd846cd2017-11-21 13:12:41 +01001432 format_white_space, indent, format_vnet_buffer, &t->buffer);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001433
Calvin71e97c62016-08-19 16:23:14 -04001434 s = format (s, "\n%U", format_white_space, indent);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001435
1436 n = 0;
1437 if (stream)
1438 n = vlib_get_node (vm, stream->node_index);
1439
1440 if (n && n->format_buffer)
1441 s = format (s, "%U", n->format_buffer,
Calvin71e97c62016-08-19 16:23:14 -04001442 t->buffer.pre_data, sizeof (t->buffer.pre_data));
Ed Warnickecb9cada2015-12-08 15:45:58 -07001443 else
Calvin71e97c62016-08-19 16:23:14 -04001444 s = format (s, "%U",
Ed Warnickecb9cada2015-12-08 15:45:58 -07001445 format_hex_bytes, t->buffer.pre_data,
1446 ARRAY_LEN (t->buffer.pre_data));
1447 return s;
1448}
1449
1450static void
1451pg_input_trace (pg_main_t * pg,
Damjan Marion65cbcfe2019-02-20 15:34:00 +01001452 vlib_node_runtime_t * node, u32 stream_index, u32 next_index,
1453 u32 * buffers, u32 n_buffers)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001454{
Damjan Marion64034362016-11-07 22:19:55 +01001455 vlib_main_t *vm = vlib_get_main ();
Damjan Marion65cbcfe2019-02-20 15:34:00 +01001456 u32 *b, n_left;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001457
1458 n_left = n_buffers;
1459 b = buffers;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001460
1461 while (n_left >= 2)
1462 {
1463 u32 bi0, bi1;
Calvin71e97c62016-08-19 16:23:14 -04001464 vlib_buffer_t *b0, *b1;
1465 pg_input_trace_t *t0, *t1;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001466
1467 bi0 = b[0];
1468 bi1 = b[1];
1469 b += 2;
1470 n_left -= 2;
1471
1472 b0 = vlib_get_buffer (vm, bi0);
1473 b1 = vlib_get_buffer (vm, bi1);
1474
1475 vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
1476 vlib_trace_buffer (vm, node, next_index, b1, /* follow_chain */ 1);
1477
1478 t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
1479 t1 = vlib_add_trace (vm, node, b1, sizeof (t1[0]));
1480
1481 t0->stream_index = stream_index;
1482 t1->stream_index = stream_index;
1483
1484 t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
1485 t1->packet_length = vlib_buffer_length_in_chain (vm, b1);
1486
Neale Ranns3466c302017-02-16 07:45:03 -08001487 t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
1488 t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_RX];
1489
Dave Barach178cf492018-11-13 16:34:13 -05001490 clib_memcpy_fast (&t0->buffer, b0,
1491 sizeof (b0[0]) - sizeof (b0->pre_data));
1492 clib_memcpy_fast (&t1->buffer, b1,
1493 sizeof (b1[0]) - sizeof (b1->pre_data));
Ed Warnickecb9cada2015-12-08 15:45:58 -07001494
Dave Barach178cf492018-11-13 16:34:13 -05001495 clib_memcpy_fast (t0->buffer.pre_data, b0->data,
1496 sizeof (t0->buffer.pre_data));
1497 clib_memcpy_fast (t1->buffer.pre_data, b1->data,
1498 sizeof (t1->buffer.pre_data));
Ed Warnickecb9cada2015-12-08 15:45:58 -07001499 }
1500
1501 while (n_left >= 1)
1502 {
1503 u32 bi0;
Calvin71e97c62016-08-19 16:23:14 -04001504 vlib_buffer_t *b0;
1505 pg_input_trace_t *t0;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001506
1507 bi0 = b[0];
1508 b += 1;
1509 n_left -= 1;
1510
1511 b0 = vlib_get_buffer (vm, bi0);
1512
1513 vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
1514 t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
1515
1516 t0->stream_index = stream_index;
1517 t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
Neale Ranns3466c302017-02-16 07:45:03 -08001518 t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
Dave Barach178cf492018-11-13 16:34:13 -05001519 clib_memcpy_fast (&t0->buffer, b0,
1520 sizeof (b0[0]) - sizeof (b0->pre_data));
1521 clib_memcpy_fast (t0->buffer.pre_data, b0->data,
1522 sizeof (t0->buffer.pre_data));
Ed Warnickecb9cada2015-12-08 15:45:58 -07001523 }
1524}
1525
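/*
 * Move up to n_packets_to_generate packets from the stream's head fifo
 * into frames for the stream's next node, honoring any device-input
 * feature arc, marking ethernet-input frames as single-sw_if_index, and
 * tracing as requested.  Returns the number of packets enqueued.
 */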
1526static uword
1527pg_generate_packets (vlib_node_runtime_t * node,
1528 pg_main_t * pg,
Calvin71e97c62016-08-19 16:23:14 -04001529 pg_stream_t * s, uword n_packets_to_generate)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001530{
Damjan Marion64034362016-11-07 22:19:55 +01001531 vlib_main_t *vm = vlib_get_main ();
Calvin71e97c62016-08-19 16:23:14 -04001532 u32 *to_next, n_this_frame, n_left, n_trace, n_packets_in_fifo;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001533 uword n_packets_generated;
Calvin71e97c62016-08-19 16:23:14 -04001534 pg_buffer_index_t *bi, *bi0;
Damjan Mariond2017f62016-11-07 12:24:50 +01001535 u32 next_index = s->next_index;
1536 vnet_feature_main_t *fm = &feature_main;
1537 vnet_feature_config_main_t *cm;
1538 u8 feature_arc_index = fm->device_input_feature_arc_index;
1539 cm = &fm->feature_config_mains[feature_arc_index];
1540 u32 current_config_index = ~(u32) 0;
1541 int i;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001542
1543 bi0 = s->buffer_indices;
1544
1545 n_packets_in_fifo = pg_stream_fill (pg, s, n_packets_to_generate);
1546 n_packets_to_generate = clib_min (n_packets_in_fifo, n_packets_to_generate);
1547 n_packets_generated = 0;
1548
Damjan Mariond2017f62016-11-07 12:24:50 +01001549 if (PREDICT_FALSE
1550 (vnet_have_features (feature_arc_index, s->sw_if_index[VLIB_RX])))
1551 {
1552 current_config_index =
1553 vec_elt (cm->config_index_by_sw_if_index, s->sw_if_index[VLIB_RX]);
1554 vnet_get_config_data (&cm->config_main, &current_config_index,
1555 &next_index, 0);
1556 }
1557
Ed Warnickecb9cada2015-12-08 15:45:58 -07001558 while (n_packets_to_generate > 0)
1559 {
Calvin71e97c62016-08-19 16:23:14 -04001560 u32 *head, *start, *end;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001561
Damjan Marion650223c2018-11-14 16:55:53 +01001562 if (PREDICT_TRUE (next_index == VNET_DEVICE_INPUT_NEXT_ETHERNET_INPUT))
1563 {
1564 vlib_next_frame_t *nf;
1565 vlib_frame_t *f;
1566 ethernet_input_frame_t *ef;
1567 pg_interface_t *pi;
1568 vlib_get_new_next_frame (vm, node, next_index, to_next, n_left);
1569 nf = vlib_node_runtime_get_next_frame (vm, node, next_index);
Andreas Schultz58b2eb12019-07-15 15:40:56 +02001570 f = vlib_get_frame (vm, nf->frame);
Damjan Marion650223c2018-11-14 16:55:53 +01001571 f->flags = ETH_INPUT_FRAME_F_SINGLE_SW_IF_IDX;
1572
1573 ef = vlib_frame_scalar_args (f);
1574 pi = pool_elt_at_index (pg->interfaces, s->pg_if_index);
1575 ef->sw_if_index = pi->sw_if_index;
1576 ef->hw_if_index = pi->hw_if_index;
Damjan Marion296988d2019-02-21 20:24:54 +01001577 vlib_frame_no_append (f);
Damjan Marion650223c2018-11-14 16:55:53 +01001578 }
1579 else
1580 vlib_get_next_frame (vm, node, next_index, to_next, n_left);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001581
1582 n_this_frame = n_packets_to_generate;
1583 if (n_this_frame > n_left)
1584 n_this_frame = n_left;
1585
1586 start = bi0->buffer_fifo;
1587 end = clib_fifo_end (bi0->buffer_fifo);
1588 head = clib_fifo_head (bi0->buffer_fifo);
1589
1590 if (head + n_this_frame <= end)
Damjan Marion64d557c2019-01-18 20:03:41 +01001591 vlib_buffer_copy_indices (to_next, head, n_this_frame);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001592 else
1593 {
1594 u32 n = end - head;
Damjan Marion64d557c2019-01-18 20:03:41 +01001595 vlib_buffer_copy_indices (to_next + 0, head, n);
1596 vlib_buffer_copy_indices (to_next + n, start, n_this_frame - n);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001597 }
1598
Dave Barach3c8e1462019-01-05 16:51:41 -05001599 if (s->replay_packet_templates == 0)
1600 {
1601 vec_foreach (bi, s->buffer_indices)
1602 clib_fifo_advance_head (bi->buffer_fifo, n_this_frame);
1603 }
1604 else
1605 {
1606 clib_fifo_advance_head (bi0->buffer_fifo, n_this_frame);
1607 }
Ed Warnickecb9cada2015-12-08 15:45:58 -07001608
Damjan Mariond2017f62016-11-07 12:24:50 +01001609 if (current_config_index != ~(u32) 0)
1610 for (i = 0; i < n_this_frame; i++)
1611 {
1612 vlib_buffer_t *b;
1613 b = vlib_get_buffer (vm, to_next[i]);
Damjan Mariond2017f62016-11-07 12:24:50 +01001614 b->current_config_index = current_config_index;
Damjan Marionaa682a32018-04-26 22:45:40 +02001615 vnet_buffer (b)->feature_arc_index = feature_arc_index;
Damjan Mariond2017f62016-11-07 12:24:50 +01001616 }
1617
Ed Warnickecb9cada2015-12-08 15:45:58 -07001618 n_trace = vlib_get_trace_count (vm, node);
1619 if (n_trace > 0)
1620 {
1621 u32 n = clib_min (n_trace, n_this_frame);
Damjan Marion65cbcfe2019-02-20 15:34:00 +01001622 pg_input_trace (pg, node, s - pg->streams, next_index, to_next, n);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001623 vlib_set_trace_count (vm, node, n_trace - n);
1624 }
1625 n_packets_to_generate -= n_this_frame;
1626 n_packets_generated += n_this_frame;
1627 n_left -= n_this_frame;
Dave Barach3c8e1462019-01-05 16:51:41 -05001628 if (CLIB_DEBUG > 0)
1629 {
1630 int i;
1631 vlib_buffer_t *b;
1632
Damjan Marion2768cdc2019-02-20 14:11:51 +01001633 for (i = 0; i < n_this_frame; i++)
Dave Barach3c8e1462019-01-05 16:51:41 -05001634 {
1635 b = vlib_get_buffer (vm, to_next[i]);
1636 ASSERT ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0 ||
1637 b->current_length >= VLIB_BUFFER_MIN_CHAIN_SEG_SIZE);
1638 }
1639 }
Damjan Mariond2017f62016-11-07 12:24:50 +01001640 vlib_put_next_frame (vm, node, next_index, n_left);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001641 }
1642
1643 return n_packets_generated;
1644}
1645
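/*
 * Per-stream input: disable the stream once its packet limit is reached,
 * apply the configured packets-per-second rate limit via a time-based
 * accumulator, and generate at most one frame's worth of packets.
 */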
1646static uword
Calvin71e97c62016-08-19 16:23:14 -04001647pg_input_stream (vlib_node_runtime_t * node, pg_main_t * pg, pg_stream_t * s)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001648{
Damjan Marion64034362016-11-07 22:19:55 +01001649 vlib_main_t *vm = vlib_get_main ();
Ed Warnickecb9cada2015-12-08 15:45:58 -07001650 uword n_packets;
1651 f64 time_now, dt;
1652
Calvin71e97c62016-08-19 16:23:14 -04001653 if (s->n_packets_limit > 0 && s->n_packets_generated >= s->n_packets_limit)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001654 {
1655 pg_stream_enable_disable (pg, s, /* want_enabled */ 0);
1656 return 0;
1657 }
1658
1659 /* Apply rate limit. */
1660 time_now = vlib_time_now (vm);
1661 if (s->time_last_generate == 0)
1662 s->time_last_generate = time_now;
1663
1664 dt = time_now - s->time_last_generate;
1665 s->time_last_generate = time_now;
1666
1667 n_packets = VLIB_FRAME_SIZE;
1668 if (s->rate_packets_per_second > 0)
1669 {
1670 s->packet_accumulator += dt * s->rate_packets_per_second;
1671 n_packets = s->packet_accumulator;
1672
1673 /* Never allow accumulator to grow if we get behind. */
1674 s->packet_accumulator -= n_packets;
1675 }
1676
1677 /* Apply fixed limit. */
1678 if (s->n_packets_limit > 0
1679 && s->n_packets_generated + n_packets > s->n_packets_limit)
1680 n_packets = s->n_packets_limit - s->n_packets_generated;
1681
1682 /* Generate up to one frame's worth of packets. */
1683 if (n_packets > VLIB_FRAME_SIZE)
1684 n_packets = VLIB_FRAME_SIZE;
1685
1686 if (n_packets > 0)
1687 n_packets = pg_generate_packets (node, pg, s, n_packets);
1688
1689 s->n_packets_generated += n_packets;
1690
1691 return n_packets;
1692}
1693
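/*
 * pg input node function: walk the streams enabled on this worker and
 * generate packets for each; the return value is the total number of
 * packets generated in this dispatch.
 */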
1694uword
Calvin71e97c62016-08-19 16:23:14 -04001695pg_input (vlib_main_t * vm, vlib_node_runtime_t * node, vlib_frame_t * frame)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001696{
1697 uword i;
Calvin71e97c62016-08-19 16:23:14 -04001698 pg_main_t *pg = &pg_main;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001699 uword n_packets = 0;
Damjan Marion3a4ed392016-11-08 13:20:42 +01001700 u32 worker_index = 0;
1701
1702 if (vlib_num_workers ())
1703 worker_index = vlib_get_current_worker_index ();
Ed Warnickecb9cada2015-12-08 15:45:58 -07001704
Calvin71e97c62016-08-19 16:23:14 -04001705 /* *INDENT-OFF* */
Damjan Marion3a4ed392016-11-08 13:20:42 +01001706 clib_bitmap_foreach (i, pg->enabled_streams[worker_index], ({
Damjan Marion64034362016-11-07 22:19:55 +01001707 pg_stream_t *s = vec_elt_at_index (pg->streams, i);
Damjan Marion3a4ed392016-11-08 13:20:42 +01001708 n_packets += pg_input_stream (node, pg, s);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001709 }));
Calvin71e97c62016-08-19 16:23:14 -04001710 /* *INDENT-ON* */
Ed Warnickecb9cada2015-12-08 15:45:58 -07001711
1712 return n_packets;
1713}
1714
Calvin71e97c62016-08-19 16:23:14 -04001715/* *INDENT-OFF* */
Ed Warnickecb9cada2015-12-08 15:45:58 -07001716VLIB_REGISTER_NODE (pg_input_node) = {
1717 .function = pg_input,
1718 .name = "pg-input",
Damjan Marion51327ac2016-11-09 11:59:42 +01001719 .sibling_of = "device-input",
Ed Warnickecb9cada2015-12-08 15:45:58 -07001720 .type = VLIB_NODE_TYPE_INPUT,
1721
1722 .format_trace = format_pg_input_trace,
1723
1724 /* Input node will be left disabled until a stream is active. */
1725 .state = VLIB_NODE_STATE_DISABLED,
1726};
Calvin71e97c62016-08-19 16:23:14 -04001727/* *INDENT-ON* */
1728
1729/*
1730 * fd.io coding-style-patch-verification: ON
1731 *
1732 * Local Variables:
1733 * eval: (c-set-style "gnu")
1734 * End:
1735 */