blob: c47dfe270e7867dfa91945bea3f5d827f6bf06a4 [file] [log] [blame]
Ed Warnickecb9cada2015-12-08 15:45:58 -07001/*
2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15/*
16 * pg_input.c: buffer generator input
17 *
18 * Copyright (c) 2008 Eliot Dresselhaus
19 *
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
27 *
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
30 *
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
38 */
39
 /*
  * To be honest, the packet generator needs an extreme
  * makeover. Two key assumptions which drove the current implementation
  * are no longer true. First, that buffer managers implement a
  * post-TX recycle list. Second, that packet generator performance
  * is first-order important.
  */
47
Ed Warnickecb9cada2015-12-08 15:45:58 -070048#include <vlib/vlib.h>
49#include <vnet/pg/pg.h>
50#include <vnet/vnet.h>
Damjan Marion650223c2018-11-14 16:55:53 +010051#include <vnet/ethernet/ethernet.h>
Damjan Mariond2017f62016-11-07 12:24:50 +010052#include <vnet/feature/feature.h>
Mohsin Kazmi22e9cfd2019-07-23 11:54:48 +020053#include <vnet/ip/ip4_packet.h>
54#include <vnet/ip/ip6_packet.h>
55#include <vnet/udp/udp_packet.h>
Damjan Mariond2017f62016-11-07 12:24:50 +010056#include <vnet/devices/devices.h>
Ed Warnickecb9cada2015-12-08 15:45:58 -070057
Ed Warnickecb9cada2015-12-08 15:45:58 -070058static int
59validate_buffer_data2 (vlib_buffer_t * b, pg_stream_t * s,
60 u32 data_offset, u32 n_bytes)
61{
Calvin71e97c62016-08-19 16:23:14 -040062 u8 *bd, *pd, *pm;
Ed Warnickecb9cada2015-12-08 15:45:58 -070063 u32 i;
64
65 bd = b->data;
66 pd = s->fixed_packet_data + data_offset;
67 pm = s->fixed_packet_data_mask + data_offset;
68
69 if (pd + n_bytes >= vec_end (s->fixed_packet_data))
70 n_bytes = (pd < vec_end (s->fixed_packet_data)
Calvin71e97c62016-08-19 16:23:14 -040071 ? vec_end (s->fixed_packet_data) - pd : 0);
Ed Warnickecb9cada2015-12-08 15:45:58 -070072
73 for (i = 0; i < n_bytes; i++)
74 if ((bd[i] & pm[i]) != pd[i])
75 break;
76
77 if (i >= n_bytes)
78 return 1;
79
Damjan Marionbd846cd2017-11-21 13:12:41 +010080 clib_warning ("buffer %U", format_vnet_buffer, b);
Ed Warnickecb9cada2015-12-08 15:45:58 -070081 clib_warning ("differ at index %d", i);
82 clib_warning ("is %U", format_hex_bytes, bd, n_bytes);
83 clib_warning ("mask %U", format_hex_bytes, pm, n_bytes);
84 clib_warning ("expect %U", format_hex_bytes, pd, n_bytes);
85 return 0;
86}
87
88static int
89validate_buffer_data (vlib_buffer_t * b, pg_stream_t * s)
Calvin71e97c62016-08-19 16:23:14 -040090{
91 return validate_buffer_data2 (b, s, 0, s->buffer_bytes);
92}
Ed Warnickecb9cada2015-12-08 15:45:58 -070093
94always_inline void
Calvin71e97c62016-08-19 16:23:14 -040095set_1 (void *a0,
96 u64 v0, u64 v_min, u64 v_max, u32 n_bits, u32 is_net_byte_order)
Ed Warnickecb9cada2015-12-08 15:45:58 -070097{
98 ASSERT (v0 >= v_min && v0 <= v_max);
99 if (n_bits == BITS (u8))
100 {
101 ((u8 *) a0)[0] = v0;
102 }
103 else if (n_bits == BITS (u16))
104 {
105 if (is_net_byte_order)
106 v0 = clib_host_to_net_u16 (v0);
107 clib_mem_unaligned (a0, u16) = v0;
108 }
109 else if (n_bits == BITS (u32))
110 {
111 if (is_net_byte_order)
112 v0 = clib_host_to_net_u32 (v0);
113 clib_mem_unaligned (a0, u32) = v0;
114 }
115 else if (n_bits == BITS (u64))
116 {
117 if (is_net_byte_order)
118 v0 = clib_host_to_net_u64 (v0);
119 clib_mem_unaligned (a0, u64) = v0;
120 }
121}
122
123always_inline void
Calvin71e97c62016-08-19 16:23:14 -0400124set_2 (void *a0, void *a1,
Ed Warnickecb9cada2015-12-08 15:45:58 -0700125 u64 v0, u64 v1,
126 u64 v_min, u64 v_max,
Calvin71e97c62016-08-19 16:23:14 -0400127 u32 n_bits, u32 is_net_byte_order, u32 is_increment)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700128{
129 ASSERT (v0 >= v_min && v0 <= v_max);
130 ASSERT (v1 >= v_min && v1 <= (v_max + is_increment));
131 if (n_bits == BITS (u8))
132 {
133 ((u8 *) a0)[0] = v0;
134 ((u8 *) a1)[0] = v1;
135 }
136 else if (n_bits == BITS (u16))
137 {
138 if (is_net_byte_order)
139 {
140 v0 = clib_host_to_net_u16 (v0);
141 v1 = clib_host_to_net_u16 (v1);
142 }
143 clib_mem_unaligned (a0, u16) = v0;
144 clib_mem_unaligned (a1, u16) = v1;
145 }
146 else if (n_bits == BITS (u32))
147 {
148 if (is_net_byte_order)
149 {
150 v0 = clib_host_to_net_u32 (v0);
151 v1 = clib_host_to_net_u32 (v1);
152 }
153 clib_mem_unaligned (a0, u32) = v0;
154 clib_mem_unaligned (a1, u32) = v1;
155 }
156 else if (n_bits == BITS (u64))
157 {
158 if (is_net_byte_order)
159 {
160 v0 = clib_host_to_net_u64 (v0);
161 v1 = clib_host_to_net_u64 (v1);
162 }
163 clib_mem_unaligned (a0, u64) = v0;
164 clib_mem_unaligned (a1, u64) = v1;
165 }
166}
167
/*
 * Write the fixed value v_min into the n_bits-wide field at
 * byte_offset of every buffer in 'buffers', in network byte order.
 * (For the fixed case the caller passes v_min as the value; v_max
 * only bounds the assertion inside set_1/set_2.)
 */
static_always_inline void
do_set_fixed (pg_main_t * pg,
	      pg_stream_t * s,
	      u32 * buffers,
	      u32 n_buffers,
	      u32 n_bits,
	      u32 byte_offset, u32 is_net_byte_order, u64 v_min, u64 v_max)
{
  vlib_main_t *vm = vlib_get_main ();

  while (n_buffers >= 4)
    {
      vlib_buffer_t *b0, *b1, *b2, *b3;
      void *a0, *a1;

      /* Two buffers are written per iteration; b2/b3 are fetched
         only to prefetch their target bytes for later iterations. */
      b0 = vlib_get_buffer (vm, buffers[0]);
      b1 = vlib_get_buffer (vm, buffers[1]);
      b2 = vlib_get_buffer (vm, buffers[2]);
      b3 = vlib_get_buffer (vm, buffers[3]);
      buffers += 2;
      n_buffers -= 2;

      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      set_2 (a0, a1, v_min, v_min, v_min, v_max, n_bits, is_net_byte_order,
	     /* is_increment */ 0);

      /* Debug builds re-check the whole buffer against the stream's
         fixed data/mask after each write. */
      ASSERT (validate_buffer_data (b0, s));
      ASSERT (validate_buffer_data (b1, s));
    }

  /* Single-buffer tail loop. */
  while (n_buffers > 0)
    {
      vlib_buffer_t *b0;
      void *a0;

      b0 = vlib_get_buffer (vm, buffers[0]);
      buffers += 1;
      n_buffers -= 1;

      a0 = (void *) b0 + byte_offset;

      set_1 (a0, v_min, v_min, v_max, n_bits, is_net_byte_order);

      ASSERT (validate_buffer_data (b0, s));
    }
}
218
/*
 * Write an incrementing sequence, starting at v, into the
 * n_bits-wide field at byte_offset of each buffer, wrapping from
 * v_max back to v_min.  When want_sum is set, *sum_result receives
 * the sum of all values written (used for packet-length byte
 * accounting).  Returns the next sequence value for the following
 * burst.
 */
static_always_inline u64
do_set_increment (pg_main_t * pg,
		  pg_stream_t * s,
		  u32 * buffers,
		  u32 n_buffers,
		  u32 n_bits,
		  u32 byte_offset,
		  u32 is_net_byte_order,
		  u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max, u64 v)
{
  vlib_main_t *vm = vlib_get_main ();
  u64 sum = 0;

  ASSERT (v >= v_min && v <= v_max);

  while (n_buffers >= 4)
    {
      vlib_buffer_t *b0, *b1, *b2, *b3;
      void *a0, *a1;
      u64 v_old;

      /* Two buffers written per iteration; b2/b3 are prefetch
         targets only. */
      b0 = vlib_get_buffer (vm, buffers[0]);
      b1 = vlib_get_buffer (vm, buffers[1]);
      b2 = vlib_get_buffer (vm, buffers[2]);
      b3 = vlib_get_buffer (vm, buffers[3]);
      buffers += 2;
      n_buffers -= 2;

      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      /* Optimistically write v_old and v_old + 1, advancing by 2. */
      v_old = v;
      v = v_old + 2;
      v = v > v_max ? v_min : v;
      set_2 (a0, a1,
	     v_old + 0, v_old + 1, v_min, v_max, n_bits, is_net_byte_order,
	     /* is_increment */ 1);

      /* Sum of the pair just written: v_old + (v_old + 1). */
      if (want_sum)
	sum += 2 * v_old + 1;

      /* If the pair straddled v_max, undo the optimistic sum and
         redo both stores one at a time with proper wrap-around. */
      if (PREDICT_FALSE (v_old + 1 > v_max))
	{
	  if (want_sum)
	    sum -= 2 * v_old + 1;

	  v = v_old;
	  set_1 (a0, v + 0, v_min, v_max, n_bits, is_net_byte_order);
	  if (want_sum)
	    sum += v;
	  v += 1;

	  v = v > v_max ? v_min : v;
	  set_1 (a1, v + 0, v_min, v_max, n_bits, is_net_byte_order);
	  if (want_sum)
	    sum += v;
	  v += 1;
	}

      ASSERT (validate_buffer_data (b0, s));
      ASSERT (validate_buffer_data (b1, s));
    }

  /* Single-buffer tail loop. */
  while (n_buffers > 0)
    {
      vlib_buffer_t *b0;
      void *a0;
      u64 v_old;

      b0 = vlib_get_buffer (vm, buffers[0]);
      buffers += 1;
      n_buffers -= 1;

      a0 = (void *) b0 + byte_offset;

      v_old = v;
      if (want_sum)
	sum += v_old;
      v += 1;
      v = v > v_max ? v_min : v;

      ASSERT (v_old >= v_min && v_old <= v_max);
      set_1 (a0, v_old, v_min, v_max, n_bits, is_net_byte_order);

      ASSERT (validate_buffer_data (b0, s));
    }

  if (want_sum)
    *sum_result = sum;

  return v;
}
313
/*
 * Write a uniform-ish random value in [v_min, v_max] into the
 * n_bits-wide field at byte_offset of each buffer.  Random bytes
 * come from the vlib per-main random buffer.  When want_sum is set,
 * *sum_result receives the sum of all values written.
 */
static_always_inline void
do_set_random (pg_main_t * pg,
	       pg_stream_t * s,
	       u32 * buffers,
	       u32 n_buffers,
	       u32 n_bits,
	       u32 byte_offset,
	       u32 is_net_byte_order,
	       u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max)
{
  vlib_main_t *vm = vlib_get_main ();
  u64 v_diff = v_max - v_min + 1;
  /* Power-of-2 mask covering the range width; values masked with
     this may land above v_max and are folded back below. */
  u64 r_mask = max_pow2 (v_diff) - 1;
  u64 v0, v1;
  u64 sum = 0;
  void *random_data;

  /* Pre-fetch enough random bytes for the entire burst. */
  random_data = clib_random_buffer_get_data
    (&vm->random_buffer, n_buffers * n_bits / BITS (u8));

  v0 = v1 = v_min;

  while (n_buffers >= 4)
    {
      vlib_buffer_t *b0, *b1, *b2, *b3;
      void *a0, *a1;
      u64 r0 = 0, r1 = 0;	/* warnings be gone */

      /* Two buffers written per iteration; b2/b3 prefetched only. */
      b0 = vlib_get_buffer (vm, buffers[0]);
      b1 = vlib_get_buffer (vm, buffers[1]);
      b2 = vlib_get_buffer (vm, buffers[2]);
      b3 = vlib_get_buffer (vm, buffers[3]);
      buffers += 2;
      n_buffers -= 2;

      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      /* Consume two n_bits-wide random words from the stream. */
      switch (n_bits)
	{
#define _(n)					\
	  case BITS (u##n):			\
	    {					\
	      u##n * r = random_data;		\
	      r0 = r[0];			\
	      r1 = r[1];			\
	      random_data = r + 2;		\
	    }					\
	  break;

	  _(8);
	  _(16);
	  _(32);
	  _(64);

#undef _
	}

      /* Add power of 2 sized random number which may be out of range. */
      v0 += r0 & r_mask;
      v1 += r1 & r_mask;

      /* Twice should be enough to reduce to v_min .. v_max range. */
      v0 = v0 > v_max ? v0 - v_diff : v0;
      v1 = v1 > v_max ? v1 - v_diff : v1;
      v0 = v0 > v_max ? v0 - v_diff : v0;
      v1 = v1 > v_max ? v1 - v_diff : v1;

      if (want_sum)
	sum += v0 + v1;

      set_2 (a0, a1, v0, v1, v_min, v_max, n_bits, is_net_byte_order,
	     /* is_increment */ 0);

      ASSERT (validate_buffer_data (b0, s));
      ASSERT (validate_buffer_data (b1, s));
    }

  /* Single-buffer tail loop. */
  while (n_buffers > 0)
    {
      vlib_buffer_t *b0;
      void *a0;
      u64 r0 = 0;		/* warnings be gone */

      b0 = vlib_get_buffer (vm, buffers[0]);
      buffers += 1;
      n_buffers -= 1;

      a0 = (void *) b0 + byte_offset;

      switch (n_bits)
	{
#define _(n)					\
	  case BITS (u##n):			\
	    {					\
	      u##n * r = random_data;		\
	      r0 = r[0];			\
	      random_data = r + 1;		\
	    }					\
	  break;

	  _(8);
	  _(16);
	  _(32);
	  _(64);

#undef _
	}

      /* Add power of 2 sized random number which may be out of range. */
      v0 += r0 & r_mask;

      /* Twice should be enough to reduce to v_min .. v_max range. */
      v0 = v0 > v_max ? v0 - v_diff : v0;
      v0 = v0 > v_max ? v0 - v_diff : v0;

      if (want_sum)
	sum += v0;

      set_1 (a0, v0, v_min, v_max, n_bits, is_net_byte_order);

      ASSERT (validate_buffer_data (b0, s));
    }

  if (want_sum)
    *sum_result = sum;
}
443
/* Read-modify-write a masked bit field inside the big-endian t-wide
   integer at a##i: clear the bits covered by 'mask', then or in
   (v##i << shift).  'mask' and 'shift' must be in scope at the
   expansion site.  Used by setbits_1 / setbits_2 below. */
#define _(i,t) \
  clib_mem_unaligned (a##i, t) = \
    clib_host_to_net_##t ((clib_net_to_host_mem_##t (a##i) &~ mask) \
			  | (v##i << shift))

/*
 * Write v0 into a sub-byte-aligned bit field at a0.  max_bits is
 * the container width (8/16/32/64); mask/shift locate the field
 * within the container.  Bits outside the mask are preserved.
 */
always_inline void
setbits_1 (void *a0,
	   u64 v0,
	   u64 v_min, u64 v_max,
	   u32 max_bits, u32 n_bits, u64 mask, u32 shift)
{
  ASSERT (v0 >= v_min && v0 <= v_max);
  if (max_bits == BITS (u8))
    ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);

  else if (max_bits == BITS (u16))
    {
      _(0, u16);
    }
  else if (max_bits == BITS (u32))
    {
      _(0, u32);
    }
  else if (max_bits == BITS (u64))
    {
      _(0, u64);
    }
}
472
/*
 * Dual-store variant of setbits_1: write v0 into the bit field at
 * a0 and v1 into the field at a1.  When is_increment is set, v1 may
 * be v_max + 1 (the caller fixes up wrap-around afterwards).
 */
always_inline void
setbits_2 (void *a0, void *a1,
	   u64 v0, u64 v1,
	   u64 v_min, u64 v_max,
	   u32 max_bits, u32 n_bits, u64 mask, u32 shift, u32 is_increment)
{
  ASSERT (v0 >= v_min && v0 <= v_max);
  ASSERT (v1 >= v_min && v1 <= v_max + is_increment);
  if (max_bits == BITS (u8))
    {
      ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
      ((u8 *) a1)[0] = (((u8 *) a1)[0] & ~mask) | (v1 << shift);
    }

  else if (max_bits == BITS (u16))
    {
      _(0, u16);
      _(1, u16);
    }
  else if (max_bits == BITS (u32))
    {
      _(0, u32);
      _(1, u32);
    }
  else if (max_bits == BITS (u64))
    {
      _(0, u64);
      _(1, u64);
    }
}

#undef _
505
/*
 * Bit-field analogue of do_set_fixed: write the fixed value v_min
 * into a sub-byte-aligned field (located by mask/shift inside a
 * max_bits-wide container at byte_offset) of every buffer.
 */
static_always_inline void
do_setbits_fixed (pg_main_t * pg,
		  pg_stream_t * s,
		  u32 * buffers,
		  u32 n_buffers,
		  u32 max_bits,
		  u32 n_bits,
		  u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
{
  vlib_main_t *vm = vlib_get_main ();

  while (n_buffers >= 4)
    {
      vlib_buffer_t *b0, *b1, *b2, *b3;
      void *a0, *a1;

      /* Two buffers written per iteration; b2/b3 prefetched only. */
      b0 = vlib_get_buffer (vm, buffers[0]);
      b1 = vlib_get_buffer (vm, buffers[1]);
      b2 = vlib_get_buffer (vm, buffers[2]);
      b3 = vlib_get_buffer (vm, buffers[3]);
      buffers += 2;
      n_buffers -= 2;

      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      setbits_2 (a0, a1,
		 v_min, v_min, v_min, v_max, max_bits, n_bits, mask, shift,
		 /* is_increment */ 0);

      ASSERT (validate_buffer_data (b0, s));
      ASSERT (validate_buffer_data (b1, s));
    }

  /* Single-buffer tail loop. */
  while (n_buffers > 0)
    {
      vlib_buffer_t *b0;
      void *a0;

      b0 = vlib_get_buffer (vm, buffers[0]);
      buffers += 1;
      n_buffers -= 1;

      a0 = (void *) b0 + byte_offset;

      setbits_1 (a0, v_min, v_min, v_max, max_bits, n_bits, mask, shift);
      ASSERT (validate_buffer_data (b0, s));
    }
}
557
/*
 * Bit-field analogue of do_set_increment: write an incrementing
 * sequence starting at v into the masked field of each buffer,
 * wrapping from v_max back to v_min.  Returns the next sequence
 * value for the following burst.  (No sum accounting here, unlike
 * do_set_increment.)
 */
static_always_inline u64
do_setbits_increment (pg_main_t * pg,
		      pg_stream_t * s,
		      u32 * buffers,
		      u32 n_buffers,
		      u32 max_bits,
		      u32 n_bits,
		      u32 byte_offset,
		      u64 v_min, u64 v_max, u64 v, u64 mask, u32 shift)
{
  vlib_main_t *vm = vlib_get_main ();

  ASSERT (v >= v_min && v <= v_max);

  while (n_buffers >= 4)
    {
      vlib_buffer_t *b0, *b1, *b2, *b3;
      void *a0, *a1;
      u64 v_old;

      /* Two buffers written per iteration; b2/b3 prefetched only. */
      b0 = vlib_get_buffer (vm, buffers[0]);
      b1 = vlib_get_buffer (vm, buffers[1]);
      b2 = vlib_get_buffer (vm, buffers[2]);
      b3 = vlib_get_buffer (vm, buffers[3]);
      buffers += 2;
      n_buffers -= 2;

      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      /* Optimistically write v_old and v_old + 1, advancing by 2. */
      v_old = v;
      v = v_old + 2;
      v = v > v_max ? v_min : v;
      setbits_2 (a0, a1,
		 v_old + 0, v_old + 1,
		 v_min, v_max, max_bits, n_bits, mask, shift,
		 /* is_increment */ 1);

      /* If the pair straddled v_max, redo both stores one at a time
         with proper wrap-around. */
      if (PREDICT_FALSE (v_old + 1 > v_max))
	{
	  v = v_old;
	  setbits_1 (a0, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
	  v += 1;

	  v = v > v_max ? v_min : v;
	  setbits_1 (a1, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
	  v += 1;
	}
      ASSERT (validate_buffer_data (b0, s));
      ASSERT (validate_buffer_data (b1, s));
    }

  /* Single-buffer tail loop. */
  while (n_buffers > 0)
    {
      vlib_buffer_t *b0;
      void *a0;
      u64 v_old;

      b0 = vlib_get_buffer (vm, buffers[0]);
      buffers += 1;
      n_buffers -= 1;

      a0 = (void *) b0 + byte_offset;

      v_old = v;
      v = v_old + 1;
      v = v > v_max ? v_min : v;

      ASSERT (v_old >= v_min && v_old <= v_max);
      setbits_1 (a0, v_old, v_min, v_max, max_bits, n_bits, mask, shift);

      ASSERT (validate_buffer_data (b0, s));
    }

  return v;
}
636
/*
 * Bit-field analogue of do_set_random: write a random value in
 * [v_min, v_max] into the masked field of each buffer, drawing
 * max_bits-wide words from the vlib per-main random buffer.
 */
static_always_inline void
do_setbits_random (pg_main_t * pg,
		   pg_stream_t * s,
		   u32 * buffers,
		   u32 n_buffers,
		   u32 max_bits,
		   u32 n_bits,
		   u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
{
  vlib_main_t *vm = vlib_get_main ();
  u64 v_diff = v_max - v_min + 1;
  /* Power-of-2 mask covering the range width; masked values above
     v_max are folded back into range below. */
  u64 r_mask = max_pow2 (v_diff) - 1;
  u64 v0, v1;
  void *random_data;

  /* Pre-fetch enough random bytes for the entire burst. */
  random_data = clib_random_buffer_get_data
    (&vm->random_buffer, n_buffers * max_bits / BITS (u8));
  v0 = v1 = v_min;

  while (n_buffers >= 4)
    {
      vlib_buffer_t *b0, *b1, *b2, *b3;
      void *a0, *a1;
      u64 r0 = 0, r1 = 0;	/* warnings be gone */

      /* Two buffers written per iteration; b2/b3 prefetched only. */
      b0 = vlib_get_buffer (vm, buffers[0]);
      b1 = vlib_get_buffer (vm, buffers[1]);
      b2 = vlib_get_buffer (vm, buffers[2]);
      b3 = vlib_get_buffer (vm, buffers[3]);
      buffers += 2;
      n_buffers -= 2;

      a0 = (void *) b0 + byte_offset;
      a1 = (void *) b1 + byte_offset;
      CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
      CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);

      /* Consume two max_bits-wide random words from the stream. */
      switch (max_bits)
	{
#define _(n)					\
	  case BITS (u##n):			\
	    {					\
	      u##n * r = random_data;		\
	      r0 = r[0];			\
	      r1 = r[1];			\
	      random_data = r + 2;		\
	    }					\
	  break;

	  _(8);
	  _(16);
	  _(32);
	  _(64);

#undef _
	}

      /* Add power of 2 sized random number which may be out of range. */
      v0 += r0 & r_mask;
      v1 += r1 & r_mask;

      /* Twice should be enough to reduce to v_min .. v_max range. */
      v0 = v0 > v_max ? v0 - v_diff : v0;
      v1 = v1 > v_max ? v1 - v_diff : v1;
      v0 = v0 > v_max ? v0 - v_diff : v0;
      v1 = v1 > v_max ? v1 - v_diff : v1;

      setbits_2 (a0, a1, v0, v1, v_min, v_max, max_bits, n_bits, mask, shift,
		 /* is_increment */ 0);

      ASSERT (validate_buffer_data (b0, s));
      ASSERT (validate_buffer_data (b1, s));
    }

  /* Single-buffer tail loop. */
  while (n_buffers > 0)
    {
      vlib_buffer_t *b0;
      void *a0;
      u64 r0 = 0;		/* warnings be gone */

      b0 = vlib_get_buffer (vm, buffers[0]);
      buffers += 1;
      n_buffers -= 1;

      a0 = (void *) b0 + byte_offset;

      switch (max_bits)
	{
#define _(n)					\
	  case BITS (u##n):			\
	    {					\
	      u##n * r = random_data;		\
	      r0 = r[0];			\
	      random_data = r + 1;		\
	    }					\
	  break;

	  _(8);
	  _(16);
	  _(32);
	  _(64);

#undef _
	}

      /* Add power of 2 sized random number which may be out of range. */
      v0 += r0 & r_mask;

      /* Twice should be enough to reduce to v_min .. v_max range. */
      v0 = v0 > v_max ? v0 - v_diff : v0;
      v0 = v0 > v_max ? v0 - v_diff : v0;

      setbits_1 (a0, v0, v_min, v_max, max_bits, n_bits, mask, shift);

      ASSERT (validate_buffer_data (b0, s));
    }
}
754
/*
 * Dispatch a single edit over a burst of buffers.  The edit covers
 * bits [lo_bit, hi_bit) of the buffer (bit offsets relative to the
 * start of vlib_buffer_t).  Byte-aligned edits go through the fast
 * do_set_* paths; edits that start or end mid-byte go through the
 * masked do_setbits_* paths.  Returns the updated increment state v
 * (meaningful only for PG_EDIT_INCREMENT).
 */
static u64
do_it (pg_main_t * pg,
       pg_stream_t * s,
       u32 * buffers,
       u32 n_buffers,
       u32 lo_bit, u32 hi_bit,
       u64 v_min, u64 v_max, u64 v, pg_edit_type_t edit_type)
{
  u32 max_bits, l0, l1, h1, start_bit;

  /* A degenerate range is just a fixed value. */
  if (v_min == v_max)
    edit_type = PG_EDIT_FIXED;

  l0 = lo_bit / BITS (u8);	/* byte offset of field start */
  l1 = lo_bit % BITS (u8);	/* bit offset within that byte */
  h1 = hi_bit % BITS (u8);	/* bit offset of field end within its byte */

  start_bit = l0 * BITS (u8);

  max_bits = hi_bit - start_bit;
  ASSERT (max_bits <= 64);

  /* Fast path dispatch: whole-byte-aligned fields of 8/16/32/64 bits. */
#define _(n)						\
  case (n):						\
    if (edit_type == PG_EDIT_INCREMENT)			\
      v = do_set_increment (pg, s, buffers, n_buffers,	\
			    BITS (u##n),		\
			    l0,				\
			    /* is_net_byte_order */ 1,	\
			    /* want sum */ 0, 0,	\
			    v_min, v_max,		\
			    v);				\
    else if (edit_type == PG_EDIT_RANDOM)		\
      do_set_random (pg, s, buffers, n_buffers,		\
		     BITS (u##n),			\
		     l0,				\
		     /* is_net_byte_order */ 1,		\
		     /* want sum */ 0, 0,		\
		     v_min, v_max);			\
    else /* edit_type == PG_EDIT_FIXED */		\
      do_set_fixed (pg, s, buffers, n_buffers,		\
		    BITS (u##n),			\
		    l0,					\
		    /* is_net_byte_order */ 1,		\
		    v_min, v_max);			\
    goto done;

  if (l1 == 0 && h1 == 0)
    {
      switch (max_bits)
	{
	  _(8);
	  _(16);
	  _(32);
	  _(64);
	}
    }

#undef _

  /* Slow path: field is not byte-aligned.  Compute a container
     width (next power of two, at least 8 bits), plus mask and shift
     locating the field inside that container. */
  {
    u64 mask;
    u32 shift = l1;
    u32 n_bits = max_bits;

    max_bits = clib_max (max_pow2 (n_bits), 8);

    mask = ((u64) 1 << (u64) n_bits) - 1;
    mask &= ~(((u64) 1 << (u64) shift) - 1);

    mask <<= max_bits - n_bits;
    shift += max_bits - n_bits;

    switch (max_bits)
      {
#define _(n)							\
      case (n):							\
	if (edit_type == PG_EDIT_INCREMENT)			\
	  v = do_setbits_increment (pg, s, buffers, n_buffers,	\
				    BITS (u##n), n_bits,	\
				    l0, v_min, v_max, v,	\
				    mask, shift);		\
	else if (edit_type == PG_EDIT_RANDOM)			\
	  do_setbits_random (pg, s, buffers, n_buffers,		\
			     BITS (u##n), n_bits,		\
			     l0, v_min, v_max,			\
			     mask, shift);			\
	else /* edit_type == PG_EDIT_FIXED */			\
	  do_setbits_fixed (pg, s, buffers, n_buffers,		\
			    BITS (u##n), n_bits,		\
			    l0, v_min, v_max,			\
			    mask, shift);			\
	goto done;

	_(8);
	_(16);
	_(32);
	_(64);

#undef _
      }
  }

done:
  return v;
}
861
/*
 * Set vlib_buffer_t.current_length on each buffer per the stream's
 * packet-size edit type (fixed / increment / random between
 * min_packet_bytes and max_packet_bytes), then credit the generated
 * packet and byte totals to the RX interface counters.
 */
static void
pg_generate_set_lengths (pg_main_t * pg,
			 pg_stream_t * s, u32 * buffers, u32 n_buffers)
{
  u64 v_min, v_max, length_sum;
  pg_edit_type_t edit_type;

  v_min = s->min_packet_bytes;
  v_max = s->max_packet_bytes;
  edit_type = s->packet_size_edit_type;

  /* The do_set_* helpers write current_length in host byte order
     directly into the vlib_buffer_t and accumulate the byte total
     in length_sum. */
  if (edit_type == PG_EDIT_INCREMENT)
    s->last_increment_packet_size
      = do_set_increment (pg, s, buffers, n_buffers,
			  8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
			  STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
			  /* is_net_byte_order */ 0,
			  /* want sum */ 1, &length_sum,
			  v_min, v_max, s->last_increment_packet_size);

  else if (edit_type == PG_EDIT_RANDOM)
    do_set_random (pg, s, buffers, n_buffers,
		   8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
		   STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
		   /* is_net_byte_order */ 0,
		   /* want sum */ 1, &length_sum,
		   v_min, v_max);

  else				/* edit_type == PG_EDIT_FIXED */
    {
      do_set_fixed (pg, s, buffers, n_buffers,
		    8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
		    STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
		    /* is_net_byte_order */ 0,
		    v_min, v_max);
      /* Fixed size: every packet is v_min bytes long. */
      length_sum = v_min * n_buffers;
    }

  /* Account generated packets/bytes as RX on the pg interface. */
  {
    vnet_main_t *vnm = vnet_get_main ();
    vnet_interface_main_t *im = &vnm->interface_main;
    vnet_sw_interface_t *si =
      vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);

    vlib_increment_combined_counter (im->combined_sw_if_counters
				     + VNET_INTERFACE_COUNTER_RX,
				     vlib_get_thread_index (),
				     si->sw_if_index, n_buffers, length_sum);
  }

}
913
/*
 * After pg_generate_set_lengths has stored each packet's TOTAL
 * length in the head buffer's current_length, walk every buffer
 * chain and distribute that total across the chain: each segment
 * gets at most s->buffer_bytes, NEXT_PRESENT flags are fixed up,
 * and fully-unused trailing segments are freed.
 */
static void
pg_generate_fix_multi_buffer_lengths (pg_main_t * pg,
				      pg_stream_t * s,
				      u32 * buffers, u32 n_buffers)
{
  vlib_main_t *vm = vlib_get_main ();
  pg_buffer_index_t *pbi;
  uword n_bytes_left;
  /* NOTE(review): function-scope static scratch vector — looks like
     it assumes this path runs on a single thread at a time; confirm
     against pg input node scheduling before calling concurrently. */
  static u32 *unused_buffers = 0;

  while (n_buffers > 0)
    {
      vlib_buffer_t *b;
      u32 bi;

      bi = buffers[0];
      b = vlib_get_buffer (vm, bi);

      /* Current length here is length of whole packet. */
      n_bytes_left = b->current_length;

      pbi = s->buffer_indices;
      while (1)
	{
	  /* Each chain segment carries at most buffer_bytes. */
	  uword n = clib_min (n_bytes_left, s->buffer_bytes);

	  b->current_length = n;
	  n_bytes_left -= n;
	  if (n_bytes_left > 0)
	    b->flags |= VLIB_BUFFER_NEXT_PRESENT;
	  else
	    b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;

	  /* Return unused buffers to fifos. */
	  if (n == 0)
	    vec_add1 (unused_buffers, bi);

	  pbi++;
	  if (pbi >= vec_end (s->buffer_indices))
	    break;

	  bi = b->next_buffer;
	  b = vlib_get_buffer (vm, bi);
	}
      ASSERT (n_bytes_left == 0);

      buffers += 1;
      n_buffers -= 1;
    }

  /* Free any segments that ended up with zero bytes; keep the
     scratch vector allocated for the next call. */
  if (vec_len (unused_buffers) > 0)
    {
      vlib_buffer_free_no_next (vm, unused_buffers, vec_len (unused_buffers));
      _vec_len (unused_buffers) = 0;
    }
}
970
/*
 * Apply all non-fixed (random / increment) edits of the stream to a
 * burst of buffers via do_it, then run the group edit functions
 * (innermost group last-to-first) which finalize derived fields.
 */
static void
pg_generate_edit (pg_main_t * pg,
		  pg_stream_t * s, u32 * buffers, u32 n_buffers)
{
  pg_edit_t *e;

  vec_foreach (e, s->non_fixed_edits)
  {
    switch (e->type)
      {
      case PG_EDIT_RANDOM:
      case PG_EDIT_INCREMENT:
	{
	  u32 lo_bit, hi_bit;
	  u64 v_min, v_max;

	  v_min = pg_edit_get_value (e, PG_EDIT_LO);
	  v_max = pg_edit_get_value (e, PG_EDIT_HI);

	  /* Translate the edit's lsb offset within packet data into
	     bit offsets relative to the start of vlib_buffer_t. */
	  hi_bit = (BITS (u8) * STRUCT_OFFSET_OF (vlib_buffer_t, data)
		    + BITS (u8) + e->lsb_bit_offset);
	  lo_bit = hi_bit - e->n_bits;

	  /* do_it returns the increment state to resume from on the
	     next burst (unused for random edits). */
	  e->last_increment_value
	    = do_it (pg, s, buffers, n_buffers, lo_bit, hi_bit, v_min, v_max,
		     e->last_increment_value, e->type);
	}
	break;

      case PG_EDIT_UNSPECIFIED:
	break;

      default:
	/* Should not be any fixed edits left. */
	ASSERT (0);
	break;
      }
  }

  /* Call any edit functions to e.g. compute IP lengths, checksums, ... */
  {
    int i;
    for (i = vec_len (s->edit_groups) - 1; i >= 0; i--)
      {
	pg_edit_group_t *g = s->edit_groups + i;
	if (g->edit_function)
	  g->edit_function (pg, s, g, buffers, n_buffers);
      }
  }
}
1021
1022static void
1023pg_set_next_buffer_pointers (pg_main_t * pg,
1024 pg_stream_t * s,
Calvin71e97c62016-08-19 16:23:14 -04001025 u32 * buffers, u32 * next_buffers, u32 n_buffers)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001026{
Damjan Marion64034362016-11-07 22:19:55 +01001027 vlib_main_t *vm = vlib_get_main ();
Ed Warnickecb9cada2015-12-08 15:45:58 -07001028
1029 while (n_buffers >= 4)
1030 {
1031 u32 ni0, ni1;
Calvin71e97c62016-08-19 16:23:14 -04001032 vlib_buffer_t *b0, *b1;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001033
1034 b0 = vlib_get_buffer (vm, buffers[0]);
1035 b1 = vlib_get_buffer (vm, buffers[1]);
1036 ni0 = next_buffers[0];
1037 ni1 = next_buffers[1];
1038
1039 vlib_prefetch_buffer_with_index (vm, buffers[2], WRITE);
1040 vlib_prefetch_buffer_with_index (vm, buffers[3], WRITE);
1041
1042 b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1043 b1->flags |= VLIB_BUFFER_NEXT_PRESENT;
1044 b0->next_buffer = ni0;
1045 b1->next_buffer = ni1;
1046
1047 buffers += 2;
1048 next_buffers += 2;
1049 n_buffers -= 2;
1050 }
1051
1052 while (n_buffers > 0)
1053 {
1054 u32 ni0;
Calvin71e97c62016-08-19 16:23:14 -04001055 vlib_buffer_t *b0;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001056
1057 b0 = vlib_get_buffer (vm, buffers[0]);
1058 ni0 = next_buffers[0];
1059 buffers += 1;
1060 next_buffers += 1;
1061 n_buffers -= 1;
1062
1063 b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1064 b0->next_buffer = ni0;
1065 }
1066}
1067
1068static_always_inline void
Ed Warnickecb9cada2015-12-08 15:45:58 -07001069init_buffers_inline (vlib_main_t * vm,
1070 pg_stream_t * s,
1071 u32 * buffers,
Calvin71e97c62016-08-19 16:23:14 -04001072 u32 n_buffers, u32 data_offset, u32 n_data, u32 set_data)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001073{
Calvin71e97c62016-08-19 16:23:14 -04001074 u32 n_left, *b;
1075 u8 *data, *mask;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001076
Dave Barach3c8e1462019-01-05 16:51:41 -05001077 ASSERT (s->replay_packet_templates == 0);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001078
1079 data = s->fixed_packet_data + data_offset;
1080 mask = s->fixed_packet_data_mask + data_offset;
1081 if (data + n_data >= vec_end (s->fixed_packet_data))
1082 n_data = (data < vec_end (s->fixed_packet_data)
Calvin71e97c62016-08-19 16:23:14 -04001083 ? vec_end (s->fixed_packet_data) - data : 0);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001084 if (n_data > 0)
1085 {
1086 ASSERT (data + n_data <= vec_end (s->fixed_packet_data));
1087 ASSERT (mask + n_data <= vec_end (s->fixed_packet_data_mask));
1088 }
1089
1090 n_left = n_buffers;
1091 b = buffers;
1092
1093 while (n_left >= 4)
1094 {
1095 u32 bi0, bi1;
Calvin71e97c62016-08-19 16:23:14 -04001096 vlib_buffer_t *b0, *b1;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001097
1098 /* Prefetch next iteration. */
1099 vlib_prefetch_buffer_with_index (vm, b[2], STORE);
1100 vlib_prefetch_buffer_with_index (vm, b[3], STORE);
1101
1102 bi0 = b[0];
1103 bi1 = b[1];
1104 b += 2;
1105 n_left -= 2;
1106
1107 b0 = vlib_get_buffer (vm, bi0);
1108 b1 = vlib_get_buffer (vm, bi1);
1109
1110 vnet_buffer (b0)->sw_if_index[VLIB_RX] =
1111 vnet_buffer (b1)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1112
1113 vnet_buffer (b0)->sw_if_index[VLIB_TX] =
Dave Barach7d31ab22019-05-08 19:18:18 -04001114 vnet_buffer (b1)->sw_if_index[VLIB_TX] = s->sw_if_index[VLIB_TX];
Ed Warnickecb9cada2015-12-08 15:45:58 -07001115
1116 if (set_data)
1117 {
Dave Barach178cf492018-11-13 16:34:13 -05001118 clib_memcpy_fast (b0->data, data, n_data);
1119 clib_memcpy_fast (b1->data, data, n_data);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001120 }
1121 else
1122 {
1123 ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
1124 ASSERT (validate_buffer_data2 (b1, s, data_offset, n_data));
1125 }
1126 }
1127
1128 while (n_left >= 1)
1129 {
1130 u32 bi0;
Calvin71e97c62016-08-19 16:23:14 -04001131 vlib_buffer_t *b0;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001132
1133 bi0 = b[0];
1134 b += 1;
1135 n_left -= 1;
1136
1137 b0 = vlib_get_buffer (vm, bi0);
1138 vnet_buffer (b0)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
Dave Barach7d31ab22019-05-08 19:18:18 -04001139 vnet_buffer (b0)->sw_if_index[VLIB_TX] = s->sw_if_index[VLIB_TX];
Ed Warnickecb9cada2015-12-08 15:45:58 -07001140
1141 if (set_data)
Dave Barach178cf492018-11-13 16:34:13 -05001142 clib_memcpy_fast (b0->data, data, n_data);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001143 else
1144 ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
1145 }
1146}
1147
Ed Warnickecb9cada2015-12-08 15:45:58 -07001148static u32
1149pg_stream_fill_helper (pg_main_t * pg,
1150 pg_stream_t * s,
1151 pg_buffer_index_t * bi,
Calvin71e97c62016-08-19 16:23:14 -04001152 u32 * buffers, u32 * next_buffers, u32 n_alloc)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001153{
Damjan Marion64034362016-11-07 22:19:55 +01001154 vlib_main_t *vm = vlib_get_main ();
Ed Warnickecb9cada2015-12-08 15:45:58 -07001155 uword is_start_of_packet = bi == s->buffer_indices;
1156 u32 n_allocated;
1157
Dave Barach3c8e1462019-01-05 16:51:41 -05001158 ASSERT (vec_len (s->replay_packet_templates) == 0);
1159
Damjan Marion671e60e2018-12-30 18:09:59 +01001160 n_allocated = vlib_buffer_alloc (vm, buffers, n_alloc);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001161 if (n_allocated == 0)
1162 return 0;
1163
Calvin71e97c62016-08-19 16:23:14 -04001164 /*
1165 * We can't assume we got all the buffers we asked for...
Ed Warnickecb9cada2015-12-08 15:45:58 -07001166 * This never worked until recently.
1167 */
1168 n_alloc = n_allocated;
1169
1170 /* Reinitialize buffers */
Damjan Marionef2e5842018-03-07 13:21:04 +01001171 init_buffers_inline
1172 (vm, s,
1173 buffers,
1174 n_alloc, (bi - s->buffer_indices) * s->buffer_bytes /* data offset */ ,
1175 s->buffer_bytes,
1176 /* set_data */ 1);
Calvin71e97c62016-08-19 16:23:14 -04001177
Ed Warnickecb9cada2015-12-08 15:45:58 -07001178 if (next_buffers)
1179 pg_set_next_buffer_pointers (pg, s, buffers, next_buffers, n_alloc);
1180
1181 if (is_start_of_packet)
1182 {
Dave Barach3c8e1462019-01-05 16:51:41 -05001183 pg_generate_set_lengths (pg, s, buffers, n_alloc);
1184 if (vec_len (s->buffer_indices) > 1)
1185 pg_generate_fix_multi_buffer_lengths (pg, s, buffers, n_alloc);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001186
Dave Barach3c8e1462019-01-05 16:51:41 -05001187 pg_generate_edit (pg, s, buffers, n_alloc);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001188 }
1189
1190 return n_alloc;
1191}
1192
/*
 * Fill a pcap-replay stream: allocate enough buffers for the next
 * n_alloc template packets, copy the template bytes into (possibly
 * chained) buffers, push the head buffers onto the stream's fifo and
 * bump the interface RX counters.  Returns the number of packets
 * queued, or 0 on allocation failure.
 */
static u32
pg_stream_fill_replay (pg_main_t * pg, pg_stream_t * s, u32 n_alloc)
{
  pg_buffer_index_t *bi;
  u32 n_left, i, l;
  u32 buffer_alloc_request = 0;
  u32 buffer_alloc_result;
  u32 current_buffer_index;
  u32 *buffers;
  vlib_main_t *vm = vlib_get_main ();
  vnet_main_t *vnm = vnet_get_main ();
  u32 buf_sz = vlib_buffer_get_default_data_size (vm);
  vnet_interface_main_t *im = &vnm->interface_main;
  vnet_sw_interface_t *si;

  /* Reuse the per-thread scratch vector of buffer indices. */
  buffers = pg->replay_buffers_by_thread[vm->thread_index];
  vec_reset_length (buffers);
  bi = s->buffer_indices;

  n_left = n_alloc;
  i = s->current_replay_packet_index;
  l = vec_len (s->replay_packet_templates);

  /* Figure out how many buffers we need */
  while (n_left > 0)
    {
      u8 *d0;

      d0 = vec_elt (s->replay_packet_templates, i);
      /* Round each packet up to a whole number of buffers. */
      buffer_alloc_request += (vec_len (d0) + (buf_sz - 1)) / buf_sz;

      /* Wrap around the template vector. */
      i = ((i + 1) == l) ? 0 : i + 1;
      n_left--;
    }

  ASSERT (buffer_alloc_request > 0);
  vec_validate (buffers, buffer_alloc_request - 1);

  /* Allocate that many buffers */
  buffer_alloc_result = vlib_buffer_alloc (vm, buffers, buffer_alloc_request);
  if (buffer_alloc_result < buffer_alloc_request)
    {
      /* Partial allocation: free what we got and report no packets. */
      clib_warning ("alloc failure, got %d not %d", buffer_alloc_result,
		    buffer_alloc_request);
      vlib_buffer_free_no_next (vm, buffers, buffer_alloc_result);
      pg->replay_buffers_by_thread[vm->thread_index] = buffers;
      return 0;
    }

  /* Now go generate the buffers, and add them to the FIFO */
  n_left = n_alloc;

  current_buffer_index = 0;
  i = s->current_replay_packet_index;
  l = vec_len (s->replay_packet_templates);
  while (n_left > 0)
    {
      u8 *d0;
      int not_last;
      u32 data_offset;
      u32 bytes_to_copy, bytes_this_chunk;
      vlib_buffer_t *b;

      d0 = vec_elt (s->replay_packet_templates, i);
      data_offset = 0;
      bytes_to_copy = vec_len (d0);

      /* Add head chunk to pg fifo */
      clib_fifo_add1 (bi->buffer_fifo, buffers[current_buffer_index]);

      /* Copy the data */
      while (bytes_to_copy)
	{
	  bytes_this_chunk = clib_min (bytes_to_copy, buf_sz);
	  ASSERT (current_buffer_index < vec_len (buffers));
	  b = vlib_get_buffer (vm, buffers[current_buffer_index]);
	  clib_memcpy_fast (b->data, d0 + data_offset, bytes_this_chunk);
	  vnet_buffer (b)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
	  vnet_buffer (b)->sw_if_index[VLIB_TX] = s->sw_if_index[VLIB_TX];
	  b->flags = 0;
	  b->next_buffer = 0;
	  b->current_data = 0;
	  b->current_length = bytes_this_chunk;

	  /* Chain to the next buffer if this packet needs more room;
	     allocation sizing above guarantees the successor exists. */
	  not_last = bytes_this_chunk < bytes_to_copy;
	  if (not_last)
	    {
	      ASSERT (current_buffer_index < (vec_len (buffers) - 1));
	      b->flags |= VLIB_BUFFER_NEXT_PRESENT;
	      b->next_buffer = buffers[current_buffer_index + 1];
	    }
	  bytes_to_copy -= bytes_this_chunk;
	  data_offset += bytes_this_chunk;
	  current_buffer_index++;
	}

      i = ((i + 1) == l) ? 0 : i + 1;
      n_left--;
    }

  /* Update the interface counters */
  si = vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
  l = 0;
  for (i = 0; i < n_alloc; i++)
    l += vlib_buffer_index_length_in_chain (vm, buffers[i]);
  vlib_increment_combined_counter (im->combined_sw_if_counters
				   + VNET_INTERFACE_COUNTER_RX,
				   vlib_get_thread_index (),
				   si->sw_if_index, n_alloc, l);

  /* Remember where to resume in the template vector next time. */
  s->current_replay_packet_index += n_alloc;
  s->current_replay_packet_index %= vec_len (s->replay_packet_templates);

  pg->replay_buffers_by_thread[vm->thread_index] = buffers;
  return n_alloc;
}
1309
1310
/*
 * Top up the stream's buffer fifos so that at least n_buffers packets
 * are ready to send.  Each element of s->buffer_indices owns one fifo:
 * the first holds packet-head buffers, subsequent ones hold the
 * following segments of multi-buffer packets.  Returns the number of
 * packets available in the head fifo after filling.
 */
static u32
pg_stream_fill (pg_main_t * pg, pg_stream_t * s, u32 n_buffers)
{
  pg_buffer_index_t *bi;
  word i, n_in_fifo, n_alloc, n_free, n_added;
  u32 *tail, *start, *end, *last_tail, *last_start;

  bi = s->buffer_indices;

  /* Enough packets already buffered?  Nothing to do. */
  n_in_fifo = clib_fifo_elts (bi->buffer_fifo);
  if (n_in_fifo >= n_buffers)
    return n_in_fifo;

  n_alloc = n_buffers - n_in_fifo;

  /* Round up, but never generate more than limit. */
  n_alloc = clib_max (VLIB_FRAME_SIZE, n_alloc);

  if (s->n_packets_limit > 0
      && s->n_packets_generated + n_in_fifo + n_alloc >= s->n_packets_limit)
    {
      n_alloc = s->n_packets_limit - s->n_packets_generated - n_in_fifo;
      if (n_alloc < 0)
	n_alloc = 0;
    }

  /*
   * Handle pcap replay directly
   */
  if (s->replay_packet_templates)
    return pg_stream_fill_replay (pg, s, n_alloc);

  /* All buffer fifos should have the same size. */
  if (CLIB_DEBUG > 0)
    {
      uword l = ~0, e;
      vec_foreach (bi, s->buffer_indices)
      {
	e = clib_fifo_elts (bi->buffer_fifo);
	if (bi == s->buffer_indices)
	  l = e;
	ASSERT (l == e);
      }
    }

  last_tail = last_start = 0;
  n_added = n_alloc;

  /*
   * Fill fifos from the last segment towards the head so that each
   * pass can chain its buffers to the segment filled just before
   * (passed along via last_tail/last_start).
   */
  for (i = vec_len (s->buffer_indices) - 1; i >= 0; i--)
    {
      bi = vec_elt_at_index (s->buffer_indices, i);

      /* Grow the fifo if it cannot hold n_alloc more entries. */
      n_free = clib_fifo_free_elts (bi->buffer_fifo);
      if (n_free < n_alloc)
	clib_fifo_resize (bi->buffer_fifo, n_alloc - n_free);

      tail = clib_fifo_advance_tail (bi->buffer_fifo, n_alloc);
      start = bi->buffer_fifo;
      end = clib_fifo_end (bi->buffer_fifo);

      if (tail + n_alloc <= end)
	{
	  /* Contiguous region: fill in one shot. */
	  n_added =
	    pg_stream_fill_helper (pg, s, bi, tail, last_tail, n_alloc);
	}
      else
	{
	  /* Fifo wraps: fill tail..end, then start..remainder. */
	  u32 n = clib_min (end - tail, n_alloc);
	  n_added = pg_stream_fill_helper (pg, s, bi, tail, last_tail, n);

	  if (n_added == n && n_alloc > n_added)
	    {
	      n_added += pg_stream_fill_helper
		(pg, s, bi, start, last_start, n_alloc - n_added);
	    }
	}

      /* Buffer allocation fell short: give back the unused tail slots. */
      if (PREDICT_FALSE (n_added < n_alloc))
	tail = clib_fifo_advance_tail (bi->buffer_fifo, n_added - n_alloc);

      last_tail = tail;
      last_start = start;

      /* Verify that pkts in the fifo are properly allocated */
    }

  return n_in_fifo + n_added;
}
1399
Calvin71e97c62016-08-19 16:23:14 -04001400typedef struct
1401{
Ed Warnickecb9cada2015-12-08 15:45:58 -07001402 u32 stream_index;
1403
1404 u32 packet_length;
Neale Ranns3466c302017-02-16 07:45:03 -08001405 u32 sw_if_index;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001406
1407 /* Use pre data for packet data. */
1408 vlib_buffer_t buffer;
1409} pg_input_trace_t;
1410
Calvin71e97c62016-08-19 16:23:14 -04001411static u8 *
1412format_pg_input_trace (u8 * s, va_list * va)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001413{
Calvin71e97c62016-08-19 16:23:14 -04001414 vlib_main_t *vm = va_arg (*va, vlib_main_t *);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001415 CLIB_UNUSED (vlib_node_t * node) = va_arg (*va, vlib_node_t *);
Calvin71e97c62016-08-19 16:23:14 -04001416 pg_input_trace_t *t = va_arg (*va, pg_input_trace_t *);
1417 pg_main_t *pg = &pg_main;
1418 pg_stream_t *stream;
1419 vlib_node_t *n;
Christophe Fontained3c008d2017-10-02 18:10:54 +02001420 u32 indent = format_get_indent (s);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001421
1422 stream = 0;
Calvin71e97c62016-08-19 16:23:14 -04001423 if (!pool_is_free_index (pg->streams, t->stream_index))
Ed Warnickecb9cada2015-12-08 15:45:58 -07001424 stream = pool_elt_at_index (pg->streams, t->stream_index);
1425
1426 if (stream)
1427 s = format (s, "stream %v", pg->streams[t->stream_index].name);
1428 else
1429 s = format (s, "stream %d", t->stream_index);
1430
1431 s = format (s, ", %d bytes", t->packet_length);
Paul Vinciguerra1671d3b2019-06-25 21:02:40 -04001432 s = format (s, ", sw_if_index %d", t->sw_if_index);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001433
1434 s = format (s, "\n%U%U",
Damjan Marionbd846cd2017-11-21 13:12:41 +01001435 format_white_space, indent, format_vnet_buffer, &t->buffer);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001436
Calvin71e97c62016-08-19 16:23:14 -04001437 s = format (s, "\n%U", format_white_space, indent);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001438
1439 n = 0;
1440 if (stream)
1441 n = vlib_get_node (vm, stream->node_index);
1442
1443 if (n && n->format_buffer)
1444 s = format (s, "%U", n->format_buffer,
Calvin71e97c62016-08-19 16:23:14 -04001445 t->buffer.pre_data, sizeof (t->buffer.pre_data));
Ed Warnickecb9cada2015-12-08 15:45:58 -07001446 else
Calvin71e97c62016-08-19 16:23:14 -04001447 s = format (s, "%U",
Ed Warnickecb9cada2015-12-08 15:45:58 -07001448 format_hex_bytes, t->buffer.pre_data,
1449 ARRAY_LEN (t->buffer.pre_data));
1450 return s;
1451}
1452
/*
 * Record trace entries for the first n_buffers generated packets.
 * Copies the buffer metadata (minus pre_data) and the start of the
 * packet data into each pg_input_trace_t.  Unrolled two packets per
 * iteration with a scalar cleanup loop.
 */
static void
pg_input_trace (pg_main_t * pg,
		vlib_node_runtime_t * node, u32 stream_index, u32 next_index,
		u32 * buffers, u32 n_buffers)
{
  vlib_main_t *vm = vlib_get_main ();
  u32 *b, n_left;

  n_left = n_buffers;
  b = buffers;

  while (n_left >= 2)
    {
      u32 bi0, bi1;
      vlib_buffer_t *b0, *b1;
      pg_input_trace_t *t0, *t1;

      bi0 = b[0];
      bi1 = b[1];
      b += 2;
      n_left -= 2;

      b0 = vlib_get_buffer (vm, bi0);
      b1 = vlib_get_buffer (vm, bi1);

      vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
      vlib_trace_buffer (vm, node, next_index, b1, /* follow_chain */ 1);

      t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
      t1 = vlib_add_trace (vm, node, b1, sizeof (t1[0]));

      t0->stream_index = stream_index;
      t1->stream_index = stream_index;

      t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
      t1->packet_length = vlib_buffer_length_in_chain (vm, b1);

      t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
      t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_RX];

      /* Copy buffer metadata up to (but not including) pre_data. */
      clib_memcpy_fast (&t0->buffer, b0,
			sizeof (b0[0]) - sizeof (b0->pre_data));
      clib_memcpy_fast (&t1->buffer, b1,
			sizeof (b1[0]) - sizeof (b1->pre_data));

      /* Snapshot the start of the packet data into pre_data. */
      clib_memcpy_fast (t0->buffer.pre_data, b0->data,
			sizeof (t0->buffer.pre_data));
      clib_memcpy_fast (t1->buffer.pre_data, b1->data,
			sizeof (t1->buffer.pre_data));
    }

  /* Remaining (at most one) packet. */
  while (n_left >= 1)
    {
      u32 bi0;
      vlib_buffer_t *b0;
      pg_input_trace_t *t0;

      bi0 = b[0];
      b += 1;
      n_left -= 1;

      b0 = vlib_get_buffer (vm, bi0);

      vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
      t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));

      t0->stream_index = stream_index;
      t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
      t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
      clib_memcpy_fast (&t0->buffer, b0,
			sizeof (b0[0]) - sizeof (b0->pre_data));
      clib_memcpy_fast (t0->buffer.pre_data, b0->data,
			sizeof (t0->buffer.pre_data));
    }
}
1528
Mohsin Kazmi22e9cfd2019-07-23 11:54:48 +02001529static_always_inline void
1530fill_gso_buffer_flags (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
1531 u32 packet_data_size)
1532{
1533
1534 for (int i = 0; i < n_buffers; i++)
1535 {
1536 vlib_buffer_t *b0 = vlib_get_buffer (vm, buffers[i]);
1537 u8 l4_proto = 0;
1538 u8 l4_hdr_sz = 0;
1539
1540 ethernet_header_t *eh = (ethernet_header_t *) b0->data;
1541 u16 ethertype = clib_net_to_host_u16 (eh->type);
1542 u16 l2hdr_sz = sizeof (ethernet_header_t);
1543
Mohsin Kazmi14bea1b2019-07-29 11:39:26 +02001544 if (ethernet_frame_is_tagged (ethertype))
1545 {
1546 ethernet_vlan_header_t *vlan = (ethernet_vlan_header_t *) (eh + 1);
1547
1548 ethertype = clib_net_to_host_u16 (vlan->type);
1549 l2hdr_sz += sizeof (*vlan);
1550 if (ethertype == ETHERNET_TYPE_VLAN)
1551 {
1552 vlan++;
1553 ethertype = clib_net_to_host_u16 (vlan->type);
1554 l2hdr_sz += sizeof (*vlan);
1555 }
1556 }
1557
Mohsin Kazmi22e9cfd2019-07-23 11:54:48 +02001558 vnet_buffer (b0)->l2_hdr_offset = 0;
1559 vnet_buffer (b0)->l3_hdr_offset = l2hdr_sz;
1560 if (PREDICT_TRUE (ethertype == ETHERNET_TYPE_IP4))
1561 {
1562 ip4_header_t *ip4 = (ip4_header_t *) (b0->data + l2hdr_sz);
1563 vnet_buffer (b0)->l4_hdr_offset = l2hdr_sz + ip4_header_bytes (ip4);
1564 l4_proto = ip4->protocol;
1565 b0->flags |=
1566 (VNET_BUFFER_F_IS_IP4 | VNET_BUFFER_F_L2_HDR_OFFSET_VALID
1567 | VNET_BUFFER_F_L3_HDR_OFFSET_VALID |
1568 VNET_BUFFER_F_L4_HDR_OFFSET_VALID);
1569 b0->flags |= VNET_BUFFER_F_OFFLOAD_IP_CKSUM;
1570 }
1571 else if (PREDICT_TRUE (ethertype == ETHERNET_TYPE_IP6))
1572 {
1573 ip6_header_t *ip6 = (ip6_header_t *) (b0->data + l2hdr_sz);
1574 /* FIXME IPv6 EH traversal */
1575 vnet_buffer (b0)->l4_hdr_offset = l2hdr_sz + sizeof (ip6_header_t);
1576 l4_proto = ip6->protocol;
1577 b0->flags |=
1578 (VNET_BUFFER_F_IS_IP6 | VNET_BUFFER_F_L2_HDR_OFFSET_VALID
1579 | VNET_BUFFER_F_L3_HDR_OFFSET_VALID |
1580 VNET_BUFFER_F_L4_HDR_OFFSET_VALID);
Mohsin Kazmi22e9cfd2019-07-23 11:54:48 +02001581 }
1582 if (l4_proto == IP_PROTOCOL_TCP)
1583 {
1584 b0->flags |= VNET_BUFFER_F_OFFLOAD_TCP_CKSUM;
1585 tcp_header_t *tcp = (tcp_header_t *) (b0->data +
1586 vnet_buffer
1587 (b0)->l4_hdr_offset);
1588 l4_hdr_sz = tcp_header_bytes (tcp);
1589 tcp->checksum = 0;
1590 vnet_buffer2 (b0)->gso_l4_hdr_sz = l4_hdr_sz;
1591 vnet_buffer2 (b0)->gso_size = packet_data_size;
1592 b0->flags |= VNET_BUFFER_F_GSO;
1593 }
1594 else if (l4_proto == IP_PROTOCOL_UDP)
1595 {
1596 b0->flags |= VNET_BUFFER_F_OFFLOAD_UDP_CKSUM;
1597 udp_header_t *udp = (udp_header_t *) (b0->data +
1598 vnet_buffer
1599 (b0)->l4_hdr_offset);
1600 vnet_buffer2 (b0)->gso_l4_hdr_sz = sizeof (*udp);
1601 udp->checksum = 0;
1602 }
1603 }
1604}
1605
Ed Warnickecb9cada2015-12-08 15:45:58 -07001606static uword
1607pg_generate_packets (vlib_node_runtime_t * node,
1608 pg_main_t * pg,
Calvin71e97c62016-08-19 16:23:14 -04001609 pg_stream_t * s, uword n_packets_to_generate)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001610{
Damjan Marion64034362016-11-07 22:19:55 +01001611 vlib_main_t *vm = vlib_get_main ();
Calvin71e97c62016-08-19 16:23:14 -04001612 u32 *to_next, n_this_frame, n_left, n_trace, n_packets_in_fifo;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001613 uword n_packets_generated;
Calvin71e97c62016-08-19 16:23:14 -04001614 pg_buffer_index_t *bi, *bi0;
Damjan Mariond2017f62016-11-07 12:24:50 +01001615 u32 next_index = s->next_index;
1616 vnet_feature_main_t *fm = &feature_main;
1617 vnet_feature_config_main_t *cm;
1618 u8 feature_arc_index = fm->device_input_feature_arc_index;
1619 cm = &fm->feature_config_mains[feature_arc_index];
1620 u32 current_config_index = ~(u32) 0;
Mohsin Kazmi22e9cfd2019-07-23 11:54:48 +02001621 pg_interface_t *pi = pool_elt_at_index (pg->interfaces, s->pg_if_index);
Damjan Mariond2017f62016-11-07 12:24:50 +01001622 int i;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001623
1624 bi0 = s->buffer_indices;
1625
1626 n_packets_in_fifo = pg_stream_fill (pg, s, n_packets_to_generate);
1627 n_packets_to_generate = clib_min (n_packets_in_fifo, n_packets_to_generate);
1628 n_packets_generated = 0;
1629
Damjan Mariond2017f62016-11-07 12:24:50 +01001630 if (PREDICT_FALSE
1631 (vnet_have_features (feature_arc_index, s->sw_if_index[VLIB_RX])))
1632 {
1633 current_config_index =
1634 vec_elt (cm->config_index_by_sw_if_index, s->sw_if_index[VLIB_RX]);
1635 vnet_get_config_data (&cm->config_main, &current_config_index,
1636 &next_index, 0);
1637 }
1638
Ed Warnickecb9cada2015-12-08 15:45:58 -07001639 while (n_packets_to_generate > 0)
1640 {
Calvin71e97c62016-08-19 16:23:14 -04001641 u32 *head, *start, *end;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001642
Damjan Marion650223c2018-11-14 16:55:53 +01001643 if (PREDICT_TRUE (next_index == VNET_DEVICE_INPUT_NEXT_ETHERNET_INPUT))
1644 {
1645 vlib_next_frame_t *nf;
1646 vlib_frame_t *f;
1647 ethernet_input_frame_t *ef;
Damjan Marion650223c2018-11-14 16:55:53 +01001648 vlib_get_new_next_frame (vm, node, next_index, to_next, n_left);
1649 nf = vlib_node_runtime_get_next_frame (vm, node, next_index);
Andreas Schultz58b2eb12019-07-15 15:40:56 +02001650 f = vlib_get_frame (vm, nf->frame);
Damjan Marion650223c2018-11-14 16:55:53 +01001651 f->flags = ETH_INPUT_FRAME_F_SINGLE_SW_IF_IDX;
1652
1653 ef = vlib_frame_scalar_args (f);
Damjan Marion650223c2018-11-14 16:55:53 +01001654 ef->sw_if_index = pi->sw_if_index;
1655 ef->hw_if_index = pi->hw_if_index;
Damjan Marion296988d2019-02-21 20:24:54 +01001656 vlib_frame_no_append (f);
Damjan Marion650223c2018-11-14 16:55:53 +01001657 }
1658 else
1659 vlib_get_next_frame (vm, node, next_index, to_next, n_left);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001660
1661 n_this_frame = n_packets_to_generate;
1662 if (n_this_frame > n_left)
1663 n_this_frame = n_left;
1664
1665 start = bi0->buffer_fifo;
1666 end = clib_fifo_end (bi0->buffer_fifo);
1667 head = clib_fifo_head (bi0->buffer_fifo);
1668
1669 if (head + n_this_frame <= end)
Damjan Marion64d557c2019-01-18 20:03:41 +01001670 vlib_buffer_copy_indices (to_next, head, n_this_frame);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001671 else
1672 {
1673 u32 n = end - head;
Damjan Marion64d557c2019-01-18 20:03:41 +01001674 vlib_buffer_copy_indices (to_next + 0, head, n);
1675 vlib_buffer_copy_indices (to_next + n, start, n_this_frame - n);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001676 }
1677
Dave Barach3c8e1462019-01-05 16:51:41 -05001678 if (s->replay_packet_templates == 0)
1679 {
1680 vec_foreach (bi, s->buffer_indices)
1681 clib_fifo_advance_head (bi->buffer_fifo, n_this_frame);
1682 }
1683 else
1684 {
1685 clib_fifo_advance_head (bi0->buffer_fifo, n_this_frame);
1686 }
Ed Warnickecb9cada2015-12-08 15:45:58 -07001687
Damjan Mariond2017f62016-11-07 12:24:50 +01001688 if (current_config_index != ~(u32) 0)
1689 for (i = 0; i < n_this_frame; i++)
1690 {
1691 vlib_buffer_t *b;
1692 b = vlib_get_buffer (vm, to_next[i]);
Damjan Mariond2017f62016-11-07 12:24:50 +01001693 b->current_config_index = current_config_index;
Damjan Marionaa682a32018-04-26 22:45:40 +02001694 vnet_buffer (b)->feature_arc_index = feature_arc_index;
Damjan Mariond2017f62016-11-07 12:24:50 +01001695 }
1696
Mohsin Kazmi22e9cfd2019-07-23 11:54:48 +02001697 if (pi->gso_enabled)
1698 fill_gso_buffer_flags (vm, to_next, n_this_frame, pi->gso_size);
1699
Ed Warnickecb9cada2015-12-08 15:45:58 -07001700 n_trace = vlib_get_trace_count (vm, node);
1701 if (n_trace > 0)
1702 {
1703 u32 n = clib_min (n_trace, n_this_frame);
Damjan Marion65cbcfe2019-02-20 15:34:00 +01001704 pg_input_trace (pg, node, s - pg->streams, next_index, to_next, n);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001705 vlib_set_trace_count (vm, node, n_trace - n);
1706 }
1707 n_packets_to_generate -= n_this_frame;
1708 n_packets_generated += n_this_frame;
1709 n_left -= n_this_frame;
Dave Barach3c8e1462019-01-05 16:51:41 -05001710 if (CLIB_DEBUG > 0)
1711 {
1712 int i;
1713 vlib_buffer_t *b;
1714
Damjan Marion2768cdc2019-02-20 14:11:51 +01001715 for (i = 0; i < n_this_frame; i++)
Dave Barach3c8e1462019-01-05 16:51:41 -05001716 {
1717 b = vlib_get_buffer (vm, to_next[i]);
1718 ASSERT ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0 ||
1719 b->current_length >= VLIB_BUFFER_MIN_CHAIN_SEG_SIZE);
1720 }
1721 }
Damjan Mariond2017f62016-11-07 12:24:50 +01001722 vlib_put_next_frame (vm, node, next_index, n_left);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001723 }
1724
1725 return n_packets_generated;
1726}
1727
1728static uword
Calvin71e97c62016-08-19 16:23:14 -04001729pg_input_stream (vlib_node_runtime_t * node, pg_main_t * pg, pg_stream_t * s)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001730{
Damjan Marion64034362016-11-07 22:19:55 +01001731 vlib_main_t *vm = vlib_get_main ();
Ed Warnickecb9cada2015-12-08 15:45:58 -07001732 uword n_packets;
1733 f64 time_now, dt;
1734
Calvin71e97c62016-08-19 16:23:14 -04001735 if (s->n_packets_limit > 0 && s->n_packets_generated >= s->n_packets_limit)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001736 {
1737 pg_stream_enable_disable (pg, s, /* want_enabled */ 0);
1738 return 0;
1739 }
1740
1741 /* Apply rate limit. */
1742 time_now = vlib_time_now (vm);
1743 if (s->time_last_generate == 0)
1744 s->time_last_generate = time_now;
1745
1746 dt = time_now - s->time_last_generate;
1747 s->time_last_generate = time_now;
1748
1749 n_packets = VLIB_FRAME_SIZE;
1750 if (s->rate_packets_per_second > 0)
1751 {
1752 s->packet_accumulator += dt * s->rate_packets_per_second;
1753 n_packets = s->packet_accumulator;
1754
1755 /* Never allow accumulator to grow if we get behind. */
1756 s->packet_accumulator -= n_packets;
1757 }
1758
1759 /* Apply fixed limit. */
1760 if (s->n_packets_limit > 0
1761 && s->n_packets_generated + n_packets > s->n_packets_limit)
1762 n_packets = s->n_packets_limit - s->n_packets_generated;
1763
1764 /* Generate up to one frame's worth of packets. */
Christian E. Hopps87d7bac2019-09-27 12:59:30 -04001765 if (n_packets > s->n_max_frame)
1766 n_packets = s->n_max_frame;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001767
1768 if (n_packets > 0)
1769 n_packets = pg_generate_packets (node, pg, s, n_packets);
1770
1771 s->n_packets_generated += n_packets;
1772
1773 return n_packets;
1774}
1775
1776uword
Calvin71e97c62016-08-19 16:23:14 -04001777pg_input (vlib_main_t * vm, vlib_node_runtime_t * node, vlib_frame_t * frame)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001778{
1779 uword i;
Calvin71e97c62016-08-19 16:23:14 -04001780 pg_main_t *pg = &pg_main;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001781 uword n_packets = 0;
Damjan Marion3a4ed392016-11-08 13:20:42 +01001782 u32 worker_index = 0;
1783
1784 if (vlib_num_workers ())
1785 worker_index = vlib_get_current_worker_index ();
Ed Warnickecb9cada2015-12-08 15:45:58 -07001786
Calvin71e97c62016-08-19 16:23:14 -04001787 /* *INDENT-OFF* */
Damjan Marion3a4ed392016-11-08 13:20:42 +01001788 clib_bitmap_foreach (i, pg->enabled_streams[worker_index], ({
Damjan Marion64034362016-11-07 22:19:55 +01001789 pg_stream_t *s = vec_elt_at_index (pg->streams, i);
Damjan Marion3a4ed392016-11-08 13:20:42 +01001790 n_packets += pg_input_stream (node, pg, s);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001791 }));
Calvin71e97c62016-08-19 16:23:14 -04001792 /* *INDENT-ON* */
Ed Warnickecb9cada2015-12-08 15:45:58 -07001793
1794 return n_packets;
1795}
1796
Calvin71e97c62016-08-19 16:23:14 -04001797/* *INDENT-OFF* */
Ed Warnickecb9cada2015-12-08 15:45:58 -07001798VLIB_REGISTER_NODE (pg_input_node) = {
1799 .function = pg_input,
Damjan Marion7ca5aaa2019-09-24 18:10:49 +02001800 .flags = VLIB_NODE_FLAG_TRACE_SUPPORTED,
Ed Warnickecb9cada2015-12-08 15:45:58 -07001801 .name = "pg-input",
Damjan Marion51327ac2016-11-09 11:59:42 +01001802 .sibling_of = "device-input",
Ed Warnickecb9cada2015-12-08 15:45:58 -07001803 .type = VLIB_NODE_TYPE_INPUT,
1804
1805 .format_trace = format_pg_input_trace,
1806
1807 /* Input node will be left disabled until a stream is active. */
1808 .state = VLIB_NODE_STATE_DISABLED,
1809};
Calvin71e97c62016-08-19 16:23:14 -04001810/* *INDENT-ON* */
1811
1812/*
1813 * fd.io coding-style-patch-verification: ON
1814 *
1815 * Local Variables:
1816 * eval: (c-set-style "gnu")
1817 * End:
1818 */