/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * buffer.c: allocate/free network buffers.
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

/**
 * @file
 *
 * Allocate/free network buffers.
 */

#include <vlib/vlib.h>
#include <vlib/unix/unix.h>

vlib_buffer_callbacks_t *vlib_buffer_callbacks = 0;

/* When running unprivileged we are limited by RLIMIT_MEMLOCK, which is
   typically set to 16MB, so the default size for buffer memory is 14MB. */
static u32 vlib_buffer_physmem_sz = 14 << 20;

vlib_buffer_main_t buffer_main;

/* logging */
static vlib_log_class_t buffer_log_default;

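/* Slow path for computing the total length of a buffer chain: walk every
   segment, cache the length of everything after the first buffer in
   total_length_not_including_first_buffer, mark the cache valid, and return
   the complete chain length. */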
uword
vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
				       vlib_buffer_t * b_first)
{
  vlib_buffer_t *b = b_first;
  uword l_first = b_first->current_length;
  uword l = 0;
  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      b = vlib_get_buffer (vm, b->next_buffer);
      l += b->current_length;
    }
  b_first->total_length_not_including_first_buffer = l;
  b_first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
  return l + l_first;
}

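/* Format helpers: format_vlib_buffer prints buffer metadata (flags, lengths,
   free-list index, clone count) plus one line per chained segment;
   format_vlib_buffer_and_data additionally dumps the first 64 bytes of
   current data; format_vlib_buffer_contents appends the payload of the whole
   chain to the supplied vector. */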
u8 *
format_vlib_buffer (u8 * s, va_list * args)
{
  vlib_buffer_t *b = va_arg (*args, vlib_buffer_t *);
  u32 indent = format_get_indent (s);
  u8 *a = 0;

#define _(bit, name, v) \
  if (v && (b->flags & VLIB_BUFFER_##name)) \
    a = format (a, "%s ", v);
  foreach_vlib_buffer_flag
#undef _
  s = format (s, "current data %d, length %d, free-list %d, clone-count %u",
	      b->current_data, b->current_length,
	      vlib_buffer_get_free_list_index (b), b->n_add_refs);

  if (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
    s = format (s, ", totlen-nifb %d",
		b->total_length_not_including_first_buffer);

  if (b->flags & VLIB_BUFFER_IS_TRACED)
    s = format (s, ", trace 0x%x", b->trace_index);

  if (a)
    s = format (s, "\n%U%v", format_white_space, indent, a);
  vec_free (a);

  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      vlib_main_t *vm = vlib_get_main ();
      u32 next_buffer = b->next_buffer;
      b = vlib_get_buffer (vm, next_buffer);

      s =
	format (s, "\n%Unext-buffer 0x%x, segment length %d, clone-count %u",
		format_white_space, indent, next_buffer, b->current_length,
		b->n_add_refs);
    }

  return s;
}

u8 *
format_vlib_buffer_and_data (u8 * s, va_list * args)
{
  vlib_buffer_t *b = va_arg (*args, vlib_buffer_t *);

  s = format (s, "%U, %U",
	      format_vlib_buffer, b,
	      format_hex_bytes, vlib_buffer_get_current (b), 64);

  return s;
}

static u8 *
format_vlib_buffer_known_state (u8 * s, va_list * args)
{
  vlib_buffer_known_state_t state = va_arg (*args, vlib_buffer_known_state_t);
  char *t;

  switch (state)
    {
    case VLIB_BUFFER_UNKNOWN:
      t = "unknown";
      break;

    case VLIB_BUFFER_KNOWN_ALLOCATED:
      t = "known-allocated";
      break;

    case VLIB_BUFFER_KNOWN_FREE:
      t = "known-free";
      break;

    default:
      t = "invalid";
      break;
    }

  return format (s, "%s", t);
}

u8 *
format_vlib_buffer_contents (u8 * s, va_list * va)
{
  vlib_main_t *vm = va_arg (*va, vlib_main_t *);
  vlib_buffer_t *b = va_arg (*va, vlib_buffer_t *);

  while (1)
    {
      vec_add (s, vlib_buffer_get_current (b), b->current_length);
      if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
	break;
      b = vlib_get_buffer (vm, b->next_buffer);
    }

  return s;
}

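/* Per-buffer sanity checks: the buffer must reference a valid free list,
   current_data must not start before the pre-data area or run past the end
   of the buffer data, and (optionally) the rest of the chain is checked
   recursively, using an optional hash to catch duplicate buffer indices. */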
static u8 *
vlib_validate_buffer_helper (vlib_main_t * vm,
			     u32 bi,
			     uword follow_buffer_next, uword ** unique_hash)
{
  vlib_buffer_t *b = vlib_get_buffer (vm, bi);
  vlib_buffer_free_list_t *fl;

  if (pool_is_free_index
      (vm->buffer_free_list_pool, vlib_buffer_get_free_list_index (b)))
    return format (0, "unknown free list 0x%x",
		   vlib_buffer_get_free_list_index (b));

  fl =
    pool_elt_at_index (vm->buffer_free_list_pool,
		       vlib_buffer_get_free_list_index (b));

  if ((signed) b->current_data < (signed) -VLIB_BUFFER_PRE_DATA_SIZE)
    return format (0, "current data %d before pre-data", b->current_data);

  if (b->current_data + b->current_length > fl->n_data_bytes)
    return format (0, "%d-%d beyond end of buffer %d",
		   b->current_data, b->current_length, fl->n_data_bytes);

  if (follow_buffer_next && (b->flags & VLIB_BUFFER_NEXT_PRESENT))
    {
      vlib_buffer_known_state_t k;
      u8 *msg, *result;

      k = vlib_buffer_is_known (b->next_buffer);
      if (k != VLIB_BUFFER_KNOWN_ALLOCATED)
	return format (0, "next 0x%x: %U",
		       b->next_buffer, format_vlib_buffer_known_state, k);

      if (unique_hash)
	{
	  if (hash_get (*unique_hash, b->next_buffer))
	    return format (0, "duplicate buffer 0x%x", b->next_buffer);

	  hash_set1 (*unique_hash, b->next_buffer);
	}

      msg = vlib_validate_buffer (vm, b->next_buffer, follow_buffer_next);
      if (msg)
	{
	  result = format (0, "next 0x%x: %v", b->next_buffer, msg);
	  vec_free (msg);
	  return result;
	}
    }

  return 0;
}

u8 *
vlib_validate_buffer (vlib_main_t * vm, u32 bi, uword follow_buffer_next)
{
  return vlib_validate_buffer_helper (vm, bi, follow_buffer_next,
				      /* unique_hash */ 0);
}

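/* Validate a vector of buffer indices: each buffer must be unique, in the
   expected known state, and pass the per-buffer checks above.  Returns 0 on
   success or a formatted error string naming the first offending index. */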
u8 *
vlib_validate_buffers (vlib_main_t * vm,
		       u32 * buffers,
		       uword next_buffer_stride,
		       uword n_buffers,
		       vlib_buffer_known_state_t known_state,
		       uword follow_buffer_next)
{
  uword i, *hash;
  u32 bi, *b = buffers;
  vlib_buffer_known_state_t k;
  u8 *msg = 0, *result = 0;

  hash = hash_create (0, 0);
  for (i = 0; i < n_buffers; i++)
    {
      bi = b[0];
      b += next_buffer_stride;

      /* Buffer is not unique. */
      if (hash_get (hash, bi))
	{
	  msg = format (0, "not unique");
	  goto done;
	}

      k = vlib_buffer_is_known (bi);
      if (k != known_state)
	{
	  msg = format (0, "is %U; expected %U",
			format_vlib_buffer_known_state, k,
			format_vlib_buffer_known_state, known_state);
	  goto done;
	}

      msg = vlib_validate_buffer_helper (vm, bi, follow_buffer_next, &hash);
      if (msg)
	goto done;

      hash_set1 (hash, bi);
    }

done:
  if (msg)
    {
      result = format (0, "0x%x: %v", bi, msg);
      vec_free (msg);
    }
  hash_free (hash);
  return result;
}

/*
 * Hand-craft a static vector w/ length 1, so vec_len(vlib_mains) = 1
 * and vlib_mains[0] = &vlib_global_main from the beginning of time.
 *
 * The only place which should ever expand vlib_mains is start_workers()
 * in threads.c.  It knows about the bootstrap vector.
 */
/* *INDENT-OFF* */
static struct
{
  vec_header_t h;
  vlib_main_t *vm;
} __attribute__ ((packed)) __bootstrap_vlib_main_vector
  __attribute__ ((aligned (CLIB_CACHE_LINE_BYTES))) =
{
  .h.len = 1,
  .vm = &vlib_global_main,
};
/* *INDENT-ON* */

vlib_main_t **vlib_mains = &__bootstrap_vlib_main_vector.vm;


313/* When dubugging validate that given buffers are either known allocated
314 or known free. */
Damjan Marionc8a26c62017-11-24 20:15:23 +0100315void
Ed Warnickecb9cada2015-12-08 15:45:58 -0700316vlib_buffer_validate_alloc_free (vlib_main_t * vm,
317 u32 * buffers,
318 uword n_buffers,
319 vlib_buffer_known_state_t expected_state)
320{
Dave Barach9b8ffd92016-07-08 08:13:45 -0400321 u32 *b;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700322 uword i, bi, is_free;
323
324 if (CLIB_DEBUG == 0)
325 return;
326
Damjan Marionc8a26c62017-11-24 20:15:23 +0100327 if (vlib_buffer_callbacks)
328 return;
329
Ed Warnickecb9cada2015-12-08 15:45:58 -0700330 is_free = expected_state == VLIB_BUFFER_KNOWN_ALLOCATED;
331 b = buffers;
332 for (i = 0; i < n_buffers; i++)
333 {
334 vlib_buffer_known_state_t known;
Dave Barach9b8ffd92016-07-08 08:13:45 -0400335
Ed Warnickecb9cada2015-12-08 15:45:58 -0700336 bi = b[0];
337 b += 1;
Steven899a84b2018-01-29 20:09:09 -0800338 known = vlib_buffer_is_known (bi);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700339 if (known != expected_state)
340 {
341 ASSERT (0);
342 vlib_panic_with_msg
343 (vm, "%s %U buffer 0x%x",
344 is_free ? "freeing" : "allocating",
Dave Barach9b8ffd92016-07-08 08:13:45 -0400345 format_vlib_buffer_known_state, known, bi);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700346 }
347
348 vlib_buffer_set_known_state
Steven899a84b2018-01-29 20:09:09 -0800349 (bi, is_free ? VLIB_BUFFER_KNOWN_FREE : VLIB_BUFFER_KNOWN_ALLOCATED);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700350 }
351}
Ed Warnickecb9cada2015-12-08 15:45:58 -0700352
/* Add buffer free list. */
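/* Creating the first non-default free list also bootstraps the default free
   list (VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX), and every free list created on
   the main thread is mirrored into each worker thread's
   buffer_free_list_pool so per-thread indices stay aligned. */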
static vlib_buffer_free_list_index_t
vlib_buffer_create_free_list_helper (vlib_main_t * vm,
				     u32 n_data_bytes,
				     u32 is_public, u32 is_default, u8 * name)
{
  vlib_buffer_main_t *bm = &buffer_main;
  vlib_buffer_free_list_t *f;
  int i;

  ASSERT (vlib_get_thread_index () == 0);

  if (!is_default && pool_elts (vm->buffer_free_list_pool) == 0)
    {
      vlib_buffer_free_list_index_t default_free_free_list_index;

      /* *INDENT-OFF* */
      default_free_free_list_index =
        vlib_buffer_create_free_list_helper
        (vm,
         /* default buffer size */ VLIB_BUFFER_DEFAULT_FREE_LIST_BYTES,
         /* is_public */ 1,
         /* is_default */ 1,
         (u8 *) "default");
      /* *INDENT-ON* */
      ASSERT (default_free_free_list_index ==
	      VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);

      if (n_data_bytes == VLIB_BUFFER_DEFAULT_FREE_LIST_BYTES && is_public)
	return default_free_free_list_index;
    }

  pool_get_aligned (vm->buffer_free_list_pool, f, CLIB_CACHE_LINE_BYTES);

  clib_memset (f, 0, sizeof (f[0]));
  f->index = f - vm->buffer_free_list_pool;
  f->n_data_bytes = vlib_buffer_round_size (n_data_bytes);
  f->min_n_buffers_each_alloc = VLIB_FRAME_SIZE;
  f->buffer_pool_index = 0;
  f->name = clib_mem_is_vec (name) ? name : format (0, "%s", name);

  /* Setup free buffer template. */
  vlib_buffer_set_free_list_index (&f->buffer_init_template, f->index);
  f->buffer_init_template.n_add_refs = 0;

  if (is_public)
    {
      uword *p = hash_get (bm->free_list_by_size, f->n_data_bytes);
      if (!p)
	hash_set (bm->free_list_by_size, f->n_data_bytes, f->index);
    }

  for (i = 1; i < vec_len (vlib_mains); i++)
    {
      vlib_main_t *wvm = vlib_mains[i];
      vlib_buffer_free_list_t *wf;
      pool_get_aligned (wvm->buffer_free_list_pool,
			wf, CLIB_CACHE_LINE_BYTES);
      ASSERT (f - vm->buffer_free_list_pool ==
	      wf - wvm->buffer_free_list_pool);
      wf[0] = f[0];
      wf->buffers = 0;
      wf->n_alloc = 0;
    }

  return f->index;
}

vlib_buffer_free_list_index_t
vlib_buffer_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
			      char *fmt, ...)
{
  va_list va;
  u8 *name;

  va_start (va, fmt);
  name = va_format (0, fmt, &va);
  va_end (va);

  return vlib_buffer_create_free_list_helper (vm, n_data_bytes,
					      /* is_public */ 0,
					      /* is_default */ 0,
					      name);
}

static void
del_free_list (vlib_main_t * vm, vlib_buffer_free_list_t * f)
{
  vlib_buffer_pool_t *bp = vlib_buffer_pool_get (f->buffer_pool_index);

  vec_add_aligned (bp->buffers, f->buffers, vec_len (f->buffers),
		   CLIB_CACHE_LINE_BYTES);
  vec_free (f->name);
  vec_free (f->buffers);

  /* Poison it. */
  clib_memset (f, 0xab, sizeof (f[0]));
}

/* Delete buffer free list. */
void
vlib_buffer_delete_free_list_internal (vlib_main_t * vm,
				       vlib_buffer_free_list_index_t index)
{
  vlib_buffer_free_list_t *f;
  int i;

  ASSERT (vlib_get_thread_index () == 0);

  f = vlib_buffer_get_free_list (vm, index);

  ASSERT (vec_len (f->buffers) == f->n_alloc);

  del_free_list (vm, f);

  pool_put (vm->buffer_free_list_pool, f);

  for (i = 1; i < vec_len (vlib_mains); i++)
    {
      vlib_main_t *wvm = vlib_mains[i];
      f = vlib_buffer_get_free_list (vlib_mains[i], index);
      del_free_list (wvm, f);
      pool_put (wvm->buffer_free_list_pool, f);
    }
}

static_always_inline void *
vlib_buffer_pool_get_buffer (vlib_main_t * vm, vlib_buffer_pool_t * bp)
{
  return vlib_physmem_alloc_from_map (vm, bp->physmem_map_index,
				      bp->buffer_size, CLIB_CACHE_LINE_BYTES);
}

/* Make sure free list has at least given number of free buffers. */
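/* Buffers are taken first from the global buffer pool (under its spinlock);
   if that is not enough, new buffers are carved out of the pool's physmem
   map in chunks of at least min_n_buffers_each_alloc. */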
static uword
vlib_buffer_fill_free_list_internal (vlib_main_t * vm,
				     vlib_buffer_free_list_t * fl,
				     uword min_free_buffers)
{
  vlib_buffer_t *b;
  vlib_buffer_pool_t *bp = vlib_buffer_pool_get (fl->buffer_pool_index);
  int n;
  u32 *bi;
  u32 n_alloc = 0;

  /* Already have enough free buffers on free list? */
  n = min_free_buffers - vec_len (fl->buffers);
  if (n <= 0)
    return min_free_buffers;

  if (vec_len (bp->buffers) > 0)
    {
      int n_copy, n_left;
      clib_spinlock_lock (&bp->lock);
      n_copy = clib_min (vec_len (bp->buffers), n);
      n_left = vec_len (bp->buffers) - n_copy;
      vec_add_aligned (fl->buffers, bp->buffers + n_left, n_copy,
		       CLIB_CACHE_LINE_BYTES);
      _vec_len (bp->buffers) = n_left;
      clib_spinlock_unlock (&bp->lock);
      n = min_free_buffers - vec_len (fl->buffers);
      if (n <= 0)
	return min_free_buffers;
    }

  /* Always allocate round number of buffers. */
  n = round_pow2 (n, CLIB_CACHE_LINE_BYTES / sizeof (u32));

  /* Always allocate new buffers in reasonably large sized chunks. */
  n = clib_max (n, fl->min_n_buffers_each_alloc);

  clib_spinlock_lock (&bp->lock);
  while (n_alloc < n)
    {
      if ((b = vlib_buffer_pool_get_buffer (vm, bp)) == 0)
	goto done;

      n_alloc += 1;

      vec_add2_aligned (fl->buffers, bi, 1, CLIB_CACHE_LINE_BYTES);
      bi[0] = vlib_get_buffer_index (vm, b);

      if (CLIB_DEBUG > 0)
	vlib_buffer_set_known_state (bi[0], VLIB_BUFFER_KNOWN_FREE);

      clib_memset (b, 0, sizeof (vlib_buffer_t));
      vlib_buffer_init_for_free_list (b, fl);

      if (fl->buffer_init_function)
	fl->buffer_init_function (vm, fl, bi, 1);
    }

done:
  clib_spinlock_unlock (&bp->lock);
  fl->n_alloc += n_alloc;
  return n_alloc;
}

void *
vlib_set_buffer_free_callback (vlib_main_t * vm, void *fp)
{
  vlib_buffer_main_t *bm = &buffer_main;
  void *rv = bm->buffer_free_callback;

  bm->buffer_free_callback = fp;
  return rv;
}

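/* Free a single buffer (and optionally the rest of its chain): buffers that
   still carry extra references only have n_add_refs decremented; the last
   reference is validated (in debug images) and returned to the buffer's
   free list. */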
static_always_inline void
recycle_or_free (vlib_main_t * vm, vlib_buffer_main_t * bm, u32 bi,
		 vlib_buffer_t * b, u32 follow_buffer_next)
{
  vlib_buffer_free_list_t *fl;
  vlib_buffer_free_list_index_t fi;
  u32 flags, next;

  fl = vlib_buffer_get_buffer_free_list (vm, b, &fi);

  do
    {
      vlib_buffer_t *nb = vlib_get_buffer (vm, bi);
      flags = nb->flags;
      next = nb->next_buffer;
      if (nb->n_add_refs)
	nb->n_add_refs--;
      else
	{
	  vlib_buffer_validate_alloc_free (vm, &bi, 1,
					   VLIB_BUFFER_KNOWN_ALLOCATED);
	  vlib_buffer_add_to_free_list (vm, fl, bi, 1);
	}
      bi = next;
    }
  while (follow_buffer_next && (flags & VLIB_BUFFER_NEXT_PRESENT));
}

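/* Common free path: hand the vector to the registered free callback first
   (if any), then free four buffers per iteration while prefetching the
   headers of the next four, and finish the remainder one at a time. */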
static_always_inline void
vlib_buffer_free_inline (vlib_main_t * vm,
			 u32 * buffers, u32 n_buffers, u32 follow_buffer_next)
{
  vlib_buffer_main_t *bm = &buffer_main;
  vlib_buffer_t *p, *b0, *b1, *b2, *b3;
  int i = 0;
  u32 (*cb) (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
	     u32 follow_buffer_next);

  cb = bm->buffer_free_callback;

  if (PREDICT_FALSE (cb != 0))
    n_buffers = (*cb) (vm, buffers, n_buffers, follow_buffer_next);

  if (!n_buffers)
    return;

  while (i + 11 < n_buffers)
    {
      p = vlib_get_buffer (vm, buffers[i + 8]);
      vlib_prefetch_buffer_header (p, LOAD);
      p = vlib_get_buffer (vm, buffers[i + 9]);
      vlib_prefetch_buffer_header (p, LOAD);
      p = vlib_get_buffer (vm, buffers[i + 10]);
      vlib_prefetch_buffer_header (p, LOAD);
      p = vlib_get_buffer (vm, buffers[i + 11]);
      vlib_prefetch_buffer_header (p, LOAD);

      b0 = vlib_get_buffer (vm, buffers[i]);
      b1 = vlib_get_buffer (vm, buffers[i + 1]);
      b2 = vlib_get_buffer (vm, buffers[i + 2]);
      b3 = vlib_get_buffer (vm, buffers[i + 3]);

      VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b0);
      VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b1);
      VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b2);
      VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b3);

      recycle_or_free (vm, bm, buffers[i], b0, follow_buffer_next);
      recycle_or_free (vm, bm, buffers[i + 1], b1, follow_buffer_next);
      recycle_or_free (vm, bm, buffers[i + 2], b2, follow_buffer_next);
      recycle_or_free (vm, bm, buffers[i + 3], b3, follow_buffer_next);

      i += 4;
    }

  while (i < n_buffers)
    {
      b0 = vlib_get_buffer (vm, buffers[i]);
      VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b0);
      recycle_or_free (vm, bm, buffers[i], b0, follow_buffer_next);
      i++;
    }
}

static void
vlib_buffer_free_internal (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
{
  vlib_buffer_free_inline (vm, buffers, n_buffers, /* follow_buffer_next */
			   1);
}

static void
vlib_buffer_free_no_next_internal (vlib_main_t * vm, u32 * buffers,
				   u32 n_buffers)
{
  vlib_buffer_free_inline (vm, buffers, n_buffers, /* follow_buffer_next */
			   0);
}

/* Copy template packet data into buffers as they are allocated. */
static void __attribute__ ((unused))
vlib_packet_template_buffer_init (vlib_main_t * vm,
				  vlib_buffer_free_list_t * fl,
				  u32 * buffers, u32 n_buffers)
{
  vlib_packet_template_t *t =
    uword_to_pointer (fl->buffer_init_function_opaque,
		      vlib_packet_template_t *);
  uword i;

  for (i = 0; i < n_buffers; i++)
    {
      vlib_buffer_t *b = vlib_get_buffer (vm, buffers[i]);
      ASSERT (b->current_length == vec_len (t->packet_data));
      clib_memcpy_fast (vlib_buffer_get_current (b), t->packet_data,
			b->current_length);
    }
}

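/* Packet templates: pre-build a packet image once, then stamp copies of it
   into freshly allocated buffers.  Initialization creates a public free list
   sized for the template and, when an external buffer manager is registered,
   also forwards the call to its vlib_packet_template_init_cb.  A sketch of
   typical use (pkt, t and bi are hypothetical caller-provided names):

     vlib_packet_template_init (vm, &t, pkt, sizeof (pkt), VLIB_FRAME_SIZE,
                                "my-template");
     void *data = vlib_packet_template_get_packet (vm, &t, &bi);
*/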
void
vlib_packet_template_init (vlib_main_t * vm,
			   vlib_packet_template_t * t,
			   void *packet_data,
			   uword n_packet_data_bytes,
			   uword min_n_buffers_each_alloc, char *fmt, ...)
{
  vlib_buffer_main_t *bm = &buffer_main;
  va_list va;
  u8 *name;
  vlib_buffer_free_list_t *fl;

  va_start (va, fmt);
  name = va_format (0, fmt, &va);
  va_end (va);

  if (bm->cb.vlib_packet_template_init_cb)
    bm->cb.vlib_packet_template_init_cb (vm, (void *) t, packet_data,
					 n_packet_data_bytes,
					 min_n_buffers_each_alloc, name);

  vlib_worker_thread_barrier_sync (vm);

  clib_memset (t, 0, sizeof (t[0]));

  vec_add (t->packet_data, packet_data, n_packet_data_bytes);
  t->min_n_buffers_each_alloc = min_n_buffers_each_alloc;

  t->free_list_index = vlib_buffer_create_free_list_helper
    (vm, n_packet_data_bytes,
     /* is_public */ 1,
     /* is_default */ 0,
     name);

  ASSERT (t->free_list_index != 0);
  fl = vlib_buffer_get_free_list (vm, t->free_list_index);
  fl->min_n_buffers_each_alloc = t->min_n_buffers_each_alloc;

  fl->buffer_init_function = vlib_packet_template_buffer_init;
  fl->buffer_init_function_opaque = pointer_to_uword (t);

  fl->buffer_init_template.current_data = 0;
  fl->buffer_init_template.current_length = n_packet_data_bytes;
  fl->buffer_init_template.flags = 0;
  fl->buffer_init_template.n_add_refs = 0;
  vlib_worker_thread_barrier_release (vm);
}

void *
vlib_packet_template_get_packet (vlib_main_t * vm,
				 vlib_packet_template_t * t, u32 * bi_result)
{
  u32 bi;
  vlib_buffer_t *b;

  if (vlib_buffer_alloc (vm, &bi, 1) != 1)
    return 0;

  *bi_result = bi;

  b = vlib_get_buffer (vm, bi);
  clib_memcpy_fast (vlib_buffer_get_current (b),
		    t->packet_data, vec_len (t->packet_data));
  b->current_length = vec_len (t->packet_data);

  return b->data;
}

void
vlib_packet_template_get_packet_helper (vlib_main_t * vm,
					vlib_packet_template_t * t)
{
  word n = t->min_n_buffers_each_alloc;
  word l = vec_len (t->packet_data);
  word n_alloc;

  ASSERT (l > 0);
  ASSERT (vec_len (t->free_buffers) == 0);

  vec_validate (t->free_buffers, n - 1);
  n_alloc = vlib_buffer_alloc_from_free_list (vm, t->free_buffers,
					      n, t->free_list_index);
  _vec_len (t->free_buffers) = n_alloc;
}

/* Append given data to end of buffer, possibly allocating new buffers. */
u32
vlib_buffer_add_data (vlib_main_t * vm,
		      vlib_buffer_free_list_index_t free_list_index,
		      u32 buffer_index, void *data, u32 n_data_bytes)
{
  u32 n_buffer_bytes, n_left, n_left_this_buffer, bi;
  vlib_buffer_t *b;
  void *d;

  bi = buffer_index;
  if (bi == ~0
      && 1 != vlib_buffer_alloc_from_free_list (vm, &bi, 1, free_list_index))
    goto out_of_buffers;

  d = data;
  n_left = n_data_bytes;
  n_buffer_bytes = vlib_buffer_free_list_buffer_size (vm, free_list_index);

  b = vlib_get_buffer (vm, bi);
  b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;

  /* Get to the end of the chain before we try to append data... */
  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
    b = vlib_get_buffer (vm, b->next_buffer);

  while (1)
    {
      u32 n;

      ASSERT (n_buffer_bytes >= b->current_length);
      n_left_this_buffer =
	n_buffer_bytes - (b->current_data + b->current_length);
      n = clib_min (n_left_this_buffer, n_left);
      clib_memcpy_fast (vlib_buffer_get_current (b) + b->current_length, d,
			n);
      b->current_length += n;
      n_left -= n;
      if (n_left == 0)
	break;

      d += n;
      if (1 !=
	  vlib_buffer_alloc_from_free_list (vm, &b->next_buffer, 1,
					    free_list_index))
	goto out_of_buffers;

      b->flags |= VLIB_BUFFER_NEXT_PRESENT;

      b = vlib_get_buffer (vm, b->next_buffer);
    }

  return bi;

out_of_buffers:
  clib_error ("out of buffers");
  return bi;
}

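/* Append data to an existing buffer chain, growing the chain with buffers
   from the given free list as needed.  Returns the number of bytes actually
   copied, which may be less than data_len if buffer allocation fails.
   Hypothetical use, with first/last pointing at an existing chain:

     u16 n = vlib_buffer_chain_append_data_with_alloc
       (vm, VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX, first, &last, data, len);
*/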
u16
vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
					  vlib_buffer_free_list_index_t
					  free_list_index,
					  vlib_buffer_t * first,
					  vlib_buffer_t ** last, void *data,
					  u16 data_len)
{
  vlib_buffer_t *l = *last;
  u32 n_buffer_bytes =
    vlib_buffer_free_list_buffer_size (vm, free_list_index);
  u16 copied = 0;
  ASSERT (n_buffer_bytes >= l->current_length + l->current_data);
  while (data_len)
    {
      u16 max = n_buffer_bytes - l->current_length - l->current_data;
      if (max == 0)
	{
	  if (1 !=
	      vlib_buffer_alloc_from_free_list (vm, &l->next_buffer, 1,
						free_list_index))
	    return copied;
	  *last = l = vlib_buffer_chain_buffer (vm, l, l->next_buffer);
	  max = n_buffer_bytes - l->current_length - l->current_data;
	}

      u16 len = (data_len > max) ? max : data_len;
      clib_memcpy_fast (vlib_buffer_get_current (l) + l->current_length,
			data + copied, len);
      vlib_buffer_chain_increase_length (first, l, len);
      data_len -= len;
      copied += len;
    }
  return copied;
}

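/* Register a physmem map as a buffer pool.  All registered pools must fall
   inside a single [buffer_mem_start, buffer_mem_start + buffer_mem_size)
   window, which is grown as maps are added and is limited to
   2^(32 + CLIB_LOG2_CACHE_LINE_BYTES) bytes so buffer indices fit in 32 bits. */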
u8
vlib_buffer_register_physmem_map (vlib_main_t * vm, u32 physmem_map_index)
{
  vlib_buffer_main_t *bm = &buffer_main;
  vlib_buffer_pool_t *p;
  vlib_physmem_map_t *m = vlib_physmem_get_map (vm, physmem_map_index);
  uword start = pointer_to_uword (m->base);
  uword size = (uword) m->n_pages << m->log2_page_size;

  if (bm->buffer_mem_size == 0)
    {
      bm->buffer_mem_start = start;
      bm->buffer_mem_size = size;
    }
  else if (start < bm->buffer_mem_start)
    {
      bm->buffer_mem_size += bm->buffer_mem_start - start;
      bm->buffer_mem_start = start;
      if (size > bm->buffer_mem_size)
	bm->buffer_mem_size = size;
    }
  else if (start > bm->buffer_mem_start)
    {
      uword new_size = start - bm->buffer_mem_start + size;
      if (new_size > bm->buffer_mem_size)
	bm->buffer_mem_size = new_size;
    }

  if ((u64) bm->buffer_mem_size >
      ((u64) 1 << (32 + CLIB_LOG2_CACHE_LINE_BYTES)))
    {
      clib_panic ("buffer memory size out of range!");
    }

  vec_add2 (bm->buffer_pools, p, 1);
  p->start = start;
  p->size = size;
  p->physmem_map_index = physmem_map_index;

  ASSERT (p - bm->buffer_pools < 256);
  return p - bm->buffer_pools;
}

static u8 *
format_vlib_buffer_free_list (u8 * s, va_list * va)
{
  vlib_buffer_free_list_t *f = va_arg (*va, vlib_buffer_free_list_t *);
  u32 threadnum = va_arg (*va, u32);
  uword bytes_alloc, bytes_free, n_free, size;

  if (!f)
    return format (s, "%=7s%=30s%=12s%=12s%=12s%=12s%=12s%=12s",
		   "Thread", "Name", "Index", "Size", "Alloc", "Free",
		   "#Alloc", "#Free");

  size = sizeof (vlib_buffer_t) + f->n_data_bytes;
  n_free = vec_len (f->buffers);
  bytes_alloc = size * f->n_alloc;
  bytes_free = size * n_free;

  s = format (s, "%7d%30v%12d%12d%=12U%=12U%=12d%=12d", threadnum,
	      f->name, f->index, f->n_data_bytes,
	      format_memory_size, bytes_alloc,
	      format_memory_size, bytes_free, f->n_alloc, n_free);

  return s;
}

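/* CLI handler for "show buffers": walks every thread's free-list pool and
   prints one line per free list (thread, name, index, size, allocated/free
   bytes and counts) using format_vlib_buffer_free_list above. */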
static clib_error_t *
show_buffers (vlib_main_t * vm,
	      unformat_input_t * input, vlib_cli_command_t * cmd)
{
  vlib_buffer_free_list_t *f;
  vlib_main_t *curr_vm;
  u32 vm_index = 0;

  vlib_cli_output (vm, "%U", format_vlib_buffer_free_list, 0, 0);

  do
    {
      curr_vm = vlib_mains[vm_index];

      /* *INDENT-OFF* */
      pool_foreach (f, curr_vm->buffer_free_list_pool, ({
        vlib_cli_output (vm, "%U", format_vlib_buffer_free_list, f, vm_index);
      }));
      /* *INDENT-ON* */

      vm_index++;
    }
  while (vm_index < vec_len (vlib_mains));

  return 0;
}

/* *INDENT-OFF* */
VLIB_CLI_COMMAND (show_buffers_command, static) = {
  .path = "show buffers",
  .short_help = "Show packet buffer allocation",
  .function = show_buffers,
};
/* *INDENT-ON* */

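/* One-time buffer subsystem initialization.  If an external buffer manager
   (e.g. a DPDK-based plugin) has registered callbacks, adopt them and return;
   otherwise install the native callbacks and create the "buffers" physmem
   map, first trying hugepages and falling back to normal pages with a
   warning if that fails. */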
clib_error_t *
vlib_buffer_main_init (struct vlib_main_t * vm)
{
  vlib_buffer_main_t *bm = &buffer_main;
  clib_error_t *error;
  u32 physmem_map_index;
  u8 pool_index;
  int log2_page_size = 0;

  buffer_log_default = vlib_log_register_class ("buffer", 0);

  if (vlib_buffer_callbacks)
    {
      /* external plugin has registered own buffer callbacks
         so we just copy them and quit */
      clib_memcpy_fast (&bm->cb, vlib_buffer_callbacks,
			sizeof (vlib_buffer_callbacks_t));
      bm->callbacks_registered = 1;
      return 0;
    }

  bm->cb.vlib_buffer_fill_free_list_cb = &vlib_buffer_fill_free_list_internal;
  bm->cb.vlib_buffer_free_cb = &vlib_buffer_free_internal;
  bm->cb.vlib_buffer_free_no_next_cb = &vlib_buffer_free_no_next_internal;
  bm->cb.vlib_buffer_delete_free_list_cb =
    &vlib_buffer_delete_free_list_internal;
  clib_spinlock_init (&bm->buffer_known_hash_lockp);

retry:
  error = vlib_physmem_shared_map_create (vm, "buffers",
					  vlib_buffer_physmem_sz,
					  log2_page_size,
					  CLIB_PMALLOC_NUMA_LOCAL,
					  &physmem_map_index);

  if (error && log2_page_size == 0)
    {
      vlib_log_warn (buffer_log_default, "%U", format_clib_error, error);
      clib_error_free (error);
      vlib_log_warn (buffer_log_default, "falling back to non-hugepage "
		     "backed buffer pool");
      log2_page_size = min_log2 (clib_mem_get_page_size ());
      goto retry;
    }

  if (error)
    return error;

  pool_index = vlib_buffer_register_physmem_map (vm, physmem_map_index);
  vlib_buffer_pool_t *bp = vlib_buffer_pool_get (pool_index);
  clib_spinlock_init (&bp->lock);
  bp->buffer_size = VLIB_BUFFER_DEFAULT_FREE_LIST_BYTES +
    sizeof (vlib_buffer_t);

  return 0;
}

static clib_error_t *
vlib_buffers_configure (vlib_main_t * vm, unformat_input_t * input)
{
  u32 size_in_mb;

  while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
    {
      if (unformat (input, "memory-size-in-mb %d", &size_in_mb))
	vlib_buffer_physmem_sz = size_in_mb << 20;
      else
	return unformat_parse_error (input);
    }

  unformat_free (input);
  return 0;
}

VLIB_EARLY_CONFIG_FUNCTION (vlib_buffers_configure, "buffers");


/** @endcond */
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */