/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * buffer.c: allocate/free network buffers.
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

/**
 * @file
 *
 * Allocate/free network buffers.
 */

#include <vlib/vlib.h>
#include <vlib/unix/unix.h>

vlib_buffer_callbacks_t *vlib_buffer_callbacks = 0;

/* When running unprivileged we are limited by RLIMIT_MEMLOCK, which is
   typically set to 16 MB, so default the buffer memory size to 14 MB. */
static u32 vlib_buffer_physmem_sz = 14 << 20;

vlib_buffer_main_t buffer_main;

/* logging */
static vlib_log_class_t buffer_log_default;

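/* Walk a buffer chain, total up the length of every segment after the first
   one, cache the result in total_length_not_including_first_buffer and mark
   it valid, then return the full chain length including the first segment. */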
uword
vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
                                       vlib_buffer_t * b_first)
{
  vlib_buffer_t *b = b_first;
  uword l_first = b_first->current_length;
  uword l = 0;
  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      b = vlib_get_buffer (vm, b->next_buffer);
      l += b->current_length;
    }
  b_first->total_length_not_including_first_buffer = l;
  b_first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
  return l + l_first;
}

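/* format callback: print a vlib_buffer_t, including its flag names, current
   data offset/length, free-list index, clone count and, for chained buffers,
   one line per following segment. */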
u8 *
format_vlib_buffer (u8 * s, va_list * args)
{
  vlib_buffer_t *b = va_arg (*args, vlib_buffer_t *);
  u32 indent = format_get_indent (s);
  u8 *a = 0;

#define _(bit, name, v) \
  if (v && (b->flags & VLIB_BUFFER_##name)) \
    a = format (a, "%s ", v);
  foreach_vlib_buffer_flag
#undef _
    s = format (s, "current data %d, length %d, free-list %d, clone-count %u",
                b->current_data, b->current_length,
                vlib_buffer_get_free_list_index (b), b->n_add_refs);

  if (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
    s = format (s, ", totlen-nifb %d",
                b->total_length_not_including_first_buffer);

  if (b->flags & VLIB_BUFFER_IS_TRACED)
    s = format (s, ", trace 0x%x", b->trace_index);

  if (a)
    s = format (s, "\n%U%v", format_white_space, indent, a);
  vec_free (a);

  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      vlib_main_t *vm = vlib_get_main ();
      u32 next_buffer = b->next_buffer;
      b = vlib_get_buffer (vm, next_buffer);

      s =
        format (s, "\n%Unext-buffer 0x%x, segment length %d, clone-count %u",
                format_white_space, indent, next_buffer, b->current_length,
                b->n_add_refs);
    }

  return s;
}

u8 *
format_vlib_buffer_and_data (u8 * s, va_list * args)
{
  vlib_buffer_t *b = va_arg (*args, vlib_buffer_t *);

  s = format (s, "%U, %U",
              format_vlib_buffer, b,
              format_hex_bytes, vlib_buffer_get_current (b), 64);

  return s;
}

static u8 *
format_vlib_buffer_known_state (u8 * s, va_list * args)
{
  vlib_buffer_known_state_t state = va_arg (*args, vlib_buffer_known_state_t);
  char *t;

  switch (state)
    {
    case VLIB_BUFFER_UNKNOWN:
      t = "unknown";
      break;

    case VLIB_BUFFER_KNOWN_ALLOCATED:
      t = "known-allocated";
      break;

    case VLIB_BUFFER_KNOWN_FREE:
      t = "known-free";
      break;

    default:
      t = "invalid";
      break;
    }

  return format (s, "%s", t);
}

u8 *
format_vlib_buffer_contents (u8 * s, va_list * va)
{
  vlib_main_t *vm = va_arg (*va, vlib_main_t *);
  vlib_buffer_t *b = va_arg (*va, vlib_buffer_t *);

  while (1)
    {
      vec_add (s, vlib_buffer_get_current (b), b->current_length);
      if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
        break;
      b = vlib_get_buffer (vm, b->next_buffer);
    }

  return s;
}

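/* Validate a single buffer: check that its free-list index exists, that
   current_data/current_length stay inside the buffer data area and, when
   follow_buffer_next is set, that any chained buffer is known-allocated and
   not a duplicate.  Returns 0 on success or a formatted error message. */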
static u8 *
vlib_validate_buffer_helper (vlib_main_t * vm,
                             u32 bi,
                             uword follow_buffer_next, uword ** unique_hash)
{
  vlib_buffer_t *b = vlib_get_buffer (vm, bi);
  vlib_buffer_free_list_t *fl;

  if (pool_is_free_index
      (vm->buffer_free_list_pool, vlib_buffer_get_free_list_index (b)))
    return format (0, "unknown free list 0x%x",
                   vlib_buffer_get_free_list_index (b));

  fl =
    pool_elt_at_index (vm->buffer_free_list_pool,
                       vlib_buffer_get_free_list_index (b));

  if ((signed) b->current_data < (signed) -VLIB_BUFFER_PRE_DATA_SIZE)
    return format (0, "current data %d before pre-data", b->current_data);

  if (b->current_data + b->current_length > fl->n_data_bytes)
    return format (0, "%d-%d beyond end of buffer %d",
                   b->current_data, b->current_length, fl->n_data_bytes);

  if (follow_buffer_next && (b->flags & VLIB_BUFFER_NEXT_PRESENT))
    {
      vlib_buffer_known_state_t k;
      u8 *msg, *result;

      k = vlib_buffer_is_known (b->next_buffer);
      if (k != VLIB_BUFFER_KNOWN_ALLOCATED)
        return format (0, "next 0x%x: %U",
                       b->next_buffer, format_vlib_buffer_known_state, k);

      if (unique_hash)
        {
          if (hash_get (*unique_hash, b->next_buffer))
            return format (0, "duplicate buffer 0x%x", b->next_buffer);

          hash_set1 (*unique_hash, b->next_buffer);
        }

      msg = vlib_validate_buffer (vm, b->next_buffer, follow_buffer_next);
      if (msg)
        {
          result = format (0, "next 0x%x: %v", b->next_buffer, msg);
          vec_free (msg);
          return result;
        }
    }

  return 0;
}

u8 *
vlib_validate_buffer (vlib_main_t * vm, u32 bi, uword follow_buffer_next)
{
  return vlib_validate_buffer_helper (vm, bi, follow_buffer_next,
                                      /* unique_hash */ 0);
}

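/* Validate a vector of buffer indices: every buffer must be unique, must be
   in the expected known state (allocated or free) and must pass the
   per-buffer checks above.  Returns 0 on success, or a formatted error
   message naming the offending buffer index. */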
u8 *
vlib_validate_buffers (vlib_main_t * vm,
                       u32 * buffers,
                       uword next_buffer_stride,
                       uword n_buffers,
                       vlib_buffer_known_state_t known_state,
                       uword follow_buffer_next)
{
  uword i, *hash;
  u32 bi, *b = buffers;
  vlib_buffer_known_state_t k;
  u8 *msg = 0, *result = 0;

  hash = hash_create (0, 0);
  for (i = 0; i < n_buffers; i++)
    {
      bi = b[0];
      b += next_buffer_stride;

      /* Buffer is not unique. */
      if (hash_get (hash, bi))
        {
          msg = format (0, "not unique");
          goto done;
        }

      k = vlib_buffer_is_known (bi);
      if (k != known_state)
        {
          msg = format (0, "is %U; expected %U",
                        format_vlib_buffer_known_state, k,
                        format_vlib_buffer_known_state, known_state);
          goto done;
        }

      msg = vlib_validate_buffer_helper (vm, bi, follow_buffer_next, &hash);
      if (msg)
        goto done;

      hash_set1 (hash, bi);
    }

done:
  if (msg)
    {
      result = format (0, "0x%x: %v", bi, msg);
      vec_free (msg);
    }
  hash_free (hash);
  return result;
}

/*
 * Hand-craft a static vector w/ length 1, so vec_len(vlib_mains) = 1
 * and vlib_mains[0] = &vlib_global_main from the beginning of time.
 *
 * The only place which should ever expand vlib_mains is start_workers()
 * in threads.c. It knows about the bootstrap vector.
 */
/* *INDENT-OFF* */
static struct
{
  vec_header_t h;
  vlib_main_t *vm;
} __attribute__ ((packed)) __bootstrap_vlib_main_vector
  __attribute__ ((aligned (CLIB_CACHE_LINE_BYTES))) =
{
  .h.len = 1,
  .vm = &vlib_global_main,
};
/* *INDENT-ON* */

vlib_main_t **vlib_mains = &__bootstrap_vlib_main_vector.vm;


/* When debugging, validate that the given buffers are either all known
   allocated or all known free. */
void
vlib_buffer_validate_alloc_free (vlib_main_t * vm,
                                 u32 * buffers,
                                 uword n_buffers,
                                 vlib_buffer_known_state_t expected_state)
{
  u32 *b;
  uword i, bi, is_free;

  if (CLIB_DEBUG == 0)
    return;

  if (vlib_buffer_callbacks)
    return;

  is_free = expected_state == VLIB_BUFFER_KNOWN_ALLOCATED;
  b = buffers;
  for (i = 0; i < n_buffers; i++)
    {
      vlib_buffer_known_state_t known;

      bi = b[0];
      b += 1;
      known = vlib_buffer_is_known (bi);
      if (known != expected_state)
        {
          ASSERT (0);
          vlib_panic_with_msg
            (vm, "%s %U buffer 0x%x",
             is_free ? "freeing" : "allocating",
             format_vlib_buffer_known_state, known, bi);
        }

      vlib_buffer_set_known_state
        (bi, is_free ? VLIB_BUFFER_KNOWN_FREE : VLIB_BUFFER_KNOWN_ALLOCATED);
    }
}

/* Add buffer free list. */
static vlib_buffer_free_list_index_t
vlib_buffer_create_free_list_helper (vlib_main_t * vm,
                                     u32 n_data_bytes,
                                     u32 is_public, u32 is_default, u8 * name)
{
  vlib_buffer_main_t *bm = &buffer_main;
  vlib_buffer_free_list_t *f;
  int i;

  ASSERT (vlib_get_thread_index () == 0);

  if (!is_default && pool_elts (vm->buffer_free_list_pool) == 0)
    {
      vlib_buffer_free_list_index_t default_free_free_list_index;

      /* *INDENT-OFF* */
      default_free_free_list_index =
        vlib_buffer_create_free_list_helper
        (vm,
         /* default buffer size */ VLIB_BUFFER_DEFAULT_FREE_LIST_BYTES,
         /* is_public */ 1,
         /* is_default */ 1,
         (u8 *) "default");
      /* *INDENT-ON* */
      ASSERT (default_free_free_list_index ==
              VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);

      if (n_data_bytes == VLIB_BUFFER_DEFAULT_FREE_LIST_BYTES && is_public)
        return default_free_free_list_index;
    }

  pool_get_aligned (vm->buffer_free_list_pool, f, CLIB_CACHE_LINE_BYTES);

  clib_memset (f, 0, sizeof (f[0]));
  f->index = f - vm->buffer_free_list_pool;
  vec_validate (f->buffers, 0);
  vec_reset_length (f->buffers);
  f->n_data_bytes = vlib_buffer_round_size (n_data_bytes);
  f->min_n_buffers_each_alloc = VLIB_FRAME_SIZE;
  f->buffer_pool_index = 0;
  f->name = clib_mem_is_vec (name) ? name : format (0, "%s", name);

  /* Setup free buffer template. */
  vlib_buffer_set_free_list_index (&f->buffer_init_template, f->index);
  f->buffer_init_template.n_add_refs = 0;

  if (is_public)
    {
      uword *p = hash_get (bm->free_list_by_size, f->n_data_bytes);
      if (!p)
        hash_set (bm->free_list_by_size, f->n_data_bytes, f->index);
    }

  for (i = 1; i < vec_len (vlib_mains); i++)
    {
      vlib_main_t *wvm = vlib_mains[i];
      vlib_buffer_free_list_t *wf;
      pool_get_aligned (wvm->buffer_free_list_pool,
                        wf, CLIB_CACHE_LINE_BYTES);
      ASSERT (f - vm->buffer_free_list_pool ==
              wf - wvm->buffer_free_list_pool);
      wf[0] = f[0];
      wf->buffers = 0;
      vec_validate (wf->buffers, 0);
      vec_reset_length (wf->buffers);
      wf->n_alloc = 0;
    }

  return f->index;
}

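/* Public entry point: create a named free list of buffers with the given
   data size.  The name is a printf-style format string.  A hypothetical
   caller (illustrative only, not part of this file) might do:

     vlib_buffer_free_list_index_t fi =
       vlib_buffer_create_free_list (vm, 2048, "my-feature pool %d", 0);

   and later allocate from it with vlib_buffer_alloc_from_free_list(). */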
vlib_buffer_free_list_index_t
vlib_buffer_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
                              char *fmt, ...)
{
  va_list va;
  u8 *name;

  va_start (va, fmt);
  name = va_format (0, fmt, &va);
  va_end (va);

  return vlib_buffer_create_free_list_helper (vm, n_data_bytes,
                                              /* is_public */ 0,
                                              /* is_default */ 0,
                                              name);
}

static void
del_free_list (vlib_main_t * vm, vlib_buffer_free_list_t * f)
{
  vlib_buffer_pool_t *bp = vlib_buffer_pool_get (f->buffer_pool_index);

  vec_add_aligned (bp->buffers, f->buffers, vec_len (f->buffers),
                   CLIB_CACHE_LINE_BYTES);
  vec_free (f->name);
  vec_free (f->buffers);

  /* Poison it. */
  clib_memset (f, 0xab, sizeof (f[0]));
}

/* Delete a buffer free list, returning its buffers to the buffer pool. */
void
vlib_buffer_delete_free_list_internal (vlib_main_t * vm,
                                       vlib_buffer_free_list_index_t index)
{
  vlib_buffer_free_list_t *f;
  int i;

  ASSERT (vlib_get_thread_index () == 0);

  f = vlib_buffer_get_free_list (vm, index);

  ASSERT (vec_len (f->buffers) == f->n_alloc);

  del_free_list (vm, f);

  pool_put (vm->buffer_free_list_pool, f);

  for (i = 1; i < vec_len (vlib_mains); i++)
    {
      vlib_main_t *wvm = vlib_mains[i];
      f = vlib_buffer_get_free_list (vlib_mains[i], index);
      del_free_list (wvm, f);
      pool_put (wvm->buffer_free_list_pool, f);
    }
}

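/* Carve one buffer's worth of memory (header + data) out of the pool's
   physmem map, cache-line aligned.  Returns 0 on allocation failure. */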
static_always_inline void *
vlib_buffer_pool_get_buffer (vlib_main_t * vm, vlib_buffer_pool_t * bp)
{
  return vlib_physmem_alloc_from_map (vm, bp->physmem_map_index,
                                      bp->buffer_size, CLIB_CACHE_LINE_BYTES);
}

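/* Refill strategy: first take cached buffers from the global buffer pool
   under its spinlock; only if that is not enough, allocate fresh buffers
   from physmem in cache-line rounded, reasonably large chunks and
   initialize them for this free list. */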
/* Make sure free list has at least given number of free buffers. */
static uword
vlib_buffer_fill_free_list_internal (vlib_main_t * vm,
                                     vlib_buffer_free_list_t * fl,
                                     uword min_free_buffers)
{
  vlib_buffer_t *b;
  vlib_buffer_pool_t *bp = vlib_buffer_pool_get (fl->buffer_pool_index);
  int n;
  u32 *bi;
  u32 n_alloc = 0;

  /* Already have enough free buffers on free list? */
  n = min_free_buffers - vec_len (fl->buffers);
  if (n <= 0)
    return min_free_buffers;

  if (vec_len (bp->buffers) > 0)
    {
      int n_copy, n_left;
      clib_spinlock_lock (&bp->lock);
      n_copy = clib_min (vec_len (bp->buffers), n);
      n_left = vec_len (bp->buffers) - n_copy;
      vec_add_aligned (fl->buffers, bp->buffers + n_left, n_copy,
                       CLIB_CACHE_LINE_BYTES);
      _vec_len (bp->buffers) = n_left;
      clib_spinlock_unlock (&bp->lock);
      n = min_free_buffers - vec_len (fl->buffers);
      if (n <= 0)
        return min_free_buffers;
    }

  /* Always allocate round number of buffers. */
  n = round_pow2 (n, CLIB_CACHE_LINE_BYTES / sizeof (u32));

  /* Always allocate new buffers in reasonably large sized chunks. */
  n = clib_max (n, fl->min_n_buffers_each_alloc);

  clib_spinlock_lock (&bp->lock);
  while (n_alloc < n)
    {
      if ((b = vlib_buffer_pool_get_buffer (vm, bp)) == 0)
        goto done;

      n_alloc += 1;

      vec_add2_aligned (fl->buffers, bi, 1, CLIB_CACHE_LINE_BYTES);
      bi[0] = vlib_get_buffer_index (vm, b);

      if (CLIB_DEBUG > 0)
        vlib_buffer_set_known_state (bi[0], VLIB_BUFFER_KNOWN_FREE);

      clib_memset (b, 0, sizeof (vlib_buffer_t));
      vlib_buffer_init_for_free_list (b, fl);

      if (fl->buffer_init_function)
        fl->buffer_init_function (vm, fl, bi, 1);
    }

done:
  clib_spinlock_unlock (&bp->lock);
  fl->n_alloc += n_alloc;
  return n_alloc;
}

void *
vlib_set_buffer_free_callback (vlib_main_t * vm, void *fp)
{
  vlib_buffer_main_t *bm = &buffer_main;
  void *rv = bm->buffer_free_callback;

  bm->buffer_free_callback = fp;
  return rv;
}

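/* Free one buffer (and optionally the rest of its chain): each segment with
   a non-zero clone reference count just has the count decremented; segments
   with no outstanding clones are returned to their free list. */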
static_always_inline void
recycle_or_free (vlib_main_t * vm, vlib_buffer_main_t * bm, u32 bi,
                 vlib_buffer_t * b, u32 follow_buffer_next)
{
  vlib_buffer_free_list_t *fl;
  vlib_buffer_free_list_index_t fi;
  u32 flags, next;

  fl = vlib_buffer_get_buffer_free_list (vm, b, &fi);

  do
    {
      vlib_buffer_t *nb = vlib_get_buffer (vm, bi);
      flags = nb->flags;
      next = nb->next_buffer;
      if (nb->n_add_refs)
        nb->n_add_refs--;
      else
        {
          vlib_buffer_validate_alloc_free (vm, &bi, 1,
                                           VLIB_BUFFER_KNOWN_ALLOCATED);
          vlib_buffer_add_to_free_list (vm, fl, bi, 1);
        }
      bi = next;
    }
  while (follow_buffer_next && (flags & VLIB_BUFFER_NEXT_PRESENT));
}

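/* Free a vector of buffers.  An external buffer manager may override this
   via bm->buffer_free_callback.  The main loop processes four buffers per
   iteration while prefetching the headers of the next four to hide memory
   latency; a scalar tail loop handles the remainder. */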
static_always_inline void
vlib_buffer_free_inline (vlib_main_t * vm,
                         u32 * buffers, u32 n_buffers, u32 follow_buffer_next)
{
  vlib_buffer_main_t *bm = &buffer_main;
  vlib_buffer_t *p, *b0, *b1, *b2, *b3;
  int i = 0;
  u32 (*cb) (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
             u32 follow_buffer_next);

  cb = bm->buffer_free_callback;

  if (PREDICT_FALSE (cb != 0))
    n_buffers = (*cb) (vm, buffers, n_buffers, follow_buffer_next);

  if (!n_buffers)
    return;

  while (i + 11 < n_buffers)
    {
      p = vlib_get_buffer (vm, buffers[i + 8]);
      vlib_prefetch_buffer_header (p, LOAD);
      p = vlib_get_buffer (vm, buffers[i + 9]);
      vlib_prefetch_buffer_header (p, LOAD);
      p = vlib_get_buffer (vm, buffers[i + 10]);
      vlib_prefetch_buffer_header (p, LOAD);
      p = vlib_get_buffer (vm, buffers[i + 11]);
      vlib_prefetch_buffer_header (p, LOAD);

      b0 = vlib_get_buffer (vm, buffers[i]);
      b1 = vlib_get_buffer (vm, buffers[i + 1]);
      b2 = vlib_get_buffer (vm, buffers[i + 2]);
      b3 = vlib_get_buffer (vm, buffers[i + 3]);

      VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b0);
      VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b1);
      VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b2);
      VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b3);

      recycle_or_free (vm, bm, buffers[i], b0, follow_buffer_next);
      recycle_or_free (vm, bm, buffers[i + 1], b1, follow_buffer_next);
      recycle_or_free (vm, bm, buffers[i + 2], b2, follow_buffer_next);
      recycle_or_free (vm, bm, buffers[i + 3], b3, follow_buffer_next);

      i += 4;
    }

  while (i < n_buffers)
    {
      b0 = vlib_get_buffer (vm, buffers[i]);
      VLIB_BUFFER_TRACE_TRAJECTORY_INIT (b0);
      recycle_or_free (vm, bm, buffers[i], b0, follow_buffer_next);
      i++;
    }
}

static void
vlib_buffer_free_internal (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
{
  vlib_buffer_free_inline (vm, buffers, n_buffers, /* follow_buffer_next */
                           1);
}

static void
vlib_buffer_free_no_next_internal (vlib_main_t * vm, u32 * buffers,
                                   u32 n_buffers)
{
  vlib_buffer_free_inline (vm, buffers, n_buffers, /* follow_buffer_next */
                           0);
}

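/* Packet templates: capture an immutable packet image once, then stamp it
   into freshly allocated buffers on demand.  A hypothetical user
   (illustrative only; 'pkt' and 'keepalive_template' are not part of this
   file) might do:

     static vlib_packet_template_t keepalive_template;
     vlib_packet_template_init (vm, &keepalive_template, pkt, sizeof (pkt),
                                VLIB_FRAME_SIZE, "keepalive");
     ...
     u32 bi;
     void *p =
       vlib_packet_template_get_packet (vm, &keepalive_template, &bi);
*/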
void
vlib_packet_template_init (vlib_main_t * vm,
                           vlib_packet_template_t * t,
                           void *packet_data,
                           uword n_packet_data_bytes,
                           uword min_n_buffers_each_alloc, char *fmt, ...)
{
  va_list va;
  u8 *name;

  va_start (va, fmt);
  name = va_format (0, fmt, &va);
  va_end (va);

  vlib_worker_thread_barrier_sync (vm);

  clib_memset (t, 0, sizeof (t[0]));
  t->name = name;

  vec_add (t->packet_data, packet_data, n_packet_data_bytes);
  t->min_n_buffers_each_alloc = min_n_buffers_each_alloc;

  vlib_worker_thread_barrier_release (vm);
}

void *
vlib_packet_template_get_packet (vlib_main_t * vm,
                                 vlib_packet_template_t * t, u32 * bi_result)
{
  u32 bi;
  vlib_buffer_t *b;

  if (vlib_buffer_alloc (vm, &bi, 1) != 1)
    return 0;

  *bi_result = bi;

  b = vlib_get_buffer (vm, bi);
  clib_memcpy_fast (vlib_buffer_get_current (b),
                    t->packet_data, vec_len (t->packet_data));
  b->current_length = vec_len (t->packet_data);

  return b->data;
}

/* Append given data to end of buffer, possibly allocating new buffers. */
u32
vlib_buffer_add_data (vlib_main_t * vm,
                      vlib_buffer_free_list_index_t free_list_index,
                      u32 buffer_index, void *data, u32 n_data_bytes)
{
  u32 n_buffer_bytes, n_left, n_left_this_buffer, bi;
  vlib_buffer_t *b;
  void *d;

  bi = buffer_index;
  if (bi == ~0
      && 1 != vlib_buffer_alloc_from_free_list (vm, &bi, 1, free_list_index))
    goto out_of_buffers;

  d = data;
  n_left = n_data_bytes;
  n_buffer_bytes = vlib_buffer_free_list_buffer_size (vm, free_list_index);

  b = vlib_get_buffer (vm, bi);
  b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;

  /* Get to the end of the chain before we try to append data... */
  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
    b = vlib_get_buffer (vm, b->next_buffer);

  while (1)
    {
      u32 n;

      ASSERT (n_buffer_bytes >= b->current_length);
      n_left_this_buffer =
        n_buffer_bytes - (b->current_data + b->current_length);
      n = clib_min (n_left_this_buffer, n_left);
      clib_memcpy_fast (vlib_buffer_get_current (b) + b->current_length, d,
                        n);
      b->current_length += n;
      n_left -= n;
      if (n_left == 0)
        break;

      d += n;
      if (1 !=
          vlib_buffer_alloc_from_free_list (vm, &b->next_buffer, 1,
                                            free_list_index))
        goto out_of_buffers;

      b->flags |= VLIB_BUFFER_NEXT_PRESENT;

      b = vlib_get_buffer (vm, b->next_buffer);
    }

  return bi;

out_of_buffers:
  clib_error ("out of buffers");
  return bi;
}

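/* Append up to data_len bytes to an existing buffer chain, allocating
   additional buffers from the given free list as segments fill up.  *last
   is advanced to the new tail segment; returns the number of bytes actually
   copied, which is less than data_len only if allocation fails. */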
u16
vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
                                          vlib_buffer_free_list_index_t
                                          free_list_index,
                                          vlib_buffer_t * first,
                                          vlib_buffer_t ** last, void *data,
                                          u16 data_len)
{
  vlib_buffer_t *l = *last;
  u32 n_buffer_bytes =
    vlib_buffer_free_list_buffer_size (vm, free_list_index);
  u16 copied = 0;
  ASSERT (n_buffer_bytes >= l->current_length + l->current_data);
  while (data_len)
    {
      u16 max = n_buffer_bytes - l->current_length - l->current_data;
      if (max == 0)
        {
          if (1 !=
              vlib_buffer_alloc_from_free_list (vm, &l->next_buffer, 1,
                                                free_list_index))
            return copied;
          *last = l = vlib_buffer_chain_buffer (vm, l, l->next_buffer);
          max = n_buffer_bytes - l->current_length - l->current_data;
        }

      u16 len = (data_len > max) ? max : data_len;
      clib_memcpy_fast (vlib_buffer_get_current (l) + l->current_length,
                        data + copied, len);
      vlib_buffer_chain_increase_length (first, l, len);
      data_len -= len;
      copied += len;
    }
  return copied;
}

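/* Register a physmem map as a buffer pool.  This grows the tracked
   [buffer_mem_start, buffer_mem_start + buffer_mem_size) range to cover the
   new map and panics if that range can no longer be addressed by a 32-bit,
   cache-line granular buffer index.  Returns the new buffer pool index. */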
u8
vlib_buffer_register_physmem_map (vlib_main_t * vm, u32 physmem_map_index)
{
  vlib_buffer_main_t *bm = &buffer_main;
  vlib_buffer_pool_t *p;
  vlib_physmem_map_t *m = vlib_physmem_get_map (vm, physmem_map_index);
  uword start = pointer_to_uword (m->base);
  uword size = (uword) m->n_pages << m->log2_page_size;

  if (bm->buffer_mem_size == 0)
    {
      bm->buffer_mem_start = start;
      bm->buffer_mem_size = size;
    }
  else if (start < bm->buffer_mem_start)
    {
      bm->buffer_mem_size += bm->buffer_mem_start - start;
      bm->buffer_mem_start = start;
      if (size > bm->buffer_mem_size)
        bm->buffer_mem_size = size;
    }
  else if (start > bm->buffer_mem_start)
    {
      uword new_size = start - bm->buffer_mem_start + size;
      if (new_size > bm->buffer_mem_size)
        bm->buffer_mem_size = new_size;
    }

  if ((u64) bm->buffer_mem_size >
      ((u64) 1 << (32 + CLIB_LOG2_CACHE_LINE_BYTES)))
    {
      clib_panic ("buffer memory size out of range!");
    }

  vec_add2 (bm->buffer_pools, p, 1);
  p->start = start;
  p->size = size;
  p->physmem_map_index = physmem_map_index;

  ASSERT (p - bm->buffer_pools < 256);
  return p - bm->buffer_pools;
}

static u8 *
format_vlib_buffer_free_list (u8 * s, va_list * va)
{
  vlib_buffer_free_list_t *f = va_arg (*va, vlib_buffer_free_list_t *);
  u32 threadnum = va_arg (*va, u32);
  uword bytes_alloc, bytes_free, n_free, size;

  if (!f)
    return format (s, "%=7s%=30s%=12s%=12s%=12s%=12s%=12s%=12s",
                   "Thread", "Name", "Index", "Size", "Alloc", "Free",
                   "#Alloc", "#Free");

  size = sizeof (vlib_buffer_t) + f->n_data_bytes;
  n_free = vec_len (f->buffers);
  bytes_alloc = size * f->n_alloc;
  bytes_free = size * n_free;

  s = format (s, "%7d%30v%12d%12d%=12U%=12U%=12d%=12d", threadnum,
              f->name, f->index, f->n_data_bytes,
              format_memory_size, bytes_alloc,
              format_memory_size, bytes_free, f->n_alloc, n_free);

  return s;
}

static clib_error_t *
show_buffers (vlib_main_t * vm,
              unformat_input_t * input, vlib_cli_command_t * cmd)
{
  vlib_buffer_free_list_t *f;
  vlib_main_t *curr_vm;
  u32 vm_index = 0;

  vlib_cli_output (vm, "%U", format_vlib_buffer_free_list, 0, 0);

  do
    {
      curr_vm = vlib_mains[vm_index];

      /* *INDENT-OFF* */
      pool_foreach (f, curr_vm->buffer_free_list_pool, ({
        vlib_cli_output (vm, "%U", format_vlib_buffer_free_list, f, vm_index);
      }));
      /* *INDENT-ON* */

      vm_index++;
    }
  while (vm_index < vec_len (vlib_mains));

  return 0;
}

/* *INDENT-OFF* */
VLIB_CLI_COMMAND (show_buffers_command, static) = {
  .path = "show buffers",
  .short_help = "Show packet buffer allocation",
  .function = show_buffers,
};
/* *INDENT-ON* */

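/* Buffer subsystem startup.  If an external plugin has registered
   vlib_buffer_callbacks, just adopt those callbacks and return.  Otherwise
   install the native fill/free/delete callbacks and create the default
   buffer pool from a shared physmem map, preferring hugepages and falling
   back to normal pages with a warning if hugepage allocation fails. */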
clib_error_t *
vlib_buffer_main_init (struct vlib_main_t * vm)
{
  vlib_buffer_main_t *bm = &buffer_main;
  clib_error_t *error;
  u32 physmem_map_index;
  u8 pool_index;
  int log2_page_size = 0;

  buffer_log_default = vlib_log_register_class ("buffer", 0);

  if (vlib_buffer_callbacks)
    {
      /* external plugin has registered own buffer callbacks
         so we just copy them and quit */
      clib_memcpy_fast (&bm->cb, vlib_buffer_callbacks,
                        sizeof (vlib_buffer_callbacks_t));
      bm->callbacks_registered = 1;
      return 0;
    }

  bm->cb.vlib_buffer_fill_free_list_cb = &vlib_buffer_fill_free_list_internal;
  bm->cb.vlib_buffer_free_cb = &vlib_buffer_free_internal;
  bm->cb.vlib_buffer_free_no_next_cb = &vlib_buffer_free_no_next_internal;
  bm->cb.vlib_buffer_delete_free_list_cb =
    &vlib_buffer_delete_free_list_internal;
  clib_spinlock_init (&bm->buffer_known_hash_lockp);

retry:
  error = vlib_physmem_shared_map_create (vm, "buffers",
                                          vlib_buffer_physmem_sz,
                                          log2_page_size,
                                          CLIB_PMALLOC_NUMA_LOCAL,
                                          &physmem_map_index);

  if (error && log2_page_size == 0)
    {
      vlib_log_warn (buffer_log_default, "%U", format_clib_error, error);
      clib_error_free (error);
      vlib_log_warn (buffer_log_default, "falling back to non-hugepage "
                     "backed buffer pool");
      log2_page_size = min_log2 (clib_mem_get_page_size ());
      goto retry;
    }

  if (error)
    return error;

  pool_index = vlib_buffer_register_physmem_map (vm, physmem_map_index);
  vlib_buffer_pool_t *bp = vlib_buffer_pool_get (pool_index);
  clib_spinlock_init (&bp->lock);
  bp->buffer_size = VLIB_BUFFER_DEFAULT_FREE_LIST_BYTES +
    sizeof (vlib_buffer_t);

  return 0;
}

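/* Early "buffers" configuration section.  The default pool size above can
   be overridden from the startup configuration, e.g. (illustrative only):

     buffers {
       memory-size-in-mb 64
     }
*/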
static clib_error_t *
vlib_buffers_configure (vlib_main_t * vm, unformat_input_t * input)
{
  u32 size_in_mb;

  while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT)
    {
      if (unformat (input, "memory-size-in-mb %d", &size_in_mb))
        vlib_buffer_physmem_sz = size_in_mb << 20;
      else
        return unformat_parse_error (input);
    }

  unformat_free (input);
  return 0;
}

VLIB_EARLY_CONFIG_FUNCTION (vlib_buffers_configure, "buffers");


/** @endcond */
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */