/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * buffer_funcs.h: VLIB buffer related functions/inlines
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#ifndef included_vlib_buffer_funcs_h
#define included_vlib_buffer_funcs_h

#include <vppinfra/hash.h>

/** \file
    vlib buffer access methods.
*/


/** \brief Translate buffer index into buffer pointer

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffer_index - (u32) buffer index
    @return - (vlib_buffer_t *) buffer pointer
*/
always_inline vlib_buffer_t *
vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
{
  vlib_buffer_main_t *bm = &buffer_main;
  uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
  ASSERT (offset < bm->buffer_mem_size);

  return uword_to_pointer (bm->buffer_mem_start + offset, void *);
}
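
/* Usage sketch: a node typically resolves the u32 indices it receives in a
   frame into buffer pointers with vlib_get_buffer() before touching packet
   data or metadata.  The frame-handling names below (from, n_left) are
   illustrative assumptions.

     u32 *from = vlib_frame_vector_args (frame);
     u32 n_left = frame->n_vectors;

     while (n_left > 0)
       {
         vlib_buffer_t *b0 = vlib_get_buffer (vm, from[0]);
         u8 *p0 = vlib_buffer_get_current (b0);
         // ... parse or rewrite p0 ...
         from += 1;
         n_left -= 1;
       }
*/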

/** \brief Translate buffer pointer into buffer index

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param p - (void *) buffer pointer
    @return - (u32) buffer index
*/

always_inline u32
vlib_get_buffer_index (vlib_main_t * vm, void *p)
{
  vlib_buffer_main_t *bm = &buffer_main;
  uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
  ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
  ASSERT (offset < bm->buffer_mem_size);
  ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
  return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
}

/** \brief Get next buffer in buffer linked list, or zero for end of list.

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param b - (void *) buffer pointer
    @return - (vlib_buffer_t *) next buffer, or NULL
*/
always_inline vlib_buffer_t *
vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
{
  return (b->flags & VLIB_BUFFER_NEXT_PRESENT
          ? vlib_get_buffer (vm, b->next_buffer) : 0);
}

uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
                                             vlib_buffer_t * b_first);

/** \brief Get length in bytes of the buffer chain

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param b - (void *) buffer pointer
    @return - (uword) length of buffer chain
*/
always_inline uword
vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
{
  uword len = b->current_length;

  if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
    return len;

  if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
    return len + b->total_length_not_including_first_buffer;

  return vlib_buffer_length_in_chain_slow_path (vm, b);
}
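
/* Worked illustration (hypothetical three-segment chain): segments carrying
   current_length = 128, 2048 and 312 bytes give a chain length of
   128 + 2048 + 312 = 2488 bytes.  When VLIB_BUFFER_TOTAL_LENGTH_VALID is set
   on the head, the same result comes from current_length (128) plus
   total_length_not_including_first_buffer (2360) without walking the chain;
   otherwise the slow path effectively walks the next_buffer links:

     uword len = 0;
     vlib_buffer_t *b = b_first;
     while (1)
       {
         len += b->current_length;
         if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
           break;
         b = vlib_get_buffer (vm, b->next_buffer);
       }
*/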

/** \brief Get length in bytes of the buffer index buffer chain

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @return - (uword) length of buffer chain
*/
always_inline uword
vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
{
  vlib_buffer_t *b = vlib_get_buffer (vm, bi);
  return vlib_buffer_length_in_chain (vm, b);
}

/** \brief Copy buffer contents to memory

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffer_index - (u32) buffer index
    @param contents - (u8 *) memory, <strong>must be large enough</strong>
    @return - (uword) length of buffer chain
*/
always_inline uword
vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
{
  uword content_len = 0;
  uword l;
  vlib_buffer_t *b;

  while (1)
    {
      b = vlib_get_buffer (vm, buffer_index);
      l = b->current_length;
      clib_memcpy (contents + content_len, b->data + b->current_data, l);
      content_len += l;
      if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
        break;
      buffer_index = b->next_buffer;
    }

  return content_len;
}

/* Return physical address of buffer->data start. */
always_inline u64
vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
{
  vlib_buffer_main_t *bm = &buffer_main;
  vlib_buffer_t *b = vlib_get_buffer (vm, buffer_index);
  vlib_buffer_pool_t *pool = vec_elt_at_index (bm->buffer_pools,
                                               b->buffer_pool_index);

  return vlib_physmem_virtual_to_physical (vm, pool->physmem_region, b->data);
}

/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of the buffer contain most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)     \
  do {                                                  \
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);      \
    vlib_prefetch_buffer_header (_b, type);             \
  } while (0)
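
/* Usage sketch (hypothetical dual-loop body): prefetching the headers of the
   buffers one iteration ahead hides the cache miss on buffer metadata.  The
   names from/n_left are illustrative assumptions.

     while (n_left >= 4)
       {
         vlib_prefetch_buffer_with_index (vm, from[2], STORE);
         vlib_prefetch_buffer_with_index (vm, from[3], STORE);

         b0 = vlib_get_buffer (vm, from[0]);
         b1 = vlib_get_buffer (vm, from[1]);
         // ... process b0 and b1 ...
         from += 2;
         n_left -= 2;
       }
*/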

#if 0
/* Iterate over known allocated vlib bufs. You probably do not want
 * to do this!
 @param vm the vlib_main_t
 @param bi found allocated buffer index
 @param body operation to perform on buffer index
 function executes body for each allocated buffer index
 */
#define vlib_buffer_foreach_allocated(vm,bi,body)               \
do {                                                            \
  vlib_main_t * _vmain = (vm);                                  \
  vlib_buffer_main_t * _bmain = &_vmain->buffer_main;           \
  hash_pair_t * _vbpair;                                        \
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({      \
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {     \
      (bi) = _vbpair->key;                                      \
      body;                                                     \
    }                                                           \
  }));                                                          \
} while (0)
#endif

typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;

void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
                                      uword n_buffers,
                                      vlib_buffer_known_state_t
                                      expected_state);

always_inline vlib_buffer_known_state_t
vlib_buffer_is_known (u32 buffer_index)
{
  vlib_buffer_main_t *bm = &buffer_main;

  clib_spinlock_lock (&bm->buffer_known_hash_lockp);
  uword *p = hash_get (bm->buffer_known_hash, buffer_index);
  clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
  return p ? p[0] : VLIB_BUFFER_UNKNOWN;
}

always_inline void
vlib_buffer_set_known_state (u32 buffer_index,
                             vlib_buffer_known_state_t state)
{
  vlib_buffer_main_t *bm = &buffer_main;

  clib_spinlock_lock (&bm->buffer_known_hash_lockp);
  hash_set (bm->buffer_known_hash, buffer_index, state);
  clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
}

/* Validates sanity of a single buffer.
   Returns format'ed vector with error message if any. */
u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
                          uword follow_chain);

always_inline u32
vlib_buffer_round_size (u32 size)
{
  return round_pow2 (size, sizeof (vlib_buffer_t));
}

always_inline vlib_buffer_free_list_index_t
vlib_buffer_get_free_list_index (vlib_buffer_t * b)
{
  if (PREDICT_FALSE (b->flags & VLIB_BUFFER_NON_DEFAULT_FREELIST))
    return b->free_list_index;

  return 0;
}

always_inline void
vlib_buffer_set_free_list_index (vlib_buffer_t * b,
                                 vlib_buffer_free_list_index_t index)
{
  if (PREDICT_FALSE (index))
    {
      b->flags |= VLIB_BUFFER_NON_DEFAULT_FREELIST;
      b->free_list_index = index;
    }
  else
    b->flags &= ~VLIB_BUFFER_NON_DEFAULT_FREELIST;
}

/** \brief Allocate buffers from specific freelist into supplied array

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u32) number of buffers requested
    @param index - (vlib_buffer_free_list_index_t) free list to allocate from
    @return - (u32) number of buffers actually allocated, may be
    less than the number requested or zero
*/
always_inline u32
vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
                                  u32 * buffers,
                                  u32 n_buffers,
                                  vlib_buffer_free_list_index_t index)
{
  vlib_buffer_main_t *bm = &buffer_main;
  vlib_buffer_free_list_t *fl;
  u32 *src;
  uword len;

  ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);

  fl = pool_elt_at_index (vm->buffer_free_list_pool, index);

  len = vec_len (fl->buffers);

  if (PREDICT_FALSE (len < n_buffers))
    {
      bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
      if (PREDICT_FALSE ((len = vec_len (fl->buffers)) == 0))
        return 0;

      /* even if the fill callback didn't manage to refill the free list
         we should give what we have */
      n_buffers = clib_min (len, n_buffers);

      /* the following code is intentionally duplicated to allow the
         compiler to optimize the fast path when n_buffers is a constant */
      src = fl->buffers + len - n_buffers;
      clib_memcpy (buffers, src, n_buffers * sizeof (u32));
      _vec_len (fl->buffers) -= n_buffers;

      /* Verify that buffers are known free. */
      vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
                                       VLIB_BUFFER_KNOWN_FREE);

      return n_buffers;
    }

  src = fl->buffers + len - n_buffers;
  clib_memcpy (buffers, src, n_buffers * sizeof (u32));
  _vec_len (fl->buffers) -= n_buffers;

  /* Verify that buffers are known free. */
  vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
                                   VLIB_BUFFER_KNOWN_FREE);

  return n_buffers;
}

/** \brief Allocate buffers into supplied array

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u32) number of buffers requested
    @return - (u32) number of buffers actually allocated, may be
    less than the number requested or zero
*/
always_inline u32
vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
{
  return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
                                           VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
}
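
/* Usage sketch (hypothetical burst size): allocate a burst of buffer indices
   and always handle a short allocation before using them.

     u32 bi[32];
     u32 n_alloc = vlib_buffer_alloc (vm, bi, 32);

     if (PREDICT_FALSE (n_alloc < 32))
       {
         // not enough buffers: return what we did get, then drop or retry
         if (n_alloc)
           vlib_buffer_free (vm, bi, n_alloc);
       }
*/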

/** \brief Allocate buffers into ring

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param ring - (u32 * ) buffer index ring
    @param start - (u32) first slot in the ring
    @param ring_size - (u32) ring size
    @param n_buffers - (u32) number of buffers requested
    @return - (u32) number of buffers actually allocated, may be
    less than the number requested or zero
*/
always_inline u32
vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
                           u32 ring_size, u32 n_buffers)
{
  u32 n_alloc;

  ASSERT (n_buffers <= ring_size);

  if (PREDICT_TRUE (start + n_buffers <= ring_size))
    return vlib_buffer_alloc (vm, ring + start, n_buffers);

  n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);

  if (PREDICT_TRUE (n_alloc == ring_size - start))
    n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);

  return n_alloc;
}
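
/* Worked illustration (hypothetical numbers): with ring_size = 256,
   start = 250 and n_buffers = 10 the request wraps around the ring: slots
   250..255 are filled first (256 - 250 = 6 buffers) and, if that first
   allocation was complete, slots 0..3 are filled next (10 - 6 = 4 buffers),
   so the function returns 10 on full success. */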

/** \brief Free buffers
    Frees the entire buffer chain for each buffer

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u32) number of buffers to free

*/
always_inline void
vlib_buffer_free (vlib_main_t * vm,
                  /* pointer to first buffer */
                  u32 * buffers,
                  /* number of buffers to free */
                  u32 n_buffers)
{
  vlib_buffer_main_t *bm = &buffer_main;

  ASSERT (bm->cb.vlib_buffer_free_cb);

  return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
}

/** \brief Free buffers, does not free the buffer chain for each buffer

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u32) number of buffers to free

*/
always_inline void
vlib_buffer_free_no_next (vlib_main_t * vm,
                          /* pointer to first buffer */
                          u32 * buffers,
                          /* number of buffers to free */
                          u32 n_buffers)
{
  vlib_buffer_main_t *bm = &buffer_main;

  ASSERT (bm->cb.vlib_buffer_free_no_next_cb);

  return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
}

/** \brief Free one buffer
    Shorthand to free a single buffer chain.

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param buffer_index - (u32) buffer index to free
*/
always_inline void
vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
{
  vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
}

/** \brief Free buffers from ring

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param ring - (u32 * ) buffer index ring
    @param start - (u32) first slot in the ring
    @param ring_size - (u32) ring size
    @param n_buffers - (u32) number of buffers
*/
always_inline void
vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
                            u32 ring_size, u32 n_buffers)
{
  ASSERT (n_buffers <= ring_size);

  if (PREDICT_TRUE (start + n_buffers <= ring_size))
    {
      vlib_buffer_free (vm, ring + start, n_buffers);
    }
  else
    {
      vlib_buffer_free (vm, ring + start, ring_size - start);
      vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
    }
}


/* Add/delete buffer free lists. */
vlib_buffer_free_list_index_t vlib_buffer_create_free_list (vlib_main_t * vm,
                                                            u32 n_data_bytes,
                                                            char *fmt, ...);
always_inline void
vlib_buffer_delete_free_list (vlib_main_t * vm,
                              vlib_buffer_free_list_index_t free_list_index)
{
  vlib_buffer_main_t *bm = &buffer_main;

  ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);

  bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
}

/* Make sure we have at least the given number of unaligned buffers. */
void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
                                           vlib_buffer_free_list_t *
                                           free_list,
                                           uword n_unaligned_buffers);

always_inline vlib_buffer_free_list_t *
vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
                                  vlib_buffer_free_list_index_t * index)
{
  vlib_buffer_free_list_index_t i;

  *index = i = vlib_buffer_get_free_list_index (b);
  return pool_elt_at_index (vm->buffer_free_list_pool, i);
}

always_inline vlib_buffer_free_list_t *
vlib_buffer_get_free_list (vlib_main_t * vm,
                           vlib_buffer_free_list_index_t free_list_index)
{
  vlib_buffer_free_list_t *f;

  f = pool_elt_at_index (vm->buffer_free_list_pool, free_list_index);

  /* Sanity: indices must match. */
  ASSERT (f->index == free_list_index);

  return f;
}

always_inline u32
vlib_buffer_free_list_buffer_size (vlib_main_t * vm,
                                   vlib_buffer_free_list_index_t index)
{
  vlib_buffer_free_list_t *f = vlib_buffer_get_free_list (vm, index);
  return f->n_data_bytes;
}

void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);

/* Reasonably fast buffer copy routine. */
always_inline void
vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
{
  while (n >= 4)
    {
      dst[0] = src[0];
      dst[1] = src[1];
      dst[2] = src[2];
      dst[3] = src[3];
      dst += 4;
      src += 4;
      n -= 4;
    }
  while (n > 0)
    {
      dst[0] = src[0];
      dst += 1;
      src += 1;
      n -= 1;
    }
}

/* Append given data to end of buffer, possibly allocating new buffers. */
u32 vlib_buffer_add_data (vlib_main_t * vm,
                          vlib_buffer_free_list_index_t free_list_index,
                          u32 buffer_index, void *data, u32 n_data_bytes);

/* duplicate all buffers in chain */
always_inline vlib_buffer_t *
vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
{
  vlib_buffer_t *s, *d, *fd;
  uword n_alloc, n_buffers = 1;
  u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
  int i;

  s = b;
  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      n_buffers++;
      s = vlib_get_buffer (vm, s->next_buffer);
    }
  u32 new_buffers[n_buffers];

  n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);

  /* No guarantee that we'll get all the buffers we asked for */
  if (PREDICT_FALSE (n_alloc < n_buffers))
    {
      if (n_alloc > 0)
        vlib_buffer_free (vm, new_buffers, n_alloc);
      return 0;
    }

  /* 1st segment */
  s = b;
  fd = d = vlib_get_buffer (vm, new_buffers[0]);
  d->current_data = s->current_data;
  d->current_length = s->current_length;
  d->flags = s->flags & flag_mask;
  d->total_length_not_including_first_buffer =
    s->total_length_not_including_first_buffer;
  clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
  clib_memcpy (vlib_buffer_get_current (d),
               vlib_buffer_get_current (s), s->current_length);

  /* next segments */
  for (i = 1; i < n_buffers; i++)
    {
      /* previous */
      d->next_buffer = new_buffers[i];
      /* current */
      s = vlib_get_buffer (vm, s->next_buffer);
      d = vlib_get_buffer (vm, new_buffers[i]);
      d->current_data = s->current_data;
      d->current_length = s->current_length;
      clib_memcpy (vlib_buffer_get_current (d),
                   vlib_buffer_get_current (s), s->current_length);
      d->flags = s->flags & flag_mask;
    }

  return fd;
}
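
/* Usage sketch: take a private, writable copy of a chained packet before
   modifying it, for example when the original must remain untouched for
   another consumer.  The failure handling shown is an assumption.

     vlib_buffer_t *b0 = vlib_get_buffer (vm, bi0);
     vlib_buffer_t *c0 = vlib_buffer_copy (vm, b0);

     if (c0 == 0)
       ;   // allocation failed: keep using b0, or drop; caller's choice
     else
       {
         u32 ci0 = vlib_get_buffer_index (vm, c0);
         // ... modify c0 freely and enqueue ci0 ...
       }
*/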

/** \brief Create a maximum of 256 clones of a buffer and store them
    in the supplied array

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param src_buffer - (u32) source buffer index
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u16) number of buffer clones requested (<=256)
    @param head_end_offset - (u16) offset relative to current position
    where packet head ends
    @return - (u16) number of buffers actually cloned, may be
    less than the number requested or zero
*/
always_inline u16
vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
                       u16 n_buffers, u16 head_end_offset)
{
  u16 i;
  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);

  ASSERT (s->n_add_refs == 0);
  ASSERT (n_buffers);
  ASSERT (n_buffers <= 256);

  if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
    {
      buffers[0] = src_buffer;
      for (i = 1; i < n_buffers; i++)
        {
          vlib_buffer_t *d;
          d = vlib_buffer_copy (vm, s);
          if (d == 0)
            return i;
          buffers[i] = vlib_get_buffer_index (vm, d);
        }
      return n_buffers;
    }

  if (PREDICT_FALSE (n_buffers == 1))
    {
      buffers[0] = src_buffer;
      return 1;
    }

  n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
                                                vlib_buffer_get_free_list_index
                                                (s));

  for (i = 0; i < n_buffers; i++)
    {
      vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
      d->current_data = s->current_data;
      d->current_length = head_end_offset;
      vlib_buffer_set_free_list_index (d,
                                       vlib_buffer_get_free_list_index (s));
      d->total_length_not_including_first_buffer =
        s->total_length_not_including_first_buffer + s->current_length -
        head_end_offset;
      d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
      d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
      clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
      clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
                   head_end_offset);
      d->next_buffer = src_buffer;
    }
  vlib_buffer_advance (s, head_end_offset);
  s->n_add_refs = n_buffers - 1;
  while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      s = vlib_get_buffer (vm, s->next_buffer);
      s->n_add_refs = n_buffers - 1;
    }

  return n_buffers;
}

/** \brief Create multiple clones of a buffer and store them
    in the supplied array

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param src_buffer - (u32) source buffer index
    @param buffers - (u32 * ) buffer index array
    @param n_buffers - (u16) number of buffer clones requested
    @param head_end_offset - (u16) offset relative to current position
    where packet head ends
    @return - (u16) number of buffers actually cloned, may be
    less than the number requested or zero
*/
always_inline u16
vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
                   u16 n_buffers, u16 head_end_offset)
{
  vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
  u16 n_cloned = 0;

  while (n_buffers > 256)
    {
      vlib_buffer_t *copy;
      copy = vlib_buffer_copy (vm, s);
      n_cloned += vlib_buffer_clone_256 (vm,
                                         vlib_get_buffer_index (vm, copy),
                                         (buffers + n_cloned),
                                         256, head_end_offset);
      n_buffers -= 256;
    }
  n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
                                     buffers + n_cloned,
                                     n_buffers, head_end_offset);

  return n_cloned;
}
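
/* Usage sketch (hypothetical replication fan-out to 8 destinations): each
   clone receives a private copy of the first head_end_offset bytes, which
   can then be rewritten per destination, while the rest of the original
   chain is shared via reference counting.

     u32 clones[8];
     u16 n = vlib_buffer_clone (vm, bi0, clones, 8, 64);

     // n may be less than 8 if buffer allocation failed; rewrite the first
     // 64 bytes of each clones[i] independently, the shared tail stays put.
*/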

/** \brief Attach cloned tail to the buffer

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param head - (vlib_buffer_t *) head buffer
    @param tail - (vlib_buffer_t *) tail buffer to clone and attach to head
*/

always_inline void
vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
                          vlib_buffer_t * tail)
{
  ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
  ASSERT (vlib_buffer_get_free_list_index (head) ==
          vlib_buffer_get_free_list_index (tail));

  head->flags |= VLIB_BUFFER_NEXT_PRESENT;
  head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
  head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
  head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
  head->next_buffer = vlib_get_buffer_index (vm, tail);
  head->total_length_not_including_first_buffer = tail->current_length +
    tail->total_length_not_including_first_buffer;

next_segment:
  __sync_add_and_fetch (&tail->n_add_refs, 1);

  if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      tail = vlib_get_buffer (vm, tail->next_buffer);
      goto next_segment;
    }
}

/* Initializes the buffer as an empty packet with no chained buffers. */
always_inline void
vlib_buffer_chain_init (vlib_buffer_t * first)
{
  first->total_length_not_including_first_buffer = 0;
  first->current_length = 0;
  first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
  first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
}

/* The provided next_bi buffer index is appended to the end of the packet. */
always_inline vlib_buffer_t *
vlib_buffer_chain_buffer (vlib_main_t * vm,
                          vlib_buffer_t * first,
                          vlib_buffer_t * last, u32 next_bi)
{
  vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
  last->next_buffer = next_bi;
  last->flags |= VLIB_BUFFER_NEXT_PRESENT;
  next_buffer->current_length = 0;
  next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
  return next_buffer;
}

/* Increases or decreases the packet length.
 * It does not allocate or deallocate new buffers.
 * Therefore, the added length must be compatible
 * with the last buffer. */
always_inline void
vlib_buffer_chain_increase_length (vlib_buffer_t * first,
                                   vlib_buffer_t * last, i32 len)
{
  last->current_length += len;
  if (first != last)
    first->total_length_not_including_first_buffer += len;
}

/* Copies data to the end of the packet and increases its length.
 * It does not allocate new buffers.
 * Returns the number of copied bytes. */
always_inline u16
vlib_buffer_chain_append_data (vlib_main_t * vm,
                               vlib_buffer_free_list_index_t free_list_index,
                               vlib_buffer_t * first,
                               vlib_buffer_t * last, void *data, u16 data_len)
{
  u32 n_buffer_bytes =
    vlib_buffer_free_list_buffer_size (vm, free_list_index);
  ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
  u16 len = clib_min (data_len,
                      n_buffer_bytes - last->current_length -
                      last->current_data);
  clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
               len);
  vlib_buffer_chain_increase_length (first, last, len);
  return len;
}
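
/* Usage sketch (hypothetical packet builder): start an empty packet, append
   payload, and when the current tail buffer is full, allocate another
   buffer, chain it, and append the remainder.  Error handling is mostly
   omitted; vlib_buffer_chain_append_data_with_alloc(), declared below,
   performs the allocation loop automatically.

     vlib_buffer_t *first = vlib_get_buffer (vm, bi_first);
     vlib_buffer_t *last = first;

     vlib_buffer_chain_init (first);
     u16 copied = vlib_buffer_chain_append_data (vm, free_list_index,
                                                 first, last, data, data_len);
     if (copied < data_len)
       {
         u32 bi_next;
         if (vlib_buffer_alloc (vm, &bi_next, 1) == 1)
           {
             last = vlib_buffer_chain_buffer (vm, first, last, bi_next);
             vlib_buffer_chain_append_data (vm, free_list_index, first, last,
                                            (u8 *) data + copied,
                                            data_len - copied);
           }
       }
*/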

/* Copies data to the end of the packet and increases its length.
 * Allocates additional buffers from the free list if necessary.
 * Returns the number of copied bytes.
 * 'last' value is modified whenever new buffers are allocated and
 * chained and points to the last buffer in the chain. */
u16
vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
                                          vlib_buffer_free_list_index_t
                                          free_list_index,
                                          vlib_buffer_t * first,
                                          vlib_buffer_t ** last, void *data,
                                          u16 data_len);
void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);

format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
  format_vlib_buffer_contents;

typedef struct
{
  /* Vector of packet data. */
  u8 *packet_data;

  /* Number of buffers to allocate in each call to allocator. */
  u32 min_n_buffers_each_alloc;

  /* Buffer free list for this template. */
  vlib_buffer_free_list_index_t free_list_index;

  u32 *free_buffers;
} vlib_packet_template_t;

void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
                                             vlib_packet_template_t * t);

void vlib_packet_template_init (vlib_main_t * vm,
                                vlib_packet_template_t * t,
                                void *packet_data,
                                uword n_packet_data_bytes,
                                uword min_n_buffers_each_alloc,
                                char *fmt, ...);

void *vlib_packet_template_get_packet (vlib_main_t * vm,
                                       vlib_packet_template_t * t,
                                       u32 * bi_result);

always_inline void
vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
{
  vec_free (t->packet_data);
}

always_inline u32
unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
{
  serialize_stream_t *s = &m->stream;
  vlib_serialize_buffer_main_t *sm
    = uword_to_pointer (m->stream.data_function_opaque,
                        vlib_serialize_buffer_main_t *);
  vlib_main_t *vm = sm->vlib_main;
  u32 n, *f;

  n = s->n_buffer_bytes - s->current_buffer_index;
  if (sm->last_buffer != ~0)
    {
      vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
      while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
        {
          b = vlib_get_buffer (vm, b->next_buffer);
          n += b->current_length;
        }
    }

  /* *INDENT-OFF* */
  clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
    n += vlib_buffer_index_length_in_chain (vm, f[0]);
  }));
/* *INDENT-ON* */

  return n;
}

/* Set a buffer quickly into "uninitialized" state. We want this to
   be extremely cheap and arrange for all fields that need to be
   initialized to be in the first 128 bits of the buffer. */
always_inline void
vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
                                vlib_buffer_free_list_t * fl)
{
  vlib_buffer_t *src = &fl->buffer_init_template;

  /* Make sure vlib_buffer_t is cacheline aligned and sized */
  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
          CLIB_CACHE_LINE_BYTES);
  ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
          CLIB_CACHE_LINE_BYTES * 2);

  /* Make sure buffer template is sane. */
  ASSERT (fl->index == vlib_buffer_get_free_list_index (src));

  clib_memcpy (STRUCT_MARK_PTR (dst, template_start),
               STRUCT_MARK_PTR (src, template_start),
               STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
               STRUCT_OFFSET_OF (vlib_buffer_t, template_start));

  /* Not in the first 16 octets. */
  dst->n_add_refs = src->n_add_refs;
  vlib_buffer_set_free_list_index (dst, fl->index);

  /* Make sure it really worked. */
#define _(f) ASSERT (dst->f == src->f);
  _(current_data);
  _(current_length);
  _(flags);
#undef _
  /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
  /* total_length_not_including_first_buffer is not in the template anymore,
   * so it may not actually be zeroed for some buffers. One option is to
   * uncomment the line below (which comes at a cost); the other is to just
   * not care */
  /* dst->total_length_not_including_first_buffer = 0; */
  ASSERT (dst->n_add_refs == 0);
}

always_inline void
vlib_buffer_add_to_free_list (vlib_main_t * vm,
                              vlib_buffer_free_list_t * f,
                              u32 buffer_index, u8 do_init)
{
  vlib_buffer_pool_t *bp = vlib_buffer_pool_get (f->buffer_pool_index);
  vlib_buffer_t *b;
  b = vlib_get_buffer (vm, buffer_index);
  if (PREDICT_TRUE (do_init))
    vlib_buffer_init_for_free_list (b, f);
  vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);

  if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
    {
      clib_spinlock_lock (&bp->lock);
      /* keep last stored buffers, as they are more likely hot in the cache */
      vec_add_aligned (bp->buffers, f->buffers, VLIB_FRAME_SIZE,
                       CLIB_CACHE_LINE_BYTES);
      vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
      f->n_alloc -= VLIB_FRAME_SIZE;
      clib_spinlock_unlock (&bp->lock);
    }
}

#if CLIB_DEBUG > 0
extern u32 *vlib_buffer_state_validation_lock;
extern uword *vlib_buffer_state_validation_hash;
extern void *vlib_buffer_state_heap;
#endif

static inline void
vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
{
#if CLIB_DEBUG > 0
  uword *p;
  void *oldheap;

  oldheap = clib_mem_set_heap (vlib_buffer_state_heap);

  while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
    ;

  p = hash_get (vlib_buffer_state_validation_hash, b);

  /* If we don't know about b, declare it to be in the expected state */
  if (!p)
    {
      hash_set (vlib_buffer_state_validation_hash, b, expected);
      goto out;
    }

  if (p[0] != expected)
    {
      void cj_stop (void);
      u32 bi;
      vlib_main_t *vm = &vlib_global_main;

      cj_stop ();

      bi = vlib_get_buffer_index (vm, b);

      clib_mem_set_heap (oldheap);
      clib_warning ("%.6f buffer %llx (%d): %s, not %s",
                    vlib_time_now (vm), bi,
                    p[0] ? "busy" : "free", expected ? "busy" : "free");
      os_panic ();
    }
out:
  CLIB_MEMORY_BARRIER ();
  *vlib_buffer_state_validation_lock = 0;
  clib_mem_set_heap (oldheap);
#endif
}

static inline void
vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
{
#if CLIB_DEBUG > 0
  void *oldheap;

  oldheap = clib_mem_set_heap (vlib_buffer_state_heap);

  while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
    ;

  hash_set (vlib_buffer_state_validation_hash, b, expected);

  CLIB_MEMORY_BARRIER ();
  *vlib_buffer_state_validation_lock = 0;
  clib_mem_set_heap (oldheap);
#endif
}

/** minimum data size of first buffer in a buffer chain */
#define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256)

/**
 * @brief compress buffer chain in a way where the first buffer is at least
 * VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE long
 *
 * @param[in] vm - vlib_main
 * @param[in,out] first - first buffer in chain
 * @param[in,out] discard_vector - vector of buffer indexes which were removed
 * from the chain
 */
always_inline void
vlib_buffer_chain_compress (vlib_main_t * vm,
                            vlib_buffer_t * first, u32 ** discard_vector)
{
  if (first->current_length >= VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE ||
      !(first->flags & VLIB_BUFFER_NEXT_PRESENT))
    {
      /* this is already big enough or not a chain */
      return;
    }
  /* probe free list to find allocated buffer size to avoid overfill */
  vlib_buffer_free_list_index_t index;
  vlib_buffer_free_list_t *free_list =
    vlib_buffer_get_buffer_free_list (vm, first, &index);

  u32 want_first_size = clib_min (VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE,
                                  free_list->n_data_bytes -
                                  first->current_data);
  do
    {
      vlib_buffer_t *second = vlib_get_buffer (vm, first->next_buffer);
      u32 need = want_first_size - first->current_length;
      u32 amount_to_copy = clib_min (need, second->current_length);
      clib_memcpy (((u8 *) vlib_buffer_get_current (first)) +
                   first->current_length,
                   vlib_buffer_get_current (second), amount_to_copy);
      first->current_length += amount_to_copy;
      vlib_buffer_advance (second, amount_to_copy);
      if (first->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
        {
          first->total_length_not_including_first_buffer -= amount_to_copy;
        }
      if (!second->current_length)
        {
          vec_add1 (*discard_vector, first->next_buffer);
          if (second->flags & VLIB_BUFFER_NEXT_PRESENT)
            {
              first->next_buffer = second->next_buffer;
            }
          else
            {
              first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
            }
          second->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
        }
    }
  while ((first->current_length < want_first_size) &&
         (first->flags & VLIB_BUFFER_NEXT_PRESENT));
}

#endif /* included_vlib_buffer_funcs_h */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */