/* SPDX-License-Identifier: Apache-2.0
 * Copyright(c) 2021 Cisco Systems, Inc.
 */

#include <vppinfra/clib.h>
#include <vlib/vlib.h>
#include <vppinfra/vector/mask_compare.h>
#include <vppinfra/vector/compress.h>

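/* Extract every buffer whose next index equals 'next_index' from the
 * (buffers, nexts) vector, append the extracted indices (and optional aux
 * data) to the next frame for that index (spilling into a second frame when
 * the first one fills up), and mark the consumed slots in 'used_elt_bmp'.
 * Returns the number of buffers still left to enqueue. */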
static_always_inline u32
enqueue_one (vlib_main_t *vm, vlib_node_runtime_t *node,
             vlib_frame_bitmap_t used_elt_bmp, u16 next_index, u32 *buffers,
             u16 *nexts, u32 n_buffers, u32 n_left, u32 *tmp, u8 maybe_aux,
             u32 *aux_data, u32 *tmp_aux)
{
  vlib_frame_bitmap_t match_bmp;
  vlib_frame_t *f;
  u32 n_extracted, n_free;
  u32 *to, *to_aux;

  f = vlib_get_next_frame_internal (vm, node, next_index, 0);

  maybe_aux = maybe_aux && f->aux_offset;

  n_free = VLIB_FRAME_SIZE - f->n_vectors;

  /* if frame contains enough space for worst case scenario, we can avoid
   * use of tmp */
  if (n_free >= n_left)
    {
      to = (u32 *) vlib_frame_vector_args (f) + f->n_vectors;
      if (maybe_aux)
        to_aux = (u32 *) vlib_frame_aux_args (f) + f->n_vectors;
    }
  else
    {
      to = tmp;
      if (maybe_aux)
        to_aux = tmp_aux;
    }
  clib_mask_compare_u16 (next_index, nexts, match_bmp, n_buffers);
  n_extracted = clib_compress_u32 (to, buffers, match_bmp, n_buffers);
  if (maybe_aux)
    clib_compress_u32 (to_aux, aux_data, match_bmp, n_buffers);
  vlib_frame_bitmap_or (used_elt_bmp, match_bmp);

  if (to != tmp)
    {
      /* indices already written to frame, just close it */
      vlib_put_next_frame (vm, node, next_index, n_free - n_extracted);
    }
  else if (n_free >= n_extracted)
    {
      /* enough space in the existing frame */
      to = (u32 *) vlib_frame_vector_args (f) + f->n_vectors;
      vlib_buffer_copy_indices (to, tmp, n_extracted);
      if (maybe_aux)
        {
          to_aux = (u32 *) vlib_frame_aux_args (f) + f->n_vectors;
          vlib_buffer_copy_indices (to_aux, tmp_aux, n_extracted);
        }
      vlib_put_next_frame (vm, node, next_index, n_free - n_extracted);
    }
  else
    {
      /* full frame */
      to = (u32 *) vlib_frame_vector_args (f) + f->n_vectors;
      vlib_buffer_copy_indices (to, tmp, n_free);
      if (maybe_aux)
        {
          to_aux = (u32 *) vlib_frame_aux_args (f) + f->n_vectors;
          vlib_buffer_copy_indices (to_aux, tmp_aux, n_free);
        }
      vlib_put_next_frame (vm, node, next_index, 0);

      /* second frame */
      u32 n_2nd_frame = n_extracted - n_free;
      f = vlib_get_next_frame_internal (vm, node, next_index, 1);
      to = vlib_frame_vector_args (f);
      vlib_buffer_copy_indices (to, tmp + n_free, n_2nd_frame);
      if (maybe_aux)
        {
          to_aux = vlib_frame_aux_args (f);
          vlib_buffer_copy_indices (to_aux, tmp_aux + n_free, n_2nd_frame);
        }
      vlib_put_next_frame (vm, node, next_index,
                           VLIB_FRAME_SIZE - n_2nd_frame);
    }

  return n_left - n_extracted;
}

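/* Enqueue buffers to their per-buffer next nodes, one VLIB_FRAME_SIZE chunk
 * at a time: take the next index of the first unhandled buffer, let
 * enqueue_one () consume every buffer sharing that next index, and repeat
 * until the used-element bitmap covers the whole chunk. */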
static_always_inline void
vlib_buffer_enqueue_to_next_fn_inline (vlib_main_t *vm,
                                       vlib_node_runtime_t *node, u32 *buffers,
                                       u32 *aux_data, u16 *nexts, uword count,
                                       u8 maybe_aux)
{
  u32 tmp[VLIB_FRAME_SIZE];
  u32 tmp_aux[VLIB_FRAME_SIZE];
  u32 n_left;
  u16 next_index;

  while (count >= VLIB_FRAME_SIZE)
    {
      vlib_frame_bitmap_t used_elt_bmp = {};
      n_left = VLIB_FRAME_SIZE;
      u32 off = 0;

      next_index = nexts[0];
      n_left = enqueue_one (vm, node, used_elt_bmp, next_index, buffers, nexts,
                            VLIB_FRAME_SIZE, n_left, tmp, maybe_aux, aux_data,
                            tmp_aux);

      while (n_left)
        {
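          /* skip bitmap words whose 64 slots are all consumed, then pick the
           * next index of the first buffer not yet enqueued */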
          while (PREDICT_FALSE (used_elt_bmp[off] == ~0))
            {
              off++;
              ASSERT (off < ARRAY_LEN (used_elt_bmp));
            }

          next_index =
            nexts[off * 64 + count_trailing_zeros (~used_elt_bmp[off])];
          n_left = enqueue_one (vm, node, used_elt_bmp, next_index, buffers,
                                nexts, VLIB_FRAME_SIZE, n_left, tmp, maybe_aux,
                                aux_data, tmp_aux);
        }

      buffers += VLIB_FRAME_SIZE;
      if (maybe_aux)
        aux_data += VLIB_FRAME_SIZE;
      nexts += VLIB_FRAME_SIZE;
      count -= VLIB_FRAME_SIZE;
    }

  if (count)
    {
      vlib_frame_bitmap_t used_elt_bmp = {};
      next_index = nexts[0];
      n_left = count;
      u32 off = 0;

      n_left = enqueue_one (vm, node, used_elt_bmp, next_index, buffers, nexts,
                            count, n_left, tmp, maybe_aux, aux_data, tmp_aux);

      while (n_left)
        {
          while (PREDICT_FALSE (used_elt_bmp[off] == ~0))
            {
              off++;
              ASSERT (off < ARRAY_LEN (used_elt_bmp));
            }

          next_index =
            nexts[off * 64 + count_trailing_zeros (~used_elt_bmp[off])];
          n_left =
            enqueue_one (vm, node, used_elt_bmp, next_index, buffers, nexts,
                         count, n_left, tmp, maybe_aux, aux_data, tmp_aux);
        }
    }
}

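/* Exported multi-arch entry points. Graph nodes normally reach these through
 * the vlib_buffer_enqueue_to_next () style inline wrappers (assumed to live
 * in vlib/buffer_node.h), which dispatch via the pointers cached in
 * vlib_buffer_func_main below. */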
void __clib_section (".vlib_buffer_enqueue_to_next_fn")
CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_next_fn)
(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u16 *nexts,
 uword count)
{
  vlib_buffer_enqueue_to_next_fn_inline (vm, node, buffers, NULL, nexts, count,
                                         0 /* maybe_aux */);
}

CLIB_MARCH_FN_REGISTRATION (vlib_buffer_enqueue_to_next_fn);

void __clib_section (".vlib_buffer_enqueue_to_next_with_aux_fn")
CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_next_with_aux_fn)
(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u32 *aux_data,
 u16 *nexts, uword count)
{
  vlib_buffer_enqueue_to_next_fn_inline (vm, node, buffers, aux_data, nexts,
                                         count, 1 /* maybe_aux */);
}

CLIB_MARCH_FN_REGISTRATION (vlib_buffer_enqueue_to_next_with_aux_fn);

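/* Enqueue all buffers to a single next node: copy indices directly into the
 * next frame and loop (via the 'next' label) whenever the current frame runs
 * out of space before 'count' is exhausted. */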
static_always_inline void
vlib_buffer_enqueue_to_single_next_fn_inline (vlib_main_t *vm,
                                              vlib_node_runtime_t *node,
                                              u32 *buffers, u32 *aux_data,
                                              u16 next_index, u32 count,
                                              u8 with_aux)
{
  u32 *to_next, *to_next_aux, n_left_to_next, n_enq;

  if (with_aux)
    vlib_get_next_frame_with_aux (vm, node, next_index, to_next, to_next_aux,
                                  n_left_to_next);
  else
    vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);

  if (PREDICT_TRUE (n_left_to_next >= count))
    {
      vlib_buffer_copy_indices (to_next, buffers, count);
      if (with_aux)
        vlib_buffer_copy_indices (to_next_aux, aux_data, count);
      n_left_to_next -= count;
      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
      return;
    }

  n_enq = n_left_to_next;
next:
  vlib_buffer_copy_indices (to_next, buffers, n_enq);
  if (with_aux)
    vlib_buffer_copy_indices (to_next_aux, aux_data, n_enq);
  n_left_to_next -= n_enq;

  if (PREDICT_FALSE (count > n_enq))
    {
      count -= n_enq;
      buffers += n_enq;
      if (with_aux)
        aux_data += n_enq;

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
      if (with_aux)
        vlib_get_next_frame_with_aux (vm, node, next_index, to_next,
                                      to_next_aux, n_left_to_next);
      else
        vlib_get_next_frame (vm, node, next_index, to_next, n_left_to_next);
      n_enq = clib_min (n_left_to_next, count);
      goto next;
    }
  vlib_put_next_frame (vm, node, next_index, n_left_to_next);
}

void __clib_section (".vlib_buffer_enqueue_to_single_next_fn")
CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_single_next_fn)
(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u16 next_index,
 u32 count)
{
  vlib_buffer_enqueue_to_single_next_fn_inline (
    vm, node, buffers, NULL, next_index, count, 0 /* with_aux */);
}
CLIB_MARCH_FN_REGISTRATION (vlib_buffer_enqueue_to_single_next_fn);

void __clib_section (".vlib_buffer_enqueue_to_single_next_with_aux_fn")
CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_single_next_with_aux_fn)
(vlib_main_t *vm, vlib_node_runtime_t *node, u32 *buffers, u32 *aux_data,
 u16 next_index, u32 count)
{
  vlib_buffer_enqueue_to_single_next_fn_inline (
    vm, node, buffers, aux_data, next_index, count, 1 /* with_aux */);
}
CLIB_MARCH_FN_REGISTRATION (vlib_buffer_enqueue_to_single_next_with_aux_fn);

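/* Reserve one element on the target thread's frame queue ring by atomically
 * advancing the tail. Returns 0 if the queue does not exist, or if it is
 * full and 'dont_wait' is set; otherwise waits (checking the worker barrier)
 * until a ring slot becomes available. */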
static inline vlib_frame_queue_elt_t *
vlib_get_frame_queue_elt (vlib_frame_queue_main_t *fqm, u32 index,
                          int dont_wait)
{
  vlib_frame_queue_t *fq;
  u64 nelts, tail, new_tail;

  if (index >= vec_len (fqm->vlib_frame_queues))
    return 0;

  fq = fqm->vlib_frame_queues[index];
  ASSERT (fq);
  nelts = fq->nelts;

retry:
  tail = __atomic_load_n (&fq->tail, __ATOMIC_ACQUIRE);
  new_tail = tail + 1;

  if (new_tail >= fq->head + nelts)
    {
      if (dont_wait)
        return 0;

      /* Wait until a ring slot is available */
      while (new_tail >= fq->head + nelts)
        vlib_worker_thread_barrier_check ();
    }

  if (!__atomic_compare_exchange_n (&fq->tail, &tail, new_tail, 0 /* weak */,
                                    __ATOMIC_RELAXED, __ATOMIC_RELAXED))
    goto retry;

  return fq->elts + (new_tail & (nelts - 1));
}

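/* Hand buffers off to other threads' frame queues. Each pass handles one
 * distinct thread index: mask-compare selects the matching packets and
 * compress copies their indices into the reserved queue element, or onto the
 * drop list when the queue is congested and drop_on_congestion is set.
 * Returns the number of packets actually enqueued. */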
static_always_inline u32
vlib_buffer_enqueue_to_thread_inline (vlib_main_t *vm,
                                      vlib_node_runtime_t *node,
                                      vlib_frame_queue_main_t *fqm,
                                      u32 *buffer_indices, u16 *thread_indices,
                                      u32 n_packets, int drop_on_congestion,
                                      int with_aux, u32 *aux_data)
{
  u32 drop_list[VLIB_FRAME_SIZE], n_drop = 0;
  vlib_frame_bitmap_t mask, used_elts = {};
  vlib_frame_queue_elt_t *hf = 0;
  u16 thread_index;
  u32 n_comp, off = 0, n_left = n_packets;

  thread_index = thread_indices[0];

more:
  clib_mask_compare_u16 (thread_index, thread_indices, mask, n_packets);
  hf = vlib_get_frame_queue_elt (fqm, thread_index, drop_on_congestion);

  n_comp = clib_compress_u32 (hf ? hf->buffer_index : drop_list + n_drop,
                              buffer_indices, mask, n_packets);
  if (with_aux)
    clib_compress_u32 (hf ? hf->aux_data : drop_list + n_drop, aux_data, mask,
                       n_packets);

  if (hf)
    {
      if (node->flags & VLIB_NODE_FLAG_TRACE)
        hf->maybe_trace = 1;
      hf->n_vectors = n_comp;
      __atomic_store_n (&hf->valid, 1, __ATOMIC_RELEASE);
      vlib_get_main_by_index (thread_index)->check_frame_queues = 1;
    }
  else
    n_drop += n_comp;

  n_left -= n_comp;

  if (n_left)
    {
      vlib_frame_bitmap_or (used_elts, mask);

      while (PREDICT_FALSE (used_elts[off] == ~0))
        {
          off++;
          ASSERT (off < ARRAY_LEN (used_elts));
        }

      thread_index =
        thread_indices[off * 64 + count_trailing_zeros (~used_elts[off])];
      goto more;
    }

  if (drop_on_congestion && n_drop)
    vlib_buffer_free (vm, drop_list, n_drop);

  return n_packets - n_drop;
}

u32 __clib_section (".vlib_buffer_enqueue_to_thread_fn")
CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_thread_fn)
(vlib_main_t *vm, vlib_node_runtime_t *node, u32 frame_queue_index,
 u32 *buffer_indices, u16 *thread_indices, u32 n_packets,
 int drop_on_congestion)
{
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vlib_frame_queue_main_t *fqm;
  u32 n_enq = 0;

  fqm = vec_elt_at_index (tm->frame_queue_mains, frame_queue_index);

  while (n_packets >= VLIB_FRAME_SIZE)
    {
      n_enq += vlib_buffer_enqueue_to_thread_inline (
        vm, node, fqm, buffer_indices, thread_indices, VLIB_FRAME_SIZE,
        drop_on_congestion, 0 /* with_aux */, NULL);
      buffer_indices += VLIB_FRAME_SIZE;
      thread_indices += VLIB_FRAME_SIZE;
      n_packets -= VLIB_FRAME_SIZE;
    }

  if (n_packets == 0)
    return n_enq;

  n_enq += vlib_buffer_enqueue_to_thread_inline (
    vm, node, fqm, buffer_indices, thread_indices, n_packets,
    drop_on_congestion, 0 /* with_aux */, NULL);

  return n_enq;
}

u32 __clib_section (".vlib_buffer_enqueue_to_thread_with_aux_fn")
CLIB_MULTIARCH_FN (vlib_buffer_enqueue_to_thread_with_aux_fn)
(vlib_main_t *vm, vlib_node_runtime_t *node, u32 frame_queue_index,
 u32 *buffer_indices, u32 *aux, u16 *thread_indices, u32 n_packets,
 int drop_on_congestion)
{
  vlib_thread_main_t *tm = vlib_get_thread_main ();
  vlib_frame_queue_main_t *fqm;
  u32 n_enq = 0;

  fqm = vec_elt_at_index (tm->frame_queue_mains, frame_queue_index);

  while (n_packets >= VLIB_FRAME_SIZE)
    {
      n_enq += vlib_buffer_enqueue_to_thread_inline (
        vm, node, fqm, buffer_indices, thread_indices, VLIB_FRAME_SIZE,
        drop_on_congestion, 1 /* with_aux */, aux);
      buffer_indices += VLIB_FRAME_SIZE;
      thread_indices += VLIB_FRAME_SIZE;
      /* keep the aux data in step with the buffer indices */
      aux += VLIB_FRAME_SIZE;
      n_packets -= VLIB_FRAME_SIZE;
    }

  if (n_packets == 0)
    return n_enq;

  n_enq += vlib_buffer_enqueue_to_thread_inline (
    vm, node, fqm, buffer_indices, thread_indices, n_packets,
    drop_on_congestion, 1 /* with_aux */, aux);

  return n_enq;
}

CLIB_MARCH_FN_REGISTRATION (vlib_buffer_enqueue_to_thread_fn);
CLIB_MARCH_FN_REGISTRATION (vlib_buffer_enqueue_to_thread_with_aux_fn);

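/* Consumer side of the thread handoff: drain this thread's frame queue into
 * frames dispatched to fqm->node_index, stopping once 'vector_threshold'
 * buffer indices have been copied in a single call. Returns the number of
 * queue elements fully processed. */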
static_always_inline u32
vlib_frame_queue_dequeue_inline (vlib_main_t *vm, vlib_frame_queue_main_t *fqm,
                                 u8 with_aux)
{
  u32 thread_id = vm->thread_index;
  vlib_frame_queue_t *fq = fqm->vlib_frame_queues[thread_id];
  u32 mask = fq->nelts - 1;
  vlib_frame_queue_elt_t *elt;
  u32 n_free, n_copy, *from, *from_aux, *to = 0, *to_aux = 0, processed = 0,
      vectors = 0;
  vlib_frame_t *f = 0;

  ASSERT (fq);
  ASSERT (vm == vlib_global_main.vlib_mains[thread_id]);

  if (PREDICT_FALSE (fqm->node_index == ~0))
    return 0;
  /*
   * Gather trace data for frame queues
   */
  if (PREDICT_FALSE (fq->trace))
    {
      frame_queue_trace_t *fqt;
      frame_queue_nelt_counter_t *fqh;
      u32 elix;

      fqt = &fqm->frame_queue_traces[thread_id];

      fqt->nelts = fq->nelts;
      fqt->head = fq->head;
      fqt->tail = fq->tail;
      fqt->threshold = fq->vector_threshold;
      fqt->n_in_use = fqt->tail - fqt->head;
      if (fqt->n_in_use >= fqt->nelts)
        {
          /* if beyond max then use max */
          fqt->n_in_use = fqt->nelts - 1;
        }

      /* Record the number of elements in use in the histogram */
      fqh = &fqm->frame_queue_histogram[thread_id];
      fqh->count[fqt->n_in_use]++;

      /* Record a snapshot of the elements in use */
      for (elix = 0; elix < fqt->nelts; elix++)
        {
          elt = fq->elts + ((fq->head + 1 + elix) & (mask));
          if (1 || elt->valid)
            {
              fqt->n_vectors[elix] = elt->n_vectors;
            }
        }
      fqt->written = 1;
    }

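  /* Copy buffer indices (and aux data, when requested) from ring elements
   * into frames, handing each full frame to the target node. A partially
   * drained element stays on the ring with its offset advanced. */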
  while (1)
    {
      if (fq->head == fq->tail)
        break;

      elt = fq->elts + ((fq->head + 1) & mask);

      if (!__atomic_load_n (&elt->valid, __ATOMIC_ACQUIRE))
        break;

      from = elt->buffer_index + elt->offset;
      if (with_aux)
        from_aux = elt->aux_data + elt->offset;
      ASSERT (elt->offset + elt->n_vectors <= VLIB_FRAME_SIZE);

      if (f == 0)
        {
          f = vlib_get_frame_to_node (vm, fqm->node_index);
          to = vlib_frame_vector_args (f);
          if (with_aux)
            to_aux = vlib_frame_aux_args (f);
          n_free = VLIB_FRAME_SIZE;
        }

      if (elt->maybe_trace)
        f->frame_flags |= VLIB_NODE_FLAG_TRACE;

      n_copy = clib_min (n_free, elt->n_vectors);

      vlib_buffer_copy_indices (to, from, n_copy);
      to += n_copy;
      if (with_aux)
        {
          vlib_buffer_copy_indices (to_aux, from_aux, n_copy);
          to_aux += n_copy;
        }

      n_free -= n_copy;
      vectors += n_copy;

      if (n_free == 0)
        {
          f->n_vectors = VLIB_FRAME_SIZE;
          vlib_put_frame_to_node (vm, fqm->node_index, f);
          f = 0;
        }

      if (n_copy < elt->n_vectors)
        {
          /* not empty - leave it on the ring */
          elt->n_vectors -= n_copy;
          elt->offset += n_copy;
        }
      else
        {
          /* empty - reset and bump head */
          u32 sz = STRUCT_OFFSET_OF (vlib_frame_queue_elt_t, end_of_reset);
          clib_memset (elt, 0, sz);
          __atomic_store_n (&fq->head, fq->head + 1, __ATOMIC_RELEASE);
          processed++;
        }

      /* Limit the number of packets pushed into the graph */
      if (vectors >= fq->vector_threshold)
        break;
    }

  if (f)
    {
      f->n_vectors = VLIB_FRAME_SIZE - n_free;
      vlib_put_frame_to_node (vm, fqm->node_index, f);
    }

  return processed;
}

u32 __clib_section (".vlib_frame_queue_dequeue_fn")
CLIB_MULTIARCH_FN (vlib_frame_queue_dequeue_fn)
(vlib_main_t *vm, vlib_frame_queue_main_t *fqm)
{
  return vlib_frame_queue_dequeue_inline (vm, fqm, 0 /* with_aux */);
}

CLIB_MARCH_FN_REGISTRATION (vlib_frame_queue_dequeue_fn);

u32 __clib_section (".vlib_frame_queue_dequeue_with_aux_fn")
CLIB_MULTIARCH_FN (vlib_frame_queue_dequeue_with_aux_fn)
(vlib_main_t *vm, vlib_frame_queue_main_t *fqm)
{
  return vlib_frame_queue_dequeue_inline (vm, fqm, 1 /* with_aux */);
}

CLIB_MARCH_FN_REGISTRATION (vlib_frame_queue_dequeue_with_aux_fn);

#ifndef CLIB_MARCH_VARIANT
vlib_buffer_func_main_t vlib_buffer_func_main;

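/* Resolve the best march variant of each buffer-enqueue entry point once at
 * init time and cache the function pointers in vlib_buffer_func_main. */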
static clib_error_t *
vlib_buffer_funcs_init (vlib_main_t *vm)
{
  vlib_buffer_func_main_t *bfm = &vlib_buffer_func_main;
  bfm->buffer_enqueue_to_next_fn =
    CLIB_MARCH_FN_POINTER (vlib_buffer_enqueue_to_next_fn);
  bfm->buffer_enqueue_to_next_with_aux_fn =
    CLIB_MARCH_FN_POINTER (vlib_buffer_enqueue_to_next_with_aux_fn);
  bfm->buffer_enqueue_to_single_next_fn =
    CLIB_MARCH_FN_POINTER (vlib_buffer_enqueue_to_single_next_fn);
  bfm->buffer_enqueue_to_single_next_with_aux_fn =
    CLIB_MARCH_FN_POINTER (vlib_buffer_enqueue_to_single_next_with_aux_fn);
  bfm->buffer_enqueue_to_thread_fn =
    CLIB_MARCH_FN_POINTER (vlib_buffer_enqueue_to_thread_fn);
  bfm->buffer_enqueue_to_thread_with_aux_fn =
    CLIB_MARCH_FN_POINTER (vlib_buffer_enqueue_to_thread_with_aux_fn);
  return 0;
}

VLIB_INIT_FUNCTION (vlib_buffer_funcs_init);
#endif