/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
  Copyright (c) 2001, 2002, 2003 Eliot Dresselhaus

  Permission is hereby granted, free of charge, to any person obtaining
  a copy of this software and associated documentation files (the
  "Software"), to deal in the Software without restriction, including
  without limitation the rights to use, copy, modify, merge, publish,
  distribute, sublicense, and/or sell copies of the Software, and to
  permit persons to whom the Software is furnished to do so, subject to
  the following conditions:

  The above copyright notice and this permission notice shall be
  included in all copies or substantial portions of the Software.

  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
  EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
  MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
  NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
  LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
  OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
  WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/

#include <vppinfra/cache.h>	/* for CLIB_CACHE_LINE_BYTES */
#include <vppinfra/mem.h>
#include <vppinfra/hash.h>
#include <vppinfra/vec.h>
#include <vppinfra/heap.h>
#include <vppinfra/error.h>

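/* Internal helpers for the doubly linked chain of heap elements.
   elt_at returns the element at index i; first and last return the
   elements at the head and tail of the chain. */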
always_inline heap_elt_t *
elt_at (heap_header_t * h, uword i)
{
  ASSERT (i < vec_len (h->elts));
  return h->elts + i;
}

always_inline heap_elt_t *
last (heap_header_t * h)
{
  return elt_at (h, h->tail);
}

always_inline heap_elt_t *
first (heap_header_t * h)
{
  return elt_at (h, h->head);
}

/* Object sizes are binned into N_BINS bins.
   Objects with size <= SMALL_BINS have their own bins.
   Larger objects are grouped together in power-of-2 sized
   bins.

   Sizes are in units of elt_bytes bytes. */

/* Convert size to bin. */
always_inline uword
size_to_bin (uword size)
{
  uword bin;

  ASSERT (size > 0);

  if (size <= HEAP_SMALL_BINS)
    {
      bin = size - 1;
      if (size == 0)
	bin = 0;
    }
  else
    {
      bin = HEAP_SMALL_BINS + max_log2 (size) - (HEAP_LOG2_SMALL_BINS + 1);
      if (bin >= HEAP_N_BINS)
	bin = HEAP_N_BINS - 1;
    }

  return bin;
}

/* Convert bin to size. */
always_inline __attribute__ ((unused))
  uword bin_to_size (uword bin)
{
  uword size;

  if (bin <= HEAP_SMALL_BINS - 1)
    size = bin + 1;
  else
    size = (uword) 1 << ((bin - HEAP_SMALL_BINS) + HEAP_LOG2_SMALL_BINS + 1);

  return size;
}

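/* Unlink element E from the doubly linked chain and either recycle its
   index on the free-element list or, if it is the last element, shrink
   the element vector. */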
static void
elt_delete (heap_header_t * h, heap_elt_t * e)
{
  heap_elt_t *l = vec_end (h->elts) - 1;

  ASSERT (e >= h->elts && e <= l);

  /* Update doubly linked pointers. */
  {
    heap_elt_t *p = heap_prev (e);
    heap_elt_t *n = heap_next (e);

    if (p == e)
      {
	n->prev = 0;
	h->head = n - h->elts;
      }
    else if (n == e)
      {
	p->next = 0;
	h->tail = p - h->elts;
      }
    else
      {
	p->next = n - p;
	n->prev = p - n;
      }
  }

  /* Add to index free list or delete from end. */
  if (e < l)
    vec_add1 (h->free_elts, e - h->elts);
  else
    vec_dec_len (h->elts, 1);
}

/*
  Before: P ... E
  After : P ... NEW ... E
*/
always_inline void
elt_insert_before (heap_header_t * h, heap_elt_t * e, heap_elt_t * new)
{
  heap_elt_t *p = heap_prev (e);

  if (p == e)
    {
      new->prev = 0;
      new->next = e - new;
      p->prev = new - p;
      h->head = new - h->elts;
    }
  else
    {
      new->prev = p - new;
      new->next = e - new;
      e->prev = new - e;
      p->next = new - p;
    }
}

/*
  Before: E ... N
  After : E ... NEW ... N
*/
always_inline void
elt_insert_after (heap_header_t * h, heap_elt_t * e, heap_elt_t * new)
{
  heap_elt_t *n = heap_next (e);

  if (n == e)
    {
      new->next = 0;
      new->prev = e - new;
      e->next = new - e;
      h->tail = new - h->elts;
    }
  else
    {
      new->prev = e - new;
      new->next = n - new;
      e->next = new - e;
      n->prev = new - n;
    }
}

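/* Allocate a heap_elt_t, reusing a previously deleted element index if
   one is available, otherwise growing the element vector. */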
always_inline heap_elt_t *
elt_new (heap_header_t * h)
{
  heap_elt_t *e;
  uword l;
  if ((l = vec_len (h->free_elts)) > 0)
    {
      e = elt_at (h, h->free_elts[l - 1]);
      vec_dec_len (h->free_elts, 1);
    }
  else
    vec_add2 (h->elts, e, 1);
  return e;
}

/* Return pointer to object at given offset.
   Used to write free list index of free objects. */
always_inline u32 *
elt_data (void *v, heap_elt_t * e)
{
  heap_header_t *h = heap_header (v);
  return v + heap_offset (e) * h->elt_bytes;
}

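/* Mark element E as free and record its index FI within its free list.
   The index is stored in the object data itself when the object is large
   enough, otherwise in the small_free_elt_free_index side vector. */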
always_inline void
set_free_elt (void *v, heap_elt_t * e, uword fi)
{
  heap_header_t *h = heap_header (v);

  e->offset |= HEAP_ELT_FREE_BIT;
  if (h->elt_bytes >= sizeof (u32))
    {
      *elt_data (v, e) = fi;
    }
  else
    {
      /* For elt_bytes < 4 we must store free index in separate
	 vector. */
      uword elt_index = e - h->elts;
      vec_validate (h->small_free_elt_free_index, elt_index);
      h->small_free_elt_free_index[elt_index] = fi;
    }
}

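/* For a free element E, return its index within its free list and store
   the bin number in BIN_RESULT. */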
always_inline uword
get_free_elt (void *v, heap_elt_t * e, uword * bin_result)
{
  heap_header_t *h = heap_header (v);
  uword fb, fi;

  ASSERT (heap_is_free (e));
  fb = size_to_bin (heap_elt_size (v, e));

  if (h->elt_bytes >= sizeof (u32))
    {
      fi = *elt_data (v, e);
    }
  else
    {
      uword elt_index = e - h->elts;
      fi = vec_elt (h->small_free_elt_free_index, elt_index);
    }

  *bin_result = fb;
  return fi;
}

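/* Remove entry I from free list B, filling the hole with the last entry
   so the free list stays dense. */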
always_inline void
remove_free_block (void *v, uword b, uword i)
{
  heap_header_t *h = heap_header (v);
  uword l;

  ASSERT (b < vec_len (h->free_lists));
  ASSERT (i < vec_len (h->free_lists[b]));

  l = vec_len (h->free_lists[b]);

  if (i < l - 1)
    {
      uword t = h->free_lists[b][l - 1];
      h->free_lists[b][i] = t;
      set_free_elt (v, elt_at (h, t), i);
    }
  vec_set_len (h->free_lists[b], l - 1);
}

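/* Find a free block of at least SIZE units.  Splits a larger free block
   when needed and returns the element describing the newly used region,
   or 0 if no suitable block exists. */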
static heap_elt_t *
search_free_list (void *v, uword size)
{
  heap_header_t *h = heap_header (v);
  heap_elt_t *f, *u;
  uword b, fb, f_size, f_index;
  word s, l;

  if (!v)
    return 0;

  /* Search free lists for bins >= given size. */
  for (b = size_to_bin (size); b < vec_len (h->free_lists); b++)
    if ((l = vec_len (h->free_lists[b])) > 0)
      {
	/* Find an object that is large enough.
	   Search list in reverse so that more recently freed objects will be
	   allocated again sooner. */
	u8 found = 0;
	do
	  {
	    l--;
	    f_index = h->free_lists[b][l];
	    f = elt_at (h, f_index);
	    f_size = heap_elt_size (v, f);
	    if ((s = f_size - size) >= 0)
	      {
		found = 1;
		break;
	      }
	  }
	while (l > 0);

	/* If we fail to find a large enough object, try the next larger size. */
	if (found == 0)
	  continue;

	ASSERT (heap_is_free (f));

	/* Link in used object (u) after free object (f). */
	if (s == 0)
	  {
	    u = f;
	    fb = HEAP_N_BINS;
	  }
	else
	  {
	    u = elt_new (h);
	    f = elt_at (h, f_index);
	    elt_insert_after (h, f, u);
	    fb = size_to_bin (s);
	  }

	u->offset = heap_offset (f) + s;

	if (fb != b)
	  {
	    if (fb < HEAP_N_BINS)
	      {
		uword i;
		vec_validate (h->free_lists, fb);
		i = vec_len (h->free_lists[fb]);
		vec_add1 (h->free_lists[fb], f - h->elts);
		set_free_elt (v, f, i);
	      }

	    remove_free_block (v, b, l);
	  }

	return u;
      }

  return 0;
}

static void combine_free_blocks (void *v, heap_elt_t * e0, heap_elt_t * e1);

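/* Put element E back on the appropriate free list and merge it with
   adjacent free neighbors when possible. */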
static inline void
dealloc_elt (void *v, heap_elt_t * e)
{
  heap_header_t *h = heap_header (v);
  uword b, l;
  heap_elt_t *n, *p;

  b = size_to_bin (heap_elt_size (v, e));
  vec_validate (h->free_lists, b);
  l = vec_len (h->free_lists[b]);
  vec_add1 (h->free_lists[b], e - h->elts);
  set_free_elt (v, e, l);

  /* See if we can combine the block we just freed with neighboring free blocks. */
  p = heap_prev (e);
  if (!heap_is_free (p))
    p = e;

  n = heap_next (e);
  if (!heap_is_free (n))
    n = e;

  if (p != n)
    combine_free_blocks (v, p, n);
}

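/* Allocate SIZE units (of elt_bytes each) with the requested alignment.
   Returns the (possibly reallocated) heap vector; the object's offset and
   handle are returned through OFFSET_RETURN and HANDLE_RETURN, or ~0 on
   failure. */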
__clib_export void *
_heap_alloc (void *v,
	     uword size,
	     uword align,
	     uword elt_bytes, uword * offset_return, uword * handle_return)
{
  uword offset = 0, align_size;
  heap_header_t *h;
  heap_elt_t *e;

  if (size == 0)
    goto error;

  /* Round up alignment to power of 2. */
  if (align <= 1)
    {
      align = 0;
      align_size = size;
    }
  else
    {
      align = max_pow2 (align);
      align_size = size + align - 1;
    }

  e = search_free_list (v, align_size);

  /* If nothing found on free list, allocate object from end of vector. */
  if (!e)
    {
      uword max_len;
      vec_attr_t va = { .elt_sz = elt_bytes,
			.hdr_sz = sizeof (h[0]),
			.align = HEAP_DATA_ALIGN };

      offset = vec_len (v);
      max_len = heap_get_max_len (v);

      if (max_len && offset + align_size > max_len)
	goto error;

      h = heap_header (v);
      if (!v || !(h->flags & HEAP_IS_STATIC))
	v = _vec_realloc_internal (v, offset + align_size, &va);
      else
	vec_inc_len (v, align_size);

      if (offset == 0)
	{
	  h = heap_header (v);
	  h->elt_bytes = elt_bytes;
	}
    }

  h = heap_header (v);

  /* Add new element to doubly linked chain of elements. */
  if (!e)
    {
      e = elt_new (h);
      e->offset = offset;
      elt_insert_after (h, last (h), e);
    }

  if (align > 0)
    {
      uword e_index;
      uword new_offset, old_offset;

      old_offset = e->offset;
      new_offset = (old_offset + align - 1) & ~(align - 1);
      e->offset = new_offset;
      e_index = e - h->elts;

      /* Free fragments before and after aligned object. */
      if (new_offset > old_offset)
	{
	  heap_elt_t *before_e = elt_new (h);
	  before_e->offset = old_offset;
	  elt_insert_before (h, h->elts + e_index, before_e);
	  dealloc_elt (v, before_e);
	}

      if (new_offset + size < old_offset + align_size)
	{
	  heap_elt_t *after_e = elt_new (h);
	  after_e->offset = new_offset + size;
	  elt_insert_after (h, h->elts + e_index, after_e);
	  dealloc_elt (v, after_e);
	}

      e = h->elts + e_index;
    }

  h->used_count++;

  /* Keep track of used elements when debugging.
     This allows deallocation to check that passed objects are valid. */
  if (CLIB_DEBUG > 0)
    {
      uword handle = e - h->elts;
      ASSERT (!clib_bitmap_get (h->used_elt_bitmap, handle));
      h->used_elt_bitmap = clib_bitmap_ori (h->used_elt_bitmap, handle);
    }

  *offset_return = e->offset;
  *handle_return = e - h->elts;
  return v;

error:
  *offset_return = *handle_return = ~0;
  return v;
}

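/* Free the object identified by HANDLE, previously returned by
   _heap_alloc. */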
__clib_export void
heap_dealloc (void *v, uword handle)
{
  heap_header_t *h = heap_header (v);
  heap_elt_t *e;

  ASSERT (handle < vec_len (h->elts));

  /* For debugging we keep track of indices for valid objects.
     We make sure the user is not trying to free an object with an
     invalid index. */
  if (CLIB_DEBUG > 0)
    {
      ASSERT (clib_bitmap_get (h->used_elt_bitmap, handle));
      h->used_elt_bitmap = clib_bitmap_andnoti (h->used_elt_bitmap, handle);
    }

  h->used_count--;

  e = h->elts + handle;
  ASSERT (!heap_is_free (e));

  dealloc_elt (v, e);
}

/* While freeing the object at INDEX we noticed free blocks i0 <= index and
   i1 >= index.  We combine these two or three blocks into one big free block. */
static void
combine_free_blocks (void *v, heap_elt_t * e0, heap_elt_t * e1)
{
  heap_header_t *h = heap_header (v);
  uword total_size, i, b, tb, ti, i_last, g_offset;
  heap_elt_t *e;

  struct
  {
    u32 index;
    u32 bin;
    u32 bin_index;
  } f[3], g;

  /* Compute total size of free objects i0 through i1. */
  total_size = 0;
  for (i = 0, e = e0; 1; e = heap_next (e), i++)
    {
      ASSERT (i < ARRAY_LEN (f));

      ti = get_free_elt (v, e, &tb);

      ASSERT (tb < vec_len (h->free_lists));
      ASSERT (ti < vec_len (h->free_lists[tb]));

      f[i].index = h->free_lists[tb][ti];
      f[i].bin = tb;
      f[i].bin_index = ti;

      total_size += heap_elt_size (v, elt_at (h, f[i].index));

      if (e == e1)
	{
	  i_last = i;
	  break;
	}
    }

  /* Compute combined bin.  See if all objects can be
     combined into existing bin. */
  b = size_to_bin (total_size);
  g.index = g.bin_index = 0;
  for (i = 0; i <= i_last; i++)
    if (b == f[i].bin)
      {
	g = f[i];
	break;
      }

  /* Make sure we found a bin. */
  if (i > i_last)
    {
      g.index = elt_new (h) - h->elts;
      vec_validate (h->free_lists, b);
      g.bin_index = vec_len (h->free_lists[b]);
      vec_add1 (h->free_lists[b], g.index);
      elt_insert_before (h, elt_at (h, f[0].index), elt_at (h, g.index));
    }

  g_offset = elt_at (h, f[0].index)->offset;

  /* Delete unused bins. */
  for (i = 0; i <= i_last; i++)
    if (g.index != f[i].index)
      {
	ti = get_free_elt (v, elt_at (h, f[i].index), &tb);
	remove_free_block (v, tb, ti);
	elt_delete (h, elt_at (h, f[i].index));
      }

  /* Initialize new element. */
  elt_at (h, g.index)->offset = g_offset;
  set_free_elt (v, elt_at (h, g.index), g.bin_index);
}

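/* Return the size (in elt_bytes units) of the object identified by
   HANDLE. */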
__clib_export uword
heap_len (void *v, word handle)
{
  heap_header_t *h = heap_header (v);

  if (CLIB_DEBUG > 0)
    ASSERT (clib_bitmap_get (h->used_elt_bitmap, handle));
  return heap_elt_size (v, elt_at (h, handle));
}

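/* Free all memory owned by the heap, including the heap vector itself
   unless the heap is static. */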
__clib_export void *
_heap_free (void *v)
{
  heap_header_t *h = heap_header (v);
  uword b;

  if (!v)
    return v;

  clib_bitmap_free (h->used_elt_bitmap);
  for (b = 0; b < vec_len (h->free_lists); b++)
    vec_free (h->free_lists[b]);
  vec_free (h->free_lists);
  vec_free (h->elts);
  vec_free (h->free_elts);
  vec_free (h->small_free_elt_free_index);
  if (!(h->flags & HEAP_IS_STATIC))
    vec_free (v);
  return v;
}

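/* Return the total number of bytes consumed by the heap, including
   bookkeeping overhead. */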
uword
heap_bytes (void *v)
{
  heap_header_t *h = heap_header (v);
  uword bytes, b;

  if (!v)
    return 0;

  bytes = sizeof (h[0]);
  bytes += vec_len (v) * sizeof (h->elt_bytes);
  for (b = 0; b < vec_len (h->free_lists); b++)
    bytes += vec_mem_size (h->free_lists[b]);
  bytes += vec_bytes (h->free_lists);
  bytes += vec_mem_size (h->elts);
  bytes += vec_mem_size (h->free_elts);
  bytes += vec_bytes (h->used_elt_bitmap);

  return bytes;
}

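/* Format up to N elements around index I (or the whole element chain when
   I and N are negative), one line per element. */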
static u8 *
debug_elt (u8 * s, void *v, word i, word n)
{
  heap_elt_t *e, *e0, *e1;
  heap_header_t *h = heap_header (v);
  word j;

  if (vec_len (h->elts) == 0)
    return s;

  if (i < 0)
    e0 = first (h);
  else
    {
      e0 = h->elts + i;
      for (j = 0; j < n / 2; j++)
	e0 = heap_prev (e0);
    }

  if (n < 0)
    e1 = h->elts + h->tail;
  else
    {
      e1 = h->elts + i;
      for (j = 0; j < n / 2; j++)
	e1 = heap_next (e1);
    }

  i = -n / 2;
  for (e = e0; 1; e = heap_next (e))
    {
      s = format (s, " ");
      if (heap_is_free (e))
	s = format (s, "index %4d, free\n", e - h->elts);
      else if (h->format_elt)
	s = format (s, "%U", h->format_elt, v, elt_data (v, e));
      else
	s = format (s, "index %4d, used\n", e - h->elts);
      i++;
      if (e == e1)
	break;
    }

  return s;
}

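/* Format function for heaps: summarizes object count and size; with the
   verbose argument set, also dumps every element via debug_elt. */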
__clib_export u8 *
format_heap (u8 *s, va_list *va)
{
  void *v = va_arg (*va, void *);
  uword verbose = va_arg (*va, uword);
  heap_header_t *h = heap_header (v);
  heap_header_t zero;

  clib_memset (&zero, 0, sizeof (zero));

  if (!v)
    h = &zero;

  {
    f64 elt_bytes = vec_len (v) * h->elt_bytes;
    f64 overhead_bytes = heap_bytes (v);

    s = format (s, "heap %p, %6d objects, size %.1fk + overhead %.1fk\n",
		v, h->used_count, elt_bytes / 1024,
		(overhead_bytes - elt_bytes) / 1024);
  }

  if (v && verbose)
    s = debug_elt (s, v, -1, -1);

  return s;
}

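/* Consistency check: verifies that the element chain, free lists, offsets
   and the used-element bitmap all agree with each other. */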
__clib_export void
heap_validate (void *v)
{
  heap_header_t *h = heap_header (v);
  uword i, o, s;
  u8 *free_map;
  heap_elt_t *e, *n;

  uword used_count, total_size;
  uword free_count, free_size;

  ASSERT (h->used_count == clib_bitmap_count_set_bits (h->used_elt_bitmap));

  ASSERT (first (h)->prev == 0);
  ASSERT (last (h)->next == 0);

  /* Validate number of elements and size. */
  free_size = free_count = 0;
  for (i = 0; i < vec_len (h->free_lists); i++)
    {
      free_count += vec_len (h->free_lists[i]);
      for (o = 0; o < vec_len (h->free_lists[i]); o++)
	{
	  e = h->elts + h->free_lists[i][o];
	  s = heap_elt_size (v, e);
	  ASSERT (size_to_bin (s) == i);
	  ASSERT (heap_is_free (e));
	  free_size += s;
	}
    }

  {
    uword elt_free_size, elt_free_count;

    used_count = total_size = elt_free_size = elt_free_count = 0;
    for (e = first (h); 1; e = n)
      {
	int is_free = heap_is_free (e);
	used_count++;
	s = heap_elt_size (v, e);
	total_size += s;
	ASSERT (is_free ==
		!clib_bitmap_get (h->used_elt_bitmap, e - h->elts));
	if (is_free)
	  {
	    elt_free_count++;
	    elt_free_size += s;
	  }
	n = heap_next (e);
	if (e == n)
	  {
	    ASSERT (last (h) == n);
	    break;
	  }

	/* We should never have two free adjacent elements. */
	ASSERT (!(heap_is_free (e) && heap_is_free (n)));
      }

    ASSERT (free_count == elt_free_count);
    ASSERT (free_size == elt_free_size);
    ASSERT (used_count == h->used_count + free_count);
    ASSERT (total_size == vec_len (v));
  }

  free_map = vec_new (u8, used_count);

  e = first (h);
  for (i = o = 0; 1; i++)
    {
      ASSERT (heap_offset (e) == o);
      s = heap_elt_size (v, e);

      if (heap_is_free (e))
	{
	  uword fb, fi;

	  fi = get_free_elt (v, e, &fb);

	  ASSERT (fb < vec_len (h->free_lists));
	  ASSERT (fi < vec_len (h->free_lists[fb]));
	  ASSERT (h->free_lists[fb][fi] == e - h->elts);

	  ASSERT (!free_map[i]);
	  free_map[i] = 1;
	}

      n = heap_next (e);

      if (e == n)
	break;

      ASSERT (heap_prev (n) == e);

      o += s;
      e = n;
    }

  vec_free (free_map);
}

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */