/* SPDX-License-Identifier: Apache-2.0
 * Copyright(c) 2022 Cisco Systems, Inc.
 */

#include <vppinfra/vec.h>
#include <vppinfra/mem.h>

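/* Build-time knob, described here from its use in _vec_realloc_internal
 * below: when set to 1, a growing realloc requests exactly the needed
 * size; when 0 (the default), 50% of the data size is added as headroom
 * so repeated appends amortize to O(1) per element. */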
#ifndef CLIB_VECTOR_GROW_BY_ONE
#define CLIB_VECTOR_GROW_BY_ONE 0
#endif

__clib_export uword
vec_mem_size (void *v)
{
  return v ? clib_mem_size (v - vec_get_header_size (v)) : 0;
}

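/* Allocate and zero a fresh vector. Rough layout of the allocation, as
 * reconstructed from the code below (exact prefix field order is defined
 * in vec.h, not here):
 *
 *   p                                          v = p + data_offset
 *   |                                          |
 *   +------------------------------------------+-----------------+------+
 *   | hdr_sz + heap ptr (opt) + vec_header_t,  | n_elts * elt_sz | grow |
 *   | rounded up to 'align'                    | of element data | space|
 *   +------------------------------------------+-----------------+------+
 *
 * Any extra bytes the allocator hands back beyond req_size become grow
 * space, recorded in grow_elts and kept poisoned until used. */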
__clib_export void *
_vec_alloc_internal (uword n_elts, const vec_attr_t *const attr)
{
  uword req_size, alloc_size, data_offset, align;
  uword elt_sz = attr->elt_sz;
  void *p, *v, *heap = attr->heap;

  /* alignment must be power of 2 */
  align = clib_max (attr->align, VEC_MIN_ALIGN);
  ASSERT (count_set_bits (align) == 1);

  /* calc offset where vector data starts */
  data_offset = attr->hdr_sz + sizeof (vec_header_t);
  data_offset += heap ? sizeof (void *) : 0;
  data_offset = round_pow2 (data_offset, align);

  req_size = data_offset + n_elts * elt_sz;
  p = clib_mem_heap_alloc_aligned (heap, req_size, align);

  /* zero out whole allocation */
  alloc_size = clib_mem_size (p);
  clib_mem_unpoison (p, alloc_size);
  clib_memset_u8 (p, 0, alloc_size);

  /* fill vector header */
  v = p + data_offset;
  _vec_find (v)->len = n_elts;
  _vec_find (v)->hdr_size = data_offset / VEC_MIN_ALIGN;
  _vec_find (v)->log2_align = min_log2 (align);
  if (heap)
    {
      _vec_find (v)->default_heap = 0;
      _vec_heap (v) = heap;
    }
  else
    _vec_find (v)->default_heap = 1;

  /* poison extra space given by allocator */
  clib_mem_poison (p + req_size, alloc_size - req_size);
  _vec_set_grow_elts (v, (alloc_size - req_size) / elt_sz);
  return v;
}

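/* Common tail for the realloc/resize paths below: store the new length,
 * record how many spare element slots remain, and reapply memory
 * poisoning so live data is addressable while the spare tail is not. */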
static inline void
_vec_update_len (void *v, uword n_elts, uword elt_sz, uword n_data_bytes,
		 uword unused_bytes)
{
  _vec_find (v)->len = n_elts;
  _vec_set_grow_elts (v, unused_bytes / elt_sz);
  clib_mem_unpoison (v, n_data_bytes);
  clib_mem_poison (v + n_data_bytes, unused_bytes);
}

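/* Grow a vector to n_elts, reallocating only when the existing allocation
 * cannot hold the new data. Unless CLIB_VECTOR_GROW_BY_ONE is set, 50% of
 * the new data size is requested on top as headroom; e.g. growing to 100
 * 8-byte elements asks the allocator for the header plus 800 + 400 bytes. */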
__clib_export void *
_vec_realloc_internal (void *v, uword n_elts, const vec_attr_t *const attr)
{
  uword old_alloc_sz, new_alloc_sz, new_data_size, n_data_bytes, data_offset;
  uword elt_sz;

  if (PREDICT_FALSE (v == 0))
    return _vec_alloc_internal (n_elts, attr);

  elt_sz = attr->elt_sz;
  n_data_bytes = n_elts * elt_sz;
  data_offset = vec_get_header_size (v);
  new_data_size = data_offset + n_data_bytes;
  new_alloc_sz = old_alloc_sz = clib_mem_size (vec_header (v));

  /* realloc if new size cannot fit into existing allocation */
  if (old_alloc_sz < new_data_size)
    {
      uword n_bytes, req_size = new_data_size;
      void *p = v - data_offset;

      req_size += CLIB_VECTOR_GROW_BY_ONE ? 0 : n_data_bytes / 2;

      p = clib_mem_heap_realloc_aligned (vec_get_heap (v), p, req_size,
					 vec_get_align (v));
      new_alloc_sz = clib_mem_size (p);
      v = p + data_offset;

      /* zero out new allocation */
      n_bytes = new_alloc_sz - old_alloc_sz;
      clib_mem_unpoison (p + old_alloc_sz, n_bytes);
      clib_memset_u8 (p + old_alloc_sz, 0, n_bytes);
    }

  _vec_update_len (v, n_elts, elt_sz, n_data_bytes,
		   new_alloc_sz - new_data_size);
  return v;
}

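/* Fast-path resize: if the requested length still fits in the current
 * allocation (including grow space), only the length and poisoning are
 * updated; otherwise fall through to _vec_realloc_internal. */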
__clib_export void *
_vec_resize_internal (void *v, uword n_elts, const vec_attr_t *const attr)
{
  uword elt_sz = attr->elt_sz;
  if (PREDICT_TRUE (v != 0))
    {
      uword hs = _vec_find (v)->hdr_size * VEC_MIN_ALIGN;
      uword alloc_sz = clib_mem_size (v - hs);
      uword n_data_bytes = elt_sz * n_elts;
      word unused_bytes = alloc_sz - (n_data_bytes + hs);

      if (PREDICT_TRUE (unused_bytes >= 0))
	{
	  _vec_update_len (v, n_elts, elt_sz, n_data_bytes, unused_bytes);
	  return v;
	}
    }

  /* this should emit a tail jump and likely avoid stack usage inside this
   * function */
  return _vec_realloc_internal (v, n_elts, attr);
}

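/* Out-of-line variants of the vec_len() / vec_free() inlines, presumably
 * for callers that cannot use the header-only versions (debuggers,
 * foreign-language bindings, etc.). */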
__clib_export u32
vec_len_not_inline (void *v)
{
  return vec_len (v);
}

__clib_export void
vec_free_not_inline (void *v)
{
  vec_free (v);
}
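
/* Usage sketch (hypothetical, not part of the library): one grow path
 * through these internals, assuming only the vec_attr_t fields used above.
 *
 *   vec_attr_t va = { .elt_sz = sizeof (u32) };
 *   u32 *v = _vec_alloc_internal (4, &va);  // len 4, zeroed, aligned
 *   v = _vec_resize_internal (v, 8, &va);   // fast path or realloc
 *   vec_free_not_inline (v);                // releases header + data
 */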