blob: 4dd6a168217fe6bbca69b5a5b34f29b2321caf63 [file] [log] [blame]
Damjan Marion0da81682020-12-22 14:58:56 +01001/*
2 * Copyright (c) 2020 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16#include <vppinfra/clib.h>
17#include <vppinfra/mem.h>
18#include <vppinfra/time.h>
19#include <vppinfra/format.h>
20#include <vppinfra/clib_error.h>
21
/* while usage of dlmalloc APIs is generally discouraged, in this particular
 * case there is significant benefit of calling them directly due to
 * smaller memory consumption (no wwp and headroom space) */
25#include <vppinfra/dlmalloc.h>
26
27#define CLIB_MEM_BULK_DEFAULT_MIN_ELTS_PER_CHUNK 32
28
/* Per-chunk header, placed at the start of every chunk allocation.
 * Free elements inside a chunk form an intrusive singly-linked freelist:
 * the first 4 bytes of each free element hold the index of the next one. */
typedef struct clib_mem_bulk_chunk_hdr
{
  u32 freelist; /* index of first free element in this chunk, ~0 if none */
  u32 n_free;	/* number of free elements remaining in this chunk */
  /* links for the doubly-linked full_chunks / avail_chunks lists */
  struct clib_mem_bulk_chunk_hdr *prev, *next;
} clib_mem_bulk_chunk_hdr_t;
35
/* Bulk allocator state; opaque to callers (clib_mem_bulk_handle_t). */
typedef struct
{
  u32 elt_sz;	    /* element size, rounded up to requested alignment */
  u32 chunk_hdr_sz; /* chunk header size, rounded up to requested alignment */
  u32 elts_per_chunk; /* number of elements carved out of each chunk */
  u32 align;	      /* element alignment (>= 16) */
  u32 chunk_align; /* chunk alignment; power of two, lets free() recover the
		      chunk header from an element pointer by masking */
  void *mspace;	   /* dlmalloc mspace all chunks are allocated from */
  /* full_chunks: no free elements; avail_chunks: at least one free element */
  clib_mem_bulk_chunk_hdr_t *full_chunks, *avail_chunks;
} clib_mem_bulk_t;
46
Florin Corasee5cd4e2021-02-11 08:44:23 -080047static inline uword
48bulk_chunk_size (clib_mem_bulk_t *b)
49{
50 return (uword) b->elts_per_chunk * b->elt_sz + b->chunk_hdr_sz;
51}
52
Damjan Marion0da81682020-12-22 14:58:56 +010053__clib_export clib_mem_bulk_handle_t
54clib_mem_bulk_init (u32 elt_sz, u32 align, u32 min_elts_per_chunk)
55{
56 clib_mem_heap_t *heap = clib_mem_get_heap ();
57 clib_mem_bulk_t *b;
58 uword sz;
59
60 if ((b = mspace_memalign (heap->mspace, 16, sizeof (clib_mem_bulk_t))) == 0)
61 return 0;
62
63 if (align < 16)
64 align = 16;
65
66 if (min_elts_per_chunk == 0)
67 min_elts_per_chunk = CLIB_MEM_BULK_DEFAULT_MIN_ELTS_PER_CHUNK;
68
Damjan Marion79934e82022-04-05 12:40:31 +020069 clib_mem_unpoison (b, sizeof (clib_mem_bulk_t));
Damjan Marion0da81682020-12-22 14:58:56 +010070 clib_memset (b, 0, sizeof (clib_mem_bulk_t));
71 b->mspace = heap->mspace;
72 b->align = align;
73 b->elt_sz = round_pow2 (elt_sz, align);
74 b->chunk_hdr_sz = round_pow2 (sizeof (clib_mem_bulk_chunk_hdr_t), align);
75 b->elts_per_chunk = min_elts_per_chunk;
Florin Corasee5cd4e2021-02-11 08:44:23 -080076 sz = bulk_chunk_size (b);
Damjan Marion0da81682020-12-22 14:58:56 +010077 b->chunk_align = max_pow2 (sz);
78 b->elts_per_chunk += (b->chunk_align - sz) / b->elt_sz;
79 return b;
80}
81
82__clib_export void
83clib_mem_bulk_destroy (clib_mem_bulk_handle_t h)
84{
85 clib_mem_bulk_t *b = h;
86 clib_mem_bulk_chunk_hdr_t *c, *next;
Florin Corasee5cd4e2021-02-11 08:44:23 -080087 void *ms = b->mspace;
Damjan Marion0da81682020-12-22 14:58:56 +010088
89 c = b->full_chunks;
90
91again:
92 while (c)
93 {
94 next = c->next;
Damjan Marion79934e82022-04-05 12:40:31 +020095 clib_mem_poison (c, bulk_chunk_size (b));
Florin Corasee5cd4e2021-02-11 08:44:23 -080096 mspace_free (ms, c);
Damjan Marion0da81682020-12-22 14:58:56 +010097 c = next;
98 }
99
100 if (b->avail_chunks)
101 {
102 c = b->avail_chunks;
103 b->avail_chunks = 0;
104 goto again;
105 }
106
Damjan Marion79934e82022-04-05 12:40:31 +0200107 clib_mem_poison (b, sizeof (clib_mem_bulk_t));
Florin Corasee5cd4e2021-02-11 08:44:23 -0800108 mspace_free (ms, b);
Damjan Marion0da81682020-12-22 14:58:56 +0100109}
110
111static inline void *
112get_chunk_elt_ptr (clib_mem_bulk_t *b, clib_mem_bulk_chunk_hdr_t *c, u32 index)
113{
114 return (u8 *) c + b->chunk_hdr_sz + index * b->elt_sz;
115}
116
117static inline void
118add_to_chunk_list (clib_mem_bulk_chunk_hdr_t **first,
119 clib_mem_bulk_chunk_hdr_t *c)
120{
121 c->next = *first;
122 c->prev = 0;
123 if (c->next)
124 c->next->prev = c;
125 *first = c;
126}
127
128static inline void
129remove_from_chunk_list (clib_mem_bulk_chunk_hdr_t **first,
130 clib_mem_bulk_chunk_hdr_t *c)
131{
132 if (c->next)
133 c->next->prev = c->prev;
134 if (c->prev)
135 c->prev->next = c->next;
136 else
137 *first = c->next;
138}
139
140__clib_export void *
141clib_mem_bulk_alloc (clib_mem_bulk_handle_t h)
142{
143 clib_mem_bulk_t *b = h;
144 clib_mem_bulk_chunk_hdr_t *c = b->avail_chunks;
145 u32 elt_idx;
146
147 if (b->avail_chunks == 0)
148 {
Florin Corasee5cd4e2021-02-11 08:44:23 -0800149 u32 i, sz = bulk_chunk_size (b);
Damjan Marion0da81682020-12-22 14:58:56 +0100150 c = mspace_memalign (b->mspace, b->chunk_align, sz);
Damjan Marion79934e82022-04-05 12:40:31 +0200151 clib_mem_unpoison (c, sz);
Damjan Marion0da81682020-12-22 14:58:56 +0100152 clib_memset (c, 0, sizeof (clib_mem_bulk_chunk_hdr_t));
153 b->avail_chunks = c;
154 c->n_free = b->elts_per_chunk;
155
156 /* populate freelist */
157 for (i = 0; i < b->elts_per_chunk - 1; i++)
158 *((u32 *) get_chunk_elt_ptr (b, c, i)) = i + 1;
159 *((u32 *) get_chunk_elt_ptr (b, c, i)) = ~0;
160 }
161
162 ASSERT (c->freelist != ~0);
163 elt_idx = c->freelist;
164 c->freelist = *((u32 *) get_chunk_elt_ptr (b, c, elt_idx));
165 c->n_free--;
166
167 if (c->n_free == 0)
168 {
169 /* chunk is full */
170 ASSERT (c->freelist == ~0);
171 remove_from_chunk_list (&b->avail_chunks, c);
172 add_to_chunk_list (&b->full_chunks, c);
173 }
174
175 return get_chunk_elt_ptr (b, c, elt_idx);
176}
177
178__clib_export void
179clib_mem_bulk_free (clib_mem_bulk_handle_t h, void *p)
180{
181 clib_mem_bulk_t *b = h;
182 uword offset = (uword) p & (b->chunk_align - 1);
183 clib_mem_bulk_chunk_hdr_t *c = (void *) ((u8 *) p - offset);
184 u32 elt_idx = (offset - b->chunk_hdr_sz) / b->elt_sz;
185
186 ASSERT (elt_idx < b->elts_per_chunk);
Florin Coras7b0fa552020-12-23 10:18:16 -0800187 ASSERT (get_chunk_elt_ptr (b, c, elt_idx) == p);
Damjan Marion0da81682020-12-22 14:58:56 +0100188
189 c->n_free++;
190
191 if (c->n_free == b->elts_per_chunk)
192 {
193 /* chunk is empty - give it back */
194 remove_from_chunk_list (&b->avail_chunks, c);
Damjan Marion79934e82022-04-05 12:40:31 +0200195 clib_mem_poison (c, bulk_chunk_size (b));
Damjan Marion0da81682020-12-22 14:58:56 +0100196 mspace_free (b->mspace, c);
197 return;
198 }
199
200 if (c->n_free == 1)
201 {
202 /* move chunk to avail chunks */
203 remove_from_chunk_list (&b->full_chunks, c);
204 add_to_chunk_list (&b->avail_chunks, c);
205 }
206
207 /* add elt to freelist */
208 *(u32 *) p = c->freelist;
209 c->freelist = elt_idx;
210}
211
212__clib_export u8 *
213format_clib_mem_bulk (u8 *s, va_list *args)
214{
215 clib_mem_bulk_t *b = va_arg (*args, clib_mem_bulk_handle_t);
216 clib_mem_bulk_chunk_hdr_t *c;
217 uword n_chunks = 0, n_free_elts = 0, n_elts, chunk_sz;
218
219 c = b->full_chunks;
220 while (c)
221 {
222 n_chunks++;
223 c = c->next;
224 }
225
226 c = b->avail_chunks;
227 while (c)
228 {
229 n_chunks++;
230 n_free_elts += c->n_free;
231 c = c->next;
232 }
233
234 n_elts = n_chunks * b->elts_per_chunk;
Florin Corasc25882c2021-02-09 10:03:50 -0800235 chunk_sz = b->chunk_hdr_sz + (uword) b->elts_per_chunk * b->elt_sz;
Damjan Marion0da81682020-12-22 14:58:56 +0100236
237 s = format (s, "%u bytes/elt, align %u, chunk-align %u, ", b->elt_sz,
238 b->align, b->chunk_align);
239 s = format (s, "%u elts-per-chunk, chunk size %lu bytes", b->elts_per_chunk,
240 chunk_sz);
241
242 if (n_chunks == 0)
243 return format (s, "\nempty");
244
245 s = format (s, "\n%lu chunks allocated, ", n_chunks);
246 s = format (s, "%lu / %lu free elts (%.1f%%), ", n_free_elts, n_elts,
247 (f64) n_free_elts * 100 / n_elts);
248 s = format (s, "%lu bytes of memory consumed", n_chunks * chunk_sz);
249
250 return s;
251}