L4Re Operating System Framework
Interface and Usage Documentation
list_alloc
1// vim:set ft=cpp: -*- Mode: C++ -*-
2/*
3 * (c) 2008-2009 Alexander Warg <warg@os.inf.tu-dresden.de>,
4 * Torsten Frenzel <frenzel@os.inf.tu-dresden.de>
5 * economic rights: Technische Universität Dresden (Germany)
6 *
7 * This file is part of TUD:OS and distributed under the terms of the
8 * GNU General Public License 2.
9 * Please see the COPYING-GPL-2 file for details.
10 *
11 * As a special exception, you may use this file as part of a free software
12 * library without restriction. Specifically, if other files instantiate
13 * templates or use macros or inline functions from this file, or you compile
14 * this file and link it with other files to produce an executable, this
15 * file does not by itself cause the resulting executable to be covered by
16 * the GNU General Public License. This exception does not however
17 * invalidate any other reasons why the executable file might be covered by
18 * the GNU General Public License.
19 */
20
21#pragma once
22
23#include <l4/cxx/arith>
24#include <l4/cxx/minmax>
25#include <l4/sys/consts.h>
26
27namespace cxx {
28
32class List_alloc
33{
34private:
35 friend class List_alloc_sanity_guard;
36
37 struct Mem_block
38 {
39 Mem_block *next;
40 unsigned long size;
41 };
42
43 Mem_block *_first;
44
45 inline void check_overlap(void *, unsigned long );
46 inline void sanity_check_list(char const *, char const *);
47 inline void merge();
48
49public:
50
57 List_alloc() : _first(0) {}
58
71 inline void free(void *block, unsigned long size, bool initial_free = false);
72
87 inline void *alloc(unsigned long size, unsigned long align,
88 unsigned long lower = 0, unsigned long upper = ~0UL);
89
110 inline void *alloc_max(unsigned long min, unsigned long *max,
111 unsigned long align, unsigned granularity,
112 unsigned long lower = 0, unsigned long upper = ~0UL);
113
119 inline unsigned long avail();
120
121 template <typename DBG>
122 void dump_free_list(DBG &out);
123};
124
125#if !defined (CXX_LIST_ALLOC_SANITY)
126class List_alloc_sanity_guard
127{
128public:
129 List_alloc_sanity_guard(List_alloc *, char const *)
130 {}
131
132};
133
134
135void
136List_alloc::check_overlap(void *, unsigned long )
137{}
138
139void
140List_alloc::sanity_check_list(char const *, char const *)
141{}
142
143#else
144
145class List_alloc_sanity_guard
146{
147private:
148 List_alloc *a;
149 char const *func;
150
151public:
152 List_alloc_sanity_guard(List_alloc *a, char const *func)
153 : a(a), func(func)
154 { a->sanity_check_list(func, "entry"); }
155
156 ~List_alloc_sanity_guard()
157 { a->sanity_check_list(func, "exit"); }
158};
159
160void
161List_alloc::check_overlap(void *b, unsigned long s)
162{
163 unsigned long const mb_align = (1UL << arith::Ld<sizeof(Mem_block)>::value) - 1;
164 if ((unsigned long)b & mb_align)
165 {
166 L4::cerr << "List_alloc(FATAL): trying to free unaligned memory: "
167 << b << " align=" << arith::Ld<sizeof(Mem_block)>::value << "\n";
168 }
169
170 Mem_block *c = _first;
171 for (;c ; c = c->next)
172 {
173 unsigned long x_s = (unsigned long)b;
174 unsigned long x_e = x_s + s;
175 unsigned long b_s = (unsigned long)c;
176 unsigned long b_e = b_s + c->size;
177
178 if ((x_s >= b_s && x_s < b_e)
179 || (x_e > b_s && x_e <= b_e)
180 || (b_s >= x_s && b_s < x_e)
181 || (b_e > x_s && b_e <= x_e))
182 {
183 L4::cerr << "List_alloc(FATAL): trying to free memory that "
184 "is already free: \n ["
185 << (void*)x_s << '-' << (void*)x_e << ") overlaps ["
186 << (void*)b_s << '-' << (void*)b_e << ")\n";
187 }
188 }
189}
190
191void
192List_alloc::sanity_check_list(char const *func, char const *info)
193{
194 Mem_block *c = _first;
195 for (;c ; c = c->next)
196 {
197 if (c->next)
198 {
199 if (c >= c->next)
200 {
201 L4::cerr << "List_alloc(FATAL): " << func << '(' << info
202 << "): list order violation\n";
203 }
204
205 if (((unsigned long)c) + c->size > (unsigned long)c->next)
206 {
207 L4::cerr << "List_alloc(FATAL): " << func << '(' << info
208 << "): list order violation\n";
209 }
210 }
211 }
212}
213
214#endif
215
216void
217List_alloc::merge()
218{
219 List_alloc_sanity_guard __attribute__((unused)) guard(this, __func__);
220 Mem_block *c = _first;
221 while (c && c->next)
222 {
223 unsigned long f_start = reinterpret_cast<unsigned long>(c);
224 unsigned long f_end = f_start + c->size;
225 unsigned long n_start = reinterpret_cast<unsigned long>(c->next);
226
227 if (f_end == n_start)
228 {
229 c->size += c->next->size;
230 c->next = c->next->next;
231 continue;
232 }
233
234 c = c->next;
235 }
236}
237
238void
239List_alloc::free(void *block, unsigned long size, bool initial_free)
240{
241 List_alloc_sanity_guard __attribute__((unused)) guard(this, __func__);
242
243 unsigned long const mb_align = (1UL << arith::Ld<sizeof(Mem_block)>::value) - 1;
244
245 if (initial_free)
246 {
247 // enforce alignment constraint on initial memory
248 unsigned long nblock = (reinterpret_cast<unsigned long>(block) + mb_align)
249 & ~mb_align;
250 size = (size - (nblock - reinterpret_cast<unsigned long>(block)))
251 & ~mb_align;
252 block = reinterpret_cast<void*>(nblock);
253 }
254 else
255 // blow up size to the minimum aligned size
256 size = (size + mb_align) & ~mb_align;
257
258 check_overlap(block, size);
259
260 Mem_block **c = &_first;
261 Mem_block *next = 0;
262
263 if (*c)
264 {
265 while (*c && *c < block)
266 c = &(*c)->next;
267
268 next = *c;
269 }
270
271 *c = reinterpret_cast<Mem_block*>(block);
272
273 (*c)->next = next;
274 (*c)->size = size;
275
276 merge();
277}
278
279void *
280List_alloc::alloc_max(unsigned long min, unsigned long *max, unsigned long align,
281 unsigned granularity, unsigned long lower,
282 unsigned long upper)
283{
284 List_alloc_sanity_guard __attribute__((unused)) guard(this, __func__);
285
286 unsigned char const mb_bits = arith::Ld<sizeof(Mem_block)>::value;
287 unsigned long const mb_align = (1UL << mb_bits) - 1;
288
289 // blow minimum up to at least the minimum aligned size of a Mem_block
290 min = l4_round_size(min, mb_bits);
291 // truncate maximum to at least the size of a Mem_block
292 *max = l4_trunc_size(*max, mb_bits);
293 // truncate maximum size according to granularity
294 *max = *max & ~(granularity - 1UL);
295
296 if (min > *max)
297 return 0;
298
299 unsigned long almask = align ? (align - 1UL) : 0;
300
301 // minimum alignment is given by the size of a Mem_block
302 if (almask < mb_align)
303 almask = mb_align;
304
305 Mem_block **c = &_first;
306 Mem_block **fit = 0;
307 unsigned long max_fit = 0;
308 unsigned long a_lower = (lower + almask) & ~almask;
309
310 for (; *c; c = &(*c)->next)
311 {
312 // address of free memory block
313 unsigned long n_start = reinterpret_cast<unsigned long>(*c);
314
315 // block too small, next
316 // XXX: maybe we can skip this and just do the test below
317 if ((*c)->size < min)
318 continue;
319
320 // block outside region, next
321 if (upper < n_start || a_lower > n_start + (*c)->size)
322 continue;
323
324 // aligned start address within the free block
325 unsigned long a_start = (n_start + almask) & ~almask;
326
327 // check if aligned start address is behind the block, next
328 if (a_start - n_start >= (*c)->size)
329 continue;
330
331 a_start = a_start < a_lower ? a_lower : a_start;
332
333 // end address would overflow, next
334 if (min > ~0UL - a_start)
335 continue;
336
337 // block outside region, next
338 if (a_start + min - 1UL > upper)
339 continue;
340
341 // remaining size after subtracting the padding for the alignment
342 unsigned long r_size = (*c)->size - a_start + n_start;
343
344 // upper limit can limit maximum size
345 if (a_start + r_size - 1UL > upper)
346 r_size = upper - a_start + 1UL;
347
348 // round down according to granularity
349 r_size &= ~(granularity - 1UL);
350
351 // block too small
352 if (r_size < min)
353 continue;
354
355 if (r_size >= *max)
356 {
357 fit = c;
358 max_fit = *max;
359 break;
360 }
361
362 if (r_size > max_fit)
363 {
364 max_fit = r_size;
365 fit = c;
366 }
367 }
368
369 if (fit)
370 {
371 unsigned long n_start = reinterpret_cast<unsigned long>(*fit);
372 unsigned long a_lower = (lower + almask) & ~almask;
373 unsigned long a_start = (n_start + almask) & ~almask;
374 a_start = a_start < a_lower ? a_lower : a_start;
375 unsigned long r_size = (*fit)->size - a_start + n_start;
376
377 if (a_start > n_start)
378 {
379 (*fit)->size -= r_size;
380 fit = &(*fit)->next;
381 }
382 else
383 *fit = (*fit)->next;
384
385 *max = max_fit;
386 if (r_size == max_fit)
387 return reinterpret_cast<void *>(a_start);
388
389 Mem_block *m = reinterpret_cast<Mem_block*>(a_start + max_fit);
390 m->next = *fit;
391 m->size = r_size - max_fit;
392 *fit = m;
393 return reinterpret_cast<void *>(a_start);
394 }
395
396 return 0;
397}
398
399void *
400List_alloc::alloc(unsigned long size, unsigned long align, unsigned long lower,
401 unsigned long upper)
402{
403 List_alloc_sanity_guard __attribute__((unused)) guard(this, __func__);
404
405 unsigned long const mb_align
406 = (1UL << arith::Ld<sizeof(Mem_block)>::value) - 1;
407
408 // blow up size to the minimum aligned size
409 size = (size + mb_align) & ~mb_align;
410
411 unsigned long almask = align ? (align - 1UL) : 0;
412
413 // minimum alignment is given by the size of a Mem_block
414 if (almask < mb_align)
415 almask = mb_align;
416
417 Mem_block **c = &_first;
418 unsigned long a_lower = (lower + almask) & ~almask;
419
420 for (; *c; c=&(*c)->next)
421 {
422 // address of free memory block
423 unsigned long n_start = reinterpret_cast<unsigned long>(*c);
424
425 // block too small, next
426 // XXX: maybe we can skip this and just do the test below
427 if ((*c)->size < size)
428 continue;
429
430 // block outside region, next
431 if (upper < n_start || a_lower > n_start + (*c)->size)
432 continue;
433
434 // aligned start address within the free block
435 unsigned long a_start = (n_start + almask) & ~almask;
436
437 // block too small after alignment, next
438 if (a_start - n_start >= (*c)->size)
439 continue;
440
441 a_start = a_start < a_lower ? a_lower : a_start;
442
443 // end address would overflow, next
444 if (size > ~0UL - a_start)
445 continue;
446
447 // block outside region, next
448 if (a_start + size - 1UL > upper)
449 continue;
450
451 // remaining size after subtracting the padding
452 // for the alignment
453 unsigned long r_size = (*c)->size - a_start + n_start;
454
455 // block too small
456 if (r_size < size)
457 continue;
458
459 if (a_start > n_start)
460 {
461 // have free space before the allocated block
462 // shrink the block and set c to the next pointer of that
463 // block
464 (*c)->size -= r_size;
465 c = &(*c)->next;
466 }
467 else
468 // drop the block, c remains the next pointer of the
469 // previous block
470 *c = (*c)->next;
471
472 // allocated the whole remaining space
473 if (r_size == size)
474 return reinterpret_cast<void*>(a_start);
475
476 // add a new free block behind the allocated block
477 Mem_block *m = reinterpret_cast<Mem_block*>(a_start + size);
478 m->next = *c;
479 m->size = r_size - size;
480 *c = m;
481 return reinterpret_cast<void *>(a_start);
482 }
483
484 return 0;
485}
486
487unsigned long
488List_alloc::avail()
489{
490 List_alloc_sanity_guard __attribute__((unused)) guard(this, __FUNCTION__);
491 Mem_block *c = _first;
492 unsigned long a = 0;
493 while (c)
494 {
495 a += c->size;
496 c = c->next;
497 }
498
499 return a;
500}
501
502template <typename DBG>
503void
504List_alloc::dump_free_list(DBG &out)
505{
506 Mem_block *c = _first;
507 while (c)
508 {
509 unsigned sz;
510 const char *unit;
511
512 if (c->size < 1024)
513 {
514 sz = c->size;
515 unit = "Byte";
516 }
517 else if (c->size < 1 << 20)
518 {
519 sz = c->size >> 10;
520 unit = "kB";
521 }
522 else
523 {
524 sz = c->size >> 20;
525 unit = "MB";
526 }
527
528 out.printf("%12p - %12p (%u %s)\n", c,
529 reinterpret_cast<char *>(c) + c->size - 1, sz, unit);
530
531 c = c->next;
532 }
533}
534
535}
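
The listing above is the complete allocator. As a quick orientation, the following sketch shows the typical life cycle: hand a raw memory region to the allocator with free(..., initial_free = true), then allocate and release blocks from it and query avail(). This is an illustrative sketch, not part of the file; it assumes the header is installed as <l4/cxx/list_alloc>, and the pool and function names are made up.

#include <l4/cxx/list_alloc>

// Backing store donated to the allocator once at start-up (illustrative).
static char pool[64 << 10] __attribute__((aligned(16)));

unsigned long example()
{
  cxx::List_alloc a;

  // Donate the pool; initial_free = true lets the allocator first align
  // the region to its internal Mem_block granularity.
  a.free(pool, sizeof(pool), true);

  // Request 4 KiB aligned to 4 KiB; returns 0 if no fitting block exists.
  void *p = a.alloc(4096, 4096);

  if (p)
    a.free(p, 4096);   // return the block; the size must match the allocation

  return a.avail();    // total number of free bytes managed by the allocator
}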
Referenced definitions:

cxx::List_alloc (list_alloc:33): Standard list-based allocator.
List_alloc() (list_alloc:57): Initializes an empty list allocator.
unsigned long avail() (list_alloc:488): Get the amount of available memory.
void *alloc(unsigned long size, unsigned long align, unsigned long lower=0, unsigned long upper=~0UL) (list_alloc:400): Allocate a memory block.
void *alloc_max(unsigned long min, unsigned long *max, unsigned long align, unsigned granularity, unsigned long lower=0, unsigned long upper=~0UL) (list_alloc:280): Allocate a memory block of min <= size <= max.
void free(void *block, unsigned long size, bool initial_free=false) (list_alloc:239): Return a free memory block to the allocator.
l4_addr_t l4_trunc_size(l4_addr_t address, unsigned char bits) L4_NOTHROW (consts.h:448): Round an address down to the next lower flex page with size bits.
l4_addr_t l4_round_size(l4_addr_t value, unsigned char bits) L4_NOTHROW (consts.h:473): Round value up to the next alignment with bits size.
BasicOStream cerr: Standard error stream.
namespace cxx (arith:22): Our C++ library.
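
alloc_max() hands out the largest block it can find whose size lies between min and *max, rounded down to the given granularity, and reports the size actually granted through *max. A hedged sketch of its use follows; it assumes L4_PAGESIZE from <l4/sys/consts.h> as the page-size constant, and grab_largest is a made-up helper name.

#include <l4/cxx/list_alloc>
#include <l4/sys/consts.h>

// Grab the largest page-aligned, page-granular chunk between one page and
// 1 MiB; on success, *got receives the size actually allocated.
void *grab_largest(cxx::List_alloc &a, unsigned long *got)
{
  unsigned long max = 1UL << 20;        // upper bound of the request
  void *p = a.alloc_max(L4_PAGESIZE,    // minimum acceptable size
                        &max,           // in: maximum, out: granted size
                        L4_PAGESIZE,    // alignment of the start address
                        L4_PAGESIZE);   // size granularity
  if (p)
    *got = max;
  return p;
}

For debugging, dump_free_list() prints each remaining free range through any object that provides a printf() member, as seen in the template at the end of the listing.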