/*
 * Copyright 2010 Marek Olšák <maraeo@gmail.com>
 * Copyright 2016 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
/* Slab allocator for equally sized memory allocations.
 *
 * The thread-safe path ("*_mt" functions) is usually slower than malloc/free.
 * The single-threaded path ("*_st" functions) is faster than malloc/free.
 */
#include "c11/threads.h"
/* The page is an array of allocations in one block. */
struct slab_page_header
{
   /* The header (linked-list pointers). */
   struct slab_page_header *prev, *next;

   /* Memory after the last member is dedicated to the page itself.
    * The allocated size is always larger than this structure:
    * the element storage follows this header in the same block.
    */
};
47 unsigned element_size
;
48 unsigned num_elements
;
49 struct slab_element_header
*first_free
;
50 struct slab_page_header list
;
/* Initialize the pool.
 *
 * NOTE(review): the second and third parameters were lost in this copy of
 * the header (the declaration was truncated after "pool,"). They are
 * reconstructed here to match the pool's element_size/num_elements fields --
 * confirm names and types against the implementation.
 */
void slab_create(struct slab_mempool *pool,
                 unsigned item_size,
                 unsigned num_items);
/* Tear down the pool; presumably releases all pages it owns -- see slab.c. */
void slab_destroy(struct slab_mempool *pool);
/* Allocate one element from the pool -- single-threaded path (not thread-safe,
 * per the file comment). */
void *slab_alloc_st(struct slab_mempool *pool);
/* Return an element to the pool -- single-threaded path (not thread-safe,
 * per the file comment). */
void slab_free_st(struct slab_mempool *pool, void *ptr);
/* Allocate one element from the pool -- thread-safe path (per the file
 * comment, usually slower than malloc/free). */
void *slab_alloc_mt(struct slab_mempool *pool);
/* Return an element to the pool -- thread-safe path. */
void slab_free_mt(struct slab_mempool *pool, void *ptr);