14#error "The number of bits (8,16,32,64) to use for the arena's key"
19#error "The value type to place in the arena must be defined"
} derive_c_parameter_value;
#define V derive_c_parameter_value
void derive_c_parameter_value_delete(derive_c_parameter_value*) {}
#define V_DELETE derive_c_parameter_value_delete
#define V_DELETE(value)
#define INDEX_TYPE uint8_t
#define MAX_CAPACITY (UINT8_MAX + 1ULL)
#define MAX_INDEX (UINT8_MAX - 1ULL)
#define INDEX_NONE UINT8_MAX
#define INDEX_TYPE uint16_t
#define MAX_CAPACITY (UINT16_MAX + 1ULL)
#define MAX_INDEX (UINT16_MAX - 1ULL)
#define INDEX_NONE UINT16_MAX
#define INDEX_TYPE uint32_t
#define MAX_CAPACITY (UINT32_MAX + 1ULL)
#define MAX_INDEX (UINT32_MAX - 1ULL)
#define INDEX_NONE UINT32_MAX
#define INDEX_TYPE uint64_t
#define MAX_CAPACITY UINT64_MAX
#define MAX_INDEX (UINT64_MAX - 1ULL)
#define INDEX_NONE UINT64_MAX
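/* The all-ones value of the chosen key width is reserved as INDEX_NONE; it serves both as the
 * free-list terminator and as the "no index" sentinel returned by exhausted iterators, which is
 * why MAX_INDEX sits one below the type's maximum. */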
#define SLOT NAME(SELF, SLOT)
#define CHECK_ACCESS_INDEX(self, index) (index.index < self->exclusive_end)
#define RESIZE_FACTOR 2
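/* RESIZE_FACTOR is the geometric growth factor used for the slot allocation when insert finds
 * exclusive_end has reached capacity (see the realloc path below). */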
#define INDEX NAME(SELF, index)
        .capacity = (INDEX_TYPE)capacity,
        .free_list = INDEX_NONE,
    if (self->free_list != INDEX_NONE) {
        INDEX_TYPE free_index = self->free_list;
        SLOT* slot = &self->slots[free_index];
        return (INDEX){.index = free_index};
    if (self->exclusive_end == self->capacity) {
        SLOT* new_alloc = (SLOT*)realloc(self->slots, self->capacity * sizeof(SLOT));
        self->slots = new_alloc;
    INDEX_TYPE new_index = self->exclusive_end;
    SLOT* slot = &self->slots[new_index];
    self->exclusive_end++;
    return (INDEX){.index = new_index};
    SLOT* slot = &self->slots[index.index];
    SLOT* slot = &self->slots[index.index];
    SLOT* slots = (SLOT*)malloc(self->capacity * sizeof(SLOT));
    memcpy(slots, self->slots, self->exclusive_end * sizeof(SLOT));
        .capacity = self->capacity,
        .free_list = self->free_list,
        .exclusive_end = self->exclusive_end,
        .count = self->count,
    if (self->free_list == INDEX_NONE) {
    SLOT* entry = &self->slots[index.index];
    *destination = entry->value;
    self->free_list = index.index;
    SLOT* entry = &self->slots[index.index];
    self->free_list = index.index;
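/* Both removal paths push the freed slot onto the free list by making its index the new
 * free_list head, so a subsequent insert can reuse the slot before the arena has to grow. */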
#define IV_PAIR NAME(SELF, iv)
        .index = {.index = INDEX_NONE},
#define ITER NAME(SELF, iter)
    INDEX_TYPE next_index;
    return iter->next_index == INDEX_NONE || iter->next_index >= iter->arena->exclusive_end;
        return (IV_PAIR){.index = (INDEX){.index = INDEX_NONE}, .value = NULL};
        .value = &iter->arena->slots[iter->next_index].value};
    while (iter->next_index != INDEX_NONE && iter->next_index < iter->arena->exclusive_end &&
           !iter->arena->slots[iter->next_index].present) {
    INDEX_TYPE index = 0;
    while (index < INDEX_NONE && index < self->exclusive_end && !self->slots[index].present) {
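/* Iteration scans slot indices from 0 up to exclusive_end, skipping entries whose present flag
 * is unset; get_iter starts at the first present slot, and an exhausted iterator reports
 * next_index == INDEX_NONE. */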
#define IV_PAIR_CONST NAME(SELF, iv_const)
        .index = {.index = INDEX_NONE},
#define ITER_CONST NAME(SELF, iter_const)
    INDEX_TYPE next_index;
static bool NAME(ITER_CONST, empty)(ITER_CONST const* iter) {
    return iter->next_index == INDEX_NONE || iter->next_index >= iter->arena->exclusive_end;
        .value = &iter->arena->slots[iter->next_index].value};
    while (iter->next_index != INDEX_NONE && iter->next_index < iter->arena->exclusive_end &&
           !iter->arena->slots[iter->next_index].present) {
    INDEX_TYPE index = 0;
    while (index < INDEX_NONE && index < self->exclusive_end && !self->slots[index].present) {
#undef CHECK_ACCESS_INDEX
static INDEX insert(SELF *self, V value)
static bool try_remove(SELF *self, INDEX index, V *destination)
static bool full(SELF const *self)
static ITER_CONST get_iter_const(SELF const *self)
static bool empty(ITER const *iter)
static V remove(SELF *self, INDEX index)
static ITER get_iter(SELF *self)
static IV_PAIR next(ITER *iter)
static size_t max_capacity
static INDEX_TYPE size(SELF const *self)
static V const * try_read(SELF const *self, INDEX index)
#define CHECK_ACCESS_INDEX(self, index)
static SELF new_with_capacity_for(INDEX_TYPE items)
static V * write(SELF *self, INDEX index)
static IV_PAIR_CONST iv_const_empty
static bool delete_entry(SELF *self, INDEX index)
static SELF shallow_clone(SELF const *self)
static V * try_write(SELF *self, INDEX index)
static size_t position(ITER const *iter)
static V const * read(SELF const *self, INDEX index)
static size_t next_power_of_2(size_t x)
#define DEBUG_ASSERT(expr)
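As a rough usage sketch of the generated API: the V parameter and the function shapes come from
the listing above, but the key-width macro name (INDEX_BITS), the SELF name, the header path, and
the exact NAME(SELF, ...) expansions (point_arena_insert and friends) are assumptions, not taken
from the source.

#include <stdio.h>

typedef struct {
    int x;
    int y;
} point;

#define V point            /* value type stored in the arena (parameter from the listing)  */
#define INDEX_BITS 16      /* assumed name for the 8/16/32/64 key-width parameter          */
#define SELF point_arena   /* assumed user-chosen name that NAME(SELF, ...) expands against */
#include "arena.h"         /* assumed path of the template header                          */

int main(void) {
    point_arena arena = point_arena_new_with_capacity_for(8);

    /* insert returns a typed index (NAME(SELF, index)) rather than a raw pointer */
    point_arena_index id = point_arena_insert(&arena, (point){.x = 1, .y = 2});

    point const* p = point_arena_read(&arena, id);
    printf("(%d, %d) size=%u\n", p->x, p->y, (unsigned)point_arena_size(&arena));

    /* remove hands the value back and frees the slot for reuse by later inserts */
    point removed = point_arena_remove(&arena, id);
    printf("removed (%d, %d)\n", removed.x, removed.y);
    return 0;
}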