#if !defined(SKIP_INCLUDES)
#if !defined INDEX_BITS
    #if !defined DC_PLACEHOLDERS
TEMPLATE_ERROR(
    "The number of bits (8,16,32,64) to use for the arena's key")
    #endif
#endif
#if !defined BLOCK_INDEX_BITS
    #if !defined DC_PLACEHOLDERS
TEMPLATE_ERROR(
    "The number of bits used to get the offset within a block must be specified")
    #endif
    #define BLOCK_INDEX_BITS 7
#endif
26 "The number of bits for offset within a block must be "
27 "less than the number of bits used for an index");
#if !defined VALUE
    #if !defined DC_PLACEHOLDERS
TEMPLATE_ERROR(
    "The value type to place in the arena must be defined")
    #endif
    #define VALUE_DELETE value_delete
    #define VALUE_CLONE value_clone
    #define VALUE_DEBUG value_debug
#endif
#if !defined VALUE_DELETE
    #define VALUE_DELETE DC_NO_DELETE
#endif

#if !defined VALUE_CLONE
    #define VALUE_CLONE DC_COPY_CLONE
#endif

#if !defined VALUE_DEBUG
    #define VALUE_DEBUG DC_DEFAULT_DEBUG
#endif
#define SLOT NS(NAME, slot)

#define SLOT_INDEX_TYPE INDEX_TYPE
#define SLOT_VALUE VALUE
#define SLOT_VALUE_CLONE VALUE_CLONE
#define SLOT_VALUE_DELETE VALUE_DELETE
#define INTERNAL_NAME SLOT
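/*
 * The included slot template is expected to provide, per slot, a `present`
 * flag plus either the stored value or a free-list link; conceptually (a
 * sketch, not the actual definition):
 *
 *   typedef struct {
 *       bool present;
 *       union {
 *           SLOT_VALUE value;           // when present
 *           SLOT_INDEX_TYPE next_free;  // when on the free list
 *       };
 *   } SLOT;
 */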
#define INVARIANT_CHECK(self) \
    DC_ASSUME((self)->count <= MAX_INDEX); \
    DC_ASSUME((self)->block_current_exclusive_end <= \
              DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS)); \
    DC_ASSUME(DC_WHEN((self)->free_list == INDEX_NONE, \
                      (self)->count == \
                          (DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS) * (self)->block_current + \
                           (self)->block_current_exclusive_end)), \
              "All slots are full if the free list is empty");
    blocks[0] = first_block;
        .free_list = INDEX_NONE,
        .block_current_exclusive_end = 0,
        .alloc_ref = alloc_ref,
130 "Arena is full, cannot insert {count=%lu, max_index=%lu, value=%s}",
    if (self->free_list != INDEX_NONE) {
        INDEX_TYPE free_index = self->free_list;
        INDEX_TYPE block = DC_ARENA_CHUNKED_INDEX_TO_BLOCK(free_index, BLOCK_INDEX_BITS);
        INDEX_TYPE offset = DC_ARENA_CHUNKED_INDEX_TO_OFFSET(free_index, BLOCK_INDEX_BITS);
        SLOT* slot = &(*self->blocks[block])[offset];
        self->free_list = slot->next_free; // pop the free-list head
        NS(SLOT, fill)(slot, value);
        self->count++;
        return (INDEX){.index = free_index};
    }
        self->block_current++;
        self->block_current_exclusive_end = 0;
        size_t blocks_current_size = self->block_current * sizeof(PRIV(NS(SELF, block))*);
        size_t blocks_new_size = blocks_current_size + sizeof(PRIV(NS(SELF, block))*);
            self->alloc_ref, (void*)self->blocks, blocks_current_size, blocks_new_size);
            self->alloc_ref, sizeof(PRIV(NS(SELF, block))));
        self->blocks[self->block_current] = new_block;
    SLOT* slot = &(*self->blocks[self->block_current])[self->block_current_exclusive_end];
    self->block_current_exclusive_end++;
    return (INDEX){.index = index};
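/*
 * Insert/read sketch (hypothetical generated names, assuming an instantiation
 * with NAME = int_arena and VALUE = int):
 *
 *   int_arena_index idx = int_arena_insert(&arena, 42); // free list first, then bump-allocate
 *   int const* v = int_arena_read(&arena, idx);         // asserts idx is live
 *   int* w = int_arena_write(&arena, idx);              // mutable access
 */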
    if (block > self->block_current ||
        (block == self->block_current && offset >= self->block_current_exclusive_end)) {
        return NULL;
    }
    SLOT* slot = &(*self->blocks[block])[offset];
    if (!slot->present) {
        return NULL;
    }
    DC_ASSERT(value, "Cannot read item {index=%lu}", (size_t)index.index);
    DC_ASSERT(value, "Cannot write item {index=%lu}", (size_t)index.index);
        self->alloc_ref, sizeof(PRIV(NS(SELF, block))*) * (self->block_current + 1));
    for (INDEX_TYPE b = 0; b <= self->block_current; b++) {
            self->alloc_ref, sizeof(PRIV(NS(SELF, block))));
    for (INDEX_TYPE b = 0; b < self->block_current; b++) {
        PRIV(NS(SELF, block)) const* from_block = self->blocks[b];
    PRIV(NS(SELF, block))* to_current_block = blocks[self->block_current];
    PRIV(NS(SELF, block)) const* from_current_block = self->blocks[self->block_current];
    for (INDEX_TYPE i = 0; i < self->block_current_exclusive_end; i++) {
        .count = self->count,
        .free_list = self->free_list,
        .block_current = self->block_current,
        .block_current_exclusive_end = self->block_current_exclusive_end,
        .alloc_ref = self->alloc_ref,
    return self->count < MAX_INDEX;
    if (block > self->block_current ||
        (block == self->block_current && offset >= self->block_current_exclusive_end)) {
        return false;
    }
    PRIV(NS(SELF, block))* current_block = self->blocks[block];
    SLOT* entry = &(*current_block)[offset];
    if (entry->present) {
        *destination = entry->value;
        NS(SLOT, set_empty)(entry, self->free_list); // thread slot onto the free list
        self->free_list = index.index;
        self->count--;
        return true;
    }
298 "Failed to remove item, index not found {index=%lu}", (
size_t)index.index);
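/*
 * Free-list round trip (same hypothetical names as above): removing an entry
 * threads its slot onto the free list, and the next insert pops that slot
 * before growing, so freed indices are recycled:
 *
 *   int_arena_index a = int_arena_insert(&arena, 1);
 *   int out;
 *   int_arena_try_remove(&arena, a, &out);   // slot a becomes the free-list head
 *   int_arena_index b = int_arena_insert(&arena, 2);
 *   // b.index == a.index: the freed slot is reused
 */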
static DC_PUBLIC INDEX_TYPE PRIV(NS(SELF, next_index_value))(SELF const* self,
                                                             INDEX_TYPE from_index) {
    for (INDEX_TYPE next_index = from_index + 1;; next_index++) {
        if (block > self->block_current ||
            (block == self->block_current && offset >= self->block_current_exclusive_end)) {
            return INDEX_NONE;
        }
        SLOT* slot = &(*self->blocks[block])[offset];
#define ITER NS(SELF, iter)
#define IV_PAIR NS(ITER, item)
#define ITER_INVARIANT_CHECK(iter) \
    DC_ASSUME( \
        DC_WHEN((iter)->next_index != INDEX_NONE, \
                NS(SELF, try_read)((iter)->arena, (INDEX){.index = (iter)->next_index}) != NULL), \
        "The next index is either valid, or the iterator is empty");
    return (IV_PAIR){.index = (INDEX){.index = INDEX_NONE}, .value = NULL};
    return iter->next_index == INDEX_NONE;
    if (iter->next_index == INDEX_NONE) {
    INDEX index = {.index = iter->next_index};
    INDEX_TYPE first_index;
    if (self->block_current_exclusive_end > 0 && (*self->blocks[0])[0].present) {
        first_index = 0;
        .next_index = first_index,
    for (INDEX_TYPE b = 0; b <= self->block_current; b++) {
                   (self->block_current + 1) * sizeof(PRIV(NS(SELF, block))*)); // table holds block_current + 1 pointers
#undef ITER_INVARIANT_CHECK
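/*
 * Iteration sketch (hypothetical generated names as above, assuming iterator
 * functions are namespaced under ITER):
 *
 *   int_arena_iter it = int_arena_get_iter(&arena);
 *   while (!int_arena_iter_empty(&it)) {
 *       int_arena_iter_item item = int_arena_iter_next(&it);
 *       printf("[%lu] = %d\n", (size_t)item.index.index, *item.value);
 *   }
 */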
#define ITER_CONST NS(SELF, iter_const)
#define IV_PAIR_CONST NS(ITER_CONST, item)
#define ITER_CONST_INVARIANT_CHECK(iter) \
    DC_ASSUME( \
        DC_WHEN((iter)->next_index != INDEX_NONE, \
                NS(SELF, try_read)((iter)->arena, (INDEX){.index = (iter)->next_index}) != NULL), \
        "The next index is either valid, or the iterator is empty");
    return (IV_PAIR_CONST){.index = (INDEX){.index = INDEX_NONE}, .value = NULL};
    return item->value == NULL;
    return iter->next_index == INDEX_NONE;
    if (iter->next_index == INDEX_NONE) {
    INDEX index = {.index = iter->next_index};
    INDEX_TYPE first_index;
    if (self->block_current_exclusive_end > 0 && (*self->blocks[0])[0].present) {
        first_index = 0;
        .next_index = first_index,
                       (size_t)self->block_current_exclusive_end);
    for (INDEX_TYPE b = 0; b <= self->block_current; b++) {
        INDEX_TYPE block_entry_exclusive_end = b == self->block_current
                                                   ? self->block_current_exclusive_end
                                                   : DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS);
        for (INDEX_TYPE i = 0; i < block_entry_exclusive_end; i++) {
            SLOT* entry = &(*self->blocks[b])[i];
            if (entry->present) {
                dc_debug_fmt_print(fmt, stream, "[index=%lu] ",
                dc_debug_fmt_print(fmt, stream, "[index=%lu]{ next_free=%lu }\n",
                                   (size_t)DC_ARENA_CHUNKED_BLOCK_OFFSET_TO_INDEX(b, i, BLOCK_INDEX_BITS),
                                   (size_t)entry->next_free);
#undef ITER_CONST_INVARIANT_CHECK
#undef INVARIANT_CHECK
#undef BLOCK_INDEX_BITS
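/*
 * Instantiation sketch (hypothetical header name; the template consumes its
 * parameter macros via the #undefs above, so each instantiation redefines
 * them):
 *
 *   #define INDEX_BITS 32
 *   #define BLOCK_INDEX_BITS 7
 *   #define VALUE int
 *   #define NAME int_arena
 *   #include "arena_chunked_template.h"
 *
 *   #define INDEX_BITS 16
 *   #define BLOCK_INDEX_BITS 4
 *   #define VALUE double
 *   #define NAME double_arena
 *   #include "arena_chunked_template.h"
 */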
/*
 * API summary (from the generated documentation index):
 *
 * Arena operations:
 *   static DC_PUBLIC INDEX insert(SELF *self, VALUE value)
 *   static DC_PUBLIC VALUE remove(SELF *self, INDEX index)
 *   static DC_PUBLIC bool try_remove(SELF *self, INDEX index, VALUE *destination)
 *   static DC_PUBLIC VALUE const * read(SELF const *self, INDEX index)
 *   static DC_PUBLIC VALUE const * try_read(SELF const *self, INDEX index)
 *   static DC_PUBLIC VALUE * write(SELF *self, INDEX index)
 *   static DC_PUBLIC VALUE * try_write(SELF *self, INDEX index)
 *   static DC_PUBLIC size_t size(SELF const *self)
 *   static DC_PUBLIC bool full(SELF const *self)
 *   static DC_PUBLIC SELF clone(SELF const *self)
 *   static DC_PUBLIC void debug(SELF const *self, dc_debug_fmt fmt, FILE *stream)
 *   static DC_PUBLIC const size_t max_entries
 *   static DC_PUBLIC INDEX_TYPE PRIV next_index_value(SELF const *self, INDEX_TYPE from_index)
 *
 * Iterators:
 *   static DC_PUBLIC ITER get_iter(SELF *self)
 *   static DC_PUBLIC ITER_CONST get_iter_const(SELF const *self)
 *   static DC_PUBLIC IV_PAIR next(ITER *iter)
 *   static DC_PUBLIC bool empty(ITER const *iter)
 *   static DC_PUBLIC bool empty_item(IV_PAIR const *item)
 *   static DC_PUBLIC IV_PAIR iv_empty()
 *   static DC_PUBLIC IV_PAIR_CONST iv_const_empty()
 *
 * Index arithmetic macros:
 *   #define DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS)
 *   #define DC_ARENA_CHUNKED_INDEX_TO_BLOCK(INDEX, BLOCK_INDEX_BITS)
 *   #define DC_ARENA_CHUNKED_INDEX_TO_OFFSET(INDEX, BLOCK_INDEX_BITS)
 *   #define DC_ARENA_CHUNKED_BLOCK_OFFSET_TO_INDEX(BLOCK, OFFSET, BLOCK_INDEX_BITS)
 *
 * Invariant checks:
 *   #define INVARIANT_CHECK(self)
 *   #define ITER_INVARIANT_CHECK(iter)
 *   #define ITER_CONST_INVARIANT_CHECK(iter)
 *
 * Template helpers (with the user provided name, even in nested templates):
 *   #define TEMPLATE_ERROR(...)
 *   #define DC_TRAIT_ARENA(SELF)
 *   #define DC_DEBUG(DEBUG_FN, DEBUG_PTR)
 *   #define DC_EXPAND_STRING(NAME)
 *   #define DC_ASSERT(expr, ...)
 *   #define DC_ASSUME(expr, ...)
 *
 * Allocator (an allocator that prints to stdout when it allocates or frees memory):
 *   static DC_PUBLIC void * allocate_uninit(SELF *self, size_t size)
 *   static DC_PUBLIC void * reallocate(SELF *self, void *ptr, size_t old_size, size_t new_size)
 *   static DC_PUBLIC void deallocate(SELF *self, void *ptr, size_t size)
 *   static DC_PUBLIC FILE * stream(SELF *self)
 *
 * Debug format helpers for debug printing data structures:
 *   static DC_PUBLIC void dc_debug_fmt_print(dc_debug_fmt fmt, FILE *stream, const char *format, ...)
 *   static DC_PUBLIC dc_debug_fmt dc_debug_fmt_scope_begin(dc_debug_fmt fmt)
 *   static DC_PUBLIC dc_debug_fmt dc_debug_fmt_scope_end(dc_debug_fmt fmt)
 *   static DC_PUBLIC dc_gdb_marker dc_gdb_marker_new()
 *   dc_gdb_marker derive_c_arena_basic
 *
 * Mutation tracking (tracks a specific version of a value, so that this can be
 * compared later to check modification For ex...):
 *   static DC_PUBLIC mutation_tracker mutation_tracker_new()
 *   static DC_PUBLIC void mutation_tracker_mutate(mutation_tracker *self)
 *   static DC_PUBLIC mutation_version mutation_tracker_get(mutation_tracker const *self)
 *   static DC_PUBLIC void mutation_version_check(mutation_version const *self)
 *   mutation_tracker iterator_invalidation_tracker
 *
 * Fields:
 *   INDEX_TYPE block_current_exclusive_end
 *
 * Slot internals:
 *   static DC_INTERNAL void fill(SELF *slot, SLOT_VALUE value)
 *   static DC_INTERNAL void set_empty(SELF *slot, SLOT_INDEX_TYPE next_free)
 *   static DC_INTERNAL void clone_from(SELF const *from_slot, SELF *to_slot)
 *   static DC_INTERNAL void memory_tracker_empty(SELF const *slot)
 */