#if !defined(SKIP_INCLUDES)
/* ... */

/* Template parameter validation: each required parameter raises a template
 * error when missing (unless PLACEHOLDERS is defined). */
  #if !defined PLACEHOLDERS
TEMPLATE_ERROR("The number of bits (8,16,32,64) to use for the arena's key")
  /* ... */

#if !defined BLOCK_INDEX_BITS
  #if !defined PLACEHOLDERS
TEMPLATE_ERROR("The number of bits used to get the offset within a block must be specified")
  /* ... */
  #define BLOCK_INDEX_BITS 8
  /* ... */

/* ... */
    "The number of bits for offset within a block must be "
    "less than the number of bits used for an index");

#if !defined PLACEHOLDERS
TEMPLATE_ERROR("The value type to place in the arena must be defined")
/* ... */

  /* (placeholder definitions) */
  #define VALUE_DELETE value_delete
  #define VALUE_CLONE value_clone
  #define VALUE_DEBUG value_debug
/* ... */

/* Optional parameters fall back to the library defaults. */
#if !defined VALUE_DELETE
  #define VALUE_DELETE DC_NO_DELETE
/* ... */
#if !defined VALUE_CLONE
  #define VALUE_CLONE DC_COPY_CLONE
/* ... */
#if !defined VALUE_DEBUG
  #define VALUE_DEBUG DC_DEFAULT_DEBUG
/* ... */
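/*
 * Instantiation sketch (illustrative): an includer provides the parameters and
 * then includes this template header, roughly:
 *
 *   #define NAME my_arena
 *   #define INDEX_BITS 32   // key-width parameter; its exact name here is an assumption
 *   #define BLOCK_INDEX_BITS 8
 *   #define VALUE int
 *   #include "derive-c/arena/chunked/template.h"   // include path assumed for the example
 *
 * VALUE_DELETE, VALUE_CLONE and VALUE_DEBUG are optional and fall back to the
 * defaults above when not provided.
 */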
/* Parameters forwarded to the slot type used to store entries. */
#define SLOT NS(NAME, slot)
#define SLOT_INDEX_TYPE INDEX_TYPE
#define SLOT_VALUE VALUE
#define SLOT_VALUE_CLONE VALUE_CLONE
#define SLOT_VALUE_DELETE VALUE_DELETE
#define INTERNAL_NAME SLOT
/* Internal consistency checks for the arena. */
#define INVARIANT_CHECK(self) \
  DC_ASSUME(((self))->count <= MAX_INDEX); \
  DC_ASSUME(((self)->block_current_exclusive_end) <= \
            DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS)); \
  DC_ASSUME(DC_WHEN((self)->free_list == INDEX_NONE, \
                    (self)->count == \
                        (DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS) * (self)->block_current + \
                         (self)->block_current_exclusive_end)), \
            "All slots are full if the free list is empty");
/* Construction (fragment): the first block is allocated up front; the arena starts empty. */
  blocks[0] = first_block;
  /* ... */
      .free_list = INDEX_NONE,
      .block_current_exclusive_end = 0,
/* Insert: reuse a slot from the free list if possible, otherwise append to the
 * current block, growing the block table when the block is full. */
  if (self->free_list != INDEX_NONE) {
    /* ... pop the head of the free list and locate its slot ... */
    SLOT* slot = &(*self->blocks[block])[offset];
    /* ... */
    self->free_list = slot->next_free;
    slot->present = true;
    /* ... */
    return (INDEX){.index = free_index};
  }
  /* ... when the current block is full, move on to a fresh block ... */
    self->block_current++;
    self->block_current_exclusive_end = 0;
    /* ... grow the table of block pointers ... */
        self->alloc, (void*)self->blocks,
        (self->block_current + 1) * sizeof(PRIV(NS(SELF, block))*));
    /* ... */
    self->blocks[self->block_current] = new_block;
  /* ... */
  SLOT* slot = &(*self->blocks[self->block_current])[self->block_current_exclusive_end];
  slot->present = true;
  /* ... */
  self->block_current_exclusive_end++;
  /* ... */
  return (INDEX){.index = index};
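/*
 * Usage sketch (illustrative): assumes an instantiation with NAME my_arena and
 * VALUE int, and that NS() joins names with underscores, so the generated
 * identifiers are my_arena, my_arena_index, my_arena_insert, my_arena_read and
 * so on. These names are assumptions, as is how the arena itself is constructed.
 */
#if 0
void example_insert_and_read(my_arena* arena) {
    my_arena_index id = my_arena_insert(arena, 42); /* stable handle to the stored value */
    int const* value = my_arena_read(arena, id);    /* try_read instead returns NULL for invalid handles */
    (void)value;
}
#endif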
/* Lookup: reject indices past the allocated region or pointing at an empty slot. */
  if (block > self->block_current ||
      (block == self->block_current && offset >= self->block_current_exclusive_end)) {
    /* ... */
  }
  SLOT* slot = &(*self->blocks[block])[offset];
  if (!slot->present) {
/* Clone: allocate a new block table, copy every completed block, then copy the
 * in-use prefix of the current block, preserving the bookkeeping fields. */
      self->alloc, sizeof(PRIV(NS(SELF, block))*) * (self->block_current + 1));
  /* ... */
  for (INDEX_TYPE b = 0; b <= self->block_current; b++) {
    /* ... */
  }
  /* ... copy the completed blocks ... */
  for (INDEX_TYPE b = 0; b < self->block_current; b++) {
    PRIV(NS(SELF, block)) const* from_block = self->blocks[b];
    /* ... */
  }
  PRIV(NS(SELF, block))* to_current_block = blocks[self->block_current];
  PRIV(NS(SELF, block)) const* from_current_block = self->blocks[self->block_current];
  for (INDEX_TYPE i = 0; i < self->block_current_exclusive_end; i++) {
    /* ... */
  }
  /* ... */
      .count = self->count,
      .free_list = self->free_list,
      /* ... */
      .block_current = self->block_current,
      .block_current_exclusive_end = self->block_current_exclusive_end,
      .alloc = self->alloc,
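/*
 * Clone usage sketch (illustrative, same assumed names as above): the copy is
 * independent of the original; per-slot values are expected to be duplicated
 * via the configured VALUE_CLONE forwarded to the slot template.
 */
#if 0
void example_clone(my_arena const* arena) {
    my_arena copy = my_arena_clone(arena);
    (void)copy;
}
#endif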
  return self->count < MAX_INDEX;
/* try_remove: move the value out to *destination, mark the slot empty, and
 * push the slot onto the free list for reuse. */
  if (block > self->block_current ||
      (block == self->block_current && offset >= self->block_current_exclusive_end)) {
    /* ... */
  }
  PRIV(NS(SELF, block))* current_block = self->blocks[block];
  SLOT* entry = &(*current_block)[offset];
  /* ... */
  if (entry->present) {
    *destination = entry->value;
    entry->present = false;
    /* ... */
    entry->next_free = self->free_list;
    self->free_list = index.index;
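/*
 * Removal sketch (illustrative, same assumed names): try_remove moves the value
 * out through the destination pointer and pushes the slot onto the free list,
 * so a subsequent insert reuses that slot.
 */
#if 0
void example_remove_then_reuse(my_arena* arena) {
    my_arena_index id = my_arena_insert(arena, 1);
    int removed;
    if (my_arena_try_remove(arena, id, &removed)) {
        my_arena_index reused = my_arena_insert(arena, 2); /* pops the slot just freed */
        (void)reused;
    }
}
#endif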
/* Advance from an index to the next occupied slot (helper used by the iterators). */
  for (INDEX_TYPE next_index = from_index + 1;; next_index++) {
    /* ... */
    if (block > self->block_current ||
        (block == self->block_current && offset >= self->block_current_exclusive_end)) {
      /* ... */
    }
    SLOT* slot = &(*self->blocks[block])[offset];
    /* ... */
  }
#define ITER NS(SELF, iter)
#define IV_PAIR NS(ITER, item)
/* ... */

#define ITER_INVARIANT_CHECK(iter) \
  /* ... */ \
  DC_WHEN((iter)->next_index != INDEX_NONE, \
          NS(SELF, try_read)(iter->arena, (INDEX){.index = (iter)->next_index}) != NULL), \
      "The next index is either valid, or the iterator is empty");

/* The sentinel item, returned once iteration is exhausted. */
  return (IV_PAIR){.index = (INDEX){.index = INDEX_NONE}, .value = NULL};
/* ... */
  return iter->next_index == INDEX_NONE;
/* ... */
  if (iter->next_index == INDEX_NONE) {
    /* ... */
  }
  INDEX index = {.index = iter->next_index};
/* ... scan the blocks for the first occupied slot ... */
  for (INDEX_TYPE b = 0; b <= self->block_current; b++) {
    /* ... */
  }

#undef ITER_INVARIANT_CHECK
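/*
 * Iteration sketch (illustrative, same assumed names): items are index/value
 * pairs, and iteration ends when the returned item is the empty sentinel.
 */
#if 0
void example_iterate(my_arena* arena) {
    my_arena_iter it = my_arena_get_iter(arena);
    for (my_arena_iter_item item = my_arena_iter_next(&it); !my_arena_iter_empty_item(&item);
         item = my_arena_iter_next(&it)) {
        /* item.index is the handle; item.value points at the stored VALUE */
    }
}
#endif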
/* The const iterator mirrors the mutable iterator above. */
#define ITER_CONST NS(SELF, iter_const)
#define IV_PAIR_CONST NS(ITER_CONST, item)
/* ... */

#define ITER_CONST_INVARIANT_CHECK(iter) \
  /* ... */ \
  DC_WHEN((iter)->next_index != INDEX_NONE, \
          NS(SELF, try_read)(iter->arena, (INDEX){.index = (iter)->next_index}) != NULL), \
      "The next index is either valid, or the iterator is empty");

  return (IV_PAIR_CONST){.index = (INDEX){.index = INDEX_NONE}, .value = NULL};
/* ... */
  return iter->next_index == INDEX_NONE;
/* ... */
  if (iter->next_index == INDEX_NONE) {
    /* ... */
  }
  INDEX index = {.index = iter->next_index};
/* debug: print each allocated slot; present slots show their value, empty
 * slots show their free-list link. */
  /* ... */
      (size_t)self->block_current_exclusive_end);
  /* ... */
  for (INDEX_TYPE b = 0; b <= self->block_current; b++) {
    /* Only the current block may be partially filled. */
    INDEX_TYPE block_entry_exclusive_end = b == self->block_current
                                               ? self->block_current_exclusive_end
                                               : DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS);
    for (INDEX_TYPE i = 0; i < block_entry_exclusive_end; i++) {
      SLOT* entry = &(*self->blocks[b])[i];
      /* ... */
      if (entry->present) {
        /* ... */
            fmt, stream, "[index=%lu]{\n",
        /* ... */
      } else {
        /* ... */
            fmt, stream, "[index=%lu]{ next_free=%lu }\n",
            /* ... */ (size_t)entry->next_free);
      }
    }
  }
/* The template's internal macros and parameters are #undef'd at the end of the header. */
#undef ITER_CONST_INVARIANT_CHECK
/* ... */
#undef INVARIANT_CHECK
/* ... */
#undef BLOCK_INDEX_BITS
Referenced declarations:

Chunked arena index helpers:
  #define DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS)
  #define DC_ARENA_CHUNKED_INDEX_TO_BLOCK(INDEX, BLOCK_INDEX_BITS)
  #define DC_ARENA_CHUNKED_INDEX_TO_OFFSET(INDEX, BLOCK_INDEX_BITS)
  #define DC_ARENA_CHUNKED_BLOCK_OFFSET_TO_INDEX(BLOCK, OFFSET, BLOCK_INDEX_BITS)

Arena API:
  static INDEX insert(SELF *self, VALUE value)
  static VALUE const * read(SELF const *self, INDEX index)
  static VALUE const * try_read(SELF const *self, INDEX index)
  static VALUE * write(SELF *self, INDEX index)
  static VALUE * try_write(SELF *self, INDEX index)
  static VALUE remove(SELF *self, INDEX index)
  static bool try_remove(SELF *self, INDEX index, VALUE *destination)
  static SELF clone(SELF const *self)
  static bool full(SELF const *self)
  static INDEX_TYPE size(SELF const *self)
  static void debug(SELF const *self, dc_debug_fmt fmt, FILE *stream)
  static INDEX_TYPE PRIV(next_index_value)(SELF const *self, INDEX_TYPE from_index)
  static const size_t max_entries
  #define INVARIANT_CHECK(self)
  #define DC_TRAIT_ARENA(SELF)

Iterators:
  static ITER get_iter(SELF *self)
  static ITER_CONST get_iter_const(SELF const *self)
  static IV_PAIR next(ITER *iter)
  static bool empty(ITER const *iter)
  static bool empty_item(IV_PAIR const *item)
  static IV_PAIR iv_empty()
  static IV_PAIR_CONST iv_const_empty()
  #define ITER_INVARIANT_CHECK(iter)
  #define ITER_CONST_INVARIANT_CHECK(iter)

Arena and slot state:
  SLOT * blocks[DC_ARENA_GEO_MAX_NUM_BLOCKS(INDEX_BITS, INITIAL_BLOCK_INDEX_BITS)]
  SLOT PRIV(block)[DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS)]
  INDEX_TYPE block_current_exclusive_end
  mutation_tracker iterator_invalidation_tracker
  dc_gdb_marker derive_c_arena_basic

Slot helpers:
  static void clone_from(SELF const *from_slot, SELF *to_slot)
  static void memory_tracker_present(SELF const *slot)
  static void memory_tracker_empty(SELF const *slot)

Allocator interface (an allocator that prints to stdout when it allocates or frees memory):
  static void * malloc(SELF *self, size_t size)
  static void * realloc(SELF *self, void *ptr, size_t size)
  static void free(SELF *self, void *ptr)

Debug format helpers for debug printing data structures:
  static void dc_debug_fmt_print(dc_debug_fmt fmt, FILE *stream, const char *format, ...)
  dc_debug_fmt dc_debug_fmt_scope_begin(dc_debug_fmt fmt)
  dc_debug_fmt dc_debug_fmt_scope_end(dc_debug_fmt fmt)

Mutation tracking (tracks a specific version of a value, so that it can be compared later to check for modification):
  static mutation_tracker mutation_tracker_new()
  static void mutation_tracker_mutate(mutation_tracker *self)
  static mutation_version mutation_tracker_get(mutation_tracker const *self)
  static void mutation_version_check(mutation_version const *self)

Utility macros and markers:
  #define EXPAND_STRING(NAME)
  With the user-provided name, even in nested templates.
  #define DC_ASSERT(expr, ...)
  #define DC_ASSUME(expr, ...)
  #define TEMPLATE_ERROR(...)
  static dc_gdb_marker dc_gdb_marker_new()

Other:
  static FILE * stream(SELF *self)
  Opens a file for …
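As an illustration of the allocator interface listed above (malloc, realloc and
free taking the allocator itself as SELF), a minimal "prints to stdout"
allocator could look like the following sketch. The struct name, the forwarding
to the C standard library, and the message wording are assumptions for the
example, not part of the library.

#include <stdio.h>
#include <stdlib.h>

typedef struct {
    size_t live_allocations; /* hypothetical state; the real SELF is library-defined */
} example_logging_alloc;

static void* example_logging_alloc_malloc(example_logging_alloc* self, size_t size) {
    void* ptr = malloc(size);
    self->live_allocations++;
    printf("malloc(%zu) -> %p\n", size, ptr);
    return ptr;
}

static void* example_logging_alloc_realloc(example_logging_alloc* self, void* ptr, size_t size) {
    void* moved = realloc(ptr, size);
    (void)self;
    printf("realloc(%p, %zu) -> %p\n", ptr, size, moved);
    return moved;
}

static void example_logging_alloc_free(example_logging_alloc* self, void* ptr) {
    self->live_allocations--;
    printf("free(%p)\n", ptr);
    free(ptr);
}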