Derive-C
Loading...
Searching...
No Matches
template.h
Go to the documentation of this file.
2#if !defined(SKIP_INCLUDES)
3 #include "includes.h"
4#endif
5
8
9#if !defined INDEX_BITS
10 #if !defined DC_PLACEHOLDERS
11TEMPLATE_ERROR("The number of bits (8,16,32,64) to use for the arena's key")
12 #endif
13 #define INDEX_BITS 8
14#endif
15
16#if !defined BLOCK_INDEX_BITS
17 #if !defined DC_PLACEHOLDERS
18TEMPLATE_ERROR("The number of bits used to get the offset within a block must be specified")
19 #endif
20 #define BLOCK_INDEX_BITS 7
21#endif
22
23DC_STATIC_ASSERT(BLOCK_INDEX_BITS > 0, "Cannot have zero block index bits");
24DC_STATIC_ASSERT(INDEX_BITS > 0, "Cannot have zero index bits");
26 "The number of bits for offset within a block must be "
27 "less than the number of bits used for an index");
28
/* When the template is instantiated without a VALUE type: raise a template
 * error (unless DC_PLACEHOLDERS is set for tooling/documentation builds),
 * then define example placeholders so the rest of the file still parses. */
#if !defined VALUE
    #if !defined DC_PLACEHOLDERS
TEMPLATE_ERROR("The value type to place in the arena must be defined")
    #endif
    #define VALUE value_t
/* Placeholder value type used only in placeholder builds. */
typedef struct {
    int x;
} VALUE;
    #define VALUE_DELETE value_delete
static void VALUE_DELETE(value_t* /* self */) {}
    #define VALUE_CLONE value_clone
static value_t VALUE_CLONE(value_t const* self) { return *self; }
    #define VALUE_DEBUG value_debug
static void VALUE_DEBUG(VALUE const* /* self */, dc_debug_fmt /* fmt */, FILE* /* stream */) {}
#endif
44
45DC_STATIC_ASSERT(sizeof(VALUE), "VALUE must be a non-zero sized type");
46
47#if !defined VALUE_DELETE
48 #define VALUE_DELETE DC_NO_DELETE
49#endif
50
51#if !defined VALUE_CLONE
52 #define VALUE_CLONE DC_COPY_CLONE
53#endif
54
55#if !defined VALUE_DEBUG
56 #define VALUE_DEBUG DC_DEFAULT_DEBUG
57#endif
58
61
62typedef VALUE NS(SELF, value_t);
63typedef ALLOC NS(SELF, alloc_t);
64
#define SLOT NS(NAME, slot)

/* Parameters forwarded to the nested slot template. */
#define SLOT_INDEX_TYPE INDEX_TYPE     // [DERIVE-C] for template
#define SLOT_VALUE VALUE               // [DERIVE-C] for template
#define SLOT_VALUE_CLONE VALUE_CLONE   // [DERIVE-C] for template
#define SLOT_VALUE_DELETE VALUE_DELETE // [DERIVE-C] for template
#define INTERNAL_NAME SLOT             // [DERIVE-C] for template
/* NOTE(review): SLOT_VALUE_CLONE was defined twice with identical text (a
 * benign but confusing copy-paste duplicate); the duplicate is removed. If
 * the slot template also expects a SLOT_VALUE_DEBUG parameter, the duplicate
 * line may have been intended for it — confirm against the slot template. */
74
76
77typedef struct {
78 size_t count;
79 INDEX_TYPE free_list;
80
81 PRIV(NS(SELF, block)) * *blocks;
82 INDEX_TYPE block_current;
84
85 NS(ALLOC, ref) alloc_ref;
86 dc_gdb_marker derive_c_arena_basic;
88} SELF;
89
/* Sanity-checks the arena invariants (DC_ASSUME assumptions):
 * - self is non-NULL;
 * - the live count never exceeds the key space (MAX_INDEX);
 * - the write cursor stays within one block's worth of slots;
 * - when the free list is empty there are no holes, so count equals
 *   (full blocks) * block-size + cursor position. */
#define INVARIANT_CHECK(self)                                                                      \
    DC_ASSUME(self);                                                                               \
    DC_ASSUME(((self))->count <= MAX_INDEX);                                                       \
    DC_ASSUME(((self)->block_current_exclusive_end) <=                                             \
              DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS));                                      \
    DC_ASSUME(DC_WHEN((self)->free_list == INDEX_NONE,                                             \
                      (self)->count ==                                                             \
                          (DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS) * (self)->block_current + \
                           (self)->block_current_exclusive_end)),                                  \
              "All slots are full if the free list is empty");
100
/* Creates an empty arena backed by `alloc_ref`: allocates one block and a
 * one-entry table of block pointers, and registers every slot of the first
 * block as empty with the slot memory tracker.
 * NOTE(review): allocation results are used unchecked — presumably the ALLOC
 * template aborts (or asserts) on failure; confirm. */
DC_PUBLIC static SELF NS(SELF, new)(NS(ALLOC, ref) alloc_ref) {
    PRIV(NS(SELF, block))* first_block = (PRIV(NS(SELF, block))*)NS(ALLOC, allocate_uninit)(
        alloc_ref, sizeof(PRIV(NS(SELF, block))));
    PRIV(NS(SELF, block))** blocks = (PRIV(NS(SELF, block))**)NS(ALLOC, allocate_uninit)(
        alloc_ref, sizeof(PRIV(NS(SELF, block))*));

    blocks[0] = first_block;

    for (INDEX_TYPE offset = 0; offset < DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS); offset++) {
        /* Mark each slot of the freshly allocated block as empty/untracked. */
        NS(SLOT, memory_tracker_empty)(&(*first_block)[offset]);
    }

    return (SELF){
        .count = 0,
        .free_list = INDEX_NONE, /* no removed slots to recycle yet */
        .blocks = blocks,
        .block_current = 0,
        .block_current_exclusive_end = 0, /* next append goes to slot 0 */
        .alloc_ref = alloc_ref,
        .derive_c_arena_basic = dc_gdb_marker_new(),
        .iterator_invalidation_tracker = mutation_tracker_new(),
    };
}
125
/* Inserts `value` into the arena and returns its stable index.
 * Prefers recycling a slot popped from the intrusive free list; otherwise
 * appends at the write cursor, growing the block-pointer table by one block
 * when the current block is exhausted. Invalidates live iterators (bumps the
 * mutation tracker) and asserts the arena is not already at MAX_INDEX. */
DC_PUBLIC static INDEX NS(SELF, insert)(SELF* self, VALUE value) {
    INVARIANT_CHECK(self);
    mutation_tracker_mutate(&self->iterator_invalidation_tracker);
    DC_ASSERT(self->count < MAX_INDEX,
              "Arena is full, cannot insert {count=%lu, max_index=%lu, value=%s}",
              (size_t)self->count, (size_t)MAX_INDEX, DC_DEBUG(VALUE_DEBUG, &value));

    /* Fast path: reuse the most recently freed slot. */
    if (self->free_list != INDEX_NONE) {
        INDEX_TYPE free_index = self->free_list;
        INDEX_TYPE block = DC_ARENA_CHUNKED_INDEX_TO_BLOCK(free_index, BLOCK_INDEX_BITS);
        INDEX_TYPE offset = DC_ARENA_CHUNKED_INDEX_TO_OFFSET(free_index, BLOCK_INDEX_BITS);

        SLOT* slot = &(*self->blocks[block])[offset];

        DC_ASSUME(!slot->present);
        /* Pop the slot off the free list before overwriting it. */
        self->free_list = slot->next_free;

        NS(SLOT, fill)(slot, value);

        self->count++;
        return (INDEX){.index = free_index};
    }

    /* Current block exhausted: grow the pointer table and allocate a new
       block to append into.
       NOTE(review): reallocate/allocate_uninit results are used unchecked —
       presumably ALLOC aborts on failure; confirm. */
    if (self->block_current_exclusive_end == DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS)) {
        self->block_current++;
        self->block_current_exclusive_end = 0;

        size_t blocks_current_size = self->block_current * sizeof(PRIV(NS(SELF, block))*);
        size_t blocks_new_size = blocks_current_size + sizeof(PRIV(NS(SELF, block))*);

        self->blocks = (PRIV(NS(SELF, block))**)NS(ALLOC, reallocate)(
            self->alloc_ref, (void*)self->blocks, blocks_current_size, blocks_new_size);

        PRIV(NS(SELF, block))* new_block = (PRIV(NS(SELF, block))*)NS(ALLOC, allocate_uninit)(
            self->alloc_ref, sizeof(PRIV(NS(SELF, block))));

        self->blocks[self->block_current] = new_block;

        for (size_t offset = 0; offset < DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS); offset++) {
            /* Mark the new block's slots as empty with the memory tracker. */
            NS(SLOT, memory_tracker_empty)(&(*new_block)[offset]);
        }
    }

    /* Append at the write cursor of the current (last) block. */
    SLOT* slot = &(*self->blocks[self->block_current])[self->block_current_exclusive_end];
    NS(SLOT, fill)(slot, value);

    INDEX_TYPE index = (INDEX_TYPE)DC_ARENA_CHUNKED_BLOCK_OFFSET_TO_INDEX(
        self->block_current, self->block_current_exclusive_end, BLOCK_INDEX_BITS);
    self->count++;
    self->block_current_exclusive_end++;

    return (INDEX){.index = index};
}
179
180DC_PUBLIC static VALUE const* NS(SELF, try_read)(SELF const* self, INDEX index) {
181 INVARIANT_CHECK(self);
182
183 INDEX_TYPE block = DC_ARENA_CHUNKED_INDEX_TO_BLOCK(index.index, BLOCK_INDEX_BITS);
184 INDEX_TYPE offset = DC_ARENA_CHUNKED_INDEX_TO_OFFSET(index.index, BLOCK_INDEX_BITS);
185
186 if (block > self->block_current ||
187 (block == self->block_current && offset >= self->block_current_exclusive_end)) {
188 return NULL;
189 }
190
191 SLOT* slot = &(*self->blocks[block])[offset];
192
193 if (!slot->present) {
194 return NULL;
195 }
196 return &slot->value;
197}
198
199DC_PUBLIC static VALUE const* NS(SELF, read)(SELF const* self, INDEX index) {
200 VALUE const* value = NS(SELF, try_read)(self, index);
201 DC_ASSERT(value, "Cannot read item {index=%lu}", (size_t)index.index);
202 return value;
203}
204
205DC_PUBLIC static VALUE* NS(SELF, try_write)(SELF* self, INDEX index) {
206 return (VALUE*)NS(SELF, try_read)(self, index);
207}
208
209DC_PUBLIC static VALUE* NS(SELF, write)(SELF* self, INDEX index) {
210 VALUE* value = NS(SELF, try_write)(self, index);
211 DC_ASSERT(value, "Cannot write item {index=%lu}", (size_t)index.index);
212 return value;
213}
214
/* Deep-copies the arena: allocates a fresh block-pointer table of the same
 * shape (block_current + 1 entries), clones every slot of the full blocks,
 * and clones the last block only up to the write cursor. The free_list head
 * is copied verbatim — presumably NS(SLOT, clone_from) also copies empty
 * slots' next_free links so the copied list stays valid; confirm against the
 * slot template.
 * NOTE(review): allocation results are used unchecked — presumably ALLOC
 * aborts on failure; confirm. */
DC_PUBLIC static SELF NS(SELF, clone)(SELF const* self) {
    INVARIANT_CHECK(self);

    PRIV(NS(SELF, block))** blocks = (PRIV(NS(SELF, block))**)NS(ALLOC, allocate_uninit)(
        self->alloc_ref, sizeof(PRIV(NS(SELF, block))*) * (self->block_current + 1));

    /* Allocate all destination blocks first (inclusive of the last one). */
    for (INDEX_TYPE b = 0; b <= self->block_current; b++) {
        blocks[b] = (PRIV(NS(SELF, block))*)NS(ALLOC, allocate_uninit)(
            self->alloc_ref, sizeof(PRIV(NS(SELF, block))));
    }

    /* Fully copy every block before the current one. */
    for (INDEX_TYPE b = 0; b < self->block_current; b++) {
        PRIV(NS(SELF, block))* to_block = blocks[b];
        PRIV(NS(SELF, block)) const* from_block = self->blocks[b];
        for (INDEX_TYPE i = 0; i < DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS); i++) {
            NS(SLOT, clone_from)(&(*from_block)[i], &(*to_block)[i]);
        }
    }

    /* The current block is only populated up to the write cursor. */
    PRIV(NS(SELF, block))* to_current_block = blocks[self->block_current];
    PRIV(NS(SELF, block)) const* from_current_block = self->blocks[self->block_current];
    for (INDEX_TYPE i = 0; i < self->block_current_exclusive_end; i++) {
        NS(SLOT, clone_from)(&(*from_current_block)[i], &(*to_current_block)[i]);
    }

    return (SELF){
        .count = self->count,
        .free_list = self->free_list,
        .blocks = blocks,
        .block_current = self->block_current,
        .block_current_exclusive_end = self->block_current_exclusive_end,
        .alloc_ref = self->alloc_ref,
        .derive_c_arena_basic = dc_gdb_marker_new(),
        .iterator_invalidation_tracker = mutation_tracker_new(),
    };
}
251
252DC_PUBLIC static size_t NS(SELF, size)(SELF const* self) {
253 INVARIANT_CHECK(self);
254 return self->count;
255}
256
257DC_PUBLIC static bool NS(SELF, full)(SELF const* self) {
258 INVARIANT_CHECK(self);
259 return self->count < MAX_INDEX;
260}
261
262DC_PUBLIC static const size_t NS(SELF, max_entries) = MAX_INDEX;
263
/* Removes the entry at `index` if present: moves its value into
 * `*destination`, pushes the slot onto the free list, and returns true.
 * Returns false when the index is out of range or the slot is already empty.
 * Invalidates live iterators (bumps the mutation tracker) in either case. */
DC_PUBLIC static bool NS(SELF, try_remove)(SELF* self, INDEX index, VALUE* destination) {
    INVARIANT_CHECK(self);
    mutation_tracker_mutate(&self->iterator_invalidation_tracker);

    INDEX_TYPE block = DC_ARENA_CHUNKED_INDEX_TO_BLOCK(index.index, BLOCK_INDEX_BITS);
    INDEX_TYPE offset = DC_ARENA_CHUNKED_INDEX_TO_OFFSET(index.index, BLOCK_INDEX_BITS);

    /* Only treat offset vs block_current_exclusive_end for the last block */
    if (block > self->block_current ||
        (block == self->block_current && offset >= self->block_current_exclusive_end)) {
        return false;
    }

    PRIV(NS(SELF, block))* current_block = self->blocks[block];
    SLOT* entry = &(*current_block)[offset];

    if (entry->present) {
        /* Move the value out by copy; ownership transfers to the caller. */
        *destination = entry->value;

        /* Thread the emptied slot onto the head of the free list. */
        NS(SLOT, set_empty)(entry, self->free_list);

        self->free_list = index.index;
        self->count--;
        return true;
    }
    return false;
}
291
292DC_PUBLIC static VALUE NS(SELF, remove)(SELF* self, INDEX index) {
293 INVARIANT_CHECK(self);
294 mutation_tracker_mutate(&self->iterator_invalidation_tracker);
295
296 VALUE value;
297 DC_ASSERT(NS(SELF, try_remove)(self, index, &value),
298 "Failed to remove item, index not found {index=%lu}", (size_t)index.index);
299 return value;
300}
301
/* Scans forward from `from_index` (exclusive) for the next present slot,
 * skipping free-list holes; returns INDEX_NONE once the scan passes the
 * write cursor. Used by both iterators to advance. */
DC_PUBLIC static INDEX_TYPE PRIV(NS(SELF, next_index_value))(SELF const* self,
                                                             INDEX_TYPE from_index) {
    for (INDEX_TYPE next_index = from_index + 1;; next_index++) {
        INDEX_TYPE block = DC_ARENA_CHUNKED_INDEX_TO_BLOCK(next_index, BLOCK_INDEX_BITS);
        INDEX_TYPE offset = DC_ARENA_CHUNKED_INDEX_TO_OFFSET(next_index, BLOCK_INDEX_BITS);

        /* Past the end of the populated region: iteration is finished. */
        if (block > self->block_current ||
            (block == self->block_current && offset >= self->block_current_exclusive_end)) {
            return INDEX_NONE;
        }

        /* Dereference the block pointer, then index the slot within it;
           self->blocks[block][offset] would stride by whole blocks. */
        SLOT* slot = &(*self->blocks[block])[offset];
        if (slot->present) {
            return next_index;
        }
    }
}
321
322#define ITER NS(SELF, iter)
323#define IV_PAIR NS(ITER, item)
324
325typedef struct {
327 INDEX_TYPE next_index;
329} ITER;
330
331#define ITER_INVARIANT_CHECK(iter) \
332 DC_ASSUME(iter); \
333 DC_DEBUG_ASSERT( \
334 DC_WHEN((iter)->next_index != INDEX_NONE, \
335 NS(SELF, try_read)(iter->arena, (INDEX){.index = (iter)->next_index}) != NULL), \
336 "The next index is either valid, or the iterator is empty");
337
338typedef struct {
339 INDEX index;
341} IV_PAIR;
342
344 return (IV_PAIR){.index = (INDEX){.index = INDEX_NONE}, .value = NULL};
345}
346
347DC_PUBLIC static bool NS(ITER, empty_item)(IV_PAIR const* item) { return item->value == NULL; }
348
/* True when iteration has finished (no further present slot). Panics via the
 * version check if the arena was mutated since this iterator was created. */
DC_PUBLIC static bool NS(ITER, empty)(ITER const* iter) {
    mutation_version_check(&iter->version);
    return iter->next_index == INDEX_NONE;
}
354
/* Returns the current item (index plus mutable value pointer) and advances
 * the iterator to the next present slot; returns the empty sentinel pair once
 * exhausted. Panics via the version check if the arena was mutated since the
 * iterator was created. */
DC_PUBLIC static IV_PAIR NS(ITER, next)(ITER* iter) {
    mutation_version_check(&iter->version);

    if (iter->next_index == INDEX_NONE) {
        return NS(SELF, iv_empty)();
    }

    INDEX index = {.index = iter->next_index};
    IV_PAIR result = (IV_PAIR){
        .index = index,
        /* `write` rather than `try_write`: next_index is maintained to always
           point at a present slot, so the lookup must succeed. */
        .value = NS(SELF, write)(iter->arena, index),
    };

    iter->next_index = PRIV(NS(SELF, next_index_value))(iter->arena, iter->next_index);
    return result;
}
372
374 INVARIANT_CHECK(self);
375
376 // Check if index 0 is present, otherwise find the next valid index
377 INDEX_TYPE first_index;
378 if (self->block_current_exclusive_end > 0 && (*self->blocks[0])[0].present) {
379 first_index = 0;
380 } else {
381 first_index = PRIV(NS(SELF, next_index_value))(self, 0);
382 }
383
384 return (ITER){
385 .arena = self,
386 .next_index = first_index,
387 .version = mutation_tracker_get(&self->iterator_invalidation_tracker),
388 };
389}
390
391DC_PUBLIC static void NS(SELF, delete)(SELF* self) {
392 INVARIANT_CHECK(self);
393 ITER iter = NS(SELF, get_iter)(self);
394
395 for (IV_PAIR entry = NS(ITER, next)(&iter); !NS(ITER, empty_item)(&entry);
396 entry = NS(ITER, next)(&iter)) {
397 VALUE_DELETE(entry.value);
398 }
399
400 for (INDEX_TYPE b = 0; b <= self->block_current; b++) {
401 NS(ALLOC, deallocate)(self->alloc_ref, self->blocks[b], sizeof(PRIV(NS(SELF, block))));
402 }
403 NS(ALLOC, deallocate)(self->alloc_ref, (void*)self->blocks,
404 self->block_current * sizeof(PRIV(NS(SELF, block))*));
405}
406
407#undef ITER_INVARIANT_CHECK
408#undef IV_PAIR
409#undef ITER
410
411#define ITER_CONST NS(SELF, iter_const)
412#define IV_PAIR_CONST NS(ITER_CONST, item)
413
414typedef struct {
415 SELF const* arena;
416 INDEX_TYPE next_index;
418} ITER_CONST;
419
420#define ITER_CONST_INVARIANT_CHECK(iter) \
421 DC_ASSUME(iter); \
422 DC_DEBUG_ASSERT( \
423 DC_WHEN((iter)->next_index != INDEX_NONE, \
424 NS(SELF, try_read)(iter->arena, (INDEX){.index = (iter)->next_index}) != NULL), \
425 "The next index is either valid, or the iterator is empty");
426
427typedef struct {
428 INDEX index;
429 VALUE const* value;
431
433 return (IV_PAIR_CONST){.index = (INDEX){.index = INDEX_NONE}, .value = NULL};
434}
435
437 return item->value == NULL;
438}
439
440DC_PUBLIC static bool NS(ITER_CONST, empty)(ITER_CONST const* iter) {
442 mutation_version_check(&iter->version);
443 return iter->next_index == INDEX_NONE;
444}
445
448 mutation_version_check(&iter->version);
449
450 if (iter->next_index == INDEX_NONE) {
451 return NS(SELF, iv_const_empty)();
452 }
453
454 INDEX index = {.index = iter->next_index};
455 IV_PAIR_CONST result = (IV_PAIR_CONST){
456 .index = index,
457 .value = NS(SELF, read)(iter->arena, index),
458 };
459
460 iter->next_index = PRIV(NS(SELF, next_index_value))(iter->arena, iter->next_index);
461 return result;
462}
463
465 INVARIANT_CHECK(self);
466
467 // Check if index 0 is present, otherwise find the next valid index
468 INDEX_TYPE first_index;
469 if (self->block_current_exclusive_end > 0 && (*self->blocks[0])[0].present) {
470 first_index = 0;
471 } else {
472 first_index = PRIV(NS(SELF, next_index_value))(self, 0);
473 }
474
475 return (ITER_CONST){
476 .arena = self,
477 .next_index = first_index,
478 .version = mutation_tracker_get(&self->iterator_invalidation_tracker),
479 };
480}
481
482DC_PUBLIC static void NS(SELF, debug)(SELF const* self, dc_debug_fmt fmt, FILE* stream) {
483 fprintf(stream, DC_EXPAND_STRING(SELF) "@%p {\n", (void*)self);
484 fmt = dc_debug_fmt_scope_begin(fmt);
485 dc_debug_fmt_print(fmt, stream, "count: %lu,\n", self->count);
486 dc_debug_fmt_print(fmt, stream, "free_list: %lu,\n", (size_t)self->free_list);
487
488 dc_debug_fmt_print(fmt, stream, "alloc: ");
489 NS(ALLOC, debug)(NS(NS(ALLOC, ref), deref)(self->alloc_ref), fmt, stream);
490 fprintf(stream, ",\n");
491
492 dc_debug_fmt_print(fmt, stream, "current_block: %lu\n", (size_t)self->block_current);
493 dc_debug_fmt_print(fmt, stream, "block_current_exclusive_end: %lu\n",
494 (size_t)self->block_current_exclusive_end);
495 dc_debug_fmt_print(fmt, stream, "blocks: [\n");
496 fmt = dc_debug_fmt_scope_begin(fmt);
497
498 for (INDEX_TYPE b = 0; b <= self->block_current; b++) {
499
500 dc_debug_fmt_print(fmt, stream, "block[%lu]: @%p [\n", (size_t)b, (void*)self->blocks[b]);
501 fmt = dc_debug_fmt_scope_begin(fmt);
502
503 INDEX_TYPE block_entry_exclusive_end = b == self->block_current
504 ? self->block_current_exclusive_end
506
507 for (INDEX_TYPE i = 0; i < block_entry_exclusive_end; i++) {
508 /* Previously used self->blocks[b][i] which computes wrong address.
509 Use the dereference-then-index form to get the SLOT. */
510 SLOT* entry = &(*self->blocks[b])[i];
511
512 if (entry->present) {
514 fmt, stream, "[index=%lu] ",
516 VALUE_DEBUG(&entry->value, fmt, stream);
517 fprintf(stream, ",\n");
518 } else {
520 fmt, stream, "[index=%lu]{ next_free=%lu }\n",
522 (size_t)entry->next_free);
523 }
524 }
525
526 fmt = dc_debug_fmt_scope_end(fmt);
527 dc_debug_fmt_print(fmt, stream, "],\n");
528 }
529
530 fmt = dc_debug_fmt_scope_end(fmt);
531 dc_debug_fmt_print(fmt, stream, "],\n");
532
533 fmt = dc_debug_fmt_scope_end(fmt);
534 dc_debug_fmt_print(fmt, stream, "}");
535}
536
537#undef ITER_CONST_INVARIANT_CHECK
538#undef IV_PAIR_CONST
539#undef ITER_CONST
540
541#undef INVARIANT_CHECK
542#undef SLOT
543
546
547#undef VALUE_DEBUG
548#undef VALUE_CLONE
549#undef VALUE_DELETE
550#undef VALUE
551
552#undef BLOCK_INDEX_BITS
553#undef INDEX_BITS
554
556
static DC_PUBLIC void deallocate(SELF *self, void *ptr, size_t size)
Definition template.h:127
static DC_PUBLIC void debug(SELF const *self, dc_debug_fmt fmt, FILE *stream)
Definition template.h:212
static DC_PUBLIC void * allocate_uninit(SELF *self, size_t size)
Definition template.h:92
static DC_PUBLIC void * reallocate(SELF *self, void *ptr, size_t old_size, size_t new_size)
Definition template.h:137
#define ALLOC
Definition template.h:31
#define VALUE
Definition template.h:35
#define DC_ARENA_CHUNKED_BLOCK_OFFSET_TO_INDEX(BLOCK, OFFSET, BLOCK_INDEX_BITS)
Definition utils.h:9
#define DC_ARENA_CHUNKED_INDEX_TO_OFFSET(INDEX, BLOCK_INDEX_BITS)
Definition utils.h:6
#define DC_ARENA_CHUNKED_INDEX_TO_BLOCK(INDEX, BLOCK_INDEX_BITS)
Definition utils.h:4
#define DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS)
Definition utils.h:12
static DC_PUBLIC VALUE const * try_read(SELF const *self, INDEX index)
Definition template.h:180
static DC_PUBLIC VALUE * try_write(SELF *self, INDEX index)
Definition template.h:205
static DC_PUBLIC IV_PAIR iv_empty()
Definition template.h:343
static DC_PUBLIC INDEX_TYPE PRIV next_index_value(SELF const *self, INDEX_TYPE from_index)
Definition template.h:302
static DC_PUBLIC const size_t max_entries
Definition template.h:262
#define ITER_CONST_INVARIANT_CHECK(iter)
Definition template.h:420
#define INDEX_BITS
Definition template.h:13
#define BLOCK_INDEX_BITS
Definition template.h:20
#define VALUE_DEBUG
Definition template.h:41
#define IV_PAIR
Definition template.h:323
#define SLOT
Definition template.h:65
#define INVARIANT_CHECK(self)
Definition template.h:90
static DC_PUBLIC IV_PAIR next(ITER *iter)
Definition template.h:355
static DC_PUBLIC INDEX insert(SELF *self, VALUE value)
Definition template.h:126
static DC_PUBLIC bool empty(ITER const *iter)
Definition template.h:349
static DC_PUBLIC bool full(SELF const *self)
Definition template.h:257
static DC_PUBLIC VALUE const * read(SELF const *self, INDEX index)
Definition template.h:199
static DC_PUBLIC bool try_remove(SELF *self, INDEX index, VALUE *destination)
Definition template.h:264
#define IV_PAIR_CONST
Definition template.h:412
ALLOC alloc_t
Definition template.h:63
static DC_PUBLIC VALUE * write(SELF *self, INDEX index)
Definition template.h:209
static DC_PUBLIC bool empty_item(IV_PAIR const *item)
Definition template.h:347
static DC_PUBLIC VALUE remove(SELF *self, INDEX index)
Definition template.h:292
#define ITER
Definition template.h:322
#define VALUE_CLONE
Definition template.h:39
#define ITER_INVARIANT_CHECK(iter)
Definition template.h:331
static DC_PUBLIC ITER get_iter(SELF *self)
Definition template.h:373
#define VALUE_DELETE
Definition template.h:37
#define ITER_CONST
Definition template.h:411
static DC_PUBLIC IV_PAIR_CONST iv_const_empty()
Definition template.h:432
static DC_PUBLIC ITER_CONST get_iter_const(SELF const *self)
Definition template.h:464
static DC_PUBLIC size_t size(SELF const *self)
Definition template.h:252
static DC_PUBLIC SELF clone(SELF const *self)
Definition template.h:215
IV_PAIR item
Definition template.h:281
#define VALUE
Definition template.h:51
#define DC_TRAIT_ARENA(SELF)
Definition trait.h:5
#define TEMPLATE_ERROR(...)
With the user provided name, even in nested templates.
Definition def.h:56
#define SELF
Definition def.h:52
#define DC_DEBUG(DEBUG_FN, DEBUG_PTR)
Definition dump.h:92
static DC_PUBLIC void dc_debug_fmt_print(dc_debug_fmt fmt, FILE *stream, const char *format,...)
Definition fmt.h:32
static DC_PUBLIC dc_debug_fmt dc_debug_fmt_scope_end(dc_debug_fmt fmt)
Definition fmt.h:57
static DC_PUBLIC dc_debug_fmt dc_debug_fmt_scope_begin(dc_debug_fmt fmt)
Definition fmt.h:50
static DC_PUBLIC dc_gdb_marker dc_gdb_marker_new()
Definition gdb_marker.h:8
static DC_PUBLIC void mutation_tracker_mutate(mutation_tracker *self)
static DC_PUBLIC void mutation_version_check(mutation_version const *self)
static DC_PUBLIC mutation_tracker mutation_tracker_new()
static DC_PUBLIC mutation_version mutation_tracker_get(mutation_tracker const *self)
#define DC_PUBLIC
Definition namespace.h:25
#define NS(pre, post)
Definition namespace.h:14
#define DC_EXPAND_STRING(NAME)
Definition namespace.h:6
#define PRIV(name)
Definition namespace.h:20
#define DC_ASSERT(expr,...)
Definition panic.h:37
#define DC_STATIC_ASSERT
Definition panic.h:22
#define DC_ASSUME(expr,...)
Definition panic.h:57
INDEX_TYPE next_index
Definition template.h:416
mutation_version version
Definition template.h:417
SELF const * arena
Definition template.h:415
INDEX_TYPE next_index
Definition template.h:327
mutation_version version
Definition template.h:328
SELF * arena
Definition template.h:326
VALUE const * value
Definition template.h:429
VALUE * value
Definition template.h:340
INDEX index
Definition template.h:339
An allocator that prints to stdout when it allocates or frees memory.
Definition template.h:45
PRIV(block) **blocks
INDEX_TYPE block_current
Definition template.h:82
mutation_tracker iterator_invalidation_tracker
Definition template.h:87
INDEX_TYPE block_current_exclusive_end
Definition template.h:83
BLOCK_VECTOR blocks
Definition template.h:46
INDEX_TYPE free_list
Definition template.h:79
dc_gdb_marker derive_c_arena_basic
Definition template.h:86
size_t count
Definition template.h:78
ref alloc_ref
Definition template.h:49
VALUE value
Definition template.h:78
int x
Definition template.h:35
Debug format helpers for debug printin data structures.
Definition fmt.h:11
tracks a specific version of a value, so that this can be compared later to check modification For ex...
static DC_INTERNAL void clone_from(SELF const *from_slot, SELF *to_slot)
Definition template.h:94
static DC_INTERNAL void set_empty(SELF *slot, SLOT_INDEX_TYPE next_free)
Definition template.h:73
static DC_INTERNAL void fill(SELF *slot, SLOT_VALUE value)
Definition template.h:88
static DC_INTERNAL void memory_tracker_empty(SELF const *slot)
Definition template.h:64
static DC_PUBLIC FILE * stream(SELF *self)
Definition template.h:108