Derive-C
Loading...
Searching...
No Matches
template.h
Go to the documentation of this file.
1
2
3#include <assert.h>
4#include <stdbool.h>
5#include <stddef.h>
6#include <stdint.h>
7#include <stdlib.h>
8#include <string.h>
9
15
18
19#if !defined INDEX_BITS
20 #if !defined PLACEHOLDERS
21 #error "The number of bits (8,16,32,64) to use for the arena's key"
22 #endif
23 #define INDEX_BITS 32
24#endif
25
26#if !defined VALUE
27 #if !defined PLACEHOLDERS
28 #error "The value type to place in the arena must be defined"
29 #endif
30typedef struct {
31 int x;
32} value_t;
33 #define VALUE value_t
34static void value_delete(value_t* self) { (void)self; }
35 #define VALUE_DELETE value_delete
36static value_t value_clone(value_t const* self) { return *self; }
37 #define VALUE_CLONE value_clone
38#endif
39
40STATIC_ASSERT(sizeof(VALUE), "VALUE must be a non-zero sized type");
41
42#if !defined VALUE_DELETE
43 #define VALUE_DELETE(value) (void)value
44#endif
45
46#if !defined VALUE_CLONE
47 #define VALUE_CLONE(value) (*(value))
48#endif
49
50#if INDEX_BITS == 8
51 #define INDEX_TYPE uint8_t
52 #define CAPACITY_EXCLUSIVE_UPPER (UINT8_MAX + 1ULL)
53 #define MAX_INDEX (UINT8_MAX - 1ULL)
54 #define INDEX_NONE UINT8_MAX
55#elif INDEX_BITS == 16
56 #define INDEX_TYPE uint16_t
57 #define CAPACITY_EXCLUSIVE_UPPER (UINT16_MAX + 1ULL)
58 #define MAX_INDEX (UINT16_MAX - 1ULL)
59 #define INDEX_NONE UINT16_MAX
60#elif INDEX_BITS == 32
61 #define INDEX_TYPE uint32_t
62 #define CAPACITY_EXCLUSIVE_UPPER (UINT32_MAX + 1ULL)
63 #define MAX_INDEX (UINT32_MAX - 1ULL)
64 #define INDEX_NONE UINT32_MAX
65#elif INDEX_BITS == 64
66 #define INDEX_TYPE uint64_t
67 // JUSTIFY: Special case, we cannot store the max capacity as a size_t integer
68 #define CAPACITY_EXCLUSIVE_UPPER UINT64_MAX
69 #define MAX_INDEX (UINT64_MAX - 1ULL)
70 #define INDEX_NONE UINT64_MAX
71#endif
72
73#define SLOT NS(SELF, slot)
74
75#define CHECK_ACCESS_INDEX(self, index) ((index).index < (self)->exclusive_end)
76
77// JUSTIFY: Macro rather than static
78// - Avoids the need to cast to the INDEX_TYPE
79#define RESIZE_FACTOR 2
80
81// INVARIANT: < CAPACITY_EXCLUSIVE_UPPER
82#define INDEX NS(SELF, index_t)
83
84typedef VALUE NS(SELF, value_t);
85typedef ALLOC NS(SELF, alloc_t);
86
87typedef struct {
89} INDEX;
90
91typedef struct {
92 union {
95 };
96
97 // JUSTIFY: Present flag last
98 // - Reduces size, C ABI orders fields, placing it before a larger
99 // (aligned) value would add (alignment - 1 byte) of unecessary
100 // padding
102} SLOT;
103
112
121
122typedef struct {
124 size_t capacity;
126
127 // JUSTIFY: Separately recording the first_uninit_entry
128 // - Allows us to not use calloc for the buffer (for < first_uninit_entry
129 // we can check represent, for >= first_uninit_entry we know none are
130 // present so having uninitialised entries is fine)
132
133 // INVARIANT: If free_list == EMPTY_INDEX, then all values from [0, count)
134 // are present
135 size_t count;
136
137 ALLOC* alloc;
140} SELF;
141
142#define INVARIANT_CHECK(self) \
143 ASSUME(self); \
144 ASSUME((self)->count <= (self)->capacity); \
145 ASSUME((self)->exclusive_end >= (self)->count); \
146 ASSUME((self)->count <= MAX_INDEX);
147
149 ASSERT(items > 0);
150 size_t capacity = next_power_of_2(items);
151 ASSERT(capacity <= CAPACITY_EXCLUSIVE_UPPER);
152 SLOT* slots = (SLOT*)NS(ALLOC, calloc)(alloc, capacity, sizeof(SLOT));
153 ASSERT(slots);
154
155 for (INDEX_TYPE index = 0; index < capacity; index++) {
156 NS(SLOT, memory_tracker_empty)(&slots[index]);
157 }
158
159 return (SELF){
160 .slots = slots,
161 .capacity = (INDEX_TYPE)capacity,
162 .free_list = INDEX_NONE,
163 .exclusive_end = 0,
164 .count = 0,
165 .alloc = alloc,
166 .derive_c_arena_basic = gdb_marker_new(),
167 .iterator_invalidation_tracker = mutation_tracker_new(),
168 };
169}
170
171static INDEX NS(SELF, insert)(SELF* self, VALUE value) {
172 INVARIANT_CHECK(self);
173 ASSERT(self->count < MAX_INDEX);
174
175 mutation_tracker_mutate(&self->iterator_invalidation_tracker);
176 if (self->free_list != INDEX_NONE) {
177 INDEX_TYPE free_index = self->free_list;
178 SLOT* slot = &self->slots[free_index];
179 ASSUME(!slot->present);
180 self->free_list = slot->next_free;
181 slot->present = true;
183 slot->value = value;
184 self->count++;
185 return (INDEX){.index = free_index};
186 }
187
188 if (self->exclusive_end == self->capacity) {
189 ASSERT(self->capacity <= (CAPACITY_EXCLUSIVE_UPPER / RESIZE_FACTOR));
190 self->capacity *= RESIZE_FACTOR;
191 SLOT* new_alloc = (SLOT*)realloc(self->slots, self->capacity * sizeof(SLOT));
192 ASSERT(new_alloc);
193 self->slots = new_alloc;
194
195 for (size_t index = self->exclusive_end; index < self->capacity; index++) {
196 NS(SLOT, memory_tracker_empty)(&self->slots[index]);
197 }
198 }
199
200 INDEX_TYPE new_index = self->exclusive_end;
201 SLOT* slot = &self->slots[new_index];
202 slot->present = true;
204 slot->value = value;
205 self->count++;
206 self->exclusive_end++;
207 return (INDEX){.index = new_index};
208}
209
210static VALUE* NS(SELF, try_write)(SELF* self, INDEX index) {
211 INVARIANT_CHECK(self);
212 if (!CHECK_ACCESS_INDEX(self, index)) {
213 return NULL;
214 }
215 SLOT* slot = &self->slots[index.index];
216 if (!slot->present) {
217 return NULL;
218 }
219 return &slot->value;
220}
221
222static VALUE* NS(SELF, write)(SELF* self, INDEX index) {
223 VALUE* value = NS(SELF, try_write)(self, index);
224 ASSERT(value);
225 return value;
226}
227
228static VALUE const* NS(SELF, try_read)(SELF const* self, INDEX index) {
229 INVARIANT_CHECK(self);
230 if (!CHECK_ACCESS_INDEX(self, index)) {
231 return NULL;
232 }
233 SLOT* slot = &self->slots[index.index];
234 if (!slot->present) {
235 return NULL;
236 }
237 return &slot->value;
238}
239
240static VALUE const* NS(SELF, read)(SELF const* self, INDEX index) {
241 VALUE const* value = NS(SELF, try_read)(self, index);
242 ASSERT(value);
243 return value;
244}
245
246static SELF NS(SELF, clone)(SELF const* self) {
247 INVARIANT_CHECK(self);
248 SLOT* slots = (SLOT*)NS(ALLOC, calloc)(self->alloc, self->capacity, sizeof(SLOT));
249 ASSERT(slots);
250
251 for (INDEX_TYPE index = 0; index < self->exclusive_end; index++) {
252 if (self->slots[index].present) {
253 NS(SLOT, memory_tracker_present)(&slots[index]);
254 slots[index].present = true;
255 slots[index].value = VALUE_CLONE(&self->slots[index].value);
256 } else {
257 NS(SLOT, memory_tracker_empty)(&slots[index]);
258 slots[index].present = false;
259 slots[index].next_free = self->slots[index].next_free;
260 }
261 }
262
263 return (SELF){
264 .slots = slots,
265 .capacity = self->capacity,
266 .free_list = self->free_list,
267 .exclusive_end = self->exclusive_end,
268 .count = self->count,
269 .alloc = self->alloc,
270 .derive_c_arena_basic = gdb_marker_new(),
271 .iterator_invalidation_tracker = mutation_tracker_new(),
272 };
273}
274
275static INDEX_TYPE NS(SELF, size)(SELF const* self) {
276 INVARIANT_CHECK(self);
277 return self->count;
278}
279
280static bool NS(SELF, full)(SELF const* self) {
281 INVARIANT_CHECK(self);
282 if (self->capacity == CAPACITY_EXCLUSIVE_UPPER) {
283 if (self->free_list == INDEX_NONE) {
284 return true;
285 }
286 }
287 return false;
288}
289
290static size_t NS(SELF, max_entries) = MAX_INDEX;
291
// Moves the value at `index` out of the arena into `destination`.
// Returns false (leaving `destination` untouched) when `index` is out of
// range or the slot is empty; on true the caller owns the moved value.
// Invalidates all outstanding iterators.
292static bool NS(SELF, try_remove)(SELF* self, INDEX index, VALUE* destination) {
293    INVARIANT_CHECK(self);
294    mutation_tracker_mutate(&self->iterator_invalidation_tracker);
295
296    if (!CHECK_ACCESS_INDEX(self, index)) {
297        return false;
298    }
299
300    SLOT* entry = &self->slots[index.index];
301    if (entry->present) {
        // Move the value out by copy, then push the slot onto the free list
        // so the index can be recycled by a later insert.
302        *destination = entry->value;
303        entry->present = false;
305        entry->next_free = self->free_list;
306        self->free_list = index.index;
307        self->count--;
308        return true;
309    }
310    return false;
311}
312
313static VALUE NS(SELF, remove)(SELF* self, INDEX index) {
314 INVARIANT_CHECK(self);
315 mutation_tracker_mutate(&self->iterator_invalidation_tracker);
316
317 VALUE value;
318 ASSERT(NS(SELF, try_remove)(self, index, &value));
319 return value;
320}
321
// Destroys the value at `index` in place (via VALUE_DELETE) and frees its
// slot for reuse. Returns false when `index` is out of range or the slot is
// already empty. Invalidates all outstanding iterators.
322static bool NS(SELF, delete_entry)(SELF* self, INDEX index) {
323    INVARIANT_CHECK(self);
324    mutation_tracker_mutate(&self->iterator_invalidation_tracker);
325
326    if (!CHECK_ACCESS_INDEX(self, index)) {
327        return false;
328    }
329
330    SLOT* entry = &self->slots[index.index];
331    if (entry->present) {
        // Run the value destructor, then push the slot onto the free list so
        // the index can be recycled by a later insert.
332        VALUE_DELETE(&entry->value);
333        entry->present = false;
335        entry->next_free = self->free_list;
336        self->free_list = index.index;
337        self->count--;
338        return true;
339    }
340    return false;
341}
342
343#define IV_PAIR NS(SELF, iv)
344typedef struct {
347} IV_PAIR;
348
349#define ITER NS(SELF, iter)
350typedef IV_PAIR const* NS(ITER, item);
351
352static bool NS(ITER, empty_item)(IV_PAIR const* const* item) { return *item == NULL; }
353
// Mutable iterator over the arena's present slots, in ascending index order.
354typedef struct {
    // Arena being iterated; must outlive the iterator.
355    SELF* arena;
    // Index of the next present slot; INDEX_NONE or >= exclusive_end once exhausted.
356    INDEX_TYPE next_index;
    // Storage for the pair returned by the most recent next() call.
357    IV_PAIR curr;
    // Snapshot used to detect arena mutation (iterator invalidation).
358    mutation_version version;
359} ITER;
360
361static bool NS(ITER, empty)(ITER const* iter) {
362 ASSUME(iter);
363 mutation_version_check(&iter->version);
364 // JUSTIFY: If no entries are left, then the previous '.._next' call moved
365 // the index to the exclusive end
366 // NOTE: `index + 1 > exclusive_end` implies `index >= exclusive_end`
367 return iter->next_index == INDEX_NONE || iter->next_index >= iter->arena->exclusive_end;
368}
369
370static IV_PAIR const* NS(ITER, next)(ITER* iter) {
371 ASSUME(iter);
372 mutation_version_check(&iter->version);
373
374 if (NS(ITER, empty)(iter)) {
375 return NULL;
376 }
377 iter->curr = (IV_PAIR){.index = (INDEX){.index = iter->next_index},
378 .value = &iter->arena->slots[iter->next_index].value};
379 iter->next_index++;
380 while (iter->next_index < INDEX_NONE && iter->next_index < iter->arena->exclusive_end &&
381 !iter->arena->slots[iter->next_index].present) {
382 iter->next_index++;
383 }
384
385 return &iter->curr;
386}
387
388static ITER NS(SELF, get_iter)(SELF* self) {
389 INVARIANT_CHECK(self);
390 INDEX_TYPE index = 0;
391 while (index < INDEX_NONE && index < self->exclusive_end && !self->slots[index].present) {
392 index++;
393 }
394
395 return (ITER){
396 .arena = self,
397 .next_index = index,
398 .curr = (IV_PAIR){.index = (INDEX){.index = INDEX_NONE}, .value = NULL},
399 .version = mutation_tracker_get(&self->iterator_invalidation_tracker),
400 };
401}
402
403static void NS(SELF, delete)(SELF* self) {
404 INVARIANT_CHECK(self);
405 ITER iter = NS(SELF, get_iter)(self);
406 IV_PAIR const* entry;
407 while ((entry = NS(ITER, next)(&iter))) {
408 VALUE_DELETE(entry->value);
409 }
410
411 NS(ALLOC, free)(self->alloc, self->slots);
412}
413
414#undef ITER
415#undef IV_PAIR
416
417#define IV_PAIR_CONST NS(SELF, iv_const)
418typedef struct {
420 VALUE const* value;
422
424 .index = {.index = INDEX_NONE},
425 .value = NULL,
426};
427
428#define ITER_CONST NS(SELF, iter_const)
429typedef IV_PAIR_CONST const* NS(ITER_CONST, item);
430
431static bool NS(ITER_CONST, empty_item)(IV_PAIR_CONST const* const* item) { return *item == NULL; }
432
// Read-only iterator over the arena's present slots, in ascending index order.
433typedef struct {
    // Arena being iterated; must outlive the iterator.
434    SELF const* arena;
    // Index of the next present slot; INDEX_NONE or >= exclusive_end once exhausted.
435    INDEX_TYPE next_index;
    // Storage for the pair returned by the most recent next() call.
436    IV_PAIR_CONST curr;
    // Snapshot used to detect arena mutation (iterator invalidation).
437    mutation_version version;
438} ITER_CONST;
439
440static bool NS(ITER_CONST, empty)(ITER_CONST const* iter) {
441 ASSUME(iter);
442 mutation_version_check(&iter->version);
443 return iter->next_index == INDEX_NONE || iter->next_index >= iter->arena->exclusive_end;
444}
445
446static IV_PAIR_CONST const* NS(ITER_CONST, next)(ITER_CONST* iter) {
447 ASSUME(iter);
448 mutation_version_check(&iter->version);
449
450 if (NS(ITER_CONST, empty)(iter)) {
451 return NULL;
452 }
453
454 iter->curr = (IV_PAIR_CONST){.index = (INDEX){.index = iter->next_index},
455 .value = &iter->arena->slots[iter->next_index].value};
456 iter->next_index++;
457 while (iter->next_index != INDEX_NONE && iter->next_index < iter->arena->exclusive_end &&
458 !iter->arena->slots[iter->next_index].present) {
459 iter->next_index++;
460 }
461
462 return &iter->curr;
463}
464
465static ITER_CONST NS(SELF, get_iter_const)(SELF const* self) {
466 INVARIANT_CHECK(self);
467 INDEX_TYPE index = 0;
468 while (index < INDEX_NONE && index < self->exclusive_end && !self->slots[index].present) {
469 index++;
470 }
471
472 return (ITER_CONST){
473 .arena = self,
474 .next_index = index,
475 .curr = (IV_PAIR_CONST){.index = (INDEX){.index = INDEX_NONE}, .value = NULL},
476 .version = mutation_tracker_get(&self->iterator_invalidation_tracker),
477 };
478}
479
480#undef ITER_CONST
481#undef IV_PAIR_CONST
482
483#undef ALLOC
484#undef INDEX_BITS
485#undef VALUE
486#undef VALUE_DELETE
487#undef VALUE_CLONE
488
489#undef INDEX_TYPE
490#undef CAPACITY_EXCLUSIVE_UPPER
491#undef MAX_INDEX
492#undef INDEX_NONE
493
494#undef SLOT
495#undef CHECK_ACCESS_INDEX
496#undef RESIZE_FACTOR
497#undef INDEX
498
499#undef INVARIANT_CHECK
500
503
static void free(SELF *self, void *ptr)
Definition template.h:58
static void * realloc(SELF *self, void *ptr, size_t size)
Definition template.h:47
static void * calloc(SELF *self, size_t count, size_t size)
Definition template.h:36
#define ALLOC
Definition template.h:59
static INDEX insert(SELF *self, VALUE value)
Definition template.h:171
static bool empty_item(IV_PAIR const *const *item)
Definition template.h:352
static bool full(SELF const *self)
Definition template.h:280
static ITER_CONST get_iter_const(SELF const *self)
Definition template.h:465
IV_PAIR const * item
Definition template.h:350
static bool empty(ITER const *iter)
Definition template.h:361
static ITER get_iter(SELF *self)
Definition template.h:388
static SELF new_with_capacity_for(INDEX_TYPE items, ALLOC *alloc)
Definition template.h:148
#define IV_PAIR
Definition template.h:343
static INDEX_TYPE size(SELF const *self)
Definition template.h:275
static IV_PAIR const * next(ITER *iter)
Definition template.h:370
#define SLOT
Definition template.h:73
#define INVARIANT_CHECK(self)
Definition template.h:142
static VALUE remove(SELF *self, INDEX index)
Definition template.h:313
#define CHECK_ACCESS_INDEX(self, index)
Definition template.h:75
static VALUE * try_write(SELF *self, INDEX index)
Definition template.h:210
static IV_PAIR_CONST iv_const_empty
Definition template.h:423
#define IV_PAIR_CONST
Definition template.h:417
ALLOC alloc_t
Definition template.h:85
static bool delete_entry(SELF *self, INDEX index)
Definition template.h:322
static VALUE const * read(SELF const *self, INDEX index)
Definition template.h:240
static void memory_tracker_present(SLOT const *slot)
Definition template.h:113
static VALUE * write(SELF *self, INDEX index)
Definition template.h:222
#define VALUE
Definition template.h:33
static size_t max_entries
Definition template.h:290
static value_t value_clone(value_t const *self)
Definition template.h:36
#define RESIZE_FACTOR
Definition template.h:79
static void value_delete(value_t *self)
Definition template.h:34
#define VALUE_CLONE
Definition template.h:37
#define VALUE_DELETE
Definition template.h:35
static bool try_remove(SELF *self, INDEX index, VALUE *destination)
Definition template.h:292
#define INDEX
Definition template.h:82
static SELF clone(SELF const *self)
Definition template.h:246
static void memory_tracker_empty(SLOT const *slot)
Definition template.h:104
static VALUE const * try_read(SELF const *self, INDEX index)
Definition template.h:228
#define TRAIT_ARENA(SELF)
Definition trait.h:7
#define INDEX_TYPE
Definition template.h:46
static gdb_marker gdb_marker_new()
Definition gdb_marker.h:7
#define ASSERT(expr,...)
Definition macros.h:42
#define STATIC_ASSERT
Definition macros.h:27
#define ASSUME(expr,...)
Definition macros.h:62
static INLINE CONST size_t next_power_of_2(size_t x)
Definition math.h:8
@ MEMORY_TRACKER_LVL_CONTAINER
static void memory_tracker_set(memory_tracker_level level, memory_tracker_capability cap, const volatile void *addr, size_t size)
@ MEMORY_TRACKER_CAP_NONE
@ MEMORY_TRACKER_CAP_WRITE
@ MEMORY_TRACKER_CAP_READ_WRITE
static mutation_tracker mutation_tracker_new()
static void mutation_version_check(mutation_version const *self)
static mutation_version mutation_tracker_get(mutation_tracker const *self)
static void mutation_tracker_mutate(mutation_tracker *self)
#define NS(pre, post)
Definition namespace.h:4
#define SELF
Definition def.h:52
INDEX_TYPE index
Definition template.h:88
VALUE const * value
Definition template.h:420
VALUE * value
Definition template.h:346
INDEX index
Definition template.h:345
size_t capacity
Definition template.h:124
mutation_tracker iterator_invalidation_tracker
Definition template.h:139
INDEX_TYPE free_list
Definition template.h:125
size_t exclusive_end
Definition template.h:131
SLOT * slots
Definition template.h:123
size_t count
Definition template.h:135
gdb_marker derive_c_arena_basic
Definition template.h:138
ALLOC * alloc
Definition template.h:66
VALUE value
Definition template.h:93
INDEX_TYPE next_free
Definition template.h:94
bool present
Definition template.h:101
tracks a specific version of a value, so that this can be compared later to check modification For ex...
int x
Definition template.h:31