Derive-C
Loading...
Searching...
No Matches
template.h
Go to the documentation of this file.
2#if !defined(SKIP_INCLUDES)
3 #include "includes.h"
4#endif
5
8
/* Template parameter: number of bits (8, 16, 32 or 64) used for the arena's
 * index/key type. Outside placeholder (IDE/doc) builds, omitting it is an
 * error; the default of 32 exists only so tooling can parse this file. */
#if !defined INDEX_BITS
    #if !defined PLACEHOLDERS
TEMPLATE_ERROR("The number of bits (8,16,32,64) to use for the arena's key")
    #endif
    #define INDEX_BITS 32
#endif
15
/* Template parameter: number of low bits of an index that select the offset
 * within a block (so each block holds 2^BLOCK_INDEX_BITS slots). Placeholder
 * default of 8 is for tooling only. */
#if !defined BLOCK_INDEX_BITS
    #if !defined PLACEHOLDERS
TEMPLATE_ERROR("The number of bits used to get the offset within a block must be specified")
    #endif
    #define BLOCK_INDEX_BITS 8
#endif
22
    /* NOTE(review): the opening line of this DC_STATIC_ASSERT was lost in
       extraction — presumably `DC_STATIC_ASSERT(BLOCK_INDEX_BITS < INDEX_BITS,`
       given the message below. TODO confirm against the original header. */
    "The number of bits for offset within a block must be "
    "less than the number of bits used for an index");
26
/* Template parameter: the value type stored in the arena, plus its optional
 * delete/clone/debug hooks. The `value_t` struct and prototypes below are
 * placeholders so IDEs/docs can typecheck the template body. */
#if !defined VALUE
    #if !defined PLACEHOLDERS
TEMPLATE_ERROR("The value type to place in the arena must be defined")
    #endif
    #define VALUE value_t
typedef struct {
    int x;
} VALUE;
    #define VALUE_DELETE value_delete
static void VALUE_DELETE(value_t* self);
    #define VALUE_CLONE value_clone
static value_t VALUE_CLONE(value_t const* self);
    #define VALUE_DEBUG value_debug
static void VALUE_DEBUG(VALUE const*, dc_debug_fmt, FILE* stream);
#endif
42
/* VALUE must be a complete type (sizeof > 0) for block sizing to work. */
DC_STATIC_ASSERT(sizeof(VALUE), "VALUE must be a non-zero sized type");

/* Optional hooks default to the library's trivial implementations:
 * no-op delete, bitwise-copy clone, and a generic debug printer. */
#if !defined VALUE_DELETE
    #define VALUE_DELETE DC_NO_DELETE
#endif

#if !defined VALUE_CLONE
    #define VALUE_CLONE DC_COPY_CLONE
#endif

#if !defined VALUE_DEBUG
    #define VALUE_DEBUG DC_DEFAULT_DEBUG
#endif
56
59
60typedef VALUE NS(SELF, value_t);
61typedef ALLOC NS(SELF, alloc_t);
62
63#define SLOT NS(NAME, slot)
64
65#define SLOT_INDEX_TYPE INDEX_TYPE // [DERIVE-C] for template
66#define SLOT_VALUE VALUE // [DERIVE-C] for template
67#define SLOT_VALUE_CLONE VALUE_CLONE // [DERIVE-C] for template
68#define SLOT_VALUE_CLONE VALUE_CLONE // [DERIVE-C] for template
69#define SLOT_VALUE_DELETE VALUE_DELETE // [DERIVE-C] for template
70#define INTERNAL_NAME SLOT // [DERIVE-C] for template
72
74
/* The arena: values live in fixed-size blocks (2^BLOCK_INDEX_BITS slots each)
 * addressed through a growable table of block pointers; removed slots are
 * chained on an intrusive free list, so indices stay stable across removes. */
typedef struct {
    size_t count; /* number of occupied slots */
    /* NOTE(review): field lines lost in extraction here — per uses elsewhere
       in this file: INDEX_TYPE free_list; (INDEX_NONE when empty). */

    PRIV(NS(SELF, block)) * *blocks; /* table of pointers to blocks */
    /* NOTE(review): lost fields — per uses elsewhere: INDEX_TYPE block_current;
       INDEX_TYPE block_current_exclusive_end; (append position in last block). */

    ALLOC* alloc;
    dc_gdb_marker derive_c_arena_basic; /* marker consumed by gdb pretty-printers */
    /* NOTE(review): lost field — mutation_tracker iterator_invalidation_tracker; */
} SELF;
87
/* Structural invariants checked on entry to every public operation:
 * - count never exceeds the index space;
 * - the append cursor stays within one block;
 * - an empty free list implies all allocated slots are occupied. */
#define INVARIANT_CHECK(self)                                                                      \
    DC_ASSUME(self);                                                                               \
    DC_ASSUME(((self))->count <= MAX_INDEX);                                                       \
    DC_ASSUME(((self)->block_current_exclusive_end) <=                                             \
              DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS));                                      \
    DC_ASSUME(DC_WHEN((self)->free_list == INDEX_NONE,                                             \
                      (self)->count ==                                                             \
                          (DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS) * (self)->block_current + \
                           (self)->block_current_exclusive_end)),                                  \
              "All slots are full if the free list is empty");
98
99static SELF NS(SELF, new)(ALLOC* alloc) {
100 PRIV(NS(SELF, block))* first_block =
101 (PRIV(NS(SELF, block))*)NS(ALLOC, malloc)(alloc, sizeof(PRIV(NS(SELF, block))));
102 DC_ASSERT(first_block);
103
104 PRIV(NS(SELF, block))** blocks =
105 (PRIV(NS(SELF, block))**)NS(ALLOC, malloc)(alloc, sizeof(PRIV(NS(SELF, block))*));
106 DC_ASSERT(blocks);
107
108 blocks[0] = first_block;
109
110 for (INDEX_TYPE offset = 0; offset < DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS); offset++) {
111 /* Properly index the slots within the allocated block */
112 NS(SLOT, memory_tracker_empty)(&(*first_block)[offset]);
113 }
114
115 return (SELF){
116 .count = 0,
117 .free_list = INDEX_NONE,
118 .blocks = blocks,
119 .block_current = 0,
120 .block_current_exclusive_end = 0,
121 .alloc = alloc,
122 .derive_c_arena_basic = dc_gdb_marker_new(),
123 .iterator_invalidation_tracker = mutation_tracker_new(),
124 };
125}
126
/* Inserts `value`: reuses a free-list slot when one exists, otherwise appends
 * at the current block's cursor, growing the block table when the block is
 * full. Returns the stable index of the stored value. Aborts when the arena
 * already holds MAX_INDEX entries. Invalidates outstanding iterators. */
static INDEX NS(SELF, insert)(SELF* self, VALUE value) {
    INVARIANT_CHECK(self);
    mutation_tracker_mutate(&self->iterator_invalidation_tracker);
    DC_ASSERT(self->count < MAX_INDEX);

    if (self->free_list != INDEX_NONE) {
        INDEX_TYPE free_index = self->free_list;
        /* NOTE(review): lines lost in extraction — presumably derive `block`
           and `offset` from free_index via DC_ARENA_CHUNKED_INDEX_TO_BLOCK /
           DC_ARENA_CHUNKED_INDEX_TO_OFFSET. TODO confirm. */

        SLOT* slot = &(*self->blocks[block])[offset];

        DC_ASSUME(!slot->present);
        self->free_list = slot->next_free;
        slot->present = true;
        /* NOTE(review): a line was lost here — likely
           NS(SLOT, memory_tracker_present)(slot). */
        slot->value = value;
        self->count++;
        return (INDEX){.index = free_index};
    }

    /* Free list empty: append. Grow the block table first if the current
       block has no room left. */
    if (self->block_current_exclusive_end == DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS)) {
        self->block_current++;
        self->block_current_exclusive_end = 0;

        self->blocks = (PRIV(NS(SELF, block))**)NS(ALLOC, realloc)(
            self->alloc, (void*)self->blocks,
            (self->block_current + 1) * sizeof(PRIV(NS(SELF, block))*));
        DC_ASSERT(self->blocks);

        PRIV(NS(SELF, block))* new_block =
            (PRIV(NS(SELF, block))*)NS(ALLOC, malloc)(self->alloc, sizeof(PRIV(NS(SELF, block))));
        /* NOTE(review): no visible DC_ASSERT(new_block) — a line may have been
           lost here, or the malloc result is genuinely unchecked; verify. */

        self->blocks[self->block_current] = new_block;

        for (size_t offset = 0; offset < DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS); offset++) {
            NS(SLOT, memory_tracker_empty)(&(*new_block)[offset]);
        }
    }

    SLOT* slot = &(*self->blocks[self->block_current])[self->block_current_exclusive_end];
    slot->present = true;
    /* NOTE(review): lost line — likely NS(SLOT, memory_tracker_present)(slot). */
    slot->value = value;

    /* NOTE(review): lost line — presumably
       `INDEX_TYPE index = DC_ARENA_CHUNKED_BLOCK_OFFSET_TO_INDEX(` given the
       continuation below. */
        self->block_current, self->block_current_exclusive_end, BLOCK_INDEX_BITS);
    self->count++;
    self->block_current_exclusive_end++;

    return (INDEX){.index = index};
}
179
/* Returns a const pointer to the value at `index`, or NULL when the index is
 * out of the allocated range or the slot is vacant. */
static VALUE const* NS(SELF, try_read)(SELF const* self, INDEX index) {
    INVARIANT_CHECK(self);

    /* NOTE(review): lines lost in extraction — presumably derive `block` and
       `offset` from index.index via DC_ARENA_CHUNKED_INDEX_TO_BLOCK /
       DC_ARENA_CHUNKED_INDEX_TO_OFFSET. TODO confirm. */

    if (block > self->block_current ||
        (block == self->block_current && offset >= self->block_current_exclusive_end)) {
        return NULL;
    }

    /* NOTE(review): non-const SLOT* obtained from a const arena — the
       const-ness is restored by the return type, but a SLOT const* would be
       cleaner. */
    SLOT* slot = &(*self->blocks[block])[offset];

    if (!slot->present) {
        return NULL;
    }
    return &slot->value;
}
198
199static VALUE const* NS(SELF, read)(SELF const* self, INDEX index) {
200 VALUE const* value = NS(SELF, try_read)(self, index);
201 DC_ASSERT(value);
202 return value;
203}
204
205static VALUE* NS(SELF, try_write)(SELF* self, INDEX index) {
206 return (VALUE*)NS(SELF, try_read)(self, index);
207}
208
209static VALUE* NS(SELF, write)(SELF* self, INDEX index) {
210 VALUE* value = NS(SELF, try_write)(self, index);
211 DC_ASSERT(value);
212 return value;
213}
214
/* Deep-copies the arena: allocates a fresh block table and fresh blocks from
 * the same allocator, then slot-clones every allocated position (full blocks
 * entirely, the last block only up to its cursor). */
static SELF NS(SELF, clone)(SELF const* self) {
    INVARIANT_CHECK(self);

    /* NOTE(review): unlike new()/insert(), these malloc results are not
       DC_ASSERT-checked — possibly lost lines, or an inconsistency; verify. */
    PRIV(NS(SELF, block))** blocks = (PRIV(NS(SELF, block))**)NS(ALLOC, malloc)(
        self->alloc, sizeof(PRIV(NS(SELF, block))*) * (self->block_current + 1));

    for (INDEX_TYPE b = 0; b <= self->block_current; b++) {
        blocks[b] =
            (PRIV(NS(SELF, block))*)NS(ALLOC, malloc)(self->alloc, sizeof(PRIV(NS(SELF, block))));
    }

    /* Clone every slot of every full block. */
    for (INDEX_TYPE b = 0; b < self->block_current; b++) {
        PRIV(NS(SELF, block))* to_block = blocks[b];
        PRIV(NS(SELF, block)) const* from_block = self->blocks[b];
        /* NOTE(review): inner loop header lost in extraction — presumably
           `for (INDEX_TYPE i = 0; i < DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS); i++) {`. */
            NS(SLOT, clone_from)(&(*from_block)[i], &(*to_block)[i]);
        }
    }

    /* Clone the partially-filled current block up to its append cursor. */
    PRIV(NS(SELF, block))* to_current_block = blocks[self->block_current];
    PRIV(NS(SELF, block)) const* from_current_block = self->blocks[self->block_current];
    for (INDEX_TYPE i = 0; i < self->block_current_exclusive_end; i++) {
        NS(SLOT, clone_from)(&(*from_current_block)[i], &(*to_current_block)[i]);
    }

    return (SELF){
        .count = self->count,
        .free_list = self->free_list,
        .blocks = blocks,
        .block_current = self->block_current,
        .block_current_exclusive_end = self->block_current_exclusive_end,
        .alloc = self->alloc,
        .derive_c_arena_basic = dc_gdb_marker_new(),
        .iterator_invalidation_tracker = mutation_tracker_new(),
    };
}
251
252static INDEX_TYPE NS(SELF, size)(SELF const* self) {
253 INVARIANT_CHECK(self);
254 return self->count;
255}
256
257static bool NS(SELF, full)(SELF const* self) {
258 INVARIANT_CHECK(self);
259 return self->count < MAX_INDEX;
260}
261
/* Upper bound on simultaneously live entries, derived from INDEX_BITS. */
static const size_t NS(SELF, max_entries) = MAX_INDEX;
263
/* Removes the value at `index`, moving it into `*destination`. Returns false
 * (and leaves `*destination` untouched) when the index is out of range or
 * the slot is already vacant. Pushes the freed slot onto the free list.
 * Invalidates outstanding iterators. */
static bool NS(SELF, try_remove)(SELF* self, INDEX index, VALUE* destination) {
    INVARIANT_CHECK(self);
    mutation_tracker_mutate(&self->iterator_invalidation_tracker);

    /* NOTE(review): lines lost in extraction — presumably derive `block` and
       `offset` from index.index via the DC_ARENA_CHUNKED_* macros. */

    /* Only treat offset vs block_current_exclusive_end for the last block */
    if (block > self->block_current ||
        (block == self->block_current && offset >= self->block_current_exclusive_end)) {
        return false;
    }

    PRIV(NS(SELF, block))* current_block = self->blocks[block];
    SLOT* entry = &(*current_block)[offset];

    if (entry->present) {
        *destination = entry->value;
        entry->present = false;
        /* NOTE(review): a line was lost here — likely
           NS(SLOT, memory_tracker_empty)(entry). */
        entry->next_free = self->free_list;
        self->free_list = index.index;
        self->count--;
        return true;
    }
    return false;
}
291
292static VALUE NS(SELF, remove)(SELF* self, INDEX index) {
293 INVARIANT_CHECK(self);
294 mutation_tracker_mutate(&self->iterator_invalidation_tracker);
295
296 VALUE value;
297 DC_ASSERT(NS(SELF, try_remove)(self, index, &value));
298 return value;
299}
300
/* Scans forward from `from_index + 1` for the next occupied slot, returning
 * its index, or INDEX_NONE once the scan passes the arena's append cursor.
 * Termination is guaranteed by the bounds check against block_current /
 * block_current_exclusive_end. */
static INDEX_TYPE PRIV(NS(SELF, next_index_value))(SELF const* self, INDEX_TYPE from_index) {
    for (INDEX_TYPE next_index = from_index + 1;; next_index++) {
        /* NOTE(review): lines lost in extraction — presumably derive `block`
           and `offset` from next_index via the DC_ARENA_CHUNKED_* macros. */

        if (block > self->block_current ||
            (block == self->block_current && offset >= self->block_current_exclusive_end)) {
            return INDEX_NONE;
        }

        /* Fix wrong indexing: use &(*self->blocks[block])[offset] rather than
           self->blocks[block][offset] which indexes by block-sized strides. */
        SLOT* slot = &(*self->blocks[block])[offset];
        if (slot->present) {
            return next_index;
        }
    }
}
319
#define ITER NS(SELF, iter)
#define IV_PAIR NS(ITER, item)

/* NOTE(review): the ITER struct definition was lost in extraction — per the
   field references below it holds: SELF* arena; INDEX_TYPE next_index;
   mutation_version version. TODO confirm against the original header. */

/* Holds when next_index is either a currently-occupied index or INDEX_NONE. */
#define ITER_INVARIANT_CHECK(iter)                                                             \
    DC_ASSUME(iter);                                                                           \
    DC_ASSUME(                                                                                 \
        DC_WHEN((iter)->next_index != INDEX_NONE,                                              \
                NS(SELF, try_read)(iter->arena, (INDEX){.index = (iter)->next_index}) != NULL), \
        "The next index is either valid, or the iterator is empty");
335
/* Index/value pair yielded by the mutable iterator; `value == NULL` marks the
 * empty sentinel. */
typedef struct {
    INDEX index;
    /* NOTE(review): field line lost in extraction — per uses below:
       VALUE* value; */
} IV_PAIR;

/* NOTE(review): signature line lost in extraction — presumably
   `static IV_PAIR NS(SELF, iv_empty)() {` (the end-of-iteration sentinel). */
    return (IV_PAIR){.index = (INDEX){.index = INDEX_NONE}, .value = NULL};
}
/* An item is the empty sentinel iff its value pointer is NULL. */
static bool NS(ITER, empty_item)(IV_PAIR const* item) { return item->value == NULL; }
345
/* True when the iterator is exhausted. Panics (via the version check) if the
 * arena was mutated since the iterator was created. */
static bool NS(ITER, empty)(ITER const* iter) {
    /* NOTE(review): a line was lost in extraction here — likely
       ITER_INVARIANT_CHECK(iter). */
    mutation_version_check(&iter->version);
    return iter->next_index == INDEX_NONE;
}
351
/* Yields the current item (mutable pointer into the arena) and advances to
 * the next occupied slot; yields the empty sentinel once exhausted. Panics
 * if the arena was mutated since the iterator was created. */
static IV_PAIR NS(ITER, next)(ITER* iter) {
    /* NOTE(review): a line was lost in extraction here — likely
       ITER_INVARIANT_CHECK(iter). */
    mutation_version_check(&iter->version);

    if (iter->next_index == INDEX_NONE) {
        return NS(SELF, iv_empty)();
    }

    INDEX index = {.index = iter->next_index};
    IV_PAIR result = (IV_PAIR){
        .index = index,
        .value = NS(SELF, write)(iter->arena, index),
    };

    iter->next_index = PRIV(NS(SELF, next_index_value))(iter->arena, iter->next_index);
    return result;
}
369
370static ITER NS(SELF, get_iter)(SELF* self) {
371 INVARIANT_CHECK(self);
372 return (ITER){
373 .arena = self,
374 .next_index = PRIV(NS(SELF, next_index_value))(self, 0),
375 .version = mutation_tracker_get(&self->iterator_invalidation_tracker),
376 };
377}
378
379static void NS(SELF, delete)(SELF* self) {
380 INVARIANT_CHECK(self);
381 ITER iter = NS(SELF, get_iter)(self);
382
383 for (IV_PAIR entry = NS(ITER, next)(&iter); !NS(ITER, empty_item)(&entry);
384 entry = NS(ITER, next)(&iter)) {
385 VALUE_DELETE(entry.value);
386 }
387
388 for (INDEX_TYPE b = 0; b <= self->block_current; b++) {
389 NS(ALLOC, free)(self->alloc, self->blocks[b]);
390 }
391 NS(ALLOC, free)(self->alloc, (void*)self->blocks);
392}
393
/* Tear down the mutable-iterator names before generating the const variant. */
#undef ITER_INVARIANT_CHECK
#undef IV_PAIR
#undef ITER

#define ITER_CONST NS(SELF, iter_const)
#define IV_PAIR_CONST NS(ITER_CONST, item)

/* NOTE(review): the ITER_CONST struct definition was lost in extraction —
   per the field references below it holds: SELF const* arena;
   INDEX_TYPE next_index; mutation_version version. TODO confirm. */

/* Holds when next_index is either a currently-occupied index or INDEX_NONE. */
#define ITER_CONST_INVARIANT_CHECK(iter)                                                       \
    DC_ASSUME(iter);                                                                           \
    DC_ASSUME(                                                                                 \
        DC_WHEN((iter)->next_index != INDEX_NONE,                                              \
                NS(SELF, try_read)(iter->arena, (INDEX){.index = (iter)->next_index}) != NULL), \
        "The next index is either valid, or the iterator is empty");
413
/* Index/value pair yielded by the const iterator; `value == NULL` marks the
 * empty sentinel. */
typedef struct {
    INDEX index;
    VALUE const* value;
/* NOTE(review): the struct's closing line (`} IV_PAIR_CONST;`) and the
   signature of the sentinel constructor — presumably
   `static IV_PAIR_CONST NS(SELF, iv_const_empty)() {` — were lost in
   extraction. TODO confirm against the original header. */
    return (IV_PAIR_CONST){.index = (INDEX){.index = INDEX_NONE}, .value = NULL};
}

/* An item is the empty sentinel iff its value pointer is NULL. */
static bool NS(ITER_CONST, empty_item)(IV_PAIR_CONST const* item) { return item->value == NULL; }
424
/* True when the const iterator is exhausted. Panics (via the version check)
 * if the arena was mutated since the iterator was created. */
static bool NS(ITER_CONST, empty)(ITER_CONST const* iter) {
    /* NOTE(review): a line was lost in extraction here — likely
       ITER_CONST_INVARIANT_CHECK(iter). */
    mutation_version_check(&iter->version);
    return iter->next_index == INDEX_NONE;
}
430
/* NOTE(review): the function signature was lost in extraction — presumably
   `static IV_PAIR_CONST NS(ITER_CONST, next)(ITER_CONST* iter) {` followed by
   ITER_CONST_INVARIANT_CHECK(iter), mirroring the mutable variant above.
   Yields the current item (const pointer) and advances; empty sentinel once
   exhausted. */
    mutation_version_check(&iter->version);

    if (iter->next_index == INDEX_NONE) {
        return NS(SELF, iv_const_empty)();
    }

    INDEX index = {.index = iter->next_index};
    IV_PAIR_CONST result = (IV_PAIR_CONST){
        .index = index,
        .value = NS(SELF, read)(iter->arena, index),
    };

    iter->next_index = PRIV(NS(SELF, next_index_value))(iter->arena, iter->next_index);
    return result;
}
448
449static ITER_CONST NS(SELF, get_iter_const)(SELF const* self) {
450 INVARIANT_CHECK(self);
451 return (ITER_CONST){
452 .arena = self,
453 .next_index = PRIV(NS(SELF, next_index_value))(self, 0),
454 .version = mutation_tracker_get(&self->iterator_invalidation_tracker),
455 };
456}
457
/* Pretty-prints the arena to `stream`: header fields, then every allocated
 * slot per block (present slots via VALUE_DEBUG, vacant slots with their
 * free-list link).
 * NOTE(review): `%p` is passed non-void* pointers without a cast — strictly
 * this is unspecified per the C standard's fprintf contract; consider
 * (void*) casts. */
static void NS(SELF, debug)(SELF const* self, dc_debug_fmt fmt, FILE* stream) {
    fprintf(stream, EXPAND_STRING(SELF) "@%p {\n", self);
    fmt = dc_debug_fmt_scope_begin(fmt);
    dc_debug_fmt_print(fmt, stream, "count: %lu,\n", self->count);
    dc_debug_fmt_print(fmt, stream, "free_list: %lu,\n", (size_t)self->free_list);

    dc_debug_fmt_print(fmt, stream, "alloc: ");
    NS(ALLOC, debug)(self->alloc, fmt, stream);
    fprintf(stream, ",\n");

    dc_debug_fmt_print(fmt, stream, "current_block: %lu\n", (size_t)self->block_current);
    dc_debug_fmt_print(fmt, stream, "block_current_exclusive_end: %lu\n",
                       (size_t)self->block_current_exclusive_end);
    dc_debug_fmt_print(fmt, stream, "blocks: [");
    fmt = dc_debug_fmt_scope_begin(fmt);

    for (INDEX_TYPE b = 0; b <= self->block_current; b++) {

        dc_debug_fmt_print(fmt, stream, "block[%lu]: @%p [", (size_t)b, self->blocks[b]);
        fmt = dc_debug_fmt_scope_begin(fmt);

        INDEX_TYPE block_entry_exclusive_end = b == self->block_current
                                                   ? self->block_current_exclusive_end
        /* NOTE(review): the ternary's else-branch line was lost in extraction —
           presumably `: DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS);`. */

        for (INDEX_TYPE i = 0; i < block_entry_exclusive_end; i++) {
            /* Previously used self->blocks[b][i] which computes wrong address.
               Use the dereference-then-index form to get the SLOT. */
            SLOT* entry = &(*self->blocks[b])[i];

            if (entry->present) {
                /* NOTE(review): lines lost — presumably the opening
                   `dc_debug_fmt_print(` call and the index argument
                   (likely `(size_t)DC_ARENA_CHUNKED_BLOCK_OFFSET_TO_INDEX(b, i, BLOCK_INDEX_BITS),`). */
                    fmt, stream, "[index=%lu]{\n",
                fmt = dc_debug_fmt_scope_begin(fmt);
                VALUE_DEBUG(&entry->value, fmt, stream);
                fprintf(stream, ",\n");
                fmt = dc_debug_fmt_scope_end(fmt);
                dc_debug_fmt_print(fmt, stream, "},\n");
            } else {
                /* NOTE(review): lines lost — presumably the opening
                   `dc_debug_fmt_print(` call and the index argument,
                   mirroring the present-slot branch above. */
                    fmt, stream, "[index=%lu]{ next_free=%lu }\n",
                    (size_t)entry->next_free);
            }
        }

        fmt = dc_debug_fmt_scope_end(fmt);
        dc_debug_fmt_print(fmt, stream, "],\n");
    }

    fmt = dc_debug_fmt_scope_end(fmt);
    dc_debug_fmt_print(fmt, stream, "],\n");

    fmt = dc_debug_fmt_scope_end(fmt);
    dc_debug_fmt_print(fmt, stream, "}");
}
515
516#undef ITER_CONST_INVARIANT_CHECK
517#undef IV_PAIR_CONST
518#undef ITER_CONST
519
520#undef INVARIANT_CHECK
521#undef SLOT
522
525
526#undef VALUE_DEBUG
527#undef VALUE_CLONE
528#undef VALUE_DELETE
529#undef VALUE
530
531#undef BLOCK_INDEX_BITS
532#undef INDEX_BITS
533
535
static void debug(SELF const *self, dc_debug_fmt fmt, FILE *stream)
Definition template.h:62
static void free(SELF *self, void *ptr)
Definition template.h:56
static void * realloc(SELF *self, void *ptr, size_t size)
Definition template.h:45
static void * malloc(SELF *self, size_t size)
Definition template.h:23
#define ALLOC
Definition template.h:64
#define DC_ARENA_CHUNKED_BLOCK_OFFSET_TO_INDEX(BLOCK, OFFSET, BLOCK_INDEX_BITS)
Definition utils.h:9
#define DC_ARENA_CHUNKED_INDEX_TO_OFFSET(INDEX, BLOCK_INDEX_BITS)
Definition utils.h:6
#define DC_ARENA_CHUNKED_INDEX_TO_BLOCK(INDEX, BLOCK_INDEX_BITS)
Definition utils.h:4
#define DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS)
Definition utils.h:12
static INDEX insert(SELF *self, VALUE value)
Definition template.h:127
static bool full(SELF const *self)
Definition template.h:257
static ITER_CONST get_iter_const(SELF const *self)
Definition template.h:449
static bool empty(ITER const *iter)
Definition template.h:346
#define ITER_CONST_INVARIANT_CHECK(iter)
Definition template.h:407
#define INDEX_BITS
Definition template.h:13
#define BLOCK_INDEX_BITS
Definition template.h:20
static ITER get_iter(SELF *self)
Definition template.h:370
#define VALUE_DEBUG
Definition template.h:39
static IV_PAIR next(ITER *iter)
Definition template.h:352
#define IV_PAIR
Definition template.h:321
static INDEX_TYPE size(SELF const *self)
Definition template.h:252
static INDEX_TYPE PRIV next_index_value(SELF const *self, INDEX_TYPE from_index)
Definition template.h:301
#define SLOT
Definition template.h:63
#define INVARIANT_CHECK(self)
Definition template.h:88
static VALUE remove(SELF *self, INDEX index)
Definition template.h:292
static bool empty_item(IV_PAIR const *item)
Definition template.h:344
static IV_PAIR_CONST iv_const_empty()
Definition template.h:419
SLOT PRIV(block)[DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS)]
Definition template.h:73
static VALUE * try_write(SELF *self, INDEX index)
Definition template.h:205
#define IV_PAIR_CONST
Definition template.h:399
ALLOC alloc_t
Definition template.h:61
static VALUE const * read(SELF const *self, INDEX index)
Definition template.h:199
static VALUE * write(SELF *self, INDEX index)
Definition template.h:209
#define VALUE
Definition template.h:31
#define ITER
Definition template.h:320
#define VALUE_CLONE
Definition template.h:37
#define ITER_INVARIANT_CHECK(iter)
Definition template.h:329
#define VALUE_DELETE
Definition template.h:35
static bool try_remove(SELF *self, INDEX index, VALUE *destination)
Definition template.h:264
#define ITER_CONST
Definition template.h:398
static const size_t max_entries
Definition template.h:262
static SELF clone(SELF const *self)
Definition template.h:215
static VALUE const * try_read(SELF const *self, INDEX index)
Definition template.h:180
static IV_PAIR iv_empty()
Definition template.h:341
IV_PAIR item
Definition template.h:283
#define VALUE
Definition template.h:51
#define DC_TRAIT_ARENA(SELF)
Definition trait.h:5
#define INDEX_TYPE
Definition template.h:74
dc_debug_fmt dc_debug_fmt_scope_end(dc_debug_fmt fmt)
Definition fmt.h:39
dc_debug_fmt dc_debug_fmt_scope_begin(dc_debug_fmt fmt)
Definition fmt.h:33
static void dc_debug_fmt_print(dc_debug_fmt fmt, FILE *stream, const char *format,...)
Definition fmt.h:22
static dc_gdb_marker dc_gdb_marker_new()
Definition gdb_marker.h:7
static mutation_tracker mutation_tracker_new()
static void mutation_version_check(mutation_version const *self)
static mutation_version mutation_tracker_get(mutation_tracker const *self)
static void mutation_tracker_mutate(mutation_tracker *self)
#define EXPAND_STRING(NAME)
Definition namespace.h:8
#define NS(pre, post)
Definition namespace.h:4
#define PRIV(name)
Definition namespace.h:6
#define DC_ASSERT(expr,...)
Definition panic.h:36
#define DC_STATIC_ASSERT
Definition panic.h:21
#define DC_ASSUME(expr,...)
Definition panic.h:56
#define TEMPLATE_ERROR(...)
With the user provided name, even in nested templates.
Definition def.h:56
#define SELF
Definition def.h:52
INDEX_TYPE next_index
Definition template.h:403
mutation_version version
Definition template.h:404
SELF const * arena
Definition template.h:402
INDEX_TYPE next_index
Definition template.h:325
mutation_version version
Definition template.h:326
SELF * arena
Definition template.h:324
VALUE const * value
Definition template.h:416
VALUE * value
Definition template.h:338
INDEX index
Definition template.h:337
An allocator that prints to stdout when it allocates or frees memory.
Definition template.h:14
PRIV(block) **blocks
INDEX_TYPE block_current
Definition template.h:80
mutation_tracker iterator_invalidation_tracker
Definition template.h:85
INDEX_TYPE block_current_exclusive_end
Definition template.h:81
INDEX_TYPE free_list
Definition template.h:77
dc_gdb_marker derive_c_arena_basic
Definition template.h:84
size_t count
Definition template.h:76
SLOT * blocks[DC_ARENA_GEO_MAX_NUM_BLOCKS(INDEX_BITS, INITIAL_BLOCK_INDEX_BITS)]
Definition template.h:116
ALLOC * alloc
Definition template.h:71
VALUE value
Definition template.h:78
int x
Definition template.h:33
Debug format helpers for debug printin data structures.
Definition fmt.h:10
tracks a specific version of a value, so that this can be compared later to check modification For ex...
static void memory_tracker_present(SELF const *slot)
Definition template.h:68
static void clone_from(SELF const *from_slot, SELF *to_slot)
Definition template.h:83
static void memory_tracker_empty(SELF const *slot)
Definition template.h:53
static FILE * stream(SELF *self)
Opens a file for.
Definition template.h:107