template.h

#if !defined(SKIP_INCLUDES)
    #include "includes.h"
#endif

#if !defined INDEX_BITS
    #if !defined PLACEHOLDERS
TEMPLATE_ERROR("The number of bits (8,16,32,64) to use for the arena's key must be defined")
    #endif
    #define INDEX_BITS 32
#endif

#if !defined VALUE
    #if !defined PLACEHOLDERS
TEMPLATE_ERROR("The value type to place in the arena must be defined")
    #endif
typedef struct {
    int x;
} value_t;
    #define VALUE value_t
    #define VALUE_DELETE value_delete
static void VALUE_DELETE(value_t* self);
    #define VALUE_CLONE value_clone
static value_t VALUE_CLONE(value_t const* self);
    #define VALUE_DEBUG value_debug
static void VALUE_DEBUG(VALUE const* self, dc_debug_fmt fmt, FILE* stream);
#endif

DC_STATIC_ASSERT(sizeof(VALUE), "VALUE must be a non-zero sized type");

#if !defined VALUE_DELETE
    #define VALUE_DELETE DC_NO_DELETE
#endif

#if !defined VALUE_CLONE
    #define VALUE_CLONE DC_COPY_CLONE
#endif

#if !defined VALUE_DEBUG
    #define VALUE_DEBUG DC_DEFAULT_DEBUG
#endif
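
// Instantiation sketch (illustrative, not part of this file): VALUE, INDEX_BITS
// and the arena's NAME are the template parameters consumed above; the `item`
// type and the include path are hypothetical, and NS(pre, post) is assumed to
// paste `pre_post` identifiers.
//
//     typedef struct { int x; } item;
//     #define VALUE item
//     #define INDEX_BITS 32
//     #define NAME item_arena
//     #include "template.h" /* hypothetical path to this template */
//
// This would generate item_arena, item_arena_new, item_arena_insert, and the
// other functions below.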

#if !defined INITIAL_BLOCK_INDEX_BITS
    #define INITIAL_BLOCK_INDEX_BITS 8
#endif

DC_STATIC_ASSERT(INITIAL_BLOCK_INDEX_BITS < INDEX_BITS,
                 "INITIAL_BLOCK_INDEX_BITS must be less than INDEX_BITS");
DC_STATIC_ASSERT(INITIAL_BLOCK_INDEX_BITS > 0,
                 "INITIAL_BLOCK_INDEX_BITS must be greater than zero");

static const size_t NS(SELF, max_entries) = MAX_INDEX;

typedef VALUE NS(SELF, value_t);
typedef ALLOC NS(SELF, alloc_t);

#define SLOT NS(NAME, slot)

#define SLOT_INDEX_TYPE INDEX_TYPE     // [DERIVE-C] for template
#define SLOT_VALUE VALUE               // [DERIVE-C] for template
#define SLOT_VALUE_CLONE VALUE_CLONE   // [DERIVE-C] for template
#define SLOT_VALUE_DELETE VALUE_DELETE // [DERIVE-C] for template
#define INTERNAL_NAME SLOT             // [DERIVE-C] for template

typedef struct {
    // INVARIANT: If free_list == INDEX_NONE, then all values from [0, count)
    //            are present
    INDEX_TYPE free_list;
    size_t count;

    ALLOC* alloc;

    dc_gdb_marker derive_c_arena_blocks;
    mutation_tracker iterator_invalidation_tracker;

    // JUSTIFY: Using the index type for the block's exclusive end
    //          - We can use a smaller integer type (than size_t), as this is
    //            guaranteed to be smaller than half the largest index.
    INDEX_TYPE block_current_exclusive_end;
    uint8_t block_current;

    SLOT* blocks[DC_ARENA_GEO_MAX_NUM_BLOCKS(INDEX_BITS, INITIAL_BLOCK_INDEX_BITS)];
} SELF;
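
// Layout note (summarising the fields above): only blocks[0 ..= block_current]
// point at allocated storage, sized per block by DC_ARENA_GEO_BLOCK_TO_SIZE;
// the current block is filled up to block_current_exclusive_end, and removed
// slots are chained into free_list for reuse.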

static void PRIV(NS(SELF, set_memory_tracking))(SELF const* self) {
    for (uint8_t block_index = 0; block_index <= self->block_current; block_index++) {
        size_t block_items =
            block_index == self->block_current
                ? self->block_current_exclusive_end
                : DC_ARENA_GEO_BLOCK_TO_SIZE(block_index, INITIAL_BLOCK_INDEX_BITS);
        for (size_t offset = 0; offset < block_items; offset++) {
            SLOT* slot = &self->blocks[block_index][offset];
            if (slot->present) {
                NS(SLOT, memory_tracker_present)(slot);
            } else {
                NS(SLOT, memory_tracker_empty)(slot);
            }
        }
    }

    size_t tail_slots = DC_ARENA_GEO_BLOCK_TO_SIZE(self->block_current, INITIAL_BLOCK_INDEX_BITS) -
                        (self->block_current_exclusive_end - 1);
    dc_memory_tracker_set(DC_MEMORY_TRACKER_LVL_CONTAINER, DC_MEMORY_TRACKER_CAP_NONE,
                          &self->blocks[self->block_current][self->block_current_exclusive_end],
                          tail_slots * sizeof(SLOT));
}

#define INVARIANT_CHECK(self)                                                                      \
    DC_ASSUME(self);                                                                               \
    DC_ASSUME(DC_ARENA_GEO_BLOCK_TO_SIZE((self)->block_current, INITIAL_BLOCK_INDEX_BITS) >=       \
              (self)->block_current_exclusive_end);                                                \
    DC_ASSUME((self)->count <= MAX_INDEX);

static SELF NS(SELF, new)(ALLOC* alloc) {
    uint8_t initial_block = 0;
    size_t initial_block_items =
        DC_ARENA_GEO_BLOCK_TO_SIZE(initial_block, INITIAL_BLOCK_INDEX_BITS);
    SLOT* initial_block_slots = (SLOT*)NS(ALLOC, malloc)(alloc, initial_block_items * sizeof(SLOT));
    DC_ASSERT(initial_block_slots);

    SELF self = {
        .free_list = INDEX_NONE,
        .count = 0,
        .alloc = alloc,
        .derive_c_arena_blocks = dc_gdb_marker_new(),
        .iterator_invalidation_tracker = mutation_tracker_new(),
        .block_current_exclusive_end = 0,
        .block_current = initial_block,
        .blocks =
            {
                initial_block_slots,
            },
    };

    PRIV(NS(SELF, set_memory_tracking))(&self);
    return self;
}

static INDEX NS(SELF, insert)(SELF* self, VALUE value) {
    INVARIANT_CHECK(self);
    DC_ASSERT(self->count < MAX_INDEX);

    mutation_tracker_mutate(&self->iterator_invalidation_tracker);

    if (self->free_list != INDEX_NONE) {
        INDEX_TYPE free_index = self->free_list;

        uint8_t block = DC_ARENA_GEO_INDEX_TO_BLOCK(free_index, INITIAL_BLOCK_INDEX_BITS);
        size_t offset = DC_ARENA_GEO_INDEX_TO_OFFSET(free_index, block, INITIAL_BLOCK_INDEX_BITS);
        SLOT* free_slot = &self->blocks[block][offset];

        DC_ASSUME(!free_slot->present, "The free list should only contain free slots");
        self->free_list = free_slot->next_free;

        free_slot->present = true;
        NS(SLOT, fill)(free_slot, value);
        self->count++;

        return (INDEX){.index = free_index};
    }

    if (self->block_current_exclusive_end ==
        DC_ARENA_GEO_BLOCK_TO_SIZE(self->block_current, INITIAL_BLOCK_INDEX_BITS)) {
        DC_ASSUME(self->block_current < sizeof(self->blocks) / sizeof(SLOT*));

        self->block_current++;
        size_t block_items =
            DC_ARENA_GEO_BLOCK_TO_SIZE(self->block_current, INITIAL_BLOCK_INDEX_BITS);
        SLOT* block_slots = (SLOT*)NS(ALLOC, malloc)(self->alloc, block_items * sizeof(SLOT));
        DC_ASSERT(block_slots);

        self->blocks[self->block_current] = block_slots;
        self->block_current_exclusive_end = 0;
    }

    size_t offset = self->block_current_exclusive_end;
    NS(SLOT, fill)(&self->blocks[self->block_current][offset], value);
    INDEX_TYPE new_index = (INDEX_TYPE)(DC_ARENA_GEO_BLOCK_OFFSET_TO_INDEX(
        self->block_current, offset, INITIAL_BLOCK_INDEX_BITS));

    self->block_current_exclusive_end++;
    self->count++;

    return (INDEX){.index = new_index};
}
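
// Usage sketch for new/insert (illustrative; assumes the hypothetical
// item_arena instantiation from the earlier sketch, NS pasting `pre_post`
// names so INDEX expands to item_arena_index, and an allocator instance
// `alloc` matching the ALLOC parameter):
//
//     item_arena arena = item_arena_new(&alloc);
//     item_arena_index id = item_arena_insert(&arena, (item){.x = 42});
//
// Freed slots are recycled through free_list before the arena grows a new
// block, so an index is only reused after its slot has been removed.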

static VALUE const* NS(SELF, try_read)(SELF const* self, INDEX index) {
    INVARIANT_CHECK(self);

    uint8_t block = DC_ARENA_GEO_INDEX_TO_BLOCK(index.index, INITIAL_BLOCK_INDEX_BITS);
    if (block > self->block_current) {
        return NULL;
    }

    size_t offset = DC_ARENA_GEO_INDEX_TO_OFFSET(index.index, block, INITIAL_BLOCK_INDEX_BITS);

    if (block == self->block_current && offset >= self->block_current_exclusive_end) {
        return NULL;
    }

    SLOT* slot = &self->blocks[block][offset];
    if (!slot->present) {
        return NULL;
    }

    return &slot->value;
}

static VALUE const* NS(SELF, read)(SELF const* self, INDEX index) {
    VALUE const* value = NS(SELF, try_read)(self, index);
    DC_ASSERT(value);
    return value;
}

static VALUE* NS(SELF, try_write)(SELF* self, INDEX index) {
    return (VALUE*)NS(SELF, try_read)(self, index);
}

static VALUE* NS(SELF, write)(SELF* self, INDEX index) {
    return (VALUE*)NS(SELF, read)(self, index);
}

static INDEX_TYPE NS(SELF, size)(SELF const* self) {
    INVARIANT_CHECK(self);
    return self->count;
}

static SELF NS(SELF, clone)(SELF const* self) {
    INVARIANT_CHECK(self);

    // count and free_list are carried over; the loop below clones slot contents.
    SELF new_self = {
        .free_list = self->free_list,
        .count = self->count,
        .alloc = self->alloc,
        .derive_c_arena_blocks = dc_gdb_marker_new(),
        .iterator_invalidation_tracker = mutation_tracker_new(),
        .block_current_exclusive_end = self->block_current_exclusive_end,
        .block_current = self->block_current,
        .blocks = {},
    };

    for (size_t block_index = 0; block_index <= self->block_current; block_index++) {
        size_t block_items = DC_ARENA_GEO_BLOCK_TO_SIZE(block_index, INITIAL_BLOCK_INDEX_BITS);
        SLOT* block_slots = (SLOT*)NS(ALLOC, malloc)(self->alloc, block_items * sizeof(SLOT));
        DC_ASSERT(block_slots);
        new_self.blocks[block_index] = block_slots;

        size_t const to_offset =
            block_index == self->block_current ? self->block_current_exclusive_end : block_items;

        for (size_t offset = 0; offset < to_offset; offset++) {
            SLOT* src_slot = &self->blocks[block_index][offset];
            SLOT* dst_slot = &new_self.blocks[block_index][offset];

            if (src_slot->present) {
                dst_slot->present = true;
                dst_slot->value = VALUE_CLONE(&src_slot->value);
            } else {
                // Preserve the free-list chain through this empty slot
                dst_slot->present = false;
                NS(SLOT, set_empty)(dst_slot, src_slot->next_free);
            }
        }
    }

    PRIV(NS(SELF, set_memory_tracking))(&new_self);

    return new_self;
}

static bool NS(SELF, try_remove)(SELF* self, INDEX index, VALUE* destination) {
    INVARIANT_CHECK(self);
    mutation_tracker_mutate(&self->iterator_invalidation_tracker);

    uint8_t block = DC_ARENA_GEO_INDEX_TO_BLOCK(index.index, INITIAL_BLOCK_INDEX_BITS);
    if (block > self->block_current) {
        return false;
    }

    size_t offset = DC_ARENA_GEO_INDEX_TO_OFFSET(index.index, block, INITIAL_BLOCK_INDEX_BITS);

    if (block == self->block_current && offset >= self->block_current_exclusive_end) {
        return false;
    }

    SLOT* slot = &self->blocks[block][offset];
    if (slot->present) {
        *destination = slot->value;
        slot->present = false;
        NS(SLOT, set_empty)(slot, self->free_list);
        self->free_list = index.index;
        self->count--;
        return true;
    }
    return false;
}

static VALUE NS(SELF, remove)(SELF* self, INDEX index) {
    INVARIANT_CHECK(self);
    mutation_tracker_mutate(&self->iterator_invalidation_tracker);

    VALUE value;
    DC_ASSERT(NS(SELF, try_remove)(self, index, &value));
    return value;
}
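
// The two removal paths differ only in failure handling: try_remove returns
// false (leaving *destination untouched) for an empty or out-of-range index,
// while remove asserts that the value was present. Sketch (same hypothetical
// item_arena instantiation as above):
//
//     item out;
//     if (item_arena_try_remove(&arena, id, &out)) {
//         /* `out` now owns the removed value; `id` may be reused later */
//     }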

static void NS(SELF, delete)(SELF* self) {
    INVARIANT_CHECK(self);

    for (uint8_t block = 0; block <= self->block_current; block++) {
        size_t const to_offset = block == self->block_current
                                     ? self->block_current_exclusive_end
                                     : DC_ARENA_GEO_BLOCK_TO_SIZE(block, INITIAL_BLOCK_INDEX_BITS);

        for (size_t offset = 0; offset < to_offset; offset++) {
            SLOT* slot = &self->blocks[block][offset];
            if (slot->present) {
                VALUE_DELETE(&slot->value);
            }
        }

        dc_memory_tracker_set(DC_MEMORY_TRACKER_LVL_CONTAINER, DC_MEMORY_TRACKER_CAP_WRITE,
                              self->blocks[block],
                              DC_ARENA_GEO_BLOCK_TO_SIZE(block, INITIAL_BLOCK_INDEX_BITS) * sizeof(SLOT));
        NS(ALLOC, free)(self->alloc, self->blocks[block]);
    }
}

#define IV_PAIR_CONST NS(SELF, iv_const)
typedef struct {
    INDEX index;
    VALUE const* value;
} IV_PAIR_CONST;

static IV_PAIR_CONST NS(SELF, iv_const_empty)() {
    return (IV_PAIR_CONST){
        .index = {.index = INDEX_NONE},
        .value = NULL,
    };
}

#define ITER_CONST NS(SELF, iter_const)
typedef IV_PAIR_CONST NS(ITER_CONST, item);

static bool NS(ITER_CONST, empty_item)(IV_PAIR_CONST const* item) { return item->value == NULL; }

typedef struct {
    SELF const* arena;
    INDEX_TYPE next_index;
    mutation_version version;
} ITER_CONST;

static IV_PAIR_CONST NS(ITER_CONST, next)(ITER_CONST* iter) {
    DC_ASSUME(iter);
    mutation_version_check(&iter->version);

    while (iter->next_index < MAX_INDEX) {
        uint8_t block = DC_ARENA_GEO_INDEX_TO_BLOCK(iter->next_index, INITIAL_BLOCK_INDEX_BITS);
        size_t offset =
            DC_ARENA_GEO_INDEX_TO_OFFSET(iter->next_index, block, INITIAL_BLOCK_INDEX_BITS);

        if ((block == iter->arena->block_current &&
             offset >= iter->arena->block_current_exclusive_end) ||
            (block > iter->arena->block_current)) {
            break;
        }

        SLOT* slot = &iter->arena->blocks[block][offset];
        if (slot->present) {
            IV_PAIR_CONST const result = {
                .index = (INDEX){.index = iter->next_index},
                .value = &slot->value,
            };
            iter->next_index++;
            return result;
        }

        iter->next_index++;
    }

    return NS(SELF, iv_const_empty)();
}

static ITER_CONST NS(SELF, get_iter_const)(SELF const* self) {
    INVARIANT_CHECK(self);

    return (ITER_CONST){
        .arena = self,
        .next_index = 0,
        .version = mutation_tracker_get(&self->iterator_invalidation_tracker),
    };
}

static void NS(SELF, debug)(SELF const* self, dc_debug_fmt fmt, FILE* stream) {
    fprintf(stream, EXPAND_STRING(SELF) "@%p {\n", self);
    fmt = dc_debug_fmt_scope_begin(fmt);
    dc_debug_fmt_print(fmt, stream, "count: %lu,\n", self->count);

    if (self->free_list == INDEX_NONE) {
        dc_debug_fmt_print(fmt, stream, "free_list: INDEX_NONE,\n");
    } else {
        dc_debug_fmt_print(fmt, stream, "free_list: %lu,\n", (size_t)self->free_list);
    }

    dc_debug_fmt_print(fmt, stream, "alloc: ");
    NS(ALLOC, debug)(self->alloc, fmt, stream);
    fprintf(stream, ",\n");

    dc_debug_fmt_print(fmt, stream, "blocks: [");
    fmt = dc_debug_fmt_scope_begin(fmt);
    for (size_t block = 0; block <= self->block_current; block++) {
        dc_debug_fmt_print(fmt, stream, "{\n");
        fmt = dc_debug_fmt_scope_begin(fmt);

        size_t const capacity = DC_ARENA_GEO_BLOCK_TO_SIZE(block, INITIAL_BLOCK_INDEX_BITS);
        size_t const to_offset =
            block == self->block_current ? self->block_current_exclusive_end : capacity;

        dc_debug_fmt_print(fmt, stream, "block_index: %lu,\n", block);
        dc_debug_fmt_print(fmt, stream, "block_ptr: %p,\n", self->blocks[block]);
        dc_debug_fmt_print(fmt, stream, "capacity: %lu,\n", capacity);
        dc_debug_fmt_print(fmt, stream, "size: %lu,\n", to_offset);
        dc_debug_fmt_print(fmt, stream, "slots: [\n");
        fmt = dc_debug_fmt_scope_begin(fmt);

        for (size_t offset = 0; offset < to_offset; offset++) {
            SLOT* slot = &self->blocks[block][offset];
            dc_debug_fmt_print(fmt, stream, "{\n");
            fmt = dc_debug_fmt_scope_begin(fmt);

            dc_debug_fmt_print(fmt, stream, "present: %s,\n", slot->present ? "true" : "false");
            if (slot->present) {
                dc_debug_fmt_print(fmt, stream, "value: ");
                VALUE_DEBUG(&slot->value, fmt, stream);
                fprintf(stream, ",\n");
            } else {
                dc_debug_fmt_print(fmt, stream, "next_free: %lu,\n", (size_t)slot->next_free);
            }

            fmt = dc_debug_fmt_scope_end(fmt);
            dc_debug_fmt_print(fmt, stream, "},\n");
        }

        fmt = dc_debug_fmt_scope_end(fmt);
        dc_debug_fmt_print(fmt, stream, "],\n");

        /* Close the block's scope and print its closing brace */
        fmt = dc_debug_fmt_scope_end(fmt);
        dc_debug_fmt_print(fmt, stream, "},\n");
    }

    fmt = dc_debug_fmt_scope_end(fmt);
    dc_debug_fmt_print(fmt, stream, "],\n");

    fmt = dc_debug_fmt_scope_end(fmt);
    dc_debug_fmt_print(fmt, stream, "}");
}

#undef ITER_CONST
#undef IV_PAIR_CONST

#define IV_PAIR NS(SELF, iv)
typedef struct {
    INDEX index;
    VALUE* value;
} IV_PAIR;

static IV_PAIR NS(SELF, iv_empty)() {
    return (IV_PAIR){
        .index = {.index = INDEX_NONE},
        .value = NULL,
    };
}

#define ITER NS(SELF, iter)
typedef IV_PAIR NS(ITER, item);

static bool NS(ITER, empty_item)(IV_PAIR const* item) { return item->value == NULL; }

typedef struct {
    SELF* arena;
    INDEX_TYPE next_index;
    mutation_version version;
} ITER;

static IV_PAIR NS(ITER, next)(ITER* iter) {
    DC_ASSUME(iter);
    mutation_version_check(&iter->version);

    while (iter->next_index < MAX_INDEX) {
        uint8_t block = DC_ARENA_GEO_INDEX_TO_BLOCK(iter->next_index, INITIAL_BLOCK_INDEX_BITS);
        size_t offset =
            DC_ARENA_GEO_INDEX_TO_OFFSET(iter->next_index, block, INITIAL_BLOCK_INDEX_BITS);

        if ((block == iter->arena->block_current &&
             offset >= iter->arena->block_current_exclusive_end) ||
            (block > iter->arena->block_current)) {
            break;
        }

        SLOT* slot = &iter->arena->blocks[block][offset];
        if (slot->present) {
            IV_PAIR result = {
                .index = (INDEX){.index = iter->next_index},
                .value = &slot->value,
            };
            iter->next_index++;
            return result;
        }

        iter->next_index++;
    }

    return NS(SELF, iv_empty)();
}

static ITER NS(SELF, get_iter)(SELF* self) {
    INVARIANT_CHECK(self);

    return (ITER){
        .arena = self,
        .next_index = 0,
        .version = mutation_tracker_get(&self->iterator_invalidation_tracker),
    };
}
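
// Iteration sketch (same hypothetical item_arena instantiation as above).
// `next` checks the mutation_version captured by get_iter against the arena's
// tracker, so inserting or removing during iteration is detected rather than
// silently invalidating the iterator:
//
//     item_arena_iter it = item_arena_get_iter(&arena);
//     for (item_arena_iv pair = item_arena_iter_next(&it);
//          !item_arena_iter_empty_item(&pair); pair = item_arena_iter_next(&it)) {
//         pair.value->x += 1;
//     }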

#undef ITER
#undef IV_PAIR
#undef INVARIANT_CHECK
#undef SLOT
#undef INITIAL_BLOCK_INDEX_BITS

#undef VALUE_DEBUG
#undef VALUE_CLONE
#undef VALUE_DELETE
#undef VALUE
#undef INDEX_BITS