/* Derive-C — template.h
 * A queue implemented as an extendable circular buffer (template instantiated
 * via ITEM / ALLOC / SELF macros). Doxygen navigation chrome removed. */
1
2
3#include <stdbool.h>
4#include <stddef.h>
5#include <stdint.h>
6#include <string.h>
7
13
16
17#if !defined ITEM
18 #if !defined PLACEHOLDERS
19 #error "The contained type must be defined for a queue template"
20 #endif
21
22typedef struct {
23 int x;
24} item_t;
25 #define ITEM item_t
26static void item_delete(item_t* self) { (void)self; }
27 #define ITEM_DELETE item_delete
28static item_t item_clone(item_t const* self) { return *self; }
29 #define ITEM_CLONE item_clone
30#endif
31
32#if !defined ITEM_DELETE
33 #define ITEM_DELETE(value)
34#endif
35
36#if !defined ITEM_CLONE
37 #define ITEM_CLONE(value) (*(value))
38#endif
39
40typedef ITEM NS(SELF, item_t);
41typedef ALLOC NS(SELF, alloc_t);
42
43typedef struct {
45 size_t capacity;
46 size_t head; /* Index of the first element */
47 size_t tail; /* Index of the last element */
48 bool empty; /* Used to differentiate between head == tail when 1 element, or empty */
49 ALLOC* alloc;
52} SELF;
53
/* Debug-only structural sanity assertions for a queue instance:
 * non-NULL self and allocator; head/tail in range when non-empty;
 * head == tail when empty; head == tail == 0 when never allocated. */
#define INVARIANT_CHECK(self) \
    ASSUME(self); \
    ASSUME((self)->alloc); \
    ASSUME( \
        WHEN(!(self)->empty, (self)->head < (self)->capacity && (self)->tail < (self)->capacity)); \
    ASSUME(WHEN((self)->empty, (self)->head == (self)->tail)); \
    ASSUME(WHEN(!(self)->data, (self)->head == 0 && (self)->tail == 0));
61
62static SELF NS(SELF, new)(ALLOC* alloc) {
63 return (SELF){
64 .data = NULL,
65 .head = 0,
66 .tail = 0,
67 .empty = true,
68 .alloc = alloc,
69 .derive_c_circular = gdb_marker_new(),
70 .iterator_invalidation_tracker = mutation_tracker_new(),
71 };
72}
73
74static SELF NS(SELF, new_with_capacity_for)(size_t capacity_for, ALLOC* alloc) {
75 if (capacity_for == 0) {
76 return NS(SELF, new)(alloc);
77 }
78 size_t const capacity = next_power_of_2(capacity_for);
79 ASSERT(is_power_of_2(capacity));
80 ITEM* data = (ITEM*)NS(ALLOC, malloc)(alloc, capacity * sizeof(ITEM));
81 ASSERT(data);
82
84 capacity * sizeof(ITEM));
85
86 return (SELF){
87 .data = data,
88 .capacity = capacity,
89 .head = 0,
90 .tail = 0,
91 .empty = true,
92 .alloc = alloc,
93 .derive_c_circular = gdb_marker_new(),
94 .iterator_invalidation_tracker = mutation_tracker_new(),
95 };
96}
97
98static bool NS(SELF, empty)(SELF const* self) {
99 ASSUME(self);
100 if (self->empty) {
101 ASSUME(self->head == self->tail);
102 }
103 return self->empty;
104}
105
106static size_t NS(SELF, size)(SELF const* self) {
107 ASSUME(self);
108 if (self->empty) {
109 ASSUME(self->tail == self->head);
110 return 0;
111 }
112
113 if (self->head <= self->tail) {
114 return (self->tail - self->head) + 1;
115 }
116 return (self->capacity - self->head) + self->tail + 1;
117}
118
121 if (self->empty) {
123 self->capacity * sizeof(ITEM));
124 } else if (self->head <= self->tail) {
125 memory_tracker_set(MEMORY_TRACKER_LVL_CONTAINER, cap, &self->data[self->tail + 1],
126 (self->capacity - (self->tail + 1)) * sizeof(ITEM));
128 self->head * sizeof(ITEM));
129 } else {
130 memory_tracker_set(MEMORY_TRACKER_LVL_CONTAINER, cap, &self->data[self->tail + 1],
131 (self->head - (self->tail + 1)) * sizeof(ITEM));
132 }
133}
134
135static void NS(SELF, reserve)(SELF* self, size_t new_capacity_for) {
136 INVARIANT_CHECK(self);
137 mutation_tracker_mutate(&self->iterator_invalidation_tracker);
138
139 if (new_capacity_for > self->capacity) {
140 size_t const new_capacity = next_power_of_2(new_capacity_for * 2);
141
142 // We set the capability to write for the allocator, and only touch the
143 // state for memory that is inaccessible
145
146 ITEM* new_data =
147 (ITEM*)NS(ALLOC, realloc)(self->alloc, self->data, new_capacity * sizeof(ITEM));
148
149 ASSERT(new_data);
150 if (self->head > self->tail) {
151 // The queue wraps at the old end, so we need to either:
152 // - shift elements of the tail around into the new area at the end of the buffer
153 // - shift the head of the queue along to the new arena
154 // need to move the wrapped around part to the end of the new buffer
155 size_t const old_capacity = self->capacity;
156 size_t const additional_capacity = new_capacity - old_capacity;
157 size_t const front_tail_items = self->tail + 1;
158 size_t const back_head_items = old_capacity - self->head;
159
160 if (front_tail_items > back_head_items) {
161 size_t const new_head = self->head + additional_capacity;
162 memmove(&new_data[new_head], &new_data[self->head], back_head_items * sizeof(ITEM));
163 self->head = new_head;
164 } else {
165 // as we go to the next power of 2 each time, the additional capacity is always >=
166 // the number of items at the front. As a result, we never need to consider:
167 // - Only copying the first n from the front
168 // - Shifting some of the front items to the start of the buffer
169 ASSUME(front_tail_items <= additional_capacity);
170
171 memcpy(&new_data[old_capacity], &new_data[0], front_tail_items * sizeof(ITEM));
172 self->tail = old_capacity + front_tail_items - 1;
173 }
174 }
175 self->capacity = new_capacity;
176 self->data = new_data;
177
178 // Set the new inaccessible memory
180 }
181 INVARIANT_CHECK(self);
182}
183
184static void NS(SELF, push_back)(SELF* self, ITEM item) {
185 INVARIANT_CHECK(self);
186 mutation_tracker_mutate(&self->iterator_invalidation_tracker);
187 NS(SELF, reserve)(self, NS(SELF, size)(self) + 1);
188
189 if (!self->empty) {
190 self->tail = modulus_power_of_2_capacity(self->tail + 1, self->capacity);
191 }
193 &self->data[self->tail], sizeof(ITEM));
194 self->data[self->tail] = item;
195 self->empty = false;
196}
197
198static void NS(SELF, push_front)(SELF* self, ITEM item) {
199 INVARIANT_CHECK(self);
200 mutation_tracker_mutate(&self->iterator_invalidation_tracker);
201 NS(SELF, reserve)(self, NS(SELF, size)(self) + 1);
202
203 if (!self->empty) {
204 if (self->head == 0) {
205 self->head = self->capacity - 1;
206 } else {
207 self->head--;
208 }
209 }
211 &self->data[self->head], sizeof(ITEM));
212 self->data[self->head] = item;
213 self->empty = false;
214}
215
216static ITEM NS(SELF, pop_front)(SELF* self) {
217 INVARIANT_CHECK(self);
218 mutation_tracker_mutate(&self->iterator_invalidation_tracker);
219 ASSERT(!NS(SELF, empty)(self));
220
221 ITEM value = self->data[self->head];
223 &self->data[self->head], sizeof(ITEM));
225 &self->data[self->head], sizeof(ITEM));
226
227 if (self->head == self->tail) {
228 self->empty = true;
229 } else {
230 self->head = modulus_power_of_2_capacity(self->head + 1, self->capacity);
231 }
232 return value;
233}
234
235static ITEM NS(SELF, pop_back)(SELF* self) {
236 INVARIANT_CHECK(self);
237 mutation_tracker_mutate(&self->iterator_invalidation_tracker);
238 ASSERT(!NS(SELF, empty)(self));
239
240 ITEM value = self->data[self->tail];
242 &self->data[self->tail], sizeof(ITEM));
243 if (self->head == self->tail) {
244 self->empty = true;
245 } else {
246 if (self->tail == 0) {
247 self->tail = self->capacity - 1;
248 } else {
249 self->tail--;
250 }
251 }
252 return value;
253}
254
255static ITEM const* NS(SELF, try_read_from_front)(SELF const* self, size_t index) {
256 INVARIANT_CHECK(self);
257 if (index < NS(SELF, size)(self)) {
258 size_t const real_index = modulus_power_of_2_capacity(self->head + index, self->capacity);
259 return &self->data[real_index];
260 }
261 return NULL;
262}
263
264static ITEM const* NS(SELF, read_from_front)(SELF const* self, size_t index) {
265 ITEM const* item = NS(SELF, try_read_from_front)(self, index);
266 ASSERT(item);
267 return item;
268}
269
270static ITEM* NS(SELF, try_write_from_front)(SELF* self, size_t index) {
271 INVARIANT_CHECK(self);
272 if (index < NS(SELF, size)(self)) {
273 size_t const real_index = modulus_power_of_2_capacity(self->head + index, self->capacity);
274 return &self->data[real_index];
275 }
276 return NULL;
277}
278
279static ITEM* NS(SELF, write_from_front)(SELF* self, size_t index) {
280 ITEM* value = NS(SELF, try_write_from_front)(self, index);
281 ASSERT(value);
282 return value;
283}
284
285static ITEM const* NS(SELF, try_read_from_back)(SELF const* self, size_t index) {
286 INVARIANT_CHECK(self);
287 if (index >= NS(SELF, size)(self)) {
288 return NULL;
289 }
290 if (index <= self->tail) {
291 return &self->data[self->tail - index];
292 }
293 size_t const from_end = index - self->tail;
294 return &self->data[self->capacity - from_end];
295}
296
297static ITEM const* NS(SELF, read_from_back)(SELF const* self, size_t index) {
298 ITEM const* item = NS(SELF, try_read_from_back)(self, index);
299 ASSERT(item);
300 return item;
301}
302
303static ITEM* NS(SELF, try_write_from_back)(SELF* self, size_t index) {
304 INVARIANT_CHECK(self);
305 if (index >= NS(SELF, size)(self)) {
306 return NULL;
307 }
308 if (index <= self->tail) {
309 return &self->data[self->tail - index];
310 }
311 size_t const from_end = index - self->tail;
312 return &self->data[self->capacity - from_end];
313}
314
315static ITEM* NS(SELF, write_from_back)(SELF* self, size_t index) {
316 INVARIANT_CHECK(self);
317 ITEM* value = NS(SELF, try_write_from_back)(self, index);
318 ASSERT(value);
319 return value;
320}
321
322#define ITER NS(SELF, iter)
323typedef ITEM* NS(ITER, item);
324
325static bool NS(ITER, empty_item)(ITEM* const* item) { return *item == NULL; }
326
327typedef struct {
328 SELF* circular;
329 size_t position;
330 mutation_version version;
331} ITER;
332
333static bool NS(ITER, empty)(ITER const* iter) {
334 ASSUME(iter);
335 mutation_version_check(&iter->version);
336 return !NS(SELF, try_read_from_front)(iter->circular, iter->position);
337}
338
339static ITEM* NS(ITER, next)(ITER* iter) {
340 ASSUME(iter);
341 mutation_version_check(&iter->version);
342 ITEM* item = NS(SELF, try_write_from_front)(iter->circular, iter->position);
343 if (!item) {
344 return NULL;
345 }
346 iter->position++;
347 return item;
348}
349
350static ITER NS(SELF, get_iter)(SELF* self) {
351 INVARIANT_CHECK(self);
352 return (ITER){
353 .circular = self,
354 .position = 0,
355 .version = mutation_tracker_get(&self->iterator_invalidation_tracker),
356 };
357}
358
359static void NS(SELF, delete)(SELF* self) {
360 INVARIANT_CHECK(self);
361 if (self->data) {
362 ITER iter = NS(SELF, get_iter)(self);
363 ITEM* item;
364 while ((item = NS(ITER, next)(&iter))) {
367 sizeof(ITEM));
368 }
370 self->capacity * sizeof(ITEM));
371 NS(ALLOC, free)(self->alloc, self->data);
372 }
373}
374
375#undef ITER
376
377#define ITER_CONST NS(SELF, iter_const)
378typedef ITEM const* NS(ITER_CONST, item);
379
380static bool NS(ITER_CONST, empty_item)(ITEM const* const* item) { return *item == NULL; }
381
382typedef struct {
383 SELF const* circular;
384 size_t position;
385 mutation_version version;
386} ITER_CONST;
387
388static bool NS(ITER_CONST, empty)(ITER_CONST const* iter) {
389 ASSUME(iter);
390 mutation_version_check(&iter->version);
391 return !NS(SELF, try_read_from_front)(iter->circular, iter->position);
392}
393
394static ITEM const* NS(ITER_CONST, next)(ITER_CONST* iter) {
395 ASSUME(iter);
396 mutation_version_check(&iter->version);
397 ITEM const* item = NS(SELF, try_read_from_front)(iter->circular, iter->position);
398 if (!item) {
399 return NULL;
400 }
401 iter->position++;
402 return item;
403}
404
405static ITER_CONST NS(SELF, get_iter_const)(SELF const* self) {
406 INVARIANT_CHECK(self);
407 return (ITER_CONST){
408 .circular = self,
409 .position = 0,
410 .version = mutation_tracker_get(&self->iterator_invalidation_tracker),
411 };
412}
413
414static SELF NS(SELF, clone)(SELF const* self) {
415 INVARIANT_CHECK(self);
416 ITEM* new_data = NULL;
417 size_t tail = 0;
418 size_t new_capacity = 0;
419 size_t const old_size = NS(SELF, size)(self);
420
421 if (old_size > 0) {
422 new_capacity = next_power_of_2(old_size);
423 new_data = (ITEM*)NS(ALLOC, malloc)(self->alloc, new_capacity * sizeof(ITEM));
424 ASSERT(new_data);
425
426 ITER_CONST iter = NS(SELF, get_iter_const)(self);
427 ITEM const* item;
428 while ((item = NS(ITER_CONST, next)(&iter))) {
429 new_data[tail] = ITEM_CLONE(item);
430 tail++;
431 }
432 tail--;
433 }
434 SELF new_self = {
435 .data = new_data,
436 .capacity = new_capacity,
437 .head = 0,
438 .tail = tail,
439 .empty = self->empty,
440 .alloc = self->alloc,
441 .derive_c_circular = gdb_marker_new(),
442 .iterator_invalidation_tracker = mutation_tracker_new(),
443 };
445 return new_self;
446}
447
448#undef ITER_CONST
449
450#undef ITEM
451#undef ITEM_DELETE
452#undef ITEM_CLONE
453
454#undef INVARIANT_CHECK
455
458
static void free(SELF *self, void *ptr)
Definition template.h:58
static void * realloc(SELF *self, void *ptr, size_t size)
Definition template.h:47
static void * malloc(SELF *self, size_t size)
Definition template.h:25
#define ALLOC
Definition template.h:59
#define ITEM
Definition template.h:58
static bool empty_item(IV_PAIR const *const *item)
Definition template.h:352
static ITER_CONST get_iter_const(SELF const *self)
Definition template.h:465
IV_PAIR const * item
Definition template.h:350
static bool empty(ITER const *iter)
Definition template.h:361
static ITER get_iter(SELF *self)
Definition template.h:388
static SELF new_with_capacity_for(INDEX_TYPE items, ALLOC *alloc)
Definition template.h:148
static INDEX_TYPE size(SELF const *self)
Definition template.h:275
static IV_PAIR const * next(ITER *iter)
Definition template.h:370
#define INVARIANT_CHECK(self)
Definition template.h:142
ALLOC alloc_t
Definition template.h:85
static SELF clone(SELF const *self)
Definition template.h:246
static ITEM const * read_from_front(SELF const *self, size_t index)
Definition template.h:264
static ITEM pop_front(SELF *self)
Definition template.h:216
static ITEM const * read_from_back(SELF const *self, size_t index)
Definition template.h:297
static ITEM * try_write_from_back(SELF *self, size_t index)
Definition template.h:303
#define ITEM_DELETE
Definition template.h:27
static ITEM pop_back(SELF *self)
Definition template.h:235
static ITEM * write_from_back(SELF *self, size_t index)
Definition template.h:315
static void reserve(SELF *self, size_t new_capacity_for)
Definition template.h:135
static ITEM * try_write_from_front(SELF *self, size_t index)
Definition template.h:270
static ITEM * write_from_front(SELF *self, size_t index)
Definition template.h:279
static void push_back(SELF *self, ITEM item)
Definition template.h:184
static void PRIV set_inaccessible_memory_caps(SELF *self, memory_tracker_capability cap)
Definition template.h:119
static void item_delete(item_t *self)
Definition template.h:26
#define ITEM_CLONE
Definition template.h:29
static ITEM const * try_read_from_back(SELF const *self, size_t index)
Definition template.h:285
static ITEM const * try_read_from_front(SELF const *self, size_t index)
Definition template.h:255
static item_t item_clone(item_t const *self)
Definition template.h:28
static void push_front(SELF *self, ITEM item)
Definition template.h:198
#define TRAIT_QUEUE(SELF)
Definition trait.h:6
static size_t position(ITER const *iter)
Definition template.h:366
static ITEM * data(SELF *self)
Definition template.h:259
static gdb_marker gdb_marker_new()
Definition gdb_marker.h:7
#define ASSERT(expr,...)
Definition macros.h:42
#define ASSUME(expr,...)
Definition macros.h:62
static INLINE CONST size_t next_power_of_2(size_t x)
Definition math.h:8
static size_t INLINE CONST modulus_power_of_2_capacity(size_t index, size_t capacity)
Definition math.h:25
static bool INLINE CONST is_power_of_2(size_t x)
Definition math.h:23
static void memory_tracker_check(memory_tracker_level level, memory_tracker_capability cap, const void *addr, size_t size)
@ MEMORY_TRACKER_LVL_CONTAINER
static void memory_tracker_set(memory_tracker_level level, memory_tracker_capability cap, const volatile void *addr, size_t size)
memory_tracker_capability
a wrapper over asan & msan Containers and allocators can use this for custom asan & msan poisoning,...
@ MEMORY_TRACKER_CAP_NONE
@ MEMORY_TRACKER_CAP_WRITE
static mutation_tracker mutation_tracker_new()
static void mutation_version_check(mutation_version const *self)
static mutation_version mutation_tracker_get(mutation_tracker const *self)
static void mutation_tracker_mutate(mutation_tracker *self)
#define NS(pre, post)
Definition namespace.h:4
#define PRIV(name)
Definition namespace.h:6
#define SELF
Definition def.h:52
size_t head
Definition template.h:46
size_t capacity
Definition template.h:124
mutation_tracker iterator_invalidation_tracker
Definition template.h:139
gdb_marker derive_c_circular
Definition template.h:50
ITEM * data
Definition template.h:44
bool empty
Definition template.h:48
size_t tail
Definition template.h:47
ALLOC * alloc
Definition template.h:66
A queue comprised of an extendable circular buffer.
Definition template.h:22
int x
Definition template.h:23
tracks a specific version of a value, so that this can be compared later to check modification For ex...