4#if !defined(SKIP_INCLUDES)
12 #if !defined PLACEHOLDERS
20 #define ITEM_DELETE item_delete
22 #define ITEM_CLONE item_clone
24 #define ITEM_DEBUG item_debug
28#if !defined ITEM_DELETE
29 #define ITEM_DELETE DC_NO_DELETE
32#if !defined ITEM_CLONE
33 #define ITEM_CLONE DC_COPY_CLONE
36#if !defined ITEM_DEBUG
37 #define ITEM_DEBUG DC_DEFAULT_DEBUG
54#define INVARIANT_CHECK(self) \
56 DC_ASSUME((self)->alloc); \
57 DC_ASSUME(DC_WHEN(!(self)->empty, \
58 (self)->head < (self)->capacity && (self)->tail < (self)->capacity)); \
59 DC_ASSUME(DC_WHEN((self)->empty, (self)->head == (self)->tail)); \
60 DC_ASSUME(DC_WHEN(!(self)->data, (self)->head == 0 && (self)->tail == 0));
75 if (capacity_for == 0) {
76 return NS(
SELF,
new)(alloc);
113 if (self->head <= self->tail) {
114 return (self->tail - self->head) + 1;
116 return (self->capacity - self->head) + self->tail + 1;
123 self->capacity *
sizeof(
ITEM));
124 }
else if (self->head <= self->tail) {
126 (self->capacity - (self->tail + 1)) *
sizeof(
ITEM));
128 self->head *
sizeof(
ITEM));
131 (self->head - (self->tail + 1)) *
sizeof(
ITEM));
139 if (new_capacity_for > self->capacity) {
150 if (self->head > self->tail) {
155 size_t const old_capacity = self->capacity;
156 size_t const additional_capacity = new_capacity - old_capacity;
157 size_t const front_tail_items = self->tail + 1;
158 size_t const back_head_items = old_capacity - self->head;
160 if (front_tail_items > back_head_items) {
161 size_t const new_head = self->head + additional_capacity;
162 memmove(&new_data[new_head], &new_data[self->head], back_head_items *
sizeof(
ITEM));
163 self->head = new_head;
169 DC_ASSUME(front_tail_items <= additional_capacity);
171 memcpy(&new_data[old_capacity], &new_data[0], front_tail_items *
sizeof(
ITEM));
172 self->tail = old_capacity + front_tail_items - 1;
175 self->capacity = new_capacity;
176 self->data = new_data;
193 &self->data[self->tail],
sizeof(
ITEM));
194 self->data[self->tail] =
item;
204 if (self->head == 0) {
205 self->head = self->capacity - 1;
211 &self->data[self->head],
sizeof(
ITEM));
212 self->data[self->head] =
item;
221 ITEM value = self->data[self->head];
223 &self->data[self->head],
sizeof(
ITEM));
225 &self->data[self->head],
sizeof(
ITEM));
227 if (self->head == self->tail) {
240 ITEM value = self->data[self->tail];
242 &self->data[self->tail],
sizeof(
ITEM));
243 if (self->head == self->tail) {
246 if (self->tail == 0) {
247 self->tail = self->capacity - 1;
258 size_t const real_index =
260 return &self->data[real_index];
270 if (index <= self->tail) {
271 return &self->data[self->tail - index];
273 size_t const from_end = index - self->tail;
274 return &self->data[self->capacity - from_end];
285#define ITER NS(SELF, iter)
333 self->data, self->capacity *
sizeof(
ITEM));
340#define ITER_CONST NS(SELF, iter_const)
379 ITEM* new_data = NULL;
381 size_t new_capacity = 0;
399 .capacity = new_capacity,
402 .empty = self->empty,
403 .alloc = self->alloc,
440#undef INVARIANT_CHECK
static void debug(SELF const *self, dc_debug_fmt fmt, FILE *stream)
static void free(SELF *self, void *ptr)
static void * realloc(SELF *self, void *ptr, size_t size)
static void * malloc(SELF *self, size_t size)
static ITER_CONST get_iter_const(SELF const *self)
static bool empty(ITER const *iter)
static ITER get_iter(SELF *self)
static IV_PAIR next(ITER *iter)
static INDEX_TYPE size(SELF const *self)
#define INVARIANT_CHECK(self)
static bool empty_item(IV_PAIR const *item)
SLOT PRIV(block)[DC_ARENA_CHUNKED_BLOCK_SIZE(BLOCK_INDEX_BITS)]
static SELF clone(SELF const *self)
static SELF new_with_capacity_for(INDEX_TYPE items, ALLOC *alloc)
static ITEM pop_front(SELF *self)
static ITEM * try_write_from_back(SELF *self, size_t index)
static ITEM pop_back(SELF *self)
static void reserve(SELF *self, size_t new_capacity_for)
static ITEM * try_write_from_front(SELF *self, size_t index)
static void push_back(SELF *self, ITEM item)
static ITEM const * try_read_from_back(SELF const *self, size_t index)
static ITEM const * try_read_from_front(SELF const *self, size_t index)
static void PRIV set_inaccessible_memory_caps(SELF *self, dc_memory_tracker_capability cap)
static void push_front(SELF *self, ITEM item)
#define DC_TRAIT_QUEUE(SELF)
static ITEM * data(SELF *self)
dc_debug_fmt dc_debug_fmt_scope_end(dc_debug_fmt fmt)
static void dc_debug_fmt_print_indents(dc_debug_fmt fmt, FILE *stream)
dc_debug_fmt dc_debug_fmt_scope_begin(dc_debug_fmt fmt)
static void dc_debug_fmt_print(dc_debug_fmt fmt, FILE *stream, const char *format,...)
static dc_gdb_marker dc_gdb_marker_new()
#define DC_MATH_IS_POWER_OF_2(x)
static DC_INLINE DC_CONST size_t dc_math_next_power_of_2(size_t x)
static size_t DC_INLINE DC_CONST dc_math_modulus_power_of_2_capacity(size_t index, size_t capacity)
static void dc_memory_tracker_set(dc_memory_tracker_level level, dc_memory_tracker_capability cap, const volatile void *addr, size_t size)
static void dc_memory_tracker_check(dc_memory_tracker_level level, dc_memory_tracker_capability cap, const void *addr, size_t size)
@ DC_MEMORY_TRACKER_LVL_CONTAINER
dc_memory_tracker_capability
A wrapper over ASan & MSan. Containers and allocators can use this for custom ASan & MSan poisoning,...
@ DC_MEMORY_TRACKER_CAP_WRITE
@ DC_MEMORY_TRACKER_CAP_NONE
static mutation_tracker mutation_tracker_new()
static void mutation_version_check(mutation_version const *self)
static mutation_version mutation_tracker_get(mutation_tracker const *self)
static void mutation_tracker_mutate(mutation_tracker *self)
#define EXPAND_STRING(NAME)
#define DC_ASSERT(expr,...)
#define DC_ASSUME(expr,...)
#define TEMPLATE_ERROR(...)
With the user-provided name, even in nested templates.
mutation_tracker iterator_invalidation_tracker
dc_gdb_marker derive_c_circular
Debug format helpers for debug-printing data structures.
A queue backed by an extendable circular buffer.
tracks a specific version of a value, so that this can be compared later to check modification For ex...
static FILE * stream(SELF *self)
Opens a file for.