/* private_array_adaptive_map.h - C Container Collection (CCC) */
#ifndef CCC_PRIVATE_ARRAY_ADAPTIVE_MAP_H
#define CCC_PRIVATE_ARRAY_ADAPTIVE_MAP_H

#include <stddef.h>

#include "../types.h"
#include "private_types.h" /* NOLINT */

/* NOLINTBEGIN(readability-identifier-naming) */

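/** A node of the binary tree that organizes the map. Children and the parent
are stored as indices into the node array rather than as pointers. When a
slot sits on the free list, the same storage tracks the next free slot
instead of a parent. */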
struct CCC_Array_adaptive_map_node
{
    size_t branch[2];
    union
    {
        size_t parent;
        size_t next_free;
    };
};

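/** The map itself. User data and tree nodes live in two parallel arrays of
equal capacity, so every element is addressed by index. The key is found
inside each stored type at key_offset, count tracks stored elements, root is
the index of the tree root, and free_list heads the chain of reusable slots.
The comparator, optional allocator, and user context complete the state. */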
struct CCC_Array_adaptive_map
{
    void *data;
    struct CCC_Array_adaptive_map_node *nodes;
    size_t capacity;
    size_t count;
    size_t root;
    size_t free_list;
    size_t sizeof_type;
    size_t key_offset;
    CCC_Key_comparator *compare;
    CCC_Allocator *allocate;
    void *context;
};

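/** The result of searching the map for a key. It records the map searched,
the index of the found or would-be slot, the last three-way comparison
performed on the search path, and the entry status flags. */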
struct CCC_Array_adaptive_map_handle
{
    struct CCC_Array_adaptive_map *map;
    size_t index;
    CCC_Order last_order;
    CCC_Entry_status status;
};

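/** A wrapper around the private handle so the public interface can pass
handles by value without exposing the fields above directly. */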
union CCC_Array_adaptive_map_handle_wrapper /* tag name assumed */
{
    struct CCC_Array_adaptive_map_handle private;
};

/*=========================== Private Interface ===========================*/

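/** The search, slot management, and linking routines that the macros below
expand onto. handle locates a key and reports its status, allocate_slot
claims a free data/node slot (the macros treat an index of 0 as failure),
data_at and key_at turn a slot index into pointers into the data array, and
insert links an already written slot into the tree. */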
void CCC_private_array_adaptive_map_insert(struct CCC_Array_adaptive_map *,
                                           size_t);
struct CCC_Array_adaptive_map_handle
CCC_private_array_adaptive_map_handle(struct CCC_Array_adaptive_map *,
                                      void const *key);
void *
CCC_private_array_adaptive_map_data_at(struct CCC_Array_adaptive_map const *,
                                       size_t slot);
void *
CCC_private_array_adaptive_map_key_at(struct CCC_Array_adaptive_map const *,
                                      size_t slot);
size_t
CCC_private_array_adaptive_map_allocate_slot(struct CCC_Array_adaptive_map *);

/*======================== Initialization =========================*/

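/** Declares a typedef for a fixed-size map backing store: a data array and a
node array of private_capacity elements each, suitable for static or stack
storage. The capacity must be greater than 1, as the static_assert below
enforces. */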
#define CCC_private_array_adaptive_map_declare_fixed( \
    private_fixed_map_type_name, private_key_val_type_name, private_capacity) \
    static_assert((private_capacity) > 1, \
                  "fixed size map must have capacity greater than 1"); \
    typedef struct \
    { \
        private_key_val_type_name data[(private_capacity)]; \
        struct CCC_Array_adaptive_map_node nodes[(private_capacity)]; \
    }(private_fixed_map_type_name)

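/* A minimal usage sketch of the fixed declaration. The user type Kv and the
generated type name Fixed_kv_map are illustrative assumptions; real code
would normally reach this macro through the public array adaptive map
interface rather than calling it directly:

   struct Kv
   {
       int key;
       int val;
   };
   CCC_private_array_adaptive_map_declare_fixed(Fixed_kv_map, struct Kv, 64);
   static Fixed_kv_map fixed_storage;
*/

/** Recovers the capacity baked into a fixed map type by measuring its node
array. */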
#define CCC_private_array_adaptive_map_fixed_capacity(fixed_map_type_name) \
    (sizeof((fixed_map_type_name){}.nodes) \
     / sizeof(struct CCC_Array_adaptive_map_node))

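/** Expands to a designated initializer for struct CCC_Array_adaptive_map
given the stored type, the name of its key field, a key comparator, an
optional allocator, a user context, and the starting capacity and data
pointer. */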
#define CCC_private_array_adaptive_map_initialize( \
    private_type_name, private_key_node_field, private_key_order_fn, \
    private_allocate, private_context_data, private_capacity, \
    private_memory_pointer) \
    { \
        .data = (private_memory_pointer), \
        .nodes = NULL, \
        .capacity = (private_capacity), \
        .count = 0, \
        .root = 0, \
        .free_list = 0, \
        .sizeof_type = sizeof(private_type_name), \
        .key_offset = offsetof(private_type_name, private_key_node_field), \
        .compare = (private_key_order_fn), \
        .allocate = (private_allocate), \
        .context = (private_context_data), \
    }

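/** Builds a map by value from a compound literal array of elements. Room is
reserved for the larger of the element count and the optional capacity; each
element is then inserted by its key, and a repeated key overwrites the
element stored earlier. If the reservation fails, no elements are
inserted. */

/* A sketch of the intended call shape, assuming the illustrative Kv type
from the earlier sketch plus a comparator kv_order and an allocator
std_allocate defined by the user; the public interface likely wraps this
macro rather than exposing it directly:

   struct CCC_Array_adaptive_map m = CCC_private_array_adaptive_map_from(
       key, kv_order, std_allocate, NULL, 0,
       (struct Kv[]){{.key = 1, .val = 1}, {.key = 2, .val = 4}});
*/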
#define CCC_private_array_adaptive_map_from( \
    private_key_field, private_key_compare, private_allocate, \
    private_context_data, private_optional_cap, \
    private_array_compound_literal...) \
    (__extension__({ \
        typeof(*private_array_compound_literal) \
            *private_array_adaptive_map_initializer_list \
            = private_array_compound_literal; \
        struct CCC_Array_adaptive_map private_array_adaptive_map \
            = CCC_private_array_adaptive_map_initialize( \
                typeof(*private_array_adaptive_map_initializer_list), \
                private_key_field, private_key_compare, private_allocate, \
                private_context_data, 0, NULL); \
        size_t const private_array_adaptive_n \
            = sizeof(private_array_compound_literal) \
            / sizeof(*private_array_adaptive_map_initializer_list); \
        size_t const private_cap = private_optional_cap; \
        if (CCC_array_adaptive_map_reserve( \
                &private_array_adaptive_map, \
                (private_array_adaptive_n > private_cap \
                     ? private_array_adaptive_n \
                     : private_cap), \
                private_allocate) \
            == CCC_RESULT_OK) \
        { \
            for (size_t i = 0; i < private_array_adaptive_n; ++i) \
            { \
                struct CCC_Array_adaptive_map_handle \
                    private_array_adaptive_entry \
                    = CCC_private_array_adaptive_map_handle( \
                        &private_array_adaptive_map, \
                        (void const \
                             *)&private_array_adaptive_map_initializer_list[i] \
                            .private_key_field); \
                CCC_Handle_index private_index \
                    = private_array_adaptive_entry.index; \
                if (!(private_array_adaptive_entry.status \
                      & CCC_ENTRY_OCCUPIED)) \
                { \
                    private_index \
                        = CCC_private_array_adaptive_map_allocate_slot( \
                            &private_array_adaptive_map); \
                } \
                *((typeof(*private_array_adaptive_map_initializer_list) *) \
                      CCC_private_array_adaptive_map_data_at( \
                          private_array_adaptive_entry.map, private_index)) \
                    = private_array_adaptive_map_initializer_list[i]; \
                if (!(private_array_adaptive_entry.status \
                      & CCC_ENTRY_OCCUPIED)) \
                { \
                    CCC_private_array_adaptive_map_insert( \
                        private_array_adaptive_entry.map, private_index); \
                } \
            } \
        } \
        private_array_adaptive_map; \
    }))

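/** Builds an empty map by value and reserves the requested capacity with the
given allocator. The result of the reservation is discarded, so a failed
reservation yields a map with zero capacity. */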
#define CCC_private_array_adaptive_map_with_capacity( \
    private_type_name, private_key_field, private_key_compare, \
    private_allocate, private_context_data, private_cap) \
    (__extension__({ \
        struct CCC_Array_adaptive_map private_array_adaptive_map \
            = CCC_private_array_adaptive_map_initialize( \
                private_type_name, private_key_field, private_key_compare, \
                private_allocate, private_context_data, 0, NULL); \
        (void)CCC_array_adaptive_map_reserve(&private_array_adaptive_map, \
                                             private_cap, private_allocate); \
        private_array_adaptive_map; \
    }))

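/** Initializes a map over caller-provided fixed storage passed as a compound
literal of a type produced by the fixed declaration macro above. No
allocator is installed, so the map cannot grow past the fixed capacity. */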
#define CCC_private_array_adaptive_map_with_compound_literal( \
    private_key_node_field, private_key_order_fn, private_compound_literal) \
    { \
        .data = &(private_compound_literal), \
        .nodes = NULL, \
        .capacity = CCC_private_array_adaptive_map_fixed_capacity( \
            typeof(private_compound_literal)), \
        .count = 0, \
        .root = 0, \
        .free_list = 0, \
        .sizeof_type = sizeof(*(private_compound_literal.data)) /*NOLINT*/, \
        .key_offset \
        = offsetof(typeof(*(private_compound_literal.data)) /*NOLINT*/, \
                   private_key_node_field), \
        .compare = (private_key_order_fn), \
        .allocate = NULL, \
        .context = NULL, \
    }

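/** The same fixed-storage initializer as above, but with a user context
pointer installed. */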
#define CCC_private_array_adaptive_map_with_context_compound_literal( \
    private_key_node_field, private_key_order_fn, private_context, \
    private_compound_literal) \
    { \
        .data = &(private_compound_literal), \
        .nodes = NULL, \
        .capacity = CCC_private_array_adaptive_map_fixed_capacity( \
            typeof(private_compound_literal)), \
        .count = 0, \
        .root = 0, \
        .free_list = 0, \
        .sizeof_type = sizeof(*(private_compound_literal.data)) /*NOLINT*/, \
        .key_offset \
        = offsetof(typeof(*(private_compound_literal.data)) /*NOLINT*/, \
                   private_key_node_field), \
        .compare = (private_key_order_fn), \
        .allocate = NULL, \
        .context = (private_context), \
    }

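/** Initializes an empty map that owns no storage yet and grows on demand
through the provided allocator. */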
#define CCC_private_array_adaptive_map_with_allocator( \
    private_type_name, private_key_field, private_compare, private_allocate) \
    { \
        .data = NULL, \
        .nodes = NULL, \
        .capacity = 0, \
        .count = 0, \
        .root = 0, \
        .free_list = 0, \
        .sizeof_type = sizeof(private_type_name), \
        .key_offset = offsetof(private_type_name, private_key_field), \
        .compare = (private_compare), \
        .allocate = (private_allocate), \
        .context = NULL, \
    }

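/** The same allocator-backed initializer as above, but with a user context
pointer installed. */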
#define CCC_private_array_adaptive_map_with_context_allocator( \
    private_type_name, private_key_field, private_compare, private_allocate, \
    private_context) \
    { \
        .data = NULL, \
        .nodes = NULL, \
        .capacity = 0, \
        .count = 0, \
        .root = 0, \
        .free_list = 0, \
        .sizeof_type = sizeof(private_type_name), \
        .key_offset = offsetof(private_type_name, private_key_field), \
        .compare = (private_compare), \
        .allocate = (private_allocate), \
        .context = (private_context), \
    }

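/** Casts the element at the given handle index to a pointer of the user's
type. */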
#define CCC_private_array_adaptive_map_as(array_adaptive_map_pointer, \
                                          type_name, handle...) \
    ((type_name *)CCC_private_array_adaptive_map_data_at( \
        (array_adaptive_map_pointer), (handle)))

/*================== Core Macro Implementations =====================*/

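/** Runs the user's closure over a pointer named T to the occupied element
referenced by the handle, then yields the handle unchanged. A NULL handle
pointer yields a handle with status CCC_ENTRY_ARGUMENT_ERROR, and a vacant
handle passes through untouched. */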
#define CCC_private_array_adaptive_map_and_modify_with( \
    array_adaptive_map_array_pointer, type_name, closure_over_T...) \
    (__extension__({ \
        __auto_type private_array_adaptive_map_mod_hndl_pointer \
            = (array_adaptive_map_array_pointer); \
        struct CCC_Array_adaptive_map_handle \
            private_array_adaptive_map_mod_hndl \
            = {.status = CCC_ENTRY_ARGUMENT_ERROR}; \
        if (private_array_adaptive_map_mod_hndl_pointer) \
        { \
            private_array_adaptive_map_mod_hndl \
                = private_array_adaptive_map_mod_hndl_pointer->private; \
            if (private_array_adaptive_map_mod_hndl.status \
                & CCC_ENTRY_OCCUPIED) \
            { \
                type_name *const T = CCC_private_array_adaptive_map_data_at( \
                    private_array_adaptive_map_mod_hndl.map, \
                    private_array_adaptive_map_mod_hndl.index); \
                if (T) \
                { \
                    closure_over_T \
                } \
            } \
        } \
        private_array_adaptive_map_mod_hndl; \
    }))

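/** Yields the index of the element referenced by an occupied handle, or
inserts the given compound literal into a newly allocated slot and yields
that index. An index of 0 means the handle pointer was NULL or no slot could
be allocated. */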
#define CCC_private_array_adaptive_map_or_insert_with( \
    array_adaptive_map_array_pointer, type_compound_literal...) \
    (__extension__({ \
        __auto_type private_array_adaptive_map_or_ins_hndl_pointer \
            = (array_adaptive_map_array_pointer); \
        CCC_Handle_index private_array_adaptive_map_or_ins_ret = 0; \
        if (private_array_adaptive_map_or_ins_hndl_pointer) \
        { \
            if (private_array_adaptive_map_or_ins_hndl_pointer->private.status \
                == CCC_ENTRY_OCCUPIED) \
            { \
                private_array_adaptive_map_or_ins_ret \
                    = private_array_adaptive_map_or_ins_hndl_pointer->private \
                          .index; \
            } \
            else \
            { \
                private_array_adaptive_map_or_ins_ret \
                    = CCC_private_array_adaptive_map_allocate_slot( \
                        private_array_adaptive_map_or_ins_hndl_pointer \
                            ->private.map); \
                if (private_array_adaptive_map_or_ins_ret) \
                { \
                    *((typeof(type_compound_literal) *) \
                          CCC_private_array_adaptive_map_data_at( \
                              private_array_adaptive_map_or_ins_hndl_pointer \
                                  ->private.map, \
                              private_array_adaptive_map_or_ins_ret)) \
                        = type_compound_literal; \
                    CCC_private_array_adaptive_map_insert( \
                        private_array_adaptive_map_or_ins_hndl_pointer \
                            ->private.map, \
                        private_array_adaptive_map_or_ins_ret); \
                } \
            } \
        } \
        private_array_adaptive_map_or_ins_ret; \
    }))

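/** Writes the compound literal through the handle unconditionally: a vacant
handle gets a freshly allocated and linked slot, while an occupied handle
has its element overwritten in place. Yields the written index, or 0 when
the handle pointer is NULL or allocation fails. */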
#define CCC_private_array_adaptive_map_insert_array_with( \
    array_adaptive_map_array_pointer, type_compound_literal...) \
    (__extension__({ \
        __auto_type private_array_adaptive_map_ins_hndl_pointer \
            = (array_adaptive_map_array_pointer); \
        CCC_Handle_index private_array_adaptive_map_ins_hndl_ret = 0; \
        if (private_array_adaptive_map_ins_hndl_pointer) \
        { \
            if (!(private_array_adaptive_map_ins_hndl_pointer->private.status \
                  & CCC_ENTRY_OCCUPIED)) \
            { \
                private_array_adaptive_map_ins_hndl_ret \
                    = CCC_private_array_adaptive_map_allocate_slot( \
                        private_array_adaptive_map_ins_hndl_pointer->private \
                            .map); \
                if (private_array_adaptive_map_ins_hndl_ret) \
                { \
                    *((typeof(type_compound_literal) *) \
                          CCC_private_array_adaptive_map_data_at( \
                              private_array_adaptive_map_ins_hndl_pointer \
                                  ->private.map, \
                              private_array_adaptive_map_ins_hndl_ret)) \
                        = type_compound_literal; \
                    CCC_private_array_adaptive_map_insert( \
                        private_array_adaptive_map_ins_hndl_pointer->private \
                            .map, \
                        private_array_adaptive_map_ins_hndl_ret); \
                } \
            } \
            else if (private_array_adaptive_map_ins_hndl_pointer->private \
                         .status \
                     == CCC_ENTRY_OCCUPIED) \
            { \
                *((typeof(type_compound_literal) *) \
                      CCC_private_array_adaptive_map_data_at( \
                          private_array_adaptive_map_ins_hndl_pointer->private \
                              .map, \
                          private_array_adaptive_map_ins_hndl_pointer->private \
                              .index)) \
                    = type_compound_literal; \
                private_array_adaptive_map_ins_hndl_ret \
                    = private_array_adaptive_map_ins_hndl_pointer->private \
                          .index; \
            } \
        } \
        private_array_adaptive_map_ins_hndl_ret; \
    }))

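/** Searches for key and inserts the compound literal only if the key is
absent, writing the provided key over the new element's key field afterward.
Yields a CCC_Handle whose status is CCC_ENTRY_VACANT after a successful
insertion, CCC_ENTRY_OCCUPIED when the key already existed (the element is
left untouched), CCC_ENTRY_INSERT_ERROR when no slot could be allocated, or
CCC_ENTRY_ARGUMENT_ERROR when the map pointer is NULL. */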
#define CCC_private_array_adaptive_map_try_insert_with( \
    array_adaptive_map_pointer, key, type_compound_literal...) \
    (__extension__({ \
        __auto_type private_array_adaptive_map_try_ins_map_pointer \
            = (array_adaptive_map_pointer); \
        struct CCC_Handle private_array_adaptive_map_try_ins_hndl_ret \
            = {.status = CCC_ENTRY_ARGUMENT_ERROR}; \
        if (private_array_adaptive_map_try_ins_map_pointer) \
        { \
            __auto_type private_array_adaptive_map_key = (key); \
            struct CCC_Array_adaptive_map_handle \
                private_array_adaptive_map_try_ins_hndl \
                = CCC_private_array_adaptive_map_handle( \
                    private_array_adaptive_map_try_ins_map_pointer, \
                    (void *)&private_array_adaptive_map_key); \
            if (!(private_array_adaptive_map_try_ins_hndl.status \
                  & CCC_ENTRY_OCCUPIED)) \
            { \
                private_array_adaptive_map_try_ins_hndl_ret \
                    = (struct CCC_Handle){ \
                        .index = CCC_private_array_adaptive_map_allocate_slot( \
                            private_array_adaptive_map_try_ins_hndl.map), \
                        .status = CCC_ENTRY_INSERT_ERROR, \
                    }; \
                if (private_array_adaptive_map_try_ins_hndl_ret.index) \
                { \
                    *((typeof(type_compound_literal) *) \
                          CCC_private_array_adaptive_map_data_at( \
                              private_array_adaptive_map_try_ins_map_pointer, \
                              private_array_adaptive_map_try_ins_hndl_ret \
                                  .index)) \
                        = type_compound_literal; \
                    *((typeof(private_array_adaptive_map_key) *) \
                          CCC_private_array_adaptive_map_key_at( \
                              private_array_adaptive_map_try_ins_hndl.map, \
                              private_array_adaptive_map_try_ins_hndl_ret \
                                  .index)) \
                        = private_array_adaptive_map_key; \
                    CCC_private_array_adaptive_map_insert( \
                        private_array_adaptive_map_try_ins_hndl.map, \
                        private_array_adaptive_map_try_ins_hndl_ret.index); \
                    private_array_adaptive_map_try_ins_hndl_ret.status \
                        = CCC_ENTRY_VACANT; \
                } \
            } \
            else if (private_array_adaptive_map_try_ins_hndl.status \
                     == CCC_ENTRY_OCCUPIED) \
            { \
                private_array_adaptive_map_try_ins_hndl_ret \
                    = (struct CCC_Handle){ \
                        .index \
                        = private_array_adaptive_map_try_ins_hndl.index, \
                        .status \
                        = private_array_adaptive_map_try_ins_hndl.status}; \
            } \
        } \
        private_array_adaptive_map_try_ins_hndl_ret; \
    }))

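/** Searches for key and either inserts the compound literal into a new slot
or assigns it over the existing element, rewriting the key field in both
cases. The yielded CCC_Handle status mirrors the macro above:
CCC_ENTRY_VACANT for a fresh insertion, CCC_ENTRY_OCCUPIED for an
assignment, CCC_ENTRY_INSERT_ERROR when allocation fails, and
CCC_ENTRY_ARGUMENT_ERROR for a NULL map pointer. */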
#define CCC_private_array_adaptive_map_insert_or_assign_with( \
    array_adaptive_map_pointer, key, type_compound_literal...) \
    (__extension__({ \
        __auto_type private_array_adaptive_map_ins_or_assign_map_pointer \
            = (array_adaptive_map_pointer); \
        struct CCC_Handle private_array_adaptive_map_ins_or_assign_hndl_ret \
            = {.status = CCC_ENTRY_ARGUMENT_ERROR}; \
        if (private_array_adaptive_map_ins_or_assign_map_pointer) \
        { \
            __auto_type private_array_adaptive_map_key = (key); \
            struct CCC_Array_adaptive_map_handle \
                private_array_adaptive_map_ins_or_assign_hndl \
                = CCC_private_array_adaptive_map_handle( \
                    private_array_adaptive_map_ins_or_assign_map_pointer, \
                    (void *)&private_array_adaptive_map_key); \
            if (!(private_array_adaptive_map_ins_or_assign_hndl.status \
                  & CCC_ENTRY_OCCUPIED)) \
            { \
                private_array_adaptive_map_ins_or_assign_hndl_ret \
                    = (struct CCC_Handle){ \
                        .index = CCC_private_array_adaptive_map_allocate_slot( \
                            private_array_adaptive_map_ins_or_assign_hndl \
                                .map), \
                        .status = CCC_ENTRY_INSERT_ERROR, \
                    }; \
                if (private_array_adaptive_map_ins_or_assign_hndl_ret.index) \
                { \
                    *((typeof(type_compound_literal) *) \
                          CCC_private_array_adaptive_map_data_at( \
                              private_array_adaptive_map_ins_or_assign_map_pointer, \
                              private_array_adaptive_map_ins_or_assign_hndl_ret \
                                  .index)) \
                        = type_compound_literal; \
                    *((typeof(private_array_adaptive_map_key) *) \
                          CCC_private_array_adaptive_map_key_at( \
                              private_array_adaptive_map_ins_or_assign_hndl \
                                  .map, \
                              private_array_adaptive_map_ins_or_assign_hndl_ret \
                                  .index)) \
                        = private_array_adaptive_map_key; \
                    CCC_private_array_adaptive_map_insert( \
                        private_array_adaptive_map_ins_or_assign_hndl.map, \
                        private_array_adaptive_map_ins_or_assign_hndl_ret \
                            .index); \
                    private_array_adaptive_map_ins_or_assign_hndl_ret.status \
                        = CCC_ENTRY_VACANT; \
                } \
            } \
            else if (private_array_adaptive_map_ins_or_assign_hndl.status \
                     == CCC_ENTRY_OCCUPIED) \
            { \
                *((typeof(type_compound_literal) *) \
                      CCC_private_array_adaptive_map_data_at( \
                          private_array_adaptive_map_ins_or_assign_hndl.map, \
                          private_array_adaptive_map_ins_or_assign_hndl \
                              .index)) \
                    = type_compound_literal; \
                private_array_adaptive_map_ins_or_assign_hndl_ret \
                    = (struct CCC_Handle){ \
                        .index \
                        = private_array_adaptive_map_ins_or_assign_hndl.index, \
                        .status \
                        = private_array_adaptive_map_ins_or_assign_hndl \
                              .status, \
                    }; \
                *((typeof(private_array_adaptive_map_key) *) \
                      CCC_private_array_adaptive_map_key_at( \
                          private_array_adaptive_map_ins_or_assign_hndl.map, \
                          private_array_adaptive_map_ins_or_assign_hndl \
                              .index)) \
                    = private_array_adaptive_map_key; \
            } \
        } \
        private_array_adaptive_map_ins_or_assign_hndl_ret; \
    }))

/* NOLINTEND(readability-identifier-naming) */

#endif /* CCC_PRIVATE_ARRAY_ADAPTIVE_MAP_H */