#ifndef CCC_PRIVATE_ARRAY_ADAPTIVE_MAP_H
#define CCC_PRIVATE_ARRAY_ADAPTIVE_MAP_H

#include "private_types.h"
#define CCC_private_array_adaptive_map_declare_fixed( \
    private_fixed_map_type_name, private_key_val_type_name, private_capacity) \
    static_assert((private_capacity) > 1, \
                  "fixed size map must have capacity greater than 1"); \
    typedef struct \
    { \
        private_key_val_type_name data[(private_capacity)]; \
        struct CCC_Array_adaptive_map_node nodes[(private_capacity)]; \
    }(private_fixed_map_type_name)
#define CCC_private_array_adaptive_map_fixed_capacity(fixed_map_type_name) \
    (sizeof((fixed_map_type_name){}.nodes) \
     / sizeof(struct CCC_Array_adaptive_map_node))
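/* Usage sketch (illustrative only; `struct kv` and `small_map` are
   hypothetical user names). A fixed map type is declared once, and its
   capacity can later be recovered from the generated type alone:

       struct kv
       {
           int key;
           int val;
       };
       CCC_private_array_adaptive_map_declare_fixed(small_map, struct kv, 64);
       static_assert(CCC_private_array_adaptive_map_fixed_capacity(small_map)
                     == 64);
*/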
#define CCC_private_array_adaptive_map_initialize( \
    private_type_name, private_key_node_field, private_key_order_fn, \
    private_allocate, private_context_data, private_capacity, \
    private_memory_pointer) \
    { \
        .data = (private_memory_pointer), \
        .capacity = (private_capacity), \
        .sizeof_type = sizeof(private_type_name), \
        .key_offset = offsetof(private_type_name, private_key_node_field), \
        .compare = (private_key_order_fn), \
        .allocate = (private_allocate), \
        .context = (private_context_data), \
    }
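/* Sketch of the initializer in use (the type `struct kv`, its `key` field,
   and the comparator `order_kv_keys` are hypothetical). The macro expands to
   a designated initializer for struct CCC_Array_adaptive_map, so unmentioned
   members are zero initialized:

       struct CCC_Array_adaptive_map m
           = CCC_private_array_adaptive_map_initialize(
               struct kv, key, order_kv_keys, NULL, NULL, 0, NULL);
*/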
#define CCC_private_array_adaptive_map_from( \
    private_key_field, private_key_compare, private_allocate, \
    private_context_data, private_optional_cap, \
    private_array_compound_literal...) \
    ({ \
        typeof(*private_array_compound_literal) \
            *private_array_adaptive_map_initializer_list \
            = private_array_compound_literal; \
        struct CCC_Array_adaptive_map private_array_adaptive_map \
            = CCC_private_array_adaptive_map_initialize( \
                typeof(*private_array_adaptive_map_initializer_list), \
                private_key_field, private_key_compare, private_allocate, \
                private_context_data, 0, NULL); \
        size_t const private_array_adaptive_n \
            = sizeof(private_array_compound_literal) \
              / sizeof(*private_array_adaptive_map_initializer_list); \
        size_t const private_cap = private_optional_cap; \
        if (CCC_array_adaptive_map_reserve( \
                &private_array_adaptive_map, \
                (private_array_adaptive_n > private_cap \
                     ? private_array_adaptive_n \
                     : private_cap), \
                private_allocate) \
            == CCC_RESULT_OK) \
        { \
            for (size_t i = 0; i < private_array_adaptive_n; ++i) \
            { \
                struct CCC_Array_adaptive_map_handle \
                    private_array_adaptive_entry \
                    = CCC_private_array_adaptive_map_handle( \
                        &private_array_adaptive_map, \
                        (void *)&private_array_adaptive_map_initializer_list[i] \
                            .private_key_field); \
                CCC_Handle_index private_index \
                    = private_array_adaptive_entry.index; \
                if (!(private_array_adaptive_entry.status \
                      & CCC_ENTRY_OCCUPIED)) \
                { \
                    private_index \
                        = CCC_private_array_adaptive_map_allocate_slot( \
                            &private_array_adaptive_map); \
                } \
                *((typeof(*private_array_adaptive_map_initializer_list) *) \
                      CCC_private_array_adaptive_map_data_at( \
                          private_array_adaptive_entry.map, private_index)) \
                    = private_array_adaptive_map_initializer_list[i]; \
                if (!(private_array_adaptive_entry.status \
                      & CCC_ENTRY_OCCUPIED)) \
                { \
                    CCC_private_array_adaptive_map_insert( \
                        private_array_adaptive_entry.map, private_index); \
                } \
            } \
        } \
        private_array_adaptive_map; \
    })
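/* Sketch of building a map from an array compound literal (`order_kv_keys`
   and the allocator `std_allocate` are hypothetical). The statement
   expression reserves max(array length, optional capacity) slots and then
   inserts every element through its handle:

       struct CCC_Array_adaptive_map m = CCC_private_array_adaptive_map_from(
           key, order_kv_keys, std_allocate, NULL, 0,
           (struct kv[]){{.key = 1}, {.key = 2}, {.key = 3}});
*/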
#define CCC_private_array_adaptive_map_with_capacity( \
    private_type_name, private_key_field, private_key_compare, \
    private_allocate, private_context_data, private_cap) \
    ({ \
        struct CCC_Array_adaptive_map private_array_adaptive_map \
            = CCC_private_array_adaptive_map_initialize( \
                private_type_name, private_key_field, private_key_compare, \
                private_allocate, private_context_data, 0, NULL); \
        (void)CCC_array_adaptive_map_reserve(&private_array_adaptive_map, \
                                             private_cap, private_allocate); \
        private_array_adaptive_map; \
    })
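/* Sketch: constructing with capacity reserved up front (`std_allocate` is a
   hypothetical CCC_Allocator). The reserve result is discarded, so callers
   that must not fail silently should verify the capacity afterwards:

       struct CCC_Array_adaptive_map m
           = CCC_private_array_adaptive_map_with_capacity(
               struct kv, key, order_kv_keys, std_allocate, NULL, 128);
*/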
#define CCC_private_array_adaptive_map_with_compound_literal( \
    private_key_node_field, private_key_order_fn, private_compound_literal) \
    { \
        .data = &(private_compound_literal), \
        .capacity = CCC_private_array_adaptive_map_fixed_capacity( \
            typeof(private_compound_literal)), \
        .sizeof_type = sizeof(*(private_compound_literal.data)), \
        .key_offset \
            = offsetof(typeof(*(private_compound_literal.data)), \
                       private_key_node_field), \
        .compare = (private_key_order_fn), \
    }
#define CCC_private_array_adaptive_map_with_context_compound_literal( \
    private_key_node_field, private_key_order_fn, private_context, \
    private_compound_literal) \
    { \
        .data = &(private_compound_literal), \
        .capacity = CCC_private_array_adaptive_map_fixed_capacity( \
            typeof(private_compound_literal)), \
        .sizeof_type = sizeof(*(private_compound_literal.data)), \
        .key_offset \
            = offsetof(typeof(*(private_compound_literal.data)), \
                       private_key_node_field), \
        .compare = (private_key_order_fn), \
        .context = (private_context), \
    }
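/* Sketch: wrapping fixed storage given as a compound literal of a type made
   by the declare_fixed macro above (`small_map` and `order_kv_keys` are
   hypothetical). The context variant only adds the user context pointer:

       struct CCC_Array_adaptive_map m
           = CCC_private_array_adaptive_map_with_compound_literal(
               key, order_kv_keys, (small_map){});
*/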
#define CCC_private_array_adaptive_map_with_allocator( \
    private_type_name, private_key_field, private_compare, private_allocate) \
    { \
        .sizeof_type = sizeof(private_type_name), \
        .key_offset = offsetof(private_type_name, private_key_field), \
        .compare = (private_compare), \
        .allocate = (private_allocate), \
    }
#define CCC_private_array_adaptive_map_with_context_allocator( \
    private_type_name, private_key_field, private_compare, private_allocate, \
    private_context) \
    { \
        .sizeof_type = sizeof(private_type_name), \
        .key_offset = offsetof(private_type_name, private_key_field), \
        .compare = (private_compare), \
        .allocate = (private_allocate), \
        .context = (private_context), \
    }
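/* Sketch: a map with no initial storage that grows through the supplied
   allocator (`std_allocate` is a hypothetical CCC_Allocator; the context
   variant additionally records a user pointer handed to callbacks):

       struct CCC_Array_adaptive_map m
           = CCC_private_array_adaptive_map_with_allocator(
               struct kv, key, order_kv_keys, std_allocate);
*/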
#define CCC_private_array_adaptive_map_as(array_adaptive_map_pointer, \
                                          type_name, handle...) \
    ((type_name *)CCC_private_array_adaptive_map_data_at( \
        (array_adaptive_map_pointer), (handle)))
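/* Sketch: turning a handle index back into a typed pointer (the index `i` is
   assumed to come from one of the insertion macros below):

       struct kv *const v = CCC_private_array_adaptive_map_as(&m, struct kv, i);
*/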
#define CCC_private_array_adaptive_map_and_modify_with( \
    array_adaptive_map_array_pointer, type_name, closure_over_T...) \
    ({ \
        __auto_type private_array_adaptive_map_mod_hndl_pointer \
            = (array_adaptive_map_array_pointer); \
        struct CCC_Array_adaptive_map_handle \
            private_array_adaptive_map_mod_hndl \
            = {.status = CCC_ENTRY_ARGUMENT_ERROR}; \
        if (private_array_adaptive_map_mod_hndl_pointer) \
        { \
            private_array_adaptive_map_mod_hndl \
                = private_array_adaptive_map_mod_hndl_pointer->private; \
            if (private_array_adaptive_map_mod_hndl.status \
                & CCC_ENTRY_OCCUPIED) \
            { \
                type_name *const T = CCC_private_array_adaptive_map_data_at( \
                    private_array_adaptive_map_mod_hndl.map, \
                    private_array_adaptive_map_mod_hndl.index); \
                if (T) \
                { \
                    closure_over_T \
                } \
            } \
        } \
        private_array_adaptive_map_mod_hndl; \
    })
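/* Sketch: run a user closure over the stored value only when the handle is
   occupied. `T` is the typed pointer bound for the closure body; `hndl_ptr`
   stands in for a pointer to the public handle wrapper whose `.private`
   member is a struct CCC_Array_adaptive_map_handle:

       struct CCC_Array_adaptive_map_handle h
           = CCC_private_array_adaptive_map_and_modify_with(
               hndl_ptr, struct kv, { T->val += 1; });
*/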
#define CCC_private_array_adaptive_map_or_insert_with( \
    array_adaptive_map_array_pointer, type_compound_literal...) \
    ({ \
        __auto_type private_array_adaptive_map_or_ins_hndl_pointer \
            = (array_adaptive_map_array_pointer); \
        CCC_Handle_index private_array_adaptive_map_or_ins_ret = 0; \
        if (private_array_adaptive_map_or_ins_hndl_pointer) \
        { \
            if (private_array_adaptive_map_or_ins_hndl_pointer->private.status \
                == CCC_ENTRY_OCCUPIED) \
            { \
                private_array_adaptive_map_or_ins_ret \
                    = private_array_adaptive_map_or_ins_hndl_pointer->private \
                          .index; \
            } \
            else \
            { \
                private_array_adaptive_map_or_ins_ret \
                    = CCC_private_array_adaptive_map_allocate_slot( \
                        private_array_adaptive_map_or_ins_hndl_pointer \
                            ->private.map); \
                if (private_array_adaptive_map_or_ins_ret) \
                { \
                    *((typeof(type_compound_literal) *) \
                          CCC_private_array_adaptive_map_data_at( \
                              private_array_adaptive_map_or_ins_hndl_pointer \
                                  ->private.map, \
                              private_array_adaptive_map_or_ins_ret)) \
                        = type_compound_literal; \
                    CCC_private_array_adaptive_map_insert( \
                        private_array_adaptive_map_or_ins_hndl_pointer \
                            ->private.map, \
                        private_array_adaptive_map_or_ins_ret); \
                } \
            } \
        } \
        private_array_adaptive_map_or_ins_ret; \
    })
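/* Sketch: return the index of an occupied entry, or insert the compound
   literal into a freshly allocated slot when vacant; a zero index reports
   failure (`hndl_ptr` is the hypothetical wrapper pointer from above):

       CCC_Handle_index i = CCC_private_array_adaptive_map_or_insert_with(
           hndl_ptr, (struct kv){.key = 42, .val = 7});
*/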
#define CCC_private_array_adaptive_map_insert_array_with( \
    array_adaptive_map_array_pointer, type_compound_literal...) \
    ({ \
        __auto_type private_array_adaptive_map_ins_hndl_pointer \
            = (array_adaptive_map_array_pointer); \
        CCC_Handle_index private_array_adaptive_map_ins_hndl_ret = 0; \
        if (private_array_adaptive_map_ins_hndl_pointer) \
        { \
            if (!(private_array_adaptive_map_ins_hndl_pointer->private.status \
                  & CCC_ENTRY_OCCUPIED)) \
            { \
                private_array_adaptive_map_ins_hndl_ret \
                    = CCC_private_array_adaptive_map_allocate_slot( \
                        private_array_adaptive_map_ins_hndl_pointer->private \
                            .map); \
                if (private_array_adaptive_map_ins_hndl_ret) \
                { \
                    *((typeof(type_compound_literal) *) \
                          CCC_private_array_adaptive_map_data_at( \
                              private_array_adaptive_map_ins_hndl_pointer \
                                  ->private.map, \
                              private_array_adaptive_map_ins_hndl_ret)) \
                        = type_compound_literal; \
                    CCC_private_array_adaptive_map_insert( \
                        private_array_adaptive_map_ins_hndl_pointer->private \
                            .map, \
                        private_array_adaptive_map_ins_hndl_ret); \
                } \
            } \
            else if (private_array_adaptive_map_ins_hndl_pointer->private \
                         .status \
                     == CCC_ENTRY_OCCUPIED) \
            { \
                *((typeof(type_compound_literal) *) \
                      CCC_private_array_adaptive_map_data_at( \
                          private_array_adaptive_map_ins_hndl_pointer->private \
                              .map, \
                          private_array_adaptive_map_ins_hndl_pointer->private \
                              .index)) \
                    = type_compound_literal; \
                private_array_adaptive_map_ins_hndl_ret \
                    = private_array_adaptive_map_ins_hndl_pointer->private \
                          .index; \
            } \
        } \
        private_array_adaptive_map_ins_hndl_ret; \
    })
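/* Sketch: unlike or_insert above, an occupied entry has its user data
   overwritten with the new compound literal instead of being left untouched:

       CCC_Handle_index i = CCC_private_array_adaptive_map_insert_array_with(
           hndl_ptr, (struct kv){.key = 42, .val = 8});
*/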
#define CCC_private_array_adaptive_map_try_insert_with( \
    array_adaptive_map_pointer, key, type_compound_literal...) \
    ({ \
        __auto_type private_array_adaptive_map_try_ins_map_pointer \
            = (array_adaptive_map_pointer); \
        struct CCC_Handle private_array_adaptive_map_try_ins_hndl_ret \
            = {.status = CCC_ENTRY_ARGUMENT_ERROR}; \
        if (private_array_adaptive_map_try_ins_map_pointer) \
        { \
            __auto_type private_array_adaptive_map_key = (key); \
            struct CCC_Array_adaptive_map_handle \
                private_array_adaptive_map_try_ins_hndl \
                = CCC_private_array_adaptive_map_handle( \
                    private_array_adaptive_map_try_ins_map_pointer, \
                    (void *)&private_array_adaptive_map_key); \
            if (!(private_array_adaptive_map_try_ins_hndl.status \
                  & CCC_ENTRY_OCCUPIED)) \
            { \
                private_array_adaptive_map_try_ins_hndl_ret \
                    = (struct CCC_Handle){ \
                        .index = CCC_private_array_adaptive_map_allocate_slot( \
                            private_array_adaptive_map_try_ins_hndl.map), \
                        .status = CCC_ENTRY_INSERT_ERROR, \
                    }; \
                if (private_array_adaptive_map_try_ins_hndl_ret.index) \
                { \
                    *((typeof(type_compound_literal) *) \
                          CCC_private_array_adaptive_map_data_at( \
                              private_array_adaptive_map_try_ins_map_pointer, \
                              private_array_adaptive_map_try_ins_hndl_ret \
                                  .index)) \
                        = type_compound_literal; \
                    *((typeof(private_array_adaptive_map_key) *) \
                          CCC_private_array_adaptive_map_key_at( \
                              private_array_adaptive_map_try_ins_hndl.map, \
                              private_array_adaptive_map_try_ins_hndl_ret \
                                  .index)) \
                        = private_array_adaptive_map_key; \
                    CCC_private_array_adaptive_map_insert( \
                        private_array_adaptive_map_try_ins_hndl.map, \
                        private_array_adaptive_map_try_ins_hndl_ret.index); \
                    private_array_adaptive_map_try_ins_hndl_ret.status \
                        = CCC_ENTRY_VACANT; \
                } \
            } \
            else if (private_array_adaptive_map_try_ins_hndl.status \
                     == CCC_ENTRY_OCCUPIED) \
            { \
                private_array_adaptive_map_try_ins_hndl_ret \
                    = (struct CCC_Handle){ \
                        .index = private_array_adaptive_map_try_ins_hndl.index, \
                        .status \
                        = private_array_adaptive_map_try_ins_hndl.status}; \
            } \
        } \
        private_array_adaptive_map_try_ins_hndl_ret; \
    })
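/* Sketch: keyed try-insert. The returned struct CCC_Handle reports
   CCC_ENTRY_OCCUPIED when the key already exists, CCC_ENTRY_VACANT after a
   successful insert, and an error status otherwise (`m` and the kv values
   are hypothetical):

       struct CCC_Handle h = CCC_private_array_adaptive_map_try_insert_with(
           &m, 42, (struct kv){.key = 42, .val = 7});
*/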
#define CCC_private_array_adaptive_map_insert_or_assign_with( \
    array_adaptive_map_pointer, key, type_compound_literal...) \
    ({ \
        __auto_type private_array_adaptive_map_ins_or_assign_map_pointer \
            = (array_adaptive_map_pointer); \
        struct CCC_Handle private_array_adaptive_map_ins_or_assign_hndl_ret \
            = {.status = CCC_ENTRY_ARGUMENT_ERROR}; \
        if (private_array_adaptive_map_ins_or_assign_map_pointer) \
        { \
            __auto_type private_array_adaptive_map_key = (key); \
            struct CCC_Array_adaptive_map_handle \
                private_array_adaptive_map_ins_or_assign_hndl \
                = CCC_private_array_adaptive_map_handle( \
                    private_array_adaptive_map_ins_or_assign_map_pointer, \
                    (void *)&private_array_adaptive_map_key); \
            if (!(private_array_adaptive_map_ins_or_assign_hndl.status \
                  & CCC_ENTRY_OCCUPIED)) \
            { \
                private_array_adaptive_map_ins_or_assign_hndl_ret \
                    = (struct CCC_Handle){ \
                        .index = CCC_private_array_adaptive_map_allocate_slot( \
                            private_array_adaptive_map_ins_or_assign_hndl \
                                .map), \
                        .status = CCC_ENTRY_INSERT_ERROR, \
                    }; \
                if (private_array_adaptive_map_ins_or_assign_hndl_ret.index) \
                { \
                    *((typeof(type_compound_literal) *) \
                          CCC_private_array_adaptive_map_data_at( \
                              private_array_adaptive_map_ins_or_assign_map_pointer, \
                              private_array_adaptive_map_ins_or_assign_hndl_ret \
                                  .index)) \
                        = type_compound_literal; \
                    *((typeof(private_array_adaptive_map_key) *) \
                          CCC_private_array_adaptive_map_key_at( \
                              private_array_adaptive_map_ins_or_assign_hndl \
                                  .map, \
                              private_array_adaptive_map_ins_or_assign_hndl_ret \
                                  .index)) \
                        = private_array_adaptive_map_key; \
                    CCC_private_array_adaptive_map_insert( \
                        private_array_adaptive_map_ins_or_assign_hndl.map, \
                        private_array_adaptive_map_ins_or_assign_hndl_ret \
                            .index); \
                    private_array_adaptive_map_ins_or_assign_hndl_ret.status \
                        = CCC_ENTRY_VACANT; \
                } \
            } \
            else if (private_array_adaptive_map_ins_or_assign_hndl.status \
                     == CCC_ENTRY_OCCUPIED) \
            { \
                *((typeof(type_compound_literal) *) \
                      CCC_private_array_adaptive_map_data_at( \
                          private_array_adaptive_map_ins_or_assign_hndl.map, \
                          private_array_adaptive_map_ins_or_assign_hndl \
                              .index)) \
                    = type_compound_literal; \
                private_array_adaptive_map_ins_or_assign_hndl_ret \
                    = (struct CCC_Handle){ \
                        .index \
                        = private_array_adaptive_map_ins_or_assign_hndl.index, \
                        .status \
                        = private_array_adaptive_map_ins_or_assign_hndl \
                              .status}; \
                *((typeof(private_array_adaptive_map_key) *) \
                      CCC_private_array_adaptive_map_key_at( \
                          private_array_adaptive_map_ins_or_assign_hndl.map, \
                          private_array_adaptive_map_ins_or_assign_hndl \
                              .index)) \
                    = private_array_adaptive_map_key; \
            } \
        } \
        private_array_adaptive_map_ins_or_assign_hndl_ret; \
    })
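/* Sketch: keyed insert-or-assign. An existing entry has both its user data
   and stored key overwritten; otherwise a new slot is allocated and filled
   (`m` and the kv values are hypothetical):

       struct CCC_Handle h
           = CCC_private_array_adaptive_map_insert_or_assign_with(
               &m, 42, (struct kv){.key = 42, .val = 9});
*/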
#endif /* CCC_PRIVATE_ARRAY_ADAPTIVE_MAP_H */