C Container Collection (CCC)
Loading...
Searching...
No Matches
private_adaptive_map.h
Go to the documentation of this file.
1
28#ifndef CCC_PRIVATE_ADAPTIVE_MAP_H
29#define CCC_PRIVATE_ADAPTIVE_MAP_H
30
32#include <stddef.h>
35#include "../types.h"
36
37/* NOLINTBEGIN(readability-identifier-naming) */
38
47};
48
60 size_t size;
66 size_t key_offset;
69};
70
90};
91
92/*========================== Private Interface ============================*/
93
96 struct CCC_Adaptive_map const *, void const *
97);
100 struct CCC_Adaptive_map const *, void const *
101);
107 struct CCC_Adaptive_map *, struct CCC_Adaptive_map_node *
108);
109
110/*====================== Macro Implementations ========================*/
111
/** Expands to a `struct CCC_Adaptive_map` compound literal initialized for
the given user type. Records the type's size, the byte offset of its
intrusive node field, the byte offset of its key field, and the key
comparator. Members not named here (e.g. `.root`, `.size`) are
zero-initialized, as C guarantees for compound literals with designated
initializers, so this is equivalent to `CCC_private_adaptive_map_for`. */
#define CCC_private_adaptive_map_default( \
    private_struct_name, \
    private_node_node_field, \
    private_key_node_field, \
    private_comparator... \
) \
    (struct CCC_Adaptive_map) { \
        .sizeof_type = sizeof(private_struct_name), \
        .type_intruder_offset \
        = offsetof(private_struct_name, private_node_node_field), \
        .key_offset = offsetof(private_struct_name, private_key_node_field), \
        .comparator = private_comparator, \
    }
126
/** Expands to a `struct CCC_Adaptive_map` compound literal for the given
user type with `.root` and `.size` spelled out explicitly. Functionally the
same initialization as `CCC_private_adaptive_map_default`, which relies on
implicit zero-initialization of the unnamed members instead. */
#define CCC_private_adaptive_map_for( \
    private_struct_name, \
    private_node_node_field, \
    private_key_node_field, \
    private_comparator... \
) \
    (struct CCC_Adaptive_map) { \
        .root = NULL, .size = 0, .sizeof_type = sizeof(private_struct_name), \
        .type_intruder_offset \
        = offsetof(private_struct_name, private_node_node_field), \
        .key_offset = offsetof(private_struct_name, private_key_node_field), \
        .comparator = private_comparator, \
    }
141
/** Builds and yields a map populated from a compound-literal array of user
types. For each array element: if its key is not yet in the map, a slot of
`sizeof_type` bytes is requested from the allocator, the element is copied
into it, and its intrusive node is inserted; if the key is already present,
the occupying slot's user data is overwritten with the new element while the
slot's intrusive node is saved and restored around the copy, so the tree
structure is untouched. On a failed allocation the map is cleared with the
given destructor/allocator and population stops early, yielding the (now
empty) map. If the allocator has no `.allocate` member, no elements are
inserted at all and an empty map is yielded.

The whole expression is wrapped in an anonymous single-member struct so the
statement expression can yield the map by value through the `.private`
member access at the end. Requires GCC/Clang extensions (`__extension__`
statement expressions, `typeof`). */
#define CCC_private_adaptive_map_from( \
    private_type_intruder_field_name, \
    private_key_field_name, \
    private_comparator, \
    private_allocator, \
    private_destructor, \
    private_compound_literal_array... \
) \
    (struct { struct CCC_Adaptive_map private; }){(__extension__({ \
        typeof(*private_compound_literal_array) \
            *private_adaptive_map_type_array = private_compound_literal_array; \
        struct CCC_Adaptive_map private_map \
            = CCC_private_adaptive_map_default( \
                typeof(*private_adaptive_map_type_array), \
                private_type_intruder_field_name, \
                private_key_field_name, \
                private_comparator \
            ); \
        CCC_Allocator const private_adaptive_map_allocator \
            = private_allocator; \
        if (private_adaptive_map_allocator.allocate) { \
            /* Element count of the compound literal array itself. */ \
            size_t const private_count \
                = sizeof(private_compound_literal_array) \
                / sizeof(*private_adaptive_map_type_array); \
            for (size_t private_i = 0; private_i < private_count; \
                 ++private_i) { \
                struct CCC_Adaptive_map_entry private_adaptive_map_entry \
                    = CCC_private_adaptive_map_entry( \
                        &private_map, \
                        (void *)&private_adaptive_map_type_array[private_i] \
                             .private_key_field_name \
                    ); \
                if (!(private_adaptive_map_entry.entry.status \
                      & CCC_ENTRY_OCCUPIED)) { \
                    /* New key: allocate a fresh slot and insert it. */ \
                    typeof(*private_adaptive_map_type_array) *const \
                        private_new_slot \
                        = private_adaptive_map_allocator.allocate(( \
                            CCC_Allocator_arguments \
                        ){ \
                            .input = NULL, \
                            .bytes = private_map.sizeof_type, \
                            .context = private_adaptive_map_allocator.context, \
                        }); \
                    if (!private_new_slot) { \
                        /* OOM: tear down what was built and stop. */ \
                        (void)CCC_adaptive_map_clear( \
                            &private_map, \
                            &(private_destructor), \
                            &private_adaptive_map_allocator \
                        ); \
                        break; \
                    } \
                    *private_new_slot \
                        = private_adaptive_map_type_array[private_i]; \
                    CCC_private_adaptive_map_insert( \
                        &private_map, \
                        CCC_private_adaptive_map_node_in_slot( \
                            &private_map, private_new_slot \
                        ) \
                    ); \
                } else { \
                    /* Duplicate key: overwrite the user data in place but \
                       preserve the slot's intrusive node (tree links). */ \
                    struct CCC_Adaptive_map_node private_node_saved \
                        = *CCC_private_adaptive_map_node_in_slot( \
                            &private_map, \
                            private_adaptive_map_entry.entry.type \
                        ); \
                    *((typeof(*private_adaptive_map_type_array) *) \
                          private_adaptive_map_entry.entry.type) \
                        = private_adaptive_map_type_array[private_i]; \
                    *CCC_private_adaptive_map_node_in_slot( \
                        &private_map, private_adaptive_map_entry.entry.type \
                    ) = private_node_saved; \
                } \
            } \
        } \
        private_map; \
    }))}.private
219
/** Allocates one uninitialized slot of the entry's map element size
(`map->sizeof_type`) through the given allocator and yields the new pointer,
or NULL when the allocator has no `.allocate` function or allocation fails.
NOTE(review): `private_allocator` is dereferenced without a NULL check —
presumably callers guarantee a non-NULL allocator pointer; confirm. */
#define CCC_private_adaptive_map_new(adaptive_map_entry, private_allocator) \
    (__extension__({ \
        void *private_adaptive_map_ins_allocate_ret = NULL; \
        if ((private_allocator)->allocate) { \
            private_adaptive_map_ins_allocate_ret \
                = (private_allocator) \
                      ->allocate((CCC_Allocator_arguments){ \
                          .input = NULL, \
                          .bytes = (adaptive_map_entry)->map->sizeof_type, \
                          .context = (private_allocator)->context, \
                      }); \
        } \
        private_adaptive_map_ins_allocate_ret; \
    }))
235
/** Copies the compound literal into the slot `new_data` points to and
inserts that slot's intrusive node into the entry's map. `new_data` must be
an assignable lvalue: it is evaluated several times and is reassigned with
the result of `CCC_private_adaptive_map_insert`. A NULL `new_data` (e.g. a
failed allocation upstream) makes the whole macro a no-op. The statement
expression yields no value; callers use it purely for its side effects. */
#define CCC_private_adaptive_map_insert_key_val( \
    adaptive_map_entry, new_data, type_compound_literal... \
) \
    (__extension__({ \
        if (new_data) { \
            *new_data = type_compound_literal; \
            new_data = CCC_private_adaptive_map_insert( \
                (adaptive_map_entry)->map, \
                CCC_private_adaptive_map_node_in_slot( \
                    (adaptive_map_entry)->map, new_data \
                ) \
            ); \
        } \
    }))
251
/** Allocates a new slot, writes the user's compound literal into it, then
overwrites the key field in that slot with `key` (so the stored key always
matches the key used for the lookup, even if the literal disagrees), and
inserts the slot's node into the map. `om_insert_entry_ret` must be an
assignable `CCC_Entry` lvalue: it is first set to
`CCC_ENTRY_INSERT_ERROR` with `.type` pointing at the allocation result
(NULL on failure), and upgraded to `CCC_ENTRY_VACANT` only when allocation
succeeded. `om_insert_entry` is used unparenthesized with `.map` member
access, so it must be a plain entry lvalue, not an arbitrary expression. */
#define CCC_private_adaptive_map_insert_and_copy_key( \
    om_insert_entry, \
    om_insert_entry_ret, \
    key, \
    private_allocator, \
    type_compound_literal... \
) \
    (__extension__({ \
        typeof(type_compound_literal) *private_adaptive_map_new_ins_base \
            = CCC_private_adaptive_map_new( \
                (&om_insert_entry), private_allocator \
            ); \
        om_insert_entry_ret = (CCC_Entry){ \
            .type = private_adaptive_map_new_ins_base, \
            .status = CCC_ENTRY_INSERT_ERROR, \
        }; \
        if (private_adaptive_map_new_ins_base) { \
            om_insert_entry_ret.status = CCC_ENTRY_VACANT; \
            *((typeof(type_compound_literal) *) \
                  private_adaptive_map_new_ins_base) = type_compound_literal; \
            /* Force the stored key to match the searched-for key. */ \
            *((typeof(key) *)CCC_private_adaptive_map_key_in_slot( \
                om_insert_entry.map, private_adaptive_map_new_ins_base \
            )) = key; \
            (void)CCC_private_adaptive_map_insert( \
                om_insert_entry.map, \
                CCC_private_adaptive_map_node_in_slot( \
                    om_insert_entry.map, private_adaptive_map_new_ins_base \
                ) \
            ); \
        } \
    }))
284
285/*===================== Core Macro Implementations ==================*/
286
/** Runs a user-supplied closure over an occupied entry's user type and
yields a copy of the entry. If the entry pointer is NULL the yielded entry
carries `CCC_ENTRY_ARGUMENT_ERROR`; if the entry is not occupied the closure
is skipped and the entry is yielded unchanged. `closure_parameter` is a
user-chosen declaration that is assigned the entry's `.type` pointer before
`closure_over_closure_parameter` (the closure's statements) is pasted in —
the closure text must therefore provide its own braces/semicolons. */
#define CCC_private_adaptive_map_and_modify_with( \
    adaptive_map_entry_pointer, \
    closure_parameter, \
    closure_over_closure_parameter... \
) \
    (__extension__({ \
        __auto_type private_adaptive_map_ent_pointer \
            = (adaptive_map_entry_pointer); \
        struct CCC_Adaptive_map_entry private_adaptive_map_mod_ent \
            = {.entry = {.status = CCC_ENTRY_ARGUMENT_ERROR}}; \
        if (private_adaptive_map_ent_pointer) { \
            private_adaptive_map_mod_ent = *private_adaptive_map_ent_pointer; \
            if (private_adaptive_map_mod_ent.entry.status \
                & CCC_ENTRY_OCCUPIED) { \
                closure_parameter = private_adaptive_map_mod_ent.entry.type; \
                closure_over_closure_parameter \
            } \
        } \
        private_adaptive_map_mod_ent; \
    }))
308
/** Yields a pointer to the occupying user type if the entry is exactly
`CCC_ENTRY_OCCUPIED`; otherwise allocates a slot, writes the compound
literal into it, and inserts it, yielding the new slot pointer. Yields NULL
when the entry pointer or allocator pointer is NULL, or when allocation
fails (in which case `CCC_private_adaptive_map_insert_key_val` no-ops on the
NULL slot). NOTE(review): this branch tests `== CCC_ENTRY_OCCUPIED` while
other macros in this file test the `& CCC_ENTRY_OCCUPIED` bit — presumably
deliberate so composite error statuses fall to the insert path; confirm. */
#define CCC_private_adaptive_map_or_insert_with( \
    adaptive_map_entry_pointer, \
    private_allocator_pointer, \
    type_compound_literal... \
) \
    (__extension__({ \
        __auto_type private_or_ins_entry_pointer \
            = (adaptive_map_entry_pointer); \
        typeof(type_compound_literal) *private_or_ins_ret = NULL; \
        CCC_Allocator const *const private_adaptive_map_allocator \
            = (private_allocator_pointer); \
        if (private_adaptive_map_allocator && private_or_ins_entry_pointer) { \
            if (private_or_ins_entry_pointer->entry.status \
                == CCC_ENTRY_OCCUPIED) { \
                private_or_ins_ret = private_or_ins_entry_pointer->entry.type; \
            } else { \
                private_or_ins_ret = CCC_private_adaptive_map_new( \
                    private_or_ins_entry_pointer, \
                    private_adaptive_map_allocator \
                ); \
                CCC_private_adaptive_map_insert_key_val( \
                    private_or_ins_entry_pointer, \
                    private_or_ins_ret, \
                    type_compound_literal \
                ); \
            } \
        } \
        private_or_ins_ret; \
    }))
339
/** Unconditionally writes the compound literal at the entry's position and
yields a pointer to the resulting user type. Vacant entries get a freshly
allocated slot that is then inserted; occupied entries have their user data
overwritten in place while the slot's intrusive node is saved and restored
around the copy so the tree links survive. Yields NULL when the entry or
allocator pointer is NULL, or on allocation failure.
NOTE(review): a status that has the OCCUPIED bit set but is not exactly
`CCC_ENTRY_OCCUPIED` matches neither branch and yields NULL — presumably how
error statuses are rejected; confirm against the status enum. */
#define CCC_private_adaptive_map_insert_entry_with( \
    adaptive_map_entry_pointer, \
    private_allocator_pointer, \
    type_compound_literal... \
) \
    (__extension__({ \
        __auto_type private_ins_entry_pointer = (adaptive_map_entry_pointer); \
        typeof(type_compound_literal) *private_adaptive_map_ins_ent_ret \
            = NULL; \
        CCC_Allocator const *const private_adaptive_map_allocator \
            = (private_allocator_pointer); \
        if (private_adaptive_map_allocator && private_ins_entry_pointer) { \
            if (!(private_ins_entry_pointer->entry.status \
                  & CCC_ENTRY_OCCUPIED)) { \
                /* Vacant: allocate a slot and insert the literal. */ \
                private_adaptive_map_ins_ent_ret \
                    = CCC_private_adaptive_map_new( \
                        private_ins_entry_pointer, \
                        private_adaptive_map_allocator \
                    ); \
                CCC_private_adaptive_map_insert_key_val( \
                    private_ins_entry_pointer, \
                    private_adaptive_map_ins_ent_ret, \
                    type_compound_literal \
                ); \
            } else if (private_ins_entry_pointer->entry.status \
                       == CCC_ENTRY_OCCUPIED) { \
                /* Occupied: replace user data, keep the intrusive node. */ \
                struct CCC_Adaptive_map_node private_ins_ent_saved \
                    = *CCC_private_adaptive_map_node_in_slot( \
                        private_ins_entry_pointer->map, \
                        private_ins_entry_pointer->entry.type \
                    ); \
                *((typeof(type_compound_literal) *) \
                      private_ins_entry_pointer->entry.type) \
                    = type_compound_literal; \
                *CCC_private_adaptive_map_node_in_slot( \
                    private_ins_entry_pointer->map, \
                    private_ins_entry_pointer->entry.type \
                ) = private_ins_ent_saved; \
                private_adaptive_map_ins_ent_ret \
                    = private_ins_entry_pointer->entry.type; \
            } \
        } \
        private_adaptive_map_ins_ent_ret; \
    }))
385
/** Looks up `key` in the map and, only when the position is vacant, inserts
the compound literal (with the stored key forced to `key` via
`CCC_private_adaptive_map_insert_and_copy_key`). Yields a `CCC_Entry`:
`CCC_ENTRY_ARGUMENT_ERROR` when the map or allocator pointer is NULL, the
existing entry when the key is exactly occupied, or the insertion result
(VACANT on success, INSERT_ERROR on allocation failure). The key is copied
into a local (`__auto_type`) so its address may be taken for the lookup. */
#define CCC_private_adaptive_map_try_insert_with( \
    adaptive_map_pointer, \
    key, \
    private_allocator_pointer, \
    type_compound_literal... \
) \
    (__extension__({ \
        __auto_type private_try_ins_map_pointer = (adaptive_map_pointer); \
        CCC_Entry private_adaptive_map_try_ins_ent_ret \
            = {.status = CCC_ENTRY_ARGUMENT_ERROR}; \
        CCC_Allocator const *const private_adaptive_map_allocator \
            = (private_allocator_pointer); \
        if (private_adaptive_map_allocator && private_try_ins_map_pointer) { \
            __auto_type private_adaptive_map_key = (key); \
            struct CCC_Adaptive_map_entry private_adaptive_map_try_ins_ent \
                = CCC_private_adaptive_map_entry( \
                    private_try_ins_map_pointer, \
                    (void *)&private_adaptive_map_key \
                ); \
            if (!(private_adaptive_map_try_ins_ent.entry.status \
                  & CCC_ENTRY_OCCUPIED)) { \
                CCC_private_adaptive_map_insert_and_copy_key( \
                    private_adaptive_map_try_ins_ent, \
                    private_adaptive_map_try_ins_ent_ret, \
                    private_adaptive_map_key, \
                    private_adaptive_map_allocator, \
                    type_compound_literal \
                ); \
            } else if (private_adaptive_map_try_ins_ent.entry.status \
                       == CCC_ENTRY_OCCUPIED) { \
                /* Key already present: hand back the occupied entry. */ \
                private_adaptive_map_try_ins_ent_ret \
                    = private_adaptive_map_try_ins_ent.entry; \
            } \
        } \
        private_adaptive_map_try_ins_ent_ret; \
    }))
423
/** Looks up `key` and either inserts the compound literal (vacant) or
assigns over the occupying element (occupied). The occupied path saves and
restores the slot's intrusive node around the whole-struct copy so tree
links are preserved, then rewrites the key field from the local key copy so
the stored key stays the searched-for key even if the literal disagrees.
Yields a `CCC_Entry`: `CCC_ENTRY_ARGUMENT_ERROR` when the map or allocator
pointer is NULL, the insertion result on the vacant path, or the occupied
entry on the assign path. */
#define CCC_private_adaptive_map_insert_or_assign_with( \
    adaptive_map_pointer, \
    key, \
    private_allocator_pointer, \
    type_compound_literal... \
) \
    (__extension__({ \
        __auto_type private_ins_or_assign_map_pointer \
            = (adaptive_map_pointer); \
        CCC_Entry private_adaptive_map_ins_or_assign_ent_ret \
            = {.status = CCC_ENTRY_ARGUMENT_ERROR}; \
        CCC_Allocator const *const private_adaptive_map_allocator \
            = (private_allocator_pointer); \
        if (private_adaptive_map_allocator \
            && private_ins_or_assign_map_pointer) { \
            __auto_type private_adaptive_map_key = (key); \
            struct CCC_Adaptive_map_entry \
                private_adaptive_map_ins_or_assign_ent \
                = CCC_private_adaptive_map_entry( \
                    private_ins_or_assign_map_pointer, \
                    (void *)&private_adaptive_map_key \
                ); \
            if (!(private_adaptive_map_ins_or_assign_ent.entry.status \
                  & CCC_ENTRY_OCCUPIED)) { \
                CCC_private_adaptive_map_insert_and_copy_key( \
                    private_adaptive_map_ins_or_assign_ent, \
                    private_adaptive_map_ins_or_assign_ent_ret, \
                    private_adaptive_map_key, \
                    private_adaptive_map_allocator, \
                    type_compound_literal \
                ); \
            } else if (private_adaptive_map_ins_or_assign_ent.entry.status \
                       == CCC_ENTRY_OCCUPIED) { \
                /* Assign path: overwrite data, keep the intrusive node. */ \
                struct CCC_Adaptive_map_node private_ins_ent_saved \
                    = *CCC_private_adaptive_map_node_in_slot( \
                        private_adaptive_map_ins_or_assign_ent.map, \
                        private_adaptive_map_ins_or_assign_ent.entry.type \
                    ); \
                *((typeof(type_compound_literal) *) \
                      private_adaptive_map_ins_or_assign_ent.entry.type) \
                    = type_compound_literal; \
                *CCC_private_adaptive_map_node_in_slot( \
                    private_adaptive_map_ins_or_assign_ent.map, \
                    private_adaptive_map_ins_or_assign_ent.entry.type \
                ) = private_ins_ent_saved; \
                private_adaptive_map_ins_or_assign_ent_ret \
                    = private_adaptive_map_ins_or_assign_ent.entry; \
                /* Restore the canonical key after the struct copy. */ \
                *((typeof(private_adaptive_map_key) *) \
                    CCC_private_adaptive_map_key_in_slot( \
                        private_ins_or_assign_map_pointer, \
                        private_adaptive_map_ins_or_assign_ent_ret.type \
                    )) = private_adaptive_map_key; \
            } \
        } \
        private_adaptive_map_ins_or_assign_ent_ret; \
    }))
481
482/* NOLINTEND(readability-identifier-naming) */
483
484#endif /* CCC_PRIVATE_ADAPTIVE_MAP_H */
void * CCC_private_adaptive_map_insert(struct CCC_Adaptive_map *, struct CCC_Adaptive_map_node *)
struct CCC_Adaptive_map_entry CCC_private_adaptive_map_entry(struct CCC_Adaptive_map *, void const *)
void * CCC_private_adaptive_map_key_in_slot(struct CCC_Adaptive_map const *, void const *)
struct CCC_Adaptive_map_node * CCC_private_adaptive_map_node_in_slot(struct CCC_Adaptive_map const *, void const *)
Definition: private_adaptive_map.h:85
CCC_Entry entry
Definition: private_adaptive_map.h:89
struct CCC_Adaptive_map * map
Definition: private_adaptive_map.h:87
Definition: private_adaptive_map.h:42
struct CCC_Adaptive_map_node * branch[2]
Definition: private_adaptive_map.h:44
struct CCC_Adaptive_map_node * parent
Definition: private_adaptive_map.h:46
Definition: private_adaptive_map.h:56
size_t type_intruder_offset
Definition: private_adaptive_map.h:64
size_t size
Definition: private_adaptive_map.h:60
size_t key_offset
Definition: private_adaptive_map.h:66
struct CCC_Adaptive_map_node * root
Definition: private_adaptive_map.h:58
size_t sizeof_type
Definition: private_adaptive_map.h:62
CCC_Key_comparator comparator
Definition: private_adaptive_map.h:68
An Occupied or Vacant position in a searchable container.
Definition: types.h:135
The type passed by reference to any container function that may need to compare keys....
Definition: types.h:512