C Container Collection (CCC)
private_adaptive_map.h
#ifndef CCC_PRIVATE_ADAPTIVE_MAP_H
#define CCC_PRIVATE_ADAPTIVE_MAP_H

#include <stddef.h>

#include "../types.h"
#include "private_types.h"

/* NOLINTBEGIN(readability-identifier-naming) */

/** A node embedded in the user type so that the type can be linked into the
adaptive map tree without a separate allocation for the links. */
struct CCC_Adaptive_map_node
{
    /** The child links of this node in the tree. */
    struct CCC_Adaptive_map_node *branch[2];
    /** The parent of this node in the tree. */
    struct CCC_Adaptive_map_node *parent;
};

/** The adaptive map container. It holds the tree root together with the
metadata needed to locate the intrusive node and key within the user type. */
struct CCC_Adaptive_map
{
    /** The root of the tree, NULL when the map is empty. */
    struct CCC_Adaptive_map_node *root;
    /** The number of elements stored in the map. */
    size_t size;
    /** The size in bytes of the user type wrapping the intrusive node. */
    size_t sizeof_type;
    /** The byte offset of the intrusive node field within the user type. */
    size_t type_intruder_offset;
    /** The byte offset of the key field within the user type. */
    size_t key_offset;
    /** The three-way key comparison callback provided by the user. */
    CCC_Key_comparator *compare;
    /** The allocation callback; NULL when the map may not allocate. */
    CCC_Allocator *allocate;
    /** User context data passed through to callbacks. */
    void *context;
};

/** A private entry pairing a map with the result of a search in that map. */
struct CCC_Adaptive_map_entry
{
    /** The map this entry was created from. */
    struct CCC_Adaptive_map *map;
    /** The status of the search and the location of the user type. */
    struct CCC_Entry entry;
};

{
    /** The private entry implementation wrapped by the public interface. */
    struct CCC_Adaptive_map_entry private;
};

/*========================== Private Interface ============================*/

struct CCC_Adaptive_map_entry
CCC_private_adaptive_map_entry(struct CCC_Adaptive_map *, void const *);
void *CCC_private_adaptive_map_key_in_slot(struct CCC_Adaptive_map const *,
                                           void const *);
struct CCC_Adaptive_map_node *
CCC_private_adaptive_map_node_in_slot(struct CCC_Adaptive_map const *,
                                      void const *);
void *CCC_private_adaptive_map_insert(struct CCC_Adaptive_map *,
                                      struct CCC_Adaptive_map_node *);

/*====================== Macro Implementations ========================*/

#define CCC_private_adaptive_map_initialize( \
    private_struct_name, private_node_node_field, private_key_node_field, \
    private_key_comparator, private_allocate, private_context_data) \
    { \
        .root = NULL, \
        .compare = (private_key_comparator), \
        .allocate = (private_allocate), \
        .context = (private_context_data), \
        .size = 0, \
        .sizeof_type = sizeof(private_struct_name), \
        .type_intruder_offset \
        = offsetof(private_struct_name, private_node_node_field), \
        .key_offset = offsetof(private_struct_name, private_key_node_field), \
    }

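/* A minimal usage sketch (hypothetical type, field, and callback names; the
public wrapper macros that normally forward here are defined elsewhere). The
user type embeds the intrusive node, and the map is initialized at compile
time with no allocator and no context:

    struct id_val
    {
        struct CCC_Adaptive_map_node node;
        int id;
        int val;
    };

    static CCC_Order id_order(CCC_Key_comparator_context); // defined by user

    static struct CCC_Adaptive_map m = CCC_private_adaptive_map_initialize(
        struct id_val, node, id, id_order, NULL, NULL);
*/
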
#define CCC_private_adaptive_map_from( \
    private_type_intruder_field_name, private_key_field_name, private_compare, \
    private_allocate, private_destroy, private_context_data, \
    private_compound_literal_array...) \
    (__extension__({ \
        typeof(*private_compound_literal_array) \
            *private_adaptive_map_type_array \
            = private_compound_literal_array; \
        struct CCC_Adaptive_map private_map \
            = CCC_private_adaptive_map_initialize( \
                typeof(*private_adaptive_map_type_array), \
                private_type_intruder_field_name, private_key_field_name, \
                private_compare, private_allocate, private_context_data); \
        if (private_map.allocate) \
        { \
            size_t const private_count \
                = sizeof(private_compound_literal_array) \
                / sizeof(*private_adaptive_map_type_array); \
            for (size_t private_i = 0; private_i < private_count; ++private_i) \
            { \
                struct CCC_Adaptive_map_entry private_adaptive_map_entry \
                    = CCC_private_adaptive_map_entry( \
                        &private_map, \
                        (void *)&private_adaptive_map_type_array[private_i] \
                            .private_key_field_name); \
                if (!(private_adaptive_map_entry.entry.status \
                      & CCC_ENTRY_OCCUPIED)) \
                { \
                    typeof(*private_adaptive_map_type_array) *const \
                        private_new_slot \
                        = private_map.allocate((CCC_Allocator_context){ \
                            .input = NULL, \
                            .bytes = private_map.sizeof_type, \
                            .context = private_map.context, \
                        }); \
                    if (!private_new_slot) \
                    { \
                        (void)CCC_adaptive_map_clear(&private_map, \
                                                     private_destroy); \
                        break; \
                    } \
                    *private_new_slot \
                        = private_adaptive_map_type_array[private_i]; \
                    CCC_private_adaptive_map_insert( \
                        &private_map, CCC_private_adaptive_map_node_in_slot( \
                                          &private_map, private_new_slot)); \
                } \
                else \
                { \
                    struct CCC_Adaptive_map_node private_node_saved \
                        = *CCC_private_adaptive_map_node_in_slot( \
                            &private_map, \
                            private_adaptive_map_entry.entry.type); \
                    *((typeof(*private_adaptive_map_type_array) *) \
                          private_adaptive_map_entry.entry.type) \
                        = private_adaptive_map_type_array[private_i]; \
                    *CCC_private_adaptive_map_node_in_slot( \
                        &private_map, private_adaptive_map_entry.entry.type) \
                        = private_node_saved; \
                } \
            } \
        } \
        private_map; \
    }))

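/* A usage sketch for building a map from an array compound literal
(hypothetical names; an allocator is required because every element is copied
into newly allocated storage). The allocator below simply forwards to realloc
(requires <stdlib.h>), which covers the only request this macro makes
(.input == NULL, .bytes == sizeof_type); the full CCC_Allocator contract is
documented in types.h. The NULL destroy and context arguments are
placeholders:

    static void *std_allocate(CCC_Allocator_context const ctx)
    {
        return realloc(ctx.input, ctx.bytes); // malloc when ctx.input is NULL
    }

    struct CCC_Adaptive_map m = CCC_private_adaptive_map_from(
        node, id, id_order, std_allocate, NULL, NULL,
        (struct id_val[]){
            {.id = 1, .val = 10},
            {.id = 2, .val = 20},
        });
*/
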
#define CCC_private_adaptive_map_with_allocator( \
    private_struct_name, private_node_node_field, private_key_node_field, \
    private_key_comparator, private_allocate) \
    { \
        .root = NULL, \
        .compare = (private_key_comparator), \
        .allocate = (private_allocate), \
        .context = NULL, \
        .size = 0, \
        .sizeof_type = sizeof(private_struct_name), \
        .type_intruder_offset \
        = offsetof(private_struct_name, private_node_node_field), \
        .key_offset = offsetof(private_struct_name, private_key_node_field), \
    }

#define CCC_private_adaptive_map_with_context_allocator( \
    private_struct_name, private_node_node_field, private_key_node_field, \
    private_key_comparator, private_allocate, private_context_data) \
    { \
        .root = NULL, \
        .allocate = (private_allocate), \
        .compare = (private_key_comparator), \
        .context = (private_context_data), \
        .size = 0, \
        .sizeof_type = sizeof(private_struct_name), \
        .type_intruder_offset \
        = offsetof(private_struct_name, private_node_node_field), \
        .key_offset = offsetof(private_struct_name, private_key_node_field), \
    }

#define CCC_private_adaptive_map_new(adaptive_map_entry) \
    (__extension__({ \
        void *private_adaptive_map_ins_allocate_ret = NULL; \
        if ((adaptive_map_entry)->map->allocate) \
        { \
            private_adaptive_map_ins_allocate_ret \
                = (adaptive_map_entry) \
                      ->map->allocate((CCC_Allocator_context){ \
                          .input = NULL, \
                          .bytes = (adaptive_map_entry)->map->sizeof_type, \
                          .context = (adaptive_map_entry)->map->context, \
                      }); \
        } \
        private_adaptive_map_ins_allocate_ret; \
    }))

#define CCC_private_adaptive_map_insert_key_val(adaptive_map_entry, new_data, \
                                                type_compound_literal...) \
    (__extension__({ \
        if (new_data) \
        { \
            *new_data = type_compound_literal; \
            new_data = CCC_private_adaptive_map_insert( \
                (adaptive_map_entry)->map, \
                CCC_private_adaptive_map_node_in_slot( \
                    (adaptive_map_entry)->map, new_data)); \
        } \
    }))

#define CCC_private_adaptive_map_insert_and_copy_key( \
    om_insert_entry, om_insert_entry_ret, key, type_compound_literal...) \
    (__extension__({ \
        typeof(type_compound_literal) *private_adaptive_map_new_ins_base \
            = CCC_private_adaptive_map_new((&om_insert_entry)); \
        om_insert_entry_ret = (struct CCC_Entry){ \
            .type = private_adaptive_map_new_ins_base, \
            .status = CCC_ENTRY_INSERT_ERROR, \
        }; \
        if (private_adaptive_map_new_ins_base) \
        { \
            *((typeof(type_compound_literal) *) \
                  private_adaptive_map_new_ins_base) \
                = type_compound_literal; \
            *((typeof(key) *)CCC_private_adaptive_map_key_in_slot( \
                om_insert_entry.map, private_adaptive_map_new_ins_base)) \
                = key; \
            (void)CCC_private_adaptive_map_insert( \
                om_insert_entry.map, \
                CCC_private_adaptive_map_node_in_slot( \
                    om_insert_entry.map, private_adaptive_map_new_ins_base)); \
        } \
    }))

/*===================== Core Macro Implementations ==================*/

#define CCC_private_adaptive_map_and_modify_with(adaptive_map_entry_pointer, \
                                                 type_name, closure_over_T...) \
    (__extension__({ \
        __auto_type private_adaptive_map_ent_pointer \
            = (adaptive_map_entry_pointer); \
        struct CCC_Adaptive_map_entry private_adaptive_map_mod_ent \
            = {.entry = {.status = CCC_ENTRY_ARGUMENT_ERROR}}; \
        if (private_adaptive_map_ent_pointer) \
        { \
            private_adaptive_map_mod_ent \
                = private_adaptive_map_ent_pointer->private; \
            if (private_adaptive_map_mod_ent.entry.status \
                & CCC_ENTRY_OCCUPIED) \
            { \
                type_name *const T = private_adaptive_map_mod_ent.entry.type; \
                if (T) \
                { \
                    closure_over_T \
                } \
            } \
        } \
        private_adaptive_map_mod_ent; \
    }))

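/* A usage sketch (hypothetical names): with e as a pointer to the public
entry wrapper obtained from a prior search, run a statement block over the
stored type only when the entry is occupied. T is bound to the user type
inside the closure:

    struct CCC_Adaptive_map_entry modified
        = CCC_private_adaptive_map_and_modify_with(e, struct id_val, {
              T->val += 1;
          });
*/
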
#define CCC_private_adaptive_map_or_insert_with(adaptive_map_entry_pointer, \
                                                type_compound_literal...) \
    (__extension__({ \
        __auto_type private_or_ins_entry_pointer \
            = (adaptive_map_entry_pointer); \
        typeof(type_compound_literal) *private_or_ins_ret = NULL; \
        if (private_or_ins_entry_pointer) \
        { \
            if (private_or_ins_entry_pointer->private.entry.status \
                == CCC_ENTRY_OCCUPIED) \
            { \
                private_or_ins_ret \
                    = private_or_ins_entry_pointer->private.entry.type; \
            } \
            else \
            { \
                private_or_ins_ret = CCC_private_adaptive_map_new( \
                    &private_or_ins_entry_pointer->private); \
                CCC_private_adaptive_map_insert_key_val( \
                    &private_or_ins_entry_pointer->private, \
                    private_or_ins_ret, type_compound_literal); \
            } \
        } \
        private_or_ins_ret; \
    }))

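/* A usage sketch (hypothetical names): return a pointer to the element that
already occupies the searched key, or insert the given compound literal and
return a pointer to it; NULL is returned on allocation failure or a bad
argument. The literal is stored as-is, so it should carry the key that e was
searched with:

    struct id_val *v = CCC_private_adaptive_map_or_insert_with(
        e, (struct id_val){.id = 7, .val = 0});
*/
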
#define CCC_private_adaptive_map_insert_entry_with(adaptive_map_entry_pointer, \
                                                   type_compound_literal...) \
    (__extension__({ \
        __auto_type private_ins_entry_pointer = (adaptive_map_entry_pointer); \
        typeof(type_compound_literal) *private_adaptive_map_ins_ent_ret \
            = NULL; \
        if (private_ins_entry_pointer) \
        { \
            if (!(private_ins_entry_pointer->private.entry.status \
                  & CCC_ENTRY_OCCUPIED)) \
            { \
                private_adaptive_map_ins_ent_ret \
                    = CCC_private_adaptive_map_new( \
                        &private_ins_entry_pointer->private); \
                CCC_private_adaptive_map_insert_key_val( \
                    &private_ins_entry_pointer->private, \
                    private_adaptive_map_ins_ent_ret, type_compound_literal); \
            } \
            else if (private_ins_entry_pointer->private.entry.status \
                     == CCC_ENTRY_OCCUPIED) \
            { \
                struct CCC_Adaptive_map_node private_ins_ent_saved \
                    = *CCC_private_adaptive_map_node_in_slot( \
                        private_ins_entry_pointer->private.map, \
                        private_ins_entry_pointer->private.entry.type); \
                *((typeof(type_compound_literal) *) \
                      private_ins_entry_pointer->private.entry.type) \
                    = type_compound_literal; \
                *CCC_private_adaptive_map_node_in_slot( \
                    private_ins_entry_pointer->private.map, \
                    private_ins_entry_pointer->private.entry.type) \
                    = private_ins_ent_saved; \
                private_adaptive_map_ins_ent_ret \
                    = private_ins_entry_pointer->private.entry.type; \
            } \
        } \
        private_adaptive_map_ins_ent_ret; \
    }))

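/* A usage sketch (hypothetical names): unconditionally write the compound
literal into the map for the searched key. When the key is vacant a new slot
is allocated; when it is occupied the stored element is overwritten in place
while the intrusive node links are saved and restored so the tree structure
is preserved:

    struct id_val *v = CCC_private_adaptive_map_insert_entry_with(
        e, (struct id_val){.id = 7, .val = 42});
*/
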
#define CCC_private_adaptive_map_try_insert_with(adaptive_map_pointer, key, \
                                                 type_compound_literal...) \
    (__extension__({ \
        __auto_type private_try_ins_map_pointer = (adaptive_map_pointer); \
        struct CCC_Entry private_adaptive_map_try_ins_ent_ret \
            = {.status = CCC_ENTRY_ARGUMENT_ERROR}; \
        if (private_try_ins_map_pointer) \
        { \
            __auto_type private_adaptive_map_key = (key); \
            struct CCC_Adaptive_map_entry private_adaptive_map_try_ins_ent \
                = CCC_private_adaptive_map_entry( \
                    private_try_ins_map_pointer, \
                    (void *)&private_adaptive_map_key); \
            if (!(private_adaptive_map_try_ins_ent.entry.status \
                  & CCC_ENTRY_OCCUPIED)) \
            { \
                CCC_private_adaptive_map_insert_and_copy_key( \
                    private_adaptive_map_try_ins_ent, \
                    private_adaptive_map_try_ins_ent_ret, \
                    private_adaptive_map_key, type_compound_literal); \
            } \
            else if (private_adaptive_map_try_ins_ent.entry.status \
                     == CCC_ENTRY_OCCUPIED) \
            { \
                private_adaptive_map_try_ins_ent_ret \
                    = private_adaptive_map_try_ins_ent.entry; \
            } \
        } \
        private_adaptive_map_try_ins_ent_ret; \
    }))

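/* A usage sketch (hypothetical names): insert the compound literal only if
key 3 is not yet present; the key is copied into the key field of the new
element by the macro, so the literal does not need to set it. The returned
entry reports whether an existing element blocked the insertion:

    struct CCC_Entry ent = CCC_private_adaptive_map_try_insert_with(
        &m, 3, (struct id_val){.val = 30});
*/
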
#define CCC_private_adaptive_map_insert_or_assign_with( \
    adaptive_map_pointer, key, type_compound_literal...) \
    (__extension__({ \
        __auto_type private_ins_or_assign_map_pointer \
            = (adaptive_map_pointer); \
        struct CCC_Entry private_adaptive_map_ins_or_assign_ent_ret \
            = {.status = CCC_ENTRY_ARGUMENT_ERROR}; \
        if (private_ins_or_assign_map_pointer) \
        { \
            __auto_type private_adaptive_map_key = (key); \
            struct CCC_Adaptive_map_entry \
                private_adaptive_map_ins_or_assign_ent \
                = CCC_private_adaptive_map_entry( \
                    private_ins_or_assign_map_pointer, \
                    (void *)&private_adaptive_map_key); \
            if (!(private_adaptive_map_ins_or_assign_ent.entry.status \
                  & CCC_ENTRY_OCCUPIED)) \
            { \
                CCC_private_adaptive_map_insert_and_copy_key( \
                    private_adaptive_map_ins_or_assign_ent, \
                    private_adaptive_map_ins_or_assign_ent_ret, \
                    private_adaptive_map_key, type_compound_literal); \
            } \
            else if (private_adaptive_map_ins_or_assign_ent.entry.status \
                     == CCC_ENTRY_OCCUPIED) \
            { \
                struct CCC_Adaptive_map_node private_ins_ent_saved \
                    = *CCC_private_adaptive_map_node_in_slot( \
                        private_adaptive_map_ins_or_assign_ent.map, \
                        private_adaptive_map_ins_or_assign_ent.entry.type); \
                *((typeof(type_compound_literal) *) \
                      private_adaptive_map_ins_or_assign_ent.entry.type) \
                    = type_compound_literal; \
                *CCC_private_adaptive_map_node_in_slot( \
                    private_adaptive_map_ins_or_assign_ent.map, \
                    private_adaptive_map_ins_or_assign_ent.entry.type) \
                    = private_ins_ent_saved; \
                private_adaptive_map_ins_or_assign_ent_ret \
                    = private_adaptive_map_ins_or_assign_ent.entry; \
                *((typeof(private_adaptive_map_key) *) \
                      CCC_private_adaptive_map_key_in_slot( \
                          private_ins_or_assign_map_pointer, \
                          private_adaptive_map_ins_or_assign_ent_ret.type)) \
                    = private_adaptive_map_key; \
            } \
        } \
        private_adaptive_map_ins_or_assign_ent_ret; \
    }))

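/* A usage sketch (hypothetical names): insert the compound literal for key 3
if it is absent, otherwise overwrite the existing element and rewrite its key
while preserving the intrusive node links:

    struct CCC_Entry ent = CCC_private_adaptive_map_insert_or_assign_with(
        &m, 3, (struct id_val){.val = 31});
*/
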
/* NOLINTEND(readability-identifier-naming) */

#endif /* CCC_PRIVATE_ADAPTIVE_MAP_H */