C Container Collection (CCC)

Source listing for private_adaptive_map.h — the private (implementation-detail)
interface of the adaptive map container. This page is generated documentation;
go to the repository for the authoritative header.
28#ifndef CCC_PRIVATE_ADAPTIVE_MAP_H
29#define CCC_PRIVATE_ADAPTIVE_MAP_H
30
32#include <stddef.h>
35#include "../types.h"
36#include "private_types.h"
37
38/* NOLINTBEGIN(readability-identifier-naming) */
39
44{
49};
50
59{
63 size_t size;
69 size_t key_offset;
75 void *context;
76};
77
93{
98};
99
105{
108};
109
110/*========================== Private Interface ============================*/
111
114 void const *);
118 void const *);
124 struct CCC_Adaptive_map_node *);
125
126/*====================== Macro Implementations ========================*/
127
/** @private Expands to a designated-initializer list for an empty
struct CCC_Adaptive_map. The map starts with no root and zero size; the
type size and the byte offsets of the intrusive node field and the key
field are captured from the user's struct via sizeof/offsetof so the
container can locate them inside untyped slots later.
@param private_struct_name the user's struct type stored in the map.
@param private_node_node_field name of the intrusive node member.
@param private_key_node_field name of the key member.
@param private_key_comparator the CCC_Key_comparator callback.
@param private_allocate the CCC_Allocator callback (may be NULL).
@param private_context_data auxiliary pointer forwarded to callbacks. */
#define CCC_private_adaptive_map_initialize(                                   \
    private_struct_name, private_node_node_field, private_key_node_field,      \
    private_key_comparator, private_allocate, private_context_data)            \
    {                                                                          \
        .root = NULL,                                                          \
        .size = 0,                                                             \
        .sizeof_type = sizeof(private_struct_name),                            \
        .type_intruder_offset                                                  \
        = offsetof(private_struct_name, private_node_node_field),              \
        .key_offset = offsetof(private_struct_name, private_key_node_field),   \
        .compare = (private_key_comparator),                                   \
        .allocate = (private_allocate),                                        \
        .context = (private_context_data),                                     \
    }
143
/** @private Builds a map from a compound-literal array, yielding the map by
value via a statement expression. Only populates when an allocator is present.
For each element: if its key is absent, a slot is allocated, the element is
copied in, and its node is linked into the tree; on allocation failure the
whole map is cleared with the user's destroy callback and the loop stops. If
the key is already present, the stored element's payload is overwritten with
the new element while its intrusive node links are saved and restored around
the copy, so the tree structure is untouched.
NOTE(review): elements are inserted in array order; later duplicates win. */
#define CCC_private_adaptive_map_from(                                         \
    private_type_intruder_field_name, private_key_field_name, private_compare, \
    private_allocate, private_destroy, private_context_data,                   \
    private_compound_literal_array...)                                         \
    (__extension__({                                                           \
        typeof(*private_compound_literal_array)                                \
            *private_adaptive_map_type_array                                   \
            = private_compound_literal_array;                                  \
        struct CCC_Adaptive_map private_map                                    \
            = CCC_private_adaptive_map_initialize(                             \
                typeof(*private_adaptive_map_type_array),                      \
                private_type_intruder_field_name, private_key_field_name,      \
                private_compare, private_allocate, private_context_data);      \
        if (private_map.allocate)                                              \
        {                                                                      \
            size_t const private_count                                         \
                = sizeof(private_compound_literal_array)                       \
                / sizeof(*private_adaptive_map_type_array);                    \
            for (size_t private_i = 0; private_i < private_count; ++private_i) \
            {                                                                  \
                struct CCC_Adaptive_map_entry private_adaptive_map_entry       \
                    = CCC_private_adaptive_map_entry(                          \
                        &private_map,                                          \
                        (void *)&private_adaptive_map_type_array[private_i]    \
                             .private_key_field_name);                         \
                if (!(private_adaptive_map_entry.entry.status                  \
                      & CCC_ENTRY_OCCUPIED))                                   \
                {                                                              \
                    typeof(*private_adaptive_map_type_array) *const            \
                        private_new_slot                                       \
                        = private_map.allocate((CCC_Allocator_context){        \
                            .input = NULL,                                     \
                            .bytes = private_map.sizeof_type,                  \
                            .context = private_map.context,                    \
                        });                                                    \
                    if (!private_new_slot)                                     \
                    {                                                          \
                        (void)CCC_adaptive_map_clear(&private_map,             \
                                                     private_destroy);         \
                        break;                                                 \
                    }                                                          \
                    *private_new_slot                                          \
                        = private_adaptive_map_type_array[private_i];          \
                    CCC_private_adaptive_map_insert(                           \
                        &private_map, CCC_private_adaptive_map_node_in_slot(   \
                                          &private_map, private_new_slot));    \
                }                                                              \
                else                                                           \
                {                                                              \
                    struct CCC_Adaptive_map_node private_node_saved            \
                        = *CCC_private_adaptive_map_node_in_slot(              \
                            &private_map,                                      \
                            private_adaptive_map_entry.entry.type);            \
                    *((typeof(*private_adaptive_map_type_array) *)             \
                          private_adaptive_map_entry.entry.type)               \
                        = private_adaptive_map_type_array[private_i];          \
                    *CCC_private_adaptive_map_node_in_slot(                    \
                        &private_map, private_adaptive_map_entry.entry.type)   \
                        = private_node_saved;                                  \
                }                                                              \
            }                                                                  \
        }                                                                      \
        private_map;                                                           \
    }))
209
/** @private Allocates one untyped slot of the map's element size through the
map's allocator, or yields NULL when no allocator is configured or allocation
fails. The entry argument is evaluated exactly once (the original expanded it
three times, a multiple-evaluation hazard for side-effecting arguments). */
#define CCC_private_adaptive_map_new(adaptive_map_entry)                       \
    (__extension__({                                                           \
        __auto_type private_adaptive_map_new_entry = (adaptive_map_entry);     \
        void *private_adaptive_map_ins_allocate_ret = NULL;                    \
        if (private_adaptive_map_new_entry->map->allocate)                     \
        {                                                                      \
            private_adaptive_map_ins_allocate_ret                              \
                = private_adaptive_map_new_entry->map->allocate(               \
                    (CCC_Allocator_context){                                   \
                        .input = NULL,                                         \
                        .bytes                                                 \
                        = private_adaptive_map_new_entry->map->sizeof_type,    \
                        .context = private_adaptive_map_new_entry->map         \
                                       ->context,                              \
                    });                                                        \
        }                                                                      \
        private_adaptive_map_ins_allocate_ret;                                 \
    }))
226
/** @private If new_data is non-NULL, copies the compound literal into the
slot it points at, links that slot's intrusive node into the tree, and writes
the inserted element's address back into the caller's new_data variable (the
second argument must therefore be a modifiable lvalue). Yields void.
All uses of new_data are parenthesized for macro-argument hygiene (the
original expanded it bare). */
#define CCC_private_adaptive_map_insert_key_val(adaptive_map_entry, new_data,  \
                                                type_compound_literal...)      \
    (__extension__({                                                           \
        if (new_data)                                                          \
        {                                                                      \
            *(new_data) = type_compound_literal;                               \
            (new_data) = CCC_private_adaptive_map_insert(                      \
                (adaptive_map_entry)->map,                                     \
                CCC_private_adaptive_map_node_in_slot(                         \
                    (adaptive_map_entry)->map, (new_data)));                   \
        }                                                                      \
    }))
240
/** @private Allocates a slot, writes a CCC_Entry describing it into the
caller's private_insert_entry_ret lvalue, then — if allocation succeeded —
copies the compound literal into the slot, overwrites its key field with the
caller-supplied key, and links the slot's node into the tree. Parameters are
positional, so the rename from the leftover om_* prefix to the file-wide
private_* convention is caller-compatible.
NOTE(review): .status stays CCC_ENTRY_INSERT_ERROR even after a successful
insert — confirm callers treat a non-NULL .type as the success signal. */
#define CCC_private_adaptive_map_insert_and_copy_key(                          \
    private_insert_entry, private_insert_entry_ret, private_key,               \
    type_compound_literal...)                                                  \
    (__extension__({                                                           \
        typeof(type_compound_literal) *private_adaptive_map_new_ins_base       \
            = CCC_private_adaptive_map_new((&private_insert_entry));           \
        private_insert_entry_ret = (struct CCC_Entry){                         \
            .type = private_adaptive_map_new_ins_base,                         \
            .status = CCC_ENTRY_INSERT_ERROR,                                  \
        };                                                                     \
        if (private_adaptive_map_new_ins_base)                                 \
        {                                                                      \
            *((typeof(type_compound_literal) *)                                \
                  private_adaptive_map_new_ins_base)                           \
                = type_compound_literal;                                       \
            *((typeof(private_key) *)CCC_private_adaptive_map_key_in_slot(     \
                private_insert_entry.map, private_adaptive_map_new_ins_base))  \
                = private_key;                                                 \
            (void)CCC_private_adaptive_map_insert(                             \
                private_insert_entry.map,                                      \
                CCC_private_adaptive_map_node_in_slot(                         \
                    private_insert_entry.map,                                  \
                    private_adaptive_map_new_ins_base));                       \
        }                                                                      \
    }))
265
266/*===================== Core Macro Implementations ==================*/
267
/** @private Runs a user closure over an occupied entry's element. The closure
sees the element through the deliberately short name T (a type_name *const).
Yields a struct CCC_Adaptive_map_entry by value: CCC_ENTRY_ARGUMENT_ERROR if
the entry pointer is NULL, otherwise a copy of the caller's entry. The closure
runs only when the entry status has CCC_ENTRY_OCCUPIED set and the stored
element pointer is non-NULL. */
#define CCC_private_adaptive_map_and_modify_with(adaptive_map_entry_pointer,   \
                                                 type_name, closure_over_T...) \
    (__extension__({                                                           \
        __auto_type private_adaptive_map_ent_pointer                           \
            = (adaptive_map_entry_pointer);                                    \
        struct CCC_Adaptive_map_entry private_adaptive_map_mod_ent             \
            = {.entry = {.status = CCC_ENTRY_ARGUMENT_ERROR}};                 \
        if (private_adaptive_map_ent_pointer)                                  \
        {                                                                      \
            private_adaptive_map_mod_ent                                       \
                = private_adaptive_map_ent_pointer->private;                   \
            if (private_adaptive_map_mod_ent.entry.status                      \
                & CCC_ENTRY_OCCUPIED)                                          \
            {                                                                  \
                type_name *const T = private_adaptive_map_mod_ent.entry.type;  \
                if (T)                                                         \
                {                                                              \
                    closure_over_T                                             \
                }                                                              \
            }                                                                  \
        }                                                                      \
        private_adaptive_map_mod_ent;                                          \
    }))
292
/** @private Yields a pointer to the existing element when the entry status
equals CCC_ENTRY_OCCUPIED exactly; otherwise allocates a slot, copies the
compound literal in, and links it, yielding the inserted element (or NULL on
allocation failure or a NULL entry pointer).
NOTE(review): this tests status with == while sibling macros use the bitmask
form (status & CCC_ENTRY_OCCUPIED) — a status combining OCCUPIED with other
bits would take the insert path here; confirm statuses are always exact. */
#define CCC_private_adaptive_map_or_insert_with(adaptive_map_entry_pointer,    \
                                                type_compound_literal...)      \
    (__extension__({                                                           \
        __auto_type private_or_ins_entry_pointer                               \
            = (adaptive_map_entry_pointer);                                    \
        typeof(type_compound_literal) *private_or_ins_ret = NULL;              \
        if (private_or_ins_entry_pointer)                                      \
        {                                                                      \
            if (private_or_ins_entry_pointer->private.entry.status             \
                == CCC_ENTRY_OCCUPIED)                                         \
            {                                                                  \
                private_or_ins_ret                                             \
                    = private_or_ins_entry_pointer->private.entry.type;        \
            }                                                                  \
            else                                                               \
            {                                                                  \
                private_or_ins_ret = CCC_private_adaptive_map_new(             \
                    &private_or_ins_entry_pointer->private);                   \
                CCC_private_adaptive_map_insert_key_val(                       \
                    &private_or_ins_entry_pointer->private,                    \
                    private_or_ins_ret, type_compound_literal);                \
            }                                                                  \
        }                                                                      \
        private_or_ins_ret;                                                    \
    }))
319
/** @private Unconditionally writes the compound literal into the map at the
entry. Vacant: allocates a slot, copies the literal, and links it. Occupied
(status exactly CCC_ENTRY_OCCUPIED): overwrites the stored element's payload
while saving and restoring its intrusive node links so the tree structure is
preserved. Yields a pointer to the written element, or NULL when the entry
pointer is NULL, allocation fails, or the status matches neither branch. */
#define CCC_private_adaptive_map_insert_entry_with(adaptive_map_entry_pointer, \
                                                   type_compound_literal...)   \
    (__extension__({                                                           \
        __auto_type private_ins_entry_pointer = (adaptive_map_entry_pointer);  \
        typeof(type_compound_literal) *private_adaptive_map_ins_ent_ret        \
            = NULL;                                                            \
        if (private_ins_entry_pointer)                                         \
        {                                                                      \
            if (!(private_ins_entry_pointer->private.entry.status              \
                  & CCC_ENTRY_OCCUPIED))                                       \
            {                                                                  \
                private_adaptive_map_ins_ent_ret                               \
                    = CCC_private_adaptive_map_new(                            \
                        &private_ins_entry_pointer->private);                  \
                CCC_private_adaptive_map_insert_key_val(                       \
                    &private_ins_entry_pointer->private,                       \
                    private_adaptive_map_ins_ent_ret, type_compound_literal);  \
            }                                                                  \
            else if (private_ins_entry_pointer->private.entry.status           \
                     == CCC_ENTRY_OCCUPIED)                                    \
            {                                                                  \
                struct CCC_Adaptive_map_node private_ins_ent_saved             \
                    = *CCC_private_adaptive_map_node_in_slot(                  \
                        private_ins_entry_pointer->private.map,                \
                        private_ins_entry_pointer->private.entry.type);        \
                *((typeof(type_compound_literal) *)                            \
                      private_ins_entry_pointer->private.entry.type)           \
                    = type_compound_literal;                                   \
                *CCC_private_adaptive_map_node_in_slot(                        \
                    private_ins_entry_pointer->private.map,                    \
                    private_ins_entry_pointer->private.entry.type)             \
                    = private_ins_ent_saved;                                   \
                private_adaptive_map_ins_ent_ret                               \
                    = private_ins_entry_pointer->private.entry.type;           \
            }                                                                  \
        }                                                                      \
        private_adaptive_map_ins_ent_ret;                                      \
    }))
359
/** @private Inserts the compound literal only if the key is absent. Copies
the key into a local so its address can be taken for the lookup, then either
allocates/copies/links a new element (writing the key into its key field) or,
when the status is exactly CCC_ENTRY_OCCUPIED, yields the existing entry
untouched. Yields a struct CCC_Entry by value; CCC_ENTRY_ARGUMENT_ERROR when
the map pointer is NULL. */
#define CCC_private_adaptive_map_try_insert_with(adaptive_map_pointer, key,    \
                                                 type_compound_literal...)     \
    (__extension__({                                                           \
        __auto_type private_try_ins_map_pointer = (adaptive_map_pointer);      \
        struct CCC_Entry private_adaptive_map_try_ins_ent_ret                  \
            = {.status = CCC_ENTRY_ARGUMENT_ERROR};                            \
        if (private_try_ins_map_pointer)                                       \
        {                                                                      \
            __auto_type private_adaptive_map_key = (key);                      \
            struct CCC_Adaptive_map_entry private_adaptive_map_try_ins_ent     \
                = CCC_private_adaptive_map_entry(                              \
                    private_try_ins_map_pointer,                               \
                    (void *)&private_adaptive_map_key);                        \
            if (!(private_adaptive_map_try_ins_ent.entry.status               \
                  & CCC_ENTRY_OCCUPIED))                                       \
            {                                                                  \
                CCC_private_adaptive_map_insert_and_copy_key(                  \
                    private_adaptive_map_try_ins_ent,                          \
                    private_adaptive_map_try_ins_ent_ret,                      \
                    private_adaptive_map_key, type_compound_literal);          \
            }                                                                  \
            else if (private_adaptive_map_try_ins_ent.entry.status             \
                     == CCC_ENTRY_OCCUPIED)                                    \
            {                                                                  \
                private_adaptive_map_try_ins_ent_ret                           \
                    = private_adaptive_map_try_ins_ent.entry;                  \
            }                                                                  \
        }                                                                      \
        private_adaptive_map_try_ins_ent_ret;                                  \
    }))
391
/** @private Inserts the compound literal when the key is absent, or assigns
over the existing element when present. The occupied path saves the stored
element's intrusive node links, overwrites the payload with the literal,
restores the links, and then rewrites the key field from the caller's key so
the stored key matches the lookup key even if the literal carried a different
one. Yields a struct CCC_Entry by value; CCC_ENTRY_ARGUMENT_ERROR when the
map pointer is NULL. */
#define CCC_private_adaptive_map_insert_or_assign_with(                        \
    adaptive_map_pointer, key, type_compound_literal...)                       \
    (__extension__({                                                           \
        __auto_type private_ins_or_assign_map_pointer                          \
            = (adaptive_map_pointer);                                          \
        struct CCC_Entry private_adaptive_map_ins_or_assign_ent_ret            \
            = {.status = CCC_ENTRY_ARGUMENT_ERROR};                            \
        if (private_ins_or_assign_map_pointer)                                 \
        {                                                                      \
            __auto_type private_adaptive_map_key = (key);                      \
            struct CCC_Adaptive_map_entry                                      \
                private_adaptive_map_ins_or_assign_ent                         \
                = CCC_private_adaptive_map_entry(                              \
                    private_ins_or_assign_map_pointer,                         \
                    (void *)&private_adaptive_map_key);                        \
            if (!(private_adaptive_map_ins_or_assign_ent.entry.status          \
                  & CCC_ENTRY_OCCUPIED))                                       \
            {                                                                  \
                CCC_private_adaptive_map_insert_and_copy_key(                  \
                    private_adaptive_map_ins_or_assign_ent,                    \
                    private_adaptive_map_ins_or_assign_ent_ret,                \
                    private_adaptive_map_key, type_compound_literal);          \
            }                                                                  \
            else if (private_adaptive_map_ins_or_assign_ent.entry.status       \
                     == CCC_ENTRY_OCCUPIED)                                    \
            {                                                                  \
                struct CCC_Adaptive_map_node private_ins_ent_saved             \
                    = *CCC_private_adaptive_map_node_in_slot(                  \
                        private_adaptive_map_ins_or_assign_ent.map,            \
                        private_adaptive_map_ins_or_assign_ent.entry.type);    \
                *((typeof(type_compound_literal) *)                            \
                      private_adaptive_map_ins_or_assign_ent.entry.type)       \
                    = type_compound_literal;                                   \
                *CCC_private_adaptive_map_node_in_slot(                        \
                    private_adaptive_map_ins_or_assign_ent.map,                \
                    private_adaptive_map_ins_or_assign_ent.entry.type)         \
                    = private_ins_ent_saved;                                   \
                private_adaptive_map_ins_or_assign_ent_ret                     \
                    = private_adaptive_map_ins_or_assign_ent.entry;            \
                *((typeof(private_adaptive_map_key) *)                         \
                      CCC_private_adaptive_map_key_in_slot(                    \
                          private_ins_or_assign_map_pointer,                   \
                          private_adaptive_map_ins_or_assign_ent_ret.type))    \
                    = private_adaptive_map_key;                                \
            }                                                                  \
        }                                                                      \
        private_adaptive_map_ins_or_assign_ent_ret;                            \
    }))
441
442/* NOLINTEND(readability-identifier-naming) */
443
444#endif /* CCC_PRIVATE_ADAPTIVE_MAP_H */
Symbol reference (hover-index data from the generated documentation):

Functions:
- void *CCC_private_adaptive_map_insert(struct CCC_Adaptive_map *, struct CCC_Adaptive_map_node *)
- struct CCC_Adaptive_map_entry CCC_private_adaptive_map_entry(struct CCC_Adaptive_map *, void const *)
- void *CCC_private_adaptive_map_key_in_slot(struct CCC_Adaptive_map const *, void const *)
- struct CCC_Adaptive_map_node *CCC_private_adaptive_map_node_in_slot(struct CCC_Adaptive_map const *, void const *)

struct CCC_Adaptive_map_entry — definition: private_adaptive_map.h:93
- struct CCC_Adaptive_map *map (line 95)
- struct CCC_Entry entry (line 97)

struct CCC_Adaptive_map_node — definition: private_adaptive_map.h:44
- struct CCC_Adaptive_map_node *branch[2] (line 46)
- struct CCC_Adaptive_map_node *parent (line 48)

struct CCC_Adaptive_map — definition: private_adaptive_map.h:59
- struct CCC_Adaptive_map_node *root (line 61)
- size_t size (line 63)
- size_t sizeof_type (line 65)
- size_t type_intruder_offset (line 67)
- size_t key_offset (line 69)
- CCC_Key_comparator *compare (line 71)
- CCC_Allocator *allocate (line 73)
- void *context (line 75)

Related definition: private_types.h:53 (presumably struct CCC_Entry — verify in private_types.h).

Callbacks (types.h):
- CCC_Order CCC_Key_comparator(CCC_Key_comparator_context) — a callback function for three-way comparing two stored keys (types.h:383).
- void *CCC_Allocator(CCC_Allocator_context) — an allocation function at the core of all containers (types.h:340).

Wrapper type — definition: private_adaptive_map.h:105
- struct CCC_Adaptive_map_entry private (line 107)