data_structures_rmolinari 0.5.0 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +6 -4
- data/ext/cc.h +3879 -0
- data/ext/shared.h +33 -0
- metadata +3 -1
data/ext/cc.h
ADDED
@@ -0,0 +1,3879 @@
/*----------------------------------------- CC: CONVENIENT CONTAINERS v1.0.1 -------------------------------------------

This library provides usability-oriented generic containers (vectors, linked lists, unordered maps, and unordered sets).

Features:

- Fully generic API (no need to specify element or key type except when first declaring a container).
- No need to pre-declare container types per element or key/element pair.
- Type safety.
- User-defined destructor, comparison, and hash functions associated with element and key types.
- Handles memory allocation failure.
- Single header.
- Compiles in C and C++.

Requires C23, or C11 and compiler support for __typeof__, or C++11.

Tested with GCC, MinGW, and Clang.

#including the library:

Place this at the top of your file/s:

#include "cc.h"

The following can be #defined before #including the library in any file:

#define CC_NO_SHORT_NAMES
By default, CC exposes API macros without the "cc_" prefix.
Define this flag to withhold the unprefixed names.

The following can be #defined anywhere and affect all calls to API macros where the definition is visible:

#define CC_REALLOC my_realloc
Causes API macros to use a custom realloc function rather than the one in the standard library.

#define CC_FREE my_free
Causes API macros to use a custom free function rather than the one in the standard library.

API:

General notes:

- API macros may evaluate their first argument - the pointer to the container - multiple times, so never use
expressions with side effects (e.g. &our_containers[ ++i ] ) for that argument. In GCC and Clang, attempting to do
so will cause a compiler warning. All other arguments are only evaluated once.
- If CC_NO_SHORT_NAMES was declared, all API macros are prefixed with "cc_".
- Duplicating a container handle via assignment and then operating on the duplicate will invalidate the original.
Hence, only create a duplicate via assignment (including through function parameters and return values) if you have
finished with the original.
- An iterator is a pointer to an element in the container or to the associated end (or r_end, if the container
supports it). In the documentation below, these pointers are called "pointer-iterators".
- In the documentation below, el_ty is the container's element type and key_ty is the container's key type (where
applicable).

All containers:

The following macros behave the same way for all containers:

void init( <any container type> *cntr )

Initializes cntr for use.
This call cannot fail (it does not allocate memory).

bool init_clone( <any container type> *cntr, <same container type> *src )

Initializes cntr as a shallow copy of src.
Returns true, or false if unsuccessful due to memory allocation failure.

size_t size( <any container type> *cntr )

Returns the number of elements.

void clear( <any container type> *cntr )

Erases all elements, calling the element and key types' destructors if they exist.

void cleanup( <any container type> *cntr )

Erases all elements (calling the element and key types' destructors if they exist), frees any other memory
associated with the container, and initializes the container for reuse.

for_each( <any container type> *cntr, i_name )

Creates a loop iterating over all elements from first to last.
This macro declares a pointer-iterator (el_ty *) named i_name.
It is equivalent to
for( el_ty *i_name = first( cntr ); i_name != end( cntr ); i_name = next( cntr, i_name ) )
and should be followed by the loop body.

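For orientation, a minimal usage sketch combining the general macros above (illustrative only; it assumes a
vec( int ) container, which is documented in the next section, and printf from stdio.h):

#include <stdio.h>
#include "cc.h"

int main( void )
{
  vec( int ) our_vec;
  init( &our_vec );

  if( !push( &our_vec, 5 ) )
    return 1; // Out of memory.

  for_each( &our_vec, el )
    printf( "%d\n", *el ); // Prints 5.

  printf( "%zu\n", size( &our_vec ) ); // Prints 1.
  cleanup( &our_vec );
}
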
Vector (a dynamic array that stores elements in contiguous memory):

vec( el_ty ) cntr

Declares an uninitialized vector named cntr.

size_t cap( vec( el_ty ) *cntr )

Returns the current capacity.

bool reserve( vec( el_ty ) *cntr, size_t n )

Ensures that the capacity is large enough to support n elements.
Returns true, or false if unsuccessful due to memory allocation failure.

bool resize( vec( el_ty ) *cntr, size_t n )

Sets the number of elements to n.
If n is above the current size, the new elements are uninitialized.
If n is below the current size, the element type's destructor (if it exists) is called for each erased element.
Returns true, or false if unsuccessful due to memory allocation failure.

bool shrink( vec( el_ty ) *cntr )

Shrinks the capacity to the current size.
Returns true, or false if unsuccessful due to memory allocation failure.

el_ty *get( vec( el_ty ) *cntr, size_t i )

Returns a pointer-iterator to the element at index i.

el_ty *push( vec( el_ty ) *cntr, el_ty el )

Inserts el at the end of the vector.
Returns a pointer-iterator to the new element, or NULL in the case of memory allocation failure.

el_ty *push_n( vec( el_ty ) *cntr, el_ty *els, size_t n )

Inserts n elements from array els at the end of the vector.
Returns a pointer-iterator to the first new element, or NULL in the case of memory allocation failure.

el_ty *insert( vec( el_ty ) *cntr, size_t i, el_ty el )

Inserts el at index i.
Returns a pointer-iterator to the new element, or NULL in the case of memory allocation failure.

el_ty *insert_n( vec( el_ty ) *cntr, size_t i, el_ty *els, size_t n )

Inserts n elements from array els at index i.
Returns a pointer-iterator to the first new element, or NULL in the case of memory allocation failure.

el_ty *erase( vec( el_ty ) *cntr, size_t i )

Erases the element at index i, calling the element type's destructor if it exists.
Returns a pointer-iterator to the element after the erased element, or an end pointer-iterator if there
is no subsequent element.

el_ty *erase_n( vec( el_ty ) *cntr, size_t i, size_t n )

Erases n elements beginning at index i, calling the element type's destructor, if it exists, for each
erased element.
Returns a pointer-iterator to the element after the erased elements, or an end pointer-iterator if there is no
subsequent element.

el_ty *end( vec( el_ty ) *cntr )

Returns an end pointer-iterator.
This call is synonymous with get( cntr, size( cntr ) ).

el_ty *first( vec( el_ty ) *cntr )

Returns a pointer-iterator to the first element, or an end pointer-iterator if the vector is empty.
This call is synonymous with get( cntr, 0 ).

el_ty *last( vec( el_ty ) *cntr )

Returns a pointer-iterator to the last element.
This call is synonymous with get( cntr, size( cntr ) - 1 ).
It assumes that the vector is not empty.

el_ty *next( vec( el_ty ) *cntr, el_ty *i )

Returns a pointer-iterator to the element after the element pointed to by i, or an end pointer-iterator if i
points to the last element.

Notes:
- Vector pointer-iterators (including end) are invalidated by any API calls that cause memory reallocation.

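A short vector sketch based on the macros above (illustrative only; allocation-failure checks and stdio.h omitted
for brevity):

vec( int ) our_vec;
init( &our_vec );

int els[] = { 1, 2, 3 };
push_n( &our_vec, els, 3 );            // our_vec: 1 2 3
insert( &our_vec, 1, 10 );             // our_vec: 1 10 2 3
erase( &our_vec, 0 );                  // our_vec: 10 2 3
printf( "%d\n", *get( &our_vec, 0 ) ); // Prints 10.

cleanup( &our_vec );
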
List (a doubly linked list with sentinels):

list( el_ty ) cntr

Declares an uninitialized list named cntr.

el_ty *insert( list( el_ty ) *cntr, el_ty *i, el_ty el )

Inserts el before pointer-iterator i.
Returns a pointer-iterator to the new element, or NULL in the case of memory allocation failure.

el_ty *push( list( el_ty ) *cntr, el_ty el )

Inserts el at the end of the list.
Returns a pointer-iterator to the new element, or NULL in the case of memory allocation failure.
This call is synonymous with insert( cntr, end( cntr ), el ).

el_ty *erase( list( el_ty ) *cntr, el_ty *i )

Erases element pointed to by pointer-iterator i, calling the element type's destructor if it exists.
Returns a pointer-iterator to the element after i, or an end pointer-iterator if i was the last element.

bool splice( list( el_ty ) *cntr, el_ty *i, list( el_ty ) src, el_ty *src_i )

Removes element pointed to by pointer-iterator src_i from src and inserts it before the element pointed to by
pointer-iterator i in cntr.
Returns true, or false if unsuccessful.
This call only allocates memory, and therefore can only fail, if the list has not had any element inserted,
pushed, or spliced into it since it was initialized.

el_ty *first( list( el_ty ) *cntr )

Returns a pointer-iterator to the first element, or an end pointer-iterator if the list is empty.

el_ty *last( list( el_ty ) *cntr )

Returns a pointer-iterator to the last element, or an r_end pointer-iterator if the list is empty.

el_ty *r_end( list( el_ty ) *cntr )

Returns an r_end (reverse end) pointer-iterator for the list. r_end acts as a sentinel node.

el_ty *end( list( el_ty ) *cntr )

Returns an end pointer-iterator for the list. End acts as a sentinel node.

el_ty *next( list( el_ty ) *cntr, el_ty *i )

Returns a pointer-iterator to the element after the one pointed to by i.
If i points to the last element, the return value is an end pointer-iterator.
If i points to r_end, the return value is a pointer-iterator to the first element, or an end pointer-iterator if
the list is empty.

el_ty *prev( list( el_ty ) *cntr, el_ty *i )

Returns a pointer-iterator to the element before the one pointed to by i.
If i points to the first element, the return value is an r_end pointer-iterator.
If i points to end, then the return value is a pointer-iterator to the last element, or an r_end pointer-iterator
if the list is empty.

r_for_each( list( el_ty ) *cntr, i_name )

Creates a loop iterating over all elements from last to first.
This macro declares an el_ty * pointer-iterator named i_name.
It is equivalent to
for( el_ty *i_name = last( cntr ); i_name != r_end( cntr ); i_name = prev( cntr, i_name ) )
and should be followed by the body of the loop.

Notes:
- List pointer-iterators (including r_end and end) are not invalidated by any API calls besides init and cleanup,
unless they point to erased elements.

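A short list sketch based on the macros above (illustrative only; allocation-failure checks and stdio.h omitted):

list( int ) our_list;
init( &our_list );

push( &our_list, 1 );
push( &our_list, 3 );
insert( &our_list, last( &our_list ), 2 ); // our_list: 1 2 3

r_for_each( &our_list, el )
  printf( "%d ", *el ); // Prints 3 2 1.

cleanup( &our_list );
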
Map (an unordered container associating elements with keys, implemented as a Robin Hood hash table):

map( key_ty, el_ty ) cntr

Declares an uninitialized map named cntr.
key_ty must be a type, or alias for a type, for which comparison and hash functions have been defined.
This requirement is enforced internally such that neglecting it causes a compiler error.
For types with in-built comparison and hash functions, and for details on how to declare new comparison and hash
functions, see "Destructor, comparison, and hash functions and custom max load factors" below.

size_t cap( map( key_ty, el_ty ) *cntr )

Returns the current capacity, i.e. bucket count.
Note that the number of elements a map can support without rehashing is not its capacity but its capacity
multiplied by the max load factor associated with its key type.

bool reserve( map( key_ty, el_ty ) *cntr, size_t n )

Ensures that the capacity is large enough to support n elements without rehashing.
Returns true, or false if unsuccessful due to memory allocation failure.

bool shrink( map( key_ty, el_ty ) *cntr )

Shrinks the capacity to best accommodate the current size.
Returns true, or false if unsuccessful due to memory allocation failure.

el_ty *insert( map( key_ty, el_ty ) *cntr, key_ty key, el_ty el )

Inserts element el with the specified key.
If an element with the same key already exists, the existing element is replaced.
Returns a pointer-iterator to the new element, or NULL in the case of memory allocation failure.
If adding one element would violate the map's max load factor, failure can occur even if it already contains the
key.

el_ty *get( map( key_ty, el_ty ) *cntr, key_ty key )

Returns a pointer-iterator to the element with the specified key, or NULL if no such element exists.

el_ty *get_or_insert( map( key_ty, el_ty ) *cntr, key_ty key, el_ty el )

Inserts element el if no element with the specified key already exists.
Returns a pointer-iterator to the new element if it was inserted, or a pointer-iterator to the existing
element with the same key, or NULL in the case of memory allocation failure.
If adding one element would violate the map's max load factor, failure can occur even if it already contains the
key.
Determine whether an element was inserted by comparing the map's size before and after the call.

const key_ty *key_for( map( key_ty, el_ty ) *cntr, el_ty *i )

Returns a const pointer to the key for the element pointed to by pointer-iterator i.

bool erase( map( key_ty, el_ty ) *cntr, key_ty key )

Erases the element with the specified key, if it exists.
Returns true if an element was erased, or false if no such element exists.

void erase_itr( map( key_ty, el_ty ) *cntr, el_ty *i )

Erases the element pointed to by pointer-iterator i.

el_ty *first( map( key_ty, el_ty ) *cntr )

Returns a pointer-iterator to the first element, or an end pointer-iterator if the map is empty.

el_ty *last( map( key_ty, el_ty ) *cntr )

Returns a pointer-iterator to the last element, or an r_end pointer-iterator if the map is empty.

el_ty *r_end( map( key_ty, el_ty ) *cntr )

Returns an r_end (reverse end) pointer-iterator for the map.

el_ty *end( map( key_ty, el_ty ) *cntr )

Returns an end pointer-iterator for the map.

el_ty *next( map( key_ty, el_ty ) *cntr, el_ty *i )

Returns a pointer-iterator to the element after the one pointed to by i.
If i points to the last element, the value returned is an end pointer-iterator.
If i points to r_end, the value returned points to the first element, or is an end pointer-iterator if the map is
empty.

el_ty *prev( map( key_ty, el_ty ) *cntr, el_ty *i )

Returns a pointer-iterator to the element before the one pointed to by i.
If i points to the first element, the value returned is an r_end pointer-iterator.
If i points to end, then the value returned points to the last element, or is an r_end pointer-iterator if the map
is empty.

for_each( map( key_ty, el_ty ) *cntr, key_ptr_name, i_name )

Creates a loop iterating over all elements from first to last, with easy access to the corresponding keys.
This macro declares a pointer to the key (const key_ty *) named key_ptr_name and a pointer-iterator (el_ty *)
named i_name.
It should be followed by the body of the loop.

r_for_each( map( key_ty, el_ty ) *cntr, i_name )

Creates a loop iterating over all elements from last to first.
This macro declares an el_ty * pointer-iterator named i_name.
It is equivalent to
for( el_ty *i_name = last( cntr ); i_name != r_end( cntr ); i_name = prev( cntr, i_name ) )
and should be followed by the body of the loop.

r_for_each( map( key_ty, el_ty ) *cntr, key_ptr_name, i_name )

Creates a loop iterating over all elements from last to first, with easy access to the corresponding keys.
This macro declares a pointer to the key (const key_ty *) named key_ptr_name and a pointer-iterator (el_ty *)
named i_name.
It should be followed by the body of the loop.

Notes:
- Map pointer-iterators (including r_end and end) may be invalidated by any API calls that cause memory
reallocation.

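A short map sketch based on the macros above (illustrative only; allocation-failure checks and stdio.h omitted; it
relies on the in-built comparison and hash functions for char * keys):

map( char *, int ) our_map;
init( &our_map );

insert( &our_map, "apples", 5 );
insert( &our_map, "bananas", 7 );

int *el = get( &our_map, "apples" );
if( el )
  printf( "%d\n", *el ); // Prints 5.

for_each( &our_map, key, val )
  printf( "%s: %d\n", *key, *val );

erase( &our_map, "bananas" );
cleanup( &our_map );
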
Set (Robin Hood hash table for elements without a separate key):

set( el_ty ) cntr

Declares an uninitialized set named cntr.
el_ty must be a type, or alias for a type, for which comparison and hash functions have been defined.
This requirement is enforced internally such that neglecting it causes a compiler error.
For types with in-built comparison and hash functions, and for details on how to declare new comparison and hash
functions, see "Destructor, comparison, and hash functions and custom max load factors" below.

size_t cap( set( el_ty ) *cntr )

Returns the current capacity, i.e. bucket count.
Note that the number of elements a set can support without rehashing is not its capacity but its capacity
multiplied by the max load factor associated with its key type.

bool reserve( set( el_ty ) *cntr, size_t n )

Ensures that the capacity is large enough to support n elements without rehashing.
Returns true, or false if unsuccessful due to memory allocation failure.

bool shrink( set( el_ty ) *cntr )

Shrinks the capacity to best accommodate the current size.
Returns true, or false if unsuccessful due to memory allocation failure.

el_ty *insert( set( el_ty ) *cntr, el_ty el )

Inserts element el.
If the element already exists, the existing element is replaced.
Returns a pointer-iterator to the new element, or NULL in the case of memory allocation failure.
Note that if adding one element would violate the set's max load factor, failure can occur even if it already
contains el.

el_ty *get( set( el_ty ) *cntr, el_ty el )

Returns a pointer-iterator to element el, or NULL if no such element exists.

el_ty *get_or_insert( set( el_ty ) *cntr, el_ty el )

Inserts element el if it does not already exist.
Returns a pointer-iterator to the new element if it was inserted, or a pointer-iterator to the existing element,
or NULL in the case of memory allocation failure.
If adding one element would violate the set's max load factor, failure can occur even if it already contains the
element.
Determine whether an element was inserted by comparing the set's size before and after the call.

bool erase( set( el_ty ) *cntr, el_ty el )

Erases the element el, if it exists.
Returns true if an element was erased, or false if no such element exists.

el_ty *first( set( el_ty ) *cntr )

Returns a pointer-iterator to the first element, or an end pointer-iterator if the set is empty.

el_ty *last( set( el_ty ) *cntr )

Returns a pointer-iterator to the last element, or an r_end pointer-iterator if the set is empty.

el_ty *r_end( set( el_ty ) *cntr )

Returns an r_end (reverse end) pointer-iterator for the set.

el_ty *end( set( el_ty ) *cntr )

Returns an end pointer-iterator for the set.

el_ty *next( set( el_ty ) *cntr, el_ty *i )

Returns a pointer-iterator to the element after the one pointed to by i.
If i points to the last element, the pointer-iterator returned is an end pointer-iterator.
If i points to r_end, then the pointer-iterator returned points to the first element, or is an end
pointer-iterator if the set is empty.

el_ty *prev( set( el_ty ) *cntr, el_ty *i )

Returns a pointer-iterator to the element before the one pointed to by i.
If i points to the first element, the return value is an r_end pointer-iterator.
If i points to end, then the pointer-iterator returned points to the last element, or is an r_end pointer-iterator
if the set is empty.

r_for_each( set( el_ty ) *cntr, i_name )

Creates a loop iterating over all elements from last to first.
This macro declares an el_ty * pointer-iterator named i_name.
It is equivalent to
for( el_ty *i_name = last( cntr ); i_name != r_end( cntr ); i_name = prev( cntr, i_name ) )
and should be followed by the body of the loop.

Notes:
- Set pointer-iterators (including r_end and end) may be invalidated by any API calls that cause memory
reallocation.

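A short set sketch based on the macros above (illustrative only; allocation-failure checks and stdio.h omitted; it
relies on the in-built comparison and hash functions for int):

set( int ) our_set;
init( &our_set );

insert( &our_set, 5 );
insert( &our_set, 5 ); // Replaces the existing 5, so the size stays 1.

if( get( &our_set, 5 ) )
  printf( "5 is present\n" );

erase( &our_set, 5 );
cleanup( &our_set );
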
Destructor, comparison, and hash functions and custom max load factors:

This part of the API allows the user to define custom destructor, comparison, and hash functions and max load
factors for a type.
Once these functions are defined, any container using that type for its elements or keys will call them
automatically.
Once the max load factor is defined, any map using the type for its key and any set using the type for its elements
will use the defined load factor to determine when rehashing is necessary.

#define CC_DTOR ty, { function body }
#include "cc.h"

Defines a destructor for type ty.
The signature of the function is void ( ty val ).

#define CC_CMPR ty, { function body }
#include "cc.h"

Defines a comparison function for type ty.
The signature of the function is int ( ty val_1, ty val_2 ).
The function should return 0 if val_1 and val_2 are equal, a negative integer if val_1 is less than val_2, and a
positive integer if val_1 is more than val_2.

#define CC_HASH ty, { function body }
#include "cc.h"

Defines a hash function for type ty.
The signature of the function is size_t ( ty val ).
The function should return the hash of val.

#define CC_LOAD ty, max_load_factor

Defines the max load factor for type ty.
max_load_factor should be a float or double between 0.0 and 1.0.
The default max load factor is 0.8.

Trivial example:

typedef struct { int x; } my_type;
#define CC_DTOR my_type, { printf( "!%d\n", val.x ); }
#define CC_CMPR my_type, { return ( val_1.x > val_2.x ) - ( val_1.x < val_2.x ); }
#define CC_HASH my_type, { return val.x * 2654435761ull; }
#define CC_LOAD my_type, 0.5
#include "cc.h"

Notes:
- These functions are inline and have static scope, so you need to either redefine them in each translation unit
from which they should be called or (preferably) define them in a shared header. For structs or unions, a sensible
place to define them would be immediately after the definition of the struct or union.
- Only one destructor, comparison, or hash function or max load factor should be defined by the user for each type.
- #including cc.h in these cases does not #include the full header, so you still need to #include it separately
at the top of your files.
- In-built comparison and hash functions are already defined for the following types: char, unsigned char, signed
char, unsigned short, short, unsigned int, int, unsigned long, long, unsigned long long, long long, size_t, and
char * (a NULL-terminated string). Defining a comparison or hash function for one of these types will overwrite
the in-built function.

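A sketch of the shared-header arrangement suggested in the first note above (the file name my_type.h is
hypothetical):

// my_type.h
#ifndef MY_TYPE_H
#define MY_TYPE_H

typedef struct { int x; } my_type;

#define CC_CMPR my_type, { return ( val_1.x > val_2.x ) - ( val_1.x < val_2.x ); }
#define CC_HASH my_type, { return val.x * 2654435761ull; }
#include "cc.h" // Defines the functions for this translation unit; does not pull in the full header.

#endif

Each file that uses my_type as a key or element then #includes both cc.h and my_type.h.
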
Version history:

27/01/2023 1.0.1: Minor corrections to code comments.
26/12/2022 1.0.0: Initial release.

License (MIT):

Copyright (c) 2022-2023 Jackson L. Allan

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

*/

#if !defined( CC_DTOR ) && !defined( CC_CMPR ) && !defined( CC_HASH )/*-----------------------------------------------*/
/* */
/* REGULAR HEADER MODE */
/* */
/*--------------------------------------------------------------------------------------------------------------------*/

#ifndef CC_NO_SHORT_NAMES
#define vec( ... ) cc_vec( __VA_ARGS__ )
#define list( ... ) cc_list( __VA_ARGS__ )
#define map( ... ) cc_map( __VA_ARGS__ )
#define set( ... ) cc_set( __VA_ARGS__ )
#define init( ... ) cc_init( __VA_ARGS__ )
#define init_clone( ... ) cc_init_clone( __VA_ARGS__ )
#define size( ... ) cc_size( __VA_ARGS__ )
#define cap( ... ) cc_cap( __VA_ARGS__ )
#define reserve( ... ) cc_reserve( __VA_ARGS__ )
#define resize( ... ) cc_resize( __VA_ARGS__ )
#define shrink( ... ) cc_shrink( __VA_ARGS__ )
#define insert( ... ) cc_insert( __VA_ARGS__ )
#define insert_n( ... ) cc_insert_n( __VA_ARGS__ )
#define get_or_insert( ... ) cc_get_or_insert( __VA_ARGS__ )
#define push( ... ) cc_push( __VA_ARGS__ )
#define push_n( ... ) cc_push_n( __VA_ARGS__ )
#define splice( ... ) cc_splice( __VA_ARGS__ )
#define get( ... ) cc_get( __VA_ARGS__ )
#define key_for( ... ) cc_key_for( __VA_ARGS__ )
#define erase( ... ) cc_erase( __VA_ARGS__ )
#define erase_n( ... ) cc_erase_n( __VA_ARGS__ )
#define erase_itr( ... ) cc_erase_itr( __VA_ARGS__ )
#define clear( ... ) cc_clear( __VA_ARGS__ )
#define cleanup( ... ) cc_cleanup( __VA_ARGS__ )
#define first( ... ) cc_first( __VA_ARGS__ )
#define last( ... ) cc_last( __VA_ARGS__ )
#define r_end( ... ) cc_r_end( __VA_ARGS__ )
#define end( ... ) cc_end( __VA_ARGS__ )
#define next( ... ) cc_next( __VA_ARGS__ )
#define prev( ... ) cc_prev( __VA_ARGS__ )
#define for_each( ... ) cc_for_each( __VA_ARGS__ )
#define r_for_each( ... ) cc_r_for_each( __VA_ARGS__ )
#endif

#ifndef CC_H
#define CC_H

#include <stdalign.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdlib.h>
#include <string.h>

#ifdef __cplusplus
#include <type_traits>
#endif

/*--------------------------------------------------------------------------------------------------------------------*/
/* Preliminary */
/*--------------------------------------------------------------------------------------------------------------------*/

// _Static_assert alternative that can be used inside an expression.
#define CC_STATIC_ASSERT( xp ) (void)sizeof( char[ (xp) ? 1 : -1 ] )

// In GCC and Clang, we can generate a warning if the user passes an expression that may have side effects as the first
// argument of API macros.
// If the expression could have side effects, the compiler will not be able to resolve a comparison of it with itself
// at compile time, which we can check using __builtin_constant_p.
// The warning itself is generated via a division by zero.
// This macro may produce false positives (e.g. for &my_containers[ my_func() ] where my_func is a pure function that
// always returns the same value), but that is a reasonable price to pay for more macro safety.

#ifdef __GNUC__

#define CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ) \
(void)( \
  "WARNING: CONTAINER ARGUMENT MAY HAVE DUPLICATED SIDE EFFECTS" &&1/__builtin_constant_p((cntr)==(cntr)) \
) \

#else
#define CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ) (void)0
#endif

// CC_SELECT_ON_NUM_ARGS macro for overloading API macros based on number of arguments.
#define CC_CAT_( a, b ) a##b
#define CC_CAT( a, b ) CC_CAT_( a, b )
#define CC_N_ARGS_( _1, _2, _3, _4, _5, _6, n, ... ) n
#define CC_N_ARGS( ... ) CC_N_ARGS_( __VA_ARGS__, _6, _5, _4, _3, _2, _1, x )
#define CC_SELECT_ON_NUM_ARGS( func, ... ) CC_CAT( func, CC_N_ARGS( __VA_ARGS__ ) )( __VA_ARGS__ )

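// Illustration: with a hypothetical macro family foo_1/foo_2/foo_3, CC_SELECT_ON_NUM_ARGS( foo, a, b ) expands to
// foo_2( a, b ) and CC_SELECT_ON_NUM_ARGS( foo, a, b, c ) expands to foo_3( a, b, c ), because CC_N_ARGS shifts its
// arguments into CC_N_ARGS_ so that the selected token (_2, _3, ...) reflects the number of arguments passed.
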
// Typeof for expressions and abstract declarations.
#ifdef __cplusplus
#define CC_TYPEOF_XP( xp ) std::decay<std::remove_reference<decltype( xp )>::type>::type
#define CC_TYPEOF_TY( ty ) std::decay<std::remove_reference<decltype( std::declval<ty>() )>::type>::type
#else
// TODO: Add C23 check once C23 is supported by major compilers.
#define CC_TYPEOF_XP( xp ) __typeof__( xp )
#define CC_TYPEOF_TY( ty ) __typeof__( ty )
#endif

// Macro used with CC_STATIC_ASSERT to provide type safety in cc_init_clone and cc_splice calls.
#ifdef __cplusplus
#define CC_IS_SAME_TY( a, b ) std::is_same<CC_TYPEOF_XP( a ), CC_TYPEOF_XP( b )>::value
#else
#define CC_IS_SAME_TY( a, b ) _Generic( (a), CC_TYPEOF_XP( b ): true, default: false )
#endif

// Macro used primarily for silencing unused-expression warnings for macros that return cast pointers.
// This issue seems to affect Clang in particular.
// GCC, on the other hand, seems to accept that pointer casts may be redundant.
#ifdef __cplusplus
template<typename ty_1, typename ty_2> ty_1 cc_maybe_unused( ty_2 xp ){ return (ty_1)xp; }
#define CC_CAST_MAYBE_UNUSED( ty, xp ) cc_maybe_unused<ty>( xp )
#else
#define CC_CAST_MAYBE_UNUSED( ty, xp ) ( ( ty ){ 0 } = ( (ty)( xp ) ) )
#endif

// Some functions that must return true/false must return the value in the form of a pointer.
// This is because they are paired in ternary expressions inside API macros with other functions for other containers
// that return a pointer (primarily cc_erase).
// While any suitably aligned pointer - e.g. the container handle - would do, we declare a global cc_dummy_true_ptr for
// the sake of code readability.
static max_align_t cc_dummy_true;
static void *cc_dummy_true_ptr = &cc_dummy_true;

// Container ids to identify container type at compile-time.
#define CC_VEC 1
#define CC_LIST 2
#define CC_MAP 3
#define CC_SET 4

// Produces underlying function pointer type for a given element/key type pair.
#define CC_MAKE_BASE_FNPTR_TY( el_ty, key_ty ) CC_TYPEOF_TY( CC_TYPEOF_TY( el_ty ) (*)( CC_TYPEOF_TY( key_ty )* ) )

// Produces container handle for a given element type, key type, and container id.
// In other words, this macro creates a pointer that carries all the information needed to identify and operate on a
// container at compile time: el_ty (*(*)[ cntr_id ])( key_ty * ).
// That is a pointer to an array of function pointers whose signature is el_ty ( key_ty * ).
#define CC_MAKE_CNTR_TY( el_ty, key_ty, cntr_id ) CC_TYPEOF_TY( CC_MAKE_BASE_FNPTR_TY( el_ty, key_ty )(*)[ cntr_id ] )

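// For example (illustration only), CC_MAKE_CNTR_TY( int, cc_dummy_ty, CC_VEC ) yields the type
// int (*(*)[ CC_VEC ])( cc_dummy_ty * ): the array length encodes the container id, the return type encodes the
// element type, and the parameter type encodes the key type.
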
// Dummy type used as key type in containers that don't use keys.
// This greatly reduces the complexity of API macros.
typedef struct{ char nothing; } cc_dummy_ty;

// API macros for declaring containers.

#define cc_vec( el_ty ) CC_MAKE_CNTR_TY( el_ty, cc_dummy_ty, CC_VEC )

#define cc_list( el_ty ) CC_MAKE_CNTR_TY( el_ty, cc_dummy_ty, CC_LIST )

#define cc_map( key_ty, el_ty ) CC_MAKE_CNTR_TY( \
  el_ty, \
  key_ty, \
  /* Compiler error if key type lacks compare and hash functions. */ \
  CC_MAP * ( ( CC_HAS_CMPR( key_ty ) && CC_HAS_HASH( key_ty ) ) ? 1 : -1 ) \
) \

#define cc_set( el_ty ) CC_MAKE_CNTR_TY( \
  /* As set simply wraps map, we use el_ty as both the element and key types. */ \
  /* This allows minimal changes to map macros and functions to make sets work. */ \
  el_ty, \
  el_ty, \
  /* Compiler error if el type lacks compare and hash functions. */ \
  CC_SET * ( ( CC_HAS_CMPR( el_ty ) && CC_HAS_HASH( el_ty ) ) ? 1 : -1 ) \
) \

// Retrieves a container's id (CC_VEC, CC_LIST, etc.) from its handle.
#define CC_CNTR_ID( cntr ) ( sizeof( *cntr ) / sizeof( **cntr ) )

// Retrieves a container's element type from its handle.
#define CC_EL_TY( cntr ) CC_TYPEOF_XP( (**cntr)( NULL ) )

// CC_KEY_TY macros for retrieving a container's key type from its handle (i.e. from the argument of the base function
// pointer).
#ifdef __cplusplus // For C++, this can be done easily with the help of a template function.

template<typename el_ty, typename key_ty>
key_ty cc_key_ty( el_ty (*)( key_ty * ) )
{
  key_ty dummy;
  return dummy;
}

#define CC_KEY_TY( cntr ) CC_TYPEOF_XP( cc_key_ty( **cntr ) )

#else // For C, we need to use _Generic trickery to match the base function pointer type with a key type previously
      // coupled with a compare function.
      // Returns cc_dummy_ty if no compare function for the type has been defined.

#define CC_KEY_TY_SLOT( n, arg ) CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( arg ), cc_cmpr_##n##_ty ): ( cc_cmpr_##n##_ty ){ 0 },
#define CC_KEY_TY( cntr ) \
CC_TYPEOF_XP( \
  _Generic( (**cntr), \
    CC_FOR_EACH_CMPR( CC_KEY_TY_SLOT, cntr ) \
    default: _Generic( (**cntr), \
      CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), char ): ( char ){ 0 }, \
      CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned char ): ( unsigned char ){ 0 }, \
      CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), signed char ): ( signed char ){ 0 }, \
      CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned short ): ( unsigned short ){ 0 }, \
      CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), short ): ( short ){ 0 }, \
      CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned int ): ( unsigned int ){ 0 }, \
      CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), int ): ( int ){ 0 }, \
      CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned long ): ( unsigned long ){ 0 }, \
      CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), long ): ( long ){ 0 }, \
      CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned long long ): ( unsigned long long ){ 0 }, \
      CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), long long ): ( long long ){ 0 }, \
      CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), cc_maybe_size_t ): ( size_t ){ 0 }, \
      CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), char * ): ( char * ){ 0 }, \
      default: (cc_dummy_ty){ 0 } \
    ) \
  ) \
) \

#endif

// Macros to provide easy access to a container's element and key sizes.
#define CC_EL_SIZE( cntr ) sizeof( CC_EL_TY( cntr ) )
#define CC_KEY_SIZE( cntr ) sizeof( CC_KEY_TY( cntr ) )

// Macro to round up a number to a multiple of a factor.
// This is used to determine bucket size and offsets in sets and maps.
#define CC_ROUND_UP( n, factor ) ( ( ( (n) + (factor) - 1 ) / (factor) ) * (factor) )

// If the user has defined CC_REALLOC and CC_FREE, then CC_GET_REALLOC and CC_GET_FREE are replaced with those macros.
// Otherwise, they are replaced by realloc and free from the standard library.
#define CC_ARG_2_( _1, _2, ... ) _2
#define CC_ARG_2( ... ) CC_ARG_2_( __VA_ARGS__ )
#define CC_REALLOC_COMMA ,
#define CC_GET_REALLOC CC_ARG_2( CC_CAT( CC_REALLOC, _COMMA ) realloc, CC_REALLOC, )
#define CC_FREE_COMMA ,
#define CC_GET_FREE CC_ARG_2( CC_CAT( CC_FREE, _COMMA ) free, CC_FREE, )

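// Illustration of the expansion: if CC_REALLOC is undefined, CC_CAT( CC_REALLOC, _COMMA ) becomes CC_REALLOC_COMMA,
// i.e. a bare comma, so CC_ARG_2 sees an empty first argument and selects realloc. If the user has defined, say,
// CC_REALLOC my_realloc (a hypothetical custom allocator), the concatenation no longer names a macro, no extra comma
// appears, and CC_ARG_2 selects CC_REALLOC, which expands to my_realloc.
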
|
774
|
+
// Default max load factor for maps and sets.
|
775
|
+
#define CC_DEFAULT_LOAD 0.8
|
776
|
+
|
777
|
+
// Swaps a block of memory (used for Robin-Hooding in maps and sets).
|
778
|
+
static inline void cc_memswap( void *a, void *b, size_t size )
|
779
|
+
{
|
780
|
+
for( size_t i = 0; i < size; ++i )
|
781
|
+
{
|
782
|
+
char temp = ( (char *)a )[ i ];
|
783
|
+
( (char *)a )[ i ] = ( (char *)b )[ i ];
|
784
|
+
( (char *)b )[ i ] = temp;
|
785
|
+
}
|
786
|
+
}
|
787
|
+
|
788
|
+
// CC_MAKE_LVAL_COPY macro for making an addressable temporary copy of a variable or expression.
|
789
|
+
// The copy is valid until at least the end of full expression surrounding the macro call.
|
790
|
+
// In C, this is accomplished using a compound literal.
|
791
|
+
// In C++, we use rvalue reference magic.
|
792
|
+
// This is used, for example, to pass a pointer to an element (which the user may have provided as an rvalue) into an
|
793
|
+
// insert function.
|
794
|
+
#ifdef __cplusplus
|
795
|
+
template<class ty> ty& cc_unmove( ty&& var ) { return var; }
|
796
|
+
#define CC_MAKE_LVAL_COPY( ty, xp ) cc_unmove( ty( xp ) )
|
797
|
+
#else
|
798
|
+
#define CC_MAKE_LVAL_COPY( ty, xp ) *( ty[ 1 ] ){ xp }
|
799
|
+
#endif
|
800
|
+
|
801
|
+
// CC_IF_THEN_XP_ELSE_DUMMY macro for allowing an expression only if a condition is true.
|
802
|
+
// If not true, then the macro returns a dummy value of a specified type.
|
803
|
+
// This macros allows API macros to have arguments whose type depends on the container id where the different container
|
804
|
+
// types are incompatible.
|
805
|
+
// Without it, those macros could not compile.
|
806
|
+
// In other words, this macro achieves SFINAE-like functionality.
|
807
|
+
#ifdef __cplusplus
|
808
|
+
|
809
|
+
template<bool cond, class ty, class xp_ty, typename std::enable_if<cond, bool>::type = true>
|
810
|
+
xp_ty cc_if_then_xp_else_dummy( xp_ty xp ){ return xp; }
|
811
|
+
|
812
|
+
template<bool cond, class ty, class xp_ty, typename std::enable_if<!cond, bool>::type = true>
|
813
|
+
ty cc_if_then_xp_else_dummy( xp_ty xp ){ return ty(); }
|
814
|
+
|
815
|
+
#define CC_IF_THEN_XP_ELSE_DUMMY( cond, xp, ty ) ( cc_if_then_xp_else_dummy<cond, ty>( xp ) )
|
816
|
+
|
817
|
+
#else
|
818
|
+
|
819
|
+
#define CC_IF_THEN_XP_ELSE_DUMMY( cond, xp, dummy_type ) \
|
820
|
+
_Generic( (char (*)[ 1 + (bool)( cond ) ]){ 0 }, \
|
821
|
+
char (*)[ 1 ]: (dummy_type){ 0 }, \
|
822
|
+
char (*)[ 2 ]: xp \
|
823
|
+
) \
|
824
|
+
|
825
|
+
#endif
|
826
|
+
|
827
|
+
// CC_IF_THEN_PTR_TO_BOOL_ELSE_PTR macro for casting a pointer to bool only if a condition is true.
|
828
|
+
// It is necessary because some API macros (e.g. cc_erase) should return a bool to the user for certain containers and a
|
829
|
+
// pointer to an element for others.
|
830
|
+
// With this macro, all the corresponding internal functions can return pointers, and the pointer is cast to bool at the
|
831
|
+
// highest level if dictated by the container id.
|
832
|
+
#ifdef __cplusplus
|
833
|
+
|
834
|
+
template<bool cond, class ptr_ty, typename std::enable_if<cond, bool>::type = true> \
|
835
|
+
bool cc_if_then_ptr_to_bool_else_ptr( ptr_ty ptr ){ return ptr; } \
|
836
|
+
|
837
|
+
template<bool cond, class ptr_ty, typename std::enable_if<!cond, bool>::type = true> \
|
838
|
+
ptr_ty cc_if_then_ptr_to_bool_else_ptr( ptr_ty ptr ){ return ptr; } \
|
839
|
+
|
840
|
+
#define CC_IF_THEN_PTR_TO_BOOL_ELSE_PTR( cond, ptr ) \
|
841
|
+
cc_if_then_ptr_to_bool_else_ptr<cond>( ptr ) \
|
842
|
+
|
843
|
+
#else
|
844
|
+
|
845
|
+
#define CC_IF_THEN_PTR_TO_BOOL_ELSE_PTR( cond, ptr ) \
|
846
|
+
_Generic( (char (*)[ 1 + (bool)( cond ) ]){ 0 }, \
|
847
|
+
char (*)[ 1 ]: ptr, \
|
848
|
+
char (*)[ 2 ]: CC_CAST_MAYBE_UNUSED( bool, ptr ) \
|
849
|
+
) \
|
850
|
+
|
851
|
+
#endif
|
852
|
+
|
853
|
+
// Return type for all functions that could reallocate a container's memory.
|
854
|
+
// It contains a new container handle (the pointer may have changed to due reallocation) and an additional pointer whose
|
855
|
+
// purpose depends on the function.
|
856
|
+
// For functions that insert elements, that pointer points to the newly inserted elements, or NULL in the case of
|
857
|
+
// allocation failure.
|
858
|
+
// For other functions, that pointer is a dummy pointer that evaluates to true, or NULL in the case of allocation
|
859
|
+
// failure (i.e. its only purpose is to convey whether the operation was successful).
|
860
|
+
// The struct is aligned to max_align_t because the container handle must temporarily point to it (see
|
861
|
+
// CC_POINT_HNDL_TO_ALLOCING_FN_RESULT below).
|
862
|
+
typedef struct
|
863
|
+
{
|
864
|
+
alignas( max_align_t )
|
865
|
+
void *new_cntr;
|
866
|
+
void *other_ptr;
|
867
|
+
} cc_allocing_fn_result_ty;
|
868
|
+
|
869
|
+
// Helper function for one-line cc_allocing_fn_result_ty.
|
870
|
+
static inline cc_allocing_fn_result_ty cc_make_allocing_fn_result( void *new_cntr, void *other_ptr )
|
871
|
+
{
|
872
|
+
cc_allocing_fn_result_ty result;
|
873
|
+
result.new_cntr = new_cntr;
|
874
|
+
result.other_ptr = other_ptr;
|
875
|
+
return result;
|
876
|
+
}
|
877
|
+
|
878
|
+
// Performs memcpy and returns ptr.
|
879
|
+
// Used in conjunction with cc_allocing_fn_result_ty (see below).
|
880
|
+
static inline void *cc_memcpy_and_return_ptr( void *dest, void *src, size_t size, void *ptr )
|
881
|
+
{
|
882
|
+
memcpy( dest, src, size );
|
883
|
+
return ptr;
|
884
|
+
}
|
885
|
+
|
886
|
+
// All macros that call functions that could cause memory reallocation do two essential things to circumvent limitations
|
887
|
+
// of ISO C (such as the inability to declare variables in expressions and _Thread_local linking issues).
|
888
|
+
// Firstly, they temporarily set the container handle to point to a temporary cc_insert_result object returned by the
|
889
|
+
// call to the function (see CC_MAKE_LVAL_COPY above).
|
890
|
+
#define CC_POINT_HNDL_TO_ALLOCING_FN_RESULT( cntr, fn_call ) \
|
891
|
+
cntr = (CC_TYPEOF_XP( cntr ))&CC_MAKE_LVAL_COPY( cc_allocing_fn_result_ty, fn_call ) \
|
892
|
+
|
893
|
+
// Secondly, they call cc_memcpy_and_return_ptr to restore the correct container handle (a new pointer in the case of
|
894
|
+
// reallocation) and return a pointer, either to any new elements or to signify success/failure, to the user.
|
895
|
+
// (In the latter case, that pointer is cast to bool in the API macro before being passed to the user.)
|
896
|
+
// Without this call, we would be unable to access the new elements/success-or-failure pointer stored in the
|
897
|
+
// cc_allocing_fn_result_ty after restoring the correct container handle.
|
898
|
+
// Note that outside the function, a temporary container handle is created from the new handle in the
|
899
|
+
// cc_allocing_fn_result_ty so that the later (void *) is properly converted to the correct handle type.
|
900
|
+
// This new, correctly typed handle is then memcpy-ed over the user-supplied handle inside the function.
|
901
|
+
#define CC_FIX_HNDL_AND_RETURN_OTHER_PTR( cntr ) \
|
902
|
+
cc_memcpy_and_return_ptr( \
|
903
|
+
&cntr, \
|
904
|
+
&CC_MAKE_LVAL_COPY( CC_TYPEOF_XP( cntr ), ( (cc_allocing_fn_result_ty *)cntr )->new_cntr ), \
|
905
|
+
sizeof( cntr ), \
|
906
|
+
( (cc_allocing_fn_result_ty *)cntr )->other_ptr \
|
907
|
+
) \
|
908
|
+
|
909
|
+
/*--------------------------------------------------------------------------------------------------------------------*/
|
910
|
+
/* Vector */
|
911
|
+
/*--------------------------------------------------------------------------------------------------------------------*/
|
912
|
+
|
913
|
+
// Vector header.
|
914
|
+
typedef struct
|
915
|
+
{
|
916
|
+
alignas( max_align_t )
|
917
|
+
size_t size;
|
918
|
+
size_t cap;
|
919
|
+
} cc_vec_hdr_ty;
|
920
|
+
|
921
|
+
// Global placeholder for vector with no allocated storage.
|
922
|
+
// In the case of vectors, the placeholder allows us to avoid checking for a NULL container handle inside functions.
|
923
|
+
static const cc_vec_hdr_ty cc_vec_placeholder = { 0, 0 };
|
924
|
+
|
925
|
+
#define CC_VEC_INIT( cntr ) \
|
926
|
+
( \
|
927
|
+
cntr = (CC_TYPEOF_XP( cntr ))&cc_vec_placeholder, \
|
928
|
+
(void)0 \
|
929
|
+
) \
|
930
|
+
|
931
|
+
// Provides easy access to a vector's header.
|
932
|
+
static inline cc_vec_hdr_ty *cc_vec_hdr( void *cntr )
|
933
|
+
{
|
934
|
+
return (cc_vec_hdr_ty *)cntr;
|
935
|
+
}
|
936
|
+
|
937
|
+
static inline size_t cc_vec_size( void *cntr )
|
938
|
+
{
|
939
|
+
return cc_vec_hdr( cntr )->size;
|
940
|
+
}
|
941
|
+
|
942
|
+
static inline size_t cc_vec_cap( void *cntr )
|
943
|
+
{
|
944
|
+
return cc_vec_hdr( cntr )->cap;
|
945
|
+
}
|
946
|
+
|
947
|
+
static inline bool cc_vec_is_placeholder( void *cntr )
|
948
|
+
{
|
949
|
+
return cc_vec_hdr( cntr )->cap == 0;
|
950
|
+
}
|
951
|
+
|
952
|
+
// Returns a pointer-iterator to the element at index i.
|
953
|
+
static inline void *cc_vec_get( void *cntr, size_t i, size_t el_size )
|
954
|
+
{
|
955
|
+
return (char *)cntr + sizeof( cc_vec_hdr_ty ) + el_size * i;
|
956
|
+
}
|
957
|
+
|
958
|
+
#define CC_VEC_GET( cntr, i ) cc_vec_get( cntr, i, CC_EL_SIZE( cntr ) )
|
959
|
+
|
960
|
+
// Ensures that the capacity is large enough to support n elements without reallocation.
|
961
|
+
// Returns a cc_allocing_fn_result_ty containing the new handle and a pointer that evaluates to true if the operation was
|
962
|
+
// successful.
|
963
|
+
static inline cc_allocing_fn_result_ty cc_vec_reserve(
|
964
|
+
void *cntr,
|
965
|
+
size_t n,
|
966
|
+
size_t el_size,
|
967
|
+
void *(*realloc_)( void *, size_t )
|
968
|
+
)
|
969
|
+
{
|
970
|
+
if( cc_vec_cap( cntr ) >= n )
|
971
|
+
return cc_make_allocing_fn_result( cntr, cc_dummy_true_ptr );
|
972
|
+
|
973
|
+
bool is_placeholder = cc_vec_is_placeholder( cntr );
|
974
|
+
|
975
|
+
cc_vec_hdr_ty *new_cntr = (cc_vec_hdr_ty *)realloc_(
|
976
|
+
is_placeholder ? NULL : cntr,
|
977
|
+
sizeof( cc_vec_hdr_ty ) + el_size * n
|
978
|
+
);
|
979
|
+
|
980
|
+
if( !new_cntr )
|
981
|
+
return cc_make_allocing_fn_result( cntr, NULL );
|
982
|
+
|
983
|
+
if( is_placeholder )
|
984
|
+
new_cntr->size = 0;
|
985
|
+
|
986
|
+
new_cntr->cap = n;
|
987
|
+
return cc_make_allocing_fn_result( new_cntr, cc_dummy_true_ptr );
|
988
|
+
}
|
989
|
+
|
990
|
+
#define CC_VEC_RESERVE( cntr, n ) \
|
991
|
+
( \
|
992
|
+
CC_POINT_HNDL_TO_ALLOCING_FN_RESULT( \
|
993
|
+
cntr, \
|
994
|
+
cc_vec_reserve( cntr, n, CC_EL_SIZE( cntr ), CC_GET_REALLOC ) \
|
995
|
+
), \
|
996
|
+
(bool)CC_FIX_HNDL_AND_RETURN_OTHER_PTR( cntr ) \
|
997
|
+
) \
|
998
|
+
|
999
|
+
// Inserts elements at index i.
|
1000
|
+
// Returns a cc_allocing_fn_result_ty containing the new handle and a pointer-iterator to the newly inserted elements.
|
1001
|
+
// If the underlying storage needed to be expanded and an allocation failure occurred, or if n is zero, the latter
|
1002
|
+
// pointer will be NULL.
|
1003
|
+
static inline cc_allocing_fn_result_ty cc_vec_insert(
|
1004
|
+
void *cntr,
|
1005
|
+
size_t i,
|
1006
|
+
void *els,
|
1007
|
+
size_t n,
|
1008
|
+
size_t el_size,
|
1009
|
+
void *( *realloc_ )( void *, size_t )
|
1010
|
+
)
|
1011
|
+
{
|
1012
|
+
if( n == 0 )
|
1013
|
+
return cc_make_allocing_fn_result( cntr, NULL );
|
1014
|
+
|
1015
|
+
if( cc_vec_size( cntr ) + n > cc_vec_cap( cntr ) )
|
1016
|
+
{
|
1017
|
+
size_t cap = cc_vec_cap( cntr );
|
1018
|
+
if( !cap )
|
1019
|
+
cap = 2;
|
1020
|
+
|
1021
|
+
while( cap < cc_vec_size( cntr ) + n )
|
1022
|
+
cap *= 2;
|
1023
|
+
|
1024
|
+
cc_allocing_fn_result_ty result = cc_vec_reserve( cntr, cap, el_size, realloc_ );
|
1025
|
+
if( !result.other_ptr )
|
1026
|
+
return result;
|
1027
|
+
|
1028
|
+
cntr = result.new_cntr;
|
1029
|
+
}
|
1030
|
+
|
1031
|
+
void *new_els = cc_vec_get( cntr, i, el_size );
|
1032
|
+
memmove( cc_vec_get( cntr, i + n, el_size ), new_els, el_size * ( cc_vec_hdr( cntr )->size - i ) );
|
1033
|
+
memcpy( new_els, els, el_size * n );
|
1034
|
+
cc_vec_hdr( cntr )->size += n;
|
1035
|
+
|
1036
|
+
return cc_make_allocing_fn_result( cntr, new_els );
|
1037
|
+
}
|
1038
|
+
|
1039
|
+
#define CC_VEC_INSERT_N( cntr, i, els, n ) \
|
1040
|
+
( \
|
1041
|
+
CC_POINT_HNDL_TO_ALLOCING_FN_RESULT( \
|
1042
|
+
cntr, \
|
1043
|
+
cc_vec_insert( cntr, i, els, n, CC_EL_SIZE( cntr ), CC_GET_REALLOC ) \
|
1044
|
+
), \
|
1045
|
+
CC_FIX_HNDL_AND_RETURN_OTHER_PTR( cntr ) \
|
1046
|
+
) \
|
1047
|
+
|
1048
|
+
#define CC_VEC_INSERT( cntr, i, el ) CC_VEC_INSERT_N( cntr, i, &CC_MAKE_LVAL_COPY( CC_EL_TY( cntr ), el ), 1 )
|
1049
|
+
|
1050
|
+
#define CC_VEC_PUSH_N( cntr, els, n ) CC_VEC_INSERT_N( cntr, cc_vec_size( cntr ), els, n )
|
1051
|
+
|
1052
|
+
#define CC_VEC_PUSH( cntr, el ) CC_VEC_PUSH_N( cntr, &CC_MAKE_LVAL_COPY( CC_EL_TY( cntr ), el ), 1 )
|
1053
|
+
|
1054
|
+
// Erases n elements at index i.
|
1055
|
+
// Returns a pointer-iterator to the element after the erased elements, or an end pointer-iterator if there is no
|
1056
|
+
// subsequent element.
|
1057
|
+
static inline void *cc_vec_erase_n( void *cntr, size_t i, size_t n, size_t el_size, void ( *dtor )( void * ) )
|
1058
|
+
{
|
1059
|
+
if( n == 0 )
|
1060
|
+
return cc_vec_get( cntr, i, el_size );
|
1061
|
+
|
1062
|
+
if( dtor )
|
1063
|
+
for( size_t j = 0; j < n; ++j )
|
1064
|
+
dtor( cc_vec_get( cntr, i + j, el_size ) );
|
1065
|
+
|
1066
|
+
memmove(
|
1067
|
+
cc_vec_get( cntr, i, el_size ),
|
1068
|
+
cc_vec_get( cntr, i + n, el_size ),
|
1069
|
+
( cc_vec_hdr( cntr )->size - n - i ) * el_size
|
1070
|
+
);
|
1071
|
+
|
1072
|
+
cc_vec_hdr( cntr )->size -= n;
|
1073
|
+
return cc_vec_get( cntr, i, el_size );
|
1074
|
+
}
|
1075
|
+
|
1076
|
+
#define CC_VEC_ERASE_N( cntr, i, n ) cc_vec_erase_n( cntr, i, n, CC_EL_SIZE( cntr ), CC_EL_DTOR( cntr ) )
|
1077
|
+
|
1078
|
+
#define CC_VEC_ERASE( cntr, i ) CC_VEC_ERASE_N( cntr, i, 1 )
|
1079
|
+
|
1080
|
+
// Sets the number of elements in the vector.
|
1081
|
+
// If n is below the current size, then the destructor is called for all erased elements.
|
1082
|
+
// In this case, the vector's capacity is not changed.
|
1083
|
+
// If n is above the current size, the new elements are uninitialized.
|
1084
|
+
// Returns a cc_allocing_fn_result_ty containing new container handle and a pointer that evaluates to true if the
|
1085
|
+
// operation was successful and false in the case of allocation failure.
|
1086
|
+
static inline cc_allocing_fn_result_ty cc_vec_resize(
|
1087
|
+
void *cntr,
|
1088
|
+
size_t n,
|
1089
|
+
size_t el_size,
|
1090
|
+
void ( *dtor )( void * ),
|
1091
|
+
void *( *realloc_ )( void *, size_t )
|
1092
|
+
)
|
1093
|
+
{
|
1094
|
+
// No resize necessary (also handles placeholder).
|
1095
|
+
if( n == cc_vec_size( cntr ) )
|
1096
|
+
return cc_make_allocing_fn_result( cntr, cc_dummy_true_ptr );
|
1097
|
+
|
1098
|
+
// Downsizing.
|
1099
|
+
if( n < cc_vec_size( cntr ) )
|
1100
|
+
{
|
1101
|
+
cc_vec_erase_n( cntr, n, cc_vec_size( cntr ) - n, el_size, dtor );
|
1102
|
+
return cc_make_allocing_fn_result( cntr, cc_dummy_true_ptr );
|
1103
|
+
}
|
1104
|
+
|
1105
|
+
// Up-sizing.
|
1106
|
+
cc_allocing_fn_result_ty result = cc_vec_reserve( cntr, n, el_size, realloc_ );
|
1107
|
+
if( !result.other_ptr )
|
1108
|
+
return result;
|
1109
|
+
|
1110
|
+
cc_vec_hdr( result.new_cntr )->size = n;
|
1111
|
+
|
1112
|
+
return result;
|
1113
|
+
}
|
1114
|
+
|
1115
|
+
#define CC_VEC_RESIZE( cntr, n ) \
|
1116
|
+
( \
|
1117
|
+
CC_POINT_HNDL_TO_ALLOCING_FN_RESULT( \
|
1118
|
+
cntr, \
|
1119
|
+
cc_vec_resize( cntr, n, CC_EL_SIZE( cntr ), CC_EL_DTOR( cntr ), CC_GET_REALLOC ) \
|
1120
|
+
), \
|
1121
|
+
(bool)CC_FIX_HNDL_AND_RETURN_OTHER_PTR( cntr ) \
|
1122
|
+
) \
|
1123
|
+
|
1124
|
+
// Shrinks vector's capacity to its current size.
|
1125
|
+
// Returns a cc_allocing_fn_result_ty containing the new container handle and a pointer that evaluates to true if the
|
1126
|
+
// operation was successful and false in the case of allocation failure.
|
1127
|
+
static inline cc_allocing_fn_result_ty cc_vec_shrink(
|
1128
|
+
void *cntr,
|
1129
|
+
size_t el_size,
|
1130
|
+
void *( *realloc_ )( void *, size_t ),
|
1131
|
+
void ( *free_ )( void * )
|
1132
|
+
)
|
1133
|
+
{
|
1134
|
+
if( cc_vec_size( cntr ) == cc_vec_cap( cntr ) ) // Also handles placeholder.
|
1135
|
+
return cc_make_allocing_fn_result( cntr, cc_dummy_true_ptr );
|
1136
|
+
|
1137
|
+
if( cc_vec_size( cntr ) == 0 )
|
1138
|
+
{
|
1139
|
+
// Restore placeholder.
|
1140
|
+
free_( cntr );
|
1141
|
+
return cc_make_allocing_fn_result( (void *)&cc_vec_placeholder, cc_dummy_true_ptr );
|
1142
|
+
}
|
1143
|
+
|
1144
|
+
cc_vec_hdr_ty *new_cntr = (cc_vec_hdr_ty *)realloc_( cntr, sizeof( cc_vec_hdr_ty ) + el_size * cc_vec_size( cntr ) );
|
1145
|
+
if( !new_cntr )
|
1146
|
+
return cc_make_allocing_fn_result( cntr, NULL );
|
1147
|
+
|
1148
|
+
cc_vec_hdr( new_cntr )->cap = cc_vec_size( new_cntr );
|
1149
|
+
return cc_make_allocing_fn_result( new_cntr, cc_dummy_true_ptr );
|
1150
|
+
}
|
1151
|
+
|
1152
|
+
#define CC_VEC_SHRINK( cntr ) \
|
1153
|
+
( \
|
1154
|
+
CC_POINT_HNDL_TO_ALLOCING_FN_RESULT( \
|
1155
|
+
cntr, \
|
1156
|
+
cc_vec_shrink( cntr, CC_EL_SIZE( cntr ), CC_GET_REALLOC, CC_GET_FREE ) \
|
1157
|
+
), \
|
1158
|
+
(bool)CC_FIX_HNDL_AND_RETURN_OTHER_PTR( cntr ) \
|
1159
|
+
) \
|
1160
|
+
|
1161
|
+
// Initializes a shallow copy of the source vector.
|
1162
|
+
// The capacity of the new vector is the size of the source vector, not its capacity.
|
1163
|
+
// Returns a pointer to the copy, or NULL in the case of allocation failure.
|
1164
|
+
// That return value is cast to bool in the corresponding macro.
|
1165
|
+
static inline void *cc_vec_init_clone( void *src, size_t el_size, void *( *realloc_ )( void *, size_t ) )
|
1166
|
+
{
|
1167
|
+
if( cc_vec_size( src ) == 0 )
|
1168
|
+
return (void *)&cc_vec_placeholder;
|
1169
|
+
|
1170
|
+
cc_allocing_fn_result_ty result = cc_vec_resize(
|
1171
|
+
(void *)&cc_vec_placeholder,
|
1172
|
+
cc_vec_size( src ),
|
1173
|
+
el_size,
|
1174
|
+
NULL, // dtor unused.
|
1175
|
+
realloc_
|
1176
|
+
);
|
1177
|
+
|
1178
|
+
if( !result.other_ptr )
|
1179
|
+
return NULL;
|
1180
|
+
|
1181
|
+
memcpy( cc_vec_get( result.new_cntr, 0, el_size ), cc_vec_get( src, 0, el_size ), el_size * cc_vec_size( src ) );
|
1182
|
+
return result.new_cntr;
|
1183
|
+
}
|
1184
|
+
|
1185
|
+
#define CC_VEC_INIT_CLONE( cntr, src ) \
|
1186
|
+
( cntr = (CC_TYPEOF_XP( cntr ))cc_vec_init_clone( src, CC_EL_SIZE( cntr ), CC_GET_REALLOC ) ) \
|
1187
|
+
|
1188
|
+
// Erases all elements, calling the destructors if necessary, without changing the vector's capacity.
|
1189
|
+
static inline void cc_vec_clear( void *cntr, size_t el_size, void (*dtor)( void * ) )
|
1190
|
+
{
|
1191
|
+
cc_vec_erase_n( cntr, 0, cc_vec_size( cntr ), el_size, dtor );
|
1192
|
+
}
|
1193
|
+
|
1194
|
+
#define CC_VEC_CLEAR( cntr ) cc_vec_clear( cntr, CC_EL_SIZE( cntr ), CC_EL_DTOR( cntr ) )
|
1195
|
+
|
1196
|
+
// Clears the vector and frees its memory if it is not a placeholder.
|
1197
|
+
static inline void cc_vec_cleanup( void *cntr, size_t el_size, void (*dtor)( void * ), void (*free_)( void * ) )
|
1198
|
+
{
|
1199
|
+
cc_vec_clear( cntr, el_size, dtor );
|
1200
|
+
|
1201
|
+
if( !cc_vec_is_placeholder( cntr ) )
|
1202
|
+
free_( cntr );
|
1203
|
+
}
|
1204
|
+
|
1205
|
+
#define CC_VEC_CLEANUP( cntr ) \
|
1206
|
+
( \
|
1207
|
+
cc_vec_cleanup( cntr, CC_EL_SIZE( cntr ), CC_EL_DTOR( cntr ), CC_GET_FREE ), \
|
1208
|
+
CC_VEC_INIT( cntr ) \
|
1209
|
+
) \
|
1210
|
+
|
1211
|
+
static inline void *cc_vec_first( void *cntr )
|
1212
|
+
{
|
1213
|
+
return (char *)cntr + sizeof( cc_vec_hdr_ty );
|
1214
|
+
}
|
1215
|
+
|
1216
|
+
static inline void *cc_vec_last( void *cntr, size_t el_size )
|
1217
|
+
{
|
1218
|
+
return cc_vec_get( cntr, cc_vec_size( cntr ) - 1, el_size );
|
1219
|
+
}
|
1220
|
+
|
1221
|
+
#define CC_VEC_LAST( cntr ) cc_vec_last( cntr, CC_EL_SIZE( cntr ) )
|
1222
|
+
|
1223
|
+
static inline void *cc_vec_end( void *cntr, size_t el_size )
|
1224
|
+
{
|
1225
|
+
return cc_vec_get( cntr, cc_vec_size( cntr ), el_size );
|
1226
|
+
}
|
1227
|
+
|
1228
|
+
#define CC_VEC_END( cntr ) cc_vec_end( cntr, CC_EL_SIZE( cntr ) )
|
1229
|
+
|
1230
|
+
static inline void *cc_vec_next( void *i, size_t el_size )
|
1231
|
+
{
|
1232
|
+
return (char *)i + el_size;
|
1233
|
+
}
|
1234
|
+
|
1235
|
+
#define CC_VEC_NEXT( cntr, i ) cc_vec_next( i, CC_EL_SIZE( cntr ) )
|
1236
|
+
|
1237
|
+
/*--------------------------------------------------------------------------------------------------------------------*/
|
1238
|
+
/* List */
|
1239
|
+
/*--------------------------------------------------------------------------------------------------------------------*/
|
1240
|
+
|
1241
|
+
// List is implemented as a doubly linked list with sentinel nodes.
|
1242
|
+
|
1243
|
+
// Node header.
|
1244
|
+
// It must be aligned to alignof( max_align_t ) because the element is stored directly after the header.
|
1245
|
+
typedef struct cc_listnode_hdr_ty
|
1246
|
+
{
|
1247
|
+
alignas( max_align_t )
|
1248
|
+
struct cc_listnode_hdr_ty *prev;
|
1249
|
+
struct cc_listnode_hdr_ty *next;
|
1250
|
+
} cc_listnode_hdr_ty;
|
1251
|
+
|
1252
|
+
// List header.
|
1253
|
+
typedef struct
|
1254
|
+
{
|
1255
|
+
size_t size;
|
1256
|
+
cc_listnode_hdr_ty r_end;
|
1257
|
+
cc_listnode_hdr_ty end;
|
1258
|
+
} cc_list_hdr_ty;
|
1259
|
+
|
1260
|
+
// Placeholder for list with no allocated header.
|
1261
|
+
// The main purpose this serves is to provide every list with stable r_end and end iterators across translation units
|
1262
|
+
// and irrespective of whether any memory has been allocated for its header.
|
1263
|
+
// Every list initially (after cc_init) points to this placeholder, which differs across translation units, and is then
|
1264
|
+
// associated with that placeholder until cc_cleanup is called.
|
1265
|
+
// Calls to cc_r_end and cc_end on a list return pointers to the associated placeholder's r_end and end elements even
|
1266
|
+
// after a header has been dynamically allocated to it.
|
1267
|
+
// The way this works is that the placeholder's r_end.prev pointer points to the placeholder's r_end (i.e. a circular
|
1268
|
+
// link), and ditto for end.next.
|
1269
|
+
// Meanwhile, a list with an allocated header has its r_end.prev pointer point to the placeholder's r_end, and ditto
|
1270
|
+
// for its end.next pointer.
|
1271
|
+
// Therefore cc_list_hdr( cntr )->r_end.prev always produces the associated placeholder's r_end, and ditto for
|
1272
|
+
// cc_list_hdr( cntr )->end.next.
|
1273
|
+
// Unfortunately, this means that r_end and end must be handled as special cases during inserts, splices, and iteration.
|
1274
|
+
const static cc_list_hdr_ty cc_list_placeholder = {
|
1275
|
+
0,
|
1276
|
+
{
|
1277
|
+
(cc_listnode_hdr_ty *)&cc_list_placeholder.r_end /* Circular link */,
|
1278
|
+
(cc_listnode_hdr_ty *)&cc_list_placeholder.end
|
1279
|
+
},
|
1280
|
+
{
|
1281
|
+
(cc_listnode_hdr_ty *)&cc_list_placeholder.r_end,
|
1282
|
+
(cc_listnode_hdr_ty *)&cc_list_placeholder.end /* Circular link */
|
1283
|
+
}
|
1284
|
+
};
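// Editor's note: an illustrative summary of the pointer layout described above (not part of the original header;
// the names "placeholder" and "allocated_hdr" are just labels for this sketch):
//
//   placeholder.r_end.prev   --> placeholder.r_end   (circular link)
//   placeholder.end.next     --> placeholder.end     (circular link)
//   allocated_hdr.r_end.prev --> placeholder.r_end
//   allocated_hdr.end.next   --> placeholder.end
//
// In both cases cc_list_hdr( cntr )->r_end.prev (and ->end.next) resolves to the placeholder's sentinels, which is
// what gives every list stable r_end and end pointer-iterators.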
|
1285
|
+
|
1286
|
+
#define CC_LIST_INIT( cntr ) \
|
1287
|
+
( \
|
1288
|
+
cntr = (CC_TYPEOF_XP( cntr ))&cc_list_placeholder, \
|
1289
|
+
(void)0 \
|
1290
|
+
) \
|
1291
|
+
|
1292
|
+
// Provides easy access to list header.
|
1293
|
+
static inline cc_list_hdr_ty *cc_list_hdr( void *cntr )
|
1294
|
+
{
|
1295
|
+
return (cc_list_hdr_ty *)cntr;
|
1296
|
+
}
|
1297
|
+
|
1298
|
+
// Provides easy access to a list node header from a pointer-iterator.
|
1299
|
+
static inline cc_listnode_hdr_ty *cc_listnode_hdr( void *i )
|
1300
|
+
{
|
1301
|
+
return (cc_listnode_hdr_ty *)( (char *)i - sizeof( cc_listnode_hdr_ty ) );
|
1302
|
+
}
|
1303
|
+
|
1304
|
+
// Provides easy access to a pointer-iterator from pointer to a list node header.
|
1305
|
+
static inline void *cc_list_el( void *i )
|
1306
|
+
{
|
1307
|
+
return (char *)i + sizeof( cc_listnode_hdr_ty );
|
1308
|
+
}
|
1309
|
+
|
1310
|
+
static inline bool cc_list_is_placeholder( void *cntr )
|
1311
|
+
{
|
1312
|
+
return cc_list_hdr( cntr )->r_end.prev == &cc_list_hdr( cntr )->r_end;
|
1313
|
+
}
|
1314
|
+
|
1315
|
+
// Iteration.
|
1316
|
+
|
1317
|
+
static inline void *cc_list_r_end( void *cntr )
|
1318
|
+
{
|
1319
|
+
return cc_list_el( cc_list_hdr( cntr )->r_end.prev );
|
1320
|
+
}
|
1321
|
+
|
1322
|
+
static inline void *cc_list_end( void *cntr )
|
1323
|
+
{
|
1324
|
+
return cc_list_el( cc_list_hdr( cntr )->end.next );
|
1325
|
+
}
|
1326
|
+
|
1327
|
+
static inline void *cc_list_prev( void *cntr, void *i )
|
1328
|
+
{
|
1329
|
+
cc_listnode_hdr_ty *prev = cc_listnode_hdr( i )->prev;
|
1330
|
+
|
1331
|
+
// If prev is the list's local r_end, we need to step back once more so that the returned iterator is the r_end
|
1332
|
+
// of the placeholder originally associated with the list.
|
1333
|
+
if( prev == &cc_list_hdr( cntr )->r_end )
|
1334
|
+
prev = prev->prev;
|
1335
|
+
|
1336
|
+
return cc_list_el( prev );
|
1337
|
+
}
|
1338
|
+
|
1339
|
+
static inline void *cc_list_next( void *cntr, void *i )
|
1340
|
+
{
|
1341
|
+
cc_listnode_hdr_ty *next = cc_listnode_hdr( i )->next;
|
1342
|
+
|
1343
|
+
// See comment in cc_list_prev above.
|
1344
|
+
if( next == &cc_list_hdr( cntr )->end )
|
1345
|
+
next = next->next;
|
1346
|
+
|
1347
|
+
return cc_list_el( next );
|
1348
|
+
}
|
1349
|
+
|
1350
|
+
static inline void *cc_list_first( void *cntr )
|
1351
|
+
{
|
1352
|
+
return cc_list_next( cntr, cc_list_el( &cc_list_hdr( cntr )->r_end ) );
|
1353
|
+
}
|
1354
|
+
|
1355
|
+
static inline void *cc_list_last( void *cntr )
|
1356
|
+
{
|
1357
|
+
return cc_list_prev( cntr, cc_list_el( &cc_list_hdr( cntr )->end ) );
|
1358
|
+
}
|
1359
|
+
|
1360
|
+
static inline size_t cc_list_size( void *cntr )
|
1361
|
+
{
|
1362
|
+
return cc_list_hdr( cntr )->size;
|
1363
|
+
}
|
1364
|
+
|
1365
|
+
// Allocates a header for a list that is currently a placeholder.
|
1366
|
+
// Returns the new container handle, or NULL in the case of allocation failure.
|
1367
|
+
static inline void *cc_list_alloc_hdr( void *cntr, void *( *realloc_ )( void *, size_t ) )
|
1368
|
+
{
|
1369
|
+
cc_list_hdr_ty *new_cntr = (cc_list_hdr_ty *)realloc_( NULL, sizeof( cc_list_hdr_ty ) );
|
1370
|
+
if( !new_cntr )
|
1371
|
+
return NULL;
|
1372
|
+
|
1373
|
+
new_cntr->r_end.next = &new_cntr->end;
|
1374
|
+
new_cntr->end.prev = &new_cntr->r_end;
|
1375
|
+
|
1376
|
+
// Link the new header's r_end and end with the original placeholder's r_end and end.
|
1377
|
+
new_cntr->r_end.prev = &cc_list_hdr( cntr )->r_end;
|
1378
|
+
new_cntr->end.next = &cc_list_hdr( cntr )->end;
|
1379
|
+
|
1380
|
+
new_cntr->size = 0;
|
1381
|
+
return new_cntr;
|
1382
|
+
}
|
1383
|
+
|
1384
|
+
// Attaches a node to the list before the node pointed to by pointer-iterator i.
|
1385
|
+
static inline void cc_list_attach( void *cntr, void *i, cc_listnode_hdr_ty *node )
|
1386
|
+
{
|
1387
|
+
// Handle r_end and end iterators as a special case.
|
1388
|
+
// We need to convert the iterator from the global placeholder's r_end or end to the local r_end or end.
|
1389
|
+
if( i == cc_list_r_end( cntr ) )
|
1390
|
+
i = cc_list_el( &cc_list_hdr( cntr )->r_end );
|
1391
|
+
else if( i == cc_list_end( cntr ) )
|
1392
|
+
i = cc_list_el( &cc_list_hdr( cntr )->end );
|
1393
|
+
|
1394
|
+
// Link node.
|
1395
|
+
node->next = cc_listnode_hdr( i );
|
1396
|
+
node->prev = node->next->prev;
|
1397
|
+
node->next->prev = node;
|
1398
|
+
node->prev->next = node;
|
1399
|
+
}
|
1400
|
+
|
1401
|
+
// Inserts an element into the list before the node pointed to by pointer-iterator i.
|
1402
|
+
// Returns a cc_allocing_fn_result_ty containing the new container handle and a pointer-iterator to the newly inserted
|
1403
|
+
// element (or NULL in the case of allocation failure).
|
1404
|
+
static inline cc_allocing_fn_result_ty cc_list_insert(
|
1405
|
+
void *cntr,
|
1406
|
+
void *i,
|
1407
|
+
void *el,
|
1408
|
+
size_t el_size,
|
1409
|
+
void *( *realloc_ )( void *, size_t )
|
1410
|
+
)
|
1411
|
+
{
|
1412
|
+
if( cc_list_is_placeholder( cntr ) )
|
1413
|
+
{
|
1414
|
+
void *new_cntr = cc_list_alloc_hdr( cntr, realloc_ );
|
1415
|
+
if( !new_cntr )
|
1416
|
+
return cc_make_allocing_fn_result( cntr, NULL );
|
1417
|
+
|
1418
|
+
cntr = new_cntr;
|
1419
|
+
}
|
1420
|
+
|
1421
|
+
cc_listnode_hdr_ty *new_node = (cc_listnode_hdr_ty *)realloc_( NULL, sizeof( cc_listnode_hdr_ty ) + el_size );
|
1422
|
+
if( !new_node )
|
1423
|
+
return cc_make_allocing_fn_result( cntr, NULL );
|
1424
|
+
|
1425
|
+
memcpy( cc_list_el( new_node ), el, el_size );
|
1426
|
+
|
1427
|
+
// Handle r_end and end iterators as a special case.
|
1428
|
+
// We need to convert the iterator from the associated placeholder's r_end or end to the local r_end or end.
|
1429
|
+
if( i == cc_list_r_end( cntr ) )
|
1430
|
+
i = cc_list_el( &cc_list_hdr( cntr )->r_end );
|
1431
|
+
else if( i == cc_list_end( cntr ) )
|
1432
|
+
i = cc_list_el( &cc_list_hdr( cntr )->end );
|
1433
|
+
|
1434
|
+
cc_list_attach( cntr, i, new_node );
|
1435
|
+
|
1436
|
+
++cc_list_hdr( cntr )->size;
|
1437
|
+
|
1438
|
+
return cc_make_allocing_fn_result( cntr, cc_list_el( new_node ) );
|
1439
|
+
}
|
1440
|
+
|
1441
|
+
#define CC_LIST_INSERT( cntr, i, el ) \
|
1442
|
+
( \
|
1443
|
+
CC_POINT_HNDL_TO_ALLOCING_FN_RESULT( \
|
1444
|
+
cntr, \
|
1445
|
+
cc_list_insert( cntr, i, &CC_MAKE_LVAL_COPY( CC_EL_TY( cntr ), el ), CC_EL_SIZE( cntr ), CC_GET_REALLOC ) \
|
1446
|
+
), \
|
1447
|
+
CC_FIX_HNDL_AND_RETURN_OTHER_PTR( cntr ) \
|
1448
|
+
) \
|
1449
|
+
|
1450
|
+
#define CC_LIST_PUSH( cntr, el ) CC_LIST_INSERT( cntr, cc_list_end( cntr ), el )
|
1451
|
+
|
1452
|
+
// Erases the element pointed to by pointer-iterator i and returns a pointer-iterator to the next element (or end if the
|
1453
|
+
// element was the last element in the list).
|
1454
|
+
static inline void *cc_list_erase( void *cntr, void *i, void ( *dtor )( void * ), void ( *free_ )( void * ) )
|
1455
|
+
{
|
1456
|
+
cc_listnode_hdr_ty *hdr = cc_listnode_hdr( i );
|
1457
|
+
cc_listnode_hdr_ty *next = hdr->next;
|
1458
|
+
hdr->prev->next = next;
|
1459
|
+
next->prev = hdr->prev;
|
1460
|
+
|
1461
|
+
if( dtor )
|
1462
|
+
dtor( i );
|
1463
|
+
|
1464
|
+
free_( hdr );
|
1465
|
+
--cc_list_hdr( cntr )->size;
|
1466
|
+
|
1467
|
+
// If next is end, we need to make sure we're returning the associated placeholder's end.
|
1468
|
+
if( next == &cc_list_hdr( cntr )->end )
|
1469
|
+
next = next->next;
|
1470
|
+
|
1471
|
+
return cc_list_el( next );
|
1472
|
+
}
|
1473
|
+
|
1474
|
+
#define CC_LIST_ERASE( cntr, i ) cc_list_erase( cntr, i, CC_EL_DTOR( cntr ), CC_GET_FREE )
|
1475
|
+
|
1476
|
+
// Removes the element pointed to by pointer-iterator src_i from the source list and attaches it to the destination
// list (cntr) before the node pointed to by pointer-iterator i.
|
1477
|
+
// Although this function never allocates memory for the element/node itself, it must allocate the list's header if the
|
1478
|
+
// list is currently a placeholder.
|
1479
|
+
// Returns a cc_allocing_fn_result_ty containing the new container handle and a pointer that evaluates to true if the
|
1480
|
+
// operation was successful or false in the case of allocation failure.
|
1481
|
+
static inline cc_allocing_fn_result_ty cc_list_splice(
|
1482
|
+
void *cntr,
|
1483
|
+
void *i,
|
1484
|
+
void *src,
|
1485
|
+
void *src_i,
|
1486
|
+
void *( *realloc_ )( void *, size_t )
|
1487
|
+
)
|
1488
|
+
{
|
1489
|
+
if( cc_list_is_placeholder( cntr ) )
|
1490
|
+
{
|
1491
|
+
void *new_cntr = cc_list_alloc_hdr( cntr, realloc_ );
|
1492
|
+
if( !new_cntr )
|
1493
|
+
return cc_make_allocing_fn_result( cntr, NULL );
|
1494
|
+
|
1495
|
+
cntr = new_cntr;
|
1496
|
+
}
|
1497
|
+
|
1498
|
+
cc_listnode_hdr( src_i )->prev->next = cc_listnode_hdr( src_i )->next;
|
1499
|
+
cc_listnode_hdr( src_i )->next->prev = cc_listnode_hdr( src_i )->prev;
|
1500
|
+
cc_list_attach( cntr, i, cc_listnode_hdr( src_i ) );
|
1501
|
+
|
1502
|
+
--cc_list_hdr( src )->size;
|
1503
|
+
++cc_list_hdr( cntr )->size;
|
1504
|
+
|
1505
|
+
return cc_make_allocing_fn_result( cntr, cc_dummy_true_ptr );
|
1506
|
+
}
|
1507
|
+
|
1508
|
+
#define CC_LIST_SPLICE( cntr, i, src, src_i ) \
|
1509
|
+
( \
|
1510
|
+
CC_POINT_HNDL_TO_ALLOCING_FN_RESULT( \
|
1511
|
+
cntr, \
|
1512
|
+
cc_list_splice( cntr, i, src, src_i, CC_GET_REALLOC ) \
|
1513
|
+
), \
|
1514
|
+
CC_FIX_HNDL_AND_RETURN_OTHER_PTR( cntr ) \
|
1515
|
+
) \
|
1516
|
+
|
1517
|
+
// Initializes a shallow copy of the source list.
|
1518
|
+
// This requires allocating memory for every node, as well as for the list's header unless src is a placeholder.
|
1519
|
+
// Returns a pointer to the copy, or NULL in the case of allocation failure.
|
1520
|
+
// That return value is cast to bool in the corresponding macro.
|
1521
|
+
static inline void *cc_list_init_clone(
|
1522
|
+
void *src,
|
1523
|
+
size_t el_size,
|
1524
|
+
void *( *realloc_ )( void *, size_t ),
|
1525
|
+
void ( *free_ )( void * )
|
1526
|
+
)
|
1527
|
+
{
|
1528
|
+
cc_allocing_fn_result_ty result = { (void *)&cc_list_placeholder, cc_dummy_true_ptr };
|
1529
|
+
for( void *i = cc_list_first( src ); i != cc_list_end( src ); i = cc_list_next( src, i ) )
|
1530
|
+
{
|
1531
|
+
result = cc_list_insert( result.new_cntr, cc_list_end( result.new_cntr ), i, el_size, realloc_ );
|
1532
|
+
if( !result.other_ptr )
|
1533
|
+
{
|
1534
|
+
// Erase incomplete clone without invoking destructors.
|
1535
|
+
|
1536
|
+
void *j = cc_list_first( result.new_cntr );
|
1537
|
+
while( j != cc_list_end( result.new_cntr ) )
|
1538
|
+
{
|
1539
|
+
void *next = cc_list_next( result.new_cntr, j );
|
1540
|
+
free_( cc_listnode_hdr( j ) );
|
1541
|
+
j = next;
|
1542
|
+
}
|
1543
|
+
|
1544
|
+
if( !cc_list_is_placeholder( result.new_cntr ) )
|
1545
|
+
free_( result.new_cntr );
|
1546
|
+
|
1547
|
+
return NULL;
|
1548
|
+
}
|
1549
|
+
}
|
1550
|
+
|
1551
|
+
return result.new_cntr;
|
1552
|
+
}
|
1553
|
+
|
1554
|
+
#define CC_LIST_INIT_CLONE( cntr, src ) \
|
1555
|
+
( cntr = (CC_TYPEOF_XP( cntr ))cc_list_init_clone( src, CC_EL_SIZE( cntr ), CC_GET_REALLOC, CC_GET_FREE ) ) \
|
1556
|
+
|
1557
|
+
// Erases all elements, calling their destructors if necessary.
|
1558
|
+
static inline void cc_list_clear( void *cntr, void ( *dtor )( void * ), void ( *free_ )( void * ) )
|
1559
|
+
{
|
1560
|
+
while( cc_list_first( cntr ) != cc_list_end( cntr ) )
|
1561
|
+
cc_list_erase( cntr, cc_list_first( cntr ), dtor, free_ );
|
1562
|
+
}
|
1563
|
+
|
1564
|
+
#define CC_LIST_CLEAR( cntr ) cc_list_clear( cntr, CC_EL_DTOR( cntr ), CC_GET_FREE )
|
1565
|
+
|
1566
|
+
// Erases all elements, calling their destructors if necessary, and frees memory for the list's header if it is not
|
1567
|
+
// a placeholder.
|
1568
|
+
static inline void cc_list_cleanup( void *cntr, void ( *dtor )( void * ), void ( *free_ )( void * ) )
|
1569
|
+
{
|
1570
|
+
cc_list_clear( cntr, dtor, free_ );
|
1571
|
+
if( !cc_list_is_placeholder( cntr ) )
|
1572
|
+
free_( cntr );
|
1573
|
+
}
|
1574
|
+
|
1575
|
+
#define CC_LIST_CLEANUP( cntr ) \
|
1576
|
+
( \
|
1577
|
+
cc_list_cleanup( cntr, CC_EL_DTOR( cntr ), CC_GET_FREE ), \
|
1578
|
+
CC_LIST_INIT( cntr ) \
|
1579
|
+
) \
|
1580
|
+
|
1581
|
+
/*--------------------------------------------------------------------------------------------------------------------*/
|
1582
|
+
/* Map */
|
1583
|
+
/*--------------------------------------------------------------------------------------------------------------------*/
|
1584
|
+
|
1585
|
+
// Map is implemented as a Robin Hood (open-addressing) hash table with a power-of-2 capacity.
|
1586
|
+
|
1587
|
+
// Probe length type.
|
1588
|
+
// An unsigned char would probably be fine, but we use unsigned int just in case.
|
1589
|
+
// A probe length of 0 denotes an empty bucket, whereas a probe length of 1 denotes an element in its home bucket.
|
1590
|
+
// This optimization allows us to eliminate separate checks for empty buckets.
|
1591
|
+
typedef unsigned int cc_probelen_ty;
|
1592
|
+
|
1593
|
+
// Macros for calculating the offsets of the key and probe length inside a bucket, as well as the total bucket size.
|
1594
|
+
// The element is at the beginning of the bucket.
|
1595
|
+
|
1596
|
+
#define CC_MAP_KEY_OFFSET( cntr ) CC_ROUND_UP( CC_EL_SIZE( cntr ), alignof( CC_KEY_TY( cntr ) ) )
|
1597
|
+
|
1598
|
+
#define CC_MAP_PROBELEN_OFFSET( cntr ) \
|
1599
|
+
CC_ROUND_UP( CC_MAP_KEY_OFFSET( cntr ) + CC_KEY_SIZE( cntr ), alignof( cc_probelen_ty ) ) \
|
1600
|
+
|
1601
|
+
#define CC_MAP_BUCKET_SIZE( cntr ) \
|
1602
|
+
CC_ROUND_UP( \
|
1603
|
+
CC_MAP_PROBELEN_OFFSET( cntr ) + sizeof( cc_probelen_ty ), \
|
1604
|
+
alignof( CC_KEY_TY( cntr ) ) > alignof( CC_EL_TY( cntr ) ) ? \
|
1605
|
+
alignof( CC_KEY_TY( cntr ) ) \
|
1606
|
+
: \
|
1607
|
+
alignof( CC_EL_TY( cntr ) ) \
|
1608
|
+
) \
|
1609
|
+
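// Editor's note: a worked example of the three macros above (illustrative, not part of the original header).
// Assume a typical platform where char has size/alignment 1, int and unsigned int have size/alignment 4, and
// cc_probelen_ty is therefore 4 bytes. For a map with el_ty char and key_ty int:
//   CC_MAP_KEY_OFFSET      = CC_ROUND_UP( 1, 4 )     = 4
//   CC_MAP_PROBELEN_OFFSET = CC_ROUND_UP( 4 + 4, 4 ) = 8
//   CC_MAP_BUCKET_SIZE     = CC_ROUND_UP( 8 + 4, 4 ) = 12
// i.e. each bucket stores the element at offset 0, the key at offset 4, and the probe length at offset 8.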
|
1610
|
+
// Map header.
|
1611
|
+
typedef struct
|
1612
|
+
{
|
1613
|
+
alignas( max_align_t )
|
1614
|
+
size_t size;
|
1615
|
+
size_t cap;
|
1616
|
+
} cc_map_hdr_ty;
|
1617
|
+
|
1618
|
+
// Placeholder for map with no allocated memory.
|
1619
|
+
// In the case of maps, this placeholder allows us to avoid checking for a NULL handle inside functions.
|
1620
|
+
static const cc_map_hdr_ty cc_map_placeholder = { 0, 0 };
|
1621
|
+
|
1622
|
+
#define CC_MAP_INIT( cntr ) \
|
1623
|
+
( \
|
1624
|
+
cntr = (CC_TYPEOF_XP( cntr ))&cc_map_placeholder, \
|
1625
|
+
(void)0 \
|
1626
|
+
) \
|
1627
|
+
|
1628
|
+
// Provides easy access to map header.
|
1629
|
+
static inline cc_map_hdr_ty *cc_map_hdr( void *cntr )
|
1630
|
+
{
|
1631
|
+
return (cc_map_hdr_ty *)cntr;
|
1632
|
+
}
|
1633
|
+
|
1634
|
+
// Functions for easily accessing element, key, and probe length for the bucket at index i.
|
1635
|
+
// The element pointer also denotes the beginning of the bucket.
|
1636
|
+
|
1637
|
+
static inline void *cc_map_el( void *cntr, size_t i, size_t bucket_size )
|
1638
|
+
{
|
1639
|
+
return (char *)cntr + sizeof( cc_map_hdr_ty ) + bucket_size * i;
|
1640
|
+
}
|
1641
|
+
|
1642
|
+
static inline void *cc_map_key( void *cntr, size_t i, size_t bucket_size, size_t key_offset )
|
1643
|
+
{
|
1644
|
+
return (char *)cc_map_el( cntr, i, bucket_size ) + key_offset;
|
1645
|
+
}
|
1646
|
+
|
1647
|
+
static inline cc_probelen_ty *cc_map_probelen( void *cntr, size_t i, size_t bucket_size, size_t probelen_offset )
|
1648
|
+
{
|
1649
|
+
return (cc_probelen_ty *)( (char *)cc_map_el( cntr, i, bucket_size ) + probelen_offset );
|
1650
|
+
}
|
1651
|
+
|
1652
|
+
static inline size_t cc_map_size( void *cntr )
|
1653
|
+
{
|
1654
|
+
return cc_map_hdr( cntr )->size;
|
1655
|
+
}
|
1656
|
+
|
1657
|
+
static inline size_t cc_map_cap( void *cntr )
|
1658
|
+
{
|
1659
|
+
return cc_map_hdr( cntr )->cap;
|
1660
|
+
}
|
1661
|
+
|
1662
|
+
static inline bool cc_map_is_placeholder( void *cntr )
|
1663
|
+
{
|
1664
|
+
return cc_map_cap( cntr ) == 0;
|
1665
|
+
}
|
1666
|
+
|
1667
|
+
// Inserts an element into the map.
|
1668
|
+
// Assumes that the map has empty slots and therefore that failure cannot occur (hence the "raw" label).
|
1669
|
+
// If replace is true, then el will replace any existing element with the same key.
|
1670
|
+
// Returns a pointer-iterator to the newly inserted element, or to the existing element with the same key if replace is
|
1671
|
+
// false.
|
1672
|
+
// For the exact mechanics of Robin-Hood hashing, see Sebastian Sylvan's helpful article:
|
1673
|
+
// www.sebastiansylvan.com/post/robin-hood-hashing-should-be-your-default-hash-table-implementation
|
1674
|
+
static inline void *cc_map_insert_raw(
|
1675
|
+
void *cntr,
|
1676
|
+
void *el,
|
1677
|
+
void *key,
|
1678
|
+
bool replace,
|
1679
|
+
size_t bucket_size,
|
1680
|
+
size_t el_size,
|
1681
|
+
size_t key_offset,
|
1682
|
+
size_t key_size,
|
1683
|
+
size_t probelen_offset,
|
1684
|
+
int ( *cmpr )( void *, void *),
|
1685
|
+
size_t ( *hash )( void *),
|
1686
|
+
void ( *key_dtor )( void * ),
|
1687
|
+
void ( *el_dtor )( void * )
|
1688
|
+
)
|
1689
|
+
{
|
1690
|
+
size_t i = hash( key ) & ( cc_map_hdr( cntr )->cap - 1 );
|
1691
|
+
cc_probelen_ty probelen = 1;
|
1692
|
+
void *placed_original_el = NULL;
|
1693
|
+
|
1694
|
+
while( true )
|
1695
|
+
{
|
1696
|
+
// Empty bucket.
|
1697
|
+
if( !*cc_map_probelen( cntr, i, bucket_size, probelen_offset ) )
|
1698
|
+
{
|
1699
|
+
memcpy( cc_map_key( cntr, i, bucket_size, key_offset ), key, key_size );
|
1700
|
+
memcpy( cc_map_el( cntr, i, bucket_size ), el, el_size );
|
1701
|
+
|
1702
|
+
*cc_map_probelen( cntr, i, bucket_size, probelen_offset ) = probelen;
|
1703
|
+
|
1704
|
+
++cc_map_hdr( cntr )->size;
|
1705
|
+
|
1706
|
+
return placed_original_el ? placed_original_el : cc_map_el( cntr, i, bucket_size );
|
1707
|
+
}
|
1708
|
+
|
1709
|
+
// Existing element with same key.
|
1710
|
+
// This case can only occur before any steal occurs.
|
1711
|
+
if(
|
1712
|
+
probelen == *cc_map_probelen( cntr, i, bucket_size, probelen_offset ) &&
|
1713
|
+
cmpr( cc_map_key( cntr, i, bucket_size, key_offset ), key ) == 0
|
1714
|
+
)
|
1715
|
+
{
|
1716
|
+
if( replace )
|
1717
|
+
{
|
1718
|
+
if( key_dtor )
|
1719
|
+
key_dtor( cc_map_key( cntr, i, bucket_size, key_offset ) );
|
1720
|
+
|
1721
|
+
if( el_dtor )
|
1722
|
+
el_dtor( cc_map_el( cntr, i, bucket_size ) );
|
1723
|
+
|
1724
|
+
memcpy( cc_map_key( cntr, i, bucket_size, key_offset ), key, key_size );
|
1725
|
+
memcpy( cc_map_el( cntr, i, bucket_size ), el, el_size );
|
1726
|
+
}
|
1727
|
+
|
1728
|
+
return cc_map_el( cntr, i, bucket_size );
|
1729
|
+
}
|
1730
|
+
|
1731
|
+
// Stealing bucket.
|
1732
|
+
if( probelen > *cc_map_probelen( cntr, i, bucket_size, probelen_offset ) )
|
1733
|
+
{
|
1734
|
+
cc_memswap( key, cc_map_key( cntr, i, bucket_size, key_offset ), key_size );
|
1735
|
+
cc_memswap( el, cc_map_el( cntr, i, bucket_size ), el_size );
|
1736
|
+
|
1737
|
+
cc_probelen_ty temp_probelen = *cc_map_probelen( cntr, i, bucket_size, probelen_offset );
|
1738
|
+
*cc_map_probelen( cntr, i, bucket_size, probelen_offset ) = probelen;
|
1739
|
+
probelen = temp_probelen;
|
1740
|
+
|
1741
|
+
if( !placed_original_el )
|
1742
|
+
placed_original_el = cc_map_el( cntr, i, bucket_size );
|
1743
|
+
}
|
1744
|
+
|
1745
|
+
i = ( i + 1 ) & ( cc_map_hdr( cntr )->cap - 1 );
|
1746
|
+
++probelen;
|
1747
|
+
}
|
1748
|
+
}
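// Editor's note: a brief illustrative trace of the steal step above (not part of the original header). Suppose the
// incoming entry has reached probe length 3 when it lands on a bucket whose resident has probe length 2. Since
// 3 > 2, the two are swapped: the incoming entry is written to that bucket with probe length 3, and the displaced
// resident continues probing from the next bucket with probe length 3 (its old length 2, incremented at the bottom
// of the loop). Probing ends when an empty bucket (probe length 0) absorbs whichever entry is currently in hand,
// and the returned pointer refers to wherever the originally passed-in element ended up.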
|
1749
|
+
|
1750
|
+
// Returns the minimum capacity required to accommodate n elements, which is governed by the max load factor associated
|
1751
|
+
// with the map's key type.
|
1752
|
+
static inline size_t cc_map_min_cap_for_n_els( size_t n, double max_load )
|
1753
|
+
{
|
1754
|
+
if( n == 0 )
|
1755
|
+
return 0;
|
1756
|
+
|
1757
|
+
// Find the smallest power-of-2 capacity (starting from 8) that can hold n elements without violating max_load.
|
1758
|
+
size_t cap = 8;
|
1759
|
+
while( n > cap * max_load )
|
1760
|
+
cap *= 2;
|
1761
|
+
|
1762
|
+
return cap;
|
1763
|
+
}
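// Editor's note: worked example (illustrative, not part of the original header). With n == 100 and a max_load of
// 0.75 (assumed here purely for the arithmetic), the loop doubles cap while n > cap * max_load: 8, 16, 32, 64, and
// 128 are all rejected (100 > 96 at cap == 128), so the function returns 256 (100 <= 192).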
|
1764
|
+
|
1765
|
+
// Creates a rehashed duplicate of cntr with capacity cap.
|
1766
|
+
// Assumes that cap is large enough to accommodate all elements in cntr without violating the max load factor.
|
1767
|
+
// Returns a pointer to the duplicate, or NULL in the case of allocation failure.
|
1768
|
+
static inline void *cc_map_make_rehash(
|
1769
|
+
void *cntr,
|
1770
|
+
size_t cap,
|
1771
|
+
size_t bucket_size,
|
1772
|
+
size_t el_size,
|
1773
|
+
size_t key_offset,
|
1774
|
+
size_t key_size,
|
1775
|
+
size_t probelen_offset,
|
1776
|
+
int ( *cmpr )( void *, void * ),
|
1777
|
+
size_t ( *hash )( void * ),
|
1778
|
+
void *( *realloc_ )( void *, size_t )
|
1779
|
+
)
|
1780
|
+
{
|
1781
|
+
cc_map_hdr_ty *new_cntr = (cc_map_hdr_ty *)realloc_( NULL, sizeof( cc_map_hdr_ty ) + bucket_size * cap );
|
1782
|
+
if( !new_cntr )
|
1783
|
+
return NULL;
|
1784
|
+
|
1785
|
+
new_cntr->size = 0;
|
1786
|
+
new_cntr->cap = cap;
|
1787
|
+
for( size_t i = 0; i < cap; ++i )
|
1788
|
+
*cc_map_probelen( new_cntr, i, bucket_size, probelen_offset ) = 0;
|
1789
|
+
|
1790
|
+
for( size_t i = 0; i < cc_map_hdr( cntr )->cap; ++i )
|
1791
|
+
if( *cc_map_probelen( cntr, i, bucket_size, probelen_offset ) )
|
1792
|
+
cc_map_insert_raw(
|
1793
|
+
new_cntr,
|
1794
|
+
cc_map_el( cntr, i, bucket_size ),
|
1795
|
+
cc_map_key( cntr, i, bucket_size, key_offset ),
|
1796
|
+
false, // No replacements can occur anyway as all keys already in the map are unique.
|
1797
|
+
bucket_size,
|
1798
|
+
el_size,
|
1799
|
+
key_offset,
|
1800
|
+
key_size,
|
1801
|
+
probelen_offset,
|
1802
|
+
cmpr,
|
1803
|
+
hash,
|
1804
|
+
// No need to pass destructors as no elements will be erased.
|
1805
|
+
NULL,
|
1806
|
+
NULL
|
1807
|
+
);
|
1808
|
+
|
1809
|
+
return new_cntr;
|
1810
|
+
}
|
1811
|
+
|
1812
|
+
// Reserves capacity such that the map can accommodate n elements without reallocation (i.e. without violating the
|
1813
|
+
// max load factor).
|
1814
|
+
// Returns a cc_allocing_fn_result_ty containing the new container handle and a pointer that evaluates to true if the
|
1815
|
+
// operation was successful or false in the case of allocation failure.
|
1816
|
+
static inline cc_allocing_fn_result_ty cc_map_reserve(
|
1817
|
+
void *cntr,
|
1818
|
+
size_t n,
|
1819
|
+
size_t bucket_size,
|
1820
|
+
size_t el_size,
|
1821
|
+
size_t key_offset,
|
1822
|
+
size_t key_size,
|
1823
|
+
size_t probelen_offset,
|
1824
|
+
int ( *cmpr )( void *, void * ),
|
1825
|
+
size_t ( *hash )( void * ),
|
1826
|
+
double max_load,
|
1827
|
+
void *( *realloc_ )( void *, size_t ),
|
1828
|
+
void ( *free_ )( void * )
|
1829
|
+
)
|
1830
|
+
{
|
1831
|
+
size_t cap = cc_map_min_cap_for_n_els( n, max_load );
|
1832
|
+
|
1833
|
+
if( cc_map_cap( cntr ) >= cap )
|
1834
|
+
return cc_make_allocing_fn_result( cntr, cc_dummy_true_ptr );
|
1835
|
+
|
1836
|
+
void *new_cntr = cc_map_make_rehash(
|
1837
|
+
cntr,
|
1838
|
+
cap,
|
1839
|
+
bucket_size,
|
1840
|
+
el_size,
|
1841
|
+
key_offset,
|
1842
|
+
key_size,
|
1843
|
+
probelen_offset,
|
1844
|
+
cmpr,
|
1845
|
+
hash,
|
1846
|
+
realloc_
|
1847
|
+
);
|
1848
|
+
if( !new_cntr )
|
1849
|
+
return cc_make_allocing_fn_result( cntr, NULL );
|
1850
|
+
|
1851
|
+
if( !cc_map_is_placeholder( cntr ) )
|
1852
|
+
free_( cntr );
|
1853
|
+
|
1854
|
+
return cc_make_allocing_fn_result( new_cntr, cc_dummy_true_ptr );
|
1855
|
+
}
|
1856
|
+
|
1857
|
+
#define CC_MAP_RESERVE( cntr, n ) \
|
1858
|
+
( \
|
1859
|
+
CC_POINT_HNDL_TO_ALLOCING_FN_RESULT( \
|
1860
|
+
cntr, \
|
1861
|
+
cc_map_reserve( \
|
1862
|
+
cntr, \
|
1863
|
+
n, \
|
1864
|
+
CC_MAP_BUCKET_SIZE( cntr ), \
|
1865
|
+
CC_EL_SIZE( cntr ), \
|
1866
|
+
CC_MAP_KEY_OFFSET( cntr ), \
|
1867
|
+
CC_KEY_SIZE( cntr ), \
|
1868
|
+
CC_MAP_PROBELEN_OFFSET( cntr ), \
|
1869
|
+
CC_KEY_CMPR( cntr ), \
|
1870
|
+
CC_KEY_HASH( cntr ), \
|
1871
|
+
CC_KEY_LOAD( cntr ), \
|
1872
|
+
CC_GET_REALLOC, \
|
1873
|
+
CC_GET_FREE \
|
1874
|
+
) \
|
1875
|
+
), \
|
1876
|
+
(bool)CC_FIX_HNDL_AND_RETURN_OTHER_PTR( cntr ) \
|
1877
|
+
) \
|
1878
|
+
|
1879
|
+
// Inserts an element.
|
1880
|
+
// If replace is true, then el replaces any existing element with the same key.
|
1881
|
+
// If the map would exceed its max load factor, the underlying storage is expanded and a complete rehash occurs.
|
1882
|
+
// Returns a cc_allocing_fn_result_ty containing the new container handle and a pointer to the newly inserted element, or
|
1883
|
+
// to the existing element with the same key if replace is false.
|
1884
|
+
// If the underlying storage needed to be expanded and an allocation failure occurred, the latter pointer will be NULL.
|
1885
|
+
// This function checks to ensure that the map could accommodate an insertion before searching for the existing element.
|
1886
|
+
// Therefore, failure can occur even if an element with the same key already exists and no reallocation was actually
|
1887
|
+
// necessary.
|
1888
|
+
// This was a design choice in favor of code simplicity and readability over ideal behavior in a corner case.
|
1889
|
+
static inline cc_allocing_fn_result_ty cc_map_insert(
|
1890
|
+
void *cntr,
|
1891
|
+
void *el,
|
1892
|
+
void *key,
|
1893
|
+
bool replace,
|
1894
|
+
size_t bucket_size,
|
1895
|
+
size_t el_size,
|
1896
|
+
size_t key_offset,
|
1897
|
+
size_t key_size,
|
1898
|
+
size_t probelen_offset,
|
1899
|
+
int (*cmpr)( void *, void * ),
|
1900
|
+
size_t (*hash)( void * ),
|
1901
|
+
double max_load,
|
1902
|
+
void ( *key_dtor )( void * ),
|
1903
|
+
void ( *el_dtor )( void * ),
|
1904
|
+
void *( *realloc_ )( void *, size_t ),
|
1905
|
+
void ( *free_ )( void * )
|
1906
|
+
)
|
1907
|
+
{
|
1908
|
+
if( cc_map_size( cntr ) + 1 > cc_map_cap( cntr ) * max_load )
|
1909
|
+
{
|
1910
|
+
cc_allocing_fn_result_ty result = cc_map_reserve(
|
1911
|
+
cntr,
|
1912
|
+
cc_map_size( cntr ) + 1,
|
1913
|
+
bucket_size,
|
1914
|
+
el_size,
|
1915
|
+
key_offset,
|
1916
|
+
key_size,
|
1917
|
+
probelen_offset,
|
1918
|
+
cmpr,
|
1919
|
+
hash,
|
1920
|
+
max_load,
|
1921
|
+
realloc_,
|
1922
|
+
free_
|
1923
|
+
);
|
1924
|
+
|
1925
|
+
if( !result.other_ptr )
|
1926
|
+
return result;
|
1927
|
+
|
1928
|
+
cntr = result.new_cntr;
|
1929
|
+
}
|
1930
|
+
|
1931
|
+
void *new_el = cc_map_insert_raw(
|
1932
|
+
cntr,
|
1933
|
+
el,
|
1934
|
+
key,
|
1935
|
+
replace,
|
1936
|
+
bucket_size,
|
1937
|
+
el_size,
|
1938
|
+
key_offset,
|
1939
|
+
key_size,
|
1940
|
+
probelen_offset,
|
1941
|
+
cmpr,
|
1942
|
+
hash,
|
1943
|
+
key_dtor,
|
1944
|
+
el_dtor
|
1945
|
+
);
|
1946
|
+
|
1947
|
+
return cc_make_allocing_fn_result( cntr, new_el );
|
1948
|
+
}
|
1949
|
+
|
1950
|
+
#define CC_MAP_INSERT( cntr, key, el, replace ) \
|
1951
|
+
( \
|
1952
|
+
CC_POINT_HNDL_TO_ALLOCING_FN_RESULT( \
|
1953
|
+
cntr, \
|
1954
|
+
cc_map_insert( \
|
1955
|
+
cntr, \
|
1956
|
+
&CC_MAKE_LVAL_COPY( CC_EL_TY( cntr ), el ), \
|
1957
|
+
&CC_MAKE_LVAL_COPY( CC_KEY_TY( cntr ), key ), \
|
1958
|
+
replace, \
|
1959
|
+
CC_MAP_BUCKET_SIZE( cntr ), \
|
1960
|
+
CC_EL_SIZE( cntr ), \
|
1961
|
+
CC_MAP_KEY_OFFSET( cntr ), \
|
1962
|
+
CC_KEY_SIZE( cntr ), \
|
1963
|
+
CC_MAP_PROBELEN_OFFSET( cntr ), \
|
1964
|
+
CC_KEY_CMPR( cntr ), \
|
1965
|
+
CC_KEY_HASH( cntr ), \
|
1966
|
+
CC_KEY_LOAD( cntr ), \
|
1967
|
+
CC_KEY_DTOR( cntr ), \
|
1968
|
+
CC_EL_DTOR( cntr ), \
|
1969
|
+
CC_GET_REALLOC, \
|
1970
|
+
CC_GET_FREE \
|
1971
|
+
) \
|
1972
|
+
), \
|
1973
|
+
CC_FIX_HNDL_AND_RETURN_OTHER_PTR( cntr ) \
|
1974
|
+
) \
|
1975
|
+
|
1976
|
+
// Returns a pointer-iterator to the element with the specified key, or NULL if no such element exists.
|
1977
|
+
static inline void *cc_map_get(
|
1978
|
+
void *cntr,
|
1979
|
+
void *key,
|
1980
|
+
size_t bucket_size,
|
1981
|
+
size_t key_offset,
|
1982
|
+
size_t probelen_offset,
|
1983
|
+
int (*cmpr)( void *, void *),
|
1984
|
+
size_t (*hash)( void *) )
|
1985
|
+
{
|
1986
|
+
if( cc_map_size( cntr ) == 0 )
|
1987
|
+
return NULL;
|
1988
|
+
|
1989
|
+
size_t i = hash( key ) & ( cc_map_hdr( cntr )->cap - 1 );
|
1990
|
+
cc_probelen_ty probelen = 1;
|
1991
|
+
|
1992
|
+
while( probelen <= *cc_map_probelen( cntr, i, bucket_size, probelen_offset ) )
|
1993
|
+
{
|
1994
|
+
if(
|
1995
|
+
probelen == *cc_map_probelen( cntr, i, bucket_size, probelen_offset ) &&
|
1996
|
+
cmpr( cc_map_key( cntr, i, bucket_size, key_offset ), key ) == 0
|
1997
|
+
)
|
1998
|
+
return cc_map_el( cntr, i, bucket_size );
|
1999
|
+
|
2000
|
+
i = ( i + 1 ) & ( cc_map_hdr( cntr )->cap - 1 );
|
2001
|
+
++probelen;
|
2002
|
+
}
|
2003
|
+
|
2004
|
+
return NULL;
|
2005
|
+
}
|
2006
|
+
|
2007
|
+
#define CC_MAP_GET( cntr, key ) \
|
2008
|
+
cc_map_get( \
|
2009
|
+
cntr, \
|
2010
|
+
&CC_MAKE_LVAL_COPY( CC_KEY_TY( cntr ), key ), \
|
2011
|
+
CC_MAP_BUCKET_SIZE( cntr ), \
|
2012
|
+
CC_MAP_KEY_OFFSET( cntr ), \
|
2013
|
+
CC_MAP_PROBELEN_OFFSET( cntr ), \
|
2014
|
+
CC_KEY_CMPR( cntr ), \
|
2015
|
+
CC_KEY_HASH( cntr ) \
|
2016
|
+
) \
|
2017
|
+
|
2018
|
+
// Returns a pointer to the key for the element pointed to by pointer-iterator i.
|
2019
|
+
static inline void *cc_map_key_for( void *i, size_t key_offset )
|
2020
|
+
{
|
2021
|
+
return (char *)i + key_offset;
|
2022
|
+
}
|
2023
|
+
|
2024
|
+
#define CC_MAP_KEY_FOR( cntr, i ) cc_map_key_for( i, CC_MAP_KEY_OFFSET( cntr ) )
|
2025
|
+
|
2026
|
+
// Erases the element pointed to by pointer-iterator itr.
|
2027
|
+
// For the exact mechanics of erasing elements in a Robin-Hood hash table, see Sebastian Sylvan's article:
|
2028
|
+
// www.sebastiansylvan.com/post/more-on-robin-hood-hashing-2/
|
2029
|
+
static inline void cc_map_erase_itr(
|
2030
|
+
void *cntr,
|
2031
|
+
void *itr,
|
2032
|
+
size_t bucket_size,
|
2033
|
+
size_t el_size,
|
2034
|
+
size_t key_offset,
|
2035
|
+
size_t key_size,
|
2036
|
+
size_t probelen_offset,
|
2037
|
+
void ( *key_dtor )( void * ),
|
2038
|
+
void ( *el_dtor )( void * )
|
2039
|
+
)
|
2040
|
+
{
|
2041
|
+
size_t i = ( (char *)itr - (char *)cc_map_el( cntr, 0, bucket_size ) ) / bucket_size;
|
2042
|
+
*cc_map_probelen( cntr, i, bucket_size, probelen_offset ) = 0;
|
2043
|
+
--cc_map_hdr( cntr )->size;
|
2044
|
+
|
2045
|
+
if( key_dtor )
|
2046
|
+
key_dtor( cc_map_key( cntr, i, bucket_size, key_offset ) );
|
2047
|
+
|
2048
|
+
if( el_dtor )
|
2049
|
+
el_dtor( cc_map_el( cntr, i, bucket_size ) );
|
2050
|
+
|
2051
|
+
while( true )
|
2052
|
+
{
|
2053
|
+
size_t next = ( i + 1 ) & ( cc_map_hdr( cntr )->cap - 1 );
|
2054
|
+
if( *cc_map_probelen( cntr, next, bucket_size, probelen_offset ) <= 1 )
|
2055
|
+
break; // Empty slot or key already in its home bucket, so all done.
|
2056
|
+
|
2057
|
+
// Bump backwards.
|
2058
|
+
|
2059
|
+
memcpy(
|
2060
|
+
cc_map_key( cntr, i, bucket_size, key_offset ),
|
2061
|
+
cc_map_key( cntr, next, bucket_size, key_offset ),
|
2062
|
+
key_size
|
2063
|
+
);
|
2064
|
+
memcpy( cc_map_el( cntr, i, bucket_size ), cc_map_el( cntr, next, bucket_size ), el_size );
|
2065
|
+
|
2066
|
+
*cc_map_probelen( cntr, i, bucket_size, probelen_offset ) =
|
2067
|
+
*cc_map_probelen( cntr, next, bucket_size, probelen_offset ) - 1;
|
2068
|
+
*cc_map_probelen( cntr, next, bucket_size, probelen_offset ) = 0;
|
2069
|
+
|
2070
|
+
i = next;
|
2071
|
+
}
|
2072
|
+
}
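// Editor's note: illustrative trace of the backward shift above (not part of the original header). If the erased
// bucket's right-hand neighbor holds an entry with probe length 3, that entry is copied one bucket to the left with
// probe length 2, and the process repeats from its old position. The shift stops at an empty bucket (probe
// length 0) or an entry already in its home bucket (probe length 1), since neither can be moved closer to home.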
|
2073
|
+
|
2074
|
+
#define CC_MAP_ERASE_ITR( cntr, i ) \
|
2075
|
+
cc_map_erase_itr( \
|
2076
|
+
cntr, \
|
2077
|
+
i, \
|
2078
|
+
CC_MAP_BUCKET_SIZE( cntr ), \
|
2079
|
+
CC_EL_SIZE( cntr ), \
|
2080
|
+
CC_MAP_KEY_OFFSET( cntr ), \
|
2081
|
+
CC_KEY_SIZE( cntr ), \
|
2082
|
+
CC_MAP_PROBELEN_OFFSET( cntr ), \
|
2083
|
+
CC_KEY_DTOR( cntr ), \
|
2084
|
+
CC_EL_DTOR( cntr ) \
|
2085
|
+
) \
|
2086
|
+
|
2087
|
+
|
2088
|
+
// Erases the element with the specified key, if it exists.
|
2089
|
+
// Returns a pointer that evaluates to true if an element was erased, or else is NULL.
|
2090
|
+
// This pointer is eventually cast to bool by the cc_erase API macro.
|
2091
|
+
static inline void *cc_map_erase(
|
2092
|
+
void *cntr,
|
2093
|
+
void *key,
|
2094
|
+
size_t bucket_size,
|
2095
|
+
size_t el_size,
|
2096
|
+
size_t key_offset,
|
2097
|
+
size_t key_size,
|
2098
|
+
size_t probelen_offset,
|
2099
|
+
int ( *cmpr )( void *, void *),
|
2100
|
+
size_t ( *hash )( void * ),
|
2101
|
+
void ( *key_dtor )( void * ),
|
2102
|
+
void ( *el_dtor )( void * )
|
2103
|
+
)
|
2104
|
+
{
|
2105
|
+
if( cc_map_size( cntr ) == 0 )
|
2106
|
+
return NULL;
|
2107
|
+
|
2108
|
+
size_t i = hash( key ) & ( cc_map_hdr( cntr )->cap - 1 );
|
2109
|
+
cc_probelen_ty probelen = 1;
|
2110
|
+
|
2111
|
+
while( probelen <= *cc_map_probelen( cntr, i, bucket_size, probelen_offset ) )
|
2112
|
+
{
|
2113
|
+
if(
|
2114
|
+
probelen == *cc_map_probelen( cntr, i, bucket_size, probelen_offset ) &&
|
2115
|
+
cmpr( cc_map_key( cntr, i, bucket_size, key_offset ), key ) == 0
|
2116
|
+
)
|
2117
|
+
{
|
2118
|
+
cc_map_erase_itr(
|
2119
|
+
cntr,
|
2120
|
+
cc_map_el( cntr, i, bucket_size ),
|
2121
|
+
bucket_size,
|
2122
|
+
el_size,
|
2123
|
+
key_offset,
|
2124
|
+
key_size,
|
2125
|
+
probelen_offset,
|
2126
|
+
key_dtor,
|
2127
|
+
el_dtor
|
2128
|
+
);
|
2129
|
+
|
2130
|
+
return cc_dummy_true_ptr;
|
2131
|
+
}
|
2132
|
+
|
2133
|
+
i = ( i + 1 ) & ( cc_map_hdr( cntr )->cap - 1 );
|
2134
|
+
++probelen;
|
2135
|
+
}
|
2136
|
+
|
2137
|
+
return NULL;
|
2138
|
+
}
|
2139
|
+
|
2140
|
+
#define CC_MAP_ERASE( cntr, key ) \
|
2141
|
+
cc_map_erase( \
|
2142
|
+
cntr, \
|
2143
|
+
&CC_MAKE_LVAL_COPY( CC_KEY_TY( cntr ), key ), \
|
2144
|
+
CC_MAP_BUCKET_SIZE( cntr ), \
|
2145
|
+
CC_EL_SIZE( cntr ), \
|
2146
|
+
CC_MAP_KEY_OFFSET( cntr ), \
|
2147
|
+
CC_KEY_SIZE( cntr ), \
|
2148
|
+
CC_MAP_PROBELEN_OFFSET( cntr ), \
|
2149
|
+
CC_KEY_CMPR( cntr ), \
|
2150
|
+
CC_KEY_HASH( cntr ), \
|
2151
|
+
CC_KEY_DTOR( cntr ), \
|
2152
|
+
CC_EL_DTOR( cntr ) \
|
2153
|
+
) \
|
2154
|
+
|
2155
|
+
// Initializes a shallow copy of the source map.
|
2156
|
+
// The capacity of the copy is the same as the capacity of the source map, unless the source map is empty, in which case
|
2157
|
+
// the copy is a placeholder.
|
2158
|
+
// Hence, this function does no rehashing.
|
2159
|
+
// Returns a pointer to the copy, or NULL in the case of allocation failure.
|
2160
|
+
// That return value is cast to bool in the corresponding macro.
|
2161
|
+
static inline void *cc_map_init_clone(
|
2162
|
+
void *src,
|
2163
|
+
size_t bucket_size,
|
2164
|
+
void *( *realloc_ )( void *, size_t )
|
2165
|
+
)
|
2166
|
+
{
|
2167
|
+
if( cc_map_size( src ) == 0 ) // Also handles placeholder.
|
2168
|
+
return (void *)&cc_map_placeholder;
|
2169
|
+
|
2170
|
+
cc_map_hdr_ty *new_cntr = (cc_map_hdr_ty*)realloc_( NULL, sizeof( cc_map_hdr_ty ) + bucket_size * cc_map_cap( src ) );
|
2171
|
+
if( !new_cntr )
|
2172
|
+
return NULL;
|
2173
|
+
|
2174
|
+
memcpy( new_cntr, src, sizeof( cc_map_hdr_ty ) + bucket_size * cc_map_cap( src ) );
|
2175
|
+
return new_cntr;
|
2176
|
+
}
|
2177
|
+
|
2178
|
+
#define CC_MAP_INIT_CLONE( cntr, src ) \
|
2179
|
+
( cntr = (CC_TYPEOF_XP( cntr ))cc_map_init_clone( src, CC_MAP_BUCKET_SIZE( cntr ), CC_GET_REALLOC ) ) \
|
2180
|
+
|
2181
|
+
// Shrinks the map's capacity to the minimum possible without violating the max load factor associated with the key type.
|
2182
|
+
// If shrinking is necessary, then a complete rehash occurs.
|
2183
|
+
// Returns a cc_allocing_fn_result_ty containing the new container handle and a pointer that evaluates to true if the
|
2184
|
+
// operation was successful and false in the case of allocation failure.
|
2185
|
+
static inline cc_allocing_fn_result_ty cc_map_shrink(
|
2186
|
+
void *cntr,
|
2187
|
+
size_t bucket_size,
|
2188
|
+
size_t el_size,
|
2189
|
+
size_t key_offset,
|
2190
|
+
size_t key_size,
|
2191
|
+
size_t probelen_offset,
|
2192
|
+
int ( *cmpr )( void *, void * ),
|
2193
|
+
size_t ( *hash )( void * ),
|
2194
|
+
double max_load,
|
2195
|
+
void *( *realloc_ )( void *, size_t ),
|
2196
|
+
void ( *free_ )( void * )
|
2197
|
+
)
|
2198
|
+
{
|
2199
|
+
size_t cap = cc_map_min_cap_for_n_els( cc_map_size( cntr ), max_load );
|
2200
|
+
|
2201
|
+
if( cap == cc_map_cap( cntr ) ) // Shrink unnecessary.
|
2202
|
+
return cc_make_allocing_fn_result( cntr, cc_dummy_true_ptr );
|
2203
|
+
|
2204
|
+
if( cap == 0 ) // Restore placeholder.
|
2205
|
+
{
|
2206
|
+
if( !cc_map_is_placeholder( cntr ) )
|
2207
|
+
free_( cntr );
|
2208
|
+
|
2209
|
+
return cc_make_allocing_fn_result( (void *)&cc_map_placeholder, cc_dummy_true_ptr );
|
2210
|
+
}
|
2211
|
+
|
2212
|
+
void *new_cntr = cc_map_make_rehash(
|
2213
|
+
cntr,
|
2214
|
+
cap,
|
2215
|
+
bucket_size,
|
2216
|
+
el_size,
|
2217
|
+
key_offset,
|
2218
|
+
key_size,
|
2219
|
+
probelen_offset,
|
2220
|
+
cmpr,
|
2221
|
+
hash,
|
2222
|
+
realloc_
|
2223
|
+
);
|
2224
|
+
if( !new_cntr )
|
2225
|
+
return cc_make_allocing_fn_result( cntr, NULL );
|
2226
|
+
|
2227
|
+
if( !cc_map_is_placeholder( cntr ) )
|
2228
|
+
free_( cntr );
|
2229
|
+
|
2230
|
+
return cc_make_allocing_fn_result( new_cntr, cc_dummy_true_ptr );
|
2231
|
+
}
|
2232
|
+
|
2233
|
+
#define CC_MAP_SHRINK( cntr ) \
|
2234
|
+
( \
|
2235
|
+
CC_POINT_HNDL_TO_ALLOCING_FN_RESULT( \
|
2236
|
+
cntr, \
|
2237
|
+
cc_map_shrink( \
|
2238
|
+
cntr, \
|
2239
|
+
CC_MAP_BUCKET_SIZE( cntr ), \
|
2240
|
+
CC_EL_SIZE( cntr ), \
|
2241
|
+
CC_MAP_KEY_OFFSET( cntr ), \
|
2242
|
+
CC_KEY_SIZE( cntr ), \
|
2243
|
+
CC_MAP_PROBELEN_OFFSET( cntr ), \
|
2244
|
+
CC_KEY_CMPR( cntr ), \
|
2245
|
+
CC_KEY_HASH( cntr ), \
|
2246
|
+
CC_KEY_LOAD( cntr ), \
|
2247
|
+
CC_GET_REALLOC, \
|
2248
|
+
CC_GET_FREE \
|
2249
|
+
) \
|
2250
|
+
), \
|
2251
|
+
(bool)CC_FIX_HNDL_AND_RETURN_OTHER_PTR( cntr ) \
|
2252
|
+
) \
|
2253
|
+
|
2254
|
+
// Erases all elements, calling the destructors for the key and element types if necessary, without changing the map's
|
2255
|
+
// capacity.
|
2256
|
+
static inline void cc_map_clear(
|
2257
|
+
void *cntr,
|
2258
|
+
size_t bucket_size,
|
2259
|
+
size_t key_offset,
|
2260
|
+
size_t probelen_offset,
|
2261
|
+
void ( *key_dtor )( void * ),
|
2262
|
+
void ( *el_dtor )( void * )
|
2263
|
+
)
|
2264
|
+
{
|
2265
|
+
if( cc_map_size( cntr ) == 0 ) // Also handles placeholder map.
|
2266
|
+
return;
|
2267
|
+
|
2268
|
+
for( size_t i = 0; i < cc_map_hdr( cntr )->cap; ++i )
|
2269
|
+
if( *cc_map_probelen( cntr, i, bucket_size, probelen_offset ) )
|
2270
|
+
{
|
2271
|
+
if( key_dtor )
|
2272
|
+
key_dtor( cc_map_key( cntr, i, bucket_size, key_offset ) );
|
2273
|
+
|
2274
|
+
if( el_dtor )
|
2275
|
+
el_dtor( cc_map_el( cntr, i, bucket_size ) );
|
2276
|
+
|
2277
|
+
*cc_map_probelen( cntr, i, bucket_size, probelen_offset ) = 0;
|
2278
|
+
}
|
2279
|
+
|
2280
|
+
cc_map_hdr( cntr )->size = 0;
|
2281
|
+
}
|
2282
|
+
|
2283
|
+
#define CC_MAP_CLEAR( cntr ) \
|
2284
|
+
cc_map_clear( \
|
2285
|
+
cntr, \
|
2286
|
+
CC_MAP_BUCKET_SIZE( cntr ), \
|
2287
|
+
CC_MAP_KEY_OFFSET( cntr ), \
|
2288
|
+
CC_MAP_PROBELEN_OFFSET( cntr ), \
|
2289
|
+
CC_KEY_DTOR( cntr ), \
|
2290
|
+
CC_EL_DTOR( cntr ) \
|
2291
|
+
) \
|
2292
|
+
|
2293
|
+
// Clears the map and frees its memory if it is not a placeholder.
|
2294
|
+
static inline void cc_map_cleanup(
|
2295
|
+
void *cntr,
|
2296
|
+
size_t bucket_size,
|
2297
|
+
size_t key_offset,
|
2298
|
+
size_t probelen_offset,
|
2299
|
+
void (*key_dtor)( void * ),
|
2300
|
+
void (*el_dtor)( void * ),
|
2301
|
+
void (*free_)( void * )
|
2302
|
+
)
|
2303
|
+
{
|
2304
|
+
cc_map_clear( cntr, bucket_size, key_offset, probelen_offset, key_dtor, el_dtor );
|
2305
|
+
|
2306
|
+
if( !cc_map_is_placeholder( cntr ) )
|
2307
|
+
free_( cntr );
|
2308
|
+
}
|
2309
|
+
|
2310
|
+
#define CC_MAP_CLEANUP( cntr ) \
|
2311
|
+
( \
|
2312
|
+
cc_map_cleanup( \
|
2313
|
+
cntr, \
|
2314
|
+
CC_MAP_BUCKET_SIZE( cntr ), \
|
2315
|
+
CC_MAP_KEY_OFFSET( cntr ), \
|
2316
|
+
CC_MAP_PROBELEN_OFFSET( cntr ), \
|
2317
|
+
CC_KEY_DTOR( cntr ), \
|
2318
|
+
CC_EL_DTOR( cntr ), \
|
2319
|
+
CC_GET_FREE \
|
2320
|
+
), \
|
2321
|
+
CC_MAP_INIT( cntr ) \
|
2322
|
+
) \
|
2323
|
+
|
2324
|
+
// For maps, the container handle doubles up as r_end.
|
2325
|
+
static inline void *cc_map_r_end( void *cntr )
|
2326
|
+
{
|
2327
|
+
return cntr;
|
2328
|
+
}
|
2329
|
+
|
2330
|
+
// Returns a pointer-iterator to the end of the bucket array.
|
2331
|
+
static inline void *cc_map_end( void *cntr, size_t bucket_size )
|
2332
|
+
{
|
2333
|
+
return cc_map_el( cntr, cc_map_hdr( cntr )->cap, bucket_size );
|
2334
|
+
}
|
2335
|
+
|
2336
|
+
#define CC_MAP_END( cntr ) cc_map_end( cntr, CC_MAP_BUCKET_SIZE( cntr ) )
|
2337
|
+
|
2338
|
+
// Returns a pointer-iterator to the first element, or end if the map is empty.
|
2339
|
+
static inline void *cc_map_first( void *cntr, size_t bucket_size, size_t probelen_offset )
|
2340
|
+
{
|
2341
|
+
for( size_t i = 0; i < cc_map_hdr( cntr )->cap; ++i )
|
2342
|
+
if( *cc_map_probelen( cntr, i, bucket_size, probelen_offset ) )
|
2343
|
+
return cc_map_el( cntr, i, bucket_size );
|
2344
|
+
|
2345
|
+
return cc_map_end( cntr, bucket_size );
|
2346
|
+
}
|
2347
|
+
|
2348
|
+
#define CC_MAP_FIRST( cntr ) cc_map_first( cntr, CC_MAP_BUCKET_SIZE( cntr ), CC_MAP_PROBELEN_OFFSET( cntr ) )
|
2349
|
+
|
2350
|
+
// Returns a pointer-iterator to the last element, or r_end if the map is empty.
|
2351
|
+
static inline void *cc_map_last( void *cntr, size_t bucket_size, size_t probelen_offset )
|
2352
|
+
{
|
2353
|
+
for( size_t i = cc_map_hdr( cntr )->cap; i-- > 0; )
|
2354
|
+
if( *cc_map_probelen( cntr, i, bucket_size, probelen_offset ) )
|
2355
|
+
return cc_map_el( cntr, i, bucket_size );
|
2356
|
+
|
2357
|
+
return cc_map_r_end( cntr );
|
2358
|
+
}
|
2359
|
+
|
2360
|
+
#define CC_MAP_LAST( cntr ) cc_map_last( cntr, CC_MAP_BUCKET_SIZE( cntr ), CC_MAP_PROBELEN_OFFSET( cntr ) )
|
2361
|
+
|
2362
|
+
static inline void *cc_map_next( void *cntr, void *i, size_t bucket_size, size_t probelen_offset )
|
2363
|
+
{
|
2364
|
+
size_t j = ( (char *)i - (char *)cc_map_el( cntr, 0, bucket_size ) ) / bucket_size + 1;
|
2365
|
+
|
2366
|
+
while( j < cc_map_hdr( cntr )->cap && !*cc_map_probelen( cntr, j, bucket_size, probelen_offset ) )
|
2367
|
+
++j;
|
2368
|
+
|
2369
|
+
return cc_map_el( cntr, j, bucket_size );
|
2370
|
+
}
|
2371
|
+
|
2372
|
+
#define CC_MAP_NEXT( cntr, i ) cc_map_next( cntr, i, CC_MAP_BUCKET_SIZE( cntr ), CC_MAP_PROBELEN_OFFSET( cntr ) )
|
2373
|
+
|
2374
|
+
static inline void *cc_map_prev( void *cntr, void *i, size_t bucket_size, size_t probelen_offset )
|
2375
|
+
{
|
2376
|
+
size_t j = ( (char *)i - (char *)cc_map_el( cntr, 0, bucket_size ) ) / bucket_size;
|
2377
|
+
|
2378
|
+
while( true )
|
2379
|
+
{
|
2380
|
+
if( j == 0 )
|
2381
|
+
return cc_map_r_end( cntr );
|
2382
|
+
|
2383
|
+
--j;
|
2384
|
+
if( *cc_map_probelen( cntr, j, bucket_size, probelen_offset ) )
|
2385
|
+
return cc_map_el( cntr, j, bucket_size );
|
2386
|
+
}
|
2387
|
+
}
|
2388
|
+
|
2389
|
+
#define CC_MAP_PREV( cntr, i ) cc_map_prev( cntr, i, CC_MAP_BUCKET_SIZE( cntr ), CC_MAP_PROBELEN_OFFSET( cntr ) )
|
2390
|
+
|
2391
|
+
/*--------------------------------------------------------------------------------------------------------------------*/
|
2392
|
+
/* Set */
|
2393
|
+
/*--------------------------------------------------------------------------------------------------------------------*/
|
2394
|
+
|
2395
|
+
// Set is implemented as a map where the key and element are merged into one space in memory.
|
2396
|
+
// Hence, it reuses the functions for map, except that the key offset passed in is zero and the element size passed in
|
2397
|
+
// is also zero to avoid double-memcpying.
|
2398
|
+
// For simplicity's sake, the idea here is to deviate as little from the code for map as possible.
|
2399
|
+
// Note that for set, CC_EL_TY and CC_KEY_TY are synonymous (as are CC_KEY_SIZE and CC_EL_SIZE).
|
2400
|
+
|
2401
|
+
#define CC_SET_PROBELEN_OFFSET( cntr ) CC_ROUND_UP( CC_EL_SIZE( cntr ), alignof( cc_probelen_ty ) )
|
2402
|
+
|
2403
|
+
#define CC_SET_BUCKET_SIZE( cntr ) \
|
2404
|
+
CC_ROUND_UP( CC_SET_PROBELEN_OFFSET( cntr ) + sizeof( cc_probelen_ty ), alignof( CC_EL_TY( cntr ) ) ) \
|
2405
|
+
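// Editor's note: worked example (illustrative, not part of the original header). On a typical platform where int
// and unsigned int have size/alignment 4, a set of int gives:
//   CC_SET_PROBELEN_OFFSET = CC_ROUND_UP( 4, 4 )     = 4
//   CC_SET_BUCKET_SIZE     = CC_ROUND_UP( 4 + 4, 4 ) = 8
// i.e. each bucket is just the key/element at offset 0 followed by its probe length at offset 4.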
|
2406
|
+
#define CC_SET_INIT CC_MAP_INIT
|
2407
|
+
|
2408
|
+
#define CC_SET_SIZE cc_map_size
|
2409
|
+
|
2410
|
+
#define CC_SET_CAP cc_map_cap
|
2411
|
+
|
2412
|
+
#define CC_SET_RESERVE( cntr, n ) \
|
2413
|
+
( \
|
2414
|
+
CC_POINT_HNDL_TO_ALLOCING_FN_RESULT( \
|
2415
|
+
cntr, \
|
2416
|
+
cc_map_reserve( \
|
2417
|
+
cntr, \
|
2418
|
+
n, \
|
2419
|
+
CC_SET_BUCKET_SIZE( cntr ), \
|
2420
|
+
0, /* zero el size */ \
|
2421
|
+
0, /* zero key offset */ \
|
2422
|
+
CC_KEY_SIZE( cntr ), \
|
2423
|
+
CC_SET_PROBELEN_OFFSET( cntr ), \
|
2424
|
+
CC_KEY_CMPR( cntr ), \
|
2425
|
+
CC_KEY_HASH( cntr ), \
|
2426
|
+
CC_KEY_LOAD( cntr ), \
|
2427
|
+
CC_GET_REALLOC, \
|
2428
|
+
CC_GET_FREE \
|
2429
|
+
) \
|
2430
|
+
), \
|
2431
|
+
(bool)CC_FIX_HNDL_AND_RETURN_OTHER_PTR( cntr ) \
|
2432
|
+
) \
|
2433
|
+
|
2434
|
+
#define CC_SET_INSERT( cntr, el, replace ) \
|
2435
|
+
( \
|
2436
|
+
CC_POINT_HNDL_TO_ALLOCING_FN_RESULT( \
|
2437
|
+
cntr, \
|
2438
|
+
cc_map_insert( \
|
2439
|
+
cntr, \
|
2440
|
+
/* Copying el is a no-op, but we still \
|
2441
|
+
need to pass a valid pointer for memcpy. */ \
|
2442
|
+
cntr, \
|
2443
|
+
&CC_MAKE_LVAL_COPY( CC_KEY_TY( cntr ), el ), \
|
2444
|
+
replace, \
|
2445
|
+
CC_SET_BUCKET_SIZE( cntr ), \
|
2446
|
+
0, /* Zero el size. */ \
|
2447
|
+
0, /* Zero key offset. */ \
|
2448
|
+
CC_KEY_SIZE( cntr ), \
|
2449
|
+
CC_SET_PROBELEN_OFFSET( cntr ), \
|
2450
|
+
CC_KEY_CMPR( cntr ), \
|
2451
|
+
CC_KEY_HASH( cntr ), \
|
2452
|
+
CC_KEY_LOAD( cntr ), \
|
2453
|
+
CC_KEY_DTOR( cntr ), \
|
2454
|
+
NULL, /* Only need one dtor. */ \
|
2455
|
+
CC_GET_REALLOC, \
|
2456
|
+
CC_GET_FREE \
|
2457
|
+
) \
|
2458
|
+
), \
|
2459
|
+
CC_FIX_HNDL_AND_RETURN_OTHER_PTR( cntr ) \
|
2460
|
+
) \
|
2461
|
+
|
2462
|
+
#define CC_SET_GET( cntr, el ) \
|
2463
|
+
cc_map_get( \
|
2464
|
+
cntr, \
|
2465
|
+
&CC_MAKE_LVAL_COPY( CC_KEY_TY( cntr ), el ), \
|
2466
|
+
CC_SET_BUCKET_SIZE( cntr ), \
|
2467
|
+
0, /* Zero key offset. */ \
|
2468
|
+
CC_SET_PROBELEN_OFFSET( cntr ), \
|
2469
|
+
CC_KEY_CMPR( cntr ), \
|
2470
|
+
CC_KEY_HASH( cntr ) \
|
2471
|
+
) \
|
2472
|
+
|
2473
|
+
#define CC_SET_ERASE_ITR( cntr, i ) \
|
2474
|
+
cc_map_erase_itr( \
|
2475
|
+
cntr, \
|
2476
|
+
i, \
|
2477
|
+
CC_SET_BUCKET_SIZE( cntr ), \
|
2478
|
+
0, /* Zero el size. */ \
|
2479
|
+
0, /* Zero key offset. */ \
|
2480
|
+
CC_KEY_SIZE( cntr ), \
|
2481
|
+
CC_SET_PROBELEN_OFFSET( cntr ), \
|
2482
|
+
CC_KEY_DTOR( cntr ), \
|
2483
|
+
NULL /* Only need one dtor */ \
|
2484
|
+
) \
|
2485
|
+
|
2486
|
+
#define CC_SET_ERASE( cntr, key ) \
|
2487
|
+
cc_map_erase( \
|
2488
|
+
cntr, \
|
2489
|
+
&CC_MAKE_LVAL_COPY( CC_KEY_TY( cntr ), key ), \
|
2490
|
+
CC_SET_BUCKET_SIZE( cntr ), \
|
2491
|
+
0, /* Zero el size. */ \
|
2492
|
+
0, /* Zero key offset. */ \
|
2493
|
+
CC_KEY_SIZE( cntr ), \
|
2494
|
+
CC_SET_PROBELEN_OFFSET( cntr ), \
|
2495
|
+
CC_KEY_CMPR( cntr ), \
|
2496
|
+
CC_KEY_HASH( cntr ), \
|
2497
|
+
CC_KEY_DTOR( cntr ), \
|
2498
|
+
NULL /* Only need one dtor. */ \
|
2499
|
+
) \
|
2500
|
+
|
2501
|
+
#define CC_SET_INIT_CLONE( cntr, src ) \
|
2502
|
+
( cntr = (CC_TYPEOF_XP( cntr ))cc_map_init_clone( src, CC_SET_BUCKET_SIZE( cntr ), CC_GET_REALLOC ) ) \
|
2503
|
+
|
2504
|
+
#define CC_SET_SHRINK( cntr ) \
|
2505
|
+
( \
|
2506
|
+
CC_POINT_HNDL_TO_ALLOCING_FN_RESULT( \
|
2507
|
+
cntr, \
|
2508
|
+
cc_map_shrink( \
|
2509
|
+
cntr, \
|
2510
|
+
CC_SET_BUCKET_SIZE( cntr ), \
|
2511
|
+
0, /* Zero el size. */ \
|
2512
|
+
0, /* Zero key offset. */ \
|
2513
|
+
CC_KEY_SIZE( cntr ), \
|
2514
|
+
CC_SET_PROBELEN_OFFSET( cntr ), \
|
2515
|
+
CC_KEY_CMPR( cntr ), \
|
2516
|
+
CC_KEY_HASH( cntr ), \
|
2517
|
+
CC_KEY_LOAD( cntr ), \
|
2518
|
+
CC_GET_REALLOC, \
|
2519
|
+
CC_GET_FREE \
|
2520
|
+
) \
|
2521
|
+
), \
|
2522
|
+
(bool)CC_FIX_HNDL_AND_RETURN_OTHER_PTR( cntr ) \
|
2523
|
+
) \
|
2524
|
+
|
2525
|
+
#define CC_SET_CLEAR( cntr ) \
|
2526
|
+
cc_map_clear( \
|
2527
|
+
cntr, \
|
2528
|
+
CC_SET_BUCKET_SIZE( cntr ), \
|
2529
|
+
0, /* Zero key offset. */ \
|
2530
|
+
CC_SET_PROBELEN_OFFSET( cntr ), \
|
2531
|
+
CC_KEY_DTOR( cntr ), \
|
2532
|
+
NULL /* Only need one dtor. */ \
|
2533
|
+
) \
|
2534
|
+
|
2535
|
+
#define CC_SET_CLEANUP( cntr ) \
|
2536
|
+
( \
|
2537
|
+
cc_map_cleanup( \
|
2538
|
+
cntr, \
|
2539
|
+
CC_SET_BUCKET_SIZE( cntr ), \
|
2540
|
+
0, /* Zero key offset. */ \
|
2541
|
+
CC_SET_PROBELEN_OFFSET( cntr ), \
|
2542
|
+
CC_KEY_DTOR( cntr ), \
|
2543
|
+
NULL, /* Only need one dtor. */ \
|
2544
|
+
CC_GET_FREE \
|
2545
|
+
), \
|
2546
|
+
CC_SET_INIT( cntr ) \
|
2547
|
+
) \
|
2548
|
+
|
2549
|
+
#define CC_SET_R_END cc_map_r_end
|
2550
|
+
|
2551
|
+
#define CC_SET_END( cntr ) cc_map_end( cntr, CC_SET_BUCKET_SIZE( cntr ) )
|
2552
|
+
|
2553
|
+
#define CC_SET_FIRST( cntr ) cc_map_first( cntr, CC_SET_BUCKET_SIZE( cntr ), CC_SET_PROBELEN_OFFSET( cntr ) )
|
2554
|
+
|
2555
|
+
#define CC_SET_LAST( cntr ) cc_map_last( cntr, CC_SET_BUCKET_SIZE( cntr ), CC_SET_PROBELEN_OFFSET( cntr ) )
|
2556
|
+
|
2557
|
+
#define CC_SET_NEXT( cntr, i ) cc_map_next( cntr, i, CC_SET_BUCKET_SIZE( cntr ), CC_SET_PROBELEN_OFFSET( cntr ) )
|
2558
|
+
|
2559
|
+
#define CC_SET_PREV( cntr, i ) cc_map_prev( cntr, i, CC_SET_BUCKET_SIZE( cntr ), CC_SET_PROBELEN_OFFSET( cntr ) )
|
2560
|
+
|
2561
|
+
/*--------------------------------------------------------------------------------------------------------------------*/
|
2562
|
+
/* API */
|
2563
|
+
/*--------------------------------------------------------------------------------------------------------------------*/
|
2564
|
+
|
2565
|
+
// Generally, API macros need to do several things:
|
2566
|
+
// - For GCC and Clang, check the first argument - the pointer to the container handle - for potential duplicate side
|
2567
|
+
// effects and generate a compile-time warning if they exist.
|
2568
|
+
// Although this is only strictly necessary for macros that may cause reallocation operating on containers that store
|
2569
|
+
// their headers in the same block of memory as their elements, we extend this check to all API macros for the sake of
|
2570
|
+
// consistency.
|
2571
|
+
// - Check that the aforementioned pointer points to a container handle compatible with the macro.
|
2572
|
+
// - Convert the aforementioned pointer to the actual container handle (i.e. dereference it).
|
2573
|
+
// While the internal macros take the container handle directly, API macros take a pointer to the handle so that it's
|
2574
|
+
// clear to the user that they may modify it.
|
2575
|
+
// - Call the correct container-specific function or macro.
|
2576
|
+
// - Provide dummy values as arguments to the non-called container-specific functions or macros to avoid compiler
|
2577
|
+
// errors.
|
2578
|
+
// - Cast the result of the container-specific function or macro, if it is a void pointer, to the correct pointer type
|
2579
|
+
// (usually the container's element type).
|
2580
|
+
// - For container-specific functions or macros that return a void pointer that actually denotes whether the operation
|
2581
|
+
// was successful, convert it to bool in the case of those containers.
|
2582
|
+
// Although these macros are long and ramose, the compiler should optimize away all the checks and irrelevant paths.
|
2583
|
+
|
2584
|
+
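// Editorial sketch (not part of cc.h) of the side-effect rule checked by CC_WARN_DUPLICATE_SIDE_EFFECTS: because the
// first argument may be evaluated more than once, it must not contain side effects. `our_vecs` and `i` are
// hypothetical user-side names.

#if 0
vec( int ) our_vecs[ 10 ];    /* Array of vectors, each initialized elsewhere.                 */
size_t i = 0;
push( &our_vecs[ i ], 7 );    /* Fine: re-evaluating &our_vecs[ i ] is harmless.               */
push( &our_vecs[ i++ ], 7 );  /* Avoid: i may be incremented more than once; GCC and Clang warn. */
#endif
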
#define cc_init( cntr ) \
( \
  CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
  CC_STATIC_ASSERT( \
    CC_CNTR_ID( *(cntr) ) == CC_VEC || \
    CC_CNTR_ID( *(cntr) ) == CC_LIST || \
    CC_CNTR_ID( *(cntr) ) == CC_MAP || \
    CC_CNTR_ID( *(cntr) ) == CC_SET \
  ), \
  CC_CNTR_ID( *(cntr) ) == CC_VEC ? CC_VEC_INIT( *(cntr) ) : \
  CC_CNTR_ID( *(cntr) ) == CC_LIST ? CC_LIST_INIT( *(cntr) ) : \
  CC_CNTR_ID( *(cntr) ) == CC_MAP ? CC_MAP_INIT( *(cntr) ) : \
  CC_CNTR_ID( *(cntr) ) == CC_SET ? CC_SET_INIT( *(cntr) ) : \
  (void)0 /* Unreachable. */ \
) \

#define cc_init_clone( cntr, src ) \
CC_CAST_MAYBE_UNUSED( bool, ( \
  CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
  CC_STATIC_ASSERT( CC_IS_SAME_TY( *(cntr), *(src) ) ), \
  CC_STATIC_ASSERT( \
    CC_CNTR_ID( *(cntr) ) == CC_VEC || \
    CC_CNTR_ID( *(cntr) ) == CC_LIST || \
    CC_CNTR_ID( *(cntr) ) == CC_MAP || \
    CC_CNTR_ID( *(cntr) ) == CC_SET \
  ), \
  CC_CNTR_ID( *(cntr) ) == CC_VEC ? CC_VEC_INIT_CLONE( *(cntr), *(src) ) : \
  CC_CNTR_ID( *(cntr) ) == CC_LIST ? CC_LIST_INIT_CLONE( *(cntr), *(src) ) : \
  CC_CNTR_ID( *(cntr) ) == CC_MAP ? CC_MAP_INIT_CLONE( *(cntr), *(src) ) : \
  CC_CNTR_ID( *(cntr) ) == CC_SET ? CC_SET_INIT_CLONE( *(cntr), *(src) ) : \
  NULL /* Unreachable. */ \
) ) \

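// Editorial sketch (not part of cc.h): cc_init cannot fail, whereas cc_init_clone allocates and therefore evaluates
// to a bool that should be checked. Hypothetical user-side code:

#if 0
vec( float ) our_vec, our_copy;
init( &our_vec );
/* ... populate our_vec ... */
if( !init_clone( &our_copy, &our_vec ) )
  { /* Allocation failed; our_copy is not usable. */ }
#endif
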
#define cc_size( cntr ) \
( \
  CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
  CC_STATIC_ASSERT( \
    CC_CNTR_ID( *(cntr) ) == CC_VEC || \
    CC_CNTR_ID( *(cntr) ) == CC_LIST || \
    CC_CNTR_ID( *(cntr) ) == CC_MAP || \
    CC_CNTR_ID( *(cntr) ) == CC_SET \
  ), \
  CC_CNTR_ID( *(cntr) ) == CC_VEC ? cc_vec_size( *(cntr) ) : \
  CC_CNTR_ID( *(cntr) ) == CC_LIST ? cc_list_size( *(cntr) ) : \
  CC_CNTR_ID( *(cntr) ) == CC_MAP ? cc_map_size( *(cntr) ) : \
  CC_CNTR_ID( *(cntr) ) == CC_SET ? CC_SET_SIZE( *(cntr) ) : \
  0 /* Unreachable. */ \
) \

#define cc_cap( cntr ) \
( \
  CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
  CC_STATIC_ASSERT( \
    CC_CNTR_ID( *(cntr) ) == CC_VEC || \
    CC_CNTR_ID( *(cntr) ) == CC_MAP || \
    CC_CNTR_ID( *(cntr) ) == CC_SET \
  ), \
  CC_CNTR_ID( *(cntr) ) == CC_VEC ? cc_vec_cap( *(cntr) ) : \
  CC_CNTR_ID( *(cntr) ) == CC_MAP ? cc_map_cap( *(cntr) ) : \
  CC_CNTR_ID( *(cntr) ) == CC_SET ? CC_SET_CAP( *(cntr) ) : \
  0 /* Unreachable. */ \
) \

#define cc_reserve( cntr, n ) \
CC_CAST_MAYBE_UNUSED( bool, ( \
  CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
  CC_STATIC_ASSERT( \
    CC_CNTR_ID( *(cntr) ) == CC_VEC || \
    CC_CNTR_ID( *(cntr) ) == CC_MAP || \
    CC_CNTR_ID( *(cntr) ) == CC_SET \
  ), \
  CC_CNTR_ID( *(cntr) ) == CC_VEC ? CC_VEC_RESERVE( *(cntr), (n) ) : \
  CC_CNTR_ID( *(cntr) ) == CC_MAP ? CC_MAP_RESERVE( *(cntr), (n) ) : \
  CC_CNTR_ID( *(cntr) ) == CC_SET ? CC_SET_RESERVE( *(cntr), (n) ) : \
  false /* Unreachable. */ \
) ) \

#define cc_resize( cntr, n ) \
CC_CAST_MAYBE_UNUSED( bool, ( \
  CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
  CC_STATIC_ASSERT( CC_CNTR_ID( *(cntr) ) == CC_VEC ), \
  CC_VEC_RESIZE( *(cntr), (n) ) \
) ) \

#define cc_shrink( cntr ) \
CC_CAST_MAYBE_UNUSED( bool, ( \
  CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
  CC_STATIC_ASSERT( \
    CC_CNTR_ID( *(cntr) ) == CC_VEC || \
    CC_CNTR_ID( *(cntr) ) == CC_MAP || \
    CC_CNTR_ID( *(cntr) ) == CC_SET \
  ), \
  CC_CNTR_ID( *(cntr) ) == CC_VEC ? CC_VEC_SHRINK( *(cntr) ) : \
  CC_CNTR_ID( *(cntr) ) == CC_MAP ? CC_MAP_SHRINK( *(cntr) ) : \
  CC_CNTR_ID( *(cntr) ) == CC_SET ? CC_SET_SHRINK( *(cntr) ) : \
  false /* Unreachable. */ \
) ) \

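// Editorial sketch (not part of cc.h): cc_reserve, cc_resize, and cc_shrink evaluate to a bool indicating success,
// while cc_size and cc_cap are plain queries. Hypothetical user-side code for a vector:

#if 0
vec( int ) our_vec;
init( &our_vec );
if( reserve( &our_vec, 1000 ) )                          /* Pre-allocate capacity for 1000 elements.    */
{
  size_t unused = cap( &our_vec ) - size( &our_vec );    /* Capacity is now at least 1000; size is 0.    */
  (void)unused;
}
cleanup( &our_vec );
#endif
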
#define cc_insert( ... ) CC_SELECT_ON_NUM_ARGS( cc_insert, __VA_ARGS__ )

#define cc_insert_2( cntr, el ) \
CC_CAST_MAYBE_UNUSED( CC_EL_TY( *(cntr) ) *, ( \
  CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
  CC_STATIC_ASSERT( CC_CNTR_ID( *(cntr) ) == CC_SET ), \
  CC_SET_INSERT( *(cntr), (el), true ) \
) ) \

#define cc_insert_3( cntr, i, el ) \
CC_CAST_MAYBE_UNUSED( CC_EL_TY( *(cntr) ) *, ( \
  CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
  CC_STATIC_ASSERT( \
    CC_CNTR_ID( *(cntr) ) == CC_VEC || \
    CC_CNTR_ID( *(cntr) ) == CC_LIST || \
    CC_CNTR_ID( *(cntr) ) == CC_MAP \
  ), \
  CC_CNTR_ID( *(cntr) ) == CC_VEC ? \
    CC_VEC_INSERT( *(cntr), CC_IF_THEN_XP_ELSE_DUMMY( CC_CNTR_ID( *(cntr) ) == CC_VEC, (i), size_t ), (el) ) : \
  CC_CNTR_ID( *(cntr) ) == CC_LIST ? \
    CC_LIST_INSERT( \
      *(cntr), \
      CC_IF_THEN_XP_ELSE_DUMMY( CC_CNTR_ID( *(cntr) ) == CC_LIST, (i), CC_EL_TY( *(cntr) )* ), \
      (el) \
    ) : \
  CC_CNTR_ID( *(cntr) ) == CC_MAP ? \
    CC_MAP_INSERT( \
      *(cntr), \
      CC_IF_THEN_XP_ELSE_DUMMY( CC_CNTR_ID( *(cntr) ) == CC_MAP, (i), CC_KEY_TY( *(cntr) ) ), \
      (el), \
      true \
    ) : \
  NULL /* Unreachable. */ \
) ) \

#define cc_insert_n( cntr, i, els, n ) \
CC_CAST_MAYBE_UNUSED( CC_EL_TY( *(cntr) ) *, ( \
  CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
  CC_STATIC_ASSERT( CC_CNTR_ID( *(cntr) ) == CC_VEC ), \
  CC_VEC_INSERT_N( *(cntr), (i), (els), (n) ) \
) ) \

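// Editorial sketch (not part of cc.h): the insert macro is argument-count overloaded. Two arguments insert an element
// into a set; three arguments insert by index (vectors), before a pointer-iterator (lists), or under a key (maps).
// All forms evaluate to a pointer to the inserted element, or NULL on allocation failure. Hypothetical user-side code:

#if 0
vec( int ) our_vec;            /* ...initialized elsewhere... */
map( int, float ) our_map;     /* ...initialized elsewhere... */
set( int ) our_set;            /* ...initialized elsewhere... */

int *vec_el = insert( &our_vec, 0, 123 );      /* Insert 123 at index 0.    */
float *map_el = insert( &our_map, 7, 1.5f );   /* Insert 1.5 under key 7.   */
int *set_el = insert( &our_set, 123 );         /* Insert 123 into the set.  */
if( !vec_el || !map_el || !set_el )
  { /* Handle allocation failure. */ }
#endif
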
#define cc_push( cntr, el ) \
|
2726
|
+
CC_CAST_MAYBE_UNUSED( CC_EL_TY( *(cntr) ) *, ( \
|
2727
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2728
|
+
CC_STATIC_ASSERT( \
|
2729
|
+
CC_CNTR_ID( *(cntr) ) == CC_VEC || \
|
2730
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST \
|
2731
|
+
), \
|
2732
|
+
CC_CNTR_ID( *(cntr) ) == CC_VEC ? CC_VEC_PUSH( *(cntr), (el) ) : \
|
2733
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST ? CC_LIST_PUSH( *(cntr), (el) ) : \
|
2734
|
+
NULL /* Unreachable. */ \
|
2735
|
+
) ) \
|
2736
|
+
|
2737
|
+
#define cc_push_n( cntr, els, n ) \
|
2738
|
+
CC_CAST_MAYBE_UNUSED( CC_EL_TY( *(cntr) ) *, ( \
|
2739
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2740
|
+
CC_STATIC_ASSERT( CC_CNTR_ID( *(cntr) ) == CC_VEC ), \
|
2741
|
+
CC_VEC_PUSH_N( *(cntr), (els), (n) ) \
|
2742
|
+
) ) \
|
2743
|
+
|
2744
|
+
#define cc_splice( cntr, i, src, src_i ) \
|
2745
|
+
CC_CAST_MAYBE_UNUSED( bool, ( \
|
2746
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2747
|
+
CC_STATIC_ASSERT( CC_IS_SAME_TY( (cntr), (src) ) ), \
|
2748
|
+
CC_STATIC_ASSERT( CC_CNTR_ID( *(cntr) ) == CC_LIST ), \
|
2749
|
+
CC_LIST_SPLICE( *(cntr), (i), *(src), (src_i) ) \
|
2750
|
+
) ) \
|
2751
|
+
|
2752
|
+
#define cc_get( cntr, i ) \
|
2753
|
+
CC_CAST_MAYBE_UNUSED( CC_EL_TY( *(cntr) ) *, ( \
|
2754
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2755
|
+
CC_STATIC_ASSERT( \
|
2756
|
+
CC_CNTR_ID( *(cntr) ) == CC_VEC || \
|
2757
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP || \
|
2758
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET \
|
2759
|
+
), \
|
2760
|
+
CC_CNTR_ID( *(cntr) ) == CC_VEC ? \
|
2761
|
+
CC_VEC_GET( *(cntr), CC_IF_THEN_XP_ELSE_DUMMY( CC_CNTR_ID( *(cntr) ) == CC_VEC, (i), size_t ) ) : \
|
2762
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP ? \
|
2763
|
+
CC_MAP_GET( *(cntr), CC_IF_THEN_XP_ELSE_DUMMY( CC_CNTR_ID( *(cntr) ) == CC_MAP, (i), CC_KEY_TY( *(cntr) ) ) ) : \
|
2764
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET ? \
|
2765
|
+
CC_SET_GET( *(cntr), CC_IF_THEN_XP_ELSE_DUMMY( CC_CNTR_ID( *(cntr) ) == CC_SET, (i), CC_KEY_TY( *(cntr) ) ) ) : \
|
2766
|
+
NULL /* Unreachable. */ \
|
2767
|
+
) ) \
|
2768
|
+
|
2769
|
+
#define cc_get_or_insert( ... ) CC_SELECT_ON_NUM_ARGS( cc_get_or_insert, __VA_ARGS__ )
|
2770
|
+
|
2771
|
+
#define cc_get_or_insert_2( cntr, el ) \
|
2772
|
+
CC_CAST_MAYBE_UNUSED( CC_EL_TY( *(cntr) ) *, ( \
|
2773
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2774
|
+
CC_STATIC_ASSERT( CC_CNTR_ID( *(cntr) ) == CC_SET ), \
|
2775
|
+
CC_SET_INSERT( *(cntr), (el), false ) \
|
2776
|
+
) ) \
|
2777
|
+
|
2778
|
+
#define cc_get_or_insert_3( cntr, key, el ) \
|
2779
|
+
CC_CAST_MAYBE_UNUSED( CC_EL_TY( *(cntr) ) *, ( \
|
2780
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2781
|
+
CC_STATIC_ASSERT( CC_CNTR_ID( *(cntr) ) == CC_MAP ), \
|
2782
|
+
CC_MAP_INSERT( *(cntr), (key), (el), false ) \
|
2783
|
+
) ) \
|
2784
|
+
|
2785
|
+
#define cc_key_for( cntr, i ) \
|
2786
|
+
CC_CAST_MAYBE_UNUSED( const CC_KEY_TY( *(cntr) ) *, ( \
|
2787
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2788
|
+
CC_STATIC_ASSERT( CC_CNTR_ID( *(cntr) ) == CC_MAP ), \
|
2789
|
+
CC_MAP_KEY_FOR( *(cntr), (i) ) \
|
2790
|
+
) ) \
|
2791
|
+
|
2792
|
+
#define cc_erase( cntr, i ) \
|
2793
|
+
CC_IF_THEN_PTR_TO_BOOL_ELSE_PTR( CC_CNTR_ID( *(cntr) ) == CC_MAP || CC_CNTR_ID( *(cntr) ) == CC_SET, \
|
2794
|
+
CC_CAST_MAYBE_UNUSED( CC_EL_TY( *(cntr) ) *, ( \
|
2795
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2796
|
+
CC_STATIC_ASSERT( \
|
2797
|
+
CC_CNTR_ID( *(cntr) ) == CC_VEC || \
|
2798
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST || \
|
2799
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP || \
|
2800
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET \
|
2801
|
+
), \
|
2802
|
+
CC_CNTR_ID( *(cntr) ) == CC_VEC ? \
|
2803
|
+
CC_VEC_ERASE( *(cntr), CC_IF_THEN_XP_ELSE_DUMMY( CC_CNTR_ID( *(cntr) ) == CC_VEC, (i), size_t ) ) : \
|
2804
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST ? \
|
2805
|
+
CC_LIST_ERASE( \
|
2806
|
+
*(cntr), \
|
2807
|
+
CC_IF_THEN_XP_ELSE_DUMMY( CC_CNTR_ID( *(cntr) ) == CC_LIST, (i), CC_EL_TY( *(cntr) ) * ) \
|
2808
|
+
) : \
|
2809
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP ? \
|
2810
|
+
CC_MAP_ERASE( *(cntr), CC_IF_THEN_XP_ELSE_DUMMY( CC_CNTR_ID( *(cntr) ) == CC_MAP, (i), CC_KEY_TY( *(cntr) ) ) ) : \
|
2811
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET ? \
|
2812
|
+
CC_SET_ERASE( *(cntr), CC_IF_THEN_XP_ELSE_DUMMY( CC_CNTR_ID( *(cntr) ) == CC_SET, (i), CC_KEY_TY( *(cntr) ) ) ) : \
|
2813
|
+
NULL /* Unreachable. */ \
|
2814
|
+
) ) ) \
|
2815
|
+
|
2816
|
+
#define cc_erase_n( cntr, i, n ) \
|
2817
|
+
CC_CAST_MAYBE_UNUSED( CC_EL_TY( *(cntr) ) *, ( \
|
2818
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2819
|
+
CC_STATIC_ASSERT( CC_CNTR_ID( *(cntr) ) == CC_VEC ), \
|
2820
|
+
CC_VEC_ERASE_N( *(cntr), (i), (n) ) \
|
2821
|
+
) ) \
|
2822
|
+
|
2823
|
+
#define cc_erase_itr( cntr, i ) \
|
2824
|
+
( \
|
2825
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2826
|
+
CC_STATIC_ASSERT( \
|
2827
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP || \
|
2828
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET \
|
2829
|
+
), \
|
2830
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP ? CC_MAP_ERASE_ITR( *(cntr), (i) ) : \
|
2831
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET ? CC_SET_ERASE_ITR( *(cntr), (i) ) : \
|
2832
|
+
(void)0 /* Unreachable. */ \
|
2833
|
+
) \
|
2834
|
+
|
2835
|
+
#define cc_clear( cntr ) \
|
2836
|
+
( \
|
2837
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2838
|
+
CC_CNTR_ID( *(cntr) ) == CC_VEC ? CC_VEC_CLEAR( *(cntr) ) : \
|
2839
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST ? CC_LIST_CLEAR( *(cntr) ) : \
|
2840
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP ? CC_MAP_CLEAR( *(cntr) ) : \
|
2841
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET ? CC_SET_CLEAR( *(cntr) ) : \
|
2842
|
+
(void)0 /* Unreachable. */ \
|
2843
|
+
) \
|
2844
|
+
|
2845
|
+
#define cc_cleanup( cntr ) \
|
2846
|
+
( \
|
2847
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2848
|
+
CC_CNTR_ID( *(cntr) ) == CC_VEC ? CC_VEC_CLEANUP( *(cntr) ) : \
|
2849
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST ? CC_LIST_CLEANUP( *(cntr) ) : \
|
2850
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP ? CC_MAP_CLEANUP( *(cntr) ) : \
|
2851
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET ? CC_SET_CLEANUP( *(cntr) ) : \
|
2852
|
+
(void)0 /* Unreachable. */ \
|
2853
|
+
) \
|
2854
|
+
|
2855
|
+
#define cc_first( cntr ) \
|
2856
|
+
CC_CAST_MAYBE_UNUSED( CC_EL_TY( *(cntr) ) *, ( \
|
2857
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2858
|
+
CC_STATIC_ASSERT( \
|
2859
|
+
CC_CNTR_ID( *(cntr) ) == CC_VEC || \
|
2860
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST || \
|
2861
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP || \
|
2862
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET \
|
2863
|
+
), \
|
2864
|
+
CC_CNTR_ID( *(cntr) ) == CC_VEC ? cc_vec_first( *(cntr) ) : \
|
2865
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST ? cc_list_first( *(cntr) ) : \
|
2866
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP ? CC_MAP_FIRST( *(cntr) ) : \
|
2867
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET ? CC_SET_FIRST( *(cntr) ) : \
|
2868
|
+
NULL /* Unreachable. */ \
|
2869
|
+
) ) \
|
2870
|
+
|
2871
|
+
#define cc_last( cntr ) \
|
2872
|
+
CC_CAST_MAYBE_UNUSED( CC_EL_TY( *(cntr) ) *, ( \
|
2873
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2874
|
+
CC_STATIC_ASSERT( \
|
2875
|
+
CC_CNTR_ID( *(cntr) ) == CC_VEC || \
|
2876
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST || \
|
2877
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP || \
|
2878
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET \
|
2879
|
+
), \
|
2880
|
+
CC_CNTR_ID( *(cntr) ) == CC_VEC ? CC_VEC_LAST( *(cntr) ) : \
|
2881
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST ? cc_list_last( *(cntr) ) : \
|
2882
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP ? CC_MAP_LAST( *(cntr) ) : \
|
2883
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET ? CC_SET_LAST( *(cntr) ) : \
|
2884
|
+
NULL /* Unreachable. */ \
|
2885
|
+
) ) \
|
2886
|
+
|
2887
|
+
#define cc_r_end( cntr ) \
|
2888
|
+
CC_CAST_MAYBE_UNUSED( CC_EL_TY( *(cntr) ) *, ( \
|
2889
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2890
|
+
CC_STATIC_ASSERT( \
|
2891
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST || \
|
2892
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP || \
|
2893
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET \
|
2894
|
+
), \
|
2895
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST ? cc_list_r_end( *(cntr) ) : \
|
2896
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP ? cc_map_r_end( *(cntr) ) : \
|
2897
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET ? CC_SET_R_END( *(cntr) ) : \
|
2898
|
+
NULL /* Unreachable. */ \
|
2899
|
+
) ) \
|
2900
|
+
|
2901
|
+
#define cc_end( cntr ) \
|
2902
|
+
CC_CAST_MAYBE_UNUSED( CC_EL_TY( *(cntr) ) *, ( \
|
2903
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2904
|
+
CC_STATIC_ASSERT( \
|
2905
|
+
CC_CNTR_ID( *(cntr) ) == CC_VEC || \
|
2906
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST || \
|
2907
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP || \
|
2908
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET \
|
2909
|
+
), \
|
2910
|
+
CC_CNTR_ID( *(cntr) ) == CC_VEC ? CC_VEC_END( *(cntr) ) : \
|
2911
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST ? cc_list_end( *(cntr) ) : \
|
2912
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP ? CC_MAP_END( *(cntr) ) : \
|
2913
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET ? CC_SET_END( *(cntr) ) : \
|
2914
|
+
NULL /* Unreachable. */ \
|
2915
|
+
) ) \
|
2916
|
+
|
2917
|
+
#define cc_next( cntr, i ) \
|
2918
|
+
CC_CAST_MAYBE_UNUSED( CC_EL_TY( *(cntr) ) *, ( \
|
2919
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2920
|
+
CC_STATIC_ASSERT( \
|
2921
|
+
CC_CNTR_ID( *(cntr) ) == CC_VEC || \
|
2922
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST || \
|
2923
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP || \
|
2924
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET \
|
2925
|
+
), \
|
2926
|
+
CC_CNTR_ID( *(cntr) ) == CC_VEC ? CC_VEC_NEXT( *(cntr), (i) ) : \
|
2927
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST ? cc_list_next( *(cntr), (i) ) : \
|
2928
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP ? CC_MAP_NEXT( *(cntr), (i) ) : \
|
2929
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET ? CC_SET_NEXT( *(cntr), (i) ) : \
|
2930
|
+
NULL /* Unreachable. */ \
|
2931
|
+
) ) \
|
2932
|
+
|
2933
|
+
#define cc_prev( cntr, i ) \
|
2934
|
+
CC_CAST_MAYBE_UNUSED( CC_EL_TY( *(cntr) ) *, ( \
|
2935
|
+
CC_WARN_DUPLICATE_SIDE_EFFECTS( cntr ), \
|
2936
|
+
CC_STATIC_ASSERT( \
|
2937
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST || \
|
2938
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP || \
|
2939
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET \
|
2940
|
+
), \
|
2941
|
+
CC_CNTR_ID( *(cntr) ) == CC_LIST ? cc_list_prev( *(cntr), (i) ) : \
|
2942
|
+
CC_CNTR_ID( *(cntr) ) == CC_MAP ? CC_MAP_PREV( *(cntr), (i) ) : \
|
2943
|
+
CC_CNTR_ID( *(cntr) ) == CC_SET ? CC_SET_PREV( *(cntr), (i) ) : \
|
2944
|
+
NULL /* Unreachable. */ \
|
2945
|
+
) ) \

#define cc_for_each( ... ) CC_SELECT_ON_NUM_ARGS( cc_for_each, __VA_ARGS__ )

#define cc_for_each_2( cntr, i_name ) \
  for( CC_EL_TY( *(cntr) ) *i_name = cc_first( cntr ); i_name != cc_end( cntr ); i_name = cc_next( (cntr), i_name ) ) \

#define cc_for_each_3( cntr, key_ptr_name, i_name ) \
  for( CC_EL_TY( *(cntr) ) *i_name = cc_first( cntr ); i_name != cc_end( cntr ); i_name = cc_next( (cntr), i_name ) ) \
    for( const CC_KEY_TY( *(cntr) ) *key_ptr_name = cc_key_for( (cntr), i_name ); key_ptr_name; key_ptr_name = NULL ) \

#define cc_r_for_each( ... ) CC_SELECT_ON_NUM_ARGS( cc_r_for_each, __VA_ARGS__ )

#define cc_r_for_each_2( cntr, i_name ) \
  for( CC_EL_TY( *(cntr) ) *i_name = cc_last( cntr ); i_name != cc_r_end( cntr ); i_name = cc_prev( (cntr), i_name ) ) \

#define cc_r_for_each_3( cntr, key_ptr_name, i_name ) \
  for( CC_EL_TY( *(cntr) ) *i_name = cc_last( cntr ); i_name != cc_r_end( cntr ); i_name = cc_prev( (cntr), i_name ) ) \
    for( const CC_KEY_TY( *(cntr) ) *key_ptr_name = cc_key_for( (cntr), i_name ); key_ptr_name; key_ptr_name = NULL ) \

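// Editorial sketch (not part of cc.h): cc_for_each declares a pointer-iterator over the elements, and the
// three-argument form additionally exposes a const pointer to the key (maps). Hypothetical user-side code:

#if 0
map( int, float ) our_map;    /* ...initialized and populated elsewhere... */

for_each( &our_map, el )      /* el is a float *.                        */
  *el += 1.0f;

for_each( &our_map, key, el ) /* key is a const int *, el is a float *.  */
  if( *key > 0 )
    *el = 0.0f;

r_for_each( &our_map, el )    /* Same elements, visited from last to first. */
  *el *= 2.0f;
#endif
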
/*--------------------------------------------------------------------------------------------------------------------*/
/*                             Destructor, comparison, and hash functions and custom load factors                      */
/*--------------------------------------------------------------------------------------------------------------------*/

// Octal counters that support up to 511 of each function type and 511 load factors.
#define CC_N_DTORS_D1 0 // D1 = digit 1, i.e. least significant digit.
#define CC_N_DTORS_D2 0
#define CC_N_DTORS_D3 0
#define CC_N_CMPRS_D1 0
#define CC_N_CMPRS_D2 0
#define CC_N_CMPRS_D3 0
#define CC_N_HASHS_D1 0
#define CC_N_HASHS_D2 0
#define CC_N_HASHS_D3 0
#define CC_N_LOADS_D1 0
#define CC_N_LOADS_D2 0
#define CC_N_LOADS_D3 0

#define CC_CAT_3_( a, b, c ) a##b##c
#define CC_CAT_3( a, b, c ) CC_CAT_3_( a, b, c )
#define CC_CAT_4_( a, b, c, d ) a##b##c##d
#define CC_CAT_4( a, b, c, d ) CC_CAT_4_( a, b, c, d )

// Macros that provide the current value of each counter as a three-digit octal number preceded by 0.
// These numbers are used to form unique type and function names to plug into CC_EL_DTOR, CC_KEY_DTOR, CC_KEY_CMPR, etc.
#define CC_N_DTORS CC_CAT_4( 0, CC_N_DTORS_D3, CC_N_DTORS_D2, CC_N_DTORS_D1 )
#define CC_N_CMPRS CC_CAT_4( 0, CC_N_CMPRS_D3, CC_N_CMPRS_D2, CC_N_CMPRS_D1 )
#define CC_N_HASHS CC_CAT_4( 0, CC_N_HASHS_D3, CC_N_HASHS_D2, CC_N_HASHS_D1 )
#define CC_N_LOADS CC_CAT_4( 0, CC_N_LOADS_D3, CC_N_LOADS_D2, CC_N_LOADS_D1 )

// CC_FOR_EACH_XXX macros that call macro m with the first argument n, where n = [0, counter XXX ),
// and the second argument arg.
// These are used to construct _Generic or ternary expressions that select the correct function (or load factor) for a
// container's key or element type (see below).

#define CC_CALL_MACRO( m, arg, n ) m( n, arg )
#define CC_R1_0( m, arg, d3, d2 )
#define CC_R1_1( m, arg, d3, d2 ) CC_CALL_MACRO( m, arg, CC_CAT_4( 0, d3, d2, 0 ) )
#define CC_R1_2( m, arg, d3, d2 ) CC_CALL_MACRO( m, arg, CC_CAT_4( 0, d3, d2, 1 ) ) CC_R1_1( m, arg, d3, d2 )
#define CC_R1_3( m, arg, d3, d2 ) CC_CALL_MACRO( m, arg, CC_CAT_4( 0, d3, d2, 2 ) ) CC_R1_2( m, arg, d3, d2 )
#define CC_R1_4( m, arg, d3, d2 ) CC_CALL_MACRO( m, arg, CC_CAT_4( 0, d3, d2, 3 ) ) CC_R1_3( m, arg, d3, d2 )
#define CC_R1_5( m, arg, d3, d2 ) CC_CALL_MACRO( m, arg, CC_CAT_4( 0, d3, d2, 4 ) ) CC_R1_4( m, arg, d3, d2 )
#define CC_R1_6( m, arg, d3, d2 ) CC_CALL_MACRO( m, arg, CC_CAT_4( 0, d3, d2, 5 ) ) CC_R1_5( m, arg, d3, d2 )
#define CC_R1_7( m, arg, d3, d2 ) CC_CALL_MACRO( m, arg, CC_CAT_4( 0, d3, d2, 6 ) ) CC_R1_6( m, arg, d3, d2 )
#define CC_R1_8( m, arg, d3, d2 ) CC_CALL_MACRO( m, arg, CC_CAT_4( 0, d3, d2, 7 ) ) CC_R1_7( m, arg, d3, d2 )
#define CC_R2_0( m, arg, d3 )
#define CC_R2_1( m, arg, d3 ) CC_R1_8( m, arg, d3, 0 )
#define CC_R2_2( m, arg, d3 ) CC_R1_8( m, arg, d3, 1 ) CC_R2_1( m, arg, d3 )
#define CC_R2_3( m, arg, d3 ) CC_R1_8( m, arg, d3, 2 ) CC_R2_2( m, arg, d3 )
#define CC_R2_4( m, arg, d3 ) CC_R1_8( m, arg, d3, 3 ) CC_R2_3( m, arg, d3 )
#define CC_R2_5( m, arg, d3 ) CC_R1_8( m, arg, d3, 4 ) CC_R2_4( m, arg, d3 )
#define CC_R2_6( m, arg, d3 ) CC_R1_8( m, arg, d3, 5 ) CC_R2_5( m, arg, d3 )
#define CC_R2_7( m, arg, d3 ) CC_R1_8( m, arg, d3, 6 ) CC_R2_6( m, arg, d3 )
#define CC_R2_8( m, arg, d3 ) CC_R1_8( m, arg, d3, 7 ) CC_R2_7( m, arg, d3 )
#define CC_R3_0( m, arg )
#define CC_R3_1( m, arg ) CC_R2_8( m, arg, 0 )
#define CC_R3_2( m, arg ) CC_R2_8( m, arg, 1 ) CC_R3_1( m, arg )
#define CC_R3_3( m, arg ) CC_R2_8( m, arg, 2 ) CC_R3_2( m, arg )
#define CC_R3_4( m, arg ) CC_R2_8( m, arg, 3 ) CC_R3_3( m, arg )
#define CC_R3_5( m, arg ) CC_R2_8( m, arg, 4 ) CC_R3_4( m, arg )
#define CC_R3_6( m, arg ) CC_R2_8( m, arg, 5 ) CC_R3_5( m, arg )
#define CC_R3_7( m, arg ) CC_R2_8( m, arg, 6 ) CC_R3_6( m, arg )

#define CC_FOR_OCT_COUNT( m, arg, d3, d2, d1 ) \
  CC_CAT( CC_R1_, d1 )( m, arg, d3, d2 ) \
  CC_CAT( CC_R2_, d2 )( m, arg, d3 ) \
  CC_CAT( CC_R3_, d3 )( m, arg ) \

#define CC_FOR_EACH_DTOR( m, arg ) CC_FOR_OCT_COUNT( m, arg, CC_N_DTORS_D3, CC_N_DTORS_D2, CC_N_DTORS_D1 )
#define CC_FOR_EACH_CMPR( m, arg ) CC_FOR_OCT_COUNT( m, arg, CC_N_CMPRS_D3, CC_N_CMPRS_D2, CC_N_CMPRS_D1 )
#define CC_FOR_EACH_HASH( m, arg ) CC_FOR_OCT_COUNT( m, arg, CC_N_HASHS_D3, CC_N_HASHS_D2, CC_N_HASHS_D1 )
#define CC_FOR_EACH_LOAD( m, arg ) CC_FOR_OCT_COUNT( m, arg, CC_N_LOADS_D3, CC_N_LOADS_D2, CC_N_LOADS_D1 )

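// The CC_FOR_OCT_COUNT machinery above repeats a macro once per registered function by recursing over the three octal
// counter digits. The following reduced, self-contained illustration (hypothetical names, not part of cc.h) shows the
// same technique with a single digit:

#if 0 /* Compile separately to observe the expansion. */
#include <stdio.h>

#define CALL( m, arg, n ) m( n, arg )
#define R_0( m, arg )
#define R_1( m, arg ) CALL( m, arg, 0 )
#define R_2( m, arg ) CALL( m, arg, 1 ) R_1( m, arg )
#define R_3( m, arg ) CALL( m, arg, 2 ) R_2( m, arg )
#define CAT_( a, b ) a##b
#define CAT( a, b ) CAT_( a, b )
#define FOR_COUNT( m, arg, d1 ) CAT( R_, d1 )( m, arg ) /* Emits m( n, arg ) for n = d1 - 1 down to 0. */

#define PRINT_SLOT( n, arg ) printf( "slot %d: %s\n", n, arg );

int main( void )
{
  FOR_COUNT( PRINT_SLOT, "registered", 3 ) /* Expands to three printf statements (n = 2, 1, 0). */
  return 0;
}
#endif
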
// Macros for inferring the destructor, comparison, or hash function or load factor associated with a container's
// key or element type, as well as for determining whether a comparison or hash function exists for a type.
// In C, we use the CC_FOR_EACH_XXXX macros above to create _Generic expressions that select the correct user-defined
// function or load factor for the container's key or element types.
// For comparison and hash functions, the list of user-defined functions is followed by a nested _Generic statement
// containing the default functions for integer types and NULL-terminated strings, which allows the user to overwrite
// the default functions.
// In C++, we use the same macros combined with std::is_same and ternary expressions to emulate _Generic statements.
// Unfortunately, the behavior is not identical because the C++ versions won't generate compile errors if the user
// defines multiple functions (e.g. multiple destructors) for the same type.
// Hence, it is up to the user to make sure they are not doing that if they are compiling for C++.

#ifdef __cplusplus

#define CC_EL_DTOR_SLOT( n, arg ) std::is_same<arg, cc_dtor_##n##_ty>::value ? cc_dtor_##n##_fn :
|
3053
|
+
#define CC_EL_DTOR( cntr ) \
|
3054
|
+
( \
|
3055
|
+
CC_FOR_EACH_DTOR( CC_EL_DTOR_SLOT, CC_EL_TY( cntr ) ) \
|
3056
|
+
(void (*)( void * ))NULL \
|
3057
|
+
) \
|
3058
|
+
|
3059
|
+
#define CC_KEY_DTOR_SLOT( n, arg ) \
|
3060
|
+
std::is_same< \
|
3061
|
+
CC_TYPEOF_XP(**arg), \
|
3062
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( arg ), cc_dtor_##n##_ty ) \
|
3063
|
+
>::value ? cc_dtor_##n##_fn : \
|
3064
|
+
|
3065
|
+
#define CC_KEY_DTOR( cntr ) \
|
3066
|
+
( \
|
3067
|
+
CC_FOR_EACH_DTOR( CC_KEY_DTOR_SLOT, cntr ) \
|
3068
|
+
(void (*)( void * ))NULL \
|
3069
|
+
) \
|
3070
|
+
|
3071
|
+
#define CC_KEY_CMPR_SLOT( n, arg ) \
|
3072
|
+
std::is_same<CC_TYPEOF_XP(**arg), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( arg ), cc_cmpr_##n##_ty ) >::value ? \
|
3073
|
+
cc_cmpr_##n##_fn : \
|
3074
|
+
|
3075
|
+
#define CC_KEY_CMPR( cntr ) \
|
3076
|
+
( \
|
3077
|
+
CC_FOR_EACH_CMPR( CC_KEY_CMPR_SLOT, cntr ) \
|
3078
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), char )>::value ? \
|
3079
|
+
cc_cmpr_char : \
|
3080
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned char )>::value ? \
|
3081
|
+
cc_cmpr_unsigned_char : \
|
3082
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), signed char )>::value ? \
|
3083
|
+
cc_cmpr_signed_char : \
|
3084
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned short )>::value ? \
|
3085
|
+
cc_cmpr_unsigned_short : \
|
3086
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), short )>::value ? \
|
3087
|
+
cc_cmpr_short : \
|
3088
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned int )>::value ? \
|
3089
|
+
cc_cmpr_unsigned_int : \
|
3090
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), int )>::value ? \
|
3091
|
+
cc_cmpr_int : \
|
3092
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned long )>::value ? \
|
3093
|
+
cc_cmpr_unsigned_long : \
|
3094
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), long )>::value ? \
|
3095
|
+
cc_cmpr_long : \
|
3096
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned long long )>::value ? \
|
3097
|
+
cc_cmpr_unsigned_long_long : \
|
3098
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), long long )>::value ? \
|
3099
|
+
cc_cmpr_long_long : \
|
3100
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), size_t )>::value ? \
|
3101
|
+
cc_cmpr_size_t : \
|
3102
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), char * )>::value ? \
|
3103
|
+
cc_cmpr_c_string : \
|
3104
|
+
(int (*)( void *, void *))NULL \
|
3105
|
+
) \
|
3106
|
+
|
3107
|
+
#define CC_KEY_HASH_SLOT( n, arg ) \
|
3108
|
+
std::is_same< \
|
3109
|
+
CC_TYPEOF_XP(**arg), \
|
3110
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( arg ), cc_hash_##n##_ty ) \
|
3111
|
+
>::value ? cc_hash_##n##_fn : \
|
3112
|
+
|
3113
|
+
#define CC_KEY_HASH( cntr ) \
|
3114
|
+
( \
|
3115
|
+
CC_FOR_EACH_HASH( CC_KEY_HASH_SLOT, cntr ) \
|
3116
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), char )>::value ? \
|
3117
|
+
cc_hash_char : \
|
3118
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned char )>::value ? \
|
3119
|
+
cc_hash_unsigned_char : \
|
3120
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), signed char )>::value ? \
|
3121
|
+
cc_hash_signed_char : \
|
3122
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned short )>::value ? \
|
3123
|
+
cc_hash_unsigned_short : \
|
3124
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), short )>::value ? \
|
3125
|
+
cc_hash_short : \
|
3126
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned int )>::value ? \
|
3127
|
+
cc_hash_unsigned_int : \
|
3128
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), int )>::value ? \
|
3129
|
+
cc_hash_int : \
|
3130
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned long )>::value ? \
|
3131
|
+
cc_hash_unsigned_long : \
|
3132
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), long )>::value ? \
|
3133
|
+
cc_hash_long : \
|
3134
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned long long )>::value ? \
|
3135
|
+
cc_hash_unsigned_long_long : \
|
3136
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), long long )>::value ? \
|
3137
|
+
cc_hash_long_long : \
|
3138
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), size_t )>::value ? \
|
3139
|
+
cc_hash_size_t : \
|
3140
|
+
std::is_same<CC_TYPEOF_XP(**cntr), CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), char * )>::value ? \
|
3141
|
+
cc_hash_c_string : \
|
3142
|
+
(size_t (*)( void *))NULL \
|
3143
|
+
) \
|
3144
|
+
|
3145
|
+
#define CC_HAS_CMPR_SLOT( n, arg ) std::is_same<arg, cc_cmpr_##n##_ty>::value ? true :
|
3146
|
+
#define CC_HAS_CMPR( ty ) \
|
3147
|
+
( \
|
3148
|
+
std::is_same<ty, char>::value ? true : \
|
3149
|
+
std::is_same<ty, unsigned char>::value ? true : \
|
3150
|
+
std::is_same<ty, signed char>::value ? true : \
|
3151
|
+
std::is_same<ty, unsigned short>::value ? true : \
|
3152
|
+
std::is_same<ty, signed short>::value ? true : \
|
3153
|
+
std::is_same<ty, unsigned int>::value ? true : \
|
3154
|
+
std::is_same<ty, signed int>::value ? true : \
|
3155
|
+
std::is_same<ty, unsigned long>::value ? true : \
|
3156
|
+
std::is_same<ty, signed long>::value ? true : \
|
3157
|
+
std::is_same<ty, unsigned long long>::value ? true : \
|
3158
|
+
std::is_same<ty, signed long long>::value ? true : \
|
3159
|
+
std::is_same<ty, size_t>::value ? true : \
|
3160
|
+
std::is_same<ty, char *>::value ? true : \
|
3161
|
+
CC_FOR_EACH_CMPR( CC_HAS_CMPR_SLOT, ty ) \
|
3162
|
+
false \
|
3163
|
+
) \
|
3164
|
+
|
3165
|
+
#define CC_HAS_HASH_SLOT( n, arg ) std::is_same<arg, cc_hash_##n##_ty>::value ? true :
|
3166
|
+
#define CC_HAS_HASH( ty ) \
|
3167
|
+
( \
|
3168
|
+
std::is_same<ty, char>::value ? true : \
|
3169
|
+
std::is_same<ty, unsigned char>::value ? true : \
|
3170
|
+
std::is_same<ty, signed char>::value ? true : \
|
3171
|
+
std::is_same<ty, unsigned short>::value ? true : \
|
3172
|
+
std::is_same<ty, signed short>::value ? true : \
|
3173
|
+
std::is_same<ty, unsigned int>::value ? true : \
|
3174
|
+
std::is_same<ty, signed int>::value ? true : \
|
3175
|
+
std::is_same<ty, unsigned long>::value ? true : \
|
3176
|
+
std::is_same<ty, signed long>::value ? true : \
|
3177
|
+
std::is_same<ty, unsigned long long>::value ? true : \
|
3178
|
+
std::is_same<ty, signed long long>::value ? true : \
|
3179
|
+
std::is_same<ty, size_t>::value ? true : \
|
3180
|
+
std::is_same<ty, char *>::value ? true : \
|
3181
|
+
CC_FOR_EACH_HASH( CC_HAS_HASH_SLOT, ty ) \
|
3182
|
+
false \
|
3183
|
+
) \
|
3184
|
+
|
3185
|
+
#define CC_KEY_LOAD_SLOT( n, arg ) \
|
3186
|
+
std::is_same< \
|
3187
|
+
CC_TYPEOF_XP(**arg), \
|
3188
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( arg ), cc_load_##n##_ty ) \
|
3189
|
+
>::value ? cc_load_##n##_val : \
|
3190
|
+
|
3191
|
+
#define CC_KEY_LOAD( cntr ) \
|
3192
|
+
( \
|
3193
|
+
CC_FOR_EACH_LOAD( CC_KEY_LOAD_SLOT, cntr ) \
|
3194
|
+
CC_DEFAULT_LOAD \
|
3195
|
+
) \
|
3196
|
+
|
3197
|
+
#else

#define CC_EL_DTOR_SLOT( n, arg ) cc_dtor_##n##_ty: cc_dtor_##n##_fn,
#define CC_EL_DTOR( cntr ) \
  _Generic( (CC_EL_TY( cntr )){ 0 }, \
    CC_FOR_EACH_DTOR( CC_EL_DTOR_SLOT, ) \
    default: (void (*)( void * ))NULL \
  ) \

#define CC_KEY_DTOR_SLOT( n, arg ) CC_MAKE_BASE_FNPTR_TY( arg, cc_dtor_##n##_ty ): cc_dtor_##n##_fn,
#define CC_KEY_DTOR( cntr ) \
  _Generic( (**cntr), \
    CC_FOR_EACH_DTOR( CC_KEY_DTOR_SLOT, CC_EL_TY( cntr ) ) \
    default: (void (*)( void * ))NULL \
  ) \

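// The C branch above relies on _Generic to map a key or element type to the matching user-defined or default
// function. A reduced, self-contained illustration of that dispatch pattern (hypothetical names, not part of cc.h):

#if 0
#include <stdio.h>

static size_t hash_int( void *v )      { return (size_t)*(int *)v * 2654435761ull; }
static size_t hash_c_string( void *v ) { (void)v; return 0; /* Placeholder. */ }

#define HASH_FN_FOR( val ) _Generic( (val), \
  int:    hash_int,                         \
  char *: hash_c_string )

int main( void )
{
  int x = 42;
  printf( "%zu\n", HASH_FN_FOR( x )( &x ) ); /* Selects hash_int at compile time. */
  return 0;
}
#endif
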
#define CC_KEY_CMPR_SLOT( n, arg ) CC_MAKE_BASE_FNPTR_TY( arg, cc_cmpr_##n##_ty ): cc_cmpr_##n##_fn,
|
3214
|
+
#define CC_KEY_CMPR( cntr ) \
|
3215
|
+
_Generic( (**cntr), \
|
3216
|
+
CC_FOR_EACH_CMPR( CC_KEY_CMPR_SLOT, CC_EL_TY( cntr ) ) \
|
3217
|
+
default: _Generic( (**cntr), \
|
3218
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), char ): cc_cmpr_char, \
|
3219
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned char ): cc_cmpr_unsigned_char, \
|
3220
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), signed char ): cc_cmpr_signed_char, \
|
3221
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned short ): cc_cmpr_unsigned_short, \
|
3222
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), short ): cc_cmpr_short, \
|
3223
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned int ): cc_cmpr_unsigned_int, \
|
3224
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), int ): cc_cmpr_int, \
|
3225
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned long ): cc_cmpr_unsigned_long, \
|
3226
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), long ): cc_cmpr_long, \
|
3227
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned long long ): cc_cmpr_unsigned_long_long, \
|
3228
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), long long ): cc_cmpr_long_long, \
|
3229
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), cc_maybe_size_t ): cc_cmpr_size_t, \
|
3230
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), char * ): cc_cmpr_c_string, \
|
3231
|
+
default: (int (*)( void *, void *))NULL \
|
3232
|
+
) \
|
3233
|
+
) \
|
3234
|
+
|
3235
|
+
#define CC_KEY_HASH_SLOT( n, arg ) CC_MAKE_BASE_FNPTR_TY( arg, cc_hash_##n##_ty ): cc_hash_##n##_fn,
|
3236
|
+
#define CC_KEY_HASH( cntr ) \
|
3237
|
+
_Generic( (**cntr), \
|
3238
|
+
CC_FOR_EACH_HASH( CC_KEY_HASH_SLOT, CC_EL_TY( cntr ) ) \
|
3239
|
+
default: _Generic( (**cntr), \
|
3240
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), char ): cc_hash_char, \
|
3241
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned char ): cc_hash_unsigned_char, \
|
3242
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), signed char ): cc_hash_signed_char, \
|
3243
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned short ): cc_hash_unsigned_short, \
|
3244
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), short ): cc_hash_short, \
|
3245
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned int ): cc_hash_unsigned_int, \
|
3246
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), int ): cc_hash_int, \
|
3247
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned long ): cc_hash_unsigned_long, \
|
3248
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), long ): cc_hash_long, \
|
3249
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), unsigned long long ): cc_hash_unsigned_long_long, \
|
3250
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), long long ): cc_hash_long_long, \
|
3251
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), cc_maybe_size_t ): cc_hash_size_t, \
|
3252
|
+
CC_MAKE_BASE_FNPTR_TY( CC_EL_TY( cntr ), char * ): cc_hash_c_string, \
|
3253
|
+
default: (size_t (*)( void *))NULL \
|
3254
|
+
) \
|
3255
|
+
) \
|
3256
|
+
|
3257
|
+
#define CC_HAS_CMPR_SLOT( n, arg ) cc_cmpr_##n##_ty: true,
|
3258
|
+
#define CC_HAS_CMPR( ty ) \
|
3259
|
+
_Generic( (ty){ 0 }, \
|
3260
|
+
CC_FOR_EACH_CMPR( CC_HAS_CMPR_SLOT, ) \
|
3261
|
+
default: _Generic( (ty){ 0 }, \
|
3262
|
+
char: true, \
|
3263
|
+
unsigned char: true, \
|
3264
|
+
signed char: true, \
|
3265
|
+
unsigned short: true, \
|
3266
|
+
short: true, \
|
3267
|
+
unsigned int: true, \
|
3268
|
+
int: true, \
|
3269
|
+
unsigned long: true, \
|
3270
|
+
long: true, \
|
3271
|
+
unsigned long long: true, \
|
3272
|
+
long long: true, \
|
3273
|
+
cc_maybe_size_t: true, \
|
3274
|
+
char *: true, \
|
3275
|
+
default: false \
|
3276
|
+
) \
|
3277
|
+
) \
|
3278
|
+
|
3279
|
+
#define CC_HAS_HASH_SLOT( n, arg ) cc_hash_##n##_ty: true,
|
3280
|
+
#define CC_HAS_HASH( ty ) \
|
3281
|
+
_Generic( (ty){ 0 }, \
|
3282
|
+
CC_FOR_EACH_HASH( CC_HAS_HASH_SLOT, ) \
|
3283
|
+
default: _Generic( (ty){ 0 }, \
|
3284
|
+
char: true, \
|
3285
|
+
unsigned char: true, \
|
3286
|
+
signed char: true, \
|
3287
|
+
unsigned short: true, \
|
3288
|
+
short: true, \
|
3289
|
+
unsigned int: true, \
|
3290
|
+
int: true, \
|
3291
|
+
unsigned long: true, \
|
3292
|
+
long: true, \
|
3293
|
+
unsigned long long: true, \
|
3294
|
+
long long: true, \
|
3295
|
+
cc_maybe_size_t: true, \
|
3296
|
+
char *: true, \
|
3297
|
+
default: false \
|
3298
|
+
) \
|
3299
|
+
) \
|
3300
|
+
|
3301
|
+
#define CC_KEY_LOAD_SLOT( n, arg ) CC_MAKE_BASE_FNPTR_TY( arg, cc_load_##n##_ty ): cc_load_##n##_val,
|
3302
|
+
#define CC_KEY_LOAD( cntr ) \
|
3303
|
+
_Generic( (**cntr), \
|
3304
|
+
CC_FOR_EACH_LOAD( CC_KEY_LOAD_SLOT, CC_EL_TY( cntr ) ) \
|
3305
|
+
default: CC_DEFAULT_LOAD \
|
3306
|
+
) \
|
3307
|
+
|
3308
|
+
#endif

// Macros for extracting the type and function body or load factor from user-defined DTOR, CMPR, HASH, and LOAD macros.
#define CC_1ST_ARG_( _1, ... ) _1
#define CC_1ST_ARG( ... ) CC_1ST_ARG_( __VA_ARGS__ )
#define CC_OTHER_ARGS_( _1, ... ) __VA_ARGS__
#define CC_OTHER_ARGS( ... ) CC_OTHER_ARGS_( __VA_ARGS__ )

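// CC_1ST_ARG and CC_OTHER_ARGS split a user-supplied "type, body" macro such as CC_DTOR into the type (used for a
// typedef) and the remaining tokens (pasted in as a function body). A reduced illustration with hypothetical names
// (not part of cc.h):

#if 0
#include <stdio.h>

#define FIRST_( _1, ... ) _1
#define FIRST( ... ) FIRST_( __VA_ARGS__ )
#define REST_( _1, ... ) __VA_ARGS__
#define REST( ... ) REST_( __VA_ARGS__ )

#define MY_DTOR short, { printf( "destroying %d\n", (int)val ); }

static void dtor_fn( FIRST( MY_DTOR ) val ) REST( MY_DTOR ) /* Becomes: static void dtor_fn( short val ) { ... } */

int main( void ) { dtor_fn( 7 ); return 0; }
#endif
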
// Default hash and comparison functions for fundamental types.
|
3317
|
+
|
3318
|
+
// Integer types.
|
3319
|
+
|
3320
|
+
static inline int cc_cmpr_char( void *void_val_1, void *void_val_2 )
|
3321
|
+
{
|
3322
|
+
return ( *(char *)void_val_1 > *(char *)void_val_2 ) - ( *(char *)void_val_1 < *(char *)void_val_2 );
|
3323
|
+
}
|
3324
|
+
|
3325
|
+
static inline size_t cc_hash_char( void *void_val )
|
3326
|
+
{
|
3327
|
+
return *(char *)void_val;
|
3328
|
+
}
|
3329
|
+
|
3330
|
+
static inline int cc_cmpr_unsigned_char( void *void_val_1, void *void_val_2 )
|
3331
|
+
{
|
3332
|
+
return ( *(unsigned char *)void_val_1 > *(unsigned char *)void_val_2 ) -
|
3333
|
+
( *(unsigned char *)void_val_1 < *(unsigned char *)void_val_2 );
|
3334
|
+
}
|
3335
|
+
|
3336
|
+
static inline size_t cc_hash_unsigned_char( void *void_val )
|
3337
|
+
{
|
3338
|
+
return *(unsigned char *)void_val;
|
3339
|
+
}
|
3340
|
+
|
3341
|
+
static inline int cc_cmpr_signed_char( void *void_val_1, void *void_val_2 )
|
3342
|
+
{
|
3343
|
+
return ( *(signed char *)void_val_1 > *(signed char *)void_val_2 ) -
|
3344
|
+
( *(signed char *)void_val_1 < *(signed char *)void_val_2 );
|
3345
|
+
}
|
3346
|
+
|
3347
|
+
static inline size_t cc_hash_signed_char( void *void_val )
|
3348
|
+
{
|
3349
|
+
return *(signed char *)void_val;
|
3350
|
+
}
|
3351
|
+
|
3352
|
+
static inline int cc_cmpr_unsigned_short( void *void_val_1, void *void_val_2 )
|
3353
|
+
{
|
3354
|
+
return ( *(unsigned short *)void_val_1 > *(unsigned short *)void_val_2 ) -
|
3355
|
+
( *(unsigned short *)void_val_1 < *(unsigned short *)void_val_2 );
|
3356
|
+
}
|
3357
|
+
|
3358
|
+
static inline size_t cc_hash_unsigned_short( void *void_val )
|
3359
|
+
{
|
3360
|
+
return *(unsigned short *)void_val * 2654435761ull;
|
3361
|
+
}
|
3362
|
+
|
3363
|
+
static inline int cc_cmpr_short( void *void_val_1, void *void_val_2 )
|
3364
|
+
{
|
3365
|
+
return ( *(short *)void_val_1 > *(short *)void_val_2 ) - ( *(short *)void_val_1 < *(short *)void_val_2 );
|
3366
|
+
}
|
3367
|
+
|
3368
|
+
static inline size_t cc_hash_short( void *void_val )
|
3369
|
+
{
|
3370
|
+
return *(short *)void_val * 2654435761ull;
|
3371
|
+
}
|
3372
|
+
|
3373
|
+
static inline int cc_cmpr_unsigned_int( void *void_val_1, void *void_val_2 )
|
3374
|
+
{
|
3375
|
+
return ( *(unsigned int *)void_val_1 > *(unsigned int *)void_val_2 ) -
|
3376
|
+
( *(unsigned int *)void_val_1 < *(unsigned int *)void_val_2 );
|
3377
|
+
}
|
3378
|
+
|
3379
|
+
static inline size_t cc_hash_unsigned_int( void *void_val )
|
3380
|
+
{
|
3381
|
+
return *(unsigned int *)void_val * 2654435761ull;
|
3382
|
+
}
|
3383
|
+
|
3384
|
+
static inline int cc_cmpr_int( void *void_val_1, void *void_val_2 )
|
3385
|
+
{
|
3386
|
+
return ( *(int *)void_val_1 > *(int *)void_val_2 ) - ( *(int *)void_val_1 < *(int *)void_val_2 );
|
3387
|
+
}
|
3388
|
+
|
3389
|
+
static inline size_t cc_hash_int( void *void_val )
|
3390
|
+
{
|
3391
|
+
return *(int *)void_val * 2654435761ull;
|
3392
|
+
}
|
3393
|
+
|
3394
|
+
static inline int cc_cmpr_unsigned_long( void *void_val_1, void *void_val_2 )
|
3395
|
+
{
|
3396
|
+
return ( *(unsigned long *)void_val_1 > *(unsigned long *)void_val_2 ) -
|
3397
|
+
( *(unsigned long *)void_val_1 < *(unsigned long *)void_val_2 );
|
3398
|
+
}
|
3399
|
+
|
3400
|
+
static inline size_t cc_hash_unsigned_long( void *void_val )
|
3401
|
+
{
|
3402
|
+
return *(unsigned long *)void_val * 2654435761ull;
|
3403
|
+
}
|
3404
|
+
|
3405
|
+
static inline int cc_cmpr_long( void *void_val_1, void *void_val_2 )
|
3406
|
+
{
|
3407
|
+
return ( *(long *)void_val_1 > *(long *)void_val_2 ) - ( *(long *)void_val_1 < *(long *)void_val_2 );
|
3408
|
+
}
|
3409
|
+
|
3410
|
+
static inline size_t cc_hash_long( void *void_val )
|
3411
|
+
{
|
3412
|
+
return *(long *)void_val * 2654435761ull;
|
3413
|
+
}
|
3414
|
+
|
3415
|
+
static inline int cc_cmpr_unsigned_long_long( void *void_val_1, void *void_val_2 )
|
3416
|
+
{
|
3417
|
+
return ( *(unsigned long long *)void_val_1 > *(unsigned long long *)void_val_2 ) -
|
3418
|
+
( *(unsigned long long *)void_val_1 < *(unsigned long long *)void_val_2 );
|
3419
|
+
}
|
3420
|
+
|
3421
|
+
static inline size_t cc_hash_unsigned_long_long( void *void_val )
|
3422
|
+
{
|
3423
|
+
return *(unsigned long long *)void_val * 2654435761ull;
|
3424
|
+
}
|
3425
|
+
|
3426
|
+
static inline int cc_cmpr_long_long( void *void_val_1, void *void_val_2 )
|
3427
|
+
{
|
3428
|
+
return ( *(long long *)void_val_1 > *(long long *)void_val_2 ) -
|
3429
|
+
( *(long long *)void_val_1 < *(long long *)void_val_2 );
|
3430
|
+
}
|
3431
|
+
|
3432
|
+
static inline size_t cc_hash_long_long( void *void_val )
|
3433
|
+
{
|
3434
|
+
return *(long long *)void_val * 2654435761ull;
|
3435
|
+
}

// size_t could be an alias for a fundamental integer type or a distinct type.
// Hence, in C we have to handle it as a special case so that it doesn't clash with another type in _Generic statements.
// If size_t is an alias, cc_maybe_size_t will be a dummy type used in no other context.
// Otherwise, cc_maybe_size_t will be an alias for size_t.

#ifndef __cplusplus

typedef struct { char nothing; } cc_size_t_dummy;

typedef CC_TYPEOF_XP(
  _Generic( (size_t){ 0 },
    unsigned short: (cc_size_t_dummy){ 0 },
    short: (cc_size_t_dummy){ 0 },
    unsigned int: (cc_size_t_dummy){ 0 },
    int: (cc_size_t_dummy){ 0 },
    unsigned long: (cc_size_t_dummy){ 0 },
    long: (cc_size_t_dummy){ 0 },
    unsigned long long: (cc_size_t_dummy){ 0 },
    long long: (cc_size_t_dummy){ 0 },
    default: (size_t){ 0 }
  )
) cc_maybe_size_t;

#endif

static inline int cc_cmpr_size_t( void *void_val_1, void *void_val_2 )
{
  return ( *(size_t *)void_val_1 > *(size_t *)void_val_2 ) - ( *(size_t *)void_val_1 < *(size_t *)void_val_2 );
}

static inline size_t cc_hash_size_t( void *void_val )
{
  return *(size_t *)void_val * 2654435761ull;
}
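
// Whether size_t needs the cc_maybe_size_t dummy depends on the target: on common LP64 platforms size_t is the same
// type as unsigned long, so listing both in one _Generic association list would not compile. A small self-contained
// probe (not part of cc.h) that reports which fundamental type size_t aliases, if any:

#if 0
#include <stddef.h>
#include <stdio.h>

int main( void )
{
  const char *ty = _Generic( (size_t){ 0 },
    unsigned int:       "unsigned int",
    unsigned long:      "unsigned long",
    unsigned long long: "unsigned long long",
    default:            "a distinct type" );
  printf( "size_t is %s here\n", ty );
  return 0;
}
#endif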

// Null-terminated C strings.
// We use FNV-1a because newer, faster alternatives that process word-sized chunks require prior knowledge of the
// string's length.

static inline int cc_cmpr_c_string( void *void_val_1, void *void_val_2 )
{
  return strcmp( *(char **)void_val_1, *(char **)void_val_2 );
}

#if SIZE_MAX == 0xFFFFFFFF // 32-bit size_t.

static inline size_t cc_hash_c_string( void *void_val )
{
  char *val = *(char **)void_val;
  size_t hash = 0x811c9dc5;
  while( *val )
    hash = ( (unsigned char)*val++ ^ hash ) * 0x01000193;

  return hash;
}

#elif SIZE_MAX == 0xFFFFFFFFFFFFFFFF // 64-bit size_t.

static inline size_t cc_hash_c_string( void *void_val )
{
  char *val = *(char **)void_val;
  size_t hash = 0xcbf29ce484222325;
  while( *val )
    hash = ( (unsigned char)*val++ ^ hash ) * 0x100000001b3;

  return hash;
}

#else // Strange size_t.

static inline size_t cc_hash_c_string( void *void_val )
{
  char *val = *(char **)void_val;
  size_t hash = 0;
  while( *val )
    hash = hash * 131 + (unsigned char)*val++;

  return hash;
}

#endif

#endif
#else/*---------------------------------------------------------------------------------------------------------------*/
/*                                                                                                                      */
/*                        DEFINING DESTRUCTOR, COMPARISON, OR HASH FUNCTION OR LOAD FACTOR MODE                         */
/*                                                                                                                      */
/*--------------------------------------------------------------------------------------------------------------------*/

#ifdef CC_DTOR

// Convert the user-defined CC_DTOR macro into a cc_dtor_XXXX_ty and cc_dtor_XXXX_fn pair that can be plugged into the
// CC_EL_DTOR and CC_KEY_DTOR macros above.

typedef CC_TYPEOF_TY( CC_1ST_ARG( CC_DTOR ) ) CC_CAT_3( cc_dtor_, CC_N_DTORS, _ty );

static inline void CC_CAT_3( cc_dtor_, CC_N_DTORS, _fn )( void *void_val )
{
  CC_CAT_3( cc_dtor_, CC_N_DTORS, _ty ) val = *(CC_CAT_3( cc_dtor_, CC_N_DTORS, _ty ) *)void_val;
  CC_OTHER_ARGS( CC_DTOR )
}

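// Editorial sketch (not part of cc.h): a destructor is registered by defining CC_DTOR as "type, { body }" before an
// #include of this header; the body sees the element or key as a variable named val, exactly as wired up in the
// function generated above. Hypothetical user-side code:

#if 0
#include <stdlib.h>

typedef struct { char *buffer; } our_string;

#define CC_DTOR our_string, { free( val.buffer ); }
#include "cc.h"
#endif
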
// Increment DTOR counter.
#if CC_N_DTORS_D1 == 0
#undef CC_N_DTORS_D1
#define CC_N_DTORS_D1 1
#elif CC_N_DTORS_D1 == 1
#undef CC_N_DTORS_D1
#define CC_N_DTORS_D1 2
#elif CC_N_DTORS_D1 == 2
#undef CC_N_DTORS_D1
#define CC_N_DTORS_D1 3
#elif CC_N_DTORS_D1 == 3
#undef CC_N_DTORS_D1
#define CC_N_DTORS_D1 4
#elif CC_N_DTORS_D1 == 4
#undef CC_N_DTORS_D1
#define CC_N_DTORS_D1 5
#elif CC_N_DTORS_D1 == 5
#undef CC_N_DTORS_D1
#define CC_N_DTORS_D1 6
#elif CC_N_DTORS_D1 == 6
#undef CC_N_DTORS_D1
#define CC_N_DTORS_D1 7
#elif CC_N_DTORS_D1 == 7
#undef CC_N_DTORS_D1
#define CC_N_DTORS_D1 0
#if CC_N_DTORS_D2 == 0
#undef CC_N_DTORS_D2
#define CC_N_DTORS_D2 1
#elif CC_N_DTORS_D2 == 1
#undef CC_N_DTORS_D2
#define CC_N_DTORS_D2 2
#elif CC_N_DTORS_D2 == 2
#undef CC_N_DTORS_D2
#define CC_N_DTORS_D2 3
#elif CC_N_DTORS_D2 == 3
#undef CC_N_DTORS_D2
#define CC_N_DTORS_D2 4
#elif CC_N_DTORS_D2 == 4
#undef CC_N_DTORS_D2
#define CC_N_DTORS_D2 5
#elif CC_N_DTORS_D2 == 5
#undef CC_N_DTORS_D2
#define CC_N_DTORS_D2 6
#elif CC_N_DTORS_D2 == 6
#undef CC_N_DTORS_D2
#define CC_N_DTORS_D2 7
#elif CC_N_DTORS_D2 == 7
#undef CC_N_DTORS_D2
#define CC_N_DTORS_D2 0
#if CC_N_DTORS_D3 == 0
#undef CC_N_DTORS_D3
#define CC_N_DTORS_D3 1
#elif CC_N_DTORS_D3 == 1
#undef CC_N_DTORS_D3
#define CC_N_DTORS_D3 2
#elif CC_N_DTORS_D3 == 2
#undef CC_N_DTORS_D3
#define CC_N_DTORS_D3 3
#elif CC_N_DTORS_D3 == 3
#undef CC_N_DTORS_D3
#define CC_N_DTORS_D3 4
#elif CC_N_DTORS_D3 == 4
#undef CC_N_DTORS_D3
#define CC_N_DTORS_D3 5
#elif CC_N_DTORS_D3 == 5
#undef CC_N_DTORS_D3
#define CC_N_DTORS_D3 6
#elif CC_N_DTORS_D3 == 6
#undef CC_N_DTORS_D3
#define CC_N_DTORS_D3 7
#elif CC_N_DTORS_D3 == 7
#error Sorry, number of destructor functions is limited to 511.
#endif
#endif
#endif

#undef CC_DTOR
#endif

#ifdef CC_CMPR

typedef CC_TYPEOF_TY( CC_1ST_ARG( CC_CMPR ) ) CC_CAT_3( cc_cmpr_, CC_N_CMPRS, _ty );

static inline int CC_CAT_3( cc_cmpr_, CC_N_CMPRS, _fn )( void *void_val_1, void *void_val_2 )
{
  CC_CAT_3( cc_cmpr_, CC_N_CMPRS, _ty ) val_1 = *(CC_CAT_3( cc_cmpr_, CC_N_CMPRS, _ty ) *)void_val_1;
  CC_CAT_3( cc_cmpr_, CC_N_CMPRS, _ty ) val_2 = *(CC_CAT_3( cc_cmpr_, CC_N_CMPRS, _ty ) *)void_val_2;
  CC_OTHER_ARGS( CC_CMPR )
}

#if CC_N_CMPRS_D1 == 0
|
3630
|
+
#undef CC_N_CMPRS_D1
|
3631
|
+
#define CC_N_CMPRS_D1 1
|
3632
|
+
#elif CC_N_CMPRS_D1 == 1
|
3633
|
+
#undef CC_N_CMPRS_D1
|
3634
|
+
#define CC_N_CMPRS_D1 2
|
3635
|
+
#elif CC_N_CMPRS_D1 == 2
|
3636
|
+
#undef CC_N_CMPRS_D1
|
3637
|
+
#define CC_N_CMPRS_D1 3
|
3638
|
+
#elif CC_N_CMPRS_D1 == 3
|
3639
|
+
#undef CC_N_CMPRS_D1
|
3640
|
+
#define CC_N_CMPRS_D1 4
|
3641
|
+
#elif CC_N_CMPRS_D1 == 4
|
3642
|
+
#undef CC_N_CMPRS_D1
|
3643
|
+
#define CC_N_CMPRS_D1 5
|
3644
|
+
#elif CC_N_CMPRS_D1 == 5
|
3645
|
+
#undef CC_N_CMPRS_D1
|
3646
|
+
#define CC_N_CMPRS_D1 6
|
3647
|
+
#elif CC_N_CMPRS_D1 == 6
|
3648
|
+
#undef CC_N_CMPRS_D1
|
3649
|
+
#define CC_N_CMPRS_D1 7
|
3650
|
+
#elif CC_N_CMPRS_D1 == 7
|
3651
|
+
#undef CC_N_CMPRS_D1
|
3652
|
+
#define CC_N_CMPRS_D1 0
|
3653
|
+
#if CC_N_CMPRS_D2 == 0
|
3654
|
+
#undef CC_N_CMPRS_D2
|
3655
|
+
#define CC_N_CMPRS_D2 1
|
3656
|
+
#elif CC_N_CMPRS_D2 == 1
|
3657
|
+
#undef CC_N_CMPRS_D2
|
3658
|
+
#define CC_N_CMPRS_D2 2
|
3659
|
+
#elif CC_N_CMPRS_D2 == 2
|
3660
|
+
#undef CC_N_CMPRS_D2
|
3661
|
+
#define CC_N_CMPRS_D2 3
|
3662
|
+
#elif CC_N_CMPRS_D2 == 3
|
3663
|
+
#undef CC_N_CMPRS_D2
|
3664
|
+
#define CC_N_CMPRS_D2 4
|
3665
|
+
#elif CC_N_CMPRS_D2 == 4
|
3666
|
+
#undef CC_N_CMPRS_D2
|
3667
|
+
#define CC_N_CMPRS_D2 5
|
3668
|
+
#elif CC_N_CMPRS_D2 == 5
|
3669
|
+
#undef CC_N_CMPRS_D2
|
3670
|
+
#define CC_N_CMPRS_D2 6
|
3671
|
+
#elif CC_N_CMPRS_D2 == 6
|
3672
|
+
#undef CC_N_CMPRS_D2
|
3673
|
+
#define CC_N_CMPRS_D2 7
|
3674
|
+
#elif CC_N_CMPRS_D2 == 7
|
3675
|
+
#undef CC_N_CMPRS_D2
|
3676
|
+
#define CC_N_CMPRS_D2 0
|
3677
|
+
#if CC_N_CMPRS_D3 == 0
|
3678
|
+
#undef CC_N_CMPRS_D3
|
3679
|
+
#define CC_N_CMPRS_D3 1
|
3680
|
+
#elif CC_N_CMPRS_D3 == 1
|
3681
|
+
#undef CC_N_CMPRS_D3
|
3682
|
+
#define CC_N_CMPRS_D3 2
|
3683
|
+
#elif CC_N_CMPRS_D3 == 2
|
3684
|
+
#undef CC_N_CMPRS_D3
|
3685
|
+
#define CC_N_CMPRS_D3 3
|
3686
|
+
#elif CC_N_CMPRS_D3 == 3
|
3687
|
+
#undef CC_N_CMPRS_D3
|
3688
|
+
#define CC_N_CMPRS_D3 4
|
3689
|
+
#elif CC_N_CMPRS_D3 == 4
|
3690
|
+
#undef CC_N_CMPRS_D3
|
3691
|
+
#define CC_N_CMPRS_D3 5
|
3692
|
+
#elif CC_N_CMPRS_D3 == 5
|
3693
|
+
#undef CC_N_CMPRS_D3
|
3694
|
+
#define CC_N_CMPRS_D3 6
|
3695
|
+
#elif CC_N_CMPRS_D3 == 6
|
3696
|
+
#undef CC_N_CMPRS_D3
|
3697
|
+
#define CC_N_CMPRS_D3 7
|
3698
|
+
#elif CC_N_CMPRS_D3 == 7
|
3699
|
+
#error Sorry, number of comparison functions is limited to 511.
|
3700
|
+
#endif
|
3701
|
+
#endif
|
3702
|
+
#endif
|
3703
|
+
|
3704
|
+
#undef CC_CMPR
|
3705
|
+
#endif
|
3706
|
+
|
3707
|
+
#ifdef CC_HASH
|
3708
|
+
|
3709
|
+
typedef CC_TYPEOF_TY( CC_1ST_ARG( CC_HASH ) ) CC_CAT_3( cc_hash_, CC_N_HASHS, _ty );
|
3710
|
+
|
3711
|
+
static inline size_t CC_CAT_3( cc_hash_, CC_N_HASHS, _fn )( void *void_val )
|
3712
|
+
{
|
3713
|
+
CC_CAT_3( cc_hash_, CC_N_HASHS, _ty ) val = *(CC_CAT_3( cc_hash_, CC_N_HASHS, _ty ) *)void_val;
|
3714
|
+
CC_OTHER_ARGS( CC_HASH )
|
3715
|
+
}
|
3716
|
+
|
3717
|
+
#if CC_N_HASHS_D1 == 0
|
3718
|
+
#undef CC_N_HASHS_D1
|
3719
|
+
#define CC_N_HASHS_D1 1
|
3720
|
+
#elif CC_N_HASHS_D1 == 1
|
3721
|
+
#undef CC_N_HASHS_D1
|
3722
|
+
#define CC_N_HASHS_D1 2
|
3723
|
+
#elif CC_N_HASHS_D1 == 2
|
3724
|
+
#undef CC_N_HASHS_D1
|
3725
|
+
#define CC_N_HASHS_D1 3
|
3726
|
+
#elif CC_N_HASHS_D1 == 3
|
3727
|
+
#undef CC_N_HASHS_D1
|
3728
|
+
#define CC_N_HASHS_D1 4
|
3729
|
+
#elif CC_N_HASHS_D1 == 4
|
3730
|
+
#undef CC_N_HASHS_D1
|
3731
|
+
#define CC_N_HASHS_D1 5
|
3732
|
+
#elif CC_N_HASHS_D1 == 5
|
3733
|
+
#undef CC_N_HASHS_D1
|
3734
|
+
#define CC_N_HASHS_D1 6
|
3735
|
+
#elif CC_N_HASHS_D1 == 6
|
3736
|
+
#undef CC_N_HASHS_D1
|
3737
|
+
#define CC_N_HASHS_D1 7
|
3738
|
+
#elif CC_N_HASHS_D1 == 7
|
3739
|
+
#undef CC_N_HASHS_D1
|
3740
|
+
#define CC_N_HASHS_D1 0
|
3741
|
+
#if CC_N_HASHS_D2 == 0
|
3742
|
+
#undef CC_N_HASHS_D2
|
3743
|
+
#define CC_N_HASHS_D2 1
|
3744
|
+
#elif CC_N_HASHS_D2 == 1
|
3745
|
+
#undef CC_N_HASHS_D2
|
3746
|
+
#define CC_N_HASHS_D2 2
|
3747
|
+
#elif CC_N_HASHS_D2 == 2
|
3748
|
+
#undef CC_N_HASHS_D2
|
3749
|
+
#define CC_N_HASHS_D2 3
|
3750
|
+
#elif CC_N_HASHS_D2 == 3
|
3751
|
+
#undef CC_N_HASHS_D2
|
3752
|
+
#define CC_N_HASHS_D2 4
|
3753
|
+
#elif CC_N_HASHS_D2 == 4
|
3754
|
+
#undef CC_N_HASHS_D2
|
3755
|
+
#define CC_N_HASHS_D2 5
|
3756
|
+
#elif CC_N_HASHS_D2 == 5
|
3757
|
+
#undef CC_N_HASHS_D2
|
3758
|
+
#define CC_N_HASHS_D2 6
|
3759
|
+
#elif CC_N_HASHS_D2 == 6
|
3760
|
+
#undef CC_N_HASHS_D2
|
3761
|
+
#define CC_N_HASHS_D2 7
|
3762
|
+
#elif CC_N_HASHS_D2 == 7
|
3763
|
+
#undef CC_N_HASHS_D2
|
3764
|
+
#define CC_N_HASHS_D2 0
|
3765
|
+
#if CC_N_HASHS_D3 == 0
|
3766
|
+
#undef CC_N_HASHS_D3
|
3767
|
+
#define CC_N_HASHS_D3 1
|
3768
|
+
#elif CC_N_HASHS_D3 == 1
|
3769
|
+
#undef CC_N_HASHS_D3
|
3770
|
+
#define CC_N_HASHS_D3 2
|
3771
|
+
#elif CC_N_HASHS_D3 == 2
|
3772
|
+
#undef CC_N_HASHS_D3
|
3773
|
+
#define CC_N_HASHS_D3 3
|
3774
|
+
#elif CC_N_HASHS_D3 == 3
|
3775
|
+
#undef CC_N_HASHS_D3
|
3776
|
+
#define CC_N_HASHS_D3 4
|
3777
|
+
#elif CC_N_HASHS_D3 == 4
|
3778
|
+
#undef CC_N_HASHS_D3
|
3779
|
+
#define CC_N_HASHS_D3 5
|
3780
|
+
#elif CC_N_HASHS_D3 == 5
|
3781
|
+
#undef CC_N_HASHS_D3
|
3782
|
+
#define CC_N_HASHS_D3 6
|
3783
|
+
#elif CC_N_HASHS_D3 == 6
|
3784
|
+
#undef CC_N_HASHS_D3
|
3785
|
+
#define CC_N_HASHS_D3 7
|
3786
|
+
#elif CC_N_HASHS_D3 == 7
|
3787
|
+
#error Sorry, number of hash functions is limited to 511.
|
3788
|
+
#endif
|
3789
|
+
#endif
|
3790
|
+
#endif
|
3791
|
+
|
3792
|
+
#undef CC_HASH
|
3793
|
+
#endif
|
3794
|
+
|
3795
|
+
#ifdef CC_LOAD
|
3796
|
+
|
3797
|
+
typedef CC_TYPEOF_TY( CC_1ST_ARG( CC_LOAD ) ) CC_CAT_3( cc_load_, CC_N_LOADS, _ty );
|
3798
|
+
|
3799
|
+
const double CC_CAT_3( cc_load_, CC_N_LOADS, _val ) = CC_OTHER_ARGS( CC_LOAD );
|
3800
|
+
|
3801
|
+
#if CC_N_LOADS_D1 == 0
|
3802
|
+
#undef CC_N_LOADS_D1
|
3803
|
+
#define CC_N_LOADS_D1 1
|
3804
|
+
#elif CC_N_LOADS_D1 == 1
|
3805
|
+
#undef CC_N_LOADS_D1
|
3806
|
+
#define CC_N_LOADS_D1 2
|
3807
|
+
#elif CC_N_LOADS_D1 == 2
|
3808
|
+
#undef CC_N_LOADS_D1
|
3809
|
+
#define CC_N_LOADS_D1 3
|
3810
|
+
#elif CC_N_LOADS_D1 == 3
|
3811
|
+
#undef CC_N_LOADS_D1
|
3812
|
+
#define CC_N_LOADS_D1 4
|
3813
|
+
#elif CC_N_LOADS_D1 == 4
|
3814
|
+
#undef CC_N_LOADS_D1
|
3815
|
+
#define CC_N_LOADS_D1 5
|
3816
|
+
#elif CC_N_LOADS_D1 == 5
|
3817
|
+
#undef CC_N_LOADS_D1
|
3818
|
+
#define CC_N_LOADS_D1 6
|
3819
|
+
#elif CC_N_LOADS_D1 == 6
|
3820
|
+
#undef CC_N_LOADS_D1
|
3821
|
+
#define CC_N_LOADS_D1 7
|
3822
|
+
#elif CC_N_LOADS_D1 == 7
|
3823
|
+
#undef CC_N_LOADS_D1
|
3824
|
+
#define CC_N_LOADS_D1 0
|
3825
|
+
#if CC_N_LOADS_D2 == 0
|
3826
|
+
#undef CC_N_LOADS_D2
|
3827
|
+
#define CC_N_LOADS_D2 1
|
3828
|
+
#elif CC_N_LOADS_D2 == 1
|
3829
|
+
#undef CC_N_LOADS_D2
|
3830
|
+
#define CC_N_LOADS_D2 2
|
3831
|
+
#elif CC_N_LOADS_D2 == 2
|
3832
|
+
#undef CC_N_LOADS_D2
|
3833
|
+
#define CC_N_LOADS_D2 3
|
3834
|
+
#elif CC_N_LOADS_D2 == 3
|
3835
|
+
#undef CC_N_LOADS_D2
|
3836
|
+
#define CC_N_LOADS_D2 4
|
3837
|
+
#elif CC_N_LOADS_D2 == 4
|
3838
|
+
#undef CC_N_LOADS_D2
|
3839
|
+
#define CC_N_LOADS_D2 5
|
3840
|
+
#elif CC_N_LOADS_D2 == 5
|
3841
|
+
#undef CC_N_LOADS_D2
|
3842
|
+
#define CC_N_LOADS_D2 6
|
3843
|
+
#elif CC_N_LOADS_D2 == 6
|
3844
|
+
#undef CC_N_LOADS_D2
|
3845
|
+
#define CC_N_LOADS_D2 7
|
3846
|
+
#elif CC_N_LOADS_D2 == 7
|
3847
|
+
#undef CC_N_LOADS_D2
|
3848
|
+
#define CC_N_LOADS_D2 0
|
3849
|
+
#if CC_N_LOADS_D3 == 0
|
3850
|
+
#undef CC_N_LOADS_D3
|
3851
|
+
#define CC_N_LOADS_D3 1
|
3852
|
+
#elif CC_N_LOADS_D3 == 1
|
3853
|
+
#undef CC_N_LOADS_D3
|
3854
|
+
#define CC_N_LOADS_D3 2
|
3855
|
+
#elif CC_N_LOADS_D3 == 2
|
3856
|
+
#undef CC_N_LOADS_D3
|
3857
|
+
#define CC_N_LOADS_D3 3
|
3858
|
+
#elif CC_N_LOADS_D3 == 3
|
3859
|
+
#undef CC_N_LOADS_D3
|
3860
|
+
#define CC_N_LOADS_D3 4
|
3861
|
+
#elif CC_N_LOADS_D3 == 4
|
3862
|
+
#undef CC_N_LOADS_D3
|
3863
|
+
#define CC_N_LOADS_D3 5
|
3864
|
+
#elif CC_N_LOADS_D3 == 5
|
3865
|
+
#undef CC_N_LOADS_D3
|
3866
|
+
#define CC_N_LOADS_D3 6
|
3867
|
+
#elif CC_N_LOADS_D3 == 6
|
3868
|
+
#undef CC_N_LOADS_D3
|
3869
|
+
#define CC_N_LOADS_D3 7
|
3870
|
+
#elif CC_N_LOADS_D3 == 7
|
3871
|
+
#error Sorry, number of load factors is limited to 511.
|
3872
|
+
#endif
|
3873
|
+
#endif
|
3874
|
+
#endif

#undef CC_LOAD
#endif

#endif