buddy_alloc.c 1.2025.9 → 1.2025.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/buddy_alloc.h CHANGED
@@ -1,2226 +1,2232 @@
1
- /*
2
- * Copyright 2021 Stanislav Paskalev <spaskalev@protonmail.com>
3
- *
4
- * A binary buddy memory allocator
5
- *
6
- * To include and use it in your project do the following
7
- * 1. Add buddy_alloc.h (this file) to your include directory
8
- * 2. Include the header in places where you need to use the allocator
9
- * 3. In one of your source files #define BUDDY_ALLOC_IMPLEMENTATION
10
- * and then import the header. This will insert the implementation.
11
- *
12
- * Latest version is available at https://github.com/spaskalev/buddy_alloc
13
- */
14
-
15
- #ifndef BUDDY_ALLOC_H
16
- #define BUDDY_ALLOC_H
17
-
18
- #ifndef BUDDY_HEADER
19
- #include <limits.h>
20
- #include <stdbool.h>
21
- #include <stddef.h>
22
- #include <stdint.h>
23
- #include <string.h>
24
- #include <sys/types.h>
25
- #ifndef BUDDY_PRINTF
26
- #include <stdio.h>
27
- #endif
28
- #endif
29
-
30
- #ifdef __cplusplus
31
- #ifndef BUDDY_CPP_MANGLED
32
- extern "C" {
33
- #endif
34
- #endif
35
-
36
- struct buddy;
37
-
38
- /* Returns the size of a buddy required to manage a block of the specified size */
39
- size_t buddy_sizeof(size_t memory_size);
40
-
41
- /*
42
- * Returns the size of a buddy required to manage a block of the specified size
43
- * using a non-default alignment.
44
- */
45
- size_t buddy_sizeof_alignment(size_t memory_size, size_t alignment);
46
-
47
- /* Initializes a binary buddy memory allocator at the specified location */
48
- struct buddy *buddy_init(unsigned char *at, unsigned char *main, size_t memory_size);
49
-
50
- /* Initializes a binary buddy memory allocator at the specified location using a non-default alignment */
51
- struct buddy *buddy_init_alignment(unsigned char *at, unsigned char *main, size_t memory_size, size_t alignment);
52
-
53
- /*
54
- * Initializes a binary buddy memory allocator embedded in the specified arena.
55
- * The arena's capacity is reduced to account for the allocator metadata.
56
- */
57
- struct buddy *buddy_embed(unsigned char *main, size_t memory_size);
58
-
59
- /*
60
- * Returns the address of a previously-created buddy allocator at the arena.
61
- * Use to get a new handle to the allocator when the arena is moved or copied.
62
- */
63
- struct buddy *buddy_get_embed_at(unsigned char *main, size_t memory_size);
64
-
65
- /*
66
- * Initializes a binary buddy memory allocator embedded in the specified arena
67
- * using a non-default alignment.
68
- * The arena's capacity is reduced to account for the allocator metadata.
69
- */
70
- struct buddy *buddy_embed_alignment(unsigned char *main, size_t memory_size, size_t alignment);
71
-
72
- /*
73
- * Returns the address of a previously-created buddy allocator at the arena.
74
- * Use to get a new handle to the allocator when the arena is moved or copied.
75
- */
76
- struct buddy *buddy_get_embed_at_alignment(unsigned char *main, size_t memory_size, size_t alignment);
77
-
78
- /*
79
- * Resizes the arena and allocator metadata to a new size.
80
- *
81
- * Existing allocations are preserved. If an allocation is to fall outside
82
- * of the arena after a downsizing the resize operation fails.
83
- *
84
- * Returns a pointer to allocator on successful resize. This will be
85
- * the same pointer when the allocator is external to the arena. If the
86
- * allocator is embedded in the arena the old pointer to the allocator
87
- * must not be used after resizing!
88
- *
89
- * Returns NULL on failure. The allocations and allocator pointer
90
- * are preserved.
91
- */
92
- struct buddy *buddy_resize(struct buddy *buddy, size_t new_memory_size);
93
-
94
- /* Tests if the arena can be shrunk in half */
95
- bool buddy_can_shrink(struct buddy *buddy);
96
-
97
- /* Tests if the arena is completely empty */
98
- bool buddy_is_empty(struct buddy *buddy);
99
-
100
- /* Tests if the arena is completely full */
101
- bool buddy_is_full(struct buddy *buddy);
102
-
103
- /* Reports the arena size */
104
- size_t buddy_arena_size(struct buddy *buddy);
105
-
106
- /* Reports the arena's free size. Note that this is (often) not a continuous size
107
- but the sum of all free slots in the buddy. */
108
- size_t buddy_arena_free_size(struct buddy *buddy);
109
-
110
- /*
111
- * Allocation functions
112
- */
113
-
114
- /* Use the specified buddy to allocate memory. See malloc. */
115
- void *buddy_malloc(struct buddy *buddy, size_t requested_size);
116
-
117
- /* Use the specified buddy to allocate zeroed memory. See calloc. */
118
- void *buddy_calloc(struct buddy *buddy, size_t members_count, size_t member_size);
119
-
120
- /* Realloc semantics are a joke. See realloc. */
121
- void *buddy_realloc(struct buddy *buddy, void *ptr, size_t requested_size, bool ignore_data);
122
-
123
- /* Realloc-like behavior that checks for overflow. See reallocarray */
124
- void *buddy_reallocarray(struct buddy *buddy, void *ptr,
125
- size_t members_count, size_t member_size, bool ignore_data);
126
-
127
- /* Use the specified buddy to free memory. See free. */
128
- void buddy_free(struct buddy *buddy, void *ptr);
129
-
130
- enum buddy_safe_free_status {
131
- BUDDY_SAFE_FREE_SUCCESS,
132
- BUDDY_SAFE_FREE_BUDDY_IS_NULL,
133
- BUDDY_SAFE_FREE_INVALID_ADDRESS,
134
- BUDDY_SAFE_FREE_SIZE_MISMATCH,
135
- BUDDY_SAFE_FREE_ALREADY_FREE,
136
- };
137
-
138
- /* A (safer) free with a size. Will not free unless the size fits the target span. */
139
- enum buddy_safe_free_status buddy_safe_free(struct buddy *buddy, void *ptr, size_t requested_size);
140
-
141
- /* Reports the allocation size. This could be bigger than the requested size,
142
- it's the exact size that this allocation occupies in the arena.
143
- Returns 0 on failure, and a non-zero value on success. */
144
- size_t buddy_alloc_size(struct buddy *buddy, void *ptr);
145
-
146
- /*
147
- * Reservation functions
148
- */
149
-
150
- /* Reserve a range by marking it as allocated. Useful for dealing with physical memory. */
151
- void buddy_reserve_range(struct buddy *buddy, void *ptr, size_t requested_size);
152
-
153
- /* Release a reserved memory range. Unsafe, this can mess up other allocations if called with wrong parameters! */
154
- void buddy_unsafe_release_range(struct buddy *buddy, void *ptr, size_t requested_size);
155
-
156
- /*
157
- * Iteration functions
158
- */
159
-
160
- /*
161
- * Iterate through the free and allocated slots and call the provided function for each of them.
162
- *
163
- * If the provided function returns a non-NULL result the iteration stops and the result
164
- * is returned to called. NULL is returned upon completing iteration without stopping.
165
- *
166
- * The iteration order is implementation-defined and may change between versions.
167
- */
168
- void *buddy_walk(struct buddy *buddy, void *(fp)(void *ctx, void *addr, size_t slot_size, size_t allocated), void *ctx);
169
-
170
- /*
171
- * Miscellaneous functions
172
- */
173
-
174
- /*
175
- * Calculates the fragmentation in the allocator in a 0 - 255 range.
176
- * NOTE: if you are using a non-power-of-two sized arena the maximum upper bound can be lower.
177
- */
178
- unsigned char buddy_fragmentation(struct buddy *buddy);
179
-
180
- #ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
181
- /*
182
- * Enable change tracking for this allocator instance.
183
- *
184
- * This will store a header at the start of the arena that contains the function pointer (tracker) and
185
- * a void* (context). The tracker will be called with the context, the start of changed memory and its length.
186
- *
187
- * This function MUST be called before any allocations are performed!
188
- *
189
- * Change tracking is in effect only for allocation functions, resizing functions are excluded from it.
190
- *
191
- * This is an experimental feature designed to facilitate integration with https://github.com/spaskalev/libpvl
192
- *
193
- * The API is not (yet) part of the allocator contract and its semantic versioning!
194
- */
195
- void buddy_enable_change_tracking(struct buddy* buddy, void* context, void (*tracker) (void*, unsigned char*, size_t));
196
- #endif
197
-
198
- #ifdef __cplusplus
199
- #ifndef BUDDY_CPP_MANGLED
200
- }
201
- #endif
202
- #endif
203
-
204
- #endif /* BUDDY_ALLOC_H */
205
-
206
- #ifdef BUDDY_ALLOC_IMPLEMENTATION
207
- #undef BUDDY_ALLOC_IMPLEMENTATION
208
-
209
- #ifdef __cplusplus
210
- #ifndef BUDDY_CPP_MANGLED
211
- extern "C" {
212
- #endif
213
- #endif
214
-
215
- #ifndef BUDDY_ALLOC_ALIGN
216
- #define BUDDY_ALLOC_ALIGN (sizeof(size_t) * CHAR_BIT)
217
- #endif
218
-
219
- #ifdef __cplusplus
220
- #ifndef BUDDY_ALIGNOF
221
- #define BUDDY_ALIGNOF(x) alignof(x)
222
- #endif
223
-
224
- #else /* not __cplusplus */
225
-
226
- #ifndef BUDDY_ALIGNOF
227
- #ifndef _MSC_VER
228
- #define BUDDY_ALIGNOF(x) __alignof__(x)
229
- #else
230
- #define BUDDY_ALIGNOF(x) _Alignof(x)
231
- #endif
232
- #endif
233
-
234
- #endif /* __cplusplus */
235
-
236
- /* ssize_t is a POSIX extension */
237
- #if defined(_MSC_VER) && !defined(_SSIZE_T_DEFINED)
238
- #if _WIN64
239
- typedef signed long long ssize_t;
240
- #else
241
- typedef signed long ssize_t;
242
- #endif
243
- #define _SSIZE_T_DEFINED
244
- #endif
245
-
246
- /* Support compiling with Pelles C */
247
- #if defined(__POCC__) && defined(__POCC_TARGET__)
248
- #if __POCC_TARGET__ == 3
249
- typedef signed long long ssize_t;
250
- #elif __POCC_TARGET__ == 1
251
- typedef signed long ssize_t;
252
- #else
253
- #error Uknown POCC target
254
- #endif
255
- #endif
256
-
257
- #ifndef BUDDY_PRINTF
258
- #define BUDDY_PRINTF printf
259
- #endif
260
-
261
- /*
262
- * Debug functions
263
- */
264
-
265
- /* Implementation defined */
266
- void buddy_debug(struct buddy *buddy);
267
-
268
- struct buddy_change_tracker {
269
- void* context;
270
- void (*tracker) (void*, unsigned char*, size_t);
271
- };
272
-
273
- struct buddy_tree;
274
-
275
- struct buddy_tree_pos {
276
- size_t index;
277
- size_t depth;
278
- };
279
-
280
- #ifdef __cplusplus
281
- #define INVALID_POS buddy_tree_pos{ 0, 0 }
282
- #else
283
- #define INVALID_POS ((struct buddy_tree_pos){ 0, 0 })
284
- #endif
285
-
286
- struct buddy_tree_interval {
287
- struct buddy_tree_pos from;
288
- struct buddy_tree_pos to;
289
- };
290
-
291
- struct buddy_tree_walk_state {
292
- struct buddy_tree_pos starting_pos;
293
- struct buddy_tree_pos current_pos;
294
- unsigned int going_up;
295
- unsigned int walk_done;
296
- };
297
-
298
- /*
299
- * Initialization functions
300
- */
301
-
302
- /* Returns the size of a buddy allocation tree of the desired order*/
303
- static size_t buddy_tree_sizeof(uint8_t order);
304
-
305
- /* Initializes a buddy allocation tree at the specified location */
306
- static struct buddy_tree *buddy_tree_init(unsigned char *at, uint8_t order);
307
-
308
- /* Indicates whether this is a valid position for the tree */
309
- static bool buddy_tree_valid(struct buddy_tree *t, struct buddy_tree_pos pos);
310
-
311
- /* Returns the order of the specified buddy allocation tree */
312
- static uint8_t buddy_tree_order(struct buddy_tree *t);
313
-
314
- /*
315
- * Resize the tree to the new order. When downsizing the left subtree is picked.
316
- * Caller must ensure enough space for the new order.
317
- */
318
- static void buddy_tree_resize(struct buddy_tree *t, uint8_t desired_order);
319
-
320
- #ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
321
- /* Enable change tracking state for this tree. */
322
- static void buddy_tree_enable_change_tracking(struct buddy_tree *t);
323
- #endif /* BUDDY_EXPERIMENTAL_CHANGE_TRACKING */
324
-
325
- /*
326
- * Navigation functions
327
- */
328
-
329
- /* Returns a position at the root of a buddy allocation tree */
330
- static struct buddy_tree_pos buddy_tree_root(void);
331
-
332
- /* Returns the leftmost child node */
333
- static struct buddy_tree_pos buddy_tree_leftmost_child(struct buddy_tree *t);
334
-
335
- /* Returns the tree depth of the indicated position */
336
- static inline size_t buddy_tree_depth(struct buddy_tree_pos pos);
337
-
338
- /* Returns the left child node position. Does not check if that is a valid position */
339
- static inline struct buddy_tree_pos buddy_tree_left_child(struct buddy_tree_pos pos);
340
-
341
- /* Returns the right child node position. Does not check if that is a valid position */
342
- static inline struct buddy_tree_pos buddy_tree_right_child(struct buddy_tree_pos pos);
343
-
344
- /* Returns the current sibling node position. Does not check if that is a valid position */
345
- static inline struct buddy_tree_pos buddy_tree_sibling(struct buddy_tree_pos pos);
346
-
347
- /* Returns the parent node position or an invalid position if there is no parent node */
348
- static inline struct buddy_tree_pos buddy_tree_parent(struct buddy_tree_pos pos);
349
-
350
- /* Returns the right adjacent node position or an invalid position if there is no right adjacent node */
351
- static struct buddy_tree_pos buddy_tree_right_adjacent(struct buddy_tree_pos pos);
352
-
353
- /* Returns the at-depth index of the indicated position */
354
- static size_t buddy_tree_index(struct buddy_tree_pos pos);
355
-
356
- /* Return the interval of the deepest positions spanning the indicated position */
357
- static struct buddy_tree_interval to_buddy_tree_interval(struct buddy_tree *t, struct buddy_tree_pos pos);
358
-
359
- /* Checks if one interval contains another */
360
- static bool buddy_tree_interval_contains(struct buddy_tree_interval outer,
361
- struct buddy_tree_interval inner);
362
-
363
- /* Return a walk state structure starting from the root of a tree */
364
- static struct buddy_tree_walk_state buddy_tree_walk_state_root(void);
365
-
366
- /* Walk the tree, keeping track in the provided state structure */
367
- static unsigned int buddy_tree_walk(struct buddy_tree *t, struct buddy_tree_walk_state *state);
368
-
369
-
370
- /*
371
- * Allocation functions
372
- */
373
-
374
- /* Returns the free capacity at or underneath the indicated position */
375
- static size_t buddy_tree_status(struct buddy_tree *t, struct buddy_tree_pos pos);
376
-
377
- /* Marks the indicated position as allocated and propagates the change */
378
- static void buddy_tree_mark(struct buddy_tree *t, struct buddy_tree_pos pos);
379
-
380
- enum buddy_tree_release_status {
381
- BUDDY_TREE_RELEASE_SUCCESS,
382
- BUDDY_TREE_RELEASE_FAIL_PARTIALLY_USED,
383
- };
384
-
385
- /* Marks the indicated position as free and propagates the change */
386
- static enum buddy_tree_release_status buddy_tree_release(struct buddy_tree *t, struct buddy_tree_pos pos);
387
-
388
- /* Returns a free position at the specified depth or an invalid position */
389
- static struct buddy_tree_pos buddy_tree_find_free(struct buddy_tree *t, uint8_t depth);
390
-
391
- /* Tests if the indicated position is available for allocation */
392
- static bool buddy_tree_is_free(struct buddy_tree *t, struct buddy_tree_pos pos);
393
-
394
- /* Tests if the tree can be shrank in half */
395
- static bool buddy_tree_can_shrink(struct buddy_tree *t);
396
-
397
- /*
398
- * Integration functions
399
- */
400
-
401
- #ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
402
- /* Get a pointer to the parent buddy struct */
403
- static struct buddy* buddy_tree_buddy(struct buddy_tree* t);
404
- #endif /* BUDDY_EXPERIMENTAL_CHANGE_TRACKING */
405
-
406
- /*
407
- * Debug functions
408
- */
409
-
410
- /* Implementation defined */
411
- static void buddy_tree_debug(struct buddy_tree *t, struct buddy_tree_pos pos, size_t start_size);
412
-
413
- /* Implementation defined */
414
- unsigned int buddy_tree_check_invariant(struct buddy_tree *t, struct buddy_tree_pos pos);
415
-
416
- /* Report fragmentation in a 0 - 255 range */
417
- static unsigned char buddy_tree_fragmentation(struct buddy_tree *t);
418
-
419
- /*
420
- * A char-backed bitset implementation
421
- */
422
-
423
- static size_t bitset_sizeof(size_t elements);
424
-
425
- struct bitset_range {
426
- size_t from_bucket;
427
- size_t to_bucket;
428
-
429
- uint8_t from_index;
430
- uint8_t to_index;
431
- };
432
-
433
- static inline struct bitset_range to_bitset_range(size_t from_pos, size_t to_pos);
434
-
435
- static void bitset_set_range(unsigned char *bitset, struct bitset_range range);
436
-
437
- static void bitset_clear_range(unsigned char *bitset, struct bitset_range range);
438
-
439
- static size_t bitset_count_range(unsigned char *bitset, struct bitset_range range);
440
-
441
- static inline void bitset_set(unsigned char *bitset, size_t pos);
442
-
443
- static inline void bitset_clear(unsigned char *bitset, size_t pos);
444
-
445
- static inline bool bitset_test(const unsigned char *bitset, size_t pos);
446
-
447
- static void bitset_shift_left(unsigned char *bitset, size_t from_pos, size_t to_pos, size_t by);
448
-
449
- static void bitset_shift_right(unsigned char *bitset, size_t from_pos, size_t to_pos, size_t by);
450
-
451
- /*
452
- * Debug functions
453
- */
454
-
455
- /* Implementation defined */
456
- void bitset_debug(unsigned char *bitset, size_t length);
457
-
458
- /*
459
- * Bits
460
- */
461
-
462
- /* Returns the number of set bits in the given byte */
463
- static unsigned int popcount_byte(unsigned char b);
464
-
465
- /* Count the number of trailing zeroes in the given value */
466
- static unsigned char count_trailing_zeroes(size_t val);
467
-
468
- /* Returns the index of the highest bit set (1-based) */
469
- static size_t highest_bit_position(size_t value);
470
-
471
- /* Returns the nearest larger or equal power of two */
472
- static inline size_t ceiling_power_of_two(size_t value);
473
-
474
- /* Return two to the power of order */
475
- static inline size_t two_to_the_power_of(size_t order);
476
-
477
- /*
478
- * Math
479
- */
480
-
481
- /* Calculates the integer square root of an integer */
482
- static inline size_t integer_square_root(size_t f);
483
-
484
- /*
485
- Implementation
486
- */
487
-
488
- const unsigned int BUDDY_RELATIVE_MODE = 1;
489
-
490
- /*
491
- * A binary buddy memory allocator
492
- */
493
-
494
- struct buddy {
495
- size_t memory_size;
496
- size_t alignment;
497
- union {
498
- unsigned char *main;
499
- ptrdiff_t main_offset;
500
- } arena;
501
- size_t buddy_flags;
502
- };
503
-
504
- struct buddy_embed_check {
505
- unsigned int can_fit;
506
- size_t offset;
507
- size_t buddy_size;
508
- };
509
-
510
- static unsigned int is_valid_alignment(size_t alignment);
511
- static size_t buddy_tree_order_for_memory(size_t memory_size, size_t alignment);
512
- static size_t depth_for_size(struct buddy *buddy, size_t requested_size);
513
- static inline size_t size_for_depth(struct buddy *buddy, size_t depth);
514
- static unsigned char *address_for_position(struct buddy *buddy, struct buddy_tree_pos pos);
515
- static struct buddy_tree_pos position_for_address(struct buddy *buddy, const unsigned char *addr);
516
- static unsigned char *buddy_main(struct buddy *buddy);
517
- static unsigned int buddy_relative_mode(struct buddy *buddy);
518
- static struct buddy_tree *buddy_tree_for(struct buddy *buddy);
519
- static size_t buddy_effective_memory_size(struct buddy *buddy);
520
- static size_t buddy_virtual_slots(struct buddy *buddy);
521
- static void buddy_toggle_virtual_slots(struct buddy *buddy, unsigned int state);
522
- static void buddy_toggle_range_reservation(struct buddy *buddy, void *ptr, size_t requested_size, unsigned int state);
523
- static struct buddy *buddy_resize_standard(struct buddy *buddy, size_t new_memory_size);
524
- static struct buddy *buddy_resize_embedded(struct buddy *buddy, size_t new_memory_size);
525
- static bool buddy_is_free(struct buddy *buddy, size_t from);
526
- static struct buddy_embed_check buddy_embed_offset(size_t memory_size, size_t alignment);
527
- static struct buddy_tree_pos deepest_position_for_offset(struct buddy *buddy, size_t offset);
528
-
529
- size_t buddy_sizeof(size_t memory_size) {
530
- return buddy_sizeof_alignment(memory_size, BUDDY_ALLOC_ALIGN);
531
- }
532
-
533
- size_t buddy_sizeof_alignment(size_t memory_size, size_t alignment) {
534
- size_t buddy_tree_order;
535
-
536
- if (!is_valid_alignment(alignment)) {
537
- return 0; /* invalid */
538
- }
539
- if (memory_size < alignment) {
540
- return 0; /* invalid */
541
- }
542
- buddy_tree_order = buddy_tree_order_for_memory(memory_size, alignment);
543
- return sizeof(struct buddy) + buddy_tree_sizeof((uint8_t)buddy_tree_order);
544
- }
545
-
546
- struct buddy *buddy_init(unsigned char *at, unsigned char *main, size_t memory_size) {
547
- return buddy_init_alignment(at, main, memory_size, BUDDY_ALLOC_ALIGN);
548
- }
549
-
550
- struct buddy *buddy_init_alignment(unsigned char *at, unsigned char *main, size_t memory_size,
551
- size_t alignment) {
552
- size_t at_alignment, main_alignment, buddy_size, buddy_tree_order;
553
- struct buddy *buddy;
554
-
555
- if (at == NULL) {
556
- return NULL;
557
- }
558
- if (main == NULL) {
559
- return NULL;
560
- }
561
- if (at == main) {
562
- return NULL;
563
- }
564
- if (!is_valid_alignment(alignment)) {
565
- return NULL; /* invalid */
566
- }
567
- at_alignment = ((uintptr_t) at) % BUDDY_ALIGNOF(struct buddy);
568
- if (at_alignment != 0) {
569
- return NULL;
570
- }
571
- main_alignment = ((uintptr_t) main) % BUDDY_ALIGNOF(size_t);
572
- if (main_alignment != 0) {
573
- return NULL;
574
- }
575
- /* Trim down memory to alignment */
576
- if (memory_size % alignment) {
577
- memory_size -= (memory_size % alignment);
578
- }
579
- buddy_size = buddy_sizeof_alignment(memory_size, alignment);
580
- if (buddy_size == 0) {
581
- return NULL;
582
- }
583
- buddy_tree_order = buddy_tree_order_for_memory(memory_size, alignment);
584
-
585
- /* TODO check for overlap between buddy metadata and main block */
586
- buddy = (struct buddy *) at;
587
- buddy->arena.main = main;
588
- buddy->memory_size = memory_size;
589
- buddy->buddy_flags = 0;
590
- buddy->alignment = alignment;
591
- buddy_tree_init((unsigned char *)buddy + sizeof(*buddy), (uint8_t) buddy_tree_order);
592
- buddy_toggle_virtual_slots(buddy, 1);
593
- return buddy;
594
- }
595
-
596
- struct buddy *buddy_embed(unsigned char *main, size_t memory_size) {
597
- return buddy_embed_alignment(main, memory_size, BUDDY_ALLOC_ALIGN);
598
- }
599
-
600
- struct buddy *buddy_get_embed_at(unsigned char *main, size_t memory_size) {
601
- return buddy_get_embed_at_alignment(main, memory_size, BUDDY_ALLOC_ALIGN);
602
- }
603
-
604
- struct buddy *buddy_embed_alignment(unsigned char *main, size_t memory_size, size_t alignment) {
605
- struct buddy_embed_check check_result;
606
- struct buddy *buddy;
607
-
608
- if (! main) {
609
- return NULL;
610
- }
611
- if (!is_valid_alignment(alignment)) {
612
- return NULL; /* invalid */
613
- }
614
- check_result = buddy_embed_offset(memory_size, alignment);
615
- if (! check_result.can_fit) {
616
- return NULL;
617
- }
618
-
619
- buddy = buddy_init_alignment(main+check_result.offset, main, check_result.offset, alignment);
620
- if (! buddy) { /* regular initialization failed */
621
- return NULL;
622
- }
623
-
624
- buddy->buddy_flags |= BUDDY_RELATIVE_MODE;
625
- buddy->arena.main_offset = (unsigned char *)buddy - main;
626
- return buddy;
627
- }
628
-
629
- struct buddy *buddy_get_embed_at_alignment(unsigned char *main, size_t memory_size, size_t alignment) {
630
- struct buddy_embed_check check_result = buddy_embed_offset(memory_size, alignment);
631
- if (!check_result.can_fit) {
632
- return NULL;
633
- }
634
- return (struct buddy *)(main + check_result.offset);
635
- }
636
-
637
- struct buddy *buddy_resize(struct buddy *buddy, size_t new_memory_size) {
638
- if (new_memory_size == buddy->memory_size) {
639
- return buddy;
640
- }
641
-
642
- if (buddy_relative_mode(buddy)) {
643
- return buddy_resize_embedded(buddy, new_memory_size);
644
- } else {
645
- return buddy_resize_standard(buddy, new_memory_size);
646
- }
647
- }
648
-
649
- static struct buddy *buddy_resize_standard(struct buddy *buddy, size_t new_memory_size) {
650
- size_t new_buddy_tree_order;
651
-
652
- /* Trim down memory to alignment */
653
- if (new_memory_size % buddy->alignment) {
654
- new_memory_size -= (new_memory_size % buddy->alignment);
655
- }
656
-
657
- /* Account for tree use */
658
- if (!buddy_is_free(buddy, new_memory_size)) {
659
- return NULL;
660
- }
661
-
662
- /* Release the virtual slots */
663
- buddy_toggle_virtual_slots(buddy, 0);
664
-
665
- /* Calculate new tree order and resize it */
666
- new_buddy_tree_order = buddy_tree_order_for_memory(new_memory_size, buddy->alignment);
667
- buddy_tree_resize(buddy_tree_for(buddy), (uint8_t) new_buddy_tree_order);
668
-
669
- /* Store the new memory size and reconstruct any virtual slots */
670
- buddy->memory_size = new_memory_size;
671
- buddy_toggle_virtual_slots(buddy, 1);
672
-
673
- /* Resize successful */
674
- return buddy;
675
- }
676
-
677
- static struct buddy *buddy_resize_embedded(struct buddy *buddy, size_t new_memory_size) {
678
- struct buddy_embed_check check_result;
679
- unsigned char *main, *buddy_destination;
680
- struct buddy *resized, *relocated;
681
-
682
- /* Ensure that the embedded allocator can fit */
683
- check_result = buddy_embed_offset(new_memory_size, buddy->alignment);
684
- if (! check_result.can_fit) {
685
- return NULL;
686
- }
687
-
688
- /* Resize the allocator in the normal way */
689
- resized = buddy_resize_standard(buddy, check_result.offset);
690
- if (! resized) {
691
- return NULL;
692
- }
693
-
694
- /* Get the absolute main address. The relative will be invalid after relocation. */
695
- main = buddy_main(buddy);
696
-
697
- /* Relocate the allocator */
698
- buddy_destination = buddy_main(buddy) + check_result.offset;
699
- memmove(buddy_destination, resized, check_result.buddy_size);
700
-
701
- /* Update the main offset in the allocator */
702
- relocated = (struct buddy *) buddy_destination;
703
- relocated->arena.main_offset = buddy_destination - main;
704
-
705
- return relocated;
706
- }
707
-
708
- bool buddy_can_shrink(struct buddy *buddy) {
709
- if (buddy == NULL) {
710
- return false;
711
- }
712
- return buddy_is_free(buddy, buddy->memory_size / 2);
713
- }
714
-
715
- bool buddy_is_empty(struct buddy *buddy) {
716
- if (buddy == NULL) {
717
- return false;
718
- }
719
- return buddy_is_free(buddy, 0);
720
- }
721
-
722
- bool buddy_is_full(struct buddy *buddy) {
723
- struct buddy_tree *tree;
724
- struct buddy_tree_pos pos;
725
-
726
- if (buddy == NULL) {
727
- return false;
728
- }
729
- tree = buddy_tree_for(buddy);
730
- pos = buddy_tree_root();
731
- return buddy_tree_status(tree, pos) == buddy_tree_order(tree);
732
- }
733
-
734
- size_t buddy_arena_size(struct buddy *buddy) {
735
- if (buddy == NULL) {
736
- return 0;
737
- }
738
- return buddy->memory_size;
739
- }
740
-
741
- size_t buddy_arena_free_size(struct buddy *buddy) {
742
- size_t result = 0;
743
- struct buddy_tree *tree = buddy_tree_for(buddy);
744
- size_t tree_order = buddy_tree_order(tree);
745
-
746
- struct buddy_tree_walk_state state = buddy_tree_walk_state_root();
747
- do {
748
- size_t pos_status = buddy_tree_status(tree, state.current_pos);
749
- if (pos_status == (tree_order - state.current_pos.depth + 1)) { /* Fully-allocated */
750
- state.going_up = 1;
751
- } else if (pos_status == 0) { /* Free */
752
- state.going_up = 1;
753
- result += size_for_depth(buddy, state.current_pos.depth);
754
- } else { /* Partial */
755
- continue;
756
- }
757
- } while (buddy_tree_walk(tree, &state));
758
- return result;
759
- }
760
-
761
- static unsigned int is_valid_alignment(size_t alignment) {
762
- return ceiling_power_of_two(alignment) == alignment;
763
- }
764
-
765
- static size_t buddy_tree_order_for_memory(size_t memory_size, size_t alignment) {
766
- // cppcheck-suppress zerodiv
767
- size_t blocks = memory_size / alignment;
768
- return highest_bit_position(ceiling_power_of_two(blocks));
769
- }
770
-
771
- void *buddy_malloc(struct buddy *buddy, size_t requested_size) {
772
- size_t target_depth;
773
- struct buddy_tree *tree;
774
- struct buddy_tree_pos pos;
775
-
776
- if (buddy == NULL) {
777
- return NULL;
778
- }
779
- if (requested_size == 0) {
780
- /*
781
- * Batshit crazy code exists that calls malloc(0) and expects
782
- * a result that can be safely passed to free().
783
- * And even though this allocator will safely handle a free(NULL)
784
- * the particular batshit code will expect a non-NULL malloc(0) result!
785
- *
786
- * See also https://wiki.sei.cmu.edu/confluence/display/c/MEM04-C.+Beware+of+zero-length+allocations
787
- */
788
- requested_size = 1;
789
- }
790
- if (requested_size > buddy->memory_size) {
791
- return NULL;
792
- }
793
-
794
- target_depth = depth_for_size(buddy, requested_size);
795
- tree = buddy_tree_for(buddy);
796
- pos = buddy_tree_find_free(tree, (uint8_t) target_depth);
797
-
798
- if (! buddy_tree_valid(tree, pos)) {
799
- return NULL; /* no slot found */
800
- }
801
-
802
- /* Allocate the slot */
803
- buddy_tree_mark(tree, pos);
804
-
805
- /* Find and return the actual memory address */
806
- return address_for_position(buddy, pos);
807
- }
808
-
809
- void *buddy_calloc(struct buddy *buddy, size_t members_count, size_t member_size) {
810
- size_t total_size;
811
- void *result;
812
-
813
- if (members_count == 0 || member_size == 0) {
814
- /* See the gleeful remark in malloc */
815
- members_count = 1;
816
- member_size = 1;
817
- }
818
- /* Check for overflow */
819
- if (((members_count * member_size)/members_count) != member_size) {
820
- return NULL;
821
- }
822
- total_size = members_count * member_size;
823
- result = buddy_malloc(buddy, total_size);
824
- if (result) {
825
- memset(result, 0, total_size);
826
- }
827
- return result;
828
- }
829
-
830
/*
 * Resize an allocation tracked by this buddy instance.
 * Mirrors C realloc semantics; when ignore_data is set the old
 * contents are not copied to a relocated block.
 */
void *buddy_realloc(struct buddy *buddy, void *ptr, size_t requested_size, bool ignore_data) {
    struct buddy_tree *tree;
    struct buddy_tree_pos origin, new_pos;
    size_t current_depth, target_depth;
    void *source, *destination;

    /*
     * realloc is a joke:
     * - NULL ptr degrades into malloc
     * - Zero size degrades into free
     * - Same size as previous malloc/calloc/realloc is a no-op or a relocation
     * - Smaller size than previous *alloc decreases the allocated size with an optional relocation
     * - If the new allocation cannot be satisfied NULL is returned BUT the slot is preserved
     * - Larger size than previous *alloc increases the allocated size with an optional relocation
     */
    if (ptr == NULL) {
        return buddy_malloc(buddy, requested_size);
    }
    if (requested_size == 0) {
        buddy_free(buddy, ptr);
        return NULL;
    }
    if (requested_size > buddy->memory_size) {
        return NULL;
    }

    /* Find the position tracking this address */
    tree = buddy_tree_for(buddy);
    origin = position_for_address(buddy, (unsigned char *) ptr);
    if (! buddy_tree_valid(tree, origin)) {
        return NULL;
    }
    current_depth = buddy_tree_depth(origin);
    target_depth = depth_for_size(buddy, requested_size);

    /* Release the position and perform a search; releasing first allows
     * the search to reuse the current slot or one of its parents */
    buddy_tree_release(tree, origin);
    new_pos = buddy_tree_find_free(tree, (uint8_t) target_depth);

    if (! buddy_tree_valid(tree, new_pos)) {
        /* allocation failure, restore the original mark and return null */
        buddy_tree_mark(tree, origin);
        return NULL;
    }

    if (origin.index == new_pos.index) {
        /* Allocated to the same slot, restore the mark and return the same pointer */
        buddy_tree_mark(tree, origin);
        return ptr;
    }

    destination = address_for_position(buddy, new_pos);

    if (! ignore_data) {
        /* Copy the content. A greater depth means a smaller block, so
         * taking the larger depth copies the smaller of the two sizes. */
        source = address_for_position(buddy, origin);
        memmove(destination, source, size_for_depth(buddy,
            current_depth > target_depth ? current_depth : target_depth));
    }

    /* Allocate and return */
    buddy_tree_mark(tree, new_pos);
    return destination;
}
894
-
895
/*
 * Array-aware realloc: resizes ptr to hold members_count elements of
 * member_size bytes each. Returns NULL when the product would overflow.
 */
void *buddy_reallocarray(struct buddy *buddy, void *ptr,
        size_t members_count, size_t member_size, bool ignore_data) {
    size_t total_size = members_count * member_size;

    /* A zero-sized array degrades into a zero-sized realloc, i.e. a free */
    if ((members_count == 0) || (member_size == 0)) {
        return buddy_realloc(buddy, ptr, 0, ignore_data);
    }

    /* Reject a multiplication that wrapped around SIZE_MAX */
    if ((total_size / members_count) != member_size) {
        return NULL;
    }

    return buddy_realloc(buddy, ptr, total_size, ignore_data);
}
906
-
907
- void buddy_free(struct buddy *buddy, void *ptr) {
908
- unsigned char *dst, *main;
909
- struct buddy_tree *tree;
910
- struct buddy_tree_pos pos;
911
-
912
- if (buddy == NULL) {
913
- return;
914
- }
915
- if (ptr == NULL) {
916
- return;
917
- }
918
- dst = (unsigned char *)ptr;
919
- main = buddy_main(buddy);
920
- if ((dst < main) || (dst >= (main + buddy->memory_size))) {
921
- return;
922
- }
923
-
924
- /* Find the position tracking this address */
925
- tree = buddy_tree_for(buddy);
926
- pos = position_for_address(buddy, dst);
927
-
928
- if (! buddy_tree_valid(tree, pos)) {
929
- return;
930
- }
931
-
932
- /* Release the position */
933
- buddy_tree_release(tree, pos);
934
- }
935
-
936
/*
 * Checked variant of buddy_free: the caller states the size it believes
 * was allocated and the free proceeds only when that size (rounded up to
 * the allocator alignment) matches the tracked slot, i.e. falls in the
 * half-open interval (slot_size/2, slot_size].
 */
enum buddy_safe_free_status buddy_safe_free(struct buddy* buddy, void* ptr, size_t requested_size) {
    unsigned char* dst, * main;
    struct buddy_tree* tree;
    struct buddy_tree_pos pos;
    size_t allocated_size_for_depth;
    enum buddy_tree_release_status status;

    if (buddy == NULL) {
        return BUDDY_SAFE_FREE_BUDDY_IS_NULL;
    }
    if (ptr == NULL) {
        return BUDDY_SAFE_FREE_INVALID_ADDRESS;
    }
    dst = (unsigned char*)ptr;
    main = buddy_main(buddy);
    if ((dst < main) || (dst >= (main + buddy->memory_size))) {
        return BUDDY_SAFE_FREE_INVALID_ADDRESS;
    }

    /* Find an allocated position tracking this address */
    tree = buddy_tree_for(buddy);
    pos = position_for_address(buddy, dst);

    if (!buddy_tree_valid(tree, pos)) {
        return BUDDY_SAFE_FREE_INVALID_ADDRESS;
    }

    /* Sizes below the alignment are served by one full aligned slot */
    allocated_size_for_depth = size_for_depth(buddy, pos.depth);
    if (requested_size < buddy->alignment) {
        requested_size = buddy->alignment;
    }
    if (requested_size > allocated_size_for_depth) {
        return BUDDY_SAFE_FREE_SIZE_MISMATCH;
    }
    /* A size at or below half the slot would have been placed in a smaller slot */
    if (requested_size <= (allocated_size_for_depth / 2)) {
        return BUDDY_SAFE_FREE_SIZE_MISMATCH;
    }

    /* Release the position */
    status = buddy_tree_release(tree, pos);

    switch (status) {
    case BUDDY_TREE_RELEASE_FAIL_PARTIALLY_USED:
        return BUDDY_SAFE_FREE_INVALID_ADDRESS;
    case BUDDY_TREE_RELEASE_SUCCESS:
        break;
    }

    return BUDDY_SAFE_FREE_SUCCESS;
}
986
-
987
- size_t buddy_alloc_size(struct buddy *buddy, void *ptr) {
988
- unsigned char* dst, * main;
989
- struct buddy_tree* tree;
990
- struct buddy_tree_pos pos;
991
-
992
- if (buddy == NULL) {
993
- return 0;
994
- }
995
- if (ptr == NULL) {
996
- return 0;
997
- }
998
- dst = (unsigned char*)ptr;
999
- main = buddy_main(buddy);
1000
- if ((dst < main) || (dst >= (main + buddy->memory_size))) {
1001
- return 0;
1002
- }
1003
-
1004
- /* Find an allocated position tracking this address */
1005
- tree = buddy_tree_for(buddy);
1006
- pos = position_for_address(buddy, dst);
1007
-
1008
- if (!buddy_tree_valid(tree, pos)) {
1009
- return 0;
1010
- }
1011
-
1012
- return size_for_depth(buddy, pos.depth);
1013
- }
1014
-
1015
/* Mark an address range as allocated so it is excluded from future allocations. */
void buddy_reserve_range(struct buddy *buddy, void *ptr, size_t requested_size) {
    buddy_toggle_range_reservation(buddy, ptr, requested_size, 1);
}
1018
-
1019
/*
 * Return a previously-reserved range to the free pool. Unsafe: the caller
 * must guarantee the range matches a prior buddy_reserve_range call.
 */
void buddy_unsafe_release_range(struct buddy *buddy, void *ptr, size_t requested_size) {
    buddy_toggle_range_reservation(buddy, ptr, requested_size, 0);
}
1022
-
1023
/*
 * Iterate over the arena's slots, invoking fp for each one with its
 * address, slot size and allocation flag. Iteration stops early (and its
 * value is returned) when the callback returns non-NULL; otherwise NULL.
 */
void *buddy_walk(struct buddy *buddy,
        void *(fp)(void *ctx, void *addr, size_t slot_size, size_t allocated),
        void *ctx) {
    unsigned char *main;
    size_t effective_memory_size, tree_order, pos_status, pos_size;
    struct buddy_tree *tree;
    unsigned char *addr;
    struct buddy_tree_walk_state state;
    struct buddy_tree_pos test_pos;
    void *callback_result;

    if (buddy == NULL) {
        return NULL;
    }
    if (fp == NULL) {
        return NULL;
    }
    main = buddy_main(buddy);
    effective_memory_size = buddy_effective_memory_size(buddy);
    tree = buddy_tree_for(buddy);
    tree_order = buddy_tree_order(tree);

    /* Depth-first traversal starting from the root */
    state = buddy_tree_walk_state_root();
    do {
        pos_status = buddy_tree_status(tree, state.current_pos);
        if (pos_status != (tree_order - state.current_pos.depth + 1)) { /* Partially-allocated */
            continue;
        }

        /*
         * The tree doesn't make a distinction of a fully-allocated node
         * due to a single allocation and a fully-allocated due to maxed out
         * child allocations - we need to check the children.
         * A child-allocated node will have both children set to their maximum
         * but it is sufficient to check just one for non-zero.
         */
        test_pos = buddy_tree_left_child(state.current_pos);
        if (buddy_tree_valid(tree, test_pos) && buddy_tree_status(tree, test_pos)) {
            continue;
        }

        /* Current node is free or allocated, process */
        /* Each level of depth halves the covered size */
        pos_size = effective_memory_size >> (state.current_pos.depth - 1u);
        addr = address_for_position(buddy, state.current_pos);
        if (((size_t)(addr - main) + pos_size) > buddy->memory_size) {
            /*
             * Do not process virtual slots
             * As virtual slots are on the right side of the tree
             * if we see a one with the current iteration order this
             * means that all subsequent slots will be virtual,
             * hence we can return early.
             */
            return NULL;
        }
        callback_result = (fp)(ctx, addr, pos_size, pos_status > 0);
        if (callback_result != NULL) {
            return callback_result;
        }
        /* Do not descend into a processed subtree */
        state.going_up = 1;

    } while (buddy_tree_walk(tree, &state));
    return NULL;
}
1086
-
1087
/* Fragmentation score of the arena; 0 for a NULL allocator. */
unsigned char buddy_fragmentation(struct buddy *buddy) {
    return (buddy == NULL) ? 0 : buddy_tree_fragmentation(buddy_tree_for(buddy));
}
1093
-
1094
- #ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
1095
/*
 * Enable experimental change tracking: reserves the start of the arena
 * for a buddy_change_tracker header and flags the tree so subsequent
 * bitset writes are reported to the supplied callback.
 * NOTE(review): must be called before any allocations — the header is
 * reserved at the arena start; confirm against caller usage.
 */
void buddy_enable_change_tracking(struct buddy* buddy, void* context, void (*tracker) (void*, unsigned char*, size_t)) {
    struct buddy_tree *t = buddy_tree_for(buddy);
    struct buddy_change_tracker *header = (struct buddy_change_tracker *) buddy_main(buddy);

    /* Allocate memory for the change tracking header */
    buddy_reserve_range(buddy, buddy_main(buddy), sizeof(struct buddy_change_tracker));

    /* Fill in the change tracking header */
    header->context = context;
    header->tracker = tracker;

    /* Indicate that the tree should perform change tracking */
    buddy_tree_enable_change_tracking(t);
}
1109
- #endif
1110
-
1111
-
1112
- static size_t depth_for_size(struct buddy *buddy, size_t requested_size) {
1113
- size_t depth, effective_memory_size, p2_of_requested_size;
1114
- if (requested_size < buddy->alignment) {
1115
- requested_size = buddy->alignment;
1116
- }
1117
- depth = 1;
1118
- effective_memory_size = buddy_effective_memory_size(buddy);
1119
-
1120
- p2_of_requested_size = ceiling_power_of_two(requested_size);
1121
- depth = count_trailing_zeroes(effective_memory_size) + 1
1122
- - count_trailing_zeroes(p2_of_requested_size);
1123
- return depth;
1124
- }
1125
-
1126
/* Slot size at a given tree depth: the effective (power-of-two) arena size halved (depth-1) times. */
static inline size_t size_for_depth(struct buddy *buddy, size_t depth) {
    return ceiling_power_of_two(buddy->memory_size) >> (depth-1);
}
1129
-
1130
/* The allocation tree is stored immediately after the buddy header. */
static struct buddy_tree *buddy_tree_for(struct buddy *buddy) {
    return (struct buddy_tree*) ((unsigned char *)buddy + sizeof(*buddy));
}
1133
-
1134
/* Arena size rounded up to a power of two — the size the tree actually models. */
static size_t buddy_effective_memory_size(struct buddy *buddy) {
    return ceiling_power_of_two(buddy->memory_size);
}
1137
-
1138
- static size_t buddy_virtual_slots(struct buddy *buddy) {
1139
- size_t memory_size = buddy->memory_size;
1140
- size_t effective_memory_size = buddy_effective_memory_size(buddy);
1141
- if (effective_memory_size == memory_size) {
1142
- return 0;
1143
- }
1144
- return (effective_memory_size - memory_size) / buddy->alignment;
1145
- }
1146
-
1147
/* Arena address covered by a tree position: slot size times the node's index within its row. */
static unsigned char *address_for_position(struct buddy *buddy, struct buddy_tree_pos pos) {
    size_t block_size = size_for_depth(buddy, buddy_tree_depth(pos));
    size_t addr = block_size * buddy_tree_index(pos);
    return buddy_main(buddy) + addr;
}
1152
-
1153
/* Leaf-level tree position for an arena byte offset (offset must be alignment-scaled by caller). */
static struct buddy_tree_pos deepest_position_for_offset(struct buddy *buddy, size_t offset) {
    size_t index = offset / buddy->alignment;
    struct buddy_tree_pos pos = buddy_tree_leftmost_child(buddy_tree_for(buddy));
    pos.index += index;
    return pos;
}
1159
-
1160
/*
 * Find the tree node that tracks the allocation at addr, starting from
 * the leaf for that offset and walking up until an allocated node is
 * found. Returns INVALID_POS for misaligned addresses, untracked
 * addresses, or addresses that point into the middle of an allocation.
 */
static struct buddy_tree_pos position_for_address(struct buddy *buddy, const unsigned char *addr) {
    unsigned char *main;
    struct buddy_tree *tree;
    struct buddy_tree_pos pos;
    size_t offset;

    main = buddy_main(buddy);
    offset = (size_t) (addr - main);

    if (offset % buddy->alignment) {
        return INVALID_POS; /* invalid alignment */
    }

    tree = buddy_tree_for(buddy);
    pos = deepest_position_for_offset(buddy, offset);

    /* Find the actual allocated position tracking this address */
    while (!buddy_tree_status(tree, pos)) {
        pos = buddy_tree_parent(pos);

        if (!buddy_tree_valid(tree, pos)) {
            return INVALID_POS;
        }
    }

    /* The node found must start exactly at addr, otherwise addr points
     * inside an allocation rather than at its start */
    if (address_for_position(buddy, pos) != addr) {
        return INVALID_POS; /* invalid alignment */
    }

    return pos;
}
1191
-
1192
/*
 * Start of the managed arena. In relative (embedded) mode the arena is
 * located at a negative offset from the buddy header itself; otherwise a
 * plain pointer is stored.
 */
static unsigned char *buddy_main(struct buddy *buddy) {
    if (buddy_relative_mode(buddy)) {
        return (unsigned char *)buddy - buddy->arena.main_offset;
    }
    return buddy->arena.main;
}
1198
-
1199
/* Non-zero when the allocator stores the arena as a relative offset (embedded mode). */
static unsigned int buddy_relative_mode(struct buddy *buddy) {
    return (unsigned int)buddy->buddy_flags & BUDDY_RELATIVE_MODE;
}
1202
-
1203
/*
 * Mark (state != 0) or release (state == 0) the "virtual" region — the
 * padding between the real arena size and its power-of-two ceiling — so
 * that allocations never land in memory that does not exist. Walks down
 * the right spine of the tree, claiming right children greedily until
 * the remaining delta exactly matches a node's size.
 */
static void buddy_toggle_virtual_slots(struct buddy *buddy, unsigned int state) {
    size_t delta, memory_size, effective_memory_size;
    struct buddy_tree *tree;
    struct buddy_tree_pos pos;

    memory_size = buddy->memory_size;
    /* Mask/unmask the virtual space if memory is not a power of two */
    effective_memory_size = buddy_effective_memory_size(buddy);
    if (effective_memory_size == memory_size) {
        return;
    }

    /* Get the area that we need to mask and pad it to alignment */
    /* Node memory size is already aligned to buddy->alignment */
    delta = effective_memory_size - memory_size;

    tree = buddy_tree_for(buddy);
    /* The virtual region always lies at the high end of the arena,
     * i.e. under the root's right child */
    pos = buddy_tree_right_child(buddy_tree_root());
    while (delta) {
        size_t current_pos_size = size_for_depth(buddy, buddy_tree_depth(pos));
        if (delta == current_pos_size) {
            /* toggle current pos */
            if (state) {
                buddy_tree_mark(tree, pos);
            }
            else {
                buddy_tree_release(tree, pos);
            }
            break;
        }
        if (delta <= (current_pos_size / 2)) {
            /* re-run for right child */
            pos = buddy_tree_right_child(pos);
            continue;
        } else {
            /* toggle right child */
            if (state) {
                buddy_tree_mark(tree, buddy_tree_right_child(pos));
            }
            else {
                buddy_tree_release(tree, buddy_tree_right_child(pos));
            }
            /* reduce delta */
            delta -= current_pos_size / 2;
            /* re-run for left child */
            pos = buddy_tree_left_child(pos);
            continue;
        }
    }
}
1253
-
1254
/*
 * Mark (state != 0) or release (state == 0) every alignment-sized leaf
 * slot overlapping [ptr, ptr + requested_size). Shared implementation of
 * buddy_reserve_range and buddy_unsafe_release_range. Silently ignores
 * NULL arguments, zero sizes and out-of-arena ranges.
 */
static void buddy_toggle_range_reservation(struct buddy *buddy, void *ptr, size_t requested_size, unsigned int state) {
    unsigned char *dst, *main;
    struct buddy_tree *tree;
    size_t offset;
    struct buddy_tree_pos pos;

    if (buddy == NULL) {
        return;
    }
    if (ptr == NULL) {
        return;
    }
    if (requested_size == 0) {
        return;
    }
    dst = (unsigned char *)ptr;
    main = buddy_main(buddy);
    if ((dst < main) || ((dst + requested_size) > (main + buddy->memory_size))) {
        return;
    }

    /* Find the deepest position tracking this address */
    tree = buddy_tree_for(buddy);
    offset = (size_t) (dst - main);
    pos = deepest_position_for_offset(buddy, offset);

    /* Advance one leaf slot at a time and process */
    while (requested_size) {
        if (state) {
            buddy_tree_mark(tree, pos);
        }
        else {
            buddy_tree_release(tree, pos);
        }
        /* Saturating subtraction: the final partial slot still toggles fully */
        requested_size = (requested_size < buddy->alignment) ? 0 : (requested_size - buddy->alignment);
        pos.index++;
    }

    return;
}
1294
-
1295
- /* Internal function that checks if there are any allocations
1296
- after the indicated relative memory index. Used to check if
1297
- the arena can be downsized.
1298
- The from argument is already adjusted for alignment by caller */
1299
/*
 * True when no allocations exist in [from, end-of-usable-arena).
 * Walks leaf-to-root, widening each probe to the largest node fully
 * contained in the query range so large free regions are checked in one
 * step, then jumping past the checked interval.
 */
static bool buddy_is_free(struct buddy *buddy, size_t from) {
    struct buddy_tree *tree;
    struct buddy_tree_interval query_range;
    struct buddy_tree_pos pos;
    size_t effective_memory_size, virtual_slots, to;

    effective_memory_size = buddy_effective_memory_size(buddy);
    virtual_slots = buddy_virtual_slots(buddy);
    /* Exclude the virtual (non-existent) slots at the top of the arena */
    to = effective_memory_size -
        ((virtual_slots ? (virtual_slots + 1) : 1) * buddy->alignment);

    tree = buddy_tree_for(buddy);

    query_range.from = deepest_position_for_offset(buddy, from);
    query_range.to = deepest_position_for_offset(buddy, to);

    pos = deepest_position_for_offset(buddy, from);
    while(buddy_tree_valid(tree, pos) && (pos.index < query_range.to.index)) {
        /* Widen to the largest ancestor still inside the query range */
        struct buddy_tree_interval current_test_range = to_buddy_tree_interval(tree, pos);
        struct buddy_tree_interval parent_test_range =
            to_buddy_tree_interval(tree, buddy_tree_parent(pos));
        while(buddy_tree_interval_contains(query_range, parent_test_range)) {
            pos = buddy_tree_parent(pos);
            current_test_range = parent_test_range;
            parent_test_range = to_buddy_tree_interval(tree, buddy_tree_parent(pos));
        }
        /* pos is now tracking an overlapping segment */
        if (! buddy_tree_is_free(tree, pos)) {
            return false;
        }
        /* Advance check past the interval just verified */
        pos = buddy_tree_right_adjacent(current_test_range.to);
    }
    return true;
}
1334
-
1335
/*
 * Compute where an embedded buddy header can be placed inside an arena
 * of memory_size bytes: the header goes at the end, aligned for struct
 * buddy. can_fit is 0 when the metadata would not leave usable space.
 */
static struct buddy_embed_check buddy_embed_offset(size_t memory_size, size_t alignment) {
    size_t buddy_size, offset;
    struct buddy_embed_check check_result;

    memset(&check_result, 0, sizeof(check_result));
    check_result.can_fit = 1;
    buddy_size = buddy_sizeof_alignment(memory_size, alignment);
    if (buddy_size >= memory_size) {
        check_result.can_fit = 0;
    }

    /* NOTE(review): when can_fit is 0 this subtraction wraps; harmless
     * since the result is discarded below, but worth confirming */
    offset = memory_size - buddy_size;
    if (offset % BUDDY_ALIGNOF(struct buddy) != 0) {
        /* NOTE(review): this grows buddy_size by the misalignment rather
         * than rounding offset down to the alignment — verify intent */
        buddy_size += offset % BUDDY_ALIGNOF(struct buddy);
        if (buddy_size >= memory_size) {
            check_result.can_fit = 0;
        }
        offset = memory_size - buddy_size;
    }

    if (check_result.can_fit) {
        check_result.offset = offset;
        check_result.buddy_size = buddy_size;
    }
    return check_result;
}
1361
-
1362
/* Dump allocator state (addresses, size, mode, virtual slots, tree) via BUDDY_PRINTF. */
void buddy_debug(struct buddy *buddy) {
    BUDDY_PRINTF("buddy allocator at: %p arena at: %p\n", (void *)buddy, (void *)buddy_main(buddy));
    BUDDY_PRINTF("memory size: %zu\n", buddy->memory_size);
    BUDDY_PRINTF("mode: ");
    if (buddy_relative_mode(buddy)) {
        BUDDY_PRINTF("embedded");
    } else {
        BUDDY_PRINTF("standard");
    }
    BUDDY_PRINTF("\n");
    BUDDY_PRINTF("virtual slots: %zu\n", buddy_virtual_slots(buddy));
    BUDDY_PRINTF("allocator tree follows:\n");
    buddy_tree_debug(buddy_tree_for(buddy), buddy_tree_root(), buddy_effective_memory_size(buddy));
}
1376
-
1377
- /*
1378
- * A buddy allocation tree
1379
- */
1380
-
1381
/* Header of the allocation tree; the status bitset and the memoized
 * size_for_order table follow this struct in memory. */
struct buddy_tree {
    size_t upper_pos_bound;       /* One past the largest valid pos.index (2^order) */
    size_t size_for_order_offset; /* Offset, in size_t units past the header, of the memoized table */
    uint8_t order;                /* Tree height; the leaf row has 2^(order-1) slots */
    uint8_t flags;                /* Bitwise-or of enum buddy_tree_flags */
    /*
     * struct padding rules mean that there are
     * 16/48 bits available until the next increment
     */
};
1391
-
1392
/* Bit flags stored in buddy_tree.flags */
enum buddy_tree_flags {
    BUDDY_TREE_CHANGE_TRACKING = 1, /* report bitset writes to the external tracker */
};
1395
-
1396
/* Location of one node's status bits within the tree bitset. */
struct internal_position {
    size_t local_offset;    /* number of status bits each node at this depth occupies */
    size_t bitset_location; /* absolute bit index of this node's first status bit */
};
1400
-
1401
- static inline size_t size_for_order(uint8_t order, uint8_t to);
1402
- static inline size_t buddy_tree_index_internal(struct buddy_tree_pos pos);
1403
- static struct buddy_tree_pos buddy_tree_leftmost_child_internal(size_t tree_order);
1404
- static struct internal_position buddy_tree_internal_position_order(
1405
- size_t tree_order, struct buddy_tree_pos pos);
1406
- static struct internal_position buddy_tree_internal_position_tree(
1407
- struct buddy_tree *t, struct buddy_tree_pos pos);
1408
- static void buddy_tree_grow(struct buddy_tree *t, uint8_t desired_order);
1409
- static void buddy_tree_shrink(struct buddy_tree *t, uint8_t desired_order);
1410
- static void update_parent_chain(struct buddy_tree *t, struct buddy_tree_pos pos,
1411
- struct internal_position pos_internal, size_t size_current);
1412
- static inline unsigned char *buddy_tree_bits(struct buddy_tree *t);
1413
- static void buddy_tree_populate_size_for_order(struct buddy_tree *t);
1414
- static inline size_t buddy_tree_size_for_order(struct buddy_tree *t, uint8_t to);
1415
- static void write_to_internal_position(struct buddy_tree* t, struct internal_position pos, size_t value);
1416
- static size_t read_from_internal_position(unsigned char *bitset, struct internal_position pos);
1417
- static inline unsigned char compare_with_internal_position(unsigned char *bitset, struct internal_position pos, size_t value);
1418
-
1419
- #ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
1420
- static inline void buddy_tree_track_change(struct buddy_tree* t, unsigned char* addr, size_t length);
1421
- #endif /* BUDDY_EXPERIMENTAL_CHANGE_TRACKING */
1422
-
1423
/*
 * Total number of status bits used by all tree rows from depth `order`
 * (exclusive bound `to`): each row at depth d holds 2^(order-d) nodes of
 * d bits each. size_for_order(order, 0) is the whole bitset's bit count.
 */
static inline size_t size_for_order(uint8_t order, uint8_t to) {
    size_t total = 0;
    size_t row_width = 1u;

    for (uint8_t level = order; level != to; level--) {
        total += level * row_width;
        row_width *= 2;
    }
    return total;
}
1433
-
1434
/* Bitset location of a node, computed from an explicit tree order
 * (used during grow/shrink when t->order is in flux). */
static inline struct internal_position buddy_tree_internal_position_order(
        size_t tree_order, struct buddy_tree_pos pos) {
    struct internal_position p;
    size_t total_offset, local_index;

    /* Nodes closer to the leaves use fewer bits */
    p.local_offset = tree_order - buddy_tree_depth(pos) + 1;
    total_offset = size_for_order((uint8_t) tree_order, (uint8_t) p.local_offset);
    local_index = buddy_tree_index_internal(pos);
    p.bitset_location = total_offset + (p.local_offset * local_index);
    return p;
}
1445
-
1446
/* Bitset location of a node using the tree's own order and the
 * memoized size_for_order table (the hot-path variant). */
static inline struct internal_position buddy_tree_internal_position_tree(
        struct buddy_tree *t, struct buddy_tree_pos pos) {
    struct internal_position p;
    size_t total_offset, local_index;

    p.local_offset = t->order - buddy_tree_depth(pos) + 1;
    total_offset = buddy_tree_size_for_order(t, (uint8_t) p.local_offset);
    local_index = buddy_tree_index_internal(pos);
    p.bitset_location = total_offset + (p.local_offset * local_index);
    return p;
}
1457
-
1458
/* Bytes needed for a tree of the given order: header + status bitset
 * + memoized size_for_order table. Must stay consistent with
 * buddy_tree_populate_size_for_order's layout computation. */
static size_t buddy_tree_sizeof(uint8_t order) {
    size_t tree_size, bitset_size, size_for_order_size;

    tree_size = sizeof(struct buddy_tree);
    /* Account for the bitset */
    bitset_size = bitset_sizeof(size_for_order(order, 0));
    /* NOTE(review): adds the remainder instead of (sizeof(size_t) - remainder),
     * so this does not round to a multiple of sizeof(size_t); it matches the
     * identical computation in buddy_tree_populate_size_for_order — verify
     * before changing either */
    if (bitset_size % sizeof(size_t)) {
        bitset_size += (bitset_size % sizeof(size_t));
    }
    /* Account for the size_for_order memoization */
    size_for_order_size = ((order+2) * sizeof(size_t));
    return tree_size + bitset_size + size_for_order_size;
}
1471
-
1472
/* Initialize a zeroed tree of the given order at `at` (all slots free). */
static struct buddy_tree *buddy_tree_init(unsigned char *at, uint8_t order) {
    size_t size = buddy_tree_sizeof(order);
    struct buddy_tree *t = (struct buddy_tree*) at;
    memset(at, 0, size);
    t->order = order;
    t->upper_pos_bound = two_to_the_power_of(t->order);
    buddy_tree_populate_size_for_order(t);
    return t;
}
1481
-
1482
- #ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
1483
/* Flag the tree so write_to_internal_position reports every bitset change. */
static void buddy_tree_enable_change_tracking(struct buddy_tree* t) {
    t->flags |= BUDDY_TREE_CHANGE_TRACKING;
}
1486
- #endif /* BUDDY_EXPERIMENTAL_CHANGE_TRACKING */
1487
-
1488
- static void buddy_tree_resize(struct buddy_tree *t, uint8_t desired_order) {
1489
- if (t->order == desired_order) {
1490
- return;
1491
- }
1492
- if (t->order < desired_order) {
1493
- buddy_tree_grow(t, desired_order);
1494
- } else {
1495
- buddy_tree_shrink(t, desired_order);
1496
- }
1497
- }
1498
-
1499
/*
 * Grow the tree one order at a time up to desired_order. For each step
 * every existing row of status bits is shifted right into the position it
 * occupies in the larger layout (bottom row first), the newly-exposed
 * right half of each row is cleared, and the root chain is refreshed.
 */
static void buddy_tree_grow(struct buddy_tree *t, uint8_t desired_order) {
    struct buddy_tree_pos pos;

    while (desired_order > t->order) {
        /* Grow the tree a single order at a time */
        size_t current_order = t->order;
        struct buddy_tree_pos current_pos = buddy_tree_leftmost_child_internal(current_order);
        struct buddy_tree_pos next_pos = buddy_tree_leftmost_child_internal(current_order + 1u);
        while(current_order) {
            /* Get handles into the rows at the tracked depth */
            struct internal_position current_internal = buddy_tree_internal_position_order(
                t->order, current_pos);
            struct internal_position next_internal = buddy_tree_internal_position_order(
                t->order + 1u, next_pos);

            /* There are this many nodes at the current level */
            size_t node_count = two_to_the_power_of(current_order - 1u);

            /* Transfer the bits*/
            bitset_shift_right(buddy_tree_bits(t),
                current_internal.bitset_location /* from here */,
                current_internal.bitset_location + (current_internal.local_offset * node_count) /* up to here */,
                next_internal.bitset_location - current_internal.bitset_location /* by */);

            /* Clear right section */
            bitset_clear_range(buddy_tree_bits(t),
                to_bitset_range(next_internal.bitset_location + (next_internal.local_offset * node_count),
                    next_internal.bitset_location + (next_internal.local_offset * node_count * 2) - 1));

            /* Handle the upper level */
            current_order -= 1u;
            current_pos = buddy_tree_parent(current_pos);
            next_pos = buddy_tree_parent(next_pos);
        }
        /* Advance the order and refresh the root */
        t->order += 1u;
        t->upper_pos_bound = two_to_the_power_of(t->order);
        buddy_tree_populate_size_for_order(t);

        /* Update the root */
        pos = buddy_tree_right_child(buddy_tree_root());
        update_parent_chain(t, pos, buddy_tree_internal_position_tree(t, pos), 0);
    }
}
1543
-
1544
/*
 * Shrink the tree one order at a time down to desired_order, keeping only
 * the left half of the arena. Each step shifts every row's left-half bits
 * up into the smaller layout. Stops silently if the right half is still
 * in use (buddy_tree_can_shrink fails).
 */
static void buddy_tree_shrink(struct buddy_tree *t, uint8_t desired_order) {
    size_t current_order, next_order, node_count;
    struct buddy_tree_pos left_start;
    struct internal_position current_internal, next_internal;

    while (desired_order < t->order) {
        if (!buddy_tree_can_shrink(t)) {
            return;
        }

        /* Shrink the tree a single order at a time */
        current_order = t->order;
        next_order = current_order - 1;

        left_start = buddy_tree_left_child(buddy_tree_root());
        while(buddy_tree_valid(t, left_start)) {
            /* Get handles into the rows at the tracked depth */
            current_internal = buddy_tree_internal_position_order(current_order, left_start);
            next_internal = buddy_tree_internal_position_order(next_order, buddy_tree_parent(left_start));

            /* There are this many nodes at the current level */
            node_count = two_to_the_power_of(left_start.depth - 1u);

            /* Transfer the bits*/
            bitset_shift_left(buddy_tree_bits(t),
                current_internal.bitset_location /* from here */,
                current_internal.bitset_location + (current_internal.local_offset * node_count / 2) /* up to here */,
                current_internal.bitset_location - next_internal.bitset_location/* at here */);

            /* Handle the lower level */
            left_start = buddy_tree_left_child(left_start);
        }

        /* Advance the order */
        t->order = (uint8_t) next_order;
        t->upper_pos_bound = two_to_the_power_of(t->order);
        buddy_tree_populate_size_for_order(t);
    }
}
1583
-
1584
/* A position is valid when its index is non-zero and within the tree bounds. */
static bool buddy_tree_valid(struct buddy_tree *t, struct buddy_tree_pos pos) {
    return pos.index && (pos.index < t->upper_pos_bound);
}
1587
-
1588
/* Accessor for the tree's current order (height). */
static uint8_t buddy_tree_order(struct buddy_tree *t) {
    return t->order;
}
1591
-
1592
/* The root position: heap-style index 1 at depth 1. */
static struct buddy_tree_pos buddy_tree_root(void) {
    struct buddy_tree_pos identity = { 1, 1 };
    return identity;
}
1596
-
1597
/* First leaf of the tree (leftmost node at maximum depth). */
static struct buddy_tree_pos buddy_tree_leftmost_child(struct buddy_tree *t) {
    return buddy_tree_leftmost_child_internal(t->order);
}
1600
-
1601
/* Leftmost leaf for an explicit order: heap index 2^(order-1) at depth == order. */
static struct buddy_tree_pos buddy_tree_leftmost_child_internal(size_t tree_order) {
    struct buddy_tree_pos result;
    result.index = two_to_the_power_of(tree_order - 1u);
    result.depth = tree_order;
    return result;
}
1607
-
1608
/* Depth of a position (1 == root). */
static inline size_t buddy_tree_depth(struct buddy_tree_pos pos) {
    return pos.depth;
}
1611
-
1612
/* Left child in heap numbering: index*2, one level deeper. */
static inline struct buddy_tree_pos buddy_tree_left_child(struct buddy_tree_pos pos) {
    pos.index *= 2;
    pos.depth++;
    return pos;
}
1617
-
1618
/* Right child in heap numbering: index*2+1, one level deeper. */
static inline struct buddy_tree_pos buddy_tree_right_child(struct buddy_tree_pos pos) {
    pos.index *= 2;
    pos.index++;
    pos.depth++;
    return pos;
}
1624
-
1625
/* Sibling node: flipping the lowest index bit swaps left/right child. */
static inline struct buddy_tree_pos buddy_tree_sibling(struct buddy_tree_pos pos) {
    pos.index ^= 1;
    return pos;
}
1629
-
1630
/* Parent in heap numbering: index/2, one level up. */
static inline struct buddy_tree_pos buddy_tree_parent(struct buddy_tree_pos pos) {
    pos.index /= 2;
    pos.depth--;
    return pos;
}
1635
-
1636
/*
 * Next node to the right at the same depth, or INVALID_POS when pos is
 * the rightmost node of its row. For an all-ones index (rightmost node)
 * incrementing carries into a new top bit, making (index+1)^index exceed
 * index — that is the overflow test below.
 */
static struct buddy_tree_pos buddy_tree_right_adjacent(struct buddy_tree_pos pos) {
    if (((pos.index + 1) ^ pos.index) > pos.index) {
        return INVALID_POS;
    }
    pos.index++;
    return pos;
}
1643
-
1644
/* Zero-based index of a node within its row of siblings. */
static size_t buddy_tree_index(struct buddy_tree_pos pos) {
    return buddy_tree_index_internal(pos);
}
1647
-
1648
static inline size_t buddy_tree_index_internal(struct buddy_tree_pos pos) {
    /* Clear out the highest bit, this gives us the index
     * in a row of sibling nodes (heap index minus the row's base 2^(depth-1)) */
    size_t mask = two_to_the_power_of(pos.depth - 1u);
    size_t result = pos.index & ~mask;
    return result;
}
1655
-
1656
/* The status bitset is stored immediately after the tree header. */
static inline unsigned char *buddy_tree_bits(struct buddy_tree *t) {
    return ((unsigned char *) t) + sizeof(*t);
}
1659
-
1660
/*
 * (Re)build the memoized size_for_order table that follows the bitset,
 * one entry per possible `to` value. Layout must mirror buddy_tree_sizeof.
 */
static void buddy_tree_populate_size_for_order(struct buddy_tree *t) {
    size_t bitset_offset = bitset_sizeof(size_for_order(t->order, 0));
    /* NOTE(review): adds the remainder instead of rounding up to a
     * multiple of sizeof(size_t); intentional only insofar as it matches
     * buddy_tree_sizeof — keep the two in sync */
    if (bitset_offset % sizeof(size_t)) {
        bitset_offset += (bitset_offset % sizeof(size_t));
    }
    t->size_for_order_offset = bitset_offset / sizeof(size_t);
    t->size_for_order_offset++;
    for (size_t i = 0; i <= t->order; i++) {
        *((size_t *)(((unsigned char *) t) + sizeof(*t)) + t->size_for_order_offset + i) = size_for_order(t->order, (uint8_t) i);
    }
}
1671
-
1672
/* Memoized lookup of size_for_order(t->order, to) from the table built above. */
static inline size_t buddy_tree_size_for_order(struct buddy_tree *t,
        uint8_t to) {
    return *((size_t *)(((unsigned char *) t) + sizeof(*t)) + t->size_for_order_offset + to);
}
1676
-
1677
/*
 * Store a node's status as a unary run of set bits: clear its
 * local_offset-wide field, then set the first `value` bits.
 */
static void write_to_internal_position(struct buddy_tree* t, struct internal_position pos, size_t value) {
    unsigned char *bitset = buddy_tree_bits(t);
    struct bitset_range clear_range = to_bitset_range(pos.bitset_location, pos.bitset_location + pos.local_offset - 1);

    bitset_clear_range(bitset, clear_range);
    if (value) {
        bitset_set_range(bitset, to_bitset_range(pos.bitset_location, pos.bitset_location+value-1));
    }

#ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
    /* Ignore the same bucket condition - we don't care if we track one more byte here */
    buddy_tree_track_change(t, bitset, clear_range.to_bucket - clear_range.from_bucket + 1);
#endif
}
1691
-
1692
/* Decode a node's unary-encoded status: 0 if the first bit is clear,
 * otherwise the count of set bits in its field. */
static size_t read_from_internal_position(unsigned char *bitset, struct internal_position pos) {
    if (! bitset_test(bitset, pos.bitset_location)) {
        return 0; /* Fast test without complete extraction */
    }
    return bitset_count_range(bitset, to_bitset_range(pos.bitset_location, pos.bitset_location+pos.local_offset-1));
}
1698
-
1699
/* True when the node's unary status is at least `value` (its value-th bit is set). */
static inline unsigned char compare_with_internal_position(unsigned char *bitset, struct internal_position pos, size_t value) {
    return bitset_test(bitset, pos.bitset_location+value-1);
}
1702
-
1703
/* Leaf-level interval covered by a node: its leftmost and rightmost descendant leaves. */
static struct buddy_tree_interval to_buddy_tree_interval(struct buddy_tree *t, struct buddy_tree_pos pos) {
    struct buddy_tree_interval result;
    size_t depth;

    result.from = pos;
    result.to = pos;
    depth = pos.depth;
    while (depth != t->order) {
        result.from = buddy_tree_left_child(result.from);
        result.to = buddy_tree_right_child(result.to);
        depth += 1;
    }
    return result;
}
1717
-
1718
- static bool buddy_tree_interval_contains(struct buddy_tree_interval outer,
1719
- struct buddy_tree_interval inner) {
1720
- return (inner.from.index >= outer.from.index)
1721
- && (inner.from.index <= outer.to.index)
1722
- && (inner.to.index >= outer.from.index)
1723
- && (inner.to.index <= outer.to.index);
1724
- }
1725
-
1726
/* Fresh walk state positioned at the root with all flags cleared. */
static struct buddy_tree_walk_state buddy_tree_walk_state_root(void) {
    struct buddy_tree_walk_state state;
    memset(&state, 0, sizeof(state));
    state.starting_pos = buddy_tree_root();
    state.current_pos = buddy_tree_root();
    return state;
}
1733
-
1734
/*
 * Advance a depth-first traversal by one node. Setting state->going_up
 * before calling skips the current node's subtree. Returns 0 once the
 * walk has returned to its starting position.
 */
static unsigned int buddy_tree_walk(struct buddy_tree *t, struct buddy_tree_walk_state *state) {
    do {
        if (state->going_up) {
            if (state->current_pos.index == state->starting_pos.index) {
                state->walk_done = 1;
                state->going_up = 0;
            } else if (state->current_pos.index & 1u) {
                /* Right children (odd index) have been fully visited */
                state->current_pos = buddy_tree_parent(state->current_pos); /* Ascend */
            } else {
                state->current_pos = buddy_tree_right_adjacent(state->current_pos); /* Descend right */
                state->going_up = 0;
            }
        } else if (buddy_tree_valid(t, buddy_tree_left_child(state->current_pos))) {
            /* Descend left */
            state->current_pos = buddy_tree_left_child(state->current_pos);
        } else { /* Ascend */
            state->going_up = 1;
        }
    } while(state->going_up);
    return ! state->walk_done;
}
1755
-
1756
- static size_t buddy_tree_status(struct buddy_tree *t, struct buddy_tree_pos pos) {
1757
- struct internal_position internal = buddy_tree_internal_position_tree(t, pos);
1758
- return read_from_internal_position(buddy_tree_bits(t), internal);
1759
- }
1760
-
1761
- static void buddy_tree_mark(struct buddy_tree *t, struct buddy_tree_pos pos) {
1762
- /* Calling mark on a used position is a bug in caller */
1763
- struct internal_position internal = buddy_tree_internal_position_tree(t, pos);
1764
-
1765
- /* Mark the node as used */
1766
- write_to_internal_position(t, internal, internal.local_offset);
1767
-
1768
- /* Update the tree upwards */
1769
- update_parent_chain(t, pos, internal, internal.local_offset);
1770
- }
1771
-
1772
- static enum buddy_tree_release_status buddy_tree_release(struct buddy_tree *t, struct buddy_tree_pos pos) {
1773
- /* Calling release on an unused or a partially-used position is a bug in caller */
1774
- struct internal_position internal = buddy_tree_internal_position_tree(t, pos);
1775
-
1776
- if (read_from_internal_position(buddy_tree_bits(t), internal) != internal.local_offset) {
1777
- return BUDDY_TREE_RELEASE_FAIL_PARTIALLY_USED;
1778
- }
1779
-
1780
- /* Mark the node as unused */
1781
- write_to_internal_position(t, internal, 0);
1782
-
1783
- /* Update the tree upwards */
1784
- update_parent_chain(t, pos, internal, 0);
1785
-
1786
- return BUDDY_TREE_RELEASE_SUCCESS;
1787
- }
1788
-
1789
- static void update_parent_chain(struct buddy_tree *t, struct buddy_tree_pos pos,
1790
- struct internal_position pos_internal, size_t size_current) {
1791
- size_t size_sibling, size_parent, target_parent;
1792
- unsigned char *bits = buddy_tree_bits(t);
1793
-
1794
- while (pos.index != 1) {
1795
- pos_internal.bitset_location += pos_internal.local_offset
1796
- - (2 * pos_internal.local_offset * (pos.index & 1u));
1797
- size_sibling = read_from_internal_position(bits, pos_internal);
1798
-
1799
- pos = buddy_tree_parent(pos);
1800
- pos_internal = buddy_tree_internal_position_tree(t, pos);
1801
- size_parent = read_from_internal_position(bits, pos_internal);
1802
-
1803
- target_parent = (size_current || size_sibling)
1804
- * ((size_current <= size_sibling ? size_current : size_sibling) + 1);
1805
- if (target_parent == size_parent) {
1806
- return;
1807
- }
1808
-
1809
- write_to_internal_position(t, pos_internal, target_parent);
1810
- size_current = target_parent;
1811
- };
1812
- }
1813
-
1814
- static struct buddy_tree_pos buddy_tree_find_free(struct buddy_tree *t, uint8_t target_depth) {
1815
- struct buddy_tree_pos current_pos, left_pos, right_pos;
1816
- uint8_t target_status;
1817
- size_t current_depth, right_status;
1818
- struct internal_position left_internal, right_internal;
1819
- unsigned char *tree_bits;
1820
-
1821
- current_pos = buddy_tree_root();
1822
- target_status = target_depth - 1;
1823
- current_depth = buddy_tree_depth(current_pos);
1824
- if (buddy_tree_status(t, current_pos) > target_status) {
1825
- return INVALID_POS; /* No position available down the tree */
1826
- }
1827
- tree_bits = buddy_tree_bits(t);
1828
- while (current_depth != target_depth) {
1829
- /* Advance criteria */
1830
- target_status -= 1;
1831
- current_depth += 1;
1832
-
1833
- left_pos = buddy_tree_left_child(current_pos);
1834
- right_pos = buddy_tree_sibling(left_pos);
1835
-
1836
- left_internal = buddy_tree_internal_position_tree(t, left_pos);
1837
-
1838
- right_internal = left_internal;
1839
- right_internal.bitset_location += right_internal.local_offset; /* advance to the right */
1840
-
1841
- if (compare_with_internal_position(tree_bits, left_internal, target_status+1)) { /* left branch is busy, pick right */
1842
- current_pos = right_pos;
1843
- } else if (compare_with_internal_position(tree_bits, right_internal, target_status+1)) { /* right branch is busy, pick left */
1844
- current_pos = left_pos;
1845
- } else {
1846
- /* One of the child nodes must be read in order to compare it to its sibling. */
1847
- right_status = read_from_internal_position(tree_bits, right_internal);
1848
- if (right_status) {
1849
- if (compare_with_internal_position(tree_bits, left_internal, right_status)) {
1850
- current_pos = left_pos; /* Left is equal or more busy than right, prefer left */
1851
- } else {
1852
- current_pos = right_pos;
1853
- }
1854
- } else { /* Right is empty, prefer left */
1855
- current_pos = left_pos;
1856
- }
1857
- }
1858
- }
1859
- return current_pos;
1860
- }
1861
-
1862
- static bool buddy_tree_is_free(struct buddy_tree *t, struct buddy_tree_pos pos) {
1863
- if (buddy_tree_status(t, pos)) {
1864
- return false;
1865
- }
1866
- pos = buddy_tree_parent(pos);
1867
- while(buddy_tree_valid(t, pos)) {
1868
- struct internal_position internal = buddy_tree_internal_position_tree(t, pos);
1869
- size_t value = read_from_internal_position(buddy_tree_bits(t), internal);
1870
- if (value) {
1871
- return value != internal.local_offset;
1872
- }
1873
- pos = buddy_tree_parent(pos);
1874
- }
1875
- return true;
1876
- }
1877
-
1878
- static bool buddy_tree_can_shrink(struct buddy_tree *t) {
1879
- struct internal_position root_internal;
1880
- size_t root_value;
1881
-
1882
- if (buddy_tree_status(t, buddy_tree_right_child(buddy_tree_root())) != 0) {
1883
- return false; /* Refusing to shrink with right subtree still used! */
1884
- }
1885
- root_internal = buddy_tree_internal_position_tree(t, buddy_tree_root());
1886
- root_value = read_from_internal_position(buddy_tree_bits(t), root_internal);
1887
- if (root_value == root_internal.local_offset) {
1888
- return false; /* Refusing to shrink with the root fully-allocated! */
1889
- }
1890
- return true;
1891
- }
1892
-
1893
- #ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
1894
- static struct buddy* buddy_tree_buddy(struct buddy_tree* t) {
1895
- return (struct buddy*)(((unsigned char*)t) - sizeof(struct buddy));
1896
- }
1897
- #endif /* BUDDY_EXPERIMENTAL_CHANGE_TRACKING */
1898
-
1899
- static void buddy_tree_debug(struct buddy_tree *t, struct buddy_tree_pos pos,
1900
- size_t start_size) {
1901
- struct buddy_tree_walk_state state = buddy_tree_walk_state_root();
1902
- state.current_pos = pos;
1903
- do {
1904
- struct internal_position pos_internal = buddy_tree_internal_position_tree(t, state.current_pos);
1905
- size_t pos_status = read_from_internal_position(buddy_tree_bits(t), pos_internal);
1906
- size_t pos_size = start_size >> ((buddy_tree_depth(state.current_pos) - 1u) % ((sizeof(size_t) * CHAR_BIT)-1));
1907
- BUDDY_PRINTF("%.*s",
1908
- (int) buddy_tree_depth(state.current_pos),
1909
- " ");
1910
- BUDDY_PRINTF("pos index: %zu pos depth: %zu status: %zu bitset-len: %zu bitset-at: %zu",
1911
- state.current_pos.index, state.current_pos.depth, pos_status,
1912
- pos_internal.local_offset, pos_internal.bitset_location);
1913
- if (pos_status == pos_internal.local_offset) {
1914
- BUDDY_PRINTF(" size: %zu", pos_size);
1915
- }
1916
- BUDDY_PRINTF("\n");
1917
- } while (buddy_tree_walk(t, &state));
1918
- }
1919
-
1920
- unsigned int buddy_tree_check_invariant(struct buddy_tree *t, struct buddy_tree_pos pos) {
1921
- unsigned int fail = 0;
1922
- struct buddy_tree_walk_state state = buddy_tree_walk_state_root();
1923
- state.current_pos = pos;
1924
- do {
1925
- struct internal_position current_internal = buddy_tree_internal_position_tree(t, pos);
1926
- size_t current_status = read_from_internal_position(buddy_tree_bits(t), current_internal);
1927
- size_t left_child_status = buddy_tree_status(t, buddy_tree_left_child(pos));
1928
- size_t right_child_status = buddy_tree_status(t, buddy_tree_right_child(pos));
1929
- unsigned int violated = 0;
1930
-
1931
- if (left_child_status || right_child_status) {
1932
- size_t min = left_child_status <= right_child_status
1933
- ? left_child_status : right_child_status;
1934
- if (current_status != (min + 1)) {
1935
- violated = 1;
1936
- }
1937
- } else {
1938
- if ((current_status > 0) && (current_status < current_internal.local_offset)) {
1939
- violated = 1;
1940
- }
1941
- }
1942
-
1943
- if (violated) {
1944
- fail = 1;
1945
- BUDDY_PRINTF("invariant violation at position [ index: %zu depth: %zu ]!\n", pos.index, pos.depth);
1946
- BUDDY_PRINTF("current: %zu left %zu right %zu max %zu\n",
1947
- current_status, left_child_status, right_child_status, current_internal.local_offset);
1948
- }
1949
-
1950
- } while (buddy_tree_walk(t, &state));
1951
- return fail;
1952
- }
1953
-
1954
- /*
1955
- * Calculate tree fragmentation based on free slots.
1956
- * Based on https://asawicki.info/news_1757_a_metric_for_memory_fragmentation
1957
- */
1958
- static unsigned char buddy_tree_fragmentation(struct buddy_tree *t) {
1959
- const unsigned char fractional_bits = 8;
1960
- const unsigned char fractional_mask = 255;
1961
-
1962
- uint8_t tree_order;
1963
- size_t root_status, quality, total_free_size, virtual_size, quality_percent;
1964
- struct buddy_tree_walk_state state;
1965
-
1966
- tree_order = buddy_tree_order(t);
1967
- root_status = buddy_tree_status(t, buddy_tree_root());
1968
- if (root_status == 0) { /* Empty tree */
1969
- return 0;
1970
- }
1971
-
1972
- quality = 0;
1973
- total_free_size = 0;
1974
-
1975
- state = buddy_tree_walk_state_root();
1976
- do {
1977
- size_t pos_status = buddy_tree_status(t, state.current_pos);
1978
- if (pos_status == 0) {
1979
- /* Empty node, process */
1980
- virtual_size = two_to_the_power_of((tree_order - state.current_pos.depth) % ((sizeof(size_t) * CHAR_BIT)-1));
1981
- quality += (virtual_size * virtual_size);
1982
- total_free_size += virtual_size;
1983
- /* Ascend */
1984
- state.going_up = 1;
1985
- } else if (pos_status == (tree_order - state.current_pos.depth + 1)) {
1986
- /* Busy node, ascend */
1987
- state.going_up = 1;
1988
- }
1989
- } while (buddy_tree_walk(t, &state));
1990
-
1991
- if (total_free_size == 0) { /* Fully-allocated tree */
1992
- return 0;
1993
- }
1994
-
1995
- quality_percent = (integer_square_root(quality) << fractional_bits) / total_free_size;
1996
- quality_percent *= quality_percent;
1997
- quality_percent >>= fractional_bits;
1998
- return fractional_mask - (quality_percent & fractional_mask);
1999
- }
2000
-
2001
- #ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
2002
- static inline void buddy_tree_track_change(struct buddy_tree* t, unsigned char* addr, size_t length) {
2003
- struct buddy_change_tracker *header;
2004
-
2005
- if (!(t->flags && BUDDY_TREE_CHANGE_TRACKING)) {
2006
- return;
2007
- }
2008
-
2009
- header = (struct buddy_change_tracker *) buddy_main(buddy_tree_buddy(t));
2010
- header->tracker(header->context, addr, length);
2011
- }
2012
- #endif /* BUDDY_EXPERIMENTAL_CHANGE_TRACKING */
2013
-
2014
- /*
2015
- * A char-backed bitset implementation
2016
- */
2017
-
2018
- size_t bitset_sizeof(size_t elements) {
2019
- return ((elements) + CHAR_BIT - 1u) / CHAR_BIT;
2020
- }
2021
-
2022
- static uint8_t bitset_index_mask[8] = {1, 2, 4, 8, 16, 32, 64, 128};
2023
-
2024
- static inline void bitset_set(unsigned char *bitset, size_t pos) {
2025
- size_t bucket = pos / CHAR_BIT;
2026
- size_t index = pos % CHAR_BIT;
2027
- bitset[bucket] |= bitset_index_mask[index];
2028
- }
2029
-
2030
- static inline void bitset_clear(unsigned char *bitset, size_t pos) {
2031
- size_t bucket = pos / CHAR_BIT;
2032
- size_t index = pos % CHAR_BIT;
2033
- bitset[bucket] &= ~bitset_index_mask[index];
2034
- }
2035
-
2036
- static inline bool bitset_test(const unsigned char *bitset, size_t pos) {
2037
- size_t bucket = pos / CHAR_BIT;
2038
- size_t index = pos % CHAR_BIT;
2039
- return bitset[bucket] & bitset_index_mask[index];
2040
- }
2041
-
2042
- static const uint8_t bitset_char_mask[8][8] = {
2043
- {1, 3, 7, 15, 31, 63, 127, 255},
2044
- {0, 2, 6, 14, 30, 62, 126, 254},
2045
- {0, 0, 4, 12, 28, 60, 124, 252},
2046
- {0, 0, 0, 8, 24, 56, 120, 248},
2047
- {0, 0, 0, 0, 16, 48, 112, 240},
2048
- {0, 0, 0, 0, 0, 32, 96, 224},
2049
- {0, 0, 0, 0, 0, 0, 64, 192},
2050
- {0, 0, 0, 0, 0, 0, 0, 128},
2051
- };
2052
-
2053
- static inline struct bitset_range to_bitset_range(size_t from_pos, size_t to_pos) {
2054
- struct bitset_range range = {0};
2055
- range.from_bucket = from_pos / CHAR_BIT;
2056
- range.to_bucket = to_pos / CHAR_BIT;
2057
-
2058
- range.from_index = from_pos % CHAR_BIT;
2059
- range.to_index = to_pos % CHAR_BIT;
2060
- return range;
2061
- }
2062
-
2063
- static void bitset_set_range(unsigned char *bitset, struct bitset_range range) {
2064
- if (range.from_bucket == range.to_bucket) {
2065
- bitset[range.from_bucket] |= bitset_char_mask[range.from_index][range.to_index];
2066
- } else {
2067
- bitset[range.from_bucket] |= bitset_char_mask[range.from_index][7];
2068
- bitset[range.to_bucket] |= bitset_char_mask[0][range.to_index];
2069
- while(++range.from_bucket != range.to_bucket) {
2070
- bitset[range.from_bucket] = 255u;
2071
- }
2072
- }
2073
- }
2074
-
2075
- static void bitset_clear_range(unsigned char* bitset, struct bitset_range range) {
2076
- if (range.from_bucket == range.to_bucket) {
2077
- bitset[range.from_bucket] &= ~bitset_char_mask[range.from_index][range.to_index];
2078
- }
2079
- else {
2080
- bitset[range.from_bucket] &= ~bitset_char_mask[range.from_index][7];
2081
- bitset[range.to_bucket] &= ~bitset_char_mask[0][range.to_index];
2082
- while (++range.from_bucket != range.to_bucket) {
2083
- bitset[range.from_bucket] = 0;
2084
- }
2085
- }
2086
- }
2087
-
2088
- static size_t bitset_count_range(unsigned char *bitset, struct bitset_range range) {
2089
- size_t result;
2090
-
2091
- if (range.from_bucket == range.to_bucket) {
2092
- return popcount_byte(bitset[range.from_bucket] & bitset_char_mask[range.from_index][range.to_index]);
2093
- }
2094
-
2095
- result = popcount_byte(bitset[range.from_bucket] & bitset_char_mask[range.from_index][7])
2096
- + popcount_byte(bitset[range.to_bucket] & bitset_char_mask[0][range.to_index]);
2097
- while(++range.from_bucket != range.to_bucket) {
2098
- result += popcount_byte(bitset[range.from_bucket]);
2099
- }
2100
- return result;
2101
- }
2102
-
2103
- static void bitset_shift_left(unsigned char *bitset, size_t from_pos, size_t to_pos, size_t by) {
2104
- size_t length = to_pos - from_pos;
2105
- for(size_t i = 0; i < length; i++) {
2106
- size_t at = from_pos + i;
2107
- if (bitset_test(bitset, at)) {
2108
- bitset_set(bitset, at-by);
2109
- } else {
2110
- bitset_clear(bitset, at-by);
2111
- }
2112
- bitset_clear(bitset, at);
2113
- }
2114
- }
2115
-
2116
- static void bitset_shift_right(unsigned char *bitset, size_t from_pos, size_t to_pos, size_t by) {
2117
- ssize_t length = (ssize_t) to_pos - (ssize_t) from_pos;
2118
- while (length >= 0) {
2119
- size_t at = from_pos + (size_t) length;
2120
- if (bitset_test(bitset, at)) {
2121
- bitset_set(bitset, at+by);
2122
- } else {
2123
- bitset_clear(bitset, at+by);
2124
- }
2125
- bitset_clear(bitset, at);
2126
- length -= 1;
2127
- }
2128
- }
2129
-
2130
- void bitset_debug(unsigned char *bitset, size_t length) {
2131
- for (size_t i = 0; i < length; i++) {
2132
- BUDDY_PRINTF("%zu: %d\n", i, bitset_test(bitset, i) > 0);
2133
- }
2134
- }
2135
-
2136
- /*
2137
- Bits
2138
- */
2139
-
2140
- static inline unsigned int popcount_byte(unsigned char b) {
2141
- static const unsigned char popcount_lookup[256] = {
2142
- 0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,
2143
- 1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,
2144
- 1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,
2145
- 2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,
2146
- 1,2,2,3,2,3,3,4,2,3,3,4,3,4,4,5,2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,
2147
- 2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,
2148
- 2,3,3,4,3,4,4,5,3,4,4,5,4,5,5,6,3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,
2149
- 3,4,4,5,4,5,5,6,4,5,5,6,5,6,6,7,4,5,5,6,5,6,6,7,5,6,6,7,6,7,7,8
2150
- };
2151
- return popcount_lookup[b];
2152
- }
2153
-
2154
- static unsigned char count_trailing_zeroes(size_t val) {
2155
- /* Implementation from https://www.chessprogramming.org/BitScan */
2156
- static const signed char lookup67[67+1] = {
2157
- 64, 0, 1, 39, 2, 15, 40, 23,
2158
- 3, 12, 16, 59, 41, 19, 24, 54,
2159
- 4, -1, 13, 10, 17, 62, 60, 28,
2160
- 42, 30, 20, 51, 25, 44, 55, 47,
2161
- 5, 32, -1, 38, 14, 22, 11, 58,
2162
- 18, 53, 63, 9, 61, 27, 29, 50,
2163
- 43, 46, 31, 37, 21, 57, 52, 8,
2164
- 26, 49, 45, 36, 56, 7, 48, 35,
2165
- 6, 34, 33, -1 };
2166
- return ((unsigned char) lookup67[(val & -val) % 67]);
2167
- }
2168
-
2169
- /* Returns the highest set bit position for the given value. Returns zero for zero. */
2170
- static size_t highest_bit_position(size_t value) {
2171
- size_t result = 0;
2172
- /* some other millennia when size_t becomes 128-bit this will break :) */
2173
- #if SIZE_MAX == 0xFFFFFFFFFFFFFFFF
2174
- const size_t all_set[] = {4294967295, 65535, 255, 15, 7, 3, 1};
2175
- const size_t count[] = {32, 16, 8, 4, 2, 1, 1};
2176
- #elif SIZE_MAX == 0xFFFFFFFF
2177
- const size_t all_set[] = {65535, 255, 15, 7, 3, 1};
2178
- const size_t count[] = {16, 8, 4, 2, 1, 1};
2179
- #else
2180
- #error Unsupported platform
2181
- #endif
2182
-
2183
- for (size_t i = 0; i < (sizeof all_set / sizeof *all_set); i++) {
2184
- if (value >= all_set[i]) {
2185
- value >>= count[i];
2186
- result += count[i];
2187
- }
2188
- }
2189
- return result + value;
2190
- }
2191
-
2192
- static inline size_t ceiling_power_of_two(size_t value) {
2193
- value += !value; /* branchless x -> { 1 for 0, x for x } */
2194
- return two_to_the_power_of(highest_bit_position(value + value - 1)-1);
2195
- }
2196
-
2197
- static inline size_t two_to_the_power_of(size_t order) {
2198
- return ((size_t)1) << order;
2199
- }
2200
-
2201
- static inline size_t integer_square_root(size_t op) {
2202
- /* by Martin Guy, 1985 - http://medialab.freaknet.org/martin/src/sqrt/ */
2203
- size_t result = 0;
2204
- size_t cursor = (SIZE_MAX - (SIZE_MAX >> 1)) >> 1; /* second-to-top bit set */
2205
- while (cursor > op) {
2206
- cursor >>= 2;
2207
- }
2208
- /* "cursor" starts at the highest power of four <= than the argument. */
2209
- while (cursor != 0) {
2210
- if (op >= result + cursor) {
2211
- op -= result + cursor;
2212
- result += 2 * cursor;
2213
- }
2214
- result >>= 1;
2215
- cursor >>= 2;
2216
- }
2217
- return result;
2218
- }
2219
-
2220
- #ifdef __cplusplus
2221
- #ifndef BUDDY_CPP_MANGLED
2222
- }
2223
- #endif
2224
- #endif
2225
-
2226
- #endif /* BUDDY_ALLOC_IMPLEMENTATION */
1
+ /*
2
+ * Copyright 2021 Stanislav Paskalev <spaskalev@protonmail.com>
3
+ *
4
+ * A binary buddy memory allocator
5
+ *
6
+ * To include and use it in your project do the following
7
+ * 1. Add buddy_alloc.h (this file) to your include directory
8
+ * 2. Include the header in places where you need to use the allocator
9
+ * 3. In one of your source files #define BUDDY_ALLOC_IMPLEMENTATION
10
+ * and then import the header. This will insert the implementation.
11
+ *
12
+ * Latest version is available at https://github.com/spaskalev/buddy_alloc
13
+ */
14
+
15
+ #ifndef BUDDY_ALLOC_H
16
+ #define BUDDY_ALLOC_H
17
+
18
+ #ifndef BUDDY_HEADER
19
+ #include <limits.h>
20
+ #include <stdbool.h>
21
+ #include <stddef.h>
22
+ #include <stdint.h>
23
+ #include <string.h>
24
+ #include <sys/types.h>
25
+ #ifndef BUDDY_PRINTF
26
+ #include <stdio.h>
27
+ #endif
28
+ #endif
29
+
30
+ #ifdef __cplusplus
31
+ #ifndef BUDDY_CPP_MANGLED
32
+ extern "C" {
33
+ #endif
34
+ #endif
35
+
36
+ struct buddy;
37
+
38
+ /* Returns the size of a buddy required to manage a block of the specified size */
39
+ size_t buddy_sizeof(size_t memory_size);
40
+
41
+ /*
42
+ * Returns the size of a buddy required to manage a block of the specified size
43
+ * using a non-default alignment.
44
+ */
45
+ size_t buddy_sizeof_alignment(size_t memory_size, size_t alignment);
46
+
47
+ /* Initializes a binary buddy memory allocator at the specified location */
48
+ struct buddy *buddy_init(unsigned char *at, unsigned char *main, size_t memory_size);
49
+
50
+ /* Initializes a binary buddy memory allocator at the specified location using a non-default alignment */
51
+ struct buddy *buddy_init_alignment(unsigned char *at, unsigned char *main, size_t memory_size, size_t alignment);
52
+
53
+ /*
54
+ * Initializes a binary buddy memory allocator embedded in the specified arena.
55
+ * The arena's capacity is reduced to account for the allocator metadata.
56
+ */
57
+ struct buddy *buddy_embed(unsigned char *main, size_t memory_size);
58
+
59
+ /*
60
+ * Returns the address of a previously-created buddy allocator at the arena.
61
+ * Use to get a new handle to the allocator when the arena is moved or copied.
62
+ */
63
+ struct buddy *buddy_get_embed_at(unsigned char *main, size_t memory_size);
64
+
65
+ /*
66
+ * Initializes a binary buddy memory allocator embedded in the specified arena
67
+ * using a non-default alignment.
68
+ * The arena's capacity is reduced to account for the allocator metadata.
69
+ */
70
+ struct buddy *buddy_embed_alignment(unsigned char *main, size_t memory_size, size_t alignment);
71
+
72
+ /*
73
+ * Returns the address of a previously-created buddy allocator at the arena.
74
+ * Use to get a new handle to the allocator when the arena is moved or copied.
75
+ */
76
+ struct buddy *buddy_get_embed_at_alignment(unsigned char *main, size_t memory_size, size_t alignment);
77
+
78
+ /*
79
+ * Resizes the arena and allocator metadata to a new size.
80
+ *
81
+ * Existing allocations are preserved. If an allocation is to fall outside
82
+ * of the arena after a downsizing the resize operation fails.
83
+ *
84
+ * Returns a pointer to allocator on successful resize. This will be
85
+ * the same pointer when the allocator is external to the arena. If the
86
+ * allocator is embedded in the arena the old pointer to the allocator
87
+ * must not be used after resizing!
88
+ *
89
+ * Returns NULL on failure. The allocations and allocator pointer
90
+ * are preserved.
91
+ */
92
+ struct buddy *buddy_resize(struct buddy *buddy, size_t new_memory_size);
93
+
94
+ /* Tests if the arena can be shrunk in half */
95
+ bool buddy_can_shrink(struct buddy *buddy);
96
+
97
+ /* Tests if the arena is completely empty */
98
+ bool buddy_is_empty(struct buddy *buddy);
99
+
100
+ /* Tests if the arena is completely full */
101
+ bool buddy_is_full(struct buddy *buddy);
102
+
103
+ /* Reports the arena size */
104
+ size_t buddy_arena_size(struct buddy *buddy);
105
+
106
+ /* Reports the arena's free size. Note that this is (often) not a continuous size
107
+ but the sum of all free slots in the buddy. */
108
+ size_t buddy_arena_free_size(struct buddy *buddy);
109
+
110
+ /*
111
+ * Allocation functions
112
+ */
113
+
114
+ /* Use the specified buddy to allocate memory. See malloc. */
115
+ void *buddy_malloc(struct buddy *buddy, size_t requested_size);
116
+
117
+ /* Use the specified buddy to allocate zeroed memory. See calloc. */
118
+ void *buddy_calloc(struct buddy *buddy, size_t members_count, size_t member_size);
119
+
120
+ /* Realloc semantics are a joke. See realloc. */
121
+ void *buddy_realloc(struct buddy *buddy, void *ptr, size_t requested_size, bool ignore_data);
122
+
123
+ /* Realloc-like behavior that checks for overflow. See reallocarray */
124
+ void *buddy_reallocarray(struct buddy *buddy, void *ptr,
125
+ size_t members_count, size_t member_size, bool ignore_data);
126
+
127
+ /* Use the specified buddy to free memory. See free. */
128
+ void buddy_free(struct buddy *buddy, void *ptr);
129
+
130
+ enum buddy_safe_free_status {
131
+ BUDDY_SAFE_FREE_SUCCESS,
132
+ BUDDY_SAFE_FREE_BUDDY_IS_NULL,
133
+ BUDDY_SAFE_FREE_INVALID_ADDRESS,
134
+ BUDDY_SAFE_FREE_SIZE_MISMATCH,
135
+ BUDDY_SAFE_FREE_ALREADY_FREE,
136
+ };
137
+
138
+ /* A (safer) free with a size. Will not free unless the size fits the target span. */
139
+ enum buddy_safe_free_status buddy_safe_free(struct buddy *buddy, void *ptr, size_t requested_size);
140
+
141
+ /* Reports the allocation size. This could be bigger than the requested size,
142
+ it's the exact size that this allocation occupies in the arena.
143
+ Returns 0 on failure, and a non-zero value on success. */
144
+ size_t buddy_alloc_size(struct buddy *buddy, void *ptr);
145
+
146
+ /*
147
+ * Reservation functions
148
+ */
149
+
150
+ /* Reserve a range by marking it as allocated. Useful for dealing with physical memory. */
151
+ void buddy_reserve_range(struct buddy *buddy, void *ptr, size_t requested_size);
152
+
153
+ /* Release a reserved memory range. Unsafe, this can mess up other allocations if called with wrong parameters! */
154
+ void buddy_unsafe_release_range(struct buddy *buddy, void *ptr, size_t requested_size);
155
+
156
+ /*
157
+ * Iteration functions
158
+ */
159
+
160
+ /*
161
+ * Iterate through the free and allocated slots and call the provided function for each of them.
162
+ *
163
+ * If the provided function returns a non-NULL result the iteration stops and the result
164
+ * is returned to the caller. NULL is returned upon completing iteration without stopping.
165
+ *
166
+ * The iteration order is implementation-defined and may change between versions.
167
+ */
168
+ void *buddy_walk(struct buddy *buddy, void *(fp)(void *ctx, void *addr, size_t slot_size, size_t allocated), void *ctx);
169
+
170
+ /*
171
+ * Miscellaneous functions
172
+ */
173
+
174
+ /*
175
+ * Calculates the fragmentation in the allocator in a 0 - 255 range.
176
+ * NOTE: if you are using a non-power-of-two sized arena the maximum upper bound can be lower.
177
+ */
178
+ unsigned char buddy_fragmentation(struct buddy *buddy);
179
+
180
+ #ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
181
+ /*
182
+ * Enable change tracking for this allocator instance.
183
+ *
184
+ * This will store a header at the start of the arena that contains the function pointer (tracker) and
185
+ * a void* (context). The tracker will be called with the context, the start of changed memory and its length.
186
+ *
187
+ * This function MUST be called before any allocations are performed!
188
+ *
189
+ * Change tracking is in effect only for allocation functions, resizing functions are excluded from it.
190
+ *
191
+ * This is an experimental feature designed to facilitate integration with https://github.com/spaskalev/libpvl
192
+ *
193
+ * The API is not (yet) part of the allocator contract and its semantic versioning!
194
+ */
195
+ void buddy_enable_change_tracking(struct buddy* buddy, void* context, void (*tracker) (void*, unsigned char*, size_t));
196
+ #endif
197
+
198
+ #ifdef __cplusplus
199
+ #ifndef BUDDY_CPP_MANGLED
200
+ }
201
+ #endif
202
+ #endif
203
+
204
+ #endif /* BUDDY_ALLOC_H */
205
+
206
+ #ifdef BUDDY_ALLOC_IMPLEMENTATION
207
+ #undef BUDDY_ALLOC_IMPLEMENTATION
208
+
209
+ #ifdef __cplusplus
210
+ #ifndef BUDDY_CPP_MANGLED
211
+ extern "C" {
212
+ #endif
213
+ #endif
214
+
215
+ #ifndef BUDDY_ALLOC_ALIGN
216
+ #define BUDDY_ALLOC_ALIGN (sizeof(size_t) * CHAR_BIT)
217
+ #endif
218
+
219
+ #ifdef __cplusplus
220
+ #ifndef BUDDY_ALIGNOF
221
+ #define BUDDY_ALIGNOF(x) alignof(x)
222
+ #endif
223
+
224
+ #else /* not __cplusplus */
225
+
226
+ #ifndef BUDDY_ALIGNOF
227
+ #ifndef _MSC_VER
228
+ #define BUDDY_ALIGNOF(x) __alignof__(x)
229
+ #else
230
+ #define BUDDY_ALIGNOF(x) _Alignof(x)
231
+ #endif
232
+ #endif
233
+
234
+ #endif /* __cplusplus */
235
+
236
+ /* ssize_t is a POSIX extension */
237
+ #if defined(_MSC_VER) && !defined(_SSIZE_T_DEFINED)
238
+ #if _WIN64
239
+ typedef signed long long ssize_t;
240
+ #else
241
+ typedef signed long ssize_t;
242
+ #endif
243
+ #define _SSIZE_T_DEFINED
244
+ #endif
245
+
246
+ /* Support compiling with Pelles C */
247
+ #if defined(__POCC__) && defined(__POCC_TARGET__)
248
+ #if __POCC_TARGET__ == 3
249
+ typedef signed long long ssize_t;
250
+ #elif __POCC_TARGET__ == 1
251
+ typedef signed long ssize_t;
252
+ #else
253
+ #error Unknown POCC target
254
+ #endif
255
+ #endif
256
+
257
+ #ifndef BUDDY_PRINTF
258
+ #define BUDDY_PRINTF printf
259
+ #endif
260
+
261
+ /*
262
+ * Debug functions
263
+ */
264
+
265
+ /* Implementation defined */
266
+ void buddy_debug(struct buddy *buddy);
267
+
268
+ struct buddy_change_tracker {
269
+ void* context;
270
+ void (*tracker) (void*, unsigned char*, size_t);
271
+ };
272
+
273
+ struct buddy_tree;
274
+
275
+ struct buddy_tree_pos {
276
+ size_t index;
277
+ size_t depth;
278
+ };
279
+
280
+ #ifdef __cplusplus
281
+ #define INVALID_POS buddy_tree_pos{ 0, 0 }
282
+ #else
283
+ #define INVALID_POS ((struct buddy_tree_pos){ 0, 0 })
284
+ #endif
285
+
286
+ struct buddy_tree_interval {
287
+ struct buddy_tree_pos from;
288
+ struct buddy_tree_pos to;
289
+ };
290
+
291
+ struct buddy_tree_walk_state {
292
+ struct buddy_tree_pos starting_pos;
293
+ struct buddy_tree_pos current_pos;
294
+ unsigned int going_up;
295
+ unsigned int walk_done;
296
+ };
297
+
298
+ /*
299
+ * Initialization functions
300
+ */
301
+
302
+ /* Returns the size of a buddy allocation tree of the desired order*/
303
+ static size_t buddy_tree_sizeof(uint8_t order);
304
+
305
+ /* Initializes a buddy allocation tree at the specified location */
306
+ static struct buddy_tree *buddy_tree_init(unsigned char *at, uint8_t order);
307
+
308
+ /* Indicates whether this is a valid position for the tree */
309
+ static bool buddy_tree_valid(struct buddy_tree *t, struct buddy_tree_pos pos);
310
+
311
+ /* Returns the order of the specified buddy allocation tree */
312
+ static uint8_t buddy_tree_order(struct buddy_tree *t);
313
+
314
+ /*
315
+ * Resize the tree to the new order. When downsizing the left subtree is picked.
316
+ * Caller must ensure enough space for the new order.
317
+ */
318
+ static void buddy_tree_resize(struct buddy_tree *t, uint8_t desired_order);
319
+
320
+ #ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
321
+ /* Enable change tracking state for this tree. */
322
+ static void buddy_tree_enable_change_tracking(struct buddy_tree *t);
323
+ #endif /* BUDDY_EXPERIMENTAL_CHANGE_TRACKING */
324
+
325
+ /*
326
+ * Navigation functions
327
+ */
328
+
329
+ /* Returns a position at the root of a buddy allocation tree */
330
+ static struct buddy_tree_pos buddy_tree_root(void);
331
+
332
+ /* Returns the leftmost child node */
333
+ static struct buddy_tree_pos buddy_tree_leftmost_child(struct buddy_tree *t);
334
+
335
+ /* Returns the tree depth of the indicated position */
336
+ static inline size_t buddy_tree_depth(struct buddy_tree_pos pos);
337
+
338
+ /* Returns the left child node position. Does not check if that is a valid position */
339
+ static inline struct buddy_tree_pos buddy_tree_left_child(struct buddy_tree_pos pos);
340
+
341
+ /* Returns the right child node position. Does not check if that is a valid position */
342
+ static inline struct buddy_tree_pos buddy_tree_right_child(struct buddy_tree_pos pos);
343
+
344
+ /* Returns the current sibling node position. Does not check if that is a valid position */
345
+ static inline struct buddy_tree_pos buddy_tree_sibling(struct buddy_tree_pos pos);
346
+
347
+ /* Returns the parent node position or an invalid position if there is no parent node */
348
+ static inline struct buddy_tree_pos buddy_tree_parent(struct buddy_tree_pos pos);
349
+
350
+ /* Returns the right adjacent node position or an invalid position if there is no right adjacent node */
351
+ static struct buddy_tree_pos buddy_tree_right_adjacent(struct buddy_tree_pos pos);
352
+
353
+ /* Returns the at-depth index of the indicated position */
354
+ static size_t buddy_tree_index(struct buddy_tree_pos pos);
355
+
356
+ /* Return the interval of the deepest positions spanning the indicated position */
357
+ static struct buddy_tree_interval to_buddy_tree_interval(struct buddy_tree *t, struct buddy_tree_pos pos);
358
+
359
+ /* Checks if one interval contains another */
360
+ static bool buddy_tree_interval_contains(struct buddy_tree_interval outer,
361
+ struct buddy_tree_interval inner);
362
+
363
+ /* Return a walk state structure starting from the root of a tree */
364
+ static struct buddy_tree_walk_state buddy_tree_walk_state_root(void);
365
+
366
+ /* Walk the tree, keeping track in the provided state structure */
367
+ static unsigned int buddy_tree_walk(struct buddy_tree *t, struct buddy_tree_walk_state *state);
368
+
369
+
370
+ /*
371
+ * Allocation functions
372
+ */
373
+
374
+ /* Returns the free capacity at or underneath the indicated position */
375
+ static size_t buddy_tree_status(struct buddy_tree *t, struct buddy_tree_pos pos);
376
+
377
+ /* Marks the indicated position as allocated and propagates the change */
378
+ static void buddy_tree_mark(struct buddy_tree *t, struct buddy_tree_pos pos);
379
+
380
+ enum buddy_tree_release_status {
381
+ BUDDY_TREE_RELEASE_SUCCESS,
382
+ BUDDY_TREE_RELEASE_FAIL_PARTIALLY_USED,
383
+ };
384
+
385
+ /* Marks the indicated position as free and propagates the change */
386
+ static enum buddy_tree_release_status buddy_tree_release(struct buddy_tree *t, struct buddy_tree_pos pos);
387
+
388
+ /* Returns a free position at the specified depth or an invalid position */
389
+ static struct buddy_tree_pos buddy_tree_find_free(struct buddy_tree *t, uint8_t depth);
390
+
391
+ /* Tests if the indicated position is available for allocation */
392
+ static bool buddy_tree_is_free(struct buddy_tree *t, struct buddy_tree_pos pos);
393
+
394
+ /* Tests if the tree can be shrank in half */
395
+ static bool buddy_tree_can_shrink(struct buddy_tree *t);
396
+
397
+ /*
398
+ * Integration functions
399
+ */
400
+
401
+ #ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
402
+ /* Get a pointer to the parent buddy struct */
403
+ static struct buddy* buddy_tree_buddy(struct buddy_tree* t);
404
+ #endif /* BUDDY_EXPERIMENTAL_CHANGE_TRACKING */
405
+
406
+ /*
407
+ * Debug functions
408
+ */
409
+
410
+ /* Implementation defined */
411
+ static void buddy_tree_debug(struct buddy_tree *t, struct buddy_tree_pos pos, size_t start_size);
412
+
413
+ /* Implementation defined */
414
+ unsigned int buddy_tree_check_invariant(struct buddy_tree *t, struct buddy_tree_pos pos);
415
+
416
+ /* Report fragmentation in a 0 - 255 range */
417
+ static unsigned char buddy_tree_fragmentation(struct buddy_tree *t);
418
+
419
+ /*
420
+ * A char-backed bitset implementation
421
+ */
422
+
423
+ static size_t bitset_sizeof(size_t elements);
424
+
425
/*
 * A span of bits in the char-backed bitset, split into byte buckets
 * and bit indices within the boundary buckets. Used by the bulk
 * set/clear/count range operations.
 */
struct bitset_range {
    size_t from_bucket;  /* byte bucket where the range starts */
    size_t to_bucket;    /* byte bucket where the range ends */

    uint8_t from_index;  /* bit position within from_bucket */
    uint8_t to_index;    /* bit position within to_bucket */
};
432
+
433
+ static inline struct bitset_range to_bitset_range(size_t from_pos, size_t to_pos);
434
+
435
+ static void bitset_set_range(unsigned char *bitset, struct bitset_range range);
436
+
437
+ static void bitset_clear_range(unsigned char *bitset, struct bitset_range range);
438
+
439
+ static size_t bitset_count_range(unsigned char *bitset, struct bitset_range range);
440
+
441
+ static inline void bitset_set(unsigned char *bitset, size_t pos);
442
+
443
+ static inline void bitset_clear(unsigned char *bitset, size_t pos);
444
+
445
+ static inline bool bitset_test(const unsigned char *bitset, size_t pos);
446
+
447
+ static void bitset_shift_left(unsigned char *bitset, size_t from_pos, size_t to_pos, size_t by);
448
+
449
+ static void bitset_shift_right(unsigned char *bitset, size_t from_pos, size_t to_pos, size_t by);
450
+
451
+ /*
452
+ * Debug functions
453
+ */
454
+
455
+ /* Implementation defined */
456
+ void bitset_debug(unsigned char *bitset, size_t length);
457
+
458
+ /*
459
+ * Bits
460
+ */
461
+
462
+ /* Returns the number of set bits in the given byte */
463
+ static unsigned int popcount_byte(unsigned char b);
464
+
465
+ /* Count the number of trailing zeroes in the given value */
466
+ static unsigned char count_trailing_zeroes(size_t val);
467
+
468
+ /* Returns the index of the highest bit set (1-based) */
469
+ static size_t highest_bit_position(size_t value);
470
+
471
+ /* Returns the nearest larger or equal power of two */
472
+ static inline size_t ceiling_power_of_two(size_t value);
473
+
474
+ /* Return two to the power of order */
475
+ static inline size_t two_to_the_power_of(size_t order);
476
+
477
+ /*
478
+ * Math
479
+ */
480
+
481
+ /* Calculates the integer square root of an integer */
482
+ static inline size_t integer_square_root(size_t f);
483
+
484
+ /*
485
+ Implementation
486
+ */
487
+
488
/* buddy_flags bit: the arena address is stored as a relative offset (embedded mode) */
const unsigned int BUDDY_RELATIVE_MODE = 1;
489
+
490
+ /*
491
+ * A binary buddy memory allocator
492
+ */
493
+
494
/*
 * Allocator metadata. The buddy tree is stored immediately after this
 * struct (see buddy_tree_for).
 */
struct buddy {
    /* Size of the managed arena in bytes, trimmed down to alignment */
    size_t memory_size;
    /* Allocation granularity; a power of two (see is_valid_alignment) */
    size_t alignment;
    union {
        /* Absolute arena address (standard mode) */
        unsigned char *main;
        /* Distance from this struct back to the arena (relative/embedded mode) */
        ptrdiff_t main_offset;
    } arena;
    /* Bit flags; see BUDDY_RELATIVE_MODE */
    size_t buddy_flags;
};
503
+
504
/* Result of buddy_embed_offset: whether and where allocator metadata fits in an arena */
struct buddy_embed_check {
    unsigned int can_fit;  /* non-zero when the metadata can be embedded */
    size_t offset;         /* arena offset at which to place the metadata */
    size_t buddy_size;     /* size of the metadata block itself */
};
509
+
510
+ static unsigned int is_valid_alignment(size_t alignment);
511
+ static size_t buddy_tree_order_for_memory(size_t memory_size, size_t alignment);
512
+ static size_t depth_for_size(struct buddy *buddy, size_t requested_size);
513
+ static inline size_t size_for_depth(struct buddy *buddy, size_t depth);
514
+ static unsigned char *address_for_position(struct buddy *buddy, struct buddy_tree_pos pos);
515
+ static struct buddy_tree_pos position_for_address(struct buddy *buddy, const unsigned char *addr);
516
+ static unsigned char *buddy_main(struct buddy *buddy);
517
+ static unsigned int buddy_relative_mode(struct buddy *buddy);
518
+ static struct buddy_tree *buddy_tree_for(struct buddy *buddy);
519
+ static size_t buddy_effective_memory_size(struct buddy *buddy);
520
+ static size_t buddy_virtual_slots(struct buddy *buddy);
521
+ static void buddy_toggle_virtual_slots(struct buddy *buddy, unsigned int state);
522
+ static void buddy_toggle_range_reservation(struct buddy *buddy, void *ptr, size_t requested_size, unsigned int state);
523
+ static struct buddy *buddy_resize_standard(struct buddy *buddy, size_t new_memory_size);
524
+ static struct buddy *buddy_resize_embedded(struct buddy *buddy, size_t new_memory_size);
525
+ static bool buddy_is_free(struct buddy *buddy, size_t from);
526
+ static struct buddy_embed_check buddy_embed_offset(size_t memory_size, size_t alignment);
527
+ static struct buddy_tree_pos deepest_position_for_offset(struct buddy *buddy, size_t offset);
528
+
529
+ size_t buddy_sizeof(size_t memory_size) {
530
+ return buddy_sizeof_alignment(memory_size, BUDDY_ALLOC_ALIGN);
531
+ }
532
+
533
+ size_t buddy_sizeof_alignment(size_t memory_size, size_t alignment) {
534
+ size_t buddy_tree_order;
535
+
536
+ if (!is_valid_alignment(alignment)) {
537
+ return 0; /* invalid */
538
+ }
539
+ if (memory_size < alignment) {
540
+ return 0; /* invalid */
541
+ }
542
+ buddy_tree_order = buddy_tree_order_for_memory(memory_size, alignment);
543
+ return sizeof(struct buddy) + buddy_tree_sizeof((uint8_t)buddy_tree_order);
544
+ }
545
+
546
+ struct buddy *buddy_init(unsigned char *at, unsigned char *main, size_t memory_size) {
547
+ return buddy_init_alignment(at, main, memory_size, BUDDY_ALLOC_ALIGN);
548
+ }
549
+
550
/*
 * Initialize an allocator whose metadata (at) is separate from the arena (main).
 * memory_size is trimmed down to a multiple of alignment.
 * Returns NULL on NULL/aliased pointers, misalignment, or an invalid size.
 */
struct buddy *buddy_init_alignment(unsigned char *at, unsigned char *main, size_t memory_size,
        size_t alignment) {
    size_t at_alignment, main_alignment, buddy_size, buddy_tree_order;
    struct buddy *buddy;

    if (at == NULL) {
        return NULL;
    }
    if (main == NULL) {
        return NULL;
    }
    if (at == main) {
        return NULL;
    }
    if (!is_valid_alignment(alignment)) {
        return NULL; /* invalid */
    }
    /* The metadata location must be aligned for struct buddy */
    at_alignment = ((uintptr_t) at) % BUDDY_ALIGNOF(struct buddy);
    if (at_alignment != 0) {
        return NULL;
    }
    /* The arena must be aligned at least as strictly as size_t */
    main_alignment = ((uintptr_t) main) % BUDDY_ALIGNOF(size_t);
    if (main_alignment != 0) {
        return NULL;
    }
    /* Trim down memory to alignment */
    if (memory_size % alignment) {
        memory_size -= (memory_size % alignment);
    }
    buddy_size = buddy_sizeof_alignment(memory_size, alignment);
    if (buddy_size == 0) {
        return NULL;
    }
    buddy_tree_order = buddy_tree_order_for_memory(memory_size, alignment);

    /* TODO check for overlap between buddy metadata and main block */
    buddy = (struct buddy *) at;
    buddy->arena.main = main;
    buddy->memory_size = memory_size;
    buddy->buddy_flags = 0;
    buddy->alignment = alignment;
    /* The tree is laid out immediately after the struct (see buddy_tree_for) */
    buddy_tree_init((unsigned char *)buddy + sizeof(*buddy), (uint8_t) buddy_tree_order);
    /* Mask out the padding up to the next power of two, if any */
    buddy_toggle_virtual_slots(buddy, 1);
    return buddy;
}
595
+
596
+ struct buddy *buddy_embed(unsigned char *main, size_t memory_size) {
597
+ return buddy_embed_alignment(main, memory_size, BUDDY_ALLOC_ALIGN);
598
+ }
599
+
600
+ struct buddy *buddy_get_embed_at(unsigned char *main, size_t memory_size) {
601
+ return buddy_get_embed_at_alignment(main, memory_size, BUDDY_ALLOC_ALIGN);
602
+ }
603
+
604
/*
 * Embed an allocator in the tail of its own arena. The usable arena
 * shrinks to check_result.offset bytes; the metadata follows it.
 */
struct buddy *buddy_embed_alignment(unsigned char *main, size_t memory_size, size_t alignment) {
    struct buddy_embed_check check_result;
    struct buddy *buddy;

    if (! main) {
        return NULL;
    }
    if (!is_valid_alignment(alignment)) {
        return NULL; /* invalid */
    }
    check_result = buddy_embed_offset(memory_size, alignment);
    if (! check_result.can_fit) {
        return NULL;
    }

    /* The metadata is placed at the offset; the arena is what precedes it */
    buddy = buddy_init_alignment(main+check_result.offset, main, check_result.offset, alignment);
    if (! buddy) { /* regular initialization failed */
        return NULL;
    }

    /* Switch to relative addressing so the whole region can be relocated as a unit */
    buddy->buddy_flags |= BUDDY_RELATIVE_MODE;
    buddy->arena.main_offset = (unsigned char *)buddy - main;
    return buddy;
}
628
+
629
+ struct buddy *buddy_get_embed_at_alignment(unsigned char *main, size_t memory_size, size_t alignment) {
630
+ struct buddy_embed_check check_result = buddy_embed_offset(memory_size, alignment);
631
+ if (!check_result.can_fit) {
632
+ return NULL;
633
+ }
634
+ return (struct buddy *)(main + check_result.offset);
635
+ }
636
+
637
+ struct buddy *buddy_resize(struct buddy *buddy, size_t new_memory_size) {
638
+ if (new_memory_size == buddy->memory_size) {
639
+ return buddy;
640
+ }
641
+
642
+ if (buddy_relative_mode(buddy)) {
643
+ return buddy_resize_embedded(buddy, new_memory_size);
644
+ } else {
645
+ return buddy_resize_standard(buddy, new_memory_size);
646
+ }
647
+ }
648
+
649
/*
 * Resize an allocator whose metadata lives outside the arena.
 * Fails (returns NULL) if live allocations exist past the new size.
 */
static struct buddy *buddy_resize_standard(struct buddy *buddy, size_t new_memory_size) {
    size_t new_buddy_tree_order;

    /* Trim down memory to alignment */
    if (new_memory_size % buddy->alignment) {
        new_memory_size -= (new_memory_size % buddy->alignment);
    }

    /* Account for tree use */
    if (!buddy_is_free(buddy, new_memory_size)) {
        return NULL;
    }

    /* Release the virtual slots */
    buddy_toggle_virtual_slots(buddy, 0);

    /* Calculate new tree order and resize it */
    new_buddy_tree_order = buddy_tree_order_for_memory(new_memory_size, buddy->alignment);
    buddy_tree_resize(buddy_tree_for(buddy), (uint8_t) new_buddy_tree_order);

    /* Store the new memory size and reconstruct any virtual slots */
    buddy->memory_size = new_memory_size;
    buddy_toggle_virtual_slots(buddy, 1);

    /* Resize successful */
    return buddy;
}
676
+
677
/*
 * Resize an embedded allocator: resize the arena, then move the metadata
 * to its new tail position and fix up the relative arena offset.
 */
static struct buddy *buddy_resize_embedded(struct buddy *buddy, size_t new_memory_size) {
    struct buddy_embed_check check_result;
    unsigned char *main, *buddy_destination;
    struct buddy *resized, *relocated;

    /* Ensure that the embedded allocator can fit */
    check_result = buddy_embed_offset(new_memory_size, buddy->alignment);
    if (! check_result.can_fit) {
        return NULL;
    }

    /* Resize the allocator in the normal way */
    resized = buddy_resize_standard(buddy, check_result.offset);
    if (! resized) {
        return NULL;
    }

    /* Get the absolute main address. The relative will be invalid after relocation. */
    main = buddy_main(buddy);

    /* Relocate the allocator; memmove because old and new locations may overlap */
    buddy_destination = buddy_main(buddy) + check_result.offset;
    memmove(buddy_destination, resized, check_result.buddy_size);

    /* Update the main offset in the allocator */
    relocated = (struct buddy *) buddy_destination;
    relocated->arena.main_offset = buddy_destination - main;

    return relocated;
}
707
+
708
+ bool buddy_can_shrink(struct buddy *buddy) {
709
+ if (buddy == NULL) {
710
+ return false;
711
+ }
712
+ return buddy_is_free(buddy, buddy->memory_size / 2);
713
+ }
714
+
715
/* True when no allocations exist anywhere in the arena. */
bool buddy_is_empty(struct buddy *buddy) {
    return (buddy != NULL) && buddy_is_free(buddy, 0);
}
721
+
722
/* True when the arena is fully allocated and no further allocation can succeed. */
bool buddy_is_full(struct buddy *buddy) {
    struct buddy_tree *tree;

    if (buddy == NULL) {
        return false;
    }
    tree = buddy_tree_for(buddy);
    /* The root's status equals the tree order exactly when nothing is free */
    return buddy_tree_status(tree, buddy_tree_root()) == buddy_tree_order(tree);
}
733
+
734
+ size_t buddy_arena_size(struct buddy *buddy) {
735
+ if (buddy == NULL) {
736
+ return 0;
737
+ }
738
+ return buddy->memory_size;
739
+ }
740
+
741
+ size_t buddy_arena_free_size(struct buddy *buddy) {
742
+ size_t result = 0;
743
+ struct buddy_tree *tree = buddy_tree_for(buddy);
744
+ size_t tree_order = buddy_tree_order(tree);
745
+
746
+ struct buddy_tree_walk_state state = buddy_tree_walk_state_root();
747
+ do {
748
+ size_t pos_status = buddy_tree_status(tree, state.current_pos);
749
+ if (pos_status == (tree_order - state.current_pos.depth + 1)) { /* Fully-allocated */
750
+ state.going_up = 1;
751
+ } else if (pos_status == 0) { /* Free */
752
+ state.going_up = 1;
753
+ result += size_for_depth(buddy, state.current_pos.depth);
754
+ } else { /* Partial */
755
+ continue;
756
+ }
757
+ } while (buddy_tree_walk(tree, &state));
758
+ return result;
759
+ }
760
+
761
/* A valid alignment is a power of two: it equals its own power-of-two ceiling. */
static unsigned int is_valid_alignment(size_t alignment) {
    return ceiling_power_of_two(alignment) == alignment;
}
764
+
765
/*
 * Tree order (depth) needed to track memory_size bytes in alignment-sized
 * blocks. The block count is rounded up to the next power of two.
 * Callers validate alignment beforehand, hence the suppression below.
 */
static size_t buddy_tree_order_for_memory(size_t memory_size, size_t alignment) {
    // cppcheck-suppress zerodiv
    size_t blocks = memory_size / alignment;
    return highest_bit_position(ceiling_power_of_two(blocks));
}
770
+
771
/*
 * Allocate requested_size bytes from the arena.
 * Returns NULL on a NULL allocator, an oversized request, or exhaustion.
 * A zero-size request is treated as a one-byte request (see below).
 */
void *buddy_malloc(struct buddy *buddy, size_t requested_size) {
    size_t target_depth;
    struct buddy_tree *tree;
    struct buddy_tree_pos pos;

    if (buddy == NULL) {
        return NULL;
    }
    if (requested_size == 0) {
        /*
         * Batshit crazy code exists that calls malloc(0) and expects
         * a result that can be safely passed to free().
         * And even though this allocator will safely handle a free(NULL)
         * the particular batshit code will expect a non-NULL malloc(0) result!
         *
         * See also https://wiki.sei.cmu.edu/confluence/display/c/MEM04-C.+Beware+of+zero-length+allocations
         */
        requested_size = 1;
    }
    if (requested_size > buddy->memory_size) {
        return NULL;
    }

    /* Map the size to a tree depth and search for a free slot there */
    target_depth = depth_for_size(buddy, requested_size);
    tree = buddy_tree_for(buddy);
    pos = buddy_tree_find_free(tree, (uint8_t) target_depth);

    if (! buddy_tree_valid(tree, pos)) {
        return NULL; /* no slot found */
    }

    /* Allocate the slot */
    buddy_tree_mark(tree, pos);

    /* Find and return the actual memory address */
    return address_for_position(buddy, pos);
}
808
+
809
/*
 * Allocate a zero-initialized array of members_count elements of
 * member_size bytes each. Returns NULL on overflow or exhaustion.
 */
void *buddy_calloc(struct buddy *buddy, size_t members_count, size_t member_size) {
    size_t total;
    void *slot;

    if ((members_count == 0) || (member_size == 0)) {
        /* See the gleeful remark in malloc */
        members_count = 1;
        member_size = 1;
    }
    total = members_count * member_size;
    /* Detect multiplication wrap-around */
    if ((total / members_count) != member_size) {
        return NULL;
    }
    slot = buddy_malloc(buddy, total);
    if (slot != NULL) {
        memset(slot, 0, total);
    }
    return slot;
}
829
+
830
+ void *buddy_realloc(struct buddy *buddy, void *ptr, size_t requested_size, bool ignore_data) {
831
+ struct buddy_tree *tree;
832
+ struct buddy_tree_pos origin, new_pos;
833
+ size_t current_depth, target_depth;
834
+ void *source, *destination;
835
+
836
+ /*
837
+ * realloc is a joke:
838
+ * - NULL ptr degrades into malloc
839
+ * - Zero size degrades into free
840
+ * - Same size as previous malloc/calloc/realloc is a no-op or a rellocation
841
+ * - Smaller size than previous *alloc decrease the allocated size with an optional rellocation
842
+ * - If the new allocation cannot be satisfied NULL is returned BUT the slot is preserved
843
+ * - Larger size than previous *alloc increase tha allocated size with an optional rellocation
844
+ */
845
+ if (ptr == NULL) {
846
+ return buddy_malloc(buddy, requested_size);
847
+ }
848
+ if (requested_size == 0) {
849
+ buddy_free(buddy, ptr);
850
+ return NULL;
851
+ }
852
+ if (requested_size > buddy->memory_size) {
853
+ return NULL;
854
+ }
855
+
856
+ /* Find the position tracking this address */
857
+ tree = buddy_tree_for(buddy);
858
+ origin = position_for_address(buddy, (unsigned char *) ptr);
859
+ if (! buddy_tree_valid(tree, origin)) {
860
+ return NULL;
861
+ }
862
+ current_depth = buddy_tree_depth(origin);
863
+ target_depth = depth_for_size(buddy, requested_size);
864
+
865
+ /* Release the position and perform a search */
866
+ buddy_tree_release(tree, origin);
867
+ new_pos = buddy_tree_find_free(tree, (uint8_t) target_depth);
868
+
869
+ if (! buddy_tree_valid(tree, new_pos)) {
870
+ /* allocation failure, restore mark and return null */
871
+ buddy_tree_mark(tree, origin);
872
+ return NULL;
873
+ }
874
+
875
+ if (origin.index == new_pos.index) {
876
+ /* Allocated to the same slot, restore mark and return null */
877
+ buddy_tree_mark(tree, origin);
878
+ return ptr;
879
+ }
880
+
881
+ destination = address_for_position(buddy, new_pos);
882
+
883
+ if (! ignore_data) {
884
+ /* Copy the content */
885
+ source = address_for_position(buddy, origin);
886
+ memmove(destination, source, size_for_depth(buddy,
887
+ current_depth > target_depth ? current_depth : target_depth));
888
+ }
889
+
890
+ /* Allocate and return */
891
+ buddy_tree_mark(tree, new_pos);
892
+ return destination;
893
+ }
894
+
895
/*
 * Array-aware realloc: resizes ptr to hold members_count elements of
 * member_size bytes. Returns NULL on multiplication overflow.
 */
void *buddy_reallocarray(struct buddy *buddy, void *ptr,
        size_t members_count, size_t member_size, bool ignore_data) {
    size_t total;

    /* A zero-sized array degrades into a zero-size realloc (i.e. free) */
    if ((members_count == 0) || (member_size == 0)) {
        return buddy_realloc(buddy, ptr, 0, ignore_data);
    }
    total = members_count * member_size;
    /* Detect multiplication wrap-around */
    if ((total / members_count) != member_size) {
        return NULL;
    }
    return buddy_realloc(buddy, ptr, total, ignore_data);
}
906
+
907
+ void buddy_free(struct buddy *buddy, void *ptr) {
908
+ unsigned char *dst, *main;
909
+ struct buddy_tree *tree;
910
+ struct buddy_tree_pos pos;
911
+
912
+ if (buddy == NULL) {
913
+ return;
914
+ }
915
+ if (ptr == NULL) {
916
+ return;
917
+ }
918
+ dst = (unsigned char *)ptr;
919
+ main = buddy_main(buddy);
920
+ if ((dst < main) || (dst >= (main + buddy->memory_size))) {
921
+ return;
922
+ }
923
+
924
+ /* Find the position tracking this address */
925
+ tree = buddy_tree_for(buddy);
926
+ pos = position_for_address(buddy, dst);
927
+
928
+ if (! buddy_tree_valid(tree, pos)) {
929
+ return;
930
+ }
931
+
932
+ /* Release the position */
933
+ buddy_tree_release(tree, pos);
934
+ }
935
+
936
/*
 * Release an allocation, validating that the caller-supplied size matches
 * the tracked slot. Returns a status code instead of failing silently.
 */
enum buddy_safe_free_status buddy_safe_free(struct buddy* buddy, void* ptr, size_t requested_size) {
    unsigned char* dst, * main;
    struct buddy_tree* tree;
    struct buddy_tree_pos pos;
    size_t allocated_size_for_depth;
    enum buddy_tree_release_status status;

    if (buddy == NULL) {
        return BUDDY_SAFE_FREE_BUDDY_IS_NULL;
    }
    if (ptr == NULL) {
        return BUDDY_SAFE_FREE_INVALID_ADDRESS;
    }
    dst = (unsigned char*)ptr;
    main = buddy_main(buddy);
    if ((dst < main) || (dst >= (main + buddy->memory_size))) {
        return BUDDY_SAFE_FREE_INVALID_ADDRESS;
    }

    /* Find an allocated position tracking this address */
    tree = buddy_tree_for(buddy);
    pos = position_for_address(buddy, dst);

    if (!buddy_tree_valid(tree, pos)) {
        return BUDDY_SAFE_FREE_INVALID_ADDRESS;
    }

    /* The requested size must round to exactly this slot's size:
       not larger, and not small enough to have fit in a child slot */
    allocated_size_for_depth = size_for_depth(buddy, pos.depth);
    if (requested_size < buddy->alignment) {
        requested_size = buddy->alignment;
    }
    if (requested_size > allocated_size_for_depth) {
        return BUDDY_SAFE_FREE_SIZE_MISMATCH;
    }
    if (requested_size <= (allocated_size_for_depth / 2)) {
        return BUDDY_SAFE_FREE_SIZE_MISMATCH;
    }

    /* Release the position */
    status = buddy_tree_release(tree, pos);

    switch (status) {
    case BUDDY_TREE_RELEASE_FAIL_PARTIALLY_USED:
        return BUDDY_SAFE_FREE_INVALID_ADDRESS;
    case BUDDY_TREE_RELEASE_SUCCESS:
        break;
    }

    return BUDDY_SAFE_FREE_SUCCESS;
}
986
+
987
+ size_t buddy_alloc_size(struct buddy *buddy, void *ptr) {
988
+ unsigned char* dst, * main;
989
+ struct buddy_tree* tree;
990
+ struct buddy_tree_pos pos;
991
+
992
+ if (buddy == NULL) {
993
+ return 0;
994
+ }
995
+ if (ptr == NULL) {
996
+ return 0;
997
+ }
998
+ dst = (unsigned char*)ptr;
999
+ main = buddy_main(buddy);
1000
+ if ((dst < main) || (dst >= (main + buddy->memory_size))) {
1001
+ return 0;
1002
+ }
1003
+
1004
+ /* Find an allocated position tracking this address */
1005
+ tree = buddy_tree_for(buddy);
1006
+ pos = position_for_address(buddy, dst);
1007
+
1008
+ if (!buddy_tree_valid(tree, pos)) {
1009
+ return 0;
1010
+ }
1011
+
1012
+ return size_for_depth(buddy, pos.depth);
1013
+ }
1014
+
1015
/* Mark a byte range as allocated so the allocator never hands it out. */
void buddy_reserve_range(struct buddy *buddy, void *ptr, size_t requested_size) {
    buddy_toggle_range_reservation(buddy, ptr, requested_size, 1);
}
1018
+
1019
/* Force-release a byte range. Unsafe: callers must ensure it holds no live allocations. */
void buddy_unsafe_release_range(struct buddy *buddy, void *ptr, size_t requested_size) {
    buddy_toggle_range_reservation(buddy, ptr, requested_size, 0);
}
1022
+
1023
/*
 * Visit every whole (fully free or singly-allocated) slot in the arena,
 * calling fp(ctx, address, slot_size, allocated) for each.
 * Stops and returns the first non-NULL callback result, or NULL after
 * the full walk. Virtual padding slots are never reported.
 */
void *buddy_walk(struct buddy *buddy,
        void *(fp)(void *ctx, void *addr, size_t slot_size, size_t allocated),
        void *ctx) {
    unsigned char *main;
    size_t effective_memory_size, tree_order, pos_status, pos_size;
    struct buddy_tree *tree;
    unsigned char *addr;
    struct buddy_tree_walk_state state;
    struct buddy_tree_pos test_pos;
    void *callback_result;

    if (buddy == NULL) {
        return NULL;
    }
    if (fp == NULL) {
        return NULL;
    }
    main = buddy_main(buddy);
    effective_memory_size = buddy_effective_memory_size(buddy);
    tree = buddy_tree_for(buddy);
    tree_order = buddy_tree_order(tree);

    state = buddy_tree_walk_state_root();
    do {
        pos_status = buddy_tree_status(tree, state.current_pos);
        if (pos_status != (tree_order - state.current_pos.depth + 1)) { /* Partially-allocated */
            continue;
        }

        /*
         * The tree doesn't make a distinction of a fully-allocated node
         * due to a single allocation and a fully-allocated due to maxed out
         * child allocations - we need to check the children.
         * A child-allocated node will have both children set to their maximum
         * but it is sufficient to check just one for non-zero.
         */
        test_pos = buddy_tree_left_child(state.current_pos);
        if (buddy_tree_valid(tree, test_pos) && buddy_tree_status(tree, test_pos)) {
            continue;
        }

        /* Current node is free or allocated, process */
        pos_size = effective_memory_size >> (state.current_pos.depth - 1u);
        addr = address_for_position(buddy, state.current_pos);
        if (((size_t)(addr - main) + pos_size) > buddy->memory_size) {
            /*
             * Do not process virtual slots
             * As virtual slots are on the right side of the tree
             * if we see a one with the current iteration order this
             * means that all subsequent slots will be virtual,
             * hence we can return early.
             */
            return NULL;
        }
        callback_result = (fp)(ctx, addr, pos_size, pos_status > 0);
        if (callback_result != NULL) {
            return callback_result;
        }
        state.going_up = 1;

    } while (buddy_tree_walk(tree, &state));
    return NULL;
}
1086
+
1087
/* Arena fragmentation on a 0-255 scale; 0 for a NULL allocator. */
unsigned char buddy_fragmentation(struct buddy *buddy) {
    return (buddy == NULL) ? 0 : buddy_tree_fragmentation(buddy_tree_for(buddy));
}
1093
+
1094
#ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
/*
 * Enable change tracking: reserves the start of the arena for a header
 * holding the callback and its context, then flags the tree so that
 * subsequent modifications are reported through the callback.
 */
void buddy_enable_change_tracking(struct buddy* buddy, void* context, void (*tracker) (void*, unsigned char*, size_t)) {
    struct buddy_tree *t = buddy_tree_for(buddy);
    struct buddy_change_tracker *header = (struct buddy_change_tracker *) buddy_main(buddy);

    /* Allocate memory for the change tracking header */
    buddy_reserve_range(buddy, buddy_main(buddy), sizeof(struct buddy_change_tracker));

    /* Fill in the change tracking header */
    header->context = context;
    header->tracker = tracker;

    /* Indicate that the tree should perform change tracking */
    buddy_tree_enable_change_tracking(t);
}
#endif
1110
+
1111
+
1112
+ static size_t depth_for_size(struct buddy *buddy, size_t requested_size) {
1113
+ size_t depth, effective_memory_size, p2_of_requested_size;
1114
+ if (requested_size < buddy->alignment) {
1115
+ requested_size = buddy->alignment;
1116
+ }
1117
+ depth = 1;
1118
+ effective_memory_size = buddy_effective_memory_size(buddy);
1119
+
1120
+ p2_of_requested_size = ceiling_power_of_two(requested_size);
1121
+ depth = count_trailing_zeroes(effective_memory_size) + 1
1122
+ - count_trailing_zeroes(p2_of_requested_size);
1123
+ return depth;
1124
+ }
1125
+
1126
/* Slot size at a given tree depth: the power-of-two arena halved (depth-1) times. */
static inline size_t size_for_depth(struct buddy *buddy, size_t depth) {
    return ceiling_power_of_two(buddy->memory_size) >> (depth-1);
}
1129
+
1130
/* The buddy tree is stored immediately after the struct buddy header. */
static struct buddy_tree *buddy_tree_for(struct buddy *buddy) {
    return (struct buddy_tree*) ((unsigned char *)buddy + sizeof(*buddy));
}
1133
+
1134
/* The tree always models a power-of-two arena; round the real size up to it. */
static size_t buddy_effective_memory_size(struct buddy *buddy) {
    return ceiling_power_of_two(buddy->memory_size);
}
1137
+
1138
+ static size_t buddy_virtual_slots(struct buddy *buddy) {
1139
+ size_t memory_size = buddy->memory_size;
1140
+ size_t effective_memory_size = buddy_effective_memory_size(buddy);
1141
+ if (effective_memory_size == memory_size) {
1142
+ return 0;
1143
+ }
1144
+ return (effective_memory_size - memory_size) / buddy->alignment;
1145
+ }
1146
+
1147
/* Arena address of a tree position: its at-depth index times its slot size. */
static unsigned char *address_for_position(struct buddy *buddy, struct buddy_tree_pos pos) {
    size_t block_size = size_for_depth(buddy, buddy_tree_depth(pos));
    size_t addr = block_size * buddy_tree_index(pos);
    return buddy_main(buddy) + addr;
}
1152
+
1153
/*
 * Leaf-level tree position for an arena byte offset.
 * The offset is expected to be a multiple of the alignment.
 */
static struct buddy_tree_pos deepest_position_for_offset(struct buddy *buddy, size_t offset) {
    size_t index = offset / buddy->alignment;
    struct buddy_tree_pos pos = buddy_tree_leftmost_child(buddy_tree_for(buddy));
    pos.index += index;
    return pos;
}
1159
+
1160
/*
 * Find the allocated tree position that tracks the given arena address.
 * Starts at the leaf for the address and climbs until an allocated node
 * is found. Returns INVALID_POS for misaligned or untracked addresses.
 */
static struct buddy_tree_pos position_for_address(struct buddy *buddy, const unsigned char *addr) {
    unsigned char *main;
    struct buddy_tree *tree;
    struct buddy_tree_pos pos;
    size_t offset;

    main = buddy_main(buddy);
    offset = (size_t) (addr - main);

    if (offset % buddy->alignment) {
        return INVALID_POS; /* invalid alignment */
    }

    tree = buddy_tree_for(buddy);
    pos = deepest_position_for_offset(buddy, offset);

    /* Find the actual allocated position tracking this address */
    while (!buddy_tree_status(tree, pos)) {
        pos = buddy_tree_parent(pos);

        if (!buddy_tree_valid(tree, pos)) {
            return INVALID_POS;
        }
    }

    /* The climb may land on a node whose slot starts before addr -
       that means addr points inside an allocation, not at its start */
    if (address_for_position(buddy, pos) != addr) {
        return INVALID_POS; /* invalid alignment */
    }

    return pos;
}
1191
+
1192
+ static unsigned char *buddy_main(struct buddy *buddy) {
1193
+ if (buddy_relative_mode(buddy)) {
1194
+ return (unsigned char *)buddy - buddy->arena.main_offset;
1195
+ }
1196
+ return buddy->arena.main;
1197
+ }
1198
+
1199
/* Non-zero when the arena address is stored as a relative offset (embedded mode). */
static unsigned int buddy_relative_mode(struct buddy *buddy) {
    return (unsigned int)buddy->buddy_flags & BUDDY_RELATIVE_MODE;
}
1202
+
1203
/*
 * Mark (state=1) or release (state=0) the virtual padding between the real
 * arena size and its power-of-two ceiling. Descends the right spine of the
 * tree, consuming the padding delta in the largest possible node-sized
 * chunks. No-op for power-of-two arenas.
 */
static void buddy_toggle_virtual_slots(struct buddy *buddy, unsigned int state) {
    size_t delta, memory_size, effective_memory_size;
    struct buddy_tree *tree;
    struct buddy_tree_pos pos;

    memory_size = buddy->memory_size;
    /* Mask/unmask the virtual space if memory is not a power of two */
    effective_memory_size = buddy_effective_memory_size(buddy);
    if (effective_memory_size == memory_size) {
        return;
    }

    /* Get the area that we need to mask and pad it to alignment */
    /* Node memory size is already aligned to buddy->alignment */
    delta = effective_memory_size - memory_size;

    /* The padding always lies at the end of the arena, i.e. under the
       rightmost nodes of the tree */
    tree = buddy_tree_for(buddy);
    pos = buddy_tree_right_child(buddy_tree_root());
    while (delta) {
        size_t current_pos_size = size_for_depth(buddy, buddy_tree_depth(pos));
        if (delta == current_pos_size) {
            /* toggle current pos */
            if (state) {
                buddy_tree_mark(tree, pos);
            }
            else {
                buddy_tree_release(tree, pos);
            }
            break;
        }
        if (delta <= (current_pos_size / 2)) {
            /* re-run for right child */
            pos = buddy_tree_right_child(pos);
            continue;
        } else {
            /* toggle right child */
            if (state) {
                buddy_tree_mark(tree, buddy_tree_right_child(pos));
            }
            else {
                buddy_tree_release(tree, buddy_tree_right_child(pos));
            }
            /* reduce delta */
            delta -= current_pos_size / 2;
            /* re-run for left child */
            pos = buddy_tree_left_child(pos);
            continue;
        }
    }
}
1253
+
1254
/*
 * Mark (state=1) or release (state=0) an arbitrary byte range, one
 * alignment-sized leaf slot at a time. Silently ignores NULL arguments,
 * zero sizes, and ranges extending outside the arena.
 */
static void buddy_toggle_range_reservation(struct buddy *buddy, void *ptr, size_t requested_size, unsigned int state) {
    unsigned char *dst, *main;
    struct buddy_tree *tree;
    size_t offset;
    struct buddy_tree_pos pos;

    if (buddy == NULL) {
        return;
    }
    if (ptr == NULL) {
        return;
    }
    if (requested_size == 0) {
        return;
    }
    dst = (unsigned char *)ptr;
    main = buddy_main(buddy);
    if ((dst < main) || ((dst + requested_size) > (main + buddy->memory_size))) {
        return;
    }

    /* Find the deepest position tracking this address */
    tree = buddy_tree_for(buddy);
    offset = (size_t) (dst - main);
    pos = deepest_position_for_offset(buddy, offset);

    /* Advance one position at a time and process */
    while (requested_size) {
        if (state) {
            buddy_tree_mark(tree, pos);
        }
        else {
            buddy_tree_release(tree, pos);
        }
        /* Consume one leaf slot per iteration, clamping at zero */
        requested_size = (requested_size < buddy->alignment) ? 0 : (requested_size - buddy->alignment);
        pos.index++;
    }

    return;
}
1294
+
1295
/* Internal function that checks if there are any allocations
 after the indicated relative memory index. Used to check if
 the arena can be downsized.
 The from argument is already adjusted for alignment by caller */
static bool buddy_is_free(struct buddy *buddy, size_t from) {
    struct buddy_tree *tree;
    struct buddy_tree_interval query_range;
    struct buddy_tree_pos pos;
    size_t effective_memory_size, virtual_slots, to;

    effective_memory_size = buddy_effective_memory_size(buddy);
    virtual_slots = buddy_virtual_slots(buddy);
    /* End of the real (non-virtual) region; virtual slots pad the arena
       up to a power of two and are permanently marked, so exclude them */
    to = effective_memory_size -
        ((virtual_slots ? (virtual_slots + 1) : 1) * buddy->alignment);

    tree = buddy_tree_for(buddy);

    /* Leaf-level interval covering [from, to] */
    query_range.from = deepest_position_for_offset(buddy, from);
    query_range.to = deepest_position_for_offset(buddy, to);

    pos = deepest_position_for_offset(buddy, from);
    while(buddy_tree_valid(tree, pos) && (pos.index < query_range.to.index)) {
        struct buddy_tree_interval current_test_range = to_buddy_tree_interval(tree, pos);
        struct buddy_tree_interval parent_test_range =
            to_buddy_tree_interval(tree, buddy_tree_parent(pos));
        /* Climb as high as possible while the parent still fits inside
           the query range - testing one large node beats testing many leaves */
        while(buddy_tree_interval_contains(query_range, parent_test_range)) {
            pos = buddy_tree_parent(pos);
            current_test_range = parent_test_range;
            parent_test_range = to_buddy_tree_interval(tree, buddy_tree_parent(pos));
        }
        /* pos is now tracking an overlapping segment */
        if (! buddy_tree_is_free(tree, pos)) {
            return false;
        }
        /* Advance check */
        pos = buddy_tree_right_adjacent(current_test_range.to);
    }
    return true;
}
1334
+
1335
+ static struct buddy_embed_check buddy_embed_offset(size_t memory_size, size_t alignment) {
1336
+ size_t buddy_size, offset;
1337
+ struct buddy_embed_check check_result;
1338
+
1339
+ memset(&check_result, 0, sizeof(check_result));
1340
+ check_result.can_fit = 1;
1341
+ buddy_size = buddy_sizeof_alignment(memory_size, alignment);
1342
+ if (buddy_size >= memory_size) {
1343
+ check_result.can_fit = 0;
1344
+ }
1345
+
1346
+ offset = memory_size - buddy_size;
1347
+ if (offset % BUDDY_ALIGNOF(struct buddy) != 0) {
1348
+ buddy_size += offset % BUDDY_ALIGNOF(struct buddy);
1349
+ if (buddy_size >= memory_size) {
1350
+ check_result.can_fit = 0;
1351
+ }
1352
+ offset = memory_size - buddy_size;
1353
+ }
1354
+
1355
+ if (check_result.can_fit) {
1356
+ check_result.offset = offset;
1357
+ check_result.buddy_size = buddy_size;
1358
+ }
1359
+ return check_result;
1360
+ }
1361
+
1362
+ void buddy_debug(struct buddy *buddy) {
1363
+ BUDDY_PRINTF("buddy allocator at: %p arena at: %p\n", (void *)buddy, (void *)buddy_main(buddy));
1364
+ BUDDY_PRINTF("memory size: %zu\n", buddy->memory_size);
1365
+ BUDDY_PRINTF("mode: ");
1366
+ if (buddy_relative_mode(buddy)) {
1367
+ BUDDY_PRINTF("embedded");
1368
+ } else {
1369
+ BUDDY_PRINTF("standard");
1370
+ }
1371
+ BUDDY_PRINTF("\n");
1372
+ BUDDY_PRINTF("virtual slots: %zu\n", buddy_virtual_slots(buddy));
1373
+ BUDDY_PRINTF("allocator tree follows:\n");
1374
+ buddy_tree_debug(buddy_tree_for(buddy), buddy_tree_root(), buddy_effective_memory_size(buddy));
1375
+ }
1376
+
1377
/*
 * A buddy allocation tree
 */

/* Header of the tree; the unary-encoded node bitset and the
   size_for_order memoization table follow it in memory. */
struct buddy_tree {
    size_t upper_pos_bound;        /* One past the largest valid pos.index (2^order) */
    size_t size_for_order_offset;  /* Offset (in size_t units past the bitset) of the memo table */
    uint8_t order;                 /* Tree depth; leaves sit at this depth */
    uint8_t flags;                 /* Bitwise OR of enum buddy_tree_flags */
    /*
     * struct padding rules mean that there are
     * 16/48 bits available until the next increment
     */
};
1391
+
1392
/* Bit flags stored in buddy_tree.flags */
enum buddy_tree_flags {
    BUDDY_TREE_CHANGE_TRACKING = 1, /* report modified bitset bytes to a callback */
};
1395
+
1396
/* Location of a node's unary counter inside the tree bitset */
struct internal_position {
    size_t local_offset;    /* Counter width in bits (== max storable value) */
    size_t bitset_location; /* First bit of the counter in the bitset */
};
1400
+
1401
/* Forward declarations for the buddy tree implementation below */
static inline size_t size_for_order(uint8_t order, uint8_t to);
static inline size_t buddy_tree_index_internal(struct buddy_tree_pos pos);
static struct buddy_tree_pos buddy_tree_leftmost_child_internal(size_t tree_order);
static struct internal_position buddy_tree_internal_position_order(
    size_t tree_order, struct buddy_tree_pos pos);
static struct internal_position buddy_tree_internal_position_tree(
    struct buddy_tree *t, struct buddy_tree_pos pos);
static void buddy_tree_grow(struct buddy_tree *t, uint8_t desired_order);
static void buddy_tree_shrink(struct buddy_tree *t, uint8_t desired_order);
static void update_parent_chain(struct buddy_tree *t, struct buddy_tree_pos pos,
    struct internal_position pos_internal, size_t size_current);
static inline unsigned char *buddy_tree_bits(struct buddy_tree *t);
static void buddy_tree_populate_size_for_order(struct buddy_tree *t);
static inline size_t buddy_tree_size_for_order(struct buddy_tree *t, uint8_t to);
static void write_to_internal_position(struct buddy_tree* t, struct internal_position pos, size_t value);
static inline size_t read_from_internal_position(unsigned char *bitset, struct internal_position pos);
static inline unsigned char compare_with_internal_position(unsigned char *bitset, struct internal_position pos, size_t value);

#ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
static inline void buddy_tree_track_change(struct buddy_tree* t, unsigned char* addr, size_t length);
#endif /* BUDDY_EXPERIMENTAL_CHANGE_TRACKING */
1422
+
1423
/*
 * Total number of bitset bits used by tree rows from depth `order`
 * (leaves, exclusive bound `to`) upwards: a row at depth d holds
 * 2^(order-d) nodes of (order-d+1)-bit counters, summed here as
 * level * row_width per row.
 */
static inline size_t size_for_order(uint8_t order, uint8_t to) {
    size_t total = 0;
    size_t row_width = 1u;
    for (uint8_t level = order; level != to; level--) {
        total += level * row_width;
        row_width *= 2;
    }
    return total;
}
1433
+
1434
/* Maps a tree position to its bitset counter for a tree of the given
   order, without consulting a tree instance (used during grow/shrink
   when the stored order is in flux). */
static inline struct internal_position buddy_tree_internal_position_order(
        size_t tree_order, struct buddy_tree_pos pos) {
    struct internal_position p;
    size_t total_offset, local_index;

    /* Counter width shrinks by one per level of depth */
    p.local_offset = tree_order - buddy_tree_depth(pos) + 1;
    /* Bits consumed by all rows above this one */
    total_offset = size_for_order((uint8_t) tree_order, (uint8_t) p.local_offset);
    local_index = buddy_tree_index_internal(pos);
    p.bitset_location = total_offset + (p.local_offset * local_index);
    return p;
}
1445
+
1446
/* Same mapping as buddy_tree_internal_position_order but uses the
   tree's memoized row sizes instead of recomputing them. */
static inline struct internal_position buddy_tree_internal_position_tree(
        struct buddy_tree *t, struct buddy_tree_pos pos) {
    struct internal_position p;
    size_t total_offset, local_index;

    p.local_offset = t->order - buddy_tree_depth(pos) + 1;
    /* Memoized variant of size_for_order */
    total_offset = buddy_tree_size_for_order(t, (uint8_t) p.local_offset);
    local_index = buddy_tree_index_internal(pos);
    p.bitset_location = total_offset + (p.local_offset * local_index);
    return p;
}
1457
+
1458
/* Total bytes needed for a tree of the given order: header + bitset
   (padded) + the size_for_order memoization table. */
static size_t buddy_tree_sizeof(uint8_t order) {
    size_t tree_size, bitset_size, size_for_order_size;

    tree_size = sizeof(struct buddy_tree);
    /* Account for the bitset */
    bitset_size = bitset_sizeof(size_for_order(order, 0));
    /* NOTE(review): adding (size % sizeof(size_t)) does not fully align
       the size; buddy_tree_populate_size_for_order performs the exact
       same computation plus one extra slot, so the two must stay in
       lock-step - confirm upstream before "fixing" either one. */
    if (bitset_size % sizeof(size_t)) {
        bitset_size += (bitset_size % sizeof(size_t));
    }
    /* Account for the size_for_order memoization */
    size_for_order_size = ((order+2) * sizeof(size_t));
    return tree_size + bitset_size + size_for_order_size;
}
1471
+
1472
/* Initializes a zeroed tree of the given order at `at`; the caller
   must provide at least buddy_tree_sizeof(order) bytes. */
static struct buddy_tree *buddy_tree_init(unsigned char *at, uint8_t order) {
    size_t size = buddy_tree_sizeof(order);
    struct buddy_tree *t = (struct buddy_tree*) at;
    memset(at, 0, size); /* all nodes start free; flags cleared */
    t->order = order;
    t->upper_pos_bound = two_to_the_power_of(t->order);
    buddy_tree_populate_size_for_order(t);
    return t;
}
1481
+
1482
#ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
/* Turns on reporting of modified bitset bytes via the arena-resident callback */
static void buddy_tree_enable_change_tracking(struct buddy_tree* t) {
    t->flags |= BUDDY_TREE_CHANGE_TRACKING;
}
#endif /* BUDDY_EXPERIMENTAL_CHANGE_TRACKING */
1487
+
1488
/* Resizes the tree to the desired order, growing or shrinking as needed.
   Shrinking may silently stop early if the tree state prevents it. */
static void buddy_tree_resize(struct buddy_tree *t, uint8_t desired_order) {
    if (t->order == desired_order) {
        return;
    }
    if (t->order < desired_order) {
        buddy_tree_grow(t, desired_order);
    } else {
        buddy_tree_shrink(t, desired_order);
    }
}
1498
+
1499
/* Grows the tree one order at a time up to desired_order. The existing
   tree becomes the left subtree of the new root, so each row's bits are
   shifted right into their new location and the new right-subtree rows
   are cleared (free). */
static void buddy_tree_grow(struct buddy_tree *t, uint8_t desired_order) {
    struct buddy_tree_pos pos;

    while (desired_order > t->order) {
        /* Grow the tree a single order at a time */
        size_t current_order = t->order;
        struct buddy_tree_pos current_pos = buddy_tree_leftmost_child_internal(current_order);
        struct buddy_tree_pos next_pos = buddy_tree_leftmost_child_internal(current_order + 1u);
        /* Process rows bottom-up so source bits are never overwritten
           before they are moved */
        while(current_order) {
            /* Get handles into the rows at the tracked depth */
            struct internal_position current_internal = buddy_tree_internal_position_order(
                t->order, current_pos);
            struct internal_position next_internal = buddy_tree_internal_position_order(
                t->order + 1u, next_pos);

            /* There are this many nodes at the current level */
            size_t node_count = two_to_the_power_of(current_order - 1u);

            /* Transfer the bits*/
            bitset_shift_right(buddy_tree_bits(t),
                current_internal.bitset_location /* from here */,
                current_internal.bitset_location + (current_internal.local_offset * node_count) /* up to here */,
                next_internal.bitset_location - current_internal.bitset_location /* by */);

            /* Clear right section */
            bitset_clear_range(buddy_tree_bits(t),
                to_bitset_range(next_internal.bitset_location + (next_internal.local_offset * node_count),
                    next_internal.bitset_location + (next_internal.local_offset * node_count * 2) - 1));

            /* Handle the upper level */
            current_order -= 1u;
            current_pos = buddy_tree_parent(current_pos);
            next_pos = buddy_tree_parent(next_pos);
        }
        /* Advance the order and refresh the root */
        t->order += 1u;
        t->upper_pos_bound = two_to_the_power_of(t->order);
        buddy_tree_populate_size_for_order(t);

        /* Update the root */
        pos = buddy_tree_right_child(buddy_tree_root());
        update_parent_chain(t, pos, buddy_tree_internal_position_tree(t, pos), 0);
    }
}
1543
+
1544
/* Shrinks the tree one order at a time down to desired_order, promoting
   the left subtree to become the whole tree. Stops (without error) as
   soon as buddy_tree_can_shrink reports the right subtree is in use or
   the root is fully allocated. */
static void buddy_tree_shrink(struct buddy_tree *t, uint8_t desired_order) {
    size_t current_order, next_order, node_count;
    struct buddy_tree_pos left_start;
    struct internal_position current_internal, next_internal;

    while (desired_order < t->order) {
        if (!buddy_tree_can_shrink(t)) {
            return;
        }

        /* Shrink the tree a single order at a time */
        current_order = t->order;
        next_order = current_order - 1;

        /* Rows are processed top-down; each left-subtree row slides left
           into the position it occupies in the smaller tree */
        left_start = buddy_tree_left_child(buddy_tree_root());
        while(buddy_tree_valid(t, left_start)) {
            /* Get handles into the rows at the tracked depth */
            current_internal = buddy_tree_internal_position_order(current_order, left_start);
            next_internal = buddy_tree_internal_position_order(next_order, buddy_tree_parent(left_start));

            /* There are this many nodes at the current level */
            node_count = two_to_the_power_of(left_start.depth - 1u);

            /* Transfer the bits*/
            bitset_shift_left(buddy_tree_bits(t),
                current_internal.bitset_location /* from here */,
                current_internal.bitset_location + (current_internal.local_offset * node_count / 2) /* up to here */,
                current_internal.bitset_location - next_internal.bitset_location/* at here */);

            /* Handle the lower level */
            left_start = buddy_tree_left_child(left_start);
        }

        /* Advance the order */
        t->order = (uint8_t) next_order;
        t->upper_pos_bound = two_to_the_power_of(t->order);
        buddy_tree_populate_size_for_order(t);
    }
}
1583
+
1584
/* A position is valid when non-zero and within the tree's index bound */
static bool buddy_tree_valid(struct buddy_tree *t, struct buddy_tree_pos pos) {
    return pos.index && (pos.index < t->upper_pos_bound);
}
1587
+
1588
/* Accessor for the tree's current order (depth) */
static uint8_t buddy_tree_order(struct buddy_tree *t) {
    return t->order;
}
1591
+
1592
/* The root position: heap-style index 1 at depth 1 */
static struct buddy_tree_pos buddy_tree_root(void) {
    struct buddy_tree_pos identity = { 1, 1 };
    return identity;
}
1596
+
1597
/* Leftmost leaf of the tree at its current order */
static struct buddy_tree_pos buddy_tree_leftmost_child(struct buddy_tree *t) {
    return buddy_tree_leftmost_child_internal(t->order);
}
1600
+
1601
/* Leftmost leaf for a tree of the given order: index 2^(order-1) at
   depth == order (heap numbering) */
static struct buddy_tree_pos buddy_tree_leftmost_child_internal(size_t tree_order) {
    struct buddy_tree_pos result;
    result.index = two_to_the_power_of(tree_order - 1u);
    result.depth = tree_order;
    return result;
}
1607
+
1608
/* Depth of a position (root is depth 1) */
static inline size_t buddy_tree_depth(struct buddy_tree_pos pos) {
    return pos.depth;
}
1611
+
1612
/* Heap navigation: left child is at index*2, one level deeper */
static inline struct buddy_tree_pos buddy_tree_left_child(struct buddy_tree_pos pos) {
    pos.index *= 2;
    pos.depth++;
    return pos;
}
1617
+
1618
/* Heap navigation: right child is at index*2+1, one level deeper */
static inline struct buddy_tree_pos buddy_tree_right_child(struct buddy_tree_pos pos) {
    pos.index *= 2;
    pos.index++;
    pos.depth++;
    return pos;
}
1624
+
1625
/* Sibling differs only in the lowest index bit */
static inline struct buddy_tree_pos buddy_tree_sibling(struct buddy_tree_pos pos) {
    pos.index ^= 1;
    return pos;
}
1629
+
1630
/* Heap navigation: parent is at index/2, one level shallower */
static inline struct buddy_tree_pos buddy_tree_parent(struct buddy_tree_pos pos) {
    pos.index /= 2;
    pos.depth--;
    return pos;
}
1635
+
1636
/* Next node to the right at the same depth, or INVALID_POS when pos is
   the last node in its row. */
static struct buddy_tree_pos buddy_tree_right_adjacent(struct buddy_tree_pos pos) {
    /* If incrementing flips a bit above the current top bit (the XOR of
       old and new index exceeds the old index) the increment would carry
       into the next row - pos is the rightmost node at this depth */
    if (((pos.index + 1) ^ pos.index) > pos.index) {
        return INVALID_POS;
    }
    pos.index++;
    return pos;
}
1643
+
1644
/* Zero-based index of pos within its row of siblings */
static size_t buddy_tree_index(struct buddy_tree_pos pos) {
    return buddy_tree_index_internal(pos);
}
1647
+
1648
static inline size_t buddy_tree_index_internal(struct buddy_tree_pos pos) {
    /* Clear out the highest bit, this gives us the index
     * in a row of sibling nodes */
    size_t mask = two_to_the_power_of(pos.depth - 1u);
    size_t result = pos.index & ~mask;
    return result;
}
1655
+
1656
/* The node bitset is laid out immediately after the tree header */
static inline unsigned char *buddy_tree_bits(struct buddy_tree *t) {
    return ((unsigned char *) t) + sizeof(*t);
}
1659
+
1660
/* Memoizes size_for_order(t->order, i) for i in [0, order] in a size_t
   table placed right after the (padded) bitset. Must mirror the layout
   arithmetic in buddy_tree_sizeof exactly. */
static void buddy_tree_populate_size_for_order(struct buddy_tree *t) {
    size_t bitset_offset = bitset_sizeof(size_for_order(t->order, 0));
    /* NOTE(review): same padding quirk as buddy_tree_sizeof - adding the
       remainder does not guarantee size_t alignment; the extra +1 slot
       below absorbs the slack. Keep both sites in sync. */
    if (bitset_offset % sizeof(size_t)) {
        bitset_offset += (bitset_offset % sizeof(size_t));
    }
    t->size_for_order_offset = bitset_offset / sizeof(size_t);
    t->size_for_order_offset++;
    for (size_t i = 0; i <= t->order; i++) {
        *((size_t *)(((unsigned char *) t) + sizeof(*t)) + t->size_for_order_offset + i) = size_for_order(t->order, (uint8_t) i);
    }
}
1671
+
1672
/* Memoized lookup of size_for_order(t->order, to); see
   buddy_tree_populate_size_for_order for the table layout. */
static inline size_t buddy_tree_size_for_order(struct buddy_tree *t,
        uint8_t to) {
    return *((size_t *)(((unsigned char *) t) + sizeof(*t)) + t->size_for_order_offset + to);
}
1676
+
1677
+ static void write_to_internal_position(struct buddy_tree* t, struct internal_position pos, size_t value) {
1678
+ unsigned char *bitset = buddy_tree_bits(t);
1679
+ struct bitset_range clear_range = to_bitset_range(pos.bitset_location, pos.bitset_location + pos.local_offset - 1);
1680
+
1681
+ bitset_clear_range(bitset, clear_range);
1682
+ if (value) {
1683
+ bitset_set_range(bitset, to_bitset_range(pos.bitset_location, pos.bitset_location+value-1));
1684
+ }
1685
+
1686
+ #ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
1687
+ /* Ignore the same bucket condition - we don't care if we track one more byte here */
1688
+ buddy_tree_track_change(t, bitset, clear_range.to_bucket - clear_range.from_bucket + 1);
1689
+ #endif
1690
+ }
1691
+
1692
/* Reads a node's unary counter: zero if the first bit is clear,
   otherwise the popcount of the counter's bit range. */
static inline size_t read_from_internal_position(unsigned char *bitset, struct internal_position pos) {
    if (! bitset_test(bitset, pos.bitset_location)) {
        return 0; /* Fast test without complete extraction */
    }
    return bitset_count_range(bitset, to_bitset_range(pos.bitset_location, pos.bitset_location+pos.local_offset-1));
}
1698
+
1699
/* Non-zero when the node's unary counter is >= value (value must be >= 1);
   cheaper than a full read because unary encoding makes bit value-1
   sufficient. */
static inline unsigned char compare_with_internal_position(unsigned char *bitset, struct internal_position pos, size_t value) {
    return bitset_test(bitset, pos.bitset_location+value-1);
}
1702
+
1703
/* Expands a node to the leaf-level interval it covers: descend to the
   leftmost and rightmost leaves beneath pos. */
static struct buddy_tree_interval to_buddy_tree_interval(struct buddy_tree *t, struct buddy_tree_pos pos) {
    struct buddy_tree_interval result;
    size_t depth;

    result.from = pos;
    result.to = pos;
    depth = pos.depth;
    while (depth != t->order) {
        result.from = buddy_tree_left_child(result.from);
        result.to = buddy_tree_right_child(result.to);
        depth += 1;
    }
    return result;
}
1717
+
1718
+ static bool buddy_tree_interval_contains(struct buddy_tree_interval outer,
1719
+ struct buddy_tree_interval inner) {
1720
+ return (inner.from.index >= outer.from.index)
1721
+ && (inner.from.index <= outer.to.index)
1722
+ && (inner.to.index >= outer.from.index)
1723
+ && (inner.to.index <= outer.to.index);
1724
+ }
1725
+
1726
/* Fresh depth-first walk state positioned at the root */
static struct buddy_tree_walk_state buddy_tree_walk_state_root(void) {
    struct buddy_tree_walk_state state;
    memset(&state, 0, sizeof(state));
    state.starting_pos = buddy_tree_root();
    state.current_pos = buddy_tree_root();
    return state;
}
1733
+
1734
/* Advances a pre-order depth-first walk by one node. Returns non-zero
   while there are more nodes to visit; callers may set state->going_up
   before calling to skip the current node's subtree. */
static unsigned int buddy_tree_walk(struct buddy_tree *t, struct buddy_tree_walk_state *state) {
    do {
        if (state->going_up) {
            if (state->current_pos.index == state->starting_pos.index) {
                state->walk_done = 1; /* back at the subtree root - done */
                state->going_up = 0;
            } else if (state->current_pos.index & 1u) {
                /* odd index == right child, both siblings visited */
                state->current_pos = buddy_tree_parent(state->current_pos); /* Ascend */
            } else {
                state->current_pos = buddy_tree_right_adjacent(state->current_pos); /* Descend right */
                state->going_up = 0;
            }
        } else if (buddy_tree_valid(t, buddy_tree_left_child(state->current_pos))) {
            /* Descend left */
            state->current_pos = buddy_tree_left_child(state->current_pos);
        } else { /* Ascend */
            state->going_up = 1;
        }
    } while(state->going_up);
    return ! state->walk_done;
}
1755
+
1756
/* Allocation status of a node: 0 when free, its counter width when
   fully used, intermediate values when partially used below. */
static size_t buddy_tree_status(struct buddy_tree *t, struct buddy_tree_pos pos) {
    struct internal_position internal = buddy_tree_internal_position_tree(t, pos);
    return read_from_internal_position(buddy_tree_bits(t), internal);
}
1760
+
1761
/* Marks a node as fully used and propagates the change to ancestors. */
static void buddy_tree_mark(struct buddy_tree *t, struct buddy_tree_pos pos) {
    /* Calling mark on a used position is a bug in caller */
    struct internal_position internal = buddy_tree_internal_position_tree(t, pos);

    /* Mark the node as used - local_offset doubles as the "fully used" value */
    write_to_internal_position(t, internal, internal.local_offset);

    /* Update the tree upwards */
    update_parent_chain(t, pos, internal, internal.local_offset);
}
1771
+
1772
/* Releases a fully-used node and propagates the change to ancestors.
   Fails without side effects when the node is not fully used. */
static enum buddy_tree_release_status buddy_tree_release(struct buddy_tree *t, struct buddy_tree_pos pos) {
    /* Calling release on an unused or a partially-used position is a bug in caller */
    struct internal_position internal = buddy_tree_internal_position_tree(t, pos);

    if (read_from_internal_position(buddy_tree_bits(t), internal) != internal.local_offset) {
        return BUDDY_TREE_RELEASE_FAIL_PARTIALLY_USED;
    }

    /* Mark the node as unused */
    write_to_internal_position(t, internal, 0);

    /* Update the tree upwards */
    update_parent_chain(t, pos, internal, 0);

    return BUDDY_TREE_RELEASE_SUCCESS;
}
1788
+
1789
/* Re-derives ancestor statuses after a node changed to size_current.
   A parent's status is 0 when both children are free, otherwise
   min(children) + 1. Stops early once an ancestor is already correct. */
static void update_parent_chain(struct buddy_tree *t, struct buddy_tree_pos pos,
        struct internal_position pos_internal, size_t size_current) {
    size_t size_sibling, size_parent, target_parent;
    unsigned char *bits = buddy_tree_bits(t);

    while (pos.index != 1) {
        /* Branchless hop to the sibling's counter: +offset when pos is a
           left child (even index), -offset when it is a right child */
        pos_internal.bitset_location += pos_internal.local_offset
            - (2 * pos_internal.local_offset * (pos.index & 1u));
        size_sibling = read_from_internal_position(bits, pos_internal);

        pos = buddy_tree_parent(pos);
        pos_internal = buddy_tree_internal_position_tree(t, pos);
        size_parent = read_from_internal_position(bits, pos_internal);

        /* Zero when both children are free; otherwise min + 1 */
        target_parent = (size_current || size_sibling)
            * ((size_current <= size_sibling ? size_current : size_sibling) + 1);
        if (target_parent == size_parent) {
            return; /* ancestors above are already consistent */
        }

        write_to_internal_position(t, pos_internal, target_parent);
        size_current = target_parent;
    };
}
1813
+
1814
/* Finds a free node at target_depth, or INVALID_POS when none fits.
   Descends preferring the more-used subtree (best-fit) to reduce
   fragmentation; ties and empty-right cases go left. */
static struct buddy_tree_pos buddy_tree_find_free(struct buddy_tree *t, uint8_t target_depth) {
    struct buddy_tree_pos current_pos, left_pos, right_pos;
    uint8_t target_status;
    size_t current_depth, right_status;
    struct internal_position left_internal, right_internal;
    unsigned char *tree_bits;

    current_pos = buddy_tree_root();
    /* A node can host a target_depth allocation iff its status is
       <= (its counter width - depth delta), tracked here as target_status */
    target_status = target_depth - 1;
    current_depth = buddy_tree_depth(current_pos);
    if (buddy_tree_status(t, current_pos) > target_status) {
        return INVALID_POS; /* No position available down the tree */
    }
    tree_bits = buddy_tree_bits(t);
    while (current_depth != target_depth) {
        /* Advance criteria */
        target_status -= 1;
        current_depth += 1;

        left_pos = buddy_tree_left_child(current_pos);
        right_pos = buddy_tree_sibling(left_pos);

        left_internal = buddy_tree_internal_position_tree(t, left_pos);

        /* Sibling counters are adjacent in the bitset - derive instead of recompute */
        right_internal = left_internal;
        right_internal.bitset_location += right_internal.local_offset; /* advance to the right */

        if (compare_with_internal_position(tree_bits, left_internal, target_status+1)) { /* left branch is busy, pick right */
            current_pos = right_pos;
        } else if (compare_with_internal_position(tree_bits, right_internal, target_status+1)) { /* right branch is busy, pick left */
            current_pos = left_pos;
        } else {
            /* One of the child nodes must be read in order to compare it to its sibling. */
            right_status = read_from_internal_position(tree_bits, right_internal);
            if (right_status) {
                if (compare_with_internal_position(tree_bits, left_internal, right_status)) {
                    current_pos = left_pos; /* Left is equal or more busy than right, prefer left */
                } else {
                    current_pos = right_pos;
                }
            } else { /* Right is empty, prefer left */
                current_pos = left_pos;
            }
        }
    }
    return current_pos;
}
1861
+
1862
/* True when neither the node itself nor any ancestor allocation covers
   it. A partially-used ancestor (status < its counter width) means the
   allocation lives deeper on another branch, so the node stays free. */
static bool buddy_tree_is_free(struct buddy_tree *t, struct buddy_tree_pos pos) {
    if (buddy_tree_status(t, pos)) {
        return false;
    }
    pos = buddy_tree_parent(pos);
    while(buddy_tree_valid(t, pos)) {
        struct internal_position internal = buddy_tree_internal_position_tree(t, pos);
        size_t value = read_from_internal_position(buddy_tree_bits(t), internal);
        if (value) {
            /* Fully-used ancestor (value == local_offset) covers pos */
            return value != internal.local_offset;
        }
        pos = buddy_tree_parent(pos);
    }
    return true;
}
1877
+
1878
/* Shrinking drops the right subtree and promotes the left one, so it is
   only possible when the right subtree is untouched and the root itself
   is not a single full-tree allocation. */
static bool buddy_tree_can_shrink(struct buddy_tree *t) {
    struct internal_position root_internal;
    size_t root_value;

    if (buddy_tree_status(t, buddy_tree_right_child(buddy_tree_root())) != 0) {
        return false; /* Refusing to shrink with right subtree still used! */
    }
    root_internal = buddy_tree_internal_position_tree(t, buddy_tree_root());
    root_value = read_from_internal_position(buddy_tree_bits(t), root_internal);
    if (root_value == root_internal.local_offset) {
        return false; /* Refusing to shrink with the root fully-allocated! */
    }
    return true;
}
1892
+
1893
#ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
/* Recovers the owning allocator from its embedded tree; relies on the
   tree being laid out immediately after struct buddy. */
static struct buddy* buddy_tree_buddy(struct buddy_tree* t) {
    return (struct buddy*)(((unsigned char*)t) - sizeof(struct buddy));
}
#endif /* BUDDY_EXPERIMENTAL_CHANGE_TRACKING */
1898
+
1899
/* Prints every node of the subtree at pos: index, depth, status and
   bitset coordinates; sizes are shown only for fully-used nodes.
   start_size is the byte size covered by the subtree root. */
static void buddy_tree_debug(struct buddy_tree *t, struct buddy_tree_pos pos,
        size_t start_size) {
    struct buddy_tree_walk_state state = buddy_tree_walk_state_root();
    state.current_pos = pos;
    do {
        struct internal_position pos_internal = buddy_tree_internal_position_tree(t, state.current_pos);
        size_t pos_status = read_from_internal_position(buddy_tree_bits(t), pos_internal);
        /* The modulo clamps the shift below the size_t bit width to avoid UB */
        size_t pos_size = start_size >> ((buddy_tree_depth(state.current_pos) - 1u) % ((sizeof(size_t) * CHAR_BIT)-1));
        /* Indent by depth */
        BUDDY_PRINTF("%.*s",
            (int) buddy_tree_depth(state.current_pos),
            "                                                               ");
        BUDDY_PRINTF("pos index: %zu pos depth: %zu status: %zu bitset-len: %zu bitset-at: %zu",
            state.current_pos.index, state.current_pos.depth, pos_status,
            pos_internal.local_offset, pos_internal.bitset_location);
        if (pos_status == pos_internal.local_offset) {
            BUDDY_PRINTF(" size: %zu", pos_size);
        }
        BUDDY_PRINTF("\n");
    } while (buddy_tree_walk(t, &state));
}
1919
+
1920
/* Verifies the tree invariant (parent == min(children) + 1 when any
   child is used) over a walk started at pos; returns 1 on violation.
   NOTE(review): the loop body reads `pos` on every iteration while the
   walk advances `state.current_pos`, so the same node is re-checked
   each step - this looks like it should use state.current_pos; confirm
   against upstream before changing. */
unsigned int buddy_tree_check_invariant(struct buddy_tree *t, struct buddy_tree_pos pos) {
    unsigned int fail = 0;
    struct buddy_tree_walk_state state = buddy_tree_walk_state_root();
    state.current_pos = pos;
    do {
        struct internal_position current_internal = buddy_tree_internal_position_tree(t, pos);
        size_t current_status = read_from_internal_position(buddy_tree_bits(t), current_internal);
        size_t left_child_status = buddy_tree_status(t, buddy_tree_left_child(pos));
        size_t right_child_status = buddy_tree_status(t, buddy_tree_right_child(pos));
        unsigned int violated = 0;

        if (left_child_status || right_child_status) {
            /* Interior invariant: status must be min(children) + 1 */
            size_t min = left_child_status <= right_child_status
                ? left_child_status : right_child_status;
            if (current_status != (min + 1)) {
                violated = 1;
            }
        } else {
            /* Both children free: node must be either free or fully used */
            if ((current_status > 0) && (current_status < current_internal.local_offset)) {
                violated = 1;
            }
        }

        if (violated) {
            fail = 1;
            BUDDY_PRINTF("invariant violation at position [ index: %zu depth: %zu ]!\n", pos.index, pos.depth);
            BUDDY_PRINTF("current: %zu left %zu right %zu max %zu\n",
                current_status, left_child_status, right_child_status, current_internal.local_offset);
        }

    } while (buddy_tree_walk(t, &state));
    return fail;
}
1953
+
1954
/*
 * Calculate tree fragmentation based on free slots.
 * Based on https://asawicki.info/news_1757_a_metric_for_memory_fragmentation
 */
static unsigned char buddy_tree_fragmentation(struct buddy_tree *t) {
    /* 8.8 fixed-point arithmetic for the quality ratio */
    const unsigned char fractional_bits = 8;
    const unsigned char fractional_mask = 255;

    uint8_t tree_order;
    size_t root_status, quality, total_free_size, virtual_size, quality_percent;
    struct buddy_tree_walk_state state;

    tree_order = buddy_tree_order(t);
    root_status = buddy_tree_status(t, buddy_tree_root());
    if (root_status == 0) { /* Empty tree */
        return 0;
    }

    quality = 0;
    total_free_size = 0;

    state = buddy_tree_walk_state_root();
    do {
        size_t pos_status = buddy_tree_status(t, state.current_pos);
        if (pos_status == 0) {
            /* Empty node, process */
            /* Modulo keeps the shift below the size_t bit width (no UB) */
            virtual_size = two_to_the_power_of((tree_order - state.current_pos.depth) % ((sizeof(size_t) * CHAR_BIT)-1));
            quality += (virtual_size * virtual_size);
            total_free_size += virtual_size;
            /* Ascend */
            state.going_up = 1;
        } else if (pos_status == (tree_order - state.current_pos.depth + 1)) {
            /* Busy node, ascend */
            state.going_up = 1;
        }
    } while (buddy_tree_walk(t, &state));

    if (total_free_size == 0) { /* Fully-allocated tree */
        return 0;
    }

    /* quality_percent = (sqrt(quality) / total_free_size)^2 in 8.8 fixed point */
    quality_percent = (integer_square_root(quality) << fractional_bits) / total_free_size;
    quality_percent *= quality_percent;
    quality_percent >>= fractional_bits;
    return fractional_mask - (quality_percent & fractional_mask);
}
2000
+
2001
#ifdef BUDDY_EXPERIMENTAL_CHANGE_TRACKING
/* Reports a modified region of the tree bitset to the user-provided
   callback stored at the start of the arena.
   Fix: the flag test used logical AND (t->flags && BUDDY_TREE_CHANGE_TRACKING),
   which tests "any flag set" rather than this specific flag; correct is
   a bitwise AND (CERT EXP46-C). Equivalent today only because this is
   the sole defined flag. */
static inline void buddy_tree_track_change(struct buddy_tree* t, unsigned char* addr, size_t length) {
    struct buddy_change_tracker *header;

    if (!(t->flags & BUDDY_TREE_CHANGE_TRACKING)) {
        return;
    }

    header = (struct buddy_change_tracker *) buddy_main(buddy_tree_buddy(t));
    header->tracker(header->context, addr, length);
}
#endif /* BUDDY_EXPERIMENTAL_CHANGE_TRACKING */
2013
+
2014
/*
 * A char-backed bitset implementation
 */

/* Number of bytes required to store the given number of bits, rounded up. */
size_t bitset_sizeof(size_t elements) {
    return (elements + CHAR_BIT - 1u) / CHAR_BIT;
}
2021
+
2022
/* Single-bit masks indexed by bit position within a byte (mask[i] == 1u << i).
   Read-only, so declared const for consistency with bitset_char_mask below. */
static const uint8_t bitset_index_mask[8] = {1, 2, 4, 8, 16, 32, 64, 128};
2023
+
2024
/* Sets bit `pos` in the bitset. The shift is equivalent to the
   bitset_index_mask table lookup (mask[i] == 1u << i). */
static inline void bitset_set(unsigned char *bitset, size_t pos) {
    bitset[pos / CHAR_BIT] |= (unsigned char) (1u << (pos % CHAR_BIT));
}
2029
+
2030
/* Clears bit `pos` in the bitset. The inverted shift is equivalent to
   ~bitset_index_mask[pos % CHAR_BIT]. */
static inline void bitset_clear(unsigned char *bitset, size_t pos) {
    bitset[pos / CHAR_BIT] &= (unsigned char) ~(1u << (pos % CHAR_BIT));
}
2035
+
2036
/* Tests bit `pos` in the bitset; equivalent to masking with
   bitset_index_mask[pos % CHAR_BIT] and converting to bool. */
static inline bool bitset_test(const unsigned char *bitset, size_t pos) {
    return (bitset[pos / CHAR_BIT] >> (pos % CHAR_BIT)) & 1u;
}
2041
+
2042
/* Range masks within a single byte: bitset_char_mask[from][to] has bits
   from..to (inclusive) set; entries with from > to are unused (zero). */
static const uint8_t bitset_char_mask[8][8] = {
    {1, 3, 7, 15, 31, 63, 127, 255},
    {0, 2, 6, 14, 30, 62, 126, 254},
    {0, 0, 4, 12, 28, 60, 124, 252},
    {0, 0, 0, 8, 24, 56, 120, 248},
    {0, 0, 0, 0, 16, 48, 112, 240},
    {0, 0, 0, 0, 0, 32, 96, 224},
    {0, 0, 0, 0, 0, 0, 64, 192},
    {0, 0, 0, 0, 0, 0, 0, 128},
};
2052
+
2053
/* Splits an inclusive bit range [from_pos, to_pos] into byte bucket
   indices and in-byte bit indices for the range operations below. */
static inline struct bitset_range to_bitset_range(size_t from_pos, size_t to_pos) {
    struct bitset_range range = {0};
    range.from_bucket = from_pos / CHAR_BIT;
    range.to_bucket = to_pos / CHAR_BIT;

    range.from_index = from_pos % CHAR_BIT;
    range.to_index = to_pos % CHAR_BIT;
    return range;
}
2062
+
2063
/* Sets every bit in the (inclusive) range: partial first and last bytes
   via bitset_char_mask, full interior bytes via memset. */
static void bitset_set_range(unsigned char *bitset, struct bitset_range range) {
    if (range.from_bucket == range.to_bucket) {
        bitset[range.from_bucket] |= bitset_char_mask[range.from_index][range.to_index];
    } else {
        bitset[range.from_bucket] |= bitset_char_mask[range.from_index][7];
        bitset[range.to_bucket] |= bitset_char_mask[0][range.to_index];

        range.from_bucket++;

        if (range.to_bucket - range.from_bucket) {
            memset(bitset + range.from_bucket, 255u, range.to_bucket - range.from_bucket);
        }
    }
}
2077
+
2078
/* Clears every bit in the (inclusive) range; mirror of bitset_set_range. */
static void bitset_clear_range(unsigned char* bitset, struct bitset_range range) {
    if (range.from_bucket == range.to_bucket) {
        bitset[range.from_bucket] &= ~bitset_char_mask[range.from_index][range.to_index];
    }
    else {
        bitset[range.from_bucket] &= ~bitset_char_mask[range.from_index][7];
        bitset[range.to_bucket] &= ~bitset_char_mask[0][range.to_index];

        range.from_bucket++;

        if (range.to_bucket - range.from_bucket) {
            memset(bitset + range.from_bucket, 0, range.to_bucket - range.from_bucket);
        }
    }
}
2093
+
2094
+ static size_t bitset_count_range(unsigned char *bitset, struct bitset_range range) {
2095
+ size_t result;
2096
+
2097
+ if (range.from_bucket == range.to_bucket) {
2098
+ return popcount_byte(bitset[range.from_bucket] & bitset_char_mask[range.from_index][range.to_index]);
2099
+ }
2100
+
2101
+ result = popcount_byte(bitset[range.from_bucket] & bitset_char_mask[range.from_index][7])
2102
+ + popcount_byte(bitset[range.to_bucket] & bitset_char_mask[0][range.to_index]);
2103
+ while(++range.from_bucket != range.to_bucket) {
2104
+ result += popcount_byte(bitset[range.from_bucket]);
2105
+ }
2106
+ return result;
2107
+ }
2108
+
2109
/*
 * Shift the bits in [from_pos, to_pos) left (towards lower positions) by
 * `by` places, clearing each source slot after it is copied. Walks
 * low-to-high so a source bit is always read before its destination-side
 * alias could be cleared.
 */
static void bitset_shift_left(unsigned char *bitset, size_t from_pos, size_t to_pos, size_t by) {
    for (size_t at = from_pos; at < to_pos; at++) {
        if (bitset_test(bitset, at)) {
            bitset_set(bitset, at - by);
        } else {
            bitset_clear(bitset, at - by);
        }
        bitset_clear(bitset, at);
    }
}
2121
+
2122
/*
 * Shift the bits in [from_pos, to_pos] — inclusive of to_pos, unlike
 * bitset_shift_left — right (towards higher positions) by `by` places,
 * clearing each source slot after it is copied. Walks high-to-low so a
 * destination slot is never overwritten before its own bit has moved.
 *
 * Fixed: the previous version drove a signed countdown by casting the
 * positions to ssize_t, which is a POSIX type (not ISO C) and whose
 * conversion from size_t is implementation-defined for values above
 * SSIZE_MAX. This unsigned countdown visits exactly the same positions
 * (to_pos down to from_pos) and, like the original, performs no
 * iterations when to_pos < from_pos.
 */
static void bitset_shift_right(unsigned char *bitset, size_t from_pos, size_t to_pos, size_t by) {
    for (size_t at = to_pos + 1; at-- > from_pos;) {
        if (bitset_test(bitset, at)) {
            bitset_set(bitset, at + by);
        } else {
            bitset_clear(bitset, at + by);
        }
        bitset_clear(bitset, at);
    }
}
2135
+
2136
/*
 * Debug helper: print one "index: bit" line per bit for the first `length`
 * bits of the bitset via BUDDY_PRINTF (printf unless overridden).
 */
void bitset_debug(unsigned char *bitset, size_t length) {
    for (size_t i = 0; i < length; i++) {
        BUDDY_PRINTF("%zu: %d\n", i, bitset_test(bitset, i) > 0);
    }
}
2141
+
2142
+ /*
2143
+ Bits
2144
+ */
2145
+
2146
/*
 * Count the set bits in a single byte using SWAR arithmetic: pair sums,
 * then nibble sums, then the final byte sum. Produces the same result as
 * a 256-entry lookup table for every possible input.
 */
static inline unsigned int popcount_byte(unsigned char b) {
    unsigned int v = b;
    v = v - ((v >> 1) & 0x55u);            /* 2-bit pair counts  */
    v = (v & 0x33u) + ((v >> 2) & 0x33u);  /* 4-bit nibble counts */
    return (v + (v >> 4)) & 0x0Fu;         /* total, 0..8        */
}
2159
+
2160
/*
 * Returns the number of trailing zero bits in val, or 64 when val is 0
 * (the lookup67[0] sentinel entry).
 *
 * (val & -val) isolates the lowest set bit; negation of the unsigned
 * size_t is well-defined (wraps modulo 2^N). Each of the 64 possible
 * isolated-bit values, plus zero, has a distinct residue modulo 67, so a
 * 68-entry table indexed by that residue recovers the bit position. The
 * -1 entries mark residues that no power of two produces; they are never
 * read.
 */
static unsigned char count_trailing_zeroes(size_t val) {
    /* Implementation from https://www.chessprogramming.org/BitScan */
    static const signed char lookup67[67+1] = {
        64,  0,  1, 39,  2, 15, 40, 23,
        3, 12, 16, 59, 41, 19, 24, 54,
        4, -1, 13, 10, 17, 62, 60, 28,
        42, 30, 20, 51, 25, 44, 55, 47,
        5, 32, -1, 38, 14, 22, 11, 58,
        18, 53, 63,  9, 61, 27, 29, 50,
        43, 46, 31, 37, 21, 57, 52,  8,
        26, 49, 45, 36, 56,  7, 48, 35,
        6, 34, 33, -1 };
    return ((unsigned char) lookup67[(val & -val) % 67]);
}
2174
+
2175
/* Returns the highest set bit position for the given value. Returns zero for zero. */
static size_t highest_bit_position(size_t value) {
    /*
     * Binary-search-style bit length: repeatedly shift away the low half
     * whenever the remaining value still reaches the threshold, summing the
     * shift amounts. The final `+ value` adds 1 iff anything is left, so the
     * result is 1-based (e.g. 1 -> 1, 4 -> 3) and 0 maps to 0.
     */
    /* some other millennia when size_t becomes 128-bit this will break :) */
#if SIZE_MAX == 0xFFFFFFFFFFFFFFFF
    static const size_t thresholds[] = {4294967295, 65535, 255, 15, 7, 3, 1};
    static const size_t shifts[] = {32, 16, 8, 4, 2, 1, 1};
#elif SIZE_MAX == 0xFFFFFFFF
    static const size_t thresholds[] = {65535, 255, 15, 7, 3, 1};
    static const size_t shifts[] = {16, 8, 4, 2, 1, 1};
#else
#error Unsupported platform
#endif

    size_t position = 0;
    for (size_t step = 0; step < (sizeof thresholds / sizeof *thresholds); step++) {
        if (value >= thresholds[step]) {
            value >>= shifts[step];
            position += shifts[step];
        }
    }
    return position + value;
}
2197
+
2198
/* Round value up to the nearest power of two; 0 is treated as 1. */
static inline size_t ceiling_power_of_two(size_t value) {
    if (value == 0) {
        value = 1;
    }
    /* value+value-1 has the next power's top bit set unless value already is one. */
    return two_to_the_power_of(highest_bit_position(value + value - 1) - 1);
}
2202
+
2203
/* 2 raised to the given order, as a size_t-wide left shift. */
static inline size_t two_to_the_power_of(size_t order) {
    const size_t one = 1;
    return one << order;
}
2206
+
2207
/*
 * Floor of the square root of op, computed digit-by-digit in base 4.
 * Algorithm by Martin Guy, 1985 - http://medialab.freaknet.org/martin/src/sqrt/
 */
static inline size_t integer_square_root(size_t op) {
    size_t root = 0;
    size_t probe = (SIZE_MAX - (SIZE_MAX >> 1)) >> 1; /* second-to-top bit set */

    /* Drop probe to the highest power of four not exceeding the argument. */
    while (probe > op) {
        probe >>= 2;
    }

    /* Accept or reject one result bit per iteration. */
    while (probe != 0) {
        if (op >= root + probe) {
            op -= root + probe;
            root += probe << 1;
        }
        root >>= 1;
        probe >>= 2;
    }
    return root;
}
2225
+
2226
+ #ifdef __cplusplus
2227
+ #ifndef BUDDY_CPP_MANGLED
2228
+ }
2229
+ #endif
2230
+ #endif
2231
+
2232
+ #endif /* BUDDY_ALLOC_IMPLEMENTATION */