tigerbeetle-node 0.9.0 → 0.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (112)
  1. package/README.md +305 -103
  2. package/dist/index.d.ts +70 -67
  3. package/dist/index.js +70 -67
  4. package/dist/index.js.map +1 -1
  5. package/package.json +6 -6
  6. package/scripts/download_node_headers.sh +14 -7
  7. package/src/index.ts +11 -10
  8. package/src/node.zig +22 -20
  9. package/src/tigerbeetle/scripts/benchmark.bat +4 -3
  10. package/src/tigerbeetle/scripts/benchmark.sh +25 -10
  11. package/src/tigerbeetle/scripts/confirm_image.sh +44 -0
  12. package/src/tigerbeetle/scripts/fuzz_loop.sh +15 -0
  13. package/src/tigerbeetle/scripts/fuzz_unique_errors.sh +7 -0
  14. package/src/tigerbeetle/scripts/install.sh +20 -4
  15. package/src/tigerbeetle/scripts/install_zig.bat +5 -1
  16. package/src/tigerbeetle/scripts/install_zig.sh +32 -26
  17. package/src/tigerbeetle/scripts/pre-commit.sh +9 -0
  18. package/src/tigerbeetle/scripts/shellcheck.sh +5 -0
  19. package/src/tigerbeetle/scripts/tests_on_alpine.sh +10 -0
  20. package/src/tigerbeetle/scripts/tests_on_ubuntu.sh +14 -0
  21. package/src/tigerbeetle/scripts/upgrade_ubuntu_kernel.sh +12 -3
  22. package/src/tigerbeetle/src/benchmark.zig +19 -9
  23. package/src/tigerbeetle/src/benchmark_array_search.zig +317 -0
  24. package/src/tigerbeetle/src/benchmarks/perf.zig +299 -0
  25. package/src/tigerbeetle/src/c/tb_client/context.zig +103 -0
  26. package/src/tigerbeetle/src/c/tb_client/packet.zig +80 -0
  27. package/src/tigerbeetle/src/c/tb_client/signal.zig +288 -0
  28. package/src/tigerbeetle/src/c/tb_client/thread.zig +328 -0
  29. package/src/tigerbeetle/src/c/tb_client.h +221 -0
  30. package/src/tigerbeetle/src/c/tb_client.zig +104 -0
  31. package/src/tigerbeetle/src/c/test.zig +1 -0
  32. package/src/tigerbeetle/src/cli.zig +143 -84
  33. package/src/tigerbeetle/src/config.zig +161 -20
  34. package/src/tigerbeetle/src/demo.zig +14 -8
  35. package/src/tigerbeetle/src/demo_05_post_pending_transfers.zig +2 -2
  36. package/src/tigerbeetle/src/ewah.zig +318 -0
  37. package/src/tigerbeetle/src/ewah_benchmark.zig +121 -0
  38. package/src/tigerbeetle/src/eytzinger_benchmark.zig +317 -0
  39. package/src/tigerbeetle/src/fifo.zig +17 -1
  40. package/src/tigerbeetle/src/io/darwin.zig +12 -10
  41. package/src/tigerbeetle/src/io/linux.zig +25 -9
  42. package/src/tigerbeetle/src/io/windows.zig +13 -9
  43. package/src/tigerbeetle/src/iops.zig +101 -0
  44. package/src/tigerbeetle/src/lsm/README.md +214 -0
  45. package/src/tigerbeetle/src/lsm/binary_search.zig +341 -0
  46. package/src/tigerbeetle/src/lsm/bloom_filter.zig +125 -0
  47. package/src/tigerbeetle/src/lsm/compaction.zig +557 -0
  48. package/src/tigerbeetle/src/lsm/composite_key.zig +77 -0
  49. package/src/tigerbeetle/src/lsm/direction.zig +11 -0
  50. package/src/tigerbeetle/src/lsm/eytzinger.zig +587 -0
  51. package/src/tigerbeetle/src/lsm/forest.zig +204 -0
  52. package/src/tigerbeetle/src/lsm/forest_fuzz.zig +412 -0
  53. package/src/tigerbeetle/src/lsm/grid.zig +549 -0
  54. package/src/tigerbeetle/src/lsm/groove.zig +1002 -0
  55. package/src/tigerbeetle/src/lsm/k_way_merge.zig +474 -0
  56. package/src/tigerbeetle/src/lsm/level_iterator.zig +315 -0
  57. package/src/tigerbeetle/src/lsm/manifest.zig +580 -0
  58. package/src/tigerbeetle/src/lsm/manifest_level.zig +925 -0
  59. package/src/tigerbeetle/src/lsm/manifest_log.zig +953 -0
  60. package/src/tigerbeetle/src/lsm/node_pool.zig +231 -0
  61. package/src/tigerbeetle/src/lsm/posted_groove.zig +387 -0
  62. package/src/tigerbeetle/src/lsm/segmented_array.zig +1318 -0
  63. package/src/tigerbeetle/src/lsm/segmented_array_benchmark.zig +148 -0
  64. package/src/tigerbeetle/src/lsm/segmented_array_fuzz.zig +9 -0
  65. package/src/tigerbeetle/src/lsm/set_associative_cache.zig +894 -0
  66. package/src/tigerbeetle/src/lsm/table.zig +967 -0
  67. package/src/tigerbeetle/src/lsm/table_immutable.zig +203 -0
  68. package/src/tigerbeetle/src/lsm/table_iterator.zig +306 -0
  69. package/src/tigerbeetle/src/lsm/table_mutable.zig +174 -0
  70. package/src/tigerbeetle/src/lsm/test.zig +423 -0
  71. package/src/tigerbeetle/src/lsm/tree.zig +1090 -0
  72. package/src/tigerbeetle/src/lsm/tree_fuzz.zig +457 -0
  73. package/src/tigerbeetle/src/main.zig +141 -109
  74. package/src/tigerbeetle/src/message_bus.zig +49 -48
  75. package/src/tigerbeetle/src/message_pool.zig +22 -12
  76. package/src/tigerbeetle/src/ring_buffer.zig +126 -30
  77. package/src/tigerbeetle/src/simulator.zig +205 -140
  78. package/src/tigerbeetle/src/state_machine.zig +1268 -721
  79. package/src/tigerbeetle/src/static_allocator.zig +65 -0
  80. package/src/tigerbeetle/src/storage.zig +40 -14
  81. package/src/tigerbeetle/src/test/accounting/auditor.zig +577 -0
  82. package/src/tigerbeetle/src/test/accounting/workload.zig +819 -0
  83. package/src/tigerbeetle/src/test/cluster.zig +104 -88
  84. package/src/tigerbeetle/src/test/conductor.zig +365 -0
  85. package/src/tigerbeetle/src/test/fuzz.zig +121 -0
  86. package/src/tigerbeetle/src/test/id.zig +89 -0
  87. package/src/tigerbeetle/src/test/message_bus.zig +15 -24
  88. package/src/tigerbeetle/src/test/network.zig +26 -17
  89. package/src/tigerbeetle/src/test/priority_queue.zig +645 -0
  90. package/src/tigerbeetle/src/test/state_checker.zig +94 -68
  91. package/src/tigerbeetle/src/test/state_machine.zig +135 -69
  92. package/src/tigerbeetle/src/test/storage.zig +78 -28
  93. package/src/tigerbeetle/src/tigerbeetle.zig +19 -16
  94. package/src/tigerbeetle/src/unit_tests.zig +15 -0
  95. package/src/tigerbeetle/src/util.zig +51 -0
  96. package/src/tigerbeetle/src/vopr.zig +494 -0
  97. package/src/tigerbeetle/src/vopr_hub/README.md +58 -0
  98. package/src/tigerbeetle/src/vopr_hub/SETUP.md +199 -0
  99. package/src/tigerbeetle/src/vopr_hub/go.mod +3 -0
  100. package/src/tigerbeetle/src/vopr_hub/main.go +1022 -0
  101. package/src/tigerbeetle/src/vopr_hub/scheduler/go.mod +3 -0
  102. package/src/tigerbeetle/src/vopr_hub/scheduler/main.go +403 -0
  103. package/src/tigerbeetle/src/vsr/client.zig +34 -7
  104. package/src/tigerbeetle/src/vsr/journal.zig +164 -174
  105. package/src/tigerbeetle/src/vsr/replica.zig +1602 -651
  106. package/src/tigerbeetle/src/vsr/superblock.zig +1761 -0
  107. package/src/tigerbeetle/src/vsr/superblock_client_table.zig +255 -0
  108. package/src/tigerbeetle/src/vsr/superblock_free_set.zig +644 -0
  109. package/src/tigerbeetle/src/vsr/superblock_manifest.zig +561 -0
  110. package/src/tigerbeetle/src/vsr.zig +118 -170
  111. package/src/tigerbeetle/scripts/vopr.bat +0 -48
  112. package/src/tigerbeetle/scripts/vopr.sh +0 -33
@@ -0,0 +1,894 @@
1
+ const std = @import("std");
2
+ const builtin = @import("builtin");
3
+
4
+ const assert = std.debug.assert;
5
+ const math = std.math;
6
+ const mem = std.mem;
7
+ const meta = std.meta;
8
+ const Vector = meta.Vector;
9
+
10
+ const config = @import("../config.zig");
11
+ const div_ceil = @import("../util.zig").div_ceil;
12
+ const verify = config.verify;
13
+
14
/// Compile-time geometry of a SetAssociativeCache.
/// Counts are in units of ways/bits; `cache_line_size` is in bytes.
pub const Layout = struct {
    /// Number of slots ("ways") per set. Must be 2, 4 or 16 (enforced at comptime).
    ways: u64 = 16,
    /// Width of each partial-hash tag. Must be 8 or 16.
    tag_bits: u64 = 8,
    /// Width of each CLOCK reference counter. Must be 1, 2 or 4.
    clock_bits: u64 = 2,
    /// Cache line size of the target CPU, in bytes.
    cache_line_size: u64 = 64,
    /// Set this to a non-null value to override the alignment of the stored values.
    value_alignment: ?u29 = null,
};
22
+
23
/// Each Key is associated with a set of n consecutive ways (or slots) that may contain the Value.
pub fn SetAssociativeCache(
    comptime Key: type,
    comptime Value: type,
    comptime key_from_value: fn (*const Value) callconv(.Inline) Key,
    comptime hash: fn (Key) callconv(.Inline) u64,
    comptime equal: fn (Key, Key) callconv(.Inline) bool,
    comptime layout: Layout,
) type {
    assert(math.isPowerOfTwo(@sizeOf(Key)));
    assert(math.isPowerOfTwo(@sizeOf(Value)));

    switch (layout.ways) {
        // An 8-way set-associative cache has the clock hand as a u3, which would introduce padding.
        2, 4, 16 => {},
        else => @compileError("ways must be 2, 4 or 16 for optimal CLOCK hand size."),
    }
    switch (layout.tag_bits) {
        8, 16 => {},
        else => @compileError("tag_bits must be 8 or 16."),
    }
    switch (layout.clock_bits) {
        1, 2, 4 => {},
        else => @compileError("clock_bits must be 1, 2 or 4."),
    }

    if (layout.value_alignment) |alignment| {
        assert(alignment >= @alignOf(Value));
        assert(@sizeOf(Value) % alignment == 0);
    }
    const value_alignment = layout.value_alignment orelse @alignOf(Value);

    assert(math.isPowerOfTwo(layout.ways));
    assert(math.isPowerOfTwo(layout.tag_bits));
    assert(math.isPowerOfTwo(layout.clock_bits));
    assert(math.isPowerOfTwo(layout.cache_line_size));

    assert(@sizeOf(Key) <= @sizeOf(Value));
    assert(@sizeOf(Key) < layout.cache_line_size);
    assert(layout.cache_line_size % @sizeOf(Key) == 0);

    // A Value either fits within a cache line a whole number of times,
    // or spans a whole number of cache lines.
    if (layout.cache_line_size > @sizeOf(Value)) {
        assert(layout.cache_line_size % @sizeOf(Value) == 0);
    } else {
        assert(@sizeOf(Value) % layout.cache_line_size == 0);
    }

    const clock_hand_bits = math.log2_int(u64, layout.ways);
    assert(math.isPowerOfTwo(clock_hand_bits));
    assert((1 << clock_hand_bits) == layout.ways);

    const tags_per_line = @divExact(layout.cache_line_size * 8, layout.ways * layout.tag_bits);
    assert(tags_per_line > 0);

    const clocks_per_line = @divExact(layout.cache_line_size * 8, layout.ways * layout.clock_bits);
    assert(clocks_per_line > 0);

    const clock_hands_per_line = @divExact(layout.cache_line_size * 8, clock_hand_bits);
    assert(clock_hands_per_line > 0);

    return struct {
        const Self = @This();

        const Tag = meta.Int(.unsigned, layout.tag_bits);
        const Count = meta.Int(.unsigned, layout.clock_bits);
        const Clock = meta.Int(.unsigned, clock_hand_bits);

        /// The number of sets; each set holds layout.ways values.
        sets: u64,

        /// A short, partial hash of a Key, corresponding to a Value.
        /// Because the tag is small, collisions are possible:
        /// `tag(v₁) = tag(v₂)` does not imply `v₁ = v₂`.
        /// However, most of the time, where the tag differs, a full key comparison can be avoided.
        /// Since tags are 16-32x smaller than keys, they can also be kept hot in cache.
        tags: []Tag,

        /// When the corresponding Count is zero, the Value is absent.
        values: []align(value_alignment) Value,

        /// Each value has a Count, which tracks the number of recent reads.
        ///
        /// * A Count is incremented when the value is accessed by `get`.
        /// * A Count is decremented when a cache write to the value's Set misses.
        /// * The value is evicted when its Count reaches zero.
        ///
        counts: PackedUnsignedIntegerArray(Count),

        /// Each set has a Clock: a counter that cycles between each of the set's ways (i.e. slots).
        ///
        /// On cache write, entries are checked for occupancy (or eviction) beginning from the
        /// clock's position, wrapping around.
        ///
        /// The algorithm implemented is "CLOCK Nth-Chance" — each way has more than one bit,
        /// to give ways more than one chance before eviction.
        ///
        /// * A similar algorithm called "RRIParoo" is described in
        ///   "Kangaroo: Caching Billions of Tiny Objects on Flash".
        /// * For more general information on CLOCK algorithms, see:
        ///   https://en.wikipedia.org/wiki/Page_replacement_algorithm.
        clocks: PackedUnsignedIntegerArray(Clock),

        /// Allocates a cache with capacity for exactly `value_count_max` values.
        /// `value_count_max` must be a power of two and a multiple of layout.ways.
        /// All entries start absent; caller must call deinit() to free.
        pub fn init(allocator: mem.Allocator, value_count_max: u64) !Self {
            assert(math.isPowerOfTwo(value_count_max));
            assert(value_count_max > 0);
            assert(value_count_max >= layout.ways);
            assert(value_count_max % layout.ways == 0);

            const sets = @divExact(value_count_max, layout.ways);
            assert(math.isPowerOfTwo(sets));

            const values_size_max = value_count_max * @sizeOf(Value);
            assert(values_size_max >= layout.cache_line_size);
            assert(values_size_max % layout.cache_line_size == 0);

            const counts_size = @divExact(value_count_max * layout.clock_bits, 8);
            assert(counts_size >= layout.cache_line_size);
            assert(counts_size % layout.cache_line_size == 0);

            const clocks_size = @divExact(sets * clock_hand_bits, 8);
            assert(clocks_size >= layout.cache_line_size);
            assert(clocks_size % layout.cache_line_size == 0);

            const tags = try allocator.alloc(Tag, value_count_max);
            errdefer allocator.free(tags);

            const values = try allocator.allocAdvanced(
                Value,
                value_alignment,
                value_count_max,
                .exact,
            );
            errdefer allocator.free(values);

            // counts and clocks are stored as packed u64 words.
            const counts = try allocator.alloc(u64, @divExact(counts_size, @sizeOf(u64)));
            errdefer allocator.free(counts);

            const clocks = try allocator.alloc(u64, @divExact(clocks_size, @sizeOf(u64)));
            errdefer allocator.free(clocks);

            var self = Self{
                .sets = sets,
                .tags = tags,
                .values = values,
                .counts = .{ .words = counts },
                .clocks = .{ .words = clocks },
            };

            self.reset();

            return self;
        }

        /// Frees all memory. Must be passed the same allocator as init().
        pub fn deinit(self: *Self, allocator: mem.Allocator) void {
            assert(self.sets > 0);
            // Zeroing sets catches accidental double-deinit via the assert above.
            self.sets = 0;

            allocator.free(self.tags);
            allocator.free(self.values);
            allocator.free(self.counts.words);
            allocator.free(self.clocks.words);
        }

        /// Marks every entry absent (all counts zero) without freeing memory.
        /// Note: values themselves are not zeroed — a zero count means "absent".
        pub fn reset(self: *Self) void {
            mem.set(Tag, self.tags, 0);
            mem.set(u64, self.counts.words, 0);
            mem.set(u64, self.clocks.words, 0);
        }

        /// Returns whether an entry with the given key is cached,
        /// without modifying the entry's counter.
        pub fn exists(self: *Self, key: Key) bool {
            const set = self.associate(key);
            return self.search(set, key) != null;
        }

        /// Looks up `key`, incrementing (saturating) its count on a hit.
        /// Returns a pointer to the cached value, or null on a miss.
        pub fn get(self: *Self, key: Key) ?*align(value_alignment) Value {
            const set = self.associate(key);
            const way = self.search(set, key) orelse return null;

            const count = self.counts.get(set.offset + way);
            // Saturating add: the count never wraps back to zero (zero = absent).
            self.counts.set(set.offset + way, count +| 1);

            return @alignCast(value_alignment, &set.values[way]);
        }

        /// Remove a key from the set associative cache if present.
        pub fn remove(self: *Self, key: Key) void {
            const set = self.associate(key);
            const way = self.search(set, key) orelse return;

            // Count zero means the way is free; the value bytes are left as-is.
            self.counts.set(set.offset + way, 0);
        }

        /// Hint that the key is less likely to be accessed in the future, without actually removing
        /// it from the cache.
        pub fn demote(self: *Self, key: Key) void {
            const set = self.associate(key);
            const way = self.search(set, key) orelse return;

            // A count of 1 makes the entry the next eviction candidate in its set.
            self.counts.set(set.offset + way, 1);
        }

        /// If the key is present in the set, returns the way. Otherwise returns null.
        inline fn search(self: *const Self, set: Set, key: Key) ?usize {
            // First narrow candidates by tag (SIMD compare), then confirm with a
            // full key comparison, skipping ways whose count is zero (absent).
            const ways = search_tags(set.tags, set.tag);

            var it = BitIterator(Ways){ .bits = ways };
            while (it.next()) |way| {
                const count = self.counts.get(set.offset + way);
                if (count > 0 and equal(key_from_value(&set.values[way]), key)) {
                    return way;
                }
            }

            return null;
        }

        /// Where each set bit represents the index of a way that has the same tag.
        const Ways = meta.Int(.unsigned, layout.ways);

        /// Compares all of a set's tags against `tag` in one vector operation,
        /// returning a bitmask of matching ways.
        inline fn search_tags(tags: *const [layout.ways]Tag, tag: Tag) Ways {
            const x: Vector(layout.ways, Tag) = tags.*;
            const y: Vector(layout.ways, Tag) = @splat(layout.ways, tag);

            const result: Vector(layout.ways, bool) = x == y;
            // Reinterpret the bool vector as a packed bitmask.
            return @ptrCast(*const Ways, &result).*;
        }

        /// Adds `key`, evicting if necessary, with no entries locked.
        /// See insert_preserve_locked() for the full contract.
        pub fn insert(self: *Self, key: Key) *align(value_alignment) Value {
            return self.insert_preserve_locked(
                void,
                struct {
                    inline fn locked(_: void, _: *const Value) bool {
                        return false;
                    }
                }.locked,
                {},
                key,
            );
        }

        /// Add a key, evicting an older entry if needed, and return a pointer to the value.
        /// The key must not already be in the cache.
        /// Never evicts keys for which locked() returns true.
        /// The caller must guarantee that locked() returns true for less than layout.ways keys.
        pub fn insert_preserve_locked(
            self: *Self,
            comptime Context: type,
            comptime locked: fn (
                Context,
                *align(value_alignment) const Value,
            ) callconv(.Inline) bool,
            context: Context,
            key: Key,
        ) *align(value_alignment) Value {
            const set = self.associate(key);
            if (self.search(set, key)) |way| {
                // Remove the old entry for this key.
                // It should be a different value, but since we are returning a value pointer we
                // can't check against the new one.
                self.counts.set(set.offset + way, 0);
            }

            const clock_index = @divExact(set.offset, layout.ways);

            var way = self.clocks.get(clock_index);
            // The clock hand type must wrap exactly at layout.ways.
            comptime assert(math.maxInt(@TypeOf(way)) == layout.ways - 1);
            comptime assert(@as(@TypeOf(way), math.maxInt(@TypeOf(way))) +% 1 == 0);

            // The maximum number of iterations happens when every slot in the set has the maximum
            // count. In this case, the loop will iterate until all counts have been decremented
            // to 1. Then in the next iteration it will decrement a count to 0 and break.
            const clock_iterations_max = layout.ways * (math.maxInt(Count) - 1);

            var safety_count: usize = 0;
            while (safety_count <= clock_iterations_max) : ({
                safety_count += 1;
                way +%= 1;
            }) {
                // We pass a value pointer to the callback here so that a cache miss
                // can be avoided if the caller is able to determine if the value is
                // locked by comparing pointers directly.
                if (locked(context, @alignCast(value_alignment, &set.values[way]))) continue;

                var count = self.counts.get(set.offset + way);
                if (count == 0) break; // Way is already free.

                count -= 1;
                self.counts.set(set.offset + way, count);
                if (count == 0) break; // Way has become free.
            } else {
                // Unreachable as long as the caller locks fewer than layout.ways keys:
                // an unlocked way must reach count zero within clock_iterations_max.
                unreachable;
            }
            assert(self.counts.get(set.offset + way) == 0);

            // Claim the free way: record the tag, give the entry one reference,
            // and advance the clock hand past it.
            set.tags[way] = set.tag;
            self.counts.set(set.offset + way, 1);
            self.clocks.set(clock_index, way +% 1);

            return @alignCast(value_alignment, &set.values[way]);
        }

        /// A view of one set: its tag for the looked-up key, its offset into the
        /// cache-wide arrays, and direct pointers to its tags/values.
        const Set = struct {
            tag: Tag,
            offset: u64,
            tags: *[layout.ways]Tag,
            values: *[layout.ways]Value,

            /// Debug helper: dumps the set's tags, values, counts and clock hand to stderr.
            fn inspect(set: Set, sac: Self) void {
                const clock_index = @divExact(set.offset, layout.ways);
                std.debug.print(
                    \\{{
                    \\ tag={}
                    \\ offset={}
                    \\ clock_hand={}
                , .{
                    set.tag,
                    set.offset,
                    sac.clocks.get(clock_index),
                });

                std.debug.print("\n tags={}", .{set.tags[0]});
                for (set.tags[1..]) |tag| std.debug.print(", {}", .{tag});

                std.debug.print("\n values={}", .{set.values[0]});
                for (set.values[1..]) |value| std.debug.print(", {}", .{value});

                std.debug.print("\n counts={}", .{sac.counts.get(set.offset)});
                var i: usize = 1;
                while (i < layout.ways) : (i += 1) {
                    std.debug.print(", {}", .{sac.counts.get(set.offset + i)});
                }

                std.debug.print("\n}}\n", .{});
            }
        };

        /// Maps a key to its set: the set index comes from the low bits of the hash,
        /// the tag from the bits immediately above them.
        inline fn associate(self: *Self, key: Key) Set {
            const entropy = hash(key);

            const tag = @truncate(Tag, entropy >> math.log2_int(u64, self.sets));
            const index = entropy % self.sets;
            const offset = index * layout.ways;

            return .{
                .tag = tag,
                .offset = offset,
                .tags = self.tags[offset..][0..layout.ways],
                .values = self.values[offset..][0..layout.ways],
            };
        }

        /// Debug helper: prints the comptime-derived layout parameters to stderr.
        pub fn inspect() void {
            std.debug.print("\nKey={} Value={} ways={} tag_bits={} clock_bits={} " ++
                "clock_hand_bits={} tags_per_line={} clocks_per_line={} " ++
                "clock_hands_per_line={}\n", .{
                @bitSizeOf(Key),
                @sizeOf(Value),
                layout.ways,
                layout.tag_bits,
                layout.clock_bits,
                clock_hand_bits,
                tags_per_line,
                clocks_per_line,
                clock_hands_per_line,
            });
        }
    };
}
392
+
393
/// Builds a test harness for a SetAssociativeCache instantiated with the given
/// Key/Value types, context namespace (key_from_value/hash/equal) and layout.
/// Call `.run()` on the returned type to execute the fill/evict/lock/remove scenario.
fn set_associative_cache_test(
    comptime Key: type,
    comptime Value: type,
    comptime context: type,
    comptime layout: Layout,
) type {
    const testing = std.testing;
    const expect = testing.expect;
    const expectEqual = testing.expectEqual;

    // Flip to true to dump cache internals to stderr while debugging this test.
    const log = false;

    const SAC = SetAssociativeCache(
        Key,
        Value,
        context.key_from_value,
        context.hash,
        context.equal,
        layout,
    );

    return struct {
        fn run() !void {
            if (log) SAC.inspect();

            // TODO Add a nice calculator method to help solve the minimum value_count_max required:
            var sac = try SAC.init(testing.allocator, 16 * 16 * 8);
            defer sac.deinit(testing.allocator);

            // A freshly initialized cache must be fully zeroed.
            for (sac.tags) |tag| try testing.expectEqual(@as(SAC.Tag, 0), tag);
            for (sac.counts.words) |word| try testing.expectEqual(@as(u64, 0), word);
            for (sac.clocks.words) |word| try testing.expectEqual(@as(u64, 0), word);

            // Fill up the first set entirely.
            // Keys that are multiples of sac.sets all land in set 0 (with the tested contexts).
            {
                var i: usize = 0;
                while (i < layout.ways) : (i += 1) {
                    try expectEqual(i, sac.clocks.get(0));

                    const key = i * sac.sets;
                    sac.insert(key).* = key;
                    try expect(sac.counts.get(i) == 1);
                    try expectEqual(key, sac.get(key).?.*);
                    try expect(sac.counts.get(i) == 2);
                }
                // The clock hand has wrapped around to way 0.
                try expect(sac.clocks.get(0) == 0);
            }

            if (log) sac.associate(0).inspect(sac);

            // Insert another element into the first set, causing key 0 to be evicted.
            {
                const key = layout.ways * sac.sets;
                sac.insert(key).* = key;
                try expect(sac.counts.get(0) == 1);
                try expectEqual(key, sac.get(key).?.*);
                try expect(sac.counts.get(0) == 2);

                try expectEqual(@as(?*Value, null), sac.get(0));

                // The insertion decremented every other way's count from 2 to 1.
                {
                    var i: usize = 1;
                    while (i < layout.ways) : (i += 1) {
                        try expect(sac.counts.get(i) == 1);
                    }
                }
            }

            if (log) sac.associate(0).inspect(sac);

            // Lock all other slots, causing key layout.ways * sac.sets to be evicted despite having the
            // highest count.
            {
                {
                    assert(sac.counts.get(0) == 2);
                    var i: usize = 1;
                    while (i < layout.ways) : (i += 1) assert(sac.counts.get(i) == 1);
                }

                const key = (layout.ways + 1) * sac.sets;
                const expect_evicted = layout.ways * sac.sets;

                sac.insert_preserve_locked(
                    u64,
                    struct {
                        // Locks every value except the one equal to the context argument.
                        inline fn locked(only_unlocked: u64, value: *const Value) bool {
                            return value.* != only_unlocked;
                        }
                    }.locked,
                    expect_evicted,
                    key,
                ).* = key;

                try expectEqual(@as(?*Value, null), sac.get(expect_evicted));
            }

            if (log) sac.associate(0).inspect(sac);

            // Ensure removal works.
            {
                const key = 5 * sac.sets;
                assert(sac.get(key).?.* == key);
                try expect(sac.counts.get(5) == 2);

                sac.remove(key);
                try expectEqual(@as(?*Value, null), sac.get(key));
                try expect(sac.counts.get(5) == 0);
            }

            sac.reset();

            // After reset() the cache must again be fully zeroed.
            for (sac.tags) |tag| try testing.expectEqual(@as(SAC.Tag, 0), tag);
            for (sac.counts.words) |word| try testing.expectEqual(@as(u64, 0), word);
            for (sac.clocks.words) |word| try testing.expectEqual(@as(u64, 0), word);

            // Fill up the first set entirely, maxing out the count for each slot.
            {
                var i: usize = 0;
                while (i < layout.ways) : (i += 1) {
                    try expectEqual(i, sac.clocks.get(0));

                    const key = i * sac.sets;
                    sac.insert(key).* = key;
                    try expect(sac.counts.get(i) == 1);
                    // Repeated gets increment the count up to maxInt(Count)...
                    var j: usize = 2;
                    while (j <= math.maxInt(SAC.Count)) : (j += 1) {
                        try expectEqual(key, sac.get(key).?.*);
                        try expect(sac.counts.get(i) == j);
                    }
                    // ...after which it saturates rather than wrapping.
                    try expectEqual(key, sac.get(key).?.*);
                    try expect(sac.counts.get(i) == math.maxInt(SAC.Count));
                }
                try expect(sac.clocks.get(0) == 0);
            }

            if (log) sac.associate(0).inspect(sac);

            // Insert another element into the first set, causing key 0 to be evicted.
            // The clock must cycle until all counts have been decremented to zero for way 0.
            {
                const key = layout.ways * sac.sets;
                sac.insert(key).* = key;
                try expect(sac.counts.get(0) == 1);
                try expectEqual(key, sac.get(key).?.*);
                try expect(sac.counts.get(0) == 2);

                try expectEqual(@as(?*Value, null), sac.get(0));

                {
                    var i: usize = 1;
                    while (i < layout.ways) : (i += 1) {
                        try expect(sac.counts.get(i) == 1);
                    }
                }
            }

            if (log) sac.associate(0).inspect(sac);
        }
    };
}
552
+
553
test "SetAssociativeCache: eviction" {
    // Keys and values are both bare u64s: the key IS the value, and the hash is
    // the identity, so test keys map to predictable sets.
    const TestContext = struct {
        inline fn key_from_value(value: *const u64) u64 {
            return value.*;
        }
        inline fn hash(key: u64) u64 {
            return key;
        }
        inline fn equal(a: u64, b: u64) bool {
            return a == b;
        }
    };

    try set_associative_cache_test(u64, u64, TestContext, .{}).run();
}
571
+
572
test "SetAssociativeCache: hash collision" {
    // Every key hashes to 0, forcing all keys into the same set with the same
    // tag, so lookups must fall back to full key comparison to disambiguate.
    const CollidingContext = struct {
        inline fn key_from_value(value: *const u64) u64 {
            return value.*;
        }
        /// This hash function is intentionally broken to simulate hash collision.
        inline fn hash(key: u64) u64 {
            _ = key;
            return 0;
        }
        inline fn equal(a: u64, b: u64) bool {
            return a == b;
        }
    };

    try set_associative_cache_test(u64, u64, CollidingContext, .{}).run();
}
592
+
593
/// A little simpler than PackedIntArray in the std lib, restricted to little endian 64-bit words,
/// and using words exactly without padding.
/// UInt must be an unsigned, power-of-two-width integer narrower than 8 bits (u1/u2/u4).
fn PackedUnsignedIntegerArray(comptime UInt: type) type {
    const Word = u64;

    assert(builtin.target.cpu.arch.endian() == .Little);
    assert(@typeInfo(UInt).Int.signedness == .unsigned);
    assert(@typeInfo(UInt).Int.bits < meta.bitCount(u8));
    assert(math.isPowerOfTwo(@typeInfo(UInt).Int.bits));

    const word_bits = meta.bitCount(Word);
    const uint_bits = meta.bitCount(UInt);
    const uints_per_word = @divExact(word_bits, uint_bits);

    // An index bounded by the number of unsigned integers that fit exactly into a word.
    const WordIndex = meta.Int(.unsigned, math.log2_int(u64, uints_per_word));
    assert(math.maxInt(WordIndex) == uints_per_word - 1);

    // An index bounded by the number of bits (not unsigned integers) that fit exactly into a word.
    const BitsIndex = math.Log2Int(Word);
    assert(math.maxInt(BitsIndex) == meta.bitCount(Word) - 1);
    assert(math.maxInt(BitsIndex) == word_bits - 1);
    assert(math.maxInt(BitsIndex) == uint_bits * (math.maxInt(WordIndex) + 1) - 1);

    return struct {
        const Self = @This();

        words: []Word,

        /// Returns the unsigned integer at `index`.
        pub inline fn get(self: Self, index: u64) UInt {
            // This truncate is safe since we want to mask the right-shifted word by exactly a UInt:
            return @truncate(UInt, self.word(index).* >> bits_index(index));
        }

        /// Sets the unsigned integer at `index` to `value`.
        pub inline fn set(self: Self, index: u64, value: UInt) void {
            // Clear the target lane, then OR in the new value.
            const w = self.word(index);
            w.* &= ~mask(index);
            w.* |= @as(Word, value) << bits_index(index);
        }

        /// Returns a word-wide mask covering the UInt lane at `index`.
        inline fn mask(index: u64) Word {
            return @as(Word, math.maxInt(UInt)) << bits_index(index);
        }

        /// Returns a pointer to the word containing the UInt at `index`.
        inline fn word(self: Self, index: u64) *Word {
            return &self.words[@divFloor(index, uints_per_word)];
        }

        /// Returns the bit position of the UInt at `index` within its word.
        inline fn bits_index(index: u64) BitsIndex {
            // If uint_bits=2, then it's normal for the maximum return value to be 62, even
            // where BitsIndex allows up to 63 (inclusive) for a 64-bit word. This is because 62 is
            // the bit index of the highest 2-bit UInt (e.g. bit index + bit length == 64).
            comptime assert(uint_bits * (math.maxInt(WordIndex) + 1) == math.maxInt(BitsIndex) + 1);

            return @as(BitsIndex, uint_bits) * @truncate(WordIndex, index);
        }
    };
}
653
+
654
test "PackedUnsignedIntegerArray: unit" {
    const expectEqual = std.testing.expectEqual;

    var backing = [8]u64{ 0, 0b10110010, 0, 0, 0, 0, 0, 0 };
    var array: PackedUnsignedIntegerArray(u2) = .{
        .words = &backing,
    };

    // With u2 lanes there are 32 lanes per word, so indices 32..35 read the
    // low byte of backing[1] (0b10110010), two bits at a time from the bottom.
    try expectEqual(@as(u2, 0b10), array.get(32 + 0));
    try expectEqual(@as(u2, 0b00), array.get(32 + 1));
    try expectEqual(@as(u2, 0b11), array.get(32 + 2));
    try expectEqual(@as(u2, 0b10), array.get(32 + 3));

    // Each write must update exactly one 2-bit lane of backing[0] and leave the
    // other lanes untouched, including overwrites of a previously-set lane.
    const writes = [_]struct { index: u64, value: u2, word: u64 }{
        .{ .index = 0, .value = 0b01, .word = 0b00000001 },
        .{ .index = 1, .value = 0b10, .word = 0b00001001 },
        .{ .index = 2, .value = 0b11, .word = 0b00111001 },
        .{ .index = 3, .value = 0b11, .word = 0b11111001 },
        .{ .index = 3, .value = 0b01, .word = 0b01111001 },
        .{ .index = 3, .value = 0b00, .word = 0b00111001 },
    };
    for (writes) |w| {
        array.set(w.index, w.value);
        try expectEqual(w.word, backing[0]);
        try expectEqual(w.value, array.get(w.index));
    }

    // Writes to the middle and top lanes of the word.
    array.set(4, 0b11);
    try expectEqual(
        @as(u64, 0b0000000000000000000000000000000000000000000000000000001100111001),
        backing[0],
    );
    array.set(31, 0b11);
    try expectEqual(
        @as(u64, 0b1100000000000000000000000000000000000000000000000000001100111001),
        backing[0],
    );
}
698
+
699
/// Builds a fuzz-test harness that performs random writes to a
/// PackedUnsignedIntegerArray(UInt) while mirroring them in a plain []UInt
/// reference slice, verifying the two agree after every write.
fn PackedUnsignedIntegerArrayFuzzTest(comptime UInt: type) type {
    const testing = std.testing;

    return struct {
        const Self = @This();

        const Array = PackedUnsignedIntegerArray(UInt);

        // Source of random indices/values; call order is part of the test's determinism.
        random: std.rand.Random,

        // The packed array under test and the plain-slice model it must match.
        array: Array,
        reference: []UInt,

        /// Allocates a zeroed packed array and reference model of `len` elements.
        /// `len * @bitSizeOf(UInt)` must be an exact multiple of 64.
        fn init(random: std.rand.Random, len: usize) !Self {
            const words = try testing.allocator.alloc(u64, @divExact(len * @bitSizeOf(UInt), 64));
            errdefer testing.allocator.free(words);

            const reference = try testing.allocator.alloc(UInt, len);
            errdefer testing.allocator.free(reference);

            mem.set(u64, words, 0);
            mem.set(UInt, reference, 0);

            return Self{
                .random = random,
                .array = Array{ .words = words },
                .reference = reference,
            };
        }

        /// Frees both the packed words and the reference slice.
        fn deinit(context: *Self) void {
            testing.allocator.free(context.array.words);
            testing.allocator.free(context.reference);
        }

        /// Performs 10,000 random set() operations, checking the whole array
        /// against the reference model after each one.
        fn run(context: *Self) !void {
            var iterations: usize = 0;
            while (iterations < 10_000) : (iterations += 1) {
                const index = context.random.uintLessThanBiased(usize, context.reference.len);
                const value = context.random.int(UInt);

                context.array.set(index, value);
                context.reference[index] = value;

                try context.verify();
            }
        }

        /// Asserts that every element of the packed array equals the reference model.
        fn verify(context: *Self) !void {
            for (context.reference) |value, index| {
                try testing.expectEqual(value, context.array.get(index));
            }
        }
    };
}
753
+
754
+ test "PackedUnsignedIntegerArray: fuzz" {
755
+ const seed = 42;
756
+
757
+ var prng = std.rand.DefaultPrng.init(seed);
758
+ const random = prng.random();
759
+
760
+ inline for (.{ u1, u2, u4 }) |UInt| {
761
+ const Context = PackedUnsignedIntegerArrayFuzzTest(UInt);
762
+
763
+ var context = try Context.init(random, 1024);
764
+ defer context.deinit();
765
+
766
+ try context.run();
767
+ }
768
+ }
769
+
770
/// Returns an iterator type over the set bits of an unsigned integer of type `Bits`.
fn BitIterator(comptime Bits: type) type {
    return struct {
        const Self = @This();
        const BitIndex = math.Log2Int(Bits);

        bits: Bits,

        /// Iterates over the bits, consuming them.
        /// Returns the bit index of each set bit until there are no more set bits, then null.
        inline fn next(it: *Self) ?BitIndex {
            if (it.bits == 0) return null;
            // @ctz() of a nonzero value is at most @bitSizeOf(Bits) - 1,
            // so this @intCast() to BitIndex cannot truncate.
            const index = @intCast(BitIndex, @ctz(Bits, it.bits));
            // Clear the bit just reported: `index` is the lowest set bit.
            it.bits ^= @as(Bits, 1) << index;
            return index;
        }
    };
}
789
+
790
+ test "BitIterator" {
791
+ const expectEqual = @import("std").testing.expectEqual;
792
+
793
+ var it = BitIterator(u16){ .bits = 0b1000_0000_0100_0101 };
794
+
795
+ for ([_]u4{ 0, 2, 6, 15 }) |e| {
796
+ try expectEqual(@as(?u4, e), it.next());
797
+ }
798
+ try expectEqual(it.next(), null);
799
+ }
800
+
801
/// Returns a test harness that fuzzes SetAssociativeCache.search_tags() for the
/// given layout against a scalar reference implementation.
fn search_tags_test(comptime Key: type, comptime Value: type, comptime layout: Layout) type {
    const testing = std.testing;

    // Set to true to print the cache layout and each expected/actual bitmask.
    const log = false;

    // Identity context: values are their own keys, keys are their own hashes.
    const context = struct {
        inline fn key_from_value(value: *const Value) Key {
            return value.*;
        }
        inline fn hash(key: Key) u64 {
            return key;
        }
        inline fn equal(a: Key, b: Key) bool {
            return a == b;
        }
    };

    const SAC = SetAssociativeCache(
        Key,
        Value,
        context.key_from_value,
        context.hash,
        context.equal,
        layout,
    );

    // Scalar reference implementation: sets one bit in the result for every
    // way whose tag matches `tag`.
    const reference = struct {
        inline fn search_tags(tags: *[layout.ways]SAC.Tag, tag: SAC.Tag) SAC.Ways {
            var bits: SAC.Ways = 0;
            var count: usize = 0;
            for (tags) |t, i| {
                if (t == tag) {
                    const bit = @intCast(math.Log2Int(SAC.Ways), i);
                    bits |= (@as(SAC.Ways, 1) << bit);
                    count += 1;
                }
            }
            // Sanity-check the reference itself: one set bit per match.
            assert(@popCount(SAC.Ways, bits) == count);
            return bits;
        }
    };

    return struct {
        fn run(random: std.rand.Random) !void {
            if (log) SAC.inspect();

            var iterations: usize = 0;
            while (iterations < 10_000) : (iterations += 1) {
                // Random tags for every way; additional matches with `tag`
                // beyond the forced ones below may occur by chance.
                var tags: [layout.ways]SAC.Tag = undefined;
                random.bytes(mem.asBytes(&tags));

                const tag = random.int(SAC.Tag);

                // Force a random subset of at least `matches_count_min` ways to
                // match `tag`, chosen via a shuffled index array.
                var indexes: [layout.ways]usize = undefined;
                for (indexes) |*x, i| x.* = i;
                random.shuffle(usize, &indexes);

                const matches_count_min = random.uintAtMostBiased(u32, layout.ways);
                for (indexes[0..matches_count_min]) |index| {
                    tags[index] = tag;
                }

                // The (possibly vectorized) implementation must agree with the
                // scalar reference exactly.
                const expected = reference.search_tags(&tags, tag);
                const actual = SAC.search_tags(&tags, tag);
                if (log) std.debug.print("expected: {b:0>16}, actual: {b:0>16}\n", .{
                    expected,
                    actual,
                });
                try testing.expectEqual(expected, actual);
            }
        }
    };
}
874
+
875
+ test "SetAssociativeCache: search_tags()" {
876
+ const seed = 42;
877
+
878
+ const Key = u64;
879
+ const Value = u64;
880
+
881
+ var prng = std.rand.DefaultPrng.init(seed);
882
+ const random = prng.random();
883
+
884
+ inline for ([_]u64{ 2, 4, 16 }) |ways| {
885
+ inline for ([_]u64{ 8, 16 }) |tag_bits| {
886
+ const case = search_tags_test(Key, Value, .{
887
+ .ways = ways,
888
+ .tag_bits = tag_bits,
889
+ });
890
+
891
+ try case.run(random);
892
+ }
893
+ }
894
+ }