@git-stunts/git-warp 10.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (143) hide show
  1. package/LICENSE +201 -0
  2. package/NOTICE +16 -0
  3. package/README.md +480 -0
  4. package/SECURITY.md +30 -0
  5. package/bin/git-warp +24 -0
  6. package/bin/warp-graph.js +1574 -0
  7. package/index.d.ts +2366 -0
  8. package/index.js +180 -0
  9. package/package.json +129 -0
  10. package/scripts/install-git-warp.sh +258 -0
  11. package/scripts/uninstall-git-warp.sh +139 -0
  12. package/src/domain/WarpGraph.js +3157 -0
  13. package/src/domain/crdt/Dot.js +160 -0
  14. package/src/domain/crdt/LWW.js +154 -0
  15. package/src/domain/crdt/ORSet.js +371 -0
  16. package/src/domain/crdt/VersionVector.js +222 -0
  17. package/src/domain/entities/GraphNode.js +60 -0
  18. package/src/domain/errors/EmptyMessageError.js +47 -0
  19. package/src/domain/errors/ForkError.js +30 -0
  20. package/src/domain/errors/IndexError.js +23 -0
  21. package/src/domain/errors/OperationAbortedError.js +22 -0
  22. package/src/domain/errors/QueryError.js +39 -0
  23. package/src/domain/errors/SchemaUnsupportedError.js +17 -0
  24. package/src/domain/errors/ShardCorruptionError.js +56 -0
  25. package/src/domain/errors/ShardLoadError.js +57 -0
  26. package/src/domain/errors/ShardValidationError.js +61 -0
  27. package/src/domain/errors/StorageError.js +57 -0
  28. package/src/domain/errors/SyncError.js +30 -0
  29. package/src/domain/errors/TraversalError.js +23 -0
  30. package/src/domain/errors/WarpError.js +31 -0
  31. package/src/domain/errors/WormholeError.js +28 -0
  32. package/src/domain/errors/WriterError.js +39 -0
  33. package/src/domain/errors/index.js +21 -0
  34. package/src/domain/services/AnchorMessageCodec.js +99 -0
  35. package/src/domain/services/BitmapIndexBuilder.js +225 -0
  36. package/src/domain/services/BitmapIndexReader.js +435 -0
  37. package/src/domain/services/BoundaryTransitionRecord.js +463 -0
  38. package/src/domain/services/CheckpointMessageCodec.js +147 -0
  39. package/src/domain/services/CheckpointSerializerV5.js +281 -0
  40. package/src/domain/services/CheckpointService.js +384 -0
  41. package/src/domain/services/CommitDagTraversalService.js +156 -0
  42. package/src/domain/services/DagPathFinding.js +712 -0
  43. package/src/domain/services/DagTopology.js +239 -0
  44. package/src/domain/services/DagTraversal.js +245 -0
  45. package/src/domain/services/Frontier.js +108 -0
  46. package/src/domain/services/GCMetrics.js +101 -0
  47. package/src/domain/services/GCPolicy.js +122 -0
  48. package/src/domain/services/GitLogParser.js +205 -0
  49. package/src/domain/services/HealthCheckService.js +246 -0
  50. package/src/domain/services/HookInstaller.js +326 -0
  51. package/src/domain/services/HttpSyncServer.js +262 -0
  52. package/src/domain/services/IndexRebuildService.js +426 -0
  53. package/src/domain/services/IndexStalenessChecker.js +103 -0
  54. package/src/domain/services/JoinReducer.js +582 -0
  55. package/src/domain/services/KeyCodec.js +113 -0
  56. package/src/domain/services/LegacyAnchorDetector.js +67 -0
  57. package/src/domain/services/LogicalTraversal.js +351 -0
  58. package/src/domain/services/MessageCodecInternal.js +132 -0
  59. package/src/domain/services/MessageSchemaDetector.js +145 -0
  60. package/src/domain/services/MigrationService.js +55 -0
  61. package/src/domain/services/ObserverView.js +265 -0
  62. package/src/domain/services/PatchBuilderV2.js +669 -0
  63. package/src/domain/services/PatchMessageCodec.js +140 -0
  64. package/src/domain/services/ProvenanceIndex.js +337 -0
  65. package/src/domain/services/ProvenancePayload.js +242 -0
  66. package/src/domain/services/QueryBuilder.js +835 -0
  67. package/src/domain/services/StateDiff.js +300 -0
  68. package/src/domain/services/StateSerializerV5.js +156 -0
  69. package/src/domain/services/StreamingBitmapIndexBuilder.js +709 -0
  70. package/src/domain/services/SyncProtocol.js +593 -0
  71. package/src/domain/services/TemporalQuery.js +201 -0
  72. package/src/domain/services/TranslationCost.js +221 -0
  73. package/src/domain/services/TraversalService.js +8 -0
  74. package/src/domain/services/WarpMessageCodec.js +29 -0
  75. package/src/domain/services/WarpStateIndexBuilder.js +127 -0
  76. package/src/domain/services/WormholeService.js +353 -0
  77. package/src/domain/types/TickReceipt.js +285 -0
  78. package/src/domain/types/WarpTypes.js +209 -0
  79. package/src/domain/types/WarpTypesV2.js +200 -0
  80. package/src/domain/utils/CachedValue.js +140 -0
  81. package/src/domain/utils/EventId.js +89 -0
  82. package/src/domain/utils/LRUCache.js +112 -0
  83. package/src/domain/utils/MinHeap.js +114 -0
  84. package/src/domain/utils/RefLayout.js +280 -0
  85. package/src/domain/utils/WriterId.js +205 -0
  86. package/src/domain/utils/cancellation.js +33 -0
  87. package/src/domain/utils/canonicalStringify.js +42 -0
  88. package/src/domain/utils/defaultClock.js +20 -0
  89. package/src/domain/utils/defaultCodec.js +51 -0
  90. package/src/domain/utils/nullLogger.js +21 -0
  91. package/src/domain/utils/roaring.js +181 -0
  92. package/src/domain/utils/shardVersion.js +9 -0
  93. package/src/domain/warp/PatchSession.js +217 -0
  94. package/src/domain/warp/Writer.js +181 -0
  95. package/src/hooks/post-merge.sh +60 -0
  96. package/src/infrastructure/adapters/BunHttpAdapter.js +225 -0
  97. package/src/infrastructure/adapters/ClockAdapter.js +57 -0
  98. package/src/infrastructure/adapters/ConsoleLogger.js +150 -0
  99. package/src/infrastructure/adapters/DenoHttpAdapter.js +230 -0
  100. package/src/infrastructure/adapters/GitGraphAdapter.js +787 -0
  101. package/src/infrastructure/adapters/GlobalClockAdapter.js +5 -0
  102. package/src/infrastructure/adapters/NoOpLogger.js +62 -0
  103. package/src/infrastructure/adapters/NodeCryptoAdapter.js +32 -0
  104. package/src/infrastructure/adapters/NodeHttpAdapter.js +98 -0
  105. package/src/infrastructure/adapters/PerformanceClockAdapter.js +5 -0
  106. package/src/infrastructure/adapters/WebCryptoAdapter.js +121 -0
  107. package/src/infrastructure/codecs/CborCodec.js +384 -0
  108. package/src/ports/BlobPort.js +30 -0
  109. package/src/ports/ClockPort.js +25 -0
  110. package/src/ports/CodecPort.js +25 -0
  111. package/src/ports/CommitPort.js +114 -0
  112. package/src/ports/ConfigPort.js +31 -0
  113. package/src/ports/CryptoPort.js +38 -0
  114. package/src/ports/GraphPersistencePort.js +57 -0
  115. package/src/ports/HttpServerPort.js +25 -0
  116. package/src/ports/IndexStoragePort.js +39 -0
  117. package/src/ports/LoggerPort.js +68 -0
  118. package/src/ports/RefPort.js +51 -0
  119. package/src/ports/TreePort.js +51 -0
  120. package/src/visualization/index.js +26 -0
  121. package/src/visualization/layouts/converters.js +75 -0
  122. package/src/visualization/layouts/elkAdapter.js +86 -0
  123. package/src/visualization/layouts/elkLayout.js +95 -0
  124. package/src/visualization/layouts/index.js +29 -0
  125. package/src/visualization/renderers/ascii/box.js +16 -0
  126. package/src/visualization/renderers/ascii/check.js +271 -0
  127. package/src/visualization/renderers/ascii/colors.js +13 -0
  128. package/src/visualization/renderers/ascii/formatters.js +73 -0
  129. package/src/visualization/renderers/ascii/graph.js +344 -0
  130. package/src/visualization/renderers/ascii/history.js +335 -0
  131. package/src/visualization/renderers/ascii/index.js +14 -0
  132. package/src/visualization/renderers/ascii/info.js +245 -0
  133. package/src/visualization/renderers/ascii/materialize.js +255 -0
  134. package/src/visualization/renderers/ascii/path.js +240 -0
  135. package/src/visualization/renderers/ascii/progress.js +32 -0
  136. package/src/visualization/renderers/ascii/symbols.js +33 -0
  137. package/src/visualization/renderers/ascii/table.js +19 -0
  138. package/src/visualization/renderers/browser/index.js +1 -0
  139. package/src/visualization/renderers/svg/index.js +159 -0
  140. package/src/visualization/utils/ansi.js +14 -0
  141. package/src/visualization/utils/time.js +40 -0
  142. package/src/visualization/utils/truncate.js +40 -0
  143. package/src/visualization/utils/unicode.js +52 -0
@@ -0,0 +1,239 @@
1
+ /**
2
+ * Service for DAG topology operations: topological sort and
3
+ * common ancestor finding.
4
+ *
5
+ * Split from CommitDagTraversalService as part of the SRP refactor.
6
+ *
7
+ * @module domain/services/DagTopology
8
+ */
9
+
10
+ import nullLogger from '../utils/nullLogger.js';
11
+ import TraversalError from '../errors/TraversalError.js';
12
+ import { checkAborted } from '../utils/cancellation.js';
13
+
14
+ /**
15
+ * @typedef {'forward' | 'reverse'} TraversalDirection
16
+ */
17
+
18
/**
 * Default limits for topology operations.
 * Caps discovery/yield counts so a malformed or enormous graph cannot
 * make topological sort or ancestor search run unbounded.
 * @const
 */
const DEFAULT_MAX_NODES = 100000;
const DEFAULT_MAX_DEPTH = 1000;
24
+
25
/**
 * Service for DAG topology operations.
 *
 * Provides topological sort (Kahn's algorithm) and common
 * ancestor finding using the index reader for O(1) lookups.
 */
export default class DagTopology {
  /**
   * Creates a new DagTopology service.
   *
   * @param {Object} options
   * @param {import('./BitmapIndexReader.js').default} options.indexReader - Index reader for O(1) lookups
   * @param {import('../../ports/LoggerPort.js').default} [options.logger] - Logger instance
   * @param {import('./DagTraversal.js').default} [options.traversal] - Traversal service for ancestor
   *   enumeration; optional here, but required by commonAncestors()
   * @throws {Error} If indexReader is missing
   */
  constructor({ indexReader, logger = nullLogger, traversal } = {}) {
    if (!indexReader) {
      throw new Error('DagTopology requires an indexReader');
    }
    this._indexReader = indexReader;
    this._logger = logger;
    this._traversal = traversal;
  }

  /**
   * Gets neighbors for a node based on direction.
   *
   * @param {string} sha - Node SHA to get neighbors for
   * @param {TraversalDirection} direction - 'forward' for children, 'reverse' for parents
   * @returns {Promise<string[]>} Array of neighbor SHAs
   * @private
   */
  async _getNeighbors(sha, direction) {
    return direction === 'forward'
      ? this._indexReader.getChildren(sha)
      : this._indexReader.getParents(sha);
  }

  /**
   * Finds common ancestors of multiple nodes.
   *
   * An ancestor is "common" if it can be reached by following parent edges
   * from ALL of the input nodes.
   *
   * @param {Object} options - Common ancestor options
   * @param {string[]} options.shas - Array of node SHAs
   * @param {number} [options.maxResults=100] - Maximum ancestors to return
   * @param {number} [options.maxDepth=1000] - Maximum depth to search
   * @param {AbortSignal} [options.signal] - Optional AbortSignal for cancellation
   * @returns {Promise<string[]>} Array of common ancestor SHAs
   * @throws {Error} If no traversal service was supplied at construction
   */
  async commonAncestors({ shas, maxResults = 100, maxDepth = DEFAULT_MAX_DEPTH, signal }) {
    if (shas.length === 0) { return []; }
    if (!this._traversal) {
      // `traversal` is optional in the constructor, but this operation cannot
      // run without it. Fail fast with a clear message instead of letting
      // `this._traversal.ancestors(...)` raise an opaque TypeError.
      throw new Error('DagTopology.commonAncestors requires a traversal service');
    }
    if (shas.length === 1) {
      // Single input: its ancestor set is trivially "common".
      const ancestors = [];
      for await (const node of this._traversal.ancestors({ sha: shas[0], maxNodes: maxResults, maxDepth, signal })) {
        ancestors.push(node.sha);
      }
      return ancestors;
    }

    this._logger.debug('commonAncestors started', { shaCount: shas.length, maxDepth });

    // Count, per ancestor, how many of the inputs reach it; an ancestor is
    // common exactly when its count equals the number of inputs.
    const ancestorCounts = new Map();
    const requiredCount = shas.length;

    for (const sha of shas) {
      checkAborted(signal, 'commonAncestors');
      // Per-input dedupe: an input must contribute at most 1 to each count.
      const visited = new Set();
      for await (const node of this._traversal.ancestors({ sha, maxDepth, signal })) {
        if (!visited.has(node.sha)) {
          visited.add(node.sha);
          ancestorCounts.set(node.sha, (ancestorCounts.get(node.sha) || 0) + 1);
        }
      }
    }

    const common = [];
    for (const [ancestor, count] of ancestorCounts) {
      if (count === requiredCount) {
        common.push(ancestor);
        if (common.length >= maxResults) { break; }
      }
    }

    this._logger.debug('commonAncestors completed', { found: common.length });
    return common;
  }

  /**
   * Yields nodes in topological order using Kahn's algorithm.
   *
   * Topological order ensures that for every directed edge A -> B, node A
   * is yielded before node B.
   *
   * @param {Object} options - Topological sort options
   * @param {string} options.start - Starting node SHA
   * @param {number} [options.maxNodes=100000] - Maximum nodes to yield
   * @param {TraversalDirection} [options.direction='forward'] - Direction
   * @param {boolean} [options.throwOnCycle=false] - If true, throws on cycle detection
   * @param {AbortSignal} [options.signal] - Optional AbortSignal for cancellation
   * @yields {{sha: string, depth: number, parent: null}} Nodes in topological order
   * @throws {TraversalError} With code 'CYCLE_DETECTED' if throwOnCycle is true
   */
  async *topologicalSort({
    start,
    maxNodes = DEFAULT_MAX_NODES,
    direction = 'forward',
    throwOnCycle = false,
    signal,
  }) {
    this._logger.debug('topologicalSort started', { start, direction, maxNodes });

    // Phase 1: Discover all reachable nodes and compute in-degrees.
    const inDegree = new Map();
    const allNodes = new Set([start]);
    const edges = new Map();

    // Index-pointer queue: Array#shift is O(n) per dequeue and made
    // discovery quadratic on large graphs; advancing `head` is O(1).
    const queue = [start];
    let head = 0;

    while (head < queue.length) {
      const sha = queue[head++];
      // Throttled cancellation check (every ~1000 dequeues).
      if (head % 1000 === 0) {
        checkAborted(signal, 'topologicalSort');
      }

      const neighbors = await this._getNeighbors(sha, direction);
      edges.set(sha, neighbors);

      for (const neighbor of neighbors) {
        inDegree.set(neighbor, (inDegree.get(neighbor) || 0) + 1);
        if (!allNodes.has(neighbor)) {
          allNodes.add(neighbor);
          queue.push(neighbor);
        }
      }
    }

    // The start node has no recorded in-degree unless something points at it.
    if (!inDegree.has(start)) {
      inDegree.set(start, 0);
    }

    // Phase 2: Repeatedly yield nodes whose in-degree has dropped to 0.
    const ready = [];
    for (const sha of allNodes) {
      if ((inDegree.get(sha) ?? 0) === 0) {
        ready.push(sha);
      }
    }

    let nodesYielded = 0;
    let readyHead = 0; // index pointer for O(1) dequeue from `ready`
    const depthMap = new Map([[start, 0]]);

    while (readyHead < ready.length && nodesYielded < maxNodes) {
      // Throttled cancellation check; skip the redundant probe at 0 yields
      // (no work has been done yet, so there is nothing to abort).
      if (nodesYielded > 0 && nodesYielded % 1000 === 0) {
        checkAborted(signal, 'topologicalSort');
      }

      const sha = ready[readyHead++];
      const depth = depthMap.get(sha) || 0;

      nodesYielded++;
      yield { sha, depth, parent: null };

      const neighbors = edges.get(sha) || [];
      for (const neighbor of neighbors) {
        const newDegree = inDegree.get(neighbor) - 1;
        inDegree.set(neighbor, newDegree);

        // First edge to reach a node fixes its depth (shortest-path level).
        if (!depthMap.has(neighbor)) {
          depthMap.set(neighbor, depth + 1);
        }

        if (newDegree === 0) {
          ready.push(neighbor);
        }
      }
    }

    // Phase 3: Detect cycles — any node never reaching in-degree 0 is in one.
    const cycleDetected = nodesYielded < allNodes.size;

    if (cycleDetected) {
      const cycleNodeCount = allNodes.size - nodesYielded;
      this._logger.warn('Cycle detected in topological sort', {
        start,
        direction,
        nodesYielded,
        totalNodes: allNodes.size,
        nodesInCycle: cycleNodeCount,
      });

      if (throwOnCycle) {
        throw new TraversalError('Cycle detected in graph during topological sort', {
          code: 'CYCLE_DETECTED',
          context: {
            start,
            direction,
            nodesYielded,
            totalNodes: allNodes.size,
            nodesInCycle: cycleNodeCount,
          },
        });
      }
    }

    this._logger.debug('topologicalSort completed', {
      nodesYielded,
      totalNodes: allNodes.size,
      cycleDetected,
    });
  }
}
@@ -0,0 +1,245 @@
1
+ /**
2
+ * Service for DAG traversal operations: BFS, DFS, ancestor/descendant
3
+ * enumeration, and reachability checks.
4
+ *
5
+ * Split from CommitDagTraversalService as part of the SRP refactor.
6
+ *
7
+ * @module domain/services/DagTraversal
8
+ */
9
+
10
+ import nullLogger from '../utils/nullLogger.js';
11
+ import { checkAborted } from '../utils/cancellation.js';
12
+
13
+ /**
14
+ * @typedef {'forward' | 'reverse'} TraversalDirection
15
+ */
16
+
17
+ /**
18
+ * @typedef {Object} TraversalNode
19
+ * @property {string} sha - The node's SHA
20
+ * @property {number} depth - Distance from start node
21
+ * @property {string|null} parent - SHA of the node that led to this one
22
+ */
23
+
24
+ /**
25
+ * Default limits for traversal operations.
26
+ * @const
27
+ */
28
+ const DEFAULT_MAX_NODES = 100000;
29
+ const DEFAULT_MAX_DEPTH = 1000;
30
+
31
/**
 * Service for DAG traversal operations.
 *
 * Provides BFS, DFS, ancestor/descendant enumeration,
 * and reachability checks using async generators for
 * memory-efficient processing of arbitrarily large graphs.
 */
export default class DagTraversal {
  /**
   * Creates a new DagTraversal service.
   *
   * @param {Object} options
   * @param {import('./BitmapIndexReader.js').default} options.indexReader - Index reader for O(1) lookups
   * @param {import('../../ports/LoggerPort.js').default} [options.logger] - Logger instance
   * @throws {Error} If indexReader is missing
   */
  constructor({ indexReader, logger = nullLogger } = {}) {
    if (!indexReader) {
      throw new Error('DagTraversal requires an indexReader');
    }
    this._indexReader = indexReader;
    this._logger = logger;
    // Declared here (not lazily in _setPathFinder) so the field always
    // exists and the instance shape stays stable.
    this._pathFinder = null;
  }

  /**
   * Gets neighbors for a node based on direction.
   *
   * @param {string} sha - Node SHA to get neighbors for
   * @param {TraversalDirection} direction - 'forward' for children, 'reverse' for parents
   * @returns {Promise<string[]>} Array of neighbor SHAs
   * @private
   */
  async _getNeighbors(sha, direction) {
    return direction === 'forward'
      ? this._indexReader.getChildren(sha)
      : this._indexReader.getParents(sha);
  }

  /**
   * Breadth-first traversal from a starting node.
   *
   * BFS explores nodes level-by-level, visiting all nodes at depth N before
   * moving to depth N+1. This guarantees that nodes are yielded in order of
   * increasing distance from the start node.
   *
   * @param {Object} options - Traversal options
   * @param {string} options.start - Starting node SHA
   * @param {number} [options.maxNodes=100000] - Maximum nodes to visit
   * @param {number} [options.maxDepth=1000] - Maximum depth to traverse
   * @param {TraversalDirection} [options.direction='forward'] - Traversal direction
   * @param {AbortSignal} [options.signal] - Optional AbortSignal for cancellation
   * @yields {TraversalNode} Nodes in BFS order
   */
  async *bfs({
    start,
    maxNodes = DEFAULT_MAX_NODES,
    maxDepth = DEFAULT_MAX_DEPTH,
    direction = 'forward',
    signal,
  }) {
    const visited = new Set();
    // Index-pointer queue: Array#shift is O(n) per dequeue, which made BFS
    // quadratic on wide graphs; advancing `head` keeps dequeue O(1).
    const queue = [{ sha: start, depth: 0, parent: null }];
    let head = 0;
    let nodesYielded = 0;

    this._logger.debug('BFS started', { start, direction, maxNodes, maxDepth });

    while (head < queue.length && nodesYielded < maxNodes) {
      // Throttled cancellation check; skip the redundant probe at 0 yields.
      if (nodesYielded > 0 && nodesYielded % 1000 === 0) {
        checkAborted(signal, 'bfs');
      }

      const current = queue[head++];

      // A node may be enqueued more than once; only the first copy counts.
      if (visited.has(current.sha)) { continue; }
      if (current.depth > maxDepth) { continue; }

      visited.add(current.sha);
      nodesYielded++;
      yield current;

      if (current.depth < maxDepth) {
        const neighbors = await this._getNeighbors(current.sha, direction);
        for (const neighborSha of neighbors) {
          if (!visited.has(neighborSha)) {
            queue.push({ sha: neighborSha, depth: current.depth + 1, parent: current.sha });
          }
        }
      }
    }

    this._logger.debug('BFS completed', { nodesVisited: nodesYielded, start, direction });
  }

  /**
   * Depth-first pre-order traversal from a starting node.
   *
   * DFS explores as far as possible along each branch before backtracking.
   *
   * @param {Object} options - Traversal options
   * @param {string} options.start - Starting node SHA
   * @param {number} [options.maxNodes=100000] - Maximum nodes to visit
   * @param {number} [options.maxDepth=1000] - Maximum depth to traverse
   * @param {TraversalDirection} [options.direction='forward'] - Traversal direction
   * @param {AbortSignal} [options.signal] - Optional AbortSignal for cancellation
   * @yields {TraversalNode} Nodes in DFS pre-order
   */
  async *dfs({
    start,
    maxNodes = DEFAULT_MAX_NODES,
    maxDepth = DEFAULT_MAX_DEPTH,
    direction = 'forward',
    signal,
  }) {
    const visited = new Set();
    const stack = [{ sha: start, depth: 0, parent: null }];
    let nodesYielded = 0;

    this._logger.debug('DFS started', { start, direction, maxNodes, maxDepth });

    while (stack.length > 0 && nodesYielded < maxNodes) {
      // Throttled cancellation check; skip the redundant probe at 0 yields.
      if (nodesYielded > 0 && nodesYielded % 1000 === 0) {
        checkAborted(signal, 'dfs');
      }

      const current = stack.pop();

      if (visited.has(current.sha)) { continue; }
      if (current.depth > maxDepth) { continue; }

      visited.add(current.sha);
      nodesYielded++;
      yield current;

      if (current.depth < maxDepth) {
        const neighbors = await this._getNeighbors(current.sha, direction);
        // Push in reverse order so the first-listed neighbor is explored first.
        for (let i = neighbors.length - 1; i >= 0; i--) {
          const neighborSha = neighbors[i];
          if (!visited.has(neighborSha)) {
            stack.push({ sha: neighborSha, depth: current.depth + 1, parent: current.sha });
          }
        }
      }
    }

    this._logger.debug('DFS completed', { nodesVisited: nodesYielded, start, direction });
  }

  /**
   * Yields all ancestors of a node (transitive closure going backwards).
   * Note: the starting node itself is yielded first (BFS includes the start).
   *
   * @param {Object} options - Traversal options
   * @param {string} options.sha - Starting node SHA
   * @param {number} [options.maxNodes=100000] - Maximum ancestor nodes to yield
   * @param {number} [options.maxDepth=1000] - Maximum generations to traverse
   * @param {AbortSignal} [options.signal] - Optional AbortSignal for cancellation
   * @yields {TraversalNode} Ancestor nodes in BFS order
   */
  async *ancestors({ sha, maxNodes = DEFAULT_MAX_NODES, maxDepth = DEFAULT_MAX_DEPTH, signal }) {
    yield* this.bfs({ start: sha, maxNodes, maxDepth, direction: 'reverse', signal });
  }

  /**
   * Yields all descendants of a node (transitive closure going forwards).
   * Note: the starting node itself is yielded first (BFS includes the start).
   *
   * @param {Object} options - Traversal options
   * @param {string} options.sha - Starting node SHA
   * @param {number} [options.maxNodes=100000] - Maximum descendant nodes to yield
   * @param {number} [options.maxDepth=1000] - Maximum generations to traverse
   * @param {AbortSignal} [options.signal] - Optional AbortSignal for cancellation
   * @yields {TraversalNode} Descendant nodes in BFS order
   */
  async *descendants({ sha, maxNodes = DEFAULT_MAX_NODES, maxDepth = DEFAULT_MAX_DEPTH, signal }) {
    yield* this.bfs({ start: sha, maxNodes, maxDepth, direction: 'forward', signal });
  }

  /**
   * Checks if there is any path from one node to another.
   *
   * Delegates to the path-finding service's findPath if one is set,
   * otherwise performs its own BFS-based reachability check.
   *
   * @param {Object} options - Reachability options
   * @param {string} options.from - Source node SHA
   * @param {string} options.to - Target node SHA
   * @param {number} [options.maxDepth=1000] - Maximum search depth
   * @param {number} [options.maxNodes=100000] - Maximum nodes the BFS fallback may visit
   * @param {AbortSignal} [options.signal] - Optional AbortSignal for cancellation
   * @returns {Promise<boolean>} True if a path exists
   */
  async isReachable({ from, to, maxDepth = DEFAULT_MAX_DEPTH, maxNodes = DEFAULT_MAX_NODES, signal }) {
    if (this._pathFinder) {
      const result = await this._pathFinder.findPath({ from, to, maxDepth, signal });
      return result.found;
    }
    // Trivial case: every node reaches itself.
    if (from === to) {
      return true;
    }
    // Fallback: forward BFS until the target appears or limits are hit.
    for await (const node of this.bfs({ start: from, maxNodes, maxDepth, direction: 'forward', signal })) {
      if (node.sha === to) {
        return true;
      }
    }
    return false;
  }

  /**
   * Sets the path-finding service for reachability delegation.
   *
   * @param {import('./DagPathFinding.js').default} pathFinder - Path finding service
   * @internal
   */
  _setPathFinder(pathFinder) {
    this._pathFinder = pathFinder;
  }
}
@@ -0,0 +1,108 @@
1
+ import defaultCodec from '../utils/defaultCodec.js';
2
+
3
+ /**
4
+ * Frontier: Map of writerId -> lastSeenPatchSha
5
+ * @typedef {Map<string, string>} Frontier
6
+ */
7
+
8
/**
 * Creates an empty frontier.
 * Writers are added lazily via updateFrontier().
 * @returns {Frontier} A new, empty Map
 */
export function createFrontier() {
  return new Map();
}
15
+
16
/**
 * Updates the frontier with a new patch.
 * Mutates the frontier in place (no copy is made).
 *
 * Note: any previous SHA recorded for this writer is overwritten
 * unconditionally; the caller is responsible for only advancing
 * to newer patches.
 *
 * @param {Frontier} frontier - The frontier to update
 * @param {string} writerId - Writer ID
 * @param {string} patchSha - Latest patch SHA for this writer
 * @returns {void}
 */
export function updateFrontier(frontier, writerId, patchSha) {
  frontier.set(writerId, patchSha);
}
28
+
29
/**
 * Gets the last-seen patch SHA for a writer.
 * @param {Frontier} frontier - Frontier to query
 * @param {string} writerId - Writer ID to look up
 * @returns {string | undefined} The patch SHA, or undefined if the writer is unknown
 */
export function getFrontierEntry(frontier, writerId) {
  return frontier.get(writerId);
}
38
+
39
/**
 * Lists all writers in the frontier.
 * @param {Frontier} frontier - Frontier to enumerate
 * @returns {string[]} Sorted list of writer IDs
 */
export function getWriters(frontier) {
  const writerIds = [...frontier.keys()];
  writerIds.sort();
  return writerIds;
}
47
+
48
/**
 * Serializes frontier to canonical CBOR bytes.
 * Writer IDs are sorted so the encoded bytes are deterministic regardless
 * of Map insertion order.
 * @param {Frontier} frontier - Frontier to encode
 * @param {Object} [options]
 * @param {import('../../ports/CodecPort.js').default} options.codec - Codec for serialization
 * @returns {Buffer} Encoded frontier bytes
 */
export function serializeFrontier(frontier, { codec } = {}) {
  const c = codec || defaultCodec;
  // Build a plain object with keys in sorted order for deterministic encoding.
  const sortedKeys = [...frontier.keys()].sort();
  const obj = Object.fromEntries(sortedKeys.map((key) => [key, frontier.get(key)]));
  return c.encode(obj);
}
66
+
67
/**
 * Deserializes frontier from CBOR bytes.
 * @param {Buffer} buffer - Encoded frontier bytes
 * @param {Object} [options]
 * @param {import('../../ports/CodecPort.js').default} options.codec - Codec for deserialization
 * @returns {Frontier} Map of writerId -> patchSha
 */
export function deserializeFrontier(buffer, { codec } = {}) {
  const c = codec || defaultCodec;
  // The decoded value is a plain object; lift its own entries into a Map.
  return new Map(Object.entries(c.decode(buffer)));
}
83
+
84
/**
 * Clones a frontier.
 * The copy is shallow but independent: mutating the clone does not
 * affect the original Map.
 * @param {Frontier} frontier - Frontier to copy
 * @returns {Frontier} New Map with the same entries
 */
export function cloneFrontier(frontier) {
  const copy = new Map();
  for (const [writerId, patchSha] of frontier) {
    copy.set(writerId, patchSha);
  }
  return copy;
}
92
+
93
/**
 * Merges two frontiers, taking the "later" entry for each writer.
 * Note: This is a simple merge where entries of `b` win over entries of
 * `a` for the same writer — the caller determines order. For proper
 * "later" detection you'd need to compare patch ancestry.
 * @param {Frontier} a - Base frontier
 * @param {Frontier} b - Frontier whose entries take precedence
 * @returns {Frontier} New merged Map; neither input is mutated
 */
export function mergeFrontiers(a, b) {
  // Map construction keeps the last occurrence per key, so spreading
  // `b` after `a` makes b's entries overwrite a's.
  return new Map([...a, ...b]);
}
@@ -0,0 +1,101 @@
1
+ /**
2
+ * GCMetrics - Collects garbage collection metrics from WARP V5 state.
3
+ */
4
+
5
+ /**
6
+ * @typedef {Object} GCMetrics
7
+ * @property {number} nodeEntries - Total dot entries in nodeAlive
8
+ * @property {number} edgeEntries - Total dot entries in edgeAlive
9
+ * @property {number} totalEntries - Sum of all entries
10
+ * @property {number} nodeTombstones - Tombstoned dots in nodeAlive that reference entry dots
11
+ * @property {number} edgeTombstones - Tombstoned dots in edgeAlive that reference entry dots
12
+ * @property {number} totalTombstones - Sum of all tombstones
13
+ * @property {number} nodeLiveDots - Live (non-tombstoned) dots in nodeAlive
14
+ * @property {number} edgeLiveDots - Live (non-tombstoned) dots in edgeAlive
15
+ * @property {number} totalLiveDots - Sum of all live dots
16
+ * @property {number} tombstoneRatio - Ratio of tombstones to (tombstones + liveDots)
17
+ */
18
+
19
/**
 * Counts total entries (dots) in an ORSet across all elements.
 * @param {import('../crdt/ORSet.js').ORSet} orset - Set whose dot entries are tallied
 * @returns {number} Total dots across every element
 */
export function countEntries(orset) {
  // Sum the per-element dot-set sizes in one pass.
  return [...orset.entries.values()].reduce((total, dots) => total + dots.size, 0);
}
31
+
32
/**
 * Counts live dots in an ORSet (entries minus tombstoned).
 * @param {import('../crdt/ORSet.js').ORSet} orset - Set to inspect
 * @returns {number} Number of non-tombstoned dots across all elements
 */
export function countLiveDots(orset) {
  let live = 0;
  for (const dots of orset.entries.values()) {
    for (const dot of dots) {
      if (orset.tombstones.has(dot)) { continue; }
      live += 1;
    }
  }
  return live;
}
48
+
49
/**
 * Counts tombstones in an ORSet that reference entry dots.
 * Only tombstones that actually correspond to dots present in `entries`
 * are counted; dangling tombstones are ignored.
 * @param {import('../crdt/ORSet.js').ORSet} orset - Set to inspect
 * @returns {number} Number of tombstoned entry dots
 */
export function countTombstones(orset) {
  let tombstoned = 0;
  for (const dots of orset.entries.values()) {
    for (const dot of dots) {
      tombstoned += orset.tombstones.has(dot) ? 1 : 0;
    }
  }
  return tombstoned;
}
66
+
67
/**
 * Collects GC metrics from state.
 * @param {import('./JoinReducer.js').WarpStateV5} state - State holding nodeAlive/edgeAlive ORSets
 * @returns {GCMetrics} Aggregated entry/live/tombstone counts and tombstone ratio
 */
export function collectGCMetrics(state) {
  // Tally entries, live dots, and tombstoned dots for one ORSet in a single pass.
  const tally = (orset) => {
    let entries = 0;
    let live = 0;
    let tombstoned = 0;
    for (const dots of orset.entries.values()) {
      for (const dot of dots) {
        entries += 1;
        if (orset.tombstones.has(dot)) {
          tombstoned += 1;
        } else {
          live += 1;
        }
      }
    }
    return { entries, live, tombstoned };
  };

  const nodes = tally(state.nodeAlive);
  const edges = tally(state.edgeAlive);

  const totalEntries = nodes.entries + edges.entries;
  const totalTombstones = nodes.tombstoned + edges.tombstoned;
  const totalLiveDots = nodes.live + edges.live;

  // tombstoneRatio = tombstones / (tombstones + liveDots); 0 when empty.
  const denominator = totalTombstones + totalLiveDots;
  const tombstoneRatio = denominator > 0 ? totalTombstones / denominator : 0;

  return {
    nodeEntries: nodes.entries,
    edgeEntries: edges.entries,
    totalEntries,
    nodeTombstones: nodes.tombstoned,
    edgeTombstones: edges.tombstoned,
    totalTombstones,
    nodeLiveDots: nodes.live,
    edgeLiveDots: edges.live,
    totalLiveDots,
    tombstoneRatio,
  };
}