@git-stunts/git-warp 10.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/NOTICE +16 -0
- package/README.md +480 -0
- package/SECURITY.md +30 -0
- package/bin/git-warp +24 -0
- package/bin/warp-graph.js +1574 -0
- package/index.d.ts +2366 -0
- package/index.js +180 -0
- package/package.json +129 -0
- package/scripts/install-git-warp.sh +258 -0
- package/scripts/uninstall-git-warp.sh +139 -0
- package/src/domain/WarpGraph.js +3157 -0
- package/src/domain/crdt/Dot.js +160 -0
- package/src/domain/crdt/LWW.js +154 -0
- package/src/domain/crdt/ORSet.js +371 -0
- package/src/domain/crdt/VersionVector.js +222 -0
- package/src/domain/entities/GraphNode.js +60 -0
- package/src/domain/errors/EmptyMessageError.js +47 -0
- package/src/domain/errors/ForkError.js +30 -0
- package/src/domain/errors/IndexError.js +23 -0
- package/src/domain/errors/OperationAbortedError.js +22 -0
- package/src/domain/errors/QueryError.js +39 -0
- package/src/domain/errors/SchemaUnsupportedError.js +17 -0
- package/src/domain/errors/ShardCorruptionError.js +56 -0
- package/src/domain/errors/ShardLoadError.js +57 -0
- package/src/domain/errors/ShardValidationError.js +61 -0
- package/src/domain/errors/StorageError.js +57 -0
- package/src/domain/errors/SyncError.js +30 -0
- package/src/domain/errors/TraversalError.js +23 -0
- package/src/domain/errors/WarpError.js +31 -0
- package/src/domain/errors/WormholeError.js +28 -0
- package/src/domain/errors/WriterError.js +39 -0
- package/src/domain/errors/index.js +21 -0
- package/src/domain/services/AnchorMessageCodec.js +99 -0
- package/src/domain/services/BitmapIndexBuilder.js +225 -0
- package/src/domain/services/BitmapIndexReader.js +435 -0
- package/src/domain/services/BoundaryTransitionRecord.js +463 -0
- package/src/domain/services/CheckpointMessageCodec.js +147 -0
- package/src/domain/services/CheckpointSerializerV5.js +281 -0
- package/src/domain/services/CheckpointService.js +384 -0
- package/src/domain/services/CommitDagTraversalService.js +156 -0
- package/src/domain/services/DagPathFinding.js +712 -0
- package/src/domain/services/DagTopology.js +239 -0
- package/src/domain/services/DagTraversal.js +245 -0
- package/src/domain/services/Frontier.js +108 -0
- package/src/domain/services/GCMetrics.js +101 -0
- package/src/domain/services/GCPolicy.js +122 -0
- package/src/domain/services/GitLogParser.js +205 -0
- package/src/domain/services/HealthCheckService.js +246 -0
- package/src/domain/services/HookInstaller.js +326 -0
- package/src/domain/services/HttpSyncServer.js +262 -0
- package/src/domain/services/IndexRebuildService.js +426 -0
- package/src/domain/services/IndexStalenessChecker.js +103 -0
- package/src/domain/services/JoinReducer.js +582 -0
- package/src/domain/services/KeyCodec.js +113 -0
- package/src/domain/services/LegacyAnchorDetector.js +67 -0
- package/src/domain/services/LogicalTraversal.js +351 -0
- package/src/domain/services/MessageCodecInternal.js +132 -0
- package/src/domain/services/MessageSchemaDetector.js +145 -0
- package/src/domain/services/MigrationService.js +55 -0
- package/src/domain/services/ObserverView.js +265 -0
- package/src/domain/services/PatchBuilderV2.js +669 -0
- package/src/domain/services/PatchMessageCodec.js +140 -0
- package/src/domain/services/ProvenanceIndex.js +337 -0
- package/src/domain/services/ProvenancePayload.js +242 -0
- package/src/domain/services/QueryBuilder.js +835 -0
- package/src/domain/services/StateDiff.js +300 -0
- package/src/domain/services/StateSerializerV5.js +156 -0
- package/src/domain/services/StreamingBitmapIndexBuilder.js +709 -0
- package/src/domain/services/SyncProtocol.js +593 -0
- package/src/domain/services/TemporalQuery.js +201 -0
- package/src/domain/services/TranslationCost.js +221 -0
- package/src/domain/services/TraversalService.js +8 -0
- package/src/domain/services/WarpMessageCodec.js +29 -0
- package/src/domain/services/WarpStateIndexBuilder.js +127 -0
- package/src/domain/services/WormholeService.js +353 -0
- package/src/domain/types/TickReceipt.js +285 -0
- package/src/domain/types/WarpTypes.js +209 -0
- package/src/domain/types/WarpTypesV2.js +200 -0
- package/src/domain/utils/CachedValue.js +140 -0
- package/src/domain/utils/EventId.js +89 -0
- package/src/domain/utils/LRUCache.js +112 -0
- package/src/domain/utils/MinHeap.js +114 -0
- package/src/domain/utils/RefLayout.js +280 -0
- package/src/domain/utils/WriterId.js +205 -0
- package/src/domain/utils/cancellation.js +33 -0
- package/src/domain/utils/canonicalStringify.js +42 -0
- package/src/domain/utils/defaultClock.js +20 -0
- package/src/domain/utils/defaultCodec.js +51 -0
- package/src/domain/utils/nullLogger.js +21 -0
- package/src/domain/utils/roaring.js +181 -0
- package/src/domain/utils/shardVersion.js +9 -0
- package/src/domain/warp/PatchSession.js +217 -0
- package/src/domain/warp/Writer.js +181 -0
- package/src/hooks/post-merge.sh +60 -0
- package/src/infrastructure/adapters/BunHttpAdapter.js +225 -0
- package/src/infrastructure/adapters/ClockAdapter.js +57 -0
- package/src/infrastructure/adapters/ConsoleLogger.js +150 -0
- package/src/infrastructure/adapters/DenoHttpAdapter.js +230 -0
- package/src/infrastructure/adapters/GitGraphAdapter.js +787 -0
- package/src/infrastructure/adapters/GlobalClockAdapter.js +5 -0
- package/src/infrastructure/adapters/NoOpLogger.js +62 -0
- package/src/infrastructure/adapters/NodeCryptoAdapter.js +32 -0
- package/src/infrastructure/adapters/NodeHttpAdapter.js +98 -0
- package/src/infrastructure/adapters/PerformanceClockAdapter.js +5 -0
- package/src/infrastructure/adapters/WebCryptoAdapter.js +121 -0
- package/src/infrastructure/codecs/CborCodec.js +384 -0
- package/src/ports/BlobPort.js +30 -0
- package/src/ports/ClockPort.js +25 -0
- package/src/ports/CodecPort.js +25 -0
- package/src/ports/CommitPort.js +114 -0
- package/src/ports/ConfigPort.js +31 -0
- package/src/ports/CryptoPort.js +38 -0
- package/src/ports/GraphPersistencePort.js +57 -0
- package/src/ports/HttpServerPort.js +25 -0
- package/src/ports/IndexStoragePort.js +39 -0
- package/src/ports/LoggerPort.js +68 -0
- package/src/ports/RefPort.js +51 -0
- package/src/ports/TreePort.js +51 -0
- package/src/visualization/index.js +26 -0
- package/src/visualization/layouts/converters.js +75 -0
- package/src/visualization/layouts/elkAdapter.js +86 -0
- package/src/visualization/layouts/elkLayout.js +95 -0
- package/src/visualization/layouts/index.js +29 -0
- package/src/visualization/renderers/ascii/box.js +16 -0
- package/src/visualization/renderers/ascii/check.js +271 -0
- package/src/visualization/renderers/ascii/colors.js +13 -0
- package/src/visualization/renderers/ascii/formatters.js +73 -0
- package/src/visualization/renderers/ascii/graph.js +344 -0
- package/src/visualization/renderers/ascii/history.js +335 -0
- package/src/visualization/renderers/ascii/index.js +14 -0
- package/src/visualization/renderers/ascii/info.js +245 -0
- package/src/visualization/renderers/ascii/materialize.js +255 -0
- package/src/visualization/renderers/ascii/path.js +240 -0
- package/src/visualization/renderers/ascii/progress.js +32 -0
- package/src/visualization/renderers/ascii/symbols.js +33 -0
- package/src/visualization/renderers/ascii/table.js +19 -0
- package/src/visualization/renderers/browser/index.js +1 -0
- package/src/visualization/renderers/svg/index.js +159 -0
- package/src/visualization/utils/ansi.js +14 -0
- package/src/visualization/utils/time.js +40 -0
- package/src/visualization/utils/truncate.js +40 -0
- package/src/visualization/utils/unicode.js +52 -0
|
@@ -0,0 +1,353 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* WormholeService - Wormhole Compression for WARP Graphs
|
|
3
|
+
*
|
|
4
|
+
* Implements wormhole compression from Paper III (Computational Holography):
|
|
5
|
+
* Compress multi-tick segments into single edges carrying sub-payloads.
|
|
6
|
+
*
|
|
7
|
+
* A wormhole is a compressed representation of a contiguous range of patches
|
|
8
|
+
* from a single writer. It preserves provenance by storing the original
|
|
9
|
+
* patches as a ProvenancePayload that can be replayed during materialization.
|
|
10
|
+
*
|
|
11
|
+
* ## Key Properties
|
|
12
|
+
*
|
|
13
|
+
* - **Provenance Preservation**: The wormhole contains the full sub-payload,
|
|
14
|
+
* allowing exact replay of the compressed segment.
|
|
15
|
+
* - **Monoid Composition**: Two consecutive wormholes can be composed by
|
|
16
|
+
* concatenating their sub-payloads.
|
|
17
|
+
* - **Materialization Equivalence**: A wormhole + remaining patches produces
|
|
18
|
+
* the same state as materializing all patches.
|
|
19
|
+
*
|
|
20
|
+
* @module domain/services/WormholeService
|
|
21
|
+
*/
|
|
22
|
+
|
|
23
|
+
import defaultCodec from '../utils/defaultCodec.js';
|
|
24
|
+
import ProvenancePayload from './ProvenancePayload.js';
|
|
25
|
+
import WormholeError from '../errors/WormholeError.js';
|
|
26
|
+
import { detectMessageKind, decodePatchMessage } from './WarpMessageCodec.js';
|
|
27
|
+
|
|
28
|
+
/**
 * Ensures a SHA argument is a non-empty string before any repository work.
 *
 * @param {*} sha - Candidate SHA value
 * @param {string} paramName - Name used in the error message and context
 * @returns {void}
 * @throws {WormholeError} E_WORMHOLE_SHA_NOT_FOUND when the value is missing,
 *   empty, or not a string
 * @private
 */
function validateSha(sha, paramName) {
  const isNonEmptyString = typeof sha === 'string' && sha.length > 0;
  if (isNonEmptyString) {
    return;
  }
  throw new WormholeError(`${paramName} is required and must be a string`, {
    code: 'E_WORMHOLE_SHA_NOT_FOUND',
    context: { [paramName]: sha },
  });
}
|
|
43
|
+
|
|
44
|
+
/**
 * Confirms that a commit SHA is present in the repository.
 *
 * @param {Object} persistence - Git persistence adapter (must expose `nodeExists`)
 * @param {string} sha - Commit SHA to look up
 * @param {string} paramName - Which caller argument this SHA came from (for diagnostics)
 * @returns {Promise<void>} Resolves when the SHA exists
 * @throws {WormholeError} E_WORMHOLE_SHA_NOT_FOUND when the SHA is absent
 * @private
 */
async function verifyShaExists(persistence, sha, paramName) {
  if (await persistence.nodeExists(sha)) {
    return;
  }
  throw new WormholeError(`Patch SHA '${sha}' does not exist`, {
    code: 'E_WORMHOLE_SHA_NOT_FOUND',
    context: { sha, which: paramName },
  });
}
|
|
61
|
+
|
|
62
|
+
/**
 * Loads and validates a single commit in the wormhole chain.
 *
 * Checks, in order: the commit is a patch commit, it belongs to the expected
 * graph, and it was authored by the expected writer (when one is known).
 * On success the patch blob is read and decoded with the supplied codec.
 *
 * @param {Object} opts - Options
 * @param {Object} opts.persistence - Git persistence adapter
 * @param {string} opts.sha - The commit SHA
 * @param {string} opts.graphName - Expected graph name
 * @param {string|null} opts.expectedWriter - Expected writer ID (null for the first commit)
 * @param {Object} [opts.codec] - Codec for decoding the patch blob (defaults to defaultCodec)
 * @returns {Promise<{patch: Object, sha: string, writerId: string, parentSha: string|null}>}
 * @throws {WormholeError} E_WORMHOLE_NOT_PATCH, E_WORMHOLE_INVALID_RANGE,
 *   or E_WORMHOLE_MULTI_WRITER on validation failures
 * @private
 */
async function processCommit({ persistence, sha, graphName, expectedWriter, codec: codecOpt }) {
  const activeCodec = codecOpt || defaultCodec;
  const { message, parents } = await persistence.getNodeInfo(sha);

  const kind = detectMessageKind(message);
  if (kind !== 'patch') {
    throw new WormholeError(`Commit '${sha}' is not a patch commit (kind: ${kind})`, {
      code: 'E_WORMHOLE_NOT_PATCH',
      context: { sha, kind },
    });
  }

  const meta = decodePatchMessage(message);

  if (meta.graph !== graphName) {
    throw new WormholeError(`Patch '${sha}' belongs to graph '${meta.graph}', not '${graphName}'`, {
      code: 'E_WORMHOLE_INVALID_RANGE',
      context: { sha, expectedGraph: graphName, actualGraph: meta.graph },
    });
  }

  if (expectedWriter !== null && meta.writer !== expectedWriter) {
    throw new WormholeError(`Patches span multiple writers: '${expectedWriter}' and '${meta.writer}'`, {
      code: 'E_WORMHOLE_MULTI_WRITER',
      context: { sha, expectedWriter, actualWriter: meta.writer },
    });
  }

  const patchBuffer = await persistence.readBlob(meta.patchOid);
  const patch = activeCodec.decode(patchBuffer);

  // First parent only: wormholes walk a single writer's linear patch chain.
  const parentSha = parents && parents.length > 0 ? parents[0] : null;

  return { patch, sha, writerId: meta.writer, parentSha };
}
|
|
112
|
+
|
|
113
|
+
/**
|
|
114
|
+
* Represents a compressed range of patches (wormhole).
|
|
115
|
+
*
|
|
116
|
+
* A WormholeEdge contains:
|
|
117
|
+
* - The SHA of the first (oldest) patch in the range (fromSha)
|
|
118
|
+
* - The SHA of the last (newest) patch in the range (toSha)
|
|
119
|
+
* - The writer ID who created all patches in the range
|
|
120
|
+
* - A ProvenancePayload containing all patches for replay
|
|
121
|
+
*
|
|
122
|
+
* @typedef {Object} WormholeEdge
|
|
123
|
+
* @property {string} fromSha - SHA of the first (oldest) patch commit
|
|
124
|
+
* @property {string} toSha - SHA of the last (newest) patch commit
|
|
125
|
+
* @property {string} writerId - Writer ID of all patches in the range
|
|
126
|
+
* @property {ProvenancePayload} payload - Sub-payload for replay
|
|
127
|
+
* @property {number} patchCount - Number of patches compressed
|
|
128
|
+
*/
|
|
129
|
+
|
|
130
|
+
/**
 * Creates a wormhole compressing a range of patches.
 *
 * The range is given by two patch SHAs from the same writer; `fromSha` must
 * be an ancestor of `toSha` in the writer's patch chain, and both endpoints
 * are inclusive. The resulting edge carries a ProvenancePayload so the
 * compressed segment can be replayed exactly.
 *
 * @param {Object} options - Wormhole creation options
 * @param {import('../../ports/GraphPersistencePort.js').default} options.persistence - Git persistence adapter
 * @param {string} options.graphName - Name of the graph
 * @param {string} options.fromSha - SHA of the first (oldest) patch commit
 * @param {string} options.toSha - SHA of the last (newest) patch commit
 * @param {Object} [options.codec] - Codec used to decode patch blobs
 * @returns {Promise<WormholeEdge>} The created wormhole
 * @throws {WormholeError} E_WORMHOLE_SHA_NOT_FOUND if either SHA is missing or doesn't exist
 * @throws {WormholeError} E_WORMHOLE_INVALID_RANGE if fromSha is not an ancestor of toSha
 * @throws {WormholeError} E_WORMHOLE_MULTI_WRITER if commits span multiple writers
 * @throws {WormholeError} E_WORMHOLE_NOT_PATCH if a commit is not a patch commit
 */
export async function createWormhole({ persistence, graphName, fromSha, toSha, codec }) {
  validateSha(fromSha, 'fromSha');
  validateSha(toSha, 'toSha');
  await verifyShaExists(persistence, fromSha, 'fromSha');
  await verifyShaExists(persistence, toSha, 'toSha');

  const collected = await collectPatchRange({ persistence, graphName, fromSha, toSha, codec });

  // collectPatchRange yields newest-first; ProvenancePayload wants oldest-first.
  collected.reverse();

  const writerId = collected.length > 0 ? collected[0].writerId : null;
  // Drop writerId from each entry to match ProvenancePayload's {patch, sha} shape.
  const payload = new ProvenancePayload(collected.map(({ patch, sha }) => ({ patch, sha })));

  return { fromSha, toSha, writerId, payload, patchCount: collected.length };
}
|
|
165
|
+
|
|
166
|
+
/**
 * Collects patches from toSha back to fromSha (newest-first order).
 *
 * Walks the first-parent chain starting at toSha, validating each commit via
 * processCommit, until fromSha is reached. Running out of parents before
 * reaching fromSha means the range is invalid.
 *
 * @param {Object} options
 * @param {import('../../ports/GraphPersistencePort.js').default} options.persistence - Git persistence adapter
 * @param {string} options.graphName - Expected graph name
 * @param {string} options.fromSha - SHA of the first (oldest) patch commit
 * @param {string} options.toSha - SHA of the last (newest) patch commit
 * @param {Object} [options.codec] - Codec used to decode patch blobs
 * @returns {Promise<Array<{patch: Object, sha: string, writerId: string}>>} Patches, newest first
 * @throws {WormholeError} E_WORMHOLE_INVALID_RANGE / E_WORMHOLE_EMPTY_RANGE
 * @private
 */
async function collectPatchRange({ persistence, graphName, fromSha, toSha, codec }) {
  const collected = [];
  let cursor = toSha;
  let writerId = null;

  while (cursor) {
    const entry = await processCommit({
      persistence,
      sha: cursor,
      graphName,
      expectedWriter: writerId,
      codec,
    });
    writerId = entry.writerId;
    collected.push({ patch: entry.patch, sha: entry.sha, writerId: entry.writerId });

    if (cursor === fromSha) {
      break;
    }

    // A root commit before reaching fromSha means fromSha is not an ancestor.
    if (!entry.parentSha) {
      throw new WormholeError(`'${fromSha}' is not an ancestor of '${toSha}'`, {
        code: 'E_WORMHOLE_INVALID_RANGE',
        context: { fromSha, toSha },
      });
    }
    cursor = entry.parentSha;
  }

  // Defensive: the loop can only exit at fromSha, but keep the invariant explicit.
  if (cursor !== fromSha) {
    throw new WormholeError(`'${fromSha}' is not an ancestor of '${toSha}'`, {
      code: 'E_WORMHOLE_INVALID_RANGE',
      context: { fromSha, toSha },
    });
  }

  if (collected.length === 0) {
    throw new WormholeError('No patches found in the specified range', {
      code: 'E_WORMHOLE_EMPTY_RANGE',
      context: { fromSha, toSha },
    });
  }

  return collected;
}
|
|
220
|
+
|
|
221
|
+
/**
 * Composes two consecutive wormholes into a single wormhole.
 *
 * The wormholes must be consecutive: the first wormhole's toSha must be a
 * parent of the second wormhole's fromSha (checked only when a persistence
 * adapter is supplied). Composition leverages the ProvenancePayload monoid:
 * `composed.payload = first.payload.concat(second.payload)`.
 *
 * @param {WormholeEdge} first - The earlier (older) wormhole
 * @param {WormholeEdge} second - The later (newer) wormhole
 * @param {Object} [options] - Composition options
 * @param {import('../../ports/GraphPersistencePort.js').default} [options.persistence] - Adapter for adjacency validation
 * @returns {Promise<WormholeEdge>} The composed wormhole
 * @throws {WormholeError} E_WORMHOLE_MULTI_WRITER if the wormholes have different writers
 * @throws {WormholeError} E_WORMHOLE_INVALID_RANGE if the wormholes are not consecutive
 */
export async function composeWormholes(first, second, options = {}) {
  if (first.writerId !== second.writerId) {
    throw new WormholeError(`Cannot compose wormholes from different writers: '${first.writerId}' and '${second.writerId}'`, {
      code: 'E_WORMHOLE_MULTI_WRITER',
      context: { firstWriter: first.writerId, secondWriter: second.writerId },
    });
  }

  const { persistence } = options;
  if (persistence) {
    // Adjacency check: second's first commit must list first's last commit as a parent.
    const info = await persistence.getNodeInfo(second.fromSha);
    const parents = info.parents || [];
    if (!parents.includes(first.toSha)) {
      throw new WormholeError('Wormholes are not consecutive', {
        code: 'E_WORMHOLE_INVALID_RANGE',
        context: {
          firstToSha: first.toSha,
          secondFromSha: second.fromSha,
          secondParents: parents,
        },
      });
    }
  }

  return {
    fromSha: first.fromSha,
    toSha: second.toSha,
    writerId: first.writerId,
    payload: first.payload.concat(second.payload),
    patchCount: first.patchCount + second.patchCount,
  };
}
|
|
275
|
+
|
|
276
|
+
/**
 * Replays a wormhole's sub-payload to materialize the compressed state.
 *
 * Equivalent to materializing every patch in the wormhole individually;
 * replay semantics come from the payload (CRDT merge as defined in JoinReducer).
 *
 * @param {WormholeEdge} wormhole - The wormhole to replay
 * @param {import('./JoinReducer.js').WarpStateV5} [initialState] - Optional starting state
 * @returns {import('./JoinReducer.js').WarpStateV5} The materialized state
 */
export function replayWormhole(wormhole, initialState) {
  const { payload } = wormhole;
  return payload.replay(initialState);
}
|
|
289
|
+
|
|
290
|
+
/**
 * Serializes a wormhole to a JSON-serializable object.
 *
 * The payload is flattened via its own `toJSON()`; the inverse operation is
 * `deserializeWormhole`.
 *
 * @param {WormholeEdge} wormhole - The wormhole to serialize
 * @returns {Object} JSON-serializable representation
 */
export function serializeWormhole(wormhole) {
  const { fromSha, toSha, writerId, patchCount, payload } = wormhole;
  return {
    fromSha,
    toSha,
    writerId,
    patchCount,
    payload: payload.toJSON(),
  };
}
|
|
305
|
+
|
|
306
|
+
/**
 * Deserializes a wormhole from a JSON object.
 *
 * Validates structure before constructing the edge: all required fields must
 * be present, the three identity fields must be non-empty strings, and
 * patchCount must be a non-negative number. The payload is rebuilt via
 * `ProvenancePayload.fromJSON`.
 *
 * @param {Object} json - The JSON object to deserialize
 * @returns {WormholeEdge} The deserialized wormhole
 * @throws {WormholeError} E_INVALID_WORMHOLE_JSON if the JSON structure is invalid
 */
export function deserializeWormhole(json) {
  if (!json || typeof json !== 'object') {
    throw new WormholeError('Invalid wormhole JSON: expected object', {
      code: 'E_INVALID_WORMHOLE_JSON',
    });
  }

  const requiredFields = ['fromSha', 'toSha', 'writerId', 'patchCount', 'payload'];
  for (const field of requiredFields) {
    if (json[field] === undefined) {
      throw new WormholeError(`Invalid wormhole JSON: missing required field '${field}'`, {
        code: 'E_INVALID_WORMHOLE_JSON',
        context: { missingField: field },
      });
    }
  }

  // Robustness fix: presence alone was checked before, so a numeric fromSha or
  // a null writerId would deserialize into a malformed WormholeEdge and only
  // fail later, far from the bad input. Enforce the string contract here.
  for (const field of ['fromSha', 'toSha', 'writerId']) {
    if (typeof json[field] !== 'string' || json[field].length === 0) {
      throw new WormholeError(`Invalid wormhole JSON: '${field}' must be a non-empty string`, {
        code: 'E_INVALID_WORMHOLE_JSON',
        context: { field, value: json[field] },
      });
    }
  }

  if (typeof json.patchCount !== 'number' || json.patchCount < 0) {
    throw new WormholeError('Invalid wormhole JSON: patchCount must be a non-negative number', {
      code: 'E_INVALID_WORMHOLE_JSON',
      context: { patchCount: json.patchCount },
    });
  }

  return {
    fromSha: json.fromSha,
    toSha: json.toSha,
    writerId: json.writerId,
    patchCount: json.patchCount,
    payload: ProvenancePayload.fromJSON(json.payload),
  };
}
|
|
346
|
+
|
|
347
|
+
// Default export bundles the named exports into a single service object for
// consumers that prefer `import WormholeService from './WormholeService.js'`.
export default {
  createWormhole,
  composeWormholes,
  replayWormhole,
  serializeWormhole,
  deserializeWormhole,
};
|
|
@@ -0,0 +1,285 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* TickReceipt — immutable record of per-operation outcomes from a single patch application.
|
|
3
|
+
*
|
|
4
|
+
* A tick receipt captures what happened to each operation in a patch during
|
|
5
|
+
* materialization: whether it was applied, superseded by a concurrent write,
|
|
6
|
+
* or redundant (already present in the state).
|
|
7
|
+
*
|
|
8
|
+
* This is a type definition only — emission logic lives in LH/RECEIPTS/2.
|
|
9
|
+
*
|
|
10
|
+
* @module TickReceipt
|
|
11
|
+
* @see Paper II, Section 5 — Tick receipts: event posets recording accepted/rejected matches
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
// ============================================================================
|
|
15
|
+
// Constants
|
|
16
|
+
// ============================================================================
|
|
17
|
+
|
|
18
|
+
/**
 * Valid operation types that can appear in a tick receipt.
 * Mirrors the six patch operations defined in PatchBuilderV2.
 * Frozen so consumers cannot mutate the shared constant.
 * @type {ReadonlyArray<string>}
 */
export const OP_TYPES = Object.freeze([
  'NodeAdd',
  'NodeTombstone',
  'EdgeAdd',
  'EdgeTombstone',
  'PropSet',
  'BlobValue',
]);
|
|
30
|
+
|
|
31
|
+
/**
 * Valid result values for an operation outcome:
 * 'applied' (took effect), 'superseded' (lost to a concurrent write),
 * 'redundant' (state already reflected it). Frozen shared constant.
 * @type {ReadonlyArray<string>}
 */
export const RESULT_TYPES = Object.freeze([
  'applied',
  'superseded',
  'redundant',
]);
|
|
40
|
+
|
|
41
|
+
// ============================================================================
|
|
42
|
+
// Validation Helpers
|
|
43
|
+
// ============================================================================
|
|
44
|
+
|
|
45
|
+
// Set views of the frozen arrays: O(1) membership tests for the validators below.
const opTypeSet = new Set(OP_TYPES);
const resultTypeSet = new Set(RESULT_TYPES);
|
|
47
|
+
|
|
48
|
+
/**
 * Reports whether a value is a non-null object (arrays included).
 *
 * Guards against the `typeof null === 'object'` quirk.
 *
 * @param {unknown} value - Value to test
 * @returns {boolean} True for any non-null object
 */
function isObject(value) {
  return typeof value === 'object' && value !== null;
}
|
|
57
|
+
|
|
58
|
+
/**
 * Validates a single operation outcome entry.
 *
 * Checks the entry is an object, then delegates field validation to
 * validateOpType / validateOpTarget / validateOpResult; finally verifies
 * the optional `reason` is a string when present.
 *
 * @param {unknown} op - The operation outcome to validate
 * @param {number} index - Index within the ops array (for error messages)
 * @returns {void}
 * @throws {Error} If any field fails validation
 */
function validateOp(op, index) {
  if (!isObject(op)) {
    throw new Error(`ops[${index}] must be an object`);
  }

  validateOpType(op.op, index);
  validateOpTarget(op.target, index);
  validateOpResult(op.result, index);

  const { reason } = op;
  if (reason !== undefined && typeof reason !== 'string') {
    throw new Error(`ops[${index}].reason must be a string or undefined`);
  }
}
|
|
78
|
+
|
|
79
|
+
/**
 * Validates that an operation type is one of the allowed OP_TYPES.
 *
 * Valid types are the six patch operations from PatchBuilderV2: NodeAdd,
 * NodeTombstone, EdgeAdd, EdgeTombstone, PropSet, and BlobValue.
 *
 * @param {unknown} value - The operation type to validate
 * @param {number} i - Index of the operation in the ops array (for error messages)
 * @returns {void}
 * @throws {Error} If value is not a string or not a valid op type
 *
 * @example
 * validateOpType('NodeAdd', 0);    // OK
 * validateOpType('InvalidOp', 0);  // throws Error
 */
function validateOpType(value, i) {
  const valid = typeof value === 'string' && opTypeSet.has(value);
  if (!valid) {
    throw new Error(`ops[${i}].op must be one of: ${OP_TYPES.join(', ')}`);
  }
}
|
|
101
|
+
|
|
102
|
+
/**
 * Validates that an operation target is a non-empty string.
 *
 * The target names the affected entity: a node ID for node operations,
 * an edge key for edge operations, or a property key for property operations.
 *
 * @param {unknown} value - The target to validate
 * @param {number} i - Index of the operation in the ops array (for error messages)
 * @returns {void}
 * @throws {Error} If value is not a non-empty string
 *
 * @example
 * validateOpTarget('user:alice', 0); // OK
 * validateOpTarget('', 0);           // throws Error
 */
function validateOpTarget(value, i) {
  const valid = typeof value === 'string' && value.length > 0;
  if (!valid) {
    throw new Error(`ops[${i}].target must be a non-empty string`);
  }
}
|
|
125
|
+
|
|
126
|
+
/**
 * Validates that an operation result is one of the allowed RESULT_TYPES.
 *
 * Outcomes: 'applied' (the operation changed the state), 'superseded'
 * (overridden by a concurrent write, e.g. a higher-timestamp LWW register),
 * 'redundant' (the state already reflected it).
 *
 * @param {unknown} value - The result to validate
 * @param {number} i - Index of the operation in the ops array (for error messages)
 * @returns {void}
 * @throws {Error} If value is not a string or not a valid result type
 *
 * @example
 * validateOpResult('applied', 0); // OK
 * validateOpResult('failed', 0);  // throws Error
 */
function validateOpResult(value, i) {
  const valid = typeof value === 'string' && resultTypeSet.has(value);
  if (!valid) {
    throw new Error(`ops[${i}].result must be one of: ${RESULT_TYPES.join(', ')}`);
  }
}
|
|
151
|
+
|
|
152
|
+
// ============================================================================
|
|
153
|
+
// Factory
|
|
154
|
+
// ============================================================================
|
|
155
|
+
|
|
156
|
+
/**
|
|
157
|
+
* @typedef {Object} OpOutcome
|
|
158
|
+
* @property {string} op - Operation type ('NodeAdd' | 'NodeTombstone' | 'EdgeAdd' | 'EdgeTombstone' | 'PropSet' | 'BlobValue')
|
|
159
|
+
* @property {string} target - Node ID or edge key
|
|
160
|
+
* @property {'applied' | 'superseded' | 'redundant'} result - Outcome of the operation
|
|
161
|
+
* @property {string} [reason] - Human-readable explanation (e.g., "LWW: writer bob at lamport 43 wins")
|
|
162
|
+
*/
|
|
163
|
+
|
|
164
|
+
/**
|
|
165
|
+
* @typedef {Object} TickReceipt
|
|
166
|
+
* @property {string} patchSha - SHA of the patch commit
|
|
167
|
+
* @property {string} writer - Writer ID that produced the patch
|
|
168
|
+
* @property {number} lamport - Lamport timestamp of the patch
|
|
169
|
+
* @property {ReadonlyArray<Readonly<OpOutcome>>} ops - Per-operation outcomes (frozen)
|
|
170
|
+
*/
|
|
171
|
+
|
|
172
|
+
/**
 * Creates an immutable TickReceipt.
 *
 * Validates every field, then deep-freezes defensive copies of the op
 * entries so the caller cannot mutate the receipt (or have the receipt
 * alias the caller's objects) after construction.
 *
 * @param {Object} params
 * @param {string} params.patchSha - SHA of the patch commit
 * @param {string} params.writer - Writer ID
 * @param {number} params.lamport - Lamport timestamp (non-negative integer)
 * @param {OpOutcome[]} params.ops - Per-operation outcome records
 * @returns {Readonly<TickReceipt>} Frozen tick receipt
 * @throws {Error} If any parameter is invalid
 */
export function createTickReceipt({ patchSha, writer, lamport, ops }) {
  // Shared guard for the two string identity fields.
  const requireNonEmptyString = (value, label) => {
    if (typeof value !== 'string' || value.length === 0) {
      throw new Error(`${label} must be a non-empty string`);
    }
  };

  requireNonEmptyString(patchSha, 'patchSha');
  requireNonEmptyString(writer, 'writer');

  if (!Number.isInteger(lamport) || lamport < 0) {
    throw new Error('lamport must be a non-negative integer');
  }

  if (!Array.isArray(ops)) {
    throw new Error('ops must be an array');
  }

  ops.forEach((op, i) => validateOp(op, i));

  // Copy only the documented fields; `reason` is included solely when present.
  const frozenOps = Object.freeze(
    ops.map(({ op, target, result, reason }) => {
      const entry = reason === undefined
        ? { op, target, result }
        : { op, target, result, reason };
      return Object.freeze(entry);
    }),
  );

  return Object.freeze({
    patchSha,
    writer,
    lamport,
    ops: frozenOps,
  });
}
|
|
226
|
+
|
|
227
|
+
// ============================================================================
// Canonical JSON Serialization
// ============================================================================

/**
 * Produces a deterministic JSON string for a TickReceipt.
 *
 * Keys are sorted alphabetically at every nesting level, ensuring
 * identical receipts always produce identical byte strings regardless
 * of property insertion order.
 *
 * @param {TickReceipt} receipt - A TickReceipt (as returned by createTickReceipt)
 * @returns {string} Deterministic JSON string
 */
export function canonicalJson(receipt) {
  return JSON.stringify(receipt, sortedReplacer);
}

/**
 * `JSON.stringify` replacer that rewrites every plain object with its keys
 * in sorted order, yielding canonical output suitable for byte-for-byte
 * comparison or hashing.
 *
 * Arrays pass through untouched (their order is semantically significant),
 * as do primitives (strings, numbers, booleans, null).
 *
 * @param {string} _key - The current property key being processed (unused)
 * @param {unknown} value - The current value being processed
 * @returns {unknown} A key-sorted copy for plain objects; the value unchanged otherwise.
 *
 * @example
 * JSON.stringify({ b: 1, a: 2 }, sortedReplacer);   // '{"a":2,"b":1}'
 *
 * @example
 * // Nested objects are also sorted
 * JSON.stringify({ z: { b: 1, a: 2 }, y: 3 }, sortedReplacer);
 * // '{"y":3,"z":{"a":2,"b":1}}'
 *
 * @private
 */
function sortedReplacer(_key, value) {
  const isPlainObject =
    value !== null && typeof value === 'object' && !Array.isArray(value);
  if (!isPlainObject) {
    return value;
  }
  // Rebuild the object from its entries in sorted-key order.
  return Object.fromEntries(
    Object.keys(value)
      .sort()
      .map((k) => [k, value[k]]),
  );
}