@git-stunts/git-warp 12.2.0 → 12.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/README.md +9 -6
  2. package/bin/cli/commands/trust.js +37 -1
  3. package/bin/cli/infrastructure.js +14 -1
  4. package/bin/cli/schemas.js +4 -4
  5. package/bin/presenters/text.js +10 -3
  6. package/bin/warp-graph.js +4 -1
  7. package/index.d.ts +17 -1
  8. package/package.json +1 -1
  9. package/src/domain/WarpGraph.js +1 -1
  10. package/src/domain/crdt/Dot.js +5 -0
  11. package/src/domain/crdt/LWW.js +3 -1
  12. package/src/domain/crdt/ORSet.js +33 -23
  13. package/src/domain/crdt/VersionVector.js +12 -0
  14. package/src/domain/errors/PatchError.js +27 -0
  15. package/src/domain/errors/StorageError.js +8 -0
  16. package/src/domain/errors/WriterError.js +5 -0
  17. package/src/domain/errors/index.js +1 -0
  18. package/src/domain/services/AuditReceiptService.js +2 -1
  19. package/src/domain/services/AuditVerifierService.js +33 -2
  20. package/src/domain/services/BitmapIndexBuilder.js +14 -9
  21. package/src/domain/services/BoundaryTransitionRecord.js +1 -0
  22. package/src/domain/services/CheckpointMessageCodec.js +5 -0
  23. package/src/domain/services/CheckpointService.js +29 -2
  24. package/src/domain/services/GCPolicy.js +25 -4
  25. package/src/domain/services/GraphTraversal.js +3 -1
  26. package/src/domain/services/IncrementalIndexUpdater.js +179 -36
  27. package/src/domain/services/JoinReducer.js +311 -75
  28. package/src/domain/services/KeyCodec.js +48 -0
  29. package/src/domain/services/MaterializedViewService.js +14 -3
  30. package/src/domain/services/MessageSchemaDetector.js +35 -5
  31. package/src/domain/services/OpNormalizer.js +79 -0
  32. package/src/domain/services/PatchBuilderV2.js +240 -160
  33. package/src/domain/services/QueryBuilder.js +4 -0
  34. package/src/domain/services/SyncAuthService.js +3 -0
  35. package/src/domain/services/SyncController.js +12 -31
  36. package/src/domain/services/SyncProtocol.js +76 -32
  37. package/src/domain/services/WarpMessageCodec.js +2 -0
  38. package/src/domain/trust/TrustCrypto.js +8 -5
  39. package/src/domain/trust/TrustRecordService.js +50 -36
  40. package/src/domain/types/TickReceipt.js +6 -4
  41. package/src/domain/types/WarpTypesV2.js +77 -5
  42. package/src/domain/utils/CachedValue.js +34 -5
  43. package/src/domain/utils/EventId.js +4 -1
  44. package/src/domain/utils/LRUCache.js +3 -1
  45. package/src/domain/utils/RefLayout.js +4 -0
  46. package/src/domain/utils/canonicalStringify.js +48 -18
  47. package/src/domain/utils/defaultClock.js +1 -0
  48. package/src/domain/utils/matchGlob.js +7 -0
  49. package/src/domain/warp/PatchSession.js +30 -24
  50. package/src/domain/warp/Writer.js +12 -1
  51. package/src/domain/warp/_wiredMethods.d.ts +1 -1
  52. package/src/domain/warp/checkpoint.methods.js +36 -7
  53. package/src/domain/warp/fork.methods.js +1 -1
  54. package/src/domain/warp/materialize.methods.js +44 -5
  55. package/src/domain/warp/materializeAdvanced.methods.js +50 -10
  56. package/src/domain/warp/patch.methods.js +21 -11
  57. package/src/infrastructure/adapters/GitGraphAdapter.js +55 -52
  58. package/src/infrastructure/codecs/CborCodec.js +2 -0
  59. package/src/domain/utils/fnv1a.js +0 -20
package/README.md CHANGED
@@ -8,12 +8,13 @@
8
8
  <img src="docs/images/hero.gif" alt="git-warp CLI demo" width="600">
9
9
  </p>
10
10
 
11
- ## What's New in v12.2.0
11
+ ## What's New in v12.3.0
12
12
 
13
- - **O(N log N) topological sort** — `topologicalSort()` now uses a MinHeap ready queue instead of sorted-array merging, eliminating the O(N²) hot path for large DAGs.
14
- - **QueryBuilder batching + memoization** — property fetches are now bounded (chunks of 100) and cached per-run, reducing redundant I/O across where-clauses, result building, and aggregation.
15
- - **Fast materialization guard** — `_materializeGraph()` skips full materialization when cached state is clean, improving query/traversal latency.
16
- - **Checkpoint `visible.cbor` removed** — checkpoints no longer write the unused visible-projection blob, saving one serialize + blob write per checkpoint.
13
+ - **M13 ADR 1 canonical edge property ops** — internal model now uses honest `NodePropSet`/`EdgePropSet` semantics. Legacy raw `PropSet` is normalized at reducer entry points and lowered back at write time. No wire-format change — persisted patches remain backward-compatible.
14
+ - **Wire gate hardened** — sync boundary now explicitly rejects canonical-only op types (`NodePropSet`, `EdgePropSet`) arriving over the wire, preventing premature schema migration before ADR 2 capability cutover.
15
+ - **Reserved-byte validation** — new writes reject node IDs containing `\0` or starting with `\x01`, preventing ambiguous legacy edge-property encoding.
16
+ - **Version namespace separation** — patch schema and checkpoint schema constants are now distinct (`PATCH_SCHEMA_V2`/`V3` vs `CHECKPOINT_SCHEMA_STANDARD`/`INDEX_TREE`).
17
+ - **ADR governance** — ADR 3 readiness gates formalize when the persisted wire-format migration may proceed, with GitHub issue template and go/no-go checklist.
17
18
 
18
19
  See the [full changelog](CHANGELOG.md) for details.
19
20
 
@@ -105,7 +106,7 @@ When you want to read the graph, you **materialize** — which means replaying a
105
106
 
106
107
  Every operation gets a unique **EventId** — `(lamport, writerId, patchSha, opIndex)` — which creates a total ordering that makes merge results identical no matter which machine runs them.
107
108
 
108
- **Checkpoints** snapshot the materialized state into a single commit for fast incremental recovery. Subsequent materializations only need to replay patches created after the checkpoint.
109
+ **Checkpoints** snapshot the materialized state into a single commit for fast incremental recovery. Subsequent materializations only need to replay patches created after the checkpoint. During incremental replay, checkpoint ancestry is validated once per writer tip (not once per patch), which keeps long writer chains efficient.
109
110
 
110
111
  ## Multi-Writer Collaboration
111
112
 
@@ -474,6 +475,8 @@ const graph = await WarpGraph.open({
474
475
  });
475
476
  ```
476
477
 
478
+ When cached state is clean, local commits take an eager path that applies the patch in-memory and threads a patch diff into view rebuild (`_setMaterializedState(..., { diff })`). That allows incremental bitmap index updates on the hot write path instead of full index rebuilds.
479
+
477
480
  ## Observability
478
481
 
479
482
  ```javascript
@@ -14,6 +14,7 @@ import defaultCodec from '../../../src/domain/utils/defaultCodec.js';
14
14
  import { TrustRecordService } from '../../../src/domain/trust/TrustRecordService.js';
15
15
  import { buildState } from '../../../src/domain/trust/TrustStateBuilder.js';
16
16
  import { evaluateWriters } from '../../../src/domain/trust/TrustEvaluator.js';
17
+ import { TRUST_REASON_CODES } from '../../../src/domain/trust/reasonCodes.js';
17
18
 
18
19
  /** @typedef {import('../types.js').CliOptions} CliOptions */
19
20
 
@@ -112,8 +113,43 @@ export default async function handleTrust({ options, args }) {
112
113
  const { pin, source, sourceDetail, status } = resolveTrustPin(trustPin);
113
114
 
114
115
  // Read trust records
115
- const records = await recordService.readRecords(graphName, pin ? { tip: pin } : {});
116
+ const recordsResult = await recordService.readRecords(graphName, pin ? { tip: pin } : {});
117
+ if (!recordsResult.ok) {
118
+ const payload = {
119
+ graph: graphName,
120
+ trustSchemaVersion: 1,
121
+ mode: 'signed_evidence_v1',
122
+ trustVerdict: 'fail',
123
+ trust: {
124
+ status: 'error',
125
+ source,
126
+ sourceDetail,
127
+ evaluatedWriters: [],
128
+ untrustedWriters: [],
129
+ explanations: [
130
+ {
131
+ writerId: '*',
132
+ trusted: false,
133
+ reasonCode: TRUST_REASON_CODES.TRUST_RECORD_CHAIN_INVALID,
134
+ reason: `Trust chain read failed: ${recordsResult.error.message}`,
135
+ },
136
+ ],
137
+ evidenceSummary: {
138
+ recordsScanned: 0,
139
+ activeKeys: 0,
140
+ revokedKeys: 0,
141
+ activeBindings: 0,
142
+ revokedBindings: 0,
143
+ },
144
+ },
145
+ };
146
+ return {
147
+ payload,
148
+ exitCode: mode === 'enforce' ? EXIT_CODES.TRUST_FAIL : EXIT_CODES.OK,
149
+ };
150
+ }
116
151
 
152
+ const { records } = recordsResult;
117
153
  if (records.length === 0) {
118
154
  return buildNotConfiguredResult(graphName);
119
155
  }
@@ -166,6 +166,12 @@ const BASE_OPTIONS = {
166
166
  * Pre-processes argv to handle --view's optional-value semantics.
167
167
  * If --view is followed by a command name or flag (or is last), injects 'ascii'.
168
168
  * Validates the view mode value.
169
+ *
170
+ * When --view is passed without a value, we inject 'ascii' as the default.
171
+ * This happens before validation so the downstream parser sees a concrete
172
+ * value. The synthetic injection is intentional — parseArgs requires --view
173
+ * to have a value even though the CLI allows bare --view.
174
+ *
169
175
  * @param {string[]} argv
170
176
  * @returns {string[]}
171
177
  */
@@ -223,6 +229,7 @@ function extractBaseArgs(argv) {
223
229
  const rest = [];
224
230
  /** @type {string|undefined} */
225
231
  let command;
232
+ // Phase 1: Pre-command — scan for base flags (--repo, --json, --view, etc.)
226
233
  let pastCommand = false;
227
234
 
228
235
  for (let i = 0; i < argv.length; i++) {
@@ -266,7 +273,13 @@ function extractBaseArgs(argv) {
266
273
  continue;
267
274
  }
268
275
 
269
- if (!pastCommand && !arg.startsWith('-')) {
276
+ if (pastCommand) {
277
+ // Phase 2: Post-command — remaining args are command-specific, stop scanning
278
+ rest.push(arg);
279
+ continue;
280
+ }
281
+
282
+ if (!arg.startsWith('-')) {
270
283
  command = arg;
271
284
  pastCommand = true;
272
285
  continue;
@@ -36,7 +36,7 @@ export const pathSchema = z.object({
36
36
  to: z.string().optional(),
37
37
  dir: z.enum(['out', 'in', 'both']).optional(),
38
38
  label: z.union([z.string(), z.array(z.string())]).optional(),
39
- 'max-depth': z.coerce.number().int().nonnegative().optional(),
39
+ 'max-depth': z.coerce.number().int().nonnegative().refine(n => Number.isFinite(n), { message: 'must be a finite number' }).optional(),
40
40
  }).strict().transform((val) => ({
41
41
  from: val.from ?? null,
42
42
  to: val.to ?? null,
@@ -102,7 +102,7 @@ export const seekSchema = z.object({
102
102
  'clear-cache': z.boolean().default(false),
103
103
  'no-persistent-cache': z.boolean().default(false),
104
104
  diff: z.boolean().default(false),
105
- 'diff-limit': z.coerce.number().int({ message: '--diff-limit must be a positive integer' }).positive({ message: '--diff-limit must be a positive integer' }).default(2000),
105
+ 'diff-limit': z.coerce.number().int({ message: '--diff-limit must be a positive integer' }).positive({ message: '--diff-limit must be a positive integer' }).refine(n => Number.isFinite(n), { message: '--diff-limit must be a finite number' }).default(2000),
106
106
  }).strict().superRefine((val, ctx) => {
107
107
  // Count mutually exclusive action flags
108
108
  const actions = [
@@ -181,8 +181,8 @@ export const seekSchema = z.object({
181
181
  // ============================================================================
182
182
 
183
183
  export const verifyIndexSchema = z.object({
184
- seed: z.coerce.number().int().min(-2147483648).max(2147483647).optional(),
185
- 'sample-rate': z.coerce.number().gt(0, '--sample-rate must be greater than 0').max(1).optional().default(0.1),
184
+ seed: z.coerce.number().int().min(-2147483648).max(2147483647).refine(n => Number.isFinite(n), { message: 'must be a finite number' }).optional(),
185
+ 'sample-rate': z.coerce.number().gt(0, '--sample-rate must be greater than 0').max(1).refine(n => Number.isFinite(n), { message: 'must be a finite number' }).optional().default(0.1),
186
186
  }).strict().transform((val) => ({
187
187
  seed: val.seed,
188
188
  sampleRate: val['sample-rate'],
@@ -291,7 +291,8 @@ function formatOpSummaryPlain(summary) {
291
291
  const order = [
292
292
  ['NodeAdd', '+', 'node'],
293
293
  ['EdgeAdd', '+', 'edge'],
294
- ['PropSet', '~', 'prop'],
294
+ ['prop', '~', 'prop'], // coalesced PropSet + NodePropSet
295
+ ['EdgePropSet', '~', 'eprop'],
295
296
  ['NodeTombstone', '-', 'node'],
296
297
  ['EdgeTombstone', '-', 'edge'],
297
298
  ['BlobValue', '+', 'blob'],
@@ -299,7 +300,10 @@ function formatOpSummaryPlain(summary) {
299
300
 
300
301
  const parts = [];
301
302
  for (const [opType, symbol, label] of order) {
302
- const n = summary?.[opType];
303
+ // Coalesce PropSet + NodePropSet into one bucket
304
+ const n = opType === 'prop'
305
+ ? (summary?.PropSet || 0) + (summary?.NodePropSet || 0) || undefined
306
+ : summary?.[opType];
303
307
  if (typeof n === 'number' && Number.isFinite(n) && n > 0) {
304
308
  parts.push(`${symbol}${n}${label}`);
305
309
  }
@@ -612,9 +616,12 @@ function formatPatchOp(op) {
612
616
  if (op.type === 'EdgeTombstone') {
613
617
  return ` - edge ${op.from} -[${op.label}]-> ${op.to}`;
614
618
  }
615
- if (op.type === 'PropSet') {
619
+ if (op.type === 'PropSet' || op.type === 'NodePropSet') {
616
620
  return ` ~ ${op.node}.${op.key} = ${JSON.stringify(op.value)}`;
617
621
  }
622
+ if (op.type === 'EdgePropSet') {
623
+ return ` ~ edge(${op.from} -[${op.label}]-> ${op.to}).${op.key} = ${JSON.stringify(op.value)}`;
624
+ }
618
625
  if (op.type === 'BlobValue') {
619
626
  return ` + blob ${op.node}`;
620
627
  }
package/bin/warp-graph.js CHANGED
@@ -9,7 +9,10 @@ import { COMMANDS } from './cli/commands/registry.js';
9
9
 
10
10
  const VIEW_SUPPORTED_COMMANDS = ['info', 'check', 'history', 'path', 'materialize', 'query', 'seek'];
11
11
 
12
- // C8: Capture output format early so the error handler can use it
12
+ // Output format must be captured from raw process.argv BEFORE parseArgs() runs.
13
+ // If parseArgs() itself throws (e.g., unknown flag, malformed input), the `options`
14
+ // object will not exist, so the error handler cannot read `options.json`. By
15
+ // pre-scanning argv, the error handler can still emit structured output.
13
16
  const hasJsonFlag = process.argv.includes('--json');
14
17
  const hasNdjsonFlag = process.argv.includes('--ndjson');
15
18
 
package/index.d.ts CHANGED
@@ -1060,6 +1060,20 @@ export class SchemaUnsupportedError extends Error {
1060
1060
  });
1061
1061
  }
1062
1062
 
1063
+ /**
1064
+ * Error class for malformed or invalid patch operations.
1065
+ */
1066
+ export class PatchError extends Error {
1067
+ readonly name: 'PatchError';
1068
+ readonly code: string;
1069
+ readonly context: Record<string, unknown>;
1070
+
1071
+ constructor(message: string, options?: {
1072
+ code?: string;
1073
+ context?: Record<string, unknown>;
1074
+ });
1075
+ }
1076
+
1063
1077
  /**
1064
1078
  * Error class for sync transport operations.
1065
1079
  */
@@ -1530,6 +1544,7 @@ export interface SyncResponse {
1530
1544
  type: 'sync-response';
1531
1545
  frontier: Record<string, string>;
1532
1546
  patches: Array<{ writerId: string; sha: string; patch: unknown }>;
1547
+ skippedWriters?: Array<{ writerId: string; reason: string; localSha: string; remoteSha: string | null }>;
1533
1548
  }
1534
1549
 
1535
1550
  /**
@@ -1539,6 +1554,7 @@ export interface ApplySyncResult {
1539
1554
  state: WarpStateV5;
1540
1555
  frontier: Map<string, number>;
1541
1556
  applied: number;
1557
+ skippedWriters: Array<{ writerId: string; reason: string; localSha: string; remoteSha: string | null }>;
1542
1558
  }
1543
1559
 
1544
1560
  /**
@@ -1831,7 +1847,7 @@ export default class WarpGraph {
1831
1847
  auth?: SyncAuthClientOptions;
1832
1848
  /** Auto-materialize after sync; when true, result includes `state` */
1833
1849
  materialize?: boolean;
1834
- }): Promise<{ applied: number; attempts: number; state?: WarpStateV5 }>;
1850
+ }): Promise<{ applied: number; attempts: number; skippedWriters: Array<{ writerId: string; reason: string; localSha: string; remoteSha: string | null }>; state?: WarpStateV5 }>;
1835
1851
 
1836
1852
  /**
1837
1853
  * Creates a fork of this graph at a specific point in a writer's history.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@git-stunts/git-warp",
3
- "version": "12.2.0",
3
+ "version": "12.3.0",
4
4
  "description": "Deterministic WARP graph over Git: graph-native storage, traversal, and tooling.",
5
5
  "type": "module",
6
6
  "license": "Apache-2.0",
@@ -39,7 +39,7 @@ const DEFAULT_ADJACENCY_CACHE_SIZE = 3;
39
39
  /**
40
40
  * @typedef {Object} MaterializedGraph
41
41
  * @property {import('./services/JoinReducer.js').WarpStateV5} state
42
- * @property {string} stateHash
42
+ * @property {string|null} stateHash
43
43
  * @property {{outgoing: Map<string, Array<{neighborId: string, label: string}>>, incoming: Map<string, Array<{neighborId: string, label: string}>>}} adjacency
44
44
  * @property {import('./services/BitmapNeighborProvider.js').default} [provider]
45
45
  */
@@ -109,6 +109,11 @@ export function encodeDot(dot) {
109
109
  /**
110
110
  * Decodes an encoded dot string back to a Dot object.
111
111
  *
112
+ * Writer IDs are parsed using lastIndexOf(':') as separator. Writer IDs
113
+ * containing colons are supported because the counter (after the last colon)
114
+ * is always numeric. However, empty writer IDs or IDs ending with a colon
115
+ * may produce unexpected results.
116
+ *
112
117
  * @param {string} encoded - Format: "writerId:counter"
113
118
  * @returns {Dot}
114
119
  * @throws {Error} If format is invalid
@@ -126,7 +126,9 @@ export function lwwSet(eventId, value) {
126
126
  * @returns {LWWRegister<T> | null} Register with greater EventId, or null if both null/undefined
127
127
  */
128
128
  export function lwwMax(a, b) {
129
- // Handle null/undefined cases
129
+ // Null/undefined values are handled defensively for forward compatibility.
130
+ // Current callers always provide non-null values, but the LWW register
131
+ // contract permits null as a valid tombstone value.
130
132
  if ((a === null || a === undefined) && (b === null || b === undefined)) {
131
133
  return null;
132
134
  }
@@ -116,6 +116,9 @@ export function createORSet() {
116
116
  * @param {import('./Dot.js').Dot} dot - The dot representing this add operation
117
117
  */
118
118
  export function orsetAdd(set, element, dot) {
119
+ if (!dot || typeof dot.writerId !== 'string' || !Number.isInteger(dot.counter)) {
120
+ throw new Error(`orsetAdd: invalid dot -- expected {writerId: string, counter: integer}, got ${JSON.stringify(dot)}`);
121
+ }
119
122
  const encoded = encodeDot(dot);
120
123
 
121
124
  let dots = set.entries.get(element);
@@ -221,24 +224,26 @@ export function orsetGetDots(set, element) {
221
224
  export function orsetJoin(a, b) {
222
225
  const result = createORSet();
223
226
 
224
- // Union entries from a
227
+ // Copy entries from a into result — each dot set is shallow-copied so the
228
+ // caller cannot mutate the original through the result.
225
229
  for (const [element, dots] of a.entries) {
226
230
  result.entries.set(element, new Set(dots));
227
231
  }
228
232
 
229
- // Union entries from b
233
+ // Merge entries from b — if the element already exists (from a), add into
234
+ // the cloned set; otherwise clone b's dot set the same way for consistency.
230
235
  for (const [element, dots] of b.entries) {
231
- let resultDots = result.entries.get(element);
232
- if (!resultDots) {
233
- resultDots = new Set();
234
- result.entries.set(element, resultDots);
235
- }
236
- for (const dot of dots) {
237
- resultDots.add(dot);
236
+ const existing = result.entries.get(element);
237
+ if (existing) {
238
+ for (const dot of dots) {
239
+ existing.add(dot);
240
+ }
241
+ } else {
242
+ result.entries.set(element, new Set(dots));
238
243
  }
239
244
  }
240
245
 
241
- // Union tombstones
246
+ // Union tombstones from both sides
242
247
  for (const dot of a.tombstones) {
243
248
  result.tombstones.add(dot);
244
249
  }
@@ -343,16 +348,19 @@ export function orsetClone(set) {
343
348
  * @returns {{entries: Array<[string, string[]]>, tombstones: string[]}}
344
349
  */
345
350
  export function orsetSerialize(set) {
346
- // Serialize entries: convert Map to array of [element, sortedDots]
351
+ // Serialize entries: convert Map to array of [element, sortedDots].
352
+ // Pre-decode dots before sorting to avoid O(N log N) decodeDot calls
353
+ // during comparisons.
347
354
  /** @type {Array<[string, string[]]>} */
348
355
  const entriesArray = [];
349
356
  for (const [element, dots] of set.entries) {
350
- const sortedDots = [...dots].sort((a, b) => {
351
- const dotA = decodeDot(a);
352
- const dotB = decodeDot(b);
353
- return compareDots(dotA, dotB);
354
- });
355
- entriesArray.push([element, sortedDots]);
357
+ /** @type {Array<{encoded: string, decoded: import('./Dot.js').Dot}>} */
358
+ const pairs = [];
359
+ for (const encoded of dots) {
360
+ pairs.push({ encoded, decoded: decodeDot(encoded) });
361
+ }
362
+ pairs.sort((a, b) => compareDots(a.decoded, b.decoded));
363
+ entriesArray.push([element, pairs.map((p) => p.encoded)]);
356
364
  }
357
365
 
358
366
  // Sort entries by element (stringified for consistency)
@@ -362,12 +370,14 @@ export function orsetSerialize(set) {
362
370
  return keyA < keyB ? -1 : keyA > keyB ? 1 : 0;
363
371
  });
364
372
 
365
- // Serialize tombstones: sorted array
366
- const sortedTombstones = [...set.tombstones].sort((a, b) => {
367
- const dotA = decodeDot(a);
368
- const dotB = decodeDot(b);
369
- return compareDots(dotA, dotB);
370
- });
373
+ // Serialize tombstones: pre-decode then sort
374
+ /** @type {Array<{encoded: string, decoded: import('./Dot.js').Dot}>} */
375
+ const tombPairs = [];
376
+ for (const encoded of set.tombstones) {
377
+ tombPairs.push({ encoded, decoded: decodeDot(encoded) });
378
+ }
379
+ tombPairs.sort((a, b) => compareDots(a.decoded, b.decoded));
380
+ const sortedTombstones = tombPairs.map((p) => p.encoded);
371
381
 
372
382
  return {
373
383
  entries: entriesArray,
@@ -160,11 +160,19 @@ export function vvContains(vv, dot) {
160
160
  export function vvSerialize(vv) {
161
161
  /** @type {Record<string, number>} */
162
162
  const obj = {};
163
+ // Key sort is required for deterministic serialization. The writer count
164
+ // is typically small (<100), so the O(W log W) cost is negligible.
163
165
  const sortedKeys = [...vv.keys()].sort();
164
166
 
165
167
  for (const key of sortedKeys) {
166
168
  const val = vv.get(key);
167
169
  if (val !== undefined) {
170
+ // Invariant assertion — not input validation. Zero counters must never
171
+ // appear in a VersionVector. They carry no causal information and would
172
+ // be elided on deserialization, breaking round-trip equality.
173
+ if (val === 0) {
174
+ throw new Error(`vvSerialize: zero counter for writerId "${key}" — VersionVector must not contain zero counters`);
175
+ }
168
176
  obj[key] = val;
169
177
  }
170
178
  }
@@ -175,6 +183,10 @@ export function vvSerialize(vv) {
175
183
  /**
176
184
  * Deserializes a plain object back to a VersionVector.
177
185
  *
186
+ * Zero counters are elided during deserialization. This is intentional — a
187
+ * counter of 0 carries no causal information and wastes space. Serialization
188
+ * must never emit zero counters.
189
+ *
178
190
  * @param {Object<string, number>} obj
179
191
  * @returns {VersionVector}
180
192
  * @throws {Error} If any counter value is not a non-negative integer
@@ -0,0 +1,27 @@
1
+ import WarpError from './WarpError.js';
2
+
3
+ /**
4
+ * Error class for malformed or invalid patch operations.
5
+ *
6
+ * ## Error Codes
7
+ *
8
+ * | Code | Description |
9
+ * |------|-------------|
10
+ * | `E_PATCH_MALFORMED` | Operation is missing required fields or has invalid types |
11
+ *
12
+ * @class PatchError
13
+ * @extends WarpError
14
+ *
15
+ * @property {string} name - Always 'PatchError' for instanceof checks
16
+ * @property {string} code - Machine-readable error code for programmatic handling
17
+ * @property {Object} context - Serializable context object with error details
18
+ */
19
+ export default class PatchError extends WarpError {
20
+ /**
21
+ * @param {string} message
22
+ * @param {{ code?: string, context?: Record<string, unknown> }} [options={}]
23
+ */
24
+ constructor(message, options = {}) {
25
+ super(message, 'E_PATCH_MALFORMED', options);
26
+ }
27
+ }
@@ -3,6 +3,10 @@ import IndexError from './IndexError.js';
3
3
  /**
4
4
  * Error thrown when a storage operation fails.
5
5
  *
6
+ * StorageError extends IndexError because storage errors originate from
7
+ * index operations. This hierarchy is intentional — IndexError provides
8
+ * the storage-specific error context.
9
+ *
6
10
  * This error indicates that a read or write operation to storage failed,
7
11
  * typically due to I/O errors, permission issues, or storage unavailability.
8
12
  *
@@ -31,6 +35,10 @@ export default class StorageError extends IndexError {
31
35
  /**
32
36
  * Creates a new StorageError.
33
37
  *
38
+ * Context is merged via Object.assign — duplicate keys from the second
39
+ * argument overwrite the first. Callers should ensure context keys don't
40
+ * collide, or use unique prefixes.
41
+ *
34
42
  * @param {string} message - Human-readable error message
35
43
  * @param {{ operation?: string, oid?: string, cause?: Error, context?: Record<string, unknown> }} [options={}] - Error options
36
44
  */
@@ -26,6 +26,11 @@ import WarpError from './WarpError.js';
26
26
  */
27
27
  export default class WriterError extends WarpError {
28
28
  /**
29
+ * Note: constructor parameter order differs from other WarpError subclasses
30
+ * (code, message vs message, code). This is intentional to match the most
31
+ * common call sites in PatchSession and PatchBuilderV2 where the error code
32
+ * is the primary discriminator.
33
+ *
29
34
  * @param {string} code - Error code
30
35
  * @param {string} message - Human-readable error message
31
36
  * @param {Error} [cause] - Original error that caused this error
@@ -9,6 +9,7 @@ export { default as WarpError } from './WarpError.js';
9
9
  export { default as ForkError } from './ForkError.js';
10
10
  export { default as IndexError } from './IndexError.js';
11
11
  export { default as OperationAbortedError } from './OperationAbortedError.js';
12
+ export { default as PatchError } from './PatchError.js';
12
13
  export { default as QueryError } from './QueryError.js';
13
14
  export { default as SyncError } from './SyncError.js';
14
15
  export { default as ShardCorruptionError } from './ShardCorruptionError.js';
@@ -339,7 +339,8 @@ export class AuditReceiptService {
339
339
  // Compute opsDigest
340
340
  const opsDigest = await computeOpsDigest(ops, this._crypto);
341
341
 
342
- // Timestamp
342
+ // Wall-clock timestamp for audit receipt (not a perf timer)
343
+ // eslint-disable-next-line no-restricted-syntax
343
344
  const timestamp = Date.now();
344
345
 
345
346
  // Determine prevAuditCommit
@@ -34,6 +34,7 @@ import { decodeAuditMessage } from './AuditMessageCodec.js';
34
34
  import { TrustRecordService } from '../trust/TrustRecordService.js';
35
35
  import { buildState } from '../trust/TrustStateBuilder.js';
36
36
  import { evaluateWriters } from '../trust/TrustEvaluator.js';
37
+ import { TRUST_REASON_CODES } from '../trust/reasonCodes.js';
37
38
 
38
39
  // ============================================================================
39
40
  // Constants
@@ -256,6 +257,7 @@ export class AuditVerifierService {
256
257
 
257
258
  return {
258
259
  graph: graphName,
260
+ // eslint-disable-next-line no-restricted-syntax -- wall-clock timestamp for audit report
259
261
  verifiedAt: new Date().toISOString(),
260
262
  summary: { total: chains.length, valid, partial, invalid },
261
263
  chains,
@@ -667,7 +669,37 @@ export class AuditVerifierService {
667
669
  codec: this._codec,
668
670
  });
669
671
 
670
- const records = await recordService.readRecords(graphName, options.pin ? { tip: options.pin } : {});
672
+ const recordsResult = await recordService.readRecords(graphName, options.pin ? { tip: options.pin } : {});
673
+ if (!recordsResult.ok) {
674
+ return {
675
+ trustSchemaVersion: 1,
676
+ mode: 'signed_evidence_v1',
677
+ trustVerdict: 'fail',
678
+ trust: {
679
+ status: 'error',
680
+ source: options.pin ? 'pinned' : 'ref',
681
+ sourceDetail: options.pin ?? null,
682
+ evaluatedWriters: [],
683
+ untrustedWriters: [],
684
+ explanations: [
685
+ {
686
+ writerId: '*',
687
+ trusted: false,
688
+ reasonCode: TRUST_REASON_CODES.TRUST_RECORD_CHAIN_INVALID,
689
+ reason: `Trust chain read failed: ${recordsResult.error.message}`,
690
+ },
691
+ ],
692
+ evidenceSummary: {
693
+ recordsScanned: 0,
694
+ activeKeys: 0,
695
+ revokedKeys: 0,
696
+ activeBindings: 0,
697
+ revokedBindings: 0,
698
+ },
699
+ },
700
+ };
701
+ }
702
+ const { records } = recordsResult;
671
703
 
672
704
  if (records.length === 0) {
673
705
  return {
@@ -704,4 +736,3 @@ export class AuditVerifierService {
704
736
  return evaluateWriters(writerIds, trustState, policy);
705
737
  }
706
738
  }
707
-
@@ -22,12 +22,20 @@ const computeChecksum = async (data, crypto) => {
22
22
  };
23
23
 
24
24
  /** @type {boolean|null} Whether native Roaring bindings are available (null = unknown until first use) */
25
- export let NATIVE_ROARING_AVAILABLE = null;
25
+ let _nativeRoaringAvailable = null;
26
+
27
+ /**
28
+ * Resets native Roaring availability detection (test-only utility).
29
+ * @returns {void}
30
+ */
31
+ export function resetNativeRoaringFlag() {
32
+ _nativeRoaringAvailable = null;
33
+ }
26
34
 
27
35
  const ensureRoaringBitmap32 = () => {
28
36
  const RoaringBitmap32 = getRoaringBitmap32();
29
- if (NATIVE_ROARING_AVAILABLE === null) {
30
- NATIVE_ROARING_AVAILABLE = getNativeRoaringAvailable();
37
+ if (_nativeRoaringAvailable === null) {
38
+ _nativeRoaringAvailable = getNativeRoaringAvailable();
31
39
  }
32
40
  return RoaringBitmap32;
33
41
  };
@@ -71,14 +79,11 @@ function serializeFrontierToTree(frontier, tree, codec) {
71
79
  * BlobPort + TreePort + RefPort from the persistence layer.
72
80
  *
73
81
  * **Performance Note**: Uses Roaring Bitmaps for compression. Native bindings
74
- * provide best performance. Check `NATIVE_ROARING_AVAILABLE` export if
75
- * performance is critical.
82
+ * provide best performance. Use `getNativeRoaringAvailable()` from
83
+ * `src/domain/utils/roaring.js` if runtime capability checks are needed.
76
84
  *
77
85
  * @example
78
- * import BitmapIndexBuilder, { NATIVE_ROARING_AVAILABLE } from './BitmapIndexBuilder.js';
79
- * if (NATIVE_ROARING_AVAILABLE === false) {
80
- * console.warn('Consider installing native Roaring bindings for better performance');
81
- * }
86
+ * import BitmapIndexBuilder from './BitmapIndexBuilder.js';
82
87
  * const builder = new BitmapIndexBuilder();
83
88
  */
84
89
  export default class BitmapIndexBuilder {
@@ -164,6 +164,7 @@ export async function createBTR(initialState, payload, options) {
164
164
  throw new TypeError('payload must be a ProvenancePayload');
165
165
  }
166
166
 
167
+ // eslint-disable-next-line no-restricted-syntax -- wall-clock default for BTR timestamp
167
168
  const { key, timestamp = new Date().toISOString(), crypto, codec } = options;
168
169
 
169
170
  // Validate HMAC key is not empty/falsy
@@ -5,6 +5,11 @@
5
5
  * materialized graph state. See {@link module:domain/services/WarpMessageCodec}
6
6
  * for the facade that re-exports all codec functions.
7
7
  *
8
+ * **Schema namespace note:** Checkpoint schema versions (2, 3, 4) are
9
+ * distinct from patch schema versions (PATCH_SCHEMA_V2, PATCH_SCHEMA_V3).
10
+ * See {@link module:domain/services/CheckpointService} for named constants
11
+ * `CHECKPOINT_SCHEMA_STANDARD` and `CHECKPOINT_SCHEMA_INDEX_TREE`.
12
+ *
8
13
  * @module domain/services/CheckpointMessageCodec
9
14
  */
10
15