@git-stunts/git-warp 12.2.0 → 12.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/README.md +8 -6
  2. package/bin/cli/commands/trust.js +37 -1
  3. package/bin/cli/infrastructure.js +14 -1
  4. package/bin/cli/schemas.js +4 -4
  5. package/bin/warp-graph.js +4 -1
  6. package/index.d.ts +17 -1
  7. package/package.json +1 -1
  8. package/src/domain/WarpGraph.js +1 -1
  9. package/src/domain/crdt/Dot.js +5 -0
  10. package/src/domain/crdt/LWW.js +3 -1
  11. package/src/domain/crdt/ORSet.js +30 -23
  12. package/src/domain/crdt/VersionVector.js +12 -0
  13. package/src/domain/errors/PatchError.js +27 -0
  14. package/src/domain/errors/StorageError.js +8 -0
  15. package/src/domain/errors/WriterError.js +5 -0
  16. package/src/domain/errors/index.js +1 -0
  17. package/src/domain/services/AuditVerifierService.js +32 -2
  18. package/src/domain/services/BitmapIndexBuilder.js +14 -9
  19. package/src/domain/services/CheckpointService.js +10 -1
  20. package/src/domain/services/GCPolicy.js +25 -4
  21. package/src/domain/services/GraphTraversal.js +3 -1
  22. package/src/domain/services/IncrementalIndexUpdater.js +179 -36
  23. package/src/domain/services/JoinReducer.js +141 -31
  24. package/src/domain/services/MaterializedViewService.js +13 -2
  25. package/src/domain/services/PatchBuilderV2.js +181 -142
  26. package/src/domain/services/QueryBuilder.js +4 -0
  27. package/src/domain/services/SyncController.js +12 -31
  28. package/src/domain/services/SyncProtocol.js +75 -32
  29. package/src/domain/trust/TrustRecordService.js +50 -36
  30. package/src/domain/utils/CachedValue.js +34 -5
  31. package/src/domain/utils/EventId.js +4 -1
  32. package/src/domain/utils/LRUCache.js +3 -1
  33. package/src/domain/utils/RefLayout.js +4 -0
  34. package/src/domain/utils/canonicalStringify.js +48 -18
  35. package/src/domain/utils/matchGlob.js +7 -0
  36. package/src/domain/warp/PatchSession.js +30 -24
  37. package/src/domain/warp/Writer.js +5 -0
  38. package/src/domain/warp/_wiredMethods.d.ts +1 -1
  39. package/src/domain/warp/checkpoint.methods.js +36 -7
  40. package/src/domain/warp/materialize.methods.js +44 -5
  41. package/src/domain/warp/materializeAdvanced.methods.js +50 -10
  42. package/src/domain/warp/patch.methods.js +19 -11
  43. package/src/infrastructure/adapters/GitGraphAdapter.js +55 -52
  44. package/src/infrastructure/codecs/CborCodec.js +2 -0
  45. package/src/domain/utils/fnv1a.js +0 -20
package/README.md CHANGED
@@ -8,12 +8,12 @@
8
8
  <img src="docs/images/hero.gif" alt="git-warp CLI demo" width="600">
9
9
  </p>
10
10
 
11
- ## What's New in v12.2.0
11
+ ## What's New in v12.2.1
12
12
 
13
- - **O(N log N) topological sort** — `topologicalSort()` now uses a MinHeap ready queue instead of sorted-array merging, eliminating the O(N²) hot path for large DAGs.
14
- - **QueryBuilder batching + memoization** — property fetches are now bounded (chunks of 100) and cached per-run, reducing redundant I/O across where-clauses, result building, and aggregation.
15
- - **Fast materialization guard** — `_materializeGraph()` skips full materialization when cached state is clean, improving query/traversal latency.
16
- - **Checkpoint `visible.cbor` removed** — checkpoints no longer write the unused visible-projection blob, saving one serialize + blob write per checkpoint.
13
+ - **M12 SCALPEL complete** — 42-item STANK audit fully resolved: 15 bug fixes, 20+ JSDoc/documentation improvements, and 6 refactors across CRDT core, services, sync, and CLI.
14
+ - **Sync correctness hardened** — `join()` state install, `applySyncResponse` cache coherence, unknown-op rejection (fail-closed), and divergence pre-check all fixed.
15
+ - **Incremental index improvements** — stale label ID collision fix, re-add edge restoration via adjacency cache, and bitmap churn reduction for node removal.
16
+ - **canonicalStringify shared-reference fix** — cycle detection now correctly allows valid DAG structures (shared non-circular references).
17
17
 
18
18
  See the [full changelog](CHANGELOG.md) for details.
19
19
 
@@ -105,7 +105,7 @@ When you want to read the graph, you **materialize** — which means replaying a
105
105
 
106
106
  Every operation gets a unique **EventId** — `(lamport, writerId, patchSha, opIndex)` — which creates a total ordering that makes merge results identical no matter which machine runs them.
107
107
 
108
- **Checkpoints** snapshot the materialized state into a single commit for fast incremental recovery. Subsequent materializations only need to replay patches created after the checkpoint.
108
+ **Checkpoints** snapshot the materialized state into a single commit for fast incremental recovery. Subsequent materializations only need to replay patches created after the checkpoint. During incremental replay, checkpoint ancestry is validated once per writer tip (not once per patch), which keeps long writer chains efficient.
109
109
 
110
110
  ## Multi-Writer Collaboration
111
111
 
@@ -474,6 +474,8 @@ const graph = await WarpGraph.open({
474
474
  });
475
475
  ```
476
476
 
477
+ When cached state is clean, local commits take an eager path that applies the patch in-memory and threads a patch diff into view rebuild (`_setMaterializedState(..., { diff })`). That allows incremental bitmap index updates on the hot write path instead of full index rebuilds.
478
+
477
479
  ## Observability
478
480
 
479
481
  ```javascript
@@ -14,6 +14,7 @@ import defaultCodec from '../../../src/domain/utils/defaultCodec.js';
14
14
  import { TrustRecordService } from '../../../src/domain/trust/TrustRecordService.js';
15
15
  import { buildState } from '../../../src/domain/trust/TrustStateBuilder.js';
16
16
  import { evaluateWriters } from '../../../src/domain/trust/TrustEvaluator.js';
17
+ import { TRUST_REASON_CODES } from '../../../src/domain/trust/reasonCodes.js';
17
18
 
18
19
  /** @typedef {import('../types.js').CliOptions} CliOptions */
19
20
 
@@ -112,8 +113,43 @@ export default async function handleTrust({ options, args }) {
112
113
  const { pin, source, sourceDetail, status } = resolveTrustPin(trustPin);
113
114
 
114
115
  // Read trust records
115
- const records = await recordService.readRecords(graphName, pin ? { tip: pin } : {});
116
+ const recordsResult = await recordService.readRecords(graphName, pin ? { tip: pin } : {});
117
+ if (!recordsResult.ok) {
118
+ const payload = {
119
+ graph: graphName,
120
+ trustSchemaVersion: 1,
121
+ mode: 'signed_evidence_v1',
122
+ trustVerdict: 'fail',
123
+ trust: {
124
+ status: 'error',
125
+ source,
126
+ sourceDetail,
127
+ evaluatedWriters: [],
128
+ untrustedWriters: [],
129
+ explanations: [
130
+ {
131
+ writerId: '*',
132
+ trusted: false,
133
+ reasonCode: TRUST_REASON_CODES.TRUST_RECORD_CHAIN_INVALID,
134
+ reason: `Trust chain read failed: ${recordsResult.error.message}`,
135
+ },
136
+ ],
137
+ evidenceSummary: {
138
+ recordsScanned: 0,
139
+ activeKeys: 0,
140
+ revokedKeys: 0,
141
+ activeBindings: 0,
142
+ revokedBindings: 0,
143
+ },
144
+ },
145
+ };
146
+ return {
147
+ payload,
148
+ exitCode: mode === 'enforce' ? EXIT_CODES.TRUST_FAIL : EXIT_CODES.OK,
149
+ };
150
+ }
116
151
 
152
+ const { records } = recordsResult;
117
153
  if (records.length === 0) {
118
154
  return buildNotConfiguredResult(graphName);
119
155
  }
@@ -166,6 +166,12 @@ const BASE_OPTIONS = {
166
166
  * Pre-processes argv to handle --view's optional-value semantics.
167
167
  * If --view is followed by a command name or flag (or is last), injects 'ascii'.
168
168
  * Validates the view mode value.
169
+ *
170
+ * When --view is passed without a value, we inject 'ascii' as the default.
171
+ * This happens before validation so the downstream parser sees a concrete
172
+ * value. The synthetic injection is intentional — parseArgs requires --view
173
+ * to have a value even though the CLI allows bare --view.
174
+ *
169
175
  * @param {string[]} argv
170
176
  * @returns {string[]}
171
177
  */
@@ -223,6 +229,7 @@ function extractBaseArgs(argv) {
223
229
  const rest = [];
224
230
  /** @type {string|undefined} */
225
231
  let command;
232
+ // Phase 1: Pre-command — scan for base flags (--repo, --json, --view, etc.)
226
233
  let pastCommand = false;
227
234
 
228
235
  for (let i = 0; i < argv.length; i++) {
@@ -266,7 +273,13 @@ function extractBaseArgs(argv) {
266
273
  continue;
267
274
  }
268
275
 
269
- if (!pastCommand && !arg.startsWith('-')) {
276
+ if (pastCommand) {
277
+ // Phase 2: Post-command — remaining args are command-specific, stop scanning
278
+ rest.push(arg);
279
+ continue;
280
+ }
281
+
282
+ if (!arg.startsWith('-')) {
270
283
  command = arg;
271
284
  pastCommand = true;
272
285
  continue;
@@ -36,7 +36,7 @@ export const pathSchema = z.object({
36
36
  to: z.string().optional(),
37
37
  dir: z.enum(['out', 'in', 'both']).optional(),
38
38
  label: z.union([z.string(), z.array(z.string())]).optional(),
39
- 'max-depth': z.coerce.number().int().nonnegative().optional(),
39
+ 'max-depth': z.coerce.number().int().nonnegative().refine(n => Number.isFinite(n), { message: 'must be a finite number' }).optional(),
40
40
  }).strict().transform((val) => ({
41
41
  from: val.from ?? null,
42
42
  to: val.to ?? null,
@@ -102,7 +102,7 @@ export const seekSchema = z.object({
102
102
  'clear-cache': z.boolean().default(false),
103
103
  'no-persistent-cache': z.boolean().default(false),
104
104
  diff: z.boolean().default(false),
105
- 'diff-limit': z.coerce.number().int({ message: '--diff-limit must be a positive integer' }).positive({ message: '--diff-limit must be a positive integer' }).default(2000),
105
+ 'diff-limit': z.coerce.number().int({ message: '--diff-limit must be a positive integer' }).positive({ message: '--diff-limit must be a positive integer' }).refine(n => Number.isFinite(n), { message: '--diff-limit must be a finite number' }).default(2000),
106
106
  }).strict().superRefine((val, ctx) => {
107
107
  // Count mutually exclusive action flags
108
108
  const actions = [
@@ -181,8 +181,8 @@ export const seekSchema = z.object({
181
181
  // ============================================================================
182
182
 
183
183
  export const verifyIndexSchema = z.object({
184
- seed: z.coerce.number().int().min(-2147483648).max(2147483647).optional(),
185
- 'sample-rate': z.coerce.number().gt(0, '--sample-rate must be greater than 0').max(1).optional().default(0.1),
184
+ seed: z.coerce.number().int().min(-2147483648).max(2147483647).refine(n => Number.isFinite(n), { message: 'must be a finite number' }).optional(),
185
+ 'sample-rate': z.coerce.number().gt(0, '--sample-rate must be greater than 0').max(1).refine(n => Number.isFinite(n), { message: 'must be a finite number' }).optional().default(0.1),
186
186
  }).strict().transform((val) => ({
187
187
  seed: val.seed,
188
188
  sampleRate: val['sample-rate'],
package/bin/warp-graph.js CHANGED
@@ -9,7 +9,10 @@ import { COMMANDS } from './cli/commands/registry.js';
9
9
 
10
10
  const VIEW_SUPPORTED_COMMANDS = ['info', 'check', 'history', 'path', 'materialize', 'query', 'seek'];
11
11
 
12
- // C8: Capture output format early so the error handler can use it
12
+ // Output format must be captured from raw process.argv BEFORE parseArgs() runs.
13
+ // If parseArgs() itself throws (e.g., unknown flag, malformed input), the `options`
14
+ // object will not exist, so the error handler cannot read `options.json`. By
15
+ // pre-scanning argv, the error handler can still emit structured output.
13
16
  const hasJsonFlag = process.argv.includes('--json');
14
17
  const hasNdjsonFlag = process.argv.includes('--ndjson');
15
18
 
package/index.d.ts CHANGED
@@ -1060,6 +1060,20 @@ export class SchemaUnsupportedError extends Error {
1060
1060
  });
1061
1061
  }
1062
1062
 
1063
+ /**
1064
+ * Error class for malformed or invalid patch operations.
1065
+ */
1066
+ export class PatchError extends Error {
1067
+ readonly name: 'PatchError';
1068
+ readonly code: string;
1069
+ readonly context: Record<string, unknown>;
1070
+
1071
+ constructor(message: string, options?: {
1072
+ code?: string;
1073
+ context?: Record<string, unknown>;
1074
+ });
1075
+ }
1076
+
1063
1077
  /**
1064
1078
  * Error class for sync transport operations.
1065
1079
  */
@@ -1530,6 +1544,7 @@ export interface SyncResponse {
1530
1544
  type: 'sync-response';
1531
1545
  frontier: Record<string, string>;
1532
1546
  patches: Array<{ writerId: string; sha: string; patch: unknown }>;
1547
+ skippedWriters?: Array<{ writerId: string; reason: string; localSha: string; remoteSha: string | null }>;
1533
1548
  }
1534
1549
 
1535
1550
  /**
@@ -1539,6 +1554,7 @@ export interface ApplySyncResult {
1539
1554
  state: WarpStateV5;
1540
1555
  frontier: Map<string, number>;
1541
1556
  applied: number;
1557
+ skippedWriters: Array<{ writerId: string; reason: string; localSha: string; remoteSha: string | null }>;
1542
1558
  }
1543
1559
 
1544
1560
  /**
@@ -1831,7 +1847,7 @@ export default class WarpGraph {
1831
1847
  auth?: SyncAuthClientOptions;
1832
1848
  /** Auto-materialize after sync; when true, result includes `state` */
1833
1849
  materialize?: boolean;
1834
- }): Promise<{ applied: number; attempts: number; state?: WarpStateV5 }>;
1850
+ }): Promise<{ applied: number; attempts: number; skippedWriters: Array<{ writerId: string; reason: string; localSha: string; remoteSha: string | null }>; state?: WarpStateV5 }>;
1835
1851
 
1836
1852
  /**
1837
1853
  * Creates a fork of this graph at a specific point in a writer's history.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@git-stunts/git-warp",
3
- "version": "12.2.0",
3
+ "version": "12.2.1",
4
4
  "description": "Deterministic WARP graph over Git: graph-native storage, traversal, and tooling.",
5
5
  "type": "module",
6
6
  "license": "Apache-2.0",
@@ -39,7 +39,7 @@ const DEFAULT_ADJACENCY_CACHE_SIZE = 3;
39
39
  /**
40
40
  * @typedef {Object} MaterializedGraph
41
41
  * @property {import('./services/JoinReducer.js').WarpStateV5} state
42
- * @property {string} stateHash
42
+ * @property {string|null} stateHash
43
43
  * @property {{outgoing: Map<string, Array<{neighborId: string, label: string}>>, incoming: Map<string, Array<{neighborId: string, label: string}>>}} adjacency
44
44
  * @property {import('./services/BitmapNeighborProvider.js').default} [provider]
45
45
  */
@@ -109,6 +109,11 @@ export function encodeDot(dot) {
109
109
  /**
110
110
  * Decodes an encoded dot string back to a Dot object.
111
111
  *
112
+ * Writer IDs are parsed using lastIndexOf(':') as separator. Writer IDs
113
+ * containing colons are supported because the counter (after the last colon)
114
+ * is always numeric. However, empty writer IDs or IDs ending with a colon
115
+ * may produce unexpected results.
116
+ *
112
117
  * @param {string} encoded - Format: "writerId:counter"
113
118
  * @returns {Dot}
114
119
  * @throws {Error} If format is invalid
@@ -126,7 +126,9 @@ export function lwwSet(eventId, value) {
126
126
  * @returns {LWWRegister<T> | null} Register with greater EventId, or null if both null/undefined
127
127
  */
128
128
  export function lwwMax(a, b) {
129
- // Handle null/undefined cases
129
+ // Null/undefined values are handled defensively for forward compatibility.
130
+ // Current callers always provide non-null values, but the LWW register
131
+ // contract permits null as a valid tombstone value.
130
132
  if ((a === null || a === undefined) && (b === null || b === undefined)) {
131
133
  return null;
132
134
  }
@@ -221,24 +221,26 @@ export function orsetGetDots(set, element) {
221
221
  export function orsetJoin(a, b) {
222
222
  const result = createORSet();
223
223
 
224
- // Union entries from a
224
+ // Copy entries from a into result — each dot set is shallow-copied so the
225
+ // caller cannot mutate the original through the result.
225
226
  for (const [element, dots] of a.entries) {
226
227
  result.entries.set(element, new Set(dots));
227
228
  }
228
229
 
229
- // Union entries from b
230
+ // Merge entries from b — if the element already exists (from a), add into
231
+ // the cloned set; otherwise clone b's dot set the same way for consistency.
230
232
  for (const [element, dots] of b.entries) {
231
- let resultDots = result.entries.get(element);
232
- if (!resultDots) {
233
- resultDots = new Set();
234
- result.entries.set(element, resultDots);
235
- }
236
- for (const dot of dots) {
237
- resultDots.add(dot);
233
+ const existing = result.entries.get(element);
234
+ if (existing) {
235
+ for (const dot of dots) {
236
+ existing.add(dot);
237
+ }
238
+ } else {
239
+ result.entries.set(element, new Set(dots));
238
240
  }
239
241
  }
240
242
 
241
- // Union tombstones
243
+ // Union tombstones from both sides
242
244
  for (const dot of a.tombstones) {
243
245
  result.tombstones.add(dot);
244
246
  }
@@ -343,16 +345,19 @@ export function orsetClone(set) {
343
345
  * @returns {{entries: Array<[string, string[]]>, tombstones: string[]}}
344
346
  */
345
347
  export function orsetSerialize(set) {
346
- // Serialize entries: convert Map to array of [element, sortedDots]
348
+ // Serialize entries: convert Map to array of [element, sortedDots].
349
+ // Pre-decode dots before sorting to avoid O(N log N) decodeDot calls
350
+ // during comparisons.
347
351
  /** @type {Array<[string, string[]]>} */
348
352
  const entriesArray = [];
349
353
  for (const [element, dots] of set.entries) {
350
- const sortedDots = [...dots].sort((a, b) => {
351
- const dotA = decodeDot(a);
352
- const dotB = decodeDot(b);
353
- return compareDots(dotA, dotB);
354
- });
355
- entriesArray.push([element, sortedDots]);
354
+ /** @type {Array<{encoded: string, decoded: import('./Dot.js').Dot}>} */
355
+ const pairs = [];
356
+ for (const encoded of dots) {
357
+ pairs.push({ encoded, decoded: decodeDot(encoded) });
358
+ }
359
+ pairs.sort((a, b) => compareDots(a.decoded, b.decoded));
360
+ entriesArray.push([element, pairs.map((p) => p.encoded)]);
356
361
  }
357
362
 
358
363
  // Sort entries by element (stringified for consistency)
@@ -362,12 +367,14 @@ export function orsetSerialize(set) {
362
367
  return keyA < keyB ? -1 : keyA > keyB ? 1 : 0;
363
368
  });
364
369
 
365
- // Serialize tombstones: sorted array
366
- const sortedTombstones = [...set.tombstones].sort((a, b) => {
367
- const dotA = decodeDot(a);
368
- const dotB = decodeDot(b);
369
- return compareDots(dotA, dotB);
370
- });
370
+ // Serialize tombstones: pre-decode then sort
371
+ /** @type {Array<{encoded: string, decoded: import('./Dot.js').Dot}>} */
372
+ const tombPairs = [];
373
+ for (const encoded of set.tombstones) {
374
+ tombPairs.push({ encoded, decoded: decodeDot(encoded) });
375
+ }
376
+ tombPairs.sort((a, b) => compareDots(a.decoded, b.decoded));
377
+ const sortedTombstones = tombPairs.map((p) => p.encoded);
371
378
 
372
379
  return {
373
380
  entries: entriesArray,
@@ -160,11 +160,19 @@ export function vvContains(vv, dot) {
160
160
  export function vvSerialize(vv) {
161
161
  /** @type {Record<string, number>} */
162
162
  const obj = {};
163
+ // Key sort is required for deterministic serialization. The writer count
164
+ // is typically small (<100), so the O(W log W) cost is negligible.
163
165
  const sortedKeys = [...vv.keys()].sort();
164
166
 
165
167
  for (const key of sortedKeys) {
166
168
  const val = vv.get(key);
167
169
  if (val !== undefined) {
170
+ // Invariant assertion — not input validation. Zero counters must never
171
+ // appear in a VersionVector. They carry no causal information and would
172
+ // be elided on deserialization, breaking round-trip equality.
173
+ if (val === 0) {
174
+ throw new Error(`vvSerialize: zero counter for writerId "${key}" — VersionVector must not contain zero counters`);
175
+ }
168
176
  obj[key] = val;
169
177
  }
170
178
  }
@@ -175,6 +183,10 @@ export function vvSerialize(vv) {
175
183
  /**
176
184
  * Deserializes a plain object back to a VersionVector.
177
185
  *
186
+ * Zero counters are elided during deserialization. This is intentional — a
187
+ * counter of 0 carries no causal information and wastes space. Serialization
188
+ * must never emit zero counters.
189
+ *
178
190
  * @param {Object<string, number>} obj
179
191
  * @returns {VersionVector}
180
192
  * @throws {Error} If any counter value is not a non-negative integer
@@ -0,0 +1,27 @@
1
+ import WarpError from './WarpError.js';
2
+
3
+ /**
4
+ * Error class for malformed or invalid patch operations.
5
+ *
6
+ * ## Error Codes
7
+ *
8
+ * | Code | Description |
9
+ * |------|-------------|
10
+ * | `E_PATCH_MALFORMED` | Operation is missing required fields or has invalid types |
11
+ *
12
+ * @class PatchError
13
+ * @extends WarpError
14
+ *
15
+ * @property {string} name - Always 'PatchError' for instanceof checks
16
+ * @property {string} code - Machine-readable error code for programmatic handling
17
+ * @property {Object} context - Serializable context object with error details
18
+ */
19
+ export default class PatchError extends WarpError {
20
+ /**
21
+ * @param {string} message
22
+ * @param {{ code?: string, context?: Record<string, unknown> }} [options={}]
23
+ */
24
+ constructor(message, options = {}) {
25
+ super(message, 'E_PATCH_MALFORMED', options);
26
+ }
27
+ }
@@ -3,6 +3,10 @@ import IndexError from './IndexError.js';
3
3
  /**
4
4
  * Error thrown when a storage operation fails.
5
5
  *
6
+ * StorageError extends IndexError because storage errors originate from
7
+ * index operations. This hierarchy is intentional — IndexError provides
8
+ * the storage-specific error context.
9
+ *
6
10
  * This error indicates that a read or write operation to storage failed,
7
11
  * typically due to I/O errors, permission issues, or storage unavailability.
8
12
  *
@@ -31,6 +35,10 @@ export default class StorageError extends IndexError {
31
35
  /**
32
36
  * Creates a new StorageError.
33
37
  *
38
+ * Context is merged via Object.assign — duplicate keys from the second
39
+ * argument overwrite the first. Callers should ensure context keys don't
40
+ * collide, or use unique prefixes.
41
+ *
34
42
  * @param {string} message - Human-readable error message
35
43
  * @param {{ operation?: string, oid?: string, cause?: Error, context?: Record<string, unknown> }} [options={}] - Error options
36
44
  */
@@ -26,6 +26,11 @@ import WarpError from './WarpError.js';
26
26
  */
27
27
  export default class WriterError extends WarpError {
28
28
  /**
29
+ * Note: constructor parameter order differs from other WarpError subclasses
30
+ * (code, message vs message, code). This is intentional to match the most
31
+ * common call sites in PatchSession and PatchBuilderV2 where the error code
32
+ * is the primary discriminator.
33
+ *
29
34
  * @param {string} code - Error code
30
35
  * @param {string} message - Human-readable error message
31
36
  * @param {Error} [cause] - Original error that caused this error
@@ -9,6 +9,7 @@ export { default as WarpError } from './WarpError.js';
9
9
  export { default as ForkError } from './ForkError.js';
10
10
  export { default as IndexError } from './IndexError.js';
11
11
  export { default as OperationAbortedError } from './OperationAbortedError.js';
12
+ export { default as PatchError } from './PatchError.js';
12
13
  export { default as QueryError } from './QueryError.js';
13
14
  export { default as SyncError } from './SyncError.js';
14
15
  export { default as ShardCorruptionError } from './ShardCorruptionError.js';
@@ -34,6 +34,7 @@ import { decodeAuditMessage } from './AuditMessageCodec.js';
34
34
  import { TrustRecordService } from '../trust/TrustRecordService.js';
35
35
  import { buildState } from '../trust/TrustStateBuilder.js';
36
36
  import { evaluateWriters } from '../trust/TrustEvaluator.js';
37
+ import { TRUST_REASON_CODES } from '../trust/reasonCodes.js';
37
38
 
38
39
  // ============================================================================
39
40
  // Constants
@@ -667,7 +668,37 @@ export class AuditVerifierService {
667
668
  codec: this._codec,
668
669
  });
669
670
 
670
- const records = await recordService.readRecords(graphName, options.pin ? { tip: options.pin } : {});
671
+ const recordsResult = await recordService.readRecords(graphName, options.pin ? { tip: options.pin } : {});
672
+ if (!recordsResult.ok) {
673
+ return {
674
+ trustSchemaVersion: 1,
675
+ mode: 'signed_evidence_v1',
676
+ trustVerdict: 'fail',
677
+ trust: {
678
+ status: 'error',
679
+ source: options.pin ? 'pinned' : 'ref',
680
+ sourceDetail: options.pin ?? null,
681
+ evaluatedWriters: [],
682
+ untrustedWriters: [],
683
+ explanations: [
684
+ {
685
+ writerId: '*',
686
+ trusted: false,
687
+ reasonCode: TRUST_REASON_CODES.TRUST_RECORD_CHAIN_INVALID,
688
+ reason: `Trust chain read failed: ${recordsResult.error.message}`,
689
+ },
690
+ ],
691
+ evidenceSummary: {
692
+ recordsScanned: 0,
693
+ activeKeys: 0,
694
+ revokedKeys: 0,
695
+ activeBindings: 0,
696
+ revokedBindings: 0,
697
+ },
698
+ },
699
+ };
700
+ }
701
+ const { records } = recordsResult;
671
702
 
672
703
  if (records.length === 0) {
673
704
  return {
@@ -704,4 +735,3 @@ export class AuditVerifierService {
704
735
  return evaluateWriters(writerIds, trustState, policy);
705
736
  }
706
737
  }
707
-
@@ -22,12 +22,20 @@ const computeChecksum = async (data, crypto) => {
22
22
  };
23
23
 
24
24
  /** @type {boolean|null} Whether native Roaring bindings are available (null = unknown until first use) */
25
- export let NATIVE_ROARING_AVAILABLE = null;
25
+ let _nativeRoaringAvailable = null;
26
+
27
+ /**
28
+ * Resets native Roaring availability detection (test-only utility).
29
+ * @returns {void}
30
+ */
31
+ export function resetNativeRoaringFlag() {
32
+ _nativeRoaringAvailable = null;
33
+ }
26
34
 
27
35
  const ensureRoaringBitmap32 = () => {
28
36
  const RoaringBitmap32 = getRoaringBitmap32();
29
- if (NATIVE_ROARING_AVAILABLE === null) {
30
- NATIVE_ROARING_AVAILABLE = getNativeRoaringAvailable();
37
+ if (_nativeRoaringAvailable === null) {
38
+ _nativeRoaringAvailable = getNativeRoaringAvailable();
31
39
  }
32
40
  return RoaringBitmap32;
33
41
  };
@@ -71,14 +79,11 @@ function serializeFrontierToTree(frontier, tree, codec) {
71
79
  * BlobPort + TreePort + RefPort from the persistence layer.
72
80
  *
73
81
  * **Performance Note**: Uses Roaring Bitmaps for compression. Native bindings
74
- * provide best performance. Check `NATIVE_ROARING_AVAILABLE` export if
75
- * performance is critical.
82
+ * provide best performance. Use `getNativeRoaringAvailable()` from
83
+ * `src/domain/utils/roaring.js` if runtime capability checks are needed.
76
84
  *
77
85
  * @example
78
- * import BitmapIndexBuilder, { NATIVE_ROARING_AVAILABLE } from './BitmapIndexBuilder.js';
79
- * if (NATIVE_ROARING_AVAILABLE === false) {
80
- * console.warn('Consider installing native Roaring bindings for better performance');
81
- * }
86
+ * import BitmapIndexBuilder from './BitmapIndexBuilder.js';
82
87
  * const builder = new BitmapIndexBuilder();
83
88
  */
84
89
  export default class BitmapIndexBuilder {
@@ -148,7 +148,9 @@ export async function createV5({
148
148
  // 1. Compute appliedVV from actual state dots
149
149
  const appliedVV = computeAppliedVV(state);
150
150
 
151
- // 2. Optionally compact (only tombstoned dots <= appliedVV)
151
+ // 2. Optionally compact (only tombstoned dots <= appliedVV).
152
+ // When compact=false, checkpointState aliases the caller's state but the
153
+ // remaining path is read-only (serialize + hash), so no clone is needed.
152
154
  let checkpointState = state;
153
155
  if (compact) {
154
156
  checkpointState = cloneStateV5(state);
@@ -188,6 +190,11 @@ export async function createV5({
188
190
  // If patch commits are ever pruned, content blobs remain reachable via
189
191
  // the checkpoint tree. Without this, git gc would nuke content blobs
190
192
  // whose only anchor was the (now-pruned) patch commit tree.
193
+ //
194
+ // O(P) scan over all properties — acceptable because checkpoint creation
195
+ // is infrequent. The property key format is deterministic (encodePropKey /
196
+ // encodeEdgePropKey), but content keys are interleaved with regular keys
197
+ // so no prefix filter can skip non-content entries without decoding.
191
198
  const contentOids = new Set();
192
199
  for (const [propKey, register] of checkpointState.prop) {
193
200
  const { propKey: decodedKey } = isEdgePropKey(propKey)
@@ -235,6 +242,8 @@ export async function createV5({
235
242
  stateHash,
236
243
  frontierOid: frontierBlobOid,
237
244
  indexOid: treeOid,
245
+ // Schema 3 was used for edge-property-aware patches but is never emitted
246
+ // by checkpoint creation. Schema 4 indicates an index tree is present.
238
247
  schema: indexTree ? 4 : 2,
239
248
  });
240
249
 
@@ -4,6 +4,7 @@
4
4
 
5
5
  import { orsetCompact } from '../crdt/ORSet.js';
6
6
  import { collectGCMetrics } from './GCMetrics.js';
7
+ import WarpError from '../errors/WarpError.js';
7
8
 
8
9
  /**
9
10
  * @typedef {Object} GCPolicy
@@ -92,21 +93,41 @@ export function shouldRunGC(metrics, policy) {
92
93
 
93
94
  /**
94
95
  * Executes GC on state. Only compacts tombstoned dots <= appliedVV.
95
- * Mutates state in place.
96
+ * Mutates state **in place** — callers must clone-then-swap to preserve
97
+ * a rollback copy (see CheckpointService for the canonical pattern).
96
98
  *
97
99
  * @param {import('./JoinReducer.js').WarpStateV5} state - State to compact (mutated!)
98
100
  * @param {import('../crdt/VersionVector.js').VersionVector} appliedVV - Version vector cutoff
99
101
  * @returns {GCExecuteResult}
102
+ * @throws {WarpError} E_GC_INVALID_VV if appliedVV is not a Map
103
+ * @throws {WarpError} E_GC_COMPACT_FAILED if orsetCompact throws
100
104
  */
101
105
  export function executeGC(state, appliedVV) {
106
+ if (!(appliedVV instanceof Map)) {
107
+ throw new WarpError(
108
+ 'executeGC requires appliedVV to be a Map (VersionVector)',
109
+ 'E_GC_INVALID_VV',
110
+ );
111
+ }
112
+
102
113
  const startTime = performance.now();
103
114
 
104
115
  // Collect metrics before compaction
105
116
  const beforeMetrics = collectGCMetrics(state);
106
117
 
107
- // Compact both ORSets
108
- orsetCompact(state.nodeAlive, appliedVV);
109
- orsetCompact(state.edgeAlive, appliedVV);
118
+ // Compact both ORSets — wrap each phase so partial failure is diagnosable
119
+ let nodesDone = false;
120
+ try {
121
+ orsetCompact(state.nodeAlive, appliedVV);
122
+ nodesDone = true;
123
+ orsetCompact(state.edgeAlive, appliedVV);
124
+ } catch {
125
+ throw new WarpError(
126
+ `GC compaction failed during ${nodesDone ? 'edgeAlive' : 'nodeAlive'} phase`,
127
+ 'E_GC_COMPACT_FAILED',
128
+ { context: { phase: nodesDone ? 'edgeAlive' : 'nodeAlive', partialCompaction: nodesDone } },
129
+ );
130
+ }
110
131
 
111
132
  // Collect metrics after compaction
112
133
  const afterMetrics = collectGCMetrics(state);
@@ -165,7 +165,9 @@ export default class GraphTraversal {
165
165
  return await this._provider.getNeighbors(nodeId, direction, options);
166
166
  }
167
167
 
168
- const labelsKey = options?.labels ? JSON.stringify([...options.labels].sort()) : '*';
168
+ const labelsKey = options?.labels
169
+ ? [...options.labels].sort().join('\0')
170
+ : '*';
169
171
  const key = `${nodeId}\0${direction}\0${labelsKey}`;
170
172
  const cached = cache.get(key);
171
173
  if (cached !== undefined) {