@git-stunts/git-warp 12.1.0 → 12.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -8,10 +8,12 @@
8
8
  <img src="docs/images/hero.gif" alt="git-warp CLI demo" width="600">
9
9
  </p>
10
10
 
11
- ## What's New in v12.1.0
11
+ ## What's New in v12.2.0
12
12
 
13
- - **Multi-pattern glob support** — `graph.observer()`, `query().match()`, and `translationCost()` now accept an array of glob patterns (e.g. `['campaign:*', 'milestone:*']`). Nodes matching *any* pattern in the array are included (OR semantics).
14
- - **Release preflight** — `npm run release:preflight` runs a 10-check local gate (version agreement, CHANGELOG, README, lint, types, tests, pack dry-runs) before tagging.
13
+ - **O(N log N) topological sort** — `topologicalSort()` now uses a MinHeap ready queue instead of sorted-array merging, eliminating the O(N²) hot path for large DAGs.
14
+ - **QueryBuilder batching + memoization** — property fetches are now bounded (chunks of 100) and cached per-run, reducing redundant I/O across where-clauses, result building, and aggregation.
15
+ - **Fast materialization guard** — `_materializeGraph()` skips full materialization when cached state is clean, improving query/traversal latency.
16
+ - **Checkpoint `visible.cbor` removed** — checkpoints no longer write the unused visible-projection blob, saving one serialize + blob write per checkpoint.
15
17
 
16
18
  See the [full changelog](CHANGELOG.md) for details.
17
19
 
package/bin/warp-graph.js CHANGED
@@ -9,6 +9,10 @@ import { COMMANDS } from './cli/commands/registry.js';
9
9
 
10
10
  const VIEW_SUPPORTED_COMMANDS = ['info', 'check', 'history', 'path', 'materialize', 'query', 'seek'];
11
11
 
12
+ // C8: Capture output format early so the error handler can use it
13
+ const hasJsonFlag = process.argv.includes('--json');
14
+ const hasNdjsonFlag = process.argv.includes('--ndjson');
15
+
12
16
  /**
13
17
  * CLI entry point. Parses arguments, dispatches to the appropriate command handler,
14
18
  * and emits the result to stdout (JSON or human-readable).
@@ -78,8 +82,8 @@ main().catch((error) => {
78
82
  payload.error.cause = error.cause instanceof Error ? error.cause.message : error.cause;
79
83
  }
80
84
 
81
- if (process.argv.includes('--json') || process.argv.includes('--ndjson')) {
82
- const stringify = process.argv.includes('--ndjson') ? compactStringify : stableStringify;
85
+ if (hasJsonFlag || hasNdjsonFlag) {
86
+ const stringify = hasNdjsonFlag ? compactStringify : stableStringify;
83
87
  process.stdout.write(`${stringify(payload)}\n`);
84
88
  } else {
85
89
  process.stderr.write(renderError(payload));
package/index.d.ts CHANGED
@@ -1915,7 +1915,7 @@ export default class WarpGraph {
1915
1915
 
1916
1916
  /** Filtered watcher that only fires for changes matching a glob pattern. */
1917
1917
  watch(
1918
- pattern: string,
1918
+ pattern: string | string[],
1919
1919
  options: {
1920
1920
  onChange: (diff: StateDiffResult) => void;
1921
1921
  onError?: (error: Error) => void;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@git-stunts/git-warp",
3
- "version": "12.1.0",
3
+ "version": "12.2.0",
4
4
  "description": "Deterministic WARP graph over Git: graph-native storage, traversal, and tooling.",
5
5
  "type": "module",
6
6
  "license": "Apache-2.0",
@@ -195,6 +195,9 @@ export default class WarpGraph {
195
195
 
196
196
  /** @type {Record<string, Uint8Array>|null} */
197
197
  this._cachedIndexTree = null;
198
+
199
+ /** @type {boolean} */
200
+ this._indexDegraded = false;
198
201
  }
199
202
 
200
203
  /**
@@ -290,19 +290,48 @@ export function orsetJoin(a, b) {
290
290
  * All replicas are known to have observed at least this causal context.
291
291
  */
292
292
  export function orsetCompact(set, includedVV) {
293
+ // Collect deletions in temp arrays to avoid mutation-during-iteration (J8)
294
+ /** @type {Array<{element: string, dot: string}>} */
295
+ const toDelete = [];
296
+
293
297
  for (const [element, dots] of set.entries) {
294
298
  for (const encodedDot of dots) {
295
299
  const dot = decodeDot(encodedDot);
296
300
  // Only compact if: (1) dot is tombstoned AND (2) dot <= includedVV
297
301
  if (set.tombstones.has(encodedDot) && vvContains(includedVV, dot)) {
298
- dots.delete(encodedDot);
299
- set.tombstones.delete(encodedDot);
302
+ toDelete.push({ element, dot: encodedDot });
300
303
  }
301
304
  }
302
- if (dots.size === 0) {
303
- set.entries.delete(element);
305
+ }
306
+
307
+ // Apply deletions
308
+ for (const { element, dot: encodedDot } of toDelete) {
309
+ const dots = set.entries.get(element);
310
+ if (dots) {
311
+ dots.delete(encodedDot);
312
+ if (dots.size === 0) {
313
+ set.entries.delete(element);
314
+ }
304
315
  }
316
+ set.tombstones.delete(encodedDot);
317
+ }
318
+ }
319
+
320
+ /**
321
+ * Creates a deep clone of an ORSet.
322
+ *
323
+ * @param {ORSet} set - The ORSet to clone
324
+ * @returns {ORSet} A new ORSet with independent data structures
325
+ */
326
+ export function orsetClone(set) {
327
+ const result = createORSet();
328
+ for (const [element, dots] of set.entries) {
329
+ result.entries.set(element, new Set(dots));
330
+ }
331
+ for (const dot of set.tombstones) {
332
+ result.tombstones.add(dot);
305
333
  }
334
+ return result;
306
335
  }
307
336
 
308
337
  /**
@@ -14,6 +14,7 @@ import WarpError from './WarpError.js';
14
14
  * | `E_SYNC_TIMEOUT` | Sync request exceeded timeout |
15
15
  * | `E_SYNC_REMOTE` | Remote server returned a 5xx error |
16
16
  * | `E_SYNC_PROTOCOL` | Protocol violation: 4xx, invalid JSON, or malformed response |
17
+ * | `E_SYNC_PAYLOAD_INVALID` | Sync payload failed shape/resource-limit validation (B64) |
17
18
  * | `SYNC_ERROR` | Generic/default sync error |
18
19
  *
19
20
  * @class SyncError
@@ -9,6 +9,8 @@ import WarpError from './WarpError.js';
9
9
  * |------|-------------|
10
10
  * | `E_TRUST_UNSUPPORTED_ALGORITHM` | Algorithm is not `ed25519` |
11
11
  * | `E_TRUST_INVALID_KEY` | Public key is malformed (wrong length or bad base64) |
12
+ * | `E_TRUST_CAS_CONFLICT` | Concurrent append advanced the trust chain; caller must rebuild + re-sign |
13
+ * | `E_TRUST_CAS_EXHAUSTED` | CAS retry budget exhausted (transient failures) |
12
14
  * | `TRUST_ERROR` | Generic/default trust error |
13
15
  *
14
16
  * @class TrustError
@@ -11,7 +11,7 @@
11
11
  * @see WARP Spec Section 10
12
12
  */
13
13
 
14
- import { serializeStateV5, computeStateHashV5 } from './StateSerializerV5.js';
14
+ import { computeStateHashV5 } from './StateSerializerV5.js';
15
15
  import {
16
16
  serializeFullStateV5,
17
17
  deserializeFullStateV5,
@@ -86,7 +86,6 @@ function partitionTreeOids(rawOids) {
86
86
  * ```
87
87
  * <checkpoint_commit_tree>/
88
88
  * ├── state.cbor # AUTHORITATIVE: Full V5 state (ORSets + props)
89
- * ├── visible.cbor # CACHE ONLY: Visible projection for fast queries
90
89
  * ├── frontier.cbor # Writer frontiers
91
90
  * ├── appliedVV.cbor # Version vector of dots in state
92
91
  * └── provenanceIndex.cbor # Optional: node-to-patchSha index (HG/IO/2)
@@ -116,7 +115,6 @@ export async function create({ persistence, graphName, state, frontier, parents
116
115
  * ```
117
116
  * <checkpoint_tree>/
118
117
  * ├── state.cbor # AUTHORITATIVE: Full V5 state (ORSets + props)
119
- * ├── visible.cbor # CACHE ONLY: Visible projection for fast queries
120
118
  * ├── frontier.cbor # Writer frontiers
121
119
  * ├── appliedVV.cbor # Version vector of dots in state
122
120
  * └── provenanceIndex.cbor # Optional: node-to-patchSha index (HG/IO/2)
@@ -161,8 +159,7 @@ export async function createV5({
161
159
  // 3. Serialize full state (AUTHORITATIVE)
162
160
  const stateBuffer = serializeFullStateV5(checkpointState, { codec });
163
161
 
164
- // 4. Serialize visible projection (CACHE)
165
- const visibleBuffer = serializeStateV5(checkpointState, { codec });
162
+ // 4. Compute state hash
166
163
  const stateHash = await computeStateHashV5(checkpointState, { codec, crypto: /** @type {import('../../ports/CryptoPort.js').default} */ (crypto) });
167
164
 
168
165
  // 5. Serialize frontier and appliedVV
@@ -171,7 +168,6 @@ export async function createV5({
171
168
 
172
169
  // 6. Write blobs to git
173
170
  const stateBlobOid = await persistence.writeBlob(/** @type {Buffer} */ (stateBuffer));
174
- const visibleBlobOid = await persistence.writeBlob(/** @type {Buffer} */ (visibleBuffer));
175
171
  const frontierBlobOid = await persistence.writeBlob(/** @type {Buffer} */ (frontierBuffer));
176
172
  const appliedVVBlobOid = await persistence.writeBlob(/** @type {Buffer} */ (appliedVVBuffer));
177
173
 
@@ -207,7 +203,6 @@ export async function createV5({
207
203
  `100644 blob ${appliedVVBlobOid}\tappliedVV.cbor`,
208
204
  `100644 blob ${frontierBlobOid}\tfrontier.cbor`,
209
205
  `100644 blob ${stateBlobOid}\tstate.cbor`,
210
- `100644 blob ${visibleBlobOid}\tvisible.cbor`,
211
206
  ];
212
207
 
213
208
  // Add provenance index if present
@@ -91,6 +91,24 @@ export function cloneFrontier(frontier) {
91
91
  return new Map(frontier);
92
92
  }
93
93
 
94
+ /**
95
+ * Produces a stable, deterministic fingerprint of a frontier.
96
+ *
97
+ * Sorts entries by writer ID and JSON-stringifies the sorted pairs.
98
+ * Two frontiers produce the same fingerprint iff they have identical
99
+ * writer→SHA mappings. Used for snapshot isolation checks (B63)
100
+ * and diagnostic logging.
101
+ *
102
+ * @param {Frontier} frontier
103
+ * @returns {string} Deterministic JSON string of sorted entries
104
+ */
105
+ export function frontierFingerprint(frontier) {
106
+ const sorted = [...frontier.entries()].sort(
107
+ ([a], [b]) => (a < b ? -1 : a > b ? 1 : 0),
108
+ );
109
+ return JSON.stringify(sorted);
110
+ }
111
+
94
112
  /**
95
113
  * Merges two frontiers, taking the "later" entry for each writer.
96
114
  * Note: This is a simple merge that takes entries from both.
@@ -830,52 +830,38 @@ export default class GraphTraversal {
830
830
  }
831
831
  }
832
832
 
833
- // Phase 2: Kahn's — collect zero-indegree nodes, sort them lex, yield in order
834
- /** @type {string[]} */
835
- const ready = [];
833
+ // Phase 2: Kahn's — MinHeap for O(N log N) zero-indegree processing
834
+ const ready = new MinHeap({ tieBreaker: lexTieBreaker });
836
835
  for (const nodeId of discovered) {
837
836
  if ((inDegree.get(nodeId) || 0) === 0) {
838
- ready.push(nodeId);
837
+ ready.insert(nodeId, 0);
839
838
  }
840
839
  }
841
- ready.sort(lexTieBreaker);
842
840
 
841
+ /** @type {string[]} */
843
842
  const sorted = [];
844
- let rHead = 0;
845
- while (rHead < ready.length && sorted.length < maxNodes) {
843
+ while (!ready.isEmpty() && sorted.length < maxNodes) {
846
844
  if (sorted.length % 1000 === 0) {
847
845
  checkAborted(signal, 'topologicalSort');
848
846
  }
849
- const nodeId = /** @type {string} */ (ready[rHead++]);
847
+ const nodeId = /** @type {string} */ (ready.extractMin());
850
848
  sorted.push(nodeId);
851
849
 
852
850
  const neighbors = adjList.get(nodeId) || [];
853
- /** @type {string[]} */
854
- const newlyReady = [];
855
851
  for (const neighborId of neighbors) {
856
852
  const deg = /** @type {number} */ (inDegree.get(neighborId)) - 1;
857
853
  inDegree.set(neighborId, deg);
858
854
  if (deg === 0) {
859
- newlyReady.push(neighborId);
855
+ ready.insert(neighborId, 0);
860
856
  }
861
857
  }
862
- // Insert newly ready nodes in sorted position
863
- if (newlyReady.length > 0) {
864
- newlyReady.sort(lexTieBreaker);
865
- // Compact consumed prefix before merge to keep rHead at 0
866
- if (rHead > 0) {
867
- ready.splice(0, rHead);
868
- rHead = 0;
869
- }
870
- this._insertSorted(ready, newlyReady);
871
- }
872
858
  }
873
859
 
874
860
  const hasCycle = computeTopoHasCycle({
875
861
  sortedLength: sorted.length,
876
862
  discoveredSize: discovered.size,
877
863
  maxNodes,
878
- readyRemaining: rHead < ready.length,
864
+ readyRemaining: !ready.isEmpty(),
879
865
  });
880
866
  if (hasCycle && throwOnCycle) {
881
867
  // Find a back-edge as witness
@@ -1209,31 +1195,4 @@ export default class GraphTraversal {
1209
1195
  return candidatePred < current;
1210
1196
  }
1211
1197
 
1212
- /**
1213
- * Inserts sorted items into a sorted array maintaining order.
1214
- * Both input arrays must be sorted by lexTieBreaker.
1215
- *
1216
- * @param {string[]} target - Sorted array to insert into (mutated in place)
1217
- * @param {string[]} items - Sorted items to insert
1218
- * @private
1219
- */
1220
- _insertSorted(target, items) {
1221
- // O(n+k) merge: build merged array from two sorted inputs
1222
- const merged = [];
1223
- let ti = 0;
1224
- let ii = 0;
1225
- while (ti < target.length && ii < items.length) {
1226
- if (target[ti] <= items[ii]) {
1227
- merged.push(target[ti++]);
1228
- } else {
1229
- merged.push(items[ii++]);
1230
- }
1231
- }
1232
- while (ti < target.length) { merged.push(target[ti++]); }
1233
- while (ii < items.length) { merged.push(items[ii++]); }
1234
- target.length = 0;
1235
- for (let i = 0; i < merged.length; i++) {
1236
- target.push(merged[i]);
1237
- }
1238
- }
1239
1198
  }
@@ -10,6 +10,7 @@
10
10
 
11
11
  import { z } from 'zod';
12
12
  import SyncAuthService from './SyncAuthService.js';
13
+ import { validateSyncRequest } from './SyncPayloadSchema.js';
13
14
 
14
15
  const DEFAULT_MAX_REQUEST_BYTES = 4 * 1024 * 1024;
15
16
  const MAX_REQUEST_BYTES_CEILING = 128 * 1024 * 1024; // 134217728
@@ -117,26 +118,7 @@ function jsonResponse(data) {
117
118
  };
118
119
  }
119
120
 
120
- /**
121
- * Validates that a sync request object has the expected shape.
122
- *
123
- * @param {unknown} parsed - Parsed JSON body
124
- * @returns {boolean} True if valid
125
- * @private
126
- */
127
- function isValidSyncRequest(parsed) {
128
- if (!parsed || typeof parsed !== 'object') {
129
- return false;
130
- }
131
- const rec = /** @type {Record<string, unknown>} */ (parsed);
132
- if (rec.type !== 'sync-request') {
133
- return false;
134
- }
135
- if (!rec.frontier || typeof rec.frontier !== 'object' || Array.isArray(rec.frontier)) {
136
- return false;
137
- }
138
- return true;
139
- }
121
+ // isValidSyncRequest replaced by SyncPayloadSchema.validateSyncRequest (B64)
140
122
 
141
123
  /**
142
124
  * Checks the content-type header. Returns an error response if the
@@ -200,6 +182,7 @@ function checkBodySize(body, maxBytes) {
200
182
 
201
183
  /**
202
184
  * Parses and validates the request body as a sync request.
185
+ * Uses Zod-based SyncPayloadSchema for shape + resource limit validation.
203
186
  *
204
187
  * @param {Buffer|undefined} body
205
188
  * @returns {{ error: { status: number, headers: Object, body: string }, parsed: null } | { error: null, parsed: import('./SyncProtocol.js').SyncRequest }}
@@ -215,11 +198,12 @@ function parseBody(body) {
215
198
  return { error: errorResponse(400, 'Invalid JSON'), parsed: null };
216
199
  }
217
200
 
218
- if (!isValidSyncRequest(parsed)) {
219
- return { error: errorResponse(400, 'Invalid sync request'), parsed: null };
201
+ const validation = validateSyncRequest(parsed);
202
+ if (!validation.ok) {
203
+ return { error: errorResponse(400, `Invalid sync request: ${validation.error}`), parsed: null };
220
204
  }
221
205
 
222
- return { error: null, parsed };
206
+ return { error: null, parsed: /** @type {import('./SyncProtocol.js').SyncRequest} */ (validation.value) };
223
207
  }
224
208
 
225
209
  /**
@@ -298,12 +282,17 @@ export default class HttpSyncServer {
298
282
  this._auth.recordLogOnlyPassthrough();
299
283
  }
300
284
 
301
- // Writer whitelist (uses parsed body for writer IDs)
302
- if (parsed.patches && typeof parsed.patches === 'object') {
303
- const writerIds = Object.keys(parsed.patches);
304
- const writerResult = this._auth.enforceWriters(writerIds);
305
- if (!writerResult.ok) {
306
- return errorResponse(writerResult.status, writerResult.reason);
285
+ // Writer whitelist: for sync-requests, extract writer IDs from frontier
286
+ // keys (the writers the peer claims to have). Sync-requests don't carry
287
+ // patches; the server generates the response. For sync-responses with
288
+ // patches, trust-gate should be on patch authors (handled client-side).
289
+ if (parsed.frontier && typeof parsed.frontier === 'object') {
290
+ const writerIds = Object.keys(/** @type {Record<string, string>} */ (parsed.frontier));
291
+ if (writerIds.length > 0) {
292
+ const writerResult = this._auth.enforceWriters(writerIds);
293
+ if (!writerResult.ok) {
294
+ return errorResponse(writerResult.status, writerResult.reason);
295
+ }
307
296
  }
308
297
  }
309
298
 
@@ -86,6 +86,29 @@ export function createEmptyStateV5() {
86
86
  * @param {import('../utils/EventId.js').EventId} eventId - Event ID for causality tracking
87
87
  * @returns {void}
88
88
  */
89
+ /**
90
+ * Known V2 operation types. Used for forward-compatibility validation.
91
+ * @type {ReadonlySet<string>}
92
+ */
93
+ const KNOWN_OPS = new Set(['NodeAdd', 'NodeRemove', 'EdgeAdd', 'EdgeRemove', 'PropSet', 'BlobValue']);
94
+
95
+ /**
96
+ * Validates that an operation has a known type.
97
+ *
98
+ * @param {{ type: string }} op
99
+ * @returns {boolean} True if the op type is in KNOWN_OPS
100
+ */
101
+ export function isKnownOp(op) {
102
+ return op && typeof op.type === 'string' && KNOWN_OPS.has(op.type);
103
+ }
104
+
105
+ /**
106
+ * Applies a single V2 operation to the given CRDT state.
107
+ *
108
+ * @param {WarpStateV5} state - The mutable CRDT state to update
109
+ * @param {{type: string, node?: string, dot?: import('../crdt/Dot.js').Dot, observedDots?: string[], from?: string, to?: string, label?: string, key?: string, value?: unknown, oid?: string}} op - The operation to apply
110
+ * @param {import('../utils/EventId.js').EventId} eventId - The event ID for LWW ordering
111
+ */
89
112
  export function applyOpV2(state, op, eventId) {
90
113
  switch (op.type) {
91
114
  case 'NodeAdd':
@@ -103,6 +103,15 @@ export class PatchBuilderV2 {
103
103
  /** @type {Function} */
104
104
  this._getCurrentState = getCurrentState; // Function to get current materialized state
105
105
 
106
+ /**
107
+ * Snapshot of state captured at construction time (C4).
108
+ * Lazily populated on first call to _getSnapshotState().
109
+ * Prevents TOCTOU races where concurrent writes change state
110
+ * between remove operations in the same patch.
111
+ * @type {import('./JoinReducer.js').WarpStateV5|null|undefined}
112
+ */
113
+ this._snapshotState = /** @type {import('./JoinReducer.js').WarpStateV5|null} */ (/** @type {unknown} */ (undefined)); // undefined = not yet captured
114
+
106
115
  /** @type {string|null} */
107
116
  this._expectedParentSha = expectedParentSha;
108
117
 
@@ -156,6 +165,23 @@ export class PatchBuilderV2 {
156
165
  this._writes = new Set();
157
166
  }
158
167
 
168
+ /**
169
+ * Returns a snapshot of the current state, captured lazily on first call (C4).
170
+ *
171
+ * All remove operations within this patch observe dots from the same
172
+ * state snapshot, preventing TOCTOU races where concurrent writers
173
+ * change state between operations.
174
+ *
175
+ * @returns {import('./JoinReducer.js').WarpStateV5|null}
176
+ * @private
177
+ */
178
+ _getSnapshotState() {
179
+ if (this._snapshotState === undefined) {
180
+ this._snapshotState = this._getCurrentState() || null;
181
+ }
182
+ return this._snapshotState;
183
+ }
184
+
159
185
  /**
160
186
  * Adds a node to the graph.
161
187
  *
@@ -213,7 +239,7 @@ export class PatchBuilderV2 {
213
239
  */
214
240
  removeNode(nodeId) {
215
241
  // Get observed dots from current state (orsetGetDots returns already-encoded dot strings)
216
- const state = this._getCurrentState();
242
+ const state = this._getSnapshotState();
217
243
 
218
244
  // Cascade mode: auto-generate EdgeRemove ops for all connected edges before NodeRemove.
219
245
  // Generated ops appear in the patch for auditability.
@@ -330,7 +356,7 @@ export class PatchBuilderV2 {
330
356
  */
331
357
  removeEdge(from, to, label) {
332
358
  // Get observed dots from current state (orsetGetDots returns already-encoded dot strings)
333
- const state = this._getCurrentState();
359
+ const state = this._getSnapshotState();
334
360
  const edgeKey = encodeEdgeKey(from, to, label);
335
361
  const observedDots = state ? [...orsetGetDots(state.edgeAlive, edgeKey)] : [];
336
362
  this._ops.push(createEdgeRemoveV2(from, to, label, observedDots));
@@ -418,7 +444,7 @@ export class PatchBuilderV2 {
418
444
  // Validate edge exists in this patch or in current state
419
445
  const ek = encodeEdgeKey(from, to, label);
420
446
  if (!this._edgesAdded.has(ek)) {
421
- const state = this._getCurrentState();
447
+ const state = this._getSnapshotState();
422
448
  if (!state || !orsetContains(state.edgeAlive, ek)) {
423
449
  throw new Error(`Cannot set property on unknown edge (${from} → ${to} [${label}]): add the edge first`);
424
450
  }
@@ -9,6 +9,27 @@ import { matchGlob } from '../utils/matchGlob.js';
9
9
 
10
10
  const DEFAULT_PATTERN = '*';
11
11
 
12
+ /**
13
+ * Processes items in batches with bounded concurrency.
14
+ *
15
+ * @template T, R
16
+ * @param {T[]} items - Items to process
17
+ * @param {(item: T) => Promise<R>} fn - Async function to apply to each item
18
+ * @param {number} [limit=100] - Maximum concurrent operations per batch
19
+ * @returns {Promise<R[]>} Results in input order
20
+ */
21
+ async function batchMap(items, fn, limit = 100) {
22
+ const results = [];
23
+ for (let i = 0; i < items.length; i += limit) {
24
+ const batch = items.slice(i, i + limit);
25
+ const batchResults = await Promise.all(batch.map(fn));
26
+ for (const r of batchResults) {
27
+ results.push(r);
28
+ }
29
+ }
30
+ return results;
31
+ }
32
+
12
33
  /**
13
34
  * @typedef {Object} QueryNodeSnapshot
14
35
  * @property {string} id - The unique identifier of the node
@@ -652,22 +673,33 @@ export default class QueryBuilder {
652
673
 
653
674
  const pattern = this._pattern ?? DEFAULT_PATTERN;
654
675
 
676
+ // Per-run props memo to avoid redundant getNodeProps calls
677
+ /** @type {Map<string, Map<string, unknown>>} */
678
+ const propsMemo = new Map();
679
+ const getProps = async (/** @type {string} */ nodeId) => {
680
+ const cached = propsMemo.get(nodeId);
681
+ if (cached !== undefined) {
682
+ return cached;
683
+ }
684
+ const propsMap = (await this._graph.getNodeProps(nodeId)) || new Map();
685
+ propsMemo.set(nodeId, propsMap);
686
+ return propsMap;
687
+ };
688
+
655
689
  let workingSet;
656
690
  workingSet = allNodes.filter((nodeId) => matchGlob(pattern, nodeId));
657
691
 
658
692
  for (const op of this._operations) {
659
693
  if (op.type === 'where') {
660
- const snapshots = await Promise.all(
661
- workingSet.map(async (nodeId) => {
662
- const propsMap = (await this._graph.getNodeProps(nodeId)) || new Map();
663
- const edgesOut = adjacency.outgoing.get(nodeId) || [];
664
- const edgesIn = adjacency.incoming.get(nodeId) || [];
665
- return {
666
- nodeId,
667
- snapshot: createNodeSnapshot({ id: nodeId, propsMap, edgesOut, edgesIn }),
668
- };
669
- })
670
- );
694
+ const snapshots = await batchMap(workingSet, async (nodeId) => {
695
+ const propsMap = await getProps(nodeId);
696
+ const edgesOut = adjacency.outgoing.get(nodeId) || [];
697
+ const edgesIn = adjacency.incoming.get(nodeId) || [];
698
+ return {
699
+ nodeId,
700
+ snapshot: createNodeSnapshot({ id: nodeId, propsMap, edgesOut, edgesIn }),
701
+ };
702
+ });
671
703
  const predicate = /** @type {(node: QueryNodeSnapshot) => boolean} */ (op.fn);
672
704
  const filtered = snapshots
673
705
  .filter(({ snapshot }) => predicate(snapshot))
@@ -698,7 +730,7 @@ export default class QueryBuilder {
698
730
  }
699
731
 
700
732
  if (this._aggregate) {
701
- return await this._runAggregate(workingSet, stateHash);
733
+ return await this._runAggregate(workingSet, stateHash, getProps);
702
734
  }
703
735
 
704
736
  const selected = this._select;
@@ -718,22 +750,20 @@ export default class QueryBuilder {
718
750
  const includeId = !selectFields || selectFields.includes('id');
719
751
  const includeProps = !selectFields || selectFields.includes('props');
720
752
 
721
- const nodes = await Promise.all(
722
- workingSet.map(async (nodeId) => {
723
- const entry = {};
724
- if (includeId) {
725
- entry.id = nodeId;
726
- }
727
- if (includeProps) {
728
- const propsMap = (await this._graph.getNodeProps(nodeId)) || new Map();
729
- const props = buildPropsSnapshot(propsMap);
730
- if (selectFields || Object.keys(props).length > 0) {
731
- entry.props = props;
732
- }
753
+ const nodes = await batchMap(workingSet, async (nodeId) => {
754
+ const entry = {};
755
+ if (includeId) {
756
+ entry.id = nodeId;
757
+ }
758
+ if (includeProps) {
759
+ const propsMap = await getProps(nodeId);
760
+ const props = buildPropsSnapshot(propsMap);
761
+ if (selectFields || Object.keys(props).length > 0) {
762
+ entry.props = props;
733
763
  }
734
- return entry;
735
- })
736
- );
764
+ }
765
+ return entry;
766
+ });
737
767
 
738
768
  return { stateHash, nodes };
739
769
  }
@@ -747,10 +777,11 @@ export default class QueryBuilder {
747
777
  *
748
778
  * @param {string[]} workingSet - Array of matched node IDs
749
779
  * @param {string} stateHash - Hash of the materialized state
780
+ * @param {(nodeId: string) => Promise<Map<string, unknown>>} getProps - Memoized props fetcher
750
781
  * @returns {Promise<AggregateResult>} Object containing stateHash and requested aggregation values
751
782
  * @private
752
783
  */
753
- async _runAggregate(workingSet, stateHash) {
784
+ async _runAggregate(workingSet, stateHash, getProps) {
754
785
  const spec = /** @type {AggregateSpec} */ (this._aggregate);
755
786
  /** @type {AggregateResult} */
756
787
  const result = { stateHash };
@@ -773,8 +804,10 @@ export default class QueryBuilder {
773
804
  });
774
805
  }
775
806
 
776
- for (const nodeId of workingSet) {
777
- const propsMap = (await this._graph.getNodeProps(nodeId)) || new Map();
807
+ // Pre-fetch all props with bounded concurrency
808
+ const propsList = await batchMap(workingSet, getProps);
809
+
810
+ for (const propsMap of propsList) {
778
811
  for (const { segments, values } of propsByAgg.values()) {
779
812
  /** @type {unknown} */
780
813
  let value = propsMap.get(segments[0]);