@typicalday/firegraph 0.7.1 → 0.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. package/dist/backend.cjs +222 -0
  2. package/dist/backend.cjs.map +1 -0
  3. package/dist/backend.d.cts +121 -0
  4. package/dist/backend.d.ts +121 -0
  5. package/dist/backend.js +136 -0
  6. package/dist/backend.js.map +1 -0
  7. package/dist/chunk-5753Y42M.js +118 -0
  8. package/dist/chunk-5753Y42M.js.map +1 -0
  9. package/dist/chunk-EVUM6ORB.js +1575 -0
  10. package/dist/chunk-EVUM6ORB.js.map +1 -0
  11. package/dist/chunk-GLOVWKQH.js +94 -0
  12. package/dist/chunk-GLOVWKQH.js.map +1 -0
  13. package/dist/{chunk-KFA7G37W.js → chunk-SU4FNLC3.js} +32 -30
  14. package/dist/chunk-SU4FNLC3.js.map +1 -0
  15. package/dist/chunk-SZ6W4VAS.js +701 -0
  16. package/dist/chunk-SZ6W4VAS.js.map +1 -0
  17. package/dist/chunk-TYYPRVIE.js +57 -0
  18. package/dist/chunk-TYYPRVIE.js.map +1 -0
  19. package/dist/codegen/index.d.cts +25 -1
  20. package/dist/codegen/index.d.ts +25 -1
  21. package/dist/d1.cjs +2421 -0
  22. package/dist/d1.cjs.map +1 -0
  23. package/dist/d1.d.cts +54 -0
  24. package/dist/d1.d.ts +54 -0
  25. package/dist/d1.js +76 -0
  26. package/dist/d1.js.map +1 -0
  27. package/dist/do-sqlite.cjs +2424 -0
  28. package/dist/do-sqlite.cjs.map +1 -0
  29. package/dist/do-sqlite.d.cts +41 -0
  30. package/dist/do-sqlite.d.ts +41 -0
  31. package/dist/do-sqlite.js +79 -0
  32. package/dist/do-sqlite.js.map +1 -0
  33. package/dist/editor/client/assets/index-Bq2bfzeY.js +411 -0
  34. package/dist/editor/client/index.html +1 -1
  35. package/dist/editor/server/index.mjs +6524 -6355
  36. package/dist/index.cjs +2881 -2714
  37. package/dist/index.cjs.map +1 -1
  38. package/dist/index.d.cts +259 -275
  39. package/dist/index.d.ts +259 -275
  40. package/dist/index.js +728 -2304
  41. package/dist/index.js.map +1 -1
  42. package/dist/query-client/index.cjs +30 -28
  43. package/dist/query-client/index.cjs.map +1 -1
  44. package/dist/query-client/index.d.cts +2 -2
  45. package/dist/query-client/index.d.ts +2 -2
  46. package/dist/query-client/index.js +1 -1
  47. package/dist/react.cjs +0 -1
  48. package/dist/react.cjs.map +1 -1
  49. package/dist/react.js +0 -1
  50. package/dist/react.js.map +1 -1
  51. package/dist/scope-path-BtajqNK5.d.ts +234 -0
  52. package/dist/scope-path-D2mNENJ-.d.cts +234 -0
  53. package/dist/serialization-ZZ7RSDRX.js +13 -0
  54. package/dist/serialization-ZZ7RSDRX.js.map +1 -0
  55. package/dist/svelte.cjs +0 -2
  56. package/dist/svelte.cjs.map +1 -1
  57. package/dist/svelte.js +0 -2
  58. package/dist/svelte.js.map +1 -1
  59. package/dist/{index-B9aodfYD.d.ts → types-DfWVTsMn.d.cts} +28 -26
  60. package/dist/{index-B9aodfYD.d.cts → types-DfWVTsMn.d.ts} +28 -26
  61. package/package.json +35 -1
  62. package/dist/chunk-KFA7G37W.js.map +0 -1
  63. package/dist/editor/client/assets/index-tyFcX6qG.js +0 -411
@@ -0,0 +1,2424 @@
1
"use strict";
// esbuild-generated CommonJS interop helpers. These implement lazy module
// initialization (__esm), live re-export bindings (__export/__copyProps),
// and ESM<->CJS default-export bridging (__toESM/__toCommonJS).
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Wraps a module body so it runs at most once; `fn = 0` releases the closure
// after the first call, and subsequent calls return the cached `res`.
var __esm = (fn, res) => function __init() {
  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
};
// Defines enumerable getter properties on `target` for every key in `all`,
// giving live (re-evaluated) export bindings.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties from `from` to `to` as getters, skipping `except`
// and keys already present; preserves enumerability via the descriptor.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Converts a CommonJS module object into an ESM-shaped namespace object.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Marks a namespace object as an ES module and copies its exports onto it.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
32
+
33
// src/serialization.ts
// Lazily-initialized export namespace for the serialization module; the
// getters resolve against the bindings assigned inside init_serialization().
var serialization_exports = {};
__export(serialization_exports, {
  SERIALIZATION_TAG: () => SERIALIZATION_TAG,
  deserializeFirestoreTypes: () => deserializeFirestoreTypes,
  isTaggedValue: () => isTaggedValue,
  serializeFirestoreTypes: () => serializeFirestoreTypes
});
41
/**
 * Returns true when `value` is an object carrying a recognized firegraph
 * serialization tag (see SERIALIZATION_TAG / KNOWN_TYPES).
 */
function isTaggedValue(value) {
  if (typeof value !== "object" || value === null) {
    return false;
  }
  const tag = value[SERIALIZATION_TAG];
  if (typeof tag !== "string") {
    return false;
  }
  return KNOWN_TYPES.has(tag);
}
46
// Instance checks against the Firestore SDK classes. `import_firestore` is
// bound lazily by init_serialization(), so these must not be called before
// that runs.

/** True when `value` is a Firestore Timestamp instance. */
function isTimestamp(value) {
  const { Timestamp } = import_firestore;
  return value instanceof Timestamp;
}

/** True when `value` is a Firestore GeoPoint instance. */
function isGeoPoint(value) {
  const { GeoPoint } = import_firestore;
  return value instanceof GeoPoint;
}
52
/**
 * Duck-types a Firestore DocumentReference without importing the class:
 * requires a string `path`, a defined `firestore`, a string `id`, and a
 * constructor literally named "DocumentReference".
 */
function isDocumentReference(value) {
  if (value === null || typeof value !== "object") {
    return false;
  }
  const ref = value;
  if (typeof ref.path !== "string") return false;
  if (ref.firestore === void 0) return false;
  if (typeof ref.id !== "string") return false;
  return ref.constructor?.name === "DocumentReference";
}
57
/**
 * Duck-types a Firestore VectorValue: constructor named "VectorValue" and an
 * array `_values` backing field.
 */
function isVectorValue(value) {
  if (value === null || typeof value !== "object") {
    return false;
  }
  const candidate = value;
  if (candidate.constructor?.name !== "VectorValue") {
    return false;
  }
  return Array.isArray(candidate._values);
}
62
/**
 * Recursively converts Firestore-specific values (Timestamp, GeoPoint,
 * DocumentReference, VectorValue) into plain JSON-safe objects tagged with
 * SERIALIZATION_TAG. Primitives, null and undefined pass through unchanged.
 */
function serializeFirestoreTypes(data) {
  return serializeValue(data);
}

function serializeValue(value) {
  // null/undefined and primitives are already JSON-safe.
  if (value === null || value === void 0) return value;
  if (typeof value !== "object") return value;

  if (isTimestamp(value)) {
    return {
      [SERIALIZATION_TAG]: "Timestamp",
      seconds: value.seconds,
      nanoseconds: value.nanoseconds
    };
  }
  if (isGeoPoint(value)) {
    return {
      [SERIALIZATION_TAG]: "GeoPoint",
      latitude: value.latitude,
      longitude: value.longitude
    };
  }
  if (isDocumentReference(value)) {
    // Only the path survives; the live reference is rebuilt on deserialize.
    return { [SERIALIZATION_TAG]: "DocumentReference", path: value.path };
  }
  if (isVectorValue(value)) {
    const vector = value;
    const raw = typeof vector.toArray === "function" ? vector.toArray() : vector._values;
    return { [SERIALIZATION_TAG]: "VectorValue", values: [...raw] };
  }
  if (Array.isArray(value)) {
    return value.map(serializeValue);
  }
  // Plain object: serialize each own enumerable property.
  return Object.fromEntries(
    Object.keys(value).map((key) => [key, serializeValue(value[key])])
  );
}
99
/**
 * Reverses serializeFirestoreTypes(): rebuilds Firestore values from tagged
 * plain objects. `db` (optional Firestore instance) is only needed to
 * reconstruct DocumentReference values; without it the tagged object is
 * returned as-is (with a one-time console warning).
 */
function deserializeFirestoreTypes(data, db) {
  return deserializeValue(data, db);
}
function deserializeValue(value, db) {
  // Primitives, null and undefined pass through untouched.
  if (value === null || value === void 0) return value;
  if (typeof value !== "object") return value;
  // Already-live Firestore values need no reconstruction.
  if (isTimestamp(value) || isGeoPoint(value) || isDocumentReference(value) || isVectorValue(value)) {
    return value;
  }
  if (Array.isArray(value)) {
    return value.map((v) => deserializeValue(v, db));
  }
  const obj = value;
  if (isTaggedValue(obj)) {
    const tag = obj[SERIALIZATION_TAG];
    switch (tag) {
      case "Timestamp":
        // Malformed payloads fall through unchanged rather than throwing.
        if (typeof obj.seconds !== "number" || typeof obj.nanoseconds !== "number") return obj;
        return new import_firestore.Timestamp(obj.seconds, obj.nanoseconds);
      case "GeoPoint":
        if (typeof obj.latitude !== "number" || typeof obj.longitude !== "number") return obj;
        return new import_firestore.GeoPoint(obj.latitude, obj.longitude);
      case "VectorValue":
        if (!Array.isArray(obj.values)) return obj;
        return import_firestore.FieldValue.vector(obj.values);
      case "DocumentReference":
        if (typeof obj.path !== "string") return obj;
        if (db) {
          return db.doc(obj.path);
        }
        // No Firestore instance: warn once per process, keep the tagged form.
        if (!_docRefWarned) {
          _docRefWarned = true;
          console.warn(
            "[firegraph] DocumentReference encountered during migration deserialization but no Firestore instance available. The reference will remain as a tagged object with its path. Enable write-back for full reconstruction."
          );
        }
        return obj;
      default:
        // Unknown tag (shouldn't happen given isTaggedValue): leave as-is.
        return obj;
    }
  }
  // Untagged plain object: deserialize each own enumerable property.
  const result = {};
  for (const key of Object.keys(obj)) {
    result[key] = deserializeValue(obj[key], db);
  }
  return result;
}
146
// Module-level bindings for src/serialization.ts, assigned on first use via
// the lazy __esm initializer below.
var import_firestore, SERIALIZATION_TAG, KNOWN_TYPES, _docRefWarned;
var init_serialization = __esm({
  "src/serialization.ts"() {
    "use strict";
    // Firestore SDK is only required when serialization is actually used.
    import_firestore = require("@google-cloud/firestore");
    // Marker property identifying firegraph-serialized values.
    SERIALIZATION_TAG = "__firegraph_ser__";
    // Tags that deserializeValue() knows how to reconstruct.
    KNOWN_TYPES = /* @__PURE__ */ new Set(["Timestamp", "GeoPoint", "VectorValue", "DocumentReference"]);
    // One-shot guard for the missing-db DocumentReference warning.
    _docRefWarned = false;
  }
});
156
+
157
// src/do-sqlite.ts
// Public CJS entry point of this bundle; exposes only the client factory.
var do_sqlite_exports = {};
__export(do_sqlite_exports, {
  createDOSqliteGraphClient: () => createDOSqliteGraphClient
});
module.exports = __toCommonJS(do_sqlite_exports);
163
+
164
+ // src/docid.ts
165
+ var import_node_crypto = require("crypto");
166
+
167
// src/internal/constants.ts
// Relation type used for node self-edges: a node is stored as (A)-[is]->(A).
var NODE_RELATION = "is";
// Default cap applied to queries that do not specify a limit.
var DEFAULT_QUERY_LIMIT = 500;
// Record fields owned by firegraph itself; user data must not collide.
var BUILTIN_FIELDS = /* @__PURE__ */ new Set([
  "aType",
  "aUid",
  "axbType",
  "bType",
  "bUid",
  "createdAt",
  "updatedAt"
]);
// Separator used when composing sharded edge document ids.
var SHARD_SEPARATOR = ":";
180
+
181
+ // src/docid.ts
182
/**
 * Node documents are keyed directly by the node's uid (identity mapping);
 * kept as a function so the keying scheme can evolve in one place.
 */
function computeNodeDocId(uid) {
  return uid;
}
185
/**
 * Edge document id: `<shard>:<aUid>:<axbType>:<bUid>` where the shard is the
 * first hex digit of the sha256 of the composite key, spreading edges across
 * 16 shard prefixes.
 */
function computeEdgeDocId(aUid, axbType, bUid) {
  const composite = [aUid, axbType, bUid].join(SHARD_SEPARATOR);
  const digest = (0, import_node_crypto.createHash)("sha256").update(composite).digest("hex");
  return [digest[0], aUid, axbType, bUid].join(SHARD_SEPARATOR);
}
191
+
192
+ // src/batch.ts
193
/**
 * Builds the storable record for a node: nodes are modeled as a self-edge
 * (aType)-[is]->(aType) sharing the same uid on both ends.
 */
function buildWritableNodeRecord(aType, uid, data) {
  const record = {
    aType,
    aUid: uid,
    axbType: NODE_RELATION,
    bType: aType,
    bUid: uid,
    data
  };
  return record;
}

/** Builds the storable record for an edge triple plus its payload. */
function buildWritableEdgeRecord(aType, aUid, axbType, bType, bUid, data) {
  const record = { aType, aUid, axbType, bType, bUid, data };
  return record;
}
199
/**
 * Buffers node/edge writes against a backend batch. When a registry is
 * supplied, writes are validated (shape + scope) and stamped with the
 * registry entry's schemaVersion. Nothing is persisted until commit().
 */
var GraphBatchImpl = class {
  constructor(backend, registry, scopePath = "") {
    this.backend = backend;
    this.registry = registry;
    this.scopePath = scopePath;
  }
  /** Stamps record.v from the registry entry's schemaVersion, if any. */
  #stampVersion(record, aType, axbType, bType) {
    if (!this.registry) return;
    const entry = this.registry.lookup(aType, axbType, bType);
    if (entry?.schemaVersion && entry.schemaVersion > 0) {
      record.v = entry.schemaVersion;
    }
  }
  async putNode(aType, uid, data) {
    if (this.registry) {
      this.registry.validate(aType, NODE_RELATION, aType, data, this.scopePath);
    }
    const record = buildWritableNodeRecord(aType, uid, data);
    this.#stampVersion(record, aType, NODE_RELATION, aType);
    this.backend.setDoc(computeNodeDocId(uid), record);
  }
  async putEdge(aType, aUid, axbType, bType, bUid, data) {
    if (this.registry) {
      this.registry.validate(aType, axbType, bType, data, this.scopePath);
    }
    const record = buildWritableEdgeRecord(aType, aUid, axbType, bType, bUid, data);
    this.#stampVersion(record, aType, axbType, bType);
    this.backend.setDoc(computeEdgeDocId(aUid, axbType, bUid), record);
  }
  async updateNode(uid, data) {
    // Partial update: only the provided data fields are merged.
    this.backend.updateDoc(computeNodeDocId(uid), { dataFields: data });
  }
  async removeNode(uid) {
    this.backend.deleteDoc(computeNodeDocId(uid));
  }
  async removeEdge(aUid, axbType, bUid) {
    this.backend.deleteDoc(computeEdgeDocId(aUid, axbType, bUid));
  }
  /** Flushes all buffered writes to the backend. */
  async commit() {
    await this.backend.commit();
  }
};
249
+
250
+ // src/dynamic-registry.ts
251
+ var import_node_crypto3 = require("crypto");
252
+
253
+ // src/errors.ts
254
/**
 * Base class for all firegraph errors. Carries a machine-readable `code`
 * alongside the human-readable message.
 */
var FiregraphError = class extends Error {
  constructor(msg, errorCode) {
    super(msg);
    this.code = errorCode;
    this.name = "FiregraphError";
  }
};

/** Data failed JSON-schema validation; `details` holds validator errors. */
var ValidationError = class extends FiregraphError {
  constructor(msg, validatorDetails) {
    super(msg, "VALIDATION_ERROR");
    this.details = validatorDetails;
    this.name = "ValidationError";
  }
};

/** A write used a triple that is not declared in the registry. */
var RegistryViolationError = class extends FiregraphError {
  constructor(aType, axbType, bType) {
    super(`Unregistered triple: (${aType}) -[${axbType}]-> (${bType})`, "REGISTRY_VIOLATION");
    this.name = "RegistryViolationError";
  }
};

/** A query was structurally invalid. */
var InvalidQueryError = class extends FiregraphError {
  constructor(msg) {
    super(msg, "INVALID_QUERY");
    this.name = "InvalidQueryError";
  }
};

/** Failure in the dynamic (runtime-managed) registry. */
var DynamicRegistryError = class extends FiregraphError {
  constructor(msg) {
    super(msg, "DYNAMIC_REGISTRY_ERROR");
    this.name = "DynamicRegistryError";
  }
};

/** A query tripped a safety limit (e.g. unbounded scan). */
var QuerySafetyError = class extends FiregraphError {
  constructor(msg) {
    super(msg, "QUERY_SAFETY");
    this.name = "QuerySafetyError";
  }
};

/** A registered triple was used at a scope its `allowedIn` does not permit. */
var RegistryScopeError = class extends FiregraphError {
  constructor(aType, axbType, bType, scopePath, allowedIn) {
    super(
      `Type (${aType}) -[${axbType}]-> (${bType}) is not allowed at scope "${scopePath || "root"}". Allowed in: [${allowedIn.join(", ")}]`,
      "REGISTRY_SCOPE"
    );
    this.name = "RegistryScopeError";
  }
};

/** A schema-migration step failed or produced invalid data. */
var MigrationError = class extends FiregraphError {
  constructor(msg) {
    super(msg, "MIGRATION_ERROR");
    this.name = "MigrationError";
  }
};
307
+
308
// src/json-schema.ts
// Shared Ajv instance: collect all errors per validation; strict mode off so
// user-supplied schemas with unknown keywords still compile.
var import_ajv = __toESM(require("ajv"), 1);
var import_ajv_formats = __toESM(require("ajv-formats"), 1);
var ajv = new import_ajv.default({ allErrors: true, strict: false });
(0, import_ajv_formats.default)(ajv);
313
/**
 * Compiles a JSON schema into a validator function that throws
 * ValidationError (with the raw Ajv errors attached) on failure and returns
 * undefined on success. `label` is woven into the error message.
 */
function compileSchema(schema, label) {
  const validateFn = ajv.compile(schema);
  const describe = (err) => `${err.instancePath || "/"}${err.message ? ": " + err.message : ""}`;
  return (data) => {
    if (validateFn(data)) return;
    const errors = validateFn.errors ?? [];
    const detail = errors.map(describe).join("; ");
    throw new ValidationError(
      `Data validation failed${label ? " for " + label : ""}: ${detail}`,
      errors
    );
  };
}
326
+
327
+ // src/migration.ts
328
/**
 * Applies matching migration steps in fromVersion order, starting from
 * `currentVersion`, until `targetVersion` is reached. Throws MigrationError
 * if a step fails, returns a non-object, or the chain cannot reach the
 * target. The input `data` is shallow-copied, never mutated.
 */
async function applyMigrationChain(data, currentVersion, targetVersion, migrations) {
  const steps = [...migrations].sort((x, y) => x.fromVersion - y.fromVersion);
  let working = { ...data };
  let reached = currentVersion;
  for (const step of steps) {
    if (step.fromVersion !== reached) continue;
    try {
      working = await step.up(working);
    } catch (err) {
      if (err instanceof MigrationError) throw err;
      throw new MigrationError(
        `Migration from v${step.fromVersion} to v${step.toVersion} failed: ${err.message}`
      );
    }
    if (!working || typeof working !== "object") {
      throw new MigrationError(
        `Migration from v${step.fromVersion} to v${step.toVersion} returned invalid data (expected object)`
      );
    }
    reached = step.toVersion;
  }
  if (reached !== targetVersion) {
    throw new MigrationError(
      `Incomplete migration chain: reached v${reached} but target is v${targetVersion}`
    );
  }
  return working;
}

/**
 * Statically checks a migration list: every step must move forward, no two
 * steps may share a fromVersion, and starting from v0 the steps must chain
 * without gaps all the way to the highest toVersion. Throws MigrationError
 * (prefixed with `label`) on the first violation; returns undefined if valid.
 */
function validateMigrationChain(migrations, label) {
  if (migrations.length === 0) return;
  const seenFrom = /* @__PURE__ */ new Set();
  for (const step of migrations) {
    if (step.toVersion <= step.fromVersion) {
      throw new MigrationError(
        `${label}: migration step has toVersion (${step.toVersion}) <= fromVersion (${step.fromVersion})`
      );
    }
    if (seenFrom.has(step.fromVersion)) {
      throw new MigrationError(
        `${label}: duplicate migration step for fromVersion ${step.fromVersion}`
      );
    }
    seenFrom.add(step.fromVersion);
  }
  const ordered = [...migrations].sort((x, y) => x.fromVersion - y.fromVersion);
  const targetVersion = Math.max(...migrations.map((m) => m.toVersion));
  let reached = 0;
  for (const step of ordered) {
    if (step.fromVersion === reached) {
      reached = step.toVersion;
    } else if (step.fromVersion > reached) {
      throw new MigrationError(
        `${label}: migration chain has a gap \u2014 no step covers v${reached} \u2192 v${step.fromVersion}`
      );
    }
  }
  if (reached !== targetVersion) {
    throw new MigrationError(
      `${label}: migration chain does not reach v${targetVersion} (stuck at v${reached})`
    );
  }
}

/**
 * Migrates a single stored record to its registry entry's schemaVersion.
 * No-op (migrated: false, writeBack "off") when the entry has no migrations
 * or the record is already current. The write-back policy resolves
 * entry-level first, then the global default.
 */
async function migrateRecord(record, registry, globalWriteBack = "off") {
  const entry = registry.lookup(record.aType, record.axbType, record.bType);
  if (!entry?.migrations?.length || !entry.schemaVersion) {
    return { record, migrated: false, writeBack: "off" };
  }
  const currentVersion = record.v ?? 0;
  if (currentVersion >= entry.schemaVersion) {
    return { record, migrated: false, writeBack: "off" };
  }
  const migratedData = await applyMigrationChain(
    record.data,
    currentVersion,
    entry.schemaVersion,
    entry.migrations
  );
  return {
    record: { ...record, data: migratedData, v: entry.schemaVersion },
    migrated: true,
    writeBack: entry.migrationWriteBack ?? globalWriteBack ?? "off"
  };
}

/** Migrates a batch of records concurrently (order preserved). */
async function migrateRecords(records, registry, globalWriteBack = "off") {
  const tasks = records.map((record) => migrateRecord(record, registry, globalWriteBack));
  return Promise.all(tasks);
}
418
+
419
+ // src/scope.ts
420
/**
 * Glob-style scope matcher. `"root"` matches only the empty path and `"**"`
 * matches everything; otherwise the path is split on "/" and matched
 * segment-wise, where `*` matches exactly one segment and `**` matches any
 * number (including zero) of segments.
 */
function matchScope(scopePath, pattern) {
  if (pattern === "root") return scopePath === "";
  if (pattern === "**") return true;
  const segments = scopePath === "" ? [] : scopePath.split("/");
  return matchSegments(segments, 0, pattern.split("/"), 0);
}

/** True when `scopePath` matches any pattern; an empty/absent list allows all. */
function matchScopeAny(scopePath, patterns) {
  if (!patterns || patterns.length === 0) return true;
  return patterns.some((pattern) => matchScope(scopePath, pattern));
}

// Recursive segment matcher over (path[pi..], pattern[qi..]).
function matchSegments(path, pi, pattern, qi) {
  const pathDone = pi === path.length;
  const patternDone = qi === pattern.length;
  if (pathDone && patternDone) return true;
  if (patternDone) return false;
  const token = pattern[qi];
  if (token === "**") {
    // Trailing ** swallows the rest of the path unconditionally.
    if (qi === pattern.length - 1) return true;
    // Otherwise try consuming 0..remaining segments.
    for (let skip = 0; skip <= path.length - pi; skip++) {
      if (matchSegments(path, pi + skip, pattern, qi + 1)) return true;
    }
    return false;
  }
  if (pathDone) return false;
  if (token === "*" || token === path[pi]) {
    return matchSegments(path, pi + 1, pattern, qi + 1);
  }
  return false;
}
451
+
452
+ // src/registry.ts
453
/** Canonical map key for an (aType, axbType, bType) triple. */
function tripleKey(aType, axbType, bType) {
  return [aType, axbType, bType].join(":");
}

/** tripleKey() derived from a registry entry. */
function tripleKeyFor(e) {
  const { aType, axbType, bType } = e;
  return tripleKey(aType, axbType, bType);
}
459
/**
 * Builds an immutable registry from either a flat entry array or a codegen
 * discovery object. Validates targetGraph and migration chains up front,
 * compiles per-entry JSON-schema validators, and indexes entries both by
 * full triple and by axbType.
 *
 * NOTE: entries are mutated in place here (schemaVersion is derived from the
 * migration list, or cleared) — callers should not share entry objects.
 */
function createRegistry(input) {
  const map = /* @__PURE__ */ new Map();
  let entries;
  if (Array.isArray(input)) {
    entries = input;
  } else {
    entries = discoveryToEntries(input);
  }
  // Frozen snapshot returned by entries(); the entry objects themselves are
  // still mutated below (schemaVersion), only the array is frozen.
  const entryList = Object.freeze([...entries]);
  for (const entry of entries) {
    // targetGraph must be a single path segment (it becomes a scope segment).
    if (entry.targetGraph && entry.targetGraph.includes("/")) {
      throw new ValidationError(
        `Entry (${entry.aType}) -[${entry.axbType}]-> (${entry.bType}) has invalid targetGraph "${entry.targetGraph}" \u2014 must be a single segment (no "/")`
      );
    }
    if (entry.migrations?.length) {
      const label = `Entry (${entry.aType}) -[${entry.axbType}]-> (${entry.bType})`;
      validateMigrationChain(entry.migrations, label);
      // schemaVersion is always derived, never trusted from the input.
      entry.schemaVersion = Math.max(...entry.migrations.map((m) => m.toVersion));
    } else {
      entry.schemaVersion = void 0;
    }
    const key = tripleKey(entry.aType, entry.axbType, entry.bType);
    const validator = entry.jsonSchema ? compileSchema(entry.jsonSchema, `(${entry.aType}) -[${entry.axbType}]-> (${entry.bType})`) : void 0;
    map.set(key, { entry, validate: validator });
  }
  // Secondary index: axbType -> frozen list of entries with that relation.
  const axbIndex = /* @__PURE__ */ new Map();
  const axbBuild = /* @__PURE__ */ new Map();
  for (const entry of entries) {
    const existing = axbBuild.get(entry.axbType);
    if (existing) {
      existing.push(entry);
    } else {
      axbBuild.set(entry.axbType, [entry]);
    }
  }
  for (const [key, arr] of axbBuild) {
    axbIndex.set(key, Object.freeze(arr));
  }
  return {
    /** Returns the entry for an exact triple, or undefined. */
    lookup(aType, axbType, bType) {
      return map.get(tripleKey(aType, axbType, bType))?.entry;
    },
    /** Returns all entries sharing a relation type (possibly empty). */
    lookupByAxbType(axbType) {
      return axbIndex.get(axbType) ?? [];
    },
    /**
     * Throws RegistryViolationError for unknown triples, RegistryScopeError
     * when scopePath is given and disallowed, ValidationError on bad data.
     */
    validate(aType, axbType, bType, data, scopePath) {
      const rec = map.get(tripleKey(aType, axbType, bType));
      if (!rec) {
        throw new RegistryViolationError(aType, axbType, bType);
      }
      // Scope check only applies when a scopePath was supplied AND the entry
      // restricts placement; an empty allowedIn means "anywhere".
      if (scopePath !== void 0 && rec.entry.allowedIn && rec.entry.allowedIn.length > 0) {
        if (!matchScopeAny(scopePath, rec.entry.allowedIn)) {
          throw new RegistryScopeError(aType, axbType, bType, scopePath, rec.entry.allowedIn);
        }
      }
      if (rec.validate) {
        try {
          rec.validate(data);
        } catch (err) {
          if (err instanceof ValidationError) throw err;
          // Wrap unexpected validator failures so callers see one error type.
          throw new ValidationError(
            `Data validation failed for (${aType}) -[${axbType}]-> (${bType})`,
            err
          );
        }
      }
    },
    entries() {
      return entryList;
    }
  };
}
532
/**
 * Overlays an extension registry on a base registry. The base always wins:
 * lookups fall back to the extension only when the base misses, validation
 * is routed to whichever registry owns the triple, and merged listings keep
 * base entries and append extension-only ones.
 */
function createMergedRegistry(base, extension) {
  const baseKeys = new Set(base.entries().map(tripleKeyFor));
  const ownedByBase = (aType, axbType, bType) => baseKeys.has(tripleKey(aType, axbType, bType));
  return {
    lookup(aType, axbType, bType) {
      return base.lookup(aType, axbType, bType) ?? extension.lookup(aType, axbType, bType);
    },
    lookupByAxbType(axbType) {
      const fromBase = base.lookupByAxbType(axbType);
      const fromExt = extension.lookupByAxbType(axbType);
      // Fast paths: one side empty means the other list can be reused as-is.
      if (fromExt.length === 0) return fromBase;
      if (fromBase.length === 0) return fromExt;
      const taken = new Set(fromBase.map(tripleKeyFor));
      const combined = [...fromBase];
      for (const candidate of fromExt) {
        if (!taken.has(tripleKeyFor(candidate))) {
          combined.push(candidate);
        }
      }
      return Object.freeze(combined);
    },
    validate(aType, axbType, bType, data, scopePath) {
      const target = ownedByBase(aType, axbType, bType) ? base : extension;
      return target.validate(aType, axbType, bType, data, scopePath);
    },
    entries() {
      const extEntries = extension.entries();
      if (extEntries.length === 0) return base.entries();
      const combined = [...base.entries()];
      for (const entry of extEntries) {
        if (!baseKeys.has(tripleKeyFor(entry))) {
          combined.push(entry);
        }
      }
      return Object.freeze(combined);
    }
  };
}
571
/**
 * Flattens a codegen discovery object ({ nodes, edges } maps) into registry
 * entries. Each node becomes one self-edge entry; each edge fans out into
 * one entry per (from, to) type combination from its topology.
 */
function discoveryToEntries(discovery) {
  const entries = [];
  for (const [name, entity] of discovery.nodes) {
    // Nodes are stored as (name)-[is]->(name) self-edges.
    entries.push({
      aType: name,
      axbType: NODE_RELATION,
      bType: name,
      jsonSchema: entity.schema,
      description: entity.description,
      titleField: entity.titleField,
      subtitleField: entity.subtitleField,
      allowedIn: entity.allowedIn,
      migrations: entity.migrations,
      migrationWriteBack: entity.migrationWriteBack
    });
  }
  for (const [axbType, entity] of discovery.edges) {
    const topology = entity.topology;
    // Edges without topology cannot be expanded into triples; skip them.
    if (!topology) continue;
    const fromTypes = Array.isArray(topology.from) ? topology.from : [topology.from];
    const toTypes = Array.isArray(topology.to) ? topology.to : [topology.to];
    // Entity-level targetGraph overrides the topology-level one.
    const resolvedTargetGraph = entity.targetGraph ?? topology.targetGraph;
    if (resolvedTargetGraph && resolvedTargetGraph.includes("/")) {
      throw new ValidationError(
        `Edge "${axbType}" has invalid targetGraph "${resolvedTargetGraph}" \u2014 must be a single segment (no "/")`
      );
    }
    // Cartesian product: one entry per (from, to) pairing.
    for (const aType of fromTypes) {
      for (const bType of toTypes) {
        entries.push({
          aType,
          axbType,
          bType,
          jsonSchema: entity.schema,
          description: entity.description,
          inverseLabel: topology.inverseLabel,
          titleField: entity.titleField,
          subtitleField: entity.subtitleField,
          allowedIn: entity.allowedIn,
          targetGraph: resolvedTargetGraph,
          migrations: entity.migrations,
          migrationWriteBack: entity.migrationWriteBack
        });
      }
    }
  }
  return entries;
}
619
+
620
// src/sandbox.ts
var import_node_crypto2 = require("crypto");
// Placeholder for import.meta in the CJS build; `url` is read by
// ensureWorker() and may be undefined here (acceptable: only used so the
// worker can resolve 'ses' relative to this module).
var import_meta = {};
// Singleton sandbox worker plus request bookkeeping: a monotonically
// increasing request id and a map of in-flight id -> {resolve, reject}.
var _worker = null;
var _requestId = 0;
var _pending = /* @__PURE__ */ new Map();
626
// Source of the sandbox worker, evaluated with `new Worker(src, { eval: true })`.
// It locks the realm down with SES, then compiles user-supplied migration
// functions inside a Compartment and executes them on JSON-string payloads.
// Protocol: {id, type: 'compile'|'execute', source, jsonData} in;
// {id, type: 'compiled'|'result'|'error', ...} out.
var WORKER_SOURCE = [
  `'use strict';`,
  `var _wt = require('node:worker_threads');`,
  `var _mod = require('node:module');`,
  `var _crypto = require('node:crypto');`,
  `var parentPort = _wt.parentPort;`,
  `var workerData = _wt.workerData;`,
  ``,
  `// Load SES using the parent module's resolution context`,
  `var esmRequire = _mod.createRequire(workerData.parentUrl);`,
  `esmRequire('ses');`,
  ``,
  `lockdown({`,
  ` errorTaming: 'unsafe',`,
  ` consoleTaming: 'unsafe',`,
  ` evalTaming: 'safe-eval',`,
  ` overrideTaming: 'moderate',`,
  ` stackFiltering: 'verbose'`,
  `});`,
  ``,
  `// Defense-in-depth: verify lockdown() actually hardened JSON.`,
  `if (!Object.isFrozen(JSON)) {`,
  ` throw new Error('SES lockdown failed: JSON is not frozen');`,
  `}`,
  ``,
  `var cache = new Map();`,
  ``,
  `function hashSource(s) {`,
  ` return _crypto.createHash('sha256').update(s).digest('hex');`,
  `}`,
  ``,
  // Wraps a migration source string so the compartment-evaluated function
  // takes/returns JSON strings and unwraps promise results.
  `function buildWrapper(source) {`,
  ` return '(function() {' +`,
  ` ' var fn = (' + source + ');\\n' +`,
  ` ' if (typeof fn !== "function") return null;\\n' +`,
  ` ' return function(jsonIn) {\\n' +`,
  ` ' var data = JSON.parse(jsonIn);\\n' +`,
  ` ' var result = fn(data);\\n' +`,
  ` ' if (result !== null && typeof result === "object" && typeof result.then === "function") {\\n' +`,
  ` ' return result.then(function(r) { return JSON.stringify(r); });\\n' +`,
  ` ' }\\n' +`,
  ` ' return JSON.stringify(result);\\n' +`,
  ` ' };\\n' +`,
  ` '})()';`,
  `}`,
  ``,
  // Compiles (and caches by sha256 of source) inside a fresh Compartment
  // that exposes only the hardened JSON global.
  `function compileSource(source) {`,
  ` var key = hashSource(source);`,
  ` var cached = cache.get(key);`,
  ` if (cached) return cached;`,
  ``,
  ` var compartmentFn;`,
  ` try {`,
  ` var c = new Compartment({ JSON: JSON });`,
  ` compartmentFn = c.evaluate(buildWrapper(source));`,
  ` } catch (err) {`,
  ` throw new Error('Failed to compile migration source: ' + (err.message || String(err)));`,
  ` }`,
  ``,
  ` if (typeof compartmentFn !== 'function') {`,
  ` throw new Error('Migration source did not produce a function: ' + source.slice(0, 80));`,
  ` }`,
  ``,
  ` cache.set(key, compartmentFn);`,
  ` return compartmentFn;`,
  `}`,
  ``,
  // Message loop: every reply echoes the request id for correlation.
  `parentPort.on('message', function(msg) {`,
  ` var id = msg.id;`,
  ` try {`,
  ` if (msg.type === 'compile') {`,
  ` compileSource(msg.source);`,
  ` parentPort.postMessage({ id: id, type: 'compiled' });`,
  ` return;`,
  ` }`,
  ` if (msg.type === 'execute') {`,
  ` var fn = compileSource(msg.source);`,
  ` var raw;`,
  ` try {`,
  ` raw = fn(msg.jsonData);`,
  ` } catch (err) {`,
  ` parentPort.postMessage({ id: id, type: 'error', message: 'Migration function threw: ' + (err.message || String(err)) });`,
  ` return;`,
  ` }`,
  ` if (raw !== null && typeof raw === 'object' && typeof raw.then === 'function') {`,
  ` raw.then(`,
  ` function(jsonResult) {`,
  ` if (jsonResult === undefined || jsonResult === null) {`,
  ` parentPort.postMessage({ id: id, type: 'error', message: 'Migration returned a non-JSON-serializable value' });`,
  ` } else {`,
  ` parentPort.postMessage({ id: id, type: 'result', jsonResult: jsonResult });`,
  ` }`,
  ` },`,
  ` function(err) {`,
  ` parentPort.postMessage({ id: id, type: 'error', message: 'Async migration function threw: ' + (err.message || String(err)) });`,
  ` }`,
  ` );`,
  ` return;`,
  ` }`,
  ` if (raw === undefined || raw === null) {`,
  ` parentPort.postMessage({ id: id, type: 'error', message: 'Migration returned a non-JSON-serializable value' });`,
  ` } else {`,
  ` parentPort.postMessage({ id: id, type: 'result', jsonResult: raw });`,
  ` }`,
  ` }`,
  ` } catch (err) {`,
  ` parentPort.postMessage({ id: id, type: 'error', message: err.message || String(err) });`,
  ` }`,
  `});`
].join("\n");
736
var _WorkerCtor = null;

/**
 * Lazily resolves the worker_threads Worker constructor via dynamic import
 * and caches it for subsequent calls.
 */
async function loadWorkerCtor() {
  if (_WorkerCtor !== null) return _WorkerCtor;
  const { Worker } = await import("worker_threads");
  _WorkerCtor = Worker;
  return _WorkerCtor;
}
743
/**
 * Returns the singleton sandbox worker, creating and wiring it on first use.
 * The worker is unref()'d so it never keeps the process alive. On worker
 * error/exit every in-flight request is rejected and the singleton is reset
 * so the next call spawns a fresh worker.
 */
async function ensureWorker() {
  if (_worker) return _worker;
  const Ctor = await loadWorkerCtor();
  _worker = new Ctor(WORKER_SOURCE, {
    eval: true,
    // parentUrl lets the worker resolve 'ses' from this module's location.
    workerData: { parentUrl: import_meta.url }
  });
  _worker.unref();
  _worker.on("message", (msg) => {
    // Ignore messages without a correlation id (not ours).
    if (msg.id === void 0) return;
    const pending = _pending.get(msg.id);
    if (!pending) return;
    _pending.delete(msg.id);
    if (msg.type === "error") {
      pending.reject(new MigrationError(msg.message ?? "Unknown sandbox error"));
    } else {
      pending.resolve(msg);
    }
  });
  _worker.on("error", (err) => {
    // Fail everything in flight; the worker is unusable after 'error'.
    for (const [, p] of _pending) {
      p.reject(new MigrationError(`Sandbox worker error: ${err.message}`));
    }
    _pending.clear();
    _worker = null;
  });
  _worker.on("exit", (code) => {
    if (_pending.size > 0) {
      for (const [, p] of _pending) {
        p.reject(new MigrationError(`Sandbox worker exited with code ${code}`));
      }
      _pending.clear();
    }
    _worker = null;
  });
  return _worker;
}
780
/**
 * Sends one request to the sandbox worker and resolves with its correlated
 * reply (or rejects via the worker's message/error handlers in ensureWorker).
 */
async function sendToWorker(msg) {
  const worker = await ensureWorker();
  // Wrap the counter before it can lose integer precision.
  if (_requestId >= Number.MAX_SAFE_INTEGER) {
    _requestId = 0;
  }
  _requestId += 1;
  const id = _requestId;
  return new Promise((resolve, reject) => {
    _pending.set(id, { resolve, reject });
    worker.postMessage({ ...msg, id });
  });
}
789
// Per-executor compile caches, keyed weakly so executors can be GC'd.
var compiledCache = /* @__PURE__ */ new WeakMap();

/** Returns (creating if needed) the source-hash -> compiled-fn cache for an executor. */
function getExecutorCache(executor) {
  const existing = compiledCache.get(executor);
  if (existing) return existing;
  const fresh = /* @__PURE__ */ new Map();
  compiledCache.set(executor, fresh);
  return fresh;
}
798
+ function hashSource(source) {
799
+ return (0, import_node_crypto2.createHash)("sha256").update(source).digest("hex");
800
+ }
801
+ var _serializationModule = null;
802
+ async function loadSerialization() {
803
+ if (_serializationModule) return _serializationModule;
804
+ _serializationModule = await Promise.resolve().then(() => (init_serialization(), serialization_exports));
805
+ return _serializationModule;
806
+ }
807
+ function defaultExecutor(source) {
808
+ return (async (data) => {
809
+ const { serializeFirestoreTypes: serializeFirestoreTypes2, deserializeFirestoreTypes: deserializeFirestoreTypes2 } = await loadSerialization();
810
+ const jsonData = JSON.stringify(serializeFirestoreTypes2(data));
811
+ const response = await sendToWorker({ type: "execute", source, jsonData });
812
+ if (response.jsonResult === void 0 || response.jsonResult === null) {
813
+ throw new MigrationError("Migration returned a non-JSON-serializable value");
814
+ }
815
+ try {
816
+ return deserializeFirestoreTypes2(JSON.parse(response.jsonResult));
817
+ } catch {
818
+ throw new MigrationError("Migration returned a non-JSON-serializable value");
819
+ }
820
+ });
821
+ }
822
+ async function precompileSource(source, executor) {
823
+ if (executor && executor !== defaultExecutor) {
824
+ try {
825
+ executor(source);
826
+ } catch (err) {
827
+ if (err instanceof MigrationError) throw err;
828
+ throw new MigrationError(`Failed to compile migration source: ${err.message}`);
829
+ }
830
+ return;
831
+ }
832
+ await sendToWorker({ type: "compile", source });
833
+ }
834
+ function compileMigrationFn(source, executor = defaultExecutor) {
835
+ const cache = getExecutorCache(executor);
836
+ const key = hashSource(source);
837
+ const cached = cache.get(key);
838
+ if (cached) return cached;
839
+ try {
840
+ const fn = executor(source);
841
+ cache.set(key, fn);
842
+ return fn;
843
+ } catch (err) {
844
+ if (err instanceof MigrationError) throw err;
845
+ throw new MigrationError(`Failed to compile migration source: ${err.message}`);
846
+ }
847
+ }
848
+ function compileMigrations(stored, executor) {
849
+ return stored.map((step) => ({
850
+ fromVersion: step.fromVersion,
851
+ toVersion: step.toVersion,
852
+ up: compileMigrationFn(step.up, executor)
853
+ }));
854
+ }
855
+
856
+ // src/dynamic-registry.ts
857
+ var META_NODE_TYPE = "nodeType";
858
+ var META_EDGE_TYPE = "edgeType";
859
+ var STORED_MIGRATION_STEP_SCHEMA = {
860
+ type: "object",
861
+ required: ["fromVersion", "toVersion", "up"],
862
+ properties: {
863
+ fromVersion: { type: "integer", minimum: 0 },
864
+ toVersion: { type: "integer", minimum: 1 },
865
+ up: { type: "string", minLength: 1 }
866
+ },
867
+ additionalProperties: false
868
+ };
869
+ var NODE_TYPE_SCHEMA = {
870
+ type: "object",
871
+ required: ["name", "jsonSchema"],
872
+ properties: {
873
+ name: { type: "string", minLength: 1 },
874
+ jsonSchema: { type: "object" },
875
+ description: { type: "string" },
876
+ titleField: { type: "string" },
877
+ subtitleField: { type: "string" },
878
+ viewTemplate: { type: "string" },
879
+ viewCss: { type: "string" },
880
+ allowedIn: { type: "array", items: { type: "string", minLength: 1 } },
881
+ schemaVersion: { type: "integer", minimum: 0 },
882
+ migrations: { type: "array", items: STORED_MIGRATION_STEP_SCHEMA },
883
+ migrationWriteBack: { type: "string", enum: ["off", "eager", "background"] }
884
+ },
885
+ additionalProperties: false
886
+ };
887
+ var EDGE_TYPE_SCHEMA = {
888
+ type: "object",
889
+ required: ["name", "from", "to"],
890
+ properties: {
891
+ name: { type: "string", minLength: 1 },
892
+ from: {
893
+ oneOf: [
894
+ { type: "string", minLength: 1 },
895
+ { type: "array", items: { type: "string", minLength: 1 }, minItems: 1 }
896
+ ]
897
+ },
898
+ to: {
899
+ oneOf: [
900
+ { type: "string", minLength: 1 },
901
+ { type: "array", items: { type: "string", minLength: 1 }, minItems: 1 }
902
+ ]
903
+ },
904
+ jsonSchema: { type: "object" },
905
+ inverseLabel: { type: "string" },
906
+ description: { type: "string" },
907
+ titleField: { type: "string" },
908
+ subtitleField: { type: "string" },
909
+ viewTemplate: { type: "string" },
910
+ viewCss: { type: "string" },
911
+ allowedIn: { type: "array", items: { type: "string", minLength: 1 } },
912
+ targetGraph: { type: "string", minLength: 1, pattern: "^[^/]+$" },
913
+ schemaVersion: { type: "integer", minimum: 0 },
914
+ migrations: { type: "array", items: STORED_MIGRATION_STEP_SCHEMA },
915
+ migrationWriteBack: { type: "string", enum: ["off", "eager", "background"] }
916
+ },
917
+ additionalProperties: false
918
+ };
919
+ var BOOTSTRAP_ENTRIES = [
920
+ {
921
+ aType: META_NODE_TYPE,
922
+ axbType: NODE_RELATION,
923
+ bType: META_NODE_TYPE,
924
+ jsonSchema: NODE_TYPE_SCHEMA,
925
+ description: "Meta-type: defines a node type"
926
+ },
927
+ {
928
+ aType: META_EDGE_TYPE,
929
+ axbType: NODE_RELATION,
930
+ bType: META_EDGE_TYPE,
931
+ jsonSchema: EDGE_TYPE_SCHEMA,
932
+ description: "Meta-type: defines an edge type"
933
+ }
934
+ ];
935
+ function createBootstrapRegistry() {
936
+ return createRegistry([...BOOTSTRAP_ENTRIES]);
937
+ }
938
+ function generateDeterministicUid(metaType, name) {
939
+ const hash = (0, import_node_crypto3.createHash)("sha256").update(`${metaType}:${name}`).digest("base64url");
940
+ return hash.slice(0, 21);
941
+ }
942
+ async function createRegistryFromGraph(reader, executor) {
943
+ const [nodeTypes, edgeTypes] = await Promise.all([
944
+ reader.findNodes({ aType: META_NODE_TYPE }),
945
+ reader.findNodes({ aType: META_EDGE_TYPE })
946
+ ]);
947
+ const entries = [...BOOTSTRAP_ENTRIES];
948
+ const prevalidations = [];
949
+ for (const record of nodeTypes) {
950
+ const data = record.data;
951
+ if (data.migrations) {
952
+ for (const m of data.migrations) {
953
+ prevalidations.push(precompileSource(m.up, executor));
954
+ }
955
+ }
956
+ }
957
+ for (const record of edgeTypes) {
958
+ const data = record.data;
959
+ if (data.migrations) {
960
+ for (const m of data.migrations) {
961
+ prevalidations.push(precompileSource(m.up, executor));
962
+ }
963
+ }
964
+ }
965
+ await Promise.all(prevalidations);
966
+ for (const record of nodeTypes) {
967
+ const data = record.data;
968
+ entries.push({
969
+ aType: data.name,
970
+ axbType: NODE_RELATION,
971
+ bType: data.name,
972
+ jsonSchema: data.jsonSchema,
973
+ description: data.description,
974
+ titleField: data.titleField,
975
+ subtitleField: data.subtitleField,
976
+ allowedIn: data.allowedIn,
977
+ migrations: data.migrations ? compileMigrations(data.migrations, executor) : void 0,
978
+ migrationWriteBack: data.migrationWriteBack
979
+ });
980
+ }
981
+ for (const record of edgeTypes) {
982
+ const data = record.data;
983
+ const fromTypes = Array.isArray(data.from) ? data.from : [data.from];
984
+ const toTypes = Array.isArray(data.to) ? data.to : [data.to];
985
+ const compiledMigrations = data.migrations ? compileMigrations(data.migrations, executor) : void 0;
986
+ for (const aType of fromTypes) {
987
+ for (const bType of toTypes) {
988
+ entries.push({
989
+ aType,
990
+ axbType: data.name,
991
+ bType,
992
+ jsonSchema: data.jsonSchema,
993
+ description: data.description,
994
+ inverseLabel: data.inverseLabel,
995
+ titleField: data.titleField,
996
+ subtitleField: data.subtitleField,
997
+ allowedIn: data.allowedIn,
998
+ targetGraph: data.targetGraph,
999
+ migrations: compiledMigrations,
1000
+ migrationWriteBack: data.migrationWriteBack
1001
+ });
1002
+ }
1003
+ }
1004
+ }
1005
+ return createRegistry(entries);
1006
+ }
1007
+
1008
+ // src/query.ts
1009
+ function buildEdgeQueryPlan(params) {
1010
+ const { aType, aUid, axbType, bType, bUid, limit, orderBy } = params;
1011
+ if (aUid && axbType && bUid && !params.where?.length) {
1012
+ return { strategy: "get", docId: computeEdgeDocId(aUid, axbType, bUid) };
1013
+ }
1014
+ const filters = [];
1015
+ if (aType) filters.push({ field: "aType", op: "==", value: aType });
1016
+ if (aUid) filters.push({ field: "aUid", op: "==", value: aUid });
1017
+ if (axbType) filters.push({ field: "axbType", op: "==", value: axbType });
1018
+ if (bType) filters.push({ field: "bType", op: "==", value: bType });
1019
+ if (bUid) filters.push({ field: "bUid", op: "==", value: bUid });
1020
+ if (params.where) {
1021
+ for (const clause of params.where) {
1022
+ const field = BUILTIN_FIELDS.has(clause.field) ? clause.field : clause.field.startsWith("data.") ? clause.field : `data.${clause.field}`;
1023
+ filters.push({ field, op: clause.op, value: clause.value });
1024
+ }
1025
+ }
1026
+ if (filters.length === 0) {
1027
+ throw new InvalidQueryError("findEdges requires at least one filter parameter");
1028
+ }
1029
+ const effectiveLimit = limit === void 0 ? DEFAULT_QUERY_LIMIT : limit || void 0;
1030
+ return { strategy: "query", filters, options: { limit: effectiveLimit, orderBy } };
1031
+ }
1032
+ function buildNodeQueryPlan(params) {
1033
+ const { aType, limit, orderBy } = params;
1034
+ const filters = [
1035
+ { field: "aType", op: "==", value: aType },
1036
+ { field: "axbType", op: "==", value: NODE_RELATION }
1037
+ ];
1038
+ if (params.where) {
1039
+ for (const clause of params.where) {
1040
+ const field = BUILTIN_FIELDS.has(clause.field) ? clause.field : clause.field.startsWith("data.") ? clause.field : `data.${clause.field}`;
1041
+ filters.push({ field, op: clause.op, value: clause.value });
1042
+ }
1043
+ }
1044
+ const effectiveLimit = limit === void 0 ? DEFAULT_QUERY_LIMIT : limit || void 0;
1045
+ return { strategy: "query", filters, options: { limit: effectiveLimit, orderBy } };
1046
+ }
1047
+
1048
+ // src/query-safety.ts
1049
+ var SAFE_INDEX_PATTERNS = [
1050
+ /* @__PURE__ */ new Set(["aUid", "axbType"]),
1051
+ /* @__PURE__ */ new Set(["axbType", "bUid"]),
1052
+ /* @__PURE__ */ new Set(["aType", "axbType"]),
1053
+ /* @__PURE__ */ new Set(["axbType", "bType"])
1054
+ ];
1055
+ function analyzeQuerySafety(filters) {
1056
+ const builtinFieldsPresent = /* @__PURE__ */ new Set();
1057
+ let hasDataFilters = false;
1058
+ for (const f of filters) {
1059
+ if (BUILTIN_FIELDS.has(f.field)) {
1060
+ builtinFieldsPresent.add(f.field);
1061
+ } else {
1062
+ hasDataFilters = true;
1063
+ }
1064
+ }
1065
+ for (const pattern of SAFE_INDEX_PATTERNS) {
1066
+ let matched = true;
1067
+ for (const field of pattern) {
1068
+ if (!builtinFieldsPresent.has(field)) {
1069
+ matched = false;
1070
+ break;
1071
+ }
1072
+ }
1073
+ if (matched) {
1074
+ return { safe: true };
1075
+ }
1076
+ }
1077
+ const presentFields = [...builtinFieldsPresent];
1078
+ if (presentFields.length === 0 && hasDataFilters) {
1079
+ return {
1080
+ safe: false,
1081
+ reason: "Query filters only use data.* fields with no builtin field constraints. This requires a full collection scan. Add aType, aUid, axbType, bType, or bUid filters, or set allowCollectionScan: true."
1082
+ };
1083
+ }
1084
+ if (hasDataFilters) {
1085
+ return {
1086
+ safe: false,
1087
+ reason: `Query filters on [${presentFields.join(", ")}] do not match any indexed pattern. data.* filters without an indexed base require a full collection scan. Safe patterns: (aUid + axbType), (axbType + bUid), (aType + axbType), (axbType + bType). Set allowCollectionScan: true to override.`
1088
+ };
1089
+ }
1090
+ return {
1091
+ safe: false,
1092
+ reason: `Query filters on [${presentFields.join(", ")}] do not match any indexed pattern. This may cause a full collection scan on Firestore Enterprise. Safe patterns: (aUid + axbType), (axbType + bUid), (aType + axbType), (axbType + bType). Set allowCollectionScan: true to override.`
1093
+ };
1094
+ }
1095
+
1096
+ // src/transaction.ts
1097
+ function buildWritableNodeRecord2(aType, uid, data) {
1098
+ return { aType, aUid: uid, axbType: NODE_RELATION, bType: aType, bUid: uid, data };
1099
+ }
1100
+ function buildWritableEdgeRecord2(aType, aUid, axbType, bType, bUid, data) {
1101
+ return { aType, aUid, axbType, bType, bUid, data };
1102
+ }
1103
+ var GraphTransactionImpl = class {
1104
+ constructor(backend, registry, scanProtection = "error", scopePath = "", globalWriteBack = "off") {
1105
+ this.backend = backend;
1106
+ this.registry = registry;
1107
+ this.scanProtection = scanProtection;
1108
+ this.scopePath = scopePath;
1109
+ this.globalWriteBack = globalWriteBack;
1110
+ }
1111
+ async getNode(uid) {
1112
+ const docId = computeNodeDocId(uid);
1113
+ const record = await this.backend.getDoc(docId);
1114
+ if (!record || !this.registry) return record;
1115
+ const result = await migrateRecord(record, this.registry, this.globalWriteBack);
1116
+ if (result.migrated && result.writeBack !== "off") {
1117
+ await this.backend.updateDoc(docId, {
1118
+ replaceData: result.record.data,
1119
+ v: result.record.v
1120
+ });
1121
+ }
1122
+ return result.record;
1123
+ }
1124
+ async getEdge(aUid, axbType, bUid) {
1125
+ const docId = computeEdgeDocId(aUid, axbType, bUid);
1126
+ const record = await this.backend.getDoc(docId);
1127
+ if (!record || !this.registry) return record;
1128
+ const result = await migrateRecord(record, this.registry, this.globalWriteBack);
1129
+ if (result.migrated && result.writeBack !== "off") {
1130
+ await this.backend.updateDoc(docId, {
1131
+ replaceData: result.record.data,
1132
+ v: result.record.v
1133
+ });
1134
+ }
1135
+ return result.record;
1136
+ }
1137
+ async edgeExists(aUid, axbType, bUid) {
1138
+ const docId = computeEdgeDocId(aUid, axbType, bUid);
1139
+ const record = await this.backend.getDoc(docId);
1140
+ return record !== null;
1141
+ }
1142
+ checkQuerySafety(filters, allowCollectionScan) {
1143
+ if (allowCollectionScan || this.scanProtection === "off") return;
1144
+ const result = analyzeQuerySafety(filters);
1145
+ if (result.safe) return;
1146
+ if (this.scanProtection === "error") {
1147
+ throw new QuerySafetyError(result.reason);
1148
+ }
1149
+ console.warn(`[firegraph] Query safety warning: ${result.reason}`);
1150
+ }
1151
+ async findEdges(params) {
1152
+ const plan = buildEdgeQueryPlan(params);
1153
+ let records;
1154
+ if (plan.strategy === "get") {
1155
+ const record = await this.backend.getDoc(plan.docId);
1156
+ records = record ? [record] : [];
1157
+ } else {
1158
+ this.checkQuerySafety(plan.filters, params.allowCollectionScan);
1159
+ records = await this.backend.query(plan.filters, plan.options);
1160
+ }
1161
+ return this.applyMigrations(records);
1162
+ }
1163
+ async findNodes(params) {
1164
+ const plan = buildNodeQueryPlan(params);
1165
+ let records;
1166
+ if (plan.strategy === "get") {
1167
+ const record = await this.backend.getDoc(plan.docId);
1168
+ records = record ? [record] : [];
1169
+ } else {
1170
+ this.checkQuerySafety(plan.filters, params.allowCollectionScan);
1171
+ records = await this.backend.query(plan.filters, plan.options);
1172
+ }
1173
+ return this.applyMigrations(records);
1174
+ }
1175
+ async applyMigrations(records) {
1176
+ if (!this.registry || records.length === 0) return records;
1177
+ const results = await migrateRecords(records, this.registry, this.globalWriteBack);
1178
+ for (const result of results) {
1179
+ if (result.migrated && result.writeBack !== "off") {
1180
+ const docId = result.record.axbType === NODE_RELATION ? computeNodeDocId(result.record.aUid) : computeEdgeDocId(result.record.aUid, result.record.axbType, result.record.bUid);
1181
+ await this.backend.updateDoc(docId, {
1182
+ replaceData: result.record.data,
1183
+ v: result.record.v
1184
+ });
1185
+ }
1186
+ }
1187
+ return results.map((r) => r.record);
1188
+ }
1189
+ async putNode(aType, uid, data) {
1190
+ if (this.registry) {
1191
+ this.registry.validate(aType, NODE_RELATION, aType, data, this.scopePath);
1192
+ }
1193
+ const docId = computeNodeDocId(uid);
1194
+ const record = buildWritableNodeRecord2(aType, uid, data);
1195
+ if (this.registry) {
1196
+ const entry = this.registry.lookup(aType, NODE_RELATION, aType);
1197
+ if (entry?.schemaVersion && entry.schemaVersion > 0) {
1198
+ record.v = entry.schemaVersion;
1199
+ }
1200
+ }
1201
+ await this.backend.setDoc(docId, record);
1202
+ }
1203
+ async putEdge(aType, aUid, axbType, bType, bUid, data) {
1204
+ if (this.registry) {
1205
+ this.registry.validate(aType, axbType, bType, data, this.scopePath);
1206
+ }
1207
+ const docId = computeEdgeDocId(aUid, axbType, bUid);
1208
+ const record = buildWritableEdgeRecord2(aType, aUid, axbType, bType, bUid, data);
1209
+ if (this.registry) {
1210
+ const entry = this.registry.lookup(aType, axbType, bType);
1211
+ if (entry?.schemaVersion && entry.schemaVersion > 0) {
1212
+ record.v = entry.schemaVersion;
1213
+ }
1214
+ }
1215
+ await this.backend.setDoc(docId, record);
1216
+ }
1217
+ async updateNode(uid, data) {
1218
+ const docId = computeNodeDocId(uid);
1219
+ await this.backend.updateDoc(docId, { dataFields: data });
1220
+ }
1221
+ async removeNode(uid) {
1222
+ const docId = computeNodeDocId(uid);
1223
+ await this.backend.deleteDoc(docId);
1224
+ }
1225
+ async removeEdge(aUid, axbType, bUid) {
1226
+ const docId = computeEdgeDocId(aUid, axbType, bUid);
1227
+ await this.backend.deleteDoc(docId);
1228
+ }
1229
+ };
1230
+
1231
+ // src/client.ts
1232
+ var RESERVED_TYPE_NAMES = /* @__PURE__ */ new Set([META_NODE_TYPE, META_EDGE_TYPE]);
1233
+ function buildWritableNodeRecord3(aType, uid, data) {
1234
+ return { aType, aUid: uid, axbType: NODE_RELATION, bType: aType, bUid: uid, data };
1235
+ }
1236
+ function buildWritableEdgeRecord3(aType, aUid, axbType, bType, bUid, data) {
1237
+ return { aType, aUid, axbType, bType, bUid, data };
1238
+ }
1239
+ var GraphClientImpl = class _GraphClientImpl {
1240
+ constructor(backend, options, metaBackend) {
1241
+ this.backend = backend;
1242
+ this.globalWriteBack = options?.migrationWriteBack ?? "off";
1243
+ this.migrationSandbox = options?.migrationSandbox;
1244
+ if (options?.registryMode) {
1245
+ this.dynamicConfig = options.registryMode;
1246
+ this.bootstrapRegistry = createBootstrapRegistry();
1247
+ if (options.registry) {
1248
+ this.staticRegistry = options.registry;
1249
+ }
1250
+ this.metaBackend = metaBackend;
1251
+ } else {
1252
+ this.staticRegistry = options?.registry;
1253
+ }
1254
+ this.scanProtection = options?.scanProtection ?? "error";
1255
+ }
1256
+ scanProtection;
1257
+ // Static mode
1258
+ staticRegistry;
1259
+ // Dynamic mode
1260
+ dynamicConfig;
1261
+ bootstrapRegistry;
1262
+ dynamicRegistry;
1263
+ metaBackend;
1264
+ // Migration settings
1265
+ globalWriteBack;
1266
+ migrationSandbox;
1267
+ // ---------------------------------------------------------------------------
1268
+ // Backend access (exposed for traversal helpers and subgraph cloning)
1269
+ // ---------------------------------------------------------------------------
1270
+ /** @internal */
1271
+ getBackend() {
1272
+ return this.backend;
1273
+ }
1274
+ // ---------------------------------------------------------------------------
1275
+ // Registry routing
1276
+ // ---------------------------------------------------------------------------
1277
+ getRegistryForType(aType) {
1278
+ if (!this.dynamicConfig) return this.staticRegistry;
1279
+ if (aType === META_NODE_TYPE || aType === META_EDGE_TYPE) {
1280
+ return this.bootstrapRegistry;
1281
+ }
1282
+ return this.dynamicRegistry ?? this.staticRegistry ?? this.bootstrapRegistry;
1283
+ }
1284
+ getBackendForType(aType) {
1285
+ if (this.metaBackend && (aType === META_NODE_TYPE || aType === META_EDGE_TYPE)) {
1286
+ return this.metaBackend;
1287
+ }
1288
+ return this.backend;
1289
+ }
1290
+ getCombinedRegistry() {
1291
+ if (!this.dynamicConfig) return this.staticRegistry;
1292
+ return this.dynamicRegistry ?? this.staticRegistry ?? this.bootstrapRegistry;
1293
+ }
1294
+ // ---------------------------------------------------------------------------
1295
+ // Query safety
1296
+ // ---------------------------------------------------------------------------
1297
+ checkQuerySafety(filters, allowCollectionScan) {
1298
+ if (allowCollectionScan || this.scanProtection === "off") return;
1299
+ const result = analyzeQuerySafety(filters);
1300
+ if (result.safe) return;
1301
+ if (this.scanProtection === "error") {
1302
+ throw new QuerySafetyError(result.reason);
1303
+ }
1304
+ console.warn(`[firegraph] Query safety warning: ${result.reason}`);
1305
+ }
1306
+ // ---------------------------------------------------------------------------
1307
+ // Migration helpers
1308
+ // ---------------------------------------------------------------------------
1309
+ async applyMigration(record, docId) {
1310
+ const registry = this.getCombinedRegistry();
1311
+ if (!registry) return record;
1312
+ const result = await migrateRecord(record, registry, this.globalWriteBack);
1313
+ if (result.migrated) {
1314
+ this.handleWriteBack(result, docId);
1315
+ }
1316
+ return result.record;
1317
+ }
1318
+ async applyMigrations(records) {
1319
+ const registry = this.getCombinedRegistry();
1320
+ if (!registry || records.length === 0) return records;
1321
+ const results = await migrateRecords(records, registry, this.globalWriteBack);
1322
+ for (const result of results) {
1323
+ if (result.migrated) {
1324
+ const docId = result.record.axbType === NODE_RELATION ? computeNodeDocId(result.record.aUid) : computeEdgeDocId(result.record.aUid, result.record.axbType, result.record.bUid);
1325
+ this.handleWriteBack(result, docId);
1326
+ }
1327
+ }
1328
+ return results.map((r) => r.record);
1329
+ }
1330
+ /**
1331
+ * Fire-and-forget write-back for a migrated record. Both `'eager'` and
1332
+ * `'background'` are non-blocking; the difference is the log level on
1333
+ * failure. For synchronous write-back, use a transaction — see
1334
+ * `GraphTransactionImpl`.
1335
+ */
1336
+ handleWriteBack(result, docId) {
1337
+ if (result.writeBack === "off") return;
1338
+ const doWriteBack = async () => {
1339
+ try {
1340
+ await this.backend.updateDoc(docId, {
1341
+ replaceData: result.record.data,
1342
+ v: result.record.v
1343
+ });
1344
+ } catch (err) {
1345
+ const msg = `[firegraph] Migration write-back failed for ${docId}: ${err.message}`;
1346
+ if (result.writeBack === "eager") {
1347
+ console.error(msg);
1348
+ } else {
1349
+ console.warn(msg);
1350
+ }
1351
+ }
1352
+ };
1353
+ void doWriteBack();
1354
+ }
1355
+ // ---------------------------------------------------------------------------
1356
+ // GraphReader
1357
+ // ---------------------------------------------------------------------------
1358
+ async getNode(uid) {
1359
+ const docId = computeNodeDocId(uid);
1360
+ const record = await this.backend.getDoc(docId);
1361
+ if (!record) return null;
1362
+ return this.applyMigration(record, docId);
1363
+ }
1364
+ async getEdge(aUid, axbType, bUid) {
1365
+ const docId = computeEdgeDocId(aUid, axbType, bUid);
1366
+ const record = await this.backend.getDoc(docId);
1367
+ if (!record) return null;
1368
+ return this.applyMigration(record, docId);
1369
+ }
1370
+ async edgeExists(aUid, axbType, bUid) {
1371
+ const docId = computeEdgeDocId(aUid, axbType, bUid);
1372
+ const record = await this.backend.getDoc(docId);
1373
+ return record !== null;
1374
+ }
1375
+ async findEdges(params) {
1376
+ const plan = buildEdgeQueryPlan(params);
1377
+ let records;
1378
+ if (plan.strategy === "get") {
1379
+ const record = await this.backend.getDoc(plan.docId);
1380
+ records = record ? [record] : [];
1381
+ } else {
1382
+ this.checkQuerySafety(plan.filters, params.allowCollectionScan);
1383
+ records = await this.backend.query(plan.filters, plan.options);
1384
+ }
1385
+ return this.applyMigrations(records);
1386
+ }
1387
+ async findNodes(params) {
1388
+ const plan = buildNodeQueryPlan(params);
1389
+ let records;
1390
+ if (plan.strategy === "get") {
1391
+ const record = await this.backend.getDoc(plan.docId);
1392
+ records = record ? [record] : [];
1393
+ } else {
1394
+ this.checkQuerySafety(plan.filters, params.allowCollectionScan);
1395
+ records = await this.backend.query(plan.filters, plan.options);
1396
+ }
1397
+ return this.applyMigrations(records);
1398
+ }
1399
+ // ---------------------------------------------------------------------------
1400
+ // GraphWriter
1401
+ // ---------------------------------------------------------------------------
1402
+ async putNode(aType, uid, data) {
1403
+ const registry = this.getRegistryForType(aType);
1404
+ if (registry) {
1405
+ registry.validate(aType, NODE_RELATION, aType, data, this.backend.scopePath);
1406
+ }
1407
+ const backend = this.getBackendForType(aType);
1408
+ const docId = computeNodeDocId(uid);
1409
+ const record = buildWritableNodeRecord3(aType, uid, data);
1410
+ if (registry) {
1411
+ const entry = registry.lookup(aType, NODE_RELATION, aType);
1412
+ if (entry?.schemaVersion && entry.schemaVersion > 0) {
1413
+ record.v = entry.schemaVersion;
1414
+ }
1415
+ }
1416
+ await backend.setDoc(docId, record);
1417
+ }
1418
+ async putEdge(aType, aUid, axbType, bType, bUid, data) {
1419
+ const registry = this.getRegistryForType(aType);
1420
+ if (registry) {
1421
+ registry.validate(aType, axbType, bType, data, this.backend.scopePath);
1422
+ }
1423
+ const backend = this.getBackendForType(aType);
1424
+ const docId = computeEdgeDocId(aUid, axbType, bUid);
1425
+ const record = buildWritableEdgeRecord3(aType, aUid, axbType, bType, bUid, data);
1426
+ if (registry) {
1427
+ const entry = registry.lookup(aType, axbType, bType);
1428
+ if (entry?.schemaVersion && entry.schemaVersion > 0) {
1429
+ record.v = entry.schemaVersion;
1430
+ }
1431
+ }
1432
+ await backend.setDoc(docId, record);
1433
+ }
1434
+ async updateNode(uid, data) {
1435
+ const docId = computeNodeDocId(uid);
1436
+ await this.backend.updateDoc(docId, { dataFields: data });
1437
+ }
1438
+ async removeNode(uid) {
1439
+ const docId = computeNodeDocId(uid);
1440
+ await this.backend.deleteDoc(docId);
1441
+ }
1442
+ async removeEdge(aUid, axbType, bUid) {
1443
+ const docId = computeEdgeDocId(aUid, axbType, bUid);
1444
+ await this.backend.deleteDoc(docId);
1445
+ }
1446
+ // ---------------------------------------------------------------------------
1447
+ // Transactions & Batches
1448
+ // ---------------------------------------------------------------------------
1449
+ async runTransaction(fn) {
1450
+ return this.backend.runTransaction(async (txBackend) => {
1451
+ const graphTx = new GraphTransactionImpl(
1452
+ txBackend,
1453
+ this.getCombinedRegistry(),
1454
+ this.scanProtection,
1455
+ this.backend.scopePath,
1456
+ this.globalWriteBack
1457
+ );
1458
+ return fn(graphTx);
1459
+ });
1460
+ }
1461
+ batch() {
1462
+ return new GraphBatchImpl(
1463
+ this.backend.createBatch(),
1464
+ this.getCombinedRegistry(),
1465
+ this.backend.scopePath
1466
+ );
1467
+ }
1468
+ // ---------------------------------------------------------------------------
1469
+ // Subgraph
1470
+ // ---------------------------------------------------------------------------
1471
+ subgraph(parentNodeUid, name = "graph") {
1472
+ if (!parentNodeUid || parentNodeUid.includes("/")) {
1473
+ throw new FiregraphError(
1474
+ `Invalid parentNodeUid for subgraph: "${parentNodeUid}". Must be a non-empty string without "/".`,
1475
+ "INVALID_SUBGRAPH"
1476
+ );
1477
+ }
1478
+ if (name.includes("/")) {
1479
+ throw new FiregraphError(
1480
+ `Subgraph name must not contain "/": got "${name}". Use chained .subgraph() calls for nested subgraphs.`,
1481
+ "INVALID_SUBGRAPH"
1482
+ );
1483
+ }
1484
+ const childBackend = this.backend.subgraph(parentNodeUid, name);
1485
+ return new _GraphClientImpl(
1486
+ childBackend,
1487
+ {
1488
+ registry: this.getCombinedRegistry(),
1489
+ scanProtection: this.scanProtection,
1490
+ migrationWriteBack: this.globalWriteBack,
1491
+ migrationSandbox: this.migrationSandbox
1492
+ }
1493
+ // Subgraphs do not have meta-backends; meta lives only at the root.
1494
+ );
1495
+ }
1496
+ // ---------------------------------------------------------------------------
1497
+ // Collection group query
1498
+ // ---------------------------------------------------------------------------
1499
+ async findEdgesGlobal(params, collectionName) {
1500
+ if (!this.backend.findEdgesGlobal) {
1501
+ throw new FiregraphError(
1502
+ "findEdgesGlobal() is not supported by the current storage backend.",
1503
+ "UNSUPPORTED_OPERATION"
1504
+ );
1505
+ }
1506
+ const plan = buildEdgeQueryPlan(params);
1507
+ if (plan.strategy === "get") {
1508
+ throw new FiregraphError(
1509
+ "findEdgesGlobal() requires a query, not a direct document lookup. Omit one of aUid/axbType/bUid to force a query strategy.",
1510
+ "INVALID_QUERY"
1511
+ );
1512
+ }
1513
+ this.checkQuerySafety(plan.filters, params.allowCollectionScan);
1514
+ const records = await this.backend.findEdgesGlobal(params, collectionName);
1515
+ return this.applyMigrations(records);
1516
+ }
1517
+ // ---------------------------------------------------------------------------
1518
+ // Bulk operations
1519
+ // ---------------------------------------------------------------------------
1520
+ async removeNodeCascade(uid, options) {
1521
+ return this.backend.removeNodeCascade(uid, this, options);
1522
+ }
1523
+ async bulkRemoveEdges(params, options) {
1524
+ return this.backend.bulkRemoveEdges(params, this, options);
1525
+ }
1526
+ // ---------------------------------------------------------------------------
1527
+ // Dynamic registry methods
1528
+ // ---------------------------------------------------------------------------
1529
+ async defineNodeType(name, jsonSchema, description, options) {
1530
+ if (!this.dynamicConfig) {
1531
+ throw new DynamicRegistryError(
1532
+ 'defineNodeType() is only available in dynamic registry mode. Pass registryMode: { mode: "dynamic" } to createGraphClient().'
1533
+ );
1534
+ }
1535
+ if (RESERVED_TYPE_NAMES.has(name)) {
1536
+ throw new DynamicRegistryError(
1537
+ `Cannot define type "${name}": this name is reserved for the meta-registry.`
1538
+ );
1539
+ }
1540
+ if (this.staticRegistry?.lookup(name, NODE_RELATION, name)) {
1541
+ throw new DynamicRegistryError(
1542
+ `Cannot define node type "${name}": already defined in the static registry.`
1543
+ );
1544
+ }
1545
+ const uid = generateDeterministicUid(META_NODE_TYPE, name);
1546
+ const data = { name, jsonSchema };
1547
+ if (description !== void 0) data.description = description;
1548
+ if (options?.titleField !== void 0) data.titleField = options.titleField;
1549
+ if (options?.subtitleField !== void 0) data.subtitleField = options.subtitleField;
1550
+ if (options?.viewTemplate !== void 0) data.viewTemplate = options.viewTemplate;
1551
+ if (options?.viewCss !== void 0) data.viewCss = options.viewCss;
1552
+ if (options?.allowedIn !== void 0) data.allowedIn = options.allowedIn;
1553
+ if (options?.migrationWriteBack !== void 0)
1554
+ data.migrationWriteBack = options.migrationWriteBack;
1555
+ if (options?.migrations !== void 0) {
1556
+ data.migrations = await this.serializeMigrations(options.migrations);
1557
+ }
1558
+ await this.putNode(META_NODE_TYPE, uid, data);
1559
+ }
1560
+ async defineEdgeType(name, topology, jsonSchema, description, options) {
1561
+ if (!this.dynamicConfig) {
1562
+ throw new DynamicRegistryError(
1563
+ 'defineEdgeType() is only available in dynamic registry mode. Pass registryMode: { mode: "dynamic" } to createGraphClient().'
1564
+ );
1565
+ }
1566
+ if (RESERVED_TYPE_NAMES.has(name)) {
1567
+ throw new DynamicRegistryError(
1568
+ `Cannot define type "${name}": this name is reserved for the meta-registry.`
1569
+ );
1570
+ }
1571
+ if (this.staticRegistry) {
1572
+ const fromTypes = Array.isArray(topology.from) ? topology.from : [topology.from];
1573
+ const toTypes = Array.isArray(topology.to) ? topology.to : [topology.to];
1574
+ for (const aType of fromTypes) {
1575
+ for (const bType of toTypes) {
1576
+ if (this.staticRegistry.lookup(aType, name, bType)) {
1577
+ throw new DynamicRegistryError(
1578
+ `Cannot define edge type "${name}" for (${aType}) -> (${bType}): already defined in the static registry.`
1579
+ );
1580
+ }
1581
+ }
1582
+ }
1583
+ }
1584
+ const uid = generateDeterministicUid(META_EDGE_TYPE, name);
1585
+ const data = {
1586
+ name,
1587
+ from: topology.from,
1588
+ to: topology.to
1589
+ };
1590
+ if (jsonSchema !== void 0) data.jsonSchema = jsonSchema;
1591
+ if (topology.inverseLabel !== void 0) data.inverseLabel = topology.inverseLabel;
1592
+ if (topology.targetGraph !== void 0) data.targetGraph = topology.targetGraph;
1593
+ if (description !== void 0) data.description = description;
1594
+ if (options?.titleField !== void 0) data.titleField = options.titleField;
1595
+ if (options?.subtitleField !== void 0) data.subtitleField = options.subtitleField;
1596
+ if (options?.viewTemplate !== void 0) data.viewTemplate = options.viewTemplate;
1597
+ if (options?.viewCss !== void 0) data.viewCss = options.viewCss;
1598
+ if (options?.allowedIn !== void 0) data.allowedIn = options.allowedIn;
1599
+ if (options?.migrationWriteBack !== void 0)
1600
+ data.migrationWriteBack = options.migrationWriteBack;
1601
+ if (options?.migrations !== void 0) {
1602
+ data.migrations = await this.serializeMigrations(options.migrations);
1603
+ }
1604
+ await this.putNode(META_EDGE_TYPE, uid, data);
1605
+ }
1606
+ async reloadRegistry() {
1607
+ if (!this.dynamicConfig) {
1608
+ throw new DynamicRegistryError(
1609
+ 'reloadRegistry() is only available in dynamic registry mode. Pass registryMode: { mode: "dynamic" } to createGraphClient().'
1610
+ );
1611
+ }
1612
+ const reader = this.createMetaReader();
1613
+ const dynamicOnly = await createRegistryFromGraph(reader, this.migrationSandbox);
1614
+ if (this.staticRegistry) {
1615
+ this.dynamicRegistry = createMergedRegistry(this.staticRegistry, dynamicOnly);
1616
+ } else {
1617
+ this.dynamicRegistry = dynamicOnly;
1618
+ }
1619
+ }
1620
+ async serializeMigrations(migrations) {
1621
+ const result = migrations.map((m) => {
1622
+ const source = typeof m.up === "function" ? m.up.toString() : m.up;
1623
+ return { fromVersion: m.fromVersion, toVersion: m.toVersion, up: source };
1624
+ });
1625
+ await Promise.all(result.map((m) => precompileSource(m.up, this.migrationSandbox)));
1626
+ return result;
1627
+ }
1628
+ /**
1629
+ * Build a `GraphReader` over the meta-backend. If meta lives in the same
1630
+ * collection as the main backend, `this` is returned directly.
1631
+ */
1632
+ createMetaReader() {
1633
+ if (!this.metaBackend) return this;
1634
+ const backend = this.metaBackend;
1635
+ const executeMetaQuery = (filters, options) => backend.query(filters, options);
1636
+ return {
1637
+ async getNode(uid) {
1638
+ return backend.getDoc(computeNodeDocId(uid));
1639
+ },
1640
+ async getEdge(aUid, axbType, bUid) {
1641
+ return backend.getDoc(computeEdgeDocId(aUid, axbType, bUid));
1642
+ },
1643
+ async edgeExists(aUid, axbType, bUid) {
1644
+ const record = await backend.getDoc(computeEdgeDocId(aUid, axbType, bUid));
1645
+ return record !== null;
1646
+ },
1647
+ async findEdges(params) {
1648
+ const plan = buildEdgeQueryPlan(params);
1649
+ if (plan.strategy === "get") {
1650
+ const record = await backend.getDoc(plan.docId);
1651
+ return record ? [record] : [];
1652
+ }
1653
+ return executeMetaQuery(plan.filters, plan.options);
1654
+ },
1655
+ async findNodes(params) {
1656
+ const plan = buildNodeQueryPlan(params);
1657
+ if (plan.strategy === "get") {
1658
+ const record = await backend.getDoc(plan.docId);
1659
+ return record ? [record] : [];
1660
+ }
1661
+ return executeMetaQuery(plan.filters, plan.options);
1662
+ }
1663
+ };
1664
+ }
1665
+ };
1666
/**
 * Factory wrapper: construct a GraphClient bound to `backend`, with an
 * optional dedicated backend for the meta-registry.
 */
function createGraphClientFromBackend(backend, options, metaBackend) {
  return new GraphClientImpl(backend, options, metaBackend);
}
1669
+
1670
+ // src/timestamp.ts
1671
// Firestore-compatible timestamp value: whole seconds since the Unix epoch
// plus a nanoseconds remainder. Millisecond round-trips truncate sub-ms.
var GraphTimestampImpl = class _GraphTimestampImpl {
  constructor(seconds, nanoseconds) {
    this.seconds = seconds;
    this.nanoseconds = nanoseconds;
  }
  /** Native Date at millisecond precision (nanoseconds below 1 ms dropped). */
  toDate() {
    return new Date(this.toMillis());
  }
  /** Epoch milliseconds, flooring the nanosecond remainder. */
  toMillis() {
    const wholeMs = this.seconds * 1e3;
    return wholeMs + Math.floor(this.nanoseconds / 1e6);
  }
  toJSON() {
    const { seconds, nanoseconds } = this;
    return { seconds, nanoseconds };
  }
  /** Split epoch milliseconds into (seconds, nanoseconds). */
  static fromMillis(ms) {
    const secs = Math.floor(ms / 1e3);
    return new _GraphTimestampImpl(secs, (ms - secs * 1e3) * 1e6);
  }
  static now() {
    return _GraphTimestampImpl.fromMillis(Date.now());
  }
};
1694
+
1695
+ // src/internal/sqlite-schema.ts
1696
// Maps public filter/order-by field names to their physical SQLite columns.
// `data` / `data.<key>` paths are not listed here; they are resolved through
// json_extract() instead.
var FIELD_TO_COLUMN = {
  aType: "a_type",
  aUid: "a_uid",
  axbType: "axb_type",
  bType: "b_type",
  bUid: "b_uid",
  v: "v",
  createdAt: "created_at",
  updatedAt: "updated_at"
};
1706
/**
 * DDL for a firegraph table plus its covering indexes. Every statement is
 * idempotent (IF NOT EXISTS) so the set can be replayed on each startup.
 */
function buildSchemaStatements(table) {
  const t = quoteIdent(table);
  const createTable = `CREATE TABLE IF NOT EXISTS ${t} (
      doc_id TEXT NOT NULL,
      scope TEXT NOT NULL DEFAULT '',
      a_type TEXT NOT NULL,
      a_uid TEXT NOT NULL,
      axb_type TEXT NOT NULL,
      b_type TEXT NOT NULL,
      b_uid TEXT NOT NULL,
      data TEXT NOT NULL,
      v INTEGER,
      created_at INTEGER NOT NULL,
      updated_at INTEGER NOT NULL,
      PRIMARY KEY (scope, doc_id)
    )`;
  // (index-name suffix, indexed column list) pairs.
  const indexes = [
    ["idx_scope_a_uid", "scope, a_uid"],
    ["idx_scope_b_uid", "scope, b_uid"],
    ["idx_scope_axb_type_b_uid", "scope, axb_type, b_uid"],
    ["idx_scope_a_type", "scope, a_type"],
    ["idx_scope_b_type", "scope, b_type"],
    ["idx_doc_id", "doc_id"]
  ];
  return [
    createTable,
    ...indexes.map(
      ([suffix, cols]) =>
        `CREATE INDEX IF NOT EXISTS ${quoteIdent(`${table}_${suffix}`)} ON ${t}(${cols})`
    )
  ];
}
/** Validate `name` then wrap it in double quotes for safe SQL interpolation. */
function quoteIdent(name) {
  validateTableName(name);
  return `"${name}"`;
}
/** Reject identifiers outside the conservative [A-Za-z_][A-Za-z0-9_]* grammar. */
function validateTableName(name) {
  if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(name)) {
    throw new Error(`Invalid SQL identifier: ${name}. Must match /^[A-Za-z_][A-Za-z0-9_]*$/.`);
  }
}
1740
+
1741
+ // src/internal/sqlite-sql.ts
1742
/**
 * Resolve a logical filter field to a SQL expression. Known top-level fields
 * map to real columns; `data` / `data.<key>` become json_extract() calls whose
 * JSON path travels as a bind parameter (`pathParam`). Anything else is
 * rejected with INVALID_QUERY.
 */
function compileFieldRef(field) {
  const column = FIELD_TO_COLUMN[field];
  if (column) {
    return { expr: quoteIdent(column) };
  }
  if (field === "data") {
    return { expr: 'json_extract("data", ?)', pathParam: "$" };
  }
  if (field.startsWith("data.")) {
    const key = field.slice("data.".length);
    validateJsonPathKey(key);
    return { expr: 'json_extract("data", ?)', pathParam: `$.${key}` };
  }
  throw new FiregraphError(`SQLite backend cannot resolve filter field: ${field}`, "INVALID_QUERY");
}
1757
// Constructor names of Firestore SDK value classes that must never be
// JSON-serialized into SQLite bind parameters.
var FIRESTORE_TYPE_NAMES = /* @__PURE__ */ new Set([
  "Timestamp",
  "GeoPoint",
  "VectorValue",
  "DocumentReference",
  "FieldValue"
]);
/**
 * Return the Firestore type name when `value` looks like one of the SDK's
 * special value classes (matched by constructor name), otherwise null.
 */
function isFirestoreSpecialType(value) {
  const ctor = value.constructor?.name;
  return ctor && FIRESTORE_TYPE_NAMES.has(ctor) ? ctor : null;
}
1769
/**
 * Convert a JS value into something SQLite can bind directly: primitives pass
 * through, null/undefined become NULL, Dates become epoch millis, plain
 * objects/arrays are JSON-stringified, and Firestore SDK values are rejected
 * (their JSON form would silently drop fields and never match stored rows).
 */
function bindValue(value) {
  if (value === null || value === void 0) return null;
  switch (typeof value) {
    case "string":
    case "number":
    case "bigint":
    case "boolean":
      return value;
    case "object": {
      if (value instanceof Date) return value.getTime();
      const firestoreType = isFirestoreSpecialType(value);
      if (firestoreType) {
        throw new FiregraphError(
          `SQLite backend cannot bind a Firestore ${firestoreType} value \u2014 JSON serialization would silently drop fields and the resulting bind would never match a stored row. Convert to a primitive (e.g. \`ts.toMillis()\` for Timestamp) before filtering or updating.`,
          "INVALID_QUERY"
        );
      }
      return JSON.stringify(value);
    }
    default:
      // Functions, symbols, etc. — mirror the legacy stringification.
      return String(value);
  }
}
1787
// Conservative grammar for a single `data.<key>` path component; anything
// outside it could change the meaning of the SQLite JSON path it is spliced
// into.
var JSON_PATH_KEY_RE = /^[A-Za-z_][A-Za-z0-9_-]*$/;
/**
 * Ensure `key` is safe to embed as `$.<key>`. Throws
 * FiregraphError(INVALID_QUERY) for empty or unsafe components.
 */
function validateJsonPathKey(key) {
  if (key.length === 0) {
    throw new FiregraphError(
      "SQLite backend: empty JSON path component is not allowed",
      "INVALID_QUERY"
    );
  }
  if (JSON_PATH_KEY_RE.test(key)) return;
  throw new FiregraphError(
    `SQLite backend: data field path component "${key}" is not a safe JSON-path identifier. Allowed pattern: /^[A-Za-z_][A-Za-z0-9_-]*$/. Use replaceData (full-data overwrite) for keys with reserved characters (whitespace, dots, brackets, quotes, etc.).`,
    "INVALID_QUERY"
  );
}
1802
/**
 * Compile one filter into a SQL condition, appending bind values (and, for
 * `data` paths, the JSON path parameter first) to `params` in the exact order
 * the placeholders appear in the returned expression.
 */
function compileFilter(filter, params) {
  const { expr, pathParam } = compileFieldRef(filter.field);
  if (pathParam !== void 0) params.push(pathParam);
  // Helper for list operators: binds every element, returns "?," placeholders.
  const pushList = (op) => {
    const values = asArray(filter.value, op);
    for (const v of values) params.push(bindValue(v));
    return values.map(() => "?").join(", ");
  };
  switch (filter.op) {
    case "==":
    case "!=":
    case "<":
    case "<=":
    case ">":
    case ">=": {
      params.push(bindValue(filter.value));
      // SQL spells equality "=", every other comparison matches verbatim.
      const sqlOp = filter.op === "==" ? "=" : filter.op;
      return `${expr} ${sqlOp} ?`;
    }
    case "in":
      return `${expr} IN (${pushList("in")})`;
    case "not-in":
      return `${expr} NOT IN (${pushList("not-in")})`;
    case "array-contains":
      params.push(bindValue(filter.value));
      return `EXISTS (SELECT 1 FROM json_each(${expr}) WHERE value = ?)`;
    case "array-contains-any":
      return `EXISTS (SELECT 1 FROM json_each(${expr}) WHERE value IN (${pushList("array-contains-any")}))`;
    default:
      throw new FiregraphError(
        `SQLite backend does not support filter operator: ${String(filter.op)}`,
        "INVALID_QUERY"
      );
  }
}
1853
/**
 * Assert that a list operator's operand (`in`, `not-in`, ...) is a non-empty
 * array, returning it unchanged.
 */
function asArray(value, op) {
  if (Array.isArray(value) && value.length > 0) return value;
  throw new FiregraphError(`Operator "${op}" requires a non-empty array value`, "INVALID_QUERY");
}
1859
/**
 * Compile the optional ORDER BY clause, pushing the JSON path parameter when
 * the sort field is a `data` path. Returns "" when no ordering was requested.
 */
function compileOrderBy(options, params) {
  const orderBy = options?.orderBy;
  if (!orderBy) return "";
  const { expr, pathParam } = compileFieldRef(orderBy.field);
  if (pathParam !== void 0) params.push(pathParam);
  return ` ORDER BY ${expr} ${orderBy.direction === "desc" ? "DESC" : "ASC"}`;
}
1867
/** Compile the optional LIMIT clause, binding the limit as a parameter. */
function compileLimit(options, params) {
  const limit = options?.limit;
  if (limit === void 0) return "";
  params.push(limit);
  return ` LIMIT ?`;
}
1872
/**
 * Compile a scoped SELECT: always pins `scope`, ANDs each compiled filter,
 * then appends optional ORDER BY / LIMIT. `params` lines up with placeholder
 * order in the returned SQL.
 */
function compileSelect(table, scope, filters, options) {
  const params = [scope];
  const conditions = ['"scope" = ?'];
  for (const filter of filters) {
    conditions.push(compileFilter(filter, params));
  }
  const sql =
    `SELECT * FROM ${quoteIdent(table)} WHERE ${conditions.join(" AND ")}` +
    compileOrderBy(options, params) +
    compileLimit(options, params);
  return { sql, params };
}
1885
/**
 * Compile a cross-scope ("collection group") SELECT. Requires at least one
 * filter so an unbounded table scan can never be issued. `scopeNameFilter`
 * narrows rows to either the root scope (scope = '') or to scopes whose last
 * path segment equals a subgraph name (scope LIKE '%/<name>').
 */
function compileSelectGlobal(table, filters, options, scopeNameFilter) {
  if (filters.length === 0) {
    throw new FiregraphError(
      "compileSelectGlobal requires at least one filter \u2014 refusing to issue an unbounded SELECT.",
      "INVALID_QUERY"
    );
  }
  const params = [];
  const conditions = [];
  if (scopeNameFilter) {
    if (scopeNameFilter.isRoot) {
      conditions.push(`"scope" = ?`);
      params.push("");
    } else {
      conditions.push(`"scope" LIKE ? ESCAPE '\\'`);
      params.push(`%/${escapeLike(scopeNameFilter.name)}`);
    }
  }
  for (const filter of filters) {
    conditions.push(compileFilter(filter, params));
  }
  let sql = `SELECT * FROM ${quoteIdent(table)} WHERE ${conditions.join(" AND ")}`;
  sql += compileOrderBy(options, params);
  sql += compileLimit(options, params);
  return { sql, params };
}
1909
/** Point lookup by (scope, doc_id); LIMIT 1 since that pair is the primary key. */
function compileSelectByDocId(table, scope, docId) {
  const sql = `SELECT * FROM ${quoteIdent(table)} WHERE "scope" = ? AND "doc_id" = ? LIMIT 1`;
  return { sql, params: [scope, docId] };
}
1915
/**
 * Upsert a full record. Note: INSERT OR REPLACE rewrites the whole row, so an
 * overwrite resets created_at as well as updated_at (both become `nowMillis`).
 */
function compileSet(table, scope, docId, record, nowMillis) {
  const { aType, aUid, axbType, bType, bUid, data, v } = record;
  const sql = `INSERT OR REPLACE INTO ${quoteIdent(table)} (
    doc_id, scope, a_type, a_uid, axb_type, b_type, b_uid, data, v, created_at, updated_at
  ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`;
  const params = [
    docId,
    scope,
    aType,
    aUid,
    axbType,
    bType,
    bUid,
    JSON.stringify(data ?? {}),
    v ?? null,
    nowMillis,
    nowMillis
  ];
  return { sql, params };
}
1934
/**
 * Compile a partial UPDATE. The data clause comes from either `replaceData`
 * (full-data overwrite) or `dataFields` (json_set patch of individual keys);
 * `v` and `updated_at` are set alongside. WHERE params (scope, docId) follow
 * all SET params.
 */
function compileUpdate(table, scope, docId, update, nowMillis) {
  const setClauses = [];
  const params = [];
  if (update.replaceData) {
    setClauses.push(`"data" = ?`);
    params.push(JSON.stringify(update.replaceData));
  } else if (update.dataFields && Object.keys(update.dataFields).length > 0) {
    const entries = Object.entries(update.dataFields);
    const pathArgs = entries.map(() => `?, ?`).join(", ");
    setClauses.push(`"data" = json_set(COALESCE("data", '{}'), ${pathArgs})`);
    for (const [key, fieldValue] of entries) {
      validateJsonPathKey(key);
      params.push(`$.${key}`, bindValue(fieldValue));
    }
  }
  if (update.v !== void 0) {
    setClauses.push(`"v" = ?`);
    params.push(update.v);
  }
  setClauses.push(`"updated_at" = ?`);
  params.push(nowMillis, scope, docId);
  return {
    sql: `UPDATE ${quoteIdent(table)} SET ${setClauses.join(", ")} WHERE "scope" = ? AND "doc_id" = ?`,
    params
  };
}
1962
/** Point delete by (scope, doc_id). */
function compileDelete(table, scope, docId) {
  const sql = `DELETE FROM ${quoteIdent(table)} WHERE "scope" = ? AND "doc_id" = ?`;
  return { sql, params: [scope, docId] };
}
1968
/** Delete every row whose scope lives strictly under `scopePrefix` (`<prefix>/...`). */
function compileDeleteScopePrefix(table, scopePrefix) {
  const pattern = `${escapeLike(scopePrefix)}/%`;
  return {
    sql: `DELETE FROM ${quoteIdent(table)} WHERE "scope" LIKE ? ESCAPE '\\'`,
    params: [pattern]
  };
}
1975
/** Count rows whose scope lives strictly under `scopePrefix` (`<prefix>/...`). */
function compileCountScopePrefix(table, scopePrefix) {
  const pattern = `${escapeLike(scopePrefix)}/%`;
  return {
    sql: `SELECT COUNT(*) AS n FROM ${quoteIdent(table)} WHERE "scope" LIKE ? ESCAPE '\\'`,
    params: [pattern]
  };
}
1982
/**
 * Escape a literal for use inside a LIKE pattern with ESCAPE '\': each of
 * `\`, `%`, `_` is prefixed with a backslash in a single pass.
 */
function escapeLike(value) {
  return value.replace(/[\\%_]/g, (ch) => `\\${ch}`);
}
1985
/**
 * Hydrate a raw SQLite row into a graph record: parse the JSON `data` column
 * (empty/NULL becomes {}) and convert the epoch-millis columns into
 * GraphTimestamps. `v` is attached only when the column is non-NULL.
 */
function rowToRecord(row) {
  const record = {
    aType: row.a_type,
    aUid: row.a_uid,
    axbType: row.axb_type,
    bType: row.b_type,
    bUid: row.b_uid,
    data: row.data ? JSON.parse(row.data) : {},
    createdAt: GraphTimestampImpl.fromMillis(toMillis(row.created_at)),
    updatedAt: GraphTimestampImpl.fromMillis(toMillis(row.updated_at))
  };
  if (row.v !== null && row.v !== void 0) {
    record.v = Number(row.v);
  }
  return record;
}
2005
/**
 * Coerce a SQLite integer-ish column value (number | bigint | numeric string)
 * to epoch milliseconds. Non-numeric strings and non-finite numbers fall back
 * to 0 — previously `Number(value)` could leak NaN into timestamps, producing
 * Invalid Date downstream; unknown types already fell back to 0.
 */
function toMillis(value) {
  if (typeof value === "number") {
    return Number.isFinite(value) ? value : 0;
  }
  if (typeof value === "bigint") {
    return Number(value);
  }
  if (typeof value === "string") {
    const parsed = Number(value);
    return Number.isFinite(parsed) ? parsed : 0;
  }
  return 0;
}
2011
+
2012
+ // src/internal/sqlite-backend.ts
2013
// Retry/backoff tuning for chunked batch execution.
var DEFAULT_MAX_RETRIES = 3;
var BASE_RETRY_DELAY_MS = 200;
var MAX_RETRY_DELAY_MS = 5e3;
/** Promise-based delay helper used for retry backoff. */
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
2019
/** Minimum of two optional numbers; an undefined operand is ignored. */
function minDefined(a, b) {
  if (a === void 0) return b;
  return b === void 0 ? a : Math.min(a, b);
}
2024
/**
 * Split `statements` into chunks respecting optional caps on statements per
 * batch (`maxStatements`) and total bind params per batch (`maxParams`).
 * Non-positive / non-finite caps mean "unlimited"; with no caps at all the
 * original array is returned as the single chunk. A lone statement that by
 * itself exceeds the param cap still gets its own chunk — the caller detects
 * and reports that case.
 */
function chunkStatements(statements, maxStatements, maxParams) {
  const normalizeCap = (cap) =>
    cap && cap > 0 && Number.isFinite(cap) ? Math.floor(cap) : Infinity;
  const stmtCap = normalizeCap(maxStatements);
  const paramCap = normalizeCap(maxParams);
  if (stmtCap === Infinity && paramCap === Infinity) {
    return [statements];
  }
  const chunks = [];
  let current = [];
  let paramCount = 0;
  for (const stmt of statements) {
    const size = stmt.params.length;
    const overflows =
      current.length > 0 && (current.length + 1 > stmtCap || paramCount + size > paramCap);
    if (overflows) {
      chunks.push(current);
      current = [];
      paramCount = 0;
    }
    current.push(stmt);
    paramCount += size;
  }
  if (current.length > 0) chunks.push(current);
  return chunks;
}
2048
// Backend facade bound to an in-flight SQL transaction: same statement
// compilers as the top-level backend, but everything executes through `tx`.
var SqliteTransactionBackendImpl = class {
  constructor(tx, tableName, storageScope) {
    this.tx = tx;
    this.tableName = tableName;
    this.storageScope = storageScope;
  }
  async getDoc(docId) {
    const { sql, params } = compileSelectByDocId(this.tableName, this.storageScope, docId);
    const rows = await this.tx.all(sql, params);
    return rows.length > 0 ? rowToRecord(rows[0]) : null;
  }
  async query(filters, options) {
    const { sql, params } = compileSelect(this.tableName, this.storageScope, filters, options);
    const rows = await this.tx.all(sql, params);
    return rows.map(rowToRecord);
  }
  async setDoc(docId, record) {
    const { sql, params } = compileSet(this.tableName, this.storageScope, docId, record, Date.now());
    await this.tx.run(sql, params);
  }
  async updateDoc(docId, update) {
    const { sql, params } = compileUpdate(this.tableName, this.storageScope, docId, update, Date.now());
    // RETURNING distinguishes "updated" from "row absent" in one round trip.
    const rows = await this.tx.all(`${sql} RETURNING "doc_id"`, params);
    if (rows.length === 0) {
      throw new FiregraphError(
        `updateDoc: no document found for doc_id=${docId} (scope=${this.storageScope})`,
        "NOT_FOUND"
      );
    }
  }
  async deleteDoc(docId) {
    const { sql, params } = compileDelete(this.tableName, this.storageScope, docId);
    await this.tx.run(sql, params);
  }
};
2084
// Write-only batch: queues compiled statements locally and submits them in a
// single executor.batch() call on commit(). The instance is reusable after a
// successful commit.
var SqliteBatchBackendImpl = class {
  constructor(executor, tableName, storageScope) {
    this.executor = executor;
    this.tableName = tableName;
    this.storageScope = storageScope;
  }
  statements = [];
  setDoc(docId, record) {
    this.statements.push(compileSet(this.tableName, this.storageScope, docId, record, Date.now()));
  }
  updateDoc(docId, update) {
    this.statements.push(compileUpdate(this.tableName, this.storageScope, docId, update, Date.now()));
  }
  deleteDoc(docId) {
    this.statements.push(compileDelete(this.tableName, this.storageScope, docId));
  }
  async commit() {
    if (this.statements.length === 0) return;
    await this.executor.batch(this.statements);
    // Clear in place so a held reference starts a fresh batch.
    this.statements.length = 0;
  }
};
2108
// SQLite-backed graph storage backend. One instance per (table, scope);
// subgraph() derives child instances with a longer storage scope.
var SqliteBackendImpl = class _SqliteBackendImpl {
  constructor(executor, tableName, storageScope, scopePath) {
    this.executor = executor;
    this.collectionPath = tableName;
    this.storageScope = storageScope;
    this.scopePath = scopePath;
  }
  /** Logical table name (returned through `collectionPath` for parity with Firestore). */
  collectionPath;
  scopePath;
  /** Materialized storage scope (interleaved parent UIDs + subgraph names). */
  storageScope;
  // --- Reads ---
  async getDoc(docId) {
    const { sql, params } = compileSelectByDocId(this.collectionPath, this.storageScope, docId);
    const rows = await this.executor.all(sql, params);
    return rows.length > 0 ? rowToRecord(rows[0]) : null;
  }
  async query(filters, options) {
    const { sql, params } = compileSelect(this.collectionPath, this.storageScope, filters, options);
    const rows = await this.executor.all(sql, params);
    return rows.map(rowToRecord);
  }
  // --- Writes ---
  async setDoc(docId, record) {
    const { sql, params } = compileSet(this.collectionPath, this.storageScope, docId, record, Date.now());
    await this.executor.run(sql, params);
  }
  async updateDoc(docId, update) {
    const { sql, params } = compileUpdate(this.collectionPath, this.storageScope, docId, update, Date.now());
    // RETURNING distinguishes "updated" from "row absent" in one round trip.
    const rows = await this.executor.all(`${sql} RETURNING "doc_id"`, params);
    if (rows.length === 0) {
      throw new FiregraphError(
        `updateDoc: no document found for doc_id=${docId} (scope=${this.storageScope})`,
        "NOT_FOUND"
      );
    }
  }
  async deleteDoc(docId) {
    const { sql, params } = compileDelete(this.collectionPath, this.storageScope, docId);
    await this.executor.run(sql, params);
  }
  // --- Transactions / Batches ---
  async runTransaction(fn) {
    if (!this.executor.transaction) {
      throw new FiregraphError(
        "Interactive transactions are not supported by this SQLite driver. D1 in particular has no read-then-conditional-write transactions; use a Durable Object SQLite client instead, or rewrite the code path as a batch().",
        "UNSUPPORTED_OPERATION"
      );
    }
    return this.executor.transaction((tx) =>
      fn(new SqliteTransactionBackendImpl(tx, this.collectionPath, this.storageScope))
    );
  }
  createBatch() {
    return new SqliteBatchBackendImpl(this.executor, this.collectionPath, this.storageScope);
  }
  // --- Subgraphs ---
  subgraph(parentNodeUid, name) {
    if (!parentNodeUid || parentNodeUid.includes("/")) {
      throw new FiregraphError(
        `Invalid parentNodeUid for subgraph: "${parentNodeUid}". Must be a non-empty string without "/".`,
        "INVALID_SUBGRAPH"
      );
    }
    if (!name || name.includes("/")) {
      throw new FiregraphError(
        `Subgraph name must not contain "/" and must be non-empty: got "${name}". Use chained .subgraph() calls for nested subgraphs.`,
        "INVALID_SUBGRAPH"
      );
    }
    // Storage scope interleaves parent uid + name; the logical scopePath only
    // records the chain of subgraph names.
    const childStorage = this.storageScope
      ? `${this.storageScope}/${parentNodeUid}/${name}`
      : `${parentNodeUid}/${name}`;
    const childScope = this.scopePath ? `${this.scopePath}/${name}` : name;
    return new _SqliteBackendImpl(this.executor, this.collectionPath, childStorage, childScope);
  }
  // --- Cascade & bulk ---
  async removeNodeCascade(uid, reader, options) {
    // Collect every edge touching the node, in either direction.
    const [outgoing, incoming] = await Promise.all([
      reader.findEdges({ aUid: uid, allowCollectionScan: true, limit: 0 }),
      reader.findEdges({ bUid: uid, allowCollectionScan: true, limit: 0 })
    ]);
    const seen = /* @__PURE__ */ new Set();
    const edgeDocIds = [];
    for (const edge of [...outgoing, ...incoming]) {
      if (edge.axbType === NODE_RELATION) continue;
      const docId = computeEdgeDocId(edge.aUid, edge.axbType, edge.bUid);
      if (!seen.has(docId)) {
        seen.add(docId);
        edgeDocIds.push(docId);
      }
    }
    const nodeDocId = computeNodeDocId(uid);
    const shouldDeleteSubgraphs = options?.deleteSubcollections !== false;
    const prefix = this.storageScope ? `${this.storageScope}/${uid}` : uid;
    // Pre-count subgraph rows so `deleted` can report actual rows removed by
    // the single prefix-delete statement below.
    let subgraphRowCount = 0;
    if (shouldDeleteSubgraphs) {
      const countStmt = compileCountScopePrefix(this.collectionPath, prefix);
      const countRows = await this.executor.all(countStmt.sql, countStmt.params);
      const n = countRows[0]?.n;
      subgraphRowCount = typeof n === "bigint" ? Number(n) : Number(n ?? 0);
    }
    const writeStatements = edgeDocIds.map(
      (id) => compileDelete(this.collectionPath, this.storageScope, id)
    );
    writeStatements.push(compileDelete(this.collectionPath, this.storageScope, nodeDocId));
    if (shouldDeleteSubgraphs) {
      writeStatements.push(compileDeleteScopePrefix(this.collectionPath, prefix));
    }
    const {
      deleted: stmtDeleted,
      batches,
      errors
    } = await this.executeChunkedBatches(writeStatements, options);
    const allOk = errors.length === 0;
    // Swap the prefix statement's "1 statement" contribution for the true
    // pre-counted row total.
    const prefixContribution = shouldDeleteSubgraphs && allOk ? 1 : 0;
    const deleted = stmtDeleted - prefixContribution + (allOk ? subgraphRowCount : 0);
    return {
      deleted,
      batches,
      errors,
      edgesDeleted: allOk ? edgeDocIds.length : 0,
      nodeDeleted: allOk
    };
  }
  async bulkRemoveEdges(params, reader, options) {
    // Default to an unbounded (limit: 0) collection scan unless the caller
    // capped the result set themselves.
    const effectiveParams =
      params.limit !== void 0
        ? { ...params, allowCollectionScan: params.allowCollectionScan ?? true }
        : { ...params, limit: 0, allowCollectionScan: params.allowCollectionScan ?? true };
    const edges = await reader.findEdges(effectiveParams);
    if (edges.length === 0) {
      return { deleted: 0, batches: 0, errors: [] };
    }
    const statements = edges.map((e) =>
      compileDelete(this.collectionPath, this.storageScope, computeEdgeDocId(e.aUid, e.axbType, e.bUid))
    );
    return this.executeChunkedBatches(statements, options);
  }
  /**
   * Submit `statements` via one or more `executor.batch()` calls, chunked by
   * the smaller of the caller's `batchSize` and the driver's `maxBatchSize`
   * plus the driver's `maxBatchParams`. Drivers advertising no caps get a
   * single batch, preserving atomicity.
   *
   * Each chunk retries with exponential backoff (200 ms base, 5 s cap) up to
   * `maxRetries` (default 3); a permanently failed chunk is recorded in
   * `errors` and the loop continues so callers still get partial-progress
   * visibility — to halt on first failure, set `maxRetries: 0` and check
   * `result.errors.length`. A lone statement whose params already exceed the
   * driver's param cap is never retried: it cannot succeed.
   *
   * `deleted` counts committed *statements*, not rows — a prefix-delete
   * statement contributes 1 even when it removes many rows;
   * `removeNodeCascade` corrects for that with a pre-counted total.
   *
   * Atomicity caveat (D1): once chunking splits the work, earlier chunks may
   * commit while later ones fail. Both cascade and bulk edge deletes are
   * idempotent, so retrying after partial failure is safe. DO SQLite leaves
   * `maxBatchSize` unset and stays fully atomic via transactionSync.
   */
  async executeChunkedBatches(statements, options) {
    if (statements.length === 0) {
      return { deleted: 0, batches: 0, errors: [] };
    }
    const maxRetries = options?.maxRetries ?? DEFAULT_MAX_RETRIES;
    const stmtCap = minDefined(options?.batchSize, this.executor.maxBatchSize);
    const paramCap = this.executor.maxBatchParams;
    const chunks = chunkStatements(statements, stmtCap, paramCap);
    const errors = [];
    let deleted = 0;
    let batches = 0;
    for (let batchIndex = 0; batchIndex < chunks.length; batchIndex++) {
      const chunk = chunks[batchIndex];
      // A single statement over the driver's param cap can never commit.
      const hopeless =
        chunk.length === 1 && paramCap !== void 0 && chunk[0].params.length > paramCap;
      const retries = hopeless ? 0 : maxRetries;
      let committed = false;
      let lastError = null;
      for (let attempt = 0; attempt <= retries; attempt++) {
        try {
          await this.executor.batch(chunk);
          committed = true;
          break;
        } catch (err) {
          lastError = err instanceof Error ? err : new Error(String(err));
          if (attempt < retries) {
            await sleep(Math.min(BASE_RETRY_DELAY_MS * Math.pow(2, attempt), MAX_RETRY_DELAY_MS));
          }
        }
      }
      if (committed) {
        deleted += chunk.length;
        batches += 1;
      } else if (lastError) {
        errors.push({ batchIndex, error: lastError, operationCount: chunk.length });
      }
      if (options?.onProgress) {
        options.onProgress({
          completedBatches: batches,
          totalBatches: chunks.length,
          deletedSoFar: deleted
        });
      }
    }
    return { deleted, batches, errors };
  }
  // --- Cross-scope (collection group) ---
  async findEdgesGlobal(params, collectionName) {
    const plan = buildEdgeQueryPlan(params);
    if (plan.strategy === "get") {
      throw new FiregraphError(
        "findEdgesGlobal() requires a query, not a direct document lookup. Omit one of aUid/axbType/bUid to force a query strategy.",
        "INVALID_QUERY"
      );
    }
    const name = collectionName ?? this.collectionPath;
    // Querying the root collection name means "root scope only" (scope = '').
    const scopeNameFilter = { name, isRoot: name === this.collectionPath };
    const stmt = compileSelectGlobal(this.collectionPath, plan.filters, plan.options, scopeNameFilter);
    const rows = await this.executor.all(stmt.sql, stmt.params);
    return rows.map(rowToRecord);
  }
};
2349
/**
 * Public factory for a root SQLite backend. `storageScope`/`scopePath`
 * default to the empty (root) scope.
 */
function createSqliteBackend(executor, tableName, options = {}) {
  return new SqliteBackendImpl(
    executor,
    tableName,
    options.storageScope ?? "",
    options.scopePath ?? ""
  );
}
2354
+
2355
+ // src/do-sqlite.ts
2356
// SQL executor adapter over Durable Object storage (`ctx.storage`). The
// underlying DO SQL calls are synchronous; the async signatures exist only to
// match the shared executor interface.
var DOSqliteExecutor = class {
  constructor(storage) {
    this.storage = storage;
  }
  async all(sql, params) {
    return this.storage.sql.exec(sql, ...params).toArray();
  }
  async run(sql, params) {
    // toArray() drains the cursor so the statement fully executes.
    this.storage.sql.exec(sql, ...params).toArray();
  }
  async batch(statements) {
    if (statements.length === 0) return;
    // transactionSync gives all-or-nothing semantics for the whole batch.
    this.storage.transactionSync(() => {
      for (const { sql, params } of statements) {
        this.storage.sql.exec(sql, ...params).toArray();
      }
    });
  }
  async transaction(fn) {
    // Manual BEGIN/COMMIT because the callback may await between statements.
    this.storage.sql.exec("BEGIN IMMEDIATE").toArray();
    const txExec = {
      all: async (sql, params) => this.storage.sql.exec(sql, ...params).toArray(),
      run: async (sql, params) => {
        this.storage.sql.exec(sql, ...params).toArray();
      }
    };
    try {
      const result = await fn(txExec);
      this.storage.sql.exec("COMMIT").toArray();
      return result;
    } catch (err) {
      this.storage.sql.exec("ROLLBACK").toArray();
      throw err;
    }
  }
};
2392
/**
 * Create (or migrate) the firegraph table/indexes for `table`.
 * Each DDL cursor is drained with `.toArray()` so the statement actually
 * executes against the Durable Object's SQLite storage.
 */
function ensureSchema(storage, table) {
  for (const ddl of buildSchemaStatements(table)) {
    storage.sql.exec(ddl).toArray();
  }
}
2398
/**
 * Build a firegraph client on top of a Durable Object's SQLite storage.
 *
 * - `options.table` (default "firegraph") names the main table.
 * - Unless `options.autoMigrate === false`, schemas are created up front.
 * - When `registryMode` is an object naming a distinct `collection`, a second
 *   backend over that table is created for registry metadata.
 * The remaining options are forwarded verbatim to the graph client.
 */
function createDOSqliteGraphClient(storage, options = {}) {
  // Strip our own keys; everything else belongs to the graph client.
  const { table: rawTable, autoMigrate, ...clientOptions } = options;
  const mainTable = rawTable ?? "firegraph";
  validateTableName(mainTable);
  const migrate = autoMigrate !== false;
  if (migrate) {
    ensureSchema(storage, mainTable);
  }
  const executor = new DOSqliteExecutor(storage);
  const backend = createSqliteBackend(executor, mainTable);
  let metaBackend;
  const registry = clientOptions.registryMode;
  const metaTable = registry && typeof registry === "object" ? registry.collection : void 0;
  // A registry table identical to the main table needs no separate backend.
  if (metaTable && metaTable !== mainTable) {
    validateTableName(metaTable);
    if (migrate) {
      ensureSchema(storage, metaTable);
    }
    metaBackend = createSqliteBackend(executor, metaTable);
  }
  return createGraphClientFromBackend(backend, clientOptions, metaBackend);
}
2420
// Annotate the CommonJS export names for ESM import in node:
// NOTE: `0 && (...)` never executes at runtime; the bundler emits this dead
// expression only so Node's static CJS-export scanner (cjs-module-lexer) can
// detect the named exports when this file is imported from ESM. Do not
// "simplify" or remove it.
0 && (module.exports = {
  createDOSqliteGraphClient
});
//# sourceMappingURL=do-sqlite.cjs.map