@typicalday/firegraph 0.12.0 → 0.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +317 -73
- package/dist/backend-DuvHGgK1.d.cts +1897 -0
- package/dist/backend-DuvHGgK1.d.ts +1897 -0
- package/dist/backend.cjs +222 -3
- package/dist/backend.cjs.map +1 -1
- package/dist/backend.d.cts +25 -5
- package/dist/backend.d.ts +25 -5
- package/dist/backend.js +197 -4
- package/dist/backend.js.map +1 -1
- package/dist/chunk-2DHMNTV6.js +16 -0
- package/dist/chunk-2DHMNTV6.js.map +1 -0
- package/dist/chunk-4MMQ5W74.js +288 -0
- package/dist/chunk-4MMQ5W74.js.map +1 -0
- package/dist/chunk-D4J7Z4FE.js +67 -0
- package/dist/chunk-D4J7Z4FE.js.map +1 -0
- package/dist/chunk-N5HFDWQX.js +23 -0
- package/dist/chunk-N5HFDWQX.js.map +1 -0
- package/dist/chunk-PAD7WFFU.js +573 -0
- package/dist/chunk-PAD7WFFU.js.map +1 -0
- package/dist/{chunk-AWW4MUJ5.js → chunk-TK64DNVK.js} +12 -1
- package/dist/chunk-TK64DNVK.js.map +1 -0
- package/dist/{chunk-HONQY4HF.js → chunk-WRTFC5NG.js} +362 -17
- package/dist/chunk-WRTFC5NG.js.map +1 -0
- package/dist/client-BKi3vk0Q.d.ts +34 -0
- package/dist/client-BrsaXtDV.d.cts +34 -0
- package/dist/cloudflare/index.cjs +930 -3
- package/dist/cloudflare/index.cjs.map +1 -1
- package/dist/cloudflare/index.d.cts +213 -12
- package/dist/cloudflare/index.d.ts +213 -12
- package/dist/cloudflare/index.js +562 -281
- package/dist/cloudflare/index.js.map +1 -1
- package/dist/codegen/index.d.cts +1 -1
- package/dist/codegen/index.d.ts +1 -1
- package/dist/errors-BRc3I_eH.d.cts +73 -0
- package/dist/errors-BRc3I_eH.d.ts +73 -0
- package/dist/firestore-enterprise/index.cjs +3877 -0
- package/dist/firestore-enterprise/index.cjs.map +1 -0
- package/dist/firestore-enterprise/index.d.cts +141 -0
- package/dist/firestore-enterprise/index.d.ts +141 -0
- package/dist/firestore-enterprise/index.js +985 -0
- package/dist/firestore-enterprise/index.js.map +1 -0
- package/dist/firestore-standard/index.cjs +3117 -0
- package/dist/firestore-standard/index.cjs.map +1 -0
- package/dist/firestore-standard/index.d.cts +49 -0
- package/dist/firestore-standard/index.d.ts +49 -0
- package/dist/firestore-standard/index.js +283 -0
- package/dist/firestore-standard/index.js.map +1 -0
- package/dist/index.cjs +590 -550
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +9 -37
- package/dist/index.d.ts +9 -37
- package/dist/index.js +178 -555
- package/dist/index.js.map +1 -1
- package/dist/{registry-Fi074zVa.d.ts → registry-Bc7h6WTM.d.cts} +1 -1
- package/dist/{registry-B1qsVL0E.d.cts → registry-C2KUPVZj.d.ts} +1 -1
- package/dist/{scope-path-B1G3YiA7.d.cts → scope-path-CROFZGr9.d.cts} +1 -56
- package/dist/{scope-path-B1G3YiA7.d.ts → scope-path-CROFZGr9.d.ts} +1 -56
- package/dist/sqlite/index.cjs +3631 -0
- package/dist/sqlite/index.cjs.map +1 -0
- package/dist/sqlite/index.d.cts +111 -0
- package/dist/sqlite/index.d.ts +111 -0
- package/dist/sqlite/index.js +1164 -0
- package/dist/sqlite/index.js.map +1 -0
- package/package.json +33 -3
- package/dist/backend-BsR0lnFL.d.ts +0 -200
- package/dist/backend-Ct-fLlkG.d.cts +0 -200
- package/dist/chunk-AWW4MUJ5.js.map +0 -1
- package/dist/chunk-HONQY4HF.js.map +0 -1
- package/dist/types-DxYLy8Ol.d.cts +0 -770
- package/dist/types-DxYLy8Ol.d.ts +0 -770
package/dist/index.cjs.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/internal/serialization-tag.ts","../src/serialization.ts","../src/index.ts","../src/docid.ts","../src/internal/constants.ts","../src/internal/write-plan.ts","../src/batch.ts","../src/dynamic-registry.ts","../src/errors.ts","../src/json-schema.ts","../src/migration.ts","../src/scope.ts","../src/registry.ts","../src/sandbox.ts","../src/query.ts","../src/query-safety.ts","../src/transaction.ts","../src/client.ts","../src/codegen/index.ts","../src/config.ts","../src/cross-graph.ts","../src/default-indexes.ts","../src/discover.ts","../src/internal/firestore-backend.ts","../src/bulk.ts","../src/internal/firestore-adapter.ts","../src/internal/pipeline-adapter.ts","../src/firestore.ts","../src/id.ts","../src/indexes.ts","../src/query-client/client.ts","../src/query-client/config.ts","../src/query-client/shaping.ts","../src/record.ts","../src/scope-path.ts","../src/traverse.ts","../src/views.ts"],"sourcesContent":["/**\n * Firegraph serialization tag — split from `src/serialization.ts` so it can\n * be imported from Workers-facing code without dragging in\n * `@google-cloud/firestore`.\n *\n * The full serialization module (with Timestamp/GeoPoint round-tripping)\n * lives one folder up because the sandbox migration pipeline needs it; the\n * write-plan helper only needs to recognise tagged objects to keep them\n * terminal during patch flattening, so it imports just the tag from here.\n */\n\n/** Sentinel key used to tag serialized Firestore types. */\nexport const SERIALIZATION_TAG = '__firegraph_ser__' as const;\n\nconst KNOWN_TYPES = new Set(['Timestamp', 'GeoPoint', 'VectorValue', 'DocumentReference']);\n\n/** Check if a value is a tagged serialized Firestore type. 
*/\nexport function isTaggedValue(value: unknown): boolean {\n if (value === null || typeof value !== 'object') return false;\n const tag = (value as Record<string, unknown>)[SERIALIZATION_TAG];\n return typeof tag === 'string' && KNOWN_TYPES.has(tag);\n}\n","/**\n * Firestore-aware serialization for the sandbox migration pipeline.\n *\n * Firestore documents can contain special types (Timestamp, GeoPoint,\n * VectorValue, DocumentReference) that don't survive plain JSON\n * round-tripping. This module provides tagged serialization: Firestore\n * types are wrapped in tagged plain objects before JSON marshaling and\n * reconstructed after.\n *\n * Only used by the `defaultExecutor` sandbox path. Static migrations\n * (in-memory functions) receive raw Firestore objects directly.\n */\n\nimport type { DocumentReference, Firestore } from '@google-cloud/firestore';\nimport { FieldValue, GeoPoint, Timestamp } from '@google-cloud/firestore';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n// SERIALIZATION_TAG and isTaggedValue live in `internal/serialization-tag.ts`\n// so Workers-facing code (e.g. `src/internal/write-plan.ts` and the\n// `firegraph/cloudflare` bundle) can recognise tagged values without\n// pulling in `@google-cloud/firestore`. 
Re-exported here so callers that\n// already import from `src/serialization.ts` keep working.\nexport { isTaggedValue, SERIALIZATION_TAG } from './internal/serialization-tag.js';\nimport { isTaggedValue, SERIALIZATION_TAG } from './internal/serialization-tag.js';\n\n// One-time warning for DocumentReference deserialization without db\nlet _docRefWarned = false;\n\n// ---------------------------------------------------------------------------\n// Detection helpers\n// ---------------------------------------------------------------------------\n\nfunction isTimestamp(value: unknown): value is Timestamp {\n return value instanceof Timestamp;\n}\n\nfunction isGeoPoint(value: unknown): value is GeoPoint {\n return value instanceof GeoPoint;\n}\n\nfunction isDocumentReference(value: unknown): value is DocumentReference {\n // Duck-type check: DocumentReference has path (string) and firestore properties\n if (value === null || typeof value !== 'object') return false;\n const v = value as Record<string, unknown>;\n return (\n typeof v.path === 'string' &&\n v.firestore !== undefined &&\n typeof v.id === 'string' &&\n v.constructor?.name === 'DocumentReference'\n );\n}\n\nfunction isVectorValue(value: unknown): boolean {\n if (value === null || typeof value !== 'object') return false;\n const v = value as Record<string, unknown>;\n return (\n v.constructor?.name === 'VectorValue' && Array.isArray((v as Record<string, unknown>)._values)\n );\n}\n\n// ---------------------------------------------------------------------------\n// Serialize\n// ---------------------------------------------------------------------------\n\n/**\n * Recursively walk a data object and replace Firestore types with tagged\n * plain objects suitable for JSON serialization.\n *\n * Returns a new object tree — the input is never mutated.\n */\nexport function serializeFirestoreTypes(data: Record<string, unknown>): Record<string, unknown> {\n return serializeValue(data) as Record<string, 
unknown>;\n}\n\nfunction serializeValue(value: unknown): unknown {\n // Primitives\n if (value === null || value === undefined) return value;\n if (typeof value !== 'object') return value;\n\n // Firestore types (check before generic object/array)\n if (isTimestamp(value)) {\n return {\n [SERIALIZATION_TAG]: 'Timestamp',\n seconds: value.seconds,\n nanoseconds: value.nanoseconds,\n };\n }\n if (isGeoPoint(value)) {\n return {\n [SERIALIZATION_TAG]: 'GeoPoint',\n latitude: value.latitude,\n longitude: value.longitude,\n };\n }\n if (isDocumentReference(value)) {\n return { [SERIALIZATION_TAG]: 'DocumentReference', path: (value as DocumentReference).path };\n }\n if (isVectorValue(value)) {\n // Prefer toArray() (public API) over _values (private internal property)\n const v = value as Record<string, unknown>;\n const values =\n typeof v.toArray === 'function' ? (v.toArray as () => number[])() : (v._values as number[]);\n return { [SERIALIZATION_TAG]: 'VectorValue', values: [...values] };\n }\n\n // Arrays\n if (Array.isArray(value)) {\n return value.map(serializeValue);\n }\n\n // Plain objects — recurse\n const result: Record<string, unknown> = {};\n for (const key of Object.keys(value as Record<string, unknown>)) {\n result[key] = serializeValue((value as Record<string, unknown>)[key]);\n }\n return result;\n}\n\n// ---------------------------------------------------------------------------\n// Deserialize\n// ---------------------------------------------------------------------------\n\n/**\n * Recursively walk a data object and reconstruct Firestore types from\n * tagged plain objects.\n *\n * @param data - The data to deserialize (typically from JSON.parse)\n * @param db - Optional Firestore instance for DocumentReference reconstruction.\n * If not provided, tagged DocumentReferences are left as-is with a one-time warning.\n *\n * Returns a new object tree — the input is never mutated.\n */\nexport function deserializeFirestoreTypes(\n data: Record<string, 
unknown>,\n db?: Firestore,\n): Record<string, unknown> {\n return deserializeValue(data, db) as Record<string, unknown>;\n}\n\nfunction deserializeValue(value: unknown, db?: Firestore): unknown {\n if (value === null || value === undefined) return value;\n if (typeof value !== 'object') return value;\n\n // Short-circuit for values that are already real Firestore types.\n // This makes deserializeFirestoreTypes idempotent — safe to call on data\n // that has already been deserialized (e.g., write-back after defaultExecutor\n // already reconstructed types, or static migrations that return raw types).\n if (\n isTimestamp(value) ||\n isGeoPoint(value) ||\n isDocumentReference(value) ||\n isVectorValue(value)\n ) {\n return value;\n }\n\n // Arrays\n if (Array.isArray(value)) {\n return value.map((v) => deserializeValue(v, db));\n }\n\n const obj = value as Record<string, unknown>;\n\n // Check for tagged Firestore type\n if (isTaggedValue(obj)) {\n const tag = obj[SERIALIZATION_TAG] as string;\n\n switch (tag) {\n case 'Timestamp':\n // Validate expected fields before reconstruction\n if (typeof obj.seconds !== 'number' || typeof obj.nanoseconds !== 'number') return obj;\n return new Timestamp(obj.seconds, obj.nanoseconds);\n\n case 'GeoPoint':\n if (typeof obj.latitude !== 'number' || typeof obj.longitude !== 'number') return obj;\n return new GeoPoint(obj.latitude, obj.longitude);\n\n case 'VectorValue':\n if (!Array.isArray(obj.values)) return obj;\n return FieldValue.vector(obj.values as number[]);\n\n case 'DocumentReference':\n if (typeof obj.path !== 'string') return obj;\n if (db) {\n return db.doc(obj.path);\n }\n // No db available — leave as tagged object with one-time warning\n if (!_docRefWarned) {\n _docRefWarned = true;\n console.warn(\n '[firegraph] DocumentReference encountered during migration deserialization ' +\n 'but no Firestore instance available. The reference will remain as a tagged ' +\n 'object with its path. 
Enable write-back for full reconstruction.',\n );\n }\n return obj;\n\n default:\n // Unknown tag — leave as-is (forward compatibility)\n return obj;\n }\n }\n\n // Plain object — recurse\n const result: Record<string, unknown> = {};\n for (const key of Object.keys(obj)) {\n result[key] = deserializeValue(obj[key], db);\n }\n return result;\n}\n","export { createGraphClientFromBackend } from './client.js';\nexport type { CodegenOptions } from './codegen/index.js';\nexport { generateTypes } from './codegen/index.js';\nexport type {\n FiregraphConfig,\n ViewContext,\n ViewDefaultsConfig,\n ViewResolverConfig,\n} from './config.js';\nexport { defineConfig, resolveView } from './config.js';\nexport { isAncestorUid, resolveAncestorCollection } from './cross-graph.js';\nexport { DEFAULT_CORE_INDEXES } from './default-indexes.js';\nexport type { DiscoverResult, DiscoveryWarning } from './discover.js';\nexport { discoverEntities } from './discover.js';\nexport { DiscoveryError } from './discover.js';\nexport { computeEdgeDocId, computeNodeDocId } from './docid.js';\nexport {\n BOOTSTRAP_ENTRIES,\n createBootstrapRegistry,\n createRegistryFromGraph,\n EDGE_TYPE_SCHEMA,\n generateDeterministicUid,\n META_EDGE_TYPE,\n META_NODE_TYPE,\n NODE_TYPE_SCHEMA,\n} from './dynamic-registry.js';\nexport {\n CrossBackendTransactionError,\n DynamicRegistryError,\n EdgeNotFoundError,\n FiregraphError,\n InvalidQueryError,\n MigrationError,\n NodeNotFoundError,\n QuerySafetyError,\n RegistryScopeError,\n RegistryViolationError,\n TraversalError,\n ValidationError,\n} from './errors.js';\nexport { createGraphClient } from './firestore.js';\nexport { generateId } from './id.js';\nexport type {\n FirestoreIndex,\n FirestoreIndexConfig,\n FirestoreIndexField,\n GenerateIndexOptions,\n} from './indexes.js';\nexport { generateIndexConfig } from './indexes.js';\nexport { DEFAULT_QUERY_LIMIT } from './internal/constants.js';\nexport { deleteField } from './internal/write-plan.js';\nexport type { 
FieldMeta } from './json-schema.js';\nexport { compileSchema, jsonSchemaToFieldMeta } from './json-schema.js';\nexport type { MigrationResult } from './migration.js';\nexport {\n applyMigrationChain,\n migrateRecord,\n migrateRecords,\n validateMigrationChain,\n} from './migration.js';\nexport { buildEdgeQueryPlan, buildNodeQueryPlan } from './query.js';\nexport type { QueryClientErrorCode, QueryClientOptions } from './query-client/index.js';\nexport { QueryClient, QueryClientError } from './query-client/index.js';\nexport type { QuerySafetyResult } from './query-safety.js';\nexport { analyzeQuerySafety } from './query-safety.js';\nexport { buildEdgeRecord, buildNodeRecord } from './record.js';\nexport { createMergedRegistry, createRegistry } from './registry.js';\nexport {\n compileMigrationFn,\n compileMigrations,\n defaultExecutor,\n destroySandboxWorker,\n precompileSource,\n} from './sandbox.js';\nexport { matchScope, matchScopeAny } from './scope.js';\nexport type { StorageScopeSegment } from './scope-path.js';\nexport {\n appendStorageScope,\n isAncestorScopeUid,\n parseStorageScope,\n resolveAncestorScope,\n} from './scope-path.js';\nexport {\n deserializeFirestoreTypes,\n isTaggedValue,\n SERIALIZATION_TAG,\n serializeFirestoreTypes,\n} from './serialization.js';\nexport { createTraversal } from './traverse.js';\nexport type {\n BulkBatchError,\n BulkOptions,\n BulkProgress,\n BulkResult,\n CascadeResult,\n DefineTypeOptions,\n DiscoveredEntity,\n DiscoveryResult,\n DynamicGraphClient,\n DynamicRegistryConfig,\n EdgeTopology,\n EdgeTypeData,\n FindEdgesParams,\n FindNodesParams,\n GraphBatch,\n GraphClient,\n GraphClientOptions,\n GraphReader,\n GraphRecord,\n GraphRegistry,\n GraphTransaction,\n GraphWriter,\n HopDefinition,\n HopResult,\n IndexFieldSpec,\n IndexSpec,\n MigrationExecutor,\n MigrationFn,\n MigrationStep,\n MigrationWriteBack,\n NodeTypeData,\n QueryFilter,\n QueryMode,\n QueryOptions,\n QueryPlan,\n RegistryEntry,\n ScanProtection,\n 
StoredGraphRecord,\n StoredMigrationStep,\n TraversalBuilder,\n TraversalOptions,\n TraversalResult,\n WhereClause,\n} from './types.js';\nexport type {\n EntityViewConfig,\n EntityViewMeta,\n ViewComponentClass,\n ViewMeta,\n ViewRegistry,\n ViewRegistryInput,\n} from './views.js';\nexport { defineViews } from './views.js';\n","import { createHash } from 'node:crypto';\n\nimport { SHARD_SEPARATOR } from './internal/constants.js';\n\nexport function computeNodeDocId(uid: string): string {\n return uid;\n}\n\nexport function computeEdgeDocId(aUid: string, axbType: string, bUid: string): string {\n const composite = `${aUid}${SHARD_SEPARATOR}${axbType}${SHARD_SEPARATOR}${bUid}`;\n const hash = createHash('sha256').update(composite).digest('hex');\n const shard = hash[0];\n return `${shard}${SHARD_SEPARATOR}${aUid}${SHARD_SEPARATOR}${axbType}${SHARD_SEPARATOR}${bUid}`;\n}\n","export const NODE_RELATION = 'is';\n\n/**\n * Default result limit applied to findEdges/findNodes queries\n * when no explicit limit is provided. 
Prevents unbounded result sets\n * that could be expensive on Enterprise Firestore.\n */\nexport const DEFAULT_QUERY_LIMIT = 500;\n\n/**\n * Fields that are part of the firegraph record structure (not user data).\n * Used by the query planner and safety analysis to distinguish builtin\n * fields from data.* fields.\n */\nexport const BUILTIN_FIELDS = new Set([\n 'aType',\n 'aUid',\n 'axbType',\n 'bType',\n 'bUid',\n 'createdAt',\n 'updatedAt',\n]);\n\nexport const SHARD_ALGORITHM = 'sha256';\nexport const SHARD_SEPARATOR = ':';\nexport const SHARD_BUCKETS = 16;\n","/**\n * Write-plan helper — flattens partial-update payloads into a list of\n * deep-path operations every backend can execute identically.\n *\n * Background: firegraph used to ship two write semantics that quietly\n * disagreed about depth.\n * - `putNode`/`putEdge` did a full document replace.\n * - `updateNode`/`updateEdge` did a one-level shallow merge: top-level\n * keys were preserved, but nested objects were replaced wholesale.\n *\n * Both behaviours dropped sibling keys silently. The 0.12 contract is that\n * `put*` and `update*` deep-merge by default (sibling keys at any depth\n * survive); `replace*` is the explicit escape hatch.\n *\n * `flattenPatch` walks a partial-update payload and emits one\n * {@link DataPathOp} per terminal value. Plain objects recurse; arrays,\n * primitives, Firestore special types, and tagged firegraph-serialization\n * objects are terminal (replaced as a unit). `undefined` values are\n * skipped; `null` is preserved as a real `null` write; the\n * {@link DELETE_FIELD} sentinel marks a field for removal.\n *\n * The output is deliberately backend-agnostic. 
Each backend translates ops\n * into its native dialect:\n * - Firestore: dotted field path → `data.a.b.c` for `update()`.\n * - SQLite / DO SQLite: `json_set(data, '$.a.b.c', ?)` /\n * `json_remove(data, '$.a.b.c')`.\n */\n\nimport { isTaggedValue, SERIALIZATION_TAG } from './serialization-tag.js';\n\n// ---------------------------------------------------------------------------\n// Public sentinel\n// ---------------------------------------------------------------------------\n\n/**\n * Sentinel returned by {@link deleteField}. Treated by all backends as\n * \"remove this field from the stored document\".\n *\n * Equivalent to Firestore's `FieldValue.delete()`, but works for SQLite\n * backends too. Use inside `updateNode`/`updateEdge` payloads.\n */\nexport const DELETE_FIELD: unique symbol = Symbol.for('firegraph.deleteField');\nexport type DeleteSentinel = typeof DELETE_FIELD;\n\n/**\n * Returns the firegraph delete sentinel. Place this anywhere in an\n * `updateNode`/`updateEdge` payload to remove the corresponding field.\n *\n * ```ts\n * await client.updateNode('tour', uid, {\n * attrs: { obsoleteFlag: deleteField() },\n * });\n * ```\n */\nexport function deleteField(): DeleteSentinel {\n return DELETE_FIELD;\n}\n\n/** Type guard for the delete sentinel. 
*/\nexport function isDeleteSentinel(value: unknown): value is DeleteSentinel {\n return value === DELETE_FIELD;\n}\n\n// ---------------------------------------------------------------------------\n// Terminal-detection helpers\n// ---------------------------------------------------------------------------\n\nconst FIRESTORE_TERMINAL_CTOR = new Set([\n 'Timestamp',\n 'GeoPoint',\n 'VectorValue',\n 'DocumentReference',\n 'FieldValue',\n 'NumericIncrementTransform',\n 'ArrayUnionTransform',\n 'ArrayRemoveTransform',\n 'ServerTimestampTransform',\n 'DeleteTransform',\n]);\n\n/**\n * Should this value be written as a single terminal op (no recursion)?\n *\n * Plain JS objects (constructor === Object, or no prototype) are recursed.\n * Everything else — arrays, primitives, class instances, Firestore special\n * types, tagged serialization payloads — is terminal.\n */\nexport function isTerminalValue(value: unknown): boolean {\n if (value === null) return true;\n const t = typeof value;\n if (t !== 'object') return true;\n if (Array.isArray(value)) return true;\n // Tagged serialization payloads carry the SERIALIZATION_TAG sentinel and\n // should be persisted whole — never split into per-field ops.\n if (isTaggedValue(value)) return true;\n const proto = Object.getPrototypeOf(value);\n if (proto === null || proto === Object.prototype) return false;\n // Class instances — Firestore types or anything else exotic.\n const ctor = (value as { constructor?: { name?: string } }).constructor;\n if (ctor && typeof ctor.name === 'string' && FIRESTORE_TERMINAL_CTOR.has(ctor.name)) return true;\n // Unknown class instance: treat as terminal. 
Recursing into a class\n // instance is almost always wrong (Map, Set, Date, Buffer...).\n return true;\n}\n\n// ---------------------------------------------------------------------------\n// Core type\n// ---------------------------------------------------------------------------\n\n/**\n * Single terminal write operation produced by {@link flattenPatch}.\n *\n * `path` is a non-empty array of plain object keys. `value` is the value to\n * write; ignored when `delete` is `true`. Arrays / primitives / Firestore\n * special types appear here as whole terminal values.\n */\nexport interface DataPathOp {\n path: readonly string[];\n value: unknown;\n delete: boolean;\n}\n\n// ---------------------------------------------------------------------------\n// Path-segment validation\n// ---------------------------------------------------------------------------\n\n/**\n * Object keys that are safe to embed in SQLite `json_set`/`json_remove`\n * paths. The SQLite backend uses an allowlist regex too — keep these in\n * sync (see `JSON_PATH_KEY_RE` in `internal/sqlite-sql.ts` and\n * `cloudflare/sql.ts`).\n *\n * Allows: ASCII letters, digits, `_`, `-`. Must start with a letter or\n * underscore. This rejects keys containing dots, brackets, quotes, or\n * non-ASCII characters that could break path parsing or be used to\n * inject into the path expression.\n */\nconst SAFE_KEY_RE = /^[A-Za-z_][A-Za-z0-9_-]*$/;\n\n/**\n * Mutual-exclusion guard for {@link UpdatePayload}. The two branches of the\n * shape — `dataOps` (deep-merge) and `replaceData` (full replace) — are\n * structurally incompatible: combining them would tell the backend to\n * simultaneously merge AND wipe, and the three backends disagree on which\n * wins. This helper centralises the runtime check so all three backends\n * trip the same error.\n *\n * Imported as a runtime check from `firestore-backend`, `sqlite-sql`, and\n * `cloudflare/sql`. 
Backend authors implementing the public `StorageBackend`\n * contract should call it too.\n */\nexport function assertUpdatePayloadExclusive(update: {\n dataOps?: unknown;\n replaceData?: unknown;\n}): void {\n if (update.replaceData !== undefined && update.dataOps !== undefined) {\n throw new Error(\n 'firegraph: UpdatePayload cannot specify both `replaceData` and `dataOps`. ' +\n 'Use one or the other — `replaceData` is the migration-write-back form, ' +\n '`dataOps` is the standard partial-update form.',\n );\n }\n}\n\n/**\n * Reject `DELETE_FIELD` sentinels in payloads where field deletion isn't a\n * meaningful operation: full-document replace (`replaceNode`/`replaceEdge`)\n * and the merge-default put surface (`putNode`/`putEdge`).\n *\n * Why both:\n * - In **replace**, the entire `data` field is overwritten. A delete\n * sentinel in that payload either silently disappears (Firestore drops\n * the Symbol during `.set()` serialization) or produces an empty SQLite\n * `json_remove` no-op, depending on backend. Either way the caller's\n * intent — \"remove field X\" — is lost. Use `updateNode` instead.\n * - In **put** (merge mode), behaviour diverges across backends today:\n * SQLite's flattenPatch emits a real delete op, but Firestore's\n * `.set(..., {merge: true})` silently drops the Symbol. Until that's\n * fixed end-to-end, the safest contract is to reject sentinels at the\n * entry point and steer callers to `updateNode`.\n *\n * The walk mirrors `flattenPatch`: plain objects recurse, everything else\n * is terminal. Tagged serialization payloads short-circuit so we don't\n * recurse into the `__firegraph_ser__` envelope.\n */\nexport function assertNoDeleteSentinels(data: unknown, callerLabel: string): void {\n walkForDeleteSentinels(data, [], { kind: 'root' }, ({ path }) => {\n const where = path.length === 0 ? 
'<root>' : path.map((p) => JSON.stringify(p)).join(' > ');\n throw new Error(\n `firegraph: ${callerLabel} payload contains a deleteField() sentinel at ${where}. ` +\n `deleteField() is only valid inside updateNode/updateEdge — full-data ` +\n `writes (put*, replace*) cannot delete individual fields. Use updateNode ` +\n `with a deleteField() value, or omit the field from the replace payload.`,\n );\n });\n}\n\ntype SentinelParent = { kind: 'root' } | { kind: 'object' } | { kind: 'array'; index: number };\n\nfunction walkForDeleteSentinels(\n node: unknown,\n path: readonly string[],\n parent: SentinelParent,\n visit: (ctx: { path: readonly string[]; parent: SentinelParent }) => void,\n): void {\n if (node === null || node === undefined) return;\n if (isDeleteSentinel(node)) {\n visit({ path, parent });\n return;\n }\n if (typeof node !== 'object') return;\n if (isTaggedValue(node)) return;\n if (Array.isArray(node)) {\n for (let i = 0; i < node.length; i++) {\n walkForDeleteSentinels(node[i], [...path, String(i)], { kind: 'array', index: i }, visit);\n }\n return;\n }\n const proto = Object.getPrototypeOf(node);\n if (proto !== null && proto !== Object.prototype) return;\n const obj = node as Record<string, unknown>;\n for (const key of Object.keys(obj)) {\n walkForDeleteSentinels(obj[key], [...path, key], { kind: 'object' }, visit);\n }\n}\n\n/** Throws if any path segment in the patch is unsafe for SQLite paths. */\nexport function assertSafePath(path: readonly string[]): void {\n for (const seg of path) {\n if (!SAFE_KEY_RE.test(seg)) {\n throw new Error(\n `firegraph: unsafe object key ${JSON.stringify(seg)} at path ${path\n .map((p) => JSON.stringify(p))\n .join(' > ')}. 
Keys used inside update payloads must match ` +\n `/^[A-Za-z_][A-Za-z0-9_-]*$/ so they can be embedded safely in ` +\n `SQLite JSON paths.`,\n );\n }\n }\n}\n\n// ---------------------------------------------------------------------------\n// flattenPatch\n// ---------------------------------------------------------------------------\n\n/**\n * Flatten a partial-update payload into a list of terminal {@link DataPathOp}s.\n *\n * Rules:\n * - Plain objects (no prototype or `Object.prototype`) recurse — each\n * key becomes another path segment.\n * - Arrays are terminal: writing `{tags: ['a']}` overwrites the whole\n * `tags` array. Element-wise array merging is intentionally NOT\n * supported — it's almost never what callers actually want, and\n * Firestore `arrayUnion`/`arrayRemove` give precise semantics when\n * they are.\n * - `undefined` values are skipped (no op generated). Use\n * {@link deleteField} if you actually want to remove a field.\n * - `null` is preserved verbatim — emits a terminal op with `value: null`.\n * - {@link DELETE_FIELD} produces an op with `delete: true`.\n * - Firestore special types and tagged serialization payloads are terminal.\n * - Class instances are terminal.\n *\n * Throws if any object key on the recursion path is unsafe (see\n * {@link assertSafePath}).\n */\nexport function flattenPatch(data: Record<string, unknown>): DataPathOp[] {\n const ops: DataPathOp[] = [];\n walk(data, [], ops);\n return ops;\n}\n\nfunction assertNoDeleteSentinelsInArrayValue(\n arr: readonly unknown[],\n arrayPath: readonly string[],\n): void {\n walkForDeleteSentinels(arr, arrayPath, { kind: 'root' }, ({ parent }) => {\n const arrayPathStr =\n arrayPath.length === 0 ? '<root>' : arrayPath.map((p) => JSON.stringify(p)).join(' > ');\n if (parent.kind === 'array') {\n throw new Error(\n `firegraph: deleteField() sentinel at index ${parent.index} inside an array at ` +\n `path ${arrayPathStr}. 
Arrays are ` +\n `terminal in update payloads (replaced as a unit), so the sentinel ` +\n `would be silently dropped by JSON serialization. To remove the ` +\n `field entirely, pass deleteField() in place of the whole array.`,\n );\n }\n throw new Error(\n `firegraph: deleteField() sentinel inside an array element at ` +\n `path ${arrayPathStr}. ` +\n `Arrays are terminal in update payloads — the sentinel would ` +\n `be silently dropped by JSON serialization.`,\n );\n });\n}\n\nfunction walk(node: unknown, path: string[], out: DataPathOp[]): void {\n // Caller guarantees the root is a plain object; this branch only\n // matters for recursion.\n if (node === undefined) return;\n if (isDeleteSentinel(node)) {\n if (path.length === 0) {\n throw new Error('firegraph: deleteField() cannot be the entire update payload.');\n }\n assertSafePath(path);\n out.push({ path: [...path], value: undefined, delete: true });\n return;\n }\n if (isTerminalValue(node)) {\n if (path.length === 0) {\n // `null` / array / primitive at the root is illegal — patches must\n // describe per-key changes.\n throw new Error(\n 'firegraph: update payload must be a plain object. Got ' +\n (node === null ? 'null' : Array.isArray(node) ? 'array' : typeof node) +\n '.',\n );\n }\n // A DELETE_FIELD sentinel embedded inside an array (which is terminal\n // and replaced as a unit) would silently disappear: JSON.stringify drops\n // Symbols, and Firestore's serializer does likewise. 
Reject loudly so\n // the divergence between \"user wrote a delete\" and \"field stayed put\"\n // can't happen.\n if (Array.isArray(node)) {\n assertNoDeleteSentinelsInArrayValue(node, path);\n }\n assertSafePath(path);\n out.push({ path: [...path], value: node, delete: false });\n return;\n }\n // Plain object: recurse into its own enumerable keys.\n const obj = node as Record<string, unknown>;\n const keys = Object.keys(obj);\n if (keys.length === 0) {\n // Empty object at non-root: emit terminal op so an empty object can\n // be written explicitly when the caller really wants one. Skip at\n // the root — no-op patches should produce no ops.\n if (path.length > 0) {\n assertSafePath(path);\n out.push({ path: [...path], value: {}, delete: false });\n }\n return;\n }\n for (const key of keys) {\n if (key === SERIALIZATION_TAG) {\n const where = path.length === 0 ? '<root>' : path.map((p) => JSON.stringify(p)).join(' > ');\n throw new Error(\n `firegraph: update payload contains a literal \\`${SERIALIZATION_TAG}\\` key at ` +\n `${where}. That key is reserved for firegraph's serialization envelope and ` +\n `cannot appear on a plain object in user data. 
Use a different field name, ` +\n `or pass a recognized tagged value through replaceNode/replaceEdge instead.`,\n );\n }\n walk(obj[key], [...path, key], out);\n }\n}\n","import { computeEdgeDocId, computeNodeDocId } from './docid.js';\nimport type { BatchBackend, WritableRecord } from './internal/backend.js';\nimport { NODE_RELATION } from './internal/constants.js';\nimport { assertNoDeleteSentinels, flattenPatch } from './internal/write-plan.js';\nimport type { GraphBatch, GraphRegistry } from './types.js';\n\nfunction buildWritableNodeRecord(\n aType: string,\n uid: string,\n data: Record<string, unknown>,\n): WritableRecord {\n return { aType, aUid: uid, axbType: NODE_RELATION, bType: aType, bUid: uid, data };\n}\n\nfunction buildWritableEdgeRecord(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n): WritableRecord {\n return { aType, aUid, axbType, bType, bUid, data };\n}\n\nexport class GraphBatchImpl implements GraphBatch {\n constructor(\n private readonly backend: BatchBackend,\n private readonly registry?: GraphRegistry,\n private readonly scopePath: string = '',\n ) {}\n\n async putNode(aType: string, uid: string, data: Record<string, unknown>): Promise<void> {\n this.writeNode(aType, uid, data, 'merge');\n }\n\n async putEdge(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n ): Promise<void> {\n this.writeEdge(aType, aUid, axbType, bType, bUid, data, 'merge');\n }\n\n async replaceNode(aType: string, uid: string, data: Record<string, unknown>): Promise<void> {\n this.writeNode(aType, uid, data, 'replace');\n }\n\n async replaceEdge(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n ): Promise<void> {\n this.writeEdge(aType, aUid, axbType, bType, bUid, data, 'replace');\n }\n\n private writeNode(\n aType: string,\n uid: string,\n data: 
Record<string, unknown>,\n mode: 'merge' | 'replace',\n ): void {\n assertNoDeleteSentinels(data, mode === 'replace' ? 'replaceNode' : 'putNode');\n if (this.registry) {\n this.registry.validate(aType, NODE_RELATION, aType, data, this.scopePath);\n }\n const docId = computeNodeDocId(uid);\n const record = buildWritableNodeRecord(aType, uid, data);\n if (this.registry) {\n const entry = this.registry.lookup(aType, NODE_RELATION, aType);\n if (entry?.schemaVersion && entry.schemaVersion > 0) {\n record.v = entry.schemaVersion;\n }\n }\n this.backend.setDoc(docId, record, mode);\n }\n\n private writeEdge(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n mode: 'merge' | 'replace',\n ): void {\n assertNoDeleteSentinels(data, mode === 'replace' ? 'replaceEdge' : 'putEdge');\n if (this.registry) {\n this.registry.validate(aType, axbType, bType, data, this.scopePath);\n }\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n const record = buildWritableEdgeRecord(aType, aUid, axbType, bType, bUid, data);\n if (this.registry) {\n const entry = this.registry.lookup(aType, axbType, bType);\n if (entry?.schemaVersion && entry.schemaVersion > 0) {\n record.v = entry.schemaVersion;\n }\n }\n this.backend.setDoc(docId, record, mode);\n }\n\n async updateNode(uid: string, data: Record<string, unknown>): Promise<void> {\n const docId = computeNodeDocId(uid);\n this.backend.updateDoc(docId, { dataOps: flattenPatch(data) });\n }\n\n async updateEdge(\n aUid: string,\n axbType: string,\n bUid: string,\n data: Record<string, unknown>,\n ): Promise<void> {\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n this.backend.updateDoc(docId, { dataOps: flattenPatch(data) });\n }\n\n async removeNode(uid: string): Promise<void> {\n const docId = computeNodeDocId(uid);\n this.backend.deleteDoc(docId);\n }\n\n async removeEdge(aUid: string, axbType: string, bUid: string): Promise<void> {\n const docId = 
computeEdgeDocId(aUid, axbType, bUid);\n this.backend.deleteDoc(docId);\n }\n\n async commit(): Promise<void> {\n await this.backend.commit();\n }\n}\n","import { createHash } from 'node:crypto';\n\nimport { NODE_RELATION } from './internal/constants.js';\nimport { createRegistry } from './registry.js';\nimport { compileMigrations, precompileSource } from './sandbox.js';\nimport type {\n EdgeTypeData,\n GraphReader,\n GraphRegistry,\n MigrationExecutor,\n NodeTypeData,\n RegistryEntry,\n} from './types.js';\n\n// ---------------------------------------------------------------------------\n// Meta-type constants\n// ---------------------------------------------------------------------------\n\n/** The aType used for node type definition meta-nodes. */\nexport const META_NODE_TYPE = 'nodeType';\n\n/** The aType used for edge type definition meta-nodes. */\nexport const META_EDGE_TYPE = 'edgeType';\n\n// ---------------------------------------------------------------------------\n// JSON Schemas for meta-type data payloads\n// ---------------------------------------------------------------------------\n\n/** JSON Schema for a single stored migration step. */\nconst STORED_MIGRATION_STEP_SCHEMA = {\n type: 'object',\n required: ['fromVersion', 'toVersion', 'up'],\n properties: {\n fromVersion: { type: 'integer', minimum: 0 },\n toVersion: { type: 'integer', minimum: 1 },\n up: { type: 'string', minLength: 1 },\n },\n additionalProperties: false,\n};\n\n/** JSON Schema for the `data` payload of a `nodeType` meta-node. 
*/\nexport const NODE_TYPE_SCHEMA: object = {\n type: 'object',\n required: ['name', 'jsonSchema'],\n properties: {\n name: { type: 'string', minLength: 1 },\n jsonSchema: { type: 'object' },\n description: { type: 'string' },\n titleField: { type: 'string' },\n subtitleField: { type: 'string' },\n viewTemplate: { type: 'string' },\n viewCss: { type: 'string' },\n allowedIn: { type: 'array', items: { type: 'string', minLength: 1 } },\n schemaVersion: { type: 'integer', minimum: 0 },\n migrations: { type: 'array', items: STORED_MIGRATION_STEP_SCHEMA },\n migrationWriteBack: { type: 'string', enum: ['off', 'eager', 'background'] },\n },\n additionalProperties: false,\n};\n\n/** JSON Schema for the `data` payload of an `edgeType` meta-node. */\nexport const EDGE_TYPE_SCHEMA: object = {\n type: 'object',\n required: ['name', 'from', 'to'],\n properties: {\n name: { type: 'string', minLength: 1 },\n from: {\n oneOf: [\n { type: 'string', minLength: 1 },\n { type: 'array', items: { type: 'string', minLength: 1 }, minItems: 1 },\n ],\n },\n to: {\n oneOf: [\n { type: 'string', minLength: 1 },\n { type: 'array', items: { type: 'string', minLength: 1 }, minItems: 1 },\n ],\n },\n jsonSchema: { type: 'object' },\n inverseLabel: { type: 'string' },\n description: { type: 'string' },\n titleField: { type: 'string' },\n subtitleField: { type: 'string' },\n viewTemplate: { type: 'string' },\n viewCss: { type: 'string' },\n allowedIn: { type: 'array', items: { type: 'string', minLength: 1 } },\n targetGraph: { type: 'string', minLength: 1, pattern: '^[^/]+$' },\n schemaVersion: { type: 'integer', minimum: 0 },\n migrations: { type: 'array', items: STORED_MIGRATION_STEP_SCHEMA },\n migrationWriteBack: { type: 'string', enum: ['off', 'eager', 'background'] },\n },\n additionalProperties: false,\n};\n\n// ---------------------------------------------------------------------------\n// Bootstrap registry\n// 
---------------------------------------------------------------------------\n\n/** Registry entries for the two meta-types (always present). */\nexport const BOOTSTRAP_ENTRIES: readonly RegistryEntry[] = [\n {\n aType: META_NODE_TYPE,\n axbType: NODE_RELATION,\n bType: META_NODE_TYPE,\n jsonSchema: NODE_TYPE_SCHEMA,\n description: 'Meta-type: defines a node type',\n },\n {\n aType: META_EDGE_TYPE,\n axbType: NODE_RELATION,\n bType: META_EDGE_TYPE,\n jsonSchema: EDGE_TYPE_SCHEMA,\n description: 'Meta-type: defines an edge type',\n },\n];\n\n/**\n * Build the bootstrap registry that validates meta-type writes.\n * This is always available, even before any dynamic types are loaded.\n *\n * Memoized at module scope: `BOOTSTRAP_ENTRIES` is a `readonly` array\n * of module-level constants and `createRegistry` is pure over them, so\n * the resulting registry — including its compiled cfworker\n * `Validator`s — can be reused across every `GraphClientImpl`\n * constructor. This matters on Cloudflare Workers, where the dynamic\n * client constructor runs on every request that touches the\n * meta-registry path; without memoization we'd re-walk +\n * re-dereference these schemas per request.\n */\nlet _bootstrapRegistry: GraphRegistry | null = null;\nexport function createBootstrapRegistry(): GraphRegistry {\n if (_bootstrapRegistry) return _bootstrapRegistry;\n _bootstrapRegistry = createRegistry([...BOOTSTRAP_ENTRIES]);\n return _bootstrapRegistry;\n}\n\n// ---------------------------------------------------------------------------\n// Deterministic UID generation\n// ---------------------------------------------------------------------------\n\n/**\n * Generate a deterministic UID for a meta-type definition.\n * This ensures that defining the same type name always targets the same\n * Firestore document, enabling upsert semantics.\n *\n * Format: 21-char base64url substring of SHA-256(`metaType:name`).\n */\nexport function generateDeterministicUid(metaType: string, name: 
string): string {\n const hash = createHash('sha256').update(`${metaType}:${name}`).digest('base64url');\n return hash.slice(0, 21);\n}\n\n// ---------------------------------------------------------------------------\n// createRegistryFromGraph\n// ---------------------------------------------------------------------------\n\n/**\n * Read meta-type nodes from the graph and compile them into a GraphRegistry.\n *\n * The returned registry includes both the dynamic entries AND the bootstrap\n * meta-type entries, so meta-type writes remain validateable after a reload.\n *\n * @param reader - A GraphReader pointed at the collection containing meta-nodes.\n * @param executor - Optional custom executor for compiling stored migration source strings.\n */\nexport async function createRegistryFromGraph(\n reader: GraphReader,\n executor?: MigrationExecutor,\n): Promise<GraphRegistry> {\n const [nodeTypes, edgeTypes] = await Promise.all([\n reader.findNodes({ aType: META_NODE_TYPE }),\n reader.findNodes({ aType: META_EDGE_TYPE }),\n ]);\n\n const entries: RegistryEntry[] = [...BOOTSTRAP_ENTRIES];\n\n // Eagerly pre-validate all migration sources in the sandbox before building\n // the registry. 
This ensures reloadRegistry() fails fast on invalid sources.\n const prevalidations: Promise<void>[] = [];\n for (const record of nodeTypes) {\n const data = record.data as unknown as NodeTypeData;\n if (data.migrations) {\n for (const m of data.migrations) {\n prevalidations.push(precompileSource(m.up, executor));\n }\n }\n }\n for (const record of edgeTypes) {\n const data = record.data as unknown as EdgeTypeData;\n if (data.migrations) {\n for (const m of data.migrations) {\n prevalidations.push(precompileSource(m.up, executor));\n }\n }\n }\n await Promise.all(prevalidations);\n\n // Convert nodeType records → self-loop RegistryEntries\n for (const record of nodeTypes) {\n const data = record.data as unknown as NodeTypeData;\n entries.push({\n aType: data.name,\n axbType: NODE_RELATION,\n bType: data.name,\n jsonSchema: data.jsonSchema,\n description: data.description,\n titleField: data.titleField,\n subtitleField: data.subtitleField,\n allowedIn: data.allowedIn,\n migrations: data.migrations ? compileMigrations(data.migrations, executor) : undefined,\n migrationWriteBack: data.migrationWriteBack,\n });\n }\n\n // Convert edgeType records → RegistryEntries (expand from/to arrays)\n for (const record of edgeTypes) {\n const data = record.data as unknown as EdgeTypeData;\n const fromTypes = Array.isArray(data.from) ? data.from : [data.from];\n const toTypes = Array.isArray(data.to) ? data.to : [data.to];\n\n const compiledMigrations = data.migrations\n ? 
compileMigrations(data.migrations, executor)\n : undefined;\n\n for (const aType of fromTypes) {\n for (const bType of toTypes) {\n entries.push({\n aType,\n axbType: data.name,\n bType,\n jsonSchema: data.jsonSchema,\n description: data.description,\n inverseLabel: data.inverseLabel,\n titleField: data.titleField,\n subtitleField: data.subtitleField,\n allowedIn: data.allowedIn,\n targetGraph: data.targetGraph,\n migrations: compiledMigrations,\n migrationWriteBack: data.migrationWriteBack,\n });\n }\n }\n }\n\n return createRegistry(entries);\n}\n","export class FiregraphError extends Error {\n constructor(\n message: string,\n public readonly code: string,\n ) {\n super(message);\n this.name = 'FiregraphError';\n }\n}\n\nexport class NodeNotFoundError extends FiregraphError {\n constructor(uid: string) {\n super(`Node not found: ${uid}`, 'NODE_NOT_FOUND');\n this.name = 'NodeNotFoundError';\n }\n}\n\nexport class EdgeNotFoundError extends FiregraphError {\n constructor(aUid: string, axbType: string, bUid: string) {\n super(`Edge not found: ${aUid} -[${axbType}]-> ${bUid}`, 'EDGE_NOT_FOUND');\n this.name = 'EdgeNotFoundError';\n }\n}\n\nexport class ValidationError extends FiregraphError {\n constructor(\n message: string,\n public readonly details?: unknown,\n ) {\n super(message, 'VALIDATION_ERROR');\n this.name = 'ValidationError';\n }\n}\n\nexport class RegistryViolationError extends FiregraphError {\n constructor(aType: string, axbType: string, bType: string) {\n super(`Unregistered triple: (${aType}) -[${axbType}]-> (${bType})`, 'REGISTRY_VIOLATION');\n this.name = 'RegistryViolationError';\n }\n}\n\nexport class InvalidQueryError extends FiregraphError {\n constructor(message: string) {\n super(message, 'INVALID_QUERY');\n this.name = 'InvalidQueryError';\n }\n}\n\nexport class TraversalError extends FiregraphError {\n constructor(message: string) {\n super(message, 'TRAVERSAL_ERROR');\n this.name = 'TraversalError';\n }\n}\n\nexport class 
DynamicRegistryError extends FiregraphError {\n constructor(message: string) {\n super(message, 'DYNAMIC_REGISTRY_ERROR');\n this.name = 'DynamicRegistryError';\n }\n}\n\nexport class QuerySafetyError extends FiregraphError {\n constructor(message: string) {\n super(message, 'QUERY_SAFETY');\n this.name = 'QuerySafetyError';\n }\n}\n\nexport class RegistryScopeError extends FiregraphError {\n constructor(\n aType: string,\n axbType: string,\n bType: string,\n scopePath: string,\n allowedIn: string[],\n ) {\n super(\n `Type (${aType}) -[${axbType}]-> (${bType}) is not allowed at scope \"${scopePath || 'root'}\". ` +\n `Allowed in: [${allowedIn.join(', ')}]`,\n 'REGISTRY_SCOPE',\n );\n this.name = 'RegistryScopeError';\n }\n}\n\nexport class MigrationError extends FiregraphError {\n constructor(message: string) {\n super(message, 'MIGRATION_ERROR');\n this.name = 'MigrationError';\n }\n}\n\n/**\n * Thrown when a caller tries to perform an operation that would require\n * atomicity across two physical storage backends — e.g. opening a routed\n * subgraph client from inside a transaction callback. Cross-backend\n * atomicity cannot be honoured by real-world storage engines (Firestore,\n * SQLite drivers over D1/DO/better-sqlite3, etc.), so firegraph surfaces\n * this as a typed error instead of silently confining the write to the\n * base backend.\n *\n * Normally `TransactionBackend` and `BatchBackend` don't expose `subgraph()`\n * at the type level, so this error is unreachable through well-typed code.\n * It exists as a public catchable type for app code that needs to tolerate\n * this case deliberately (e.g. 
dynamic code paths that bypass the type\n * system) and as future-proofing if the interface ever grows a way to\n * request a sub-scope inside a transaction.\n */\nexport class CrossBackendTransactionError extends FiregraphError {\n constructor(message: string) {\n super(message, 'CROSS_BACKEND_TRANSACTION');\n this.name = 'CrossBackendTransactionError';\n }\n}\n","/**\n * JSON Schema validation and introspection utilities.\n *\n * Uses `@cfworker/json-schema` for validation — a runtime-interpreter\n * JSON Schema validator that does not rely on `new Function()` and is\n * therefore compatible with Cloudflare Workers (which run V8 with\n * `--disallow-code-generation-from-strings`). Ajv was used here\n * previously, but its `ajv.compile(schema)` generates a validator via\n * the Function constructor and fails with \"Code generation from strings\n * disallowed for this context\" whenever firegraph's dynamic-registry\n * bootstrap or `reloadRegistry` runs inside a Worker.\n *\n * The introspection half (`jsonSchemaToFieldMeta`) is pure string/object\n * manipulation with no validator dependency.\n */\n\nimport { type OutputUnit, type Schema, Validator } from '@cfworker/json-schema';\n\nimport { ValidationError } from './errors.js';\n\n// ---------------------------------------------------------------------------\n// FieldMeta types (previously in editor/server/schema-introspect.ts)\n// ---------------------------------------------------------------------------\n\nexport interface FieldMeta {\n name: string;\n type: 'string' | 'number' | 'boolean' | 'enum' | 'array' | 'object' | 'unknown';\n required: boolean;\n description?: string;\n enumValues?: string[];\n minLength?: number;\n maxLength?: number;\n pattern?: string;\n min?: number;\n max?: number;\n isInt?: boolean;\n itemMeta?: FieldMeta;\n fields?: FieldMeta[];\n}\n\n// ---------------------------------------------------------------------------\n// Validation\n// 
---------------------------------------------------------------------------\n\n/** Cap on how many errors get joined into the human-readable message. */\nconst MAX_RENDERED_ERRORS = 20;\n\n/**\n * Compile a JSON Schema into a validation function.\n *\n * The returned function throws `ValidationError` if data is invalid. The\n * error's `details` is the `OutputUnit[]` array produced by\n * `@cfworker/json-schema` — consumers that previously inspected Ajv's\n * `ErrorObject[]` need to map to the cfworker shape\n * (`{ keyword, keywordLocation, instanceLocation, error }`).\n *\n * Draft 2020-12 is requested by default to match the library's richest\n * feature set; schemas that omit `$schema` still validate under it\n * since keyword semantics back-compat to draft-07 for the fields\n * firegraph actually uses.\n *\n * `shortCircuit` is explicitly disabled so `result.errors` contains\n * every violation, not just the first one — humans rely on the joined\n * error message to debug bad writes from the editor / chat UI. The\n * full array is preserved on `ValidationError.details`; only the\n * rendered message is capped at `MAX_RENDERED_ERRORS` lines so\n * pathological `oneOf`/`anyOf` schemas can't blow up log lines.\n *\n * Format keywords supported by `@cfworker/json-schema` (anything else\n * is silently passed through — see node_modules/@cfworker/json-schema/\n * src/format.ts):\n * `date`, `time`, `date-time`, `duration`,\n * `email`, `hostname`, `ipv4`, `ipv6`,\n * `uri`, `uri-reference`, `uri-template`, `url`,\n * `uuid`, `regex`,\n * `json-pointer`, `relative-json-pointer`, `json-pointer-uri-fragment`.\n */\nexport function compileSchema(schema: object, label?: string): (data: unknown) => void {\n // `object` is the public type used throughout `RegistryEntry.jsonSchema`\n // and the dynamic-client API; cfworker's `Schema` is structurally\n // `{ [k: string]: any }`, which a JSON Schema document always\n // satisfies at runtime. 
The cast is therefore safe in practice —\n // pass anything other than a plain JSON-Schema-shaped object and\n // `dereference()` inside the validator will throw at construction.\n const validator = new Validator(schema as Schema, '2020-12', false);\n return (data: unknown) => {\n const result = validator.validate(data);\n if (!result.valid) {\n const total = result.errors.length;\n const head = result.errors.slice(0, MAX_RENDERED_ERRORS).map(formatError).join('; ');\n const overflow = total > MAX_RENDERED_ERRORS ? ` (+${total - MAX_RENDERED_ERRORS} more)` : '';\n throw new ValidationError(\n `Data validation failed${label ? ' for ' + label : ''}: ${head}${overflow}`,\n result.errors,\n );\n }\n };\n}\n\n/**\n * Format a single cfworker `OutputUnit` into a human-readable line.\n *\n * cfworker's `instanceLocation` is a JSON-Pointer-as-URI-fragment\n * (`#`, `#/foo`, `#/foo/0/bar`); strip the leading `#` so the rendered\n * path looks like Ajv's `instancePath` (`/foo/0/bar`) and root errors\n * read as `/` rather than `#`. The `[keyword]` prefix is included so\n * messages stay actionable when `error` is terse (e.g. `not`, `enum`).\n */\nfunction formatError(err: OutputUnit): string {\n const path = err.instanceLocation.replace(/^#/, '') || '/';\n const keyword = err.keyword ? `[${err.keyword}] ` : '';\n const detail = err.error ? `: ${keyword}${err.error}` : '';\n return `${path}${detail}`;\n}\n\n// ---------------------------------------------------------------------------\n// JSON Schema → FieldMeta introspection\n// ---------------------------------------------------------------------------\n\n/**\n * Convert a JSON Schema (expected to be `type: \"object\"`) into `FieldMeta[]`\n * suitable for the editor's SchemaForm component.\n */\nexport function jsonSchemaToFieldMeta(schema: any): FieldMeta[] {\n if (!schema || schema.type !== 'object' || !schema.properties) return [];\n\n const requiredSet = new Set<string>(Array.isArray(schema.required) ? 
schema.required : []);\n\n return Object.entries(schema.properties).map(([name, prop]) =>\n propertyToFieldMeta(name, prop as any, requiredSet.has(name)),\n );\n}\n\n/**\n * Convert a single JSON Schema property into a `FieldMeta`.\n */\nfunction propertyToFieldMeta(name: string, prop: any, required: boolean): FieldMeta {\n if (!prop) return { name, type: 'unknown', required };\n\n // Handle enum (can appear with or without type)\n if (Array.isArray(prop.enum)) {\n return {\n name,\n type: 'enum',\n required,\n enumValues: prop.enum as string[],\n description: prop.description,\n };\n }\n\n // Handle oneOf/anyOf for nullable patterns like { oneOf: [{type:'string'}, {type:'null'}] }\n if (Array.isArray(prop.oneOf) || Array.isArray(prop.anyOf)) {\n const variants = (prop.oneOf ?? prop.anyOf) as any[];\n const nonNull = variants.filter((v: any) => v.type !== 'null');\n if (nonNull.length === 1) {\n // Nullable wrapper — unwrap and mark as optional\n return propertyToFieldMeta(name, nonNull[0], false);\n }\n return { name, type: 'unknown', required, description: prop.description };\n }\n\n const type = prop.type;\n\n if (type === 'string') {\n return {\n name,\n type: 'string',\n required,\n minLength: prop.minLength,\n maxLength: prop.maxLength,\n pattern: prop.pattern,\n description: prop.description,\n };\n }\n\n if (type === 'number' || type === 'integer') {\n return {\n name,\n type: 'number',\n required,\n min: prop.minimum,\n max: prop.maximum,\n isInt: type === 'integer' ? true : undefined,\n description: prop.description,\n };\n }\n\n if (type === 'boolean') {\n return { name, type: 'boolean', required, description: prop.description };\n }\n\n if (type === 'array') {\n const itemMeta = prop.items ? 
propertyToFieldMeta('item', prop.items, true) : undefined;\n return {\n name,\n type: 'array',\n required,\n itemMeta,\n description: prop.description,\n };\n }\n\n if (type === 'object') {\n return {\n name,\n type: 'object',\n required,\n fields: jsonSchemaToFieldMeta(prop),\n description: prop.description,\n };\n }\n\n return { name, type: 'unknown', required, description: prop.description };\n}\n","/**\n * Migration pipeline for auto-migrating records on read.\n *\n * When a record's `v` is behind the version derived from the registry\n * entry's migrations, the pipeline applies migration steps sequentially\n * to bring the data up to the current version.\n */\n\nimport { MigrationError } from './errors.js';\nimport type {\n GraphRegistry,\n MigrationStep,\n MigrationWriteBack,\n StoredGraphRecord,\n} from './types.js';\n\n/** Result of attempting to migrate a single record. */\nexport interface MigrationResult {\n record: StoredGraphRecord;\n migrated: boolean;\n /** Resolved write-back mode for this record (entry-level > global > 'off'). */\n writeBack: MigrationWriteBack;\n}\n\n/**\n * Apply a chain of migration steps to transform data from `currentVersion`\n * to `targetVersion`. 
Throws `MigrationError` if the chain is incomplete\n * or a migration function fails.\n *\n * Returns the migrated data payload only — the caller is responsible for\n * stamping `v` on the record envelope.\n */\nexport async function applyMigrationChain(\n data: Record<string, unknown>,\n currentVersion: number,\n targetVersion: number,\n migrations: MigrationStep[],\n): Promise<Record<string, unknown>> {\n const sorted = [...migrations].sort((a, b) => a.fromVersion - b.fromVersion);\n let result = { ...data };\n let version = currentVersion;\n\n for (const step of sorted) {\n if (step.fromVersion === version) {\n try {\n result = await step.up(result);\n } catch (err: unknown) {\n if (err instanceof MigrationError) throw err;\n throw new MigrationError(\n `Migration from v${step.fromVersion} to v${step.toVersion} failed: ${(err as Error).message}`,\n );\n }\n if (!result || typeof result !== 'object') {\n throw new MigrationError(\n `Migration from v${step.fromVersion} to v${step.toVersion} returned invalid data (expected object)`,\n );\n }\n version = step.toVersion;\n }\n }\n\n if (version !== targetVersion) {\n throw new MigrationError(\n `Incomplete migration chain: reached v${version} but target is v${targetVersion}`,\n );\n }\n\n return result;\n}\n\n/**\n * Validate that a migration chain forms a contiguous path from version 0\n * to the highest `toVersion`. 
Throws `MigrationError` if the chain has\n * gaps or duplicate `fromVersion` values.\n *\n * Called at registry construction time to catch incomplete chains early,\n * rather than at read time when a record is migrated.\n */\nexport function validateMigrationChain(migrations: MigrationStep[], label: string): void {\n if (migrations.length === 0) return;\n\n // Validate individual steps\n const seen = new Set<number>();\n for (const step of migrations) {\n if (step.toVersion <= step.fromVersion) {\n throw new MigrationError(\n `${label}: migration step has toVersion (${step.toVersion}) <= fromVersion (${step.fromVersion})`,\n );\n }\n if (seen.has(step.fromVersion)) {\n throw new MigrationError(\n `${label}: duplicate migration step for fromVersion ${step.fromVersion}`,\n );\n }\n seen.add(step.fromVersion);\n }\n\n const sorted = [...migrations].sort((a, b) => a.fromVersion - b.fromVersion);\n const targetVersion = Math.max(...migrations.map((m) => m.toVersion));\n let version = 0;\n\n for (const step of sorted) {\n if (step.fromVersion === version) {\n version = step.toVersion;\n } else if (step.fromVersion > version) {\n throw new MigrationError(\n `${label}: migration chain has a gap — no step covers v${version} → v${step.fromVersion}`,\n );\n }\n }\n\n if (version !== targetVersion) {\n throw new MigrationError(\n `${label}: migration chain does not reach v${targetVersion} (stuck at v${version})`,\n );\n }\n}\n\n/**\n * Attempt to migrate a single record based on its registry entry.\n *\n * Returns the original record unchanged if no migration is needed\n * (no schema version, already at current version, or no migrations defined).\n */\nexport async function migrateRecord(\n record: StoredGraphRecord,\n registry: GraphRegistry,\n globalWriteBack: MigrationWriteBack = 'off',\n): Promise<MigrationResult> {\n const entry = registry.lookup(record.aType, record.axbType, record.bType);\n\n if (!entry?.migrations?.length || !entry.schemaVersion) {\n return { record, 
migrated: false, writeBack: 'off' };\n }\n\n const currentVersion = record.v ?? 0;\n\n if (currentVersion >= entry.schemaVersion) {\n return { record, migrated: false, writeBack: 'off' };\n }\n\n const migratedData = await applyMigrationChain(\n record.data,\n currentVersion,\n entry.schemaVersion,\n entry.migrations,\n );\n\n // Two-tier resolution: entry-level > global > 'off'\n const writeBack = entry.migrationWriteBack ?? globalWriteBack ?? 'off';\n\n return {\n record: { ...record, data: migratedData, v: entry.schemaVersion },\n migrated: true,\n writeBack,\n };\n}\n\n/**\n * Migrate an array of records, returning all results.\n * If any single migration fails, the entire call rejects — a broken\n * migration function is a bug that should surface immediately.\n */\nexport async function migrateRecords(\n records: StoredGraphRecord[],\n registry: GraphRegistry,\n globalWriteBack: MigrationWriteBack = 'off',\n): Promise<MigrationResult[]> {\n return Promise.all(records.map((r) => migrateRecord(r, registry, globalWriteBack)));\n}\n","/**\n * Scope path matching for subgraph-level registry constraints.\n *\n * Scope paths are slash-separated names derived from the chain of\n * `subgraph()` calls (e.g., `'agents'`, `'agents/memories'`).\n * The root graph has an empty scope path (`''`).\n *\n * Patterns:\n * - `'root'` — matches only the root graph (empty scope path)\n * - `'agents'` — matches exactly `'agents'`\n * - `'agents/memories'` — matches exactly `'agents/memories'`\n * - `'*/agents'` — `*` matches one segment: `'foo/agents'` but not `'a/b/agents'`\n * - `'**/memories'` — `**` matches zero or more segments\n * - `'**'` — matches everything including root\n */\n\n/**\n * Test whether a scope path matches a single pattern.\n *\n * @param scopePath - The current scope path (empty string for root)\n * @param pattern - The pattern to match against\n */\nexport function matchScope(scopePath: string, pattern: string): boolean {\n // Special case: 'root' matches 
only the root graph\n if (pattern === 'root') return scopePath === '';\n\n // Special case: '**' matches everything\n if (pattern === '**') return true;\n\n const pathSegments = scopePath === '' ? [] : scopePath.split('/');\n const patternSegments = pattern.split('/');\n\n return matchSegments(pathSegments, 0, patternSegments, 0);\n}\n\n/**\n * Test whether a scope path matches any pattern in a list.\n * Returns `true` if the list is empty or undefined (allowed everywhere).\n *\n * @param scopePath - The current scope path (empty string for root)\n * @param patterns - Array of patterns to match against\n */\nexport function matchScopeAny(scopePath: string, patterns: string[]): boolean {\n if (!patterns || patterns.length === 0) return true;\n return patterns.some((p) => matchScope(scopePath, p));\n}\n\n/**\n * Recursive segment matcher with support for `*` (one segment) and\n * `**` (zero or more segments).\n */\nfunction matchSegments(path: string[], pi: number, pattern: string[], qi: number): boolean {\n // Both exhausted — match\n if (pi === path.length && qi === pattern.length) return true;\n\n // Pattern exhausted but path remains — no match\n if (qi === pattern.length) return false;\n\n const seg = pattern[qi];\n\n if (seg === '**') {\n // '**' at the end of pattern — matches everything remaining\n if (qi === pattern.length - 1) return true;\n\n // Try consuming 0, 1, 2, ... 
path segments\n for (let skip = 0; skip <= path.length - pi; skip++) {\n if (matchSegments(path, pi + skip, pattern, qi + 1)) return true;\n }\n return false;\n }\n\n // Path exhausted but pattern has non-** segments remaining — no match\n if (pi === path.length) return false;\n\n if (seg === '*') {\n // '*' matches exactly one segment\n return matchSegments(path, pi + 1, pattern, qi + 1);\n }\n\n // Literal match\n if (path[pi] === seg) {\n return matchSegments(path, pi + 1, pattern, qi + 1);\n }\n\n return false;\n}\n","import { RegistryScopeError, RegistryViolationError, ValidationError } from './errors.js';\nimport { NODE_RELATION } from './internal/constants.js';\nimport { compileSchema } from './json-schema.js';\nimport { validateMigrationChain } from './migration.js';\nimport { matchScopeAny } from './scope.js';\nimport type { DiscoveryResult, GraphRegistry, RegistryEntry } from './types.js';\n\nfunction tripleKey(aType: string, axbType: string, bType: string): string {\n return `${aType}:${axbType}:${bType}`;\n}\n\nfunction tripleKeyFor(e: RegistryEntry): string {\n return tripleKey(e.aType, e.axbType, e.bType);\n}\n\n/**\n * Build a registry from either explicit entries or a DiscoveryResult.\n *\n * @example\n * ```ts\n * // From explicit entries (programmatic)\n * const registry = createRegistry([\n * { aType: 'user', axbType: 'is', bType: 'user', jsonSchema: userSchema },\n * { aType: 'user', axbType: 'follows', bType: 'user', jsonSchema: followsSchema },\n * ]);\n *\n * // From discovery result (folder convention)\n * const discovered = await discoverEntities('./entities');\n * const registry = createRegistry(discovered);\n * ```\n */\nexport function createRegistry(input: RegistryEntry[] | DiscoveryResult): GraphRegistry {\n const map = new Map<string, { entry: RegistryEntry; validate?: (data: unknown) => void }>();\n\n let entries: RegistryEntry[];\n\n if (Array.isArray(input)) {\n entries = input;\n } else {\n entries = discoveryToEntries(input);\n 
}\n\n const entryList: ReadonlyArray<RegistryEntry> = Object.freeze([...entries]);\n\n for (const entry of entries) {\n if (entry.targetGraph && entry.targetGraph.includes('/')) {\n throw new ValidationError(\n `Entry (${entry.aType}) -[${entry.axbType}]-> (${entry.bType}) has invalid targetGraph \"${entry.targetGraph}\" — must be a single segment (no \"/\")`,\n );\n }\n if (entry.migrations?.length) {\n const label = `Entry (${entry.aType}) -[${entry.axbType}]-> (${entry.bType})`;\n validateMigrationChain(entry.migrations, label);\n // Derive schemaVersion from migrations — single source of truth\n entry.schemaVersion = Math.max(...entry.migrations.map((m) => m.toVersion));\n } else {\n // No migrations → no versioning (ignore any user-supplied schemaVersion)\n entry.schemaVersion = undefined;\n }\n const key = tripleKey(entry.aType, entry.axbType, entry.bType);\n const validator = entry.jsonSchema\n ? compileSchema(entry.jsonSchema, `(${entry.aType}) -[${entry.axbType}]-> (${entry.bType})`)\n : undefined;\n map.set(key, { entry, validate: validator });\n }\n\n // Build axbType index for lookupByAxbType\n const axbIndex = new Map<string, ReadonlyArray<RegistryEntry>>();\n const axbBuild = new Map<string, RegistryEntry[]>();\n for (const entry of entries) {\n const existing = axbBuild.get(entry.axbType);\n if (existing) {\n existing.push(entry);\n } else {\n axbBuild.set(entry.axbType, [entry]);\n }\n }\n for (const [key, arr] of axbBuild) {\n axbIndex.set(key, Object.freeze(arr));\n }\n\n // Build aType → subgraph-topology index.\n //\n // For each source aType, collect edge entries whose `targetGraph` is set —\n // these are the aType's direct subgraph children. Dedupe by `targetGraph`\n // alone (not by axbType): the physical subgraph store is addressed by\n // (parentUid, targetGraph) and the cascade caller only cares about which\n // child subgraphs to tear down. 
Two distinct edge relations pointing into\n // the same `targetGraph` would otherwise produce duplicate destroy calls\n // on the same physical backend.\n const topologyIndex = new Map<string, ReadonlyArray<RegistryEntry>>();\n const topologyBuild = new Map<string, RegistryEntry[]>();\n const topologySeen = new Map<string, Set<string>>();\n for (const entry of entries) {\n if (!entry.targetGraph) continue;\n let seen = topologySeen.get(entry.aType);\n if (!seen) {\n seen = new Set();\n topologySeen.set(entry.aType, seen);\n }\n if (seen.has(entry.targetGraph)) continue;\n seen.add(entry.targetGraph);\n const existing = topologyBuild.get(entry.aType);\n if (existing) {\n existing.push(entry);\n } else {\n topologyBuild.set(entry.aType, [entry]);\n }\n }\n for (const [key, arr] of topologyBuild) {\n topologyIndex.set(key, Object.freeze(arr));\n }\n\n return {\n lookup(aType: string, axbType: string, bType: string): RegistryEntry | undefined {\n return map.get(tripleKey(aType, axbType, bType))?.entry;\n },\n\n lookupByAxbType(axbType: string): ReadonlyArray<RegistryEntry> {\n return axbIndex.get(axbType) ?? [];\n },\n\n getSubgraphTopology(aType: string): ReadonlyArray<RegistryEntry> {\n return topologyIndex.get(aType) ?? 
[];\n },\n\n validate(\n aType: string,\n axbType: string,\n bType: string,\n data: unknown,\n scopePath?: string,\n ): void {\n const rec = map.get(tripleKey(aType, axbType, bType));\n\n if (!rec) {\n throw new RegistryViolationError(aType, axbType, bType);\n }\n\n // Scope validation: check allowedIn patterns when a scope context is provided\n if (scopePath !== undefined && rec.entry.allowedIn && rec.entry.allowedIn.length > 0) {\n if (!matchScopeAny(scopePath, rec.entry.allowedIn)) {\n throw new RegistryScopeError(aType, axbType, bType, scopePath, rec.entry.allowedIn);\n }\n }\n\n if (rec.validate) {\n try {\n rec.validate(data);\n } catch (err: unknown) {\n if (err instanceof ValidationError) throw err;\n throw new ValidationError(\n `Data validation failed for (${aType}) -[${axbType}]-> (${bType})`,\n err,\n );\n }\n }\n },\n\n entries(): ReadonlyArray<RegistryEntry> {\n return entryList;\n },\n };\n}\n\n/**\n * Create a merged registry where `base` entries take priority and `extension`\n * entries fill in gaps. Lookups and validation check `base` first; only if the\n * triple is not found there does the merged registry fall through to\n * `extension`.\n *\n * The `entries()` method returns a deduplicated list (base wins on collision).\n * The `lookupByAxbType()` method merges results from both registries,\n * deduplicating by triple key with base entries winning.\n */\nexport function createMergedRegistry(base: GraphRegistry, extension: GraphRegistry): GraphRegistry {\n // Build a set of triple keys from the base registry for fast collision checks.\n const baseKeys = new Set(base.entries().map(tripleKeyFor));\n\n return {\n lookup(aType: string, axbType: string, bType: string): RegistryEntry | undefined {\n return base.lookup(aType, axbType, bType) ?? 
extension.lookup(aType, axbType, bType);\n },\n\n lookupByAxbType(axbType: string): ReadonlyArray<RegistryEntry> {\n const baseResults = base.lookupByAxbType(axbType);\n const extResults = extension.lookupByAxbType(axbType);\n if (extResults.length === 0) return baseResults;\n if (baseResults.length === 0) return extResults;\n\n // Merge, base wins on triple-key collision\n const seen = new Set(baseResults.map(tripleKeyFor));\n const merged = [...baseResults];\n for (const entry of extResults) {\n if (!seen.has(tripleKeyFor(entry))) {\n merged.push(entry);\n }\n }\n return Object.freeze(merged);\n },\n\n getSubgraphTopology(aType: string): ReadonlyArray<RegistryEntry> {\n const baseResults = base.getSubgraphTopology(aType);\n const extResults = extension.getSubgraphTopology(aType);\n if (extResults.length === 0) return baseResults;\n if (baseResults.length === 0) return extResults;\n\n // Merge, base wins on `targetGraph` collision. Extension entries only\n // contribute new subgraph segments the base doesn't cover. 
Dedupe key\n // matches the physical DO address — (parentUid, targetGraph) — so two\n // different axbTypes pointing into the same segment collapse to one.\n const seen = new Set(baseResults.map((e) => e.targetGraph));\n const merged = [...baseResults];\n for (const entry of extResults) {\n if (!seen.has(entry.targetGraph)) {\n seen.add(entry.targetGraph);\n merged.push(entry);\n }\n }\n return Object.freeze(merged);\n },\n\n validate(\n aType: string,\n axbType: string,\n bType: string,\n data: unknown,\n scopePath?: string,\n ): void {\n if (baseKeys.has(tripleKey(aType, axbType, bType))) {\n return base.validate(aType, axbType, bType, data, scopePath);\n }\n // Falls through to extension (which throws RegistryViolationError if not found)\n return extension.validate(aType, axbType, bType, data, scopePath);\n },\n\n entries(): ReadonlyArray<RegistryEntry> {\n const extEntries = extension.entries();\n if (extEntries.length === 0) return base.entries();\n\n const merged = [...base.entries()];\n for (const entry of extEntries) {\n if (!baseKeys.has(tripleKeyFor(entry))) {\n merged.push(entry);\n }\n }\n return Object.freeze(merged);\n },\n };\n}\n\n/**\n * Convert a DiscoveryResult into flat RegistryEntry[].\n * Nodes become self-loop triples `(name, 'is', name)`.\n * Edges expand `from`/`to` arrays into one triple per combination.\n */\nfunction discoveryToEntries(discovery: DiscoveryResult): RegistryEntry[] {\n const entries: RegistryEntry[] = [];\n\n // Nodes → self-loop triples\n for (const [name, entity] of discovery.nodes) {\n entries.push({\n aType: name,\n axbType: NODE_RELATION,\n bType: name,\n jsonSchema: entity.schema,\n description: entity.description,\n titleField: entity.titleField,\n subtitleField: entity.subtitleField,\n allowedIn: entity.allowedIn,\n migrations: entity.migrations,\n migrationWriteBack: entity.migrationWriteBack,\n indexes: entity.indexes,\n });\n }\n\n // Edges → expand from/to into one triple per combination\n for (const [axbType, 
entity] of discovery.edges) {\n const topology = entity.topology;\n if (!topology) continue;\n\n const fromTypes = Array.isArray(topology.from) ? topology.from : [topology.from];\n const toTypes = Array.isArray(topology.to) ? topology.to : [topology.to];\n\n const resolvedTargetGraph = entity.targetGraph ?? topology.targetGraph;\n if (resolvedTargetGraph && resolvedTargetGraph.includes('/')) {\n throw new ValidationError(\n `Edge \"${axbType}\" has invalid targetGraph \"${resolvedTargetGraph}\" — must be a single segment (no \"/\")`,\n );\n }\n\n for (const aType of fromTypes) {\n for (const bType of toTypes) {\n entries.push({\n aType,\n axbType,\n bType,\n jsonSchema: entity.schema,\n description: entity.description,\n inverseLabel: topology.inverseLabel,\n titleField: entity.titleField,\n subtitleField: entity.subtitleField,\n allowedIn: entity.allowedIn,\n targetGraph: resolvedTargetGraph,\n migrations: entity.migrations,\n migrationWriteBack: entity.migrationWriteBack,\n indexes: entity.indexes,\n });\n }\n }\n }\n\n return entries;\n}\n","/**\n * Sandbox module for compiling dynamic registry migration source strings\n * into executable functions.\n *\n * Uses a dedicated worker thread with SES (Secure ECMAScript) Compartments\n * for isolation. SES `lockdown()` and `Compartment` evaluation run in the\n * worker thread so that the host process's intrinsics remain unaffected.\n *\n * Each migration function runs in a hardened compartment with no ambient\n * authority — no access to `process`, `require`, `fetch`, `setTimeout`,\n * or any other host-provided globals. 
Data crosses the compartment boundary\n * as JSON strings to prevent prototype chain escapes.\n *\n * Static registry migrations are already in-memory functions and never\n * go through this module.\n */\n\nimport { createHash } from 'node:crypto';\nimport type { Worker } from 'node:worker_threads';\n\nimport { MigrationError } from './errors.js';\nimport type * as SerializationModule from './serialization.js';\nimport type {\n MigrationExecutor,\n MigrationFn,\n MigrationStep,\n StoredMigrationStep,\n} from './types.js';\n\n// ---------------------------------------------------------------------------\n// Sandbox worker — SES lockdown and Compartment evaluation run in a\n// dedicated worker thread so that lockdown() does not affect the host\n// process's intrinsics. The worker is spawned lazily on first use.\n// ---------------------------------------------------------------------------\n\nlet _worker: Worker | null = null;\nlet _requestId = 0;\nconst _pending = new Map<\n number,\n {\n resolve: (value: unknown) => void;\n reject: (reason: Error) => void;\n }\n>();\n\n/**\n * Inline worker source evaluated as CJS in a dedicated worker thread.\n * Contains all SES setup, compilation, and execution logic.\n *\n * **Why inline?** Using `new Worker(code, { eval: true })` avoids\n * ESM/CJS file resolution issues when the library is consumed from\n * different module formats or bundlers.\n */\nconst WORKER_SOURCE = [\n `'use strict';`,\n `var _wt = require('node:worker_threads');`,\n `var _mod = require('node:module');`,\n `var _crypto = require('node:crypto');`,\n `var parentPort = _wt.parentPort;`,\n `var workerData = _wt.workerData;`,\n ``,\n `// Load SES using the parent module's resolution context`,\n `var esmRequire = _mod.createRequire(workerData.parentUrl);`,\n `esmRequire('ses');`,\n ``,\n `lockdown({`,\n ` errorTaming: 'unsafe',`,\n ` consoleTaming: 'unsafe',`,\n ` evalTaming: 'safe-eval',`,\n ` overrideTaming: 'moderate',`,\n ` stackFiltering: 'verbose'`,\n 
`});`,\n ``,\n `// Defense-in-depth: verify lockdown() actually hardened JSON.`,\n `if (!Object.isFrozen(JSON)) {`,\n ` throw new Error('SES lockdown failed: JSON is not frozen');`,\n `}`,\n ``,\n `var cache = new Map();`,\n ``,\n `function hashSource(s) {`,\n ` return _crypto.createHash('sha256').update(s).digest('hex');`,\n `}`,\n ``,\n `function buildWrapper(source) {`,\n ` return '(function() {' +`,\n ` ' var fn = (' + source + ');\\\\n' +`,\n ` ' if (typeof fn !== \"function\") return null;\\\\n' +`,\n ` ' return function(jsonIn) {\\\\n' +`,\n ` ' var data = JSON.parse(jsonIn);\\\\n' +`,\n ` ' var result = fn(data);\\\\n' +`,\n ` ' if (result !== null && typeof result === \"object\" && typeof result.then === \"function\") {\\\\n' +`,\n ` ' return result.then(function(r) { return JSON.stringify(r); });\\\\n' +`,\n ` ' }\\\\n' +`,\n ` ' return JSON.stringify(result);\\\\n' +`,\n ` ' };\\\\n' +`,\n ` '})()';`,\n `}`,\n ``,\n `function compileSource(source) {`,\n ` var key = hashSource(source);`,\n ` var cached = cache.get(key);`,\n ` if (cached) return cached;`,\n ``,\n ` var compartmentFn;`,\n ` try {`,\n ` var c = new Compartment({ JSON: JSON });`,\n ` compartmentFn = c.evaluate(buildWrapper(source));`,\n ` } catch (err) {`,\n ` throw new Error('Failed to compile migration source: ' + (err.message || String(err)));`,\n ` }`,\n ``,\n ` if (typeof compartmentFn !== 'function') {`,\n ` throw new Error('Migration source did not produce a function: ' + source.slice(0, 80));`,\n ` }`,\n ``,\n ` cache.set(key, compartmentFn);`,\n ` return compartmentFn;`,\n `}`,\n ``,\n `parentPort.on('message', function(msg) {`,\n ` var id = msg.id;`,\n ` try {`,\n ` if (msg.type === 'compile') {`,\n ` compileSource(msg.source);`,\n ` parentPort.postMessage({ id: id, type: 'compiled' });`,\n ` return;`,\n ` }`,\n ` if (msg.type === 'execute') {`,\n ` var fn = compileSource(msg.source);`,\n ` var raw;`,\n ` try {`,\n ` raw = fn(msg.jsonData);`,\n ` } catch (err) {`,\n ` 
parentPort.postMessage({ id: id, type: 'error', message: 'Migration function threw: ' + (err.message || String(err)) });`,\n ` return;`,\n ` }`,\n ` if (raw !== null && typeof raw === 'object' && typeof raw.then === 'function') {`,\n ` raw.then(`,\n ` function(jsonResult) {`,\n ` if (jsonResult === undefined || jsonResult === null) {`,\n ` parentPort.postMessage({ id: id, type: 'error', message: 'Migration returned a non-JSON-serializable value' });`,\n ` } else {`,\n ` parentPort.postMessage({ id: id, type: 'result', jsonResult: jsonResult });`,\n ` }`,\n ` },`,\n ` function(err) {`,\n ` parentPort.postMessage({ id: id, type: 'error', message: 'Async migration function threw: ' + (err.message || String(err)) });`,\n ` }`,\n ` );`,\n ` return;`,\n ` }`,\n ` if (raw === undefined || raw === null) {`,\n ` parentPort.postMessage({ id: id, type: 'error', message: 'Migration returned a non-JSON-serializable value' });`,\n ` } else {`,\n ` parentPort.postMessage({ id: id, type: 'result', jsonResult: raw });`,\n ` }`,\n ` }`,\n ` } catch (err) {`,\n ` parentPort.postMessage({ id: id, type: 'error', message: err.message || String(err) });`,\n ` }`,\n `});`,\n].join('\\n');\n\n// ---------------------------------------------------------------------------\n// Worker lifecycle management\n// ---------------------------------------------------------------------------\n\ninterface WorkerResponse {\n id: number;\n type: string;\n message?: string;\n jsonResult?: string;\n}\n\n// `node:worker_threads` is loaded lazily so this module can be imported in\n// runtimes without it (Cloudflare Workers, browsers). 
Only callers that\n// actually exercise the default migration sandbox will trigger the import.\nlet _WorkerCtor: (new (source: string, opts: Record<string, unknown>) => Worker) | null = null;\n\nasync function loadWorkerCtor(): Promise<NonNullable<typeof _WorkerCtor>> {\n if (_WorkerCtor) return _WorkerCtor;\n const wt = await import('node:worker_threads');\n _WorkerCtor = wt.Worker as unknown as NonNullable<typeof _WorkerCtor>;\n return _WorkerCtor;\n}\n\nasync function ensureWorker(): Promise<Worker> {\n if (_worker) return _worker;\n\n const Ctor = await loadWorkerCtor();\n _worker = new Ctor(WORKER_SOURCE, {\n eval: true,\n workerData: { parentUrl: import.meta.url },\n });\n\n // Don't let the worker prevent process exit\n _worker.unref();\n\n _worker.on('message', (msg: WorkerResponse) => {\n if (msg.id === undefined) return;\n const pending = _pending.get(msg.id);\n if (!pending) return;\n _pending.delete(msg.id);\n\n if (msg.type === 'error') {\n pending.reject(new MigrationError(msg.message ?? 
'Unknown sandbox error'));\n } else {\n pending.resolve(msg);\n }\n });\n\n _worker.on('error', (err: Error) => {\n // Worker crashed — reject all pending requests and allow respawn\n for (const [, p] of _pending) {\n p.reject(new MigrationError(`Sandbox worker error: ${err.message}`));\n }\n _pending.clear();\n _worker = null;\n });\n\n _worker.on('exit', (code: number) => {\n // Always reject pending requests — a worker exiting while requests\n // are in-flight is always an error from the caller's perspective,\n // even if the exit code is 0 (e.g., graceful termination).\n if (_pending.size > 0) {\n for (const [, p] of _pending) {\n p.reject(new MigrationError(`Sandbox worker exited with code ${code}`));\n }\n _pending.clear();\n }\n _worker = null;\n });\n\n return _worker;\n}\n\nasync function sendToWorker(msg: Record<string, unknown>): Promise<WorkerResponse> {\n const worker = await ensureWorker();\n if (_requestId >= Number.MAX_SAFE_INTEGER) _requestId = 0;\n const id = ++_requestId;\n return new Promise<WorkerResponse>((resolve, reject) => {\n _pending.set(id, { resolve: resolve as (v: unknown) => void, reject });\n worker.postMessage({ ...msg, id });\n });\n}\n\n// ---------------------------------------------------------------------------\n// Compiled function cache (keyed by executor → SHA-256 hash of source string)\n// ---------------------------------------------------------------------------\n\n// Two-level cache: outer key is the executor reference (WeakMap so that\n// short-lived executors and their caches can be garbage collected), inner\n// key is the SHA-256 hash of the source string. 
This prevents cache\n// poisoning when different clients use different sandbox executors in\n// the same process.\nconst compiledCache = new WeakMap<MigrationExecutor, Map<string, MigrationFn>>();\n\nfunction getExecutorCache(executor: MigrationExecutor): Map<string, MigrationFn> {\n let cache = compiledCache.get(executor);\n if (!cache) {\n cache = new Map();\n compiledCache.set(executor, cache);\n }\n return cache;\n}\n\nfunction hashSource(source: string): string {\n return createHash('sha256').update(source).digest('hex');\n}\n\n// ---------------------------------------------------------------------------\n// Lazy serialization loader. Pulls `@google-cloud/firestore` only when the\n// default executor actually runs a migration — keeps Firestore out of\n// non-Firestore bundles (e.g. the Cloudflare DO backend).\n// ---------------------------------------------------------------------------\n\nlet _serializationModule: typeof SerializationModule | null = null;\n\nasync function loadSerialization(): Promise<typeof SerializationModule> {\n if (_serializationModule) return _serializationModule;\n _serializationModule = await import('./serialization.js');\n return _serializationModule;\n}\n\n// ---------------------------------------------------------------------------\n// Default executor\n// ---------------------------------------------------------------------------\n\n/**\n * Default executor using a worker-thread SES Compartment with JSON marshaling.\n *\n * Migration source is compiled and executed inside an isolated SES\n * Compartment running in a dedicated worker thread. The worker calls\n * `lockdown()` in its own V8 isolate, leaving the host process's\n * intrinsics completely unaffected.\n *\n * Data crosses the compartment boundary as JSON strings, preventing\n * prototype chain escapes. 
The compartment receives only `JSON` as an\n * endowment for parsing/stringifying data.\n *\n * The returned `MigrationFn` always returns a `Promise` (communication\n * with the worker is inherently async via `postMessage`).\n */\nexport function defaultExecutor(source: string): MigrationFn {\n // Worker is spawned lazily on first execution via `sendToWorker`.\n // Eager spawning here would force a top-level `node:worker_threads`\n // load and break Cloudflare Workers / browser callers that never\n // exercise the default sandbox.\n\n // Return a MigrationFn that delegates to the worker thread.\n // Compilation + execution happen in the worker's SES Compartment.\n return (async (data: Record<string, unknown>) => {\n const { serializeFirestoreTypes, deserializeFirestoreTypes } = await loadSerialization();\n const jsonData = JSON.stringify(serializeFirestoreTypes(data));\n const response = await sendToWorker({ type: 'execute', source, jsonData });\n if (response.jsonResult === undefined || response.jsonResult === null) {\n throw new MigrationError('Migration returned a non-JSON-serializable value');\n }\n try {\n return deserializeFirestoreTypes(JSON.parse(response.jsonResult));\n } catch {\n throw new MigrationError('Migration returned a non-JSON-serializable value');\n }\n }) as MigrationFn;\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Eagerly validate a migration source string by compiling it in the\n * sandbox worker (or via a custom executor) without executing it.\n *\n * Use this to catch syntax errors at define-time or reload-time rather\n * than at first migration execution.\n *\n * @throws {MigrationError} If the source is syntactically invalid or\n * does not produce a function.\n */\nexport async function precompileSource(\n source: string,\n executor?: MigrationExecutor,\n): Promise<void> {\n if (executor && 
executor !== defaultExecutor) {\n // Custom executors validate synchronously the old way\n try {\n executor(source);\n } catch (err: unknown) {\n if (err instanceof MigrationError) throw err;\n throw new MigrationError(`Failed to compile migration source: ${(err as Error).message}`);\n }\n return;\n }\n\n // Default executor: send a compile-only message to the worker\n await sendToWorker({ type: 'compile', source });\n}\n\n/**\n * Compile a stored migration source string into an executable function.\n * Results are cached by SHA-256 hash of the source string so repeated\n * reads never re-parse the same migration.\n *\n * **Important:** When using the default executor, this function does NOT\n * validate the source synchronously — validation is deferred to the\n * worker thread at execution time. Callers that need eager validation\n * (e.g., `defineNodeType`, `reloadRegistry`) should call\n * `precompileSource()` before or alongside `compileMigrationFn()`.\n */\nexport function compileMigrationFn(\n source: string,\n executor: MigrationExecutor = defaultExecutor,\n): MigrationFn {\n const cache = getExecutorCache(executor);\n const key = hashSource(source);\n const cached = cache.get(key);\n if (cached) return cached;\n\n try {\n const fn = executor(source);\n cache.set(key, fn);\n return fn;\n } catch (err: unknown) {\n if (err instanceof MigrationError) throw err;\n throw new MigrationError(`Failed to compile migration source: ${(err as Error).message}`);\n }\n}\n\n/**\n * Batch compile stored migration steps into executable MigrationStep[].\n *\n * With the default executor, source validation is deferred to execution\n * time. 
Use `precompileSource()` to validate eagerly — see\n * `createRegistryFromGraph()` for the recommended pattern.\n */\nexport function compileMigrations(\n stored: StoredMigrationStep[],\n executor?: MigrationExecutor,\n): MigrationStep[] {\n return stored.map((step) => ({\n fromVersion: step.fromVersion,\n toVersion: step.toVersion,\n up: compileMigrationFn(step.up, executor),\n }));\n}\n\n/**\n * Terminate the sandbox worker thread. The worker will be respawned\n * on the next `defaultExecutor` call.\n *\n * Primarily useful for test cleanup to avoid vitest hanging on\n * unfinished worker threads.\n */\nexport async function destroySandboxWorker(): Promise<void> {\n if (!_worker) return;\n const w = _worker;\n _worker = null;\n // Reject any remaining pending requests\n for (const [, p] of _pending) {\n p.reject(new MigrationError('Sandbox worker terminated'));\n }\n _pending.clear();\n await w.terminate();\n}\n","import { computeEdgeDocId } from './docid.js';\nimport { InvalidQueryError } from './errors.js';\nimport { BUILTIN_FIELDS, DEFAULT_QUERY_LIMIT, NODE_RELATION } from './internal/constants.js';\nimport type { FindEdgesParams, FindNodesParams, QueryFilter, QueryPlan } from './types.js';\n\nexport function buildEdgeQueryPlan(params: FindEdgesParams): QueryPlan {\n const { aType, aUid, axbType, bType, bUid, limit, orderBy } = params;\n\n if (aUid && axbType && bUid && !params.where?.length) {\n return { strategy: 'get', docId: computeEdgeDocId(aUid, axbType, bUid) };\n }\n\n const filters: QueryFilter[] = [];\n\n if (aType) filters.push({ field: 'aType', op: '==', value: aType });\n if (aUid) filters.push({ field: 'aUid', op: '==', value: aUid });\n if (axbType) filters.push({ field: 'axbType', op: '==', value: axbType });\n if (bType) filters.push({ field: 'bType', op: '==', value: bType });\n if (bUid) filters.push({ field: 'bUid', op: '==', value: bUid });\n\n if (params.where) {\n for (const clause of params.where) {\n const field = 
BUILTIN_FIELDS.has(clause.field)\n ? clause.field\n : clause.field.startsWith('data.')\n ? clause.field\n : `data.${clause.field}`;\n filters.push({ field, op: clause.op, value: clause.value });\n }\n }\n\n if (filters.length === 0) {\n throw new InvalidQueryError('findEdges requires at least one filter parameter');\n }\n\n // limit: undefined → apply DEFAULT_QUERY_LIMIT\n // limit: 0 → no limit (unlimited, used by internal bulk operations)\n // limit: N → use N\n const effectiveLimit = limit === undefined ? DEFAULT_QUERY_LIMIT : limit || undefined;\n return { strategy: 'query', filters, options: { limit: effectiveLimit, orderBy } };\n}\n\nexport function buildNodeQueryPlan(params: FindNodesParams): QueryPlan {\n const { aType, limit, orderBy } = params;\n\n const filters: QueryFilter[] = [\n { field: 'aType', op: '==', value: aType },\n { field: 'axbType', op: '==', value: NODE_RELATION },\n ];\n\n if (params.where) {\n for (const clause of params.where) {\n const field = BUILTIN_FIELDS.has(clause.field)\n ? clause.field\n : clause.field.startsWith('data.')\n ? clause.field\n : `data.${clause.field}`;\n filters.push({ field, op: clause.op, value: clause.value });\n }\n }\n\n const effectiveLimit = limit === undefined ? DEFAULT_QUERY_LIMIT : limit || undefined;\n return { strategy: 'query', filters, options: { limit: effectiveLimit, orderBy } };\n}\n","import { BUILTIN_FIELDS } from './internal/constants.js';\nimport type { QueryFilter } from './types.js';\n\n/**\n * Result of analyzing a query for collection scan risk.\n */\nexport interface QuerySafetyResult {\n /** Whether the query matches a known indexed pattern. */\n safe: boolean;\n /** Human-readable explanation when the query is unsafe. */\n reason?: string;\n}\n\n/**\n * Known composite index patterns that prevent full collection scans.\n * Each pattern is a set of field names that must ALL be present in the\n * query filters. 
Order within the set doesn't matter — what matters is\n * that the Firestore composite index covers the combination.\n *\n * These correspond to the indexes in firestore.indexes.json:\n * (aUid, axbType) — forward edge lookup\n * (axbType, bUid) — reverse edge lookup\n * (aType, axbType) — type-scoped queries + findNodes\n * (axbType, bType) — edge type + target type\n */\nconst SAFE_INDEX_PATTERNS: ReadonlyArray<ReadonlySet<string>> = [\n new Set(['aUid', 'axbType']),\n new Set(['axbType', 'bUid']),\n new Set(['aType', 'axbType']),\n new Set(['axbType', 'bType']),\n];\n\n/**\n * Analyzes a set of query filters to determine whether the query would\n * likely cause a full collection scan on Firestore Enterprise.\n *\n * A query is considered \"safe\" if the builtin fields present in the filters\n * match at least one known composite index pattern. Queries that only use\n * `data.*` fields without a safe base pattern are flagged as unsafe.\n */\nexport function analyzeQuerySafety(filters: QueryFilter[]): QuerySafetyResult {\n // Extract the set of builtin fields being filtered on (equality checks are\n // the primary index-usable operations, but we're generous here and count\n // any filter on a builtin field as potentially index-backed).\n const builtinFieldsPresent = new Set<string>();\n let hasDataFilters = false;\n\n for (const f of filters) {\n if (BUILTIN_FIELDS.has(f.field)) {\n builtinFieldsPresent.add(f.field);\n } else {\n // data.* or other non-builtin fields\n hasDataFilters = true;\n }\n }\n\n // Check if the builtin fields match any known safe index pattern.\n // A pattern is \"matched\" if all fields in the pattern are present in the query.\n for (const pattern of SAFE_INDEX_PATTERNS) {\n let matched = true;\n for (const field of pattern) {\n if (!builtinFieldsPresent.has(field)) {\n matched = false;\n break;\n }\n }\n if (matched) {\n // Even with data.* filters, the base index narrows the scan significantly.\n // The data.* filters are applied as 
post-filters on the index results.\n return { safe: true };\n }\n }\n\n // No safe pattern matched — build an explanation.\n const presentFields = [...builtinFieldsPresent];\n if (presentFields.length === 0 && hasDataFilters) {\n return {\n safe: false,\n reason:\n 'Query filters only use data.* fields with no builtin field constraints. ' +\n 'This requires a full collection scan. Add aType, aUid, axbType, bType, or bUid filters, ' +\n 'or set allowCollectionScan: true.',\n };\n }\n\n if (hasDataFilters) {\n return {\n safe: false,\n reason:\n `Query filters on [${presentFields.join(', ')}] do not match any indexed pattern. ` +\n 'data.* filters without an indexed base require a full collection scan. ' +\n `Safe patterns: (aUid + axbType), (axbType + bUid), (aType + axbType), (axbType + bType). ` +\n 'Set allowCollectionScan: true to override.',\n };\n }\n\n return {\n safe: false,\n reason:\n `Query filters on [${presentFields.join(', ')}] do not match any indexed pattern. ` +\n 'This may cause a full collection scan on Firestore Enterprise. ' +\n `Safe patterns: (aUid + axbType), (axbType + bUid), (aType + axbType), (axbType + bType). 
` +\n 'Set allowCollectionScan: true to override.',\n };\n}\n","import { computeEdgeDocId, computeNodeDocId } from './docid.js';\nimport { QuerySafetyError } from './errors.js';\nimport type { TransactionBackend, WritableRecord } from './internal/backend.js';\nimport { NODE_RELATION } from './internal/constants.js';\nimport { assertNoDeleteSentinels, flattenPatch } from './internal/write-plan.js';\nimport { migrateRecord, migrateRecords } from './migration.js';\nimport { buildEdgeQueryPlan, buildNodeQueryPlan } from './query.js';\nimport { analyzeQuerySafety } from './query-safety.js';\nimport type {\n FindEdgesParams,\n FindNodesParams,\n GraphRegistry,\n GraphTransaction,\n MigrationWriteBack,\n QueryFilter,\n ScanProtection,\n StoredGraphRecord,\n} from './types.js';\n\nfunction buildWritableNodeRecord(\n aType: string,\n uid: string,\n data: Record<string, unknown>,\n): WritableRecord {\n return { aType, aUid: uid, axbType: NODE_RELATION, bType: aType, bUid: uid, data };\n}\n\nfunction buildWritableEdgeRecord(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n): WritableRecord {\n return { aType, aUid, axbType, bType, bUid, data };\n}\n\nexport class GraphTransactionImpl implements GraphTransaction {\n constructor(\n private readonly backend: TransactionBackend,\n private readonly registry?: GraphRegistry,\n private readonly scanProtection: ScanProtection = 'error',\n private readonly scopePath: string = '',\n private readonly globalWriteBack: MigrationWriteBack = 'off',\n ) {}\n\n async getNode(uid: string): Promise<StoredGraphRecord | null> {\n const docId = computeNodeDocId(uid);\n const record = await this.backend.getDoc(docId);\n if (!record || !this.registry) return record;\n const result = await migrateRecord(record, this.registry, this.globalWriteBack);\n if (result.migrated && result.writeBack !== 'off') {\n await this.backend.updateDoc(docId, {\n replaceData: result.record.data as 
Record<string, unknown>,\n v: result.record.v,\n });\n }\n return result.record;\n }\n\n async getEdge(aUid: string, axbType: string, bUid: string): Promise<StoredGraphRecord | null> {\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n const record = await this.backend.getDoc(docId);\n if (!record || !this.registry) return record;\n const result = await migrateRecord(record, this.registry, this.globalWriteBack);\n if (result.migrated && result.writeBack !== 'off') {\n await this.backend.updateDoc(docId, {\n replaceData: result.record.data as Record<string, unknown>,\n v: result.record.v,\n });\n }\n return result.record;\n }\n\n async edgeExists(aUid: string, axbType: string, bUid: string): Promise<boolean> {\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n const record = await this.backend.getDoc(docId);\n return record !== null;\n }\n\n private checkQuerySafety(filters: QueryFilter[], allowCollectionScan?: boolean): void {\n if (allowCollectionScan || this.scanProtection === 'off') return;\n\n const result = analyzeQuerySafety(filters);\n if (result.safe) return;\n\n if (this.scanProtection === 'error') {\n throw new QuerySafetyError(result.reason!);\n }\n\n console.warn(`[firegraph] Query safety warning: ${result.reason}`);\n }\n\n async findEdges(params: FindEdgesParams): Promise<StoredGraphRecord[]> {\n const plan = buildEdgeQueryPlan(params);\n let records: StoredGraphRecord[];\n if (plan.strategy === 'get') {\n const record = await this.backend.getDoc(plan.docId);\n records = record ? 
[record] : [];\n } else {\n this.checkQuerySafety(plan.filters, params.allowCollectionScan);\n records = await this.backend.query(plan.filters, plan.options);\n }\n return this.applyMigrations(records);\n }\n\n async findNodes(params: FindNodesParams): Promise<StoredGraphRecord[]> {\n const plan = buildNodeQueryPlan(params);\n let records: StoredGraphRecord[];\n if (plan.strategy === 'get') {\n const record = await this.backend.getDoc(plan.docId);\n records = record ? [record] : [];\n } else {\n this.checkQuerySafety(plan.filters, params.allowCollectionScan);\n records = await this.backend.query(plan.filters, plan.options);\n }\n return this.applyMigrations(records);\n }\n\n private async applyMigrations(records: StoredGraphRecord[]): Promise<StoredGraphRecord[]> {\n if (!this.registry || records.length === 0) return records;\n const results = await migrateRecords(records, this.registry, this.globalWriteBack);\n for (const result of results) {\n if (result.migrated && result.writeBack !== 'off') {\n const docId =\n result.record.axbType === NODE_RELATION\n ? 
computeNodeDocId(result.record.aUid)\n : computeEdgeDocId(result.record.aUid, result.record.axbType, result.record.bUid);\n await this.backend.updateDoc(docId, {\n replaceData: result.record.data as Record<string, unknown>,\n v: result.record.v,\n });\n }\n }\n return results.map((r) => r.record);\n }\n\n async putNode(aType: string, uid: string, data: Record<string, unknown>): Promise<void> {\n await this.writeNode(aType, uid, data, 'merge');\n }\n\n async putEdge(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n ): Promise<void> {\n await this.writeEdge(aType, aUid, axbType, bType, bUid, data, 'merge');\n }\n\n async replaceNode(aType: string, uid: string, data: Record<string, unknown>): Promise<void> {\n await this.writeNode(aType, uid, data, 'replace');\n }\n\n async replaceEdge(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n ): Promise<void> {\n await this.writeEdge(aType, aUid, axbType, bType, bUid, data, 'replace');\n }\n\n private async writeNode(\n aType: string,\n uid: string,\n data: Record<string, unknown>,\n mode: 'merge' | 'replace',\n ): Promise<void> {\n assertNoDeleteSentinels(data, mode === 'replace' ? 
'replaceNode' : 'putNode');\n if (this.registry) {\n this.registry.validate(aType, NODE_RELATION, aType, data, this.scopePath);\n }\n const docId = computeNodeDocId(uid);\n const record = buildWritableNodeRecord(aType, uid, data);\n if (this.registry) {\n const entry = this.registry.lookup(aType, NODE_RELATION, aType);\n if (entry?.schemaVersion && entry.schemaVersion > 0) {\n record.v = entry.schemaVersion;\n }\n }\n await this.backend.setDoc(docId, record, mode);\n }\n\n private async writeEdge(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n mode: 'merge' | 'replace',\n ): Promise<void> {\n assertNoDeleteSentinels(data, mode === 'replace' ? 'replaceEdge' : 'putEdge');\n if (this.registry) {\n this.registry.validate(aType, axbType, bType, data, this.scopePath);\n }\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n const record = buildWritableEdgeRecord(aType, aUid, axbType, bType, bUid, data);\n if (this.registry) {\n const entry = this.registry.lookup(aType, axbType, bType);\n if (entry?.schemaVersion && entry.schemaVersion > 0) {\n record.v = entry.schemaVersion;\n }\n }\n await this.backend.setDoc(docId, record, mode);\n }\n\n async updateNode(uid: string, data: Record<string, unknown>): Promise<void> {\n const docId = computeNodeDocId(uid);\n await this.backend.updateDoc(docId, { dataOps: flattenPatch(data) });\n }\n\n async updateEdge(\n aUid: string,\n axbType: string,\n bUid: string,\n data: Record<string, unknown>,\n ): Promise<void> {\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n await this.backend.updateDoc(docId, { dataOps: flattenPatch(data) });\n }\n\n async removeNode(uid: string): Promise<void> {\n const docId = computeNodeDocId(uid);\n await this.backend.deleteDoc(docId);\n }\n\n async removeEdge(aUid: string, axbType: string, bUid: string): Promise<void> {\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n await this.backend.deleteDoc(docId);\n 
}\n}\n","import { GraphBatchImpl } from './batch.js';\nimport { computeEdgeDocId, computeNodeDocId } from './docid.js';\nimport {\n createBootstrapRegistry,\n createRegistryFromGraph,\n generateDeterministicUid,\n META_EDGE_TYPE,\n META_NODE_TYPE,\n} from './dynamic-registry.js';\nimport { DynamicRegistryError, FiregraphError, QuerySafetyError } from './errors.js';\nimport type { StorageBackend, WritableRecord } from './internal/backend.js';\nimport { NODE_RELATION } from './internal/constants.js';\nimport { assertNoDeleteSentinels, flattenPatch } from './internal/write-plan.js';\nimport type { MigrationResult } from './migration.js';\nimport { migrateRecord, migrateRecords } from './migration.js';\nimport { buildEdgeQueryPlan, buildNodeQueryPlan } from './query.js';\nimport { analyzeQuerySafety } from './query-safety.js';\nimport { createMergedRegistry } from './registry.js';\nimport { precompileSource } from './sandbox.js';\nimport { GraphTransactionImpl } from './transaction.js';\nimport type {\n BulkOptions,\n BulkResult,\n CascadeResult,\n DefineTypeOptions,\n DynamicGraphClient,\n DynamicRegistryConfig,\n EdgeTopology,\n FindEdgesParams,\n FindNodesParams,\n GraphBatch,\n GraphClient,\n GraphClientOptions,\n GraphReader,\n GraphRegistry,\n GraphTransaction,\n MigrationExecutor,\n MigrationFn,\n MigrationWriteBack,\n QueryFilter,\n QueryOptions,\n ScanProtection,\n StoredGraphRecord,\n} from './types.js';\n\nconst RESERVED_TYPE_NAMES = new Set([META_NODE_TYPE, META_EDGE_TYPE]);\n\nfunction buildWritableNodeRecord(\n aType: string,\n uid: string,\n data: Record<string, unknown>,\n): WritableRecord {\n return { aType, aUid: uid, axbType: NODE_RELATION, bType: aType, bUid: uid, data };\n}\n\nfunction buildWritableEdgeRecord(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n): WritableRecord {\n return { aType, aUid, axbType, bType, bUid, data };\n}\n\nexport class GraphClientImpl implements 
DynamicGraphClient {\n readonly scanProtection: ScanProtection;\n\n // Static mode\n private readonly staticRegistry?: GraphRegistry;\n\n // Dynamic mode\n private readonly dynamicConfig?: DynamicRegistryConfig;\n private readonly bootstrapRegistry?: GraphRegistry;\n private dynamicRegistry?: GraphRegistry;\n private readonly metaBackend?: StorageBackend;\n\n // Migration settings\n private readonly globalWriteBack: MigrationWriteBack;\n private readonly migrationSandbox?: MigrationExecutor;\n\n constructor(\n private readonly backend: StorageBackend,\n options?: GraphClientOptions,\n /** @internal Optional pre-built meta-backend (used by subgraph clones). */\n metaBackend?: StorageBackend,\n ) {\n this.globalWriteBack = options?.migrationWriteBack ?? 'off';\n this.migrationSandbox = options?.migrationSandbox;\n\n if (options?.registryMode) {\n this.dynamicConfig = options.registryMode;\n this.bootstrapRegistry = createBootstrapRegistry();\n if (options.registry) {\n this.staticRegistry = options.registry;\n }\n this.metaBackend = metaBackend;\n } else {\n this.staticRegistry = options?.registry;\n }\n\n this.scanProtection = options?.scanProtection ?? 'error';\n }\n\n // ---------------------------------------------------------------------------\n // Backend access (exposed for traversal helpers and subgraph cloning)\n // ---------------------------------------------------------------------------\n\n /** @internal */\n getBackend(): StorageBackend {\n return this.backend;\n }\n\n /**\n * Snapshot of the currently-effective registry. Returns the merged view\n * used for domain-type validation and migration — in dynamic mode this is\n * `dynamicRegistry ?? staticRegistry ?? bootstrapRegistry`, so callers see\n * updates after `reloadRegistry()` without having to re-resolve anything.\n *\n * Exposed for backends that need topology access during bulk operations\n * (e.g. the Cloudflare DO backend's cross-DO cascade). 
Not part of the\n * public `GraphClient` surface.\n *\n * @internal\n */\n getRegistrySnapshot(): GraphRegistry | undefined {\n return this.getCombinedRegistry();\n }\n\n // ---------------------------------------------------------------------------\n // Registry routing\n // ---------------------------------------------------------------------------\n\n private getRegistryForType(aType: string): GraphRegistry | undefined {\n if (!this.dynamicConfig) return this.staticRegistry;\n\n if (aType === META_NODE_TYPE || aType === META_EDGE_TYPE) {\n return this.bootstrapRegistry;\n }\n\n return this.dynamicRegistry ?? this.staticRegistry ?? this.bootstrapRegistry;\n }\n\n private getBackendForType(aType: string): StorageBackend {\n if (this.metaBackend && (aType === META_NODE_TYPE || aType === META_EDGE_TYPE)) {\n return this.metaBackend;\n }\n return this.backend;\n }\n\n private getCombinedRegistry(): GraphRegistry | undefined {\n if (!this.dynamicConfig) return this.staticRegistry;\n return this.dynamicRegistry ?? this.staticRegistry ?? 
this.bootstrapRegistry;\n }\n\n // ---------------------------------------------------------------------------\n // Query safety\n // ---------------------------------------------------------------------------\n\n private checkQuerySafety(filters: QueryFilter[], allowCollectionScan?: boolean): void {\n if (allowCollectionScan || this.scanProtection === 'off') return;\n\n const result = analyzeQuerySafety(filters);\n if (result.safe) return;\n\n if (this.scanProtection === 'error') {\n throw new QuerySafetyError(result.reason!);\n }\n\n console.warn(`[firegraph] Query safety warning: ${result.reason}`);\n }\n\n // ---------------------------------------------------------------------------\n // Migration helpers\n // ---------------------------------------------------------------------------\n\n private async applyMigration(\n record: StoredGraphRecord,\n docId: string,\n ): Promise<StoredGraphRecord> {\n const registry = this.getCombinedRegistry();\n if (!registry) return record;\n\n const result = await migrateRecord(record, registry, this.globalWriteBack);\n if (result.migrated) {\n this.handleWriteBack(result, docId);\n }\n return result.record;\n }\n\n private async applyMigrations(records: StoredGraphRecord[]): Promise<StoredGraphRecord[]> {\n const registry = this.getCombinedRegistry();\n if (!registry || records.length === 0) return records;\n\n const results = await migrateRecords(records, registry, this.globalWriteBack);\n for (const result of results) {\n if (result.migrated) {\n const docId =\n result.record.axbType === NODE_RELATION\n ? computeNodeDocId(result.record.aUid)\n : computeEdgeDocId(result.record.aUid, result.record.axbType, result.record.bUid);\n this.handleWriteBack(result, docId);\n }\n }\n return results.map((r) => r.record);\n }\n\n /**\n * Fire-and-forget write-back for a migrated record. Both `'eager'` and\n * `'background'` are non-blocking; the difference is the log level on\n * failure. 
For synchronous write-back, use a transaction — see\n * `GraphTransactionImpl`.\n */\n private handleWriteBack(result: MigrationResult, docId: string): void {\n if (result.writeBack === 'off') return;\n\n const doWriteBack = async () => {\n try {\n await this.backend.updateDoc(docId, {\n replaceData: result.record.data as Record<string, unknown>,\n v: result.record.v,\n });\n } catch (err: unknown) {\n const msg = `[firegraph] Migration write-back failed for ${docId}: ${(err as Error).message}`;\n if (result.writeBack === 'eager') {\n console.error(msg);\n } else {\n console.warn(msg);\n }\n }\n };\n\n void doWriteBack();\n }\n\n // ---------------------------------------------------------------------------\n // GraphReader\n // ---------------------------------------------------------------------------\n\n async getNode(uid: string): Promise<StoredGraphRecord | null> {\n const docId = computeNodeDocId(uid);\n const record = await this.backend.getDoc(docId);\n if (!record) return null;\n return this.applyMigration(record, docId);\n }\n\n async getEdge(aUid: string, axbType: string, bUid: string): Promise<StoredGraphRecord | null> {\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n const record = await this.backend.getDoc(docId);\n if (!record) return null;\n return this.applyMigration(record, docId);\n }\n\n async edgeExists(aUid: string, axbType: string, bUid: string): Promise<boolean> {\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n const record = await this.backend.getDoc(docId);\n return record !== null;\n }\n\n async findEdges(params: FindEdgesParams): Promise<StoredGraphRecord[]> {\n const plan = buildEdgeQueryPlan(params);\n let records: StoredGraphRecord[];\n if (plan.strategy === 'get') {\n const record = await this.backend.getDoc(plan.docId);\n records = record ? 
[record] : [];\n } else {\n this.checkQuerySafety(plan.filters, params.allowCollectionScan);\n records = await this.backend.query(plan.filters, plan.options);\n }\n return this.applyMigrations(records);\n }\n\n async findNodes(params: FindNodesParams): Promise<StoredGraphRecord[]> {\n const plan = buildNodeQueryPlan(params);\n let records: StoredGraphRecord[];\n if (plan.strategy === 'get') {\n const record = await this.backend.getDoc(plan.docId);\n records = record ? [record] : [];\n } else {\n this.checkQuerySafety(plan.filters, params.allowCollectionScan);\n records = await this.backend.query(plan.filters, plan.options);\n }\n return this.applyMigrations(records);\n }\n\n // ---------------------------------------------------------------------------\n // GraphWriter\n // ---------------------------------------------------------------------------\n\n async putNode(aType: string, uid: string, data: Record<string, unknown>): Promise<void> {\n await this.writeNode(aType, uid, data, 'merge');\n }\n\n async putEdge(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n ): Promise<void> {\n await this.writeEdge(aType, aUid, axbType, bType, bUid, data, 'merge');\n }\n\n async replaceNode(aType: string, uid: string, data: Record<string, unknown>): Promise<void> {\n await this.writeNode(aType, uid, data, 'replace');\n }\n\n async replaceEdge(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n ): Promise<void> {\n await this.writeEdge(aType, aUid, axbType, bType, bUid, data, 'replace');\n }\n\n private async writeNode(\n aType: string,\n uid: string,\n data: Record<string, unknown>,\n mode: 'merge' | 'replace',\n ): Promise<void> {\n assertNoDeleteSentinels(data, mode === 'replace' ? 
'replaceNode' : 'putNode');\n const registry = this.getRegistryForType(aType);\n if (registry) {\n registry.validate(aType, NODE_RELATION, aType, data, this.backend.scopePath);\n }\n const backend = this.getBackendForType(aType);\n const docId = computeNodeDocId(uid);\n const record = buildWritableNodeRecord(aType, uid, data);\n if (registry) {\n const entry = registry.lookup(aType, NODE_RELATION, aType);\n if (entry?.schemaVersion && entry.schemaVersion > 0) {\n record.v = entry.schemaVersion;\n }\n }\n await backend.setDoc(docId, record, mode);\n }\n\n private async writeEdge(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n mode: 'merge' | 'replace',\n ): Promise<void> {\n assertNoDeleteSentinels(data, mode === 'replace' ? 'replaceEdge' : 'putEdge');\n const registry = this.getRegistryForType(aType);\n if (registry) {\n registry.validate(aType, axbType, bType, data, this.backend.scopePath);\n }\n const backend = this.getBackendForType(aType);\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n const record = buildWritableEdgeRecord(aType, aUid, axbType, bType, bUid, data);\n if (registry) {\n const entry = registry.lookup(aType, axbType, bType);\n if (entry?.schemaVersion && entry.schemaVersion > 0) {\n record.v = entry.schemaVersion;\n }\n }\n await backend.setDoc(docId, record, mode);\n }\n\n async updateNode(uid: string, data: Record<string, unknown>): Promise<void> {\n const docId = computeNodeDocId(uid);\n await this.backend.updateDoc(docId, { dataOps: flattenPatch(data) });\n }\n\n async updateEdge(\n aUid: string,\n axbType: string,\n bUid: string,\n data: Record<string, unknown>,\n ): Promise<void> {\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n await this.backend.updateDoc(docId, { dataOps: flattenPatch(data) });\n }\n\n async removeNode(uid: string): Promise<void> {\n const docId = computeNodeDocId(uid);\n await this.backend.deleteDoc(docId);\n }\n\n async 
removeEdge(aUid: string, axbType: string, bUid: string): Promise<void> {\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n await this.backend.deleteDoc(docId);\n }\n\n // ---------------------------------------------------------------------------\n // Transactions & Batches\n // ---------------------------------------------------------------------------\n\n async runTransaction<T>(fn: (tx: GraphTransaction) => Promise<T>): Promise<T> {\n return this.backend.runTransaction(async (txBackend) => {\n const graphTx = new GraphTransactionImpl(\n txBackend,\n this.getCombinedRegistry(),\n this.scanProtection,\n this.backend.scopePath,\n this.globalWriteBack,\n );\n return fn(graphTx);\n });\n }\n\n batch(): GraphBatch {\n return new GraphBatchImpl(\n this.backend.createBatch(),\n this.getCombinedRegistry(),\n this.backend.scopePath,\n );\n }\n\n // ---------------------------------------------------------------------------\n // Subgraph\n // ---------------------------------------------------------------------------\n\n subgraph(parentNodeUid: string, name: string = 'graph'): GraphClient {\n if (!parentNodeUid || parentNodeUid.includes('/')) {\n throw new FiregraphError(\n `Invalid parentNodeUid for subgraph: \"${parentNodeUid}\". ` +\n 'Must be a non-empty string without \"/\".',\n 'INVALID_SUBGRAPH',\n );\n }\n if (name.includes('/')) {\n throw new FiregraphError(\n `Subgraph name must not contain \"/\": got \"${name}\". 
` +\n 'Use chained .subgraph() calls for nested subgraphs.',\n 'INVALID_SUBGRAPH',\n );\n }\n\n const childBackend = this.backend.subgraph(parentNodeUid, name);\n\n return new GraphClientImpl(\n childBackend,\n {\n registry: this.getCombinedRegistry(),\n scanProtection: this.scanProtection,\n migrationWriteBack: this.globalWriteBack,\n migrationSandbox: this.migrationSandbox,\n },\n // Subgraphs do not have meta-backends; meta lives only at the root.\n );\n }\n\n // ---------------------------------------------------------------------------\n // Collection group query\n // ---------------------------------------------------------------------------\n\n async findEdgesGlobal(\n params: FindEdgesParams,\n collectionName?: string,\n ): Promise<StoredGraphRecord[]> {\n if (!this.backend.findEdgesGlobal) {\n throw new FiregraphError(\n 'findEdgesGlobal() is not supported by the current storage backend.',\n 'UNSUPPORTED_OPERATION',\n );\n }\n const plan = buildEdgeQueryPlan(params);\n if (plan.strategy === 'get') {\n throw new FiregraphError(\n 'findEdgesGlobal() requires a query, not a direct document lookup. 
' +\n 'Omit one of aUid/axbType/bUid to force a query strategy.',\n 'INVALID_QUERY',\n );\n }\n this.checkQuerySafety(plan.filters, params.allowCollectionScan);\n const records = await this.backend.findEdgesGlobal(params, collectionName);\n return this.applyMigrations(records);\n }\n\n // ---------------------------------------------------------------------------\n // Bulk operations\n // ---------------------------------------------------------------------------\n\n async removeNodeCascade(uid: string, options?: BulkOptions): Promise<CascadeResult> {\n return this.backend.removeNodeCascade(uid, this, options);\n }\n\n async bulkRemoveEdges(params: FindEdgesParams, options?: BulkOptions): Promise<BulkResult> {\n return this.backend.bulkRemoveEdges(params, this, options);\n }\n\n // ---------------------------------------------------------------------------\n // Dynamic registry methods\n // ---------------------------------------------------------------------------\n\n async defineNodeType(\n name: string,\n jsonSchema: object,\n description?: string,\n options?: DefineTypeOptions,\n ): Promise<void> {\n if (!this.dynamicConfig) {\n throw new DynamicRegistryError(\n 'defineNodeType() is only available in dynamic registry mode. 
' +\n 'Pass registryMode: { mode: \"dynamic\" } to createGraphClient().',\n );\n }\n\n if (RESERVED_TYPE_NAMES.has(name)) {\n throw new DynamicRegistryError(\n `Cannot define type \"${name}\": this name is reserved for the meta-registry.`,\n );\n }\n\n if (this.staticRegistry?.lookup(name, NODE_RELATION, name)) {\n throw new DynamicRegistryError(\n `Cannot define node type \"${name}\": already defined in the static registry.`,\n );\n }\n\n const uid = generateDeterministicUid(META_NODE_TYPE, name);\n const data: Record<string, unknown> = { name, jsonSchema };\n if (description !== undefined) data.description = description;\n if (options?.titleField !== undefined) data.titleField = options.titleField;\n if (options?.subtitleField !== undefined) data.subtitleField = options.subtitleField;\n if (options?.viewTemplate !== undefined) data.viewTemplate = options.viewTemplate;\n if (options?.viewCss !== undefined) data.viewCss = options.viewCss;\n if (options?.allowedIn !== undefined) data.allowedIn = options.allowedIn;\n if (options?.migrationWriteBack !== undefined)\n data.migrationWriteBack = options.migrationWriteBack;\n if (options?.migrations !== undefined) {\n data.migrations = await this.serializeMigrations(options.migrations);\n }\n\n await this.putNode(META_NODE_TYPE, uid, data);\n }\n\n async defineEdgeType(\n name: string,\n topology: EdgeTopology,\n jsonSchema?: object,\n description?: string,\n options?: DefineTypeOptions,\n ): Promise<void> {\n if (!this.dynamicConfig) {\n throw new DynamicRegistryError(\n 'defineEdgeType() is only available in dynamic registry mode. ' +\n 'Pass registryMode: { mode: \"dynamic\" } to createGraphClient().',\n );\n }\n\n if (RESERVED_TYPE_NAMES.has(name)) {\n throw new DynamicRegistryError(\n `Cannot define type \"${name}\": this name is reserved for the meta-registry.`,\n );\n }\n\n if (this.staticRegistry) {\n const fromTypes = Array.isArray(topology.from) ? 
topology.from : [topology.from];\n const toTypes = Array.isArray(topology.to) ? topology.to : [topology.to];\n for (const aType of fromTypes) {\n for (const bType of toTypes) {\n if (this.staticRegistry.lookup(aType, name, bType)) {\n throw new DynamicRegistryError(\n `Cannot define edge type \"${name}\" for (${aType}) -> (${bType}): already defined in the static registry.`,\n );\n }\n }\n }\n }\n\n const uid = generateDeterministicUid(META_EDGE_TYPE, name);\n const data: Record<string, unknown> = {\n name,\n from: topology.from,\n to: topology.to,\n };\n if (jsonSchema !== undefined) data.jsonSchema = jsonSchema;\n if (topology.inverseLabel !== undefined) data.inverseLabel = topology.inverseLabel;\n if (topology.targetGraph !== undefined) data.targetGraph = topology.targetGraph;\n if (description !== undefined) data.description = description;\n if (options?.titleField !== undefined) data.titleField = options.titleField;\n if (options?.subtitleField !== undefined) data.subtitleField = options.subtitleField;\n if (options?.viewTemplate !== undefined) data.viewTemplate = options.viewTemplate;\n if (options?.viewCss !== undefined) data.viewCss = options.viewCss;\n if (options?.allowedIn !== undefined) data.allowedIn = options.allowedIn;\n if (options?.migrationWriteBack !== undefined)\n data.migrationWriteBack = options.migrationWriteBack;\n if (options?.migrations !== undefined) {\n data.migrations = await this.serializeMigrations(options.migrations);\n }\n\n await this.putNode(META_EDGE_TYPE, uid, data);\n }\n\n async reloadRegistry(): Promise<void> {\n if (!this.dynamicConfig) {\n throw new DynamicRegistryError(\n 'reloadRegistry() is only available in dynamic registry mode. 
' +\n 'Pass registryMode: { mode: \"dynamic\" } to createGraphClient().',\n );\n }\n\n const reader = this.createMetaReader();\n const dynamicOnly = await createRegistryFromGraph(reader, this.migrationSandbox);\n\n if (this.staticRegistry) {\n this.dynamicRegistry = createMergedRegistry(this.staticRegistry, dynamicOnly);\n } else {\n this.dynamicRegistry = dynamicOnly;\n }\n }\n\n private async serializeMigrations(\n migrations: Array<{ fromVersion: number; toVersion: number; up: MigrationFn | string }>,\n ): Promise<Array<{ fromVersion: number; toVersion: number; up: string }>> {\n const result = migrations.map((m) => {\n const source = typeof m.up === 'function' ? m.up.toString() : m.up;\n return { fromVersion: m.fromVersion, toVersion: m.toVersion, up: source };\n });\n await Promise.all(result.map((m) => precompileSource(m.up, this.migrationSandbox)));\n return result;\n }\n\n /**\n * Build a `GraphReader` over the meta-backend. If meta lives in the same\n * collection as the main backend, `this` is returned directly.\n */\n private createMetaReader(): GraphReader {\n if (!this.metaBackend) return this;\n\n const backend = this.metaBackend;\n\n const executeMetaQuery = (\n filters: QueryFilter[],\n options?: QueryOptions,\n ): Promise<StoredGraphRecord[]> => backend.query(filters, options);\n\n return {\n async getNode(uid: string): Promise<StoredGraphRecord | null> {\n return backend.getDoc(computeNodeDocId(uid));\n },\n async getEdge(\n aUid: string,\n axbType: string,\n bUid: string,\n ): Promise<StoredGraphRecord | null> {\n return backend.getDoc(computeEdgeDocId(aUid, axbType, bUid));\n },\n async edgeExists(aUid: string, axbType: string, bUid: string): Promise<boolean> {\n const record = await backend.getDoc(computeEdgeDocId(aUid, axbType, bUid));\n return record !== null;\n },\n async findEdges(params: FindEdgesParams): Promise<StoredGraphRecord[]> {\n const plan = buildEdgeQueryPlan(params);\n if (plan.strategy === 'get') {\n const record = await 
backend.getDoc(plan.docId);\n return record ? [record] : [];\n }\n return executeMetaQuery(plan.filters, plan.options);\n },\n async findNodes(params: FindNodesParams): Promise<StoredGraphRecord[]> {\n const plan = buildNodeQueryPlan(params);\n if (plan.strategy === 'get') {\n const record = await backend.getDoc(plan.docId);\n return record ? [record] : [];\n }\n return executeMetaQuery(plan.filters, plan.options);\n },\n };\n }\n}\n\n/**\n * Create a `GraphClient` backed by an arbitrary `StorageBackend`.\n *\n * Used by backend-specific factories (e.g. `createDOClient` in\n * `firegraph/cloudflare`) — most callers should use the higher-level\n * `createGraphClient(firestore, ...)` overload below for Firestore, or the\n * Cloudflare factory for DO-backed graphs.\n */\nexport function createGraphClientFromBackend(\n backend: StorageBackend,\n options?: GraphClientOptions,\n metaBackend?: StorageBackend,\n): GraphClient | DynamicGraphClient {\n return new GraphClientImpl(backend, options, metaBackend) as GraphClient | DynamicGraphClient;\n}\n","/**\n * Code generation — produces TypeScript type definitions from JSON Schema\n * files discovered via the entity folder convention.\n *\n * Uses `json-schema-to-typescript` to compile each entity's `schema.json`\n * into a TypeScript interface.\n *\n * Naming convention:\n * - Nodes: `{PascalName}Data` (e.g. `TaskData`)\n * - Edges: `{PascalName}EdgeData` (e.g. `HasStepEdgeData`)\n */\n\nimport type { DiscoveryResult } from '../types.js';\n\nfunction pascalCase(s: string): string {\n return s.replace(/(^|[^a-zA-Z0-9])([a-zA-Z])/g, (_, _sep, ch) => ch.toUpperCase());\n}\n\nexport interface CodegenOptions {\n /** Add banner comment at top of output. Defaults to true. 
*/\n banner?: boolean;\n}\n\n/**\n * Generate TypeScript type definitions from a DiscoveryResult.\n * Returns the full file content as a string.\n */\nexport async function generateTypes(\n discovery: DiscoveryResult,\n options: CodegenOptions = {},\n): Promise<string> {\n // Lazy-load to avoid requiring this dep at runtime for non-codegen usage\n const { compile } = await import('json-schema-to-typescript');\n\n const { banner = true } = options;\n const chunks: string[] = [];\n\n if (banner) {\n chunks.push('// Auto-generated by firegraph codegen — do not edit manually\\n');\n }\n\n // Sort for deterministic output\n const sortedNodes = [...discovery.nodes.entries()].sort(([a], [b]) => a.localeCompare(b));\n const sortedEdges = [...discovery.edges.entries()].sort(([a], [b]) => a.localeCompare(b));\n\n for (const [name, entity] of sortedNodes) {\n const typeName = `${pascalCase(name)}Data`;\n const ts = await compile(entity.schema as any, typeName, {\n bannerComment: '',\n additionalProperties: false,\n });\n chunks.push(ts.trim());\n chunks.push('');\n }\n\n for (const [name, entity] of sortedEdges) {\n const typeName = `${pascalCase(name)}EdgeData`;\n const ts = await compile(entity.schema as any, typeName, {\n bannerComment: '',\n additionalProperties: false,\n });\n chunks.push(ts.trim());\n chunks.push('');\n }\n\n return chunks.join('\\n').trimEnd() + '\\n';\n}\n","/**\n * Firegraph Configuration — project-level config file support.\n *\n * Projects create a `firegraph.config.ts` (or `.js`/`.mjs`) in their root:\n *\n * @example\n * ```ts\n * import { defineConfig } from 'firegraph';\n *\n * export default defineConfig({\n * entities: './entities',\n * project: 'my-project',\n * collection: 'graph',\n * });\n * ```\n */\n\nimport type { DynamicRegistryConfig, QueryMode } from './types.js';\n\n// ---------------------------------------------------------------------------\n// View Resolution Types\n// 
---------------------------------------------------------------------------\n\n/** Display contexts where views can appear. */\nexport type ViewContext = 'listing' | 'detail' | 'inline';\n\n/** View resolution configuration for a single entity type. */\nexport interface ViewResolverConfig {\n /** Default view name (e.g. 'card'). Falls back to 'json' if unset. */\n default?: string;\n /** View to use in NodeBrowser listing rows. */\n listing?: string;\n /** View to use on the NodeDetail page. */\n detail?: string;\n /** View to use for inline/embedded previews (edge rows, traversal). */\n inline?: string;\n}\n\n/** Declarative view defaults, keyed by entity type. */\nexport interface ViewDefaultsConfig {\n /** Node view defaults keyed by aType (e.g. 'user', 'task'). */\n nodes?: Record<string, ViewResolverConfig>;\n /** Edge view defaults keyed by axbType (e.g. 'hasDeparture'). */\n edges?: Record<string, ViewResolverConfig>;\n}\n\n// ---------------------------------------------------------------------------\n// Config Shape\n// ---------------------------------------------------------------------------\n\n/** Project-level firegraph configuration. */\nexport interface FiregraphConfig {\n /** Path to entities directory (per-entity folder convention). */\n entities?: string;\n /** GCP project ID. */\n project?: string;\n /** Firestore collection path (default: 'graph'). */\n collection?: string;\n /** Firestore emulator address (e.g. '127.0.0.1:8080'). */\n emulator?: string;\n /**\n * Query execution backend.\n *\n * - `'pipeline'` (default) — Uses Firestore Pipeline API. Requires Enterprise\n * Firestore. Enables indexless queries on `data.*` fields.\n * - `'standard'` — Uses standard Firestore `.where().get()` queries. Not\n * recommended for production. See README for risk details.\n *\n * When the emulator is active, always falls back to `'standard'`.\n */\n queryMode?: QueryMode;\n\n /**\n * AI chat configuration. 
Auto-detects `claude` CLI on PATH by default.\n * Set to `false` to disable chat even if claude is available.\n */\n chat?:\n | false\n | {\n /** Claude model to use (default: 'sonnet'). */\n model?: string;\n /** Maximum concurrent claude processes (default: 2). */\n maxConcurrency?: number;\n };\n\n /** Editor-specific settings. */\n editor?: {\n /** Server port (default: 3883). */\n port?: number;\n /** Force read-only mode. */\n readonly?: boolean;\n };\n\n /** Declarative view defaults per entity type (overrides per-entity meta.json). */\n viewDefaults?: ViewDefaultsConfig;\n\n /**\n * Dynamic registry mode. When set, the editor loads type definitions\n * from Firestore meta-nodes in addition to filesystem entities.\n * Filesystem types take precedence on name conflicts.\n */\n registryMode?: DynamicRegistryConfig;\n}\n\n// ---------------------------------------------------------------------------\n// defineConfig()\n// ---------------------------------------------------------------------------\n\n/**\n * Identity function providing type-checking and autocomplete for config files.\n *\n * @example\n * ```ts\n * import { defineConfig } from 'firegraph';\n * export default defineConfig({ entities: './entities' });\n * ```\n */\nexport function defineConfig(config: FiregraphConfig): FiregraphConfig {\n return config;\n}\n\n// ---------------------------------------------------------------------------\n// View Resolution (pure — works client-side and server-side)\n// ---------------------------------------------------------------------------\n\n/**\n * Resolve which view to show for a given entity.\n *\n * 1. If `context` is provided and a context-specific default exists, use it.\n * 2. Falls back to `resolverConfig.default`.\n * 3. 
Ultimate fallback: `'json'`.\n *\n * Only returns view names that exist in `availableViewNames`.\n */\nexport function resolveView(\n resolverConfig: ViewResolverConfig | undefined,\n availableViewNames: string[],\n context?: ViewContext,\n): string {\n if (!resolverConfig) return 'json';\n\n const available = new Set(availableViewNames);\n\n if (context) {\n const contextDefault = resolverConfig[context];\n if (contextDefault && available.has(contextDefault)) {\n return contextDefault;\n }\n }\n\n if (resolverConfig.default && available.has(resolverConfig.default)) {\n return resolverConfig.default;\n }\n\n return 'json';\n}\n","/**\n * Cross-graph edge resolution utilities.\n *\n * Provides path-scanning resolution for determining whether an edge's source\n * (aUid) is an ancestor node by checking if the UID appears in the Firestore\n * collection path.\n *\n * Firestore paths have a rigid alternating structure:\n * collection / docId / collection / docId / collection\n *\n * Given a path like `graph/A/workspace/B/context`, segments at even indices\n * are collection names and odd indices are document IDs. 
When we find a UID\n * at an odd index, the collection containing that document is the path up to\n * (and including) the preceding even-index segment.\n */\n\n/**\n * Parse a Firestore collection path and determine the collection path\n * where a given UID's document lives, if that UID is an ancestor in the path.\n *\n * @param collectionPath - The full Firestore collection path of the current client\n * @param uid - The UID to search for in the path\n * @returns The collection path containing the UID, or `null` if not found in the path\n *\n * @example\n * ```ts\n * // Path: graph/A/workspace/B/context\n * resolveAncestorCollection('graph/A/workspace/B/context', 'A')\n * // → 'graph'\n *\n * resolveAncestorCollection('graph/A/workspace/B/context', 'B')\n * // → 'graph/A/workspace'\n *\n * resolveAncestorCollection('graph/A/workspace/B/context', 'unknown')\n * // → null\n * ```\n */\nexport function resolveAncestorCollection(collectionPath: string, uid: string): string | null {\n const segments = collectionPath.split('/');\n\n // Walk odd-indexed segments (document IDs in Firestore's alternating path structure)\n for (let i = 1; i < segments.length; i += 2) {\n if (segments[i] === uid) {\n // The collection containing this doc is everything up to index i-1\n return segments.slice(0, i).join('/');\n }\n }\n\n return null;\n}\n\n/**\n * Check whether a UID belongs to an ancestor node by scanning the collection path.\n *\n * @param collectionPath - The full Firestore collection path of the current client\n * @param uid - The UID to check\n * @returns `true` if the UID appears as a document segment in the path\n */\nexport function isAncestorUid(collectionPath: string, uid: string): boolean {\n return resolveAncestorCollection(collectionPath, uid) !== null;\n}\n","/**\n * Default core index preset.\n *\n * This set covers the query patterns firegraph's query planner emits for\n * built-in operations — `findNodes`, `findEdges`, cascade delete, traversal,\n * and the 
DO/SQLite path compilers. Apps that need additional indexes\n * (descending timestamps, `data.*` filters, composite fields unique to\n * their query shapes) declare them on `RegistryEntry.indexes` or override\n * this preset wholesale via the backend-specific `coreIndexes` option —\n * `FiregraphDOOptions.coreIndexes` for the DO backend,\n * `BuildSchemaOptions.coreIndexes` for the legacy SQLite backend, and\n * `GenerateIndexOptions.coreIndexes` for the Firestore CLI generator.\n *\n * ## Ownership model\n *\n * This list is firegraph's *recommendation* — not non-negotiable policy.\n * Consumers can:\n *\n * 1. Accept the preset as-is (default).\n * 2. Extend it: `coreIndexes: [...DEFAULT_CORE_INDEXES, ...more]`.\n * 3. Replace it entirely with a tailored set.\n * 4. Disable it (`coreIndexes: []`) and take full responsibility for\n * index coverage — only do this if you're provisioning a complete\n * custom set.\n *\n * ## Per-backend emission\n *\n * The Firestore generator skips single-field entries (Firestore implicitly\n * indexes every field) and emits one composite index per multi-field spec.\n * The SQLite-flavored generators (DO, legacy) emit every spec as-is.\n *\n * ## Why these specific indexes\n *\n * - `aUid` / `bUid` — required for `_fgRemoveNodeCascade`, which scans by\n * each UID side independently. 
A composite `(aUid, axbType)` also\n * satisfies `aUid`-alone via leading-column prefix, but the single-field\n * form is cheaper for the common case.\n * - `aType` / `bType` — `findNodes({ aType })` and cross-type enumeration.\n * - `(aUid, axbType)` — forward edge lookup (`findEdges({ aUid, axbType })`)\n * and the `get` strategy fallback when only two of three triple fields\n * are present.\n * - `(axbType, bUid)` — reverse edge traversal.\n * - `(aType, axbType)` — type-scoped edge scans (e.g., `findEdges({ aType, axbType })`).\n * - `(axbType, bType)` — scope edges of one relation to a target type.\n */\n\nimport type { IndexSpec } from './types.js';\n\nexport const DEFAULT_CORE_INDEXES: ReadonlyArray<IndexSpec> = Object.freeze([\n { fields: ['aUid'] },\n { fields: ['bUid'] },\n { fields: ['aType'] },\n { fields: ['bType'] },\n { fields: ['aUid', 'axbType'] },\n { fields: ['axbType', 'bUid'] },\n { fields: ['aType', 'axbType'] },\n { fields: ['axbType', 'bType'] },\n]);\n","/**\n * Entity Discovery — convention-based auto-discovery of entities from\n * a per-entity folder structure.\n *\n * Scans `entitiesDir/nodes/` and `entitiesDir/edges/` subdirectories.\n * Each subfolder is treated as an entity type.\n *\n * Schema files can be either `schema.json` (plain JSON Schema) or\n * `schema.ts` / `schema.js` (a module whose default export is a JSON Schema\n * object). 
When both exist, the TS/JS file takes precedence so that authors\n * can compose schemas programmatically while keeping a JSON fallback.\n *\n * @example\n * ```\n * entities/\n * nodes/\n * task/\n * schema.json | schema.ts (required — one or both)\n * views.ts (optional)\n * sample.json (optional)\n * meta.json (optional)\n * edges/\n * hasStep/\n * schema.json | schema.ts (required — one or both)\n * edge.json (required — topology)\n * views.ts (optional)\n * sample.json (optional)\n * meta.json (optional)\n * ```\n */\n\nimport { existsSync, readdirSync, readFileSync, statSync } from 'node:fs';\nimport { createRequire } from 'node:module';\nimport { join, resolve } from 'node:path';\n\nimport type * as jitiNS from 'jiti';\n\nimport type { ViewResolverConfig } from './config.js';\nimport { FiregraphError } from './errors.js';\nimport type {\n DiscoveredEntity,\n DiscoveryResult,\n EdgeTopology,\n IndexSpec,\n MigrationStep,\n MigrationWriteBack,\n} from './types.js';\n\nexport class DiscoveryError extends FiregraphError {\n constructor(message: string) {\n super(message, 'DISCOVERY_ERROR');\n this.name = 'DiscoveryError';\n }\n}\n\n// ---------------------------------------------------------------------------\n// JSON parsing helpers\n// ---------------------------------------------------------------------------\n\nfunction readJson(filePath: string): unknown {\n try {\n const raw = readFileSync(filePath, 'utf-8');\n return JSON.parse(raw);\n } catch (err: unknown) {\n const msg =\n err instanceof SyntaxError\n ? 
`Invalid JSON in ${filePath}: ${err.message}`\n : `Cannot read ${filePath}: ${(err as Error).message}`;\n throw new DiscoveryError(msg);\n }\n}\n\nfunction readJsonIfExists(filePath: string): unknown | undefined {\n if (!existsSync(filePath)) return undefined;\n return readJson(filePath);\n}\n\n// ---------------------------------------------------------------------------\n// Schema file loading (JSON or TS/JS via jiti)\n// ---------------------------------------------------------------------------\n\nconst SCHEMA_SCRIPT_EXTENSIONS = ['.ts', '.js', '.mts', '.mjs'];\n\n/**\n * Attempt to load a schema from a TS/JS module (default export) or fall back\n * to schema.json. Returns the parsed schema object or throws.\n */\nfunction loadSchema(dir: string, entityLabel: string): object {\n // Prefer TS/JS schema — allows programmatic composition & shared definitions\n for (const ext of SCHEMA_SCRIPT_EXTENSIONS) {\n const candidate = join(dir, `schema${ext}`);\n if (existsSync(candidate)) {\n return loadSchemaModule(candidate, entityLabel);\n }\n }\n\n // Fall back to schema.json\n const jsonPath = join(dir, 'schema.json');\n if (existsSync(jsonPath)) {\n return readJson(jsonPath) as object;\n }\n\n throw new DiscoveryError(\n `Missing schema for ${entityLabel} in ${dir}. ` +\n 'Provide a schema.ts (or .js/.mts/.mjs) or schema.json file.',\n );\n}\n\nlet _jiti: ((id: string) => unknown) | undefined;\n\nfunction getJiti(): (id: string) => unknown {\n if (!_jiti) {\n const base = typeof __filename !== 'undefined' ? __filename : import.meta.url;\n const esmRequire = createRequire(base);\n const { createJiti } = esmRequire('jiti') as typeof jitiNS;\n _jiti = createJiti(base, { interopDefault: true });\n }\n return _jiti;\n}\n\nfunction loadSchemaModule(filePath: string, entityLabel: string): object {\n try {\n const jiti = getJiti();\n const mod = jiti(filePath) as { default?: unknown } | unknown;\n const schema =\n mod && typeof mod === 'object' && 'default' in mod\n ? 
(mod as { default: unknown }).default\n : mod;\n\n if (!schema || typeof schema !== 'object') {\n throw new DiscoveryError(\n `Schema file ${filePath} for ${entityLabel} must default-export a JSON Schema object.`,\n );\n }\n return schema as object;\n } catch (err: unknown) {\n if (err instanceof DiscoveryError) throw err;\n throw new DiscoveryError(\n `Failed to load schema module ${filePath} for ${entityLabel}: ${(err as Error).message}`,\n );\n }\n}\n\n// ---------------------------------------------------------------------------\n// View file detection\n// ---------------------------------------------------------------------------\n\nconst VIEW_EXTENSIONS = ['.ts', '.js', '.mts', '.mjs'];\n\nfunction findViewsFile(dir: string): string | undefined {\n for (const ext of VIEW_EXTENSIONS) {\n const candidate = join(dir, `views${ext}`);\n if (existsSync(candidate)) return candidate;\n }\n return undefined;\n}\n\n// ---------------------------------------------------------------------------\n// Migration file detection & loading\n// ---------------------------------------------------------------------------\n\nconst MIGRATION_EXTENSIONS = ['.ts', '.js', '.mts', '.mjs'];\n\nfunction findMigrationsFile(dir: string): string | undefined {\n for (const ext of MIGRATION_EXTENSIONS) {\n const candidate = join(dir, `migrations${ext}`);\n if (existsSync(candidate)) return candidate;\n }\n return undefined;\n}\n\nfunction loadMigrations(filePath: string, entityLabel: string): MigrationStep[] {\n try {\n const jiti = getJiti();\n const mod = jiti(filePath) as { default?: unknown } | unknown;\n const migrations =\n mod && typeof mod === 'object' && 'default' in mod\n ? 
(mod as { default: unknown }).default\n : mod;\n\n if (!Array.isArray(migrations)) {\n throw new DiscoveryError(\n `Migrations file ${filePath} for ${entityLabel} must default-export an array of MigrationStep.`,\n );\n }\n return migrations as MigrationStep[];\n } catch (err: unknown) {\n if (err instanceof DiscoveryError) throw err;\n throw new DiscoveryError(\n `Failed to load migrations ${filePath} for ${entityLabel}: ${(err as Error).message}`,\n );\n }\n}\n\n// ---------------------------------------------------------------------------\n// Entity loaders\n// ---------------------------------------------------------------------------\n\nfunction loadNodeEntity(dir: string, name: string): DiscoveredEntity {\n const schema = loadSchema(dir, `node type \"${name}\"`);\n const meta = readJsonIfExists(join(dir, 'meta.json')) as\n | {\n description?: string;\n titleField?: string;\n subtitleField?: string;\n viewDefaults?: ViewResolverConfig;\n allowedIn?: string[];\n migrationWriteBack?: MigrationWriteBack;\n indexes?: IndexSpec[];\n }\n | undefined;\n const sampleData = readJsonIfExists(join(dir, 'sample.json')) as\n | Record<string, unknown>\n | undefined;\n const viewsPath = findViewsFile(dir);\n const migrationsPath = findMigrationsFile(dir);\n const migrations = migrationsPath\n ? 
loadMigrations(migrationsPath, `node type \"${name}\"`)\n : undefined;\n\n return {\n kind: 'node',\n name,\n schema,\n description: meta?.description,\n titleField: meta?.titleField,\n subtitleField: meta?.subtitleField,\n viewDefaults: meta?.viewDefaults,\n viewsPath,\n sampleData,\n allowedIn: meta?.allowedIn,\n migrations,\n migrationWriteBack: meta?.migrationWriteBack,\n indexes: meta?.indexes,\n };\n}\n\nfunction loadEdgeEntity(dir: string, name: string): DiscoveredEntity {\n const schema = loadSchema(dir, `edge type \"${name}\"`);\n\n const edgePath = join(dir, 'edge.json');\n if (!existsSync(edgePath)) {\n throw new DiscoveryError(\n `Missing edge.json for edge type \"${name}\" in ${dir}. ` +\n 'Edge entities must declare topology (from/to node types).',\n );\n }\n const topology = readJson(edgePath) as EdgeTopology;\n\n // Validate topology shape\n if (!topology.from) {\n throw new DiscoveryError(`edge.json for \"${name}\" is missing required \"from\" field`);\n }\n if (!topology.to) {\n throw new DiscoveryError(`edge.json for \"${name}\" is missing required \"to\" field`);\n }\n\n const meta = readJsonIfExists(join(dir, 'meta.json')) as\n | {\n description?: string;\n titleField?: string;\n subtitleField?: string;\n viewDefaults?: ViewResolverConfig;\n allowedIn?: string[];\n targetGraph?: string;\n migrationWriteBack?: MigrationWriteBack;\n indexes?: IndexSpec[];\n }\n | undefined;\n const sampleData = readJsonIfExists(join(dir, 'sample.json')) as\n | Record<string, unknown>\n | undefined;\n const viewsPath = findViewsFile(dir);\n const migrationsPath = findMigrationsFile(dir);\n const migrations = migrationsPath\n ? 
loadMigrations(migrationsPath, `edge type \"${name}\"`)\n : undefined;\n\n return {\n kind: 'edge',\n name,\n schema,\n topology,\n description: meta?.description,\n titleField: meta?.titleField,\n subtitleField: meta?.subtitleField,\n viewDefaults: meta?.viewDefaults,\n viewsPath,\n sampleData,\n allowedIn: meta?.allowedIn,\n targetGraph:\n topology.targetGraph ?? (meta as { targetGraph?: string } | undefined)?.targetGraph,\n migrations,\n migrationWriteBack: meta?.migrationWriteBack,\n indexes: meta?.indexes,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Directory scanner\n// ---------------------------------------------------------------------------\n\nfunction getSubdirectories(dir: string): string[] {\n if (!existsSync(dir)) return [];\n return readdirSync(dir, { withFileTypes: true })\n .filter((d) => d.isDirectory())\n .map((d) => d.name);\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\nexport interface DiscoveryWarning {\n code: 'DANGLING_TOPOLOGY_REF';\n message: string;\n}\n\nexport interface DiscoverResult {\n result: DiscoveryResult;\n warnings: DiscoveryWarning[];\n}\n\n/**\n * Scan an entities directory and return all discovered nodes and edges.\n *\n * @param entitiesDir - Path to the entities directory (absolute or relative to cwd)\n * @returns Discovery result with nodes and edges maps, plus any warnings\n */\nexport function discoverEntities(entitiesDir: string): DiscoverResult {\n const absDir = resolve(entitiesDir);\n\n if (!existsSync(absDir) || !statSync(absDir).isDirectory()) {\n throw new DiscoveryError(`Entities directory not found: ${entitiesDir}`);\n }\n\n const nodes = new Map<string, DiscoveredEntity>();\n const edges = new Map<string, DiscoveredEntity>();\n const warnings: DiscoveryWarning[] = [];\n\n // Discover nodes\n const nodesDir = join(absDir, 
'nodes');\n for (const name of getSubdirectories(nodesDir)) {\n nodes.set(name, loadNodeEntity(join(nodesDir, name), name));\n }\n\n // Discover edges\n const edgesDir = join(absDir, 'edges');\n for (const name of getSubdirectories(edgesDir)) {\n edges.set(name, loadEdgeEntity(join(edgesDir, name), name));\n }\n\n // Validate topology references\n const nodeNames = new Set(nodes.keys());\n for (const [axbType, entity] of edges) {\n const topology = entity.topology!;\n const fromTypes = Array.isArray(topology.from) ? topology.from : [topology.from];\n const toTypes = Array.isArray(topology.to) ? topology.to : [topology.to];\n\n for (const ref of [...fromTypes, ...toTypes]) {\n if (!nodeNames.has(ref)) {\n warnings.push({\n code: 'DANGLING_TOPOLOGY_REF',\n message: `Edge \"${axbType}\" references node type \"${ref}\" which was not found in the nodes directory`,\n });\n }\n }\n }\n\n return {\n result: { nodes, edges },\n warnings,\n };\n}\n","/**\n * Firestore implementation of `StorageBackend`.\n *\n * Wraps the existing `FirestoreAdapter`, `TransactionAdapter`, and\n * `BatchAdapter` so the Firestore code path keeps the exact behavior it\n * had before the backend abstraction landed.\n */\n\nimport type { Firestore, Query, Transaction } from '@google-cloud/firestore';\nimport { FieldValue } from '@google-cloud/firestore';\n\nimport {\n bulkRemoveEdges as bulkRemoveEdgesImpl,\n removeNodeCascade as removeNodeCascadeImpl,\n} from '../bulk.js';\nimport { FiregraphError } from '../errors.js';\nimport { buildEdgeQueryPlan } from '../query.js';\nimport { deserializeFirestoreTypes } from '../serialization.js';\nimport type {\n BulkOptions,\n BulkResult,\n CascadeResult,\n FindEdgesParams,\n GraphReader,\n QueryFilter,\n QueryMode,\n QueryOptions,\n StoredGraphRecord,\n} from '../types.js';\nimport type {\n BatchBackend,\n StorageBackend,\n TransactionBackend,\n UpdatePayload,\n WritableRecord,\n WriteMode,\n} from './backend.js';\nimport type { BatchAdapter, 
FirestoreAdapter, TransactionAdapter } from './firestore-adapter.js';\nimport {\n createBatchAdapter,\n createFirestoreAdapter,\n createTransactionAdapter,\n} from './firestore-adapter.js';\nimport type { PipelineQueryAdapter } from './pipeline-adapter.js';\nimport { createPipelineQueryAdapter } from './pipeline-adapter.js';\nimport type { DataPathOp } from './write-plan.js';\nimport { assertSafePath, assertUpdatePayloadExclusive } from './write-plan.js';\n\nexport interface FirestoreBackendOptions {\n queryMode?: QueryMode;\n scopePath?: string;\n}\n\n/** Build a `data.a.b.c` dotted path for Firestore's `update()` API. */\nfunction dottedDataPath(op: DataPathOp): string {\n assertSafePath(op.path);\n return `data.${op.path.join('.')}`;\n}\n\n/**\n * Build the patch payload Firestore expects from an `UpdatePayload`.\n *\n * - `replaceData` sets the whole `data` field at once (full replacement).\n * Tagged Firestore types from the migration sandbox are reconstructed\n * here. Cannot be combined with `dataOps`.\n * - `dataOps` becomes one Firestore field-update entry per terminal op,\n * keyed by `data.<dotted.path>`. Delete ops use `FieldValue.delete()`.\n * Sibling keys at every depth are preserved by Firestore's update\n * semantics for nested maps.\n * - `updatedAt` is always stamped with `FieldValue.serverTimestamp()`.\n * - `v` is stamped at the root when provided.\n */\nfunction buildFirestoreUpdate(update: UpdatePayload, db: Firestore): Record<string, unknown> {\n assertUpdatePayloadExclusive(update);\n const out: Record<string, unknown> = {\n updatedAt: FieldValue.serverTimestamp(),\n };\n if (update.replaceData) {\n out.data = deserializeFirestoreTypes(update.replaceData, db);\n } else if (update.dataOps) {\n for (const op of update.dataOps) {\n const key = dottedDataPath(op);\n out[key] = op.delete ? 
FieldValue.delete() : op.value;\n }\n }\n if (update.v !== undefined) {\n out.v = update.v;\n }\n return out;\n}\n\n/**\n * Stamp `createdAt`/`updatedAt` server-timestamp sentinels on a\n * timestampless record. Used for `setDoc`.\n */\nfunction stampWritableRecord(record: WritableRecord): Record<string, unknown> {\n const now = FieldValue.serverTimestamp();\n const out: Record<string, unknown> = {\n aType: record.aType,\n aUid: record.aUid,\n axbType: record.axbType,\n bType: record.bType,\n bUid: record.bUid,\n data: record.data,\n createdAt: now,\n updatedAt: now,\n };\n if (record.v !== undefined) out.v = record.v;\n return out;\n}\n\nclass FirestoreTransactionBackend implements TransactionBackend {\n constructor(\n private readonly adapter: TransactionAdapter,\n private readonly db: Firestore,\n ) {}\n\n getDoc(docId: string): Promise<StoredGraphRecord | null> {\n return this.adapter.getDoc(docId);\n }\n\n query(filters: QueryFilter[], options?: QueryOptions): Promise<StoredGraphRecord[]> {\n return this.adapter.query(filters, options);\n }\n\n async setDoc(docId: string, record: WritableRecord, mode: WriteMode): Promise<void> {\n this.adapter.setDoc(\n docId,\n stampWritableRecord(record),\n mode === 'merge' ? { merge: true } : undefined,\n );\n }\n\n async updateDoc(docId: string, update: UpdatePayload): Promise<void> {\n this.adapter.updateDoc(docId, buildFirestoreUpdate(update, this.db));\n }\n\n async deleteDoc(docId: string): Promise<void> {\n this.adapter.deleteDoc(docId);\n }\n}\n\nclass FirestoreBatchBackend implements BatchBackend {\n constructor(\n private readonly adapter: BatchAdapter,\n private readonly db: Firestore,\n ) {}\n\n setDoc(docId: string, record: WritableRecord, mode: WriteMode): void {\n this.adapter.setDoc(\n docId,\n stampWritableRecord(record),\n mode === 'merge' ? 
{ merge: true } : undefined,\n );\n }\n\n updateDoc(docId: string, update: UpdatePayload): void {\n this.adapter.updateDoc(docId, buildFirestoreUpdate(update, this.db));\n }\n\n deleteDoc(docId: string): void {\n this.adapter.deleteDoc(docId);\n }\n\n commit(): Promise<void> {\n return this.adapter.commit();\n }\n}\n\nclass FirestoreBackendImpl implements StorageBackend {\n readonly collectionPath: string;\n readonly scopePath: string;\n private readonly adapter: FirestoreAdapter;\n private readonly pipelineAdapter?: PipelineQueryAdapter;\n\n constructor(\n private readonly db: Firestore,\n collectionPath: string,\n private readonly queryMode: QueryMode,\n scopePath: string,\n ) {\n this.collectionPath = collectionPath;\n this.scopePath = scopePath;\n this.adapter = createFirestoreAdapter(db, collectionPath);\n if (queryMode === 'pipeline') {\n this.pipelineAdapter = createPipelineQueryAdapter(db, collectionPath);\n }\n }\n\n // --- Reads ---\n\n getDoc(docId: string): Promise<StoredGraphRecord | null> {\n return this.adapter.getDoc(docId);\n }\n\n query(filters: QueryFilter[], options?: QueryOptions): Promise<StoredGraphRecord[]> {\n if (this.pipelineAdapter) {\n return this.pipelineAdapter.query(filters, options);\n }\n return this.adapter.query(filters, options);\n }\n\n // --- Writes ---\n\n setDoc(docId: string, record: WritableRecord, mode: WriteMode): Promise<void> {\n return this.adapter.setDoc(\n docId,\n stampWritableRecord(record),\n mode === 'merge' ? 
{ merge: true } : undefined,\n );\n }\n\n updateDoc(docId: string, update: UpdatePayload): Promise<void> {\n return this.adapter.updateDoc(docId, buildFirestoreUpdate(update, this.db));\n }\n\n deleteDoc(docId: string): Promise<void> {\n return this.adapter.deleteDoc(docId);\n }\n\n // --- Transactions / Batches ---\n\n runTransaction<T>(fn: (tx: TransactionBackend) => Promise<T>): Promise<T> {\n return this.db.runTransaction(async (firestoreTx: Transaction) => {\n const txAdapter = createTransactionAdapter(this.db, this.collectionPath, firestoreTx);\n return fn(new FirestoreTransactionBackend(txAdapter, this.db));\n });\n }\n\n createBatch(): BatchBackend {\n const batchAdapter = createBatchAdapter(this.db, this.collectionPath);\n return new FirestoreBatchBackend(batchAdapter, this.db);\n }\n\n // --- Subgraphs ---\n\n subgraph(parentNodeUid: string, name: string): StorageBackend {\n const subPath = `${this.collectionPath}/${parentNodeUid}/${name}`;\n const newScope = this.scopePath ? `${this.scopePath}/${name}` : name;\n return new FirestoreBackendImpl(this.db, subPath, this.queryMode, newScope);\n }\n\n // --- Cascade & bulk ---\n\n removeNodeCascade(\n uid: string,\n reader: GraphReader,\n options?: BulkOptions,\n ): Promise<CascadeResult> {\n return removeNodeCascadeImpl(this.db, this.collectionPath, reader, uid, options);\n }\n\n bulkRemoveEdges(\n params: FindEdgesParams,\n reader: GraphReader,\n options?: BulkOptions,\n ): Promise<BulkResult> {\n return bulkRemoveEdgesImpl(this.db, this.collectionPath, reader, params, options);\n }\n\n // --- Cross-collection ---\n\n async findEdgesGlobal(\n params: FindEdgesParams,\n collectionName?: string,\n ): Promise<StoredGraphRecord[]> {\n const name = collectionName ?? this.collectionPath.split('/').pop()!;\n const plan = buildEdgeQueryPlan(params);\n\n if (plan.strategy === 'get') {\n throw new FiregraphError(\n 'findEdgesGlobal() requires a query, not a direct document lookup. 
' +\n 'Omit one of aUid/axbType/bUid to force a query strategy.',\n 'INVALID_QUERY',\n );\n }\n\n const collectionGroupRef = this.db.collectionGroup(name);\n let q: Query = collectionGroupRef;\n for (const f of plan.filters) {\n q = q.where(f.field, f.op, f.value);\n }\n if (plan.options?.orderBy) {\n q = q.orderBy(plan.options.orderBy.field, plan.options.orderBy.direction ?? 'asc');\n }\n if (plan.options?.limit !== undefined) {\n q = q.limit(plan.options.limit);\n }\n const snap = await q.get();\n return snap.docs.map((doc) => doc.data() as StoredGraphRecord);\n }\n}\n\n/**\n * Create a Firestore-backed `StorageBackend`.\n *\n * The query-mode auto-fallback for the emulator (`FIRESTORE_EMULATOR_HOST`)\n * is performed at the call site (`createGraphClient`) so that the backend\n * itself doesn't reach into `process.env`.\n */\nexport function createFirestoreBackend(\n db: Firestore,\n collectionPath: string,\n options: FirestoreBackendOptions = {},\n): StorageBackend {\n const queryMode = options.queryMode ?? 'pipeline';\n const scopePath = options.scopePath ?? 
'';\n return new FirestoreBackendImpl(db, collectionPath, queryMode, scopePath);\n}\n","import type { Firestore } from '@google-cloud/firestore';\n\nimport { computeEdgeDocId, computeNodeDocId } from './docid.js';\nimport { NODE_RELATION } from './internal/constants.js';\nimport type {\n BulkBatchError,\n BulkOptions,\n BulkResult,\n CascadeResult,\n FindEdgesParams,\n GraphReader,\n StoredGraphRecord,\n} from './types.js';\n\nconst MAX_BATCH_SIZE = 500;\nconst DEFAULT_MAX_RETRIES = 3;\nconst BASE_DELAY_MS = 200;\n\nfunction sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n\n/**\n * Splits an array into chunks of at most `size` elements.\n */\nfunction chunk<T>(arr: T[], size: number): T[][] {\n const chunks: T[][] = [];\n for (let i = 0; i < arr.length; i += size) {\n chunks.push(arr.slice(i, i + size));\n }\n return chunks;\n}\n\n/**\n * Deletes a list of document IDs in chunked Firestore batches with retries.\n */\nexport async function bulkDeleteDocIds(\n db: Firestore,\n collectionPath: string,\n docIds: string[],\n options?: BulkOptions,\n): Promise<BulkResult> {\n if (docIds.length === 0) {\n return { deleted: 0, batches: 0, errors: [] };\n }\n\n const batchSize = Math.min(options?.batchSize ?? MAX_BATCH_SIZE, MAX_BATCH_SIZE);\n const maxRetries = options?.maxRetries ?? 
DEFAULT_MAX_RETRIES;\n const onProgress = options?.onProgress;\n\n const chunks = chunk(docIds, batchSize);\n const errors: BulkBatchError[] = [];\n let deleted = 0;\n let completedBatches = 0;\n\n for (let i = 0; i < chunks.length; i++) {\n const ids = chunks[i];\n let committed = false;\n\n for (let attempt = 0; attempt <= maxRetries; attempt++) {\n try {\n const batch = db.batch();\n const collectionRef = db.collection(collectionPath);\n for (const id of ids) {\n batch.delete(collectionRef.doc(id));\n }\n await batch.commit();\n committed = true;\n deleted += ids.length;\n break;\n } catch (err) {\n if (attempt < maxRetries) {\n const delay = BASE_DELAY_MS * Math.pow(2, attempt);\n await sleep(delay);\n } else {\n errors.push({\n batchIndex: i,\n error: err instanceof Error ? err : new Error(String(err)),\n operationCount: ids.length,\n });\n }\n }\n }\n\n if (committed) {\n completedBatches++;\n }\n\n if (onProgress) {\n onProgress({\n completedBatches,\n totalBatches: chunks.length,\n deletedSoFar: deleted,\n });\n }\n }\n\n return { deleted, batches: completedBatches, errors };\n}\n\n/**\n * Finds all edges matching `params`, then deletes them in chunked batches.\n */\nexport async function bulkRemoveEdges(\n db: Firestore,\n collectionPath: string,\n reader: GraphReader,\n params: FindEdgesParams,\n options?: BulkOptions,\n): Promise<BulkResult> {\n // Override default query limit for bulk deletion — we need all matching edges.\n // limit: 0 bypasses DEFAULT_QUERY_LIMIT; an explicit user limit is preserved.\n // allowCollectionScan: true — bulk deletion inherently implies scanning.\n const effectiveParams =\n params.limit !== undefined\n ? { ...params, allowCollectionScan: params.allowCollectionScan ?? true }\n : { ...params, limit: 0, allowCollectionScan: params.allowCollectionScan ?? 
true };\n const edges = await reader.findEdges(effectiveParams);\n const docIds = edges.map((e) => computeEdgeDocId(e.aUid, e.axbType, e.bUid));\n return bulkDeleteDocIds(db, collectionPath, docIds, options);\n}\n\n/** Result from recursive subcollection deletion. */\ninterface SubcollectionDeleteResult {\n deleted: number;\n errors: BulkBatchError[];\n}\n\n/**\n * Recursively delete all documents in all subcollections under a given document.\n * Uses `listCollections()` (Admin SDK) to discover subcollections, then for each\n * subcollection: recurse into each document's subcollections first (depth-first),\n * then bulk delete all documents in the subcollection.\n *\n * The `onProgress` callback is intentionally NOT forwarded to subcollection\n * deletes to avoid confusing callers with interleaved progress from different\n * collection depths.\n */\nasync function deleteSubcollectionsRecursive(\n db: Firestore,\n collectionPath: string,\n docId: string,\n options?: BulkOptions,\n): Promise<SubcollectionDeleteResult> {\n const docRef = db.collection(collectionPath).doc(docId);\n const subcollections = await docRef.listCollections();\n\n if (subcollections.length === 0) return { deleted: 0, errors: [] };\n\n let totalDeleted = 0;\n const allErrors: BulkBatchError[] = [];\n\n // Strip onProgress for subcollection deletes — callers should only see\n // top-level progress, not interleaved reports from nested depths.\n const subOptions: BulkOptions | undefined = options\n ? 
{ batchSize: options.batchSize, maxRetries: options.maxRetries }\n : undefined;\n\n for (const subCollRef of subcollections) {\n const subCollPath = subCollRef.path;\n // List all documents in this subcollection\n const snapshot = await subCollRef.select().get();\n const subDocIds = snapshot.docs.map((d) => d.id);\n\n // Depth-first: recurse into each document's subcollections\n for (const subDocId of subDocIds) {\n const subResult = await deleteSubcollectionsRecursive(db, subCollPath, subDocId, subOptions);\n totalDeleted += subResult.deleted;\n allErrors.push(...subResult.errors);\n }\n\n // Now delete all documents in this subcollection\n if (subDocIds.length > 0) {\n const result = await bulkDeleteDocIds(db, subCollPath, subDocIds, subOptions);\n totalDeleted += result.deleted;\n allErrors.push(...result.errors);\n }\n }\n\n return { deleted: totalDeleted, errors: allErrors };\n}\n\n/**\n * Deletes a node and all of its outgoing and incoming edges.\n *\n * Edges are deleted first in chunked batches, then the node document\n * is deleted in the final batch. This is NOT atomic across batches —\n * if a batch fails after retries, remaining batches still execute.\n *\n * By default, subcollections (subgraphs) under the node's document are\n * recursively deleted. 
Set `options.deleteSubcollections` to `false` to skip.\n */\nexport async function removeNodeCascade(\n db: Firestore,\n collectionPath: string,\n reader: GraphReader,\n uid: string,\n options?: BulkOptions,\n): Promise<CascadeResult> {\n // Find all edges touching this node (outgoing + incoming).\n // Filter out the node's own self-loop record (axbType === 'is').\n // These queries intentionally scan broadly — allowCollectionScan bypasses safety checks.\n // limit: 0 bypasses the DEFAULT_QUERY_LIMIT to ensure we find all edges.\n const [outgoingRaw, incomingRaw] = await Promise.all([\n reader.findEdges({ aUid: uid, allowCollectionScan: true, limit: 0 }),\n reader.findEdges({ bUid: uid, allowCollectionScan: true, limit: 0 }),\n ]);\n const outgoing = outgoingRaw.filter((e) => e.axbType !== NODE_RELATION);\n const incoming = incomingRaw.filter((e) => e.axbType !== NODE_RELATION);\n\n // Deduplicate: a self-referencing edge could appear in both lists.\n const edgeDocIdSet = new Set<string>();\n const allEdges: StoredGraphRecord[] = [];\n for (const edge of [...outgoing, ...incoming]) {\n const docId = computeEdgeDocId(edge.aUid, edge.axbType, edge.bUid);\n if (!edgeDocIdSet.has(docId)) {\n edgeDocIdSet.add(docId);\n allEdges.push(edge);\n }\n }\n\n // Delete subcollections (subgraphs) under this node's document (depth-first).\n const shouldDeleteSubcollections = options?.deleteSubcollections !== false;\n const nodeDocId = computeNodeDocId(uid);\n let subcollectionResult: SubcollectionDeleteResult = { deleted: 0, errors: [] };\n\n if (shouldDeleteSubcollections) {\n subcollectionResult = await deleteSubcollectionsRecursive(\n db,\n collectionPath,\n nodeDocId,\n options,\n );\n }\n\n // Build doc IDs: edges first, then the node last.\n const edgeDocIds = allEdges.map((e) => computeEdgeDocId(e.aUid, e.axbType, e.bUid));\n const allDocIds = [...edgeDocIds, nodeDocId];\n\n // Wrap the progress callback to track overall progress.\n const batchSize = 
Math.min(options?.batchSize ?? MAX_BATCH_SIZE, MAX_BATCH_SIZE);\n const result = await bulkDeleteDocIds(db, collectionPath, allDocIds, {\n ...options,\n batchSize,\n });\n\n // Determine if the node doc was in a failed batch.\n // The node is always in the last doc ID. If the last batch errored, node wasn't deleted.\n const totalChunks = Math.ceil(allDocIds.length / batchSize);\n const nodeChunkIndex = totalChunks - 1;\n const nodeDeleted = !result.errors.some((e) => e.batchIndex === nodeChunkIndex);\n\n // edgesDeleted counts only top-level edges (not subcollection docs).\n // deleted includes everything: top-level edges + node + subcollection docs.\n const topLevelEdgesDeleted = nodeDeleted ? result.deleted - 1 : result.deleted;\n\n return {\n deleted: result.deleted + subcollectionResult.deleted,\n batches: result.batches,\n errors: [...result.errors, ...subcollectionResult.errors],\n edgesDeleted: topLevelEdgesDeleted,\n nodeDeleted,\n };\n}\n","import type { Firestore, Query, Transaction } from '@google-cloud/firestore';\n\nimport type { QueryFilter, QueryOptions, StoredGraphRecord } from '../types.js';\n\nexport interface FirestoreAdapter {\n collectionPath: string;\n getDoc(docId: string): Promise<StoredGraphRecord | null>;\n setDoc(\n docId: string,\n data: Record<string, unknown>,\n options?: { merge?: boolean },\n ): Promise<void>;\n updateDoc(docId: string, data: Record<string, unknown>): Promise<void>;\n deleteDoc(docId: string): Promise<void>;\n query(filters: QueryFilter[], options?: QueryOptions): Promise<StoredGraphRecord[]>;\n}\n\nexport function createFirestoreAdapter(db: Firestore, collectionPath: string): FirestoreAdapter {\n const collectionRef = db.collection(collectionPath);\n\n return {\n collectionPath,\n\n async getDoc(docId: string): Promise<StoredGraphRecord | null> {\n const snap = await collectionRef.doc(docId).get();\n if (!snap.exists) return null;\n return snap.data() as StoredGraphRecord;\n },\n\n async setDoc(\n docId: string,\n 
data: Record<string, unknown>,\n options?: { merge?: boolean },\n ): Promise<void> {\n if (options?.merge) {\n await collectionRef.doc(docId).set(data, { merge: true });\n } else {\n await collectionRef.doc(docId).set(data);\n }\n },\n\n async updateDoc(docId: string, data: Record<string, unknown>): Promise<void> {\n await collectionRef.doc(docId).update(data);\n },\n\n async deleteDoc(docId: string): Promise<void> {\n await collectionRef.doc(docId).delete();\n },\n\n async query(filters: QueryFilter[], options?: QueryOptions): Promise<StoredGraphRecord[]> {\n let q: Query = collectionRef;\n for (const f of filters) {\n q = q.where(f.field, f.op, f.value);\n }\n if (options?.orderBy) {\n q = q.orderBy(options.orderBy.field, options.orderBy.direction ?? 'asc');\n }\n if (options?.limit !== undefined) {\n q = q.limit(options.limit);\n }\n const snap = await q.get();\n return snap.docs.map((doc) => doc.data() as StoredGraphRecord);\n },\n };\n}\n\nexport interface TransactionAdapter {\n getDoc(docId: string): Promise<StoredGraphRecord | null>;\n setDoc(docId: string, data: Record<string, unknown>, options?: { merge?: boolean }): void;\n updateDoc(docId: string, data: Record<string, unknown>): void;\n deleteDoc(docId: string): void;\n query(filters: QueryFilter[], options?: QueryOptions): Promise<StoredGraphRecord[]>;\n}\n\nexport function createTransactionAdapter(\n db: Firestore,\n collectionPath: string,\n tx: Transaction,\n): TransactionAdapter {\n const collectionRef = db.collection(collectionPath);\n\n return {\n async getDoc(docId: string): Promise<StoredGraphRecord | null> {\n const snap = await tx.get(collectionRef.doc(docId));\n if (!snap.exists) return null;\n return snap.data() as StoredGraphRecord;\n },\n\n setDoc(docId: string, data: Record<string, unknown>, options?: { merge?: boolean }): void {\n if (options?.merge) {\n tx.set(collectionRef.doc(docId), data, { merge: true });\n } else {\n tx.set(collectionRef.doc(docId), data);\n }\n },\n\n 
updateDoc(docId: string, data: Record<string, unknown>): void {\n tx.update(collectionRef.doc(docId), data);\n },\n\n deleteDoc(docId: string): void {\n tx.delete(collectionRef.doc(docId));\n },\n\n async query(filters: QueryFilter[], options?: QueryOptions): Promise<StoredGraphRecord[]> {\n let q: Query = collectionRef;\n for (const f of filters) {\n q = q.where(f.field, f.op, f.value);\n }\n if (options?.orderBy) {\n q = q.orderBy(options.orderBy.field, options.orderBy.direction ?? 'asc');\n }\n if (options?.limit !== undefined) {\n q = q.limit(options.limit);\n }\n const snap = await tx.get(q);\n return snap.docs.map((doc) => doc.data() as StoredGraphRecord);\n },\n };\n}\n\nexport interface BatchAdapter {\n setDoc(docId: string, data: Record<string, unknown>, options?: { merge?: boolean }): void;\n updateDoc(docId: string, data: Record<string, unknown>): void;\n deleteDoc(docId: string): void;\n commit(): Promise<void>;\n}\n\nexport function createBatchAdapter(db: Firestore, collectionPath: string): BatchAdapter {\n const collectionRef = db.collection(collectionPath);\n const batch = db.batch();\n\n return {\n setDoc(docId: string, data: Record<string, unknown>, options?: { merge?: boolean }): void {\n if (options?.merge) {\n batch.set(collectionRef.doc(docId), data, { merge: true });\n } else {\n batch.set(collectionRef.doc(docId), data);\n }\n },\n\n updateDoc(docId: string, data: Record<string, unknown>): void {\n batch.update(collectionRef.doc(docId), data);\n },\n\n deleteDoc(docId: string): void {\n batch.delete(collectionRef.doc(docId));\n },\n\n async commit(): Promise<void> {\n await batch.commit();\n },\n };\n}\n","/**\n * Pipeline query adapter — translates QueryFilter[] to Firestore Pipeline\n * expressions and executes them via db.pipeline().\n *\n * Only handles query() — doc-level operations (get/set/update/delete) stay\n * on the standard FirestoreAdapter.\n */\nimport type { Firestore, Pipelines } from '@google-cloud/firestore';\n\nimport type 
{ QueryFilter, QueryOptions, StoredGraphRecord } from '../types.js';\n\n/**\n * Minimal interface for the Pipeline query adapter.\n * Only implements the query path — doc operations are handled by FirestoreAdapter.\n */\nexport interface PipelineQueryAdapter {\n query(filters: QueryFilter[], options?: QueryOptions): Promise<StoredGraphRecord[]>;\n}\n\n/**\n * Lazily loaded Pipelines module. We use dynamic import so that standard-mode\n * users (and the emulator) don't pull in pipeline-related code at module load.\n */\nlet _Pipelines: typeof Pipelines | null = null;\n\nasync function getPipelines(): Promise<typeof Pipelines> {\n if (!_Pipelines) {\n const mod = await import('@google-cloud/firestore');\n _Pipelines = mod.Pipelines;\n }\n return _Pipelines;\n}\n\ntype PipelinesType = typeof Pipelines;\ntype BooleanExpr = Pipelines.BooleanExpression;\n\n/**\n * Maps a QueryFilter to a Pipeline BooleanExpression.\n *\n * Uses the string-based overloads (e.g. `equal(fieldName, value)`) which\n * accept `unknown` values, avoiding type issues with `constant()` overloads.\n */\nfunction buildFilterExpression(P: PipelinesType, filter: QueryFilter): BooleanExpr {\n const { field: fieldName, op, value } = filter;\n\n switch (op) {\n case '==':\n return P.equal(fieldName, value);\n case '!=':\n return P.notEqual(fieldName, value);\n case '<':\n return P.lessThan(fieldName, value);\n case '<=':\n return P.lessThanOrEqual(fieldName, value);\n case '>':\n return P.greaterThan(fieldName, value);\n case '>=':\n return P.greaterThanOrEqual(fieldName, value);\n case 'in':\n return P.equalAny(fieldName, value as Array<unknown>);\n case 'not-in':\n return P.notEqualAny(fieldName, value as Array<unknown>);\n case 'array-contains':\n return P.arrayContains(fieldName, value);\n case 'array-contains-any':\n return P.arrayContainsAny(fieldName, value as Array<unknown>);\n default:\n throw new Error(`Unsupported filter op for pipeline mode: ${op}`);\n }\n}\n\nexport function 
createPipelineQueryAdapter(\n db: Firestore,\n collectionPath: string,\n): PipelineQueryAdapter {\n return {\n async query(filters: QueryFilter[], options?: QueryOptions): Promise<StoredGraphRecord[]> {\n const P = await getPipelines();\n\n // Build pipeline\n let pipeline = db.pipeline().collection(collectionPath);\n\n // Apply filters\n if (filters.length === 1) {\n pipeline = pipeline.where(buildFilterExpression(P, filters[0]));\n } else if (filters.length > 1) {\n const [first, second, ...rest] = filters.map((f) => buildFilterExpression(P, f));\n pipeline = pipeline.where(P.and(first, second, ...rest));\n }\n\n // Apply sort\n if (options?.orderBy) {\n const f = P.field(options.orderBy.field);\n const ordering = options.orderBy.direction === 'desc' ? f.descending() : f.ascending();\n pipeline = pipeline.sort(ordering);\n }\n\n // Apply limit\n if (options?.limit !== undefined) {\n pipeline = pipeline.limit(options.limit);\n }\n\n const snap = await pipeline.execute();\n return snap.results.map((r) => r.data() as StoredGraphRecord);\n },\n };\n}\n","/**\n * Firestore-specific client factory.\n *\n * Kept in its own module so that bundlers don't pull\n * `@google-cloud/firestore` into non-Firestore entry points — most\n * importantly the Cloudflare DO backend (`firegraph/cloudflare`) and the\n * routing primitive (`firegraph/backend`), both of which must load cleanly\n * in a Workers environment where the Node Firestore SDK is unavailable.\n */\n\nimport type { Firestore } from '@google-cloud/firestore';\n\nimport { GraphClientImpl } from './client.js';\nimport type { StorageBackend } from './internal/backend.js';\nimport { createFirestoreBackend } from './internal/firestore-backend.js';\nimport type {\n DynamicGraphClient,\n DynamicRegistryConfig,\n GraphClient,\n GraphClientOptions,\n QueryMode,\n} from './types.js';\n\nlet _standardModeWarned = false;\n\nexport function createGraphClient(\n db: Firestore,\n collectionPath: string,\n options: GraphClientOptions 
& { registryMode: DynamicRegistryConfig },\n): DynamicGraphClient;\nexport function createGraphClient(\n db: Firestore,\n collectionPath: string,\n options?: GraphClientOptions,\n): GraphClient;\nexport function createGraphClient(\n db: Firestore,\n collectionPath: string,\n options?: GraphClientOptions,\n): GraphClient | DynamicGraphClient {\n const requestedMode = options?.queryMode ?? 'pipeline';\n const isEmulator = !!process.env.FIRESTORE_EMULATOR_HOST;\n const effectiveMode: QueryMode = isEmulator ? 'standard' : requestedMode;\n\n if (\n effectiveMode === 'standard' &&\n !isEmulator &&\n requestedMode === 'standard' &&\n !_standardModeWarned\n ) {\n _standardModeWarned = true;\n console.warn(\n '[firegraph] Standard query mode enabled. This is NOT recommended for production:\\n' +\n ' - Enterprise Firestore: data.* filters cause full collection scans (high billing)\\n' +\n ' - Standard Firestore: data.* filters without composite indexes will fail\\n' +\n ' See: https://github.com/typicalday/firegraph#query-modes',\n );\n }\n\n const backend = createFirestoreBackend(db, collectionPath, { queryMode: effectiveMode });\n\n let metaBackend: StorageBackend | undefined;\n if (options?.registryMode?.collection && options.registryMode.collection !== collectionPath) {\n metaBackend = createFirestoreBackend(db, options.registryMode.collection, {\n queryMode: effectiveMode,\n });\n }\n\n return new GraphClientImpl(backend, options, metaBackend) as GraphClient | DynamicGraphClient;\n}\n","import { nanoid } from 'nanoid';\n\nexport function generateId(): string {\n return nanoid();\n}\n","/**\n * Firestore composite index generator.\n *\n * Translates firegraph's declarative `IndexSpec[]` (core preset plus per-entry\n * registry indexes) into the `firestore.indexes.json` shape consumed by\n * `firebase deploy --only firestore:indexes`.\n *\n * ## What Firestore needs\n *\n * Firestore auto-indexes every top-level field (including `data.*`) for\n * single-field equality 
queries — we only need to emit *composite* indexes\n * here. That means:\n *\n * 1. Single-field specs are dropped (Firestore already covers them).\n * 2. Composite specs (two or more fields) get one `FirestoreIndex`.\n * 3. Specs with `where` are dropped with a warning — Firestore composite\n * indexes do not support partial predicates.\n * 4. When a registry entry has `targetGraph` set, every composite is also\n * emitted with `queryScope: 'COLLECTION_GROUP'` under the targetGraph\n * name, so `findEdgesGlobal()` queries across subgraphs can hit an\n * index.\n *\n * The SQLite-flavored backends (DO, legacy) consume the same `IndexSpec[]`\n * via `src/internal/sqlite-index-ddl.ts` but emit every spec (single fields\n * included) as `CREATE INDEX` DDL.\n */\n\nimport { DEFAULT_CORE_INDEXES } from './default-indexes.js';\nimport type { DiscoveryResult, IndexFieldSpec, IndexSpec, RegistryEntry } from './types.js';\n\nexport interface FirestoreIndexField {\n fieldPath: string;\n order: 'ASCENDING' | 'DESCENDING';\n}\n\nexport interface FirestoreIndex {\n collectionGroup: string;\n queryScope: 'COLLECTION' | 'COLLECTION_GROUP';\n fields: FirestoreIndexField[];\n}\n\nexport interface FirestoreIndexConfig {\n indexes: FirestoreIndex[];\n fieldOverrides: unknown[];\n}\n\nexport interface GenerateIndexOptions {\n /**\n * Replaces firegraph's built-in core preset. Defaults to\n * `DEFAULT_CORE_INDEXES`. Pass `[]` to disable core indexes entirely.\n */\n coreIndexes?: IndexSpec[];\n /**\n * Registry entries supplying per-triple `indexes`. Entries without\n * `indexes` contribute no composites; entries with `targetGraph` also\n * trigger `COLLECTION_GROUP` mirrors under each distinct targetGraph\n * segment name.\n */\n registryEntries?: ReadonlyArray<RegistryEntry>;\n /**\n * Entity discovery result. 
Convenience for callers that have a\n * `DiscoveryResult` but not a built registry — treated as if every\n * discovered entity were expanded to its registry entries carrying just\n * `indexes` + `targetGraph`. Mutually usable with `registryEntries`\n * (both are concatenated and deduplicated at the spec level).\n */\n entities?: DiscoveryResult;\n}\n\nfunction normalizeField(f: string | IndexFieldSpec): IndexFieldSpec {\n return typeof f === 'string' ? { path: f, desc: false } : { path: f.path, desc: !!f.desc };\n}\n\nfunction specFingerprint(spec: IndexSpec, scope: string): string {\n const normalized = spec.fields.map(normalizeField);\n return `${scope}::${JSON.stringify(normalized)}`;\n}\n\nfunction toFirestoreFields(spec: IndexSpec): FirestoreIndexField[] {\n return spec.fields.map((f) => {\n const n = normalizeField(f);\n return {\n fieldPath: n.path,\n order: n.desc ? 'DESCENDING' : 'ASCENDING',\n };\n });\n}\n\nlet warnedOnPartialIndex = false;\n\n/**\n * Build a Firestore index configuration from firegraph's declarative index\n * specs. Deduplicates by field list + scope before emitting. Single-field\n * specs are dropped; partial-index specs (`where` set) are dropped with a\n * one-time warning.\n */\nexport function generateIndexConfig(\n collection: string,\n options: GenerateIndexOptions = {},\n): FirestoreIndexConfig {\n const core = options.coreIndexes ?? [...DEFAULT_CORE_INDEXES];\n const fromEntries = (options.registryEntries ?? []).flatMap((e) => {\n if (!e.indexes) return [] as IndexSpec[];\n return e.indexes;\n });\n\n // DiscoveryResult is a pre-registry shape — it doesn't carry `indexes`\n // per triple (those live on registry entries once built). Accept it to\n // keep the CLI ergonomic, but the only thing we can pull from it right\n // now is the set of distinct `targetGraph` values, which belongs to\n // discovery-time topology metadata. 
Consumers who need per-entity data\n // indexes must go through the registry path.\n const targetGraphNames = new Set<string>();\n for (const entry of options.registryEntries ?? []) {\n if (entry.targetGraph) targetGraphNames.add(entry.targetGraph);\n }\n if (options.entities) {\n for (const [, entity] of options.entities.edges) {\n const tg = entity.targetGraph ?? entity.topology?.targetGraph;\n if (tg) targetGraphNames.add(tg);\n }\n }\n\n const allSpecs = [...core, ...fromEntries];\n const seen = new Set<string>();\n const indexes: FirestoreIndex[] = [];\n\n for (const spec of allSpecs) {\n if (!spec.fields || spec.fields.length < 2) {\n // Single-field: Firestore auto-indexes — nothing to emit.\n continue;\n }\n if (spec.where) {\n if (!warnedOnPartialIndex) {\n warnedOnPartialIndex = true;\n console.warn(\n 'firegraph: IndexSpec.where is ignored by the Firestore generator — ' +\n 'Firestore composite indexes do not support predicates. ' +\n 'The SQLite backends will still honor `where`.',\n );\n }\n continue;\n }\n\n const fields = toFirestoreFields(spec);\n\n const colKey = specFingerprint(spec, `col:${collection}`);\n if (!seen.has(colKey)) {\n seen.add(colKey);\n indexes.push({\n collectionGroup: collection,\n queryScope: 'COLLECTION',\n fields,\n });\n }\n\n // Mirror into every distinct `targetGraph` as a collection group index.\n // `findEdgesGlobal()` runs across all subcollections matching the\n // targetGraph name, and each pattern needs its own CG index.\n for (const tg of targetGraphNames) {\n const cgKey = specFingerprint(spec, `cg:${tg}`);\n if (seen.has(cgKey)) continue;\n seen.add(cgKey);\n indexes.push({\n collectionGroup: tg,\n queryScope: 'COLLECTION_GROUP',\n fields,\n });\n }\n }\n\n return { indexes, fieldOverrides: [] };\n}\n\n/**\n * Internal test hook — reset the one-time partial-index warning flag so\n * tests covering the warn branch can run sequentially without sharing\n * state.\n */\nexport function _resetIndexGenWarningsForTest(): 
void {\n warnedOnPartialIndex = false;\n}\n","import http from 'node:http';\n\nimport { readEditorPort } from './config.js';\nimport { summarizeEdge, summarizeRecord } from './shaping.js';\nimport type {\n GetEdgesInput,\n GetEdgesResult,\n GetNodeDetailInput,\n GetNodesInput,\n GetNodesResult,\n NodeDetailResult,\n QueryClientOptions,\n SchemaResult,\n SearchInput,\n SearchResult,\n SummarizedEdge,\n SummarizedRecord,\n TraverseHopResult,\n TraverseInput,\n TraverseResult,\n} from './types.js';\n\n// --- Error ---\n\nexport type QueryClientErrorCode = 'VALIDATION_ERROR' | 'CONNECTION_FAILED' | 'SERVER_ERROR';\n\nexport class QueryClientError extends Error {\n constructor(\n message: string,\n public readonly code: QueryClientErrorCode,\n ) {\n super(message);\n this.name = 'QueryClientError';\n }\n}\n\n// --- Validation helpers ---\n\nfunction requireString(value: unknown, name: string): asserts value is string {\n if (typeof value !== 'string' || value.length === 0) {\n throw new QueryClientError(`${name} must be a non-empty string`, 'VALIDATION_ERROR');\n }\n}\n\nfunction clampInt(value: number | undefined, min: number, max: number, fallback: number): number {\n if (value == null) return fallback;\n if (!Number.isInteger(value)) {\n throw new QueryClientError(`limit must be an integer`, 'VALIDATION_ERROR');\n }\n return Math.max(min, Math.min(max, value));\n}\n\nfunction validateSortDir(dir: string | undefined): void {\n if (dir != null && dir !== 'asc' && dir !== 'desc') {\n throw new QueryClientError(`sortDir must be 'asc' or 'desc'`, 'VALIDATION_ERROR');\n }\n}\n\n// --- HTTP helpers ---\n\nfunction httpGet(url: string): Promise<string> {\n return new Promise((resolve, reject) => {\n http\n .get(url, (res) => {\n let body = '';\n res.on('data', (c: string) => (body += c));\n res.on('end', () => resolve(body));\n })\n .on('error', (err) => {\n reject(new QueryClientError(`Connection failed: ${err.message}`, 'CONNECTION_FAILED'));\n });\n });\n}\n\nfunction 
httpPost(url: string, payload: string): Promise<string> {\n const parsed = new URL(url);\n return new Promise((resolve, reject) => {\n const req = http.request(\n {\n hostname: parsed.hostname,\n port: parsed.port,\n path: parsed.pathname,\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'Content-Length': Buffer.byteLength(payload),\n },\n },\n (res) => {\n let body = '';\n res.on('data', (c: string) => (body += c));\n res.on('end', () => resolve(body));\n },\n );\n req.on('error', (err) => {\n reject(new QueryClientError(`Connection failed: ${err.message}`, 'CONNECTION_FAILED'));\n });\n req.write(payload);\n req.end();\n });\n}\n\nfunction parseTrpcResponse(raw: string, procedure: string): unknown {\n let parsed: Record<string, unknown>;\n try {\n parsed = JSON.parse(raw);\n } catch {\n throw new QueryClientError(\n `Invalid JSON from ${procedure}: ${raw.slice(0, 200)}`,\n 'SERVER_ERROR',\n );\n }\n if (parsed.error) {\n const msg =\n typeof parsed.error === 'object' && parsed.error !== null\n ? ((parsed.error as Record<string, unknown>).message ?? JSON.stringify(parsed.error))\n : String(parsed.error);\n throw new QueryClientError(`Server error from ${procedure}: ${msg}`, 'SERVER_ERROR');\n }\n return (parsed.result as Record<string, unknown>)?.data ?? parsed;\n}\n\n// --- Client ---\n\nexport class QueryClient {\n private readonly baseUrl: string;\n\n constructor(options?: QueryClientOptions) {\n const host = options?.host ?? 'localhost';\n const port = options?.port ?? readEditorPort();\n this.baseUrl = `http://${host}:${port}/api/trpc`;\n }\n\n private async query(procedure: string, input?: unknown): Promise<unknown> {\n const qs = input != null ? 
`?input=${encodeURIComponent(JSON.stringify(input))}` : '';\n const url = `${this.baseUrl}/${procedure}${qs}`;\n const raw = await httpGet(url);\n return parseTrpcResponse(raw, procedure);\n }\n\n private async mutate(procedure: string, input: unknown): Promise<unknown> {\n const url = `${this.baseUrl}/${procedure}`;\n const raw = await httpPost(url, JSON.stringify(input));\n return parseTrpcResponse(raw, procedure);\n }\n\n // --- Public API ---\n\n async getSchema(): Promise<SchemaResult> {\n const data = (await this.query('getSchema')) as Record<string, unknown>;\n return {\n nodeTypes: ((data.nodeTypes as unknown[]) ?? []).map(\n (t) =>\n (typeof t === 'object' && t !== null ? (t as Record<string, unknown>).type : t) as string,\n ),\n edgeTypes: ((data.edgeTypes as unknown[]) ?? []).map((t) => {\n const e = t as Record<string, unknown>;\n return {\n relation: e.axbType as string,\n from: e.aType as string,\n to: e.bType as string,\n inverseLabel: (e.inverseLabel as string) ?? null,\n };\n }),\n };\n }\n\n async getNodeDetail(input: GetNodeDetailInput): Promise<NodeDetailResult> {\n requireString(input.uid, 'uid');\n const data = (await this.query('getNodeDetail', { uid: input.uid })) as Record<string, unknown>;\n return {\n node: summarizeRecord(data.node as Record<string, unknown> | null),\n outEdges: ((data.outEdges as Record<string, unknown>[]) ?? [])\n .map(summarizeEdge)\n .filter(Boolean) as SummarizedEdge[],\n inEdges: ((data.inEdges as Record<string, unknown>[]) ?? 
[])\n .map(summarizeEdge)\n .filter(Boolean) as SummarizedEdge[],\n };\n }\n\n async getNodes(input: GetNodesInput): Promise<GetNodesResult> {\n const limit = clampInt(input.limit, 1, 200, 25);\n validateSortDir(input.sortDir);\n const data = (await this.query('getNodes', {\n type: input.type,\n limit,\n startAfter: input.startAfter,\n sortBy: input.sortBy,\n sortDir: input.sortDir,\n where: input.where,\n })) as Record<string, unknown>;\n return {\n nodes: ((data.nodes as Record<string, unknown>[]) ?? [])\n .map(summarizeRecord)\n .filter(Boolean) as SummarizedRecord[],\n hasMore: (data.hasMore as boolean) ?? false,\n nextCursor: data.nextCursor as string | null | undefined,\n };\n }\n\n async getEdges(input: GetEdgesInput): Promise<GetEdgesResult> {\n const hasFilter =\n input.aType ||\n input.aUid ||\n input.axbType ||\n input.bType ||\n input.bUid ||\n (input.where && input.where.length > 0);\n if (!hasFilter) {\n throw new QueryClientError(\n 'getEdges requires at least one filter field (aType, aUid, axbType, bType, bUid, or where)',\n 'VALIDATION_ERROR',\n );\n }\n const limit = clampInt(input.limit, 1, 200, 25);\n validateSortDir(input.sortDir);\n const data = (await this.query('getEdges', {\n aType: input.aType,\n aUid: input.aUid,\n axbType: input.axbType,\n bType: input.bType,\n bUid: input.bUid,\n limit,\n startAfter: input.startAfter,\n sortBy: input.sortBy,\n sortDir: input.sortDir,\n where: input.where,\n })) as Record<string, unknown>;\n return {\n edges: ((data.edges as Record<string, unknown>[]) ?? [])\n .map(summarizeEdge)\n .filter(Boolean) as SummarizedEdge[],\n hasMore: (data.hasMore as boolean) ?? 
false,\n nextCursor: data.nextCursor as string | null | undefined,\n };\n }\n\n async traverse(input: TraverseInput): Promise<TraverseResult> {\n requireString(input.startUid, 'startUid');\n if (!input.hops || input.hops.length === 0) {\n throw new QueryClientError('traverse requires at least one hop', 'VALIDATION_ERROR');\n }\n for (let i = 0; i < input.hops.length; i++) {\n const hop = input.hops[i];\n requireString(hop.axbType, `hops[${i}].axbType`);\n if (hop.direction != null && hop.direction !== 'forward' && hop.direction !== 'reverse') {\n throw new QueryClientError(\n `hops[${i}].direction must be 'forward' or 'reverse'`,\n 'VALIDATION_ERROR',\n );\n }\n if (hop.limit != null && (!Number.isInteger(hop.limit) || hop.limit < 1)) {\n throw new QueryClientError(\n `hops[${i}].limit must be a positive integer`,\n 'VALIDATION_ERROR',\n );\n }\n }\n if (input.maxReads != null && (!Number.isInteger(input.maxReads) || input.maxReads < 1)) {\n throw new QueryClientError('maxReads must be a positive integer', 'VALIDATION_ERROR');\n }\n if (\n input.concurrency != null &&\n (!Number.isInteger(input.concurrency) || input.concurrency < 1)\n ) {\n throw new QueryClientError('concurrency must be a positive integer', 'VALIDATION_ERROR');\n }\n\n const data = (await this.mutate('traverse', input)) as Record<string, unknown>;\n return {\n hops: ((data.hops as Record<string, unknown>[]) ?? []).map(\n (h): TraverseHopResult => ({\n relation: h.axbType as string,\n direction: h.direction as string,\n depth: h.depth as number,\n edgeCount: ((h.edges as unknown[]) ?? []).length,\n edges: ((h.edges as Record<string, unknown>[]) ?? [])\n .map(summarizeEdge)\n .filter(Boolean) as SummarizedEdge[],\n truncated: (h.truncated as boolean) ?? false,\n }),\n ),\n totalReads: (data.totalReads as number) ?? 0,\n truncated: (data.truncated as boolean) ?? 
false,\n };\n }\n\n async search(input: SearchInput): Promise<SearchResult> {\n requireString(input.q, 'q');\n const limit = clampInt(input.limit, 1, 50, 20);\n const data = (await this.query('search', { q: input.q, limit })) as Record<string, unknown>;\n return {\n results: ((data.results as Record<string, unknown>[]) ?? [])\n .map((r) => {\n const base = summarizeRecord(r);\n if (!base) return null;\n return {\n ...base,\n matchType: (r._matchType as string) ?? null,\n };\n })\n .filter(Boolean) as (SummarizedRecord & { matchType: string | null })[],\n };\n }\n}\n","import { readFileSync } from 'node:fs';\nimport { join } from 'node:path';\n\nconst CONFIG_FILES = ['firegraph.config.ts', 'firegraph.config.js', 'firegraph.config.mjs'];\nconst DEFAULT_PORT = 3884;\n\n/**\n * Read the editor port from firegraph config files using regex.\n * Zero-dependency — no jiti needed.\n */\nexport function readEditorPort(cwd?: string): number {\n const dir = cwd ?? process.cwd();\n for (const name of CONFIG_FILES) {\n try {\n const content = readFileSync(join(dir, name), 'utf8');\n const editorBlock = content.match(/editor\\s*:\\s*\\{[^}]*\\}/s)?.[0] ?? 
'';\n const portMatch = editorBlock.match(/port\\s*:\\s*(\\d+)/);\n if (portMatch) return parseInt(portMatch[1], 10);\n } catch {\n continue;\n }\n }\n return DEFAULT_PORT;\n}\n","import type { SummarizedEdge, SummarizedRecord } from './types.js';\n\nexport function summarizeRecord(r: Record<string, unknown> | null): SummarizedRecord | null {\n if (!r) return null;\n const out: SummarizedRecord = { type: r.aType as string, uid: r.aUid as string };\n const data = r.data as Record<string, unknown> | undefined;\n if (data && typeof data === 'object' && Object.keys(data).length > 0) {\n out.data = data;\n }\n return out;\n}\n\nexport function summarizeEdge(r: Record<string, unknown> | null): SummarizedEdge | null {\n if (!r) return null;\n const out: SummarizedEdge = {\n fromType: r.aType as string,\n fromUid: r.aUid as string,\n relation: r.axbType as string,\n toType: r.bType as string,\n toUid: r.bUid as string,\n };\n const data = r.data as Record<string, unknown> | undefined;\n if (data && typeof data === 'object' && Object.keys(data).length > 0) {\n out.data = data;\n }\n return out;\n}\n","import { FieldValue } from '@google-cloud/firestore';\n\nimport { NODE_RELATION } from './internal/constants.js';\nimport type { GraphRecord } from './types.js';\n\nexport function buildNodeRecord(\n aType: string,\n uid: string,\n data: Record<string, unknown>,\n): GraphRecord {\n const now = FieldValue.serverTimestamp();\n return {\n aType,\n aUid: uid,\n axbType: NODE_RELATION,\n bType: aType,\n bUid: uid,\n data,\n createdAt: now,\n updatedAt: now,\n };\n}\n\nexport function buildEdgeRecord(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n): GraphRecord {\n const now = FieldValue.serverTimestamp();\n return {\n aType,\n aUid,\n axbType,\n bType,\n bUid,\n data,\n createdAt: now,\n updatedAt: now,\n };\n}\n","/**\n * Storage-scope path utilities — materialized-path parsing helpers for the\n * SQLite 
backend's `storageScope` string and for any custom backend that\n * adopts the same encoding (e.g. a cross-DO routing layer that uses\n * `storageScope` as a Durable Object name).\n *\n * **Storage-scope** (as produced by `SqliteBackendImpl`) interleaves parent\n * UIDs with subgraph names:\n *\n * ```\n * '' // root\n * 'A/memories' // g.subgraph(A, 'memories')\n * 'A/memories/B/context' // .subgraph(B, 'context') on the above\n * ```\n *\n * The structure is the same as a Firestore collection path with the\n * collection/doc segments reordered: each pair is `<uid>/<name>`, where\n * `<uid>` is a node UID in the parent scope and `<name>` is the subgraph\n * name. Use these helpers to decode that structure when building cross-\n * backend routers (see `createRoutingBackend`).\n *\n * For Firestore paths (which begin with a collection segment), use\n * `resolveAncestorCollection` / `isAncestorUid` from `./cross-graph.js`.\n */\n\n/**\n * One segment of a materialized-path storage-scope — a `(uid, name)` pair\n * produced by one `subgraph(uid, name)` call.\n */\nexport interface StorageScopeSegment {\n /** Parent node UID at the enclosing scope. */\n uid: string;\n /** Subgraph name chosen by the caller (e.g. `'memories'`). */\n name: string;\n}\n\n/**\n * Parse a materialized-path storage-scope into its `(uid, name)` pairs.\n *\n * Returns `[]` for the root (`''`). 
Throws `Error('INVALID_SCOPE_PATH')`\n * when the string has an odd number of segments (a corrupt path — every\n * level contributes exactly two segments) or when any segment is empty.\n *\n * @example\n * ```ts\n * parseStorageScope(''); // []\n * parseStorageScope('A/memories'); // [{ uid: 'A', name: 'memories' }]\n * parseStorageScope('A/memories/B/context'); // [{ uid: 'A', name: 'memories' }, { uid: 'B', name: 'context' }]\n * ```\n */\nexport function parseStorageScope(scope: string): StorageScopeSegment[] {\n if (scope === '') return [];\n const parts = scope.split('/');\n if (parts.length % 2 !== 0) {\n throw new Error(\n `INVALID_SCOPE_PATH: storage-scope \"${scope}\" has an odd number of segments; ` +\n 'expected interleaved <uid>/<name> pairs.',\n );\n }\n const out: StorageScopeSegment[] = [];\n for (let i = 0; i < parts.length; i += 2) {\n const uid = parts[i];\n const name = parts[i + 1];\n if (!uid || !name) {\n throw new Error(\n `INVALID_SCOPE_PATH: storage-scope \"${scope}\" contains an empty segment at position ${i}.`,\n );\n }\n out.push({ uid, name });\n }\n return out;\n}\n\n/**\n * Resolve the ancestor **storage-scope** at which a given UID's node lives,\n * by scanning a materialized-path storage-scope for that UID.\n *\n * Mirrors `resolveAncestorCollection()` from `./cross-graph.js` for\n * Firestore paths, but operates on `storageScope` (no leading collection\n * segment — segments are `<uid>/<name>` pairs).\n *\n * @returns The storage-scope at which the UID's node was added via\n * `subgraph(uid, _)`, or `null` if the UID does not appear at a UID\n * position in the path.\n *\n * @example\n * ```ts\n * // Scope: 'A/memories/B/context'\n * resolveAncestorScope('A/memories/B/context', 'A'); // '' (A was added at root)\n * resolveAncestorScope('A/memories/B/context', 'B'); // 'A/memories'\n * resolveAncestorScope('A/memories/B/context', 'X'); // null\n * ```\n */\nexport function resolveAncestorScope(storageScope: string, uid: string): 
string | null {\n if (!uid) return null;\n if (storageScope === '') return null;\n const parts = storageScope.split('/');\n // UID positions are even indices (0, 2, 4, …); names are at odd indices.\n for (let i = 0; i < parts.length; i += 2) {\n if (parts[i] === uid) {\n return i === 0 ? '' : parts.slice(0, i).join('/');\n }\n }\n return null;\n}\n\n/**\n * Boolean shorthand for `resolveAncestorScope(scope, uid) !== null`.\n */\nexport function isAncestorScopeUid(storageScope: string, uid: string): boolean {\n return resolveAncestorScope(storageScope, uid) !== null;\n}\n\n/**\n * Join a parent storage-scope with a new `(uid, name)` pair, producing the\n * storage-scope that `backend.subgraph(uid, name)` would use internally.\n *\n * This is the inverse of `parseStorageScope`'s per-segment semantics and is\n * useful when computing DO names / shard keys from the router callback.\n */\nexport function appendStorageScope(parentScope: string, uid: string, name: string): string {\n if (!uid || uid.includes('/')) {\n throw new Error(\n `INVALID_SCOPE_PATH: uid must be non-empty and must not contain \"/\": got \"${uid}\".`,\n );\n }\n if (!name || name.includes('/')) {\n throw new Error(\n `INVALID_SCOPE_PATH: name must be non-empty and must not contain \"/\": got \"${name}\".`,\n );\n }\n return parentScope ? `${parentScope}/${uid}/${name}` : `${uid}/${name}`;\n}\n","import { TraversalError } from './errors.js';\nimport type {\n FindEdgesParams,\n GraphClient,\n GraphReader,\n GraphRegistry,\n HopDefinition,\n HopResult,\n StoredGraphRecord,\n TraversalBuilder,\n TraversalOptions,\n TraversalResult,\n} from './types.js';\n\nconst DEFAULT_LIMIT = 10;\nconst DEFAULT_MAX_READS = 100;\nconst DEFAULT_CONCURRENCY = 5;\n\n/** One-time warning flag: emitted when cross-graph hop is silently skipped. */\nlet _crossGraphWarned = false;\n\n/** Type guard to check if a reader is a GraphClient (has subgraph method). 
*/\nfunction isGraphClient(reader: GraphReader): reader is GraphClient {\n return 'subgraph' in reader && typeof (reader as GraphClient).subgraph === 'function';\n}\n\nclass Semaphore {\n private queue: Array<() => void> = [];\n private active = 0;\n\n constructor(private readonly slots: number) {}\n\n async acquire(): Promise<void> {\n if (this.active < this.slots) {\n this.active++;\n return;\n }\n return new Promise<void>((resolve) => {\n this.queue.push(resolve);\n });\n }\n\n release(): void {\n this.active--;\n const next = this.queue.shift();\n if (next) {\n this.active++;\n next();\n }\n }\n}\n\nclass TraversalBuilderImpl implements TraversalBuilder {\n private readonly hops: HopDefinition[] = [];\n\n constructor(\n private readonly reader: GraphReader,\n private readonly startUid: string,\n private readonly registry?: GraphRegistry,\n ) {}\n\n follow(axbType: string, options?: Omit<HopDefinition, 'axbType'>): TraversalBuilder {\n this.hops.push({ axbType, ...options });\n return this;\n }\n\n async run(options?: TraversalOptions): Promise<TraversalResult> {\n if (this.hops.length === 0) {\n throw new TraversalError('Traversal requires at least one follow() hop');\n }\n\n const maxReads = options?.maxReads ?? DEFAULT_MAX_READS;\n const concurrency = options?.concurrency ?? DEFAULT_CONCURRENCY;\n const returnIntermediates = options?.returnIntermediates ?? 
false;\n const semaphore = new Semaphore(concurrency);\n\n let totalReads = 0;\n let truncated = false;\n // Track (uid, reader) pairs to support context carry-forward across hops.\n // When a hop crosses into a subgraph, the resulting UIDs carry the subgraph\n // reader so subsequent hops without targetGraph stay in that subgraph.\n let sources: Array<{ uid: string; reader: GraphReader }> = [\n { uid: this.startUid, reader: this.reader },\n ];\n const hopResults: HopResult[] = [];\n\n for (let depth = 0; depth < this.hops.length; depth++) {\n const hop = this.hops[depth];\n\n if (sources.length === 0) {\n hopResults.push({\n axbType: hop.axbType,\n depth,\n edges: [],\n sourceCount: 0,\n truncated: false,\n });\n continue;\n }\n\n const hopEdges: Array<{ edge: StoredGraphRecord; reader: GraphReader }> = [];\n const sourceCount = sources.length;\n let hopTruncated = false;\n\n // Resolve targetGraph for this hop:\n // 1. Explicit on the hop definition takes precedence\n // 2. Otherwise check the registry for the axbType\n const resolvedTargetGraph = this.resolveTargetGraph(hop);\n const direction = hop.direction ?? 'forward';\n const isCrossGraph = direction === 'forward' && !!resolvedTargetGraph;\n\n const tasks = sources.map(({ uid, reader: sourceReader }) => async () => {\n if (totalReads >= maxReads) {\n hopTruncated = true;\n return;\n }\n\n await semaphore.acquire();\n try {\n if (totalReads >= maxReads) {\n hopTruncated = true;\n return;\n }\n\n totalReads++;\n\n const params: FindEdgesParams = { axbType: hop.axbType };\n\n if (direction === 'forward') {\n params.aUid = uid;\n if (hop.bType) params.bType = hop.bType;\n } else {\n params.bUid = uid;\n if (hop.aType) params.aType = hop.aType;\n }\n\n if (direction === 'forward' && hop.aType) {\n params.aType = hop.aType;\n }\n if (direction === 'reverse' && hop.bType) {\n params.bType = hop.bType;\n }\n\n if (hop.orderBy) params.orderBy = hop.orderBy;\n\n const limit = hop.limit ?? 
DEFAULT_LIMIT;\n if (hop.filter) {\n params.limit = 0;\n } else {\n params.limit = limit;\n }\n\n // Choose the reader for this hop:\n // - Cross-graph hop: create a subgraph reader from the ROOT client\n // (targetGraph is always relative to root)\n // - No cross-graph: use the carried-forward reader from previous hop\n // (context tracking — stay in whatever subgraph we're already in)\n let hopReader: GraphReader;\n let nextReader: GraphReader;\n if (isCrossGraph) {\n if (isGraphClient(this.reader)) {\n hopReader = this.reader.subgraph(uid, resolvedTargetGraph!);\n nextReader = hopReader;\n } else {\n hopReader = sourceReader;\n nextReader = sourceReader;\n if (!_crossGraphWarned) {\n _crossGraphWarned = true;\n console.warn(\n `[firegraph] Traversal hop \"${hop.axbType}\" has targetGraph \"${resolvedTargetGraph}\" ` +\n 'but the reader does not support subgraph(). Cross-graph hop will query the current ' +\n 'collection instead. Pass a GraphClient to createTraversal() to enable cross-graph traversal.',\n );\n }\n }\n } else {\n // No targetGraph — carry forward context from previous hop\n hopReader = sourceReader;\n nextReader = sourceReader;\n }\n\n let edges = await hopReader.findEdges(params);\n\n if (hop.filter) {\n edges = edges.filter(hop.filter);\n edges = edges.slice(0, limit);\n }\n\n for (const edge of edges) {\n hopEdges.push({ edge, reader: nextReader });\n }\n } finally {\n semaphore.release();\n }\n });\n\n await Promise.all(tasks.map((task) => task()));\n\n const edges = hopEdges.map((h) => h.edge);\n\n hopResults.push({\n axbType: hop.axbType,\n depth,\n edges: returnIntermediates ? 
[...edges] : edges,\n sourceCount,\n truncated: hopTruncated,\n });\n\n if (hopTruncated) {\n truncated = true;\n }\n\n // Build next sources with deduplication by UID.\n // When the same UID appears from multiple source readers, the first one wins.\n const seen = new Map<string, GraphReader>();\n for (const { edge, reader: edgeReader } of hopEdges) {\n const nextUid = direction === 'forward' ? edge.bUid : edge.aUid;\n if (!seen.has(nextUid)) {\n seen.set(nextUid, edgeReader);\n }\n }\n sources = [...seen.entries()].map(([uid, reader]) => ({ uid, reader }));\n }\n\n const lastHop = hopResults[hopResults.length - 1];\n\n return {\n nodes: lastHop.edges,\n hops: hopResults,\n totalReads,\n truncated,\n };\n }\n\n /**\n * Resolve the targetGraph for a hop. Priority:\n * 1. Explicit `hop.targetGraph` (user override)\n * 2. Registry `targetGraph` for the axbType (if registry available)\n * 3. undefined (no cross-graph)\n */\n private resolveTargetGraph(hop: HopDefinition): string | undefined {\n if (hop.targetGraph) return hop.targetGraph;\n\n if (this.registry) {\n const entries = this.registry.lookupByAxbType(hop.axbType);\n // All entries for the same axbType should share targetGraph; use the first non-undefined\n for (const entry of entries) {\n if (entry.targetGraph) return entry.targetGraph;\n }\n }\n\n return undefined;\n }\n}\n\n/** @internal Reset the one-time cross-graph warning flag (for testing). 
*/\nexport function _resetCrossGraphWarning(): void {\n _crossGraphWarned = false;\n}\n\n/**\n * Create a traversal builder for multi-hop graph traversal.\n *\n * Accepts either a `GraphReader` (backwards compatible) or a `GraphClient`.\n * When a `GraphClient` is provided, cross-graph traversal via `targetGraph`\n * is supported — the traversal can follow edges into subgraphs.\n *\n * @param reader - A `GraphClient` or `GraphReader` to execute queries against\n * @param startUid - UID of the starting node\n * @param registry - Optional registry for automatic `targetGraph` resolution\n */\nexport function createTraversal(\n reader: GraphClient | GraphReader,\n startUid: string,\n registry?: GraphRegistry,\n): TraversalBuilder {\n return new TraversalBuilderImpl(reader, startUid, registry);\n}\n","/**\n * Model Views — framework-agnostic view definitions for graph entities.\n *\n * Projects define Web Components that render entity data in purpose-driven\n * ways. Each view class declares a static `viewName`, and receives the\n * entity's `data` payload via a `data` property setter.\n *\n * @example\n * ```ts\n * import { defineViews } from 'firegraph';\n *\n * class UserCard extends HTMLElement {\n * static viewName = 'card';\n * static description = 'Compact user card';\n * private _data: Record<string, unknown> = {};\n * set data(v: Record<string, unknown>) { this._data = v; this.render(); }\n * connectedCallback() { this.render(); }\n * private render() {\n * this.innerHTML = `<strong>${this._data.displayName ?? ''}</strong>`;\n * }\n * }\n *\n * export default defineViews({\n * nodes: { user: { views: [UserCard] } },\n * });\n * ```\n */\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/**\n * A Web Component class used as a view. The class must have a static\n * `viewName` and must be constructable. 
It will be registered as a custom\n * element via `customElements.define()` in browser environments.\n *\n * Note: this interface avoids referencing `HTMLElement` directly so the\n * library can compile without DOM lib types. Consumer code (which has DOM)\n * will satisfy this constraint naturally.\n */\nexport interface ViewComponentClass {\n new (...args: any[]): { data: Record<string, unknown> };\n /** Short identifier for this view (e.g. 'card', 'profile'). */\n viewName: string;\n /** Optional human-readable description. */\n description?: string;\n}\n\n/** Configuration for all views of a single entity type. */\nexport interface EntityViewConfig {\n /** View component classes to register. */\n views: ViewComponentClass[];\n /**\n * Optional sample data for the gallery. A single object matching\n * the entity's JSON Schema — shared across all views.\n */\n sampleData?: Record<string, unknown>;\n}\n\n/** Input shape accepted by `defineViews()`. */\nexport interface ViewRegistryInput {\n /** Node views keyed by aType (e.g. 'user', 'tour'). */\n nodes?: Record<string, EntityViewConfig>;\n /** Edge views keyed by axbType (e.g. 'hasDeparture'). */\n edges?: Record<string, EntityViewConfig>;\n}\n\n/** Serialisable metadata for a single view. */\nexport interface ViewMeta {\n /** Custom element tag name (e.g. 'fg-user-card'). */\n tagName: string;\n /** Short identifier matching the component's static viewName. */\n viewName: string;\n /** Optional human-readable description. */\n description?: string;\n}\n\n/** Serialisable metadata for all views of a single entity type. */\nexport interface EntityViewMeta {\n views: ViewMeta[];\n sampleData?: Record<string, unknown>;\n}\n\n/** The resolved view registry returned by `defineViews()`. 
*/\nexport interface ViewRegistry {\n nodes: Record<string, EntityViewMeta>;\n edges: Record<string, EntityViewMeta>;\n}\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Sanitise a string for use as part of a custom element tag name. */\nfunction sanitizeTagPart(s: string): string {\n return s\n .toLowerCase()\n .replace(/[^a-z0-9]/g, '-')\n .replace(/-+/g, '-')\n .replace(/^-|-$/g, '');\n}\n\n/** Minimal interface for CustomElementRegistry (avoids depending on DOM lib). */\ninterface CustomElementRegistryLike {\n get(name: string): unknown;\n define(name: string, constructor: unknown): void;\n}\n\n/**\n * Try to access the browser's `customElements` registry.\n * Returns `null` in Node.js or environments without Web Components support.\n */\nfunction getCustomElements(): CustomElementRegistryLike | null {\n const g = globalThis as any;\n if (g.customElements && typeof g.customElements.define === 'function') {\n return g.customElements as CustomElementRegistryLike;\n }\n return null;\n}\n\n/**\n * Wrap a view class so that errors in connectedCallback, disconnectedCallback,\n * and the data setter are caught and logged rather than crashing the page.\n * Shows an inline error message when the view fails to render.\n */\nfunction resilientView(ViewClass: ViewComponentClass, tagName: string): ViewComponentClass {\n const g = globalThis as any;\n if (!g.HTMLElement) return ViewClass; // Node.js — no wrapping needed\n\n const Wrapped = class extends (ViewClass as unknown as new (...args: any[]) => any) {\n connectedCallback() {\n try {\n super.connectedCallback?.();\n } catch (err) {\n console.warn(`[firegraph] <${tagName}> connectedCallback error:`, err);\n this._showError(err);\n }\n }\n\n disconnectedCallback() {\n try {\n super.disconnectedCallback?.();\n } catch (err) {\n console.warn(`[firegraph] <${tagName}> disconnectedCallback 
error:`, err);\n }\n }\n\n set data(v: Record<string, unknown>) {\n try {\n super.data = v;\n } catch (err) {\n console.warn(`[firegraph] <${tagName}> data setter error:`, err);\n this._showError(err);\n }\n }\n\n get data(): Record<string, unknown> {\n try {\n return super.data;\n } catch {\n return {};\n }\n }\n\n _showError(err: unknown) {\n try {\n this.innerHTML =\n `<div style=\"padding:6px;color:#f87171;font-size:11px;font-family:monospace;\">` +\n `View error in <${tagName}>: ${err instanceof Error ? err.message : String(err)}</div>`;\n } catch {\n /* last resort — don't throw from error handler */\n }\n }\n };\n\n // Preserve static metadata\n (Wrapped as unknown as ViewComponentClass).viewName = ViewClass.viewName;\n (Wrapped as unknown as ViewComponentClass).description = ViewClass.description;\n\n return Wrapped as unknown as ViewComponentClass;\n}\n\n// ---------------------------------------------------------------------------\n// defineViews()\n// ---------------------------------------------------------------------------\n\n/**\n * Build a `ViewRegistry` from component classes.\n *\n * In the browser the components are registered as custom elements with\n * deterministic tag names (`fg-{entityType}-{viewName}`). On the server\n * (Node.js) only metadata is returned — no custom element registration.\n */\nexport function defineViews(input: ViewRegistryInput): ViewRegistry {\n const nodes: Record<string, EntityViewMeta> = {};\n const edges: Record<string, EntityViewMeta> = {};\n const registry = getCustomElements();\n\n // --- nodes ---\n for (const [entityType, config] of Object.entries(input.nodes ?? 
{})) {\n const viewMetas: ViewMeta[] = [];\n for (const ViewClass of config.views) {\n const tagName = `fg-${sanitizeTagPart(entityType)}-${sanitizeTagPart(ViewClass.viewName)}`;\n viewMetas.push({\n tagName,\n viewName: ViewClass.viewName,\n description: ViewClass.description,\n });\n if (registry && !registry.get(tagName)) {\n registry.define(tagName, resilientView(ViewClass, tagName));\n }\n }\n nodes[entityType] = {\n views: viewMetas,\n sampleData: config.sampleData,\n };\n }\n\n // --- edges ---\n for (const [axbType, config] of Object.entries(input.edges ?? {})) {\n const viewMetas: ViewMeta[] = [];\n for (const ViewClass of config.views) {\n const tagName = `fg-edge-${sanitizeTagPart(axbType)}-${sanitizeTagPart(ViewClass.viewName)}`;\n viewMetas.push({\n tagName,\n viewName: ViewClass.viewName,\n description: ViewClass.description,\n });\n if (registry && !registry.get(tagName)) {\n registry.define(tagName, resilientView(ViewClass, tagName));\n }\n }\n edges[axbType] = {\n views: viewMetas,\n sampleData: config.sampleData,\n };\n }\n\n return { nodes, edges 
};\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiBO,SAAS,cAAc,OAAyB;AACrD,MAAI,UAAU,QAAQ,OAAO,UAAU,SAAU,QAAO;AACxD,QAAM,MAAO,MAAkC,iBAAiB;AAChE,SAAO,OAAO,QAAQ,YAAY,YAAY,IAAI,GAAG;AACvD;AArBA,IAYa,mBAEP;AAdN;AAAA;AAAA;AAYO,IAAM,oBAAoB;AAEjC,IAAM,cAAc,oBAAI,IAAI,CAAC,aAAa,YAAY,eAAe,mBAAmB,CAAC;AAAA;AAAA;;;ACdzF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAmCA,SAAS,YAAY,OAAoC;AACvD,SAAO,iBAAiB;AAC1B;AAEA,SAAS,WAAW,OAAmC;AACrD,SAAO,iBAAiB;AAC1B;AAEA,SAAS,oBAAoB,OAA4C;AAEvE,MAAI,UAAU,QAAQ,OAAO,UAAU,SAAU,QAAO;AACxD,QAAM,IAAI;AACV,SACE,OAAO,EAAE,SAAS,YAClB,EAAE,cAAc,UAChB,OAAO,EAAE,OAAO,YAChB,EAAE,aAAa,SAAS;AAE5B;AAEA,SAAS,cAAc,OAAyB;AAC9C,MAAI,UAAU,QAAQ,OAAO,UAAU,SAAU,QAAO;AACxD,QAAM,IAAI;AACV,SACE,EAAE,aAAa,SAAS,iBAAiB,MAAM,QAAS,EAA8B,OAAO;AAEjG;AAYO,SAAS,wBAAwB,MAAwD;AAC9F,SAAO,eAAe,IAAI;AAC5B;AAEA,SAAS,eAAe,OAAyB;AAE/C,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,MAAI,OAAO,UAAU,SAAU,QAAO;AAGtC,MAAI,YAAY,KAAK,GAAG;AACtB,WAAO;AAAA,MACL,CAAC,iBAAiB,GAAG;AAAA,MACrB,SAAS,MAAM;AAAA,MACf,aAAa,MAAM;AAAA,IACrB;AAAA,EACF;AACA,MAAI,WAAW,KAAK,GAAG;AACrB,WAAO;AAAA,MACL,CAAC,iBAAiB,GAAG;AAAA,MACrB,UAAU,MAAM;AAAA,MAChB,WAAW,MAAM;AAAA,IACnB;AAAA,EACF;AACA,MAAI,oBAAoB,KAAK,GAAG;AAC9B,WAAO,EAAE,CAAC,iBAAiB,GAAG,qBAAqB,MAAO,MAA4B,KAAK;AAAA,EAC7F;AACA,MAAI,cAAc,KAAK,GAAG;AAExB,UAAM,IAAI;AACV,UAAM,SACJ,OAAO,EAAE,YAAY,aAAc,EAAE,QAA2B,IAAK,EAAE;AACzE,WAAO,EAAE,CAAC,iBAAiB,GAAG,eAAe,QAAQ,CAAC,GAAG,MAAM,EAAE;AAAA,EACnE;AAGA,MAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,WAAO,MAAM,IAAI,cAAc;AAAA,EACjC;AAGA,QAAM,SAAkC,CAAC;AACzC,aAAW,OAAO,OAAO,KAAK,KAAgC,GAAG;AAC/D,WAAO,GAAG,IAAI,eAAgB,MAAkC,GAAG,CAAC;AAAA,EACtE;AACA,SAAO;AACT;AAgBO,SAAS,0BACd,MACA,IACyB;AACzB,SAAO,iBAAiB,MAAM,EAAE;AAClC;AAEA,SAAS,iBAAiB,OAAgB,IAAyB;AACjE,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,MAAI,OAAO,UAAU,SAAU,QAAO;AAMtC,MACE,YAAY,KAAK,KACjB,WAAW,KAAK,KAChB,oBAAoB,KAAK,KACzB,cAAc,KAAK,GACnB;AACA,WAAO;AAAA,EACT;AAGA,MAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,WAAO,MAAM,IAAI,CAAC,MAAM,iBAAiB,GAAG,EAAE,CAAC;AAAA,EACjD;AAEA,QAAM,MAAM;AAGZ,MAAI,cAAc,GAAG,GAAG;AACtB,UAAM,MAAM
,IAAI,iBAAiB;AAEjC,YAAQ,KAAK;AAAA,MACX,KAAK;AAEH,YAAI,OAAO,IAAI,YAAY,YAAY,OAAO,IAAI,gBAAgB,SAAU,QAAO;AACnF,eAAO,IAAI,2BAAU,IAAI,SAAS,IAAI,WAAW;AAAA,MAEnD,KAAK;AACH,YAAI,OAAO,IAAI,aAAa,YAAY,OAAO,IAAI,cAAc,SAAU,QAAO;AAClF,eAAO,IAAI,0BAAS,IAAI,UAAU,IAAI,SAAS;AAAA,MAEjD,KAAK;AACH,YAAI,CAAC,MAAM,QAAQ,IAAI,MAAM,EAAG,QAAO;AACvC,eAAO,4BAAW,OAAO,IAAI,MAAkB;AAAA,MAEjD,KAAK;AACH,YAAI,OAAO,IAAI,SAAS,SAAU,QAAO;AACzC,YAAI,IAAI;AACN,iBAAO,GAAG,IAAI,IAAI,IAAI;AAAA,QACxB;AAEA,YAAI,CAAC,eAAe;AAClB,0BAAgB;AAChB,kBAAQ;AAAA,YACN;AAAA,UAGF;AAAA,QACF;AACA,eAAO;AAAA,MAET;AAEE,eAAO;AAAA,IACX;AAAA,EACF;AAGA,QAAM,SAAkC,CAAC;AACzC,aAAW,OAAO,OAAO,KAAK,GAAG,GAAG;AAClC,WAAO,GAAG,IAAI,iBAAiB,IAAI,GAAG,GAAG,EAAE;AAAA,EAC7C;AACA,SAAO;AACT;AApNA,IAcA,kBAeI;AA7BJ;AAAA;AAAA;AAcA,uBAAgD;AAWhD;AACA;AAGA,IAAI,gBAAgB;AAAA;AAAA;;;AC7BpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,yBAA2B;;;ACApB,IAAM,gBAAgB;AAOtB,IAAM,sBAAsB;AAO5B,IAAM,iBAAiB,oBAAI,IAAI;AAAA,EACpC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAGM,IAAM,kBAAkB;;;ADrBxB,SAAS,iBAAiB,KAAqB;AACpD,SAAO;AACT;AAEO,SAAS,iBAAiB,MAAc,SAAiB,MAAsB;AACpF,QAAM,YAAY,GAAG,IAAI,GAAG,eAAe,GAAG,OAAO,GAAG,eAAe,GAAG,IAAI;AAC9E,QAAM,WAAO,+BAAW,QAAQ,EAAE,OAAO,SAAS,EAAE,OAAO,KAAK;AAChE,QAAM,QAAQ,KAAK,CAAC;AACpB,SAAO,GAAG,KAAK,GAAG,eAAe,GAAG,IAAI,GAAG,eAAe,GAAG,OAAO,GAAG,eAAe,GAAG,IAAI;AAC/F;;;AEeA;AAaO,IAAM,eAA8B,uBAAO,IAAI,uBAAuB;AAatE,SAAS,cAA8B;AAC5C,SAAO;AACT;AAGO,SAAS,iBAAiB,OAAyC;AACxE,SAAO,UAAU;AACnB;AAMA,IAAM,0BAA0B,oBAAI,IAAI;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AASM,SAAS,gBAAgB,OAAyB;AACvD,MAAI,UAAU,KAAM,QAAO;AAC3B,QAAM,IAAI,OAAO;AACjB,MAAI,MAAM,SA
AU,QAAO;AAC3B,MAAI,MAAM,QAAQ,KAAK,EAAG,QAAO;AAGjC,MAAI,cAAc,KAAK,EAAG,QAAO;AACjC,QAAM,QAAQ,OAAO,eAAe,KAAK;AACzC,MAAI,UAAU,QAAQ,UAAU,OAAO,UAAW,QAAO;AAEzD,QAAM,OAAQ,MAA8C;AAC5D,MAAI,QAAQ,OAAO,KAAK,SAAS,YAAY,wBAAwB,IAAI,KAAK,IAAI,EAAG,QAAO;AAG5F,SAAO;AACT;AAkCA,IAAM,cAAc;AAcb,SAAS,6BAA6B,QAGpC;AACP,MAAI,OAAO,gBAAgB,UAAa,OAAO,YAAY,QAAW;AACpE,UAAM,IAAI;AAAA,MACR;AAAA,IAGF;AAAA,EACF;AACF;AAuBO,SAAS,wBAAwB,MAAe,aAA2B;AAChF,yBAAuB,MAAM,CAAC,GAAG,EAAE,MAAM,OAAO,GAAG,CAAC,EAAE,KAAK,MAAM;AAC/D,UAAM,QAAQ,KAAK,WAAW,IAAI,WAAW,KAAK,IAAI,CAAC,MAAM,KAAK,UAAU,CAAC,CAAC,EAAE,KAAK,KAAK;AAC1F,UAAM,IAAI;AAAA,MACR,cAAc,WAAW,iDAAiD,KAAK;AAAA,IAIjF;AAAA,EACF,CAAC;AACH;AAIA,SAAS,uBACP,MACA,MACA,QACA,OACM;AACN,MAAI,SAAS,QAAQ,SAAS,OAAW;AACzC,MAAI,iBAAiB,IAAI,GAAG;AAC1B,UAAM,EAAE,MAAM,OAAO,CAAC;AACtB;AAAA,EACF;AACA,MAAI,OAAO,SAAS,SAAU;AAC9B,MAAI,cAAc,IAAI,EAAG;AACzB,MAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,6BAAuB,KAAK,CAAC,GAAG,CAAC,GAAG,MAAM,OAAO,CAAC,CAAC,GAAG,EAAE,MAAM,SAAS,OAAO,EAAE,GAAG,KAAK;AAAA,IAC1F;AACA;AAAA,EACF;AACA,QAAM,QAAQ,OAAO,eAAe,IAAI;AACxC,MAAI,UAAU,QAAQ,UAAU,OAAO,UAAW;AAClD,QAAM,MAAM;AACZ,aAAW,OAAO,OAAO,KAAK,GAAG,GAAG;AAClC,2BAAuB,IAAI,GAAG,GAAG,CAAC,GAAG,MAAM,GAAG,GAAG,EAAE,MAAM,SAAS,GAAG,KAAK;AAAA,EAC5E;AACF;AAGO,SAAS,eAAe,MAA+B;AAC5D,aAAW,OAAO,MAAM;AACtB,QAAI,CAAC,YAAY,KAAK,GAAG,GAAG;AAC1B,YAAM,IAAI;AAAA,QACR,gCAAgC,KAAK,UAAU,GAAG,CAAC,YAAY,KAC5D,IAAI,CAAC,MAAM,KAAK,UAAU,CAAC,CAAC,EAC5B,KAAK,KAAK,CAAC;AAAA,MAGhB;AAAA,IACF;AAAA,EACF;AACF;AA2BO,SAAS,aAAa,MAA6C;AACxE,QAAM,MAAoB,CAAC;AAC3B,OAAK,MAAM,CAAC,GAAG,GAAG;AAClB,SAAO;AACT;AAEA,SAAS,oCACP,KACA,WACM;AACN,yBAAuB,KAAK,WAAW,EAAE,MAAM,OAAO,GAAG,CAAC,EAAE,OAAO,MAAM;AACvE,UAAM,eACJ,UAAU,WAAW,IAAI,WAAW,UAAU,IAAI,CAAC,MAAM,KAAK,UAAU,CAAC,CAAC,EAAE,KAAK,KAAK;AACxF,QAAI,OAAO,SAAS,SAAS;AAC3B,YAAM,IAAI;AAAA,QACR,8CAA8C,OAAO,KAAK,4BAChD,YAAY;AAAA,MAIxB;AAAA,IACF;AACA,UAAM,IAAI;AAAA,MACR,qEACU,YAAY;AAAA,IAGxB;AAAA,EACF,CAAC;AACH;AAEA,SAAS,KAAK,MAAe,MAAgB,KAAyB;AAGpE,MAAI,SAAS,OAAW;AACxB,MAAI,iBAAiB,IAAI,
GAAG;AAC1B,QAAI,KAAK,WAAW,GAAG;AACrB,YAAM,IAAI,MAAM,+DAA+D;AAAA,IACjF;AACA,mBAAe,IAAI;AACnB,QAAI,KAAK,EAAE,MAAM,CAAC,GAAG,IAAI,GAAG,OAAO,QAAW,QAAQ,KAAK,CAAC;AAC5D;AAAA,EACF;AACA,MAAI,gBAAgB,IAAI,GAAG;AACzB,QAAI,KAAK,WAAW,GAAG;AAGrB,YAAM,IAAI;AAAA,QACR,4DACG,SAAS,OAAO,SAAS,MAAM,QAAQ,IAAI,IAAI,UAAU,OAAO,QACjE;AAAA,MACJ;AAAA,IACF;AAMA,QAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,0CAAoC,MAAM,IAAI;AAAA,IAChD;AACA,mBAAe,IAAI;AACnB,QAAI,KAAK,EAAE,MAAM,CAAC,GAAG,IAAI,GAAG,OAAO,MAAM,QAAQ,MAAM,CAAC;AACxD;AAAA,EACF;AAEA,QAAM,MAAM;AACZ,QAAM,OAAO,OAAO,KAAK,GAAG;AAC5B,MAAI,KAAK,WAAW,GAAG;AAIrB,QAAI,KAAK,SAAS,GAAG;AACnB,qBAAe,IAAI;AACnB,UAAI,KAAK,EAAE,MAAM,CAAC,GAAG,IAAI,GAAG,OAAO,CAAC,GAAG,QAAQ,MAAM,CAAC;AAAA,IACxD;AACA;AAAA,EACF;AACA,aAAW,OAAO,MAAM;AACtB,QAAI,QAAQ,mBAAmB;AAC7B,YAAM,QAAQ,KAAK,WAAW,IAAI,WAAW,KAAK,IAAI,CAAC,MAAM,KAAK,UAAU,CAAC,CAAC,EAAE,KAAK,KAAK;AAC1F,YAAM,IAAI;AAAA,QACR,kDAAkD,iBAAiB,aAC9D,KAAK;AAAA,MAGZ;AAAA,IACF;AACA,SAAK,IAAI,GAAG,GAAG,CAAC,GAAG,MAAM,GAAG,GAAG,GAAG;AAAA,EACpC;AACF;;;AC9VA,SAAS,wBACP,OACA,KACA,MACgB;AAChB,SAAO,EAAE,OAAO,MAAM,KAAK,SAAS,eAAe,OAAO,OAAO,MAAM,KAAK,KAAK;AACnF;AAEA,SAAS,wBACP,OACA,MACA,SACA,OACA,MACA,MACgB;AAChB,SAAO,EAAE,OAAO,MAAM,SAAS,OAAO,MAAM,KAAK;AACnD;AAEO,IAAM,iBAAN,MAA2C;AAAA,EAChD,YACmB,SACA,UACA,YAAoB,IACrC;AAHiB;AACA;AACA;AAAA,EAChB;AAAA,EAEH,MAAM,QAAQ,OAAe,KAAa,MAA8C;AACtF,SAAK,UAAU,OAAO,KAAK,MAAM,OAAO;AAAA,EAC1C;AAAA,EAEA,MAAM,QACJ,OACA,MACA,SACA,OACA,MACA,MACe;AACf,SAAK,UAAU,OAAO,MAAM,SAAS,OAAO,MAAM,MAAM,OAAO;AAAA,EACjE;AAAA,EAEA,MAAM,YAAY,OAAe,KAAa,MAA8C;AAC1F,SAAK,UAAU,OAAO,KAAK,MAAM,SAAS;AAAA,EAC5C;AAAA,EAEA,MAAM,YACJ,OACA,MACA,SACA,OACA,MACA,MACe;AACf,SAAK,UAAU,OAAO,MAAM,SAAS,OAAO,MAAM,MAAM,SAAS;AAAA,EACnE;AAAA,EAEQ,UACN,OACA,KACA,MACA,MACM;AACN,4BAAwB,MAAM,SAAS,YAAY,gBAAgB,SAAS;AAC5E,QAAI,KAAK,UAAU;AACjB,WAAK,SAAS,SAAS,OAAO,eAAe,OAAO,MAAM,KAAK,SAAS;AAAA,IAC1E;AACA,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,SAAS,wBAAwB,OAAO,KAAK,IAAI;AACvD,QAAI,KAAK,UAAU;AACjB,YAAM,QAAQ,KAAK,SAAS,OAAO,OAAO,eAAe,KAAK;AAC9D,UAAI,OAAO,iBAAiB,MAAM,gBAAgB,GAAG;A
ACnD,eAAO,IAAI,MAAM;AAAA,MACnB;AAAA,IACF;AACA,SAAK,QAAQ,OAAO,OAAO,QAAQ,IAAI;AAAA,EACzC;AAAA,EAEQ,UACN,OACA,MACA,SACA,OACA,MACA,MACA,MACM;AACN,4BAAwB,MAAM,SAAS,YAAY,gBAAgB,SAAS;AAC5E,QAAI,KAAK,UAAU;AACjB,WAAK,SAAS,SAAS,OAAO,SAAS,OAAO,MAAM,KAAK,SAAS;AAAA,IACpE;AACA,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,SAAS,wBAAwB,OAAO,MAAM,SAAS,OAAO,MAAM,IAAI;AAC9E,QAAI,KAAK,UAAU;AACjB,YAAM,QAAQ,KAAK,SAAS,OAAO,OAAO,SAAS,KAAK;AACxD,UAAI,OAAO,iBAAiB,MAAM,gBAAgB,GAAG;AACnD,eAAO,IAAI,MAAM;AAAA,MACnB;AAAA,IACF;AACA,SAAK,QAAQ,OAAO,OAAO,QAAQ,IAAI;AAAA,EACzC;AAAA,EAEA,MAAM,WAAW,KAAa,MAA8C;AAC1E,UAAM,QAAQ,iBAAiB,GAAG;AAClC,SAAK,QAAQ,UAAU,OAAO,EAAE,SAAS,aAAa,IAAI,EAAE,CAAC;AAAA,EAC/D;AAAA,EAEA,MAAM,WACJ,MACA,SACA,MACA,MACe;AACf,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,SAAK,QAAQ,UAAU,OAAO,EAAE,SAAS,aAAa,IAAI,EAAE,CAAC;AAAA,EAC/D;AAAA,EAEA,MAAM,WAAW,KAA4B;AAC3C,UAAM,QAAQ,iBAAiB,GAAG;AAClC,SAAK,QAAQ,UAAU,KAAK;AAAA,EAC9B;AAAA,EAEA,MAAM,WAAW,MAAc,SAAiB,MAA6B;AAC3E,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,SAAK,QAAQ,UAAU,KAAK;AAAA,EAC9B;AAAA,EAEA,MAAM,SAAwB;AAC5B,UAAM,KAAK,QAAQ,OAAO;AAAA,EAC5B;AACF;;;ACvIA,IAAAA,sBAA2B;;;ACApB,IAAM,iBAAN,cAA6B,MAAM;AAAA,EACxC,YACE,SACgB,MAChB;AACA,UAAM,OAAO;AAFG;AAGhB,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,oBAAN,cAAgC,eAAe;AAAA,EACpD,YAAY,KAAa;AACvB,UAAM,mBAAmB,GAAG,IAAI,gBAAgB;AAChD,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,oBAAN,cAAgC,eAAe;AAAA,EACpD,YAAY,MAAc,SAAiB,MAAc;AACvD,UAAM,mBAAmB,IAAI,MAAM,OAAO,OAAO,IAAI,IAAI,gBAAgB;AACzE,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,kBAAN,cAA8B,eAAe;AAAA,EAClD,YACE,SACgB,SAChB;AACA,UAAM,SAAS,kBAAkB;AAFjB;AAGhB,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,yBAAN,cAAqC,eAAe;AAAA,EACzD,YAAY,OAAe,SAAiB,OAAe;AACzD,UAAM,yBAAyB,KAAK,OAAO,OAAO,QAAQ,KAAK,KAAK,oBAAoB;AACxF,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,oBAAN,cAAgC,eAAe;AAAA,EACpD,YAAY,SAAiB;AAC3B,UAAM,SAAS,eAAe;AAC9B,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,iBAAN,cAA6B,eAAe;AAAA,EACjD,YAAY,SAAiB;AAC3B,UAAM,SAAS,iBAAiB;AAChC,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,uBAAN,cAAmC,eAAe;AAAA,EACvD,YAAY,SAAiB;AAC3B,UAAM,SAAS,wBA
AwB;AACvC,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,mBAAN,cAA+B,eAAe;AAAA,EACnD,YAAY,SAAiB;AAC3B,UAAM,SAAS,cAAc;AAC7B,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,qBAAN,cAAiC,eAAe;AAAA,EACrD,YACE,OACA,SACA,OACA,WACA,WACA;AACA;AAAA,MACE,SAAS,KAAK,OAAO,OAAO,QAAQ,KAAK,8BAA8B,aAAa,MAAM,mBACxE,UAAU,KAAK,IAAI,CAAC;AAAA,MACtC;AAAA,IACF;AACA,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,iBAAN,cAA6B,eAAe;AAAA,EACjD,YAAY,SAAiB;AAC3B,UAAM,SAAS,iBAAiB;AAChC,SAAK,OAAO;AAAA,EACd;AACF;AAkBO,IAAM,+BAAN,cAA2C,eAAe;AAAA,EAC/D,YAAY,SAAiB;AAC3B,UAAM,SAAS,2BAA2B;AAC1C,SAAK,OAAO;AAAA,EACd;AACF;;;AClGA,yBAAwD;AA6BxD,IAAM,sBAAsB;AAgCrB,SAAS,cAAc,QAAgB,OAAyC;AAOrF,QAAM,YAAY,IAAI,6BAAU,QAAkB,WAAW,KAAK;AAClE,SAAO,CAAC,SAAkB;AACxB,UAAM,SAAS,UAAU,SAAS,IAAI;AACtC,QAAI,CAAC,OAAO,OAAO;AACjB,YAAM,QAAQ,OAAO,OAAO;AAC5B,YAAM,OAAO,OAAO,OAAO,MAAM,GAAG,mBAAmB,EAAE,IAAI,WAAW,EAAE,KAAK,IAAI;AACnF,YAAM,WAAW,QAAQ,sBAAsB,MAAM,QAAQ,mBAAmB,WAAW;AAC3F,YAAM,IAAI;AAAA,QACR,yBAAyB,QAAQ,UAAU,QAAQ,EAAE,KAAK,IAAI,GAAG,QAAQ;AAAA,QACzE,OAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AACF;AAWA,SAAS,YAAY,KAAyB;AAC5C,QAAM,OAAO,IAAI,iBAAiB,QAAQ,MAAM,EAAE,KAAK;AACvD,QAAM,UAAU,IAAI,UAAU,IAAI,IAAI,OAAO,OAAO;AACpD,QAAM,SAAS,IAAI,QAAQ,KAAK,OAAO,GAAG,IAAI,KAAK,KAAK;AACxD,SAAO,GAAG,IAAI,GAAG,MAAM;AACzB;AAUO,SAAS,sBAAsB,QAA0B;AAC9D,MAAI,CAAC,UAAU,OAAO,SAAS,YAAY,CAAC,OAAO,WAAY,QAAO,CAAC;AAEvE,QAAM,cAAc,IAAI,IAAY,MAAM,QAAQ,OAAO,QAAQ,IAAI,OAAO,WAAW,CAAC,CAAC;AAEzF,SAAO,OAAO,QAAQ,OAAO,UAAU,EAAE;AAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MACvD,oBAAoB,MAAM,MAAa,YAAY,IAAI,IAAI,CAAC;AAAA,EAC9D;AACF;AAKA,SAAS,oBAAoB,MAAc,MAAW,UAA8B;AAClF,MAAI,CAAC,KAAM,QAAO,EAAE,MAAM,MAAM,WAAW,SAAS;AAGpD,MAAI,MAAM,QAAQ,KAAK,IAAI,GAAG;AAC5B,WAAO;AAAA,MACL;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA,YAAY,KAAK;AAAA,MACjB,aAAa,KAAK;AAAA,IACpB;AAAA,EACF;AAGA,MAAI,MAAM,QAAQ,KAAK,KAAK,KAAK,MAAM,QAAQ,KAAK,KAAK,GAAG;AAC1D,UAAM,WAAY,KAAK,SAAS,KAAK;AACrC,UAAM,UAAU,SAAS,OAAO,CAAC,MAAW,EAAE,SAAS,MAAM;AAC7D,QAAI,QAAQ,WAAW,GAAG;AAExB,aAAO,oBAAoB,MAAM,QAAQ,CAAC,GAAG,KAAK;AAAA,IACpD;AACA,WAAO,EAAE,MAAM,MAAM,WAAW,UAAU,aAAa,KAAK,YAAY;AAA
A,EAC1E;AAEA,QAAM,OAAO,KAAK;AAElB,MAAI,SAAS,UAAU;AACrB,WAAO;AAAA,MACL;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA,WAAW,KAAK;AAAA,MAChB,WAAW,KAAK;AAAA,MAChB,SAAS,KAAK;AAAA,MACd,aAAa,KAAK;AAAA,IACpB;AAAA,EACF;AAEA,MAAI,SAAS,YAAY,SAAS,WAAW;AAC3C,WAAO;AAAA,MACL;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA,KAAK,KAAK;AAAA,MACV,KAAK,KAAK;AAAA,MACV,OAAO,SAAS,YAAY,OAAO;AAAA,MACnC,aAAa,KAAK;AAAA,IACpB;AAAA,EACF;AAEA,MAAI,SAAS,WAAW;AACtB,WAAO,EAAE,MAAM,MAAM,WAAW,UAAU,aAAa,KAAK,YAAY;AAAA,EAC1E;AAEA,MAAI,SAAS,SAAS;AACpB,UAAM,WAAW,KAAK,QAAQ,oBAAoB,QAAQ,KAAK,OAAO,IAAI,IAAI;AAC9E,WAAO;AAAA,MACL;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA,aAAa,KAAK;AAAA,IACpB;AAAA,EACF;AAEA,MAAI,SAAS,UAAU;AACrB,WAAO;AAAA,MACL;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA,QAAQ,sBAAsB,IAAI;AAAA,MAClC,aAAa,KAAK;AAAA,IACpB;AAAA,EACF;AAEA,SAAO,EAAE,MAAM,MAAM,WAAW,UAAU,aAAa,KAAK,YAAY;AAC1E;;;ACrLA,eAAsB,oBACpB,MACA,gBACA,eACA,YACkC;AAClC,QAAM,SAAS,CAAC,GAAG,UAAU,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,EAAE,WAAW;AAC3E,MAAI,SAAS,EAAE,GAAG,KAAK;AACvB,MAAI,UAAU;AAEd,aAAW,QAAQ,QAAQ;AACzB,QAAI,KAAK,gBAAgB,SAAS;AAChC,UAAI;AACF,iBAAS,MAAM,KAAK,GAAG,MAAM;AAAA,MAC/B,SAAS,KAAc;AACrB,YAAI,eAAe,eAAgB,OAAM;AACzC,cAAM,IAAI;AAAA,UACR,mBAAmB,KAAK,WAAW,QAAQ,KAAK,SAAS,YAAa,IAAc,OAAO;AAAA,QAC7F;AAAA,MACF;AACA,UAAI,CAAC,UAAU,OAAO,WAAW,UAAU;AACzC,cAAM,IAAI;AAAA,UACR,mBAAmB,KAAK,WAAW,QAAQ,KAAK,SAAS;AAAA,QAC3D;AAAA,MACF;AACA,gBAAU,KAAK;AAAA,IACjB;AAAA,EACF;AAEA,MAAI,YAAY,eAAe;AAC7B,UAAM,IAAI;AAAA,MACR,wCAAwC,OAAO,mBAAmB,aAAa;AAAA,IACjF;AAAA,EACF;AAEA,SAAO;AACT;AAUO,SAAS,uBAAuB,YAA6B,OAAqB;AACvF,MAAI,WAAW,WAAW,EAAG;AAG7B,QAAM,OAAO,oBAAI,IAAY;AAC7B,aAAW,QAAQ,YAAY;AAC7B,QAAI,KAAK,aAAa,KAAK,aAAa;AACtC,YAAM,IAAI;AAAA,QACR,GAAG,KAAK,mCAAmC,KAAK,SAAS,qBAAqB,KAAK,WAAW;AAAA,MAChG;AAAA,IACF;AACA,QAAI,KAAK,IAAI,KAAK,WAAW,GAAG;AAC9B,YAAM,IAAI;AAAA,QACR,GAAG,KAAK,8CAA8C,KAAK,WAAW;AAAA,MACxE;AAAA,IACF;AACA,SAAK,IAAI,KAAK,WAAW;AAAA,EAC3B;AAEA,QAAM,SAAS,CAAC,GAAG,UAAU,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,EAAE,WAAW;AAC3E,QAAM,gBAAgB,KAAK,IAAI,GAAG,WAAW,IAAI,CAAC,MAAM,EAAE,SAAS,CAA
C;AACpE,MAAI,UAAU;AAEd,aAAW,QAAQ,QAAQ;AACzB,QAAI,KAAK,gBAAgB,SAAS;AAChC,gBAAU,KAAK;AAAA,IACjB,WAAW,KAAK,cAAc,SAAS;AACrC,YAAM,IAAI;AAAA,QACR,GAAG,KAAK,sDAAiD,OAAO,YAAO,KAAK,WAAW;AAAA,MACzF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,YAAY,eAAe;AAC7B,UAAM,IAAI;AAAA,MACR,GAAG,KAAK,qCAAqC,aAAa,eAAe,OAAO;AAAA,IAClF;AAAA,EACF;AACF;AAQA,eAAsB,cACpB,QACA,UACA,kBAAsC,OACZ;AAC1B,QAAM,QAAQ,SAAS,OAAO,OAAO,OAAO,OAAO,SAAS,OAAO,KAAK;AAExE,MAAI,CAAC,OAAO,YAAY,UAAU,CAAC,MAAM,eAAe;AACtD,WAAO,EAAE,QAAQ,UAAU,OAAO,WAAW,MAAM;AAAA,EACrD;AAEA,QAAM,iBAAiB,OAAO,KAAK;AAEnC,MAAI,kBAAkB,MAAM,eAAe;AACzC,WAAO,EAAE,QAAQ,UAAU,OAAO,WAAW,MAAM;AAAA,EACrD;AAEA,QAAM,eAAe,MAAM;AAAA,IACzB,OAAO;AAAA,IACP;AAAA,IACA,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AAGA,QAAM,YAAY,MAAM,sBAAsB,mBAAmB;AAEjE,SAAO;AAAA,IACL,QAAQ,EAAE,GAAG,QAAQ,MAAM,cAAc,GAAG,MAAM,cAAc;AAAA,IAChE,UAAU;AAAA,IACV;AAAA,EACF;AACF;AAOA,eAAsB,eACpB,SACA,UACA,kBAAsC,OACV;AAC5B,SAAO,QAAQ,IAAI,QAAQ,IAAI,CAAC,MAAM,cAAc,GAAG,UAAU,eAAe,CAAC,CAAC;AACpF;;;ACnJO,SAAS,WAAW,WAAmB,SAA0B;AAEtE,MAAI,YAAY,OAAQ,QAAO,cAAc;AAG7C,MAAI,YAAY,KAAM,QAAO;AAE7B,QAAM,eAAe,cAAc,KAAK,CAAC,IAAI,UAAU,MAAM,GAAG;AAChE,QAAM,kBAAkB,QAAQ,MAAM,GAAG;AAEzC,SAAO,cAAc,cAAc,GAAG,iBAAiB,CAAC;AAC1D;AASO,SAAS,cAAc,WAAmB,UAA6B;AAC5E,MAAI,CAAC,YAAY,SAAS,WAAW,EAAG,QAAO;AAC/C,SAAO,SAAS,KAAK,CAAC,MAAM,WAAW,WAAW,CAAC,CAAC;AACtD;AAMA,SAAS,cAAc,MAAgB,IAAY,SAAmB,IAAqB;AAEzF,MAAI,OAAO,KAAK,UAAU,OAAO,QAAQ,OAAQ,QAAO;AAGxD,MAAI,OAAO,QAAQ,OAAQ,QAAO;AAElC,QAAM,MAAM,QAAQ,EAAE;AAEtB,MAAI,QAAQ,MAAM;AAEhB,QAAI,OAAO,QAAQ,SAAS,EAAG,QAAO;AAGtC,aAAS,OAAO,GAAG,QAAQ,KAAK,SAAS,IAAI,QAAQ;AACnD,UAAI,cAAc,MAAM,KAAK,MAAM,SAAS,KAAK,CAAC,EAAG,QAAO;AAAA,IAC9D;AACA,WAAO;AAAA,EACT;AAGA,MAAI,OAAO,KAAK,OAAQ,QAAO;AAE/B,MAAI,QAAQ,KAAK;AAEf,WAAO,cAAc,MAAM,KAAK,GAAG,SAAS,KAAK,CAAC;AAAA,EACpD;AAGA,MAAI,KAAK,EAAE,MAAM,KAAK;AACpB,WAAO,cAAc,MAAM,KAAK,GAAG,SAAS,KAAK,CAAC;AAAA,EACpD;AAEA,SAAO;AACT;;;AC9EA,SAAS,UAAU,OAAe,SAAiB,OAAuB;AACxE,SAAO,GAAG,KAAK,IAAI,OAAO,IAAI,KAAK;AACrC;AAEA,SAAS,aAAa,GAA0B;AAC9C,SAAO,UAAU,EAAE,OAAO,EAAE,SAAS,EAAE,KAAK;AAC9C;AAkBO,SAAS
,eAAe,OAAyD;AACtF,QAAM,MAAM,oBAAI,IAA0E;AAE1F,MAAI;AAEJ,MAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,cAAU;AAAA,EACZ,OAAO;AACL,cAAU,mBAAmB,KAAK;AAAA,EACpC;AAEA,QAAM,YAA0C,OAAO,OAAO,CAAC,GAAG,OAAO,CAAC;AAE1E,aAAW,SAAS,SAAS;AAC3B,QAAI,MAAM,eAAe,MAAM,YAAY,SAAS,GAAG,GAAG;AACxD,YAAM,IAAI;AAAA,QACR,UAAU,MAAM,KAAK,OAAO,MAAM,OAAO,QAAQ,MAAM,KAAK,8BAA8B,MAAM,WAAW;AAAA,MAC7G;AAAA,IACF;AACA,QAAI,MAAM,YAAY,QAAQ;AAC5B,YAAM,QAAQ,UAAU,MAAM,KAAK,OAAO,MAAM,OAAO,QAAQ,MAAM,KAAK;AAC1E,6BAAuB,MAAM,YAAY,KAAK;AAE9C,YAAM,gBAAgB,KAAK,IAAI,GAAG,MAAM,WAAW,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC;AAAA,IAC5E,OAAO;AAEL,YAAM,gBAAgB;AAAA,IACxB;AACA,UAAM,MAAM,UAAU,MAAM,OAAO,MAAM,SAAS,MAAM,KAAK;AAC7D,UAAM,YAAY,MAAM,aACpB,cAAc,MAAM,YAAY,IAAI,MAAM,KAAK,OAAO,MAAM,OAAO,QAAQ,MAAM,KAAK,GAAG,IACzF;AACJ,QAAI,IAAI,KAAK,EAAE,OAAO,UAAU,UAAU,CAAC;AAAA,EAC7C;AAGA,QAAM,WAAW,oBAAI,IAA0C;AAC/D,QAAM,WAAW,oBAAI,IAA6B;AAClD,aAAW,SAAS,SAAS;AAC3B,UAAM,WAAW,SAAS,IAAI,MAAM,OAAO;AAC3C,QAAI,UAAU;AACZ,eAAS,KAAK,KAAK;AAAA,IACrB,OAAO;AACL,eAAS,IAAI,MAAM,SAAS,CAAC,KAAK,CAAC;AAAA,IACrC;AAAA,EACF;AACA,aAAW,CAAC,KAAK,GAAG,KAAK,UAAU;AACjC,aAAS,IAAI,KAAK,OAAO,OAAO,GAAG,CAAC;AAAA,EACtC;AAWA,QAAM,gBAAgB,oBAAI,IAA0C;AACpE,QAAM,gBAAgB,oBAAI,IAA6B;AACvD,QAAM,eAAe,oBAAI,IAAyB;AAClD,aAAW,SAAS,SAAS;AAC3B,QAAI,CAAC,MAAM,YAAa;AACxB,QAAI,OAAO,aAAa,IAAI,MAAM,KAAK;AACvC,QAAI,CAAC,MAAM;AACT,aAAO,oBAAI,IAAI;AACf,mBAAa,IAAI,MAAM,OAAO,IAAI;AAAA,IACpC;AACA,QAAI,KAAK,IAAI,MAAM,WAAW,EAAG;AACjC,SAAK,IAAI,MAAM,WAAW;AAC1B,UAAM,WAAW,cAAc,IAAI,MAAM,KAAK;AAC9C,QAAI,UAAU;AACZ,eAAS,KAAK,KAAK;AAAA,IACrB,OAAO;AACL,oBAAc,IAAI,MAAM,OAAO,CAAC,KAAK,CAAC;AAAA,IACxC;AAAA,EACF;AACA,aAAW,CAAC,KAAK,GAAG,KAAK,eAAe;AACtC,kBAAc,IAAI,KAAK,OAAO,OAAO,GAAG,CAAC;AAAA,EAC3C;AAEA,SAAO;AAAA,IACL,OAAO,OAAe,SAAiB,OAA0C;AAC/E,aAAO,IAAI,IAAI,UAAU,OAAO,SAAS,KAAK,CAAC,GAAG;AAAA,IACpD;AAAA,IAEA,gBAAgB,SAA+C;AAC7D,aAAO,SAAS,IAAI,OAAO,KAAK,CAAC;AAAA,IACnC;AAAA,IAEA,oBAAoB,OAA6C;AAC/D,aAAO,cAAc,IAAI,KAAK,KAAK,CAAC;AAAA,IACtC;AAAA,IAEA,SACE,OACA,SACA,OACA,MACA,WACM;AACN,YAAM,MAAM,IAAI,IAAI,UAAU,OAAO,SAAS,KAAK,CAAC;AAEp
D,UAAI,CAAC,KAAK;AACR,cAAM,IAAI,uBAAuB,OAAO,SAAS,KAAK;AAAA,MACxD;AAGA,UAAI,cAAc,UAAa,IAAI,MAAM,aAAa,IAAI,MAAM,UAAU,SAAS,GAAG;AACpF,YAAI,CAAC,cAAc,WAAW,IAAI,MAAM,SAAS,GAAG;AAClD,gBAAM,IAAI,mBAAmB,OAAO,SAAS,OAAO,WAAW,IAAI,MAAM,SAAS;AAAA,QACpF;AAAA,MACF;AAEA,UAAI,IAAI,UAAU;AAChB,YAAI;AACF,cAAI,SAAS,IAAI;AAAA,QACnB,SAAS,KAAc;AACrB,cAAI,eAAe,gBAAiB,OAAM;AAC1C,gBAAM,IAAI;AAAA,YACR,+BAA+B,KAAK,OAAO,OAAO,QAAQ,KAAK;AAAA,YAC/D;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IAEA,UAAwC;AACtC,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAYO,SAAS,qBAAqB,MAAqB,WAAyC;AAEjG,QAAM,WAAW,IAAI,IAAI,KAAK,QAAQ,EAAE,IAAI,YAAY,CAAC;AAEzD,SAAO;AAAA,IACL,OAAO,OAAe,SAAiB,OAA0C;AAC/E,aAAO,KAAK,OAAO,OAAO,SAAS,KAAK,KAAK,UAAU,OAAO,OAAO,SAAS,KAAK;AAAA,IACrF;AAAA,IAEA,gBAAgB,SAA+C;AAC7D,YAAM,cAAc,KAAK,gBAAgB,OAAO;AAChD,YAAM,aAAa,UAAU,gBAAgB,OAAO;AACpD,UAAI,WAAW,WAAW,EAAG,QAAO;AACpC,UAAI,YAAY,WAAW,EAAG,QAAO;AAGrC,YAAM,OAAO,IAAI,IAAI,YAAY,IAAI,YAAY,CAAC;AAClD,YAAM,SAAS,CAAC,GAAG,WAAW;AAC9B,iBAAW,SAAS,YAAY;AAC9B,YAAI,CAAC,KAAK,IAAI,aAAa,KAAK,CAAC,GAAG;AAClC,iBAAO,KAAK,KAAK;AAAA,QACnB;AAAA,MACF;AACA,aAAO,OAAO,OAAO,MAAM;AAAA,IAC7B;AAAA,IAEA,oBAAoB,OAA6C;AAC/D,YAAM,cAAc,KAAK,oBAAoB,KAAK;AAClD,YAAM,aAAa,UAAU,oBAAoB,KAAK;AACtD,UAAI,WAAW,WAAW,EAAG,QAAO;AACpC,UAAI,YAAY,WAAW,EAAG,QAAO;AAMrC,YAAM,OAAO,IAAI,IAAI,YAAY,IAAI,CAAC,MAAM,EAAE,WAAW,CAAC;AAC1D,YAAM,SAAS,CAAC,GAAG,WAAW;AAC9B,iBAAW,SAAS,YAAY;AAC9B,YAAI,CAAC,KAAK,IAAI,MAAM,WAAW,GAAG;AAChC,eAAK,IAAI,MAAM,WAAW;AAC1B,iBAAO,KAAK,KAAK;AAAA,QACnB;AAAA,MACF;AACA,aAAO,OAAO,OAAO,MAAM;AAAA,IAC7B;AAAA,IAEA,SACE,OACA,SACA,OACA,MACA,WACM;AACN,UAAI,SAAS,IAAI,UAAU,OAAO,SAAS,KAAK,CAAC,GAAG;AAClD,eAAO,KAAK,SAAS,OAAO,SAAS,OAAO,MAAM,SAAS;AAAA,MAC7D;AAEA,aAAO,UAAU,SAAS,OAAO,SAAS,OAAO,MAAM,SAAS;AAAA,IAClE;AAAA,IAEA,UAAwC;AACtC,YAAM,aAAa,UAAU,QAAQ;AACrC,UAAI,WAAW,WAAW,EAAG,QAAO,KAAK,QAAQ;AAEjD,YAAM,SAAS,CAAC,GAAG,KAAK,QAAQ,CAAC;AACjC,iBAAW,SAAS,YAAY;AAC9B,YAAI,CAAC,SAAS,IAAI,aAAa,KAAK,CAAC,GAAG;AACtC,iBAAO,KAAK,KAAK;AAAA,QACnB;AAAA,MACF;AACA,aAAO,OAAO,OAAO,MAAM;AAAA,IAC7B;AAAA,EACF;AACF;AAOA,SA
AS,mBAAmB,WAA6C;AACvE,QAAM,UAA2B,CAAC;AAGlC,aAAW,CAAC,MAAM,MAAM,KAAK,UAAU,OAAO;AAC5C,YAAQ,KAAK;AAAA,MACX,OAAO;AAAA,MACP,SAAS;AAAA,MACT,OAAO;AAAA,MACP,YAAY,OAAO;AAAA,MACnB,aAAa,OAAO;AAAA,MACpB,YAAY,OAAO;AAAA,MACnB,eAAe,OAAO;AAAA,MACtB,WAAW,OAAO;AAAA,MAClB,YAAY,OAAO;AAAA,MACnB,oBAAoB,OAAO;AAAA,MAC3B,SAAS,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAGA,aAAW,CAAC,SAAS,MAAM,KAAK,UAAU,OAAO;AAC/C,UAAM,WAAW,OAAO;AACxB,QAAI,CAAC,SAAU;AAEf,UAAM,YAAY,MAAM,QAAQ,SAAS,IAAI,IAAI,SAAS,OAAO,CAAC,SAAS,IAAI;AAC/E,UAAM,UAAU,MAAM,QAAQ,SAAS,EAAE,IAAI,SAAS,KAAK,CAAC,SAAS,EAAE;AAEvE,UAAM,sBAAsB,OAAO,eAAe,SAAS;AAC3D,QAAI,uBAAuB,oBAAoB,SAAS,GAAG,GAAG;AAC5D,YAAM,IAAI;AAAA,QACR,SAAS,OAAO,8BAA8B,mBAAmB;AAAA,MACnE;AAAA,IACF;AAEA,eAAW,SAAS,WAAW;AAC7B,iBAAW,SAAS,SAAS;AAC3B,gBAAQ,KAAK;AAAA,UACX;AAAA,UACA;AAAA,UACA;AAAA,UACA,YAAY,OAAO;AAAA,UACnB,aAAa,OAAO;AAAA,UACpB,cAAc,SAAS;AAAA,UACvB,YAAY,OAAO;AAAA,UACnB,eAAe,OAAO;AAAA,UACtB,WAAW,OAAO;AAAA,UAClB,aAAa;AAAA,UACb,YAAY,OAAO;AAAA,UACnB,oBAAoB,OAAO;AAAA,UAC3B,SAAS,OAAO;AAAA,QAClB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACxSA,IAAAC,sBAA2B;AAjB3B;AAmCA,IAAI,UAAyB;AAC7B,IAAI,aAAa;AACjB,IAAM,WAAW,oBAAI,IAMnB;AAUF,IAAM,gBAAgB;AAAA,EACpB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,E
ACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,EAAE,KAAK,IAAI;AAgBX,IAAI,cAAsF;AAE1F,eAAe,iBAA2D;AACxE,MAAI,YAAa,QAAO;AACxB,QAAM,KAAK,MAAM,OAAO,gBAAqB;AAC7C,gBAAc,GAAG;AACjB,SAAO;AACT;AAEA,eAAe,eAAgC;AAC7C,MAAI,QAAS,QAAO;AAEpB,QAAM,OAAO,MAAM,eAAe;AAClC,YAAU,IAAI,KAAK,eAAe;AAAA,IAChC,MAAM;AAAA,IACN,YAAY,EAAE,WAAW,YAAY,IAAI;AAAA,EAC3C,CAAC;AAGD,UAAQ,MAAM;AAEd,UAAQ,GAAG,WAAW,CAAC,QAAwB;AAC7C,QAAI,IAAI,OAAO,OAAW;AAC1B,UAAM,UAAU,SAAS,IAAI,IAAI,EAAE;AACnC,QAAI,CAAC,QAAS;AACd,aAAS,OAAO,IAAI,EAAE;AAEtB,QAAI,IAAI,SAAS,SAAS;AACxB,cAAQ,OAAO,IAAI,eAAe,IAAI,WAAW,uBAAuB,CAAC;AAAA,IAC3E,OAAO;AACL,cAAQ,QAAQ,GAAG;AAAA,IACrB;AAAA,EACF,CAAC;AAED,UAAQ,GAAG,SAAS,CAAC,QAAe;AAElC,eAAW,CAAC,EAAE,CAAC,KAAK,UAAU;AAC5B,QAAE,OAAO,IAAI,eAAe,yBAAyB,IAAI,OAAO,EAAE,CAAC;AAAA,IACrE;AACA,aAAS,MAAM;AACf,cAAU;AAAA,EACZ,CAAC;AAED,UAAQ,GAAG,QAAQ,CAAC,SAAiB;AAInC,QAAI,SAAS,OAAO,GAAG;AACrB,iBAAW,CAAC,EAAE,CAAC,KAAK,UAAU;AAC5B,UAAE,OAAO,IAAI,eAAe,mCAAmC,IAAI,EAAE,CAAC;AAAA,MACxE;AACA,eAAS,MAAM;AAAA,IACjB;AACA,cAAU;AAAA,EACZ,CAAC;AAED,SAAO;AACT;AAEA,eAAe,aAAa,KAAuD;AACjF,QAAM,SAAS,MAAM,aAAa;AAClC,MAAI,cAAc,OAAO,iBAAkB,cAAa;AACxD,QAAM,KAAK,EAAE;AACb,SAAO,IAAI,QAAwB,CAACC,UAAS,WAAW;AACtD,aAAS,IAAI,IAAI,EAAE,SAASA,UAAiC,OAAO,CAAC;AACrE,WAAO,YAAY,EAAE,GAAG,KAAK,GAAG,CAAC;AAAA,EACnC,CAAC;AACH;AAWA,IAAM,gBAAgB,oBAAI,QAAqD;AAE/E,SAAS,iBAAiB,UAAuD;AAC/E,MAAI,QAAQ,cAAc,IAAI,QAAQ;AACtC,MAAI,CAAC,OAAO;AACV,YAAQ,oBAAI,IAAI;AAChB,kBAAc,IAAI,UAAU,KAAK;AAAA,EACnC;AACA,SAAO;AACT;AAEA,SAAS,WAAW,QAAwB;AAC1C,aAAO,gCAAW,QAAQ,EAAE,OAAO,MAAM,EAAE,OAAO,KAAK;AACzD;AAQA,IAAI,uBAA0D;AAE9D,eAAe,oBAAyD;AACtE,MAAI,qBAAsB,QAAO;AACjC,yBAAuB,MAAM;AAC7B,SAAO;AACT;AAqBO,SAAS,gBAAgB,QAA6B;AAQ3D,UAAQ,OAAO,SAAkC;AAC/C,UAAM,EAAE,yBAAAC,0BAAyB,2BAAAC,2BAA0B,IAAI,MAAM,kBAAkB;AACvF,UAAM,WAAW,KAAK,UAAUD,yBAAwB,IAAI,CAAC;AAC7D,UAAM,WAAW,MAAM,aAAa,EAAE,MAAM,WAAW
,QAAQ,SAAS,CAAC;AACzE,QAAI,SAAS,eAAe,UAAa,SAAS,eAAe,MAAM;AACrE,YAAM,IAAI,eAAe,kDAAkD;AAAA,IAC7E;AACA,QAAI;AACF,aAAOC,2BAA0B,KAAK,MAAM,SAAS,UAAU,CAAC;AAAA,IAClE,QAAQ;AACN,YAAM,IAAI,eAAe,kDAAkD;AAAA,IAC7E;AAAA,EACF;AACF;AAgBA,eAAsB,iBACpB,QACA,UACe;AACf,MAAI,YAAY,aAAa,iBAAiB;AAE5C,QAAI;AACF,eAAS,MAAM;AAAA,IACjB,SAAS,KAAc;AACrB,UAAI,eAAe,eAAgB,OAAM;AACzC,YAAM,IAAI,eAAe,uCAAwC,IAAc,OAAO,EAAE;AAAA,IAC1F;AACA;AAAA,EACF;AAGA,QAAM,aAAa,EAAE,MAAM,WAAW,OAAO,CAAC;AAChD;AAaO,SAAS,mBACd,QACA,WAA8B,iBACjB;AACb,QAAM,QAAQ,iBAAiB,QAAQ;AACvC,QAAM,MAAM,WAAW,MAAM;AAC7B,QAAM,SAAS,MAAM,IAAI,GAAG;AAC5B,MAAI,OAAQ,QAAO;AAEnB,MAAI;AACF,UAAM,KAAK,SAAS,MAAM;AAC1B,UAAM,IAAI,KAAK,EAAE;AACjB,WAAO;AAAA,EACT,SAAS,KAAc;AACrB,QAAI,eAAe,eAAgB,OAAM;AACzC,UAAM,IAAI,eAAe,uCAAwC,IAAc,OAAO,EAAE;AAAA,EAC1F;AACF;AASO,SAAS,kBACd,QACA,UACiB;AACjB,SAAO,OAAO,IAAI,CAAC,UAAU;AAAA,IAC3B,aAAa,KAAK;AAAA,IAClB,WAAW,KAAK;AAAA,IAChB,IAAI,mBAAmB,KAAK,IAAI,QAAQ;AAAA,EAC1C,EAAE;AACJ;AASA,eAAsB,uBAAsC;AAC1D,MAAI,CAAC,QAAS;AACd,QAAM,IAAI;AACV,YAAU;AAEV,aAAW,CAAC,EAAE,CAAC,KAAK,UAAU;AAC5B,MAAE,OAAO,IAAI,eAAe,2BAA2B,CAAC;AAAA,EAC1D;AACA,WAAS,MAAM;AACf,QAAM,EAAE,UAAU;AACpB;;;ANtZO,IAAM,iBAAiB;AAGvB,IAAM,iBAAiB;AAO9B,IAAM,+BAA+B;AAAA,EACnC,MAAM;AAAA,EACN,UAAU,CAAC,eAAe,aAAa,IAAI;AAAA,EAC3C,YAAY;AAAA,IACV,aAAa,EAAE,MAAM,WAAW,SAAS,EAAE;AAAA,IAC3C,WAAW,EAAE,MAAM,WAAW,SAAS,EAAE;AAAA,IACzC,IAAI,EAAE,MAAM,UAAU,WAAW,EAAE;AAAA,EACrC;AAAA,EACA,sBAAsB;AACxB;AAGO,IAAM,mBAA2B;AAAA,EACtC,MAAM;AAAA,EACN,UAAU,CAAC,QAAQ,YAAY;AAAA,EAC/B,YAAY;AAAA,IACV,MAAM,EAAE,MAAM,UAAU,WAAW,EAAE;AAAA,IACrC,YAAY,EAAE,MAAM,SAAS;AAAA,IAC7B,aAAa,EAAE,MAAM,SAAS;AAAA,IAC9B,YAAY,EAAE,MAAM,SAAS;AAAA,IAC7B,eAAe,EAAE,MAAM,SAAS;AAAA,IAChC,cAAc,EAAE,MAAM,SAAS;AAAA,IAC/B,SAAS,EAAE,MAAM,SAAS;AAAA,IAC1B,WAAW,EAAE,MAAM,SAAS,OAAO,EAAE,MAAM,UAAU,WAAW,EAAE,EAAE;AAAA,IACpE,eAAe,EAAE,MAAM,WAAW,SAAS,EAAE;AAAA,IAC7C,YAAY,EAAE,MAAM,SAAS,OAAO,6BAA6B;AAAA,IACjE,oBAAoB,EAAE,MAAM,UAAU,MAAM,CAAC,OAAO,SAAS,YAAY,EAAE;AAAA,EAC7E;AAAA,EACA,sBAAsB;AACxB;AAGO,IAAM,mBAA2B;AAAA,EACtC,MAAM;AAAA,EA
CN,UAAU,CAAC,QAAQ,QAAQ,IAAI;AAAA,EAC/B,YAAY;AAAA,IACV,MAAM,EAAE,MAAM,UAAU,WAAW,EAAE;AAAA,IACrC,MAAM;AAAA,MACJ,OAAO;AAAA,QACL,EAAE,MAAM,UAAU,WAAW,EAAE;AAAA,QAC/B,EAAE,MAAM,SAAS,OAAO,EAAE,MAAM,UAAU,WAAW,EAAE,GAAG,UAAU,EAAE;AAAA,MACxE;AAAA,IACF;AAAA,IACA,IAAI;AAAA,MACF,OAAO;AAAA,QACL,EAAE,MAAM,UAAU,WAAW,EAAE;AAAA,QAC/B,EAAE,MAAM,SAAS,OAAO,EAAE,MAAM,UAAU,WAAW,EAAE,GAAG,UAAU,EAAE;AAAA,MACxE;AAAA,IACF;AAAA,IACA,YAAY,EAAE,MAAM,SAAS;AAAA,IAC7B,cAAc,EAAE,MAAM,SAAS;AAAA,IAC/B,aAAa,EAAE,MAAM,SAAS;AAAA,IAC9B,YAAY,EAAE,MAAM,SAAS;AAAA,IAC7B,eAAe,EAAE,MAAM,SAAS;AAAA,IAChC,cAAc,EAAE,MAAM,SAAS;AAAA,IAC/B,SAAS,EAAE,MAAM,SAAS;AAAA,IAC1B,WAAW,EAAE,MAAM,SAAS,OAAO,EAAE,MAAM,UAAU,WAAW,EAAE,EAAE;AAAA,IACpE,aAAa,EAAE,MAAM,UAAU,WAAW,GAAG,SAAS,UAAU;AAAA,IAChE,eAAe,EAAE,MAAM,WAAW,SAAS,EAAE;AAAA,IAC7C,YAAY,EAAE,MAAM,SAAS,OAAO,6BAA6B;AAAA,IACjE,oBAAoB,EAAE,MAAM,UAAU,MAAM,CAAC,OAAO,SAAS,YAAY,EAAE;AAAA,EAC7E;AAAA,EACA,sBAAsB;AACxB;AAOO,IAAM,oBAA8C;AAAA,EACzD;AAAA,IACE,OAAO;AAAA,IACP,SAAS;AAAA,IACT,OAAO;AAAA,IACP,YAAY;AAAA,IACZ,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,SAAS;AAAA,IACT,OAAO;AAAA,IACP,YAAY;AAAA,IACZ,aAAa;AAAA,EACf;AACF;AAeA,IAAI,qBAA2C;AACxC,SAAS,0BAAyC;AACvD,MAAI,mBAAoB,QAAO;AAC/B,uBAAqB,eAAe,CAAC,GAAG,iBAAiB,CAAC;AAC1D,SAAO;AACT;AAaO,SAAS,yBAAyB,UAAkB,MAAsB;AAC/E,QAAM,WAAO,gCAAW,QAAQ,EAAE,OAAO,GAAG,QAAQ,IAAI,IAAI,EAAE,EAAE,OAAO,WAAW;AAClF,SAAO,KAAK,MAAM,GAAG,EAAE;AACzB;AAeA,eAAsB,wBACpB,QACA,UACwB;AACxB,QAAM,CAAC,WAAW,SAAS,IAAI,MAAM,QAAQ,IAAI;AAAA,IAC/C,OAAO,UAAU,EAAE,OAAO,eAAe,CAAC;AAAA,IAC1C,OAAO,UAAU,EAAE,OAAO,eAAe,CAAC;AAAA,EAC5C,CAAC;AAED,QAAM,UAA2B,CAAC,GAAG,iBAAiB;AAItD,QAAM,iBAAkC,CAAC;AACzC,aAAW,UAAU,WAAW;AAC9B,UAAM,OAAO,OAAO;AACpB,QAAI,KAAK,YAAY;AACnB,iBAAW,KAAK,KAAK,YAAY;AAC/B,uBAAe,KAAK,iBAAiB,EAAE,IAAI,QAAQ,CAAC;AAAA,MACtD;AAAA,IACF;AAAA,EACF;AACA,aAAW,UAAU,WAAW;AAC9B,UAAM,OAAO,OAAO;AACpB,QAAI,KAAK,YAAY;AACnB,iBAAW,KAAK,KAAK,YAAY;AAC/B,uBAAe,KAAK,iBAAiB,EAAE,IAAI,QAAQ,CAAC;AAAA,MACtD;AAAA,IACF;AAAA,EACF;AACA,QAAM,QAAQ,IAAI,cAAc;AAGhC,aAAW,UAAU,WAAW;AAC9B,UAAM,OA
AO,OAAO;AACpB,YAAQ,KAAK;AAAA,MACX,OAAO,KAAK;AAAA,MACZ,SAAS;AAAA,MACT,OAAO,KAAK;AAAA,MACZ,YAAY,KAAK;AAAA,MACjB,aAAa,KAAK;AAAA,MAClB,YAAY,KAAK;AAAA,MACjB,eAAe,KAAK;AAAA,MACpB,WAAW,KAAK;AAAA,MAChB,YAAY,KAAK,aAAa,kBAAkB,KAAK,YAAY,QAAQ,IAAI;AAAA,MAC7E,oBAAoB,KAAK;AAAA,IAC3B,CAAC;AAAA,EACH;AAGA,aAAW,UAAU,WAAW;AAC9B,UAAM,OAAO,OAAO;AACpB,UAAM,YAAY,MAAM,QAAQ,KAAK,IAAI,IAAI,KAAK,OAAO,CAAC,KAAK,IAAI;AACnE,UAAM,UAAU,MAAM,QAAQ,KAAK,EAAE,IAAI,KAAK,KAAK,CAAC,KAAK,EAAE;AAE3D,UAAM,qBAAqB,KAAK,aAC5B,kBAAkB,KAAK,YAAY,QAAQ,IAC3C;AAEJ,eAAW,SAAS,WAAW;AAC7B,iBAAW,SAAS,SAAS;AAC3B,gBAAQ,KAAK;AAAA,UACX;AAAA,UACA,SAAS,KAAK;AAAA,UACd;AAAA,UACA,YAAY,KAAK;AAAA,UACjB,aAAa,KAAK;AAAA,UAClB,cAAc,KAAK;AAAA,UACnB,YAAY,KAAK;AAAA,UACjB,eAAe,KAAK;AAAA,UACpB,WAAW,KAAK;AAAA,UAChB,aAAa,KAAK;AAAA,UAClB,YAAY;AAAA,UACZ,oBAAoB,KAAK;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,SAAO,eAAe,OAAO;AAC/B;;;AOhPO,SAAS,mBAAmB,QAAoC;AACrE,QAAM,EAAE,OAAO,MAAM,SAAS,OAAO,MAAM,OAAO,QAAQ,IAAI;AAE9D,MAAI,QAAQ,WAAW,QAAQ,CAAC,OAAO,OAAO,QAAQ;AACpD,WAAO,EAAE,UAAU,OAAO,OAAO,iBAAiB,MAAM,SAAS,IAAI,EAAE;AAAA,EACzE;AAEA,QAAM,UAAyB,CAAC;AAEhC,MAAI,MAAO,SAAQ,KAAK,EAAE,OAAO,SAAS,IAAI,MAAM,OAAO,MAAM,CAAC;AAClE,MAAI,KAAM,SAAQ,KAAK,EAAE,OAAO,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAC/D,MAAI,QAAS,SAAQ,KAAK,EAAE,OAAO,WAAW,IAAI,MAAM,OAAO,QAAQ,CAAC;AACxE,MAAI,MAAO,SAAQ,KAAK,EAAE,OAAO,SAAS,IAAI,MAAM,OAAO,MAAM,CAAC;AAClE,MAAI,KAAM,SAAQ,KAAK,EAAE,OAAO,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAE/D,MAAI,OAAO,OAAO;AAChB,eAAW,UAAU,OAAO,OAAO;AACjC,YAAM,QAAQ,eAAe,IAAI,OAAO,KAAK,IACzC,OAAO,QACP,OAAO,MAAM,WAAW,OAAO,IAC7B,OAAO,QACP,QAAQ,OAAO,KAAK;AAC1B,cAAQ,KAAK,EAAE,OAAO,IAAI,OAAO,IAAI,OAAO,OAAO,MAAM,CAAC;AAAA,IAC5D;AAAA,EACF;AAEA,MAAI,QAAQ,WAAW,GAAG;AACxB,UAAM,IAAI,kBAAkB,kDAAkD;AAAA,EAChF;AAKA,QAAM,iBAAiB,UAAU,SAAY,sBAAsB,SAAS;AAC5E,SAAO,EAAE,UAAU,SAAS,SAAS,SAAS,EAAE,OAAO,gBAAgB,QAAQ,EAAE;AACnF;AAEO,SAAS,mBAAmB,QAAoC;AACrE,QAAM,EAAE,OAAO,OAAO,QAAQ,IAAI;AAElC,QAAM,UAAyB;AAAA,IAC7B,EAAE,OAAO,SAAS,IAAI,MAAM,OAAO,MAAM;AAAA,IACzC,EAAE,OAAO,WAAW,IAAI,MAAM,OAAO,cAAc;AAAA,EAC
rD;AAEA,MAAI,OAAO,OAAO;AAChB,eAAW,UAAU,OAAO,OAAO;AACjC,YAAM,QAAQ,eAAe,IAAI,OAAO,KAAK,IACzC,OAAO,QACP,OAAO,MAAM,WAAW,OAAO,IAC7B,OAAO,QACP,QAAQ,OAAO,KAAK;AAC1B,cAAQ,KAAK,EAAE,OAAO,IAAI,OAAO,IAAI,OAAO,OAAO,MAAM,CAAC;AAAA,IAC5D;AAAA,EACF;AAEA,QAAM,iBAAiB,UAAU,SAAY,sBAAsB,SAAS;AAC5E,SAAO,EAAE,UAAU,SAAS,SAAS,SAAS,EAAE,OAAO,gBAAgB,QAAQ,EAAE;AACnF;;;ACtCA,IAAM,sBAA0D;AAAA,EAC9D,oBAAI,IAAI,CAAC,QAAQ,SAAS,CAAC;AAAA,EAC3B,oBAAI,IAAI,CAAC,WAAW,MAAM,CAAC;AAAA,EAC3B,oBAAI,IAAI,CAAC,SAAS,SAAS,CAAC;AAAA,EAC5B,oBAAI,IAAI,CAAC,WAAW,OAAO,CAAC;AAC9B;AAUO,SAAS,mBAAmB,SAA2C;AAI5E,QAAM,uBAAuB,oBAAI,IAAY;AAC7C,MAAI,iBAAiB;AAErB,aAAW,KAAK,SAAS;AACvB,QAAI,eAAe,IAAI,EAAE,KAAK,GAAG;AAC/B,2BAAqB,IAAI,EAAE,KAAK;AAAA,IAClC,OAAO;AAEL,uBAAiB;AAAA,IACnB;AAAA,EACF;AAIA,aAAW,WAAW,qBAAqB;AACzC,QAAI,UAAU;AACd,eAAW,SAAS,SAAS;AAC3B,UAAI,CAAC,qBAAqB,IAAI,KAAK,GAAG;AACpC,kBAAU;AACV;AAAA,MACF;AAAA,IACF;AACA,QAAI,SAAS;AAGX,aAAO,EAAE,MAAM,KAAK;AAAA,IACtB;AAAA,EACF;AAGA,QAAM,gBAAgB,CAAC,GAAG,oBAAoB;AAC9C,MAAI,cAAc,WAAW,KAAK,gBAAgB;AAChD,WAAO;AAAA,MACL,MAAM;AAAA,MACN,QACE;AAAA,IAGJ;AAAA,EACF;AAEA,MAAI,gBAAgB;AAClB,WAAO;AAAA,MACL,MAAM;AAAA,MACN,QACE,qBAAqB,cAAc,KAAK,IAAI,CAAC;AAAA,IAIjD;AAAA,EACF;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,QACE,qBAAqB,cAAc,KAAK,IAAI,CAAC;AAAA,EAIjD;AACF;;;ACrFA,SAASC,yBACP,OACA,KACA,MACgB;AAChB,SAAO,EAAE,OAAO,MAAM,KAAK,SAAS,eAAe,OAAO,OAAO,MAAM,KAAK,KAAK;AACnF;AAEA,SAASC,yBACP,OACA,MACA,SACA,OACA,MACA,MACgB;AAChB,SAAO,EAAE,OAAO,MAAM,SAAS,OAAO,MAAM,KAAK;AACnD;AAEO,IAAM,uBAAN,MAAuD;AAAA,EAC5D,YACmB,SACA,UACA,iBAAiC,SACjC,YAAoB,IACpB,kBAAsC,OACvD;AALiB;AACA;AACA;AACA;AACA;AAAA,EAChB;AAAA,EAEH,MAAM,QAAQ,KAAgD;AAC5D,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK;AAC9C,QAAI,CAAC,UAAU,CAAC,KAAK,SAAU,QAAO;AACtC,UAAM,SAAS,MAAM,cAAc,QAAQ,KAAK,UAAU,KAAK,eAAe;AAC9E,QAAI,OAAO,YAAY,OAAO,cAAc,OAAO;AACjD,YAAM,KAAK,QAAQ,UAAU,OAAO;AAAA,QAClC,aAAa,OAAO,OAAO;AAAA,QAC3B,GAAG,OAAO,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AACA,WAAO,OAAO;AAAA,EAChB;AAAA,EAEA,MAAM,QAAQ,MAAc,SAAiB,MAAiD;AAC5F,UAAM,QAAQ,iBAAiB
,MAAM,SAAS,IAAI;AAClD,UAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK;AAC9C,QAAI,CAAC,UAAU,CAAC,KAAK,SAAU,QAAO;AACtC,UAAM,SAAS,MAAM,cAAc,QAAQ,KAAK,UAAU,KAAK,eAAe;AAC9E,QAAI,OAAO,YAAY,OAAO,cAAc,OAAO;AACjD,YAAM,KAAK,QAAQ,UAAU,OAAO;AAAA,QAClC,aAAa,OAAO,OAAO;AAAA,QAC3B,GAAG,OAAO,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AACA,WAAO,OAAO;AAAA,EAChB;AAAA,EAEA,MAAM,WAAW,MAAc,SAAiB,MAAgC;AAC9E,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK;AAC9C,WAAO,WAAW;AAAA,EACpB;AAAA,EAEQ,iBAAiB,SAAwB,qBAAqC;AACpF,QAAI,uBAAuB,KAAK,mBAAmB,MAAO;AAE1D,UAAM,SAAS,mBAAmB,OAAO;AACzC,QAAI,OAAO,KAAM;AAEjB,QAAI,KAAK,mBAAmB,SAAS;AACnC,YAAM,IAAI,iBAAiB,OAAO,MAAO;AAAA,IAC3C;AAEA,YAAQ,KAAK,qCAAqC,OAAO,MAAM,EAAE;AAAA,EACnE;AAAA,EAEA,MAAM,UAAU,QAAuD;AACrE,UAAM,OAAO,mBAAmB,MAAM;AACtC,QAAI;AACJ,QAAI,KAAK,aAAa,OAAO;AAC3B,YAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK,KAAK;AACnD,gBAAU,SAAS,CAAC,MAAM,IAAI,CAAC;AAAA,IACjC,OAAO;AACL,WAAK,iBAAiB,KAAK,SAAS,OAAO,mBAAmB;AAC9D,gBAAU,MAAM,KAAK,QAAQ,MAAM,KAAK,SAAS,KAAK,OAAO;AAAA,IAC/D;AACA,WAAO,KAAK,gBAAgB,OAAO;AAAA,EACrC;AAAA,EAEA,MAAM,UAAU,QAAuD;AACrE,UAAM,OAAO,mBAAmB,MAAM;AACtC,QAAI;AACJ,QAAI,KAAK,aAAa,OAAO;AAC3B,YAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK,KAAK;AACnD,gBAAU,SAAS,CAAC,MAAM,IAAI,CAAC;AAAA,IACjC,OAAO;AACL,WAAK,iBAAiB,KAAK,SAAS,OAAO,mBAAmB;AAC9D,gBAAU,MAAM,KAAK,QAAQ,MAAM,KAAK,SAAS,KAAK,OAAO;AAAA,IAC/D;AACA,WAAO,KAAK,gBAAgB,OAAO;AAAA,EACrC;AAAA,EAEA,MAAc,gBAAgB,SAA4D;AACxF,QAAI,CAAC,KAAK,YAAY,QAAQ,WAAW,EAAG,QAAO;AACnD,UAAM,UAAU,MAAM,eAAe,SAAS,KAAK,UAAU,KAAK,eAAe;AACjF,eAAW,UAAU,SAAS;AAC5B,UAAI,OAAO,YAAY,OAAO,cAAc,OAAO;AACjD,cAAM,QACJ,OAAO,OAAO,YAAY,gBACtB,iBAAiB,OAAO,OAAO,IAAI,IACnC,iBAAiB,OAAO,OAAO,MAAM,OAAO,OAAO,SAAS,OAAO,OAAO,IAAI;AACpF,cAAM,KAAK,QAAQ,UAAU,OAAO;AAAA,UAClC,aAAa,OAAO,OAAO;AAAA,UAC3B,GAAG,OAAO,OAAO;AAAA,QACnB,CAAC;AAAA,MACH;AAAA,IACF;AACA,WAAO,QAAQ,IAAI,CAAC,MAAM,EAAE,MAAM;AAAA,EACpC;AAAA,EAEA,MAAM,QAAQ,OAAe,KAAa,MAA8C;AACtF,UAAM,KAAK,UAAU,OAAO,KAAK,MAAM,OAAO;AAAA,EAChD;AAAA,EAEA,MAAM,QACJ,OACA,MACA,SACA,OACA,MACA,MACe;AACf,UAAM,KAAK,UAAU,OAAO,MAAM,SAAS,OA
AO,MAAM,MAAM,OAAO;AAAA,EACvE;AAAA,EAEA,MAAM,YAAY,OAAe,KAAa,MAA8C;AAC1F,UAAM,KAAK,UAAU,OAAO,KAAK,MAAM,SAAS;AAAA,EAClD;AAAA,EAEA,MAAM,YACJ,OACA,MACA,SACA,OACA,MACA,MACe;AACf,UAAM,KAAK,UAAU,OAAO,MAAM,SAAS,OAAO,MAAM,MAAM,SAAS;AAAA,EACzE;AAAA,EAEA,MAAc,UACZ,OACA,KACA,MACA,MACe;AACf,4BAAwB,MAAM,SAAS,YAAY,gBAAgB,SAAS;AAC5E,QAAI,KAAK,UAAU;AACjB,WAAK,SAAS,SAAS,OAAO,eAAe,OAAO,MAAM,KAAK,SAAS;AAAA,IAC1E;AACA,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,SAASD,yBAAwB,OAAO,KAAK,IAAI;AACvD,QAAI,KAAK,UAAU;AACjB,YAAM,QAAQ,KAAK,SAAS,OAAO,OAAO,eAAe,KAAK;AAC9D,UAAI,OAAO,iBAAiB,MAAM,gBAAgB,GAAG;AACnD,eAAO,IAAI,MAAM;AAAA,MACnB;AAAA,IACF;AACA,UAAM,KAAK,QAAQ,OAAO,OAAO,QAAQ,IAAI;AAAA,EAC/C;AAAA,EAEA,MAAc,UACZ,OACA,MACA,SACA,OACA,MACA,MACA,MACe;AACf,4BAAwB,MAAM,SAAS,YAAY,gBAAgB,SAAS;AAC5E,QAAI,KAAK,UAAU;AACjB,WAAK,SAAS,SAAS,OAAO,SAAS,OAAO,MAAM,KAAK,SAAS;AAAA,IACpE;AACA,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,SAASC,yBAAwB,OAAO,MAAM,SAAS,OAAO,MAAM,IAAI;AAC9E,QAAI,KAAK,UAAU;AACjB,YAAM,QAAQ,KAAK,SAAS,OAAO,OAAO,SAAS,KAAK;AACxD,UAAI,OAAO,iBAAiB,MAAM,gBAAgB,GAAG;AACnD,eAAO,IAAI,MAAM;AAAA,MACnB;AAAA,IACF;AACA,UAAM,KAAK,QAAQ,OAAO,OAAO,QAAQ,IAAI;AAAA,EAC/C;AAAA,EAEA,MAAM,WAAW,KAAa,MAA8C;AAC1E,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,KAAK,QAAQ,UAAU,OAAO,EAAE,SAAS,aAAa,IAAI,EAAE,CAAC;AAAA,EACrE;AAAA,EAEA,MAAM,WACJ,MACA,SACA,MACA,MACe;AACf,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,KAAK,QAAQ,UAAU,OAAO,EAAE,SAAS,aAAa,IAAI,EAAE,CAAC;AAAA,EACrE;AAAA,EAEA,MAAM,WAAW,KAA4B;AAC3C,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,KAAK,QAAQ,UAAU,KAAK;AAAA,EACpC;AAAA,EAEA,MAAM,WAAW,MAAc,SAAiB,MAA6B;AAC3E,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,KAAK,QAAQ,UAAU,KAAK;AAAA,EACpC;AACF;;;AChMA,IAAM,sBAAsB,oBAAI,IAAI,CAAC,gBAAgB,cAAc,CAAC;AAEpE,SAASC,yBACP,OACA,KACA,MACgB;AAChB,SAAO,EAAE,OAAO,MAAM,KAAK,SAAS,eAAe,OAAO,OAAO,MAAM,KAAK,KAAK;AACnF;AAEA,SAASC,yBACP,OACA,MACA,SACA,OACA,MACA,MACgB;AAChB,SAAO,EAAE,OAAO,MAAM,SAAS,OAAO,MAAM,KAAK;AACnD;AAEO,IAAM,kBAAN,MAAM,iBAA8C;AAAA,EAgBzD,YACmB,SACjB,SAEA,aACA;AAJiB;AAKjB,SAAK,kBAAkB,SAAS,sBAAsB;AACtD,SAAK,mBAAm
B,SAAS;AAEjC,QAAI,SAAS,cAAc;AACzB,WAAK,gBAAgB,QAAQ;AAC7B,WAAK,oBAAoB,wBAAwB;AACjD,UAAI,QAAQ,UAAU;AACpB,aAAK,iBAAiB,QAAQ;AAAA,MAChC;AACA,WAAK,cAAc;AAAA,IACrB,OAAO;AACL,WAAK,iBAAiB,SAAS;AAAA,IACjC;AAEA,SAAK,iBAAiB,SAAS,kBAAkB;AAAA,EACnD;AAAA,EApCS;AAAA;AAAA,EAGQ;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACT;AAAA,EACS;AAAA;AAAA,EAGA;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8BjB,aAA6B;AAC3B,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,sBAAiD;AAC/C,WAAO,KAAK,oBAAoB;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAMQ,mBAAmB,OAA0C;AACnE,QAAI,CAAC,KAAK,cAAe,QAAO,KAAK;AAErC,QAAI,UAAU,kBAAkB,UAAU,gBAAgB;AACxD,aAAO,KAAK;AAAA,IACd;AAEA,WAAO,KAAK,mBAAmB,KAAK,kBAAkB,KAAK;AAAA,EAC7D;AAAA,EAEQ,kBAAkB,OAA+B;AACvD,QAAI,KAAK,gBAAgB,UAAU,kBAAkB,UAAU,iBAAiB;AAC9E,aAAO,KAAK;AAAA,IACd;AACA,WAAO,KAAK;AAAA,EACd;AAAA,EAEQ,sBAAiD;AACvD,QAAI,CAAC,KAAK,cAAe,QAAO,KAAK;AACrC,WAAO,KAAK,mBAAmB,KAAK,kBAAkB,KAAK;AAAA,EAC7D;AAAA;AAAA;AAAA;AAAA,EAMQ,iBAAiB,SAAwB,qBAAqC;AACpF,QAAI,uBAAuB,KAAK,mBAAmB,MAAO;AAE1D,UAAM,SAAS,mBAAmB,OAAO;AACzC,QAAI,OAAO,KAAM;AAEjB,QAAI,KAAK,mBAAmB,SAAS;AACnC,YAAM,IAAI,iBAAiB,OAAO,MAAO;AAAA,IAC3C;AAEA,YAAQ,KAAK,qCAAqC,OAAO,MAAM,EAAE;AAAA,EACnE;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,eACZ,QACA,OAC4B;AAC5B,UAAM,WAAW,KAAK,oBAAoB;AAC1C,QAAI,CAAC,SAAU,QAAO;AAEtB,UAAM,SAAS,MAAM,cAAc,QAAQ,UAAU,KAAK,eAAe;AACzE,QAAI,OAAO,UAAU;AACnB,WAAK,gBAAgB,QAAQ,KAAK;AAAA,IACpC;AACA,WAAO,OAAO;AAAA,EAChB;AAAA,EAEA,MAAc,gBAAgB,SAA4D;AACxF,UAAM,WAAW,KAAK,oBAAoB;AAC1C,QAAI,CAAC,YAAY,QAAQ,WAAW,EAAG,QAAO;AAE9C,UAAM,UAAU,MAAM,eAAe,SAAS,UAAU,KAAK,eAAe;AAC5E,eAAW,UAAU,SAAS;AAC5B,UAAI,OAAO,UAAU;AACnB,cAAM,QACJ,OAAO,OAAO,YAAY,gBACtB,iBAAiB,OAAO,OAAO,IAAI,IACnC,iBAAiB,OAAO,OAAO,MAAM,OAAO,OAAO,SAAS,OAAO,OAAO,IAAI;AACpF,aAAK,gBAAgB,QAAQ,KAAK;AAAA,MACpC;AAAA,IACF;AACA,WAAO,QAAQ,IAAI,CAAC,MAAM,EAAE,MAAM;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,gBAAgB,QAAyB,OAAqB;AACpE,QAAI,OAAO,cAAc,MAAO;AAEhC,UAAM,cAAc,YAAY;AAC9B,UAAI;AACF,cAAM,KAAK,QAAQ,UAAU,OAAO;AAAA,UAClC,aAAa,OAAO,OAAO;AAAA,UAC3B,GAAG,OAAO,OAAO;A
AAA,QACnB,CAAC;AAAA,MACH,SAAS,KAAc;AACrB,cAAM,MAAM,+CAA+C,KAAK,KAAM,IAAc,OAAO;AAC3F,YAAI,OAAO,cAAc,SAAS;AAChC,kBAAQ,MAAM,GAAG;AAAA,QACnB,OAAO;AACL,kBAAQ,KAAK,GAAG;AAAA,QAClB;AAAA,MACF;AAAA,IACF;AAEA,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,QAAQ,KAAgD;AAC5D,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK;AAC9C,QAAI,CAAC,OAAQ,QAAO;AACpB,WAAO,KAAK,eAAe,QAAQ,KAAK;AAAA,EAC1C;AAAA,EAEA,MAAM,QAAQ,MAAc,SAAiB,MAAiD;AAC5F,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK;AAC9C,QAAI,CAAC,OAAQ,QAAO;AACpB,WAAO,KAAK,eAAe,QAAQ,KAAK;AAAA,EAC1C;AAAA,EAEA,MAAM,WAAW,MAAc,SAAiB,MAAgC;AAC9E,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK;AAC9C,WAAO,WAAW;AAAA,EACpB;AAAA,EAEA,MAAM,UAAU,QAAuD;AACrE,UAAM,OAAO,mBAAmB,MAAM;AACtC,QAAI;AACJ,QAAI,KAAK,aAAa,OAAO;AAC3B,YAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK,KAAK;AACnD,gBAAU,SAAS,CAAC,MAAM,IAAI,CAAC;AAAA,IACjC,OAAO;AACL,WAAK,iBAAiB,KAAK,SAAS,OAAO,mBAAmB;AAC9D,gBAAU,MAAM,KAAK,QAAQ,MAAM,KAAK,SAAS,KAAK,OAAO;AAAA,IAC/D;AACA,WAAO,KAAK,gBAAgB,OAAO;AAAA,EACrC;AAAA,EAEA,MAAM,UAAU,QAAuD;AACrE,UAAM,OAAO,mBAAmB,MAAM;AACtC,QAAI;AACJ,QAAI,KAAK,aAAa,OAAO;AAC3B,YAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK,KAAK;AACnD,gBAAU,SAAS,CAAC,MAAM,IAAI,CAAC;AAAA,IACjC,OAAO;AACL,WAAK,iBAAiB,KAAK,SAAS,OAAO,mBAAmB;AAC9D,gBAAU,MAAM,KAAK,QAAQ,MAAM,KAAK,SAAS,KAAK,OAAO;AAAA,IAC/D;AACA,WAAO,KAAK,gBAAgB,OAAO;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,QAAQ,OAAe,KAAa,MAA8C;AACtF,UAAM,KAAK,UAAU,OAAO,KAAK,MAAM,OAAO;AAAA,EAChD;AAAA,EAEA,MAAM,QACJ,OACA,MACA,SACA,OACA,MACA,MACe;AACf,UAAM,KAAK,UAAU,OAAO,MAAM,SAAS,OAAO,MAAM,MAAM,OAAO;AAAA,EACvE;AAAA,EAEA,MAAM,YAAY,OAAe,KAAa,MAA8C;AAC1F,UAAM,KAAK,UAAU,OAAO,KAAK,MAAM,SAAS;AAAA,EAClD;AAAA,EAEA,MAAM,YACJ,OACA,MACA,SACA,OACA,MACA,MACe;AACf,UAAM,KAAK,UAAU,OAAO,MAAM,SAAS,OAAO,MAAM,MAAM,SAAS;AAAA,EACzE;AAAA,EAEA,MAAc,UACZ,OACA,KACA,MACA,MACe;AACf,4BAAwB,MAAM,SAAS,YAAY,gBAAgB,SAAS;AAC5E,UAAM,WAAW,KAAK,mBAAmB,KAAK;AAC9C,QAAI,UAAU;AACZ,eAAS,SAAS,OAAO,eAAe,OAAO,MAAM,KAAK,QAAQ,SAAS;AAAA,IAC7E;AACA,
UAAM,UAAU,KAAK,kBAAkB,KAAK;AAC5C,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,SAASD,yBAAwB,OAAO,KAAK,IAAI;AACvD,QAAI,UAAU;AACZ,YAAM,QAAQ,SAAS,OAAO,OAAO,eAAe,KAAK;AACzD,UAAI,OAAO,iBAAiB,MAAM,gBAAgB,GAAG;AACnD,eAAO,IAAI,MAAM;AAAA,MACnB;AAAA,IACF;AACA,UAAM,QAAQ,OAAO,OAAO,QAAQ,IAAI;AAAA,EAC1C;AAAA,EAEA,MAAc,UACZ,OACA,MACA,SACA,OACA,MACA,MACA,MACe;AACf,4BAAwB,MAAM,SAAS,YAAY,gBAAgB,SAAS;AAC5E,UAAM,WAAW,KAAK,mBAAmB,KAAK;AAC9C,QAAI,UAAU;AACZ,eAAS,SAAS,OAAO,SAAS,OAAO,MAAM,KAAK,QAAQ,SAAS;AAAA,IACvE;AACA,UAAM,UAAU,KAAK,kBAAkB,KAAK;AAC5C,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,SAASC,yBAAwB,OAAO,MAAM,SAAS,OAAO,MAAM,IAAI;AAC9E,QAAI,UAAU;AACZ,YAAM,QAAQ,SAAS,OAAO,OAAO,SAAS,KAAK;AACnD,UAAI,OAAO,iBAAiB,MAAM,gBAAgB,GAAG;AACnD,eAAO,IAAI,MAAM;AAAA,MACnB;AAAA,IACF;AACA,UAAM,QAAQ,OAAO,OAAO,QAAQ,IAAI;AAAA,EAC1C;AAAA,EAEA,MAAM,WAAW,KAAa,MAA8C;AAC1E,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,KAAK,QAAQ,UAAU,OAAO,EAAE,SAAS,aAAa,IAAI,EAAE,CAAC;AAAA,EACrE;AAAA,EAEA,MAAM,WACJ,MACA,SACA,MACA,MACe;AACf,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,KAAK,QAAQ,UAAU,OAAO,EAAE,SAAS,aAAa,IAAI,EAAE,CAAC;AAAA,EACrE;AAAA,EAEA,MAAM,WAAW,KAA4B;AAC3C,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,KAAK,QAAQ,UAAU,KAAK;AAAA,EACpC;AAAA,EAEA,MAAM,WAAW,MAAc,SAAiB,MAA6B;AAC3E,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,KAAK,QAAQ,UAAU,KAAK;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,eAAkB,IAAsD;AAC5E,WAAO,KAAK,QAAQ,eAAe,OAAO,cAAc;AACtD,YAAM,UAAU,IAAI;AAAA,QAClB;AAAA,QACA,KAAK,oBAAoB;AAAA,QACzB,KAAK;AAAA,QACL,KAAK,QAAQ;AAAA,QACb,KAAK;AAAA,MACP;AACA,aAAO,GAAG,OAAO;AAAA,IACnB,CAAC;AAAA,EACH;AAAA,EAEA,QAAoB;AAClB,WAAO,IAAI;AAAA,MACT,KAAK,QAAQ,YAAY;AAAA,MACzB,KAAK,oBAAoB;AAAA,MACzB,KAAK,QAAQ;AAAA,IACf;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,SAAS,eAAuB,OAAe,SAAsB;AACnE,QAAI,CAAC,iBAAiB,cAAc,SAAS,GAAG,GAAG;AACjD,YAAM,IAAI;AAAA,QACR,wCAAwC,aAAa;AAAA,QAErD;AAAA,MACF;AAAA,IACF;AACA,QAAI,KAAK,SAAS,GAAG,GAAG;AACtB,YAAM,IAAI;AAAA,QACR,4CAA4C,IAAI;AAAA,QAEhD;AAAA,MACF;AAAA,IACF;AAEA,UAAM,eAAe,KAAK,QAAQ,SAAS,eAAe,IAAI;AAE9D,WAAO,IAAI;AAAA,MACT;AAAA,MACA;AAAA,QACE,UAAU,KAAK,oBA
AoB;AAAA,QACnC,gBAAgB,KAAK;AAAA,QACrB,oBAAoB,KAAK;AAAA,QACzB,kBAAkB,KAAK;AAAA,MACzB;AAAA;AAAA,IAEF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,gBACJ,QACA,gBAC8B;AAC9B,QAAI,CAAC,KAAK,QAAQ,iBAAiB;AACjC,YAAM,IAAI;AAAA,QACR;AAAA,QACA;AAAA,MACF;AAAA,IACF;AACA,UAAM,OAAO,mBAAmB,MAAM;AACtC,QAAI,KAAK,aAAa,OAAO;AAC3B,YAAM,IAAI;AAAA,QACR;AAAA,QAEA;AAAA,MACF;AAAA,IACF;AACA,SAAK,iBAAiB,KAAK,SAAS,OAAO,mBAAmB;AAC9D,UAAM,UAAU,MAAM,KAAK,QAAQ,gBAAgB,QAAQ,cAAc;AACzE,WAAO,KAAK,gBAAgB,OAAO;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,kBAAkB,KAAa,SAA+C;AAClF,WAAO,KAAK,QAAQ,kBAAkB,KAAK,MAAM,OAAO;AAAA,EAC1D;AAAA,EAEA,MAAM,gBAAgB,QAAyB,SAA4C;AACzF,WAAO,KAAK,QAAQ,gBAAgB,QAAQ,MAAM,OAAO;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,eACJ,MACA,YACA,aACA,SACe;AACf,QAAI,CAAC,KAAK,eAAe;AACvB,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AAEA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,YAAM,IAAI;AAAA,QACR,uBAAuB,IAAI;AAAA,MAC7B;AAAA,IACF;AAEA,QAAI,KAAK,gBAAgB,OAAO,MAAM,eAAe,IAAI,GAAG;AAC1D,YAAM,IAAI;AAAA,QACR,4BAA4B,IAAI;AAAA,MAClC;AAAA,IACF;AAEA,UAAM,MAAM,yBAAyB,gBAAgB,IAAI;AACzD,UAAM,OAAgC,EAAE,MAAM,WAAW;AACzD,QAAI,gBAAgB,OAAW,MAAK,cAAc;AAClD,QAAI,SAAS,eAAe,OAAW,MAAK,aAAa,QAAQ;AACjE,QAAI,SAAS,kBAAkB,OAAW,MAAK,gBAAgB,QAAQ;AACvE,QAAI,SAAS,iBAAiB,OAAW,MAAK,eAAe,QAAQ;AACrE,QAAI,SAAS,YAAY,OAAW,MAAK,UAAU,QAAQ;AAC3D,QAAI,SAAS,cAAc,OAAW,MAAK,YAAY,QAAQ;AAC/D,QAAI,SAAS,uBAAuB;AAClC,WAAK,qBAAqB,QAAQ;AACpC,QAAI,SAAS,eAAe,QAAW;AACrC,WAAK,aAAa,MAAM,KAAK,oBAAoB,QAAQ,UAAU;AAAA,IACrE;AAEA,UAAM,KAAK,QAAQ,gBAAgB,KAAK,IAAI;AAAA,EAC9C;AAAA,EAEA,MAAM,eACJ,MACA,UACA,YACA,aACA,SACe;AACf,QAAI,CAAC,KAAK,eAAe;AACvB,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AAEA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,YAAM,IAAI;AAAA,QACR,uBAAuB,IAAI;AAAA,MAC7B;AAAA,IACF;AAEA,QAAI,KAAK,gBAAgB;AACvB,YAAM,YAAY,MAAM,QAAQ,SAAS,IAAI,IAAI,SAAS,OAAO,CAAC,SAAS,IAAI;AAC/E,YAAM,UAAU,MAAM,QAAQ,SAAS,EAAE,IAAI,SAAS,KAAK,CAAC,SAAS,EAAE;AACvE,iBAAW,SAAS,WAAW;AAC7B,mBAAW,SAAS,SAAS;AAC3B,cAAI,KAAK,eAAe,OAAO,OAAO,MAAM,KAAK,GAAG;AAClD,kBAAM,IAAI;AAAA,cACR,4BAA4B,IAAI,UAAU,KAAK,SAAS,KAAK;AAAA,YAC/D;A
AAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,UAAM,MAAM,yBAAyB,gBAAgB,IAAI;AACzD,UAAM,OAAgC;AAAA,MACpC;AAAA,MACA,MAAM,SAAS;AAAA,MACf,IAAI,SAAS;AAAA,IACf;AACA,QAAI,eAAe,OAAW,MAAK,aAAa;AAChD,QAAI,SAAS,iBAAiB,OAAW,MAAK,eAAe,SAAS;AACtE,QAAI,SAAS,gBAAgB,OAAW,MAAK,cAAc,SAAS;AACpE,QAAI,gBAAgB,OAAW,MAAK,cAAc;AAClD,QAAI,SAAS,eAAe,OAAW,MAAK,aAAa,QAAQ;AACjE,QAAI,SAAS,kBAAkB,OAAW,MAAK,gBAAgB,QAAQ;AACvE,QAAI,SAAS,iBAAiB,OAAW,MAAK,eAAe,QAAQ;AACrE,QAAI,SAAS,YAAY,OAAW,MAAK,UAAU,QAAQ;AAC3D,QAAI,SAAS,cAAc,OAAW,MAAK,YAAY,QAAQ;AAC/D,QAAI,SAAS,uBAAuB;AAClC,WAAK,qBAAqB,QAAQ;AACpC,QAAI,SAAS,eAAe,QAAW;AACrC,WAAK,aAAa,MAAM,KAAK,oBAAoB,QAAQ,UAAU;AAAA,IACrE;AAEA,UAAM,KAAK,QAAQ,gBAAgB,KAAK,IAAI;AAAA,EAC9C;AAAA,EAEA,MAAM,iBAAgC;AACpC,QAAI,CAAC,KAAK,eAAe;AACvB,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AAEA,UAAM,SAAS,KAAK,iBAAiB;AACrC,UAAM,cAAc,MAAM,wBAAwB,QAAQ,KAAK,gBAAgB;AAE/E,QAAI,KAAK,gBAAgB;AACvB,WAAK,kBAAkB,qBAAqB,KAAK,gBAAgB,WAAW;AAAA,IAC9E,OAAO;AACL,WAAK,kBAAkB;AAAA,IACzB;AAAA,EACF;AAAA,EAEA,MAAc,oBACZ,YACwE;AACxE,UAAM,SAAS,WAAW,IAAI,CAAC,MAAM;AACnC,YAAM,SAAS,OAAO,EAAE,OAAO,aAAa,EAAE,GAAG,SAAS,IAAI,EAAE;AAChE,aAAO,EAAE,aAAa,EAAE,aAAa,WAAW,EAAE,WAAW,IAAI,OAAO;AAAA,IAC1E,CAAC;AACD,UAAM,QAAQ,IAAI,OAAO,IAAI,CAAC,MAAM,iBAAiB,EAAE,IAAI,KAAK,gBAAgB,CAAC,CAAC;AAClF,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,mBAAgC;AACtC,QAAI,CAAC,KAAK,YAAa,QAAO;AAE9B,UAAM,UAAU,KAAK;AAErB,UAAM,mBAAmB,CACvB,SACA,YACiC,QAAQ,MAAM,SAAS,OAAO;AAEjE,WAAO;AAAA,MACL,MAAM,QAAQ,KAAgD;AAC5D,eAAO,QAAQ,OAAO,iBAAiB,GAAG,CAAC;AAAA,MAC7C;AAAA,MACA,MAAM,QACJ,MACA,SACA,MACmC;AACnC,eAAO,QAAQ,OAAO,iBAAiB,MAAM,SAAS,IAAI,CAAC;AAAA,MAC7D;AAAA,MACA,MAAM,WAAW,MAAc,SAAiB,MAAgC;AAC9E,cAAM,SAAS,MAAM,QAAQ,OAAO,iBAAiB,MAAM,SAAS,IAAI,CAAC;AACzE,eAAO,WAAW;AAAA,MACpB;AAAA,MACA,MAAM,UAAU,QAAuD;AACrE,cAAM,OAAO,mBAAmB,MAAM;AACtC,YAAI,KAAK,aAAa,OAAO;AAC3B,gBAAM,SAAS,MAAM,QAAQ,OAAO,KAAK,KAAK;AAC9C,iBAAO,SAAS,CAAC,MAAM,IAAI,CAAC;AAAA,QAC9B;AACA,eAAO,iBAAiB,KAAK,SAAS,KAAK,OAAO;AAAA,MACpD;AAAA,MACA,MAAM,UAAU,QAAuD;AACrE,cAAM,OAAO,mBAAmB,MAAM;AACtC,YAAI,KAAK
,aAAa,OAAO;AAC3B,gBAAM,SAAS,MAAM,QAAQ,OAAO,KAAK,KAAK;AAC9C,iBAAO,SAAS,CAAC,MAAM,IAAI,CAAC;AAAA,QAC9B;AACA,eAAO,iBAAiB,KAAK,SAAS,KAAK,OAAO;AAAA,MACpD;AAAA,IACF;AAAA,EACF;AACF;AAUO,SAAS,6BACd,SACA,SACA,aACkC;AAClC,SAAO,IAAI,gBAAgB,SAAS,SAAS,WAAW;AAC1D;;;AClqBA,SAAS,WAAW,GAAmB;AACrC,SAAO,EAAE,QAAQ,+BAA+B,CAAC,GAAG,MAAM,OAAO,GAAG,YAAY,CAAC;AACnF;AAWA,eAAsB,cACpB,WACA,UAA0B,CAAC,GACV;AAEjB,QAAM,EAAE,QAAQ,IAAI,MAAM,OAAO,2BAA2B;AAE5D,QAAM,EAAE,SAAS,KAAK,IAAI;AAC1B,QAAM,SAAmB,CAAC;AAE1B,MAAI,QAAQ;AACV,WAAO,KAAK,sEAAiE;AAAA,EAC/E;AAGA,QAAM,cAAc,CAAC,GAAG,UAAU,MAAM,QAAQ,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC;AACxF,QAAM,cAAc,CAAC,GAAG,UAAU,MAAM,QAAQ,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC;AAExF,aAAW,CAAC,MAAM,MAAM,KAAK,aAAa;AACxC,UAAM,WAAW,GAAG,WAAW,IAAI,CAAC;AACpC,UAAM,KAAK,MAAM,QAAQ,OAAO,QAAe,UAAU;AAAA,MACvD,eAAe;AAAA,MACf,sBAAsB;AAAA,IACxB,CAAC;AACD,WAAO,KAAK,GAAG,KAAK,CAAC;AACrB,WAAO,KAAK,EAAE;AAAA,EAChB;AAEA,aAAW,CAAC,MAAM,MAAM,KAAK,aAAa;AACxC,UAAM,WAAW,GAAG,WAAW,IAAI,CAAC;AACpC,UAAM,KAAK,MAAM,QAAQ,OAAO,QAAe,UAAU;AAAA,MACvD,eAAe;AAAA,MACf,sBAAsB;AAAA,IACxB,CAAC;AACD,WAAO,KAAK,GAAG,KAAK,CAAC;AACrB,WAAO,KAAK,EAAE;AAAA,EAChB;AAEA,SAAO,OAAO,KAAK,IAAI,EAAE,QAAQ,IAAI;AACvC;;;ACmDO,SAAS,aAAa,QAA0C;AACrE,SAAO;AACT;AAeO,SAAS,YACd,gBACA,oBACA,SACQ;AACR,MAAI,CAAC,eAAgB,QAAO;AAE5B,QAAM,YAAY,IAAI,IAAI,kBAAkB;AAE5C,MAAI,SAAS;AACX,UAAM,iBAAiB,eAAe,OAAO;AAC7C,QAAI,kBAAkB,UAAU,IAAI,cAAc,GAAG;AACnD,aAAO;AAAA,IACT;AAAA,EACF;AAEA,MAAI,eAAe,WAAW,UAAU,IAAI,eAAe,OAAO,GAAG;AACnE,WAAO,eAAe;AAAA,EACxB;AAEA,SAAO;AACT;;;ACtHO,SAAS,0BAA0B,gBAAwB,KAA4B;AAC5F,QAAM,WAAW,eAAe,MAAM,GAAG;AAGzC,WAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK,GAAG;AAC3C,QAAI,SAAS,CAAC,MAAM,KAAK;AAEvB,aAAO,SAAS,MAAM,GAAG,CAAC,EAAE,KAAK,GAAG;AAAA,IACtC;AAAA,EACF;AAEA,SAAO;AACT;AASO,SAAS,cAAc,gBAAwB,KAAsB;AAC1E,SAAO,0BAA0B,gBAAgB,GAAG,MAAM;AAC5D;;;ACZO,IAAM,uBAAiD,OAAO,OAAO;AAAA,EAC1E,EAAE,QAAQ,CAAC,MAAM,EAAE;AAAA,EACnB,EAAE,QAAQ,CAAC,MAAM,EAAE;AAAA,EACnB,EAAE,QAAQ,CAAC,OAAO,EAAE;
AAAA,EACpB,EAAE,QAAQ,CAAC,OAAO,EAAE;AAAA,EACpB,EAAE,QAAQ,CAAC,QAAQ,SAAS,EAAE;AAAA,EAC9B,EAAE,QAAQ,CAAC,WAAW,MAAM,EAAE;AAAA,EAC9B,EAAE,QAAQ,CAAC,SAAS,SAAS,EAAE;AAAA,EAC/B,EAAE,QAAQ,CAAC,WAAW,OAAO,EAAE;AACjC,CAAC;;;AC1BD,qBAAgE;AAChE,yBAA8B;AAC9B,uBAA8B;AAjC9B,IAAAC,eAAA;AAgDO,IAAM,iBAAN,cAA6B,eAAe;AAAA,EACjD,YAAY,SAAiB;AAC3B,UAAM,SAAS,iBAAiB;AAChC,SAAK,OAAO;AAAA,EACd;AACF;AAMA,SAAS,SAAS,UAA2B;AAC3C,MAAI;AACF,UAAM,UAAM,6BAAa,UAAU,OAAO;AAC1C,WAAO,KAAK,MAAM,GAAG;AAAA,EACvB,SAAS,KAAc;AACrB,UAAM,MACJ,eAAe,cACX,mBAAmB,QAAQ,KAAK,IAAI,OAAO,KAC3C,eAAe,QAAQ,KAAM,IAAc,OAAO;AACxD,UAAM,IAAI,eAAe,GAAG;AAAA,EAC9B;AACF;AAEA,SAAS,iBAAiB,UAAuC;AAC/D,MAAI,KAAC,2BAAW,QAAQ,EAAG,QAAO;AAClC,SAAO,SAAS,QAAQ;AAC1B;AAMA,IAAM,2BAA2B,CAAC,OAAO,OAAO,QAAQ,MAAM;AAM9D,SAAS,WAAW,KAAa,aAA6B;AAE5D,aAAW,OAAO,0BAA0B;AAC1C,UAAM,gBAAY,uBAAK,KAAK,SAAS,GAAG,EAAE;AAC1C,YAAI,2BAAW,SAAS,GAAG;AACzB,aAAO,iBAAiB,WAAW,WAAW;AAAA,IAChD;AAAA,EACF;AAGA,QAAM,eAAW,uBAAK,KAAK,aAAa;AACxC,UAAI,2BAAW,QAAQ,GAAG;AACxB,WAAO,SAAS,QAAQ;AAAA,EAC1B;AAEA,QAAM,IAAI;AAAA,IACR,sBAAsB,WAAW,OAAO,GAAG;AAAA,EAE7C;AACF;AAEA,IAAI;AAEJ,SAAS,UAAmC;AAC1C,MAAI,CAAC,OAAO;AACV,UAAM,OAAO,OAAO,eAAe,cAAc,aAAaA,aAAY;AAC1E,UAAM,iBAAa,kCAAc,IAAI;AACrC,UAAM,EAAE,WAAW,IAAI,WAAW,MAAM;AACxC,YAAQ,WAAW,MAAM,EAAE,gBAAgB,KAAK,CAAC;AAAA,EACnD;AACA,SAAO;AACT;AAEA,SAAS,iBAAiB,UAAkB,aAA6B;AACvE,MAAI;AACF,UAAM,OAAO,QAAQ;AACrB,UAAM,MAAM,KAAK,QAAQ;AACzB,UAAM,SACJ,OAAO,OAAO,QAAQ,YAAY,aAAa,MAC1C,IAA6B,UAC9B;AAEN,QAAI,CAAC,UAAU,OAAO,WAAW,UAAU;AACzC,YAAM,IAAI;AAAA,QACR,eAAe,QAAQ,QAAQ,WAAW;AAAA,MAC5C;AAAA,IACF;AACA,WAAO;AAAA,EACT,SAAS,KAAc;AACrB,QAAI,eAAe,eAAgB,OAAM;AACzC,UAAM,IAAI;AAAA,MACR,gCAAgC,QAAQ,QAAQ,WAAW,KAAM,IAAc,OAAO;AAAA,IACxF;AAAA,EACF;AACF;AAMA,IAAM,kBAAkB,CAAC,OAAO,OAAO,QAAQ,MAAM;AAErD,SAAS,cAAc,KAAiC;AACtD,aAAW,OAAO,iBAAiB;AACjC,UAAM,gBAAY,uBAAK,KAAK,QAAQ,GAAG,EAAE;AACzC,YAAI,2BAAW,SAAS,EAAG,QAAO;AAAA,EACpC;AACA,SAAO;AACT;AAMA,IAAM,uBAAuB,CAAC,OAAO,OAAO,QAAQ,MAAM;AAE1D,SAAS,mBAAmB,KAAiC;AAC3D,aAAW,OAAO,sBAAsB;AACtC,UAAM,gBAAY,uBAAK,KAAK,aAAa,GAAG,EAAE;AAC9C
,YAAI,2BAAW,SAAS,EAAG,QAAO;AAAA,EACpC;AACA,SAAO;AACT;AAEA,SAAS,eAAe,UAAkB,aAAsC;AAC9E,MAAI;AACF,UAAM,OAAO,QAAQ;AACrB,UAAM,MAAM,KAAK,QAAQ;AACzB,UAAM,aACJ,OAAO,OAAO,QAAQ,YAAY,aAAa,MAC1C,IAA6B,UAC9B;AAEN,QAAI,CAAC,MAAM,QAAQ,UAAU,GAAG;AAC9B,YAAM,IAAI;AAAA,QACR,mBAAmB,QAAQ,QAAQ,WAAW;AAAA,MAChD;AAAA,IACF;AACA,WAAO;AAAA,EACT,SAAS,KAAc;AACrB,QAAI,eAAe,eAAgB,OAAM;AACzC,UAAM,IAAI;AAAA,MACR,6BAA6B,QAAQ,QAAQ,WAAW,KAAM,IAAc,OAAO;AAAA,IACrF;AAAA,EACF;AACF;AAMA,SAAS,eAAe,KAAa,MAAgC;AACnE,QAAM,SAAS,WAAW,KAAK,cAAc,IAAI,GAAG;AACpD,QAAM,OAAO,qBAAiB,uBAAK,KAAK,WAAW,CAAC;AAWpD,QAAM,aAAa,qBAAiB,uBAAK,KAAK,aAAa,CAAC;AAG5D,QAAM,YAAY,cAAc,GAAG;AACnC,QAAM,iBAAiB,mBAAmB,GAAG;AAC7C,QAAM,aAAa,iBACf,eAAe,gBAAgB,cAAc,IAAI,GAAG,IACpD;AAEJ,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA,aAAa,MAAM;AAAA,IACnB,YAAY,MAAM;AAAA,IAClB,eAAe,MAAM;AAAA,IACrB,cAAc,MAAM;AAAA,IACpB;AAAA,IACA;AAAA,IACA,WAAW,MAAM;AAAA,IACjB;AAAA,IACA,oBAAoB,MAAM;AAAA,IAC1B,SAAS,MAAM;AAAA,EACjB;AACF;AAEA,SAAS,eAAe,KAAa,MAAgC;AACnE,QAAM,SAAS,WAAW,KAAK,cAAc,IAAI,GAAG;AAEpD,QAAM,eAAW,uBAAK,KAAK,WAAW;AACtC,MAAI,KAAC,2BAAW,QAAQ,GAAG;AACzB,UAAM,IAAI;AAAA,MACR,oCAAoC,IAAI,QAAQ,GAAG;AAAA,IAErD;AAAA,EACF;AACA,QAAM,WAAW,SAAS,QAAQ;AAGlC,MAAI,CAAC,SAAS,MAAM;AAClB,UAAM,IAAI,eAAe,kBAAkB,IAAI,oCAAoC;AAAA,EACrF;AACA,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,IAAI,eAAe,kBAAkB,IAAI,kCAAkC;AAAA,EACnF;AAEA,QAAM,OAAO,qBAAiB,uBAAK,KAAK,WAAW,CAAC;AAYpD,QAAM,aAAa,qBAAiB,uBAAK,KAAK,aAAa,CAAC;AAG5D,QAAM,YAAY,cAAc,GAAG;AACnC,QAAM,iBAAiB,mBAAmB,GAAG;AAC7C,QAAM,aAAa,iBACf,eAAe,gBAAgB,cAAc,IAAI,GAAG,IACpD;AAEJ,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA;AAAA,IACA,aAAa,MAAM;AAAA,IACnB,YAAY,MAAM;AAAA,IAClB,eAAe,MAAM;AAAA,IACrB,cAAc,MAAM;AAAA,IACpB;AAAA,IACA;AAAA,IACA,WAAW,MAAM;AAAA,IACjB,aACE,SAAS,eAAgB,MAA+C;AAAA,IAC1E;AAAA,IACA,oBAAoB,MAAM;AAAA,IAC1B,SAAS,MAAM;AAAA,EACjB;AACF;AAMA,SAAS,kBAAkB,KAAuB;AAChD,MAAI,KAAC,2BAAW,GAAG,EAAG,QAAO,CAAC;AAC9B,aAAO,4BAAY,KAAK,EAAE,eAAe,KAAK,CAAC,EAC5C,OAAO,CAAC,MAAM,EAAE,YAAY,CAAC,EAC7B,IAAI,CAAC,MAAM,EAAE,IAAI;AACtB;AAsBO,SAAS,
iBAAiB,aAAqC;AACpE,QAAM,aAAS,0BAAQ,WAAW;AAElC,MAAI,KAAC,2BAAW,MAAM,KAAK,KAAC,yBAAS,MAAM,EAAE,YAAY,GAAG;AAC1D,UAAM,IAAI,eAAe,iCAAiC,WAAW,EAAE;AAAA,EACzE;AAEA,QAAM,QAAQ,oBAAI,IAA8B;AAChD,QAAM,QAAQ,oBAAI,IAA8B;AAChD,QAAM,WAA+B,CAAC;AAGtC,QAAM,eAAW,uBAAK,QAAQ,OAAO;AACrC,aAAW,QAAQ,kBAAkB,QAAQ,GAAG;AAC9C,UAAM,IAAI,MAAM,mBAAe,uBAAK,UAAU,IAAI,GAAG,IAAI,CAAC;AAAA,EAC5D;AAGA,QAAM,eAAW,uBAAK,QAAQ,OAAO;AACrC,aAAW,QAAQ,kBAAkB,QAAQ,GAAG;AAC9C,UAAM,IAAI,MAAM,mBAAe,uBAAK,UAAU,IAAI,GAAG,IAAI,CAAC;AAAA,EAC5D;AAGA,QAAM,YAAY,IAAI,IAAI,MAAM,KAAK,CAAC;AACtC,aAAW,CAAC,SAAS,MAAM,KAAK,OAAO;AACrC,UAAM,WAAW,OAAO;AACxB,UAAM,YAAY,MAAM,QAAQ,SAAS,IAAI,IAAI,SAAS,OAAO,CAAC,SAAS,IAAI;AAC/E,UAAM,UAAU,MAAM,QAAQ,SAAS,EAAE,IAAI,SAAS,KAAK,CAAC,SAAS,EAAE;AAEvE,eAAW,OAAO,CAAC,GAAG,WAAW,GAAG,OAAO,GAAG;AAC5C,UAAI,CAAC,UAAU,IAAI,GAAG,GAAG;AACvB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,SAAS,OAAO,2BAA2B,GAAG;AAAA,QACzD,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,QAAQ,EAAE,OAAO,MAAM;AAAA,IACvB;AAAA,EACF;AACF;;;AC5WA,IAAAC,oBAA2B;;;ACK3B,IAAM,iBAAiB;AACvB,IAAM,sBAAsB;AAC5B,IAAM,gBAAgB;AAEtB,SAAS,MAAM,IAA2B;AACxC,SAAO,IAAI,QAAQ,CAACC,aAAY,WAAWA,UAAS,EAAE,CAAC;AACzD;AAKA,SAAS,MAAS,KAAU,MAAqB;AAC/C,QAAM,SAAgB,CAAC;AACvB,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK,MAAM;AACzC,WAAO,KAAK,IAAI,MAAM,GAAG,IAAI,IAAI,CAAC;AAAA,EACpC;AACA,SAAO;AACT;AAKA,eAAsB,iBACpB,IACA,gBACA,QACA,SACqB;AACrB,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO,EAAE,SAAS,GAAG,SAAS,GAAG,QAAQ,CAAC,EAAE;AAAA,EAC9C;AAEA,QAAM,YAAY,KAAK,IAAI,SAAS,aAAa,gBAAgB,cAAc;AAC/E,QAAM,aAAa,SAAS,cAAc;AAC1C,QAAM,aAAa,SAAS;AAE5B,QAAM,SAAS,MAAM,QAAQ,SAAS;AACtC,QAAM,SAA2B,CAAC;AAClC,MAAI,UAAU;AACd,MAAI,mBAAmB;AAEvB,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,MAAM,OAAO,CAAC;AACpB,QAAI,YAAY;AAEhB,aAAS,UAAU,GAAG,WAAW,YAAY,WAAW;AACtD,UAAI;AACF,cAAM,QAAQ,GAAG,MAAM;AACvB,cAAM,gBAAgB,GAAG,WAAW,cAAc;AAClD,mBAAW,MAAM,KAAK;AACpB,gBAAM,OAAO,cAAc,IAAI,EAAE,CAAC;AAAA,QACpC;AACA,cAAM,MAAM,OAAO;AACnB,oBAAY;AACZ,mBAAW,IAAI;AACf;AAAA,MACF,SAAS,KAAK;AACZ,YAAI,UAAU,YAAY;AACxB,gBAAM,QAAQ,gBAAgB,KAAK,
IAAI,GAAG,OAAO;AACjD,gBAAM,MAAM,KAAK;AAAA,QACnB,OAAO;AACL,iBAAO,KAAK;AAAA,YACV,YAAY;AAAA,YACZ,OAAO,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AAAA,YACzD,gBAAgB,IAAI;AAAA,UACtB,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAEA,QAAI,WAAW;AACb;AAAA,IACF;AAEA,QAAI,YAAY;AACd,iBAAW;AAAA,QACT;AAAA,QACA,cAAc,OAAO;AAAA,QACrB,cAAc;AAAA,MAChB,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO,EAAE,SAAS,SAAS,kBAAkB,OAAO;AACtD;AAKA,eAAsB,gBACpB,IACA,gBACA,QACA,QACA,SACqB;AAIrB,QAAM,kBACJ,OAAO,UAAU,SACb,EAAE,GAAG,QAAQ,qBAAqB,OAAO,uBAAuB,KAAK,IACrE,EAAE,GAAG,QAAQ,OAAO,GAAG,qBAAqB,OAAO,uBAAuB,KAAK;AACrF,QAAM,QAAQ,MAAM,OAAO,UAAU,eAAe;AACpD,QAAM,SAAS,MAAM,IAAI,CAAC,MAAM,iBAAiB,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,CAAC;AAC3E,SAAO,iBAAiB,IAAI,gBAAgB,QAAQ,OAAO;AAC7D;AAkBA,eAAe,8BACb,IACA,gBACA,OACA,SACoC;AACpC,QAAM,SAAS,GAAG,WAAW,cAAc,EAAE,IAAI,KAAK;AACtD,QAAM,iBAAiB,MAAM,OAAO,gBAAgB;AAEpD,MAAI,eAAe,WAAW,EAAG,QAAO,EAAE,SAAS,GAAG,QAAQ,CAAC,EAAE;AAEjE,MAAI,eAAe;AACnB,QAAM,YAA8B,CAAC;AAIrC,QAAM,aAAsC,UACxC,EAAE,WAAW,QAAQ,WAAW,YAAY,QAAQ,WAAW,IAC/D;AAEJ,aAAW,cAAc,gBAAgB;AACvC,UAAM,cAAc,WAAW;AAE/B,UAAM,WAAW,MAAM,WAAW,OAAO,EAAE,IAAI;AAC/C,UAAM,YAAY,SAAS,KAAK,IAAI,CAAC,MAAM,EAAE,EAAE;AAG/C,eAAW,YAAY,WAAW;AAChC,YAAM,YAAY,MAAM,8BAA8B,IAAI,aAAa,UAAU,UAAU;AAC3F,sBAAgB,UAAU;AAC1B,gBAAU,KAAK,GAAG,UAAU,MAAM;AAAA,IACpC;AAGA,QAAI,UAAU,SAAS,GAAG;AACxB,YAAM,SAAS,MAAM,iBAAiB,IAAI,aAAa,WAAW,UAAU;AAC5E,sBAAgB,OAAO;AACvB,gBAAU,KAAK,GAAG,OAAO,MAAM;AAAA,IACjC;AAAA,EACF;AAEA,SAAO,EAAE,SAAS,cAAc,QAAQ,UAAU;AACpD;AAYA,eAAsB,kBACpB,IACA,gBACA,QACA,KACA,SACwB;AAKxB,QAAM,CAAC,aAAa,WAAW,IAAI,MAAM,QAAQ,IAAI;AAAA,IACnD,OAAO,UAAU,EAAE,MAAM,KAAK,qBAAqB,MAAM,OAAO,EAAE,CAAC;AAAA,IACnE,OAAO,UAAU,EAAE,MAAM,KAAK,qBAAqB,MAAM,OAAO,EAAE,CAAC;AAAA,EACrE,CAAC;AACD,QAAM,WAAW,YAAY,OAAO,CAAC,MAAM,EAAE,YAAY,aAAa;AACtE,QAAM,WAAW,YAAY,OAAO,CAAC,MAAM,EAAE,YAAY,aAAa;AAGtE,QAAM,eAAe,oBAAI,IAAY;AACrC,QAAM,WAAgC,CAAC;AACvC,aAAW,QAAQ,CAAC,GAAG,UAAU,GAAG,QAAQ,GAAG;AAC7C,UAAM,QAAQ,iBAAiB,KAAK,MAAM,KAAK,SAAS,KAAK,IAAI;AACjE,QAAI,CAAC,aAAa,IAAI,KAAK,GAAG;AAC5B,mBAAa,IAAI,KAAK;AACtB,e
AAS,KAAK,IAAI;AAAA,IACpB;AAAA,EACF;AAGA,QAAM,6BAA6B,SAAS,yBAAyB;AACrE,QAAM,YAAY,iBAAiB,GAAG;AACtC,MAAI,sBAAiD,EAAE,SAAS,GAAG,QAAQ,CAAC,EAAE;AAE9E,MAAI,4BAA4B;AAC9B,0BAAsB,MAAM;AAAA,MAC1B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAGA,QAAM,aAAa,SAAS,IAAI,CAAC,MAAM,iBAAiB,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,CAAC;AAClF,QAAM,YAAY,CAAC,GAAG,YAAY,SAAS;AAG3C,QAAM,YAAY,KAAK,IAAI,SAAS,aAAa,gBAAgB,cAAc;AAC/E,QAAM,SAAS,MAAM,iBAAiB,IAAI,gBAAgB,WAAW;AAAA,IACnE,GAAG;AAAA,IACH;AAAA,EACF,CAAC;AAID,QAAM,cAAc,KAAK,KAAK,UAAU,SAAS,SAAS;AAC1D,QAAM,iBAAiB,cAAc;AACrC,QAAM,cAAc,CAAC,OAAO,OAAO,KAAK,CAAC,MAAM,EAAE,eAAe,cAAc;AAI9E,QAAM,uBAAuB,cAAc,OAAO,UAAU,IAAI,OAAO;AAEvE,SAAO;AAAA,IACL,SAAS,OAAO,UAAU,oBAAoB;AAAA,IAC9C,SAAS,OAAO;AAAA,IAChB,QAAQ,CAAC,GAAG,OAAO,QAAQ,GAAG,oBAAoB,MAAM;AAAA,IACxD,cAAc;AAAA,IACd;AAAA,EACF;AACF;;;ADtPA;;;AEAO,SAAS,uBAAuB,IAAe,gBAA0C;AAC9F,QAAM,gBAAgB,GAAG,WAAW,cAAc;AAElD,SAAO;AAAA,IACL;AAAA,IAEA,MAAM,OAAO,OAAkD;AAC7D,YAAM,OAAO,MAAM,cAAc,IAAI,KAAK,EAAE,IAAI;AAChD,UAAI,CAAC,KAAK,OAAQ,QAAO;AACzB,aAAO,KAAK,KAAK;AAAA,IACnB;AAAA,IAEA,MAAM,OACJ,OACA,MACA,SACe;AACf,UAAI,SAAS,OAAO;AAClB,cAAM,cAAc,IAAI,KAAK,EAAE,IAAI,MAAM,EAAE,OAAO,KAAK,CAAC;AAAA,MAC1D,OAAO;AACL,cAAM,cAAc,IAAI,KAAK,EAAE,IAAI,IAAI;AAAA,MACzC;AAAA,IACF;AAAA,IAEA,MAAM,UAAU,OAAe,MAA8C;AAC3E,YAAM,cAAc,IAAI,KAAK,EAAE,OAAO,IAAI;AAAA,IAC5C;AAAA,IAEA,MAAM,UAAU,OAA8B;AAC5C,YAAM,cAAc,IAAI,KAAK,EAAE,OAAO;AAAA,IACxC;AAAA,IAEA,MAAM,MAAM,SAAwB,SAAsD;AACxF,UAAI,IAAW;AACf,iBAAW,KAAK,SAAS;AACvB,YAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,KAAK;AAAA,MACpC;AACA,UAAI,SAAS,SAAS;AACpB,YAAI,EAAE,QAAQ,QAAQ,QAAQ,OAAO,QAAQ,QAAQ,aAAa,KAAK;AAAA,MACzE;AACA,UAAI,SAAS,UAAU,QAAW;AAChC,YAAI,EAAE,MAAM,QAAQ,KAAK;AAAA,MAC3B;AACA,YAAM,OAAO,MAAM,EAAE,IAAI;AACzB,aAAO,KAAK,KAAK,IAAI,CAAC,QAAQ,IAAI,KAAK,CAAsB;AAAA,IAC/D;AAAA,EACF;AACF;AAUO,SAAS,yBACd,IACA,gBACA,IACoB;AACpB,QAAM,gBAAgB,GAAG,WAAW,cAAc;AAElD,SAAO;AAAA,IACL,MAAM,OAAO,OAAkD;AAC7D,YAAM,OAAO,MAAM,GAAG,IAAI,cAAc,IAAI,KAAK,CAAC;AAClD,UAAI,CAAC,KAAK,OAAQ,QAAO;AACzB,aAAO,KAAK,KAAK;AAAA,IACnB;AAAA
,IAEA,OAAO,OAAe,MAA+B,SAAqC;AACxF,UAAI,SAAS,OAAO;AAClB,WAAG,IAAI,cAAc,IAAI,KAAK,GAAG,MAAM,EAAE,OAAO,KAAK,CAAC;AAAA,MACxD,OAAO;AACL,WAAG,IAAI,cAAc,IAAI,KAAK,GAAG,IAAI;AAAA,MACvC;AAAA,IACF;AAAA,IAEA,UAAU,OAAe,MAAqC;AAC5D,SAAG,OAAO,cAAc,IAAI,KAAK,GAAG,IAAI;AAAA,IAC1C;AAAA,IAEA,UAAU,OAAqB;AAC7B,SAAG,OAAO,cAAc,IAAI,KAAK,CAAC;AAAA,IACpC;AAAA,IAEA,MAAM,MAAM,SAAwB,SAAsD;AACxF,UAAI,IAAW;AACf,iBAAW,KAAK,SAAS;AACvB,YAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,KAAK;AAAA,MACpC;AACA,UAAI,SAAS,SAAS;AACpB,YAAI,EAAE,QAAQ,QAAQ,QAAQ,OAAO,QAAQ,QAAQ,aAAa,KAAK;AAAA,MACzE;AACA,UAAI,SAAS,UAAU,QAAW;AAChC,YAAI,EAAE,MAAM,QAAQ,KAAK;AAAA,MAC3B;AACA,YAAM,OAAO,MAAM,GAAG,IAAI,CAAC;AAC3B,aAAO,KAAK,KAAK,IAAI,CAAC,QAAQ,IAAI,KAAK,CAAsB;AAAA,IAC/D;AAAA,EACF;AACF;AASO,SAAS,mBAAmB,IAAe,gBAAsC;AACtF,QAAM,gBAAgB,GAAG,WAAW,cAAc;AAClD,QAAM,QAAQ,GAAG,MAAM;AAEvB,SAAO;AAAA,IACL,OAAO,OAAe,MAA+B,SAAqC;AACxF,UAAI,SAAS,OAAO;AAClB,cAAM,IAAI,cAAc,IAAI,KAAK,GAAG,MAAM,EAAE,OAAO,KAAK,CAAC;AAAA,MAC3D,OAAO;AACL,cAAM,IAAI,cAAc,IAAI,KAAK,GAAG,IAAI;AAAA,MAC1C;AAAA,IACF;AAAA,IAEA,UAAU,OAAe,MAAqC;AAC5D,YAAM,OAAO,cAAc,IAAI,KAAK,GAAG,IAAI;AAAA,IAC7C;AAAA,IAEA,UAAU,OAAqB;AAC7B,YAAM,OAAO,cAAc,IAAI,KAAK,CAAC;AAAA,IACvC;AAAA,IAEA,MAAM,SAAwB;AAC5B,YAAM,MAAM,OAAO;AAAA,IACrB;AAAA,EACF;AACF;;;AClIA,IAAI,aAAsC;AAE1C,eAAe,eAA0C;AACvD,MAAI,CAAC,YAAY;AACf,UAAM,MAAM,MAAM,OAAO,yBAAyB;AAClD,iBAAa,IAAI;AAAA,EACnB;AACA,SAAO;AACT;AAWA,SAAS,sBAAsB,GAAkB,QAAkC;AACjF,QAAM,EAAE,OAAO,WAAW,IAAI,MAAM,IAAI;AAExC,UAAQ,IAAI;AAAA,IACV,KAAK;AACH,aAAO,EAAE,MAAM,WAAW,KAAK;AAAA,IACjC,KAAK;AACH,aAAO,EAAE,SAAS,WAAW,KAAK;AAAA,IACpC,KAAK;AACH,aAAO,EAAE,SAAS,WAAW,KAAK;AAAA,IACpC,KAAK;AACH,aAAO,EAAE,gBAAgB,WAAW,KAAK;AAAA,IAC3C,KAAK;AACH,aAAO,EAAE,YAAY,WAAW,KAAK;AAAA,IACvC,KAAK;AACH,aAAO,EAAE,mBAAmB,WAAW,KAAK;AAAA,IAC9C,KAAK;AACH,aAAO,EAAE,SAAS,WAAW,KAAuB;AAAA,IACtD,KAAK;AACH,aAAO,EAAE,YAAY,WAAW,KAAuB;AAAA,IACzD,KAAK;AACH,aAAO,EAAE,cAAc,WAAW,KAAK;AAAA,IACzC,KAAK;AACH,aAAO,EAAE,iBAAiB,WAAW,KAAuB;AAAA,IAC9D;AACE,YAAM,IAAI,MAAM,4CAA4C,EAAE,EAAE;AAAA,EACpE;AACF;AAEO,SAAS,2BACd
,IACA,gBACsB;AACtB,SAAO;AAAA,IACL,MAAM,MAAM,SAAwB,SAAsD;AACxF,YAAM,IAAI,MAAM,aAAa;AAG7B,UAAI,WAAW,GAAG,SAAS,EAAE,WAAW,cAAc;AAGtD,UAAI,QAAQ,WAAW,GAAG;AACxB,mBAAW,SAAS,MAAM,sBAAsB,GAAG,QAAQ,CAAC,CAAC,CAAC;AAAA,MAChE,WAAW,QAAQ,SAAS,GAAG;AAC7B,cAAM,CAAC,OAAO,QAAQ,GAAG,IAAI,IAAI,QAAQ,IAAI,CAAC,MAAM,sBAAsB,GAAG,CAAC,CAAC;AAC/E,mBAAW,SAAS,MAAM,EAAE,IAAI,OAAO,QAAQ,GAAG,IAAI,CAAC;AAAA,MACzD;AAGA,UAAI,SAAS,SAAS;AACpB,cAAM,IAAI,EAAE,MAAM,QAAQ,QAAQ,KAAK;AACvC,cAAM,WAAW,QAAQ,QAAQ,cAAc,SAAS,EAAE,WAAW,IAAI,EAAE,UAAU;AACrF,mBAAW,SAAS,KAAK,QAAQ;AAAA,MACnC;AAGA,UAAI,SAAS,UAAU,QAAW;AAChC,mBAAW,SAAS,MAAM,QAAQ,KAAK;AAAA,MACzC;AAEA,YAAM,OAAO,MAAM,SAAS,QAAQ;AACpC,aAAO,KAAK,QAAQ,IAAI,CAAC,MAAM,EAAE,KAAK,CAAsB;AAAA,IAC9D;AAAA,EACF;AACF;;;AHpDA,SAAS,eAAe,IAAwB;AAC9C,iBAAe,GAAG,IAAI;AACtB,SAAO,QAAQ,GAAG,KAAK,KAAK,GAAG,CAAC;AAClC;AAeA,SAAS,qBAAqB,QAAuB,IAAwC;AAC3F,+BAA6B,MAAM;AACnC,QAAM,MAA+B;AAAA,IACnC,WAAW,6BAAW,gBAAgB;AAAA,EACxC;AACA,MAAI,OAAO,aAAa;AACtB,QAAI,OAAO,0BAA0B,OAAO,aAAa,EAAE;AAAA,EAC7D,WAAW,OAAO,SAAS;AACzB,eAAW,MAAM,OAAO,SAAS;AAC/B,YAAM,MAAM,eAAe,EAAE;AAC7B,UAAI,GAAG,IAAI,GAAG,SAAS,6BAAW,OAAO,IAAI,GAAG;AAAA,IAClD;AAAA,EACF;AACA,MAAI,OAAO,MAAM,QAAW;AAC1B,QAAI,IAAI,OAAO;AAAA,EACjB;AACA,SAAO;AACT;AAMA,SAAS,oBAAoB,QAAiD;AAC5E,QAAM,MAAM,6BAAW,gBAAgB;AACvC,QAAM,MAA+B;AAAA,IACnC,OAAO,OAAO;AAAA,IACd,MAAM,OAAO;AAAA,IACb,SAAS,OAAO;AAAA,IAChB,OAAO,OAAO;AAAA,IACd,MAAM,OAAO;AAAA,IACb,MAAM,OAAO;AAAA,IACb,WAAW;AAAA,IACX,WAAW;AAAA,EACb;AACA,MAAI,OAAO,MAAM,OAAW,KAAI,IAAI,OAAO;AAC3C,SAAO;AACT;AAEA,IAAM,8BAAN,MAAgE;AAAA,EAC9D,YACmB,SACA,IACjB;AAFiB;AACA;AAAA,EAChB;AAAA,EAEH,OAAO,OAAkD;AACvD,WAAO,KAAK,QAAQ,OAAO,KAAK;AAAA,EAClC;AAAA,EAEA,MAAM,SAAwB,SAAsD;AAClF,WAAO,KAAK,QAAQ,MAAM,SAAS,OAAO;AAAA,EAC5C;AAAA,EAEA,MAAM,OAAO,OAAe,QAAwB,MAAgC;AAClF,SAAK,QAAQ;AAAA,MACX;AAAA,MACA,oBAAoB,MAAM;AAAA,MAC1B,SAAS,UAAU,EAAE,OAAO,KAAK,IAAI;AAAA,IACvC;AAAA,EACF;AAAA,EAEA,MAAM,UAAU,OAAe,QAAsC;AACnE,SAAK,QAAQ,UAAU,OAAO,qBAAqB,QAAQ,KAAK,EAAE,CAAC;AAAA,EACrE;AAAA,EAEA,MAAM,UAAU,OAA8B;AAC5C,SAAK,QAAQ,UAAU,KAAK;AAAA,EAC9B;
AACF;AAEA,IAAM,wBAAN,MAAoD;AAAA,EAClD,YACmB,SACA,IACjB;AAFiB;AACA;AAAA,EAChB;AAAA,EAEH,OAAO,OAAe,QAAwB,MAAuB;AACnE,SAAK,QAAQ;AAAA,MACX;AAAA,MACA,oBAAoB,MAAM;AAAA,MAC1B,SAAS,UAAU,EAAE,OAAO,KAAK,IAAI;AAAA,IACvC;AAAA,EACF;AAAA,EAEA,UAAU,OAAe,QAA6B;AACpD,SAAK,QAAQ,UAAU,OAAO,qBAAqB,QAAQ,KAAK,EAAE,CAAC;AAAA,EACrE;AAAA,EAEA,UAAU,OAAqB;AAC7B,SAAK,QAAQ,UAAU,KAAK;AAAA,EAC9B;AAAA,EAEA,SAAwB;AACtB,WAAO,KAAK,QAAQ,OAAO;AAAA,EAC7B;AACF;AAEA,IAAM,uBAAN,MAAM,sBAA+C;AAAA,EAMnD,YACmB,IACjB,gBACiB,WACjB,WACA;AAJiB;AAEA;AAGjB,SAAK,iBAAiB;AACtB,SAAK,YAAY;AACjB,SAAK,UAAU,uBAAuB,IAAI,cAAc;AACxD,QAAI,cAAc,YAAY;AAC5B,WAAK,kBAAkB,2BAA2B,IAAI,cAAc;AAAA,IACtE;AAAA,EACF;AAAA,EAjBS;AAAA,EACA;AAAA,EACQ;AAAA,EACA;AAAA;AAAA,EAkBjB,OAAO,OAAkD;AACvD,WAAO,KAAK,QAAQ,OAAO,KAAK;AAAA,EAClC;AAAA,EAEA,MAAM,SAAwB,SAAsD;AAClF,QAAI,KAAK,iBAAiB;AACxB,aAAO,KAAK,gBAAgB,MAAM,SAAS,OAAO;AAAA,IACpD;AACA,WAAO,KAAK,QAAQ,MAAM,SAAS,OAAO;AAAA,EAC5C;AAAA;AAAA,EAIA,OAAO,OAAe,QAAwB,MAAgC;AAC5E,WAAO,KAAK,QAAQ;AAAA,MAClB;AAAA,MACA,oBAAoB,MAAM;AAAA,MAC1B,SAAS,UAAU,EAAE,OAAO,KAAK,IAAI;AAAA,IACvC;AAAA,EACF;AAAA,EAEA,UAAU,OAAe,QAAsC;AAC7D,WAAO,KAAK,QAAQ,UAAU,OAAO,qBAAqB,QAAQ,KAAK,EAAE,CAAC;AAAA,EAC5E;AAAA,EAEA,UAAU,OAA8B;AACtC,WAAO,KAAK,QAAQ,UAAU,KAAK;AAAA,EACrC;AAAA;AAAA,EAIA,eAAkB,IAAwD;AACxE,WAAO,KAAK,GAAG,eAAe,OAAO,gBAA6B;AAChE,YAAM,YAAY,yBAAyB,KAAK,IAAI,KAAK,gBAAgB,WAAW;AACpF,aAAO,GAAG,IAAI,4BAA4B,WAAW,KAAK,EAAE,CAAC;AAAA,IAC/D,CAAC;AAAA,EACH;AAAA,EAEA,cAA4B;AAC1B,UAAM,eAAe,mBAAmB,KAAK,IAAI,KAAK,cAAc;AACpE,WAAO,IAAI,sBAAsB,cAAc,KAAK,EAAE;AAAA,EACxD;AAAA;AAAA,EAIA,SAAS,eAAuB,MAA8B;AAC5D,UAAM,UAAU,GAAG,KAAK,cAAc,IAAI,aAAa,IAAI,IAAI;AAC/D,UAAM,WAAW,KAAK,YAAY,GAAG,KAAK,SAAS,IAAI,IAAI,KAAK;AAChE,WAAO,IAAI,sBAAqB,KAAK,IAAI,SAAS,KAAK,WAAW,QAAQ;AAAA,EAC5E;AAAA;AAAA,EAIA,kBACE,KACA,QACA,SACwB;AACxB,WAAO,kBAAsB,KAAK,IAAI,KAAK,gBAAgB,QAAQ,KAAK,OAAO;AAAA,EACjF;AAAA,EAEA,gBACE,QACA,QACA,SACqB;AACrB,WAAO,gBAAoB,KAAK,IAAI,KAAK,gBAAgB,QAAQ,QAAQ,OAAO;AAAA,EAClF;AAAA;AAAA,EAIA,MAAM,gBACJ,QACA,gBAC8B;AAC9B,UAAM,OAAO,kBAAkB,KAAK,eAAe,MAAM,GAAG,EA
AE,IAAI;AAClE,UAAM,OAAO,mBAAmB,MAAM;AAEtC,QAAI,KAAK,aAAa,OAAO;AAC3B,YAAM,IAAI;AAAA,QACR;AAAA,QAEA;AAAA,MACF;AAAA,IACF;AAEA,UAAM,qBAAqB,KAAK,GAAG,gBAAgB,IAAI;AACvD,QAAI,IAAW;AACf,eAAW,KAAK,KAAK,SAAS;AAC5B,UAAI,EAAE,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,KAAK;AAAA,IACpC;AACA,QAAI,KAAK,SAAS,SAAS;AACzB,UAAI,EAAE,QAAQ,KAAK,QAAQ,QAAQ,OAAO,KAAK,QAAQ,QAAQ,aAAa,KAAK;AAAA,IACnF;AACA,QAAI,KAAK,SAAS,UAAU,QAAW;AACrC,UAAI,EAAE,MAAM,KAAK,QAAQ,KAAK;AAAA,IAChC;AACA,UAAM,OAAO,MAAM,EAAE,IAAI;AACzB,WAAO,KAAK,KAAK,IAAI,CAAC,QAAQ,IAAI,KAAK,CAAsB;AAAA,EAC/D;AACF;AASO,SAAS,uBACd,IACA,gBACA,UAAmC,CAAC,GACpB;AAChB,QAAM,YAAY,QAAQ,aAAa;AACvC,QAAM,YAAY,QAAQ,aAAa;AACvC,SAAO,IAAI,qBAAqB,IAAI,gBAAgB,WAAW,SAAS;AAC1E;;;AI7RA,IAAI,sBAAsB;AAYnB,SAAS,kBACd,IACA,gBACA,SACkC;AAClC,QAAM,gBAAgB,SAAS,aAAa;AAC5C,QAAM,aAAa,CAAC,CAAC,QAAQ,IAAI;AACjC,QAAM,gBAA2B,aAAa,aAAa;AAE3D,MACE,kBAAkB,cAClB,CAAC,cACD,kBAAkB,cAClB,CAAC,qBACD;AACA,0BAAsB;AACtB,YAAQ;AAAA,MACN;AAAA,IAIF;AAAA,EACF;AAEA,QAAM,UAAU,uBAAuB,IAAI,gBAAgB,EAAE,WAAW,cAAc,CAAC;AAEvF,MAAI;AACJ,MAAI,SAAS,cAAc,cAAc,QAAQ,aAAa,eAAe,gBAAgB;AAC3F,kBAAc,uBAAuB,IAAI,QAAQ,aAAa,YAAY;AAAA,MACxE,WAAW;AAAA,IACb,CAAC;AAAA,EACH;AAEA,SAAO,IAAI,gBAAgB,SAAS,SAAS,WAAW;AAC1D;;;ACrEA,oBAAuB;AAEhB,SAAS,aAAqB;AACnC,aAAO,sBAAO;AAChB;;;ACiEA,SAAS,eAAe,GAA4C;AAClE,SAAO,OAAO,MAAM,WAAW,EAAE,MAAM,GAAG,MAAM,MAAM,IAAI,EAAE,MAAM,EAAE,MAAM,MAAM,CAAC,CAAC,EAAE,KAAK;AAC3F;AAEA,SAAS,gBAAgB,MAAiB,OAAuB;AAC/D,QAAM,aAAa,KAAK,OAAO,IAAI,cAAc;AACjD,SAAO,GAAG,KAAK,KAAK,KAAK,UAAU,UAAU,CAAC;AAChD;AAEA,SAAS,kBAAkB,MAAwC;AACjE,SAAO,KAAK,OAAO,IAAI,CAAC,MAAM;AAC5B,UAAM,IAAI,eAAe,CAAC;AAC1B,WAAO;AAAA,MACL,WAAW,EAAE;AAAA,MACb,OAAO,EAAE,OAAO,eAAe;AAAA,IACjC;AAAA,EACF,CAAC;AACH;AAEA,IAAI,uBAAuB;AAQpB,SAAS,oBACd,YACA,UAAgC,CAAC,GACX;AACtB,QAAM,OAAO,QAAQ,eAAe,CAAC,GAAG,oBAAoB;AAC5D,QAAM,eAAe,QAAQ,mBAAmB,CAAC,GAAG,QAAQ,CAAC,MAAM;AACjE,QAAI,CAAC,EAAE,QAAS,QAAO,CAAC;AACxB,WAAO,EAAE;AAAA,EACX,CAAC;AAQD,QAAM,mBAAmB,oBAAI,IAAY;AACzC,aAAW,SAAS,QAAQ,mBAAmB,CAAC,GAAG;AACjD,QAAI,MAAM,YAAa,kBAAiB,IAAI,MAAM,WAAW;AAAA,EAC/D;AACA,M
AAI,QAAQ,UAAU;AACpB,eAAW,CAAC,EAAE,MAAM,KAAK,QAAQ,SAAS,OAAO;AAC/C,YAAM,KAAK,OAAO,eAAe,OAAO,UAAU;AAClD,UAAI,GAAI,kBAAiB,IAAI,EAAE;AAAA,IACjC;AAAA,EACF;AAEA,QAAM,WAAW,CAAC,GAAG,MAAM,GAAG,WAAW;AACzC,QAAM,OAAO,oBAAI,IAAY;AAC7B,QAAM,UAA4B,CAAC;AAEnC,aAAW,QAAQ,UAAU;AAC3B,QAAI,CAAC,KAAK,UAAU,KAAK,OAAO,SAAS,GAAG;AAE1C;AAAA,IACF;AACA,QAAI,KAAK,OAAO;AACd,UAAI,CAAC,sBAAsB;AACzB,+BAAuB;AACvB,gBAAQ;AAAA,UACN;AAAA,QAGF;AAAA,MACF;AACA;AAAA,IACF;AAEA,UAAM,SAAS,kBAAkB,IAAI;AAErC,UAAM,SAAS,gBAAgB,MAAM,OAAO,UAAU,EAAE;AACxD,QAAI,CAAC,KAAK,IAAI,MAAM,GAAG;AACrB,WAAK,IAAI,MAAM;AACf,cAAQ,KAAK;AAAA,QACX,iBAAiB;AAAA,QACjB,YAAY;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH;AAKA,eAAW,MAAM,kBAAkB;AACjC,YAAM,QAAQ,gBAAgB,MAAM,MAAM,EAAE,EAAE;AAC9C,UAAI,KAAK,IAAI,KAAK,EAAG;AACrB,WAAK,IAAI,KAAK;AACd,cAAQ,KAAK;AAAA,QACX,iBAAiB;AAAA,QACjB,YAAY;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO,EAAE,SAAS,gBAAgB,CAAC,EAAE;AACvC;;;AC5KA,uBAAiB;;;ACAjB,IAAAC,kBAA6B;AAC7B,IAAAC,oBAAqB;AAErB,IAAM,eAAe,CAAC,uBAAuB,uBAAuB,sBAAsB;AAC1F,IAAM,eAAe;AAMd,SAAS,eAAe,KAAsB;AACnD,QAAM,MAAM,OAAO,QAAQ,IAAI;AAC/B,aAAW,QAAQ,cAAc;AAC/B,QAAI;AACF,YAAM,cAAU,kCAAa,wBAAK,KAAK,IAAI,GAAG,MAAM;AACpD,YAAM,cAAc,QAAQ,MAAM,yBAAyB,IAAI,CAAC,KAAK;AACrE,YAAM,YAAY,YAAY,MAAM,kBAAkB;AACtD,UAAI,UAAW,QAAO,SAAS,UAAU,CAAC,GAAG,EAAE;AAAA,IACjD,QAAQ;AACN;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;;;ACrBO,SAAS,gBAAgB,GAA4D;AAC1F,MAAI,CAAC,EAAG,QAAO;AACf,QAAM,MAAwB,EAAE,MAAM,EAAE,OAAiB,KAAK,EAAE,KAAe;AAC/E,QAAM,OAAO,EAAE;AACf,MAAI,QAAQ,OAAO,SAAS,YAAY,OAAO,KAAK,IAAI,EAAE,SAAS,GAAG;AACpE,QAAI,OAAO;AAAA,EACb;AACA,SAAO;AACT;AAEO,SAAS,cAAc,GAA0D;AACtF,MAAI,CAAC,EAAG,QAAO;AACf,QAAM,MAAsB;AAAA,IAC1B,UAAU,EAAE;AAAA,IACZ,SAAS,EAAE;AAAA,IACX,UAAU,EAAE;AAAA,IACZ,QAAQ,EAAE;AAAA,IACV,OAAO,EAAE;AAAA,EACX;AACA,QAAM,OAAO,EAAE;AACf,MAAI,QAAQ,OAAO,SAAS,YAAY,OAAO,KAAK,IAAI,EAAE,SAAS,GAAG;AACpE,QAAI,OAAO;AAAA,EACb;AACA,SAAO;AACT;;;AFAO,IAAM,mBAAN,cAA+B,MAAM;AAAA,EAC1C,YACE,SACgB,MAChB;AACA,UAAM,OAAO;AAFG;AAGhB,SAAK,OAAO;AAAA,EACd;AACF;AAIA,SAAS,cAAc,OAAgB,MAAuC;AAC5E,MAAI,OAAO,UAAU
,YAAY,MAAM,WAAW,GAAG;AACnD,UAAM,IAAI,iBAAiB,GAAG,IAAI,+BAA+B,kBAAkB;AAAA,EACrF;AACF;AAEA,SAAS,SAAS,OAA2B,KAAa,KAAa,UAA0B;AAC/F,MAAI,SAAS,KAAM,QAAO;AAC1B,MAAI,CAAC,OAAO,UAAU,KAAK,GAAG;AAC5B,UAAM,IAAI,iBAAiB,4BAA4B,kBAAkB;AAAA,EAC3E;AACA,SAAO,KAAK,IAAI,KAAK,KAAK,IAAI,KAAK,KAAK,CAAC;AAC3C;AAEA,SAAS,gBAAgB,KAA+B;AACtD,MAAI,OAAO,QAAQ,QAAQ,SAAS,QAAQ,QAAQ;AAClD,UAAM,IAAI,iBAAiB,mCAAmC,kBAAkB;AAAA,EAClF;AACF;AAIA,SAAS,QAAQ,KAA8B;AAC7C,SAAO,IAAI,QAAQ,CAACC,UAAS,WAAW;AACtC,qBAAAC,QACG,IAAI,KAAK,CAAC,QAAQ;AACjB,UAAI,OAAO;AACX,UAAI,GAAG,QAAQ,CAAC,MAAe,QAAQ,CAAE;AACzC,UAAI,GAAG,OAAO,MAAMD,SAAQ,IAAI,CAAC;AAAA,IACnC,CAAC,EACA,GAAG,SAAS,CAAC,QAAQ;AACpB,aAAO,IAAI,iBAAiB,sBAAsB,IAAI,OAAO,IAAI,mBAAmB,CAAC;AAAA,IACvF,CAAC;AAAA,EACL,CAAC;AACH;AAEA,SAAS,SAAS,KAAa,SAAkC;AAC/D,QAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,SAAO,IAAI,QAAQ,CAACA,UAAS,WAAW;AACtC,UAAM,MAAM,iBAAAC,QAAK;AAAA,MACf;AAAA,QACE,UAAU,OAAO;AAAA,QACjB,MAAM,OAAO;AAAA,QACb,MAAM,OAAO;AAAA,QACb,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,UAChB,kBAAkB,OAAO,WAAW,OAAO;AAAA,QAC7C;AAAA,MACF;AAAA,MACA,CAAC,QAAQ;AACP,YAAI,OAAO;AACX,YAAI,GAAG,QAAQ,CAAC,MAAe,QAAQ,CAAE;AACzC,YAAI,GAAG,OAAO,MAAMD,SAAQ,IAAI,CAAC;AAAA,MACnC;AAAA,IACF;AACA,QAAI,GAAG,SAAS,CAAC,QAAQ;AACvB,aAAO,IAAI,iBAAiB,sBAAsB,IAAI,OAAO,IAAI,mBAAmB,CAAC;AAAA,IACvF,CAAC;AACD,QAAI,MAAM,OAAO;AACjB,QAAI,IAAI;AAAA,EACV,CAAC;AACH;AAEA,SAAS,kBAAkB,KAAa,WAA4B;AAClE,MAAI;AACJ,MAAI;AACF,aAAS,KAAK,MAAM,GAAG;AAAA,EACzB,QAAQ;AACN,UAAM,IAAI;AAAA,MACR,qBAAqB,SAAS,KAAK,IAAI,MAAM,GAAG,GAAG,CAAC;AAAA,MACpD;AAAA,IACF;AAAA,EACF;AACA,MAAI,OAAO,OAAO;AAChB,UAAM,MACJ,OAAO,OAAO,UAAU,YAAY,OAAO,UAAU,OAC/C,OAAO,MAAkC,WAAW,KAAK,UAAU,OAAO,KAAK,IACjF,OAAO,OAAO,KAAK;AACzB,UAAM,IAAI,iBAAiB,qBAAqB,SAAS,KAAK,GAAG,IAAI,cAAc;AAAA,EACrF;AACA,SAAQ,OAAO,QAAoC,QAAQ;AAC7D;AAIO,IAAM,cAAN,MAAkB;AAAA,EACN;AAAA,EAEjB,YAAY,SAA8B;AACxC,UAAM,OAAO,SAAS,QAAQ;AAC9B,UAAM,OAAO,SAAS,QAAQ,eAAe;AAC7C,SAAK,UAAU,UAAU,IAAI,IAAI,IAAI;AAAA,EACvC;AAAA,EAEA,MAAc,MAAM,WAAmB,OAAmC;AACxE,UAAM,KAAK,SAAS,OAAO,UAAU,mBAAmB,KAAK,UAAU,KAAK,CAAC,CAAC,KAAK;AACnF
,UAAM,MAAM,GAAG,KAAK,OAAO,IAAI,SAAS,GAAG,EAAE;AAC7C,UAAM,MAAM,MAAM,QAAQ,GAAG;AAC7B,WAAO,kBAAkB,KAAK,SAAS;AAAA,EACzC;AAAA,EAEA,MAAc,OAAO,WAAmB,OAAkC;AACxE,UAAM,MAAM,GAAG,KAAK,OAAO,IAAI,SAAS;AACxC,UAAM,MAAM,MAAM,SAAS,KAAK,KAAK,UAAU,KAAK,CAAC;AACrD,WAAO,kBAAkB,KAAK,SAAS;AAAA,EACzC;AAAA;AAAA,EAIA,MAAM,YAAmC;AACvC,UAAM,OAAQ,MAAM,KAAK,MAAM,WAAW;AAC1C,WAAO;AAAA,MACL,YAAa,KAAK,aAA2B,CAAC,GAAG;AAAA,QAC/C,CAAC,MACE,OAAO,MAAM,YAAY,MAAM,OAAQ,EAA8B,OAAO;AAAA,MACjF;AAAA,MACA,YAAa,KAAK,aAA2B,CAAC,GAAG,IAAI,CAAC,MAAM;AAC1D,cAAM,IAAI;AACV,eAAO;AAAA,UACL,UAAU,EAAE;AAAA,UACZ,MAAM,EAAE;AAAA,UACR,IAAI,EAAE;AAAA,UACN,cAAe,EAAE,gBAA2B;AAAA,QAC9C;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,MAAM,cAAc,OAAsD;AACxE,kBAAc,MAAM,KAAK,KAAK;AAC9B,UAAM,OAAQ,MAAM,KAAK,MAAM,iBAAiB,EAAE,KAAK,MAAM,IAAI,CAAC;AAClE,WAAO;AAAA,MACL,MAAM,gBAAgB,KAAK,IAAsC;AAAA,MACjE,WAAY,KAAK,YAA0C,CAAC,GACzD,IAAI,aAAa,EACjB,OAAO,OAAO;AAAA,MACjB,UAAW,KAAK,WAAyC,CAAC,GACvD,IAAI,aAAa,EACjB,OAAO,OAAO;AAAA,IACnB;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,OAA+C;AAC5D,UAAM,QAAQ,SAAS,MAAM,OAAO,GAAG,KAAK,EAAE;AAC9C,oBAAgB,MAAM,OAAO;AAC7B,UAAM,OAAQ,MAAM,KAAK,MAAM,YAAY;AAAA,MACzC,MAAM,MAAM;AAAA,MACZ;AAAA,MACA,YAAY,MAAM;AAAA,MAClB,QAAQ,MAAM;AAAA,MACd,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,IACf,CAAC;AACD,WAAO;AAAA,MACL,QAAS,KAAK,SAAuC,CAAC,GACnD,IAAI,eAAe,EACnB,OAAO,OAAO;AAAA,MACjB,SAAU,KAAK,WAAuB;AAAA,MACtC,YAAY,KAAK;AAAA,IACnB;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,OAA+C;AAC5D,UAAM,YACJ,MAAM,SACN,MAAM,QACN,MAAM,WACN,MAAM,SACN,MAAM,QACL,MAAM,SAAS,MAAM,MAAM,SAAS;AACvC,QAAI,CAAC,WAAW;AACd,YAAM,IAAI;AAAA,QACR;AAAA,QACA;AAAA,MACF;AAAA,IACF;AACA,UAAM,QAAQ,SAAS,MAAM,OAAO,GAAG,KAAK,EAAE;AAC9C,oBAAgB,MAAM,OAAO;AAC7B,UAAM,OAAQ,MAAM,KAAK,MAAM,YAAY;AAAA,MACzC,OAAO,MAAM;AAAA,MACb,MAAM,MAAM;AAAA,MACZ,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,MACb,MAAM,MAAM;AAAA,MACZ;AAAA,MACA,YAAY,MAAM;AAAA,MAClB,QAAQ,MAAM;AAAA,MACd,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,IACf,CAAC;AACD,WAAO;AAAA,MACL,QAAS,KAAK,SAAuC,CAAC,GACnD,IAAI,aAAa,EACjB,OAAO,OAAO;AAAA,MACjB,SAAU,KAAK,WAAuB;AAAA,MACtC,YAAY,KAAK;
AAAA,IACnB;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,OAA+C;AAC5D,kBAAc,MAAM,UAAU,UAAU;AACxC,QAAI,CAAC,MAAM,QAAQ,MAAM,KAAK,WAAW,GAAG;AAC1C,YAAM,IAAI,iBAAiB,sCAAsC,kBAAkB;AAAA,IACrF;AACA,aAAS,IAAI,GAAG,IAAI,MAAM,KAAK,QAAQ,KAAK;AAC1C,YAAM,MAAM,MAAM,KAAK,CAAC;AACxB,oBAAc,IAAI,SAAS,QAAQ,CAAC,WAAW;AAC/C,UAAI,IAAI,aAAa,QAAQ,IAAI,cAAc,aAAa,IAAI,cAAc,WAAW;AACvF,cAAM,IAAI;AAAA,UACR,QAAQ,CAAC;AAAA,UACT;AAAA,QACF;AAAA,MACF;AACA,UAAI,IAAI,SAAS,SAAS,CAAC,OAAO,UAAU,IAAI,KAAK,KAAK,IAAI,QAAQ,IAAI;AACxE,cAAM,IAAI;AAAA,UACR,QAAQ,CAAC;AAAA,UACT;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,QAAI,MAAM,YAAY,SAAS,CAAC,OAAO,UAAU,MAAM,QAAQ,KAAK,MAAM,WAAW,IAAI;AACvF,YAAM,IAAI,iBAAiB,uCAAuC,kBAAkB;AAAA,IACtF;AACA,QACE,MAAM,eAAe,SACpB,CAAC,OAAO,UAAU,MAAM,WAAW,KAAK,MAAM,cAAc,IAC7D;AACA,YAAM,IAAI,iBAAiB,0CAA0C,kBAAkB;AAAA,IACzF;AAEA,UAAM,OAAQ,MAAM,KAAK,OAAO,YAAY,KAAK;AACjD,WAAO;AAAA,MACL,OAAQ,KAAK,QAAsC,CAAC,GAAG;AAAA,QACrD,CAAC,OAA0B;AAAA,UACzB,UAAU,EAAE;AAAA,UACZ,WAAW,EAAE;AAAA,UACb,OAAO,EAAE;AAAA,UACT,YAAa,EAAE,SAAuB,CAAC,GAAG;AAAA,UAC1C,QAAS,EAAE,SAAuC,CAAC,GAChD,IAAI,aAAa,EACjB,OAAO,OAAO;AAAA,UACjB,WAAY,EAAE,aAAyB;AAAA,QACzC;AAAA,MACF;AAAA,MACA,YAAa,KAAK,cAAyB;AAAA,MAC3C,WAAY,KAAK,aAAyB;AAAA,IAC5C;AAAA,EACF;AAAA,EAEA,MAAM,OAAO,OAA2C;AACtD,kBAAc,MAAM,GAAG,GAAG;AAC1B,UAAM,QAAQ,SAAS,MAAM,OAAO,GAAG,IAAI,EAAE;AAC7C,UAAM,OAAQ,MAAM,KAAK,MAAM,UAAU,EAAE,GAAG,MAAM,GAAG,MAAM,CAAC;AAC9D,WAAO;AAAA,MACL,UAAW,KAAK,WAAyC,CAAC,GACvD,IAAI,CAAC,MAAM;AACV,cAAM,OAAO,gBAAgB,CAAC;AAC9B,YAAI,CAAC,KAAM,QAAO;AAClB,eAAO;AAAA,UACL,GAAG;AAAA,UACH,WAAY,EAAE,cAAyB;AAAA,QACzC;AAAA,MACF,CAAC,EACA,OAAO,OAAO;AAAA,IACnB;AAAA,EACF;AACF;;;AGjTA,IAAAE,oBAA2B;AAKpB,SAAS,gBACd,OACA,KACA,MACa;AACb,QAAM,MAAM,6BAAW,gBAAgB;AACvC,SAAO;AAAA,IACL;AAAA,IACA,MAAM;AAAA,IACN,SAAS;AAAA,IACT,OAAO;AAAA,IACP,MAAM;AAAA,IACN;AAAA,IACA,WAAW;AAAA,IACX,WAAW;AAAA,EACb;AACF;AAEO,SAAS,gBACd,OACA,MACA,SACA,OACA,MACA,MACa;AACb,QAAM,MAAM,6BAAW,gBAAgB;AACvC,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,WAAW;AAAA,IACX,WAAW;AAAA,EACb;AACF;;;ACQO,SAAS,kBA
AkB,OAAsC;AACtE,MAAI,UAAU,GAAI,QAAO,CAAC;AAC1B,QAAM,QAAQ,MAAM,MAAM,GAAG;AAC7B,MAAI,MAAM,SAAS,MAAM,GAAG;AAC1B,UAAM,IAAI;AAAA,MACR,sCAAsC,KAAK;AAAA,IAE7C;AAAA,EACF;AACA,QAAM,MAA6B,CAAC;AACpC,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,GAAG;AACxC,UAAM,MAAM,MAAM,CAAC;AACnB,UAAM,OAAO,MAAM,IAAI,CAAC;AACxB,QAAI,CAAC,OAAO,CAAC,MAAM;AACjB,YAAM,IAAI;AAAA,QACR,sCAAsC,KAAK,2CAA2C,CAAC;AAAA,MACzF;AAAA,IACF;AACA,QAAI,KAAK,EAAE,KAAK,KAAK,CAAC;AAAA,EACxB;AACA,SAAO;AACT;AAsBO,SAAS,qBAAqB,cAAsB,KAA4B;AACrF,MAAI,CAAC,IAAK,QAAO;AACjB,MAAI,iBAAiB,GAAI,QAAO;AAChC,QAAM,QAAQ,aAAa,MAAM,GAAG;AAEpC,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,GAAG;AACxC,QAAI,MAAM,CAAC,MAAM,KAAK;AACpB,aAAO,MAAM,IAAI,KAAK,MAAM,MAAM,GAAG,CAAC,EAAE,KAAK,GAAG;AAAA,IAClD;AAAA,EACF;AACA,SAAO;AACT;AAKO,SAAS,mBAAmB,cAAsB,KAAsB;AAC7E,SAAO,qBAAqB,cAAc,GAAG,MAAM;AACrD;AASO,SAAS,mBAAmB,aAAqB,KAAa,MAAsB;AACzF,MAAI,CAAC,OAAO,IAAI,SAAS,GAAG,GAAG;AAC7B,UAAM,IAAI;AAAA,MACR,4EAA4E,GAAG;AAAA,IACjF;AAAA,EACF;AACA,MAAI,CAAC,QAAQ,KAAK,SAAS,GAAG,GAAG;AAC/B,UAAM,IAAI;AAAA,MACR,6EAA6E,IAAI;AAAA,IACnF;AAAA,EACF;AACA,SAAO,cAAc,GAAG,WAAW,IAAI,GAAG,IAAI,IAAI,KAAK,GAAG,GAAG,IAAI,IAAI;AACvE;;;AhClDA;;;AiCpEA,IAAM,gBAAgB;AACtB,IAAM,oBAAoB;AAC1B,IAAM,sBAAsB;AAG5B,IAAI,oBAAoB;AAGxB,SAAS,cAAc,QAA4C;AACjE,SAAO,cAAc,UAAU,OAAQ,OAAuB,aAAa;AAC7E;AAEA,IAAM,YAAN,MAAgB;AAAA,EAId,YAA6B,OAAe;AAAf;AAAA,EAAgB;AAAA,EAHrC,QAA2B,CAAC;AAAA,EAC5B,SAAS;AAAA,EAIjB,MAAM,UAAyB;AAC7B,QAAI,KAAK,SAAS,KAAK,OAAO;AAC5B,WAAK;AACL;AAAA,IACF;AACA,WAAO,IAAI,QAAc,CAACC,aAAY;AACpC,WAAK,MAAM,KAAKA,QAAO;AAAA,IACzB,CAAC;AAAA,EACH;AAAA,EAEA,UAAgB;AACd,SAAK;AACL,UAAM,OAAO,KAAK,MAAM,MAAM;AAC9B,QAAI,MAAM;AACR,WAAK;AACL,WAAK;AAAA,IACP;AAAA,EACF;AACF;AAEA,IAAM,uBAAN,MAAuD;AAAA,EAGrD,YACmB,QACA,UACA,UACjB;AAHiB;AACA;AACA;AAAA,EAChB;AAAA,EANc,OAAwB,CAAC;AAAA,EAQ1C,OAAO,SAAiB,SAA4D;AAClF,SAAK,KAAK,KAAK,EAAE,SAAS,GAAG,QAAQ,CAAC;AACtC,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,IAAI,SAAsD;AAC9D,QAAI,KAAK,KAAK,WAAW,GAAG;AAC1B,YAAM,IAAI,eAAe,8CAA8C;AAAA,IACzE;AAEA,UAAM,WAAW,SAAS,YAAY;AACtC,UAAM,cAAc,SAAS,eAAe;AAC5C,UAAM,sBAA
sB,SAAS,uBAAuB;AAC5D,UAAM,YAAY,IAAI,UAAU,WAAW;AAE3C,QAAI,aAAa;AACjB,QAAI,YAAY;AAIhB,QAAI,UAAuD;AAAA,MACzD,EAAE,KAAK,KAAK,UAAU,QAAQ,KAAK,OAAO;AAAA,IAC5C;AACA,UAAM,aAA0B,CAAC;AAEjC,aAAS,QAAQ,GAAG,QAAQ,KAAK,KAAK,QAAQ,SAAS;AACrD,YAAM,MAAM,KAAK,KAAK,KAAK;AAE3B,UAAI,QAAQ,WAAW,GAAG;AACxB,mBAAW,KAAK;AAAA,UACd,SAAS,IAAI;AAAA,UACb;AAAA,UACA,OAAO,CAAC;AAAA,UACR,aAAa;AAAA,UACb,WAAW;AAAA,QACb,CAAC;AACD;AAAA,MACF;AAEA,YAAM,WAAoE,CAAC;AAC3E,YAAM,cAAc,QAAQ;AAC5B,UAAI,eAAe;AAKnB,YAAM,sBAAsB,KAAK,mBAAmB,GAAG;AACvD,YAAM,YAAY,IAAI,aAAa;AACnC,YAAM,eAAe,cAAc,aAAa,CAAC,CAAC;AAElD,YAAM,QAAQ,QAAQ,IAAI,CAAC,EAAE,KAAK,QAAQ,aAAa,MAAM,YAAY;AACvE,YAAI,cAAc,UAAU;AAC1B,yBAAe;AACf;AAAA,QACF;AAEA,cAAM,UAAU,QAAQ;AACxB,YAAI;AACF,cAAI,cAAc,UAAU;AAC1B,2BAAe;AACf;AAAA,UACF;AAEA;AAEA,gBAAM,SAA0B,EAAE,SAAS,IAAI,QAAQ;AAEvD,cAAI,cAAc,WAAW;AAC3B,mBAAO,OAAO;AACd,gBAAI,IAAI,MAAO,QAAO,QAAQ,IAAI;AAAA,UACpC,OAAO;AACL,mBAAO,OAAO;AACd,gBAAI,IAAI,MAAO,QAAO,QAAQ,IAAI;AAAA,UACpC;AAEA,cAAI,cAAc,aAAa,IAAI,OAAO;AACxC,mBAAO,QAAQ,IAAI;AAAA,UACrB;AACA,cAAI,cAAc,aAAa,IAAI,OAAO;AACxC,mBAAO,QAAQ,IAAI;AAAA,UACrB;AAEA,cAAI,IAAI,QAAS,QAAO,UAAU,IAAI;AAEtC,gBAAM,QAAQ,IAAI,SAAS;AAC3B,cAAI,IAAI,QAAQ;AACd,mBAAO,QAAQ;AAAA,UACjB,OAAO;AACL,mBAAO,QAAQ;AAAA,UACjB;AAOA,cAAI;AACJ,cAAI;AACJ,cAAI,cAAc;AAChB,gBAAI,cAAc,KAAK,MAAM,GAAG;AAC9B,0BAAY,KAAK,OAAO,SAAS,KAAK,mBAAoB;AAC1D,2BAAa;AAAA,YACf,OAAO;AACL,0BAAY;AACZ,2BAAa;AACb,kBAAI,CAAC,mBAAmB;AACtB,oCAAoB;AACpB,wBAAQ;AAAA,kBACN,8BAA8B,IAAI,OAAO,sBAAsB,mBAAmB;AAAA,gBAGpF;AAAA,cACF;AAAA,YACF;AAAA,UACF,OAAO;AAEL,wBAAY;AACZ,yBAAa;AAAA,UACf;AAEA,cAAIC,SAAQ,MAAM,UAAU,UAAU,MAAM;AAE5C,cAAI,IAAI,QAAQ;AACd,YAAAA,SAAQA,OAAM,OAAO,IAAI,MAAM;AAC/B,YAAAA,SAAQA,OAAM,MAAM,GAAG,KAAK;AAAA,UAC9B;AAEA,qBAAW,QAAQA,QAAO;AACxB,qBAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,CAAC;AAAA,UAC5C;AAAA,QACF,UAAE;AACA,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF,CAAC;AAED,YAAM,QAAQ,IAAI,MAAM,IAAI,CAAC,SAAS,KAAK,CAAC,CAAC;AAE7C,YAAM,QAAQ,SAAS,IAAI,CAAC,MAAM,EAAE,IAAI;AAExC,iBAAW,KAAK;AAAA,QACd,SAAS,IAAI;AAAA,QACb;AAAA,QACA,OAAO,sBAAsB,CAAC,GAAG,KAAK,IAAI;AAA
A,QAC1C;AAAA,QACA,WAAW;AAAA,MACb,CAAC;AAED,UAAI,cAAc;AAChB,oBAAY;AAAA,MACd;AAIA,YAAM,OAAO,oBAAI,IAAyB;AAC1C,iBAAW,EAAE,MAAM,QAAQ,WAAW,KAAK,UAAU;AACnD,cAAM,UAAU,cAAc,YAAY,KAAK,OAAO,KAAK;AAC3D,YAAI,CAAC,KAAK,IAAI,OAAO,GAAG;AACtB,eAAK,IAAI,SAAS,UAAU;AAAA,QAC9B;AAAA,MACF;AACA,gBAAU,CAAC,GAAG,KAAK,QAAQ,CAAC,EAAE,IAAI,CAAC,CAAC,KAAK,MAAM,OAAO,EAAE,KAAK,OAAO,EAAE;AAAA,IACxE;AAEA,UAAM,UAAU,WAAW,WAAW,SAAS,CAAC;AAEhD,WAAO;AAAA,MACL,OAAO,QAAQ;AAAA,MACf,MAAM;AAAA,MACN;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,mBAAmB,KAAwC;AACjE,QAAI,IAAI,YAAa,QAAO,IAAI;AAEhC,QAAI,KAAK,UAAU;AACjB,YAAM,UAAU,KAAK,SAAS,gBAAgB,IAAI,OAAO;AAEzD,iBAAW,SAAS,SAAS;AAC3B,YAAI,MAAM,YAAa,QAAO,MAAM;AAAA,MACtC;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;AAkBO,SAAS,gBACd,QACA,UACA,UACkB;AAClB,SAAO,IAAI,qBAAqB,QAAQ,UAAU,QAAQ;AAC5D;;;ACtLA,SAAS,gBAAgB,GAAmB;AAC1C,SAAO,EACJ,YAAY,EACZ,QAAQ,cAAc,GAAG,EACzB,QAAQ,OAAO,GAAG,EAClB,QAAQ,UAAU,EAAE;AACzB;AAYA,SAAS,oBAAsD;AAC7D,QAAM,IAAI;AACV,MAAI,EAAE,kBAAkB,OAAO,EAAE,eAAe,WAAW,YAAY;AACrE,WAAO,EAAE;AAAA,EACX;AACA,SAAO;AACT;AAOA,SAAS,cAAc,WAA+B,SAAqC;AACzF,QAAM,IAAI;AACV,MAAI,CAAC,EAAE,YAAa,QAAO;AAE3B,QAAM,UAAU,cAAe,UAAqD;AAAA,IAClF,oBAAoB;AAClB,UAAI;AACF,cAAM,oBAAoB;AAAA,MAC5B,SAAS,KAAK;AACZ,gBAAQ,KAAK,gBAAgB,OAAO,8BAA8B,GAAG;AACrE,aAAK,WAAW,GAAG;AAAA,MACrB;AAAA,IACF;AAAA,IAEA,uBAAuB;AACrB,UAAI;AACF,cAAM,uBAAuB;AAAA,MAC/B,SAAS,KAAK;AACZ,gBAAQ,KAAK,gBAAgB,OAAO,iCAAiC,GAAG;AAAA,MAC1E;AAAA,IACF;AAAA,IAEA,IAAI,KAAK,GAA4B;AACnC,UAAI;AACF,cAAM,OAAO;AAAA,MACf,SAAS,KAAK;AACZ,gBAAQ,KAAK,gBAAgB,OAAO,wBAAwB,GAAG;AAC/D,aAAK,WAAW,GAAG;AAAA,MACrB;AAAA,IACF;AAAA,IAEA,IAAI,OAAgC;AAClC,UAAI;AACF,eAAO,MAAM;AAAA,MACf,QAAQ;AACN,eAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,IAEA,WAAW,KAAc;AACvB,UAAI;AACF,aAAK,YACH,kGACqB,OAAO,SAAS,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACzF,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AAGA,EAAC,QAA0C,WAAW,UAAU;AAChE,EAAC,QAA0C,cAAc,UAAU;AAEnE,SAAO;AACT;AAaO,SAAS,YAAY,OAAwC;AAClE,QAAM,QAAwC,CAAC;AAC/C,QAAM,QAAwC,CAAC;AAC/C,QAAM,WAAW,kBAAkB;AAGnC,aAAW,CAAC,YAAY,
MAAM,KAAK,OAAO,QAAQ,MAAM,SAAS,CAAC,CAAC,GAAG;AACpE,UAAM,YAAwB,CAAC;AAC/B,eAAW,aAAa,OAAO,OAAO;AACpC,YAAM,UAAU,MAAM,gBAAgB,UAAU,CAAC,IAAI,gBAAgB,UAAU,QAAQ,CAAC;AACxF,gBAAU,KAAK;AAAA,QACb;AAAA,QACA,UAAU,UAAU;AAAA,QACpB,aAAa,UAAU;AAAA,MACzB,CAAC;AACD,UAAI,YAAY,CAAC,SAAS,IAAI,OAAO,GAAG;AACtC,iBAAS,OAAO,SAAS,cAAc,WAAW,OAAO,CAAC;AAAA,MAC5D;AAAA,IACF;AACA,UAAM,UAAU,IAAI;AAAA,MAClB,OAAO;AAAA,MACP,YAAY,OAAO;AAAA,IACrB;AAAA,EACF;AAGA,aAAW,CAAC,SAAS,MAAM,KAAK,OAAO,QAAQ,MAAM,SAAS,CAAC,CAAC,GAAG;AACjE,UAAM,YAAwB,CAAC;AAC/B,eAAW,aAAa,OAAO,OAAO;AACpC,YAAM,UAAU,WAAW,gBAAgB,OAAO,CAAC,IAAI,gBAAgB,UAAU,QAAQ,CAAC;AAC1F,gBAAU,KAAK;AAAA,QACb;AAAA,QACA,UAAU,UAAU;AAAA,QACpB,aAAa,UAAU;AAAA,MACzB,CAAC;AACD,UAAI,YAAY,CAAC,SAAS,IAAI,OAAO,GAAG;AACtC,iBAAS,OAAO,SAAS,cAAc,WAAW,OAAO,CAAC;AAAA,MAC5D;AAAA,IACF;AACA,UAAM,OAAO,IAAI;AAAA,MACf,OAAO;AAAA,MACP,YAAY,OAAO;AAAA,IACrB;AAAA,EACF;AAEA,SAAO,EAAE,OAAO,MAAM;AACxB;","names":["import_node_crypto","import_node_crypto","resolve","serializeFirestoreTypes","deserializeFirestoreTypes","buildWritableNodeRecord","buildWritableEdgeRecord","buildWritableNodeRecord","buildWritableEdgeRecord","import_meta","import_firestore","resolve","import_node_fs","import_node_path","resolve","http","import_firestore","resolve","edges"]}
|
|
1
|
+
{"version":3,"sources":["../src/internal/serialization-tag.ts","../src/serialization.ts","../src/index.ts","../src/docid.ts","../src/internal/constants.ts","../src/internal/write-plan.ts","../src/batch.ts","../src/dynamic-registry.ts","../src/errors.ts","../src/json-schema.ts","../src/migration.ts","../src/scope.ts","../src/registry.ts","../src/sandbox.ts","../src/query.ts","../src/query-safety.ts","../src/transaction.ts","../src/client.ts","../src/codegen/index.ts","../src/config.ts","../src/cross-graph.ts","../src/default-indexes.ts","../src/discover.ts","../src/id.ts","../src/indexes.ts","../src/query-client/client.ts","../src/query-client/config.ts","../src/query-client/shaping.ts","../src/scope-path.ts","../src/internal/firestore-traverse-compiler.ts","../src/traverse.ts","../src/views.ts"],"sourcesContent":["/**\n * Firegraph serialization tag — split from `src/serialization.ts` so it can\n * be imported from Workers-facing code without dragging in\n * `@google-cloud/firestore`.\n *\n * The full serialization module (with Timestamp/GeoPoint round-tripping)\n * lives one folder up because the sandbox migration pipeline needs it; the\n * write-plan helper only needs to recognise tagged objects to keep them\n * terminal during patch flattening, so it imports just the tag from here.\n */\n\n/** Sentinel key used to tag serialized Firestore types. */\nexport const SERIALIZATION_TAG = '__firegraph_ser__' as const;\n\nconst KNOWN_TYPES = new Set(['Timestamp', 'GeoPoint', 'VectorValue', 'DocumentReference']);\n\n/** Check if a value is a tagged serialized Firestore type. 
*/\nexport function isTaggedValue(value: unknown): boolean {\n if (value === null || typeof value !== 'object') return false;\n const tag = (value as Record<string, unknown>)[SERIALIZATION_TAG];\n return typeof tag === 'string' && KNOWN_TYPES.has(tag);\n}\n","/**\n * Firestore-aware serialization for the sandbox migration pipeline.\n *\n * Firestore documents can contain special types (Timestamp, GeoPoint,\n * VectorValue, DocumentReference) that don't survive plain JSON\n * round-tripping. This module provides tagged serialization: Firestore\n * types are wrapped in tagged plain objects before JSON marshaling and\n * reconstructed after.\n *\n * Only used by the `defaultExecutor` sandbox path. Static migrations\n * (in-memory functions) receive raw Firestore objects directly.\n */\n\nimport type { DocumentReference, Firestore } from '@google-cloud/firestore';\nimport { FieldValue, GeoPoint, Timestamp } from '@google-cloud/firestore';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n// SERIALIZATION_TAG and isTaggedValue live in `internal/serialization-tag.ts`\n// so Workers-facing code (e.g. `src/internal/write-plan.ts` and the\n// `firegraph/cloudflare` bundle) can recognise tagged values without\n// pulling in `@google-cloud/firestore`. 
Re-exported here so callers that\n// already import from `src/serialization.ts` keep working.\nexport { isTaggedValue, SERIALIZATION_TAG } from './internal/serialization-tag.js';\nimport { isTaggedValue, SERIALIZATION_TAG } from './internal/serialization-tag.js';\n\n// One-time warning for DocumentReference deserialization without db\nlet _docRefWarned = false;\n\n// ---------------------------------------------------------------------------\n// Detection helpers\n// ---------------------------------------------------------------------------\n\nfunction isTimestamp(value: unknown): value is Timestamp {\n return value instanceof Timestamp;\n}\n\nfunction isGeoPoint(value: unknown): value is GeoPoint {\n return value instanceof GeoPoint;\n}\n\nfunction isDocumentReference(value: unknown): value is DocumentReference {\n // Duck-type check: DocumentReference has path (string) and firestore properties\n if (value === null || typeof value !== 'object') return false;\n const v = value as Record<string, unknown>;\n return (\n typeof v.path === 'string' &&\n v.firestore !== undefined &&\n typeof v.id === 'string' &&\n v.constructor?.name === 'DocumentReference'\n );\n}\n\nfunction isVectorValue(value: unknown): boolean {\n if (value === null || typeof value !== 'object') return false;\n const v = value as Record<string, unknown>;\n return (\n v.constructor?.name === 'VectorValue' && Array.isArray((v as Record<string, unknown>)._values)\n );\n}\n\n// ---------------------------------------------------------------------------\n// Serialize\n// ---------------------------------------------------------------------------\n\n/**\n * Recursively walk a data object and replace Firestore types with tagged\n * plain objects suitable for JSON serialization.\n *\n * Returns a new object tree — the input is never mutated.\n */\nexport function serializeFirestoreTypes(data: Record<string, unknown>): Record<string, unknown> {\n return serializeValue(data) as Record<string, 
unknown>;\n}\n\nfunction serializeValue(value: unknown): unknown {\n // Primitives\n if (value === null || value === undefined) return value;\n if (typeof value !== 'object') return value;\n\n // Firestore types (check before generic object/array)\n if (isTimestamp(value)) {\n return {\n [SERIALIZATION_TAG]: 'Timestamp',\n seconds: value.seconds,\n nanoseconds: value.nanoseconds,\n };\n }\n if (isGeoPoint(value)) {\n return {\n [SERIALIZATION_TAG]: 'GeoPoint',\n latitude: value.latitude,\n longitude: value.longitude,\n };\n }\n if (isDocumentReference(value)) {\n return { [SERIALIZATION_TAG]: 'DocumentReference', path: (value as DocumentReference).path };\n }\n if (isVectorValue(value)) {\n // Prefer toArray() (public API) over _values (private internal property)\n const v = value as Record<string, unknown>;\n const values =\n typeof v.toArray === 'function' ? (v.toArray as () => number[])() : (v._values as number[]);\n return { [SERIALIZATION_TAG]: 'VectorValue', values: [...values] };\n }\n\n // Arrays\n if (Array.isArray(value)) {\n return value.map(serializeValue);\n }\n\n // Plain objects — recurse\n const result: Record<string, unknown> = {};\n for (const key of Object.keys(value as Record<string, unknown>)) {\n result[key] = serializeValue((value as Record<string, unknown>)[key]);\n }\n return result;\n}\n\n// ---------------------------------------------------------------------------\n// Deserialize\n// ---------------------------------------------------------------------------\n\n/**\n * Recursively walk a data object and reconstruct Firestore types from\n * tagged plain objects.\n *\n * @param data - The data to deserialize (typically from JSON.parse)\n * @param db - Optional Firestore instance for DocumentReference reconstruction.\n * If not provided, tagged DocumentReferences are left as-is with a one-time warning.\n *\n * Returns a new object tree — the input is never mutated.\n */\nexport function deserializeFirestoreTypes(\n data: Record<string, 
unknown>,\n db?: Firestore,\n): Record<string, unknown> {\n return deserializeValue(data, db) as Record<string, unknown>;\n}\n\nfunction deserializeValue(value: unknown, db?: Firestore): unknown {\n if (value === null || value === undefined) return value;\n if (typeof value !== 'object') return value;\n\n // Short-circuit for values that are already real Firestore types.\n // This makes deserializeFirestoreTypes idempotent — safe to call on data\n // that has already been deserialized (e.g., write-back after defaultExecutor\n // already reconstructed types, or static migrations that return raw types).\n if (\n isTimestamp(value) ||\n isGeoPoint(value) ||\n isDocumentReference(value) ||\n isVectorValue(value)\n ) {\n return value;\n }\n\n // Arrays\n if (Array.isArray(value)) {\n return value.map((v) => deserializeValue(v, db));\n }\n\n const obj = value as Record<string, unknown>;\n\n // Check for tagged Firestore type\n if (isTaggedValue(obj)) {\n const tag = obj[SERIALIZATION_TAG] as string;\n\n switch (tag) {\n case 'Timestamp':\n // Validate expected fields before reconstruction\n if (typeof obj.seconds !== 'number' || typeof obj.nanoseconds !== 'number') return obj;\n return new Timestamp(obj.seconds, obj.nanoseconds);\n\n case 'GeoPoint':\n if (typeof obj.latitude !== 'number' || typeof obj.longitude !== 'number') return obj;\n return new GeoPoint(obj.latitude, obj.longitude);\n\n case 'VectorValue':\n if (!Array.isArray(obj.values)) return obj;\n return FieldValue.vector(obj.values as number[]);\n\n case 'DocumentReference':\n if (typeof obj.path !== 'string') return obj;\n if (db) {\n return db.doc(obj.path);\n }\n // No db available — leave as tagged object with one-time warning\n if (!_docRefWarned) {\n _docRefWarned = true;\n console.warn(\n '[firegraph] DocumentReference encountered during migration deserialization ' +\n 'but no Firestore instance available. The reference will remain as a tagged ' +\n 'object with its path. 
Enable write-back for full reconstruction.',\n );\n }\n return obj;\n\n default:\n // Unknown tag — leave as-is (forward compatibility)\n return obj;\n }\n }\n\n // Plain object — recurse\n const result: Record<string, unknown> = {};\n for (const key of Object.keys(obj)) {\n result[key] = deserializeValue(obj[key], db);\n }\n return result;\n}\n","export { createGraphClient, createGraphClientFromBackend } from './client.js';\nexport type { CodegenOptions } from './codegen/index.js';\nexport { generateTypes } from './codegen/index.js';\nexport type {\n FiregraphConfig,\n ViewContext,\n ViewDefaultsConfig,\n ViewResolverConfig,\n} from './config.js';\nexport { defineConfig, resolveView } from './config.js';\nexport { isAncestorUid, resolveAncestorCollection } from './cross-graph.js';\nexport { DEFAULT_CORE_INDEXES } from './default-indexes.js';\nexport type { DiscoverResult, DiscoveryWarning } from './discover.js';\nexport { discoverEntities } from './discover.js';\nexport { DiscoveryError } from './discover.js';\nexport { computeEdgeDocId, computeNodeDocId } from './docid.js';\nexport {\n BOOTSTRAP_ENTRIES,\n createBootstrapRegistry,\n createRegistryFromGraph,\n EDGE_TYPE_SCHEMA,\n generateDeterministicUid,\n META_EDGE_TYPE,\n META_NODE_TYPE,\n NODE_TYPE_SCHEMA,\n} from './dynamic-registry.js';\nexport {\n CapabilityNotSupportedError,\n CrossBackendTransactionError,\n DynamicRegistryError,\n EdgeNotFoundError,\n FiregraphError,\n InvalidQueryError,\n MigrationError,\n NodeNotFoundError,\n QuerySafetyError,\n RegistryScopeError,\n RegistryViolationError,\n TraversalError,\n ValidationError,\n} from './errors.js';\nexport { generateId } from './id.js';\nexport type {\n FirestoreIndex,\n FirestoreIndexConfig,\n FirestoreIndexField,\n GenerateIndexOptions,\n} from './indexes.js';\nexport { generateIndexConfig } from './indexes.js';\nexport { DEFAULT_QUERY_LIMIT } from './internal/constants.js';\nexport { deleteField } from './internal/write-plan.js';\nexport type { 
FieldMeta } from './json-schema.js';\nexport { compileSchema, jsonSchemaToFieldMeta } from './json-schema.js';\nexport type { MigrationResult } from './migration.js';\nexport {\n applyMigrationChain,\n migrateRecord,\n migrateRecords,\n validateMigrationChain,\n} from './migration.js';\nexport { buildEdgeQueryPlan, buildNodeQueryPlan } from './query.js';\nexport type { QueryClientErrorCode, QueryClientOptions } from './query-client/index.js';\nexport { QueryClient, QueryClientError } from './query-client/index.js';\nexport type { QuerySafetyResult } from './query-safety.js';\nexport { analyzeQuerySafety } from './query-safety.js';\nexport { createMergedRegistry, createRegistry } from './registry.js';\nexport {\n compileMigrationFn,\n compileMigrations,\n defaultExecutor,\n destroySandboxWorker,\n precompileSource,\n} from './sandbox.js';\nexport { matchScope, matchScopeAny } from './scope.js';\nexport type { StorageScopeSegment } from './scope-path.js';\nexport {\n appendStorageScope,\n isAncestorScopeUid,\n parseStorageScope,\n resolveAncestorScope,\n} from './scope-path.js';\nexport {\n deserializeFirestoreTypes,\n isTaggedValue,\n SERIALIZATION_TAG,\n serializeFirestoreTypes,\n} from './serialization.js';\nexport { createTraversal } from './traverse.js';\nexport type {\n AggregateExtension,\n AggregateField,\n AggregateOp,\n AggregateResult,\n AggregateSpec,\n BulkBatchError,\n BulkOptions,\n BulkProgress,\n BulkResult,\n BulkUpdatePatch,\n Capability,\n CascadeResult,\n CoreGraphClient,\n DefineTypeOptions,\n DiscoveredEntity,\n DiscoveryResult,\n DistanceMeasure,\n DmlExtension,\n DynamicGraphClient,\n DynamicGraphMethods,\n DynamicRegistryConfig,\n EdgeTopology,\n EdgeTypeData,\n ExpandParams,\n ExpandResult,\n FindEdgesParams,\n FindEdgesProjectedParams,\n FindNearestParams,\n FindNodesParams,\n FullTextSearchExtension,\n GeoExtension,\n GraphBatch,\n GraphClient,\n GraphClientOptions,\n GraphReader,\n GraphRecord,\n GraphRegistry,\n GraphTransaction,\n 
GraphWriter,\n HopDefinition,\n HopResult,\n IndexFieldSpec,\n IndexSpec,\n JoinExtension,\n MigrationExecutor,\n MigrationFn,\n MigrationStep,\n MigrationWriteBack,\n NodeTypeData,\n ProjectedRow,\n QueryFilter,\n QueryMode,\n QueryOptions,\n QueryPlan,\n RawFirestoreExtension,\n RawSqlExtension,\n RealtimeListenExtension,\n RegistryEntry,\n ScanProtection,\n SelectExtension,\n StoredGraphRecord,\n StoredMigrationStep,\n TraversalBuilder,\n TraversalOptions,\n TraversalResult,\n VectorExtension,\n WhereClause,\n} from './types.js';\nexport type {\n EntityViewConfig,\n EntityViewMeta,\n ViewComponentClass,\n ViewMeta,\n ViewRegistry,\n ViewRegistryInput,\n} from './views.js';\nexport { defineViews } from './views.js';\n","import { createHash } from 'node:crypto';\n\nimport { SHARD_SEPARATOR } from './internal/constants.js';\n\nexport function computeNodeDocId(uid: string): string {\n return uid;\n}\n\nexport function computeEdgeDocId(aUid: string, axbType: string, bUid: string): string {\n const composite = `${aUid}${SHARD_SEPARATOR}${axbType}${SHARD_SEPARATOR}${bUid}`;\n const hash = createHash('sha256').update(composite).digest('hex');\n const shard = hash[0];\n return `${shard}${SHARD_SEPARATOR}${aUid}${SHARD_SEPARATOR}${axbType}${SHARD_SEPARATOR}${bUid}`;\n}\n","export const NODE_RELATION = 'is';\n\n/**\n * Default result limit applied to findEdges/findNodes queries\n * when no explicit limit is provided. 
Prevents unbounded result sets\n * that could be expensive on Enterprise Firestore.\n */\nexport const DEFAULT_QUERY_LIMIT = 500;\n\n/**\n * Fields that are part of the firegraph record structure (not user data).\n * Used by the query planner and safety analysis to distinguish builtin\n * fields from data.* fields.\n */\nexport const BUILTIN_FIELDS = new Set([\n 'aType',\n 'aUid',\n 'axbType',\n 'bType',\n 'bUid',\n 'createdAt',\n 'updatedAt',\n]);\n\nexport const SHARD_ALGORITHM = 'sha256';\nexport const SHARD_SEPARATOR = ':';\nexport const SHARD_BUCKETS = 16;\n","/**\n * Write-plan helper — flattens partial-update payloads into a list of\n * deep-path operations every backend can execute identically.\n *\n * Background: firegraph used to ship two write semantics that quietly\n * disagreed about depth.\n * - `putNode`/`putEdge` did a full document replace.\n * - `updateNode`/`updateEdge` did a one-level shallow merge: top-level\n * keys were preserved, but nested objects were replaced wholesale.\n *\n * Both behaviours dropped sibling keys silently. The 0.12 contract is that\n * `put*` and `update*` deep-merge by default (sibling keys at any depth\n * survive); `replace*` is the explicit escape hatch.\n *\n * `flattenPatch` walks a partial-update payload and emits one\n * {@link DataPathOp} per terminal value. Plain objects recurse; arrays,\n * primitives, Firestore special types, and tagged firegraph-serialization\n * objects are terminal (replaced as a unit). `undefined` values are\n * skipped; `null` is preserved as a real `null` write; the\n * {@link DELETE_FIELD} sentinel marks a field for removal.\n *\n * The output is deliberately backend-agnostic. 
Each backend translates ops\n * into its native dialect:\n * - Firestore: dotted field path → `data.a.b.c` for `update()`.\n * - SQLite / DO SQLite: `json_set(data, '$.a.b.c', ?)` /\n * `json_remove(data, '$.a.b.c')`.\n */\n\nimport { isTaggedValue, SERIALIZATION_TAG } from './serialization-tag.js';\n\n// ---------------------------------------------------------------------------\n// Public sentinel\n// ---------------------------------------------------------------------------\n\n/**\n * Sentinel returned by {@link deleteField}. Treated by all backends as\n * \"remove this field from the stored document\".\n *\n * Equivalent to Firestore's `FieldValue.delete()`, but works for SQLite\n * backends too. Use inside `updateNode`/`updateEdge` payloads.\n */\nexport const DELETE_FIELD: unique symbol = Symbol.for('firegraph.deleteField');\nexport type DeleteSentinel = typeof DELETE_FIELD;\n\n/**\n * Returns the firegraph delete sentinel. Place this anywhere in an\n * `updateNode`/`updateEdge` payload to remove the corresponding field.\n *\n * ```ts\n * await client.updateNode('tour', uid, {\n * attrs: { obsoleteFlag: deleteField() },\n * });\n * ```\n */\nexport function deleteField(): DeleteSentinel {\n return DELETE_FIELD;\n}\n\n/** Type guard for the delete sentinel. 
*/\nexport function isDeleteSentinel(value: unknown): value is DeleteSentinel {\n return value === DELETE_FIELD;\n}\n\n// ---------------------------------------------------------------------------\n// Terminal-detection helpers\n// ---------------------------------------------------------------------------\n\nconst FIRESTORE_TERMINAL_CTOR = new Set([\n 'Timestamp',\n 'GeoPoint',\n 'VectorValue',\n 'DocumentReference',\n 'FieldValue',\n 'NumericIncrementTransform',\n 'ArrayUnionTransform',\n 'ArrayRemoveTransform',\n 'ServerTimestampTransform',\n 'DeleteTransform',\n]);\n\n/**\n * Should this value be written as a single terminal op (no recursion)?\n *\n * Plain JS objects (constructor === Object, or no prototype) are recursed.\n * Everything else — arrays, primitives, class instances, Firestore special\n * types, tagged serialization payloads — is terminal.\n */\nexport function isTerminalValue(value: unknown): boolean {\n if (value === null) return true;\n const t = typeof value;\n if (t !== 'object') return true;\n if (Array.isArray(value)) return true;\n // Tagged serialization payloads carry the SERIALIZATION_TAG sentinel and\n // should be persisted whole — never split into per-field ops.\n if (isTaggedValue(value)) return true;\n const proto = Object.getPrototypeOf(value);\n if (proto === null || proto === Object.prototype) return false;\n // Class instances — Firestore types or anything else exotic.\n const ctor = (value as { constructor?: { name?: string } }).constructor;\n if (ctor && typeof ctor.name === 'string' && FIRESTORE_TERMINAL_CTOR.has(ctor.name)) return true;\n // Unknown class instance: treat as terminal. 
Recursing into a class\n // instance is almost always wrong (Map, Set, Date, Buffer...).\n return true;\n}\n\n// ---------------------------------------------------------------------------\n// Core type\n// ---------------------------------------------------------------------------\n\n/**\n * Single terminal write operation produced by {@link flattenPatch}.\n *\n * `path` is a non-empty array of plain object keys. `value` is the value to\n * write; ignored when `delete` is `true`. Arrays / primitives / Firestore\n * special types appear here as whole terminal values.\n */\nexport interface DataPathOp {\n path: readonly string[];\n value: unknown;\n delete: boolean;\n}\n\n// ---------------------------------------------------------------------------\n// Path-segment validation\n// ---------------------------------------------------------------------------\n\n/**\n * Object keys that are safe to embed in SQLite `json_set`/`json_remove`\n * paths. The SQLite backend uses an allowlist regex too — keep these in\n * sync (see `JSON_PATH_KEY_RE` in `internal/sqlite-sql.ts` and\n * `cloudflare/sql.ts`).\n *\n * Allows: ASCII letters, digits, `_`, `-`. Must start with a letter or\n * underscore. This rejects keys containing dots, brackets, quotes, or\n * non-ASCII characters that could break path parsing or be used to\n * inject into the path expression.\n */\nconst SAFE_KEY_RE = /^[A-Za-z_][A-Za-z0-9_-]*$/;\n\n/**\n * Mutual-exclusion guard for {@link UpdatePayload}. The two branches of the\n * shape — `dataOps` (deep-merge) and `replaceData` (full replace) — are\n * structurally incompatible: combining them would tell the backend to\n * simultaneously merge AND wipe, and the three backends disagree on which\n * wins. This helper centralises the runtime check so all three backends\n * trip the same error.\n *\n * Imported as a runtime check from `firestore-backend`, `sqlite-sql`, and\n * `cloudflare/sql`. 
Backend authors implementing the public `StorageBackend`\n * contract should call it too.\n */\nexport function assertUpdatePayloadExclusive(update: {\n dataOps?: unknown;\n replaceData?: unknown;\n}): void {\n if (update.replaceData !== undefined && update.dataOps !== undefined) {\n throw new Error(\n 'firegraph: UpdatePayload cannot specify both `replaceData` and `dataOps`. ' +\n 'Use one or the other — `replaceData` is the migration-write-back form, ' +\n '`dataOps` is the standard partial-update form.',\n );\n }\n}\n\n/**\n * Reject `DELETE_FIELD` sentinels in payloads where field deletion isn't a\n * meaningful operation: full-document replace (`replaceNode`/`replaceEdge`)\n * and the merge-default put surface (`putNode`/`putEdge`).\n *\n * Why both:\n * - In **replace**, the entire `data` field is overwritten. A delete\n * sentinel in that payload either silently disappears (Firestore drops\n * the Symbol during `.set()` serialization) or produces an empty SQLite\n * `json_remove` no-op, depending on backend. Either way the caller's\n * intent — \"remove field X\" — is lost. Use `updateNode` instead.\n * - In **put** (merge mode), behaviour diverges across backends today:\n * SQLite's flattenPatch emits a real delete op, but Firestore's\n * `.set(..., {merge: true})` silently drops the Symbol. Until that's\n * fixed end-to-end, the safest contract is to reject sentinels at the\n * entry point and steer callers to `updateNode`.\n *\n * The walk mirrors `flattenPatch`: plain objects recurse, everything else\n * is terminal. Tagged serialization payloads short-circuit so we don't\n * recurse into the `__firegraph_ser__` envelope.\n */\nexport function assertNoDeleteSentinels(data: unknown, callerLabel: string): void {\n walkForDeleteSentinels(data, [], { kind: 'root' }, ({ path }) => {\n const where = path.length === 0 ? 
'<root>' : path.map((p) => JSON.stringify(p)).join(' > ');\n throw new Error(\n `firegraph: ${callerLabel} payload contains a deleteField() sentinel at ${where}. ` +\n `deleteField() is only valid inside updateNode/updateEdge — full-data ` +\n `writes (put*, replace*) cannot delete individual fields. Use updateNode ` +\n `with a deleteField() value, or omit the field from the replace payload.`,\n );\n });\n}\n\ntype SentinelParent = { kind: 'root' } | { kind: 'object' } | { kind: 'array'; index: number };\n\nfunction walkForDeleteSentinels(\n node: unknown,\n path: readonly string[],\n parent: SentinelParent,\n visit: (ctx: { path: readonly string[]; parent: SentinelParent }) => void,\n): void {\n if (node === null || node === undefined) return;\n if (isDeleteSentinel(node)) {\n visit({ path, parent });\n return;\n }\n if (typeof node !== 'object') return;\n if (isTaggedValue(node)) return;\n if (Array.isArray(node)) {\n for (let i = 0; i < node.length; i++) {\n walkForDeleteSentinels(node[i], [...path, String(i)], { kind: 'array', index: i }, visit);\n }\n return;\n }\n const proto = Object.getPrototypeOf(node);\n if (proto !== null && proto !== Object.prototype) return;\n const obj = node as Record<string, unknown>;\n for (const key of Object.keys(obj)) {\n walkForDeleteSentinels(obj[key], [...path, key], { kind: 'object' }, visit);\n }\n}\n\n/** Throws if any path segment in the patch is unsafe for SQLite paths. */\nexport function assertSafePath(path: readonly string[]): void {\n for (const seg of path) {\n if (!SAFE_KEY_RE.test(seg)) {\n throw new Error(\n `firegraph: unsafe object key ${JSON.stringify(seg)} at path ${path\n .map((p) => JSON.stringify(p))\n .join(' > ')}. 
Keys used inside update payloads must match ` +\n `/^[A-Za-z_][A-Za-z0-9_-]*$/ so they can be embedded safely in ` +\n `SQLite JSON paths.`,\n );\n }\n }\n}\n\n// ---------------------------------------------------------------------------\n// flattenPatch\n// ---------------------------------------------------------------------------\n\n/**\n * Flatten a partial-update payload into a list of terminal {@link DataPathOp}s.\n *\n * Rules:\n * - Plain objects (no prototype or `Object.prototype`) recurse — each\n * key becomes another path segment.\n * - Arrays are terminal: writing `{tags: ['a']}` overwrites the whole\n * `tags` array. Element-wise array merging is intentionally NOT\n * supported — it's almost never what callers actually want, and\n * Firestore `arrayUnion`/`arrayRemove` give precise semantics when\n * they are.\n * - `undefined` values are skipped (no op generated). Use\n * {@link deleteField} if you actually want to remove a field.\n * - `null` is preserved verbatim — emits a terminal op with `value: null`.\n * - {@link DELETE_FIELD} produces an op with `delete: true`.\n * - Firestore special types and tagged serialization payloads are terminal.\n * - Class instances are terminal.\n *\n * Throws if any object key on the recursion path is unsafe (see\n * {@link assertSafePath}).\n */\nexport function flattenPatch(data: Record<string, unknown>): DataPathOp[] {\n const ops: DataPathOp[] = [];\n walk(data, [], ops);\n return ops;\n}\n\nfunction assertNoDeleteSentinelsInArrayValue(\n arr: readonly unknown[],\n arrayPath: readonly string[],\n): void {\n walkForDeleteSentinels(arr, arrayPath, { kind: 'root' }, ({ parent }) => {\n const arrayPathStr =\n arrayPath.length === 0 ? '<root>' : arrayPath.map((p) => JSON.stringify(p)).join(' > ');\n if (parent.kind === 'array') {\n throw new Error(\n `firegraph: deleteField() sentinel at index ${parent.index} inside an array at ` +\n `path ${arrayPathStr}. 
Arrays are ` +\n `terminal in update payloads (replaced as a unit), so the sentinel ` +\n `would be silently dropped by JSON serialization. To remove the ` +\n `field entirely, pass deleteField() in place of the whole array.`,\n );\n }\n throw new Error(\n `firegraph: deleteField() sentinel inside an array element at ` +\n `path ${arrayPathStr}. ` +\n `Arrays are terminal in update payloads — the sentinel would ` +\n `be silently dropped by JSON serialization.`,\n );\n });\n}\n\nfunction walk(node: unknown, path: string[], out: DataPathOp[]): void {\n // Caller guarantees the root is a plain object; this branch only\n // matters for recursion.\n if (node === undefined) return;\n if (isDeleteSentinel(node)) {\n if (path.length === 0) {\n throw new Error('firegraph: deleteField() cannot be the entire update payload.');\n }\n assertSafePath(path);\n out.push({ path: [...path], value: undefined, delete: true });\n return;\n }\n if (isTerminalValue(node)) {\n if (path.length === 0) {\n // `null` / array / primitive at the root is illegal — patches must\n // describe per-key changes.\n throw new Error(\n 'firegraph: update payload must be a plain object. Got ' +\n (node === null ? 'null' : Array.isArray(node) ? 'array' : typeof node) +\n '.',\n );\n }\n // A DELETE_FIELD sentinel embedded inside an array (which is terminal\n // and replaced as a unit) would silently disappear: JSON.stringify drops\n // Symbols, and Firestore's serializer does likewise. 
Reject loudly so\n // the divergence between \"user wrote a delete\" and \"field stayed put\"\n // can't happen.\n if (Array.isArray(node)) {\n assertNoDeleteSentinelsInArrayValue(node, path);\n }\n assertSafePath(path);\n out.push({ path: [...path], value: node, delete: false });\n return;\n }\n // Plain object: recurse into its own enumerable keys.\n const obj = node as Record<string, unknown>;\n const keys = Object.keys(obj);\n if (keys.length === 0) {\n // Empty object at non-root: emit terminal op so an empty object can\n // be written explicitly when the caller really wants one. Skip at\n // the root — no-op patches should produce no ops.\n if (path.length > 0) {\n assertSafePath(path);\n out.push({ path: [...path], value: {}, delete: false });\n }\n return;\n }\n for (const key of keys) {\n if (key === SERIALIZATION_TAG) {\n const where = path.length === 0 ? '<root>' : path.map((p) => JSON.stringify(p)).join(' > ');\n throw new Error(\n `firegraph: update payload contains a literal \\`${SERIALIZATION_TAG}\\` key at ` +\n `${where}. That key is reserved for firegraph's serialization envelope and ` +\n `cannot appear on a plain object in user data. 
Use a different field name, ` +\n `or pass a recognized tagged value through replaceNode/replaceEdge instead.`,\n );\n }\n walk(obj[key], [...path, key], out);\n }\n}\n","import { computeEdgeDocId, computeNodeDocId } from './docid.js';\nimport type { BatchBackend, WritableRecord } from './internal/backend.js';\nimport { NODE_RELATION } from './internal/constants.js';\nimport { assertNoDeleteSentinels, flattenPatch } from './internal/write-plan.js';\nimport type { GraphBatch, GraphRegistry } from './types.js';\n\nfunction buildWritableNodeRecord(\n aType: string,\n uid: string,\n data: Record<string, unknown>,\n): WritableRecord {\n return { aType, aUid: uid, axbType: NODE_RELATION, bType: aType, bUid: uid, data };\n}\n\nfunction buildWritableEdgeRecord(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n): WritableRecord {\n return { aType, aUid, axbType, bType, bUid, data };\n}\n\nexport class GraphBatchImpl implements GraphBatch {\n constructor(\n private readonly backend: BatchBackend,\n private readonly registry?: GraphRegistry,\n private readonly scopePath: string = '',\n ) {}\n\n async putNode(aType: string, uid: string, data: Record<string, unknown>): Promise<void> {\n this.writeNode(aType, uid, data, 'merge');\n }\n\n async putEdge(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n ): Promise<void> {\n this.writeEdge(aType, aUid, axbType, bType, bUid, data, 'merge');\n }\n\n async replaceNode(aType: string, uid: string, data: Record<string, unknown>): Promise<void> {\n this.writeNode(aType, uid, data, 'replace');\n }\n\n async replaceEdge(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n ): Promise<void> {\n this.writeEdge(aType, aUid, axbType, bType, bUid, data, 'replace');\n }\n\n private writeNode(\n aType: string,\n uid: string,\n data: 
Record<string, unknown>,\n mode: 'merge' | 'replace',\n ): void {\n assertNoDeleteSentinels(data, mode === 'replace' ? 'replaceNode' : 'putNode');\n if (this.registry) {\n this.registry.validate(aType, NODE_RELATION, aType, data, this.scopePath);\n }\n const docId = computeNodeDocId(uid);\n const record = buildWritableNodeRecord(aType, uid, data);\n if (this.registry) {\n const entry = this.registry.lookup(aType, NODE_RELATION, aType);\n if (entry?.schemaVersion && entry.schemaVersion > 0) {\n record.v = entry.schemaVersion;\n }\n }\n this.backend.setDoc(docId, record, mode);\n }\n\n private writeEdge(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n mode: 'merge' | 'replace',\n ): void {\n assertNoDeleteSentinels(data, mode === 'replace' ? 'replaceEdge' : 'putEdge');\n if (this.registry) {\n this.registry.validate(aType, axbType, bType, data, this.scopePath);\n }\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n const record = buildWritableEdgeRecord(aType, aUid, axbType, bType, bUid, data);\n if (this.registry) {\n const entry = this.registry.lookup(aType, axbType, bType);\n if (entry?.schemaVersion && entry.schemaVersion > 0) {\n record.v = entry.schemaVersion;\n }\n }\n this.backend.setDoc(docId, record, mode);\n }\n\n async updateNode(uid: string, data: Record<string, unknown>): Promise<void> {\n const docId = computeNodeDocId(uid);\n this.backend.updateDoc(docId, { dataOps: flattenPatch(data) });\n }\n\n async updateEdge(\n aUid: string,\n axbType: string,\n bUid: string,\n data: Record<string, unknown>,\n ): Promise<void> {\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n this.backend.updateDoc(docId, { dataOps: flattenPatch(data) });\n }\n\n async removeNode(uid: string): Promise<void> {\n const docId = computeNodeDocId(uid);\n this.backend.deleteDoc(docId);\n }\n\n async removeEdge(aUid: string, axbType: string, bUid: string): Promise<void> {\n const docId = 
computeEdgeDocId(aUid, axbType, bUid);\n this.backend.deleteDoc(docId);\n }\n\n async commit(): Promise<void> {\n await this.backend.commit();\n }\n}\n","import { createHash } from 'node:crypto';\n\nimport { NODE_RELATION } from './internal/constants.js';\nimport { createRegistry } from './registry.js';\nimport { compileMigrations, precompileSource } from './sandbox.js';\nimport type {\n EdgeTypeData,\n GraphReader,\n GraphRegistry,\n MigrationExecutor,\n NodeTypeData,\n RegistryEntry,\n} from './types.js';\n\n// ---------------------------------------------------------------------------\n// Meta-type constants\n// ---------------------------------------------------------------------------\n\n/** The aType used for node type definition meta-nodes. */\nexport const META_NODE_TYPE = 'nodeType';\n\n/** The aType used for edge type definition meta-nodes. */\nexport const META_EDGE_TYPE = 'edgeType';\n\n// ---------------------------------------------------------------------------\n// JSON Schemas for meta-type data payloads\n// ---------------------------------------------------------------------------\n\n/** JSON Schema for a single stored migration step. */\nconst STORED_MIGRATION_STEP_SCHEMA = {\n type: 'object',\n required: ['fromVersion', 'toVersion', 'up'],\n properties: {\n fromVersion: { type: 'integer', minimum: 0 },\n toVersion: { type: 'integer', minimum: 1 },\n up: { type: 'string', minLength: 1 },\n },\n additionalProperties: false,\n};\n\n/** JSON Schema for the `data` payload of a `nodeType` meta-node. 
*/\nexport const NODE_TYPE_SCHEMA: object = {\n type: 'object',\n required: ['name', 'jsonSchema'],\n properties: {\n name: { type: 'string', minLength: 1 },\n jsonSchema: { type: 'object' },\n description: { type: 'string' },\n titleField: { type: 'string' },\n subtitleField: { type: 'string' },\n viewTemplate: { type: 'string' },\n viewCss: { type: 'string' },\n allowedIn: { type: 'array', items: { type: 'string', minLength: 1 } },\n schemaVersion: { type: 'integer', minimum: 0 },\n migrations: { type: 'array', items: STORED_MIGRATION_STEP_SCHEMA },\n migrationWriteBack: { type: 'string', enum: ['off', 'eager', 'background'] },\n },\n additionalProperties: false,\n};\n\n/** JSON Schema for the `data` payload of an `edgeType` meta-node. */\nexport const EDGE_TYPE_SCHEMA: object = {\n type: 'object',\n required: ['name', 'from', 'to'],\n properties: {\n name: { type: 'string', minLength: 1 },\n from: {\n oneOf: [\n { type: 'string', minLength: 1 },\n { type: 'array', items: { type: 'string', minLength: 1 }, minItems: 1 },\n ],\n },\n to: {\n oneOf: [\n { type: 'string', minLength: 1 },\n { type: 'array', items: { type: 'string', minLength: 1 }, minItems: 1 },\n ],\n },\n jsonSchema: { type: 'object' },\n inverseLabel: { type: 'string' },\n description: { type: 'string' },\n titleField: { type: 'string' },\n subtitleField: { type: 'string' },\n viewTemplate: { type: 'string' },\n viewCss: { type: 'string' },\n allowedIn: { type: 'array', items: { type: 'string', minLength: 1 } },\n targetGraph: { type: 'string', minLength: 1, pattern: '^[^/]+$' },\n schemaVersion: { type: 'integer', minimum: 0 },\n migrations: { type: 'array', items: STORED_MIGRATION_STEP_SCHEMA },\n migrationWriteBack: { type: 'string', enum: ['off', 'eager', 'background'] },\n },\n additionalProperties: false,\n};\n\n// ---------------------------------------------------------------------------\n// Bootstrap registry\n// 
---------------------------------------------------------------------------\n\n/** Registry entries for the two meta-types (always present). */\nexport const BOOTSTRAP_ENTRIES: readonly RegistryEntry[] = [\n {\n aType: META_NODE_TYPE,\n axbType: NODE_RELATION,\n bType: META_NODE_TYPE,\n jsonSchema: NODE_TYPE_SCHEMA,\n description: 'Meta-type: defines a node type',\n },\n {\n aType: META_EDGE_TYPE,\n axbType: NODE_RELATION,\n bType: META_EDGE_TYPE,\n jsonSchema: EDGE_TYPE_SCHEMA,\n description: 'Meta-type: defines an edge type',\n },\n];\n\n/**\n * Build the bootstrap registry that validates meta-type writes.\n * This is always available, even before any dynamic types are loaded.\n *\n * Memoized at module scope: `BOOTSTRAP_ENTRIES` is a `readonly` array\n * of module-level constants and `createRegistry` is pure over them, so\n * the resulting registry — including its compiled cfworker\n * `Validator`s — can be reused across every `GraphClientImpl`\n * constructor. This matters on Cloudflare Workers, where the dynamic\n * client constructor runs on every request that touches the\n * meta-registry path; without memoization we'd re-walk +\n * re-dereference these schemas per request.\n */\nlet _bootstrapRegistry: GraphRegistry | null = null;\nexport function createBootstrapRegistry(): GraphRegistry {\n if (_bootstrapRegistry) return _bootstrapRegistry;\n _bootstrapRegistry = createRegistry([...BOOTSTRAP_ENTRIES]);\n return _bootstrapRegistry;\n}\n\n// ---------------------------------------------------------------------------\n// Deterministic UID generation\n// ---------------------------------------------------------------------------\n\n/**\n * Generate a deterministic UID for a meta-type definition.\n * This ensures that defining the same type name always targets the same\n * Firestore document, enabling upsert semantics.\n *\n * Format: 21-char base64url substring of SHA-256(`metaType:name`).\n */\nexport function generateDeterministicUid(metaType: string, name: 
string): string {\n const hash = createHash('sha256').update(`${metaType}:${name}`).digest('base64url');\n return hash.slice(0, 21);\n}\n\n// ---------------------------------------------------------------------------\n// createRegistryFromGraph\n// ---------------------------------------------------------------------------\n\n/**\n * Read meta-type nodes from the graph and compile them into a GraphRegistry.\n *\n * The returned registry includes both the dynamic entries AND the bootstrap\n * meta-type entries, so meta-type writes remain validateable after a reload.\n *\n * @param reader - A GraphReader pointed at the collection containing meta-nodes.\n * @param executor - Optional custom executor for compiling stored migration source strings.\n */\nexport async function createRegistryFromGraph(\n reader: GraphReader,\n executor?: MigrationExecutor,\n): Promise<GraphRegistry> {\n const [nodeTypes, edgeTypes] = await Promise.all([\n reader.findNodes({ aType: META_NODE_TYPE }),\n reader.findNodes({ aType: META_EDGE_TYPE }),\n ]);\n\n const entries: RegistryEntry[] = [...BOOTSTRAP_ENTRIES];\n\n // Eagerly pre-validate all migration sources in the sandbox before building\n // the registry. 
This ensures reloadRegistry() fails fast on invalid sources.\n const prevalidations: Promise<void>[] = [];\n for (const record of nodeTypes) {\n const data = record.data as unknown as NodeTypeData;\n if (data.migrations) {\n for (const m of data.migrations) {\n prevalidations.push(precompileSource(m.up, executor));\n }\n }\n }\n for (const record of edgeTypes) {\n const data = record.data as unknown as EdgeTypeData;\n if (data.migrations) {\n for (const m of data.migrations) {\n prevalidations.push(precompileSource(m.up, executor));\n }\n }\n }\n await Promise.all(prevalidations);\n\n // Convert nodeType records → self-loop RegistryEntries\n for (const record of nodeTypes) {\n const data = record.data as unknown as NodeTypeData;\n entries.push({\n aType: data.name,\n axbType: NODE_RELATION,\n bType: data.name,\n jsonSchema: data.jsonSchema,\n description: data.description,\n titleField: data.titleField,\n subtitleField: data.subtitleField,\n allowedIn: data.allowedIn,\n migrations: data.migrations ? compileMigrations(data.migrations, executor) : undefined,\n migrationWriteBack: data.migrationWriteBack,\n });\n }\n\n // Convert edgeType records → RegistryEntries (expand from/to arrays)\n for (const record of edgeTypes) {\n const data = record.data as unknown as EdgeTypeData;\n const fromTypes = Array.isArray(data.from) ? data.from : [data.from];\n const toTypes = Array.isArray(data.to) ? data.to : [data.to];\n\n const compiledMigrations = data.migrations\n ? 
compileMigrations(data.migrations, executor)\n : undefined;\n\n for (const aType of fromTypes) {\n for (const bType of toTypes) {\n entries.push({\n aType,\n axbType: data.name,\n bType,\n jsonSchema: data.jsonSchema,\n description: data.description,\n inverseLabel: data.inverseLabel,\n titleField: data.titleField,\n subtitleField: data.subtitleField,\n allowedIn: data.allowedIn,\n targetGraph: data.targetGraph,\n migrations: compiledMigrations,\n migrationWriteBack: data.migrationWriteBack,\n });\n }\n }\n }\n\n return createRegistry(entries);\n}\n","export class FiregraphError extends Error {\n constructor(\n message: string,\n public readonly code: string,\n ) {\n super(message);\n this.name = 'FiregraphError';\n }\n}\n\nexport class NodeNotFoundError extends FiregraphError {\n constructor(uid: string) {\n super(`Node not found: ${uid}`, 'NODE_NOT_FOUND');\n this.name = 'NodeNotFoundError';\n }\n}\n\nexport class EdgeNotFoundError extends FiregraphError {\n constructor(aUid: string, axbType: string, bUid: string) {\n super(`Edge not found: ${aUid} -[${axbType}]-> ${bUid}`, 'EDGE_NOT_FOUND');\n this.name = 'EdgeNotFoundError';\n }\n}\n\nexport class ValidationError extends FiregraphError {\n constructor(\n message: string,\n public readonly details?: unknown,\n ) {\n super(message, 'VALIDATION_ERROR');\n this.name = 'ValidationError';\n }\n}\n\nexport class RegistryViolationError extends FiregraphError {\n constructor(aType: string, axbType: string, bType: string) {\n super(`Unregistered triple: (${aType}) -[${axbType}]-> (${bType})`, 'REGISTRY_VIOLATION');\n this.name = 'RegistryViolationError';\n }\n}\n\nexport class InvalidQueryError extends FiregraphError {\n constructor(message: string) {\n super(message, 'INVALID_QUERY');\n this.name = 'InvalidQueryError';\n }\n}\n\nexport class TraversalError extends FiregraphError {\n constructor(message: string) {\n super(message, 'TRAVERSAL_ERROR');\n this.name = 'TraversalError';\n }\n}\n\nexport class 
DynamicRegistryError extends FiregraphError {\n constructor(message: string) {\n super(message, 'DYNAMIC_REGISTRY_ERROR');\n this.name = 'DynamicRegistryError';\n }\n}\n\nexport class QuerySafetyError extends FiregraphError {\n constructor(message: string) {\n super(message, 'QUERY_SAFETY');\n this.name = 'QuerySafetyError';\n }\n}\n\nexport class RegistryScopeError extends FiregraphError {\n constructor(\n aType: string,\n axbType: string,\n bType: string,\n scopePath: string,\n allowedIn: string[],\n ) {\n super(\n `Type (${aType}) -[${axbType}]-> (${bType}) is not allowed at scope \"${scopePath || 'root'}\". ` +\n `Allowed in: [${allowedIn.join(', ')}]`,\n 'REGISTRY_SCOPE',\n );\n this.name = 'RegistryScopeError';\n }\n}\n\nexport class MigrationError extends FiregraphError {\n constructor(message: string) {\n super(message, 'MIGRATION_ERROR');\n this.name = 'MigrationError';\n }\n}\n\n/**\n * Thrown when a caller tries to perform an operation that would require\n * atomicity across two physical storage backends — e.g. opening a routed\n * subgraph client from inside a transaction callback. Cross-backend\n * atomicity cannot be honoured by real-world storage engines (Firestore,\n * SQLite drivers over D1/DO/better-sqlite3, etc.), so firegraph surfaces\n * this as a typed error instead of silently confining the write to the\n * base backend.\n *\n * Normally `TransactionBackend` and `BatchBackend` don't expose `subgraph()`\n * at the type level, so this error is unreachable through well-typed code.\n * It exists as a public catchable type for app code that needs to tolerate\n * this case deliberately (e.g. 
dynamic code paths that bypass the type\n * system) and as future-proofing if the interface ever grows a way to\n * request a sub-scope inside a transaction.\n */\nexport class CrossBackendTransactionError extends FiregraphError {\n constructor(message: string) {\n super(message, 'CROSS_BACKEND_TRANSACTION');\n this.name = 'CrossBackendTransactionError';\n }\n}\n\n/**\n * Thrown when a caller invokes a capability-gated operation on a backend\n * that does not declare the required capability. Capability gating is\n * primarily a compile-time concern (see `BackendCapabilities` and the\n * type-level extension surfaces in `GraphClient<C>`), but this runtime\n * error covers the cases where the type system is bypassed — dynamic\n * registries, `as any` casts, or callers explicitly downcasting through\n * the generic-erased `StorageBackend` shape.\n *\n * The error code is `CAPABILITY_NOT_SUPPORTED`. The message names the\n * missing capability and the backend that was asked, so app code can\n * diagnose without inspecting the cap set itself.\n */\nexport class CapabilityNotSupportedError extends FiregraphError {\n constructor(\n public readonly capability: string,\n backendDescription: string,\n ) {\n super(\n `Capability \"${capability}\" is not supported by ${backendDescription}.`,\n 'CAPABILITY_NOT_SUPPORTED',\n );\n this.name = 'CapabilityNotSupportedError';\n }\n}\n","/**\n * JSON Schema validation and introspection utilities.\n *\n * Uses `@cfworker/json-schema` for validation — a runtime-interpreter\n * JSON Schema validator that does not rely on `new Function()` and is\n * therefore compatible with Cloudflare Workers (which run V8 with\n * `--disallow-code-generation-from-strings`). 
Ajv was used here\n * previously, but its `ajv.compile(schema)` generates a validator via\n * the Function constructor and fails with \"Code generation from strings\n * disallowed for this context\" whenever firegraph's dynamic-registry\n * bootstrap or `reloadRegistry` runs inside a Worker.\n *\n * The introspection half (`jsonSchemaToFieldMeta`) is pure string/object\n * manipulation with no validator dependency.\n */\n\nimport { type OutputUnit, type Schema, Validator } from '@cfworker/json-schema';\n\nimport { ValidationError } from './errors.js';\n\n// ---------------------------------------------------------------------------\n// FieldMeta types (previously in editor/server/schema-introspect.ts)\n// ---------------------------------------------------------------------------\n\nexport interface FieldMeta {\n name: string;\n type: 'string' | 'number' | 'boolean' | 'enum' | 'array' | 'object' | 'unknown';\n required: boolean;\n description?: string;\n enumValues?: string[];\n minLength?: number;\n maxLength?: number;\n pattern?: string;\n min?: number;\n max?: number;\n isInt?: boolean;\n itemMeta?: FieldMeta;\n fields?: FieldMeta[];\n}\n\n// ---------------------------------------------------------------------------\n// Validation\n// ---------------------------------------------------------------------------\n\n/** Cap on how many errors get joined into the human-readable message. */\nconst MAX_RENDERED_ERRORS = 20;\n\n/**\n * Compile a JSON Schema into a validation function.\n *\n * The returned function throws `ValidationError` if data is invalid. 
The\n * error's `details` is the `OutputUnit[]` array produced by\n * `@cfworker/json-schema` — consumers that previously inspected Ajv's\n * `ErrorObject[]` need to map to the cfworker shape\n * (`{ keyword, keywordLocation, instanceLocation, error }`).\n *\n * Draft 2020-12 is requested by default to match the library's richest\n * feature set; schemas that omit `$schema` still validate under it\n * since keyword semantics back-compat to draft-07 for the fields\n * firegraph actually uses.\n *\n * `shortCircuit` is explicitly disabled so `result.errors` contains\n * every violation, not just the first one — humans rely on the joined\n * error message to debug bad writes from the editor / chat UI. The\n * full array is preserved on `ValidationError.details`; only the\n * rendered message is capped at `MAX_RENDERED_ERRORS` lines so\n * pathological `oneOf`/`anyOf` schemas can't blow up log lines.\n *\n * Format keywords supported by `@cfworker/json-schema` (anything else\n * is silently passed through — see node_modules/@cfworker/json-schema/\n * src/format.ts):\n * `date`, `time`, `date-time`, `duration`,\n * `email`, `hostname`, `ipv4`, `ipv6`,\n * `uri`, `uri-reference`, `uri-template`, `url`,\n * `uuid`, `regex`,\n * `json-pointer`, `relative-json-pointer`, `json-pointer-uri-fragment`.\n */\nexport function compileSchema(schema: object, label?: string): (data: unknown) => void {\n // `object` is the public type used throughout `RegistryEntry.jsonSchema`\n // and the dynamic-client API; cfworker's `Schema` is structurally\n // `{ [k: string]: any }`, which a JSON Schema document always\n // satisfies at runtime. 
The cast is therefore safe in practice —\n // pass anything other than a plain JSON-Schema-shaped object and\n // `dereference()` inside the validator will throw at construction.\n const validator = new Validator(schema as Schema, '2020-12', false);\n return (data: unknown) => {\n const result = validator.validate(data);\n if (!result.valid) {\n const total = result.errors.length;\n const head = result.errors.slice(0, MAX_RENDERED_ERRORS).map(formatError).join('; ');\n const overflow = total > MAX_RENDERED_ERRORS ? ` (+${total - MAX_RENDERED_ERRORS} more)` : '';\n throw new ValidationError(\n `Data validation failed${label ? ' for ' + label : ''}: ${head}${overflow}`,\n result.errors,\n );\n }\n };\n}\n\n/**\n * Format a single cfworker `OutputUnit` into a human-readable line.\n *\n * cfworker's `instanceLocation` is a JSON-Pointer-as-URI-fragment\n * (`#`, `#/foo`, `#/foo/0/bar`); strip the leading `#` so the rendered\n * path looks like Ajv's `instancePath` (`/foo/0/bar`) and root errors\n * read as `/` rather than `#`. The `[keyword]` prefix is included so\n * messages stay actionable when `error` is terse (e.g. `not`, `enum`).\n */\nfunction formatError(err: OutputUnit): string {\n const path = err.instanceLocation.replace(/^#/, '') || '/';\n const keyword = err.keyword ? `[${err.keyword}] ` : '';\n const detail = err.error ? `: ${keyword}${err.error}` : '';\n return `${path}${detail}`;\n}\n\n// ---------------------------------------------------------------------------\n// JSON Schema → FieldMeta introspection\n// ---------------------------------------------------------------------------\n\n/**\n * Convert a JSON Schema (expected to be `type: \"object\"`) into `FieldMeta[]`\n * suitable for the editor's SchemaForm component.\n */\nexport function jsonSchemaToFieldMeta(schema: any): FieldMeta[] {\n if (!schema || schema.type !== 'object' || !schema.properties) return [];\n\n const requiredSet = new Set<string>(Array.isArray(schema.required) ? 
schema.required : []);\n\n return Object.entries(schema.properties).map(([name, prop]) =>\n propertyToFieldMeta(name, prop as any, requiredSet.has(name)),\n );\n}\n\n/**\n * Convert a single JSON Schema property into a `FieldMeta`.\n */\nfunction propertyToFieldMeta(name: string, prop: any, required: boolean): FieldMeta {\n if (!prop) return { name, type: 'unknown', required };\n\n // Handle enum (can appear with or without type)\n if (Array.isArray(prop.enum)) {\n return {\n name,\n type: 'enum',\n required,\n enumValues: prop.enum as string[],\n description: prop.description,\n };\n }\n\n // Handle oneOf/anyOf for nullable patterns like { oneOf: [{type:'string'}, {type:'null'}] }\n if (Array.isArray(prop.oneOf) || Array.isArray(prop.anyOf)) {\n const variants = (prop.oneOf ?? prop.anyOf) as any[];\n const nonNull = variants.filter((v: any) => v.type !== 'null');\n if (nonNull.length === 1) {\n // Nullable wrapper — unwrap and mark as optional\n return propertyToFieldMeta(name, nonNull[0], false);\n }\n return { name, type: 'unknown', required, description: prop.description };\n }\n\n const type = prop.type;\n\n if (type === 'string') {\n return {\n name,\n type: 'string',\n required,\n minLength: prop.minLength,\n maxLength: prop.maxLength,\n pattern: prop.pattern,\n description: prop.description,\n };\n }\n\n if (type === 'number' || type === 'integer') {\n return {\n name,\n type: 'number',\n required,\n min: prop.minimum,\n max: prop.maximum,\n isInt: type === 'integer' ? true : undefined,\n description: prop.description,\n };\n }\n\n if (type === 'boolean') {\n return { name, type: 'boolean', required, description: prop.description };\n }\n\n if (type === 'array') {\n const itemMeta = prop.items ? 
propertyToFieldMeta('item', prop.items, true) : undefined;\n return {\n name,\n type: 'array',\n required,\n itemMeta,\n description: prop.description,\n };\n }\n\n if (type === 'object') {\n return {\n name,\n type: 'object',\n required,\n fields: jsonSchemaToFieldMeta(prop),\n description: prop.description,\n };\n }\n\n return { name, type: 'unknown', required, description: prop.description };\n}\n","/**\n * Migration pipeline for auto-migrating records on read.\n *\n * When a record's `v` is behind the version derived from the registry\n * entry's migrations, the pipeline applies migration steps sequentially\n * to bring the data up to the current version.\n */\n\nimport { MigrationError } from './errors.js';\nimport type {\n GraphRegistry,\n MigrationStep,\n MigrationWriteBack,\n StoredGraphRecord,\n} from './types.js';\n\n/** Result of attempting to migrate a single record. */\nexport interface MigrationResult {\n record: StoredGraphRecord;\n migrated: boolean;\n /** Resolved write-back mode for this record (entry-level > global > 'off'). */\n writeBack: MigrationWriteBack;\n}\n\n/**\n * Apply a chain of migration steps to transform data from `currentVersion`\n * to `targetVersion`. 
Throws `MigrationError` if the chain is incomplete\n * or a migration function fails.\n *\n * Returns the migrated data payload only — the caller is responsible for\n * stamping `v` on the record envelope.\n */\nexport async function applyMigrationChain(\n data: Record<string, unknown>,\n currentVersion: number,\n targetVersion: number,\n migrations: MigrationStep[],\n): Promise<Record<string, unknown>> {\n const sorted = [...migrations].sort((a, b) => a.fromVersion - b.fromVersion);\n let result = { ...data };\n let version = currentVersion;\n\n for (const step of sorted) {\n if (step.fromVersion === version) {\n try {\n result = await step.up(result);\n } catch (err: unknown) {\n if (err instanceof MigrationError) throw err;\n throw new MigrationError(\n `Migration from v${step.fromVersion} to v${step.toVersion} failed: ${(err as Error).message}`,\n );\n }\n if (!result || typeof result !== 'object') {\n throw new MigrationError(\n `Migration from v${step.fromVersion} to v${step.toVersion} returned invalid data (expected object)`,\n );\n }\n version = step.toVersion;\n }\n }\n\n if (version !== targetVersion) {\n throw new MigrationError(\n `Incomplete migration chain: reached v${version} but target is v${targetVersion}`,\n );\n }\n\n return result;\n}\n\n/**\n * Validate that a migration chain forms a contiguous path from version 0\n * to the highest `toVersion`. 
Throws `MigrationError` if the chain has\n * gaps or duplicate `fromVersion` values.\n *\n * Called at registry construction time to catch incomplete chains early,\n * rather than at read time when a record is migrated.\n */\nexport function validateMigrationChain(migrations: MigrationStep[], label: string): void {\n if (migrations.length === 0) return;\n\n // Validate individual steps\n const seen = new Set<number>();\n for (const step of migrations) {\n if (step.toVersion <= step.fromVersion) {\n throw new MigrationError(\n `${label}: migration step has toVersion (${step.toVersion}) <= fromVersion (${step.fromVersion})`,\n );\n }\n if (seen.has(step.fromVersion)) {\n throw new MigrationError(\n `${label}: duplicate migration step for fromVersion ${step.fromVersion}`,\n );\n }\n seen.add(step.fromVersion);\n }\n\n const sorted = [...migrations].sort((a, b) => a.fromVersion - b.fromVersion);\n const targetVersion = Math.max(...migrations.map((m) => m.toVersion));\n let version = 0;\n\n for (const step of sorted) {\n if (step.fromVersion === version) {\n version = step.toVersion;\n } else if (step.fromVersion > version) {\n throw new MigrationError(\n `${label}: migration chain has a gap — no step covers v${version} → v${step.fromVersion}`,\n );\n }\n }\n\n if (version !== targetVersion) {\n throw new MigrationError(\n `${label}: migration chain does not reach v${targetVersion} (stuck at v${version})`,\n );\n }\n}\n\n/**\n * Attempt to migrate a single record based on its registry entry.\n *\n * Returns the original record unchanged if no migration is needed\n * (no schema version, already at current version, or no migrations defined).\n */\nexport async function migrateRecord(\n record: StoredGraphRecord,\n registry: GraphRegistry,\n globalWriteBack: MigrationWriteBack = 'off',\n): Promise<MigrationResult> {\n const entry = registry.lookup(record.aType, record.axbType, record.bType);\n\n if (!entry?.migrations?.length || !entry.schemaVersion) {\n return { record, 
migrated: false, writeBack: 'off' };\n }\n\n const currentVersion = record.v ?? 0;\n\n if (currentVersion >= entry.schemaVersion) {\n return { record, migrated: false, writeBack: 'off' };\n }\n\n const migratedData = await applyMigrationChain(\n record.data,\n currentVersion,\n entry.schemaVersion,\n entry.migrations,\n );\n\n // Two-tier resolution: entry-level > global > 'off'\n const writeBack = entry.migrationWriteBack ?? globalWriteBack ?? 'off';\n\n return {\n record: { ...record, data: migratedData, v: entry.schemaVersion },\n migrated: true,\n writeBack,\n };\n}\n\n/**\n * Migrate an array of records, returning all results.\n * If any single migration fails, the entire call rejects — a broken\n * migration function is a bug that should surface immediately.\n */\nexport async function migrateRecords(\n records: StoredGraphRecord[],\n registry: GraphRegistry,\n globalWriteBack: MigrationWriteBack = 'off',\n): Promise<MigrationResult[]> {\n return Promise.all(records.map((r) => migrateRecord(r, registry, globalWriteBack)));\n}\n","/**\n * Scope path matching for subgraph-level registry constraints.\n *\n * Scope paths are slash-separated names derived from the chain of\n * `subgraph()` calls (e.g., `'agents'`, `'agents/memories'`).\n * The root graph has an empty scope path (`''`).\n *\n * Patterns:\n * - `'root'` — matches only the root graph (empty scope path)\n * - `'agents'` — matches exactly `'agents'`\n * - `'agents/memories'` — matches exactly `'agents/memories'`\n * - `'*/agents'` — `*` matches one segment: `'foo/agents'` but not `'a/b/agents'`\n * - `'**/memories'` — `**` matches zero or more segments\n * - `'**'` — matches everything including root\n */\n\n/**\n * Test whether a scope path matches a single pattern.\n *\n * @param scopePath - The current scope path (empty string for root)\n * @param pattern - The pattern to match against\n */\nexport function matchScope(scopePath: string, pattern: string): boolean {\n // Special case: 'root' matches 
only the root graph\n if (pattern === 'root') return scopePath === '';\n\n // Special case: '**' matches everything\n if (pattern === '**') return true;\n\n const pathSegments = scopePath === '' ? [] : scopePath.split('/');\n const patternSegments = pattern.split('/');\n\n return matchSegments(pathSegments, 0, patternSegments, 0);\n}\n\n/**\n * Test whether a scope path matches any pattern in a list.\n * Returns `true` if the list is empty or undefined (allowed everywhere).\n *\n * @param scopePath - The current scope path (empty string for root)\n * @param patterns - Array of patterns to match against\n */\nexport function matchScopeAny(scopePath: string, patterns: string[]): boolean {\n if (!patterns || patterns.length === 0) return true;\n return patterns.some((p) => matchScope(scopePath, p));\n}\n\n/**\n * Recursive segment matcher with support for `*` (one segment) and\n * `**` (zero or more segments).\n */\nfunction matchSegments(path: string[], pi: number, pattern: string[], qi: number): boolean {\n // Both exhausted — match\n if (pi === path.length && qi === pattern.length) return true;\n\n // Pattern exhausted but path remains — no match\n if (qi === pattern.length) return false;\n\n const seg = pattern[qi];\n\n if (seg === '**') {\n // '**' at the end of pattern — matches everything remaining\n if (qi === pattern.length - 1) return true;\n\n // Try consuming 0, 1, 2, ... 
path segments\n for (let skip = 0; skip <= path.length - pi; skip++) {\n if (matchSegments(path, pi + skip, pattern, qi + 1)) return true;\n }\n return false;\n }\n\n // Path exhausted but pattern has non-** segments remaining — no match\n if (pi === path.length) return false;\n\n if (seg === '*') {\n // '*' matches exactly one segment\n return matchSegments(path, pi + 1, pattern, qi + 1);\n }\n\n // Literal match\n if (path[pi] === seg) {\n return matchSegments(path, pi + 1, pattern, qi + 1);\n }\n\n return false;\n}\n","import { RegistryScopeError, RegistryViolationError, ValidationError } from './errors.js';\nimport { NODE_RELATION } from './internal/constants.js';\nimport { compileSchema } from './json-schema.js';\nimport { validateMigrationChain } from './migration.js';\nimport { matchScopeAny } from './scope.js';\nimport type { DiscoveryResult, GraphRegistry, RegistryEntry } from './types.js';\n\nfunction tripleKey(aType: string, axbType: string, bType: string): string {\n return `${aType}:${axbType}:${bType}`;\n}\n\nfunction tripleKeyFor(e: RegistryEntry): string {\n return tripleKey(e.aType, e.axbType, e.bType);\n}\n\n/**\n * Build a registry from either explicit entries or a DiscoveryResult.\n *\n * @example\n * ```ts\n * // From explicit entries (programmatic)\n * const registry = createRegistry([\n * { aType: 'user', axbType: 'is', bType: 'user', jsonSchema: userSchema },\n * { aType: 'user', axbType: 'follows', bType: 'user', jsonSchema: followsSchema },\n * ]);\n *\n * // From discovery result (folder convention)\n * const discovered = await discoverEntities('./entities');\n * const registry = createRegistry(discovered);\n * ```\n */\nexport function createRegistry(input: RegistryEntry[] | DiscoveryResult): GraphRegistry {\n const map = new Map<string, { entry: RegistryEntry; validate?: (data: unknown) => void }>();\n\n let entries: RegistryEntry[];\n\n if (Array.isArray(input)) {\n entries = input;\n } else {\n entries = discoveryToEntries(input);\n 
}\n\n const entryList: ReadonlyArray<RegistryEntry> = Object.freeze([...entries]);\n\n for (const entry of entries) {\n if (entry.targetGraph && entry.targetGraph.includes('/')) {\n throw new ValidationError(\n `Entry (${entry.aType}) -[${entry.axbType}]-> (${entry.bType}) has invalid targetGraph \"${entry.targetGraph}\" — must be a single segment (no \"/\")`,\n );\n }\n if (entry.migrations?.length) {\n const label = `Entry (${entry.aType}) -[${entry.axbType}]-> (${entry.bType})`;\n validateMigrationChain(entry.migrations, label);\n // Derive schemaVersion from migrations — single source of truth\n entry.schemaVersion = Math.max(...entry.migrations.map((m) => m.toVersion));\n } else {\n // No migrations → no versioning (ignore any user-supplied schemaVersion)\n entry.schemaVersion = undefined;\n }\n const key = tripleKey(entry.aType, entry.axbType, entry.bType);\n const validator = entry.jsonSchema\n ? compileSchema(entry.jsonSchema, `(${entry.aType}) -[${entry.axbType}]-> (${entry.bType})`)\n : undefined;\n map.set(key, { entry, validate: validator });\n }\n\n // Build axbType index for lookupByAxbType\n const axbIndex = new Map<string, ReadonlyArray<RegistryEntry>>();\n const axbBuild = new Map<string, RegistryEntry[]>();\n for (const entry of entries) {\n const existing = axbBuild.get(entry.axbType);\n if (existing) {\n existing.push(entry);\n } else {\n axbBuild.set(entry.axbType, [entry]);\n }\n }\n for (const [key, arr] of axbBuild) {\n axbIndex.set(key, Object.freeze(arr));\n }\n\n // Build aType → subgraph-topology index.\n //\n // For each source aType, collect edge entries whose `targetGraph` is set —\n // these are the aType's direct subgraph children. Dedupe by `targetGraph`\n // alone (not by axbType): the physical subgraph store is addressed by\n // (parentUid, targetGraph) and the cascade caller only cares about which\n // child subgraphs to tear down. 
Two distinct edge relations pointing into\n // the same `targetGraph` would otherwise produce duplicate destroy calls\n // on the same physical backend.\n const topologyIndex = new Map<string, ReadonlyArray<RegistryEntry>>();\n const topologyBuild = new Map<string, RegistryEntry[]>();\n const topologySeen = new Map<string, Set<string>>();\n for (const entry of entries) {\n if (!entry.targetGraph) continue;\n let seen = topologySeen.get(entry.aType);\n if (!seen) {\n seen = new Set();\n topologySeen.set(entry.aType, seen);\n }\n if (seen.has(entry.targetGraph)) continue;\n seen.add(entry.targetGraph);\n const existing = topologyBuild.get(entry.aType);\n if (existing) {\n existing.push(entry);\n } else {\n topologyBuild.set(entry.aType, [entry]);\n }\n }\n for (const [key, arr] of topologyBuild) {\n topologyIndex.set(key, Object.freeze(arr));\n }\n\n return {\n lookup(aType: string, axbType: string, bType: string): RegistryEntry | undefined {\n return map.get(tripleKey(aType, axbType, bType))?.entry;\n },\n\n lookupByAxbType(axbType: string): ReadonlyArray<RegistryEntry> {\n return axbIndex.get(axbType) ?? [];\n },\n\n getSubgraphTopology(aType: string): ReadonlyArray<RegistryEntry> {\n return topologyIndex.get(aType) ?? 
[];\n },\n\n validate(\n aType: string,\n axbType: string,\n bType: string,\n data: unknown,\n scopePath?: string,\n ): void {\n const rec = map.get(tripleKey(aType, axbType, bType));\n\n if (!rec) {\n throw new RegistryViolationError(aType, axbType, bType);\n }\n\n // Scope validation: check allowedIn patterns when a scope context is provided\n if (scopePath !== undefined && rec.entry.allowedIn && rec.entry.allowedIn.length > 0) {\n if (!matchScopeAny(scopePath, rec.entry.allowedIn)) {\n throw new RegistryScopeError(aType, axbType, bType, scopePath, rec.entry.allowedIn);\n }\n }\n\n if (rec.validate) {\n try {\n rec.validate(data);\n } catch (err: unknown) {\n if (err instanceof ValidationError) throw err;\n throw new ValidationError(\n `Data validation failed for (${aType}) -[${axbType}]-> (${bType})`,\n err,\n );\n }\n }\n },\n\n entries(): ReadonlyArray<RegistryEntry> {\n return entryList;\n },\n };\n}\n\n/**\n * Create a merged registry where `base` entries take priority and `extension`\n * entries fill in gaps. Lookups and validation check `base` first; only if the\n * triple is not found there does the merged registry fall through to\n * `extension`.\n *\n * The `entries()` method returns a deduplicated list (base wins on collision).\n * The `lookupByAxbType()` method merges results from both registries,\n * deduplicating by triple key with base entries winning.\n */\nexport function createMergedRegistry(base: GraphRegistry, extension: GraphRegistry): GraphRegistry {\n // Build a set of triple keys from the base registry for fast collision checks.\n const baseKeys = new Set(base.entries().map(tripleKeyFor));\n\n return {\n lookup(aType: string, axbType: string, bType: string): RegistryEntry | undefined {\n return base.lookup(aType, axbType, bType) ?? 
extension.lookup(aType, axbType, bType);\n },\n\n lookupByAxbType(axbType: string): ReadonlyArray<RegistryEntry> {\n const baseResults = base.lookupByAxbType(axbType);\n const extResults = extension.lookupByAxbType(axbType);\n if (extResults.length === 0) return baseResults;\n if (baseResults.length === 0) return extResults;\n\n // Merge, base wins on triple-key collision\n const seen = new Set(baseResults.map(tripleKeyFor));\n const merged = [...baseResults];\n for (const entry of extResults) {\n if (!seen.has(tripleKeyFor(entry))) {\n merged.push(entry);\n }\n }\n return Object.freeze(merged);\n },\n\n getSubgraphTopology(aType: string): ReadonlyArray<RegistryEntry> {\n const baseResults = base.getSubgraphTopology(aType);\n const extResults = extension.getSubgraphTopology(aType);\n if (extResults.length === 0) return baseResults;\n if (baseResults.length === 0) return extResults;\n\n // Merge, base wins on `targetGraph` collision. Extension entries only\n // contribute new subgraph segments the base doesn't cover. 
Dedupe key\n // matches the physical DO address — (parentUid, targetGraph) — so two\n // different axbTypes pointing into the same segment collapse to one.\n const seen = new Set(baseResults.map((e) => e.targetGraph));\n const merged = [...baseResults];\n for (const entry of extResults) {\n if (!seen.has(entry.targetGraph)) {\n seen.add(entry.targetGraph);\n merged.push(entry);\n }\n }\n return Object.freeze(merged);\n },\n\n validate(\n aType: string,\n axbType: string,\n bType: string,\n data: unknown,\n scopePath?: string,\n ): void {\n if (baseKeys.has(tripleKey(aType, axbType, bType))) {\n return base.validate(aType, axbType, bType, data, scopePath);\n }\n // Falls through to extension (which throws RegistryViolationError if not found)\n return extension.validate(aType, axbType, bType, data, scopePath);\n },\n\n entries(): ReadonlyArray<RegistryEntry> {\n const extEntries = extension.entries();\n if (extEntries.length === 0) return base.entries();\n\n const merged = [...base.entries()];\n for (const entry of extEntries) {\n if (!baseKeys.has(tripleKeyFor(entry))) {\n merged.push(entry);\n }\n }\n return Object.freeze(merged);\n },\n };\n}\n\n/**\n * Convert a DiscoveryResult into flat RegistryEntry[].\n * Nodes become self-loop triples `(name, 'is', name)`.\n * Edges expand `from`/`to` arrays into one triple per combination.\n */\nfunction discoveryToEntries(discovery: DiscoveryResult): RegistryEntry[] {\n const entries: RegistryEntry[] = [];\n\n // Nodes → self-loop triples\n for (const [name, entity] of discovery.nodes) {\n entries.push({\n aType: name,\n axbType: NODE_RELATION,\n bType: name,\n jsonSchema: entity.schema,\n description: entity.description,\n titleField: entity.titleField,\n subtitleField: entity.subtitleField,\n allowedIn: entity.allowedIn,\n migrations: entity.migrations,\n migrationWriteBack: entity.migrationWriteBack,\n indexes: entity.indexes,\n });\n }\n\n // Edges → expand from/to into one triple per combination\n for (const [axbType, 
entity] of discovery.edges) {\n const topology = entity.topology;\n if (!topology) continue;\n\n const fromTypes = Array.isArray(topology.from) ? topology.from : [topology.from];\n const toTypes = Array.isArray(topology.to) ? topology.to : [topology.to];\n\n const resolvedTargetGraph = entity.targetGraph ?? topology.targetGraph;\n if (resolvedTargetGraph && resolvedTargetGraph.includes('/')) {\n throw new ValidationError(\n `Edge \"${axbType}\" has invalid targetGraph \"${resolvedTargetGraph}\" — must be a single segment (no \"/\")`,\n );\n }\n\n for (const aType of fromTypes) {\n for (const bType of toTypes) {\n entries.push({\n aType,\n axbType,\n bType,\n jsonSchema: entity.schema,\n description: entity.description,\n inverseLabel: topology.inverseLabel,\n titleField: entity.titleField,\n subtitleField: entity.subtitleField,\n allowedIn: entity.allowedIn,\n targetGraph: resolvedTargetGraph,\n migrations: entity.migrations,\n migrationWriteBack: entity.migrationWriteBack,\n indexes: entity.indexes,\n });\n }\n }\n }\n\n return entries;\n}\n","/**\n * Sandbox module for compiling dynamic registry migration source strings\n * into executable functions.\n *\n * Uses a dedicated worker thread with SES (Secure ECMAScript) Compartments\n * for isolation. SES `lockdown()` and `Compartment` evaluation run in the\n * worker thread so that the host process's intrinsics remain unaffected.\n *\n * Each migration function runs in a hardened compartment with no ambient\n * authority — no access to `process`, `require`, `fetch`, `setTimeout`,\n * or any other host-provided globals. 
Data crosses the compartment boundary\n * as JSON strings to prevent prototype chain escapes.\n *\n * Static registry migrations are already in-memory functions and never\n * go through this module.\n */\n\nimport { createHash } from 'node:crypto';\nimport type { Worker } from 'node:worker_threads';\n\nimport { MigrationError } from './errors.js';\nimport type * as SerializationModule from './serialization.js';\nimport type {\n MigrationExecutor,\n MigrationFn,\n MigrationStep,\n StoredMigrationStep,\n} from './types.js';\n\n// ---------------------------------------------------------------------------\n// Sandbox worker — SES lockdown and Compartment evaluation run in a\n// dedicated worker thread so that lockdown() does not affect the host\n// process's intrinsics. The worker is spawned lazily on first use.\n// ---------------------------------------------------------------------------\n\nlet _worker: Worker | null = null;\nlet _requestId = 0;\nconst _pending = new Map<\n number,\n {\n resolve: (value: unknown) => void;\n reject: (reason: Error) => void;\n }\n>();\n\n/**\n * Inline worker source evaluated as CJS in a dedicated worker thread.\n * Contains all SES setup, compilation, and execution logic.\n *\n * **Why inline?** Using `new Worker(code, { eval: true })` avoids\n * ESM/CJS file resolution issues when the library is consumed from\n * different module formats or bundlers.\n */\nconst WORKER_SOURCE = [\n `'use strict';`,\n `var _wt = require('node:worker_threads');`,\n `var _mod = require('node:module');`,\n `var _crypto = require('node:crypto');`,\n `var parentPort = _wt.parentPort;`,\n `var workerData = _wt.workerData;`,\n ``,\n `// Load SES using the parent module's resolution context`,\n `var esmRequire = _mod.createRequire(workerData.parentUrl);`,\n `esmRequire('ses');`,\n ``,\n `lockdown({`,\n ` errorTaming: 'unsafe',`,\n ` consoleTaming: 'unsafe',`,\n ` evalTaming: 'safe-eval',`,\n ` overrideTaming: 'moderate',`,\n ` stackFiltering: 'verbose'`,\n 
`});`,\n ``,\n `// Defense-in-depth: verify lockdown() actually hardened JSON.`,\n `if (!Object.isFrozen(JSON)) {`,\n ` throw new Error('SES lockdown failed: JSON is not frozen');`,\n `}`,\n ``,\n `var cache = new Map();`,\n ``,\n `function hashSource(s) {`,\n ` return _crypto.createHash('sha256').update(s).digest('hex');`,\n `}`,\n ``,\n `function buildWrapper(source) {`,\n ` return '(function() {' +`,\n ` ' var fn = (' + source + ');\\\\n' +`,\n ` ' if (typeof fn !== \"function\") return null;\\\\n' +`,\n ` ' return function(jsonIn) {\\\\n' +`,\n ` ' var data = JSON.parse(jsonIn);\\\\n' +`,\n ` ' var result = fn(data);\\\\n' +`,\n ` ' if (result !== null && typeof result === \"object\" && typeof result.then === \"function\") {\\\\n' +`,\n ` ' return result.then(function(r) { return JSON.stringify(r); });\\\\n' +`,\n ` ' }\\\\n' +`,\n ` ' return JSON.stringify(result);\\\\n' +`,\n ` ' };\\\\n' +`,\n ` '})()';`,\n `}`,\n ``,\n `function compileSource(source) {`,\n ` var key = hashSource(source);`,\n ` var cached = cache.get(key);`,\n ` if (cached) return cached;`,\n ``,\n ` var compartmentFn;`,\n ` try {`,\n ` var c = new Compartment({ JSON: JSON });`,\n ` compartmentFn = c.evaluate(buildWrapper(source));`,\n ` } catch (err) {`,\n ` throw new Error('Failed to compile migration source: ' + (err.message || String(err)));`,\n ` }`,\n ``,\n ` if (typeof compartmentFn !== 'function') {`,\n ` throw new Error('Migration source did not produce a function: ' + source.slice(0, 80));`,\n ` }`,\n ``,\n ` cache.set(key, compartmentFn);`,\n ` return compartmentFn;`,\n `}`,\n ``,\n `parentPort.on('message', function(msg) {`,\n ` var id = msg.id;`,\n ` try {`,\n ` if (msg.type === 'compile') {`,\n ` compileSource(msg.source);`,\n ` parentPort.postMessage({ id: id, type: 'compiled' });`,\n ` return;`,\n ` }`,\n ` if (msg.type === 'execute') {`,\n ` var fn = compileSource(msg.source);`,\n ` var raw;`,\n ` try {`,\n ` raw = fn(msg.jsonData);`,\n ` } catch (err) {`,\n ` 
parentPort.postMessage({ id: id, type: 'error', message: 'Migration function threw: ' + (err.message || String(err)) });`,\n ` return;`,\n ` }`,\n ` if (raw !== null && typeof raw === 'object' && typeof raw.then === 'function') {`,\n ` raw.then(`,\n ` function(jsonResult) {`,\n ` if (jsonResult === undefined || jsonResult === null) {`,\n ` parentPort.postMessage({ id: id, type: 'error', message: 'Migration returned a non-JSON-serializable value' });`,\n ` } else {`,\n ` parentPort.postMessage({ id: id, type: 'result', jsonResult: jsonResult });`,\n ` }`,\n ` },`,\n ` function(err) {`,\n ` parentPort.postMessage({ id: id, type: 'error', message: 'Async migration function threw: ' + (err.message || String(err)) });`,\n ` }`,\n ` );`,\n ` return;`,\n ` }`,\n ` if (raw === undefined || raw === null) {`,\n ` parentPort.postMessage({ id: id, type: 'error', message: 'Migration returned a non-JSON-serializable value' });`,\n ` } else {`,\n ` parentPort.postMessage({ id: id, type: 'result', jsonResult: raw });`,\n ` }`,\n ` }`,\n ` } catch (err) {`,\n ` parentPort.postMessage({ id: id, type: 'error', message: err.message || String(err) });`,\n ` }`,\n `});`,\n].join('\\n');\n\n// ---------------------------------------------------------------------------\n// Worker lifecycle management\n// ---------------------------------------------------------------------------\n\ninterface WorkerResponse {\n id: number;\n type: string;\n message?: string;\n jsonResult?: string;\n}\n\n// `node:worker_threads` is loaded lazily so this module can be imported in\n// runtimes without it (Cloudflare Workers, browsers). 
Only callers that\n// actually exercise the default migration sandbox will trigger the import.\nlet _WorkerCtor: (new (source: string, opts: Record<string, unknown>) => Worker) | null = null;\n\nasync function loadWorkerCtor(): Promise<NonNullable<typeof _WorkerCtor>> {\n if (_WorkerCtor) return _WorkerCtor;\n const wt = await import('node:worker_threads');\n _WorkerCtor = wt.Worker as unknown as NonNullable<typeof _WorkerCtor>;\n return _WorkerCtor;\n}\n\nasync function ensureWorker(): Promise<Worker> {\n if (_worker) return _worker;\n\n const Ctor = await loadWorkerCtor();\n _worker = new Ctor(WORKER_SOURCE, {\n eval: true,\n workerData: { parentUrl: import.meta.url },\n });\n\n // Don't let the worker prevent process exit\n _worker.unref();\n\n _worker.on('message', (msg: WorkerResponse) => {\n if (msg.id === undefined) return;\n const pending = _pending.get(msg.id);\n if (!pending) return;\n _pending.delete(msg.id);\n\n if (msg.type === 'error') {\n pending.reject(new MigrationError(msg.message ?? 
'Unknown sandbox error'));\n } else {\n pending.resolve(msg);\n }\n });\n\n _worker.on('error', (err: Error) => {\n // Worker crashed — reject all pending requests and allow respawn\n for (const [, p] of _pending) {\n p.reject(new MigrationError(`Sandbox worker error: ${err.message}`));\n }\n _pending.clear();\n _worker = null;\n });\n\n _worker.on('exit', (code: number) => {\n // Always reject pending requests — a worker exiting while requests\n // are in-flight is always an error from the caller's perspective,\n // even if the exit code is 0 (e.g., graceful termination).\n if (_pending.size > 0) {\n for (const [, p] of _pending) {\n p.reject(new MigrationError(`Sandbox worker exited with code ${code}`));\n }\n _pending.clear();\n }\n _worker = null;\n });\n\n return _worker;\n}\n\nasync function sendToWorker(msg: Record<string, unknown>): Promise<WorkerResponse> {\n const worker = await ensureWorker();\n if (_requestId >= Number.MAX_SAFE_INTEGER) _requestId = 0;\n const id = ++_requestId;\n return new Promise<WorkerResponse>((resolve, reject) => {\n _pending.set(id, { resolve: resolve as (v: unknown) => void, reject });\n worker.postMessage({ ...msg, id });\n });\n}\n\n// ---------------------------------------------------------------------------\n// Compiled function cache (keyed by executor → SHA-256 hash of source string)\n// ---------------------------------------------------------------------------\n\n// Two-level cache: outer key is the executor reference (WeakMap so that\n// short-lived executors and their caches can be garbage collected), inner\n// key is the SHA-256 hash of the source string. 
This prevents cache\n// poisoning when different clients use different sandbox executors in\n// the same process.\nconst compiledCache = new WeakMap<MigrationExecutor, Map<string, MigrationFn>>();\n\nfunction getExecutorCache(executor: MigrationExecutor): Map<string, MigrationFn> {\n let cache = compiledCache.get(executor);\n if (!cache) {\n cache = new Map();\n compiledCache.set(executor, cache);\n }\n return cache;\n}\n\nfunction hashSource(source: string): string {\n return createHash('sha256').update(source).digest('hex');\n}\n\n// ---------------------------------------------------------------------------\n// Lazy serialization loader. Pulls `@google-cloud/firestore` only when the\n// default executor actually runs a migration — keeps Firestore out of\n// non-Firestore bundles (e.g. the Cloudflare DO backend).\n// ---------------------------------------------------------------------------\n\nlet _serializationModule: typeof SerializationModule | null = null;\n\nasync function loadSerialization(): Promise<typeof SerializationModule> {\n if (_serializationModule) return _serializationModule;\n _serializationModule = await import('./serialization.js');\n return _serializationModule;\n}\n\n// ---------------------------------------------------------------------------\n// Default executor\n// ---------------------------------------------------------------------------\n\n/**\n * Default executor using a worker-thread SES Compartment with JSON marshaling.\n *\n * Migration source is compiled and executed inside an isolated SES\n * Compartment running in a dedicated worker thread. The worker calls\n * `lockdown()` in its own V8 isolate, leaving the host process's\n * intrinsics completely unaffected.\n *\n * Data crosses the compartment boundary as JSON strings, preventing\n * prototype chain escapes. 
The compartment receives only `JSON` as an\n * endowment for parsing/stringifying data.\n *\n * The returned `MigrationFn` always returns a `Promise` (communication\n * with the worker is inherently async via `postMessage`).\n */\nexport function defaultExecutor(source: string): MigrationFn {\n // Worker is spawned lazily on first execution via `sendToWorker`.\n // Eager spawning here would force a top-level `node:worker_threads`\n // load and break Cloudflare Workers / browser callers that never\n // exercise the default sandbox.\n\n // Return a MigrationFn that delegates to the worker thread.\n // Compilation + execution happen in the worker's SES Compartment.\n return (async (data: Record<string, unknown>) => {\n const { serializeFirestoreTypes, deserializeFirestoreTypes } = await loadSerialization();\n const jsonData = JSON.stringify(serializeFirestoreTypes(data));\n const response = await sendToWorker({ type: 'execute', source, jsonData });\n if (response.jsonResult === undefined || response.jsonResult === null) {\n throw new MigrationError('Migration returned a non-JSON-serializable value');\n }\n try {\n return deserializeFirestoreTypes(JSON.parse(response.jsonResult));\n } catch {\n throw new MigrationError('Migration returned a non-JSON-serializable value');\n }\n }) as MigrationFn;\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Eagerly validate a migration source string by compiling it in the\n * sandbox worker (or via a custom executor) without executing it.\n *\n * Use this to catch syntax errors at define-time or reload-time rather\n * than at first migration execution.\n *\n * @throws {MigrationError} If the source is syntactically invalid or\n * does not produce a function.\n */\nexport async function precompileSource(\n source: string,\n executor?: MigrationExecutor,\n): Promise<void> {\n if (executor && 
executor !== defaultExecutor) {\n // Custom executors validate synchronously the old way\n try {\n executor(source);\n } catch (err: unknown) {\n if (err instanceof MigrationError) throw err;\n throw new MigrationError(`Failed to compile migration source: ${(err as Error).message}`);\n }\n return;\n }\n\n // Default executor: send a compile-only message to the worker\n await sendToWorker({ type: 'compile', source });\n}\n\n/**\n * Compile a stored migration source string into an executable function.\n * Results are cached by SHA-256 hash of the source string so repeated\n * reads never re-parse the same migration.\n *\n * **Important:** When using the default executor, this function does NOT\n * validate the source synchronously — validation is deferred to the\n * worker thread at execution time. Callers that need eager validation\n * (e.g., `defineNodeType`, `reloadRegistry`) should call\n * `precompileSource()` before or alongside `compileMigrationFn()`.\n */\nexport function compileMigrationFn(\n source: string,\n executor: MigrationExecutor = defaultExecutor,\n): MigrationFn {\n const cache = getExecutorCache(executor);\n const key = hashSource(source);\n const cached = cache.get(key);\n if (cached) return cached;\n\n try {\n const fn = executor(source);\n cache.set(key, fn);\n return fn;\n } catch (err: unknown) {\n if (err instanceof MigrationError) throw err;\n throw new MigrationError(`Failed to compile migration source: ${(err as Error).message}`);\n }\n}\n\n/**\n * Batch compile stored migration steps into executable MigrationStep[].\n *\n * With the default executor, source validation is deferred to execution\n * time. 
Use `precompileSource()` to validate eagerly — see\n * `createRegistryFromGraph()` for the recommended pattern.\n */\nexport function compileMigrations(\n stored: StoredMigrationStep[],\n executor?: MigrationExecutor,\n): MigrationStep[] {\n return stored.map((step) => ({\n fromVersion: step.fromVersion,\n toVersion: step.toVersion,\n up: compileMigrationFn(step.up, executor),\n }));\n}\n\n/**\n * Terminate the sandbox worker thread. The worker will be respawned\n * on the next `defaultExecutor` call.\n *\n * Primarily useful for test cleanup to avoid vitest hanging on\n * unfinished worker threads.\n */\nexport async function destroySandboxWorker(): Promise<void> {\n if (!_worker) return;\n const w = _worker;\n _worker = null;\n // Reject any remaining pending requests\n for (const [, p] of _pending) {\n p.reject(new MigrationError('Sandbox worker terminated'));\n }\n _pending.clear();\n await w.terminate();\n}\n","import { computeEdgeDocId } from './docid.js';\nimport { InvalidQueryError } from './errors.js';\nimport { BUILTIN_FIELDS, DEFAULT_QUERY_LIMIT, NODE_RELATION } from './internal/constants.js';\nimport type { FindEdgesParams, FindNodesParams, QueryFilter, QueryPlan } from './types.js';\n\nexport function buildEdgeQueryPlan(params: FindEdgesParams): QueryPlan {\n const { aType, aUid, axbType, bType, bUid, limit, orderBy } = params;\n\n if (aUid && axbType && bUid && !params.where?.length) {\n return { strategy: 'get', docId: computeEdgeDocId(aUid, axbType, bUid) };\n }\n\n const filters: QueryFilter[] = [];\n\n if (aType) filters.push({ field: 'aType', op: '==', value: aType });\n if (aUid) filters.push({ field: 'aUid', op: '==', value: aUid });\n if (axbType) filters.push({ field: 'axbType', op: '==', value: axbType });\n if (bType) filters.push({ field: 'bType', op: '==', value: bType });\n if (bUid) filters.push({ field: 'bUid', op: '==', value: bUid });\n\n if (params.where) {\n for (const clause of params.where) {\n const field = 
BUILTIN_FIELDS.has(clause.field)\n ? clause.field\n : clause.field.startsWith('data.')\n ? clause.field\n : `data.${clause.field}`;\n filters.push({ field, op: clause.op, value: clause.value });\n }\n }\n\n if (filters.length === 0) {\n throw new InvalidQueryError('findEdges requires at least one filter parameter');\n }\n\n // limit: undefined → apply DEFAULT_QUERY_LIMIT\n // limit: 0 → no limit (unlimited, used by internal bulk operations)\n // limit: N → use N\n const effectiveLimit = limit === undefined ? DEFAULT_QUERY_LIMIT : limit || undefined;\n return { strategy: 'query', filters, options: { limit: effectiveLimit, orderBy } };\n}\n\nexport function buildNodeQueryPlan(params: FindNodesParams): QueryPlan {\n const { aType, limit, orderBy } = params;\n\n const filters: QueryFilter[] = [\n { field: 'aType', op: '==', value: aType },\n { field: 'axbType', op: '==', value: NODE_RELATION },\n ];\n\n if (params.where) {\n for (const clause of params.where) {\n const field = BUILTIN_FIELDS.has(clause.field)\n ? clause.field\n : clause.field.startsWith('data.')\n ? clause.field\n : `data.${clause.field}`;\n filters.push({ field, op: clause.op, value: clause.value });\n }\n }\n\n const effectiveLimit = limit === undefined ? DEFAULT_QUERY_LIMIT : limit || undefined;\n return { strategy: 'query', filters, options: { limit: effectiveLimit, orderBy } };\n}\n","import { BUILTIN_FIELDS } from './internal/constants.js';\nimport type { QueryFilter } from './types.js';\n\n/**\n * Result of analyzing a query for collection scan risk.\n */\nexport interface QuerySafetyResult {\n /** Whether the query matches a known indexed pattern. */\n safe: boolean;\n /** Human-readable explanation when the query is unsafe. */\n reason?: string;\n}\n\n/**\n * Known composite index patterns that prevent full collection scans.\n * Each pattern is a set of field names that must ALL be present in the\n * query filters. 
Order within the set doesn't matter — what matters is\n * that the Firestore composite index covers the combination.\n *\n * These correspond to the indexes in firestore.indexes.json:\n * (aUid, axbType) — forward edge lookup\n * (axbType, bUid) — reverse edge lookup\n * (aType, axbType) — type-scoped queries + findNodes\n * (axbType, bType) — edge type + target type\n */\nconst SAFE_INDEX_PATTERNS: ReadonlyArray<ReadonlySet<string>> = [\n new Set(['aUid', 'axbType']),\n new Set(['axbType', 'bUid']),\n new Set(['aType', 'axbType']),\n new Set(['axbType', 'bType']),\n];\n\n/**\n * Analyzes a set of query filters to determine whether the query would\n * likely cause a full collection scan on Firestore Enterprise.\n *\n * A query is considered \"safe\" if the builtin fields present in the filters\n * match at least one known composite index pattern. Queries that only use\n * `data.*` fields without a safe base pattern are flagged as unsafe.\n */\nexport function analyzeQuerySafety(filters: QueryFilter[]): QuerySafetyResult {\n // Extract the set of builtin fields being filtered on (equality checks are\n // the primary index-usable operations, but we're generous here and count\n // any filter on a builtin field as potentially index-backed).\n const builtinFieldsPresent = new Set<string>();\n let hasDataFilters = false;\n\n for (const f of filters) {\n if (BUILTIN_FIELDS.has(f.field)) {\n builtinFieldsPresent.add(f.field);\n } else {\n // data.* or other non-builtin fields\n hasDataFilters = true;\n }\n }\n\n // Check if the builtin fields match any known safe index pattern.\n // A pattern is \"matched\" if all fields in the pattern are present in the query.\n for (const pattern of SAFE_INDEX_PATTERNS) {\n let matched = true;\n for (const field of pattern) {\n if (!builtinFieldsPresent.has(field)) {\n matched = false;\n break;\n }\n }\n if (matched) {\n // Even with data.* filters, the base index narrows the scan significantly.\n // The data.* filters are applied as 
post-filters on the index results.\n return { safe: true };\n }\n }\n\n // No safe pattern matched — build an explanation.\n const presentFields = [...builtinFieldsPresent];\n if (presentFields.length === 0 && hasDataFilters) {\n return {\n safe: false,\n reason:\n 'Query filters only use data.* fields with no builtin field constraints. ' +\n 'This requires a full collection scan. Add aType, aUid, axbType, bType, or bUid filters, ' +\n 'or set allowCollectionScan: true.',\n };\n }\n\n if (hasDataFilters) {\n return {\n safe: false,\n reason:\n `Query filters on [${presentFields.join(', ')}] do not match any indexed pattern. ` +\n 'data.* filters without an indexed base require a full collection scan. ' +\n `Safe patterns: (aUid + axbType), (axbType + bUid), (aType + axbType), (axbType + bType). ` +\n 'Set allowCollectionScan: true to override.',\n };\n }\n\n return {\n safe: false,\n reason:\n `Query filters on [${presentFields.join(', ')}] do not match any indexed pattern. ` +\n 'This may cause a full collection scan on Firestore Enterprise. ' +\n `Safe patterns: (aUid + axbType), (axbType + bUid), (aType + axbType), (axbType + bType). 
` +\n 'Set allowCollectionScan: true to override.',\n };\n}\n","import { computeEdgeDocId, computeNodeDocId } from './docid.js';\nimport { QuerySafetyError } from './errors.js';\nimport type { TransactionBackend, WritableRecord } from './internal/backend.js';\nimport { NODE_RELATION } from './internal/constants.js';\nimport { assertNoDeleteSentinels, flattenPatch } from './internal/write-plan.js';\nimport { migrateRecord, migrateRecords } from './migration.js';\nimport { buildEdgeQueryPlan, buildNodeQueryPlan } from './query.js';\nimport { analyzeQuerySafety } from './query-safety.js';\nimport type {\n FindEdgesParams,\n FindNodesParams,\n GraphRegistry,\n GraphTransaction,\n MigrationWriteBack,\n QueryFilter,\n ScanProtection,\n StoredGraphRecord,\n} from './types.js';\n\nfunction buildWritableNodeRecord(\n aType: string,\n uid: string,\n data: Record<string, unknown>,\n): WritableRecord {\n return { aType, aUid: uid, axbType: NODE_RELATION, bType: aType, bUid: uid, data };\n}\n\nfunction buildWritableEdgeRecord(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n): WritableRecord {\n return { aType, aUid, axbType, bType, bUid, data };\n}\n\nexport class GraphTransactionImpl implements GraphTransaction {\n constructor(\n private readonly backend: TransactionBackend,\n private readonly registry?: GraphRegistry,\n private readonly scanProtection: ScanProtection = 'error',\n private readonly scopePath: string = '',\n private readonly globalWriteBack: MigrationWriteBack = 'off',\n ) {}\n\n async getNode(uid: string): Promise<StoredGraphRecord | null> {\n const docId = computeNodeDocId(uid);\n const record = await this.backend.getDoc(docId);\n if (!record || !this.registry) return record;\n const result = await migrateRecord(record, this.registry, this.globalWriteBack);\n if (result.migrated && result.writeBack !== 'off') {\n await this.backend.updateDoc(docId, {\n replaceData: result.record.data as 
Record<string, unknown>,\n v: result.record.v,\n });\n }\n return result.record;\n }\n\n async getEdge(aUid: string, axbType: string, bUid: string): Promise<StoredGraphRecord | null> {\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n const record = await this.backend.getDoc(docId);\n if (!record || !this.registry) return record;\n const result = await migrateRecord(record, this.registry, this.globalWriteBack);\n if (result.migrated && result.writeBack !== 'off') {\n await this.backend.updateDoc(docId, {\n replaceData: result.record.data as Record<string, unknown>,\n v: result.record.v,\n });\n }\n return result.record;\n }\n\n async edgeExists(aUid: string, axbType: string, bUid: string): Promise<boolean> {\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n const record = await this.backend.getDoc(docId);\n return record !== null;\n }\n\n private checkQuerySafety(filters: QueryFilter[], allowCollectionScan?: boolean): void {\n if (allowCollectionScan || this.scanProtection === 'off') return;\n\n const result = analyzeQuerySafety(filters);\n if (result.safe) return;\n\n if (this.scanProtection === 'error') {\n throw new QuerySafetyError(result.reason!);\n }\n\n console.warn(`[firegraph] Query safety warning: ${result.reason}`);\n }\n\n async findEdges(params: FindEdgesParams): Promise<StoredGraphRecord[]> {\n const plan = buildEdgeQueryPlan(params);\n let records: StoredGraphRecord[];\n if (plan.strategy === 'get') {\n const record = await this.backend.getDoc(plan.docId);\n records = record ? 
[record] : [];\n } else {\n this.checkQuerySafety(plan.filters, params.allowCollectionScan);\n records = await this.backend.query(plan.filters, plan.options);\n }\n return this.applyMigrations(records);\n }\n\n async findNodes(params: FindNodesParams): Promise<StoredGraphRecord[]> {\n const plan = buildNodeQueryPlan(params);\n let records: StoredGraphRecord[];\n if (plan.strategy === 'get') {\n const record = await this.backend.getDoc(plan.docId);\n records = record ? [record] : [];\n } else {\n this.checkQuerySafety(plan.filters, params.allowCollectionScan);\n records = await this.backend.query(plan.filters, plan.options);\n }\n return this.applyMigrations(records);\n }\n\n private async applyMigrations(records: StoredGraphRecord[]): Promise<StoredGraphRecord[]> {\n if (!this.registry || records.length === 0) return records;\n const results = await migrateRecords(records, this.registry, this.globalWriteBack);\n for (const result of results) {\n if (result.migrated && result.writeBack !== 'off') {\n const docId =\n result.record.axbType === NODE_RELATION\n ? 
computeNodeDocId(result.record.aUid)\n : computeEdgeDocId(result.record.aUid, result.record.axbType, result.record.bUid);\n await this.backend.updateDoc(docId, {\n replaceData: result.record.data as Record<string, unknown>,\n v: result.record.v,\n });\n }\n }\n return results.map((r) => r.record);\n }\n\n async putNode(aType: string, uid: string, data: Record<string, unknown>): Promise<void> {\n await this.writeNode(aType, uid, data, 'merge');\n }\n\n async putEdge(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n ): Promise<void> {\n await this.writeEdge(aType, aUid, axbType, bType, bUid, data, 'merge');\n }\n\n async replaceNode(aType: string, uid: string, data: Record<string, unknown>): Promise<void> {\n await this.writeNode(aType, uid, data, 'replace');\n }\n\n async replaceEdge(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n ): Promise<void> {\n await this.writeEdge(aType, aUid, axbType, bType, bUid, data, 'replace');\n }\n\n private async writeNode(\n aType: string,\n uid: string,\n data: Record<string, unknown>,\n mode: 'merge' | 'replace',\n ): Promise<void> {\n assertNoDeleteSentinels(data, mode === 'replace' ? 
'replaceNode' : 'putNode');\n if (this.registry) {\n this.registry.validate(aType, NODE_RELATION, aType, data, this.scopePath);\n }\n const docId = computeNodeDocId(uid);\n const record = buildWritableNodeRecord(aType, uid, data);\n if (this.registry) {\n const entry = this.registry.lookup(aType, NODE_RELATION, aType);\n if (entry?.schemaVersion && entry.schemaVersion > 0) {\n record.v = entry.schemaVersion;\n }\n }\n await this.backend.setDoc(docId, record, mode);\n }\n\n private async writeEdge(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n mode: 'merge' | 'replace',\n ): Promise<void> {\n assertNoDeleteSentinels(data, mode === 'replace' ? 'replaceEdge' : 'putEdge');\n if (this.registry) {\n this.registry.validate(aType, axbType, bType, data, this.scopePath);\n }\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n const record = buildWritableEdgeRecord(aType, aUid, axbType, bType, bUid, data);\n if (this.registry) {\n const entry = this.registry.lookup(aType, axbType, bType);\n if (entry?.schemaVersion && entry.schemaVersion > 0) {\n record.v = entry.schemaVersion;\n }\n }\n await this.backend.setDoc(docId, record, mode);\n }\n\n async updateNode(uid: string, data: Record<string, unknown>): Promise<void> {\n const docId = computeNodeDocId(uid);\n await this.backend.updateDoc(docId, { dataOps: flattenPatch(data) });\n }\n\n async updateEdge(\n aUid: string,\n axbType: string,\n bUid: string,\n data: Record<string, unknown>,\n ): Promise<void> {\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n await this.backend.updateDoc(docId, { dataOps: flattenPatch(data) });\n }\n\n async removeNode(uid: string): Promise<void> {\n const docId = computeNodeDocId(uid);\n await this.backend.deleteDoc(docId);\n }\n\n async removeEdge(aUid: string, axbType: string, bUid: string): Promise<void> {\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n await this.backend.deleteDoc(docId);\n 
}\n}\n","import { GraphBatchImpl } from './batch.js';\nimport { computeEdgeDocId, computeNodeDocId } from './docid.js';\nimport {\n createBootstrapRegistry,\n createRegistryFromGraph,\n generateDeterministicUid,\n META_EDGE_TYPE,\n META_NODE_TYPE,\n} from './dynamic-registry.js';\nimport { DynamicRegistryError, FiregraphError, QuerySafetyError } from './errors.js';\nimport type { BackendCapabilities, StorageBackend, WritableRecord } from './internal/backend.js';\nimport { NODE_RELATION } from './internal/constants.js';\nimport { assertNoDeleteSentinels, flattenPatch } from './internal/write-plan.js';\nimport type { MigrationResult } from './migration.js';\nimport { migrateRecord, migrateRecords } from './migration.js';\nimport { buildEdgeQueryPlan, buildNodeQueryPlan } from './query.js';\nimport { analyzeQuerySafety } from './query-safety.js';\nimport { createMergedRegistry } from './registry.js';\nimport { precompileSource } from './sandbox.js';\nimport { GraphTransactionImpl } from './transaction.js';\nimport type {\n AggregateResult,\n AggregateSpec,\n BulkOptions,\n BulkResult,\n BulkUpdatePatch,\n Capability,\n CascadeResult,\n CoreGraphClient,\n DefineTypeOptions,\n DynamicGraphClient,\n DynamicGraphMethods,\n DynamicRegistryConfig,\n EdgeTopology,\n EngineTraversalParams,\n EngineTraversalResult,\n ExpandParams,\n ExpandResult,\n FindEdgesParams,\n FindEdgesProjectedParams,\n FindNearestParams,\n FindNodesParams,\n FullTextSearchParams,\n GeoSearchParams,\n GraphBatch,\n GraphClient,\n GraphClientOptions,\n GraphReader,\n GraphRegistry,\n GraphTransaction,\n MigrationExecutor,\n MigrationFn,\n MigrationWriteBack,\n ProjectedRow,\n QueryFilter,\n QueryOptions,\n ScanProtection,\n StoredGraphRecord,\n} from './types.js';\n\nconst RESERVED_TYPE_NAMES = new Set([META_NODE_TYPE, META_EDGE_TYPE]);\n\nfunction buildWritableNodeRecord(\n aType: string,\n uid: string,\n data: Record<string, unknown>,\n): WritableRecord {\n return { aType, aUid: uid, axbType: 
NODE_RELATION, bType: aType, bUid: uid, data };\n}\n\nfunction buildWritableEdgeRecord(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n): WritableRecord {\n return { aType, aUid, axbType, bType, bUid, data };\n}\n\nexport class GraphClientImpl implements CoreGraphClient, DynamicGraphMethods {\n readonly scanProtection: ScanProtection;\n\n /**\n * Capability set of the underlying backend. Mirrors `backend.capabilities`\n * verbatim so callers can portability-check (`client.capabilities.has(\n * 'query.join')`) without reaching for the backend handle. Static for the\n * lifetime of the client.\n */\n get capabilities(): BackendCapabilities {\n return this.backend.capabilities;\n }\n\n // Static mode\n private readonly staticRegistry?: GraphRegistry;\n\n // Dynamic mode\n private readonly dynamicConfig?: DynamicRegistryConfig;\n private readonly bootstrapRegistry?: GraphRegistry;\n private dynamicRegistry?: GraphRegistry;\n private readonly metaBackend?: StorageBackend;\n\n // Migration settings\n private readonly globalWriteBack: MigrationWriteBack;\n private readonly migrationSandbox?: MigrationExecutor;\n\n constructor(\n private readonly backend: StorageBackend,\n options?: GraphClientOptions,\n /** @internal Optional pre-built meta-backend (used by subgraph clones). */\n metaBackend?: StorageBackend,\n ) {\n this.globalWriteBack = options?.migrationWriteBack ?? 'off';\n this.migrationSandbox = options?.migrationSandbox;\n\n if (options?.registryMode) {\n this.dynamicConfig = options.registryMode;\n this.bootstrapRegistry = createBootstrapRegistry();\n if (options.registry) {\n this.staticRegistry = options.registry;\n }\n this.metaBackend = metaBackend;\n } else {\n this.staticRegistry = options?.registry;\n }\n\n this.scanProtection = options?.scanProtection ?? 
'error';\n }\n\n // ---------------------------------------------------------------------------\n // Backend access (exposed for traversal helpers and subgraph cloning)\n // ---------------------------------------------------------------------------\n\n /** @internal */\n getBackend(): StorageBackend {\n return this.backend;\n }\n\n /**\n * Snapshot of the currently-effective registry. Returns the merged view\n * used for domain-type validation and migration — in dynamic mode this is\n * `dynamicRegistry ?? staticRegistry ?? bootstrapRegistry`, so callers see\n * updates after `reloadRegistry()` without having to re-resolve anything.\n *\n * Exposed for backends that need topology access during bulk operations\n * (e.g. the Cloudflare DO backend's cross-DO cascade). Not part of the\n * public `GraphClient` surface.\n *\n * @internal\n */\n getRegistrySnapshot(): GraphRegistry | undefined {\n return this.getCombinedRegistry();\n }\n\n // ---------------------------------------------------------------------------\n // Registry routing\n // ---------------------------------------------------------------------------\n\n private getRegistryForType(aType: string): GraphRegistry | undefined {\n if (!this.dynamicConfig) return this.staticRegistry;\n\n if (aType === META_NODE_TYPE || aType === META_EDGE_TYPE) {\n return this.bootstrapRegistry;\n }\n\n return this.dynamicRegistry ?? this.staticRegistry ?? this.bootstrapRegistry;\n }\n\n private getBackendForType(aType: string): StorageBackend {\n if (this.metaBackend && (aType === META_NODE_TYPE || aType === META_EDGE_TYPE)) {\n return this.metaBackend;\n }\n return this.backend;\n }\n\n private getCombinedRegistry(): GraphRegistry | undefined {\n if (!this.dynamicConfig) return this.staticRegistry;\n return this.dynamicRegistry ?? this.staticRegistry ?? 
this.bootstrapRegistry;\n }\n\n // ---------------------------------------------------------------------------\n // Query safety\n // ---------------------------------------------------------------------------\n\n private checkQuerySafety(filters: QueryFilter[], allowCollectionScan?: boolean): void {\n if (allowCollectionScan || this.scanProtection === 'off') return;\n\n const result = analyzeQuerySafety(filters);\n if (result.safe) return;\n\n if (this.scanProtection === 'error') {\n throw new QuerySafetyError(result.reason!);\n }\n\n console.warn(`[firegraph] Query safety warning: ${result.reason}`);\n }\n\n // ---------------------------------------------------------------------------\n // Migration helpers\n // ---------------------------------------------------------------------------\n\n private async applyMigration(\n record: StoredGraphRecord,\n docId: string,\n ): Promise<StoredGraphRecord> {\n const registry = this.getCombinedRegistry();\n if (!registry) return record;\n\n const result = await migrateRecord(record, registry, this.globalWriteBack);\n if (result.migrated) {\n this.handleWriteBack(result, docId);\n }\n return result.record;\n }\n\n private async applyMigrations(records: StoredGraphRecord[]): Promise<StoredGraphRecord[]> {\n const registry = this.getCombinedRegistry();\n if (!registry || records.length === 0) return records;\n\n const results = await migrateRecords(records, registry, this.globalWriteBack);\n for (const result of results) {\n if (result.migrated) {\n const docId =\n result.record.axbType === NODE_RELATION\n ? computeNodeDocId(result.record.aUid)\n : computeEdgeDocId(result.record.aUid, result.record.axbType, result.record.bUid);\n this.handleWriteBack(result, docId);\n }\n }\n return results.map((r) => r.record);\n }\n\n /**\n * Fire-and-forget write-back for a migrated record. Both `'eager'` and\n * `'background'` are non-blocking; the difference is the log level on\n * failure. 
  /**
   * Fire-and-forget write-back for a migrated record. Both `'eager'` and
   * `'background'` are non-blocking; the difference is the log level on
   * failure. For synchronous write-back, use a transaction — see
   * `GraphTransactionImpl`.
   */
  private handleWriteBack(result: MigrationResult, docId: string): void {
    // 'off' means the migration result stays in-memory only.
    if (result.writeBack === 'off') return;

    const doWriteBack = async () => {
      try {
        await this.backend.updateDoc(docId, {
          replaceData: result.record.data as Record<string, unknown>,
          v: result.record.v,
        });
      } catch (err: unknown) {
        const msg = `[firegraph] Migration write-back failed for ${docId}: ${(err as Error).message}`;
        if (result.writeBack === 'eager') {
          console.error(msg);
        } else {
          console.warn(msg);
        }
      }
    };

    // Deliberately not awaited — the read path must not block on persistence.
    void doWriteBack();
  }

  // ---------------------------------------------------------------------------
  // GraphReader
  // ---------------------------------------------------------------------------

  /** Reads a node by uid; the record is returned post-migration. */
  async getNode(uid: string): Promise<StoredGraphRecord | null> {
    const docId = computeNodeDocId(uid);
    const record = await this.backend.getDoc(docId);
    if (!record) return null;
    return this.applyMigration(record, docId);
  }

  /** Reads an edge by its identifying (aUid, axbType, bUid) triple. */
  async getEdge(aUid: string, axbType: string, bUid: string): Promise<StoredGraphRecord | null> {
    const docId = computeEdgeDocId(aUid, axbType, bUid);
    const record = await this.backend.getDoc(docId);
    if (!record) return null;
    return this.applyMigration(record, docId);
  }

  /** Existence check — fetches the doc but applies no migration. */
  async edgeExists(aUid: string, axbType: string, bUid: string): Promise<boolean> {
    const docId = computeEdgeDocId(aUid, axbType, bUid);
    const record = await this.backend.getDoc(docId);
    return record !== null;
  }

  /**
   * Edge query. A plan with strategy 'get' (all three identifiers present,
   * no `where` clauses) is a direct doc lookup; otherwise the filters must
   * pass scan protection before the backend query runs. Results flow
   * through the migration pipeline.
   */
  async findEdges(params: FindEdgesParams): Promise<StoredGraphRecord[]> {
    const plan = buildEdgeQueryPlan(params);
    let records: StoredGraphRecord[];
    if (plan.strategy === 'get') {
      const record = await this.backend.getDoc(plan.docId);
      records = record ? [record] : [];
    } else {
      this.checkQuerySafety(plan.filters, params.allowCollectionScan);
      records = await this.backend.query(plan.filters, plan.options);
    }
    return this.applyMigrations(records);
  }

  /** Node query; same plan/safety/migration pipeline as findEdges. */
  async findNodes(params: FindNodesParams): Promise<StoredGraphRecord[]> {
    const plan = buildNodeQueryPlan(params);
    let records: StoredGraphRecord[];
    if (plan.strategy === 'get') {
      const record = await this.backend.getDoc(plan.docId);
      records = record ? [record] : [];
    } else {
      this.checkQuerySafety(plan.filters, params.allowCollectionScan);
      records = await this.backend.query(plan.filters, plan.options);
    }
    return this.applyMigrations(records);
  }

  // ---------------------------------------------------------------------------
  // GraphWriter
  // ---------------------------------------------------------------------------

  /** Upsert (merge) a node's data. */
  async putNode(aType: string, uid: string, data: Record<string, unknown>): Promise<void> {
    await this.writeNode(aType, uid, data, 'merge');
  }

  /** Upsert (merge) an edge's data. */
  async putEdge(
    aType: string,
    aUid: string,
    axbType: string,
    bType: string,
    bUid: string,
    data: Record<string, unknown>,
  ): Promise<void> {
    await this.writeEdge(aType, aUid, axbType, bType, bUid, data, 'merge');
  }

  /** Replace a node's data wholesale (no merge). */
  async replaceNode(aType: string, uid: string, data: Record<string, unknown>): Promise<void> {
    await this.writeNode(aType, uid, data, 'replace');
  }

  /** Replace an edge's data wholesale (no merge). */
  async replaceEdge(
    aType: string,
    aUid: string,
    axbType: string,
    bType: string,
    bUid: string,
    data: Record<string, unknown>,
  ): Promise<void> {
    await this.writeEdge(aType, aUid, axbType, bType, bUid, data, 'replace');
  }
'replaceNode' : 'putNode');\n const registry = this.getRegistryForType(aType);\n if (registry) {\n registry.validate(aType, NODE_RELATION, aType, data, this.backend.scopePath);\n }\n const backend = this.getBackendForType(aType);\n const docId = computeNodeDocId(uid);\n const record = buildWritableNodeRecord(aType, uid, data);\n if (registry) {\n const entry = registry.lookup(aType, NODE_RELATION, aType);\n if (entry?.schemaVersion && entry.schemaVersion > 0) {\n record.v = entry.schemaVersion;\n }\n }\n await backend.setDoc(docId, record, mode);\n }\n\n private async writeEdge(\n aType: string,\n aUid: string,\n axbType: string,\n bType: string,\n bUid: string,\n data: Record<string, unknown>,\n mode: 'merge' | 'replace',\n ): Promise<void> {\n assertNoDeleteSentinels(data, mode === 'replace' ? 'replaceEdge' : 'putEdge');\n const registry = this.getRegistryForType(aType);\n if (registry) {\n registry.validate(aType, axbType, bType, data, this.backend.scopePath);\n }\n const backend = this.getBackendForType(aType);\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n const record = buildWritableEdgeRecord(aType, aUid, axbType, bType, bUid, data);\n if (registry) {\n const entry = registry.lookup(aType, axbType, bType);\n if (entry?.schemaVersion && entry.schemaVersion > 0) {\n record.v = entry.schemaVersion;\n }\n }\n await backend.setDoc(docId, record, mode);\n }\n\n async updateNode(uid: string, data: Record<string, unknown>): Promise<void> {\n const docId = computeNodeDocId(uid);\n await this.backend.updateDoc(docId, { dataOps: flattenPatch(data) });\n }\n\n async updateEdge(\n aUid: string,\n axbType: string,\n bUid: string,\n data: Record<string, unknown>,\n ): Promise<void> {\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n await this.backend.updateDoc(docId, { dataOps: flattenPatch(data) });\n }\n\n async removeNode(uid: string): Promise<void> {\n const docId = computeNodeDocId(uid);\n await this.backend.deleteDoc(docId);\n }\n\n async 
removeEdge(aUid: string, axbType: string, bUid: string): Promise<void> {\n const docId = computeEdgeDocId(aUid, axbType, bUid);\n await this.backend.deleteDoc(docId);\n }\n\n // ---------------------------------------------------------------------------\n // Transactions & Batches\n // ---------------------------------------------------------------------------\n\n async runTransaction<T>(fn: (tx: GraphTransaction) => Promise<T>): Promise<T> {\n return this.backend.runTransaction(async (txBackend) => {\n const graphTx = new GraphTransactionImpl(\n txBackend,\n this.getCombinedRegistry(),\n this.scanProtection,\n this.backend.scopePath,\n this.globalWriteBack,\n );\n return fn(graphTx);\n });\n }\n\n batch(): GraphBatch {\n return new GraphBatchImpl(\n this.backend.createBatch(),\n this.getCombinedRegistry(),\n this.backend.scopePath,\n );\n }\n\n // ---------------------------------------------------------------------------\n // Subgraph\n // ---------------------------------------------------------------------------\n\n subgraph(parentNodeUid: string, name: string = 'graph'): GraphClient {\n if (!parentNodeUid || parentNodeUid.includes('/')) {\n throw new FiregraphError(\n `Invalid parentNodeUid for subgraph: \"${parentNodeUid}\". ` +\n 'Must be a non-empty string without \"/\".',\n 'INVALID_SUBGRAPH',\n );\n }\n if (name.includes('/')) {\n throw new FiregraphError(\n `Subgraph name must not contain \"/\": got \"${name}\". 
  // ---------------------------------------------------------------------------
  // Collection group query
  // ---------------------------------------------------------------------------

  /**
   * Edge query across subgraph collections (collection-group shape).
   * Requires backend support; a GET-shape plan (all three identifiers, no
   * `where`) is rejected because the backend contract takes filters, not a
   * docId. Results flow through the migration pipeline.
   */
  async findEdgesGlobal(
    params: FindEdgesParams,
    collectionName?: string,
  ): Promise<StoredGraphRecord[]> {
    if (!this.backend.findEdgesGlobal) {
      throw new FiregraphError(
        'findEdgesGlobal() is not supported by the current storage backend.',
        'UNSUPPORTED_OPERATION',
      );
    }
    const plan = buildEdgeQueryPlan(params);
    if (plan.strategy === 'get') {
      throw new FiregraphError(
        'findEdgesGlobal() requires a query, not a direct document lookup. ' +
          'Omit one of aUid/axbType/bUid to force a query strategy.',
        'INVALID_QUERY',
      );
    }
    this.checkQuerySafety(plan.filters, params.allowCollectionScan);
    const records = await this.backend.findEdgesGlobal(params, collectionName);
    return this.applyMigrations(records);
  }

  // ---------------------------------------------------------------------------
  // Aggregate query (capability: query.aggregate)
  // ---------------------------------------------------------------------------

  /**
   * Server-side aggregation over matching edges. Unlike findEdges, a
   * completely unfiltered aggregate is a legitimate shape — but it still
   * runs through checkQuerySafety, so it needs allowCollectionScan unless
   * scan protection is 'off'.
   */
  async aggregate<A extends AggregateSpec>(
    params: FindEdgesParams & { aggregates: A },
  ): Promise<AggregateResult<A>> {
    if (!this.backend.aggregate) {
      throw new FiregraphError(
        'aggregate() is not supported by the current storage backend.',
        'UNSUPPORTED_OPERATION',
      );
    }

    // Allow zero-filter aggregates (e.g. count(*) over the whole collection).
    // findEdges-style buildEdgeQueryPlan rejects empty filter sets because a
    // bare findEdges with no identifying fields would be a full collection
    // scan; aggregate() is the legitimate use case for that shape.
    const hasAnyFilter =
      params.aType ||
      params.aUid ||
      params.axbType ||
      params.bType ||
      params.bUid ||
      (params.where && params.where.length > 0);

    if (!hasAnyFilter) {
      this.checkQuerySafety([], params.allowCollectionScan);
      const result = await this.backend.aggregate(params.aggregates, []);
      return result as AggregateResult<A>;
    }

    const plan = buildEdgeQueryPlan(params);
    if (plan.strategy === 'get') {
      throw new FiregraphError(
        'aggregate() requires a query, not a direct document lookup. ' +
          'Omit one of aUid/axbType/bUid to force a query strategy.',
        'INVALID_QUERY',
      );
    }
    this.checkQuerySafety(plan.filters, params.allowCollectionScan);
    const result = await this.backend.aggregate(params.aggregates, plan.filters);
    return result as AggregateResult<A>;
  }

  // ---------------------------------------------------------------------------
  // Bulk operations
  // ---------------------------------------------------------------------------

  /**
   * Cascade removal of a node and its edges — fully delegated to the
   * backend, which receives this client for registry/topology access.
   */
  async removeNodeCascade(uid: string, options?: BulkOptions): Promise<CascadeResult> {
    return this.backend.removeNodeCascade(uid, this, options);
  }

  /**
   * Bulk edge removal via the backend's fetch-then-delete loop — the
   * cap-less fallback to `bulkDelete` for backends without `query.dml`.
   */
  async bulkRemoveEdges(params: FindEdgesParams, options?: BulkOptions): Promise<BulkResult> {
    return this.backend.bulkRemoveEdges(params, this, options);
  }
The fetch-then-delete loop in `bulkRemoveEdges`\n * is the cap-less fallback; this method is the fast path on backends\n * declaring `query.dml`.\n *\n * Scan-protection rules match `findEdges`: a query with no identifying\n * fields requires `allowCollectionScan: true` to pass. A bare-empty\n * filter set (no `aType`, `aUid`, etc., no `where`) is allowed at this\n * layer — shared SQLite bounds the blast radius via its leading `scope`\n * predicate — but the DO RPC backend rejects empty filters at the wire\n * boundary as defense-in-depth. To wipe a routed subgraph DO, use\n * `removeNodeCascade` on the parent node instead.\n */\n async bulkDelete(params: FindEdgesParams, options?: BulkOptions): Promise<BulkResult> {\n if (!this.backend.bulkDelete) {\n throw new FiregraphError(\n 'bulkDelete() is not supported by the current storage backend. ' +\n 'Fall back to bulkRemoveEdges() for backends without query.dml ' +\n '(e.g. Firestore Standard).',\n 'UNSUPPORTED_OPERATION',\n );\n }\n const filters = this.buildDmlFilters(params);\n return this.backend.bulkDelete(filters, options);\n }\n\n /**\n * Single-statement bulk UPDATE. Same translation path as `bulkDelete`,\n * but the patch is deep-merged into each matching row's `data` via the\n * shared `flattenPatch` pipeline. 
Identifying columns are immutable\n * through this path (see `BulkUpdatePatch` JSDoc).\n *\n * Empty-patch rejection happens inside the backend (`compileBulkUpdate`)\n * — a `data: {}` payload would only rewrite `updated_at`, which is\n * almost certainly a bug.\n */\n async bulkUpdate(\n params: FindEdgesParams,\n patch: BulkUpdatePatch,\n options?: BulkOptions,\n ): Promise<BulkResult> {\n if (!this.backend.bulkUpdate) {\n throw new FiregraphError(\n 'bulkUpdate() is not supported by the current storage backend.',\n 'UNSUPPORTED_OPERATION',\n );\n }\n const filters = this.buildDmlFilters(params);\n return this.backend.bulkUpdate(filters, patch, options);\n }\n\n // ---------------------------------------------------------------------------\n // Multi-source fan-out (capability: query.join)\n // ---------------------------------------------------------------------------\n\n /**\n * Fan out from `params.sources` over a single edge type in one round trip.\n * On backends without `query.join`, throws `UNSUPPORTED_OPERATION` — the\n * cap-less fallback is the per-source `findEdges` loop, which lives in\n * `traverse.ts` (the higher-level traversal walker) rather than here.\n *\n * `expand()` is intentionally edge-type-only — the source set is a flat\n * UID list and the hop matches one `axbType`. Multi-axbType expansions\n * become multiple `expand()` calls, one per relation.\n *\n * `params.sources.length === 0` short-circuits to an empty result. The\n * backend never sees the call. (`compileExpand` itself rejects empty\n * because `IN ()` is not valid SQL.)\n */\n async expand(params: ExpandParams): Promise<ExpandResult> {\n if (!this.backend.expand) {\n throw new FiregraphError(\n 'expand() is not supported by the current storage backend. 
' +\n 'Backends without `query.join` can use createTraversal() instead — ' +\n 'the per-hop loop is functionally equivalent (just slower).',\n 'UNSUPPORTED_OPERATION',\n );\n }\n if (params.sources.length === 0) {\n return params.hydrate ? { edges: [], targets: [] } : { edges: [] };\n }\n return this.backend.expand(params);\n }\n\n // ---------------------------------------------------------------------------\n // Engine-level multi-hop traversal (capability: traversal.serverSide)\n // ---------------------------------------------------------------------------\n\n /**\n * Compile a multi-hop traversal spec into one server-side nested\n * Pipeline and dispatch a single round trip.\n *\n * Backends declaring `traversal.serverSide` (Firestore Enterprise\n * today) install this method; everywhere else, it throws\n * `UNSUPPORTED_OPERATION`. The capability gate matches the type-level\n * surface — `GraphClient<C>` only exposes `runEngineTraversal` when\n * `'traversal.serverSide' extends C`.\n *\n * Most callers should not invoke this method directly; the\n * `createTraversal(...).run()` builder routes through it\n * automatically when `engineTraversal: 'auto'` (the default) and\n * the spec is eligible per `firestore-traverse-compiler.ts`. Calling\n * directly is appropriate for benchmarking or for callers that have\n * already shaped their hop chain into the strict\n * `EngineTraversalParams` shape.\n *\n * `params.sources.length === 0` short-circuits to empty per-hop\n * arrays. The backend never sees the call.\n */\n async runEngineTraversal(params: EngineTraversalParams): Promise<EngineTraversalResult> {\n if (!this.backend.runEngineTraversal) {\n throw new FiregraphError(\n 'runEngineTraversal() is not supported by the current storage backend. 
' +\n 'Backends without `traversal.serverSide` can use createTraversal() instead — ' +\n 'the per-hop loop is functionally equivalent for in-graph specs (different ' +\n 'round-trip profile).',\n 'UNSUPPORTED_OPERATION',\n );\n }\n if (params.sources.length === 0) {\n return {\n hops: params.hops.map(() => ({ edges: [], sourceCount: 0 })),\n totalReads: 0,\n };\n }\n return this.backend.runEngineTraversal(params);\n }\n\n // ---------------------------------------------------------------------------\n // Server-side projection (capability: query.select)\n // ---------------------------------------------------------------------------\n\n /**\n * Server-side projection — fetch only the requested fields from each\n * matching edge. The backend translates the call into a projecting query\n * (`SELECT json_extract(...)` on SQLite/DO, `Query.select(...)` on\n * Firestore Standard, classic projection on Enterprise) so the wire\n * payload is reduced to just the requested fields.\n *\n * Resolution rules for `select` (mirrored across all backends):\n *\n * - Built-in envelope fields (`aType`, `aUid`, `axbType`, `bType`,\n * `bUid`, `createdAt`, `updatedAt`, `v`) → resolve to the typed\n * column / Firestore field directly.\n * - `'data'` literal → returns the whole user payload.\n * - `'data.<x>'` → explicit nested path, returned at the same shape.\n * - bare name → rewritten to `data.<name>` (the canonical \"give me a\n * few keys out of the JSON payload\" shape).\n *\n * Empty `select: []` is rejected with `INVALID_QUERY`. Duplicate entries\n * are de-duped (first-occurrence order preserved); the result row carries\n * one slot per unique field.\n *\n * Migrations are *not* applied to the result. 
The caller asked for a\n * partial shape, and rehydrating it through the migration pipeline would\n * require synthesising every absent field — see\n * `StorageBackend.findEdgesProjected` for the rationale.\n *\n * Scan protection follows the `findEdges` rules: a query with no\n * identifying fields requires `allowCollectionScan: true` to pass. The\n * cap-less fallback would be `findEdges` + JS-side projection, but that\n * defeats the wire-payload reduction; backends without `query.select`\n * throw `UNSUPPORTED_OPERATION` rather than silently materialising full\n * rows.\n */\n async findEdgesProjected<F extends ReadonlyArray<string>>(\n params: FindEdgesProjectedParams<F>,\n ): Promise<Array<ProjectedRow<F>>> {\n if (!this.backend.findEdgesProjected) {\n throw new FiregraphError(\n 'findEdgesProjected() is not supported by the current storage backend. ' +\n 'There is no client-side fallback because the wire-payload reduction ' +\n 'is the entire point of the API — use findEdges() and project in JS ' +\n 'if the backend does not declare `query.select`.',\n 'UNSUPPORTED_OPERATION',\n );\n }\n if (params.select.length === 0) {\n throw new FiregraphError(\n 'findEdgesProjected() requires a non-empty `select` list.',\n 'INVALID_QUERY',\n );\n }\n\n // Reuse the same plan + scan-safety pipeline as `findEdges` so the\n // identifier-vs-where rules and `allowCollectionScan` semantics behave\n // identically. A GET-shape (all three identifiers, no `where`) is also\n // allowed here — projection over a single edge is a meaningful shape.\n // We translate it to the equivalent equality filter list because the\n // backend `findEdgesProjected` contract takes filters, not a docId.\n const plan = buildEdgeQueryPlan(params);\n let filters: QueryFilter[];\n let options: QueryOptions | undefined;\n if (plan.strategy === 'get') {\n // GET means `aUid`, `axbType`, `bUid` are all set and there are no\n // `where` clauses. 
Synthesize the equivalent equality filters so the\n // backend can issue a single projecting query whose WHERE clause\n // resolves to the same row the docId would have looked up.\n filters = [\n { field: 'aUid', op: '==', value: params.aUid! },\n { field: 'axbType', op: '==', value: params.axbType! },\n { field: 'bUid', op: '==', value: params.bUid! },\n ];\n if (params.aType) filters.push({ field: 'aType', op: '==', value: params.aType });\n if (params.bType) filters.push({ field: 'bType', op: '==', value: params.bType });\n options = undefined;\n } else {\n filters = plan.filters;\n options = plan.options;\n }\n this.checkQuerySafety(filters, params.allowCollectionScan);\n const rows = await this.backend.findEdgesProjected(params.select, filters, options);\n return rows as Array<ProjectedRow<F>>;\n }\n\n /**\n * Native vector / nearest-neighbour search (capability `search.vector`).\n *\n * Resolves to the top-K records by similarity, sorted nearest-first\n * (`EUCLIDEAN` / `COSINE`) or highest-first (`DOT_PRODUCT`). The wrapper\n * is intentionally thin: capability check, scan-protection, then forward\n * `params` verbatim to the backend. All field-path normalisation and\n * SDK-shape validation lives in the shared\n * `runFirestoreFindNearest` helper that both Firestore editions call —\n * keeping it there means the validation surface stays in lockstep with\n * the SDK call site, regardless of which backend is plugged in.\n *\n * Migrations are NOT applied. The vector index walked the raw stored\n * shape; rehydrating each row through the migration pipeline would\n * change the candidate set the index already chose. If you need\n * migrated shape, follow up with `getNode` / `findEdges` on the\n * returned UIDs — those paths apply migrations normally.\n *\n * Scan-protection mirrors `findEdges`: if no identifying filters\n * (`aType` / `axbType` / `bType`) and no `where` clauses are supplied,\n * the request must opt in via `allowCollectionScan: true`. 
The ANN\n * query still walks the candidate set the WHERE clause produces, so\n * an unfiltered nearest-neighbour search over a million-row collection\n * is the same scan trap as an unfiltered `findEdges`.\n *\n * Backends without `search.vector` throw `UNSUPPORTED_OPERATION` —\n * there is no client-side fallback because emulating ANN over the\n * generic backend surface (`findEdges` + JS-side cosine) doesn't scale\n * past trivial datasets and would give callers the wrong mental model\n * about cost.\n */\n async findNearest(params: FindNearestParams): Promise<StoredGraphRecord[]> {\n if (!this.backend.findNearest) {\n throw new FiregraphError(\n 'findNearest() is not supported by the current storage backend. ' +\n 'Vector search requires a backend that declares `search.vector` ' +\n '(currently Firestore Standard and Enterprise). There is no ' +\n 'client-side fallback because emulating ANN on top of the generic ' +\n 'backend surface does not scale beyond toy datasets.',\n 'UNSUPPORTED_OPERATION',\n );\n }\n\n // Build the same filter list the helper passes to `applyFiltersToQuery`\n // so scan-protection sees exactly what the index will narrow on.\n // Identifiers come first (matching `buildVectorFilters` in the helper),\n // user-supplied `where` follows. 
We do NOT use `buildEdgeQueryPlan`\n // here — there is no GET-strategy notion for vector search; the\n // identifying-field filters are pure narrowing for the ANN walk.\n const filters: QueryFilter[] = [];\n if (params.aType) filters.push({ field: 'aType', op: '==', value: params.aType });\n if (params.axbType) filters.push({ field: 'axbType', op: '==', value: params.axbType });\n if (params.bType) filters.push({ field: 'bType', op: '==', value: params.bType });\n if (params.where) filters.push(...params.where);\n this.checkQuerySafety(filters, params.allowCollectionScan);\n\n return this.backend.findNearest(params);\n }\n\n /**\n * Native full-text search (capability `search.fullText`).\n *\n * Returns the top-N records by relevance, ordered by the search\n * index's score. Only Firestore Enterprise declares this capability\n * today — the underlying Pipelines `search({ query: documentMatches(...) })`\n * stage requires Enterprise's FTS index. Standard does not declare\n * the cap (FTS is an Enterprise-only product feature, not a\n * typed-API gap), and the SQLite-shaped backends have no native\n * FTS index. Backends without `search.fullText` throw\n * `UNSUPPORTED_OPERATION` from this wrapper.\n *\n * Scan-protection mirrors `findNearest`: a search with no\n * identifying filters (`aType` / `axbType` / `bType`) walks every\n * row the index scored, so the request must opt in via\n * `allowCollectionScan: true`.\n *\n * Migrations are NOT applied. The FTS index walked the raw stored\n * shape; rehydrating each row through the migration pipeline would\n * change the candidate set the index already scored. If you need\n * migrated shape, follow up with `getNode` / `findEdges` on the\n * returned UIDs.\n */\n async fullTextSearch(params: FullTextSearchParams): Promise<StoredGraphRecord[]> {\n if (!this.backend.fullTextSearch) {\n throw new FiregraphError(\n 'fullTextSearch() is not supported by the current storage backend. 
' +\n 'Full-text search requires a backend that declares `search.fullText` ' +\n '(currently Firestore Enterprise only — FTS is an Enterprise product ' +\n 'feature). There is no client-side fallback because emulating FTS over ' +\n 'the generic backend surface would not scale beyond toy datasets.',\n 'UNSUPPORTED_OPERATION',\n );\n }\n const filters: QueryFilter[] = [];\n if (params.aType) filters.push({ field: 'aType', op: '==', value: params.aType });\n if (params.axbType) filters.push({ field: 'axbType', op: '==', value: params.axbType });\n if (params.bType) filters.push({ field: 'bType', op: '==', value: params.bType });\n this.checkQuerySafety(filters, params.allowCollectionScan);\n return this.backend.fullTextSearch(params);\n }\n\n /**\n * Native geospatial distance search (capability `search.geo`).\n *\n * Returns rows whose `geoField` lies within `radiusMeters` of\n * `point`, ordered nearest-first by default. Only Firestore\n * Enterprise declares this capability — same Enterprise-only\n * gating as `fullTextSearch`. Backends without `search.geo` throw\n * `UNSUPPORTED_OPERATION` from this wrapper.\n *\n * Scan-protection mirrors `findNearest` and `fullTextSearch`.\n *\n * Migrations are NOT applied — same rationale as the other search\n * extensions.\n */\n async geoSearch(params: GeoSearchParams): Promise<StoredGraphRecord[]> {\n if (!this.backend.geoSearch) {\n throw new FiregraphError(\n 'geoSearch() is not supported by the current storage backend. ' +\n 'Geospatial search requires a backend that declares `search.geo` ' +\n '(currently Firestore Enterprise only — geo queries are an ' +\n 'Enterprise product feature). 
There is no client-side fallback ' +\n 'because emulating geo over the generic backend surface (haversine ' +\n 'over `findEdges`) would not scale beyond trivial datasets.',\n 'UNSUPPORTED_OPERATION',\n );\n }\n const filters: QueryFilter[] = [];\n if (params.aType) filters.push({ field: 'aType', op: '==', value: params.aType });\n if (params.axbType) filters.push({ field: 'axbType', op: '==', value: params.axbType });\n if (params.bType) filters.push({ field: 'bType', op: '==', value: params.bType });\n this.checkQuerySafety(filters, params.allowCollectionScan);\n return this.backend.geoSearch(params);\n }\n\n /**\n * Translate a `FindEdgesParams` into the `QueryFilter[]` shape the\n * backend `bulkDelete` / `bulkUpdate` methods expect. Mirrors the\n * `aggregate()` plan: a bare-empty params object becomes an empty\n * filter list (after a scan-protection check); a GET-shape (all three\n * identifiers) is rejected so we never silently turn a single-row\n * lookup into a server-side DML; otherwise we run `buildEdgeQueryPlan`\n * and surface its filters.\n */\n private buildDmlFilters(params: FindEdgesParams): QueryFilter[] {\n const hasAnyFilter =\n params.aType ||\n params.aUid ||\n params.axbType ||\n params.bType ||\n params.bUid ||\n (params.where && params.where.length > 0);\n\n if (!hasAnyFilter) {\n this.checkQuerySafety([], params.allowCollectionScan);\n return [];\n }\n\n const plan = buildEdgeQueryPlan(params);\n if (plan.strategy === 'get') {\n throw new FiregraphError(\n 'bulkDelete() / bulkUpdate() require a query, not a direct document lookup. 
' +\n 'Use removeEdge() / updateEdge() for single-row operations, or omit one of ' +\n 'aUid/axbType/bUid to force a query strategy.',\n 'INVALID_QUERY',\n );\n }\n this.checkQuerySafety(plan.filters, params.allowCollectionScan);\n return plan.filters;\n }\n\n // ---------------------------------------------------------------------------\n // Dynamic registry methods\n // ---------------------------------------------------------------------------\n\n async defineNodeType(\n name: string,\n jsonSchema: object,\n description?: string,\n options?: DefineTypeOptions,\n ): Promise<void> {\n if (!this.dynamicConfig) {\n throw new DynamicRegistryError(\n 'defineNodeType() is only available in dynamic registry mode. ' +\n 'Pass registryMode: { mode: \"dynamic\" } to createGraphClient().',\n );\n }\n\n if (RESERVED_TYPE_NAMES.has(name)) {\n throw new DynamicRegistryError(\n `Cannot define type \"${name}\": this name is reserved for the meta-registry.`,\n );\n }\n\n if (this.staticRegistry?.lookup(name, NODE_RELATION, name)) {\n throw new DynamicRegistryError(\n `Cannot define node type \"${name}\": already defined in the static registry.`,\n );\n }\n\n const uid = generateDeterministicUid(META_NODE_TYPE, name);\n const data: Record<string, unknown> = { name, jsonSchema };\n if (description !== undefined) data.description = description;\n if (options?.titleField !== undefined) data.titleField = options.titleField;\n if (options?.subtitleField !== undefined) data.subtitleField = options.subtitleField;\n if (options?.viewTemplate !== undefined) data.viewTemplate = options.viewTemplate;\n if (options?.viewCss !== undefined) data.viewCss = options.viewCss;\n if (options?.allowedIn !== undefined) data.allowedIn = options.allowedIn;\n if (options?.migrationWriteBack !== undefined)\n data.migrationWriteBack = options.migrationWriteBack;\n if (options?.migrations !== undefined) {\n data.migrations = await this.serializeMigrations(options.migrations);\n }\n\n await 
this.putNode(META_NODE_TYPE, uid, data);\n }\n\n async defineEdgeType(\n name: string,\n topology: EdgeTopology,\n jsonSchema?: object,\n description?: string,\n options?: DefineTypeOptions,\n ): Promise<void> {\n if (!this.dynamicConfig) {\n throw new DynamicRegistryError(\n 'defineEdgeType() is only available in dynamic registry mode. ' +\n 'Pass registryMode: { mode: \"dynamic\" } to createGraphClient().',\n );\n }\n\n if (RESERVED_TYPE_NAMES.has(name)) {\n throw new DynamicRegistryError(\n `Cannot define type \"${name}\": this name is reserved for the meta-registry.`,\n );\n }\n\n if (this.staticRegistry) {\n const fromTypes = Array.isArray(topology.from) ? topology.from : [topology.from];\n const toTypes = Array.isArray(topology.to) ? topology.to : [topology.to];\n for (const aType of fromTypes) {\n for (const bType of toTypes) {\n if (this.staticRegistry.lookup(aType, name, bType)) {\n throw new DynamicRegistryError(\n `Cannot define edge type \"${name}\" for (${aType}) -> (${bType}): already defined in the static registry.`,\n );\n }\n }\n }\n }\n\n const uid = generateDeterministicUid(META_EDGE_TYPE, name);\n const data: Record<string, unknown> = {\n name,\n from: topology.from,\n to: topology.to,\n };\n if (jsonSchema !== undefined) data.jsonSchema = jsonSchema;\n if (topology.inverseLabel !== undefined) data.inverseLabel = topology.inverseLabel;\n if (topology.targetGraph !== undefined) data.targetGraph = topology.targetGraph;\n if (description !== undefined) data.description = description;\n if (options?.titleField !== undefined) data.titleField = options.titleField;\n if (options?.subtitleField !== undefined) data.subtitleField = options.subtitleField;\n if (options?.viewTemplate !== undefined) data.viewTemplate = options.viewTemplate;\n if (options?.viewCss !== undefined) data.viewCss = options.viewCss;\n if (options?.allowedIn !== undefined) data.allowedIn = options.allowedIn;\n if (options?.migrationWriteBack !== undefined)\n data.migrationWriteBack 
= options.migrationWriteBack;\n if (options?.migrations !== undefined) {\n data.migrations = await this.serializeMigrations(options.migrations);\n }\n\n await this.putNode(META_EDGE_TYPE, uid, data);\n }\n\n async reloadRegistry(): Promise<void> {\n if (!this.dynamicConfig) {\n throw new DynamicRegistryError(\n 'reloadRegistry() is only available in dynamic registry mode. ' +\n 'Pass registryMode: { mode: \"dynamic\" } to createGraphClient().',\n );\n }\n\n const reader = this.createMetaReader();\n const dynamicOnly = await createRegistryFromGraph(reader, this.migrationSandbox);\n\n if (this.staticRegistry) {\n this.dynamicRegistry = createMergedRegistry(this.staticRegistry, dynamicOnly);\n } else {\n this.dynamicRegistry = dynamicOnly;\n }\n }\n\n private async serializeMigrations(\n migrations: Array<{ fromVersion: number; toVersion: number; up: MigrationFn | string }>,\n ): Promise<Array<{ fromVersion: number; toVersion: number; up: string }>> {\n const result = migrations.map((m) => {\n const source = typeof m.up === 'function' ? m.up.toString() : m.up;\n return { fromVersion: m.fromVersion, toVersion: m.toVersion, up: source };\n });\n await Promise.all(result.map((m) => precompileSource(m.up, this.migrationSandbox)));\n return result;\n }\n\n /**\n * Build a `GraphReader` over the meta-backend. 
If meta lives in the same\n * collection as the main backend, `this` is returned directly.\n */\n private createMetaReader(): GraphReader {\n if (!this.metaBackend) return this;\n\n const backend = this.metaBackend;\n\n const executeMetaQuery = (\n filters: QueryFilter[],\n options?: QueryOptions,\n ): Promise<StoredGraphRecord[]> => backend.query(filters, options);\n\n return {\n async getNode(uid: string): Promise<StoredGraphRecord | null> {\n return backend.getDoc(computeNodeDocId(uid));\n },\n async getEdge(\n aUid: string,\n axbType: string,\n bUid: string,\n ): Promise<StoredGraphRecord | null> {\n return backend.getDoc(computeEdgeDocId(aUid, axbType, bUid));\n },\n async edgeExists(aUid: string, axbType: string, bUid: string): Promise<boolean> {\n const record = await backend.getDoc(computeEdgeDocId(aUid, axbType, bUid));\n return record !== null;\n },\n async findEdges(params: FindEdgesParams): Promise<StoredGraphRecord[]> {\n const plan = buildEdgeQueryPlan(params);\n if (plan.strategy === 'get') {\n const record = await backend.getDoc(plan.docId);\n return record ? [record] : [];\n }\n return executeMetaQuery(plan.filters, plan.options);\n },\n async findNodes(params: FindNodesParams): Promise<StoredGraphRecord[]> {\n const plan = buildNodeQueryPlan(params);\n if (plan.strategy === 'get') {\n const record = await backend.getDoc(plan.docId);\n return record ? [record] : [];\n }\n return executeMetaQuery(plan.filters, plan.options);\n },\n };\n }\n}\n\n/**\n * Create a `GraphClient` backed by a `StorageBackend`.\n *\n * Phase 3: the type parameter `C` is inferred from\n * `StorageBackend<C>.capabilities` and propagates to the returned\n * `GraphClient<C>`. Extension surfaces (aggregate, search, raw escape\n * hatches, …) are conditionally intersected — they exist on the returned\n * type only when `C` declares the matching capability. 
Calls into\n * undeclared extensions are TypeScript errors at the call site, not\n * runtime failures.\n *\n * The runtime delegate `GraphClientImpl` carries only the portable core\n * methods today; extension methods land in Phases 4–10 alongside their\n * backend implementations. Until then the type-level surface is ahead of\n * the runtime, but no backend declares any extension capability so the\n * narrowing is effectively a no-op for current callers.\n *\n * `createGraphClientFromBackend` is retained as a deprecated alias for\n * backward compatibility while the codebase migrates off the old name.\n */\nexport function createGraphClient<C extends Capability = Capability>(\n backend: StorageBackend<C>,\n options: GraphClientOptions & { registryMode: DynamicRegistryConfig },\n metaBackend?: StorageBackend,\n): DynamicGraphClient<C>;\nexport function createGraphClient<C extends Capability = Capability>(\n backend: StorageBackend<C>,\n options?: GraphClientOptions,\n metaBackend?: StorageBackend,\n): GraphClient<C>;\nexport function createGraphClient<C extends Capability = Capability>(\n backend: StorageBackend<C>,\n options?: GraphClientOptions,\n metaBackend?: StorageBackend,\n): GraphClient<C> | DynamicGraphClient<C> {\n // The double cast bridges the gap between the runtime delegate\n // (`GraphClientImpl`, which structurally implements\n // `CoreGraphClient & DynamicGraphMethods`) and the conditionally-\n // intersected return types `GraphClient<C>` / `DynamicGraphClient<C>`.\n // The implementation signature can't pick between the two overloads\n // without inspecting `options.registryMode` at the type level, which\n // requires conditional types over the `options` argument; the cast\n // collapses that ambiguity. 
Sound today because every `*Extension`\n // body is empty and `DynamicGraphMethods` is always present at runtime\n // (the validation routing inside `GraphClientImpl` no-ops the dynamic\n // methods when registryMode is absent).\n return new GraphClientImpl(backend, options, metaBackend) as unknown as\n | GraphClient<C>\n | DynamicGraphClient<C>;\n}\n\n/**\n * @deprecated Use `createGraphClient` instead. Kept temporarily so existing\n * callers (Cloudflare client, routing backend, tests) continue to compile\n * during the Phase 2 transition.\n */\nexport const createGraphClientFromBackend = createGraphClient;\n","/**\n * Code generation — produces TypeScript type definitions from JSON Schema\n * files discovered via the entity folder convention.\n *\n * Uses `json-schema-to-typescript` to compile each entity's `schema.json`\n * into a TypeScript interface.\n *\n * Naming convention:\n * - Nodes: `{PascalName}Data` (e.g. `TaskData`)\n * - Edges: `{PascalName}EdgeData` (e.g. `HasStepEdgeData`)\n */\n\nimport type { DiscoveryResult } from '../types.js';\n\nfunction pascalCase(s: string): string {\n return s.replace(/(^|[^a-zA-Z0-9])([a-zA-Z])/g, (_, _sep, ch) => ch.toUpperCase());\n}\n\nexport interface CodegenOptions {\n /** Add banner comment at top of output. Defaults to true. 
*/\n banner?: boolean;\n}\n\n/**\n * Generate TypeScript type definitions from a DiscoveryResult.\n * Returns the full file content as a string.\n */\nexport async function generateTypes(\n discovery: DiscoveryResult,\n options: CodegenOptions = {},\n): Promise<string> {\n // Lazy-load to avoid requiring this dep at runtime for non-codegen usage\n const { compile } = await import('json-schema-to-typescript');\n\n const { banner = true } = options;\n const chunks: string[] = [];\n\n if (banner) {\n chunks.push('// Auto-generated by firegraph codegen — do not edit manually\\n');\n }\n\n // Sort for deterministic output\n const sortedNodes = [...discovery.nodes.entries()].sort(([a], [b]) => a.localeCompare(b));\n const sortedEdges = [...discovery.edges.entries()].sort(([a], [b]) => a.localeCompare(b));\n\n for (const [name, entity] of sortedNodes) {\n const typeName = `${pascalCase(name)}Data`;\n const ts = await compile(entity.schema as any, typeName, {\n bannerComment: '',\n additionalProperties: false,\n });\n chunks.push(ts.trim());\n chunks.push('');\n }\n\n for (const [name, entity] of sortedEdges) {\n const typeName = `${pascalCase(name)}EdgeData`;\n const ts = await compile(entity.schema as any, typeName, {\n bannerComment: '',\n additionalProperties: false,\n });\n chunks.push(ts.trim());\n chunks.push('');\n }\n\n return chunks.join('\\n').trimEnd() + '\\n';\n}\n","/**\n * Firegraph Configuration — project-level config file support.\n *\n * Projects create a `firegraph.config.ts` (or `.js`/`.mjs`) in their root:\n *\n * @example\n * ```ts\n * import { defineConfig } from 'firegraph';\n *\n * export default defineConfig({\n * entities: './entities',\n * project: 'my-project',\n * collection: 'graph',\n * });\n * ```\n */\n\nimport type { DynamicRegistryConfig, QueryMode } from './types.js';\n\n// ---------------------------------------------------------------------------\n// View Resolution Types\n// 
---------------------------------------------------------------------------\n\n/** Display contexts where views can appear. */\nexport type ViewContext = 'listing' | 'detail' | 'inline';\n\n/** View resolution configuration for a single entity type. */\nexport interface ViewResolverConfig {\n /** Default view name (e.g. 'card'). Falls back to 'json' if unset. */\n default?: string;\n /** View to use in NodeBrowser listing rows. */\n listing?: string;\n /** View to use on the NodeDetail page. */\n detail?: string;\n /** View to use for inline/embedded previews (edge rows, traversal). */\n inline?: string;\n}\n\n/** Declarative view defaults, keyed by entity type. */\nexport interface ViewDefaultsConfig {\n /** Node view defaults keyed by aType (e.g. 'user', 'task'). */\n nodes?: Record<string, ViewResolverConfig>;\n /** Edge view defaults keyed by axbType (e.g. 'hasDeparture'). */\n edges?: Record<string, ViewResolverConfig>;\n}\n\n// ---------------------------------------------------------------------------\n// Config Shape\n// ---------------------------------------------------------------------------\n\n/** Project-level firegraph configuration. */\nexport interface FiregraphConfig {\n /** Path to entities directory (per-entity folder convention). */\n entities?: string;\n /** GCP project ID. */\n project?: string;\n /** Firestore collection path (default: 'graph'). */\n collection?: string;\n /** Firestore emulator address (e.g. '127.0.0.1:8080'). */\n emulator?: string;\n /**\n * Query execution backend.\n *\n * - `'pipeline'` (default) — Uses Firestore Pipeline API. Requires Enterprise\n * Firestore. Enables indexless queries on `data.*` fields.\n * - `'standard'` — Uses standard Firestore `.where().get()` queries. Not\n * recommended for production. See README for risk details.\n *\n * When the emulator is active, always falls back to `'standard'`.\n */\n queryMode?: QueryMode;\n\n /**\n * AI chat configuration. 
Auto-detects `claude` CLI on PATH by default.\n * Set to `false` to disable chat even if claude is available.\n */\n chat?:\n | false\n | {\n /** Claude model to use (default: 'sonnet'). */\n model?: string;\n /** Maximum concurrent claude processes (default: 2). */\n maxConcurrency?: number;\n };\n\n /** Editor-specific settings. */\n editor?: {\n /** Server port (default: 3883). */\n port?: number;\n /** Force read-only mode. */\n readonly?: boolean;\n };\n\n /** Declarative view defaults per entity type (overrides per-entity meta.json). */\n viewDefaults?: ViewDefaultsConfig;\n\n /**\n * Dynamic registry mode. When set, the editor loads type definitions\n * from Firestore meta-nodes in addition to filesystem entities.\n * Filesystem types take precedence on name conflicts.\n */\n registryMode?: DynamicRegistryConfig;\n}\n\n// ---------------------------------------------------------------------------\n// defineConfig()\n// ---------------------------------------------------------------------------\n\n/**\n * Identity function providing type-checking and autocomplete for config files.\n *\n * @example\n * ```ts\n * import { defineConfig } from 'firegraph';\n * export default defineConfig({ entities: './entities' });\n * ```\n */\nexport function defineConfig(config: FiregraphConfig): FiregraphConfig {\n return config;\n}\n\n// ---------------------------------------------------------------------------\n// View Resolution (pure — works client-side and server-side)\n// ---------------------------------------------------------------------------\n\n/**\n * Resolve which view to show for a given entity.\n *\n * 1. If `context` is provided and a context-specific default exists, use it.\n * 2. Falls back to `resolverConfig.default`.\n * 3. 
Ultimate fallback: `'json'`.\n *\n * Only returns view names that exist in `availableViewNames`.\n */\nexport function resolveView(\n resolverConfig: ViewResolverConfig | undefined,\n availableViewNames: string[],\n context?: ViewContext,\n): string {\n if (!resolverConfig) return 'json';\n\n const available = new Set(availableViewNames);\n\n if (context) {\n const contextDefault = resolverConfig[context];\n if (contextDefault && available.has(contextDefault)) {\n return contextDefault;\n }\n }\n\n if (resolverConfig.default && available.has(resolverConfig.default)) {\n return resolverConfig.default;\n }\n\n return 'json';\n}\n","/**\n * Cross-graph edge resolution utilities.\n *\n * Provides path-scanning resolution for determining whether an edge's source\n * (aUid) is an ancestor node by checking if the UID appears in the Firestore\n * collection path.\n *\n * Firestore paths have a rigid alternating structure:\n * collection / docId / collection / docId / collection\n *\n * Given a path like `graph/A/workspace/B/context`, segments at even indices\n * are collection names and odd indices are document IDs. 
When we find a UID\n * at an odd index, the collection containing that document is the path up to\n * (and including) the preceding even-index segment.\n */\n\n/**\n * Parse a Firestore collection path and determine the collection path\n * where a given UID's document lives, if that UID is an ancestor in the path.\n *\n * @param collectionPath - The full Firestore collection path of the current client\n * @param uid - The UID to search for in the path\n * @returns The collection path containing the UID, or `null` if not found in the path\n *\n * @example\n * ```ts\n * // Path: graph/A/workspace/B/context\n * resolveAncestorCollection('graph/A/workspace/B/context', 'A')\n * // → 'graph'\n *\n * resolveAncestorCollection('graph/A/workspace/B/context', 'B')\n * // → 'graph/A/workspace'\n *\n * resolveAncestorCollection('graph/A/workspace/B/context', 'unknown')\n * // → null\n * ```\n */\nexport function resolveAncestorCollection(collectionPath: string, uid: string): string | null {\n const segments = collectionPath.split('/');\n\n // Walk odd-indexed segments (document IDs in Firestore's alternating path structure)\n for (let i = 1; i < segments.length; i += 2) {\n if (segments[i] === uid) {\n // The collection containing this doc is everything up to index i-1\n return segments.slice(0, i).join('/');\n }\n }\n\n return null;\n}\n\n/**\n * Check whether a UID belongs to an ancestor node by scanning the collection path.\n *\n * @param collectionPath - The full Firestore collection path of the current client\n * @param uid - The UID to check\n * @returns `true` if the UID appears as a document segment in the path\n */\nexport function isAncestorUid(collectionPath: string, uid: string): boolean {\n return resolveAncestorCollection(collectionPath, uid) !== null;\n}\n","/**\n * Default core index preset.\n *\n * This set covers the query patterns firegraph's query planner emits for\n * built-in operations — `findNodes`, `findEdges`, cascade delete, traversal,\n * and the 
DO/SQLite path compilers. Apps that need additional indexes\n * (descending timestamps, `data.*` filters, composite fields unique to\n * their query shapes) declare them on `RegistryEntry.indexes` or override\n * this preset wholesale via the backend-specific `coreIndexes` option —\n * `FiregraphDOOptions.coreIndexes` for the DO backend,\n * `BuildSchemaOptions.coreIndexes` for the legacy SQLite backend, and\n * `GenerateIndexOptions.coreIndexes` for the Firestore CLI generator.\n *\n * ## Ownership model\n *\n * This list is firegraph's *recommendation* — not non-negotiable policy.\n * Consumers can:\n *\n * 1. Accept the preset as-is (default).\n * 2. Extend it: `coreIndexes: [...DEFAULT_CORE_INDEXES, ...more]`.\n * 3. Replace it entirely with a tailored set.\n * 4. Disable it (`coreIndexes: []`) and take full responsibility for\n * index coverage — only do this if you're provisioning a complete\n * custom set.\n *\n * ## Per-backend emission\n *\n * The Firestore generator skips single-field entries (Firestore implicitly\n * indexes every field) and emits one composite index per multi-field spec.\n * The SQLite-flavored generators (DO, legacy) emit every spec as-is.\n *\n * ## Why these specific indexes\n *\n * - `aUid` / `bUid` — required for `_fgRemoveNodeCascade`, which scans by\n * each UID side independently. 
A composite `(aUid, axbType)` also\n * satisfies `aUid`-alone via leading-column prefix, but the single-field\n * form is cheaper for the common case.\n * - `aType` / `bType` — `findNodes({ aType })` and cross-type enumeration.\n * - `(aUid, axbType)` — forward edge lookup (`findEdges({ aUid, axbType })`)\n * and the `get` strategy fallback when only two of three triple fields\n * are present.\n * - `(axbType, bUid)` — reverse edge traversal.\n * - `(aType, axbType)` — type-scoped edge scans (e.g., `findEdges({ aType, axbType })`).\n * - `(axbType, bType)` — scope edges of one relation to a target type.\n */\n\nimport type { IndexSpec } from './types.js';\n\nexport const DEFAULT_CORE_INDEXES: ReadonlyArray<IndexSpec> = Object.freeze([\n { fields: ['aUid'] },\n { fields: ['bUid'] },\n { fields: ['aType'] },\n { fields: ['bType'] },\n { fields: ['aUid', 'axbType'] },\n { fields: ['axbType', 'bUid'] },\n { fields: ['aType', 'axbType'] },\n { fields: ['axbType', 'bType'] },\n]);\n","/**\n * Entity Discovery — convention-based auto-discovery of entities from\n * a per-entity folder structure.\n *\n * Scans `entitiesDir/nodes/` and `entitiesDir/edges/` subdirectories.\n * Each subfolder is treated as an entity type.\n *\n * Schema files can be either `schema.json` (plain JSON Schema) or\n * `schema.ts` / `schema.js` (a module whose default export is a JSON Schema\n * object). 
When both exist, the TS/JS file takes precedence so that authors\n * can compose schemas programmatically while keeping a JSON fallback.\n *\n * @example\n * ```\n * entities/\n * nodes/\n * task/\n * schema.json | schema.ts (required — one or both)\n * views.ts (optional)\n * sample.json (optional)\n * meta.json (optional)\n * edges/\n * hasStep/\n * schema.json | schema.ts (required — one or both)\n * edge.json (required — topology)\n * views.ts (optional)\n * sample.json (optional)\n * meta.json (optional)\n * ```\n */\n\nimport { existsSync, readdirSync, readFileSync, statSync } from 'node:fs';\nimport { createRequire } from 'node:module';\nimport { join, resolve } from 'node:path';\n\nimport type * as jitiNS from 'jiti';\n\nimport type { ViewResolverConfig } from './config.js';\nimport { FiregraphError } from './errors.js';\nimport type {\n DiscoveredEntity,\n DiscoveryResult,\n EdgeTopology,\n IndexSpec,\n MigrationStep,\n MigrationWriteBack,\n} from './types.js';\n\nexport class DiscoveryError extends FiregraphError {\n constructor(message: string) {\n super(message, 'DISCOVERY_ERROR');\n this.name = 'DiscoveryError';\n }\n}\n\n// ---------------------------------------------------------------------------\n// JSON parsing helpers\n// ---------------------------------------------------------------------------\n\nfunction readJson(filePath: string): unknown {\n try {\n const raw = readFileSync(filePath, 'utf-8');\n return JSON.parse(raw);\n } catch (err: unknown) {\n const msg =\n err instanceof SyntaxError\n ? 
`Invalid JSON in ${filePath}: ${err.message}`\n : `Cannot read ${filePath}: ${(err as Error).message}`;\n throw new DiscoveryError(msg);\n }\n}\n\nfunction readJsonIfExists(filePath: string): unknown | undefined {\n if (!existsSync(filePath)) return undefined;\n return readJson(filePath);\n}\n\n// ---------------------------------------------------------------------------\n// Schema file loading (JSON or TS/JS via jiti)\n// ---------------------------------------------------------------------------\n\nconst SCHEMA_SCRIPT_EXTENSIONS = ['.ts', '.js', '.mts', '.mjs'];\n\n/**\n * Attempt to load a schema from a TS/JS module (default export) or fall back\n * to schema.json. Returns the parsed schema object or throws.\n */\nfunction loadSchema(dir: string, entityLabel: string): object {\n // Prefer TS/JS schema — allows programmatic composition & shared definitions\n for (const ext of SCHEMA_SCRIPT_EXTENSIONS) {\n const candidate = join(dir, `schema${ext}`);\n if (existsSync(candidate)) {\n return loadSchemaModule(candidate, entityLabel);\n }\n }\n\n // Fall back to schema.json\n const jsonPath = join(dir, 'schema.json');\n if (existsSync(jsonPath)) {\n return readJson(jsonPath) as object;\n }\n\n throw new DiscoveryError(\n `Missing schema for ${entityLabel} in ${dir}. ` +\n 'Provide a schema.ts (or .js/.mts/.mjs) or schema.json file.',\n );\n}\n\nlet _jiti: ((id: string) => unknown) | undefined;\n\nfunction getJiti(): (id: string) => unknown {\n if (!_jiti) {\n const base = typeof __filename !== 'undefined' ? __filename : import.meta.url;\n const esmRequire = createRequire(base);\n const { createJiti } = esmRequire('jiti') as typeof jitiNS;\n _jiti = createJiti(base, { interopDefault: true });\n }\n return _jiti;\n}\n\nfunction loadSchemaModule(filePath: string, entityLabel: string): object {\n try {\n const jiti = getJiti();\n const mod = jiti(filePath) as { default?: unknown } | unknown;\n const schema =\n mod && typeof mod === 'object' && 'default' in mod\n ? 
(mod as { default: unknown }).default\n : mod;\n\n if (!schema || typeof schema !== 'object') {\n throw new DiscoveryError(\n `Schema file ${filePath} for ${entityLabel} must default-export a JSON Schema object.`,\n );\n }\n return schema as object;\n } catch (err: unknown) {\n if (err instanceof DiscoveryError) throw err;\n throw new DiscoveryError(\n `Failed to load schema module ${filePath} for ${entityLabel}: ${(err as Error).message}`,\n );\n }\n}\n\n// ---------------------------------------------------------------------------\n// View file detection\n// ---------------------------------------------------------------------------\n\nconst VIEW_EXTENSIONS = ['.ts', '.js', '.mts', '.mjs'];\n\nfunction findViewsFile(dir: string): string | undefined {\n for (const ext of VIEW_EXTENSIONS) {\n const candidate = join(dir, `views${ext}`);\n if (existsSync(candidate)) return candidate;\n }\n return undefined;\n}\n\n// ---------------------------------------------------------------------------\n// Migration file detection & loading\n// ---------------------------------------------------------------------------\n\nconst MIGRATION_EXTENSIONS = ['.ts', '.js', '.mts', '.mjs'];\n\nfunction findMigrationsFile(dir: string): string | undefined {\n for (const ext of MIGRATION_EXTENSIONS) {\n const candidate = join(dir, `migrations${ext}`);\n if (existsSync(candidate)) return candidate;\n }\n return undefined;\n}\n\nfunction loadMigrations(filePath: string, entityLabel: string): MigrationStep[] {\n try {\n const jiti = getJiti();\n const mod = jiti(filePath) as { default?: unknown } | unknown;\n const migrations =\n mod && typeof mod === 'object' && 'default' in mod\n ? 
(mod as { default: unknown }).default\n : mod;\n\n if (!Array.isArray(migrations)) {\n throw new DiscoveryError(\n `Migrations file ${filePath} for ${entityLabel} must default-export an array of MigrationStep.`,\n );\n }\n return migrations as MigrationStep[];\n } catch (err: unknown) {\n if (err instanceof DiscoveryError) throw err;\n throw new DiscoveryError(\n `Failed to load migrations ${filePath} for ${entityLabel}: ${(err as Error).message}`,\n );\n }\n}\n\n// ---------------------------------------------------------------------------\n// Entity loaders\n// ---------------------------------------------------------------------------\n\nfunction loadNodeEntity(dir: string, name: string): DiscoveredEntity {\n const schema = loadSchema(dir, `node type \"${name}\"`);\n const meta = readJsonIfExists(join(dir, 'meta.json')) as\n | {\n description?: string;\n titleField?: string;\n subtitleField?: string;\n viewDefaults?: ViewResolverConfig;\n allowedIn?: string[];\n migrationWriteBack?: MigrationWriteBack;\n indexes?: IndexSpec[];\n }\n | undefined;\n const sampleData = readJsonIfExists(join(dir, 'sample.json')) as\n | Record<string, unknown>\n | undefined;\n const viewsPath = findViewsFile(dir);\n const migrationsPath = findMigrationsFile(dir);\n const migrations = migrationsPath\n ? 
loadMigrations(migrationsPath, `node type \"${name}\"`)\n : undefined;\n\n return {\n kind: 'node',\n name,\n schema,\n description: meta?.description,\n titleField: meta?.titleField,\n subtitleField: meta?.subtitleField,\n viewDefaults: meta?.viewDefaults,\n viewsPath,\n sampleData,\n allowedIn: meta?.allowedIn,\n migrations,\n migrationWriteBack: meta?.migrationWriteBack,\n indexes: meta?.indexes,\n };\n}\n\nfunction loadEdgeEntity(dir: string, name: string): DiscoveredEntity {\n const schema = loadSchema(dir, `edge type \"${name}\"`);\n\n const edgePath = join(dir, 'edge.json');\n if (!existsSync(edgePath)) {\n throw new DiscoveryError(\n `Missing edge.json for edge type \"${name}\" in ${dir}. ` +\n 'Edge entities must declare topology (from/to node types).',\n );\n }\n const topology = readJson(edgePath) as EdgeTopology;\n\n // Validate topology shape\n if (!topology.from) {\n throw new DiscoveryError(`edge.json for \"${name}\" is missing required \"from\" field`);\n }\n if (!topology.to) {\n throw new DiscoveryError(`edge.json for \"${name}\" is missing required \"to\" field`);\n }\n\n const meta = readJsonIfExists(join(dir, 'meta.json')) as\n | {\n description?: string;\n titleField?: string;\n subtitleField?: string;\n viewDefaults?: ViewResolverConfig;\n allowedIn?: string[];\n targetGraph?: string;\n migrationWriteBack?: MigrationWriteBack;\n indexes?: IndexSpec[];\n }\n | undefined;\n const sampleData = readJsonIfExists(join(dir, 'sample.json')) as\n | Record<string, unknown>\n | undefined;\n const viewsPath = findViewsFile(dir);\n const migrationsPath = findMigrationsFile(dir);\n const migrations = migrationsPath\n ? 
loadMigrations(migrationsPath, `edge type \"${name}\"`)\n : undefined;\n\n return {\n kind: 'edge',\n name,\n schema,\n topology,\n description: meta?.description,\n titleField: meta?.titleField,\n subtitleField: meta?.subtitleField,\n viewDefaults: meta?.viewDefaults,\n viewsPath,\n sampleData,\n allowedIn: meta?.allowedIn,\n targetGraph:\n topology.targetGraph ?? (meta as { targetGraph?: string } | undefined)?.targetGraph,\n migrations,\n migrationWriteBack: meta?.migrationWriteBack,\n indexes: meta?.indexes,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Directory scanner\n// ---------------------------------------------------------------------------\n\nfunction getSubdirectories(dir: string): string[] {\n if (!existsSync(dir)) return [];\n return readdirSync(dir, { withFileTypes: true })\n .filter((d) => d.isDirectory())\n .map((d) => d.name);\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\nexport interface DiscoveryWarning {\n code: 'DANGLING_TOPOLOGY_REF';\n message: string;\n}\n\nexport interface DiscoverResult {\n result: DiscoveryResult;\n warnings: DiscoveryWarning[];\n}\n\n/**\n * Scan an entities directory and return all discovered nodes and edges.\n *\n * @param entitiesDir - Path to the entities directory (absolute or relative to cwd)\n * @returns Discovery result with nodes and edges maps, plus any warnings\n */\nexport function discoverEntities(entitiesDir: string): DiscoverResult {\n const absDir = resolve(entitiesDir);\n\n if (!existsSync(absDir) || !statSync(absDir).isDirectory()) {\n throw new DiscoveryError(`Entities directory not found: ${entitiesDir}`);\n }\n\n const nodes = new Map<string, DiscoveredEntity>();\n const edges = new Map<string, DiscoveredEntity>();\n const warnings: DiscoveryWarning[] = [];\n\n // Discover nodes\n const nodesDir = join(absDir, 
'nodes');\n for (const name of getSubdirectories(nodesDir)) {\n nodes.set(name, loadNodeEntity(join(nodesDir, name), name));\n }\n\n // Discover edges\n const edgesDir = join(absDir, 'edges');\n for (const name of getSubdirectories(edgesDir)) {\n edges.set(name, loadEdgeEntity(join(edgesDir, name), name));\n }\n\n // Validate topology references\n const nodeNames = new Set(nodes.keys());\n for (const [axbType, entity] of edges) {\n const topology = entity.topology!;\n const fromTypes = Array.isArray(topology.from) ? topology.from : [topology.from];\n const toTypes = Array.isArray(topology.to) ? topology.to : [topology.to];\n\n for (const ref of [...fromTypes, ...toTypes]) {\n if (!nodeNames.has(ref)) {\n warnings.push({\n code: 'DANGLING_TOPOLOGY_REF',\n message: `Edge \"${axbType}\" references node type \"${ref}\" which was not found in the nodes directory`,\n });\n }\n }\n }\n\n return {\n result: { nodes, edges },\n warnings,\n };\n}\n","import { nanoid } from 'nanoid';\n\nexport function generateId(): string {\n return nanoid();\n}\n","/**\n * Firestore composite index generator.\n *\n * Translates firegraph's declarative `IndexSpec[]` (core preset plus per-entry\n * registry indexes) into the `firestore.indexes.json` shape consumed by\n * `firebase deploy --only firestore:indexes`.\n *\n * ## What Firestore needs\n *\n * Firestore auto-indexes every top-level field (including `data.*`) for\n * single-field equality queries — we only need to emit *composite* indexes\n * here. That means:\n *\n * 1. Single-field specs are dropped (Firestore already covers them).\n * 2. Composite specs (two or more fields) get one `FirestoreIndex`.\n * 3. Specs with `where` are dropped with a warning — Firestore composite\n * indexes do not support partial predicates.\n * 4. 
When a registry entry has `targetGraph` set, every composite is also\n * emitted with `queryScope: 'COLLECTION_GROUP'` under the targetGraph\n * name, so `findEdgesGlobal()` queries across subgraphs can hit an\n * index.\n *\n * The SQLite-flavored backends (DO, legacy) consume the same `IndexSpec[]`\n * via `src/internal/sqlite-index-ddl.ts` but emit every spec (single fields\n * included) as `CREATE INDEX` DDL.\n */\n\nimport { DEFAULT_CORE_INDEXES } from './default-indexes.js';\nimport type { DiscoveryResult, IndexFieldSpec, IndexSpec, RegistryEntry } from './types.js';\n\nexport interface FirestoreIndexField {\n fieldPath: string;\n order: 'ASCENDING' | 'DESCENDING';\n}\n\nexport interface FirestoreIndex {\n collectionGroup: string;\n queryScope: 'COLLECTION' | 'COLLECTION_GROUP';\n fields: FirestoreIndexField[];\n}\n\nexport interface FirestoreIndexConfig {\n indexes: FirestoreIndex[];\n fieldOverrides: unknown[];\n}\n\nexport interface GenerateIndexOptions {\n /**\n * Replaces firegraph's built-in core preset. Defaults to\n * `DEFAULT_CORE_INDEXES`. Pass `[]` to disable core indexes entirely.\n */\n coreIndexes?: IndexSpec[];\n /**\n * Registry entries supplying per-triple `indexes`. Entries without\n * `indexes` contribute no composites; entries with `targetGraph` also\n * trigger `COLLECTION_GROUP` mirrors under each distinct targetGraph\n * segment name.\n */\n registryEntries?: ReadonlyArray<RegistryEntry>;\n /**\n * Entity discovery result. Convenience for callers that have a\n * `DiscoveryResult` but not a built registry — treated as if every\n * discovered entity were expanded to its registry entries carrying just\n * `indexes` + `targetGraph`. Mutually usable with `registryEntries`\n * (both are concatenated and deduplicated at the spec level).\n */\n entities?: DiscoveryResult;\n}\n\nfunction normalizeField(f: string | IndexFieldSpec): IndexFieldSpec {\n return typeof f === 'string' ? 
{ path: f, desc: false } : { path: f.path, desc: !!f.desc };\n}\n\nfunction specFingerprint(spec: IndexSpec, scope: string): string {\n const normalized = spec.fields.map(normalizeField);\n return `${scope}::${JSON.stringify(normalized)}`;\n}\n\nfunction toFirestoreFields(spec: IndexSpec): FirestoreIndexField[] {\n return spec.fields.map((f) => {\n const n = normalizeField(f);\n return {\n fieldPath: n.path,\n order: n.desc ? 'DESCENDING' : 'ASCENDING',\n };\n });\n}\n\nlet warnedOnPartialIndex = false;\n\n/**\n * Build a Firestore index configuration from firegraph's declarative index\n * specs. Deduplicates by field list + scope before emitting. Single-field\n * specs are dropped; partial-index specs (`where` set) are dropped with a\n * one-time warning.\n */\nexport function generateIndexConfig(\n collection: string,\n options: GenerateIndexOptions = {},\n): FirestoreIndexConfig {\n const core = options.coreIndexes ?? [...DEFAULT_CORE_INDEXES];\n const fromEntries = (options.registryEntries ?? []).flatMap((e) => {\n if (!e.indexes) return [] as IndexSpec[];\n return e.indexes;\n });\n\n // DiscoveryResult is a pre-registry shape — it doesn't carry `indexes`\n // per triple (those live on registry entries once built). Accept it to\n // keep the CLI ergonomic, but the only thing we can pull from it right\n // now is the set of distinct `targetGraph` values, which belongs to\n // discovery-time topology metadata. Consumers who need per-entity data\n // indexes must go through the registry path.\n const targetGraphNames = new Set<string>();\n for (const entry of options.registryEntries ?? []) {\n if (entry.targetGraph) targetGraphNames.add(entry.targetGraph);\n }\n if (options.entities) {\n for (const [, entity] of options.entities.edges) {\n const tg = entity.targetGraph ?? 
entity.topology?.targetGraph;\n if (tg) targetGraphNames.add(tg);\n }\n }\n\n const allSpecs = [...core, ...fromEntries];\n const seen = new Set<string>();\n const indexes: FirestoreIndex[] = [];\n\n for (const spec of allSpecs) {\n if (!spec.fields || spec.fields.length < 2) {\n // Single-field: Firestore auto-indexes — nothing to emit.\n continue;\n }\n if (spec.where) {\n if (!warnedOnPartialIndex) {\n warnedOnPartialIndex = true;\n console.warn(\n 'firegraph: IndexSpec.where is ignored by the Firestore generator — ' +\n 'Firestore composite indexes do not support predicates. ' +\n 'The SQLite backends will still honor `where`.',\n );\n }\n continue;\n }\n\n const fields = toFirestoreFields(spec);\n\n const colKey = specFingerprint(spec, `col:${collection}`);\n if (!seen.has(colKey)) {\n seen.add(colKey);\n indexes.push({\n collectionGroup: collection,\n queryScope: 'COLLECTION',\n fields,\n });\n }\n\n // Mirror into every distinct `targetGraph` as a collection group index.\n // `findEdgesGlobal()` runs across all subcollections matching the\n // targetGraph name, and each pattern needs its own CG index.\n for (const tg of targetGraphNames) {\n const cgKey = specFingerprint(spec, `cg:${tg}`);\n if (seen.has(cgKey)) continue;\n seen.add(cgKey);\n indexes.push({\n collectionGroup: tg,\n queryScope: 'COLLECTION_GROUP',\n fields,\n });\n }\n }\n\n return { indexes, fieldOverrides: [] };\n}\n\n/**\n * Internal test hook — reset the one-time partial-index warning flag so\n * tests covering the warn branch can run sequentially without sharing\n * state.\n */\nexport function _resetIndexGenWarningsForTest(): void {\n warnedOnPartialIndex = false;\n}\n","import http from 'node:http';\n\nimport { readEditorPort } from './config.js';\nimport { summarizeEdge, summarizeRecord } from './shaping.js';\nimport type {\n GetEdgesInput,\n GetEdgesResult,\n GetNodeDetailInput,\n GetNodesInput,\n GetNodesResult,\n NodeDetailResult,\n QueryClientOptions,\n SchemaResult,\n 
SearchInput,\n SearchResult,\n SummarizedEdge,\n SummarizedRecord,\n TraverseHopResult,\n TraverseInput,\n TraverseResult,\n} from './types.js';\n\n// --- Error ---\n\nexport type QueryClientErrorCode = 'VALIDATION_ERROR' | 'CONNECTION_FAILED' | 'SERVER_ERROR';\n\nexport class QueryClientError extends Error {\n constructor(\n message: string,\n public readonly code: QueryClientErrorCode,\n ) {\n super(message);\n this.name = 'QueryClientError';\n }\n}\n\n// --- Validation helpers ---\n\nfunction requireString(value: unknown, name: string): asserts value is string {\n if (typeof value !== 'string' || value.length === 0) {\n throw new QueryClientError(`${name} must be a non-empty string`, 'VALIDATION_ERROR');\n }\n}\n\nfunction clampInt(value: number | undefined, min: number, max: number, fallback: number): number {\n if (value == null) return fallback;\n if (!Number.isInteger(value)) {\n throw new QueryClientError(`limit must be an integer`, 'VALIDATION_ERROR');\n }\n return Math.max(min, Math.min(max, value));\n}\n\nfunction validateSortDir(dir: string | undefined): void {\n if (dir != null && dir !== 'asc' && dir !== 'desc') {\n throw new QueryClientError(`sortDir must be 'asc' or 'desc'`, 'VALIDATION_ERROR');\n }\n}\n\n// --- HTTP helpers ---\n\nfunction httpGet(url: string): Promise<string> {\n return new Promise((resolve, reject) => {\n http\n .get(url, (res) => {\n let body = '';\n res.on('data', (c: string) => (body += c));\n res.on('end', () => resolve(body));\n })\n .on('error', (err) => {\n reject(new QueryClientError(`Connection failed: ${err.message}`, 'CONNECTION_FAILED'));\n });\n });\n}\n\nfunction httpPost(url: string, payload: string): Promise<string> {\n const parsed = new URL(url);\n return new Promise((resolve, reject) => {\n const req = http.request(\n {\n hostname: parsed.hostname,\n port: parsed.port,\n path: parsed.pathname,\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'Content-Length': Buffer.byteLength(payload),\n 
},\n },\n (res) => {\n let body = '';\n res.on('data', (c: string) => (body += c));\n res.on('end', () => resolve(body));\n },\n );\n req.on('error', (err) => {\n reject(new QueryClientError(`Connection failed: ${err.message}`, 'CONNECTION_FAILED'));\n });\n req.write(payload);\n req.end();\n });\n}\n\nfunction parseTrpcResponse(raw: string, procedure: string): unknown {\n let parsed: Record<string, unknown>;\n try {\n parsed = JSON.parse(raw);\n } catch {\n throw new QueryClientError(\n `Invalid JSON from ${procedure}: ${raw.slice(0, 200)}`,\n 'SERVER_ERROR',\n );\n }\n if (parsed.error) {\n const msg =\n typeof parsed.error === 'object' && parsed.error !== null\n ? ((parsed.error as Record<string, unknown>).message ?? JSON.stringify(parsed.error))\n : String(parsed.error);\n throw new QueryClientError(`Server error from ${procedure}: ${msg}`, 'SERVER_ERROR');\n }\n return (parsed.result as Record<string, unknown>)?.data ?? parsed;\n}\n\n// --- Client ---\n\nexport class QueryClient {\n private readonly baseUrl: string;\n\n constructor(options?: QueryClientOptions) {\n const host = options?.host ?? 'localhost';\n const port = options?.port ?? readEditorPort();\n this.baseUrl = `http://${host}:${port}/api/trpc`;\n }\n\n private async query(procedure: string, input?: unknown): Promise<unknown> {\n const qs = input != null ? `?input=${encodeURIComponent(JSON.stringify(input))}` : '';\n const url = `${this.baseUrl}/${procedure}${qs}`;\n const raw = await httpGet(url);\n return parseTrpcResponse(raw, procedure);\n }\n\n private async mutate(procedure: string, input: unknown): Promise<unknown> {\n const url = `${this.baseUrl}/${procedure}`;\n const raw = await httpPost(url, JSON.stringify(input));\n return parseTrpcResponse(raw, procedure);\n }\n\n // --- Public API ---\n\n async getSchema(): Promise<SchemaResult> {\n const data = (await this.query('getSchema')) as Record<string, unknown>;\n return {\n nodeTypes: ((data.nodeTypes as unknown[]) ?? 
[]).map(\n (t) =>\n (typeof t === 'object' && t !== null ? (t as Record<string, unknown>).type : t) as string,\n ),\n edgeTypes: ((data.edgeTypes as unknown[]) ?? []).map((t) => {\n const e = t as Record<string, unknown>;\n return {\n relation: e.axbType as string,\n from: e.aType as string,\n to: e.bType as string,\n inverseLabel: (e.inverseLabel as string) ?? null,\n };\n }),\n };\n }\n\n async getNodeDetail(input: GetNodeDetailInput): Promise<NodeDetailResult> {\n requireString(input.uid, 'uid');\n const data = (await this.query('getNodeDetail', { uid: input.uid })) as Record<string, unknown>;\n return {\n node: summarizeRecord(data.node as Record<string, unknown> | null),\n outEdges: ((data.outEdges as Record<string, unknown>[]) ?? [])\n .map(summarizeEdge)\n .filter(Boolean) as SummarizedEdge[],\n inEdges: ((data.inEdges as Record<string, unknown>[]) ?? [])\n .map(summarizeEdge)\n .filter(Boolean) as SummarizedEdge[],\n };\n }\n\n async getNodes(input: GetNodesInput): Promise<GetNodesResult> {\n const limit = clampInt(input.limit, 1, 200, 25);\n validateSortDir(input.sortDir);\n const data = (await this.query('getNodes', {\n type: input.type,\n limit,\n startAfter: input.startAfter,\n sortBy: input.sortBy,\n sortDir: input.sortDir,\n where: input.where,\n })) as Record<string, unknown>;\n return {\n nodes: ((data.nodes as Record<string, unknown>[]) ?? [])\n .map(summarizeRecord)\n .filter(Boolean) as SummarizedRecord[],\n hasMore: (data.hasMore as boolean) ?? 
false,\n nextCursor: data.nextCursor as string | null | undefined,\n };\n }\n\n async getEdges(input: GetEdgesInput): Promise<GetEdgesResult> {\n const hasFilter =\n input.aType ||\n input.aUid ||\n input.axbType ||\n input.bType ||\n input.bUid ||\n (input.where && input.where.length > 0);\n if (!hasFilter) {\n throw new QueryClientError(\n 'getEdges requires at least one filter field (aType, aUid, axbType, bType, bUid, or where)',\n 'VALIDATION_ERROR',\n );\n }\n const limit = clampInt(input.limit, 1, 200, 25);\n validateSortDir(input.sortDir);\n const data = (await this.query('getEdges', {\n aType: input.aType,\n aUid: input.aUid,\n axbType: input.axbType,\n bType: input.bType,\n bUid: input.bUid,\n limit,\n startAfter: input.startAfter,\n sortBy: input.sortBy,\n sortDir: input.sortDir,\n where: input.where,\n })) as Record<string, unknown>;\n return {\n edges: ((data.edges as Record<string, unknown>[]) ?? [])\n .map(summarizeEdge)\n .filter(Boolean) as SummarizedEdge[],\n hasMore: (data.hasMore as boolean) ?? 
false,\n nextCursor: data.nextCursor as string | null | undefined,\n };\n }\n\n async traverse(input: TraverseInput): Promise<TraverseResult> {\n requireString(input.startUid, 'startUid');\n if (!input.hops || input.hops.length === 0) {\n throw new QueryClientError('traverse requires at least one hop', 'VALIDATION_ERROR');\n }\n for (let i = 0; i < input.hops.length; i++) {\n const hop = input.hops[i];\n requireString(hop.axbType, `hops[${i}].axbType`);\n if (hop.direction != null && hop.direction !== 'forward' && hop.direction !== 'reverse') {\n throw new QueryClientError(\n `hops[${i}].direction must be 'forward' or 'reverse'`,\n 'VALIDATION_ERROR',\n );\n }\n if (hop.limit != null && (!Number.isInteger(hop.limit) || hop.limit < 1)) {\n throw new QueryClientError(\n `hops[${i}].limit must be a positive integer`,\n 'VALIDATION_ERROR',\n );\n }\n }\n if (input.maxReads != null && (!Number.isInteger(input.maxReads) || input.maxReads < 1)) {\n throw new QueryClientError('maxReads must be a positive integer', 'VALIDATION_ERROR');\n }\n if (\n input.concurrency != null &&\n (!Number.isInteger(input.concurrency) || input.concurrency < 1)\n ) {\n throw new QueryClientError('concurrency must be a positive integer', 'VALIDATION_ERROR');\n }\n\n const data = (await this.mutate('traverse', input)) as Record<string, unknown>;\n return {\n hops: ((data.hops as Record<string, unknown>[]) ?? []).map(\n (h): TraverseHopResult => ({\n relation: h.axbType as string,\n direction: h.direction as string,\n depth: h.depth as number,\n edgeCount: ((h.edges as unknown[]) ?? []).length,\n edges: ((h.edges as Record<string, unknown>[]) ?? [])\n .map(summarizeEdge)\n .filter(Boolean) as SummarizedEdge[],\n truncated: (h.truncated as boolean) ?? false,\n }),\n ),\n totalReads: (data.totalReads as number) ?? 0,\n truncated: (data.truncated as boolean) ?? 
false,\n };\n }\n\n async search(input: SearchInput): Promise<SearchResult> {\n requireString(input.q, 'q');\n const limit = clampInt(input.limit, 1, 50, 20);\n const data = (await this.query('search', { q: input.q, limit })) as Record<string, unknown>;\n return {\n results: ((data.results as Record<string, unknown>[]) ?? [])\n .map((r) => {\n const base = summarizeRecord(r);\n if (!base) return null;\n return {\n ...base,\n matchType: (r._matchType as string) ?? null,\n };\n })\n .filter(Boolean) as (SummarizedRecord & { matchType: string | null })[],\n };\n }\n}\n","import { readFileSync } from 'node:fs';\nimport { join } from 'node:path';\n\nconst CONFIG_FILES = ['firegraph.config.ts', 'firegraph.config.js', 'firegraph.config.mjs'];\nconst DEFAULT_PORT = 3884;\n\n/**\n * Read the editor port from firegraph config files using regex.\n * Zero-dependency — no jiti needed.\n */\nexport function readEditorPort(cwd?: string): number {\n const dir = cwd ?? process.cwd();\n for (const name of CONFIG_FILES) {\n try {\n const content = readFileSync(join(dir, name), 'utf8');\n const editorBlock = content.match(/editor\\s*:\\s*\\{[^}]*\\}/s)?.[0] ?? 
'';\n const portMatch = editorBlock.match(/port\\s*:\\s*(\\d+)/);\n if (portMatch) return parseInt(portMatch[1], 10);\n } catch {\n continue;\n }\n }\n return DEFAULT_PORT;\n}\n","import type { SummarizedEdge, SummarizedRecord } from './types.js';\n\nexport function summarizeRecord(r: Record<string, unknown> | null): SummarizedRecord | null {\n if (!r) return null;\n const out: SummarizedRecord = { type: r.aType as string, uid: r.aUid as string };\n const data = r.data as Record<string, unknown> | undefined;\n if (data && typeof data === 'object' && Object.keys(data).length > 0) {\n out.data = data;\n }\n return out;\n}\n\nexport function summarizeEdge(r: Record<string, unknown> | null): SummarizedEdge | null {\n if (!r) return null;\n const out: SummarizedEdge = {\n fromType: r.aType as string,\n fromUid: r.aUid as string,\n relation: r.axbType as string,\n toType: r.bType as string,\n toUid: r.bUid as string,\n };\n const data = r.data as Record<string, unknown> | undefined;\n if (data && typeof data === 'object' && Object.keys(data).length > 0) {\n out.data = data;\n }\n return out;\n}\n","/**\n * Storage-scope path utilities — materialized-path parsing helpers for the\n * SQLite backend's `storageScope` string and for any custom backend that\n * adopts the same encoding (e.g. a cross-DO routing layer that uses\n * `storageScope` as a Durable Object name).\n *\n * **Storage-scope** (as produced by `SqliteBackendImpl`) interleaves parent\n * UIDs with subgraph names:\n *\n * ```\n * '' // root\n * 'A/memories' // g.subgraph(A, 'memories')\n * 'A/memories/B/context' // .subgraph(B, 'context') on the above\n * ```\n *\n * The structure is the same as a Firestore collection path with the\n * collection/doc segments reordered: each pair is `<uid>/<name>`, where\n * `<uid>` is a node UID in the parent scope and `<name>` is the subgraph\n * name. 
Use these helpers to decode that structure when building cross-\n * backend routers (see `createRoutingBackend`).\n *\n * For Firestore paths (which begin with a collection segment), use\n * `resolveAncestorCollection` / `isAncestorUid` from `./cross-graph.js`.\n */\n\n/**\n * One segment of a materialized-path storage-scope — a `(uid, name)` pair\n * produced by one `subgraph(uid, name)` call.\n */\nexport interface StorageScopeSegment {\n /** Parent node UID at the enclosing scope. */\n uid: string;\n /** Subgraph name chosen by the caller (e.g. `'memories'`). */\n name: string;\n}\n\n/**\n * Parse a materialized-path storage-scope into its `(uid, name)` pairs.\n *\n * Returns `[]` for the root (`''`). Throws `Error('INVALID_SCOPE_PATH')`\n * when the string has an odd number of segments (a corrupt path — every\n * level contributes exactly two segments) or when any segment is empty.\n *\n * @example\n * ```ts\n * parseStorageScope(''); // []\n * parseStorageScope('A/memories'); // [{ uid: 'A', name: 'memories' }]\n * parseStorageScope('A/memories/B/context'); // [{ uid: 'A', name: 'memories' }, { uid: 'B', name: 'context' }]\n * ```\n */\nexport function parseStorageScope(scope: string): StorageScopeSegment[] {\n if (scope === '') return [];\n const parts = scope.split('/');\n if (parts.length % 2 !== 0) {\n throw new Error(\n `INVALID_SCOPE_PATH: storage-scope \"${scope}\" has an odd number of segments; ` +\n 'expected interleaved <uid>/<name> pairs.',\n );\n }\n const out: StorageScopeSegment[] = [];\n for (let i = 0; i < parts.length; i += 2) {\n const uid = parts[i];\n const name = parts[i + 1];\n if (!uid || !name) {\n throw new Error(\n `INVALID_SCOPE_PATH: storage-scope \"${scope}\" contains an empty segment at position ${i}.`,\n );\n }\n out.push({ uid, name });\n }\n return out;\n}\n\n/**\n * Resolve the ancestor **storage-scope** at which a given UID's node lives,\n * by scanning a materialized-path storage-scope for that UID.\n *\n * Mirrors 
`resolveAncestorCollection()` from `./cross-graph.js` for\n * Firestore paths, but operates on `storageScope` (no leading collection\n * segment — segments are `<uid>/<name>` pairs).\n *\n * @returns The storage-scope at which the UID's node was added via\n * `subgraph(uid, _)`, or `null` if the UID does not appear at a UID\n * position in the path.\n *\n * @example\n * ```ts\n * // Scope: 'A/memories/B/context'\n * resolveAncestorScope('A/memories/B/context', 'A'); // '' (A was added at root)\n * resolveAncestorScope('A/memories/B/context', 'B'); // 'A/memories'\n * resolveAncestorScope('A/memories/B/context', 'X'); // null\n * ```\n */\nexport function resolveAncestorScope(storageScope: string, uid: string): string | null {\n if (!uid) return null;\n if (storageScope === '') return null;\n const parts = storageScope.split('/');\n // UID positions are even indices (0, 2, 4, …); names are at odd indices.\n for (let i = 0; i < parts.length; i += 2) {\n if (parts[i] === uid) {\n return i === 0 ? 
'' : parts.slice(0, i).join('/');\n }\n }\n return null;\n}\n\n/**\n * Boolean shorthand for `resolveAncestorScope(scope, uid) !== null`.\n */\nexport function isAncestorScopeUid(storageScope: string, uid: string): boolean {\n return resolveAncestorScope(storageScope, uid) !== null;\n}\n\n/**\n * Join a parent storage-scope with a new `(uid, name)` pair, producing the\n * storage-scope that `backend.subgraph(uid, name)` would use internally.\n *\n * This is the inverse of `parseStorageScope`'s per-segment semantics and is\n * useful when computing DO names / shard keys from the router callback.\n */\nexport function appendStorageScope(parentScope: string, uid: string, name: string): string {\n if (!uid || uid.includes('/')) {\n throw new Error(\n `INVALID_SCOPE_PATH: uid must be non-empty and must not contain \"/\": got \"${uid}\".`,\n );\n }\n if (!name || name.includes('/')) {\n throw new Error(\n `INVALID_SCOPE_PATH: name must be non-empty and must not contain \"/\": got \"${name}\".`,\n );\n }\n return parentScope ? `${parentScope}/${uid}/${name}` : `${uid}/${name}`;\n}\n","/**\n * Pure compiler for engine-level multi-hop traversal.\n *\n * Takes an `EngineTraversalParams` spec and decides whether it can be\n * compiled into one nested-Pipeline round trip. Returns a discriminated\n * union — `{ eligible: true; normalized }` carries the validated spec\n * with defaults filled in; `{ eligible: false; reason }` carries a\n * human-readable explanation that the caller (traversal layer or\n * `engineTraversal: 'force'` test path) can either log, throw, or\n * silently fall back on.\n *\n * The compiler is split out so the validation surface is unit-testable\n * without spinning up a Firestore SDK or a real Pipeline. The actual\n * pipeline construction and result decoding live in\n * `firestore-traverse.ts` and depend on `@google-cloud/firestore`.\n *\n * Eligibility checks (in order):\n *\n * 1. `hops.length` ≥ 1 → otherwise no traversal to run\n * 2. 
`hops.length` ≤ `maxDepth` (default 5) → pipeline-depth cap\n * 3. Every hop has `limitPerSource` set → required to bound response size\n * 4. Every hop's `axbType` is non-empty → query needs a relation predicate\n * 5. Worst-case response size ≤ `maxReads` budget → prevent runaway tree responses\n *\n * The maxDepth bound is conservative — Firestore Pipelines don't\n * publish a hard limit on `addFields` / `define` nesting depth, but\n * empirically deep nesting starts to slow down planning. Five hops\n * covers the vast majority of real-world traversal specs; specs that\n * exceed it fall back to the per-hop loop with a debug-level signal.\n *\n * The response-size estimate is the conservative top-line:\n * `sources.length × Π(limitPerSource_i)`. This is the worst-case edge\n * count at the deepest hop, which dominates the total tree size for\n * branching factors > 1. We deliberately don't sum over hops — the\n * deepest-hop bound already triggers fallback well before any realistic\n * total response size matters.\n */\n\nimport type { EngineHopSpec, EngineTraversalParams } from '../types.js';\n\n/**\n * Default cap on `addFields` / `define` nesting depth. Traversal specs\n * deeper than this are rejected by the compiler and fall back to the\n * per-hop loop. Configurable per call via `compileEngineTraversal`'s\n * `opts.maxDepth`.\n */\nexport const MAX_PIPELINE_DEPTH = 5;\n\n/**\n * A normalized, validated engine-traversal spec ready for the executor\n * to translate into a nested Pipeline. Mirrors `EngineTraversalParams`\n * but with `direction` defaulted to `'forward'` on every hop and the\n * estimated worst-case response size attached for budget bookkeeping.\n */\nexport interface NormalizedEngineTraversal {\n sources: string[];\n hops: Array<\n Required<Pick<EngineHopSpec, 'axbType' | 'limitPerSource' | 'direction'>> & EngineHopSpec\n >;\n /** Worst-case edge count at the deepest hop — `sources.length × Π(limitPerSource_i)`. 
*/\n estimatedReads: number;\n}\n\nexport type CompilerResult =\n | { eligible: true; normalized: NormalizedEngineTraversal }\n | { eligible: false; reason: string };\n\nexport interface CompilerOptions {\n /** Override the depth cap. Default `MAX_PIPELINE_DEPTH` (5). */\n maxDepth?: number;\n /**\n * Worst-case response-size budget. The compiler refuses to emit when\n * `sources.length × Π(limitPerSource_i)` exceeds this.\n */\n maxReads?: number;\n}\n\n/**\n * Validate an engine-traversal spec. Pure; no SDK interaction.\n *\n * Returns `{ eligible: true; normalized }` with `direction` defaulted\n * and `estimatedReads` attached, or `{ eligible: false; reason }` with\n * a one-line description suitable for logging or for an\n * `UNSUPPORTED_OPERATION` error message.\n */\nexport function compileEngineTraversal(\n params: EngineTraversalParams,\n opts?: CompilerOptions,\n): CompilerResult {\n const maxDepth = opts?.maxDepth ?? MAX_PIPELINE_DEPTH;\n const maxReads = opts?.maxReads ?? params.maxReads;\n\n if (!Array.isArray(params.hops) || params.hops.length === 0) {\n return { eligible: false, reason: 'engine traversal requires at least one hop' };\n }\n if (params.hops.length > maxDepth) {\n return {\n eligible: false,\n reason: `engine traversal depth ${params.hops.length} exceeds MAX_PIPELINE_DEPTH (${maxDepth})`,\n };\n }\n if (!Array.isArray(params.sources)) {\n return { eligible: false, reason: 'engine traversal requires a sources array' };\n }\n\n const normalizedHops: NormalizedEngineTraversal['hops'] = [];\n for (let i = 0; i < params.hops.length; i++) {\n const hop = params.hops[i];\n if (!hop.axbType || hop.axbType.length === 0) {\n return {\n eligible: false,\n reason: `engine traversal hop ${i} is missing axbType`,\n };\n }\n if (\n typeof hop.limitPerSource !== 'number' ||\n hop.limitPerSource <= 0 ||\n !Number.isFinite(hop.limitPerSource)\n ) {\n return {\n eligible: false,\n reason: `engine traversal hop ${i} (${hop.axbType}) requires a positive 
limitPerSource`,\n };\n }\n normalizedHops.push({\n ...hop,\n axbType: hop.axbType,\n direction: hop.direction ?? 'forward',\n limitPerSource: hop.limitPerSource,\n });\n }\n\n // Worst-case at deepest hop. We multiply iteratively so that an\n // overflowing product short-circuits before bumping into JS's float\n // precision. `Number.MAX_SAFE_INTEGER` is well past any reasonable\n // `maxReads` value, so that's the early-exit threshold even when the\n // caller didn't supply a budget.\n let estimatedReads = Math.max(1, params.sources.length);\n for (const hop of normalizedHops) {\n estimatedReads *= hop.limitPerSource;\n if (estimatedReads > Number.MAX_SAFE_INTEGER) {\n estimatedReads = Number.MAX_SAFE_INTEGER;\n break;\n }\n }\n\n if (maxReads !== undefined && estimatedReads > maxReads) {\n return {\n eligible: false,\n reason: `engine traversal worst-case response size ${estimatedReads} exceeds maxReads budget ${maxReads}`,\n };\n }\n\n return {\n eligible: true,\n normalized: {\n sources: params.sources,\n hops: normalizedHops,\n estimatedReads,\n },\n };\n}\n","import { FiregraphError, TraversalError } from './errors.js';\nimport { compileEngineTraversal } from './internal/firestore-traverse-compiler.js';\nimport type {\n EngineHopSpec,\n EngineTraversalParams,\n EngineTraversalResult,\n ExpandParams,\n FindEdgesParams,\n GraphClient,\n GraphReader,\n GraphRegistry,\n HopDefinition,\n HopResult,\n StoredGraphRecord,\n TraversalBuilder,\n TraversalOptions,\n TraversalResult,\n} from './types.js';\n\nconst DEFAULT_LIMIT = 10;\nconst DEFAULT_MAX_READS = 100;\nconst DEFAULT_CONCURRENCY = 5;\n\n/** One-time warning flag: emitted when cross-graph hop is silently skipped. */\nlet _crossGraphWarned = false;\n\n/** Type guard to check if a reader is a GraphClient (has subgraph method). 
*/\nfunction isGraphClient(reader: GraphReader): reader is GraphClient {\n return 'subgraph' in reader && typeof (reader as GraphClient).subgraph === 'function';\n}\n\n/**\n * Type guard to detect whether a reader has the `query.join` capability —\n * i.e. the backend supports server-side multi-source fan-out via `expand()`.\n *\n * Branching on this lets us dispatch one `expand()` call per hop instead of\n * one `findEdges()` per source. The savings scale linearly with source-set\n * size; for the common case of a 50-source hop, that's 50 round trips\n * collapsed into 1.\n *\n * Cross-graph hops are explicitly NOT routed through `expand()` even when\n * the cap is present — each source UID resolves to a distinct subgraph\n * reader, which can't be batched into one server-side statement. The\n * traversal driver enforces that boundary directly (see the `isCrossGraph`\n * branch below).\n */\nfunction readerSupportsExpand(reader: GraphReader): reader is GraphClient & {\n expand(params: ExpandParams): Promise<{ edges: StoredGraphRecord[] }>;\n} {\n if (!isGraphClient(reader)) return false;\n const client = reader as GraphClient;\n // `capabilities` lives on the public client surface (see `CoreGraphClient`).\n // The runtime check is required because `expand` exists on every\n // `GraphClientImpl` (the permissive `GraphClient<Capability>` shape) but\n // throws `UNSUPPORTED_OPERATION` when the backend doesn't declare the cap.\n // Reading `capabilities` instead of feeling for the method is the cap-aware\n // dispatch the rest of the codebase uses.\n return (\n 'capabilities' in client &&\n typeof client.capabilities?.has === 'function' &&\n client.capabilities.has('query.join') &&\n typeof (client as { expand?: unknown }).expand === 'function'\n );\n}\n\n/**\n * Type guard mirroring `readerSupportsExpand` but for the `traversal.serverSide`\n * capability. 
When this returns `true`, the reader can dispatch a multi-hop\n * spec as one nested-Pipeline round trip via `runEngineTraversal()`.\n *\n * Eligibility at the spec level (no cross-graph hops, no JS filter callbacks,\n * `limitPerSource` set on every hop, depth ≤ `MAX_PIPELINE_DEPTH`,\n * response-size product ≤ `maxReads`) is checked separately by\n * `compileEngineTraversal`. This guard only certifies the reader has the\n * method to call.\n */\nfunction readerSupportsEngineTraversal(reader: GraphReader): reader is GraphClient & {\n runEngineTraversal(params: EngineTraversalParams): Promise<EngineTraversalResult>;\n} {\n if (!isGraphClient(reader)) return false;\n const client = reader as GraphClient;\n return (\n 'capabilities' in client &&\n typeof client.capabilities?.has === 'function' &&\n client.capabilities.has('traversal.serverSide') &&\n typeof (client as { runEngineTraversal?: unknown }).runEngineTraversal === 'function'\n );\n}\n\nclass Semaphore {\n private queue: Array<() => void> = [];\n private active = 0;\n\n constructor(private readonly slots: number) {}\n\n async acquire(): Promise<void> {\n if (this.active < this.slots) {\n this.active++;\n return;\n }\n return new Promise<void>((resolve) => {\n this.queue.push(resolve);\n });\n }\n\n release(): void {\n this.active--;\n const next = this.queue.shift();\n if (next) {\n this.active++;\n next();\n }\n }\n}\n\nclass TraversalBuilderImpl implements TraversalBuilder {\n private readonly hops: HopDefinition[] = [];\n\n constructor(\n private readonly reader: GraphReader,\n private readonly startUid: string,\n private readonly registry?: GraphRegistry,\n ) {}\n\n follow(axbType: string, options?: Omit<HopDefinition, 'axbType'>): TraversalBuilder {\n this.hops.push({ axbType, ...options });\n return this;\n }\n\n async run(options?: TraversalOptions): Promise<TraversalResult> {\n if (this.hops.length === 0) {\n throw new TraversalError('Traversal requires at least one follow() hop');\n }\n\n const 
maxReads = options?.maxReads ?? DEFAULT_MAX_READS;\n const concurrency = options?.concurrency ?? DEFAULT_CONCURRENCY;\n const returnIntermediates = options?.returnIntermediates ?? false;\n const engineMode = options?.engineTraversal ?? 'auto';\n const semaphore = new Semaphore(concurrency);\n\n // Engine-level traversal — try to compile the whole hop chain into one\n // nested-Pipeline round trip. Eligibility (in order of cheap-first):\n //\n // 1. `engineMode !== 'off'` → caller didn't opt out\n // 2. reader declares `traversal.serverSide` → backend has the path\n // 3. no hop carries a JS `filter` callback → can't run JS server-side\n // 4. no hop is cross-graph → distinct collection paths\n // 5. compiler accepts the spec → depth, limits, response size\n //\n // `engineMode === 'force'` flips failures from silent fallback to a\n // thrown `UNSUPPORTED_OPERATION`, which is what tests/benchmarks want.\n // `engineMode === 'auto'` (the default) silently falls back so existing\n // callers see the new fast-path on Enterprise without any code change.\n if (engineMode !== 'off') {\n const engineResult = await this.tryEngineTraversal({\n engineMode,\n returnIntermediates,\n });\n if (engineResult) return engineResult;\n }\n\n let totalReads = 0;\n let truncated = false;\n // Track (uid, reader) pairs to support context carry-forward across hops.\n // When a hop crosses into a subgraph, the resulting UIDs carry the subgraph\n // reader so subsequent hops without targetGraph stay in that subgraph.\n let sources: Array<{ uid: string; reader: GraphReader }> = [\n { uid: this.startUid, reader: this.reader },\n ];\n const hopResults: HopResult[] = [];\n\n for (let depth = 0; depth < this.hops.length; depth++) {\n const hop = this.hops[depth];\n\n if (sources.length === 0) {\n hopResults.push({\n axbType: hop.axbType,\n depth,\n edges: [],\n sourceCount: 0,\n truncated: false,\n });\n continue;\n }\n\n const hopEdges: Array<{ edge: StoredGraphRecord; reader: GraphReader }> = 
[];\n const sourceCount = sources.length;\n let hopTruncated = false;\n\n // Resolve targetGraph for this hop:\n // 1. Explicit on the hop definition takes precedence\n // 2. Otherwise check the registry for the axbType\n const resolvedTargetGraph = this.resolveTargetGraph(hop);\n const direction = hop.direction ?? 'forward';\n const isCrossGraph = direction === 'forward' && !!resolvedTargetGraph;\n\n // Fast path: server-side fan-out via `expand()` when the reader supports\n // `query.join`. Eligibility:\n // 1. Not a cross-graph hop — each cross-graph source resolves to its\n // own subgraph reader, which can't be batched into one statement.\n // 2. All sources share the same reader. Mixed readers happen only\n // after a previous cross-graph carry-forward; for the typical\n // single-graph or fully-routed-to-one-DO case, this is true.\n // 3. The shared reader's backend declares `query.join`.\n //\n // Budget accounting: one `expand()` call counts as ONE read against\n // `maxReads`, regardless of source-set size. That reflects reality\n // (1 server round trip = 1 read) and is the entire point of the\n // capability — a 50-source hop collapses 50 round trips into 1.\n // Callers who expect \"1 read per source\" semantics from `maxReads`\n // will see traversals reach further than they did with the per-source\n // loop; this is an improvement, not a regression.\n // Fast-path eligibility check (3): sources share a reader. Mixed-reader\n // sources happen only after a cross-graph carry-forward (hop N had a\n // `targetGraph`, fanning each source UID into its own subgraph reader).\n // The empty-sources branch is already handled by the `if (sources.length === 0)`\n // continue earlier in the loop, so `sources` is non-empty here.\n const sharedReader = sources.every((s) => s.reader === sources[0].reader)\n ? 
sources[0].reader\n : null;\n const canFastPath = !isCrossGraph && sharedReader && readerSupportsExpand(sharedReader);\n\n if (canFastPath && sharedReader) {\n if (totalReads >= maxReads) {\n hopTruncated = true;\n } else {\n totalReads++;\n const limit = hop.limit ?? DEFAULT_LIMIT;\n const expandParams: ExpandParams = {\n sources: sources.map((s) => s.uid),\n axbType: hop.axbType,\n direction,\n };\n if (hop.aType) expandParams.aType = hop.aType;\n if (hop.bType) expandParams.bType = hop.bType;\n if (hop.orderBy) expandParams.orderBy = hop.orderBy;\n // With a hop-level `filter`, we can't apply `limitPerSource` at the\n // SQL layer — the filter is a JS predicate that runs after rows\n // come back. Pass undefined to fetch all matching edges, filter,\n // then enforce per-source limit in JS below.\n if (!hop.filter) {\n expandParams.limitPerSource = limit;\n }\n const result = await (\n sharedReader as GraphClient & {\n expand(p: ExpandParams): Promise<{ edges: StoredGraphRecord[] }>;\n }\n ).expand(expandParams);\n let edges = result.edges;\n if (hop.filter) {\n edges = edges.filter(hop.filter);\n // Enforce per-source post-filter limit. Without this, a source\n // whose post-filter edge count exceeds `limit` would carry\n // through more next-hop sources than the user requested.\n const counts = new Map<string, number>();\n const kept: StoredGraphRecord[] = [];\n for (const e of edges) {\n const sourceUid = direction === 'forward' ? e.aUid : e.bUid;\n const c = counts.get(sourceUid) ?? 0;\n if (c < limit) {\n counts.set(sourceUid, c + 1);\n kept.push(e);\n }\n }\n edges = kept;\n }\n for (const edge of edges) {\n hopEdges.push({ edge, reader: sharedReader });\n }\n }\n\n // Skip the per-source task loop — we already filled `hopEdges`.\n const fastEdges = hopEdges.map((h) => h.edge);\n hopResults.push({\n axbType: hop.axbType,\n depth,\n edges: returnIntermediates ? 
[...fastEdges] : fastEdges,\n sourceCount,\n truncated: hopTruncated,\n });\n if (hopTruncated) truncated = true;\n\n // Build next sources, same dedup logic as the slow path.\n const seen = new Map<string, GraphReader>();\n for (const { edge, reader: edgeReader } of hopEdges) {\n const nextUid = direction === 'forward' ? edge.bUid : edge.aUid;\n if (!seen.has(nextUid)) seen.set(nextUid, edgeReader);\n }\n sources = [...seen.entries()].map(([uid, reader]) => ({ uid, reader }));\n continue;\n }\n\n // Slow path (per-source loop): cross-graph hops, mixed-reader sources,\n // or backends without `query.join`.\n const tasks = sources.map(({ uid, reader: sourceReader }) => async () => {\n if (totalReads >= maxReads) {\n hopTruncated = true;\n return;\n }\n\n await semaphore.acquire();\n try {\n if (totalReads >= maxReads) {\n hopTruncated = true;\n return;\n }\n\n totalReads++;\n\n const params: FindEdgesParams = { axbType: hop.axbType };\n\n if (direction === 'forward') {\n params.aUid = uid;\n if (hop.bType) params.bType = hop.bType;\n } else {\n params.bUid = uid;\n if (hop.aType) params.aType = hop.aType;\n }\n\n if (direction === 'forward' && hop.aType) {\n params.aType = hop.aType;\n }\n if (direction === 'reverse' && hop.bType) {\n params.bType = hop.bType;\n }\n\n if (hop.orderBy) params.orderBy = hop.orderBy;\n\n const limit = hop.limit ?? 
DEFAULT_LIMIT;\n if (hop.filter) {\n params.limit = 0;\n } else {\n params.limit = limit;\n }\n\n // Choose the reader for this hop:\n // - Cross-graph hop: create a subgraph reader from the ROOT client\n // (targetGraph is always relative to root)\n // - No cross-graph: use the carried-forward reader from previous hop\n // (context tracking — stay in whatever subgraph we're already in)\n let hopReader: GraphReader;\n let nextReader: GraphReader;\n if (isCrossGraph) {\n if (isGraphClient(this.reader)) {\n hopReader = this.reader.subgraph(uid, resolvedTargetGraph!);\n nextReader = hopReader;\n } else {\n hopReader = sourceReader;\n nextReader = sourceReader;\n if (!_crossGraphWarned) {\n _crossGraphWarned = true;\n console.warn(\n `[firegraph] Traversal hop \"${hop.axbType}\" has targetGraph \"${resolvedTargetGraph}\" ` +\n 'but the reader does not support subgraph(). Cross-graph hop will query the current ' +\n 'collection instead. Pass a GraphClient to createTraversal() to enable cross-graph traversal.',\n );\n }\n }\n } else {\n // No targetGraph — carry forward context from previous hop\n hopReader = sourceReader;\n nextReader = sourceReader;\n }\n\n let edges = await hopReader.findEdges(params);\n\n if (hop.filter) {\n edges = edges.filter(hop.filter);\n edges = edges.slice(0, limit);\n }\n\n for (const edge of edges) {\n hopEdges.push({ edge, reader: nextReader });\n }\n } finally {\n semaphore.release();\n }\n });\n\n await Promise.all(tasks.map((task) => task()));\n\n const edges = hopEdges.map((h) => h.edge);\n\n hopResults.push({\n axbType: hop.axbType,\n depth,\n edges: returnIntermediates ? 
[...edges] : edges,\n sourceCount,\n truncated: hopTruncated,\n });\n\n if (hopTruncated) {\n truncated = true;\n }\n\n // Build next sources with deduplication by UID.\n // When the same UID appears from multiple source readers, the first one wins.\n const seen = new Map<string, GraphReader>();\n for (const { edge, reader: edgeReader } of hopEdges) {\n const nextUid = direction === 'forward' ? edge.bUid : edge.aUid;\n if (!seen.has(nextUid)) {\n seen.set(nextUid, edgeReader);\n }\n }\n sources = [...seen.entries()].map(([uid, reader]) => ({ uid, reader }));\n }\n\n const lastHop = hopResults[hopResults.length - 1];\n\n return {\n nodes: lastHop.edges,\n hops: hopResults,\n totalReads,\n truncated,\n };\n }\n\n /**\n * Try to dispatch the entire hop chain as one engine-traversal call.\n * Returns a `TraversalResult` on success, or `undefined` if the spec is\n * ineligible and the caller should fall through to the per-hop loop.\n *\n * `'force'` mode throws on any ineligibility instead of returning\n * `undefined` — the caller intentionally opted out of fallback.\n */\n private async tryEngineTraversal(args: {\n engineMode: 'auto' | 'force';\n returnIntermediates: boolean;\n }): Promise<TraversalResult | undefined> {\n const { engineMode, returnIntermediates } = args;\n\n const refuse = (reason: string): TraversalResult | undefined => {\n if (engineMode === 'force') {\n throw new FiregraphError(`engineTraversal: 'force' but ${reason}`, 'UNSUPPORTED_OPERATION');\n }\n return undefined;\n };\n\n if (!readerSupportsEngineTraversal(this.reader)) {\n return refuse('reader does not declare traversal.serverSide capability');\n }\n const client = this.reader;\n\n // Per-hop eligibility — JS filters and cross-graph hops both prevent\n // engine compilation. 
Walk the full chain so the failure reason can\n // point at the offending hop.\n const engineHops: EngineHopSpec[] = [];\n for (let i = 0; i < this.hops.length; i++) {\n const hop = this.hops[i];\n if (hop.filter) {\n return refuse(`hop ${i} (${hop.axbType}) carries a JS filter callback`);\n }\n const targetGraph = this.resolveTargetGraph(hop);\n const direction = hop.direction ?? 'forward';\n if (targetGraph) {\n return refuse(`hop ${i} (${hop.axbType}) is cross-graph (targetGraph=${targetGraph})`);\n }\n const limit = hop.limit ?? DEFAULT_LIMIT;\n const engineHop: EngineHopSpec = {\n axbType: hop.axbType,\n direction,\n limitPerSource: limit,\n };\n if (hop.aType) engineHop.aType = hop.aType;\n if (hop.bType) engineHop.bType = hop.bType;\n if (hop.orderBy) engineHop.orderBy = hop.orderBy;\n engineHops.push(engineHop);\n }\n\n const params: EngineTraversalParams = {\n sources: [this.startUid],\n hops: engineHops,\n };\n\n // Compile-side validation (depth, limits, response-size budget) lives\n // in `firestore-traverse-compiler.ts`. We invoke it from the traversal\n // layer (rather than relying on the executor to throw) so 'auto' can\n // silently fall back without ever entering the SDK.\n const compiled = compileEngineTraversal(params);\n if (!compiled.eligible) {\n return refuse(compiled.reason);\n }\n\n let engineResult: EngineTraversalResult;\n try {\n engineResult = await client.runEngineTraversal(params);\n } catch (err) {\n if (engineMode === 'force') throw err;\n return undefined;\n }\n\n // Translate `EngineTraversalResult` into `TraversalResult` (`HopResult[]`).\n // Truncation is detected per-hop: if the returned edge count equals the\n // limitPerSource enforced in the pipeline, the server hit its cap and\n // there may be more edges. 
This is conservative — for depth-1+ hops with\n // multiple parents, deduplication may reduce the count below limitPerSource\n // per-parent while the aggregate still triggers the check.\n const hopResults: HopResult[] = [];\n for (let i = 0; i < this.hops.length; i++) {\n const definedHop = this.hops[i];\n const engineHopResult = engineResult.hops[i] ?? { edges: [], sourceCount: 0 };\n const edges = engineHopResult.edges;\n const hopTruncated = edges.length >= engineHops[i].limitPerSource;\n hopResults.push({\n axbType: definedHop.axbType,\n depth: i,\n edges: returnIntermediates ? [...edges] : edges,\n sourceCount: engineHopResult.sourceCount,\n truncated: hopTruncated,\n });\n }\n\n const lastHop = hopResults[hopResults.length - 1];\n return {\n nodes: lastHop.edges,\n hops: hopResults,\n // One server-side round trip — same accounting as the `expand()`\n // fast path. The tree response can carry up to `estimatedReads`\n // docs total, but the budget is in round trips, not docs.\n totalReads: 1,\n truncated: hopResults.some((h) => h.truncated),\n };\n }\n\n /**\n * Resolve the targetGraph for a hop. Priority:\n * 1. Explicit `hop.targetGraph` (user override)\n * 2. Registry `targetGraph` for the axbType (if registry available)\n * 3. undefined (no cross-graph)\n */\n private resolveTargetGraph(hop: HopDefinition): string | undefined {\n if (hop.targetGraph) return hop.targetGraph;\n\n if (this.registry) {\n const entries = this.registry.lookupByAxbType(hop.axbType);\n // All entries for the same axbType should share targetGraph; use the first non-undefined\n for (const entry of entries) {\n if (entry.targetGraph) return entry.targetGraph;\n }\n }\n\n return undefined;\n }\n}\n\n/** @internal Reset the one-time cross-graph warning flag (for testing). 
*/\nexport function _resetCrossGraphWarning(): void {\n _crossGraphWarned = false;\n}\n\n/**\n * Create a traversal builder for multi-hop graph traversal.\n *\n * Accepts either a `GraphReader` (backwards compatible) or a `GraphClient`.\n * When a `GraphClient` is provided, cross-graph traversal via `targetGraph`\n * is supported — the traversal can follow edges into subgraphs.\n *\n * @param reader - A `GraphClient` or `GraphReader` to execute queries against\n * @param startUid - UID of the starting node\n * @param registry - Optional registry for automatic `targetGraph` resolution\n */\nexport function createTraversal(\n reader: GraphClient | GraphReader,\n startUid: string,\n registry?: GraphRegistry,\n): TraversalBuilder {\n return new TraversalBuilderImpl(reader, startUid, registry);\n}\n","/**\n * Model Views — framework-agnostic view definitions for graph entities.\n *\n * Projects define Web Components that render entity data in purpose-driven\n * ways. Each view class declares a static `viewName`, and receives the\n * entity's `data` payload via a `data` property setter.\n *\n * @example\n * ```ts\n * import { defineViews } from 'firegraph';\n *\n * class UserCard extends HTMLElement {\n * static viewName = 'card';\n * static description = 'Compact user card';\n * private _data: Record<string, unknown> = {};\n * set data(v: Record<string, unknown>) { this._data = v; this.render(); }\n * connectedCallback() { this.render(); }\n * private render() {\n * this.innerHTML = `<strong>${this._data.displayName ?? ''}</strong>`;\n * }\n * }\n *\n * export default defineViews({\n * nodes: { user: { views: [UserCard] } },\n * });\n * ```\n */\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/**\n * A Web Component class used as a view. The class must have a static\n * `viewName` and must be constructable. 
It will be registered as a custom\n * element via `customElements.define()` in browser environments.\n *\n * Note: this interface avoids referencing `HTMLElement` directly so the\n * library can compile without DOM lib types. Consumer code (which has DOM)\n * will satisfy this constraint naturally.\n */\nexport interface ViewComponentClass {\n new (...args: any[]): { data: Record<string, unknown> };\n /** Short identifier for this view (e.g. 'card', 'profile'). */\n viewName: string;\n /** Optional human-readable description. */\n description?: string;\n}\n\n/** Configuration for all views of a single entity type. */\nexport interface EntityViewConfig {\n /** View component classes to register. */\n views: ViewComponentClass[];\n /**\n * Optional sample data for the gallery. A single object matching\n * the entity's JSON Schema — shared across all views.\n */\n sampleData?: Record<string, unknown>;\n}\n\n/** Input shape accepted by `defineViews()`. */\nexport interface ViewRegistryInput {\n /** Node views keyed by aType (e.g. 'user', 'tour'). */\n nodes?: Record<string, EntityViewConfig>;\n /** Edge views keyed by axbType (e.g. 'hasDeparture'). */\n edges?: Record<string, EntityViewConfig>;\n}\n\n/** Serialisable metadata for a single view. */\nexport interface ViewMeta {\n /** Custom element tag name (e.g. 'fg-user-card'). */\n tagName: string;\n /** Short identifier matching the component's static viewName. */\n viewName: string;\n /** Optional human-readable description. */\n description?: string;\n}\n\n/** Serialisable metadata for all views of a single entity type. */\nexport interface EntityViewMeta {\n views: ViewMeta[];\n sampleData?: Record<string, unknown>;\n}\n\n/** The resolved view registry returned by `defineViews()`. 
*/\nexport interface ViewRegistry {\n nodes: Record<string, EntityViewMeta>;\n edges: Record<string, EntityViewMeta>;\n}\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Sanitise a string for use as part of a custom element tag name. */\nfunction sanitizeTagPart(s: string): string {\n return s\n .toLowerCase()\n .replace(/[^a-z0-9]/g, '-')\n .replace(/-+/g, '-')\n .replace(/^-|-$/g, '');\n}\n\n/** Minimal interface for CustomElementRegistry (avoids depending on DOM lib). */\ninterface CustomElementRegistryLike {\n get(name: string): unknown;\n define(name: string, constructor: unknown): void;\n}\n\n/**\n * Try to access the browser's `customElements` registry.\n * Returns `null` in Node.js or environments without Web Components support.\n */\nfunction getCustomElements(): CustomElementRegistryLike | null {\n const g = globalThis as any;\n if (g.customElements && typeof g.customElements.define === 'function') {\n return g.customElements as CustomElementRegistryLike;\n }\n return null;\n}\n\n/**\n * Wrap a view class so that errors in connectedCallback, disconnectedCallback,\n * and the data setter are caught and logged rather than crashing the page.\n * Shows an inline error message when the view fails to render.\n */\nfunction resilientView(ViewClass: ViewComponentClass, tagName: string): ViewComponentClass {\n const g = globalThis as any;\n if (!g.HTMLElement) return ViewClass; // Node.js — no wrapping needed\n\n const Wrapped = class extends (ViewClass as unknown as new (...args: any[]) => any) {\n connectedCallback() {\n try {\n super.connectedCallback?.();\n } catch (err) {\n console.warn(`[firegraph] <${tagName}> connectedCallback error:`, err);\n this._showError(err);\n }\n }\n\n disconnectedCallback() {\n try {\n super.disconnectedCallback?.();\n } catch (err) {\n console.warn(`[firegraph] <${tagName}> disconnectedCallback 
error:`, err);\n }\n }\n\n set data(v: Record<string, unknown>) {\n try {\n super.data = v;\n } catch (err) {\n console.warn(`[firegraph] <${tagName}> data setter error:`, err);\n this._showError(err);\n }\n }\n\n get data(): Record<string, unknown> {\n try {\n return super.data;\n } catch {\n return {};\n }\n }\n\n _showError(err: unknown) {\n try {\n this.innerHTML =\n `<div style=\"padding:6px;color:#f87171;font-size:11px;font-family:monospace;\">` +\n `View error in <${tagName}>: ${err instanceof Error ? err.message : String(err)}</div>`;\n } catch {\n /* last resort — don't throw from error handler */\n }\n }\n };\n\n // Preserve static metadata\n (Wrapped as unknown as ViewComponentClass).viewName = ViewClass.viewName;\n (Wrapped as unknown as ViewComponentClass).description = ViewClass.description;\n\n return Wrapped as unknown as ViewComponentClass;\n}\n\n// ---------------------------------------------------------------------------\n// defineViews()\n// ---------------------------------------------------------------------------\n\n/**\n * Build a `ViewRegistry` from component classes.\n *\n * In the browser the components are registered as custom elements with\n * deterministic tag names (`fg-{entityType}-{viewName}`). On the server\n * (Node.js) only metadata is returned — no custom element registration.\n */\nexport function defineViews(input: ViewRegistryInput): ViewRegistry {\n const nodes: Record<string, EntityViewMeta> = {};\n const edges: Record<string, EntityViewMeta> = {};\n const registry = getCustomElements();\n\n // --- nodes ---\n for (const [entityType, config] of Object.entries(input.nodes ?? 
{})) {\n const viewMetas: ViewMeta[] = [];\n for (const ViewClass of config.views) {\n const tagName = `fg-${sanitizeTagPart(entityType)}-${sanitizeTagPart(ViewClass.viewName)}`;\n viewMetas.push({\n tagName,\n viewName: ViewClass.viewName,\n description: ViewClass.description,\n });\n if (registry && !registry.get(tagName)) {\n registry.define(tagName, resilientView(ViewClass, tagName));\n }\n }\n nodes[entityType] = {\n views: viewMetas,\n sampleData: config.sampleData,\n };\n }\n\n // --- edges ---\n for (const [axbType, config] of Object.entries(input.edges ?? {})) {\n const viewMetas: ViewMeta[] = [];\n for (const ViewClass of config.views) {\n const tagName = `fg-edge-${sanitizeTagPart(axbType)}-${sanitizeTagPart(ViewClass.viewName)}`;\n viewMetas.push({\n tagName,\n viewName: ViewClass.viewName,\n description: ViewClass.description,\n });\n if (registry && !registry.get(tagName)) {\n registry.define(tagName, resilientView(ViewClass, tagName));\n }\n }\n edges[axbType] = {\n views: viewMetas,\n sampleData: config.sampleData,\n };\n }\n\n return { nodes, edges 
};\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiBO,SAAS,cAAc,OAAyB;AACrD,MAAI,UAAU,QAAQ,OAAO,UAAU,SAAU,QAAO;AACxD,QAAM,MAAO,MAAkC,iBAAiB;AAChE,SAAO,OAAO,QAAQ,YAAY,YAAY,IAAI,GAAG;AACvD;AArBA,IAYa,mBAEP;AAdN;AAAA;AAAA;AAYO,IAAM,oBAAoB;AAEjC,IAAM,cAAc,oBAAI,IAAI,CAAC,aAAa,YAAY,eAAe,mBAAmB,CAAC;AAAA;AAAA;;;ACdzF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAmCA,SAAS,YAAY,OAAoC;AACvD,SAAO,iBAAiB;AAC1B;AAEA,SAAS,WAAW,OAAmC;AACrD,SAAO,iBAAiB;AAC1B;AAEA,SAAS,oBAAoB,OAA4C;AAEvE,MAAI,UAAU,QAAQ,OAAO,UAAU,SAAU,QAAO;AACxD,QAAM,IAAI;AACV,SACE,OAAO,EAAE,SAAS,YAClB,EAAE,cAAc,UAChB,OAAO,EAAE,OAAO,YAChB,EAAE,aAAa,SAAS;AAE5B;AAEA,SAAS,cAAc,OAAyB;AAC9C,MAAI,UAAU,QAAQ,OAAO,UAAU,SAAU,QAAO;AACxD,QAAM,IAAI;AACV,SACE,EAAE,aAAa,SAAS,iBAAiB,MAAM,QAAS,EAA8B,OAAO;AAEjG;AAYO,SAAS,wBAAwB,MAAwD;AAC9F,SAAO,eAAe,IAAI;AAC5B;AAEA,SAAS,eAAe,OAAyB;AAE/C,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,MAAI,OAAO,UAAU,SAAU,QAAO;AAGtC,MAAI,YAAY,KAAK,GAAG;AACtB,WAAO;AAAA,MACL,CAAC,iBAAiB,GAAG;AAAA,MACrB,SAAS,MAAM;AAAA,MACf,aAAa,MAAM;AAAA,IACrB;AAAA,EACF;AACA,MAAI,WAAW,KAAK,GAAG;AACrB,WAAO;AAAA,MACL,CAAC,iBAAiB,GAAG;AAAA,MACrB,UAAU,MAAM;AAAA,MAChB,WAAW,MAAM;AAAA,IACnB;AAAA,EACF;AACA,MAAI,oBAAoB,KAAK,GAAG;AAC9B,WAAO,EAAE,CAAC,iBAAiB,GAAG,qBAAqB,MAAO,MAA4B,KAAK;AAAA,EAC7F;AACA,MAAI,cAAc,KAAK,GAAG;AAExB,UAAM,IAAI;AACV,UAAM,SACJ,OAAO,EAAE,YAAY,aAAc,EAAE,QAA2B,IAAK,EAAE;AACzE,WAAO,EAAE,CAAC,iBAAiB,GAAG,eAAe,QAAQ,CAAC,GAAG,MAAM,EAAE;AAAA,EACnE;AAGA,MAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,WAAO,MAAM,IAAI,cAAc;AAAA,EACjC;AAGA,QAAM,SAAkC,CAAC;AACzC,aAAW,OAAO,OAAO,KAAK,KAAgC,GAAG;AAC/D,WAAO,GAAG,IAAI,eAAgB,MAAkC,GAAG,CAAC;AAAA,EACtE;AACA,SAAO;AACT;AAgBO,SAAS,0BACd,MACA,IACyB;AACzB,SAAO,iBAAiB,MAAM,EAAE;AAClC;AAEA,SAAS,iBAAiB,OAAgB,IAAyB;AACjE,MAAI,UAAU,QAAQ,UAAU,OAAW,QAAO;AAClD,MAAI,OAAO,UAAU,SAAU,QAAO;AAMtC,MACE,YAAY,KAAK,KACjB,WAAW,KAAK,KAChB,oBAAoB,KAAK,KACzB,cAAc,KAAK,GACnB;AACA,WAAO;AAAA,EACT;AAGA,MAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,WAAO,MAAM,IAAI,CAAC,MAAM,iBAAiB,GAAG,EAAE,CAAC;AAAA,EACjD;AAEA,QAAM,MAAM;AAGZ,MAAI,cAAc,GAAG,GAAG;AACtB,UAAM,MAAM
,IAAI,iBAAiB;AAEjC,YAAQ,KAAK;AAAA,MACX,KAAK;AAEH,YAAI,OAAO,IAAI,YAAY,YAAY,OAAO,IAAI,gBAAgB,SAAU,QAAO;AACnF,eAAO,IAAI,2BAAU,IAAI,SAAS,IAAI,WAAW;AAAA,MAEnD,KAAK;AACH,YAAI,OAAO,IAAI,aAAa,YAAY,OAAO,IAAI,cAAc,SAAU,QAAO;AAClF,eAAO,IAAI,0BAAS,IAAI,UAAU,IAAI,SAAS;AAAA,MAEjD,KAAK;AACH,YAAI,CAAC,MAAM,QAAQ,IAAI,MAAM,EAAG,QAAO;AACvC,eAAO,4BAAW,OAAO,IAAI,MAAkB;AAAA,MAEjD,KAAK;AACH,YAAI,OAAO,IAAI,SAAS,SAAU,QAAO;AACzC,YAAI,IAAI;AACN,iBAAO,GAAG,IAAI,IAAI,IAAI;AAAA,QACxB;AAEA,YAAI,CAAC,eAAe;AAClB,0BAAgB;AAChB,kBAAQ;AAAA,YACN;AAAA,UAGF;AAAA,QACF;AACA,eAAO;AAAA,MAET;AAEE,eAAO;AAAA,IACX;AAAA,EACF;AAGA,QAAM,SAAkC,CAAC;AACzC,aAAW,OAAO,OAAO,KAAK,GAAG,GAAG;AAClC,WAAO,GAAG,IAAI,iBAAiB,IAAI,GAAG,GAAG,EAAE;AAAA,EAC7C;AACA,SAAO;AACT;AApNA,IAcA,kBAeI;AA7BJ;AAAA;AAAA;AAcA,uBAAgD;AAWhD;AACA;AAGA,IAAI,gBAAgB;AAAA;AAAA;;;AC7BpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,yBAA2B;;;ACApB,IAAM,gBAAgB;AAOtB,IAAM,sBAAsB;AAO5B,IAAM,iBAAiB,oBAAI,IAAI;AAAA,EACpC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAGM,IAAM,kBAAkB;;;ADrBxB,SAAS,iBAAiB,KAAqB;AACpD,SAAO;AACT;AAEO,SAAS,iBAAiB,MAAc,SAAiB,MAAsB;AACpF,QAAM,YAAY,GAAG,IAAI,GAAG,eAAe,GAAG,OAAO,GAAG,eAAe,GAAG,IAAI;AAC9E,QAAM,WAAO,+BAAW,QAAQ,EAAE,OAAO,SAAS,EAAE,OAAO,KAAK;AAChE,QAAM,QAAQ,KAAK,CAAC;AACpB,SAAO,GAAG,KAAK,GAAG,eAAe,GAAG,IAAI,GAAG,eAAe,GAAG,OAAO,GAAG,eAAe,GAAG,IAAI;AAC/F;;;AEeA;AAaO,IAAM,eAA8B,uBAAO,IAAI,uBAAuB;AAatE,SAAS,cAA8B;AAC5C,SAAO;AACT;AAGO,SAAS,iBAAiB,OAAyC;AACxE,SAAO,UAAU;AACnB;AAMA,IAAM,0BAA0B,oBAAI,IAAI;AAAA,EACtC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AASM,SAAS,gBAAgB,OAAyB;AACvD,MAAI,UAAU,KAAM,QAAO;AAC3B,QAAM,IAAI,OAAO;AACjB,MAAI,MAAM,SAAU,QA
AO;AAC3B,MAAI,MAAM,QAAQ,KAAK,EAAG,QAAO;AAGjC,MAAI,cAAc,KAAK,EAAG,QAAO;AACjC,QAAM,QAAQ,OAAO,eAAe,KAAK;AACzC,MAAI,UAAU,QAAQ,UAAU,OAAO,UAAW,QAAO;AAEzD,QAAM,OAAQ,MAA8C;AAC5D,MAAI,QAAQ,OAAO,KAAK,SAAS,YAAY,wBAAwB,IAAI,KAAK,IAAI,EAAG,QAAO;AAG5F,SAAO;AACT;AAkCA,IAAM,cAAc;AAgDb,SAAS,wBAAwB,MAAe,aAA2B;AAChF,yBAAuB,MAAM,CAAC,GAAG,EAAE,MAAM,OAAO,GAAG,CAAC,EAAE,KAAK,MAAM;AAC/D,UAAM,QAAQ,KAAK,WAAW,IAAI,WAAW,KAAK,IAAI,CAAC,MAAM,KAAK,UAAU,CAAC,CAAC,EAAE,KAAK,KAAK;AAC1F,UAAM,IAAI;AAAA,MACR,cAAc,WAAW,iDAAiD,KAAK;AAAA,IAIjF;AAAA,EACF,CAAC;AACH;AAIA,SAAS,uBACP,MACA,MACA,QACA,OACM;AACN,MAAI,SAAS,QAAQ,SAAS,OAAW;AACzC,MAAI,iBAAiB,IAAI,GAAG;AAC1B,UAAM,EAAE,MAAM,OAAO,CAAC;AACtB;AAAA,EACF;AACA,MAAI,OAAO,SAAS,SAAU;AAC9B,MAAI,cAAc,IAAI,EAAG;AACzB,MAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,6BAAuB,KAAK,CAAC,GAAG,CAAC,GAAG,MAAM,OAAO,CAAC,CAAC,GAAG,EAAE,MAAM,SAAS,OAAO,EAAE,GAAG,KAAK;AAAA,IAC1F;AACA;AAAA,EACF;AACA,QAAM,QAAQ,OAAO,eAAe,IAAI;AACxC,MAAI,UAAU,QAAQ,UAAU,OAAO,UAAW;AAClD,QAAM,MAAM;AACZ,aAAW,OAAO,OAAO,KAAK,GAAG,GAAG;AAClC,2BAAuB,IAAI,GAAG,GAAG,CAAC,GAAG,MAAM,GAAG,GAAG,EAAE,MAAM,SAAS,GAAG,KAAK;AAAA,EAC5E;AACF;AAGO,SAAS,eAAe,MAA+B;AAC5D,aAAW,OAAO,MAAM;AACtB,QAAI,CAAC,YAAY,KAAK,GAAG,GAAG;AAC1B,YAAM,IAAI;AAAA,QACR,gCAAgC,KAAK,UAAU,GAAG,CAAC,YAAY,KAC5D,IAAI,CAAC,MAAM,KAAK,UAAU,CAAC,CAAC,EAC5B,KAAK,KAAK,CAAC;AAAA,MAGhB;AAAA,IACF;AAAA,EACF;AACF;AA2BO,SAAS,aAAa,MAA6C;AACxE,QAAM,MAAoB,CAAC;AAC3B,OAAK,MAAM,CAAC,GAAG,GAAG;AAClB,SAAO;AACT;AAEA,SAAS,oCACP,KACA,WACM;AACN,yBAAuB,KAAK,WAAW,EAAE,MAAM,OAAO,GAAG,CAAC,EAAE,OAAO,MAAM;AACvE,UAAM,eACJ,UAAU,WAAW,IAAI,WAAW,UAAU,IAAI,CAAC,MAAM,KAAK,UAAU,CAAC,CAAC,EAAE,KAAK,KAAK;AACxF,QAAI,OAAO,SAAS,SAAS;AAC3B,YAAM,IAAI;AAAA,QACR,8CAA8C,OAAO,KAAK,4BAChD,YAAY;AAAA,MAIxB;AAAA,IACF;AACA,UAAM,IAAI;AAAA,MACR,qEACU,YAAY;AAAA,IAGxB;AAAA,EACF,CAAC;AACH;AAEA,SAAS,KAAK,MAAe,MAAgB,KAAyB;AAGpE,MAAI,SAAS,OAAW;AACxB,MAAI,iBAAiB,IAAI,GAAG;AAC1B,QAAI,KAAK,WAAW,GAAG;AACrB,YAAM,IAAI,MAAM,+DAA+D;AAAA,IACjF;AACA,mBAAe,IAAI;AACnB,QAAI,KAAK,EAAE,MAAM,CAAC,GAAG
,IAAI,GAAG,OAAO,QAAW,QAAQ,KAAK,CAAC;AAC5D;AAAA,EACF;AACA,MAAI,gBAAgB,IAAI,GAAG;AACzB,QAAI,KAAK,WAAW,GAAG;AAGrB,YAAM,IAAI;AAAA,QACR,4DACG,SAAS,OAAO,SAAS,MAAM,QAAQ,IAAI,IAAI,UAAU,OAAO,QACjE;AAAA,MACJ;AAAA,IACF;AAMA,QAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,0CAAoC,MAAM,IAAI;AAAA,IAChD;AACA,mBAAe,IAAI;AACnB,QAAI,KAAK,EAAE,MAAM,CAAC,GAAG,IAAI,GAAG,OAAO,MAAM,QAAQ,MAAM,CAAC;AACxD;AAAA,EACF;AAEA,QAAM,MAAM;AACZ,QAAM,OAAO,OAAO,KAAK,GAAG;AAC5B,MAAI,KAAK,WAAW,GAAG;AAIrB,QAAI,KAAK,SAAS,GAAG;AACnB,qBAAe,IAAI;AACnB,UAAI,KAAK,EAAE,MAAM,CAAC,GAAG,IAAI,GAAG,OAAO,CAAC,GAAG,QAAQ,MAAM,CAAC;AAAA,IACxD;AACA;AAAA,EACF;AACA,aAAW,OAAO,MAAM;AACtB,QAAI,QAAQ,mBAAmB;AAC7B,YAAM,QAAQ,KAAK,WAAW,IAAI,WAAW,KAAK,IAAI,CAAC,MAAM,KAAK,UAAU,CAAC,CAAC,EAAE,KAAK,KAAK;AAC1F,YAAM,IAAI;AAAA,QACR,kDAAkD,iBAAiB,aAC9D,KAAK;AAAA,MAGZ;AAAA,IACF;AACA,SAAK,IAAI,GAAG,GAAG,CAAC,GAAG,MAAM,GAAG,GAAG,GAAG;AAAA,EACpC;AACF;;;AC9VA,SAAS,wBACP,OACA,KACA,MACgB;AAChB,SAAO,EAAE,OAAO,MAAM,KAAK,SAAS,eAAe,OAAO,OAAO,MAAM,KAAK,KAAK;AACnF;AAEA,SAAS,wBACP,OACA,MACA,SACA,OACA,MACA,MACgB;AAChB,SAAO,EAAE,OAAO,MAAM,SAAS,OAAO,MAAM,KAAK;AACnD;AAEO,IAAM,iBAAN,MAA2C;AAAA,EAChD,YACmB,SACA,UACA,YAAoB,IACrC;AAHiB;AACA;AACA;AAAA,EAChB;AAAA,EAEH,MAAM,QAAQ,OAAe,KAAa,MAA8C;AACtF,SAAK,UAAU,OAAO,KAAK,MAAM,OAAO;AAAA,EAC1C;AAAA,EAEA,MAAM,QACJ,OACA,MACA,SACA,OACA,MACA,MACe;AACf,SAAK,UAAU,OAAO,MAAM,SAAS,OAAO,MAAM,MAAM,OAAO;AAAA,EACjE;AAAA,EAEA,MAAM,YAAY,OAAe,KAAa,MAA8C;AAC1F,SAAK,UAAU,OAAO,KAAK,MAAM,SAAS;AAAA,EAC5C;AAAA,EAEA,MAAM,YACJ,OACA,MACA,SACA,OACA,MACA,MACe;AACf,SAAK,UAAU,OAAO,MAAM,SAAS,OAAO,MAAM,MAAM,SAAS;AAAA,EACnE;AAAA,EAEQ,UACN,OACA,KACA,MACA,MACM;AACN,4BAAwB,MAAM,SAAS,YAAY,gBAAgB,SAAS;AAC5E,QAAI,KAAK,UAAU;AACjB,WAAK,SAAS,SAAS,OAAO,eAAe,OAAO,MAAM,KAAK,SAAS;AAAA,IAC1E;AACA,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,SAAS,wBAAwB,OAAO,KAAK,IAAI;AACvD,QAAI,KAAK,UAAU;AACjB,YAAM,QAAQ,KAAK,SAAS,OAAO,OAAO,eAAe,KAAK;AAC9D,UAAI,OAAO,iBAAiB,MAAM,gBAAgB,GAAG;AACnD,eAAO,IAAI,MAAM;AAAA,MACnB;AAAA,IACF;AACA,SAAK,QAAQ,OAAO,OAAO,QAAQ,IAAI;AAAA,EACzC;AAAA,EAEQ,UACN,OACA,MACA,SACA,OACA
,MACA,MACA,MACM;AACN,4BAAwB,MAAM,SAAS,YAAY,gBAAgB,SAAS;AAC5E,QAAI,KAAK,UAAU;AACjB,WAAK,SAAS,SAAS,OAAO,SAAS,OAAO,MAAM,KAAK,SAAS;AAAA,IACpE;AACA,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,SAAS,wBAAwB,OAAO,MAAM,SAAS,OAAO,MAAM,IAAI;AAC9E,QAAI,KAAK,UAAU;AACjB,YAAM,QAAQ,KAAK,SAAS,OAAO,OAAO,SAAS,KAAK;AACxD,UAAI,OAAO,iBAAiB,MAAM,gBAAgB,GAAG;AACnD,eAAO,IAAI,MAAM;AAAA,MACnB;AAAA,IACF;AACA,SAAK,QAAQ,OAAO,OAAO,QAAQ,IAAI;AAAA,EACzC;AAAA,EAEA,MAAM,WAAW,KAAa,MAA8C;AAC1E,UAAM,QAAQ,iBAAiB,GAAG;AAClC,SAAK,QAAQ,UAAU,OAAO,EAAE,SAAS,aAAa,IAAI,EAAE,CAAC;AAAA,EAC/D;AAAA,EAEA,MAAM,WACJ,MACA,SACA,MACA,MACe;AACf,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,SAAK,QAAQ,UAAU,OAAO,EAAE,SAAS,aAAa,IAAI,EAAE,CAAC;AAAA,EAC/D;AAAA,EAEA,MAAM,WAAW,KAA4B;AAC3C,UAAM,QAAQ,iBAAiB,GAAG;AAClC,SAAK,QAAQ,UAAU,KAAK;AAAA,EAC9B;AAAA,EAEA,MAAM,WAAW,MAAc,SAAiB,MAA6B;AAC3E,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,SAAK,QAAQ,UAAU,KAAK;AAAA,EAC9B;AAAA,EAEA,MAAM,SAAwB;AAC5B,UAAM,KAAK,QAAQ,OAAO;AAAA,EAC5B;AACF;;;ACvIA,IAAAA,sBAA2B;;;ACApB,IAAM,iBAAN,cAA6B,MAAM;AAAA,EACxC,YACE,SACgB,MAChB;AACA,UAAM,OAAO;AAFG;AAGhB,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,oBAAN,cAAgC,eAAe;AAAA,EACpD,YAAY,KAAa;AACvB,UAAM,mBAAmB,GAAG,IAAI,gBAAgB;AAChD,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,oBAAN,cAAgC,eAAe;AAAA,EACpD,YAAY,MAAc,SAAiB,MAAc;AACvD,UAAM,mBAAmB,IAAI,MAAM,OAAO,OAAO,IAAI,IAAI,gBAAgB;AACzE,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,kBAAN,cAA8B,eAAe;AAAA,EAClD,YACE,SACgB,SAChB;AACA,UAAM,SAAS,kBAAkB;AAFjB;AAGhB,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,yBAAN,cAAqC,eAAe;AAAA,EACzD,YAAY,OAAe,SAAiB,OAAe;AACzD,UAAM,yBAAyB,KAAK,OAAO,OAAO,QAAQ,KAAK,KAAK,oBAAoB;AACxF,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,oBAAN,cAAgC,eAAe;AAAA,EACpD,YAAY,SAAiB;AAC3B,UAAM,SAAS,eAAe;AAC9B,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,iBAAN,cAA6B,eAAe;AAAA,EACjD,YAAY,SAAiB;AAC3B,UAAM,SAAS,iBAAiB;AAChC,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,uBAAN,cAAmC,eAAe;AAAA,EACvD,YAAY,SAAiB;AAC3B,UAAM,SAAS,wBAAwB;AACvC,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,mBAAN,cAA+B,eAAe;AAAA,EACnD,YAAY,SAAiB;AAC3B,UAAM,SAAS,cAAc;AAC7B,SAAK,OAAO;
AAAA,EACd;AACF;AAEO,IAAM,qBAAN,cAAiC,eAAe;AAAA,EACrD,YACE,OACA,SACA,OACA,WACA,WACA;AACA;AAAA,MACE,SAAS,KAAK,OAAO,OAAO,QAAQ,KAAK,8BAA8B,aAAa,MAAM,mBACxE,UAAU,KAAK,IAAI,CAAC;AAAA,MACtC;AAAA,IACF;AACA,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,iBAAN,cAA6B,eAAe;AAAA,EACjD,YAAY,SAAiB;AAC3B,UAAM,SAAS,iBAAiB;AAChC,SAAK,OAAO;AAAA,EACd;AACF;AAkBO,IAAM,+BAAN,cAA2C,eAAe;AAAA,EAC/D,YAAY,SAAiB;AAC3B,UAAM,SAAS,2BAA2B;AAC1C,SAAK,OAAO;AAAA,EACd;AACF;AAeO,IAAM,8BAAN,cAA0C,eAAe;AAAA,EAC9D,YACkB,YAChB,oBACA;AACA;AAAA,MACE,eAAe,UAAU,yBAAyB,kBAAkB;AAAA,MACpE;AAAA,IACF;AANgB;AAOhB,SAAK,OAAO;AAAA,EACd;AACF;;;AC5HA,yBAAwD;AA6BxD,IAAM,sBAAsB;AAgCrB,SAAS,cAAc,QAAgB,OAAyC;AAOrF,QAAM,YAAY,IAAI,6BAAU,QAAkB,WAAW,KAAK;AAClE,SAAO,CAAC,SAAkB;AACxB,UAAM,SAAS,UAAU,SAAS,IAAI;AACtC,QAAI,CAAC,OAAO,OAAO;AACjB,YAAM,QAAQ,OAAO,OAAO;AAC5B,YAAM,OAAO,OAAO,OAAO,MAAM,GAAG,mBAAmB,EAAE,IAAI,WAAW,EAAE,KAAK,IAAI;AACnF,YAAM,WAAW,QAAQ,sBAAsB,MAAM,QAAQ,mBAAmB,WAAW;AAC3F,YAAM,IAAI;AAAA,QACR,yBAAyB,QAAQ,UAAU,QAAQ,EAAE,KAAK,IAAI,GAAG,QAAQ;AAAA,QACzE,OAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AACF;AAWA,SAAS,YAAY,KAAyB;AAC5C,QAAM,OAAO,IAAI,iBAAiB,QAAQ,MAAM,EAAE,KAAK;AACvD,QAAM,UAAU,IAAI,UAAU,IAAI,IAAI,OAAO,OAAO;AACpD,QAAM,SAAS,IAAI,QAAQ,KAAK,OAAO,GAAG,IAAI,KAAK,KAAK;AACxD,SAAO,GAAG,IAAI,GAAG,MAAM;AACzB;AAUO,SAAS,sBAAsB,QAA0B;AAC9D,MAAI,CAAC,UAAU,OAAO,SAAS,YAAY,CAAC,OAAO,WAAY,QAAO,CAAC;AAEvE,QAAM,cAAc,IAAI,IAAY,MAAM,QAAQ,OAAO,QAAQ,IAAI,OAAO,WAAW,CAAC,CAAC;AAEzF,SAAO,OAAO,QAAQ,OAAO,UAAU,EAAE;AAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MACvD,oBAAoB,MAAM,MAAa,YAAY,IAAI,IAAI,CAAC;AAAA,EAC9D;AACF;AAKA,SAAS,oBAAoB,MAAc,MAAW,UAA8B;AAClF,MAAI,CAAC,KAAM,QAAO,EAAE,MAAM,MAAM,WAAW,SAAS;AAGpD,MAAI,MAAM,QAAQ,KAAK,IAAI,GAAG;AAC5B,WAAO;AAAA,MACL;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA,YAAY,KAAK;AAAA,MACjB,aAAa,KAAK;AAAA,IACpB;AAAA,EACF;AAGA,MAAI,MAAM,QAAQ,KAAK,KAAK,KAAK,MAAM,QAAQ,KAAK,KAAK,GAAG;AAC1D,UAAM,WAAY,KAAK,SAAS,KAAK;AACrC,UAAM,UAAU,SAAS,OAAO,CAAC,MAAW,EAAE,SAAS,MAAM;AAC7D,QAAI,QAAQ,WAAW,GAAG;AAExB,aAAO,oBAAoB,MAAM,QAAQ,CAAC,GAAG,KAAK;AAAA,IACpD;AACA,WAAO,EAAE,MAAM,M
AAM,WAAW,UAAU,aAAa,KAAK,YAAY;AAAA,EAC1E;AAEA,QAAM,OAAO,KAAK;AAElB,MAAI,SAAS,UAAU;AACrB,WAAO;AAAA,MACL;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA,WAAW,KAAK;AAAA,MAChB,WAAW,KAAK;AAAA,MAChB,SAAS,KAAK;AAAA,MACd,aAAa,KAAK;AAAA,IACpB;AAAA,EACF;AAEA,MAAI,SAAS,YAAY,SAAS,WAAW;AAC3C,WAAO;AAAA,MACL;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA,KAAK,KAAK;AAAA,MACV,KAAK,KAAK;AAAA,MACV,OAAO,SAAS,YAAY,OAAO;AAAA,MACnC,aAAa,KAAK;AAAA,IACpB;AAAA,EACF;AAEA,MAAI,SAAS,WAAW;AACtB,WAAO,EAAE,MAAM,MAAM,WAAW,UAAU,aAAa,KAAK,YAAY;AAAA,EAC1E;AAEA,MAAI,SAAS,SAAS;AACpB,UAAM,WAAW,KAAK,QAAQ,oBAAoB,QAAQ,KAAK,OAAO,IAAI,IAAI;AAC9E,WAAO;AAAA,MACL;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA,aAAa,KAAK;AAAA,IACpB;AAAA,EACF;AAEA,MAAI,SAAS,UAAU;AACrB,WAAO;AAAA,MACL;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA,QAAQ,sBAAsB,IAAI;AAAA,MAClC,aAAa,KAAK;AAAA,IACpB;AAAA,EACF;AAEA,SAAO,EAAE,MAAM,MAAM,WAAW,UAAU,aAAa,KAAK,YAAY;AAC1E;;;ACrLA,eAAsB,oBACpB,MACA,gBACA,eACA,YACkC;AAClC,QAAM,SAAS,CAAC,GAAG,UAAU,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,EAAE,WAAW;AAC3E,MAAI,SAAS,EAAE,GAAG,KAAK;AACvB,MAAI,UAAU;AAEd,aAAW,QAAQ,QAAQ;AACzB,QAAI,KAAK,gBAAgB,SAAS;AAChC,UAAI;AACF,iBAAS,MAAM,KAAK,GAAG,MAAM;AAAA,MAC/B,SAAS,KAAc;AACrB,YAAI,eAAe,eAAgB,OAAM;AACzC,cAAM,IAAI;AAAA,UACR,mBAAmB,KAAK,WAAW,QAAQ,KAAK,SAAS,YAAa,IAAc,OAAO;AAAA,QAC7F;AAAA,MACF;AACA,UAAI,CAAC,UAAU,OAAO,WAAW,UAAU;AACzC,cAAM,IAAI;AAAA,UACR,mBAAmB,KAAK,WAAW,QAAQ,KAAK,SAAS;AAAA,QAC3D;AAAA,MACF;AACA,gBAAU,KAAK;AAAA,IACjB;AAAA,EACF;AAEA,MAAI,YAAY,eAAe;AAC7B,UAAM,IAAI;AAAA,MACR,wCAAwC,OAAO,mBAAmB,aAAa;AAAA,IACjF;AAAA,EACF;AAEA,SAAO;AACT;AAUO,SAAS,uBAAuB,YAA6B,OAAqB;AACvF,MAAI,WAAW,WAAW,EAAG;AAG7B,QAAM,OAAO,oBAAI,IAAY;AAC7B,aAAW,QAAQ,YAAY;AAC7B,QAAI,KAAK,aAAa,KAAK,aAAa;AACtC,YAAM,IAAI;AAAA,QACR,GAAG,KAAK,mCAAmC,KAAK,SAAS,qBAAqB,KAAK,WAAW;AAAA,MAChG;AAAA,IACF;AACA,QAAI,KAAK,IAAI,KAAK,WAAW,GAAG;AAC9B,YAAM,IAAI;AAAA,QACR,GAAG,KAAK,8CAA8C,KAAK,WAAW;AAAA,MACxE;AAAA,IACF;AACA,SAAK,IAAI,KAAK,WAAW;AAAA,EAC3B;AAEA,QAAM,SAAS,CAAC,GAAG,UAAU,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,EAAE,WAAW;AAC3E,QAAM,gBAAgB,KAAK,IAAI,GAAG,W
AAW,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC;AACpE,MAAI,UAAU;AAEd,aAAW,QAAQ,QAAQ;AACzB,QAAI,KAAK,gBAAgB,SAAS;AAChC,gBAAU,KAAK;AAAA,IACjB,WAAW,KAAK,cAAc,SAAS;AACrC,YAAM,IAAI;AAAA,QACR,GAAG,KAAK,sDAAiD,OAAO,YAAO,KAAK,WAAW;AAAA,MACzF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,YAAY,eAAe;AAC7B,UAAM,IAAI;AAAA,MACR,GAAG,KAAK,qCAAqC,aAAa,eAAe,OAAO;AAAA,IAClF;AAAA,EACF;AACF;AAQA,eAAsB,cACpB,QACA,UACA,kBAAsC,OACZ;AAC1B,QAAM,QAAQ,SAAS,OAAO,OAAO,OAAO,OAAO,SAAS,OAAO,KAAK;AAExE,MAAI,CAAC,OAAO,YAAY,UAAU,CAAC,MAAM,eAAe;AACtD,WAAO,EAAE,QAAQ,UAAU,OAAO,WAAW,MAAM;AAAA,EACrD;AAEA,QAAM,iBAAiB,OAAO,KAAK;AAEnC,MAAI,kBAAkB,MAAM,eAAe;AACzC,WAAO,EAAE,QAAQ,UAAU,OAAO,WAAW,MAAM;AAAA,EACrD;AAEA,QAAM,eAAe,MAAM;AAAA,IACzB,OAAO;AAAA,IACP;AAAA,IACA,MAAM;AAAA,IACN,MAAM;AAAA,EACR;AAGA,QAAM,YAAY,MAAM,sBAAsB,mBAAmB;AAEjE,SAAO;AAAA,IACL,QAAQ,EAAE,GAAG,QAAQ,MAAM,cAAc,GAAG,MAAM,cAAc;AAAA,IAChE,UAAU;AAAA,IACV;AAAA,EACF;AACF;AAOA,eAAsB,eACpB,SACA,UACA,kBAAsC,OACV;AAC5B,SAAO,QAAQ,IAAI,QAAQ,IAAI,CAAC,MAAM,cAAc,GAAG,UAAU,eAAe,CAAC,CAAC;AACpF;;;ACnJO,SAAS,WAAW,WAAmB,SAA0B;AAEtE,MAAI,YAAY,OAAQ,QAAO,cAAc;AAG7C,MAAI,YAAY,KAAM,QAAO;AAE7B,QAAM,eAAe,cAAc,KAAK,CAAC,IAAI,UAAU,MAAM,GAAG;AAChE,QAAM,kBAAkB,QAAQ,MAAM,GAAG;AAEzC,SAAO,cAAc,cAAc,GAAG,iBAAiB,CAAC;AAC1D;AASO,SAAS,cAAc,WAAmB,UAA6B;AAC5E,MAAI,CAAC,YAAY,SAAS,WAAW,EAAG,QAAO;AAC/C,SAAO,SAAS,KAAK,CAAC,MAAM,WAAW,WAAW,CAAC,CAAC;AACtD;AAMA,SAAS,cAAc,MAAgB,IAAY,SAAmB,IAAqB;AAEzF,MAAI,OAAO,KAAK,UAAU,OAAO,QAAQ,OAAQ,QAAO;AAGxD,MAAI,OAAO,QAAQ,OAAQ,QAAO;AAElC,QAAM,MAAM,QAAQ,EAAE;AAEtB,MAAI,QAAQ,MAAM;AAEhB,QAAI,OAAO,QAAQ,SAAS,EAAG,QAAO;AAGtC,aAAS,OAAO,GAAG,QAAQ,KAAK,SAAS,IAAI,QAAQ;AACnD,UAAI,cAAc,MAAM,KAAK,MAAM,SAAS,KAAK,CAAC,EAAG,QAAO;AAAA,IAC9D;AACA,WAAO;AAAA,EACT;AAGA,MAAI,OAAO,KAAK,OAAQ,QAAO;AAE/B,MAAI,QAAQ,KAAK;AAEf,WAAO,cAAc,MAAM,KAAK,GAAG,SAAS,KAAK,CAAC;AAAA,EACpD;AAGA,MAAI,KAAK,EAAE,MAAM,KAAK;AACpB,WAAO,cAAc,MAAM,KAAK,GAAG,SAAS,KAAK,CAAC;AAAA,EACpD;AAEA,SAAO;AACT;;;AC9EA,SAAS,UAAU,OAAe,SAAiB,OAAuB;AACxE,SAAO,GAAG,KAAK,IAAI,OAAO,IAAI,KAAK;AACrC;AAEA,SAAS,aAAa,GAA0B;AAC9C,SAAO,UAAU,EAAE,OAAO,EAAE
,SAAS,EAAE,KAAK;AAC9C;AAkBO,SAAS,eAAe,OAAyD;AACtF,QAAM,MAAM,oBAAI,IAA0E;AAE1F,MAAI;AAEJ,MAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,cAAU;AAAA,EACZ,OAAO;AACL,cAAU,mBAAmB,KAAK;AAAA,EACpC;AAEA,QAAM,YAA0C,OAAO,OAAO,CAAC,GAAG,OAAO,CAAC;AAE1E,aAAW,SAAS,SAAS;AAC3B,QAAI,MAAM,eAAe,MAAM,YAAY,SAAS,GAAG,GAAG;AACxD,YAAM,IAAI;AAAA,QACR,UAAU,MAAM,KAAK,OAAO,MAAM,OAAO,QAAQ,MAAM,KAAK,8BAA8B,MAAM,WAAW;AAAA,MAC7G;AAAA,IACF;AACA,QAAI,MAAM,YAAY,QAAQ;AAC5B,YAAM,QAAQ,UAAU,MAAM,KAAK,OAAO,MAAM,OAAO,QAAQ,MAAM,KAAK;AAC1E,6BAAuB,MAAM,YAAY,KAAK;AAE9C,YAAM,gBAAgB,KAAK,IAAI,GAAG,MAAM,WAAW,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC;AAAA,IAC5E,OAAO;AAEL,YAAM,gBAAgB;AAAA,IACxB;AACA,UAAM,MAAM,UAAU,MAAM,OAAO,MAAM,SAAS,MAAM,KAAK;AAC7D,UAAM,YAAY,MAAM,aACpB,cAAc,MAAM,YAAY,IAAI,MAAM,KAAK,OAAO,MAAM,OAAO,QAAQ,MAAM,KAAK,GAAG,IACzF;AACJ,QAAI,IAAI,KAAK,EAAE,OAAO,UAAU,UAAU,CAAC;AAAA,EAC7C;AAGA,QAAM,WAAW,oBAAI,IAA0C;AAC/D,QAAM,WAAW,oBAAI,IAA6B;AAClD,aAAW,SAAS,SAAS;AAC3B,UAAM,WAAW,SAAS,IAAI,MAAM,OAAO;AAC3C,QAAI,UAAU;AACZ,eAAS,KAAK,KAAK;AAAA,IACrB,OAAO;AACL,eAAS,IAAI,MAAM,SAAS,CAAC,KAAK,CAAC;AAAA,IACrC;AAAA,EACF;AACA,aAAW,CAAC,KAAK,GAAG,KAAK,UAAU;AACjC,aAAS,IAAI,KAAK,OAAO,OAAO,GAAG,CAAC;AAAA,EACtC;AAWA,QAAM,gBAAgB,oBAAI,IAA0C;AACpE,QAAM,gBAAgB,oBAAI,IAA6B;AACvD,QAAM,eAAe,oBAAI,IAAyB;AAClD,aAAW,SAAS,SAAS;AAC3B,QAAI,CAAC,MAAM,YAAa;AACxB,QAAI,OAAO,aAAa,IAAI,MAAM,KAAK;AACvC,QAAI,CAAC,MAAM;AACT,aAAO,oBAAI,IAAI;AACf,mBAAa,IAAI,MAAM,OAAO,IAAI;AAAA,IACpC;AACA,QAAI,KAAK,IAAI,MAAM,WAAW,EAAG;AACjC,SAAK,IAAI,MAAM,WAAW;AAC1B,UAAM,WAAW,cAAc,IAAI,MAAM,KAAK;AAC9C,QAAI,UAAU;AACZ,eAAS,KAAK,KAAK;AAAA,IACrB,OAAO;AACL,oBAAc,IAAI,MAAM,OAAO,CAAC,KAAK,CAAC;AAAA,IACxC;AAAA,EACF;AACA,aAAW,CAAC,KAAK,GAAG,KAAK,eAAe;AACtC,kBAAc,IAAI,KAAK,OAAO,OAAO,GAAG,CAAC;AAAA,EAC3C;AAEA,SAAO;AAAA,IACL,OAAO,OAAe,SAAiB,OAA0C;AAC/E,aAAO,IAAI,IAAI,UAAU,OAAO,SAAS,KAAK,CAAC,GAAG;AAAA,IACpD;AAAA,IAEA,gBAAgB,SAA+C;AAC7D,aAAO,SAAS,IAAI,OAAO,KAAK,CAAC;AAAA,IACnC;AAAA,IAEA,oBAAoB,OAA6C;AAC/D,aAAO,cAAc,IAAI,KAAK,KAAK,CAAC;AAAA,IACtC;AAAA,IAEA,SACE,OACA,SACA,OACA,MACA,WACM;AACN,YAAM,MAAM,IAAI,IA
AI,UAAU,OAAO,SAAS,KAAK,CAAC;AAEpD,UAAI,CAAC,KAAK;AACR,cAAM,IAAI,uBAAuB,OAAO,SAAS,KAAK;AAAA,MACxD;AAGA,UAAI,cAAc,UAAa,IAAI,MAAM,aAAa,IAAI,MAAM,UAAU,SAAS,GAAG;AACpF,YAAI,CAAC,cAAc,WAAW,IAAI,MAAM,SAAS,GAAG;AAClD,gBAAM,IAAI,mBAAmB,OAAO,SAAS,OAAO,WAAW,IAAI,MAAM,SAAS;AAAA,QACpF;AAAA,MACF;AAEA,UAAI,IAAI,UAAU;AAChB,YAAI;AACF,cAAI,SAAS,IAAI;AAAA,QACnB,SAAS,KAAc;AACrB,cAAI,eAAe,gBAAiB,OAAM;AAC1C,gBAAM,IAAI;AAAA,YACR,+BAA+B,KAAK,OAAO,OAAO,QAAQ,KAAK;AAAA,YAC/D;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IAEA,UAAwC;AACtC,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAYO,SAAS,qBAAqB,MAAqB,WAAyC;AAEjG,QAAM,WAAW,IAAI,IAAI,KAAK,QAAQ,EAAE,IAAI,YAAY,CAAC;AAEzD,SAAO;AAAA,IACL,OAAO,OAAe,SAAiB,OAA0C;AAC/E,aAAO,KAAK,OAAO,OAAO,SAAS,KAAK,KAAK,UAAU,OAAO,OAAO,SAAS,KAAK;AAAA,IACrF;AAAA,IAEA,gBAAgB,SAA+C;AAC7D,YAAM,cAAc,KAAK,gBAAgB,OAAO;AAChD,YAAM,aAAa,UAAU,gBAAgB,OAAO;AACpD,UAAI,WAAW,WAAW,EAAG,QAAO;AACpC,UAAI,YAAY,WAAW,EAAG,QAAO;AAGrC,YAAM,OAAO,IAAI,IAAI,YAAY,IAAI,YAAY,CAAC;AAClD,YAAM,SAAS,CAAC,GAAG,WAAW;AAC9B,iBAAW,SAAS,YAAY;AAC9B,YAAI,CAAC,KAAK,IAAI,aAAa,KAAK,CAAC,GAAG;AAClC,iBAAO,KAAK,KAAK;AAAA,QACnB;AAAA,MACF;AACA,aAAO,OAAO,OAAO,MAAM;AAAA,IAC7B;AAAA,IAEA,oBAAoB,OAA6C;AAC/D,YAAM,cAAc,KAAK,oBAAoB,KAAK;AAClD,YAAM,aAAa,UAAU,oBAAoB,KAAK;AACtD,UAAI,WAAW,WAAW,EAAG,QAAO;AACpC,UAAI,YAAY,WAAW,EAAG,QAAO;AAMrC,YAAM,OAAO,IAAI,IAAI,YAAY,IAAI,CAAC,MAAM,EAAE,WAAW,CAAC;AAC1D,YAAM,SAAS,CAAC,GAAG,WAAW;AAC9B,iBAAW,SAAS,YAAY;AAC9B,YAAI,CAAC,KAAK,IAAI,MAAM,WAAW,GAAG;AAChC,eAAK,IAAI,MAAM,WAAW;AAC1B,iBAAO,KAAK,KAAK;AAAA,QACnB;AAAA,MACF;AACA,aAAO,OAAO,OAAO,MAAM;AAAA,IAC7B;AAAA,IAEA,SACE,OACA,SACA,OACA,MACA,WACM;AACN,UAAI,SAAS,IAAI,UAAU,OAAO,SAAS,KAAK,CAAC,GAAG;AAClD,eAAO,KAAK,SAAS,OAAO,SAAS,OAAO,MAAM,SAAS;AAAA,MAC7D;AAEA,aAAO,UAAU,SAAS,OAAO,SAAS,OAAO,MAAM,SAAS;AAAA,IAClE;AAAA,IAEA,UAAwC;AACtC,YAAM,aAAa,UAAU,QAAQ;AACrC,UAAI,WAAW,WAAW,EAAG,QAAO,KAAK,QAAQ;AAEjD,YAAM,SAAS,CAAC,GAAG,KAAK,QAAQ,CAAC;AACjC,iBAAW,SAAS,YAAY;AAC9B,YAAI,CAAC,SAAS,IAAI,aAAa,KAAK,CAAC,GAAG;AACtC,iBAAO,KAAK,KAAK;AAAA,QACnB;AAAA,MACF;AACA,aAAO,OAAO,OAAO,MAAM;A
AAA,IAC7B;AAAA,EACF;AACF;AAOA,SAAS,mBAAmB,WAA6C;AACvE,QAAM,UAA2B,CAAC;AAGlC,aAAW,CAAC,MAAM,MAAM,KAAK,UAAU,OAAO;AAC5C,YAAQ,KAAK;AAAA,MACX,OAAO;AAAA,MACP,SAAS;AAAA,MACT,OAAO;AAAA,MACP,YAAY,OAAO;AAAA,MACnB,aAAa,OAAO;AAAA,MACpB,YAAY,OAAO;AAAA,MACnB,eAAe,OAAO;AAAA,MACtB,WAAW,OAAO;AAAA,MAClB,YAAY,OAAO;AAAA,MACnB,oBAAoB,OAAO;AAAA,MAC3B,SAAS,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAGA,aAAW,CAAC,SAAS,MAAM,KAAK,UAAU,OAAO;AAC/C,UAAM,WAAW,OAAO;AACxB,QAAI,CAAC,SAAU;AAEf,UAAM,YAAY,MAAM,QAAQ,SAAS,IAAI,IAAI,SAAS,OAAO,CAAC,SAAS,IAAI;AAC/E,UAAM,UAAU,MAAM,QAAQ,SAAS,EAAE,IAAI,SAAS,KAAK,CAAC,SAAS,EAAE;AAEvE,UAAM,sBAAsB,OAAO,eAAe,SAAS;AAC3D,QAAI,uBAAuB,oBAAoB,SAAS,GAAG,GAAG;AAC5D,YAAM,IAAI;AAAA,QACR,SAAS,OAAO,8BAA8B,mBAAmB;AAAA,MACnE;AAAA,IACF;AAEA,eAAW,SAAS,WAAW;AAC7B,iBAAW,SAAS,SAAS;AAC3B,gBAAQ,KAAK;AAAA,UACX;AAAA,UACA;AAAA,UACA;AAAA,UACA,YAAY,OAAO;AAAA,UACnB,aAAa,OAAO;AAAA,UACpB,cAAc,SAAS;AAAA,UACvB,YAAY,OAAO;AAAA,UACnB,eAAe,OAAO;AAAA,UACtB,WAAW,OAAO;AAAA,UAClB,aAAa;AAAA,UACb,YAAY,OAAO;AAAA,UACnB,oBAAoB,OAAO;AAAA,UAC3B,SAAS,OAAO;AAAA,QAClB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACxSA,IAAAC,sBAA2B;AAjB3B;AAmCA,IAAI,UAAyB;AAC7B,IAAI,aAAa;AACjB,IAAM,WAAW,oBAAI,IAMnB;AAUF,IAAM,gBAAgB;AAAA,EACpB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA
,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,EAAE,KAAK,IAAI;AAgBX,IAAI,cAAsF;AAE1F,eAAe,iBAA2D;AACxE,MAAI,YAAa,QAAO;AACxB,QAAM,KAAK,MAAM,OAAO,gBAAqB;AAC7C,gBAAc,GAAG;AACjB,SAAO;AACT;AAEA,eAAe,eAAgC;AAC7C,MAAI,QAAS,QAAO;AAEpB,QAAM,OAAO,MAAM,eAAe;AAClC,YAAU,IAAI,KAAK,eAAe;AAAA,IAChC,MAAM;AAAA,IACN,YAAY,EAAE,WAAW,YAAY,IAAI;AAAA,EAC3C,CAAC;AAGD,UAAQ,MAAM;AAEd,UAAQ,GAAG,WAAW,CAAC,QAAwB;AAC7C,QAAI,IAAI,OAAO,OAAW;AAC1B,UAAM,UAAU,SAAS,IAAI,IAAI,EAAE;AACnC,QAAI,CAAC,QAAS;AACd,aAAS,OAAO,IAAI,EAAE;AAEtB,QAAI,IAAI,SAAS,SAAS;AACxB,cAAQ,OAAO,IAAI,eAAe,IAAI,WAAW,uBAAuB,CAAC;AAAA,IAC3E,OAAO;AACL,cAAQ,QAAQ,GAAG;AAAA,IACrB;AAAA,EACF,CAAC;AAED,UAAQ,GAAG,SAAS,CAAC,QAAe;AAElC,eAAW,CAAC,EAAE,CAAC,KAAK,UAAU;AAC5B,QAAE,OAAO,IAAI,eAAe,yBAAyB,IAAI,OAAO,EAAE,CAAC;AAAA,IACrE;AACA,aAAS,MAAM;AACf,cAAU;AAAA,EACZ,CAAC;AAED,UAAQ,GAAG,QAAQ,CAAC,SAAiB;AAInC,QAAI,SAAS,OAAO,GAAG;AACrB,iBAAW,CAAC,EAAE,CAAC,KAAK,UAAU;AAC5B,UAAE,OAAO,IAAI,eAAe,mCAAmC,IAAI,EAAE,CAAC;AAAA,MACxE;AACA,eAAS,MAAM;AAAA,IACjB;AACA,cAAU;AAAA,EACZ,CAAC;AAED,SAAO;AACT;AAEA,eAAe,aAAa,KAAuD;AACjF,QAAM,SAAS,MAAM,aAAa;AAClC,MAAI,cAAc,OAAO,iBAAkB,cAAa;AACxD,QAAM,KAAK,EAAE;AACb,SAAO,IAAI,QAAwB,CAACC,UAAS,WAAW;AACtD,aAAS,IAAI,IAAI,EAAE,SAASA,UAAiC,OAAO,CAAC;AACrE,WAAO,YAAY,EAAE,GAAG,KAAK,GAAG,CAAC;AAAA,EACnC,CAAC;AACH;AAWA,IAAM,gBAAgB,oBAAI,QAAqD;AAE/E,SAAS,iBAAiB,UAAuD;AAC/E,MAAI,QAAQ,cAAc,IAAI,QAAQ;AACtC,MAAI,CAAC,OAAO;AACV,YAAQ,oBAAI,IAAI;AAChB,kBAAc,IAAI,UAAU,KAAK;AAAA,EACnC;AACA,SAAO;AACT;AAEA,SAAS,WAAW,QAAwB;AAC1C,aAAO,gCAAW,QAAQ,EAAE,OAAO,MAAM,EAAE,OAAO,KAAK;AACzD;AAQA,IAAI,uBAA0D;AAE9D,eAAe,oBAAyD;AACtE,MAAI,qBAAsB,QAAO;AACjC,yBAAuB,MAAM;AAC7B,SAAO;AACT;AAqBO,SAAS,gBAAgB,QAA6B;AAQ3D,UAAQ,OAAO,SAAkC;AAC/C,UAAM,EAAE,yBAAAC,0BAAyB,2BAAAC,2BAA0B,IAAI,MAAM,kBAAkB;AACvF,UAAM,WAAW,KAAK,UAAUD,yBAAwB,IAAI,CAAC;AAC7D,UA
AM,WAAW,MAAM,aAAa,EAAE,MAAM,WAAW,QAAQ,SAAS,CAAC;AACzE,QAAI,SAAS,eAAe,UAAa,SAAS,eAAe,MAAM;AACrE,YAAM,IAAI,eAAe,kDAAkD;AAAA,IAC7E;AACA,QAAI;AACF,aAAOC,2BAA0B,KAAK,MAAM,SAAS,UAAU,CAAC;AAAA,IAClE,QAAQ;AACN,YAAM,IAAI,eAAe,kDAAkD;AAAA,IAC7E;AAAA,EACF;AACF;AAgBA,eAAsB,iBACpB,QACA,UACe;AACf,MAAI,YAAY,aAAa,iBAAiB;AAE5C,QAAI;AACF,eAAS,MAAM;AAAA,IACjB,SAAS,KAAc;AACrB,UAAI,eAAe,eAAgB,OAAM;AACzC,YAAM,IAAI,eAAe,uCAAwC,IAAc,OAAO,EAAE;AAAA,IAC1F;AACA;AAAA,EACF;AAGA,QAAM,aAAa,EAAE,MAAM,WAAW,OAAO,CAAC;AAChD;AAaO,SAAS,mBACd,QACA,WAA8B,iBACjB;AACb,QAAM,QAAQ,iBAAiB,QAAQ;AACvC,QAAM,MAAM,WAAW,MAAM;AAC7B,QAAM,SAAS,MAAM,IAAI,GAAG;AAC5B,MAAI,OAAQ,QAAO;AAEnB,MAAI;AACF,UAAM,KAAK,SAAS,MAAM;AAC1B,UAAM,IAAI,KAAK,EAAE;AACjB,WAAO;AAAA,EACT,SAAS,KAAc;AACrB,QAAI,eAAe,eAAgB,OAAM;AACzC,UAAM,IAAI,eAAe,uCAAwC,IAAc,OAAO,EAAE;AAAA,EAC1F;AACF;AASO,SAAS,kBACd,QACA,UACiB;AACjB,SAAO,OAAO,IAAI,CAAC,UAAU;AAAA,IAC3B,aAAa,KAAK;AAAA,IAClB,WAAW,KAAK;AAAA,IAChB,IAAI,mBAAmB,KAAK,IAAI,QAAQ;AAAA,EAC1C,EAAE;AACJ;AASA,eAAsB,uBAAsC;AAC1D,MAAI,CAAC,QAAS;AACd,QAAM,IAAI;AACV,YAAU;AAEV,aAAW,CAAC,EAAE,CAAC,KAAK,UAAU;AAC5B,MAAE,OAAO,IAAI,eAAe,2BAA2B,CAAC;AAAA,EAC1D;AACA,WAAS,MAAM;AACf,QAAM,EAAE,UAAU;AACpB;;;ANtZO,IAAM,iBAAiB;AAGvB,IAAM,iBAAiB;AAO9B,IAAM,+BAA+B;AAAA,EACnC,MAAM;AAAA,EACN,UAAU,CAAC,eAAe,aAAa,IAAI;AAAA,EAC3C,YAAY;AAAA,IACV,aAAa,EAAE,MAAM,WAAW,SAAS,EAAE;AAAA,IAC3C,WAAW,EAAE,MAAM,WAAW,SAAS,EAAE;AAAA,IACzC,IAAI,EAAE,MAAM,UAAU,WAAW,EAAE;AAAA,EACrC;AAAA,EACA,sBAAsB;AACxB;AAGO,IAAM,mBAA2B;AAAA,EACtC,MAAM;AAAA,EACN,UAAU,CAAC,QAAQ,YAAY;AAAA,EAC/B,YAAY;AAAA,IACV,MAAM,EAAE,MAAM,UAAU,WAAW,EAAE;AAAA,IACrC,YAAY,EAAE,MAAM,SAAS;AAAA,IAC7B,aAAa,EAAE,MAAM,SAAS;AAAA,IAC9B,YAAY,EAAE,MAAM,SAAS;AAAA,IAC7B,eAAe,EAAE,MAAM,SAAS;AAAA,IAChC,cAAc,EAAE,MAAM,SAAS;AAAA,IAC/B,SAAS,EAAE,MAAM,SAAS;AAAA,IAC1B,WAAW,EAAE,MAAM,SAAS,OAAO,EAAE,MAAM,UAAU,WAAW,EAAE,EAAE;AAAA,IACpE,eAAe,EAAE,MAAM,WAAW,SAAS,EAAE;AAAA,IAC7C,YAAY,EAAE,MAAM,SAAS,OAAO,6BAA6B;AAAA,IACjE,oBAAoB,EAAE,MAAM,UAAU,MAAM,CAAC,OAAO,SAAS,YAAY,EAAE;AAAA,EAC7E;AAAA,EACA,sBAAsB;AACxB;AAGO,IAA
M,mBAA2B;AAAA,EACtC,MAAM;AAAA,EACN,UAAU,CAAC,QAAQ,QAAQ,IAAI;AAAA,EAC/B,YAAY;AAAA,IACV,MAAM,EAAE,MAAM,UAAU,WAAW,EAAE;AAAA,IACrC,MAAM;AAAA,MACJ,OAAO;AAAA,QACL,EAAE,MAAM,UAAU,WAAW,EAAE;AAAA,QAC/B,EAAE,MAAM,SAAS,OAAO,EAAE,MAAM,UAAU,WAAW,EAAE,GAAG,UAAU,EAAE;AAAA,MACxE;AAAA,IACF;AAAA,IACA,IAAI;AAAA,MACF,OAAO;AAAA,QACL,EAAE,MAAM,UAAU,WAAW,EAAE;AAAA,QAC/B,EAAE,MAAM,SAAS,OAAO,EAAE,MAAM,UAAU,WAAW,EAAE,GAAG,UAAU,EAAE;AAAA,MACxE;AAAA,IACF;AAAA,IACA,YAAY,EAAE,MAAM,SAAS;AAAA,IAC7B,cAAc,EAAE,MAAM,SAAS;AAAA,IAC/B,aAAa,EAAE,MAAM,SAAS;AAAA,IAC9B,YAAY,EAAE,MAAM,SAAS;AAAA,IAC7B,eAAe,EAAE,MAAM,SAAS;AAAA,IAChC,cAAc,EAAE,MAAM,SAAS;AAAA,IAC/B,SAAS,EAAE,MAAM,SAAS;AAAA,IAC1B,WAAW,EAAE,MAAM,SAAS,OAAO,EAAE,MAAM,UAAU,WAAW,EAAE,EAAE;AAAA,IACpE,aAAa,EAAE,MAAM,UAAU,WAAW,GAAG,SAAS,UAAU;AAAA,IAChE,eAAe,EAAE,MAAM,WAAW,SAAS,EAAE;AAAA,IAC7C,YAAY,EAAE,MAAM,SAAS,OAAO,6BAA6B;AAAA,IACjE,oBAAoB,EAAE,MAAM,UAAU,MAAM,CAAC,OAAO,SAAS,YAAY,EAAE;AAAA,EAC7E;AAAA,EACA,sBAAsB;AACxB;AAOO,IAAM,oBAA8C;AAAA,EACzD;AAAA,IACE,OAAO;AAAA,IACP,SAAS;AAAA,IACT,OAAO;AAAA,IACP,YAAY;AAAA,IACZ,aAAa;AAAA,EACf;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,SAAS;AAAA,IACT,OAAO;AAAA,IACP,YAAY;AAAA,IACZ,aAAa;AAAA,EACf;AACF;AAeA,IAAI,qBAA2C;AACxC,SAAS,0BAAyC;AACvD,MAAI,mBAAoB,QAAO;AAC/B,uBAAqB,eAAe,CAAC,GAAG,iBAAiB,CAAC;AAC1D,SAAO;AACT;AAaO,SAAS,yBAAyB,UAAkB,MAAsB;AAC/E,QAAM,WAAO,gCAAW,QAAQ,EAAE,OAAO,GAAG,QAAQ,IAAI,IAAI,EAAE,EAAE,OAAO,WAAW;AAClF,SAAO,KAAK,MAAM,GAAG,EAAE;AACzB;AAeA,eAAsB,wBACpB,QACA,UACwB;AACxB,QAAM,CAAC,WAAW,SAAS,IAAI,MAAM,QAAQ,IAAI;AAAA,IAC/C,OAAO,UAAU,EAAE,OAAO,eAAe,CAAC;AAAA,IAC1C,OAAO,UAAU,EAAE,OAAO,eAAe,CAAC;AAAA,EAC5C,CAAC;AAED,QAAM,UAA2B,CAAC,GAAG,iBAAiB;AAItD,QAAM,iBAAkC,CAAC;AACzC,aAAW,UAAU,WAAW;AAC9B,UAAM,OAAO,OAAO;AACpB,QAAI,KAAK,YAAY;AACnB,iBAAW,KAAK,KAAK,YAAY;AAC/B,uBAAe,KAAK,iBAAiB,EAAE,IAAI,QAAQ,CAAC;AAAA,MACtD;AAAA,IACF;AAAA,EACF;AACA,aAAW,UAAU,WAAW;AAC9B,UAAM,OAAO,OAAO;AACpB,QAAI,KAAK,YAAY;AACnB,iBAAW,KAAK,KAAK,YAAY;AAC/B,uBAAe,KAAK,iBAAiB,EAAE,IAAI,QAAQ,CAAC;AAAA,MACtD;AAAA,IACF;AAAA,EACF;AACA,QAAM,QAAQ,IAAI,cAAc;AA
GhC,aAAW,UAAU,WAAW;AAC9B,UAAM,OAAO,OAAO;AACpB,YAAQ,KAAK;AAAA,MACX,OAAO,KAAK;AAAA,MACZ,SAAS;AAAA,MACT,OAAO,KAAK;AAAA,MACZ,YAAY,KAAK;AAAA,MACjB,aAAa,KAAK;AAAA,MAClB,YAAY,KAAK;AAAA,MACjB,eAAe,KAAK;AAAA,MACpB,WAAW,KAAK;AAAA,MAChB,YAAY,KAAK,aAAa,kBAAkB,KAAK,YAAY,QAAQ,IAAI;AAAA,MAC7E,oBAAoB,KAAK;AAAA,IAC3B,CAAC;AAAA,EACH;AAGA,aAAW,UAAU,WAAW;AAC9B,UAAM,OAAO,OAAO;AACpB,UAAM,YAAY,MAAM,QAAQ,KAAK,IAAI,IAAI,KAAK,OAAO,CAAC,KAAK,IAAI;AACnE,UAAM,UAAU,MAAM,QAAQ,KAAK,EAAE,IAAI,KAAK,KAAK,CAAC,KAAK,EAAE;AAE3D,UAAM,qBAAqB,KAAK,aAC5B,kBAAkB,KAAK,YAAY,QAAQ,IAC3C;AAEJ,eAAW,SAAS,WAAW;AAC7B,iBAAW,SAAS,SAAS;AAC3B,gBAAQ,KAAK;AAAA,UACX;AAAA,UACA,SAAS,KAAK;AAAA,UACd;AAAA,UACA,YAAY,KAAK;AAAA,UACjB,aAAa,KAAK;AAAA,UAClB,cAAc,KAAK;AAAA,UACnB,YAAY,KAAK;AAAA,UACjB,eAAe,KAAK;AAAA,UACpB,WAAW,KAAK;AAAA,UAChB,aAAa,KAAK;AAAA,UAClB,YAAY;AAAA,UACZ,oBAAoB,KAAK;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,SAAO,eAAe,OAAO;AAC/B;;;AOhPO,SAAS,mBAAmB,QAAoC;AACrE,QAAM,EAAE,OAAO,MAAM,SAAS,OAAO,MAAM,OAAO,QAAQ,IAAI;AAE9D,MAAI,QAAQ,WAAW,QAAQ,CAAC,OAAO,OAAO,QAAQ;AACpD,WAAO,EAAE,UAAU,OAAO,OAAO,iBAAiB,MAAM,SAAS,IAAI,EAAE;AAAA,EACzE;AAEA,QAAM,UAAyB,CAAC;AAEhC,MAAI,MAAO,SAAQ,KAAK,EAAE,OAAO,SAAS,IAAI,MAAM,OAAO,MAAM,CAAC;AAClE,MAAI,KAAM,SAAQ,KAAK,EAAE,OAAO,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAC/D,MAAI,QAAS,SAAQ,KAAK,EAAE,OAAO,WAAW,IAAI,MAAM,OAAO,QAAQ,CAAC;AACxE,MAAI,MAAO,SAAQ,KAAK,EAAE,OAAO,SAAS,IAAI,MAAM,OAAO,MAAM,CAAC;AAClE,MAAI,KAAM,SAAQ,KAAK,EAAE,OAAO,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAE/D,MAAI,OAAO,OAAO;AAChB,eAAW,UAAU,OAAO,OAAO;AACjC,YAAM,QAAQ,eAAe,IAAI,OAAO,KAAK,IACzC,OAAO,QACP,OAAO,MAAM,WAAW,OAAO,IAC7B,OAAO,QACP,QAAQ,OAAO,KAAK;AAC1B,cAAQ,KAAK,EAAE,OAAO,IAAI,OAAO,IAAI,OAAO,OAAO,MAAM,CAAC;AAAA,IAC5D;AAAA,EACF;AAEA,MAAI,QAAQ,WAAW,GAAG;AACxB,UAAM,IAAI,kBAAkB,kDAAkD;AAAA,EAChF;AAKA,QAAM,iBAAiB,UAAU,SAAY,sBAAsB,SAAS;AAC5E,SAAO,EAAE,UAAU,SAAS,SAAS,SAAS,EAAE,OAAO,gBAAgB,QAAQ,EAAE;AACnF;AAEO,SAAS,mBAAmB,QAAoC;AACrE,QAAM,EAAE,OAAO,OAAO,QAAQ,IAAI;AAElC,QAAM,UAAyB;AAAA,IAC7B,EAAE,OAAO,SAAS,IAAI,MAAM,OAAO,MAAM;AAAA,IACzC,EAAE,OAAO,W
AAW,IAAI,MAAM,OAAO,cAAc;AAAA,EACrD;AAEA,MAAI,OAAO,OAAO;AAChB,eAAW,UAAU,OAAO,OAAO;AACjC,YAAM,QAAQ,eAAe,IAAI,OAAO,KAAK,IACzC,OAAO,QACP,OAAO,MAAM,WAAW,OAAO,IAC7B,OAAO,QACP,QAAQ,OAAO,KAAK;AAC1B,cAAQ,KAAK,EAAE,OAAO,IAAI,OAAO,IAAI,OAAO,OAAO,MAAM,CAAC;AAAA,IAC5D;AAAA,EACF;AAEA,QAAM,iBAAiB,UAAU,SAAY,sBAAsB,SAAS;AAC5E,SAAO,EAAE,UAAU,SAAS,SAAS,SAAS,EAAE,OAAO,gBAAgB,QAAQ,EAAE;AACnF;;;ACtCA,IAAM,sBAA0D;AAAA,EAC9D,oBAAI,IAAI,CAAC,QAAQ,SAAS,CAAC;AAAA,EAC3B,oBAAI,IAAI,CAAC,WAAW,MAAM,CAAC;AAAA,EAC3B,oBAAI,IAAI,CAAC,SAAS,SAAS,CAAC;AAAA,EAC5B,oBAAI,IAAI,CAAC,WAAW,OAAO,CAAC;AAC9B;AAUO,SAAS,mBAAmB,SAA2C;AAI5E,QAAM,uBAAuB,oBAAI,IAAY;AAC7C,MAAI,iBAAiB;AAErB,aAAW,KAAK,SAAS;AACvB,QAAI,eAAe,IAAI,EAAE,KAAK,GAAG;AAC/B,2BAAqB,IAAI,EAAE,KAAK;AAAA,IAClC,OAAO;AAEL,uBAAiB;AAAA,IACnB;AAAA,EACF;AAIA,aAAW,WAAW,qBAAqB;AACzC,QAAI,UAAU;AACd,eAAW,SAAS,SAAS;AAC3B,UAAI,CAAC,qBAAqB,IAAI,KAAK,GAAG;AACpC,kBAAU;AACV;AAAA,MACF;AAAA,IACF;AACA,QAAI,SAAS;AAGX,aAAO,EAAE,MAAM,KAAK;AAAA,IACtB;AAAA,EACF;AAGA,QAAM,gBAAgB,CAAC,GAAG,oBAAoB;AAC9C,MAAI,cAAc,WAAW,KAAK,gBAAgB;AAChD,WAAO;AAAA,MACL,MAAM;AAAA,MACN,QACE;AAAA,IAGJ;AAAA,EACF;AAEA,MAAI,gBAAgB;AAClB,WAAO;AAAA,MACL,MAAM;AAAA,MACN,QACE,qBAAqB,cAAc,KAAK,IAAI,CAAC;AAAA,IAIjD;AAAA,EACF;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,QACE,qBAAqB,cAAc,KAAK,IAAI,CAAC;AAAA,EAIjD;AACF;;;ACrFA,SAASC,yBACP,OACA,KACA,MACgB;AAChB,SAAO,EAAE,OAAO,MAAM,KAAK,SAAS,eAAe,OAAO,OAAO,MAAM,KAAK,KAAK;AACnF;AAEA,SAASC,yBACP,OACA,MACA,SACA,OACA,MACA,MACgB;AAChB,SAAO,EAAE,OAAO,MAAM,SAAS,OAAO,MAAM,KAAK;AACnD;AAEO,IAAM,uBAAN,MAAuD;AAAA,EAC5D,YACmB,SACA,UACA,iBAAiC,SACjC,YAAoB,IACpB,kBAAsC,OACvD;AALiB;AACA;AACA;AACA;AACA;AAAA,EAChB;AAAA,EAEH,MAAM,QAAQ,KAAgD;AAC5D,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK;AAC9C,QAAI,CAAC,UAAU,CAAC,KAAK,SAAU,QAAO;AACtC,UAAM,SAAS,MAAM,cAAc,QAAQ,KAAK,UAAU,KAAK,eAAe;AAC9E,QAAI,OAAO,YAAY,OAAO,cAAc,OAAO;AACjD,YAAM,KAAK,QAAQ,UAAU,OAAO;AAAA,QAClC,aAAa,OAAO,OAAO;AAAA,QAC3B,GAAG,OAAO,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AACA,WAAO,OAAO;AAAA,EAChB;AAAA,EAEA,MAAM,QAAQ,MAAc,SA
AiB,MAAiD;AAC5F,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK;AAC9C,QAAI,CAAC,UAAU,CAAC,KAAK,SAAU,QAAO;AACtC,UAAM,SAAS,MAAM,cAAc,QAAQ,KAAK,UAAU,KAAK,eAAe;AAC9E,QAAI,OAAO,YAAY,OAAO,cAAc,OAAO;AACjD,YAAM,KAAK,QAAQ,UAAU,OAAO;AAAA,QAClC,aAAa,OAAO,OAAO;AAAA,QAC3B,GAAG,OAAO,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AACA,WAAO,OAAO;AAAA,EAChB;AAAA,EAEA,MAAM,WAAW,MAAc,SAAiB,MAAgC;AAC9E,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK;AAC9C,WAAO,WAAW;AAAA,EACpB;AAAA,EAEQ,iBAAiB,SAAwB,qBAAqC;AACpF,QAAI,uBAAuB,KAAK,mBAAmB,MAAO;AAE1D,UAAM,SAAS,mBAAmB,OAAO;AACzC,QAAI,OAAO,KAAM;AAEjB,QAAI,KAAK,mBAAmB,SAAS;AACnC,YAAM,IAAI,iBAAiB,OAAO,MAAO;AAAA,IAC3C;AAEA,YAAQ,KAAK,qCAAqC,OAAO,MAAM,EAAE;AAAA,EACnE;AAAA,EAEA,MAAM,UAAU,QAAuD;AACrE,UAAM,OAAO,mBAAmB,MAAM;AACtC,QAAI;AACJ,QAAI,KAAK,aAAa,OAAO;AAC3B,YAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK,KAAK;AACnD,gBAAU,SAAS,CAAC,MAAM,IAAI,CAAC;AAAA,IACjC,OAAO;AACL,WAAK,iBAAiB,KAAK,SAAS,OAAO,mBAAmB;AAC9D,gBAAU,MAAM,KAAK,QAAQ,MAAM,KAAK,SAAS,KAAK,OAAO;AAAA,IAC/D;AACA,WAAO,KAAK,gBAAgB,OAAO;AAAA,EACrC;AAAA,EAEA,MAAM,UAAU,QAAuD;AACrE,UAAM,OAAO,mBAAmB,MAAM;AACtC,QAAI;AACJ,QAAI,KAAK,aAAa,OAAO;AAC3B,YAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK,KAAK;AACnD,gBAAU,SAAS,CAAC,MAAM,IAAI,CAAC;AAAA,IACjC,OAAO;AACL,WAAK,iBAAiB,KAAK,SAAS,OAAO,mBAAmB;AAC9D,gBAAU,MAAM,KAAK,QAAQ,MAAM,KAAK,SAAS,KAAK,OAAO;AAAA,IAC/D;AACA,WAAO,KAAK,gBAAgB,OAAO;AAAA,EACrC;AAAA,EAEA,MAAc,gBAAgB,SAA4D;AACxF,QAAI,CAAC,KAAK,YAAY,QAAQ,WAAW,EAAG,QAAO;AACnD,UAAM,UAAU,MAAM,eAAe,SAAS,KAAK,UAAU,KAAK,eAAe;AACjF,eAAW,UAAU,SAAS;AAC5B,UAAI,OAAO,YAAY,OAAO,cAAc,OAAO;AACjD,cAAM,QACJ,OAAO,OAAO,YAAY,gBACtB,iBAAiB,OAAO,OAAO,IAAI,IACnC,iBAAiB,OAAO,OAAO,MAAM,OAAO,OAAO,SAAS,OAAO,OAAO,IAAI;AACpF,cAAM,KAAK,QAAQ,UAAU,OAAO;AAAA,UAClC,aAAa,OAAO,OAAO;AAAA,UAC3B,GAAG,OAAO,OAAO;AAAA,QACnB,CAAC;AAAA,MACH;AAAA,IACF;AACA,WAAO,QAAQ,IAAI,CAAC,MAAM,EAAE,MAAM;AAAA,EACpC;AAAA,EAEA,MAAM,QAAQ,OAAe,KAAa,MAA8C;AACtF,UAAM,KAAK,UAAU,OAAO,KAAK,MAAM,OAAO;AAAA,EAChD;AAAA,EAEA,MAAM,QACJ,OACA,MACA,SACA,OACA,MACA,MACe;AACf,
UAAM,KAAK,UAAU,OAAO,MAAM,SAAS,OAAO,MAAM,MAAM,OAAO;AAAA,EACvE;AAAA,EAEA,MAAM,YAAY,OAAe,KAAa,MAA8C;AAC1F,UAAM,KAAK,UAAU,OAAO,KAAK,MAAM,SAAS;AAAA,EAClD;AAAA,EAEA,MAAM,YACJ,OACA,MACA,SACA,OACA,MACA,MACe;AACf,UAAM,KAAK,UAAU,OAAO,MAAM,SAAS,OAAO,MAAM,MAAM,SAAS;AAAA,EACzE;AAAA,EAEA,MAAc,UACZ,OACA,KACA,MACA,MACe;AACf,4BAAwB,MAAM,SAAS,YAAY,gBAAgB,SAAS;AAC5E,QAAI,KAAK,UAAU;AACjB,WAAK,SAAS,SAAS,OAAO,eAAe,OAAO,MAAM,KAAK,SAAS;AAAA,IAC1E;AACA,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,SAASD,yBAAwB,OAAO,KAAK,IAAI;AACvD,QAAI,KAAK,UAAU;AACjB,YAAM,QAAQ,KAAK,SAAS,OAAO,OAAO,eAAe,KAAK;AAC9D,UAAI,OAAO,iBAAiB,MAAM,gBAAgB,GAAG;AACnD,eAAO,IAAI,MAAM;AAAA,MACnB;AAAA,IACF;AACA,UAAM,KAAK,QAAQ,OAAO,OAAO,QAAQ,IAAI;AAAA,EAC/C;AAAA,EAEA,MAAc,UACZ,OACA,MACA,SACA,OACA,MACA,MACA,MACe;AACf,4BAAwB,MAAM,SAAS,YAAY,gBAAgB,SAAS;AAC5E,QAAI,KAAK,UAAU;AACjB,WAAK,SAAS,SAAS,OAAO,SAAS,OAAO,MAAM,KAAK,SAAS;AAAA,IACpE;AACA,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,SAASC,yBAAwB,OAAO,MAAM,SAAS,OAAO,MAAM,IAAI;AAC9E,QAAI,KAAK,UAAU;AACjB,YAAM,QAAQ,KAAK,SAAS,OAAO,OAAO,SAAS,KAAK;AACxD,UAAI,OAAO,iBAAiB,MAAM,gBAAgB,GAAG;AACnD,eAAO,IAAI,MAAM;AAAA,MACnB;AAAA,IACF;AACA,UAAM,KAAK,QAAQ,OAAO,OAAO,QAAQ,IAAI;AAAA,EAC/C;AAAA,EAEA,MAAM,WAAW,KAAa,MAA8C;AAC1E,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,KAAK,QAAQ,UAAU,OAAO,EAAE,SAAS,aAAa,IAAI,EAAE,CAAC;AAAA,EACrE;AAAA,EAEA,MAAM,WACJ,MACA,SACA,MACA,MACe;AACf,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,KAAK,QAAQ,UAAU,OAAO,EAAE,SAAS,aAAa,IAAI,EAAE,CAAC;AAAA,EACrE;AAAA,EAEA,MAAM,WAAW,KAA4B;AAC3C,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,KAAK,QAAQ,UAAU,KAAK;AAAA,EACpC;AAAA,EAEA,MAAM,WAAW,MAAc,SAAiB,MAA6B;AAC3E,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,KAAK,QAAQ,UAAU,KAAK;AAAA,EACpC;AACF;;;ACjLA,IAAM,sBAAsB,oBAAI,IAAI,CAAC,gBAAgB,cAAc,CAAC;AAEpE,SAASC,yBACP,OACA,KACA,MACgB;AAChB,SAAO,EAAE,OAAO,MAAM,KAAK,SAAS,eAAe,OAAO,OAAO,MAAM,KAAK,KAAK;AACnF;AAEA,SAASC,yBACP,OACA,MACA,SACA,OACA,MACA,MACgB;AAChB,SAAO,EAAE,OAAO,MAAM,SAAS,OAAO,MAAM,KAAK;AACnD;AAEO,IAAM,kBAAN,MAAM,iBAAgE;AAAA,EA0B3E,YACmB,SACjB,SAEA,aACA;AAJiB;AAKjB,SAAK,kBA
AkB,SAAS,sBAAsB;AACtD,SAAK,mBAAmB,SAAS;AAEjC,QAAI,SAAS,cAAc;AACzB,WAAK,gBAAgB,QAAQ;AAC7B,WAAK,oBAAoB,wBAAwB;AACjD,UAAI,QAAQ,UAAU;AACpB,aAAK,iBAAiB,QAAQ;AAAA,MAChC;AACA,WAAK,cAAc;AAAA,IACrB,OAAO;AACL,WAAK,iBAAiB,SAAS;AAAA,IACjC;AAEA,SAAK,iBAAiB,SAAS,kBAAkB;AAAA,EACnD;AAAA,EA9CS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQT,IAAI,eAAoC;AACtC,WAAO,KAAK,QAAQ;AAAA,EACtB;AAAA;AAAA,EAGiB;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACT;AAAA,EACS;AAAA;AAAA,EAGA;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8BjB,aAA6B;AAC3B,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcA,sBAAiD;AAC/C,WAAO,KAAK,oBAAoB;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAMQ,mBAAmB,OAA0C;AACnE,QAAI,CAAC,KAAK,cAAe,QAAO,KAAK;AAErC,QAAI,UAAU,kBAAkB,UAAU,gBAAgB;AACxD,aAAO,KAAK;AAAA,IACd;AAEA,WAAO,KAAK,mBAAmB,KAAK,kBAAkB,KAAK;AAAA,EAC7D;AAAA,EAEQ,kBAAkB,OAA+B;AACvD,QAAI,KAAK,gBAAgB,UAAU,kBAAkB,UAAU,iBAAiB;AAC9E,aAAO,KAAK;AAAA,IACd;AACA,WAAO,KAAK;AAAA,EACd;AAAA,EAEQ,sBAAiD;AACvD,QAAI,CAAC,KAAK,cAAe,QAAO,KAAK;AACrC,WAAO,KAAK,mBAAmB,KAAK,kBAAkB,KAAK;AAAA,EAC7D;AAAA;AAAA;AAAA;AAAA,EAMQ,iBAAiB,SAAwB,qBAAqC;AACpF,QAAI,uBAAuB,KAAK,mBAAmB,MAAO;AAE1D,UAAM,SAAS,mBAAmB,OAAO;AACzC,QAAI,OAAO,KAAM;AAEjB,QAAI,KAAK,mBAAmB,SAAS;AACnC,YAAM,IAAI,iBAAiB,OAAO,MAAO;AAAA,IAC3C;AAEA,YAAQ,KAAK,qCAAqC,OAAO,MAAM,EAAE;AAAA,EACnE;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,eACZ,QACA,OAC4B;AAC5B,UAAM,WAAW,KAAK,oBAAoB;AAC1C,QAAI,CAAC,SAAU,QAAO;AAEtB,UAAM,SAAS,MAAM,cAAc,QAAQ,UAAU,KAAK,eAAe;AACzE,QAAI,OAAO,UAAU;AACnB,WAAK,gBAAgB,QAAQ,KAAK;AAAA,IACpC;AACA,WAAO,OAAO;AAAA,EAChB;AAAA,EAEA,MAAc,gBAAgB,SAA4D;AACxF,UAAM,WAAW,KAAK,oBAAoB;AAC1C,QAAI,CAAC,YAAY,QAAQ,WAAW,EAAG,QAAO;AAE9C,UAAM,UAAU,MAAM,eAAe,SAAS,UAAU,KAAK,eAAe;AAC5E,eAAW,UAAU,SAAS;AAC5B,UAAI,OAAO,UAAU;AACnB,cAAM,QACJ,OAAO,OAAO,YAAY,gBACtB,iBAAiB,OAAO,OAAO,IAAI,IACnC,iBAAiB,OAAO,OAAO,MAAM,OAAO,OAAO,SAAS,OAAO,OAAO,IAAI;AACpF,aAAK,gBAAgB,QAAQ,KAAK;AAAA,MACpC;AAAA,IACF;AACA,WAAO,QAAQ,IAAI,CAAC,MAAM,EAAE,MAAM;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,gBAAgB,QAAyB,OAAqB;AACpE,QAAI,OAAO,cAAc,MAAO
;AAEhC,UAAM,cAAc,YAAY;AAC9B,UAAI;AACF,cAAM,KAAK,QAAQ,UAAU,OAAO;AAAA,UAClC,aAAa,OAAO,OAAO;AAAA,UAC3B,GAAG,OAAO,OAAO;AAAA,QACnB,CAAC;AAAA,MACH,SAAS,KAAc;AACrB,cAAM,MAAM,+CAA+C,KAAK,KAAM,IAAc,OAAO;AAC3F,YAAI,OAAO,cAAc,SAAS;AAChC,kBAAQ,MAAM,GAAG;AAAA,QACnB,OAAO;AACL,kBAAQ,KAAK,GAAG;AAAA,QAClB;AAAA,MACF;AAAA,IACF;AAEA,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,QAAQ,KAAgD;AAC5D,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK;AAC9C,QAAI,CAAC,OAAQ,QAAO;AACpB,WAAO,KAAK,eAAe,QAAQ,KAAK;AAAA,EAC1C;AAAA,EAEA,MAAM,QAAQ,MAAc,SAAiB,MAAiD;AAC5F,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK;AAC9C,QAAI,CAAC,OAAQ,QAAO;AACpB,WAAO,KAAK,eAAe,QAAQ,KAAK;AAAA,EAC1C;AAAA,EAEA,MAAM,WAAW,MAAc,SAAiB,MAAgC;AAC9E,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK;AAC9C,WAAO,WAAW;AAAA,EACpB;AAAA,EAEA,MAAM,UAAU,QAAuD;AACrE,UAAM,OAAO,mBAAmB,MAAM;AACtC,QAAI;AACJ,QAAI,KAAK,aAAa,OAAO;AAC3B,YAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK,KAAK;AACnD,gBAAU,SAAS,CAAC,MAAM,IAAI,CAAC;AAAA,IACjC,OAAO;AACL,WAAK,iBAAiB,KAAK,SAAS,OAAO,mBAAmB;AAC9D,gBAAU,MAAM,KAAK,QAAQ,MAAM,KAAK,SAAS,KAAK,OAAO;AAAA,IAC/D;AACA,WAAO,KAAK,gBAAgB,OAAO;AAAA,EACrC;AAAA,EAEA,MAAM,UAAU,QAAuD;AACrE,UAAM,OAAO,mBAAmB,MAAM;AACtC,QAAI;AACJ,QAAI,KAAK,aAAa,OAAO;AAC3B,YAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,KAAK,KAAK;AACnD,gBAAU,SAAS,CAAC,MAAM,IAAI,CAAC;AAAA,IACjC,OAAO;AACL,WAAK,iBAAiB,KAAK,SAAS,OAAO,mBAAmB;AAC9D,gBAAU,MAAM,KAAK,QAAQ,MAAM,KAAK,SAAS,KAAK,OAAO;AAAA,IAC/D;AACA,WAAO,KAAK,gBAAgB,OAAO;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,QAAQ,OAAe,KAAa,MAA8C;AACtF,UAAM,KAAK,UAAU,OAAO,KAAK,MAAM,OAAO;AAAA,EAChD;AAAA,EAEA,MAAM,QACJ,OACA,MACA,SACA,OACA,MACA,MACe;AACf,UAAM,KAAK,UAAU,OAAO,MAAM,SAAS,OAAO,MAAM,MAAM,OAAO;AAAA,EACvE;AAAA,EAEA,MAAM,YAAY,OAAe,KAAa,MAA8C;AAC1F,UAAM,KAAK,UAAU,OAAO,KAAK,MAAM,SAAS;AAAA,EAClD;AAAA,EAEA,MAAM,YACJ,OACA,MACA,SACA,OACA,MACA,MACe;AACf,UAAM,KAAK,UAAU,OAAO,MAAM,SAAS,OAAO,MAAM,MAAM,SAAS;AAAA,EACzE;AAAA,EAEA,MAAc,UACZ,OACA,KACA,MACA,MACe;AACf,4BAAwB,MAAM,SAAS,YAAY,gBAAgB,SAAS
;AAC5E,UAAM,WAAW,KAAK,mBAAmB,KAAK;AAC9C,QAAI,UAAU;AACZ,eAAS,SAAS,OAAO,eAAe,OAAO,MAAM,KAAK,QAAQ,SAAS;AAAA,IAC7E;AACA,UAAM,UAAU,KAAK,kBAAkB,KAAK;AAC5C,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,SAASD,yBAAwB,OAAO,KAAK,IAAI;AACvD,QAAI,UAAU;AACZ,YAAM,QAAQ,SAAS,OAAO,OAAO,eAAe,KAAK;AACzD,UAAI,OAAO,iBAAiB,MAAM,gBAAgB,GAAG;AACnD,eAAO,IAAI,MAAM;AAAA,MACnB;AAAA,IACF;AACA,UAAM,QAAQ,OAAO,OAAO,QAAQ,IAAI;AAAA,EAC1C;AAAA,EAEA,MAAc,UACZ,OACA,MACA,SACA,OACA,MACA,MACA,MACe;AACf,4BAAwB,MAAM,SAAS,YAAY,gBAAgB,SAAS;AAC5E,UAAM,WAAW,KAAK,mBAAmB,KAAK;AAC9C,QAAI,UAAU;AACZ,eAAS,SAAS,OAAO,SAAS,OAAO,MAAM,KAAK,QAAQ,SAAS;AAAA,IACvE;AACA,UAAM,UAAU,KAAK,kBAAkB,KAAK;AAC5C,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,SAASC,yBAAwB,OAAO,MAAM,SAAS,OAAO,MAAM,IAAI;AAC9E,QAAI,UAAU;AACZ,YAAM,QAAQ,SAAS,OAAO,OAAO,SAAS,KAAK;AACnD,UAAI,OAAO,iBAAiB,MAAM,gBAAgB,GAAG;AACnD,eAAO,IAAI,MAAM;AAAA,MACnB;AAAA,IACF;AACA,UAAM,QAAQ,OAAO,OAAO,QAAQ,IAAI;AAAA,EAC1C;AAAA,EAEA,MAAM,WAAW,KAAa,MAA8C;AAC1E,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,KAAK,QAAQ,UAAU,OAAO,EAAE,SAAS,aAAa,IAAI,EAAE,CAAC;AAAA,EACrE;AAAA,EAEA,MAAM,WACJ,MACA,SACA,MACA,MACe;AACf,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,KAAK,QAAQ,UAAU,OAAO,EAAE,SAAS,aAAa,IAAI,EAAE,CAAC;AAAA,EACrE;AAAA,EAEA,MAAM,WAAW,KAA4B;AAC3C,UAAM,QAAQ,iBAAiB,GAAG;AAClC,UAAM,KAAK,QAAQ,UAAU,KAAK;AAAA,EACpC;AAAA,EAEA,MAAM,WAAW,MAAc,SAAiB,MAA6B;AAC3E,UAAM,QAAQ,iBAAiB,MAAM,SAAS,IAAI;AAClD,UAAM,KAAK,QAAQ,UAAU,KAAK;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,eAAkB,IAAsD;AAC5E,WAAO,KAAK,QAAQ,eAAe,OAAO,cAAc;AACtD,YAAM,UAAU,IAAI;AAAA,QAClB;AAAA,QACA,KAAK,oBAAoB;AAAA,QACzB,KAAK;AAAA,QACL,KAAK,QAAQ;AAAA,QACb,KAAK;AAAA,MACP;AACA,aAAO,GAAG,OAAO;AAAA,IACnB,CAAC;AAAA,EACH;AAAA,EAEA,QAAoB;AAClB,WAAO,IAAI;AAAA,MACT,KAAK,QAAQ,YAAY;AAAA,MACzB,KAAK,oBAAoB;AAAA,MACzB,KAAK,QAAQ;AAAA,IACf;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,SAAS,eAAuB,OAAe,SAAsB;AACnE,QAAI,CAAC,iBAAiB,cAAc,SAAS,GAAG,GAAG;AACjD,YAAM,IAAI;AAAA,QACR,wCAAwC,aAAa;AAAA,QAErD;AAAA,MACF;AAAA,IACF;AACA,QAAI,KAAK,SAAS,GAAG,GAAG;AACtB,YAAM,IAAI;AAAA,QACR,4CAA4C,IAAI;AAAA,QAEhD;AAA
A,MACF;AAAA,IACF;AAEA,UAAM,eAAe,KAAK,QAAQ,SAAS,eAAe,IAAI;AAE9D,WAAO,IAAI;AAAA,MACT;AAAA,MACA;AAAA,QACE,UAAU,KAAK,oBAAoB;AAAA,QACnC,gBAAgB,KAAK;AAAA,QACrB,oBAAoB,KAAK;AAAA,QACzB,kBAAkB,KAAK;AAAA,MACzB;AAAA;AAAA,IAEF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,gBACJ,QACA,gBAC8B;AAC9B,QAAI,CAAC,KAAK,QAAQ,iBAAiB;AACjC,YAAM,IAAI;AAAA,QACR;AAAA,QACA;AAAA,MACF;AAAA,IACF;AACA,UAAM,OAAO,mBAAmB,MAAM;AACtC,QAAI,KAAK,aAAa,OAAO;AAC3B,YAAM,IAAI;AAAA,QACR;AAAA,QAEA;AAAA,MACF;AAAA,IACF;AACA,SAAK,iBAAiB,KAAK,SAAS,OAAO,mBAAmB;AAC9D,UAAM,UAAU,MAAM,KAAK,QAAQ,gBAAgB,QAAQ,cAAc;AACzE,WAAO,KAAK,gBAAgB,OAAO;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,UACJ,QAC6B;AAC7B,QAAI,CAAC,KAAK,QAAQ,WAAW;AAC3B,YAAM,IAAI;AAAA,QACR;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAMA,UAAM,eACJ,OAAO,SACP,OAAO,QACP,OAAO,WACP,OAAO,SACP,OAAO,QACN,OAAO,SAAS,OAAO,MAAM,SAAS;AAEzC,QAAI,CAAC,cAAc;AACjB,WAAK,iBAAiB,CAAC,GAAG,OAAO,mBAAmB;AACpD,YAAMC,UAAS,MAAM,KAAK,QAAQ,UAAU,OAAO,YAAY,CAAC,CAAC;AACjE,aAAOA;AAAA,IACT;AAEA,UAAM,OAAO,mBAAmB,MAAM;AACtC,QAAI,KAAK,aAAa,OAAO;AAC3B,YAAM,IAAI;AAAA,QACR;AAAA,QAEA;AAAA,MACF;AAAA,IACF;AACA,SAAK,iBAAiB,KAAK,SAAS,OAAO,mBAAmB;AAC9D,UAAM,SAAS,MAAM,KAAK,QAAQ,UAAU,OAAO,YAAY,KAAK,OAAO;AAC3E,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,kBAAkB,KAAa,SAA+C;AAClF,WAAO,KAAK,QAAQ,kBAAkB,KAAK,MAAM,OAAO;AAAA,EAC1D;AAAA,EAEA,MAAM,gBAAgB,QAAyB,SAA4C;AACzF,WAAO,KAAK,QAAQ,gBAAgB,QAAQ,MAAM,OAAO;AAAA,EAC3D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqBA,MAAM,WAAW,QAAyB,SAA4C;AACpF,QAAI,CAAC,KAAK,QAAQ,YAAY;AAC5B,YAAM,IAAI;AAAA,QACR;AAAA,QAGA;AAAA,MACF;AAAA,IACF;AACA,UAAM,UAAU,KAAK,gBAAgB,MAAM;AAC3C,WAAO,KAAK,QAAQ,WAAW,SAAS,OAAO;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,WACJ,QACA,OACA,SACqB;AACrB,QAAI,CAAC,KAAK,QAAQ,YAAY;AAC5B,YAAM,IAAI;AAAA,QACR;AAAA,QACA;AAAA,MACF;AAAA,IACF;AACA,UAAM,UAAU,KAAK,gBAAgB,MAAM;AAC3C,WAAO,KAAK,QAAQ,WAAW,SAAS,OAAO,OAAO;AAAA,EACxD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA
;AAAA,EAoBA,MAAM,OAAO,QAA6C;AACxD,QAAI,CAAC,KAAK,QAAQ,QAAQ;AACxB,YAAM,IAAI;AAAA,QACR;AAAA,QAGA;AAAA,MACF;AAAA,IACF;AACA,QAAI,OAAO,QAAQ,WAAW,GAAG;AAC/B,aAAO,OAAO,UAAU,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC,EAAE,IAAI,EAAE,OAAO,CAAC,EAAE;AAAA,IACnE;AACA,WAAO,KAAK,QAAQ,OAAO,MAAM;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA2BA,MAAM,mBAAmB,QAA+D;AACtF,QAAI,CAAC,KAAK,QAAQ,oBAAoB;AACpC,YAAM,IAAI;AAAA,QACR;AAAA,QAIA;AAAA,MACF;AAAA,IACF;AACA,QAAI,OAAO,QAAQ,WAAW,GAAG;AAC/B,aAAO;AAAA,QACL,MAAM,OAAO,KAAK,IAAI,OAAO,EAAE,OAAO,CAAC,GAAG,aAAa,EAAE,EAAE;AAAA,QAC3D,YAAY;AAAA,MACd;AAAA,IACF;AACA,WAAO,KAAK,QAAQ,mBAAmB,MAAM;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAuCA,MAAM,mBACJ,QACiC;AACjC,QAAI,CAAC,KAAK,QAAQ,oBAAoB;AACpC,YAAM,IAAI;AAAA,QACR;AAAA,QAIA;AAAA,MACF;AAAA,IACF;AACA,QAAI,OAAO,OAAO,WAAW,GAAG;AAC9B,YAAM,IAAI;AAAA,QACR;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAQA,UAAM,OAAO,mBAAmB,MAAM;AACtC,QAAI;AACJ,QAAI;AACJ,QAAI,KAAK,aAAa,OAAO;AAK3B,gBAAU;AAAA,QACR,EAAE,OAAO,QAAQ,IAAI,MAAM,OAAO,OAAO,KAAM;AAAA,QAC/C,EAAE,OAAO,WAAW,IAAI,MAAM,OAAO,OAAO,QAAS;AAAA,QACrD,EAAE,OAAO,QAAQ,IAAI,MAAM,OAAO,OAAO,KAAM;AAAA,MACjD;AACA,UAAI,OAAO,MAAO,SAAQ,KAAK,EAAE,OAAO,SAAS,IAAI,MAAM,OAAO,OAAO,MAAM,CAAC;AAChF,UAAI,OAAO,MAAO,SAAQ,KAAK,EAAE,OAAO,SAAS,IAAI,MAAM,OAAO,OAAO,MAAM,CAAC;AAChF,gBAAU;AAAA,IACZ,OAAO;AACL,gBAAU,KAAK;AACf,gBAAU,KAAK;AAAA,IACjB;AACA,SAAK,iBAAiB,SAAS,OAAO,mBAAmB;AACzD,UAAM,OAAO,MAAM,KAAK,QAAQ,mBAAmB,OAAO,QAAQ,SAAS,OAAO;AAClF,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiCA,MAAM,YAAY,QAAyD;AACzE,QAAI,CAAC,KAAK,QAAQ,aAAa;AAC7B,YAAM,IAAI;AAAA,QACR;AAAA,QAKA;AAAA,MACF;AAAA,IACF;AAQA,UAAM,UAAyB,CAAC;AAChC,QAAI,OAAO,MAAO,SAAQ,KAAK,EAAE,OAA
O,SAAS,IAAI,MAAM,OAAO,OAAO,MAAM,CAAC;AAChF,QAAI,OAAO,QAAS,SAAQ,KAAK,EAAE,OAAO,WAAW,IAAI,MAAM,OAAO,OAAO,QAAQ,CAAC;AACtF,QAAI,OAAO,MAAO,SAAQ,KAAK,EAAE,OAAO,SAAS,IAAI,MAAM,OAAO,OAAO,MAAM,CAAC;AAChF,QAAI,OAAO,MAAO,SAAQ,KAAK,GAAG,OAAO,KAAK;AAC9C,SAAK,iBAAiB,SAAS,OAAO,mBAAmB;AAEzD,WAAO,KAAK,QAAQ,YAAY,MAAM;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyBA,MAAM,eAAe,QAA4D;AAC/E,QAAI,CAAC,KAAK,QAAQ,gBAAgB;AAChC,YAAM,IAAI;AAAA,QACR;AAAA,QAKA;AAAA,MACF;AAAA,IACF;AACA,UAAM,UAAyB,CAAC;AAChC,QAAI,OAAO,MAAO,SAAQ,KAAK,EAAE,OAAO,SAAS,IAAI,MAAM,OAAO,OAAO,MAAM,CAAC;AAChF,QAAI,OAAO,QAAS,SAAQ,KAAK,EAAE,OAAO,WAAW,IAAI,MAAM,OAAO,OAAO,QAAQ,CAAC;AACtF,QAAI,OAAO,MAAO,SAAQ,KAAK,EAAE,OAAO,SAAS,IAAI,MAAM,OAAO,OAAO,MAAM,CAAC;AAChF,SAAK,iBAAiB,SAAS,OAAO,mBAAmB;AACzD,WAAO,KAAK,QAAQ,eAAe,MAAM;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,MAAM,UAAU,QAAuD;AACrE,QAAI,CAAC,KAAK,QAAQ,WAAW;AAC3B,YAAM,IAAI;AAAA,QACR;AAAA,QAMA;AAAA,MACF;AAAA,IACF;AACA,UAAM,UAAyB,CAAC;AAChC,QAAI,OAAO,MAAO,SAAQ,KAAK,EAAE,OAAO,SAAS,IAAI,MAAM,OAAO,OAAO,MAAM,CAAC;AAChF,QAAI,OAAO,QAAS,SAAQ,KAAK,EAAE,OAAO,WAAW,IAAI,MAAM,OAAO,OAAO,QAAQ,CAAC;AACtF,QAAI,OAAO,MAAO,SAAQ,KAAK,EAAE,OAAO,SAAS,IAAI,MAAM,OAAO,OAAO,MAAM,CAAC;AAChF,SAAK,iBAAiB,SAAS,OAAO,mBAAmB;AACzD,WAAO,KAAK,QAAQ,UAAU,MAAM;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWQ,gBAAgB,QAAwC;AAC9D,UAAM,eACJ,OAAO,SACP,OAAO,QACP,OAAO,WACP,OAAO,SACP,OAAO,QACN,OAAO,SAAS,OAAO,MAAM,SAAS;AAEzC,QAAI,CAAC,cAAc;AACjB,WAAK,iBAAiB,CAAC,GAAG,OAAO,mBAAmB;AACpD,aAAO,CAAC;AAAA,IACV;AAEA,UAAM,OAAO,mBAAmB,MAAM;AACtC,QAAI,KAAK,aAAa,OAAO;AAC3B,YAAM,IAAI;AAAA,QACR;AAAA,QAGA;AAAA,MACF;AAAA,IACF;AACA,SAAK,iBAAiB,KAAK,SAAS,OAAO,mBAAmB;AAC9D,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,eACJ,MACA,YACA,aACA,SACe;AACf,QAAI,CAAC,KAAK,eAAe;AACvB,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AAEA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,YAAM,IAAI;AAAA,QACR,uBAAuB,IAAI;AAAA,
MAC7B;AAAA,IACF;AAEA,QAAI,KAAK,gBAAgB,OAAO,MAAM,eAAe,IAAI,GAAG;AAC1D,YAAM,IAAI;AAAA,QACR,4BAA4B,IAAI;AAAA,MAClC;AAAA,IACF;AAEA,UAAM,MAAM,yBAAyB,gBAAgB,IAAI;AACzD,UAAM,OAAgC,EAAE,MAAM,WAAW;AACzD,QAAI,gBAAgB,OAAW,MAAK,cAAc;AAClD,QAAI,SAAS,eAAe,OAAW,MAAK,aAAa,QAAQ;AACjE,QAAI,SAAS,kBAAkB,OAAW,MAAK,gBAAgB,QAAQ;AACvE,QAAI,SAAS,iBAAiB,OAAW,MAAK,eAAe,QAAQ;AACrE,QAAI,SAAS,YAAY,OAAW,MAAK,UAAU,QAAQ;AAC3D,QAAI,SAAS,cAAc,OAAW,MAAK,YAAY,QAAQ;AAC/D,QAAI,SAAS,uBAAuB;AAClC,WAAK,qBAAqB,QAAQ;AACpC,QAAI,SAAS,eAAe,QAAW;AACrC,WAAK,aAAa,MAAM,KAAK,oBAAoB,QAAQ,UAAU;AAAA,IACrE;AAEA,UAAM,KAAK,QAAQ,gBAAgB,KAAK,IAAI;AAAA,EAC9C;AAAA,EAEA,MAAM,eACJ,MACA,UACA,YACA,aACA,SACe;AACf,QAAI,CAAC,KAAK,eAAe;AACvB,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AAEA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,YAAM,IAAI;AAAA,QACR,uBAAuB,IAAI;AAAA,MAC7B;AAAA,IACF;AAEA,QAAI,KAAK,gBAAgB;AACvB,YAAM,YAAY,MAAM,QAAQ,SAAS,IAAI,IAAI,SAAS,OAAO,CAAC,SAAS,IAAI;AAC/E,YAAM,UAAU,MAAM,QAAQ,SAAS,EAAE,IAAI,SAAS,KAAK,CAAC,SAAS,EAAE;AACvE,iBAAW,SAAS,WAAW;AAC7B,mBAAW,SAAS,SAAS;AAC3B,cAAI,KAAK,eAAe,OAAO,OAAO,MAAM,KAAK,GAAG;AAClD,kBAAM,IAAI;AAAA,cACR,4BAA4B,IAAI,UAAU,KAAK,SAAS,KAAK;AAAA,YAC/D;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,UAAM,MAAM,yBAAyB,gBAAgB,IAAI;AACzD,UAAM,OAAgC;AAAA,MACpC;AAAA,MACA,MAAM,SAAS;AAAA,MACf,IAAI,SAAS;AAAA,IACf;AACA,QAAI,eAAe,OAAW,MAAK,aAAa;AAChD,QAAI,SAAS,iBAAiB,OAAW,MAAK,eAAe,SAAS;AACtE,QAAI,SAAS,gBAAgB,OAAW,MAAK,cAAc,SAAS;AACpE,QAAI,gBAAgB,OAAW,MAAK,cAAc;AAClD,QAAI,SAAS,eAAe,OAAW,MAAK,aAAa,QAAQ;AACjE,QAAI,SAAS,kBAAkB,OAAW,MAAK,gBAAgB,QAAQ;AACvE,QAAI,SAAS,iBAAiB,OAAW,MAAK,eAAe,QAAQ;AACrE,QAAI,SAAS,YAAY,OAAW,MAAK,UAAU,QAAQ;AAC3D,QAAI,SAAS,cAAc,OAAW,MAAK,YAAY,QAAQ;AAC/D,QAAI,SAAS,uBAAuB;AAClC,WAAK,qBAAqB,QAAQ;AACpC,QAAI,SAAS,eAAe,QAAW;AACrC,WAAK,aAAa,MAAM,KAAK,oBAAoB,QAAQ,UAAU;AAAA,IACrE;AAEA,UAAM,KAAK,QAAQ,gBAAgB,KAAK,IAAI;AAAA,EAC9C;AAAA,EAEA,MAAM,iBAAgC;AACpC,QAAI,CAAC,KAAK,eAAe;AACvB,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAAA,IACF;AAEA,UAAM,SAAS,KAAK,iBAAiB;AACrC,UAAM,cAAc,MAAM,wBAAwB,QAAQ,KAAK,gBAAgB;AAE/E,QAAI,KAAK,gBAAg
B;AACvB,WAAK,kBAAkB,qBAAqB,KAAK,gBAAgB,WAAW;AAAA,IAC9E,OAAO;AACL,WAAK,kBAAkB;AAAA,IACzB;AAAA,EACF;AAAA,EAEA,MAAc,oBACZ,YACwE;AACxE,UAAM,SAAS,WAAW,IAAI,CAAC,MAAM;AACnC,YAAM,SAAS,OAAO,EAAE,OAAO,aAAa,EAAE,GAAG,SAAS,IAAI,EAAE;AAChE,aAAO,EAAE,aAAa,EAAE,aAAa,WAAW,EAAE,WAAW,IAAI,OAAO;AAAA,IAC1E,CAAC;AACD,UAAM,QAAQ,IAAI,OAAO,IAAI,CAAC,MAAM,iBAAiB,EAAE,IAAI,KAAK,gBAAgB,CAAC,CAAC;AAClF,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,mBAAgC;AACtC,QAAI,CAAC,KAAK,YAAa,QAAO;AAE9B,UAAM,UAAU,KAAK;AAErB,UAAM,mBAAmB,CACvB,SACA,YACiC,QAAQ,MAAM,SAAS,OAAO;AAEjE,WAAO;AAAA,MACL,MAAM,QAAQ,KAAgD;AAC5D,eAAO,QAAQ,OAAO,iBAAiB,GAAG,CAAC;AAAA,MAC7C;AAAA,MACA,MAAM,QACJ,MACA,SACA,MACmC;AACnC,eAAO,QAAQ,OAAO,iBAAiB,MAAM,SAAS,IAAI,CAAC;AAAA,MAC7D;AAAA,MACA,MAAM,WAAW,MAAc,SAAiB,MAAgC;AAC9E,cAAM,SAAS,MAAM,QAAQ,OAAO,iBAAiB,MAAM,SAAS,IAAI,CAAC;AACzE,eAAO,WAAW;AAAA,MACpB;AAAA,MACA,MAAM,UAAU,QAAuD;AACrE,cAAM,OAAO,mBAAmB,MAAM;AACtC,YAAI,KAAK,aAAa,OAAO;AAC3B,gBAAM,SAAS,MAAM,QAAQ,OAAO,KAAK,KAAK;AAC9C,iBAAO,SAAS,CAAC,MAAM,IAAI,CAAC;AAAA,QAC9B;AACA,eAAO,iBAAiB,KAAK,SAAS,KAAK,OAAO;AAAA,MACpD;AAAA,MACA,MAAM,UAAU,QAAuD;AACrE,cAAM,OAAO,mBAAmB,MAAM;AACtC,YAAI,KAAK,aAAa,OAAO;AAC3B,gBAAM,SAAS,MAAM,QAAQ,OAAO,KAAK,KAAK;AAC9C,iBAAO,SAAS,CAAC,MAAM,IAAI,CAAC;AAAA,QAC9B;AACA,eAAO,iBAAiB,KAAK,SAAS,KAAK,OAAO;AAAA,MACpD;AAAA,IACF;AAAA,EACF;AACF;AAgCO,SAAS,kBACd,SACA,SACA,aACwC;AAYxC,SAAO,IAAI,gBAAgB,SAAS,SAAS,WAAW;AAG1D;AAOO,IAAM,+BAA+B;;;AC1pC5C,SAAS,WAAW,GAAmB;AACrC,SAAO,EAAE,QAAQ,+BAA+B,CAAC,GAAG,MAAM,OAAO,GAAG,YAAY,CAAC;AACnF;AAWA,eAAsB,cACpB,WACA,UAA0B,CAAC,GACV;AAEjB,QAAM,EAAE,QAAQ,IAAI,MAAM,OAAO,2BAA2B;AAE5D,QAAM,EAAE,SAAS,KAAK,IAAI;AAC1B,QAAM,SAAmB,CAAC;AAE1B,MAAI,QAAQ;AACV,WAAO,KAAK,sEAAiE;AAAA,EAC/E;AAGA,QAAM,cAAc,CAAC,GAAG,UAAU,MAAM,QAAQ,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC;AACxF,QAAM,cAAc,CAAC,GAAG,UAAU,MAAM,QAAQ,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC;AAExF,aAAW,CAAC,MAAM,MAAM,KAAK,aAAa;AACxC,UAAM,WAAW,GAAG,WAAW,IAAI,CAAC;AACpC,UAAM,KAAK,MAAM,QAAQ,OAAO,QA
Ae,UAAU;AAAA,MACvD,eAAe;AAAA,MACf,sBAAsB;AAAA,IACxB,CAAC;AACD,WAAO,KAAK,GAAG,KAAK,CAAC;AACrB,WAAO,KAAK,EAAE;AAAA,EAChB;AAEA,aAAW,CAAC,MAAM,MAAM,KAAK,aAAa;AACxC,UAAM,WAAW,GAAG,WAAW,IAAI,CAAC;AACpC,UAAM,KAAK,MAAM,QAAQ,OAAO,QAAe,UAAU;AAAA,MACvD,eAAe;AAAA,MACf,sBAAsB;AAAA,IACxB,CAAC;AACD,WAAO,KAAK,GAAG,KAAK,CAAC;AACrB,WAAO,KAAK,EAAE;AAAA,EAChB;AAEA,SAAO,OAAO,KAAK,IAAI,EAAE,QAAQ,IAAI;AACvC;;;ACmDO,SAAS,aAAa,QAA0C;AACrE,SAAO;AACT;AAeO,SAAS,YACd,gBACA,oBACA,SACQ;AACR,MAAI,CAAC,eAAgB,QAAO;AAE5B,QAAM,YAAY,IAAI,IAAI,kBAAkB;AAE5C,MAAI,SAAS;AACX,UAAM,iBAAiB,eAAe,OAAO;AAC7C,QAAI,kBAAkB,UAAU,IAAI,cAAc,GAAG;AACnD,aAAO;AAAA,IACT;AAAA,EACF;AAEA,MAAI,eAAe,WAAW,UAAU,IAAI,eAAe,OAAO,GAAG;AACnE,WAAO,eAAe;AAAA,EACxB;AAEA,SAAO;AACT;;;ACtHO,SAAS,0BAA0B,gBAAwB,KAA4B;AAC5F,QAAM,WAAW,eAAe,MAAM,GAAG;AAGzC,WAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK,GAAG;AAC3C,QAAI,SAAS,CAAC,MAAM,KAAK;AAEvB,aAAO,SAAS,MAAM,GAAG,CAAC,EAAE,KAAK,GAAG;AAAA,IACtC;AAAA,EACF;AAEA,SAAO;AACT;AASO,SAAS,cAAc,gBAAwB,KAAsB;AAC1E,SAAO,0BAA0B,gBAAgB,GAAG,MAAM;AAC5D;;;ACZO,IAAM,uBAAiD,OAAO,OAAO;AAAA,EAC1E,EAAE,QAAQ,CAAC,MAAM,EAAE;AAAA,EACnB,EAAE,QAAQ,CAAC,MAAM,EAAE;AAAA,EACnB,EAAE,QAAQ,CAAC,OAAO,EAAE;AAAA,EACpB,EAAE,QAAQ,CAAC,OAAO,EAAE;AAAA,EACpB,EAAE,QAAQ,CAAC,QAAQ,SAAS,EAAE;AAAA,EAC9B,EAAE,QAAQ,CAAC,WAAW,MAAM,EAAE;AAAA,EAC9B,EAAE,QAAQ,CAAC,SAAS,SAAS,EAAE;AAAA,EAC/B,EAAE,QAAQ,CAAC,WAAW,OAAO,EAAE;AACjC,CAAC;;;AC1BD,qBAAgE;AAChE,yBAA8B;AAC9B,uBAA8B;AAjC9B,IAAAC,eAAA;AAgDO,IAAM,iBAAN,cAA6B,eAAe;AAAA,EACjD,YAAY,SAAiB;AAC3B,UAAM,SAAS,iBAAiB;AAChC,SAAK,OAAO;AAAA,EACd;AACF;AAMA,SAAS,SAAS,UAA2B;AAC3C,MAAI;AACF,UAAM,UAAM,6BAAa,UAAU,OAAO;AAC1C,WAAO,KAAK,MAAM,GAAG;AAAA,EACvB,SAAS,KAAc;AACrB,UAAM,MACJ,eAAe,cACX,mBAAmB,QAAQ,KAAK,IAAI,OAAO,KAC3C,eAAe,QAAQ,KAAM,IAAc,OAAO;AACxD,UAAM,IAAI,eAAe,GAAG;AAAA,EAC9B;AACF;AAEA,SAAS,iBAAiB,UAAuC;AAC/D,MAAI,KAAC,2BAAW,QAAQ,EAAG,QAAO;AAClC,SAAO,SAAS,QAAQ;AAC1B;AAMA,IAAM,2BAA2B,CAAC,OAAO,OAAO,QAAQ,MAAM;AAM9D,SAAS,WAAW,KAAa,aAA6B;AAE5D,aAAW,OAAO,0BAA0B;AAC1C,UAAM,gBAAY,uBAAK,KAAK,SAAS,GAAG,EAAE;AAC1C,YAAI,2BAAW,SAAS
,GAAG;AACzB,aAAO,iBAAiB,WAAW,WAAW;AAAA,IAChD;AAAA,EACF;AAGA,QAAM,eAAW,uBAAK,KAAK,aAAa;AACxC,UAAI,2BAAW,QAAQ,GAAG;AACxB,WAAO,SAAS,QAAQ;AAAA,EAC1B;AAEA,QAAM,IAAI;AAAA,IACR,sBAAsB,WAAW,OAAO,GAAG;AAAA,EAE7C;AACF;AAEA,IAAI;AAEJ,SAAS,UAAmC;AAC1C,MAAI,CAAC,OAAO;AACV,UAAM,OAAO,OAAO,eAAe,cAAc,aAAaA,aAAY;AAC1E,UAAM,iBAAa,kCAAc,IAAI;AACrC,UAAM,EAAE,WAAW,IAAI,WAAW,MAAM;AACxC,YAAQ,WAAW,MAAM,EAAE,gBAAgB,KAAK,CAAC;AAAA,EACnD;AACA,SAAO;AACT;AAEA,SAAS,iBAAiB,UAAkB,aAA6B;AACvE,MAAI;AACF,UAAM,OAAO,QAAQ;AACrB,UAAM,MAAM,KAAK,QAAQ;AACzB,UAAM,SACJ,OAAO,OAAO,QAAQ,YAAY,aAAa,MAC1C,IAA6B,UAC9B;AAEN,QAAI,CAAC,UAAU,OAAO,WAAW,UAAU;AACzC,YAAM,IAAI;AAAA,QACR,eAAe,QAAQ,QAAQ,WAAW;AAAA,MAC5C;AAAA,IACF;AACA,WAAO;AAAA,EACT,SAAS,KAAc;AACrB,QAAI,eAAe,eAAgB,OAAM;AACzC,UAAM,IAAI;AAAA,MACR,gCAAgC,QAAQ,QAAQ,WAAW,KAAM,IAAc,OAAO;AAAA,IACxF;AAAA,EACF;AACF;AAMA,IAAM,kBAAkB,CAAC,OAAO,OAAO,QAAQ,MAAM;AAErD,SAAS,cAAc,KAAiC;AACtD,aAAW,OAAO,iBAAiB;AACjC,UAAM,gBAAY,uBAAK,KAAK,QAAQ,GAAG,EAAE;AACzC,YAAI,2BAAW,SAAS,EAAG,QAAO;AAAA,EACpC;AACA,SAAO;AACT;AAMA,IAAM,uBAAuB,CAAC,OAAO,OAAO,QAAQ,MAAM;AAE1D,SAAS,mBAAmB,KAAiC;AAC3D,aAAW,OAAO,sBAAsB;AACtC,UAAM,gBAAY,uBAAK,KAAK,aAAa,GAAG,EAAE;AAC9C,YAAI,2BAAW,SAAS,EAAG,QAAO;AAAA,EACpC;AACA,SAAO;AACT;AAEA,SAAS,eAAe,UAAkB,aAAsC;AAC9E,MAAI;AACF,UAAM,OAAO,QAAQ;AACrB,UAAM,MAAM,KAAK,QAAQ;AACzB,UAAM,aACJ,OAAO,OAAO,QAAQ,YAAY,aAAa,MAC1C,IAA6B,UAC9B;AAEN,QAAI,CAAC,MAAM,QAAQ,UAAU,GAAG;AAC9B,YAAM,IAAI;AAAA,QACR,mBAAmB,QAAQ,QAAQ,WAAW;AAAA,MAChD;AAAA,IACF;AACA,WAAO;AAAA,EACT,SAAS,KAAc;AACrB,QAAI,eAAe,eAAgB,OAAM;AACzC,UAAM,IAAI;AAAA,MACR,6BAA6B,QAAQ,QAAQ,WAAW,KAAM,IAAc,OAAO;AAAA,IACrF;AAAA,EACF;AACF;AAMA,SAAS,eAAe,KAAa,MAAgC;AACnE,QAAM,SAAS,WAAW,KAAK,cAAc,IAAI,GAAG;AACpD,QAAM,OAAO,qBAAiB,uBAAK,KAAK,WAAW,CAAC;AAWpD,QAAM,aAAa,qBAAiB,uBAAK,KAAK,aAAa,CAAC;AAG5D,QAAM,YAAY,cAAc,GAAG;AACnC,QAAM,iBAAiB,mBAAmB,GAAG;AAC7C,QAAM,aAAa,iBACf,eAAe,gBAAgB,cAAc,IAAI,GAAG,IACpD;AAEJ,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA,aAAa,MAAM;AAAA,IACnB,YAAY,MAAM;AAAA,IAClB,eAAe,MAAM;AAAA,IACrB,cAAc,MAAM;AAAA,IACpB;AA
AA,IACA;AAAA,IACA,WAAW,MAAM;AAAA,IACjB;AAAA,IACA,oBAAoB,MAAM;AAAA,IAC1B,SAAS,MAAM;AAAA,EACjB;AACF;AAEA,SAAS,eAAe,KAAa,MAAgC;AACnE,QAAM,SAAS,WAAW,KAAK,cAAc,IAAI,GAAG;AAEpD,QAAM,eAAW,uBAAK,KAAK,WAAW;AACtC,MAAI,KAAC,2BAAW,QAAQ,GAAG;AACzB,UAAM,IAAI;AAAA,MACR,oCAAoC,IAAI,QAAQ,GAAG;AAAA,IAErD;AAAA,EACF;AACA,QAAM,WAAW,SAAS,QAAQ;AAGlC,MAAI,CAAC,SAAS,MAAM;AAClB,UAAM,IAAI,eAAe,kBAAkB,IAAI,oCAAoC;AAAA,EACrF;AACA,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,IAAI,eAAe,kBAAkB,IAAI,kCAAkC;AAAA,EACnF;AAEA,QAAM,OAAO,qBAAiB,uBAAK,KAAK,WAAW,CAAC;AAYpD,QAAM,aAAa,qBAAiB,uBAAK,KAAK,aAAa,CAAC;AAG5D,QAAM,YAAY,cAAc,GAAG;AACnC,QAAM,iBAAiB,mBAAmB,GAAG;AAC7C,QAAM,aAAa,iBACf,eAAe,gBAAgB,cAAc,IAAI,GAAG,IACpD;AAEJ,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA;AAAA,IACA,aAAa,MAAM;AAAA,IACnB,YAAY,MAAM;AAAA,IAClB,eAAe,MAAM;AAAA,IACrB,cAAc,MAAM;AAAA,IACpB;AAAA,IACA;AAAA,IACA,WAAW,MAAM;AAAA,IACjB,aACE,SAAS,eAAgB,MAA+C;AAAA,IAC1E;AAAA,IACA,oBAAoB,MAAM;AAAA,IAC1B,SAAS,MAAM;AAAA,EACjB;AACF;AAMA,SAAS,kBAAkB,KAAuB;AAChD,MAAI,KAAC,2BAAW,GAAG,EAAG,QAAO,CAAC;AAC9B,aAAO,4BAAY,KAAK,EAAE,eAAe,KAAK,CAAC,EAC5C,OAAO,CAAC,MAAM,EAAE,YAAY,CAAC,EAC7B,IAAI,CAAC,MAAM,EAAE,IAAI;AACtB;AAsBO,SAAS,iBAAiB,aAAqC;AACpE,QAAM,aAAS,0BAAQ,WAAW;AAElC,MAAI,KAAC,2BAAW,MAAM,KAAK,KAAC,yBAAS,MAAM,EAAE,YAAY,GAAG;AAC1D,UAAM,IAAI,eAAe,iCAAiC,WAAW,EAAE;AAAA,EACzE;AAEA,QAAM,QAAQ,oBAAI,IAA8B;AAChD,QAAM,QAAQ,oBAAI,IAA8B;AAChD,QAAM,WAA+B,CAAC;AAGtC,QAAM,eAAW,uBAAK,QAAQ,OAAO;AACrC,aAAW,QAAQ,kBAAkB,QAAQ,GAAG;AAC9C,UAAM,IAAI,MAAM,mBAAe,uBAAK,UAAU,IAAI,GAAG,IAAI,CAAC;AAAA,EAC5D;AAGA,QAAM,eAAW,uBAAK,QAAQ,OAAO;AACrC,aAAW,QAAQ,kBAAkB,QAAQ,GAAG;AAC9C,UAAM,IAAI,MAAM,mBAAe,uBAAK,UAAU,IAAI,GAAG,IAAI,CAAC;AAAA,EAC5D;AAGA,QAAM,YAAY,IAAI,IAAI,MAAM,KAAK,CAAC;AACtC,aAAW,CAAC,SAAS,MAAM,KAAK,OAAO;AACrC,UAAM,WAAW,OAAO;AACxB,UAAM,YAAY,MAAM,QAAQ,SAAS,IAAI,IAAI,SAAS,OAAO,CAAC,SAAS,IAAI;AAC/E,UAAM,UAAU,MAAM,QAAQ,SAAS,EAAE,IAAI,SAAS,KAAK,CAAC,SAAS,EAAE;AAEvE,eAAW,OAAO,CAAC,GAAG,WAAW,GAAG,OAAO,GAAG;AAC5C,UAAI,CAAC,UAAU,IAAI,GAAG,GAAG;AACvB,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS
,SAAS,OAAO,2BAA2B,GAAG;AAAA,QACzD,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,QAAQ,EAAE,OAAO,MAAM;AAAA,IACvB;AAAA,EACF;AACF;;;ACrXA,oBAAuB;AAEhB,SAAS,aAAqB;AACnC,aAAO,sBAAO;AAChB;;;ACiEA,SAAS,eAAe,GAA4C;AAClE,SAAO,OAAO,MAAM,WAAW,EAAE,MAAM,GAAG,MAAM,MAAM,IAAI,EAAE,MAAM,EAAE,MAAM,MAAM,CAAC,CAAC,EAAE,KAAK;AAC3F;AAEA,SAAS,gBAAgB,MAAiB,OAAuB;AAC/D,QAAM,aAAa,KAAK,OAAO,IAAI,cAAc;AACjD,SAAO,GAAG,KAAK,KAAK,KAAK,UAAU,UAAU,CAAC;AAChD;AAEA,SAAS,kBAAkB,MAAwC;AACjE,SAAO,KAAK,OAAO,IAAI,CAAC,MAAM;AAC5B,UAAM,IAAI,eAAe,CAAC;AAC1B,WAAO;AAAA,MACL,WAAW,EAAE;AAAA,MACb,OAAO,EAAE,OAAO,eAAe;AAAA,IACjC;AAAA,EACF,CAAC;AACH;AAEA,IAAI,uBAAuB;AAQpB,SAAS,oBACd,YACA,UAAgC,CAAC,GACX;AACtB,QAAM,OAAO,QAAQ,eAAe,CAAC,GAAG,oBAAoB;AAC5D,QAAM,eAAe,QAAQ,mBAAmB,CAAC,GAAG,QAAQ,CAAC,MAAM;AACjE,QAAI,CAAC,EAAE,QAAS,QAAO,CAAC;AACxB,WAAO,EAAE;AAAA,EACX,CAAC;AAQD,QAAM,mBAAmB,oBAAI,IAAY;AACzC,aAAW,SAAS,QAAQ,mBAAmB,CAAC,GAAG;AACjD,QAAI,MAAM,YAAa,kBAAiB,IAAI,MAAM,WAAW;AAAA,EAC/D;AACA,MAAI,QAAQ,UAAU;AACpB,eAAW,CAAC,EAAE,MAAM,KAAK,QAAQ,SAAS,OAAO;AAC/C,YAAM,KAAK,OAAO,eAAe,OAAO,UAAU;AAClD,UAAI,GAAI,kBAAiB,IAAI,EAAE;AAAA,IACjC;AAAA,EACF;AAEA,QAAM,WAAW,CAAC,GAAG,MAAM,GAAG,WAAW;AACzC,QAAM,OAAO,oBAAI,IAAY;AAC7B,QAAM,UAA4B,CAAC;AAEnC,aAAW,QAAQ,UAAU;AAC3B,QAAI,CAAC,KAAK,UAAU,KAAK,OAAO,SAAS,GAAG;AAE1C;AAAA,IACF;AACA,QAAI,KAAK,OAAO;AACd,UAAI,CAAC,sBAAsB;AACzB,+BAAuB;AACvB,gBAAQ;AAAA,UACN;AAAA,QAGF;AAAA,MACF;AACA;AAAA,IACF;AAEA,UAAM,SAAS,kBAAkB,IAAI;AAErC,UAAM,SAAS,gBAAgB,MAAM,OAAO,UAAU,EAAE;AACxD,QAAI,CAAC,KAAK,IAAI,MAAM,GAAG;AACrB,WAAK,IAAI,MAAM;AACf,cAAQ,KAAK;AAAA,QACX,iBAAiB;AAAA,QACjB,YAAY;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH;AAKA,eAAW,MAAM,kBAAkB;AACjC,YAAM,QAAQ,gBAAgB,MAAM,MAAM,EAAE,EAAE;AAC9C,UAAI,KAAK,IAAI,KAAK,EAAG;AACrB,WAAK,IAAI,KAAK;AACd,cAAQ,KAAK;AAAA,QACX,iBAAiB;AAAA,QACjB,YAAY;AAAA,QACZ;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO,EAAE,SAAS,gBAAgB,CAAC,EAAE;AACvC;;;AC5KA,uBAAiB;;;ACAjB,IAAAC,kBAA6B;AAC7B,IAAAC,oBAAqB;AAErB,IAAM,eAAe,CAAC,uBAAuB,uBAAuB,sBAAsB;AAC1F,IAAM,eAAe;AAMd,SAAS,eAAe,KAAsB;AACnD,QAAM
,MAAM,OAAO,QAAQ,IAAI;AAC/B,aAAW,QAAQ,cAAc;AAC/B,QAAI;AACF,YAAM,cAAU,kCAAa,wBAAK,KAAK,IAAI,GAAG,MAAM;AACpD,YAAM,cAAc,QAAQ,MAAM,yBAAyB,IAAI,CAAC,KAAK;AACrE,YAAM,YAAY,YAAY,MAAM,kBAAkB;AACtD,UAAI,UAAW,QAAO,SAAS,UAAU,CAAC,GAAG,EAAE;AAAA,IACjD,QAAQ;AACN;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;;;ACrBO,SAAS,gBAAgB,GAA4D;AAC1F,MAAI,CAAC,EAAG,QAAO;AACf,QAAM,MAAwB,EAAE,MAAM,EAAE,OAAiB,KAAK,EAAE,KAAe;AAC/E,QAAM,OAAO,EAAE;AACf,MAAI,QAAQ,OAAO,SAAS,YAAY,OAAO,KAAK,IAAI,EAAE,SAAS,GAAG;AACpE,QAAI,OAAO;AAAA,EACb;AACA,SAAO;AACT;AAEO,SAAS,cAAc,GAA0D;AACtF,MAAI,CAAC,EAAG,QAAO;AACf,QAAM,MAAsB;AAAA,IAC1B,UAAU,EAAE;AAAA,IACZ,SAAS,EAAE;AAAA,IACX,UAAU,EAAE;AAAA,IACZ,QAAQ,EAAE;AAAA,IACV,OAAO,EAAE;AAAA,EACX;AACA,QAAM,OAAO,EAAE;AACf,MAAI,QAAQ,OAAO,SAAS,YAAY,OAAO,KAAK,IAAI,EAAE,SAAS,GAAG;AACpE,QAAI,OAAO;AAAA,EACb;AACA,SAAO;AACT;;;AFAO,IAAM,mBAAN,cAA+B,MAAM;AAAA,EAC1C,YACE,SACgB,MAChB;AACA,UAAM,OAAO;AAFG;AAGhB,SAAK,OAAO;AAAA,EACd;AACF;AAIA,SAAS,cAAc,OAAgB,MAAuC;AAC5E,MAAI,OAAO,UAAU,YAAY,MAAM,WAAW,GAAG;AACnD,UAAM,IAAI,iBAAiB,GAAG,IAAI,+BAA+B,kBAAkB;AAAA,EACrF;AACF;AAEA,SAAS,SAAS,OAA2B,KAAa,KAAa,UAA0B;AAC/F,MAAI,SAAS,KAAM,QAAO;AAC1B,MAAI,CAAC,OAAO,UAAU,KAAK,GAAG;AAC5B,UAAM,IAAI,iBAAiB,4BAA4B,kBAAkB;AAAA,EAC3E;AACA,SAAO,KAAK,IAAI,KAAK,KAAK,IAAI,KAAK,KAAK,CAAC;AAC3C;AAEA,SAAS,gBAAgB,KAA+B;AACtD,MAAI,OAAO,QAAQ,QAAQ,SAAS,QAAQ,QAAQ;AAClD,UAAM,IAAI,iBAAiB,mCAAmC,kBAAkB;AAAA,EAClF;AACF;AAIA,SAAS,QAAQ,KAA8B;AAC7C,SAAO,IAAI,QAAQ,CAACC,UAAS,WAAW;AACtC,qBAAAC,QACG,IAAI,KAAK,CAAC,QAAQ;AACjB,UAAI,OAAO;AACX,UAAI,GAAG,QAAQ,CAAC,MAAe,QAAQ,CAAE;AACzC,UAAI,GAAG,OAAO,MAAMD,SAAQ,IAAI,CAAC;AAAA,IACnC,CAAC,EACA,GAAG,SAAS,CAAC,QAAQ;AACpB,aAAO,IAAI,iBAAiB,sBAAsB,IAAI,OAAO,IAAI,mBAAmB,CAAC;AAAA,IACvF,CAAC;AAAA,EACL,CAAC;AACH;AAEA,SAAS,SAAS,KAAa,SAAkC;AAC/D,QAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,SAAO,IAAI,QAAQ,CAACA,UAAS,WAAW;AACtC,UAAM,MAAM,iBAAAC,QAAK;AAAA,MACf;AAAA,QACE,UAAU,OAAO;AAAA,QACjB,MAAM,OAAO;AAAA,QACb,MAAM,OAAO;AAAA,QACb,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,UAChB,kBAAkB,OAAO,WAAW,OAAO;AAAA,QAC7C;AAAA,MACF;AAAA,MACA,CAAC,
QAAQ;AACP,YAAI,OAAO;AACX,YAAI,GAAG,QAAQ,CAAC,MAAe,QAAQ,CAAE;AACzC,YAAI,GAAG,OAAO,MAAMD,SAAQ,IAAI,CAAC;AAAA,MACnC;AAAA,IACF;AACA,QAAI,GAAG,SAAS,CAAC,QAAQ;AACvB,aAAO,IAAI,iBAAiB,sBAAsB,IAAI,OAAO,IAAI,mBAAmB,CAAC;AAAA,IACvF,CAAC;AACD,QAAI,MAAM,OAAO;AACjB,QAAI,IAAI;AAAA,EACV,CAAC;AACH;AAEA,SAAS,kBAAkB,KAAa,WAA4B;AAClE,MAAI;AACJ,MAAI;AACF,aAAS,KAAK,MAAM,GAAG;AAAA,EACzB,QAAQ;AACN,UAAM,IAAI;AAAA,MACR,qBAAqB,SAAS,KAAK,IAAI,MAAM,GAAG,GAAG,CAAC;AAAA,MACpD;AAAA,IACF;AAAA,EACF;AACA,MAAI,OAAO,OAAO;AAChB,UAAM,MACJ,OAAO,OAAO,UAAU,YAAY,OAAO,UAAU,OAC/C,OAAO,MAAkC,WAAW,KAAK,UAAU,OAAO,KAAK,IACjF,OAAO,OAAO,KAAK;AACzB,UAAM,IAAI,iBAAiB,qBAAqB,SAAS,KAAK,GAAG,IAAI,cAAc;AAAA,EACrF;AACA,SAAQ,OAAO,QAAoC,QAAQ;AAC7D;AAIO,IAAM,cAAN,MAAkB;AAAA,EACN;AAAA,EAEjB,YAAY,SAA8B;AACxC,UAAM,OAAO,SAAS,QAAQ;AAC9B,UAAM,OAAO,SAAS,QAAQ,eAAe;AAC7C,SAAK,UAAU,UAAU,IAAI,IAAI,IAAI;AAAA,EACvC;AAAA,EAEA,MAAc,MAAM,WAAmB,OAAmC;AACxE,UAAM,KAAK,SAAS,OAAO,UAAU,mBAAmB,KAAK,UAAU,KAAK,CAAC,CAAC,KAAK;AACnF,UAAM,MAAM,GAAG,KAAK,OAAO,IAAI,SAAS,GAAG,EAAE;AAC7C,UAAM,MAAM,MAAM,QAAQ,GAAG;AAC7B,WAAO,kBAAkB,KAAK,SAAS;AAAA,EACzC;AAAA,EAEA,MAAc,OAAO,WAAmB,OAAkC;AACxE,UAAM,MAAM,GAAG,KAAK,OAAO,IAAI,SAAS;AACxC,UAAM,MAAM,MAAM,SAAS,KAAK,KAAK,UAAU,KAAK,CAAC;AACrD,WAAO,kBAAkB,KAAK,SAAS;AAAA,EACzC;AAAA;AAAA,EAIA,MAAM,YAAmC;AACvC,UAAM,OAAQ,MAAM,KAAK,MAAM,WAAW;AAC1C,WAAO;AAAA,MACL,YAAa,KAAK,aAA2B,CAAC,GAAG;AAAA,QAC/C,CAAC,MACE,OAAO,MAAM,YAAY,MAAM,OAAQ,EAA8B,OAAO;AAAA,MACjF;AAAA,MACA,YAAa,KAAK,aAA2B,CAAC,GAAG,IAAI,CAAC,MAAM;AAC1D,cAAM,IAAI;AACV,eAAO;AAAA,UACL,UAAU,EAAE;AAAA,UACZ,MAAM,EAAE;AAAA,UACR,IAAI,EAAE;AAAA,UACN,cAAe,EAAE,gBAA2B;AAAA,QAC9C;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,MAAM,cAAc,OAAsD;AACxE,kBAAc,MAAM,KAAK,KAAK;AAC9B,UAAM,OAAQ,MAAM,KAAK,MAAM,iBAAiB,EAAE,KAAK,MAAM,IAAI,CAAC;AAClE,WAAO;AAAA,MACL,MAAM,gBAAgB,KAAK,IAAsC;AAAA,MACjE,WAAY,KAAK,YAA0C,CAAC,GACzD,IAAI,aAAa,EACjB,OAAO,OAAO;AAAA,MACjB,UAAW,KAAK,WAAyC,CAAC,GACvD,IAAI,aAAa,EACjB,OAAO,OAAO;AAAA,IACnB;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,OAA+C;AAC5D,UAAM,QAAQ,SAAS,MAAM,OAAO,GAAG,KAAK
,EAAE;AAC9C,oBAAgB,MAAM,OAAO;AAC7B,UAAM,OAAQ,MAAM,KAAK,MAAM,YAAY;AAAA,MACzC,MAAM,MAAM;AAAA,MACZ;AAAA,MACA,YAAY,MAAM;AAAA,MAClB,QAAQ,MAAM;AAAA,MACd,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,IACf,CAAC;AACD,WAAO;AAAA,MACL,QAAS,KAAK,SAAuC,CAAC,GACnD,IAAI,eAAe,EACnB,OAAO,OAAO;AAAA,MACjB,SAAU,KAAK,WAAuB;AAAA,MACtC,YAAY,KAAK;AAAA,IACnB;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,OAA+C;AAC5D,UAAM,YACJ,MAAM,SACN,MAAM,QACN,MAAM,WACN,MAAM,SACN,MAAM,QACL,MAAM,SAAS,MAAM,MAAM,SAAS;AACvC,QAAI,CAAC,WAAW;AACd,YAAM,IAAI;AAAA,QACR;AAAA,QACA;AAAA,MACF;AAAA,IACF;AACA,UAAM,QAAQ,SAAS,MAAM,OAAO,GAAG,KAAK,EAAE;AAC9C,oBAAgB,MAAM,OAAO;AAC7B,UAAM,OAAQ,MAAM,KAAK,MAAM,YAAY;AAAA,MACzC,OAAO,MAAM;AAAA,MACb,MAAM,MAAM;AAAA,MACZ,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,MACb,MAAM,MAAM;AAAA,MACZ;AAAA,MACA,YAAY,MAAM;AAAA,MAClB,QAAQ,MAAM;AAAA,MACd,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,IACf,CAAC;AACD,WAAO;AAAA,MACL,QAAS,KAAK,SAAuC,CAAC,GACnD,IAAI,aAAa,EACjB,OAAO,OAAO;AAAA,MACjB,SAAU,KAAK,WAAuB;AAAA,MACtC,YAAY,KAAK;AAAA,IACnB;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,OAA+C;AAC5D,kBAAc,MAAM,UAAU,UAAU;AACxC,QAAI,CAAC,MAAM,QAAQ,MAAM,KAAK,WAAW,GAAG;AAC1C,YAAM,IAAI,iBAAiB,sCAAsC,kBAAkB;AAAA,IACrF;AACA,aAAS,IAAI,GAAG,IAAI,MAAM,KAAK,QAAQ,KAAK;AAC1C,YAAM,MAAM,MAAM,KAAK,CAAC;AACxB,oBAAc,IAAI,SAAS,QAAQ,CAAC,WAAW;AAC/C,UAAI,IAAI,aAAa,QAAQ,IAAI,cAAc,aAAa,IAAI,cAAc,WAAW;AACvF,cAAM,IAAI;AAAA,UACR,QAAQ,CAAC;AAAA,UACT;AAAA,QACF;AAAA,MACF;AACA,UAAI,IAAI,SAAS,SAAS,CAAC,OAAO,UAAU,IAAI,KAAK,KAAK,IAAI,QAAQ,IAAI;AACxE,cAAM,IAAI;AAAA,UACR,QAAQ,CAAC;AAAA,UACT;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,QAAI,MAAM,YAAY,SAAS,CAAC,OAAO,UAAU,MAAM,QAAQ,KAAK,MAAM,WAAW,IAAI;AACvF,YAAM,IAAI,iBAAiB,uCAAuC,kBAAkB;AAAA,IACtF;AACA,QACE,MAAM,eAAe,SACpB,CAAC,OAAO,UAAU,MAAM,WAAW,KAAK,MAAM,cAAc,IAC7D;AACA,YAAM,IAAI,iBAAiB,0CAA0C,kBAAkB;AAAA,IACzF;AAEA,UAAM,OAAQ,MAAM,KAAK,OAAO,YAAY,KAAK;AACjD,WAAO;AAAA,MACL,OAAQ,KAAK,QAAsC,CAAC,GAAG;AAAA,QACrD,CAAC,OAA0B;AAAA,UACzB,UAAU,EAAE;AAAA,UACZ,WAAW,EAAE;AAAA,UACb,OAAO,EAAE;AAAA,UACT,YAAa,EAAE,SAAuB,CAAC,GAAG;AAAA,UAC1C,QAAS,EAAE,SAAuC,CAAC,GAChD,IAAI,aAAa,E
ACjB,OAAO,OAAO;AAAA,UACjB,WAAY,EAAE,aAAyB;AAAA,QACzC;AAAA,MACF;AAAA,MACA,YAAa,KAAK,cAAyB;AAAA,MAC3C,WAAY,KAAK,aAAyB;AAAA,IAC5C;AAAA,EACF;AAAA,EAEA,MAAM,OAAO,OAA2C;AACtD,kBAAc,MAAM,GAAG,GAAG;AAC1B,UAAM,QAAQ,SAAS,MAAM,OAAO,GAAG,IAAI,EAAE;AAC7C,UAAM,OAAQ,MAAM,KAAK,MAAM,UAAU,EAAE,GAAG,MAAM,GAAG,MAAM,CAAC;AAC9D,WAAO;AAAA,MACL,UAAW,KAAK,WAAyC,CAAC,GACvD,IAAI,CAAC,MAAM;AACV,cAAM,OAAO,gBAAgB,CAAC;AAC9B,YAAI,CAAC,KAAM,QAAO;AAClB,eAAO;AAAA,UACL,GAAG;AAAA,UACH,WAAY,EAAE,cAAyB;AAAA,QACzC;AAAA,MACF,CAAC,EACA,OAAO,OAAO;AAAA,IACnB;AAAA,EACF;AACF;;;AG/PO,SAAS,kBAAkB,OAAsC;AACtE,MAAI,UAAU,GAAI,QAAO,CAAC;AAC1B,QAAM,QAAQ,MAAM,MAAM,GAAG;AAC7B,MAAI,MAAM,SAAS,MAAM,GAAG;AAC1B,UAAM,IAAI;AAAA,MACR,sCAAsC,KAAK;AAAA,IAE7C;AAAA,EACF;AACA,QAAM,MAA6B,CAAC;AACpC,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,GAAG;AACxC,UAAM,MAAM,MAAM,CAAC;AACnB,UAAM,OAAO,MAAM,IAAI,CAAC;AACxB,QAAI,CAAC,OAAO,CAAC,MAAM;AACjB,YAAM,IAAI;AAAA,QACR,sCAAsC,KAAK,2CAA2C,CAAC;AAAA,MACzF;AAAA,IACF;AACA,QAAI,KAAK,EAAE,KAAK,KAAK,CAAC;AAAA,EACxB;AACA,SAAO;AACT;AAsBO,SAAS,qBAAqB,cAAsB,KAA4B;AACrF,MAAI,CAAC,IAAK,QAAO;AACjB,MAAI,iBAAiB,GAAI,QAAO;AAChC,QAAM,QAAQ,aAAa,MAAM,GAAG;AAEpC,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,GAAG;AACxC,QAAI,MAAM,CAAC,MAAM,KAAK;AACpB,aAAO,MAAM,IAAI,KAAK,MAAM,MAAM,GAAG,CAAC,EAAE,KAAK,GAAG;AAAA,IAClD;AAAA,EACF;AACA,SAAO;AACT;AAKO,SAAS,mBAAmB,cAAsB,KAAsB;AAC7E,SAAO,qBAAqB,cAAc,GAAG,MAAM;AACrD;AASO,SAAS,mBAAmB,aAAqB,KAAa,MAAsB;AACzF,MAAI,CAAC,OAAO,IAAI,SAAS,GAAG,GAAG;AAC7B,UAAM,IAAI;AAAA,MACR,4EAA4E,GAAG;AAAA,IACjF;AAAA,EACF;AACA,MAAI,CAAC,QAAQ,KAAK,SAAS,GAAG,GAAG;AAC/B,UAAM,IAAI;AAAA,MACR,6EAA6E,IAAI;AAAA,IACnF;AAAA,EACF;AACA,SAAO,cAAc,GAAG,WAAW,IAAI,GAAG,IAAI,IAAI,KAAK,GAAG,GAAG,IAAI,IAAI;AACvE;;;A1BnDA;;;A2BnCO,IAAM,qBAAqB;AAuC3B,SAAS,uBACd,QACA,MACgB;AAChB,QAAM,WAAW,MAAM,YAAY;AACnC,QAAM,WAAW,MAAM,YAAY,OAAO;AAE1C,MAAI,CAAC,MAAM,QAAQ,OAAO,IAAI,KAAK,OAAO,KAAK,WAAW,GAAG;AAC3D,WAAO,EAAE,UAAU,OAAO,QAAQ,6CAA6C;AAAA,EACjF;AACA,MAAI,OAAO,KAAK,SAAS,UAAU;AACjC,WAAO;AAAA,MACL,UAAU;AAAA,MACV,QAAQ,0BAA0B,OAAO,KAAK,MAAM,gCAAgC,QAAQ;A
AAA,IAC9F;AAAA,EACF;AACA,MAAI,CAAC,MAAM,QAAQ,OAAO,OAAO,GAAG;AAClC,WAAO,EAAE,UAAU,OAAO,QAAQ,4CAA4C;AAAA,EAChF;AAEA,QAAM,iBAAoD,CAAC;AAC3D,WAAS,IAAI,GAAG,IAAI,OAAO,KAAK,QAAQ,KAAK;AAC3C,UAAM,MAAM,OAAO,KAAK,CAAC;AACzB,QAAI,CAAC,IAAI,WAAW,IAAI,QAAQ,WAAW,GAAG;AAC5C,aAAO;AAAA,QACL,UAAU;AAAA,QACV,QAAQ,wBAAwB,CAAC;AAAA,MACnC;AAAA,IACF;AACA,QACE,OAAO,IAAI,mBAAmB,YAC9B,IAAI,kBAAkB,KACtB,CAAC,OAAO,SAAS,IAAI,cAAc,GACnC;AACA,aAAO;AAAA,QACL,UAAU;AAAA,QACV,QAAQ,wBAAwB,CAAC,KAAK,IAAI,OAAO;AAAA,MACnD;AAAA,IACF;AACA,mBAAe,KAAK;AAAA,MAClB,GAAG;AAAA,MACH,SAAS,IAAI;AAAA,MACb,WAAW,IAAI,aAAa;AAAA,MAC5B,gBAAgB,IAAI;AAAA,IACtB,CAAC;AAAA,EACH;AAOA,MAAI,iBAAiB,KAAK,IAAI,GAAG,OAAO,QAAQ,MAAM;AACtD,aAAW,OAAO,gBAAgB;AAChC,sBAAkB,IAAI;AACtB,QAAI,iBAAiB,OAAO,kBAAkB;AAC5C,uBAAiB,OAAO;AACxB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,aAAa,UAAa,iBAAiB,UAAU;AACvD,WAAO;AAAA,MACL,UAAU;AAAA,MACV,QAAQ,6CAA6C,cAAc,4BAA4B,QAAQ;AAAA,IACzG;AAAA,EACF;AAEA,SAAO;AAAA,IACL,UAAU;AAAA,IACV,YAAY;AAAA,MACV,SAAS,OAAO;AAAA,MAChB,MAAM;AAAA,MACN;AAAA,IACF;AAAA,EACF;AACF;;;AC9IA,IAAM,gBAAgB;AACtB,IAAM,oBAAoB;AAC1B,IAAM,sBAAsB;AAG5B,IAAI,oBAAoB;AAGxB,SAAS,cAAc,QAA4C;AACjE,SAAO,cAAc,UAAU,OAAQ,OAAuB,aAAa;AAC7E;AAiBA,SAAS,qBAAqB,QAE5B;AACA,MAAI,CAAC,cAAc,MAAM,EAAG,QAAO;AACnC,QAAM,SAAS;AAOf,SACE,kBAAkB,UAClB,OAAO,OAAO,cAAc,QAAQ,cACpC,OAAO,aAAa,IAAI,YAAY,KACpC,OAAQ,OAAgC,WAAW;AAEvD;AAaA,SAAS,8BAA8B,QAErC;AACA,MAAI,CAAC,cAAc,MAAM,EAAG,QAAO;AACnC,QAAM,SAAS;AACf,SACE,kBAAkB,UAClB,OAAO,OAAO,cAAc,QAAQ,cACpC,OAAO,aAAa,IAAI,sBAAsB,KAC9C,OAAQ,OAA4C,uBAAuB;AAE/E;AAEA,IAAM,YAAN,MAAgB;AAAA,EAId,YAA6B,OAAe;AAAf;AAAA,EAAgB;AAAA,EAHrC,QAA2B,CAAC;AAAA,EAC5B,SAAS;AAAA,EAIjB,MAAM,UAAyB;AAC7B,QAAI,KAAK,SAAS,KAAK,OAAO;AAC5B,WAAK;AACL;AAAA,IACF;AACA,WAAO,IAAI,QAAc,CAACE,aAAY;AACpC,WAAK,MAAM,KAAKA,QAAO;AAAA,IACzB,CAAC;AAAA,EACH;AAAA,EAEA,UAAgB;AACd,SAAK;AACL,UAAM,OAAO,KAAK,MAAM,MAAM;AAC9B,QAAI,MAAM;AACR,WAAK;AACL,WAAK;AAAA,IACP;AAAA,EACF;AACF;AAEA,IAAM,uBAAN,MAAuD;AAAA,EAGrD,YACmB,QACA,UACA,UACjB;AAHiB;AACA;AACA;AAAA,EAChB;AAAA,EANc,OAAwB,CAAC;AAAA,EAQ1C,OAAO,SAAi
B,SAA4D;AAClF,SAAK,KAAK,KAAK,EAAE,SAAS,GAAG,QAAQ,CAAC;AACtC,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,IAAI,SAAsD;AAC9D,QAAI,KAAK,KAAK,WAAW,GAAG;AAC1B,YAAM,IAAI,eAAe,8CAA8C;AAAA,IACzE;AAEA,UAAM,WAAW,SAAS,YAAY;AACtC,UAAM,cAAc,SAAS,eAAe;AAC5C,UAAM,sBAAsB,SAAS,uBAAuB;AAC5D,UAAM,aAAa,SAAS,mBAAmB;AAC/C,UAAM,YAAY,IAAI,UAAU,WAAW;AAe3C,QAAI,eAAe,OAAO;AACxB,YAAM,eAAe,MAAM,KAAK,mBAAmB;AAAA,QACjD;AAAA,QACA;AAAA,MACF,CAAC;AACD,UAAI,aAAc,QAAO;AAAA,IAC3B;AAEA,QAAI,aAAa;AACjB,QAAI,YAAY;AAIhB,QAAI,UAAuD;AAAA,MACzD,EAAE,KAAK,KAAK,UAAU,QAAQ,KAAK,OAAO;AAAA,IAC5C;AACA,UAAM,aAA0B,CAAC;AAEjC,aAAS,QAAQ,GAAG,QAAQ,KAAK,KAAK,QAAQ,SAAS;AACrD,YAAM,MAAM,KAAK,KAAK,KAAK;AAE3B,UAAI,QAAQ,WAAW,GAAG;AACxB,mBAAW,KAAK;AAAA,UACd,SAAS,IAAI;AAAA,UACb;AAAA,UACA,OAAO,CAAC;AAAA,UACR,aAAa;AAAA,UACb,WAAW;AAAA,QACb,CAAC;AACD;AAAA,MACF;AAEA,YAAM,WAAoE,CAAC;AAC3E,YAAM,cAAc,QAAQ;AAC5B,UAAI,eAAe;AAKnB,YAAM,sBAAsB,KAAK,mBAAmB,GAAG;AACvD,YAAM,YAAY,IAAI,aAAa;AACnC,YAAM,eAAe,cAAc,aAAa,CAAC,CAAC;AAuBlD,YAAM,eAAe,QAAQ,MAAM,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,EAAE,MAAM,IACpE,QAAQ,CAAC,EAAE,SACX;AACJ,YAAM,cAAc,CAAC,gBAAgB,gBAAgB,qBAAqB,YAAY;AAEtF,UAAI,eAAe,cAAc;AAC/B,YAAI,cAAc,UAAU;AAC1B,yBAAe;AAAA,QACjB,OAAO;AACL;AACA,gBAAM,QAAQ,IAAI,SAAS;AAC3B,gBAAM,eAA6B;AAAA,YACjC,SAAS,QAAQ,IAAI,CAAC,MAAM,EAAE,GAAG;AAAA,YACjC,SAAS,IAAI;AAAA,YACb;AAAA,UACF;AACA,cAAI,IAAI,MAAO,cAAa,QAAQ,IAAI;AACxC,cAAI,IAAI,MAAO,cAAa,QAAQ,IAAI;AACxC,cAAI,IAAI,QAAS,cAAa,UAAU,IAAI;AAK5C,cAAI,CAAC,IAAI,QAAQ;AACf,yBAAa,iBAAiB;AAAA,UAChC;AACA,gBAAM,SAAS,MACb,aAGA,OAAO,YAAY;AACrB,cAAIC,SAAQ,OAAO;AACnB,cAAI,IAAI,QAAQ;AACd,YAAAA,SAAQA,OAAM,OAAO,IAAI,MAAM;AAI/B,kBAAM,SAAS,oBAAI,IAAoB;AACvC,kBAAM,OAA4B,CAAC;AACnC,uBAAW,KAAKA,QAAO;AACrB,oBAAM,YAAY,cAAc,YAAY,EAAE,OAAO,EAAE;AACvD,oBAAM,IAAI,OAAO,IAAI,SAAS,KAAK;AACnC,kBAAI,IAAI,OAAO;AACb,uBAAO,IAAI,WAAW,IAAI,CAAC;AAC3B,qBAAK,KAAK,CAAC;AAAA,cACb;AAAA,YACF;AACA,YAAAA,SAAQ;AAAA,UACV;AACA,qBAAW,QAAQA,QAAO;AACxB,qBAAS,KAAK,EAAE,MAAM,QAAQ,aAAa,CAAC;AAAA,UAC9C;AAAA,QACF;AAGA,cAAM,YAAY,SAAS,IAAI,CAAC,MAAM,EAAE,IAAI;AAC5C,mBAAW,KAAK;AAAA,UACd,SA
AS,IAAI;AAAA,UACb;AAAA,UACA,OAAO,sBAAsB,CAAC,GAAG,SAAS,IAAI;AAAA,UAC9C;AAAA,UACA,WAAW;AAAA,QACb,CAAC;AACD,YAAI,aAAc,aAAY;AAG9B,cAAMC,QAAO,oBAAI,IAAyB;AAC1C,mBAAW,EAAE,MAAM,QAAQ,WAAW,KAAK,UAAU;AACnD,gBAAM,UAAU,cAAc,YAAY,KAAK,OAAO,KAAK;AAC3D,cAAI,CAACA,MAAK,IAAI,OAAO,EAAG,CAAAA,MAAK,IAAI,SAAS,UAAU;AAAA,QACtD;AACA,kBAAU,CAAC,GAAGA,MAAK,QAAQ,CAAC,EAAE,IAAI,CAAC,CAAC,KAAK,MAAM,OAAO,EAAE,KAAK,OAAO,EAAE;AACtE;AAAA,MACF;AAIA,YAAM,QAAQ,QAAQ,IAAI,CAAC,EAAE,KAAK,QAAQ,aAAa,MAAM,YAAY;AACvE,YAAI,cAAc,UAAU;AAC1B,yBAAe;AACf;AAAA,QACF;AAEA,cAAM,UAAU,QAAQ;AACxB,YAAI;AACF,cAAI,cAAc,UAAU;AAC1B,2BAAe;AACf;AAAA,UACF;AAEA;AAEA,gBAAM,SAA0B,EAAE,SAAS,IAAI,QAAQ;AAEvD,cAAI,cAAc,WAAW;AAC3B,mBAAO,OAAO;AACd,gBAAI,IAAI,MAAO,QAAO,QAAQ,IAAI;AAAA,UACpC,OAAO;AACL,mBAAO,OAAO;AACd,gBAAI,IAAI,MAAO,QAAO,QAAQ,IAAI;AAAA,UACpC;AAEA,cAAI,cAAc,aAAa,IAAI,OAAO;AACxC,mBAAO,QAAQ,IAAI;AAAA,UACrB;AACA,cAAI,cAAc,aAAa,IAAI,OAAO;AACxC,mBAAO,QAAQ,IAAI;AAAA,UACrB;AAEA,cAAI,IAAI,QAAS,QAAO,UAAU,IAAI;AAEtC,gBAAM,QAAQ,IAAI,SAAS;AAC3B,cAAI,IAAI,QAAQ;AACd,mBAAO,QAAQ;AAAA,UACjB,OAAO;AACL,mBAAO,QAAQ;AAAA,UACjB;AAOA,cAAI;AACJ,cAAI;AACJ,cAAI,cAAc;AAChB,gBAAI,cAAc,KAAK,MAAM,GAAG;AAC9B,0BAAY,KAAK,OAAO,SAAS,KAAK,mBAAoB;AAC1D,2BAAa;AAAA,YACf,OAAO;AACL,0BAAY;AACZ,2BAAa;AACb,kBAAI,CAAC,mBAAmB;AACtB,oCAAoB;AACpB,wBAAQ;AAAA,kBACN,8BAA8B,IAAI,OAAO,sBAAsB,mBAAmB;AAAA,gBAGpF;AAAA,cACF;AAAA,YACF;AAAA,UACF,OAAO;AAEL,wBAAY;AACZ,yBAAa;AAAA,UACf;AAEA,cAAID,SAAQ,MAAM,UAAU,UAAU,MAAM;AAE5C,cAAI,IAAI,QAAQ;AACd,YAAAA,SAAQA,OAAM,OAAO,IAAI,MAAM;AAC/B,YAAAA,SAAQA,OAAM,MAAM,GAAG,KAAK;AAAA,UAC9B;AAEA,qBAAW,QAAQA,QAAO;AACxB,qBAAS,KAAK,EAAE,MAAM,QAAQ,WAAW,CAAC;AAAA,UAC5C;AAAA,QACF,UAAE;AACA,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF,CAAC;AAED,YAAM,QAAQ,IAAI,MAAM,IAAI,CAAC,SAAS,KAAK,CAAC,CAAC;AAE7C,YAAM,QAAQ,SAAS,IAAI,CAAC,MAAM,EAAE,IAAI;AAExC,iBAAW,KAAK;AAAA,QACd,SAAS,IAAI;AAAA,QACb;AAAA,QACA,OAAO,sBAAsB,CAAC,GAAG,KAAK,IAAI;AAAA,QAC1C;AAAA,QACA,WAAW;AAAA,MACb,CAAC;AAED,UAAI,cAAc;AAChB,oBAAY;AAAA,MACd;AAIA,YAAM,OAAO,oBAAI,IAAyB;AAC1C,iBAAW,EAAE,MAAM,QAAQ,WAAW,KAAK,UAAU
;AACnD,cAAM,UAAU,cAAc,YAAY,KAAK,OAAO,KAAK;AAC3D,YAAI,CAAC,KAAK,IAAI,OAAO,GAAG;AACtB,eAAK,IAAI,SAAS,UAAU;AAAA,QAC9B;AAAA,MACF;AACA,gBAAU,CAAC,GAAG,KAAK,QAAQ,CAAC,EAAE,IAAI,CAAC,CAAC,KAAK,MAAM,OAAO,EAAE,KAAK,OAAO,EAAE;AAAA,IACxE;AAEA,UAAM,UAAU,WAAW,WAAW,SAAS,CAAC;AAEhD,WAAO;AAAA,MACL,OAAO,QAAQ;AAAA,MACf,MAAM;AAAA,MACN;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAc,mBAAmB,MAGQ;AACvC,UAAM,EAAE,YAAY,oBAAoB,IAAI;AAE5C,UAAM,SAAS,CAAC,WAAgD;AAC9D,UAAI,eAAe,SAAS;AAC1B,cAAM,IAAI,eAAe,gCAAgC,MAAM,IAAI,uBAAuB;AAAA,MAC5F;AACA,aAAO;AAAA,IACT;AAEA,QAAI,CAAC,8BAA8B,KAAK,MAAM,GAAG;AAC/C,aAAO,OAAO,yDAAyD;AAAA,IACzE;AACA,UAAM,SAAS,KAAK;AAKpB,UAAM,aAA8B,CAAC;AACrC,aAAS,IAAI,GAAG,IAAI,KAAK,KAAK,QAAQ,KAAK;AACzC,YAAM,MAAM,KAAK,KAAK,CAAC;AACvB,UAAI,IAAI,QAAQ;AACd,eAAO,OAAO,OAAO,CAAC,KAAK,IAAI,OAAO,gCAAgC;AAAA,MACxE;AACA,YAAM,cAAc,KAAK,mBAAmB,GAAG;AAC/C,YAAM,YAAY,IAAI,aAAa;AACnC,UAAI,aAAa;AACf,eAAO,OAAO,OAAO,CAAC,KAAK,IAAI,OAAO,iCAAiC,WAAW,GAAG;AAAA,MACvF;AACA,YAAM,QAAQ,IAAI,SAAS;AAC3B,YAAM,YAA2B;AAAA,QAC/B,SAAS,IAAI;AAAA,QACb;AAAA,QACA,gBAAgB;AAAA,MAClB;AACA,UAAI,IAAI,MAAO,WAAU,QAAQ,IAAI;AACrC,UAAI,IAAI,MAAO,WAAU,QAAQ,IAAI;AACrC,UAAI,IAAI,QAAS,WAAU,UAAU,IAAI;AACzC,iBAAW,KAAK,SAAS;AAAA,IAC3B;AAEA,UAAM,SAAgC;AAAA,MACpC,SAAS,CAAC,KAAK,QAAQ;AAAA,MACvB,MAAM;AAAA,IACR;AAMA,UAAM,WAAW,uBAAuB,MAAM;AAC9C,QAAI,CAAC,SAAS,UAAU;AACtB,aAAO,OAAO,SAAS,MAAM;AAAA,IAC/B;AAEA,QAAI;AACJ,QAAI;AACF,qBAAe,MAAM,OAAO,mBAAmB,MAAM;AAAA,IACvD,SAAS,KAAK;AACZ,UAAI,eAAe,QAAS,OAAM;AAClC,aAAO;AAAA,IACT;AAQA,UAAM,aAA0B,CAAC;AACjC,aAAS,IAAI,GAAG,IAAI,KAAK,KAAK,QAAQ,KAAK;AACzC,YAAM,aAAa,KAAK,KAAK,CAAC;AAC9B,YAAM,kBAAkB,aAAa,KAAK,CAAC,KAAK,EAAE,OAAO,CAAC,GAAG,aAAa,EAAE;AAC5E,YAAM,QAAQ,gBAAgB;AAC9B,YAAM,eAAe,MAAM,UAAU,WAAW,CAAC,EAAE;AACnD,iBAAW,KAAK;AAAA,QACd,SAAS,WAAW;AAAA,QACpB,OAAO;AAAA,QACP,OAAO,sBAAsB,CAAC,GAAG,KAAK,IAAI;AAAA,QAC1C,aAAa,gBAAgB;AAAA,QAC7B,WAAW;AAAA,MACb,CAAC;AAAA,IACH;AAEA,UAAM,UAAU,WAAW,WAAW,SAAS,CAAC;AAChD,WAAO;AAAA,MACL,OAAO,QAAQ;AAAA,MACf,MAAM;AAAA;AAAA;AAAA;AAAA,MAI
N,YAAY;AAAA,MACZ,WAAW,WAAW,KAAK,CAAC,MAAM,EAAE,SAAS;AAAA,IAC/C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,mBAAmB,KAAwC;AACjE,QAAI,IAAI,YAAa,QAAO,IAAI;AAEhC,QAAI,KAAK,UAAU;AACjB,YAAM,UAAU,KAAK,SAAS,gBAAgB,IAAI,OAAO;AAEzD,iBAAW,SAAS,SAAS;AAC3B,YAAI,MAAM,YAAa,QAAO,MAAM;AAAA,MACtC;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;AAkBO,SAAS,gBACd,QACA,UACA,UACkB;AAClB,SAAO,IAAI,qBAAqB,QAAQ,UAAU,QAAQ;AAC5D;;;ACxdA,SAAS,gBAAgB,GAAmB;AAC1C,SAAO,EACJ,YAAY,EACZ,QAAQ,cAAc,GAAG,EACzB,QAAQ,OAAO,GAAG,EAClB,QAAQ,UAAU,EAAE;AACzB;AAYA,SAAS,oBAAsD;AAC7D,QAAM,IAAI;AACV,MAAI,EAAE,kBAAkB,OAAO,EAAE,eAAe,WAAW,YAAY;AACrE,WAAO,EAAE;AAAA,EACX;AACA,SAAO;AACT;AAOA,SAAS,cAAc,WAA+B,SAAqC;AACzF,QAAM,IAAI;AACV,MAAI,CAAC,EAAE,YAAa,QAAO;AAE3B,QAAM,UAAU,cAAe,UAAqD;AAAA,IAClF,oBAAoB;AAClB,UAAI;AACF,cAAM,oBAAoB;AAAA,MAC5B,SAAS,KAAK;AACZ,gBAAQ,KAAK,gBAAgB,OAAO,8BAA8B,GAAG;AACrE,aAAK,WAAW,GAAG;AAAA,MACrB;AAAA,IACF;AAAA,IAEA,uBAAuB;AACrB,UAAI;AACF,cAAM,uBAAuB;AAAA,MAC/B,SAAS,KAAK;AACZ,gBAAQ,KAAK,gBAAgB,OAAO,iCAAiC,GAAG;AAAA,MAC1E;AAAA,IACF;AAAA,IAEA,IAAI,KAAK,GAA4B;AACnC,UAAI;AACF,cAAM,OAAO;AAAA,MACf,SAAS,KAAK;AACZ,gBAAQ,KAAK,gBAAgB,OAAO,wBAAwB,GAAG;AAC/D,aAAK,WAAW,GAAG;AAAA,MACrB;AAAA,IACF;AAAA,IAEA,IAAI,OAAgC;AAClC,UAAI;AACF,eAAO,MAAM;AAAA,MACf,QAAQ;AACN,eAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,IAEA,WAAW,KAAc;AACvB,UAAI;AACF,aAAK,YACH,kGACqB,OAAO,SAAS,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACzF,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AAGA,EAAC,QAA0C,WAAW,UAAU;AAChE,EAAC,QAA0C,cAAc,UAAU;AAEnE,SAAO;AACT;AAaO,SAAS,YAAY,OAAwC;AAClE,QAAM,QAAwC,CAAC;AAC/C,QAAM,QAAwC,CAAC;AAC/C,QAAM,WAAW,kBAAkB;AAGnC,aAAW,CAAC,YAAY,MAAM,KAAK,OAAO,QAAQ,MAAM,SAAS,CAAC,CAAC,GAAG;AACpE,UAAM,YAAwB,CAAC;AAC/B,eAAW,aAAa,OAAO,OAAO;AACpC,YAAM,UAAU,MAAM,gBAAgB,UAAU,CAAC,IAAI,gBAAgB,UAAU,QAAQ,CAAC;AACxF,gBAAU,KAAK;AAAA,QACb;AAAA,QACA,UAAU,UAAU;AAAA,QACpB,aAAa,UAAU;AAAA,MACzB,CAAC;AACD,UAAI,YAAY,CAAC,SAAS,IAAI,OAAO,GAAG;AACtC,iBAAS,OAAO,SAAS,cAAc,WAAW,OAAO,CAAC;AAAA,MAC5D;AAAA,IACF;AACA,UAAM,UAAU,IAAI;AAAA,MAClB,OAAO;AAAA,MACP,YAAY,OAAO;AAAA,IACrB;AAAA,E
ACF;AAGA,aAAW,CAAC,SAAS,MAAM,KAAK,OAAO,QAAQ,MAAM,SAAS,CAAC,CAAC,GAAG;AACjE,UAAM,YAAwB,CAAC;AAC/B,eAAW,aAAa,OAAO,OAAO;AACpC,YAAM,UAAU,WAAW,gBAAgB,OAAO,CAAC,IAAI,gBAAgB,UAAU,QAAQ,CAAC;AAC1F,gBAAU,KAAK;AAAA,QACb;AAAA,QACA,UAAU,UAAU;AAAA,QACpB,aAAa,UAAU;AAAA,MACzB,CAAC;AACD,UAAI,YAAY,CAAC,SAAS,IAAI,OAAO,GAAG;AACtC,iBAAS,OAAO,SAAS,cAAc,WAAW,OAAO,CAAC;AAAA,MAC5D;AAAA,IACF;AACA,UAAM,OAAO,IAAI;AAAA,MACf,OAAO;AAAA,MACP,YAAY,OAAO;AAAA,IACrB;AAAA,EACF;AAEA,SAAO,EAAE,OAAO,MAAM;AACxB;","names":["import_node_crypto","import_node_crypto","resolve","serializeFirestoreTypes","deserializeFirestoreTypes","buildWritableNodeRecord","buildWritableEdgeRecord","buildWritableNodeRecord","buildWritableEdgeRecord","result","import_meta","import_node_fs","import_node_path","resolve","http","resolve","edges","seen"]}
|