@rocicorp/zero 1.2.0-canary.3 → 1.2.0-canary.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98)
  1. package/out/ast-to-zql/src/format.d.ts.map +1 -1
  2. package/out/ast-to-zql/src/format.js +6 -6
  3. package/out/ast-to-zql/src/format.js.map +1 -1
  4. package/out/shared/src/bigint-json.d.ts.map +1 -1
  5. package/out/shared/src/bigint-json.js.map +1 -1
  6. package/out/shared/src/btree-set.d.ts.map +1 -1
  7. package/out/shared/src/btree-set.js +73 -41
  8. package/out/shared/src/btree-set.js.map +1 -1
  9. package/out/z2s/src/sql.js.map +1 -1
  10. package/out/zero/package.js +4 -5
  11. package/out/zero/package.js.map +1 -1
  12. package/out/zero-cache/src/auth/load-permissions.d.ts +2 -2
  13. package/out/zero-cache/src/auth/load-permissions.d.ts.map +1 -1
  14. package/out/zero-cache/src/auth/load-permissions.js.map +1 -1
  15. package/out/zero-cache/src/db/run-transaction.d.ts.map +1 -1
  16. package/out/zero-cache/src/db/run-transaction.js +2 -2
  17. package/out/zero-cache/src/db/run-transaction.js.map +1 -1
  18. package/out/zero-cache/src/db/transaction-pool.d.ts.map +1 -1
  19. package/out/zero-cache/src/db/transaction-pool.js.map +1 -1
  20. package/out/zero-cache/src/observability/metrics.d.ts +1 -1
  21. package/out/zero-cache/src/observability/metrics.d.ts.map +1 -1
  22. package/out/zero-cache/src/observability/metrics.js.map +1 -1
  23. package/out/zero-cache/src/server/anonymous-otel-start.d.ts.map +1 -1
  24. package/out/zero-cache/src/server/anonymous-otel-start.js +6 -1
  25. package/out/zero-cache/src/server/anonymous-otel-start.js.map +1 -1
  26. package/out/zero-cache/src/server/change-streamer.d.ts.map +1 -1
  27. package/out/zero-cache/src/server/change-streamer.js +3 -1
  28. package/out/zero-cache/src/server/change-streamer.js.map +1 -1
  29. package/out/zero-cache/src/server/logging.d.ts.map +1 -1
  30. package/out/zero-cache/src/server/logging.js +9 -1
  31. package/out/zero-cache/src/server/logging.js.map +1 -1
  32. package/out/zero-cache/src/server/replicator.d.ts.map +1 -1
  33. package/out/zero-cache/src/server/replicator.js +28 -1
  34. package/out/zero-cache/src/server/replicator.js.map +1 -1
  35. package/out/zero-cache/src/services/change-source/common/replica-schema.d.ts.map +1 -1
  36. package/out/zero-cache/src/services/change-source/common/replica-schema.js +13 -1
  37. package/out/zero-cache/src/services/change-source/common/replica-schema.js.map +1 -1
  38. package/out/zero-cache/src/services/change-source/custom/change-source.js +2 -2
  39. package/out/zero-cache/src/services/change-source/pg/change-source.d.ts.map +1 -1
  40. package/out/zero-cache/src/services/change-source/pg/change-source.js.map +1 -1
  41. package/out/zero-cache/src/services/change-source/pg/logical-replication/stream.js +1 -1
  42. package/out/zero-cache/src/services/change-streamer/backup-monitor.d.ts +1 -1
  43. package/out/zero-cache/src/services/change-streamer/backup-monitor.d.ts.map +1 -1
  44. package/out/zero-cache/src/services/change-streamer/backup-monitor.js +31 -1
  45. package/out/zero-cache/src/services/change-streamer/backup-monitor.js.map +1 -1
  46. package/out/zero-cache/src/services/change-streamer/change-streamer-http.js +1 -1
  47. package/out/zero-cache/src/services/change-streamer/change-streamer-service.js +1 -1
  48. package/out/zero-cache/src/services/change-streamer/storer.d.ts.map +1 -1
  49. package/out/zero-cache/src/services/change-streamer/storer.js +1 -1
  50. package/out/zero-cache/src/services/change-streamer/storer.js.map +1 -1
  51. package/out/zero-cache/src/services/life-cycle.d.ts +1 -0
  52. package/out/zero-cache/src/services/life-cycle.d.ts.map +1 -1
  53. package/out/zero-cache/src/services/life-cycle.js +2 -2
  54. package/out/zero-cache/src/services/life-cycle.js.map +1 -1
  55. package/out/zero-cache/src/services/mutagen/pusher.d.ts +2 -2
  56. package/out/zero-cache/src/services/mutagen/pusher.d.ts.map +1 -1
  57. package/out/zero-cache/src/services/mutagen/pusher.js.map +1 -1
  58. package/out/zero-cache/src/services/replicator/schema/column-metadata.d.ts +1 -1
  59. package/out/zero-cache/src/services/replicator/schema/column-metadata.d.ts.map +1 -1
  60. package/out/zero-cache/src/services/replicator/schema/column-metadata.js.map +1 -1
  61. package/out/zero-cache/src/services/replicator/schema/replication-state.d.ts.map +1 -1
  62. package/out/zero-cache/src/services/replicator/schema/replication-state.js +6 -3
  63. package/out/zero-cache/src/services/replicator/schema/replication-state.js.map +1 -1
  64. package/out/zero-cache/src/services/view-syncer/pipeline-driver.d.ts +1 -1
  65. package/out/zero-cache/src/services/view-syncer/pipeline-driver.d.ts.map +1 -1
  66. package/out/zero-cache/src/services/view-syncer/pipeline-driver.js.map +1 -1
  67. package/out/zero-cache/src/services/view-syncer/snapshotter.d.ts +1 -1
  68. package/out/zero-cache/src/services/view-syncer/snapshotter.d.ts.map +1 -1
  69. package/out/zero-cache/src/services/view-syncer/snapshotter.js.map +1 -1
  70. package/out/zero-cache/src/services/view-syncer/view-syncer.js +1 -1
  71. package/out/zero-cache/src/services/view-syncer/view-syncer.js.map +1 -1
  72. package/out/zero-cache/src/types/pg.d.ts.map +1 -1
  73. package/out/zero-cache/src/types/pg.js +16 -8
  74. package/out/zero-cache/src/types/pg.js.map +1 -1
  75. package/out/zero-cache/src/workers/connection.js.map +1 -1
  76. package/out/zero-cache/src/workers/replicator.d.ts +5 -2
  77. package/out/zero-cache/src/workers/replicator.d.ts.map +1 -1
  78. package/out/zero-cache/src/workers/replicator.js +10 -6
  79. package/out/zero-cache/src/workers/replicator.js.map +1 -1
  80. package/out/zero-client/src/client/version.js +1 -1
  81. package/out/zero-protocol/src/application-error.d.ts +1 -1
  82. package/out/zero-protocol/src/application-error.d.ts.map +1 -1
  83. package/out/zero-protocol/src/application-error.js.map +1 -1
  84. package/out/zql/src/ivm/memory-source.d.ts +1 -1
  85. package/out/zql/src/ivm/memory-source.d.ts.map +1 -1
  86. package/out/zql/src/ivm/memory-source.js +2 -2
  87. package/out/zql/src/ivm/memory-source.js.map +1 -1
  88. package/out/zql/src/ivm/view-apply-change.d.ts.map +1 -1
  89. package/out/zql/src/ivm/view-apply-change.js +34 -26
  90. package/out/zql/src/ivm/view-apply-change.js.map +1 -1
  91. package/out/zql/src/planner/planner-debug.d.ts.map +1 -1
  92. package/out/zql/src/planner/planner-debug.js.map +1 -1
  93. package/out/zql/src/query/expression.d.ts +1 -1
  94. package/out/zql/src/query/expression.d.ts.map +1 -1
  95. package/out/zql/src/query/expression.js.map +1 -1
  96. package/out/zql/src/query/query.d.ts +1 -2
  97. package/out/zql/src/query/query.d.ts.map +1 -1
  98. package/package.json +4 -5
@@ -1 +1 @@
1
- {"version":3,"file":"memory-source.js","names":["#tableName","#columns","#primaryKey","#primaryIndexSort","#indexes","#connections","#getPrimaryIndex","#fetch","#disconnect","#getSchema","#getOrCreateIndex","#overlay","#writeChange","#pushEpoch"],"sources":["../../../../../zql/src/ivm/memory-source.ts"],"sourcesContent":["import {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport {BTreeSet} from '../../../shared/src/btree-set.ts';\nimport {hasOwn} from '../../../shared/src/has-own.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport {once} from '../../../shared/src/iterables.ts';\nimport type {\n Condition,\n Ordering,\n OrderPart,\n} from '../../../zero-protocol/src/ast.ts';\nimport type {Row, Value} from '../../../zero-protocol/src/data.ts';\nimport type {PrimaryKey} from '../../../zero-protocol/src/primary-key.ts';\nimport type {SchemaValue} from '../../../zero-types/src/schema-value.ts';\nimport type {DebugDelegate} from '../builder/debug-delegate.ts';\nimport {\n createPredicate,\n transformFilters,\n type NoSubqueryCondition,\n} from '../builder/filter.ts';\nimport {assertOrderingIncludesPK} from '../query/complete-ordering.ts';\nimport type {Change} from './change.ts';\nimport {\n constraintMatchesPrimaryKey,\n constraintMatchesRow,\n primaryKeyConstraintFromFilters,\n type Constraint,\n} from './constraint.ts';\nimport {\n compareValues,\n makeComparator,\n valuesEqual,\n type Comparator,\n type Node,\n} from './data.ts';\nimport {filterPush} from './filter-push.ts';\nimport {\n skipYields,\n type FetchRequest,\n type Input,\n type Output,\n type Start,\n} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {\n Source,\n SourceChange,\n SourceChangeAdd,\n SourceChangeEdit,\n SourceChangeRemove,\n SourceInput,\n} from './source.ts';\nimport type {Stream} from './stream.ts';\n\nexport type Overlay = {\n epoch: number;\n change: SourceChange;\n};\n\nexport type Overlays = {\n add: Row | undefined;\n 
remove: Row | undefined;\n};\n\ntype Index = {\n comparator: Comparator;\n data: BTreeSet<Row>;\n usedBy: Set<Connection>;\n};\n\nexport type Connection = {\n input: Input;\n output: Output | undefined;\n sort?: Ordering | undefined;\n splitEditKeys: Set<string> | undefined;\n compareRows: Comparator;\n filters:\n | {\n condition: NoSubqueryCondition;\n predicate: (row: Row) => boolean;\n }\n | undefined;\n readonly debug?: DebugDelegate | undefined;\n lastPushedEpoch: number;\n};\n\n/**\n * A `MemorySource` is a source that provides data to the pipeline from an\n * in-memory data source.\n *\n * This data is kept in sorted order as downstream pipelines will always expect\n * the data they receive from `pull` to be in sorted order.\n */\nexport class MemorySource implements Source {\n readonly #tableName: string;\n readonly #columns: Record<string, SchemaValue>;\n readonly #primaryKey: PrimaryKey;\n readonly #primaryIndexSort: Ordering;\n readonly #indexes: Map<string, Index> = new Map();\n readonly #connections: Connection[] = [];\n\n #overlay: Overlay | undefined;\n #pushEpoch = 0;\n\n constructor(\n tableName: string,\n columns: Record<string, SchemaValue>,\n primaryKey: PrimaryKey,\n primaryIndexData?: BTreeSet<Row>,\n ) {\n this.#tableName = tableName;\n this.#columns = columns;\n this.#primaryKey = primaryKey;\n this.#primaryIndexSort = primaryKey.map(k => [k, 'asc']);\n const comparator = makeBoundComparator(this.#primaryIndexSort);\n this.#indexes.set(JSON.stringify(this.#primaryIndexSort), {\n comparator,\n data: primaryIndexData ?? 
new BTreeSet<Row>(comparator),\n usedBy: new Set(),\n });\n }\n\n get tableSchema() {\n return {\n name: this.#tableName,\n columns: this.#columns,\n primaryKey: this.#primaryKey,\n };\n }\n\n fork() {\n const primaryIndex = this.#getPrimaryIndex();\n return new MemorySource(\n this.#tableName,\n this.#columns,\n this.#primaryKey,\n primaryIndex.data.clone(),\n );\n }\n\n get data(): BTreeSet<Row> {\n return this.#getPrimaryIndex().data;\n }\n\n #getSchema(connection: Connection, unordered: boolean): SourceSchema {\n return {\n tableName: this.#tableName,\n columns: this.#columns,\n primaryKey: this.#primaryKey,\n sort: unordered ? undefined : connection.sort,\n system: 'client',\n relationships: {},\n isHidden: false,\n compareRows: connection.compareRows,\n };\n }\n\n connect(\n sort: Ordering | undefined,\n filters?: Condition,\n splitEditKeys?: Set<string>,\n ): SourceInput {\n const transformedFilters = transformFilters(filters);\n const unordered = sort === undefined;\n const internalSort = sort ?? this.#primaryIndexSort;\n\n const input: SourceInput = {\n getSchema: () => schema,\n fetch: req => this.#fetch(req, connection),\n setOutput: output => {\n connection.output = output;\n },\n destroy: () => {\n this.#disconnect(input);\n },\n fullyAppliedFilters: !transformedFilters.conditionsRemoved,\n };\n\n const connection: Connection = {\n input,\n output: undefined,\n sort: internalSort,\n splitEditKeys,\n compareRows: makeComparator(internalSort),\n filters: transformedFilters.filters\n ? 
{\n condition: transformedFilters.filters,\n predicate: createPredicate(transformedFilters.filters),\n }\n : undefined,\n lastPushedEpoch: 0,\n };\n const schema = this.#getSchema(connection, unordered);\n if (!unordered) {\n assertOrderingIncludesPK(internalSort, this.#primaryKey);\n }\n this.#connections.push(connection);\n return input;\n }\n\n #disconnect(input: Input): void {\n const idx = this.#connections.findIndex(c => c.input === input);\n assert(idx !== -1, 'Connection not found');\n this.#connections.splice(idx, 1);\n\n // TODO: We used to delete unused indexes here. But in common cases like\n // navigating into issue detail pages it caused a ton of constantly\n // building and destroying indexes.\n //\n // Perhaps some intelligent LRU or something is needed here but for now,\n // the opposite extreme of keeping all indexes for the lifetime of the\n // page seems better.\n }\n\n #getPrimaryIndex(): Index {\n const index = this.#indexes.get(JSON.stringify(this.#primaryIndexSort));\n assert(index, 'Primary index not found');\n return index;\n }\n\n #getOrCreateIndex(sort: Ordering, usedBy: Connection): Index {\n const key = JSON.stringify(sort);\n const index = this.#indexes.get(key);\n // Future optimization could use existing index if it's the same just sorted\n // in reverse of needed.\n if (index) {\n index.usedBy.add(usedBy);\n return index;\n }\n\n const comparator = makeBoundComparator(sort);\n\n // When creating these synchronously becomes a problem, a few options:\n // 1. Allow users to specify needed indexes up front\n // 2. Create indexes in a different thread asynchronously (this would require\n // modifying the BTree to be able to be passed over structured-clone, or using\n // a different library.)\n // 3. 
We could even theoretically do (2) on multiple threads and then merge the\n // results!\n const data = new BTreeSet<Row>(comparator);\n\n // I checked, there's no special path for adding data in bulk faster.\n // The constructor takes an array, but it just calls add/set over and over.\n for (const row of this.#getPrimaryIndex().data) {\n data.add(row);\n }\n\n const newIndex = {comparator, data, usedBy: new Set([usedBy])};\n this.#indexes.set(key, newIndex);\n return newIndex;\n }\n\n // For unit testing that we correctly clean up indexes.\n getIndexKeys(): string[] {\n return [...this.#indexes.keys()];\n }\n\n *#fetch(req: FetchRequest, conn: Connection): Stream<Node | 'yield'> {\n const requestedSort = must(conn.sort);\n const {compareRows} = conn;\n const connectionComparator = (r1: Row, r2: Row) =>\n compareRows(r1, r2) * (req.reverse ? -1 : 1);\n\n const pkConstraint = primaryKeyConstraintFromFilters(\n conn.filters?.condition,\n this.#primaryKey,\n );\n // The primary key constraint will be more limiting than the constraint\n // so swap out to that if it exists.\n const fetchOrPkConstraint = pkConstraint ?? req.constraint;\n\n // If there is a constraint, we need an index sorted by it first.\n const indexSort: OrderPart[] = [];\n if (fetchOrPkConstraint) {\n for (const key of Object.keys(fetchOrPkConstraint)) {\n indexSort.push([key, 'asc']);\n }\n }\n\n // For the special case of constraining by PK, we don't need to worry about\n // any requested sort since there can only be one result. Otherwise we also\n // need the index sorted by the requested sort.\n if (\n this.#primaryKey.length > 1 ||\n !fetchOrPkConstraint ||\n !constraintMatchesPrimaryKey(fetchOrPkConstraint, this.#primaryKey)\n ) {\n indexSort.push(...requestedSort);\n }\n\n const index = this.#getOrCreateIndex(indexSort, conn);\n const {data, comparator: compare} = index;\n const indexComparator = (r1: Row, r2: Row) =>\n compare(r1, r2) * (req.reverse ? 
-1 : 1);\n\n const startAt = req.start?.row;\n\n // If there is a constraint, we want to start our scan at the first row that\n // matches the constraint. But because the next OrderPart can be `desc`,\n // it's not true that {[constraintKey]: constraintValue} is the first\n // matching row. Because in that case, the other fields will all be\n // `undefined`, and in Zero `undefined` is always less than any other value.\n // So if the second OrderPart is descending then `undefined` values will\n // actually be the *last* row. We need a way to stay \"start at the first row\n // with this constraint value\". RowBound with the corresponding compareBound\n // comparator accomplishes this. The right thing is probably to teach the\n // btree library to support this concept.\n let scanStart: RowBound | undefined;\n\n if (fetchOrPkConstraint) {\n scanStart = {};\n for (const [key, dir] of indexSort) {\n if (hasOwn(fetchOrPkConstraint, key)) {\n scanStart[key] = fetchOrPkConstraint[key];\n } else {\n if (req.reverse) {\n scanStart[key] = dir === 'asc' ? maxValue : minValue;\n } else {\n scanStart[key] = dir === 'asc' ? minValue : maxValue;\n }\n }\n }\n } else {\n scanStart = startAt;\n }\n\n const rowsIterable = generateRows(data, scanStart, req.reverse);\n const withOverlay = generateWithOverlay(\n startAt,\n pkConstraint ? once(rowsIterable) : rowsIterable,\n // use `req.constraint` here and not `fetchOrPkConstraint` since `fetchOrPkConstraint` could be the\n // primary key constraint. The primary key constraint comes from filters and is acting as a filter\n // rather than as the fetch constraint.\n req.constraint,\n this.#overlay,\n conn.lastPushedEpoch,\n // Use indexComparator, generateWithOverlayInner has a subtle dependency\n // on this. 
Since generateWithConstraint is done after\n // generateWithOverlay, the generator consumed by generateWithOverlayInner\n // does not end when the constraint stops matching and so the final\n // check to yield an add overlay if not yet yielded is not reached.\n // However, using the indexComparator the add overlay will be less than\n // the first row that does not match the constraint, and so any\n // not yet yielded add overlay will be yielded when the first row\n // not matching the constraint is reached.\n indexComparator,\n conn.filters?.predicate,\n );\n\n const withConstraint = generateWithConstraint(\n skipYields(\n generateWithStart(withOverlay, req.start, connectionComparator),\n ),\n // we use `req.constraint` and not `fetchOrPkConstraint` here because we need to\n // AND the constraint with what could have been the primary key constraint\n req.constraint,\n );\n\n yield* conn.filters\n ? generateWithFilter(withConstraint, conn.filters.predicate)\n : withConstraint;\n }\n\n *push(change: SourceChange): Stream<'yield'> {\n for (const result of this.genPush(change)) {\n if (result === 'yield') {\n yield result;\n }\n }\n }\n\n *genPush(change: SourceChange) {\n const primaryIndex = this.#getPrimaryIndex();\n const {data} = primaryIndex;\n const exists = (row: Row) => data.has(row);\n const setOverlay = (o: Overlay | undefined) => (this.#overlay = o);\n const writeChange = (c: SourceChange) => this.#writeChange(c);\n yield* genPushAndWriteWithSplitEdit(\n this.#connections,\n change,\n exists,\n setOverlay,\n writeChange,\n () => ++this.#pushEpoch,\n );\n }\n\n #writeChange(change: SourceChange) {\n for (const {data} of this.#indexes.values()) {\n switch (change.type) {\n case 'add': {\n const added = data.add(change.row);\n // must succeed since we checked has() above.\n assert(\n added,\n 'MemorySource: add must succeed since row existence was already checked',\n );\n break;\n }\n case 'remove': {\n const removed = data.delete(change.row);\n // must 
succeed since we checked has() above.\n assert(\n removed,\n 'MemorySource: remove must succeed since row existence was already checked',\n );\n break;\n }\n case 'edit': {\n // TODO: We could see if the PK (form the index tree's perspective)\n // changed and if not we could use set.\n // We cannot just do `set` with the new value since the `oldRow` might\n // not map to the same entry as the new `row` in the index btree.\n const removed = data.delete(change.oldRow);\n // must succeed since we checked has() above.\n assert(\n removed,\n 'MemorySource: edit remove must succeed since row existence was already checked',\n );\n data.add(change.row);\n break;\n }\n default:\n unreachable(change);\n }\n }\n }\n}\n\nfunction* generateWithConstraint(\n it: Stream<Node>,\n constraint: Constraint | undefined,\n) {\n for (const node of it) {\n if (constraint && !constraintMatchesRow(constraint, node.row)) {\n break;\n }\n yield node;\n }\n}\n\nfunction* generateWithFilter(it: Stream<Node>, filter: (row: Row) => boolean) {\n for (const node of it) {\n if (filter(node.row)) {\n yield node;\n }\n }\n}\n\nexport function* genPushAndWriteWithSplitEdit(\n connections: readonly Connection[],\n change: SourceChange,\n exists: (row: Row) => boolean,\n setOverlay: (o: Overlay | undefined) => Overlay | undefined,\n writeChange: (c: SourceChange) => void,\n getNextEpoch: () => number,\n) {\n let shouldSplitEdit = false;\n if (change.type === 'edit') {\n for (const {splitEditKeys} of connections) {\n if (splitEditKeys) {\n for (const key of splitEditKeys) {\n if (!valuesEqual(change.row[key], change.oldRow[key])) {\n shouldSplitEdit = true;\n break;\n }\n }\n }\n }\n }\n\n if (change.type === 'edit' && shouldSplitEdit) {\n yield* genPushAndWrite(\n connections,\n {\n type: 'remove',\n row: change.oldRow,\n },\n exists,\n setOverlay,\n writeChange,\n getNextEpoch(),\n );\n yield* genPushAndWrite(\n connections,\n {\n type: 'add',\n row: change.row,\n },\n exists,\n setOverlay,\n 
writeChange,\n getNextEpoch(),\n );\n } else {\n yield* genPushAndWrite(\n connections,\n change,\n exists,\n setOverlay,\n writeChange,\n getNextEpoch(),\n );\n }\n}\n\nfunction* genPushAndWrite(\n connections: readonly Connection[],\n change: SourceChangeAdd | SourceChangeRemove | SourceChangeEdit,\n exists: (row: Row) => boolean,\n setOverlay: (o: Overlay | undefined) => Overlay | undefined,\n writeChange: (c: SourceChange) => void,\n pushEpoch: number,\n) {\n for (const x of genPush(connections, change, exists, setOverlay, pushEpoch)) {\n yield x;\n }\n writeChange(change);\n}\n\nfunction* genPush(\n connections: readonly Connection[],\n change: SourceChange,\n exists: (row: Row) => boolean,\n setOverlay: (o: Overlay | undefined) => void,\n pushEpoch: number,\n) {\n switch (change.type) {\n case 'add':\n assert(\n !exists(change.row),\n () => `Row already exists ${stringify(change)}`,\n );\n break;\n case 'remove':\n assert(exists(change.row), () => `Row not found ${stringify(change)}`);\n break;\n case 'edit':\n assert(exists(change.oldRow), () => `Row not found ${stringify(change)}`);\n break;\n default:\n unreachable(change);\n }\n\n for (const conn of connections) {\n const {output, filters, input} = conn;\n if (output) {\n conn.lastPushedEpoch = pushEpoch;\n setOverlay({epoch: pushEpoch, change});\n const outputChange: Change =\n change.type === 'edit'\n ? 
{\n type: change.type,\n oldNode: {\n row: change.oldRow,\n relationships: {},\n },\n node: {\n row: change.row,\n relationships: {},\n },\n }\n : {\n type: change.type,\n node: {\n row: change.row,\n relationships: {},\n },\n };\n yield* filterPush(outputChange, output, input, filters?.predicate);\n yield undefined;\n }\n }\n\n setOverlay(undefined);\n}\n\nexport function* generateWithStart(\n nodes: Iterable<Node | 'yield'>,\n start: Start | undefined,\n compare: (r1: Row, r2: Row) => number,\n): Stream<Node | 'yield'> {\n if (!start) {\n yield* nodes;\n return;\n }\n let started = false;\n for (const node of nodes) {\n if (node === 'yield') {\n yield node;\n continue;\n }\n if (!started) {\n if (start.basis === 'at') {\n if (compare(node.row, start.row) >= 0) {\n started = true;\n }\n } else if (start.basis === 'after') {\n if (compare(node.row, start.row) > 0) {\n started = true;\n }\n }\n }\n if (started) {\n yield node;\n }\n }\n}\n\n/**\n * Takes an iterator and overlay.\n * Splices the overlay into the iterator at the correct position.\n *\n * @param startAt - if there is a lower bound to the stream. 
If the lower bound of the stream\n * is above the overlay, the overlay will be skipped.\n * @param rows - the stream into which the overlay should be spliced\n * @param constraint - constraint that was applied to the rowIterator and should\n * also be applied to the overlay.\n * @param overlay - the overlay values to splice in\n * @param compare - the comparator to use to find the position for the overlay\n */\nexport function* generateWithOverlay(\n startAt: Row | undefined,\n rows: Iterable<Row>,\n constraint: Constraint | undefined,\n overlay: Overlay | undefined,\n lastPushedEpoch: number,\n compare: Comparator,\n filterPredicate?: (row: Row) => boolean | undefined,\n) {\n let overlayToApply: Overlay | undefined = undefined;\n if (overlay && lastPushedEpoch >= overlay.epoch) {\n overlayToApply = overlay;\n }\n const overlays = computeOverlays(\n startAt,\n constraint,\n overlayToApply,\n compare,\n filterPredicate,\n );\n yield* generateWithOverlayInner(rows, overlays, compare);\n}\n\nfunction computeOverlays(\n startAt: Row | undefined,\n constraint: Constraint | undefined,\n overlay: Overlay | undefined,\n compare: Comparator,\n filterPredicate?: (row: Row) => boolean | undefined,\n): Overlays {\n let overlays: Overlays = {\n add: undefined,\n remove: undefined,\n };\n switch (overlay?.change.type) {\n case 'add':\n overlays = {\n add: overlay.change.row,\n remove: undefined,\n };\n break;\n case 'remove':\n overlays = {\n add: undefined,\n remove: overlay.change.row,\n };\n break;\n case 'edit':\n overlays = {\n add: overlay.change.row,\n remove: overlay.change.oldRow,\n };\n break;\n }\n\n if (startAt) {\n overlays = overlaysForStartAt(overlays, startAt, compare);\n }\n\n if (constraint) {\n overlays = overlaysForConstraint(overlays, constraint);\n }\n\n if (filterPredicate) {\n overlays = overlaysForFilterPredicate(overlays, filterPredicate);\n }\n\n return overlays;\n}\n\nexport {overlaysForStartAt as overlaysForStartAtForTest};\n\nfunction 
overlaysForStartAt(\n {add, remove}: Overlays,\n startAt: Row,\n compare: Comparator,\n): Overlays {\n const undefinedIfBeforeStartAt = (row: Row | undefined) =>\n row === undefined || compare(row, startAt) < 0 ? undefined : row;\n return {\n add: undefinedIfBeforeStartAt(add),\n remove: undefinedIfBeforeStartAt(remove),\n };\n}\n\nexport {overlaysForConstraint as overlaysForConstraintForTest};\n\nfunction overlaysForConstraint(\n {add, remove}: Overlays,\n constraint: Constraint,\n): Overlays {\n const undefinedIfDoesntMatchConstraint = (row: Row | undefined) =>\n row === undefined || !constraintMatchesRow(constraint, row)\n ? undefined\n : row;\n\n return {\n add: undefinedIfDoesntMatchConstraint(add),\n remove: undefinedIfDoesntMatchConstraint(remove),\n };\n}\n\nfunction overlaysForFilterPredicate(\n {add, remove}: Overlays,\n filterPredicate: (row: Row) => boolean | undefined,\n): Overlays {\n const undefinedIfDoesntMatchFilter = (row: Row | undefined) =>\n row === undefined || !filterPredicate(row) ? 
undefined : row;\n\n return {\n add: undefinedIfDoesntMatchFilter(add),\n remove: undefinedIfDoesntMatchFilter(remove),\n };\n}\n\nexport function* generateWithOverlayInner(\n rowIterator: Iterable<Row>,\n overlays: Overlays,\n compare: (r1: Row, r2: Row) => number,\n) {\n let addOverlayYielded = false;\n let removeOverlaySkipped = false;\n for (const row of rowIterator) {\n if (!addOverlayYielded && overlays.add) {\n const cmp = compare(overlays.add, row);\n if (cmp < 0) {\n addOverlayYielded = true;\n yield {row: overlays.add, relationships: {}};\n }\n }\n\n if (!removeOverlaySkipped && overlays.remove) {\n const cmp = compare(overlays.remove, row);\n if (cmp === 0) {\n removeOverlaySkipped = true;\n continue;\n }\n }\n yield {row, relationships: {}};\n }\n\n if (!addOverlayYielded && overlays.add) {\n yield {row: overlays.add, relationships: {}};\n }\n}\n\n/**\n * Like {@link generateWithOverlay} but for unordered streams.\n * No `startAt` or comparator needed. Injects remove/old-edit rows eagerly\n * at the start, and suppresses add/new-edit rows inline by PK match.\n */\nexport function* generateWithOverlayUnordered(\n rows: Iterable<Row>,\n constraint: Constraint | undefined,\n overlay: Overlay | undefined,\n lastPushedEpoch: number,\n primaryKey: PrimaryKey,\n filterPredicate?: ((row: Row) => boolean) | undefined,\n) {\n let overlayToApply: Overlay | undefined = undefined;\n if (overlay && lastPushedEpoch >= overlay.epoch) {\n overlayToApply = overlay;\n }\n let overlays: Overlays = {add: undefined, remove: undefined};\n switch (overlayToApply?.change.type) {\n case 'add':\n overlays = {add: overlayToApply.change.row, remove: undefined};\n break;\n case 'remove':\n overlays = {add: undefined, remove: overlayToApply.change.row};\n break;\n case 'edit':\n overlays = {\n add: overlayToApply.change.row,\n remove: overlayToApply.change.oldRow,\n };\n break;\n }\n if (constraint) {\n overlays = overlaysForConstraint(overlays, constraint);\n }\n if 
(filterPredicate) {\n overlays = overlaysForFilterPredicate(overlays, filterPredicate);\n }\n yield* generateWithOverlayInnerUnordered(rows, overlays, primaryKey);\n}\n\nexport function* generateWithOverlayInnerUnordered(\n rowIterator: Iterable<Row>,\n overlays: Overlays,\n primaryKey: PrimaryKey,\n) {\n // Eager inject: yield the add overlay at the start (row not yet in storage)\n if (overlays.add) {\n yield {row: overlays.add, relationships: {}};\n }\n // Stream with inline suppress: skip the remove overlay (row still in storage)\n let removeSkipped = false;\n for (const row of rowIterator) {\n if (\n !removeSkipped &&\n overlays.remove &&\n rowMatchesPK(overlays.remove, row, primaryKey)\n ) {\n removeSkipped = true;\n continue;\n }\n yield {row, relationships: {}};\n }\n}\n\nfunction rowMatchesPK(a: Row, b: Row, primaryKey: PrimaryKey): boolean {\n for (const key of primaryKey) {\n if (!valuesEqual(a[key], b[key])) {\n return false;\n }\n }\n return true;\n}\n\n/**\n * A location to begin scanning an index from. Can either be a specific value\n * or the min or max possible value for the type. This is used to start a scan\n * at the beginning of the rows matching a constraint.\n */\ntype Bound = Value | MinValue | MaxValue;\ntype RowBound = Record<string, Bound>;\nconst minValue = Symbol('min-value');\ntype MinValue = typeof minValue;\nconst maxValue = Symbol('max-value');\ntype MaxValue = typeof maxValue;\n\nfunction makeBoundComparator(sort: Ordering) {\n return (a: RowBound, b: RowBound) => {\n // Hot! Do not use destructuring\n for (const entry of sort) {\n const key = entry[0];\n const cmp = compareBounds(a[key], b[key]);\n if (cmp !== 0) {\n return entry[1] === 'asc' ? 
cmp : -cmp;\n }\n }\n return 0;\n };\n}\n\nfunction compareBounds(a: Bound, b: Bound): number {\n if (a === b) {\n return 0;\n }\n if (a === minValue) {\n return -1;\n }\n if (b === minValue) {\n return 1;\n }\n if (a === maxValue) {\n return 1;\n }\n if (b === maxValue) {\n return -1;\n }\n return compareValues(a, b);\n}\n\nfunction* generateRows(\n data: BTreeSet<Row>,\n scanStart: RowBound | undefined,\n reverse: boolean | undefined,\n) {\n yield* data[reverse ? 'valuesFromReversed' : 'valuesFrom'](\n scanStart as Row | undefined,\n );\n}\n\nexport function stringify(change: SourceChange) {\n return JSON.stringify(change, (_, v) =>\n typeof v === 'bigint' ? v.toString() : v,\n );\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;AA4FA,IAAa,eAAb,MAAa,aAA+B;CAC1C;CACA;CACA;CACA;CACA,2BAAwC,IAAI,KAAK;CACjD,eAAsC,EAAE;CAExC;CACA,aAAa;CAEb,YACE,WACA,SACA,YACA,kBACA;AACA,QAAA,YAAkB;AAClB,QAAA,UAAgB;AAChB,QAAA,aAAmB;AACnB,QAAA,mBAAyB,WAAW,KAAI,MAAK,CAAC,GAAG,MAAM,CAAC;EACxD,MAAM,aAAa,oBAAoB,MAAA,iBAAuB;AAC9D,QAAA,QAAc,IAAI,KAAK,UAAU,MAAA,iBAAuB,EAAE;GACxD;GACA,MAAM,oBAAoB,IAAI,SAAc,WAAW;GACvD,wBAAQ,IAAI,KAAK;GAClB,CAAC;;CAGJ,IAAI,cAAc;AAChB,SAAO;GACL,MAAM,MAAA;GACN,SAAS,MAAA;GACT,YAAY,MAAA;GACb;;CAGH,OAAO;EACL,MAAM,eAAe,MAAA,iBAAuB;AAC5C,SAAO,IAAI,aACT,MAAA,WACA,MAAA,SACA,MAAA,YACA,aAAa,KAAK,OAAO,CAC1B;;CAGH,IAAI,OAAsB;AACxB,SAAO,MAAA,iBAAuB,CAAC;;CAGjC,WAAW,YAAwB,WAAkC;AACnE,SAAO;GACL,WAAW,MAAA;GACX,SAAS,MAAA;GACT,YAAY,MAAA;GACZ,MAAM,YAAY,KAAA,IAAY,WAAW;GACzC,QAAQ;GACR,eAAe,EAAE;GACjB,UAAU;GACV,aAAa,WAAW;GACzB;;CAGH,QACE,MACA,SACA,eACa;EACb,MAAM,qBAAqB,iBAAiB,QAAQ;EACpD,MAAM,YAAY,SAAS,KAAA;EAC3B,MAAM,eAAe,QAAQ,MAAA;EAE7B,MAAM,QAAqB;GACzB,iBAAiB;GACjB,QAAO,QAAO,MAAA,MAAY,KAAK,WAAW;GAC1C,YAAW,WAAU;AACnB,eAAW,SAAS;;GAEtB,eAAe;AACb,UAAA,WAAiB,MAAM;;GAEzB,qBAAqB,CAAC,mBAAmB;GAC1C;EAED,MAAM,aAAyB;GAC7B;GACA,QAAQ,KAAA;GACR,MAAM;GACN;GACA,aAAa,eAAe,aAAa;GACzC,SAAS,mBAAmB,UACxB;IACE,WAAW,mBAAmB;IAC9B,WAAW,gBAAgB,mBAAmB,QAAQ;IACvD,GACD,KAAA;GACJ,iBAAiB;GAClB;EACD,MAAM,SAAS,MAAA,UAAgB,YAAY,U
AAU;AACrD,MAAI,CAAC,UACH,0BAAyB,cAAc,MAAA,WAAiB;AAE1D,QAAA,YAAkB,KAAK,WAAW;AAClC,SAAO;;CAGT,YAAY,OAAoB;EAC9B,MAAM,MAAM,MAAA,YAAkB,WAAU,MAAK,EAAE,UAAU,MAAM;AAC/D,SAAO,QAAQ,IAAI,uBAAuB;AAC1C,QAAA,YAAkB,OAAO,KAAK,EAAE;;CAWlC,mBAA0B;EACxB,MAAM,QAAQ,MAAA,QAAc,IAAI,KAAK,UAAU,MAAA,iBAAuB,CAAC;AACvE,SAAO,OAAO,0BAA0B;AACxC,SAAO;;CAGT,kBAAkB,MAAgB,QAA2B;EAC3D,MAAM,MAAM,KAAK,UAAU,KAAK;EAChC,MAAM,QAAQ,MAAA,QAAc,IAAI,IAAI;AAGpC,MAAI,OAAO;AACT,SAAM,OAAO,IAAI,OAAO;AACxB,UAAO;;EAGT,MAAM,aAAa,oBAAoB,KAAK;EAS5C,MAAM,OAAO,IAAI,SAAc,WAAW;AAI1C,OAAK,MAAM,OAAO,MAAA,iBAAuB,CAAC,KACxC,MAAK,IAAI,IAAI;EAGf,MAAM,WAAW;GAAC;GAAY;GAAM,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC;GAAC;AAC9D,QAAA,QAAc,IAAI,KAAK,SAAS;AAChC,SAAO;;CAIT,eAAyB;AACvB,SAAO,CAAC,GAAG,MAAA,QAAc,MAAM,CAAC;;CAGlC,EAAA,MAAQ,KAAmB,MAA0C;EACnE,MAAM,gBAAgB,KAAK,KAAK,KAAK;EACrC,MAAM,EAAC,gBAAe;EACtB,MAAM,wBAAwB,IAAS,OACrC,YAAY,IAAI,GAAG,IAAI,IAAI,UAAU,KAAK;EAE5C,MAAM,eAAe,gCACnB,KAAK,SAAS,WACd,MAAA,WACD;EAGD,MAAM,sBAAsB,gBAAgB,IAAI;EAGhD,MAAM,YAAyB,EAAE;AACjC,MAAI,oBACF,MAAK,MAAM,OAAO,OAAO,KAAK,oBAAoB,CAChD,WAAU,KAAK,CAAC,KAAK,MAAM,CAAC;AAOhC,MACE,MAAA,WAAiB,SAAS,KAC1B,CAAC,uBACD,CAAC,4BAA4B,qBAAqB,MAAA,WAAiB,CAEnE,WAAU,KAAK,GAAG,cAAc;EAIlC,MAAM,EAAC,MAAM,YAAY,YADX,MAAA,iBAAuB,WAAW,KAAK;EAErD,MAAM,mBAAmB,IAAS,OAChC,QAAQ,IAAI,GAAG,IAAI,IAAI,UAAU,KAAK;EAExC,MAAM,UAAU,IAAI,OAAO;EAY3B,IAAI;AAEJ,MAAI,qBAAqB;AACvB,eAAY,EAAE;AACd,QAAK,MAAM,CAAC,KAAK,QAAQ,UACvB,KAAI,OAAO,qBAAqB,IAAI,CAClC,WAAU,OAAO,oBAAoB;YAEjC,IAAI,QACN,WAAU,OAAO,QAAQ,QAAQ,WAAW;OAE5C,WAAU,OAAO,QAAQ,QAAQ,WAAW;QAKlD,aAAY;EAGd,MAAM,eAAe,aAAa,MAAM,WAAW,IAAI,QAAQ;EAuB/D,MAAM,iBAAiB,uBACrB,WACE,kBAxBgB,oBAClB,SACA,eAAe,KAAK,aAAa,GAAG,cAIpC,IAAI,YACJ,MAAA,SACA,KAAK,iBAUL,iBACA,KAAK,SAAS,UACf,EAIkC,IAAI,OAAO,qBAAqB,CAChE,EAGD,IAAI,WACL;AAED,SAAO,KAAK,UACR,mBAAmB,gBAAgB,KAAK,QAAQ,UAAU,GAC1D;;CAGN,CAAC,KAAK,QAAuC;AAC3C,OAAK,MAAM,UAAU,KAAK,QAAQ,OAAO,CACvC,KAAI,WAAW,QACb,OAAM;;CAKZ,CAAC,QAAQ,QAAsB;EAE7B,MAAM,EAAC,SADc,MAAA,iBAAuB;EAE5C,MAAM,UAAU,QAAa,KAAK,IAAI,IAAI;EAC1C,MAAM,cAAc,MAA4B,MAAA,UAAg
B;EAChE,MAAM,eAAe,MAAoB,MAAA,YAAkB,EAAE;AAC7D,SAAO,6BACL,MAAA,aACA,QACA,QACA,YACA,mBACM,EAAE,MAAA,UACT;;CAGH,aAAa,QAAsB;AACjC,OAAK,MAAM,EAAC,UAAS,MAAA,QAAc,QAAQ,CACzC,SAAQ,OAAO,MAAf;GACE,KAAK;AAGH,WAFc,KAAK,IAAI,OAAO,IAAI,EAIhC,yEACD;AACD;GAEF,KAAK;AAGH,WAFgB,KAAK,OAAO,OAAO,IAAI,EAIrC,4EACD;AACD;GAEF,KAAK;AAOH,WAFgB,KAAK,OAAO,OAAO,OAAO,EAIxC,iFACD;AACD,SAAK,IAAI,OAAO,IAAI;AACpB;GAEF,QACE,aAAY,OAAO;;;;AAM7B,UAAU,uBACR,IACA,YACA;AACA,MAAK,MAAM,QAAQ,IAAI;AACrB,MAAI,cAAc,CAAC,qBAAqB,YAAY,KAAK,IAAI,CAC3D;AAEF,QAAM;;;AAIV,UAAU,mBAAmB,IAAkB,QAA+B;AAC5E,MAAK,MAAM,QAAQ,GACjB,KAAI,OAAO,KAAK,IAAI,CAClB,OAAM;;AAKZ,UAAiB,6BACf,aACA,QACA,QACA,YACA,aACA,cACA;CACA,IAAI,kBAAkB;AACtB,KAAI,OAAO,SAAS;OACb,MAAM,EAAC,mBAAkB,YAC5B,KAAI;QACG,MAAM,OAAO,cAChB,KAAI,CAAC,YAAY,OAAO,IAAI,MAAM,OAAO,OAAO,KAAK,EAAE;AACrD,sBAAkB;AAClB;;;;AAOV,KAAI,OAAO,SAAS,UAAU,iBAAiB;AAC7C,SAAO,gBACL,aACA;GACE,MAAM;GACN,KAAK,OAAO;GACb,EACD,QACA,YACA,aACA,cAAc,CACf;AACD,SAAO,gBACL,aACA;GACE,MAAM;GACN,KAAK,OAAO;GACb,EACD,QACA,YACA,aACA,cAAc,CACf;OAED,QAAO,gBACL,aACA,QACA,QACA,YACA,aACA,cAAc,CACf;;AAIL,UAAU,gBACR,aACA,QACA,QACA,YACA,aACA,WACA;AACA,MAAK,MAAM,KAAK,QAAQ,aAAa,QAAQ,QAAQ,YAAY,UAAU,CACzE,OAAM;AAER,aAAY,OAAO;;AAGrB,UAAU,QACR,aACA,QACA,QACA,YACA,WACA;AACA,SAAQ,OAAO,MAAf;EACE,KAAK;AACH,UACE,CAAC,OAAO,OAAO,IAAI,QACb,sBAAsB,UAAU,OAAO,GAC9C;AACD;EACF,KAAK;AACH,UAAO,OAAO,OAAO,IAAI,QAAQ,iBAAiB,UAAU,OAAO,GAAG;AACtE;EACF,KAAK;AACH,UAAO,OAAO,OAAO,OAAO,QAAQ,iBAAiB,UAAU,OAAO,GAAG;AACzE;EACF,QACE,aAAY,OAAO;;AAGvB,MAAK,MAAM,QAAQ,aAAa;EAC9B,MAAM,EAAC,QAAQ,SAAS,UAAS;AACjC,MAAI,QAAQ;AACV,QAAK,kBAAkB;AACvB,cAAW;IAAC,OAAO;IAAW;IAAO,CAAC;AAqBtC,UAAO,WAnBL,OAAO,SAAS,SACZ;IACE,MAAM,OAAO;IACb,SAAS;KACP,KAAK,OAAO;KACZ,eAAe,EAAE;KAClB;IACD,MAAM;KACJ,KAAK,OAAO;KACZ,eAAe,EAAE;KAClB;IACF,GACD;IACE,MAAM,OAAO;IACb,MAAM;KACJ,KAAK,OAAO;KACZ,eAAe,EAAE;KAClB;IACF,EACyB,QAAQ,OAAO,SAAS,UAAU;AAClE,SAAM,KAAA;;;AAIV,YAAW,KAAA,EAAU;;AAGvB,UAAiB,kBACf,OACA,OACA,SACwB;AACxB,KAAI,CAAC,OAAO;AACV,SAAO;AACP;;CAEF,IAAI,UAAU;AACd,MAAK,MAAM,QAAQ,OAAO;AACxB
,MAAI,SAAS,SAAS;AACpB,SAAM;AACN;;AAEF,MAAI,CAAC;OACC,MAAM,UAAU;QACd,QAAQ,KAAK,KAAK,MAAM,IAAI,IAAI,EAClC,WAAU;cAEH,MAAM,UAAU;QACrB,QAAQ,KAAK,KAAK,MAAM,IAAI,GAAG,EACjC,WAAU;;;AAIhB,MAAI,QACF,OAAM;;;;;;;;;;;;;;;AAiBZ,UAAiB,oBACf,SACA,MACA,YACA,SACA,iBACA,SACA,iBACA;CACA,IAAI,iBAAsC,KAAA;AAC1C,KAAI,WAAW,mBAAmB,QAAQ,MACxC,kBAAiB;AASnB,QAAO,yBAAyB,MAPf,gBACf,SACA,YACA,gBACA,SACA,gBACD,EAC+C,QAAQ;;AAG1D,SAAS,gBACP,SACA,YACA,SACA,SACA,iBACU;CACV,IAAI,WAAqB;EACvB,KAAK,KAAA;EACL,QAAQ,KAAA;EACT;AACD,SAAQ,SAAS,OAAO,MAAxB;EACE,KAAK;AACH,cAAW;IACT,KAAK,QAAQ,OAAO;IACpB,QAAQ,KAAA;IACT;AACD;EACF,KAAK;AACH,cAAW;IACT,KAAK,KAAA;IACL,QAAQ,QAAQ,OAAO;IACxB;AACD;EACF,KAAK;AACH,cAAW;IACT,KAAK,QAAQ,OAAO;IACpB,QAAQ,QAAQ,OAAO;IACxB;AACD;;AAGJ,KAAI,QACF,YAAW,mBAAmB,UAAU,SAAS,QAAQ;AAG3D,KAAI,WACF,YAAW,sBAAsB,UAAU,WAAW;AAGxD,KAAI,gBACF,YAAW,2BAA2B,UAAU,gBAAgB;AAGlE,QAAO;;AAKT,SAAS,mBACP,EAAC,KAAK,UACN,SACA,SACU;CACV,MAAM,4BAA4B,QAChC,QAAQ,KAAA,KAAa,QAAQ,KAAK,QAAQ,GAAG,IAAI,KAAA,IAAY;AAC/D,QAAO;EACL,KAAK,yBAAyB,IAAI;EAClC,QAAQ,yBAAyB,OAAO;EACzC;;AAKH,SAAS,sBACP,EAAC,KAAK,UACN,YACU;CACV,MAAM,oCAAoC,QACxC,QAAQ,KAAA,KAAa,CAAC,qBAAqB,YAAY,IAAI,GACvD,KAAA,IACA;AAEN,QAAO;EACL,KAAK,iCAAiC,IAAI;EAC1C,QAAQ,iCAAiC,OAAO;EACjD;;AAGH,SAAS,2BACP,EAAC,KAAK,UACN,iBACU;CACV,MAAM,gCAAgC,QACpC,QAAQ,KAAA,KAAa,CAAC,gBAAgB,IAAI,GAAG,KAAA,IAAY;AAE3D,QAAO;EACL,KAAK,6BAA6B,IAAI;EACtC,QAAQ,6BAA6B,OAAO;EAC7C;;AAGH,UAAiB,yBACf,aACA,UACA,SACA;CACA,IAAI,oBAAoB;CACxB,IAAI,uBAAuB;AAC3B,MAAK,MAAM,OAAO,aAAa;AAC7B,MAAI,CAAC,qBAAqB,SAAS;OACrB,QAAQ,SAAS,KAAK,IAAI,GAC5B,GAAG;AACX,wBAAoB;AACpB,UAAM;KAAC,KAAK,SAAS;KAAK,eAAe,EAAE;KAAC;;;AAIhD,MAAI,CAAC,wBAAwB,SAAS;OACxB,QAAQ,SAAS,QAAQ,IAAI,KAC7B,GAAG;AACb,2BAAuB;AACvB;;;AAGJ,QAAM;GAAC;GAAK,eAAe,EAAE;GAAC;;AAGhC,KAAI,CAAC,qBAAqB,SAAS,IACjC,OAAM;EAAC,KAAK,SAAS;EAAK,eAAe,EAAE;EAAC;;;;;;;AAShD,UAAiB,6BACf,MACA,YACA,SACA,iBACA,YACA,iBACA;CACA,IAAI,iBAAsC,KAAA;AAC1C,KAAI,WAAW,mBAAmB,QAAQ,MACxC,kBAAiB;CAEnB,IAAI,WAAqB;EAAC,KAAK,KAAA;EAAW,QAAQ,KAAA;EAAU;AAC5D,SAAQ,gBAAgB,OAAO,MAA/B;EACE,KAAK;AAC
H,cAAW;IAAC,KAAK,eAAe,OAAO;IAAK,QAAQ,KAAA;IAAU;AAC9D;EACF,KAAK;AACH,cAAW;IAAC,KAAK,KAAA;IAAW,QAAQ,eAAe,OAAO;IAAI;AAC9D;EACF,KAAK;AACH,cAAW;IACT,KAAK,eAAe,OAAO;IAC3B,QAAQ,eAAe,OAAO;IAC/B;AACD;;AAEJ,KAAI,WACF,YAAW,sBAAsB,UAAU,WAAW;AAExD,KAAI,gBACF,YAAW,2BAA2B,UAAU,gBAAgB;AAElE,QAAO,kCAAkC,MAAM,UAAU,WAAW;;AAGtE,UAAiB,kCACf,aACA,UACA,YACA;AAEA,KAAI,SAAS,IACX,OAAM;EAAC,KAAK,SAAS;EAAK,eAAe,EAAE;EAAC;CAG9C,IAAI,gBAAgB;AACpB,MAAK,MAAM,OAAO,aAAa;AAC7B,MACE,CAAC,iBACD,SAAS,UACT,aAAa,SAAS,QAAQ,KAAK,WAAW,EAC9C;AACA,mBAAgB;AAChB;;AAEF,QAAM;GAAC;GAAK,eAAe,EAAE;GAAC;;;AAIlC,SAAS,aAAa,GAAQ,GAAQ,YAAiC;AACrE,MAAK,MAAM,OAAO,WAChB,KAAI,CAAC,YAAY,EAAE,MAAM,EAAE,KAAK,CAC9B,QAAO;AAGX,QAAO;;AAUT,IAAM,WAAW,OAAO,YAAY;AAEpC,IAAM,WAAW,OAAO,YAAY;AAGpC,SAAS,oBAAoB,MAAgB;AAC3C,SAAQ,GAAa,MAAgB;AAEnC,OAAK,MAAM,SAAS,MAAM;GACxB,MAAM,MAAM,MAAM;GAClB,MAAM,MAAM,cAAc,EAAE,MAAM,EAAE,KAAK;AACzC,OAAI,QAAQ,EACV,QAAO,MAAM,OAAO,QAAQ,MAAM,CAAC;;AAGvC,SAAO;;;AAIX,SAAS,cAAc,GAAU,GAAkB;AACjD,KAAI,MAAM,EACR,QAAO;AAET,KAAI,MAAM,SACR,QAAO;AAET,KAAI,MAAM,SACR,QAAO;AAET,KAAI,MAAM,SACR,QAAO;AAET,KAAI,MAAM,SACR,QAAO;AAET,QAAO,cAAc,GAAG,EAAE;;AAG5B,UAAU,aACR,MACA,WACA,SACA;AACA,QAAO,KAAK,UAAU,uBAAuB,cAC3C,UACD;;AAGH,SAAgB,UAAU,QAAsB;AAC9C,QAAO,KAAK,UAAU,SAAS,GAAG,MAChC,OAAO,MAAM,WAAW,EAAE,UAAU,GAAG,EACxC"}
1
+ {"version":3,"file":"memory-source.js","names":["#tableName","#columns","#primaryKey","#primaryIndexSort","#indexes","#connections","#getPrimaryIndex","#fetch","#disconnect","#getSchema","#getOrCreateIndex","#overlay","#writeChange","#pushEpoch"],"sources":["../../../../../zql/src/ivm/memory-source.ts"],"sourcesContent":["import {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport {BTreeSet} from '../../../shared/src/btree-set.ts';\nimport {hasOwn} from '../../../shared/src/has-own.ts';\nimport {once} from '../../../shared/src/iterables.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport type {\n Condition,\n Ordering,\n OrderPart,\n} from '../../../zero-protocol/src/ast.ts';\nimport type {Row, Value} from '../../../zero-protocol/src/data.ts';\nimport type {PrimaryKey} from '../../../zero-protocol/src/primary-key.ts';\nimport type {SchemaValue} from '../../../zero-types/src/schema-value.ts';\nimport type {DebugDelegate} from '../builder/debug-delegate.ts';\nimport {\n createPredicate,\n transformFilters,\n type NoSubqueryCondition,\n} from '../builder/filter.ts';\nimport {assertOrderingIncludesPK} from '../query/complete-ordering.ts';\nimport type {Change} from './change.ts';\nimport {\n constraintMatchesPrimaryKey,\n constraintMatchesRow,\n primaryKeyConstraintFromFilters,\n type Constraint,\n} from './constraint.ts';\nimport {\n compareValues,\n makeComparator,\n valuesEqual,\n type Comparator,\n type Node,\n} from './data.ts';\nimport {filterPush} from './filter-push.ts';\nimport {\n skipYields,\n type FetchRequest,\n type Input,\n type Output,\n type Start,\n} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {\n Source,\n SourceChange,\n SourceChangeAdd,\n SourceChangeEdit,\n SourceChangeRemove,\n SourceInput,\n} from './source.ts';\nimport type {Stream} from './stream.ts';\n\nexport type Overlay = {\n epoch: number;\n change: SourceChange;\n};\n\nexport type Overlays = {\n add: Row | undefined;\n 
remove: Row | undefined;\n};\n\ntype Index = {\n comparator: Comparator;\n data: BTreeSet<Row>;\n usedBy: Set<Connection>;\n};\n\nexport type Connection = {\n input: Input;\n output: Output | undefined;\n sort?: Ordering | undefined;\n splitEditKeys: Set<string> | undefined;\n compareRows: Comparator;\n filters:\n | {\n condition: NoSubqueryCondition;\n predicate: (row: Row) => boolean;\n }\n | undefined;\n readonly debug?: DebugDelegate | undefined;\n lastPushedEpoch: number;\n};\n\n/**\n * A `MemorySource` is a source that provides data to the pipeline from an\n * in-memory data source.\n *\n * This data is kept in sorted order as downstream pipelines will always expect\n * the data they receive from `pull` to be in sorted order.\n */\nexport class MemorySource implements Source {\n readonly #tableName: string;\n readonly #columns: Record<string, SchemaValue>;\n readonly #primaryKey: PrimaryKey;\n readonly #primaryIndexSort: Ordering;\n readonly #indexes: Map<string, Index> = new Map();\n readonly #connections: Connection[] = [];\n\n #overlay: Overlay | undefined;\n #pushEpoch = 0;\n\n constructor(\n tableName: string,\n columns: Record<string, SchemaValue>,\n primaryKey: PrimaryKey,\n primaryIndexData?: BTreeSet<Row>,\n ) {\n this.#tableName = tableName;\n this.#columns = columns;\n this.#primaryKey = primaryKey;\n this.#primaryIndexSort = primaryKey.map(k => [k, 'asc']);\n const comparator = makeBoundComparator(this.#primaryIndexSort);\n this.#indexes.set(JSON.stringify(this.#primaryIndexSort), {\n comparator,\n data: primaryIndexData ?? 
new BTreeSet<Row>(comparator),\n usedBy: new Set(),\n });\n }\n\n get tableSchema() {\n return {\n name: this.#tableName,\n columns: this.#columns,\n primaryKey: this.#primaryKey,\n };\n }\n\n fork() {\n const primaryIndex = this.#getPrimaryIndex();\n return new MemorySource(\n this.#tableName,\n this.#columns,\n this.#primaryKey,\n primaryIndex.data.clone(),\n );\n }\n\n get data(): BTreeSet<Row> {\n return this.#getPrimaryIndex().data;\n }\n\n #getSchema(connection: Connection, unordered: boolean): SourceSchema {\n return {\n tableName: this.#tableName,\n columns: this.#columns,\n primaryKey: this.#primaryKey,\n sort: unordered ? undefined : connection.sort,\n system: 'client',\n relationships: {},\n isHidden: false,\n compareRows: connection.compareRows,\n };\n }\n\n connect(\n sort: Ordering | undefined,\n filters?: Condition,\n splitEditKeys?: Set<string>,\n ): SourceInput {\n const transformedFilters = transformFilters(filters);\n const unordered = sort === undefined;\n const internalSort = sort ?? this.#primaryIndexSort;\n\n const input: SourceInput = {\n getSchema: () => schema,\n fetch: req => this.#fetch(req, connection),\n setOutput: output => {\n connection.output = output;\n },\n destroy: () => {\n this.#disconnect(input);\n },\n fullyAppliedFilters: !transformedFilters.conditionsRemoved,\n };\n\n const connection: Connection = {\n input,\n output: undefined,\n sort: internalSort,\n splitEditKeys,\n compareRows: makeComparator(internalSort),\n filters: transformedFilters.filters\n ? 
{\n condition: transformedFilters.filters,\n predicate: createPredicate(transformedFilters.filters),\n }\n : undefined,\n lastPushedEpoch: 0,\n };\n const schema = this.#getSchema(connection, unordered);\n if (!unordered) {\n assertOrderingIncludesPK(internalSort, this.#primaryKey);\n }\n this.#connections.push(connection);\n return input;\n }\n\n #disconnect(input: Input): void {\n const idx = this.#connections.findIndex(c => c.input === input);\n assert(idx !== -1, 'Connection not found');\n this.#connections.splice(idx, 1);\n\n // TODO: We used to delete unused indexes here. But in common cases like\n // navigating into issue detail pages it caused a ton of constantly\n // building and destroying indexes.\n //\n // Perhaps some intelligent LRU or something is needed here but for now,\n // the opposite extreme of keeping all indexes for the lifetime of the\n // page seems better.\n }\n\n #getPrimaryIndex(): Index {\n const index = this.#indexes.get(JSON.stringify(this.#primaryIndexSort));\n assert(index, 'Primary index not found');\n return index;\n }\n\n #getOrCreateIndex(sort: Ordering, usedBy: Connection): Index {\n const key = JSON.stringify(sort);\n const index = this.#indexes.get(key);\n // Future optimization could use existing index if it's the same just sorted\n // in reverse of needed.\n if (index) {\n index.usedBy.add(usedBy);\n return index;\n }\n\n const comparator = makeBoundComparator(sort);\n\n // When creating these synchronously becomes a problem, a few options:\n // 1. Allow users to specify needed indexes up front\n // 2. Create indexes in a different thread asynchronously (this would require\n // modifying the BTree to be able to be passed over structured-clone, or using\n // a different library.)\n // 3. 
We could even theoretically do (2) on multiple threads and then merge the\n // results!\n const data = new BTreeSet<Row>(comparator);\n\n // I checked, there's no special path for adding data in bulk faster.\n // The constructor takes an array, but it just calls add/set over and over.\n for (const row of this.#getPrimaryIndex().data) {\n data.add(row);\n }\n\n const newIndex = {comparator, data, usedBy: new Set([usedBy])};\n this.#indexes.set(key, newIndex);\n return newIndex;\n }\n\n // For unit testing that we correctly clean up indexes.\n getIndexKeys(): string[] {\n return [...this.#indexes.keys()];\n }\n\n *#fetch(req: FetchRequest, conn: Connection): Stream<Node | 'yield'> {\n const requestedSort = must(conn.sort);\n const {compareRows} = conn;\n // Avoid allocating a new closure when not reversing (the common case).\n const connectionComparator: Comparator = req.reverse\n ? (r1, r2) => compareRows(r2, r1)\n : compareRows;\n\n const pkConstraint = primaryKeyConstraintFromFilters(\n conn.filters?.condition,\n this.#primaryKey,\n );\n // The primary key constraint will be more limiting than the constraint\n // so swap out to that if it exists.\n const fetchOrPkConstraint = pkConstraint ?? req.constraint;\n\n // If there is a constraint, we need an index sorted by it first.\n const indexSort: OrderPart[] = [];\n if (fetchOrPkConstraint) {\n for (const key of Object.keys(fetchOrPkConstraint)) {\n indexSort.push([key, 'asc']);\n }\n }\n\n // For the special case of constraining by PK, we don't need to worry about\n // any requested sort since there can only be one result. 
Otherwise we also\n // need the index sorted by the requested sort.\n if (\n this.#primaryKey.length > 1 ||\n !fetchOrPkConstraint ||\n !constraintMatchesPrimaryKey(fetchOrPkConstraint, this.#primaryKey)\n ) {\n indexSort.push(...requestedSort);\n }\n\n const index = this.#getOrCreateIndex(indexSort, conn);\n const {data, comparator: compare} = index;\n // Avoid allocating a new closure when not reversing (the common case).\n const indexComparator: Comparator = req.reverse\n ? (r1, r2) => compare(r2, r1)\n : compare;\n\n const startAt = req.start?.row;\n\n // If there is a constraint, we want to start our scan at the first row that\n // matches the constraint. But because the next OrderPart can be `desc`,\n // it's not true that {[constraintKey]: constraintValue} is the first\n // matching row. Because in that case, the other fields will all be\n // `undefined`, and in Zero `undefined` is always less than any other value.\n // So if the second OrderPart is descending then `undefined` values will\n // actually be the *last* row. We need a way to stay \"start at the first row\n // with this constraint value\". RowBound with the corresponding compareBound\n // comparator accomplishes this. The right thing is probably to teach the\n // btree library to support this concept.\n let scanStart: RowBound | undefined;\n\n if (fetchOrPkConstraint) {\n scanStart = {};\n for (const [key, dir] of indexSort) {\n if (hasOwn(fetchOrPkConstraint, key)) {\n scanStart[key] = fetchOrPkConstraint[key];\n } else {\n if (req.reverse) {\n scanStart[key] = dir === 'asc' ? maxValue : minValue;\n } else {\n scanStart[key] = dir === 'asc' ? minValue : maxValue;\n }\n }\n }\n } else {\n scanStart = startAt;\n }\n\n const rowsIterable = generateRows(data, scanStart, req.reverse);\n const withOverlay = generateWithOverlay(\n startAt,\n pkConstraint ? 
once(rowsIterable) : rowsIterable,\n // use `req.constraint` here and not `fetchOrPkConstraint` since `fetchOrPkConstraint` could be the\n // primary key constraint. The primary key constraint comes from filters and is acting as a filter\n // rather than as the fetch constraint.\n req.constraint,\n this.#overlay,\n conn.lastPushedEpoch,\n // Use indexComparator, generateWithOverlayInner has a subtle dependency\n // on this. Since generateWithConstraint is done after\n // generateWithOverlay, the generator consumed by generateWithOverlayInner\n // does not end when the constraint stops matching and so the final\n // check to yield an add overlay if not yet yielded is not reached.\n // However, using the indexComparator the add overlay will be less than\n // the first row that does not match the constraint, and so any\n // not yet yielded add overlay will be yielded when the first row\n // not matching the constraint is reached.\n indexComparator,\n conn.filters?.predicate,\n );\n\n const withConstraint = generateWithConstraint(\n skipYields(\n generateWithStart(withOverlay, req.start, connectionComparator),\n ),\n // we use `req.constraint` and not `fetchOrPkConstraint` here because we need to\n // AND the constraint with what could have been the primary key constraint\n req.constraint,\n );\n\n yield* conn.filters\n ? 
generateWithFilter(withConstraint, conn.filters.predicate)\n : withConstraint;\n }\n\n *push(change: SourceChange): Stream<'yield'> {\n for (const result of this.genPush(change)) {\n if (result === 'yield') {\n yield result;\n }\n }\n }\n\n *genPush(change: SourceChange) {\n const primaryIndex = this.#getPrimaryIndex();\n const {data} = primaryIndex;\n const exists = (row: Row) => data.has(row);\n const setOverlay = (o: Overlay | undefined) => (this.#overlay = o);\n const writeChange = (c: SourceChange) => this.#writeChange(c);\n yield* genPushAndWriteWithSplitEdit(\n this.#connections,\n change,\n exists,\n setOverlay,\n writeChange,\n () => ++this.#pushEpoch,\n );\n }\n\n #writeChange(change: SourceChange) {\n for (const {data} of this.#indexes.values()) {\n switch (change.type) {\n case 'add': {\n const added = data.add(change.row);\n // must succeed since we checked has() above.\n assert(\n added,\n 'MemorySource: add must succeed since row existence was already checked',\n );\n break;\n }\n case 'remove': {\n const removed = data.delete(change.row);\n // must succeed since we checked has() above.\n assert(\n removed,\n 'MemorySource: remove must succeed since row existence was already checked',\n );\n break;\n }\n case 'edit': {\n // TODO: We could see if the PK (form the index tree's perspective)\n // changed and if not we could use set.\n // We cannot just do `set` with the new value since the `oldRow` might\n // not map to the same entry as the new `row` in the index btree.\n const removed = data.delete(change.oldRow);\n // must succeed since we checked has() above.\n assert(\n removed,\n 'MemorySource: edit remove must succeed since row existence was already checked',\n );\n data.add(change.row);\n break;\n }\n default:\n unreachable(change);\n }\n }\n }\n}\n\nfunction* generateWithConstraint(\n it: Stream<Node>,\n constraint: Constraint | undefined,\n) {\n for (const node of it) {\n if (constraint && !constraintMatchesRow(constraint, node.row)) {\n 
break;\n }\n yield node;\n }\n}\n\nfunction* generateWithFilter(it: Stream<Node>, filter: (row: Row) => boolean) {\n for (const node of it) {\n if (filter(node.row)) {\n yield node;\n }\n }\n}\n\nexport function* genPushAndWriteWithSplitEdit(\n connections: readonly Connection[],\n change: SourceChange,\n exists: (row: Row) => boolean,\n setOverlay: (o: Overlay | undefined) => Overlay | undefined,\n writeChange: (c: SourceChange) => void,\n getNextEpoch: () => number,\n) {\n let shouldSplitEdit = false;\n if (change.type === 'edit') {\n for (const {splitEditKeys} of connections) {\n if (splitEditKeys) {\n for (const key of splitEditKeys) {\n if (!valuesEqual(change.row[key], change.oldRow[key])) {\n shouldSplitEdit = true;\n break;\n }\n }\n }\n }\n }\n\n if (change.type === 'edit' && shouldSplitEdit) {\n yield* genPushAndWrite(\n connections,\n {\n type: 'remove',\n row: change.oldRow,\n },\n exists,\n setOverlay,\n writeChange,\n getNextEpoch(),\n );\n yield* genPushAndWrite(\n connections,\n {\n type: 'add',\n row: change.row,\n },\n exists,\n setOverlay,\n writeChange,\n getNextEpoch(),\n );\n } else {\n yield* genPushAndWrite(\n connections,\n change,\n exists,\n setOverlay,\n writeChange,\n getNextEpoch(),\n );\n }\n}\n\nfunction* genPushAndWrite(\n connections: readonly Connection[],\n change: SourceChangeAdd | SourceChangeRemove | SourceChangeEdit,\n exists: (row: Row) => boolean,\n setOverlay: (o: Overlay | undefined) => Overlay | undefined,\n writeChange: (c: SourceChange) => void,\n pushEpoch: number,\n) {\n for (const x of genPush(connections, change, exists, setOverlay, pushEpoch)) {\n yield x;\n }\n writeChange(change);\n}\n\nfunction* genPush(\n connections: readonly Connection[],\n change: SourceChange,\n exists: (row: Row) => boolean,\n setOverlay: (o: Overlay | undefined) => void,\n pushEpoch: number,\n) {\n switch (change.type) {\n case 'add':\n assert(\n !exists(change.row),\n () => `Row already exists ${stringify(change)}`,\n );\n break;\n case 
'remove':\n assert(exists(change.row), () => `Row not found ${stringify(change)}`);\n break;\n case 'edit':\n assert(exists(change.oldRow), () => `Row not found ${stringify(change)}`);\n break;\n default:\n unreachable(change);\n }\n\n for (const conn of connections) {\n const {output, filters, input} = conn;\n if (output) {\n conn.lastPushedEpoch = pushEpoch;\n setOverlay({epoch: pushEpoch, change});\n const outputChange: Change =\n change.type === 'edit'\n ? {\n type: change.type,\n oldNode: {\n row: change.oldRow,\n relationships: {},\n },\n node: {\n row: change.row,\n relationships: {},\n },\n }\n : {\n type: change.type,\n node: {\n row: change.row,\n relationships: {},\n },\n };\n yield* filterPush(outputChange, output, input, filters?.predicate);\n yield undefined;\n }\n }\n\n setOverlay(undefined);\n}\n\nexport function* generateWithStart(\n nodes: Iterable<Node | 'yield'>,\n start: Start | undefined,\n compare: (r1: Row, r2: Row) => number,\n): Stream<Node | 'yield'> {\n if (!start) {\n yield* nodes;\n return;\n }\n let started = false;\n for (const node of nodes) {\n if (node === 'yield') {\n yield node;\n continue;\n }\n if (!started) {\n if (start.basis === 'at') {\n if (compare(node.row, start.row) >= 0) {\n started = true;\n }\n } else if (start.basis === 'after') {\n if (compare(node.row, start.row) > 0) {\n started = true;\n }\n }\n }\n if (started) {\n yield node;\n }\n }\n}\n\n/**\n * Takes an iterator and overlay.\n * Splices the overlay into the iterator at the correct position.\n *\n * @param startAt - if there is a lower bound to the stream. 
If the lower bound of the stream\n * is above the overlay, the overlay will be skipped.\n * @param rows - the stream into which the overlay should be spliced\n * @param constraint - constraint that was applied to the rowIterator and should\n * also be applied to the overlay.\n * @param overlay - the overlay values to splice in\n * @param compare - the comparator to use to find the position for the overlay\n */\nexport function* generateWithOverlay(\n startAt: Row | undefined,\n rows: Iterable<Row>,\n constraint: Constraint | undefined,\n overlay: Overlay | undefined,\n lastPushedEpoch: number,\n compare: Comparator,\n filterPredicate?: (row: Row) => boolean | undefined,\n) {\n let overlayToApply: Overlay | undefined = undefined;\n if (overlay && lastPushedEpoch >= overlay.epoch) {\n overlayToApply = overlay;\n }\n const overlays = computeOverlays(\n startAt,\n constraint,\n overlayToApply,\n compare,\n filterPredicate,\n );\n yield* generateWithOverlayInner(rows, overlays, compare);\n}\n\nfunction computeOverlays(\n startAt: Row | undefined,\n constraint: Constraint | undefined,\n overlay: Overlay | undefined,\n compare: Comparator,\n filterPredicate?: (row: Row) => boolean | undefined,\n): Overlays {\n let overlays: Overlays = {\n add: undefined,\n remove: undefined,\n };\n switch (overlay?.change.type) {\n case 'add':\n overlays = {\n add: overlay.change.row,\n remove: undefined,\n };\n break;\n case 'remove':\n overlays = {\n add: undefined,\n remove: overlay.change.row,\n };\n break;\n case 'edit':\n overlays = {\n add: overlay.change.row,\n remove: overlay.change.oldRow,\n };\n break;\n }\n\n if (startAt) {\n overlays = overlaysForStartAt(overlays, startAt, compare);\n }\n\n if (constraint) {\n overlays = overlaysForConstraint(overlays, constraint);\n }\n\n if (filterPredicate) {\n overlays = overlaysForFilterPredicate(overlays, filterPredicate);\n }\n\n return overlays;\n}\n\nexport {overlaysForStartAt as overlaysForStartAtForTest};\n\nfunction 
overlaysForStartAt(\n {add, remove}: Overlays,\n startAt: Row,\n compare: Comparator,\n): Overlays {\n const undefinedIfBeforeStartAt = (row: Row | undefined) =>\n row === undefined || compare(row, startAt) < 0 ? undefined : row;\n return {\n add: undefinedIfBeforeStartAt(add),\n remove: undefinedIfBeforeStartAt(remove),\n };\n}\n\nexport {overlaysForConstraint as overlaysForConstraintForTest};\n\nfunction overlaysForConstraint(\n {add, remove}: Overlays,\n constraint: Constraint,\n): Overlays {\n const undefinedIfDoesntMatchConstraint = (row: Row | undefined) =>\n row === undefined || !constraintMatchesRow(constraint, row)\n ? undefined\n : row;\n\n return {\n add: undefinedIfDoesntMatchConstraint(add),\n remove: undefinedIfDoesntMatchConstraint(remove),\n };\n}\n\nfunction overlaysForFilterPredicate(\n {add, remove}: Overlays,\n filterPredicate: (row: Row) => boolean | undefined,\n): Overlays {\n const undefinedIfDoesntMatchFilter = (row: Row | undefined) =>\n row === undefined || !filterPredicate(row) ? 
undefined : row;\n\n return {\n add: undefinedIfDoesntMatchFilter(add),\n remove: undefinedIfDoesntMatchFilter(remove),\n };\n}\n\nexport function* generateWithOverlayInner(\n rowIterator: Iterable<Row>,\n overlays: Overlays,\n compare: (r1: Row, r2: Row) => number,\n) {\n let addOverlayYielded = false;\n let removeOverlaySkipped = false;\n for (const row of rowIterator) {\n if (!addOverlayYielded && overlays.add) {\n const cmp = compare(overlays.add, row);\n if (cmp < 0) {\n addOverlayYielded = true;\n yield {row: overlays.add, relationships: {}};\n }\n }\n\n if (!removeOverlaySkipped && overlays.remove) {\n const cmp = compare(overlays.remove, row);\n if (cmp === 0) {\n removeOverlaySkipped = true;\n continue;\n }\n }\n yield {row, relationships: {}};\n }\n\n if (!addOverlayYielded && overlays.add) {\n yield {row: overlays.add, relationships: {}};\n }\n}\n\n/**\n * Like {@link generateWithOverlay} but for unordered streams.\n * No `startAt` or comparator needed. Injects remove/old-edit rows eagerly\n * at the start, and suppresses add/new-edit rows inline by PK match.\n */\nexport function* generateWithOverlayUnordered(\n rows: Iterable<Row>,\n constraint: Constraint | undefined,\n overlay: Overlay | undefined,\n lastPushedEpoch: number,\n primaryKey: PrimaryKey,\n filterPredicate?: (row: Row) => boolean,\n) {\n let overlayToApply: Overlay | undefined = undefined;\n if (overlay && lastPushedEpoch >= overlay.epoch) {\n overlayToApply = overlay;\n }\n let overlays: Overlays = {add: undefined, remove: undefined};\n switch (overlayToApply?.change.type) {\n case 'add':\n overlays = {add: overlayToApply.change.row, remove: undefined};\n break;\n case 'remove':\n overlays = {add: undefined, remove: overlayToApply.change.row};\n break;\n case 'edit':\n overlays = {\n add: overlayToApply.change.row,\n remove: overlayToApply.change.oldRow,\n };\n break;\n }\n if (constraint) {\n overlays = overlaysForConstraint(overlays, constraint);\n }\n if (filterPredicate) {\n overlays 
= overlaysForFilterPredicate(overlays, filterPredicate);\n }\n yield* generateWithOverlayInnerUnordered(rows, overlays, primaryKey);\n}\n\nexport function* generateWithOverlayInnerUnordered(\n rowIterator: Iterable<Row>,\n overlays: Overlays,\n primaryKey: PrimaryKey,\n) {\n // Eager inject: yield the add overlay at the start (row not yet in storage)\n if (overlays.add) {\n yield {row: overlays.add, relationships: {}};\n }\n // Stream with inline suppress: skip the remove overlay (row still in storage)\n let removeSkipped = false;\n for (const row of rowIterator) {\n if (\n !removeSkipped &&\n overlays.remove &&\n rowMatchesPK(overlays.remove, row, primaryKey)\n ) {\n removeSkipped = true;\n continue;\n }\n yield {row, relationships: {}};\n }\n}\n\nfunction rowMatchesPK(a: Row, b: Row, primaryKey: PrimaryKey): boolean {\n for (const key of primaryKey) {\n if (!valuesEqual(a[key], b[key])) {\n return false;\n }\n }\n return true;\n}\n\n/**\n * A location to begin scanning an index from. Can either be a specific value\n * or the min or max possible value for the type. This is used to start a scan\n * at the beginning of the rows matching a constraint.\n */\ntype Bound = Value | MinValue | MaxValue;\ntype RowBound = Record<string, Bound>;\nconst minValue = Symbol('min-value');\ntype MinValue = typeof minValue;\nconst maxValue = Symbol('max-value');\ntype MaxValue = typeof maxValue;\n\nfunction makeBoundComparator(sort: Ordering) {\n return (a: RowBound, b: RowBound) => {\n // Hot! Do not use destructuring\n for (const entry of sort) {\n const key = entry[0];\n const cmp = compareBounds(a[key], b[key]);\n if (cmp !== 0) {\n return entry[1] === 'asc' ? 
cmp : -cmp;\n }\n }\n return 0;\n };\n}\n\nfunction compareBounds(a: Bound, b: Bound): number {\n if (a === b) {\n return 0;\n }\n if (a === minValue) {\n return -1;\n }\n if (b === minValue) {\n return 1;\n }\n if (a === maxValue) {\n return 1;\n }\n if (b === maxValue) {\n return -1;\n }\n return compareValues(a, b);\n}\n\nfunction* generateRows(\n data: BTreeSet<Row>,\n scanStart: RowBound | undefined,\n reverse: boolean | undefined,\n) {\n yield* data[reverse ? 'valuesFromReversed' : 'valuesFrom'](\n scanStart as Row | undefined,\n );\n}\n\nexport function stringify(change: SourceChange) {\n return JSON.stringify(change, (_, v) =>\n typeof v === 'bigint' ? v.toString() : v,\n );\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;AA4FA,IAAa,eAAb,MAAa,aAA+B;CAC1C;CACA;CACA;CACA;CACA,2BAAwC,IAAI,KAAK;CACjD,eAAsC,EAAE;CAExC;CACA,aAAa;CAEb,YACE,WACA,SACA,YACA,kBACA;AACA,QAAA,YAAkB;AAClB,QAAA,UAAgB;AAChB,QAAA,aAAmB;AACnB,QAAA,mBAAyB,WAAW,KAAI,MAAK,CAAC,GAAG,MAAM,CAAC;EACxD,MAAM,aAAa,oBAAoB,MAAA,iBAAuB;AAC9D,QAAA,QAAc,IAAI,KAAK,UAAU,MAAA,iBAAuB,EAAE;GACxD;GACA,MAAM,oBAAoB,IAAI,SAAc,WAAW;GACvD,wBAAQ,IAAI,KAAK;GAClB,CAAC;;CAGJ,IAAI,cAAc;AAChB,SAAO;GACL,MAAM,MAAA;GACN,SAAS,MAAA;GACT,YAAY,MAAA;GACb;;CAGH,OAAO;EACL,MAAM,eAAe,MAAA,iBAAuB;AAC5C,SAAO,IAAI,aACT,MAAA,WACA,MAAA,SACA,MAAA,YACA,aAAa,KAAK,OAAO,CAC1B;;CAGH,IAAI,OAAsB;AACxB,SAAO,MAAA,iBAAuB,CAAC;;CAGjC,WAAW,YAAwB,WAAkC;AACnE,SAAO;GACL,WAAW,MAAA;GACX,SAAS,MAAA;GACT,YAAY,MAAA;GACZ,MAAM,YAAY,KAAA,IAAY,WAAW;GACzC,QAAQ;GACR,eAAe,EAAE;GACjB,UAAU;GACV,aAAa,WAAW;GACzB;;CAGH,QACE,MACA,SACA,eACa;EACb,MAAM,qBAAqB,iBAAiB,QAAQ;EACpD,MAAM,YAAY,SAAS,KAAA;EAC3B,MAAM,eAAe,QAAQ,MAAA;EAE7B,MAAM,QAAqB;GACzB,iBAAiB;GACjB,QAAO,QAAO,MAAA,MAAY,KAAK,WAAW;GAC1C,YAAW,WAAU;AACnB,eAAW,SAAS;;GAEtB,eAAe;AACb,UAAA,WAAiB,MAAM;;GAEzB,qBAAqB,CAAC,mBAAmB;GAC1C;EAED,MAAM,aAAyB;GAC7B;GACA,QAAQ,KAAA;GACR,MAAM;GACN;GACA,aAAa,eAAe,aAAa;GACzC,SAAS,mBAAmB,UACxB;IACE,WAAW,mBAAmB;IAC9B,WAAW,gBAAgB,mBAAmB,QAAQ;IACvD,GACD,KAAA;GACJ,iBAAiB;GAClB;EACD,MAAM,SAAS,MAAA,UAAgB,YAAY,U
AAU;AACrD,MAAI,CAAC,UACH,0BAAyB,cAAc,MAAA,WAAiB;AAE1D,QAAA,YAAkB,KAAK,WAAW;AAClC,SAAO;;CAGT,YAAY,OAAoB;EAC9B,MAAM,MAAM,MAAA,YAAkB,WAAU,MAAK,EAAE,UAAU,MAAM;AAC/D,SAAO,QAAQ,IAAI,uBAAuB;AAC1C,QAAA,YAAkB,OAAO,KAAK,EAAE;;CAWlC,mBAA0B;EACxB,MAAM,QAAQ,MAAA,QAAc,IAAI,KAAK,UAAU,MAAA,iBAAuB,CAAC;AACvE,SAAO,OAAO,0BAA0B;AACxC,SAAO;;CAGT,kBAAkB,MAAgB,QAA2B;EAC3D,MAAM,MAAM,KAAK,UAAU,KAAK;EAChC,MAAM,QAAQ,MAAA,QAAc,IAAI,IAAI;AAGpC,MAAI,OAAO;AACT,SAAM,OAAO,IAAI,OAAO;AACxB,UAAO;;EAGT,MAAM,aAAa,oBAAoB,KAAK;EAS5C,MAAM,OAAO,IAAI,SAAc,WAAW;AAI1C,OAAK,MAAM,OAAO,MAAA,iBAAuB,CAAC,KACxC,MAAK,IAAI,IAAI;EAGf,MAAM,WAAW;GAAC;GAAY;GAAM,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC;GAAC;AAC9D,QAAA,QAAc,IAAI,KAAK,SAAS;AAChC,SAAO;;CAIT,eAAyB;AACvB,SAAO,CAAC,GAAG,MAAA,QAAc,MAAM,CAAC;;CAGlC,EAAA,MAAQ,KAAmB,MAA0C;EACnE,MAAM,gBAAgB,KAAK,KAAK,KAAK;EACrC,MAAM,EAAC,gBAAe;EAEtB,MAAM,uBAAmC,IAAI,WACxC,IAAI,OAAO,YAAY,IAAI,GAAG,GAC/B;EAEJ,MAAM,eAAe,gCACnB,KAAK,SAAS,WACd,MAAA,WACD;EAGD,MAAM,sBAAsB,gBAAgB,IAAI;EAGhD,MAAM,YAAyB,EAAE;AACjC,MAAI,oBACF,MAAK,MAAM,OAAO,OAAO,KAAK,oBAAoB,CAChD,WAAU,KAAK,CAAC,KAAK,MAAM,CAAC;AAOhC,MACE,MAAA,WAAiB,SAAS,KAC1B,CAAC,uBACD,CAAC,4BAA4B,qBAAqB,MAAA,WAAiB,CAEnE,WAAU,KAAK,GAAG,cAAc;EAIlC,MAAM,EAAC,MAAM,YAAY,YADX,MAAA,iBAAuB,WAAW,KAAK;EAGrD,MAAM,kBAA8B,IAAI,WACnC,IAAI,OAAO,QAAQ,IAAI,GAAG,GAC3B;EAEJ,MAAM,UAAU,IAAI,OAAO;EAY3B,IAAI;AAEJ,MAAI,qBAAqB;AACvB,eAAY,EAAE;AACd,QAAK,MAAM,CAAC,KAAK,QAAQ,UACvB,KAAI,OAAO,qBAAqB,IAAI,CAClC,WAAU,OAAO,oBAAoB;YAEjC,IAAI,QACN,WAAU,OAAO,QAAQ,QAAQ,WAAW;OAE5C,WAAU,OAAO,QAAQ,QAAQ,WAAW;QAKlD,aAAY;EAGd,MAAM,eAAe,aAAa,MAAM,WAAW,IAAI,QAAQ;EAuB/D,MAAM,iBAAiB,uBACrB,WACE,kBAxBgB,oBAClB,SACA,eAAe,KAAK,aAAa,GAAG,cAIpC,IAAI,YACJ,MAAA,SACA,KAAK,iBAUL,iBACA,KAAK,SAAS,UACf,EAIkC,IAAI,OAAO,qBAAqB,CAChE,EAGD,IAAI,WACL;AAED,SAAO,KAAK,UACR,mBAAmB,gBAAgB,KAAK,QAAQ,UAAU,GAC1D;;CAGN,CAAC,KAAK,QAAuC;AAC3C,OAAK,MAAM,UAAU,KAAK,QAAQ,OAAO,CACvC,KAAI,WAAW,QACb,OAAM;;CAKZ,CAAC,QAAQ,QAAsB;EAE7B,MAAM,EAAC,SADc,MAAA,iBAAuB;EAE5C,MAAM,UAAU,QAAa,KAAK,IAAI,IAAI;EAC1C,MAAM,cAAc,MAA4B,MAAA,UAAgB;EAChE,MA
AM,eAAe,MAAoB,MAAA,YAAkB,EAAE;AAC7D,SAAO,6BACL,MAAA,aACA,QACA,QACA,YACA,mBACM,EAAE,MAAA,UACT;;CAGH,aAAa,QAAsB;AACjC,OAAK,MAAM,EAAC,UAAS,MAAA,QAAc,QAAQ,CACzC,SAAQ,OAAO,MAAf;GACE,KAAK;AAGH,WAFc,KAAK,IAAI,OAAO,IAAI,EAIhC,yEACD;AACD;GAEF,KAAK;AAGH,WAFgB,KAAK,OAAO,OAAO,IAAI,EAIrC,4EACD;AACD;GAEF,KAAK;AAOH,WAFgB,KAAK,OAAO,OAAO,OAAO,EAIxC,iFACD;AACD,SAAK,IAAI,OAAO,IAAI;AACpB;GAEF,QACE,aAAY,OAAO;;;;AAM7B,UAAU,uBACR,IACA,YACA;AACA,MAAK,MAAM,QAAQ,IAAI;AACrB,MAAI,cAAc,CAAC,qBAAqB,YAAY,KAAK,IAAI,CAC3D;AAEF,QAAM;;;AAIV,UAAU,mBAAmB,IAAkB,QAA+B;AAC5E,MAAK,MAAM,QAAQ,GACjB,KAAI,OAAO,KAAK,IAAI,CAClB,OAAM;;AAKZ,UAAiB,6BACf,aACA,QACA,QACA,YACA,aACA,cACA;CACA,IAAI,kBAAkB;AACtB,KAAI,OAAO,SAAS;OACb,MAAM,EAAC,mBAAkB,YAC5B,KAAI;QACG,MAAM,OAAO,cAChB,KAAI,CAAC,YAAY,OAAO,IAAI,MAAM,OAAO,OAAO,KAAK,EAAE;AACrD,sBAAkB;AAClB;;;;AAOV,KAAI,OAAO,SAAS,UAAU,iBAAiB;AAC7C,SAAO,gBACL,aACA;GACE,MAAM;GACN,KAAK,OAAO;GACb,EACD,QACA,YACA,aACA,cAAc,CACf;AACD,SAAO,gBACL,aACA;GACE,MAAM;GACN,KAAK,OAAO;GACb,EACD,QACA,YACA,aACA,cAAc,CACf;OAED,QAAO,gBACL,aACA,QACA,QACA,YACA,aACA,cAAc,CACf;;AAIL,UAAU,gBACR,aACA,QACA,QACA,YACA,aACA,WACA;AACA,MAAK,MAAM,KAAK,QAAQ,aAAa,QAAQ,QAAQ,YAAY,UAAU,CACzE,OAAM;AAER,aAAY,OAAO;;AAGrB,UAAU,QACR,aACA,QACA,QACA,YACA,WACA;AACA,SAAQ,OAAO,MAAf;EACE,KAAK;AACH,UACE,CAAC,OAAO,OAAO,IAAI,QACb,sBAAsB,UAAU,OAAO,GAC9C;AACD;EACF,KAAK;AACH,UAAO,OAAO,OAAO,IAAI,QAAQ,iBAAiB,UAAU,OAAO,GAAG;AACtE;EACF,KAAK;AACH,UAAO,OAAO,OAAO,OAAO,QAAQ,iBAAiB,UAAU,OAAO,GAAG;AACzE;EACF,QACE,aAAY,OAAO;;AAGvB,MAAK,MAAM,QAAQ,aAAa;EAC9B,MAAM,EAAC,QAAQ,SAAS,UAAS;AACjC,MAAI,QAAQ;AACV,QAAK,kBAAkB;AACvB,cAAW;IAAC,OAAO;IAAW;IAAO,CAAC;AAqBtC,UAAO,WAnBL,OAAO,SAAS,SACZ;IACE,MAAM,OAAO;IACb,SAAS;KACP,KAAK,OAAO;KACZ,eAAe,EAAE;KAClB;IACD,MAAM;KACJ,KAAK,OAAO;KACZ,eAAe,EAAE;KAClB;IACF,GACD;IACE,MAAM,OAAO;IACb,MAAM;KACJ,KAAK,OAAO;KACZ,eAAe,EAAE;KAClB;IACF,EACyB,QAAQ,OAAO,SAAS,UAAU;AAClE,SAAM,KAAA;;;AAIV,YAAW,KAAA,EAAU;;AAGvB,UAAiB,kBACf,OACA,OACA,SACwB;AACxB,KAAI,CAAC,OAAO;AACV,SAAO;AACP;;CAEF,IAAI,UAAU;AACd,MAAK,MAAM,QAAQ,OAAO;AACxB,MAAI,SAAS
,SAAS;AACpB,SAAM;AACN;;AAEF,MAAI,CAAC;OACC,MAAM,UAAU;QACd,QAAQ,KAAK,KAAK,MAAM,IAAI,IAAI,EAClC,WAAU;cAEH,MAAM,UAAU;QACrB,QAAQ,KAAK,KAAK,MAAM,IAAI,GAAG,EACjC,WAAU;;;AAIhB,MAAI,QACF,OAAM;;;;;;;;;;;;;;;AAiBZ,UAAiB,oBACf,SACA,MACA,YACA,SACA,iBACA,SACA,iBACA;CACA,IAAI,iBAAsC,KAAA;AAC1C,KAAI,WAAW,mBAAmB,QAAQ,MACxC,kBAAiB;AASnB,QAAO,yBAAyB,MAPf,gBACf,SACA,YACA,gBACA,SACA,gBACD,EAC+C,QAAQ;;AAG1D,SAAS,gBACP,SACA,YACA,SACA,SACA,iBACU;CACV,IAAI,WAAqB;EACvB,KAAK,KAAA;EACL,QAAQ,KAAA;EACT;AACD,SAAQ,SAAS,OAAO,MAAxB;EACE,KAAK;AACH,cAAW;IACT,KAAK,QAAQ,OAAO;IACpB,QAAQ,KAAA;IACT;AACD;EACF,KAAK;AACH,cAAW;IACT,KAAK,KAAA;IACL,QAAQ,QAAQ,OAAO;IACxB;AACD;EACF,KAAK;AACH,cAAW;IACT,KAAK,QAAQ,OAAO;IACpB,QAAQ,QAAQ,OAAO;IACxB;AACD;;AAGJ,KAAI,QACF,YAAW,mBAAmB,UAAU,SAAS,QAAQ;AAG3D,KAAI,WACF,YAAW,sBAAsB,UAAU,WAAW;AAGxD,KAAI,gBACF,YAAW,2BAA2B,UAAU,gBAAgB;AAGlE,QAAO;;AAKT,SAAS,mBACP,EAAC,KAAK,UACN,SACA,SACU;CACV,MAAM,4BAA4B,QAChC,QAAQ,KAAA,KAAa,QAAQ,KAAK,QAAQ,GAAG,IAAI,KAAA,IAAY;AAC/D,QAAO;EACL,KAAK,yBAAyB,IAAI;EAClC,QAAQ,yBAAyB,OAAO;EACzC;;AAKH,SAAS,sBACP,EAAC,KAAK,UACN,YACU;CACV,MAAM,oCAAoC,QACxC,QAAQ,KAAA,KAAa,CAAC,qBAAqB,YAAY,IAAI,GACvD,KAAA,IACA;AAEN,QAAO;EACL,KAAK,iCAAiC,IAAI;EAC1C,QAAQ,iCAAiC,OAAO;EACjD;;AAGH,SAAS,2BACP,EAAC,KAAK,UACN,iBACU;CACV,MAAM,gCAAgC,QACpC,QAAQ,KAAA,KAAa,CAAC,gBAAgB,IAAI,GAAG,KAAA,IAAY;AAE3D,QAAO;EACL,KAAK,6BAA6B,IAAI;EACtC,QAAQ,6BAA6B,OAAO;EAC7C;;AAGH,UAAiB,yBACf,aACA,UACA,SACA;CACA,IAAI,oBAAoB;CACxB,IAAI,uBAAuB;AAC3B,MAAK,MAAM,OAAO,aAAa;AAC7B,MAAI,CAAC,qBAAqB,SAAS;OACrB,QAAQ,SAAS,KAAK,IAAI,GAC5B,GAAG;AACX,wBAAoB;AACpB,UAAM;KAAC,KAAK,SAAS;KAAK,eAAe,EAAE;KAAC;;;AAIhD,MAAI,CAAC,wBAAwB,SAAS;OACxB,QAAQ,SAAS,QAAQ,IAAI,KAC7B,GAAG;AACb,2BAAuB;AACvB;;;AAGJ,QAAM;GAAC;GAAK,eAAe,EAAE;GAAC;;AAGhC,KAAI,CAAC,qBAAqB,SAAS,IACjC,OAAM;EAAC,KAAK,SAAS;EAAK,eAAe,EAAE;EAAC;;;;;;;AAShD,UAAiB,6BACf,MACA,YACA,SACA,iBACA,YACA,iBACA;CACA,IAAI,iBAAsC,KAAA;AAC1C,KAAI,WAAW,mBAAmB,QAAQ,MACxC,kBAAiB;CAEnB,IAAI,WAAqB;EAAC,KAAK,KAAA;EAAW,QAAQ,KAAA;EAAU;AAC5D,SAAQ,gBAAgB,OAAO,MAA/B;EACE,KAAK;AACH,cAAW;IAA
C,KAAK,eAAe,OAAO;IAAK,QAAQ,KAAA;IAAU;AAC9D;EACF,KAAK;AACH,cAAW;IAAC,KAAK,KAAA;IAAW,QAAQ,eAAe,OAAO;IAAI;AAC9D;EACF,KAAK;AACH,cAAW;IACT,KAAK,eAAe,OAAO;IAC3B,QAAQ,eAAe,OAAO;IAC/B;AACD;;AAEJ,KAAI,WACF,YAAW,sBAAsB,UAAU,WAAW;AAExD,KAAI,gBACF,YAAW,2BAA2B,UAAU,gBAAgB;AAElE,QAAO,kCAAkC,MAAM,UAAU,WAAW;;AAGtE,UAAiB,kCACf,aACA,UACA,YACA;AAEA,KAAI,SAAS,IACX,OAAM;EAAC,KAAK,SAAS;EAAK,eAAe,EAAE;EAAC;CAG9C,IAAI,gBAAgB;AACpB,MAAK,MAAM,OAAO,aAAa;AAC7B,MACE,CAAC,iBACD,SAAS,UACT,aAAa,SAAS,QAAQ,KAAK,WAAW,EAC9C;AACA,mBAAgB;AAChB;;AAEF,QAAM;GAAC;GAAK,eAAe,EAAE;GAAC;;;AAIlC,SAAS,aAAa,GAAQ,GAAQ,YAAiC;AACrE,MAAK,MAAM,OAAO,WAChB,KAAI,CAAC,YAAY,EAAE,MAAM,EAAE,KAAK,CAC9B,QAAO;AAGX,QAAO;;AAUT,IAAM,WAAW,OAAO,YAAY;AAEpC,IAAM,WAAW,OAAO,YAAY;AAGpC,SAAS,oBAAoB,MAAgB;AAC3C,SAAQ,GAAa,MAAgB;AAEnC,OAAK,MAAM,SAAS,MAAM;GACxB,MAAM,MAAM,MAAM;GAClB,MAAM,MAAM,cAAc,EAAE,MAAM,EAAE,KAAK;AACzC,OAAI,QAAQ,EACV,QAAO,MAAM,OAAO,QAAQ,MAAM,CAAC;;AAGvC,SAAO;;;AAIX,SAAS,cAAc,GAAU,GAAkB;AACjD,KAAI,MAAM,EACR,QAAO;AAET,KAAI,MAAM,SACR,QAAO;AAET,KAAI,MAAM,SACR,QAAO;AAET,KAAI,MAAM,SACR,QAAO;AAET,KAAI,MAAM,SACR,QAAO;AAET,QAAO,cAAc,GAAG,EAAE;;AAG5B,UAAU,aACR,MACA,WACA,SACA;AACA,QAAO,KAAK,UAAU,uBAAuB,cAC3C,UACD;;AAGH,SAAgB,UAAU,QAAsB;AAC9C,QAAO,KAAK,UAAU,SAAS,GAAG,MAChC,OAAO,MAAM,WAAW,EAAE,UAAU,GAAG,EACxC"}
@@ -1 +1 @@
1
- {"version":3,"file":"view-apply-change.d.ts","sourceRoot":"","sources":["../../../../../zql/src/ivm/view-apply-change.ts"],"names":[],"mappings":"AAQA,OAAO,KAAK,EAAC,GAAG,EAAC,MAAM,oCAAoC,CAAC;AAC5D,OAAO,EAAkB,KAAK,IAAI,EAAC,MAAM,WAAW,CAAC;AAErD,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,aAAa,CAAC;AAC9C,OAAO,KAAK,EAAC,KAAK,EAAE,MAAM,EAAC,MAAM,WAAW,CAAC;AAE7C,eAAO,MAAM,cAAc,eAAe,CAAC;AAC3C,eAAO,MAAM,QAAQ,eAAe,CAAC;AAQrC;;;;GAIG;AACH,MAAM,MAAM,UAAU,GAClB,aAAa,GACb,gBAAgB,GAChB,eAAe,GACf,cAAc,CAAC;AAEnB,MAAM,MAAM,WAAW,GAAG;IAAC,GAAG,EAAE,GAAG,CAAA;CAAC,CAAC;AAErC,MAAM,MAAM,aAAa,GAAG;IAC1B,IAAI,EAAE,KAAK,CAAC;IACZ,IAAI,EAAE,IAAI,CAAC;CACZ,CAAC;AAEF,MAAM,MAAM,gBAAgB,GAAG;IAC7B,IAAI,EAAE,QAAQ,CAAC;IACf,IAAI,EAAE,IAAI,CAAC;CACZ,CAAC;AAEF,KAAK,eAAe,GAAG;IACrB,IAAI,EAAE,OAAO,CAAC;IACd,IAAI,EAAE,WAAW,CAAC;IAClB,KAAK,EAAE;QACL,gBAAgB,EAAE,MAAM,CAAC;QACzB,MAAM,EAAE,UAAU,CAAC;KACpB,CAAC;CACH,CAAC;AAEF,KAAK,cAAc,GAAG;IACpB,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,WAAW,CAAC;IAClB,OAAO,EAAE,WAAW,CAAC;CACtB,CAAC;AAEF;;;GAGG;AACH,MAAM,WAAW,WAAW;IAC1B,GAAG,CAAC,KAAK,EAAE,KAAK,GAAG,MAAM,GAAG,SAAS,CAAC;IACtC,GAAG,CAAC,KAAK,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;IAC1C,MAAM,CAAC,KAAK,EAAE,KAAK,GAAG,OAAO,CAAC;CAC/B;AAED,wBAAgB,WAAW,CACzB,WAAW,EAAE,KAAK,EAClB,MAAM,EAAE,UAAU,EAClB,MAAM,EAAE,YAAY,EACpB,YAAY,EAAE,MAAM,EACpB,MAAM,EAAE,MAAM,EACd,OAAO,UAAQ,GACd,IAAI,CAyON"}
1
+ {"version":3,"file":"view-apply-change.d.ts","sourceRoot":"","sources":["../../../../../zql/src/ivm/view-apply-change.ts"],"names":[],"mappings":"AAQA,OAAO,KAAK,EAAC,GAAG,EAAC,MAAM,oCAAoC,CAAC;AAC5D,OAAO,EAAkB,KAAK,IAAI,EAAC,MAAM,WAAW,CAAC;AAErD,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,aAAa,CAAC;AAC9C,OAAO,KAAK,EAAC,KAAK,EAAE,MAAM,EAAC,MAAM,WAAW,CAAC;AAE7C,eAAO,MAAM,cAAc,eAAe,CAAC;AAC3C,eAAO,MAAM,QAAQ,eAAe,CAAC;AAQrC;;;;GAIG;AACH,MAAM,MAAM,UAAU,GAClB,aAAa,GACb,gBAAgB,GAChB,eAAe,GACf,cAAc,CAAC;AAEnB,MAAM,MAAM,WAAW,GAAG;IAAC,GAAG,EAAE,GAAG,CAAA;CAAC,CAAC;AAErC,MAAM,MAAM,aAAa,GAAG;IAC1B,IAAI,EAAE,KAAK,CAAC;IACZ,IAAI,EAAE,IAAI,CAAC;CACZ,CAAC;AAEF,MAAM,MAAM,gBAAgB,GAAG;IAC7B,IAAI,EAAE,QAAQ,CAAC;IACf,IAAI,EAAE,IAAI,CAAC;CACZ,CAAC;AAEF,KAAK,eAAe,GAAG;IACrB,IAAI,EAAE,OAAO,CAAC;IACd,IAAI,EAAE,WAAW,CAAC;IAClB,KAAK,EAAE;QACL,gBAAgB,EAAE,MAAM,CAAC;QACzB,MAAM,EAAE,UAAU,CAAC;KACpB,CAAC;CACH,CAAC;AAEF,KAAK,cAAc,GAAG;IACpB,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,WAAW,CAAC;IAClB,OAAO,EAAE,WAAW,CAAC;CACtB,CAAC;AAEF;;;GAGG;AACH,MAAM,WAAW,WAAW;IAC1B,GAAG,CAAC,KAAK,EAAE,KAAK,GAAG,MAAM,GAAG,SAAS,CAAC;IACtC,GAAG,CAAC,KAAK,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;IAC1C,MAAM,CAAC,KAAK,EAAE,KAAK,GAAG,OAAO,CAAC;CAC/B;AAED,wBAAgB,WAAW,CACzB,WAAW,EAAE,KAAK,EAClB,MAAM,EAAE,UAAU,EAClB,MAAM,EAAE,YAAY,EACpB,YAAY,EAAE,MAAM,EACpB,MAAM,EAAE,MAAM,EACd,OAAO,UAAQ,GACd,IAAI,CA4ON"}
@@ -38,7 +38,7 @@ function applyChange(parentEntry, change, schema, relationship, format, withIDs
38
38
  newEntry = makeNewMetaEntry(change.node.row, schema, withIDs, 1);
39
39
  parentEntry[relationship] = newEntry;
40
40
  }
41
- } else newEntry = add(change.node.row, getChildEntryList(parentEntry, relationship), schema, withIDs);
41
+ } else newEntry = addToView(change.node.row, getChildEntryList(parentEntry, relationship), schema, withIDs);
42
42
  if (newEntry) for (const [relationship, children] of Object.entries(change.node.relationships)) {
43
43
  const childSchema = must(schema.relationships[relationship]);
44
44
  const childFormat = childFormats[relationship];
@@ -65,9 +65,9 @@ function applyChange(parentEntry, change, schema, relationship, format, withIDs
65
65
  if (singular) existing = getSingularEntry(parentEntry, relationship);
66
66
  else {
67
67
  const view = getChildEntryList(parentEntry, relationship);
68
- const { pos, found } = binarySearch(view, change.node.row, schema.compareRows);
69
- assert(found, "node does not exist");
70
- existing = view[pos];
68
+ const bsResult = binarySearch(view, change.node.row, schema.compareRows);
69
+ assert(bsResult >= 0, "node does not exist");
70
+ existing = view[bsResult];
71
71
  }
72
72
  const childSchema = must(schema.relationships[change.child.relationshipName]);
73
73
  const childFormat = format.relationships[change.child.relationshipName];
@@ -82,10 +82,13 @@ function applyChange(parentEntry, change, schema, relationship, format, withIDs
82
82
  } else {
83
83
  const view = getChildEntryList(parentEntry, relationship);
84
84
  if (schema.compareRows(change.oldNode.row, change.node.row) !== 0) {
85
- const { pos: oldPos, found: oldFound } = binarySearch(view, change.oldNode.row, schema.compareRows);
86
- assert(oldFound, "old node does not exist");
85
+ const oldBsResult = binarySearch(view, change.oldNode.row, schema.compareRows);
86
+ assert(oldBsResult >= 0, "old node does not exist");
87
+ const oldPos = oldBsResult;
87
88
  const oldEntry = view[oldPos];
88
- const { pos, found } = binarySearch(view, change.node.row, schema.compareRows);
89
+ const newBsResult = binarySearch(view, change.node.row, schema.compareRows);
90
+ const found = newBsResult >= 0;
91
+ const pos = found ? newBsResult : ~newBsResult;
89
92
  if (oldEntry[refCountSymbol] === 1 && (pos === oldPos || pos - 1 === oldPos)) applyEdit(oldEntry, change, schema, withIDs);
90
93
  else {
91
94
  oldEntry[refCountSymbol]--;
@@ -105,9 +108,9 @@ function applyChange(parentEntry, change, schema, relationship, format, withIDs
105
108
  applyEdit(entryToEdit, change, schema, withIDs);
106
109
  }
107
110
  } else {
108
- const { pos, found } = binarySearch(view, change.oldNode.row, schema.compareRows);
109
- assert(found, "node does not exist");
110
- applyEdit(view[pos], change, schema, withIDs);
111
+ const bsResult = binarySearch(view, change.oldNode.row, schema.compareRows);
112
+ assert(bsResult >= 0, "node does not exist");
113
+ applyEdit(view[bsResult], change, schema, withIDs);
111
114
  }
112
115
  }
113
116
  break;
@@ -118,24 +121,35 @@ function applyEdit(existing, change, schema, withIDs) {
118
121
  Object.assign(existing, change.node.row);
119
122
  if (withIDs) existing[idSymbol] = makeID(change.node.row, schema);
120
123
  }
121
- function add(row, view, schema, withIDs) {
122
- const { pos, found } = binarySearch(view, row, schema.compareRows);
123
- if (found) {
124
- view[pos][refCountSymbol]++;
124
+ function addToView(row, view, schema, withIDs) {
125
+ const bsResult = binarySearch(view, row, schema.compareRows);
126
+ if (bsResult >= 0) {
127
+ view[bsResult][refCountSymbol]++;
125
128
  return;
126
129
  }
130
+ const pos = ~bsResult;
127
131
  const newEntry = makeNewMetaEntry(row, schema, withIDs, 1);
128
132
  view.splice(pos, 0, newEntry);
129
133
  return newEntry;
130
134
  }
131
135
  function removeAndUpdateRefCount(view, row, compareRows) {
132
- const { pos, found } = binarySearch(view, row, compareRows);
133
- assert(found, "node does not exist");
134
- const oldEntry = view[pos];
135
- if (oldEntry[refCountSymbol] === 1) view.splice(pos, 1);
136
+ const bsResult = binarySearch(view, row, compareRows);
137
+ assert(bsResult >= 0, "node does not exist");
138
+ const oldEntry = view[bsResult];
139
+ if (oldEntry[refCountSymbol] === 1) view.splice(bsResult, 1);
136
140
  oldEntry[refCountSymbol]--;
137
141
  return oldEntry;
138
142
  }
143
+ /**
144
+ * Binary search a sorted view.
145
+ *
146
+ * Returns the index of `target` if found (a non-negative integer), or
147
+ * `~insertionIndex` if not found (always negative, following Java's
148
+ * `Arrays.binarySearch` convention). To get the insertion index when
149
+ * not found use the bitwise NOT: `~result`.
150
+ *
151
+ * This avoids allocating a `{pos, found}` result object on every call.
152
+ */
139
153
  function binarySearch(view, target, comparator) {
140
154
  let low = 0;
141
155
  let high = view.length - 1;
@@ -144,15 +158,9 @@ function binarySearch(view, target, comparator) {
144
158
  const comparison = comparator(view[mid], target);
145
159
  if (comparison < 0) low = mid + 1;
146
160
  else if (comparison > 0) high = mid - 1;
147
- else return {
148
- pos: mid,
149
- found: true
150
- };
161
+ else return mid;
151
162
  }
152
- return {
153
- pos: low,
154
- found: false
155
- };
163
+ return ~low;
156
164
  }
157
165
  function getChildEntryList(parentEntry, relationship) {
158
166
  const view = parentEntry[relationship];
@@ -1 +1 @@
1
- {"version":3,"file":"view-apply-change.js","names":[],"sources":["../../../../../zql/src/ivm/view-apply-change.ts"],"sourcesContent":["import {\n assert,\n assertArray,\n assertNumber,\n unreachable,\n} from '../../../shared/src/asserts.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport type {Writable} from '../../../shared/src/writable.ts';\nimport type {Row} from '../../../zero-protocol/src/data.ts';\nimport {type Comparator, type Node} from './data.ts';\nimport {skipYields} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Entry, Format} from './view.ts';\n\nexport const refCountSymbol = Symbol('rc');\nexport const idSymbol = Symbol('id');\n\ntype MetaEntry = Writable<Entry> & {\n [refCountSymbol]: number;\n [idSymbol]?: string | undefined;\n};\ntype MetaEntryList = MetaEntry[];\n\n/**\n * `applyChange` does not consume the `relationships` of `ChildChange#node`,\n * `EditChange#node` and `EditChange#oldNode`. The `ViewChange` type\n * documents and enforces this via the type system.\n */\nexport type ViewChange =\n | AddViewChange\n | RemoveViewChange\n | ChildViewChange\n | EditViewChange;\n\nexport type RowOnlyNode = {row: Row};\n\nexport type AddViewChange = {\n type: 'add';\n node: Node;\n};\n\nexport type RemoveViewChange = {\n type: 'remove';\n node: Node;\n};\n\ntype ChildViewChange = {\n type: 'child';\n node: RowOnlyNode;\n child: {\n relationshipName: string;\n change: ViewChange;\n };\n};\n\ntype EditViewChange = {\n type: 'edit';\n node: RowOnlyNode;\n oldNode: RowOnlyNode;\n};\n\n/**\n * This is a subset of WeakMap but restricted to what we need.\n * @deprecated Not used anymore. 
This will be removed in the future.\n */\nexport interface RefCountMap {\n get(entry: Entry): number | undefined;\n set(entry: Entry, refCount: number): void;\n delete(entry: Entry): boolean;\n}\n\nexport function applyChange(\n parentEntry: Entry,\n change: ViewChange,\n schema: SourceSchema,\n relationship: string,\n format: Format,\n withIDs = false,\n): void {\n if (schema.isHidden) {\n switch (change.type) {\n case 'add':\n case 'remove':\n for (const [relationship, children] of Object.entries(\n change.node.relationships,\n )) {\n const childSchema = must(schema.relationships[relationship]);\n for (const node of skipYields(children())) {\n applyChange(\n parentEntry,\n {type: change.type, node},\n childSchema,\n relationship,\n format,\n withIDs,\n );\n }\n }\n return;\n case 'edit':\n // If hidden at this level it means that the hidden row was changed. If\n // the row was changed in such a way that it would change the\n // relationships then the edit would have been split into remove and\n // add.\n return;\n case 'child': {\n const childSchema = must(\n schema.relationships[change.child.relationshipName],\n );\n applyChange(\n parentEntry,\n change.child.change,\n childSchema,\n relationship,\n format,\n withIDs,\n );\n return;\n }\n default:\n unreachable(change);\n }\n }\n\n const {singular, relationships: childFormats} = format;\n switch (change.type) {\n case 'add': {\n let newEntry: MetaEntry | undefined;\n\n if (singular) {\n const oldEntry = parentEntry[relationship] as MetaEntry | undefined;\n if (oldEntry !== undefined) {\n assert(\n schema.compareRows(oldEntry, change.node.row) === 0,\n `Singular relationship '${relationship}' should not have multiple rows. 
You may need to declare this relationship with the \\`many\\` helper instead of the \\`one\\` helper in your schema.`,\n );\n // adding same again.\n oldEntry[refCountSymbol]++;\n } else {\n newEntry = makeNewMetaEntry(change.node.row, schema, withIDs, 1);\n\n (parentEntry as Writable<Entry>)[relationship] = newEntry;\n }\n } else {\n newEntry = add(\n change.node.row,\n getChildEntryList(parentEntry, relationship),\n schema,\n withIDs,\n );\n }\n\n if (newEntry) {\n for (const [relationship, children] of Object.entries(\n change.node.relationships,\n )) {\n // TODO: Is there a flag to make TypeScript complain that dictionary access might be undefined?\n const childSchema = must(schema.relationships[relationship]);\n const childFormat = childFormats[relationship];\n if (childFormat === undefined) {\n continue;\n }\n\n const newView = childFormat.singular\n ? undefined\n : ([] as MetaEntryList);\n newEntry[relationship] = newView;\n\n for (const node of skipYields(children())) {\n applyChange(\n newEntry,\n {type: 'add', node},\n childSchema,\n relationship,\n childFormat,\n withIDs,\n );\n }\n }\n }\n break;\n }\n case 'remove': {\n if (singular) {\n const oldEntry = parentEntry[relationship] as MetaEntry | undefined;\n assert(oldEntry !== undefined, 'node does not exist');\n const rc = oldEntry[refCountSymbol];\n if (rc === 1) {\n (parentEntry as Writable<Entry>)[relationship] = undefined;\n }\n oldEntry[refCountSymbol]--;\n } else {\n removeAndUpdateRefCount(\n getChildEntryList(parentEntry, relationship),\n change.node.row,\n schema.compareRows,\n );\n }\n break;\n }\n case 'child': {\n let existing: MetaEntry;\n if (singular) {\n existing = getSingularEntry(parentEntry, relationship);\n } else {\n const view = getChildEntryList(parentEntry, relationship);\n const {pos, found} = binarySearch(\n view,\n change.node.row,\n schema.compareRows,\n );\n assert(found, 'node does not exist');\n existing = view[pos];\n }\n\n const childSchema = must(\n 
schema.relationships[change.child.relationshipName],\n );\n const childFormat = format.relationships[change.child.relationshipName];\n if (childFormat !== undefined) {\n applyChange(\n existing,\n change.child.change,\n childSchema,\n change.child.relationshipName,\n childFormat,\n withIDs,\n );\n }\n break;\n }\n case 'edit': {\n if (singular) {\n const existing = parentEntry[relationship];\n assertMetaEntry(existing);\n applyEdit(existing, change, schema, withIDs);\n } else {\n const view = getChildEntryList(parentEntry, relationship);\n // The position of the row in the list may have changed due to the edit.\n if (schema.compareRows(change.oldNode.row, change.node.row) !== 0) {\n const {pos: oldPos, found: oldFound} = binarySearch(\n view,\n change.oldNode.row,\n schema.compareRows,\n );\n assert(oldFound, 'old node does not exist');\n const oldEntry = view[oldPos];\n const {pos, found} = binarySearch(\n view,\n change.node.row,\n schema.compareRows,\n );\n // A special case:\n // when refCount is 1 (so the row is being moved\n // without leaving a placeholder behind), and the new pos is\n // the same as the old, or directly after the old (so after the remove\n // of the old it would be in the same pos):\n // the row does not need to be moved, it can just be edited in place.\n if (\n oldEntry[refCountSymbol] === 1 &&\n (pos === oldPos || pos - 1 === oldPos)\n ) {\n applyEdit(oldEntry, change, schema, withIDs);\n } else {\n // Move the row. If the row has > 1 ref count, an edit should\n // be received for each ref count. On the first edit, the original\n // row is moved, the edit is applied to it and its ref count is set\n // to 1. A shallow copy of the row is left at the old pos for\n // processing of the remaining edit, and the copy's ref count\n // is decremented. As each edit is received the ref count of the\n // copy is decrement, and the ref count of the row at the new\n // position is incremented. 
When the copy's ref count goes to 0,\n // it is removed.\n oldEntry[refCountSymbol]--;\n let adjustedPos = pos;\n if (oldEntry[refCountSymbol] === 0) {\n view.splice(oldPos, 1);\n adjustedPos = oldPos < pos ? pos - 1 : pos;\n }\n\n let entryToEdit;\n if (found) {\n entryToEdit = view[adjustedPos];\n } else {\n view.splice(adjustedPos, 0, oldEntry);\n entryToEdit = oldEntry;\n if (oldEntry[refCountSymbol] > 0) {\n const oldEntryCopy = {...oldEntry};\n view[oldPos] = oldEntryCopy;\n }\n }\n entryToEdit[refCountSymbol]++;\n applyEdit(entryToEdit, change, schema, withIDs);\n }\n } else {\n // Position could not have changed, so simply edit in place.\n const {pos, found} = binarySearch(\n view,\n change.oldNode.row,\n schema.compareRows,\n );\n assert(found, 'node does not exist');\n applyEdit(view[pos], change, schema, withIDs);\n }\n }\n\n break;\n }\n default:\n unreachable(change);\n }\n}\n\nfunction applyEdit(\n existing: MetaEntry,\n change: EditViewChange,\n schema: SourceSchema,\n withIDs: boolean,\n) {\n Object.assign(existing, change.node.row);\n if (withIDs) {\n existing[idSymbol] = makeID(change.node.row, schema);\n }\n}\n\nfunction add(\n row: Row,\n view: MetaEntryList,\n schema: SourceSchema,\n withIDs: boolean,\n): MetaEntry | undefined {\n const {pos, found} = binarySearch(view, row, schema.compareRows);\n\n if (found) {\n view[pos][refCountSymbol]++;\n return undefined;\n }\n const newEntry = makeNewMetaEntry(row, schema, withIDs, 1);\n view.splice(pos, 0, newEntry);\n return newEntry;\n}\n\nfunction removeAndUpdateRefCount(\n view: MetaEntryList,\n row: Row,\n compareRows: Comparator,\n): MetaEntry {\n const {pos, found} = binarySearch(view, row, compareRows);\n assert(found, 'node does not exist');\n const oldEntry = view[pos];\n const rc = oldEntry[refCountSymbol];\n if (rc === 1) {\n view.splice(pos, 1);\n }\n oldEntry[refCountSymbol]--;\n\n return oldEntry;\n}\n\n// TODO: Do not return an object. 
It puts unnecessary pressure on the GC.\nfunction binarySearch(\n view: MetaEntryList,\n target: Row,\n comparator: Comparator,\n) {\n let low = 0;\n let high = view.length - 1;\n while (low <= high) {\n const mid = (low + high) >>> 1;\n const comparison = comparator(view[mid] as Row, target as Row);\n if (comparison < 0) {\n low = mid + 1;\n } else if (comparison > 0) {\n high = mid - 1;\n } else {\n return {pos: mid, found: true};\n }\n }\n return {pos: low, found: false};\n}\n\nfunction getChildEntryList(\n parentEntry: Entry,\n relationship: string,\n): MetaEntryList {\n const view = parentEntry[relationship];\n assertArray(view);\n return view as MetaEntryList;\n}\n\nfunction assertMetaEntry(v: unknown): asserts v is MetaEntry {\n assertNumber((v as Partial<MetaEntry>)[refCountSymbol]);\n}\n\nfunction getSingularEntry(parentEntry: Entry, relationship: string): MetaEntry {\n const e = parentEntry[relationship];\n assertNumber((e as Partial<MetaEntry>)[refCountSymbol]);\n return e as MetaEntry;\n}\n\nfunction makeNewMetaEntry(\n row: Row,\n schema: SourceSchema,\n withIDs: boolean,\n rc: number,\n): MetaEntry {\n if (withIDs) {\n return {...row, [refCountSymbol]: rc, [idSymbol]: makeID(row, schema)};\n }\n return {...row, [refCountSymbol]: rc};\n}\nfunction makeID(row: Row, schema: SourceSchema) {\n // optimization for case of non-compound primary key\n if (schema.primaryKey.length === 1) {\n return JSON.stringify(row[schema.primaryKey[0]]);\n }\n return JSON.stringify(schema.primaryKey.map(k => 
row[k]));\n}\n"],"mappings":";;;;;AAcA,IAAa,iBAAiB,OAAO,KAAK;AAC1C,IAAa,WAAW,OAAO,KAAK;AAwDpC,SAAgB,YACd,aACA,QACA,QACA,cACA,QACA,UAAU,OACJ;AACN,KAAI,OAAO,SACT,SAAQ,OAAO,MAAf;EACE,KAAK;EACL,KAAK;AACH,QAAK,MAAM,CAAC,cAAc,aAAa,OAAO,QAC5C,OAAO,KAAK,cACb,EAAE;IACD,MAAM,cAAc,KAAK,OAAO,cAAc,cAAc;AAC5D,SAAK,MAAM,QAAQ,WAAW,UAAU,CAAC,CACvC,aACE,aACA;KAAC,MAAM,OAAO;KAAM;KAAK,EACzB,aACA,cACA,QACA,QACD;;AAGL;EACF,KAAK,OAKH;EACF,KAAK,SAAS;GACZ,MAAM,cAAc,KAClB,OAAO,cAAc,OAAO,MAAM,kBACnC;AACD,eACE,aACA,OAAO,MAAM,QACb,aACA,cACA,QACA,QACD;AACD;;EAEF,QACE,aAAY,OAAO;;CAIzB,MAAM,EAAC,UAAU,eAAe,iBAAgB;AAChD,SAAQ,OAAO,MAAf;EACE,KAAK,OAAO;GACV,IAAI;AAEJ,OAAI,UAAU;IACZ,MAAM,WAAW,YAAY;AAC7B,QAAI,aAAa,KAAA,GAAW;AAC1B,YACE,OAAO,YAAY,UAAU,OAAO,KAAK,IAAI,KAAK,GAClD,0BAA0B,aAAa,mJACxC;AAED,cAAS;WACJ;AACL,gBAAW,iBAAiB,OAAO,KAAK,KAAK,QAAQ,SAAS,EAAE;AAE/D,iBAAgC,gBAAgB;;SAGnD,YAAW,IACT,OAAO,KAAK,KACZ,kBAAkB,aAAa,aAAa,EAC5C,QACA,QACD;AAGH,OAAI,SACF,MAAK,MAAM,CAAC,cAAc,aAAa,OAAO,QAC5C,OAAO,KAAK,cACb,EAAE;IAED,MAAM,cAAc,KAAK,OAAO,cAAc,cAAc;IAC5D,MAAM,cAAc,aAAa;AACjC,QAAI,gBAAgB,KAAA,EAClB;IAGF,MAAM,UAAU,YAAY,WACxB,KAAA,IACC,EAAE;AACP,aAAS,gBAAgB;AAEzB,SAAK,MAAM,QAAQ,WAAW,UAAU,CAAC,CACvC,aACE,UACA;KAAC,MAAM;KAAO;KAAK,EACnB,aACA,cACA,aACA,QACD;;AAIP;;EAEF,KAAK;AACH,OAAI,UAAU;IACZ,MAAM,WAAW,YAAY;AAC7B,WAAO,aAAa,KAAA,GAAW,sBAAsB;AAErD,QADW,SAAS,oBACT,EACR,aAAgC,gBAAgB,KAAA;AAEnD,aAAS;SAET,yBACE,kBAAkB,aAAa,aAAa,EAC5C,OAAO,KAAK,KACZ,OAAO,YACR;AAEH;EAEF,KAAK,SAAS;GACZ,IAAI;AACJ,OAAI,SACF,YAAW,iBAAiB,aAAa,aAAa;QACjD;IACL,MAAM,OAAO,kBAAkB,aAAa,aAAa;IACzD,MAAM,EAAC,KAAK,UAAS,aACnB,MACA,OAAO,KAAK,KACZ,OAAO,YACR;AACD,WAAO,OAAO,sBAAsB;AACpC,eAAW,KAAK;;GAGlB,MAAM,cAAc,KAClB,OAAO,cAAc,OAAO,MAAM,kBACnC;GACD,MAAM,cAAc,OAAO,cAAc,OAAO,MAAM;AACtD,OAAI,gBAAgB,KAAA,EAClB,aACE,UACA,OAAO,MAAM,QACb,aACA,OAAO,MAAM,kBACb,aACA,QACD;AAEH;;EAEF,KAAK;AACH,OAAI,UAAU;IACZ,MAAM,WAAW,YAAY;AAC7B,oBAAgB,SAAS;AACzB,cAAU,UAAU,QAAQ,QAAQ,QAAQ;UACvC;IACL,MAAM,OAAO,kBAAkB,aAAa,aAAa;AAEzD,QAAI,OAAO,YAAY,OAAO,QAAQ,KAAK,OAAO,KAAK,IAAI,KAAK,GAAG;
KACjE,MAAM,EAAC,KAAK,QAAQ,OAAO,aAAY,aACrC,MACA,OAAO,QAAQ,KACf,OAAO,YACR;AACD,YAAO,UAAU,0BAA0B;KAC3C,MAAM,WAAW,KAAK;KACtB,MAAM,EAAC,KAAK,UAAS,aACnB,MACA,OAAO,KAAK,KACZ,OAAO,YACR;AAOD,SACE,SAAS,oBAAoB,MAC5B,QAAQ,UAAU,MAAM,MAAM,QAE/B,WAAU,UAAU,QAAQ,QAAQ,QAAQ;UACvC;AAUL,eAAS;MACT,IAAI,cAAc;AAClB,UAAI,SAAS,oBAAoB,GAAG;AAClC,YAAK,OAAO,QAAQ,EAAE;AACtB,qBAAc,SAAS,MAAM,MAAM,IAAI;;MAGzC,IAAI;AACJ,UAAI,MACF,eAAc,KAAK;WACd;AACL,YAAK,OAAO,aAAa,GAAG,SAAS;AACrC,qBAAc;AACd,WAAI,SAAS,kBAAkB,EAE7B,MAAK,UADgB,EAAC,GAAG,UAAS;;AAItC,kBAAY;AACZ,gBAAU,aAAa,QAAQ,QAAQ,QAAQ;;WAE5C;KAEL,MAAM,EAAC,KAAK,UAAS,aACnB,MACA,OAAO,QAAQ,KACf,OAAO,YACR;AACD,YAAO,OAAO,sBAAsB;AACpC,eAAU,KAAK,MAAM,QAAQ,QAAQ,QAAQ;;;AAIjD;EAEF,QACE,aAAY,OAAO;;;AAIzB,SAAS,UACP,UACA,QACA,QACA,SACA;AACA,QAAO,OAAO,UAAU,OAAO,KAAK,IAAI;AACxC,KAAI,QACF,UAAS,YAAY,OAAO,OAAO,KAAK,KAAK,OAAO;;AAIxD,SAAS,IACP,KACA,MACA,QACA,SACuB;CACvB,MAAM,EAAC,KAAK,UAAS,aAAa,MAAM,KAAK,OAAO,YAAY;AAEhE,KAAI,OAAO;AACT,OAAK,KAAK;AACV;;CAEF,MAAM,WAAW,iBAAiB,KAAK,QAAQ,SAAS,EAAE;AAC1D,MAAK,OAAO,KAAK,GAAG,SAAS;AAC7B,QAAO;;AAGT,SAAS,wBACP,MACA,KACA,aACW;CACX,MAAM,EAAC,KAAK,UAAS,aAAa,MAAM,KAAK,YAAY;AACzD,QAAO,OAAO,sBAAsB;CACpC,MAAM,WAAW,KAAK;AAEtB,KADW,SAAS,oBACT,EACT,MAAK,OAAO,KAAK,EAAE;AAErB,UAAS;AAET,QAAO;;AAIT,SAAS,aACP,MACA,QACA,YACA;CACA,IAAI,MAAM;CACV,IAAI,OAAO,KAAK,SAAS;AACzB,QAAO,OAAO,MAAM;EAClB,MAAM,MAAO,MAAM,SAAU;EAC7B,MAAM,aAAa,WAAW,KAAK,MAAa,OAAc;AAC9D,MAAI,aAAa,EACf,OAAM,MAAM;WACH,aAAa,EACtB,QAAO,MAAM;MAEb,QAAO;GAAC,KAAK;GAAK,OAAO;GAAK;;AAGlC,QAAO;EAAC,KAAK;EAAK,OAAO;EAAM;;AAGjC,SAAS,kBACP,aACA,cACe;CACf,MAAM,OAAO,YAAY;AACzB,aAAY,KAAK;AACjB,QAAO;;AAGT,SAAS,gBAAgB,GAAoC;AAC3D,cAAc,EAAyB,gBAAgB;;AAGzD,SAAS,iBAAiB,aAAoB,cAAiC;CAC7E,MAAM,IAAI,YAAY;AACtB,cAAc,EAAyB,gBAAgB;AACvD,QAAO;;AAGT,SAAS,iBACP,KACA,QACA,SACA,IACW;AACX,KAAI,QACF,QAAO;EAAC,GAAG;GAAM,iBAAiB;GAAK,WAAW,OAAO,KAAK,OAAO;EAAC;AAExE,QAAO;EAAC,GAAG;GAAM,iBAAiB;EAAG;;AAEvC,SAAS,OAAO,KAAU,QAAsB;AAE9C,KAAI,OAAO,WAAW,WAAW,EAC/B,QAAO,KAAK,UAAU,IAAI,OAAO,WAAW,IAAI;AAElD,QAAO,KAAK,UAAU,OAAO,WAAW,KA
AI,MAAK,IAAI,GAAG,CAAC"}
1
+ {"version":3,"file":"view-apply-change.js","names":[],"sources":["../../../../../zql/src/ivm/view-apply-change.ts"],"sourcesContent":["import {\n assert,\n assertArray,\n assertNumber,\n unreachable,\n} from '../../../shared/src/asserts.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport type {Writable} from '../../../shared/src/writable.ts';\nimport type {Row} from '../../../zero-protocol/src/data.ts';\nimport {type Comparator, type Node} from './data.ts';\nimport {skipYields} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Entry, Format} from './view.ts';\n\nexport const refCountSymbol = Symbol('rc');\nexport const idSymbol = Symbol('id');\n\ntype MetaEntry = Writable<Entry> & {\n [refCountSymbol]: number;\n [idSymbol]?: string | undefined;\n};\ntype MetaEntryList = MetaEntry[];\n\n/**\n * `applyChange` does not consume the `relationships` of `ChildChange#node`,\n * `EditChange#node` and `EditChange#oldNode`. The `ViewChange` type\n * documents and enforces this via the type system.\n */\nexport type ViewChange =\n | AddViewChange\n | RemoveViewChange\n | ChildViewChange\n | EditViewChange;\n\nexport type RowOnlyNode = {row: Row};\n\nexport type AddViewChange = {\n type: 'add';\n node: Node;\n};\n\nexport type RemoveViewChange = {\n type: 'remove';\n node: Node;\n};\n\ntype ChildViewChange = {\n type: 'child';\n node: RowOnlyNode;\n child: {\n relationshipName: string;\n change: ViewChange;\n };\n};\n\ntype EditViewChange = {\n type: 'edit';\n node: RowOnlyNode;\n oldNode: RowOnlyNode;\n};\n\n/**\n * This is a subset of WeakMap but restricted to what we need.\n * @deprecated Not used anymore. 
This will be removed in the future.\n */\nexport interface RefCountMap {\n get(entry: Entry): number | undefined;\n set(entry: Entry, refCount: number): void;\n delete(entry: Entry): boolean;\n}\n\nexport function applyChange(\n parentEntry: Entry,\n change: ViewChange,\n schema: SourceSchema,\n relationship: string,\n format: Format,\n withIDs = false,\n): void {\n if (schema.isHidden) {\n switch (change.type) {\n case 'add':\n case 'remove':\n for (const [relationship, children] of Object.entries(\n change.node.relationships,\n )) {\n const childSchema = must(schema.relationships[relationship]);\n for (const node of skipYields(children())) {\n applyChange(\n parentEntry,\n {type: change.type, node},\n childSchema,\n relationship,\n format,\n withIDs,\n );\n }\n }\n return;\n case 'edit':\n // If hidden at this level it means that the hidden row was changed. If\n // the row was changed in such a way that it would change the\n // relationships then the edit would have been split into remove and\n // add.\n return;\n case 'child': {\n const childSchema = must(\n schema.relationships[change.child.relationshipName],\n );\n applyChange(\n parentEntry,\n change.child.change,\n childSchema,\n relationship,\n format,\n withIDs,\n );\n return;\n }\n default:\n unreachable(change);\n }\n }\n\n const {singular, relationships: childFormats} = format;\n switch (change.type) {\n case 'add': {\n let newEntry: MetaEntry | undefined;\n\n if (singular) {\n const oldEntry = parentEntry[relationship] as MetaEntry | undefined;\n if (oldEntry !== undefined) {\n assert(\n schema.compareRows(oldEntry, change.node.row) === 0,\n `Singular relationship '${relationship}' should not have multiple rows. 
You may need to declare this relationship with the \\`many\\` helper instead of the \\`one\\` helper in your schema.`,\n );\n // adding same again.\n oldEntry[refCountSymbol]++;\n } else {\n newEntry = makeNewMetaEntry(change.node.row, schema, withIDs, 1);\n\n (parentEntry as Writable<Entry>)[relationship] = newEntry;\n }\n } else {\n newEntry = addToView(\n change.node.row,\n getChildEntryList(parentEntry, relationship),\n schema,\n withIDs,\n );\n }\n\n if (newEntry) {\n for (const [relationship, children] of Object.entries(\n change.node.relationships,\n )) {\n // TODO: Is there a flag to make TypeScript complain that dictionary access might be undefined?\n const childSchema = must(schema.relationships[relationship]);\n const childFormat = childFormats[relationship];\n if (childFormat === undefined) {\n continue;\n }\n\n const newView = childFormat.singular\n ? undefined\n : ([] as MetaEntryList);\n newEntry[relationship] = newView;\n\n for (const node of skipYields(children())) {\n applyChange(\n newEntry,\n {type: 'add', node},\n childSchema,\n relationship,\n childFormat,\n withIDs,\n );\n }\n }\n }\n break;\n }\n case 'remove': {\n if (singular) {\n const oldEntry = parentEntry[relationship] as MetaEntry | undefined;\n assert(oldEntry !== undefined, 'node does not exist');\n const rc = oldEntry[refCountSymbol];\n if (rc === 1) {\n (parentEntry as Writable<Entry>)[relationship] = undefined;\n }\n oldEntry[refCountSymbol]--;\n } else {\n removeAndUpdateRefCount(\n getChildEntryList(parentEntry, relationship),\n change.node.row,\n schema.compareRows,\n );\n }\n break;\n }\n case 'child': {\n let existing: MetaEntry;\n if (singular) {\n existing = getSingularEntry(parentEntry, relationship);\n } else {\n const view = getChildEntryList(parentEntry, relationship);\n const bsResult = binarySearch(\n view,\n change.node.row,\n schema.compareRows,\n );\n assert(bsResult >= 0, 'node does not exist');\n existing = view[bsResult];\n }\n\n const childSchema = must(\n 
schema.relationships[change.child.relationshipName],\n );\n const childFormat = format.relationships[change.child.relationshipName];\n if (childFormat !== undefined) {\n applyChange(\n existing,\n change.child.change,\n childSchema,\n change.child.relationshipName,\n childFormat,\n withIDs,\n );\n }\n break;\n }\n case 'edit': {\n if (singular) {\n const existing = parentEntry[relationship];\n assertMetaEntry(existing);\n applyEdit(existing, change, schema, withIDs);\n } else {\n const view = getChildEntryList(parentEntry, relationship);\n // The position of the row in the list may have changed due to the edit.\n if (schema.compareRows(change.oldNode.row, change.node.row) !== 0) {\n const oldBsResult = binarySearch(\n view,\n change.oldNode.row,\n schema.compareRows,\n );\n assert(oldBsResult >= 0, 'old node does not exist');\n const oldPos = oldBsResult;\n const oldEntry = view[oldPos];\n const newBsResult = binarySearch(\n view,\n change.node.row,\n schema.compareRows,\n );\n const found = newBsResult >= 0;\n const pos = found ? newBsResult : ~newBsResult;\n // A special case:\n // when refCount is 1 (so the row is being moved\n // without leaving a placeholder behind), and the new pos is\n // the same as the old, or directly after the old (so after the remove\n // of the old it would be in the same pos):\n // the row does not need to be moved, it can just be edited in place.\n if (\n oldEntry[refCountSymbol] === 1 &&\n (pos === oldPos || pos - 1 === oldPos)\n ) {\n applyEdit(oldEntry, change, schema, withIDs);\n } else {\n // Move the row. If the row has > 1 ref count, an edit should\n // be received for each ref count. On the first edit, the original\n // row is moved, the edit is applied to it and its ref count is set\n // to 1. A shallow copy of the row is left at the old pos for\n // processing of the remaining edit, and the copy's ref count\n // is decremented. 
As each edit is received the ref count of the\n // copy is decrement, and the ref count of the row at the new\n // position is incremented. When the copy's ref count goes to 0,\n // it is removed.\n oldEntry[refCountSymbol]--;\n let adjustedPos = pos;\n if (oldEntry[refCountSymbol] === 0) {\n view.splice(oldPos, 1);\n adjustedPos = oldPos < pos ? pos - 1 : pos;\n }\n\n let entryToEdit;\n if (found) {\n entryToEdit = view[adjustedPos];\n } else {\n view.splice(adjustedPos, 0, oldEntry);\n entryToEdit = oldEntry;\n if (oldEntry[refCountSymbol] > 0) {\n const oldEntryCopy = {...oldEntry};\n view[oldPos] = oldEntryCopy;\n }\n }\n entryToEdit[refCountSymbol]++;\n applyEdit(entryToEdit, change, schema, withIDs);\n }\n } else {\n // Position could not have changed, so simply edit in place.\n const bsResult = binarySearch(\n view,\n change.oldNode.row,\n schema.compareRows,\n );\n assert(bsResult >= 0, 'node does not exist');\n applyEdit(view[bsResult], change, schema, withIDs);\n }\n }\n\n break;\n }\n default:\n unreachable(change);\n }\n}\n\nfunction applyEdit(\n existing: MetaEntry,\n change: EditViewChange,\n schema: SourceSchema,\n withIDs: boolean,\n) {\n Object.assign(existing, change.node.row);\n if (withIDs) {\n existing[idSymbol] = makeID(change.node.row, schema);\n }\n}\n\nfunction addToView(\n row: Row,\n view: MetaEntryList,\n schema: SourceSchema,\n withIDs: boolean,\n): MetaEntry | undefined {\n const bsResult = binarySearch(view, row, schema.compareRows);\n\n if (bsResult >= 0) {\n view[bsResult][refCountSymbol]++;\n return undefined;\n }\n const pos = ~bsResult;\n const newEntry = makeNewMetaEntry(row, schema, withIDs, 1);\n view.splice(pos, 0, newEntry);\n return newEntry;\n}\n\nfunction removeAndUpdateRefCount(\n view: MetaEntryList,\n row: Row,\n compareRows: Comparator,\n): MetaEntry {\n const bsResult = binarySearch(view, row, compareRows);\n assert(bsResult >= 0, 'node does not exist');\n const oldEntry = view[bsResult];\n const rc = 
oldEntry[refCountSymbol];\n if (rc === 1) {\n view.splice(bsResult, 1);\n }\n oldEntry[refCountSymbol]--;\n\n return oldEntry;\n}\n\n/**\n * Binary search a sorted view.\n *\n * Returns the index of `target` if found (a non-negative integer), or\n * `~insertionIndex` if not found (always negative, following Java's\n * `Arrays.binarySearch` convention). To get the insertion index when\n * not found use the bitwise NOT: `~result`.\n *\n * This avoids allocating a `{pos, found}` result object on every call.\n */\nfunction binarySearch(\n view: MetaEntryList,\n target: Row,\n comparator: Comparator,\n): number {\n let low = 0;\n let high = view.length - 1;\n while (low <= high) {\n const mid = (low + high) >>> 1;\n const comparison = comparator(view[mid] as Row, target as Row);\n if (comparison < 0) {\n low = mid + 1;\n } else if (comparison > 0) {\n high = mid - 1;\n } else {\n return mid; // found\n }\n }\n return ~low; // not found; use ~result to get insertion index\n}\n\nfunction getChildEntryList(\n parentEntry: Entry,\n relationship: string,\n): MetaEntryList {\n const view = parentEntry[relationship];\n assertArray(view);\n return view as MetaEntryList;\n}\n\nfunction assertMetaEntry(v: unknown): asserts v is MetaEntry {\n assertNumber((v as Partial<MetaEntry>)[refCountSymbol]);\n}\n\nfunction getSingularEntry(parentEntry: Entry, relationship: string): MetaEntry {\n const e = parentEntry[relationship];\n assertNumber((e as Partial<MetaEntry>)[refCountSymbol]);\n return e as MetaEntry;\n}\n\nfunction makeNewMetaEntry(\n row: Row,\n schema: SourceSchema,\n withIDs: boolean,\n rc: number,\n): MetaEntry {\n if (withIDs) {\n return {...row, [refCountSymbol]: rc, [idSymbol]: makeID(row, schema)};\n }\n return {...row, [refCountSymbol]: rc};\n}\nfunction makeID(row: Row, schema: SourceSchema) {\n // optimization for case of non-compound primary key\n if (schema.primaryKey.length === 1) {\n return JSON.stringify(row[schema.primaryKey[0]]);\n }\n return 
JSON.stringify(schema.primaryKey.map(k => row[k]));\n}\n"],"mappings":";;;;;AAcA,IAAa,iBAAiB,OAAO,KAAK;AAC1C,IAAa,WAAW,OAAO,KAAK;AAwDpC,SAAgB,YACd,aACA,QACA,QACA,cACA,QACA,UAAU,OACJ;AACN,KAAI,OAAO,SACT,SAAQ,OAAO,MAAf;EACE,KAAK;EACL,KAAK;AACH,QAAK,MAAM,CAAC,cAAc,aAAa,OAAO,QAC5C,OAAO,KAAK,cACb,EAAE;IACD,MAAM,cAAc,KAAK,OAAO,cAAc,cAAc;AAC5D,SAAK,MAAM,QAAQ,WAAW,UAAU,CAAC,CACvC,aACE,aACA;KAAC,MAAM,OAAO;KAAM;KAAK,EACzB,aACA,cACA,QACA,QACD;;AAGL;EACF,KAAK,OAKH;EACF,KAAK,SAAS;GACZ,MAAM,cAAc,KAClB,OAAO,cAAc,OAAO,MAAM,kBACnC;AACD,eACE,aACA,OAAO,MAAM,QACb,aACA,cACA,QACA,QACD;AACD;;EAEF,QACE,aAAY,OAAO;;CAIzB,MAAM,EAAC,UAAU,eAAe,iBAAgB;AAChD,SAAQ,OAAO,MAAf;EACE,KAAK,OAAO;GACV,IAAI;AAEJ,OAAI,UAAU;IACZ,MAAM,WAAW,YAAY;AAC7B,QAAI,aAAa,KAAA,GAAW;AAC1B,YACE,OAAO,YAAY,UAAU,OAAO,KAAK,IAAI,KAAK,GAClD,0BAA0B,aAAa,mJACxC;AAED,cAAS;WACJ;AACL,gBAAW,iBAAiB,OAAO,KAAK,KAAK,QAAQ,SAAS,EAAE;AAE/D,iBAAgC,gBAAgB;;SAGnD,YAAW,UACT,OAAO,KAAK,KACZ,kBAAkB,aAAa,aAAa,EAC5C,QACA,QACD;AAGH,OAAI,SACF,MAAK,MAAM,CAAC,cAAc,aAAa,OAAO,QAC5C,OAAO,KAAK,cACb,EAAE;IAED,MAAM,cAAc,KAAK,OAAO,cAAc,cAAc;IAC5D,MAAM,cAAc,aAAa;AACjC,QAAI,gBAAgB,KAAA,EAClB;IAGF,MAAM,UAAU,YAAY,WACxB,KAAA,IACC,EAAE;AACP,aAAS,gBAAgB;AAEzB,SAAK,MAAM,QAAQ,WAAW,UAAU,CAAC,CACvC,aACE,UACA;KAAC,MAAM;KAAO;KAAK,EACnB,aACA,cACA,aACA,QACD;;AAIP;;EAEF,KAAK;AACH,OAAI,UAAU;IACZ,MAAM,WAAW,YAAY;AAC7B,WAAO,aAAa,KAAA,GAAW,sBAAsB;AAErD,QADW,SAAS,oBACT,EACR,aAAgC,gBAAgB,KAAA;AAEnD,aAAS;SAET,yBACE,kBAAkB,aAAa,aAAa,EAC5C,OAAO,KAAK,KACZ,OAAO,YACR;AAEH;EAEF,KAAK,SAAS;GACZ,IAAI;AACJ,OAAI,SACF,YAAW,iBAAiB,aAAa,aAAa;QACjD;IACL,MAAM,OAAO,kBAAkB,aAAa,aAAa;IACzD,MAAM,WAAW,aACf,MACA,OAAO,KAAK,KACZ,OAAO,YACR;AACD,WAAO,YAAY,GAAG,sBAAsB;AAC5C,eAAW,KAAK;;GAGlB,MAAM,cAAc,KAClB,OAAO,cAAc,OAAO,MAAM,kBACnC;GACD,MAAM,cAAc,OAAO,cAAc,OAAO,MAAM;AACtD,OAAI,gBAAgB,KAAA,EAClB,aACE,UACA,OAAO,MAAM,QACb,aACA,OAAO,MAAM,kBACb,aACA,QACD;AAEH;;EAEF,KAAK;AACH,OAAI,UAAU;IACZ,MAAM,WAAW,YAAY;AAC7B,oBAAgB,SAAS;AACzB,cAAU,UAAU,QAAQ,QAAQ,QAAQ;UACvC;IACL,MAAM,OAAO,kBAAkB,aAAa,aAAa;AAEzD,QAAI,OAAO,YAAY,OAAO
,QAAQ,KAAK,OAAO,KAAK,IAAI,KAAK,GAAG;KACjE,MAAM,cAAc,aAClB,MACA,OAAO,QAAQ,KACf,OAAO,YACR;AACD,YAAO,eAAe,GAAG,0BAA0B;KACnD,MAAM,SAAS;KACf,MAAM,WAAW,KAAK;KACtB,MAAM,cAAc,aAClB,MACA,OAAO,KAAK,KACZ,OAAO,YACR;KACD,MAAM,QAAQ,eAAe;KAC7B,MAAM,MAAM,QAAQ,cAAc,CAAC;AAOnC,SACE,SAAS,oBAAoB,MAC5B,QAAQ,UAAU,MAAM,MAAM,QAE/B,WAAU,UAAU,QAAQ,QAAQ,QAAQ;UACvC;AAUL,eAAS;MACT,IAAI,cAAc;AAClB,UAAI,SAAS,oBAAoB,GAAG;AAClC,YAAK,OAAO,QAAQ,EAAE;AACtB,qBAAc,SAAS,MAAM,MAAM,IAAI;;MAGzC,IAAI;AACJ,UAAI,MACF,eAAc,KAAK;WACd;AACL,YAAK,OAAO,aAAa,GAAG,SAAS;AACrC,qBAAc;AACd,WAAI,SAAS,kBAAkB,EAE7B,MAAK,UADgB,EAAC,GAAG,UAAS;;AAItC,kBAAY;AACZ,gBAAU,aAAa,QAAQ,QAAQ,QAAQ;;WAE5C;KAEL,MAAM,WAAW,aACf,MACA,OAAO,QAAQ,KACf,OAAO,YACR;AACD,YAAO,YAAY,GAAG,sBAAsB;AAC5C,eAAU,KAAK,WAAW,QAAQ,QAAQ,QAAQ;;;AAItD;EAEF,QACE,aAAY,OAAO;;;AAIzB,SAAS,UACP,UACA,QACA,QACA,SACA;AACA,QAAO,OAAO,UAAU,OAAO,KAAK,IAAI;AACxC,KAAI,QACF,UAAS,YAAY,OAAO,OAAO,KAAK,KAAK,OAAO;;AAIxD,SAAS,UACP,KACA,MACA,QACA,SACuB;CACvB,MAAM,WAAW,aAAa,MAAM,KAAK,OAAO,YAAY;AAE5D,KAAI,YAAY,GAAG;AACjB,OAAK,UAAU;AACf;;CAEF,MAAM,MAAM,CAAC;CACb,MAAM,WAAW,iBAAiB,KAAK,QAAQ,SAAS,EAAE;AAC1D,MAAK,OAAO,KAAK,GAAG,SAAS;AAC7B,QAAO;;AAGT,SAAS,wBACP,MACA,KACA,aACW;CACX,MAAM,WAAW,aAAa,MAAM,KAAK,YAAY;AACrD,QAAO,YAAY,GAAG,sBAAsB;CAC5C,MAAM,WAAW,KAAK;AAEtB,KADW,SAAS,oBACT,EACT,MAAK,OAAO,UAAU,EAAE;AAE1B,UAAS;AAET,QAAO;;;;;;;;;;;;AAaT,SAAS,aACP,MACA,QACA,YACQ;CACR,IAAI,MAAM;CACV,IAAI,OAAO,KAAK,SAAS;AACzB,QAAO,OAAO,MAAM;EAClB,MAAM,MAAO,MAAM,SAAU;EAC7B,MAAM,aAAa,WAAW,KAAK,MAAa,OAAc;AAC9D,MAAI,aAAa,EACf,OAAM,MAAM;WACH,aAAa,EACtB,QAAO,MAAM;MAEb,QAAO;;AAGX,QAAO,CAAC;;AAGV,SAAS,kBACP,aACA,cACe;CACf,MAAM,OAAO,YAAY;AACzB,aAAY,KAAK;AACjB,QAAO;;AAGT,SAAS,gBAAgB,GAAoC;AAC3D,cAAc,EAAyB,gBAAgB;;AAGzD,SAAS,iBAAiB,aAAoB,cAAiC;CAC7E,MAAM,IAAI,YAAY;AACtB,cAAc,EAAyB,gBAAgB;AACvD,QAAO;;AAGT,SAAS,iBACP,KACA,QACA,SACA,IACW;AACX,KAAI,QACF,QAAO;EAAC,GAAG;GAAM,iBAAiB;GAAK,WAAW,OAAO,KAAK,OAAO;EAAC;AAExE,QAAO;EAAC,GAAG;GAAM,iBAAiB;EAAG;;AAEvC,SAAS,OAAO,KAAU,QAAsB;AAE9C,KAAI,OAAO,WAAW,WAAW,EAC/B,QAAO,KAAK,UAAU,IAAI,OA
AO,WAAW,IAAI;AAElD,QAAO,KAAK,UAAU,OAAO,WAAW,KAAI,MAAK,IAAI,GAAG,CAAC"}
@@ -1 +1 @@
1
- {"version":3,"file":"planner-debug.d.ts","sourceRoot":"","sources":["../../../../../zql/src/planner/planner-debug.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,CAAC,MAAM,+BAA+B,CAAC;AACxD,OAAO,KAAK,EACV,SAAS,EACT,QAAQ,EAET,MAAM,mCAAmC,CAAC;AAC3C,OAAO,KAAK,EACV,2BAA2B,EAC3B,+BAA+B,EAC/B,iCAAiC,EACjC,6BAA6B,EAC7B,kBAAkB,EAClB,yBAAyB,EAC1B,MAAM,oDAAoD,CAAC;AAC5D,OAAO,KAAK,EAAC,iBAAiB,EAAC,MAAM,yBAAyB,CAAC;AAC/D,OAAO,KAAK,EAAC,YAAY,EAAE,QAAQ,EAAC,MAAM,mBAAmB,CAAC;AAC9D,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,oBAAoB,CAAC;AAElD;;;;GAIG;AAEH;;GAEG;AACH,MAAM,MAAM,iBAAiB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,2BAA2B,CAAC,CAAC;AAE5E;;GAEG;AACH,MAAM,MAAM,oBAAoB,GAAG;IACjC,IAAI,EAAE,kBAAkB,CAAC;IACzB,aAAa,EAAE,MAAM,CAAC;IACtB,KAAK,EAAE,KAAK,CAAC;QACX,UAAU,EAAE,MAAM,CAAC;QACnB,IAAI,EAAE,MAAM,CAAC;QACb,YAAY,EAAE,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;QAC3C,MAAM,EAAE,OAAO,CAAC;QAChB,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,iBAAiB,GAAG,SAAS,CAAC,CAAC;QAC3D,eAAe,EAAE,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC,CAAC;KAC/D,CAAC,CAAC;CACJ,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,uBAAuB,GAAG,CAAC,CAAC,KAAK,CAC3C,OAAO,iCAAiC,CACzC,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,0BAA0B,GAAG;IACvC,IAAI,EAAE,wBAAwB,CAAC;IAC/B,aAAa,EAAE,MAAM,CAAC;IACtB,qBAAqB,EAAE,KAAK,CAAC;QAC3B,UAAU,EAAE,MAAM,CAAC;QACnB,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,iBAAiB,GAAG,SAAS,CAAC,CAAC;QAC3D,eAAe,EAAE,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC,CAAC;KAC/D,CAAC,CAAC;CACJ,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,iBAAiB,GAAG;IAC9B,IAAI,EAAE,eAAe,CAAC;IACtB,aAAa,EAAE,MAAM,CAAC;IACtB,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,MAAM,CAAC;IACpB,UAAU,EAAE,KAAK,CAAC;QAChB,IAAI,EAAE,MAAM,CAAC;QACb,IAAI,EAAE,QAAQ,CAAC;KAChB,CAAC,CAAC;IAEH,YAAY,EAAE,SAAS,CAAC;CACzB,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,eAAe,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,yBAAyB,CAAC,CAAC;AAExE;;GAEG;AACH,MAAM,MAAM,qBAAqB,GAAG,CAAC,CAAC,KAAK,CACzC,OAAO,+BAA+B,CACvC,CAAC;AAEF;;;;GAIG;AACH,MAAM,MAAM,aAAa,GAAG;IAC1B,IAAI,EAAE,WAAW,CAAC;IAClB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,QAAQ,EAAE,YAAY,GAAG,MAAM,GAAG,SAAS,GAAG,QAAQ,GAAG,UAA
U,CAAC;IACpE,IAAI,EAAE,MAAM,CAAC;IACb,aAAa,EAAE,MAAM,EAAE,CAAC;IACxB,0BAA0B,EAAE,MAAM,CAAC;IACnC,YAAY,EAAE,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;IAC3C,OAAO,CAAC,EAAE,SAAS,GAAG,SAAS,CAAC;IAChC,QAAQ,CAAC,EAAE,QAAQ,GAAG,SAAS,CAAC;IAChC,QAAQ,CAAC,EAAE,QAAQ,GAAG,SAAS,CAAC;CACjC,CAAC;AAEF;;;;GAIG;AACH,MAAM,MAAM,mBAAmB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,6BAA6B,CAAC,CAAC;AAEhF;;GAEG;AACH,MAAM,MAAM,cAAc,GACtB,iBAAiB,GACjB,oBAAoB,GACpB,uBAAuB,GACvB,0BAA0B,GAC1B,iBAAiB,GACjB,eAAe,GACf,qBAAqB,GACrB,aAAa,GACb,mBAAmB,CAAC;AAExB;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B,GAAG,CAAC,KAAK,EAAE,cAAc,GAAG,IAAI,CAAC;CAClC;AAED;;;GAGG;AACH,qBAAa,mBAAoB,YAAW,YAAY;IACtD,QAAQ,CAAC,MAAM,EAAE,cAAc,EAAE,CAAM;IACvC,OAAO,CAAC,cAAc,CAAK;IAE3B,GAAG,CAAC,KAAK,EAAE,cAAc,GAAG,IAAI;IAehC;;OAEG;IACH,SAAS,CAAC,CAAC,SAAS,cAAc,CAAC,MAAM,CAAC,EACxC,IAAI,EAAE,CAAC,GACN,OAAO,CAAC,cAAc,EAAE;QAAC,IAAI,EAAE,CAAC,CAAA;KAAC,CAAC,EAAE;IAOvC;;OAEG;IACH,MAAM,IAAI,MAAM;CAGjB;AA8QD;;;;GAIG;AACH,wBAAgB,wBAAwB,CACtC,MAAM,EAAE,cAAc,EAAE,GACvB,kBAAkB,EAAE,CAEtB;AAED;;;;;;;;;;;;;;GAcG;AACH,wBAAgB,mBAAmB,CACjC,MAAM,EAAE,kBAAkB,EAAE,GAAG,cAAc,EAAE,GAC9C,MAAM,CAyDR"}
1
+ {"version":3,"file":"planner-debug.d.ts","sourceRoot":"","sources":["../../../../../zql/src/planner/planner-debug.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,CAAC,MAAM,+BAA+B,CAAC;AACxD,OAAO,KAAK,EACV,SAAS,EACT,QAAQ,EAET,MAAM,mCAAmC,CAAC;AAC3C,OAAO,KAAK,EACV,2BAA2B,EAC3B,+BAA+B,EAC/B,iCAAiC,EACjC,6BAA6B,EAC7B,kBAAkB,EAClB,yBAAyB,EAC1B,MAAM,oDAAoD,CAAC;AAC5D,OAAO,KAAK,EAAC,iBAAiB,EAAC,MAAM,yBAAyB,CAAC;AAC/D,OAAO,KAAK,EAAC,YAAY,EAAE,QAAQ,EAAC,MAAM,mBAAmB,CAAC;AAC9D,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,oBAAoB,CAAC;AAElD;;;;GAIG;AAEH;;GAEG;AACH,MAAM,MAAM,iBAAiB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,2BAA2B,CAAC,CAAC;AAE5E;;GAEG;AACH,MAAM,MAAM,oBAAoB,GAAG;IACjC,IAAI,EAAE,kBAAkB,CAAC;IACzB,aAAa,EAAE,MAAM,CAAC;IACtB,KAAK,EAAE,KAAK,CAAC;QACX,UAAU,EAAE,MAAM,CAAC;QACnB,IAAI,EAAE,MAAM,CAAC;QACb,YAAY,EAAE,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;QAC3C,MAAM,EAAE,OAAO,CAAC;QAChB,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,iBAAiB,GAAG,SAAS,CAAC,CAAC;QAC3D,eAAe,EAAE,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC,CAAC;KAC/D,CAAC,CAAC;CACJ,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,uBAAuB,GAAG,CAAC,CAAC,KAAK,CAC3C,OAAO,iCAAiC,CACzC,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,0BAA0B,GAAG;IACvC,IAAI,EAAE,wBAAwB,CAAC;IAC/B,aAAa,EAAE,MAAM,CAAC;IACtB,qBAAqB,EAAE,KAAK,CAAC;QAC3B,UAAU,EAAE,MAAM,CAAC;QACnB,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,iBAAiB,GAAG,SAAS,CAAC,CAAC;QAC3D,eAAe,EAAE,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC,CAAC;KAC/D,CAAC,CAAC;CACJ,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,iBAAiB,GAAG;IAC9B,IAAI,EAAE,eAAe,CAAC;IACtB,aAAa,EAAE,MAAM,CAAC;IACtB,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,MAAM,CAAC;IACpB,UAAU,EAAE,KAAK,CAAC;QAChB,IAAI,EAAE,MAAM,CAAC;QACb,IAAI,EAAE,QAAQ,CAAC;KAChB,CAAC,CAAC;IAEH,YAAY,EAAE,SAAS,CAAC;CACzB,CAAC;AAEF;;GAEG;AACH,MAAM,MAAM,eAAe,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,yBAAyB,CAAC,CAAC;AAExE;;GAEG;AACH,MAAM,MAAM,qBAAqB,GAAG,CAAC,CAAC,KAAK,CACzC,OAAO,+BAA+B,CACvC,CAAC;AAEF;;;;GAIG;AACH,MAAM,MAAM,aAAa,GAAG;IAC1B,IAAI,EAAE,WAAW,CAAC;IAClB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,QAAQ,EAAE,YAAY,GAAG,MAAM,GAAG,SAAS,GAAG,QAAQ,GAAG,UAA
U,CAAC;IACpE,IAAI,EAAE,MAAM,CAAC;IACb,aAAa,EAAE,MAAM,EAAE,CAAC;IACxB,0BAA0B,EAAE,MAAM,CAAC;IACnC,YAAY,EAAE,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;IAC3C,OAAO,CAAC,EAAE,SAAS,GAAG,SAAS,CAAC;IAChC,QAAQ,CAAC,EAAE,QAAQ,GAAG,SAAS,CAAC;IAChC,QAAQ,CAAC,EAAE,QAAQ,GAAG,SAAS,CAAC;CACjC,CAAC;AAEF;;;;GAIG;AACH,MAAM,MAAM,mBAAmB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,6BAA6B,CAAC,CAAC;AAEhF;;GAEG;AACH,MAAM,MAAM,cAAc,GACtB,iBAAiB,GACjB,oBAAoB,GACpB,uBAAuB,GACvB,0BAA0B,GAC1B,iBAAiB,GACjB,eAAe,GACf,qBAAqB,GACrB,aAAa,GACb,mBAAmB,CAAC;AAExB;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B,GAAG,CAAC,KAAK,EAAE,cAAc,GAAG,IAAI,CAAC;CAClC;AAED;;;GAGG;AACH,qBAAa,mBAAoB,YAAW,YAAY;IACtD,QAAQ,CAAC,MAAM,EAAE,cAAc,EAAE,CAAM;IACvC,OAAO,CAAC,cAAc,CAAK;IAE3B,GAAG,CAAC,KAAK,EAAE,cAAc,GAAG,IAAI;IAehC;;OAEG;IACH,SAAS,CAAC,CAAC,SAAS,cAAc,CAAC,MAAM,CAAC,EACxC,IAAI,EAAE,CAAC,GACN,OAAO,CAAC,cAAc,EAAE;QAAC,IAAI,EAAE,CAAC,CAAA;KAAC,CAAC,EAAE;IAOvC;;OAEG;IACH,MAAM,IAAI,MAAM;CAGjB;AA2QD;;;;GAIG;AACH,wBAAgB,wBAAwB,CACtC,MAAM,EAAE,cAAc,EAAE,GACvB,kBAAkB,EAAE,CAEtB;AAED;;;;;;;;;;;;;;GAcG;AACH,wBAAgB,mBAAmB,CACjC,MAAM,EAAE,kBAAkB,EAAE,GAAG,cAAc,EAAE,GAC9C,MAAM,CAyDR"}
@@ -1 +1 @@
1
- {"version":3,"file":"planner-debug.js","names":[],"sources":["../../../../../zql/src/planner/planner-debug.ts"],"sourcesContent":["import type * as v from '../../../shared/src/valita.ts';\nimport type {\n Condition,\n Ordering,\n ValuePosition,\n} from '../../../zero-protocol/src/ast.ts';\nimport type {\n attemptStartEventJSONSchema,\n bestPlanSelectedEventJSONSchema,\n connectionSelectedEventJSONSchema,\n nodeConstraintEventJSONSchema,\n PlanDebugEventJSON,\n planFailedEventJSONSchema,\n} from '../../../zero-protocol/src/analyze-query-result.ts';\nimport type {PlannerConstraint} from './planner-constraint.ts';\nimport type {CostEstimate, JoinType} from './planner-node.ts';\nimport type {PlanState} from './planner-graph.ts';\n\n/**\n * Structured debug events emitted during query planning.\n * These events can be accumulated, printed, or analyzed to understand\n * the planner's decision-making process.\n */\n\n/**\n * Starting a new planning attempt with a different root connection.\n */\nexport type AttemptStartEvent = v.Infer<typeof attemptStartEventJSONSchema>;\n\n/**\n * Snapshot of connection costs before selecting the next connection.\n */\nexport type ConnectionCostsEvent = {\n type: 'connection-costs';\n attemptNumber: number;\n costs: Array<{\n connection: string;\n cost: number;\n costEstimate: Omit<CostEstimate, 'fanout'>;\n pinned: boolean;\n constraints: Record<string, PlannerConstraint | undefined>;\n constraintCosts: Record<string, Omit<CostEstimate, 'fanout'>>;\n }>;\n};\n\n/**\n * A connection was chosen and pinned.\n */\nexport type ConnectionSelectedEvent = v.Infer<\n typeof connectionSelectedEventJSONSchema\n>;\n\n/**\n * Constraints have been propagated through the graph.\n */\nexport type ConstraintsPropagatedEvent = {\n type: 'constraints-propagated';\n attemptNumber: number;\n connectionConstraints: Array<{\n connection: string;\n constraints: Record<string, PlannerConstraint | undefined>;\n constraintCosts: Record<string, 
Omit<CostEstimate, 'fanout'>>;\n }>;\n};\n\n/**\n * A complete plan was found for this attempt.\n */\nexport type PlanCompleteEvent = {\n type: 'plan-complete';\n attemptNumber: number;\n totalCost: number;\n flipPattern: number; // Bitmask indicating which joins are flipped\n joinStates: Array<{\n join: string;\n type: JoinType;\n }>;\n // Planning snapshot that can be restored and applied to AST\n planSnapshot: PlanState;\n};\n\n/**\n * Planning attempt failed (e.g., unflippable join).\n */\nexport type PlanFailedEvent = v.Infer<typeof planFailedEventJSONSchema>;\n\n/**\n * The best plan across all attempts was selected.\n */\nexport type BestPlanSelectedEvent = v.Infer<\n typeof bestPlanSelectedEventJSONSchema\n>;\n\n/**\n * A node computed its cost estimate during planning.\n * Emitted by nodes during estimateCost() traversal.\n * attemptNumber is added by the debugger.\n */\nexport type NodeCostEvent = {\n type: 'node-cost';\n attemptNumber?: number;\n nodeType: 'connection' | 'join' | 'fan-out' | 'fan-in' | 'terminus';\n node: string;\n branchPattern: number[];\n downstreamChildSelectivity: number;\n costEstimate: Omit<CostEstimate, 'fanout'>;\n filters?: Condition | undefined; // Only for connections\n ordering?: Ordering | undefined; // Only for connections\n joinType?: JoinType | undefined; // Only for joins\n};\n\n/**\n * A node received constraints during constraint propagation.\n * Emitted by nodes during propagateConstraints() traversal.\n * attemptNumber is added by the debugger.\n */\nexport type NodeConstraintEvent = v.Infer<typeof nodeConstraintEventJSONSchema>;\n\n/**\n * Union of all debug event types.\n */\nexport type PlanDebugEvent =\n | AttemptStartEvent\n | ConnectionCostsEvent\n | ConnectionSelectedEvent\n | ConstraintsPropagatedEvent\n | PlanCompleteEvent\n | PlanFailedEvent\n | BestPlanSelectedEvent\n | NodeCostEvent\n | NodeConstraintEvent;\n\n/**\n * Interface for objects that receive debug events during planning.\n */\nexport interface 
PlanDebugger {\n log(event: PlanDebugEvent): void;\n}\n\n/**\n * Simple accumulator debugger that stores all events.\n * Useful for tests and debugging.\n */\nexport class AccumulatorDebugger implements PlanDebugger {\n readonly events: PlanDebugEvent[] = [];\n private currentAttempt = 0;\n\n log(event: PlanDebugEvent): void {\n // Track current attempt number\n if (event.type === 'attempt-start') {\n this.currentAttempt = event.attemptNumber;\n }\n\n // Add attempt number to node events\n if (event.type === 'node-cost' || event.type === 'node-constraint') {\n (event as NodeCostEvent | NodeConstraintEvent).attemptNumber =\n this.currentAttempt;\n }\n\n this.events.push(event);\n }\n\n /**\n * Get all events of a specific type.\n */\n getEvents<T extends PlanDebugEvent['type']>(\n type: T,\n ): Extract<PlanDebugEvent, {type: T}>[] {\n return this.events.filter(e => e.type === type) as Extract<\n PlanDebugEvent,\n {type: T}\n >[];\n }\n\n /**\n * Format events as a human-readable string.\n */\n format(): string {\n return formatPlannerEvents(this.events);\n }\n}\n\n/**\n * Format a constraint object as a human-readable string.\n */\nfunction formatConstraint(\n constraint: PlannerConstraint | Record<string, unknown> | null | undefined,\n): string {\n if (!constraint) return '{}';\n const keys = Object.keys(constraint);\n if (keys.length === 0) return '{}';\n return '{' + keys.join(', ') + '}';\n}\n\n/**\n * Format a ValuePosition (column, literal, or static parameter) as a human-readable string.\n */\nfunction formatValuePosition(value: ValuePosition): string {\n switch (value.type) {\n case 'column':\n return value.name;\n case 'literal':\n // Format literal values with SQL-style quoting for strings\n if (typeof value.value === 'string') {\n return `'${value.value}'`;\n }\n return JSON.stringify(value.value);\n case 'static':\n return `@${value.anchor}.${Array.isArray(value.field) ? 
value.field.join('.') : value.field}`;\n }\n}\n\n/**\n * Format a Condition (filter) as a human-readable string.\n */\nfunction formatFilter(filter: Condition | undefined): string {\n if (!filter) return 'none';\n\n switch (filter.type) {\n case 'simple':\n return `${formatValuePosition(filter.left)} ${filter.op} ${formatValuePosition(filter.right)}`;\n case 'and':\n return `(${filter.conditions.map(formatFilter).join(' AND ')})`;\n case 'or':\n return `(${filter.conditions.map(formatFilter).join(' OR ')})`;\n case 'correlatedSubquery':\n return `EXISTS(${filter.related.subquery.table})`;\n default:\n return JSON.stringify(filter);\n }\n}\n\n/**\n * Format an Ordering as a human-readable string.\n */\nfunction formatOrdering(ordering: Ordering | undefined): string {\n if (!ordering || ordering.length === 0) return 'none';\n return ordering\n .map(([field, direction]) => `${field} ${direction}`)\n .join(', ');\n}\n\n/**\n * Format a compact summary for a single planning attempt.\n */\nfunction formatAttemptSummary(\n attemptNum: number,\n events: (PlanDebugEvent | PlanDebugEventJSON)[],\n): string[] {\n const lines: string[] = [];\n\n // Find the attempt-start event to get total attempts\n const startEvent = events.find(e => e.type === 'attempt-start') as\n | AttemptStartEvent\n | undefined;\n const totalAttempts = startEvent?.totalAttempts ?? '?';\n\n // Calculate number of bits needed for pattern\n const numBits =\n typeof totalAttempts === 'number'\n ? 
Math.ceil(Math.log2(totalAttempts)) || 1\n : 1;\n const bitPattern = attemptNum.toString(2).padStart(numBits, '0');\n\n lines.push(\n `[Attempt ${attemptNum + 1}/${totalAttempts}] Pattern ${attemptNum} (${bitPattern})`,\n );\n\n // Collect connection costs (use array to preserve all connections, including duplicates)\n const connectionCostEvents: (\n | NodeCostEvent\n | Extract<PlanDebugEventJSON, {type: 'node-cost'}>\n )[] = [];\n const connectionConstraintEvents: (\n | NodeConstraintEvent\n | Extract<PlanDebugEventJSON, {type: 'node-constraint'}>\n )[] = [];\n\n for (const event of events) {\n if (event.type === 'node-cost' && event.nodeType === 'connection') {\n connectionCostEvents.push(event);\n }\n if (event.type === 'node-constraint' && event.nodeType === 'connection') {\n connectionConstraintEvents.push(event);\n }\n }\n\n // Show connection summary\n if (connectionCostEvents.length > 0) {\n lines.push(' Connections:');\n for (const cost of connectionCostEvents) {\n // Find matching constraint event (same node name and branch pattern)\n const constraint = connectionConstraintEvents.find(\n c =>\n c.node === cost.node &&\n c.branchPattern.join(',') === cost.branchPattern.join(','),\n )?.constraint;\n\n const constraintStr = formatConstraint(constraint);\n const filterStr = formatFilter(cost.filters);\n const orderingStr = formatOrdering(cost.ordering);\n const limitStr =\n cost.costEstimate.limit !== undefined\n ? 
cost.costEstimate.limit.toString()\n : 'none';\n\n lines.push(` ${cost.node}:`);\n lines.push(\n ` cost=${cost.costEstimate.cost.toFixed(2)}, startup=${cost.costEstimate.startupCost.toFixed(2)}, scan=${cost.costEstimate.scanEst.toFixed(2)}`,\n );\n lines.push(\n ` rows=${cost.costEstimate.returnedRows.toFixed(2)}, selectivity=${cost.costEstimate.selectivity.toFixed(8)}, limit=${limitStr}`,\n );\n lines.push(\n ` downstreamChildSelectivity=${cost.downstreamChildSelectivity.toFixed(8)}`,\n );\n lines.push(` constraints=${constraintStr}`);\n lines.push(` filters=${filterStr}`);\n lines.push(` ordering=${orderingStr}`);\n }\n }\n\n // Collect join costs from node-cost events\n const joinCosts: (\n | NodeCostEvent\n | Extract<PlanDebugEventJSON, {type: 'node-cost'}>\n )[] = [];\n for (const event of events) {\n if (event.type === 'node-cost' && event.nodeType === 'join') {\n joinCosts.push(event);\n }\n }\n\n if (joinCosts.length > 0) {\n lines.push(' Joins:');\n for (const cost of joinCosts) {\n const typeStr = cost.joinType ? ` (${cost.joinType})` : '';\n const limitStr =\n cost.costEstimate.limit !== undefined\n ? 
cost.costEstimate.limit.toString()\n : 'none';\n\n lines.push(` ${cost.node}${typeStr}:`);\n lines.push(\n ` cost=${cost.costEstimate.cost.toFixed(2)}, startup=${cost.costEstimate.startupCost.toFixed(2)}, scan=${cost.costEstimate.scanEst.toFixed(2)}`,\n );\n lines.push(\n ` rows=${cost.costEstimate.returnedRows.toFixed(2)}, selectivity=${cost.costEstimate.selectivity.toFixed(8)}, limit=${limitStr}`,\n );\n lines.push(\n ` downstreamChildSelectivity=${cost.downstreamChildSelectivity.toFixed(8)}`,\n );\n }\n }\n\n // Find completion/failure events\n const completeEvent = events.find(e => e.type === 'plan-complete') as\n | PlanCompleteEvent\n | undefined;\n const failedEvent = events.find(e => e.type === 'plan-failed') as\n | PlanFailedEvent\n | undefined;\n\n // Show final status\n\n if (completeEvent) {\n lines.push(\n ` ✓ Plan complete: total cost = ${completeEvent.totalCost.toFixed(2)}`,\n );\n } else if (failedEvent) {\n lines.push(` ✗ Plan failed: ${failedEvent.reason}`);\n }\n\n return lines;\n}\n\n/**\n * Convert undefined values to null in a constraint object for JSON serialization.\n * PlannerConstraint uses Record<string, undefined> which loses keys during JSON.stringify.\n */\nfunction convertConstraintUndefinedToNull(\n constraint: PlannerConstraint | Record<string, unknown> | undefined | null,\n): Record<string, unknown> | undefined | null {\n if (constraint === undefined) {\n return undefined;\n }\n if (constraint === null) {\n return null;\n }\n const result: Record<string, unknown> = {};\n for (const [key, val] of Object.entries(constraint)) {\n result[key] = val === undefined ? 
null : val;\n }\n return result;\n}\n\n/**\n * Serialize a single debug event to JSON-compatible format.\n * The fanout function is already omitted when events are created.\n * The planSnapshot is excluded as it's internal state not needed for debugging.\n * Undefined values in constraints are converted to null for JSON serialization.\n */\nfunction serializeEvent(event: PlanDebugEvent): PlanDebugEventJSON {\n // Remove planSnapshot from plan-complete events\n if (event.type === 'plan-complete') {\n const {planSnapshot: _, ...rest} = event;\n return rest as PlanDebugEventJSON;\n }\n\n // Convert constraint undefined values to null for specific event types\n if (event.type === 'node-constraint') {\n return {\n ...event,\n constraint: convertConstraintUndefinedToNull(event.constraint),\n } as PlanDebugEventJSON;\n }\n\n if (event.type === 'connection-costs') {\n return {\n ...event,\n costs: event.costs.map(cost => ({\n ...cost,\n constraints: Object.fromEntries(\n Object.entries(cost.constraints).map(([key, val]) => [\n key,\n convertConstraintUndefinedToNull(val),\n ]),\n ),\n })),\n } as PlanDebugEventJSON;\n }\n\n if (event.type === 'constraints-propagated') {\n return {\n ...event,\n connectionConstraints: event.connectionConstraints.map(cc => ({\n ...cc,\n constraints: Object.fromEntries(\n Object.entries(cc.constraints).map(([key, val]) => [\n key,\n convertConstraintUndefinedToNull(val),\n ]),\n ),\n })),\n } as PlanDebugEventJSON;\n }\n\n return event as PlanDebugEventJSON;\n}\n\n/**\n * Serialize an array of debug events to JSON-compatible format.\n * The fanout function is already omitted when events are created.\n * The planSnapshot is excluded as it's internal state not needed for debugging.\n */\nexport function serializePlanDebugEvents(\n events: PlanDebugEvent[],\n): PlanDebugEventJSON[] {\n return events.map(serializeEvent);\n}\n\n/**\n * Format planner debug events as a human-readable string.\n * Works with JSON-serialized events (from inspector 
API) or native events (from AccumulatorDebugger).\n *\n * @param events - Array of planner debug events (either JSON or native format)\n * @returns Formatted string showing planning attempts, costs, and final plan selection\n *\n * @example\n * ```typescript\n * const result = await inspector.analyzeQuery(query, { joinPlans: true });\n * if (result.joinPlans) {\n * console.log(formatPlannerEvents(result.joinPlans));\n * }\n * ```\n */\nexport function formatPlannerEvents(\n events: PlanDebugEventJSON[] | PlanDebugEvent[],\n): string {\n const lines: string[] = [];\n\n // Group events by attempt\n const eventsByAttempt = new Map<\n number,\n (PlanDebugEventJSON | PlanDebugEvent)[]\n >();\n let bestPlanEvent:\n | {\n type: 'best-plan-selected';\n bestAttemptNumber: number;\n totalCost: number;\n flipPattern: number;\n joinStates: Array<{join: string; type: string}>;\n }\n | undefined;\n\n for (const event of events) {\n if ('attemptNumber' in event) {\n const attempt = event.attemptNumber;\n if (attempt !== undefined) {\n let attemptEvents = eventsByAttempt.get(attempt);\n if (!attemptEvents) {\n attemptEvents = [];\n eventsByAttempt.set(attempt, attemptEvents);\n }\n attemptEvents.push(event);\n }\n } else if (event.type === 'best-plan-selected') {\n // Save for displaying at the end\n bestPlanEvent = event;\n }\n }\n\n // Format each attempt as a compact summary\n for (const [attemptNum, events] of eventsByAttempt.entries()) {\n lines.push(...formatAttemptSummary(attemptNum, events));\n lines.push(''); // Blank line between attempts\n }\n\n // Show the final plan selection\n if (bestPlanEvent) {\n lines.push('─'.repeat(60));\n lines.push(\n `✓ Best plan: Attempt ${bestPlanEvent.bestAttemptNumber + 1} (cost=${bestPlanEvent.totalCost.toFixed(2)})`,\n );\n if (bestPlanEvent.joinStates.length > 0) {\n lines.push(' Join types:');\n for (const j of bestPlanEvent.joinStates) {\n lines.push(` ${j.join}: ${j.type}`);\n }\n }\n lines.push('─'.repeat(60));\n }\n\n return 
lines.join('\\n');\n}\n"],"mappings":";;;;;AA+IA,IAAa,sBAAb,MAAyD;CACvD,SAAoC,EAAE;CACtC,iBAAyB;CAEzB,IAAI,OAA6B;AAE/B,MAAI,MAAM,SAAS,gBACjB,MAAK,iBAAiB,MAAM;AAI9B,MAAI,MAAM,SAAS,eAAe,MAAM,SAAS,kBAC9C,OAA8C,gBAC7C,KAAK;AAGT,OAAK,OAAO,KAAK,MAAM;;;;;CAMzB,UACE,MACsC;AACtC,SAAO,KAAK,OAAO,QAAO,MAAK,EAAE,SAAS,KAAK;;;;;CASjD,SAAiB;AACf,SAAO,oBAAoB,KAAK,OAAO;;;;;;AAO3C,SAAS,iBACP,YACQ;AACR,KAAI,CAAC,WAAY,QAAO;CACxB,MAAM,OAAO,OAAO,KAAK,WAAW;AACpC,KAAI,KAAK,WAAW,EAAG,QAAO;AAC9B,QAAO,MAAM,KAAK,KAAK,KAAK,GAAG;;;;;AAMjC,SAAS,oBAAoB,OAA8B;AACzD,SAAQ,MAAM,MAAd;EACE,KAAK,SACH,QAAO,MAAM;EACf,KAAK;AAEH,OAAI,OAAO,MAAM,UAAU,SACzB,QAAO,IAAI,MAAM,MAAM;AAEzB,UAAO,KAAK,UAAU,MAAM,MAAM;EACpC,KAAK,SACH,QAAO,IAAI,MAAM,OAAO,GAAG,MAAM,QAAQ,MAAM,MAAM,GAAG,MAAM,MAAM,KAAK,IAAI,GAAG,MAAM;;;;;;AAO5F,SAAS,aAAa,QAAuC;AAC3D,KAAI,CAAC,OAAQ,QAAO;AAEpB,SAAQ,OAAO,MAAf;EACE,KAAK,SACH,QAAO,GAAG,oBAAoB,OAAO,KAAK,CAAC,GAAG,OAAO,GAAG,GAAG,oBAAoB,OAAO,MAAM;EAC9F,KAAK,MACH,QAAO,IAAI,OAAO,WAAW,IAAI,aAAa,CAAC,KAAK,QAAQ,CAAC;EAC/D,KAAK,KACH,QAAO,IAAI,OAAO,WAAW,IAAI,aAAa,CAAC,KAAK,OAAO,CAAC;EAC9D,KAAK,qBACH,QAAO,UAAU,OAAO,QAAQ,SAAS,MAAM;EACjD,QACE,QAAO,KAAK,UAAU,OAAO;;;;;;AAOnC,SAAS,eAAe,UAAwC;AAC9D,KAAI,CAAC,YAAY,SAAS,WAAW,EAAG,QAAO;AAC/C,QAAO,SACJ,KAAK,CAAC,OAAO,eAAe,GAAG,MAAM,GAAG,YAAY,CACpD,KAAK,KAAK;;;;;AAMf,SAAS,qBACP,YACA,QACU;CACV,MAAM,QAAkB,EAAE;CAM1B,MAAM,gBAHa,OAAO,MAAK,MAAK,EAAE,SAAS,gBAAgB,EAG7B,iBAAiB;CAGnD,MAAM,UACJ,OAAO,kBAAkB,WACrB,KAAK,KAAK,KAAK,KAAK,cAAc,CAAC,IAAI,IACvC;CACN,MAAM,aAAa,WAAW,SAAS,EAAE,CAAC,SAAS,SAAS,IAAI;AAEhE,OAAM,KACJ,YAAY,aAAa,EAAE,GAAG,cAAc,YAAY,WAAW,IAAI,WAAW,GACnF;CAGD,MAAM,uBAGA,EAAE;CACR,MAAM,6BAGA,EAAE;AAER,MAAK,MAAM,SAAS,QAAQ;AAC1B,MAAI,MAAM,SAAS,eAAe,MAAM,aAAa,aACnD,sBAAqB,KAAK,MAAM;AAElC,MAAI,MAAM,SAAS,qBAAqB,MAAM,aAAa,aACzD,4BAA2B,KAAK,MAAM;;AAK1C,KAAI,qBAAqB,SAAS,GAAG;AACnC,QAAM,KAAK,iBAAiB;AAC5B,OAAK,MAAM,QAAQ,sBAAsB;GAEvC,MAAM,aAAa,2BAA2B,MAC5C,MACE,EAAE,SAAS,KAAK,QAChB,EAAE,cAAc,KAAK,IAAI,KAAK,KAAK,cAAc,KAAK,IAAI,CAC7D,EAAE;GAEH,MAAM,gBAAgB,iBAAiB,WAAW;GAClD,MAAM,YAA
Y,aAAa,KAAK,QAAQ;GAC5C,MAAM,cAAc,eAAe,KAAK,SAAS;GACjD,MAAM,WACJ,KAAK,aAAa,UAAU,KAAA,IACxB,KAAK,aAAa,MAAM,UAAU,GAClC;AAEN,SAAM,KAAK,OAAO,KAAK,KAAK,GAAG;AAC/B,SAAM,KACJ,cAAc,KAAK,aAAa,KAAK,QAAQ,EAAE,CAAC,YAAY,KAAK,aAAa,YAAY,QAAQ,EAAE,CAAC,SAAS,KAAK,aAAa,QAAQ,QAAQ,EAAE,GACnJ;AACD,SAAM,KACJ,cAAc,KAAK,aAAa,aAAa,QAAQ,EAAE,CAAC,gBAAgB,KAAK,aAAa,YAAY,QAAQ,EAAE,CAAC,UAAU,WAC5H;AACD,SAAM,KACJ,oCAAoC,KAAK,2BAA2B,QAAQ,EAAE,GAC/E;AACD,SAAM,KAAK,qBAAqB,gBAAgB;AAChD,SAAM,KAAK,iBAAiB,YAAY;AACxC,SAAM,KAAK,kBAAkB,cAAc;;;CAK/C,MAAM,YAGA,EAAE;AACR,MAAK,MAAM,SAAS,OAClB,KAAI,MAAM,SAAS,eAAe,MAAM,aAAa,OACnD,WAAU,KAAK,MAAM;AAIzB,KAAI,UAAU,SAAS,GAAG;AACxB,QAAM,KAAK,WAAW;AACtB,OAAK,MAAM,QAAQ,WAAW;GAC5B,MAAM,UAAU,KAAK,WAAW,KAAK,KAAK,SAAS,KAAK;GACxD,MAAM,WACJ,KAAK,aAAa,UAAU,KAAA,IACxB,KAAK,aAAa,MAAM,UAAU,GAClC;AAEN,SAAM,KAAK,OAAO,KAAK,OAAO,QAAQ,GAAG;AACzC,SAAM,KACJ,cAAc,KAAK,aAAa,KAAK,QAAQ,EAAE,CAAC,YAAY,KAAK,aAAa,YAAY,QAAQ,EAAE,CAAC,SAAS,KAAK,aAAa,QAAQ,QAAQ,EAAE,GACnJ;AACD,SAAM,KACJ,cAAc,KAAK,aAAa,aAAa,QAAQ,EAAE,CAAC,gBAAgB,KAAK,aAAa,YAAY,QAAQ,EAAE,CAAC,UAAU,WAC5H;AACD,SAAM,KACJ,oCAAoC,KAAK,2BAA2B,QAAQ,EAAE,GAC/E;;;CAKL,MAAM,gBAAgB,OAAO,MAAK,MAAK,EAAE,SAAS,gBAAgB;CAGlE,MAAM,cAAc,OAAO,MAAK,MAAK,EAAE,SAAS,cAAc;AAM9D,KAAI,cACF,OAAM,KACJ,mCAAmC,cAAc,UAAU,QAAQ,EAAE,GACtE;UACQ,YACT,OAAM,KAAK,oBAAoB,YAAY,SAAS;AAGtD,QAAO;;;;;;AAOT,SAAS,iCACP,YAC4C;AAC5C,KAAI,eAAe,KAAA,EACjB;AAEF,KAAI,eAAe,KACjB,QAAO;CAET,MAAM,SAAkC,EAAE;AAC1C,MAAK,MAAM,CAAC,KAAK,QAAQ,OAAO,QAAQ,WAAW,CACjD,QAAO,OAAO,QAAQ,KAAA,IAAY,OAAO;AAE3C,QAAO;;;;;;;;AAST,SAAS,eAAe,OAA2C;AAEjE,KAAI,MAAM,SAAS,iBAAiB;EAClC,MAAM,EAAC,cAAc,GAAG,GAAG,SAAQ;AACnC,SAAO;;AAIT,KAAI,MAAM,SAAS,kBACjB,QAAO;EACL,GAAG;EACH,YAAY,iCAAiC,MAAM,WAAW;EAC/D;AAGH,KAAI,MAAM,SAAS,mBACjB,QAAO;EACL,GAAG;EACH,OAAO,MAAM,MAAM,KAAI,UAAS;GAC9B,GAAG;GACH,aAAa,OAAO,YAClB,OAAO,QAAQ,KAAK,YAAY,CAAC,KAAK,CAAC,KAAK,SAAS,CACnD,KACA,iCAAiC,IAAI,CACtC,CAAC,CACH;GACF,EAAE;EACJ;AAGH,KAAI,MAAM,SAAS,yBACjB,QAAO;EACL,GAAG;EACH,uBAAuB,MAAM,sBAAsB,KAAI,QAAO;GAC5D,GAAG;GACH,aAAa,OAAO,YAClB,OAAO,QAAQ
,GAAG,YAAY,CAAC,KAAK,CAAC,KAAK,SAAS,CACjD,KACA,iCAAiC,IAAI,CACtC,CAAC,CACH;GACF,EAAE;EACJ;AAGH,QAAO;;;;;;;AAQT,SAAgB,yBACd,QACsB;AACtB,QAAO,OAAO,IAAI,eAAe;;;;;;;;;;;;;;;;;AAkBnC,SAAgB,oBACd,QACQ;CACR,MAAM,QAAkB,EAAE;CAG1B,MAAM,kCAAkB,IAAI,KAGzB;CACH,IAAI;AAUJ,MAAK,MAAM,SAAS,OAClB,KAAI,mBAAmB,OAAO;EAC5B,MAAM,UAAU,MAAM;AACtB,MAAI,YAAY,KAAA,GAAW;GACzB,IAAI,gBAAgB,gBAAgB,IAAI,QAAQ;AAChD,OAAI,CAAC,eAAe;AAClB,oBAAgB,EAAE;AAClB,oBAAgB,IAAI,SAAS,cAAc;;AAE7C,iBAAc,KAAK,MAAM;;YAElB,MAAM,SAAS,qBAExB,iBAAgB;AAKpB,MAAK,MAAM,CAAC,YAAY,WAAW,gBAAgB,SAAS,EAAE;AAC5D,QAAM,KAAK,GAAG,qBAAqB,YAAY,OAAO,CAAC;AACvD,QAAM,KAAK,GAAG;;AAIhB,KAAI,eAAe;AACjB,QAAM,KAAK,IAAI,OAAO,GAAG,CAAC;AAC1B,QAAM,KACJ,wBAAwB,cAAc,oBAAoB,EAAE,SAAS,cAAc,UAAU,QAAQ,EAAE,CAAC,GACzG;AACD,MAAI,cAAc,WAAW,SAAS,GAAG;AACvC,SAAM,KAAK,gBAAgB;AAC3B,QAAK,MAAM,KAAK,cAAc,WAC5B,OAAM,KAAK,OAAO,EAAE,KAAK,IAAI,EAAE,OAAO;;AAG1C,QAAM,KAAK,IAAI,OAAO,GAAG,CAAC;;AAG5B,QAAO,MAAM,KAAK,KAAK"}
1
+ {"version":3,"file":"planner-debug.js","names":[],"sources":["../../../../../zql/src/planner/planner-debug.ts"],"sourcesContent":["import type * as v from '../../../shared/src/valita.ts';\nimport type {\n Condition,\n Ordering,\n ValuePosition,\n} from '../../../zero-protocol/src/ast.ts';\nimport type {\n attemptStartEventJSONSchema,\n bestPlanSelectedEventJSONSchema,\n connectionSelectedEventJSONSchema,\n nodeConstraintEventJSONSchema,\n PlanDebugEventJSON,\n planFailedEventJSONSchema,\n} from '../../../zero-protocol/src/analyze-query-result.ts';\nimport type {PlannerConstraint} from './planner-constraint.ts';\nimport type {CostEstimate, JoinType} from './planner-node.ts';\nimport type {PlanState} from './planner-graph.ts';\n\n/**\n * Structured debug events emitted during query planning.\n * These events can be accumulated, printed, or analyzed to understand\n * the planner's decision-making process.\n */\n\n/**\n * Starting a new planning attempt with a different root connection.\n */\nexport type AttemptStartEvent = v.Infer<typeof attemptStartEventJSONSchema>;\n\n/**\n * Snapshot of connection costs before selecting the next connection.\n */\nexport type ConnectionCostsEvent = {\n type: 'connection-costs';\n attemptNumber: number;\n costs: Array<{\n connection: string;\n cost: number;\n costEstimate: Omit<CostEstimate, 'fanout'>;\n pinned: boolean;\n constraints: Record<string, PlannerConstraint | undefined>;\n constraintCosts: Record<string, Omit<CostEstimate, 'fanout'>>;\n }>;\n};\n\n/**\n * A connection was chosen and pinned.\n */\nexport type ConnectionSelectedEvent = v.Infer<\n typeof connectionSelectedEventJSONSchema\n>;\n\n/**\n * Constraints have been propagated through the graph.\n */\nexport type ConstraintsPropagatedEvent = {\n type: 'constraints-propagated';\n attemptNumber: number;\n connectionConstraints: Array<{\n connection: string;\n constraints: Record<string, PlannerConstraint | undefined>;\n constraintCosts: Record<string, 
Omit<CostEstimate, 'fanout'>>;\n }>;\n};\n\n/**\n * A complete plan was found for this attempt.\n */\nexport type PlanCompleteEvent = {\n type: 'plan-complete';\n attemptNumber: number;\n totalCost: number;\n flipPattern: number; // Bitmask indicating which joins are flipped\n joinStates: Array<{\n join: string;\n type: JoinType;\n }>;\n // Planning snapshot that can be restored and applied to AST\n planSnapshot: PlanState;\n};\n\n/**\n * Planning attempt failed (e.g., unflippable join).\n */\nexport type PlanFailedEvent = v.Infer<typeof planFailedEventJSONSchema>;\n\n/**\n * The best plan across all attempts was selected.\n */\nexport type BestPlanSelectedEvent = v.Infer<\n typeof bestPlanSelectedEventJSONSchema\n>;\n\n/**\n * A node computed its cost estimate during planning.\n * Emitted by nodes during estimateCost() traversal.\n * attemptNumber is added by the debugger.\n */\nexport type NodeCostEvent = {\n type: 'node-cost';\n attemptNumber?: number;\n nodeType: 'connection' | 'join' | 'fan-out' | 'fan-in' | 'terminus';\n node: string;\n branchPattern: number[];\n downstreamChildSelectivity: number;\n costEstimate: Omit<CostEstimate, 'fanout'>;\n filters?: Condition | undefined; // Only for connections\n ordering?: Ordering | undefined; // Only for connections\n joinType?: JoinType | undefined; // Only for joins\n};\n\n/**\n * A node received constraints during constraint propagation.\n * Emitted by nodes during propagateConstraints() traversal.\n * attemptNumber is added by the debugger.\n */\nexport type NodeConstraintEvent = v.Infer<typeof nodeConstraintEventJSONSchema>;\n\n/**\n * Union of all debug event types.\n */\nexport type PlanDebugEvent =\n | AttemptStartEvent\n | ConnectionCostsEvent\n | ConnectionSelectedEvent\n | ConstraintsPropagatedEvent\n | PlanCompleteEvent\n | PlanFailedEvent\n | BestPlanSelectedEvent\n | NodeCostEvent\n | NodeConstraintEvent;\n\n/**\n * Interface for objects that receive debug events during planning.\n */\nexport interface 
PlanDebugger {\n log(event: PlanDebugEvent): void;\n}\n\n/**\n * Simple accumulator debugger that stores all events.\n * Useful for tests and debugging.\n */\nexport class AccumulatorDebugger implements PlanDebugger {\n readonly events: PlanDebugEvent[] = [];\n private currentAttempt = 0;\n\n log(event: PlanDebugEvent): void {\n // Track current attempt number\n if (event.type === 'attempt-start') {\n this.currentAttempt = event.attemptNumber;\n }\n\n // Add attempt number to node events\n if (event.type === 'node-cost' || event.type === 'node-constraint') {\n (event as NodeCostEvent | NodeConstraintEvent).attemptNumber =\n this.currentAttempt;\n }\n\n this.events.push(event);\n }\n\n /**\n * Get all events of a specific type.\n */\n getEvents<T extends PlanDebugEvent['type']>(\n type: T,\n ): Extract<PlanDebugEvent, {type: T}>[] {\n return this.events.filter(e => e.type === type) as Extract<\n PlanDebugEvent,\n {type: T}\n >[];\n }\n\n /**\n * Format events as a human-readable string.\n */\n format(): string {\n return formatPlannerEvents(this.events);\n }\n}\n\n/**\n * Format a constraint object as a human-readable string.\n */\nfunction formatConstraint(\n constraint: PlannerConstraint | Record<string, unknown> | null | undefined,\n): string {\n if (!constraint) return '{}';\n const keys = Object.keys(constraint);\n if (keys.length === 0) return '{}';\n return '{' + keys.join(', ') + '}';\n}\n\n/**\n * Format a ValuePosition (column, literal, or static parameter) as a human-readable string.\n */\nfunction formatValuePosition(value: ValuePosition): string {\n switch (value.type) {\n case 'column':\n return value.name;\n case 'literal':\n // Format literal values with SQL-style quoting for strings\n if (typeof value.value === 'string') {\n return `'${value.value}'`;\n }\n return JSON.stringify(value.value);\n case 'static':\n return `@${value.anchor}.${Array.isArray(value.field) ? 
value.field.join('.') : value.field}`;\n }\n}\n\n/**\n * Format a Condition (filter) as a human-readable string.\n */\nfunction formatFilter(filter: Condition | undefined): string {\n if (!filter) return 'none';\n\n switch (filter.type) {\n case 'simple':\n return `${formatValuePosition(filter.left)} ${filter.op} ${formatValuePosition(filter.right)}`;\n case 'and':\n return `(${filter.conditions.map(formatFilter).join(' AND ')})`;\n case 'or':\n return `(${filter.conditions.map(formatFilter).join(' OR ')})`;\n case 'correlatedSubquery':\n return `EXISTS(${filter.related.subquery.table})`;\n default:\n return JSON.stringify(filter);\n }\n}\n\n/**\n * Format an Ordering as a human-readable string.\n */\nfunction formatOrdering(ordering: Ordering | undefined): string {\n if (!ordering || ordering.length === 0) return 'none';\n return ordering\n .map(([field, direction]) => `${field} ${direction}`)\n .join(', ');\n}\n\n/**\n * Format a compact summary for a single planning attempt.\n */\nfunction formatAttemptSummary(\n attemptNum: number,\n events: (PlanDebugEvent | PlanDebugEventJSON)[],\n): string[] {\n const lines: string[] = [];\n\n // Find the attempt-start event to get total attempts\n const startEvent = events.find(e => e.type === 'attempt-start') as\n | AttemptStartEvent\n | undefined;\n const totalAttempts = startEvent?.totalAttempts ?? '?';\n\n // Calculate number of bits needed for pattern\n const numBits =\n typeof totalAttempts === 'number'\n ? 
Math.ceil(Math.log2(totalAttempts)) || 1\n : 1;\n const bitPattern = attemptNum.toString(2).padStart(numBits, '0');\n\n lines.push(\n `[Attempt ${attemptNum + 1}/${totalAttempts}] Pattern ${attemptNum} (${bitPattern})`,\n );\n\n // Collect connection costs (use array to preserve all connections, including duplicates)\n const connectionCostEvents: (\n | NodeCostEvent\n | Extract<PlanDebugEventJSON, {type: 'node-cost'}>\n )[] = [];\n const connectionConstraintEvents: NodeConstraintEvent[] = [];\n\n for (const event of events) {\n if (event.type === 'node-cost' && event.nodeType === 'connection') {\n connectionCostEvents.push(event);\n }\n if (event.type === 'node-constraint' && event.nodeType === 'connection') {\n connectionConstraintEvents.push(event);\n }\n }\n\n // Show connection summary\n if (connectionCostEvents.length > 0) {\n lines.push(' Connections:');\n for (const cost of connectionCostEvents) {\n // Find matching constraint event (same node name and branch pattern)\n const constraint = connectionConstraintEvents.find(\n c =>\n c.node === cost.node &&\n c.branchPattern.join(',') === cost.branchPattern.join(','),\n )?.constraint;\n\n const constraintStr = formatConstraint(constraint);\n const filterStr = formatFilter(cost.filters);\n const orderingStr = formatOrdering(cost.ordering);\n const limitStr =\n cost.costEstimate.limit !== undefined\n ? 
cost.costEstimate.limit.toString()\n : 'none';\n\n lines.push(` ${cost.node}:`);\n lines.push(\n ` cost=${cost.costEstimate.cost.toFixed(2)}, startup=${cost.costEstimate.startupCost.toFixed(2)}, scan=${cost.costEstimate.scanEst.toFixed(2)}`,\n );\n lines.push(\n ` rows=${cost.costEstimate.returnedRows.toFixed(2)}, selectivity=${cost.costEstimate.selectivity.toFixed(8)}, limit=${limitStr}`,\n );\n lines.push(\n ` downstreamChildSelectivity=${cost.downstreamChildSelectivity.toFixed(8)}`,\n );\n lines.push(` constraints=${constraintStr}`);\n lines.push(` filters=${filterStr}`);\n lines.push(` ordering=${orderingStr}`);\n }\n }\n\n // Collect join costs from node-cost events\n const joinCosts: (\n | NodeCostEvent\n | Extract<PlanDebugEventJSON, {type: 'node-cost'}>\n )[] = [];\n for (const event of events) {\n if (event.type === 'node-cost' && event.nodeType === 'join') {\n joinCosts.push(event);\n }\n }\n\n if (joinCosts.length > 0) {\n lines.push(' Joins:');\n for (const cost of joinCosts) {\n const typeStr = cost.joinType ? ` (${cost.joinType})` : '';\n const limitStr =\n cost.costEstimate.limit !== undefined\n ? 
cost.costEstimate.limit.toString()\n : 'none';\n\n lines.push(` ${cost.node}${typeStr}:`);\n lines.push(\n ` cost=${cost.costEstimate.cost.toFixed(2)}, startup=${cost.costEstimate.startupCost.toFixed(2)}, scan=${cost.costEstimate.scanEst.toFixed(2)}`,\n );\n lines.push(\n ` rows=${cost.costEstimate.returnedRows.toFixed(2)}, selectivity=${cost.costEstimate.selectivity.toFixed(8)}, limit=${limitStr}`,\n );\n lines.push(\n ` downstreamChildSelectivity=${cost.downstreamChildSelectivity.toFixed(8)}`,\n );\n }\n }\n\n // Find completion/failure events\n const completeEvent = events.find(e => e.type === 'plan-complete') as\n | PlanCompleteEvent\n | undefined;\n const failedEvent = events.find(e => e.type === 'plan-failed') as\n | PlanFailedEvent\n | undefined;\n\n // Show final status\n\n if (completeEvent) {\n lines.push(\n ` ✓ Plan complete: total cost = ${completeEvent.totalCost.toFixed(2)}`,\n );\n } else if (failedEvent) {\n lines.push(` ✗ Plan failed: ${failedEvent.reason}`);\n }\n\n return lines;\n}\n\n/**\n * Convert undefined values to null in a constraint object for JSON serialization.\n * PlannerConstraint uses Record<string, undefined> which loses keys during JSON.stringify.\n */\nfunction convertConstraintUndefinedToNull(\n constraint: PlannerConstraint | Record<string, unknown> | undefined | null,\n): Record<string, unknown> | undefined | null {\n if (constraint === undefined) {\n return undefined;\n }\n if (constraint === null) {\n return null;\n }\n const result: Record<string, unknown> = {};\n for (const [key, val] of Object.entries(constraint)) {\n result[key] = val === undefined ? 
null : val;\n }\n return result;\n}\n\n/**\n * Serialize a single debug event to JSON-compatible format.\n * The fanout function is already omitted when events are created.\n * The planSnapshot is excluded as it's internal state not needed for debugging.\n * Undefined values in constraints are converted to null for JSON serialization.\n */\nfunction serializeEvent(event: PlanDebugEvent): PlanDebugEventJSON {\n // Remove planSnapshot from plan-complete events\n if (event.type === 'plan-complete') {\n const {planSnapshot: _, ...rest} = event;\n return rest as PlanDebugEventJSON;\n }\n\n // Convert constraint undefined values to null for specific event types\n if (event.type === 'node-constraint') {\n return {\n ...event,\n constraint: convertConstraintUndefinedToNull(event.constraint),\n } as PlanDebugEventJSON;\n }\n\n if (event.type === 'connection-costs') {\n return {\n ...event,\n costs: event.costs.map(cost => ({\n ...cost,\n constraints: Object.fromEntries(\n Object.entries(cost.constraints).map(([key, val]) => [\n key,\n convertConstraintUndefinedToNull(val),\n ]),\n ),\n })),\n } as PlanDebugEventJSON;\n }\n\n if (event.type === 'constraints-propagated') {\n return {\n ...event,\n connectionConstraints: event.connectionConstraints.map(cc => ({\n ...cc,\n constraints: Object.fromEntries(\n Object.entries(cc.constraints).map(([key, val]) => [\n key,\n convertConstraintUndefinedToNull(val),\n ]),\n ),\n })),\n } as PlanDebugEventJSON;\n }\n\n return event as PlanDebugEventJSON;\n}\n\n/**\n * Serialize an array of debug events to JSON-compatible format.\n * The fanout function is already omitted when events are created.\n * The planSnapshot is excluded as it's internal state not needed for debugging.\n */\nexport function serializePlanDebugEvents(\n events: PlanDebugEvent[],\n): PlanDebugEventJSON[] {\n return events.map(serializeEvent);\n}\n\n/**\n * Format planner debug events as a human-readable string.\n * Works with JSON-serialized events (from inspector 
API) or native events (from AccumulatorDebugger).\n *\n * @param events - Array of planner debug events (either JSON or native format)\n * @returns Formatted string showing planning attempts, costs, and final plan selection\n *\n * @example\n * ```typescript\n * const result = await inspector.analyzeQuery(query, { joinPlans: true });\n * if (result.joinPlans) {\n * console.log(formatPlannerEvents(result.joinPlans));\n * }\n * ```\n */\nexport function formatPlannerEvents(\n events: PlanDebugEventJSON[] | PlanDebugEvent[],\n): string {\n const lines: string[] = [];\n\n // Group events by attempt\n const eventsByAttempt = new Map<\n number,\n (PlanDebugEventJSON | PlanDebugEvent)[]\n >();\n let bestPlanEvent:\n | {\n type: 'best-plan-selected';\n bestAttemptNumber: number;\n totalCost: number;\n flipPattern: number;\n joinStates: Array<{join: string; type: string}>;\n }\n | undefined;\n\n for (const event of events) {\n if ('attemptNumber' in event) {\n const attempt = event.attemptNumber;\n if (attempt !== undefined) {\n let attemptEvents = eventsByAttempt.get(attempt);\n if (!attemptEvents) {\n attemptEvents = [];\n eventsByAttempt.set(attempt, attemptEvents);\n }\n attemptEvents.push(event);\n }\n } else if (event.type === 'best-plan-selected') {\n // Save for displaying at the end\n bestPlanEvent = event;\n }\n }\n\n // Format each attempt as a compact summary\n for (const [attemptNum, events] of eventsByAttempt.entries()) {\n lines.push(...formatAttemptSummary(attemptNum, events));\n lines.push(''); // Blank line between attempts\n }\n\n // Show the final plan selection\n if (bestPlanEvent) {\n lines.push('─'.repeat(60));\n lines.push(\n `✓ Best plan: Attempt ${bestPlanEvent.bestAttemptNumber + 1} (cost=${bestPlanEvent.totalCost.toFixed(2)})`,\n );\n if (bestPlanEvent.joinStates.length > 0) {\n lines.push(' Join types:');\n for (const j of bestPlanEvent.joinStates) {\n lines.push(` ${j.join}: ${j.type}`);\n }\n }\n lines.push('─'.repeat(60));\n }\n\n return 
lines.join('\\n');\n}\n"],"mappings":";;;;;AA+IA,IAAa,sBAAb,MAAyD;CACvD,SAAoC,EAAE;CACtC,iBAAyB;CAEzB,IAAI,OAA6B;AAE/B,MAAI,MAAM,SAAS,gBACjB,MAAK,iBAAiB,MAAM;AAI9B,MAAI,MAAM,SAAS,eAAe,MAAM,SAAS,kBAC9C,OAA8C,gBAC7C,KAAK;AAGT,OAAK,OAAO,KAAK,MAAM;;;;;CAMzB,UACE,MACsC;AACtC,SAAO,KAAK,OAAO,QAAO,MAAK,EAAE,SAAS,KAAK;;;;;CASjD,SAAiB;AACf,SAAO,oBAAoB,KAAK,OAAO;;;;;;AAO3C,SAAS,iBACP,YACQ;AACR,KAAI,CAAC,WAAY,QAAO;CACxB,MAAM,OAAO,OAAO,KAAK,WAAW;AACpC,KAAI,KAAK,WAAW,EAAG,QAAO;AAC9B,QAAO,MAAM,KAAK,KAAK,KAAK,GAAG;;;;;AAMjC,SAAS,oBAAoB,OAA8B;AACzD,SAAQ,MAAM,MAAd;EACE,KAAK,SACH,QAAO,MAAM;EACf,KAAK;AAEH,OAAI,OAAO,MAAM,UAAU,SACzB,QAAO,IAAI,MAAM,MAAM;AAEzB,UAAO,KAAK,UAAU,MAAM,MAAM;EACpC,KAAK,SACH,QAAO,IAAI,MAAM,OAAO,GAAG,MAAM,QAAQ,MAAM,MAAM,GAAG,MAAM,MAAM,KAAK,IAAI,GAAG,MAAM;;;;;;AAO5F,SAAS,aAAa,QAAuC;AAC3D,KAAI,CAAC,OAAQ,QAAO;AAEpB,SAAQ,OAAO,MAAf;EACE,KAAK,SACH,QAAO,GAAG,oBAAoB,OAAO,KAAK,CAAC,GAAG,OAAO,GAAG,GAAG,oBAAoB,OAAO,MAAM;EAC9F,KAAK,MACH,QAAO,IAAI,OAAO,WAAW,IAAI,aAAa,CAAC,KAAK,QAAQ,CAAC;EAC/D,KAAK,KACH,QAAO,IAAI,OAAO,WAAW,IAAI,aAAa,CAAC,KAAK,OAAO,CAAC;EAC9D,KAAK,qBACH,QAAO,UAAU,OAAO,QAAQ,SAAS,MAAM;EACjD,QACE,QAAO,KAAK,UAAU,OAAO;;;;;;AAOnC,SAAS,eAAe,UAAwC;AAC9D,KAAI,CAAC,YAAY,SAAS,WAAW,EAAG,QAAO;AAC/C,QAAO,SACJ,KAAK,CAAC,OAAO,eAAe,GAAG,MAAM,GAAG,YAAY,CACpD,KAAK,KAAK;;;;;AAMf,SAAS,qBACP,YACA,QACU;CACV,MAAM,QAAkB,EAAE;CAM1B,MAAM,gBAHa,OAAO,MAAK,MAAK,EAAE,SAAS,gBAAgB,EAG7B,iBAAiB;CAGnD,MAAM,UACJ,OAAO,kBAAkB,WACrB,KAAK,KAAK,KAAK,KAAK,cAAc,CAAC,IAAI,IACvC;CACN,MAAM,aAAa,WAAW,SAAS,EAAE,CAAC,SAAS,SAAS,IAAI;AAEhE,OAAM,KACJ,YAAY,aAAa,EAAE,GAAG,cAAc,YAAY,WAAW,IAAI,WAAW,GACnF;CAGD,MAAM,uBAGA,EAAE;CACR,MAAM,6BAAoD,EAAE;AAE5D,MAAK,MAAM,SAAS,QAAQ;AAC1B,MAAI,MAAM,SAAS,eAAe,MAAM,aAAa,aACnD,sBAAqB,KAAK,MAAM;AAElC,MAAI,MAAM,SAAS,qBAAqB,MAAM,aAAa,aACzD,4BAA2B,KAAK,MAAM;;AAK1C,KAAI,qBAAqB,SAAS,GAAG;AACnC,QAAM,KAAK,iBAAiB;AAC5B,OAAK,MAAM,QAAQ,sBAAsB;GAEvC,MAAM,aAAa,2BAA2B,MAC5C,MACE,EAAE,SAAS,KAAK,QAChB,EAAE,cAAc,KAAK,IAAI,KAAK,KAAK,cAAc,KAAK,IAAI,CAC7D,EAAE;GAEH,MAAM,gBAAgB,iBAAiB,WAAW;GAClD,MAAM,Y
AAY,aAAa,KAAK,QAAQ;GAC5C,MAAM,cAAc,eAAe,KAAK,SAAS;GACjD,MAAM,WACJ,KAAK,aAAa,UAAU,KAAA,IACxB,KAAK,aAAa,MAAM,UAAU,GAClC;AAEN,SAAM,KAAK,OAAO,KAAK,KAAK,GAAG;AAC/B,SAAM,KACJ,cAAc,KAAK,aAAa,KAAK,QAAQ,EAAE,CAAC,YAAY,KAAK,aAAa,YAAY,QAAQ,EAAE,CAAC,SAAS,KAAK,aAAa,QAAQ,QAAQ,EAAE,GACnJ;AACD,SAAM,KACJ,cAAc,KAAK,aAAa,aAAa,QAAQ,EAAE,CAAC,gBAAgB,KAAK,aAAa,YAAY,QAAQ,EAAE,CAAC,UAAU,WAC5H;AACD,SAAM,KACJ,oCAAoC,KAAK,2BAA2B,QAAQ,EAAE,GAC/E;AACD,SAAM,KAAK,qBAAqB,gBAAgB;AAChD,SAAM,KAAK,iBAAiB,YAAY;AACxC,SAAM,KAAK,kBAAkB,cAAc;;;CAK/C,MAAM,YAGA,EAAE;AACR,MAAK,MAAM,SAAS,OAClB,KAAI,MAAM,SAAS,eAAe,MAAM,aAAa,OACnD,WAAU,KAAK,MAAM;AAIzB,KAAI,UAAU,SAAS,GAAG;AACxB,QAAM,KAAK,WAAW;AACtB,OAAK,MAAM,QAAQ,WAAW;GAC5B,MAAM,UAAU,KAAK,WAAW,KAAK,KAAK,SAAS,KAAK;GACxD,MAAM,WACJ,KAAK,aAAa,UAAU,KAAA,IACxB,KAAK,aAAa,MAAM,UAAU,GAClC;AAEN,SAAM,KAAK,OAAO,KAAK,OAAO,QAAQ,GAAG;AACzC,SAAM,KACJ,cAAc,KAAK,aAAa,KAAK,QAAQ,EAAE,CAAC,YAAY,KAAK,aAAa,YAAY,QAAQ,EAAE,CAAC,SAAS,KAAK,aAAa,QAAQ,QAAQ,EAAE,GACnJ;AACD,SAAM,KACJ,cAAc,KAAK,aAAa,aAAa,QAAQ,EAAE,CAAC,gBAAgB,KAAK,aAAa,YAAY,QAAQ,EAAE,CAAC,UAAU,WAC5H;AACD,SAAM,KACJ,oCAAoC,KAAK,2BAA2B,QAAQ,EAAE,GAC/E;;;CAKL,MAAM,gBAAgB,OAAO,MAAK,MAAK,EAAE,SAAS,gBAAgB;CAGlE,MAAM,cAAc,OAAO,MAAK,MAAK,EAAE,SAAS,cAAc;AAM9D,KAAI,cACF,OAAM,KACJ,mCAAmC,cAAc,UAAU,QAAQ,EAAE,GACtE;UACQ,YACT,OAAM,KAAK,oBAAoB,YAAY,SAAS;AAGtD,QAAO;;;;;;AAOT,SAAS,iCACP,YAC4C;AAC5C,KAAI,eAAe,KAAA,EACjB;AAEF,KAAI,eAAe,KACjB,QAAO;CAET,MAAM,SAAkC,EAAE;AAC1C,MAAK,MAAM,CAAC,KAAK,QAAQ,OAAO,QAAQ,WAAW,CACjD,QAAO,OAAO,QAAQ,KAAA,IAAY,OAAO;AAE3C,QAAO;;;;;;;;AAST,SAAS,eAAe,OAA2C;AAEjE,KAAI,MAAM,SAAS,iBAAiB;EAClC,MAAM,EAAC,cAAc,GAAG,GAAG,SAAQ;AACnC,SAAO;;AAIT,KAAI,MAAM,SAAS,kBACjB,QAAO;EACL,GAAG;EACH,YAAY,iCAAiC,MAAM,WAAW;EAC/D;AAGH,KAAI,MAAM,SAAS,mBACjB,QAAO;EACL,GAAG;EACH,OAAO,MAAM,MAAM,KAAI,UAAS;GAC9B,GAAG;GACH,aAAa,OAAO,YAClB,OAAO,QAAQ,KAAK,YAAY,CAAC,KAAK,CAAC,KAAK,SAAS,CACnD,KACA,iCAAiC,IAAI,CACtC,CAAC,CACH;GACF,EAAE;EACJ;AAGH,KAAI,MAAM,SAAS,yBACjB,QAAO;EACL,GAAG;EACH,uBAAuB,MAAM,sBAAsB,KAAI,QAAO;GAC5D,GAAG;GACH,aAAa,OAAO,YAClB,OAAO,QA
AQ,GAAG,YAAY,CAAC,KAAK,CAAC,KAAK,SAAS,CACjD,KACA,iCAAiC,IAAI,CACtC,CAAC,CACH;GACF,EAAE;EACJ;AAGH,QAAO;;;;;;;AAQT,SAAgB,yBACd,QACsB;AACtB,QAAO,OAAO,IAAI,eAAe;;;;;;;;;;;;;;;;;AAkBnC,SAAgB,oBACd,QACQ;CACR,MAAM,QAAkB,EAAE;CAG1B,MAAM,kCAAkB,IAAI,KAGzB;CACH,IAAI;AAUJ,MAAK,MAAM,SAAS,OAClB,KAAI,mBAAmB,OAAO;EAC5B,MAAM,UAAU,MAAM;AACtB,MAAI,YAAY,KAAA,GAAW;GACzB,IAAI,gBAAgB,gBAAgB,IAAI,QAAQ;AAChD,OAAI,CAAC,eAAe;AAClB,oBAAgB,EAAE;AAClB,oBAAgB,IAAI,SAAS,cAAc;;AAE7C,iBAAc,KAAK,MAAM;;YAElB,MAAM,SAAS,qBAExB,iBAAgB;AAKpB,MAAK,MAAM,CAAC,YAAY,WAAW,gBAAgB,SAAS,EAAE;AAC5D,QAAM,KAAK,GAAG,qBAAqB,YAAY,OAAO,CAAC;AACvD,QAAM,KAAK,GAAG;;AAIhB,KAAI,eAAe;AACjB,QAAM,KAAK,IAAI,OAAO,GAAG,CAAC;AAC1B,QAAM,KACJ,wBAAwB,cAAc,oBAAoB,EAAE,SAAS,cAAc,UAAU,QAAQ,EAAE,CAAC,GACzG;AACD,MAAI,cAAc,WAAW,SAAS,GAAG;AACvC,SAAM,KAAK,gBAAgB;AAC3B,QAAK,MAAM,KAAK,cAAc,WAC5B,OAAM,KAAK,OAAO,EAAE,KAAK,IAAI,EAAE,OAAO;;AAG1C,QAAM,KAAK,IAAI,OAAO,GAAG,CAAC;;AAG5B,QAAO,MAAM,KAAK,KAAK"}
@@ -38,7 +38,7 @@ export declare class ExpressionBuilder<TTable extends keyof TSchema['tables'] &
38
38
  export declare function and(...conditions: (Condition | undefined)[]): Condition;
39
39
  export declare function or(...conditions: (Condition | undefined)[]): Condition;
40
40
  export declare function not(expression: Condition): Condition;
41
- export declare function cmp(field: string, opOrValue: SimpleOperator | ParameterReference | LiteralValue | undefined, value?: ParameterReference | LiteralValue | undefined): Condition;
41
+ export declare function cmp(field: string, opOrValue: SimpleOperator | ParameterReference | LiteralValue | undefined, value?: ParameterReference | LiteralValue): Condition;
42
42
  export declare const TRUE: Condition;
43
43
  export declare function simplifyCondition(c: Condition): Condition;
44
44
  export declare function flatten(type: 'and' | 'or', conditions: readonly Condition[]): Condition[];
@@ -1 +1 @@
1
- {"version":3,"file":"expression.d.ts","sourceRoot":"","sources":["../../../../../zql/src/query/expression.ts"],"names":[],"mappings":"AAEA,OAAO,EACL,aAAa,EACb,KAAK,SAAS,EACd,KAAK,YAAY,EACjB,KAAK,SAAS,EACd,KAAK,cAAc,EACpB,MAAM,mCAAmC,CAAC;AAC3C,OAAO,KAAK,EAAC,MAAM,IAAI,UAAU,EAAC,MAAM,mCAAmC,CAAC;AAC5E,OAAO,KAAK,EACV,sBAAsB,EACtB,aAAa,EACb,aAAa,EACb,aAAa,EACb,sBAAsB,EACtB,eAAe,EACf,KAAK,EACN,MAAM,YAAY,CAAC;AAEpB,MAAM,MAAM,kBAAkB,GAAG;IAC/B,CAAC,aAAa,CAAC,IAAI,SAAS,CAAC;CAC9B,CAAC;AAEF;;;;;;;;;;;;;;;GAeG;AACH,MAAM,WAAW,iBAAiB,CAChC,MAAM,SAAS,MAAM,OAAO,CAAC,QAAQ,CAAC,GAAG,MAAM,EAC/C,OAAO,SAAS,UAAU;IAE1B,CAAC,EAAE,EAAE,iBAAiB,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC;CACrD;AAED,qBAAa,iBAAiB,CAC5B,MAAM,SAAS,MAAM,OAAO,CAAC,QAAQ,CAAC,GAAG,MAAM,EAC/C,OAAO,SAAS,UAAU;;gBASxB,MAAM,EAAE,CACN,YAAY,EAAE,MAAM,EACpB,EAAE,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,CAAC,EACnE,OAAO,CAAC,EAAE,aAAa,KACpB,SAAS;IAMhB,IAAI,EAAE,SAEL;IAED,GAAG,CACD,SAAS,SAAS,sBAAsB,CAAC,eAAe,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,EAC1E,SAAS,SAAS,cAAc,EAEhC,KAAK,EAAE,SAAS,EAChB,EAAE,EAAE,SAAS,EACb,KAAK,EACD,aAAa,CAAC,eAAe,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,SAAS,EAAE,SAAS,CAAC,GACrE,kBAAkB,GAClB,SAAS,GACZ,SAAS;IACZ,GAAG,CACD,SAAS,SAAS,sBAAsB,CAAC,eAAe,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,EAE1E,KAAK,EAAE,SAAS,EAChB,KAAK,EACD,aAAa,CAAC,eAAe,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,SAAS,EAAE,GAAG,CAAC,GAC/D,kBAAkB,GAClB,SAAS,GACZ,SAAS;IAYZ,MAAM,CACJ,IAAI,EAAE,kBAAkB,GAAG,YAAY,GAAG,SAAS,EACnD,EAAE,EAAE,cAAc,EAClB,KAAK,EAAE,kBAAkB,GAAG,YAAY,GAAG,SAAS,GACnD,SAAS;IAaZ,GAAG,aAAO;IACV,EAAE,YAAM;IACR,GAAG,aAAO;IAEV,MAAM,GAAI,aAAa,SAAS,sBAAsB,CAAC,MAAM,EAAE,OAAO,CAAC,EACrE,cAAc,aAAa,EAC3B,KAAK,CACH,KAAK,EAAE,KAAK,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,EAAE,aAAa,CAAC,EAAE,OAAO,CAAC,KACjE,KAAK,CAAC,GAAG,EAAE,OAAO,CAAC,EACxB,UAAU,aAAa,KACtB,SAAS,CAA4C;CACzD;AAED,wBAAgB,GAAG,CAAC,GAAG,UAAU,EAAE,CAAC,SAAS,GAAG,SAAS,CAAC,EAAE,GAAG,SAAS,CAYvE;AAED,wBAAgB,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,SAAS,GAAG,SAAS,CAAC,EAAE,GAAG,SAA
S,CAYtE;AAED,wBAAgB,GAAG,CAAC,UAAU,EAAE,SAAS,GAAG,SAAS,CA4BpD;AAED,wBAAgB,GAAG,CACjB,KAAK,EAAE,MAAM,EACb,SAAS,EAAE,cAAc,GAAG,kBAAkB,GAAG,YAAY,GAAG,SAAS,EACzE,KAAK,CAAC,EAAE,kBAAkB,GAAG,YAAY,GAAG,SAAS,GACpD,SAAS,CAsBX;AAaD,eAAO,MAAM,IAAI,EAAE,SAGlB,CAAC;AAeF,wBAAgB,iBAAiB,CAAC,CAAC,EAAE,SAAS,GAAG,SAAS,CAkBzD;AAED,wBAAgB,OAAO,CACrB,IAAI,EAAE,KAAK,GAAG,IAAI,EAClB,UAAU,EAAE,SAAS,SAAS,EAAE,GAC/B,SAAS,EAAE,CAWb;AAmBD,QAAA,MAAM,iBAAiB;;;;;;;;;;;;;;;;;CAIb,CAAC;AAEX,wBAAgB,cAAc,CAAC,EAAE,SAAS,MAAM,OAAO,iBAAiB,EACtE,EAAE,EAAE,EAAE,GACL,CAAC,OAAO,iBAAiB,CAAC,CAAC,EAAE,CAAC,CAEhC"}
1
+ {"version":3,"file":"expression.d.ts","sourceRoot":"","sources":["../../../../../zql/src/query/expression.ts"],"names":[],"mappings":"AAEA,OAAO,EACL,aAAa,EACb,KAAK,SAAS,EACd,KAAK,YAAY,EACjB,KAAK,SAAS,EACd,KAAK,cAAc,EACpB,MAAM,mCAAmC,CAAC;AAC3C,OAAO,KAAK,EAAC,MAAM,IAAI,UAAU,EAAC,MAAM,mCAAmC,CAAC;AAC5E,OAAO,KAAK,EACV,sBAAsB,EACtB,aAAa,EACb,aAAa,EACb,aAAa,EACb,sBAAsB,EACtB,eAAe,EACf,KAAK,EACN,MAAM,YAAY,CAAC;AAEpB,MAAM,MAAM,kBAAkB,GAAG;IAC/B,CAAC,aAAa,CAAC,IAAI,SAAS,CAAC;CAC9B,CAAC;AAEF;;;;;;;;;;;;;;;GAeG;AACH,MAAM,WAAW,iBAAiB,CAChC,MAAM,SAAS,MAAM,OAAO,CAAC,QAAQ,CAAC,GAAG,MAAM,EAC/C,OAAO,SAAS,UAAU;IAE1B,CAAC,EAAE,EAAE,iBAAiB,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC;CACrD;AAED,qBAAa,iBAAiB,CAC5B,MAAM,SAAS,MAAM,OAAO,CAAC,QAAQ,CAAC,GAAG,MAAM,EAC/C,OAAO,SAAS,UAAU;;gBASxB,MAAM,EAAE,CACN,YAAY,EAAE,MAAM,EACpB,EAAE,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,CAAC,EACnE,OAAO,CAAC,EAAE,aAAa,KACpB,SAAS;IAMhB,IAAI,EAAE,SAEL;IAED,GAAG,CACD,SAAS,SAAS,sBAAsB,CAAC,eAAe,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,EAC1E,SAAS,SAAS,cAAc,EAEhC,KAAK,EAAE,SAAS,EAChB,EAAE,EAAE,SAAS,EACb,KAAK,EACD,aAAa,CAAC,eAAe,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,SAAS,EAAE,SAAS,CAAC,GACrE,kBAAkB,GAClB,SAAS,GACZ,SAAS;IACZ,GAAG,CACD,SAAS,SAAS,sBAAsB,CAAC,eAAe,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,EAE1E,KAAK,EAAE,SAAS,EAChB,KAAK,EACD,aAAa,CAAC,eAAe,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,SAAS,EAAE,GAAG,CAAC,GAC/D,kBAAkB,GAClB,SAAS,GACZ,SAAS;IAYZ,MAAM,CACJ,IAAI,EAAE,kBAAkB,GAAG,YAAY,GAAG,SAAS,EACnD,EAAE,EAAE,cAAc,EAClB,KAAK,EAAE,kBAAkB,GAAG,YAAY,GAAG,SAAS,GACnD,SAAS;IAaZ,GAAG,aAAO;IACV,EAAE,YAAM;IACR,GAAG,aAAO;IAEV,MAAM,GAAI,aAAa,SAAS,sBAAsB,CAAC,MAAM,EAAE,OAAO,CAAC,EACrE,cAAc,aAAa,EAC3B,KAAK,CACH,KAAK,EAAE,KAAK,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,EAAE,aAAa,CAAC,EAAE,OAAO,CAAC,KACjE,KAAK,CAAC,GAAG,EAAE,OAAO,CAAC,EACxB,UAAU,aAAa,KACtB,SAAS,CAA4C;CACzD;AAED,wBAAgB,GAAG,CAAC,GAAG,UAAU,EAAE,CAAC,SAAS,GAAG,SAAS,CAAC,EAAE,GAAG,SAAS,CAYvE;AAED,wBAAgB,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,SAAS,GAAG,SAAS,CAAC,EAAE,GAAG,SAA
S,CAYtE;AAED,wBAAgB,GAAG,CAAC,UAAU,EAAE,SAAS,GAAG,SAAS,CA4BpD;AAED,wBAAgB,GAAG,CACjB,KAAK,EAAE,MAAM,EACb,SAAS,EAAE,cAAc,GAAG,kBAAkB,GAAG,YAAY,GAAG,SAAS,EACzE,KAAK,CAAC,EAAE,kBAAkB,GAAG,YAAY,GACxC,SAAS,CAsBX;AAaD,eAAO,MAAM,IAAI,EAAE,SAGlB,CAAC;AAeF,wBAAgB,iBAAiB,CAAC,CAAC,EAAE,SAAS,GAAG,SAAS,CAkBzD;AAED,wBAAgB,OAAO,CACrB,IAAI,EAAE,KAAK,GAAG,IAAI,EAClB,UAAU,EAAE,SAAS,SAAS,EAAE,GAC/B,SAAS,EAAE,CAWb;AAmBD,QAAA,MAAM,iBAAiB;;;;;;;;;;;;;;;;;CAIb,CAAC;AAEX,wBAAgB,cAAc,CAAC,EAAE,SAAS,MAAM,OAAO,iBAAiB,EACtE,EAAE,EAAE,EAAE,GACL,CAAC,OAAO,iBAAiB,CAAC,CAAC,EAAE,CAAC,CAEhC"}
@@ -1 +1 @@
1
- {"version":3,"file":"expression.js","names":["#exists"],"sources":["../../../../../zql/src/query/expression.ts"],"sourcesContent":["/* oxlint-disable @typescript-eslint/no-explicit-any */\nimport {must} from '../../../shared/src/must.ts';\nimport {\n toStaticParam,\n type Condition,\n type LiteralValue,\n type Parameter,\n type SimpleOperator,\n} from '../../../zero-protocol/src/ast.ts';\nimport type {Schema as ZeroSchema} from '../../../zero-types/src/schema.ts';\nimport type {\n AvailableRelationships,\n DestTableName,\n ExistsOptions,\n GetFilterType,\n NoCompoundTypeSelector,\n PullTableSchema,\n Query,\n} from './query.ts';\n\nexport type ParameterReference = {\n [toStaticParam](): Parameter;\n};\n\n/**\n * A factory function that creates a condition. This is used to create\n * complex conditions that can be passed to the `where` method of a query.\n *\n * @example\n *\n * ```ts\n * const condition: ExpressionFactory<User> = ({and, cmp, or}) =>\n * and(\n * cmp('name', '=', 'Alice'),\n * or(cmp('age', '>', 18), cmp('isStudent', '=', true)),\n * );\n *\n * const query = z.query.user.where(condition);\n * ```\n */\nexport interface ExpressionFactory<\n TTable extends keyof TSchema['tables'] & string,\n TSchema extends ZeroSchema,\n> {\n (eb: ExpressionBuilder<TTable, TSchema>): Condition;\n}\n\nexport class ExpressionBuilder<\n TTable extends keyof TSchema['tables'] & string,\n TSchema extends ZeroSchema,\n> {\n readonly #exists: (\n relationship: string,\n cb?: (query: Query<TTable, TSchema>) => Query<TTable, TSchema, any>,\n options?: ExistsOptions,\n ) => Condition;\n\n constructor(\n exists: (\n relationship: string,\n cb?: (query: Query<TTable, TSchema>) => Query<TTable, TSchema, any>,\n options?: ExistsOptions,\n ) => Condition,\n ) {\n this.#exists = exists;\n this.exists = this.exists.bind(this);\n }\n\n get eb() {\n return this;\n }\n\n cmp<\n TSelector extends NoCompoundTypeSelector<PullTableSchema<TTable, TSchema>>,\n TOperator extends 
SimpleOperator,\n >(\n field: TSelector,\n op: TOperator,\n value:\n | GetFilterType<PullTableSchema<TTable, TSchema>, TSelector, TOperator>\n | ParameterReference\n | undefined,\n ): Condition;\n cmp<\n TSelector extends NoCompoundTypeSelector<PullTableSchema<TTable, TSchema>>,\n >(\n field: TSelector,\n value:\n | GetFilterType<PullTableSchema<TTable, TSchema>, TSelector, '='>\n | ParameterReference\n | undefined,\n ): Condition;\n cmp(\n field: string,\n opOrValue: SimpleOperator | ParameterReference | LiteralValue | undefined,\n value?: ParameterReference | LiteralValue | undefined,\n ): Condition {\n if (arguments.length === 2) {\n return cmp(field, opOrValue);\n }\n return cmp(field, opOrValue, value);\n }\n\n cmpLit(\n left: ParameterReference | LiteralValue | undefined,\n op: SimpleOperator,\n right: ParameterReference | LiteralValue | undefined,\n ): Condition {\n return {\n type: 'simple',\n left: isParameterReference(left)\n ? left[toStaticParam]()\n : {type: 'literal', value: left ?? null},\n right: isParameterReference(right)\n ? right[toStaticParam]()\n : {type: 'literal', value: right ?? 
null},\n op,\n };\n }\n\n and = and;\n or = or;\n not = not;\n\n exists = <TRelationship extends AvailableRelationships<TTable, TSchema>>(\n relationship: TRelationship,\n cb?: (\n query: Query<DestTableName<TTable, TSchema, TRelationship>, TSchema>,\n ) => Query<any, TSchema>,\n options?: ExistsOptions,\n ): Condition => this.#exists(relationship, cb, options);\n}\n\nexport function and(...conditions: (Condition | undefined)[]): Condition {\n const expressions = filterTrue(filterUndefined(conditions));\n\n if (expressions.length === 1) {\n return expressions[0];\n }\n\n if (expressions.some(isAlwaysFalse)) {\n return FALSE;\n }\n\n return {type: 'and', conditions: expressions};\n}\n\nexport function or(...conditions: (Condition | undefined)[]): Condition {\n const expressions = filterFalse(filterUndefined(conditions));\n\n if (expressions.length === 1) {\n return expressions[0];\n }\n\n if (expressions.some(isAlwaysTrue)) {\n return TRUE;\n }\n\n return {type: 'or', conditions: expressions};\n}\n\nexport function not(expression: Condition): Condition {\n switch (expression.type) {\n case 'and':\n return {\n type: 'or',\n conditions: expression.conditions.map(not),\n };\n case 'or':\n return {\n type: 'and',\n conditions: expression.conditions.map(not),\n };\n case 'correlatedSubquery':\n return {\n type: 'correlatedSubquery',\n related: expression.related,\n op: negateOperator(expression.op),\n ...(expression.flip !== undefined ? {flip: expression.flip} : {}),\n ...(expression.scalar !== undefined ? 
{scalar: expression.scalar} : {}),\n };\n case 'simple':\n return {\n type: 'simple',\n op: negateOperator(expression.op),\n left: expression.left,\n right: expression.right,\n };\n }\n}\n\nexport function cmp(\n field: string,\n opOrValue: SimpleOperator | ParameterReference | LiteralValue | undefined,\n value?: ParameterReference | LiteralValue | undefined,\n): Condition {\n let op: SimpleOperator;\n let actualValue: ParameterReference | LiteralValue | undefined;\n\n if (arguments.length === 2) {\n // 2-arg form: cmp(field, value) - defaults to '=' operator\n actualValue = opOrValue as ParameterReference | LiteralValue | undefined;\n op = '=';\n } else {\n // 3-arg form: cmp(field, op, value)\n op = opOrValue as SimpleOperator;\n actualValue = value;\n }\n\n return {\n type: 'simple',\n left: {type: 'column', name: field},\n right: isParameterReference(actualValue)\n ? actualValue[toStaticParam]()\n : {type: 'literal', value: actualValue ?? null},\n op,\n };\n}\n\nfunction isParameterReference(\n value: ParameterReference | LiteralValue | null | undefined,\n): value is ParameterReference {\n return (\n value !== null &&\n value !== undefined &&\n typeof value === 'object' &&\n (value as any)[toStaticParam]\n );\n}\n\nexport const TRUE: Condition = {\n type: 'and',\n conditions: [],\n};\n\nconst FALSE: Condition = {\n type: 'or',\n conditions: [],\n};\n\nfunction isAlwaysTrue(condition: Condition): boolean {\n return condition.type === 'and' && condition.conditions.length === 0;\n}\n\nfunction isAlwaysFalse(condition: Condition): boolean {\n return condition.type === 'or' && condition.conditions.length === 0;\n}\n\nexport function simplifyCondition(c: Condition): Condition {\n if (c.type === 'simple' || c.type === 'correlatedSubquery') {\n return c;\n }\n if (c.conditions.length === 1) {\n return simplifyCondition(c.conditions[0]);\n }\n const conditions = flatten(c.type, c.conditions.map(simplifyCondition));\n if (c.type === 'and' && 
conditions.some(isAlwaysFalse)) {\n return FALSE;\n }\n if (c.type === 'or' && conditions.some(isAlwaysTrue)) {\n return TRUE;\n }\n return {\n type: c.type,\n conditions,\n };\n}\n\nexport function flatten(\n type: 'and' | 'or',\n conditions: readonly Condition[],\n): Condition[] {\n const flattened: Condition[] = [];\n for (const c of conditions) {\n if (c.type === type) {\n flattened.push(...c.conditions);\n } else {\n flattened.push(c);\n }\n }\n\n return flattened;\n}\n\nconst negateSimpleOperatorMap = {\n ['=']: '!=',\n ['!=']: '=',\n ['<']: '>=',\n ['>']: '<=',\n ['>=']: '<',\n ['<=']: '>',\n ['IN']: 'NOT IN',\n ['NOT IN']: 'IN',\n ['LIKE']: 'NOT LIKE',\n ['NOT LIKE']: 'LIKE',\n ['ILIKE']: 'NOT ILIKE',\n ['NOT ILIKE']: 'ILIKE',\n ['IS']: 'IS NOT',\n ['IS NOT']: 'IS',\n} as const;\n\nconst negateOperatorMap = {\n ...negateSimpleOperatorMap,\n ['EXISTS']: 'NOT EXISTS',\n ['NOT EXISTS']: 'EXISTS',\n} as const;\n\nexport function negateOperator<OP extends keyof typeof negateOperatorMap>(\n op: OP,\n): (typeof negateOperatorMap)[OP] {\n return must(negateOperatorMap[op]);\n}\n\nfunction filterUndefined<T>(array: (T | undefined)[]): T[] {\n return array.filter(e => e !== undefined);\n}\n\nfunction filterTrue(conditions: Condition[]): Condition[] {\n return conditions.filter(c => !isAlwaysTrue(c));\n}\n\nfunction filterFalse(conditions: Condition[]): Condition[] {\n return conditions.filter(c => 
!isAlwaysFalse(c));\n}\n"],"mappings":";;;AA+CA,IAAa,oBAAb,MAGE;CACA;CAMA,YACE,QAKA;AACA,QAAA,SAAe;AACf,OAAK,SAAS,KAAK,OAAO,KAAK,KAAK;;CAGtC,IAAI,KAAK;AACP,SAAO;;CAuBT,IACE,OACA,WACA,OACW;AACX,MAAI,UAAU,WAAW,EACvB,QAAO,IAAI,OAAO,UAAU;AAE9B,SAAO,IAAI,OAAO,WAAW,MAAM;;CAGrC,OACE,MACA,IACA,OACW;AACX,SAAO;GACL,MAAM;GACN,MAAM,qBAAqB,KAAK,GAC5B,KAAK,gBAAgB,GACrB;IAAC,MAAM;IAAW,OAAO,QAAQ;IAAK;GAC1C,OAAO,qBAAqB,MAAM,GAC9B,MAAM,gBAAgB,GACtB;IAAC,MAAM;IAAW,OAAO,SAAS;IAAK;GAC3C;GACD;;CAGH,MAAM;CACN,KAAK;CACL,MAAM;CAEN,UACE,cACA,IAGA,YACc,MAAA,OAAa,cAAc,IAAI,QAAQ;;AAGzD,SAAgB,IAAI,GAAG,YAAkD;CACvE,MAAM,cAAc,WAAW,gBAAgB,WAAW,CAAC;AAE3D,KAAI,YAAY,WAAW,EACzB,QAAO,YAAY;AAGrB,KAAI,YAAY,KAAK,cAAc,CACjC,QAAO;AAGT,QAAO;EAAC,MAAM;EAAO,YAAY;EAAY;;AAG/C,SAAgB,GAAG,GAAG,YAAkD;CACtE,MAAM,cAAc,YAAY,gBAAgB,WAAW,CAAC;AAE5D,KAAI,YAAY,WAAW,EACzB,QAAO,YAAY;AAGrB,KAAI,YAAY,KAAK,aAAa,CAChC,QAAO;AAGT,QAAO;EAAC,MAAM;EAAM,YAAY;EAAY;;AAG9C,SAAgB,IAAI,YAAkC;AACpD,SAAQ,WAAW,MAAnB;EACE,KAAK,MACH,QAAO;GACL,MAAM;GACN,YAAY,WAAW,WAAW,IAAI,IAAI;GAC3C;EACH,KAAK,KACH,QAAO;GACL,MAAM;GACN,YAAY,WAAW,WAAW,IAAI,IAAI;GAC3C;EACH,KAAK,qBACH,QAAO;GACL,MAAM;GACN,SAAS,WAAW;GACpB,IAAI,eAAe,WAAW,GAAG;GACjC,GAAI,WAAW,SAAS,KAAA,IAAY,EAAC,MAAM,WAAW,MAAK,GAAG,EAAE;GAChE,GAAI,WAAW,WAAW,KAAA,IAAY,EAAC,QAAQ,WAAW,QAAO,GAAG,EAAE;GACvE;EACH,KAAK,SACH,QAAO;GACL,MAAM;GACN,IAAI,eAAe,WAAW,GAAG;GACjC,MAAM,WAAW;GACjB,OAAO,WAAW;GACnB;;;AAIP,SAAgB,IACd,OACA,WACA,OACW;CACX,IAAI;CACJ,IAAI;AAEJ,KAAI,UAAU,WAAW,GAAG;AAE1B,gBAAc;AACd,OAAK;QACA;AAEL,OAAK;AACL,gBAAc;;AAGhB,QAAO;EACL,MAAM;EACN,MAAM;GAAC,MAAM;GAAU,MAAM;GAAM;EACnC,OAAO,qBAAqB,YAAY,GACpC,YAAY,gBAAgB,GAC5B;GAAC,MAAM;GAAW,OAAO,eAAe;GAAK;EACjD;EACD;;AAGH,SAAS,qBACP,OAC6B;AAC7B,QACE,UAAU,QACV,UAAU,KAAA,KACV,OAAO,UAAU,YAChB,MAAc;;AAInB,IAAa,OAAkB;CAC7B,MAAM;CACN,YAAY,EAAE;CACf;AAED,IAAM,QAAmB;CACvB,MAAM;CACN,YAAY,EAAE;CACf;AAED,SAAS,aAAa,WAA+B;AACnD,QAAO,UAAU,SAAS,SAAS,UAAU,WAAW,WAAW;;AAGrE,SAAS,cAAc,WAA+B;AACpD,QAAO,UAAU,SAAS,QAAQ,UAAU,WAAW,WAAW;;AAGpE,SAAgB,kBAAkB,GAAyB;AACzD,KAAI,EAAE,SAAS,
YAAY,EAAE,SAAS,qBACpC,QAAO;AAET,KAAI,EAAE,WAAW,WAAW,EAC1B,QAAO,kBAAkB,EAAE,WAAW,GAAG;CAE3C,MAAM,aAAa,QAAQ,EAAE,MAAM,EAAE,WAAW,IAAI,kBAAkB,CAAC;AACvE,KAAI,EAAE,SAAS,SAAS,WAAW,KAAK,cAAc,CACpD,QAAO;AAET,KAAI,EAAE,SAAS,QAAQ,WAAW,KAAK,aAAa,CAClD,QAAO;AAET,QAAO;EACL,MAAM,EAAE;EACR;EACD;;AAGH,SAAgB,QACd,MACA,YACa;CACb,MAAM,YAAyB,EAAE;AACjC,MAAK,MAAM,KAAK,WACd,KAAI,EAAE,SAAS,KACb,WAAU,KAAK,GAAG,EAAE,WAAW;KAE/B,WAAU,KAAK,EAAE;AAIrB,QAAO;;AAoBT,IAAM,oBAAoB;EAhBvB,MAAM;EACN,OAAO;EACP,MAAM;EACN,MAAM;EACN,OAAO;EACP,OAAO;EACP,OAAO;EACP,WAAW;EACX,SAAS;EACT,aAAa;EACb,UAAU;EACV,cAAc;EACd,OAAO;EACP,WAAW;EAKX,WAAW;EACX,eAAe;CACjB;AAED,SAAgB,eACd,IACgC;AAChC,QAAO,KAAK,kBAAkB,IAAI;;AAGpC,SAAS,gBAAmB,OAA+B;AACzD,QAAO,MAAM,QAAO,MAAK,MAAM,KAAA,EAAU;;AAG3C,SAAS,WAAW,YAAsC;AACxD,QAAO,WAAW,QAAO,MAAK,CAAC,aAAa,EAAE,CAAC;;AAGjD,SAAS,YAAY,YAAsC;AACzD,QAAO,WAAW,QAAO,MAAK,CAAC,cAAc,EAAE,CAAC"}
1
+ {"version":3,"file":"expression.js","names":["#exists"],"sources":["../../../../../zql/src/query/expression.ts"],"sourcesContent":["/* oxlint-disable @typescript-eslint/no-explicit-any */\nimport {must} from '../../../shared/src/must.ts';\nimport {\n toStaticParam,\n type Condition,\n type LiteralValue,\n type Parameter,\n type SimpleOperator,\n} from '../../../zero-protocol/src/ast.ts';\nimport type {Schema as ZeroSchema} from '../../../zero-types/src/schema.ts';\nimport type {\n AvailableRelationships,\n DestTableName,\n ExistsOptions,\n GetFilterType,\n NoCompoundTypeSelector,\n PullTableSchema,\n Query,\n} from './query.ts';\n\nexport type ParameterReference = {\n [toStaticParam](): Parameter;\n};\n\n/**\n * A factory function that creates a condition. This is used to create\n * complex conditions that can be passed to the `where` method of a query.\n *\n * @example\n *\n * ```ts\n * const condition: ExpressionFactory<User> = ({and, cmp, or}) =>\n * and(\n * cmp('name', '=', 'Alice'),\n * or(cmp('age', '>', 18), cmp('isStudent', '=', true)),\n * );\n *\n * const query = z.query.user.where(condition);\n * ```\n */\nexport interface ExpressionFactory<\n TTable extends keyof TSchema['tables'] & string,\n TSchema extends ZeroSchema,\n> {\n (eb: ExpressionBuilder<TTable, TSchema>): Condition;\n}\n\nexport class ExpressionBuilder<\n TTable extends keyof TSchema['tables'] & string,\n TSchema extends ZeroSchema,\n> {\n readonly #exists: (\n relationship: string,\n cb?: (query: Query<TTable, TSchema>) => Query<TTable, TSchema, any>,\n options?: ExistsOptions,\n ) => Condition;\n\n constructor(\n exists: (\n relationship: string,\n cb?: (query: Query<TTable, TSchema>) => Query<TTable, TSchema, any>,\n options?: ExistsOptions,\n ) => Condition,\n ) {\n this.#exists = exists;\n this.exists = this.exists.bind(this);\n }\n\n get eb() {\n return this;\n }\n\n cmp<\n TSelector extends NoCompoundTypeSelector<PullTableSchema<TTable, TSchema>>,\n TOperator extends 
SimpleOperator,\n >(\n field: TSelector,\n op: TOperator,\n value:\n | GetFilterType<PullTableSchema<TTable, TSchema>, TSelector, TOperator>\n | ParameterReference\n | undefined,\n ): Condition;\n cmp<\n TSelector extends NoCompoundTypeSelector<PullTableSchema<TTable, TSchema>>,\n >(\n field: TSelector,\n value:\n | GetFilterType<PullTableSchema<TTable, TSchema>, TSelector, '='>\n | ParameterReference\n | undefined,\n ): Condition;\n cmp(\n field: string,\n opOrValue: SimpleOperator | ParameterReference | LiteralValue | undefined,\n value?: ParameterReference | LiteralValue,\n ): Condition {\n if (arguments.length === 2) {\n return cmp(field, opOrValue);\n }\n return cmp(field, opOrValue, value);\n }\n\n cmpLit(\n left: ParameterReference | LiteralValue | undefined,\n op: SimpleOperator,\n right: ParameterReference | LiteralValue | undefined,\n ): Condition {\n return {\n type: 'simple',\n left: isParameterReference(left)\n ? left[toStaticParam]()\n : {type: 'literal', value: left ?? null},\n right: isParameterReference(right)\n ? right[toStaticParam]()\n : {type: 'literal', value: right ?? 
null},\n op,\n };\n }\n\n and = and;\n or = or;\n not = not;\n\n exists = <TRelationship extends AvailableRelationships<TTable, TSchema>>(\n relationship: TRelationship,\n cb?: (\n query: Query<DestTableName<TTable, TSchema, TRelationship>, TSchema>,\n ) => Query<any, TSchema>,\n options?: ExistsOptions,\n ): Condition => this.#exists(relationship, cb, options);\n}\n\nexport function and(...conditions: (Condition | undefined)[]): Condition {\n const expressions = filterTrue(filterUndefined(conditions));\n\n if (expressions.length === 1) {\n return expressions[0];\n }\n\n if (expressions.some(isAlwaysFalse)) {\n return FALSE;\n }\n\n return {type: 'and', conditions: expressions};\n}\n\nexport function or(...conditions: (Condition | undefined)[]): Condition {\n const expressions = filterFalse(filterUndefined(conditions));\n\n if (expressions.length === 1) {\n return expressions[0];\n }\n\n if (expressions.some(isAlwaysTrue)) {\n return TRUE;\n }\n\n return {type: 'or', conditions: expressions};\n}\n\nexport function not(expression: Condition): Condition {\n switch (expression.type) {\n case 'and':\n return {\n type: 'or',\n conditions: expression.conditions.map(not),\n };\n case 'or':\n return {\n type: 'and',\n conditions: expression.conditions.map(not),\n };\n case 'correlatedSubquery':\n return {\n type: 'correlatedSubquery',\n related: expression.related,\n op: negateOperator(expression.op),\n ...(expression.flip !== undefined ? {flip: expression.flip} : {}),\n ...(expression.scalar !== undefined ? 
{scalar: expression.scalar} : {}),\n };\n case 'simple':\n return {\n type: 'simple',\n op: negateOperator(expression.op),\n left: expression.left,\n right: expression.right,\n };\n }\n}\n\nexport function cmp(\n field: string,\n opOrValue: SimpleOperator | ParameterReference | LiteralValue | undefined,\n value?: ParameterReference | LiteralValue,\n): Condition {\n let op: SimpleOperator;\n let actualValue: ParameterReference | LiteralValue | undefined;\n\n if (arguments.length === 2) {\n // 2-arg form: cmp(field, value) - defaults to '=' operator\n actualValue = opOrValue as ParameterReference | LiteralValue | undefined;\n op = '=';\n } else {\n // 3-arg form: cmp(field, op, value)\n op = opOrValue as SimpleOperator;\n actualValue = value;\n }\n\n return {\n type: 'simple',\n left: {type: 'column', name: field},\n right: isParameterReference(actualValue)\n ? actualValue[toStaticParam]()\n : {type: 'literal', value: actualValue ?? null},\n op,\n };\n}\n\nfunction isParameterReference(\n value: ParameterReference | LiteralValue | null | undefined,\n): value is ParameterReference {\n return (\n value !== null &&\n value !== undefined &&\n typeof value === 'object' &&\n (value as any)[toStaticParam]\n );\n}\n\nexport const TRUE: Condition = {\n type: 'and',\n conditions: [],\n};\n\nconst FALSE: Condition = {\n type: 'or',\n conditions: [],\n};\n\nfunction isAlwaysTrue(condition: Condition): boolean {\n return condition.type === 'and' && condition.conditions.length === 0;\n}\n\nfunction isAlwaysFalse(condition: Condition): boolean {\n return condition.type === 'or' && condition.conditions.length === 0;\n}\n\nexport function simplifyCondition(c: Condition): Condition {\n if (c.type === 'simple' || c.type === 'correlatedSubquery') {\n return c;\n }\n if (c.conditions.length === 1) {\n return simplifyCondition(c.conditions[0]);\n }\n const conditions = flatten(c.type, c.conditions.map(simplifyCondition));\n if (c.type === 'and' && conditions.some(isAlwaysFalse)) {\n 
return FALSE;\n }\n if (c.type === 'or' && conditions.some(isAlwaysTrue)) {\n return TRUE;\n }\n return {\n type: c.type,\n conditions,\n };\n}\n\nexport function flatten(\n type: 'and' | 'or',\n conditions: readonly Condition[],\n): Condition[] {\n const flattened: Condition[] = [];\n for (const c of conditions) {\n if (c.type === type) {\n flattened.push(...c.conditions);\n } else {\n flattened.push(c);\n }\n }\n\n return flattened;\n}\n\nconst negateSimpleOperatorMap = {\n ['=']: '!=',\n ['!=']: '=',\n ['<']: '>=',\n ['>']: '<=',\n ['>=']: '<',\n ['<=']: '>',\n ['IN']: 'NOT IN',\n ['NOT IN']: 'IN',\n ['LIKE']: 'NOT LIKE',\n ['NOT LIKE']: 'LIKE',\n ['ILIKE']: 'NOT ILIKE',\n ['NOT ILIKE']: 'ILIKE',\n ['IS']: 'IS NOT',\n ['IS NOT']: 'IS',\n} as const;\n\nconst negateOperatorMap = {\n ...negateSimpleOperatorMap,\n ['EXISTS']: 'NOT EXISTS',\n ['NOT EXISTS']: 'EXISTS',\n} as const;\n\nexport function negateOperator<OP extends keyof typeof negateOperatorMap>(\n op: OP,\n): (typeof negateOperatorMap)[OP] {\n return must(negateOperatorMap[op]);\n}\n\nfunction filterUndefined<T>(array: (T | undefined)[]): T[] {\n return array.filter(e => e !== undefined);\n}\n\nfunction filterTrue(conditions: Condition[]): Condition[] {\n return conditions.filter(c => !isAlwaysTrue(c));\n}\n\nfunction filterFalse(conditions: Condition[]): Condition[] {\n return conditions.filter(c => 
!isAlwaysFalse(c));\n}\n"],"mappings":";;;AA+CA,IAAa,oBAAb,MAGE;CACA;CAMA,YACE,QAKA;AACA,QAAA,SAAe;AACf,OAAK,SAAS,KAAK,OAAO,KAAK,KAAK;;CAGtC,IAAI,KAAK;AACP,SAAO;;CAuBT,IACE,OACA,WACA,OACW;AACX,MAAI,UAAU,WAAW,EACvB,QAAO,IAAI,OAAO,UAAU;AAE9B,SAAO,IAAI,OAAO,WAAW,MAAM;;CAGrC,OACE,MACA,IACA,OACW;AACX,SAAO;GACL,MAAM;GACN,MAAM,qBAAqB,KAAK,GAC5B,KAAK,gBAAgB,GACrB;IAAC,MAAM;IAAW,OAAO,QAAQ;IAAK;GAC1C,OAAO,qBAAqB,MAAM,GAC9B,MAAM,gBAAgB,GACtB;IAAC,MAAM;IAAW,OAAO,SAAS;IAAK;GAC3C;GACD;;CAGH,MAAM;CACN,KAAK;CACL,MAAM;CAEN,UACE,cACA,IAGA,YACc,MAAA,OAAa,cAAc,IAAI,QAAQ;;AAGzD,SAAgB,IAAI,GAAG,YAAkD;CACvE,MAAM,cAAc,WAAW,gBAAgB,WAAW,CAAC;AAE3D,KAAI,YAAY,WAAW,EACzB,QAAO,YAAY;AAGrB,KAAI,YAAY,KAAK,cAAc,CACjC,QAAO;AAGT,QAAO;EAAC,MAAM;EAAO,YAAY;EAAY;;AAG/C,SAAgB,GAAG,GAAG,YAAkD;CACtE,MAAM,cAAc,YAAY,gBAAgB,WAAW,CAAC;AAE5D,KAAI,YAAY,WAAW,EACzB,QAAO,YAAY;AAGrB,KAAI,YAAY,KAAK,aAAa,CAChC,QAAO;AAGT,QAAO;EAAC,MAAM;EAAM,YAAY;EAAY;;AAG9C,SAAgB,IAAI,YAAkC;AACpD,SAAQ,WAAW,MAAnB;EACE,KAAK,MACH,QAAO;GACL,MAAM;GACN,YAAY,WAAW,WAAW,IAAI,IAAI;GAC3C;EACH,KAAK,KACH,QAAO;GACL,MAAM;GACN,YAAY,WAAW,WAAW,IAAI,IAAI;GAC3C;EACH,KAAK,qBACH,QAAO;GACL,MAAM;GACN,SAAS,WAAW;GACpB,IAAI,eAAe,WAAW,GAAG;GACjC,GAAI,WAAW,SAAS,KAAA,IAAY,EAAC,MAAM,WAAW,MAAK,GAAG,EAAE;GAChE,GAAI,WAAW,WAAW,KAAA,IAAY,EAAC,QAAQ,WAAW,QAAO,GAAG,EAAE;GACvE;EACH,KAAK,SACH,QAAO;GACL,MAAM;GACN,IAAI,eAAe,WAAW,GAAG;GACjC,MAAM,WAAW;GACjB,OAAO,WAAW;GACnB;;;AAIP,SAAgB,IACd,OACA,WACA,OACW;CACX,IAAI;CACJ,IAAI;AAEJ,KAAI,UAAU,WAAW,GAAG;AAE1B,gBAAc;AACd,OAAK;QACA;AAEL,OAAK;AACL,gBAAc;;AAGhB,QAAO;EACL,MAAM;EACN,MAAM;GAAC,MAAM;GAAU,MAAM;GAAM;EACnC,OAAO,qBAAqB,YAAY,GACpC,YAAY,gBAAgB,GAC5B;GAAC,MAAM;GAAW,OAAO,eAAe;GAAK;EACjD;EACD;;AAGH,SAAS,qBACP,OAC6B;AAC7B,QACE,UAAU,QACV,UAAU,KAAA,KACV,OAAO,UAAU,YAChB,MAAc;;AAInB,IAAa,OAAkB;CAC7B,MAAM;CACN,YAAY,EAAE;CACf;AAED,IAAM,QAAmB;CACvB,MAAM;CACN,YAAY,EAAE;CACf;AAED,SAAS,aAAa,WAA+B;AACnD,QAAO,UAAU,SAAS,SAAS,UAAU,WAAW,WAAW;;AAGrE,SAAS,cAAc,WAA+B;AACpD,QAAO,UAAU,SAAS,QAAQ,UAAU,WAAW,WAAW;;AAGpE,SAAgB,kBAAkB,GAAyB;AACzD,KAAI,EAAE,SAAS,
YAAY,EAAE,SAAS,qBACpC,QAAO;AAET,KAAI,EAAE,WAAW,WAAW,EAC1B,QAAO,kBAAkB,EAAE,WAAW,GAAG;CAE3C,MAAM,aAAa,QAAQ,EAAE,MAAM,EAAE,WAAW,IAAI,kBAAkB,CAAC;AACvE,KAAI,EAAE,SAAS,SAAS,WAAW,KAAK,cAAc,CACpD,QAAO;AAET,KAAI,EAAE,SAAS,QAAQ,WAAW,KAAK,aAAa,CAClD,QAAO;AAET,QAAO;EACL,MAAM,EAAE;EACR;EACD;;AAGH,SAAgB,QACd,MACA,YACa;CACb,MAAM,YAAyB,EAAE;AACjC,MAAK,MAAM,KAAK,WACd,KAAI,EAAE,SAAS,KACb,WAAU,KAAK,GAAG,EAAE,WAAW;KAE/B,WAAU,KAAK,EAAE;AAIrB,QAAO;;AAoBT,IAAM,oBAAoB;EAhBvB,MAAM;EACN,OAAO;EACP,MAAM;EACN,MAAM;EACN,OAAO;EACP,OAAO;EACP,OAAO;EACP,WAAW;EACX,SAAS;EACT,aAAa;EACb,UAAU;EACV,cAAc;EACd,OAAO;EACP,WAAW;EAKX,WAAW;EACX,eAAe;CACjB;AAED,SAAgB,eACd,IACgC;AAChC,QAAO,KAAK,kBAAkB,IAAI;;AAGpC,SAAS,gBAAmB,OAA+B;AACzD,QAAO,MAAM,QAAO,MAAK,MAAM,KAAA,EAAU;;AAG3C,SAAS,WAAW,YAAsC;AACxD,QAAO,WAAW,QAAO,MAAK,CAAC,aAAa,EAAE,CAAC;;AAGjD,SAAS,YAAY,YAAsC;AACzD,QAAO,WAAW,QAAO,MAAK,CAAC,cAAc,EAAE,CAAC"}