@rocicorp/zero 1.0.0 → 1.1.0

This diff shows the content changes between publicly released versions of this package on a supported registry. The information in this diff is provided for informational purposes only and reflects the package versions as they appear in their respective public registries.
Files changed (126)
  1. package/out/_virtual/{_@oxc-project_runtime@0.115.0 → _@oxc-project_runtime@0.122.0}/helpers/usingCtx.js +1 -1
  2. package/out/analyze-query/src/bin-analyze.js +19 -7
  3. package/out/analyze-query/src/bin-analyze.js.map +1 -1
  4. package/out/replicache/src/mutation-recovery.js +0 -3
  5. package/out/zero/package.js +7 -6
  6. package/out/zero/package.js.map +1 -1
  7. package/out/zero-cache/src/config/zero-config.d.ts +6 -0
  8. package/out/zero-cache/src/config/zero-config.d.ts.map +1 -1
  9. package/out/zero-cache/src/config/zero-config.js +12 -0
  10. package/out/zero-cache/src/config/zero-config.js.map +1 -1
  11. package/out/zero-cache/src/server/anonymous-otel-start.d.ts.map +1 -1
  12. package/out/zero-cache/src/server/anonymous-otel-start.js +1 -14
  13. package/out/zero-cache/src/server/anonymous-otel-start.js.map +1 -1
  14. package/out/zero-cache/src/server/change-streamer.d.ts.map +1 -1
  15. package/out/zero-cache/src/server/change-streamer.js +2 -2
  16. package/out/zero-cache/src/server/change-streamer.js.map +1 -1
  17. package/out/zero-cache/src/services/analyze.js +2 -2
  18. package/out/zero-cache/src/services/change-source/change-source.d.ts +7 -0
  19. package/out/zero-cache/src/services/change-source/change-source.d.ts.map +1 -1
  20. package/out/zero-cache/src/services/change-source/common/change-stream-multiplexer.d.ts.map +1 -1
  21. package/out/zero-cache/src/services/change-source/common/change-stream-multiplexer.js +1 -1
  22. package/out/zero-cache/src/services/change-source/common/change-stream-multiplexer.js.map +1 -1
  23. package/out/zero-cache/src/services/change-source/custom/change-source.d.ts.map +1 -1
  24. package/out/zero-cache/src/services/change-source/custom/change-source.js +3 -0
  25. package/out/zero-cache/src/services/change-source/custom/change-source.js.map +1 -1
  26. package/out/zero-cache/src/services/change-source/pg/change-source.d.ts +9 -1
  27. package/out/zero-cache/src/services/change-source/pg/change-source.d.ts.map +1 -1
  28. package/out/zero-cache/src/services/change-source/pg/change-source.js +172 -46
  29. package/out/zero-cache/src/services/change-source/pg/change-source.js.map +1 -1
  30. package/out/zero-cache/src/services/change-source/pg/lsn.js +1 -1
  31. package/out/zero-cache/src/services/change-source/protocol/current/downstream.d.ts +8 -0
  32. package/out/zero-cache/src/services/change-source/protocol/current/downstream.d.ts.map +1 -1
  33. package/out/zero-cache/src/services/change-source/protocol/current/status.d.ts +26 -1
  34. package/out/zero-cache/src/services/change-source/protocol/current/status.d.ts.map +1 -1
  35. package/out/zero-cache/src/services/change-source/protocol/current/status.js +7 -2
  36. package/out/zero-cache/src/services/change-source/protocol/current/status.js.map +1 -1
  37. package/out/zero-cache/src/services/change-source/protocol/current/upstream.d.ts +8 -0
  38. package/out/zero-cache/src/services/change-source/protocol/current/upstream.d.ts.map +1 -1
  39. package/out/zero-cache/src/services/change-streamer/change-streamer-service.d.ts.map +1 -1
  40. package/out/zero-cache/src/services/change-streamer/change-streamer-service.js +10 -2
  41. package/out/zero-cache/src/services/change-streamer/change-streamer-service.js.map +1 -1
  42. package/out/zero-cache/src/services/change-streamer/change-streamer.d.ts +25 -0
  43. package/out/zero-cache/src/services/change-streamer/change-streamer.d.ts.map +1 -1
  44. package/out/zero-cache/src/services/change-streamer/change-streamer.js +8 -1
  45. package/out/zero-cache/src/services/change-streamer/change-streamer.js.map +1 -1
  46. package/out/zero-cache/src/services/change-streamer/forwarder.d.ts +2 -0
  47. package/out/zero-cache/src/services/change-streamer/forwarder.d.ts.map +1 -1
  48. package/out/zero-cache/src/services/change-streamer/forwarder.js +3 -0
  49. package/out/zero-cache/src/services/change-streamer/forwarder.js.map +1 -1
  50. package/out/zero-cache/src/services/change-streamer/subscriber.d.ts +3 -2
  51. package/out/zero-cache/src/services/change-streamer/subscriber.d.ts.map +1 -1
  52. package/out/zero-cache/src/services/change-streamer/subscriber.js +17 -8
  53. package/out/zero-cache/src/services/change-streamer/subscriber.js.map +1 -1
  54. package/out/zero-cache/src/services/life-cycle.d.ts.map +1 -1
  55. package/out/zero-cache/src/services/life-cycle.js +6 -2
  56. package/out/zero-cache/src/services/life-cycle.js.map +1 -1
  57. package/out/zero-cache/src/services/replicator/incremental-sync.d.ts +2 -2
  58. package/out/zero-cache/src/services/replicator/incremental-sync.d.ts.map +1 -1
  59. package/out/zero-cache/src/services/replicator/incremental-sync.js +19 -4
  60. package/out/zero-cache/src/services/replicator/incremental-sync.js.map +1 -1
  61. package/out/zero-cache/src/services/replicator/replicator.d.ts.map +1 -1
  62. package/out/zero-cache/src/services/replicator/replicator.js +2 -2
  63. package/out/zero-cache/src/services/replicator/replicator.js.map +1 -1
  64. package/out/zero-cache/src/services/replicator/reporter/recorder.d.ts +12 -0
  65. package/out/zero-cache/src/services/replicator/reporter/recorder.d.ts.map +1 -0
  66. package/out/zero-cache/src/services/replicator/reporter/recorder.js +58 -0
  67. package/out/zero-cache/src/services/replicator/reporter/recorder.js.map +1 -0
  68. package/out/zero-cache/src/services/replicator/reporter/report-schema.d.ts +35 -0
  69. package/out/zero-cache/src/services/replicator/reporter/report-schema.d.ts.map +1 -0
  70. package/out/zero-cache/src/services/replicator/reporter/report-schema.js +20 -0
  71. package/out/zero-cache/src/services/replicator/reporter/report-schema.js.map +1 -0
  72. package/out/zero-cache/src/services/run-ast.js +1 -1
  73. package/out/zero-cache/src/services/view-syncer/inspect-handler.js +1 -1
  74. package/out/zero-cache/src/types/pg.d.ts.map +1 -1
  75. package/out/zero-cache/src/types/pg.js +2 -0
  76. package/out/zero-cache/src/types/pg.js.map +1 -1
  77. package/out/zero-cache/src/workers/replicator.d.ts.map +1 -1
  78. package/out/zero-cache/src/workers/replicator.js +1 -0
  79. package/out/zero-cache/src/workers/replicator.js.map +1 -1
  80. package/out/zero-client/src/client/version.js +1 -1
  81. package/out/zql/src/builder/builder.d.ts.map +1 -1
  82. package/out/zql/src/builder/builder.js +15 -5
  83. package/out/zql/src/builder/builder.js.map +1 -1
  84. package/out/zql/src/ivm/cap.d.ts +32 -0
  85. package/out/zql/src/ivm/cap.d.ts.map +1 -0
  86. package/out/zql/src/ivm/cap.js +226 -0
  87. package/out/zql/src/ivm/cap.js.map +1 -0
  88. package/out/zql/src/ivm/join-utils.d.ts +2 -0
  89. package/out/zql/src/ivm/join-utils.d.ts.map +1 -1
  90. package/out/zql/src/ivm/join-utils.js +35 -1
  91. package/out/zql/src/ivm/join-utils.js.map +1 -1
  92. package/out/zql/src/ivm/join.d.ts.map +1 -1
  93. package/out/zql/src/ivm/join.js +6 -2
  94. package/out/zql/src/ivm/join.js.map +1 -1
  95. package/out/zql/src/ivm/memory-source.d.ts +15 -2
  96. package/out/zql/src/ivm/memory-source.d.ts.map +1 -1
  97. package/out/zql/src/ivm/memory-source.js +69 -8
  98. package/out/zql/src/ivm/memory-source.js.map +1 -1
  99. package/out/zql/src/ivm/schema.d.ts +1 -1
  100. package/out/zql/src/ivm/schema.d.ts.map +1 -1
  101. package/out/zql/src/ivm/skip.d.ts.map +1 -1
  102. package/out/zql/src/ivm/skip.js +3 -0
  103. package/out/zql/src/ivm/skip.js.map +1 -1
  104. package/out/zql/src/ivm/source.d.ts +1 -1
  105. package/out/zql/src/ivm/source.d.ts.map +1 -1
  106. package/out/zql/src/ivm/take.d.ts +4 -1
  107. package/out/zql/src/ivm/take.d.ts.map +1 -1
  108. package/out/zql/src/ivm/take.js +4 -2
  109. package/out/zql/src/ivm/take.js.map +1 -1
  110. package/out/zql/src/ivm/union-fan-in.d.ts.map +1 -1
  111. package/out/zql/src/ivm/union-fan-in.js +1 -0
  112. package/out/zql/src/ivm/union-fan-in.js.map +1 -1
  113. package/out/zqlite/src/query-builder.d.ts +1 -1
  114. package/out/zqlite/src/query-builder.d.ts.map +1 -1
  115. package/out/zqlite/src/query-builder.js +7 -2
  116. package/out/zqlite/src/query-builder.js.map +1 -1
  117. package/out/zqlite/src/table-source.d.ts +1 -1
  118. package/out/zqlite/src/table-source.d.ts.map +1 -1
  119. package/out/zqlite/src/table-source.js +15 -10
  120. package/out/zqlite/src/table-source.js.map +1 -1
  121. package/package.json +7 -6
  122. package/out/analyze-query/src/run-ast.d.ts +0 -22
  123. package/out/analyze-query/src/run-ast.d.ts.map +0 -1
  124. package/out/analyze-query/src/run-ast.js +0 -75
  125. package/out/analyze-query/src/run-ast.js.map +0 -1
  126. package/out/replicache/src/mutation-recovery.js.map +0 -1
@@ -1 +1 @@
1
- {"version":3,"file":"take.js","names":["#input","#storage","#limit","#partitionKey","#partitionKeyComparator","#output","#initialFetch","#rowHiddenFromFetch","#setTakeState","#pushEditChange","#getStateAndConstraint","#pushWithRowHiddenFromFetch"],"sources":["../../../../../zql/src/ivm/take.ts"],"sourcesContent":["import {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport {hasOwn} from '../../../shared/src/has-own.ts';\nimport type {Row, Value} from '../../../zero-protocol/src/data.ts';\nimport type {PrimaryKey} from '../../../zero-protocol/src/primary-key.ts';\nimport {assertOrderingIncludesPK} from '../query/complete-ordering.ts';\nimport {type Change, type EditChange, type RemoveChange} from './change.ts';\nimport type {Constraint} from './constraint.ts';\nimport {compareValues, type Comparator, type Node} from './data.ts';\nimport {\n throwOutput,\n type FetchRequest,\n type Input,\n type Operator,\n type Output,\n type Storage,\n} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport {type Stream} from './stream.ts';\n\nconst MAX_BOUND_KEY = 'maxBound';\n\ntype TakeState = {\n size: number;\n bound: Row | undefined;\n};\n\ninterface TakeStorage {\n get(key: typeof MAX_BOUND_KEY): Row | undefined;\n get(key: string): TakeState | undefined;\n set(key: typeof MAX_BOUND_KEY, value: Row): void;\n set(key: string, value: TakeState): void;\n del(key: string): void;\n}\n\nexport type PartitionKey = PrimaryKey;\n\n/**\n * The Take operator is for implementing limit queries. It takes the first n\n * nodes of its input as determined by the input’s comparator. 
It then keeps\n * a *bound* of the last item it has accepted so that it can evaluate whether\n * new incoming pushes should be accepted or rejected.\n *\n * Take can count rows globally or by unique value of some field.\n *\n * Maintains the invariant that its output size is always <= limit, even\n * mid processing of a push.\n */\nexport class Take implements Operator {\n readonly #input: Input;\n readonly #storage: TakeStorage;\n readonly #limit: number;\n readonly #partitionKey: PartitionKey | undefined;\n readonly #partitionKeyComparator: Comparator | undefined;\n // Fetch overlay needed for some split push cases.\n #rowHiddenFromFetch: Row | undefined;\n\n #output: Output = throwOutput;\n\n constructor(\n input: Input,\n storage: Storage,\n limit: number,\n partitionKey?: PartitionKey,\n ) {\n assert(limit >= 0, 'Limit must be non-negative');\n assertOrderingIncludesPK(\n input.getSchema().sort,\n input.getSchema().primaryKey,\n );\n input.setOutput(this);\n this.#input = input;\n this.#storage = storage as TakeStorage;\n this.#limit = limit;\n this.#partitionKey = partitionKey;\n this.#partitionKeyComparator =\n partitionKey && makePartitionKeyComparator(partitionKey);\n }\n\n setOutput(output: Output): void {\n this.#output = output;\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n *fetch(req: FetchRequest): Stream<Node | 'yield'> {\n if (\n !this.#partitionKey ||\n (req.constraint &&\n constraintMatchesPartitionKey(req.constraint, this.#partitionKey))\n ) {\n const takeStateKey = getTakeStateKey(this.#partitionKey, req.constraint);\n const takeState = this.#storage.get(takeStateKey);\n if (!takeState) {\n yield* this.#initialFetch(req);\n return;\n }\n if (takeState.bound === undefined) {\n return;\n }\n for (const inputNode of this.#input.fetch(req)) {\n if (inputNode === 'yield') {\n yield inputNode;\n continue;\n }\n if (this.getSchema().compareRows(takeState.bound, inputNode.row) < 0) {\n return;\n }\n if (\n 
this.#rowHiddenFromFetch &&\n this.getSchema().compareRows(\n this.#rowHiddenFromFetch,\n inputNode.row,\n ) === 0\n ) {\n continue;\n }\n yield inputNode;\n }\n return;\n }\n // There is a partition key, but the fetch is not constrained or constrained\n // on a different key. Thus we don't have a single take state to bound by.\n // This currently only happens with nested sub-queries\n // e.g. issues include issuelabels include label. We could remove this\n // case if we added a translation layer (powered by some state) in join.\n // Specifically we need joinKeyValue => parent constraint key\n const maxBound = this.#storage.get(MAX_BOUND_KEY);\n if (maxBound === undefined) {\n return;\n }\n for (const inputNode of this.#input.fetch(req)) {\n if (inputNode === 'yield') {\n yield inputNode;\n continue;\n }\n if (this.getSchema().compareRows(inputNode.row, maxBound) > 0) {\n return;\n }\n const takeStateKey = getTakeStateKey(this.#partitionKey, inputNode.row);\n const takeState = this.#storage.get(takeStateKey);\n if (\n takeState?.bound !== undefined &&\n this.getSchema().compareRows(takeState.bound, inputNode.row) >= 0\n ) {\n yield inputNode;\n }\n }\n }\n\n *#initialFetch(req: FetchRequest): Stream<Node | 'yield'> {\n assert(req.start === undefined, 'Start should be undefined');\n assert(!req.reverse, 'Reverse should be false');\n assert(\n constraintMatchesPartitionKey(req.constraint, this.#partitionKey),\n 'Constraint should match partition key',\n );\n\n if (this.#limit === 0) {\n return;\n }\n\n const takeStateKey = getTakeStateKey(this.#partitionKey, req.constraint);\n assert(\n this.#storage.get(takeStateKey) === undefined,\n 'Take state should be undefined',\n );\n\n let size = 0;\n let bound: Row | undefined;\n let downstreamEarlyReturn = true;\n let exceptionThrown = false;\n try {\n for (const inputNode of this.#input.fetch(req)) {\n if (inputNode === 'yield') {\n yield 'yield';\n continue;\n }\n yield inputNode;\n bound = inputNode.row;\n size++;\n if 
(size === this.#limit) {\n break;\n }\n }\n downstreamEarlyReturn = false;\n } catch (e) {\n exceptionThrown = true;\n throw e;\n } finally {\n if (!exceptionThrown) {\n this.#setTakeState(\n takeStateKey,\n size,\n bound,\n this.#storage.get(MAX_BOUND_KEY),\n );\n // If it becomes necessary to support downstream early return, this\n // assert should be removed, and replaced with code that consumes\n // the input stream until limit is reached or the input stream is\n // exhausted so that takeState is properly hydrated.\n assert(\n !downstreamEarlyReturn,\n 'Unexpected early return prevented full hydration',\n );\n }\n }\n }\n\n #getStateAndConstraint(row: Row) {\n const takeStateKey = getTakeStateKey(this.#partitionKey, row);\n const takeState = this.#storage.get(takeStateKey);\n let maxBound: Row | undefined;\n let constraint: Constraint | undefined;\n if (takeState) {\n maxBound = this.#storage.get(MAX_BOUND_KEY);\n constraint =\n this.#partitionKey &&\n Object.fromEntries(\n this.#partitionKey.map(key => [key, row[key]] as const),\n );\n }\n\n return {takeState, takeStateKey, maxBound, constraint} as\n | {\n takeState: undefined;\n takeStateKey: string;\n maxBound: undefined;\n constraint: undefined;\n }\n | {\n takeState: TakeState;\n takeStateKey: string;\n maxBound: Row | undefined;\n constraint: Constraint | undefined;\n };\n }\n\n *push(change: Change): Stream<'yield'> {\n if (change.type === 'edit') {\n yield* this.#pushEditChange(change);\n return;\n }\n\n const {takeState, takeStateKey, maxBound, constraint} =\n this.#getStateAndConstraint(change.node.row);\n if (!takeState) {\n return;\n }\n\n const {compareRows} = this.getSchema();\n\n if (change.type === 'add') {\n if (takeState.size < this.#limit) {\n this.#setTakeState(\n takeStateKey,\n takeState.size + 1,\n takeState.bound === undefined ||\n compareRows(takeState.bound, change.node.row) < 0\n ? 
change.node.row\n : takeState.bound,\n maxBound,\n );\n yield* this.#output.push(change, this);\n return;\n }\n // size === limit\n if (\n takeState.bound === undefined ||\n compareRows(change.node.row, takeState.bound) >= 0\n ) {\n return;\n }\n // added row < bound\n let beforeBoundNode: Node | undefined;\n let boundNode: Node | undefined;\n if (this.#limit === 1) {\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n })) {\n if (node === 'yield') {\n yield node;\n continue;\n }\n boundNode = node;\n break;\n }\n } else {\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n reverse: true,\n })) {\n if (node === 'yield') {\n yield node;\n continue;\n } else if (boundNode === undefined) {\n boundNode = node;\n } else {\n beforeBoundNode = node;\n break;\n }\n }\n }\n assert(\n boundNode !== undefined,\n 'Take: boundNode must be found during fetch',\n );\n const removeChange: RemoveChange = {\n type: 'remove',\n node: boundNode,\n };\n // Remove before add to maintain invariant that\n // output size <= limit.\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n beforeBoundNode === undefined ||\n compareRows(change.node.row, beforeBoundNode.row) > 0\n ? 
change.node.row\n : beforeBoundNode.row,\n maxBound,\n );\n yield* this.#pushWithRowHiddenFromFetch(change.node.row, removeChange);\n yield* this.#output.push(change, this);\n } else if (change.type === 'remove') {\n if (takeState.bound === undefined) {\n // change is after bound\n return;\n }\n const compToBound = compareRows(change.node.row, takeState.bound);\n if (compToBound > 0) {\n // change is after bound\n return;\n }\n let beforeBoundNode: Node | undefined;\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'after',\n },\n constraint,\n reverse: true,\n })) {\n if (node === 'yield') {\n yield node;\n continue;\n }\n beforeBoundNode = node;\n break;\n }\n\n let newBound: {node: Node; push: boolean} | undefined;\n if (beforeBoundNode) {\n const push = compareRows(beforeBoundNode.row, takeState.bound) > 0;\n newBound = {\n node: beforeBoundNode,\n push,\n };\n }\n if (!newBound?.push) {\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n })) {\n if (node === 'yield') {\n yield node;\n continue;\n }\n const push = compareRows(node.row, takeState.bound) > 0;\n newBound = {\n node,\n push,\n };\n if (push) {\n break;\n }\n }\n }\n\n if (newBound?.push) {\n yield* this.#output.push(change, this);\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n newBound.node.row,\n maxBound,\n );\n yield* this.#output.push(\n {\n type: 'add',\n node: newBound.node,\n },\n this,\n );\n return;\n }\n this.#setTakeState(\n takeStateKey,\n takeState.size - 1,\n newBound?.node.row,\n maxBound,\n );\n yield* this.#output.push(change, this);\n } else if (change.type === 'child') {\n // A 'child' change should be pushed to output if its row\n // is <= bound.\n if (\n takeState.bound &&\n compareRows(change.node.row, takeState.bound) <= 0\n ) {\n yield* this.#output.push(change, this);\n }\n }\n }\n\n *#pushEditChange(change: EditChange): Stream<'yield'> {\n assert(\n 
!this.#partitionKeyComparator ||\n this.#partitionKeyComparator(change.oldNode.row, change.node.row) === 0,\n 'Unexpected change of partition key',\n );\n\n const {takeState, takeStateKey, maxBound, constraint} =\n this.#getStateAndConstraint(change.oldNode.row);\n if (!takeState) {\n return;\n }\n\n assert(takeState.bound, 'Bound should be set');\n const {compareRows} = this.getSchema();\n const oldCmp = compareRows(change.oldNode.row, takeState.bound);\n const newCmp = compareRows(change.node.row, takeState.bound);\n\n const that = this;\n const replaceBoundAndForwardChange = function* () {\n that.#setTakeState(\n takeStateKey,\n takeState.size,\n change.node.row,\n maxBound,\n );\n yield* that.#output.push(change, that);\n };\n\n // The bounds row was changed.\n if (oldCmp === 0) {\n // The new row is the new bound.\n if (newCmp === 0) {\n // no need to update the state since we are keeping the bounds\n yield* this.#output.push(change, this);\n return;\n }\n\n if (newCmp < 0) {\n if (this.#limit === 1) {\n yield* replaceBoundAndForwardChange();\n return;\n }\n\n // New row will be in the result but it might not be the bounds any\n // more. We need to find the row before the bounds to determine the new\n // bounds.\n\n let beforeBoundNode: Node | undefined;\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'after',\n },\n constraint,\n reverse: true,\n })) {\n if (node === 'yield') {\n yield node;\n continue;\n }\n beforeBoundNode = node;\n break;\n }\n assert(\n beforeBoundNode !== undefined,\n 'Take: beforeBoundNode must be found during fetch',\n );\n\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n beforeBoundNode.row,\n maxBound,\n );\n yield* this.#output.push(change, this);\n return;\n }\n\n assert(newCmp > 0, 'New comparison must be greater than 0');\n // Find the first item at the old bounds. 
This will be the new bounds.\n let newBoundNode: Node | undefined;\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n })) {\n if (node === 'yield') {\n yield node;\n continue;\n }\n newBoundNode = node;\n break;\n }\n assert(\n newBoundNode !== undefined,\n 'Take: newBoundNode must be found during fetch',\n );\n\n // The next row is the new row. We can replace the bounds and keep the\n // edit change.\n if (compareRows(newBoundNode.row, change.node.row) === 0) {\n yield* replaceBoundAndForwardChange();\n return;\n }\n\n // The new row is now outside the bounds, so we need to remove the old\n // row and add the new bounds row.\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n newBoundNode.row,\n maxBound,\n );\n yield* this.#pushWithRowHiddenFromFetch(newBoundNode.row, {\n type: 'remove',\n node: change.oldNode,\n });\n yield* this.#output.push(\n {\n type: 'add',\n node: newBoundNode,\n },\n this,\n );\n return;\n }\n\n if (oldCmp > 0) {\n assert(newCmp !== 0, 'Invalid state. Row has duplicate primary key');\n\n // Both old and new outside of bounds\n if (newCmp > 0) {\n return;\n }\n\n // old was outside, new is inside. 
Pushing out the old bounds\n assert(newCmp < 0, 'New comparison must be less than 0');\n\n let oldBoundNode: Node | undefined;\n let newBoundNode: Node | undefined;\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n reverse: true,\n })) {\n if (node === 'yield') {\n yield node;\n continue;\n } else if (oldBoundNode === undefined) {\n oldBoundNode = node;\n } else {\n newBoundNode = node;\n break;\n }\n }\n assert(\n oldBoundNode !== undefined,\n 'Take: oldBoundNode must be found during fetch',\n );\n assert(\n newBoundNode !== undefined,\n 'Take: newBoundNode must be found during fetch',\n );\n\n // Remove before add to maintain invariant that\n // output size <= limit.\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n newBoundNode.row,\n maxBound,\n );\n yield* this.#pushWithRowHiddenFromFetch(change.node.row, {\n type: 'remove',\n node: oldBoundNode,\n });\n yield* this.#output.push(\n {\n type: 'add',\n node: change.node,\n },\n this,\n );\n\n return;\n }\n\n if (oldCmp < 0) {\n assert(newCmp !== 0, 'Invalid state. Row has duplicate primary key');\n\n // Both old and new inside of bounds\n if (newCmp < 0) {\n yield* this.#output.push(change, this);\n return;\n }\n\n // old was inside, new is larger than old bound\n\n assert(newCmp > 0, 'New comparison must be greater than 0');\n\n // at this point we need to find the row after the bound and use that or\n // the newRow as the new bound.\n let afterBoundNode: Node | undefined;\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'after',\n },\n constraint,\n })) {\n if (node === 'yield') {\n yield node;\n continue;\n }\n afterBoundNode = node;\n break;\n }\n assert(\n afterBoundNode !== undefined,\n 'Take: afterBoundNode must be found during fetch',\n );\n\n // The new row is the new bound. 
Use an edit change.\n if (compareRows(afterBoundNode.row, change.node.row) === 0) {\n yield* replaceBoundAndForwardChange();\n return;\n }\n\n yield* this.#output.push(\n {\n type: 'remove',\n node: change.oldNode,\n },\n this,\n );\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n afterBoundNode.row,\n maxBound,\n );\n yield* this.#output.push(\n {\n type: 'add',\n node: afterBoundNode,\n },\n this,\n );\n return;\n }\n\n unreachable();\n }\n\n *#pushWithRowHiddenFromFetch(row: Row, change: Change) {\n this.#rowHiddenFromFetch = row;\n try {\n yield* this.#output.push(change, this);\n } finally {\n this.#rowHiddenFromFetch = undefined;\n }\n }\n\n #setTakeState(\n takeStateKey: string,\n size: number,\n bound: Row | undefined,\n maxBound: Row | undefined,\n ) {\n this.#storage.set(takeStateKey, {\n size,\n bound,\n });\n if (\n bound !== undefined &&\n (maxBound === undefined ||\n this.getSchema().compareRows(bound, maxBound) > 0)\n ) {\n this.#storage.set(MAX_BOUND_KEY, bound);\n }\n }\n\n destroy(): void {\n this.#input.destroy();\n }\n}\n\nfunction getTakeStateKey(\n partitionKey: PartitionKey | undefined,\n rowOrConstraint: Row | Constraint | undefined,\n): string {\n // The order must be consistent. 
We always use the order as defined by the\n // partition key.\n const partitionValues: Value[] = [];\n\n if (partitionKey && rowOrConstraint) {\n for (const key of partitionKey) {\n partitionValues.push(rowOrConstraint[key]);\n }\n }\n\n return JSON.stringify(['take', ...partitionValues]);\n}\n\nfunction constraintMatchesPartitionKey(\n constraint: Constraint | undefined,\n partitionKey: PartitionKey | undefined,\n): boolean {\n if (constraint === undefined || partitionKey === undefined) {\n return constraint === partitionKey;\n }\n if (partitionKey.length !== Object.keys(constraint).length) {\n return false;\n }\n for (const key of partitionKey) {\n if (!hasOwn(constraint, key)) {\n return false;\n }\n }\n return true;\n}\n\nfunction makePartitionKeyComparator(partitionKey: PartitionKey): Comparator {\n return (a, b) => {\n for (const key of partitionKey) {\n const cmp = compareValues(a[key], b[key]);\n if (cmp !== 0) {\n return cmp;\n }\n }\n return 0;\n };\n}\n"],"mappings":";;;;;;AAmBA,IAAM,gBAAgB;;;;;;;;;;;;AA4BtB,IAAa,OAAb,MAAsC;CACpC;CACA;CACA;CACA;CACA;CAEA;CAEA,UAAkB;CAElB,YACE,OACA,SACA,OACA,cACA;AACA,SAAO,SAAS,GAAG,6BAA6B;AAChD,2BACE,MAAM,WAAW,CAAC,MAClB,MAAM,WAAW,CAAC,WACnB;AACD,QAAM,UAAU,KAAK;AACrB,QAAA,QAAc;AACd,QAAA,UAAgB;AAChB,QAAA,QAAc;AACd,QAAA,eAAqB;AACrB,QAAA,yBACE,gBAAgB,2BAA2B,aAAa;;CAG5D,UAAU,QAAsB;AAC9B,QAAA,SAAe;;CAGjB,YAA0B;AACxB,SAAO,MAAA,MAAY,WAAW;;CAGhC,CAAC,MAAM,KAA2C;AAChD,MACE,CAAC,MAAA,gBACA,IAAI,cACH,8BAA8B,IAAI,YAAY,MAAA,aAAmB,EACnE;GACA,MAAM,eAAe,gBAAgB,MAAA,cAAoB,IAAI,WAAW;GACxE,MAAM,YAAY,MAAA,QAAc,IAAI,aAAa;AACjD,OAAI,CAAC,WAAW;AACd,WAAO,MAAA,aAAmB,IAAI;AAC9B;;AAEF,OAAI,UAAU,UAAU,KAAA,EACtB;AAEF,QAAK,MAAM,aAAa,MAAA,MAAY,MAAM,IAAI,EAAE;AAC9C,QAAI,cAAc,SAAS;AACzB,WAAM;AACN;;AAEF,QAAI,KAAK,WAAW,CAAC,YAAY,UAAU,OAAO,UAAU,IAAI,GAAG,EACjE;AAEF,QACE,MAAA,sBACA,KAAK,WAAW,CAAC,YACf,MAAA,oBACA,UAAU,IACX,KAAK,EAEN;AAEF,UAAM;;AAER;;EAQF,MAAM,WAAW,MAAA,QAAc,IAAI,cAAc;AACjD,MAAI,aAAa,KAAA,EACf;AAEF,OAAK,MAAM,aAAa,MAAA,MAAY,MAAM,IAAI,EAAE;AAC9C
,OAAI,cAAc,SAAS;AACzB,UAAM;AACN;;AAEF,OAAI,KAAK,WAAW,CAAC,YAAY,UAAU,KAAK,SAAS,GAAG,EAC1D;GAEF,MAAM,eAAe,gBAAgB,MAAA,cAAoB,UAAU,IAAI;GACvE,MAAM,YAAY,MAAA,QAAc,IAAI,aAAa;AACjD,OACE,WAAW,UAAU,KAAA,KACrB,KAAK,WAAW,CAAC,YAAY,UAAU,OAAO,UAAU,IAAI,IAAI,EAEhE,OAAM;;;CAKZ,EAAA,aAAe,KAA2C;AACxD,SAAO,IAAI,UAAU,KAAA,GAAW,4BAA4B;AAC5D,SAAO,CAAC,IAAI,SAAS,0BAA0B;AAC/C,SACE,8BAA8B,IAAI,YAAY,MAAA,aAAmB,EACjE,wCACD;AAED,MAAI,MAAA,UAAgB,EAClB;EAGF,MAAM,eAAe,gBAAgB,MAAA,cAAoB,IAAI,WAAW;AACxE,SACE,MAAA,QAAc,IAAI,aAAa,KAAK,KAAA,GACpC,iCACD;EAED,IAAI,OAAO;EACX,IAAI;EACJ,IAAI,wBAAwB;EAC5B,IAAI,kBAAkB;AACtB,MAAI;AACF,QAAK,MAAM,aAAa,MAAA,MAAY,MAAM,IAAI,EAAE;AAC9C,QAAI,cAAc,SAAS;AACzB,WAAM;AACN;;AAEF,UAAM;AACN,YAAQ,UAAU;AAClB;AACA,QAAI,SAAS,MAAA,MACX;;AAGJ,2BAAwB;WACjB,GAAG;AACV,qBAAkB;AAClB,SAAM;YACE;AACR,OAAI,CAAC,iBAAiB;AACpB,UAAA,aACE,cACA,MACA,OACA,MAAA,QAAc,IAAI,cAAc,CACjC;AAKD,WACE,CAAC,uBACD,mDACD;;;;CAKP,uBAAuB,KAAU;EAC/B,MAAM,eAAe,gBAAgB,MAAA,cAAoB,IAAI;EAC7D,MAAM,YAAY,MAAA,QAAc,IAAI,aAAa;EACjD,IAAI;EACJ,IAAI;AACJ,MAAI,WAAW;AACb,cAAW,MAAA,QAAc,IAAI,cAAc;AAC3C,gBACE,MAAA,gBACA,OAAO,YACL,MAAA,aAAmB,KAAI,QAAO,CAAC,KAAK,IAAI,KAAK,CAAU,CACxD;;AAGL,SAAO;GAAC;GAAW;GAAc;GAAU;GAAW;;CAexD,CAAC,KAAK,QAAiC;AACrC,MAAI,OAAO,SAAS,QAAQ;AAC1B,UAAO,MAAA,eAAqB,OAAO;AACnC;;EAGF,MAAM,EAAC,WAAW,cAAc,UAAU,eACxC,MAAA,sBAA4B,OAAO,KAAK,IAAI;AAC9C,MAAI,CAAC,UACH;EAGF,MAAM,EAAC,gBAAe,KAAK,WAAW;AAEtC,MAAI,OAAO,SAAS,OAAO;AACzB,OAAI,UAAU,OAAO,MAAA,OAAa;AAChC,UAAA,aACE,cACA,UAAU,OAAO,GACjB,UAAU,UAAU,KAAA,KAClB,YAAY,UAAU,OAAO,OAAO,KAAK,IAAI,GAAG,IAC9C,OAAO,KAAK,MACZ,UAAU,OACd,SACD;AACD,WAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;AACtC;;AAGF,OACE,UAAU,UAAU,KAAA,KACpB,YAAY,OAAO,KAAK,KAAK,UAAU,MAAM,IAAI,EAEjD;GAGF,IAAI;GACJ,IAAI;AACJ,OAAI,MAAA,UAAgB,EAClB,MAAK,MAAM,QAAQ,MAAA,MAAY,MAAM;IACnC,OAAO;KACL,KAAK,UAAU;KACf,OAAO;KACR;IACD;IACD,CAAC,EAAE;AACF,QAAI,SAAS,SAAS;AACpB,WAAM;AACN;;AAEF,gBAAY;AACZ;;OAGF,MAAK,MAAM,QAAQ,MAAA,MAAY,MAAM;IACnC,OAAO;KACL,KAAK,UAAU;KACf,OAAO;KACR;IACD;IACA,SAAS;IACV,CAAC,CACA,KAAI,SAAS,SAAS;AACpB,UAAM;AACN
;cACS,cAAc,KAAA,EACvB,aAAY;QACP;AACL,sBAAkB;AAClB;;AAIN,UACE,cAAc,KAAA,GACd,6CACD;GACD,MAAM,eAA6B;IACjC,MAAM;IACN,MAAM;IACP;AAGD,SAAA,aACE,cACA,UAAU,MACV,oBAAoB,KAAA,KAClB,YAAY,OAAO,KAAK,KAAK,gBAAgB,IAAI,GAAG,IAClD,OAAO,KAAK,MACZ,gBAAgB,KACpB,SACD;AACD,UAAO,MAAA,2BAAiC,OAAO,KAAK,KAAK,aAAa;AACtE,UAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;aAC7B,OAAO,SAAS,UAAU;AACnC,OAAI,UAAU,UAAU,KAAA,EAEtB;AAGF,OADoB,YAAY,OAAO,KAAK,KAAK,UAAU,MAAM,GAC/C,EAEhB;GAEF,IAAI;AACJ,QAAK,MAAM,QAAQ,MAAA,MAAY,MAAM;IACnC,OAAO;KACL,KAAK,UAAU;KACf,OAAO;KACR;IACD;IACA,SAAS;IACV,CAAC,EAAE;AACF,QAAI,SAAS,SAAS;AACpB,WAAM;AACN;;AAEF,sBAAkB;AAClB;;GAGF,IAAI;AACJ,OAAI,iBAAiB;IACnB,MAAM,OAAO,YAAY,gBAAgB,KAAK,UAAU,MAAM,GAAG;AACjE,eAAW;KACT,MAAM;KACN;KACD;;AAEH,OAAI,CAAC,UAAU,KACb,MAAK,MAAM,QAAQ,MAAA,MAAY,MAAM;IACnC,OAAO;KACL,KAAK,UAAU;KACf,OAAO;KACR;IACD;IACD,CAAC,EAAE;AACF,QAAI,SAAS,SAAS;AACpB,WAAM;AACN;;IAEF,MAAM,OAAO,YAAY,KAAK,KAAK,UAAU,MAAM,GAAG;AACtD,eAAW;KACT;KACA;KACD;AACD,QAAI,KACF;;AAKN,OAAI,UAAU,MAAM;AAClB,WAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;AACtC,UAAA,aACE,cACA,UAAU,MACV,SAAS,KAAK,KACd,SACD;AACD,WAAO,MAAA,OAAa,KAClB;KACE,MAAM;KACN,MAAM,SAAS;KAChB,EACD,KACD;AACD;;AAEF,SAAA,aACE,cACA,UAAU,OAAO,GACjB,UAAU,KAAK,KACf,SACD;AACD,UAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;aAC7B,OAAO,SAAS;OAIvB,UAAU,SACV,YAAY,OAAO,KAAK,KAAK,UAAU,MAAM,IAAI,EAEjD,QAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;;;CAK5C,EAAA,eAAiB,QAAqC;AACpD,SACE,CAAC,MAAA,0BACC,MAAA,uBAA6B,OAAO,QAAQ,KAAK,OAAO,KAAK,IAAI,KAAK,GACxE,qCACD;EAED,MAAM,EAAC,WAAW,cAAc,UAAU,eACxC,MAAA,sBAA4B,OAAO,QAAQ,IAAI;AACjD,MAAI,CAAC,UACH;AAGF,SAAO,UAAU,OAAO,sBAAsB;EAC9C,MAAM,EAAC,gBAAe,KAAK,WAAW;EACtC,MAAM,SAAS,YAAY,OAAO,QAAQ,KAAK,UAAU,MAAM;EAC/D,MAAM,SAAS,YAAY,OAAO,KAAK,KAAK,UAAU,MAAM;EAE5D,MAAM,OAAO;EACb,MAAM,+BAA+B,aAAa;AAChD,SAAA,aACE,cACA,UAAU,MACV,OAAO,KAAK,KACZ,SACD;AACD,UAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;;AAIxC,MAAI,WAAW,GAAG;AAEhB,OAAI,WAAW,GAAG;AAEhB,WAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;AACtC;;AAGF,OAAI,SAAS,GAAG;AACd,QAAI,MAAA,UAAgB,GAAG;AACrB,YAAO,8BAA8B;AACrC;;IAOF,IAAI;AACJ,SAAK,MAAM,QAAQ,MAAA,MAAY,MAAM;KAC
nC,OAAO;MACL,KAAK,UAAU;MACf,OAAO;MACR;KACD;KACA,SAAS;KACV,CAAC,EAAE;AACF,SAAI,SAAS,SAAS;AACpB,YAAM;AACN;;AAEF,uBAAkB;AAClB;;AAEF,WACE,oBAAoB,KAAA,GACpB,mDACD;AAED,UAAA,aACE,cACA,UAAU,MACV,gBAAgB,KAChB,SACD;AACD,WAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;AACtC;;AAGF,UAAO,SAAS,GAAG,wCAAwC;GAE3D,IAAI;AACJ,QAAK,MAAM,QAAQ,MAAA,MAAY,MAAM;IACnC,OAAO;KACL,KAAK,UAAU;KACf,OAAO;KACR;IACD;IACD,CAAC,EAAE;AACF,QAAI,SAAS,SAAS;AACpB,WAAM;AACN;;AAEF,mBAAe;AACf;;AAEF,UACE,iBAAiB,KAAA,GACjB,gDACD;AAID,OAAI,YAAY,aAAa,KAAK,OAAO,KAAK,IAAI,KAAK,GAAG;AACxD,WAAO,8BAA8B;AACrC;;AAKF,SAAA,aACE,cACA,UAAU,MACV,aAAa,KACb,SACD;AACD,UAAO,MAAA,2BAAiC,aAAa,KAAK;IACxD,MAAM;IACN,MAAM,OAAO;IACd,CAAC;AACF,UAAO,MAAA,OAAa,KAClB;IACE,MAAM;IACN,MAAM;IACP,EACD,KACD;AACD;;AAGF,MAAI,SAAS,GAAG;AACd,UAAO,WAAW,GAAG,+CAA+C;AAGpE,OAAI,SAAS,EACX;AAIF,UAAO,SAAS,GAAG,qCAAqC;GAExD,IAAI;GACJ,IAAI;AACJ,QAAK,MAAM,QAAQ,MAAA,MAAY,MAAM;IACnC,OAAO;KACL,KAAK,UAAU;KACf,OAAO;KACR;IACD;IACA,SAAS;IACV,CAAC,CACA,KAAI,SAAS,SAAS;AACpB,UAAM;AACN;cACS,iBAAiB,KAAA,EAC1B,gBAAe;QACV;AACL,mBAAe;AACf;;AAGJ,UACE,iBAAiB,KAAA,GACjB,gDACD;AACD,UACE,iBAAiB,KAAA,GACjB,gDACD;AAID,SAAA,aACE,cACA,UAAU,MACV,aAAa,KACb,SACD;AACD,UAAO,MAAA,2BAAiC,OAAO,KAAK,KAAK;IACvD,MAAM;IACN,MAAM;IACP,CAAC;AACF,UAAO,MAAA,OAAa,KAClB;IACE,MAAM;IACN,MAAM,OAAO;IACd,EACD,KACD;AAED;;AAGF,MAAI,SAAS,GAAG;AACd,UAAO,WAAW,GAAG,+CAA+C;AAGpE,OAAI,SAAS,GAAG;AACd,WAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;AACtC;;AAKF,UAAO,SAAS,GAAG,wCAAwC;GAI3D,IAAI;AACJ,QAAK,MAAM,QAAQ,MAAA,MAAY,MAAM;IACnC,OAAO;KACL,KAAK,UAAU;KACf,OAAO;KACR;IACD;IACD,CAAC,EAAE;AACF,QAAI,SAAS,SAAS;AACpB,WAAM;AACN;;AAEF,qBAAiB;AACjB;;AAEF,UACE,mBAAmB,KAAA,GACnB,kDACD;AAGD,OAAI,YAAY,eAAe,KAAK,OAAO,KAAK,IAAI,KAAK,GAAG;AAC1D,WAAO,8BAA8B;AACrC;;AAGF,UAAO,MAAA,OAAa,KAClB;IACE,MAAM;IACN,MAAM,OAAO;IACd,EACD,KACD;AACD,SAAA,aACE,cACA,UAAU,MACV,eAAe,KACf,SACD;AACD,UAAO,MAAA,OAAa,KAClB;IACE,MAAM;IACN,MAAM;IACP,EACD,KACD;AACD;;AAGF,eAAa;;CAGf,EAAA,2BAA6B,KAAU,QAAgB;AACrD,QAAA,qBAA2B;AAC3B,MAAI;AACF,UAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;YAC9B;AACR,SAAA,qBAA2B,KAAA;
;;CAI/B,cACE,cACA,MACA,OACA,UACA;AACA,QAAA,QAAc,IAAI,cAAc;GAC9B;GACA;GACD,CAAC;AACF,MACE,UAAU,KAAA,MACT,aAAa,KAAA,KACZ,KAAK,WAAW,CAAC,YAAY,OAAO,SAAS,GAAG,GAElD,OAAA,QAAc,IAAI,eAAe,MAAM;;CAI3C,UAAgB;AACd,QAAA,MAAY,SAAS;;;AAIzB,SAAS,gBACP,cACA,iBACQ;CAGR,MAAM,kBAA2B,EAAE;AAEnC,KAAI,gBAAgB,gBAClB,MAAK,MAAM,OAAO,aAChB,iBAAgB,KAAK,gBAAgB,KAAK;AAI9C,QAAO,KAAK,UAAU,CAAC,QAAQ,GAAG,gBAAgB,CAAC;;AAGrD,SAAS,8BACP,YACA,cACS;AACT,KAAI,eAAe,KAAA,KAAa,iBAAiB,KAAA,EAC/C,QAAO,eAAe;AAExB,KAAI,aAAa,WAAW,OAAO,KAAK,WAAW,CAAC,OAClD,QAAO;AAET,MAAK,MAAM,OAAO,aAChB,KAAI,CAAC,OAAO,YAAY,IAAI,CAC1B,QAAO;AAGX,QAAO;;AAGT,SAAS,2BAA2B,cAAwC;AAC1E,SAAQ,GAAG,MAAM;AACf,OAAK,MAAM,OAAO,cAAc;GAC9B,MAAM,MAAM,cAAc,EAAE,MAAM,EAAE,KAAK;AACzC,OAAI,QAAQ,EACV,QAAO;;AAGX,SAAO"}
1
+ {"version":3,"file":"take.js","names":["#input","#storage","#limit","#partitionKey","#partitionKeyComparator","#output","#initialFetch","#rowHiddenFromFetch","#setTakeState","#pushEditChange","#getStateAndConstraint","#pushWithRowHiddenFromFetch"],"sources":["../../../../../zql/src/ivm/take.ts"],"sourcesContent":["import {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport {hasOwn} from '../../../shared/src/has-own.ts';\nimport type {Row, Value} from '../../../zero-protocol/src/data.ts';\nimport type {PrimaryKey} from '../../../zero-protocol/src/primary-key.ts';\nimport {assertOrderingIncludesPK} from '../query/complete-ordering.ts';\nimport {type Change, type EditChange, type RemoveChange} from './change.ts';\nimport type {Constraint} from './constraint.ts';\nimport {compareValues, type Comparator, type Node} from './data.ts';\nimport {\n throwOutput,\n type FetchRequest,\n type Input,\n type Operator,\n type Output,\n type Storage,\n} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport {type Stream} from './stream.ts';\n\nconst MAX_BOUND_KEY = 'maxBound';\n\ntype TakeState = {\n size: number;\n bound: Row | undefined;\n};\n\ninterface TakeStorage {\n get(key: typeof MAX_BOUND_KEY): Row | undefined;\n get(key: string): TakeState | undefined;\n set(key: typeof MAX_BOUND_KEY, value: Row): void;\n set(key: string, value: TakeState): void;\n del(key: string): void;\n}\n\nexport type PartitionKey = PrimaryKey;\n\n/**\n * The Take operator is for implementing limit queries. It takes the first n\n * nodes of its input as determined by the input’s comparator. 
It then keeps\n * a *bound* of the last item it has accepted so that it can evaluate whether\n * new incoming pushes should be accepted or rejected.\n *\n * Take can count rows globally or by unique value of some field.\n *\n * Maintains the invariant that its output size is always <= limit, even\n * mid processing of a push.\n */\nexport class Take implements Operator {\n readonly #input: Input;\n readonly #storage: TakeStorage;\n readonly #limit: number;\n readonly #partitionKey: PartitionKey | undefined;\n readonly #partitionKeyComparator: Comparator | undefined;\n // Fetch overlay needed for some split push cases.\n #rowHiddenFromFetch: Row | undefined;\n\n #output: Output = throwOutput;\n\n constructor(\n input: Input,\n storage: Storage,\n limit: number,\n partitionKey?: PartitionKey,\n ) {\n assert(limit >= 0, 'Limit must be non-negative');\n const {sort} = input.getSchema();\n assert(sort !== undefined, 'Take requires sorted input');\n assertOrderingIncludesPK(sort, input.getSchema().primaryKey);\n input.setOutput(this);\n this.#input = input;\n this.#storage = storage as TakeStorage;\n this.#limit = limit;\n this.#partitionKey = partitionKey;\n this.#partitionKeyComparator =\n partitionKey && makePartitionKeyComparator(partitionKey);\n }\n\n setOutput(output: Output): void {\n this.#output = output;\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n *fetch(req: FetchRequest): Stream<Node | 'yield'> {\n if (\n !this.#partitionKey ||\n (req.constraint &&\n constraintMatchesPartitionKey(req.constraint, this.#partitionKey))\n ) {\n const takeStateKey = getTakeStateKey(this.#partitionKey, req.constraint);\n const takeState = this.#storage.get(takeStateKey);\n if (!takeState) {\n yield* this.#initialFetch(req);\n return;\n }\n if (takeState.bound === undefined) {\n return;\n }\n for (const inputNode of this.#input.fetch(req)) {\n if (inputNode === 'yield') {\n yield inputNode;\n continue;\n }\n if 
(this.getSchema().compareRows(takeState.bound, inputNode.row) < 0) {\n return;\n }\n if (\n this.#rowHiddenFromFetch &&\n this.getSchema().compareRows(\n this.#rowHiddenFromFetch,\n inputNode.row,\n ) === 0\n ) {\n continue;\n }\n yield inputNode;\n }\n return;\n }\n // There is a partition key, but the fetch is not constrained or constrained\n // on a different key. Thus we don't have a single take state to bound by.\n // This currently only happens with nested sub-queries\n // e.g. issues include issuelabels include label. We could remove this\n // case if we added a translation layer (powered by some state) in join.\n // Specifically we need joinKeyValue => parent constraint key\n const maxBound = this.#storage.get(MAX_BOUND_KEY);\n if (maxBound === undefined) {\n return;\n }\n for (const inputNode of this.#input.fetch(req)) {\n if (inputNode === 'yield') {\n yield inputNode;\n continue;\n }\n if (this.getSchema().compareRows(inputNode.row, maxBound) > 0) {\n return;\n }\n const takeStateKey = getTakeStateKey(this.#partitionKey, inputNode.row);\n const takeState = this.#storage.get(takeStateKey);\n if (\n takeState?.bound !== undefined &&\n this.getSchema().compareRows(takeState.bound, inputNode.row) >= 0\n ) {\n yield inputNode;\n }\n }\n }\n\n *#initialFetch(req: FetchRequest): Stream<Node | 'yield'> {\n assert(req.start === undefined, 'Start should be undefined');\n assert(!req.reverse, 'Reverse should be false');\n assert(\n constraintMatchesPartitionKey(req.constraint, this.#partitionKey),\n 'Constraint should match partition key',\n );\n\n if (this.#limit === 0) {\n return;\n }\n\n const takeStateKey = getTakeStateKey(this.#partitionKey, req.constraint);\n assert(\n this.#storage.get(takeStateKey) === undefined,\n 'Take state should be undefined',\n );\n\n let size = 0;\n let bound: Row | undefined;\n let downstreamEarlyReturn = true;\n let exceptionThrown = false;\n try {\n for (const inputNode of this.#input.fetch(req)) {\n if (inputNode === 'yield') {\n 
yield 'yield';\n continue;\n }\n yield inputNode;\n bound = inputNode.row;\n size++;\n if (size === this.#limit) {\n break;\n }\n }\n downstreamEarlyReturn = false;\n } catch (e) {\n exceptionThrown = true;\n throw e;\n } finally {\n if (!exceptionThrown) {\n this.#setTakeState(\n takeStateKey,\n size,\n bound,\n this.#storage.get(MAX_BOUND_KEY),\n );\n // If it becomes necessary to support downstream early return, this\n // assert should be removed, and replaced with code that consumes\n // the input stream until limit is reached or the input stream is\n // exhausted so that takeState is properly hydrated.\n assert(\n !downstreamEarlyReturn,\n 'Unexpected early return prevented full hydration',\n );\n }\n }\n }\n\n #getStateAndConstraint(row: Row) {\n const takeStateKey = getTakeStateKey(this.#partitionKey, row);\n const takeState = this.#storage.get(takeStateKey);\n let maxBound: Row | undefined;\n let constraint: Constraint | undefined;\n if (takeState) {\n maxBound = this.#storage.get(MAX_BOUND_KEY);\n constraint =\n this.#partitionKey &&\n Object.fromEntries(\n this.#partitionKey.map(key => [key, row[key]] as const),\n );\n }\n\n return {takeState, takeStateKey, maxBound, constraint} as\n | {\n takeState: undefined;\n takeStateKey: string;\n maxBound: undefined;\n constraint: undefined;\n }\n | {\n takeState: TakeState;\n takeStateKey: string;\n maxBound: Row | undefined;\n constraint: Constraint | undefined;\n };\n }\n\n *push(change: Change): Stream<'yield'> {\n if (change.type === 'edit') {\n yield* this.#pushEditChange(change);\n return;\n }\n\n const {takeState, takeStateKey, maxBound, constraint} =\n this.#getStateAndConstraint(change.node.row);\n if (!takeState) {\n return;\n }\n\n const {compareRows} = this.getSchema();\n\n if (change.type === 'add') {\n if (takeState.size < this.#limit) {\n this.#setTakeState(\n takeStateKey,\n takeState.size + 1,\n takeState.bound === undefined ||\n compareRows(takeState.bound, change.node.row) < 0\n ? 
change.node.row\n : takeState.bound,\n maxBound,\n );\n yield* this.#output.push(change, this);\n return;\n }\n // size === limit\n if (\n takeState.bound === undefined ||\n compareRows(change.node.row, takeState.bound) >= 0\n ) {\n return;\n }\n // added row < bound\n let beforeBoundNode: Node | undefined;\n let boundNode: Node | undefined;\n if (this.#limit === 1) {\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n })) {\n if (node === 'yield') {\n yield node;\n continue;\n }\n boundNode = node;\n break;\n }\n } else {\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n reverse: true,\n })) {\n if (node === 'yield') {\n yield node;\n continue;\n } else if (boundNode === undefined) {\n boundNode = node;\n } else {\n beforeBoundNode = node;\n break;\n }\n }\n }\n assert(\n boundNode !== undefined,\n 'Take: boundNode must be found during fetch',\n );\n const removeChange: RemoveChange = {\n type: 'remove',\n node: boundNode,\n };\n // Remove before add to maintain invariant that\n // output size <= limit.\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n beforeBoundNode === undefined ||\n compareRows(change.node.row, beforeBoundNode.row) > 0\n ? 
change.node.row\n : beforeBoundNode.row,\n maxBound,\n );\n yield* this.#pushWithRowHiddenFromFetch(change.node.row, removeChange);\n yield* this.#output.push(change, this);\n } else if (change.type === 'remove') {\n if (takeState.bound === undefined) {\n // change is after bound\n return;\n }\n const compToBound = compareRows(change.node.row, takeState.bound);\n if (compToBound > 0) {\n // change is after bound\n return;\n }\n let beforeBoundNode: Node | undefined;\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'after',\n },\n constraint,\n reverse: true,\n })) {\n if (node === 'yield') {\n yield node;\n continue;\n }\n beforeBoundNode = node;\n break;\n }\n\n let newBound: {node: Node; push: boolean} | undefined;\n if (beforeBoundNode) {\n const push = compareRows(beforeBoundNode.row, takeState.bound) > 0;\n newBound = {\n node: beforeBoundNode,\n push,\n };\n }\n if (!newBound?.push) {\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n })) {\n if (node === 'yield') {\n yield node;\n continue;\n }\n const push = compareRows(node.row, takeState.bound) > 0;\n newBound = {\n node,\n push,\n };\n if (push) {\n break;\n }\n }\n }\n\n if (newBound?.push) {\n yield* this.#output.push(change, this);\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n newBound.node.row,\n maxBound,\n );\n yield* this.#output.push(\n {\n type: 'add',\n node: newBound.node,\n },\n this,\n );\n return;\n }\n this.#setTakeState(\n takeStateKey,\n takeState.size - 1,\n newBound?.node.row,\n maxBound,\n );\n yield* this.#output.push(change, this);\n } else if (change.type === 'child') {\n // A 'child' change should be pushed to output if its row\n // is <= bound.\n if (\n takeState.bound &&\n compareRows(change.node.row, takeState.bound) <= 0\n ) {\n yield* this.#output.push(change, this);\n }\n }\n }\n\n *#pushEditChange(change: EditChange): Stream<'yield'> {\n assert(\n 
!this.#partitionKeyComparator ||\n this.#partitionKeyComparator(change.oldNode.row, change.node.row) === 0,\n 'Unexpected change of partition key',\n );\n\n const {takeState, takeStateKey, maxBound, constraint} =\n this.#getStateAndConstraint(change.oldNode.row);\n if (!takeState) {\n return;\n }\n\n assert(takeState.bound, 'Bound should be set');\n const {compareRows} = this.getSchema();\n const oldCmp = compareRows(change.oldNode.row, takeState.bound);\n const newCmp = compareRows(change.node.row, takeState.bound);\n\n const that = this;\n const replaceBoundAndForwardChange = function* () {\n that.#setTakeState(\n takeStateKey,\n takeState.size,\n change.node.row,\n maxBound,\n );\n yield* that.#output.push(change, that);\n };\n\n // The bounds row was changed.\n if (oldCmp === 0) {\n // The new row is the new bound.\n if (newCmp === 0) {\n // no need to update the state since we are keeping the bounds\n yield* this.#output.push(change, this);\n return;\n }\n\n if (newCmp < 0) {\n if (this.#limit === 1) {\n yield* replaceBoundAndForwardChange();\n return;\n }\n\n // New row will be in the result but it might not be the bounds any\n // more. We need to find the row before the bounds to determine the new\n // bounds.\n\n let beforeBoundNode: Node | undefined;\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'after',\n },\n constraint,\n reverse: true,\n })) {\n if (node === 'yield') {\n yield node;\n continue;\n }\n beforeBoundNode = node;\n break;\n }\n assert(\n beforeBoundNode !== undefined,\n 'Take: beforeBoundNode must be found during fetch',\n );\n\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n beforeBoundNode.row,\n maxBound,\n );\n yield* this.#output.push(change, this);\n return;\n }\n\n assert(newCmp > 0, 'New comparison must be greater than 0');\n // Find the first item at the old bounds. 
This will be the new bounds.\n let newBoundNode: Node | undefined;\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n })) {\n if (node === 'yield') {\n yield node;\n continue;\n }\n newBoundNode = node;\n break;\n }\n assert(\n newBoundNode !== undefined,\n 'Take: newBoundNode must be found during fetch',\n );\n\n // The next row is the new row. We can replace the bounds and keep the\n // edit change.\n if (compareRows(newBoundNode.row, change.node.row) === 0) {\n yield* replaceBoundAndForwardChange();\n return;\n }\n\n // The new row is now outside the bounds, so we need to remove the old\n // row and add the new bounds row.\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n newBoundNode.row,\n maxBound,\n );\n yield* this.#pushWithRowHiddenFromFetch(newBoundNode.row, {\n type: 'remove',\n node: change.oldNode,\n });\n yield* this.#output.push(\n {\n type: 'add',\n node: newBoundNode,\n },\n this,\n );\n return;\n }\n\n if (oldCmp > 0) {\n assert(newCmp !== 0, 'Invalid state. Row has duplicate primary key');\n\n // Both old and new outside of bounds\n if (newCmp > 0) {\n return;\n }\n\n // old was outside, new is inside. 
Pushing out the old bounds\n assert(newCmp < 0, 'New comparison must be less than 0');\n\n let oldBoundNode: Node | undefined;\n let newBoundNode: Node | undefined;\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'at',\n },\n constraint,\n reverse: true,\n })) {\n if (node === 'yield') {\n yield node;\n continue;\n } else if (oldBoundNode === undefined) {\n oldBoundNode = node;\n } else {\n newBoundNode = node;\n break;\n }\n }\n assert(\n oldBoundNode !== undefined,\n 'Take: oldBoundNode must be found during fetch',\n );\n assert(\n newBoundNode !== undefined,\n 'Take: newBoundNode must be found during fetch',\n );\n\n // Remove before add to maintain invariant that\n // output size <= limit.\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n newBoundNode.row,\n maxBound,\n );\n yield* this.#pushWithRowHiddenFromFetch(change.node.row, {\n type: 'remove',\n node: oldBoundNode,\n });\n yield* this.#output.push(\n {\n type: 'add',\n node: change.node,\n },\n this,\n );\n\n return;\n }\n\n if (oldCmp < 0) {\n assert(newCmp !== 0, 'Invalid state. Row has duplicate primary key');\n\n // Both old and new inside of bounds\n if (newCmp < 0) {\n yield* this.#output.push(change, this);\n return;\n }\n\n // old was inside, new is larger than old bound\n\n assert(newCmp > 0, 'New comparison must be greater than 0');\n\n // at this point we need to find the row after the bound and use that or\n // the newRow as the new bound.\n let afterBoundNode: Node | undefined;\n for (const node of this.#input.fetch({\n start: {\n row: takeState.bound,\n basis: 'after',\n },\n constraint,\n })) {\n if (node === 'yield') {\n yield node;\n continue;\n }\n afterBoundNode = node;\n break;\n }\n assert(\n afterBoundNode !== undefined,\n 'Take: afterBoundNode must be found during fetch',\n );\n\n // The new row is the new bound. 
Use an edit change.\n if (compareRows(afterBoundNode.row, change.node.row) === 0) {\n yield* replaceBoundAndForwardChange();\n return;\n }\n\n yield* this.#output.push(\n {\n type: 'remove',\n node: change.oldNode,\n },\n this,\n );\n this.#setTakeState(\n takeStateKey,\n takeState.size,\n afterBoundNode.row,\n maxBound,\n );\n yield* this.#output.push(\n {\n type: 'add',\n node: afterBoundNode,\n },\n this,\n );\n return;\n }\n\n unreachable();\n }\n\n *#pushWithRowHiddenFromFetch(row: Row, change: Change) {\n this.#rowHiddenFromFetch = row;\n try {\n yield* this.#output.push(change, this);\n } finally {\n this.#rowHiddenFromFetch = undefined;\n }\n }\n\n #setTakeState(\n takeStateKey: string,\n size: number,\n bound: Row | undefined,\n maxBound: Row | undefined,\n ) {\n this.#storage.set(takeStateKey, {\n size,\n bound,\n });\n if (\n bound !== undefined &&\n (maxBound === undefined ||\n this.getSchema().compareRows(bound, maxBound) > 0)\n ) {\n this.#storage.set(MAX_BOUND_KEY, bound);\n }\n }\n\n destroy(): void {\n this.#input.destroy();\n }\n}\n\nfunction getTakeStateKey(\n partitionKey: PartitionKey | undefined,\n rowOrConstraint: Row | Constraint | undefined,\n): string {\n // The order must be consistent. 
We always use the order as defined by the\n // partition key.\n const partitionValues: Value[] = [];\n\n if (partitionKey && rowOrConstraint) {\n for (const key of partitionKey) {\n partitionValues.push(rowOrConstraint[key]);\n }\n }\n\n return JSON.stringify(['take', ...partitionValues]);\n}\n\nexport function constraintMatchesPartitionKey(\n constraint: Constraint | undefined,\n partitionKey: PartitionKey | undefined,\n): boolean {\n if (constraint === undefined || partitionKey === undefined) {\n return constraint === partitionKey;\n }\n if (partitionKey.length !== Object.keys(constraint).length) {\n return false;\n }\n for (const key of partitionKey) {\n if (!hasOwn(constraint, key)) {\n return false;\n }\n }\n return true;\n}\n\nexport function makePartitionKeyComparator(\n partitionKey: PartitionKey,\n): Comparator {\n return (a, b) => {\n for (const key of partitionKey) {\n const cmp = compareValues(a[key], b[key]);\n if (cmp !== 0) {\n return cmp;\n }\n }\n return 0;\n };\n}\n"],"mappings":";;;;;;AAmBA,IAAM,gBAAgB;;;;;;;;;;;;AA4BtB,IAAa,OAAb,MAAsC;CACpC;CACA;CACA;CACA;CACA;CAEA;CAEA,UAAkB;CAElB,YACE,OACA,SACA,OACA,cACA;AACA,SAAO,SAAS,GAAG,6BAA6B;EAChD,MAAM,EAAC,SAAQ,MAAM,WAAW;AAChC,SAAO,SAAS,KAAA,GAAW,6BAA6B;AACxD,2BAAyB,MAAM,MAAM,WAAW,CAAC,WAAW;AAC5D,QAAM,UAAU,KAAK;AACrB,QAAA,QAAc;AACd,QAAA,UAAgB;AAChB,QAAA,QAAc;AACd,QAAA,eAAqB;AACrB,QAAA,yBACE,gBAAgB,2BAA2B,aAAa;;CAG5D,UAAU,QAAsB;AAC9B,QAAA,SAAe;;CAGjB,YAA0B;AACxB,SAAO,MAAA,MAAY,WAAW;;CAGhC,CAAC,MAAM,KAA2C;AAChD,MACE,CAAC,MAAA,gBACA,IAAI,cACH,8BAA8B,IAAI,YAAY,MAAA,aAAmB,EACnE;GACA,MAAM,eAAe,gBAAgB,MAAA,cAAoB,IAAI,WAAW;GACxE,MAAM,YAAY,MAAA,QAAc,IAAI,aAAa;AACjD,OAAI,CAAC,WAAW;AACd,WAAO,MAAA,aAAmB,IAAI;AAC9B;;AAEF,OAAI,UAAU,UAAU,KAAA,EACtB;AAEF,QAAK,MAAM,aAAa,MAAA,MAAY,MAAM,IAAI,EAAE;AAC9C,QAAI,cAAc,SAAS;AACzB,WAAM;AACN;;AAEF,QAAI,KAAK,WAAW,CAAC,YAAY,UAAU,OAAO,UAAU,IAAI,GAAG,EACjE;AAEF,QACE,MAAA,sBACA,KAAK,WAAW,CAAC,YACf,MAAA,oBACA,UAAU,IACX,KAAK,EAEN;AAEF,UAAM;;AAER;;EAQF,MAAM,WAAW,MAAA,QAAc,IAAI,cAAc;AACjD,M
AAI,aAAa,KAAA,EACf;AAEF,OAAK,MAAM,aAAa,MAAA,MAAY,MAAM,IAAI,EAAE;AAC9C,OAAI,cAAc,SAAS;AACzB,UAAM;AACN;;AAEF,OAAI,KAAK,WAAW,CAAC,YAAY,UAAU,KAAK,SAAS,GAAG,EAC1D;GAEF,MAAM,eAAe,gBAAgB,MAAA,cAAoB,UAAU,IAAI;GACvE,MAAM,YAAY,MAAA,QAAc,IAAI,aAAa;AACjD,OACE,WAAW,UAAU,KAAA,KACrB,KAAK,WAAW,CAAC,YAAY,UAAU,OAAO,UAAU,IAAI,IAAI,EAEhE,OAAM;;;CAKZ,EAAA,aAAe,KAA2C;AACxD,SAAO,IAAI,UAAU,KAAA,GAAW,4BAA4B;AAC5D,SAAO,CAAC,IAAI,SAAS,0BAA0B;AAC/C,SACE,8BAA8B,IAAI,YAAY,MAAA,aAAmB,EACjE,wCACD;AAED,MAAI,MAAA,UAAgB,EAClB;EAGF,MAAM,eAAe,gBAAgB,MAAA,cAAoB,IAAI,WAAW;AACxE,SACE,MAAA,QAAc,IAAI,aAAa,KAAK,KAAA,GACpC,iCACD;EAED,IAAI,OAAO;EACX,IAAI;EACJ,IAAI,wBAAwB;EAC5B,IAAI,kBAAkB;AACtB,MAAI;AACF,QAAK,MAAM,aAAa,MAAA,MAAY,MAAM,IAAI,EAAE;AAC9C,QAAI,cAAc,SAAS;AACzB,WAAM;AACN;;AAEF,UAAM;AACN,YAAQ,UAAU;AAClB;AACA,QAAI,SAAS,MAAA,MACX;;AAGJ,2BAAwB;WACjB,GAAG;AACV,qBAAkB;AAClB,SAAM;YACE;AACR,OAAI,CAAC,iBAAiB;AACpB,UAAA,aACE,cACA,MACA,OACA,MAAA,QAAc,IAAI,cAAc,CACjC;AAKD,WACE,CAAC,uBACD,mDACD;;;;CAKP,uBAAuB,KAAU;EAC/B,MAAM,eAAe,gBAAgB,MAAA,cAAoB,IAAI;EAC7D,MAAM,YAAY,MAAA,QAAc,IAAI,aAAa;EACjD,IAAI;EACJ,IAAI;AACJ,MAAI,WAAW;AACb,cAAW,MAAA,QAAc,IAAI,cAAc;AAC3C,gBACE,MAAA,gBACA,OAAO,YACL,MAAA,aAAmB,KAAI,QAAO,CAAC,KAAK,IAAI,KAAK,CAAU,CACxD;;AAGL,SAAO;GAAC;GAAW;GAAc;GAAU;GAAW;;CAexD,CAAC,KAAK,QAAiC;AACrC,MAAI,OAAO,SAAS,QAAQ;AAC1B,UAAO,MAAA,eAAqB,OAAO;AACnC;;EAGF,MAAM,EAAC,WAAW,cAAc,UAAU,eACxC,MAAA,sBAA4B,OAAO,KAAK,IAAI;AAC9C,MAAI,CAAC,UACH;EAGF,MAAM,EAAC,gBAAe,KAAK,WAAW;AAEtC,MAAI,OAAO,SAAS,OAAO;AACzB,OAAI,UAAU,OAAO,MAAA,OAAa;AAChC,UAAA,aACE,cACA,UAAU,OAAO,GACjB,UAAU,UAAU,KAAA,KAClB,YAAY,UAAU,OAAO,OAAO,KAAK,IAAI,GAAG,IAC9C,OAAO,KAAK,MACZ,UAAU,OACd,SACD;AACD,WAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;AACtC;;AAGF,OACE,UAAU,UAAU,KAAA,KACpB,YAAY,OAAO,KAAK,KAAK,UAAU,MAAM,IAAI,EAEjD;GAGF,IAAI;GACJ,IAAI;AACJ,OAAI,MAAA,UAAgB,EAClB,MAAK,MAAM,QAAQ,MAAA,MAAY,MAAM;IACnC,OAAO;KACL,KAAK,UAAU;KACf,OAAO;KACR;IACD;IACD,CAAC,EAAE;AACF,QAAI,SAAS,SAAS;AACpB,WAAM;AACN;;AAEF,gBAAY;AACZ;;OAGF,MAAK,MAAM,QAAQ,MAAA,MAAY,MAAM;IACnC,OAAO;KACL,KAAK,UAAU;KACf,O
AAO;KACR;IACD;IACA,SAAS;IACV,CAAC,CACA,KAAI,SAAS,SAAS;AACpB,UAAM;AACN;cACS,cAAc,KAAA,EACvB,aAAY;QACP;AACL,sBAAkB;AAClB;;AAIN,UACE,cAAc,KAAA,GACd,6CACD;GACD,MAAM,eAA6B;IACjC,MAAM;IACN,MAAM;IACP;AAGD,SAAA,aACE,cACA,UAAU,MACV,oBAAoB,KAAA,KAClB,YAAY,OAAO,KAAK,KAAK,gBAAgB,IAAI,GAAG,IAClD,OAAO,KAAK,MACZ,gBAAgB,KACpB,SACD;AACD,UAAO,MAAA,2BAAiC,OAAO,KAAK,KAAK,aAAa;AACtE,UAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;aAC7B,OAAO,SAAS,UAAU;AACnC,OAAI,UAAU,UAAU,KAAA,EAEtB;AAGF,OADoB,YAAY,OAAO,KAAK,KAAK,UAAU,MAAM,GAC/C,EAEhB;GAEF,IAAI;AACJ,QAAK,MAAM,QAAQ,MAAA,MAAY,MAAM;IACnC,OAAO;KACL,KAAK,UAAU;KACf,OAAO;KACR;IACD;IACA,SAAS;IACV,CAAC,EAAE;AACF,QAAI,SAAS,SAAS;AACpB,WAAM;AACN;;AAEF,sBAAkB;AAClB;;GAGF,IAAI;AACJ,OAAI,iBAAiB;IACnB,MAAM,OAAO,YAAY,gBAAgB,KAAK,UAAU,MAAM,GAAG;AACjE,eAAW;KACT,MAAM;KACN;KACD;;AAEH,OAAI,CAAC,UAAU,KACb,MAAK,MAAM,QAAQ,MAAA,MAAY,MAAM;IACnC,OAAO;KACL,KAAK,UAAU;KACf,OAAO;KACR;IACD;IACD,CAAC,EAAE;AACF,QAAI,SAAS,SAAS;AACpB,WAAM;AACN;;IAEF,MAAM,OAAO,YAAY,KAAK,KAAK,UAAU,MAAM,GAAG;AACtD,eAAW;KACT;KACA;KACD;AACD,QAAI,KACF;;AAKN,OAAI,UAAU,MAAM;AAClB,WAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;AACtC,UAAA,aACE,cACA,UAAU,MACV,SAAS,KAAK,KACd,SACD;AACD,WAAO,MAAA,OAAa,KAClB;KACE,MAAM;KACN,MAAM,SAAS;KAChB,EACD,KACD;AACD;;AAEF,SAAA,aACE,cACA,UAAU,OAAO,GACjB,UAAU,KAAK,KACf,SACD;AACD,UAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;aAC7B,OAAO,SAAS;OAIvB,UAAU,SACV,YAAY,OAAO,KAAK,KAAK,UAAU,MAAM,IAAI,EAEjD,QAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;;;CAK5C,EAAA,eAAiB,QAAqC;AACpD,SACE,CAAC,MAAA,0BACC,MAAA,uBAA6B,OAAO,QAAQ,KAAK,OAAO,KAAK,IAAI,KAAK,GACxE,qCACD;EAED,MAAM,EAAC,WAAW,cAAc,UAAU,eACxC,MAAA,sBAA4B,OAAO,QAAQ,IAAI;AACjD,MAAI,CAAC,UACH;AAGF,SAAO,UAAU,OAAO,sBAAsB;EAC9C,MAAM,EAAC,gBAAe,KAAK,WAAW;EACtC,MAAM,SAAS,YAAY,OAAO,QAAQ,KAAK,UAAU,MAAM;EAC/D,MAAM,SAAS,YAAY,OAAO,KAAK,KAAK,UAAU,MAAM;EAE5D,MAAM,OAAO;EACb,MAAM,+BAA+B,aAAa;AAChD,SAAA,aACE,cACA,UAAU,MACV,OAAO,KAAK,KACZ,SACD;AACD,UAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;;AAIxC,MAAI,WAAW,GAAG;AAEhB,OAAI,WAAW,GAAG;AAEhB,WAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;AACtC;;AAGF,OAAI,SAAS,GAAG;AACd,QAAI,MAAA,UAAgB,GAAG;AACr
B,YAAO,8BAA8B;AACrC;;IAOF,IAAI;AACJ,SAAK,MAAM,QAAQ,MAAA,MAAY,MAAM;KACnC,OAAO;MACL,KAAK,UAAU;MACf,OAAO;MACR;KACD;KACA,SAAS;KACV,CAAC,EAAE;AACF,SAAI,SAAS,SAAS;AACpB,YAAM;AACN;;AAEF,uBAAkB;AAClB;;AAEF,WACE,oBAAoB,KAAA,GACpB,mDACD;AAED,UAAA,aACE,cACA,UAAU,MACV,gBAAgB,KAChB,SACD;AACD,WAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;AACtC;;AAGF,UAAO,SAAS,GAAG,wCAAwC;GAE3D,IAAI;AACJ,QAAK,MAAM,QAAQ,MAAA,MAAY,MAAM;IACnC,OAAO;KACL,KAAK,UAAU;KACf,OAAO;KACR;IACD;IACD,CAAC,EAAE;AACF,QAAI,SAAS,SAAS;AACpB,WAAM;AACN;;AAEF,mBAAe;AACf;;AAEF,UACE,iBAAiB,KAAA,GACjB,gDACD;AAID,OAAI,YAAY,aAAa,KAAK,OAAO,KAAK,IAAI,KAAK,GAAG;AACxD,WAAO,8BAA8B;AACrC;;AAKF,SAAA,aACE,cACA,UAAU,MACV,aAAa,KACb,SACD;AACD,UAAO,MAAA,2BAAiC,aAAa,KAAK;IACxD,MAAM;IACN,MAAM,OAAO;IACd,CAAC;AACF,UAAO,MAAA,OAAa,KAClB;IACE,MAAM;IACN,MAAM;IACP,EACD,KACD;AACD;;AAGF,MAAI,SAAS,GAAG;AACd,UAAO,WAAW,GAAG,+CAA+C;AAGpE,OAAI,SAAS,EACX;AAIF,UAAO,SAAS,GAAG,qCAAqC;GAExD,IAAI;GACJ,IAAI;AACJ,QAAK,MAAM,QAAQ,MAAA,MAAY,MAAM;IACnC,OAAO;KACL,KAAK,UAAU;KACf,OAAO;KACR;IACD;IACA,SAAS;IACV,CAAC,CACA,KAAI,SAAS,SAAS;AACpB,UAAM;AACN;cACS,iBAAiB,KAAA,EAC1B,gBAAe;QACV;AACL,mBAAe;AACf;;AAGJ,UACE,iBAAiB,KAAA,GACjB,gDACD;AACD,UACE,iBAAiB,KAAA,GACjB,gDACD;AAID,SAAA,aACE,cACA,UAAU,MACV,aAAa,KACb,SACD;AACD,UAAO,MAAA,2BAAiC,OAAO,KAAK,KAAK;IACvD,MAAM;IACN,MAAM;IACP,CAAC;AACF,UAAO,MAAA,OAAa,KAClB;IACE,MAAM;IACN,MAAM,OAAO;IACd,EACD,KACD;AAED;;AAGF,MAAI,SAAS,GAAG;AACd,UAAO,WAAW,GAAG,+CAA+C;AAGpE,OAAI,SAAS,GAAG;AACd,WAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;AACtC;;AAKF,UAAO,SAAS,GAAG,wCAAwC;GAI3D,IAAI;AACJ,QAAK,MAAM,QAAQ,MAAA,MAAY,MAAM;IACnC,OAAO;KACL,KAAK,UAAU;KACf,OAAO;KACR;IACD;IACD,CAAC,EAAE;AACF,QAAI,SAAS,SAAS;AACpB,WAAM;AACN;;AAEF,qBAAiB;AACjB;;AAEF,UACE,mBAAmB,KAAA,GACnB,kDACD;AAGD,OAAI,YAAY,eAAe,KAAK,OAAO,KAAK,IAAI,KAAK,GAAG;AAC1D,WAAO,8BAA8B;AACrC;;AAGF,UAAO,MAAA,OAAa,KAClB;IACE,MAAM;IACN,MAAM,OAAO;IACd,EACD,KACD;AACD,SAAA,aACE,cACA,UAAU,MACV,eAAe,KACf,SACD;AACD,UAAO,MAAA,OAAa,KAClB;IACE,MAAM;IACN,MAAM;IACP,EACD,KACD;AACD;;AAGF,eAAa;;CAGf,EAAA,2BAA6B,KAAU,QAAgB;AACrD,QAAA,qBAA2B;AAC3B
,MAAI;AACF,UAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;YAC9B;AACR,SAAA,qBAA2B,KAAA;;;CAI/B,cACE,cACA,MACA,OACA,UACA;AACA,QAAA,QAAc,IAAI,cAAc;GAC9B;GACA;GACD,CAAC;AACF,MACE,UAAU,KAAA,MACT,aAAa,KAAA,KACZ,KAAK,WAAW,CAAC,YAAY,OAAO,SAAS,GAAG,GAElD,OAAA,QAAc,IAAI,eAAe,MAAM;;CAI3C,UAAgB;AACd,QAAA,MAAY,SAAS;;;AAIzB,SAAS,gBACP,cACA,iBACQ;CAGR,MAAM,kBAA2B,EAAE;AAEnC,KAAI,gBAAgB,gBAClB,MAAK,MAAM,OAAO,aAChB,iBAAgB,KAAK,gBAAgB,KAAK;AAI9C,QAAO,KAAK,UAAU,CAAC,QAAQ,GAAG,gBAAgB,CAAC;;AAGrD,SAAgB,8BACd,YACA,cACS;AACT,KAAI,eAAe,KAAA,KAAa,iBAAiB,KAAA,EAC/C,QAAO,eAAe;AAExB,KAAI,aAAa,WAAW,OAAO,KAAK,WAAW,CAAC,OAClD,QAAO;AAET,MAAK,MAAM,OAAO,aAChB,KAAI,CAAC,OAAO,YAAY,IAAI,CAC1B,QAAO;AAGX,QAAO;;AAGT,SAAgB,2BACd,cACY;AACZ,SAAQ,GAAG,MAAM;AACf,OAAK,MAAM,OAAO,cAAc;GAC9B,MAAM,MAAM,cAAc,EAAE,MAAM,EAAE,KAAK;AACzC,OAAI,QAAQ,EACV,QAAO;;AAGX,SAAO"}
@@ -1 +1 @@
1
- {"version":3,"file":"union-fan-in.d.ts","sourceRoot":"","sources":["../../../../../zql/src/ivm/union-fan-in.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,aAAa,CAAC;AAExC,OAAO,KAAK,EAAC,IAAI,EAAC,MAAM,WAAW,CAAC;AACpC,OAAO,EAEL,KAAK,YAAY,EACjB,KAAK,KAAK,EACV,KAAK,SAAS,EACd,KAAK,QAAQ,EACb,KAAK,MAAM,EACZ,MAAM,eAAe,CAAC;AAMvB,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,aAAa,CAAC;AAC9C,OAAO,EAAQ,KAAK,MAAM,EAAC,MAAM,aAAa,CAAC;AAC/C,OAAO,KAAK,EAAC,WAAW,EAAC,MAAM,oBAAoB,CAAC;AAEpD,qBAAa,UAAW,YAAW,QAAQ;;gBAO7B,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,KAAK,EAAE;IAgEhD,OAAO,IAAI,IAAI;IAMf,KAAK,CAAC,GAAG,EAAE,YAAY,GAAG,MAAM,CAAC,IAAI,GAAG,OAAO,CAAC;IAOhD,SAAS,IAAI,YAAY;IAIxB,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,SAAS,GAAG,MAAM,CAAC,OAAO,CAAC;IAoEzD,oBAAoB;IAQnB,iBAAiB,CAAC,gBAAgB,EAAE,MAAM,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,OAAO,CAAC;IA0BrE,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI;CAGhC;AAED,wBAAiB,YAAY,CAC3B,OAAO,EAAE,QAAQ,CAAC,IAAI,GAAG,OAAO,CAAC,EAAE,EACnC,UAAU,EAAE,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,EAAE,IAAI,KAAK,MAAM,GACvC,gBAAgB,CAAC,IAAI,GAAG,OAAO,CAAC,CAuElC"}
1
+ {"version":3,"file":"union-fan-in.d.ts","sourceRoot":"","sources":["../../../../../zql/src/ivm/union-fan-in.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,aAAa,CAAC;AAExC,OAAO,KAAK,EAAC,IAAI,EAAC,MAAM,WAAW,CAAC;AACpC,OAAO,EAEL,KAAK,YAAY,EACjB,KAAK,KAAK,EACV,KAAK,SAAS,EACd,KAAK,QAAQ,EACb,KAAK,MAAM,EACZ,MAAM,eAAe,CAAC;AAMvB,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,aAAa,CAAC;AAC9C,OAAO,EAAQ,KAAK,MAAM,EAAC,MAAM,aAAa,CAAC;AAC/C,OAAO,KAAK,EAAC,WAAW,EAAC,MAAM,oBAAoB,CAAC;AAEpD,qBAAa,UAAW,YAAW,QAAQ;;gBAO7B,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,KAAK,EAAE;IAiEhD,OAAO,IAAI,IAAI;IAMf,KAAK,CAAC,GAAG,EAAE,YAAY,GAAG,MAAM,CAAC,IAAI,GAAG,OAAO,CAAC;IAOhD,SAAS,IAAI,YAAY;IAIxB,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,SAAS,GAAG,MAAM,CAAC,OAAO,CAAC;IAoEzD,oBAAoB;IAQnB,iBAAiB,CAAC,gBAAgB,EAAE,MAAM,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,OAAO,CAAC;IA0BrE,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI;CAGhC;AAED,wBAAiB,YAAY,CAC3B,OAAO,EAAE,QAAQ,CAAC,IAAI,GAAG,OAAO,CAAC,EAAE,EACnC,UAAU,EAAE,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,EAAE,IAAI,KAAK,MAAM,GACvC,gBAAgB,CAAC,IAAI,GAAG,OAAO,CAAC,CAuElC"}
@@ -13,6 +13,7 @@ var UnionFanIn = class {
13
13
  this.#inputs = inputs;
14
14
  const fanOutSchema = fanOut.getSchema();
15
15
  fanOut.setFanIn(this);
16
+ assert(fanOutSchema.sort !== void 0, "UnionFanIn requires sorted input");
16
17
  const schema = {
17
18
  tableName: fanOutSchema.tableName,
18
19
  columns: fanOutSchema.columns,
@@ -1 +1 @@
1
- {"version":3,"file":"union-fan-in.js","names":["#inputs","#schema","#fanOutPushStarted","#pushInternalChange","#accumulatedPushes","#output"],"sources":["../../../../../zql/src/ivm/union-fan-in.ts"],"sourcesContent":["import {assert} from '../../../shared/src/asserts.ts';\nimport type {Writable} from '../../../shared/src/writable.ts';\nimport type {Change} from './change.ts';\nimport type {Constraint} from './constraint.ts';\nimport type {Node} from './data.ts';\nimport {\n throwOutput,\n type FetchRequest,\n type Input,\n type InputBase,\n type Operator,\n type Output,\n} from './operator.ts';\nimport {\n makeAddEmptyRelationships,\n mergeRelationships,\n pushAccumulatedChanges,\n} from './push-accumulated.ts';\nimport type {SourceSchema} from './schema.ts';\nimport {first, type Stream} from './stream.ts';\nimport type {UnionFanOut} from './union-fan-out.ts';\n\nexport class UnionFanIn implements Operator {\n readonly #inputs: readonly Input[];\n readonly #schema: SourceSchema;\n #fanOutPushStarted: boolean = false;\n #output: Output = throwOutput;\n #accumulatedPushes: Change[] = [];\n\n constructor(fanOut: UnionFanOut, inputs: Input[]) {\n this.#inputs = inputs;\n const fanOutSchema = fanOut.getSchema();\n fanOut.setFanIn(this);\n\n const schema: Writable<SourceSchema> = {\n tableName: fanOutSchema.tableName,\n columns: fanOutSchema.columns,\n primaryKey: fanOutSchema.primaryKey,\n relationships: {\n ...fanOutSchema.relationships,\n },\n isHidden: fanOutSchema.isHidden,\n system: fanOutSchema.system,\n compareRows: fanOutSchema.compareRows,\n sort: fanOutSchema.sort,\n };\n\n // now go through inputs and merge relationships\n const relationshipsFromBranches: Set<string> = new Set();\n for (const input of inputs) {\n const inputSchema = input.getSchema();\n assert(\n schema.tableName === inputSchema.tableName,\n `Table name mismatch in union fan-in: ${schema.tableName} !== ${inputSchema.tableName}`,\n );\n assert(\n schema.primaryKey === 
inputSchema.primaryKey,\n `Primary key mismatch in union fan-in`,\n );\n assert(\n schema.system === inputSchema.system,\n `System mismatch in union fan-in: ${schema.system} !== ${inputSchema.system}`,\n );\n assert(\n schema.compareRows === inputSchema.compareRows,\n `compareRows mismatch in union fan-in`,\n );\n assert(schema.sort === inputSchema.sort, `Sort mismatch in union fan-in`);\n\n for (const [relName, relSchema] of Object.entries(\n inputSchema.relationships,\n )) {\n if (relName in fanOutSchema.relationships) {\n continue;\n }\n\n // All branches will have unique relationship names except for relationships\n // that come in from `fanOut`.\n assert(\n !relationshipsFromBranches.has(relName),\n `Relationship ${relName} exists in multiple upstream inputs to union fan-in`,\n );\n schema.relationships[relName] = relSchema;\n relationshipsFromBranches.add(relName);\n }\n\n input.setOutput(this);\n }\n\n this.#schema = schema;\n this.#inputs = inputs;\n }\n\n destroy(): void {\n for (const input of this.#inputs) {\n input.destroy();\n }\n }\n\n fetch(req: FetchRequest): Stream<Node | 'yield'> {\n const iterables = this.#inputs.map(input => input.fetch(req));\n return mergeFetches(iterables, (l, r) =>\n this.#schema.compareRows(l.row, r.row),\n );\n }\n\n getSchema(): SourceSchema {\n return this.#schema;\n }\n\n *push(change: Change, pusher: InputBase): Stream<'yield'> {\n if (!this.#fanOutPushStarted) {\n yield* this.#pushInternalChange(change, pusher);\n } else {\n this.#accumulatedPushes.push(change);\n }\n }\n\n /**\n * An internal change means that a change was received inside the fan-out/fan-in sub-graph.\n *\n * These changes always come from children of a flip-join as no other push generating operators\n * currently exist between union-fan-in and union-fan-out. 
All other pushes\n * enter into union-fan-out before reaching union-fan-in.\n *\n * - normal joins for `exists` come before `union-fan-out`\n * - joins for `related` come after `union-fan-out`\n * - take comes after `union-fan-out`\n *\n * The algorithm for deciding whether or not to forward a push that came from inside the ufo/ufi sub-graph:\n * 1. If the change is a `child` change we can forward it. This is because all child branches in the ufo/ufi sub-graph are unique.\n * 2. If the change is `add` we can forward it iff no `fetches` for the row return any results.\n * If another branch has it, the add was already emitted in the past.\n * 3. If the change is `remove` we can forward it iff no `fetches` for the row return any results.\n * If no other branches have the change, the remove can be sent as the value is no longer present.\n * If other branches have it, the last branch the processes the remove will send the remove.\n * 4. Edits will always come through as child changes as flip join will flip them into children.\n * An edit that would result in a remove or add will have been split into an add/remove pair rather than being an edit.\n */\n *#pushInternalChange(change: Change, pusher: InputBase): Stream<'yield'> {\n if (change.type === 'child') {\n yield* this.#output.push(change, this);\n return;\n }\n\n assert(\n change.type === 'add' || change.type === 'remove',\n () =>\n `UnionFanIn: expected add or remove change type, got ${change.type}`,\n );\n\n let hadMatch = false;\n for (const input of this.#inputs) {\n if (input === pusher) {\n hadMatch = true;\n continue;\n }\n\n const constraint: Writable<Constraint> = {};\n for (const key of this.#schema.primaryKey) {\n constraint[key] = change.node.row[key];\n }\n const fetchResult = input.fetch({\n constraint,\n });\n\n if (first(fetchResult) !== undefined) {\n // Another branch has the row, so the add/remove is not needed.\n return;\n }\n }\n\n assert(hadMatch, 'Pusher was not one of the inputs to 
union-fan-in!');\n\n // No other branches have the row, so we can push the change.\n yield* this.#output.push(change, this);\n }\n\n fanOutStartedPushing() {\n assert(\n this.#fanOutPushStarted === false,\n 'UnionFanIn: fanOutStartedPushing called while already pushing',\n );\n this.#fanOutPushStarted = true;\n }\n\n *fanOutDonePushing(fanOutChangeType: Change['type']): Stream<'yield'> {\n assert(\n this.#fanOutPushStarted,\n 'UnionFanIn: fanOutDonePushing called without fanOutStartedPushing',\n );\n this.#fanOutPushStarted = false;\n if (this.#inputs.length === 0) {\n return;\n }\n\n if (this.#accumulatedPushes.length === 0) {\n // It is possible for no forks to pass along the push.\n // E.g., if no filters match in any fork.\n return;\n }\n\n yield* pushAccumulatedChanges(\n this.#accumulatedPushes,\n this.#output,\n this,\n fanOutChangeType,\n mergeRelationships,\n makeAddEmptyRelationships(this.#schema),\n );\n }\n\n setOutput(output: Output): void {\n this.#output = output;\n }\n}\n\nexport function* mergeFetches(\n fetches: Iterable<Node | 'yield'>[],\n comparator: (l: Node, r: Node) => number,\n): IterableIterator<Node | 'yield'> {\n const iterators = fetches.map(i => i[Symbol.iterator]());\n let threw = false;\n try {\n const current: (Node | null)[] = [];\n let lastNodeYielded: Node | undefined;\n for (let i = 0; i < iterators.length; i++) {\n const iter = iterators[i];\n let result = iter.next();\n // yield yields when initializing\n while (!result.done && result.value === 'yield') {\n yield result.value;\n result = iter.next();\n }\n current[i] = result.done ? 
null : (result.value as Node);\n }\n while (current.some(c => c !== null)) {\n const min = current.reduce(\n (acc: [Node, number] | undefined, c, i): [Node, number] | undefined => {\n if (c === null) {\n return acc;\n }\n if (acc === undefined || comparator(c, acc[0]) < 0) {\n return [c, i];\n }\n return acc;\n },\n undefined,\n );\n\n assert(min !== undefined, 'min is undefined');\n const [minNode, minIndex] = min;\n const iter = iterators[minIndex];\n let result = iter.next();\n while (!result.done && result.value === 'yield') {\n yield result.value;\n result = iter.next();\n }\n current[minIndex] = result.done ? null : (result.value as Node);\n if (\n lastNodeYielded !== undefined &&\n comparator(lastNodeYielded, minNode) === 0\n ) {\n continue;\n }\n lastNodeYielded = minNode;\n yield minNode;\n }\n } catch (e) {\n threw = true;\n for (const iter of iterators) {\n try {\n iter.throw?.(e);\n } catch (_cleanupError) {\n // error in the iter.throw cleanup,\n // catch so other iterators are cleaned up\n }\n }\n throw e;\n } finally {\n if (!threw) {\n for (const iter of iterators) {\n try {\n iter.return?.();\n } catch (_cleanupError) {\n // error in the iter.return cleanup,\n // catch so other iterators are cleaned up\n }\n }\n }\n 
}\n}\n"],"mappings":";;;;;AAsBA,IAAa,aAAb,MAA4C;CAC1C;CACA;CACA,qBAA8B;CAC9B,UAAkB;CAClB,qBAA+B,EAAE;CAEjC,YAAY,QAAqB,QAAiB;AAChD,QAAA,SAAe;EACf,MAAM,eAAe,OAAO,WAAW;AACvC,SAAO,SAAS,KAAK;EAErB,MAAM,SAAiC;GACrC,WAAW,aAAa;GACxB,SAAS,aAAa;GACtB,YAAY,aAAa;GACzB,eAAe,EACb,GAAG,aAAa,eACjB;GACD,UAAU,aAAa;GACvB,QAAQ,aAAa;GACrB,aAAa,aAAa;GAC1B,MAAM,aAAa;GACpB;EAGD,MAAM,4CAAyC,IAAI,KAAK;AACxD,OAAK,MAAM,SAAS,QAAQ;GAC1B,MAAM,cAAc,MAAM,WAAW;AACrC,UACE,OAAO,cAAc,YAAY,WACjC,wCAAwC,OAAO,UAAU,OAAO,YAAY,YAC7E;AACD,UACE,OAAO,eAAe,YAAY,YAClC,uCACD;AACD,UACE,OAAO,WAAW,YAAY,QAC9B,oCAAoC,OAAO,OAAO,OAAO,YAAY,SACtE;AACD,UACE,OAAO,gBAAgB,YAAY,aACnC,uCACD;AACD,UAAO,OAAO,SAAS,YAAY,MAAM,gCAAgC;AAEzE,QAAK,MAAM,CAAC,SAAS,cAAc,OAAO,QACxC,YAAY,cACb,EAAE;AACD,QAAI,WAAW,aAAa,cAC1B;AAKF,WACE,CAAC,0BAA0B,IAAI,QAAQ,EACvC,gBAAgB,QAAQ,qDACzB;AACD,WAAO,cAAc,WAAW;AAChC,8BAA0B,IAAI,QAAQ;;AAGxC,SAAM,UAAU,KAAK;;AAGvB,QAAA,SAAe;AACf,QAAA,SAAe;;CAGjB,UAAgB;AACd,OAAK,MAAM,SAAS,MAAA,OAClB,OAAM,SAAS;;CAInB,MAAM,KAA2C;AAE/C,SAAO,aADW,MAAA,OAAa,KAAI,UAAS,MAAM,MAAM,IAAI,CAAC,GAC7B,GAAG,MACjC,MAAA,OAAa,YAAY,EAAE,KAAK,EAAE,IAAI,CACvC;;CAGH,YAA0B;AACxB,SAAO,MAAA;;CAGT,CAAC,KAAK,QAAgB,QAAoC;AACxD,MAAI,CAAC,MAAA,kBACH,QAAO,MAAA,mBAAyB,QAAQ,OAAO;MAE/C,OAAA,kBAAwB,KAAK,OAAO;;;;;;;;;;;;;;;;;;;;;;;CAyBxC,EAAA,mBAAqB,QAAgB,QAAoC;AACvE,MAAI,OAAO,SAAS,SAAS;AAC3B,UAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;AACtC;;AAGF,SACE,OAAO,SAAS,SAAS,OAAO,SAAS,gBAEvC,uDAAuD,OAAO,OACjE;EAED,IAAI,WAAW;AACf,OAAK,MAAM,SAAS,MAAA,QAAc;AAChC,OAAI,UAAU,QAAQ;AACpB,eAAW;AACX;;GAGF,MAAM,aAAmC,EAAE;AAC3C,QAAK,MAAM,OAAO,MAAA,OAAa,WAC7B,YAAW,OAAO,OAAO,KAAK,IAAI;AAMpC,OAAI,MAJgB,MAAM,MAAM,EAC9B,YACD,CAAC,CAEoB,KAAK,KAAA,EAEzB;;AAIJ,SAAO,UAAU,oDAAoD;AAGrE,SAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;;CAGxC,uBAAuB;AACrB,SACE,MAAA,sBAA4B,OAC5B,gEACD;AACD,QAAA,oBAA0B;;CAG5B,CAAC,kBAAkB,kBAAmD;AACpE,SACE,MAAA,mBACA,oEACD;AACD,QAAA,oBAA0B;AAC1B,MAAI,MAAA,OAAa,WAAW,EAC1B;AAGF,MAAI,MAAA,kBAAwB,WAAW,EAGrC;AAGF,SAAO,uBACL,MAAA,mBACA,MAAA,QACA,MACA,kBACA,oBACA,0BAA0B,MAAA,OAAa,CACxC;;CAGH,UAAU,QAAs
B;AAC9B,QAAA,SAAe;;;AAInB,UAAiB,aACf,SACA,YACkC;CAClC,MAAM,YAAY,QAAQ,KAAI,MAAK,EAAE,OAAO,WAAW,CAAC;CACxD,IAAI,QAAQ;AACZ,KAAI;EACF,MAAM,UAA2B,EAAE;EACnC,IAAI;AACJ,OAAK,IAAI,IAAI,GAAG,IAAI,UAAU,QAAQ,KAAK;GACzC,MAAM,OAAO,UAAU;GACvB,IAAI,SAAS,KAAK,MAAM;AAExB,UAAO,CAAC,OAAO,QAAQ,OAAO,UAAU,SAAS;AAC/C,UAAM,OAAO;AACb,aAAS,KAAK,MAAM;;AAEtB,WAAQ,KAAK,OAAO,OAAO,OAAQ,OAAO;;AAE5C,SAAO,QAAQ,MAAK,MAAK,MAAM,KAAK,EAAE;GACpC,MAAM,MAAM,QAAQ,QACjB,KAAiC,GAAG,MAAkC;AACrE,QAAI,MAAM,KACR,QAAO;AAET,QAAI,QAAQ,KAAA,KAAa,WAAW,GAAG,IAAI,GAAG,GAAG,EAC/C,QAAO,CAAC,GAAG,EAAE;AAEf,WAAO;MAET,KAAA,EACD;AAED,UAAO,QAAQ,KAAA,GAAW,mBAAmB;GAC7C,MAAM,CAAC,SAAS,YAAY;GAC5B,MAAM,OAAO,UAAU;GACvB,IAAI,SAAS,KAAK,MAAM;AACxB,UAAO,CAAC,OAAO,QAAQ,OAAO,UAAU,SAAS;AAC/C,UAAM,OAAO;AACb,aAAS,KAAK,MAAM;;AAEtB,WAAQ,YAAY,OAAO,OAAO,OAAQ,OAAO;AACjD,OACE,oBAAoB,KAAA,KACpB,WAAW,iBAAiB,QAAQ,KAAK,EAEzC;AAEF,qBAAkB;AAClB,SAAM;;UAED,GAAG;AACV,UAAQ;AACR,OAAK,MAAM,QAAQ,UACjB,KAAI;AACF,QAAK,QAAQ,EAAE;WACR,eAAe;AAK1B,QAAM;WACE;AACR,MAAI,CAAC,MACH,MAAK,MAAM,QAAQ,UACjB,KAAI;AACF,QAAK,UAAU;WACR,eAAe"}
1
+ {"version":3,"file":"union-fan-in.js","names":["#inputs","#schema","#fanOutPushStarted","#pushInternalChange","#accumulatedPushes","#output"],"sources":["../../../../../zql/src/ivm/union-fan-in.ts"],"sourcesContent":["import {assert} from '../../../shared/src/asserts.ts';\nimport type {Writable} from '../../../shared/src/writable.ts';\nimport type {Change} from './change.ts';\nimport type {Constraint} from './constraint.ts';\nimport type {Node} from './data.ts';\nimport {\n throwOutput,\n type FetchRequest,\n type Input,\n type InputBase,\n type Operator,\n type Output,\n} from './operator.ts';\nimport {\n makeAddEmptyRelationships,\n mergeRelationships,\n pushAccumulatedChanges,\n} from './push-accumulated.ts';\nimport type {SourceSchema} from './schema.ts';\nimport {first, type Stream} from './stream.ts';\nimport type {UnionFanOut} from './union-fan-out.ts';\n\nexport class UnionFanIn implements Operator {\n readonly #inputs: readonly Input[];\n readonly #schema: SourceSchema;\n #fanOutPushStarted: boolean = false;\n #output: Output = throwOutput;\n #accumulatedPushes: Change[] = [];\n\n constructor(fanOut: UnionFanOut, inputs: Input[]) {\n this.#inputs = inputs;\n const fanOutSchema = fanOut.getSchema();\n fanOut.setFanIn(this);\n assert(fanOutSchema.sort !== undefined, 'UnionFanIn requires sorted input');\n\n const schema: Writable<SourceSchema> = {\n tableName: fanOutSchema.tableName,\n columns: fanOutSchema.columns,\n primaryKey: fanOutSchema.primaryKey,\n relationships: {\n ...fanOutSchema.relationships,\n },\n isHidden: fanOutSchema.isHidden,\n system: fanOutSchema.system,\n compareRows: fanOutSchema.compareRows,\n sort: fanOutSchema.sort,\n };\n\n // now go through inputs and merge relationships\n const relationshipsFromBranches: Set<string> = new Set();\n for (const input of inputs) {\n const inputSchema = input.getSchema();\n assert(\n schema.tableName === inputSchema.tableName,\n `Table name mismatch in union fan-in: ${schema.tableName} !== 
${inputSchema.tableName}`,\n );\n assert(\n schema.primaryKey === inputSchema.primaryKey,\n `Primary key mismatch in union fan-in`,\n );\n assert(\n schema.system === inputSchema.system,\n `System mismatch in union fan-in: ${schema.system} !== ${inputSchema.system}`,\n );\n assert(\n schema.compareRows === inputSchema.compareRows,\n `compareRows mismatch in union fan-in`,\n );\n assert(schema.sort === inputSchema.sort, `Sort mismatch in union fan-in`);\n\n for (const [relName, relSchema] of Object.entries(\n inputSchema.relationships,\n )) {\n if (relName in fanOutSchema.relationships) {\n continue;\n }\n\n // All branches will have unique relationship names except for relationships\n // that come in from `fanOut`.\n assert(\n !relationshipsFromBranches.has(relName),\n `Relationship ${relName} exists in multiple upstream inputs to union fan-in`,\n );\n schema.relationships[relName] = relSchema;\n relationshipsFromBranches.add(relName);\n }\n\n input.setOutput(this);\n }\n\n this.#schema = schema;\n this.#inputs = inputs;\n }\n\n destroy(): void {\n for (const input of this.#inputs) {\n input.destroy();\n }\n }\n\n fetch(req: FetchRequest): Stream<Node | 'yield'> {\n const iterables = this.#inputs.map(input => input.fetch(req));\n return mergeFetches(iterables, (l, r) =>\n this.#schema.compareRows(l.row, r.row),\n );\n }\n\n getSchema(): SourceSchema {\n return this.#schema;\n }\n\n *push(change: Change, pusher: InputBase): Stream<'yield'> {\n if (!this.#fanOutPushStarted) {\n yield* this.#pushInternalChange(change, pusher);\n } else {\n this.#accumulatedPushes.push(change);\n }\n }\n\n /**\n * An internal change means that a change was received inside the fan-out/fan-in sub-graph.\n *\n * These changes always come from children of a flip-join as no other push generating operators\n * currently exist between union-fan-in and union-fan-out. 
All other pushes\n * enter into union-fan-out before reaching union-fan-in.\n *\n * - normal joins for `exists` come before `union-fan-out`\n * - joins for `related` come after `union-fan-out`\n * - take comes after `union-fan-out`\n *\n * The algorithm for deciding whether or not to forward a push that came from inside the ufo/ufi sub-graph:\n * 1. If the change is a `child` change we can forward it. This is because all child branches in the ufo/ufi sub-graph are unique.\n * 2. If the change is `add` we can forward it iff no `fetches` for the row return any results.\n * If another branch has it, the add was already emitted in the past.\n * 3. If the change is `remove` we can forward it iff no `fetches` for the row return any results.\n * If no other branches have the change, the remove can be sent as the value is no longer present.\n * If other branches have it, the last branch the processes the remove will send the remove.\n * 4. Edits will always come through as child changes as flip join will flip them into children.\n * An edit that would result in a remove or add will have been split into an add/remove pair rather than being an edit.\n */\n *#pushInternalChange(change: Change, pusher: InputBase): Stream<'yield'> {\n if (change.type === 'child') {\n yield* this.#output.push(change, this);\n return;\n }\n\n assert(\n change.type === 'add' || change.type === 'remove',\n () =>\n `UnionFanIn: expected add or remove change type, got ${change.type}`,\n );\n\n let hadMatch = false;\n for (const input of this.#inputs) {\n if (input === pusher) {\n hadMatch = true;\n continue;\n }\n\n const constraint: Writable<Constraint> = {};\n for (const key of this.#schema.primaryKey) {\n constraint[key] = change.node.row[key];\n }\n const fetchResult = input.fetch({\n constraint,\n });\n\n if (first(fetchResult) !== undefined) {\n // Another branch has the row, so the add/remove is not needed.\n return;\n }\n }\n\n assert(hadMatch, 'Pusher was not one of the inputs to 
union-fan-in!');\n\n // No other branches have the row, so we can push the change.\n yield* this.#output.push(change, this);\n }\n\n fanOutStartedPushing() {\n assert(\n this.#fanOutPushStarted === false,\n 'UnionFanIn: fanOutStartedPushing called while already pushing',\n );\n this.#fanOutPushStarted = true;\n }\n\n *fanOutDonePushing(fanOutChangeType: Change['type']): Stream<'yield'> {\n assert(\n this.#fanOutPushStarted,\n 'UnionFanIn: fanOutDonePushing called without fanOutStartedPushing',\n );\n this.#fanOutPushStarted = false;\n if (this.#inputs.length === 0) {\n return;\n }\n\n if (this.#accumulatedPushes.length === 0) {\n // It is possible for no forks to pass along the push.\n // E.g., if no filters match in any fork.\n return;\n }\n\n yield* pushAccumulatedChanges(\n this.#accumulatedPushes,\n this.#output,\n this,\n fanOutChangeType,\n mergeRelationships,\n makeAddEmptyRelationships(this.#schema),\n );\n }\n\n setOutput(output: Output): void {\n this.#output = output;\n }\n}\n\nexport function* mergeFetches(\n fetches: Iterable<Node | 'yield'>[],\n comparator: (l: Node, r: Node) => number,\n): IterableIterator<Node | 'yield'> {\n const iterators = fetches.map(i => i[Symbol.iterator]());\n let threw = false;\n try {\n const current: (Node | null)[] = [];\n let lastNodeYielded: Node | undefined;\n for (let i = 0; i < iterators.length; i++) {\n const iter = iterators[i];\n let result = iter.next();\n // yield yields when initializing\n while (!result.done && result.value === 'yield') {\n yield result.value;\n result = iter.next();\n }\n current[i] = result.done ? 
null : (result.value as Node);\n }\n while (current.some(c => c !== null)) {\n const min = current.reduce(\n (acc: [Node, number] | undefined, c, i): [Node, number] | undefined => {\n if (c === null) {\n return acc;\n }\n if (acc === undefined || comparator(c, acc[0]) < 0) {\n return [c, i];\n }\n return acc;\n },\n undefined,\n );\n\n assert(min !== undefined, 'min is undefined');\n const [minNode, minIndex] = min;\n const iter = iterators[minIndex];\n let result = iter.next();\n while (!result.done && result.value === 'yield') {\n yield result.value;\n result = iter.next();\n }\n current[minIndex] = result.done ? null : (result.value as Node);\n if (\n lastNodeYielded !== undefined &&\n comparator(lastNodeYielded, minNode) === 0\n ) {\n continue;\n }\n lastNodeYielded = minNode;\n yield minNode;\n }\n } catch (e) {\n threw = true;\n for (const iter of iterators) {\n try {\n iter.throw?.(e);\n } catch (_cleanupError) {\n // error in the iter.throw cleanup,\n // catch so other iterators are cleaned up\n }\n }\n throw e;\n } finally {\n if (!threw) {\n for (const iter of iterators) {\n try {\n iter.return?.();\n } catch (_cleanupError) {\n // error in the iter.return cleanup,\n // catch so other iterators are cleaned up\n }\n }\n }\n 
}\n}\n"],"mappings":";;;;;AAsBA,IAAa,aAAb,MAA4C;CAC1C;CACA;CACA,qBAA8B;CAC9B,UAAkB;CAClB,qBAA+B,EAAE;CAEjC,YAAY,QAAqB,QAAiB;AAChD,QAAA,SAAe;EACf,MAAM,eAAe,OAAO,WAAW;AACvC,SAAO,SAAS,KAAK;AACrB,SAAO,aAAa,SAAS,KAAA,GAAW,mCAAmC;EAE3E,MAAM,SAAiC;GACrC,WAAW,aAAa;GACxB,SAAS,aAAa;GACtB,YAAY,aAAa;GACzB,eAAe,EACb,GAAG,aAAa,eACjB;GACD,UAAU,aAAa;GACvB,QAAQ,aAAa;GACrB,aAAa,aAAa;GAC1B,MAAM,aAAa;GACpB;EAGD,MAAM,4CAAyC,IAAI,KAAK;AACxD,OAAK,MAAM,SAAS,QAAQ;GAC1B,MAAM,cAAc,MAAM,WAAW;AACrC,UACE,OAAO,cAAc,YAAY,WACjC,wCAAwC,OAAO,UAAU,OAAO,YAAY,YAC7E;AACD,UACE,OAAO,eAAe,YAAY,YAClC,uCACD;AACD,UACE,OAAO,WAAW,YAAY,QAC9B,oCAAoC,OAAO,OAAO,OAAO,YAAY,SACtE;AACD,UACE,OAAO,gBAAgB,YAAY,aACnC,uCACD;AACD,UAAO,OAAO,SAAS,YAAY,MAAM,gCAAgC;AAEzE,QAAK,MAAM,CAAC,SAAS,cAAc,OAAO,QACxC,YAAY,cACb,EAAE;AACD,QAAI,WAAW,aAAa,cAC1B;AAKF,WACE,CAAC,0BAA0B,IAAI,QAAQ,EACvC,gBAAgB,QAAQ,qDACzB;AACD,WAAO,cAAc,WAAW;AAChC,8BAA0B,IAAI,QAAQ;;AAGxC,SAAM,UAAU,KAAK;;AAGvB,QAAA,SAAe;AACf,QAAA,SAAe;;CAGjB,UAAgB;AACd,OAAK,MAAM,SAAS,MAAA,OAClB,OAAM,SAAS;;CAInB,MAAM,KAA2C;AAE/C,SAAO,aADW,MAAA,OAAa,KAAI,UAAS,MAAM,MAAM,IAAI,CAAC,GAC7B,GAAG,MACjC,MAAA,OAAa,YAAY,EAAE,KAAK,EAAE,IAAI,CACvC;;CAGH,YAA0B;AACxB,SAAO,MAAA;;CAGT,CAAC,KAAK,QAAgB,QAAoC;AACxD,MAAI,CAAC,MAAA,kBACH,QAAO,MAAA,mBAAyB,QAAQ,OAAO;MAE/C,OAAA,kBAAwB,KAAK,OAAO;;;;;;;;;;;;;;;;;;;;;;;CAyBxC,EAAA,mBAAqB,QAAgB,QAAoC;AACvE,MAAI,OAAO,SAAS,SAAS;AAC3B,UAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;AACtC;;AAGF,SACE,OAAO,SAAS,SAAS,OAAO,SAAS,gBAEvC,uDAAuD,OAAO,OACjE;EAED,IAAI,WAAW;AACf,OAAK,MAAM,SAAS,MAAA,QAAc;AAChC,OAAI,UAAU,QAAQ;AACpB,eAAW;AACX;;GAGF,MAAM,aAAmC,EAAE;AAC3C,QAAK,MAAM,OAAO,MAAA,OAAa,WAC7B,YAAW,OAAO,OAAO,KAAK,IAAI;AAMpC,OAAI,MAJgB,MAAM,MAAM,EAC9B,YACD,CAAC,CAEoB,KAAK,KAAA,EAEzB;;AAIJ,SAAO,UAAU,oDAAoD;AAGrE,SAAO,MAAA,OAAa,KAAK,QAAQ,KAAK;;CAGxC,uBAAuB;AACrB,SACE,MAAA,sBAA4B,OAC5B,gEACD;AACD,QAAA,oBAA0B;;CAG5B,CAAC,kBAAkB,kBAAmD;AACpE,SACE,MAAA,mBACA,oEACD;AACD,QAAA,oBAA0B;AAC1B,MAAI,MAAA,OAAa,WAAW,EAC1B;AAGF,MAAI,MAAA,kBAAwB,WAAW,EAGrC;AAGF,SAAO,uBACL,MAAA,mBACA,MAAA,QACA,MACA,kBACA,oBACA,
0BAA0B,MAAA,OAAa,CACxC;;CAGH,UAAU,QAAsB;AAC9B,QAAA,SAAe;;;AAInB,UAAiB,aACf,SACA,YACkC;CAClC,MAAM,YAAY,QAAQ,KAAI,MAAK,EAAE,OAAO,WAAW,CAAC;CACxD,IAAI,QAAQ;AACZ,KAAI;EACF,MAAM,UAA2B,EAAE;EACnC,IAAI;AACJ,OAAK,IAAI,IAAI,GAAG,IAAI,UAAU,QAAQ,KAAK;GACzC,MAAM,OAAO,UAAU;GACvB,IAAI,SAAS,KAAK,MAAM;AAExB,UAAO,CAAC,OAAO,QAAQ,OAAO,UAAU,SAAS;AAC/C,UAAM,OAAO;AACb,aAAS,KAAK,MAAM;;AAEtB,WAAQ,KAAK,OAAO,OAAO,OAAQ,OAAO;;AAE5C,SAAO,QAAQ,MAAK,MAAK,MAAM,KAAK,EAAE;GACpC,MAAM,MAAM,QAAQ,QACjB,KAAiC,GAAG,MAAkC;AACrE,QAAI,MAAM,KACR,QAAO;AAET,QAAI,QAAQ,KAAA,KAAa,WAAW,GAAG,IAAI,GAAG,GAAG,EAC/C,QAAO,CAAC,GAAG,EAAE;AAEf,WAAO;MAET,KAAA,EACD;AAED,UAAO,QAAQ,KAAA,GAAW,mBAAmB;GAC7C,MAAM,CAAC,SAAS,YAAY;GAC5B,MAAM,OAAO,UAAU;GACvB,IAAI,SAAS,KAAK,MAAM;AACxB,UAAO,CAAC,OAAO,QAAQ,OAAO,UAAU,SAAS;AAC/C,UAAM,OAAO;AACb,aAAS,KAAK,MAAM;;AAEtB,WAAQ,YAAY,OAAO,OAAO,OAAQ,OAAO;AACjD,OACE,oBAAoB,KAAA,KACpB,WAAW,iBAAiB,QAAQ,KAAK,EAEzC;AAEF,qBAAkB;AAClB,SAAM;;UAED,GAAG;AACV,UAAQ;AACR,OAAK,MAAM,QAAQ,UACjB,KAAI;AACF,QAAK,QAAQ,EAAE;WACR,eAAe;AAK1B,QAAM;WACE;AACR,MAAI,CAAC,MACH,MAAK,MAAM,QAAQ,UACjB,KAAI;AACF,QAAK,UAAU;WACR,eAAe"}
@@ -10,7 +10,7 @@ import type { Start } from '../../zql/src/ivm/operator.ts';
10
10
  export type NoSubqueryCondition = Exclude<Condition, {
11
11
  type: 'correlatedSubquery';
12
12
  }>;
13
- export declare function buildSelectQuery(tableName: string, columns: Record<string, SchemaValue>, constraint: Constraint | undefined, filters: NoSubqueryCondition | undefined, order: Ordering, reverse: boolean | undefined, start: Start | undefined): SQLQuery;
13
+ export declare function buildSelectQuery(tableName: string, columns: Record<string, SchemaValue>, constraint: Constraint | undefined, filters: NoSubqueryCondition | undefined, order: Ordering | undefined, reverse: boolean | undefined, start: Start | undefined): SQLQuery;
14
14
  export declare function constraintsToSQL(constraint: Constraint | undefined, columns: Record<string, SchemaValue>): SQLQuery[];
15
15
  export declare function orderByToSQL(order: Ordering, reverse: boolean): SQLQuery;
16
16
  /**
@@ -1 +1 @@
1
- {"version":3,"file":"query-builder.d.ts","sourceRoot":"","sources":["../../../../zqlite/src/query-builder.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,QAAQ,EAAC,MAAM,gBAAgB,CAAC;AAC7C,OAAO,KAAK,EACV,SAAS,EACT,QAAQ,EAGT,MAAM,gCAAgC,CAAC;AACxC,OAAO,KAAK,EACV,WAAW,EACX,SAAS,EACV,MAAM,uCAAuC,CAAC;AAE/C,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,iCAAiC,CAAC;AAChE,OAAO,KAAK,EAAC,KAAK,EAAC,MAAM,+BAA+B,CAAC;AAEzD;;;GAGG;AACH,MAAM,MAAM,mBAAmB,GAAG,OAAO,CACvC,SAAS,EACT;IAAC,IAAI,EAAE,oBAAoB,CAAA;CAAC,CAC7B,CAAC;AAEF,wBAAgB,gBAAgB,CAC9B,SAAS,EAAE,MAAM,EACjB,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,EACpC,UAAU,EAAE,UAAU,GAAG,SAAS,EAClC,OAAO,EAAE,mBAAmB,GAAG,SAAS,EACxC,KAAK,EAAE,QAAQ,EACf,OAAO,EAAE,OAAO,GAAG,SAAS,EAC5B,KAAK,EAAE,KAAK,GAAG,SAAS,YAqBzB;AAED,wBAAgB,gBAAgB,CAC9B,UAAU,EAAE,UAAU,GAAG,SAAS,EAClC,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,cAcrC;AAED,wBAAgB,YAAY,CAAC,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,GAAG,QAAQ,CAmBxE;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,OAAO,EAAE,mBAAmB,GAAG,QAAQ,CAuBnE;AAyDD,wBAAgB,YAAY,CAAC,CAAC,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS,GAAG,OAAO,CAWjE"}
1
+ {"version":3,"file":"query-builder.d.ts","sourceRoot":"","sources":["../../../../zqlite/src/query-builder.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,QAAQ,EAAC,MAAM,gBAAgB,CAAC;AAC7C,OAAO,KAAK,EACV,SAAS,EACT,QAAQ,EAGT,MAAM,gCAAgC,CAAC;AACxC,OAAO,KAAK,EACV,WAAW,EACX,SAAS,EACV,MAAM,uCAAuC,CAAC;AAG/C,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,iCAAiC,CAAC;AAChE,OAAO,KAAK,EAAC,KAAK,EAAC,MAAM,+BAA+B,CAAC;AAEzD;;;GAGG;AACH,MAAM,MAAM,mBAAmB,GAAG,OAAO,CACvC,SAAS,EACT;IAAC,IAAI,EAAE,oBAAoB,CAAA;CAAC,CAC7B,CAAC;AAEF,wBAAgB,gBAAgB,CAC9B,SAAS,EAAE,MAAM,EACjB,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,EACpC,UAAU,EAAE,UAAU,GAAG,SAAS,EAClC,OAAO,EAAE,mBAAmB,GAAG,SAAS,EACxC,KAAK,EAAE,QAAQ,GAAG,SAAS,EAC3B,OAAO,EAAE,OAAO,GAAG,SAAS,EAC5B,KAAK,EAAE,KAAK,GAAG,SAAS,YAyBzB;AAED,wBAAgB,gBAAgB,CAC9B,UAAU,EAAE,UAAU,GAAG,SAAS,EAClC,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,cAcrC;AAED,wBAAgB,YAAY,CAAC,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,GAAG,QAAQ,CAmBxE;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,OAAO,EAAE,mBAAmB,GAAG,QAAQ,CAuBnE;AAyDD,wBAAgB,YAAY,CAAC,CAAC,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS,GAAG,OAAO,CAWjE"}
@@ -1,12 +1,17 @@
1
+ import { assert } from "../../shared/src/asserts.js";
1
2
  import { sql } from "./internal/sql.js";
2
3
  //#region ../zqlite/src/query-builder.ts
3
4
  function buildSelectQuery(tableName, columns, constraint, filters, order, reverse, start) {
4
5
  let query = sql`SELECT ${sql.join(Object.keys(columns).map((c) => sql.ident(c)), sql`,`)} FROM ${sql.ident(tableName)}`;
5
6
  const constraints = constraintsToSQL(constraint, columns);
6
- if (start) constraints.push(gatherStartConstraints(start, reverse, order, columns));
7
+ if (start) {
8
+ assert(order !== void 0, "start requires ordering");
9
+ constraints.push(gatherStartConstraints(start, reverse, order, columns));
10
+ }
7
11
  if (filters) constraints.push(filtersToSQL(filters));
8
12
  if (constraints.length > 0) query = sql`${query} WHERE ${sql.join(constraints, sql` AND `)}`;
9
- return sql`${query} ${orderByToSQL(order, !!reverse)}`;
13
+ if (order && order.length > 0) return sql`${query} ${orderByToSQL(order, !!reverse)}`;
14
+ return query;
10
15
  }
11
16
  function constraintsToSQL(constraint, columns) {
12
17
  if (!constraint) return [];
@@ -1 +1 @@
1
- {"version":3,"file":"query-builder.js","names":[],"sources":["../../../../zqlite/src/query-builder.ts"],"sourcesContent":["import type {SQLQuery} from '@databases/sql';\nimport type {\n Condition,\n Ordering,\n SimpleCondition,\n ValuePosition,\n} from '../../zero-protocol/src/ast.ts';\nimport type {\n SchemaValue,\n ValueType,\n} from '../../zero-schema/src/table-schema.ts';\nimport {sql} from './internal/sql.ts';\nimport type {Constraint} from '../../zql/src/ivm/constraint.ts';\nimport type {Start} from '../../zql/src/ivm/operator.ts';\n\n/**\n * Condition type without correlated subqueries.\n * This matches the output of transformFilters from zql/builder/filter.ts\n */\nexport type NoSubqueryCondition = Exclude<\n Condition,\n {type: 'correlatedSubquery'}\n>;\n\nexport function buildSelectQuery(\n tableName: string,\n columns: Record<string, SchemaValue>,\n constraint: Constraint | undefined,\n filters: NoSubqueryCondition | undefined,\n order: Ordering,\n reverse: boolean | undefined,\n start: Start | undefined,\n) {\n let query = sql`SELECT ${sql.join(\n Object.keys(columns).map(c => sql.ident(c)),\n sql`,`,\n )} FROM ${sql.ident(tableName)}`;\n const constraints: SQLQuery[] = constraintsToSQL(constraint, columns);\n\n if (start) {\n constraints.push(gatherStartConstraints(start, reverse, order, columns));\n }\n\n if (filters) {\n constraints.push(filtersToSQL(filters));\n }\n\n if (constraints.length > 0) {\n query = sql`${query} WHERE ${sql.join(constraints, sql` AND `)}`;\n }\n\n return sql`${query} ${orderByToSQL(order, !!reverse)}`;\n}\n\nexport function constraintsToSQL(\n constraint: Constraint | undefined,\n columns: Record<string, SchemaValue>,\n) {\n if (!constraint) {\n return [];\n }\n\n const constraints: SQLQuery[] = [];\n for (const [key, value] of Object.entries(constraint)) {\n constraints.push(\n sql`${sql.ident(key)} = ${toSQLiteType(value, columns[key].type)}`,\n );\n }\n\n return constraints;\n}\n\nexport function orderByToSQL(order: 
Ordering, reverse: boolean): SQLQuery {\n if (reverse) {\n return sql`ORDER BY ${sql.join(\n order.map(\n s =>\n sql`${sql.ident(s[0])} ${sql.__dangerous__rawValue(\n s[1] === 'asc' ? 'desc' : 'asc',\n )}`,\n ),\n sql`, `,\n )}`;\n } else {\n return sql`ORDER BY ${sql.join(\n order.map(\n s => sql`${sql.ident(s[0])} ${sql.__dangerous__rawValue(s[1])}`,\n ),\n sql`, `,\n )}`;\n }\n}\n\n/**\n * Converts filters (conditions) to SQL WHERE clause.\n * This applies all filters present in the AST for a query to the source.\n */\nexport function filtersToSQL(filters: NoSubqueryCondition): SQLQuery {\n switch (filters.type) {\n case 'simple':\n return simpleConditionToSQL(filters);\n case 'and':\n return filters.conditions.length > 0\n ? sql`(${sql.join(\n filters.conditions.map(condition =>\n filtersToSQL(condition as NoSubqueryCondition),\n ),\n sql` AND `,\n )})`\n : sql`TRUE`;\n case 'or':\n return filters.conditions.length > 0\n ? sql`(${sql.join(\n filters.conditions.map(condition =>\n filtersToSQL(condition as NoSubqueryCondition),\n ),\n sql` OR `,\n )})`\n : sql`FALSE`;\n }\n}\n\nfunction simpleConditionToSQL(filter: SimpleCondition): SQLQuery {\n const {op} = filter;\n if (op === 'IN' || op === 'NOT IN') {\n switch (filter.right.type) {\n case 'literal':\n return sql`${valuePositionToSQL(\n filter.left,\n )} ${sql.__dangerous__rawValue(\n filter.op,\n )} (SELECT value FROM json_each(${JSON.stringify(\n filter.right.value,\n )}))`;\n case 'static':\n throw new Error(\n 'Static parameters must be replaced before conversion to SQL',\n );\n }\n }\n return sql`${valuePositionToSQL(filter.left)} ${sql.__dangerous__rawValue(\n // SQLite's LIKE operator is case-insensitive by default, so we\n // convert ILIKE to LIKE and NOT ILIKE to NOT LIKE.\n filter.op === 'ILIKE'\n ? 'LIKE'\n : filter.op === 'NOT ILIKE'\n ? 
'NOT LIKE'\n : filter.op,\n )} ${valuePositionToSQL(filter.right)}`;\n}\n\nfunction valuePositionToSQL(value: ValuePosition): SQLQuery {\n switch (value.type) {\n case 'column':\n return sql.ident(value.name);\n case 'literal':\n return sql`${toSQLiteType(value.value, getJsType(value.value))}`;\n case 'static':\n throw new Error(\n 'Static parameters must be replaced before conversion to SQL',\n );\n }\n}\n\nfunction getJsType(value: unknown): ValueType {\n if (value === null) {\n return 'null';\n }\n return typeof value === 'string'\n ? 'string'\n : typeof value === 'number'\n ? 'number'\n : typeof value === 'boolean'\n ? 'boolean'\n : 'json';\n}\n\nexport function toSQLiteType(v: unknown, type: ValueType): unknown {\n switch (type) {\n case 'boolean':\n return v === null ? null : v ? 1 : 0;\n case 'number':\n case 'string':\n case 'null':\n return v;\n case 'json':\n return JSON.stringify(v);\n }\n}\n\n/**\n * The ordering could be complex such as:\n * `ORDER BY a ASC, b DESC, c ASC`\n *\n * In those cases, we need to encode the constraints as various\n * `OR` clauses.\n *\n * E.g.,\n *\n * to get the row after (a = 1, b = 2, c = 3) would be:\n *\n * `WHERE a > 1 OR (a = 1 AND b < 2) OR (a = 1 AND b = 2 AND c > 3)`\n *\n * - after vs before flips the comparison operators.\n * - inclusive adds a final `OR` clause for the exact match.\n */\nfunction gatherStartConstraints(\n start: Start,\n reverse: boolean | undefined,\n order: Ordering,\n columnTypes: Record<string, SchemaValue>,\n): SQLQuery {\n const constraints: SQLQuery[] = [];\n const {row: from, basis} = start;\n\n for (let i = 0; i < order.length; i++) {\n const group: SQLQuery[] = [];\n const [iField, iDirection] = order[i];\n for (let j = 0; j <= i; j++) {\n if (j === i) {\n const constraintValue = toSQLiteType(\n from[iField],\n columnTypes[iField].type,\n );\n if (iDirection === 'asc') {\n if (!reverse) {\n group.push(\n sql`(${constraintValue} IS NULL OR ${sql.ident(iField)} > ${constraintValue})`,\n 
);\n } else {\n reverse satisfies true;\n group.push(\n sql`(${sql.ident(iField)} IS NULL OR ${sql.ident(iField)} < ${constraintValue})`,\n );\n }\n } else {\n iDirection satisfies 'desc';\n if (!reverse) {\n group.push(\n sql`(${sql.ident(iField)} IS NULL OR ${sql.ident(iField)} < ${constraintValue})`,\n );\n } else {\n reverse satisfies true;\n group.push(\n sql`(${constraintValue} IS NULL OR ${sql.ident(iField)} > ${constraintValue})`,\n );\n }\n }\n } else {\n const [jField] = order[j];\n group.push(\n sql`${sql.ident(jField)} IS ${toSQLiteType(\n from[jField],\n columnTypes[jField].type,\n )}`,\n );\n }\n }\n constraints.push(sql`(${sql.join(group, sql` AND `)})`);\n }\n\n if (basis === 'at') {\n constraints.push(\n sql`(${sql.join(\n order.map(\n s =>\n sql`${sql.ident(s[0])} IS ${toSQLiteType(\n from[s[0]],\n columnTypes[s[0]].type,\n )}`,\n ),\n sql` AND `,\n )})`,\n );\n }\n\n return sql`(${sql.join(constraints, sql` OR `)})`;\n}\n"],"mappings":";;AAwBA,SAAgB,iBACd,WACA,SACA,YACA,SACA,OACA,SACA,OACA;CACA,IAAI,QAAQ,GAAG,UAAU,IAAI,KAC3B,OAAO,KAAK,QAAQ,CAAC,KAAI,MAAK,IAAI,MAAM,EAAE,CAAC,EAC3C,GAAG,IACJ,CAAC,QAAQ,IAAI,MAAM,UAAU;CAC9B,MAAM,cAA0B,iBAAiB,YAAY,QAAQ;AAErE,KAAI,MACF,aAAY,KAAK,uBAAuB,OAAO,SAAS,OAAO,QAAQ,CAAC;AAG1E,KAAI,QACF,aAAY,KAAK,aAAa,QAAQ,CAAC;AAGzC,KAAI,YAAY,SAAS,EACvB,SAAQ,GAAG,GAAG,MAAM,SAAS,IAAI,KAAK,aAAa,GAAG,QAAQ;AAGhE,QAAO,GAAG,GAAG,MAAM,GAAG,aAAa,OAAO,CAAC,CAAC,QAAQ;;AAGtD,SAAgB,iBACd,YACA,SACA;AACA,KAAI,CAAC,WACH,QAAO,EAAE;CAGX,MAAM,cAA0B,EAAE;AAClC,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,WAAW,CACnD,aAAY,KACV,GAAG,GAAG,IAAI,MAAM,IAAI,CAAC,KAAK,aAAa,OAAO,QAAQ,KAAK,KAAK,GACjE;AAGH,QAAO;;AAGT,SAAgB,aAAa,OAAiB,SAA4B;AACxE,KAAI,QACF,QAAO,GAAG,YAAY,IAAI,KACxB,MAAM,KACJ,MACE,GAAG,GAAG,IAAI,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,sBAC3B,EAAE,OAAO,QAAQ,SAAS,MAC3B,GACJ,EACD,GAAG,KACJ;KAED,QAAO,GAAG,YAAY,IAAI,KACxB,MAAM,KACJ,MAAK,GAAG,GAAG,IAAI,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,sBAAsB,EAAE,GAAG,GAC9D,EACD,GAAG,KACJ;;;;;;AAQL,SAAgB,aAAa,SAAwC;AACnE,SAAQ,QAAQ,MAAhB;EAC
E,KAAK,SACH,QAAO,qBAAqB,QAAQ;EACtC,KAAK,MACH,QAAO,QAAQ,WAAW,SAAS,IAC/B,GAAG,IAAI,IAAI,KACT,QAAQ,WAAW,KAAI,cACrB,aAAa,UAAiC,CAC/C,EACD,GAAG,QACJ,CAAC,KACF,GAAG;EACT,KAAK,KACH,QAAO,QAAQ,WAAW,SAAS,IAC/B,GAAG,IAAI,IAAI,KACT,QAAQ,WAAW,KAAI,cACrB,aAAa,UAAiC,CAC/C,EACD,GAAG,OACJ,CAAC,KACF,GAAG;;;AAIb,SAAS,qBAAqB,QAAmC;CAC/D,MAAM,EAAC,OAAM;AACb,KAAI,OAAO,QAAQ,OAAO,SACxB,SAAQ,OAAO,MAAM,MAArB;EACE,KAAK,UACH,QAAO,GAAG,GAAG,mBACX,OAAO,KACR,CAAC,GAAG,IAAI,sBACP,OAAO,GACR,CAAC,gCAAgC,KAAK,UACrC,OAAO,MAAM,MACd,CAAC;EACJ,KAAK,SACH,OAAM,IAAI,MACR,8DACD;;AAGP,QAAO,GAAG,GAAG,mBAAmB,OAAO,KAAK,CAAC,GAAG,IAAI,sBAGlD,OAAO,OAAO,UACV,SACA,OAAO,OAAO,cACZ,aACA,OAAO,GACd,CAAC,GAAG,mBAAmB,OAAO,MAAM;;AAGvC,SAAS,mBAAmB,OAAgC;AAC1D,SAAQ,MAAM,MAAd;EACE,KAAK,SACH,QAAO,IAAI,MAAM,MAAM,KAAK;EAC9B,KAAK,UACH,QAAO,GAAG,GAAG,aAAa,MAAM,OAAO,UAAU,MAAM,MAAM,CAAC;EAChE,KAAK,SACH,OAAM,IAAI,MACR,8DACD;;;AAIP,SAAS,UAAU,OAA2B;AAC5C,KAAI,UAAU,KACZ,QAAO;AAET,QAAO,OAAO,UAAU,WACpB,WACA,OAAO,UAAU,WACf,WACA,OAAO,UAAU,YACf,YACA;;AAGV,SAAgB,aAAa,GAAY,MAA0B;AACjE,SAAQ,MAAR;EACE,KAAK,UACH,QAAO,MAAM,OAAO,OAAO,IAAI,IAAI;EACrC,KAAK;EACL,KAAK;EACL,KAAK,OACH,QAAO;EACT,KAAK,OACH,QAAO,KAAK,UAAU,EAAE;;;;;;;;;;;;;;;;;;;AAoB9B,SAAS,uBACP,OACA,SACA,OACA,aACU;CACV,MAAM,cAA0B,EAAE;CAClC,MAAM,EAAC,KAAK,MAAM,UAAS;AAE3B,MAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;EACrC,MAAM,QAAoB,EAAE;EAC5B,MAAM,CAAC,QAAQ,cAAc,MAAM;AACnC,OAAK,IAAI,IAAI,GAAG,KAAK,GAAG,IACtB,KAAI,MAAM,GAAG;GACX,MAAM,kBAAkB,aACtB,KAAK,SACL,YAAY,QAAQ,KACrB;AACD,OAAI,eAAe,MACjB,KAAI,CAAC,QACH,OAAM,KACJ,GAAG,IAAI,gBAAgB,cAAc,IAAI,MAAM,OAAO,CAAC,KAAK,gBAAgB,GAC7E;OAGD,OAAM,KACJ,GAAG,IAAI,IAAI,MAAM,OAAO,CAAC,cAAc,IAAI,MAAM,OAAO,CAAC,KAAK,gBAAgB,GAC/E;YAIC,CAAC,QACH,OAAM,KACJ,GAAG,IAAI,IAAI,MAAM,OAAO,CAAC,cAAc,IAAI,MAAM,OAAO,CAAC,KAAK,gBAAgB,GAC/E;OAGD,OAAM,KACJ,GAAG,IAAI,gBAAgB,cAAc,IAAI,MAAM,OAAO,CAAC,KAAK,gBAAgB,GAC7E;SAGA;GACL,MAAM,CAAC,UAAU,MAAM;AACvB,SAAM,KACJ,GAAG,GAAG,IAAI,MAAM,OAAO,CAAC,MAAM,aAC5B,KAAK,SACL,YAAY,QAAQ,KACrB,GACF;;AAGL,cAAY,KAAK,GAAG,IAAI,IAAI,KAAK,OAAO,GAAG,QAA
Q,CAAC,GAAG;;AAGzD,KAAI,UAAU,KACZ,aAAY,KACV,GAAG,IAAI,IAAI,KACT,MAAM,KACJ,MACE,GAAG,GAAG,IAAI,MAAM,EAAE,GAAG,CAAC,MAAM,aAC1B,KAAK,EAAE,KACP,YAAY,EAAE,IAAI,KACnB,GACJ,EACD,GAAG,QACJ,CAAC,GACH;AAGH,QAAO,GAAG,IAAI,IAAI,KAAK,aAAa,GAAG,OAAO,CAAC"}
1
+ {"version":3,"file":"query-builder.js","names":[],"sources":["../../../../zqlite/src/query-builder.ts"],"sourcesContent":["import type {SQLQuery} from '@databases/sql';\nimport type {\n Condition,\n Ordering,\n SimpleCondition,\n ValuePosition,\n} from '../../zero-protocol/src/ast.ts';\nimport type {\n SchemaValue,\n ValueType,\n} from '../../zero-schema/src/table-schema.ts';\nimport {assert} from '../../shared/src/asserts.ts';\nimport {sql} from './internal/sql.ts';\nimport type {Constraint} from '../../zql/src/ivm/constraint.ts';\nimport type {Start} from '../../zql/src/ivm/operator.ts';\n\n/**\n * Condition type without correlated subqueries.\n * This matches the output of transformFilters from zql/builder/filter.ts\n */\nexport type NoSubqueryCondition = Exclude<\n Condition,\n {type: 'correlatedSubquery'}\n>;\n\nexport function buildSelectQuery(\n tableName: string,\n columns: Record<string, SchemaValue>,\n constraint: Constraint | undefined,\n filters: NoSubqueryCondition | undefined,\n order: Ordering | undefined,\n reverse: boolean | undefined,\n start: Start | undefined,\n) {\n let query = sql`SELECT ${sql.join(\n Object.keys(columns).map(c => sql.ident(c)),\n sql`,`,\n )} FROM ${sql.ident(tableName)}`;\n const constraints: SQLQuery[] = constraintsToSQL(constraint, columns);\n\n if (start) {\n assert(order !== undefined, 'start requires ordering');\n constraints.push(gatherStartConstraints(start, reverse, order, columns));\n }\n\n if (filters) {\n constraints.push(filtersToSQL(filters));\n }\n\n if (constraints.length > 0) {\n query = sql`${query} WHERE ${sql.join(constraints, sql` AND `)}`;\n }\n\n if (order && order.length > 0) {\n return sql`${query} ${orderByToSQL(order, !!reverse)}`;\n }\n return query;\n}\n\nexport function constraintsToSQL(\n constraint: Constraint | undefined,\n columns: Record<string, SchemaValue>,\n) {\n if (!constraint) {\n return [];\n }\n\n const constraints: SQLQuery[] = [];\n for (const [key, value] of 
Object.entries(constraint)) {\n constraints.push(\n sql`${sql.ident(key)} = ${toSQLiteType(value, columns[key].type)}`,\n );\n }\n\n return constraints;\n}\n\nexport function orderByToSQL(order: Ordering, reverse: boolean): SQLQuery {\n if (reverse) {\n return sql`ORDER BY ${sql.join(\n order.map(\n s =>\n sql`${sql.ident(s[0])} ${sql.__dangerous__rawValue(\n s[1] === 'asc' ? 'desc' : 'asc',\n )}`,\n ),\n sql`, `,\n )}`;\n } else {\n return sql`ORDER BY ${sql.join(\n order.map(\n s => sql`${sql.ident(s[0])} ${sql.__dangerous__rawValue(s[1])}`,\n ),\n sql`, `,\n )}`;\n }\n}\n\n/**\n * Converts filters (conditions) to SQL WHERE clause.\n * This applies all filters present in the AST for a query to the source.\n */\nexport function filtersToSQL(filters: NoSubqueryCondition): SQLQuery {\n switch (filters.type) {\n case 'simple':\n return simpleConditionToSQL(filters);\n case 'and':\n return filters.conditions.length > 0\n ? sql`(${sql.join(\n filters.conditions.map(condition =>\n filtersToSQL(condition as NoSubqueryCondition),\n ),\n sql` AND `,\n )})`\n : sql`TRUE`;\n case 'or':\n return filters.conditions.length > 0\n ? sql`(${sql.join(\n filters.conditions.map(condition =>\n filtersToSQL(condition as NoSubqueryCondition),\n ),\n sql` OR `,\n )})`\n : sql`FALSE`;\n }\n}\n\nfunction simpleConditionToSQL(filter: SimpleCondition): SQLQuery {\n const {op} = filter;\n if (op === 'IN' || op === 'NOT IN') {\n switch (filter.right.type) {\n case 'literal':\n return sql`${valuePositionToSQL(\n filter.left,\n )} ${sql.__dangerous__rawValue(\n filter.op,\n )} (SELECT value FROM json_each(${JSON.stringify(\n filter.right.value,\n )}))`;\n case 'static':\n throw new Error(\n 'Static parameters must be replaced before conversion to SQL',\n );\n }\n }\n return sql`${valuePositionToSQL(filter.left)} ${sql.__dangerous__rawValue(\n // SQLite's LIKE operator is case-insensitive by default, so we\n // convert ILIKE to LIKE and NOT ILIKE to NOT LIKE.\n filter.op === 'ILIKE'\n ? 
'LIKE'\n : filter.op === 'NOT ILIKE'\n ? 'NOT LIKE'\n : filter.op,\n )} ${valuePositionToSQL(filter.right)}`;\n}\n\nfunction valuePositionToSQL(value: ValuePosition): SQLQuery {\n switch (value.type) {\n case 'column':\n return sql.ident(value.name);\n case 'literal':\n return sql`${toSQLiteType(value.value, getJsType(value.value))}`;\n case 'static':\n throw new Error(\n 'Static parameters must be replaced before conversion to SQL',\n );\n }\n}\n\nfunction getJsType(value: unknown): ValueType {\n if (value === null) {\n return 'null';\n }\n return typeof value === 'string'\n ? 'string'\n : typeof value === 'number'\n ? 'number'\n : typeof value === 'boolean'\n ? 'boolean'\n : 'json';\n}\n\nexport function toSQLiteType(v: unknown, type: ValueType): unknown {\n switch (type) {\n case 'boolean':\n return v === null ? null : v ? 1 : 0;\n case 'number':\n case 'string':\n case 'null':\n return v;\n case 'json':\n return JSON.stringify(v);\n }\n}\n\n/**\n * The ordering could be complex such as:\n * `ORDER BY a ASC, b DESC, c ASC`\n *\n * In those cases, we need to encode the constraints as various\n * `OR` clauses.\n *\n * E.g.,\n *\n * to get the row after (a = 1, b = 2, c = 3) would be:\n *\n * `WHERE a > 1 OR (a = 1 AND b < 2) OR (a = 1 AND b = 2 AND c > 3)`\n *\n * - after vs before flips the comparison operators.\n * - inclusive adds a final `OR` clause for the exact match.\n */\nfunction gatherStartConstraints(\n start: Start,\n reverse: boolean | undefined,\n order: Ordering,\n columnTypes: Record<string, SchemaValue>,\n): SQLQuery {\n const constraints: SQLQuery[] = [];\n const {row: from, basis} = start;\n\n for (let i = 0; i < order.length; i++) {\n const group: SQLQuery[] = [];\n const [iField, iDirection] = order[i];\n for (let j = 0; j <= i; j++) {\n if (j === i) {\n const constraintValue = toSQLiteType(\n from[iField],\n columnTypes[iField].type,\n );\n if (iDirection === 'asc') {\n if (!reverse) {\n group.push(\n sql`(${constraintValue} IS NULL OR 
${sql.ident(iField)} > ${constraintValue})`,\n );\n } else {\n reverse satisfies true;\n group.push(\n sql`(${sql.ident(iField)} IS NULL OR ${sql.ident(iField)} < ${constraintValue})`,\n );\n }\n } else {\n iDirection satisfies 'desc';\n if (!reverse) {\n group.push(\n sql`(${sql.ident(iField)} IS NULL OR ${sql.ident(iField)} < ${constraintValue})`,\n );\n } else {\n reverse satisfies true;\n group.push(\n sql`(${constraintValue} IS NULL OR ${sql.ident(iField)} > ${constraintValue})`,\n );\n }\n }\n } else {\n const [jField] = order[j];\n group.push(\n sql`${sql.ident(jField)} IS ${toSQLiteType(\n from[jField],\n columnTypes[jField].type,\n )}`,\n );\n }\n }\n constraints.push(sql`(${sql.join(group, sql` AND `)})`);\n }\n\n if (basis === 'at') {\n constraints.push(\n sql`(${sql.join(\n order.map(\n s =>\n sql`${sql.ident(s[0])} IS ${toSQLiteType(\n from[s[0]],\n columnTypes[s[0]].type,\n )}`,\n ),\n sql` AND `,\n )})`,\n );\n }\n\n return sql`(${sql.join(constraints, sql` OR `)})`;\n}\n"],"mappings":";;;AAyBA,SAAgB,iBACd,WACA,SACA,YACA,SACA,OACA,SACA,OACA;CACA,IAAI,QAAQ,GAAG,UAAU,IAAI,KAC3B,OAAO,KAAK,QAAQ,CAAC,KAAI,MAAK,IAAI,MAAM,EAAE,CAAC,EAC3C,GAAG,IACJ,CAAC,QAAQ,IAAI,MAAM,UAAU;CAC9B,MAAM,cAA0B,iBAAiB,YAAY,QAAQ;AAErE,KAAI,OAAO;AACT,SAAO,UAAU,KAAA,GAAW,0BAA0B;AACtD,cAAY,KAAK,uBAAuB,OAAO,SAAS,OAAO,QAAQ,CAAC;;AAG1E,KAAI,QACF,aAAY,KAAK,aAAa,QAAQ,CAAC;AAGzC,KAAI,YAAY,SAAS,EACvB,SAAQ,GAAG,GAAG,MAAM,SAAS,IAAI,KAAK,aAAa,GAAG,QAAQ;AAGhE,KAAI,SAAS,MAAM,SAAS,EAC1B,QAAO,GAAG,GAAG,MAAM,GAAG,aAAa,OAAO,CAAC,CAAC,QAAQ;AAEtD,QAAO;;AAGT,SAAgB,iBACd,YACA,SACA;AACA,KAAI,CAAC,WACH,QAAO,EAAE;CAGX,MAAM,cAA0B,EAAE;AAClC,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,WAAW,CACnD,aAAY,KACV,GAAG,GAAG,IAAI,MAAM,IAAI,CAAC,KAAK,aAAa,OAAO,QAAQ,KAAK,KAAK,GACjE;AAGH,QAAO;;AAGT,SAAgB,aAAa,OAAiB,SAA4B;AACxE,KAAI,QACF,QAAO,GAAG,YAAY,IAAI,KACxB,MAAM,KACJ,MACE,GAAG,GAAG,IAAI,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,sBAC3B,EAAE,OAAO,QAAQ,SAAS,MAC3B,GACJ,EACD,GAAG,KACJ;KAED,QAAO,GAAG,YAAY,IAAI,KACxB,MAAM,KACJ,MAAK,GAAG,GAAG,IA
AI,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,sBAAsB,EAAE,GAAG,GAC9D,EACD,GAAG,KACJ;;;;;;AAQL,SAAgB,aAAa,SAAwC;AACnE,SAAQ,QAAQ,MAAhB;EACE,KAAK,SACH,QAAO,qBAAqB,QAAQ;EACtC,KAAK,MACH,QAAO,QAAQ,WAAW,SAAS,IAC/B,GAAG,IAAI,IAAI,KACT,QAAQ,WAAW,KAAI,cACrB,aAAa,UAAiC,CAC/C,EACD,GAAG,QACJ,CAAC,KACF,GAAG;EACT,KAAK,KACH,QAAO,QAAQ,WAAW,SAAS,IAC/B,GAAG,IAAI,IAAI,KACT,QAAQ,WAAW,KAAI,cACrB,aAAa,UAAiC,CAC/C,EACD,GAAG,OACJ,CAAC,KACF,GAAG;;;AAIb,SAAS,qBAAqB,QAAmC;CAC/D,MAAM,EAAC,OAAM;AACb,KAAI,OAAO,QAAQ,OAAO,SACxB,SAAQ,OAAO,MAAM,MAArB;EACE,KAAK,UACH,QAAO,GAAG,GAAG,mBACX,OAAO,KACR,CAAC,GAAG,IAAI,sBACP,OAAO,GACR,CAAC,gCAAgC,KAAK,UACrC,OAAO,MAAM,MACd,CAAC;EACJ,KAAK,SACH,OAAM,IAAI,MACR,8DACD;;AAGP,QAAO,GAAG,GAAG,mBAAmB,OAAO,KAAK,CAAC,GAAG,IAAI,sBAGlD,OAAO,OAAO,UACV,SACA,OAAO,OAAO,cACZ,aACA,OAAO,GACd,CAAC,GAAG,mBAAmB,OAAO,MAAM;;AAGvC,SAAS,mBAAmB,OAAgC;AAC1D,SAAQ,MAAM,MAAd;EACE,KAAK,SACH,QAAO,IAAI,MAAM,MAAM,KAAK;EAC9B,KAAK,UACH,QAAO,GAAG,GAAG,aAAa,MAAM,OAAO,UAAU,MAAM,MAAM,CAAC;EAChE,KAAK,SACH,OAAM,IAAI,MACR,8DACD;;;AAIP,SAAS,UAAU,OAA2B;AAC5C,KAAI,UAAU,KACZ,QAAO;AAET,QAAO,OAAO,UAAU,WACpB,WACA,OAAO,UAAU,WACf,WACA,OAAO,UAAU,YACf,YACA;;AAGV,SAAgB,aAAa,GAAY,MAA0B;AACjE,SAAQ,MAAR;EACE,KAAK,UACH,QAAO,MAAM,OAAO,OAAO,IAAI,IAAI;EACrC,KAAK;EACL,KAAK;EACL,KAAK,OACH,QAAO;EACT,KAAK,OACH,QAAO,KAAK,UAAU,EAAE;;;;;;;;;;;;;;;;;;;AAoB9B,SAAS,uBACP,OACA,SACA,OACA,aACU;CACV,MAAM,cAA0B,EAAE;CAClC,MAAM,EAAC,KAAK,MAAM,UAAS;AAE3B,MAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;EACrC,MAAM,QAAoB,EAAE;EAC5B,MAAM,CAAC,QAAQ,cAAc,MAAM;AACnC,OAAK,IAAI,IAAI,GAAG,KAAK,GAAG,IACtB,KAAI,MAAM,GAAG;GACX,MAAM,kBAAkB,aACtB,KAAK,SACL,YAAY,QAAQ,KACrB;AACD,OAAI,eAAe,MACjB,KAAI,CAAC,QACH,OAAM,KACJ,GAAG,IAAI,gBAAgB,cAAc,IAAI,MAAM,OAAO,CAAC,KAAK,gBAAgB,GAC7E;OAGD,OAAM,KACJ,GAAG,IAAI,IAAI,MAAM,OAAO,CAAC,cAAc,IAAI,MAAM,OAAO,CAAC,KAAK,gBAAgB,GAC/E;YAIC,CAAC,QACH,OAAM,KACJ,GAAG,IAAI,IAAI,MAAM,OAAO,CAAC,cAAc,IAAI,MAAM,OAAO,CAAC,KAAK,gBAAgB,GAC/E;OAGD,OAAM,KACJ,GAAG,IAAI,gBAAgB,cAAc,IAAI,MAAM,OAAO,CAAC,KAAK,gBAAgB,GAC7E;SAGA;GACL,MAAM,CAAC,UAAU,MAAM;AACvB,SAAM,KAC
J,GAAG,GAAG,IAAI,MAAM,OAAO,CAAC,MAAM,aAC5B,KAAK,SACL,YAAY,QAAQ,KACrB,GACF;;AAGL,cAAY,KAAK,GAAG,IAAI,IAAI,KAAK,OAAO,GAAG,QAAQ,CAAC,GAAG;;AAGzD,KAAI,UAAU,KACZ,aAAY,KACV,GAAG,IAAI,IAAI,KACT,MAAM,KACJ,MACE,GAAG,GAAG,IAAI,MAAM,EAAE,GAAG,CAAC,MAAM,aAC1B,KAAK,EAAE,KACP,YAAY,EAAE,IAAI,KACnB,GACJ,EACD,GAAG,QACJ,CAAC,GACH;AAGH,QAAO,GAAG,IAAI,IAAI,KAAK,aAAa,GAAG,OAAO,CAAC"}
@@ -41,7 +41,7 @@ export declare class TableSource implements Source {
41
41
  * algorithm for concurrent traversal of historic timelines.
42
42
  */
43
43
  setDB(db: Database): void;
44
- connect(sort: Ordering, filters?: Condition, splitEditKeys?: Set<string>, debug?: DebugDelegate): SourceInput;
44
+ connect(sort: Ordering | undefined, filters?: Condition, splitEditKeys?: Set<string>, debug?: DebugDelegate): SourceInput;
45
45
  toSQLiteRow(row: Row): Row;
46
46
  push(change: SourceChange): Stream<'yield'>;
47
47
  genPush(change: SourceChange): Generator<"yield" | undefined, void, unknown>;
@@ -1 +1 @@
1
- {"version":3,"file":"table-source.d.ts","sourceRoot":"","sources":["../../../../zqlite/src/table-source.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAEjD,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,+BAA+B,CAAC;AAK7D,OAAO,KAAK,EAAC,SAAS,EAAE,QAAQ,EAAC,MAAM,gCAAgC,CAAC;AACxE,OAAO,KAAK,EAAC,GAAG,EAAQ,MAAM,iCAAiC,CAAC;AAChE,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,wCAAwC,CAAC;AACvE,OAAO,KAAK,EACV,WAAW,EACX,SAAS,EACV,MAAM,uCAAuC,CAAC;AAC/C,OAAO,KAAK,EAAC,aAAa,EAAC,MAAM,yCAAyC,CAAC;AAe3E,OAAO,EACL,KAAK,MAAM,EACX,KAAK,YAAY,EACjB,KAAK,WAAW,EACjB,MAAM,6BAA6B,CAAC;AACrC,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,6BAA6B,CAAC;AACxD,OAAO,KAAK,EAAC,QAAQ,EAAY,MAAM,SAAS,CAAC;AAqBjD;;;;;;;;;;;;;GAaG;AACH,qBAAa,WAAY,YAAW,MAAM;;IAexC;;;;;OAKG;gBAED,UAAU,EAAE,UAAU,EACtB,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,QAAQ,EACZ,SAAS,EAAE,MAAM,EACjB,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,EACpC,UAAU,EAAE,UAAU,EACtB,WAAW,gBAAc;IAiB3B,IAAI,WAAW;;;;MAMd;IAED;;;OAGG;IACH,KAAK,CAAC,EAAE,EAAE,QAAQ;IA4FlB,OAAO,CACL,IAAI,EAAE,QAAQ,EACd,OAAO,CAAC,EAAE,SAAS,EACnB,aAAa,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC,EAC3B,KAAK,CAAC,EAAE,aAAa;IAuCvB,WAAW,CAAC,GAAG,EAAE,GAAG,GAAG,GAAG;IAqGzB,IAAI,CAAC,MAAM,EAAE,YAAY,GAAG,MAAM,CAAC,OAAO,CAAC;IAQ3C,OAAO,CAAC,MAAM,EAAE,YAAY;IA4F7B;;;;;;OAMG;IACH,MAAM,CAAC,MAAM,EAAE,GAAG,GAAG,GAAG,GAAG,SAAS;CA8BrC;AA6BD,wBAAgB,aAAa,CAC3B,OAAO,EAAE,SAAS,MAAM,EAAE,EAC1B,GAAG,EAAE,GAAG,EACR,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,GACvC,SAAS,OAAO,EAAE,CAEpB;AAED,wBAAgB,gBAAgB,CAAC,IAAI,EAAE,SAAS,wCAa/C;AAED,wBAAgB,eAAe,CAC7B,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,EACvC,GAAG,EAAE,GAAG,EACR,SAAS,EAAE,MAAM,GAChB,GAAG,CAaL;AAwCD,qBAAa,qBAAsB,SAAQ,KAAK;CAAG"}
1
+ {"version":3,"file":"table-source.d.ts","sourceRoot":"","sources":["../../../../zqlite/src/table-source.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,kBAAkB,CAAC;AAEjD,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,+BAA+B,CAAC;AAK7D,OAAO,KAAK,EAAC,SAAS,EAAE,QAAQ,EAAC,MAAM,gCAAgC,CAAC;AACxE,OAAO,KAAK,EAAC,GAAG,EAAQ,MAAM,iCAAiC,CAAC;AAChE,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,wCAAwC,CAAC;AACvE,OAAO,KAAK,EACV,WAAW,EACX,SAAS,EACV,MAAM,uCAAuC,CAAC;AAC/C,OAAO,KAAK,EAAC,aAAa,EAAC,MAAM,yCAAyC,CAAC;AAgB3E,OAAO,EACL,KAAK,MAAM,EACX,KAAK,YAAY,EACjB,KAAK,WAAW,EACjB,MAAM,6BAA6B,CAAC;AACrC,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,6BAA6B,CAAC;AACxD,OAAO,KAAK,EAAC,QAAQ,EAAY,MAAM,SAAS,CAAC;AAqBjD;;;;;;;;;;;;;GAaG;AACH,qBAAa,WAAY,YAAW,MAAM;;IAexC;;;;;OAKG;gBAED,UAAU,EAAE,UAAU,EACtB,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,QAAQ,EACZ,SAAS,EAAE,MAAM,EACjB,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,EACpC,UAAU,EAAE,UAAU,EACtB,WAAW,gBAAc;IAiB3B,IAAI,WAAW;;;;MAMd;IAED;;;OAGG;IACH,KAAK,CAAC,EAAE,EAAE,QAAQ;IA4FlB,OAAO,CACL,IAAI,EAAE,QAAQ,GAAG,SAAS,EAC1B,OAAO,CAAC,EAAE,SAAS,EACnB,aAAa,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC,EAC3B,KAAK,CAAC,EAAE,aAAa;IA8CvB,WAAW,CAAC,GAAG,EAAE,GAAG,GAAG,GAAG;IA6HzB,IAAI,CAAC,MAAM,EAAE,YAAY,GAAG,MAAM,CAAC,OAAO,CAAC;IAQ3C,OAAO,CAAC,MAAM,EAAE,YAAY;IA4F7B;;;;;;OAMG;IACH,MAAM,CAAC,MAAM,EAAE,GAAG,GAAG,GAAG,GAAG,SAAS;CA8BrC;AA6BD,wBAAgB,aAAa,CAC3B,OAAO,EAAE,SAAS,MAAM,EAAE,EAC1B,GAAG,EAAE,GAAG,EACR,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,GACvC,SAAS,OAAO,EAAE,CAEpB;AAED,wBAAgB,gBAAgB,CAAC,IAAI,EAAE,SAAS,wCAa/C;AAED,wBAAgB,eAAe,CAC7B,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,EACvC,GAAG,EAAE,GAAG,EACR,SAAS,EAAE,MAAM,GAChB,GAAG,CAaL;AAwCD,qBAAa,qBAAsB,SAAQ,KAAK;CAAG"}
@@ -3,7 +3,7 @@ import { must } from "../../shared/src/must.js";
3
3
  import { makeComparator } from "../../zql/src/ivm/data.js";
4
4
  import { assertOrderingIncludesPK } from "../../zql/src/query/complete-ordering.js";
5
5
  import { createPredicate, transformFilters } from "../../zql/src/builder/filter.js";
6
- import { genPushAndWriteWithSplitEdit, generateWithOverlay, generateWithStart } from "../../zql/src/ivm/memory-source.js";
6
+ import { genPushAndWriteWithSplitEdit, generateWithOverlay, generateWithOverlayUnordered, generateWithStart } from "../../zql/src/ivm/memory-source.js";
7
7
  import { timeSampled } from "../../otel/src/maybe-time.js";
8
8
  import { compile, format, sql } from "./internal/sql.js";
9
9
  import { StatementCache } from "./internal/statement-cache.js";
@@ -86,12 +86,12 @@ var TableSource = class {
86
86
  get #allColumns() {
87
87
  return sql.join(Object.keys(this.#columns).map((c) => sql.ident(c)), sql`,`);
88
88
  }
89
- #getSchema(connection) {
89
+ #getSchema(connection, unordered) {
90
90
  return {
91
91
  tableName: this.#table,
92
92
  columns: this.#columns,
93
93
  primaryKey: this.#primaryKey,
94
- sort: connection.sort,
94
+ sort: unordered ? void 0 : connection.sort,
95
95
  relationships: {},
96
96
  isHidden: false,
97
97
  system: "client",
@@ -100,6 +100,8 @@ var TableSource = class {
100
100
  }
101
101
  connect(sort, filters, splitEditKeys, debug) {
102
102
  const transformedFilters = transformFilters(filters);
103
+ const unordered = sort === void 0;
104
+ const primaryKeySort = this.#primaryKey.map((k) => [k, "asc"]);
103
105
  const input = {
104
106
  getSchema: () => schema,
105
107
  fetch: (req) => this.#fetch(req, connection),
@@ -123,11 +125,11 @@ var TableSource = class {
123
125
  condition: transformedFilters.filters,
124
126
  predicate: createPredicate(transformedFilters.filters)
125
127
  } : void 0,
126
- compareRows: makeComparator(sort),
128
+ compareRows: sort ? makeComparator(sort) : makeComparator(primaryKeySort),
127
129
  lastPushedEpoch: 0
128
130
  };
129
- const schema = this.#getSchema(connection);
130
- assertOrderingIncludesPK(sort, this.#primaryKey);
131
+ const schema = this.#getSchema(connection, unordered);
132
+ if (!unordered) assertOrderingIncludesPK(sort, this.#primaryKey);
131
133
  this.#connections.push(connection);
132
134
  return input;
133
135
  }
@@ -138,13 +140,16 @@ var TableSource = class {
138
140
  const { sort, debug } = connection;
139
141
  const sqlAndBindings = format(this.#requestToSQL(req, connection.filters?.condition, sort));
140
142
  const cachedStatement = this.#stmts.cache.get(sqlAndBindings.text);
143
+ cachedStatement.statement.safeIntegers(true);
144
+ const rowIterator = cachedStatement.statement.iterate(...sqlAndBindings.values);
141
145
  try {
142
- cachedStatement.statement.safeIntegers(true);
143
- const rowIterator = cachedStatement.statement.iterate(...sqlAndBindings.values);
144
- const comparator = makeComparator(sort, req.reverse);
145
146
  debug?.initQuery(this.#table, sqlAndBindings.text);
146
- yield* generateWithStart(generateWithYields(generateWithOverlay(req.start?.row, this.#mapFromSQLiteTypes(this.#columns, rowIterator, sqlAndBindings.text, debug), req.constraint, this.#overlay, connection.lastPushedEpoch, comparator, connection.filters?.predicate), this.#shouldYield), req.start, comparator);
147
+ if (sort) {
148
+ const comparator = makeComparator(sort, req.reverse);
149
+ yield* generateWithStart(generateWithYields(generateWithOverlay(req.start?.row, this.#mapFromSQLiteTypes(this.#columns, rowIterator, sqlAndBindings.text, debug), req.constraint, this.#overlay, connection.lastPushedEpoch, comparator, connection.filters?.predicate), this.#shouldYield), req.start, comparator);
150
+ } else yield* generateWithYields(generateWithOverlayUnordered(this.#mapFromSQLiteTypes(this.#columns, rowIterator, sqlAndBindings.text, debug), req.constraint, this.#overlay, connection.lastPushedEpoch, this.#primaryKey, connection.filters?.predicate), this.#shouldYield);
147
151
  } finally {
152
+ rowIterator.return?.();
148
153
  if (debug) {
149
154
  let totalNvisit = 0;
150
155
  let i = 0;
@@ -1 +1 @@
1
- {"version":3,"file":"table-source.js","names":["#dbCache","#connections","#table","#columns","#uniqueIndexes","#primaryKey","#logConfig","#lc","#shouldYield","#stmts","#getStatementsFor","#allColumns","#fetch","#getSchema","#requestToSQL","#mapFromSQLiteTypes","#overlay","#writeChange","#pushEpoch","#getRowStmtCache","#getRowStmt"],"sources":["../../../../zqlite/src/table-source.ts"],"sourcesContent":["import type {SQLQuery} from '@databases/sql';\nimport type {LogContext} from '@rocicorp/logger';\nimport SQLite3Database from '@rocicorp/zero-sqlite3';\nimport type {LogConfig} from '../../otel/src/log-options.ts';\nimport {timeSampled} from '../../otel/src/maybe-time.ts';\nimport {assert, unreachable} from '../../shared/src/asserts.ts';\nimport {must} from '../../shared/src/must.ts';\nimport type {Writable} from '../../shared/src/writable.ts';\nimport type {Condition, Ordering} from '../../zero-protocol/src/ast.ts';\nimport type {Row, Value} from '../../zero-protocol/src/data.ts';\nimport type {PrimaryKey} from '../../zero-protocol/src/primary-key.ts';\nimport type {\n SchemaValue,\n ValueType,\n} from '../../zero-schema/src/table-schema.ts';\nimport type {DebugDelegate} from '../../zql/src/builder/debug-delegate.ts';\nimport {\n createPredicate,\n transformFilters,\n} from '../../zql/src/builder/filter.ts';\nimport {makeComparator, type Node} from '../../zql/src/ivm/data.ts';\nimport {\n generateWithOverlay,\n generateWithStart,\n genPushAndWriteWithSplitEdit,\n type Connection,\n type Overlay,\n} from '../../zql/src/ivm/memory-source.ts';\nimport {type FetchRequest} from '../../zql/src/ivm/operator.ts';\nimport type {SourceSchema} from '../../zql/src/ivm/schema.ts';\nimport {\n type Source,\n type SourceChange,\n type SourceInput,\n} from '../../zql/src/ivm/source.ts';\nimport type {Stream} from '../../zql/src/ivm/stream.ts';\nimport type {Database, Statement} from './db.ts';\nimport {compile, format, sql} from './internal/sql.ts';\nimport {StatementCache} from 
'./internal/statement-cache.ts';\nimport {\n buildSelectQuery,\n toSQLiteType,\n type NoSubqueryCondition,\n} from './query-builder.ts';\nimport {assertOrderingIncludesPK} from '../../zql/src/query/complete-ordering.ts';\n\ntype Statements = {\n readonly cache: StatementCache;\n readonly insert: Statement;\n readonly delete: Statement;\n readonly update: Statement | undefined;\n readonly checkExists: Statement;\n readonly getExisting: Statement;\n};\n\nlet eventCount = 0;\n\n/**\n * A source that is backed by a SQLite table.\n *\n * Values are written to the backing table _after_ being vended by the source.\n *\n * This ordering of events is to ensure self joins function properly. That is,\n * we can't reveal a value to an output before it has been pushed to that output.\n *\n * The code is fairly straightforward except for:\n * 1. Dealing with a `fetch` that has a basis of `before`.\n * 2. Dealing with compound orders that have differing directions (a ASC, b DESC, c ASC)\n *\n * See comments in relevant functions for more details.\n */\nexport class TableSource implements Source {\n readonly #dbCache = new WeakMap<Database, Statements>();\n readonly #connections: Connection[] = [];\n readonly #table: string;\n readonly #columns: Record<string, SchemaValue>;\n // Maps sorted columns JSON string (e.g. '[\"a\",\"b\"]) to Set of columns.\n readonly #uniqueIndexes: Map<string, Set<string>>;\n readonly #primaryKey: PrimaryKey;\n readonly #logConfig: LogConfig;\n readonly #lc: LogContext;\n readonly #shouldYield: () => boolean;\n #stmts: Statements;\n #overlay?: Overlay | undefined;\n #pushEpoch = 0;\n\n /**\n * @param shouldYield a function called after each row is read from the database,\n * which should return true if the source should yield the special 'yield' value\n * to yield control back to the caller at the end of the pipeline. 
Can\n * also throw an error to abort the pipeline processing.\n */\n constructor(\n logContext: LogContext,\n logConfig: LogConfig,\n db: Database,\n tableName: string,\n columns: Record<string, SchemaValue>,\n primaryKey: PrimaryKey,\n shouldYield = () => false,\n ) {\n this.#lc = logContext;\n this.#logConfig = logConfig;\n this.#table = tableName;\n this.#columns = columns;\n this.#uniqueIndexes = getUniqueIndexes(db, tableName);\n this.#primaryKey = primaryKey;\n this.#stmts = this.#getStatementsFor(db);\n this.#shouldYield = shouldYield;\n\n assert(\n this.#uniqueIndexes.has(JSON.stringify([...primaryKey].sort())),\n `primary key ${primaryKey} does not have a UNIQUE index`,\n );\n }\n\n get tableSchema() {\n return {\n name: this.#table,\n columns: this.#columns,\n primaryKey: this.#primaryKey,\n };\n }\n\n /**\n * Sets the db (snapshot) to use, to facilitate the Snapshotter leapfrog\n * algorithm for concurrent traversal of historic timelines.\n */\n setDB(db: Database) {\n this.#stmts = this.#getStatementsFor(db);\n }\n\n #getStatementsFor(db: Database) {\n const cached = this.#dbCache.get(db);\n if (cached) {\n return cached;\n }\n\n const stmts = {\n cache: new StatementCache(db),\n insert: db.prepare(\n compile(\n sql`INSERT INTO ${sql.ident(this.#table)} (${sql.join(\n Object.keys(this.#columns).map(c => sql.ident(c)),\n ', ',\n )}) VALUES (${sql.__dangerous__rawValue(\n Array.from({length: Object.keys(this.#columns).length})\n .fill('?')\n .join(','),\n )})`,\n ),\n ),\n delete: db.prepare(\n compile(\n sql`DELETE FROM ${sql.ident(this.#table)} WHERE ${sql.join(\n this.#primaryKey.map(k => sql`${sql.ident(k)}=?`),\n ' AND ',\n )}`,\n ),\n ),\n // If all the columns are part of the primary key, we cannot use UPDATE.\n update:\n Object.keys(this.#columns).length > this.#primaryKey.length\n ? 
db.prepare(\n compile(\n sql`UPDATE ${sql.ident(this.#table)} SET ${sql.join(\n nonPrimaryKeys(this.#columns, this.#primaryKey).map(\n c => sql`${sql.ident(c)}=?`,\n ),\n ',',\n )} WHERE ${sql.join(\n this.#primaryKey.map(k => sql`${sql.ident(k)}=?`),\n ' AND ',\n )}`,\n ),\n )\n : undefined,\n checkExists: db.prepare(\n compile(\n sql`SELECT 1 AS \"exists\" FROM ${sql.ident(\n this.#table,\n )} WHERE ${sql.join(\n this.#primaryKey.map(k => sql`${sql.ident(k)}=?`),\n ' AND ',\n )} LIMIT 1`,\n ),\n ),\n getExisting: db.prepare(\n compile(\n sql`SELECT * FROM ${sql.ident(this.#table)} WHERE ${sql.join(\n this.#primaryKey.map(k => sql`${sql.ident(k)}=?`),\n ' AND ',\n )}`,\n ),\n ),\n };\n this.#dbCache.set(db, stmts);\n return stmts;\n }\n\n get #allColumns() {\n return sql.join(\n Object.keys(this.#columns).map(c => sql.ident(c)),\n sql`,`,\n );\n }\n\n #getSchema(connection: Connection): SourceSchema {\n return {\n tableName: this.#table,\n columns: this.#columns,\n primaryKey: this.#primaryKey,\n sort: connection.sort,\n relationships: {},\n isHidden: false,\n system: 'client',\n compareRows: connection.compareRows,\n };\n }\n\n connect(\n sort: Ordering,\n filters?: Condition,\n splitEditKeys?: Set<string>,\n debug?: DebugDelegate,\n ) {\n const transformedFilters = transformFilters(filters);\n const input: SourceInput = {\n getSchema: () => schema,\n fetch: req => this.#fetch(req, connection),\n setOutput: output => {\n connection.output = output;\n },\n destroy: () => {\n const idx = this.#connections.indexOf(connection);\n assert(idx !== -1, 'Connection not found');\n this.#connections.splice(idx, 1);\n },\n fullyAppliedFilters: !transformedFilters.conditionsRemoved,\n };\n\n const connection: Connection = {\n input,\n debug,\n output: undefined,\n sort,\n splitEditKeys,\n filters: transformedFilters.filters\n ? 
{\n condition: transformedFilters.filters,\n predicate: createPredicate(transformedFilters.filters),\n }\n : undefined,\n compareRows: makeComparator(sort),\n lastPushedEpoch: 0,\n };\n const schema = this.#getSchema(connection);\n assertOrderingIncludesPK(sort, this.#primaryKey);\n\n this.#connections.push(connection);\n return input;\n }\n\n toSQLiteRow(row: Row): Row {\n return Object.fromEntries(\n Object.entries(row).map(([key, value]) => [\n key,\n toSQLiteType(value, this.#columns[key].type),\n ]),\n ) as Row;\n }\n\n *#fetch(req: FetchRequest, connection: Connection): Stream<Node | 'yield'> {\n const {sort, debug} = connection;\n\n const query = this.#requestToSQL(req, connection.filters?.condition, sort);\n const sqlAndBindings = format(query);\n\n const cachedStatement = this.#stmts.cache.get(sqlAndBindings.text);\n try {\n cachedStatement.statement.safeIntegers(true);\n const rowIterator = cachedStatement.statement.iterate<Row>(\n ...sqlAndBindings.values,\n );\n\n const comparator = makeComparator(sort, req.reverse);\n\n debug?.initQuery(this.#table, sqlAndBindings.text);\n\n yield* generateWithStart(\n generateWithYields(\n generateWithOverlay(\n req.start?.row,\n this.#mapFromSQLiteTypes(\n this.#columns,\n rowIterator,\n sqlAndBindings.text,\n debug,\n ),\n req.constraint,\n this.#overlay,\n connection.lastPushedEpoch,\n comparator,\n connection.filters?.predicate,\n ),\n this.#shouldYield,\n ),\n req.start,\n comparator,\n );\n } finally {\n if (debug) {\n let totalNvisit = 0;\n let i = 0;\n while (true) {\n const nvisit = cachedStatement.statement.scanStatus(\n i++,\n SQLite3Database.SQLITE_SCANSTAT_NVISIT,\n 1,\n );\n if (nvisit === undefined) {\n break;\n }\n totalNvisit += Number(nvisit);\n }\n if (totalNvisit !== 0) {\n debug.recordNVisit(this.#table, sqlAndBindings.text, totalNvisit);\n }\n cachedStatement.statement.scanStatusReset();\n }\n this.#stmts.cache.return(cachedStatement);\n }\n }\n\n *#mapFromSQLiteTypes(\n valueTypes: 
Record<string, SchemaValue>,\n rowIterator: IterableIterator<Row>,\n query: string,\n debug: DebugDelegate | undefined,\n ): IterableIterator<Row> {\n let result;\n try {\n do {\n result = timeSampled(\n this.#lc,\n ++eventCount,\n this.#logConfig.ivmSampling,\n () => rowIterator.next(),\n this.#logConfig.slowRowThreshold,\n () =>\n `table-source.next took too long for ${query}. Are you missing an index?`,\n );\n if (result.done) {\n break;\n }\n const row = fromSQLiteTypes(valueTypes, result.value, this.#table);\n debug?.rowVended(this.#table, query, row);\n yield row;\n } while (!result.done);\n } finally {\n rowIterator.return?.();\n }\n }\n\n *push(change: SourceChange): Stream<'yield'> {\n for (const result of this.genPush(change)) {\n if (result === 'yield') {\n yield result;\n }\n }\n }\n\n *genPush(change: SourceChange) {\n const exists = (row: Row) =>\n this.#stmts.checkExists.get<{exists: number} | undefined>(\n ...toSQLiteTypes(this.#primaryKey, row, this.#columns),\n )?.exists === 1;\n const setOverlay = (o: Overlay | undefined) => (this.#overlay = o);\n const writeChange = (c: SourceChange) => this.#writeChange(c);\n\n yield* genPushAndWriteWithSplitEdit(\n this.#connections,\n change,\n exists,\n setOverlay,\n writeChange,\n () => ++this.#pushEpoch,\n );\n }\n\n #writeChange(change: SourceChange) {\n switch (change.type) {\n case 'add':\n this.#stmts.insert.run(\n ...toSQLiteTypes(\n Object.keys(this.#columns),\n change.row,\n this.#columns,\n ),\n );\n break;\n case 'remove':\n this.#stmts.delete.run(\n ...toSQLiteTypes(this.#primaryKey, change.row, this.#columns),\n );\n break;\n case 'edit': {\n // If the PK is the same, use UPDATE.\n if (\n canUseUpdate(\n change.oldRow,\n change.row,\n this.#columns,\n this.#primaryKey,\n )\n ) {\n const mergedRow = {\n ...change.oldRow,\n ...change.row,\n };\n const params = [\n ...nonPrimaryValues(this.#columns, this.#primaryKey, mergedRow),\n ...toSQLiteTypes(this.#primaryKey, mergedRow, this.#columns),\n ];\n 
must(this.#stmts.update).run(params);\n } else {\n this.#stmts.delete.run(\n ...toSQLiteTypes(this.#primaryKey, change.oldRow, this.#columns),\n );\n this.#stmts.insert.run(\n ...toSQLiteTypes(\n Object.keys(this.#columns),\n change.row,\n this.#columns,\n ),\n );\n }\n\n break;\n }\n default:\n unreachable(change);\n }\n }\n\n #getRowStmtCache = new Map<string, string>();\n\n #getRowStmt(keyCols: string[]): string {\n const keyString = JSON.stringify(keyCols);\n let stmt = this.#getRowStmtCache.get(keyString);\n if (!stmt) {\n stmt = compile(\n sql`SELECT ${this.#allColumns} FROM ${sql.ident(\n this.#table,\n )} WHERE ${sql.join(\n keyCols.map(k => sql`${sql.ident(k)}=?`),\n sql` AND`,\n )}`,\n );\n this.#getRowStmtCache.set(keyString, stmt);\n }\n return stmt;\n }\n\n /**\n * Retrieves a row from the backing DB by a unique key, or `undefined` if such a\n * row does not exist. This is not used in the IVM pipeline but is useful\n * for retrieving data that is consistent with the state (and type\n * semantics) of the pipeline. 
Note that this key may not necessarily correspond\n * to the `primaryKey` with which this TableSource.\n */\n getRow(rowKey: Row): Row | undefined {\n const keyCols = Object.keys(rowKey);\n\n const stmt = this.#getRowStmt(keyCols);\n const row = this.#stmts.cache.use(stmt, cached =>\n cached.statement\n .safeIntegers(true)\n .get<Row>(...toSQLiteTypes(keyCols, rowKey, this.#columns)),\n );\n if (row) {\n return fromSQLiteTypes(this.#columns, row, this.#table);\n }\n return row;\n }\n\n #requestToSQL(\n request: FetchRequest,\n filters: NoSubqueryCondition | undefined,\n order: Ordering,\n ): SQLQuery {\n return buildSelectQuery(\n this.#table,\n this.#columns,\n request.constraint,\n filters,\n order,\n request.reverse,\n request.start,\n );\n }\n}\n\nfunction getUniqueIndexes(\n db: Database,\n tableName: string,\n): Map<string, Set<string>> {\n const sqlAndBindings = format(\n sql`\n SELECT idx.name, json_group_array(col.name) as columnsJSON\n FROM sqlite_master as idx\n JOIN pragma_index_list(idx.tbl_name) AS info ON info.name = idx.name\n JOIN pragma_index_info(idx.name) as col\n WHERE idx.tbl_name = ${tableName} AND\n idx.type = 'index' AND \n info.\"unique\" != 0\n GROUP BY idx.name\n ORDER BY idx.name`,\n );\n const stmt = db.prepare(sqlAndBindings.text);\n const indexes = stmt.all<{columnsJSON: string}>(...sqlAndBindings.values);\n return new Map(\n indexes.map(({columnsJSON}) => {\n const columns = JSON.parse(columnsJSON);\n const set = new Set<string>(columns);\n return [JSON.stringify(columns.sort()), set];\n }),\n );\n}\n\nexport function toSQLiteTypes(\n columns: readonly string[],\n row: Row,\n columnTypes: Record<string, SchemaValue>,\n): readonly unknown[] {\n return columns.map(col => toSQLiteType(row[col], columnTypes[col].type));\n}\n\nexport function toSQLiteTypeName(type: ValueType) {\n switch (type) {\n case 'boolean':\n return 'INTEGER';\n case 'number':\n return 'REAL';\n case 'string':\n return 'TEXT';\n case 'null':\n return 'NULL';\n case 
'json':\n return 'TEXT';\n }\n}\n\nexport function fromSQLiteTypes(\n valueTypes: Record<string, SchemaValue>,\n row: Row,\n tableName: string,\n): Row {\n const newRow: Writable<Row> = {};\n for (const key of Object.keys(row)) {\n const valueType = valueTypes[key];\n if (valueType === undefined) {\n const columnList = Object.keys(valueTypes).sort().join(', ');\n throw new Error(\n `Invalid column \"${key}\" for table \"${tableName}\". Synced columns include ${columnList}`,\n );\n }\n newRow[key] = fromSQLiteType(valueType.type, row[key], key, tableName);\n }\n return newRow;\n}\n\nfunction fromSQLiteType(\n valueType: ValueType,\n v: Value,\n column: string,\n tableName: string,\n): Value {\n if (v === null) {\n return null;\n }\n switch (valueType) {\n case 'boolean':\n return !!v;\n case 'number':\n case 'string':\n case 'null':\n if (typeof v === 'bigint') {\n if (v > Number.MAX_SAFE_INTEGER || v < Number.MIN_SAFE_INTEGER) {\n throw new UnsupportedValueError(\n `value ${v} (in ${tableName}.${column}) is outside of supported bounds`,\n );\n }\n return Number(v);\n }\n return v;\n case 'json':\n try {\n return JSON.parse(v as string);\n } catch (error) {\n const errorMessage =\n error instanceof Error ? 
error.message : String(error);\n throw new UnsupportedValueError(\n `Failed to parse JSON for ${tableName}.${column}: ${errorMessage}`,\n {cause: error},\n );\n }\n }\n}\n\nexport class UnsupportedValueError extends Error {}\n\nfunction canUseUpdate(\n oldRow: Row,\n row: Row,\n columns: Record<string, SchemaValue>,\n primaryKey: PrimaryKey,\n): boolean {\n for (const pk of primaryKey) {\n if (oldRow[pk] !== row[pk]) {\n return false;\n }\n }\n return Object.keys(columns).length > primaryKey.length;\n}\n\nfunction nonPrimaryValues(\n columns: Record<string, SchemaValue>,\n primaryKey: PrimaryKey,\n row: Row,\n): Iterable<unknown> {\n return nonPrimaryKeys(columns, primaryKey).map(c =>\n toSQLiteType(row[c], columns[c].type),\n );\n}\n\nfunction nonPrimaryKeys(\n columns: Record<string, SchemaValue>,\n primaryKey: PrimaryKey,\n) {\n return Object.keys(columns).filter(c => !primaryKey.includes(c));\n}\n\nfunction* generateWithYields(stream: Stream<Node>, shouldYield: () => boolean) {\n for (const n of stream) {\n if (shouldYield()) {\n yield 'yield';\n }\n yield n;\n 
}\n}\n"],"mappings":";;;;;;;;;;;;AAuDA,IAAI,aAAa;;;;;;;;;;;;;;;AAgBjB,IAAa,cAAb,MAA2C;CACzC,2BAAoB,IAAI,SAA+B;CACvD,eAAsC,EAAE;CACxC;CACA;CAEA;CACA;CACA;CACA;CACA;CACA;CACA;CACA,aAAa;;;;;;;CAQb,YACE,YACA,WACA,IACA,WACA,SACA,YACA,oBAAoB,OACpB;AACA,QAAA,KAAW;AACX,QAAA,YAAkB;AAClB,QAAA,QAAc;AACd,QAAA,UAAgB;AAChB,QAAA,gBAAsB,iBAAiB,IAAI,UAAU;AACrD,QAAA,aAAmB;AACnB,QAAA,QAAc,MAAA,iBAAuB,GAAG;AACxC,QAAA,cAAoB;AAEpB,SACE,MAAA,cAAoB,IAAI,KAAK,UAAU,CAAC,GAAG,WAAW,CAAC,MAAM,CAAC,CAAC,EAC/D,eAAe,WAAW,+BAC3B;;CAGH,IAAI,cAAc;AAChB,SAAO;GACL,MAAM,MAAA;GACN,SAAS,MAAA;GACT,YAAY,MAAA;GACb;;;;;;CAOH,MAAM,IAAc;AAClB,QAAA,QAAc,MAAA,iBAAuB,GAAG;;CAG1C,kBAAkB,IAAc;EAC9B,MAAM,SAAS,MAAA,QAAc,IAAI,GAAG;AACpC,MAAI,OACF,QAAO;EAGT,MAAM,QAAQ;GACZ,OAAO,IAAI,eAAe,GAAG;GAC7B,QAAQ,GAAG,QACT,QACE,GAAG,eAAe,IAAI,MAAM,MAAA,MAAY,CAAC,IAAI,IAAI,KAC/C,OAAO,KAAK,MAAA,QAAc,CAAC,KAAI,MAAK,IAAI,MAAM,EAAE,CAAC,EACjD,KACD,CAAC,YAAY,IAAI,sBAChB,MAAM,KAAK,EAAC,QAAQ,OAAO,KAAK,MAAA,QAAc,CAAC,QAAO,CAAC,CACpD,KAAK,IAAI,CACT,KAAK,IAAI,CACb,CAAC,GACH,CACF;GACD,QAAQ,GAAG,QACT,QACE,GAAG,eAAe,IAAI,MAAM,MAAA,MAAY,CAAC,SAAS,IAAI,KACpD,MAAA,WAAiB,KAAI,MAAK,GAAG,GAAG,IAAI,MAAM,EAAE,CAAC,IAAI,EACjD,QACD,GACF,CACF;GAED,QACE,OAAO,KAAK,MAAA,QAAc,CAAC,SAAS,MAAA,WAAiB,SACjD,GAAG,QACD,QACE,GAAG,UAAU,IAAI,MAAM,MAAA,MAAY,CAAC,OAAO,IAAI,KAC7C,eAAe,MAAA,SAAe,MAAA,WAAiB,CAAC,KAC9C,MAAK,GAAG,GAAG,IAAI,MAAM,EAAE,CAAC,IACzB,EACD,IACD,CAAC,SAAS,IAAI,KACb,MAAA,WAAiB,KAAI,MAAK,GAAG,GAAG,IAAI,MAAM,EAAE,CAAC,IAAI,EACjD,QACD,GACF,CACF,GACD,KAAA;GACN,aAAa,GAAG,QACd,QACE,GAAG,6BAA6B,IAAI,MAClC,MAAA,MACD,CAAC,SAAS,IAAI,KACb,MAAA,WAAiB,KAAI,MAAK,GAAG,GAAG,IAAI,MAAM,EAAE,CAAC,IAAI,EACjD,QACD,CAAC,UACH,CACF;GACD,aAAa,GAAG,QACd,QACE,GAAG,iBAAiB,IAAI,MAAM,MAAA,MAAY,CAAC,SAAS,IAAI,KACtD,MAAA,WAAiB,KAAI,MAAK,GAAG,GAAG,IAAI,MAAM,EAAE,CAAC,IAAI,EACjD,QACD,GACF,CACF;GACF;AACD,QAAA,QAAc,IAAI,IAAI,MAAM;AAC5B,SAAO;;CAGT,KAAA,aAAkB;AAChB,SAAO,IAAI,KACT,OAAO,KAAK,MAAA,QAAc,CAAC,KAAI,MAAK,IAAI,MAAM,EAAE,CAAC,EACjD,GAAG,IACJ;;CAGH,WAAW,YAAsC;AAC/C,SAAO;GACL,WAAW,MA
AA;GACX,SAAS,MAAA;GACT,YAAY,MAAA;GACZ,MAAM,WAAW;GACjB,eAAe,EAAE;GACjB,UAAU;GACV,QAAQ;GACR,aAAa,WAAW;GACzB;;CAGH,QACE,MACA,SACA,eACA,OACA;EACA,MAAM,qBAAqB,iBAAiB,QAAQ;EACpD,MAAM,QAAqB;GACzB,iBAAiB;GACjB,QAAO,QAAO,MAAA,MAAY,KAAK,WAAW;GAC1C,YAAW,WAAU;AACnB,eAAW,SAAS;;GAEtB,eAAe;IACb,MAAM,MAAM,MAAA,YAAkB,QAAQ,WAAW;AACjD,WAAO,QAAQ,IAAI,uBAAuB;AAC1C,UAAA,YAAkB,OAAO,KAAK,EAAE;;GAElC,qBAAqB,CAAC,mBAAmB;GAC1C;EAED,MAAM,aAAyB;GAC7B;GACA;GACA,QAAQ,KAAA;GACR;GACA;GACA,SAAS,mBAAmB,UACxB;IACE,WAAW,mBAAmB;IAC9B,WAAW,gBAAgB,mBAAmB,QAAQ;IACvD,GACD,KAAA;GACJ,aAAa,eAAe,KAAK;GACjC,iBAAiB;GAClB;EACD,MAAM,SAAS,MAAA,UAAgB,WAAW;AAC1C,2BAAyB,MAAM,MAAA,WAAiB;AAEhD,QAAA,YAAkB,KAAK,WAAW;AAClC,SAAO;;CAGT,YAAY,KAAe;AACzB,SAAO,OAAO,YACZ,OAAO,QAAQ,IAAI,CAAC,KAAK,CAAC,KAAK,WAAW,CACxC,KACA,aAAa,OAAO,MAAA,QAAc,KAAK,KAAK,CAC7C,CAAC,CACH;;CAGH,EAAA,MAAQ,KAAmB,YAAgD;EACzE,MAAM,EAAC,MAAM,UAAS;EAGtB,MAAM,iBAAiB,OADT,MAAA,aAAmB,KAAK,WAAW,SAAS,WAAW,KAAK,CACtC;EAEpC,MAAM,kBAAkB,MAAA,MAAY,MAAM,IAAI,eAAe,KAAK;AAClE,MAAI;AACF,mBAAgB,UAAU,aAAa,KAAK;GAC5C,MAAM,cAAc,gBAAgB,UAAU,QAC5C,GAAG,eAAe,OACnB;GAED,MAAM,aAAa,eAAe,MAAM,IAAI,QAAQ;AAEpD,UAAO,UAAU,MAAA,OAAa,eAAe,KAAK;AAElD,UAAO,kBACL,mBACE,oBACE,IAAI,OAAO,KACX,MAAA,mBACE,MAAA,SACA,aACA,eAAe,MACf,MACD,EACD,IAAI,YACJ,MAAA,SACA,WAAW,iBACX,YACA,WAAW,SAAS,UACrB,EACD,MAAA,YACD,EACD,IAAI,OACJ,WACD;YACO;AACR,OAAI,OAAO;IACT,IAAI,cAAc;IAClB,IAAI,IAAI;AACR,WAAO,MAAM;KACX,MAAM,SAAS,gBAAgB,UAAU,WACvC,KACA,gBAAgB,wBAChB,EACD;AACD,SAAI,WAAW,KAAA,EACb;AAEF,oBAAe,OAAO,OAAO;;AAE/B,QAAI,gBAAgB,EAClB,OAAM,aAAa,MAAA,OAAa,eAAe,MAAM,YAAY;AAEnE,oBAAgB,UAAU,iBAAiB;;AAE7C,SAAA,MAAY,MAAM,OAAO,gBAAgB;;;CAI7C,EAAA,mBACE,YACA,aACA,OACA,OACuB;EACvB,IAAI;AACJ,MAAI;AACF,MAAG;AACD,aAAS,YACP,MAAA,IACA,EAAE,YACF,MAAA,UAAgB,mBACV,YAAY,MAAM,EACxB,MAAA,UAAgB,wBAEd,uCAAuC,MAAM,6BAChD;AACD,QAAI,OAAO,KACT;IAEF,MAAM,MAAM,gBAAgB,YAAY,OAAO,OAAO,MAAA,MAAY;AAClE,WAAO,UAAU,MAAA,OAAa,OAAO,IAAI;AACzC,UAAM;YACC,CAAC,OAAO;YACT;AACR,eAAY,UAAU;;;CAI1B,CAAC,KAAK,QAAuC;AAC3C,OAAK,MAAM,UAAU,KAAK,QAAQ,OAAO,CACvC,KAAI,W
AAW,QACb,OAAM;;CAKZ,CAAC,QAAQ,QAAsB;EAC7B,MAAM,UAAU,QACd,MAAA,MAAY,YAAY,IACtB,GAAG,cAAc,MAAA,YAAkB,KAAK,MAAA,QAAc,CACvD,EAAE,WAAW;EAChB,MAAM,cAAc,MAA4B,MAAA,UAAgB;EAChE,MAAM,eAAe,MAAoB,MAAA,YAAkB,EAAE;AAE7D,SAAO,6BACL,MAAA,aACA,QACA,QACA,YACA,mBACM,EAAE,MAAA,UACT;;CAGH,aAAa,QAAsB;AACjC,UAAQ,OAAO,MAAf;GACE,KAAK;AACH,UAAA,MAAY,OAAO,IACjB,GAAG,cACD,OAAO,KAAK,MAAA,QAAc,EAC1B,OAAO,KACP,MAAA,QACD,CACF;AACD;GACF,KAAK;AACH,UAAA,MAAY,OAAO,IACjB,GAAG,cAAc,MAAA,YAAkB,OAAO,KAAK,MAAA,QAAc,CAC9D;AACD;GACF,KAAK;AAEH,QACE,aACE,OAAO,QACP,OAAO,KACP,MAAA,SACA,MAAA,WACD,EACD;KACA,MAAM,YAAY;MAChB,GAAG,OAAO;MACV,GAAG,OAAO;MACX;KACD,MAAM,SAAS,CACb,GAAG,iBAAiB,MAAA,SAAe,MAAA,YAAkB,UAAU,EAC/D,GAAG,cAAc,MAAA,YAAkB,WAAW,MAAA,QAAc,CAC7D;AACD,UAAK,MAAA,MAAY,OAAO,CAAC,IAAI,OAAO;WAC/B;AACL,WAAA,MAAY,OAAO,IACjB,GAAG,cAAc,MAAA,YAAkB,OAAO,QAAQ,MAAA,QAAc,CACjE;AACD,WAAA,MAAY,OAAO,IACjB,GAAG,cACD,OAAO,KAAK,MAAA,QAAc,EAC1B,OAAO,KACP,MAAA,QACD,CACF;;AAGH;GAEF,QACE,aAAY,OAAO;;;CAIzB,mCAAmB,IAAI,KAAqB;CAE5C,YAAY,SAA2B;EACrC,MAAM,YAAY,KAAK,UAAU,QAAQ;EACzC,IAAI,OAAO,MAAA,gBAAsB,IAAI,UAAU;AAC/C,MAAI,CAAC,MAAM;AACT,UAAO,QACL,GAAG,UAAU,MAAA,WAAiB,QAAQ,IAAI,MACxC,MAAA,MACD,CAAC,SAAS,IAAI,KACb,QAAQ,KAAI,MAAK,GAAG,GAAG,IAAI,MAAM,EAAE,CAAC,IAAI,EACxC,GAAG,OACJ,GACF;AACD,SAAA,gBAAsB,IAAI,WAAW,KAAK;;AAE5C,SAAO;;;;;;;;;CAUT,OAAO,QAA8B;EACnC,MAAM,UAAU,OAAO,KAAK,OAAO;EAEnC,MAAM,OAAO,MAAA,WAAiB,QAAQ;EACtC,MAAM,MAAM,MAAA,MAAY,MAAM,IAAI,OAAM,WACtC,OAAO,UACJ,aAAa,KAAK,CAClB,IAAS,GAAG,cAAc,SAAS,QAAQ,MAAA,QAAc,CAAC,CAC9D;AACD,MAAI,IACF,QAAO,gBAAgB,MAAA,SAAe,KAAK,MAAA,MAAY;AAEzD,SAAO;;CAGT,cACE,SACA,SACA,OACU;AACV,SAAO,iBACL,MAAA,OACA,MAAA,SACA,QAAQ,YACR,SACA,OACA,QAAQ,SACR,QAAQ,MACT;;;AAIL,SAAS,iBACP,IACA,WAC0B;CAC1B,MAAM,iBAAiB,OACrB,GAAG;;;;;6BAKsB,UAAU;;;;yBAKpC;CAED,MAAM,UADO,GAAG,QAAQ,eAAe,KAAK,CACvB,IAA2B,GAAG,eAAe,OAAO;AACzE,QAAO,IAAI,IACT,QAAQ,KAAK,EAAC,kBAAiB;EAC7B,MAAM,UAAU,KAAK,MAAM,YAAY;EACvC,MAAM,MAAM,IAAI,IAAY,QAAQ;AACpC,SAAO,CAAC,KAAK,UAAU,QAAQ,MAAM,CAAC,EAAE,IAAI;GAC5C,CACH;;AAGH,SAAgB,cACd,SACA,KACA,aACoB;AA
CpB,QAAO,QAAQ,KAAI,QAAO,aAAa,IAAI,MAAM,YAAY,KAAK,KAAK,CAAC;;AAkB1E,SAAgB,gBACd,YACA,KACA,WACK;CACL,MAAM,SAAwB,EAAE;AAChC,MAAK,MAAM,OAAO,OAAO,KAAK,IAAI,EAAE;EAClC,MAAM,YAAY,WAAW;AAC7B,MAAI,cAAc,KAAA,GAAW;GAC3B,MAAM,aAAa,OAAO,KAAK,WAAW,CAAC,MAAM,CAAC,KAAK,KAAK;AAC5D,SAAM,IAAI,MACR,mBAAmB,IAAI,eAAe,UAAU,4BAA4B,aAC7E;;AAEH,SAAO,OAAO,eAAe,UAAU,MAAM,IAAI,MAAM,KAAK,UAAU;;AAExE,QAAO;;AAGT,SAAS,eACP,WACA,GACA,QACA,WACO;AACP,KAAI,MAAM,KACR,QAAO;AAET,SAAQ,WAAR;EACE,KAAK,UACH,QAAO,CAAC,CAAC;EACX,KAAK;EACL,KAAK;EACL,KAAK;AACH,OAAI,OAAO,MAAM,UAAU;AACzB,QAAI,IAAI,OAAO,oBAAoB,IAAI,OAAO,iBAC5C,OAAM,IAAI,sBACR,SAAS,EAAE,OAAO,UAAU,GAAG,OAAO,kCACvC;AAEH,WAAO,OAAO,EAAE;;AAElB,UAAO;EACT,KAAK,OACH,KAAI;AACF,UAAO,KAAK,MAAM,EAAY;WACvB,OAAO;AAGd,SAAM,IAAI,sBACR,4BAA4B,UAAU,GAAG,OAAO,IAFhD,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,IAGtD,EAAC,OAAO,OAAM,CACf;;;;AAKT,IAAa,wBAAb,cAA2C,MAAM;AAEjD,SAAS,aACP,QACA,KACA,SACA,YACS;AACT,MAAK,MAAM,MAAM,WACf,KAAI,OAAO,QAAQ,IAAI,IACrB,QAAO;AAGX,QAAO,OAAO,KAAK,QAAQ,CAAC,SAAS,WAAW;;AAGlD,SAAS,iBACP,SACA,YACA,KACmB;AACnB,QAAO,eAAe,SAAS,WAAW,CAAC,KAAI,MAC7C,aAAa,IAAI,IAAI,QAAQ,GAAG,KAAK,CACtC;;AAGH,SAAS,eACP,SACA,YACA;AACA,QAAO,OAAO,KAAK,QAAQ,CAAC,QAAO,MAAK,CAAC,WAAW,SAAS,EAAE,CAAC;;AAGlE,UAAU,mBAAmB,QAAsB,aAA4B;AAC7E,MAAK,MAAM,KAAK,QAAQ;AACtB,MAAI,aAAa,CACf,OAAM;AAER,QAAM"}
1
+ {"version":3,"file":"table-source.js","names":["#dbCache","#connections","#table","#columns","#uniqueIndexes","#primaryKey","#logConfig","#lc","#shouldYield","#stmts","#getStatementsFor","#allColumns","#fetch","#getSchema","#requestToSQL","#mapFromSQLiteTypes","#overlay","#writeChange","#pushEpoch","#getRowStmtCache","#getRowStmt"],"sources":["../../../../zqlite/src/table-source.ts"],"sourcesContent":["import type {SQLQuery} from '@databases/sql';\nimport type {LogContext} from '@rocicorp/logger';\nimport SQLite3Database from '@rocicorp/zero-sqlite3';\nimport type {LogConfig} from '../../otel/src/log-options.ts';\nimport {timeSampled} from '../../otel/src/maybe-time.ts';\nimport {assert, unreachable} from '../../shared/src/asserts.ts';\nimport {must} from '../../shared/src/must.ts';\nimport type {Writable} from '../../shared/src/writable.ts';\nimport type {Condition, Ordering} from '../../zero-protocol/src/ast.ts';\nimport type {Row, Value} from '../../zero-protocol/src/data.ts';\nimport type {PrimaryKey} from '../../zero-protocol/src/primary-key.ts';\nimport type {\n SchemaValue,\n ValueType,\n} from '../../zero-schema/src/table-schema.ts';\nimport type {DebugDelegate} from '../../zql/src/builder/debug-delegate.ts';\nimport {\n createPredicate,\n transformFilters,\n} from '../../zql/src/builder/filter.ts';\nimport {makeComparator, type Node} from '../../zql/src/ivm/data.ts';\nimport {\n generateWithOverlay,\n generateWithOverlayUnordered,\n generateWithStart,\n genPushAndWriteWithSplitEdit,\n type Connection,\n type Overlay,\n} from '../../zql/src/ivm/memory-source.ts';\nimport {type FetchRequest} from '../../zql/src/ivm/operator.ts';\nimport type {SourceSchema} from '../../zql/src/ivm/schema.ts';\nimport {\n type Source,\n type SourceChange,\n type SourceInput,\n} from '../../zql/src/ivm/source.ts';\nimport type {Stream} from '../../zql/src/ivm/stream.ts';\nimport type {Database, Statement} from './db.ts';\nimport {compile, format, sql} from 
'./internal/sql.ts';\nimport {StatementCache} from './internal/statement-cache.ts';\nimport {\n buildSelectQuery,\n toSQLiteType,\n type NoSubqueryCondition,\n} from './query-builder.ts';\nimport {assertOrderingIncludesPK} from '../../zql/src/query/complete-ordering.ts';\n\ntype Statements = {\n readonly cache: StatementCache;\n readonly insert: Statement;\n readonly delete: Statement;\n readonly update: Statement | undefined;\n readonly checkExists: Statement;\n readonly getExisting: Statement;\n};\n\nlet eventCount = 0;\n\n/**\n * A source that is backed by a SQLite table.\n *\n * Values are written to the backing table _after_ being vended by the source.\n *\n * This ordering of events is to ensure self joins function properly. That is,\n * we can't reveal a value to an output before it has been pushed to that output.\n *\n * The code is fairly straightforward except for:\n * 1. Dealing with a `fetch` that has a basis of `before`.\n * 2. Dealing with compound orders that have differing directions (a ASC, b DESC, c ASC)\n *\n * See comments in relevant functions for more details.\n */\nexport class TableSource implements Source {\n readonly #dbCache = new WeakMap<Database, Statements>();\n readonly #connections: Connection[] = [];\n readonly #table: string;\n readonly #columns: Record<string, SchemaValue>;\n // Maps sorted columns JSON string (e.g. '[\"a\",\"b\"]) to Set of columns.\n readonly #uniqueIndexes: Map<string, Set<string>>;\n readonly #primaryKey: PrimaryKey;\n readonly #logConfig: LogConfig;\n readonly #lc: LogContext;\n readonly #shouldYield: () => boolean;\n #stmts: Statements;\n #overlay?: Overlay | undefined;\n #pushEpoch = 0;\n\n /**\n * @param shouldYield a function called after each row is read from the database,\n * which should return true if the source should yield the special 'yield' value\n * to yield control back to the caller at the end of the pipeline. 
Can\n * also throw an error to abort the pipeline processing.\n */\n constructor(\n logContext: LogContext,\n logConfig: LogConfig,\n db: Database,\n tableName: string,\n columns: Record<string, SchemaValue>,\n primaryKey: PrimaryKey,\n shouldYield = () => false,\n ) {\n this.#lc = logContext;\n this.#logConfig = logConfig;\n this.#table = tableName;\n this.#columns = columns;\n this.#uniqueIndexes = getUniqueIndexes(db, tableName);\n this.#primaryKey = primaryKey;\n this.#stmts = this.#getStatementsFor(db);\n this.#shouldYield = shouldYield;\n\n assert(\n this.#uniqueIndexes.has(JSON.stringify([...primaryKey].sort())),\n `primary key ${primaryKey} does not have a UNIQUE index`,\n );\n }\n\n get tableSchema() {\n return {\n name: this.#table,\n columns: this.#columns,\n primaryKey: this.#primaryKey,\n };\n }\n\n /**\n * Sets the db (snapshot) to use, to facilitate the Snapshotter leapfrog\n * algorithm for concurrent traversal of historic timelines.\n */\n setDB(db: Database) {\n this.#stmts = this.#getStatementsFor(db);\n }\n\n #getStatementsFor(db: Database) {\n const cached = this.#dbCache.get(db);\n if (cached) {\n return cached;\n }\n\n const stmts = {\n cache: new StatementCache(db),\n insert: db.prepare(\n compile(\n sql`INSERT INTO ${sql.ident(this.#table)} (${sql.join(\n Object.keys(this.#columns).map(c => sql.ident(c)),\n ', ',\n )}) VALUES (${sql.__dangerous__rawValue(\n Array.from({length: Object.keys(this.#columns).length})\n .fill('?')\n .join(','),\n )})`,\n ),\n ),\n delete: db.prepare(\n compile(\n sql`DELETE FROM ${sql.ident(this.#table)} WHERE ${sql.join(\n this.#primaryKey.map(k => sql`${sql.ident(k)}=?`),\n ' AND ',\n )}`,\n ),\n ),\n // If all the columns are part of the primary key, we cannot use UPDATE.\n update:\n Object.keys(this.#columns).length > this.#primaryKey.length\n ? 
db.prepare(\n compile(\n sql`UPDATE ${sql.ident(this.#table)} SET ${sql.join(\n nonPrimaryKeys(this.#columns, this.#primaryKey).map(\n c => sql`${sql.ident(c)}=?`,\n ),\n ',',\n )} WHERE ${sql.join(\n this.#primaryKey.map(k => sql`${sql.ident(k)}=?`),\n ' AND ',\n )}`,\n ),\n )\n : undefined,\n checkExists: db.prepare(\n compile(\n sql`SELECT 1 AS \"exists\" FROM ${sql.ident(\n this.#table,\n )} WHERE ${sql.join(\n this.#primaryKey.map(k => sql`${sql.ident(k)}=?`),\n ' AND ',\n )} LIMIT 1`,\n ),\n ),\n getExisting: db.prepare(\n compile(\n sql`SELECT * FROM ${sql.ident(this.#table)} WHERE ${sql.join(\n this.#primaryKey.map(k => sql`${sql.ident(k)}=?`),\n ' AND ',\n )}`,\n ),\n ),\n };\n this.#dbCache.set(db, stmts);\n return stmts;\n }\n\n get #allColumns() {\n return sql.join(\n Object.keys(this.#columns).map(c => sql.ident(c)),\n sql`,`,\n );\n }\n\n #getSchema(connection: Connection, unordered: boolean): SourceSchema {\n return {\n tableName: this.#table,\n columns: this.#columns,\n primaryKey: this.#primaryKey,\n sort: unordered ? 
undefined : connection.sort,\n relationships: {},\n isHidden: false,\n system: 'client',\n compareRows: connection.compareRows,\n };\n }\n\n connect(\n sort: Ordering | undefined,\n filters?: Condition,\n splitEditKeys?: Set<string>,\n debug?: DebugDelegate,\n ) {\n const transformedFilters = transformFilters(filters);\n const unordered = sort === undefined;\n // PK comparator is used for source-level overlay matching (remove by PK\n // equality) even when no ordering is requested.\n const primaryKeySort: Ordering = this.#primaryKey.map(k => [k, 'asc']);\n\n const input: SourceInput = {\n getSchema: () => schema,\n fetch: req => this.#fetch(req, connection),\n setOutput: output => {\n connection.output = output;\n },\n destroy: () => {\n const idx = this.#connections.indexOf(connection);\n assert(idx !== -1, 'Connection not found');\n this.#connections.splice(idx, 1);\n },\n fullyAppliedFilters: !transformedFilters.conditionsRemoved,\n };\n\n const connection: Connection = {\n input,\n debug,\n output: undefined,\n sort,\n splitEditKeys,\n filters: transformedFilters.filters\n ? {\n condition: transformedFilters.filters,\n predicate: createPredicate(transformedFilters.filters),\n }\n : undefined,\n compareRows: sort ? 
makeComparator(sort) : makeComparator(primaryKeySort),\n lastPushedEpoch: 0,\n };\n const schema = this.#getSchema(connection, unordered);\n if (!unordered) {\n assertOrderingIncludesPK(sort, this.#primaryKey);\n }\n\n this.#connections.push(connection);\n return input;\n }\n\n toSQLiteRow(row: Row): Row {\n return Object.fromEntries(\n Object.entries(row).map(([key, value]) => [\n key,\n toSQLiteType(value, this.#columns[key].type),\n ]),\n ) as Row;\n }\n\n *#fetch(req: FetchRequest, connection: Connection): Stream<Node | 'yield'> {\n const {sort, debug} = connection;\n\n const query = this.#requestToSQL(req, connection.filters?.condition, sort);\n const sqlAndBindings = format(query);\n\n const cachedStatement = this.#stmts.cache.get(sqlAndBindings.text);\n cachedStatement.statement.safeIntegers(true);\n const rowIterator = cachedStatement.statement.iterate<Row>(\n ...sqlAndBindings.values,\n );\n try {\n debug?.initQuery(this.#table, sqlAndBindings.text);\n\n if (sort) {\n const comparator = makeComparator(sort, req.reverse);\n yield* generateWithStart(\n generateWithYields(\n generateWithOverlay(\n req.start?.row,\n this.#mapFromSQLiteTypes(\n this.#columns,\n rowIterator,\n sqlAndBindings.text,\n debug,\n ),\n req.constraint,\n this.#overlay,\n connection.lastPushedEpoch,\n comparator,\n connection.filters?.predicate,\n ),\n this.#shouldYield,\n ),\n req.start,\n comparator,\n );\n } else {\n yield* generateWithYields(\n generateWithOverlayUnordered(\n this.#mapFromSQLiteTypes(\n this.#columns,\n rowIterator,\n sqlAndBindings.text,\n debug,\n ),\n req.constraint,\n this.#overlay,\n connection.lastPushedEpoch,\n this.#primaryKey,\n connection.filters?.predicate,\n ),\n this.#shouldYield,\n );\n }\n } finally {\n // Ensure the SQLite iterate() is closed. 
Normally #mapFromSQLiteTypes\n // closes it via its own finally block, but if the generator chain is\n // returned before #mapFromSQLiteTypes was ever started (e.g., the\n // unordered overlay yielded an add row before iterating the source),\n // the SQLite iterator would remain active and lock the connection.\n // Calling return() on an already-closed iterator is a safe no-op.\n rowIterator.return?.();\n if (debug) {\n let totalNvisit = 0;\n let i = 0;\n while (true) {\n const nvisit = cachedStatement.statement.scanStatus(\n i++,\n SQLite3Database.SQLITE_SCANSTAT_NVISIT,\n 1,\n );\n if (nvisit === undefined) {\n break;\n }\n totalNvisit += Number(nvisit);\n }\n if (totalNvisit !== 0) {\n debug.recordNVisit(this.#table, sqlAndBindings.text, totalNvisit);\n }\n cachedStatement.statement.scanStatusReset();\n }\n this.#stmts.cache.return(cachedStatement);\n }\n }\n\n *#mapFromSQLiteTypes(\n valueTypes: Record<string, SchemaValue>,\n rowIterator: IterableIterator<Row>,\n query: string,\n debug: DebugDelegate | undefined,\n ): IterableIterator<Row> {\n let result;\n try {\n do {\n result = timeSampled(\n this.#lc,\n ++eventCount,\n this.#logConfig.ivmSampling,\n () => rowIterator.next(),\n this.#logConfig.slowRowThreshold,\n () =>\n `table-source.next took too long for ${query}. 
Are you missing an index?`,\n );\n if (result.done) {\n break;\n }\n const row = fromSQLiteTypes(valueTypes, result.value, this.#table);\n debug?.rowVended(this.#table, query, row);\n yield row;\n } while (!result.done);\n } finally {\n rowIterator.return?.();\n }\n }\n\n *push(change: SourceChange): Stream<'yield'> {\n for (const result of this.genPush(change)) {\n if (result === 'yield') {\n yield result;\n }\n }\n }\n\n *genPush(change: SourceChange) {\n const exists = (row: Row) =>\n this.#stmts.checkExists.get<{exists: number} | undefined>(\n ...toSQLiteTypes(this.#primaryKey, row, this.#columns),\n )?.exists === 1;\n const setOverlay = (o: Overlay | undefined) => (this.#overlay = o);\n const writeChange = (c: SourceChange) => this.#writeChange(c);\n\n yield* genPushAndWriteWithSplitEdit(\n this.#connections,\n change,\n exists,\n setOverlay,\n writeChange,\n () => ++this.#pushEpoch,\n );\n }\n\n #writeChange(change: SourceChange) {\n switch (change.type) {\n case 'add':\n this.#stmts.insert.run(\n ...toSQLiteTypes(\n Object.keys(this.#columns),\n change.row,\n this.#columns,\n ),\n );\n break;\n case 'remove':\n this.#stmts.delete.run(\n ...toSQLiteTypes(this.#primaryKey, change.row, this.#columns),\n );\n break;\n case 'edit': {\n // If the PK is the same, use UPDATE.\n if (\n canUseUpdate(\n change.oldRow,\n change.row,\n this.#columns,\n this.#primaryKey,\n )\n ) {\n const mergedRow = {\n ...change.oldRow,\n ...change.row,\n };\n const params = [\n ...nonPrimaryValues(this.#columns, this.#primaryKey, mergedRow),\n ...toSQLiteTypes(this.#primaryKey, mergedRow, this.#columns),\n ];\n must(this.#stmts.update).run(params);\n } else {\n this.#stmts.delete.run(\n ...toSQLiteTypes(this.#primaryKey, change.oldRow, this.#columns),\n );\n this.#stmts.insert.run(\n ...toSQLiteTypes(\n Object.keys(this.#columns),\n change.row,\n this.#columns,\n ),\n );\n }\n\n break;\n }\n default:\n unreachable(change);\n }\n }\n\n #getRowStmtCache = new Map<string, string>();\n\n 
#getRowStmt(keyCols: string[]): string {\n const keyString = JSON.stringify(keyCols);\n let stmt = this.#getRowStmtCache.get(keyString);\n if (!stmt) {\n stmt = compile(\n sql`SELECT ${this.#allColumns} FROM ${sql.ident(\n this.#table,\n )} WHERE ${sql.join(\n keyCols.map(k => sql`${sql.ident(k)}=?`),\n sql` AND`,\n )}`,\n );\n this.#getRowStmtCache.set(keyString, stmt);\n }\n return stmt;\n }\n\n /**\n * Retrieves a row from the backing DB by a unique key, or `undefined` if such a\n * row does not exist. This is not used in the IVM pipeline but is useful\n * for retrieving data that is consistent with the state (and type\n * semantics) of the pipeline. Note that this key may not necessarily correspond\n * to the `primaryKey` with which this TableSource.\n */\n getRow(rowKey: Row): Row | undefined {\n const keyCols = Object.keys(rowKey);\n\n const stmt = this.#getRowStmt(keyCols);\n const row = this.#stmts.cache.use(stmt, cached =>\n cached.statement\n .safeIntegers(true)\n .get<Row>(...toSQLiteTypes(keyCols, rowKey, this.#columns)),\n );\n if (row) {\n return fromSQLiteTypes(this.#columns, row, this.#table);\n }\n return row;\n }\n\n #requestToSQL(\n request: FetchRequest,\n filters: NoSubqueryCondition | undefined,\n order: Ordering | undefined,\n ): SQLQuery {\n return buildSelectQuery(\n this.#table,\n this.#columns,\n request.constraint,\n filters,\n order,\n request.reverse,\n request.start,\n );\n }\n}\n\nfunction getUniqueIndexes(\n db: Database,\n tableName: string,\n): Map<string, Set<string>> {\n const sqlAndBindings = format(\n sql`\n SELECT idx.name, json_group_array(col.name) as columnsJSON\n FROM sqlite_master as idx\n JOIN pragma_index_list(idx.tbl_name) AS info ON info.name = idx.name\n JOIN pragma_index_info(idx.name) as col\n WHERE idx.tbl_name = ${tableName} AND\n idx.type = 'index' AND \n info.\"unique\" != 0\n GROUP BY idx.name\n ORDER BY idx.name`,\n );\n const stmt = db.prepare(sqlAndBindings.text);\n const indexes = stmt.all<{columnsJSON: 
string}>(...sqlAndBindings.values);\n return new Map(\n indexes.map(({columnsJSON}) => {\n const columns = JSON.parse(columnsJSON);\n const set = new Set<string>(columns);\n return [JSON.stringify(columns.sort()), set];\n }),\n );\n}\n\nexport function toSQLiteTypes(\n columns: readonly string[],\n row: Row,\n columnTypes: Record<string, SchemaValue>,\n): readonly unknown[] {\n return columns.map(col => toSQLiteType(row[col], columnTypes[col].type));\n}\n\nexport function toSQLiteTypeName(type: ValueType) {\n switch (type) {\n case 'boolean':\n return 'INTEGER';\n case 'number':\n return 'REAL';\n case 'string':\n return 'TEXT';\n case 'null':\n return 'NULL';\n case 'json':\n return 'TEXT';\n }\n}\n\nexport function fromSQLiteTypes(\n valueTypes: Record<string, SchemaValue>,\n row: Row,\n tableName: string,\n): Row {\n const newRow: Writable<Row> = {};\n for (const key of Object.keys(row)) {\n const valueType = valueTypes[key];\n if (valueType === undefined) {\n const columnList = Object.keys(valueTypes).sort().join(', ');\n throw new Error(\n `Invalid column \"${key}\" for table \"${tableName}\". Synced columns include ${columnList}`,\n );\n }\n newRow[key] = fromSQLiteType(valueType.type, row[key], key, tableName);\n }\n return newRow;\n}\n\nfunction fromSQLiteType(\n valueType: ValueType,\n v: Value,\n column: string,\n tableName: string,\n): Value {\n if (v === null) {\n return null;\n }\n switch (valueType) {\n case 'boolean':\n return !!v;\n case 'number':\n case 'string':\n case 'null':\n if (typeof v === 'bigint') {\n if (v > Number.MAX_SAFE_INTEGER || v < Number.MIN_SAFE_INTEGER) {\n throw new UnsupportedValueError(\n `value ${v} (in ${tableName}.${column}) is outside of supported bounds`,\n );\n }\n return Number(v);\n }\n return v;\n case 'json':\n try {\n return JSON.parse(v as string);\n } catch (error) {\n const errorMessage =\n error instanceof Error ? 
error.message : String(error);\n throw new UnsupportedValueError(\n `Failed to parse JSON for ${tableName}.${column}: ${errorMessage}`,\n {cause: error},\n );\n }\n }\n}\n\nexport class UnsupportedValueError extends Error {}\n\nfunction canUseUpdate(\n oldRow: Row,\n row: Row,\n columns: Record<string, SchemaValue>,\n primaryKey: PrimaryKey,\n): boolean {\n for (const pk of primaryKey) {\n if (oldRow[pk] !== row[pk]) {\n return false;\n }\n }\n return Object.keys(columns).length > primaryKey.length;\n}\n\nfunction nonPrimaryValues(\n columns: Record<string, SchemaValue>,\n primaryKey: PrimaryKey,\n row: Row,\n): Iterable<unknown> {\n return nonPrimaryKeys(columns, primaryKey).map(c =>\n toSQLiteType(row[c], columns[c].type),\n );\n}\n\nfunction nonPrimaryKeys(\n columns: Record<string, SchemaValue>,\n primaryKey: PrimaryKey,\n) {\n return Object.keys(columns).filter(c => !primaryKey.includes(c));\n}\n\nfunction* generateWithYields(stream: Stream<Node>, shouldYield: () => boolean) {\n for (const n of stream) {\n if (shouldYield()) {\n yield 'yield';\n }\n yield n;\n 
}\n}\n"],"mappings":";;;;;;;;;;;;AAwDA,IAAI,aAAa;;;;;;;;;;;;;;;AAgBjB,IAAa,cAAb,MAA2C;CACzC,2BAAoB,IAAI,SAA+B;CACvD,eAAsC,EAAE;CACxC;CACA;CAEA;CACA;CACA;CACA;CACA;CACA;CACA;CACA,aAAa;;;;;;;CAQb,YACE,YACA,WACA,IACA,WACA,SACA,YACA,oBAAoB,OACpB;AACA,QAAA,KAAW;AACX,QAAA,YAAkB;AAClB,QAAA,QAAc;AACd,QAAA,UAAgB;AAChB,QAAA,gBAAsB,iBAAiB,IAAI,UAAU;AACrD,QAAA,aAAmB;AACnB,QAAA,QAAc,MAAA,iBAAuB,GAAG;AACxC,QAAA,cAAoB;AAEpB,SACE,MAAA,cAAoB,IAAI,KAAK,UAAU,CAAC,GAAG,WAAW,CAAC,MAAM,CAAC,CAAC,EAC/D,eAAe,WAAW,+BAC3B;;CAGH,IAAI,cAAc;AAChB,SAAO;GACL,MAAM,MAAA;GACN,SAAS,MAAA;GACT,YAAY,MAAA;GACb;;;;;;CAOH,MAAM,IAAc;AAClB,QAAA,QAAc,MAAA,iBAAuB,GAAG;;CAG1C,kBAAkB,IAAc;EAC9B,MAAM,SAAS,MAAA,QAAc,IAAI,GAAG;AACpC,MAAI,OACF,QAAO;EAGT,MAAM,QAAQ;GACZ,OAAO,IAAI,eAAe,GAAG;GAC7B,QAAQ,GAAG,QACT,QACE,GAAG,eAAe,IAAI,MAAM,MAAA,MAAY,CAAC,IAAI,IAAI,KAC/C,OAAO,KAAK,MAAA,QAAc,CAAC,KAAI,MAAK,IAAI,MAAM,EAAE,CAAC,EACjD,KACD,CAAC,YAAY,IAAI,sBAChB,MAAM,KAAK,EAAC,QAAQ,OAAO,KAAK,MAAA,QAAc,CAAC,QAAO,CAAC,CACpD,KAAK,IAAI,CACT,KAAK,IAAI,CACb,CAAC,GACH,CACF;GACD,QAAQ,GAAG,QACT,QACE,GAAG,eAAe,IAAI,MAAM,MAAA,MAAY,CAAC,SAAS,IAAI,KACpD,MAAA,WAAiB,KAAI,MAAK,GAAG,GAAG,IAAI,MAAM,EAAE,CAAC,IAAI,EACjD,QACD,GACF,CACF;GAED,QACE,OAAO,KAAK,MAAA,QAAc,CAAC,SAAS,MAAA,WAAiB,SACjD,GAAG,QACD,QACE,GAAG,UAAU,IAAI,MAAM,MAAA,MAAY,CAAC,OAAO,IAAI,KAC7C,eAAe,MAAA,SAAe,MAAA,WAAiB,CAAC,KAC9C,MAAK,GAAG,GAAG,IAAI,MAAM,EAAE,CAAC,IACzB,EACD,IACD,CAAC,SAAS,IAAI,KACb,MAAA,WAAiB,KAAI,MAAK,GAAG,GAAG,IAAI,MAAM,EAAE,CAAC,IAAI,EACjD,QACD,GACF,CACF,GACD,KAAA;GACN,aAAa,GAAG,QACd,QACE,GAAG,6BAA6B,IAAI,MAClC,MAAA,MACD,CAAC,SAAS,IAAI,KACb,MAAA,WAAiB,KAAI,MAAK,GAAG,GAAG,IAAI,MAAM,EAAE,CAAC,IAAI,EACjD,QACD,CAAC,UACH,CACF;GACD,aAAa,GAAG,QACd,QACE,GAAG,iBAAiB,IAAI,MAAM,MAAA,MAAY,CAAC,SAAS,IAAI,KACtD,MAAA,WAAiB,KAAI,MAAK,GAAG,GAAG,IAAI,MAAM,EAAE,CAAC,IAAI,EACjD,QACD,GACF,CACF;GACF;AACD,QAAA,QAAc,IAAI,IAAI,MAAM;AAC5B,SAAO;;CAGT,KAAA,aAAkB;AAChB,SAAO,IAAI,KACT,OAAO,KAAK,MAAA,QAAc,CAAC,KAAI,MAAK,IAAI,MAAM,EAAE,CAAC,EACjD,GAAG,IACJ;;CAGH,WAAW,YAAwB,WAAkC;AACnE,SAAO;GACL,W
AAW,MAAA;GACX,SAAS,MAAA;GACT,YAAY,MAAA;GACZ,MAAM,YAAY,KAAA,IAAY,WAAW;GACzC,eAAe,EAAE;GACjB,UAAU;GACV,QAAQ;GACR,aAAa,WAAW;GACzB;;CAGH,QACE,MACA,SACA,eACA,OACA;EACA,MAAM,qBAAqB,iBAAiB,QAAQ;EACpD,MAAM,YAAY,SAAS,KAAA;EAG3B,MAAM,iBAA2B,MAAA,WAAiB,KAAI,MAAK,CAAC,GAAG,MAAM,CAAC;EAEtE,MAAM,QAAqB;GACzB,iBAAiB;GACjB,QAAO,QAAO,MAAA,MAAY,KAAK,WAAW;GAC1C,YAAW,WAAU;AACnB,eAAW,SAAS;;GAEtB,eAAe;IACb,MAAM,MAAM,MAAA,YAAkB,QAAQ,WAAW;AACjD,WAAO,QAAQ,IAAI,uBAAuB;AAC1C,UAAA,YAAkB,OAAO,KAAK,EAAE;;GAElC,qBAAqB,CAAC,mBAAmB;GAC1C;EAED,MAAM,aAAyB;GAC7B;GACA;GACA,QAAQ,KAAA;GACR;GACA;GACA,SAAS,mBAAmB,UACxB;IACE,WAAW,mBAAmB;IAC9B,WAAW,gBAAgB,mBAAmB,QAAQ;IACvD,GACD,KAAA;GACJ,aAAa,OAAO,eAAe,KAAK,GAAG,eAAe,eAAe;GACzE,iBAAiB;GAClB;EACD,MAAM,SAAS,MAAA,UAAgB,YAAY,UAAU;AACrD,MAAI,CAAC,UACH,0BAAyB,MAAM,MAAA,WAAiB;AAGlD,QAAA,YAAkB,KAAK,WAAW;AAClC,SAAO;;CAGT,YAAY,KAAe;AACzB,SAAO,OAAO,YACZ,OAAO,QAAQ,IAAI,CAAC,KAAK,CAAC,KAAK,WAAW,CACxC,KACA,aAAa,OAAO,MAAA,QAAc,KAAK,KAAK,CAC7C,CAAC,CACH;;CAGH,EAAA,MAAQ,KAAmB,YAAgD;EACzE,MAAM,EAAC,MAAM,UAAS;EAGtB,MAAM,iBAAiB,OADT,MAAA,aAAmB,KAAK,WAAW,SAAS,WAAW,KAAK,CACtC;EAEpC,MAAM,kBAAkB,MAAA,MAAY,MAAM,IAAI,eAAe,KAAK;AAClE,kBAAgB,UAAU,aAAa,KAAK;EAC5C,MAAM,cAAc,gBAAgB,UAAU,QAC5C,GAAG,eAAe,OACnB;AACD,MAAI;AACF,UAAO,UAAU,MAAA,OAAa,eAAe,KAAK;AAElD,OAAI,MAAM;IACR,MAAM,aAAa,eAAe,MAAM,IAAI,QAAQ;AACpD,WAAO,kBACL,mBACE,oBACE,IAAI,OAAO,KACX,MAAA,mBACE,MAAA,SACA,aACA,eAAe,MACf,MACD,EACD,IAAI,YACJ,MAAA,SACA,WAAW,iBACX,YACA,WAAW,SAAS,UACrB,EACD,MAAA,YACD,EACD,IAAI,OACJ,WACD;SAED,QAAO,mBACL,6BACE,MAAA,mBACE,MAAA,SACA,aACA,eAAe,MACf,MACD,EACD,IAAI,YACJ,MAAA,SACA,WAAW,iBACX,MAAA,YACA,WAAW,SAAS,UACrB,EACD,MAAA,YACD;YAEK;AAOR,eAAY,UAAU;AACtB,OAAI,OAAO;IACT,IAAI,cAAc;IAClB,IAAI,IAAI;AACR,WAAO,MAAM;KACX,MAAM,SAAS,gBAAgB,UAAU,WACvC,KACA,gBAAgB,wBAChB,EACD;AACD,SAAI,WAAW,KAAA,EACb;AAEF,oBAAe,OAAO,OAAO;;AAE/B,QAAI,gBAAgB,EAClB,OAAM,aAAa,MAAA,OAAa,eAAe,MAAM,YAAY;AAEnE,oBAAgB,UAAU,iBAAiB;;AAE7C,SAAA,MAAY,MAAM,OAAO,gBAAgB;;;CAI7C,EAAA,mBACE,YACA,aACA,OACA,OACuB;EACvB,IAAI;AACJ,MAAI;AACF,MAAG;AACD,aAA
S,YACP,MAAA,IACA,EAAE,YACF,MAAA,UAAgB,mBACV,YAAY,MAAM,EACxB,MAAA,UAAgB,wBAEd,uCAAuC,MAAM,6BAChD;AACD,QAAI,OAAO,KACT;IAEF,MAAM,MAAM,gBAAgB,YAAY,OAAO,OAAO,MAAA,MAAY;AAClE,WAAO,UAAU,MAAA,OAAa,OAAO,IAAI;AACzC,UAAM;YACC,CAAC,OAAO;YACT;AACR,eAAY,UAAU;;;CAI1B,CAAC,KAAK,QAAuC;AAC3C,OAAK,MAAM,UAAU,KAAK,QAAQ,OAAO,CACvC,KAAI,WAAW,QACb,OAAM;;CAKZ,CAAC,QAAQ,QAAsB;EAC7B,MAAM,UAAU,QACd,MAAA,MAAY,YAAY,IACtB,GAAG,cAAc,MAAA,YAAkB,KAAK,MAAA,QAAc,CACvD,EAAE,WAAW;EAChB,MAAM,cAAc,MAA4B,MAAA,UAAgB;EAChE,MAAM,eAAe,MAAoB,MAAA,YAAkB,EAAE;AAE7D,SAAO,6BACL,MAAA,aACA,QACA,QACA,YACA,mBACM,EAAE,MAAA,UACT;;CAGH,aAAa,QAAsB;AACjC,UAAQ,OAAO,MAAf;GACE,KAAK;AACH,UAAA,MAAY,OAAO,IACjB,GAAG,cACD,OAAO,KAAK,MAAA,QAAc,EAC1B,OAAO,KACP,MAAA,QACD,CACF;AACD;GACF,KAAK;AACH,UAAA,MAAY,OAAO,IACjB,GAAG,cAAc,MAAA,YAAkB,OAAO,KAAK,MAAA,QAAc,CAC9D;AACD;GACF,KAAK;AAEH,QACE,aACE,OAAO,QACP,OAAO,KACP,MAAA,SACA,MAAA,WACD,EACD;KACA,MAAM,YAAY;MAChB,GAAG,OAAO;MACV,GAAG,OAAO;MACX;KACD,MAAM,SAAS,CACb,GAAG,iBAAiB,MAAA,SAAe,MAAA,YAAkB,UAAU,EAC/D,GAAG,cAAc,MAAA,YAAkB,WAAW,MAAA,QAAc,CAC7D;AACD,UAAK,MAAA,MAAY,OAAO,CAAC,IAAI,OAAO;WAC/B;AACL,WAAA,MAAY,OAAO,IACjB,GAAG,cAAc,MAAA,YAAkB,OAAO,QAAQ,MAAA,QAAc,CACjE;AACD,WAAA,MAAY,OAAO,IACjB,GAAG,cACD,OAAO,KAAK,MAAA,QAAc,EAC1B,OAAO,KACP,MAAA,QACD,CACF;;AAGH;GAEF,QACE,aAAY,OAAO;;;CAIzB,mCAAmB,IAAI,KAAqB;CAE5C,YAAY,SAA2B;EACrC,MAAM,YAAY,KAAK,UAAU,QAAQ;EACzC,IAAI,OAAO,MAAA,gBAAsB,IAAI,UAAU;AAC/C,MAAI,CAAC,MAAM;AACT,UAAO,QACL,GAAG,UAAU,MAAA,WAAiB,QAAQ,IAAI,MACxC,MAAA,MACD,CAAC,SAAS,IAAI,KACb,QAAQ,KAAI,MAAK,GAAG,GAAG,IAAI,MAAM,EAAE,CAAC,IAAI,EACxC,GAAG,OACJ,GACF;AACD,SAAA,gBAAsB,IAAI,WAAW,KAAK;;AAE5C,SAAO;;;;;;;;;CAUT,OAAO,QAA8B;EACnC,MAAM,UAAU,OAAO,KAAK,OAAO;EAEnC,MAAM,OAAO,MAAA,WAAiB,QAAQ;EACtC,MAAM,MAAM,MAAA,MAAY,MAAM,IAAI,OAAM,WACtC,OAAO,UACJ,aAAa,KAAK,CAClB,IAAS,GAAG,cAAc,SAAS,QAAQ,MAAA,QAAc,CAAC,CAC9D;AACD,MAAI,IACF,QAAO,gBAAgB,MAAA,SAAe,KAAK,MAAA,MAAY;AAEzD,SAAO;;CAGT,cACE,SACA,SACA,OACU;AACV,SAAO,iBACL,MAAA,OACA,MAAA,SACA,QAAQ,YACR,SACA,OACA,QAAQ,SACR,QAAQ,MACT;;;AAIL,SAAS,iBACP,IACA,WAC0B;CA
C1B,MAAM,iBAAiB,OACrB,GAAG;;;;;6BAKsB,UAAU;;;;yBAKpC;CAED,MAAM,UADO,GAAG,QAAQ,eAAe,KAAK,CACvB,IAA2B,GAAG,eAAe,OAAO;AACzE,QAAO,IAAI,IACT,QAAQ,KAAK,EAAC,kBAAiB;EAC7B,MAAM,UAAU,KAAK,MAAM,YAAY;EACvC,MAAM,MAAM,IAAI,IAAY,QAAQ;AACpC,SAAO,CAAC,KAAK,UAAU,QAAQ,MAAM,CAAC,EAAE,IAAI;GAC5C,CACH;;AAGH,SAAgB,cACd,SACA,KACA,aACoB;AACpB,QAAO,QAAQ,KAAI,QAAO,aAAa,IAAI,MAAM,YAAY,KAAK,KAAK,CAAC;;AAkB1E,SAAgB,gBACd,YACA,KACA,WACK;CACL,MAAM,SAAwB,EAAE;AAChC,MAAK,MAAM,OAAO,OAAO,KAAK,IAAI,EAAE;EAClC,MAAM,YAAY,WAAW;AAC7B,MAAI,cAAc,KAAA,GAAW;GAC3B,MAAM,aAAa,OAAO,KAAK,WAAW,CAAC,MAAM,CAAC,KAAK,KAAK;AAC5D,SAAM,IAAI,MACR,mBAAmB,IAAI,eAAe,UAAU,4BAA4B,aAC7E;;AAEH,SAAO,OAAO,eAAe,UAAU,MAAM,IAAI,MAAM,KAAK,UAAU;;AAExE,QAAO;;AAGT,SAAS,eACP,WACA,GACA,QACA,WACO;AACP,KAAI,MAAM,KACR,QAAO;AAET,SAAQ,WAAR;EACE,KAAK,UACH,QAAO,CAAC,CAAC;EACX,KAAK;EACL,KAAK;EACL,KAAK;AACH,OAAI,OAAO,MAAM,UAAU;AACzB,QAAI,IAAI,OAAO,oBAAoB,IAAI,OAAO,iBAC5C,OAAM,IAAI,sBACR,SAAS,EAAE,OAAO,UAAU,GAAG,OAAO,kCACvC;AAEH,WAAO,OAAO,EAAE;;AAElB,UAAO;EACT,KAAK,OACH,KAAI;AACF,UAAO,KAAK,MAAM,EAAY;WACvB,OAAO;AAGd,SAAM,IAAI,sBACR,4BAA4B,UAAU,GAAG,OAAO,IAFhD,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,IAGtD,EAAC,OAAO,OAAM,CACf;;;;AAKT,IAAa,wBAAb,cAA2C,MAAM;AAEjD,SAAS,aACP,QACA,KACA,SACA,YACS;AACT,MAAK,MAAM,MAAM,WACf,KAAI,OAAO,QAAQ,IAAI,IACrB,QAAO;AAGX,QAAO,OAAO,KAAK,QAAQ,CAAC,SAAS,WAAW;;AAGlD,SAAS,iBACP,SACA,YACA,KACmB;AACnB,QAAO,eAAe,SAAS,WAAW,CAAC,KAAI,MAC7C,aAAa,IAAI,IAAI,QAAQ,GAAG,KAAK,CACtC;;AAGH,SAAS,eACP,SACA,YACA;AACA,QAAO,OAAO,KAAK,QAAQ,CAAC,QAAO,MAAK,CAAC,WAAW,SAAS,EAAE,CAAC;;AAGlE,UAAU,mBAAmB,QAAsB,aAA4B;AAC7E,MAAK,MAAM,KAAK,QAAQ;AACtB,MAAI,aAAa,CACf,OAAM;AAER,QAAM"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@rocicorp/zero",
3
- "version": "1.0.0",
3
+ "version": "1.1.0",
4
4
  "description": "Zero is a web framework for serverless web development.",
5
5
  "author": "Rocicorp, Inc.",
6
6
  "repository": {
@@ -79,17 +79,18 @@
79
79
  },
80
80
  "devDependencies": {
81
81
  "@op-engineering/op-sqlite": ">=15",
82
- "@vitest/runner": "4.1.0",
82
+ "@vitest/runner": "4.1.2",
83
83
  "analyze-query": "0.0.0",
84
84
  "ast-to-zql": "0.0.0",
85
85
  "expo-sqlite": ">=15",
86
86
  "replicache": "15.2.1",
87
87
  "shared": "0.0.0",
88
+ "syncpack": "^14.2.1",
88
89
  "typedoc": "^0.28.17",
89
90
  "typedoc-plugin-markdown": "^4.10.0",
90
91
  "typescript": "~5.9.3",
91
- "vite": "8.0.0",
92
- "vitest": "4.1.0",
92
+ "vite": "8.0.3",
93
+ "vitest": "4.1.2",
93
94
  "zero-cache": "0.0.0",
94
95
  "zero-client": "0.0.0",
95
96
  "zero-pg": "0.0.0",
@@ -100,8 +101,8 @@
100
101
  "zqlite": "0.0.0"
101
102
  },
102
103
  "peerDependencies": {
103
- "expo-sqlite": ">=15",
104
- "@op-engineering/op-sqlite": ">=15"
104
+ "@op-engineering/op-sqlite": ">=15",
105
+ "expo-sqlite": ">=15"
105
106
  },
106
107
  "peerDependenciesMeta": {
107
108
  "expo-sqlite": {