@rocicorp/zero 0.25.0-canary.14 → 0.25.0-canary.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/out/shared/src/deep-merge.d.ts +6 -4
- package/out/shared/src/deep-merge.d.ts.map +1 -1
- package/out/shared/src/deep-merge.js +2 -1
- package/out/shared/src/deep-merge.js.map +1 -1
- package/out/shared/src/record-proxy.d.ts +13 -0
- package/out/shared/src/record-proxy.d.ts.map +1 -0
- package/out/shared/src/record-proxy.js +59 -0
- package/out/shared/src/record-proxy.js.map +1 -0
- package/out/z2s/src/compiler.d.ts.map +1 -1
- package/out/z2s/src/compiler.js +4 -2
- package/out/z2s/src/compiler.js.map +1 -1
- package/out/zero/package.json.js +1 -1
- package/out/zero/src/pg.js +4 -3
- package/out/zero/src/server.js +4 -3
- package/out/zero/src/zero.js +11 -3
- package/out/zero/src/zero.js.map +1 -1
- package/out/zero-cache/src/auth/write-authorizer.d.ts.map +1 -1
- package/out/zero-cache/src/auth/write-authorizer.js +20 -13
- package/out/zero-cache/src/auth/write-authorizer.js.map +1 -1
- package/out/zero-cache/src/config/zero-config.d.ts +16 -0
- package/out/zero-cache/src/config/zero-config.d.ts.map +1 -1
- package/out/zero-cache/src/config/zero-config.js +28 -0
- package/out/zero-cache/src/config/zero-config.js.map +1 -1
- package/out/zero-cache/src/server/otel-diag-logger.d.ts.map +1 -1
- package/out/zero-cache/src/server/otel-diag-logger.js +1 -22
- package/out/zero-cache/src/server/otel-diag-logger.js.map +1 -1
- package/out/zero-cache/src/server/otel-start.d.ts.map +1 -1
- package/out/zero-cache/src/server/otel-start.js +1 -5
- package/out/zero-cache/src/server/otel-start.js.map +1 -1
- package/out/zero-cache/src/server/syncer.d.ts.map +1 -1
- package/out/zero-cache/src/server/syncer.js +6 -1
- package/out/zero-cache/src/server/syncer.js.map +1 -1
- package/out/zero-cache/src/services/litestream/commands.js +3 -2
- package/out/zero-cache/src/services/litestream/commands.js.map +1 -1
- package/out/zero-cache/src/services/view-syncer/pipeline-driver.d.ts +8 -9
- package/out/zero-cache/src/services/view-syncer/pipeline-driver.d.ts.map +1 -1
- package/out/zero-cache/src/services/view-syncer/pipeline-driver.js +17 -11
- package/out/zero-cache/src/services/view-syncer/pipeline-driver.js.map +1 -1
- package/out/zero-cache/src/services/view-syncer/snapshotter.d.ts +2 -2
- package/out/zero-cache/src/services/view-syncer/snapshotter.d.ts.map +1 -1
- package/out/zero-cache/src/services/view-syncer/snapshotter.js +19 -4
- package/out/zero-cache/src/services/view-syncer/snapshotter.js.map +1 -1
- package/out/zero-cache/src/services/view-syncer/view-syncer.d.ts.map +1 -1
- package/out/zero-cache/src/services/view-syncer/view-syncer.js +1 -7
- package/out/zero-cache/src/services/view-syncer/view-syncer.js.map +1 -1
- package/out/zero-client/src/client/crud-impl.d.ts +11 -0
- package/out/zero-client/src/client/crud-impl.d.ts.map +1 -0
- package/out/zero-client/src/client/crud-impl.js +102 -0
- package/out/zero-client/src/client/crud-impl.js.map +1 -0
- package/out/zero-client/src/client/crud.d.ts +7 -40
- package/out/zero-client/src/client/crud.d.ts.map +1 -1
- package/out/zero-client/src/client/crud.js +21 -107
- package/out/zero-client/src/client/crud.js.map +1 -1
- package/out/zero-client/src/client/custom.d.ts +2 -2
- package/out/zero-client/src/client/custom.d.ts.map +1 -1
- package/out/zero-client/src/client/custom.js +5 -50
- package/out/zero-client/src/client/custom.js.map +1 -1
- package/out/zero-client/src/client/ivm-branch.d.ts.map +1 -1
- package/out/zero-client/src/client/ivm-branch.js +20 -13
- package/out/zero-client/src/client/ivm-branch.js.map +1 -1
- package/out/zero-client/src/client/make-mutate-property.d.ts +1 -1
- package/out/zero-client/src/client/make-mutate-property.d.ts.map +1 -1
- package/out/zero-client/src/client/make-mutate-property.js.map +1 -1
- package/out/zero-client/src/client/make-replicache-mutators.d.ts.map +1 -1
- package/out/zero-client/src/client/make-replicache-mutators.js +14 -7
- package/out/zero-client/src/client/make-replicache-mutators.js.map +1 -1
- package/out/zero-client/src/client/version.js +1 -1
- package/out/zero-client/src/client/zero.d.ts +9 -7
- package/out/zero-client/src/client/zero.d.ts.map +1 -1
- package/out/zero-client/src/client/zero.js +11 -4
- package/out/zero-client/src/client/zero.js.map +1 -1
- package/out/zero-client/src/mod.d.ts +7 -6
- package/out/zero-client/src/mod.d.ts.map +1 -1
- package/out/zero-react/src/use-query.d.ts +6 -4
- package/out/zero-react/src/use-query.d.ts.map +1 -1
- package/out/zero-react/src/use-query.js +2 -1
- package/out/zero-react/src/use-query.js.map +1 -1
- package/out/zero-server/src/custom.d.ts +44 -5
- package/out/zero-server/src/custom.d.ts.map +1 -1
- package/out/zero-server/src/custom.js +98 -35
- package/out/zero-server/src/custom.js.map +1 -1
- package/out/zero-server/src/zql-database.d.ts +1 -1
- package/out/zero-server/src/zql-database.d.ts.map +1 -1
- package/out/zero-server/src/zql-database.js +5 -13
- package/out/zero-server/src/zql-database.js.map +1 -1
- package/out/zero-solid/src/solid-view.d.ts +7 -2
- package/out/zero-solid/src/solid-view.d.ts.map +1 -1
- package/out/zero-solid/src/solid-view.js +3 -32
- package/out/zero-solid/src/solid-view.js.map +1 -1
- package/out/zero-solid/src/use-query.d.ts +5 -3
- package/out/zero-solid/src/use-query.d.ts.map +1 -1
- package/out/zero-solid/src/use-query.js +5 -1
- package/out/zero-solid/src/use-query.js.map +1 -1
- package/out/zero-types/src/schema.d.ts +4 -4
- package/out/zql/src/builder/builder.d.ts.map +1 -1
- package/out/zql/src/builder/builder.js +1 -11
- package/out/zql/src/builder/builder.js.map +1 -1
- package/out/zql/src/error.js +1 -10
- package/out/zql/src/error.js.map +1 -1
- package/out/zql/src/ivm/array-view.d.ts +1 -1
- package/out/zql/src/ivm/array-view.d.ts.map +1 -1
- package/out/zql/src/ivm/array-view.js +2 -0
- package/out/zql/src/ivm/array-view.js.map +1 -1
- package/out/zql/src/ivm/exists.d.ts +3 -2
- package/out/zql/src/ivm/exists.d.ts.map +1 -1
- package/out/zql/src/ivm/exists.js +25 -23
- package/out/zql/src/ivm/exists.js.map +1 -1
- package/out/zql/src/ivm/fan-in.d.ts +3 -3
- package/out/zql/src/ivm/fan-in.d.ts.map +1 -1
- package/out/zql/src/ivm/fan-in.js +6 -5
- package/out/zql/src/ivm/fan-in.js.map +1 -1
- package/out/zql/src/ivm/fan-out.d.ts +2 -2
- package/out/zql/src/ivm/fan-out.d.ts.map +1 -1
- package/out/zql/src/ivm/fan-out.js +5 -5
- package/out/zql/src/ivm/fan-out.js.map +1 -1
- package/out/zql/src/ivm/filter-operators.d.ts +5 -5
- package/out/zql/src/ivm/filter-operators.d.ts.map +1 -1
- package/out/zql/src/ivm/filter-operators.js +8 -8
- package/out/zql/src/ivm/filter-operators.js.map +1 -1
- package/out/zql/src/ivm/filter-push.d.ts +2 -1
- package/out/zql/src/ivm/filter-push.d.ts.map +1 -1
- package/out/zql/src/ivm/filter-push.js +5 -5
- package/out/zql/src/ivm/filter-push.js.map +1 -1
- package/out/zql/src/ivm/filter.d.ts +2 -2
- package/out/zql/src/ivm/filter.d.ts.map +1 -1
- package/out/zql/src/ivm/filter.js +4 -4
- package/out/zql/src/ivm/filter.js.map +1 -1
- package/out/zql/src/ivm/flipped-join.d.ts.map +1 -1
- package/out/zql/src/ivm/flipped-join.js +100 -83
- package/out/zql/src/ivm/flipped-join.js.map +1 -1
- package/out/zql/src/ivm/join.d.ts.map +1 -1
- package/out/zql/src/ivm/join.js +52 -50
- package/out/zql/src/ivm/join.js.map +1 -1
- package/out/zql/src/ivm/maybe-split-and-push-edit-change.d.ts +1 -1
- package/out/zql/src/ivm/maybe-split-and-push-edit-change.d.ts.map +1 -1
- package/out/zql/src/ivm/maybe-split-and-push-edit-change.js +4 -4
- package/out/zql/src/ivm/maybe-split-and-push-edit-change.js.map +1 -1
- package/out/zql/src/ivm/memory-source.d.ts +3 -3
- package/out/zql/src/ivm/memory-source.d.ts.map +1 -1
- package/out/zql/src/ivm/memory-source.js +7 -4
- package/out/zql/src/ivm/memory-source.js.map +1 -1
- package/out/zql/src/ivm/operator.d.ts +10 -3
- package/out/zql/src/ivm/operator.d.ts.map +1 -1
- package/out/zql/src/ivm/operator.js.map +1 -1
- package/out/zql/src/ivm/push-accumulated.d.ts +1 -1
- package/out/zql/src/ivm/push-accumulated.d.ts.map +1 -1
- package/out/zql/src/ivm/push-accumulated.js +8 -8
- package/out/zql/src/ivm/push-accumulated.js.map +1 -1
- package/out/zql/src/ivm/skip.d.ts +1 -1
- package/out/zql/src/ivm/skip.d.ts.map +1 -1
- package/out/zql/src/ivm/skip.js +8 -3
- package/out/zql/src/ivm/skip.js.map +1 -1
- package/out/zql/src/ivm/source.d.ts +15 -7
- package/out/zql/src/ivm/source.d.ts.map +1 -1
- package/out/zql/src/ivm/stream.d.ts +2 -0
- package/out/zql/src/ivm/stream.d.ts.map +1 -1
- package/out/zql/src/ivm/stream.js +5 -14
- package/out/zql/src/ivm/stream.js.map +1 -1
- package/out/zql/src/ivm/take.d.ts +1 -1
- package/out/zql/src/ivm/take.d.ts.map +1 -1
- package/out/zql/src/ivm/take.js +164 -147
- package/out/zql/src/ivm/take.js.map +1 -1
- package/out/zql/src/ivm/union-fan-in.d.ts +2 -2
- package/out/zql/src/ivm/union-fan-in.d.ts.map +1 -1
- package/out/zql/src/ivm/union-fan-in.js +7 -7
- package/out/zql/src/ivm/union-fan-in.js.map +1 -1
- package/out/zql/src/ivm/union-fan-out.d.ts +1 -1
- package/out/zql/src/ivm/union-fan-out.d.ts.map +1 -1
- package/out/zql/src/ivm/union-fan-out.js +3 -3
- package/out/zql/src/ivm/union-fan-out.js.map +1 -1
- package/out/zql/src/mutate/crud.d.ts +139 -0
- package/out/zql/src/mutate/crud.d.ts.map +1 -0
- package/out/zql/src/mutate/crud.js +53 -0
- package/out/zql/src/mutate/crud.js.map +1 -0
- package/out/zql/src/mutate/custom.d.ts +12 -53
- package/out/zql/src/mutate/custom.d.ts.map +1 -1
- package/out/zql/src/mutate/custom.js +1 -5
- package/out/zql/src/mutate/custom.js.map +1 -1
- package/out/zql/src/mutate/mutator-registry.d.ts +33 -32
- package/out/zql/src/mutate/mutator-registry.d.ts.map +1 -1
- package/out/zql/src/mutate/mutator-registry.js +26 -25
- package/out/zql/src/mutate/mutator-registry.js.map +1 -1
- package/out/zql/src/mutate/mutator.d.ts +48 -58
- package/out/zql/src/mutate/mutator.d.ts.map +1 -1
- package/out/zql/src/mutate/mutator.js +12 -8
- package/out/zql/src/mutate/mutator.js.map +1 -1
- package/out/zql/src/planner/planner-builder.d.ts +2 -1
- package/out/zql/src/planner/planner-builder.d.ts.map +1 -1
- package/out/zql/src/planner/planner-builder.js +5 -5
- package/out/zql/src/planner/planner-builder.js.map +1 -1
- package/out/zql/src/planner/planner-graph.d.ts +3 -1
- package/out/zql/src/planner/planner-graph.d.ts.map +1 -1
- package/out/zql/src/planner/planner-graph.js +5 -5
- package/out/zql/src/planner/planner-graph.js.map +1 -1
- package/out/zql/src/query/abstract-query.d.ts +2 -3
- package/out/zql/src/query/abstract-query.d.ts.map +1 -1
- package/out/zql/src/query/abstract-query.js +0 -3
- package/out/zql/src/query/abstract-query.js.map +1 -1
- package/out/zql/src/query/create-builder.d.ts.map +1 -1
- package/out/zql/src/query/create-builder.js +7 -36
- package/out/zql/src/query/create-builder.js.map +1 -1
- package/out/zql/src/query/measure-push-operator.d.ts +1 -1
- package/out/zql/src/query/measure-push-operator.d.ts.map +1 -1
- package/out/zql/src/query/measure-push-operator.js +2 -2
- package/out/zql/src/query/measure-push-operator.js.map +1 -1
- package/out/zql/src/query/query-impl.d.ts +2 -2
- package/out/zql/src/query/query-impl.d.ts.map +1 -1
- package/out/zql/src/query/query-impl.js.map +1 -1
- package/out/zql/src/query/query-registry.d.ts +87 -79
- package/out/zql/src/query/query-registry.d.ts.map +1 -1
- package/out/zql/src/query/query-registry.js +44 -38
- package/out/zql/src/query/query-registry.js.map +1 -1
- package/out/zql/src/query/query.d.ts +3 -18
- package/out/zql/src/query/query.d.ts.map +1 -1
- package/out/zql/src/query/runnable-query-impl.d.ts +2 -2
- package/out/zql/src/query/runnable-query-impl.d.ts.map +1 -1
- package/out/zql/src/query/runnable-query-impl.js.map +1 -1
- package/out/zql/src/query/static-query.d.ts +1 -0
- package/out/zql/src/query/static-query.d.ts.map +1 -1
- package/out/zql/src/query/static-query.js +2 -2
- package/out/zql/src/query/static-query.js.map +1 -1
- package/out/zqlite/src/internal/sql-inline.d.ts +13 -0
- package/out/zqlite/src/internal/sql-inline.d.ts.map +1 -0
- package/out/zqlite/src/internal/sql-inline.js +45 -0
- package/out/zqlite/src/internal/sql-inline.js.map +1 -0
- package/out/zqlite/src/sqlite-cost-model.d.ts.map +1 -1
- package/out/zqlite/src/sqlite-cost-model.js +2 -2
- package/out/zqlite/src/sqlite-cost-model.js.map +1 -1
- package/out/zqlite/src/table-source.d.ts +3 -2
- package/out/zqlite/src/table-source.d.ts.map +1 -1
- package/out/zqlite/src/table-source.js +5 -2
- package/out/zqlite/src/table-source.js.map +1 -1
- package/package.json +1 -1
package/out/zql/src/ivm/memory-source.js
@@ -190,8 +190,11 @@ class MemorySource {
     );
     yield* conn.filters ? generateWithFilter(withConstraint, conn.filters.predicate) : withConstraint;
   }
-  push(change) {
-    for (const _ of this.genPush(change)) {
+  *push(change) {
+    for (const result of this.genPush(change)) {
+      if (result === "yield") {
+        yield result;
+      }
     }
   }
   *genPush(change) {
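This hunk turns `MemorySource.push` from a void method that eagerly drained `genPush` into a generator that forwards only the `'yield'` checkpoints to its caller. A minimal sketch of that shape, using hypothetical names (`TinySource` is not the package's real `MemorySource`), to show the key consequence: `push` now does nothing until the returned stream is iterated.

```ts
// Minimal sketch with hypothetical names (TinySource is not the package's
// MemorySource): models the generator-based push() introduced above.
type Row = Record<string, unknown>;
type SourceChange = {type: 'add' | 'remove'; row: Row};

class TinySource {
  readonly #rows: Row[] = [];

  // Stand-in for genPush(): applies the change, then yields a checkpoint.
  *#genPush(change: SourceChange): Generator<'yield'> {
    if (change.type === 'add') {
      this.#rows.push(change.row);
    } else {
      const i = this.#rows.indexOf(change.row);
      if (i !== -1) this.#rows.splice(i, 1);
    }
    yield 'yield';
  }

  // New shape: push() is itself a generator and forwards only the 'yield'
  // markers. Nothing happens until the returned stream is iterated.
  *push(change: SourceChange): Generator<'yield'> {
    for (const result of this.#genPush(change)) {
      if (result === 'yield') {
        yield result;
      }
    }
  }

  get size(): number {
    return this.#rows.length;
  }
}

const source = new TinySource();
const stream = source.push({type: 'add', row: {id: 1}});
console.log(source.size); // 0 — generators are lazy; the push has not run yet
for (const _ of stream) {
  // each 'yield' is a checkpoint where a cooperative scheduler could defer
}
console.log(source.size); // 1 — the change was applied while draining
```

Under this reading, a former fire-and-forget `source.push(change)` is a no-op unless the returned stream is drained, and the forwarded `'yield'` markers appear to give callers a place to interleave other work between connection pushes.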
@@ -342,8 +345,8 @@ function* genPush(connections, change, exists, setOverlay, pushEpoch) {
           relationships: {}
         }
       };
-      filterPush(outputChange, output, input, filters?.predicate);
-      yield;
+      yield* filterPush(outputChange, output, input, filters?.predicate);
+      yield void 0;
     }
   }
   setOverlay(void 0);
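The second hunk switches `genPush` from calling `filterPush(...)` and then doing a bare `yield;` to delegating with `yield* filterPush(...)` and yielding `undefined` (`void 0` in the compiled output) afterwards. Since `filter-push.js` also changed in this release (+5 -5), `filterPush` itself has presumably become a generator; the sketch below uses illustrative names only and shows why the delegation matters in that case: calling a generator function without iterating it never runs its body, while `yield*` runs it and forwards its yields.

```ts
// Illustrative stand-in, not the package's filterPush: a generator with a
// visible side effect, to show what `yield*` changes once the callee is a
// generator function.
function* filterPushLike(label: string): Generator<'yield'> {
  console.log(`pushed ${label}`); // stands in for the real push work
  yield 'yield';
}

// Plain call: creates a generator object but never iterates it, so the
// body above never executes; only the bare checkpoint is yielded.
function* plainCall(): Generator<'yield' | undefined> {
  filterPushLike('a');
  yield undefined;
}

// Delegated call (the new shape): runs filterPushLike's body, forwards its
// 'yield' checkpoints, then emits the trailing checkpoint (`yield void 0`
// in the compiled output).
function* delegatedCall(): Generator<'yield' | undefined> {
  yield* filterPushLike('b');
  yield undefined;
}

console.log([...plainCall()]);     // no "pushed a" log; yields [ undefined ]
console.log([...delegatedCall()]); // logs "pushed b"; yields [ 'yield', undefined ]
```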
package/out/zql/src/ivm/memory-source.js.map
@@ -1 +1 @@
(single-line source map regenerated alongside the change above; its embedded sourcesContent for memory-source.ts now declares `*push(change: SourceChange): Stream<'yield'>`, which forwards the `'yield'` results from `genPush`, and `genPush` delegates with `yield* filterPush(...)` followed by `yield undefined`, replacing the previous plain `filterPush(...)` call and bare `yield;`)
undefined : row;\n\n return {\n add: undefinedIfDoesntMatchFilter(add),\n remove: undefinedIfDoesntMatchFilter(remove),\n };\n}\n\nexport function* generateWithOverlayInner(\n rowIterator: Iterable<Row>,\n overlays: Overlays,\n compare: (r1: Row, r2: Row) => number,\n) {\n let addOverlayYielded = false;\n let removeOverlaySkipped = false;\n for (const row of rowIterator) {\n if (!addOverlayYielded && overlays.add) {\n const cmp = compare(overlays.add, row);\n if (cmp < 0) {\n addOverlayYielded = true;\n yield {row: overlays.add, relationships: {}};\n }\n }\n\n if (!removeOverlaySkipped && overlays.remove) {\n const cmp = compare(overlays.remove, row);\n if (cmp === 0) {\n removeOverlaySkipped = true;\n continue;\n }\n }\n yield {row, relationships: {}};\n }\n\n if (!addOverlayYielded && overlays.add) {\n yield {row: overlays.add, relationships: {}};\n }\n}\n\n/**\n * A location to begin scanning an index from. Can either be a specific value\n * or the min or max possible value for the type. This is used to start a scan\n * at the beginning of the rows matching a constraint.\n */\ntype Bound = Value | MinValue | MaxValue;\ntype RowBound = Record<string, Bound>;\nconst minValue = Symbol('min-value');\ntype MinValue = typeof minValue;\nconst maxValue = Symbol('max-value');\ntype MaxValue = typeof maxValue;\n\nfunction makeBoundComparator(sort: Ordering) {\n return (a: RowBound, b: RowBound) => {\n // Hot! Do not use destructuring\n for (const entry of sort) {\n const key = entry[0];\n const cmp = compareBounds(a[key], b[key]);\n if (cmp !== 0) {\n return entry[1] === 'asc' ? cmp : -cmp;\n }\n }\n return 0;\n };\n}\n\nfunction compareBounds(a: Bound, b: Bound): number {\n if (a === b) {\n return 0;\n }\n if (a === minValue) {\n return -1;\n }\n if (b === minValue) {\n return 1;\n }\n if (a === maxValue) {\n return 1;\n }\n if (b === maxValue) {\n return -1;\n }\n return compareValues(a, b);\n}\n\nfunction* generateRows(\n data: BTreeSet<Row>,\n scanStart: RowBound | undefined,\n reverse: boolean | undefined,\n) {\n yield* data[reverse ? 'valuesFromReversed' : 'valuesFrom'](\n scanStart as Row | undefined,\n );\n}\n\nexport function stringify(change: SourceChange) {\n return JSON.stringify(change, (_, v) =>\n typeof v === 'bigint' ? 
v.toString() : v,\n );\n}\n"],"names":[],"mappings":";;;;;;;;;;AA2FO,MAAM,aAA+B;AAAA,EACjC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,+BAAmC,IAAA;AAAA,EACnC,eAA6B,CAAA;AAAA,EAEtC;AAAA,EACA,aAAa;AAAA,EAEb,YACE,WACA,SACA,YACA,kBACA;AACA,SAAK,aAAa;AAClB,SAAK,WAAW;AAChB,SAAK,cAAc;AACnB,SAAK,oBAAoB,WAAW,IAAI,OAAK,CAAC,GAAG,KAAK,CAAC;AACvD,UAAM,aAAa,oBAAoB,KAAK,iBAAiB;AAC7D,SAAK,SAAS,IAAI,KAAK,UAAU,KAAK,iBAAiB,GAAG;AAAA,MACxD;AAAA,MACA,MAAM,oBAAoB,IAAI,SAAc,UAAU;AAAA,MACtD,4BAAY,IAAA;AAAA,IAAI,CACjB;AAAA,EACH;AAAA,EAEA,IAAI,cAAc;AAChB,WAAO;AAAA,MACL,MAAM,KAAK;AAAA,MACX,SAAS,KAAK;AAAA,MACd,YAAY,KAAK;AAAA,IAAA;AAAA,EAErB;AAAA,EAEA,OAAO;AACL,UAAM,eAAe,KAAK,iBAAA;AAC1B,WAAO,IAAI;AAAA,MACT,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,aAAa,KAAK,MAAA;AAAA,IAAM;AAAA,EAE5B;AAAA,EAEA,IAAI,OAAsB;AACxB,WAAO,KAAK,mBAAmB;AAAA,EACjC;AAAA,EAEA,WAAW,YAAsC;AAC/C,WAAO;AAAA,MACL,WAAW,KAAK;AAAA,MAChB,SAAS,KAAK;AAAA,MACd,YAAY,KAAK;AAAA,MACjB,MAAM,WAAW;AAAA,MACjB,QAAQ;AAAA,MACR,eAAe,CAAA;AAAA,MACf,UAAU;AAAA,MACV,aAAa,WAAW;AAAA,IAAA;AAAA,EAE5B;AAAA,EAEA,QACE,MACA,SACA,eACa;AACb,UAAM,qBAAqB,iBAAiB,OAAO;AAEnD,UAAM,QAAqB;AAAA,MACzB,WAAW,MAAM;AAAA,MACjB,OAAO,CAAA,QAAO,KAAK,OAAO,KAAK,UAAU;AAAA,MACzC,WAAW,CAAA,WAAU;AACnB,mBAAW,SAAS;AAAA,MACtB;AAAA,MACA,SAAS,MAAM;AACb,aAAK,YAAY,KAAK;AAAA,MACxB;AAAA,MACA,qBAAqB,CAAC,mBAAmB;AAAA,IAAA;AAG3C,UAAM,aAAyB;AAAA,MAC7B;AAAA,MACA,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,MACA,aAAa,eAAe,IAAI;AAAA,MAChC,SAAS,mBAAmB,UACxB;AAAA,QACE,WAAW,mBAAmB;AAAA,QAC9B,WAAW,gBAAgB,mBAAmB,OAAO;AAAA,MAAA,IAEvD;AAAA,MACJ,iBAAiB;AAAA,IAAA;AAEnB,UAAM,SAAS,KAAK,WAAW,UAAU;AACzC,6BAAyB,MAAM,KAAK,WAAW;AAC/C,SAAK,aAAa,KAAK,UAAU;AACjC,WAAO;AAAA,EACT;AAAA,EAEA,YAAY,OAAoB;AAC9B,UAAM,MAAM,KAAK,aAAa,UAAU,CAAA,MAAK,EAAE,UAAU,KAAK;AAC9D,WAAO,QAAQ,IAAI,sBAAsB;AACzC,SAAK,aAAa,OAAO,KAAK,CAAC;AAAA,EASjC;AAAA,EAEA,mBAA0B;AACxB,UAAM,QAAQ,KAAK,SAAS,IAAI,KAAK,UAAU,KAAK,iBAAiB,CAAC;AACtE,WAAO,OAAO,yBAAyB;AACvC,WAAO;AAAA,EACT;AAAA,EAEA,kBAAkB,MAAgB,QAA2B;AAC3D,UAAM,MAAM,KAAK,UAAU,IAAI;AAC/B,UAAM,QAAQ,KAAK,SAAS,IAAI,GAAG;AAGnC,QAAI,OAAO;AACT,YAAM,OAAO,IAAI,MAAM;AACvB,aAAO;AAAA,IACT;AAEA,UAAM,aAAa,oBAAoB,IAAI;AAS3C,UAAM,OAAO,IAAI,SAAc,UAAU;AAIzC,eAAW,OAAO,KAAK,iBAAA,EAAmB,MAAM;AAC9C,WAAK,IAAI,GAAG;AAAA,IACd;AAEA,UAAM,WAAW,EAAC,YAAY,MAAM,4BAAY,IAAI,CAAC,MAAM,CAAC,EAAA;AAC5D,SAAK,SAAS,IAAI,KAAK,QAAQ;AAC/B,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,eAAyB;AACvB,WAAO,CAAC,GAAG,KAAK,SAAS,MAAM;AAAA,EACjC;AAAA,EAEA,CAAC,OAAO,KAAmB,MAA0C;AACnE,UAAM,EAAC,MAAM,eAAe,YAAA,IAAe;AAC3C,UAAM,uBAAuB,CAAC,IAAS,OACrC,YAAY,IAAI,EAAE,KAAK,IAAI,UAAU,KAAK;AAE5C,UAAM,eAAe;AAAA,MACnB,KAAK,SAAS;AAAA,MACd,KAAK;AAAA,IAAA;AAIP,UAAM,sBAAsB,gBAAgB,IAAI;AAGhD,UAAM,YAAyB,CAAA;AAC/B,QAAI,qBAAqB;AACvB,iBAAW,OAAO,OAAO,KAAK,mBAAmB,GAAG;AAClD,kBAAU,KAAK,CAAC,KAAK,KAAK,CAAC;AAAA,MAC7B;AAAA,IACF;AAKA,QACE,KAAK,YAAY,SAAS,KAC1B,CAAC,uBACD,CAAC,4BAA4B,qBAAqB,KAAK,WAAW,GAClE;AACA,gBAAU,KAAK,GAAG,aAAa;AAAA,IACjC;AAEA,UAAM,QAAQ,KAAK,kBAAkB,WAAW,IAAI;AACpD,UAAM,EAAC,MAAM,YAAY,QAAA,IAAW;AACpC,UAAM,kBAAkB,CAAC,IAAS,OAChC,QAAQ,IAAI,EAAE,KAAK,IAAI,UAAU,KAAK;AAExC,UAAM,UAAU,IAAI,OAAO;AAY3B,QAAI;AAEJ,QAAI,qBAAqB;AACvB,kBAAY,CAAA;AACZ,iBAAW,CAAC,KAAK,GAAG,KAAK,WAAW;AAClC,YAAI,OAAO,qBAAqB,GAAG,GAAG;AACpC,oBAAU,GAAG,IAAI,oBAAoB,GAAG;AAAA,QAC1C,OAAO;AACL,cAAI,IAAI,SAAS;AACf,sBAAU,GAAG,IAAI,QAAQ,QAAQ,WAAW;AAAA,UAC9C,OAAO;AACL,sBAAU,GAAG,IAAI,QAAQ,QAAQ,WAAW;AAAA,UAC9C;AAAA,QACF;AAAA,MACF;AAAA,IACF,OAAO;AACL,kBAAY;AAAA,IACd;AAEA,UAAM,eAAe,aAAa,MAAM,WAAW,IAAI,OAAO;AAC9D,UAAM,cAAc;AAAA,MAClB;AAAA,MACA,eAAe,KAAK,YAAY,IAAI;AAAA;AAAA;AAAA;AAAA,MAIpC,IAAI;AAAA,MACJ,KAAK;AAAA,MACL,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA
AA;AAAA,MAUL;AAAA,MACA,KAAK,SAAS;AAAA,IAAA;AAGhB,UAAM,iBAAiB;AAAA,MACrB;AAAA,QACE,kBAAkB,aAAa,IAAI,OAAO,oBAAoB;AAAA,MAAA;AAAA;AAAA;AAAA,MAIhE,IAAI;AAAA,IAAA;AAGN,WAAO,KAAK,UACR,mBAAmB,gBAAgB,KAAK,QAAQ,SAAS,IACzD;AAAA,EACN;AAAA,EAEA,CAAC,KAAK,QAAuC;AAC3C,eAAW,UAAU,KAAK,QAAQ,MAAM,GAAG;AACzC,UAAI,WAAW,SAAS;AACtB,cAAM;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EAEA,CAAC,QAAQ,QAAsB;AAC7B,UAAM,eAAe,KAAK,iBAAA;AAC1B,UAAM,EAAC,SAAQ;AACf,UAAM,SAAS,CAAC,QAAa,KAAK,IAAI,GAAG;AACzC,UAAM,aAAa,CAAC,MAA4B,KAAK,WAAW;AAChE,UAAM,cAAc,CAAC,MAAoB,KAAK,aAAa,CAAC;AAC5D,WAAO;AAAA,MACL,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,MAAM,EAAE,KAAK;AAAA,IAAA;AAAA,EAEjB;AAAA,EAEA,aAAa,QAAsB;AACjC,eAAW,EAAC,KAAA,KAAS,KAAK,SAAS,UAAU;AAC3C,cAAQ,OAAO,MAAA;AAAA,QACb,KAAK,OAAO;AACV,gBAAM,QAAQ,KAAK,IAAI,OAAO,GAAG;AAEjC,iBAAO,KAAK;AACZ;AAAA,QACF;AAAA,QACA,KAAK,UAAU;AACb,gBAAM,UAAU,KAAK,OAAO,OAAO,GAAG;AAEtC,iBAAO,OAAO;AACd;AAAA,QACF;AAAA,QACA,KAAK,QAAQ;AAKX,gBAAM,UAAU,KAAK,OAAO,OAAO,MAAM;AAEzC,iBAAO,OAAO;AACd,eAAK,IAAI,OAAO,GAAG;AACnB;AAAA,QACF;AAAA,QACA;AACE,sBAAkB;AAAA,MAAA;AAAA,IAExB;AAAA,EACF;AACF;AAEA,UAAU,uBACR,IACA,YACA;AACA,aAAW,QAAQ,IAAI;AACrB,QAAI,cAAc,CAAC,qBAAqB,YAAY,KAAK,GAAG,GAAG;AAC7D;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;AAEA,UAAU,mBAAmB,IAAkB,QAA+B;AAC5E,aAAW,QAAQ,IAAI;AACrB,QAAI,OAAO,KAAK,GAAG,GAAG;AACpB,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAEO,UAAU,6BACf,aACA,QACA,QACA,YACA,aACA,cACA;AACA,MAAI,kBAAkB;AACtB,MAAI,OAAO,SAAS,QAAQ;AAC1B,eAAW,EAAC,cAAA,KAAkB,aAAa;AACzC,UAAI,eAAe;AACjB,mBAAW,OAAO,eAAe;AAC/B,cAAI,CAAC,YAAY,OAAO,IAAI,GAAG,GAAG,OAAO,OAAO,GAAG,CAAC,GAAG;AACrD,8BAAkB;AAClB;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,OAAO,SAAS,UAAU,iBAAiB;AAC7C,WAAO;AAAA,MACL;AAAA,MACA;AAAA,QACE,MAAM;AAAA,QACN,KAAK,OAAO;AAAA,MAAA;AAAA,MAEd;AAAA,MACA;AAAA,MACA;AAAA,MACA,aAAA;AAAA,IAAa;AAEf,WAAO;AAAA,MACL;AAAA,MACA;AAAA,QACE,MAAM;AAAA,QACN,KAAK,OAAO;AAAA,MAAA;AAAA,MAEd;AAAA,MACA;AAAA,MACA;AAAA,MACA,aAAA;AAAA,IAAa;AAAA,EAEjB,OAAO;AACL,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,aAAA;AAAA,IAAa;AAAA,EAEjB;AACF;AAEA,UAAU,gBACR,aACA,QACA,QACA,YACA,aACA,WACA;AACA,aAAW,KAAK,QAAQ,aAAa,QAAQ,QAAQ,YAAY,SAAS,GAAG;AAC3E,UAAM;AAAA,EACR;AACA,cAAY,MAAM;AACpB;AAEA,UAAU,QACR,aACA,QACA,QACA,YACA,WACA;AACA,UAAQ,OAAO,MAAA;AAAA,IACb,KAAK;AACH;AAAA,QACE,CAAC,OAAO,OAAO,GAAG;AAAA,QAClB,MAAM,sBAAsB,UAAU,MAAM,CAAC;AAAA,MAAA;AAE/C;AAAA,IACF,KAAK;AACH,aAAO,OAAO,OAAO,GAAG,GAAG,MAAM,iBAAiB,UAAU,MAAM,CAAC,EAAE;AACrE;AAAA,IACF,KAAK;AACH,aAAO,OAAO,OAAO,MAAM,GAAG,MAAM,iBAAiB,UAAU,MAAM,CAAC,EAAE;AACxE;AAAA,IACF;AACE,kBAAkB;AAAA,EAAA;AAGtB,aAAW,QAAQ,aAAa;AAC9B,UAAM,EAAC,QAAQ,SAAS,MAAA,IAAS;AACjC,QAAI,QAAQ;AACV,WAAK,kBAAkB;AACvB,iBAAW,EAAC,OAAO,WAAW,OAAA,CAAO;AACrC,YAAM,eACJ,OAAO,SAAS,SACZ;AAAA,QACE,MAAM,OAAO;AAAA,QACb,SAAS;AAAA,UACP,KAAK,OAAO;AAAA,UACZ,eAAe,CAAA;AAAA,QAAC;AAAA,QAElB,MAAM;AAAA,UACJ,KAAK,OAAO;AAAA,UACZ,eAAe,CAAA;AAAA,QAAC;AAAA,MAClB,IAEF;AAAA,QACE,MAAM,OAAO;AAAA,QACb,MAAM;AAAA,UACJ,KAAK,OAAO;AAAA,UACZ,eAAe,CAAA;AAAA,QAAC;AAAA,MAClB;AAER,aAAO,WAAW,cAAc,QAAQ,OAAO,SAAS,SAAS;AACjE,YAAM;AAAA,IACR;AAAA,EACF;AAEA,aAAW,MAAS;AACtB;AAEO,UAAU,kBACf,OACA,OACA,SACwB;AACxB,MAAI,CAAC,OAAO;AACV,WAAO;AACP;AAAA,EACF;AACA,MAAI,UAAU;AACd,aAAW,QAAQ,OAAO;AACxB,QAAI,SAAS,SAAS;AACpB,YAAM;AACN;AAAA,IACF;AACA,QAAI,CAAC,SAAS;AACZ,UAAI,MAAM,UAAU,MAAM;AACxB,YAAI,QAAQ,KAAK,KAAK,MAAM,GAAG,KAAK,GAAG;AACrC,oBAAU;AAAA,QACZ;AAAA,MACF,WAAW,MAAM,UAAU,SAAS;AAClC,YAAI,QAAQ,KAAK,KAAK,MAAM,GAAG,IAAI,GAAG;AACpC,oBAAU;AAAA,QACZ;AAAA,MACF;AAAA,IACF;AACA,QAAI,SAAS;AACX,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAcO,UAAU,oBACf,SACA,MACA,YACA,SACA,iBACA,SACA,iBACA;AACA,MAAI,
iBAAsC;AAC1C,MAAI,WAAW,mBAAmB,QAAQ,OAAO;AAC/C,qBAAiB;AAAA,EACnB;AACA,QAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAEF,SAAO,yBAAyB,MAAM,UAAU,OAAO;AACzD;AAEA,SAAS,gBACP,SACA,YACA,SACA,SACA,iBACU;AACV,MAAI,WAAqB;AAAA,IACvB,KAAK;AAAA,IACL,QAAQ;AAAA,EAAA;AAEV,UAAQ,SAAS,OAAO,MAAA;AAAA,IACtB,KAAK;AACH,iBAAW;AAAA,QACT,KAAK,QAAQ,OAAO;AAAA,QACpB,QAAQ;AAAA,MAAA;AAEV;AAAA,IACF,KAAK;AACH,iBAAW;AAAA,QACT,KAAK;AAAA,QACL,QAAQ,QAAQ,OAAO;AAAA,MAAA;AAEzB;AAAA,IACF,KAAK;AACH,iBAAW;AAAA,QACT,KAAK,QAAQ,OAAO;AAAA,QACpB,QAAQ,QAAQ,OAAO;AAAA,MAAA;AAEzB;AAAA,EAAA;AAGJ,MAAI,SAAS;AACX,eAAW,mBAAmB,UAAU,SAAS,OAAO;AAAA,EAC1D;AAEA,MAAI,YAAY;AACd,eAAW,sBAAsB,UAAU,UAAU;AAAA,EACvD;AAEA,MAAI,iBAAiB;AACnB,eAAW,2BAA2B,UAAU,eAAe;AAAA,EACjE;AAEA,SAAO;AACT;AAIA,SAAS,mBACP,EAAC,KAAK,OAAA,GACN,SACA,SACU;AACV,QAAM,2BAA2B,CAAC,QAChC,QAAQ,UAAa,QAAQ,KAAK,OAAO,IAAI,IAAI,SAAY;AAC/D,SAAO;AAAA,IACL,KAAK,yBAAyB,GAAG;AAAA,IACjC,QAAQ,yBAAyB,MAAM;AAAA,EAAA;AAE3C;AAIA,SAAS,sBACP,EAAC,KAAK,OAAA,GACN,YACU;AACV,QAAM,mCAAmC,CAAC,QACxC,QAAQ,UAAa,CAAC,qBAAqB,YAAY,GAAG,IACtD,SACA;AAEN,SAAO;AAAA,IACL,KAAK,iCAAiC,GAAG;AAAA,IACzC,QAAQ,iCAAiC,MAAM;AAAA,EAAA;AAEnD;AAEA,SAAS,2BACP,EAAC,KAAK,OAAA,GACN,iBACU;AACV,QAAM,+BAA+B,CAAC,QACpC,QAAQ,UAAa,CAAC,gBAAgB,GAAG,IAAI,SAAY;AAE3D,SAAO;AAAA,IACL,KAAK,6BAA6B,GAAG;AAAA,IACrC,QAAQ,6BAA6B,MAAM;AAAA,EAAA;AAE/C;AAEO,UAAU,yBACf,aACA,UACA,SACA;AACA,MAAI,oBAAoB;AACxB,MAAI,uBAAuB;AAC3B,aAAW,OAAO,aAAa;AAC7B,QAAI,CAAC,qBAAqB,SAAS,KAAK;AACtC,YAAM,MAAM,QAAQ,SAAS,KAAK,GAAG;AACrC,UAAI,MAAM,GAAG;AACX,4BAAoB;AACpB,cAAM,EAAC,KAAK,SAAS,KAAK,eAAe,CAAA,EAAC;AAAA,MAC5C;AAAA,IACF;AAEA,QAAI,CAAC,wBAAwB,SAAS,QAAQ;AAC5C,YAAM,MAAM,QAAQ,SAAS,QAAQ,GAAG;AACxC,UAAI,QAAQ,GAAG;AACb,+BAAuB;AACvB;AAAA,MACF;AAAA,IACF;AACA,UAAM,EAAC,KAAK,eAAe,GAAC;AAAA,EAC9B;AAEA,MAAI,CAAC,qBAAqB,SAAS,KAAK;AACtC,UAAM,EAAC,KAAK,SAAS,KAAK,eAAe,CAAA,EAAC;AAAA,EAC5C;AACF;AASA,MAAM,WAAW,OAAO,WAAW;AAEnC,MAAM,WAAW,OAAO,WAAW;AAGnC,SAAS,oBAAoB,MAAgB;AAC3C,SAAO,CAAC,GAAa,MAAgB;AAEnC,eAAW,SAAS,MAAM;AACxB,YAAM,MAAM,MAAM,CAAC;AACnB,YAAM,MAAM,cAAc,EAAE,GAAG,GAAG,EAAE,GAAG,CAAC;AACxC,UAAI,QAAQ,GAAG;AACb,eAAO,MAAM,CAAC,MAAM,QAAQ,MAAM,CAAC;AAAA,MACrC;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACF;AAEA,SAAS,cAAc,GAAU,GAAkB;AACjD,MAAI,MAAM,GAAG;AACX,WAAO;AAAA,EACT;AACA,MAAI,MAAM,UAAU;AAClB,WAAO;AAAA,EACT;AACA,MAAI,MAAM,UAAU;AAClB,WAAO;AAAA,EACT;AACA,MAAI,MAAM,UAAU;AAClB,WAAO;AAAA,EACT;AACA,MAAI,MAAM,UAAU;AAClB,WAAO;AAAA,EACT;AACA,SAAO,cAAc,GAAG,CAAC;AAC3B;AAEA,UAAU,aACR,MACA,WACA,SACA;AACA,SAAO,KAAK,UAAU,uBAAuB,YAAY;AAAA,IACvD;AAAA,EAAA;AAEJ;AAEO,SAAS,UAAU,QAAsB;AAC9C,SAAO,KAAK;AAAA,IAAU;AAAA,IAAQ,CAAC,GAAG,MAChC,OAAO,MAAM,WAAW,EAAE,aAAa;AAAA,EAAA;AAE3C;"}
@@ -25,12 +25,14 @@ export interface Input extends InputBase {
 25  25 |   * Fetch data. May modify the data in place.
 26  26 |   * Returns nodes sorted in order of `SourceSchema.compareRows`.
 27  27 |   *
 28     | - * The stream may contain 'yield' to
     28 | + * The stream may contain 'yield' to yield control to the caller for purposes
     29 | + * of responsiveness.
 29  30 |   *
 30  31 |   * Contract:
 31  32 |   * - During fetch: If an input yields 'yield', 'yield' must be yielded to the
 32  33 |   * caller of fetch immediately.
 33     | - * - During push: If a fetch to an input
     34 | + * - During push: If a fetch to an input consumed by the push logic yields
     35 | + * 'yield', it must be yielded to the caller of push immediately.
 34  36 |   */
 35  37 |  fetch(req: FetchRequest): Stream<Node | 'yield'>;
 36  38 | }
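The fetch contract above is easiest to see in a small wrapper. The sketch below is not part of the package: `fetchFiltered` and its `keep` predicate are invented for illustration, and the types are the ones declared in zql/src/ivm/operator.ts in this diff. It forwards 'yield' markers from the upstream input to its own caller immediately while filtering ordinary nodes.

    // Illustrative only; import paths assume a module sitting next to operator.ts.
    import type {Node} from './data.ts';
    import type {FetchRequest, Input} from './operator.ts';
    import type {Stream} from './stream.ts';

    export function* fetchFiltered(
      input: Input,
      req: FetchRequest,
      keep: (node: Node) => boolean,
    ): Stream<Node | 'yield'> {
      for (const node of input.fetch(req)) {
        if (node === 'yield') {
          yield node; // surface the control marker right away, never buffer it
        } else if (keep(node)) {
          yield node;
        }
      }
    }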
@@ -57,8 +59,13 @@ export interface Output {
 57  59 |   * Callers must maintain some invariants for correct operation:
 58  60 |   * - Only add rows which do not already exist (by deep equality).
 59  61 |   * - Only remove rows which do exist (by deep equality).
     62 | + * Implmentation can yield 'yield' to yield control to the caller for purposes
     63 | + * of responsiveness.
     64 | + * Yield contract:
     65 | + * - During a push: If a push call to an output yields 'yield', it must be
     66 | + * yielded to the caller of push immediately.
 60  67 |   */
 61     | - push(change: Change, pusher: InputBase):
     68 | + push(change: Change, pusher: InputBase): Stream<'yield'>;
 62  69 | }
 63  70 | /**
 64  71 |  * An implementation of Output that throws if pushed to. It is used as the
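For the Output side, a rough sketch of what an implementation might now look like (assumed, not package code; the class name, the private field, and the 100-change threshold are invented): push is a generator that records each change and periodically emits a 'yield' marker so the caller can pause.

    // Illustrative only; a generator body runs when the returned stream is
    // iterated, so callers must drain push's result for anything to happen.
    import type {Change} from './change.ts';
    import type {InputBase, Output} from './operator.ts';
    import type {Stream} from './stream.ts';

    class CollectingOutput implements Output {
      readonly changes: Change[] = [];
      #sinceYield = 0;

      *push(change: Change, _pusher: InputBase): Stream<'yield'> {
        this.changes.push(change);
        if (++this.#sinceYield >= 100) {
          this.#sinceYield = 0;
          yield 'yield'; // per the contract, the caller surfaces this immediately
        }
      }
    }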
@@ -1 +1 @@
  1     | -
{"version":3,"file":"operator.d.ts","sourceRoot":"","sources":["../../../../../zql/src/ivm/operator.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,6BAA6B,CAAC;AAC3D,OAAO,KAAK,EAAC,GAAG,EAAC,MAAM,oCAAoC,CAAC;AAC5D,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,aAAa,CAAC;AACxC,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,iBAAiB,CAAC;AAChD,OAAO,KAAK,EAAC,IAAI,EAAC,MAAM,WAAW,CAAC;AACpC,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,aAAa,CAAC;AAC9C,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,aAAa,CAAC;AAExC;;GAEG;AACH,MAAM,WAAW,SAAS;IACxB,iDAAiD;IACjD,SAAS,IAAI,YAAY,CAAC;IAE1B;;;;OAIG;IACH,OAAO,IAAI,IAAI,CAAC;CACjB;AAED,MAAM,WAAW,KAAM,SAAQ,SAAS;IACtC,+CAA+C;IAC/C,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;IAEhC
      1 | +
{"version":3,"file":"operator.d.ts","sourceRoot":"","sources":["../../../../../zql/src/ivm/operator.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,6BAA6B,CAAC;AAC3D,OAAO,KAAK,EAAC,GAAG,EAAC,MAAM,oCAAoC,CAAC;AAC5D,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,aAAa,CAAC;AACxC,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,iBAAiB,CAAC;AAChD,OAAO,KAAK,EAAC,IAAI,EAAC,MAAM,WAAW,CAAC;AACpC,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,aAAa,CAAC;AAC9C,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,aAAa,CAAC;AAExC;;GAEG;AACH,MAAM,WAAW,SAAS;IACxB,iDAAiD;IACjD,SAAS,IAAI,YAAY,CAAC;IAE1B;;;;OAIG;IACH,OAAO,IAAI,IAAI,CAAC;CACjB;AAED,MAAM,WAAW,KAAM,SAAQ,SAAS;IACtC,+CAA+C;IAC/C,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;IAEhC;;;;;;;;;;;;OAYG;IACH,KAAK,CAAC,GAAG,EAAE,YAAY,GAAG,MAAM,CAAC,IAAI,GAAG,OAAO,CAAC,CAAC;CAClD;AAED,MAAM,MAAM,YAAY,GAAG;IACzB,QAAQ,CAAC,UAAU,CAAC,EAAE,UAAU,GAAG,SAAS,CAAC;IAC7C,kFAAkF;IAClF,QAAQ,CAAC,KAAK,CAAC,EAAE,KAAK,GAAG,SAAS,CAAC;IAEnC,oEAAoE;IACpE,QAAQ,CAAC,OAAO,CAAC,EAAE,OAAO,GAAG,SAAS,CAAC;CACxC,CAAC;AAEF,MAAM,MAAM,KAAK,GAAG;IAClB,QAAQ,CAAC,GAAG,EAAE,GAAG,CAAC;IAClB,QAAQ,CAAC,KAAK,EAAE,IAAI,GAAG,OAAO,CAAC;CAChC,CAAC;AAEF;;;GAGG;AACH,MAAM,WAAW,MAAM;IACrB;;;;;;;;;;;;OAYG;IACH,IAAI,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,SAAS,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC;CAC1D;AAED;;;GAGG;AACH,eAAO,MAAM,WAAW,EAAE,MAIzB,CAAC;AAEF,wBAAiB,UAAU,CAAC,MAAM,EAAE,MAAM,CAAC,IAAI,GAAG,OAAO,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC,CAMxE;AAED;;;;;GAKG;AACH,MAAM,WAAW,QAAS,SAAQ,KAAK,EAAE,MAAM;CAAG;AAElD;;;GAGG;AACH,MAAM,WAAW,OAAO;IACtB,GAAG,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,SAAS,GAAG,IAAI,CAAC;IACzC,GAAG,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,SAAS,GAAG,SAAS,GAAG,SAAS,CAAC;IACzD;;OAEG;IACH,IAAI,CAAC,OAAO,CAAC,EAAE;QAAC,MAAM,EAAE,MAAM,CAAA;KAAC,GAAG,MAAM,CAAC,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC;IAC9D,GAAG,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI,CAAC;CACxB"}
@@ -1 +1 @@
  1     | -
{"version":3,"file":"operator.js","sources":["../../../../../zql/src/ivm/operator.ts"],"sourcesContent":["import type {JSONValue} from '../../../shared/src/json.ts';\nimport type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {Change} from './change.ts';\nimport type {Constraint} from './constraint.ts';\nimport type {Node} from './data.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\n\n/**\n * Input to an operator.\n */\nexport interface InputBase {\n /** The schema of the data this input returns. */\n getSchema(): SourceSchema;\n\n /**\n * Completely destroy the input. Destroying an input\n * causes it to call destroy on its upstreams, fully\n * cleaning up a pipeline.\n */\n destroy(): void;\n}\n\nexport interface Input extends InputBase {\n /** Tell the input where to send its output. */\n setOutput(output: Output): void;\n\n /**\n * Fetch data. May modify the data in place.\n * Returns nodes sorted in order of `SourceSchema.compareRows`.\n *\n * The stream may contain 'yield' to
      1 | +
{"version":3,"file":"operator.js","sources":["../../../../../zql/src/ivm/operator.ts"],"sourcesContent":["import type {JSONValue} from '../../../shared/src/json.ts';\nimport type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {Change} from './change.ts';\nimport type {Constraint} from './constraint.ts';\nimport type {Node} from './data.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\n\n/**\n * Input to an operator.\n */\nexport interface InputBase {\n /** The schema of the data this input returns. */\n getSchema(): SourceSchema;\n\n /**\n * Completely destroy the input. Destroying an input\n * causes it to call destroy on its upstreams, fully\n * cleaning up a pipeline.\n */\n destroy(): void;\n}\n\nexport interface Input extends InputBase {\n /** Tell the input where to send its output. */\n setOutput(output: Output): void;\n\n /**\n * Fetch data. May modify the data in place.\n * Returns nodes sorted in order of `SourceSchema.compareRows`.\n *\n * The stream may contain 'yield' to yield control to the caller for purposes\n * of responsiveness.\n *\n * Contract:\n * - During fetch: If an input yields 'yield', 'yield' must be yielded to the\n * caller of fetch immediately.\n * - During push: If a fetch to an input consumed by the push logic yields\n * 'yield', it must be yielded to the caller of push immediately.\n */\n fetch(req: FetchRequest): Stream<Node | 'yield'>;\n}\n\nexport type FetchRequest = {\n readonly constraint?: Constraint | undefined;\n /** If supplied, `start.row` must have previously been output by fetch or push. */\n readonly start?: Start | undefined;\n\n /** Whether to fetch in reverse order of the SourceSchema's sort. */\n readonly reverse?: boolean | undefined;\n};\n\nexport type Start = {\n readonly row: Row;\n readonly basis: 'at' | 'after';\n};\n\n/**\n * An output for an operator. Typically another Operator but can also be\n * the code running the pipeline.\n */\nexport interface Output {\n /**\n * Push incremental changes to data previously received with fetch().\n * Consumers must apply all pushed changes or incremental result will\n * be incorrect.\n * Callers must maintain some invariants for correct operation:\n * - Only add rows which do not already exist (by deep equality).\n * - Only remove rows which do exist (by deep equality).\n * Implmentation can yield 'yield' to yield control to the caller for purposes\n * of responsiveness.\n * Yield contract:\n * - During a push: If a push call to an output yields 'yield', it must be\n * yielded to the caller of push immediately.\n */\n push(change: Change, pusher: InputBase): Stream<'yield'>;\n}\n\n/**\n * An implementation of Output that throws if pushed to. 
It is used as the\n * initial value for for an operator's output before it is set.\n */\nexport const throwOutput: Output = {\n push(_change: Change): Stream<'yield'> {\n throw new Error('Output not set');\n },\n};\n\nexport function* skipYields(stream: Stream<Node | 'yield'>): Stream<Node> {\n for (const node of stream) {\n if (node !== 'yield') {\n yield node;\n }\n }\n}\n\n/**\n * Operators are arranged into pipelines.\n * They are stateful.\n * Each operator is an input to the next operator in the chain and an output\n * to the previous.\n */\nexport interface Operator extends Input, Output {}\n\n/**\n * Operators get access to storage that they can store their internal\n * state in.\n */\nexport interface Storage {\n set(key: string, value: JSONValue): void;\n get(key: string, def?: JSONValue): JSONValue | undefined;\n /**\n * If options is not specified, defaults to scanning all entries.\n */\n scan(options?: {prefix: string}): Stream<[string, JSONValue]>;\n del(key: string): void;\n}\n"],"names":[],"mappings":"AAkFO,MAAM,cAAsB;AAAA,EACjC,KAAK,SAAkC;AACrC,UAAM,IAAI,MAAM,gBAAgB;AAAA,EAClC;AACF;AAEO,UAAU,WAAW,QAA8C;AACxE,aAAW,QAAQ,QAAQ;AACzB,QAAI,SAAS,SAAS;AACpB,YAAM;AAAA,IACR;AAAA,EACF;AACF;"}
@@ -72,7 +72,7 @@ import type { Stream } from './stream.ts';
 72  72 |  * If an edit enters and is converted to only add or only remove, it exits as that change.
 73  73 |  * If an edit enters and exits as edits only, it exits as a single edit.
 74  74 |  */
 75     | - export declare function pushAccumulatedChanges(accumulatedPushes: Change[], output: Output, pusher: InputBase, fanOutChangeType: Change['type'], mergeRelationships: (existing: Change, incoming: Change) => Change, addEmptyRelationships: (change: Change) => Change):
     75 | + export declare function pushAccumulatedChanges(accumulatedPushes: Change[], output: Output, pusher: InputBase, fanOutChangeType: Change['type'], mergeRelationships: (existing: Change, incoming: Change) => Change, addEmptyRelationships: (change: Change) => Change): Stream<'yield'>;
 76  76 | /**
 77  77 |  * Puts relationships from `right` into `left` if they don't already exist in `left`.
 78  78 |  */
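With the declaration now returning Stream<'yield'>, a call to pushAccumulatedChanges is no longer fire-and-forget: the generator body only runs when the stream is drained. A hedged caller-side sketch follows (the wrapper name, the identity stand-in for addEmptyRelationships, and the import path are assumptions; pushAccumulatedChanges and mergeRelationships are real exports shown in this diff).

    import type {Change} from './change.ts';
    import type {InputBase, Output} from './operator.ts';
    import type {Stream} from './stream.ts';
    import {mergeRelationships, pushAccumulatedChanges} from './push-accumulated.ts';

    function* flushAccumulated(
      accumulated: Change[],
      output: Output,
      pusher: InputBase,
      fanOutChangeType: Change['type'],
    ): Stream<'yield'> {
      // Without `yield*` the call would create a generator and drop it,
      // and nothing would ever be pushed downstream.
      yield* pushAccumulatedChanges(
        accumulated,
        output,
        pusher,
        fanOutChangeType,
        mergeRelationships,
        change => change, // identity stand-in for addEmptyRelationships
      );
    }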
@@ -1 +1 @@
  1     | -
{"version":3,"file":"push-accumulated.d.ts","sourceRoot":"","sources":["../../../../../zql/src/ivm/push-accumulated.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,aAAa,CAAC;AACxC,OAAO,KAAK,EAAC,IAAI,EAAC,MAAM,WAAW,CAAC;AACpC,OAAO,KAAK,EAAC,SAAS,EAAE,MAAM,EAAC,MAAM,eAAe,CAAC;AACrD,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,aAAa,CAAC;AAC9C,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,aAAa,CAAC;AAExC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAoEG;AACH,
      1 | +
{"version":3,"file":"push-accumulated.d.ts","sourceRoot":"","sources":["../../../../../zql/src/ivm/push-accumulated.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,aAAa,CAAC;AACxC,OAAO,KAAK,EAAC,IAAI,EAAC,MAAM,WAAW,CAAC;AACpC,OAAO,KAAK,EAAC,SAAS,EAAE,MAAM,EAAC,MAAM,eAAe,CAAC;AACrD,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,aAAa,CAAC;AAC9C,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,aAAa,CAAC;AAExC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAoEG;AACH,wBAAiB,sBAAsB,CACrC,iBAAiB,EAAE,MAAM,EAAE,EAC3B,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,SAAS,EACjB,gBAAgB,EAAE,MAAM,CAAC,MAAM,CAAC,EAChC,kBAAkB,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,KAAK,MAAM,EAClE,qBAAqB,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,MAAM,GAChD,MAAM,CAAC,OAAO,CAAC,CA+JjB;AAED;;GAEG;AACH,wBAAgB,kBAAkB,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,GAAG,MAAM,CAsFtE;AAED,wBAAgB,yBAAyB,CACvC,MAAM,EAAE,YAAY,GACnB,CAAC,MAAM,EAAE,MAAM,KAAK,MAAM,CAoD5B;AAED;;;;;GAKG;AACH,wBAAgB,UAAU,CACxB,aAAa,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,MAAM,CAAC,IAAI,GAAG,OAAO,CAAC,CAAC,EAC3D,iBAAiB,EAAE,MAAM,EAAE,QAO5B"}
@@ -1,7 +1,7 @@
  1   1 | import { assert, unreachable } from "../../../shared/src/asserts.js";
  2   2 | import { must } from "../../../shared/src/must.js";
  3   3 | import { emptyArray } from "../../../shared/src/sentinels.js";
  4     | - function pushAccumulatedChanges(accumulatedPushes, output, pusher, fanOutChangeType, mergeRelationships2, addEmptyRelationships) {
      4 | + function* pushAccumulatedChanges(accumulatedPushes, output, pusher, fanOutChangeType, mergeRelationships2, addEmptyRelationships) {
  5   5 | if (accumulatedPushes.length === 0) {
  6   6 | return;
  7   7 | }
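Because pushAccumulatedChanges is now a generator function, a bare call does nothing; that is why every call site in the hunks below changes from output.push(...) to yield* output.push(...). A tiny self-contained illustration of that laziness (not package code):

    // Generator bodies run only when the returned iterator is consumed.
    function* pushOnce(): Generator<'yield'> {
      console.log('pushing');        // not reached by a bare call
      yield 'yield';
    }

    pushOnce();                      // no output: the body never ran
    for (const marker of pushOnce()) {
      console.log('got', marker);    // logs "pushing", then "got yield"
    }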
@@ -28,7 +28,7 @@ function pushAccumulatedChanges(accumulatedPushes, output, pusher, fanOutChangeT
 28  28 | types.length === 1 && types[0] === "remove",
 29  29 | "Fan-in:remove expected all removes"
 30  30 | );
 31     | - output.push(
     31 | + yield* output.push(
 32  32 | addEmptyRelationships(must(candidatesToPush.get("remove"))),
 33  33 | pusher
 34  34 | );
@@ -38,7 +38,7 @@ function pushAccumulatedChanges(accumulatedPushes, output, pusher, fanOutChangeT
 38  38 | types.length === 1 && types[0] === "add",
 39  39 | "Fan-in:add expected all adds"
 40  40 | );
 41     | - output.push(
     41 | + yield* output.push(
 42  42 | addEmptyRelationships(must(candidatesToPush.get("add"))),
 43  43 | pusher
 44  44 | );
@@ -60,11 +60,11 @@ function pushAccumulatedChanges(accumulatedPushes, output, pusher, fanOutChangeT
 60  60 | if (removeChange) {
 61  61 | editChange = mergeRelationships2(editChange, removeChange);
 62  62 | }
 63     | - output.push(addEmptyRelationships(editChange), pusher);
     63 | + yield* output.push(addEmptyRelationships(editChange), pusher);
 64  64 | return;
 65  65 | }
 66  66 | if (addChange && removeChange) {
 67     | - output.push(
     67 | + yield* output.push(
 68  68 | addEmptyRelationships({
 69  69 | type: "edit",
 70  70 | node: addChange.node,
@@ -74,7 +74,7 @@ function pushAccumulatedChanges(accumulatedPushes, output, pusher, fanOutChangeT
 74  74 | );
 75  75 | return;
 76  76 | }
 77     | - output.push(
     77 | + yield* output.push(
 78  78 | addEmptyRelationships(must(addChange ?? removeChange)),
 79  79 | pusher
 80  80 | );
@@ -96,7 +96,7 @@ function pushAccumulatedChanges(accumulatedPushes, output, pusher, fanOutChangeT
 96  96 | );
 97  97 | const childChange = candidatesToPush.get("child");
 98  98 | if (childChange) {
 99     | - output.push(childChange, pusher);
     99 | + yield* output.push(childChange, pusher);
100 100 | return;
101 101 | }
102 102 | const addChange = candidatesToPush.get("add");
@@ -105,7 +105,7 @@ function pushAccumulatedChanges(accumulatedPushes, output, pusher, fanOutChangeT
105 105 | addChange === void 0 || removeChange === void 0,
106 106 | "Fan-in:child expected either add or remove, not both"
107 107 | );
108     | - output.push(
    108 | + yield* output.push(
109 109 | addEmptyRelationships(must(addChange ?? removeChange)),
110 110 | pusher
111 111 | );
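At the top of the pipeline something ultimately consumes these markers. The sketch below is an assumption about a plausible consumer, not code from this package: it drains the stream returned by push and awaits a macrotask at each 'yield' marker so the thread stays responsive during large pushes.

    import type {Change} from './change.ts';
    import type {InputBase, Output} from './operator.ts';

    async function pushWithBreaks(
      root: Output,
      change: Change,
      pusher: InputBase,
    ): Promise<void> {
      for (const marker of root.push(change, pusher)) {
        if (marker === 'yield') {
          // Hand control back to the event loop before continuing.
          await new Promise<void>(resolve => setTimeout(resolve, 0));
        }
      }
    }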
@@ -1 +1 @@
  1     | -
{"version":3,"file":"push-accumulated.js","sources":["../../../../../zql/src/ivm/push-accumulated.ts"],"sourcesContent":["import {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport {emptyArray} from '../../../shared/src/sentinels.ts';\nimport type {Change} from './change.ts';\nimport type {Node} from './data.ts';\nimport type {InputBase, Output} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\n\n/**\n * # pushAccumulatedChanges\n *\n * Pushes the changes that were accumulated by\n * [fan-out, fan-in] or [ufo, ufi] sub-graphs.\n *\n * This function is called at the end of the sub-graph.\n *\n * The sub-graphs represents `OR`s.\n *\n * Changes that can enter the subgraphs:\n * 1. child (due to exist joins being above the sub-graph)\n * 2. add\n * 3. remove\n * 4. edit\n *\n * # Changes that can exit into `pushAccumulatedChanges`:\n *\n * ## Child\n * If a `child` change enters a sub-graph, it will flow to all branches.\n * Each branch will either:\n * - preserve the `child` change\n * - stop the `child` change (e.g., filter)\n * - convert it to an `add` or `remove` (e.g., exists filter)\n *\n * ## Add\n * If an `add` change enters a sub-graph, it will flow to all branches.\n * Each branch will either:\n * - preserve the `add` change\n * - hide the change (e.g., filter)\n *\n * ## Remove\n * If a `remove` change enters a sub-graph, it will flow to all branches.\n * Each branch will either:\n * - preserve the `remove` change\n * - hide the change (e.g., filter)\n *\n * ## Edit\n * If an `edit` change enters a sub-graph, it will flow to all branches.\n * Each branch will either:\n * - preserve the `edit` change\n * - convert it to an `add` (e.g., filter where old didn't match but new does)\n * - convert it to a `remove` (e.g., filter where old matched but new doesn't)\n *\n * This results in some invariants:\n * - an add coming in will only create adds coming out\n * - a remove coming in will only create removes coming out\n * - an edit coming in can create adds, removes, and edits coming out\n * - a child coming in can create adds, removes, and children coming out\n *\n * # Return of `pushAccumulatedChanges`\n *\n * This function will only push a single change.\n * Given the above invariants, how is this possible?\n *\n * An add that becomes many `adds` results in a single add\n * as the `add` is the same row across all adds. 
Branches do not change the row.\n *\n * A remove that becomes many `removes` results in a single remove\n * for the same reason.\n *\n * If a child enters and exits, it takes precedence over all other changes.\n * If a child enters and is converted only to add and remove it exits as an edit.\n * If a child enters and is converted to only add or only remove, it exits as that change.\n *\n * If an edit enters and is converted to add and remove it exits as an edit.\n * If an edit enters and is converted to only add or only remove, it exits as that change.\n * If an edit enters and exits as edits only, it exits as a single edit.\n */\nexport function pushAccumulatedChanges(\n accumulatedPushes: Change[],\n output: Output,\n pusher: InputBase,\n fanOutChangeType: Change['type'],\n mergeRelationships: (existing: Change, incoming: Change) => Change,\n addEmptyRelationships: (change: Change) => Change,\n) {\n if (accumulatedPushes.length === 0) {\n // It is possible for no forks to pass along the push.\n // E.g., if no filters match in any fork.\n return;\n }\n\n // collapse down to a single change per type\n const candidatesToPush = new Map<Change['type'], Change>();\n for (const change of accumulatedPushes) {\n if (fanOutChangeType === 'child' && change.type !== 'child') {\n assert(\n candidatesToPush.has(change.type) === false,\n () =>\n `Fan-in:child expected at most one ${change.type} when fan-out is of type child`,\n );\n }\n\n const existing = candidatesToPush.get(change.type);\n let mergedChange = change;\n if (existing) {\n // merge in relationships\n mergedChange = mergeRelationships(existing, change);\n }\n candidatesToPush.set(change.type, mergedChange);\n }\n\n accumulatedPushes.length = 0;\n\n const types = [...candidatesToPush.keys()];\n /**\n * Based on the received `fanOutChangeType` only certain output types are valid.\n *\n * - remove must result in all removes\n * - add must result in all adds\n * - edit must result in add or removes or edits\n * - child must result in a single add or single remove or many child changes\n * - Single add or remove because the relationship will be unique to one exist check within the fan-out,fan-in sub-graph\n * - Many child changes because other operators may preserve the child change\n */\n switch (fanOutChangeType) {\n case 'remove':\n assert(\n types.length === 1 && types[0] === 'remove',\n 'Fan-in:remove expected all removes',\n );\n output.push(\n addEmptyRelationships(must(candidatesToPush.get('remove'))),\n pusher,\n );\n return;\n case 'add':\n assert(\n types.length === 1 && types[0] === 'add',\n 'Fan-in:add expected all adds',\n );\n output.push(\n addEmptyRelationships(must(candidatesToPush.get('add'))),\n pusher,\n );\n return;\n case 'edit': {\n assert(\n types.every(\n type => type === 'add' || type === 'remove' || type === 'edit',\n ),\n 'Fan-in:edit expected all adds, removes, or edits',\n );\n const addChange = candidatesToPush.get('add');\n const removeChange = candidatesToPush.get('remove');\n let editChange = candidatesToPush.get('edit');\n\n // If an `edit` is present, it supersedes `add` and `remove`\n // as it semantically represents both.\n if (editChange) {\n if (addChange) {\n editChange = mergeRelationships(editChange, addChange);\n }\n if (removeChange) {\n editChange = mergeRelationships(editChange, removeChange);\n }\n output.push(addEmptyRelationships(editChange), pusher);\n return;\n }\n\n // If `edit` didn't make it through but both `add` and `remove` did,\n // convert back to an edit.\n //\n // When can this 
happen?\n //\n // EDIT old: a=1, new: a=2\n // |\n // FanOut\n // / \\\n // a=1 a=2\n // | |\n // remove add\n // \\ /\n // FanIn\n //\n // The left filter converts the edit into a remove.\n // The right filter converts the edit into an add.\n if (addChange && removeChange) {\n output.push(\n addEmptyRelationships({\n type: 'edit',\n node: addChange.node,\n oldNode: removeChange.node,\n } as const),\n pusher,\n );\n return;\n }\n\n output.push(\n addEmptyRelationships(must(addChange ?? removeChange)),\n pusher,\n );\n return;\n }\n case 'child': {\n assert(\n types.every(\n type =>\n type === 'add' || // exists can change child to add or remove\n type === 'remove' || // exists can change child to add or remove\n type === 'child', // other operators may preserve the child change\n ),\n 'Fan-in:child expected all adds, removes, or children',\n );\n assert(\n types.length <= 2,\n 'Fan-in:child expected at most 2 types on a child change from fan-out',\n );\n\n // If any branch preserved the original child change, that takes precedence over all other changes.\n const childChange = candidatesToPush.get('child');\n if (childChange) {\n output.push(childChange, pusher);\n return;\n }\n\n const addChange = candidatesToPush.get('add');\n const removeChange = candidatesToPush.get('remove');\n\n assert(\n addChange === undefined || removeChange === undefined,\n 'Fan-in:child expected either add or remove, not both',\n );\n\n output.push(\n addEmptyRelationships(must(addChange ?? removeChange)),\n pusher,\n );\n return;\n }\n default:\n fanOutChangeType satisfies never;\n }\n}\n\n/**\n * Puts relationships from `right` into `left` if they don't already exist in `left`.\n */\nexport function mergeRelationships(left: Change, right: Change): Change {\n // change types will always match\n // unless we have an edit on the left\n // then the right could be edit, add, or remove\n if (left.type === right.type) {\n switch (left.type) {\n case 'add': {\n return {\n type: 'add',\n node: {\n row: left.node.row,\n relationships: {\n ...right.node.relationships,\n ...left.node.relationships,\n },\n },\n };\n }\n case 'remove': {\n return {\n type: 'remove',\n node: {\n row: left.node.row,\n relationships: {\n ...right.node.relationships,\n ...left.node.relationships,\n },\n },\n };\n }\n case 'edit': {\n assert(right.type === 'edit');\n // merge edits into a single edit\n return {\n type: 'edit',\n node: {\n row: left.node.row,\n relationships: {\n ...right.node.relationships,\n ...left.node.relationships,\n },\n },\n oldNode: {\n row: left.oldNode.row,\n relationships: {\n ...right.oldNode.relationships,\n ...left.oldNode.relationships,\n },\n },\n };\n }\n }\n }\n\n // left is always an edit here\n assert(left.type === 'edit');\n switch (right.type) {\n case 'add': {\n return {\n type: 'edit',\n node: {\n ...left.node,\n relationships: {\n ...right.node.relationships,\n ...left.node.relationships,\n },\n },\n oldNode: left.oldNode,\n };\n }\n case 'remove': {\n return {\n type: 'edit',\n node: left.node,\n oldNode: {\n ...left.oldNode,\n relationships: {\n ...right.node.relationships,\n ...left.oldNode.relationships,\n },\n },\n };\n }\n }\n\n unreachable();\n}\n\nexport function makeAddEmptyRelationships(\n schema: SourceSchema,\n): (change: Change) => Change {\n return (change: Change): Change => {\n if (Object.keys(schema.relationships).length === 0) {\n return change;\n }\n\n switch (change.type) {\n case 'add':\n case 'remove': {\n const ret = {\n ...change,\n node: {\n ...change.node,\n relationships: {\n 
...change.node.relationships,\n },\n },\n };\n\n mergeEmpty(ret.node.relationships, Object.keys(schema.relationships));\n\n return ret;\n }\n case 'edit': {\n const ret = {\n ...change,\n node: {\n ...change.node,\n relationships: {\n ...change.node.relationships,\n },\n },\n oldNode: {\n ...change.oldNode,\n relationships: {\n ...change.oldNode.relationships,\n },\n },\n };\n\n mergeEmpty(ret.node.relationships, Object.keys(schema.relationships));\n mergeEmpty(\n ret.oldNode.relationships,\n Object.keys(schema.relationships),\n );\n\n return ret;\n }\n case 'child':\n return change; // children only have relationships along the path to the change\n }\n };\n}\n\n/**\n * For each relationship in `schema` that does not exist\n * in `relationships`, add it with an empty stream.\n *\n * This modifies the `relationships` object in place.\n */\nexport function mergeEmpty(\n relationships: Record<string, () => Stream<Node | 'yield'>>,\n relationshipNames: string[],\n) {\n for (const relName of relationshipNames) {\n if (relationships[relName] === undefined) {\n relationships[relName] = () => emptyArray;\n }\n }\n}\n"],"names":["mergeRelationships"],"mappings":";;;AA8EO,SAAS,uBACd,mBACA,QACA,QACA,kBACAA,qBACA,uBACA;AACA,MAAI,kBAAkB,WAAW,GAAG;AAGlC;AAAA,EACF;AAGA,QAAM,uCAAuB,IAAA;AAC7B,aAAW,UAAU,mBAAmB;AACtC,QAAI,qBAAqB,WAAW,OAAO,SAAS,SAAS;AAC3D;AAAA,QACE,iBAAiB,IAAI,OAAO,IAAI,MAAM;AAAA,QACtC,MACE,qCAAqC,OAAO,IAAI;AAAA,MAAA;AAAA,IAEtD;AAEA,UAAM,WAAW,iBAAiB,IAAI,OAAO,IAAI;AACjD,QAAI,eAAe;AACnB,QAAI,UAAU;AAEZ,qBAAeA,oBAAmB,UAAU,MAAM;AAAA,IACpD;AACA,qBAAiB,IAAI,OAAO,MAAM,YAAY;AAAA,EAChD;AAEA,oBAAkB,SAAS;AAE3B,QAAM,QAAQ,CAAC,GAAG,iBAAiB,MAAM;AAWzC,UAAQ,kBAAA;AAAA,IACN,KAAK;AACH;AAAA,QACE,MAAM,WAAW,KAAK,MAAM,CAAC,MAAM;AAAA,QACnC;AAAA,MAAA;AAEF,aAAO;AAAA,QACL,sBAAsB,KAAK,iBAAiB,IAAI,QAAQ,CAAC,CAAC;AAAA,QAC1D;AAAA,MAAA;AAEF;AAAA,IACF,KAAK;AACH;AAAA,QACE,MAAM,WAAW,KAAK,MAAM,CAAC,MAAM;AAAA,QACnC;AAAA,MAAA;AAEF,aAAO;AAAA,QACL,sBAAsB,KAAK,iBAAiB,IAAI,KAAK,CAAC,CAAC;AAAA,QACvD;AAAA,MAAA;AAEF;AAAA,IACF,KAAK,QAAQ;AACX;AAAA,QACE,MAAM;AAAA,UACJ,CAAA,SAAQ,SAAS,SAAS,SAAS,YAAY,SAAS;AAAA,QAAA;AAAA,QAE1D;AAAA,MAAA;AAEF,YAAM,YAAY,iBAAiB,IAAI,KAAK;AAC5C,YAAM,eAAe,iBAAiB,IAAI,QAAQ;AAClD,UAAI,aAAa,iBAAiB,IAAI,MAAM;AAI5C,UAAI,YAAY;AACd,YAAI,WAAW;AACb,uBAAaA,oBAAmB,YAAY,SAAS;AAAA,QACvD;AACA,YAAI,cAAc;AAChB,uBAAaA,oBAAmB,YAAY,YAAY;AAAA,QAC1D;AACA,eAAO,KAAK,sBAAsB,UAAU,GAAG,MAAM;AACrD;AAAA,MACF;AAmBA,UAAI,aAAa,cAAc;AAC7B,eAAO;AAAA,UACL,sBAAsB;AAAA,YACpB,MAAM;AAAA,YACN,MAAM,UAAU;AAAA,YAChB,SAAS,aAAa;AAAA,UAAA,CACd;AAAA,UACV;AAAA,QAAA;AAEF;AAAA,MACF;AAEA,aAAO;AAAA,QACL,sBAAsB,KAAK,aAAa,YAAY,CAAC;AAAA,QACrD;AAAA,MAAA;AAEF;AAAA,IACF;AAAA,IACA,KAAK,SAAS;AACZ;AAAA,QACE,MAAM;AAAA,UACJ,UACE,SAAS;AAAA,UACT,SAAS;AAAA,UACT,SAAS;AAAA;AAAA,QAAA;AAAA,QAEb;AAAA,MAAA;AAEF;AAAA,QACE,MAAM,UAAU;AAAA,QAChB;AAAA,MAAA;AAIF,YAAM,cAAc,iBAAiB,IAAI,OAAO;AAChD,UAAI,aAAa;AACf,eAAO,KAAK,aAAa,MAAM;AAC/B;AAAA,MACF;AAEA,YAAM,YAAY,iBAAiB,IAAI,KAAK;AAC5C,YAAM,eAAe,iBAAiB,IAAI,QAAQ;AAElD;AAAA,QACE,cAAc,UAAa,iBAAiB;AAAA,QAC5C;AAAA,MAAA;AAGF,aAAO;AAAA,QACL,sBAAsB,KAAK,aAAa,YAAY,CAAC;AAAA,QACrD;AAAA,MAAA;AAEF;AAAA,IACF;AAAA,EAEE;AAEN;AAKO,SAAS,mBAAmB,MAAc,OAAuB;AAItE,MAAI,KAAK,SAAS,MAAM,MAAM;AAC5B,YAAQ,KAAK,MAAA;AAAA,MACX,KAAK,OAAO;AACV,eAAO;AAAA,UACL,MAAM;AAAA,UACN,MAAM;AAAA,YACJ,KAAK,KAAK,KAAK;AAAA,YACf,eAAe;AAAA,cACb,GAAG,MAAM,KAAK;AAAA,cACd,GAAG,KAAK,KAAK;AAAA,YAAA;AAAA,UACf;AAAA,QACF;AAAA,MAEJ;AAAA,MACA,KAAK,UAAU;AACb,eAAO;AAAA,UACL,MAAM;AAAA,UACN,MAAM;AAAA,YACJ,KAAK,KAAK,KAAK;AAAA,YACf,eAAe;AAAA,cACb,GAAG,MAAM,KAAK;AAAA,cACd,GAAG,KAAK,KAAK;AAAA,YAAA;AAAA,UACf
;AAAA,QACF;AAAA,MAEJ;AAAA,MACA,KAAK,QAAQ;AACX,eAAO,MAAM,SAAS,MAAM;AAE5B,eAAO;AAAA,UACL,MAAM;AAAA,UACN,MAAM;AAAA,YACJ,KAAK,KAAK,KAAK;AAAA,YACf,eAAe;AAAA,cACb,GAAG,MAAM,KAAK;AAAA,cACd,GAAG,KAAK,KAAK;AAAA,YAAA;AAAA,UACf;AAAA,UAEF,SAAS;AAAA,YACP,KAAK,KAAK,QAAQ;AAAA,YAClB,eAAe;AAAA,cACb,GAAG,MAAM,QAAQ;AAAA,cACjB,GAAG,KAAK,QAAQ;AAAA,YAAA;AAAA,UAClB;AAAA,QACF;AAAA,MAEJ;AAAA,IAAA;AAAA,EAEJ;AAGA,SAAO,KAAK,SAAS,MAAM;AAC3B,UAAQ,MAAM,MAAA;AAAA,IACZ,KAAK,OAAO;AACV,aAAO;AAAA,QACL,MAAM;AAAA,QACN,MAAM;AAAA,UACJ,GAAG,KAAK;AAAA,UACR,eAAe;AAAA,YACb,GAAG,MAAM,KAAK;AAAA,YACd,GAAG,KAAK,KAAK;AAAA,UAAA;AAAA,QACf;AAAA,QAEF,SAAS,KAAK;AAAA,MAAA;AAAA,IAElB;AAAA,IACA,KAAK,UAAU;AACb,aAAO;AAAA,QACL,MAAM;AAAA,QACN,MAAM,KAAK;AAAA,QACX,SAAS;AAAA,UACP,GAAG,KAAK;AAAA,UACR,eAAe;AAAA,YACb,GAAG,MAAM,KAAK;AAAA,YACd,GAAG,KAAK,QAAQ;AAAA,UAAA;AAAA,QAClB;AAAA,MACF;AAAA,IAEJ;AAAA,EAAA;AAGF,cAAA;AACF;AAEO,SAAS,0BACd,QAC4B;AAC5B,SAAO,CAAC,WAA2B;AACjC,QAAI,OAAO,KAAK,OAAO,aAAa,EAAE,WAAW,GAAG;AAClD,aAAO;AAAA,IACT;AAEA,YAAQ,OAAO,MAAA;AAAA,MACb,KAAK;AAAA,MACL,KAAK,UAAU;AACb,cAAM,MAAM;AAAA,UACV,GAAG;AAAA,UACH,MAAM;AAAA,YACJ,GAAG,OAAO;AAAA,YACV,eAAe;AAAA,cACb,GAAG,OAAO,KAAK;AAAA,YAAA;AAAA,UACjB;AAAA,QACF;AAGF,mBAAW,IAAI,KAAK,eAAe,OAAO,KAAK,OAAO,aAAa,CAAC;AAEpE,eAAO;AAAA,MACT;AAAA,MACA,KAAK,QAAQ;AACX,cAAM,MAAM;AAAA,UACV,GAAG;AAAA,UACH,MAAM;AAAA,YACJ,GAAG,OAAO;AAAA,YACV,eAAe;AAAA,cACb,GAAG,OAAO,KAAK;AAAA,YAAA;AAAA,UACjB;AAAA,UAEF,SAAS;AAAA,YACP,GAAG,OAAO;AAAA,YACV,eAAe;AAAA,cACb,GAAG,OAAO,QAAQ;AAAA,YAAA;AAAA,UACpB;AAAA,QACF;AAGF,mBAAW,IAAI,KAAK,eAAe,OAAO,KAAK,OAAO,aAAa,CAAC;AACpE;AAAA,UACE,IAAI,QAAQ;AAAA,UACZ,OAAO,KAAK,OAAO,aAAa;AAAA,QAAA;AAGlC,eAAO;AAAA,MACT;AAAA,MACA,KAAK;AACH,eAAO;AAAA,IAAA;AAAA,EAEb;AACF;AAQO,SAAS,WACd,eACA,mBACA;AACA,aAAW,WAAW,mBAAmB;AACvC,QAAI,cAAc,OAAO,MAAM,QAAW;AACxC,oBAAc,OAAO,IAAI,MAAM;AAAA,IACjC;AAAA,EACF;AACF;"}
      1 | +
{"version":3,"file":"push-accumulated.js","sources":["../../../../../zql/src/ivm/push-accumulated.ts"],"sourcesContent":["import {assert, unreachable} from '../../../shared/src/asserts.ts';\nimport {must} from '../../../shared/src/must.ts';\nimport {emptyArray} from '../../../shared/src/sentinels.ts';\nimport type {Change} from './change.ts';\nimport type {Node} from './data.ts';\nimport type {InputBase, Output} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\n\n/**\n * # pushAccumulatedChanges\n *\n * Pushes the changes that were accumulated by\n * [fan-out, fan-in] or [ufo, ufi] sub-graphs.\n *\n * This function is called at the end of the sub-graph.\n *\n * The sub-graphs represents `OR`s.\n *\n * Changes that can enter the subgraphs:\n * 1. child (due to exist joins being above the sub-graph)\n * 2. add\n * 3. remove\n * 4. edit\n *\n * # Changes that can exit into `pushAccumulatedChanges`:\n *\n * ## Child\n * If a `child` change enters a sub-graph, it will flow to all branches.\n * Each branch will either:\n * - preserve the `child` change\n * - stop the `child` change (e.g., filter)\n * - convert it to an `add` or `remove` (e.g., exists filter)\n *\n * ## Add\n * If an `add` change enters a sub-graph, it will flow to all branches.\n * Each branch will either:\n * - preserve the `add` change\n * - hide the change (e.g., filter)\n *\n * ## Remove\n * If a `remove` change enters a sub-graph, it will flow to all branches.\n * Each branch will either:\n * - preserve the `remove` change\n * - hide the change (e.g., filter)\n *\n * ## Edit\n * If an `edit` change enters a sub-graph, it will flow to all branches.\n * Each branch will either:\n * - preserve the `edit` change\n * - convert it to an `add` (e.g., filter where old didn't match but new does)\n * - convert it to a `remove` (e.g., filter where old matched but new doesn't)\n *\n * This results in some invariants:\n * - an add coming in will only create adds coming out\n * - a remove coming in will only create removes coming out\n * - an edit coming in can create adds, removes, and edits coming out\n * - a child coming in can create adds, removes, and children coming out\n *\n * # Return of `pushAccumulatedChanges`\n *\n * This function will only push a single change.\n * Given the above invariants, how is this possible?\n *\n * An add that becomes many `adds` results in a single add\n * as the `add` is the same row across all adds. 
Branches do not change the row.\n *\n * A remove that becomes many `removes` results in a single remove\n * for the same reason.\n *\n * If a child enters and exits, it takes precedence over all other changes.\n * If a child enters and is converted only to add and remove it exits as an edit.\n * If a child enters and is converted to only add or only remove, it exits as that change.\n *\n * If an edit enters and is converted to add and remove it exits as an edit.\n * If an edit enters and is converted to only add or only remove, it exits as that change.\n * If an edit enters and exits as edits only, it exits as a single edit.\n */\nexport function* pushAccumulatedChanges(\n accumulatedPushes: Change[],\n output: Output,\n pusher: InputBase,\n fanOutChangeType: Change['type'],\n mergeRelationships: (existing: Change, incoming: Change) => Change,\n addEmptyRelationships: (change: Change) => Change,\n): Stream<'yield'> {\n if (accumulatedPushes.length === 0) {\n // It is possible for no forks to pass along the push.\n // E.g., if no filters match in any fork.\n return;\n }\n\n // collapse down to a single change per type\n const candidatesToPush = new Map<Change['type'], Change>();\n for (const change of accumulatedPushes) {\n if (fanOutChangeType === 'child' && change.type !== 'child') {\n assert(\n candidatesToPush.has(change.type) === false,\n () =>\n `Fan-in:child expected at most one ${change.type} when fan-out is of type child`,\n );\n }\n\n const existing = candidatesToPush.get(change.type);\n let mergedChange = change;\n if (existing) {\n // merge in relationships\n mergedChange = mergeRelationships(existing, change);\n }\n candidatesToPush.set(change.type, mergedChange);\n }\n\n accumulatedPushes.length = 0;\n\n const types = [...candidatesToPush.keys()];\n /**\n * Based on the received `fanOutChangeType` only certain output types are valid.\n *\n * - remove must result in all removes\n * - add must result in all adds\n * - edit must result in add or removes or edits\n * - child must result in a single add or single remove or many child changes\n * - Single add or remove because the relationship will be unique to one exist check within the fan-out,fan-in sub-graph\n * - Many child changes because other operators may preserve the child change\n */\n switch (fanOutChangeType) {\n case 'remove':\n assert(\n types.length === 1 && types[0] === 'remove',\n 'Fan-in:remove expected all removes',\n );\n yield* output.push(\n addEmptyRelationships(must(candidatesToPush.get('remove'))),\n pusher,\n );\n return;\n case 'add':\n assert(\n types.length === 1 && types[0] === 'add',\n 'Fan-in:add expected all adds',\n );\n yield* output.push(\n addEmptyRelationships(must(candidatesToPush.get('add'))),\n pusher,\n );\n return;\n case 'edit': {\n assert(\n types.every(\n type => type === 'add' || type === 'remove' || type === 'edit',\n ),\n 'Fan-in:edit expected all adds, removes, or edits',\n );\n const addChange = candidatesToPush.get('add');\n const removeChange = candidatesToPush.get('remove');\n let editChange = candidatesToPush.get('edit');\n\n // If an `edit` is present, it supersedes `add` and `remove`\n // as it semantically represents both.\n if (editChange) {\n if (addChange) {\n editChange = mergeRelationships(editChange, addChange);\n }\n if (removeChange) {\n editChange = mergeRelationships(editChange, removeChange);\n }\n yield* output.push(addEmptyRelationships(editChange), pusher);\n return;\n }\n\n // If `edit` didn't make it through but both `add` and `remove` did,\n // convert back 
to an edit.\n //\n // When can this happen?\n //\n // EDIT old: a=1, new: a=2\n // |\n // FanOut\n // / \\\n // a=1 a=2\n // | |\n // remove add\n // \\ /\n // FanIn\n //\n // The left filter converts the edit into a remove.\n // The right filter converts the edit into an add.\n if (addChange && removeChange) {\n yield* output.push(\n addEmptyRelationships({\n type: 'edit',\n node: addChange.node,\n oldNode: removeChange.node,\n } as const),\n pusher,\n );\n return;\n }\n\n yield* output.push(\n addEmptyRelationships(must(addChange ?? removeChange)),\n pusher,\n );\n return;\n }\n case 'child': {\n assert(\n types.every(\n type =>\n type === 'add' || // exists can change child to add or remove\n type === 'remove' || // exists can change child to add or remove\n type === 'child', // other operators may preserve the child change\n ),\n 'Fan-in:child expected all adds, removes, or children',\n );\n assert(\n types.length <= 2,\n 'Fan-in:child expected at most 2 types on a child change from fan-out',\n );\n\n // If any branch preserved the original child change, that takes precedence over all other changes.\n const childChange = candidatesToPush.get('child');\n if (childChange) {\n yield* output.push(childChange, pusher);\n return;\n }\n\n const addChange = candidatesToPush.get('add');\n const removeChange = candidatesToPush.get('remove');\n\n assert(\n addChange === undefined || removeChange === undefined,\n 'Fan-in:child expected either add or remove, not both',\n );\n\n yield* output.push(\n addEmptyRelationships(must(addChange ?? removeChange)),\n pusher,\n );\n return;\n }\n default:\n fanOutChangeType satisfies never;\n }\n}\n\n/**\n * Puts relationships from `right` into `left` if they don't already exist in `left`.\n */\nexport function mergeRelationships(left: Change, right: Change): Change {\n // change types will always match\n // unless we have an edit on the left\n // then the right could be edit, add, or remove\n if (left.type === right.type) {\n switch (left.type) {\n case 'add': {\n return {\n type: 'add',\n node: {\n row: left.node.row,\n relationships: {\n ...right.node.relationships,\n ...left.node.relationships,\n },\n },\n };\n }\n case 'remove': {\n return {\n type: 'remove',\n node: {\n row: left.node.row,\n relationships: {\n ...right.node.relationships,\n ...left.node.relationships,\n },\n },\n };\n }\n case 'edit': {\n assert(right.type === 'edit');\n // merge edits into a single edit\n return {\n type: 'edit',\n node: {\n row: left.node.row,\n relationships: {\n ...right.node.relationships,\n ...left.node.relationships,\n },\n },\n oldNode: {\n row: left.oldNode.row,\n relationships: {\n ...right.oldNode.relationships,\n ...left.oldNode.relationships,\n },\n },\n };\n }\n }\n }\n\n // left is always an edit here\n assert(left.type === 'edit');\n switch (right.type) {\n case 'add': {\n return {\n type: 'edit',\n node: {\n ...left.node,\n relationships: {\n ...right.node.relationships,\n ...left.node.relationships,\n },\n },\n oldNode: left.oldNode,\n };\n }\n case 'remove': {\n return {\n type: 'edit',\n node: left.node,\n oldNode: {\n ...left.oldNode,\n relationships: {\n ...right.node.relationships,\n ...left.oldNode.relationships,\n },\n },\n };\n }\n }\n\n unreachable();\n}\n\nexport function makeAddEmptyRelationships(\n schema: SourceSchema,\n): (change: Change) => Change {\n return (change: Change): Change => {\n if (Object.keys(schema.relationships).length === 0) {\n return change;\n }\n\n switch (change.type) {\n case 'add':\n case 'remove': {\n const ret = 
{\n ...change,\n node: {\n ...change.node,\n relationships: {\n ...change.node.relationships,\n },\n },\n };\n\n mergeEmpty(ret.node.relationships, Object.keys(schema.relationships));\n\n return ret;\n }\n case 'edit': {\n const ret = {\n ...change,\n node: {\n ...change.node,\n relationships: {\n ...change.node.relationships,\n },\n },\n oldNode: {\n ...change.oldNode,\n relationships: {\n ...change.oldNode.relationships,\n },\n },\n };\n\n mergeEmpty(ret.node.relationships, Object.keys(schema.relationships));\n mergeEmpty(\n ret.oldNode.relationships,\n Object.keys(schema.relationships),\n );\n\n return ret;\n }\n case 'child':\n return change; // children only have relationships along the path to the change\n }\n };\n}\n\n/**\n * For each relationship in `schema` that does not exist\n * in `relationships`, add it with an empty stream.\n *\n * This modifies the `relationships` object in place.\n */\nexport function mergeEmpty(\n relationships: Record<string, () => Stream<Node | 'yield'>>,\n relationshipNames: string[],\n) {\n for (const relName of relationshipNames) {\n if (relationships[relName] === undefined) {\n relationships[relName] = () => emptyArray;\n }\n }\n}\n"],"names":["mergeRelationships"],"mappings":";;;AA8EO,UAAU,uBACf,mBACA,QACA,QACA,kBACAA,qBACA,uBACiB;AACjB,MAAI,kBAAkB,WAAW,GAAG;AAGlC;AAAA,EACF;AAGA,QAAM,uCAAuB,IAAA;AAC7B,aAAW,UAAU,mBAAmB;AACtC,QAAI,qBAAqB,WAAW,OAAO,SAAS,SAAS;AAC3D;AAAA,QACE,iBAAiB,IAAI,OAAO,IAAI,MAAM;AAAA,QACtC,MACE,qCAAqC,OAAO,IAAI;AAAA,MAAA;AAAA,IAEtD;AAEA,UAAM,WAAW,iBAAiB,IAAI,OAAO,IAAI;AACjD,QAAI,eAAe;AACnB,QAAI,UAAU;AAEZ,qBAAeA,oBAAmB,UAAU,MAAM;AAAA,IACpD;AACA,qBAAiB,IAAI,OAAO,MAAM,YAAY;AAAA,EAChD;AAEA,oBAAkB,SAAS;AAE3B,QAAM,QAAQ,CAAC,GAAG,iBAAiB,MAAM;AAWzC,UAAQ,kBAAA;AAAA,IACN,KAAK;AACH;AAAA,QACE,MAAM,WAAW,KAAK,MAAM,CAAC,MAAM;AAAA,QACnC;AAAA,MAAA;AAEF,aAAO,OAAO;AAAA,QACZ,sBAAsB,KAAK,iBAAiB,IAAI,QAAQ,CAAC,CAAC;AAAA,QAC1D;AAAA,MAAA;AAEF;AAAA,IACF,KAAK;AACH;AAAA,QACE,MAAM,WAAW,KAAK,MAAM,CAAC,MAAM;AAAA,QACnC;AAAA,MAAA;AAEF,aAAO,OAAO;AAAA,QACZ,sBAAsB,KAAK,iBAAiB,IAAI,KAAK,CAAC,CAAC;AAAA,QACvD;AAAA,MAAA;AAEF;AAAA,IACF,KAAK,QAAQ;AACX;AAAA,QACE,MAAM;AAAA,UACJ,CAAA,SAAQ,SAAS,SAAS,SAAS,YAAY,SAAS;AAAA,QAAA;AAAA,QAE1D;AAAA,MAAA;AAEF,YAAM,YAAY,iBAAiB,IAAI,KAAK;AAC5C,YAAM,eAAe,iBAAiB,IAAI,QAAQ;AAClD,UAAI,aAAa,iBAAiB,IAAI,MAAM;AAI5C,UAAI,YAAY;AACd,YAAI,WAAW;AACb,uBAAaA,oBAAmB,YAAY,SAAS;AAAA,QACvD;AACA,YAAI,cAAc;AAChB,uBAAaA,oBAAmB,YAAY,YAAY;AAAA,QAC1D;AACA,eAAO,OAAO,KAAK,sBAAsB,UAAU,GAAG,MAAM;AAC5D;AAAA,MACF;AAmBA,UAAI,aAAa,cAAc;AAC7B,eAAO,OAAO;AAAA,UACZ,sBAAsB;AAAA,YACpB,MAAM;AAAA,YACN,MAAM,UAAU;AAAA,YAChB,SAAS,aAAa;AAAA,UAAA,CACd;AAAA,UACV;AAAA,QAAA;AAEF;AAAA,MACF;AAEA,aAAO,OAAO;AAAA,QACZ,sBAAsB,KAAK,aAAa,YAAY,CAAC;AAAA,QACrD;AAAA,MAAA;AAEF;AAAA,IACF;AAAA,IACA,KAAK,SAAS;AACZ;AAAA,QACE,MAAM;AAAA,UACJ,UACE,SAAS;AAAA,UACT,SAAS;AAAA,UACT,SAAS;AAAA;AAAA,QAAA;AAAA,QAEb;AAAA,MAAA;AAEF;AAAA,QACE,MAAM,UAAU;AAAA,QAChB;AAAA,MAAA;AAIF,YAAM,cAAc,iBAAiB,IAAI,OAAO;AAChD,UAAI,aAAa;AACf,eAAO,OAAO,KAAK,aAAa,MAAM;AACtC;AAAA,MACF;AAEA,YAAM,YAAY,iBAAiB,IAAI,KAAK;AAC5C,YAAM,eAAe,iBAAiB,IAAI,QAAQ;AAElD;AAAA,QACE,cAAc,UAAa,iBAAiB;AAAA,QAC5C;AAAA,MAAA;AAGF,aAAO,OAAO;AAAA,QACZ,sBAAsB,KAAK,aAAa,YAAY,CAAC;AAAA,QACrD;AAAA,MAAA;AAEF;AAAA,IACF;AAAA,EAEE;AAEN;AAKO,SAAS,mBAAmB,MAAc,OAAuB;AAItE,MAAI,KAAK,SAAS,MAAM,MAAM;AAC5B,YAAQ,KAAK,MAAA;AAAA,MACX,KAAK,OAAO;AACV,eAAO;AAAA,UACL,MAAM;AAAA,UACN,MAAM;AAAA,YACJ,KAAK,KAAK,KAAK;AAAA,YACf,eAAe;AAAA,cACb,GAAG,MAAM,KAAK;AAAA,cACd,GAAG,KAAK,KAAK;AAAA,YAAA;AAAA,UACf;AAAA,QACF;AAAA,MAEJ;AAAA,MACA,KAAK,UAAU;AACb,eAAO;AAAA,UACL,MAAM;AAAA,UACN,MAAM;AAAA,YAC
J,KAAK,KAAK,KAAK;AAAA,YACf,eAAe;AAAA,cACb,GAAG,MAAM,KAAK;AAAA,cACd,GAAG,KAAK,KAAK;AAAA,YAAA;AAAA,UACf;AAAA,QACF;AAAA,MAEJ;AAAA,MACA,KAAK,QAAQ;AACX,eAAO,MAAM,SAAS,MAAM;AAE5B,eAAO;AAAA,UACL,MAAM;AAAA,UACN,MAAM;AAAA,YACJ,KAAK,KAAK,KAAK;AAAA,YACf,eAAe;AAAA,cACb,GAAG,MAAM,KAAK;AAAA,cACd,GAAG,KAAK,KAAK;AAAA,YAAA;AAAA,UACf;AAAA,UAEF,SAAS;AAAA,YACP,KAAK,KAAK,QAAQ;AAAA,YAClB,eAAe;AAAA,cACb,GAAG,MAAM,QAAQ;AAAA,cACjB,GAAG,KAAK,QAAQ;AAAA,YAAA;AAAA,UAClB;AAAA,QACF;AAAA,MAEJ;AAAA,IAAA;AAAA,EAEJ;AAGA,SAAO,KAAK,SAAS,MAAM;AAC3B,UAAQ,MAAM,MAAA;AAAA,IACZ,KAAK,OAAO;AACV,aAAO;AAAA,QACL,MAAM;AAAA,QACN,MAAM;AAAA,UACJ,GAAG,KAAK;AAAA,UACR,eAAe;AAAA,YACb,GAAG,MAAM,KAAK;AAAA,YACd,GAAG,KAAK,KAAK;AAAA,UAAA;AAAA,QACf;AAAA,QAEF,SAAS,KAAK;AAAA,MAAA;AAAA,IAElB;AAAA,IACA,KAAK,UAAU;AACb,aAAO;AAAA,QACL,MAAM;AAAA,QACN,MAAM,KAAK;AAAA,QACX,SAAS;AAAA,UACP,GAAG,KAAK;AAAA,UACR,eAAe;AAAA,YACb,GAAG,MAAM,KAAK;AAAA,YACd,GAAG,KAAK,QAAQ;AAAA,UAAA;AAAA,QAClB;AAAA,MACF;AAAA,IAEJ;AAAA,EAAA;AAGF,cAAA;AACF;AAEO,SAAS,0BACd,QAC4B;AAC5B,SAAO,CAAC,WAA2B;AACjC,QAAI,OAAO,KAAK,OAAO,aAAa,EAAE,WAAW,GAAG;AAClD,aAAO;AAAA,IACT;AAEA,YAAQ,OAAO,MAAA;AAAA,MACb,KAAK;AAAA,MACL,KAAK,UAAU;AACb,cAAM,MAAM;AAAA,UACV,GAAG;AAAA,UACH,MAAM;AAAA,YACJ,GAAG,OAAO;AAAA,YACV,eAAe;AAAA,cACb,GAAG,OAAO,KAAK;AAAA,YAAA;AAAA,UACjB;AAAA,QACF;AAGF,mBAAW,IAAI,KAAK,eAAe,OAAO,KAAK,OAAO,aAAa,CAAC;AAEpE,eAAO;AAAA,MACT;AAAA,MACA,KAAK,QAAQ;AACX,cAAM,MAAM;AAAA,UACV,GAAG;AAAA,UACH,MAAM;AAAA,YACJ,GAAG,OAAO;AAAA,YACV,eAAe;AAAA,cACb,GAAG,OAAO,KAAK;AAAA,YAAA;AAAA,UACjB;AAAA,UAEF,SAAS;AAAA,YACP,GAAG,OAAO;AAAA,YACV,eAAe;AAAA,cACb,GAAG,OAAO,QAAQ;AAAA,YAAA;AAAA,UACpB;AAAA,QACF;AAGF,mBAAW,IAAI,KAAK,eAAe,OAAO,KAAK,OAAO,aAAa,CAAC;AACpE;AAAA,UACE,IAAI,QAAQ;AAAA,UACZ,OAAO,KAAK,OAAO,aAAa;AAAA,QAAA;AAGlC,eAAO;AAAA,MACT;AAAA,MACA,KAAK;AACH,eAAO;AAAA,IAAA;AAAA,EAEb;AACF;AAQO,SAAS,WACd,eACA,mBACA;AACA,aAAW,WAAW,mBAAmB;AACvC,QAAI,cAAc,OAAO,MAAM,QAAW;AACxC,oBAAc,OAAO,IAAI,MAAM;AAAA,IACjC;AAAA,EACF;AACF;"}
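The fan-in source embedded in the sourcemap above explains how changes accumulated from each fan-out branch are collapsed: an `edit` absorbs any `add`/`remove` produced from it, and an `add` plus a `remove` produced from the same `edit` are recombined into a single `edit`. A minimal sketch of that recombination step, using simplified stand-in types (`SimpleChange`, `collapseEditBranches` are illustrative names, not the package's actual `Change`/`Node` definitions):

// Simplified stand-ins; the real Change/Node types live in zql/src/ivm/change.ts.
type Row = Record<string, unknown>;
type SimpleChange =
  | {type: 'add'; node: {row: Row}}
  | {type: 'remove'; node: {row: Row}}
  | {type: 'edit'; node: {row: Row}; oldNode: {row: Row}};

// Collapse the per-branch results of a fanned-out edit back into one change.
// Mirrors the 'edit' case described above: edit wins; add + remove recombine.
function collapseEditBranches(changes: SimpleChange[]): SimpleChange | undefined {
  const byType = new Map<SimpleChange['type'], SimpleChange>();
  for (const c of changes) byType.set(c.type, c);

  const edit = byType.get('edit');
  if (edit) return edit; // an edit semantically covers both add and remove

  const add = byType.get('add');
  const remove = byType.get('remove');
  if (add && remove && add.type === 'add' && remove.type === 'remove') {
    // One branch saw the new row enter, another saw the old row leave:
    // together they are the original edit.
    return {type: 'edit', node: add.node, oldNode: remove.node};
  }
  return add ?? remove; // only one branch produced a change (or none did)
}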
@@ -19,6 +19,6 @@ export declare class Skip implements Operator {
     fetch(req: FetchRequest): Stream<Node | 'yield'>;
     setOutput(output: Output): void;
     destroy(): void;
-    push(change: Change):
+    push(change: Change): Stream<'yield'>;
 }
 //# sourceMappingURL=skip.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"skip.d.ts","sourceRoot":"","sources":["../../../../../zql/src/ivm/skip.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,GAAG,EAAC,MAAM,oCAAoC,CAAC;AAC5D,OAAO,KAAK,EAAY,MAAM,EAA4B,MAAM,aAAa,CAAC;AAC9E,OAAO,KAAK,EAAa,IAAI,EAAC,MAAM,WAAW,CAAC;AAEhD,OAAO,EAEL,KAAK,YAAY,EACjB,KAAK,KAAK,EACV,KAAK,QAAQ,EACb,KAAK,MAAM,EAEZ,MAAM,eAAe,CAAC;AACvB,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,aAAa,CAAC;AAC9C,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,aAAa,CAAC;AAExC,MAAM,MAAM,KAAK,GAAG;IAClB,GAAG,EAAE,GAAG,CAAC;IACT,SAAS,EAAE,OAAO,CAAC;CACpB,CAAC;AAEF;;;GAGG;AACH,qBAAa,IAAK,YAAW,QAAQ;;gBAOvB,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK;IAOtC,SAAS,IAAI,YAAY;IAIxB,KAAK,CAAC,GAAG,EAAE,YAAY,GAAG,MAAM,CAAC,IAAI,GAAG,OAAO,CAAC;IAsBjD,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI;IAI/B,OAAO,IAAI,IAAI;
+
{"version":3,"file":"skip.d.ts","sourceRoot":"","sources":["../../../../../zql/src/ivm/skip.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,GAAG,EAAC,MAAM,oCAAoC,CAAC;AAC5D,OAAO,KAAK,EAAY,MAAM,EAA4B,MAAM,aAAa,CAAC;AAC9E,OAAO,KAAK,EAAa,IAAI,EAAC,MAAM,WAAW,CAAC;AAEhD,OAAO,EAEL,KAAK,YAAY,EACjB,KAAK,KAAK,EACV,KAAK,QAAQ,EACb,KAAK,MAAM,EAEZ,MAAM,eAAe,CAAC;AACvB,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,aAAa,CAAC;AAC9C,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,aAAa,CAAC;AAExC,MAAM,MAAM,KAAK,GAAG;IAClB,GAAG,EAAE,GAAG,CAAC;IACT,SAAS,EAAE,OAAO,CAAC;CACpB,CAAC;AAEF;;;GAGG;AACH,qBAAa,IAAK,YAAW,QAAQ;;gBAOvB,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK;IAOtC,SAAS,IAAI,YAAY;IAIxB,KAAK,CAAC,GAAG,EAAE,YAAY,GAAG,MAAM,CAAC,IAAI,GAAG,OAAO,CAAC;IAsBjD,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI;IAI/B,OAAO,IAAI,IAAI;IASd,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,CAAC,OAAO,CAAC;CA+EvC"}
package/out/zql/src/ivm/skip.js
CHANGED
@@ -45,14 +45,19 @@ class Skip {
     const cmp = this.#comparator(this.#bound.row, row);
     return cmp < 0 || cmp === 0 && !this.#bound.exclusive;
   }
-  push(change) {
+  *push(change) {
     const shouldBePresent = (row) => this.#shouldBePresent(row);
     if (change.type === "edit") {
-      maybeSplitAndPushEditChange(
+      yield* maybeSplitAndPushEditChange(
+        change,
+        shouldBePresent,
+        this.#output,
+        this
+      );
       return;
     }
     if (shouldBePresent(change.node.row)) {
-      this.#output.push(change, this);
+      yield* this.#output.push(change, this);
     }
   }
   #getStart(req) {
@@ -1 +1 @@
-
{"version":3,"file":"skip.js","sources":["../../../../../zql/src/ivm/skip.ts"],"sourcesContent":["import type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {AddChange, Change, ChildChange, RemoveChange} from './change.ts';\nimport type {Comparator, Node} from './data.ts';\nimport {maybeSplitAndPushEditChange} from './maybe-split-and-push-edit-change.ts';\nimport {\n throwOutput,\n type FetchRequest,\n type Input,\n type Operator,\n type Output,\n type Start,\n} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\n\nexport type Bound = {\n row: Row;\n exclusive: boolean;\n};\n\n/**\n * Skip sets the start position for the pipeline. No rows before the bound will\n * be output.\n */\nexport class Skip implements Operator {\n readonly #input: Input;\n readonly #bound: Bound;\n readonly #comparator: Comparator;\n\n #output: Output = throwOutput;\n\n constructor(input: Input, bound: Bound) {\n this.#input = input;\n this.#bound = bound;\n this.#comparator = input.getSchema().compareRows;\n input.setOutput(this);\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n *fetch(req: FetchRequest): Stream<Node | 'yield'> {\n const start = this.#getStart(req);\n if (start === 'empty') {\n return;\n }\n const nodes = this.#input.fetch({...req, start});\n if (!req.reverse) {\n yield* nodes;\n return;\n }\n for (const node of nodes) {\n if (node === 'yield') {\n yield node;\n continue;\n }\n if (!this.#shouldBePresent(node.row)) {\n return;\n }\n yield node;\n }\n }\n\n setOutput(output: Output): void {\n this.#output = output;\n }\n\n destroy(): void {\n this.#input.destroy();\n }\n\n #shouldBePresent(row: Row): boolean {\n const cmp = this.#comparator(this.#bound.row, row);\n return cmp < 0 || (cmp === 0 && !this.#bound.exclusive);\n }\n\n push(change: Change):
+
{"version":3,"file":"skip.js","sources":["../../../../../zql/src/ivm/skip.ts"],"sourcesContent":["import type {Row} from '../../../zero-protocol/src/data.ts';\nimport type {AddChange, Change, ChildChange, RemoveChange} from './change.ts';\nimport type {Comparator, Node} from './data.ts';\nimport {maybeSplitAndPushEditChange} from './maybe-split-and-push-edit-change.ts';\nimport {\n throwOutput,\n type FetchRequest,\n type Input,\n type Operator,\n type Output,\n type Start,\n} from './operator.ts';\nimport type {SourceSchema} from './schema.ts';\nimport type {Stream} from './stream.ts';\n\nexport type Bound = {\n row: Row;\n exclusive: boolean;\n};\n\n/**\n * Skip sets the start position for the pipeline. No rows before the bound will\n * be output.\n */\nexport class Skip implements Operator {\n readonly #input: Input;\n readonly #bound: Bound;\n readonly #comparator: Comparator;\n\n #output: Output = throwOutput;\n\n constructor(input: Input, bound: Bound) {\n this.#input = input;\n this.#bound = bound;\n this.#comparator = input.getSchema().compareRows;\n input.setOutput(this);\n }\n\n getSchema(): SourceSchema {\n return this.#input.getSchema();\n }\n\n *fetch(req: FetchRequest): Stream<Node | 'yield'> {\n const start = this.#getStart(req);\n if (start === 'empty') {\n return;\n }\n const nodes = this.#input.fetch({...req, start});\n if (!req.reverse) {\n yield* nodes;\n return;\n }\n for (const node of nodes) {\n if (node === 'yield') {\n yield node;\n continue;\n }\n if (!this.#shouldBePresent(node.row)) {\n return;\n }\n yield node;\n }\n }\n\n setOutput(output: Output): void {\n this.#output = output;\n }\n\n destroy(): void {\n this.#input.destroy();\n }\n\n #shouldBePresent(row: Row): boolean {\n const cmp = this.#comparator(this.#bound.row, row);\n return cmp < 0 || (cmp === 0 && !this.#bound.exclusive);\n }\n\n *push(change: Change): Stream<'yield'> {\n const shouldBePresent = (row: Row) => this.#shouldBePresent(row);\n if (change.type === 'edit') {\n yield* maybeSplitAndPushEditChange(\n change,\n shouldBePresent,\n this.#output,\n this,\n );\n return;\n }\n\n change satisfies AddChange | RemoveChange | ChildChange;\n\n if (shouldBePresent(change.node.row)) {\n yield* this.#output.push(change, this);\n }\n }\n\n #getStart(req: FetchRequest): Start | undefined | 'empty' {\n const boundStart = {\n row: this.#bound.row,\n basis: this.#bound.exclusive ? 'after' : 'at',\n } as const;\n\n if (!req.start) {\n if (req.reverse) {\n return undefined;\n }\n return boundStart;\n }\n\n const cmp = this.#comparator(this.#bound.row, req.start.row);\n\n if (!req.reverse) {\n // The skip bound is after the requested bound. The requested bound cannot\n // be relevant because even if it was basis: 'after', the skip bound is\n // itself after the requested bound. Return the skip bound.\n if (cmp > 0) {\n return boundStart;\n }\n\n // The skip bound and requested bound are equal. If either is exclusive,\n // return that bound with exclusive. 
Otherwise, return the skip bound.\n if (cmp === 0) {\n if (this.#bound.exclusive || req.start.basis === 'after') {\n return {\n row: this.#bound.row,\n basis: 'after',\n };\n }\n return boundStart;\n }\n\n return req.start;\n }\n\n req.reverse satisfies true;\n\n // bound is after the start, but request is for reverse so results\n // must be empty\n if (cmp > 0) {\n return 'empty';\n }\n\n if (cmp === 0) {\n // if both are inclusive, the result can be the single row at bound\n // return it as start\n if (!this.#bound.exclusive && req.start.basis === 'at') {\n return boundStart;\n }\n // otherwise the results must be empty, one or both are exclusive\n // in opposite directions\n return 'empty';\n }\n\n // bound is before the start, return start\n return req.start;\n }\n}\n"],"names":[],"mappings":";;AAwBO,MAAM,KAAyB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EAET,UAAkB;AAAA,EAElB,YAAY,OAAc,OAAc;AACtC,SAAK,SAAS;AACd,SAAK,SAAS;AACd,SAAK,cAAc,MAAM,UAAA,EAAY;AACrC,UAAM,UAAU,IAAI;AAAA,EACtB;AAAA,EAEA,YAA0B;AACxB,WAAO,KAAK,OAAO,UAAA;AAAA,EACrB;AAAA,EAEA,CAAC,MAAM,KAA2C;AAChD,UAAM,QAAQ,KAAK,UAAU,GAAG;AAChC,QAAI,UAAU,SAAS;AACrB;AAAA,IACF;AACA,UAAM,QAAQ,KAAK,OAAO,MAAM,EAAC,GAAG,KAAK,OAAM;AAC/C,QAAI,CAAC,IAAI,SAAS;AAChB,aAAO;AACP;AAAA,IACF;AACA,eAAW,QAAQ,OAAO;AACxB,UAAI,SAAS,SAAS;AACpB,cAAM;AACN;AAAA,MACF;AACA,UAAI,CAAC,KAAK,iBAAiB,KAAK,GAAG,GAAG;AACpC;AAAA,MACF;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,UAAU,QAAsB;AAC9B,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,UAAgB;AACd,SAAK,OAAO,QAAA;AAAA,EACd;AAAA,EAEA,iBAAiB,KAAmB;AAClC,UAAM,MAAM,KAAK,YAAY,KAAK,OAAO,KAAK,GAAG;AACjD,WAAO,MAAM,KAAM,QAAQ,KAAK,CAAC,KAAK,OAAO;AAAA,EAC/C;AAAA,EAEA,CAAC,KAAK,QAAiC;AACrC,UAAM,kBAAkB,CAAC,QAAa,KAAK,iBAAiB,GAAG;AAC/D,QAAI,OAAO,SAAS,QAAQ;AAC1B,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA,KAAK;AAAA,QACL;AAAA,MAAA;AAEF;AAAA,IACF;AAIA,QAAI,gBAAgB,OAAO,KAAK,GAAG,GAAG;AACpC,aAAO,KAAK,QAAQ,KAAK,QAAQ,IAAI;AAAA,IACvC;AAAA,EACF;AAAA,EAEA,UAAU,KAAgD;AACxD,UAAM,aAAa;AAAA,MACjB,KAAK,KAAK,OAAO;AAAA,MACjB,OAAO,KAAK,OAAO,YAAY,UAAU;AAAA,IAAA;AAG3C,QAAI,CAAC,IAAI,OAAO;AACd,UAAI,IAAI,SAAS;AACf,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT;AAEA,UAAM,MAAM,KAAK,YAAY,KAAK,OAAO,KAAK,IAAI,MAAM,GAAG;AAE3D,QAAI,CAAC,IAAI,SAAS;AAIhB,UAAI,MAAM,GAAG;AACX,eAAO;AAAA,MACT;AAIA,UAAI,QAAQ,GAAG;AACb,YAAI,KAAK,OAAO,aAAa,IAAI,MAAM,UAAU,SAAS;AACxD,iBAAO;AAAA,YACL,KAAK,KAAK,OAAO;AAAA,YACjB,OAAO;AAAA,UAAA;AAAA,QAEX;AACA,eAAO;AAAA,MACT;AAEA,aAAO,IAAI;AAAA,IACb;AAEA,QAAI;AAIJ,QAAI,MAAM,GAAG;AACX,aAAO;AAAA,IACT;AAEA,QAAI,QAAQ,GAAG;AAGb,UAAI,CAAC,KAAK,OAAO,aAAa,IAAI,MAAM,UAAU,MAAM;AACtD,eAAO;AAAA,MACT;AAGA,aAAO;AAAA,IACT;AAGA,WAAO,IAAI;AAAA,EACb;AACF;"}
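As the skip.js hunk above shows, `Skip.push` is now a generator returning `Stream<'yield'>`: it forwards `'yield'` markers from the downstream output instead of pushing synchronously, so the push only takes effect while the returned stream is iterated. A minimal sketch of how a caller might drive it (`applyPush`, `skip`, and `change` here are hypothetical, not part of the package API):

// Hypothetical caller; `skip` is an already-wired Skip operator and `change`
// is a Change destined for it. Nothing happens until the stream is consumed.
function applyPush(
  skip: {push(change: unknown): Iterable<'yield'>},
  change: unknown,
): void {
  for (const marker of skip.push(change)) {
    // Each 'yield' is a hint that the caller may pause here for
    // responsiveness; this sketch simply keeps iterating.
    marker satisfies 'yield';
  }
}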
@@ -3,6 +3,7 @@ import type { Row } from '../../../zero-protocol/src/data.ts';
 import type { TableSchema } from '../../../zero-types/src/schema.ts';
 import type { DebugDelegate } from '../builder/debug-delegate.ts';
 import type { Input } from './operator.ts';
+import type { Stream } from './stream.ts';
 export type SourceChangeAdd = {
     type: 'add';
     row: Row;
@@ -54,18 +55,25 @@ export interface Source {
     connect(sort: Ordering, filters?: Condition, splitEditKeys?: Set<string>, debug?: DebugDelegate): SourceInput;
     /**
      * Pushes a change into the source and into all connected outputs.
+     *
+     * The returned stream can yield 'yield' to yield control to the caller
+     * for purposes of responsiveness.
+     *
+     * Once the stream is exhausted, the change will have been pushed into all
+     * connected inputs and committed to the source.
      */
-    push(change: SourceChange):
+    push(change: SourceChange): Stream<'yield'>;
     /**
      * Pushes a change into the source.
-     * Iterating the returned iterator will push the
-     * change into one connected input at a time.
      *
-     *
-     *
-     *
+     * Iterating the returned stream will push the change into one connected input
+     * at a time, yielding `undefined` between each, and yielding `'yield'` to
+     * yield control to the caller for purposes of responsiveness.
+     *
+     * Once the stream is exhausted, the change will have been pushed
+     * into all connected inputs and committed to the source.
      */
-    genPush(change: SourceChange):
+    genPush(change: SourceChange): Stream<'yield' | undefined>;
 }
 export interface SourceInput extends Input {
     readonly fullyAppliedFilters: boolean;
@@ -1 +1 @@
-
{"version":3,"file":"source.d.ts","sourceRoot":"","sources":["../../../../../zql/src/ivm/source.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,SAAS,EAAE,QAAQ,EAAC,MAAM,mCAAmC,CAAC;AAC3E,OAAO,KAAK,EAAC,GAAG,EAAC,MAAM,oCAAoC,CAAC;AAC5D,OAAO,KAAK,EAAC,WAAW,EAAC,MAAM,mCAAmC,CAAC;AACnE,OAAO,KAAK,EAAC,aAAa,EAAC,MAAM,8BAA8B,CAAC;AAChE,OAAO,KAAK,EAAC,KAAK,EAAC,MAAM,eAAe,CAAC;
+
{"version":3,"file":"source.d.ts","sourceRoot":"","sources":["../../../../../zql/src/ivm/source.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,SAAS,EAAE,QAAQ,EAAC,MAAM,mCAAmC,CAAC;AAC3E,OAAO,KAAK,EAAC,GAAG,EAAC,MAAM,oCAAoC,CAAC;AAC5D,OAAO,KAAK,EAAC,WAAW,EAAC,MAAM,mCAAmC,CAAC;AACnE,OAAO,KAAK,EAAC,aAAa,EAAC,MAAM,8BAA8B,CAAC;AAChE,OAAO,KAAK,EAAC,KAAK,EAAC,MAAM,eAAe,CAAC;AACzC,OAAO,KAAK,EAAC,MAAM,EAAC,MAAM,aAAa,CAAC;AAExC,MAAM,MAAM,eAAe,GAAG;IAC5B,IAAI,EAAE,KAAK,CAAC;IACZ,GAAG,EAAE,GAAG,CAAC;CACV,CAAC;AAEF,MAAM,MAAM,kBAAkB,GAAG;IAC/B,IAAI,EAAE,QAAQ,CAAC;IACf,GAAG,EAAE,GAAG,CAAC;CACV,CAAC;AAEF,MAAM,MAAM,gBAAgB,GAAG;IAC7B,IAAI,EAAE,MAAM,CAAC;IACb,GAAG,EAAE,GAAG,CAAC;IACT,MAAM,EAAE,GAAG,CAAC;CACb,CAAC;AAEF,MAAM,MAAM,YAAY,GACpB,eAAe,GACf,kBAAkB,GAClB,gBAAgB,CAAC;AAErB;;;;;;;;;;;;;;;;;;;GAmBG;AACH,MAAM,WAAW,MAAM;IACrB,IAAI,WAAW,IAAI,WAAW,CAAC;IAC/B;;;;;;;;;;;OAWG;IACH,OAAO,CACL,IAAI,EAAE,QAAQ,EACd,OAAO,CAAC,EAAE,SAAS,EACnB,aAAa,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC,EAC3B,KAAK,CAAC,EAAE,aAAa,GACpB,WAAW,CAAC;IAEf;;;;;;;;OAQG;IACH,IAAI,CAAC,MAAM,EAAE,YAAY,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC;IAE5C;;;;;;;;;OASG;IACH,OAAO,CAAC,MAAM,EAAE,YAAY,GAAG,MAAM,CAAC,OAAO,GAAG,SAAS,CAAC,CAAC;CAC5D;AAED,MAAM,WAAW,WAAY,SAAQ,KAAK;IACxC,QAAQ,CAAC,mBAAmB,EAAE,OAAO,CAAC;CACvC"}
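The updated `Source` docs above describe two consumption modes: `push` yields only `'yield'` markers, while `genPush` additionally yields `undefined` between connected inputs so a caller can interleave other work. A hedged sketch of a cooperative consumer under those assumptions (`SourceLike`, `pushCooperatively`, and `shouldYield` are illustrative names, not part of the package API):

// Illustrative types matching the declarations above.
type Stream<T> = Iterable<T>;
interface SourceLike<SourceChange> {
  push(change: SourceChange): Stream<'yield'>;
  genPush(change: SourceChange): Stream<'yield' | undefined>;
}

// Drain genPush, optionally pausing between inputs. `shouldYield` is a
// hypothetical scheduling hook; the package itself decides this differently.
async function pushCooperatively<C>(
  source: SourceLike<C>,
  change: C,
  shouldYield: () => boolean,
): Promise<void> {
  for (const step of source.genPush(change)) {
    if (step === 'yield' || shouldYield()) {
      // Give the event loop a turn before pushing into the next input.
      await new Promise<void>(resolve => setTimeout(resolve, 0));
    }
  }
  // Once the loop completes, the change has been pushed into all connected
  // inputs and committed to the source, per the doc comment above.
}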
@@ -8,4 +8,6 @@
 export type Stream<T> = Iterable<T>;
 export declare function take<T>(stream: Stream<T>, limit: number): Stream<T>;
 export declare function first<T>(stream: Stream<T>): T | undefined;
+export declare function consume<T>(stream: Stream<T>): void;
+export declare function drainGenerator<Yield, Return>(gen: Generator<Yield, Return, unknown>): Return;
 //# sourceMappingURL=stream.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"stream.d.ts","sourceRoot":"","sources":["../../../../../zql/src/ivm/stream.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AACH,MAAM,MAAM,MAAM,CAAC,CAAC,IAAI,QAAQ,CAAC,CAAC,CAAC,CAAC;AAEpC,wBAAiB,IAAI,CAAC,CAAC,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC,CAWpE;AAED,wBAAgB,KAAK,CAAC,CAAC,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,SAAS,CAKzD"}
+
{"version":3,"file":"stream.d.ts","sourceRoot":"","sources":["../../../../../zql/src/ivm/stream.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AACH,MAAM,MAAM,MAAM,CAAC,CAAC,IAAI,QAAQ,CAAC,CAAC,CAAC,CAAC;AAEpC,wBAAiB,IAAI,CAAC,CAAC,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC,CAWpE;AAED,wBAAgB,KAAK,CAAC,CAAC,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,SAAS,CAKzD;AAED,wBAAgB,OAAO,CAAC,CAAC,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,CAAC,QAE3C;AAED,wBAAgB,cAAc,CAAC,KAAK,EAAE,MAAM,EAC1C,GAAG,EAAE,SAAS,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,CAAC,GACrC,MAAM,CAMR"}
@@ -1,23 +1,14 @@
-function* take(stream, limit) {
-  if (limit < 1) {
-    return;
-  }
-  let count = 0;
-  for (const v of stream) {
-    yield v;
-    if (++count === limit) {
-      break;
-    }
-  }
-}
 function first(stream) {
   const it = stream[Symbol.iterator]();
   const { value } = it.next();
   it.return?.();
   return value;
 }
+function consume(stream) {
+  [...stream];
+}
 export {
-
-
+  consume,
+  first
 };
 //# sourceMappingURL=stream.js.map
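The stream.js hunk above adds `consume`, which simply exhausts a stream, and stream.d.ts additionally declares `drainGenerator`, which by its signature runs a generator to completion and returns its return value (its implementation is not shown in this diff). A hedged sketch, assuming only the signatures above, of how the two could pair with the new generator-based `push`:

// A typed rendering of consume as it appears in stream.js above:
// exhaust a stream, discarding its values.
function consume<T>(stream: Iterable<T>): void {
  [...stream];
}

// A plausible shape for drainGenerator based solely on its declaration;
// not necessarily the package's implementation.
function drainGenerator<Yield, Return>(
  gen: Generator<Yield, Return, unknown>,
): Return {
  for (;;) {
    const result = gen.next();
    if (result.done) {
      return result.value;
    }
  }
}

// Example: apply a push whose effects only happen while iterating.
// `source` and `change` are placeholders for a connected Source and a SourceChange.
declare const source: {push(change: unknown): Iterable<'yield'>};
declare const change: unknown;
consume(source.push(change)); // the push is fully applied once the stream is drained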